Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions docs/src/content/docs/development/structure.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ lib/
├── data/ # Data layer
│ ├── model/ # Data models by feature
│ ├── provider/ # Riverpod providers
│ └── store/ # Local storage (Hive)
│ └── store/ # Local storage (SQLite/SQLCipher via SqliteStore)
├── view/ # UI layer
│ ├── page/ # Main pages
│ └── widget/ # Reusable widgets
Expand Down Expand Up @@ -52,7 +52,7 @@ Riverpod providers for dependency injection and state management:

### Stores (`lib/data/store/`)

Hive-based local storage:
SqliteStore-based local storage (SQLCipher):

- Server storage
- Settings storage
Expand Down
4 changes: 2 additions & 2 deletions docs/src/content/docs/zh/development/structure.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ lib/
├── data/ # 数据层
│ ├── model/ # 按功能划分的数据模型
│ ├── provider/ # Riverpod provider
│ └── store/ # 本地存储 (Hive)
│ └── store/ # 本地存储 (SqliteStore/SQLCipher)
├── view/ # UI 层
│ ├── page/ # 主要页面
│ └── widget/ # 可复用组件
Expand Down Expand Up @@ -52,7 +52,7 @@ lib/

### 存储 (`lib/data/store/`)

基于 Hive 的本地存储:
基于 SqliteStore(SQLCipher)的本地存储:

- 服务器存储
- 设置存储
Expand Down
6 changes: 4 additions & 2 deletions lib/core/utils/server.dart
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ Future<SSHClient> genClient(
// Main thread
final jumpId = spi.jumpId;
if (jumpId != null) {
return jumpSpisById?[jumpId] ?? Stores.server.box.get(jumpId);
return jumpSpisById?[jumpId] ?? Stores.server.fetchOne(jumpId);
}
}();
if (jumpSpi_ != null) {
Expand Down Expand Up @@ -369,7 +369,7 @@ Future<void> ensureKnownHostKey(

final jumpId = spi.jumpId;
final jumpSpi = jumpId != null
? (jumpSpisById?[jumpId] ?? Stores.server.box.get(jumpId))
? (jumpSpisById?[jumpId] ?? Stores.server.fetchOne(jumpId))
: null;
if (jumpSpi != null && !_hasKnownHostFingerprintForSpi(jumpSpi, cache)) {
await ensureKnownHostKey(
Expand All @@ -388,6 +388,8 @@ Future<void> ensureKnownHostKey(
timeout: timeout,
onKeyboardInteractive: onKeyboardInteractive,
knownHostFingerprints: cache,
jumpSpisById: jumpSpisById,
visitedServerIds: chainVisitedServerIds,
);

try {
Expand Down
329 changes: 329 additions & 0 deletions lib/data/migration/hive_to_sqlite_migrator.dart
Original file line number Diff line number Diff line change
@@ -0,0 +1,329 @@
import 'dart:convert';
import 'dart:io';

import 'package:fl_lib/fl_lib.dart';
import 'package:hive_ce_flutter/hive_flutter.dart';
import 'package:path_provider/path_provider.dart';
import 'package:server_box/data/model/server/connection_stat.dart';
import 'package:server_box/data/model/server/private_key_info.dart';
import 'package:server_box/data/model/server/server_private_info.dart';
import 'package:server_box/data/model/server/snippet.dart';
import 'package:server_box/data/res/build_data.dart';
import 'package:server_box/data/res/store.dart';
import 'package:server_box/hive/hive_registrar.g.dart';

/// Performs a one-time, resumable migration of legacy Hive boxes into the
/// new SQLCipher-backed `SqliteStore`s.
///
/// Design:
/// - Each box records its own "migrated" flag in [PrefStore], so a partially
///   failed run retries only the remaining boxes on the next launch.
/// - Legacy Hive files are only archived (renamed with [_legacySuffix]) once
///   every box has migrated successfully; until then they are kept so the
///   retry can re-read them.
/// - All failures are logged and swallowed: migration is best-effort and must
///   never prevent app startup.
abstract final class HiveToSqliteMigrator {
  /// Global "everything done" flag, plus the build number it was set at.
  static const _migratedFlagKey = 'sqlite_migrated_v1';
  static const _migratedBuildKey = 'sqlite_migrated_build';

  /// Per-box flag prefix; the box name is appended (see [_migratedBoxFlagKey]).
  static const _migratedBoxFlagPrefix = 'sqlite_migrated_box_';

  /// Suffix appended to legacy Hive files when archiving them after success.
  static const _legacySuffix = '.legacy.bak';

  /// Keys under which the legacy Hive encryption key may have been persisted.
  /// The `flutter.`-prefixed variant covers the raw SharedPreferences naming.
  static const _legacyHiveEncKey = 'hive_key';
  static const _legacyHiveEncKeyCompat = 'flutter.$_legacyHiveEncKey';

  /// All legacy Hive box names that may exist on disk.
  static const _boxNames = <String>[
    'setting',
    'server',
    'docker',
    'key',
    'snippet',
    'history',
    'connection_stats',
  ];

  /// Entry point. Safe to call on every launch; returns immediately once the
  /// global migrated flag has been set.
  static Future<void> migrateIfNeeded() async {
    final migrated = PrefStore.shared.get<bool>(_migratedFlagKey) ?? false;
    if (migrated) return;

    try {
      final path = await _legacyHivePath();
      final hasLegacy = _hasLegacyFiles(path);
      if (!hasLegacy) {
        // Fresh install (or files already gone): nothing to copy, just mark
        // every box done so the global flag can be set.
        await _markAllBoxesMigrated();
        await _setMigratedFlag();
        return;
      }

      await Hive.initFlutter();
      Hive.registerAdapters();

      // Each box uses a normalizer that converts typed Hive values into
      // JSON-compatible data before writing them to the SQLite store.
      await _migrateOne(
        boxName: 'setting',
        target: Stores.setting,
        path: path,
        normalize: _normalizeGeneric,
      );
      await _migrateOne(
        boxName: 'server',
        target: Stores.server,
        path: path,
        normalize: _normalizeSpi,
      );
      await _migrateOne(
        boxName: 'docker',
        target: Stores.container,
        path: path,
        normalize: _normalizeGeneric,
      );
      await _migrateOne(
        boxName: 'key',
        target: Stores.key,
        path: path,
        normalize: _normalizePrivateKey,
      );
      await _migrateOne(
        boxName: 'snippet',
        target: Stores.snippet,
        path: path,
        normalize: _normalizeSnippet,
      );
      await _migrateOne(
        boxName: 'history',
        target: Stores.history,
        path: path,
        normalize: _normalizeGeneric,
      );
      await _migrateOne(
        boxName: 'connection_stats',
        target: Stores.connectionStats,
        path: path,
        normalize: _normalizeConnectionStat,
      );

      if (!_allBoxesMigrated()) {
        // Keep the legacy files in place so the failed boxes can be retried.
        Loggers.app.warning(
          'Hive to SQLite migration was partially completed. '
          'Will retry next launch without archiving legacy hive files.',
        );
        return;
      }

      await _archiveLegacyFiles(path);
      await _setMigratedFlag();
    } catch (e, s) {
      Loggers.app.warning(
        'Hive to SQLite migration aborted due to unexpected error',
        e,
        s,
      );
    }
  }

  /// Migrates a single Hive box into [target].
  ///
  /// Returns `true` when the box is migrated (now, previously, or because no
  /// legacy file exists) and `false` on failure. Only string keys are copied;
  /// values that [normalize] maps to `null` are skipped.
  static Future<bool> _migrateOne({
    required String boxName,
    required SqliteStore target,
    required String path,
    required Object? Function(Object?) normalize,
  }) async {
    final migratedBoxKey = _migratedBoxFlagKey(boxName);
    final migrated = PrefStore.shared.get<bool>(migratedBoxKey) ?? false;
    if (migrated) return true;

    final hasEnc = File(path.joinPath('${boxName}_enc.hive')).existsSync();
    final hasPlain = File(path.joinPath('$boxName.hive')).existsSync();
    if (!hasEnc && !hasPlain) {
      // Nothing on disk for this box: treat as migrated.
      await PrefStore.shared.set(migratedBoxKey, true);
      return true;
    }

    Box<dynamic>? box;
    try {
      box = await _openLegacyBox(
        boxName: boxName,
        path: path,
        hasEnc: hasEnc,
        hasPlain: hasPlain,
      );
      for (final rawKey in box.keys) {
        // Legacy boxes may contain auto-increment int keys; only string keys
        // map onto the SQLite store's key space.
        if (rawKey is! String) continue;
        final normalized = normalize(box.get(rawKey));
        if (normalized == null) continue;
        // NOTE(review): `set` appears synchronous here with `flush()` below
        // persisting the batch — confirm against SqliteStore's API.
        target.set(rawKey, normalized);
      }
      await target.flush();
      await PrefStore.shared.set(migratedBoxKey, true);
      return true;
    } catch (e, s) {
      Loggers.app.warning('Migrate hive box `$boxName` failed', e, s);
      return false;
    } finally {
      await box?.close();
    }
  }

  /// Opens the legacy box, preferring the encrypted variant.
  ///
  /// Falls back to the plain box when the encrypted one cannot be opened, and
  /// throws a [StateError] describing all attempts when both fail.
  static Future<Box<dynamic>> _openLegacyBox({
    required String boxName,
    required String path,
    required bool hasEnc,
    required bool hasPlain,
  }) async {
    final cipher = await _legacyCipher();
    final openErrors = <String>[];

    if (hasEnc) {
      try {
        return await Hive.openBox(
          '${boxName}_enc',
          path: path,
          encryptionCipher: cipher,
        );
      } catch (e, s) {
        openErrors.add('encrypted: $e');
        Loggers.app.warning('Open encrypted hive box `$boxName` failed', e, s);
      }
    }

    if (hasPlain) {
      try {
        return await Hive.openBox(boxName, path: path);
      } catch (e, s) {
        openErrors.add('plain: $e');
        Loggers.app.warning('Open plain hive box `$boxName` failed', e, s);
      }
    }

    throw StateError(
      'Unable to open legacy hive box `$boxName` at `$path` '
      '(${openErrors.join(', ')})',
    );
  }

  /// Builds the AES cipher from the stored legacy key, or `null` when no key
  /// exists or it cannot be decoded (the box is then opened unencrypted).
  static Future<HiveAesCipher?> _legacyCipher() async {
    final key = await _readLegacyHiveKey();
    if (key == null || key.isEmpty) return null;
    try {
      // The legacy key was stored as a base64url string of the raw AES bytes.
      return HiveAesCipher(base64Url.decode(key));
    } catch (e, s) {
      Loggers.app.warning('Decode hive cipher failed', e, s);
      return null;
    }
  }

  /// Whether any legacy Hive data file (encrypted or plain) exists at [path].
  static bool _hasLegacyFiles(String path) {
    for (final box in _boxNames) {
      final enc = File(path.joinPath('${box}_enc.hive'));
      final plain = File(path.joinPath('$box.hive'));
      if (enc.existsSync() || plain.existsSync()) {
        return true;
      }
    }
    return false;
  }

  /// Renames every legacy data/lock file to `*.legacy.bak` so it is kept as a
  /// backup but no longer detected by [_hasLegacyFiles].
  static Future<void> _archiveLegacyFiles(String path) async {
    for (final name in _boxNames) {
      await _archiveOne(File(path.joinPath('${name}_enc.hive')));
      await _archiveOne(File(path.joinPath('${name}_enc.lock')));
      await _archiveOne(File(path.joinPath('$name.hive')));
      await _archiveOne(File(path.joinPath('$name.lock')));
    }
  }

  /// Pref key that records whether [boxName] has been migrated.
  static String _migratedBoxFlagKey(String boxName) {
    return '$_migratedBoxFlagPrefix$boxName';
  }

  /// Whether every box's per-box migrated flag is set.
  static bool _allBoxesMigrated() {
    for (final boxName in _boxNames) {
      final migrated = PrefStore.shared.get<bool>(_migratedBoxFlagKey(boxName));
      if (migrated != true) return false;
    }
    return true;
  }

  /// Sets every per-box flag (used when there is nothing to migrate).
  static Future<void> _markAllBoxesMigrated() async {
    for (final boxName in _boxNames) {
      await PrefStore.shared.set(_migratedBoxFlagKey(boxName), true);
    }
  }

  /// Sets the global migrated flag — but only once every box is done — and
  /// records the app build number the migration completed at.
  static Future<void> _setMigratedFlag() async {
    if (!_allBoxesMigrated()) return;
    await PrefStore.shared.set(_migratedFlagKey, true);
    await PrefStore.shared.set(_migratedBuildKey, BuildData.build);
  }

  /// Renames [file] to `<path>.legacy.bak`, replacing any stale backup.
  static Future<void> _archiveOne(File file) async {
    if (!file.existsSync()) return;
    final target = File('${file.path}$_legacySuffix');
    if (target.existsSync()) {
      await target.delete();
    }
    await file.rename(target.path);
  }

  /// Directory where the legacy Hive files live.
  ///
  /// Desktop (Linux/Windows) used the app's own doc path; other platforms
  /// used the system application-documents directory.
  static Future<String> _legacyHivePath() async {
    return switch (Pfs.type) {
      Pfs.linux || Pfs.windows => Paths.doc,
      _ => (await getApplicationDocumentsDirectory()).path,
    };
  }

  /// Reads the legacy Hive encryption key, checking the secure store first
  /// and falling back to the two historical pref keys. When found only in
  /// prefs, it is copied back into the secure store so future reads agree.
  static Future<String?> _readLegacyHiveKey() async {
    // ignore: deprecated_member_use
    final secureStoreKey = await SecureStoreProps.hivePwd.read();
    if (secureStoreKey != null && secureStoreKey.isNotEmpty) {
      return secureStoreKey;
    }

    final prefKey =
        PrefStore.shared.get<String>(_legacyHiveEncKey) ??
        PrefStore.shared.get<String>(_legacyHiveEncKeyCompat);
    if (prefKey != null && prefKey.isNotEmpty) {
      // Keep key source aligned with previous Hive behavior.
      // ignore: deprecated_member_use
      await SecureStoreProps.hivePwd.write(prefKey);
      return prefKey;
    }
    return null;
  }

  /// Normalizers: convert Hive-typed values into JSON-compatible structures.

  static Object? _normalizeGeneric(Object? raw) {
    return _normalizeRaw(raw);
  }

  static Object? _normalizeSpi(Object? raw) {
    if (raw is Spi) return raw.toJson();
    return _normalizeRaw(raw);
  }

  static Object? _normalizeSnippet(Object? raw) {
    if (raw is Snippet) return raw.toJson();
    return _normalizeRaw(raw);
  }

  static Object? _normalizePrivateKey(Object? raw) {
    if (raw is PrivateKeyInfo) return raw.toJson();
    return _normalizeRaw(raw);
  }

  static Object? _normalizeConnectionStat(Object? raw) {
    if (raw is ConnectionStat) return raw.toJson();
    return _normalizeRaw(raw);
  }

  /// Recursively converts [raw] into JSON-compatible data.
  ///
  /// Primitives pass through, enums become their name, lists and maps are
  /// normalized element-wise (map keys stringified). Any other object is
  /// asked for a `toJson()`; values that cannot be converted are dropped
  /// (returned as `null`) after logging.
  static Object? _normalizeRaw(Object? raw) {
    if (raw == null) return null;
    if (raw is bool || raw is int || raw is double || raw is String) return raw;
    if (raw is Enum) return raw.name;
    if (raw is List) {
      return raw.map(_normalizeRaw).toList(growable: false);
    }
    if (raw is Map) {
      return <String, Object?>{
        for (final entry in raw.entries)
          entry.key.toString(): _normalizeRaw(entry.value),
      };
    }
    try {
      // Duck-typed toJson(): throws NoSuchMethodError when absent, which the
      // catch below turns into a logged skip rather than a migration failure.
      final dynamic obj = raw;
      final jsonObj = obj.toJson();
      return _normalizeRaw(jsonObj);
    } catch (e, s) {
      Loggers.app.warning(
        'Normalize migration value failed(type: ${raw.runtimeType})',
        e,
        s,
      );
      return null;
    }
  }
}
Loading