Mirror of https://github.com/vrtmrz/obsidian-livesync.git, synced 2026-02-20 19:18:47 +00:00
Preparing v0.24.0
328  src/modules/essential/ModuleInitializerFile.ts  (new file)
@@ -0,0 +1,328 @@
import { unique } from "octagonal-wheels/collection";
import { QueueProcessor } from "octagonal-wheels/concurrency/processor";
import { throttle } from "octagonal-wheels/function";
import { eventHub } from "../../common/events.ts";
import { BASE_IS_NEW, compareFileFreshness, EVEN, getPath, isValidPath, TARGET_IS_NEW } from "../../common/utils.ts";
import {
    type FilePathWithPrefixLC, type FilePathWithPrefix, type MetaEntry, isMetaEntry, type EntryDoc,
    LOG_LEVEL_VERBOSE, LOG_LEVEL_NOTICE, LOG_LEVEL_INFO, LOG_LEVEL_DEBUG, type UXFileInfoStub
} from "../../lib/src/common/types.ts";
import { isAnyNote } from "../../lib/src/common/utils.ts";
import { stripAllPrefixes } from "../../lib/src/string_and_binary/path.ts";
import { AbstractModule } from "../AbstractModule.ts";
import type { ICoreModule } from "../ModuleTypes.ts";

export class ModuleInitializerFile extends AbstractModule implements ICoreModule {

    async $$performFullScan(showingNotice?: boolean): Promise<void> {
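        // Full scan outline: collect files from the storage and entries from the local database,
        // diff the two sets (case-insensitively unless handleFilenameCaseSensitive is set),
        // then reconcile them in three queued passes below.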
        this._log("Opening the key-value database", LOG_LEVEL_VERBOSE);
        const isInitialized = await (this.core.kvDB.get<boolean>("initialized")) || false;
        // Synchronise all files between the database and the storage.
        if (!this.settings.isConfigured) {
            if (showingNotice) {
                this._log("LiveSync is not configured yet. Synchronising between the storage and the local database is now prevented.", LOG_LEVEL_NOTICE, "syncAll");
            }
            return;
        }

        if (showingNotice) {
            this._log("Initializing", LOG_LEVEL_NOTICE, "syncAll");
        }

        this._log("Initializing and checking database files");
        this._log("Checking deleted files");
        await this.collectDeletedFiles();

        this._log("Collecting local files on the storage", LOG_LEVEL_VERBOSE);
        const filesStorageSrc = this.core.storageAccess.getFiles();

        const _filesStorage = [] as typeof filesStorageSrc;

        for (const f of filesStorageSrc) {
            if (await this.core.$$isTargetFile(f.path, f != filesStorageSrc[0])) {
                _filesStorage.push(f);
            }
        }

        const convertCase = <FilePathWithPrefix>(path: FilePathWithPrefix): FilePathWithPrefixLC => {
            if (this.settings.handleFilenameCaseSensitive) {
                return path as FilePathWithPrefixLC;
            }
            return (path as string).toLowerCase() as FilePathWithPrefixLC;
        }

        // If handleFilenameCaseSensitive is enabled, `FilePathWithPrefixLC` is the same as `FilePathWithPrefix`.

        const storageFileNameMap = Object.fromEntries(_filesStorage.map((e) => [e.path, e] as [FilePathWithPrefix, UXFileInfoStub]));
        const storageFileNames = Object.keys(storageFileNameMap) as FilePathWithPrefix[];
        const storageFileNameCapsPair = storageFileNames.map((e) => [e, convertCase(e)] as [FilePathWithPrefix, FilePathWithPrefixLC]);
        // const storageFileNameCS2CI = Object.fromEntries(storageFileNameCapsPair) as Record<FilePathWithPrefix, FilePathWithPrefixLC>;
        const storageFileNameCI2CS = Object.fromEntries(storageFileNameCapsPair.map(e => [e[1], e[0]])) as Record<FilePathWithPrefixLC, FilePathWithPrefix>;

        this._log("Collecting local files on the DB", LOG_LEVEL_VERBOSE);
        const _DBEntries = [] as MetaEntry[];
        // const _DBEntriesTask = [] as (() => Promise<MetaEntry | false>)[];
        let count = 0;
        for await (const doc of this.localDatabase.findAllNormalDocs()) {
            count++;
            if (count % 25 == 0) this._log(`Collecting local files on the DB: ${count}`, showingNotice ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO, "syncAll");
            const path = getPath(doc);
            if (isValidPath(path) && await this.core.$$isTargetFile(path, true)) {
                if (!isMetaEntry(doc)) {
                    this._log(`Invalid entry: ${path}`, LOG_LEVEL_INFO);
                    continue;
                }
                _DBEntries.push(doc);
            }
        }

        const databaseFileNameMap = Object.fromEntries(_DBEntries.map((e) => [getPath(e), e] as [FilePathWithPrefix, MetaEntry]));
        const databaseFileNames = Object.keys(databaseFileNameMap) as FilePathWithPrefix[];
        const databaseFileNameCapsPair = databaseFileNames.map((e) => [e, convertCase(e)] as [FilePathWithPrefix, FilePathWithPrefixLC]);
        // const databaseFileNameCS2CI = Object.fromEntries(databaseFileNameCapsPair) as Record<FilePathWithPrefix, FilePathWithPrefixLC>;
        const databaseFileNameCI2CS = Object.fromEntries(databaseFileNameCapsPair.map(e => [e[1], e[0]])) as Record<FilePathWithPrefixLC, FilePathWithPrefix>;

        const allFiles = unique([...Object.keys(databaseFileNameCI2CS), ...Object.keys(storageFileNameCI2CS)]) as FilePathWithPrefixLC[];

        this._log(`Total files in the database: ${databaseFileNames.length}`, LOG_LEVEL_VERBOSE, "syncAll");
        this._log(`Total files in the storage: ${storageFileNames.length}`, LOG_LEVEL_VERBOSE, "syncAll");
        this._log(`Total files: ${allFiles.length}`, LOG_LEVEL_VERBOSE, "syncAll");
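
        // Partition all known paths into three groups and reconcile each group below:
        // only-in-storage (store to DB), only-in-database (write out to storage), and
        // present-in-both (compare freshness and copy in the newer direction).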
        const filesExistOnlyInStorage = allFiles.filter((e) => !databaseFileNameCI2CS[e]);
        const filesExistOnlyInDatabase = allFiles.filter((e) => !storageFileNameCI2CS[e]);
        const filesExistBoth = allFiles.filter((e) => databaseFileNameCI2CS[e] && storageFileNameCI2CS[e]);

        this._log(`Files exist only in storage: ${filesExistOnlyInStorage.length}`, LOG_LEVEL_VERBOSE, "syncAll");
        this._log(`Files exist only in database: ${filesExistOnlyInDatabase.length}`, LOG_LEVEL_VERBOSE, "syncAll");
        this._log(`Files exist both in storage and database: ${filesExistBoth.length}`, LOG_LEVEL_VERBOSE, "syncAll");

        this._log("Synchronising...");
        const processStatus = {} as Record<string, string>;
        const logLevel = showingNotice ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO;
        const updateLog = throttle((key: string, msg: string) => {
            processStatus[key] = msg;
            const log = Object.values(processStatus).join("\n");
            this._log(log, logLevel, "syncAll");
        }, 25);
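
        // runAll: drain a list of items through a QueueProcessor (concurrency 10, one item per batch),
        // counting successes and failures and reporting progress via the throttled updateLog every 10 items.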
        const initProcess = [];
        const runAll = async <T>(procedureName: string, objects: T[], callback: (arg: T) => Promise<void>) => {
            if (objects.length == 0) {
                this._log(`${procedureName}: Nothing to do`);
                return;
            }
            this._log(procedureName);
            if (!this.localDatabase.isReady) throw Error("Database is not ready!");
            let success = 0;
            let failed = 0;
            const step = 10;
            const processor = new QueueProcessor(async (e) => {
                try {
                    await callback(e[0]);
                    success++;
                    // return
                } catch (ex) {
                    this._log(`Error while ${procedureName}`, LOG_LEVEL_NOTICE);
                    this._log(ex, LOG_LEVEL_VERBOSE);
                    failed++;
                }
                if ((success + failed) % step == 0) {
                    const msg = `${procedureName}: DONE:${success}, FAILED:${failed}, LAST:${processor._queue.length}`;
                    updateLog(procedureName, msg);
                }
                return;
            }, {
                batchSize: 1,
                concurrentLimit: 10,
                delay: 0,
                suspended: true,
                maintainDelay: false,
                interval: 0
            }, objects);
            await processor.waitForAllDoneAndTerminate();
            const msg = `${procedureName} All done: DONE:${success}, FAILED:${failed}`;
            updateLog(procedureName, msg);
        }
        initProcess.push(runAll("UPDATE DATABASE", filesExistOnlyInStorage, async (e) => {
            // console.warn("UPDATE DATABASE", e);
            const file = storageFileNameMap[storageFileNameCI2CS[e]];
            if (!this.core.$$isFileSizeExceeded(file.stat.size)) {
                const path = file.path;
                await this.core.fileHandler.storeFileToDB(file);
                // fireAndForget(() => this.checkAndApplySettingFromMarkdown(path, true));
                eventHub.emitEvent("event-file-changed", { file: path, automated: true });
            } else {
                this._log(`UPDATE DATABASE: ${e} has been skipped due to file size exceeding the limit`, logLevel);
            }
        }));
        initProcess.push(runAll("UPDATE STORAGE", filesExistOnlyInDatabase, async (e) => {
            const w = databaseFileNameMap[databaseFileNameCI2CS[e]];
            const path = getPath(w) ?? e;
            if (w && !(w.deleted || w._deleted)) {
                if (!this.core.$$isFileSizeExceeded(w.size)) {
                    // await this.pullFile(path, undefined, false, undefined, false);
                    // Memo: No need to force
                    await this.core.fileHandler.dbToStorage(path, null, true);
                    // fireAndForget(() => this.checkAndApplySettingFromMarkdown(e, true));
                    eventHub.emitEvent("event-file-changed", { file: e, automated: true });
                    this._log(`Check or pull from db:${path} OK`);
                } else {
                    this._log(`UPDATE STORAGE: ${path} has been skipped due to file size exceeding the limit`, logLevel);
                }
            } else if (w) {
                this._log(`Deletion history skipped: ${path}`, LOG_LEVEL_VERBOSE);
            } else {
                this._log(`entry not found: ${path}`);
            }
        }));

        const fileMap = filesExistBoth.map(path => {
            const file = storageFileNameMap[storageFileNameCI2CS[path]];
            const doc = databaseFileNameMap[databaseFileNameCI2CS[path]];
            return { file, doc };
        });
        initProcess.push(runAll("SYNC DATABASE AND STORAGE", fileMap, async (e) => {
            const { file, doc } = e;
            if (!this.core.$$isFileSizeExceeded(file.stat.size) && !this.core.$$isFileSizeExceeded(doc.size)) {
                await this.syncFileBetweenDBandStorage(file, doc);
                // fireAndForget(() => this.checkAndApplySettingFromMarkdown(getPath(doc), true));
                eventHub.emitEvent("event-file-changed", { file: getPath(doc), automated: true });
            } else {
                this._log(`SYNC DATABASE AND STORAGE: ${getPath(doc)} has been skipped due to file size exceeding the limit`, logLevel);
            }
        }));

        await Promise.all(initProcess);

        // this.setStatusBarText(`NOW TRACKING!`);
        this._log("Initialized, NOW TRACKING!");
        if (!isInitialized) {
            await (this.core.kvDB.set("initialized", true));
        }
        if (showingNotice) {
            this._log("Initialize done!", LOG_LEVEL_NOTICE, "syncAll");
        }
    }

    async syncFileBetweenDBandStorage(file: UXFileInfoStub, doc: MetaEntry) {
        if (!doc) {
            throw new Error(`Missing doc:${(file as any).path}`);
        }
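        // Re-resolve the file stub from the storage layer so the freshness comparison below uses current stat information.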
        if ("path" in file) {
            const w = this.core.storageAccess.getFileStub((file as any).path);
            if (w) {
                file = w;
            } else {
                throw new Error(`Missing file:${(file as any).path}`);
            }
        }
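
        // compareFileFreshness treats the storage file as BASE and the database entry as TARGET,
        // so BASE_IS_NEW means the storage copy is newer and TARGET_IS_NEW means the database copy is newer.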
        const compareResult = compareFileFreshness(file, doc);
        switch (compareResult) {
            case BASE_IS_NEW:
                if (!this.core.$$isFileSizeExceeded(file.stat.size)) {
                    this._log("STORAGE -> DB :" + file.path);
                    await this.core.fileHandler.storeFileToDB(file);
                    eventHub.emitEvent("event-file-changed", { file: file.path, automated: true });
                } else {
                    this._log(`STORAGE -> DB : ${file.path} has been skipped due to file size exceeding the limit`, LOG_LEVEL_NOTICE);
                }
                break;
            case TARGET_IS_NEW:
                if (!this.core.$$isFileSizeExceeded(doc.size)) {
                    this._log("STORAGE <- DB :" + file.path);
                    if (!await this.core.fileHandler.dbToStorage(doc, stripAllPrefixes(file.path), true)) {
                        this._log(`STORAGE <- DB : Could not read ${file.path}, possibly deleted`, LOG_LEVEL_NOTICE);
                    }
                    return;
                } else {
                    this._log(`STORAGE <- DB : ${file.path} has been skipped due to file size exceeding the limit`, LOG_LEVEL_NOTICE);
                }
                break;
            case EVEN:
                this._log("STORAGE == DB :" + file.path, LOG_LEVEL_DEBUG);
                break;
            default:
                this._log("STORAGE ?? DB :" + file.path + " Something got weird");
        }
    }

    // This method uses an old version of the database accessor, which is not recommended.
    // TODO: Fix
    async collectDeletedFiles() {
        const limitDays = this.settings.automaticallyDeleteMetadataOfDeletedFiles;
        if (limitDays <= 0) return;
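        // Deletion tombstones older than `limitDays` days are marked `_deleted` and purged from the local database below.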
        this._log(`Checking expired file history`);
        const limit = Date.now() - (86400 * 1000 * limitDays);
        const notes: {
            path: string,
            mtime: number,
            ttl: number,
            doc: PouchDB.Core.ExistingDocument<EntryDoc & PouchDB.Core.AllDocsMeta>
        }[] = [];
        for await (const doc of this.localDatabase.findAllDocs({ conflicts: true })) {
            if (isAnyNote(doc)) {
                if (doc.deleted && (doc.mtime - limit) < 0) {
                    notes.push({
                        path: getPath(doc),
                        mtime: doc.mtime,
                        ttl: (doc.mtime - limit) / 1000 / 86400,
                        doc: doc
                    });
                }
            }
        }
        if (notes.length == 0) {
            this._log("There are no old documents");
            this._log(`Checking expired file history done`);
            return;
        }
        for (const v of notes) {
            this._log(`Deletion history expired: ${v.path}`);
            const delDoc = v.doc;
            delDoc._deleted = true;
            await this.localDatabase.putRaw(delDoc);
        }
        this._log(`Checking expired file history done`);
    }

    async $$initializeDatabase(showingNotice: boolean = false, reopenDatabase = true): Promise<boolean> {
        this.core.isReady = false;
        if ((!reopenDatabase) || await this.core.$$openDatabase()) {
            if (this.localDatabase.isReady) {
                await this.core.$$performFullScan(showingNotice);
            }
            if (!await this.core.$everyOnDatabaseInitialized(showingNotice)) {
                this._log(`Initializing the database has failed in some module`, LOG_LEVEL_NOTICE);
                return false;
            }
            this.core.isReady = true;
            // Run queued events once.
            await this.core.$everyCommitPendingFileEvent();
            return true;
        } else {
            this.core.isReady = false;
            return false;
        }
    }
}

90  src/modules/essential/ModuleKeyValueDB.ts  (new file)
@@ -0,0 +1,90 @@
import { delay, yieldMicrotask } from "octagonal-wheels/promises";
import { OpenKeyValueDatabase } from "../../common/KeyValueDB.ts";
import type { LiveSyncLocalDB } from "../../lib/src/pouchdb/LiveSyncLocalDB.ts";
import { LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "octagonal-wheels/common/logger";
import { AbstractModule } from "../AbstractModule.ts";
import type { ICoreModule } from "../ModuleTypes.ts";

export class ModuleKeyValueDB extends AbstractModule implements ICoreModule {

    tryCloseKvDB() {
        try {
            this.core.kvDB?.close();
            return true;
        } catch (e) {
            this._log("Failed to close KeyValueDB", LOG_LEVEL_VERBOSE);
            this._log(e);
            return false;
        }
    }

    async openKeyValueDB(): Promise<boolean> {
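        // Close any existing instance before reopening the vault-specific key-value database
        // (with short delays and microtask yields around the open).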
        await delay(10);
        try {
            this.tryCloseKvDB();
            await delay(10);
            await yieldMicrotask();
            this.core.kvDB = await OpenKeyValueDatabase(this.core.$$getVaultName() + "-livesync-kv");
            await yieldMicrotask();
            await delay(100);
        } catch (e) {
            this.core.kvDB = undefined!;
            this._log("Failed to open KeyValueDB", LOG_LEVEL_NOTICE);
            this._log(e, LOG_LEVEL_VERBOSE);
            return false;
        }
        return true;
    }

    $allOnDBUnload(db: LiveSyncLocalDB): void {
        if (this.core.kvDB) this.core.kvDB.close();
    }

    $allOnDBClose(db: LiveSyncLocalDB): void {
        if (this.core.kvDB) this.core.kvDB.close();
    }

    async $everyOnloadAfterLoadSettings(): Promise<boolean> {
        if (!await this.openKeyValueDB()) {
            return false;
        }
        this.core.simpleStore = this.core.$$getSimpleStore<any>("os");
        return Promise.resolve(true);
    }

    $$getSimpleStore<T>(kind: string) {
        const prefix = `${kind}-`;
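        // All simple stores share the plugin's key-value database; keys are namespaced with the
        // `${kind}-` prefix, which is stripped again when keys are listed.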
        return {
            get: async (key: string): Promise<T> => {
                return await this.core.kvDB.get(`${prefix}${key}`);
            },
            set: async (key: string, value: any): Promise<void> => {
                await this.core.kvDB.set(`${prefix}${key}`, value);
            },
            delete: async (key: string): Promise<void> => {
                await this.core.kvDB.del(`${prefix}${key}`);
            },
            keys: async (from: string | undefined, to: string | undefined, count?: number | undefined): Promise<string[]> => {
                const ret = this.core.kvDB.keys(IDBKeyRange.bound(`${prefix}${from || ""}`, `${prefix}${to || ""}`), count);
                return (await ret).map(e => e.toString()).filter(e => e.startsWith(prefix)).map(e => e.substring(prefix.length));
            }
        }
    }
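
    // Usage sketch (illustrative only; the store name and keys below are not from this codebase):
    //   const store = this.core.$$getSimpleStore<number>("stat");
    //   await store.set("files", 120);        // persisted under the key "stat-files"
    //   const n = await store.get("files");   // -> 120
    //   await store.delete("files");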
    $everyOnInitializeDatabase(db: LiveSyncLocalDB): Promise<boolean> {
        return this.openKeyValueDB();
    }

    async $everyOnResetDatabase(db: LiveSyncLocalDB): Promise<boolean> {
        try {
            const kvDBKey = "queued-files";
            await this.core.kvDB.del(kvDBKey);
            // localStorage.removeItem(lsKey);
            await this.core.kvDB.destroy();
            await yieldMicrotask();
            this.core.kvDB = await OpenKeyValueDatabase(this.core.$$getVaultName() + "-livesync-kv");
            await delay(100);
        } catch (e) {
            this.core.kvDB = undefined!;
            this._log("Failed to reset KeyValueDB", LOG_LEVEL_NOTICE);
            this._log(e, LOG_LEVEL_VERBOSE);
            return false;
        }
        return true;
    }
}

210  src/modules/essential/ModuleMigration.ts  (new file)
@@ -0,0 +1,210 @@
import { LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from 'octagonal-wheels/common/logger.js';
import { SETTING_VERSION_SUPPORT_CASE_INSENSITIVE } from '../../lib/src/common/types.js';
import { EVENT_REQUEST_OPEN_SETTINGS, EVENT_REQUEST_OPEN_SETUP_URI, eventHub } from '../../common/events.ts';
import { AbstractModule } from "../AbstractModule.ts";
import type { ICoreModule } from "../ModuleTypes.ts";

export class ModuleMigration extends AbstractModule implements ICoreModule {

    async migrateDisableBulkSend() {
        if (this.settings.sendChunksBulk) {
            this._log("Sending chunks in bulk was enabled, but this feature has been found to be corrupted and has now been disabled automatically. Sorry for the inconvenience.", LOG_LEVEL_NOTICE);
            this.settings.sendChunksBulk = false;
            this.settings.sendChunksBulkMaxSize = 1;
            await this.saveSettings();
        }
    }

    async migrationCheck() {
        const old = this.settings.settingVersion;
        const current = SETTING_VERSION_SUPPORT_CASE_INSENSITIVE;
        // Check each migration (old -> current).
        if (!await this.migrateToCaseInsensitive(old, current)) {
            this._log(`Migration failed or cancelled from ${old} to ${current}`, LOG_LEVEL_NOTICE);
            return;
        }
    }

    async migrateToCaseInsensitive(old: number, current: number) {
        if (this.settings.handleFilenameCaseSensitive !== undefined && this.settings.doNotUseFixedRevisionForChunks !== undefined) {
            if (current < SETTING_VERSION_SUPPORT_CASE_INSENSITIVE) {
                this.settings.settingVersion = SETTING_VERSION_SUPPORT_CASE_INSENSITIVE;
                await this.saveSettings();
            }
            return true;
        }
        if (old >= SETTING_VERSION_SUPPORT_CASE_INSENSITIVE && this.settings.handleFilenameCaseSensitive !== undefined && this.settings.doNotUseFixedRevisionForChunks !== undefined) {
            return true;
        }

        let remoteHandleFilenameCaseSensitive: undefined | boolean = undefined;
        let remoteDoNotUseFixedRevisionForChunks: undefined | boolean = undefined;
        let remoteChecked = false;
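        // Ask the remote database for its preferred tweak values first; if the remote has already
        // been migrated, this device only needs to adjust its own settings (and possibly fetch again).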
        try {
            const remoteInfo = await this.core.replicator.getRemotePreferredTweakValues(this.settings);
            if (remoteInfo) {
                remoteHandleFilenameCaseSensitive = "handleFilenameCaseSensitive" in remoteInfo ? remoteInfo.handleFilenameCaseSensitive : false;
                remoteDoNotUseFixedRevisionForChunks = "doNotUseFixedRevisionForChunks" in remoteInfo ? remoteInfo.doNotUseFixedRevisionForChunks : false;
                if (remoteHandleFilenameCaseSensitive !== undefined || remoteDoNotUseFixedRevisionForChunks !== undefined) {
                    remoteChecked = true;
                }
            } else {
                this._log("Failed to fetch remote tweak values", LOG_LEVEL_INFO);
            }
        } catch (ex) {
            this._log("Could not get remote tweak values", LOG_LEVEL_INFO);
            this._log(ex, LOG_LEVEL_VERBOSE);
        }

        if (remoteChecked) {
            // The remote configuration could be checked.
            if (remoteHandleFilenameCaseSensitive && remoteDoNotUseFixedRevisionForChunks) {
                // Already migrated, but configured the same as the old behaviour.
                this.settings.handleFilenameCaseSensitive = true;
                this.settings.doNotUseFixedRevisionForChunks = true;
                this.settings.settingVersion = SETTING_VERSION_SUPPORT_CASE_INSENSITIVE;
                this._log(`Migrated to db:${current} with the same behaviour as before`, LOG_LEVEL_INFO);
                await this.saveSettings();
                return true;
            }
            const message = `As you may already know, Self-hosted LiveSync has changed its default behaviour and database structure.

Thankfully, with your time and effort, the remote database appears to have already been migrated. Congratulations!

However, one more step is required. The configuration of this device is not compatible with the remote database, so the remote database has to be fetched again. Should we fetch from the remote now?

___Note: We cannot synchronise until the configuration has been changed and the database has been fetched again.___
___Note2: Because chunks are completely immutable, only the metadata and differences need to be fetched.___
`;
            const OPTION_FETCH = "Yes, fetch again";
            const DISMISS = "No, please ask again";
            const options = [OPTION_FETCH, DISMISS];
            const ret = await this.core.confirm.confirmWithMessage("Case Sensitivity", message, options, DISMISS, 40);
            if (ret == OPTION_FETCH) {
                this.settings.handleFilenameCaseSensitive = remoteHandleFilenameCaseSensitive || false;
                this.settings.doNotUseFixedRevisionForChunks = remoteDoNotUseFixedRevisionForChunks || false;
                this.settings.settingVersion = SETTING_VERSION_SUPPORT_CASE_INSENSITIVE;
                await this.saveSettings();
                try {
                    await this.core.rebuilder.scheduleFetch();
                    return;
                } catch (ex) {
                    this._log("Failed to create redflag2", LOG_LEVEL_VERBOSE);
                    this._log(ex, LOG_LEVEL_VERBOSE);
                }
                return false;
            } else {
                return false;
            }
        }

        const ENABLE_BOTH = "Enable both";
        const ENABLE_FILENAME_CASE_INSENSITIVE = "Enable only #1";
        const ENABLE_FIXED_REVISION_FOR_CHUNKS = "Enable only #2";
        const ADJUST_TO_REMOTE = "Adjust to remote";
        const DISMISS = "Decide it later";
        const KEEP = "Keep previous behaviour";
        const message = `Since v0.23.21, Self-hosted LiveSync has changed its default behaviour and database structure. The following changes have been made:

1. **Case sensitivity of filenames**
   The handling of filenames is now case-insensitive. This is a beneficial change for most platforms, other than Linux and iOS, which do not manage filename case sensitivity effectively.
   (On these platforms, a warning will be displayed for files with the same name but different cases.)

2. **Revision handling of the chunks**
   Chunks are immutable, which allows their revisions to be fixed. This change will enhance the performance of file saving.

___However, to enable either of these changes, both the remote and local databases need to be rebuilt. This process takes a few minutes, and we recommend doing it when you have ample time.___

- If you wish to keep the previous behaviour, you can skip this process by choosing \`${KEEP}\`.
- If you do not have enough time, please choose \`${DISMISS}\`. You will be prompted again later.
- If you have already rebuilt the database on another device, please select \`${DISMISS}\` and try synchronising again. Since a difference will be detected, you will be prompted again.
`;
        const options = [
            ENABLE_BOTH,
            ENABLE_FILENAME_CASE_INSENSITIVE,
            ENABLE_FIXED_REVISION_FOR_CHUNKS];
        if (remoteChecked) {
            options.push(ADJUST_TO_REMOTE);
        }
        options.push(KEEP, DISMISS);
        const ret = await this.core.confirm.confirmWithMessage("Case Sensitivity", message, options, DISMISS, 40);
        console.dir(ret);
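        // Note that both settings are negatively named: `handleFilenameCaseSensitive = false` enables the new
        // case-insensitive handling (#1), and `doNotUseFixedRevisionForChunks = false` enables fixed chunk revisions (#2).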
        switch (ret) {
            case ENABLE_BOTH:
                this.settings.handleFilenameCaseSensitive = false;
                this.settings.doNotUseFixedRevisionForChunks = false;
                break;
            case ENABLE_FILENAME_CASE_INSENSITIVE:
                this.settings.handleFilenameCaseSensitive = false;
                this.settings.doNotUseFixedRevisionForChunks = true;
                break;
            case ENABLE_FIXED_REVISION_FOR_CHUNKS:
                this.settings.doNotUseFixedRevisionForChunks = false;
                this.settings.handleFilenameCaseSensitive = true;
                break;
            case KEEP:
                this.settings.handleFilenameCaseSensitive = true;
                this.settings.doNotUseFixedRevisionForChunks = true;
                this.settings.settingVersion = SETTING_VERSION_SUPPORT_CASE_INSENSITIVE;
                await this.saveSettings();
                return true;
            case DISMISS:
            default:
                return false;
        }
        this.settings.settingVersion = SETTING_VERSION_SUPPORT_CASE_INSENSITIVE;
        await this.saveSettings();
        await this.core.rebuilder.scheduleRebuild();
        await this.core.$$performRestart();
    }

    async $everyOnFirstInitialize(): Promise<boolean> {
        if (!this.localDatabase.isReady) {
            this._log(`Something went wrong! The local database is not ready`, LOG_LEVEL_NOTICE);
            return false;
        }
        if (this.settings.isConfigured) {
            await this.migrationCheck();
            await this.migrateDisableBulkSend();
        }
        if (!this.settings.isConfigured) {
            // Not configured yet: show the welcome dialogue.
            const message = `Hello and welcome to Self-hosted LiveSync.

Your device does **not seem to be configured yet**. Please finish the setup and synchronise your vaults!

Click anywhere to stop the countdown.

## On the first device
- With a Setup URI -> Use \`Use the copied setup URI\`.
  If you have configured it automatically, you should have one.
- Without a Setup URI -> Use the \`Setup wizard\` in the settings dialogue. **\`Minimal setup\` is recommended**.
- What is the Setup URI? -> Do not worry! We have [some docs](https://github.com/vrtmrz/obsidian-livesync/blob/main/README.md#how-to-use) now. Please refer to them.

## On subsequent devices
- With a Setup URI -> Use \`Use the copied setup URI\`.
  If you do not have it yet, you can copy it from the first device.
- Without a Setup URI -> Use the \`Setup wizard\` in the settings dialogue, but **using the Setup URI is strongly recommended**.
`;
            const OPEN_SETUP = "Open setting dialog";
            const USE_SETUP = "Use the copied setup URI";
            const DISMISS = "Dismiss";

            const ret = await this.core.confirm.confirmWithMessage("Welcome to Self-hosted LiveSync", message, [USE_SETUP, OPEN_SETUP, DISMISS], DISMISS, 40);
            if (ret === OPEN_SETUP) {
                try {
                    eventHub.emitEvent(EVENT_REQUEST_OPEN_SETTINGS);
                } catch (ex) {
                    this._log("Something went wrong while opening the settings dialogue; please open it manually", LOG_LEVEL_NOTICE);
                    this._log(ex, LOG_LEVEL_VERBOSE);
                }
            } else if (ret == USE_SETUP) {
                eventHub.emitEvent(EVENT_REQUEST_OPEN_SETUP_URI);
            }
            return true;
        }
        return true;
    }
}