Mirror of https://github.com/vrtmrz/obsidian-livesync.git, synced 2026-02-20 19:18:47 +00:00
Refactor for 0.25.43-patched-5 (very long; please refer to updates.md)
@@ -23,7 +23,7 @@ import {
type UXFileInfo,
type UXFileInfoStub,
} from "../lib/src/common/types.ts";
import { ICHeader, ICXHeader } from "./types.ts";
export { ICHeader, ICXHeader } from "./types.ts";
import type ObsidianLiveSyncPlugin from "../main.ts";
import { writeString } from "../lib/src/string_and_binary/convert.ts";
import { fireAndForget } from "../lib/src/common/utils.ts";
@@ -68,21 +68,13 @@ export function getPathFromTFile(file: TAbstractFile) {
return file.path as FilePath;
}

import { isInternalFile } from "@lib/common/typeUtils.ts";
export function getPathFromUXFileInfo(file: UXFileInfoStub | string | FilePathWithPrefix) {
if (typeof file == "string") return file as FilePathWithPrefix;
return file.path;
}
export function getStoragePathFromUXFileInfo(file: UXFileInfoStub | string | FilePathWithPrefix) {
if (typeof file == "string") return stripAllPrefixes(file as FilePathWithPrefix);
return stripAllPrefixes(file.path);
}
export function getDatabasePathFromUXFileInfo(file: UXFileInfoStub | string | FilePathWithPrefix) {
if (typeof file == "string" && file.startsWith(ICXHeader)) return file as FilePathWithPrefix;
const prefix = isInternalFile(file) ? ICHeader : "";
if (typeof file == "string") return (prefix + stripAllPrefixes(file as FilePathWithPrefix)) as FilePathWithPrefix;
return (prefix + stripAllPrefixes(file.path)) as FilePathWithPrefix;
}
import {
isInternalFile,
getPathFromUXFileInfo,
getStoragePathFromUXFileInfo,
getDatabasePathFromUXFileInfo,
} from "@lib/common/typeUtils.ts";
export { isInternalFile, getPathFromUXFileInfo, getStoragePathFromUXFileInfo, getDatabasePathFromUXFileInfo };

const memos: { [key: string]: any } = {};
export function memoObject<T>(key: string, obj: T): T {
@@ -263,10 +255,8 @@ export function requestToCouchDBWithCredentials(
return _requestToCouchDB(baseUri, credentials, origin, uri, body, method, customHeaders);
}

export const BASE_IS_NEW = Symbol("base");
export const TARGET_IS_NEW = Symbol("target");
export const EVEN = Symbol("even");

import { BASE_IS_NEW, EVEN, TARGET_IS_NEW } from "@lib/common/models/shared.const.symbols.ts";
export { BASE_IS_NEW, EVEN, TARGET_IS_NEW };
// Why 2000? : ZIP FILE Does not have enough resolution.
const resolution = 2000;
export function compareMTime(
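For context: the `resolution = 2000` above exists because ZIP archives store modification times with only 2-second precision, so timestamps are coarsened before being compared. A minimal sketch of such a comparison, assuming the moved symbols BASE_IS_NEW / TARGET_IS_NEW / EVEN (illustrative only; the actual compareMTime in src/common/utils.ts may differ):

// Illustrative sketch, not the committed implementation.
const RESOLUTION_MS = 2000; // ZIP mtime precision is 2 seconds
function compareCoarseMTime(baseMTime: number, targetMTime: number) {
    const base = Math.floor(baseMTime / RESOLUTION_MS);
    const target = Math.floor(targetMTime / RESOLUTION_MS);
    if (base > target) return BASE_IS_NEW;
    if (base < target) return TARGET_IS_NEW;
    return EVEN;
}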
2  src/lib
Submodule src/lib updated: 75d46c7163...56fc24e001
196  src/main.ts
@@ -4,6 +4,7 @@ import {
|
||||
type ObsidianLiveSyncSettings,
|
||||
type DatabaseConnectingStatus,
|
||||
type HasSettings,
|
||||
LOG_LEVEL_INFO,
|
||||
} from "./lib/src/common/types.ts";
|
||||
import { type SimpleStore } from "./lib/src/common/utils.ts";
|
||||
import { type LiveSyncLocalDBEnv } from "./lib/src/pouchdb/LiveSyncLocalDB.ts";
|
||||
@@ -16,9 +17,7 @@ import { type LiveSyncJournalReplicatorEnv } from "./lib/src/replication/journal
|
||||
import { type LiveSyncCouchDBReplicatorEnv } from "./lib/src/replication/couchdb/LiveSyncReplicator.js";
|
||||
import type { CheckPointInfo } from "./lib/src/replication/journal/JournalSyncTypes.js";
|
||||
import type { IObsidianModule } from "./modules/AbstractObsidianModule.ts";
|
||||
|
||||
import { ModuleDev } from "./modules/extras/ModuleDev.ts";
|
||||
import { ModuleFileAccessObsidian } from "./modules/coreObsidian/ModuleFileAccessObsidian.ts";
|
||||
import { ModuleMigration } from "./modules/essential/ModuleMigration.ts";
|
||||
|
||||
import { ModuleCheckRemoteSize } from "./modules/essentialObsidian/ModuleCheckRemoteSize.ts";
|
||||
@@ -30,15 +29,13 @@ import { ModuleRedFlag } from "./modules/coreFeatures/ModuleRedFlag.ts";
|
||||
import { ModuleObsidianMenu } from "./modules/essentialObsidian/ModuleObsidianMenu.ts";
|
||||
import { ModuleSetupObsidian } from "./modules/features/ModuleSetupObsidian.ts";
|
||||
import { SetupManager } from "./modules/features/SetupManager.ts";
|
||||
import type { StorageAccess } from "./modules/interfaces/StorageAccess.ts";
|
||||
import type { StorageAccess } from "@lib/interfaces/StorageAccess.ts";
|
||||
import type { Confirm } from "./lib/src/interfaces/Confirm.ts";
|
||||
import type { Rebuilder } from "./modules/interfaces/DatabaseRebuilder.ts";
|
||||
import type { DatabaseFileAccess } from "./modules/interfaces/DatabaseFileAccess.ts";
|
||||
import { ModuleDatabaseFileAccess } from "./modules/core/ModuleDatabaseFileAccess.ts";
|
||||
import { ModuleFileHandler } from "./modules/core/ModuleFileHandler.ts";
|
||||
import type { Rebuilder } from "@lib/interfaces/DatabaseRebuilder.ts";
|
||||
import type { DatabaseFileAccess } from "@lib/interfaces/DatabaseFileAccess.ts";
|
||||
import { ModuleObsidianAPI } from "./modules/essentialObsidian/ModuleObsidianAPI.ts";
|
||||
import { ModuleObsidianEvents } from "./modules/essentialObsidian/ModuleObsidianEvents.ts";
|
||||
import { type AbstractModule } from "./modules/AbstractModule.ts";
|
||||
import { AbstractModule } from "./modules/AbstractModule.ts";
|
||||
import { ModuleObsidianSettingDialogue } from "./modules/features/ModuleObsidianSettingTab.ts";
|
||||
import { ModuleObsidianDocumentHistory } from "./modules/features/ModuleObsidianDocumentHistory.ts";
|
||||
import { ModuleObsidianGlobalHistory } from "./modules/features/ModuleGlobalHistory.ts";
|
||||
@@ -53,7 +50,6 @@ import { ModuleRemoteGovernor } from "./modules/coreFeatures/ModuleRemoteGoverno
|
||||
import { ModuleConflictChecker } from "./modules/coreFeatures/ModuleConflictChecker.ts";
|
||||
import { ModuleResolvingMismatchedTweaks } from "./modules/coreFeatures/ModuleResolveMismatchedTweaks.ts";
|
||||
import { ModuleIntegratedTest } from "./modules/extras/ModuleIntegratedTest.ts";
|
||||
import { ModuleRebuilder } from "./modules/core/ModuleRebuilder.ts";
|
||||
import { ModuleReplicateTest } from "./modules/extras/ModuleReplicateTest.ts";
|
||||
import { ModuleLiveSyncMain } from "./modules/main/ModuleLiveSyncMain.ts";
|
||||
import { LocalDatabaseMaintenance } from "./features/LocalDatabaseMainte/CmdLocalDatabaseMainte.ts";
|
||||
@@ -61,6 +57,15 @@ import { P2PReplicator } from "./features/P2PSync/CmdP2PReplicator.ts";
|
||||
import type { InjectableServiceHub } from "./lib/src/services/implements/injectable/InjectableServiceHub.ts";
|
||||
import { ObsidianServiceHub } from "./modules/services/ObsidianServiceHub.ts";
|
||||
import type { ServiceContext } from "./lib/src/services/base/ServiceBase.ts";
|
||||
import { ServiceRebuilder } from "@lib/serviceModules/Rebuilder.ts";
|
||||
import type { IFileHandler } from "@lib/interfaces/FileHandler.ts";
|
||||
import { ServiceDatabaseFileAccess } from "@/serviceModules/DatabaseFileAccess.ts";
|
||||
import { ServiceFileAccessObsidian } from "@/serviceModules/ServiceFileAccessObsidian.ts";
|
||||
import { StorageEventManagerObsidian } from "@/modules/coreObsidian/storageLib/StorageEventManager.ts";
|
||||
import { ObsidianFileAccess } from "@/modules/coreObsidian/storageLib/SerializedFileAccess.ts";
|
||||
import { StorageAccessManager } from "@lib/managers/StorageProcessingManager.ts";
|
||||
import { __$checkInstanceBinding } from "./lib/src/dev/checks.ts";
|
||||
import { ServiceFileHandler } from "./serviceModules/FileHandler.ts";
|
||||
|
||||
export default class ObsidianLiveSyncPlugin
|
||||
extends Plugin
|
||||
@@ -87,16 +92,17 @@ export default class ObsidianLiveSyncPlugin
|
||||
this._services = new ObsidianServiceHub(this);
|
||||
}
|
||||
|
||||
// Keep this order so that the dialogues are displayed in order.
addOns = [] as LiveSyncCommands[];
|
||||
/**
* Bind functions to the service hub (for migration purposes).
*/
// bindFunctions = (this.serviceHub as ObsidianServiceHub).bindFunctions.bind(this.serviceHub);

/**
* Register an add-on to the plug-in.
* Add-ons are features that are not essential to the core functionality of the plug-in.
* @param addOn
*/
private _registerAddOn(addOn: LiveSyncCommands) {
|
||||
this.addOns.push(addOn);
|
||||
}
|
||||
|
||||
private registerAddOns() {
|
||||
this._registerAddOn(new ConfigSync(this));
|
||||
this._registerAddOn(new HiddenFileSync(this));
|
||||
@@ -104,6 +110,11 @@ export default class ObsidianLiveSyncPlugin
|
||||
this._registerAddOn(new P2PReplicator(this));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an add-on by its class name. Returns undefined if not found.
|
||||
* @param cls
|
||||
* @returns
|
||||
*/
|
||||
getAddOn<T extends LiveSyncCommands>(cls: string) {
|
||||
for (const addon of this.addOns) {
|
||||
if (addon.constructor.name == cls) return addon as T;
|
||||
@@ -111,53 +122,52 @@ export default class ObsidianLiveSyncPlugin
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* The modules of the plug-in. Modules are responsible for specific features or functionalities of the plug-in, such as file handling, conflict resolution, replication, etc.
|
||||
*/
|
||||
private modules = [
|
||||
// Move to registerModules
|
||||
] as (IObsidianModule | AbstractModule)[];
|
||||
|
||||
/**
|
||||
* Get a module by its class. Throws an error if not found.
|
||||
* Mostly used for getting SetupManager.
|
||||
* @param constructor
|
||||
* @returns
|
||||
*/
|
||||
getModule<T extends IObsidianModule>(constructor: new (...args: any[]) => T): T {
|
||||
for (const module of this.modules) {
|
||||
if (module.constructor === constructor) return module as T;
|
||||
}
|
||||
throw new Error(`Module ${constructor} not found or not loaded.`);
|
||||
}
|
||||
getModulesByType<T extends IObsidianModule>(constructor: new (...args: any[]) => T): T[] {
|
||||
const matchedModules: T[] = [];
|
||||
for (const module of this.modules) {
|
||||
if (module instanceof constructor) matchedModules.push(module);
|
||||
}
|
||||
return matchedModules;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a module to the plug-in.
|
||||
* @param module The module to register.
|
||||
*/
|
||||
private _registerModule(module: IObsidianModule) {
|
||||
this.modules.push(module);
|
||||
}
|
||||
private registerModules() {
|
||||
this._registerModule(new ModuleLiveSyncMain(this));
|
||||
// Only on Obsidian
|
||||
this._registerModule(new ModuleDatabaseFileAccess(this));
|
||||
// Common
|
||||
this._registerModule(new ModuleConflictChecker(this));
|
||||
this._registerModule(new ModuleReplicatorMinIO(this));
|
||||
this._registerModule(new ModuleReplicatorCouchDB(this));
|
||||
this._registerModule(new ModuleReplicator(this));
|
||||
this._registerModule(new ModuleFileHandler(this));
|
||||
this._registerModule(new ModuleConflictResolver(this));
|
||||
this._registerModule(new ModuleRemoteGovernor(this));
|
||||
this._registerModule(new ModuleTargetFilter(this));
|
||||
this._registerModule(new ModulePeriodicProcess(this));
|
||||
// Essential Modules
|
||||
this._registerModule(new ModuleInitializerFile(this));
|
||||
this._registerModule(new ModuleObsidianAPI(this, this));
|
||||
this._registerModule(new ModuleObsidianEvents(this, this));
|
||||
this._registerModule(new ModuleFileAccessObsidian(this, this));
|
||||
this._registerModule(new ModuleObsidianSettings(this));
|
||||
this._registerModule(new ModuleResolvingMismatchedTweaks(this));
|
||||
this._registerModule(new ModuleObsidianSettingsAsMarkdown(this));
|
||||
this._registerModule(new ModuleObsidianSettingDialogue(this, this));
|
||||
this._registerModule(new ModuleLog(this, this));
|
||||
this._registerModule(new ModuleObsidianMenu(this));
|
||||
this._registerModule(new ModuleRebuilder(this));
|
||||
this._registerModule(new ModuleSetupObsidian(this));
|
||||
this._registerModule(new ModuleObsidianDocumentHistory(this, this));
|
||||
this._registerModule(new ModuleMigration(this));
|
||||
@@ -172,6 +182,26 @@ export default class ObsidianLiveSyncPlugin
|
||||
this._registerModule(new SetupManager(this));
|
||||
}
|
||||
|
||||
/**
|
||||
* Bind module functions to services.
|
||||
*/
|
||||
private bindModuleFunctions() {
|
||||
for (const module of this.modules) {
|
||||
if (module instanceof AbstractModule) {
|
||||
module.onBindFunction(this, this.services);
|
||||
__$checkInstanceBinding(module); // Check if all functions are properly bound, and log warnings if not.
|
||||
} else {
|
||||
this.services.API.addLog(
|
||||
`Module ${module.constructor.name} does not have onBindFunction, skipping binding.`,
|
||||
LOG_LEVEL_INFO
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @obsolete Use services.UI.confirm instead. The confirm function to show a confirmation dialog to the user.
|
||||
*/
|
||||
get confirm(): Confirm {
|
||||
return this.services.UI.confirm;
|
||||
}
|
||||
@@ -183,39 +213,68 @@ export default class ObsidianLiveSyncPlugin
|
||||
return this.settings;
|
||||
}
|
||||
|
||||
/**
|
||||
* @obsolete Use services.database.localDatabase instead. The local database instance.
|
||||
*/
|
||||
get localDatabase() {
|
||||
return this.services.database.localDatabase;
|
||||
}
|
||||
|
||||
/**
|
||||
* @obsolete Use services.database.managers instead. The database managers, including entry manager, revision manager, etc.
|
||||
*/
|
||||
get managers() {
|
||||
return this.services.database.managers;
|
||||
}
|
||||
|
||||
/**
|
||||
* @obsolete Use services.database.localDatabase instead. Get the PouchDB database instance. Note that this is not the same as the local database instance, which is a wrapper around the PouchDB database.
|
||||
* @returns The PouchDB database instance.
|
||||
*/
|
||||
getDatabase(): PouchDB.Database<EntryDoc> {
|
||||
return this.localDatabase.localDatabase;
|
||||
}
|
||||
|
||||
/**
|
||||
* @obsolete Use services.keyValueDB.simpleStore instead. A simple key-value store for storing non-file data, such as checkpoints, sync status, etc.
|
||||
*/
|
||||
get simpleStore() {
|
||||
return this.services.keyValueDB.simpleStore as SimpleStore<CheckPointInfo>;
|
||||
}
|
||||
|
||||
/**
|
||||
* @obsolete Use services.replication.getActiveReplicator instead. Get the active replicator instance. Note that there can be multiple replicators, but only one can be active at a time.
|
||||
*/
|
||||
get replicator() {
|
||||
return this.services.replicator.getActiveReplicator()!;
|
||||
}
|
||||
|
||||
// initialised at ModuleFileAccessObsidian
|
||||
storageAccess!: StorageAccess;
|
||||
// initialised at ModuleDatabaseFileAccess
|
||||
databaseFileAccess!: DatabaseFileAccess;
|
||||
// initialised at ModuleFileHandler
|
||||
fileHandler!: ModuleFileHandler;
|
||||
// initialised at ModuleRebuilder
|
||||
rebuilder!: Rebuilder;
|
||||
|
||||
/**
|
||||
* @obsolete Use services.keyValueDB.kvDB instead. Get the key-value database instance. This is used for storing large data that cannot be stored in the simple store, such as file metadata, etc.
|
||||
*/
|
||||
get kvDB() {
|
||||
return this.services.keyValueDB.kvDB;
|
||||
}
|
||||
|
||||
/// Modules which now rely on services
/**
|
||||
* Storage Accessor for handling file operations.
|
||||
*/
|
||||
storageAccess: StorageAccess;
|
||||
/**
|
||||
* Database File Accessor for handling file operations related to the database, such as exporting the database, importing from a file, etc.
|
||||
*/
|
||||
databaseFileAccess: DatabaseFileAccess;
|
||||
|
||||
/**
|
||||
* File Handler for handling file operations related to replication, such as resolving conflicts, applying changes from replication, etc.
|
||||
*/
|
||||
fileHandler: IFileHandler;
|
||||
/**
|
||||
* Rebuilder for handling database rebuilding operations.
|
||||
*/
|
||||
rebuilder: Rebuilder;
|
||||
|
||||
requestCount = reactiveSource(0);
|
||||
responseCount = reactiveSource(0);
|
||||
totalQueued = reactiveSource(0);
|
||||
@@ -240,11 +299,74 @@ export default class ObsidianLiveSyncPlugin
|
||||
syncStatus: "CLOSED" as DatabaseConnectingStatus,
|
||||
});
|
||||
|
||||
private initialiseServiceModules() {
|
||||
const storageAccessManager = new StorageAccessManager();
|
||||
// If we want to support another platform, implement the equivalent of each Obsidian-specific service (ObsidianXXXXXService).
const vaultAccess = new ObsidianFileAccess(this.app, this, storageAccessManager);
|
||||
const storageEventManager = new StorageEventManagerObsidian(this, this, storageAccessManager);
|
||||
const storageAccess = new ServiceFileAccessObsidian({
|
||||
API: this.services.API,
|
||||
setting: this.services.setting,
|
||||
fileProcessing: this.services.fileProcessing,
|
||||
vault: this.services.vault,
|
||||
appLifecycle: this.services.appLifecycle,
|
||||
storageEventManager: storageEventManager,
|
||||
storageAccessManager: storageAccessManager,
|
||||
vaultAccess: vaultAccess,
|
||||
});
|
||||
|
||||
const databaseFileAccess = new ServiceDatabaseFileAccess({
|
||||
API: this.services.API,
|
||||
database: this.services.database,
|
||||
path: this.services.path,
|
||||
storageAccess: storageAccess,
|
||||
vault: this.services.vault,
|
||||
});
|
||||
|
||||
const fileHandler = new ServiceFileHandler({
|
||||
API: this.services.API,
|
||||
databaseFileAccess: databaseFileAccess,
|
||||
conflict: this.services.conflict,
|
||||
setting: this.services.setting,
|
||||
fileProcessing: this.services.fileProcessing,
|
||||
vault: this.services.vault,
|
||||
path: this.services.path,
|
||||
replication: this.services.replication,
|
||||
storageAccess: storageAccess,
|
||||
});
|
||||
const rebuilder = new ServiceRebuilder({
|
||||
API: this.services.API,
|
||||
database: this.services.database,
|
||||
appLifecycle: this.services.appLifecycle,
|
||||
setting: this.services.setting,
|
||||
remote: this.services.remote,
|
||||
databaseEvents: this.services.databaseEvents,
|
||||
replication: this.services.replication,
|
||||
replicator: this.services.replicator,
|
||||
UI: this.services.UI,
|
||||
vault: this.services.vault,
|
||||
fileHandler: fileHandler,
|
||||
storageAccess: storageAccess,
|
||||
});
|
||||
return {
|
||||
rebuilder,
|
||||
fileHandler,
|
||||
databaseFileAccess,
|
||||
storageAccess,
|
||||
};
|
||||
}
|
||||
|
||||
constructor(app: App, manifest: PluginManifest) {
|
||||
super(app, manifest);
|
||||
this.initialiseServices();
|
||||
this.registerModules();
|
||||
this.registerAddOns();
|
||||
const instances = this.initialiseServiceModules();
|
||||
this.rebuilder = instances.rebuilder;
|
||||
this.fileHandler = instances.fileHandler;
|
||||
this.databaseFileAccess = instances.databaseFileAccess;
|
||||
this.storageAccess = instances.storageAccess;
|
||||
this.bindModuleFunctions();
|
||||
}
|
||||
|
||||
private async _startUp() {
|
||||
|
||||
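As a quick orientation for the accessors added above: getAddOn() looks an add-on up by its class name string and returns undefined when missing, while getModule() takes a constructor and throws if the module is not loaded. A hedged usage sketch (the plugin variable and the call site are assumptions, not part of this commit):

// Illustrative usage only.
const hiddenFileSync = plugin.getAddOn<HiddenFileSync>("HiddenFileSync"); // undefined if not registered
const setupManager = plugin.getModule(SetupManager); // throws if the module is not loaded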
@@ -1,7 +1,6 @@
|
||||
import { LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE, Logger } from "octagonal-wheels/common/logger";
|
||||
import type { AnyEntry, FilePathWithPrefix } from "@lib/common/types";
|
||||
import type { LiveSyncCore } from "@/main";
|
||||
import { __$checkInstanceBinding } from "@lib/dev/checks";
|
||||
import { stripAllPrefixes } from "@lib/string_and_binary/path";
|
||||
import { createInstanceLogFunction } from "@/lib/src/services/lib/logUtils";
|
||||
|
||||
@@ -41,9 +40,7 @@ export abstract class AbstractModule {
|
||||
// Override if needed.
|
||||
}
|
||||
constructor(public core: LiveSyncCore) {
|
||||
this.onBindFunction(core, core.services);
|
||||
Logger(`[${this.constructor.name}] Loaded`, LOG_LEVEL_VERBOSE);
|
||||
__$checkInstanceBinding(this);
|
||||
}
|
||||
saveSettings = this.core.saveSettings.bind(this.core);
|
||||
|
||||
|
||||
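The AbstractModule constructor above now calls onBindFunction(core, core.services) and then __$checkInstanceBinding(this), so each module is expected to attach its handlers to service hooks at construction time. A minimal sketch of that pattern, modelled on the modules shown further below (ExampleModule is a hypothetical name; only the hook-registration shape is taken from this commit):

// Illustrative sketch of the onBindFunction pattern.
class ExampleModule extends AbstractModule {
    private _everyOnload(): Promise<boolean> {
        // Do per-module initialisation here.
        return Promise.resolve(true);
    }
    onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
        // Register bound handlers on the service hub's hooks.
        services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
    }
}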
@@ -1,352 +0,0 @@
|
||||
import { LOG_LEVEL_VERBOSE } from "octagonal-wheels/common/logger";
|
||||
import { EVENT_FILE_SAVED, eventHub } from "../../common/events";
|
||||
import {
|
||||
getDatabasePathFromUXFileInfo,
|
||||
getStoragePathFromUXFileInfo,
|
||||
isInternalMetadata,
|
||||
markChangesAreSame,
|
||||
} from "../../common/utils";
|
||||
import type {
|
||||
UXFileInfoStub,
|
||||
FilePathWithPrefix,
|
||||
UXFileInfo,
|
||||
MetaEntry,
|
||||
LoadedEntry,
|
||||
FilePath,
|
||||
SavingEntry,
|
||||
DocumentID,
|
||||
} from "../../lib/src/common/types";
|
||||
import type { DatabaseFileAccess } from "../interfaces/DatabaseFileAccess";
|
||||
import { isPlainText, shouldBeIgnored, stripAllPrefixes } from "../../lib/src/string_and_binary/path";
|
||||
import {
|
||||
createBlob,
|
||||
createTextBlob,
|
||||
delay,
|
||||
determineTypeFromBlob,
|
||||
isDocContentSame,
|
||||
readContent,
|
||||
} from "../../lib/src/common/utils";
|
||||
import { serialized } from "octagonal-wheels/concurrency/lock";
|
||||
import { AbstractModule } from "../AbstractModule.ts";
|
||||
import { ICHeader } from "../../common/types.ts";
|
||||
import type { LiveSyncCore } from "../../main.ts";
|
||||
|
||||
export class ModuleDatabaseFileAccess extends AbstractModule implements DatabaseFileAccess {
|
||||
private _everyOnload(): Promise<boolean> {
|
||||
this.core.databaseFileAccess = this;
|
||||
return Promise.resolve(true);
|
||||
}
|
||||
|
||||
private async _everyModuleTest(): Promise<boolean> {
|
||||
if (!this.settings.enableDebugTools) return Promise.resolve(true);
|
||||
const testString = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nullam nec purus nec nunc";
|
||||
// Before test, we need to delete completely.
|
||||
const conflicts = await this.getConflictedRevs("autoTest.md" as FilePathWithPrefix);
|
||||
for (const rev of conflicts) {
|
||||
await this.delete("autoTest.md" as FilePathWithPrefix, rev);
|
||||
}
|
||||
await this.delete("autoTest.md" as FilePathWithPrefix);
|
||||
// OK, begin!
|
||||
|
||||
await this._test(
|
||||
"storeContent",
|
||||
async () => await this.storeContent("autoTest.md" as FilePathWithPrefix, testString)
|
||||
);
|
||||
// For test, we need to clear the caches.
|
||||
this.localDatabase.clearCaches();
|
||||
await this._test("readContent", async () => {
|
||||
const content = await this.fetch("autoTest.md" as FilePathWithPrefix);
|
||||
if (!content) return "File not found";
|
||||
if (content.deleted) return "File is deleted";
|
||||
return (await content.body.text()) == testString
|
||||
? true
|
||||
: `Content is not same ${await content.body.text()}`;
|
||||
});
|
||||
await this._test("delete", async () => await this.delete("autoTest.md" as FilePathWithPrefix));
|
||||
await this._test("read deleted content", async () => {
|
||||
const content = await this.fetch("autoTest.md" as FilePathWithPrefix);
|
||||
if (!content) return true;
|
||||
if (content.deleted) return true;
|
||||
return `Still exist !:${await content.body.text()},${JSON.stringify(content, undefined, 2)}`;
|
||||
});
|
||||
await delay(100);
|
||||
return this.testDone();
|
||||
}
|
||||
|
||||
async checkIsTargetFile(file: UXFileInfoStub | FilePathWithPrefix): Promise<boolean> {
|
||||
const path = getStoragePathFromUXFileInfo(file);
|
||||
if (!(await this.services.vault.isTargetFile(path))) {
|
||||
this._log(`File is not target: ${path}`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
if (shouldBeIgnored(path)) {
|
||||
this._log(`File should be ignored: ${path}`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
async delete(file: UXFileInfoStub | FilePathWithPrefix, rev?: string): Promise<boolean> {
|
||||
if (!(await this.checkIsTargetFile(file))) {
|
||||
return true;
|
||||
}
|
||||
const fullPath = getDatabasePathFromUXFileInfo(file);
|
||||
try {
|
||||
this._log(`deleteDB By path:${fullPath}`);
|
||||
return await this.deleteFromDBbyPath(fullPath, rev);
|
||||
} catch (ex) {
|
||||
this._log(`Failed to delete ${fullPath}`);
|
||||
this._log(ex, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async createChunks(file: UXFileInfo, force: boolean = false, skipCheck?: boolean): Promise<boolean> {
|
||||
return await this.__store(file, force, skipCheck, true);
|
||||
}
|
||||
|
||||
async store(file: UXFileInfo, force: boolean = false, skipCheck?: boolean): Promise<boolean> {
|
||||
return await this.__store(file, force, skipCheck, false);
|
||||
}
|
||||
async storeContent(path: FilePathWithPrefix, content: string): Promise<boolean> {
|
||||
const blob = createTextBlob(content);
|
||||
const bytes = (await blob.arrayBuffer()).byteLength;
|
||||
const isInternal = path.startsWith(".") ? true : undefined;
|
||||
const dummyUXFileInfo: UXFileInfo = {
|
||||
name: path.split("/").pop() as string,
|
||||
path: path,
|
||||
stat: {
|
||||
size: bytes,
|
||||
ctime: Date.now(),
|
||||
mtime: Date.now(),
|
||||
type: "file",
|
||||
},
|
||||
body: blob,
|
||||
isInternal,
|
||||
};
|
||||
return await this.__store(dummyUXFileInfo, true, false, false);
|
||||
}
|
||||
|
||||
private async __store(
|
||||
file: UXFileInfo,
|
||||
force: boolean = false,
|
||||
skipCheck?: boolean,
|
||||
onlyChunks?: boolean
|
||||
): Promise<boolean> {
|
||||
if (!skipCheck) {
|
||||
if (!(await this.checkIsTargetFile(file))) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
if (!file) {
|
||||
this._log("File seems bad", LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
// const path = getPathFromUXFileInfo(file);
|
||||
const isPlain = isPlainText(file.name);
|
||||
const possiblyLarge = !isPlain;
|
||||
const content = file.body;
|
||||
|
||||
const datatype = determineTypeFromBlob(content);
|
||||
const idPrefix = file.isInternal ? ICHeader : "";
|
||||
const fullPath = getStoragePathFromUXFileInfo(file);
|
||||
const fullPathOnDB = getDatabasePathFromUXFileInfo(file);
|
||||
|
||||
if (possiblyLarge) this._log(`Processing: ${fullPath}`, LOG_LEVEL_VERBOSE);
|
||||
|
||||
// if (isInternalMetadata(fullPath)) {
|
||||
// this._log(`Internal file: ${fullPath}`, LOG_LEVEL_VERBOSE);
|
||||
// return false;
|
||||
// }
|
||||
if (file.isInternal) {
|
||||
if (file.deleted) {
|
||||
file.stat = {
|
||||
size: 0,
|
||||
ctime: Date.now(),
|
||||
mtime: Date.now(),
|
||||
type: "file",
|
||||
};
|
||||
} else if (file.stat == undefined) {
|
||||
const stat = await this.core.storageAccess.statHidden(file.path);
|
||||
if (!stat) {
|
||||
// By this point we should already know whether the file was actually deleted, so this is an unexpected case; we should raise an error.
this._log(`Internal file not found: ${fullPath}`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
file.stat = stat;
|
||||
}
|
||||
}
|
||||
|
||||
const idMain = await this.services.path.path2id(fullPath);
|
||||
|
||||
const id = (idPrefix + idMain) as DocumentID;
|
||||
const d: SavingEntry = {
|
||||
_id: id,
|
||||
path: fullPathOnDB,
|
||||
data: content,
|
||||
ctime: file.stat.ctime,
|
||||
mtime: file.stat.mtime,
|
||||
size: file.stat.size,
|
||||
children: [],
|
||||
datatype: datatype,
|
||||
type: datatype,
|
||||
eden: {},
|
||||
};
|
||||
// upsert should be locked
const msg = `STORAGE -> DB (${datatype}) `;
|
||||
const isNotChanged = await serialized("file-" + fullPath, async () => {
|
||||
if (force) {
|
||||
this._log(msg + "Force writing " + fullPath, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
// Commented out temporarily: this checks whether the file was made by ourselves.
// if (this.core.storageAccess.recentlyTouched(file)) {
|
||||
// return true;
|
||||
// }
|
||||
try {
|
||||
const old = await this.localDatabase.getDBEntry(d.path, undefined, false, true, false);
|
||||
if (old !== false) {
|
||||
const oldData = { data: old.data, deleted: old._deleted || old.deleted };
|
||||
const newData = { data: d.data, deleted: d._deleted || d.deleted };
|
||||
if (oldData.deleted != newData.deleted) return false;
|
||||
if (!(await isDocContentSame(old.data, newData.data))) return false;
|
||||
this._log(
|
||||
msg + "Skipped (not changed) " + fullPath + (d._deleted || d.deleted ? " (deleted)" : ""),
|
||||
LOG_LEVEL_VERBOSE
|
||||
);
|
||||
markChangesAreSame(old, d.mtime, old.mtime);
|
||||
return true;
|
||||
// d._rev = old._rev;
|
||||
}
|
||||
} catch (ex) {
|
||||
this._log(
|
||||
msg +
|
||||
"Error, Could not check the diff for the old one." +
|
||||
(force ? "force writing." : "") +
|
||||
fullPath +
|
||||
(d._deleted || d.deleted ? " (deleted)" : ""),
|
||||
LOG_LEVEL_VERBOSE
|
||||
);
|
||||
this._log(ex, LOG_LEVEL_VERBOSE);
|
||||
return !force;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
if (isNotChanged) {
|
||||
this._log(msg + " Skip " + fullPath, LOG_LEVEL_VERBOSE);
|
||||
return true;
|
||||
}
|
||||
const ret = await this.localDatabase.putDBEntry(d, onlyChunks);
|
||||
if (ret !== false) {
|
||||
this._log(msg + fullPath);
|
||||
eventHub.emitEvent(EVENT_FILE_SAVED);
|
||||
}
|
||||
return ret != false;
|
||||
}
|
||||
|
||||
async getConflictedRevs(file: UXFileInfoStub | FilePathWithPrefix): Promise<string[]> {
|
||||
if (!(await this.checkIsTargetFile(file))) {
|
||||
return [];
|
||||
}
|
||||
const filename = getDatabasePathFromUXFileInfo(file);
|
||||
const doc = await this.localDatabase.getDBEntryMeta(filename, { conflicts: true }, true);
|
||||
if (doc === false) {
|
||||
return [];
|
||||
}
|
||||
return doc._conflicts || [];
|
||||
}
|
||||
|
||||
async fetch(
|
||||
file: UXFileInfoStub | FilePathWithPrefix,
|
||||
rev?: string,
|
||||
waitForReady?: boolean,
|
||||
skipCheck = false
|
||||
): Promise<UXFileInfo | false> {
|
||||
if (skipCheck && !(await this.checkIsTargetFile(file))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const entry = await this.fetchEntry(file, rev, waitForReady, true);
|
||||
if (entry === false) {
|
||||
return false;
|
||||
}
|
||||
const data = createBlob(readContent(entry));
|
||||
const path = stripAllPrefixes(entry.path);
|
||||
const fileInfo: UXFileInfo = {
|
||||
name: path.split("/").pop() as string,
|
||||
path: path,
|
||||
stat: {
|
||||
size: entry.size,
|
||||
ctime: entry.ctime,
|
||||
mtime: entry.mtime,
|
||||
type: "file",
|
||||
},
|
||||
body: data,
|
||||
deleted: entry.deleted || entry._deleted,
|
||||
};
|
||||
if (isInternalMetadata(entry.path)) {
|
||||
fileInfo.isInternal = true;
|
||||
}
|
||||
return fileInfo;
|
||||
}
|
||||
async fetchEntryMeta(
|
||||
file: UXFileInfoStub | FilePathWithPrefix,
|
||||
rev?: string,
|
||||
skipCheck = false
|
||||
): Promise<MetaEntry | false> {
|
||||
const dbFileName = getDatabasePathFromUXFileInfo(file);
|
||||
if (skipCheck && !(await this.checkIsTargetFile(file))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const doc = await this.localDatabase.getDBEntryMeta(dbFileName, rev ? { rev: rev } : undefined, true);
|
||||
if (doc === false) {
|
||||
return false;
|
||||
}
|
||||
return doc as MetaEntry;
|
||||
}
|
||||
async fetchEntryFromMeta(
|
||||
meta: MetaEntry,
|
||||
waitForReady: boolean = true,
|
||||
skipCheck = false
|
||||
): Promise<LoadedEntry | false> {
|
||||
if (skipCheck && !(await this.checkIsTargetFile(meta.path))) {
|
||||
return false;
|
||||
}
|
||||
const doc = await this.localDatabase.getDBEntryFromMeta(meta as LoadedEntry, false, waitForReady);
|
||||
if (doc === false) {
|
||||
return false;
|
||||
}
|
||||
return doc;
|
||||
}
|
||||
async fetchEntry(
|
||||
file: UXFileInfoStub | FilePathWithPrefix,
|
||||
rev?: string,
|
||||
waitForReady: boolean = true,
|
||||
skipCheck = false
|
||||
): Promise<LoadedEntry | false> {
|
||||
if (skipCheck && !(await this.checkIsTargetFile(file))) {
|
||||
return false;
|
||||
}
|
||||
const entry = await this.fetchEntryMeta(file, rev, true);
|
||||
if (entry === false) {
|
||||
return false;
|
||||
}
|
||||
const doc = await this.fetchEntryFromMeta(entry, waitForReady, true);
|
||||
return doc;
|
||||
}
|
||||
async deleteFromDBbyPath(fullPath: FilePath | FilePathWithPrefix, rev?: string): Promise<boolean> {
|
||||
if (!(await this.checkIsTargetFile(fullPath))) {
|
||||
this._log(`storeFromStorage: File is not target: ${fullPath}`);
|
||||
return true;
|
||||
}
|
||||
const opt = rev ? { rev: rev } : undefined;
|
||||
const ret = await this.localDatabase.deleteDBEntry(fullPath, opt);
|
||||
eventHub.emitEvent(EVENT_FILE_SAVED);
|
||||
return ret;
|
||||
}
|
||||
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
|
||||
services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
|
||||
services.test.test.addHandler(this._everyModuleTest.bind(this));
|
||||
}
|
||||
}
|
||||
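The removed ModuleDatabaseFileAccess above implemented the DatabaseFileAccess surface (store / storeContent / fetch / delete, plus conflict helpers); per this refactor it is now provided by ServiceDatabaseFileAccess. A hedged round-trip sketch of that surface, mirroring the self-test in the removed module (databaseFileAccess stands for whichever implementation is wired in; illustrative only):

// Illustrative round trip, based on the self-test shown above.
const path = "autoTest.md" as FilePathWithPrefix;
await databaseFileAccess.storeContent(path, "hello"); // write the content into the local database
const fetched = await databaseFileAccess.fetch(path); // UXFileInfo | false
if (fetched && !fetched.deleted) {
    console.log(await fetched.body.text()); // "hello"
}
await databaseFileAccess.delete(path); // mark the entry as deleted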
@@ -1,436 +0,0 @@
|
||||
import { LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "octagonal-wheels/common/logger";
|
||||
import { serialized } from "octagonal-wheels/concurrency/lock";
|
||||
import type { FileEventItem } from "../../common/types";
|
||||
import type {
|
||||
FilePath,
|
||||
FilePathWithPrefix,
|
||||
MetaEntry,
|
||||
UXFileInfo,
|
||||
UXFileInfoStub,
|
||||
UXInternalFileInfoStub,
|
||||
} from "../../lib/src/common/types";
|
||||
import { AbstractModule } from "../AbstractModule.ts";
|
||||
import { compareFileFreshness, EVEN, getStoragePathFromUXFileInfo, markChangesAreSame } from "../../common/utils";
|
||||
import { getDocDataAsArray, isDocContentSame, readAsBlob, readContent } from "../../lib/src/common/utils";
|
||||
import { shouldBeIgnored } from "../../lib/src/string_and_binary/path";
|
||||
import { Semaphore } from "octagonal-wheels/concurrency/semaphore";
|
||||
import { eventHub } from "../../common/events.ts";
|
||||
import type { LiveSyncCore } from "../../main.ts";
|
||||
|
||||
export class ModuleFileHandler extends AbstractModule {
|
||||
get db() {
|
||||
return this.core.databaseFileAccess;
|
||||
}
|
||||
get storage() {
|
||||
return this.core.storageAccess;
|
||||
}
|
||||
|
||||
_everyOnloadStart(): Promise<boolean> {
|
||||
this.core.fileHandler = this;
|
||||
return Promise.resolve(true);
|
||||
}
|
||||
|
||||
async readFileFromStub(file: UXFileInfoStub | UXFileInfo) {
|
||||
if ("body" in file && file.body) {
|
||||
return file;
|
||||
}
|
||||
const readFile = await this.storage.readStubContent(file);
|
||||
if (!readFile) {
|
||||
throw new Error(`File ${file.path} is not exist on the storage`);
|
||||
}
|
||||
return readFile;
|
||||
}
|
||||
|
||||
async storeFileToDB(
|
||||
info: UXFileInfoStub | UXFileInfo | UXInternalFileInfoStub | FilePathWithPrefix,
|
||||
force: boolean = false,
|
||||
onlyChunks: boolean = false
|
||||
): Promise<boolean> {
|
||||
const file = typeof info === "string" ? this.storage.getFileStub(info) : info;
|
||||
if (file == null) {
|
||||
this._log(`File ${info} is not exist on the storage`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
// const file = item.args.file;
|
||||
if (file.isInternal) {
|
||||
this._log(
|
||||
`Internal file ${file.path} is not allowed to be processed on processFileEvent`,
|
||||
LOG_LEVEL_VERBOSE
|
||||
);
|
||||
return false;
|
||||
}
|
||||
// First, check the file on the database
|
||||
const entry = await this.db.fetchEntry(file, undefined, true, true);
|
||||
|
||||
if (!entry || entry.deleted || entry._deleted) {
|
||||
// If the file does not exist in the database, then it should be created.
const readFile = await this.readFileFromStub(file);
|
||||
if (!onlyChunks) {
|
||||
return await this.db.store(readFile);
|
||||
} else {
|
||||
return await this.db.createChunks(readFile, false, true);
|
||||
}
|
||||
}
|
||||
|
||||
// The entry exists in the database; check the difference between the file and the entry.

let shouldApplied = false;
|
||||
if (!force && !onlyChunks) {
|
||||
// 1. If the timestamp is far different, then the entry should be updated.
// Note: This checks only the mtime, with the resolution reduced to 2 seconds.
// The 2 seconds are for the ZIP file's mtime; otherwise, we could not back up the vault as a ZIP file.
// This is hardcoded in `compareMtime` of `src/common/utils.ts`.
|
||||
if (compareFileFreshness(file, entry) !== EVEN) {
|
||||
shouldApplied = true;
|
||||
}
|
||||
// 2. if not, the content should be checked.
|
||||
let readFile: UXFileInfo | undefined = undefined;
|
||||
if (!shouldApplied) {
|
||||
readFile = await this.readFileFromStub(file);
|
||||
if (!readFile) {
|
||||
this._log(`File ${file.path} is not exist on the storage`, LOG_LEVEL_NOTICE);
|
||||
return false;
|
||||
}
|
||||
if (await isDocContentSame(getDocDataAsArray(entry.data), readFile.body)) {
|
||||
// The timestamp differs but the content is the same; therefore, the two timestamps should be treated as equal.
// So, mark the changes as the same.
markChangesAreSame(readFile, readFile.stat.mtime, entry.mtime);
|
||||
} else {
|
||||
shouldApplied = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (!shouldApplied) {
|
||||
this._log(`File ${file.path} is not changed`, LOG_LEVEL_VERBOSE);
|
||||
return true;
|
||||
}
|
||||
if (!readFile) readFile = await this.readFileFromStub(file);
|
||||
// If the file is changed, then the file should be stored.
|
||||
if (onlyChunks) {
|
||||
return await this.db.createChunks(readFile, false, true);
|
||||
} else {
|
||||
return await this.db.store(readFile, false, true);
|
||||
}
|
||||
} else {
|
||||
// If force is true, then it should be updated.
|
||||
const readFile = await this.readFileFromStub(file);
|
||||
if (onlyChunks) {
|
||||
return await this.db.createChunks(readFile, true, true);
|
||||
} else {
|
||||
return await this.db.store(readFile, true, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async deleteFileFromDB(info: UXFileInfoStub | UXInternalFileInfoStub | FilePath): Promise<boolean> {
|
||||
const file = typeof info === "string" ? this.storage.getFileStub(info) : info;
|
||||
if (file == null) {
|
||||
this._log(`File ${info} is not exist on the storage`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
// const file = item.args.file;
|
||||
if (file.isInternal) {
|
||||
this._log(
|
||||
`Internal file ${file.path} is not allowed to be processed on processFileEvent`,
|
||||
LOG_LEVEL_VERBOSE
|
||||
);
|
||||
return false;
|
||||
}
|
||||
// First, check the file on the database
|
||||
const entry = await this.db.fetchEntry(file, undefined, true, true);
|
||||
if (!entry || entry.deleted || entry._deleted) {
|
||||
this._log(`File ${file.path} is not exist or already deleted on the database`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
// Check the file is already conflicted. if so, only the conflicted one should be deleted.
|
||||
const conflictedRevs = await this.db.getConflictedRevs(file);
|
||||
if (conflictedRevs.length > 0) {
|
||||
// If conflicted, then it should be deleted. entry._rev should be our own file's rev.
// TODO: I BELIEVED SO, BUT I NOTICED THAT I AM NOT SURE. I SHOULD CHECK THIS.
// ANYWAY, I SHOULD DELETE THE FILE. ACTUALLY, WE SIMPLY DELETED THE FILE IN PREVIOUS VERSIONS.
return await this.db.delete(file, entry._rev);
|
||||
}
|
||||
// Otherwise, the file should be deleted simply. This is the previous behaviour.
|
||||
return await this.db.delete(file);
|
||||
}
|
||||
|
||||
async deleteRevisionFromDB(
|
||||
info: UXFileInfoStub | FilePath | FilePathWithPrefix,
|
||||
rev: string
|
||||
): Promise<boolean | undefined> {
|
||||
//TODO: Possibly check the conflicting.
|
||||
return await this.db.delete(info, rev);
|
||||
}
|
||||
|
||||
async resolveConflictedByDeletingRevision(
|
||||
info: UXFileInfoStub | FilePath,
|
||||
rev: string
|
||||
): Promise<boolean | undefined> {
|
||||
const path = getStoragePathFromUXFileInfo(info);
|
||||
if (!(await this.deleteRevisionFromDB(info, rev))) {
|
||||
this._log(`Failed to delete the conflicted revision ${rev} of ${path}`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
if (!(await this.dbToStorageWithSpecificRev(info, rev, true))) {
|
||||
this._log(`Failed to apply the resolved revision ${rev} of ${path} to the storage`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async dbToStorageWithSpecificRev(
|
||||
info: UXFileInfoStub | UXFileInfo | FilePath | null,
|
||||
rev: string,
|
||||
force?: boolean
|
||||
): Promise<boolean> {
|
||||
const file = typeof info === "string" ? this.storage.getFileStub(info) : info;
|
||||
if (file == null) {
|
||||
this._log(`File ${info} is not exist on the storage`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
const docEntry = await this.db.fetchEntryMeta(file, rev, true);
|
||||
if (!docEntry) {
|
||||
this._log(`File ${file.path} is not exist on the database`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
return await this.dbToStorage(docEntry, file, force);
|
||||
}
|
||||
|
||||
async dbToStorage(
|
||||
entryInfo: MetaEntry | FilePathWithPrefix,
|
||||
info: UXFileInfoStub | UXFileInfo | FilePath | null,
|
||||
force?: boolean
|
||||
): Promise<boolean> {
|
||||
const file = typeof info === "string" ? this.storage.getFileStub(info) : info;
|
||||
const mode = file == null ? "create" : "modify";
|
||||
const pathFromEntryInfo = typeof entryInfo === "string" ? entryInfo : this.getPath(entryInfo);
|
||||
const docEntry = await this.db.fetchEntryMeta(pathFromEntryInfo, undefined, true);
|
||||
if (!docEntry) {
|
||||
this._log(`File ${pathFromEntryInfo} is not exist on the database`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
const path = this.getPath(docEntry);
|
||||
|
||||
// 1. Check if it already conflicted.
|
||||
const revs = await this.db.getConflictedRevs(path);
|
||||
if (revs.length > 0) {
|
||||
// Some conflicts exist.
if (this.settings.writeDocumentsIfConflicted) {
|
||||
// If configured to write the document even if conflicted, then it should be written.
|
||||
// NO OP
|
||||
} else {
|
||||
// If not, then it should be checked. and will be processed later (i.e., after the conflict is resolved).
|
||||
await this.services.conflict.queueCheckForIfOpen(path);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Check if the file is already exist on the storage.
|
||||
const existDoc = this.storage.getStub(path);
|
||||
if (existDoc && existDoc.isFolder) {
|
||||
this._log(`Folder ${path} is already exist on the storage as a folder`, LOG_LEVEL_VERBOSE);
|
||||
// We can do nothing, and other modules also have nothing to do.
return true;
|
||||
}
|
||||
|
||||
// Check existence of both file and docEntry.
|
||||
const existOnDB = !(docEntry._deleted || docEntry.deleted || false);
|
||||
const existOnStorage = existDoc != null;
|
||||
if (!existOnDB && !existOnStorage) {
|
||||
this._log(`File ${path} seems to be deleted, but already not on storage`, LOG_LEVEL_VERBOSE);
|
||||
return true;
|
||||
}
|
||||
if (!existOnDB && existOnStorage) {
|
||||
// Deletion has been transferred. Storage files will be deleted.
// Note: If the folder becomes empty, the folder will be deleted unless configured to keep it.
// This behaviour is implemented in `ModuleFileAccessObsidian`.
// And it does not care whether the item had actually been deleted.
await this.storage.deleteVaultItem(path);
|
||||
return true;
|
||||
}
|
||||
// Okay, the file exists in the database. Let's check whether the file exists on the storage.
const docRead = await this.db.fetchEntryFromMeta(docEntry);
|
||||
if (!docRead) {
|
||||
this._log(`File ${path} is not exist on the database`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
|
||||
// If we want to process size-mismatched files (e.g., files created by some integrations), enable the toggle.
if (!this.settings.processSizeMismatchedFiles) {
|
||||
// Check the file is not corrupted
|
||||
// (Zero is a special case, may be created by some APIs and it might be acceptable).
|
||||
if (docRead.size != 0 && docRead.size !== readAsBlob(docRead).size) {
|
||||
this._log(
|
||||
`File ${path} seems to be corrupted! Writing prevented. (${docRead.size} != ${readAsBlob(docRead).size})`,
|
||||
LOG_LEVEL_NOTICE
|
||||
);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
const docData = readContent(docRead);
|
||||
|
||||
if (existOnStorage && !force) {
|
||||
// The file exists on the storage. Let's check the difference between the file and the entry.
// But, if force is true, then it should be updated.
// Ok, we have to compare.
let shouldApplied = false;
// 1. If the timestamp is far different, then the file should be updated.
// Note: This checks only the mtime, with the resolution reduced to 2 seconds.
// The 2 seconds are for the ZIP file's mtime; otherwise, we could not back up the vault as a ZIP file.
// This is hardcoded in `compareMtime` of `src/common/utils.ts`.
|
||||
if (compareFileFreshness(existDoc, docEntry) !== EVEN) {
|
||||
shouldApplied = true;
|
||||
}
|
||||
// 2. if not, the content should be checked.
|
||||
|
||||
if (!shouldApplied) {
|
||||
const readFile = await this.readFileFromStub(existDoc);
|
||||
if (await isDocContentSame(docData, readFile.body)) {
|
||||
// The content is the same, so we do not need to update the file.
shouldApplied = false;
// The timestamp differs but the content is the same; therefore, the two timestamps should be treated as equal.
// So, mark the changes as the same.
markChangesAreSame(docRead, docRead.mtime, existDoc.stat.mtime);
|
||||
} else {
|
||||
shouldApplied = true;
|
||||
}
|
||||
}
|
||||
if (!shouldApplied) {
|
||||
this._log(`File ${docRead.path} is not changed`, LOG_LEVEL_VERBOSE);
|
||||
return true;
|
||||
}
|
||||
// Let's apply the changes.
|
||||
} else {
|
||||
this._log(
|
||||
`File ${docRead.path} ${existOnStorage ? "(new) " : ""} ${force ? " (forced)" : ""}`,
|
||||
LOG_LEVEL_VERBOSE
|
||||
);
|
||||
}
|
||||
await this.storage.ensureDir(path);
|
||||
const ret = await this.storage.writeFileAuto(path, docData, { ctime: docRead.ctime, mtime: docRead.mtime });
|
||||
await this.storage.touched(path);
|
||||
this.storage.triggerFileEvent(mode, path);
|
||||
return ret;
|
||||
}
|
||||
|
||||
private async _anyHandlerProcessesFileEvent(item: FileEventItem): Promise<boolean> {
|
||||
const eventItem = item.args;
|
||||
const type = item.type;
|
||||
const path = eventItem.file.path;
|
||||
if (!(await this.services.vault.isTargetFile(path))) {
|
||||
this._log(`File ${path} is not the target file`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
if (shouldBeIgnored(path)) {
|
||||
this._log(`File ${path} should be ignored`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
const lockKey = `processFileEvent-${path}`;
|
||||
return await serialized(lockKey, async () => {
|
||||
switch (type) {
|
||||
case "CREATE":
|
||||
case "CHANGED":
|
||||
return await this.storeFileToDB(item.args.file);
|
||||
case "DELETE":
|
||||
return await this.deleteFileFromDB(item.args.file);
|
||||
case "INTERNAL":
|
||||
// This should be handled by another module.
return false;
|
||||
default:
|
||||
this._log(`Unsupported event type: ${type}`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async _anyProcessReplicatedDoc(entry: MetaEntry): Promise<boolean> {
|
||||
return await serialized(entry.path, async () => {
|
||||
if (!(await this.services.vault.isTargetFile(entry.path))) {
|
||||
this._log(`File ${entry.path} is not the target file`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
if (this.services.vault.isFileSizeTooLarge(entry.size)) {
|
||||
this._log(`File ${entry.path} is too large (on database) to be processed`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
if (shouldBeIgnored(entry.path)) {
|
||||
this._log(`File ${entry.path} should be ignored`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
const path = this.getPath(entry);
|
||||
|
||||
const targetFile = this.storage.getStub(this.getPathWithoutPrefix(entry));
|
||||
if (targetFile && targetFile.isFolder) {
|
||||
this._log(`${path} is already exist as the folder`);
|
||||
// Nothing to do, and other modules also have nothing to do.
return true;
|
||||
} else {
|
||||
if (targetFile && this.services.vault.isFileSizeTooLarge(targetFile.stat.size)) {
|
||||
this._log(`File ${targetFile.path} is too large (on storage) to be processed`, LOG_LEVEL_VERBOSE);
|
||||
return false;
|
||||
}
|
||||
this._log(
|
||||
`Processing ${path} (${entry._id.substring(0, 8)} :${entry._rev?.substring(0, 5)}) : Started...`,
|
||||
LOG_LEVEL_VERBOSE
|
||||
);
|
||||
// Before writing (or skipping), the merging dialogue should be cancelled.
eventHub.emitEvent("conflict-cancelled", path);
|
||||
const ret = await this.dbToStorage(entry, targetFile);
|
||||
this._log(`Processing ${path} (${entry._id.substring(0, 8)} :${entry._rev?.substring(0, 5)}) : Done`);
|
||||
return ret;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async createAllChunks(showingNotice?: boolean): Promise<void> {
|
||||
this._log("Collecting local files on the storage", LOG_LEVEL_VERBOSE);
|
||||
const semaphore = Semaphore(10);
|
||||
|
||||
let processed = 0;
|
||||
const filesStorageSrc = this.storage.getFiles();
|
||||
const incProcessed = () => {
|
||||
processed++;
|
||||
if (processed % 25 == 0)
|
||||
this._log(
|
||||
`Creating missing chunks: ${processed} of ${total} files`,
|
||||
showingNotice ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO,
|
||||
"chunkCreation"
|
||||
);
|
||||
};
|
||||
const total = filesStorageSrc.length;
|
||||
const procAllChunks = filesStorageSrc.map(async (file) => {
|
||||
if (!(await this.services.vault.isTargetFile(file))) {
|
||||
incProcessed();
|
||||
return true;
|
||||
}
|
||||
if (this.services.vault.isFileSizeTooLarge(file.stat.size)) {
|
||||
incProcessed();
|
||||
return true;
|
||||
}
|
||||
if (shouldBeIgnored(file.path)) {
|
||||
incProcessed();
|
||||
return true;
|
||||
}
|
||||
const release = await semaphore.acquire();
|
||||
incProcessed();
|
||||
try {
|
||||
await this.storeFileToDB(file, false, true);
|
||||
} catch (ex) {
|
||||
this._log(ex, LOG_LEVEL_VERBOSE);
|
||||
} finally {
|
||||
release();
|
||||
}
|
||||
});
|
||||
await Promise.all(procAllChunks);
|
||||
this._log(
|
||||
`Creating chunks Done: ${processed} of ${total} files`,
|
||||
showingNotice ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO,
|
||||
"chunkCreation"
|
||||
);
|
||||
}
|
||||
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
|
||||
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
|
||||
services.fileProcessing.processFileEvent.addHandler(this._anyHandlerProcessesFileEvent.bind(this));
|
||||
services.replication.processSynchroniseResult.addHandler(this._anyProcessReplicatedDoc.bind(this));
|
||||
}
|
||||
}
|
||||
@@ -1,313 +0,0 @@
|
||||
import { delay } from "octagonal-wheels/promises";
|
||||
import {
|
||||
DEFAULT_SETTINGS,
|
||||
FLAGMD_REDFLAG2_HR,
|
||||
FLAGMD_REDFLAG3_HR,
|
||||
LOG_LEVEL_NOTICE,
|
||||
LOG_LEVEL_VERBOSE,
|
||||
REMOTE_COUCHDB,
|
||||
REMOTE_MINIO,
|
||||
} from "../../lib/src/common/types.ts";
|
||||
import { AbstractModule } from "../AbstractModule.ts";
|
||||
import type { Rebuilder } from "../interfaces/DatabaseRebuilder.ts";
|
||||
import type { LiveSyncCouchDBReplicator } from "../../lib/src/replication/couchdb/LiveSyncReplicator.ts";
|
||||
import { fetchAllUsedChunks } from "@/lib/src/pouchdb/chunks.ts";
|
||||
import { EVENT_DATABASE_REBUILT, eventHub } from "src/common/events.ts";
|
||||
import type { LiveSyncCore } from "../../main.ts";
|
||||
|
||||
export class ModuleRebuilder extends AbstractModule implements Rebuilder {
|
||||
private _everyOnload(): Promise<boolean> {
|
||||
this.core.rebuilder = this;
|
||||
return Promise.resolve(true);
|
||||
}
|
||||
async $performRebuildDB(
|
||||
method: "localOnly" | "remoteOnly" | "rebuildBothByThisDevice" | "localOnlyWithChunks"
|
||||
): Promise<void> {
|
||||
if (method == "localOnly") {
|
||||
await this.$fetchLocal();
|
||||
}
|
||||
if (method == "localOnlyWithChunks") {
|
||||
await this.$fetchLocal(true);
|
||||
}
|
||||
if (method == "remoteOnly") {
|
||||
await this.$rebuildRemote();
|
||||
}
|
||||
if (method == "rebuildBothByThisDevice") {
|
||||
await this.$rebuildEverything();
|
||||
}
|
||||
}
|
||||
|
||||
async informOptionalFeatures() {
|
||||
await this.core.services.UI.showMarkdownDialog(
|
||||
"All optional features are disabled",
|
||||
`Customisation Sync and Hidden File Sync will all be disabled.
|
||||
Please enable them from the settings screen after setup is complete.`,
|
||||
["OK"]
|
||||
);
|
||||
}
|
||||
async askUsingOptionalFeature(opt: { enableFetch?: boolean; enableOverwrite?: boolean }) {
|
||||
if (
|
||||
(await this.core.confirm.askYesNoDialog(
|
||||
"Do you want to enable extra features? If you are new to Self-hosted LiveSync, try the core feature first!",
|
||||
{ title: "Enable extra features", defaultOption: "No", timeout: 15 }
|
||||
)) == "yes"
|
||||
) {
|
||||
await this.services.setting.suggestOptionalFeatures(opt);
|
||||
}
|
||||
}
|
||||
|
||||
async rebuildRemote() {
|
||||
await this.services.setting.suspendExtraSync();
|
||||
this.core.settings.isConfigured = true;
|
||||
this.core.settings.notifyThresholdOfRemoteStorageSize = DEFAULT_SETTINGS.notifyThresholdOfRemoteStorageSize;
|
||||
await this.services.setting.realiseSetting();
|
||||
await this.services.remote.markLocked();
|
||||
await this.services.remote.tryResetDatabase();
|
||||
await this.services.remote.markLocked();
|
||||
await delay(500);
|
||||
// await this.askUsingOptionalFeature({ enableOverwrite: true });
|
||||
await delay(1000);
|
||||
await this.services.remote.replicateAllToRemote(true);
|
||||
await delay(1000);
|
||||
await this.services.remote.replicateAllToRemote(true, true);
|
||||
await this.informOptionalFeatures();
|
||||
}
|
||||
$rebuildRemote(): Promise<void> {
|
||||
return this.rebuildRemote();
|
||||
}
|
||||
|
||||
async rebuildEverything() {
|
||||
await this.services.setting.suspendExtraSync();
|
||||
// await this.askUseNewAdapter();
|
||||
this.core.settings.isConfigured = true;
|
||||
this.core.settings.notifyThresholdOfRemoteStorageSize = DEFAULT_SETTINGS.notifyThresholdOfRemoteStorageSize;
|
||||
await this.services.setting.realiseSetting();
|
||||
await this.resetLocalDatabase();
|
||||
await delay(1000);
|
||||
await this.services.databaseEvents.initialiseDatabase(true, true, true);
|
||||
await this.services.remote.markLocked();
|
||||
await this.services.remote.tryResetDatabase();
|
||||
await this.services.remote.markLocked();
|
||||
await delay(500);
|
||||
// We do not have any other devices' data, so we do not need to ask for overwriting.
|
||||
// await this.askUsingOptionalFeature({ enableOverwrite: false });
|
||||
await delay(1000);
|
||||
await this.services.remote.replicateAllToRemote(true);
|
||||
await delay(1000);
|
||||
await this.services.remote.replicateAllToRemote(true, true);
|
||||
await this.informOptionalFeatures();
|
||||
}
|
||||
|
||||
$rebuildEverything(): Promise<void> {
return this.rebuildEverything();
}

$fetchLocal(makeLocalChunkBeforeSync?: boolean, preventMakeLocalFilesBeforeSync?: boolean): Promise<void> {
return this.fetchLocal(makeLocalChunkBeforeSync, preventMakeLocalFilesBeforeSync);
}

async scheduleRebuild(): Promise<void> {
try {
await this.core.storageAccess.writeFileAuto(FLAGMD_REDFLAG2_HR, "");
} catch (ex) {
this._log("Could not create red_flag_rebuild.md", LOG_LEVEL_NOTICE);
this._log(ex, LOG_LEVEL_VERBOSE);
}
this.services.appLifecycle.performRestart();
}
async scheduleFetch(): Promise<void> {
try {
await this.core.storageAccess.writeFileAuto(FLAGMD_REDFLAG3_HR, "");
} catch (ex) {
this._log("Could not create red_flag_fetch.md", LOG_LEVEL_NOTICE);
this._log(ex, LOG_LEVEL_VERBOSE);
}
this.services.appLifecycle.performRestart();
}

private async _tryResetRemoteDatabase(): Promise<void> {
await this.core.replicator.tryResetRemoteDatabase(this.settings);
}

private async _tryCreateRemoteDatabase(): Promise<void> {
await this.core.replicator.tryCreateRemoteDatabase(this.settings);
}

private _onResetLocalDatabase(): Promise<boolean> {
this.core.storageAccess.clearTouched();
return Promise.resolve(true);
}

async suspendAllSync() {
this.core.settings.liveSync = false;
this.core.settings.periodicReplication = false;
this.core.settings.syncOnSave = false;
this.core.settings.syncOnEditorSave = false;
this.core.settings.syncOnStart = false;
this.core.settings.syncOnFileOpen = false;
this.core.settings.syncAfterMerge = false;
await this.services.setting.suspendExtraSync();
}
async suspendReflectingDatabase() {
if (this.core.settings.doNotSuspendOnFetching) return;
if (this.core.settings.remoteType == REMOTE_MINIO) return;
this._log(
`Suspending reflection: Database and storage changes will not be reflected in each other until the fetching has completely finished.`,
LOG_LEVEL_NOTICE
);
this.core.settings.suspendParseReplicationResult = true;
this.core.settings.suspendFileWatching = true;
await this.core.saveSettings();
}
async resumeReflectingDatabase() {
if (this.core.settings.doNotSuspendOnFetching) return;
if (this.core.settings.remoteType == REMOTE_MINIO) return;
this._log(`Database and storage reflection has been resumed!`, LOG_LEVEL_NOTICE);
this.core.settings.suspendParseReplicationResult = false;
this.core.settings.suspendFileWatching = false;
await this.services.vault.scanVault(true);
await this.services.replication.onBeforeReplicate(false); //TODO: Check actual need of this.
await this.core.saveSettings();
}
// No longer needed; both adapters have their own advantages and disadvantages.
// async askUseNewAdapter() {
// if (!this.core.settings.useIndexedDBAdapter) {
// const message = `Now this core has been configured to use the old database adapter for keeping compatibility. Do you want to deactivate it?`;
// const CHOICE_YES = "Yes, disable and use latest";
// const CHOICE_NO = "No, keep compatibility";
// const choices = [CHOICE_YES, CHOICE_NO];
//
// const ret = await this.core.confirm.confirmWithMessage(
// "Database adapter",
// message,
// choices,
// CHOICE_YES,
// 10
// );
// if (ret == CHOICE_YES) {
// this.core.settings.useIndexedDBAdapter = true;
// }
// }
// }
async fetchLocal(makeLocalChunkBeforeSync?: boolean, preventMakeLocalFilesBeforeSync?: boolean) {
await this.services.setting.suspendExtraSync();
// await this.askUseNewAdapter();
this.core.settings.isConfigured = true;
this.core.settings.notifyThresholdOfRemoteStorageSize = DEFAULT_SETTINGS.notifyThresholdOfRemoteStorageSize;
if (this.core.settings.maxMTimeForReflectEvents > 0) {
const date = new Date(this.core.settings.maxMTimeForReflectEvents);

const ask = `Your settings restrict file reflection times to no later than ${date}.

**This is a recovery configuration.**

This operation should only be performed on an empty vault.
Are you sure you wish to proceed?`;
const PROCEED = "I understand, proceed";
const CANCEL = "Cancel operation";
const CLEARANDPROCEED = "Clear restriction and proceed";
const choices = [PROCEED, CLEARANDPROCEED, CANCEL] as const;
const ret = await this.core.confirm.askSelectStringDialogue(ask, choices, {
title: "Confirm restricted fetch",
defaultAction: CANCEL,
timeout: 0,
});
if (ret == CLEARANDPROCEED) {
this.core.settings.maxMTimeForReflectEvents = 0;
await this.core.saveSettings();
}
if (ret == CANCEL) {
return;
}
}
await this.suspendReflectingDatabase();
await this.services.setting.realiseSetting();
await this.resetLocalDatabase();
await delay(1000);
await this.services.database.openDatabase({
databaseEvents: this.services.databaseEvents,
replicator: this.services.replicator,
});
// this.core.isReady = true;
this.services.appLifecycle.markIsReady();
if (makeLocalChunkBeforeSync) {
await this.core.fileHandler.createAllChunks(true);
} else if (!preventMakeLocalFilesBeforeSync) {
await this.services.databaseEvents.initialiseDatabase(true, true, true);
} else {
// Do not create local file entries before sync (Means use remote information)
}
await this.services.remote.markResolved();
await delay(500);
await this.services.remote.replicateAllFromRemote(true);
await delay(1000);
await this.services.remote.replicateAllFromRemote(true);
await this.resumeReflectingDatabase();
await this.informOptionalFeatures();
// No longer enable
// await this.askUsingOptionalFeature({ enableFetch: true });
}
async fetchLocalWithRebuild() {
return await this.fetchLocal(true);
}

private async _allSuspendAllSync(): Promise<boolean> {
await this.suspendAllSync();
return true;
}

async resetLocalDatabase() {
if (this.core.settings.isConfigured && this.core.settings.additionalSuffixOfDatabaseName == "") {
// Discard the non-suffixed database
await this.services.database.resetDatabase();
}
const suffix = this.services.API.getAppID() || "";
this.core.settings.additionalSuffixOfDatabaseName = suffix;
await this.services.database.resetDatabase();
eventHub.emitEvent(EVENT_DATABASE_REBUILT);
}
async fetchRemoteChunks() {
if (
!this.core.settings.doNotSuspendOnFetching &&
!this.core.settings.useOnlyLocalChunk &&
this.core.settings.remoteType == REMOTE_COUCHDB
) {
this._log(`Fetching chunks`, LOG_LEVEL_NOTICE);
const replicator = this.services.replicator.getActiveReplicator() as LiveSyncCouchDBReplicator;
const remoteDB = await replicator.connectRemoteCouchDBWithSetting(
this.settings,
this.services.API.isMobile(),
true
);
if (typeof remoteDB == "string") {
this._log(remoteDB, LOG_LEVEL_NOTICE);
} else {
await fetchAllUsedChunks(this.localDatabase.localDatabase, remoteDB.db);
}
this._log(`Fetching chunks done`, LOG_LEVEL_NOTICE);
}
}
async resolveAllConflictedFilesByNewerOnes() {
this._log(`Resolving conflicts by newer ones`, LOG_LEVEL_NOTICE);
const files = this.core.storageAccess.getFileNames();

let i = 0;
for (const file of files) {
if (i++ % 10)
this._log(
`Checking and processing ${i} / ${files.length}`,
LOG_LEVEL_NOTICE,
"resolveAllConflictedFilesByNewerOnes"
);
await this.services.conflict.resolveByNewest(file);
}
this._log(`Done!`, LOG_LEVEL_NOTICE, "resolveAllConflictedFilesByNewerOnes");
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
services.database.onDatabaseReset.addHandler(this._onResetLocalDatabase.bind(this));
services.remote.tryResetDatabase.setHandler(this._tryResetRemoteDatabase.bind(this));
services.remote.tryCreateDatabase.setHandler(this._tryCreateRemoteDatabase.bind(this));
services.setting.suspendAllSync.addHandler(this._allSuspendAllSync.bind(this));
}
}
@@ -211,10 +211,30 @@ export class ModuleConflictResolver extends AbstractModule {
}
return true;
}
private async _resolveAllConflictedFilesByNewerOnes() {
this._log(`Resolving conflicts by newer ones`, LOG_LEVEL_NOTICE);

const files = this.core.storageAccess.getFileNames();

let i = 0;
for (const file of files) {
if (i++ % 10)
this._log(
`Checking and processing ${i} / ${files.length}`,
LOG_LEVEL_NOTICE,
"resolveAllConflictedFilesByNewerOnes"
);
await this.services.conflict.resolveByNewest(file);
}
this._log(`Done!`, LOG_LEVEL_NOTICE, "resolveAllConflictedFilesByNewerOnes");
}

onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
services.conflict.resolveByDeletingRevision.setHandler(this._resolveConflictByDeletingRev.bind(this));
services.conflict.resolve.setHandler(this._resolveConflict.bind(this));
services.conflict.resolveByNewest.setHandler(this._anyResolveConflictByNewest.bind(this));
services.conflict.resolveAllConflictedFilesByNewerOnes.setHandler(
this._resolveAllConflictedFilesByNewerOnes.bind(this)
);
}
}

@@ -5,7 +5,7 @@ import type { FilePath, UXFileInfoStub } from "../../../lib/src/common/types.ts"
import { createBinaryBlob, isDocContentSame } from "../../../lib/src/common/utils.ts";
import type { InternalFileInfo } from "../../../common/types.ts";
import { markChangesAreSame } from "../../../common/utils.ts";
import type { StorageAccess } from "../../interfaces/StorageAccess.ts";
import type { IStorageAccessManager } from "@lib/interfaces/StorageAccess.ts";
import type { LiveSyncCore } from "@/main.ts";
function toArrayBuffer(arr: Uint8Array<ArrayBuffer> | ArrayBuffer | DataView<ArrayBuffer>): ArrayBuffer {
if (arr instanceof Uint8Array) {
@@ -16,44 +16,64 @@ function toArrayBuffer(arr: Uint8Array<ArrayBuffer> | ArrayBuffer | DataView<Arr
}
return arr;
}
// TODO: add abstraction for the file access (as wrapping TFile or something similar)
export abstract class FileAccessBase<TNativeFile> {
storageAccessManager: IStorageAccessManager;
constructor(storageAccessManager: IStorageAccessManager) {
this.storageAccessManager = storageAccessManager;
}
abstract getPath(file: TNativeFile | string): FilePath;
}

export class SerializedFileAccess {
export class ObsidianFileAccess extends FileAccessBase<TFile> {
app: App;
plugin: LiveSyncCore;
storageAccess: StorageAccess;
constructor(app: App, plugin: LiveSyncCore, storageAccess: StorageAccess) {

getPath(file: string | TFile): FilePath {
return (typeof file === "string" ? file : file.path) as FilePath;
}

constructor(app: App, plugin: LiveSyncCore, storageAccessManager: IStorageAccessManager) {
super(storageAccessManager);
this.app = app;
this.plugin = plugin;
this.storageAccess = storageAccess;
}

async tryAdapterStat(file: TFile | string) {
const path = file instanceof TFile ? file.path : file;
return await this.storageAccess.processReadFile(path as FilePath, async () => {
return await this.storageAccessManager.processReadFile(path as FilePath, async () => {
if (!(await this.app.vault.adapter.exists(path))) return null;
return this.app.vault.adapter.stat(path);
});
}
async adapterStat(file: TFile | string) {
const path = file instanceof TFile ? file.path : file;
return await this.storageAccess.processReadFile(path as FilePath, () => this.app.vault.adapter.stat(path));
return await this.storageAccessManager.processReadFile(path as FilePath, () =>
this.app.vault.adapter.stat(path)
);
}
async adapterExists(file: TFile | string) {
const path = file instanceof TFile ? file.path : file;
return await this.storageAccess.processReadFile(path as FilePath, () => this.app.vault.adapter.exists(path));
return await this.storageAccessManager.processReadFile(path as FilePath, () =>
this.app.vault.adapter.exists(path)
);
}
async adapterRemove(file: TFile | string) {
const path = file instanceof TFile ? file.path : file;
return await this.storageAccess.processReadFile(path as FilePath, () => this.app.vault.adapter.remove(path));
return await this.storageAccessManager.processWriteFile(path as FilePath, () =>
this.app.vault.adapter.remove(path)
);
}

async adapterRead(file: TFile | string) {
const path = file instanceof TFile ? file.path : file;
return await this.storageAccess.processReadFile(path as FilePath, () => this.app.vault.adapter.read(path));
return await this.storageAccessManager.processReadFile(path as FilePath, () =>
this.app.vault.adapter.read(path)
);
}
async adapterReadBinary(file: TFile | string) {
const path = file instanceof TFile ? file.path : file;
return await this.storageAccess.processReadFile(path as FilePath, () =>
return await this.storageAccessManager.processReadFile(path as FilePath, () =>
this.app.vault.adapter.readBinary(path)
);
}
@@ -61,9 +81,11 @@ export class SerializedFileAccess {
async adapterReadAuto(file: TFile | string) {
const path = file instanceof TFile ? file.path : file;
if (isPlainText(path)) {
return await this.storageAccess.processReadFile(path as FilePath, () => this.app.vault.adapter.read(path));
return await this.storageAccessManager.processReadFile(path as FilePath, () =>
this.app.vault.adapter.read(path)
);
}
return await this.storageAccess.processReadFile(path as FilePath, () =>
return await this.storageAccessManager.processReadFile(path as FilePath, () =>
this.app.vault.adapter.readBinary(path)
);
}
@@ -75,39 +97,47 @@ export class SerializedFileAccess {
) {
const path = file instanceof TFile ? file.path : file;
if (typeof data === "string") {
return await this.storageAccess.processWriteFile(path as FilePath, () =>
return await this.storageAccessManager.processWriteFile(path as FilePath, () =>
this.app.vault.adapter.write(path, data, options)
);
} else {
return await this.storageAccess.processWriteFile(path as FilePath, () =>
return await this.storageAccessManager.processWriteFile(path as FilePath, () =>
this.app.vault.adapter.writeBinary(path, toArrayBuffer(data), options)
);
}
}

adapterList(basePath: string): Promise<{ files: string[]; folders: string[] }> {
return Promise.resolve(this.app.vault.adapter.list(basePath));
}

async vaultCacheRead(file: TFile) {
return await this.storageAccess.processReadFile(file.path as FilePath, () => this.app.vault.cachedRead(file));
return await this.storageAccessManager.processReadFile(file.path as FilePath, () =>
this.app.vault.cachedRead(file)
);
}

async vaultRead(file: TFile) {
return await this.storageAccess.processReadFile(file.path as FilePath, () => this.app.vault.read(file));
return await this.storageAccessManager.processReadFile(file.path as FilePath, () => this.app.vault.read(file));
}

async vaultReadBinary(file: TFile) {
return await this.storageAccess.processReadFile(file.path as FilePath, () => this.app.vault.readBinary(file));
return await this.storageAccessManager.processReadFile(file.path as FilePath, () =>
this.app.vault.readBinary(file)
);
}

async vaultReadAuto(file: TFile) {
const path = file.path;
if (isPlainText(path)) {
return await this.storageAccess.processReadFile(path as FilePath, () => this.app.vault.read(file));
return await this.storageAccessManager.processReadFile(path as FilePath, () => this.app.vault.read(file));
}
return await this.storageAccess.processReadFile(path as FilePath, () => this.app.vault.readBinary(file));
return await this.storageAccessManager.processReadFile(path as FilePath, () => this.app.vault.readBinary(file));
}

async vaultModify(file: TFile, data: string | ArrayBuffer | Uint8Array<ArrayBuffer>, options?: DataWriteOptions) {
if (typeof data === "string") {
return await this.storageAccess.processWriteFile(file.path as FilePath, async () => {
return await this.storageAccessManager.processWriteFile(file.path as FilePath, async () => {
const oldData = await this.app.vault.read(file);
if (data === oldData) {
if (options && options.mtime) markChangesAreSame(file.path, file.stat.mtime, options.mtime);
@@ -117,7 +147,7 @@ export class SerializedFileAccess {
return true;
});
} else {
return await this.storageAccess.processWriteFile(file.path as FilePath, async () => {
return await this.storageAccessManager.processWriteFile(file.path as FilePath, async () => {
const oldData = await this.app.vault.readBinary(file);
if (await isDocContentSame(createBinaryBlob(oldData), createBinaryBlob(data))) {
if (options && options.mtime) markChangesAreSame(file.path, file.stat.mtime, options.mtime);
@@ -134,11 +164,11 @@ export class SerializedFileAccess {
options?: DataWriteOptions
): Promise<TFile> {
if (typeof data === "string") {
return await this.storageAccess.processWriteFile(path as FilePath, () =>
return await this.storageAccessManager.processWriteFile(path as FilePath, () =>
this.app.vault.create(path, data, options)
);
} else {
return await this.storageAccess.processWriteFile(path as FilePath, () =>
return await this.storageAccessManager.processWriteFile(path as FilePath, () =>
this.app.vault.createBinary(path, toArrayBuffer(data), options)
);
}
@@ -147,18 +177,21 @@ export class SerializedFileAccess {
trigger(name: string, ...data: any[]) {
return this.app.vault.trigger(name, ...data);
}
async reconcileInternalFile(path: string) {
await (this.app.vault.adapter as any)?.reconcileInternalFile(path);
}

async adapterAppend(normalizedPath: string, data: string, options?: DataWriteOptions) {
return await this.app.vault.adapter.append(normalizedPath, data, options);
}

async delete(file: TFile | TFolder, force = false) {
return await this.storageAccess.processWriteFile(file.path as FilePath, () =>
return await this.storageAccessManager.processWriteFile(file.path as FilePath, () =>
this.app.vault.delete(file, force)
);
}
async trash(file: TFile | TFolder, force = false) {
return await this.storageAccess.processWriteFile(file.path as FilePath, () =>
return await this.storageAccessManager.processWriteFile(file.path as FilePath, () =>
this.app.vault.trash(file, force)
);
}

@@ -10,7 +10,6 @@ import {
type FileEventType,
type FilePath,
type UXFileInfoStub,
type UXInternalFileInfoStub,
} from "../../../lib/src/common/types.ts";
import { delay, fireAndForget, throttle } from "../../../lib/src/common/utils.ts";
import { type FileEventItem } from "../../../common/types.ts";
@@ -20,19 +19,11 @@ import { Semaphore } from "octagonal-wheels/concurrency/semaphore";
import type { LiveSyncCore } from "../../../main.ts";
import { InternalFileToUXFileInfoStub, TFileToUXFileInfoStub } from "./utilObsidian.ts";
import ObsidianLiveSyncPlugin from "../../../main.ts";
import type { StorageAccess } from "../../interfaces/StorageAccess.ts";
import type { IStorageAccessManager } from "@lib/interfaces/StorageAccess.ts";
import { HiddenFileSync } from "../../../features/HiddenFileSync/CmdHiddenFileSync.ts";
import { promiseWithResolvers, type PromiseWithResolvers } from "octagonal-wheels/promises";
// import { InternalFileToUXFileInfo } from "../platforms/obsidian.ts";
import { StorageEventManager, type FileEvent } from "@lib/interfaces/StorageEventManager.ts";

export type FileEvent = {
type: FileEventType;
file: UXFileInfoStub | UXInternalFileInfoStub;
oldPath?: string;
cachedData?: string;
skipBatchWait?: boolean;
cancelled?: boolean;
};
type WaitInfo = {
since: number;
type: FileEventType;
@@ -46,20 +37,10 @@ type FileEventItemSentinelFlush = {
};
type FileEventItemSentinel = FileEventItemSentinelFlush;

export abstract class StorageEventManager {
abstract beginWatch(): Promise<void>;

abstract appendQueue(items: FileEvent[], ctx?: any): Promise<void>;

abstract isWaiting(filename: FilePath): boolean;
abstract waitForIdle(): Promise<void>;
abstract restoreState(): Promise<void>;
}

export class StorageEventManagerObsidian extends StorageEventManager {
plugin: ObsidianLiveSyncPlugin;
core: LiveSyncCore;
storageAccess: StorageAccess;
storageAccess: IStorageAccessManager;
get services() {
return this.core.services;
}
@@ -83,9 +64,9 @@ export class StorageEventManagerObsidian extends StorageEventManager {
*/
snapShotRestored: Promise<void> | null = null;

constructor(plugin: ObsidianLiveSyncPlugin, core: LiveSyncCore, storageAccess: StorageAccess) {
constructor(plugin: ObsidianLiveSyncPlugin, core: LiveSyncCore, storageAccessManager: IStorageAccessManager) {
super();
this.storageAccess = storageAccess;
this.storageAccess = storageAccessManager;
this.plugin = plugin;
this.core = core;
this.cmdHiddenFileSync = this.plugin.getAddOn(HiddenFileSync.name) as HiddenFileSync;

@@ -2,7 +2,7 @@

import { TFile, type TAbstractFile, type TFolder } from "../../../deps.ts";
import { ICHeader } from "../../../common/types.ts";
import type { SerializedFileAccess } from "./SerializedFileAccess.ts";
import type { ObsidianFileAccess } from "./SerializedFileAccess.ts";
import { addPrefix, isPlainText } from "../../../lib/src/string_and_binary/path.ts";
import { LOG_LEVEL_VERBOSE, Logger } from "octagonal-wheels/common/logger";
import { createBlob } from "../../../lib/src/common/utils.ts";
@@ -51,7 +51,7 @@ export async function TFileToUXFileInfo(

export async function InternalFileToUXFileInfo(
fullPath: string,
vaultAccess: SerializedFileAccess,
vaultAccess: ObsidianFileAccess,
prefix: string = ICHeader
): Promise<UXFileInfo> {
const name = fullPath.split("/").pop() as string;

@@ -511,10 +511,11 @@ ABCDEFGHIJKLMNOPQRSTUVWXYZ`;
return this.__assertStorageContent((this.testRootPath + "task.md") as FilePath, mergedDoc, false, true);
}

// No longer tested
async checkConflictResolution() {
this._log("Before testing conflicted files, resolve all once", LOG_LEVEL_NOTICE);
await this.core.rebuilder.resolveAllConflictedFilesByNewerOnes();
await this.core.rebuilder.resolveAllConflictedFilesByNewerOnes();
await this.services.conflict.resolveAllConflictedFilesByNewerOnes();
await this.services.conflict.resolveAllConflictedFilesByNewerOnes();
await this.services.replication.replicate();
await delay(1000);
if (!(await this.testConflictAutomatic())) {

@@ -320,6 +320,25 @@ export class ModuleObsidianSettings extends AbstractModule {
private _currentSettings(): ObsidianLiveSyncSettings {
return this.settings;
}
private _updateSettings(updateFn: (settings: ObsidianLiveSyncSettings) => ObsidianLiveSyncSettings): Promise<void> {
try {
const updated = updateFn(this.settings);
this.settings = updated;
} catch (ex) {
this._log("Error in update function: " + ex, LOG_LEVEL_URGENT);
return Promise.reject(ex);
}
return Promise.resolve();
}
private _applyPartial(partial: Partial<ObsidianLiveSyncSettings>): Promise<void> {
try {
this.settings = { ...this.settings, ...partial };
} catch (ex) {
this._log("Error in applying partial settings: " + ex, LOG_LEVEL_URGENT);
return Promise.reject(ex);
}
return Promise.resolve();
}

onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
super.onBindFunction(core, services);
@@ -329,6 +348,8 @@ export class ModuleObsidianSettings extends AbstractModule {
services.setting.adjustSettings.setHandler(this._adjustSettings.bind(this));
services.setting.loadSettings.setHandler(this._loadSettings.bind(this));
services.setting.currentSettings.setHandler(this._currentSettings.bind(this));
services.setting.updateSettings.setHandler(this._updateSettings.bind(this));
services.setting.applyPartial.setHandler(this._applyPartial.bind(this));
services.setting.saveDeviceAndVaultName.setHandler(this._saveDeviceAndVaultName.bind(this));
services.setting.saveSettingData.setHandler(this._saveSettingData.bind(this));
}

@@ -361,7 +361,7 @@ ${stringifyYaml({
.setButtonText("Resolve All")
.setCta()
.onClick(async () => {
await this.plugin.rebuilder.resolveAllConflictedFilesByNewerOnes();
await this.services.conflict.resolveAllConflictedFilesByNewerOnes();
})
);

@@ -1,34 +0,0 @@
import type {
FilePathWithPrefix,
LoadedEntry,
MetaEntry,
UXFileInfo,
UXFileInfoStub,
} from "../../lib/src/common/types";

export interface DatabaseFileAccess {
delete: (file: UXFileInfoStub | FilePathWithPrefix, rev?: string) => Promise<boolean>;
store: (file: UXFileInfo, force?: boolean, skipCheck?: boolean) => Promise<boolean>;
storeContent(path: FilePathWithPrefix, content: string): Promise<boolean>;
createChunks: (file: UXFileInfo, force?: boolean, skipCheck?: boolean) => Promise<boolean>;
fetch: (
file: UXFileInfoStub | FilePathWithPrefix,
rev?: string,
waitForReady?: boolean,
skipCheck?: boolean
) => Promise<UXFileInfo | false>;
fetchEntryFromMeta: (meta: MetaEntry, waitForReady?: boolean, skipCheck?: boolean) => Promise<LoadedEntry | false>;
fetchEntryMeta: (
file: UXFileInfoStub | FilePathWithPrefix,
rev?: string,
skipCheck?: boolean
) => Promise<MetaEntry | false>;
fetchEntry: (
file: UXFileInfoStub | FilePathWithPrefix,
rev?: string,
waitForReady?: boolean,
skipCheck?: boolean
) => Promise<LoadedEntry | false>;
getConflictedRevs: (file: UXFileInfoStub | FilePathWithPrefix) => Promise<string[]>;
// storeFromStorage: (file: UXFileInfoStub | FilePathWithPrefix, force?: boolean) => Promise<boolean>;
}
@@ -1,12 +0,0 @@
export interface Rebuilder {
$performRebuildDB(
method: "localOnly" | "remoteOnly" | "rebuildBothByThisDevice" | "localOnlyWithChunks"
): Promise<void>;
$rebuildRemote(): Promise<void>;
$rebuildEverything(): Promise<void>;
$fetchLocal(makeLocalChunkBeforeSync?: boolean, preventMakeLocalFilesBeforeSync?: boolean): Promise<void>;

scheduleRebuild(): Promise<void>;
scheduleFetch(): Promise<void>;
resolveAllConflictedFilesByNewerOnes(): Promise<void>;
}
@@ -1,61 +0,0 @@
import type {
FilePath,
FilePathWithPrefix,
UXDataWriteOptions,
UXFileInfo,
UXFileInfoStub,
UXFolderInfo,
UXStat,
} from "../../lib/src/common/types";
import type { CustomRegExp } from "../../lib/src/common/utils";

export interface StorageAccess {
restoreState(): Promise<void>;
processWriteFile<T>(file: UXFileInfoStub | FilePathWithPrefix, proc: () => Promise<T>): Promise<T>;
processReadFile<T>(file: UXFileInfoStub | FilePathWithPrefix, proc: () => Promise<T>): Promise<T>;
isFileProcessing(file: UXFileInfoStub | FilePathWithPrefix): boolean;

deleteVaultItem(file: FilePathWithPrefix | UXFileInfoStub | UXFolderInfo): Promise<void>;

writeFileAuto(path: string, data: string | ArrayBuffer, opt?: UXDataWriteOptions): Promise<boolean>;

readFileAuto(path: string): Promise<string | ArrayBuffer>;
readFileText(path: string): Promise<string>;
isExists(path: string): Promise<boolean>;
writeHiddenFileAuto(path: string, data: string | ArrayBuffer, opt?: UXDataWriteOptions): Promise<boolean>;
appendHiddenFile(path: string, data: string, opt?: UXDataWriteOptions): Promise<boolean>;

stat(path: string): Promise<UXStat | null>;
statHidden(path: string): Promise<UXStat | null>;
removeHidden(path: string): Promise<boolean>;
readHiddenFileAuto(path: string): Promise<string | ArrayBuffer>;
readHiddenFileBinary(path: string): Promise<ArrayBuffer>;
readHiddenFileText(path: string): Promise<string>;
isExistsIncludeHidden(path: string): Promise<boolean>;
// This could also work for the hidden files.
ensureDir(path: string): Promise<boolean>;
triggerFileEvent(event: string, path: string): void;
triggerHiddenFile(path: string): Promise<void>;

getFileStub(path: string): UXFileInfoStub | null;
readStubContent(stub: UXFileInfoStub): Promise<UXFileInfo | false>;
getStub(path: string): UXFileInfoStub | UXFolderInfo | null;

getFiles(): UXFileInfoStub[];
getFileNames(): FilePathWithPrefix[];

touched(file: UXFileInfoStub | FilePathWithPrefix): Promise<void>;
recentlyTouched(file: UXFileInfoStub | FilePathWithPrefix): boolean;
clearTouched(): void;

// -- Low-Level
delete(file: FilePathWithPrefix | UXFileInfoStub | string, force: boolean): Promise<void>;
trash(file: FilePathWithPrefix | UXFileInfoStub | string, system: boolean): Promise<void>;

getFilesIncludeHidden(
basePath: string,
includeFilter?: CustomRegExp[],
excludeFilter?: CustomRegExp[],
skipFolder?: string[]
): Promise<FilePath[]>;
}
@@ -55,7 +55,11 @@ export class ObsidianServiceHub extends InjectableServiceHub<ObsidianServiceCont
databaseEvents: databaseEvents,
vault: vault,
});
const config = new ObsidianConfigService(context, vault);
const config = new ObsidianConfigService(context, {
vaultService: vault,
settingService: setting,
APIService: API,
});
const replicator = new ObsidianReplicatorService(context, {
settingService: setting,
appLifecycleService: appLifecycle,

15
src/serviceModules/DatabaseFileAccess.ts
Normal file
15
src/serviceModules/DatabaseFileAccess.ts
Normal file
@@ -0,0 +1,15 @@
import { markChangesAreSame } from "@/common/utils";
import type { AnyEntry } from "@lib/common/types";

import type { DatabaseFileAccess } from "@lib/interfaces/DatabaseFileAccess.ts";
import { ServiceDatabaseFileAccessBase } from "@lib/serviceModules/ServiceDatabaseFileAccessBase";

// markChangesAreSame uses persistent data implicitly, we should refactor it too.
// For now, to make the refactoring done once, we just use them directly.
// Hence it is not on /src/lib/src/serviceModules. (markChangesAreSame is using indexedDB).
// TODO: REFACTOR
export class ServiceDatabaseFileAccess extends ServiceDatabaseFileAccessBase implements DatabaseFileAccess {
markChangesAreSame(old: AnyEntry, newMtime: number, oldMtime: number): void {
markChangesAreSame(old, newMtime, oldMtime);
}
}
26
src/serviceModules/FileHandler.ts
Normal file
26
src/serviceModules/FileHandler.ts
Normal file
@@ -0,0 +1,26 @@
import {
compareFileFreshness,
markChangesAreSame,
type BASE_IS_NEW,
type EVEN,
type TARGET_IS_NEW,
} from "@/common/utils";
import type { AnyEntry } from "@lib/common/models/db.type";
import type { UXFileInfo, UXFileInfoStub } from "@lib/common/models/fileaccess.type";
import { ServiceFileHandlerBase } from "@lib/serviceModules/ServiceFileHandlerBase";

// markChangesAreSame uses persistent data implicitly, we should refactor it too.
// also, compareFileFreshness depends on marked changes, so we should refactor it as well. For now, to make the refactoring done once, we just use them directly.
// Hence it is not on /src/lib/src/serviceModules. (markChangesAreSame is using indexedDB).
// TODO: REFACTOR
export class ServiceFileHandler extends ServiceFileHandlerBase {
override markChangesAreSame(old: UXFileInfo | AnyEntry, newMtime: number, oldMtime: number) {
return markChangesAreSame(old, newMtime, oldMtime);
}
override compareFileFreshness(
baseFile: UXFileInfoStub | AnyEntry | undefined,
checkTarget: UXFileInfo | AnyEntry | undefined
): typeof TARGET_IS_NEW | typeof BASE_IS_NEW | typeof EVEN {
return compareFileFreshness(baseFile, checkTarget);
}
}
@@ -1,6 +1,4 @@
import { TFile, TFolder, type ListedFiles } from "@/deps.ts";
import { SerializedFileAccess } from "./storageLib/SerializedFileAccess";
import { AbstractObsidianModule } from "../AbstractObsidianModule.ts";
import { LOG_LEVEL_INFO, LOG_LEVEL_VERBOSE } from "octagonal-wheels/common/logger";
import type {
FilePath,
@@ -10,48 +8,50 @@ import type {
UXFileInfoStub,
UXFolderInfo,
UXStat,
} from "../../lib/src/common/types";
import { TFileToUXFileInfoStub, TFolderToUXFileInfoStub } from "./storageLib/utilObsidian.ts";
import { StorageEventManagerObsidian, type StorageEventManager } from "./storageLib/StorageEventManager";
import type { StorageAccess } from "../interfaces/StorageAccess";
import { createBlob, type CustomRegExp } from "../../lib/src/common/utils";
import { serialized } from "octagonal-wheels/concurrency/lock_v2";
import type { LiveSyncCore } from "../../main.ts";
import type ObsidianLiveSyncPlugin from "../../main.ts";
import type { InjectableServiceHub } from "../../lib/src/services/InjectableServices.ts";
} from "@lib/common/types";

const fileLockPrefix = "file-lock:";
import { ServiceModuleBase } from "@lib/serviceModules/ServiceModuleBase";
import type { APIService } from "@lib/services/base/APIService";
import type { IStorageAccessManager, StorageAccess } from "@lib/interfaces/StorageAccess.ts";
import type { AppLifecycleService } from "@lib/services/base/AppLifecycleService";
import type { FileProcessingService } from "@lib/services/base/FileProcessingService";
import { ObsidianFileAccess } from "@/modules/coreObsidian/storageLib/SerializedFileAccess";
import { StorageEventManager } from "@lib/interfaces/StorageEventManager.ts";
import { TFileToUXFileInfoStub, TFolderToUXFileInfoStub } from "@/modules/coreObsidian/storageLib/utilObsidian";
import { createBlob, type CustomRegExp } from "@lib/common/utils";
import type { VaultService } from "@lib/services/base/VaultService";
import type { SettingService } from "@lib/services/base/SettingService";

export class ModuleFileAccessObsidian extends AbstractObsidianModule implements StorageAccess {
processingFiles: Set<FilePathWithPrefix> = new Set();
processWriteFile<T>(file: UXFileInfoStub | FilePathWithPrefix, proc: () => Promise<T>): Promise<T> {
const path = typeof file === "string" ? file : file.path;
return serialized(`${fileLockPrefix}${path}`, async () => {
try {
this.processingFiles.add(path);
return await proc();
} finally {
this.processingFiles.delete(path);
}
});
export interface StorageAccessObsidianDependencies {
API: APIService;
appLifecycle: AppLifecycleService;
fileProcessing: FileProcessingService;
vault: VaultService;
setting: SettingService;
storageEventManager: StorageEventManager;
storageAccessManager: IStorageAccessManager;
vaultAccess: ObsidianFileAccess;
}

export class ServiceFileAccessObsidian
extends ServiceModuleBase<StorageAccessObsidianDependencies>
implements StorageAccess
{
private vaultAccess: ObsidianFileAccess;
private vaultManager: StorageEventManager;
private vault: VaultService;
private setting: SettingService;

constructor(services: StorageAccessObsidianDependencies) {
super(services);
// this.appLifecycle = services.appLifecycle;
this.vault = services.vault;
this.setting = services.setting;
this.vaultManager = services.storageEventManager;
this.vaultAccess = services.vaultAccess;
services.appLifecycle.onFirstInitialise.addHandler(this._everyOnFirstInitialize.bind(this));
services.fileProcessing.commitPendingFileEvents.addHandler(this._everyCommitPendingFileEvent.bind(this));
}
processReadFile<T>(file: UXFileInfoStub | FilePathWithPrefix, proc: () => Promise<T>): Promise<T> {
const path = typeof file === "string" ? file : file.path;
return serialized(`${fileLockPrefix}${path}`, async () => {
try {
this.processingFiles.add(path);
return await proc();
} finally {
this.processingFiles.delete(path);
}
});
}
isFileProcessing(file: UXFileInfoStub | FilePathWithPrefix): boolean {
const path = typeof file === "string" ? file : file.path;
return this.processingFiles.has(path);
}
vaultAccess!: SerializedFileAccess;
vaultManager: StorageEventManager = new StorageEventManagerObsidian(this.plugin, this.core, this);

restoreState() {
return this.vaultManager.restoreState();
@@ -61,21 +61,11 @@ export class ModuleFileAccessObsidian extends AbstractObsidianModule implements
return Promise.resolve(true);
}

// $$flushFileEventQueue(): void {
//     this.vaultManager.flushQueue();
// }

async _everyCommitPendingFileEvent(): Promise<boolean> {
await this.vaultManager.waitForIdle();
return Promise.resolve(true);
}

_everyOnloadStart(): Promise<boolean> {
this.vaultAccess = new SerializedFileAccess(this.app, this.plugin, this);
this.core.storageAccess = this;
return Promise.resolve(true);
}

async writeFileAuto(path: string, data: string | ArrayBuffer, opt?: UXDataWriteOptions): Promise<boolean> {
const file = this.vaultAccess.getAbstractFileByPath(path);
if (file instanceof TFile) {
@@ -200,8 +190,7 @@ export class ModuleFileAccessObsidian extends AbstractObsidianModule implements
this.vaultAccess.trigger(event, file);
}
async triggerHiddenFile(path: string): Promise<void> {
//@ts-ignore internal function
await this.app.vault.adapter.reconcileInternalFile(path);
await this.vaultAccess.reconcileInternalFile(path);
}
// getFileStub(file: TFile): UXFileInfoStub {
//     return TFileToUXFileInfoStub(file);
@@ -252,7 +241,8 @@ export class ModuleFileAccessObsidian extends AbstractObsidianModule implements
): Promise<FilePath[]> {
let w: ListedFiles;
try {
w = await this.app.vault.adapter.list(basePath);
w = await this.vaultAccess.adapterList(basePath);
// w = await this.plugin.app.vault.adapter.list(basePath);
} catch (ex) {
this._log(`Could not traverse(getFilesIncludeHidden):${basePath}`, LOG_LEVEL_INFO);
this._log(ex, LOG_LEVEL_VERBOSE);
@@ -268,7 +258,7 @@ export class ModuleFileAccessObsidian extends AbstractObsidianModule implements
if (excludeFilter && excludeFilter.some((ee) => ee.test(file))) {
continue;
}
if (await this.services.vault.isIgnoredByIgnoreFile(file)) continue;
if (await this.vault.isIgnoredByIgnoreFile(file)) continue;
files.push(file);
}

@@ -281,7 +271,7 @@ export class ModuleFileAccessObsidian extends AbstractObsidianModule implements
if (excludeFilter && excludeFilter.some((e) => e.test(v))) {
continue;
}
if (await this.services.vault.isIgnoredByIgnoreFile(v)) {
if (await this.vault.isIgnoredByIgnoreFile(v)) {
continue;
}
// OK, deep dive!
@@ -339,10 +329,11 @@ export class ModuleFileAccessObsidian extends AbstractObsidianModule implements

async __deleteVaultItem(file: TFile | TFolder) {
if (file instanceof TFile) {
if (!(await this.services.vault.isTargetFile(file.path))) return;
if (!(await this.vault.isTargetFile(file.path))) return;
}
const dir = file.parent;
if (this.settings.trashInsteadDelete) {
const settings = this.setting.currentSettings();
if (settings.trashInsteadDelete) {
await this.vaultAccess.trash(file, false);
} else {
await this.vaultAccess.delete(file, true);
@@ -351,7 +342,7 @@ export class ModuleFileAccessObsidian extends AbstractObsidianModule implements
if (dir) {
this._log(`files: ${dir.children.length}`);
if (dir.children.length == 0) {
if (!this.settings.doNotDeleteFolder) {
if (!settings.doNotDeleteFolder) {
this._log(
`All files under the parent directory (${dir.path}) have been deleted, so delete this one.`
);
@@ -369,13 +360,4 @@ export class ModuleFileAccessObsidian extends AbstractObsidianModule implements
return await this.__deleteVaultItem(file);
}
}

constructor(plugin: ObsidianLiveSyncPlugin, core: LiveSyncCore) {
super(plugin, core);
}
onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
services.appLifecycle.onFirstInitialise.addHandler(this._everyOnFirstInitialize.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
services.fileProcessing.commitPendingFileEvents.addHandler(this._everyCommitPendingFileEvent.bind(this));
}
}