Improved:

- Reduced remote database checking to improve speed and reduce bandwidth usage (sketched below).

Fixed:
- Chunks that were previously misinterpreted are now interpreted correctly.
- Deleted files are now detected correctly during hidden file synchronisation.
- Customisation sync now stays quiet while it is disabled.
vorotamoroz
2023-05-17 16:20:07 +09:00
parent d8ecbb593b
commit 842da980d7
5 changed files with 18 additions and 13 deletions
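
A quick illustration of the "reduced remote database checking" item: the last file in this commit threads new performSetup and skipInfo flags down to connectRemoteCouchDB, so the PouchDB setup probe and the db.info() round-trip can be skipped when they are not needed. Below is a minimal sketch of the same pattern against the bundled pouchdb package; the connectRemote helper and its parameters are illustrative, not the plugin's actual API.

import PouchDB from "pouchdb";

// Illustrative sketch only: connect to a remote CouchDB while avoiding
// extra HTTP round-trips the caller does not need.
async function connectRemote(
    uri: string,
    auth: { username: string; password: string },
    performSetup: boolean,
    skipInfo: boolean
) {
    const db = new PouchDB(uri, {
        auth,
        // skip_setup stops PouchDB from probing (and, if missing, creating)
        // the remote database at construction time, saving one request.
        skip_setup: !performSetup,
    });
    if (skipInfo) {
        // Skip the GET that db.info() would issue when document counts and
        // the update sequence are not needed (e.g. routine reconnects).
        return { db };
    }
    return { db, info: await db.info() };
}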

View File

@@ -145,7 +145,7 @@ export class ConfigSync extends LiveSyncCommands {
         if (this.plugin.suspended) {
             return;
         }
-        if (this.settings.autoSweepPlugins) {
+        if (this.settings.autoSweepPlugins && this.settings.usePluginSync) {
             await this.scanAllConfigFiles(false);
         }
         this.periodicPluginSweepProcessor.enable(this.settings.autoSweepPluginsPeriodic && !this.settings.watchInternalFileChanges ? (PERIODIC_PLUGIN_SWEEP * 1000) : 0);
@@ -567,6 +567,7 @@ export class ConfigSync extends LiveSyncCommands {
     }
     async watchVaultRawEventsAsync(path: FilePath) {
+        if (!this.settings.usePluginSync) return false;
         if (!this.isTargetPath(path)) return false;
         const stat = await this.app.vault.adapter.stat(path);
         // Make sure that target is a file.

View File

@@ -495,7 +495,7 @@ export class HiddenFileSync extends LiveSyncCommands {
         const mtime = new Date().getTime();
         await runWithLock("file-" + prefixedFileName, false, async () => {
             try {
-                const old = await this.localDatabase.getDBEntry(prefixedFileName, null, false, false) as InternalFileEntry | false;
+                const old = await this.localDatabase.getDBEntryMeta(prefixedFileName, null, true) as InternalFileEntry | false;
                 let saveData: InternalFileEntry;
                 if (old === false) {
                     saveData = {
@@ -541,7 +541,7 @@ export class HiddenFileSync extends LiveSyncCommands {
             try {
                 // Check conflicted status
                 //TODO option
-                const fileOnDB = await this.localDatabase.getDBEntry(prefixedFileName, { conflicts: true }, false, false);
+                const fileOnDB = await this.localDatabase.getDBEntry(prefixedFileName, { conflicts: true }, false, true);
                 if (fileOnDB === false)
                     throw new Error(`File not found on database.:${filename}`);
                 // Prevent overwriting while some conflicted revision exists.
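
The two hunks above switch these lookups to deletion-aware, metadata-level reads (getDBEntryMeta, plus an extra flag on getDBEntry), which per the release note is what fixes deleted-file detection for hidden files. Below is a rough, generic illustration of recording a deletion with a small metadata document in plain PouchDB; the InternalFileEntryLike shape and the markDeleted helper are assumptions for this sketch, not the plugin's real schema.

import PouchDB from "pouchdb";

interface InternalFileEntryLike {
    _id: string;
    _rev?: string;
    mtime: number;
    deleted?: boolean;
}

// Illustrative only: mark a hidden file as deleted by writing a small
// metadata document, without reading the file's full content first.
async function markDeleted(db: PouchDB.Database<InternalFileEntryLike>, id: string) {
    let old: (InternalFileEntryLike & PouchDB.Core.GetMeta) | undefined;
    try {
        old = await db.get(id);
    } catch (ex: any) {
        if (ex.status !== 404) throw ex; // 404 just means there was no previous entry
    }
    await db.put({
        _id: id,
        _rev: old?._rev,                 // update in place when an entry already exists
        mtime: new Date().getTime(),
        deleted: true,                   // tombstone flag the sync layer can replicate
    });
}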

View File

@@ -19,7 +19,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
         this.plugin = plugin;
     }
     async testConnection(): Promise<void> {
-        const db = await this.plugin.replicator.connectRemoteCouchDBWithSetting(this.plugin.settings, this.plugin.isMobile);
+        const db = await this.plugin.replicator.connectRemoteCouchDBWithSetting(this.plugin.settings, this.plugin.isMobile, true);
         if (typeof db === "string") {
             this.plugin.addLog(`could not connect to ${this.plugin.settings.couchDB_URI} : ${this.plugin.settings.couchDB_DBNAME} \n(${db})`, LOG_LEVEL.NOTICE);
             return;
@@ -376,7 +376,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
             useDynamicIterationCount: useDynamicIterationCount,
         };
         console.dir(settingForCheck);
-        const db = await this.plugin.replicator.connectRemoteCouchDBWithSetting(settingForCheck, this.plugin.isMobile);
+        const db = await this.plugin.replicator.connectRemoteCouchDBWithSetting(settingForCheck, this.plugin.isMobile, true);
         if (typeof db === "string") {
             Logger("Could not connect to the database.", LOG_LEVEL.NOTICE);
             return false;

Submodule src/lib updated: 051b50ca38...ec4ecacb43

View File

@@ -87,7 +87,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
     }
     processReplication = (e: PouchDB.Core.ExistingDocument<EntryDoc>[]) => this.parseReplicationResult(e);
-    async connectRemoteCouchDB(uri: string, auth: { username: string; password: string }, disableRequestURI: boolean, passphrase: string | false, useDynamicIterationCount: boolean): Promise<string | { db: PouchDB.Database<EntryDoc>; info: PouchDB.Core.DatabaseInfo }> {
+    async connectRemoteCouchDB(uri: string, auth: { username: string; password: string }, disableRequestURI: boolean, passphrase: string | false, useDynamicIterationCount: boolean, performSetup: boolean, skipInfo: boolean): Promise<string | { db: PouchDB.Database<EntryDoc>; info: PouchDB.Core.DatabaseInfo }> {
         if (!isValidRemoteCouchDBURI(uri)) return "Remote URI is not valid";
         if (uri.toLowerCase() != uri) return "Remote URI and database name could not contain capital letters.";
         if (uri.indexOf(" ") !== -1) return "Remote URI and database name could not contain spaces.";
@@ -104,6 +104,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
         const conf: PouchDB.HttpAdapter.HttpAdapterConfiguration = {
             adapter: "http",
             auth,
+            skip_setup: !performSetup,
             fetch: async (url: string | Request, opts: RequestInit) => {
                 let size = "";
                 const localURL = url.toString().substring(uri.length);
@@ -192,6 +193,9 @@ export default class ObsidianLiveSyncPlugin extends Plugin
         if (passphrase !== "false" && typeof passphrase === "string") {
             enableEncryption(db, passphrase, useDynamicIterationCount);
         }
+        if (skipInfo) {
+            return { db: db, info: {} };
+        }
         try {
             const info = await db.info();
             return { db: db, info: info };
@@ -1364,8 +1368,8 @@ export default class ObsidianLiveSyncPlugin extends Plugin
         // If `Read chunks online` is disabled, chunks should be transferred before here.
         // However, in some cases, chunks arrive after that. So, if missing chunks exist, we have to wait for them.
         if ((!this.settings.readChunksOnline) && "children" in doc) {
-            const c = await this.localDatabase.collectChunksWithCache(doc.children)
-            const missing = c.filter((e) => !e.chunk).map((e) => e.id);
+            const c = await this.localDatabase.collectChunksWithCache(doc.children);
+            const missing = c.filter((e) => e.chunk === false).map((e) => e.id);
             if (missing.length > 0) Logger(`${path} (${doc._id}, ${doc._rev}) Queued (waiting ${missing.length} items)`, LOG_LEVEL.VERBOSE);
             newQueue.missingChildren = missing;
             this.queuedFiles.push(newQueue);
@@ -1381,15 +1385,15 @@ export default class ObsidianLiveSyncPlugin extends Plugin
         const docsSorted = docs.sort((a, b) => b.mtime - a.mtime);
         L1:
         for (const change of docsSorted) {
+            if (isChunk(change._id)) {
+                await this.parseIncomingChunk(change);
+                continue;
+            }
             for (const proc of this.addOns) {
                 if (await proc.parseReplicationResultItem(change)) {
                     continue L1;
                 }
             }
-            if (isChunk(change._id)) {
-                await this.parseIncomingChunk(change);
-                continue;
-            }
             if (change._id == SYNCINFO_ID) {
                 continue;
             }
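
One more note on the chunk handling above: as the comment in the earlier hunk says, when "Read chunks online" is disabled a note document can replicate before its chunks, so the plugin records the missing chunk ids and parks the note until they arrive. The following is a simplified sketch of that queueing idea; QueuedEntry and the haveChunk / processNow callbacks are illustrative stand-ins, not the plugin's actual types.

// Sketch of "wait for missing chunks": park a note until every chunk id it
// references has been received, then release it for processing.
type QueuedEntry = { path: string; missingChildren: string[] };

const queued: QueuedEntry[] = [];

function queueIfChunksMissing(path: string, children: string[], haveChunk: (id: string) => boolean): boolean {
    const missing = children.filter((id) => !haveChunk(id));
    if (missing.length > 0) {
        queued.push({ path, missingChildren: missing });
        return true;  // parked; will be processed once the chunks arrive
    }
    return false;     // all chunks already present; safe to process now
}

function onChunkArrived(chunkId: string, processNow: (path: string) => void): void {
    for (const entry of queued) {
        entry.missingChildren = entry.missingChildren.filter((id) => id !== chunkId);
    }
    // Release entries whose last missing chunk has just arrived.
    const ready = queued.filter((e) => e.missingChildren.length === 0);
    const waiting = queued.filter((e) => e.missingChildren.length > 0);
    queued.splice(0, queued.length, ...waiting);
    for (const entry of ready) {
        processNow(entry.path);
    }
}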