- Fixed a garbage-collection error that occurred when many unreferenced chunks exist.
- Fixed filename validation on Linux.

Improved:
- Status display updates are now throttled for performance.
- Enhanced caching while collecting chunks.
vorotamoroz
2023-05-02 17:59:58 +09:00
parent ce25eee74b
commit 808fdc0944
3 changed files with 45 additions and 39 deletions

Submodule src/lib updated: 3b20495ec9...fb3070851f


@@ -1350,7 +1350,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
         const docMtime = ~~(doc.mtime / 1000);
         //TODO: some margin required.
         if (localMtime >= docMtime) {
-            Logger(`${doc._id} Skipped, older than storage.`, LOG_LEVEL.VERBOSE);
+            Logger(`${path} (${doc._id}, ${doc._rev}) Skipped, older than storage.`, LOG_LEVEL.VERBOSE);
             return;
         }
     }
@@ -1361,12 +1361,12 @@ export default class ObsidianLiveSyncPlugin extends Plugin
             missingChildren: [] as string[],
             timeout: now + this.chunkWaitTimeout,
         };
-        // If `Read chunks online` is enabled, retrieve chunks from the remote CouchDB directly.
+        // If `Read chunks online` is disabled, chunks should have been transferred before this point.
+        // However, in some cases they arrive afterwards. So, if chunks are missing, we have to wait for them.
         if ((!this.settings.readChunksOnline) && "children" in doc) {
-            const c = await this.localDatabase.allDocsRaw<EntryDoc>({ keys: doc.children, include_docs: false });
-            const missing = c.rows.filter((e) => "error" in e).map((e) => e.key);
-            // fetch from remote
-            if (missing.length > 0) Logger(`${doc._id}(${doc._rev}) Queued (waiting ${missing.length} items)`, LOG_LEVEL.VERBOSE);
+            const c = await this.localDatabase.collectChunksWithCache(doc.children);
+            const missing = c.filter((e) => !e.chunk).map((e) => e.id);
+            if (missing.length > 0) Logger(`${path} (${doc._id}, ${doc._rev}) Queued (waiting ${missing.length} items)`, LOG_LEVEL.VERBOSE);
             newQueue.missingChildren = missing;
             this.queuedFiles.push(newQueue);
         } else {
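
Note: collectChunksWithCache, which replaces the raw allDocsRaw lookup here, is implemented in the updated src/lib submodule and is not part of this diff. Below is a minimal sketch of what a cache-backed collector of this shape could look like, assuming only what the call site shows (it yields { id, chunk } pairs, with chunk left undefined for chunks that have not arrived yet). The ChunkSource interface and chunkCache map are illustrative, not the submodule's actual types.

type EntryLeaf = { _id: string; data: string };

// Minimal view of the database API this sketch needs (a PouchDB-like allDocs).
interface ChunkSource {
    allDocs(opts: { keys: string[]; include_docs: boolean }): Promise<{
        rows: { key: string; doc?: EntryLeaf; error?: string }[];
    }>;
}

const chunkCache = new Map<string, EntryLeaf>(); // hypothetical in-memory cache

async function collectChunksWithCache(
    db: ChunkSource,
    ids: string[]
): Promise<{ id: string; chunk: EntryLeaf | undefined }[]> {
    // Only cache misses hit the database; hits are answered from memory.
    const misses = ids.filter((id) => !chunkCache.has(id));
    if (misses.length > 0) {
        const res = await db.allDocs({ keys: misses, include_docs: true });
        for (const row of res.rows) {
            if (row.doc) chunkCache.set(row.key, row.doc);
        }
    }
    // chunk stays undefined for ids that are neither cached nor stored yet,
    // which is what feeds missingChildren above.
    return ids.map((id) => ({ id, chunk: chunkCache.get(id) }));
}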
@@ -1509,27 +1509,25 @@ export default class ObsidianLiveSyncPlugin extends Plugin
         return;
     }
-    logHideTimer: NodeJS.Timeout = null;
     setStatusBarText(message: string = null, log: string = null) {
         if (!this.statusBar) return;
         const newMsg = typeof message == "string" ? message : this.lastMessage;
         const newLog = typeof log == "string" ? log : this.lastLog;
         if (`${this.lastMessage}-${this.lastLog}` != `${newMsg}-${newLog}`) {
-            this.statusBar.setText(newMsg.split("\n")[0]);
+            scheduleTask("update-display", 50, () => {
+                this.statusBar.setText(newMsg.split("\n")[0]);
                 if (this.settings.showStatusOnEditor) {
                     const root = activeDocument.documentElement;
                     const q = root.querySelectorAll(`.CodeMirror-wrap,.cm-s-obsidian>.cm-editor,.canvas-wrapper`);
                     q.forEach(e => e.setAttr("data-log", '' + (newMsg + "\n" + newLog) + ''))
                 } else {
                     const root = activeDocument.documentElement;
                     const q = root.querySelectorAll(`.CodeMirror-wrap,.cm-s-obsidian>.cm-editor,.canvas-wrapper`);
                     q.forEach(e => e.setAttr("data-log", ''))
                 }
-            if (this.logHideTimer != null) {
-                clearTimeout(this.logHideTimer);
-            }
-            this.logHideTimer = setTimeout(() => this.setStatusBarText(null, ""), 3000);
+            }, true);
+            scheduleTask("log-hide", 3000, () => this.setStatusBarText(null, ""));
             this.lastMessage = newMsg;
             this.lastLog = newLog;
         }
@@ -2137,16 +2135,10 @@ Or if you are sure know what had been happened, we can unlock the database from
     conflictedCheckFiles: FilePath[] = [];
     // queueing the conflicted file check
-    conflictedCheckTimer: number;
     queueConflictedCheck(file: TFile) {
         this.conflictedCheckFiles = this.conflictedCheckFiles.filter((e) => e != file.path);
         this.conflictedCheckFiles.push(getPathFromTFile(file));
-        if (this.conflictedCheckTimer != null) {
-            window.clearTimeout(this.conflictedCheckTimer);
-        }
-        this.conflictedCheckTimer = window.setTimeout(async () => {
-            this.conflictedCheckTimer = null;
+        scheduleTask("check-conflict", 100, async () => {
             const checkFiles = JSON.parse(JSON.stringify(this.conflictedCheckFiles)) as FilePath[];
             for (const filename of checkFiles) {
                 try {
@@ -2158,7 +2150,7 @@ Or if you are sure know what had been happened, we can unlock the database from
                     Logger(ex);
                 }
             }
-        }, 100);
+        });
     }
     async showIfConflicted(filename: FilePathWithPrefix) {


@@ -44,7 +44,10 @@ export function getPathFromTFile(file: TAbstractFile) {
 }
 const tasks: { [key: string]: ReturnType<typeof setTimeout> } = {};
-export function scheduleTask(key: string, timeout: number, proc: (() => Promise<any> | void)) {
+export function scheduleTask(key: string, timeout: number, proc: (() => Promise<any> | void), skipIfTaskExist?: boolean) {
+    if (skipIfTaskExist && key in tasks) {
+        return;
+    }
     cancelTask(key);
     tasks[key] = setTimeout(async () => {
         delete tasks[key];
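
Note: these keyed timers replace the ad-hoc logHideTimer and conflictedCheckTimer fields removed above. A self-contained sketch of the semantics follows; cancelTask is referenced but not shown in this commit, so its body here is an assumption.

const tasks: { [key: string]: ReturnType<typeof setTimeout> } = {};

// Assumed implementation: cancel and forget a pending task, if any.
function cancelTask(key: string) {
    if (key in tasks) {
        clearTimeout(tasks[key]);
        delete tasks[key];
    }
}

function scheduleTask(key: string, timeout: number, proc: () => Promise<any> | void, skipIfTaskExist?: boolean) {
    // With skipIfTaskExist, the first caller's deadline wins and later calls
    // are dropped (throttling); without it, each call resets the deadline (debouncing).
    if (skipIfTaskExist && key in tasks) {
        return;
    }
    cancelTask(key);
    tasks[key] = setTimeout(async () => {
        delete tasks[key];
        await proc();
    }, timeout);
}

// "update-display" repaints at most once per 50 ms no matter how often the
// status text changes; the second call below is simply dropped.
scheduleTask("update-display", 50, () => console.log("painted"), true);
scheduleTask("update-display", 50, () => console.log("never runs"), true);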
@@ -663,6 +666,14 @@ export const remoteDatabaseCleanup = async (plugin: ObsidianLiveSyncPlugin, dryR
         return Number.parseInt((info as any)?.sizes?.[key] ?? 0);
     }
     await runWithLock("clean-up:remote", true, async () => {
+        const CHUNK_SIZE = 100;
+        function makeChunkedArrayFromArray<T>(items: T[]): T[][] {
+            const chunked = [];
+            for (let i = 0; i < items.length; i += CHUNK_SIZE) {
+                chunked.push(items.slice(i, i + CHUNK_SIZE));
+            }
+            return chunked;
+        }
         try {
             const ret = await plugin.replicator.connectRemoteCouchDBWithSetting(plugin.settings, plugin.isMobile);
             if (typeof ret === "string") {
@@ -701,14 +712,17 @@ export const remoteDatabaseCleanup = async (plugin: ObsidianLiveSyncPlugin, dryR
             return;
         }
         Logger(`Deleting unreferenced chunks: ${removeItems}`, LOG_LEVEL.NOTICE, "clean-up-db");
-        const rets = await _requestToCouchDBFetch(
-            `${plugin.settings.couchDB_URI}/${plugin.settings.couchDB_DBNAME}`,
-            plugin.settings.couchDB_USER,
-            plugin.settings.couchDB_PASSWORD,
-            "_purge",
-            payload, "POST");
-        // const result = await rets();
-        Logger(JSON.stringify(await rets.json()), LOG_LEVEL.VERBOSE);
+        const buffer = makeChunkedArrayFromArray(Object.entries(payload));
+        for (const chunkedPayload of buffer) {
+            const rets = await _requestToCouchDBFetch(
+                `${plugin.settings.couchDB_URI}/${plugin.settings.couchDB_DBNAME}`,
+                plugin.settings.couchDB_USER,
+                plugin.settings.couchDB_PASSWORD,
+                "_purge",
+                chunkedPayload.reduce((p, c) => ({ ...p, [c[0]]: c[1] }), {}), "POST");
+            // const result = await rets();
+            Logger(JSON.stringify(await rets.json()), LOG_LEVEL.VERBOSE);
+        }
         Logger(`Compacting database...`, LOG_LEVEL.NOTICE, "clean-up-db");
         await db.compact();
         const endInfo = await db.info();
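
Note: the previous code purged every unreferenced chunk in a single _purge request, which is presumably what failed once many chunks had accumulated: CouchDB caps the number of document IDs accepted per purge request (the [purge] max_document_id_number setting, 100 by default), matching CHUNK_SIZE above. A standalone sketch of the batching follows; entriesToObject and the sample payload are illustrative names, not from the plugin.

// CHUNK_SIZE and makeChunkedArrayFromArray mirror the helpers in the diff.
const CHUNK_SIZE = 100;

function makeChunkedArrayFromArray<T>(items: T[]): T[][] {
    const chunked: T[][] = [];
    for (let i = 0; i < items.length; i += CHUNK_SIZE) {
        chunked.push(items.slice(i, i + CHUNK_SIZE));
    }
    return chunked;
}

// Rebuild one batch of [id, revs] entries into the { id: revs } object
// shape that the _purge endpoint expects.
function entriesToObject(entries: [string, string[]][]): Record<string, string[]> {
    return entries.reduce((p, [id, revs]) => ({ ...p, [id]: revs }), {} as Record<string, string[]>);
}

// Example with a hypothetical payload: 250 unreferenced chunks become three
// requests of 100, 100, and 50 documents each.
const payload: Record<string, string[]> = Object.fromEntries(
    Array.from({ length: 250 }, (_, i) => [`chunk-${i}`, ["1-deadbeef"]] as [string, string[]])
);
for (const batch of makeChunkedArrayFromArray(Object.entries(payload))) {
    console.log(Object.keys(entriesToObject(batch)).length); // 100, 100, 50
}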