- Implemented:

- Database clean-up is now in beta 2!
    We can shrink the remote database by deleting unused chunks while keeping history.
    Note: The local database is not cleaned up completely. We have to `Fetch` again to finish the job.
    **Note2**: Still in beta. Please back up your vault before using it. (A sketch of how unused chunks are detected follows below.)
- Fixed:
  - Log updates are no longer thinned out.
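
For context, the clean-up treats every document whose ID starts with `h:` as a chunk and every entry's `children` array as the list of chunks it still references; whatever is left over is unused and can be purged. Below is a minimal sketch of that counting pass (what the dry run reports), loosely based on the `gatherChunkUsage` helper visible in the diff; the names here are illustrative assumptions, not the plugin's exact code.

```ts
// Minimal sketch (assumed helper, not the plugin's API): count chunks that no
// entry references any more. Chunk documents use IDs prefixed with "h:".
import PouchDB from "pouchdb";
import PouchDBFind from "pouchdb-find";

PouchDB.plugin(PouchDBFind);

export async function countUnreferencedChunks(db: PouchDB.Database): Promise<number> {
    // Every chunk document currently stored in the database.
    const chunks = await db.allDocs({ startkey: "h:", endkey: "h:\u{10ffff}" });
    const unreferenced = new Set(chunks.rows.map((row) => row.id));
    // Every entry with a `children` array keeps the IDs of the chunks it uses.
    const entries = await db.find({
        selector: { children: { $exists: true, $type: "array" } },
        fields: ["_id", "children"],
        limit: 999999999,
    });
    for (const doc of entries.docs) {
        for (const chunkId of (doc as any).children as string[]) {
            unreferenced.delete(chunkId); // still referenced, so keep it
        }
    }
    // Whatever remains is a candidate for purge and compaction.
    return unreferenced.size;
}
```

The actual clean-up then purges those chunks and rebalances both databases, which is what the new `dryRunGC`/`dbGC` methods do via `purgeUnreferencedChunks` and `balanceChunkPurgedDBs` in the diff below.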
This commit is contained in:
vorotamoroz
2023-07-25 00:29:47 +09:00
parent 5e844372cb
commit b1d1ba0e6b
4 changed files with 76 additions and 338 deletions

View File

@@ -7,7 +7,7 @@ import { Logger } from "./lib/src/logger";
import { checkSyncInfo, isCloudantURI } from "./lib/src/utils_couchdb.js";
import { testCrypt } from "./lib/src/e2ee_v2";
import ObsidianLiveSyncPlugin from "./main";
-import { balanceChunks, localDatabaseCleanUp, performRebuildDB, remoteDatabaseCleanup, requestToCouchDB } from "./utils";
+import { performRebuildDB, requestToCouchDB } from "./utils";
export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
@@ -1851,26 +1851,6 @@ ${stringifyYaml(pluginConfig)}`;
})
)
new Setting(containerMaintenanceEl)
.setName("(Beta) Clean the remote database")
.setDesc("")
.addButton((button) =>
button.setButtonText("Count")
.setDisabled(false)
.onClick(async () => {
await remoteDatabaseCleanup(this.plugin, true);
})
).addButton((button) =>
button.setButtonText("Perform cleaning")
.setDisabled(false)
.setWarning()
.onClick(async () => {
// @ts-ignore
this.plugin.app.setting.close()
await remoteDatabaseCleanup(this.plugin, false);
await balanceChunks(this.plugin, false);
})
);
containerMaintenanceEl.createEl("h4", { text: "The local database" });
@@ -1887,26 +1867,6 @@ ${stringifyYaml(pluginConfig)}`;
})
)
new Setting(containerMaintenanceEl)
.setName("(Beta) Clean the local database")
.setDesc("This feature requires disabling 'Use an old adapter for compatibility'")
.addButton((button) =>
button.setButtonText("Count")
.setDisabled(false)
.onClick(async () => {
await localDatabaseCleanUp(this.plugin, false, true);
})
).addButton((button) =>
button.setButtonText("Perform cleaning")
.setDisabled(false)
.setWarning()
.onClick(async () => {
// @ts-ignore
this.plugin.app.setting.close()
await localDatabaseCleanUp(this.plugin, false, false);
})
);
new Setting(containerMaintenanceEl)
.setName("Discard local database to reset or uninstall Self-hosted LiveSync")
.addButton((button) =>
@@ -1922,6 +1882,25 @@ ${stringifyYaml(pluginConfig)}`;
containerMaintenanceEl.createEl("h4", { text: "Both databases" });
new Setting(containerMaintenanceEl)
.setName("(Beta2) Clean up databases")
.setDesc("Delete unused chunks to shrink the database. This feature requires disabling 'Use an old adapter for compatibility'")
.addButton((button) =>
button.setButtonText("DryRun")
.setDisabled(false)
.onClick(async () => {
await this.plugin.dryRunGC();
})
).addButton((button) =>
button.setButtonText("Perform cleaning")
.setDisabled(false)
.setWarning()
.onClick(async () => {
// @ts-ignore
this.plugin.app.setting.close()
await this.plugin.dbGC();
})
);
new Setting(containerMaintenanceEl)
.setName("Rebuild everything")
.setDesc("Rebuild local and remote database with local files.")
@@ -1934,19 +1913,6 @@ ${stringifyYaml(pluginConfig)}`;
await rebuildDB("rebuildBothByThisDevice");
})
)
new Setting(containerMaintenanceEl)
.setName("(Beta) Complement each other with possible missing chunks.")
.setDesc("")
.addButton((button) =>
button
.setButtonText("Balance")
.setWarning()
.setDisabled(false)
.onClick(async () => {
await balanceChunks(this.plugin, false);
})
)
applyDisplayEnabled();
addScreenElement("70", containerMaintenanceEl);

Submodule src/lib updated: aacf942453...642efefaf1

View File

@@ -10,9 +10,9 @@ import { PouchDB } from "./lib/src/pouchdb-browser.js";
import { ConflictResolveModal } from "./ConflictResolveModal";
import { ObsidianLiveSyncSettingTab } from "./ObsidianLiveSyncSettingTab";
import { DocumentHistoryModal } from "./DocumentHistoryModal";
-import { applyPatch, cancelAllPeriodicTask, cancelAllTasks, cancelTask, generatePatchObj, id2path, isObjectMargeApplicable, isSensibleMargeApplicable, flattenObject, path2id, scheduleTask, tryParseJSON, createFile, modifyFile, isValidPath, getAbstractFileByPath, touch, recentlyTouched, isInternalMetadata, isPluginMetadata, stripInternalMetadataPrefix, isChunk, askSelectString, askYesNo, askString, PeriodicProcessor, clearTouched, getPath, getPathWithoutPrefix, getPathFromTFile, localDatabaseCleanUp, balanceChunks, performRebuildDB } from "./utils";
+import { applyPatch, cancelAllPeriodicTask, cancelAllTasks, cancelTask, generatePatchObj, id2path, isObjectMargeApplicable, isSensibleMargeApplicable, flattenObject, path2id, scheduleTask, tryParseJSON, createFile, modifyFile, isValidPath, getAbstractFileByPath, touch, recentlyTouched, isInternalMetadata, isPluginMetadata, stripInternalMetadataPrefix, isChunk, askSelectString, askYesNo, askString, PeriodicProcessor, clearTouched, getPath, getPathWithoutPrefix, getPathFromTFile, performRebuildDB } from "./utils";
import { encrypt, tryDecrypt } from "./lib/src/e2ee_v2";
-import { enableEncryption, isCloudantURI, isErrorOfMissingDoc, isValidRemoteCouchDBURI } from "./lib/src/utils_couchdb";
+import { balanceChunkPurgedDBs, enableEncryption, isCloudantURI, isErrorOfMissingDoc, isValidRemoteCouchDBURI, purgeUnreferencedChunks } from "./lib/src/utils_couchdb";
import { getGlobalStore, ObservableStore, observeStores } from "./lib/src/store";
import { lockStore, logMessageStore, logStore, type LogEntry } from "./lib/src/stores";
import { setNoticeClass } from "./lib/src/wrapper";
@@ -240,6 +240,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
createPouchDBInstance<T>(name?: string, options?: PouchDB.Configuration.DatabaseConfiguration): PouchDB.Database<T> {
if (this.settings.useIndexedDBAdapter) {
options.adapter = "indexeddb";
options.purged_infos_limit = 1;
return new PouchDB(name + "-indexeddb", options);
}
return new PouchDB(name, options);
@@ -1566,19 +1567,19 @@ export default class ObsidianLiveSyncPlugin extends Plugin
const newMsg = typeof message == "string" ? message : this.lastMessage;
const newLog = typeof log == "string" ? log : this.lastLog;
if (`${this.lastMessage}-${this.lastLog}` != `${newMsg}-${newLog}`) {
-scheduleTask("update-display", 50, () => {
+// scheduleTask("update-display", 50, () => {
this.statusBar.setText(newMsg.split("\n")[0]);
if (this.settings.showStatusOnEditor) {
const root = activeDocument.documentElement;
const q = root.querySelectorAll(`.CodeMirror-wrap,.cm-s-obsidian>.cm-editor,.canvas-wrapper`);
q.forEach(e => e.setAttr("data-log", '' + (newMsg + "\n" + newLog) + ''))
} else {
const root = activeDocument.documentElement;
const q = root.querySelectorAll(`.CodeMirror-wrap,.cm-s-obsidian>.cm-editor,.canvas-wrapper`);
q.forEach(e => e.setAttr("data-log", ''))
}
-}, true);
+// }, true);
scheduleTask("log-hide", 3000, () => this.setStatusBarText(null, ""));
this.lastMessage = newMsg;
this.lastLog = newLog;
@@ -1599,22 +1600,19 @@ export default class ObsidianLiveSyncPlugin extends Plugin
if (!ret) {
if (this.replicator.remoteLockedAndDeviceNotAccepted) {
if (this.replicator.remoteCleaned) {
-const message = `
-The remote database has been cleaned up.
-To synchronize, this device must also be cleaned up or fetch everything again once.
-Fetching may takes some time. Cleaning up is not stable yet but fast.
-`
-const CHOICE_CLEANUP = "Clean up";
-const CHOICE_FETCH = "Fetch again";
-const CHOICE_DISMISS = "Dismiss";
-const ret = await confirmWithMessage(this, "Locked", message, [CHOICE_CLEANUP, CHOICE_FETCH, CHOICE_DISMISS], CHOICE_DISMISS, 10);
-if (ret == CHOICE_CLEANUP) {
-await localDatabaseCleanUp(this, true, false);
-await balanceChunks(this, false);
-}
-if (ret == CHOICE_FETCH) {
-await performRebuildDB(this, "localOnly");
-}
+Logger(`The remote database has been cleaned up. The local database of this device also should be done.`, showMessage ? LOG_LEVEL.NOTICE : LOG_LEVEL.INFO);
+const remoteDB = await this.getReplicator().connectRemoteCouchDBWithSetting(this.settings, this.getIsMobile(), true);
+if (typeof remoteDB == "string") {
+Logger(remoteDB, LOG_LEVEL.NOTICE);
+return false;
+}
+// TODO Check actually sent.
+await runWithLock("cleanup", true, async () => {
+await balanceChunkPurgedDBs(this.localDatabase.localDatabase, remoteDB.db);
+await purgeUnreferencedChunks(this.localDatabase.localDatabase, false);
+await this.getReplicator().markRemoteResolved(this.settings);
+});
+Logger("The local database has been cleaned up.", showMessage ? LOG_LEVEL.NOTICE : LOG_LEVEL.INFO)
} else {
const message = `
The remote database has been rebuilt.
@@ -2490,6 +2488,33 @@ Or if you are sure know what had been happened, we can unlock the database from
return this.localDatabase.isTargetFile(file);
}
}
async dryRunGC() {
await runWithLock("cleanup", true, async () => {
const remoteDBConn = await this.getReplicator().connectRemoteCouchDBWithSetting(this.settings, this.isMobile)
if (typeof (remoteDBConn) == "string") {
Logger(remoteDBConn);
return;
}
await purgeUnreferencedChunks(remoteDBConn.db, true, this.settings, false);
await purgeUnreferencedChunks(this.localDatabase.localDatabase, true);
});
}
async dbGC() {
// Lock the remote completely once.
await runWithLock("cleanup", true, async () => {
this.getReplicator().markRemoteLocked(this.settings, true, true);
const remoteDBConn = await this.getReplicator().connectRemoteCouchDBWithSetting(this.settings, this.isMobile)
if (typeof (remoteDBConn) == "string") {
Logger(remoteDBConn);
return;
}
await purgeUnreferencedChunks(remoteDBConn.db, false, this.settings, true);
await purgeUnreferencedChunks(this.localDatabase.localDatabase, false);
await balanceChunkPurgedDBs(this.localDatabase.localDatabase, remoteDBConn.db);
this.localDatabase.refreshSettings();
Logger("The remote database has been cleaned up! Other devices will be cleaned up on the next synchronisation.")
});
}
}

View File

@@ -2,11 +2,10 @@ import { type DataWriteOptions, normalizePath, TFile, Platform, TAbstractFile, A
import { path2id_base, id2path_base, isValidFilenameInLinux, isValidFilenameInDarwin, isValidFilenameInWidows, isValidFilenameInAndroid, stripAllPrefixes } from "./lib/src/path";
import { Logger } from "./lib/src/logger";
-import { type AnyEntry, type DocumentID, type EntryDoc, type EntryHasPath, type FilePath, type FilePathWithPrefix, LOG_LEVEL, type NewEntry } from "./lib/src/types";
+import { type AnyEntry, type DocumentID, type EntryHasPath, type FilePath, type FilePathWithPrefix, LOG_LEVEL } from "./lib/src/types";
import { CHeader, ICHeader, ICHeaderLength, PSCHeader } from "./types";
import { InputStringDialog, PopoverSelectString } from "./dialogs";
import ObsidianLiveSyncPlugin from "./main";
import { runWithLock } from "./lib/src/lock";
// For backward compatibility, using the path for determining id.
// Only CouchDB unacceptable ID (that starts with an underscore) has been prefixed with "/".
@@ -439,12 +438,6 @@ export class PeriodicProcessor {
}
}
function sizeToHumanReadable(size: number | undefined) {
if (!size) return "-";
const i = Math.floor(Math.log(size) / Math.log(1024));
return Number.parseInt((size / Math.pow(1024, i)).toFixed(2)) + ' ' + ['B', 'kB', 'MB', 'GB', 'TB'][i];
}
export const _requestToCouchDBFetch = async (baseUri: string, username: string, password: string, path?: string, body?: string | any, method?: string) => {
const utf8str = String.fromCharCode.apply(null, new TextEncoder().encode(`${username}:${password}`));
const encoded = window.btoa(utf8str);
@@ -492,249 +485,3 @@ export async function performRebuildDB(plugin: ObsidianLiveSyncPlugin, method: "
await plugin.addOnSetup.rebuildEverything();
}
}
export const gatherChunkUsage = async (db: PouchDB.Database<EntryDoc>) => {
const used = new Map();
const unreferenced = new Map();
const removed = new Map();
const missing = new Map();
const xx = await db.allDocs({ startkey: "h:", endkey: `h:\u{10ffff}` });
for (const xxd of xx.rows) {
const chunk = xxd.id
unreferenced.set(chunk, xxd.value.rev);
}
const x = await db.find({ limit: 999999999, selector: { children: { $exists: true, $type: "array" } }, fields: ["_id", "path", "mtime", "children"] });
for (const temp of x.docs) {
for (const chunk of (temp as NewEntry).children) {
used.set(chunk, (used.has(chunk) ? used.get(chunk) : 0) + 1);
if (unreferenced.has(chunk)) {
removed.set(chunk, unreferenced.get(chunk));
unreferenced.delete(chunk);
} else {
if (!removed.has(chunk)) {
if (!missing.has(temp._id)) {
missing.set(temp._id, []);
}
missing.get(temp._id).push(chunk);
}
}
}
}
return { used, unreferenced, missing };
}
export const localDatabaseCleanUp = async (plugin: ObsidianLiveSyncPlugin, force: boolean, dryRun: boolean) => {
await runWithLock("clean-up:local", true, async () => {
const db = plugin.localDatabase.localDatabase;
if ((db as any)?.adapter != "indexeddb") {
if (force && !dryRun) {
Logger("Fetch from the remote database", LOG_LEVEL.NOTICE, "clean-up-db");
await performRebuildDB(plugin, "localOnly");
return;
} else {
Logger("This feature requires disabling `Use an old adapter for compatibility`.", LOG_LEVEL.NOTICE, "clean-up-db");
return;
}
}
Logger(`The remote database has been locked for garbage collection`, LOG_LEVEL.NOTICE, "clean-up-db");
Logger(`Gathering chunk usage information`, LOG_LEVEL.NOTICE, "clean-up-db");
const { unreferenced, missing } = await gatherChunkUsage(db);
if (missing.size != 0) {
Logger(`Some chunks are not found! We have to rescue`, LOG_LEVEL.NOTICE);
Logger(missing, LOG_LEVEL.VERBOSE);
} else {
Logger(`All chunks are OK`, LOG_LEVEL.NOTICE);
}
const payload = {} as Record<string, string[]>;
for (const [id, rev] of unreferenced) {
payload[id] = [rev];
}
const removeItems = Object.keys(payload).length;
if (removeItems == 0) {
Logger(`No unreferenced chunks found (Local)`, LOG_LEVEL.NOTICE);
await plugin.markRemoteResolved();
}
if (dryRun) {
Logger(`There are ${removeItems} unreferenced chunks (Local)`, LOG_LEVEL.NOTICE);
return;
}
Logger(`Deleting unreferenced chunks: ${removeItems}`, LOG_LEVEL.NOTICE, "clean-up-db");
for (const [id, rev] of unreferenced) {
//@ts-ignore
const ret = await db.purge(id, rev);
Logger(ret, LOG_LEVEL.VERBOSE);
}
plugin.localDatabase.refreshSettings();
Logger(`Compacting local database...`, LOG_LEVEL.NOTICE, "clean-up-db");
await db.compact();
await plugin.markRemoteResolved();
Logger("Done!", LOG_LEVEL.NOTICE, "clean-up-db");
})
}
export const balanceChunks = async (plugin: ObsidianLiveSyncPlugin, dryRun: boolean) => {
await runWithLock("clean-up:balance", true, async () => {
const localDB = plugin.localDatabase.localDatabase;
Logger(`Gathering chunk usage information`, LOG_LEVEL.NOTICE, "clean-up-db");
const ret = await plugin.replicator.connectRemoteCouchDBWithSetting(plugin.settings, plugin.isMobile);
if (typeof ret === "string") {
Logger(`Connect error: ${ret}`, LOG_LEVEL.NOTICE, "clean-up-db");
return;
}
const localChunks = new Map<string, string>();
const xx = await localDB.allDocs({ startkey: "h:", endkey: `h:\u{10ffff}` });
for (const xxd of xx.rows) {
const chunk = xxd.id
localChunks.set(chunk, xxd.value.rev);
}
// const info = ret.info;
const remoteDB = ret.db;
const remoteChunks = new Map<string, string>();
const xxr = await remoteDB.allDocs({ startkey: "h:", endkey: `h:\u{10ffff}` });
for (const xxd of xxr.rows) {
const chunk = xxd.id
remoteChunks.set(chunk, xxd.value.rev);
}
const localToRemote = new Map<string, string>([...localChunks]);
const remoteToLocal = new Map<string, string>([...remoteChunks]);
for (const id of new Set([...localChunks.keys(), ...remoteChunks.keys()])) {
if (remoteChunks.has(id)) {
localToRemote.delete(id);
}
if (localChunks.has(id)) {
remoteToLocal.delete(id);
}
}
function arrayToChunkedArray<T>(src: T[], size = 25) {
const ret = [] as T[][];
let i = 0;
while (i < src.length) {
ret.push(src.slice(i, i += size));
}
return ret;
}
if (localToRemote.size == 0) {
Logger(`No chunks need to be sent`, LOG_LEVEL.NOTICE);
} else {
Logger(`${localToRemote.size} chunks need to be sent`, LOG_LEVEL.NOTICE);
if (!dryRun) {
const w = arrayToChunkedArray([...localToRemote]);
for (const chunk of w) {
for (const [id,] of chunk) {
const queryRet = await localDB.allDocs({ keys: [id], include_docs: true });
const docs = queryRet.rows.filter(e => !("error" in e)).map(x => x.doc);
const ret = await remoteDB.bulkDocs(docs, { new_edits: false });
Logger(ret, LOG_LEVEL.VERBOSE);
}
}
Logger(`Done! ${remoteToLocal.size} chunks have been sent`, LOG_LEVEL.NOTICE);
}
}
if (remoteToLocal.size == 0) {
Logger(`No chunks need to be retrieved`, LOG_LEVEL.NOTICE);
} else {
Logger(`${remoteToLocal.size} chunks need to be retrieved`, LOG_LEVEL.NOTICE);
if (!dryRun) {
const w = arrayToChunkedArray([...remoteToLocal]);
for (const chunk of w) {
for (const [id,] of chunk) {
const queryRet = await remoteDB.allDocs({ keys: [id], include_docs: true });
const docs = queryRet.rows.filter(e => !("error" in e)).map(x => x.doc);
const ret = await localDB.bulkDocs(docs, { new_edits: false });
Logger(ret, LOG_LEVEL.VERBOSE);
}
}
Logger(`Done! ${remoteToLocal.size} chunks have been retrieved`, LOG_LEVEL.NOTICE);
}
}
})
}
export const remoteDatabaseCleanup = async (plugin: ObsidianLiveSyncPlugin, dryRun: boolean) => {
const getSize = function (info: PouchDB.Core.DatabaseInfo, key: "active" | "external" | "file") {
return Number.parseInt((info as any)?.sizes?.[key] ?? 0);
}
await runWithLock("clean-up:remote", true, async () => {
const CHUNK_SIZE = 100;
function makeChunkedArrayFromArray<T>(items: T[]): T[][] {
const chunked = [];
for (let i = 0; i < items.length; i += CHUNK_SIZE) {
chunked.push(items.slice(i, i + CHUNK_SIZE));
}
return chunked;
}
try {
const ret = await plugin.replicator.connectRemoteCouchDBWithSetting(plugin.settings, plugin.isMobile);
if (typeof ret === "string") {
Logger(`Connect error: ${ret}`, LOG_LEVEL.NOTICE, "clean-up-db");
return;
}
const info = ret.info;
Logger(JSON.stringify(info), LOG_LEVEL.VERBOSE, "clean-up-db");
Logger(`Database active-size: ${sizeToHumanReadable(getSize(info, "active"))}, external-size:${sizeToHumanReadable(getSize(info, "external"))}, file-size: ${sizeToHumanReadable(getSize(info, "file"))}`, LOG_LEVEL.NOTICE);
if (!dryRun) {
Logger(`The remote database has been locked for garbage collection`, LOG_LEVEL.NOTICE, "clean-up-db");
await plugin.markRemoteLocked(true);
}
Logger(`Gathering chunk usage information`, LOG_LEVEL.NOTICE, "clean-up-db");
const db = ret.db;
const { unreferenced, missing } = await gatherChunkUsage(db);
if (missing.size != 0) {
Logger(`Some chunks are not found! We have to rescue`, LOG_LEVEL.NOTICE);
Logger(missing, LOG_LEVEL.VERBOSE);
} else {
Logger(`All chunks are OK`, LOG_LEVEL.NOTICE);
}
const payload = {} as Record<string, string[]>;
for (const [id, rev] of unreferenced) {
payload[id] = [rev];
}
const removeItems = Object.keys(payload).length;
if (removeItems == 0) {
Logger(`No unreferenced chunk found (Remote)`, LOG_LEVEL.NOTICE);
return;
}
if (dryRun) {
Logger(`There are ${removeItems} unreferenced chunks (Remote)`, LOG_LEVEL.NOTICE);
return;
}
Logger(`Deleting unreferenced chunks: ${removeItems}`, LOG_LEVEL.NOTICE, "clean-up-db");
const buffer = makeChunkedArrayFromArray(Object.entries(payload));
for (const chunkedPayload of buffer) {
const rets = await _requestToCouchDBFetch(
`${plugin.settings.couchDB_URI}/${plugin.settings.couchDB_DBNAME}`,
plugin.settings.couchDB_USER,
plugin.settings.couchDB_PASSWORD,
"_purge",
chunkedPayload.reduce((p, c) => ({ ...p, [c[0]]: c[1] }), {}), "POST");
// const result = await rets();
Logger(JSON.stringify(await rets.json()), LOG_LEVEL.VERBOSE);
}
Logger(`Compacting database...`, LOG_LEVEL.NOTICE, "clean-up-db");
await db.compact();
const endInfo = await db.info();
Logger(`Processed database active-size: ${sizeToHumanReadable(getSize(endInfo, "active"))}, external-size:${sizeToHumanReadable(getSize(endInfo, "external"))}, file-size: ${sizeToHumanReadable(getSize(endInfo, "file"))}`, LOG_LEVEL.NOTICE);
Logger(`Reduced sizes: active-size: ${sizeToHumanReadable(getSize(info, "active") - getSize(endInfo, "active"))}, external-size:${sizeToHumanReadable(getSize(info, "external") - getSize(endInfo, "external"))}, file-size: ${sizeToHumanReadable(getSize(info, "file") - getSize(endInfo, "file"))}`, LOG_LEVEL.NOTICE);
Logger(JSON.stringify(endInfo), LOG_LEVEL.VERBOSE, "clean-up-db");
Logger(`Local database cleaning up...`);
await localDatabaseCleanUp(plugin, true, false);
} catch (ex) {
Logger("Failed to clean up db.")
Logger(ex, LOG_LEVEL.VERBOSE);
}
});
}