Mirror of https://github.com/vrtmrz/obsidian-livesync.git (synced 2025-12-24 07:01:29 +00:00)
- Fixed:
  - Fixed an issue where binary files were sometimes corrupted.
  - Fixed an issue where customisation sync data could be corrupted.
- Improved:
  - The remote database now uses less memory.
  - This release requires a brief wait on the first synchronisation, to track the latest changeset again.
  - Added a description for the `Device name` setting.
- Refactored:
  - Many type errors have been resolved.
  - An obsolete file has been deleted.
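
Note: two helpers recur throughout the diff below: `isAnyNote`, which replaces the scattered `doc.type == "newnote" || doc.type == "plain"` checks, and `determineTypeFromBlob`, which picks the storage datatype from the saved content rather than from the file extension and underpins the binary-file fix above. Both live in the `lib/src/utils` submodule (updated in this commit), so their real implementations are not part of this diff; the sketch below only illustrates the idea and uses hypothetical names.

```typescript
// Rough, illustrative sketches; the real isAnyNote and determineTypeFromBlob
// live in the lib/src/utils submodule (updated by this commit) and may differ.

// Narrows a replicated document to the two note types the plugin stores.
function isAnyNoteSketch(doc: { type?: string }): boolean {
    return doc.type == "newnote" || doc.type == "plain";
}

// Content-based detection: treat data as "plain" only if it survives a strict
// UTF-8 decode, otherwise store it as "newnote" (the binary representation).
// Deciding by content instead of by file extension is the idea behind the
// binary-corruption fix in this release.
function determineTypeFromBytesSketch(bytes: Uint8Array): "plain" | "newnote" {
    if (bytes.includes(0)) return "newnote"; // a NUL byte means binary
    try {
        new TextDecoder("utf-8", { fatal: true }).decode(bytes); // throws on invalid UTF-8
        return "plain";
    } catch {
        return "newnote";
    }
}
```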
@@ -4,7 +4,7 @@ import { Notice, type PluginManifest, parseYaml, normalizePath, type ListedFiles
 import type { EntryDoc, LoadedEntry, InternalFileEntry, FilePathWithPrefix, FilePath, DocumentID, AnyEntry, SavingEntry } from "./lib/src/types";
 import { LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE, MODE_SELECTIVE } from "./lib/src/types";
 import { ICXHeader, PERIODIC_PLUGIN_SWEEP, } from "./types";
-import { createSavingEntryFromLoadedEntry, createTextBlob, delay, fireAndForget, getDocData, isDocContentSame, sendSignal, waitForSignal } from "./lib/src/utils";
+import { createSavingEntryFromLoadedEntry, createTextBlob, delay, fireAndForget, getDocData, isDocContentSame } from "./lib/src/utils";
 import { Logger } from "./lib/src/logger";
 import { readString, decodeBinary, arrayBufferToBase64, digestHash } from "./lib/src/strbin";
 import { serialized } from "./lib/src/lock";
@@ -398,7 +398,7 @@ export class ConfigSync extends LiveSyncCommands {
 showJSONMergeDialogAndMerge(docA: LoadedEntry, docB: LoadedEntry, pluginDataA: PluginDataEx, pluginDataB: PluginDataEx): Promise<boolean> {
 const fileA = { ...pluginDataA.files[0], ctime: pluginDataA.files[0].mtime, _id: `${pluginDataA.documentPath}` as DocumentID };
 const fileB = pluginDataB.files[0];
-const docAx = { ...docA, ...fileA } as LoadedEntry, docBx = { ...docB, ...fileB } as LoadedEntry
+const docAx = { ...docA, ...fileA, datatype: "newnote" } as LoadedEntry, docBx = { ...docB, ...fileB, datatype: "newnote" } as LoadedEntry
 return serialized("config:merge-data", () => new Promise((res) => {
 Logger("Opening data-merging dialog", LOG_LEVEL_VERBOSE);
 // const docs = [docA, docB];
@@ -1,314 +0,0 @@
|
|||||||
import { normalizePath, type PluginManifest } from "./deps";
|
|
||||||
import type { DocumentID, EntryDoc, FilePathWithPrefix, LoadedEntry, SavingEntry } from "./lib/src/types";
|
|
||||||
import { LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "./lib/src/types";
|
|
||||||
import { type PluginDataEntry, PERIODIC_PLUGIN_SWEEP, type PluginList, type DevicePluginList, PSCHeader, PSCHeaderEnd } from "./types";
|
|
||||||
import { createTextBlob, getDocData, isDocContentSame } from "./lib/src/utils";
|
|
||||||
import { Logger } from "./lib/src/logger";
|
|
||||||
import { PouchDB } from "./lib/src/pouchdb-browser.js";
|
|
||||||
import { isPluginMetadata, PeriodicProcessor } from "./utils";
|
|
||||||
import { PluginDialogModal } from "./dialogs";
|
|
||||||
import { NewNotice } from "./lib/src/wrapper";
|
|
||||||
import { versionNumberString2Number } from "./lib/src/strbin";
|
|
||||||
import { serialized, skipIfDuplicated } from "./lib/src/lock";
|
|
||||||
import { LiveSyncCommands } from "./LiveSyncCommands";
|
|
||||||
|
|
||||||
export class PluginAndTheirSettings extends LiveSyncCommands {
|
|
||||||
|
|
||||||
get deviceAndVaultName() {
|
|
||||||
return this.plugin.deviceAndVaultName;
|
|
||||||
}
|
|
||||||
pluginDialog: PluginDialogModal = null;
|
|
||||||
periodicPluginSweepProcessor = new PeriodicProcessor(this.plugin, async () => await this.sweepPlugin(false));
|
|
||||||
|
|
||||||
showPluginSyncModal() {
|
|
||||||
if (this.pluginDialog != null) {
|
|
||||||
this.pluginDialog.open();
|
|
||||||
} else {
|
|
||||||
this.pluginDialog = new PluginDialogModal(this.app, this.plugin);
|
|
||||||
this.pluginDialog.open();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
hidePluginSyncModal() {
|
|
||||||
if (this.pluginDialog != null) {
|
|
||||||
this.pluginDialog.close();
|
|
||||||
this.pluginDialog = null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
onload(): void | Promise<void> {
|
|
||||||
this.plugin.addCommand({
|
|
||||||
id: "livesync-plugin-dialog",
|
|
||||||
name: "Show Plugins and their settings",
|
|
||||||
callback: () => {
|
|
||||||
this.showPluginSyncModal();
|
|
||||||
},
|
|
||||||
});
|
|
||||||
this.showPluginSyncModal();
|
|
||||||
}
|
|
||||||
onunload() {
|
|
||||||
this.hidePluginSyncModal();
|
|
||||||
this.periodicPluginSweepProcessor?.disable();
|
|
||||||
}
|
|
||||||
parseReplicationResultItem(doc: PouchDB.Core.ExistingDocument<EntryDoc>) {
|
|
||||||
if (isPluginMetadata(doc._id)) {
|
|
||||||
if (this.settings.notifyPluginOrSettingUpdated) {
|
|
||||||
this.triggerCheckPluginUpdate();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
async beforeReplicate(showMessage: boolean) {
|
|
||||||
if (this.settings.autoSweepPlugins) {
|
|
||||||
await this.sweepPlugin(showMessage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
async onResume() {
|
|
||||||
if (this.plugin.suspended)
|
|
||||||
return;
|
|
||||||
if (this.settings.autoSweepPlugins) {
|
|
||||||
await this.sweepPlugin(false);
|
|
||||||
}
|
|
||||||
this.periodicPluginSweepProcessor.enable(this.settings.autoSweepPluginsPeriodic && !this.settings.watchInternalFileChanges ? (PERIODIC_PLUGIN_SWEEP * 1000) : 0);
|
|
||||||
}
|
|
||||||
async onInitializeDatabase(showNotice: boolean) {
|
|
||||||
if (this.settings.usePluginSync) {
|
|
||||||
try {
|
|
||||||
Logger("Scanning plugins...");
|
|
||||||
await this.sweepPlugin(showNotice);
|
|
||||||
Logger("Scanning plugins done");
|
|
||||||
} catch (ex) {
|
|
||||||
Logger("Scanning plugins failed");
|
|
||||||
Logger(ex, LOG_LEVEL_VERBOSE);
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async realizeSettingSyncMode() {
|
|
||||||
this.periodicPluginSweepProcessor?.disable();
|
|
||||||
if (this.plugin.suspended)
|
|
||||||
return;
|
|
||||||
if (this.settings.autoSweepPlugins) {
|
|
||||||
await this.sweepPlugin(false);
|
|
||||||
}
|
|
||||||
this.periodicPluginSweepProcessor.enable(this.settings.autoSweepPluginsPeriodic && !this.settings.watchInternalFileChanges ? (PERIODIC_PLUGIN_SWEEP * 1000) : 0);
|
|
||||||
}
|
|
||||||
|
|
||||||
triggerCheckPluginUpdate() {
|
|
||||||
(async () => await this.checkPluginUpdate())();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
async getPluginList(): Promise<{ plugins: PluginList; allPlugins: DevicePluginList; thisDevicePlugins: DevicePluginList; }> {
|
|
||||||
const docList = await this.localDatabase.allDocsRaw<PluginDataEntry>({ startkey: PSCHeader, endkey: PSCHeaderEnd, include_docs: false });
|
|
||||||
const oldDocs: PluginDataEntry[] = ((await Promise.all(docList.rows.map(async (e) => await this.localDatabase.getDBEntry(e.id as FilePathWithPrefix /* WARN!! THIS SHOULD BE WRAPPED */)))).filter((e) => e !== false) as LoadedEntry[]).map((e) => JSON.parse(getDocData(e.data)));
|
|
||||||
const plugins: { [key: string]: PluginDataEntry[]; } = {};
|
|
||||||
const allPlugins: { [key: string]: PluginDataEntry; } = {};
|
|
||||||
const thisDevicePlugins: { [key: string]: PluginDataEntry; } = {};
|
|
||||||
for (const v of oldDocs) {
|
|
||||||
if (typeof plugins[v.deviceVaultName] === "undefined") {
|
|
||||||
plugins[v.deviceVaultName] = [];
|
|
||||||
}
|
|
||||||
plugins[v.deviceVaultName].push(v);
|
|
||||||
allPlugins[v._id] = v;
|
|
||||||
if (v.deviceVaultName == this.deviceAndVaultName) {
|
|
||||||
thisDevicePlugins[v.manifest.id] = v;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return { plugins, allPlugins, thisDevicePlugins };
|
|
||||||
}
|
|
||||||
|
|
||||||
async checkPluginUpdate() {
|
|
||||||
if (!this.plugin.settings.usePluginSync)
|
|
||||||
return;
|
|
||||||
await this.sweepPlugin(false);
|
|
||||||
const { allPlugins, thisDevicePlugins } = await this.getPluginList();
|
|
||||||
const arrPlugins = Object.values(allPlugins);
|
|
||||||
let updateFound = false;
|
|
||||||
for (const plugin of arrPlugins) {
|
|
||||||
const ownPlugin = thisDevicePlugins[plugin.manifest.id];
|
|
||||||
if (ownPlugin) {
|
|
||||||
const remoteVersion = versionNumberString2Number(plugin.manifest.version);
|
|
||||||
const ownVersion = versionNumberString2Number(ownPlugin.manifest.version);
|
|
||||||
if (remoteVersion > ownVersion) {
|
|
||||||
updateFound = true;
|
|
||||||
}
|
|
||||||
if (((plugin.mtime / 1000) | 0) > ((ownPlugin.mtime / 1000) | 0) && (plugin.dataJson ?? "") != (ownPlugin.dataJson ?? "")) {
|
|
||||||
updateFound = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (updateFound) {
|
|
||||||
const fragment = createFragment((doc) => {
|
|
||||||
doc.createEl("a", null, (a) => {
|
|
||||||
a.text = "There're some new plugins or their settings";
|
|
||||||
a.addEventListener("click", () => this.showPluginSyncModal());
|
|
||||||
});
|
|
||||||
});
|
|
||||||
NewNotice(fragment, 10000);
|
|
||||||
} else {
|
|
||||||
Logger("Everything is up to date.", LOG_LEVEL_NOTICE);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async sweepPlugin(showMessage = false, specificPluginPath = "") {
|
|
||||||
if (!this.settings.usePluginSync)
|
|
||||||
return;
|
|
||||||
if (!this.localDatabase.isReady)
|
|
||||||
return;
|
|
||||||
// @ts-ignore
|
|
||||||
const pl = this.app.plugins;
|
|
||||||
const manifests: PluginManifest[] = Object.values(pl.manifests);
|
|
||||||
let specificPlugin = "";
|
|
||||||
if (specificPluginPath != "") {
|
|
||||||
specificPlugin = manifests.find(e => e.dir.endsWith("/" + specificPluginPath))?.id ?? "";
|
|
||||||
}
|
|
||||||
await skipIfDuplicated("sweepplugin", async () => {
|
|
||||||
const logLevel = showMessage ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO;
|
|
||||||
if (!this.deviceAndVaultName) {
|
|
||||||
Logger("You have to set your device name.", LOG_LEVEL_NOTICE);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
Logger("Scanning plugins", logLevel);
|
|
||||||
const oldDocs = await this.localDatabase.allDocsRaw<EntryDoc>({
|
|
||||||
startkey: `ps:${this.deviceAndVaultName}-${specificPlugin}`,
|
|
||||||
endkey: `ps:${this.deviceAndVaultName}-${specificPlugin}\u{10ffff}`,
|
|
||||||
include_docs: true,
|
|
||||||
});
|
|
||||||
// Logger("OLD DOCS.", LOG_LEVEL_VERBOSE);
|
|
||||||
// sweep current plugin.
|
|
||||||
const procs = manifests.map(async (m) => {
|
|
||||||
const pluginDataEntryID = `ps:${this.deviceAndVaultName}-${m.id}` as DocumentID;
|
|
||||||
try {
|
|
||||||
if (specificPlugin && m.id != specificPlugin) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
Logger(`Reading plugin:${m.name}(${m.id})`, LOG_LEVEL_VERBOSE);
|
|
||||||
const path = normalizePath(m.dir) + "/";
|
|
||||||
const files = ["manifest.json", "main.js", "styles.css", "data.json"];
|
|
||||||
const pluginData: { [key: string]: string; } = {};
|
|
||||||
for (const file of files) {
|
|
||||||
const thePath = path + file;
|
|
||||||
if (await this.plugin.vaultAccess.adapterExists(thePath)) {
|
|
||||||
pluginData[file] = await this.plugin.vaultAccess.adapterRead(thePath);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let mtime = 0;
|
|
||||||
if (await this.plugin.vaultAccess.adapterExists(path + "/data.json")) {
|
|
||||||
mtime = (await this.plugin.vaultAccess.adapterStat(path + "/data.json")).mtime;
|
|
||||||
}
|
|
||||||
|
|
||||||
const p: PluginDataEntry = {
|
|
||||||
_id: pluginDataEntryID,
|
|
||||||
dataJson: pluginData["data.json"],
|
|
||||||
deviceVaultName: this.deviceAndVaultName,
|
|
||||||
mainJs: pluginData["main.js"],
|
|
||||||
styleCss: pluginData["styles.css"],
|
|
||||||
manifest: m,
|
|
||||||
manifestJson: pluginData["manifest.json"],
|
|
||||||
mtime: mtime,
|
|
||||||
type: "plugin",
|
|
||||||
};
|
|
||||||
const blob = createTextBlob(JSON.stringify(p));
|
|
||||||
const d: SavingEntry = {
|
|
||||||
_id: p._id,
|
|
||||||
path: p._id as string as FilePathWithPrefix,
|
|
||||||
data: blob,
|
|
||||||
ctime: mtime,
|
|
||||||
mtime: mtime,
|
|
||||||
size: blob.size,
|
|
||||||
children: [],
|
|
||||||
datatype: "plain",
|
|
||||||
type: "plain"
|
|
||||||
};
|
|
||||||
Logger(`check diff:${m.name}(${m.id})`, LOG_LEVEL_VERBOSE);
|
|
||||||
await serialized("plugin-" + m.id, async () => {
|
|
||||||
const old = await this.localDatabase.getDBEntry(p._id as string as FilePathWithPrefix /* This also should be explained */, null, false, false);
|
|
||||||
if (old !== false) {
|
|
||||||
const oldData = { data: old.data, deleted: old._deleted };
|
|
||||||
const newData = { data: d.data, deleted: d._deleted };
|
|
||||||
if (await isDocContentSame(oldData.data, newData.data) && oldData.deleted == newData.deleted) {
|
|
||||||
Logger(`Nothing changed:${m.name}`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
await this.localDatabase.putDBEntry(d);
|
|
||||||
Logger(`Plugin saved:${m.name}`, logLevel);
|
|
||||||
});
|
|
||||||
} catch (ex) {
|
|
||||||
Logger(`Plugin save failed:${m.name}`, LOG_LEVEL_NOTICE);
|
|
||||||
} finally {
|
|
||||||
oldDocs.rows = oldDocs.rows.filter((e) => e.id != pluginDataEntryID);
|
|
||||||
}
|
|
||||||
//remove saved plugin data.
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
await Promise.all(procs);
|
|
||||||
|
|
||||||
const delDocs = oldDocs.rows.map((e) => {
|
|
||||||
// e.doc._deleted = true;
|
|
||||||
if (e.doc.type == "newnote" || e.doc.type == "plain") {
|
|
||||||
e.doc.deleted = true;
|
|
||||||
if (this.settings.deleteMetadataOfDeletedFiles) {
|
|
||||||
e.doc._deleted = true;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
e.doc._deleted = true;
|
|
||||||
}
|
|
||||||
return e.doc;
|
|
||||||
});
|
|
||||||
Logger(`Deleting old plugin:(${delDocs.length})`, LOG_LEVEL_VERBOSE);
|
|
||||||
await this.localDatabase.bulkDocsRaw(delDocs);
|
|
||||||
Logger(`Scan plugin done.`, logLevel);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async applyPluginData(plugin: PluginDataEntry) {
|
|
||||||
await serialized("plugin-" + plugin.manifest.id, async () => {
|
|
||||||
const pluginTargetFolderPath = normalizePath(plugin.manifest.dir) + "/";
|
|
||||||
// @ts-ignore
|
|
||||||
const stat = this.app.plugins.enabledPlugins.has(plugin.manifest.id) == true;
|
|
||||||
if (stat) {
|
|
||||||
// @ts-ignore
|
|
||||||
await this.app.plugins.unloadPlugin(plugin.manifest.id);
|
|
||||||
Logger(`Unload plugin:${plugin.manifest.id}`, LOG_LEVEL_NOTICE);
|
|
||||||
}
|
|
||||||
if (plugin.dataJson)
|
|
||||||
await this.plugin.vaultAccess.adapterWrite(pluginTargetFolderPath + "data.json", plugin.dataJson);
|
|
||||||
Logger("wrote:" + pluginTargetFolderPath + "data.json", LOG_LEVEL_NOTICE);
|
|
||||||
if (stat) {
|
|
||||||
// @ts-ignore
|
|
||||||
await this.app.plugins.loadPlugin(plugin.manifest.id);
|
|
||||||
Logger(`Load plugin:${plugin.manifest.id}`, LOG_LEVEL_NOTICE);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async applyPlugin(plugin: PluginDataEntry) {
|
|
||||||
await serialized("plugin-" + plugin.manifest.id, async () => {
|
|
||||||
// @ts-ignore
|
|
||||||
const stat = this.app.plugins.enabledPlugins.has(plugin.manifest.id) == true;
|
|
||||||
if (stat) {
|
|
||||||
// @ts-ignore
|
|
||||||
await this.app.plugins.unloadPlugin(plugin.manifest.id);
|
|
||||||
Logger(`Unload plugin:${plugin.manifest.id}`, LOG_LEVEL_NOTICE);
|
|
||||||
}
|
|
||||||
|
|
||||||
const pluginTargetFolderPath = normalizePath(plugin.manifest.dir) + "/";
|
|
||||||
if ((await this.plugin.vaultAccess.adapterExists(pluginTargetFolderPath)) === false) {
|
|
||||||
await this.app.vault.adapter.mkdir(pluginTargetFolderPath);
|
|
||||||
}
|
|
||||||
await this.plugin.vaultAccess.adapterWrite(pluginTargetFolderPath + "main.js", plugin.mainJs);
|
|
||||||
await this.plugin.vaultAccess.adapterWrite(pluginTargetFolderPath + "manifest.json", plugin.manifestJson);
|
|
||||||
if (plugin.styleCss)
|
|
||||||
await this.plugin.vaultAccess.adapterWrite(pluginTargetFolderPath + "styles.css", plugin.styleCss);
|
|
||||||
if (stat) {
|
|
||||||
// @ts-ignore
|
|
||||||
await this.app.plugins.loadPlugin(plugin.manifest.id);
|
|
||||||
Logger(`Load plugin:${plugin.manifest.id}`, LOG_LEVEL_NOTICE);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -25,7 +25,7 @@ function readDocument(w: LoadedEntry) {
 if (isImage(w.path)) {
 return new Uint8Array(decodeBinary(w.data));
 }
-if (w.data == "plain") return getDocData(w.data);
+if (w.type == "plain" || w.datatype == "plain") return getDocData(w.data);
 if (isComparableTextDecode(w.path)) return readString(new Uint8Array(decodeBinary(w.data)));
 if (isComparableText(w.path)) return getDocData(w.data);
 try {
@@ -2,7 +2,7 @@
 import ObsidianLiveSyncPlugin from "./main";
 import { onDestroy, onMount } from "svelte";
 import type { AnyEntry, FilePathWithPrefix } from "./lib/src/types";
-import { getDocData, isDocContentSame, readAsBlob } from "./lib/src/utils";
+import { getDocData, isAnyNote, isDocContentSame, readAsBlob } from "./lib/src/utils";
 import { diff_match_patch } from "./deps";
 import { DocumentHistoryModal } from "./DocumentHistoryModal";
 import { isPlainText, stripAllPrefixes } from "./lib/src/path";
@@ -30,7 +30,7 @@

 type HistoryData = {
 id: string;
-rev: string;
+rev?: string;
 path: string;
 dirname: string;
 filename: string;
@@ -53,12 +53,12 @@
 if (docA.mtime < range_from_epoch) {
 continue;
 }
-if (docA.type != "newnote" && docA.type != "plain") continue;
+if (!isAnyNote(docA)) continue;
 const path = plugin.getPath(docA as AnyEntry);
 const isPlain = isPlainText(docA.path);
 const revs = await db.getRaw(docA._id, { revs_info: true });
-let p: string = undefined;
+let p: string | undefined = undefined;
-const reversedRevs = revs._revs_info.reverse();
+const reversedRevs = (revs._revs_info ?? []).reverse();
 const DIFF_DELETE = -1;

 const DIFF_EQUAL = 0;
@@ -177,7 +177,7 @@
 onDestroy(() => {});

 function showHistory(file: string, rev: string) {
-new DocumentHistoryModal(plugin.app, plugin, file as unknown as FilePathWithPrefix, null, rev).open();
+new DocumentHistoryModal(plugin.app, plugin, file as unknown as FilePathWithPrefix, undefined, rev).open();
 }
 function openFile(file: string) {
 plugin.app.workspace.openLinkText(file, file);
@@ -232,7 +232,7 @@
 <td>
 <span class="rev">
 {#if entry.isPlain}
-<a on:click={() => showHistory(entry.path, entry.rev)}>{entry.rev}</a>
+<a on:click={() => showHistory(entry.path, entry?.rev || "")}>{entry.rev}</a>
 {:else}
 {entry.rev}
 {/if}
@@ -6,15 +6,15 @@
 import { mergeObject } from "./utils";

 export let docs: LoadedEntry[] = [];
-export let callback: (keepRev: string, mergedStr?: string) => Promise<void> = async (_, __) => {
+export let callback: (keepRev?: string, mergedStr?: string) => Promise<void> = async (_, __) => {
 Promise.resolve();
 };
 export let filename: FilePath = "" as FilePath;
 export let nameA: string = "A";
 export let nameB: string = "B";
 export let defaultSelect: string = "";
-let docA: LoadedEntry = undefined;
+let docA: LoadedEntry;
-let docB: LoadedEntry = undefined;
+let docB: LoadedEntry;
 let docAContent = "";
 let docBContent = "";
 let objA: any = {};
@@ -28,7 +28,8 @@
 function docToString(doc: LoadedEntry) {
 return doc.datatype == "plain" ? getDocData(doc.data) : readString(new Uint8Array(decodeBinary(doc.data)));
 }
-function revStringToRevNumber(rev: string) {
+function revStringToRevNumber(rev?: string) {
+if (!rev) return "";
 return rev.split("-")[0];
 }

@@ -44,15 +45,15 @@
 }
 function apply() {
 if (docA._id == docB._id) {
-if (mode == "A") return callback(docA._rev, null);
+if (mode == "A") return callback(docA._rev!, undefined);
-if (mode == "B") return callback(docB._rev, null);
+if (mode == "B") return callback(docB._rev!, undefined);
 } else {
-if (mode == "A") return callback(null, docToString(docA));
+if (mode == "A") return callback(undefined, docToString(docA));
-if (mode == "B") return callback(null, docToString(docB));
+if (mode == "B") return callback(undefined, docToString(docB));
 }
-if (mode == "BA") return callback(null, JSON.stringify(objBA, null, 2));
+if (mode == "BA") return callback(undefined, JSON.stringify(objBA, null, 2));
-if (mode == "AB") return callback(null, JSON.stringify(objAB, null, 2));
+if (mode == "AB") return callback(undefined, JSON.stringify(objAB, null, 2));
-callback(null, null);
+callback(undefined, undefined);
 }
 $: {
 if (docs && docs.length >= 1) {
@@ -133,13 +134,17 @@
 {/if}
 <div>
 {nameA}
-{#if docA._id == docB._id} Rev:{revStringToRevNumber(docA._rev)} {/if} ,{new Date(docA.mtime).toLocaleString()}
+{#if docA._id == docB._id}
+Rev:{revStringToRevNumber(docA._rev)}
+{/if} ,{new Date(docA.mtime).toLocaleString()}
 {docAContent.length} letters
 </div>

 <div>
 {nameB}
-{#if docA._id == docB._id} Rev:{revStringToRevNumber(docB._rev)} {/if} ,{new Date(docB.mtime).toLocaleString()}
+{#if docA._id == docB._id}
+Rev:{revStringToRevNumber(docB._rev)}
+{/if} ,{new Date(docB.mtime).toLocaleString()}
 {docBContent.length} letters
 </div>

@@ -2050,7 +2050,7 @@ ${stringifyYaml(pluginConfig)}`;

 const vaultName = new Setting(containerPluginSettings)
 .setName("Device name")
-.setDesc("Unique name between all synchronized devices")
+.setDesc("Unique name between all synchronized devices. To edit this setting, please disable customization sync once.")
 .addText((text) => {
 text.setPlaceholder("desktop")
 .setValue(this.plugin.deviceAndVaultName)
2 src/lib (Submodule src/lib updated: 29e23f5763...98809f37df)
113 src/main.ts
@@ -4,7 +4,7 @@ import { type Diff, DIFF_DELETE, DIFF_EQUAL, DIFF_INSERT, diff_match_patch, stri
 import { Notice, Plugin, TFile, addIcon, TFolder, normalizePath, TAbstractFile, Editor, MarkdownView, type RequestUrlParam, type RequestUrlResponse, requestUrl, type MarkdownFileInfo } from "./deps";
 import { type EntryDoc, type LoadedEntry, type ObsidianLiveSyncSettings, type diff_check_result, type diff_result_leaf, type EntryBody, LOG_LEVEL, VER, DEFAULT_SETTINGS, type diff_result, FLAGMD_REDFLAG, SYNCINFO_ID, SALT_OF_PASSPHRASE, type ConfigPassphraseStore, type CouchDBConnection, FLAGMD_REDFLAG2, FLAGMD_REDFLAG3, PREFIXMD_LOGFILE, type DatabaseConnectingStatus, type EntryHasPath, type DocumentID, type FilePathWithPrefix, type FilePath, type AnyEntry, LOG_LEVEL_DEBUG, LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_URGENT, LOG_LEVEL_VERBOSE, type SavingEntry, MISSING_OR_ERROR, NOT_CONFLICTED, AUTO_MERGED, CANCELLED, LEAVE_TO_SUBSEQUENT, FLAGMD_REDFLAG2_HR, FLAGMD_REDFLAG3_HR, } from "./lib/src/types";
 import { type InternalFileInfo, type CacheData, type FileEventItem, FileWatchEventQueueMax } from "./types";
-import { arrayToChunkedArray, createBlob, fireAndForget, getDocData, isDocContentSame, isObjectDifferent, readContent, sendValue } from "./lib/src/utils";
+import { arrayToChunkedArray, createBlob, determineTypeFromBlob, fireAndForget, getDocData, isAnyNote, isDocContentSame, isObjectDifferent, readContent, sendValue } from "./lib/src/utils";
 import { Logger, setGlobalLogFunction } from "./lib/src/logger";
 import { PouchDB } from "./lib/src/pouchdb-browser.js";
 import { ConflictResolveModal } from "./ConflictResolveModal";
@@ -381,7 +381,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
 const notesList = notes.map(e => e.dispPath);
 const target = await this.askSelectString("File to view History", notesList);
 if (target) {
-const targetId = notes.find(e => e.dispPath == target);
+const targetId = notes.find(e => e.dispPath == target)!;
 this.showHistory(targetId.path, targetId.id);
 }
 }
@@ -399,7 +399,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
 }
 const target = await this.askSelectString("File to resolve conflict", notesList);
 if (target) {
-const targetItem = notes.find(e => e.dispPath == target);
+const targetItem = notes.find(e => e.dispPath == target)!;
 this.resolveConflicted(targetItem.path);
 await this.conflictCheckQueue.waitForPipeline();
 return true;
@@ -426,7 +426,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin
 const limit = Date.now() - (86400 * 1000 * limitDays);
 const notes: { path: string, mtime: number, ttl: number, doc: PouchDB.Core.ExistingDocument<EntryDoc & PouchDB.Core.AllDocsMeta> }[] = [];
 for await (const doc of this.localDatabase.findAllDocs({ conflicts: true })) {
-if (doc.type == "newnote" || doc.type == "plain") {
+if (isAnyNote(doc)) {
 if (doc.deleted && (doc.mtime - limit) < 0) {
 notes.push({ path: this.getPath(doc), mtime: doc.mtime, ttl: (doc.mtime - limit) / 1000 / 86400, doc: doc });
 }
@@ -691,7 +691,7 @@ Note: We can always able to read V1 format. It will be progressively converted.
 name: "Show history",
 callback: () => {
 const file = this.getActiveFile();
-if (file) this.showHistory(file, null);
+if (file) this.showHistory(file, undefined);
 }
 });
 this.addCommand({
@@ -763,7 +763,7 @@ Note: We can always able to read V1 format. It will be progressively converted.
 const ret = this.extractSettingFromWholeText(doc);
 return ret.body != "";
 }
-this.checkAndApplySettingFromMarkdown(ctx.file.path, false);
+if (ctx.file) this.checkAndApplySettingFromMarkdown(ctx.file.path, false);
 },
 })

@@ -1084,7 +1084,7 @@ Note: We can always able to read V1 format. It will be progressively converted.
 async checkAndApplySettingFromMarkdown(filename: string, automated?: boolean) {
 if (automated && !this.settings.notifyAllSettingSyncFile) {
 if (!this.settings.settingSyncFile || this.settings.settingSyncFile != filename) {
-Logger(`Setting file (${filename}) is not matched to the current configuration. skipped.`, LOG_LEVEL_VERBOSE);
+Logger(`Setting file (${filename}) is not matched to the current configuration. skipped.`, LOG_LEVEL_DEBUG);
 return;
 }
 }
@@ -1147,7 +1147,7 @@ Note: We can always able to read V1 format. It will be progressively converted.
 })
 }
 generateSettingForMarkdown(settings?: ObsidianLiveSyncSettings, keepCredential?: boolean): Partial<ObsidianLiveSyncSettings> {
-const saveData = { ...(settings ? settings : this.settings) };
+const saveData = { ...(settings ? settings : this.settings) } as Partial<ObsidianLiveSyncSettings>;
 delete saveData.encryptedCouchDBConnection;
 delete saveData.encryptedPassphrase;
 if (!saveData.writeCredentialsForSettingSync && !keepCredential) {
@@ -1404,13 +1404,12 @@ We can perform a command in this file.
 getFilePath(file: TAbstractFile): string {
 if (file instanceof TFolder) {
 if (file.isRoot()) return "";
-return this.getFilePath(file.parent) + "/" + file.name;
+return this.getFilePath(file.parent!) + "/" + file.name;
 }
 if (file instanceof TFile) {
-return this.getFilePath(file.parent) + "/" + file.name;
+return this.getFilePath(file.parent!) + "/" + file.name;
 }
-
-return this.getFilePath(file.parent) + "/" + file.name;
+return this.getFilePath(file.parent!) + "/" + file.name;
 }

 async watchVaultRenameAsync(file: TFile, oldFile: any, cache?: CacheData) {
@@ -1543,7 +1542,7 @@ We can perform a command in this file.
 await this.deleteVaultItem(file);
 } else {
 // Conflict has been resolved at this time,
-await this.pullFile(path, null, true);
+await this.pullFile(path, undefined, true);
 }
 return;
 }
@@ -1552,8 +1551,8 @@ We can perform a command in this file.

 const doc = existDoc;

-if (doc.datatype != "newnote" && doc.datatype != "plain") {
+if (!isAnyNote(doc)) {
-Logger(msg + "ERROR, Invalid datatype: " + path + "(" + doc.datatype + ")", LOG_LEVEL_NOTICE);
+Logger(msg + "ERROR, Invalid type: " + path + "(" + (doc as any)?.type || "type missing" + ")", LOG_LEVEL_NOTICE);
 return;
 }
 // if (!force && localMtime >= docMtime) return;
@@ -1600,11 +1599,13 @@ We can perform a command in this file.
 await this.vaultAccess.delete(file, true);
 }
 Logger(`xxx <- STORAGE (deleted) ${file.path}`);
-Logger(`files: ${dir.children.length}`);
-if (dir.children.length == 0) {
-if (!this.settings.doNotDeleteFolder) {
-Logger(`All files under the parent directory (${dir.path}) have been deleted, so delete this one.`);
-await this.deleteVaultItem(dir);
+if (dir) {
+Logger(`files: ${dir.children.length}`);
+if (dir.children.length == 0) {
+if (!this.settings.doNotDeleteFolder) {
+Logger(`All files under the parent directory (${dir.path}) have been deleted, so delete this one.`);
+await this.deleteVaultItem(dir);
+}
 }
 }
 }
@@ -1635,7 +1636,7 @@ We can perform a command in this file.
 const chunkedIds = arrayToChunkedArray(ids, batchSize);
 for await (const idsBatch of chunkedIds) {
 const ret = await this.localDatabase.allDocsRaw<EntryDoc>({ keys: idsBatch, include_docs: true, limit: 100 });
-this.replicationResultProcessor.enqueueAll(ret.rows.map(doc => doc.doc));
+this.replicationResultProcessor.enqueueAll(ret.rows.map(doc => doc.doc!));
 await this.replicationResultProcessor.waitForPipeline();
 }

@@ -1643,12 +1644,11 @@ We can perform a command in this file.

 databaseQueueCount = reactiveSource(0);
 databaseQueuedProcessor = new QueueProcessor(async (docs: EntryBody[]) => {
-const dbDoc = docs[0];
+const dbDoc = docs[0] as LoadedEntry; // It has no `data`
 const path = this.getPath(dbDoc);
 // If `Read chunks online` is disabled, chunks should be transferred before here.
 // However, in some cases, chunks are after that. So, if missing chunks exist, we have to wait for them.
-const datatype = (!("type" in dbDoc) || dbDoc.type == "notes") ? "newnote" : dbDoc.type;
-const doc = await this.localDatabase.getDBEntryFromMeta({ ...dbDoc, datatype, data: [] }, {}, false, true, true);
+const doc = await this.localDatabase.getDBEntryFromMeta({ ...dbDoc }, {}, false, true, true);
 if (!doc) {
 Logger(`Something went wrong while gathering content of ${path} (${dbDoc._id.substring(0, 8)}, ${dbDoc._rev?.substring(0, 10)}) `, LOG_LEVEL_NOTICE)
 return;
@@ -1710,7 +1710,7 @@ We can perform a command in this file.
 ) {
 return;
 }
-if (change.type == "plain" || change.type == "newnote") {
+if (isAnyNote(change)) {
 if (this.databaseQueuedProcessor._isSuspended) {
 Logger(`Processing scheduled: ${change.path}`, LOG_LEVEL_INFO);
 }
@@ -1883,7 +1883,7 @@ We can perform a command in this file.
 scheduleTask("log-hide", 3000, () => { this.statusLog.value = "" });
 }

-async replicate(showMessage?: boolean) {
+async replicate(showMessage: boolean = false) {
 if (!this.isReady) return;
 if (isLockAcquired("cleanup")) {
 Logger("Database cleaning up is in process. replication has been cancelled", LOG_LEVEL_NOTICE);
@@ -1956,7 +1956,7 @@ Or if you are sure know what had been happened, we can unlock the database from
 return ret;
 }

-async initializeDatabase(showingNotice?: boolean, reopenDatabase = true) {
+async initializeDatabase(showingNotice: boolean = false, reopenDatabase = true) {
 this.isReady = false;
 if ((!reopenDatabase) || await this.openDatabase()) {
 if (this.localDatabase.isReady) {
@@ -1974,17 +1974,17 @@ Or if you are sure know what had been happened, we can unlock the database from
 }
 }

-async replicateAllToServer(showingNotice?: boolean) {
+async replicateAllToServer(showingNotice: boolean = false) {
 if (!this.isReady) return false;
 await Promise.all(this.addOns.map(e => e.beforeReplicate(showingNotice)));
 return await this.replicator.replicateAllToServer(this.settings, showingNotice);
 }
-async replicateAllFromServer(showingNotice?: boolean) {
+async replicateAllFromServer(showingNotice: boolean = false) {
 if (!this.isReady) return false;
 return await this.replicator.replicateAllFromServer(this.settings, showingNotice);
 }

-async markRemoteLocked(lockByClean?: boolean) {
+async markRemoteLocked(lockByClean: boolean = false) {
 return await this.replicator.markRemoteLocked(this.settings, true, lockByClean);
 }

@@ -2096,7 +2096,7 @@ Or if you are sure know what had been happened, we can unlock the database from
 const w = await this.localDatabase.getDBEntryMeta(e, {}, true);
 if (w && !(w.deleted || w._deleted)) {
 if (!this.isFileSizeExceeded(w.size)) {
-await this.pullFile(e, filesStorage, false, null, false);
+await this.pullFile(e, filesStorage, false, undefined, false);
 fireAndForget(() => this.checkAndApplySettingFromMarkdown(e, true));
 Logger(`Check or pull from db:${e} OK`);
 } else {
@@ -2414,11 +2414,11 @@ Or if you are sure know what had been happened, we can unlock the database from
 const conflictedRevNo = Number(conflictedRev.split("-")[0]);
 //Search
 const revFrom = (await this.localDatabase.getRaw<EntryDoc>(await this.path2id(path), { revs_info: true }));
-const commonBase = revFrom._revs_info.filter(e => e.status == "available" && Number(e.rev.split("-")[0]) < conflictedRevNo).first()?.rev ?? "";
+const commonBase = (revFrom._revs_info || []).filter(e => e.status == "available" && Number(e.rev.split("-")[0]) < conflictedRevNo).first()?.rev ?? "";
 let p = undefined;
 if (commonBase) {
 if (isSensibleMargeApplicable(path)) {
-const result = await this.mergeSensibly(path, commonBase, test._rev, conflictedRev);
+const result = await this.mergeSensibly(path, commonBase, test._rev!, conflictedRev);
 if (result) {
 p = result.filter(e => e[0] != DIFF_DELETE).map((e) => e[1]).join("");
 // can be merged.
@@ -2428,7 +2428,7 @@ Or if you are sure know what had been happened, we can unlock the database from
 }
 } else if (isObjectMargeApplicable(path)) {
 // can be merged.
-const result = await this.mergeObject(path, commonBase, test._rev, conflictedRev);
+const result = await this.mergeObject(path, commonBase, test._rev!, conflictedRev);
 if (result) {
 Logger(`Object merge:${path}`, LOG_LEVEL_INFO);
 p = result;
@@ -2457,7 +2457,7 @@ Or if you are sure know what had been happened, we can unlock the database from
 }
 }
 // should be one or more conflicts;
-const leftLeaf = await this.getConflictedDoc(path, test._rev);
+const leftLeaf = await this.getConflictedDoc(path, test._rev!);
 const rightLeaf = await this.getConflictedDoc(path, conflicts[0]);
 if (leftLeaf == false) {
 // what's going on..
@@ -2467,7 +2467,7 @@ Or if you are sure know what had been happened, we can unlock the database from
 if (rightLeaf == false) {
 // Conflicted item could not load, delete this.
 await this.localDatabase.deleteDBEntry(path, { rev: conflicts[0] });
-await this.pullFile(path, null, true);
+await this.pullFile(path, undefined, true);
 Logger(`could not get old revisions, automatically used newer one:${path}`, LOG_LEVEL_NOTICE);
 return AUTO_MERGED;
 }
@@ -2483,7 +2483,7 @@ Or if you are sure know what had been happened, we can unlock the database from
 loser = rightLeaf;
 }
 await this.localDatabase.deleteDBEntry(path, { rev: loser.rev });
-await this.pullFile(path, null, true);
+await this.pullFile(path, undefined, true);
 Logger(`Automatically merged (${isSame ? "same," : ""}${isBinary ? "binary," : ""}${alwaysNewer ? "alwaysNewer" : ""}) :${path}`, LOG_LEVEL_NOTICE);
 return AUTO_MERGED;
 }
@@ -2561,16 +2561,16 @@ Or if you are sure know what had been happened, we can unlock the database from
 if (selected === CANCELLED) {
 // Cancelled by UI, or another conflict.
 Logger(`Merge: Cancelled ${filename}`, LOG_LEVEL_INFO);
-return;
+return false;
 }
 const testDoc = await this.localDatabase.getDBEntry(filename, { conflicts: true }, false, false, true);
 if (testDoc === false) {
 Logger(`Merge: Could not read ${filename} from the local database`, LOG_LEVEL_VERBOSE);
-return;
+return false;
 }
 if (!testDoc._conflicts) {
 Logger(`Merge: Nothing to do ${filename}`, LOG_LEVEL_VERBOSE);
-return;
+return false;
 }
 const toDelete = selected;
 const toKeep = conflictCheckResult.left.rev != toDelete ? conflictCheckResult.left.rev : conflictCheckResult.right.rev;
@@ -2592,11 +2592,11 @@ Or if you are sure know what had been happened, we can unlock the database from
 Logger(`Merge: Changes has been concatenated: ${filename}`);
 } else if (typeof toDelete === "string") {
 await this.localDatabase.deleteDBEntry(filename, { rev: toDelete });
-await this.pullFile(filename, null, true, toKeep);
+await this.pullFile(filename, undefined, true, toKeep);
 Logger(`Conflict resolved:${filename}`);
 } else {
 Logger(`Merge: Something went wrong: ${filename}, (${toDelete})`, LOG_LEVEL_NOTICE);
-return;
+return false;
 }
 // In here, some merge has been processed.
 // So we have to run replication if configured.
@@ -2605,6 +2605,7 @@ Or if you are sure know what had been happened, we can unlock the database from
 }
 // And, check it again.
 this.conflictCheckQueue.enqueue(filename);
+return false;
 }

 async pullFile(filename: FilePathWithPrefix, fileList?: TFile[], force?: boolean, rev?: string, waitForReady = true) {
@@ -2612,7 +2613,7 @@ Or if you are sure know what had been happened, we can unlock the database from
 if (!await this.isTargetFile(filename)) return;
 if (targetFile == null) {
 //have to create;
-const doc = await this.localDatabase.getDBEntry(filename, rev ? { rev: rev } : null, false, waitForReady);
+const doc = await this.localDatabase.getDBEntry(filename, rev ? { rev: rev } : undefined, false, waitForReady);
 if (doc === false) {
 Logger(`${filename} Skipped`);
 return;
@@ -2621,7 +2622,7 @@ Or if you are sure know what had been happened, we can unlock the database from
 } else if (targetFile instanceof TFile) {
 //normal case
 const file = targetFile;
-const doc = await this.localDatabase.getDBEntry(filename, rev ? { rev: rev } : null, false, waitForReady);
+const doc = await this.localDatabase.getDBEntry(filename, rev ? { rev: rev } : undefined, false, waitForReady);
 if (doc === false) {
 Logger(`${filename} Skipped`);
 return;
@@ -2661,7 +2662,7 @@ Or if you are sure know what had been happened, we can unlock the database from
 case TARGET_IS_NEW:
 if (!this.isFileSizeExceeded(doc.size)) {
 Logger("STORAGE <- DB :" + file.path);
-const docx = await this.localDatabase.getDBEntry(getPathFromTFile(file), null, false, false, true);
+const docx = await this.localDatabase.getDBEntry(getPathFromTFile(file), undefined, false, false, true);
 if (docx != false) {
 await this.processEntryDoc(docx, file);
 } else {
@@ -2687,22 +2688,12 @@ Or if you are sure know what had been happened, we can unlock the database from
 return true;
 }
 // let content: Blob;
-// let datatype: "plain" | "newnote" = "newnote";
 const isPlain = isPlainText(file.name);
 const possiblyLarge = !isPlain;
 // if (!cache) {
 if (possiblyLarge) Logger(`Reading : ${file.path}`, LOG_LEVEL_VERBOSE);
 const content = createBlob(await this.vaultAccess.vaultReadAuto(file));
-const datatype = isPlain ? "plain" : "newnote";
+const datatype = determineTypeFromBlob(content);
-// }
-// else if (cache instanceof ArrayBuffer) {
-// Logger(`Cache Reading: ${file.path}`, LOG_LEVEL_VERBOSE);
-// content = createBinaryBlob(cache);
-// datatype = "newnote"
-// } else {
-// content = createTextBlob(cache);
-// datatype = "plain";
-// }
 if (possiblyLarge) Logger(`Processing: ${file.path}`, LOG_LEVEL_VERBOSE);
 const fullPath = getPathFromTFile(file);
 const id = await this.path2id(fullPath);
@@ -2724,7 +2715,7 @@ Or if you are sure know what had been happened, we can unlock the database from
 return true;
 }
 try {
-const old = await this.localDatabase.getDBEntry(fullPath, null, false, false);
+const old = await this.localDatabase.getDBEntry(fullPath, undefined, false, false);
 if (old !== false) {
 const oldData = { data: old.data, deleted: old._deleted || old.deleted };
 const newData = { data: d.data, deleted: d._deleted || d.deleted };
@@ -2800,7 +2791,7 @@ Or if you are sure know what had been happened, we can unlock the database from
 const id = await this.path2id(path);
 const doc = await this.localDatabase.getRaw<AnyEntry>(id, { conflicts: true });
 // If there is no conflict, return with false.
-if (!("_conflicts" in doc)) return false;
+if (!("_conflicts" in doc) || doc._conflicts === undefined) return false;
 if (doc._conflicts.length == 0) return false;
 Logger(`Hidden file conflicted:${this.getPath(doc)}`);
 const conflicts = doc._conflicts.sort((a, b) => Number(a.split("-")[0]) - Number(b.split("-")[0]));
@@ -2833,7 +2824,7 @@ Or if you are sure know what had been happened, we can unlock the database from
 }
 async getIgnoreFile(path: string) {
 if (this.ignoreFileCache.has(path)) {
-return this.ignoreFileCache.get(path);
+return this.ignoreFileCache.get(path) ?? false;
 } else {
 return await this.readIgnoreFile(path);
 }
@@ -2909,9 +2900,9 @@ Or if you are sure know what had been happened, we can unlock the database from
 const fragment = createFragment((doc) => {

 const [beforeText, afterText] = dialogText.split("{HERE}", 2);
-doc.createEl("span", null, (a) => {
+doc.createEl("span", undefined, (a) => {
 a.appendText(beforeText);
-a.appendChild(a.createEl("a", null, (anchor) => {
+a.appendChild(a.createEl("a", undefined, (anchor) => {
 anchorCallback(anchor);
 }));

@@ -451,7 +451,11 @@ export function isMarkedAsSameChanges(file: TFile | AnyEntry | string, mtimes: n
 return EVEN;
 }
 }
-export function compareFileFreshness(baseFile: TFile | AnyEntry, checkTarget: TFile | AnyEntry): typeof BASE_IS_NEW | typeof TARGET_IS_NEW | typeof EVEN {
+export function compareFileFreshness(baseFile: TFile | AnyEntry | undefined, checkTarget: TFile | AnyEntry | undefined): typeof BASE_IS_NEW | typeof TARGET_IS_NEW | typeof EVEN {
+if (baseFile === undefined && checkTarget == undefined) return EVEN;
+if (baseFile == undefined) return TARGET_IS_NEW;
+if (checkTarget == undefined) return BASE_IS_NEW;

 const modifiedBase = baseFile instanceof TFile ? baseFile?.stat?.mtime ?? 0 : baseFile?.mtime ?? 0;
 const modifiedTarget = checkTarget instanceof TFile ? checkTarget?.stat?.mtime ?? 0 : checkTarget?.mtime ?? 0;
