mirror of
https://github.com/vrtmrz/obsidian-livesync.git
synced 2026-02-22 20:18:48 +00:00
Compare commits
8 Commits
| Author | SHA1 | Date |
|---|---|---|
| | e2f06181fa | |
| | bb6d787607 | |
| | cb406e2db6 | |
| | 0a1248c5fc | |
| | 7b9b934c61 | |
| | 27505f3024 | |
| | 1cddcf8b95 | |
| | fddc466b0f | |
@@ -2,8 +2,9 @@
[Japanese docs](./README_ja.md).

Self-hosted LiveSync is a community-implemented synchronization plugin.
It uses a self-hosted CouchDB, or one you have procured, as the server. It is available on every device on which Obsidian is installed.

Note: It is not compatible with the official "Sync" service.

![obsidian_live_sync_demo](https://user-images.githubusercontent.com/45774780/137355323-f57a8b09-abf2-4501-836c-8cb7d2ff24a3.gif)
@@ -37,6 +38,8 @@ First, get your database ready. IBM Cloudant is preferred for testing. Or you ca
1. [Setup IBM Cloudant](docs/setup_cloudant.md)
2. [Setup your CouchDB](docs/setup_own_server.md)

Note: Information about hosting services is wanted! Currently, [Using fly.io](https://github.com/vrtmrz/obsidian-livesync/discussions/85) is on the table.
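Whichever option you choose, it can save time to confirm that the database is reachable before configuring the plugin. The snippet below is only an illustrative sketch and is not part of the plugin; the function name and parameters are assumptions. CouchDB (and Cloudant, which is CouchDB-based) answers `GET /_up` once it is ready to serve requests.

```typescript
// Illustrative sketch only (not part of the plugin): check that a CouchDB /
// Cloudant instance is reachable and accepts your credentials.
// CouchDB answers GET /_up with {"status":"ok"} when it is ready.
async function checkCouchDbReachable(baseUrl: string, user: string, password: string): Promise<boolean> {
    const url = baseUrl.replace(/\/+$/, "") + "/_up";
    const res = await fetch(url, {
        headers: { Authorization: "Basic " + btoa(`${user}:${password}`) },
    });
    return res.ok;
}

// Example (hypothetical values): checkCouchDbReachable("https://your-couchdb.example.com", "username", "password")
```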
### First device

1. Install the plugin on your device.
@@ -151,6 +151,24 @@ Self-hosted LiveSync will delete the folder when the folder becomes empty. If th
### Use newer file if conflicted (beta)
Always use the newer file to resolve and overwrite when a conflict has occurred.
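In other words, the revision with the older modification time is simply discarded. As a rough illustration only, and not the plugin's actual code, resolving a PouchDB conflict in favour of the newer `mtime` looks roughly like this (the `TimedEntry` shape and helper name are assumptions):

```typescript
// Rough illustration only, not the plugin's actual implementation.
// Entries are assumed to carry their modification time in `mtime` (milliseconds).
import PouchDB from "pouchdb";

interface TimedEntry {
    mtime: number;
    data: string;
}

async function keepNewerRevision(db: PouchDB.Database<TimedEntry>, id: string): Promise<void> {
    const current = await db.get(id, { conflicts: true });
    const conflictRev = current._conflicts?.[0];
    if (!conflictRev) return; // no conflict to resolve
    const other = await db.get(id, { rev: conflictRev });
    // The revision with the older mtime loses; removing it settles the conflict,
    // so the newer file is what remains after the next replication.
    const loser = other.mtime > current.mtime ? current : other;
    await db.remove(loser._id, loser._rev);
}
```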
### Experimental
### Sync hidden files

Synchronize hidden files.

- Scan hidden files before replication.
  If you enable this option, all hidden files are scanned once before replication.

- Scan hidden files periodically.
  If you enable this option, all hidden files will be scanned every [n] seconds.

Hidden files are not actively detected, so scanning is required.

Each scan stores the file along with its modification time. If a file has disappeared, that fact is also stored. Then, when the entry of a hidden file has been replicated, it is reflected in the storage if the entry is newer than the one in storage.

Therefore, the device clock must be set correctly. If the modification time is determined to be older, the changeset will be skipped or cancelled (which means **deleted**), even if the file appeared in a hidden folder.
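Concretely, the comparison happens at one-second resolution and the newer side wins. A simplified sketch of this decision follows; the names and types are illustrative, not the plugin's exact API:

```typescript
// Simplified sketch of the decision above; names and types are illustrative only.
// Modification times are compared at one-second resolution, and the newer side wins.
type HiddenFileState = { mtime: number; deleted?: boolean };

function compareBySecond(a: number, b: number): number {
    return Math.floor(a / 1000) - Math.floor(b / 1000);
}

function decideDirection(
    onStorage: HiddenFileState | undefined,
    onDatabase: HiddenFileState | undefined
): "push" | "pull" | "skip" {
    if (onStorage && !onDatabase) return "push"; // only local: store it in the database
    if (!onStorage && onDatabase) return onDatabase.deleted ? "skip" : "pull"; // only remote
    if (!onStorage || !onDatabase) return "skip";
    const diff = compareBySecond(onStorage.mtime, onDatabase.mtime);
    if (diff > 0) return "push"; // local copy is newer: write it to the database
    if (diff < 0) return "pull"; // replicated entry is newer: overwrite (or delete) the local copy
    return "skip"; // same second: nothing to do
}
```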
### Advanced settings
Self-hosted LiveSync uses PouchDB and synchronizes with the remote via [this protocol](https://docs.couchdb.org/en/stable/replication/protocol.html).
Therefore, it splits every entry into chunks so that they stay within the payload size and document size that the database accepts.
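The actual splitting is handled by `splitPieces2` in the bundled library; the snippet below is only a simplified illustration of the idea (fixed-size pieces and the size limit are assumptions), showing why a single note can become several small documents:

```typescript
// Simplified illustration only; the plugin's real splitting (splitPieces2)
// chooses chunk boundaries more cleverly. The idea: cut a note body into
// pieces small enough for the database's payload/document size limits.
function splitIntoChunks(content: string, maxChunkSize = 100 * 1024): string[] {
    const chunks: string[] = [];
    for (let offset = 0; offset < content.length; offset += maxChunkSize) {
        chunks.push(content.slice(offset, offset + maxChunkSize));
    }
    return chunks;
}
// The note entry then stores only references to these chunks, and each chunk
// is saved as its own small "leaf" document.
```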
@@ -150,6 +150,30 @@ Self-hosted LiveSyncは通常、フォルダ内のファイルがすべて削除
### Use newer file if conflicted (beta)
When a conflict occurs, always use the newer file to resolve it automatically.

### Experimental
### Sync hidden files

Synchronize hidden files.

- Scan hidden files before replication.
  If you enable this option, hidden files are scanned before replication is performed.

- Scan hidden files periodically.
  If you enable this option, hidden files are scanned every n seconds.

Hidden files are not actively detected, so scanning is required.
Each scan stores the file's modification time along with the file. If a file has been deleted, that fact is also stored. When the entry recording this file is replicated, it is reflected in the storage if it is newer than the one in storage.

Therefore, the device clock must be set correctly. Even if a file is created in a hidden folder, it will be skipped or cancelled (that is, deleted) if its modification time is determined to be older.

### Advanced settings
Self-hosted LiveSync uses PouchDB and synchronizes with the remote via [this protocol](https://docs.couchdb.org/en/stable/replication/protocol.html).
Therefore, every note and entry is split into chunks according to the payload size and document size that the database accepts.
@@ -3,7 +3,7 @@ import process from "process";
|
||||
import builtins from "builtin-modules";
|
||||
import sveltePlugin from "esbuild-svelte";
|
||||
import sveltePreprocess from "svelte-preprocess";
|
||||
|
||||
import fs from "node:fs";
|
||||
const banner = `/*
|
||||
THIS IS A GENERATED/BUNDLED FILE BY ESBUILD
|
||||
if you want to view the source, please visit the github repository of this plugin
|
||||
@@ -11,7 +11,8 @@ if you want to view the source, please visit the github repository of this plugi
|
||||
`;
|
||||
|
||||
const prod = process.argv[2] === "production";
|
||||
|
||||
const manifestJson = JSON.parse(fs.readFileSync("./manifest.json"));
|
||||
const packageJson = JSON.parse(fs.readFileSync("./package.json"));
|
||||
esbuild
|
||||
.build({
|
||||
banner: {
|
||||
@@ -19,6 +20,10 @@ esbuild
|
||||
},
|
||||
entryPoints: ["src/main.ts"],
|
||||
bundle: true,
|
||||
define: {
|
||||
"MANIFEST_VERSION": `"${manifestJson.version}"`,
|
||||
"PACKAGE_VERSION": `"${packageJson.version}"`,
|
||||
},
|
||||
external: ["obsidian", "electron", ...builtins],
|
||||
format: "cjs",
|
||||
watch: !prod,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"id": "obsidian-livesync",
|
||||
"name": "Self-hosted LiveSync",
|
||||
"version": "0.11.10",
|
||||
"version": "0.12.2",
|
||||
"minAppVersion": "0.9.12",
|
||||
"description": "Community implementation of self-hosted livesync. Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.",
|
||||
"author": "vorotamoroz",
|
||||
|
||||
4
package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "obsidian-livesync",
|
||||
"version": "0.11.10",
|
||||
"version": "0.12.2",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "obsidian-livesync",
|
||||
"version": "0.11.10",
|
||||
"version": "0.12.2",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"diff-match-patch": "^1.0.5",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "obsidian-livesync",
|
||||
"version": "0.11.10",
|
||||
"version": "0.12.2",
|
||||
"description": "Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.",
|
||||
"main": "main.js",
|
||||
"type": "module",
|
||||
|
||||
@@ -20,6 +20,7 @@ import {
|
||||
MILSTONE_DOCID,
|
||||
DatabaseConnectingStatus,
|
||||
ChunkVersionRange,
|
||||
NoteEntry,
|
||||
} from "./lib/src/types";
|
||||
import { RemoteDBSettings } from "./lib/src/types";
|
||||
import { resolveWithIgnoreKnownError, runWithLock, shouldSplitAsPlainText, splitPieces2, enableEncryption } from "./lib/src/utils";
|
||||
@@ -304,7 +305,7 @@ export class LocalPouchDB {
|
||||
} else {
|
||||
obj = await this.localDatabase.get(id);
|
||||
}
|
||||
|
||||
const deleted = "deleted" in obj ? obj.deleted : undefined;
|
||||
if (obj.type && obj.type == "leaf") {
|
||||
//do nothing for leaf;
|
||||
return false;
|
||||
@@ -330,6 +331,8 @@ export class LocalPouchDB {
|
||||
_conflicts: obj._conflicts,
|
||||
children: children,
|
||||
datatype: type,
|
||||
deleted: deleted,
|
||||
type: type
|
||||
};
|
||||
return doc;
|
||||
}
|
||||
@@ -350,6 +353,7 @@ export class LocalPouchDB {
|
||||
} else {
|
||||
obj = await this.localDatabase.get(id);
|
||||
}
|
||||
const deleted = "deleted" in obj ? obj.deleted : undefined;
|
||||
|
||||
if (obj.type && obj.type == "leaf") {
|
||||
//do nothing for leaf;
|
||||
@@ -358,7 +362,7 @@ export class LocalPouchDB {
|
||||
|
||||
//Check it out and fix docs to regular case
|
||||
if (!obj.type || (obj.type && obj.type == "notes")) {
|
||||
const note = obj as Entry;
|
||||
const note = obj as NoteEntry;
|
||||
const doc: LoadedEntry & PouchDB.Core.IdMeta & PouchDB.Core.GetMeta = {
|
||||
data: note.data,
|
||||
_id: note._id,
|
||||
@@ -370,6 +374,8 @@ export class LocalPouchDB {
|
||||
_conflicts: obj._conflicts,
|
||||
children: [],
|
||||
datatype: "newnote",
|
||||
deleted: deleted,
|
||||
type: "newnote",
|
||||
};
|
||||
if (typeof this.corruptedEntries[doc._id] != "undefined") {
|
||||
delete this.corruptedEntries[doc._id];
|
||||
@@ -414,6 +420,8 @@ export class LocalPouchDB {
|
||||
children: obj.children,
|
||||
datatype: obj.type,
|
||||
_conflicts: obj._conflicts,
|
||||
deleted: deleted,
|
||||
type: obj.type
|
||||
};
|
||||
if (dump) {
|
||||
Logger(`therefore:`);
|
||||
@@ -684,7 +692,7 @@ export class LocalPouchDB {
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
const r = await this.localDatabase.put(newDoc, { force: true });
|
||||
const r = await this.localDatabase.put<PlainEntry | NewEntry>(newDoc, { force: true });
|
||||
if (typeof this.corruptedEntries[note._id] != "undefined") {
|
||||
delete this.corruptedEntries[note._id];
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { App, PluginSettingTab, Setting, sanitizeHTMLToDom, RequestUrlParam, requestUrl } from "obsidian";
|
||||
import { App, PluginSettingTab, Setting, sanitizeHTMLToDom, RequestUrlParam, requestUrl, TextAreaComponent } from "obsidian";
|
||||
import { EntryDoc, LOG_LEVEL, RemoteDBSettings } from "./lib/src/types";
|
||||
import { path2id, id2path } from "./utils";
|
||||
import { delay, runWithLock } from "./lib/src/utils";
|
||||
@@ -591,6 +591,31 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
|
||||
});
|
||||
text.inputEl.setAttribute("type", "number");
|
||||
});
|
||||
let newDatabaseName = this.plugin.settings.additionalSuffixOfDatabaseName + "";
|
||||
new Setting(containerLocalDatabaseEl)
|
||||
.setName("Database suffix")
|
||||
.setDesc("Set unique name for using same vault name on different directory.")
|
||||
.addText((text) => {
|
||||
text.setPlaceholder("")
|
||||
.setValue(newDatabaseName)
|
||||
.onChange((value) => {
|
||||
newDatabaseName = value;
|
||||
|
||||
});
|
||||
}).addButton((button) => {
|
||||
button.setButtonText("Change")
|
||||
.onClick(async () => {
|
||||
if (this.plugin.settings.additionalSuffixOfDatabaseName == newDatabaseName) {
|
||||
Logger("Suffix was not changed.", LOG_LEVEL.NOTICE);
|
||||
return;
|
||||
}
|
||||
this.plugin.settings.additionalSuffixOfDatabaseName = newDatabaseName;
|
||||
await this.plugin.saveSettings();
|
||||
Logger("Suffix has been changed. Reopening database...", LOG_LEVEL.NOTICE);
|
||||
await this.plugin.initializeDatabase();
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
addScreenElement("10", containerLocalDatabaseEl);
|
||||
const containerGeneralSettingsEl = containerEl.createDiv();
|
||||
@@ -765,9 +790,109 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
|
||||
await this.plugin.saveSettings();
|
||||
})
|
||||
);
|
||||
|
||||
containerSyncSettingEl.createEl("h3", {
|
||||
text: sanitizeHTMLToDom(`Experimental`),
|
||||
});
|
||||
new Setting(containerSyncSettingEl)
|
||||
.setName("Sync hidden files.")
|
||||
.addToggle((toggle) =>
|
||||
toggle.setValue(this.plugin.settings.syncInternalFiles).onChange(async (value) => {
|
||||
this.plugin.settings.syncInternalFiles = value;
|
||||
await this.plugin.saveSettings();
|
||||
})
|
||||
);
|
||||
new Setting(containerSyncSettingEl)
|
||||
.setName("Scan hidden files before replication.")
|
||||
.addToggle((toggle) =>
|
||||
toggle.setValue(this.plugin.settings.syncInternalFilesBeforeReplication).onChange(async (value) => {
|
||||
this.plugin.settings.syncInternalFilesBeforeReplication = value;
|
||||
await this.plugin.saveSettings();
|
||||
})
|
||||
);
|
||||
new Setting(containerSyncSettingEl)
|
||||
.setName("Scan hidden files periodicaly.")
|
||||
.setDesc("Seconds, zero to disable.")
|
||||
.addText((text) => {
|
||||
text.setPlaceholder("")
|
||||
.setValue(this.plugin.settings.syncInternalFilesInterval + "")
|
||||
.onChange(async (value) => {
|
||||
let v = Number(value);
|
||||
if (isNaN(v) || v < 10) {
|
||||
v = 10;
|
||||
}
|
||||
this.plugin.settings.syncInternalFilesInterval = v;
|
||||
await this.plugin.saveSettings();
|
||||
});
|
||||
text.inputEl.setAttribute("type", "number");
|
||||
});
|
||||
let skipPatternTextArea: TextAreaComponent = null;
|
||||
const defaultSkipPattern = "\\/node_modules\\/, \\/\\.git\\/, \\/obsidian-livesync\\/";
|
||||
const defaultSkipPatternXPlat = defaultSkipPattern + ",\\/workspace$";
|
||||
new Setting(containerSyncSettingEl)
|
||||
.setName("Skip patterns")
|
||||
.setDesc(
|
||||
"Regular expression, If you use hidden file sync between desktop and mobile, adding `workspace$` is recommended."
|
||||
)
|
||||
.addTextArea((text) => {
|
||||
text
|
||||
.setValue(this.plugin.settings.syncInternalFilesIgnorePatterns)
|
||||
.setPlaceholder("\\/node_modules\\/, \\/\\.git\\/")
|
||||
.onChange(async (value) => {
|
||||
this.plugin.settings.syncInternalFilesIgnorePatterns = value;
|
||||
await this.plugin.saveSettings();
|
||||
})
|
||||
skipPatternTextArea = text;
|
||||
return text;
|
||||
}
|
||||
);
|
||||
new Setting(containerSyncSettingEl)
|
||||
.setName("Skip patterns defaults")
|
||||
.addButton((button) => {
|
||||
button.setButtonText("Default")
|
||||
.onClick(async () => {
|
||||
skipPatternTextArea.setValue(defaultSkipPattern);
|
||||
this.plugin.settings.syncInternalFilesIgnorePatterns = defaultSkipPattern;
|
||||
await this.plugin.saveSettings();
|
||||
})
|
||||
}).addButton((button) => {
|
||||
button.setButtonText("Cross-platform")
|
||||
.onClick(async () => {
|
||||
skipPatternTextArea.setValue(defaultSkipPatternXPlat);
|
||||
this.plugin.settings.syncInternalFilesIgnorePatterns = defaultSkipPatternXPlat;
|
||||
await this.plugin.saveSettings();
|
||||
})
|
||||
})
|
||||
|
||||
new Setting(containerSyncSettingEl)
|
||||
.setName("Touch hidden files")
|
||||
.setDesc("Update the modified time of all hidden files to the current time.")
|
||||
.addButton((button) =>
|
||||
button
|
||||
.setButtonText("Touch")
|
||||
.setWarning()
|
||||
.setDisabled(false)
|
||||
.setClass("sls-btn-left")
|
||||
.onClick(async () => {
|
||||
const filesAll = await this.plugin.scanInternalFiles();
|
||||
const targetFiles = await this.plugin.filterTargetFiles(filesAll);
|
||||
const now = Date.now();
|
||||
const newFiles = targetFiles.map(e => ({ ...e, mtime: now }));
|
||||
let i = 0;
|
||||
const maxFiles = newFiles.length;
|
||||
for (const file of newFiles) {
|
||||
i++;
|
||||
Logger(`Touched:${file.path} (${i}/${maxFiles})`, LOG_LEVEL.NOTICE, "touch-files");
|
||||
await this.plugin.applyMTimeToFile(file);
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
containerSyncSettingEl.createEl("h3", {
|
||||
text: sanitizeHTMLToDom(`Advanced settings`),
|
||||
});
|
||||
containerSyncSettingEl.createEl("div", {
|
||||
text: sanitizeHTMLToDom(`Advanced settings<br>
|
||||
If you reached the payload size limit when using IBM Cloudant, please set batch size and batch limit to a lower value.`),
|
||||
text: `If you reached the payload size limit when using IBM Cloudant, please set batch size and batch limit to a lower value.`,
|
||||
});
|
||||
new Setting(containerSyncSettingEl)
|
||||
.setName("Batch size")
|
||||
|
||||
2
src/lib
Submodule src/lib updated: 548265c701...1133f82732
511
src/main.ts
@@ -1,8 +1,8 @@
|
||||
import { debounce, Notice, Plugin, TFile, addIcon, TFolder, normalizePath, TAbstractFile, Editor, MarkdownView, PluginManifest, Modal, App, FuzzySuggestModal, Setting } from "obsidian";
|
||||
import { diff_match_patch } from "diff-match-patch";
|
||||
|
||||
import { EntryDoc, LoadedEntry, ObsidianLiveSyncSettings, diff_check_result, diff_result_leaf, EntryBody, LOG_LEVEL, VER, DEFAULT_SETTINGS, diff_result, FLAGMD_REDFLAG, SYNCINFO_ID } from "./lib/src/types";
|
||||
import { PluginDataEntry, PERIODIC_PLUGIN_SWEEP, PluginList, DevicePluginList } from "./types";
|
||||
import { EntryDoc, LoadedEntry, ObsidianLiveSyncSettings, diff_check_result, diff_result_leaf, EntryBody, LOG_LEVEL, VER, DEFAULT_SETTINGS, diff_result, FLAGMD_REDFLAG, SYNCINFO_ID, InternalFileEntry } from "./lib/src/types";
|
||||
import { PluginDataEntry, PERIODIC_PLUGIN_SWEEP, PluginList, DevicePluginList, InternalFileInfo } from "./types";
|
||||
import {
|
||||
base64ToString,
|
||||
arrayBufferToBase64,
|
||||
@@ -18,6 +18,7 @@ import {
|
||||
NewNotice,
|
||||
getLocks,
|
||||
Parallels,
|
||||
WrappedNotice,
|
||||
} from "./lib/src/utils";
|
||||
import { Logger, setLogger } from "./lib/src/logger";
|
||||
import { LocalPouchDB } from "./LocalPouchDB";
|
||||
@@ -28,7 +29,7 @@ import { DocumentHistoryModal } from "./DocumentHistoryModal";
|
||||
|
||||
//@ts-ignore
|
||||
import PluginPane from "./PluginPane.svelte";
|
||||
import { id2path, path2id } from "./utils";
|
||||
import { clearAllPeriodic, clearAllTriggers, disposeMemoObject, id2path, memoIfNotExist, memoObject, path2id, retriveMemoObject, setTrigger } from "./utils";
|
||||
import { decrypt, encrypt } from "./lib/src/e2ee_v2";
|
||||
|
||||
const isDebug = false;
|
||||
@@ -188,6 +189,10 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
deviceAndVaultName: string;
|
||||
isMobile = false;
|
||||
|
||||
getVaultName(): string {
|
||||
return this.app.vault.getName() + (this.settings?.additionalSuffixOfDatabaseName ? ("-" + this.settings.additionalSuffixOfDatabaseName) : "");
|
||||
}
|
||||
|
||||
setInterval(handler: () => any, timeout?: number): number {
|
||||
const timer = window.setInterval(handler, timeout);
|
||||
this.registerInterval(timer);
|
||||
@@ -213,7 +218,12 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
async onload() {
|
||||
setLogger(this.addLog.bind(this)); // Logger moved to global.
|
||||
Logger("loading plugin");
|
||||
const lsname = "obsidian-live-sync-ver" + this.app.vault.getName();
|
||||
//@ts-ignore
|
||||
const manifestVersion = MANIFEST_VERSION || "-";
|
||||
//@ts-ignore
|
||||
const packageVersion = PACKAGE_VERSION || "-";
|
||||
Logger(`Self-hosted LiveSync v${manifestVersion} ${packageVersion} `);
|
||||
const lsname = "obsidian-live-sync-ver" + this.getVaultName();
|
||||
const last_version = localStorage.getItem(lsname);
|
||||
await this.loadSettings();
|
||||
//@ts-ignore
|
||||
@@ -295,6 +305,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
this.settings.autoSweepPlugins = false;
|
||||
this.settings.usePluginSync = false;
|
||||
this.settings.suspendFileWatching = true;
|
||||
this.settings.syncInternalFiles = false;
|
||||
await this.saveSettings();
|
||||
await this.openDatabase();
|
||||
const warningMessage = "The red flag is raised! The whole initialize steps are skipped, and any file changes are not captured.";
|
||||
@@ -500,6 +511,14 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
this.showPluginSyncModal();
|
||||
},
|
||||
});
|
||||
|
||||
this.addCommand({
|
||||
id: "livesync-scaninternal",
|
||||
name: "Sync hidden files",
|
||||
callback: () => {
|
||||
this.syncInternalFilesAndDatabase("safe", true);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
pluginDialog: PluginDialogModal = null;
|
||||
@@ -531,10 +550,13 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
}
|
||||
this.clearPeriodicSync();
|
||||
this.clearPluginSweep();
|
||||
this.clearInternalFileScan();
|
||||
if (this.localDatabase != null) {
|
||||
this.localDatabase.closeReplication();
|
||||
this.localDatabase.close();
|
||||
}
|
||||
clearAllPeriodic();
|
||||
clearAllTriggers();
|
||||
window.removeEventListener("visibilitychange", this.watchWindowVisiblity);
|
||||
Logger("unloading plugin");
|
||||
}
|
||||
@@ -543,7 +565,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
if (this.localDatabase != null) {
|
||||
this.localDatabase.close();
|
||||
}
|
||||
const vaultName = this.app.vault.getName();
|
||||
const vaultName = this.getVaultName();
|
||||
Logger("Open Database...");
|
||||
//@ts-ignore
|
||||
const isMobile = this.app.isMobile;
|
||||
@@ -570,7 +592,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
// So, use history is always enabled.
|
||||
this.settings.useHistory = true;
|
||||
|
||||
const lsname = "obsidian-live-sync-vaultanddevicename-" + this.app.vault.getName();
|
||||
const lsname = "obsidian-live-sync-vaultanddevicename-" + this.getVaultName();
|
||||
if (this.settings.deviceAndVaultName != "") {
|
||||
if (!localStorage.getItem(lsname)) {
|
||||
this.deviceAndVaultName = this.settings.deviceAndVaultName;
|
||||
@@ -586,7 +608,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
}
|
||||
|
||||
async saveSettings() {
|
||||
const lsname = "obsidian-live-sync-vaultanddevicename-" + this.app.vault.getName();
|
||||
const lsname = "obsidian-live-sync-vaultanddevicename-" + this.getVaultName();
|
||||
|
||||
localStorage.setItem(lsname, this.deviceAndVaultName || "");
|
||||
await this.saveData(this.settings);
|
||||
@@ -849,7 +871,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
if (this.settings && !this.settings.showVerboseLog && level == LOG_LEVEL.VERBOSE) {
|
||||
return;
|
||||
}
|
||||
const valutName = this.app.vault.getName();
|
||||
const valutName = this.getVaultName();
|
||||
const timestamp = new Date().toLocaleString();
|
||||
const messagecontent = typeof message == "string" ? message : message instanceof Error ? `${message.name}:${message.message}` : JSON.stringify(message, null, 2);
|
||||
const newmessage = timestamp + "->" + messagecontent;
|
||||
@@ -1103,11 +1125,11 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
|
||||
saveQueuedFiles() {
|
||||
const saveData = JSON.stringify(this.queuedFiles.filter((e) => !e.done).map((e) => e.entry._id));
|
||||
const lsname = "obsidian-livesync-queuefiles-" + this.app.vault.getName();
|
||||
const lsname = "obsidian-livesync-queuefiles-" + this.getVaultName();
|
||||
localStorage.setItem(lsname, saveData);
|
||||
}
|
||||
async loadQueuedFiles() {
|
||||
const lsname = "obsidian-livesync-queuefiles-" + this.app.vault.getName();
|
||||
const lsname = "obsidian-livesync-queuefiles-" + this.getVaultName();
|
||||
const ids = JSON.parse(localStorage.getItem(lsname) || "[]") as string[];
|
||||
const ret = await this.localDatabase.localDatabase.allDocs({ keys: ids, include_docs: true });
|
||||
for (const doc of ret.rows) {
|
||||
@@ -1124,6 +1146,13 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
const now = new Date().getTime();
|
||||
if (queue.missingChildren.length == 0) {
|
||||
queue.done = true;
|
||||
if (queue.entry._id.startsWith("i:")) {
|
||||
//system file
|
||||
const filename = id2path(queue.entry._id.substring("i:".length));
|
||||
Logger(`Applying hidden file, ${queue.entry._id} (${queue.entry._rev}) change...`);
|
||||
await this.syncInternalFilesAndDatabase("pull", false, false, [filename])
|
||||
Logger(`Applied hidden file, ${queue.entry._id} (${queue.entry._rev}) change...`);
|
||||
}
|
||||
if (isValidPath(id2path(queue.entry._id))) {
|
||||
Logger(`Applying ${queue.entry._id} (${queue.entry._rev}) change...`);
|
||||
await this.handleDBChanged(queue.entry);
|
||||
@@ -1162,7 +1191,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
}
|
||||
async parseIncomingDoc(doc: PouchDB.Core.ExistingDocument<EntryBody>) {
|
||||
const skipOldFile = this.settings.skipOlderFilesOnSync && false; //patched temporary.
|
||||
if (skipOldFile) {
|
||||
if ((!doc._id.startsWith("i:")) && skipOldFile) {
|
||||
const info = this.app.vault.getAbstractFileByPath(id2path(doc._id));
|
||||
|
||||
if (info && info instanceof TFile) {
|
||||
@@ -1304,6 +1333,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
this.localDatabase.closeReplication();
|
||||
this.clearPeriodicSync();
|
||||
this.clearPluginSweep();
|
||||
this.clearInternalFileScan();
|
||||
await this.applyBatchChange();
|
||||
// disable all sync temporary.
|
||||
if (this.suspended) return;
|
||||
@@ -1314,8 +1344,12 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
this.localDatabase.openReplication(this.settings, true, false, this.parseReplicationResult);
|
||||
this.refreshStatusText();
|
||||
}
|
||||
if (this.settings.syncInternalFiles) {
|
||||
await this.syncInternalFilesAndDatabase("safe", false);
|
||||
}
|
||||
this.setPeriodicSync();
|
||||
this.setPluginSweep();
|
||||
this.setPeriodicInternalFileScan();
|
||||
}
|
||||
|
||||
lastMessage = "";
|
||||
@@ -1414,6 +1448,9 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
await this.sweepPlugin(false);
|
||||
}
|
||||
await this.loadQueuedFiles();
|
||||
if (this.settings.syncInternalFiles && this.settings.syncInternalFilesBeforeReplication) {
|
||||
await this.syncInternalFilesAndDatabase("push", showMessage);
|
||||
}
|
||||
this.localDatabase.openReplication(this.settings, false, showMessage, this.parseReplicationResult);
|
||||
}
|
||||
|
||||
@@ -1877,6 +1914,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
size: file.stat.size,
|
||||
children: [],
|
||||
datatype: datatype,
|
||||
type: datatype,
|
||||
};
|
||||
//upsert should locked
|
||||
const msg = `DB <- STORAGE (${datatype}) `;
|
||||
@@ -2016,6 +2054,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
size: 0,
|
||||
children: [],
|
||||
datatype: "plain",
|
||||
type: "plain"
|
||||
};
|
||||
Logger(`check diff:${m.name}(${m.id})`, LOG_LEVEL.VERBOSE);
|
||||
await runWithLock("plugin-" + m.id, false, async () => {
|
||||
@@ -2091,4 +2130,454 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
periodicInternalFileScanHandler: number = null;
|
||||
|
||||
clearInternalFileScan() {
|
||||
if (this.periodicInternalFileScanHandler != null) {
|
||||
clearInterval(this.periodicInternalFileScanHandler);
|
||||
this.periodicInternalFileScanHandler = null;
|
||||
}
|
||||
}
|
||||
|
||||
setPeriodicInternalFileScan() {
|
||||
if (this.periodicInternalFileScanHandler != null) {
|
||||
this.clearInternalFileScan();
|
||||
}
|
||||
if (this.settings.syncInternalFiles && this.settings.syncInternalFilesInterval > 0) {
|
||||
this.periodicPluginSweepHandler = this.setInterval(async () => await this.periodicInternalFileScan(), this.settings.syncInternalFilesInterval * 1000);
|
||||
}
|
||||
}
|
||||
|
||||
async periodicInternalFileScan() {
|
||||
await this.syncInternalFilesAndDatabase("push", false);
|
||||
}
|
||||
|
||||
async getFiles(
|
||||
path: string,
|
||||
ignoreList: string[],
|
||||
filter: RegExp[]
|
||||
) {
|
||||
const w = await this.app.vault.adapter.list(path);
|
||||
let files = [
|
||||
...w.files
|
||||
.filter((e) => !ignoreList.some((ee) => e.endsWith(ee)))
|
||||
.filter((e) => !filter || filter.some((ee) => e.match(ee))),
|
||||
];
|
||||
L1: for (const v of w.folders) {
|
||||
for (const ignore of ignoreList) {
|
||||
if (v.endsWith(ignore)) {
|
||||
continue L1;
|
||||
}
|
||||
}
|
||||
files = files.concat(await this.getFiles(v, ignoreList, filter));
|
||||
}
|
||||
return files;
|
||||
}
|
||||
|
||||
async scanInternalFiles(): Promise<InternalFileInfo[]> {
|
||||
const ignoreFiles = ["node_modules", ".git", "obsidian-pouch"];
|
||||
const root = this.app.vault.getRoot();
|
||||
const findRoot = root.path;
|
||||
const filenames = (await this.getFiles(findRoot, ignoreFiles, null)).filter(e => e.startsWith(".")).filter(e => !e.startsWith(".trash"));
|
||||
const files = filenames.map(async e => {
|
||||
return {
|
||||
path: e,
|
||||
stat: await this.app.vault.adapter.stat(e)
|
||||
}
|
||||
});
|
||||
const result: InternalFileInfo[] = [];
|
||||
for (const f of files) {
|
||||
const w = await f;
|
||||
result.push({
|
||||
...w,
|
||||
...w.stat
|
||||
})
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
async storeInternaFileToDatabase(file: InternalFileInfo, forceWrite = false) {
|
||||
const id = "i:" + path2id(file.path);
|
||||
const contentBin = await this.app.vault.adapter.readBinary(file.path);
|
||||
const content = await arrayBufferToBase64(contentBin);
|
||||
const mtime = file.mtime;
|
||||
await runWithLock("file-" + id, false, async () => {
|
||||
const old = await this.localDatabase.getDBEntry(id, null, false, false);
|
||||
let saveData: LoadedEntry;
|
||||
if (old === false) {
|
||||
saveData = {
|
||||
_id: id,
|
||||
data: content,
|
||||
mtime,
|
||||
ctime: mtime,
|
||||
datatype: "newnote",
|
||||
size: file.size,
|
||||
children: [],
|
||||
deleted: false,
|
||||
type: "newnote",
|
||||
}
|
||||
} else {
|
||||
if (old.data == content && !forceWrite) {
|
||||
// Logger(`internal files STORAGE --> DB:${file.path}: Not changed`);
|
||||
return;
|
||||
}
|
||||
saveData =
|
||||
{
|
||||
...old,
|
||||
data: content,
|
||||
mtime,
|
||||
size: file.size,
|
||||
datatype: "newnote",
|
||||
children: [],
|
||||
deleted: false,
|
||||
type: "newnote",
|
||||
}
|
||||
}
|
||||
await this.localDatabase.putDBEntry(saveData, true);
|
||||
Logger(`STORAGE --> DB:${file.path}: (hidden) Done`);
|
||||
});
|
||||
}
|
||||
|
||||
async deleteInternaFileOnDatabase(filename: string, forceWrite = false) {
|
||||
const id = "i:" + path2id(filename);
|
||||
const mtime = new Date().getTime();
|
||||
await runWithLock("file-" + id, false, async () => {
|
||||
const old = await this.localDatabase.getDBEntry(id, null, false, false) as InternalFileEntry | false;
|
||||
let saveData: InternalFileEntry;
|
||||
if (old === false) {
|
||||
saveData = {
|
||||
_id: id,
|
||||
mtime,
|
||||
ctime: mtime,
|
||||
size: 0,
|
||||
children: [],
|
||||
deleted: true,
|
||||
type: "newnote",
|
||||
}
|
||||
} else {
|
||||
if (old.deleted) {
|
||||
Logger(`STORAGE -x> DB:${filename}: (hidden) already deleted`);
|
||||
return;
|
||||
}
|
||||
saveData =
|
||||
{
|
||||
...old,
|
||||
mtime,
|
||||
size: 0,
|
||||
children: [],
|
||||
deleted: true,
|
||||
type: "newnote",
|
||||
}
|
||||
}
|
||||
await this.localDatabase.localDatabase.put(saveData);
|
||||
Logger(`STORAGE -x> DB:${filename}: (hidden) Done`);
|
||||
|
||||
});
|
||||
}
|
||||
async ensureDirectoryEx(fullpath: string) {
|
||||
const pathElements = fullpath.split("/");
|
||||
pathElements.pop();
|
||||
let c = "";
|
||||
for (const v of pathElements) {
|
||||
c += v;
|
||||
try {
|
||||
await this.app.vault.adapter.mkdir(c);
|
||||
} catch (ex) {
|
||||
// basically skip exceptions.
|
||||
if (ex.message && ex.message == "Folder already exists.") {
|
||||
// especialy this message is.
|
||||
} else {
|
||||
Logger("Folder Create Error");
|
||||
Logger(ex);
|
||||
}
|
||||
}
|
||||
c += "/";
|
||||
}
|
||||
}
|
||||
async extractInternaFileFromDatabase(filename: string, force = false) {
|
||||
const isExists = await this.app.vault.adapter.exists(filename);
|
||||
const id = "i:" + path2id(filename);
|
||||
|
||||
return await runWithLock("file-" + id, false, async () => {
|
||||
const fileOnDB = await this.localDatabase.getDBEntry(id, null, false, false) as false | LoadedEntry;
|
||||
if (fileOnDB === false) throw new Error(`File not found on database.:${id}`);
|
||||
const deleted = "deleted" in fileOnDB ? fileOnDB.deleted : false;
|
||||
if (deleted) {
|
||||
if (!isExists) {
|
||||
Logger(`STORAGE <x- DB:${filename}: deleted (hidden) Deleted on DB, but the file is already not found on storage.`);
|
||||
} else {
|
||||
Logger(`STORAGE <x- DB:${filename}: deleted (hidden).`);
|
||||
await this.app.vault.adapter.remove(filename);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
if (!isExists) {
|
||||
await this.ensureDirectoryEx(filename);
|
||||
await this.app.vault.adapter.writeBinary(filename, base64ToArrayBuffer(fileOnDB.data), { mtime: fileOnDB.mtime, ctime: fileOnDB.ctime });
|
||||
Logger(`STORAGE <-- DB:${filename}: written (hidden,new${force ? ", force" : ""})`);
|
||||
return true;
|
||||
} else {
|
||||
try {
|
||||
// const stat = await this.app.vault.adapter.stat(filename);
|
||||
// const fileMTime = ~~(stat.mtime/1000);
|
||||
// const docMtime = ~~(old.mtime/1000);
|
||||
const contentBin = await this.app.vault.adapter.readBinary(filename);
|
||||
const content = await arrayBufferToBase64(contentBin);
|
||||
if (content == fileOnDB.data && !force) {
|
||||
Logger(`STORAGE <-- DB:${filename}: skipped (hidden) Not changed`);
|
||||
return false;
|
||||
}
|
||||
await this.app.vault.adapter.writeBinary(filename, base64ToArrayBuffer(fileOnDB.data), { mtime: fileOnDB.mtime, ctime: fileOnDB.ctime });
|
||||
Logger(`STORAGE <-- DB:${filename}: written (hidden, overwrite${force ? ", force" : ""})`);
|
||||
return true;
|
||||
} catch (ex) {
|
||||
Logger(ex);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
filterTargetFiles(files: InternalFileInfo[], targetFiles: string[] | false = false) {
|
||||
const ignorePatterns = this.settings.syncInternalFilesIgnorePatterns.toLocaleLowerCase()
|
||||
.replace(/\n| /g, "")
|
||||
.split(",").filter(e => e).map(e => new RegExp(e));
|
||||
// const files = await this.scanInternalFiles();
|
||||
return files.filter(file => !ignorePatterns.some(e => file.path.match(e))).filter(file => !targetFiles || (targetFiles && targetFiles.indexOf(file.path) !== -1))
|
||||
//if (ignorePatterns.some(e => filename.match(e))) continue;
|
||||
//if (targetFiles !== false && targetFiles.indexOf(filename) == -1) continue;
|
||||
}
|
||||
|
||||
async applyMTimeToFile(file: InternalFileInfo) {
|
||||
await this.app.vault.adapter.append(file.path, "", { ctime: file.ctime, mtime: file.mtime });
|
||||
}
|
||||
confirmPopup: WrappedNotice = null;
|
||||
|
||||
//TODO: Tidy up. Even though it is experimental feature, So dirty...
|
||||
async syncInternalFilesAndDatabase(direction: "push" | "pull" | "safe", showMessage: boolean, files: InternalFileInfo[] | false = false, targetFiles: string[] | false = false) {
|
||||
const logLevel = showMessage ? LOG_LEVEL.NOTICE : LOG_LEVEL.INFO;
|
||||
Logger("Scanning hidden files.", logLevel, "sync_internal");
|
||||
const ignorePatterns = this.settings.syncInternalFilesIgnorePatterns.toLocaleLowerCase()
|
||||
.replace(/\n| /g, "")
|
||||
.split(",").filter(e => e).map(e => new RegExp(e));
|
||||
if (!files) files = await this.scanInternalFiles();
|
||||
const filesOnDB = (await this.localDatabase.localDatabase.allDocs({ startkey: "i:", endkey: "i;", include_docs: true })).rows.map(e => e.doc) as InternalFileEntry[];
|
||||
const allFileNamesSrc = [...new Set([...files.map(e => normalizePath(e.path)), ...filesOnDB.map(e => normalizePath(id2path(e._id.substring("i:".length))))])];
|
||||
const allFileNames = allFileNamesSrc.filter(filename => !targetFiles || (targetFiles && targetFiles.indexOf(filename) !== -1))
|
||||
function compareMTime(a: number, b: number) {
|
||||
const wa = ~~(a / 1000);
|
||||
const wb = ~~(b / 1000);
|
||||
const diff = wa - wb;
|
||||
return diff;
|
||||
}
|
||||
|
||||
const fileCount = allFileNames.length;
|
||||
let processed = 0;
|
||||
let filesChanged = 0;
|
||||
const p = Parallels();
|
||||
const limit = 10;
|
||||
// count updated files up as like this below:
|
||||
// .obsidian: 2
|
||||
// .obsidian/workspace: 1
|
||||
// .obsidian/plugins: 1
|
||||
// .obsidian/plugins/recent-files-obsidian: 1
|
||||
// .obsidian/plugins/recent-files-obsidian/data.json: 1
|
||||
const updatedFolders: { [key: string]: number } = {}
|
||||
const countUpdatedFolder = (path: string) => {
|
||||
const pieces = path.split("/");
|
||||
let c = pieces.shift();
|
||||
let pathPieces = "";
|
||||
filesChanged++;
|
||||
while (c) {
|
||||
pathPieces += (pathPieces != "" ? "/" : "") + c;
|
||||
pathPieces = normalizePath(pathPieces);
|
||||
if (!(pathPieces in updatedFolders)) {
|
||||
updatedFolders[pathPieces] = 0;
|
||||
}
|
||||
updatedFolders[pathPieces]++;
|
||||
c = pieces.shift();
|
||||
}
|
||||
}
|
||||
// Cache update time information for files which have already been processed (mainly for files that were skipped due to the same content)
|
||||
let caches: { [key: string]: { storageMtime: number; docMtime: number } } = {};
|
||||
caches = await this.localDatabase.kvDB.get<{ [key: string]: { storageMtime: number; docMtime: number } }>("diff-caches-internal") || {};
|
||||
for (const filename of allFileNames) {
|
||||
processed++;
|
||||
if (processed % 100 == 0) Logger(`Hidden file: ${processed}/${fileCount}`, logLevel, "sync_internal");
|
||||
if (ignorePatterns.some(e => filename.match(e))) continue;
|
||||
|
||||
const fileOnStorage = files.find(e => e.path == filename);
|
||||
const fileOnDatabase = filesOnDB.find(e => e._id == "i:" + id2path(filename));
|
||||
// TODO: Fix this somehow smart.
|
||||
let proc: Promise<void> | null;
|
||||
|
||||
if (fileOnStorage && fileOnDatabase) {
|
||||
// Both => Synchronize
|
||||
const cache = filename in caches ? caches[filename] : { storageMtime: 0, docMtime: 0 };
|
||||
if (fileOnDatabase.mtime == cache.docMtime && fileOnStorage.mtime == cache.storageMtime) {
|
||||
continue;
|
||||
}
|
||||
const nw = compareMTime(fileOnStorage.mtime, fileOnDatabase.mtime);
|
||||
if (nw == 0) continue;
|
||||
|
||||
if (nw > 0) {
|
||||
proc = (async (fileOnStorage) => {
|
||||
await this.storeInternaFileToDatabase(fileOnStorage);
|
||||
cache.docMtime = fileOnDatabase.mtime;
|
||||
cache.storageMtime = fileOnStorage.mtime;
|
||||
caches[filename] = cache;
|
||||
})(fileOnStorage);
|
||||
|
||||
}
|
||||
if (nw < 0) {
|
||||
proc = (async (filename) => {
|
||||
if (await this.extractInternaFileFromDatabase(filename)) {
|
||||
cache.docMtime = fileOnDatabase.mtime;
|
||||
cache.storageMtime = fileOnStorage.mtime;
|
||||
caches[filename] = cache;
|
||||
countUpdatedFolder(filename);
|
||||
}
|
||||
})(filename);
|
||||
|
||||
}
|
||||
} else if (!fileOnStorage && fileOnDatabase) {
|
||||
if (direction == "push") {
|
||||
if (fileOnDatabase.deleted) {
|
||||
// await this.storeInternaFileToDatabase(fileOnStorage);
|
||||
} else {
|
||||
proc = (async () => {
|
||||
await this.deleteInternaFileOnDatabase(filename);
|
||||
})();
|
||||
}
|
||||
} else if (direction == "pull") {
|
||||
proc = (async () => {
|
||||
if (await this.extractInternaFileFromDatabase(filename)) {
|
||||
countUpdatedFolder(filename);
|
||||
}
|
||||
})();
|
||||
} else if (direction == "safe") {
|
||||
if (fileOnDatabase.deleted) {
|
||||
// await this.storeInternaFileToDatabase(fileOnStorage);
|
||||
} else {
|
||||
proc = (async () => {
|
||||
if (await this.extractInternaFileFromDatabase(filename)) {
|
||||
countUpdatedFolder(filename);
|
||||
}
|
||||
})();
|
||||
}
|
||||
}
|
||||
} else if (fileOnStorage && !fileOnDatabase) {
|
||||
proc = (async () => {
|
||||
await this.storeInternaFileToDatabase(fileOnStorage);
|
||||
})();
|
||||
} else {
|
||||
throw new Error("Invalid state on hidden file sync");
|
||||
// Something corrupted?
|
||||
}
|
||||
if (proc) p.add(proc);
|
||||
await p.wait(limit);
|
||||
}
|
||||
await p.all();
|
||||
await this.localDatabase.kvDB.set("diff-caches-internal", caches);
|
||||
|
||||
// When files has been retreived from the database. they must be reloaded.
|
||||
if (direction == "pull" && filesChanged != 0) {
|
||||
const configDir = normalizePath(this.app.vault.configDir);
|
||||
// Show notification to restart obsidian when something has been changed in configDir.
|
||||
if (configDir in updatedFolders) {
|
||||
// Numbers of updated files that is below of configDir.
|
||||
let updatedCount = updatedFolders[configDir];
|
||||
try {
|
||||
//@ts-ignore
|
||||
const manifests = Object.values(this.app.plugins.manifests) as PluginManifest[];
|
||||
//@ts-ignore
|
||||
const enabledPlugins = this.app.plugins.enabledPlugins as Set<string>;
|
||||
const enabledPluginManifests = manifests.filter(e => enabledPlugins.has(e.id));
|
||||
for (const manifest of enabledPluginManifests) {
|
||||
if (manifest.dir in updatedFolders) {
|
||||
// If notified about plug-ins, reloading Obsidian may not be necessary.
|
||||
updatedCount -= updatedFolders[manifest.dir];
|
||||
const updatePluginId = manifest.id;
|
||||
const updatePluginName = manifest.name;
|
||||
const fragment = createFragment((doc) => {
|
||||
doc.createEl("span", null, (a) => {
|
||||
a.appendText(`Files in ${updatePluginName} has been updated, Press `)
|
||||
a.appendChild(a.createEl("a", null, (anchor) => {
|
||||
anchor.text = "HERE";
|
||||
anchor.addEventListener("click", async () => {
|
||||
Logger(`Unloading plugin: ${updatePluginName}`, LOG_LEVEL.NOTICE, "pluin-reload-" + updatePluginId);
|
||||
// @ts-ignore
|
||||
await this.app.plugins.unloadPlugin(updatePluginId);
|
||||
// @ts-ignore
|
||||
await this.app.plugins.loadPlugin(updatePluginId);
|
||||
Logger(`Plugin reloaded: ${updatePluginName}`, LOG_LEVEL.NOTICE, "pluin-reload-" + updatePluginId);
|
||||
});
|
||||
}))
|
||||
|
||||
a.appendText(` to reload ${updatePluginName}, or press elsewhere to dismiss this message.`)
|
||||
});
|
||||
});
|
||||
|
||||
const updatedPluginKey = "popupUpdated-" + updatePluginId;
|
||||
setTrigger(updatedPluginKey, 1000, async () => {
|
||||
const popup = await memoIfNotExist(updatedPluginKey, () => new Notice(fragment, 0));
|
||||
//@ts-ignore
|
||||
const isShown = popup?.noticeEl?.isShown();
|
||||
if (!isShown) {
|
||||
memoObject(updatedPluginKey, new Notice(fragment, 0))
|
||||
}
|
||||
setTrigger(updatedPluginKey + "-close", 20000, () => {
|
||||
const popup = retriveMemoObject<Notice>(updatedPluginKey)
|
||||
if (!popup) return;
|
||||
//@ts-ignore
|
||||
if (popup?.noticeEl?.isShown()) {
|
||||
popup.hide();
|
||||
}
|
||||
disposeMemoObject(updatedPluginKey);
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
} catch (ex) {
|
||||
Logger("Error on checking plugin status.");
|
||||
Logger(ex, LOG_LEVEL.VERBOSE);
|
||||
|
||||
}
|
||||
|
||||
// If something changes left, notify for reloading Obsidian.
|
||||
if (updatedCount != 0) {
|
||||
const fragment = createFragment((doc) => {
|
||||
doc.createEl("span", null, (a) => {
|
||||
a.appendText(`Hidden files have been synchronized, Press `)
|
||||
a.appendChild(a.createEl("a", null, (anchor) => {
|
||||
anchor.text = "HERE";
|
||||
anchor.addEventListener("click", () => {
|
||||
// @ts-ignore
|
||||
this.app.commands.executeCommandById("app:reload")
|
||||
});
|
||||
}))
|
||||
|
||||
a.appendText(` to reload obsidian, or press elsewhere to dismiss this message.`)
|
||||
});
|
||||
});
|
||||
|
||||
setTrigger("popupUpdated-" + configDir, 1000, () => {
|
||||
//@ts-ignore
|
||||
const isShown = this.confirmPopup?.noticeEl?.isShown();
|
||||
if (!isShown) {
|
||||
this.confirmPopup = new Notice(fragment, 0);
|
||||
}
|
||||
setTrigger("popupClose" + configDir, 20000, () => {
|
||||
this.confirmPopup?.hide();
|
||||
this.confirmPopup = null;
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Logger(`Hidden files scanned: ${filesChanged} files had been modified`, logLevel, "sync_internal");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,3 +22,11 @@ export interface DevicePluginList {
|
||||
[key: string]: PluginDataEntry;
|
||||
}
|
||||
export const PERIODIC_PLUGIN_SWEEP = 60;
|
||||
|
||||
export interface InternalFileInfo {
|
||||
path: string;
|
||||
mtime: number;
|
||||
ctime: number;
|
||||
size: number;
|
||||
deleted?: boolean;
|
||||
}
|
||||
|
||||
61
src/utils.ts
@@ -12,3 +12,64 @@ export function path2id(filename: string): string {
|
||||
export function id2path(filename: string): string {
|
||||
return id2path_base(normalizePath(filename));
|
||||
}
|
||||
|
||||
const triggers: { [key: string]: ReturnType<typeof setTimeout> } = {};
|
||||
export function setTrigger(key: string, timeout: number, proc: (() => Promise<any> | void)) {
|
||||
clearTrigger(key);
|
||||
triggers[key] = setTimeout(async () => {
|
||||
delete triggers[key];
|
||||
await proc();
|
||||
}, timeout);
|
||||
}
|
||||
export function clearTrigger(key: string) {
|
||||
if (key in triggers) {
|
||||
clearTimeout(triggers[key]);
|
||||
}
|
||||
}
|
||||
export function clearAllTriggers() {
|
||||
for (const v in triggers) {
|
||||
clearTimeout(triggers[v]);
|
||||
}
|
||||
}
|
||||
const intervals: { [key: string]: ReturnType<typeof setInterval> } = {};
|
||||
export function setPeriodic(key: string, timeout: number, proc: (() => Promise<any> | void)) {
|
||||
clearPeriodic(key);
|
||||
intervals[key] = setInterval(async () => {
|
||||
delete intervals[key];
|
||||
await proc();
|
||||
}, timeout);
|
||||
}
|
||||
export function clearPeriodic(key: string) {
|
||||
if (key in intervals) {
|
||||
clearInterval(intervals[key]);
|
||||
}
|
||||
}
|
||||
export function clearAllPeriodic() {
|
||||
for (const v in intervals) {
|
||||
clearInterval(intervals[v]);
|
||||
}
|
||||
}
|
||||
|
||||
const memos: { [key: string]: any } = {};
|
||||
export function memoObject<T>(key: string, obj: T): T {
|
||||
memos[key] = obj;
|
||||
return memos[key] as T;
|
||||
}
|
||||
export async function memoIfNotExist<T>(key: string, func: () => T | Promise<T>): Promise<T> {
|
||||
if (!(key in memos)) {
|
||||
const w = func();
|
||||
const v = w instanceof Promise ? (await w) : w;
|
||||
memos[key] = v;
|
||||
}
|
||||
return memos[key] as T;
|
||||
}
|
||||
export function retriveMemoObject<T>(key: string): T | false {
|
||||
if (key in memos) {
|
||||
return memos[key];
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
export function disposeMemoObject(key: string) {
|
||||
delete memos[key];
|
||||
}
|
||||