Mirror of https://github.com/vrtmrz/obsidian-livesync.git (synced 2026-02-22 20:18:48 +00:00)

Compare commits: 0.7.2 ... snyk-upgra (8 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 7da930a8bb | |
| | 1e3de47d92 | |
| | a50f0965f6 | |
| | 9d3aa35b0b | |
| | b4b9684a55 | |
| | 221cccb845 | |
| | 801500f924 | |
| | 3545ae9690 | |
3 .gitmodules vendored Normal file
@@ -0,0 +1,3 @@
[submodule "src/lib"]
    path = src/lib
    url = https://github.com/vrtmrz/livesync-commonlib
@@ -18,7 +18,14 @@ Note: This password is saved into your Obsidian's vault in plain text.
The Database name to synchronize.
⚠️ If it does not exist, it will be created automatically.


### Use the old connecting method
Since v0.8.0, Self-hosted LiveSync uses Obsidian's API to connect to the CouchDB instead of the browser API.
This method improves performance and avoids trouble with CORS.
However, it has not been well tested yet. If you run into trouble, please disable this option once.

### Test Database connection
You can check the connection by clicking this button.

## Local Database Configurations
"Local Database" is created inside your Obsidian.
@@ -44,6 +51,8 @@ As a result, Obsidian's behavior is temporarily slowed down.
The default is 300 seconds.
If you are an early adopter, this value may still be set to 30 seconds. Please change it to a larger value.

Note: If you want to use "Use history", this value must be set to 0.

### Manual Garbage Collect
Run "Garbage Collection" manually.

@@ -52,6 +61,8 @@ Encrypt your database. It affects only the database, your files are left as plai

The encryption algorithm is AES-GCM.

Note: If you want to use "Plugins and their settings", you have to enable this.

### Passphrase
The passphrase used as the encryption key. Please use a long string.

@@ -195,6 +206,29 @@ You can set synchronization method at once as these pattern:
- Sync on File Open : disabled
- Sync on Start : disabled

### Use history
If you enable this option, you can keep document histories in your database.
(Not all intermediate changes are synchronized.)
You can check the changes caused by your edits and/or replication.

### Enable plugin synchronization
If you want to use this feature, you have to activate it with this switch.

### Sweep plugins automatically
A plugin sweep will run automatically before replication.

### Sweep plugins periodically
A plugin sweep will run every minute.

### Notify updates
When replication is complete, a notification will be shown if a newer version of a plugin installed on this device is configured on another device.

### Device and Vault name
To save the plugins, you have to set a unique name for each device.

### Open
Open the "Plugins and their settings" dialog.

## Hatch
From here, everything is under the hood. Please handle it with care.
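As context for the "Encrypt database" and "Passphrase" options above: the plugin derives an AES-256-GCM key from the passphrase with WebCrypto. The sketch below is illustrative only; it roughly mirrors getKeyForEncrypt() in src/e2ee.ts (shown later in this diff, now moving into the commonlib submodule), and the function names here are hypothetical.

```ts
// Hypothetical sketch: derive an AES-256-GCM key from the passphrase via PBKDF2.
async function deriveKey(passphrase: string, salt: Uint8Array): Promise<CryptoKey> {
    const digest = await crypto.subtle.digest({ name: "SHA-256" }, new TextEncoder().encode(passphrase));
    const keyMaterial = await crypto.subtle.importKey("raw", digest, { name: "PBKDF2" }, false, ["deriveKey"]);
    return crypto.subtle.deriveKey(
        { name: "PBKDF2", salt, iterations: 100000, hash: "SHA-256" },
        keyMaterial,
        { name: "AES-GCM", length: 256 },
        false,
        ["encrypt", "decrypt"]
    );
}

// Usage: encrypt one chunk of a note with a random salt and IV.
async function encryptChunk(plain: string, passphrase: string) {
    const salt = crypto.getRandomValues(new Uint8Array(16));
    const iv = crypto.getRandomValues(new Uint8Array(12));
    const key = await deriveKey(passphrase, salt);
    const cipher = await crypto.subtle.encrypt({ name: "AES-GCM", iv }, key, new TextEncoder().encode(plain));
    return { cipher, iv, salt };
}
```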
1 lib Submodule
Submodule lib added at 315ef99845
@@ -1,7 +1,7 @@
{
    "id": "obsidian-livesync",
    "name": "Self-hosted LiveSync",
    "version": "0.7.2",
    "version": "0.8.3",
    "minAppVersion": "0.9.12",
    "description": "Community implementation of self-hosted livesync. Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.",
    "author": "vorotamoroz",
739 package-lock.json generated
File diff suppressed because it is too large.
26 package.json
@@ -1,6 +1,6 @@
{
    "name": "obsidian-livesync",
    "version": "0.7.2",
    "version": "0.8.4",
    "description": "Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.",
    "main": "main.js",
    "type": "module",
@@ -21,20 +21,28 @@
    "@types/pouchdb-browser": "^6.1.3",
    "@typescript-eslint/eslint-plugin": "^5.7.0",
    "@typescript-eslint/parser": "^5.0.0",
    "eslint": "^7.32.0",
    "eslint-config-airbnb-base": "^14.2.1",
    "eslint-plugin-import": "^2.25.2",
    "obsidian": "^0.13.11",
    "rollup": "^2.32.1",
    "tslib": "^2.2.0",
    "typescript": "^4.2.4",
    "builtin-modules": "^3.2.0",
    "esbuild": "0.13.12",
    "esbuild-svelte": "^0.6.0",
    "svelte-preprocess": "^4.10.2"
    "eslint": "^7.32.0",
    "eslint-config-airbnb-base": "^14.2.1",
    "eslint-plugin-import": "^2.25.2",
    "obsidian": "^0.14.6",
    "rollup": "^2.32.1",
    "svelte-preprocess": "^4.10.5",
    "tslib": "^2.2.0",
    "typescript": "^4.2.4"
    },
    "dependencies": {
    "diff-match-patch": "^1.0.5",
    "esbuild": "0.13.12",
    "esbuild-svelte": "^0.6.0",
    "pouchdb-adapter-http": "^7.3.0",
    "pouchdb-adapter-idb": "^7.3.0",
    "pouchdb-core": "^7.3.0",
    "pouchdb-mapreduce": "^7.3.0",
    "pouchdb-replication": "^7.3.0",
    "svelte-preprocess": "^4.10.5",
    "xxhash-wasm": "^0.4.2"
    }
}
1 pouchdb-browser-webpack/.gitignore vendored
@@ -1 +0,0 @@
node_modules
@@ -1,2 +0,0 @@
# PouchDB-browser
just webpacked.
File diff suppressed because one or more lines are too long
9820 pouchdb-browser-webpack/package-lock.json generated
File diff suppressed because it is too large.
@@ -1,23 +0,0 @@
{
    "name": "pouchdb-browser-webpack",
    "version": "1.0.0",
    "description": "pouchdb-browser webpack",
    "main": "index.js",
    "scripts": {
        "test": "echo \"Error: no test specified\" && exit 1",
        "build": "webpack --mode=production --node-env=production",
        "build:dev": "webpack --mode=development",
        "build:prod": "webpack --mode=production --node-env=production",
        "watch": "webpack --watch"
    },
    "keywords": [],
    "author": "",
    "license": "ISC",
    "dependencies": {
        "pouchdb-browser": "^7.2.2"
    },
    "devDependencies": {
        "webpack": "^5.58.1",
        "webpack-cli": "^4.9.0"
    }
}
@@ -1,4 +0,0 @@
// This module just webpacks pouchdb-browser
import * as PouchDB_src from "pouchdb-browser";
const PouchDB = PouchDB_src.default;
export { PouchDB };
@@ -1,30 +0,0 @@
// Generated using webpack-cli https://github.com/webpack/webpack-cli

const path = require("path");

const isProduction = process.env.NODE_ENV == "production";

const config = {
    entry: "./src/index.js",
    output: {
        filename: "pouchdb-browser.js",
        path: path.resolve(__dirname, "dist"),
        library: {
            type: "module",
        },
    },
    experiments: {
        outputModule: true,
    },
    plugins: [],
    module: {},
};

module.exports = () => {
    if (isProduction) {
        config.mode = "production";
    } else {
        config.mode = "development";
    }
    return config;
};
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
import { App, Modal } from "obsidian";
import { DIFF_DELETE, DIFF_EQUAL, DIFF_INSERT } from "diff-match-patch";
import { diff_result } from "./types";
import { escapeStringToHTML } from "./utils";
import { diff_result } from "./lib/src/types";
import { escapeStringToHTML } from "./lib/src/utils";

export class ConflictResolveModal extends Modal {
    // result: Array<[number, string]>;
@@ -1,7 +1,10 @@
import { TFile, Modal, App } from "obsidian";
import { path2id, escapeStringToHTML } from "./utils";
import { path2id } from "./utils";
import { escapeStringToHTML } from "./lib/src/utils";
import ObsidianLiveSyncPlugin from "./main";
import { DIFF_DELETE, DIFF_EQUAL, DIFF_INSERT, diff_match_patch } from "diff-match-patch";
import { LOG_LEVEL } from "./lib/src/types";
import { Logger } from "./lib/src/logger";

export class DocumentHistoryModal extends Modal {
    plugin: ObsidianLiveSyncPlugin;
@@ -14,6 +17,7 @@ export class DocumentHistoryModal extends Modal {
    file: string;

    revs_info: PouchDB.Core.RevisionInfo[] = [];
    currentText = "";

    constructor(app: App, plugin: ObsidianLiveSyncPlugin, file: TFile) {
        super(app);
@@ -37,6 +41,7 @@ export class DocumentHistoryModal extends Modal {
        const index = this.revs_info.length - 1 - (this.range.value as any) / 1;
        const rev = this.revs_info[index];
        const w = await db.getDBEntry(path2id(this.file), { rev: rev.rev }, false, false);
        this.currentText = "";

        if (w === false) {
            this.info.innerHTML = "";
@@ -44,6 +49,7 @@ export class DocumentHistoryModal extends Modal {
        } else {
            this.info.innerHTML = `Modified:${new Date(w.mtime).toLocaleString()}`;
            let result = "";
            this.currentText = w.data;
            if (this.showDiff) {
                const prevRevIdx = this.revs_info.length - 1 - ((this.range.value as any) / 1 - 1);
                if (prevRevIdx >= 0 && prevRevIdx < this.revs_info.length) {
@@ -124,6 +130,14 @@ export class DocumentHistoryModal extends Modal {
        this.contentView = div;
        div.addClass("op-scrollable");
        div.addClass("op-pre");
        const buttons = contentEl.createDiv("");
        buttons.createEl("button", { text: "Copy to clipboard" }, (e) => {
            e.addClass("mod-cta");
            e.addEventListener("click", async () => {
                await navigator.clipboard.writeText(this.currentText);
                Logger(`Old content copied to clipboard`, LOG_LEVEL.NOTICE);
            });
        });
    }
    onClose() {
        const { contentEl } = this;
@@ -1,5 +1,4 @@
|
||||
import { Notice } from "obsidian";
|
||||
import { PouchDB } from "../pouchdb-browser-webpack/dist/pouchdb-browser.js";
|
||||
import { PouchDB } from "./pouchdb-browser";
|
||||
import xxhash from "xxhash-wasm";
|
||||
import {
|
||||
Entry,
|
||||
@@ -10,7 +9,6 @@ import {
|
||||
NewEntry,
|
||||
PlainEntry,
|
||||
LoadedEntry,
|
||||
ObsidianLiveSyncSettings,
|
||||
Credential,
|
||||
EntryMilestoneInfo,
|
||||
LOG_LEVEL,
|
||||
@@ -22,16 +20,18 @@ import {
|
||||
VER,
|
||||
MILSTONE_DOCID,
|
||||
DatabaseConnectingStatus,
|
||||
} from "./types";
|
||||
import { resolveWithIgnoreKnownError, delay, path2id, runWithLock, isPlainText } from "./utils";
|
||||
import { Logger } from "./logger";
|
||||
} from "./lib/src/types";
|
||||
import { decrypt, encrypt } from "./lib/src/e2ee";
|
||||
import { RemoteDBSettings } from "./lib/src/types";
|
||||
import { resolveWithIgnoreKnownError, delay, runWithLock, isPlainText, splitPieces, NewNotice, WrappedNotice } from "./lib/src/utils";
|
||||
import { path2id } from "./utils";
|
||||
import { Logger } from "./lib/src/logger";
|
||||
import { checkRemoteVersion, connectRemoteCouchDB, getLastPostFailedBySize } from "./utils_couchdb";
|
||||
import { decrypt, encrypt } from "./e2ee";
|
||||
|
||||
export class LocalPouchDB {
|
||||
auth: Credential;
|
||||
dbname: string;
|
||||
settings: ObsidianLiveSyncSettings;
|
||||
settings: RemoteDBSettings;
|
||||
localDatabase: PouchDB.Database<EntryDoc>;
|
||||
nodeid = "";
|
||||
isReady = false;
|
||||
@@ -77,7 +77,7 @@ export class LocalPouchDB {
|
||||
this.localDatabase.removeAllListeners();
|
||||
}
|
||||
|
||||
constructor(settings: ObsidianLiveSyncSettings, dbname: string) {
|
||||
constructor(settings: RemoteDBSettings, dbname: string) {
|
||||
this.auth = {
|
||||
username: "",
|
||||
password: "",
|
||||
@@ -503,7 +503,7 @@ export class LocalPouchDB {
|
||||
}
|
||||
async putDBEntry(note: LoadedEntry) {
|
||||
await this.waitForGCComplete();
|
||||
let leftData = note.data;
|
||||
// let leftData = note.data;
|
||||
const savenNotes = [];
|
||||
let processed = 0;
|
||||
let made = 0;
|
||||
@@ -516,53 +516,22 @@ export class LocalPouchDB {
|
||||
pieceSize = MAX_DOC_SIZE;
|
||||
plainSplit = true;
|
||||
}
|
||||
|
||||
const newLeafs: EntryLeaf[] = [];
|
||||
do {
|
||||
// To keep low bandwith and database size,
|
||||
// Dedup pieces on database.
|
||||
// from 0.1.10, for best performance. we use markdown delimiters
|
||||
// 1. \n[^\n]{longLineThreshold}[^\n]*\n -> long sentence shuld break.
|
||||
// 2. \n\n shold break
|
||||
// 3. \r\n\r\n should break
|
||||
// 4. \n# should break.
|
||||
let cPieceSize = pieceSize;
|
||||
if (plainSplit) {
|
||||
let minimumChunkSize = this.settings.minimumChunkSize;
|
||||
if (minimumChunkSize < 10) minimumChunkSize = 10;
|
||||
let longLineThreshold = this.settings.longLineThreshold;
|
||||
if (longLineThreshold < 100) longLineThreshold = 100;
|
||||
cPieceSize = 0;
|
||||
// lookup for next splittion .
|
||||
// we're standing on "\n"
|
||||
do {
|
||||
const n1 = leftData.indexOf("\n", cPieceSize + 1);
|
||||
const n2 = leftData.indexOf("\n\n", cPieceSize + 1);
|
||||
const n3 = leftData.indexOf("\r\n\r\n", cPieceSize + 1);
|
||||
const n4 = leftData.indexOf("\n#", cPieceSize + 1);
|
||||
if (n1 == -1 && n2 == -1 && n3 == -1 && n4 == -1) {
|
||||
cPieceSize = MAX_DOC_SIZE;
|
||||
break;
|
||||
}
|
||||
// To keep low bandwith and database size,
|
||||
// Dedup pieces on database.
|
||||
// from 0.1.10, for best performance. we use markdown delimiters
|
||||
// 1. \n[^\n]{longLineThreshold}[^\n]*\n -> long sentence shuld break.
|
||||
// 2. \n\n shold break
|
||||
// 3. \r\n\r\n should break
|
||||
// 4. \n# should break.
|
||||
let minimumChunkSize = this.settings.minimumChunkSize;
|
||||
if (minimumChunkSize < 10) minimumChunkSize = 10;
|
||||
let longLineThreshold = this.settings.longLineThreshold;
|
||||
if (longLineThreshold < 100) longLineThreshold = 100;
|
||||
|
||||
if (n1 > longLineThreshold) {
|
||||
// long sentence is an established piece
|
||||
cPieceSize = n1;
|
||||
} else {
|
||||
// cPieceSize = Math.min.apply([n2, n3, n4].filter((e) => e > 1));
|
||||
// ^ heavy.
|
||||
if (n1 > 0 && cPieceSize < n1) cPieceSize = n1;
|
||||
if (n2 > 0 && cPieceSize < n2) cPieceSize = n2 + 1;
|
||||
if (n3 > 0 && cPieceSize < n3) cPieceSize = n3 + 3;
|
||||
// Choose shorter, empty line and \n#
|
||||
if (n4 > 0 && cPieceSize > n4) cPieceSize = n4 + 0;
|
||||
cPieceSize++;
|
||||
}
|
||||
} while (cPieceSize < minimumChunkSize);
|
||||
}
|
||||
|
||||
// piece size determined.
|
||||
const piece = leftData.substring(0, cPieceSize);
|
||||
leftData = leftData.substring(cPieceSize);
|
||||
const pieces = splitPieces(note.data, pieceSize, plainSplit, minimumChunkSize, longLineThreshold);
|
||||
for (const piece of pieces()) {
|
||||
processed++;
|
||||
let leafid = "";
|
||||
// Get hash of piece.
|
||||
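The hunk above delegates chunking to splitPieces(), which now lives in the livesync-commonlib submodule. Based on the inline logic it replaces, a sketch of such a helper could look like the following; this is an assumption about its shape (a generator factory, iterated as `for (const piece of pieces())`), not the library's exact source.

```ts
// Hypothetical sketch of a splitPieces-like helper, reconstructed from the inline
// logic removed above. It returns a generator factory so pieces are produced lazily.
function splitPiecesSketch(data: string, pieceSize: number, plainSplit: boolean, minimumChunkSize: number, longLineThreshold: number) {
    return function* pieces(): Generator<string> {
        let leftData = data;
        while (leftData != "") {
            let cPieceSize = pieceSize;
            if (plainSplit) {
                // Prefer to cut at markdown-ish boundaries: a newline after a long line,
                // an empty line ("\n\n" or "\r\n\r\n"), or a heading ("\n#").
                cPieceSize = 0;
                do {
                    const n1 = leftData.indexOf("\n", cPieceSize + 1);
                    const n2 = leftData.indexOf("\n\n", cPieceSize + 1);
                    const n3 = leftData.indexOf("\r\n\r\n", cPieceSize + 1);
                    const n4 = leftData.indexOf("\n#", cPieceSize + 1);
                    if (n1 == -1 && n2 == -1 && n3 == -1 && n4 == -1) {
                        // No delimiter left: take the rest as one piece.
                        cPieceSize = leftData.length;
                        break;
                    }
                    if (n1 > longLineThreshold) {
                        // A very long line becomes a piece of its own.
                        cPieceSize = n1;
                    } else {
                        if (n1 > 0 && cPieceSize < n1) cPieceSize = n1;
                        if (n2 > 0 && cPieceSize < n2) cPieceSize = n2 + 1;
                        if (n3 > 0 && cPieceSize < n3) cPieceSize = n3 + 3;
                        if (n4 > 0 && cPieceSize > n4) cPieceSize = n4;
                        cPieceSize++;
                    }
                } while (cPieceSize < minimumChunkSize);
            }
            yield leftData.substring(0, cPieceSize);
            leftData = leftData.substring(cPieceSize);
        }
    };
}
```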
@@ -646,7 +615,7 @@ export class LocalPouchDB {
|
||||
}
|
||||
}
|
||||
savenNotes.push(leafid);
|
||||
} while (leftData != "");
|
||||
}
|
||||
let saved = true;
|
||||
if (newLeafs.length > 0) {
|
||||
try {
|
||||
@@ -727,14 +696,14 @@ export class LocalPouchDB {
|
||||
// no op now,
|
||||
return true;
|
||||
}
|
||||
replicateAllToServer(setting: ObsidianLiveSyncSettings, showingNotice?: boolean) {
|
||||
replicateAllToServer(setting: RemoteDBSettings, showingNotice?: boolean) {
|
||||
return new Promise(async (res, rej) => {
|
||||
await this.waitForGCComplete();
|
||||
this.closeReplication();
|
||||
Logger("send all data to server", LOG_LEVEL.NOTICE);
|
||||
let notice: Notice = null;
|
||||
let notice: WrappedNotice = null;
|
||||
if (showingNotice) {
|
||||
notice = new Notice("Initializing", 0);
|
||||
notice = NewNotice("Initializing", 0);
|
||||
}
|
||||
this.syncStatus = "STARTED";
|
||||
this.updateInfo();
|
||||
@@ -743,7 +712,7 @@ export class LocalPouchDB {
|
||||
username: setting.couchDB_USER,
|
||||
password: setting.couchDB_PASSWORD,
|
||||
};
|
||||
const dbret = await connectRemoteCouchDB(uri, auth);
|
||||
const dbret = await connectRemoteCouchDB(uri, auth, setting.disableRequestURI);
|
||||
if (typeof dbret === "string") {
|
||||
Logger(`could not connect to ${uri}:${dbret}`, LOG_LEVEL.NOTICE);
|
||||
if (notice != null) notice.hide();
|
||||
@@ -800,7 +769,7 @@ export class LocalPouchDB {
|
||||
});
|
||||
}
|
||||
|
||||
async checkReplicationConnectivity(setting: ObsidianLiveSyncSettings, keepAlive: boolean, skipCheck: boolean) {
|
||||
async checkReplicationConnectivity(setting: RemoteDBSettings, keepAlive: boolean, skipCheck: boolean) {
|
||||
if (!this.isReady) {
|
||||
Logger("Database is not ready.");
|
||||
return false;
|
||||
@@ -808,7 +777,7 @@ export class LocalPouchDB {
|
||||
|
||||
await this.waitForGCComplete();
|
||||
if (setting.versionUpFlash != "") {
|
||||
new Notice("Open settings and check message, please.");
|
||||
NewNotice("Open settings and check message, please.");
|
||||
return false;
|
||||
}
|
||||
const uri = setting.couchDB_URI + (setting.couchDB_DBNAME == "" ? "" : "/" + setting.couchDB_DBNAME);
|
||||
@@ -820,7 +789,7 @@ export class LocalPouchDB {
|
||||
Logger("Another replication running.");
|
||||
return false;
|
||||
}
|
||||
const dbret = await connectRemoteCouchDB(uri, auth);
|
||||
const dbret = await connectRemoteCouchDB(uri, auth, setting.disableRequestURI);
|
||||
if (typeof dbret === "string") {
|
||||
Logger(`could not connect to ${uri}: ${dbret}`, LOG_LEVEL.NOTICE);
|
||||
return false;
|
||||
@@ -839,15 +808,6 @@ export class LocalPouchDB {
|
||||
locked: false,
|
||||
accepted_nodes: [this.nodeid],
|
||||
};
|
||||
// const remoteInfo = dbret.info;
|
||||
// const localInfo = await this.localDatabase.info();
|
||||
// const remoteDocsCount = remoteInfo.doc_count;
|
||||
// const localDocsCount = localInfo.doc_count;
|
||||
// const remoteUpdSeq = typeof remoteInfo.update_seq == "string" ? Number(remoteInfo.update_seq.split("-")[0]) : remoteInfo.update_seq;
|
||||
// const localUpdSeq = typeof localInfo.update_seq == "string" ? Number(localInfo.update_seq.split("-")[0]) : localInfo.update_seq;
|
||||
|
||||
// Logger(`Database diffences: remote:${remoteDocsCount} docs / last update ${remoteUpdSeq}`);
|
||||
// Logger(`Database diffences: local :${localDocsCount} docs / last update ${localUpdSeq}`);
|
||||
|
||||
const remoteMilestone: EntryMilestoneInfo = await resolveWithIgnoreKnownError(dbret.db.get(MILSTONE_DOCID), defMilestonePoint);
|
||||
this.remoteLocked = remoteMilestone.locked;
|
||||
@@ -870,20 +830,20 @@ export class LocalPouchDB {
|
||||
return { db: dbret.db, info: dbret.info, syncOptionBase, syncOption };
|
||||
}
|
||||
|
||||
async openReplication(setting: ObsidianLiveSyncSettings, keepAlive: boolean, showResult: boolean, callback: (e: PouchDB.Core.ExistingDocument<EntryDoc>[]) => Promise<void>): Promise<boolean> {
|
||||
async openReplication(setting: RemoteDBSettings, keepAlive: boolean, showResult: boolean, callback: (e: PouchDB.Core.ExistingDocument<EntryDoc>[]) => Promise<void>): Promise<boolean> {
|
||||
return await runWithLock("replicate", false, () => {
|
||||
return this._openReplication(setting, keepAlive, showResult, callback, false);
|
||||
});
|
||||
}
|
||||
|
||||
originalSetting: ObsidianLiveSyncSettings = null;
|
||||
originalSetting: RemoteDBSettings = null;
|
||||
// last_seq: number = 200;
|
||||
async _openReplication(setting: ObsidianLiveSyncSettings, keepAlive: boolean, showResult: boolean, callback: (e: PouchDB.Core.ExistingDocument<EntryDoc>[]) => Promise<void>, retrying: boolean): Promise<boolean> {
|
||||
async _openReplication(setting: RemoteDBSettings, keepAlive: boolean, showResult: boolean, callback: (e: PouchDB.Core.ExistingDocument<EntryDoc>[]) => Promise<void>, retrying: boolean): Promise<boolean> {
|
||||
const ret = await this.checkReplicationConnectivity(setting, keepAlive, retrying);
|
||||
if (ret === false) return false;
|
||||
let notice: Notice = null;
|
||||
let notice: WrappedNotice = null;
|
||||
if (showResult) {
|
||||
notice = new Notice("Looking for the point last synchronized point.", 0);
|
||||
notice = NewNotice("Looking for the point last synchronized point.", 0);
|
||||
}
|
||||
const { db, syncOptionBase, syncOption } = ret;
|
||||
//replicate once
|
||||
@@ -919,12 +879,10 @@ export class LocalPouchDB {
|
||||
.on("change", async (e) => {
|
||||
try {
|
||||
if (e.direction == "pull") {
|
||||
// console.log(`pulled data:${e.change.docs.map((e) => e._id).join(",")}`);
|
||||
await callback(e.change.docs);
|
||||
Logger(`replicated ${e.change.docs_read} doc(s)`);
|
||||
this.docArrived += e.change.docs.length;
|
||||
} else {
|
||||
// console.log(`put data:${e.change.docs.map((e) => e._id).join(",")}`);
|
||||
this.docSent += e.change.docs.length;
|
||||
}
|
||||
if (notice != null) {
|
||||
@@ -974,7 +932,7 @@ export class LocalPouchDB {
|
||||
Logger("Replication stopped.", LOG_LEVEL.NOTICE);
|
||||
} else {
|
||||
// Duplicate settings for smaller batch.
|
||||
const xsetting: ObsidianLiveSyncSettings = JSON.parse(JSON.stringify(setting));
|
||||
const xsetting: RemoteDBSettings = JSON.parse(JSON.stringify(setting));
|
||||
xsetting.batch_size = Math.ceil(xsetting.batch_size / 2);
|
||||
xsetting.batches_limit = Math.ceil(xsetting.batches_limit / 2);
|
||||
if (xsetting.batch_size <= 3 || xsetting.batches_limit <= 3) {
|
||||
@@ -1074,14 +1032,14 @@ export class LocalPouchDB {
|
||||
this.disposeHashCache();
|
||||
Logger("Local Database Reset", LOG_LEVEL.NOTICE);
|
||||
}
|
||||
async tryResetRemoteDatabase(setting: ObsidianLiveSyncSettings) {
|
||||
async tryResetRemoteDatabase(setting: RemoteDBSettings) {
|
||||
await this.closeReplication();
|
||||
const uri = setting.couchDB_URI + (setting.couchDB_DBNAME == "" ? "" : "/" + setting.couchDB_DBNAME);
|
||||
const auth: Credential = {
|
||||
username: setting.couchDB_USER,
|
||||
password: setting.couchDB_PASSWORD,
|
||||
};
|
||||
const con = await connectRemoteCouchDB(uri, auth);
|
||||
const con = await connectRemoteCouchDB(uri, auth, setting.disableRequestURI);
|
||||
if (typeof con == "string") return;
|
||||
try {
|
||||
await con.db.destroy();
|
||||
@@ -1092,24 +1050,24 @@ export class LocalPouchDB {
|
||||
Logger(ex, LOG_LEVEL.NOTICE);
|
||||
}
|
||||
}
|
||||
async tryCreateRemoteDatabase(setting: ObsidianLiveSyncSettings) {
|
||||
async tryCreateRemoteDatabase(setting: RemoteDBSettings) {
|
||||
await this.closeReplication();
|
||||
const uri = setting.couchDB_URI + (setting.couchDB_DBNAME == "" ? "" : "/" + setting.couchDB_DBNAME);
|
||||
const auth: Credential = {
|
||||
username: setting.couchDB_USER,
|
||||
password: setting.couchDB_PASSWORD,
|
||||
};
|
||||
const con2 = await connectRemoteCouchDB(uri, auth);
|
||||
const con2 = await connectRemoteCouchDB(uri, auth, setting.disableRequestURI);
|
||||
if (typeof con2 === "string") return;
|
||||
Logger("Remote Database Created or Connected", LOG_LEVEL.NOTICE);
|
||||
}
|
||||
async markRemoteLocked(setting: ObsidianLiveSyncSettings, locked: boolean) {
|
||||
async markRemoteLocked(setting: RemoteDBSettings, locked: boolean) {
|
||||
const uri = setting.couchDB_URI + (setting.couchDB_DBNAME == "" ? "" : "/" + setting.couchDB_DBNAME);
|
||||
const auth: Credential = {
|
||||
username: setting.couchDB_USER,
|
||||
password: setting.couchDB_PASSWORD,
|
||||
};
|
||||
const dbret = await connectRemoteCouchDB(uri, auth);
|
||||
const dbret = await connectRemoteCouchDB(uri, auth, setting.disableRequestURI);
|
||||
if (typeof dbret === "string") {
|
||||
Logger(`could not connect to ${uri}:${dbret}`, LOG_LEVEL.NOTICE);
|
||||
return;
|
||||
@@ -1137,13 +1095,13 @@ export class LocalPouchDB {
|
||||
}
|
||||
await dbret.db.put(remoteMilestone);
|
||||
}
|
||||
async markRemoteResolved(setting: ObsidianLiveSyncSettings) {
|
||||
async markRemoteResolved(setting: RemoteDBSettings) {
|
||||
const uri = setting.couchDB_URI + (setting.couchDB_DBNAME == "" ? "" : "/" + setting.couchDB_DBNAME);
|
||||
const auth: Credential = {
|
||||
username: setting.couchDB_USER,
|
||||
password: setting.couchDB_PASSWORD,
|
||||
};
|
||||
const dbret = await connectRemoteCouchDB(uri, auth);
|
||||
const dbret = await connectRemoteCouchDB(uri, auth, setting.disableRequestURI);
|
||||
if (typeof dbret === "string") {
|
||||
Logger(`could not connect to ${uri}:${dbret}`, LOG_LEVEL.NOTICE);
|
||||
return;
|
||||
|
||||
@@ -1,5 +1,5 @@
import { App, Modal } from "obsidian";
import { escapeStringToHTML } from "./utils";
import { escapeStringToHTML } from "./lib/src/utils";
import ObsidianLiveSyncPlugin from "./main";

export class LogDisplayModal extends Modal {
@@ -1,9 +1,10 @@
|
||||
import { App, Notice, PluginSettingTab, Setting, sanitizeHTMLToDom } from "obsidian";
|
||||
import { EntryDoc, LOG_LEVEL } from "./types";
|
||||
import { path2id, id2path, runWithLock } from "./utils";
|
||||
import { Logger } from "./logger";
|
||||
import { App, PluginSettingTab, Setting, sanitizeHTMLToDom } from "obsidian";
|
||||
import { EntryDoc, LOG_LEVEL } from "./lib/src/types";
|
||||
import { path2id, id2path } from "./utils";
|
||||
import { NewNotice, runWithLock } from "./lib/src/utils";
|
||||
import { Logger } from "./lib/src/logger";
|
||||
import { connectRemoteCouchDB } from "./utils_couchdb";
|
||||
import { testCrypt } from "./e2ee";
|
||||
import { testCrypt } from "./lib/src/e2ee";
|
||||
import ObsidianLiveSyncPlugin from "./main";
|
||||
|
||||
export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
|
||||
@@ -14,10 +15,14 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
|
||||
this.plugin = plugin;
|
||||
}
|
||||
async testConnection(): Promise<void> {
|
||||
const db = await connectRemoteCouchDB(this.plugin.settings.couchDB_URI + (this.plugin.settings.couchDB_DBNAME == "" ? "" : "/" + this.plugin.settings.couchDB_DBNAME), {
|
||||
username: this.plugin.settings.couchDB_USER,
|
||||
password: this.plugin.settings.couchDB_PASSWORD,
|
||||
});
|
||||
const db = await connectRemoteCouchDB(
|
||||
this.plugin.settings.couchDB_URI + (this.plugin.settings.couchDB_DBNAME == "" ? "" : "/" + this.plugin.settings.couchDB_DBNAME),
|
||||
{
|
||||
username: this.plugin.settings.couchDB_USER,
|
||||
password: this.plugin.settings.couchDB_PASSWORD,
|
||||
},
|
||||
this.plugin.settings.disableRequestURI
|
||||
);
|
||||
if (typeof db === "string") {
|
||||
this.plugin.addLog(`could not connect to ${this.plugin.settings.couchDB_URI} : ${this.plugin.settings.couchDB_DBNAME} \n(${db})`, LOG_LEVEL.NOTICE);
|
||||
return;
|
||||
@@ -165,6 +170,12 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
|
||||
this.plugin.settings.couchDB_DBNAME = value;
|
||||
await this.plugin.saveSettings();
|
||||
})
|
||||
),
|
||||
new Setting(containerRemoteDatabaseEl).setName("Use the old connecting method").addToggle((toggle) =>
|
||||
toggle.setValue(this.plugin.settings.disableRequestURI).onChange(async (value) => {
|
||||
this.plugin.settings.disableRequestURI = value;
|
||||
await this.plugin.saveSettings();
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
@@ -603,7 +614,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
|
||||
);
|
||||
|
||||
new Setting(containerMiscellaneousEl)
|
||||
.setName("Use history (beta)")
|
||||
.setName("Use history")
|
||||
.setDesc("Use history dialog (Restart required, auto compaction would be disabled, and more storage will be consumed)")
|
||||
.addToggle((toggle) =>
|
||||
toggle.setValue(this.plugin.settings.useHistory).onChange(async (value) => {
|
||||
@@ -678,7 +689,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
|
||||
.onClick(async () => {
|
||||
const files = this.app.vault.getFiles();
|
||||
Logger("Verify and repair all files started", LOG_LEVEL.NOTICE);
|
||||
const notice = new Notice("", 0);
|
||||
const notice = NewNotice("", 0);
|
||||
let i = 0;
|
||||
for (const file of files) {
|
||||
i++;
|
||||
@@ -704,7 +715,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
|
||||
.setDisabled(false)
|
||||
.setWarning()
|
||||
.onClick(async () => {
|
||||
const notice = new Notice("", 0);
|
||||
const notice = NewNotice("", 0);
|
||||
Logger(`Begin sanity check`, LOG_LEVEL.INFO);
|
||||
notice.setMessage(`Begin sanity check`);
|
||||
await runWithLock("sancheck", true, async () => {
|
||||
@@ -824,7 +835,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
|
||||
|
||||
const updateDisabledOfDeviceAndVaultName = () => {
|
||||
vaultName.setDisabled(this.plugin.settings.autoSweepPlugins || this.plugin.settings.autoSweepPluginsPeriodic);
|
||||
vaultName.setTooltip(this.plugin.settings.autoSweepPlugins || this.plugin.settings.autoSweepPluginsPeriodic ? "You could not change when you enabling auto sweep." : "");
|
||||
vaultName.setTooltip(this.plugin.settings.autoSweepPlugins || this.plugin.settings.autoSweepPluginsPeriodic ? "You could not change when you enabling auto scan." : "");
|
||||
};
|
||||
new Setting(containerPluginSettings).setName("Enable plugin synchronization").addToggle((toggle) =>
|
||||
toggle.setValue(this.plugin.settings.usePluginSync).onChange(async (value) => {
|
||||
@@ -832,16 +843,10 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
|
||||
await this.plugin.saveSettings();
|
||||
})
|
||||
);
|
||||
new Setting(containerPluginSettings).setName("Show own plugins and settings").addToggle((toggle) =>
|
||||
toggle.setValue(this.plugin.settings.showOwnPlugins).onChange(async (value) => {
|
||||
this.plugin.settings.showOwnPlugins = value;
|
||||
await this.plugin.saveSettings();
|
||||
})
|
||||
);
|
||||
|
||||
new Setting(containerPluginSettings)
|
||||
.setName("Sweep plugins automatically")
|
||||
.setDesc("Sweep plugins before replicating.")
|
||||
.setName("Scan plugins automatically")
|
||||
.setDesc("Scan plugins before replicating.")
|
||||
.addToggle((toggle) =>
|
||||
toggle.setValue(this.plugin.settings.autoSweepPlugins).onChange(async (value) => {
|
||||
this.plugin.settings.autoSweepPlugins = value;
|
||||
@@ -851,8 +856,8 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
|
||||
);
|
||||
|
||||
new Setting(containerPluginSettings)
|
||||
.setName("Sweep plugins periodically")
|
||||
.setDesc("Sweep plugins each 1 minutes.")
|
||||
.setName("Scan plugins periodically")
|
||||
.setDesc("Scan plugins each 1 minutes.")
|
||||
.addToggle((toggle) =>
|
||||
toggle.setValue(this.plugin.settings.autoSweepPluginsPeriodic).onChange(async (value) => {
|
||||
this.plugin.settings.autoSweepPluginsPeriodic = value;
|
||||
|
||||
@@ -2,7 +2,7 @@
import ObsidianLiveSyncPlugin from "./main";
import { onMount } from "svelte";
import { DevicePluginList, PluginDataEntry } from "./types";
import { versionNumberString2Number } from "./utils";
import { versionNumberString2Number } from "./lib/src/utils";

type JudgeResult = "" | "NEWER" | "EVEN" | "EVEN_BUT_DIFFERENT" | "OLDER" | "REMOTE_ONLY";

@@ -266,7 +266,7 @@

<div class="ols-plugins-div-buttons">
    <button class="mod-cta" on:click={checkUpdates}>Check Updates</button>
    <button class="mod-cta" on:click={sweepPlugins}>Sweep installed</button>
    <button class="mod-cta" on:click={sweepPlugins}>Scan installed</button>
    <button class="mod-cta" on:click={applyPlugins}>Apply all</button>
</div>
<!-- <div class="ols-plugins-div-buttons">-->
168 src/e2ee.ts
@@ -1,168 +0,0 @@
|
||||
import { Logger } from "./logger";
|
||||
import { LOG_LEVEL } from "./types";
|
||||
|
||||
export type encodedData = [encryptedData: string, iv: string, salt: string];
|
||||
export type KeyBuffer = {
|
||||
index: string;
|
||||
key: CryptoKey;
|
||||
salt: Uint8Array;
|
||||
};
|
||||
|
||||
const KeyBuffs: KeyBuffer[] = [];
|
||||
const decKeyBuffs: KeyBuffer[] = [];
|
||||
|
||||
const KEY_RECYCLE_COUNT = 100;
|
||||
let recycleCount = KEY_RECYCLE_COUNT;
|
||||
|
||||
let semiStaticFieldBuffer: Uint8Array = null;
|
||||
const nonceBuffer: Uint32Array = new Uint32Array(1);
|
||||
|
||||
export async function getKeyForEncrypt(passphrase: string): Promise<[CryptoKey, Uint8Array]> {
|
||||
// For performance, the plugin reuses the key KEY_RECYCLE_COUNT times.
|
||||
const f = KeyBuffs.find((e) => e.index == passphrase);
|
||||
if (f) {
|
||||
recycleCount--;
|
||||
if (recycleCount > 0) {
|
||||
return [f.key, f.salt];
|
||||
}
|
||||
KeyBuffs.remove(f);
|
||||
recycleCount = KEY_RECYCLE_COUNT;
|
||||
}
|
||||
const xpassphrase = new TextEncoder().encode(passphrase);
|
||||
const digest = await crypto.subtle.digest({ name: "SHA-256" }, xpassphrase);
|
||||
const keyMaterial = await crypto.subtle.importKey("raw", digest, { name: "PBKDF2" }, false, ["deriveKey"]);
|
||||
const salt = crypto.getRandomValues(new Uint8Array(16));
|
||||
const key = await crypto.subtle.deriveKey(
|
||||
{
|
||||
name: "PBKDF2",
|
||||
salt,
|
||||
iterations: 100000,
|
||||
hash: "SHA-256",
|
||||
},
|
||||
keyMaterial,
|
||||
{ name: "AES-GCM", length: 256 },
|
||||
false,
|
||||
["encrypt"]
|
||||
);
|
||||
KeyBuffs.push({
|
||||
index: passphrase,
|
||||
key,
|
||||
salt,
|
||||
});
|
||||
while (KeyBuffs.length > 50) {
|
||||
KeyBuffs.shift();
|
||||
}
|
||||
return [key, salt];
|
||||
}
|
||||
|
||||
export async function getKeyForDecryption(passphrase: string, salt: Uint8Array): Promise<[CryptoKey, Uint8Array]> {
|
||||
const bufKey = passphrase + uint8ArrayToHexString(salt);
|
||||
const f = decKeyBuffs.find((e) => e.index == bufKey);
|
||||
if (f) {
|
||||
return [f.key, f.salt];
|
||||
}
|
||||
const xpassphrase = new TextEncoder().encode(passphrase);
|
||||
const digest = await crypto.subtle.digest({ name: "SHA-256" }, xpassphrase);
|
||||
const keyMaterial = await crypto.subtle.importKey("raw", digest, { name: "PBKDF2" }, false, ["deriveKey"]);
|
||||
const key = await crypto.subtle.deriveKey(
|
||||
{
|
||||
name: "PBKDF2",
|
||||
salt,
|
||||
iterations: 100000,
|
||||
hash: "SHA-256",
|
||||
},
|
||||
keyMaterial,
|
||||
{ name: "AES-GCM", length: 256 },
|
||||
false,
|
||||
["decrypt"]
|
||||
);
|
||||
decKeyBuffs.push({
|
||||
index: bufKey,
|
||||
key,
|
||||
salt,
|
||||
});
|
||||
while (decKeyBuffs.length > 50) {
|
||||
decKeyBuffs.shift();
|
||||
}
|
||||
return [key, salt];
|
||||
}
|
||||
|
||||
function getSemiStaticField(reset?: boolean) {
|
||||
// return fixed field of iv.
|
||||
if (semiStaticFieldBuffer != null && !reset) {
|
||||
return semiStaticFieldBuffer;
|
||||
}
|
||||
semiStaticFieldBuffer = crypto.getRandomValues(new Uint8Array(12));
|
||||
return semiStaticFieldBuffer;
|
||||
}
|
||||
|
||||
function getNonce() {
|
||||
// This is nonce, so do not send same thing.
|
||||
nonceBuffer[0]++;
|
||||
if (nonceBuffer[0] > 10000) {
|
||||
// reset semi-static field.
|
||||
getSemiStaticField(true);
|
||||
}
|
||||
return nonceBuffer;
|
||||
}
|
||||
|
||||
function uint8ArrayToHexString(src: Uint8Array): string {
|
||||
return Array.from(src)
|
||||
.map((e: number): string => `00${e.toString(16)}`.slice(-2))
|
||||
.join("");
|
||||
}
|
||||
function hexStringToUint8Array(src: string): Uint8Array {
|
||||
const srcArr = [...src];
|
||||
const arr = srcArr.reduce((acc, _, i) => (i % 2 ? acc : [...acc, srcArr.slice(i, i + 2).join("")]), []).map((e) => parseInt(e, 16));
|
||||
return Uint8Array.from(arr);
|
||||
}
|
||||
export async function encrypt(input: string, passphrase: string) {
|
||||
const [key, salt] = await getKeyForEncrypt(passphrase);
|
||||
// Create initial vector with semifixed part and incremental part
|
||||
// I think it's not good against related-key attacks.
|
||||
const fixedPart = getSemiStaticField();
|
||||
const invocationPart = getNonce();
|
||||
const iv = Uint8Array.from([...fixedPart, ...new Uint8Array(invocationPart.buffer)]);
|
||||
const plainStringified: string = JSON.stringify(input);
|
||||
const plainStringBuffer: Uint8Array = new TextEncoder().encode(plainStringified);
|
||||
const encryptedDataArrayBuffer = await crypto.subtle.encrypt({ name: "AES-GCM", iv }, key, plainStringBuffer);
|
||||
|
||||
const encryptedData = window.btoa(Array.from(new Uint8Array(encryptedDataArrayBuffer), (char) => String.fromCharCode(char)).join(""));
|
||||
|
||||
//return data with iv and salt.
|
||||
const response: encodedData = [encryptedData, uint8ArrayToHexString(iv), uint8ArrayToHexString(salt)];
|
||||
const ret = JSON.stringify(response);
|
||||
return ret;
|
||||
}
|
||||
|
||||
export async function decrypt(encryptedResult: string, passphrase: string): Promise<string> {
|
||||
try {
|
||||
const [encryptedData, ivString, salt]: encodedData = JSON.parse(encryptedResult);
|
||||
const [key] = await getKeyForDecryption(passphrase, hexStringToUint8Array(salt));
|
||||
const iv = hexStringToUint8Array(ivString);
|
||||
// decode base 64, it should increase speed and i should with in MAX_DOC_SIZE_BIN, so it won't OOM.
|
||||
const encryptedDataBin = window.atob(encryptedData);
|
||||
const encryptedDataArrayBuffer = Uint8Array.from(encryptedDataBin.split(""), (char) => char.charCodeAt(0));
|
||||
const plainStringBuffer: ArrayBuffer = await crypto.subtle.decrypt({ name: "AES-GCM", iv }, key, encryptedDataArrayBuffer);
|
||||
const plainStringified = new TextDecoder().decode(plainStringBuffer);
|
||||
const plain = JSON.parse(plainStringified);
|
||||
return plain;
|
||||
} catch (ex) {
|
||||
Logger("Couldn't decode! You should wrong the passphrases", LOG_LEVEL.VERBOSE);
|
||||
Logger(ex, LOG_LEVEL.VERBOSE);
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
|
||||
export async function testCrypt() {
|
||||
const src = "supercalifragilisticexpialidocious";
|
||||
const encoded = await encrypt(src, "passwordTest");
|
||||
const decrypted = await decrypt(encoded, "passwordTest");
|
||||
if (src != decrypted) {
|
||||
Logger("WARNING! Your device would not support encryption.", LOG_LEVEL.VERBOSE);
|
||||
return false;
|
||||
} else {
|
||||
Logger("CRYPT LOGIC OK", LOG_LEVEL.VERBOSE);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
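The module above (now provided by the commonlib submodule and imported elsewhere in this diff as ./lib/src/e2ee) round-trips strings through encrypt()/decrypt(); the stored value is the JSON-stringified [ciphertext, IV, salt] triple described by encodedData. A small illustrative usage, with placeholder strings:

```ts
import { encrypt, decrypt, testCrypt } from "./lib/src/e2ee";

// Illustrative only: the stored value is a JSON string of
// [base64 ciphertext, hex-encoded IV, hex-encoded salt] (see encodedData above).
async function demo() {
    if (!(await testCrypt())) return; // device does not support the required crypto primitives
    const stored = await encrypt("# My note\nSome contents...", "a long passphrase");
    const restored = await decrypt(stored, "a long passphrase");
    console.log(restored); // "# My note\nSome contents..."
}
```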
@@ -1,13 +0,0 @@
import { LOG_LEVEL } from "./types";

// eslint-disable-next-line require-await
export let Logger: (message: any, levlel?: LOG_LEVEL) => Promise<void> = async (message, _) => {
    const timestamp = new Date().toLocaleString();
    const messagecontent = typeof message == "string" ? message : message instanceof Error ? `${message.name}:${message.message}` : JSON.stringify(message, null, 2);
    const newmessage = timestamp + "->" + messagecontent;
    console.log(newmessage);
};

export function setLogger(loggerFun: (message: any, levlel?: LOG_LEVEL) => Promise<void>) {
    Logger = loggerFun;
}
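The console-only Logger above (also moved into the commonlib, imported as ./lib/src/logger elsewhere in this diff) is replaced at runtime via setLogger. A minimal, hypothetical wiring:

```ts
import { Logger, setLogger } from "./lib/src/logger";
import { LOG_LEVEL } from "./lib/src/types";

// Swap the default console logger for one that could also surface notices in the UI.
setLogger(async (message, level) => {
    const text = typeof message == "string" ? message : JSON.stringify(message, null, 2);
    console.log(`${new Date().toLocaleString()} -> ${text}`);
    if (level !== undefined && level >= LOG_LEVEL.NOTICE) {
        // A real implementation would show an Obsidian Notice here.
    }
});

Logger("Logger replaced", LOG_LEVEL.NOTICE);
```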
102 src/main.ts
@@ -1,25 +1,24 @@
|
||||
import { debounce, Notice, Plugin, TFile, addIcon, TFolder, normalizePath, TAbstractFile, Editor, MarkdownView, PluginManifest, Modal, App } from "obsidian";
|
||||
import { diff_match_patch } from "diff-match-patch";
|
||||
|
||||
import { EntryDoc, LoadedEntry, ObsidianLiveSyncSettings, diff_check_result, diff_result_leaf, EntryBody, LOG_LEVEL, VER, DEFAULT_SETTINGS, diff_result, FLAGMD_REDFLAG } from "./lib/src/types";
|
||||
import { PluginDataEntry, PERIODIC_PLUGIN_SWEEP, PluginList, DevicePluginList } from "./types";
|
||||
import {
|
||||
EntryDoc,
|
||||
LoadedEntry,
|
||||
ObsidianLiveSyncSettings,
|
||||
diff_check_result,
|
||||
diff_result_leaf,
|
||||
EntryBody,
|
||||
PluginDataEntry,
|
||||
LOG_LEVEL,
|
||||
VER,
|
||||
PERIODIC_PLUGIN_SWEEP,
|
||||
DEFAULT_SETTINGS,
|
||||
PluginList,
|
||||
DevicePluginList,
|
||||
diff_result,
|
||||
FLAGMD_REDFLAG,
|
||||
} from "./types";
|
||||
import { base64ToString, arrayBufferToBase64, base64ToArrayBuffer, isValidPath, versionNumberString2Number, id2path, path2id, runWithLock, shouldBeIgnored, getProcessingCounts, setLockNotifier, isPlainText } from "./utils";
|
||||
import { Logger, setLogger } from "./logger";
|
||||
base64ToString,
|
||||
arrayBufferToBase64,
|
||||
base64ToArrayBuffer,
|
||||
isValidPath,
|
||||
versionNumberString2Number,
|
||||
runWithLock,
|
||||
shouldBeIgnored,
|
||||
getProcessingCounts,
|
||||
setLockNotifier,
|
||||
isPlainText,
|
||||
setNoticeClass,
|
||||
NewNotice,
|
||||
allSettledWithConcurrencyLimit,
|
||||
} from "./lib/src/utils";
|
||||
import { Logger, setLogger } from "./lib/src/logger";
|
||||
import { LocalPouchDB } from "./LocalPouchDB";
|
||||
import { LogDisplayModal } from "./LogDisplayModal";
|
||||
import { ConflictResolveModal } from "./ConflictResolveModal";
|
||||
@@ -27,7 +26,8 @@ import { ObsidianLiveSyncSettingTab } from "./ObsidianLiveSyncSettingTab";
|
||||
import { DocumentHistoryModal } from "./DocumentHistoryModal";
|
||||
|
||||
import PluginPane from "./PluginPane.svelte";
|
||||
|
||||
import { id2path, path2id } from "./utils";
|
||||
setNoticeClass(Notice);
|
||||
class PluginDialogModal extends Modal {
|
||||
plugin: ObsidianLiveSyncPlugin;
|
||||
logEl: HTMLDivElement;
|
||||
@@ -427,10 +427,21 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
return;
|
||||
}
|
||||
if (this.settings.suspendFileWatching) return;
|
||||
|
||||
// If batchsave is enabled, queue all changes and do nothing.
|
||||
if (this.settings.batchSave) {
|
||||
this.batchFileChange = Array.from(new Set([...this.batchFileChange, file.path]));
|
||||
this.refreshStatusText();
|
||||
~(async () => {
|
||||
const meta = await this.localDatabase.getDBEntryMeta(file.path);
|
||||
if (meta != false) {
|
||||
const localMtime = ~~(file.stat.mtime / 1000);
|
||||
const docMtime = ~~(meta.mtime / 1000);
|
||||
if (localMtime !== docMtime) {
|
||||
// Perhaps we have to modify (to using newer doc), but we don't be sure to every device's clock is adjusted.
|
||||
this.batchFileChange = Array.from(new Set([...this.batchFileChange, file.path]));
|
||||
this.refreshStatusText();
|
||||
}
|
||||
}
|
||||
})();
|
||||
return;
|
||||
}
|
||||
this.watchVaultChangeAsync(file, ...args);
|
||||
@@ -691,7 +702,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
}
|
||||
}
|
||||
|
||||
async doc2storate_modify(docEntry: EntryBody, file: TFile, force?: boolean) {
|
||||
async doc2storage_modify(docEntry: EntryBody, file: TFile, force?: boolean) {
|
||||
const pathSrc = id2path(docEntry._id);
|
||||
if (shouldBeIgnored(pathSrc)) {
|
||||
return;
|
||||
@@ -770,7 +781,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
} else if (targetFile instanceof TFile) {
|
||||
const doc = change;
|
||||
const file = targetFile;
|
||||
await this.doc2storate_modify(doc, file);
|
||||
await this.doc2storage_modify(doc, file);
|
||||
this.queueConflictedCheck(file);
|
||||
} else {
|
||||
Logger(`${id2path(change._id)} is already exist as the folder`);
|
||||
@@ -836,7 +847,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
a.addEventListener("click", () => this.showPluginSyncModal());
|
||||
});
|
||||
});
|
||||
new Notice(fragment, 10000);
|
||||
NewNotice(fragment, 10000);
|
||||
} else {
|
||||
Logger("Everything is up to date.", LOG_LEVEL.NOTICE);
|
||||
}
|
||||
@@ -953,7 +964,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
|
||||
async replicate(showMessage?: boolean) {
|
||||
if (this.settings.versionUpFlash != "") {
|
||||
new Notice("Open settings and check message, please.");
|
||||
NewNotice("Open settings and check message, please.");
|
||||
return;
|
||||
}
|
||||
await this.applyBatchChange();
|
||||
@@ -991,7 +1002,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
// synchronize all files between database and storage.
|
||||
let notice: Notice = null;
|
||||
if (showingNotice) {
|
||||
notice = new Notice("Initializing", 0);
|
||||
notice = NewNotice("Initializing", 0);
|
||||
}
|
||||
const filesStorage = this.app.vault.getFiles();
|
||||
const filesStorageName = filesStorage.map((e) => e.path);
|
||||
@@ -1013,12 +1024,14 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
Logger(procedurename);
|
||||
let i = 0;
|
||||
// let lastTicks = performance.now() + 2000;
|
||||
let workProcs = 0;
|
||||
const procs = objects.map(async (e) => {
|
||||
try {
|
||||
workProcs++;
|
||||
await callback(e);
|
||||
i++;
|
||||
if (i % 25 == 0) {
|
||||
const notify = `${procedurename} : ${i}/${count}`;
|
||||
const notify = `${procedurename} : ${workProcs}/${count} (Pending:${workProcs})`;
|
||||
if (notice != null) notice.setMessage(notify);
|
||||
Logger(notify);
|
||||
this.setStatusBarText(notify);
|
||||
@@ -1026,27 +1039,12 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
} catch (ex) {
|
||||
Logger(`Error while ${procedurename}`, LOG_LEVEL.NOTICE);
|
||||
Logger(ex);
|
||||
} finally {
|
||||
workProcs--;
|
||||
}
|
||||
});
|
||||
// @ts-ignore
|
||||
if (!Promise.allSettled) {
|
||||
await Promise.all(
|
||||
procs.map((p) =>
|
||||
p
|
||||
.then((value) => ({
|
||||
status: "fulfilled",
|
||||
value,
|
||||
}))
|
||||
.catch((reason) => ({
|
||||
status: "rejected",
|
||||
reason,
|
||||
}))
|
||||
)
|
||||
);
|
||||
} else {
|
||||
// @ts-ignore
|
||||
await Promise.allSettled(procs);
|
||||
}
|
||||
|
||||
await allSettledWithConcurrencyLimit(procs, 10);
|
||||
};
|
||||
await runAll("UPDATE DATABASE", onlyInStorage, async (e) => {
|
||||
Logger(`Update into ${e.path}`);
|
||||
@@ -1232,10 +1230,10 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
//concat both,
|
||||
// write data,and delete both old rev.
|
||||
const p = conflictCheckResult.diff.map((e) => e[1]).join("");
|
||||
await this.app.vault.modify(file, p);
|
||||
await this.updateIntoDB(file);
|
||||
await this.localDatabase.deleteDBEntry(file.path, { rev: conflictCheckResult.left.rev });
|
||||
await this.localDatabase.deleteDBEntry(file.path, { rev: conflictCheckResult.right.rev });
|
||||
await this.app.vault.modify(file, p);
|
||||
await this.updateIntoDB(file);
|
||||
await this.pullFile(file.path);
|
||||
Logger("concat both file");
|
||||
setTimeout(() => {
|
||||
@@ -1318,7 +1316,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
const file = targetFile;
|
||||
const doc = await this.localDatabase.getDBEntry(filename, rev ? { rev: rev } : null, false, waitForReady);
|
||||
if (doc === false) return;
|
||||
await this.doc2storate_modify(doc, file, force);
|
||||
await this.doc2storage_modify(doc, file, force);
|
||||
} else {
|
||||
Logger(`target files:${filename} is exists as the folder`);
|
||||
//something went wrong..
|
||||
@@ -1343,7 +1341,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
Logger(`${storageMtime} < ${docMtime}`);
|
||||
const docx = await this.localDatabase.getDBEntry(file.path, null, false, false);
|
||||
if (docx != false) {
|
||||
await this.doc2storate_modify(docx, file);
|
||||
await this.doc2storage_modify(docx, file);
|
||||
}
|
||||
} else {
|
||||
// Logger("EVEN :" + file.path, LOG_LEVEL.VERBOSE);
|
||||
@@ -1460,7 +1458,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
Logger("You have to set your device and vault name.", LOG_LEVEL.NOTICE);
|
||||
return;
|
||||
}
|
||||
Logger("Sweeping plugins", logLevel);
|
||||
Logger("Scanning plugins", logLevel);
|
||||
const db = this.localDatabase.localDatabase;
|
||||
const oldDocs = await db.allDocs({
|
||||
startkey: `ps:${this.deviceAndVaultName}-`,
|
||||
@@ -1532,7 +1530,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
|
||||
return e.doc;
|
||||
});
|
||||
await db.bulkDocs(delDocs);
|
||||
Logger(`Sweep plugin done.`, logLevel);
|
||||
Logger(`Scan plugin done.`, logLevel);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
9 src/pouchdb-browser.ts Normal file
@@ -0,0 +1,9 @@
import PouchDB from "pouchdb-core";
import IDBPouch from "pouchdb-adapter-idb";
import HttpPouch from "pouchdb-adapter-http";
import mapreduce from "pouchdb-mapreduce";
import replication from "pouchdb-replication";

PouchDB.plugin(IDBPouch).plugin(HttpPouch).plugin(mapreduce).plugin(replication);

export { PouchDB };
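This new module replaces the webpacked pouchdb-browser bundle by composing only the adapters the plugin needs. An illustrative use of the composed PouchDB follows; the database name, URL, and credentials are placeholders, not values from the plugin:

```ts
import { PouchDB } from "./pouchdb-browser";

// Local vault database backed by IndexedDB (pouchdb-adapter-idb).
const localDatabase = new PouchDB("obsidian-livesync-local", { adapter: "idb" });

// Remote CouchDB endpoint, reachable over HTTP (pouchdb-adapter-http).
const remoteDatabase = new PouchDB("https://couchdb.example.net/vault", {
    auth: { username: "user", password: "password" },
});

// One-shot push replication; live sync uses the same API with { live: true, retry: true }.
localDatabase.replicate.to(remoteDatabase, { batch_size: 250, batches_limit: 40 });
```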
215 src/types.ts
@@ -1,152 +1,7 @@
|
||||
// docs should be encoded as base64, so 1 char -> 1 bytes
|
||||
// and cloudant limitation is 1MB , we use 900kb;
|
||||
|
||||
import { PluginManifest } from "obsidian";
|
||||
import * as PouchDB from "pouchdb";
|
||||
import { DatabaseEntry } from "./lib/src/types";
|
||||
|
||||
export const MAX_DOC_SIZE = 1000; // for .md file, but if delimiters exists. use that before.
|
||||
export const MAX_DOC_SIZE_BIN = 102400; // 100kb
|
||||
export const VER = 10;
|
||||
|
||||
export const RECENT_MOFIDIED_DOCS_QTY = 30;
|
||||
export const LEAF_WAIT_TIMEOUT = 90000; // in synchronization, waiting missing leaf time out.
|
||||
export const LOG_LEVEL = {
|
||||
VERBOSE: 1,
|
||||
INFO: 10,
|
||||
NOTICE: 100,
|
||||
URGENT: 1000,
|
||||
} as const;
|
||||
export type LOG_LEVEL = typeof LOG_LEVEL[keyof typeof LOG_LEVEL];
|
||||
export const VERSIONINFO_DOCID = "obsydian_livesync_version";
|
||||
export const MILSTONE_DOCID = "_local/obsydian_livesync_milestone";
|
||||
export const NODEINFO_DOCID = "_local/obsydian_livesync_nodeinfo";
|
||||
|
||||
export interface ObsidianLiveSyncSettings {
|
||||
couchDB_URI: string;
|
||||
couchDB_USER: string;
|
||||
couchDB_PASSWORD: string;
|
||||
couchDB_DBNAME: string;
|
||||
liveSync: boolean;
|
||||
syncOnSave: boolean;
|
||||
syncOnStart: boolean;
|
||||
syncOnFileOpen: boolean;
|
||||
savingDelay: number;
|
||||
lessInformationInLog: boolean;
|
||||
gcDelay: number;
|
||||
versionUpFlash: string;
|
||||
minimumChunkSize: number;
|
||||
longLineThreshold: number;
|
||||
showVerboseLog: boolean;
|
||||
suspendFileWatching: boolean;
|
||||
trashInsteadDelete: boolean;
|
||||
periodicReplication: boolean;
|
||||
periodicReplicationInterval: number;
|
||||
encrypt: boolean;
|
||||
passphrase: string;
|
||||
workingEncrypt: boolean;
|
||||
workingPassphrase: string;
|
||||
doNotDeleteFolder: boolean;
|
||||
resolveConflictsByNewerFile: boolean;
|
||||
batchSave: boolean;
|
||||
deviceAndVaultName: string;
|
||||
usePluginSettings: boolean;
|
||||
showOwnPlugins: boolean;
|
||||
showStatusOnEditor: boolean;
|
||||
usePluginSync: boolean;
|
||||
autoSweepPlugins: boolean;
|
||||
autoSweepPluginsPeriodic: boolean;
|
||||
notifyPluginOrSettingUpdated: boolean;
|
||||
checkIntegrityOnSave: boolean;
|
||||
batch_size: number;
|
||||
batches_limit: number;
|
||||
useHistory: boolean;
|
||||
}
|
||||
|
||||
export const DEFAULT_SETTINGS: ObsidianLiveSyncSettings = {
|
||||
couchDB_URI: "",
|
||||
couchDB_USER: "",
|
||||
couchDB_PASSWORD: "",
|
||||
couchDB_DBNAME: "",
|
||||
liveSync: false,
|
||||
syncOnSave: false,
|
||||
syncOnStart: false,
|
||||
savingDelay: 200,
|
||||
lessInformationInLog: false,
|
||||
gcDelay: 300,
|
||||
versionUpFlash: "",
|
||||
minimumChunkSize: 20,
|
||||
longLineThreshold: 250,
|
||||
showVerboseLog: false,
|
||||
suspendFileWatching: false,
|
||||
trashInsteadDelete: true,
|
||||
periodicReplication: false,
|
||||
periodicReplicationInterval: 60,
|
||||
syncOnFileOpen: false,
|
||||
encrypt: false,
|
||||
passphrase: "",
|
||||
workingEncrypt: false,
|
||||
workingPassphrase: "",
|
||||
doNotDeleteFolder: false,
|
||||
resolveConflictsByNewerFile: false,
|
||||
batchSave: false,
|
||||
deviceAndVaultName: "",
|
||||
usePluginSettings: false,
|
||||
showOwnPlugins: false,
|
||||
showStatusOnEditor: false,
|
||||
usePluginSync: false,
|
||||
autoSweepPlugins: false,
|
||||
autoSweepPluginsPeriodic: false,
|
||||
notifyPluginOrSettingUpdated: false,
|
||||
checkIntegrityOnSave: false,
|
||||
batch_size: 250,
|
||||
batches_limit: 40,
|
||||
useHistory: false,
|
||||
};
|
||||
|
||||
export const PERIODIC_PLUGIN_SWEEP = 60;
|
||||
|
||||
export interface Entry {
|
||||
_id: string;
|
||||
data: string;
|
||||
_rev?: string;
|
||||
ctime: number;
|
||||
mtime: number;
|
||||
size: number;
|
||||
_deleted?: boolean;
|
||||
_conflicts?: string[];
|
||||
type?: "notes";
|
||||
}
|
||||
export interface NewEntry {
|
||||
_id: string;
|
||||
children: string[];
|
||||
_rev?: string;
|
||||
ctime: number;
|
||||
mtime: number;
|
||||
size: number;
|
||||
_deleted?: boolean;
|
||||
_conflicts?: string[];
|
||||
NewNote: true;
|
||||
type: "newnote";
|
||||
}
|
||||
export interface PlainEntry {
|
||||
_id: string;
|
||||
children: string[];
|
||||
_rev?: string;
|
||||
ctime: number;
|
||||
mtime: number;
|
||||
size: number;
|
||||
_deleted?: boolean;
|
||||
NewNote: true;
|
||||
_conflicts?: string[];
|
||||
type: "plain";
|
||||
}
|
||||
export type LoadedEntry = Entry & {
|
||||
children: string[];
|
||||
datatype: "plain" | "newnote";
|
||||
};
|
||||
|
||||
export interface PluginDataEntry {
|
||||
_id: string;
|
||||
export interface PluginDataEntry extends DatabaseEntry {
|
||||
deviceVaultName: string;
|
||||
mtime: number;
|
||||
manifest: PluginManifest;
|
||||
@@ -155,73 +10,10 @@ export interface PluginDataEntry {
|
||||
styleCss?: string;
|
||||
// it must be encrypted.
|
||||
dataJson?: string;
|
||||
_rev?: string;
|
||||
_deleted?: boolean;
|
||||
_conflicts?: string[];
|
||||
type: "plugin";
|
||||
}
|
||||
|
||||
export interface EntryLeaf {
|
||||
_id: string;
|
||||
data: string;
|
||||
_deleted?: boolean;
|
||||
type: "leaf";
|
||||
_rev?: string;
|
||||
}
|
||||
|
||||
export interface EntryVersionInfo {
|
||||
_id: typeof VERSIONINFO_DOCID;
|
||||
_rev?: string;
|
||||
type: "versioninfo";
|
||||
version: number;
|
||||
_deleted?: boolean;
|
||||
}
|
||||
|
||||
export interface EntryMilestoneInfo {
|
||||
_id: typeof MILSTONE_DOCID;
|
||||
_rev?: string;
|
||||
type: "milestoneinfo";
|
||||
_deleted?: boolean;
|
||||
created: number;
|
||||
accepted_nodes: string[];
|
||||
locked: boolean;
|
||||
}
|
||||
|
||||
export interface EntryNodeInfo {
|
||||
_id: typeof NODEINFO_DOCID;
|
||||
_rev?: string;
|
||||
_deleted?: boolean;
|
||||
type: "nodeinfo";
|
||||
nodeid: string;
|
||||
}
|
||||
|
||||
export type EntryBody = Entry | NewEntry | PlainEntry;
|
||||
export type EntryDoc = EntryBody | LoadedEntry | EntryLeaf | EntryVersionInfo | EntryMilestoneInfo | EntryNodeInfo;
|
||||
|
||||
export type diff_result_leaf = {
|
||||
rev: string;
|
||||
data: string;
|
||||
ctime: number;
|
||||
mtime: number;
|
||||
};
|
||||
export type dmp_result = Array<[number, string]>;
|
||||
|
||||
export type diff_result = {
|
||||
left: diff_result_leaf;
|
||||
right: diff_result_leaf;
|
||||
diff: dmp_result;
|
||||
};
|
||||
export type diff_check_result = boolean | diff_result;
|
||||
|
||||
export type Credential = {
|
||||
username: string;
|
||||
password: string;
|
||||
};
|
||||
|
||||
export type EntryDocResponse = EntryDoc & PouchDB.Core.IdMeta & PouchDB.Core.GetMeta;
|
||||
|
||||
export type DatabaseConnectingStatus = "STARTED" | "NOT_CONNECTED" | "PAUSED" | "CONNECTED" | "COMPLETED" | "CLOSED" | "ERRORED";
|
||||
|
||||
export interface PluginList {
|
||||
[key: string]: PluginDataEntry[];
|
||||
}
|
||||
@@ -229,5 +21,4 @@ export interface PluginList {
|
||||
export interface DevicePluginList {
|
||||
[key: string]: PluginDataEntry;
|
||||
}
|
||||
|
||||
export const FLAGMD_REDFLAG = "redflag.md";
|
||||
export const PERIODIC_PLUGIN_SWEEP = 60;
|
||||
|
||||
243 src/utils.ts
@@ -1,249 +1,14 @@
|
||||
import { normalizePath } from "obsidian";
import { Logger } from "./logger";
import { FLAGMD_REDFLAG, LOG_LEVEL } from "./types";

export function arrayBufferToBase64(buffer: ArrayBuffer): Promise<string> {
    return new Promise((res) => {
        const blob = new Blob([buffer], { type: "application/octet-binary" });
        const reader = new FileReader();
        reader.onload = function (evt) {
            const dataurl = evt.target.result.toString();
            res(dataurl.substr(dataurl.indexOf(",") + 1));
        };
        reader.readAsDataURL(blob);
    });
}

export function base64ToString(base64: string): string {
    try {
        const binary_string = window.atob(base64);
        const len = binary_string.length;
        const bytes = new Uint8Array(len);
        for (let i = 0; i < len; i++) {
            bytes[i] = binary_string.charCodeAt(i);
        }
        return new TextDecoder().decode(bytes);
    } catch (ex) {
        return base64;
    }
}
export function base64ToArrayBuffer(base64: string): ArrayBuffer {
    try {
        const binary_string = window.atob(base64);
        const len = binary_string.length;
        const bytes = new Uint8Array(len);
        for (let i = 0; i < len; i++) {
            bytes[i] = binary_string.charCodeAt(i);
        }
        return bytes.buffer;
    } catch (ex) {
        try {
            return new Uint16Array(
                [].map.call(base64, function (c: string) {
                    return c.charCodeAt(0);
                })
            ).buffer;
        } catch (ex2) {
            return null;
        }
    }
}
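
// Editorial note: not part of this commit. A minimal round-trip sketch for the two base64
// helpers above, e.g. when shuttling binary file contents through the database as strings:
async function base64RoundTripExample(buf: ArrayBuffer): Promise<boolean> {
    const b64 = await arrayBufferToBase64(buf);
    const restored = base64ToArrayBuffer(b64);
    // On the normal decode path the restored buffer matches the original byte-for-byte length.
    return restored !== null && restored.byteLength === buf.byteLength;
}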

export const escapeStringToHTML = (str: string) => {
    if (!str) return "";
    return str.replace(/[<>&"'`]/g, (match) => {
        const escape: any = {
            "<": "&lt;",
            ">": "&gt;",
            "&": "&amp;",
            '"': "&quot;",
            "'": "&#39;",
            "`": "&#x60;",
        };
        return escape[match];
    });
};

export function resolveWithIgnoreKnownError<T>(p: Promise<T>, def: T): Promise<T> {
    return new Promise((res, rej) => {
        p.then(res).catch((ex) => (ex.status && ex.status == 404 ? res(def) : rej(ex)));
    });
}

export function isValidPath(filename: string): boolean {
    // eslint-disable-next-line no-control-regex
    const regex = /[\u0000-\u001f]|[\\":?<>|*#]/g;
    let x = filename.replace(regex, "_");
    const win = /(\\|\/)(COM\d|LPT\d|CON|PRN|AUX|NUL|CLOCK$)($|\.)/gi;
    const sx = (x = x.replace(win, "/_"));
    return sx == filename;
}

export function shouldBeIgnored(filename: string): boolean {
    if (filename == FLAGMD_REDFLAG) {
        return true;
    }
    return false;
}

export function versionNumberString2Number(version: string): number {
    return version // "1.23.45"
        .split(".") // 1 23 45
        .reverse() // 45 23 1
        .map((e, i) => ((e as any) / 1) * 1000 ** i) // 45 23000 1000000
        .reduce((prev, current) => prev + current, 0); // 1023045
}

export const delay = (ms: number): Promise<void> => {
    return new Promise((res) => {
        setTimeout(() => {
            res();
        }, ms);
    });
};
import { path2id_base, id2path_base } from "./lib/src/utils";

// For backward compatibility, using the path for determining id.
// Only CouchDB nonacceptable ID (that starts with an underscore) has been prefixed with "/".
// The first slash will be deleted when the path is normalized.
export function path2id(filename: string): string {
    let x = normalizePath(filename);
    if (x.startsWith("_")) x = "/" + x;
    return x;
    const x = normalizePath(filename);
    return path2id_base(x);
}
export function id2path(filename: string): string {
    return normalizePath(filename);
}
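
// Editorial note: not part of this commit. The comment block above describes the legacy mapping;
// the paths below are hypothetical examples of that documented behaviour.
// path2id("_templates/daily.md") === "/_templates/daily.md"  (an id starting with "_" is not acceptable to CouchDB, so "/" is prefixed)
// path2id("notes/daily.md")      === "notes/daily.md"        (ordinary paths are stored unchanged)
// id2path("/_templates/daily.md") === "_templates/daily.md"  (normalizePath() deletes the first slash again, as the comment states)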

const runningProcs: string[] = [];
const pendingProcs: { [key: string]: (() => Promise<void>)[] } = {};
function objectToKey(key: any): string {
    if (typeof key === "string") return key;
    const keys = Object.keys(key).sort((a, b) => a.localeCompare(b));
    return keys.map((e) => e + objectToKey(key[e])).join(":");
}
export function getProcessingCounts() {
    let count = 0;
    for (const v in pendingProcs) {
        count += pendingProcs[v].length;
    }
    count += runningProcs.length;
    return count;
}

let externalNotifier: () => void = () => {};
let notifyTimer: number = null;
export function setLockNotifier(fn: () => void) {
    externalNotifier = fn;
}
function notifyLock() {
    if (notifyTimer != null) {
        window.clearTimeout(notifyTimer);
    }
    notifyTimer = window.setTimeout(() => {
        externalNotifier();
    }, 100);
}
// Just run async/await like transaction ISOLATION SERIALIZABLE
export function runWithLock<T>(key: unknown, ignoreWhenRunning: boolean, proc: () => Promise<T>): Promise<T> {
    // Logger(`Lock:${key}:enter`, LOG_LEVEL.VERBOSE);
    const lockKey = typeof key === "string" ? key : objectToKey(key);
    const handleNextProcs = () => {
        if (typeof pendingProcs[lockKey] === "undefined") {
            // simply unlock
            runningProcs.remove(lockKey);
            notifyLock();
            // Logger(`Lock:${lockKey}:released`, LOG_LEVEL.VERBOSE);
        } else {
            Logger(`Lock:${lockKey}:left ${pendingProcs[lockKey].length}`, LOG_LEVEL.VERBOSE);
            let nextProc = null;
            nextProc = pendingProcs[lockKey].shift();
            notifyLock();
            if (nextProc) {
                // left some
                nextProc()
                    .then()
                    .catch((err) => {
                        Logger(err);
                    })
                    .finally(() => {
                        if (pendingProcs && lockKey in pendingProcs && pendingProcs[lockKey].length == 0) {
                            delete pendingProcs[lockKey];
                            notifyLock();
                        }
                        queueMicrotask(() => {
                            handleNextProcs();
                        });
                    });
            } else {
                if (pendingProcs && lockKey in pendingProcs && pendingProcs[lockKey].length == 0) {
                    delete pendingProcs[lockKey];
                    notifyLock();
                }
            }
        }
    };
    if (runningProcs.contains(lockKey)) {
        if (ignoreWhenRunning) {
            return null;
        }
        if (typeof pendingProcs[lockKey] === "undefined") {
            pendingProcs[lockKey] = [];
        }
        let responderRes: (value: T | PromiseLike<T>) => void;
        let responderRej: (reason?: unknown) => void;
        const responder = new Promise<T>((res, rej) => {
            responderRes = res;
            responderRej = rej;
            // wait for subproc resolved
        });
        const subproc = () =>
            new Promise<void>((res, rej) => {
                proc()
                    .then((v) => {
                        // Logger(`Lock:${key}:processed`, LOG_LEVEL.VERBOSE);
                        handleNextProcs();
                        responderRes(v);
                        res();
                    })
                    .catch((reason) => {
                        Logger(`Lock:${key}:rejected`, LOG_LEVEL.VERBOSE);
                        handleNextProcs();
                        rej(reason);
                        responderRej(reason);
                    });
            });

        pendingProcs[lockKey].push(subproc);
        notifyLock();
        // Logger(`Lock:${lockKey}:queued:left${pendingProcs[lockKey].length}`, LOG_LEVEL.VERBOSE);
        return responder;
    } else {
        runningProcs.push(lockKey);
        notifyLock();
        // Logger(`Lock:${lockKey}:acquired`, LOG_LEVEL.VERBOSE);
        return new Promise((res, rej) => {
            proc()
                .then((v) => {
                    handleNextProcs();
                    res(v);
                })
                .catch((reason) => {
                    handleNextProcs();
                    rej(reason);
                });
        });
    }
}
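
// Editorial note: not part of this commit. runWithLock serializes asynchronous work per key:
// concurrent callers with the same key are queued, or dropped when ignoreWhenRunning is true.
// A minimal usage sketch with a hypothetical document key:
async function saveTwiceExample() {
    const first = runWithLock("doc:welcome.md", false, async () => {
        // ... write revision A ...
        return "A";
    });
    const second = runWithLock("doc:welcome.md", false, async () => {
        // ... write revision B; this only starts after revision A has settled ...
        return "B";
    });
    // A caller passing ignoreWhenRunning = true would instead receive null while the key is busy.
    return Promise.all([first, second]); // ["A", "B"]
}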

export function isPlainText(filename: string): boolean {
    if (filename.endsWith(".md")) return true;
    if (filename.endsWith(".txt")) return true;
    if (filename.endsWith(".svg")) return true;
    if (filename.endsWith(".html")) return true;
    if (filename.endsWith(".csv")) return true;
    if (filename.endsWith(".css")) return true;
    if (filename.endsWith(".js")) return true;
    if (filename.endsWith(".xml")) return true;

    return false;
    return id2path_base(normalizePath(filename));
}

@@ -1,7 +1,8 @@
import { Logger } from "./logger";
import { LOG_LEVEL, VER, VERSIONINFO_DOCID, EntryVersionInfo, EntryDoc } from "./types";
import { resolveWithIgnoreKnownError } from "./utils";
import { PouchDB } from "../pouchdb-browser-webpack/dist/pouchdb-browser.js";
import { Logger } from "./lib/src/logger";
import { LOG_LEVEL, VER, VERSIONINFO_DOCID, EntryVersionInfo, EntryDoc } from "./lib/src/types";
import { resolveWithIgnoreKnownError } from "./lib/src/utils";
import { PouchDB } from "./pouchdb-browser";
import { requestUrl, RequestUrlParam, RequestUrlResponse } from "obsidian";

export const isValidRemoteCouchDBURI = (uri: string): boolean => {
    if (uri.startsWith("https://")) return true;
@@ -12,8 +13,30 @@ let last_post_successed = false;
export const getLastPostFailedBySize = () => {
    return !last_post_successed;
};
export const connectRemoteCouchDB = async (uri: string, auth: { username: string; password: string }): Promise<string | { db: PouchDB.Database<EntryDoc>; info: PouchDB.Core.DatabaseInfo }> => {
const fetchByAPI = async (request: RequestUrlParam): Promise<RequestUrlResponse> => {
    const ret = await requestUrl(request);
    if (ret.status - (ret.status % 100) !== 200) {
        const er: Error & { status?: number } = new Error(`Request Error:${ret.status}`);
        if (ret.json) {
            er.message = ret.json.reason;
            er.name = `${ret.json.error ?? ""}:${ret.json.message ?? ""}`;
        }
        er.status = ret.status;
        throw er;
    }
    return ret;
};

export const connectRemoteCouchDB = async (uri: string, auth: { username: string; password: string }, disableRequestURI: boolean): Promise<string | { db: PouchDB.Database<EntryDoc>; info: PouchDB.Core.DatabaseInfo }> => {
    if (!isValidRemoteCouchDBURI(uri)) return "Remote URI is not valid";
    let authHeader = "";
    if (auth.username && auth.password) {
        const utf8str = String.fromCharCode.apply(null, new TextEncoder().encode(`${auth.username}:${auth.password}`));
        const encoded = window.btoa(utf8str);
        authHeader = "Basic " + encoded;
    } else {
        authHeader = "";
    }
    const conf: PouchDB.HttpAdapter.HttpAdapterConfiguration = {
        adapter: "http",
        auth,
@@ -35,6 +58,51 @@ export const connectRemoteCouchDB = async (uri: string, auth: { username: string
            }
            size = ` (${opts_length})`;
        }

        if (!disableRequestURI && typeof url == "string" && typeof (opts.body ?? "") == "string") {
            const body = opts.body as string;

            const transformedHeaders = { ...(opts.headers as Record<string, string>) };
            if (authHeader != "") transformedHeaders["authorization"] = authHeader;
            delete transformedHeaders["host"];
            delete transformedHeaders["Host"];
            delete transformedHeaders["content-length"];
            delete transformedHeaders["Content-Length"];
            const requestParam: RequestUrlParam = {
                url: url as string,
                method: opts.method,
                body: body,
                headers: transformedHeaders,
                contentType: "application/json",
                // contentType: opts.headers,
            };

            try {
                const r = await fetchByAPI(requestParam);
                if (method == "POST" || method == "PUT") {
                    last_post_successed = r.status - (r.status % 100) == 200;
                } else {
                    last_post_successed = true;
                }
                Logger(`HTTP:${method}${size} to:${localURL} -> ${r.status}`, LOG_LEVEL.VERBOSE);

                return new Response(r.arrayBuffer, {
                    headers: r.headers,
                    status: r.status,
                    statusText: `${r.status}`,
                });
            } catch (ex) {
                Logger(`HTTP:${method}${size} to:${localURL} -> failed`, LOG_LEVEL.VERBOSE);
                if (!size_ok && (method == "POST" || method == "PUT")) {
                    last_post_successed = false;
                }
                Logger(ex);
                throw ex;
            }
        }

        // -old implementation

        try {
            const responce: Response = await fetch(url, opts);
            if (method == "POST" || method == "PUT") {

@@ -10,6 +10,7 @@
        // "importsNotUsedAsValues": "error",
        "importHelpers": true,
        "alwaysStrict": true,
        "allowSyntheticDefaultImports": true,
        "lib": ["es2018", "DOM", "ES5", "ES6", "ES7"]
    },
    "include": ["**/*.ts"],