Compare commits

...

3 Commits

Author SHA1 Message Date
vorotamoroz
d5e6419504 bump 2023-01-15 11:17:08 +09:00
vorotamoroz
1bf1e1540d Fix diff check. 2023-01-15 11:09:23 +09:00
vorotamoroz
be1e6b11ac Fixed
- Large files addressed.
2023-01-13 19:43:39 +09:00
7 changed files with 29 additions and 22 deletions

View File

@@ -1,7 +1,7 @@
{
"id": "obsidian-livesync",
"name": "Self-hosted LiveSync",
"version": "0.17.9",
"version": "0.17.10",
"minAppVersion": "0.9.12",
"description": "Community implementation of self-hosted livesync. Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.",
"author": "vorotamoroz",

4
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "obsidian-livesync",
"version": "0.17.9",
"version": "0.17.10",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "obsidian-livesync",
"version": "0.17.9",
"version": "0.17.10",
"license": "MIT",
"dependencies": {
"diff-match-patch": "^1.0.5",

View File

@@ -1,6 +1,6 @@
{
"name": "obsidian-livesync",
"version": "0.17.9",
"version": "0.17.10",
"description": "Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.",
"main": "main.js",
"type": "module",

View File

@@ -1423,7 +1423,7 @@ ${stringifyYaml(pluginConfig)}`;
const releaser = await semaphore.acquire(1, "verifyAndRepair");
try {
Logger(`Update into ${file.path}`);
Logger(`UPDATE DATABASE ${file.path}`);
await this.plugin.updateIntoDB(file, false, null, true);
i++;
Logger(`${i}/${files.length}\n${file.path}`, LOG_LEVEL.NOTICE, "verify");

Submodule src/lib updated: 4ef5986b4d...2284678a59

View File

@@ -19,6 +19,8 @@ import {
getLocks,
WrappedNotice,
Semaphore,
getDocData,
isDocContentSame,
} from "./lib/src/utils";
import { Logger, setLogger } from "./lib/src/logger";
import { LocalPouchDB } from "./LocalPouchDB";
@@ -1214,7 +1216,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}
await this.ensureDirectory(path);
try {
const newFile = await this.app.vault.create(normalizePath(path), doc.data, {
const newFile = await this.app.vault.create(normalizePath(path), getDocData(doc.data), {
ctime: doc.ctime,
mtime: doc.mtime,
});
@@ -1304,7 +1306,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
}
await this.ensureDirectory(path);
try {
await this.app.vault.modify(file, doc.data, { ctime: doc.ctime, mtime: doc.mtime });
await this.app.vault.modify(file, getDocData(doc.data), { ctime: doc.ctime, mtime: doc.mtime });
Logger(msg + path);
// this.batchFileChange = this.batchFileChange.filter((e) => e != file.path);
const xf = getAbstractFileByPath(file.path) as TFile;
@@ -1866,7 +1868,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
};
await runAll("UPDATE DATABASE", onlyInStorage, async (e) => {
Logger(`Update into ${e.path}`);
Logger(`UPDATE DATABASE ${e.path}`);
await this.updateIntoDB(e, initialScan);
});
if (!initialScan) {
@@ -1957,18 +1959,18 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
try {
const doc = await this.localDatabase.getDBEntry(path, { rev: rev }, false, false, true);
if (doc === false) return false;
let data = doc.data;
let data = getDocData(doc.data)
if (doc.datatype == "newnote") {
data = base64ToString(doc.data);
data = base64ToString(data);
} else if (doc.datatype == "plain") {
data = doc.data;
// NO OP.
}
return {
deleted: doc.deleted || doc._deleted,
ctime: doc.ctime,
mtime: doc.mtime,
rev: rev,
data: data,
data: data
};
} catch (ex) {
if (ex.status && ex.status == 404) {
@@ -2462,11 +2464,13 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
if (shouldBeIgnored(file.path)) {
return;
}
let content = "";
let content: string | string[];
let datatype: "plain" | "newnote" = "newnote";
if (!cache) {
if (!isPlainText(file.name)) {
Logger(`Reading : ${file.path}`, LOG_LEVEL.VERBOSE);
const contentBin = await this.app.vault.readBinary(file);
Logger(`Processing: ${file.path}`, LOG_LEVEL.VERBOSE);
content = await arrayBufferToBase64(contentBin);
datatype = "newnote";
} else {
@@ -2502,12 +2506,12 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
try {
const old = await this.localDatabase.getDBEntry(fullPath, null, false, false);
if (old !== false) {
const oldData = { data: old.data, deleted: old._deleted || old.deleted, };
const oldData = { data: old.data, deleted: old._deleted || old.deleted };
const newData = { data: d.data, deleted: d._deleted || d.deleted };
if (JSON.stringify(oldData) == JSON.stringify(newData)) {
Logger(msg + "Skipped (not changed) " + fullPath + ((d._deleted || d.deleted) ? " (deleted)" : ""), LOG_LEVEL.VERBOSE);
return true;
}
if (oldData.deleted != newData.deleted) return false;
if (!isDocContentSame(old.data, newData.data)) return false;
Logger(msg + "Skipped (not changed) " + fullPath + ((d._deleted || d.deleted) ? " (deleted)" : ""), LOG_LEVEL.VERBOSE);
return true;
// d._rev = old._rev;
}
} catch (ex) {
@@ -2569,7 +2573,7 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
async getPluginList(): Promise<{ plugins: PluginList; allPlugins: DevicePluginList; thisDevicePlugins: DevicePluginList }> {
const db = this.localDatabase.localDatabase;
const docList = await db.allDocs<PluginDataEntry>({ startkey: PSCHeader, endkey: PSCHeaderEnd, include_docs: false });
const oldDocs: PluginDataEntry[] = ((await Promise.all(docList.rows.map(async (e) => await this.localDatabase.getDBEntry(e.id)))).filter((e) => e !== false) as LoadedEntry[]).map((e) => JSON.parse(e.data));
const oldDocs: PluginDataEntry[] = ((await Promise.all(docList.rows.map(async (e) => await this.localDatabase.getDBEntry(e.id)))).filter((e) => e !== false) as LoadedEntry[]).map((e) => JSON.parse(getDocData(e.data)));
const plugins: { [key: string]: PluginDataEntry[] } = {};
const allPlugins: { [key: string]: PluginDataEntry } = {};
const thisDevicePlugins: { [key: string]: PluginDataEntry } = {};

View File

@@ -42,9 +42,12 @@
- New Feature: Now we can synchronise automatically after merging conflicts.
- 0.17.9
- Fixed: Conflict merge of internal files is no longer broken.
- Improved: Smoother status display inside the editor.
- Fixed: Conflict merge of internal files is no longer broken.
- Improved: Smoother status display inside the editor.
- 0.17.10
- Fixed: Large file synchronising has now been addressed!
Note: When synchronising large files, set `Chunk size` to lower than 50, disable `Read chunks online`, set `Batch size` to 50-100, and set `Batch limit` to around 20.
### 0.16.0
- Now hidden files need not be scanned. Changes will be detected automatically.
- If you want it to back to its previous behaviour, please disable `Monitor changes to internal files`.