- Improved:
  - Now all revision IDs are shown only by their first few letters.
- Fixed:
  - A check is now performed before modifying files, so unchanged files are not rewritten.
  - Content change detection has been improved.
This commit is contained in:
vorotamoroz
2023-12-11 12:22:17 +09:00
parent d2de5b4710
commit c071d822e1
6 changed files with 45 additions and 27 deletions
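For orientation, the "check before modifying files" fix boils down to the guard sketched below. This is a minimal sketch, not the actual implementation: `modifyIfChanged` is a hypothetical helper name, while `vault.read` and `vault.modify` are the Obsidian API calls that appear in the SerializedFileAccess diff further down.

import { type TFile, type Vault } from "obsidian";

// Minimal sketch: skip the write (and the modify event that follows it)
// when the on-disk content already matches the incoming data.
async function modifyIfChanged(vault: Vault, file: TFile, data: string): Promise<boolean> {
    const oldData = await vault.read(file);
    if (data === oldData) return false; // unchanged: nothing to do
    await vault.modify(file, data);
    return true; // written: the caller may fire events or update the database
}

Returning a boolean lets callers decide whether follow-up work (logging, touch, event triggers, database updates) is still needed, which is exactly how `vaultModify` is used in the hunks below.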

View File

@@ -436,7 +436,7 @@ export class HiddenFileSync extends LiveSyncCommands {
type: "newnote",
};
} else {
-if (isDocContentSame(old.data, content) && !forceWrite) {
+if (await isDocContentSame(old.data, content) && !forceWrite) {
// Logger(`STORAGE --> DB:${file.path}: (hidden) Not changed`, LOG_LEVEL_VERBOSE);
return;
}
@@ -560,7 +560,7 @@ export class HiddenFileSync extends LiveSyncCommands {
} else {
const contentBin = await this.plugin.vaultAccess.adapterReadBinary(filename);
const content = await encodeBinary(contentBin);
-if (isDocContentSame(content, fileOnDB.data) && !force) {
+if (await isDocContentSame(content, fileOnDB.data) && !force) {
// Logger(`STORAGE <-- DB:${filename}: skipped (hidden) Not changed`, LOG_LEVEL_VERBOSE);
return true;
}

View File

@@ -228,7 +228,7 @@ export class PluginAndTheirSettings extends LiveSyncCommands {
if (old !== false) {
const oldData = { data: old.data, deleted: old._deleted };
const newData = { data: d.data, deleted: d._deleted };
-if (isDocContentSame(oldData.data, newData.data) && oldData.deleted == newData.deleted) {
+if (await isDocContentSame(oldData.data, newData.data) && oldData.deleted == newData.deleted) {
Logger(`Nothing changed:${m.name}`);
return;
}

View File

@@ -66,10 +66,7 @@
for (const revInfo of reversedRevs) {
if (revInfo.status == "available") {
-const doc =
-    (!isPlain && showDiffInfo) || (checkStorageDiff && revInfo.rev == docA._rev)
-        ? await db.getDBEntry(path, { rev: revInfo.rev }, false, false, true)
-        : await db.getDBEntryMeta(path, { rev: revInfo.rev }, true);
+const doc = (!isPlain && showDiffInfo) || (checkStorageDiff && revInfo.rev == docA._rev) ? await db.getDBEntry(path, { rev: revInfo.rev }, false, false, true) : await db.getDBEntryMeta(path, { rev: revInfo.rev }, true);
if (doc === false) continue;
const rev = revInfo.rev;
@@ -112,11 +109,11 @@
let result = false;
if (isPlainText(docA.path)) {
const data = await plugin.vaultAccess.adapterRead(abs);
-result = isDocContentSame(data, doc.data);
+result = await isDocContentSame(data, doc.data);
} else {
const data = await plugin.vaultAccess.adapterReadBinary(abs);
const dataEEncoded = createBinaryBlob(data);
-result = isDocContentSame(dataEEncoded, doc.data);
+result = await isDocContentSame(dataEEncoded, doc.data);
}
if (result) {
diffDetail += " ⚖️";

View File

@@ -1,6 +1,7 @@
import { type App, TFile, type DataWriteOptions, TFolder, TAbstractFile } from "./deps";
import { serialized } from "./lib/src/lock";
import type { FilePath } from "./lib/src/types";
+import { createBinaryBlob, isDocContentSame } from "./lib/src/utils";
function getFileLockKey(file: TFile | TFolder | string) {
return `fl:${typeof (file) == "string" ? file : file.path}`;
}
@@ -65,9 +66,22 @@ export class SerializedFileAccess {
async vaultModify(file: TFile, data: string | ArrayBuffer | Uint8Array, options?: DataWriteOptions) {
if (typeof (data) === "string") {
-return await serialized(getFileLockKey(file), () => this.app.vault.modify(file, data, options));
+return await serialized(getFileLockKey(file), async () => {
+    const oldData = await this.app.vault.read(file);
+    if (data === oldData) return false;
+    await this.app.vault.modify(file, data, options);
+    return true;
+});
} else {
-return await serialized(getFileLockKey(file), () => this.app.vault.modifyBinary(file, toArrayBuffer(data), options));
+return await serialized(getFileLockKey(file), async () => {
+    const oldData = await this.app.vault.readBinary(file);
+    if (await isDocContentSame(createBinaryBlob(oldData), createBinaryBlob(data))) {
+        return false;
+    }
+    await this.app.vault.modifyBinary(file, toArrayBuffer(data), options);
+    return true;
+});
}
}
async vaultCreate(path: string, data: string | ArrayBuffer | Uint8Array, options?: DataWriteOptions): Promise<TFile> {

Submodule src/lib updated: f108111ae8...18822f1e02
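The src/lib submodule bump is presumably what makes `isDocContentSame` asynchronous; every call site in this commit gains an `await` accordingly. Below is a hedged sketch of why such a comparison ends up async — a hypothetical shape only, since the real implementation lives in the updated submodule:

// Hypothetical sketch: comparing Blob contents requires the asynchronous
// Blob.arrayBuffer(), which forces the whole function to return a Promise.
async function isDocContentSameSketch(a: string | Blob, b: string | Blob): Promise<boolean> {
    if (typeof a === "string" && typeof b === "string") return a === b;
    if (a instanceof Blob && b instanceof Blob) {
        if (a.size !== b.size) return false;
        const [bufA, bufB] = await Promise.all([a.arrayBuffer(), b.arrayBuffer()]);
        const viewA = new Uint8Array(bufA);
        const viewB = new Uint8Array(bufB);
        return viewA.every((byte, i) => byte === viewB[i]);
    }
    return false; // mixed types: treated as different in this sketch
}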

View File

@@ -33,7 +33,7 @@ import { GlobalHistoryView, VIEW_TYPE_GLOBAL_HISTORY } from "./GlobalHistoryView
import { LogPaneView, VIEW_TYPE_LOG } from "./LogPaneView";
import { mapAllTasksWithConcurrencyLimit, processAllTasksWithConcurrencyLimit } from "./lib/src/task";
import { LRUCache } from "./lib/src/LRUCache";
-import { SerializedFileAccess } from "./SerializedFileAccess";
+import { SerializedFileAccess } from "./SerializedFileAccess.ts";
setNoticeClass(Notice);
@@ -1306,17 +1306,22 @@ Note: We can always able to read V1 format. It will be progressively converted.
await this.ensureDirectoryEx(path);
try {
let outFile;
+let isChanged = true;
if (mode == "create") {
const normalizedPath = normalizePath(path);
await this.vaultAccess.vaultCreate(normalizedPath, writeData, { ctime: doc.ctime, mtime: doc.mtime, });
outFile = this.vaultAccess.getAbstractFileByPath(normalizedPath) as TFile;
} else {
-await this.vaultAccess.vaultModify(file, writeData, { ctime: doc.ctime, mtime: doc.mtime });
+isChanged = await this.vaultAccess.vaultModify(file, writeData, { ctime: doc.ctime, mtime: doc.mtime });
outFile = this.vaultAccess.getAbstractFileByPath(getPathFromTFile(file)) as TFile;
}
-Logger(msg + path);
-this.vaultAccess.touch(outFile);
-this.app.vault.trigger(mode, outFile);
+if (isChanged) {
+    Logger(msg + path);
+    this.vaultAccess.touch(outFile);
+    this.app.vault.trigger(mode, outFile);
+} else {
+    Logger(msg + "Skipped, the file is the same: " + path, LOG_LEVEL_VERBOSE);
+}
} catch (ex) {
Logger(msg + "ERROR, Could not write: " + path, LOG_LEVEL_NOTICE);
@@ -1370,12 +1375,12 @@ Note: We can always able to read V1 format. It will be progressively converted.
try {
const releaser = await semaphore.acquire(1);
serialized(`dbchanged-${path}`, async () => {
-Logger(`Applying ${path} (${entry._id}: ${entry._rev}) change...`, LOG_LEVEL_VERBOSE);
+Logger(`Applying ${path} (${entry._id}: ${entry._rev?.substring(0, 5)}) change...`, LOG_LEVEL_VERBOSE);
await this.handleDBChangedAsync(entry);
-Logger(`Applied ${path} (${entry._id}:${entry._rev}) change...`);
+Logger(`Applied ${path} (${entry._id}:${entry._rev?.substring(0, 5)}) change...`);
}).finally(() => { releaser(); });
} catch (ex) {
-Logger(`Failed to apply the change of ${path} (${entry._id}:${entry._rev})`);
+Logger(`Failed to apply the change of ${path} (${entry._id}:${entry._rev?.substring(0, 5)})`);
}
} while (this.queuedEntries.length > 0);
} finally {
@@ -1501,11 +1506,11 @@ Note: We can always able to read V1 format. It will be progressively converted.
const skipOldFile = this.settings.skipOlderFilesOnSync && false; //patched temporary.
// Do not handle internal files if the feature has not been enabled.
if (isInternalMetadata(doc._id) && !this.settings.syncInternalFiles) {
-Logger(`Skipped: ${path} (${doc._id}, ${doc._rev}) Hidden file sync is disabled.`, LOG_LEVEL_VERBOSE);
+Logger(`Skipped: ${path} (${doc._id}, ${doc._rev?.substring(0, 10)}) Hidden file sync is disabled.`, LOG_LEVEL_VERBOSE);
return;
}
if (isCustomisationSyncMetadata(doc._id) && !this.settings.usePluginSync) {
-Logger(`Skipped: ${path} (${doc._id}, ${doc._rev}) Customization sync is disabled.`, LOG_LEVEL_VERBOSE);
+Logger(`Skipped: ${path} (${doc._id}, ${doc._rev?.substring(0, 10)}) Customization sync is disabled.`, LOG_LEVEL_VERBOSE);
return;
}
// It is better for your own safety, not to handle the following files
@@ -1528,7 +1533,7 @@ Note: We can always able to read V1 format. It will be progressively converted.
const docMtime = ~~(doc.mtime / 1000);
//TODO: some margin required.
if (localMtime >= docMtime) {
-Logger(`${path} (${doc._id}, ${doc._rev}) Skipped, older than storage.`, LOG_LEVEL_VERBOSE);
+Logger(`${path} (${doc._id}, ${doc._rev?.substring(0, 10)}) Skipped, older than storage.`, LOG_LEVEL_VERBOSE);
return;
}
}
@@ -1544,7 +1549,7 @@ Note: We can always able to read V1 format. It will be progressively converted.
if ((!this.settings.readChunksOnline) && "children" in doc) {
const c = await this.localDatabase.collectChunksWithCache(doc.children as DocumentID[]);
const missing = c.filter((e) => e.chunk === false).map((e) => e.id);
-if (missing.length > 0) Logger(`${path} (${doc._id}, ${doc._rev}) Queued (waiting ${missing.length} items)`, LOG_LEVEL_VERBOSE);
+if (missing.length > 0) Logger(`${path} (${doc._id}, ${doc._rev?.substring(0, 10)}) Queued (waiting ${missing.length} items)`, LOG_LEVEL_VERBOSE);
newQueue.missingChildren = missing;
this.queuedFiles.push(newQueue);
} else {
@@ -2265,12 +2270,14 @@ Or if you are sure know what had been happened, we can unlock the database from
const file = this.vaultAccess.getAbstractFileByPath(stripAllPrefixes(path)) as TFile;
if (file) {
-await this.vaultAccess.vaultModify(file, p);
-await this.updateIntoDB(file);
+if (await this.vaultAccess.vaultModify(file, p)) {
+    await this.updateIntoDB(file);
+}
} else {
const newFile = await this.vaultAccess.vaultCreate(path, p);
await this.updateIntoDB(newFile);
}
// ?
await this.pullFile(path);
Logger(`Automatically merged (sensible) :${path}`, LOG_LEVEL_INFO);
return true;
@@ -2562,7 +2569,7 @@ Or if you are sure know what had been happened, we can unlock the database from
const oldData = { data: old.data, deleted: old._deleted || old.deleted };
const newData = { data: d.data, deleted: d._deleted || d.deleted };
if (oldData.deleted != newData.deleted) return false;
-if (!isDocContentSame(old.data, newData.data)) return false;
+if (!await isDocContentSame(old.data, newData.data)) return false;
Logger(msg + "Skipped (not changed) " + fullPath + ((d._deleted || d.deleted) ? " (deleted)" : ""), LOG_LEVEL_VERBOSE);
return true;
// d._rev = old._rev;