This commit is contained in:
vorotamoroz
2026-01-07 08:38:33 +00:00
parent b52ceec36a
commit 336f2c8a4d
21 changed files with 8136 additions and 61 deletions

129
test/suite/db_common.ts Normal file
View File

@@ -0,0 +1,129 @@
import { compareMTime, EVEN } from "@/common/utils";
import { TFile, type DataWriteOptions } from "@/deps";
import type { FilePath } from "@/lib/src/common/types";
import { isDocContentSame, readContent } from "@/lib/src/common/utils";
import { waitForIdle, type LiveSyncHarness } from "../harness/harness";
import { expect } from "vitest";
// Shared DataWriteOptions for the whole suite: a fixed, non-"now" mtime
// (2026-01-01 00:01:02.003, local time) so mtime round-trip comparisons
// are deterministic across test runs.
export const defaultFileOption = {
    mtime: new Date(2026, 0, 1, 0, 1, 2, 3).getTime(),
} as const satisfies DataWriteOptions;
/**
 * Creates (or recreates) a file in the vault, verifies the written content and
 * mtime, then flushes pending file events so the plugin processes the change.
 *
 * @param harness - Test harness wrapping the plugin and vault under test.
 * @param path - Vault-relative path of the file to create.
 * @param content - Text content, or a Blob for binary files.
 * @param deleteBeforeSend - When true, delete any existing file at `path` first.
 * @param fileOptions - Write options; `mtime` is asserted after creation.
 * @returns The created abstract file (asserted to be a TFile).
 */
export async function storeFile(
    harness: LiveSyncHarness,
    path: string,
    content: string | Blob,
    deleteBeforeSend = false,
    fileOptions = defaultFileOption
) {
    if (deleteBeforeSend) {
        // Look the file up once instead of once for the check and once for the delete.
        const existing = harness.app.vault.getAbstractFileByPath(path);
        if (existing) {
            console.log(`Deleting existing file ${path}`);
            await harness.app.vault.delete(existing as TFile);
        }
    }
    // Create file via vault
    if (content instanceof Blob) {
        console.log(`Creating binary file ${path}`);
        await harness.app.vault.createBinary(path, await content.arrayBuffer(), fileOptions);
    } else {
        await harness.app.vault.create(path, content, fileOptions);
    }
    // Ensure file is created
    const file = harness.app.vault.getAbstractFileByPath(path);
    expect(file).toBeInstanceOf(TFile);
    if (file instanceof TFile) {
        expect(compareMTime(file.stat.mtime, fileOptions?.mtime ?? defaultFileOption.mtime)).toBe(EVEN);
        if (content instanceof Blob) {
            // Renamed locals: they previously shadowed the imported `readContent` helper.
            const storedBinary = await harness.app.vault.readBinary(file);
            expect(await isDocContentSame(storedBinary, content)).toBe(true);
        } else {
            const storedText = await harness.app.vault.read(file);
            expect(storedText).toBe(content);
        }
    }
    await harness.plugin.services.fileProcessing.commitPendingFileEvents();
    await waitForIdle(harness);
    return file;
}
/**
 * Fetches the database entry for `path` from the plugin's local database
 * and asserts that it exists.
 */
export async function readFromLocalDB(harness: LiveSyncHarness, path: string) {
    const dbEntry = await harness.plugin.localDatabase.getDBEntry(path as FilePath);
    expect(dbEntry).not.toBe(false);
    return dbEntry;
}
export async function readFromVault(
harness: LiveSyncHarness,
path: string,
isBinary: boolean = false,
fileOptions = defaultFileOption
): Promise<string | ArrayBuffer> {
const file = harness.app.vault.getAbstractFileByPath(path);
expect(file).toBeInstanceOf(TFile);
if (file instanceof TFile) {
// console.log(`MTime: ${file.stat.mtime}, Expected: ${fileOptions.mtime}`);
if (fileOptions.mtime !== undefined) {
expect(compareMTime(file.stat.mtime, fileOptions.mtime)).toBe(EVEN);
}
const content = isBinary ? await harness.app.vault.readBinary(file) : await harness.app.vault.read(file);
return content;
}
throw new Error("File not found in vault");
}
/**
 * Asserts that the local database holds `path` with the expected content and
 * (when `fileOptions.mtime` is given) the expected mtime.
 *
 * @throws Error when no DB entry exists (also caught earlier by the expect
 *         inside readFromLocalDB; the throw narrows the type for TS).
 */
export async function checkStoredFileInDB(
    harness: LiveSyncHarness,
    path: string,
    content: string | Blob,
    fileOptions = defaultFileOption
) {
    const entry = await readFromLocalDB(harness, path);
    if (entry === false) {
        throw new Error("DB Content not found");
    }
    // Blobs are compared by bytes; strings are compared directly.
    const contentToCheck = content instanceof Blob ? await content.arrayBuffer() : content;
    const isDocSame = await isDocContentSame(readContent(entry), contentToCheck);
    if (fileOptions.mtime !== undefined) {
        expect(compareMTime(entry.mtime, fileOptions.mtime)).toBe(EVEN);
    }
    expect(isDocSame).toBe(true);
    // Idiom fix: `return Promise.resolve()` is redundant in an async function.
}
/**
 * End-to-end write check: store a file, flush events, verify the vault copy,
 * then (unless skipped) verify the local-database copy.
 *
 * @param skipCheckToBeWritten - When true, skip the local-DB content check
 *        (used for files expected to be too large to sync).
 */
export async function testFileWrite(
    harness: LiveSyncHarness,
    path: string,
    content: string | Blob,
    skipCheckToBeWritten = false,
    fileOptions = defaultFileOption
) {
    const file = await storeFile(harness, path, content, false, fileOptions);
    expect(file).toBeInstanceOf(TFile);
    await harness.plugin.services.fileProcessing.commitPendingFileEvents();
    await waitForIdle(harness);
    const vaultFile = await readFromVault(harness, path, content instanceof Blob, fileOptions);
    expect(await isDocContentSame(vaultFile, content)).toBe(true);
    await harness.plugin.services.fileProcessing.commitPendingFileEvents();
    await waitForIdle(harness);
    if (skipCheckToBeWritten) {
        return;
    }
    // BUG FIX: fileOptions was previously not forwarded, so the DB mtime check
    // always compared against defaultFileOption.mtime even for custom options.
    await checkStoredFileInDB(harness, path, content, fileOptions);
}
/**
 * End-to-end read check: verify the vault copy of `path` matches
 * `expectedContent`, then verify the local-database copy matches as well.
 *
 * @throws Error when no DB entry exists (type-narrowing guard after the expect).
 */
export async function testFileRead(
    harness: LiveSyncHarness,
    path: string,
    expectedContent: string | Blob,
    fileOptions = defaultFileOption
) {
    await waitForIdle(harness);
    const vaultContent = await readFromVault(harness, path, expectedContent instanceof Blob, fileOptions);
    expect(await isDocContentSame(vaultContent, expectedContent)).toBe(true);
    // Check local database entry
    const dbEntry = await readFromLocalDB(harness, path);
    expect(dbEntry).not.toBe(false);
    if (dbEntry === false) {
        throw new Error("DB Content not found");
    }
    expect(await isDocContentSame(readContent(dbEntry), expectedContent)).toBe(true);
}

View File

@@ -0,0 +1,125 @@
import { beforeAll, describe, expect, it, test } from "vitest";
import { generateHarness, waitForIdle, waitForReady, type LiveSyncHarness } from "../harness/harness";
import { TFile } from "obsidian";
import { DEFAULT_SETTINGS, type FilePath, type ObsidianLiveSyncSettings } from "@/lib/src/common/types";
import { isDocContentSame, readContent } from "@/lib/src/common/utils";
import { DummyFileSourceInisialised, generateBinaryFile, generateFile, init } from "../utils/dummyfile";
// Settings for the local-database-only suite: plugin configured, no remote
// connection fields needed, case-insensitive filename handling.
const localdb_test_setting = {
    ...DEFAULT_SETTINGS,
    isConfigured: true,
    handleFilenameCaseSensitive: false,
} as ObsidianLiveSyncSettings;
// Integration tests for the local-database pipeline: files written via the
// vault should be picked up by the file-processing service and persisted to
// the plugin's local database. The create → commitPendingFileEvents →
// waitForIdle ordering is deliberate and order-sensitive.
describe("Plugin Integration Test (Local Database)", async () => {
    let harness: LiveSyncHarness;
    // Unique vault name per run so repeated runs do not collide.
    const vaultName = "TestVault" + Date.now();
    beforeAll(async () => {
        // Ensure the dummy-file generator is initialised before the harness starts.
        await DummyFileSourceInisialised;
        harness = await generateHarness(vaultName, localdb_test_setting);
        await waitForReady(harness);
    });
    it("should be instantiated and defined", async () => {
        expect(harness.plugin).toBeDefined();
        expect(harness.plugin.app).toBe(harness.app);
        return await Promise.resolve();
    });
    it("should have services initialized", async () => {
        expect(harness.plugin.services).toBeDefined();
        return await Promise.resolve();
    });
    it("should have local database initialized", async () => {
        expect(harness.plugin.localDatabase).toBeDefined();
        expect(harness.plugin.localDatabase.isReady).toBe(true);
        return await Promise.resolve();
    });
    it("should store the changes into the local database", async () => {
        const path = "test-store6.md";
        const content = "Hello, World!";
        // Remove any leftover from a previous run before creating.
        if (harness.app.vault.getAbstractFileByPath(path)) {
            console.log(`Deleting existing file ${path}`);
            await harness.app.vault.delete(harness.app.vault.getAbstractFileByPath(path) as TFile);
        }
        // Create file via vault
        await harness.app.vault.create(path, content);
        const file = harness.app.vault.getAbstractFileByPath(path);
        expect(file).toBeInstanceOf(TFile);
        if (file instanceof TFile) {
            // NOTE(review): this local `readContent` shadows the imported helper
            // of the same name, but only inside this if-block.
            const readContent = await harness.app.vault.read(file);
            expect(readContent).toBe(content);
        }
        await harness.plugin.services.fileProcessing.commitPendingFileEvents();
        await waitForIdle(harness);
        // await delay(100); // Wait a bit for the local database to process
        const entry = await harness.plugin.localDatabase.getDBEntry(path as FilePath);
        expect(entry).not.toBe(false);
        if (entry) {
            // Here `readContent` is the imported helper again (the shadow above is block-scoped).
            expect(readContent(entry)).toBe(content);
        }
        return await Promise.resolve();
    });
    test.each([10, 100, 1000, 10000, 50000, 100000])("should handle large file of size %i bytes", async (size) => {
        const path = `test-large-file-${size}.md`;
        const content = Array.from(generateFile(size)).join("");
        if (harness.app.vault.getAbstractFileByPath(path)) {
            console.log(`Deleting existing file ${path}`);
            await harness.app.vault.delete(harness.app.vault.getAbstractFileByPath(path) as TFile);
        }
        // Create file via vault
        await harness.app.vault.create(path, content);
        const file = harness.app.vault.getAbstractFileByPath(path);
        expect(file).toBeInstanceOf(TFile);
        if (file instanceof TFile) {
            const readContent = await harness.app.vault.read(file);
            expect(readContent).toBe(content);
        }
        await harness.plugin.services.fileProcessing.commitPendingFileEvents();
        await waitForIdle(harness);
        const entry = await harness.plugin.localDatabase.getDBEntry(path as FilePath);
        expect(entry).not.toBe(false);
        if (entry) {
            expect(readContent(entry)).toBe(content);
        }
        return await Promise.resolve();
    });
    // Binary sizes 2^(4*i) for i = 0..6, i.e. 1 B up to 16 MiB.
    const binaryMap = Array.from({ length: 7 }, (_, i) => Math.pow(2, i * 4));
    test.each(binaryMap)("should handle binary file of size %i bytes", async (size) => {
        const path = `test-binary-file-${size}.bin`;
        const content = new Blob([...generateBinaryFile(size)], { type: "application/octet-stream" });
        if (harness.app.vault.getAbstractFileByPath(path)) {
            console.log(`Deleting existing file ${path}`);
            await harness.app.vault.delete(harness.app.vault.getAbstractFileByPath(path) as TFile);
        }
        // Create file via vault
        await harness.app.vault.createBinary(path, await content.arrayBuffer());
        const file = harness.app.vault.getAbstractFileByPath(path);
        expect(file).toBeInstanceOf(TFile);
        if (file instanceof TFile) {
            const readContent = await harness.app.vault.readBinary(file);
            expect(await isDocContentSame(readContent, content)).toBe(true);
        }
        await harness.plugin.services.fileProcessing.commitPendingFileEvents();
        await waitForIdle(harness);
        const entry = await harness.plugin.localDatabase.getDBEntry(path as FilePath);
        expect(entry).not.toBe(false);
        if (entry) {
            const entryContent = await readContent(entry);
            if (!(entryContent instanceof ArrayBuffer)) {
                throw new Error("Entry content is not an ArrayBuffer");
            }
            // const expectedContent = await content.arrayBuffer();
            expect(await isDocContentSame(entryContent, content)).toBe(true);
        }
        return await Promise.resolve();
    });
});

300
test/suite/sync.test.ts Normal file
View File

@@ -0,0 +1,300 @@
// Functional Test on Main Cases
// This test suite only covers the main functional cases of synchronisation. Event handling, error cases,
// edge cases, conflict resolution, etc. will be covered in separate test suites.
import { beforeAll, describe, expect, it, test } from "vitest";
import { generateHarness, waitForIdle, waitForReady, type LiveSyncHarness } from "../harness/harness";
import {
DEFAULT_SETTINGS,
PREFERRED_JOURNAL_SYNC,
PREFERRED_SETTING_SELF_HOSTED,
RemoteTypes,
type FilePath,
type ObsidianLiveSyncSettings,
} from "@/lib/src/common/types";
import {
DummyFileSourceInisialised,
FILE_SIZE_BINS,
FILE_SIZE_MD,
generateBinaryFile,
generateFile,
} from "../utils/dummyfile";
import { checkStoredFileInDB, defaultFileOption, testFileRead, testFileWrite } from "./db_common";
import { delay } from "@/lib/src/common/utils";
const env = (import.meta as any).env;
// Base sync settings. Remote credentials/endpoints come from Vite env vars
// (`import.meta.env`); both CouchDB and MinIO/S3 fields are populated so the
// same base can be specialised per remote type in generateCase().
const sync_test_setting_base = {
    ...DEFAULT_SETTINGS,
    isConfigured: true,
    handleFilenameCaseSensitive: false,
    couchDB_URI: `${env.hostname}`,
    couchDB_DBNAME: `${env.dbname}`,
    couchDB_USER: `${env.username}`,
    couchDB_PASSWORD: `${env.password}`,
    bucket: `${env.bucketName}`,
    region: "us-east-1",
    endpoint: `${env.minioEndpoint}`,
    accessKey: `${env.accessKey}`,
    secretKey: `${env.secretKey}`,
    useCustomRequestHandler: true,
    forcePathStyle: true,
    bucketPrefix: "",
} as ObsidianLiveSyncSettings;
/** Builds a deterministic test file name of the form `prefix-type-file-size.ext`. */
function generateName(prefix: string, type: string, ext: string, size: number) {
    const stem = [prefix, type, "file", String(size)].join("-");
    return `${stem}.${ext}`;
}
/**
 * Yields one settings object per (remote type × E2EE flag) combination,
 * layering the recommended preset for each remote type over the shared base.
 */
function* generateCase() {
    const passphrase = "thetest-Passphrase3+9-for-e2ee!";
    const recommendedByRemote = {
        [RemoteTypes.REMOTE_COUCHDB]: PREFERRED_SETTING_SELF_HOSTED,
        [RemoteTypes.REMOTE_MINIO]: PREFERRED_JOURNAL_SYNC,
    };
    for (const remoteType of [RemoteTypes.REMOTE_MINIO, RemoteTypes.REMOTE_COUCHDB]) {
        for (const useE2EE of [false, true]) {
            const setting = {
                ...sync_test_setting_base,
                ...recommendedByRemote[remoteType],
                remoteType,
                encrypt: useE2EE,
                passphrase: useE2EE ? passphrase : "",
                usePathObfuscation: useE2EE,
            } as ObsidianLiveSyncSettings;
            yield { setting };
        }
    }
}
// Materialise all remote/E2EE combinations up front for describe.each below.
const cases = Array.from(generateCase());
// Every file in this suite is written with the shared default mtime.
const fileOptions = defaultFileOption;
/**
 * Prepares the remote database for a test run: either resets it (after a
 * short settling delay) or creates it, then marks it resolved and asserts
 * the remote status is reachable.
 *
 * NOTE(review): the `setting` parameter is currently unused — every call
 * below reads harness.plugin.settings instead. Kept for interface stability.
 */
async function prepareRemote(harness: LiveSyncHarness, setting: ObsidianLiveSyncSettings, shouldReset = false) {
    // Re-resolve the active replicator on each step, exactly as the calls did inline.
    const activeReplicator = () => harness.plugin.services.replicator.getActiveReplicator();
    if (shouldReset) {
        await delay(1000);
        await activeReplicator()?.tryResetRemoteDatabase(harness.plugin.settings);
    } else {
        await activeReplicator()?.tryCreateRemoteDatabase(harness.plugin.settings);
    }
    await activeReplicator()?.markRemoteResolved(harness.plugin.settings);
    // No exceptions should be thrown
    const remoteStatus = await activeReplicator()?.getRemoteStatus(harness.plugin.settings);
    console.log("Remote status:", remoteStatus);
    expect(remoteStatus).not.toBeFalsy();
}
describe("Replication Suite Tests", async () => {
describe.each(cases)("Replication Tests - Remote: $setting.remoteType, E2EE: $setting.encrypt", ({ setting }) => {
// All files created in this suite share the "sync-test" prefix.
const nameFile = (type: string, ext: string, size: number) => generateName("sync-test", type, ext, size);
beforeAll(async () => {
    // Ensure the dummy-file generator is initialised before any case runs.
    await DummyFileSourceInisialised;
});
describe("Remote Database Initialization", async () => {
    // Harness shared across the ordered `it` steps below.
    let initHarness: LiveSyncHarness;
    const initSetting = {
        ...setting,
    } as ObsidianLiveSyncSettings;
    it("should initialize remote database", async () => {
        const vaultName = "TestVault" + Date.now();
        console.log(`BeforeEach - Remote Database Initialization - Vault: ${vaultName}`);
        initHarness = await generateHarness(vaultName, initSetting);
        await waitForReady(initHarness);
        expect(initHarness.plugin).toBeDefined();
        expect(initHarness.plugin.app).toBe(initHarness.app);
        await waitForIdle(initHarness);
    });
    it("should reset remote database", async () => {
        await waitForReady(initHarness);
        await prepareRemote(initHarness, initSetting, true);
    });
    it("should be prepared for replication", async () => {
        await waitForReady(initHarness);
        const status = await initHarness.plugin.services.replicator
            .getActiveReplicator()
            ?.getRemoteStatus(initSetting);
        console.log("Connected devices after reset:", status);
        expect(status).not.toBeFalsy();
    });
});
// Upload side of the round-trip: create files in a fresh vault, verify them
// locally, then replicate so the Download suite can read them back.
describe("Replication - Upload", async () => {
    let harnessUpload: LiveSyncHarness;
    const sync_test_setting_upload = {
        ...setting,
    } as ObsidianLiveSyncSettings;
    it("Setup Upload Harness", async () => {
        const vaultName = "TestVault" + Date.now();
        console.log(`BeforeAll - Replication Upload - Vault: ${vaultName}`);
        harnessUpload = await generateHarness(vaultName, sync_test_setting_upload);
        await waitForReady(harnessUpload);
        expect(harnessUpload.plugin).toBeDefined();
        expect(harnessUpload.plugin.app).toBe(harnessUpload.app);
        // BUG FIX: waitForIdle was previously not awaited, so setup could
        // complete while the plugin was still processing startup events.
        await waitForIdle(harnessUpload);
    });
    it("should be instantiated and defined", async () => {
        expect(harnessUpload.plugin).toBeDefined();
        expect(harnessUpload.plugin.app).toBe(harnessUpload.app);
    });
    it("should have services initialized", async () => {
        expect(harnessUpload.plugin.services).toBeDefined();
    });
    it("should have local database initialized", async () => {
        expect(harnessUpload.plugin.localDatabase).toBeDefined();
        expect(harnessUpload.plugin.localDatabase.isReady).toBe(true);
    });
    it("should prepare remote database", async () => {
        await prepareRemote(harnessUpload, sync_test_setting_upload, false);
    });
    it("should store single file", async () => {
        const content = "Hello, World!";
        const path = nameFile("store", "md", 0);
        await testFileWrite(harnessUpload, path, content, false, fileOptions);
    });
    it("should different content of several files are stored correctly", async () => {
        await testFileWrite(harnessUpload, nameFile("test-diff-1", "md", 0), "Content A", false, fileOptions);
        await testFileWrite(harnessUpload, nameFile("test-diff-2", "md", 0), "Content B", false, fileOptions);
        await testFileWrite(harnessUpload, nameFile("test-diff-3", "md", 0), "Content C", false, fileOptions);
    });
    test.each(FILE_SIZE_MD)("should handle large file of size %i bytes", async (size) => {
        const content = Array.from(generateFile(size)).join("");
        const path = nameFile("large", "md", size);
        // Files over the plugin's size limit are skipped (the Download suite
        // asserts they never reach the remote).
        const isTooLarge = harnessUpload.plugin.services.vault.isFileSizeTooLarge(size);
        if (isTooLarge) {
            console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
            expect(true).toBe(true);
        } else {
            await testFileWrite(harnessUpload, path, content, false, fileOptions);
        }
    });
    test.each(FILE_SIZE_BINS)("should handle binary file of size %i bytes", async (size) => {
        const content = new Blob([...generateBinaryFile(size)], { type: "application/octet-stream" });
        const path = nameFile("binary", "bin", size);
        // Always write the file; only check the DB copy for syncable sizes.
        await testFileWrite(harnessUpload, path, content, true, fileOptions);
        const isTooLarge = harnessUpload.plugin.services.vault.isFileSizeTooLarge(size);
        if (isTooLarge) {
            console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
            expect(true).toBe(true);
        } else {
            await checkStoredFileInDB(harnessUpload, path, content, fileOptions);
        }
    });
    // Perform final replication after all tests
    it("Replication after uploads", async () => {
        await harnessUpload.plugin.services.replication.replicate(true);
        await waitForIdle(harnessUpload);
        // Ensure all files are uploaded
        await harnessUpload.plugin.services.replication.replicate(true);
        await waitForIdle(harnessUpload);
    });
});
describe("Replication - Download", async () => {
    let harnessDownload: LiveSyncHarness;
    // Download into a new vault
    const sync_test_setting_download = {
        ...setting,
    } as ObsidianLiveSyncSettings;
    it("should initialize remote database", async () => {
        const vaultName = "TestVault" + Date.now();
        harnessDownload = await generateHarness(vaultName, sync_test_setting_download);
        await waitForReady(harnessDownload);
        await prepareRemote(harnessDownload, sync_test_setting_download, false);
        await harnessDownload.plugin.services.replication.replicate(true);
        await waitForIdle(harnessDownload);
        // Version info might be downloaded first and interrupt replication,
        // so replicate a second time to make sure everything arrives.
        await harnessDownload.plugin.services.replication.replicate(true); // Ensure all files are downloaded
        await waitForIdle(harnessDownload);
    });
    it("should perform initial replication to download files", async () => {
        await harnessDownload.plugin.services.replicator
            .getActiveReplicator()
            ?.markRemoteResolved(sync_test_setting_download);
        await harnessDownload.plugin.services.replication.replicate(true);
        await waitForIdle(harnessDownload);
        // Version info might be downloaded first and interrupt replication,
        // so replicate a second time to make sure everything arrives.
        await harnessDownload.plugin.services.replication.replicate(true); // Ensure all files are downloaded
        await waitForIdle(harnessDownload);
    });
    it("should be instantiated and defined", async () => {
        expect(harnessDownload.plugin).toBeDefined();
        expect(harnessDownload.plugin.app).toBe(harnessDownload.app);
    });
    it("should have services initialized", async () => {
        expect(harnessDownload.plugin.services).toBeDefined();
    });
    it("should have local database initialized", async () => {
        expect(harnessDownload.plugin.localDatabase).toBeDefined();
        expect(harnessDownload.plugin.localDatabase.isReady).toBe(true);
    });
    it("should retrieve the single file", async () => {
        const expectedContent = "Hello, World!";
        const path = nameFile("store", "md", 0);
        await testFileRead(harnessDownload, path, expectedContent, fileOptions);
    });
    it("should retrieve different content of several files correctly", async () => {
        await testFileRead(harnessDownload, nameFile("test-diff-1", "md", 0), "Content A", fileOptions);
        await testFileRead(harnessDownload, nameFile("test-diff-2", "md", 0), "Content B", fileOptions);
        await testFileRead(harnessDownload, nameFile("test-diff-3", "md", 0), "Content C", fileOptions);
    });
    test.each(FILE_SIZE_MD)("should retrieve the file %i bytes", async (size) => {
        const content = Array.from(generateFile(size)).join("");
        const path = nameFile("large", "md", size);
        // Oversized files must NOT have been synced: expect no DB entry.
        const isTooLarge = harnessDownload.plugin.services.vault.isFileSizeTooLarge(size);
        if (isTooLarge) {
            const entry = await harnessDownload.plugin.localDatabase.getDBEntry(path as FilePath);
            console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
            expect(entry).toBe(false);
        } else {
            await testFileRead(harnessDownload, path, content, fileOptions);
        }
    });
    test.each(FILE_SIZE_BINS)("should handle binary file of size %i bytes", async (size) => {
        const path = nameFile("binary", "bin", size);
        // Same rule for binaries: oversized files should be absent from the DB.
        const isTooLarge = harnessDownload.plugin.services.vault.isFileSizeTooLarge(size);
        if (isTooLarge) {
            const entry = await harnessDownload.plugin.localDatabase.getDBEntry(path as FilePath);
            console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
            expect(entry).toBe(false);
        } else {
            const content = new Blob([...generateBinaryFile(size)], { type: "application/octet-stream" });
            await testFileRead(harnessDownload, path, content, fileOptions);
        }
    });
});
// Final settling step so each describe.each case ends in a quiescent state.
it("Wait for idle state", async () => {
    await delay(100);
});
});
});