This commit is contained in:
vorotamoroz
2026-01-07 08:38:33 +00:00
parent b52ceec36a
commit 336f2c8a4d
21 changed files with 8136 additions and 61 deletions

6
.gitignore vendored
View File

@@ -23,4 +23,8 @@ data.json
.env
# local config files
*.local
*.local
cov_profile/**
coverage

9
.test.env Normal file
View File

@@ -0,0 +1,9 @@
hostname=http://localhost:5984/
dbname=livesync-test-db2
minioEndpoint=http://127.0.0.1:9000
username=admin
password=testpassword
accessKey=minioadmin
secretKey=minioadmin
bucketName=livesync-test-bucket
# ENABLE_DEBUGGER=true

6188
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -24,7 +24,26 @@
"prettyCheck": "npm run prettyNoWrite -- --check",
"prettyNoWrite": "prettier --config ./.prettierrc.mjs \"**/*.js\" \"**/*.ts\" \"**/*.json\" ",
"check": "npm run lint && npm run svelte-check",
"unittest": "deno test -A --no-check --coverage=cov_profile --v8-flags=--expose-gc --trace-leaks ./src/"
"unittest": "deno test -A --no-check --coverage=cov_profile --v8-flags=--expose-gc --trace-leaks ./src/",
"test": "vitest run",
"test:install-playwright": "npx playwright install chromium",
"test:coverage": "vitest run --coverage",
"test:docker-couchdb:up": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/couchdb-start.sh",
"test:docker-couchdb:init": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/couchdb-init.sh",
"test:docker-couchdb:start": "npm run test:docker-couchdb:up && sleep 5 && npm run test:docker-couchdb:init",
"test:docker-couchdb:down": "docker stop couchdb-test && docker rm couchdb-test",
"test:docker-couchdb:stop": "npm run test:docker-couchdb:down",
"test:docker-s3:up": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/minio-start.sh",
"test:docker-s3:init": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/minio-init.sh",
"test:docker-s3:start": "npm run test:docker-s3:up && sleep 3 && npm run test:docker-s3:init",
"test:docker-s3:down": "docker stop minio-test && docker rm minio-test",
"test:docker-s3:stop": "npm run test:docker-s3:down",
"test:docker-all:up": "npm run test:docker-couchdb:up && npm run test:docker-s3:up",
"test:docker-all:init": "npm run test:docker-couchdb:init && npm run test:docker-s3:init",
"test:docker-all:down": "npm run test:docker-couchdb:down && npm run test:docker-s3:down",
"test:docker-all:start": "npm run test:docker-all:up && sleep 5 && npm run test:docker-all:init",
"test:docker-all:stop": "npm run test:docker-all:down",
"test:full": "npm run test:docker-all:start && vitest run --coverage && npm run test:docker-all:stop"
},
"keywords": [],
"author": "vorotamoroz",
@@ -49,7 +68,12 @@
"@types/transform-pouch": "^1.0.6",
"@typescript-eslint/eslint-plugin": "8.46.2",
"@typescript-eslint/parser": "8.46.2",
"@vitest/browser": "^4.0.16",
"@vitest/browser-playwright": "^4.0.16",
"@vitest/coverage-v8": "^4.0.16",
"builtin-modules": "5.0.0",
"dotenv": "^17.2.3",
"dotenv-cli": "^11.0.0",
"esbuild": "0.25.0",
"esbuild-plugin-inline-worker": "^0.1.1",
"esbuild-svelte": "^0.9.3",
@@ -59,6 +83,7 @@
"events": "^3.3.0",
"glob": "^11.0.3",
"obsidian": "^1.8.7",
"playwright": "^1.57.0",
"postcss": "^8.5.3",
"postcss-load-config": "^6.0.1",
"pouchdb-adapter-http": "^9.0.0",
@@ -81,6 +106,9 @@
"tslib": "^2.8.1",
"tsx": "^4.20.6",
"typescript": "5.9.3",
"vite": "^7.3.0",
"vitest": "^4.0.16",
"webdriverio": "^9.23.0",
"yaml": "^2.8.0"
},
"dependencies": {

138
test/harness/harness.ts Normal file
View File

@@ -0,0 +1,138 @@
import { App } from "obsidian";
import ObsidianLiveSyncPlugin from "@/main";
import { DEFAULT_SETTINGS, type ObsidianLiveSyncSettings } from "@/lib/src/common/types";
import { LOG_LEVEL_VERBOSE, Logger, setGlobalLogFunction } from "@lib/common/logger";
import { SettingCache } from "./obsidian-mock";
import { delay, promiseWithResolvers } from "octagonal-wheels/promises";
import { EVENT_LAYOUT_READY, eventHub } from "@/common/events";
import { EVENT_PLATFORM_UNLOADED } from "@/lib/src/PlatformAPIs/base/APIBase";
import { serialized } from "octagonal-wheels/concurrency/lock_v2";
/** Handle returned by generateHarness: the mocked app, the loaded plugin, and disposal helpers. */
export type LiveSyncHarness = {
    // Mocked Obsidian App instance hosting the plugin.
    app: App;
    // The loaded LiveSync plugin under test.
    plugin: ObsidianLiveSyncPlugin;
    // Unloads the plugin; resolves once EVENT_PLATFORM_UNLOADED has fired.
    dispose: () => Promise<void>;
    // Same promise dispose awaits; resolves on EVENT_PLATFORM_UNLOADED.
    disposalPromise: Promise<void>;
    // True once EVENT_PLATFORM_UNLOADED has been observed.
    isDisposed: () => boolean;
};
/**
 * Route the plugin's global logger to the console, tagged with the vault name.
 * Levels below LOG_LEVEL_VERBOSE are dropped; Errors are logged with their stack.
 */
function overrideLogFunction(vaultName: string) {
    setGlobalLogFunction((msg, level, key) => {
        const suppressed = !!level && level < LOG_LEVEL_VERBOSE;
        if (suppressed) {
            return;
        }
        if (msg instanceof Error) {
            console.error(msg.stack);
            return;
        }
        const scope = key ?? "Global";
        const lv = level ?? 1;
        console.log(`[${vaultName}] :: [${scope}][${lv}]: ${msg instanceof Error ? msg.stack : msg}`);
    });
}
/**
 * Build a fully-loaded plugin instance on top of the mocked Obsidian App.
 * @param paramVaultName vault name; defaults to a unique "TestVault<timestamp>".
 * @param settings overrides merged over DEFAULT_SETTINGS.
 * @returns a LiveSyncHarness with disposal helpers.
 */
export async function generateHarness(
    paramVaultName?: string,
    settings?: Partial<ObsidianLiveSyncSettings>
): Promise<LiveSyncHarness> {
    // return await serialized("harness-generation-lock", async () => {
    // Dispose previous harness to avoid multiple harness running at the same time
    // if (previousHarness && !previousHarness.isDisposed()) {
    //     console.log(`Previous harness detected, waiting for disposal...`);
    //     await previousHarness.disposalPromise;
    //     previousHarness = null;
    //     await delay(100);
    // }
    const vaultName = paramVaultName ?? "TestVault" + Date.now();
    const setting = {
        ...DEFAULT_SETTINGS,
        ...settings,
    };
    overrideLogFunction(vaultName);
    //@ts-ignore Mocked in harness
    const app = new App(vaultName);
    // Preseed the mock's SettingCache: settings keyed by the App, name keyed by the Vault.
    SettingCache.set(app, setting);
    SettingCache.set(app.vault, vaultName);
    // MANIFEST_VERSION is presumably injected by the test build (define) — TODO confirm.
    //@ts-ignore
    const manifest_version = `${MANIFEST_VERSION || "0.0.0-harness"}`;
    // NOTE(review): overrideLogFunction is re-applied several times in this function,
    // presumably because plugin construction/loading replaces the global log
    // function — confirm whether a single call would suffice.
    overrideLogFunction(vaultName);
    const manifest = {
        id: "obsidian-livesync",
        name: "Self-hosted LiveSync (Harnessed)",
        version: manifest_version,
        minAppVersion: "0.15.0",
        description: "Testing",
        author: "vrtmrz",
        authorUrl: "",
        isDesktopOnly: false,
    };
    const plugin = new ObsidianLiveSyncPlugin(app, manifest);
    overrideLogFunction(vaultName);
    // Initial load
    await plugin.onload();
    let isDisposed = false;
    const waitPromise = promiseWithResolvers<void>();
    // Resolve the disposal promise once the platform layer reports unload.
    eventHub.once(EVENT_PLATFORM_UNLOADED, async () => {
        await delay(100);
        isDisposed = true;
        waitPromise.resolve();
    });
    // Forward layout readiness to the mocked vault's own event bus.
    eventHub.once(EVENT_LAYOUT_READY, () => {
        plugin.app.vault.trigger("layout-ready");
    });
    const harness: LiveSyncHarness = {
        app,
        plugin,
        dispose: async () => {
            await plugin.onunload();
            return waitPromise.promise;
        },
        disposalPromise: waitPromise.promise,
        isDisposed: () => isDisposed,
    };
    // Give pending startup microtasks/timers a moment to settle.
    await delay(100);
    console.log(`Harness for vault '${vaultName}' is ready.`);
    // previousHarness = harness;
    return harness;
}
/**
 * Poll until the plugin's app lifecycle reports ready (10 polls of 100ms, ~1s).
 * @throws Error when readiness is not reached within the polling window.
 */
export async function waitForReady(harness: LiveSyncHarness): Promise<void> {
    let attempts = 10;
    while (attempts-- > 0) {
        const ready = harness.plugin.services.appLifecycle.isReady();
        if (ready) {
            console.log("App Lifecycle is ready");
            return;
        }
        await delay(100);
    }
    throw new Error(`Initialisation Timed out!`);
}
/**
 * Poll (25ms x up to 20, ~0.5s) until every plugin work counter reads zero.
 * NOTE(review): unlike waitForReady this does NOT throw on timeout — it
 * returns silently even if the plugin is still busy; confirm that callers
 * tolerate proceeding in a non-idle state.
 */
export async function waitForIdle(harness: LiveSyncHarness): Promise<void> {
    for (let i = 0; i < 20; i++) {
        await delay(25);
        // Sum of all outstanding queue/processing counters; zero means fully idle.
        const processing =
            harness.plugin.databaseQueueCount.value +
            harness.plugin.processingFileEventCount.value +
            harness.plugin.pendingFileEventCount.value +
            harness.plugin.totalQueued.value +
            harness.plugin.batched.value +
            harness.plugin.processing.value +
            harness.plugin.storageApplyingCount.value;
        if (processing === 0) {
            console.log(`Idle after ${i} loops`);
            return;
        }
    }
}
/**
 * Wait (best-effort, up to ~1.1s) until the plugin lifecycle reports unloaded.
 * Returns silently when unload is not observed within the polling window.
 */
export async function waitForClosed(harness: LiveSyncHarness): Promise<void> {
    await delay(100);
    let remaining = 10;
    while (remaining > 0) {
        remaining -= 1;
        const unloaded = harness.plugin.services.appLifecycle.hasUnloaded();
        if (unloaded) {
            console.log("App Lifecycle has unloaded");
            return;
        }
        await delay(100);
    }
}

View File

@@ -0,0 +1,813 @@
/* eslint-disable @typescript-eslint/no-unsafe-function-type */
// Shared side-channel between the harness and the mocks: maps an App instance
// to its preseeded settings and a Vault instance to its vault name.
export const SettingCache = new Map<any, any>();
//@ts-ignore obsidian global
globalThis.activeDocument = document;
// Presumably injected by the test runner's define config — TODO confirm.
declare const hostPlatform: string | undefined;
// import { interceptFetchForLogging } from "../harness/utils/intercept";
// interceptFetchForLogging();
// Minimal `process` shim so platform checks in the plugin work in the browser runner.
globalThis.process = {
    platform: (hostPlatform || "win32") as any,
} as any;
console.warn(`[Obsidian Mock] process.platform is set to ${globalThis.process.platform}`);
/** Base class for mocked vault entries (files and folders). */
export class TAbstractFile {
    vault: Vault; // owning vault
    path: string; // full vault-relative path
    name: string; // last path segment
    parent: TFolder | null; // containing folder, or null for the root
    constructor(vault: Vault, path: string, name: string, parent: TFolder | null) {
        this.vault = vault;
        this.path = path;
        this.name = name;
        this.parent = parent;
    }
}
/** Mocked file entry carrying stat metadata plus name helpers. */
export class TFile extends TAbstractFile {
    stat: {
        ctime: number;
        mtime: number;
        size: number;
    } = { ctime: Date.now(), mtime: Date.now(), size: 0 };
    /** Last dot-separated segment of the name (the whole name when there is no dot). */
    get extension(): string {
        const segments = this.name.split(".");
        return segments[segments.length - 1] ?? "";
    }
    /** Name with its final ".ext" removed (unchanged when there is no dot). */
    get basename(): string {
        const lastDot = this.name.lastIndexOf(".");
        return lastDot === -1 ? this.name : this.name.substring(0, lastDot);
    }
}
/** Mocked folder entry; tracks its direct children. */
export class TFolder extends TAbstractFile {
    children: TAbstractFile[] = [];
    // The vault root is stored under the empty path ("" — "/" is also accepted).
    get isRoot(): boolean {
        return this.path === "" || this.path === "/";
    }
}
export class EventRef {}
// class StorageMap<T, U> extends Map<T, U> {
// constructor(saveName?: string) {
// super();
// if (saveName) {
// this.saveName = saveName;
// void this.restore(saveName);
// }
// }
// private saveName: string = "";
// async restore(saveName: string) {
// this.saveName = saveName;
// const db = await OpenKeyValueDatabase(saveName);
// const data = await db.get<{ [key: string]: U }>("data");
// if (data) {
// for (const key of Object.keys(data)) {
// this.set(key as any as T, data[key]);
// }
// }
// db.close();
// return this;
// }
// saving: boolean = false;
// async save() {
// if (this.saveName === "") {
// return;
// }
// if (this.saving) {
// return;
// }
// try {
// this.saving = true;
// const db = await OpenKeyValueDatabase(this.saveName);
// const data: { [key: string]: U } = {};
// for (const [key, value] of this.entries()) {
// data[key as any as string] = value;
// }
// await db.set("data", data);
// db.close();
// } finally {
// this.saving = false;
// }
// }
// set(key: T, value: U): this {
// super.set(key, value);
// void this.save();
// return this;
// }
// }
/**
 * In-memory Vault mock. Entries live in `files` (path -> TAbstractFile) and
 * raw bodies in `contents` (path -> string | ArrayBuffer). Fires "create",
 * "modify" and "delete" events through its own listener registry.
 */
export class Vault {
    adapter: DataAdapter;
    vaultName: string = "MockVault";
    private files: Map<string, TAbstractFile> = new Map();
    private contents: Map<string, string | ArrayBuffer> = new Map();
    private root: TFolder;
    private listeners: Map<string, Set<Function>> = new Map();
    constructor(vaultName?: string) {
        if (vaultName) {
            this.vaultName = vaultName;
            this.files = new Map();
            this.contents = new Map();
        }
        this.adapter = new DataAdapter(this);
        // The root folder is stored under the empty path.
        this.root = new TFolder(this, "", "", null);
        this.files.set("", this.root);
    }
    /** Exact-path lookup; "/" is treated as the root (""). Returns null when absent. */
    getAbstractFileByPath(path: string): TAbstractFile | null {
        if (path === "/") path = "";
        const file = this.files.get(path);
        return file || null;
    }
    /** Case-insensitive lookup via a linear scan over all known paths. */
    getAbstractFileByPathInsensitive(path: string): TAbstractFile | null {
        const lowerPath = path.toLowerCase();
        for (const [p, file] of this.files.entries()) {
            if (p.toLowerCase() === lowerPath) {
                return file;
            }
        }
        return null;
    }
    /** All files in the vault (folders excluded). */
    getFiles(): TFile[] {
        return Array.from(this.files.values()).filter((f) => f instanceof TFile);
    }
    /** Adapter-level text read by path; binary content is decoded. Null when missing. */
    async _adapterRead(path: string): Promise<string | null> {
        await Promise.resolve();
        const file = this.contents.get(path);
        if (typeof file === "string") {
            return file;
        }
        if (file instanceof ArrayBuffer) {
            return new TextDecoder().decode(file);
        }
        return null;
    }
    /** Adapter-level binary read by path; text content is UTF-8 encoded. Null when missing. */
    async _adapterReadBinary(path: string): Promise<ArrayBuffer | null> {
        await Promise.resolve();
        const file = this.contents.get(path);
        if (file instanceof ArrayBuffer) {
            return file;
        }
        if (typeof file === "string") {
            return new TextEncoder().encode(file).buffer;
        }
        return null;
    }
    /** Read a file as text ("" when no content is stored for the path). */
    async read(file: TFile): Promise<string> {
        await Promise.resolve();
        const content = this.contents.get(file.path);
        if (typeof content === "string") return content;
        if (content instanceof ArrayBuffer) {
            return new TextDecoder().decode(content);
        }
        return "";
    }
    /** Read a file as binary (empty buffer when no content is stored). */
    async readBinary(file: TFile): Promise<ArrayBuffer> {
        await Promise.resolve();
        const content = this.contents.get(file.path);
        if (content instanceof ArrayBuffer) return content;
        if (typeof content === "string") {
            return new TextEncoder().encode(content).buffer;
        }
        return new ArrayBuffer(0);
    }
    /** Shared create path: makes parent folders, registers the entry, fires "create". */
    private async _create(path: string, data: string | ArrayBuffer, options?: DataWriteOptions): Promise<TFile> {
        if (this.files.has(path)) throw new Error("File already exists");
        const name = path.split("/").pop() || "";
        const parentPath = path.includes("/") ? path.substring(0, path.lastIndexOf("/")) : "";
        let parent = this.getAbstractFileByPath(parentPath);
        if (!parent || !(parent instanceof TFolder)) {
            parent = await this.createFolder(parentPath);
        }
        const file = new TFile(this, path, name, parent as TFolder);
        // Size here is the UTF-8 byte length for strings, byteLength for buffers.
        file.stat.size = typeof data === "string" ? new TextEncoder().encode(data).length : data.byteLength;
        file.stat.ctime = options?.ctime ?? Date.now();
        file.stat.mtime = options?.mtime ?? Date.now();
        this.files.set(path, file);
        this.contents.set(path, data);
        (parent as TFolder).children.push(file);
        // console.dir(this.files);
        this.trigger("create", file);
        return file;
    }
    async create(path: string, data: string, options?: DataWriteOptions): Promise<TFile> {
        return await this._create(path, data, options);
    }
    async createBinary(path: string, data: ArrayBuffer, options?: DataWriteOptions): Promise<TFile> {
        return await this._create(path, data, options);
    }
    /** Shared modify path: replaces content, updates stat, fires "modify". */
    async _modify(file: TFile, data: string | ArrayBuffer, options?: DataWriteOptions): Promise<void> {
        await Promise.resolve();
        this.contents.set(file.path, data);
        file.stat.mtime = options?.mtime ?? Date.now();
        file.stat.ctime = options?.ctime ?? file.stat.ctime ?? Date.now();
        // NOTE(review): for strings this records the character count, while _create
        // records the UTF-8 byte length — confirm whether callers compare sizes.
        file.stat.size = typeof data === "string" ? data.length : data.byteLength;
        this.files.set(file.path, file);
        this.trigger("modify", file);
    }
    async modify(file: TFile, data: string, options?: DataWriteOptions): Promise<void> {
        return await this._modify(file, data, options);
    }
    async modifyBinary(file: TFile, data: ArrayBuffer, options?: DataWriteOptions): Promise<void> {
        return await this._modify(file, data, options);
    }
    /** Recursively creates missing folders; returns the existing folder when present. */
    async createFolder(path: string): Promise<TFolder> {
        if (path === "") return this.root;
        if (this.files.has(path)) {
            const f = this.files.get(path);
            if (f instanceof TFolder) return f;
            throw new Error("Path is a file");
        }
        const name = path.split("/").pop() || "";
        const parentPath = path.includes("/") ? path.substring(0, path.lastIndexOf("/")) : "";
        const parent = await this.createFolder(parentPath);
        const folder = new TFolder(this, path, name, parent);
        this.files.set(path, folder);
        parent.children.push(folder);
        // NOTE(review): no "create" event is fired for folders — confirm intended.
        return folder;
    }
    /** Removes the entry and its content, detaches it from its parent, fires "delete". */
    async delete(file: TAbstractFile, force?: boolean): Promise<void> {
        // `force` is accepted for API compatibility but has no effect in the mock.
        await Promise.resolve();
        this.files.delete(file.path);
        this.contents.delete(file.path);
        if (file.parent) {
            file.parent.children = file.parent.children.filter((c) => c !== file);
        }
        this.trigger("delete", file);
    }
    /** Trash is a hard delete in the mock; `system` is ignored. */
    async trash(file: TAbstractFile, system: boolean): Promise<void> {
        await Promise.resolve();
        return this.delete(file);
    }
    /** Registers a listener (optionally bound to ctx); returns a token for offref(). */
    on(name: string, callback: (...args: any[]) => any, ctx?: any): EventRef {
        if (!this.listeners.has(name)) {
            this.listeners.set(name, new Set());
        }
        const boundCallback = ctx ? callback.bind(ctx) : callback;
        this.listeners.get(name)!.add(boundCallback);
        return { name, callback: boundCallback } as any;
    }
    off(name: string, callback: any) {
        this.listeners.get(name)?.delete(callback);
    }
    offref(ref: EventRef) {
        const { name, callback } = ref as any;
        this.off(name, callback);
    }
    /** Synchronously invokes every listener registered for `name`. */
    trigger(name: string, ...args: any[]) {
        this.listeners.get(name)?.forEach((cb) => cb(...args));
    }
    /** Vault name: harness-provided via SettingCache, or the default. */
    getName(): string {
        return SettingCache.get(this) || "MockVault";
    }
}
/** Path-based adapter facade over the mocked Vault (Obsidian's DataAdapter API). */
export class DataAdapter {
    vault: Vault;
    constructor(vault: Vault) {
        this.vault = vault;
    }
    /** Stat for files only; rejects for folders and missing paths. */
    stat(path: string): Promise<{ ctime: number; mtime: number; size: number }> {
        const file = this.vault.getAbstractFileByPath(path);
        if (file && file instanceof TFile) {
            return Promise.resolve({
                ctime: file.stat.ctime,
                mtime: file.stat.mtime,
                size: file.stat.size,
            });
        }
        return Promise.reject(new Error("File not found"));
    }
    /** Direct (non-recursive) children of a folder; empty lists when not a folder. */
    async list(path: string): Promise<{ files: string[]; folders: string[] }> {
        await Promise.resolve();
        const abstractFile = this.vault.getAbstractFileByPath(path);
        if (abstractFile instanceof TFolder) {
            const files: string[] = [];
            const folders: string[] = [];
            for (const child of abstractFile.children) {
                if (child instanceof TFile) files.push(child.path);
                else if (child instanceof TFolder) folders.push(child.path);
            }
            return { files, folders };
        }
        return { files: [], folders: [] };
    }
    /** Upsert: modifies the file when it exists, otherwise creates it (with parents). */
    async _write(path: string, data: string | ArrayBuffer, options?: DataWriteOptions): Promise<void> {
        const file = this.vault.getAbstractFileByPath(path);
        if (file instanceof TFile) {
            if (typeof data === "string") {
                await this.vault.modify(file, data, options);
            } else {
                await this.vault.modifyBinary(file, data, options);
            }
        } else {
            if (typeof data === "string") {
                await this.vault.create(path, data, options);
            } else {
                await this.vault.createBinary(path, data, options);
            }
        }
    }
    async write(path: string, data: string, options?: DataWriteOptions): Promise<void> {
        return await this._write(path, data, options);
    }
    async writeBinary(path: string, data: ArrayBuffer, options?: DataWriteOptions): Promise<void> {
        return await this._write(path, data, options);
    }
    /** Text read by path; throws when the path is not an existing file. */
    async read(path: string): Promise<string> {
        const file = this.vault.getAbstractFileByPath(path);
        if (file instanceof TFile) return await this.vault.read(file);
        throw new Error("File not found");
    }
    /** Binary read by path; throws when the path is not an existing file. */
    async readBinary(path: string): Promise<ArrayBuffer> {
        const file = this.vault.getAbstractFileByPath(path);
        if (file instanceof TFile) return await this.vault.readBinary(file);
        throw new Error("File not found");
    }
    /** True when any entry (file or folder) exists at the path. */
    async exists(path: string): Promise<boolean> {
        await Promise.resolve();
        return this.vault.getAbstractFileByPath(path) !== null;
    }
    async mkdir(path: string): Promise<void> {
        await this.vault.createFolder(path);
    }
    /** Deletes the entry when present; silently no-ops otherwise. */
    async remove(path: string): Promise<void> {
        const file = this.vault.getAbstractFileByPath(path);
        if (file) await this.vault.delete(file);
    }
}
/**
 * Minimal Events shim backed by an EventTarget.
 * Callbacks registered via on() receive the `detail` payload passed to trigger().
 */
class Events {
    _eventEmitter = new EventTarget();
    // Maps the original callback to its EventTarget wrapper so registering the
    // same callback twice reuses one wrapper.
    _events = new Map<any, any>();
    _eventTarget(cb: any) {
        const existing = this._events.get(cb);
        if (existing) {
            return existing;
        }
        const callback = (evt: any) => {
            // BUGFIX: previously invoked the (undefined) cache-miss lookup result
            // instead of the registered callback, so listeners never fired.
            cb(evt?.detail ?? undefined);
        };
        this._events.set(cb, callback);
        return callback;
    }
    on(name: string, cb: any, ctx?: any) {
        this._eventEmitter.addEventListener(name, this._eventTarget(cb));
    }
    trigger(name: string, args: any) {
        const evt = new CustomEvent(name, {
            detail: args,
        });
        this._eventEmitter.dispatchEvent(evt);
    }
}
/** Mock workspace exposing the minimal leaf/layout API surface the plugin touches. */
class Workspace extends Events {
    getActiveFile() {
        return null;
    }
    getMostRecentLeaf() {
        return null;
    }
    // Invokes the callback after a fixed 200ms delay rather than on a real layout
    // event (the commented-out code shows the event-based alternative).
    onLayoutReady(cb: any) {
        // cb();
        // console.log("[Obsidian Mock] Workspace onLayoutReady registered");
        // this._eventEmitter.addEventListener("layout-ready", () => {
        //     console.log("[Obsidian Mock] Workspace layout-ready event triggered");
        setTimeout(() => {
            cb();
        }, 200);
        // });
    }
    getLeavesOfType() {
        return [];
    }
    // Minimal leaf stub: both operations resolve immediately.
    getLeaf() {
        return { setViewState: () => Promise.resolve(), revealLeaf: () => Promise.resolve() };
    }
    revealLeaf() {
        return Promise.resolve();
    }
    containerEl: HTMLElement = document.createElement("div");
}
/** Mock App: wires together a Vault, a Workspace and a no-op metadata cache. */
export class App {
    vaultName: string = "MockVault";
    constructor(vaultName?: string) {
        if (vaultName) {
            this.vaultName = vaultName;
        }
        this.vault = new Vault(this.vaultName);
    }
    vault: Vault;
    workspace: Workspace = new Workspace();
    // Metadata-cache stub: never emits events and reports no cached metadata.
    metadataCache: any = {
        on: (name: string, cb: any, ctx?: any) => {},
        getFileCache: () => null,
    };
}
/**
 * Plugin base-class mock. loadData/saveData round-trip through SettingCache
 * keyed by the App instance, so harnesses can preseed plugin settings.
 */
export class Plugin {
    app: App;
    manifest: any;
    settings: any;
    // Registered commands, keyed by command id.
    commands: Map<string, any> = new Map();
    constructor(app: App, manifest: any) {
        this.app = app;
        this.manifest = manifest;
    }
    async loadData(): Promise<any> {
        await Promise.resolve();
        return SettingCache.get(this.app) ?? {};
    }
    async saveData(data: any): Promise<void> {
        await Promise.resolve();
        SettingCache.set(this.app, data);
    }
    onload() {}
    onunload() {}
    addSettingTab(tab: any) {}
    addCommand(command: any) {
        this.commands.set(command.id, command);
    }
    addStatusBarItem() {
        return {
            setText: () => {},
            setClass: () => {},
            addClass: () => {},
        };
    }
    addRibbonIcon() {
        // Chainable stub mirroring the subset of the ribbon-icon API the plugin uses.
        const icon = {
            setAttribute: () => icon,
            addClass: () => icon,
            onclick: () => {},
        };
        return icon;
    }
    registerView(type: string, creator: any) {}
    registerObsidianProtocolHandler(handler: any) {}
    registerEvent(handler: any) {}
    registerDomEvent(target: any, eventName: string, handler: any) {}
}
/** Notice mock: routes user-facing toasts to the console. */
export class Notice {
    constructor(message: string) {
        console.log("Notice:", message);
    }
}
/** Modal mock: detached DOM elements; open/close run the lifecycle hooks synchronously. */
export class Modal {
    app: App;
    contentEl: HTMLElement;
    titleEl: HTMLElement;
    constructor(app: App) {
        this.app = app;
        this.contentEl = document.createElement("div");
        this.titleEl = document.createElement("div");
    }
    open() {
        this.onOpen();
    }
    close() {
        this.onClose();
    }
    onOpen() {}
    onClose() {}
    setPlaceholder(p: string) {}
    setTitle(t: string) {}
}
/** Settings-tab mock backed by a detached container element. */
export class PluginSettingTab {
    app: App;
    plugin: Plugin;
    containerEl: HTMLElement;
    constructor(app: App, plugin: Plugin) {
        this.app = app;
        this.plugin = plugin;
        this.containerEl = document.createElement("div");
    }
    display() {}
}
/** Convert backslashes to forward slashes and strip any trailing slashes. */
export function normalizePath(path: string): string {
    const forwardSlashed = path.split("\\").join("/");
    return forwardSlashed.replace(/\/+$/, "");
}
/** Mock platform flags: the harness always behaves as desktop. */
export const Platform = {
    isDesktop: true,
    isMobile: false,
};
/** Menu mock: immediately materializes items so builder callbacks execute. */
export class Menu {
    addItem(cb: (item: MenuItem) => any) {
        cb(new MenuItem());
        return this;
    }
    showAtMouseEvent(evt: MouseEvent) {}
}
/** Menu-item mock: chainable no-ops. */
export class MenuItem {
    setTitle(title: string) {
        return this;
    }
    setIcon(icon: string) {
        return this;
    }
    onClick(cb: (evt: MouseEvent) => any) {
        return this;
    }
}
export class MenuSeparator {}
/** Component mock: lifecycle hooks are no-ops. */
export class Component {
    load() {}
    unload() {}
}
/** Button stub: chainable no-ops around a detached <button>. */
export class ButtonComponent extends Component {
    buttonEl: HTMLButtonElement = document.createElement("button");
    setButtonText(text: string) {
        return this;
    }
    setCta() {
        return this;
    }
    onClick(cb: any) {
        return this;
    }
    setClass(c: string) {
        return this;
    }
}
/** Text-input stub around a detached <input>. */
export class TextComponent extends Component {
    inputEl: HTMLInputElement = document.createElement("input");
    onChange(cb: any) {
        return this;
    }
    setValue(v: string) {
        return this;
    }
}
/** Toggle stub: chainable no-ops. */
export class ToggleComponent extends Component {
    onChange(cb: any) {
        return this;
    }
    setValue(v: boolean) {
        return this;
    }
}
/** Dropdown stub: chainable no-ops. */
export class DropdownComponent extends Component {
    addOption(v: string, d: string) {
        return this;
    }
    addOptions(o: any) {
        return this;
    }
    onChange(cb: any) {
        return this;
    }
    setValue(v: string) {
        return this;
    }
}
/** Slider stub: chainable no-ops. */
export class SliderComponent extends Component {
    onChange(cb: any) {
        return this;
    }
    setValue(v: number) {
        return this;
    }
}
/**
 * Setting-row stub. Each add* method immediately invokes the builder callback
 * with a fresh component so plugin settings code executes end-to-end.
 */
export class Setting {
    nameEl: HTMLElement;
    descEl: HTMLElement;
    controlEl: HTMLElement;
    infoEl: HTMLElement;
    constructor(containerEl: HTMLElement) {
        // Relies on the createDiv() helper this file installs on HTMLElement.prototype.
        this.nameEl = containerEl.createDiv();
        this.descEl = containerEl.createDiv();
        this.controlEl = containerEl.createDiv();
        this.infoEl = containerEl.createDiv();
    }
    setName(name: string) {
        return this;
    }
    setDesc(desc: string) {
        return this;
    }
    setClass(c: string) {
        return this;
    }
    addText(cb: (text: TextComponent) => any) {
        cb(new TextComponent());
        return this;
    }
    addToggle(cb: (toggle: ToggleComponent) => any) {
        cb(new ToggleComponent());
        return this;
    }
    addButton(cb: (btn: ButtonComponent) => any) {
        cb(new ButtonComponent());
        return this;
    }
    addDropdown(cb: (dropdown: DropdownComponent) => any) {
        cb(new DropdownComponent());
        return this;
    }
    addSlider(cb: (slider: SliderComponent) => any) {
        cb(new SliderComponent());
        return this;
    }
}
// HTMLElement extensions
// Install the subset of Obsidian's HTMLElement helpers (createDiv/createEl/
// setText/addClass/...) onto the prototype whenever a DOM is available.
if (typeof HTMLElement !== "undefined") {
    const proto = HTMLElement.prototype as any;
    proto.createDiv = function (o?: any) {
        const div = document.createElement("div");
        if (o?.cls) div.addClass(o.cls);
        if (o?.text) div.setText(o.text);
        this.appendChild(div);
        return div;
    };
    proto.createEl = function (tag: string, o?: any) {
        const el = document.createElement(tag);
        if (o?.cls) el.addClass(o.cls);
        if (o?.text) el.setText(o.text);
        this.appendChild(el);
        return el;
    };
    proto.createSpan = function (o?: any) {
        return this.createEl("span", o);
    };
    proto.empty = function () {
        this.innerHTML = "";
    };
    proto.setText = function (t: string) {
        this.textContent = t;
    };
    proto.addClass = function (c: string) {
        this.classList.add(c);
    };
    proto.removeClass = function (c: string) {
        this.classList.remove(c);
    };
    proto.toggleClass = function (c: string, b: boolean) {
        this.classList.toggle(c, b);
    };
    proto.hasClass = function (c: string) {
        return this.classList.contains(c);
    };
}
export class Editor {}
/** Fuzzy-suggest modal stub: no UI, no suggestion plumbing. */
export class FuzzySuggestModal<T> {
    constructor(app: App) {}
    setPlaceholder(p: string) {}
    open() {}
    close() {}
    // Keeps the type parameter referenced; never called.
    private __dummy(_: T): never {
        throw new Error("Not implemented.");
    }
}
/** Renders markdown by assigning it as raw HTML — no real markdown pipeline. */
export class MarkdownRenderer {
    static render(app: App, md: string, el: HTMLElement, path: string, component: Component) {
        el.innerHTML = md;
        return Promise.resolve();
    }
}
export class MarkdownView {}
export class TextAreaComponent extends Component {}
export class ItemView {}
export class WorkspaceLeaf {}
/** Parses HTML into a detached <div>; no sanitization is performed in the mock. */
export function sanitizeHTMLToDom(html: string) {
    const div = document.createElement("div");
    div.innerHTML = html;
    return div;
}
export function addIcon() {}
// NOTE(review): this mock does not debounce at all — the function runs on every call.
export const debounce = (fn: any) => fn;
/** request() mock: delegates to requestUrl() and returns the body text. */
export async function request(options: any) {
    const result = await requestUrl(options);
    return result.text;
}
/**
 * Mock of Obsidian's requestUrl built on fetch.
 * Lowercases request header names, forces no-cache semantics, and eagerly
 * materializes the response as arrayBuffer (always) plus text/json when the
 * content-type indicates text or JSON.
 */
export async function requestUrl({
    body,
    headers,
    method,
    url,
    contentType,
}: RequestUrlParam): Promise<RequestUrlResponse> {
    // console.log("[requestUrl] Mock called:", { method, url, contentType });
    const reqHeadersObj: Record<string, string> = {};
    for (const key of Object.keys(headers || {})) {
        reqHeadersObj[key.toLowerCase()] = headers[key];
    }
    if (contentType) {
        reqHeadersObj["content-type"] = contentType;
    }
    // Lowercase keys so these overrides cannot duplicate caller-supplied
    // cache headers (all incoming keys were lowercased above).
    reqHeadersObj["cache-control"] = "no-cache, no-store, must-revalidate";
    reqHeadersObj["pragma"] = "no-cache";
    reqHeadersObj["expires"] = "0";
    const result = await fetch(url, {
        method: method,
        headers: {
            ...reqHeadersObj,
        },
        body: body,
    });
    const headersObj: Record<string, string> = {};
    result.headers.forEach((value, key) => {
        headersObj[key] = value;
    });
    let json = undefined;
    let text = undefined;
    let arrayBuffer = undefined;
    try {
        const isJson = result.headers.get("content-type")?.includes("application/json");
        arrayBuffer = await result.arrayBuffer();
        const isText = result.headers.get("content-type")?.startsWith("text/");
        if (isText || isJson) {
            text = new TextDecoder().decode(arrayBuffer);
        }
        if (isJson) {
            // JSON.parse is synchronous; the previous `await` was a no-op.
            json = JSON.parse(text || "{}");
        }
    } catch (e) {
        console.warn("Failed to parse response:", e);
        // ignore — callers receive undefined text/json on parse failure
    }
    return {
        status: result.status,
        headers: headersObj,
        text: text,
        json: json,
        arrayBuffer: arrayBuffer,
    };
}
/** Test stand-in: serializes with JSON rather than real YAML. */
export function stringifyYaml(obj: any) {
    const serialized = JSON.stringify(obj);
    return serialized;
}
/** Test stand-in: parses JSON rather than real YAML. */
export function parseYaml(s: string) {
    const parsed = JSON.parse(s);
    return parsed;
}
/** Locale mock: the harness always reports English. */
export function getLanguage() {
    return "en";
}
export function setIcon(el: HTMLElement, icon: string) {}
/**
 * Encode an ArrayBuffer as a base64 string.
 * Processes the buffer in chunks: spreading the whole Uint8Array into
 * String.fromCharCode throws a RangeError (argument-list limit) on large
 * buffers, which real vault attachments can easily exceed.
 */
export function arrayBufferToBase64(buffer: ArrayBuffer): string {
    const bytes = new Uint8Array(buffer);
    const CHUNK = 0x8000; // 32k code units per fromCharCode call — well under engine limits
    let binary = "";
    for (let i = 0; i < bytes.length; i += CHUNK) {
        binary += String.fromCharCode(...bytes.subarray(i, i + CHUNK));
    }
    return btoa(binary);
}
/** Decode a base64 string into a freshly allocated ArrayBuffer. */
export function base64ToArrayBuffer(base64: string): ArrayBuffer {
    const binary = atob(base64);
    const bytes = new Uint8Array(binary.length);
    for (let i = 0; i < binary.length; i++) {
        bytes[i] = binary.charCodeAt(i);
    }
    return bytes.buffer;
}
// Obsidian types loosened to `any` for the mock — the plugin code under test
// supplies the real shapes at its call sites.
export type DataWriteOptions = any;
export type PluginManifest = any;
export type RequestUrlParam = any;
export type RequestUrlResponse = any;
export type MarkdownFileInfo = any;
/** Shape returned by DataAdapter.list(). */
export type ListedFiles = {
    files: string[];
    folders: string[];
};

View File

@@ -0,0 +1,51 @@
/**
 * Wrap globalThis.fetch to log every request/response for debugging the mocks.
 * Responses are cloned before reading so the original body stays consumable.
 */
export function interceptFetchForLogging() {
    const originalFetch = globalThis.fetch;
    globalThis.fetch = async (...params: any[]) => {
        const paramObj = params[0];
        const initObj = params[1];
        // fetch accepts a URL string or a Request-like object with a `url` property.
        const url = typeof paramObj === "string" ? paramObj : paramObj.url;
        const method = initObj?.method || "GET";
        const headers = initObj?.headers || {};
        const body = initObj?.body || null;
        const headersObj: Record<string, string> = {};
        // NOTE(review): only Headers instances are expanded; plain-object headers
        // are logged as {} — confirm whether that is acceptable for debugging.
        if (headers instanceof Headers) {
            headers.forEach((value, key) => {
                headersObj[key] = value;
            });
        }
        console.dir({
            mockedFetch: {
                url,
                method,
                headers: headersObj,
            },
        });
        try {
            const res = await originalFetch(...params);
            console.log(`[Obsidian Mock] Fetch response: ${res.status} ${res.statusText} for ${method} ${url}`);
            const resClone = res.clone();
            const contentType = resClone.headers.get("content-type") || "";
            const isJson = contentType.includes("application/json");
            if (isJson) {
                const data = await resClone.json();
                console.dir({ mockedFetchResponseJson: data });
            } else {
                const ab = await resClone.arrayBuffer();
                const text = new TextDecoder().decode(ab);
                const isText = /^text\//.test(contentType);
                if (isText) {
                    // Truncate large text bodies to keep logs readable.
                    console.dir({
                        mockedFetchResponseText: ab.byteLength < 1000 ? text : text.slice(0, 1000) + "...(truncated)",
                    });
                } else {
                    console.log(`[Obsidian Mock] Fetch response is of content-type ${contentType}, not logging body.`);
                }
            }
            return res;
        } catch (e) {
            // console.error("[Obsidian Mock] Fetch error:", e);
            console.error(`[Obsidian Mock] Fetch failed for ${method} ${url}, error:`, e);
            throw e;
        }
    };
}

View File

@@ -0,0 +1,33 @@
#!/bin/bash
# Post-start configuration of a single-node CouchDB for LiveSync tests.
# Required env: $hostname, $username, $password. $node defaults to _local.
# Each call below is retried every 5 seconds until CouchDB is up and accepts it.
if [[ -z "$hostname" ]]; then
    echo "ERROR: Hostname missing"
    exit 1
fi
if [[ -z "$username" ]]; then
    echo "ERROR: Username missing"
    exit 1
fi
if [[ -z "$password" ]]; then
    echo "ERROR: Password missing"
    exit 1
fi
if [[ -z "$node" ]]; then
    echo "INFO: defaulting to _local"
    node=_local
fi
echo "-- Configuring CouchDB by REST APIs... -->"
# Finish single-node cluster setup.
until (curl -X POST "${hostname}/_cluster_setup" -H "Content-Type: application/json" -d "{\"action\":\"enable_single_node\",\"username\":\"${username}\",\"password\":\"${password}\",\"bind_address\":\"0.0.0.0\",\"port\":5984,\"singlenode\":true}" --user "${username}:${password}"); do sleep 5; done
# Require authentication on every request.
until (curl -X PUT "${hostname}/_node/${node}/_config/chttpd/require_valid_user" -H "Content-Type: application/json" -d '"true"' --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/chttpd_auth/require_valid_user" -H "Content-Type: application/json" -d '"true"' --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/httpd/WWW-Authenticate" -H "Content-Type: application/json" -d '"Basic realm=\"couchdb\""' --user "${username}:${password}"); do sleep 5; done
# Enable CORS and raise size limits as LiveSync requires.
until (curl -X PUT "${hostname}/_node/${node}/_config/httpd/enable_cors" -H "Content-Type: application/json" -d '"true"' --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/chttpd/enable_cors" -H "Content-Type: application/json" -d '"true"' --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/chttpd/max_http_request_size" -H "Content-Type: application/json" -d '"4294967296"' --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/couchdb/max_document_size" -H "Content-Type: application/json" -d '"50000000"' --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/cors/credentials" -H "Content-Type: application/json" -d '"true"' --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/cors/origins" -H "Content-Type: application/json" -d '"*"' --user "${username}:${password}"); do sleep 5; done
echo "<-- Configuring CouchDB by REST APIs Done!"

View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Launch a disposable CouchDB container for the test suite.
# Expects $username / $password in the environment (supplied via dotenv-cli).
set -e
# Quote the expansions so credentials containing spaces or glob characters
# cannot be word-split by the shell.
docker run -d --name couchdb-test -p 5984:5984 -e COUCHDB_USER="$username" -e COUCHDB_PASSWORD="$password" couchdb:3.5.0

47
test/shell/minio-init.sh Normal file
View File

@@ -0,0 +1,47 @@
#!/bin/bash
set -e
# Create the test bucket on the MinIO instance (and keep a read-write policy
# file around for the commented-out service-account flow below).
# Required env: $bucketName, $minioEndpoint, $accessKey, $secretKey.
cat >/tmp/mybucket-rw.json <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": ["s3:GetBucketLocation","s3:ListBucket"],
"Resource": ["arn:aws:s3:::$bucketName"]
},
{
"Effect": "Allow",
"Action": ["s3:GetObject","s3:PutObject","s3:DeleteObject"],
"Resource": ["arn:aws:s3:::$bucketName/*"]
}
]
}
EOF
# echo "<CORSConfiguration>
# <CORSRule>
# <AllowedOrigin>http://localhost:63315</AllowedOrigin>
# <AllowedOrigin>http://localhost:63316</AllowedOrigin>
# <AllowedOrigin>http://localhost</AllowedOrigin>
# <AllowedMethod>GET</AllowedMethod>
# <AllowedMethod>PUT</AllowedMethod>
# <AllowedMethod>POST</AllowedMethod>
# <AllowedMethod>DELETE</AllowedMethod>
# <AllowedMethod>HEAD</AllowedMethod>
# <AllowedHeader>*</AllowedHeader>
# </CORSRule>
# </CORSConfiguration>" > /tmp/cors.xml
# docker run --rm --network host -v /tmp/mybucket-rw.json:/tmp/mybucket-rw.json --entrypoint=/bin/sh minio/mc -c "
# mc alias set myminio $minioEndpoint $username $password
# mc mb --ignore-existing myminio/$bucketName
# mc admin policy create myminio my-custom-policy /tmp/mybucket-rw.json
# echo 'Creating service account for user $username with access key $accessKey'
# mc admin user svcacct add --access-key '$accessKey' --secret-key '$secretKey' myminio '$username'
# mc admin policy attach myminio my-custom-policy --user '$accessKey'
# echo 'Verifying policy and user creation:'
# mc admin user svcacct info myminio '$accessKey'
# "
# Current flow: use the root credentials directly and just ensure the bucket exists.
docker run --rm --network host -v /tmp/mybucket-rw.json:/tmp/mybucket-rw.json --entrypoint=/bin/sh minio/mc -c "
mc alias set myminio $minioEndpoint $accessKey $secretKey
mc mb --ignore-existing myminio/$bucketName
"

View File

@@ -0,0 +1,2 @@
#!/bin/bash
# Start a disposable MinIO container for integration tests.
# Expects $accessKey / $secretKey / $minioEndpoint in the environment (see .test.env).
# `set -e` added for consistency with couchdb-start.sh so a failed docker run
# fails the npm script instead of being silently ignored.
set -e
# Quote expansions: credentials/URLs would otherwise be word-split (ShellCheck SC2086).
docker run -d --name minio-test -p 9000:9000 -p 9001:9001 -e MINIO_ROOT_USER="$accessKey" -e MINIO_ROOT_PASSWORD="$secretKey" -e MINIO_SERVER_URL="$minioEndpoint" minio/minio server /data --console-address ':9001'

129
test/suite/db_common.ts Normal file
View File

@@ -0,0 +1,129 @@
import { compareMTime, EVEN } from "@/common/utils";
import { TFile, type DataWriteOptions } from "@/deps";
import type { FilePath } from "@/lib/src/common/types";
import { isDocContentSame, readContent } from "@/lib/src/common/utils";
import { waitForIdle, type LiveSyncHarness } from "../harness/harness";
import { expect } from "vitest";
// Fixed mtime (2026-01-01T00:01:02.003 local time) shared by all test writes so
// that mtime comparisons across vault / database / replica are deterministic.
export const defaultFileOption = {
    mtime: new Date(2026, 0, 1, 0, 1, 2, 3).getTime(),
} as const satisfies DataWriteOptions;
/**
 * Create (optionally recreating) a file in the test vault, verify it landed
 * intact, then flush pending file events and wait for the plugin to go idle.
 *
 * @param harness test harness providing the mock app/vault and plugin
 * @param path vault-relative path of the file to create
 * @param content text or binary payload to write
 * @param deleteBeforeSend delete an existing file at `path` first
 * @param fileOptions write options; defaults to the shared deterministic mtime
 * @returns the created abstract file (asserted to be a TFile)
 */
export async function storeFile(
    harness: LiveSyncHarness,
    path: string,
    content: string | Blob,
    deleteBeforeSend = false,
    fileOptions = defaultFileOption
) {
    if (deleteBeforeSend) {
        // Cache the lookup instead of calling getAbstractFileByPath twice.
        const existing = harness.app.vault.getAbstractFileByPath(path);
        if (existing) {
            console.log(`Deleting existing file ${path}`);
            await harness.app.vault.delete(existing as TFile);
        }
    }
    // Create file via vault
    if (content instanceof Blob) {
        console.log(`Creating binary file ${path}`);
        await harness.app.vault.createBinary(path, await content.arrayBuffer(), fileOptions);
    } else {
        await harness.app.vault.create(path, content, fileOptions);
    }
    // Ensure file is created
    const file = harness.app.vault.getAbstractFileByPath(path);
    expect(file).toBeInstanceOf(TFile);
    if (file instanceof TFile) {
        expect(compareMTime(file.stat.mtime, fileOptions?.mtime ?? defaultFileOption.mtime)).toBe(EVEN);
        // Renamed from `readContent`: the old local shadowed the imported
        // readContent() helper from lib/src/common/utils.
        if (content instanceof Blob) {
            const written = await harness.app.vault.readBinary(file);
            expect(await isDocContentSame(written, content)).toBe(true);
        } else {
            const written = await harness.app.vault.read(file);
            expect(written).toBe(content);
        }
    }
    await harness.plugin.services.fileProcessing.commitPendingFileEvents();
    await waitForIdle(harness);
    return file;
}
/**
 * Fetch a file's entry from the plugin's local database.
 * Fails the current test (via expect) when no entry exists, but still returns
 * the raw result so callers can narrow the `false` case themselves.
 */
export async function readFromLocalDB(harness: LiveSyncHarness, path: string) {
    const result = await harness.plugin.localDatabase.getDBEntry(path as FilePath);
    expect(result).not.toBe(false);
    return result;
}
export async function readFromVault(
harness: LiveSyncHarness,
path: string,
isBinary: boolean = false,
fileOptions = defaultFileOption
): Promise<string | ArrayBuffer> {
const file = harness.app.vault.getAbstractFileByPath(path);
expect(file).toBeInstanceOf(TFile);
if (file instanceof TFile) {
// console.log(`MTime: ${file.stat.mtime}, Expected: ${fileOptions.mtime}`);
if (fileOptions.mtime !== undefined) {
expect(compareMTime(file.stat.mtime, fileOptions.mtime)).toBe(EVEN);
}
const content = isBinary ? await harness.app.vault.readBinary(file) : await harness.app.vault.read(file);
return content;
}
throw new Error("File not found in vault");
}
/**
 * Assert that the local database holds `content` (and the expected mtime)
 * for `path`.
 *
 * @param harness test harness providing the plugin's local database
 * @param path vault-relative path whose DB entry is checked
 * @param content expected payload (Blob is compared as ArrayBuffer)
 * @param fileOptions expected write options; mtime (when set) must match EVEN
 * @throws Error when the database has no entry for `path`
 */
export async function checkStoredFileInDB(
    harness: LiveSyncHarness,
    path: string,
    content: string | Blob,
    fileOptions = defaultFileOption
) {
    const entry = await readFromLocalDB(harness, path);
    if (entry === false) {
        throw new Error("DB Content not found");
    }
    const contentToCheck = content instanceof Blob ? await content.arrayBuffer() : content;
    const isDocSame = await isDocContentSame(readContent(entry), contentToCheck);
    if (fileOptions.mtime !== undefined) {
        expect(compareMTime(entry.mtime, fileOptions.mtime)).toBe(EVEN);
    }
    expect(isDocSame).toBe(true);
    // Removed the redundant `return Promise.resolve()`: an async function
    // already resolves with undefined when it falls off the end.
}
/**
 * Write a file through the vault, then verify it is readable back from the
 * vault and (unless skipped) stored in the local database.
 *
 * @param harness test harness
 * @param path vault-relative path to write
 * @param content text or binary payload
 * @param skipCheckToBeWritten skip the database check (e.g. oversized files)
 * @param fileOptions write options; defaults to the shared deterministic mtime
 */
export async function testFileWrite(
    harness: LiveSyncHarness,
    path: string,
    content: string | Blob,
    skipCheckToBeWritten = false,
    fileOptions = defaultFileOption
) {
    const file = await storeFile(harness, path, content, false, fileOptions);
    expect(file).toBeInstanceOf(TFile);
    // storeFile already commits/waits once; this second round is kept to make
    // sure nothing queued by the read-back verification is still pending.
    await harness.plugin.services.fileProcessing.commitPendingFileEvents();
    await waitForIdle(harness);
    const vaultFile = await readFromVault(harness, path, content instanceof Blob, fileOptions);
    expect(await isDocContentSame(vaultFile, content)).toBe(true);
    await harness.plugin.services.fileProcessing.commitPendingFileEvents();
    await waitForIdle(harness);
    // Removed the redundant `return Promise.resolve()` statements: plain
    // `return` is equivalent inside an async function.
    if (skipCheckToBeWritten) {
        return;
    }
    await checkStoredFileInDB(harness, path, content);
}
/**
 * Verify that both the vault and the local database hold `expectedContent`
 * for `path`.
 *
 * @param harness test harness
 * @param path vault-relative path to check
 * @param expectedContent payload the file should contain
 * @param fileOptions expected write options; mtime (when set) must match EVEN
 * @throws Error when the database has no entry for `path`
 */
export async function testFileRead(
    harness: LiveSyncHarness,
    path: string,
    expectedContent: string | Blob,
    fileOptions = defaultFileOption
) {
    await waitForIdle(harness);
    const file = await readFromVault(harness, path, expectedContent instanceof Blob, fileOptions);
    expect(await isDocContentSame(file, expectedContent)).toBe(true);
    // Check local database entry. readFromLocalDB already asserts the entry is
    // not `false`; the explicit guard below only narrows the type.
    const entry = await readFromLocalDB(harness, path);
    if (entry === false) {
        throw new Error("DB Content not found");
    }
    expect(await isDocContentSame(readContent(entry), expectedContent)).toBe(true);
    // Removed `return await Promise.resolve()`: redundant in an async function.
}

View File

@@ -0,0 +1,125 @@
import { beforeAll, describe, expect, it, test } from "vitest";
import { generateHarness, waitForIdle, waitForReady, type LiveSyncHarness } from "../harness/harness";
import { TFile } from "obsidian";
import { DEFAULT_SETTINGS, type FilePath, type ObsidianLiveSyncSettings } from "@/lib/src/common/types";
import { isDocContentSame, readContent } from "@/lib/src/common/utils";
import { DummyFileSourceInisialised, generateBinaryFile, generateFile, init } from "../utils/dummyfile";
// Settings for a purely local run: the plugin is marked configured but no
// remote endpoint is attached, so only the local database paths are exercised.
const localdb_test_setting = {
    ...DEFAULT_SETTINGS,
    isConfigured: true,
    handleFilenameCaseSensitive: false,
} as ObsidianLiveSyncSettings;
// Integration tests for the local-database path only: files created through the
// mock vault must end up in the plugin's local PouchDB with identical content.
describe("Plugin Integration Test (Local Database)", async () => {
    let harness: LiveSyncHarness;
    // Unique vault name per run so repeated runs never collide with stale state.
    const vaultName = "TestVault" + Date.now();
    beforeAll(async () => {
        // The dummy-file charset must be loaded before any generator is used.
        await DummyFileSourceInisialised;
        harness = await generateHarness(vaultName, localdb_test_setting);
        await waitForReady(harness);
    });
    it("should be instantiated and defined", async () => {
        expect(harness.plugin).toBeDefined();
        expect(harness.plugin.app).toBe(harness.app);
        return await Promise.resolve();
    });
    it("should have services initialized", async () => {
        expect(harness.plugin.services).toBeDefined();
        return await Promise.resolve();
    });
    it("should have local database initialized", async () => {
        expect(harness.plugin.localDatabase).toBeDefined();
        expect(harness.plugin.localDatabase.isReady).toBe(true);
        return await Promise.resolve();
    });
    it("should store the changes into the local database", async () => {
        const path = "test-store6.md";
        const content = "Hello, World!";
        if (harness.app.vault.getAbstractFileByPath(path)) {
            console.log(`Deleting existing file ${path}`);
            await harness.app.vault.delete(harness.app.vault.getAbstractFileByPath(path) as TFile);
        }
        // Create file via vault
        await harness.app.vault.create(path, content);
        const file = harness.app.vault.getAbstractFileByPath(path);
        expect(file).toBeInstanceOf(TFile);
        if (file instanceof TFile) {
            // NOTE(review): this local shadows the imported readContent helper
            // inside this if-block only; the later call uses the import.
            const readContent = await harness.app.vault.read(file);
            expect(readContent).toBe(content);
        }
        await harness.plugin.services.fileProcessing.commitPendingFileEvents();
        await waitForIdle(harness);
        // await delay(100); // Wait a bit for the local database to process
        const entry = await harness.plugin.localDatabase.getDBEntry(path as FilePath);
        expect(entry).not.toBe(false);
        if (entry) {
            expect(readContent(entry)).toBe(content);
        }
        return await Promise.resolve();
    });
    // Text files across four orders of magnitude of size (chunking paths).
    test.each([10, 100, 1000, 10000, 50000, 100000])("should handle large file of size %i bytes", async (size) => {
        const path = `test-large-file-${size}.md`;
        const content = Array.from(generateFile(size)).join("");
        if (harness.app.vault.getAbstractFileByPath(path)) {
            console.log(`Deleting existing file ${path}`);
            await harness.app.vault.delete(harness.app.vault.getAbstractFileByPath(path) as TFile);
        }
        // Create file via vault
        await harness.app.vault.create(path, content);
        const file = harness.app.vault.getAbstractFileByPath(path);
        expect(file).toBeInstanceOf(TFile);
        if (file instanceof TFile) {
            const readContent = await harness.app.vault.read(file);
            expect(readContent).toBe(content);
        }
        await harness.plugin.services.fileProcessing.commitPendingFileEvents();
        await waitForIdle(harness);
        const entry = await harness.plugin.localDatabase.getDBEntry(path as FilePath);
        expect(entry).not.toBe(false);
        if (entry) {
            expect(readContent(entry)).toBe(content);
        }
        return await Promise.resolve();
    });
    // Binary sizes 16^0..16^6 (powers of two at 4-bit steps): 1 B to 16 MiB.
    const binaryMap = Array.from({ length: 7 }, (_, i) => Math.pow(2, i * 4));
    test.each(binaryMap)("should handle binary file of size %i bytes", async (size) => {
        const path = `test-binary-file-${size}.bin`;
        const content = new Blob([...generateBinaryFile(size)], { type: "application/octet-stream" });
        if (harness.app.vault.getAbstractFileByPath(path)) {
            console.log(`Deleting existing file ${path}`);
            await harness.app.vault.delete(harness.app.vault.getAbstractFileByPath(path) as TFile);
        }
        // Create file via vault
        await harness.app.vault.createBinary(path, await content.arrayBuffer());
        const file = harness.app.vault.getAbstractFileByPath(path);
        expect(file).toBeInstanceOf(TFile);
        if (file instanceof TFile) {
            const readContent = await harness.app.vault.readBinary(file);
            expect(await isDocContentSame(readContent, content)).toBe(true);
        }
        await harness.plugin.services.fileProcessing.commitPendingFileEvents();
        await waitForIdle(harness);
        const entry = await harness.plugin.localDatabase.getDBEntry(path as FilePath);
        expect(entry).not.toBe(false);
        if (entry) {
            // NOTE(review): readContent is used synchronously elsewhere; the
            // await here is harmless but likely unnecessary — confirm.
            const entryContent = await readContent(entry);
            if (!(entryContent instanceof ArrayBuffer)) {
                throw new Error("Entry content is not an ArrayBuffer");
            }
            // const expectedContent = await content.arrayBuffer();
            expect(await isDocContentSame(entryContent, content)).toBe(true);
        }
        return await Promise.resolve();
    });
});

300
test/suite/sync.test.ts Normal file
View File

@@ -0,0 +1,300 @@
// Functional Test on Main Cases
// This test suite only covers main functional cases of synchronisation. Event handling, error cases,
// and edge, resolving conflicts, etc. will be covered in separate test suites.
import { beforeAll, describe, expect, it, test } from "vitest";
import { generateHarness, waitForIdle, waitForReady, type LiveSyncHarness } from "../harness/harness";
import {
DEFAULT_SETTINGS,
PREFERRED_JOURNAL_SYNC,
PREFERRED_SETTING_SELF_HOSTED,
RemoteTypes,
type FilePath,
type ObsidianLiveSyncSettings,
} from "@/lib/src/common/types";
import {
DummyFileSourceInisialised,
FILE_SIZE_BINS,
FILE_SIZE_MD,
generateBinaryFile,
generateFile,
} from "../utils/dummyfile";
import { checkStoredFileInDB, defaultFileOption, testFileRead, testFileWrite } from "./db_common";
import { delay } from "@/lib/src/common/utils";
// Populated by Vitest from .env/.test.env (see `env` in vitest.config.ts `test`).
const env = (import.meta as any).env;
// Base settings shared by every replication case: both CouchDB and MinIO
// endpoints are filled in; each generated case then selects the remote type
// and E2EE mode on top of this.
const sync_test_setting_base = {
    ...DEFAULT_SETTINGS,
    isConfigured: true,
    handleFilenameCaseSensitive: false,
    couchDB_URI: `${env.hostname}`,
    couchDB_DBNAME: `${env.dbname}`,
    couchDB_USER: `${env.username}`,
    couchDB_PASSWORD: `${env.password}`,
    bucket: `${env.bucketName}`,
    region: "us-east-1",
    endpoint: `${env.minioEndpoint}`,
    accessKey: `${env.accessKey}`,
    secretKey: `${env.secretKey}`,
    useCustomRequestHandler: true,
    forcePathStyle: true,
    bucketPrefix: "",
} as ObsidianLiveSyncSettings;
/** Compose a deterministic test file name: `<prefix>-<type>-file-<size>.<ext>`. */
function generateName(prefix: string, type: string, ext: string, size: number) {
    return [prefix, type, "file", String(size)].join("-") + "." + ext;
}
/**
 * Yield one test configuration per (remote type × E2EE) combination, layering
 * the remote-recommended preset over the shared base settings.
 */
function* generateCase() {
    // Local renamed from the misspelled "passpharse"; the string value itself
    // is unchanged (it is the credential, not an identifier).
    const e2eePassphrase = "thetest-Passphrase3+9-for-e2ee!";
    const recommendedByRemote = {
        [RemoteTypes.REMOTE_COUCHDB]: PREFERRED_SETTING_SELF_HOSTED,
        [RemoteTypes.REMOTE_MINIO]: PREFERRED_JOURNAL_SYNC,
    };
    const remotes = [RemoteTypes.REMOTE_MINIO, RemoteTypes.REMOTE_COUCHDB];
    for (const remoteType of remotes) {
        for (const encrypt of [false, true]) {
            const setting = {
                ...sync_test_setting_base,
                ...recommendedByRemote[remoteType],
                remoteType,
                encrypt,
                passphrase: encrypt ? e2eePassphrase : "",
                usePathObfuscation: encrypt,
            } as ObsidianLiveSyncSettings;
            yield { setting };
        }
    }
}
// Materialise every generated case up front so describe.each can enumerate them.
const cases = Array.from(generateCase());
const fileOptions = defaultFileOption;
/**
 * Ensure the remote database exists (optionally resetting it first) and verify
 * it is reachable.
 *
 * @param harness test harness whose active replicator is used
 * @param setting unused at present — all setup calls use harness.plugin.settings;
 *                kept for signature compatibility with existing callers
 * @param shouldReset when true, wipe the remote database instead of creating it
 */
async function prepareRemote(harness: LiveSyncHarness, setting: ObsidianLiveSyncSettings, shouldReset = false) {
    // Resolve the active replicator once instead of four separate lookups.
    // NOTE(review): assumes the active replicator does not change mid-setup —
    // confirm if reset can swap the instance.
    const replicator = harness.plugin.services.replicator.getActiveReplicator();
    if (shouldReset) {
        await delay(1000);
        await replicator?.tryResetRemoteDatabase(harness.plugin.settings);
    } else {
        await replicator?.tryCreateRemoteDatabase(harness.plugin.settings);
    }
    await replicator?.markRemoteResolved(harness.plugin.settings);
    // No exceptions should be thrown
    const status = await replicator?.getRemoteStatus(harness.plugin.settings);
    console.log("Remote status:", status);
    expect(status).not.toBeFalsy();
}
// End-to-end replication: for each (remote × E2EE) case, reset the remote,
// upload from one fresh vault, then download into another and compare.
describe("Replication Suite Tests", async () => {
    describe.each(cases)("Replication Tests - Remote: $setting.remoteType, E2EE: $setting.encrypt", ({ setting }) => {
        const nameFile = (type: string, ext: string, size: number) => generateName("sync-test", type, ext, size);
        beforeAll(async () => {
            await DummyFileSourceInisialised;
        });
        // Phase 1: wipe the remote so each case starts from a clean slate.
        describe("Remote Database Initialization", async () => {
            let harnessInit: LiveSyncHarness;
            const sync_test_setting_init = {
                ...setting,
            } as ObsidianLiveSyncSettings;
            it("should initialize remote database", async () => {
                const vaultName = "TestVault" + Date.now();
                console.log(`BeforeEach - Remote Database Initialization - Vault: ${vaultName}`);
                harnessInit = await generateHarness(vaultName, sync_test_setting_init);
                await waitForReady(harnessInit);
                expect(harnessInit.plugin).toBeDefined();
                expect(harnessInit.plugin.app).toBe(harnessInit.app);
                await waitForIdle(harnessInit);
            });
            it("should reset remote database", async () => {
                // harnessInit = await generateHarness(vaultName, sync_test_setting_init);
                await waitForReady(harnessInit);
                await prepareRemote(harnessInit, sync_test_setting_init, true);
            });
            it("should be prepared for replication", async () => {
                // harnessInit = await generateHarness(vaultName, sync_test_setting_init);
                await waitForReady(harnessInit);
                // await prepareRemote(harness, sync_test_setting_init, false);
                const status = await harnessInit.plugin.services.replicator
                    .getActiveReplicator()
                    ?.getRemoteStatus(sync_test_setting_init);
                console.log("Connected devices after reset:", status);
                expect(status).not.toBeFalsy();
            });
        });
        // Phase 2: write files into a fresh vault and replicate them upstream.
        describe("Replication - Upload", async () => {
            let harnessUpload: LiveSyncHarness;
            const sync_test_setting_upload = {
                ...setting,
            } as ObsidianLiveSyncSettings;
it("Setup Upload Harness", async () => {
const vaultName = "TestVault" + Date.now();
console.log(`BeforeAll - Replication Upload - Vault: ${vaultName}`);
harnessUpload = await generateHarness(vaultName, sync_test_setting_upload);
await waitForReady(harnessUpload);
expect(harnessUpload.plugin).toBeDefined();
expect(harnessUpload.plugin.app).toBe(harnessUpload.app);
waitForIdle(harnessUpload);
});
            it("should be instantiated and defined", async () => {
                expect(harnessUpload.plugin).toBeDefined();
                expect(harnessUpload.plugin.app).toBe(harnessUpload.app);
            });
            it("should have services initialized", async () => {
                expect(harnessUpload.plugin.services).toBeDefined();
            });
            it("should have local database initialized", async () => {
                expect(harnessUpload.plugin.localDatabase).toBeDefined();
                expect(harnessUpload.plugin.localDatabase.isReady).toBe(true);
            });
            it("should prepare remote database", async () => {
                await prepareRemote(harnessUpload, sync_test_setting_upload, false);
            });
            // describe("File Creation", async () => {
            it("should store single file", async () => {
                const content = "Hello, World!";
                const path = nameFile("store", "md", 0);
                await testFileWrite(harnessUpload, path, content, false, fileOptions);
                // Perform replication
                // await harness.plugin.services.replication.replicate(true);
            });
            it("should different content of several files are stored correctly", async () => {
                await testFileWrite(harnessUpload, nameFile("test-diff-1", "md", 0), "Content A", false, fileOptions);
                await testFileWrite(harnessUpload, nameFile("test-diff-2", "md", 0), "Content B", false, fileOptions);
                await testFileWrite(harnessUpload, nameFile("test-diff-3", "md", 0), "Content C", false, fileOptions);
            });
            test.each(FILE_SIZE_MD)("should handle large file of size %i bytes", async (size) => {
                const content = Array.from(generateFile(size)).join("");
                const path = nameFile("large", "md", size);
                const isTooLarge = harnessUpload.plugin.services.vault.isFileSizeTooLarge(size);
                if (isTooLarge) {
                    console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
                    expect(true).toBe(true);
                } else {
                    await testFileWrite(harnessUpload, path, content, false, fileOptions);
                }
            });
            test.each(FILE_SIZE_BINS)("should handle binary file of size %i bytes", async (size) => {
                // const isTooLarge = harness.plugin.services.vault.isFileSizeTooLarge(size);
                // Oversized binaries are still written to the vault; only the DB
                // check is skipped (they are excluded from sync).
                const content = new Blob([...generateBinaryFile(size)], { type: "application/octet-stream" });
                const path = nameFile("binary", "bin", size);
                await testFileWrite(harnessUpload, path, content, true, fileOptions);
                const isTooLarge = harnessUpload.plugin.services.vault.isFileSizeTooLarge(size);
                if (isTooLarge) {
                    console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
                    expect(true).toBe(true);
                } else {
                    await checkStoredFileInDB(harnessUpload, path, content, fileOptions);
                }
            });
            // });
            // Perform final replication after all tests
            it("Replication after uploads", async () => {
                await harnessUpload.plugin.services.replication.replicate(true);
                await waitForIdle(harnessUpload);
                // Ensure all files are uploaded
                await harnessUpload.plugin.services.replication.replicate(true);
                await waitForIdle(harnessUpload);
            });
        });
        // Phase 3: replicate into a brand-new vault and verify every file.
        describe("Replication - Download", async () => {
            let harnessDownload: LiveSyncHarness;
            // Download into a new vault
            const sync_test_setting_download = {
                ...setting,
            } as ObsidianLiveSyncSettings;
            it("should initialize remote database", async () => {
                const vaultName = "TestVault" + Date.now();
                harnessDownload = await generateHarness(vaultName, sync_test_setting_download);
                await waitForReady(harnessDownload);
                await prepareRemote(harnessDownload, sync_test_setting_download, false);
                await harnessDownload.plugin.services.replication.replicate(true);
                await waitForIdle(harnessDownload);
                // Version info might be downloaded, and then replication will be interrupted,
                await harnessDownload.plugin.services.replication.replicate(true); // Ensure all files are downloaded
                await waitForIdle(harnessDownload);
            });
            it("should perform initial replication to download files", async () => {
                await harnessDownload.plugin.services.replicator
                    .getActiveReplicator()
                    ?.markRemoteResolved(sync_test_setting_download);
                await harnessDownload.plugin.services.replication.replicate(true);
                await waitForIdle(harnessDownload);
                // Version info might be downloaded, and then replication will be interrupted,
                await harnessDownload.plugin.services.replication.replicate(true); // Ensure all files are downloaded
                await waitForIdle(harnessDownload);
            });
            it("should be instantiated and defined", async () => {
                expect(harnessDownload.plugin).toBeDefined();
                expect(harnessDownload.plugin.app).toBe(harnessDownload.app);
            });
            it("should have services initialized", async () => {
                expect(harnessDownload.plugin.services).toBeDefined();
            });
            it("should have local database initialized", async () => {
                expect(harnessDownload.plugin.localDatabase).toBeDefined();
                expect(harnessDownload.plugin.localDatabase.isReady).toBe(true);
            });
            // describe("File Checking", async () => {
            it("should retrieve the single file", async () => {
                const expectedContent = "Hello, World!";
                const path = nameFile("store", "md", 0);
                await testFileRead(harnessDownload, path, expectedContent, fileOptions);
            });
            it("should retrieve different content of several files correctly", async () => {
                await testFileRead(harnessDownload, nameFile("test-diff-1", "md", 0), "Content A", fileOptions);
                await testFileRead(harnessDownload, nameFile("test-diff-2", "md", 0), "Content B", fileOptions);
                await testFileRead(harnessDownload, nameFile("test-diff-3", "md", 0), "Content C", fileOptions);
            });
            test.each(FILE_SIZE_MD)("should retrieve the file %i bytes", async (size) => {
                const content = Array.from(generateFile(size)).join("");
                const path = nameFile("large", "md", size);
                const isTooLarge = harnessDownload.plugin.services.vault.isFileSizeTooLarge(size);
                if (isTooLarge) {
                    // Oversized files must NOT have been replicated.
                    const entry = await harnessDownload.plugin.localDatabase.getDBEntry(path as FilePath);
                    console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
                    expect(entry).toBe(false);
                } else {
                    await testFileRead(harnessDownload, path, content, fileOptions);
                }
            });
            test.each(FILE_SIZE_BINS)("should handle binary file of size %i bytes", async (size) => {
                const path = nameFile("binary", "bin", size);
                const isTooLarge = harnessDownload.plugin.services.vault.isFileSizeTooLarge(size);
                if (isTooLarge) {
                    const entry = await harnessDownload.plugin.localDatabase.getDBEntry(path as FilePath);
                    console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
                    expect(entry).toBe(false);
                } else {
                    const content = new Blob([...generateBinaryFile(size)], { type: "application/octet-stream" });
                    await testFileRead(harnessDownload, path, content, fileOptions);
                }
            });
            // });
        });
        it("Wait for idle state", async () => {
            await delay(100);
        });
    });
});

View File

@@ -0,0 +1,51 @@
import { writeFile } from "../utils/fileapi.vite";
import { DummyFileSourceInisialised, generateBinaryFile, generateFile } from "../utils/dummyfile";
import { describe, expect, it } from "vitest";
// Sanity checks for the dummy-file generators: identical parameters must
// always yield identical output, since replication tests regenerate the same
// fixtures in two independent vaults and compare them.
describe("Test File Test", async () => {
    // Typo fixed in the suite name ("Teet"); charset must be loaded first.
    await DummyFileSourceInisialised;
    it("should generate binary file correctly", async () => {
        // Removed dead locals (`generatedSize`, `chunks`) and the manual throw
        // that duplicated the expect() below.
        const size = 5000;
        const blob = new Blob([...generateBinaryFile(size)], { type: "application/octet-stream" });
        const buf = await blob.arrayBuffer();
        const hexDump = new Uint8Array(buf)
            .toHex()
            .match(/.{1,32}/g)
            ?.join("\n");
        // Generate a second time with the same parameters: must be identical.
        const secondDummy = generateBinaryFile(size);
        const secondBlob = new Blob([...secondDummy], { type: "application/octet-stream" });
        const secondBuf = await secondBlob.arrayBuffer();
        const secondHexDump = new Uint8Array(secondBuf)
            .toHex()
            .match(/.{1,32}/g)
            ?.join("\n");
        expect(hexDump).toBe(secondHexDump);
        // await writeFile("test/testtest/dummyfile.test.bin", buf);
        // await writeFile("test/testtest/dummyfile.test.bin.hexdump.txt", hexDump || "");
    });
    it("should generate text file correctly", async () => {
        const size = 25000;
        const content = [...generateFile(size)].join("");
        const secondContent = [...generateFile(size)].join("");
        expect(content).toBe(secondContent);
        // await writeFile("test/testtest/dummyfile.test.txt", content);
    });
});

77
test/utils/dummyfile.ts Normal file
View File

@@ -0,0 +1,77 @@
import { DEFAULT_SETTINGS } from "@/lib/src/common/types.ts";
import { readFile } from "../utils/fileapi.vite.ts";
// Character pool for generateFile(); populated once by init() from the fixture file.
let charset = "";
/**
 * Load the multi-script character fixture used to build dummy text files.
 * Must complete before generateFile() is used — await DummyFileSourceInisialised.
 */
export async function init() {
    console.log("Initializing dummyfile utils...");
    charset = (await readFile("test/utils/testcharvariants.txt")).toString();
    console.log(`Loaded charset of length ${charset.length}`);
    console.log(charset);
}
// NOTE(review): "Inisialised" is misspelled, but the name is exported and used
// by the test suites; renaming would break importers.
export const DummyFileSourceInisialised = init();
/**
 * Infinite deterministic pseudo-random index generator (mulberry32-style
 * integer mixing). Yields integers in [0, range). Identical (range, seed)
 * pairs always produce the identical sequence, keeping fixtures reproducible.
 */
function* indexer(range: number = 1000, seed: number = 0): Generator<number, number, number> {
    let state = seed | 0;
    for (;;) {
        state = (state + 0x6d2b79f5) | 0;
        let mixed = state;
        mixed = Math.imul(mixed ^ (mixed >>> 15), mixed | 1);
        mixed ^= mixed + Math.imul(mixed ^ (mixed >>> 7), mixed | 61);
        // Map the 32-bit result into [0, 1) and scale to the requested range.
        const unit = ((mixed ^ (mixed >>> 14)) >>> 0) / 4294967296;
        yield Math.floor(unit * range);
    }
}
/**
 * Yield deterministic dummy text totalling `size` characters, in chunks of
 * roughly 1024 UTF-16 units.
 * NOTE: `size` counts code points drawn from the charset, so the resulting
 * string's .length can exceed `size` when multi-unit characters are drawn.
 */
export function* generateFile(size: number): Generator<string> {
    // Code-point split avoids slicing surrogate pairs in the fixture charset.
    const chunkStore = [...charset];
    // Guard: before init() resolves, charset is "" and every draw would append
    // the literal string "undefined" instead of a real character.
    if (chunkStore.length === 0) {
        throw new Error("dummyfile charset not initialised: await DummyFileSourceInisialised first");
    }
    const bufSize = 1024;
    let buf = "";
    let generated = 0;
    const indexGen = indexer(chunkStore.length);
    while (generated < size) {
        const f = indexGen.next().value;
        buf += chunkStore[f];
        generated += 1;
        if (buf.length >= bufSize) {
            yield buf;
            buf = "";
        }
    }
    if (buf.length > 0) {
        yield buf;
    }
}
/**
 * Yield `size` deterministic pseudo-random bytes in chunks of up to 1024.
 *
 * BUG FIX: the previous implementation yielded the SAME backing Uint8Array for
 * every full chunk. Consumers that collect chunks before reading them — e.g.
 * `new Blob([...generateBinaryFile(n)])` — therefore saw every earlier chunk
 * overwritten with the final chunk's bytes. Each yield now hands out a copy.
 */
export function* generateBinaryFile(size: number): Generator<Uint8Array<ArrayBuffer>> {
    let generated = 0;
    const pattern = Array.from({ length: 256 }, (_, i) => i);
    const indexGen = indexer(pattern.length);
    const bufSize = 1024;
    const buf = new Uint8Array(bufSize);
    let bufIdx = 0;
    while (generated < size) {
        const f = indexGen.next().value;
        buf[bufIdx] = pattern[f];
        bufIdx += 1;
        generated += 1;
        if (bufIdx >= bufSize) {
            // slice() copies; the scratch buffer is then safely reused.
            yield buf.slice();
            bufIdx = 0;
        }
    }
    if (bufIdx > 0) {
        // slice (copy) instead of subarray (view) for the tail as well.
        yield buf.slice(0, bufIdx);
    }
}
// File size for markdown test files (10B to 1MB, roughly logarithmic scale)
export const FILE_SIZE_MD = [10, 100, 1000, 10000, 100000, 1000000];
// File size for test files (10B to 40MB, roughly logarithmic scale)
// The final entry is one byte over the configured sync size limit, so tests
// can verify that oversized files are excluded from synchronisation.
export const FILE_SIZE_BINS = [
    10,
    100,
    1000,
    50000,
    100000,
    5000000,
    DEFAULT_SETTINGS.syncMaxSizeInMB * 1024 * 1024 + 1,
];

View File

@@ -0,0 +1,3 @@
// Bridge to Vitest browser-mode server commands: file I/O is executed on the
// Node side of the test runner, since the browser itself cannot touch the FS.
import { server } from "vitest/browser";
const { readFile, writeFile } = server.commands;
export { readFile, writeFile };

View File

@@ -0,0 +1,17 @@
國破山河在,城春草木深。
感時花濺淚,恨別鳥驚心。
烽火連三月,家書抵萬金。
白頭搔更短,渾欲不勝簪。
«Nel mezzo del cammin di nostra vita
mi ritrovai per una selva oscura,
ché la diritta via era smarrita.»
Духовной жаждою томим,
В пустыне мрачной я влачился, —
И шестикрылый серафим
На перепутье мне явился.
Shall I compare thee to a summers day?
Thou art more lovely and more temperate:
Rough winds do shake the darling buds of May,
And summers lease hath all too short a date:
📜🖋️ 🏺 🏛️ 春望𠮷ché🇷🇺АaRTLO🏳🌈👨👩👧👦lʼanatraアイウエオ

View File

@@ -23,6 +23,13 @@
"@lib/*": ["src/lib/src/*"]
}
},
"include": ["**/*.ts"],
"exclude": ["pouchdb-browser-webpack", "utils", "src/lib/apps", "**/*.test.ts"]
"include": ["**/*.ts", "test/**/*.test.ts"],
"exclude": [
"pouchdb-browser-webpack",
"utils",
"src/lib/apps",
"src/**/*.test.ts",
"**/_test/**"
]
}

3
vite.config.ts Normal file
View File

@@ -0,0 +1,3 @@
import { defineConfig } from "vite";
// Intentionally empty Vite config: its presence lets tooling resolve the
// project as a Vite workspace; all real settings live in vitest.config.ts.
export default defineConfig({});

161
vitest.config.ts Normal file
View File

@@ -0,0 +1,161 @@
import { defineConfig } from "vitest/config";
import { playwright } from "@vitest/browser-playwright";
import { svelte } from "@sveltejs/vite-plugin-svelte";
import { sveltePreprocess } from "svelte-preprocess";
import inlineWorkerPlugin from "esbuild-plugin-inline-worker";
import path from "path";
import { fileURLToPath } from "node:url";
import fs from "node:fs";
import dotenv from "dotenv";
import { platform } from "node:process";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// Layer .test.env over .env; keys in .test.env win via Object.assign order.
const defEnv = dotenv.config({ path: ".env" }).parsed;
const testEnv = dotenv.config({ path: ".test.env" }).parsed;
const env = Object.assign({}, defEnv, testEnv);
const debuggerEnabled = env?.ENABLE_DEBUGGER === "true";
// Headless unless the debugger is enabled or HEADLESS=false is set explicitly.
const headless = !debuggerEnabled && env?.HEADLESS !== "false";
// `+ ""` coerces the Buffer returned by readFileSync into a string.
const manifestJson = JSON.parse(fs.readFileSync("./manifest.json") + "");
const packageJson = JSON.parse(fs.readFileSync("./package.json") + "");
const updateInfo = JSON.stringify(fs.readFileSync("./updates.md") + "");
// Tests never run the production module aliasing (see moduleAliasPlugin).
const prod = false;
/**
 * esbuild plugin that, in production builds, substitutes "*.dev" modules with
 * their "*.prod" counterparts and "*.platform" modules with "*.obsidian"
 * counterparts when the substituted file exists on disk. With `prod` false
 * (always true for tests) every resolution falls through to the default.
 *
 * Refactored: the two previously duplicated onResolve handlers differed only
 * in their filter and suffix pair, so they now share one registration helper.
 */
const moduleAliasPlugin = {
    name: "module-alias",
    setup(build: any) {
        // Register one "swap suffix if the target file exists" resolver.
        const registerSuffixSwap = (filter: RegExp, fromSuffix: string, toSuffix: string) => {
            build.onResolve({ filter }, (args: any) => {
                if (!prod) {
                    return null;
                }
                const swapped = args.path.replace(fromSuffix, toSuffix);
                const statFile = swapped.endsWith(".ts") ? swapped : swapped + ".ts";
                const realPath = path.join(args.resolveDir, statFile);
                console.log(`Checking ${statFile}`);
                if (fs.existsSync(realPath)) {
                    console.log(`Replaced ${args.path} with ${swapped}`);
                    return {
                        path: realPath,
                        namespace: "file",
                    };
                }
                return null;
            });
        };
        // NOTE(review): the dots in these filters are unescaped regex wildcards
        // (match any character) — confirm that is intended.
        registerSuffixSwap(/.(dev)(.ts|)$/, ".dev", ".prod");
        registerSuffixSwap(/.(platform)(.ts|)$/, ".platform", ".obsidian");
    },
};
// Modules provided by the Obsidian/Electron runtime that must never be bundled
// into the inline workers.
const externals = [
    "obsidian",
    "electron",
    "crypto",
    "@codemirror/autocomplete",
    "@codemirror/collab",
    "@codemirror/commands",
    "@codemirror/language",
    "@codemirror/lint",
    "@codemirror/search",
    "@codemirror/state",
    "@codemirror/view",
    "@lezer/common",
    "@lezer/highlight",
    "@lezer/lr",
];
// Compile-time constant replacements injected by esbuild; string values are
// wrapped in quotes because esbuild substitutes them verbatim into source.
const define = {
    MANIFEST_VERSION: `"${manifestJson.version}"`,
    PACKAGE_VERSION: `"${packageJson.version}"`,
    UPDATE_INFO: `${updateInfo}`,
    global: "globalThis",
    hostPlatform: `"${platform}"`,
};
// Vitest runs the suite inside a real Chromium instance (Playwright) so
// browser-only APIs (IndexedDB, Blob, workers) behave as they do in Obsidian.
export default defineConfig({
    plugins: [
        moduleAliasPlugin,
        inlineWorkerPlugin({
            external: externals,
            treeShaking: true,
        }),
        svelte({
            preprocess: sveltePreprocess(),
            compilerOptions: { css: "injected", preserveComments: false },
        }),
    ],
    resolve: {
        alias: {
            // Replace the real Obsidian API with the test mock.
            obsidian: path.resolve(__dirname, "./test/harness/obsidian-mock.ts"),
            "@": path.resolve(__dirname, "./src"),
            "@lib": path.resolve(__dirname, "./src/lib/src"),
            src: path.resolve(__dirname, "./src"),
        },
    },
    esbuild: {
        define: define,
        target: "es2018",
        platform: "browser",
    },
    // define,
    server: {
        headers: {
            "Service-Worker-Allowed": "/",
        },
    },
    test: {
        env: env,
        testTimeout: 10000,
        // Suites share docker-backed remotes, so files must run sequentially.
        fileParallelism: false,
        isolate: true,
        watch: false,
        // environment: "browser",
        include: ["test/**/*.test.ts"],
        coverage: {
            include: ["src/**/*.ts", "src/**/*.svelte"],
            exclude: ["**/*.test.ts", "src/lib/**/*.test.ts"],
            provider: "v8",
            reporter: ["text", "json", "html"],
            // ignoreEmptyLines: true,
        },
        browser: {
            provider: playwright({
                launchOptions: {
                    args: ["--js-flags=--expose-gc"],
                    chromiumSandbox: true,
                },
            }),
            enabled: true,
            screenshotFailures: false,
            instances: [
                {
                    execArgv: ["--js-flags=--expose-gc"],
                    browser: "chromium",
                    headless,
                    inspector: debuggerEnabled
                        ? {
                              waitForDebugger: true,
                              enabled: true,
                          }
                        : undefined,
                    printConsoleTrace: true,
                },
            ],
            // NOTE(review): `headless` and `fileParallelism` are also set at the
            // instance/test level above — confirm which level takes effect.
            headless,
            fileParallelism: false,
            ui: debuggerEnabled ? true : false,
        },
    },
});