Mirror of https://github.com/vrtmrz/obsidian-livesync.git, synced 2026-01-20 04:05:28 +00:00
Tests:
- More tests have been added.
@@ -6,4 +6,6 @@ password=testpassword
 accessKey=minioadmin
 secretKey=minioadmin
 bucketName=livesync-test-bucket
 # ENABLE_DEBUGGER=true
+# PRINT_LIVESYNC_LOGS=true
+# ENABLE_UI=true
package.json (14 lines changed)
@@ -32,16 +32,20 @@
 "test:docker-couchdb:up": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/couchdb-start.sh",
 "test:docker-couchdb:init": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/couchdb-init.sh",
 "test:docker-couchdb:start": "npm run test:docker-couchdb:up && sleep 5 && npm run test:docker-couchdb:init",
-"test:docker-couchdb:down": "docker stop couchdb-test && docker rm couchdb-test",
+"test:docker-couchdb:down": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/couchdb-stop.sh",
 "test:docker-couchdb:stop": "npm run test:docker-couchdb:down",
 "test:docker-s3:up": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/minio-start.sh",
 "test:docker-s3:init": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/minio-init.sh",
 "test:docker-s3:start": "npm run test:docker-s3:up && sleep 3 && npm run test:docker-s3:init",
-"test:docker-s3:down": "docker stop minio-test && docker rm minio-test",
+"test:docker-s3:down": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/minio-stop.sh",
 "test:docker-s3:stop": "npm run test:docker-s3:down",
-"test:docker-all:up": "npm run test:docker-couchdb:up && npm run test:docker-s3:up",
-"test:docker-all:init": "npm run test:docker-couchdb:init && npm run test:docker-s3:init",
-"test:docker-all:down": "npm run test:docker-couchdb:down && npm run test:docker-s3:down",
+"test:docker-p2p:up": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/p2p-start.sh",
+"test:docker-p2p:init": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/p2p-init.sh",
+"test:docker-p2p:down": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/p2p-stop.sh",
+"test:docker-p2p:stop": "npm run test:docker-p2p:down",
+"test:docker-all:up": "npm run test:docker-couchdb:up && npm run test:docker-s3:up && npm run test:docker-p2p:up",
+"test:docker-all:init": "npm run test:docker-couchdb:init && npm run test:docker-s3:init && npm run test:docker-p2p:init",
+"test:docker-all:down": "npm run test:docker-couchdb:down && npm run test:docker-s3:down && npm run test:docker-p2p:down",
 "test:docker-all:start": "npm run test:docker-all:up && sleep 5 && npm run test:docker-all:init",
 "test:docker-all:stop": "npm run test:docker-all:down",
 "test:full": "npm run test:docker-all:start && vitest run --coverage && npm run test:docker-all:stop"
@@ -1,12 +1,12 @@
 import { App } from "obsidian";
 import ObsidianLiveSyncPlugin from "@/main";
 import { DEFAULT_SETTINGS, type ObsidianLiveSyncSettings } from "@/lib/src/common/types";
-import { LOG_LEVEL_VERBOSE, Logger, setGlobalLogFunction } from "@lib/common/logger";
+import { LOG_LEVEL_VERBOSE, setGlobalLogFunction } from "@lib/common/logger";
 import { SettingCache } from "./obsidian-mock";
 import { delay, promiseWithResolvers } from "octagonal-wheels/promises";
 import { EVENT_LAYOUT_READY, eventHub } from "@/common/events";
 import { EVENT_PLATFORM_UNLOADED } from "@/lib/src/PlatformAPIs/base/APIBase";
 import { serialized } from "octagonal-wheels/concurrency/lock_v2";
 import { env } from "../suite/variables";
|
||||
export type LiveSyncHarness = {
|
||||
app: App;
|
||||
@@ -15,8 +15,12 @@ export type LiveSyncHarness = {
|
||||
disposalPromise: Promise<void>;
|
||||
isDisposed: () => boolean;
|
||||
};
|
||||
const isLiveSyncLogEnabled = env?.PRINT_LIVESYNC_LOGS === "true";
|
||||
function overrideLogFunction(vaultName: string) {
|
||||
setGlobalLogFunction((msg, level, key) => {
|
||||
if (!isLiveSyncLogEnabled) {
|
||||
return;
|
||||
}
|
||||
if (level && level < LOG_LEVEL_VERBOSE) {
|
||||
return;
|
||||
}
|
||||
@@ -71,11 +75,14 @@ export async function generateHarness(
|
||||
const plugin = new ObsidianLiveSyncPlugin(app, manifest);
|
||||
overrideLogFunction(vaultName);
|
||||
// Initial load
|
||||
await delay(100);
|
||||
await plugin.onload();
|
||||
let isDisposed = false;
|
||||
const waitPromise = promiseWithResolvers<void>();
|
||||
eventHub.once(EVENT_PLATFORM_UNLOADED, async () => {
|
||||
console.log(`Harness for vault '${vaultName}' disposed.`);
|
||||
await delay(100);
|
||||
eventHub.offAll();
|
||||
isDisposed = true;
|
||||
waitPromise.resolve();
|
||||
});
|
||||
@@ -121,7 +128,9 @@ export async function waitForIdle(harness: LiveSyncHarness): Promise<void> {
|
||||
harness.plugin.storageApplyingCount.value;
|
||||
|
||||
if (processing === 0) {
|
||||
console.log(`Idle after ${i} loops`);
|
||||
if (i > 0) {
|
||||
console.log(`Idle after ${i} loops`);
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -115,9 +115,9 @@ export class Vault {
     constructor(vaultName?: string) {
         if (vaultName) {
            this.vaultName = vaultName;
-           this.files = new Map();
-           this.contents = new Map();
        }
+       this.files = new Map();
+       this.contents = new Map();
        this.adapter = new DataAdapter(this);
        this.root = new TFolder(this, "", "", null);
        this.files.set("", this.root);
@@ -220,6 +220,7 @@ export class Vault {
|
||||
file.stat.mtime = options?.mtime ?? Date.now();
|
||||
file.stat.ctime = options?.ctime ?? file.stat.ctime ?? Date.now();
|
||||
file.stat.size = typeof data === "string" ? data.length : data.byteLength;
|
||||
console.warn(`[Obsidian Mock ${this.vaultName}] Modified file at path: '${file.path}'`);
|
||||
this.files.set(file.path, file);
|
||||
this.trigger("modify", file);
|
||||
}
|
||||
@@ -284,7 +285,7 @@
    }

    getName(): string {
-       return SettingCache.get(this) || "MockVault";
+       return this.vaultName;
    }
 }
|
||||
@@ -489,22 +490,40 @@ export class Modal {
|
||||
app: App;
|
||||
contentEl: HTMLElement;
|
||||
titleEl: HTMLElement;
|
||||
modalEl: HTMLElement;
|
||||
isOpen: boolean = false;
|
||||
|
||||
constructor(app: App) {
|
||||
this.app = app;
|
||||
this.contentEl = document.createElement("div");
|
||||
this.contentEl.className = "modal-content";
|
||||
this.titleEl = document.createElement("div");
|
||||
this.titleEl.className = "modal-title";
|
||||
this.modalEl = document.createElement("div");
|
||||
this.modalEl.className = "modal";
|
||||
this.modalEl.style.display = "none";
|
||||
this.modalEl.appendChild(this.titleEl);
|
||||
this.modalEl.appendChild(this.contentEl);
|
||||
}
|
||||
open() {
|
||||
this.isOpen = true;
|
||||
this.modalEl.style.display = "block";
|
||||
if (!this.modalEl.parentElement) {
|
||||
document.body.appendChild(this.modalEl);
|
||||
}
|
||||
this.onOpen();
|
||||
}
|
||||
close() {
|
||||
this.isOpen = false;
|
||||
this.modalEl.style.display = "none";
|
||||
this.onClose();
|
||||
}
|
||||
onOpen() {}
|
||||
onClose() {}
|
||||
setPlaceholder(p: string) {}
|
||||
setTitle(t: string) {}
|
||||
setTitle(t: string) {
|
||||
this.titleEl.textContent = t;
|
||||
}
|
||||
}
|
||||
|
||||
export class PluginSettingTab {
|
||||
@@ -555,59 +574,210 @@ export class Component {
|
||||
|
||||
export class ButtonComponent extends Component {
|
||||
buttonEl: HTMLButtonElement = document.createElement("button");
|
||||
private clickHandler: ((evt: MouseEvent) => any) | null = null;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
this.buttonEl = document.createElement("button");
|
||||
this.buttonEl.type = "button";
|
||||
}
|
||||
|
||||
setButtonText(text: string) {
|
||||
this.buttonEl.textContent = text;
|
||||
return this;
|
||||
}
|
||||
|
||||
setCta() {
|
||||
this.buttonEl.classList.add("mod-cta");
|
||||
return this;
|
||||
}
|
||||
onClick(cb: any) {
|
||||
|
||||
onClick(cb: (evt: MouseEvent) => any) {
|
||||
this.clickHandler = cb;
|
||||
this.buttonEl.removeEventListener("click", this.clickHandler);
|
||||
this.buttonEl.addEventListener("click", (evt) => cb(evt as MouseEvent));
|
||||
return this;
|
||||
}
|
||||
|
||||
setClass(c: string) {
|
||||
this.buttonEl.classList.add(c);
|
||||
return this;
|
||||
}
|
||||
|
||||
setTooltip(tooltip: string) {
|
||||
this.buttonEl.title = tooltip;
|
||||
return this;
|
||||
}
|
||||
|
||||
setDisabled(disabled: boolean) {
|
||||
this.buttonEl.disabled = disabled;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
export class TextComponent extends Component {
|
||||
inputEl: HTMLInputElement = document.createElement("input");
|
||||
onChange(cb: any) {
|
||||
private changeHandler: ((value: string) => any) | null = null;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
this.inputEl = document.createElement("input");
|
||||
this.inputEl.type = "text";
|
||||
}
|
||||
|
||||
onChange(cb: (value: string) => any) {
|
||||
this.changeHandler = cb;
|
||||
this.inputEl.removeEventListener("change", this.handleChange);
|
||||
this.inputEl.addEventListener("change", this.handleChange);
|
||||
this.inputEl.addEventListener("input", (evt) => {
|
||||
const target = evt.target as HTMLInputElement;
|
||||
cb(target.value);
|
||||
});
|
||||
return this;
|
||||
}
|
||||
|
||||
private handleChange = (evt: Event) => {
|
||||
if (this.changeHandler) {
|
||||
const target = evt.target as HTMLInputElement;
|
||||
this.changeHandler(target.value);
|
||||
}
|
||||
};
|
||||
|
||||
setValue(v: string) {
|
||||
this.inputEl.value = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
setPlaceholder(p: string) {
|
||||
this.inputEl.placeholder = p;
|
||||
return this;
|
||||
}
|
||||
|
||||
setDisabled(disabled: boolean) {
|
||||
this.inputEl.disabled = disabled;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
export class ToggleComponent extends Component {
|
||||
onChange(cb: any) {
|
||||
inputEl: HTMLInputElement = document.createElement("input");
|
||||
private changeHandler: ((value: boolean) => any) | null = null;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
this.inputEl = document.createElement("input");
|
||||
this.inputEl.type = "checkbox";
|
||||
}
|
||||
|
||||
onChange(cb: (value: boolean) => any) {
|
||||
this.changeHandler = cb;
|
||||
this.inputEl.addEventListener("change", (evt) => {
|
||||
const target = evt.target as HTMLInputElement;
|
||||
cb(target.checked);
|
||||
});
|
||||
return this;
|
||||
}
|
||||
|
||||
setValue(v: boolean) {
|
||||
this.inputEl.checked = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
setDisabled(disabled: boolean) {
|
||||
this.inputEl.disabled = disabled;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
export class DropdownComponent extends Component {
|
||||
selectEl: HTMLSelectElement = document.createElement("select");
|
||||
private changeHandler: ((value: string) => any) | null = null;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
this.selectEl = document.createElement("select");
|
||||
}
|
||||
|
||||
addOption(v: string, d: string) {
|
||||
const option = document.createElement("option");
|
||||
option.value = v;
|
||||
option.textContent = d;
|
||||
this.selectEl.appendChild(option);
|
||||
return this;
|
||||
}
|
||||
addOptions(o: any) {
|
||||
|
||||
addOptions(o: Record<string, string>) {
|
||||
for (const [value, display] of Object.entries(o)) {
|
||||
this.addOption(value, display);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
onChange(cb: any) {
|
||||
|
||||
onChange(cb: (value: string) => any) {
|
||||
this.changeHandler = cb;
|
||||
this.selectEl.addEventListener("change", (evt) => {
|
||||
const target = evt.target as HTMLSelectElement;
|
||||
cb(target.value);
|
||||
});
|
||||
return this;
|
||||
}
|
||||
|
||||
setValue(v: string) {
|
||||
this.selectEl.value = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
setDisabled(disabled: boolean) {
|
||||
this.selectEl.disabled = disabled;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
export class SliderComponent extends Component {
|
||||
onChange(cb: any) {
|
||||
inputEl: HTMLInputElement = document.createElement("input");
|
||||
private changeHandler: ((value: number) => any) | null = null;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
this.inputEl = document.createElement("input");
|
||||
this.inputEl.type = "range";
|
||||
}
|
||||
|
||||
onChange(cb: (value: number) => any) {
|
||||
this.changeHandler = cb;
|
||||
this.inputEl.addEventListener("change", (evt) => {
|
||||
const target = evt.target as HTMLInputElement;
|
||||
cb(parseFloat(target.value));
|
||||
});
|
||||
this.inputEl.addEventListener("input", (evt) => {
|
||||
const target = evt.target as HTMLInputElement;
|
||||
cb(parseFloat(target.value));
|
||||
});
|
||||
return this;
|
||||
}
|
||||
|
||||
setValue(v: number) {
|
||||
this.inputEl.value = String(v);
|
||||
return this;
|
||||
}
|
||||
|
||||
setMin(min: number) {
|
||||
this.inputEl.min = String(min);
|
||||
return this;
|
||||
}
|
||||
|
||||
setMax(max: number) {
|
||||
this.inputEl.max = String(max);
|
||||
return this;
|
||||
}
|
||||
|
||||
setStep(step: number) {
|
||||
this.inputEl.step = String(step);
|
||||
return this;
|
||||
}
|
||||
|
||||
setDisabled(disabled: boolean) {
|
||||
this.inputEl.disabled = disabled;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
@@ -625,32 +795,42 @@ export class Setting {
|
||||
this.infoEl = containerEl.createDiv();
|
||||
}
|
||||
setName(name: string) {
|
||||
this.nameEl.setText(name);
|
||||
return this;
|
||||
}
|
||||
setDesc(desc: string) {
|
||||
this.descEl.setText(desc);
|
||||
return this;
|
||||
}
|
||||
setClass(c: string) {
|
||||
this.controlEl.addClass(c);
|
||||
return this;
|
||||
}
|
||||
addText(cb: (text: TextComponent) => any) {
|
||||
cb(new TextComponent());
|
||||
const component = new TextComponent();
|
||||
this.controlEl.appendChild(component.inputEl);
|
||||
cb(component);
|
||||
return this;
|
||||
}
|
||||
addToggle(cb: (toggle: ToggleComponent) => any) {
|
||||
cb(new ToggleComponent());
|
||||
const component = new ToggleComponent();
|
||||
cb(component);
|
||||
return this;
|
||||
}
|
||||
addButton(cb: (btn: ButtonComponent) => any) {
|
||||
cb(new ButtonComponent());
|
||||
const btn = new ButtonComponent();
|
||||
this.controlEl.appendChild(btn.buttonEl);
|
||||
cb(btn);
|
||||
return this;
|
||||
}
|
||||
addDropdown(cb: (dropdown: DropdownComponent) => any) {
|
||||
cb(new DropdownComponent());
|
||||
const component = new DropdownComponent();
|
||||
cb(component);
|
||||
return this;
|
||||
}
|
||||
addSlider(cb: (slider: SliderComponent) => any) {
|
||||
cb(new SliderComponent());
|
||||
const component = new SliderComponent();
|
||||
cb(component);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
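The reworked component mocks above route values through real DOM events, so a browser-mode test can drive them directly. A minimal illustrative sketch, not part of this commit (the import path and the test file are assumptions):

import { describe, expect, it } from "vitest";
import { TextComponent, ToggleComponent } from "../harness/obsidian-mock"; // path assumed

describe("obsidian-mock components (sketch)", () => {
    it("forwards input events to onChange", () => {
        const seen: string[] = [];
        const text = new TextComponent().setPlaceholder("vault name").onChange((v) => seen.push(v));
        text.inputEl.value = "MyVault";
        text.inputEl.dispatchEvent(new Event("input")); // the mock listens on "input" and "change"
        expect(seen).toEqual(["MyVault"]);
    });
    it("reports the checkbox state as a boolean", () => {
        let last = true;
        const toggle = new ToggleComponent().setValue(true).onChange((v) => (last = v));
        toggle.inputEl.checked = false;
        toggle.inputEl.dispatchEvent(new Event("change"));
        expect(last).toBe(false);
    });
});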
|
||||
@@ -21,7 +21,7 @@ export function interceptFetchForLogging() {
|
||||
},
|
||||
});
|
||||
try {
|
||||
const res = await originalFetch(...params);
|
||||
const res = await originalFetch.apply(globalThis, params as any);
|
||||
console.log(`[Obsidian Mock] Fetch response: ${res.status} ${res.statusText} for ${method} ${url}`);
|
||||
const resClone = res.clone();
|
||||
const contentType = resClone.headers.get("content-type") || "";
|
||||
|
||||
test/lib/commands.ts (new file, 145 lines)
@@ -0,0 +1,145 @@
|
||||
import type { P2PSyncSetting } from "@/lib/src/common/types";
|
||||
import { delay } from "octagonal-wheels/promises";
|
||||
import type { BrowserContext, Page } from "playwright";
|
||||
import type { Plugin } from "vitest/config";
|
||||
import type { BrowserCommand } from "vitest/node";
|
||||
import { serialized } from "octagonal-wheels/concurrency/lock";
|
||||
export const grantClipboardPermissions: BrowserCommand = async (ctx) => {
|
||||
if (ctx.provider.name === "playwright") {
|
||||
await ctx.context.grantPermissions(["clipboard-read", "clipboard-write"]);
|
||||
console.log("Granted clipboard permissions");
|
||||
return;
|
||||
}
|
||||
};
|
||||
let peerPage: Page | undefined;
|
||||
let peerPageContext: BrowserContext | undefined;
|
||||
let previousName = "";
|
||||
async function setValue(page: Page, selector: string, value: string) {
|
||||
const e = await page.waitForSelector(selector);
|
||||
await e.fill(value);
|
||||
}
|
||||
async function closePeerContexts() {
|
||||
const peerPageLocal = peerPage;
|
||||
const peerPageContextLocal = peerPageContext;
|
||||
if (peerPageLocal) {
|
||||
await peerPageLocal.close();
|
||||
}
|
||||
if (peerPageContextLocal) {
|
||||
await peerPageContextLocal.close();
|
||||
}
|
||||
}
|
||||
export const openWebPeer: BrowserCommand<[P2PSyncSetting, serverPeerName: string]> = async (
|
||||
ctx,
|
||||
setting: P2PSyncSetting,
|
||||
serverPeerName: string = "p2p-livesync-web-peer"
|
||||
) => {
|
||||
if (ctx.provider.name === "playwright") {
|
||||
const previousPage = ctx.page;
|
||||
if (peerPage !== undefined) {
|
||||
if (previousName === serverPeerName) {
|
||||
console.log(`WebPeer for ${serverPeerName} already opened`);
|
||||
return;
|
||||
}
|
||||
console.log(`Closing previous WebPeer for ${previousName}`);
|
||||
await closePeerContexts();
|
||||
}
|
||||
console.log(`Opening webPeer`);
|
||||
return serialized("webpeer", async () => {
|
||||
const browser = ctx.context.browser()!;
|
||||
const context = await browser.newContext();
|
||||
peerPageContext = context;
|
||||
peerPage = await context.newPage();
|
||||
previousName = serverPeerName;
|
||||
console.log(`Navigating...`);
|
||||
await peerPage.goto("http://localhost:8081");
|
||||
await peerPage.waitForLoadState();
|
||||
console.log(`Navigated!`);
|
||||
await setValue(peerPage, "#app > main [placeholder*=wss]", setting.P2P_relays);
|
||||
await setValue(peerPage, "#app > main [placeholder*=anything]", setting.P2P_roomID);
|
||||
await setValue(peerPage, "#app > main [placeholder*=password]", setting.P2P_passphrase);
|
||||
await setValue(peerPage, "#app > main [placeholder*=iphone]", serverPeerName);
|
||||
// await peerPage.getByTitle("Enable P2P Replicator").setChecked(true);
|
||||
await peerPage.getByRole("checkbox").first().setChecked(true);
|
||||
// (await peerPage.waitForSelector("Save and Apply")).click();
|
||||
await peerPage.getByText("Save and Apply").click();
|
||||
await delay(100);
|
||||
await peerPage.reload();
|
||||
await delay(500);
|
||||
for (let i = 0; i < 10; i++) {
|
||||
await delay(100);
|
||||
const btn = peerPage.getByRole("button").filter({ hasText: /^connect/i });
|
||||
if ((await peerPage.getByText(/disconnect/i).count()) > 0) {
|
||||
break;
|
||||
}
|
||||
await btn.click();
|
||||
}
|
||||
await previousPage.bringToFront();
|
||||
ctx.context.on("close", async () => {
|
||||
console.log("Browser context is closing, closing peer page if exists");
|
||||
await closePeerContexts();
|
||||
});
|
||||
console.log("Web peer page opened");
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export const closeWebPeer: BrowserCommand = async (ctx) => {
|
||||
if (ctx.provider.name === "playwright") {
|
||||
return serialized("webpeer", async () => {
|
||||
await closePeerContexts();
|
||||
peerPage = undefined;
|
||||
peerPageContext = undefined;
|
||||
previousName = "";
|
||||
console.log("Web peer page closed");
|
||||
});
|
||||
}
|
||||
};
|
||||
export const acceptWebPeer: BrowserCommand = async (ctx) => {
|
||||
if (peerPage) {
|
||||
// Detect dialogue
|
||||
const buttonsOnDialogs = await peerPage.$$("popup .buttons button");
|
||||
for (const b of buttonsOnDialogs) {
|
||||
const text = (await b.innerText()).toLowerCase();
|
||||
// console.log(`Dialog button found: ${text}`);
|
||||
if (text === "accept") {
|
||||
console.log("Accepting dialog");
|
||||
await b.click({ timeout: 300 });
|
||||
await delay(500);
|
||||
}
|
||||
}
|
||||
const buttons = peerPage.getByRole("button").filter({ hasText: /^accept$/i });
|
||||
const a = await buttons.all();
|
||||
for (const b of a) {
|
||||
await b.click({ timeout: 300 });
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
export default function BrowserCommands(): Plugin {
|
||||
return {
|
||||
name: "vitest:custom-commands",
|
||||
config() {
|
||||
return {
|
||||
test: {
|
||||
browser: {
|
||||
commands: {
|
||||
grantClipboardPermissions,
|
||||
openWebPeer,
|
||||
closeWebPeer,
|
||||
acceptWebPeer,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
},
|
||||
};
|
||||
}
|
||||
declare module "vitest/browser" {
|
||||
interface BrowserCommands {
|
||||
grantClipboardPermissions: () => Promise<void>;
|
||||
openWebPeer: (setting: P2PSyncSetting, serverPeerName: string) => Promise<void>;
|
||||
closeWebPeer: () => Promise<void>;
|
||||
acceptWebPeer: () => Promise<boolean>;
|
||||
}
|
||||
}
|
||||
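The commands registered above become callable from browser tests through vitest's commands bridge, as the module augmentation at the end of the file declares. An illustrative sketch of the calling side, not part of this commit (import paths and the concrete settings are assumptions; the P2P suites later in this diff use the same pattern):

import { commands } from "vitest/browser";
import { afterAll, beforeAll, describe, it } from "vitest";
import { PREFERRED_SETTING_SELF_HOSTED, RemoteTypes, type ObsidianLiveSyncSettings } from "@/lib/src/common/types";
import { settingBase } from "../suite/variables.ts"; // path assumed

describe("web peer lifecycle (sketch)", () => {
    const setting = {
        ...settingBase,
        ...PREFERRED_SETTING_SELF_HOSTED,
        remoteType: RemoteTypes.REMOTE_P2P,
    } as ObsidianLiveSyncSettings;
    beforeAll(async () => {
        await commands.grantClipboardPermissions();
        // Spins up (or reuses) the web peer page served by the p2p-start.sh containers.
        await commands.openWebPeer(setting, "p2p-livesync-web-peer");
    });
    afterAll(async () => {
        await commands.closeWebPeer();
    });
    it("accepts any pending peer dialogs", async () => {
        await commands.acceptWebPeer();
    });
});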
test/lib/ui.ts (new file, 70 lines)
@@ -0,0 +1,70 @@
|
||||
import { page } from "vitest/browser";
|
||||
import { delay } from "@/lib/src/common/utils";
|
||||
|
||||
export async function waitForDialogShown(dialogText: string, timeout = 500) {
|
||||
const ttl = Date.now() + timeout;
|
||||
while (Date.now() < ttl) {
|
||||
try {
|
||||
await delay(50);
|
||||
const dialog = page
|
||||
.getByText(dialogText)
|
||||
.elements()
|
||||
.filter((e) => e.classList.contains("modal-title"))
|
||||
.filter((e) => e.checkVisibility());
|
||||
if (dialog.length === 0) {
|
||||
continue;
|
||||
}
|
||||
return true;
|
||||
} catch (e) {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
export async function waitForDialogHidden(dialogText: string | RegExp, timeout = 500) {
|
||||
const ttl = Date.now() + timeout;
|
||||
while (Date.now() < ttl) {
|
||||
try {
|
||||
await delay(50);
|
||||
const dialog = page
|
||||
.getByText(dialogText)
|
||||
.elements()
|
||||
.filter((e) => e.classList.contains("modal-title"))
|
||||
.filter((e) => e.checkVisibility());
|
||||
if (dialog.length > 0) {
|
||||
// console.log(`Still exist ${dialogText.toString()}`);
|
||||
continue;
|
||||
}
|
||||
return true;
|
||||
} catch (e) {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export async function waitForButtonClick(buttonText: string | RegExp, timeout = 500) {
|
||||
const ttl = Date.now() + timeout;
|
||||
while (Date.now() < ttl) {
|
||||
try {
|
||||
await delay(100);
|
||||
const buttons = page
|
||||
.getByText(buttonText)
|
||||
.elements()
|
||||
.filter((e) => e.checkVisibility() && e.tagName.toLowerCase() == "button");
|
||||
if (buttons.length == 0) {
|
||||
// console.log(`Could not found ${buttonText.toString()}`);
|
||||
continue;
|
||||
}
|
||||
console.log(`Button detected: ${buttonText.toString()}`);
|
||||
// console.dir(buttons[0])
|
||||
await page.elementLocator(buttons[0]).click();
|
||||
await delay(100);
|
||||
return true;
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
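These helpers poll the page and return false on timeout instead of throwing, so callers should assert the result. An illustrative sketch of driving a confirmation dialog with them, not part of this commit (the dialog title and the OK button label are hypothetical):

import { waitForButtonClick, waitForDialogHidden, waitForDialogShown } from "../lib/ui"; // path assumed

export async function confirmDialog(title: string): Promise<boolean> {
    // Wait for the dialog to appear, press its OK button, then wait until it is gone.
    if (!(await waitForDialogShown(title, 2000))) return false;
    if (!(await waitForButtonClick(/^ok$/i, 2000))) return false;
    return waitForDialogHidden(title, 2000);
}

// e.g. expect(await confirmDialog("Fetch config from server")).toBe(true); // hypothetical title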
test/lib/util.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
import { delay } from "@/lib/src/common/utils";

export async function waitTaskWithFollowups<T>(
    task: Promise<T>,
    followup: () => Promise<void>,
    timeout: number = 10000,
    interval: number = 1000
): Promise<T> {
    const symbolNotCompleted = Symbol("notCompleted");
    const isCompleted = () => Promise.race([task, Promise.resolve(symbolNotCompleted)]);
    const ttl = Date.now() + timeout;
    do {
        const state = await isCompleted();
        if (state !== symbolNotCompleted) {
            return state;
        }
        await followup();
        await delay(interval);
    } while (Date.now() < ttl);
    throw new Error("Task did not complete in time");
}
|
||||
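waitTaskWithFollowups races the task against an already-resolved sentinel, so each loop iteration checks completion without blocking, runs the follow-up, and then waits out the interval. A minimal usage sketch, not part of this commit (the slow task and the follow-up are hypothetical stand-ins):

import { waitTaskWithFollowups } from "./util";

// Resolves after ~3 seconds while the follow-up logs a heartbeat every second.
const slowTask = new Promise<string>((resolve) => setTimeout(() => resolve("done"), 3000));
const result = await waitTaskWithFollowups(
    slowTask,
    async () => console.log("still waiting..."),
    10000, // overall timeout in ms
    1000 // follow-up interval in ms
);
console.log(result); // "done"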
test/shell/couchdb-stop.sh (new file, 3 lines)
@@ -0,0 +1,3 @@
#!/bin/bash
docker stop couchdb-test
docker rm couchdb-test
test/shell/minio-stop.sh (new file, 3 lines)
@@ -0,0 +1,3 @@
#!/bin/bash
docker stop minio-test
docker rm minio-test
test/shell/p2p-init.sh (new file, 2 lines)
@@ -0,0 +1,2 @@
#!/bin/bash
echo "P2P Init - No additional initialization required."
test/shell/p2p-start.sh (new file, 8 lines)
@@ -0,0 +1,8 @@
#!/bin/bash
set -e
script_dir=$(dirname "$0")
webpeer_dir=$script_dir/../../src/lib/apps/webpeer

docker run -d --name relay-test -p 4000:8080 scsibug/nostr-rs-relay:latest
npm run --prefix $webpeer_dir build
docker run -d --name webpeer-test -p 8081:8043 -v $webpeer_dir/dist:/srv/http pierrezemb/gostatic
test/shell/p2p-stop.sh (new file, 5 lines)
@@ -0,0 +1,5 @@
#!/bin/bash
docker stop relay-test
docker rm relay-test
docker stop webpeer-test
docker rm webpeer-test
@@ -11,7 +11,7 @@ const localdb_test_setting = {
     handleFilenameCaseSensitive: false,
 } as ObsidianLiveSyncSettings;

-describe("Plugin Integration Test (Local Database)", async () => {
+describe.skip("Plugin Integration Test (Local Database)", async () => {
     let harness: LiveSyncHarness;
     const vaultName = "TestVault" + Date.now();
|
||||
test/suite/sync.senario.basic.ts (new file, 275 lines)
@@ -0,0 +1,275 @@
|
||||
// Functional Test on Main Cases
// This test suite only covers the main functional cases of synchronisation. Event handling, error cases,
// edge cases, conflict resolution, etc. will be covered in separate test suites.
|
||||
import { afterAll, beforeAll, describe, expect, it, test } from "vitest";
|
||||
import { generateHarness, waitForIdle, waitForReady, type LiveSyncHarness } from "../harness/harness";
|
||||
import { RemoteTypes, type FilePath, type ObsidianLiveSyncSettings } from "@/lib/src/common/types";
|
||||
|
||||
import {
|
||||
DummyFileSourceInisialised,
|
||||
FILE_SIZE_BINS,
|
||||
FILE_SIZE_MD,
|
||||
generateBinaryFile,
|
||||
generateFile,
|
||||
} from "../utils/dummyfile";
|
||||
import { checkStoredFileInDB, testFileRead, testFileWrite } from "./db_common";
|
||||
import { delay } from "@/lib/src/common/utils";
|
||||
import { commands } from "vitest/browser";
|
||||
import { closeReplication, performReplication, prepareRemote } from "./sync_common";
|
||||
import type { DataWriteOptions } from "obsidian";
|
||||
|
||||
type MTimedDataWriteOptions = DataWriteOptions & { mtime: number };
|
||||
export type TestOptions = {
|
||||
setting: ObsidianLiveSyncSettings;
|
||||
fileOptions: MTimedDataWriteOptions;
|
||||
};
|
||||
function generateName(prefix: string, type: string, ext: string, size: number) {
|
||||
return `${prefix}-${type}-file-${size}.${ext}`;
|
||||
}
|
||||
export function syncBasicCase(label: string, { setting, fileOptions }: TestOptions) {
|
||||
describe("Replication Suite Tests - " + label, () => {
|
||||
const nameFile = (type: string, ext: string, size: number) => generateName("sync-test", type, ext, size);
|
||||
let serverPeerName = "";
|
||||
// TODO: Harness disposal may break the event loop of P2P replication,
// so we keep the harnesses alive until all tests are done.
// It may be something in trystero, or not.
|
||||
let harnessUpload: LiveSyncHarness;
|
||||
let harnessDownload: LiveSyncHarness;
|
||||
beforeAll(async () => {
|
||||
await DummyFileSourceInisialised;
|
||||
if (setting.remoteType === RemoteTypes.REMOTE_P2P) {
|
||||
// await commands.closeWebPeer();
|
||||
serverPeerName = "t-" + Date.now();
|
||||
setting.P2P_AutoAcceptingPeers = serverPeerName;
|
||||
setting.P2P_AutoSyncPeers = serverPeerName;
|
||||
setting.P2P_DevicePeerName = "client-" + Date.now();
|
||||
await commands.openWebPeer(setting, serverPeerName);
|
||||
}
|
||||
});
|
||||
afterAll(async () => {
|
||||
if (setting.remoteType === RemoteTypes.REMOTE_P2P) {
|
||||
await commands.closeWebPeer();
|
||||
// await closeP2PReplicatorConnections(harnessUpload);
|
||||
}
|
||||
});
|
||||
|
||||
describe("Remote Database Initialization", () => {
|
||||
let harnessInit: LiveSyncHarness;
|
||||
const sync_test_setting_init = {
|
||||
...setting,
|
||||
} as ObsidianLiveSyncSettings;
|
||||
beforeAll(async () => {
|
||||
const vaultName = "TestVault" + Date.now();
|
||||
console.log(`BeforeAll - Remote Database Initialization - Vault: ${vaultName}`);
|
||||
harnessInit = await generateHarness(vaultName, sync_test_setting_init);
|
||||
await waitForReady(harnessInit);
|
||||
expect(harnessInit.plugin).toBeDefined();
|
||||
expect(harnessInit.plugin.app).toBe(harnessInit.app);
|
||||
await waitForIdle(harnessInit);
|
||||
});
|
||||
afterAll(async () => {
|
||||
await harnessInit.plugin.services.replicator.getActiveReplicator()?.closeReplication();
|
||||
await harnessInit.dispose();
|
||||
await delay(1000);
|
||||
});
|
||||
|
||||
it("should reset remote database", async () => {
|
||||
// harnessInit = await generateHarness(vaultName, sync_test_setting_init);
|
||||
await waitForReady(harnessInit);
|
||||
await prepareRemote(harnessInit, sync_test_setting_init, true);
|
||||
});
|
||||
it("should be prepared for replication", async () => {
|
||||
await waitForReady(harnessInit);
|
||||
if (setting.remoteType !== RemoteTypes.REMOTE_P2P) {
|
||||
const status = await harnessInit.plugin.services.replicator
|
||||
.getActiveReplicator()
|
||||
?.getRemoteStatus(sync_test_setting_init);
|
||||
console.log("Connected devices after reset:", status);
|
||||
expect(status).not.toBeFalsy();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("Replication - Upload", () => {
|
||||
const sync_test_setting_upload = {
|
||||
...setting,
|
||||
} as ObsidianLiveSyncSettings;
|
||||
|
||||
beforeAll(async () => {
|
||||
const vaultName = "TestVault" + Date.now();
|
||||
console.log(`BeforeAll - Replication Upload - Vault: ${vaultName}`);
|
||||
if (setting.remoteType === RemoteTypes.REMOTE_P2P) {
|
||||
sync_test_setting_upload.P2P_AutoAcceptingPeers = serverPeerName;
|
||||
sync_test_setting_upload.P2P_AutoSyncPeers = serverPeerName;
|
||||
sync_test_setting_upload.P2P_DevicePeerName = "up-" + Date.now();
|
||||
}
|
||||
harnessUpload = await generateHarness(vaultName, sync_test_setting_upload);
|
||||
await waitForReady(harnessUpload);
|
||||
expect(harnessUpload.plugin).toBeDefined();
|
||||
expect(harnessUpload.plugin.app).toBe(harnessUpload.app);
|
||||
await waitForIdle(harnessUpload);
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await closeReplication(harnessUpload);
|
||||
});
|
||||
|
||||
it("should be instantiated and defined", () => {
|
||||
expect(harnessUpload.plugin).toBeDefined();
|
||||
expect(harnessUpload.plugin.app).toBe(harnessUpload.app);
|
||||
});
|
||||
|
||||
it("should have services initialized", () => {
|
||||
expect(harnessUpload.plugin.services).toBeDefined();
|
||||
});
|
||||
|
||||
it("should have local database initialized", () => {
|
||||
expect(harnessUpload.plugin.localDatabase).toBeDefined();
|
||||
expect(harnessUpload.plugin.localDatabase.isReady).toBe(true);
|
||||
});
|
||||
|
||||
it("should prepare remote database", async () => {
|
||||
await prepareRemote(harnessUpload, sync_test_setting_upload, false);
|
||||
});
|
||||
|
||||
// describe("File Creation", async () => {
|
||||
it("should a file has been created", async () => {
|
||||
const content = "Hello, World!";
|
||||
const path = nameFile("store", "md", 0);
|
||||
await testFileWrite(harnessUpload, path, content, false, fileOptions);
|
||||
// Perform replication
|
||||
// await harness.plugin.services.replication.replicate(true);
|
||||
});
|
||||
it("should different content of several files have been created correctly", async () => {
|
||||
await testFileWrite(harnessUpload, nameFile("test-diff-1", "md", 0), "Content A", false, fileOptions);
|
||||
await testFileWrite(harnessUpload, nameFile("test-diff-2", "md", 0), "Content B", false, fileOptions);
|
||||
await testFileWrite(harnessUpload, nameFile("test-diff-3", "md", 0), "Content C", false, fileOptions);
|
||||
});
|
||||
|
||||
test.each(FILE_SIZE_MD)("should large file of size %i bytes has been created", async (size) => {
|
||||
const content = Array.from(generateFile(size)).join("");
|
||||
const path = nameFile("large", "md", size);
|
||||
const isTooLarge = harnessUpload.plugin.services.vault.isFileSizeTooLarge(size);
|
||||
if (isTooLarge) {
|
||||
console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
|
||||
expect(true).toBe(true);
|
||||
} else {
|
||||
await testFileWrite(harnessUpload, path, content, false, fileOptions);
|
||||
}
|
||||
});
|
||||
|
||||
test.each(FILE_SIZE_BINS)("should binary file of size %i bytes has been created", async (size) => {
|
||||
const content = new Blob([...generateBinaryFile(size)], { type: "application/octet-stream" });
|
||||
const path = nameFile("binary", "bin", size);
|
||||
await testFileWrite(harnessUpload, path, content, true, fileOptions);
|
||||
const isTooLarge = harnessUpload.plugin.services.vault.isFileSizeTooLarge(size);
|
||||
if (isTooLarge) {
|
||||
console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
|
||||
expect(true).toBe(true);
|
||||
} else {
|
||||
await checkStoredFileInDB(harnessUpload, path, content, fileOptions);
|
||||
}
|
||||
});
|
||||
|
||||
it("Replication after uploads", async () => {
|
||||
await performReplication(harnessUpload);
|
||||
await performReplication(harnessUpload);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Replication - Download", () => {
|
||||
// Download into a new vault
|
||||
const sync_test_setting_download = {
|
||||
...setting,
|
||||
} as ObsidianLiveSyncSettings;
|
||||
beforeAll(async () => {
|
||||
const vaultName = "TestVault" + Date.now();
|
||||
console.log(`BeforeAll - Replication Download - Vault: ${vaultName}`);
|
||||
if (setting.remoteType === RemoteTypes.REMOTE_P2P) {
|
||||
sync_test_setting_download.P2P_AutoAcceptingPeers = serverPeerName;
|
||||
sync_test_setting_download.P2P_AutoSyncPeers = serverPeerName;
|
||||
sync_test_setting_download.P2P_DevicePeerName = "down-" + Date.now();
|
||||
}
|
||||
harnessDownload = await generateHarness(vaultName, sync_test_setting_download);
|
||||
await waitForReady(harnessDownload);
|
||||
await prepareRemote(harnessDownload, sync_test_setting_download, false);
|
||||
|
||||
await performReplication(harnessDownload);
|
||||
await waitForIdle(harnessDownload);
|
||||
await delay(1000);
|
||||
await performReplication(harnessDownload);
|
||||
await waitForIdle(harnessDownload);
|
||||
});
|
||||
afterAll(async () => {
|
||||
await closeReplication(harnessDownload);
|
||||
});
|
||||
|
||||
it("should be instantiated and defined", () => {
|
||||
expect(harnessDownload.plugin).toBeDefined();
|
||||
expect(harnessDownload.plugin.app).toBe(harnessDownload.app);
|
||||
});
|
||||
|
||||
it("should have services initialized", () => {
|
||||
expect(harnessDownload.plugin.services).toBeDefined();
|
||||
});
|
||||
|
||||
it("should have local database initialized", () => {
|
||||
expect(harnessDownload.plugin.localDatabase).toBeDefined();
|
||||
expect(harnessDownload.plugin.localDatabase.isReady).toBe(true);
|
||||
});
|
||||
|
||||
it("should a file has been synchronised", async () => {
|
||||
const expectedContent = "Hello, World!";
|
||||
const path = nameFile("store", "md", 0);
|
||||
await testFileRead(harnessDownload, path, expectedContent, fileOptions);
|
||||
});
|
||||
it("should different content of several files have been synchronised", async () => {
|
||||
await testFileRead(harnessDownload, nameFile("test-diff-1", "md", 0), "Content A", fileOptions);
|
||||
await testFileRead(harnessDownload, nameFile("test-diff-2", "md", 0), "Content B", fileOptions);
|
||||
await testFileRead(harnessDownload, nameFile("test-diff-3", "md", 0), "Content C", fileOptions);
|
||||
});
|
||||
|
||||
test.each(FILE_SIZE_MD)("should the file %i bytes had been synchronised", async (size) => {
|
||||
const content = Array.from(generateFile(size)).join("");
|
||||
const path = nameFile("large", "md", size);
|
||||
const isTooLarge = harnessDownload.plugin.services.vault.isFileSizeTooLarge(size);
|
||||
if (isTooLarge) {
|
||||
const entry = await harnessDownload.plugin.localDatabase.getDBEntry(path as FilePath);
|
||||
console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
|
||||
expect(entry).toBe(false);
|
||||
} else {
|
||||
await testFileRead(harnessDownload, path, content, fileOptions);
|
||||
}
|
||||
});
|
||||
|
||||
test.each(FILE_SIZE_BINS)("should binary file of size %i bytes had been synchronised", async (size) => {
|
||||
const path = nameFile("binary", "bin", size);
|
||||
|
||||
const isTooLarge = harnessDownload.plugin.services.vault.isFileSizeTooLarge(size);
|
||||
if (isTooLarge) {
|
||||
const entry = await harnessDownload.plugin.localDatabase.getDBEntry(path as FilePath);
|
||||
console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
|
||||
expect(entry).toBe(false);
|
||||
} else {
|
||||
const content = new Blob([...generateBinaryFile(size)], { type: "application/octet-stream" });
|
||||
await testFileRead(harnessDownload, path, content, fileOptions);
|
||||
}
|
||||
});
|
||||
});
|
||||
afterAll(async () => {
|
||||
if (harnessDownload) {
|
||||
await closeReplication(harnessDownload);
|
||||
await harnessDownload.dispose();
|
||||
await delay(1000);
|
||||
}
|
||||
if (harnessUpload) {
|
||||
await closeReplication(harnessUpload);
|
||||
await harnessUpload.dispose();
|
||||
await delay(1000);
|
||||
}
|
||||
});
|
||||
it("Wait for idle state", async () => {
|
||||
await delay(100);
|
||||
});
|
||||
});
|
||||
}
|
||||
test/suite/sync.single.test.ts (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
// Functional Test on Main Cases
// This test suite only covers the main functional cases of synchronisation. Event handling, error cases,
// edge cases, conflict resolution, etc. will be covered in separate test suites.
|
||||
import { describe } from "vitest";
|
||||
import {
|
||||
PREFERRED_JOURNAL_SYNC,
|
||||
PREFERRED_SETTING_SELF_HOSTED,
|
||||
RemoteTypes,
|
||||
type ObsidianLiveSyncSettings,
|
||||
} from "@/lib/src/common/types";
|
||||
|
||||
import { defaultFileOption } from "./db_common";
|
||||
import { syncBasicCase } from "./sync.senario.basic.ts";
|
||||
import { settingBase } from "./variables.ts";
|
||||
const sync_test_setting_base = settingBase;
|
||||
export const env = (import.meta as any).env;
|
||||
function* generateCase() {
|
||||
const passpharse = "thetest-Passphrase3+9-for-e2ee!";
|
||||
const REMOTE_RECOMMENDED = {
|
||||
[RemoteTypes.REMOTE_COUCHDB]: PREFERRED_SETTING_SELF_HOSTED,
|
||||
[RemoteTypes.REMOTE_MINIO]: PREFERRED_JOURNAL_SYNC,
|
||||
[RemoteTypes.REMOTE_P2P]: PREFERRED_SETTING_SELF_HOSTED,
|
||||
};
|
||||
const remoteTypes = [RemoteTypes.REMOTE_COUCHDB];
|
||||
// const remoteTypes = [RemoteTypes.REMOTE_P2P];
|
||||
const e2eeOptions = [false];
|
||||
// const e2eeOptions = [true];
|
||||
for (const remoteType of remoteTypes) {
|
||||
for (const useE2EE of e2eeOptions) {
|
||||
yield {
|
||||
setting: {
|
||||
...sync_test_setting_base,
|
||||
...REMOTE_RECOMMENDED[remoteType],
|
||||
remoteType,
|
||||
encrypt: useE2EE,
|
||||
passphrase: useE2EE ? passpharse : "",
|
||||
usePathObfuscation: useE2EE,
|
||||
} as ObsidianLiveSyncSettings,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
describe.skip("Replication Suite Tests (Single)", async () => {
|
||||
const cases = Array.from(generateCase());
|
||||
const fileOptions = defaultFileOption;
|
||||
describe.each(cases)("Replication Tests - Remote: $setting.remoteType, E2EE: $setting.encrypt", ({ setting }) => {
|
||||
syncBasicCase(`Remote: ${setting.remoteType}, E2EE: ${setting.encrypt}`, { setting, fileOptions });
|
||||
});
|
||||
});
|
||||
@@ -1,57 +1,32 @@
|
||||
// Functional Test on Main Cases
// This test suite only covers the main functional cases of synchronisation. Event handling, error cases,
// edge cases, conflict resolution, etc. will be covered in separate test suites.
|
||||
import { beforeAll, describe, expect, it, test } from "vitest";
|
||||
import { generateHarness, waitForIdle, waitForReady, type LiveSyncHarness } from "../harness/harness";
|
||||
import { describe } from "vitest";
|
||||
import {
|
||||
DEFAULT_SETTINGS,
|
||||
PREFERRED_JOURNAL_SYNC,
|
||||
PREFERRED_SETTING_SELF_HOSTED,
|
||||
RemoteTypes,
|
||||
type FilePath,
|
||||
type ObsidianLiveSyncSettings,
|
||||
} from "@/lib/src/common/types";
|
||||
|
||||
import {
|
||||
DummyFileSourceInisialised,
|
||||
FILE_SIZE_BINS,
|
||||
FILE_SIZE_MD,
|
||||
generateBinaryFile,
|
||||
generateFile,
|
||||
} from "../utils/dummyfile";
|
||||
import { checkStoredFileInDB, defaultFileOption, testFileRead, testFileWrite } from "./db_common";
|
||||
import { delay } from "@/lib/src/common/utils";
|
||||
const env = (import.meta as any).env;
|
||||
const sync_test_setting_base = {
|
||||
...DEFAULT_SETTINGS,
|
||||
isConfigured: true,
|
||||
handleFilenameCaseSensitive: false,
|
||||
couchDB_URI: `${env.hostname}`,
|
||||
couchDB_DBNAME: `${env.dbname}`,
|
||||
couchDB_USER: `${env.username}`,
|
||||
couchDB_PASSWORD: `${env.password}`,
|
||||
bucket: `${env.bucketName}`,
|
||||
region: "us-east-1",
|
||||
endpoint: `${env.minioEndpoint}`,
|
||||
accessKey: `${env.accessKey}`,
|
||||
secretKey: `${env.secretKey}`,
|
||||
useCustomRequestHandler: true,
|
||||
forcePathStyle: true,
|
||||
bucketPrefix: "",
|
||||
} as ObsidianLiveSyncSettings;
|
||||
|
||||
function generateName(prefix: string, type: string, ext: string, size: number) {
|
||||
return `${prefix}-${type}-file-${size}.${ext}`;
|
||||
}
|
||||
|
||||
import { defaultFileOption } from "./db_common";
|
||||
import { syncBasicCase } from "./sync.senario.basic.ts";
|
||||
import { settingBase } from "./variables.ts";
|
||||
const sync_test_setting_base = settingBase;
|
||||
export const env = (import.meta as any).env;
|
||||
function* generateCase() {
|
||||
const passpharse = "thetest-Passphrase3+9-for-e2ee!";
|
||||
const REMOTE_RECOMMENDED = {
|
||||
[RemoteTypes.REMOTE_COUCHDB]: PREFERRED_SETTING_SELF_HOSTED,
|
||||
[RemoteTypes.REMOTE_MINIO]: PREFERRED_JOURNAL_SYNC,
|
||||
[RemoteTypes.REMOTE_P2P]: PREFERRED_SETTING_SELF_HOSTED,
|
||||
};
|
||||
for (const remoteType of [RemoteTypes.REMOTE_MINIO, RemoteTypes.REMOTE_COUCHDB]) {
|
||||
for (const useE2EE of [false, true]) {
|
||||
const remoteTypes = [RemoteTypes.REMOTE_COUCHDB, RemoteTypes.REMOTE_MINIO];
|
||||
// const remoteTypes = [RemoteTypes.REMOTE_P2P];
|
||||
const e2eeOptions = [false, true];
|
||||
// const e2eeOptions = [true];
|
||||
for (const remoteType of remoteTypes) {
|
||||
for (const useE2EE of e2eeOptions) {
|
||||
yield {
|
||||
setting: {
|
||||
...sync_test_setting_base,
|
||||
@@ -66,235 +41,10 @@ function* generateCase() {
|
||||
}
|
||||
}
|
||||
|
||||
const cases = Array.from(generateCase());
|
||||
const fileOptions = defaultFileOption;
|
||||
async function prepareRemote(harness: LiveSyncHarness, setting: ObsidianLiveSyncSettings, shouldReset = false) {
|
||||
if (shouldReset) {
|
||||
await delay(1000);
|
||||
await harness.plugin.services.replicator.getActiveReplicator()?.tryResetRemoteDatabase(harness.plugin.settings);
|
||||
} else {
|
||||
await harness.plugin.services.replicator
|
||||
.getActiveReplicator()
|
||||
?.tryCreateRemoteDatabase(harness.plugin.settings);
|
||||
}
|
||||
await harness.plugin.services.replicator.getActiveReplicator()?.markRemoteResolved(harness.plugin.settings);
|
||||
// No exceptions should be thrown
|
||||
const status = await harness.plugin.services.replicator
|
||||
.getActiveReplicator()
|
||||
?.getRemoteStatus(harness.plugin.settings);
|
||||
console.log("Remote status:", status);
|
||||
expect(status).not.toBeFalsy();
|
||||
}
|
||||
|
||||
describe("Replication Suite Tests", async () => {
|
||||
describe("Replication Suite Tests (Normal)", async () => {
|
||||
const cases = Array.from(generateCase());
|
||||
const fileOptions = defaultFileOption;
|
||||
describe.each(cases)("Replication Tests - Remote: $setting.remoteType, E2EE: $setting.encrypt", ({ setting }) => {
|
||||
const nameFile = (type: string, ext: string, size: number) => generateName("sync-test", type, ext, size);
|
||||
beforeAll(async () => {
|
||||
await DummyFileSourceInisialised;
|
||||
});
|
||||
|
||||
describe("Remote Database Initialization", async () => {
|
||||
let harnessInit: LiveSyncHarness;
|
||||
const sync_test_setting_init = {
|
||||
...setting,
|
||||
} as ObsidianLiveSyncSettings;
|
||||
|
||||
it("should initialize remote database", async () => {
|
||||
const vaultName = "TestVault" + Date.now();
|
||||
console.log(`BeforeEach - Remote Database Initialization - Vault: ${vaultName}`);
|
||||
harnessInit = await generateHarness(vaultName, sync_test_setting_init);
|
||||
await waitForReady(harnessInit);
|
||||
expect(harnessInit.plugin).toBeDefined();
|
||||
expect(harnessInit.plugin.app).toBe(harnessInit.app);
|
||||
await waitForIdle(harnessInit);
|
||||
});
|
||||
|
||||
it("should reset remote database", async () => {
|
||||
// harnessInit = await generateHarness(vaultName, sync_test_setting_init);
|
||||
await waitForReady(harnessInit);
|
||||
await prepareRemote(harnessInit, sync_test_setting_init, true);
|
||||
});
|
||||
it("should be prepared for replication", async () => {
|
||||
// harnessInit = await generateHarness(vaultName, sync_test_setting_init);
|
||||
await waitForReady(harnessInit);
|
||||
// await prepareRemote(harness, sync_test_setting_init, false);
|
||||
const status = await harnessInit.plugin.services.replicator
|
||||
.getActiveReplicator()
|
||||
?.getRemoteStatus(sync_test_setting_init);
|
||||
console.log("Connected devices after reset:", status);
|
||||
expect(status).not.toBeFalsy();
|
||||
});
|
||||
});
|
||||
|
||||
describe("Replication - Upload", async () => {
|
||||
let harnessUpload: LiveSyncHarness;
|
||||
|
||||
const sync_test_setting_upload = {
|
||||
...setting,
|
||||
} as ObsidianLiveSyncSettings;
|
||||
|
||||
it("Setup Upload Harness", async () => {
|
||||
const vaultName = "TestVault" + Date.now();
|
||||
console.log(`BeforeAll - Replication Upload - Vault: ${vaultName}`);
|
||||
harnessUpload = await generateHarness(vaultName, sync_test_setting_upload);
|
||||
await waitForReady(harnessUpload);
|
||||
expect(harnessUpload.plugin).toBeDefined();
|
||||
expect(harnessUpload.plugin.app).toBe(harnessUpload.app);
|
||||
waitForIdle(harnessUpload);
|
||||
});
|
||||
|
||||
it("should be instantiated and defined", async () => {
|
||||
expect(harnessUpload.plugin).toBeDefined();
|
||||
expect(harnessUpload.plugin.app).toBe(harnessUpload.app);
|
||||
});
|
||||
|
||||
it("should have services initialized", async () => {
|
||||
expect(harnessUpload.plugin.services).toBeDefined();
|
||||
});
|
||||
|
||||
it("should have local database initialized", async () => {
|
||||
expect(harnessUpload.plugin.localDatabase).toBeDefined();
|
||||
expect(harnessUpload.plugin.localDatabase.isReady).toBe(true);
|
||||
});
|
||||
|
||||
it("should prepare remote database", async () => {
|
||||
await prepareRemote(harnessUpload, sync_test_setting_upload, false);
|
||||
});
|
||||
|
||||
// describe("File Creation", async () => {
|
||||
it("should store single file", async () => {
|
||||
const content = "Hello, World!";
|
||||
const path = nameFile("store", "md", 0);
|
||||
await testFileWrite(harnessUpload, path, content, false, fileOptions);
|
||||
// Perform replication
|
||||
// await harness.plugin.services.replication.replicate(true);
|
||||
});
|
||||
it("should different content of several files are stored correctly", async () => {
|
||||
await testFileWrite(harnessUpload, nameFile("test-diff-1", "md", 0), "Content A", false, fileOptions);
|
||||
await testFileWrite(harnessUpload, nameFile("test-diff-2", "md", 0), "Content B", false, fileOptions);
|
||||
await testFileWrite(harnessUpload, nameFile("test-diff-3", "md", 0), "Content C", false, fileOptions);
|
||||
});
|
||||
|
||||
test.each(FILE_SIZE_MD)("should handle large file of size %i bytes", async (size) => {
|
||||
const content = Array.from(generateFile(size)).join("");
|
||||
const path = nameFile("large", "md", size);
|
||||
const isTooLarge = harnessUpload.plugin.services.vault.isFileSizeTooLarge(size);
|
||||
if (isTooLarge) {
|
||||
console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
|
||||
expect(true).toBe(true);
|
||||
} else {
|
||||
await testFileWrite(harnessUpload, path, content, false, fileOptions);
|
||||
}
|
||||
});
|
||||
|
||||
test.each(FILE_SIZE_BINS)("should handle binary file of size %i bytes", async (size) => {
|
||||
// const isTooLarge = harness.plugin.services.vault.isFileSizeTooLarge(size);
|
||||
const content = new Blob([...generateBinaryFile(size)], { type: "application/octet-stream" });
|
||||
const path = nameFile("binary", "bin", size);
|
||||
await testFileWrite(harnessUpload, path, content, true, fileOptions);
|
||||
const isTooLarge = harnessUpload.plugin.services.vault.isFileSizeTooLarge(size);
|
||||
if (isTooLarge) {
|
||||
console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
|
||||
expect(true).toBe(true);
|
||||
} else {
|
||||
await checkStoredFileInDB(harnessUpload, path, content, fileOptions);
|
||||
}
|
||||
});
|
||||
// });
|
||||
// Perform final replication after all tests
|
||||
it("Replication after uploads", async () => {
|
||||
await harnessUpload.plugin.services.replication.replicate(true);
|
||||
await waitForIdle(harnessUpload);
|
||||
// Ensure all files are uploaded
|
||||
await harnessUpload.plugin.services.replication.replicate(true);
|
||||
await waitForIdle(harnessUpload);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Replication - Download", async () => {
|
||||
let harnessDownload: LiveSyncHarness;
|
||||
// Download into a new vault
|
||||
const sync_test_setting_download = {
|
||||
...setting,
|
||||
} as ObsidianLiveSyncSettings;
|
||||
it("should initialize remote database", async () => {
|
||||
const vaultName = "TestVault" + Date.now();
|
||||
harnessDownload = await generateHarness(vaultName, sync_test_setting_download);
|
||||
await waitForReady(harnessDownload);
|
||||
await prepareRemote(harnessDownload, sync_test_setting_download, false);
|
||||
await harnessDownload.plugin.services.replication.replicate(true);
|
||||
await waitForIdle(harnessDownload);
|
||||
// Version info might be downloaded, and then replication will be interrupted,
|
||||
await harnessDownload.plugin.services.replication.replicate(true); // Ensure all files are downloaded
|
||||
await waitForIdle(harnessDownload);
|
||||
});
|
||||
|
||||
it("should perform initial replication to download files", async () => {
|
||||
await harnessDownload.plugin.services.replicator
|
||||
.getActiveReplicator()
|
||||
?.markRemoteResolved(sync_test_setting_download);
|
||||
await harnessDownload.plugin.services.replication.replicate(true);
|
||||
await waitForIdle(harnessDownload);
|
||||
// Version info might be downloaded, and then replication will be interrupted,
|
||||
await harnessDownload.plugin.services.replication.replicate(true); // Ensure all files are downloaded
|
||||
await waitForIdle(harnessDownload);
|
||||
});
|
||||
|
||||
it("should be instantiated and defined", async () => {
|
||||
expect(harnessDownload.plugin).toBeDefined();
|
||||
expect(harnessDownload.plugin.app).toBe(harnessDownload.app);
|
||||
});
|
||||
|
||||
it("should have services initialized", async () => {
|
||||
expect(harnessDownload.plugin.services).toBeDefined();
|
||||
});
|
||||
|
||||
it("should have local database initialized", async () => {
|
||||
expect(harnessDownload.plugin.localDatabase).toBeDefined();
|
||||
expect(harnessDownload.plugin.localDatabase.isReady).toBe(true);
|
||||
});
|
||||
// describe("File Checking", async () => {
|
||||
it("should retrieve the single file", async () => {
|
||||
const expectedContent = "Hello, World!";
|
||||
const path = nameFile("store", "md", 0);
|
||||
await testFileRead(harnessDownload, path, expectedContent, fileOptions);
|
||||
});
|
||||
it("should retrieve different content of several files correctly", async () => {
|
||||
await testFileRead(harnessDownload, nameFile("test-diff-1", "md", 0), "Content A", fileOptions);
|
||||
await testFileRead(harnessDownload, nameFile("test-diff-2", "md", 0), "Content B", fileOptions);
|
||||
await testFileRead(harnessDownload, nameFile("test-diff-3", "md", 0), "Content C", fileOptions);
|
||||
});
|
||||
|
||||
test.each(FILE_SIZE_MD)("should retrieve the file %i bytes", async (size) => {
|
||||
const content = Array.from(generateFile(size)).join("");
|
||||
const path = nameFile("large", "md", size);
|
||||
const isTooLarge = harnessDownload.plugin.services.vault.isFileSizeTooLarge(size);
|
||||
if (isTooLarge) {
|
||||
const entry = await harnessDownload.plugin.localDatabase.getDBEntry(path as FilePath);
|
||||
console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
|
||||
expect(entry).toBe(false);
|
||||
} else {
|
||||
await testFileRead(harnessDownload, path, content, fileOptions);
|
||||
}
|
||||
});
|
||||
|
||||
test.each(FILE_SIZE_BINS)("should handle binary file of size %i bytes", async (size) => {
|
||||
const path = nameFile("binary", "bin", size);
|
||||
|
||||
const isTooLarge = harnessDownload.plugin.services.vault.isFileSizeTooLarge(size);
|
||||
if (isTooLarge) {
|
||||
const entry = await harnessDownload.plugin.localDatabase.getDBEntry(path as FilePath);
|
||||
console.log(`Skipping file of size ${size} bytes as it is too large to sync.`);
|
||||
expect(entry).toBe(false);
|
||||
} else {
|
||||
const content = new Blob([...generateBinaryFile(size)], { type: "application/octet-stream" });
|
||||
await testFileRead(harnessDownload, path, content, fileOptions);
|
||||
}
|
||||
});
|
||||
// });
|
||||
});
|
||||
it("Wait for idle state", async () => {
|
||||
await delay(100);
|
||||
});
|
||||
syncBasicCase(`Remote: ${setting.remoteType}, E2EE: ${setting.encrypt}`, { setting, fileOptions });
|
||||
});
|
||||
});
|
||||
|
||||
test/suite/sync_common.ts (new file, 117 lines)
@@ -0,0 +1,117 @@
import { expect } from "vitest";
import { waitForIdle, type LiveSyncHarness } from "../harness/harness";
import { LOG_LEVEL_INFO, RemoteTypes, type ObsidianLiveSyncSettings } from "@/lib/src/common/types";

import { delay } from "@/lib/src/common/utils";
import { commands } from "vitest/browser";
import { LiveSyncTrysteroReplicator } from "@/lib/src/replication/trystero/LiveSyncTrysteroReplicator";
import { waitTaskWithFollowups } from "../lib/util";

async function waitForP2PPeers(harness: LiveSyncHarness) {
    if (harness.plugin.settings.remoteType === RemoteTypes.REMOTE_P2P) {
        // Wait for peers to connect
        const maxRetries = 10;
        let retries = maxRetries;
        const replicator = await harness.plugin.services.replicator.getActiveReplicator();
        if (!(replicator instanceof LiveSyncTrysteroReplicator)) {
            throw new Error("Replicator is not an instance of LiveSyncTrysteroReplicator");
        }
        const p2pReplicator = await replicator.getP2PConnection(LOG_LEVEL_INFO);
        if (!p2pReplicator) {
            throw new Error("P2P Replicator is not initialized");
        }
        while (retries-- > 0) {
            const peers = p2pReplicator.knownAdvertisements;

            if (peers && peers.length > 0) {
                console.log("P2P peers connected:", peers);
                return;
            }
            await commands.acceptWebPeer();
            console.log(`Waiting for any P2P peers to be connected... ${maxRetries - retries}/${maxRetries}`);
            console.dir(peers);
            await delay(3000);
            await commands.acceptWebPeer();
        }
        console.log("Failed to connect P2P peers after retries");
        throw new Error("P2P peers did not connect in time.");
    }
}

export async function closeP2PReplicatorConnections(harness: LiveSyncHarness) {
    if (harness.plugin.settings.remoteType === RemoteTypes.REMOTE_P2P) {
        const replicator = await harness.plugin.services.replicator.getActiveReplicator();
        if (!(replicator instanceof LiveSyncTrysteroReplicator)) {
            throw new Error("Replicator is not an instance of LiveSyncTrysteroReplicator");
        }
        replicator.closeReplication();
        await delay(30);
        replicator.closeReplication();
        replicator.closeReplication();
        await delay(1000);
        console.log("P2P replicator connections closed");
        // if (replicator instanceof LiveSyncTrysteroReplicator) {
        //     replicator.closeReplication();
        //     await delay(1000);
        // }
    }
}

export async function performReplication(harness: LiveSyncHarness) {
    await waitForP2PPeers(harness);
    await delay(500);
    const p = harness.plugin.services.replication.replicate(true);
    const task =
        harness.plugin.settings.remoteType === RemoteTypes.REMOTE_P2P
            ? waitTaskWithFollowups(
                  p,
                  () => {
                      // Accept any peer dialogs during replication (fire and forget)
                      void commands.acceptWebPeer();
                      return Promise.resolve();
                  },
                  30000,
                  500
              )
            : p;
    const result = await task;
    await waitForIdle(harness);
    if (harness.plugin.settings.remoteType === RemoteTypes.REMOTE_P2P) {
        await closeP2PReplicatorConnections(harness);
    }
    return result;
}

export async function closeReplication(harness: LiveSyncHarness) {
    if (harness.plugin.settings.remoteType === RemoteTypes.REMOTE_P2P) {
        return await closeP2PReplicatorConnections(harness);
    }
    const replicator = await harness.plugin.services.replicator.getActiveReplicator();
    if (!replicator) {
        console.log("No active replicator to close");
        return;
    }
    await replicator.closeReplication();
    await waitForIdle(harness);
    console.log("Replication closed");
}

export async function prepareRemote(harness: LiveSyncHarness, setting: ObsidianLiveSyncSettings, shouldReset = false) {
    if (setting.remoteType !== RemoteTypes.REMOTE_P2P) {
        if (shouldReset) {
            await delay(1000);
            await harness.plugin.services.replicator
                .getActiveReplicator()
                ?.tryResetRemoteDatabase(harness.plugin.settings);
        } else {
            await harness.plugin.services.replicator
                .getActiveReplicator()
                ?.tryCreateRemoteDatabase(harness.plugin.settings);
        }
        await harness.plugin.services.replicator.getActiveReplicator()?.markRemoteResolved(harness.plugin.settings);
        // No exceptions should be thrown
        const status = await harness.plugin.services.replicator
            .getActiveReplicator()
            ?.getRemoteStatus(harness.plugin.settings);
        console.log("Remote status:", status);
        expect(status).not.toBeFalsy();
    }
}
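For context, a minimal usage sketch (not part of this commit) of how these helpers are typically combined in a scenario: a harness is created as in dialog.test.ts, the remote is prepared, one replication pass runs, and the connection is closed. The vault name and the exact call order are illustrative assumptions; the real wiring lives in sync.senario.basic.ts, which is not shown in full here.

import { generateHarness, waitForReady, type LiveSyncHarness } from "../harness/harness";
import { settingBase } from "./variables";
import { prepareRemote, performReplication, closeReplication } from "./sync_common";

// Illustrative scenario skeleton (assumed shape, not the actual sync.senario.basic.ts):
async function runSingleSyncPass(setting = settingBase) {
    const harness: LiveSyncHarness = await generateHarness("SketchVault" + Date.now(), setting);
    await waitForReady(harness);
    await prepareRemote(harness, setting, true); // resets the remote first; no-op for P2P remotes
    const result = await performReplication(harness); // waits for P2P peers and accepts peer dialogs when needed
    await closeReplication(harness);
    return result;
}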
51 test/suite/syncp2p.test.ts Normal file
@@ -0,0 +1,51 @@
// Functional Test on Main Cases
// This test suite only covers the main functional cases of synchronisation. Event handling, error cases,
// edge cases, conflict resolution, etc. will be covered in separate test suites.
import { describe } from "vitest";
import {
    PREFERRED_JOURNAL_SYNC,
    PREFERRED_SETTING_SELF_HOSTED,
    RemoteTypes,
    type ObsidianLiveSyncSettings,
} from "@/lib/src/common/types";

import { settingBase } from "./variables.ts";
import { defaultFileOption } from "./db_common";
import { syncBasicCase } from "./sync.senario.basic.ts";

export const env = (import.meta as any).env;
function* generateCase() {
    const sync_test_setting_base = settingBase;
    const passpharse = "thetest-Passphrase3+9-for-e2ee!";
    const REMOTE_RECOMMENDED = {
        [RemoteTypes.REMOTE_COUCHDB]: PREFERRED_SETTING_SELF_HOSTED,
        [RemoteTypes.REMOTE_MINIO]: PREFERRED_JOURNAL_SYNC,
        [RemoteTypes.REMOTE_P2P]: PREFERRED_SETTING_SELF_HOSTED,
    };
    // const remoteTypes = [RemoteTypes.REMOTE_COUCHDB, RemoteTypes.REMOTE_MINIO, RemoteTypes.REMOTE_P2P];
    const remoteTypes = [RemoteTypes.REMOTE_P2P];
    // const e2eeOptions = [false, true];
    const e2eeOptions = [true];
    for (const remoteType of remoteTypes) {
        for (const useE2EE of e2eeOptions) {
            yield {
                setting: {
                    ...sync_test_setting_base,
                    ...REMOTE_RECOMMENDED[remoteType],
                    remoteType,
                    encrypt: useE2EE,
                    passphrase: useE2EE ? passpharse : "",
                    usePathObfuscation: useE2EE,
                } as ObsidianLiveSyncSettings,
            };
        }
    }
}

describe("Replication Suite Tests (P2P)", async () => {
    const cases = Array.from(generateCase());
    const fileOptions = defaultFileOption;
    describe.each(cases)("Replication Tests - Remote: $setting.remoteType, E2EE: $setting.encrypt", ({ setting }) => {
        syncBasicCase(`Remote: ${setting.remoteType}, E2EE: ${setting.encrypt}`, { setting, fileOptions });
    });
});
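The commented-out arrays in generateCase() above indicate the full test matrix (CouchDB, MinIO and P2P, each with and without E2EE), while this suite deliberately pins the matrix to P2P with E2EE enabled. As a sketch, restoring the full matrix would only mean swapping the two constants back:

// Sketch: the full matrix suggested by the commented-out lines in generateCase().
const remoteTypes = [RemoteTypes.REMOTE_COUCHDB, RemoteTypes.REMOTE_MINIO, RemoteTypes.REMOTE_P2P];
const e2eeOptions = [false, true];
// This would yield six setting combinations instead of the single P2P + E2EE case.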
39 test/suite/variables.ts Normal file
@@ -0,0 +1,39 @@
import { DoctorRegulation } from "@/lib/src/common/configForDoc";
import {
    DEFAULT_SETTINGS,
    ChunkAlgorithms,
    AutoAccepting,
    type ObsidianLiveSyncSettings,
} from "@/lib/src/common/types";
export const env = (import.meta as any).env;
export const settingBase = {
    ...DEFAULT_SETTINGS,
    isConfigured: true,
    handleFilenameCaseSensitive: false,
    couchDB_URI: `${env.hostname}`,
    couchDB_DBNAME: `${env.dbname}`,
    couchDB_USER: `${env.username}`,
    couchDB_PASSWORD: `${env.password}`,
    bucket: `${env.bucketName}`,
    region: "us-east-1",
    endpoint: `${env.minioEndpoint}`,
    accessKey: `${env.accessKey}`,
    secretKey: `${env.secretKey}`,
    useCustomRequestHandler: true,
    forcePathStyle: true,
    bucketPrefix: "",
    usePluginSyncV2: true,
    chunkSplitterVersion: ChunkAlgorithms.RabinKarp,
    doctorProcessedVersion: DoctorRegulation.version,
    notifyThresholdOfRemoteStorageSize: 800,
    P2P_AutoAccepting: AutoAccepting.ALL,
    P2P_AutoBroadcast: true,
    P2P_AutoStart: true,
    P2P_Enabled: true,
    P2P_passphrase: "p2psync-test",
    P2P_roomID: "p2psync-test",
    P2P_DevicePeerName: "p2psync-test",
    P2P_relays: "ws://localhost:4000/",
    P2P_AutoAcceptingPeers: "p2p-livesync-web-peer",
    P2P_SyncOnReplication: "p2p-livesync-web-peer",
} as ObsidianLiveSyncSettings;
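The env-driven fields above (hostname, dbname, credentials, MinIO endpoint) come from the merged .env/.test.env values that the vitest config forwards via test.env, as shown in the config changes below. A brief hypothetical sketch (not part of this commit) of deriving a one-off setting from this shared base, mirroring what generateCase() does per remote type:

import { RemoteTypes, type ObsidianLiveSyncSettings } from "@/lib/src/common/types";
import { settingBase } from "./variables";

// Hypothetical override: reuse the shared base but target CouchDB without E2EE.
const couchDbOnlySetting = {
    ...settingBase,
    remoteType: RemoteTypes.REMOTE_COUCHDB,
    encrypt: false,
    passphrase: "",
} as ObsidianLiveSyncSettings;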
@@ -13,6 +13,7 @@ describe("Test File Teet", async () => {
        const blob = new Blob([...generator], { type: "application/octet-stream" });
        const buf = await blob.arrayBuffer();
        const hexDump = new Uint8Array(buf)
            //@ts-ignore
            .toHex()
            .match(/.{1,32}/g)
            ?.join("\n");
@@ -20,6 +21,7 @@ describe("Test File Teet", async () => {
        const secondBlob = new Blob([...secondDummy], { type: "application/octet-stream" });
        const secondBuf = await secondBlob.arrayBuffer();
        const secondHexDump = new Uint8Array(secondBuf)
            //@ts-ignore
            .toHex()
            .match(/.{1,32}/g)
            ?.join("\n");
94 test/unit/dialog.test.ts Normal file
@@ -0,0 +1,94 @@
// Dialog Unit Tests
import { beforeAll, describe, expect, it } from "vitest";
import { commands } from "vitest/browser";

import { generateHarness, waitForIdle, waitForReady, type LiveSyncHarness } from "../harness/harness";
import { ChunkAlgorithms, DEFAULT_SETTINGS, type ObsidianLiveSyncSettings } from "@/lib/src/common/types";

import { DummyFileSourceInisialised } from "../utils/dummyfile";

import { page } from "vitest/browser";
import { DoctorRegulation } from "@/lib/src/common/configForDoc";
import { waitForDialogHidden, waitForDialogShown } from "../lib/ui";
const env = (import.meta as any).env;
const dialog_setting_base = {
    ...DEFAULT_SETTINGS,
    isConfigured: true,
    handleFilenameCaseSensitive: false,
    couchDB_URI: `${env.hostname}`,
    couchDB_DBNAME: `${env.dbname}`,
    couchDB_USER: `${env.username}`,
    couchDB_PASSWORD: `${env.password}`,
    bucket: `${env.bucketName}`,
    region: "us-east-1",
    endpoint: `${env.minioEndpoint}`,
    accessKey: `${env.accessKey}`,
    secretKey: `${env.secretKey}`,
    useCustomRequestHandler: true,
    forcePathStyle: true,
    bucketPrefix: "",
    usePluginSyncV2: true,
    chunkSplitterVersion: ChunkAlgorithms.RabinKarp,
    doctorProcessedVersion: DoctorRegulation.version,
    notifyThresholdOfRemoteStorageSize: 800,
} as ObsidianLiveSyncSettings;

function checkDialogVisibility(dialogText: string, shouldBeVisible: boolean): void {
    const dialog = page.getByText(dialogText);
    expect(dialog).toHaveClass(/modal-title/);
    if (!shouldBeVisible) {
        expect(dialog).not.toBeVisible();
    } else {
        expect(dialog).toBeVisible();
    }
    return;
}
function checkDialogShown(dialogText: string) {
    checkDialogVisibility(dialogText, true);
}
function checkDialogHidden(dialogText: string) {
    checkDialogVisibility(dialogText, false);
}

describe("Dialog Tests", async () => {
    // describe.each(cases)("Replication Tests - Remote: $setting.remoteType, E2EE: $setting.encrypt", ({ setting }) => {
    const setting = dialog_setting_base;
    beforeAll(async () => {
        await DummyFileSourceInisialised;
        await commands.grantClipboardPermissions();
    });
    let harness: LiveSyncHarness;
    const vaultName = "TestVault" + Date.now();
    beforeAll(async () => {
        harness = await generateHarness(vaultName, setting);
        await waitForReady(harness);
        expect(harness.plugin).toBeDefined();
        expect(harness.plugin.app).toBe(harness.app);
        await waitForIdle(harness);
    });
    it("should show copy to clipboard dialog and confirm", async () => {
        const testString = "This is a test string to copy to clipboard.";
        const title = "Copy Test";
        const result = harness.plugin.services.UI.promptCopyToClipboard(title, testString);
        const isDialogShown = await waitForDialogShown(title, 500);
        expect(isDialogShown).toBe(true);
        const copyButton = page.getByText("📋");
        expect(copyButton).toBeDefined();
        expect(copyButton).toBeVisible();
        await copyButton.click();
        const copyResultButton = page.getByText("✔️");
        expect(copyResultButton).toBeDefined();
        expect(copyResultButton).toBeVisible();
        const clipboardText = await navigator.clipboard.readText();
        expect(clipboardText).toBe(testString);
        const okButton = page.getByText("OK");
        expect(okButton).toBeDefined();
        expect(okButton).toBeVisible();
        await okButton.click();
        const resultValue = await result;
        expect(resultValue).toBe(true);
        // Check that the dialog is closed
        const isDialogHidden = await waitForDialogHidden(title, 500);
        expect(isDialogHidden).toBe(true);
    });
});
@@ -9,13 +9,16 @@ import fs from "node:fs";
import dotenv from "dotenv";
import { platform } from "node:process";

import { acceptWebPeer, closeWebPeer, grantClipboardPermissions, openWebPeer } from "./test/lib/commands.ts";
const __dirname = path.dirname(fileURLToPath(import.meta.url));

const defEnv = dotenv.config({ path: ".env" }).parsed;
const testEnv = dotenv.config({ path: ".test.env" }).parsed;
const env = Object.assign({}, defEnv, testEnv);
const debuggerEnabled = env?.ENABLE_DEBUGGER === "true";
const headless = !debuggerEnabled && env?.HEADLESS !== "false";
const enableUI = env?.ENABLE_UI === "true";
// const livesyncLogsEnabled = env?.PRINT_LIVESYNC_LOGS === "true";
const headless = !debuggerEnabled && !enableUI;
const manifestJson = JSON.parse(fs.readFileSync("./manifest.json") + "");
const packageJson = JSON.parse(fs.readFileSync("./package.json") + "");
const updateInfo = JSON.stringify(fs.readFileSync("./updates.md") + "");
@@ -112,10 +115,12 @@ export default defineConfig({
        headers: {
            "Service-Worker-Allowed": "/",
        },
        port: 5173,
    },
    test: {
        env: env,
        testTimeout: 10000,
        testTimeout: 40000,
        hookTimeout: 50000,
        fileParallelism: false,
        isolate: true,
        watch: false,
@@ -130,6 +135,13 @@ export default defineConfig({
            // ignoreEmptyLines: true,
        },
        browser: {
            isolate: true,
            commands: {
                grantClipboardPermissions,
                openWebPeer,
                closeWebPeer,
                acceptWebPeer,
            },
            provider: playwright({
                launchOptions: {
                    args: ["--js-flags=--expose-gc"],
@@ -143,19 +155,26 @@ export default defineConfig({
                    execArgv: ["--js-flags=--expose-gc"],
                    browser: "chromium",
                    headless,

                    isolate: true,
                    inspector: debuggerEnabled
                        ? {
                              waitForDebugger: true,
                              enabled: true,
                          }
                        : undefined,
                    printConsoleTrace: true,
                    printConsoleTrace: debuggerEnabled,
                    onUnhandledError(error) {
                        // Ignore certain errors
                        const msg = error.message || "";
                        if (msg.includes("Cannot create so many PeerConnections")) {
                            return false;
                        }
                    },
                },
            ],
            headless,
            fileParallelism: false,
            ui: debuggerEnabled ? true : false,
            ui: debuggerEnabled || enableUI ? true : false,
        },
    },
});
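The browser.commands block above registers grantClipboardPermissions, openWebPeer, closeWebPeer and acceptWebPeer from ./test/lib/commands.ts, which is not included in this diff. As a rough sketch only, assuming vitest's documented custom browser-command API and the Playwright provider, one such command could look like the following; the actual implementation may differ:

import type { BrowserCommand } from "vitest/node";
import type { Page } from "playwright";

// Hypothetical sketch of a single custom command; the real ./test/lib/commands.ts is not shown here.
export const grantClipboardPermissions: BrowserCommand<[]> = async (ctx) => {
    if (ctx.provider.name !== "playwright") {
        throw new Error(`provider ${ctx.provider.name} is not supported`);
    }
    // With the Playwright provider the command context carries the current Page.
    const page = (ctx as unknown as { page: Page }).page;
    // Allow navigator.clipboard.readText(), which the clipboard dialog test reads back.
    await page.context().grantPermissions(["clipboard-read", "clipboard-write"]);
};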