Compare commits

...

18 Commits

Author SHA1 Message Date
vorotamoroz
2ff60dd5ac Add missed files 2026-03-18 12:20:52 +01:00
vorotamoroz
c3341da242 Fix english 2026-03-18 12:05:15 +01:00
vorotamoroz
c2bfaeb5a9 Fixed: wrong import 2026-03-18 12:03:51 +01:00
vorotamoroz
c454616e1c bump 2026-03-18 12:01:57 +01:00
vorotamoroz
c88e73b7d3 Add note 2026-03-18 11:55:50 +01:00
vorotamoroz
3a29818612 - Delete items which are no longer used that might cause potential problems
- Fix Some Imports
- Fix floating promises on tests
2026-03-18 11:54:22 +01:00
vorotamoroz
ee69085830 Fixed: Some buttons on the setting dialogue now respond correctly again (#827). 2026-03-18 11:51:52 +01:00
vorotamoroz
3963f7c971 Refactored: P2P replicator has been refactored to be a little more robust and easier to understand. 2026-03-18 11:49:41 +01:00
vorotamoroz
602fcef949 - Fixed the issue where the detail level was not being applied in the log pane.
- Pop-ups are now shown.
- Add coverage for test.
- Pop-ups are now shown in the web app as well.
2026-03-18 11:48:31 +01:00
vorotamoroz
075d260fdd Fixed:
- Fixed the corrupted display of the help message.
- Remove some unnecessary code.
2026-03-18 11:46:52 +01:00
vorotamoroz
0717093d81 update for npm ci 2026-03-17 20:09:28 +09:00
vorotamoroz
1f87a9fd3d port setupManager, setupProtocol to serviceFeature
remove styles on webapp UI, and add stylesheet
2026-03-17 19:58:12 +09:00
vorotamoroz
fdd3a3aecb Add: vaultSelector (webapp) 2026-03-17 19:51:04 +09:00
vorotamoroz
d8281390c4 bump 2026-03-17 10:34:29 +01:00
vorotamoroz
08b1712f39 bump 2026-03-16 00:49:54 +09:00
vorotamoroz
6c69547cef ### Fixed
- Fixed flaky timing issues in P2P synchronisation.
- Fixed more binary file handling issues in CLI.

### Tests

- Rewrite P2P end-to-end tests to use the CLI as host.
2026-03-16 00:48:22 +09:00
vorotamoroz
89bf0488c3 Refactor: More refactor P2P Replicator 2026-03-15 04:07:47 +09:00
vorotamoroz
653cf8dfbe Refactor: Refactor P2P Replicator 2026-03-15 03:33:03 +09:00
49 changed files with 4439 additions and 1013 deletions

View File

@@ -56,7 +56,7 @@ jobs:
if: ${{ inputs.testsuite == '' || inputs.testsuite == 'suitep2p/' }}
env:
CI: true
run: npm run test suitep2p/
run: npm run test:p2p
- name: Stop test services (CouchDB)
run: npm run test:docker-couchdb:stop
if: ${{ inputs.testsuite == '' || inputs.testsuite == 'suite/' }}

View File

@@ -1,7 +1,7 @@
{
"id": "obsidian-livesync",
"name": "Self-hosted LiveSync",
"version": "0.25.52-patched-2",
"version": "0.25.54",
"minAppVersion": "0.9.12",
"description": "Community implementation of self-hosted livesync. Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.",
"author": "vorotamoroz",

1248
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{
"name": "obsidian-livesync",
"version": "0.25.52-patched-2",
"version": "0.25.54",
"description": "Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.",
"main": "main.js",
"type": "module",
@@ -53,7 +53,8 @@
"test:docker-all:down": "npm run test:docker-couchdb:down ; npm run test:docker-s3:down ; npm run test:docker-p2p:down",
"test:docker-all:start": "npm run test:docker-all:up && sleep 5 && npm run test:docker-all:init",
"test:docker-all:stop": "npm run test:docker-all:down",
"test:full": "npm run test:docker-all:start && vitest run --coverage && npm run test:docker-all:stop"
"test:full": "npm run test:docker-all:start && vitest run --coverage && npm run test:docker-all:stop",
"test:p2p": "bash test/suitep2p/run-p2p-tests.sh"
},
"keywords": [],
"author": "vorotamoroz",
@@ -67,6 +68,7 @@
"@tsconfig/svelte": "^5.0.8",
"@types/deno": "^2.5.0",
"@types/diff-match-patch": "^1.0.36",
"@types/markdown-it": "^14.1.2",
"@types/node": "^24.10.13",
"@types/pouchdb": "^6.4.2",
"@types/pouchdb-adapter-http": "^6.1.6",
@@ -118,6 +120,7 @@
"tsx": "^4.21.0",
"typescript": "5.9.3",
"vite": "^7.3.1",
"vite-plugin-istanbul": "^8.0.0",
"vitest": "^4.0.16",
"webdriverio": "^9.24.0",
"yaml": "^2.8.2"
@@ -133,6 +136,7 @@
"diff-match-patch": "^1.0.5",
"fflate": "^0.8.2",
"idb": "^8.0.3",
"markdown-it": "^14.1.1",
"minimatch": "^10.2.2",
"node-datachannel": "^0.32.1",
"octagonal-wheels": "^0.1.45",

View File

@@ -1,4 +1,5 @@
.livesync
test/*
!test/*.sh
node_modules
node_modules
.*.json

View File

@@ -1,8 +1,8 @@
import type { LiveSyncBaseCore } from "../../../LiveSyncBaseCore";
import { P2P_DEFAULT_SETTINGS, SETTING_KEY_P2P_DEVICE_NAME, type EntryDoc } from "@lib/common/types";
import { P2P_DEFAULT_SETTINGS } from "@lib/common/types";
import type { ServiceContext } from "@lib/services/base/ServiceBase";
import { TrysteroReplicator } from "@lib/replication/trystero/TrysteroReplicator";
import { LiveSyncTrysteroReplicator } from "@lib/replication/trystero/LiveSyncTrysteroReplicator";
import { addP2PEventHandlers } from "@lib/replication/trystero/addP2PEventHandlers";
type CLIP2PPeer = {
peerId: string;
name: string;
@@ -32,42 +32,14 @@ function validateP2PSettings(core: LiveSyncBaseCore<ServiceContext, any>) {
settings.P2P_IsHeadless = true;
}
async function createReplicator(core: LiveSyncBaseCore<ServiceContext, any>): Promise<TrysteroReplicator> {
function createReplicator(core: LiveSyncBaseCore<ServiceContext, any>): LiveSyncTrysteroReplicator {
validateP2PSettings(core);
const getSettings = () => core.services.setting.currentSettings();
const getDB = () => core.services.database.localDatabase.localDatabase;
const getSimpleStore = () => core.services.keyValueDB.openSimpleStore("p2p-sync");
const getDeviceName = () =>
core.services.config.getSmallConfig(SETTING_KEY_P2P_DEVICE_NAME) || core.services.vault.getVaultName();
const env = {
get settings() {
return getSettings();
},
get db() {
return getDB();
},
get simpleStore() {
return getSimpleStore();
},
get deviceName() {
return getDeviceName();
},
get platform() {
return core.services.API.getPlatform();
},
get confirm() {
return core.services.API.confirm;
},
processReplicatedDocs: async (docs: EntryDoc[]) => {
await core.services.replication.parseSynchroniseResult(docs as any);
},
};
return new TrysteroReplicator(env as any);
const replicator = new LiveSyncTrysteroReplicator({ services: core.services });
addP2PEventHandlers(replicator);
return replicator;
}
function getSortedPeers(replicator: TrysteroReplicator): CLIP2PPeer[] {
function getSortedPeers(replicator: LiveSyncTrysteroReplicator): CLIP2PPeer[] {
return [...replicator.knownAdvertisements]
.map((peer) => ({ peerId: peer.peerId, name: peer.name }))
.sort((a, b) => a.peerId.localeCompare(b.peerId));
@@ -77,7 +49,7 @@ export async function collectPeers(
core: LiveSyncBaseCore<ServiceContext, any>,
timeoutSec: number
): Promise<CLIP2PPeer[]> {
const replicator = await createReplicator(core);
const replicator = createReplicator(core);
await replicator.open();
try {
await delay(timeoutSec * 1000);
@@ -107,7 +79,7 @@ export async function syncWithPeer(
peerToken: string,
timeoutSec: number
): Promise<CLIP2PPeer> {
const replicator = await createReplicator(core);
const replicator = createReplicator(core);
await replicator.open();
try {
const timeoutMs = timeoutSec * 1000;
@@ -142,8 +114,8 @@ export async function syncWithPeer(
}
}
export async function openP2PHost(core: LiveSyncBaseCore<ServiceContext, any>): Promise<TrysteroReplicator> {
const replicator = await createReplicator(core);
export async function openP2PHost(core: LiveSyncBaseCore<ServiceContext, any>): Promise<LiveSyncTrysteroReplicator> {
const replicator = createReplicator(core);
await replicator.open();
return replicator;
}

View File

@@ -23,6 +23,7 @@ import * as fs from "fs/promises";
import * as path from "path";
import { NodeServiceContext, NodeServiceHub } from "./services/NodeServiceHub";
import { LiveSyncBaseCore } from "../../LiveSyncBaseCore";
import { ModuleReplicatorP2P } from "../../modules/core/ModuleReplicatorP2P";
import { initialiseServiceModulesCLI } from "./serviceModules/CLIServiceModules";
import { DEFAULT_SETTINGS, LOG_LEVEL_VERBOSE, type LOG_LEVEL, type ObsidianLiveSyncSettings } from "@lib/common/types";
import type { InjectableServiceHub } from "@lib/services/implements/injectable/InjectableServiceHub";
@@ -44,23 +45,6 @@ import { stripAllPrefixes } from "@lib/string_and_binary/path";
const SETTINGS_FILE = ".livesync/settings.json";
defaultLoggerEnv.minLogLevel = LOG_LEVEL_DEBUG;
// DI the log again.
// const recentLogEntries = reactiveSource<LogEntry[]>([]);
// const globalLogFunction = (message: any, level?: number, key?: string) => {
// const messageX =
// message instanceof Error
// ? new LiveSyncError("[Error Logged]: " + message.message, { cause: message })
// : message;
// const entry = { message: messageX, level, key } as LogEntry;
// recentLogEntries.value = [...recentLogEntries.value, entry];
// };
// setGlobalLogFunction((msg, level) => {
// console.error(`[${level}] ${typeof msg === "string" ? msg : JSON.stringify(msg)}`);
// if (msg instanceof Error) {
// console.error(msg);
// }
// });
function printHelp(): void {
console.log(`
Self-hosted LiveSync CLI
@@ -77,8 +61,8 @@ Commands:
p2p-sync <peer> <timeout>
Sync with the specified peer-id or peer-name
p2p-host Start P2P host mode and wait until interrupted
push <src> <dst> Push local file <src> into local database path <dst>
pull <src> <dst> Pull file <src> from local database into local file <dst>
push <src> <dst> Push local file <src> into local database path <dst>
pull <src> <dst> Pull file <src> from local database into local file <dst>
pull-rev <src> <dst> <rev> Pull file <src> at specific revision <rev> into local file <dst>
setup <setupURI> Apply setup URI to settings file
put <dst> Read UTF-8 content from stdin and write to local database path <dst>
@@ -89,12 +73,12 @@ Commands:
rm <path> Mark a file as deleted in local database
resolve <path> <rev> Resolve conflicts by keeping <rev> and deleting others
Examples:
livesync-cli ./my-database sync
livesync-cli ./my-database sync
livesync-cli ./my-database p2p-peers 5
livesync-cli ./my-database p2p-sync my-peer-name 15
livesync-cli ./my-database p2p-host
livesync-cli ./my-database --settings ./custom-settings.json push ./note.md folder/note.md
livesync-cli ./my-database pull folder/note.md ./exports/note.md
livesync-cli ./my-database --settings ./custom-settings.json push ./note.md folder/note.md
livesync-cli ./my-database pull folder/note.md ./exports/note.md
livesync-cli ./my-database pull-rev folder/note.md ./exports/note.old.md 3-abcdef
livesync-cli ./my-database setup "obsidian://setuplivesync?settings=..."
echo "Hello" | livesync-cli ./my-database put notes/hello.md
@@ -105,7 +89,7 @@ Examples:
livesync-cli ./my-database rm notes/hello.md
livesync-cli ./my-database resolve notes/hello.md 3-abcdef
livesync-cli init-settings ./data.json
livesync-cli ./my-database --verbose
livesync-cli ./my-database --verbose
`);
}
@@ -352,7 +336,10 @@ export async function main() {
(core: LiveSyncBaseCore<NodeServiceContext, any>, serviceHub: InjectableServiceHub<NodeServiceContext>) => {
return initialiseServiceModulesCLI(vaultPath, core, serviceHub);
},
() => [], // No extra modules
(core) => [
// No modules need to be registered for P2P replication in CLI. Directly using Replicators in p2p.ts
// new ModuleReplicatorP2P(core),
],
() => [], // No add-ons
(core) => {
// Add target filter to prevent internal files from being handled

View File

@@ -19,9 +19,9 @@
"test:e2e:setup-put-cat": "bash test/test-setup-put-cat-linux.sh",
"test:e2e:sync-two-local": "bash test/test-sync-two-local-databases-linux.sh",
"test:e2e:p2p": "bash test/test-p2p-three-nodes-conflict-linux.sh",
"test:e2e:p2p-upload-download-repro": "bash test/test-p2p-upload-download-repro-linux.sh",
"test:e2e:p2p-host": "bash test/test-p2p-host-linux.sh",
"test:e2e:p2p-sync": "bash test/test-p2p-sync-linux.sh",
"test:e2e:p2p-peers:local-relay": "bash test/test-p2p-peers-local-relay.sh",
"test:e2e:mirror": "bash test/test-mirror-linux.sh",
"pretest:e2e:all": "npm run build",
"test:e2e:all": " export RUN_BUILD=0 && npm run test:e2e:setup-put-cat && npm run test:e2e:push-pull && npm run test:e2e:sync-two-local && npm run test:e2e:p2p && npm run test:e2e:mirror && npm run test:e2e:two-vaults && npm run test:e2e:p2p"

View File

View File

@@ -0,0 +1,228 @@
#!/usr/bin/env bash
# End-to-end reproduction of the P2P upload/download scenario: three CLI
# peers (host / upload / download) exchange files through a relay.
set -euo pipefail

# Resolve the CLI project root relative to this script and load shared helpers.
SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
CLI_DIR="$(cd -- "$SCRIPT_DIR/.." && pwd)"
cd "$CLI_DIR"
source "$SCRIPT_DIR/test-helpers.sh"
display_test_info

# Tunables; every one can be overridden from the caller's environment.
RUN_BUILD="${RUN_BUILD:-1}"
KEEP_TEST_DATA="${KEEP_TEST_DATA:-1}"
VERBOSE_TEST_LOGGING="${VERBOSE_TEST_LOGGING:-0}"
RELAY="${RELAY:-ws://localhost:4000/}"
USE_INTERNAL_RELAY="${USE_INTERNAL_RELAY:-1}"
APP_ID="${APP_ID:-self-hosted-livesync-cli-tests}"
PEERS_TIMEOUT="${PEERS_TIMEOUT:-20}"
SYNC_TIMEOUT="${SYNC_TIMEOUT:-240}"

# Per-run unique identifiers so concurrent runs cannot collide on the relay.
ROOM_ID="p2p-room-$(date +%s)-$RANDOM-$RANDOM"
PASSPHRASE="p2p-pass-$(date +%s)-$RANDOM-$RANDOM"
HOST_PEER_NAME="p2p-cli-host"
UPLOAD_PEER_NAME="p2p-cli-upload-$(date +%s)-$RANDOM"
DOWNLOAD_PEER_NAME="p2p-cli-download-$(date +%s)-$RANDOM"

cli_test_init_cli_cmd
if [[ "$RUN_BUILD" == "1" ]]; then
    echo "[INFO] building CLI"
    npm run build
fi

# Scratch workspace: one vault and one settings file per peer, plus the
# background host's log file.
WORK_DIR="$(mktemp -d "${TMPDIR:-/tmp}/livesync-cli-p2p-upload-download.XXXXXX")"
VAULT_HOST="$WORK_DIR/vault-host"
VAULT_UP="$WORK_DIR/vault-up"
VAULT_DOWN="$WORK_DIR/vault-down"
SETTINGS_HOST="$WORK_DIR/settings-host.json"
SETTINGS_UP="$WORK_DIR/settings-up.json"
SETTINGS_DOWN="$WORK_DIR/settings-down.json"
HOST_LOG="$WORK_DIR/p2p-host.log"
mkdir -p "$VAULT_HOST" "$VAULT_UP" "$VAULT_DOWN"
cleanup() {
    # EXIT-trap handler: capture the script's exit status first, then stop the
    # background p2p-host, tear down the relay if this script started it, and
    # remove the scratch workspace unless artefacts were requested.
    local exit_code=$?
    if [[ -n "${HOST_PID:-}" ]] && kill -0 "$HOST_PID" >/dev/null 2>&1; then
        # Terminate gracefully and reap the child; ignore failures because the
        # process may already have exited.
        kill -TERM "$HOST_PID" >/dev/null 2>&1 || true
        wait "$HOST_PID" >/dev/null 2>&1 || true
    fi
    if [[ "${P2P_RELAY_STARTED:-0}" == "1" ]]; then
        cli_test_stop_p2p_relay
    fi
    if [[ "$KEEP_TEST_DATA" != "1" ]]; then
        rm -rf "$WORK_DIR"
    else
        echo "[INFO] KEEP_TEST_DATA=1, preserving artefacts at $WORK_DIR"
    fi
    # Re-raise the original exit status so the trap does not mask failures.
    exit "$exit_code"
}
trap cleanup EXIT

# Start the bundled relay only when it was requested AND the configured RELAY
# URL points at this machine; a remote relay is assumed to be already running.
if [[ "$USE_INTERNAL_RELAY" == "1" ]]; then
    if cli_test_is_local_p2p_relay "$RELAY"; then
        cli_test_start_p2p_relay
        P2P_RELAY_STARTED=1
    else
        echo "[INFO] USE_INTERNAL_RELAY=1 but RELAY is not local ($RELAY), skipping local relay startup"
    fi
fi
# Per-peer wrappers: invoke the CLI against a fixed vault + settings file,
# forwarding all remaining arguments verbatim.
run_cli_host() {
    run_cli "$VAULT_HOST" --settings "$SETTINGS_HOST" "$@"
}
run_cli_up() {
    run_cli "$VAULT_UP" --settings "$SETTINGS_UP" "$@"
}
run_cli_down() {
    run_cli "$VAULT_DOWN" --settings "$SETTINGS_DOWN" "$@"
}
apply_p2p_test_tweaks() {
    # Patch a generated settings JSON for P2P-only operation: enable E2E
    # encryption + path obfuscation, force a small chunk size, and stamp the
    # peer's device name. Values are passed to node via the environment to
    # avoid shell-quoting issues inside the inline script.
    local settings_file="$1"
    local device_name="$2"
    SETTINGS_FILE="$settings_file" DEVICE_NAME="$device_name" PASSPHRASE_VAL="$PASSPHRASE" node <<'NODE'
const fs = require("node:fs");
const settingsPath = process.env.SETTINGS_FILE;
const data = JSON.parse(fs.readFileSync(settingsPath, "utf-8"));
data.remoteType = "ONLY_P2P";
data.encrypt = true;
data.passphrase = process.env.PASSPHRASE_VAL;
data.usePathObfuscation = true;
data.handleFilenameCaseSensitive = false;
data.customChunkSize = 50;
data.usePluginSyncV2 = true;
data.doNotUseFixedRevisionForChunks = false;
data.P2P_DevicePeerName = process.env.DEVICE_NAME;
data.isConfigured = true;
fs.writeFileSync(settingsPath, JSON.stringify(data, null, 2), "utf-8");
NODE
}
discover_peer_id() {
    # Run `p2p-peers` on the requested side ("up" or "down") and print the
    # peer-id of the first advertised peer. Fails (status 1) with the raw
    # listing on stderr when nothing was discovered within PEERS_TIMEOUT.
    local side="$1"
    local listing peer
    case "$side" in
        up) listing="$(run_cli_up p2p-peers "$PEERS_TIMEOUT")" ;;
        *)  listing="$(run_cli_down p2p-peers "$PEERS_TIMEOUT")" ;;
    esac
    # Peer lines are tab-separated: "[peer]" <peer-id> <name>; take the first.
    peer="$(printf '%s\n' "$listing" | awk -F $'\t' 'NF>=3 && $1=="[peer]" {print $2; exit}')"
    if [[ -z "$peer" ]]; then
        echo "[FAIL] ${side} could not discover any peer" >&2
        echo "[FAIL] peers output:" >&2
        echo "$listing" >&2
        return 1
    fi
    echo "$peer"
}
echo "[INFO] preparing settings"
echo "[INFO] relay=$RELAY room=$ROOM_ID app=$APP_ID"

# Give all three peers the same room/passphrase/app-id so they can see each
# other on the relay; the trailing "~.*" pattern excludes dot-files from sync.
cli_test_init_settings_file "$SETTINGS_HOST"
cli_test_init_settings_file "$SETTINGS_UP"
cli_test_init_settings_file "$SETTINGS_DOWN"
cli_test_apply_p2p_settings "$SETTINGS_HOST" "$ROOM_ID" "$PASSPHRASE" "$APP_ID" "$RELAY" "~.*"
cli_test_apply_p2p_settings "$SETTINGS_UP" "$ROOM_ID" "$PASSPHRASE" "$APP_ID" "$RELAY" "~.*"
cli_test_apply_p2p_settings "$SETTINGS_DOWN" "$ROOM_ID" "$PASSPHRASE" "$APP_ID" "$RELAY" "~.*"
apply_p2p_test_tweaks "$SETTINGS_HOST" "$HOST_PEER_NAME"
apply_p2p_test_tweaks "$SETTINGS_UP" "$UPLOAD_PEER_NAME"
apply_p2p_test_tweaks "$SETTINGS_DOWN" "$DOWNLOAD_PEER_NAME"

# Launch the host in the background and poll its log (up to ~12 s) until the
# readiness banner appears.
echo "[CASE] start p2p-host"
run_cli_host p2p-host >"$HOST_LOG" 2>&1 &
HOST_PID=$!
for _ in 1 2 3 4 5 6 7 8 9 10 11 12; do
    if grep -Fq "P2P host is running" "$HOST_LOG"; then
        break
    fi
    sleep 1
done
if ! grep -Fq "P2P host is running" "$HOST_LOG"; then
    echo "[FAIL] p2p-host did not become ready" >&2
    cat "$HOST_LOG" >&2
    exit 1
fi
echo "[PASS] p2p-host started"

echo "[CASE] upload peer discovers host"
HOST_PEER_ID_FOR_UP="$(discover_peer_id up)"
echo "[PASS] upload peer discovered host: $HOST_PEER_ID_FOR_UP"

# Seed the upload peer's local database with small text files, large text
# files (100 KB / 1 MB) and random binaries (100 KB / 5 MB).
echo "[CASE] upload phase writes source files"
STORE_TEXT="$WORK_DIR/store-file.md"
DIFF_A_TEXT="$WORK_DIR/test-diff-1.md"
DIFF_B_TEXT="$WORK_DIR/test-diff-2.md"
DIFF_C_TEXT="$WORK_DIR/test-diff-3.md"
printf 'Hello, World!\n' > "$STORE_TEXT"
printf 'Content A\n' > "$DIFF_A_TEXT"
printf 'Content B\n' > "$DIFF_B_TEXT"
printf 'Content C\n' > "$DIFF_C_TEXT"
run_cli_up push "$STORE_TEXT" p2p/store-file.md >/dev/null
run_cli_up push "$DIFF_A_TEXT" p2p/test-diff-1.md >/dev/null
run_cli_up push "$DIFF_B_TEXT" p2p/test-diff-2.md >/dev/null
run_cli_up push "$DIFF_C_TEXT" p2p/test-diff-3.md >/dev/null
LARGE_TXT_100K="$WORK_DIR/large-100k.txt"
LARGE_TXT_1M="$WORK_DIR/large-1m.txt"
head -c 100000 /dev/zero | tr '\0' 'a' > "$LARGE_TXT_100K"
head -c 1000000 /dev/zero | tr '\0' 'b' > "$LARGE_TXT_1M"
run_cli_up push "$LARGE_TXT_100K" p2p/large-100000.md >/dev/null
run_cli_up push "$LARGE_TXT_1M" p2p/large-1000000.md >/dev/null
BINARY_100K="$WORK_DIR/binary-100k.bin"
BINARY_5M="$WORK_DIR/binary-5m.bin"
head -c 100000 /dev/urandom > "$BINARY_100K"
head -c 5000000 /dev/urandom > "$BINARY_5M"
run_cli_up push "$BINARY_100K" p2p/binary-100000.bin >/dev/null
run_cli_up push "$BINARY_5M" p2p/binary-5000000.bin >/dev/null
echo "[PASS] upload source files prepared"

# Sync runs twice per side: the second pass lets any documents that were
# still in flight during the first pass settle.
echo "[CASE] upload phase syncs to host"
run_cli_up p2p-sync "$HOST_PEER_ID_FOR_UP" "$SYNC_TIMEOUT" >/dev/null
run_cli_up p2p-sync "$HOST_PEER_ID_FOR_UP" "$SYNC_TIMEOUT" >/dev/null
echo "[PASS] upload phase synced"

echo "[CASE] download peer discovers host"
HOST_PEER_ID_FOR_DOWN="$(discover_peer_id down)"
echo "[PASS] download peer discovered host: $HOST_PEER_ID_FOR_DOWN"

echo "[CASE] download phase syncs from host"
run_cli_down p2p-sync "$HOST_PEER_ID_FOR_DOWN" "$SYNC_TIMEOUT" >/dev/null
run_cli_down p2p-sync "$HOST_PEER_ID_FOR_DOWN" "$SYNC_TIMEOUT" >/dev/null
echo "[PASS] download phase synced"

# Pull every file back out on the download peer and compare byte-for-byte
# against the originals written by the upload peer.
echo "[CASE] verify text files on download peer"
DOWN_STORE_TEXT="$WORK_DIR/down-store-file.md"
DOWN_DIFF_A_TEXT="$WORK_DIR/down-test-diff-1.md"
DOWN_DIFF_B_TEXT="$WORK_DIR/down-test-diff-2.md"
DOWN_DIFF_C_TEXT="$WORK_DIR/down-test-diff-3.md"
run_cli_down pull p2p/store-file.md "$DOWN_STORE_TEXT" >/dev/null
run_cli_down pull p2p/test-diff-1.md "$DOWN_DIFF_A_TEXT" >/dev/null
run_cli_down pull p2p/test-diff-2.md "$DOWN_DIFF_B_TEXT" >/dev/null
run_cli_down pull p2p/test-diff-3.md "$DOWN_DIFF_C_TEXT" >/dev/null
cmp -s "$STORE_TEXT" "$DOWN_STORE_TEXT" || { echo "[FAIL] store-file mismatch" >&2; exit 1; }
cmp -s "$DIFF_A_TEXT" "$DOWN_DIFF_A_TEXT" || { echo "[FAIL] test-diff-1 mismatch" >&2; exit 1; }
cmp -s "$DIFF_B_TEXT" "$DOWN_DIFF_B_TEXT" || { echo "[FAIL] test-diff-2 mismatch" >&2; exit 1; }
cmp -s "$DIFF_C_TEXT" "$DOWN_DIFF_C_TEXT" || { echo "[FAIL] test-diff-3 mismatch" >&2; exit 1; }
echo "[CASE] verify pushed files on download peer"
DOWN_LARGE_100K="$WORK_DIR/down-large-100k.txt"
DOWN_LARGE_1M="$WORK_DIR/down-large-1m.txt"
DOWN_BINARY_100K="$WORK_DIR/down-binary-100k.bin"
DOWN_BINARY_5M="$WORK_DIR/down-binary-5m.bin"
run_cli_down pull p2p/large-100000.md "$DOWN_LARGE_100K" >/dev/null
run_cli_down pull p2p/large-1000000.md "$DOWN_LARGE_1M" >/dev/null
run_cli_down pull p2p/binary-100000.bin "$DOWN_BINARY_100K" >/dev/null
run_cli_down pull p2p/binary-5000000.bin "$DOWN_BINARY_5M" >/dev/null
cmp -s "$LARGE_TXT_100K" "$DOWN_LARGE_100K" || { echo "[FAIL] large-100000 mismatch" >&2; exit 1; }
cmp -s "$LARGE_TXT_1M" "$DOWN_LARGE_1M" || { echo "[FAIL] large-1000000 mismatch" >&2; exit 1; }
cmp -s "$BINARY_100K" "$DOWN_BINARY_100K" || { echo "[FAIL] binary-100000 mismatch" >&2; exit 1; }
cmp -s "$BINARY_5M" "$DOWN_BINARY_5M" || { echo "[FAIL] binary-5000000 mismatch" >&2; exit 1; }
echo "[PASS] CLI P2P upload/download reproduction scenario completed"

View File

@@ -2,3 +2,4 @@ node_modules
dist
.DS_Store
*.log
.nyc_output

View File

@@ -55,8 +55,8 @@ The built files will be in the `dist` directory.
### Usage
1. Open the webapp in your browser
2. Grant directory access when prompted
1. Open the webapp in your browser (`webapp.html`)
2. Select a vault from history or grant access to a new directory
3. Configure CouchDB connection by editing `.livesync/settings.json` in your vault
- You can also copy data.json from Obsidian's plug-in folder.
@@ -98,8 +98,11 @@ webapp/
│ ├── ServiceFileAccessImpl.ts
│ ├── DatabaseFileAccess.ts
│ └── FSAPIServiceModules.ts
├── main.ts # Application entry point
├── index.html # HTML entry
├── bootstrap.ts # Vault picker + startup orchestration
├── main.ts # LiveSync core bootstrap (after vault selected)
├── vaultSelector.ts # FileSystem handle history and permission flow
├── webapp.html # Main HTML entry
├── index.html # Redirect entry for compatibility
├── package.json
├── vite.config.ts
└── README.md

View File

@@ -0,0 +1,139 @@
import { LiveSyncWebApp } from "./main";
import { VaultHistoryStore, type VaultHistoryItem } from "./vaultSelector";
// Persistent store of previously granted vault directory handles.
const historyStore = new VaultHistoryStore();
// The running application instance; null until a vault has been selected.
let app: LiveSyncWebApp | null = null;
/**
 * Look up a DOM element by id that the page is expected to contain.
 * @throws Error when no element with the given id exists.
 */
function getRequiredElement<T extends HTMLElement>(id: string): T {
    const found = document.getElementById(id);
    if (found === null) {
        throw new Error(`Missing element: #${id}`);
    }
    return found as T;
}
/** Show a message in the #status banner, styled by the given severity. */
function setStatus(kind: "info" | "warning" | "error" | "success", message: string): void {
    const banner = getRequiredElement<HTMLDivElement>("status");
    banner.className = kind;
    banner.textContent = message;
}
/** Enable/disable every vault-selection control while an operation runs. */
function setBusyState(isBusy: boolean): void {
    getRequiredElement<HTMLButtonElement>("pick-new-vault").disabled = isBusy;
    const historyButtons = Array.from(document.querySelectorAll<HTMLButtonElement>(".vault-item button"));
    for (const button of historyButtons) {
        button.disabled = isBusy;
    }
}
/**
 * Render a last-used timestamp (Unix milliseconds) for display.
 * A falsy value (0 / NaN) means the timestamp was never recorded.
 */
function formatLastUsed(unixMillis: number): string {
    return unixMillis ? new Date(unixMillis).toLocaleString() : "unknown";
}
/**
 * Rebuild the saved-vault list in the selector UI.
 * Toggles the "empty" notice, renders one row per history entry with a
 * "Use this vault" button, and returns the items that were rendered.
 */
async function renderHistoryList(): Promise<VaultHistoryItem[]> {
    const container = getRequiredElement<HTMLDivElement>("vault-history-list");
    const emptyNotice = getRequiredElement<HTMLParagraphElement>("vault-history-empty");
    const [entries, lastUsedId] = await Promise.all([
        historyStore.getVaultHistory(),
        historyStore.getLastUsedVaultId(),
    ]);
    container.innerHTML = "";
    emptyNotice.classList.toggle("is-hidden", entries.length > 0);
    for (const entry of entries) {
        const nameEl = document.createElement("div");
        nameEl.className = "vault-item-name";
        nameEl.textContent = entry.name;
        const metaEl = document.createElement("div");
        metaEl.className = "vault-item-meta";
        const usageLabel = entry.id === lastUsedId ? "Last used" : "Used";
        metaEl.textContent = `${usageLabel}: ${formatLastUsed(entry.lastUsedAt)}`;
        const infoEl = document.createElement("div");
        infoEl.className = "vault-item-info";
        infoEl.append(nameEl, metaEl);
        const openButton = document.createElement("button");
        openButton.type = "button";
        openButton.textContent = "Use this vault";
        openButton.addEventListener("click", () => {
            void startWithHistory(entry);
        });
        const rowEl = document.createElement("div");
        rowEl.className = "vault-item";
        rowEl.append(infoEl, openButton);
        container.appendChild(rowEl);
    }
    return entries;
}
/** Boot LiveSync for the chosen vault handle and hide the selector UI. */
async function startWithHandle(handle: FileSystemDirectoryHandle): Promise<void> {
    setStatus("info", `Starting LiveSync with vault: ${handle.name}`);
    const instance = new LiveSyncWebApp(handle);
    app = instance;
    await instance.initialize();
    getRequiredElement<HTMLDivElement>("vault-selector").classList.add("is-hidden");
}
/**
 * Reactivate a previously used vault (re-requesting permission if needed)
 * and start the app with it. On failure the UI is re-enabled and the error
 * is surfaced in the status banner.
 */
async function startWithHistory(item: VaultHistoryItem): Promise<void> {
    setBusyState(true);
    try {
        await startWithHandle(await historyStore.activateHistoryItem(item));
    } catch (error) {
        console.error("[Directory] Failed to open history vault:", error);
        setStatus("error", `Failed to open saved vault: ${String(error)}`);
        setBusyState(false);
    }
}
/**
 * Prompt the user for a new vault directory and start the app with it.
 * A cancelled or failed picker re-enables the UI with a warning.
 */
async function startWithNewPicker(): Promise<void> {
    setBusyState(true);
    try {
        await startWithHandle(await historyStore.pickNewVault());
    } catch (error) {
        console.error("[Directory] Failed to pick vault:", error);
        setStatus("warning", `Vault selection was cancelled or failed: ${String(error)}`);
        setBusyState(false);
    }
}
/** Wire up the vault selector UI and populate the saved-vault list. */
async function initializeVaultSelector(): Promise<void> {
    setStatus("info", "Select a vault folder to start LiveSync.");
    getRequiredElement<HTMLButtonElement>("pick-new-vault").addEventListener("click", () => {
        void startWithNewPicker();
    });
    await renderHistoryList();
}
// Boot the selector once the page has loaded; any initialisation failure is
// logged and shown in the status banner instead of failing silently.
window.addEventListener("load", async () => {
    try {
        await initializeVaultSelector();
    } catch (error) {
        console.error("Failed to initialize vault selector:", error);
        setStatus("error", `Initialization failed: ${String(error)}`);
    }
});
// Best-effort shutdown on tab close; beforeunload cannot await the promise.
window.addEventListener("beforeunload", () => {
    void app?.shutdown();
});
// Debug hook: expose the running app and history store on the window object
// so they can be inspected from the browser console.
(window as any).livesyncApp = {
    getApp: () => app,
    historyStore,
};

View File

@@ -3,207 +3,10 @@
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Self-hosted LiveSync WebApp</title>
<style>
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, sans-serif;
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
min-height: 100vh;
display: flex;
align-items: center;
justify-content: center;
padding: 20px;
}
.container {
background: white;
border-radius: 12px;
box-shadow: 0 20px 60px rgba(0, 0, 0, 0.3);
padding: 40px;
max-width: 600px;
width: 100%;
}
h1 {
color: #333;
margin-bottom: 10px;
font-size: 28px;
}
.subtitle {
color: #666;
margin-bottom: 30px;
font-size: 14px;
}
#status {
padding: 15px;
border-radius: 8px;
margin-bottom: 20px;
font-size: 14px;
font-weight: 500;
}
#status.error {
background: #fee;
color: #c33;
border: 1px solid #fcc;
}
#status.warning {
background: #ffeaa7;
color: #d63031;
border: 1px solid #fdcb6e;
}
#status.success {
background: #d4edda;
color: #155724;
border: 1px solid #c3e6cb;
}
#status.info {
background: #d1ecf1;
color: #0c5460;
border: 1px solid #bee5eb;
}
.info-section {
margin-top: 30px;
padding: 20px;
background: #f8f9fa;
border-radius: 8px;
}
.info-section h2 {
font-size: 18px;
margin-bottom: 15px;
color: #333;
}
.info-section ul {
list-style: none;
padding-left: 0;
}
.info-section li {
padding: 8px 0;
color: #666;
font-size: 14px;
}
.info-section li::before {
content: "•";
color: #667eea;
font-weight: bold;
display: inline-block;
width: 1em;
margin-left: -1em;
padding-right: 0.5em;
}
.feature-list {
margin-top: 20px;
}
.feature-list h3 {
font-size: 16px;
margin-bottom: 10px;
color: #444;
}
code {
background: #e9ecef;
padding: 2px 6px;
border-radius: 4px;
font-family: 'Courier New', monospace;
font-size: 13px;
}
.footer {
margin-top: 30px;
text-align: center;
color: #999;
font-size: 12px;
}
.footer a {
color: #667eea;
text-decoration: none;
}
.footer a:hover {
text-decoration: underline;
}
.console-link {
margin-top: 20px;
text-align: center;
font-size: 13px;
color: #666;
}
@media (max-width: 600px) {
.container {
padding: 30px 20px;
}
h1 {
font-size: 24px;
}
}
</style>
<title>Self-hosted LiveSync WebApp Launcher</title>
<meta http-equiv="refresh" content="0; url=./webapp.html">
</head>
<body>
<div class="container">
<h1>🔄 Self-hosted LiveSync</h1>
<p class="subtitle">Browser-based Self-hosted LiveSync using FileSystem API</p>
<div id="status" class="info">
Initialising...
</div>
<div class="info-section">
<h2>About This Application</h2>
<ul>
<li>Runs entirely in your browser</li>
<li>Uses FileSystem API to access your local vault</li>
<li>Syncs with CouchDB server (like Obsidian plugin)</li>
<li>Settings stored in <code>.livesync/settings.json</code></li>
<li>Real-time file watching with FileSystemObserver (Chrome 124+)</li>
</ul>
</div>
<div class="info-section">
<h2>How to Use</h2>
<ul>
<li>Grant directory access when prompted</li>
<li>Create <code>.livesync/settings.json</code> in your vault folder. (Compatible with Obsidian's Self-hosted LiveSync)</li>
<li>Add your CouchDB connection details</li>
<li>Your files will be synced automatically</li>
</ul>
</div>
<div class="console-link">
💡 Open browser console (F12) for detailed logs
</div>
<div class="footer">
<p>
Powered by
<a href="https://github.com/vrtmrz/obsidian-livesync" target="_blank">
Self-hosted LiveSync
</a>
</p>
</div>
</div>
<script type="module" src="./main.ts"></script>
<p>Redirecting to <a href="./webapp.html">WebApp</a>...</p>
</body>
</html>

View File

@@ -13,10 +13,11 @@ import type { InjectableSettingService } from "@lib/services/implements/injectab
import { useOfflineScanner } from "@lib/serviceFeatures/offlineScanner";
import { useRedFlagFeatures } from "@/serviceFeatures/redFlag";
import { useCheckRemoteSize } from "@lib/serviceFeatures/checkRemoteSize";
import { useSetupURIFeature } from "@lib/serviceFeatures/setupObsidian/setupUri";
import { SetupManager } from "@/modules/features/SetupManager";
// import { ModuleObsidianSettingsAsMarkdown } from "@/modules/features/ModuleObsidianSettingAsMarkdown";
import { ModuleSetupObsidian } from "@/modules/features/ModuleSetupObsidian";
// import { ModuleObsidianMenu } from "@/modules/essentialObsidian/ModuleObsidianMenu";
import { useSetupManagerHandlersFeature } from "@/serviceFeatures/setupObsidian/setupManagerHandlers";
import { useP2PReplicatorCommands } from "@/lib/src/replication/trystero/useP2PReplicatorCommands";
import { useP2PReplicatorFeature } from "@/lib/src/replication/trystero/useP2PReplicatorFeature";
const SETTINGS_DIR = ".livesync";
const SETTINGS_FILE = "settings.json";
@@ -47,21 +48,18 @@ const DEFAULT_SETTINGS: Partial<ObsidianLiveSyncSettings> = {
};
class LiveSyncWebApp {
private rootHandle: FileSystemDirectoryHandle | null = null;
private rootHandle: FileSystemDirectoryHandle;
private core: LiveSyncBaseCore<ServiceContext, any> | null = null;
private serviceHub: BrowserServiceHub<ServiceContext> | null = null;
constructor(rootHandle: FileSystemDirectoryHandle) {
this.rootHandle = rootHandle;
}
async initialize() {
console.log("Self-hosted LiveSync WebApp");
console.log("Initializing...");
// Request directory access
await this.requestDirectoryAccess();
if (!this.rootHandle) {
throw new Error("Failed to get directory access");
}
console.log(`Vault directory: ${this.rootHandle.name}`);
// Create service context and hub
@@ -98,18 +96,26 @@ class LiveSyncWebApp {
return DEFAULT_SETTINGS as ObsidianLiveSyncSettings;
});
// App lifecycle handlers
this.serviceHub.appLifecycle.scheduleRestart.setHandler(async () => {
console.log("[AppLifecycle] Restart requested");
await this.shutdown();
await this.initialize();
setTimeout(() => {
window.location.reload();
}, 1000);
});
// Create LiveSync core
this.core = new LiveSyncBaseCore(
this.serviceHub,
(core, serviceHub) => {
return initialiseServiceModulesFSAPI(this.rootHandle!, core, serviceHub);
return initialiseServiceModulesFSAPI(this.rootHandle, core, serviceHub);
},
(core) => [
// new ModuleObsidianEvents(this, core),
// new ModuleObsidianSettingDialogue(this, core),
// new ModuleObsidianMenu(core),
new ModuleSetupObsidian(core),
new SetupManager(core),
// new ModuleObsidianSettingsAsMarkdown(core),
// new ModuleLog(this, core),
// new ModuleObsidianDocumentHistory(this, core),
@@ -118,13 +124,19 @@ class LiveSyncWebApp {
// new ModuleDev(this, core),
// new ModuleReplicateTest(this, core),
// new ModuleIntegratedTest(this, core),
// new SetupManager(core),
// new ModuleReplicatorP2P(core), // Register P2P replicator for CLI (useP2PReplicator is not used here)
new SetupManager(core),
],
() => [], // No add-ons
(core) => {
useOfflineScanner(core);
useRedFlagFeatures(core);
useCheckRemoteSize(core);
const replicator = useP2PReplicatorFeature(core);
useP2PReplicatorCommands(core, replicator);
const setupManager = core.getModule(SetupManager);
useSetupManagerHandlersFeature(core, setupManager);
useSetupURIFeature(core);
}
);
@@ -133,8 +145,6 @@ class LiveSyncWebApp {
}
private async saveSettingsToFile(data: ObsidianLiveSyncSettings): Promise<void> {
if (!this.rootHandle) return;
try {
// Create .livesync directory if it doesn't exist
const livesyncDir = await this.rootHandle.getDirectoryHandle(SETTINGS_DIR, { create: true });
@@ -151,8 +161,6 @@ class LiveSyncWebApp {
}
private async loadSettingsFromFile(): Promise<Partial<ObsidianLiveSyncSettings> | null> {
if (!this.rootHandle) return null;
try {
const livesyncDir = await this.rootHandle.getDirectoryHandle(SETTINGS_DIR);
const fileHandle = await livesyncDir.getFileHandle(SETTINGS_FILE);
@@ -165,90 +173,6 @@ class LiveSyncWebApp {
}
}
/**
 * Obtain a read/write handle for the vault directory, preferring a handle
 * cached in IndexedDB from a previous session over prompting the user.
 * Sets `this.rootHandle` on success; rethrows on failure.
 */
private async requestDirectoryAccess() {
    try {
        // Fast path: reuse the cached handle when it still has permission.
        const cachedHandle = await this.loadCachedDirectoryHandle();
        if (cachedHandle) {
            try {
                // Cast to any: queryPermission is not yet in the TS lib types.
                const state = await (cachedHandle as any).queryPermission({ mode: "readwrite" });
                if (state === "granted") {
                    this.rootHandle = cachedHandle;
                    console.log("[Directory] Using cached directory handle");
                    return;
                }
            } catch (e) {
                // queryPermission might not be supported, try to use anyway
                console.log("[Directory] Could not verify permission, requesting new access");
            }
        }
        // Slow path: show the directory picker and cache the chosen handle.
        console.log("[Directory] Requesting directory access...");
        this.rootHandle = await (window as any).showDirectoryPicker({
            mode: "readwrite",
            startIn: "documents",
        });
        await this.saveCachedDirectoryHandle(this.rootHandle);
        console.log("[Directory] Directory access granted");
    } catch (error) {
        console.error("[Directory] Failed to get directory access:", error);
        throw error;
    }
}
/**
 * Persist the directory handle in IndexedDB so the next session can reuse
 * it without re-prompting. Best-effort: failures are logged, not thrown.
 */
private async saveCachedDirectoryHandle(handle: FileSystemDirectoryHandle) {
    try {
        // Use IndexedDB to store the directory handle
        const db = await this.openHandleDB();
        const transaction = db.transaction(["handles"], "readwrite");
        const store = transaction.objectStore("handles");
        await new Promise<void>((resolve, reject) => {
            const request = store.put(handle, "rootHandle");
            // Reject with request.error (a DOMException) rather than the raw
            // event object, matching the convention used in openHandleDB().
            request.onsuccess = () => resolve();
            request.onerror = () => reject(request.error);
        });
        db.close();
    } catch (error) {
        // Caching is best-effort; continue without a cached handle.
        console.error("[Directory] Failed to cache handle:", error);
    }
}
/**
 * Load a previously cached directory handle from IndexedDB.
 * Returns null when no handle is stored or the lookup fails.
 */
private async loadCachedDirectoryHandle(): Promise<FileSystemDirectoryHandle | null> {
    try {
        const db = await this.openHandleDB();
        const transaction = db.transaction(["handles"], "readonly");
        const store = transaction.objectStore("handles");
        const handle = await new Promise<FileSystemDirectoryHandle | null>((resolve, reject) => {
            const request = store.get("rootHandle");
            request.onsuccess = () => resolve(request.result || null);
            // Reject with request.error (a DOMException) rather than the raw
            // event object, matching the convention used in openHandleDB().
            request.onerror = () => reject(request.error);
        });
        db.close();
        return handle;
    } catch (error) {
        // Treat any failure as "no cached handle" so startup can continue.
        console.error("[Directory] Failed to load cached handle:", error);
        return null;
    }
}
/**
 * Open (creating on first use) the small IndexedDB database that stores
 * cached directory handles under a single key-value object store.
 */
private async openHandleDB(): Promise<IDBDatabase> {
    return new Promise((resolve, reject) => {
        const openRequest = indexedDB.open("livesync-webapp-handles", 1);
        openRequest.onupgradeneeded = (event) => {
            // First run (or version bump): create the key-value store.
            const database = (event.target as IDBOpenDBRequest).result;
            if (!database.objectStoreNames.contains("handles")) {
                database.createObjectStore("handles");
            }
        };
        openRequest.onsuccess = () => resolve(openRequest.result);
        openRequest.onerror = () => reject(openRequest.error);
    });
}
private async start() {
if (!this.core) {
throw new Error("Core not initialized");
@@ -333,21 +257,4 @@ class LiveSyncWebApp {
}
}
// Initialize on load
// NOTE(review): this constructs LiveSyncWebApp with no arguments, while the
// constructor visible earlier in this file requires a FileSystemDirectoryHandle
// — confirm which signature is current before relying on this entry point.
const app = new LiveSyncWebApp();
window.addEventListener("load", async () => {
    try {
        await app.initialize();
    } catch (error) {
        console.error("Failed to initialize:", error);
    }
});
// Handle page unload
window.addEventListener("beforeunload", () => {
    // Fire-and-forget: unload handlers cannot await an async shutdown.
    void app.shutdown();
});
// Export for debugging
(window as any).livesyncApp = app;
export { LiveSyncWebApp };

View File

@@ -0,0 +1,81 @@
import { defineConfig, devices } from "@playwright/test";
import * as path from "path";
import * as fs from "fs";
import { fileURLToPath } from "url";
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// ---------------------------------------------------------------------------
// Load environment variables from .test.env (root) so that CouchDB
// connection details are visible to the test process.
// ---------------------------------------------------------------------------
/**
 * Minimal .env parser: one `KEY=VALUE` per line, `#` starts a comment line.
 * Only the first `=` splits the pair; later ones belong to the value.
 * Returns an empty record when the file does not exist.
 */
function loadEnvFile(envPath: string): Record<string, string> {
    const parsed: Record<string, string> = {};
    if (!fs.existsSync(envPath)) {
        return parsed;
    }
    for (const rawLine of fs.readFileSync(envPath, "utf-8").split("\n")) {
        const line = rawLine.trim();
        // Skip blank lines and comments.
        if (line === "" || line.startsWith("#")) continue;
        const separator = line.indexOf("=");
        if (separator < 0) continue;
        const key = line.slice(0, separator).trim();
        parsed[key] = line.slice(separator + 1).trim();
    }
    return parsed;
}
// __dirname is src/apps/webapp — the repository root is three levels up.
const ROOT = path.resolve(__dirname, "../../..");
// .test.env entries take precedence over .env entries (later spread wins).
const envVars = {
    ...loadEnvFile(path.join(ROOT, ".env")),
    ...loadEnvFile(path.join(ROOT, ".test.env")),
};
// Expose the loaded values via process.env, never overriding variables the
// caller already set explicitly.
for (const [key, value] of Object.entries(envVars)) {
    if (!(key in process.env)) {
        process.env[key] = value;
    }
}
// Shared Chromium profile, used both in the global `use` and the project.
const desktopChrome = devices["Desktop Chrome"];

export default defineConfig({
    testDir: "./test",
    // Replication round-trips are slow; allow generous per-test time.
    timeout: 120_000,
    expect: { timeout: 30_000 },
    // Run test files sequentially; the tests themselves manage two contexts.
    fullyParallel: false,
    workers: 1,
    reporter: "list",
    use: {
        baseURL: "http://localhost:3000",
        // Chromium is required for OPFS and FileSystem API support.
        ...desktopChrome,
        headless: true,
        // Launch args to match the main vitest browser config.
        launchOptions: {
            args: ["--js-flags=--expose-gc"],
        },
    },
    projects: [
        {
            name: "chromium",
            use: { ...desktopChrome },
        },
    ],
    // Boot the vite dev server before the suite runs; reuse an already
    // running server when developing locally (but never in CI).
    webServer: {
        command: "npx vite --port 3000",
        url: "http://localhost:3000",
        reuseExistingServer: !process.env.CI,
        timeout: 30_000,
        // Run from the webapp directory so vite finds its config.
        cwd: __dirname,
    },
});

View File

@@ -0,0 +1,203 @@
/**
* LiveSync WebApp E2E test entry point.
*
* When served by vite dev server (at /test.html), this module wires up
* `window.livesyncTest`, a plain JS API that Playwright tests can call via
* `page.evaluate()`. All methods are async and serialisation-safe.
*
* Vault storage is backed by OPFS so no `showDirectoryPicker()` interaction
* is required, making it fully headless-compatible.
*/
import { LiveSyncWebApp } from "./main";
import type { ObsidianLiveSyncSettings } from "@lib/common/types";
import type { FilePathWithPrefix } from "@lib/common/types";
// --------------------------------------------------------------------------
// Internal state — one app instance per page / browser context
// --------------------------------------------------------------------------
let app: LiveSyncWebApp | null = null;
// --------------------------------------------------------------------------
// Helpers
// --------------------------------------------------------------------------
/** Strip the "plain:" / "enc:" / … prefix used internally in PouchDB paths. */
/**
 * Strip the "plain:" / "enc:" / … prefix used internally in PouchDB paths.
 * Only a non-empty prefix before the first ":" is removed; a leading ":"
 * or a prefix-free path is returned unchanged.
 */
function stripPrefix(raw: string): string {
    const colon = raw.indexOf(":");
    return colon > 0 ? raw.slice(colon + 1) : raw;
}
/**
* Poll every 300 ms until all known processing queues are drained, or until
* the timeout elapses. Mirrors `waitForIdle` in the existing vitest harness.
*/
/**
 * Poll every 300 ms until every known processing queue reports zero, or
 * throw once `timeoutMs` elapses. Mirrors `waitForIdle` in the existing
 * vitest harness; absent services/counters count as zero.
 */
async function waitForIdle(core: any, timeoutMs = 60_000): Promise<void> {
    const deadline = Date.now() + timeoutMs;
    const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));
    while (Date.now() < deadline) {
        const counters = [
            core.services?.replication?.databaseQueueCount?.value,
            core.services?.fileProcessing?.totalQueued?.value,
            core.services?.fileProcessing?.batched?.value,
            core.services?.fileProcessing?.processing?.value,
            core.services?.replication?.storageApplyingCount?.value,
        ];
        const pending = counters.reduce((sum: number, value) => sum + (value ?? 0), 0);
        if (pending === 0) return;
        await sleep(300);
    }
    throw new Error(`waitForIdle timed out after ${timeoutMs} ms`);
}
/**
 * Return the core of the current app instance, or throw when no vault has
 * been initialised yet via livesyncTest.init().
 */
function getCore(): any {
    const core = (app as any)?.core;
    if (core) {
        return core;
    }
    throw new Error("Vault not initialised call livesyncTest.init() first");
}
// --------------------------------------------------------------------------
// Public test API
// --------------------------------------------------------------------------
/**
 * The plain-JS API exposed on `window.livesyncTest` for Playwright tests.
 * All methods are async and return only serialisation-safe values, so they
 * can be called directly through `page.evaluate()`.
 */
export interface LiveSyncTestAPI {
    /**
     * Initialise a vault in OPFS under the given name and apply `settings`.
     * Any previous contents of the OPFS directory are wiped first so each
     * test run starts clean.
     */
    init(vaultName: string, settings: Partial<ObsidianLiveSyncSettings>): Promise<void>;
    /**
     * Write `content` to the local PouchDB under `vaultPath` (equivalent to
     * the CLI `put` command). Waiting for the DB write to finish is
     * included; you still need to call `replicate()` to push to remote.
     */
    putFile(vaultPath: string, content: string): Promise<boolean>;
    /**
     * Mark `vaultPath` as deleted in the local PouchDB (equivalent to CLI
     * `rm`). Call `replicate()` afterwards to propagate to remote.
     */
    deleteFile(vaultPath: string): Promise<boolean>;
    /**
     * Run one full replication cycle (push + pull) against the remote CouchDB,
     * then wait for the local storage-application queue to drain.
     */
    replicate(): Promise<boolean>;
    /**
     * Wait until all processing queues are idle. Usually not needed after
     * `putFile` / `deleteFile` since those already await, but useful when
     * testing results after `replicate()`.
     */
    waitForIdle(timeoutMs?: number): Promise<void>;
    /**
     * Return metadata for `vaultPath` from the local database, or `null` if
     * the document is not found or is marked deleted.
     */
    getInfo(vaultPath: string): Promise<{
        path: string;
        revision: string;
        conflicts: string[];
        size: number;
        mtime: number;
    } | null>;
    /** Convenience wrapper: returns true when the doc has one or more conflict revisions. */
    hasConflict(vaultPath: string): Promise<boolean>;
    /** Tear down the current app instance (safe to call when none exists). */
    shutdown(): Promise<void>;
}
// --------------------------------------------------------------------------
// Implementation
// --------------------------------------------------------------------------
// Concrete implementation of the test API. Everything funnels through the
// single module-level `app` instance; `getCore()` throws if init() was never
// called, so each method fails loudly rather than silently no-oping.
const livesyncTest: LiveSyncTestAPI = {
    async init(vaultName: string, settings: Partial<ObsidianLiveSyncSettings>): Promise<void> {
        // Clean up any stale OPFS data from previous runs.
        const opfsRoot = await navigator.storage.getDirectory();
        try {
            await opfsRoot.removeEntry(vaultName, { recursive: true });
        } catch {
            // directory did not exist — that's fine
        }
        const vaultDir = await opfsRoot.getDirectoryHandle(vaultName, { create: true });
        // Pre-write settings so they are loaded during initialise().
        const livesyncDir = await vaultDir.getDirectoryHandle(".livesync", { create: true });
        const settingsFile = await livesyncDir.getFileHandle("settings.json", { create: true });
        const writable = await settingsFile.createWritable();
        await writable.write(JSON.stringify(settings));
        await writable.close();
        app = new LiveSyncWebApp(vaultDir);
        await app.initialize();
        // Give background startup tasks a moment to settle.
        await waitForIdle(getCore(), 30_000);
    },
    async putFile(vaultPath: string, content: string): Promise<boolean> {
        const core = getCore();
        const result = await core.serviceModules.databaseFileAccess.storeContent(
            vaultPath as FilePathWithPrefix,
            content
        );
        await waitForIdle(core);
        // storeContent signals failure with `false`; anything else is success.
        return result !== false;
    },
    async deleteFile(vaultPath: string): Promise<boolean> {
        const core = getCore();
        const result = await core.serviceModules.databaseFileAccess.delete(vaultPath as FilePathWithPrefix);
        await waitForIdle(core);
        return result !== false;
    },
    async replicate(): Promise<boolean> {
        const core = getCore();
        const result = await core.services.replication.replicate(true);
        // After replicate() resolves, remote docs may still be queued for
        // local storage application — wait until all queues are drained.
        await waitForIdle(core);
        return result !== false;
    },
    async waitForIdle(timeoutMs?: number): Promise<void> {
        await waitForIdle(getCore(), timeoutMs ?? 60_000);
    },
    async getInfo(vaultPath: string) {
        const core = getCore();
        const db = core.services?.database;
        // Linear scan over all normal docs; acceptable for the small vaults
        // these tests create.
        for await (const doc of db.localDatabase.findAllNormalDocs({ conflicts: true })) {
            if (doc._deleted || doc.deleted) continue;
            const docPath = stripPrefix(doc.path ?? "");
            if (docPath !== vaultPath) continue;
            return {
                path: docPath,
                revision: (doc._rev as string) ?? "",
                conflicts: (doc._conflicts as string[]) ?? [],
                size: (doc.size as number) ?? 0,
                mtime: (doc.mtime as number) ?? 0,
            };
        }
        return null;
    },
    async hasConflict(vaultPath: string): Promise<boolean> {
        const info = await livesyncTest.getInfo(vaultPath);
        return (info?.conflicts?.length ?? 0) > 0;
    },
    async shutdown(): Promise<void> {
        // Idempotent: a second call (or a call before init) is a no-op.
        if (app) {
            await app.shutdown();
            app = null;
        }
    },
};
// Expose on window for Playwright page.evaluate() calls.
(window as any).livesyncTest = livesyncTest;

26
src/apps/webapp/test.html Normal file
View File

@@ -0,0 +1,26 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>LiveSync WebApp E2E Test Page</title>
<style>
body {
font-family: monospace;
padding: 1rem;
}
#status {
margin-top: 1rem;
padding: 0.5rem;
border: 1px solid #ccc;
}
</style>
</head>
<body>
<h2>LiveSync WebApp E2E</h2>
<p>This page is used by Playwright tests only. <code>window.livesyncTest</code> is exposed by the script below.</p>
<!-- status div required by LiveSyncWebApp internal helpers -->
<div id="status">Loading…</div>
<script type="module" src="/test-entry.ts"></script>
</body>
</html>

View File

@@ -0,0 +1,294 @@
/**
 * WebApp E2E tests — two-vault scenarios.
 *
 * Each vault (A and B) runs in its own browser context so that JavaScript
 * global state (including Trystero's global signalling tables) is fully
 * isolated. The two vaults communicate only through the shared remote
 * CouchDB database.
 *
 * Vault storage is OPFS-backed — no file-picker interaction is needed.
 *
 * Prerequisites:
 *   - A reachable CouchDB instance whose connection details are in .test.env
 *     (read automatically by playwright.config.ts).
 *
 * How to run:
 *   cd src/apps/webapp && npm run test:e2e
 */
import { test, expect, type BrowserContext, type Page, type TestInfo } from "@playwright/test";
import type { LiveSyncTestAPI } from "../test-entry";
import { mkdirSync, writeFileSync } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// ---------------------------------------------------------------------------
// Settings helpers
// ---------------------------------------------------------------------------
/**
 * Read a mandatory environment variable; throw (rather than silently using
 * an empty value) when it is unset or empty.
 */
function requireEnv(name: string): string {
    const value = process.env[name];
    if (value === undefined || value === "") {
        throw new Error(`Missing required env variable: ${name}`);
    }
    return value;
}
/**
 * Create the CouchDB database if it does not exist yet.
 * PUT /<db> replies 201/202 on creation and 412 when it already exists;
 * all three are treated as success, anything else throws with the body.
 */
async function ensureCouchDbDatabase(uri: string, user: string, pass: string, dbName: string): Promise<void> {
    const endpoint = `${uri.replace(/\/+$/, "")}/${encodeURIComponent(dbName)}`;
    const credentials = Buffer.from(`${user}:${pass}`, "utf-8").toString("base64");
    const response = await fetch(endpoint, {
        method: "PUT",
        headers: { Authorization: `Basic ${credentials}` },
    });
    // 201: created, 202: accepted, 412: already exists
    if ([201, 202, 412].includes(response.status)) {
        return;
    }
    const body = await response.text().catch(() => "");
    throw new Error(`Failed to ensure CouchDB database (${response.status}): ${body}`);
}
/**
 * Assemble the LiveSync settings object used by both test vaults.
 * The remote CouchDB (shared between A and B) is the replication target;
 * all automatic sync triggers are disabled so tests drive replication
 * explicitly.
 */
function buildSettings(dbName: string): Record<string, unknown> {
    // Remote database connection, read from the environment (.test.env).
    const remote = {
        couchDB_URI: requireEnv("hostname").replace(/\/+$/, ""),
        couchDB_USER: process.env["username"] ?? "",
        couchDB_PASSWORD: process.env["password"] ?? "",
        couchDB_DBNAME: dbName,
    };
    return {
        ...remote,
        // Core behaviour: everything manual, no timers.
        isConfigured: true,
        liveSync: false,
        syncOnSave: false,
        syncOnStart: false,
        periodicReplication: false,
        gcDelay: 0,
        savingDelay: 0,
        notifyThresholdOfRemoteStorageSize: 0,
        // Encryption off for test simplicity.
        encrypt: false,
        // Plugin/hidden-file sync is not needed in the webapp.
        usePluginSync: false,
        autoSweepPlugins: false,
        autoSweepPluginsPeriodic: false,
        // Automatically accept every P2P peer.
        P2P_AutoAcceptingPeers: "~.*",
    };
}
// ---------------------------------------------------------------------------
// Test-page helpers
// ---------------------------------------------------------------------------
/** Navigate to the test entry page and wait for `window.livesyncTest`. */
/** Navigate a fresh page to the test entry point and wait until test-entry.ts
 *  has published `window.livesyncTest`. */
async function openTestPage(ctx: BrowserContext): Promise<Page> {
    // The predicate is serialised into the page by Playwright.
    const testApiReady = () => !!(window as any).livesyncTest;
    const page = await ctx.newPage();
    await page.goto("/test.html");
    await page.waitForFunction(testApiReady, { timeout: 20_000 });
    return page;
}
/** Type-safe wrapper calls `window.livesyncTest.<method>(...args)` in the page. */
/**
 * Type-safe wrapper: invokes `window.livesyncTest.<method>(...args)` inside
 * the page. If the page reloads mid-call (some startup flows trigger one
 * reload), waits for the API to reappear and retries exactly once.
 */
async function call<M extends keyof LiveSyncTestAPI>(
    page: Page,
    method: M,
    ...args: Parameters<LiveSyncTestAPI[M]>
): Promise<Awaited<ReturnType<LiveSyncTestAPI[M]>>> {
    type Result = Awaited<ReturnType<LiveSyncTestAPI[M]>>;
    const run = (): Promise<Result> =>
        page.evaluate(
            ([m, a]) => (window as any).livesyncTest[m](...a),
            [method, args] as [string, unknown[]]
        ) as Promise<Result>;
    try {
        return await run();
    } catch (ex: any) {
        const message = String(ex?.message ?? ex);
        const recoverable =
            message.includes("Execution context was destroyed") ||
            message.includes("Most likely the page has been closed");
        if (!recoverable) {
            throw ex;
        }
        // Recover once: wait for the reloaded page to expose the API again.
        await page.waitForFunction(() => !!(window as any).livesyncTest, { timeout: 20_000 });
        return await run();
    }
}
/**
 * When PW_COVERAGE is set, pull istanbul's `__coverage__` counters out of
 * the page (resetting them so the next test starts from zero) and write
 * them to .nyc_output for later report merging. No-op otherwise.
 */
async function dumpCoverage(page: Page | undefined, label: string, testInfo: TestInfo): Promise<void> {
    if (!process.env.PW_COVERAGE) return;
    if (!page || page.isClosed()) return;
    const coverage = await page
        .evaluate(() => {
            const data = (window as any).__coverage__;
            if (!data) return null;
            // Reset between tests to avoid runaway accumulation.
            (window as any).__coverage__ = {};
            return data;
        })
        .catch(() => null!);
    if (!coverage) return;
    // Skip writing an empty counter object.
    if (typeof coverage === "object" && Object.keys(coverage as Record<string, unknown>).length === 0) {
        return;
    }
    const outDir = path.resolve(__dirname, "../.nyc_output");
    mkdirSync(outDir, { recursive: true });
    // Sanitise the test id so it is a safe file-name component.
    const fileName = `${testInfo.testId.replace(/[^a-zA-Z0-9_-]/g, "_")}-${label}.json`;
    writeFileSync(path.join(outDir, fileName), JSON.stringify(coverage), "utf-8");
}
// ---------------------------------------------------------------------------
// Two-vault E2E suite
// ---------------------------------------------------------------------------
// Two-vault suite. A unique database name per run keeps parallel/repeated
// runs from colliding on the shared CouchDB server. NOTE: the cases are
// order-dependent (Case 3 relies on the file Case 2 created), which is why
// the playwright config runs files sequentially with one worker.
test.describe("WebApp two-vault E2E", () => {
    let ctxA: BrowserContext;
    let ctxB: BrowserContext;
    let pageA: Page;
    let pageB: Page;
    // Unique per-run suffix so each run gets its own remote database.
    const DB_SUFFIX = `${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
    const dbName = `${requireEnv("dbname")}-${DB_SUFFIX}`;
    const settings = buildSettings(dbName);
    test.beforeAll(async ({ browser }) => {
        // Create the remote database up front; both vaults replicate to it.
        await ensureCouchDbDatabase(
            String(settings.couchDB_URI ?? ""),
            String(settings.couchDB_USER ?? ""),
            String(settings.couchDB_PASSWORD ?? ""),
            dbName
        );
        // Open Vault A and Vault B in completely separate browser contexts.
        // Each context has its own JS runtime, IndexedDB and OPFS root, so
        // Trystero global state and PouchDB instance names cannot collide.
        ctxA = await browser.newContext();
        ctxB = await browser.newContext();
        pageA = await openTestPage(ctxA);
        pageB = await openTestPage(ctxB);
        await call(pageA, "init", "testvault_a", settings as any);
        await call(pageB, "init", "testvault_b", settings as any);
    });
    test.afterAll(async () => {
        // Best-effort teardown: ignore shutdown failures so contexts still close.
        await call(pageA, "shutdown").catch(() => {});
        await call(pageB, "shutdown").catch(() => {});
        await ctxA.close();
        await ctxB.close();
    });
    test.afterEach(async ({}, testInfo) => {
        await dumpCoverage(pageA, "vaultA", testInfo);
        await dumpCoverage(pageB, "vaultB", testInfo);
    });
    // -----------------------------------------------------------------------
    // Case 1: Vault A writes a file and can read its metadata back from the
    //         local database (no replication yet).
    // -----------------------------------------------------------------------
    test("Case 1: A writes a file and can get its info", async () => {
        const FILE = "e2e/case1-a-only.md";
        const CONTENT = "hello from vault A";
        const ok = await call(pageA, "putFile", FILE, CONTENT);
        expect(ok).toBe(true);
        const info = await call(pageA, "getInfo", FILE);
        expect(info).not.toBeNull();
        expect(info!.path).toBe(FILE);
        expect(info!.revision).toBeTruthy();
        expect(info!.conflicts).toHaveLength(0);
    });
    // -----------------------------------------------------------------------
    // Case 2: Vault A writes a file, both vaults replicate, and Vault B ends
    //         up with the file in its local database.
    // -----------------------------------------------------------------------
    test("Case 2: A writes a file, both replicate, B receives the file", async () => {
        const FILE = "e2e/case2-sync.md";
        const CONTENT = "content from A should appear in B";
        await call(pageA, "putFile", FILE, CONTENT);
        // A pushes to remote, then B pulls from remote.
        await call(pageA, "replicate");
        await call(pageB, "replicate");
        const infoB = await call(pageB, "getInfo", FILE);
        expect(infoB).not.toBeNull();
        expect(infoB!.path).toBe(FILE);
    });
    // -----------------------------------------------------------------------
    // Case 3: Vault A deletes the file it synced in case 2. After both
    //         vaults replicate, Vault B no longer sees the file.
    // -----------------------------------------------------------------------
    test("Case 3: A deletes the file, both replicate, B no longer sees it", async () => {
        // This test depends on Case 2 having put e2e/case2-sync.md into both vaults.
        const FILE = "e2e/case2-sync.md";
        await call(pageA, "deleteFile", FILE);
        await call(pageA, "replicate");
        await call(pageB, "replicate");
        const infoB = await call(pageB, "getInfo", FILE);
        // The file should be gone (null means not found or deleted).
        expect(infoB).toBeNull();
    });
    // -----------------------------------------------------------------------
    // Case 4: A and B each independently edit the same file that was already
    //         synced. After both vaults replicate the editing cycle, a
    //         conflict is reported on at least one side.
    // -----------------------------------------------------------------------
    test("Case 4: concurrent edits from A and B produce a conflict on both sides", async () => {
        const FILE = "e2e/case4-conflict.md";
        // 1) Write a baseline and synchronise so both vaults start from the
        //    same revision.
        await call(pageA, "putFile", FILE, "base content");
        await call(pageA, "replicate");
        await call(pageB, "replicate");
        // Confirm B has the base file with no conflicts yet.
        const baseInfoB = await call(pageB, "getInfo", FILE);
        expect(baseInfoB).not.toBeNull();
        expect(baseInfoB!.conflicts).toHaveLength(0);
        // 2) Both vaults write diverging content without syncing in between —
        //    this creates two competing revisions of the same document.
        await call(pageA, "putFile", FILE, "content from A (conflict side)");
        await call(pageB, "putFile", FILE, "content from B (conflict side)");
        // 3) Run replication on both sides. The order mirrors the pattern
        //    from the CLI two-vault tests (A → remote → B → remote → A).
        await call(pageA, "replicate");
        await call(pageB, "replicate");
        await call(pageA, "replicate"); // re-check from A to pick up B's revision
        // 4) At least one side must report a conflict.
        const hasConflictA = await call(pageA, "hasConflict", FILE);
        const hasConflictB = await call(pageB, "hasConflict", FILE);
        expect(
            hasConflictA || hasConflictB,
            "Expected a conflict to appear on vault A or vault B after diverging edits"
        ).toBe(true);
    });
});

View File

@@ -0,0 +1,191 @@
// IndexedDB layout: one store holds both vault entries ("vault:<id>") and a
// single meta key recording the most recently used vault.
const HANDLE_DB_NAME = "livesync-webapp-handles";
const HANDLE_STORE_NAME = "handles";
const LAST_USED_KEY = "meta:lastUsedVaultId";
const VAULT_KEY_PREFIX = "vault:";
// Only this many vaults are remembered; the least recently used are pruned.
const MAX_HISTORY_COUNT = 10;
/** One remembered vault: its stable id, display name, directory handle and
 *  the last time it was activated. */
export type VaultHistoryItem = {
    id: string;
    name: string;
    handle: FileSystemDirectoryHandle;
    lastUsedAt: number;
};
type VaultHistoryValue = VaultHistoryItem;
/** Build the IndexedDB key for a vault id. */
function makeVaultKey(id: string): string {
    return VAULT_KEY_PREFIX + id;
}
/** Inverse of makeVaultKey; returns null for non-vault keys (e.g. meta keys). */
function parseVaultId(key: string): string | null {
    return key.startsWith(VAULT_KEY_PREFIX) ? key.slice(VAULT_KEY_PREFIX.length) : null;
}
/** Timestamp plus a short random suffix — unique enough for history ids. */
function randomId(): string {
    const suffix = Math.random().toString(36).slice(2, 10);
    return `${Date.now()}-${suffix}`;
}
/**
 * Check (and optionally request) read/write permission on a directory handle.
 * Browsers without queryPermission/requestPermission are assumed to grant
 * access implicitly, so the fallback is permissive.
 */
async function hasReadWritePermission(handle: FileSystemDirectoryHandle, requestIfNeeded: boolean): Promise<boolean> {
    const target = handle as any;
    // Prefer the non-interactive query when the API is available.
    if (typeof target.queryPermission === "function") {
        if ((await target.queryPermission({ mode: "readwrite" })) === "granted") {
            return true;
        }
    }
    if (!requestIfNeeded) {
        return false;
    }
    // Interactive request (may show a browser prompt).
    if (typeof target.requestPermission === "function") {
        return (await target.requestPermission({ mode: "readwrite" })) === "granted";
    }
    return true;
}
/**
 * Persists the list of recently opened vault directories (and which one was
 * used last) in IndexedDB, so the webapp can offer a vault picker without
 * re-prompting the user each session.
 *
 * NOTE(review): storing FileSystemDirectoryHandle objects in IndexedDB is a
 * browser capability — confirm the supported-browser matrix before relying
 * on it outside Chromium.
 */
export class VaultHistoryStore {
    // Open (creating on first use) the handle database.
    private async openHandleDB(): Promise<IDBDatabase> {
        return new Promise((resolve, reject) => {
            const request = indexedDB.open(HANDLE_DB_NAME, 1);
            request.onerror = () => reject(request.error);
            request.onsuccess = () => resolve(request.result);
            request.onupgradeneeded = (event) => {
                const db = (event.target as IDBOpenDBRequest).result;
                if (!db.objectStoreNames.contains(HANDLE_STORE_NAME)) {
                    db.createObjectStore(HANDLE_STORE_NAME);
                }
            };
        });
    }
    // Run `task` against the single object store in a fresh transaction,
    // closing the database afterwards. NOTE(review): IndexedDB transactions
    // auto-commit when control returns to the event loop with no pending
    // requests — `task` must only await IDB requests, never unrelated async
    // work, or later requests will hit an inactive transaction.
    private async withStore<T>(mode: IDBTransactionMode, task: (store: IDBObjectStore) => Promise<T>): Promise<T> {
        const db = await this.openHandleDB();
        try {
            const tx = db.transaction([HANDLE_STORE_NAME], mode);
            const store = tx.objectStore(HANDLE_STORE_NAME);
            return await task(store);
        } finally {
            db.close();
        }
    }
    // Promisify a single IDBRequest.
    private async requestAsPromise<T>(request: IDBRequest<T>): Promise<T> {
        return new Promise((resolve, reject) => {
            request.onsuccess = () => resolve(request.result);
            request.onerror = () => reject(request.error);
        });
    }
    /** Id of the vault activated most recently, or null when none recorded. */
    async getLastUsedVaultId(): Promise<string | null> {
        return this.withStore("readonly", async (store) => {
            const value = await this.requestAsPromise(store.get(LAST_USED_KEY));
            return typeof value === "string" ? value : null;
        });
    }
    /** All remembered vaults, most recently used first. Malformed entries and
     *  non-vault keys (the meta key) are skipped. */
    async getVaultHistory(): Promise<VaultHistoryItem[]> {
        return this.withStore("readonly", async (store) => {
            // getAllKeys/getAll both return entries in key order, so index i
            // of `keys` corresponds to index i of `values`.
            const keys = (await this.requestAsPromise(store.getAllKeys())) as IDBValidKey[];
            const values = (await this.requestAsPromise(store.getAll())) as unknown[];
            const items: VaultHistoryItem[] = [];
            for (let i = 0; i < keys.length; i++) {
                const key = String(keys[i]);
                const id = parseVaultId(key);
                const value = values[i] as Partial<VaultHistoryValue> | undefined;
                if (!id || !value || !value.handle || !value.name) {
                    continue;
                }
                items.push({
                    id,
                    name: String(value.name),
                    handle: value.handle,
                    lastUsedAt: Number(value.lastUsedAt || 0),
                });
            }
            items.sort((a, b) => b.lastUsedAt - a.lastUsedAt);
            return items;
        });
    }
    /** Record `handle` as the active vault: reuse the existing entry when the
     *  same directory is already in history, update its timestamp, mark it
     *  last-used, and prune history beyond MAX_HISTORY_COUNT. */
    async saveSelectedVault(handle: FileSystemDirectoryHandle): Promise<VaultHistoryItem> {
        const now = Date.now();
        const existing = await this.getVaultHistory();
        let matched: VaultHistoryItem | null = null;
        for (const item of existing) {
            try {
                if (await item.handle.isSameEntry(handle)) {
                    matched = item;
                    break;
                }
            } catch {
                // Ignore handles that cannot be compared, keep scanning.
            }
        }
        const item: VaultHistoryItem = {
            id: matched?.id ?? randomId(),
            name: handle.name,
            handle,
            lastUsedAt: now,
        };
        await this.withStore("readwrite", async (store): Promise<void> => {
            await this.requestAsPromise(store.put(item, makeVaultKey(item.id)));
            await this.requestAsPromise(store.put(item.id, LAST_USED_KEY));
            // Rebuild the list with the fresh entry, newest first, and drop
            // everything past the cap.
            const merged = [...existing.filter((v) => v.id !== item.id), item].sort(
                (a, b) => b.lastUsedAt - a.lastUsedAt
            );
            const stale = merged.slice(MAX_HISTORY_COUNT);
            for (const old of stale) {
                await this.requestAsPromise(store.delete(makeVaultKey(old.id)));
            }
        });
        return item;
    }
    /** Re-activate a history entry: re-verify (and if needed re-request)
     *  permission, bump its timestamp and mark it last-used. Throws when the
     *  user denies permission. */
    async activateHistoryItem(item: VaultHistoryItem): Promise<FileSystemDirectoryHandle> {
        const granted = await hasReadWritePermission(item.handle, true);
        if (!granted) {
            throw new Error("Vault permissions were not granted");
        }
        const activated: VaultHistoryItem = {
            ...item,
            lastUsedAt: Date.now(),
        };
        await this.withStore("readwrite", async (store): Promise<void> => {
            await this.requestAsPromise(store.put(activated, makeVaultKey(activated.id)));
            await this.requestAsPromise(store.put(activated.id, LAST_USED_KEY));
        });
        return item.handle;
    }
    /** Let the user pick a new vault directory via showDirectoryPicker,
     *  verify permission, and record it in history. Throws when the API is
     *  unavailable or permission is denied. */
    async pickNewVault(): Promise<FileSystemDirectoryHandle> {
        const picker = (window as any).showDirectoryPicker;
        if (typeof picker !== "function") {
            throw new Error("FileSystem API showDirectoryPicker is not supported in this browser");
        }
        const handle = (await picker({
            mode: "readwrite",
            startIn: "documents",
        })) as FileSystemDirectoryHandle;
        const granted = await hasReadWritePermission(handle, true);
        if (!granted) {
            throw new Error("Vault permissions were not granted");
        }
        await this.saveSelectedVault(handle);
        return handle;
    }
}

View File

@@ -1,16 +1,45 @@
import { defineConfig } from "vite";
import { svelte } from "@sveltejs/vite-plugin-svelte";
import istanbul from "vite-plugin-istanbul";
import path from "node:path";
import { readFileSync } from "node:fs";
// Version metadata is read from the repository root (three levels up).
const readJson = (relPath: string) => JSON.parse(readFileSync(relPath, "utf-8"));
const packageJson = readJson("../../../package.json");
const manifestJson = readJson("../../../manifest.json");
// Istanbul instrumentation is opt-in via PW_COVERAGE=1 (Playwright coverage runs).
const enableCoverage = process.env.PW_COVERAGE === "1";
const repoRoot = path.resolve(__dirname, "../../..");
// https://vite.dev/config/
export default defineConfig({
plugins: [svelte()],
plugins: [
svelte(),
...(enableCoverage
? [
istanbul({
cwd: repoRoot,
include: ["src/**/*.ts", "src/**/*.svelte"],
exclude: [
"node_modules",
"dist",
"test",
"coverage",
"src/apps/webapp/test/**",
"playwright.config.ts",
"vite.config.ts",
"**/*.spec.ts",
"**/*.test.ts",
],
extension: [".js", ".ts", ".svelte"],
requireEnv: false,
cypress: false,
checkProd: false,
}),
]
: []),
],
resolve: {
alias: {
"@": path.resolve(__dirname, "../../"),
"@lib": path.resolve(__dirname, "../../lib/src"),
obsidian: path.resolve(__dirname, "../../../test/harness/obsidian-mock.ts"),
},
},
base: "./",
@@ -18,14 +47,21 @@ export default defineConfig({
outDir: "dist",
emptyOutDir: true,
rollupOptions: {
// test.html is used by the Playwright dev-server; include it here
// so the production build doesn't emit warnings about unused inputs.
input: {
index: path.resolve(__dirname, "index.html"),
webapp: path.resolve(__dirname, "webapp.html"),
test: path.resolve(__dirname, "test.html"),
},
external: ["crypto"],
},
},
define: {
MANIFEST_VERSION: JSON.stringify(process.env.MANIFEST_VERSION || manifestJson.version || "0.0.0"),
PACKAGE_VERSION: JSON.stringify(process.env.PACKAGE_VERSION || packageJson.version || "0.0.0"),
global: "globalThis",
hostPlatform: JSON.stringify(process.platform || "linux"),
},
server: {
port: 3000,

402
src/apps/webapp/webapp.css Normal file
View File

@@ -0,0 +1,402 @@
/* Global reset: strip default margins/padding and make box sizing predictable. */
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
/* Light-theme palette. Variable names mirror Obsidian's CSS custom properties
   (--background-primary, --text-normal, …) — presumably so components styled
   for the Obsidian plugin also render correctly in the web app; confirm
   against the shared component stylesheets. */
:root {
--background-primary: #ffffff;
--background-primary-alt: #667eea;
--background-secondary: #f0f0f0;
--background-secondary-alt: #e0e0e0;
--background-modifier-border: #d0d0d0;
--text-normal: #333333;
--text-warning: #d9534f;
--text-accent: #5bc0de;
--text-on-accent: #ffffff;
}
body {
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen, Ubuntu, Cantarell, sans-serif;
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
min-height: 100vh;
display: flex;
align-items: center;
justify-content: center;
padding: 20px;
}
.container {
background: white;
border-radius: 12px;
box-shadow: 0 20px 60px rgba(0, 0, 0, 0.3);
padding: 40px;
max-width: 700px;
width: 100%;
}
h1 {
color: #333;
margin-bottom: 10px;
font-size: 28px;
}
.subtitle {
color: #666;
margin-bottom: 24px;
font-size: 14px;
}
#status {
padding: 15px;
border-radius: 8px;
margin-bottom: 20px;
font-size: 14px;
font-weight: 500;
}
#status.error {
background: #fee;
color: #c33;
border: 1px solid #fcc;
}
#status.warning {
background: #ffeaa7;
color: #d63031;
border: 1px solid #fdcb6e;
}
#status.success {
background: #d4edda;
color: #155724;
border: 1px solid #c3e6cb;
}
#status.info {
background: #d1ecf1;
color: #0c5460;
border: 1px solid #bee5eb;
}
.vault-selector {
border: 1px solid #e6e9f2;
border-radius: 8px;
padding: 16px;
background: #fbfcff;
margin-bottom: 22px;
}
.vault-selector h2 {
font-size: 18px;
margin-bottom: 8px;
color: #333;
}
.vault-selector p {
color: #555;
font-size: 14px;
margin-bottom: 12px;
}
.vault-list {
display: flex;
flex-direction: column;
gap: 10px;
margin-bottom: 12px;
}
.vault-item {
border: 1px solid #d9deee;
border-radius: 8px;
padding: 10px 12px;
display: flex;
align-items: center;
justify-content: space-between;
gap: 12px;
background: #fff;
}
.vault-item-info {
min-width: 0;
}
.vault-item-name {
font-weight: 600;
color: #1f2a44;
word-break: break-word;
}
.vault-item-meta {
margin-top: 2px;
font-size: 12px;
color: #63708f;
}
/* Primary action buttons (e.g. "Choose new vault folder"). */
button {
border: none;
border-radius: 6px;
padding: 8px 12px;
background: #2f5ae5;
color: #fff;
cursor: pointer;
font-weight: 600;
white-space: nowrap;
}
/* NOTE(review): hover both darkens the background and lowers opacity to 0.7,
   which is visually very close to the disabled state (opacity 0.6 below) —
   confirm the hover opacity is intentional and not a leftover. */
button:hover {
background: #1e4ad6;
opacity: 0.7;
}
button:disabled {
cursor: not-allowed;
opacity: 0.6;
}
.empty-note {
font-size: 13px;
color: #6c757d;
margin-bottom: 8px;
}
.empty-note.is-hidden,
.vault-selector.is-hidden {
display: none;
}
.info-section {
margin-top: 20px;
padding: 20px;
background: #f8f9fa;
border-radius: 8px;
}
.info-section h2 {
font-size: 18px;
margin-bottom: 12px;
color: #333;
}
.info-section ul {
list-style: none;
padding-left: 0;
}
.info-section li {
padding: 7px 0;
color: #666;
font-size: 14px;
}
.info-section li::before {
content: "•";
color: #667eea;
font-weight: bold;
display: inline-block;
width: 1em;
margin-left: -1em;
padding-right: 0.5em;
}
code {
background: #e9ecef;
padding: 2px 6px;
border-radius: 4px;
font-family: "Courier New", monospace;
font-size: 13px;
}
.footer {
margin-top: 24px;
text-align: center;
color: #999;
font-size: 12px;
}
.footer a {
color: #667eea;
text-decoration: none;
}
.footer a:hover {
text-decoration: underline;
}
body.livesync-log-visible {
min-height: 100vh;
padding-bottom: 42vh;
}
#livesync-log-panel {
position: fixed;
left: 0;
right: 0;
bottom: 0;
height: 42vh;
z-index: 900;
display: flex;
flex-direction: column;
background: #0f172a;
border-top: 1px solid #334155;
}
.livesync-log-header {
padding: 8px 12px;
font-size: 12px;
font-weight: 600;
color: #e2e8f0;
background: #111827;
border-bottom: 1px solid #334155;
}
#livesync-log-viewport {
flex: 1;
overflow: auto;
padding: 8px 12px;
font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, "Liberation Mono", monospace;
font-size: 12px;
line-height: 1.4;
color: #e2e8f0;
white-space: pre-wrap;
word-break: break-word;
}
.livesync-log-line {
margin-bottom: 2px;
}
#livesync-command-bar {
position: fixed;
right: 16px;
bottom: 16px;
z-index: 1000;
display: flex;
flex-wrap: wrap;
gap: 8px;
max-width: 40vw;
padding: 10px;
border-radius: 10px;
background: rgba(255, 255, 255, 0.95);
box-shadow: 0 4px 16px rgba(0, 0, 0, 0.2);
}
.livesync-command-button {
border: 1px solid #ddd;
border-radius: 8px;
padding: 6px 10px;
background: #fff;
color: #111827;
cursor: pointer;
font-size: 12px;
line-height: 1.2;
white-space: nowrap;
font-weight: 500;
}
.livesync-command-button:hover:not(:disabled) {
background: #f3f4f6;
}
.livesync-command-button.is-disabled {
opacity: 0.55;
}
#livesync-window-root {
position: fixed;
top: 16px;
left: 16px;
right: 16px;
bottom: calc(42vh + 16px);
z-index: 850;
display: flex;
flex-direction: column;
border-radius: 10px;
background: rgba(255, 255, 255, 0.98);
box-shadow: 0 4px 16px rgba(0, 0, 0, 0.18);
overflow: hidden;
}
#livesync-window-tabs {
display: flex;
gap: 6px;
padding: 8px;
background: #f3f4f6;
border-bottom: 1px solid #e5e7eb;
}
#livesync-window-body {
position: relative;
flex: 1;
overflow: auto;
padding: 10px;
}
.livesync-window-tab {
border: 1px solid #d1d5db;
background: #fff;
color: #111827;
padding: 4px 8px;
border-radius: 6px;
cursor: pointer;
font-size: 12px;
font-weight: 500;
}
.livesync-window-tab.is-active {
background: #e0e7ff;
border-color: #818cf8;
}
.livesync-window-panel {
display: none;
width: 100%;
height: 100%;
overflow: auto;
}
.livesync-window-panel.is-active {
display: block;
}
@media (max-width: 600px) {
.container {
padding: 28px 18px;
}
h1 {
font-size: 24px;
}
.vault-item {
flex-direction: column;
align-items: stretch;
}
#livesync-command-bar {
max-width: calc(100vw - 24px);
right: 12px;
left: 12px;
bottom: 12px;
}
}
/* Full-screen modal overlay. `popup` is a non-standard element selector —
   presumably a custom element the web app creates when showing pop-ups;
   verify against the pop-up rendering code. */
popup {
position: fixed;
min-width: 80vw;
max-width: 90vw;
min-height: 40vh;
max-height: 80vh;
background: rgba(255, 255, 255, 0.8);
padding: 1em;
/* Single radius; the original declared border-radius twice (6px, then 10px). */
border-radius: 10px;
box-shadow: 0 8px 24px rgba(0, 0, 0, 0.2);
/* Must stack above #livesync-command-bar (z-index: 1000) and
   #livesync-log-panel (z-index: 900). The original set z-index twice
   (10000, then 10); the later `10` won, so those fixed panels could
   cover the pop-up. Keep only the high value. */
z-index: 10000;
overflow-y: auto;
display: flex;
align-items: center;
justify-content: center;
backdrop-filter: blur(15px);
}

View File

@@ -0,0 +1,45 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Self-hosted LiveSync WebApp</title>
<link rel="stylesheet" href="./webapp.css">
</head>
<body>
<!-- Element ids below (#status, #vault-selector, #vault-history-list,
     #vault-history-empty, #pick-new-vault) are presumably queried by
     bootstrap.ts — keep them in sync with the script; confirm against
     bootstrap.ts before renaming. -->
<div class="container">
<h1>Self-hosted LiveSync on Web</h1>
<p class="subtitle">Browser-based Self-hosted LiveSync using FileSystem API</p>
<div id="status" class="info">Initialising...</div>
<div id="vault-selector" class="vault-selector">
<h2>Select Vault Folder</h2>
<p>Open a vault you already used, or pick a new folder.</p>
<div id="vault-history-list" class="vault-list"></div>
<p id="vault-history-empty" class="empty-note">No saved vaults yet.</p>
<button id="pick-new-vault" type="button">Choose new vault folder</button>
</div>
<div class="info-section">
<h2>How to Use</h2>
<ul>
<li>Select a vault folder and grant permission</li>
<li>Create <code>.livesync/settings.json</code> in your vault folder</li>
<li>Or use Setup-URI to apply settings</li>
<li>Your files will be synced after "replicate now"</li>
</ul>
</div>
<div class="footer">
<p>
Powered by
<!-- rel="noopener noreferrer" added: target="_blank" without it allows
     reverse tabnabbing in older browsers and leaks the referrer. -->
<a href="https://github.com/vrtmrz/obsidian-livesync" target="_blank" rel="noopener noreferrer">Self-hosted LiveSync</a>
</p>
</div>
</div>
<!-- Raw .ts module source: relies on the Vite dev-server/build pipeline to
     transpile — this page is not servable as static HTML without it. -->
<script type="module" src="./bootstrap.ts"></script>
</body>
</html>

View File

@@ -1,10 +1,8 @@
import { PouchDB } from "@lib/pouchdb/pouchdb-browser";
import {
type EntryDoc,
type LOG_LEVEL,
type ObsidianLiveSyncSettings,
type P2PSyncSetting,
LOG_LEVEL_NOTICE,
LOG_LEVEL_VERBOSE,
P2P_DEFAULT_SETTINGS,
REMOTE_P2P,
@@ -12,35 +10,27 @@ import {
import { eventHub } from "@lib/hub/hub";
import type { Confirm } from "@lib/interfaces/Confirm";
import { LOG_LEVEL_INFO, Logger } from "@lib/common/logger";
import { LOG_LEVEL_NOTICE, Logger } from "@lib/common/logger";
import { storeP2PStatusLine } from "./CommandsShim";
import {
EVENT_P2P_PEER_SHOW_EXTRA_MENU,
type CommandShim,
type PeerStatus,
type PluginShim,
} from "@lib/replication/trystero/P2PReplicatorPaneCommon";
import {
closeP2PReplicator,
openP2PReplicator,
P2PLogCollector,
type P2PReplicatorBase,
} from "@lib/replication/trystero/P2PReplicatorCore";
import { P2PLogCollector, type P2PReplicatorBase, useP2PReplicator } from "@lib/replication/trystero/P2PReplicatorCore";
import type { SimpleStore } from "octagonal-wheels/databases/SimpleStoreBase";
import { reactiveSource } from "octagonal-wheels/dataobject/reactive_v2";
import { EVENT_SETTING_SAVED } from "@lib/events/coreEvents";
import { unique } from "octagonal-wheels/collection";
import { BrowserServiceHub } from "@lib/services/BrowserServices";
import { TrysteroReplicator } from "@lib/replication/trystero/TrysteroReplicator";
import { SETTING_KEY_P2P_DEVICE_NAME } from "@lib/common/types";
import { ServiceContext } from "@lib/services/base/ServiceBase";
import type { InjectableServiceHub } from "@lib/services/InjectableServices";
import { Menu } from "@lib/services/implements/browser/Menu";
import type { InjectableVaultServiceCompat } from "@lib/services/implements/injectable/InjectableVaultService";
import { SimpleStoreIDBv2 } from "octagonal-wheels/databases/SimpleStoreIDBv2";
import type { InjectableAPIService } from "@/lib/src/services/implements/injectable/InjectableAPIService";
import type { BrowserAPIService } from "@/lib/src/services/implements/browser/BrowserAPIService";
import type { InjectableSettingService } from "@/lib/src/services/implements/injectable/InjectableSettingService";
import { LiveSyncTrysteroReplicator } from "@lib/replication/trystero/LiveSyncTrysteroReplicator";
function addToList(item: string, list: string) {
return unique(
@@ -60,12 +50,10 @@ function removeFromList(item: string, list: string) {
.join(",");
}
export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
export class P2PReplicatorShim implements P2PReplicatorBase {
storeP2PStatusLine = reactiveSource("");
plugin!: PluginShim;
// environment!: IEnvironment;
confirm!: Confirm;
// simpleStoreAPI!: ISimpleStoreAPI;
db?: PouchDB.Database<EntryDoc>;
services: InjectableServiceHub<ServiceContext>;
@@ -76,12 +64,30 @@ export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
return this.db;
}
_simpleStore!: SimpleStore<any>;
async closeDB() {
if (this.db) {
await this.db.close();
this.db = undefined;
}
}
private _liveSyncReplicator?: LiveSyncTrysteroReplicator;
p2pLogCollector!: P2PLogCollector;
private _initP2PReplicator() {
const {
replicator,
p2pLogCollector,
storeP2PStatusLine: p2pStatusLine,
} = useP2PReplicator({ services: this.services } as any);
this._liveSyncReplicator = replicator;
this.p2pLogCollector = p2pLogCollector;
p2pLogCollector.p2pReplicationLine.onChanged((line) => {
storeP2PStatusLine.set(line.value);
});
}
constructor() {
const browserServiceHub = new BrowserServiceHub<ServiceContext>();
this.services = browserServiceHub;
@@ -89,7 +95,6 @@ export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
(this.services.API as BrowserAPIService<ServiceContext>).getSystemVaultName.setHandler(
() => "p2p-livesync-web-peer"
);
// this.services.API.addLog.setHandler(Logger);
const repStore = SimpleStoreIDBv2.open<any>("p2p-livesync-web-peer");
this._simpleStore = repStore;
let _settings = { ...P2P_DEFAULT_SETTINGS, additionalSuffixOfDatabaseName: "" } as ObsidianLiveSyncSettings;
@@ -103,14 +108,13 @@ export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
return settings;
});
}
get settings() {
return this.services.setting.currentSettings() as P2PSyncSetting;
}
async init() {
// const { simpleStoreAPI } = await getWrappedSynchromesh();
// this.confirm = confirm;
this.confirm = this.services.UI.confirm;
// this.environment = environment;
if (this.db) {
try {
@@ -123,30 +127,16 @@ export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
await this.services.setting.loadSettings();
this.plugin = {
// saveSettings: async () => {
// await repStore.set("settings", _settings);
// eventHub.emitEvent(EVENT_SETTING_SAVED, _settings);
// },
// get settings() {
// return _settings;
// },
// set settings(newSettings: P2PSyncSetting) {
// _settings = { ..._settings, ...newSettings };
// },
// rebuilder: null,
// core: {
// settings: this.services.setting.settings,
// },
services: this.services,
core: {
services: this.services,
},
// $$scheduleAppReload: () => {},
// $$getVaultName: () => "p2p-livesync-web-peer",
};
// const deviceName = this.getDeviceName();
const database_name = this.settings.P2P_AppID + "-" + this.settings.P2P_roomID + "p2p-livesync-web-peer";
this.db = new PouchDB<EntryDoc>(database_name);
this._initP2PReplicator();
setTimeout(() => {
if (this.settings.P2P_AutoStart && this.settings.P2P_Enabled) {
void this.open();
@@ -155,7 +145,7 @@ export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
return this;
}
_log(msg: any, level?: LOG_LEVEL): void {
_log(msg: any, level?: any): void {
Logger(msg, level);
}
_notice(msg: string, key?: string): void {
@@ -167,14 +157,10 @@ export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
simpleStore(): SimpleStore<any> {
return this._simpleStore;
}
handleReplicatedDocuments(docs: EntryDoc[]): Promise<boolean> {
// No op. This is a client and does not need to process the docs
handleReplicatedDocuments(_docs: EntryDoc[]): Promise<boolean> {
return Promise.resolve(true);
}
getPluginShim() {
return {};
}
getConfig(key: string) {
const vaultName = this.services.vault.getVaultName();
const dbKey = `${vaultName}-${key}`;
@@ -189,9 +175,7 @@ export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
getDeviceName(): string {
return this.getConfig(SETTING_KEY_P2P_DEVICE_NAME) ?? this.plugin.services.vault.getVaultName();
}
getPlatform(): string {
return "pseudo-replicator";
}
m?: Menu;
afterConstructor(): void {
eventHub.onEvent(EVENT_P2P_PEER_SHOW_EXTRA_MENU, ({ peer, event }) => {
@@ -202,12 +186,6 @@ export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
.addItem((item) => item.setTitle("📥 Only Fetch").onClick(() => this.replicateFrom(peer)))
.addItem((item) => item.setTitle("📤 Only Send").onClick(() => this.replicateTo(peer)))
.addSeparator()
// .addItem((item) => {
// item.setTitle("🔧 Get Configuration").onClick(async () => {
// await this.getRemoteConfig(peer);
// });
// })
// .addSeparator()
.addItem((item) => {
const mark = peer.syncOnConnect ? "checkmark" : null;
item.setTitle("Toggle Sync on connect")
@@ -234,97 +212,43 @@ export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
});
void this.m.showAtPosition({ x: event.x, y: event.y });
});
this.p2pLogCollector.p2pReplicationLine.onChanged((line) => {
storeP2PStatusLine.set(line.value);
});
}
_replicatorInstance?: TrysteroReplicator;
p2pLogCollector = new P2PLogCollector();
async open() {
await openP2PReplicator(this);
await this._liveSyncReplicator?.open();
}
async close() {
await closeP2PReplicator(this);
await this._liveSyncReplicator?.close();
}
enableBroadcastCastings() {
return this?._replicatorInstance?.enableBroadcastChanges();
return this._liveSyncReplicator?.enableBroadcastChanges();
}
disableBroadcastCastings() {
return this?._replicatorInstance?.disableBroadcastChanges();
}
async initialiseP2PReplicator(): Promise<TrysteroReplicator> {
await this.init();
try {
if (this._replicatorInstance) {
await this._replicatorInstance.close();
this._replicatorInstance = undefined;
}
if (!this.settings.P2P_AppID) {
this.settings.P2P_AppID = P2P_DEFAULT_SETTINGS.P2P_AppID;
}
const getInitialDeviceName = () =>
this.getConfig(SETTING_KEY_P2P_DEVICE_NAME) || this.services.vault.getVaultName();
const getSettings = () => this.settings;
const store = () => this.simpleStore();
const getDB = () => this.getDB();
const getConfirm = () => this.confirm;
const getPlatform = () => this.getPlatform();
const env = {
get db() {
return getDB();
},
get confirm() {
return getConfirm();
},
get deviceName() {
return getInitialDeviceName();
},
get platform() {
return getPlatform();
},
get settings() {
return getSettings();
},
processReplicatedDocs: async (docs: EntryDoc[]): Promise<void> => {
await this.handleReplicatedDocuments(docs);
// No op. This is a client and does not need to process the docs
},
get simpleStore() {
return store();
},
};
this._replicatorInstance = new TrysteroReplicator(env);
return this._replicatorInstance;
} catch (e) {
this._log(
e instanceof Error ? e.message : "Something occurred on Initialising P2P Replicator",
LOG_LEVEL_INFO
);
this._log(e, LOG_LEVEL_VERBOSE);
throw e;
}
return this._liveSyncReplicator?.disableBroadcastChanges();
}
get replicator() {
return this._replicatorInstance!;
return this._liveSyncReplicator;
}
async replicateFrom(peer: PeerStatus) {
await this.replicator.replicateFrom(peer.peerId);
const r = this._liveSyncReplicator;
if (!r) return;
await r.replicateFrom(peer.peerId);
}
async replicateTo(peer: PeerStatus) {
await this.replicator.requestSynchroniseToPeer(peer.peerId);
await this._liveSyncReplicator?.requestSynchroniseToPeer(peer.peerId);
}
async getRemoteConfig(peer: PeerStatus) {
Logger(
`Requesting remote config for ${peer.name}. Please input the passphrase on the remote device`,
LOG_LEVEL_NOTICE
);
const remoteConfig = await this.replicator.getRemoteConfig(peer.peerId);
const remoteConfig = await this._liveSyncReplicator?.getRemoteConfig(peer.peerId);
if (remoteConfig) {
Logger(`Remote config for ${peer.name} is retrieved successfully`);
const DROP = "Yes, and drop local database";
@@ -344,9 +268,7 @@ export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
if (remoteConfig.remoteType !== REMOTE_P2P) {
const yn2 = await this.confirm.askYesNoDialog(
`Do you want to set the remote type to "P2P Sync" to rebuild by "P2P replication"?`,
{
title: "Rebuild from remote device",
}
{ title: "Rebuild from remote device" }
);
if (yn2 === "yes") {
remoteConfig.remoteType = REMOTE_P2P;
@@ -355,9 +277,7 @@ export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
}
}
await this.services.setting.applyPartial(remoteConfig, true);
if (yn === DROP) {
// await this.plugin.rebuilder.scheduleFetch();
} else {
if (yn !== DROP) {
await this.plugin.core.services.appLifecycle.scheduleRestart();
}
} else {
@@ -381,8 +301,6 @@ export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
[targetSetting]: currentSettingAll ? currentSettingAll[targetSetting] : "",
};
if (peer[prop]) {
// this.plugin.settings[targetSetting] = removeFromList(peer.name, this.plugin.settings[targetSetting]);
// await this.plugin.saveSettings();
currentSetting[targetSetting] = removeFromList(peer.name, currentSetting[targetSetting]);
} else {
currentSetting[targetSetting] = addToList(peer.name, currentSetting[targetSetting]);

View File

@@ -16,9 +16,6 @@ export const EVENT_REQUEST_RELOAD_SETTING_TAB = "reload-setting-tab";
export const EVENT_REQUEST_OPEN_PLUGIN_SYNC_DIALOG = "request-open-plugin-sync-dialog";
export const EVENT_REQUEST_OPEN_P2P = "request-open-p2p";
export const EVENT_REQUEST_CLOSE_P2P = "request-close-p2p";
export const EVENT_REQUEST_RUN_DOCTOR = "request-run-doctor";
export const EVENT_REQUEST_RUN_FIX_INCOMPLETE = "request-run-fix-incomplete";
@@ -36,8 +33,6 @@ declare global {
[EVENT_REQUEST_OPEN_SETTING_WIZARD]: undefined;
[EVENT_REQUEST_RELOAD_SETTING_TAB]: undefined;
[EVENT_LEAF_ACTIVE_CHANGED]: undefined;
[EVENT_REQUEST_CLOSE_P2P]: undefined;
[EVENT_REQUEST_OPEN_P2P]: undefined;
[EVENT_REQUEST_OPEN_SETUP_URI]: undefined;
[EVENT_REQUEST_COPY_SETUP_URI]: undefined;
[EVENT_REQUEST_SHOW_SETUP_QR]: undefined;

View File

@@ -1,278 +0,0 @@
import { P2PReplicatorPaneView, VIEW_TYPE_P2P } from "./P2PReplicator/P2PReplicatorPaneView.ts";
import {
AutoAccepting,
LOG_LEVEL_NOTICE,
P2P_DEFAULT_SETTINGS,
REMOTE_P2P,
type EntryDoc,
type P2PSyncSetting,
type RemoteDBSettings,
} from "../../lib/src/common/types.ts";
import { LiveSyncCommands } from "../LiveSyncCommands.ts";
import {
LiveSyncTrysteroReplicator,
setReplicatorFunc,
} from "../../lib/src/replication/trystero/LiveSyncTrysteroReplicator.ts";
import { EVENT_REQUEST_OPEN_P2P, eventHub } from "../../common/events.ts";
import type { LiveSyncAbstractReplicator } from "../../lib/src/replication/LiveSyncAbstractReplicator.ts";
import { LOG_LEVEL_INFO, LOG_LEVEL_VERBOSE, Logger } from "octagonal-wheels/common/logger";
import type { CommandShim } from "../../lib/src/replication/trystero/P2PReplicatorPaneCommon.ts";
import {
addP2PEventHandlers,
closeP2PReplicator,
openP2PReplicator,
P2PLogCollector,
removeP2PReplicatorInstance,
type P2PReplicatorBase,
} from "../../lib/src/replication/trystero/P2PReplicatorCore.ts";
import { reactiveSource } from "octagonal-wheels/dataobject/reactive_v2";
import type { Confirm } from "../../lib/src/interfaces/Confirm.ts";
import type ObsidianLiveSyncPlugin from "../../main.ts";
import type { SimpleStore } from "octagonal-wheels/databases/SimpleStoreBase";
// import { getPlatformName } from "../../lib/src/PlatformAPIs/obsidian/Environment.ts";
import type { LiveSyncCore } from "../../main.ts";
import { TrysteroReplicator } from "../../lib/src/replication/trystero/TrysteroReplicator.ts";
import { SETTING_KEY_P2P_DEVICE_NAME } from "../../lib/src/common/types.ts";
export class P2PReplicator extends LiveSyncCommands implements P2PReplicatorBase, CommandShim {
storeP2PStatusLine = reactiveSource("");
getSettings(): P2PSyncSetting {
return this.core.settings;
}
getDB() {
return this.core.localDatabase.localDatabase;
}
get confirm(): Confirm {
return this.core.confirm;
}
_simpleStore!: SimpleStore<any>;
simpleStore(): SimpleStore<any> {
return this._simpleStore;
}
constructor(plugin: ObsidianLiveSyncPlugin, core: LiveSyncCore) {
super(plugin, core);
setReplicatorFunc(() => this._replicatorInstance);
addP2PEventHandlers(this);
this.afterConstructor();
// onBindFunction is called in super class
// this.onBindFunction(plugin, plugin.services);
}
async handleReplicatedDocuments(docs: EntryDoc[]): Promise<boolean> {
// console.log("Processing Replicated Docs", docs);
return await this.services.replication.parseSynchroniseResult(
docs as PouchDB.Core.ExistingDocument<EntryDoc>[]
);
}
_anyNewReplicator(settingOverride: Partial<RemoteDBSettings> = {}): Promise<LiveSyncAbstractReplicator> {
const settings = { ...this.settings, ...settingOverride };
if (settings.remoteType == REMOTE_P2P) {
return Promise.resolve(new LiveSyncTrysteroReplicator(this.plugin.core));
}
return undefined!;
}
_replicatorInstance?: TrysteroReplicator;
p2pLogCollector = new P2PLogCollector();
afterConstructor() {
return;
}
async open() {
await openP2PReplicator(this);
}
async close() {
await closeP2PReplicator(this);
}
getConfig(key: string) {
return this.services.config.getSmallConfig(key);
}
setConfig(key: string, value: string) {
return this.services.config.setSmallConfig(key, value);
}
enableBroadcastCastings() {
return this?._replicatorInstance?.enableBroadcastChanges();
}
disableBroadcastCastings() {
return this?._replicatorInstance?.disableBroadcastChanges();
}
init() {
this._simpleStore = this.services.keyValueDB.openSimpleStore("p2p-sync");
return Promise.resolve(this);
}
async initialiseP2PReplicator(): Promise<TrysteroReplicator> {
await this.init();
try {
if (this._replicatorInstance) {
await this._replicatorInstance.close();
this._replicatorInstance = undefined;
}
if (!this.settings.P2P_AppID) {
this.settings.P2P_AppID = P2P_DEFAULT_SETTINGS.P2P_AppID;
}
const getInitialDeviceName = () =>
this.getConfig(SETTING_KEY_P2P_DEVICE_NAME) || this.services.vault.getVaultName();
const getSettings = () => this.settings;
const store = () => this.simpleStore();
const getDB = () => this.getDB();
const getConfirm = () => this.confirm;
const getPlatform = () => this.services.API.getPlatform();
const env = {
get db() {
return getDB();
},
get confirm() {
return getConfirm();
},
get deviceName() {
return getInitialDeviceName();
},
get platform() {
return getPlatform();
},
get settings() {
return getSettings();
},
processReplicatedDocs: async (docs: EntryDoc[]): Promise<void> => {
await this.handleReplicatedDocuments(docs);
// No op. This is a client and does not need to process the docs
},
get simpleStore() {
return store();
},
};
this._replicatorInstance = new TrysteroReplicator(env);
return this._replicatorInstance;
} catch (e) {
this._log(
e instanceof Error ? e.message : "Something occurred on Initialising P2P Replicator",
LOG_LEVEL_INFO
);
this._log(e, LOG_LEVEL_VERBOSE);
throw e;
}
}
onunload(): void {
removeP2PReplicatorInstance();
void this.close();
}
onload(): void | Promise<void> {
eventHub.onEvent(EVENT_REQUEST_OPEN_P2P, () => {
void this.openPane();
});
this.p2pLogCollector.p2pReplicationLine.onChanged((line) => {
this.storeP2PStatusLine.value = line.value;
});
}
async _everyOnInitializeDatabase(): Promise<boolean> {
await this.initialiseP2PReplicator();
return Promise.resolve(true);
}
private async _allSuspendExtraSync() {
this.plugin.core.settings.P2P_Enabled = false;
this.plugin.core.settings.P2P_AutoAccepting = AutoAccepting.NONE;
this.plugin.core.settings.P2P_AutoBroadcast = false;
this.plugin.core.settings.P2P_AutoStart = false;
this.plugin.core.settings.P2P_AutoSyncPeers = "";
this.plugin.core.settings.P2P_AutoWatchPeers = "";
return await Promise.resolve(true);
}
// async $everyOnLoadStart() {
// return await Promise.resolve();
// }
async openPane() {
await this.services.API.showWindow(VIEW_TYPE_P2P);
}
async _everyOnloadStart(): Promise<boolean> {
this.plugin.registerView(
VIEW_TYPE_P2P,
(leaf) => new P2PReplicatorPaneView(leaf, this.plugin.core, this.plugin)
);
this.plugin.addCommand({
id: "open-p2p-replicator",
name: "P2P Sync : Open P2P Replicator",
callback: async () => {
await this.openPane();
},
});
this.plugin.addCommand({
id: "p2p-establish-connection",
name: "P2P Sync : Connect to the Signalling Server",
checkCallback: (isChecking) => {
if (isChecking) {
return !(this._replicatorInstance?.server?.isServing ?? false);
}
void this.open();
},
});
this.plugin.addCommand({
id: "p2p-close-connection",
name: "P2P Sync : Disconnect from the Signalling Server",
checkCallback: (isChecking) => {
if (isChecking) {
return this._replicatorInstance?.server?.isServing ?? false;
}
Logger(`Closing P2P Connection`, LOG_LEVEL_NOTICE);
void this.close();
},
});
this.plugin.addCommand({
id: "replicate-now-by-p2p",
name: "Replicate now by P2P",
checkCallback: (isChecking) => {
if (isChecking) {
if (this.settings.remoteType == REMOTE_P2P) return false;
if (!this._replicatorInstance?.server?.isServing) return false;
return true;
}
void this._replicatorInstance?.replicateFromCommand(false);
},
});
this.plugin
.addRibbonIcon("waypoints", "P2P Replicator", async () => {
await this.openPane();
})
.addClass("livesync-ribbon-replicate-p2p");
return await Promise.resolve(true);
}
_everyAfterResumeProcess(): Promise<boolean> {
if (this.settings.P2P_Enabled && this.settings.P2P_AutoStart) {
setTimeout(() => void this.open(), 100);
}
const rep = this._replicatorInstance;
rep?.allowReconnection();
return Promise.resolve(true);
}
_everyBeforeSuspendProcess(): Promise<boolean> {
const rep = this._replicatorInstance;
rep?.disconnectFromServer();
return Promise.resolve(true);
}
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.replicator.getNewReplicator.addHandler(this._anyNewReplicator.bind(this));
services.databaseEvents.onDatabaseInitialisation.addHandler(this._everyOnInitializeDatabase.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
services.appLifecycle.onSuspending.addHandler(this._everyBeforeSuspendProcess.bind(this));
services.appLifecycle.onResumed.addHandler(this._everyAfterResumeProcess.bind(this));
services.setting.suspendExtraSync.addHandler(this._allSuspendExtraSync.bind(this));
}
}

View File

@@ -4,10 +4,9 @@
import {
AcceptedStatus,
ConnectionStatus,
type CommandShim,
type PeerStatus,
type PluginShim,
} from "../../../lib/src/replication/trystero/P2PReplicatorPaneCommon";
import type { LiveSyncTrysteroReplicator } from "../../../lib/src/replication/trystero/LiveSyncTrysteroReplicator";
import PeerStatusRow from "../P2PReplicator/PeerStatusRow.svelte";
import { EVENT_LAYOUT_READY, eventHub } from "../../../common/events";
import {
@@ -23,7 +22,7 @@
import type { LiveSyncBaseCore } from "@/LiveSyncBaseCore";
interface Props {
cmdSync: CommandShim;
cmdSync: LiveSyncTrysteroReplicator;
core: LiveSyncBaseCore;
}
@@ -95,9 +94,8 @@
},
true
);
cmdSync.setConfig(SETTING_KEY_P2P_DEVICE_NAME, eDeviceName);
core.services.config.setSmallConfig(SETTING_KEY_P2P_DEVICE_NAME, eDeviceName);
deviceName = eDeviceName;
// await plugin.saveSettings();
}
async function revert() {
eP2PEnabled = settings.P2P_Enabled;
@@ -115,7 +113,7 @@
const applyLoadSettings = (d: P2PSyncSetting, force: boolean) => {
if (force) {
const initDeviceName =
cmdSync.getConfig(SETTING_KEY_P2P_DEVICE_NAME) ?? core.services.vault.getVaultName();
core.services.config.getSmallConfig(SETTING_KEY_P2P_DEVICE_NAME) ?? core.services.vault.getVaultName();
deviceName = initDeviceName;
eDeviceName = initDeviceName;
}
@@ -239,16 +237,16 @@
await cmdSync.close();
}
function startBroadcasting() {
void cmdSync.enableBroadcastCastings();
void cmdSync.enableBroadcastChanges();
}
function stopBroadcasting() {
void cmdSync.disableBroadcastCastings();
void cmdSync.disableBroadcastChanges();
}
const initialDialogStatusKey = `p2p-dialog-status`;
const getDialogStatus = () => {
try {
const initialDialogStatus = JSON.parse(cmdSync.getConfig(initialDialogStatusKey) ?? "{}") as {
const initialDialogStatus = JSON.parse(core.services.config.getSmallConfig(initialDialogStatusKey) ?? "{}") as {
notice?: boolean;
setting?: boolean;
};
@@ -265,7 +263,7 @@
notice: isNoticeOpened,
setting: isSettingOpened,
};
cmdSync.setConfig(initialDialogStatusKey, JSON.stringify(dialogStatus));
core.services.config.setSmallConfig(initialDialogStatusKey, JSON.stringify(dialogStatus));
});
let isObsidian = $derived.by(() => {
return core.services.API.getPlatform() === "obsidian";

View File

@@ -1,19 +1,15 @@
import { Menu, WorkspaceLeaf } from "@/deps.ts";
import ReplicatorPaneComponent from "./P2PReplicatorPane.svelte";
import type ObsidianLiveSyncPlugin from "../../../main.ts";
import { mount } from "svelte";
import { SvelteItemView } from "../../../common/SvelteItemView.ts";
import { eventHub } from "../../../common/events.ts";
import { SvelteItemView } from "@/common/SvelteItemView.ts";
import { eventHub } from "@/common/events.ts";
import { unique } from "octagonal-wheels/collection";
import { LOG_LEVEL_NOTICE, REMOTE_P2P } from "../../../lib/src/common/types.ts";
import { Logger } from "../../../lib/src/common/logger.ts";
import { P2PReplicator } from "../CmdP2PReplicator.ts";
import {
EVENT_P2P_PEER_SHOW_EXTRA_MENU,
type PeerStatus,
} from "../../../lib/src/replication/trystero/P2PReplicatorPaneCommon.ts";
import { LOG_LEVEL_NOTICE, REMOTE_P2P } from "@lib/common/types.ts";
import { Logger } from "@lib/common/logger.ts";
import { EVENT_P2P_PEER_SHOW_EXTRA_MENU, type PeerStatus } from "@lib/replication/trystero/P2PReplicatorPaneCommon.ts";
import type { LiveSyncBaseCore } from "@/LiveSyncBaseCore.ts";
import type { P2PPaneParams } from "@/lib/src/replication/trystero/UseP2PReplicatorResult";
export const VIEW_TYPE_P2P = "p2p-replicator";
function addToList(item: string, list: string) {
@@ -35,8 +31,8 @@ function removeFromList(item: string, list: string) {
}
export class P2PReplicatorPaneView extends SvelteItemView {
// plugin: ObsidianLiveSyncPlugin;
core: LiveSyncBaseCore;
private _p2pResult: P2PPaneParams;
override icon = "waypoints";
title: string = "";
override navigation = false;
@@ -45,11 +41,7 @@ export class P2PReplicatorPaneView extends SvelteItemView {
return "waypoints";
}
get replicator() {
const r = this.core.getAddOn<P2PReplicator>(P2PReplicator.name);
if (!r || !r._replicatorInstance) {
throw new Error("Replicator not found");
}
return r._replicatorInstance;
return this._p2pResult.replicator;
}
async replicateFrom(peer: PeerStatus) {
await this.replicator.replicateFrom(peer.peerId);
@@ -131,10 +123,10 @@ And you can also drop the local database to rebuild from the remote device.`,
await this.core.services.setting.applyPartial(currentSetting, true);
}
m?: Menu;
constructor(leaf: WorkspaceLeaf, core: LiveSyncBaseCore, plugin: ObsidianLiveSyncPlugin) {
constructor(leaf: WorkspaceLeaf, core: LiveSyncBaseCore, p2pResult: P2PPaneParams) {
super(leaf);
// this.plugin = plugin;
this.core = core;
this._p2pResult = p2pResult;
eventHub.onEvent(EVENT_P2P_PEER_SHOW_EXTRA_MENU, ({ peer, event }) => {
if (this.m) {
this.m.hide();
@@ -192,14 +184,10 @@ And you can also drop the local database to rebuild from the remote device.`,
}
}
instantiateComponent(target: HTMLElement) {
const cmdSync = this.core.getAddOn<P2PReplicator>(P2PReplicator.name);
if (!cmdSync) {
throw new Error("Replicator not found");
}
return mount(ReplicatorPaneComponent, {
target: target,
props: {
cmdSync: cmdSync,
cmdSync: this._p2pResult.replicator,
core: this.core,
},
});

View File

@@ -1,7 +1,7 @@
<script lang="ts">
import { getContext } from "svelte";
import { AcceptedStatus, type PeerStatus } from "../../../lib/src/replication/trystero/P2PReplicatorPaneCommon";
import type { P2PReplicator } from "../CmdP2PReplicator";
import type { LiveSyncTrysteroReplicator } from "../../../lib/src/replication/trystero/LiveSyncTrysteroReplicator";
import { eventHub } from "../../../common/events";
import { EVENT_P2P_PEER_SHOW_EXTRA_MENU } from "../../../lib/src/replication/trystero/P2PReplicatorPaneCommon";
@@ -57,7 +57,7 @@
let isNew = $derived.by(() => peer.accepted === AcceptedStatus.UNKNOWN);
function makeDecision(isAccepted: boolean, isTemporary: boolean) {
cmdReplicator._replicatorInstance?.server?.makeDecision({
replicator.makeDecision({
peerId: peer.peerId,
name: peer.name,
decision: isAccepted,
@@ -65,13 +65,12 @@
});
}
function revokeDecision() {
cmdReplicator._replicatorInstance?.server?.revokeDecision({
replicator.revokeDecision({
peerId: peer.peerId,
name: peer.name,
});
}
const cmdReplicator = getContext<() => P2PReplicator>("getReplicator")();
const replicator = cmdReplicator._replicatorInstance!;
const replicator = getContext<() => LiveSyncTrysteroReplicator>("getReplicator")();
const peerAttrLabels = $derived.by(() => {
const attrs = [];
@@ -87,14 +86,14 @@
return attrs;
});
function startWatching() {
replicator.watchPeer(peer.peerId);
replicator?.watchPeer(peer.peerId);
}
function stopWatching() {
replicator.unwatchPeer(peer.peerId);
replicator?.unwatchPeer(peer.peerId);
}
function sync() {
replicator.sync(peer.peerId, false);
void replicator?.sync(peer.peerId, false);
}
function moreMenu(evt: MouseEvent) {

Submodule src/lib updated: 10aa32108b...202038d19e

View File

@@ -14,8 +14,7 @@ import { ModuleObsidianGlobalHistory } from "./modules/features/ModuleGlobalHist
import { ModuleIntegratedTest } from "./modules/extras/ModuleIntegratedTest.ts";
import { ModuleReplicateTest } from "./modules/extras/ModuleReplicateTest.ts";
import { LocalDatabaseMaintenance } from "./features/LocalDatabaseMainte/CmdLocalDatabaseMainte.ts";
import { P2PReplicator } from "./features/P2PSync/CmdP2PReplicator.ts";
import type { InjectableServiceHub } from "./lib/src/services/implements/injectable/InjectableServiceHub.ts";
import type { InjectableServiceHub } from "@lib/services/implements/injectable/InjectableServiceHub.ts";
import { ObsidianServiceHub } from "./modules/services/ObsidianServiceHub.ts";
import { ServiceRebuilder } from "@lib/serviceModules/Rebuilder.ts";
import { ServiceDatabaseFileAccess } from "@/serviceModules/DatabaseFileAccess.ts";
@@ -26,17 +25,23 @@ import { FileAccessObsidian } from "./serviceModules/FileAccessObsidian.ts";
import { StorageEventManagerObsidian } from "./managers/StorageEventManagerObsidian.ts";
import type { ServiceModules } from "./types.ts";
import { setNoticeClass } from "@lib/mock_and_interop/wrapper.ts";
import type { ObsidianServiceContext } from "./lib/src/services/implements/obsidian/ObsidianServiceContext.ts";
import type { ObsidianServiceContext } from "@lib/services/implements/obsidian/ObsidianServiceContext.ts";
import { LiveSyncBaseCore } from "./LiveSyncBaseCore.ts";
import { ModuleSetupObsidian } from "./modules/features/ModuleSetupObsidian.ts";
import { ModuleObsidianMenu } from "./modules/essentialObsidian/ModuleObsidianMenu.ts";
import { ModuleObsidianSettingsAsMarkdown } from "./modules/features/ModuleObsidianSettingAsMarkdown.ts";
import { SetupManager } from "./modules/features/SetupManager.ts";
import { ModuleMigration } from "./modules/essential/ModuleMigration.ts";
import { enableI18nFeature } from "./serviceFeatures/onLayoutReady/enablei18n.ts";
import { useOfflineScanner } from "./lib/src/serviceFeatures/offlineScanner.ts";
import { useCheckRemoteSize } from "./lib/src/serviceFeatures/checkRemoteSize.ts";
import { useOfflineScanner } from "@lib/serviceFeatures/offlineScanner.ts";
import { useCheckRemoteSize } from "@lib/serviceFeatures/checkRemoteSize.ts";
import { useRedFlagFeatures } from "./serviceFeatures/redFlag.ts";
import { useSetupProtocolFeature } from "./serviceFeatures/setupObsidian/setupProtocol.ts";
import { useSetupQRCodeFeature } from "@lib/serviceFeatures/setupObsidian/qrCode";
import { useSetupURIFeature } from "@lib/serviceFeatures/setupObsidian/setupUri";
import { useSetupManagerHandlersFeature } from "./serviceFeatures/setupObsidian/setupManagerHandlers.ts";
import { useP2PReplicatorFeature } from "@lib/replication/trystero/useP2PReplicatorFeature.ts";
import { useP2PReplicatorCommands } from "@lib/replication/trystero/useP2PReplicatorCommands.ts";
import { useP2PReplicatorUI } from "./serviceFeatures/useP2PReplicatorUI.ts";
export type LiveSyncCore = LiveSyncBaseCore<ObsidianServiceContext, LiveSyncCommands>;
export default class ObsidianLiveSyncPlugin extends Plugin {
core: LiveSyncCore;
@@ -142,7 +147,6 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
new ModuleObsidianEvents(this, core),
new ModuleObsidianSettingDialogue(this, core),
new ModuleObsidianMenu(core),
new ModuleSetupObsidian(core),
new ModuleObsidianSettingsAsMarkdown(core),
new ModuleLog(this, core),
new ModuleObsidianDocumentHistory(this, core),
@@ -161,7 +165,6 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
new ConfigSync(this, core),
new HiddenFileSync(this, core),
new LocalDatabaseMaintenance(this, core),
new P2PReplicator(this, core),
];
return addOns;
},
@@ -170,9 +173,21 @@ export default class ObsidianLiveSyncPlugin extends Plugin {
const featuresInitialiser = enableI18nFeature;
const curriedFeature = () => featuresInitialiser(core);
core.services.appLifecycle.onLayoutReady.addHandler(curriedFeature);
const setupManager = core.getModule(SetupManager);
useSetupProtocolFeature(core, setupManager);
useSetupQRCodeFeature(core);
useSetupURIFeature(core);
useSetupManagerHandlersFeature(core, setupManager);
useOfflineScanner(core);
useRedFlagFeatures(core);
useCheckRemoteSize(core);
// p2pReplicatorResult = useP2PReplicator(core, [
// VIEW_TYPE_P2P,
// (leaf: any) => new P2PReplicatorPaneView(leaf, core, p2pReplicatorResult!),
// ]);
const replicator = useP2PReplicatorFeature(core);
useP2PReplicatorCommands(core, replicator);
useP2PReplicatorUI(core, core, replicator);
}
);
}

View File

@@ -4,6 +4,13 @@ import { AbstractModule } from "../AbstractModule";
import { LiveSyncTrysteroReplicator } from "../../lib/src/replication/trystero/LiveSyncTrysteroReplicator";
import type { LiveSyncCore } from "../../main";
// Note:
// This module registers only the `getNewReplicator` handler for the P2P replicator.
// `useP2PReplicator` (see P2PReplicatorCore.ts) already registers the same `getNewReplicator`
// handler internally, so this module is redundant in environments that call `useP2PReplicator`.
// Register this module only in environments that do NOT use `useP2PReplicator` (e.g. CLI).
// In other words: just resolving `getNewReplicator` via this module is all that is needed
// to satisfy what `useP2PReplicator` requires from the replicator service.
export class ModuleReplicatorP2P extends AbstractModule {
_anyNewReplicator(settingOverride: Partial<RemoteDBSettings> = {}): Promise<LiveSyncAbstractReplicator | false> {
const settings = { ...this.settings, ...settingOverride };
@@ -12,23 +19,7 @@ export class ModuleReplicatorP2P extends AbstractModule {
}
return Promise.resolve(false);
}
_everyAfterResumeProcess(): Promise<boolean> {
if (this.settings.remoteType == REMOTE_P2P) {
// // If LiveSync enabled, open replication
// if (this.settings.liveSync) {
// fireAndForget(() => this.core.replicator.openReplication(this.settings, true, false, false));
// }
// // If sync on start enabled, open replication
// if (!this.settings.liveSync && this.settings.syncOnStart) {
// // Possibly ok as if only share the result
// fireAndForget(() => this.core.replicator.openReplication(this.settings, false, false, false));
// }
}
return Promise.resolve(true);
}
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.replicator.getNewReplicator.addHandler(this._anyNewReplicator.bind(this));
services.appLifecycle.onResumed.addHandler(this._everyAfterResumeProcess.bind(this));
}
}

View File

@@ -30,7 +30,7 @@ import { LOG_LEVEL_NOTICE, setGlobalLogFunction } from "octagonal-wheels/common/
import { LogPaneView, VIEW_TYPE_LOG } from "./Log/LogPaneView.ts";
import { serialized } from "octagonal-wheels/concurrency/lock";
import { $msg } from "src/lib/src/common/i18n.ts";
import { P2PLogCollector } from "../../lib/src/replication/trystero/P2PReplicatorCore.ts";
import { P2PLogCollector } from "@/lib/src/replication/trystero/P2PLogCollector.ts";
import type { LiveSyncCore } from "../../main.ts";
import { LiveSyncError } from "@lib/common/LSError.ts";
import { isValidPath } from "@/common/utils.ts";

View File

@@ -321,8 +321,8 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
}
closeSetting() {
// @ts-ignore
this.core.app.setting.close();
//@ts-ignore :
this.plugin.app.setting.close();
}
handleElement(element: HTMLElement, func: OnUpdateFunc) {

View File

@@ -106,10 +106,10 @@ export class ModuleLiveSyncMain extends AbstractModule {
this._log($msg("moduleLiveSyncMain.logReadChangelog"), LOG_LEVEL_NOTICE);
}
//@ts-ignore
if (this.isMobile) {
this.settings.disableRequestURI = true;
}
// //@ts-ignore
// if (this.isMobile) {
// this.settings.disableRequestURI = true;
// }
if (last_version && Number(last_version) < VER) {
this.settings.liveSync = false;
this.settings.syncOnSave = false;

View File

@@ -0,0 +1,34 @@
import { type SetupManager, UserMode } from "@/modules/features/SetupManager";
import type { SetupFeatureHost } from "@lib/serviceFeatures/setupObsidian/types";
import { EVENT_REQUEST_OPEN_P2P_SETTINGS, EVENT_REQUEST_OPEN_SETUP_URI } from "@lib/events/coreEvents";
import { eventHub } from "@lib/hub/hub";
import { fireAndForget } from "@lib/common/utils";
import type { NecessaryServices } from "@lib/interfaces/ServiceModule";
/** Start the "use a copied setup URI" flow; the user's context is not yet known. */
export async function openSetupURI(setupManager: SetupManager) {
    const pending = setupManager.onUseSetupURI(UserMode.Unknown);
    await pending;
}
/** Open the manual P2P setup flow in update mode, seeded with the current settings. */
export async function openP2PSettings(host: SetupFeatureHost, setupManager: SetupManager) {
    const currentSettings = host.services.setting.currentSettings();
    return await setupManager.onP2PManualSetup(UserMode.Update, currentSettings, false);
}
/**
 * ServiceFeature: registers the setup-related command and event handlers once
 * the app has loaded.
 *
 * @param host Service host providing API, UI, setting and appLifecycle services.
 * @param setupManager The SetupManager the command/event handlers delegate to.
 */
export function useSetupManagerHandlersFeature(
    host: NecessaryServices<"API" | "UI" | "setting" | "appLifecycle", never>,
    setupManager: SetupManager
) {
    host.services.appLifecycle.onLoaded.addHandler(() => {
        host.services.API.addCommand({
            id: "livesync-opensetupuri",
            name: "Use the copied setup URI (Formerly Open setup URI)",
            // Pass a thunk (not an already-started promise) so the flow begins
            // inside fireAndForget, consistent with the event handlers below.
            callback: () => fireAndForget(() => openSetupURI(setupManager)),
        });
        eventHub.onEvent(EVENT_REQUEST_OPEN_SETUP_URI, () => fireAndForget(() => openSetupURI(setupManager)));
        eventHub.onEvent(EVENT_REQUEST_OPEN_P2P_SETTINGS, () =>
            fireAndForget(() => openP2PSettings(host, setupManager))
        );
        return Promise.resolve(true);
    });
}

View File

@@ -0,0 +1,87 @@
import { describe, expect, it, vi, afterEach } from "vitest";
import { eventHub } from "@lib/hub/hub";
import { EVENT_REQUEST_OPEN_P2P_SETTINGS, EVENT_REQUEST_OPEN_SETUP_URI } from "@lib/events/coreEvents";
import { openP2PSettings, openSetupURI, useSetupManagerHandlersFeature } from "./setupManagerHandlers";
// Stub the SetupManager module so importing the feature does not pull in the
// real implementation. NOTE(review): both UserMode.Unknown and UserMode.Update
// map to the literal "unknown" here; the assertions below rely on that —
// confirm this is intentional (the real UserMode.Update is presumably distinct).
vi.mock("@/modules/features/SetupManager", () => {
    return {
        UserMode: {
            Unknown: "unknown",
            Update: "unknown",
        },
    };
});
describe("setupObsidian/setupManagerHandlers", () => {
    afterEach(() => {
        // Reset spies/mocks between tests so call counts do not leak across cases.
        vi.restoreAllMocks();
        vi.clearAllMocks();
    });
    it("openSetupURI should delegate to SetupManager.onUseSetupURI", async () => {
        const setupManager = {
            onUseSetupURI: vi.fn(async () => await Promise.resolve(true)),
        } as any;
        await openSetupURI(setupManager);
        // "unknown" is the mocked UserMode.Unknown literal.
        expect(setupManager.onUseSetupURI).toHaveBeenCalledWith("unknown");
    });
    it("openP2PSettings should delegate to SetupManager.onP2PManualSetup", async () => {
        const settings = { x: 1 };
        const host = {
            services: {
                setting: {
                    currentSettings: vi.fn(() => settings),
                },
            },
        } as any;
        const setupManager = {
            onP2PManualSetup: vi.fn(async () => await Promise.resolve(true)),
        } as any;
        await openP2PSettings(host, setupManager);
        // Expects the current settings to be passed through verbatim.
        expect(setupManager.onP2PManualSetup).toHaveBeenCalledWith("unknown", settings, false);
    });
    it("useSetupManagerHandlersFeature should register onLoaded handler that wires command and events", async () => {
        const addHandler = vi.fn();
        const addCommand = vi.fn();
        const onEventSpy = vi.spyOn(eventHub, "onEvent");
        const host = {
            services: {
                API: {
                    addCommand,
                },
                appLifecycle: {
                    onLoaded: {
                        addHandler,
                    },
                },
                setting: {
                    currentSettings: vi.fn(() => ({ x: 1 })),
                },
            },
        } as any;
        const setupManager = {
            onUseSetupURI: vi.fn(async () => await Promise.resolve(true)),
            onP2PManualSetup: vi.fn(async () => await Promise.resolve(true)),
        } as any;
        useSetupManagerHandlersFeature(host, setupManager);
        expect(addHandler).toHaveBeenCalledTimes(1);
        // Run the captured onLoaded handler manually to exercise the wiring.
        const loadedHandler = addHandler.mock.calls[0][0] as () => Promise<boolean>;
        await loadedHandler();
        expect(addCommand).toHaveBeenCalledWith(
            expect.objectContaining({
                id: "livesync-opensetupuri",
                name: "Use the copied setup URI (Formerly Open setup URI)",
            })
        );
        expect(onEventSpy).toHaveBeenCalledWith(EVENT_REQUEST_OPEN_SETUP_URI, expect.any(Function));
        expect(onEventSpy).toHaveBeenCalledWith(EVENT_REQUEST_OPEN_P2P_SETTINGS, expect.any(Function));
    });
});

View File

@@ -0,0 +1,37 @@
import { LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "@lib/common/types";
import type { LogFunction } from "@lib/services/lib/logUtils";
import { createInstanceLogFunction } from "@lib/services/lib/logUtils";
import type { SetupFeatureHost } from "@lib/serviceFeatures/setupObsidian/types";
import { configURIBase } from "@/common/types";
import type { NecessaryServices } from "@lib/interfaces/ServiceModule";
import { type SetupManager, UserMode } from "@/modules/features/SetupManager";
/**
 * Dispatch an incoming `setuplivesync` protocol payload.
 * A `settings` payload (setup URI) takes precedence; otherwise a
 * `settingsQR` payload is decoded. Anything else is ignored.
 */
async function handleSetupProtocol(setupManager: SetupManager, conf: Record<string, string>) {
    const { settings, settingsQR } = conf;
    if (settings) {
        const uri = `${configURIBase}${encodeURIComponent(settings)}`;
        await setupManager.onUseSetupURI(UserMode.Unknown, uri);
        return;
    }
    if (settingsQR) {
        await setupManager.decodeQR(settingsQR);
    }
}
/**
 * Register the `setuplivesync` protocol handler on the host API.
 * Registration failures are logged and swallowed: the feature is optional and
 * some environments do not support protocol handlers.
 */
export function registerSetupProtocolHandler(host: SetupFeatureHost, log: LogFunction, setupManager: SetupManager) {
    const onProtocol = async (conf: Record<string, string>) => {
        await handleSetupProtocol(setupManager, conf);
    };
    try {
        host.services.API.registerProtocolHandler("setuplivesync", onProtocol);
    } catch (e) {
        log("Failed to register protocol handler. This feature may not work in some environments.", LOG_LEVEL_NOTICE);
        log(e, LOG_LEVEL_VERBOSE);
    }
}
/**
 * ServiceFeature: wires the setup protocol handler into the app lifecycle,
 * deferring registration until the app has loaded.
 */
export function useSetupProtocolFeature(
    host: NecessaryServices<"API" | "UI" | "setting" | "appLifecycle", never>,
    setupManager: SetupManager
) {
    // Logger tagged with this feature's name.
    const log = createInstanceLogFunction("SF:SetupProtocol", host.services.API);
    const onLoaded = () => {
        registerSetupProtocolHandler(host, log, setupManager);
        return Promise.resolve(true);
    };
    host.services.appLifecycle.onLoaded.addHandler(onLoaded);
}

View File

@@ -0,0 +1,131 @@
import { describe, expect, it, vi, afterEach } from "vitest";
import { registerSetupProtocolHandler, useSetupProtocolFeature } from "./setupProtocol";
// Replace the URI base with a recognisable literal so assertions can check
// exact URI construction.
vi.mock("@/common/types", () => {
    return {
        configURIBase: "mock-config://",
    };
});
// NOTE(review): both UserMode members are stubbed to the same "unknown"
// literal; assertions below depend on that — confirm intentional.
vi.mock("@/modules/features/SetupManager", () => {
    return {
        UserMode: {
            Unknown: "unknown",
            Update: "unknown",
        },
    };
});
describe("setupObsidian/setupProtocol", () => {
    afterEach(() => {
        vi.restoreAllMocks();
        vi.clearAllMocks();
    });
    it("registerSetupProtocolHandler should route settings payload to onUseSetupURI", async () => {
        // Capture the handler passed to registerProtocolHandler so it can be
        // invoked directly with synthetic payloads.
        let protocolHandler: ((params: Record<string, string>) => Promise<void>) | undefined;
        const host = {
            services: {
                API: {
                    registerProtocolHandler: vi.fn(
                        (_action: string, handler: (params: Record<string, string>) => Promise<void>) => {
                            protocolHandler = handler;
                        }
                    ),
                },
            },
        } as any;
        const log = vi.fn();
        const setupManager = {
            onUseSetupURI: vi.fn(async () => await Promise.resolve(true)),
            decodeQR: vi.fn(async () => await Promise.resolve(true)),
        } as any;
        registerSetupProtocolHandler(host, log, setupManager);
        expect(host.services.API.registerProtocolHandler).toHaveBeenCalledWith("setuplivesync", expect.any(Function));
        // "a b" verifies the payload is URI-encoded before being appended.
        await protocolHandler!({ settings: "a b" });
        expect(setupManager.onUseSetupURI).toHaveBeenCalledWith(
            "unknown",
            `mock-config://${encodeURIComponent("a b")}`
        );
        expect(setupManager.decodeQR).not.toHaveBeenCalled();
    });
    it("registerSetupProtocolHandler should route settingsQR payload to decodeQR", async () => {
        let protocolHandler: ((params: Record<string, string>) => Promise<void>) | undefined;
        const host = {
            services: {
                API: {
                    registerProtocolHandler: vi.fn(
                        (_action: string, handler: (params: Record<string, string>) => Promise<void>) => {
                            protocolHandler = handler;
                        }
                    ),
                },
            },
        } as any;
        const log = vi.fn();
        const setupManager = {
            onUseSetupURI: vi.fn(async () => await Promise.resolve(true)),
            decodeQR: vi.fn(async () => await Promise.resolve(true)),
        } as any;
        registerSetupProtocolHandler(host, log, setupManager);
        await protocolHandler!({ settingsQR: "qr-data" });
        expect(setupManager.decodeQR).toHaveBeenCalledWith("qr-data");
        expect(setupManager.onUseSetupURI).not.toHaveBeenCalled();
    });
    it("registerSetupProtocolHandler should log and continue when registration throws", () => {
        const host = {
            services: {
                API: {
                    registerProtocolHandler: vi.fn(() => {
                        throw new Error("register failed");
                    }),
                },
            },
        } as any;
        const log = vi.fn();
        const setupManager = {
            onUseSetupURI: vi.fn(),
            decodeQR: vi.fn(),
        } as any;
        registerSetupProtocolHandler(host, log, setupManager);
        // One notice-level message plus one verbose dump of the error.
        expect(log).toHaveBeenCalledTimes(2);
    });
    it("useSetupProtocolFeature should register onLoaded handler", async () => {
        const addHandler = vi.fn();
        const registerProtocolHandler = vi.fn();
        const host = {
            services: {
                API: {
                    addLog: vi.fn(),
                    registerProtocolHandler,
                },
                appLifecycle: {
                    onLoaded: {
                        addHandler,
                    },
                },
            },
        } as any;
        const setupManager = {
            onUseSetupURI: vi.fn(),
            decodeQR: vi.fn(),
        } as any;
        useSetupProtocolFeature(host, setupManager);
        expect(addHandler).toHaveBeenCalledTimes(1);
        // Invoke the captured onLoaded handler to trigger registration.
        const loadedHandler = addHandler.mock.calls[0][0] as () => Promise<boolean>;
        await loadedHandler();
        expect(registerProtocolHandler).toHaveBeenCalledWith("setuplivesync", expect.any(Function));
    });
});

View File

@@ -0,0 +1,76 @@
import { eventHub, EVENT_REQUEST_OPEN_P2P } from "@/common/events";
import { reactiveSource } from "octagonal-wheels/dataobject/reactive_v2";
import type { NecessaryServices } from "@lib/interfaces/ServiceModule";
import { type UseP2PReplicatorResult } from "@/lib/src/replication/trystero/UseP2PReplicatorResult";
import { P2PLogCollector } from "@/lib/src/replication/trystero/P2PLogCollector";
import { P2PReplicatorPaneView, VIEW_TYPE_P2P } from "@/features/P2PSync/P2PReplicator/P2PReplicatorPaneView";
import type { LiveSyncCore } from "@/main";
/**
 * ServiceFeature: P2P Replicator UI wiring.
 * Registers the P2P pane view, the "open P2P replicator" command, the ribbon
 * icon, and the EVENT_REQUEST_OPEN_P2P listener, and bridges P2P log output
 * into a reactive status line. Follows the same middleware style as
 * useOfflineScanner.
 *
 * @param host Service host providing API/lifecycle/etc. services.
 * @param core The core instance handed to the pane view constructor.
 * @param replicator Result of useP2PReplicatorFeature; supplies the live replicator instance.
 * @returns The pane parameters ({ replicator, p2pLogCollector, storeP2PStatusLine }).
 */
export function useP2PReplicatorUI(
    host: NecessaryServices<
        | "API"
        | "appLifecycle"
        | "setting"
        | "vault"
        | "database"
        | "databaseEvents"
        | "keyValueDB"
        | "replication"
        | "config"
        | "UI"
        | "replicator",
        never
    >,
    core: LiveSyncCore,
    replicator: UseP2PReplicatorResult
) {
    // const env: LiveSyncTrysteroReplicatorEnv = { services: host.services as any };
    const getReplicator = () => replicator.replicator;
    const p2pLogCollector = new P2PLogCollector();
    // Reactive one-line summary of P2P replication state, fed from the collector.
    const storeP2PStatusLine = reactiveSource("");
    p2pLogCollector.p2pReplicationLine.onChanged((line) => {
        storeP2PStatusLine.value = line.value;
    });
    // Register the pane view; commands and the ribbon icon follow on initialise.
    const viewType = VIEW_TYPE_P2P;
    const factory = (leaf: any) => {
        return new P2PReplicatorPaneView(leaf, core, {
            replicator: getReplicator(),
            p2pLogCollector,
            storeP2PStatusLine,
        });
    };
    const openPane = () => host.services.API.showWindow(viewType);
    host.services.API.registerWindow(viewType, factory);
    host.services.appLifecycle.onInitialise.addHandler(() => {
        eventHub.onEvent(EVENT_REQUEST_OPEN_P2P, () => {
            void openPane();
        });
        host.services.API.addCommand({
            id: "open-p2p-replicator",
            name: "P2P Sync : Open P2P Replicator",
            callback: () => {
                void openPane();
            },
        });
        // addRibbonIcon may return undefined in headless environments; guard both calls.
        host.services.API.addRibbonIcon("waypoints", "P2P Replicator", () => {
            void openPane();
        })?.addClass?.("livesync-ribbon-replicate-p2p");
        return Promise.resolve(true);
    });
    return { replicator: getReplicator(), p2pLogCollector, storeP2PStatusLine };
}

View File

@@ -116,6 +116,22 @@ export const acceptWebPeer: BrowserCommand = async (ctx) => {
return false;
};
/** Write arbitrary text to a file on the Node.js host (used for phase handoff). */
export const writeHandoffFile: BrowserCommand<[filePath: string, content: string]> = async (
    _ctx,
    filePath: string,
    content: string
) => {
    const { writeFile } = await import("node:fs/promises");
    await writeFile(filePath, content, "utf-8");
};
/** Read a file from the Node.js host (used for phase handoff). */
export const readHandoffFile: BrowserCommand<[filePath: string]> = async (_ctx, filePath: string): Promise<string> => {
    const { readFile } = await import("node:fs/promises");
    const text = await readFile(filePath, "utf-8");
    return text;
};
export default function BrowserCommands(): Plugin {
return {
name: "vitest:custom-commands",
@@ -128,6 +144,8 @@ export default function BrowserCommands(): Plugin {
openWebPeer,
closeWebPeer,
acceptWebPeer,
writeHandoffFile,
readHandoffFile,
},
},
},
@@ -141,5 +159,7 @@ declare module "vitest/browser" {
openWebPeer: (setting: P2PSyncSetting, serverPeerName: string) => Promise<void>;
closeWebPeer: () => Promise<void>;
acceptWebPeer: () => Promise<boolean>;
writeHandoffFile: (filePath: string, content: string) => Promise<void>;
readHandoffFile: (filePath: string) => Promise<string>;
}
}

View File

@@ -1,6 +1,6 @@
import { expect } from "vitest";
import { waitForIdle, type LiveSyncHarness } from "../harness/harness";
import { LOG_LEVEL_INFO, RemoteTypes, type ObsidianLiveSyncSettings } from "@/lib/src/common/types";
import { RemoteTypes, type ObsidianLiveSyncSettings } from "@/lib/src/common/types";
import { delay, fireAndForget } from "@/lib/src/common/utils";
import { commands } from "vitest/browser";
@@ -15,14 +15,10 @@ async function waitForP2PPeers(harness: LiveSyncHarness) {
if (!(replicator instanceof LiveSyncTrysteroReplicator)) {
throw new Error("Replicator is not an instance of LiveSyncTrysteroReplicator");
}
const p2pReplicator = await replicator.getP2PConnection(LOG_LEVEL_INFO);
if (!p2pReplicator) {
throw new Error("P2P Replicator is not initialized");
}
while (retries-- > 0) {
fireAndForget(() => commands.acceptWebPeer());
await delay(1000);
const peers = p2pReplicator.knownAdvertisements;
const peers = replicator.knownAdvertisements;
if (peers && peers.length > 0) {
console.log("P2P peers connected:", peers);

194
test/suitep2p/run-p2p-tests.sh Executable file
View File

@@ -0,0 +1,194 @@
#!/usr/bin/env bash
# End-to-end driver for the P2P sync suite: builds the CLI, starts a CLI
# p2p-host (and optionally a local relay), then runs the vitest upload and
# download phases against it.
set -euo pipefail
SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd -- "$SCRIPT_DIR/../.." && pwd)"
CLI_DIR="$REPO_ROOT/src/apps/cli"
CLI_TEST_HELPERS="$CLI_DIR/test/test-helpers.sh"
# Shared helpers: settings-file init, relay management, run_cli, etc.
source "$CLI_TEST_HELPERS"
# Tunables — override via environment.
RUN_BUILD="${RUN_BUILD:-1}"
KEEP_TEST_DATA="${KEEP_TEST_DATA:-1}"
VERBOSE_TEST_LOGGING="${VERBOSE_TEST_LOGGING:-1}"
RELAY="${RELAY:-ws://localhost:4000/}"
USE_INTERNAL_RELAY="${USE_INTERNAL_RELAY:-1}"
APP_ID="${APP_ID:-self-hosted-livesync-vitest-p2p}"
HOST_PEER_NAME="${HOST_PEER_NAME:-p2p-cli-host}"
# Per-run random identifiers so repeated/concurrent runs never collide.
ROOM_ID="p2p-room-$(date +%s)-$RANDOM-$RANDOM"
PASSPHRASE="p2p-pass-$(date +%s)-$RANDOM-$RANDOM"
UPLOAD_PEER_NAME="p2p-upload-$(date +%s)-$RANDOM"
DOWNLOAD_PEER_NAME="p2p-download-$(date +%s)-$RANDOM"
UPLOAD_VAULT_NAME="TestVaultUpload-$(date +%s)-$RANDOM"
DOWNLOAD_VAULT_NAME="TestVaultDownload-$(date +%s)-$RANDOM"
# ---- Build CLI ----
if [[ "$RUN_BUILD" == "1" ]]; then
echo "[INFO] building CLI"
(cd "$CLI_DIR" && npm run build)
fi
# ---- Temp directory ----
WORK_DIR="$(mktemp -d "${TMPDIR:-/tmp}/livesync-vitest-p2p.XXXXXX")"
VAULT_HOST="$WORK_DIR/vault-host"
SETTINGS_HOST="$WORK_DIR/settings-host.json"
HOST_LOG="$WORK_DIR/p2p-host.log"
# Handoff file: upload phase writes this; download phase reads it.
HANDOFF_FILE="$WORK_DIR/p2p-test-handoff.json"
mkdir -p "$VAULT_HOST"
# ---- Setup CLI command (uses npm run cli from CLI_DIR) ----
# Override run_cli to invoke the built binary directly from CLI_DIR
# (runs in a subshell so the caller's working directory is untouched).
run_cli() {
(cd "$CLI_DIR" && node dist/index.cjs "$@")
}
# ---- Create host settings ----
echo "[INFO] relay=$RELAY room=$ROOM_ID app=$APP_ID host=$HOST_PEER_NAME"
cli_test_init_settings_file "$SETTINGS_HOST"
cli_test_apply_p2p_settings "$SETTINGS_HOST" "$ROOM_ID" "$PASSPHRASE" "$APP_ID" "$RELAY" "~.*"
# Set host peer name
# Patch the settings JSON in-place via node; values pass through environment
# variables so no shell quoting of JSON is needed.
SETTINGS_HOST_FILE="$SETTINGS_HOST" HOST_PEER_NAME_VAL="$HOST_PEER_NAME" HOST_PASSPHRASE_VAL="$PASSPHRASE" node <<'NODE'
const fs = require("node:fs");
const data = JSON.parse(fs.readFileSync(process.env.SETTINGS_HOST_FILE, "utf-8"));
// Keep tweak values aligned with browser-side P2P test settings.
data.remoteType = "ONLY_P2P";
data.encrypt = true;
data.passphrase = process.env.HOST_PASSPHRASE_VAL;
data.usePathObfuscation = true;
data.handleFilenameCaseSensitive = false;
data.customChunkSize = 50;
data.usePluginSyncV2 = true;
data.doNotUseFixedRevisionForChunks = false;
data.P2P_DevicePeerName = process.env.HOST_PEER_NAME_VAL;
fs.writeFileSync(process.env.SETTINGS_HOST_FILE, JSON.stringify(data, null, 2), "utf-8");
NODE
# ---- Cleanup trap ----
# Stops the CLI host and (if we started it) the local relay, then removes the
# work directory unless KEEP_TEST_DATA=1. Preserves the original exit status.
cleanup() {
local exit_code=$?
if [[ -n "${HOST_PID:-}" ]] && kill -0 "$HOST_PID" >/dev/null 2>&1; then
echo "[INFO] stopping CLI host (PID=$HOST_PID)"
kill -TERM "$HOST_PID" >/dev/null 2>&1 || true
wait "$HOST_PID" >/dev/null 2>&1 || true
fi
if [[ "${P2P_RELAY_STARTED:-0}" == "1" ]]; then
cli_test_stop_p2p_relay
fi
if [[ "$KEEP_TEST_DATA" != "1" ]]; then
rm -rf "$WORK_DIR"
else
echo "[INFO] KEEP_TEST_DATA=1, preserving artefacts at $WORK_DIR"
fi
exit "$exit_code"
}
trap cleanup EXIT
# Start the CLI p2p-host in the background and wait (up to 30s) for its
# readiness marker in the log. Retries up to 5 times; a "Resource temporarily
# unavailable" message is treated as a transient database lock from a previous
# run and triggers a retry. Exits the script on permanent failure.
start_host() {
local attempt=0
while [[ "$attempt" -lt 5 ]]; do
attempt=$((attempt + 1))
echo "[INFO] starting CLI p2p-host (attempt $attempt/5)"
# Truncate the log so grep only sees output from this attempt.
: >"$HOST_LOG"
(cd "$CLI_DIR" && node dist/index.cjs "$VAULT_HOST" --settings "$SETTINGS_HOST" -d p2p-host) >"$HOST_LOG" 2>&1 &
HOST_PID=$!
local host_ready=0
local exited_early=0
for i in $(seq 1 30); do
if grep -qF "P2P host is running" "$HOST_LOG" 2>/dev/null; then
host_ready=1
break
fi
# Bail out of the wait loop if the host process died.
if ! kill -0 "$HOST_PID" >/dev/null 2>&1; then
exited_early=1
break
fi
echo "[INFO] waiting for p2p-host to be ready... ($i/30)"
sleep 1
done
if [[ "$host_ready" == "1" ]]; then
echo "[INFO] p2p-host is ready (PID=$HOST_PID)"
return 0
fi
# Reap the dead/failed process before deciding whether to retry.
wait "$HOST_PID" >/dev/null 2>&1 || true
HOST_PID=
if grep -qF "Resource temporarily unavailable" "$HOST_LOG" 2>/dev/null; then
echo "[INFO] p2p-host database lock is still being released, retrying..."
sleep 2
continue
fi
if [[ "$exited_early" == "1" ]]; then
echo "[FAIL] CLI host process exited unexpectedly" >&2
else
echo "[FAIL] p2p-host did not become ready within 30 seconds" >&2
fi
cat "$HOST_LOG" >&2
exit 1
done
echo "[FAIL] p2p-host could not be restarted after multiple attempts" >&2
cat "$HOST_LOG" >&2
exit 1
}
# ---- Start local relay if needed ----
# Only spin up the bundled relay when the configured RELAY points at localhost.
if [[ "$USE_INTERNAL_RELAY" == "1" ]]; then
if cli_test_is_local_p2p_relay "$RELAY"; then
cli_test_start_p2p_relay
P2P_RELAY_STARTED=1
else
echo "[INFO] USE_INTERNAL_RELAY=1 but RELAY is not local ($RELAY), skipping"
fi
fi
start_host
# Common env vars passed to both vitest runs
P2P_ENV=(
P2P_TEST_ROOM_ID="$ROOM_ID"
P2P_TEST_PASSPHRASE="$PASSPHRASE"
P2P_TEST_HOST_PEER_NAME="$HOST_PEER_NAME"
P2P_TEST_RELAY="$RELAY"
P2P_TEST_APP_ID="$APP_ID"
P2P_TEST_HANDOFF_FILE="$HANDOFF_FILE"
P2P_TEST_UPLOAD_PEER_NAME="$UPLOAD_PEER_NAME"
P2P_TEST_DOWNLOAD_PEER_NAME="$DOWNLOAD_PEER_NAME"
P2P_TEST_UPLOAD_VAULT_NAME="$UPLOAD_VAULT_NAME"
P2P_TEST_DOWNLOAD_VAULT_NAME="$DOWNLOAD_VAULT_NAME"
)
cd "$REPO_ROOT"
# ---- Phase 1: Upload ----
# Each vitest run gets a fresh browser process, so Trystero's module-level
# global state (occupiedRooms, didInit, etc.) is clean for every phase.
echo "[INFO] running P2P vitest — upload phase"
env "${P2P_ENV[@]}" \
npx dotenv-cli -e .env -e .test.env -- \
vitest run --config vitest.config.p2p.ts test/suitep2p/syncp2p.p2p-up.test.ts
echo "[INFO] upload phase completed"
# ---- Phase 2: Download ----
# Keep the same host process alive so its database handle and relay presence stay stable.
echo "[INFO] waiting 5s before download phase..."
sleep 5
echo "[INFO] running P2P vitest — download phase"
env "${P2P_ENV[@]}" \
npx dotenv-cli -e .env -e .test.env -- \
vitest run --config vitest.config.p2p.ts test/suitep2p/syncp2p.p2p-down.test.ts
echo "[INFO] download phase completed"
echo "[INFO] P2P vitest suite completed"

View File

@@ -0,0 +1,175 @@
/**
* P2P-specific sync helpers.
*
* Derived from test/suite/sync_common.ts but with all acceptWebPeer() calls
* removed. When using a CLI p2p-host with P2P_AutoAcceptingPeers="~.*", peer
* acceptance is automatic and no Playwright dialog interaction is needed.
*/
import { expect } from "vitest";
import { waitForIdle, type LiveSyncHarness } from "../harness/harness";
import { RemoteTypes, type ObsidianLiveSyncSettings } from "@/lib/src/common/types";
import { delay } from "@/lib/src/common/utils";
import { LiveSyncTrysteroReplicator } from "@/lib/src/replication/trystero/LiveSyncTrysteroReplicator";
import { waitTaskWithFollowups } from "../lib/util";
const P2P_REPLICATION_TIMEOUT_MS = 180000;
/**
 * Probe a WebSocket relay: resolves once a connection opens, rejects on
 * error or after a 5-second timeout. Purely diagnostic — the socket is
 * closed immediately in every outcome so no connection lingers.
 *
 * @param relayUrl ws:// or wss:// URL of the relay to probe.
 */
async function testWebSocketConnection(relayUrl: string): Promise<void> {
    return new Promise((resolve, reject) => {
        console.log(`[P2P Debug] Testing WebSocket connection to ${relayUrl}`);
        try {
            const ws = new WebSocket(relayUrl);
            const timer = setTimeout(() => {
                ws.close();
                reject(new Error(`WebSocket connection to ${relayUrl} timed out`));
            }, 5000);
            ws.onopen = () => {
                clearTimeout(timer);
                console.log(`[P2P Debug] WebSocket connected to ${relayUrl} successfully`);
                ws.close();
                resolve();
            };
            ws.onerror = (e) => {
                clearTimeout(timer);
                // Close explicitly so a socket stuck in CONNECTING does not
                // linger after a failed handshake (the other paths both close).
                ws.close();
                console.error(`[P2P Debug] WebSocket error connecting to ${relayUrl}:`, e);
                reject(new Error(`WebSocket connection to ${relayUrl} failed`));
            };
        } catch (e) {
            console.error(`[P2P Debug] WebSocket constructor threw:`, e);
            reject(e);
        }
    });
}
/**
 * Block until at least one P2P peer advertisement is visible, or throw.
 * No-op unless the harness is configured for REMOTE_P2P. Opens the
 * replicator's P2P connection first if it is not serving yet.
 */
async function waitForP2PPeers(harness: LiveSyncHarness) {
    if (harness.plugin.core.settings.remoteType === RemoteTypes.REMOTE_P2P) {
        const maxRetries = 20;
        let retries = maxRetries;
        const replicator = await harness.plugin.core.services.replicator.getActiveReplicator();
        console.log("[P2P Debug] replicator type:", replicator?.constructor?.name);
        if (!(replicator instanceof LiveSyncTrysteroReplicator)) {
            throw new Error("Replicator is not an instance of LiveSyncTrysteroReplicator");
        }
        // Ensure P2P is open (getActiveReplicator returns a fresh instance that may not be open yet)
        if (!replicator.server?.isServing) {
            console.log("[P2P Debug] P2P not yet serving, calling open()");
            // Test WebSocket connectivity first
            const relay = harness.plugin.core.settings.P2P_relays?.split(",")[0]?.trim();
            if (relay) {
                try {
                    await testWebSocketConnection(relay);
                } catch (e) {
                    // Diagnostic only: the open() below is still attempted.
                    console.error("[P2P Debug] WebSocket connectivity test failed:", e);
                }
            }
            try {
                await replicator.open();
                console.log("[P2P Debug] open() completed, isServing:", replicator.server?.isServing);
            } catch (e) {
                // Swallowed on purpose; the serving poll below decides pass/fail.
                console.error("[P2P Debug] open() threw:", e);
            }
        }
        // Wait for P2P server to actually start (room joined) — up to ~15s.
        for (let i = 0; i < 30; i++) {
            const serving = replicator.server?.isServing;
            console.log(`[P2P Debug] isServing: ${serving} (${i}/30)`);
            if (serving) break;
            await delay(500);
            if (i === 29) throw new Error("P2P server did not start in time.");
        }
        // Poll for peer advertisements.
        // NOTE(review): each iteration waits twice (here and at the loop
        // bottom), so a "retry" is ~2s and the full budget is ~40s — confirm
        // the double delay is intended.
        while (retries-- > 0) {
            await delay(1000);
            const peers = replicator.knownAdvertisements;
            if (peers && peers.length > 0) {
                console.log("P2P peers connected:", peers);
                return;
            }
            console.log(`Waiting for any P2P peers to be connected... ${maxRetries - retries}/${maxRetries}`);
            console.dir(peers);
            await delay(1000);
        }
        console.log("Failed to connect P2P peers after retries");
        throw new Error("P2P peers did not connect in time.");
    }
}
/**
 * Shuts down the Trystero replicator's connections for a P2P harness.
 * closeReplication() is invoked twice with a short pause in between,
 * followed by a settling delay. No-op for non-P2P remote types.
 */
export async function closeP2PReplicatorConnections(harness: LiveSyncHarness) {
    if (harness.plugin.core.settings.remoteType !== RemoteTypes.REMOTE_P2P) {
        return;
    }
    const active = await harness.plugin.core.services.replicator.getActiveReplicator();
    if (!(active instanceof LiveSyncTrysteroReplicator)) {
        throw new Error("Replicator is not an instance of LiveSyncTrysteroReplicator");
    }
    active.closeReplication();
    await delay(30);
    active.closeReplication();
    await delay(1000);
    console.log("P2P replicator connections closed");
}
/**
 * Runs one replication pass against the configured remote.
 * For P2P remotes: waits for peers, picks the CLI host peer (name prefixed
 * "vault-host", falling back to the first known peer) and drives sync()
 * under a timeout, rethrowing any error carried in the result.
 * For other remote types: delegates to the regular replication service.
 */
export async function performReplication(harness: LiveSyncHarness) {
    await waitForP2PPeers(harness);
    await delay(500);
    if (harness.plugin.core.settings.remoteType !== RemoteTypes.REMOTE_P2P) {
        return await harness.plugin.core.services.replication.replicate(true);
    }
    const active = await harness.plugin.core.services.replicator.getActiveReplicator();
    if (!(active instanceof LiveSyncTrysteroReplicator)) {
        throw new Error("Replicator is not an instance of LiveSyncTrysteroReplicator");
    }
    const advertisements = active.knownAdvertisements;
    const hostPeer =
        advertisements.find((peer) => peer.name.startsWith("vault-host")) ?? advertisements[0] ?? undefined;
    if (!hostPeer) {
        throw new Error("No connected P2P peer to synchronise with");
    }
    const syncTask = active.sync(hostPeer.peerId, true);
    const outcome = await waitTaskWithFollowups(syncTask, () => Promise.resolve(), P2P_REPLICATION_TIMEOUT_MS, 500);
    // A settled task may still carry a failure in its `error` field.
    if (outcome && typeof outcome === "object" && "error" in outcome && outcome.error) {
        throw outcome.error;
    }
    return outcome;
}
/**
 * Closes whichever replication channel is active for the harness.
 * P2P remotes are routed to closeP2PReplicatorConnections(); any other
 * remote type closes the active replicator and waits for the harness
 * to go idle. Logs and returns quietly when no replicator is active.
 */
export async function closeReplication(harness: LiveSyncHarness) {
    if (harness.plugin.core.settings.remoteType === RemoteTypes.REMOTE_P2P) {
        return await closeP2PReplicatorConnections(harness);
    }
    const active = await harness.plugin.core.services.replicator.getActiveReplicator();
    if (!active) {
        console.log("No active replicator to close");
        return;
    }
    await active.closeReplication();
    await waitForIdle(harness);
    console.log("Replication closed");
}
/**
 * Initialises (or resets) the remote database before a test run and
 * verifies it is reachable. P2P remotes have no remote database, so the
 * function is a no-op for them.
 *
 * Fix: getActiveReplicator() is awaited everywhere else in this file
 * (see waitForP2PPeers / performReplication), which indicates it yields a
 * promise/awaitable. The previous code optional-chained methods directly
 * off the unawaited call, which would look the methods up on the Promise
 * object instead of the replicator. Resolve it once here and reuse it.
 *
 * @param harness     test harness whose plugin/settings drive the replicator
 * @param setting     settings used only to detect the remote type
 * @param shouldReset when true, destroy and recreate the remote database
 */
export async function prepareRemote(harness: LiveSyncHarness, setting: ObsidianLiveSyncSettings, shouldReset = false) {
    // P2P has no remote database to initialise — skip
    if (setting.remoteType === RemoteTypes.REMOTE_P2P) return;
    const replicator = await harness.plugin.core.services.replicator.getActiveReplicator();
    if (shouldReset) {
        await delay(1000);
        await replicator?.tryResetRemoteDatabase(harness.plugin.core.settings);
    } else {
        await replicator?.tryCreateRemoteDatabase(harness.plugin.core.settings);
    }
    await replicator?.markRemoteResolved(harness.plugin.core.settings);
    const status = await replicator?.getRemoteStatus(harness.plugin.core.settings);
    console.log("Remote status:", status);
    expect(status).not.toBeFalsy();
}

View File

@@ -0,0 +1,165 @@
/**
* P2P Replication Tests — Download phase (process 2 of 2)
*
* Executed by run-p2p-tests.sh as the second vitest process, after the
* upload phase has completed and the CLI host holds all the data.
*
* Reads the handoff JSON written by the upload phase to know which files
* to verify, then replicates from the CLI host and checks every file.
*/
import { afterAll, beforeAll, beforeEach, describe, expect, it, test } from "vitest";
import { generateHarness, waitForIdle, waitForReady, type LiveSyncHarness } from "../harness/harness";
import {
PREFERRED_SETTING_SELF_HOSTED,
RemoteTypes,
type FilePath,
type ObsidianLiveSyncSettings,
AutoAccepting,
} from "@/lib/src/common/types";
import { DummyFileSourceInisialised, generateBinaryFile, generateFile } from "../utils/dummyfile";
import { defaultFileOption, testFileRead } from "../suite/db_common";
import { delay } from "@/lib/src/common/utils";
import { closeReplication, performReplication } from "./sync_common_p2p";
import { settingBase } from "../suite/variables";
// Test configuration — injected through vitest's env (see vitest.config.p2p.ts).
// Every value has a fallback so the suite can also run with sensible defaults.
const env = (import.meta as any).env;
const ROOM_ID: string = env.P2P_TEST_ROOM_ID ?? "p2p-test-room";
const PASSPHRASE: string = env.P2P_TEST_PASSPHRASE ?? "p2p-test-pass";
const HOST_PEER_NAME: string = env.P2P_TEST_HOST_PEER_NAME ?? "p2p-cli-host";
const RELAY: string = env.P2P_TEST_RELAY ?? "ws://localhost:4000/";
const APP_ID: string = env.P2P_TEST_APP_ID ?? "self-hosted-livesync-vitest-p2p";
// Date.now() suffixes keep peer/vault names unique across repeated runs.
const DOWNLOAD_PEER_NAME: string = env.P2P_TEST_DOWNLOAD_PEER_NAME ?? `p2p-download-${Date.now()}`;
const DOWNLOAD_VAULT_NAME: string = env.P2P_TEST_DOWNLOAD_VAULT_NAME ?? `TestVaultDownload-${Date.now()}`;
// Written by the upload phase; read back here via readHandoff().
const HANDOFF_FILE: string = env.P2P_TEST_HANDOFF_FILE ?? "/tmp/p2p-test-handoff.json";
console.log("[P2P Down] ROOM_ID:", ROOM_ID, "HOST:", HOST_PEER_NAME, "RELAY:", RELAY, "APP_ID:", APP_ID);
console.log("[P2P Down] HANDOFF_FILE:", HANDOFF_FILE);
// Base settings for the P2P remote: end-to-end encrypted, auto-accepting all
// peers (the "~.*" pattern makes acceptance automatic), and targeting the CLI
// host peer on replication.
const p2pSetting: ObsidianLiveSyncSettings = {
    ...settingBase,
    ...PREFERRED_SETTING_SELF_HOSTED,
    showVerboseLog: true,
    remoteType: RemoteTypes.REMOTE_P2P,
    encrypt: true,
    passphrase: PASSPHRASE,
    usePathObfuscation: true,
    P2P_Enabled: true,
    P2P_AppID: APP_ID,
    handleFilenameCaseSensitive: false,
    P2P_AutoAccepting: AutoAccepting.ALL,
    P2P_AutoBroadcast: true,
    P2P_AutoStart: true,
    P2P_passphrase: PASSPHRASE,
    P2P_roomID: ROOM_ID,
    P2P_relays: RELAY,
    P2P_AutoAcceptingPeers: "~.*",
    P2P_SyncOnReplication: HOST_PEER_NAME,
};
const fileOptions = defaultFileOption;
/** Builds the deterministic test filename for a given kind, extension and size. */
const nameFile = (type: string, ext: string, size: number): string =>
    "p2p-cli-test-" + type + "-file-" + size + "." + ext;
/** Reads back the handoff JSON that the upload phase produced. */
async function readHandoff(): Promise<{ fileSizeMd: number[]; fileSizeBins: number[] }> {
    // Imported lazily — @vitest/browser/context only exists inside the browser runner.
    const { commands } = await import("@vitest/browser/context");
    const serialized = await commands.readHandoffFile(HANDOFF_FILE);
    return JSON.parse(serialized);
}
describe("P2P Replication — Download", () => {
    let harnessDownload: LiveSyncHarness;
    // Populated in beforeAll from the handoff JSON written by the upload phase.
    let fileSizeMd: number[] = [];
    let fileSizeBins: number[] = [];
    // Shared P2P settings plus this phase's unique device peer name.
    const downloadSetting: ObsidianLiveSyncSettings = {
        ...p2pSetting,
        P2P_DevicePeerName: DOWNLOAD_PEER_NAME,
    };
    beforeAll(async () => {
        await DummyFileSourceInisialised;
        // Learn which file sizes the upload phase created, so the tests below
        // know exactly what to verify.
        const handoff = await readHandoff();
        fileSizeMd = handoff.fileSizeMd;
        fileSizeBins = handoff.fileSizeBins;
        console.log("[P2P Down] handoff loaded — md sizes:", fileSizeMd, "bin sizes:", fileSizeBins);
        const vaultName = DOWNLOAD_VAULT_NAME;
        console.log(`[P2P Down] BeforeAll - Vault: ${vaultName}`);
        console.log(`[P2P Down] Peer name: ${DOWNLOAD_PEER_NAME}`);
        harnessDownload = await generateHarness(vaultName, downloadSetting);
        await waitForReady(harnessDownload);
        // Replicate twice with idle waits and delays between — presumably to let
        // late-arriving documents settle before assertions run (TODO confirm).
        await performReplication(harnessDownload);
        await waitForIdle(harnessDownload);
        await delay(1000);
        await performReplication(harnessDownload);
        await waitForIdle(harnessDownload);
        await delay(3000);
    });
    beforeEach(async () => {
        // Re-replicate before each test so the local DB reflects the host's state.
        await performReplication(harnessDownload);
        await waitForIdle(harnessDownload);
    });
    afterAll(async () => {
        await closeReplication(harnessDownload);
        await harnessDownload.dispose();
        await delay(1000);
    });
    // --- Harness sanity checks ---
    it("should be instantiated and defined", () => {
        expect(harnessDownload.plugin).toBeDefined();
        expect(harnessDownload.plugin.app).toBe(harnessDownload.app);
    });
    it("should have services initialized", () => {
        expect(harnessDownload.plugin.core.services).toBeDefined();
    });
    it("should have local database initialized", () => {
        expect(harnessDownload.plugin.core.localDatabase).toBeDefined();
        expect(harnessDownload.plugin.core.localDatabase.isReady).toBe(true);
    });
    // --- Verify every fixture the upload phase created ---
    it("should have synchronised the stored file", async () => {
        await testFileRead(harnessDownload, nameFile("store", "md", 0), "Hello, World!", fileOptions);
    });
    it("should have synchronised files with different content", async () => {
        await testFileRead(harnessDownload, nameFile("test-diff-1", "md", 0), "Content A", fileOptions);
        await testFileRead(harnessDownload, nameFile("test-diff-2", "md", 0), "Content B", fileOptions);
        await testFileRead(harnessDownload, nameFile("test-diff-3", "md", 0), "Content C", fileOptions);
    });
    // NOTE: test.each cannot use variables populated in beforeAll, so we use
    // a single it() that iterates over the sizes loaded from the handoff file.
    it("should have synchronised all large md files", async () => {
        for (const size of fileSizeMd) {
            // Regenerate the same deterministic content the upload phase wrote.
            const content = Array.from(generateFile(size)).join("");
            const path = nameFile("large", "md", size);
            const isTooLarge = harnessDownload.plugin.core.services.vault.isFileSizeTooLarge(size);
            if (isTooLarge) {
                // Over-limit files must NOT have been synchronised; getDBEntry
                // is expected to report false for them.
                const entry = await harnessDownload.plugin.core.localDatabase.getDBEntry(path as FilePath);
                expect(entry).toBe(false);
            } else {
                await testFileRead(harnessDownload, path, content, fileOptions);
            }
        }
    });
    it("should have synchronised all binary files", async () => {
        for (const size of fileSizeBins) {
            const path = nameFile("binary", "bin", size);
            const isTooLarge = harnessDownload.plugin.core.services.vault.isFileSizeTooLarge(size);
            if (isTooLarge) {
                const entry = await harnessDownload.plugin.core.localDatabase.getDBEntry(path as FilePath);
                expect(entry).toBe(false);
            } else {
                // Regenerate the same deterministic binary content for comparison.
                const content = new Blob([...generateBinaryFile(size)], { type: "application/octet-stream" });
                await testFileRead(harnessDownload, path, content, fileOptions);
            }
        }
    });
});

View File

@@ -0,0 +1,161 @@
/**
* P2P Replication Tests — Upload phase (process 1 of 2)
*
* Executed by run-p2p-tests.sh as the first vitest process.
* Writes files into the local DB, replicates them to the CLI host,
* then writes a handoff JSON so the download process knows what to verify.
*
* Trystero has module-level global state (occupiedRooms, didInit, etc.)
* that cannot be safely reused across upload→download within the same
* browser process. Running upload and download as separate vitest
* invocations gives each phase a fresh browser context.
*/
import { afterAll, beforeAll, describe, expect, it, test } from "vitest";
import { generateHarness, waitForIdle, waitForReady, type LiveSyncHarness } from "../harness/harness";
import {
PREFERRED_SETTING_SELF_HOSTED,
RemoteTypes,
type ObsidianLiveSyncSettings,
AutoAccepting,
} from "@/lib/src/common/types";
import {
DummyFileSourceInisialised,
FILE_SIZE_BINS,
FILE_SIZE_MD,
generateBinaryFile,
generateFile,
} from "../utils/dummyfile";
import { checkStoredFileInDB, defaultFileOption, testFileWrite } from "../suite/db_common";
import { delay } from "@/lib/src/common/utils";
import { closeReplication, performReplication } from "./sync_common_p2p";
import { settingBase } from "../suite/variables";
// Test configuration — injected through vitest's env (see vitest.config.p2p.ts).
// Every value has a fallback so the suite can also run with sensible defaults.
const env = (import.meta as any).env;
const ROOM_ID: string = env.P2P_TEST_ROOM_ID ?? "p2p-test-room";
const PASSPHRASE: string = env.P2P_TEST_PASSPHRASE ?? "p2p-test-pass";
const HOST_PEER_NAME: string = env.P2P_TEST_HOST_PEER_NAME ?? "p2p-cli-host";
const RELAY: string = env.P2P_TEST_RELAY ?? "ws://localhost:4000/";
const APP_ID: string = env.P2P_TEST_APP_ID ?? "self-hosted-livesync-vitest-p2p";
// Date.now() suffixes keep peer/vault names unique across repeated runs.
const UPLOAD_PEER_NAME: string = env.P2P_TEST_UPLOAD_PEER_NAME ?? `p2p-upload-${Date.now()}`;
const UPLOAD_VAULT_NAME: string = env.P2P_TEST_UPLOAD_VAULT_NAME ?? `TestVaultUpload-${Date.now()}`;
// Path written by run-p2p-tests.sh; the download phase reads it back.
const HANDOFF_FILE: string = env.P2P_TEST_HANDOFF_FILE ?? "/tmp/p2p-test-handoff.json";
console.log("[P2P Up] ROOM_ID:", ROOM_ID, "HOST:", HOST_PEER_NAME, "RELAY:", RELAY, "APP_ID:", APP_ID);
console.log("[P2P Up] HANDOFF_FILE:", HANDOFF_FILE);
// Base settings for the P2P remote: end-to-end encrypted, auto-accepting all
// peers (the "~.*" pattern makes acceptance automatic), and targeting the CLI
// host peer on replication.
const p2pSetting: ObsidianLiveSyncSettings = {
    ...settingBase,
    ...PREFERRED_SETTING_SELF_HOSTED,
    showVerboseLog: true,
    remoteType: RemoteTypes.REMOTE_P2P,
    encrypt: true,
    passphrase: PASSPHRASE,
    usePathObfuscation: true,
    P2P_Enabled: true,
    P2P_AppID: APP_ID,
    handleFilenameCaseSensitive: false,
    P2P_AutoAccepting: AutoAccepting.ALL,
    P2P_AutoBroadcast: true,
    P2P_AutoStart: true,
    P2P_passphrase: PASSPHRASE,
    P2P_roomID: ROOM_ID,
    P2P_relays: RELAY,
    P2P_AutoAcceptingPeers: "~.*",
    P2P_SyncOnReplication: HOST_PEER_NAME,
};
const fileOptions = defaultFileOption;
/** Builds the deterministic test filename for a given kind, extension and size. */
const nameFile = (type: string, ext: string, size: number): string =>
    "p2p-cli-test-" + type + "-file-" + size + "." + ext;
/** Persists the handoff JSON so the download phase knows which files to verify. */
async function writeHandoff() {
    // Imported lazily — @vitest/browser/context only exists inside the browser runner.
    const { commands } = await import("@vitest/browser/context");
    const payload = JSON.stringify({
        fileSizeMd: FILE_SIZE_MD,
        fileSizeBins: FILE_SIZE_BINS,
    });
    await commands.writeHandoffFile(HANDOFF_FILE, payload);
    console.log("[P2P Up] handoff written to", HANDOFF_FILE);
}
describe("P2P Replication — Upload", () => {
    let harnessUpload: LiveSyncHarness;
    // Shared P2P settings plus this phase's unique device peer name.
    const uploadSetting: ObsidianLiveSyncSettings = {
        ...p2pSetting,
        P2P_DevicePeerName: UPLOAD_PEER_NAME,
    };
    beforeAll(async () => {
        await DummyFileSourceInisialised;
        const vaultName = UPLOAD_VAULT_NAME;
        console.log(`[P2P Up] BeforeAll - Vault: ${vaultName}`);
        console.log(`[P2P Up] Peer name: ${UPLOAD_PEER_NAME}`);
        harnessUpload = await generateHarness(vaultName, uploadSetting);
        await waitForReady(harnessUpload);
        expect(harnessUpload.plugin).toBeDefined();
        await waitForIdle(harnessUpload);
    });
    afterAll(async () => {
        await closeReplication(harnessUpload);
        await harnessUpload.dispose();
        // Grace period before the process exits.
        await delay(1000);
    });
    // --- Harness sanity checks ---
    it("should be instantiated and defined", () => {
        expect(harnessUpload.plugin).toBeDefined();
        expect(harnessUpload.plugin.app).toBe(harnessUpload.app);
    });
    it("should have services initialized", () => {
        expect(harnessUpload.plugin.core.services).toBeDefined();
    });
    it("should have local database initialized", () => {
        expect(harnessUpload.plugin.core.localDatabase).toBeDefined();
        expect(harnessUpload.plugin.core.localDatabase.isReady).toBe(true);
    });
    // --- Fixture creation: these writes are what the download phase verifies ---
    it("should create a file", async () => {
        await testFileWrite(harnessUpload, nameFile("store", "md", 0), "Hello, World!", false, fileOptions);
    });
    it("should create several files with different content", async () => {
        await testFileWrite(harnessUpload, nameFile("test-diff-1", "md", 0), "Content A", false, fileOptions);
        await testFileWrite(harnessUpload, nameFile("test-diff-2", "md", 0), "Content B", false, fileOptions);
        await testFileWrite(harnessUpload, nameFile("test-diff-3", "md", 0), "Content C", false, fileOptions);
    });
    test.each(FILE_SIZE_MD)("should create large md file of size %i bytes", async (size) => {
        const content = Array.from(generateFile(size)).join("");
        const path = nameFile("large", "md", size);
        const isTooLarge = harnessUpload.plugin.core.services.vault.isFileSizeTooLarge(size);
        if (isTooLarge) {
            // Over-limit files are intentionally not written; assert trivially
            // so the parametrised case still reports as passed.
            expect(true).toBe(true);
        } else {
            await testFileWrite(harnessUpload, path, content, false, fileOptions);
        }
    });
    test.each(FILE_SIZE_BINS)("should create binary file of size %i bytes", async (size) => {
        const content = new Blob([...generateBinaryFile(size)], { type: "application/octet-stream" });
        const path = nameFile("binary", "bin", size);
        await testFileWrite(harnessUpload, path, content, true, fileOptions);
        const isTooLarge = harnessUpload.plugin.core.services.vault.isFileSizeTooLarge(size);
        if (!isTooLarge) {
            // Only files under the size limit are expected to land in the local DB.
            await checkStoredFileInDB(harnessUpload, path, content, fileOptions);
        }
    });
    it("should replicate uploads to CLI host", async () => {
        // Two passes — presumably the second flushes anything still queued
        // after the first (TODO confirm against replicator behaviour).
        await performReplication(harnessUpload);
        await performReplication(harnessUpload);
    });
    it("should write handoff file for download phase", async () => {
        await writeHandoff();
    });
});

View File

@@ -3,34 +3,69 @@ Since 19th July, 2025 (beta1 in 0.25.0-beta1, 13th July, 2025)
The head note of 0.25 is now in [updates_old.md](https://github.com/vrtmrz/obsidian-livesync/blob/main/updates_old.md). Because 0.25 got a lot of updates, thankfully, compatibility is kept and we do not need breaking changes! In other words, once things are stable enough, the next version will be v1.0.0. At least, that is my hope.
## 0.25.52-patched-2
## 0.25.54
14th March, 2026
18th March, 2026
### Fixed
- No longer unexpected `Unhandled Rejections` during P2P operations (waiting acceptance).
- Fixed an issue where conflicts cannot be resolved in Journal Sync
### CLI new features
- `mirror` command has been added to the CLI. This command is intended to mirror the storage to the local database.
- `p2p-sync`, `p2p-peers`, and `p2p-host` commands have been added to the CLI. These commands are intended for P2P synchronisation.
- Yes, no more need for a [LiveSync PeerServer](https://github.com/vrtmrz/livesync-serverpeer) for virtual environments! The CLI can handle it by itself.
## 0.25.52-patched-1
12th March, 2026
### Fixed
- Fixed Journal Sync had not been working on some timing, due to a compatibility issue (for a long time).
- ServiceFileAccessBase now correctly handles the reading of binary files.
- HeadlessAPIService now correctly provides the online status (always online) to the plug-in.
- Non-worker version of bgWorker now correctly handles some functions.
- Remote storage size check now works correctly again (#818).
- Some buttons on the settings dialogue now respond correctly again (#827).
### Refactored
- P2P replicator has been refactored to be a little more robust and easier to understand.
- Delete items which are no longer used that might cause potential problems
### CLI
- Fixed the corrupted display of the help message.
- Remove some unnecessary code.
### WebApp
- Fixed the issue where the detail level was not being applied in the log pane.
- Pop-ups are now shown.
- Add coverage for the test.
- Pop-ups are now shown in the web app as well.
## 0.25.53
17th March, 2026
I did wonder whether I should have released a minor version update, but when I actually tested it, compatibility seemed to be intact, so I didn't. Hmm.
### Fixed
#### P2P Synchronisation
- Fixed flaky timing issues in P2P synchronisation.
- No longer unexpected `Unhandled Rejections` during P2P operations (waiting for acceptance).
#### Journal Sync
- Fixed an issue where some conflicts cannot be resolved in Journal Sync.
- Many minor fixes have been made for better stability and reliability.
### Tests
- Rewrite P2P end-to-end tests to use the CLI as a host.
### CLI
We have previously developed FileSystem LiveSync and various other components in a separate repository, but updates have been significantly delayed, and we have been plagued by compatibility issues. Now, a CLI tool using the same core logic is emerging. This does not directly manipulate the file system, but it offers a more convenient way of working and can also communicate with Object Storage. We can also resolve conflicts. Please refer to the code in `src/apps/cli` for the [self-hosted-livesync-cli](./src/apps/cli/README.md) for more details.
- Add `self-hosted-livesync-cli` to `src/apps/cli` as a headless and dedicated version.
- P2P sync and Object Storage are also supported in the CLI.
- Yes, we have finally managed to 'get one file'.
- Also, no more need for a [LiveSync PeerServer](https://github.com/vrtmrz/livesync-serverpeer) for virtual environments! The CLI can do it.
- Now binary files are also supported in the CLI.
### Refactored or internal changes
- ServiceFileAccessBase now correctly handles the reading of binary files.
- HeadlessAPIService now correctly provides the online status (always online) to the plug-in.
- Non-worker version of bgWorker now correctly handles some functions.
- Separated `ObsidianLiveSyncPlugin` into `ObsidianLiveSyncPlugin` and `LiveSyncBaseCore`.
- Now `LiveSyncCore` indicates the type specified version of `LiveSyncBaseCore`.
- Referencing `plugin.xxx` has been rewritten to referencing the corresponding service or `core.xxx`.
@@ -39,22 +74,9 @@ The head note of 0.25 is now in [updates_old.md](https://github.com/vrtmrz/obsid
- ControlService now provides the readiness for processing operations.
- DatabaseService is now able to modify database opening options on derived classes.
- Now `useOfflineScanner`, `useCheckRemoteSize`, and `useRedFlagFeatures` are set from `main.ts`, instead of `LiveSyncBaseCore`.
### Internal API changes
- Storage Access APIs are now yielding Promises. This is to allow more limited storage platforms to be supported.
- Journal Replicator now yields true after the replication is done.
### CLI
We have previously developed FileSystem LiveSync and various other components in a separate repository, but updates have been significantly delayed, and we have been plagued by compatibility issues. Now, a CLI tool using the same core logic is emerging. This does not directly manipulate the file system, but it offers a more convenient way of working and can also communicate with Object Storage. We can also resolve conflicts. Please refer to the code in `src/apps/cli` for the [self-hosted-livesync-cli](./src/apps/cli/README.md) for more details.
- Add `self-hosted-livesync-cli` to `src/apps/cli` as a headless and dedicated version.
- Add more tests.
- Object Storage support has also been confirmed (and fixed) in CLI.
- Yes, we have finally managed to 'get one file'.
- Now binary files are also supported in the CLI.
### R&D
- Browser-version of Self-hosted LiveSync is now in development. This is not intended for public use now, but I will eventually make it available for testing.
@@ -257,67 +279,5 @@ This release is identical to 0.25.41-patched-3, except for the version number.
- Migrated from the outdated, unstable platform abstraction layer to services.
- A bit more services will be added in the future for better maintainability.
## 0.25.41
24th January, 2026
### Fixed
- No longer `No available splitter for settings!!` errors occur after fetching old remote settings while rebuilding local database. (#748)
### Improved
- Boot sequence warning is now kept in the in-editor notification area.
### New feature
- We can now set the maximum modified time for reflect events in the settings. (for #754)
- This setting can be configured from `Patches` -> `Remediation` in the settings dialogue.
- Enabling this setting will restrict the propagation from the database to storage to only those changes made before the specified date and time.
- This feature is primarily intended for recovery purposes. After placing `redflag.md` in an empty vault and importing the Self-hosted LiveSync configuration, please perform this configuration, and then fetch the local database from the remote.
- This feature is useful when we want to prevent recent unwanted changes from being reflected in the local storage.
### Refactored
- Module to service refactoring has been started for better maintainability:
- UI module has been moved to UI service.
### Behaviour change
- Default chunk splitter version has been changed to `Rabin-Karp` for new installations.
## 0.25.40
23rd January, 2026
### Fixed
- Fixed an issue where some events were not triggered correctly after the refactoring in 0.25.39.
## 0.25.39
23rd January, 2026
Also no behaviour changes or fixes in this release. Just refactoring for better maintainability. Thank you for your patience! I will address some of the reported issues soon.
However, this is not a minor refactoring, so please be careful. Let me know if you find any unexpected behaviour after this update.
### Refactored
- Rewrite the service's binding/handler assignment systems
- Removed loopholes that allowed traversal between services to clarify dependencies.
- Consolidated the hidden state-related state, the handler, and the addition of bindings to the handler into a single object.
- Currently, functions that can have handlers added implement either addHandler or setHandler directly on the function itself.
I understand there are differing opinions on this, but for now, this is how it stands.
- Services now possess a Context. Please ensure each platform has a class that inherits from ServiceContext.
- To permit services to be dynamically bound, the services themselves are now defined by interfaces.
## 0.25.38
17th January, 2026
### Fixed
- Fixed an issue where indexedDB would not close correctly on some environments, causing unexpected errors during database operations.
Full notes are in
[updates_old.md](https://github.com/vrtmrz/obsidian-livesync/blob/main/updates_old.md).

View File

@@ -3,6 +3,177 @@ Since 19th July, 2025 (beta1 in 0.25.0-beta1, 13th July, 2025)
The head note of 0.25 is now in [updates_old.md](https://github.com/vrtmrz/obsidian-livesync/blob/main/updates_old.md). Because 0.25 got a lot of updates, thankfully, compatibility is kept and we do not need breaking changes! In other words, once things are stable enough, the next version will be v1.0.0. At least, that is my hope.
## 0.25.53
17th March, 2026
I did wonder whether I should have released a minor version update, but when I actually tested it, compatibility seemed to be intact, so I didn't. Hmm.
### Fixed
#### P2P Synchronisation
- Fixed flaky timing issues in P2P synchronisation.
- No longer unexpected `Unhandled Rejections` during P2P operations (waiting for acceptance).
#### Journal Sync
- Fixed an issue where some conflicts cannot be resolved in Journal Sync.
- Many minor fixes have been made for better stability and reliability.
### Tests
- Rewrite P2P end-to-end tests to use the CLI as a host.
### CLI
We have previously developed FileSystem LiveSync and various other components in a separate repository, but updates have been significantly delayed, and we have been plagued by compatibility issues. Now, a CLI tool using the same core logic is emerging. This does not directly manipulate the file system, but it offers a more convenient way of working and can also communicate with Object Storage. We can also resolve conflicts. Please refer to the code in `src/apps/cli` for the [self-hosted-livesync-cli](./src/apps/cli/README.md) for more details.
- Add `self-hosted-livesync-cli` to `src/apps/cli` as a headless and dedicated version.
- P2P sync and Object Storage are also supported in the CLI.
- Yes, we have finally managed to 'get one file'.
- Also, no more need for a [LiveSync PeerServer](https://github.com/vrtmrz/livesync-serverpeer) for virtual environments! The CLI can do it.
- Now binary files are also supported in the CLI.
### Refactored or internal changes
- ServiceFileAccessBase now correctly handles the reading of binary files.
- HeadlessAPIService now correctly provides the online status (always online) to the plug-in.
- Non-worker version of bgWorker now correctly handles some functions.
- Separated `ObsidianLiveSyncPlugin` into `ObsidianLiveSyncPlugin` and `LiveSyncBaseCore`.
- Now `LiveSyncCore` indicates the type specified version of `LiveSyncBaseCore`.
- Referencing `plugin.xxx` has been rewritten to referencing the corresponding service or `core.xxx`.
- Offline change scanner and the local database preparation have been separated.
- Set default priority for processFileEvent and processSynchroniseResult for the place to add hooks.
- ControlService now provides the readiness for processing operations.
- DatabaseService is now able to modify database opening options on derived classes.
- Now `useOfflineScanner`, `useCheckRemoteSize`, and `useRedFlagFeatures` are set from `main.ts`, instead of `LiveSyncBaseCore`.
- Storage Access APIs are now yielding Promises. This is to allow more limited storage platforms to be supported.
- Journal Replicator now yields true after the replication is done.
### R&D
- Browser-version of Self-hosted LiveSync is now in development. This is not intended for public use now, but I will eventually make it available for testing.
- We can see the code in `src/apps/webapp` for the browser version.
## 0.25.52-patched-3
16th March, 2026
### Fixed
- Fixed flaky timing issues in P2P synchronisation.
- Fixed more binary file handling issues in CLI.
### Tests
- Rewrite P2P end-to-end tests to use the CLI as host.
## 0.25.52-patched-2
14th March, 2026
### Fixed
- No longer unexpected `Unhandled Rejections` during P2P operations (waiting acceptance).
- Fixed an issue where conflicts cannot be resolved in Journal Sync
### CLI new features
- `mirror` command has been added to the CLI. This command is intended to mirror the storage to the local database.
- `p2p-sync`, `p2p-peers`, and `p2p-host` commands have been added to the CLI. These commands are intended for P2P synchronisation.
- Yes, no more need for a [LiveSync PeerServer](https://github.com/vrtmrz/livesync-serverpeer) for virtual environments! The CLI can handle it by itself.
## 0.25.52-patched-1
12th March, 2026
### Fixed
- Fixed Journal Sync had not been working on some timing, due to a compatibility issue (for a long time).
- ServiceFileAccessBase now correctly handles the reading of binary files.
- HeadlessAPIService now correctly provides the online status (always online) to the plug-in.
- Non-worker version of bgWorker now correctly handles some functions.
### Refactored
- Separated `ObsidianLiveSyncPlugin` into `ObsidianLiveSyncPlugin` and `LiveSyncBaseCore`.
- Now `LiveSyncCore` indicates the type specified version of `LiveSyncBaseCore`.
- Referencing `plugin.xxx` has been rewritten to referencing the corresponding service or `core.xxx`.
- Offline change scanner and the local database preparation have been separated.
- Set default priority for processFileEvent and processSynchroniseResult for the place to add hooks.
- ControlService now provides the readiness for processing operations.
- DatabaseService is now able to modify database opening options on derived classes.
- Now `useOfflineScanner`, `useCheckRemoteSize`, and `useRedFlagFeatures` are set from `main.ts`, instead of `LiveSyncBaseCore`.
### Internal API changes
- Storage Access APIs are now yielding Promises. This is to allow more limited storage platforms to be supported.
- Journal Replicator now yields true after the replication is done.
### CLI
We have previously developed FileSystem LiveSync and various other components in a separate repository, but updates have been significantly delayed, and we have been plagued by compatibility issues. Now, a CLI tool using the same core logic is emerging. This does not directly manipulate the file system, but it offers a more convenient way of working and can also communicate with Object Storage. We can also resolve conflicts. Please refer to the code in `src/apps/cli` for the [self-hosted-livesync-cli](./src/apps/cli/README.md) for more details.
- Add `self-hosted-livesync-cli` to `src/apps/cli` as a headless and dedicated version.
- Add more tests.
- Object Storage support has also been confirmed (and fixed) in CLI.
- Yes, we have finally managed to 'get one file'.
- Now binary files are also supported in the CLI.
### R&D
- Browser-version of Self-hosted LiveSync is now in development. This is not intended for public use now, but I will eventually make it available for testing.
- We can see the code in `src/apps/webapp` for the browser version.
## 0.25.52
9th March, 2026
Excuses: Too much `I`.
Whilst I had a fever, I could not figure it out at all, but once I felt better, I spotted the problem in about thirty seconds. I apologise for causing you concern. I am grateful for your patience.
I would like to devise a mechanism for running simple test scenarios. Now that we have got the Obsidian CLI up and running, it seems the perfect opportunity.
To improve the bus factor, we really need to organise the source code more thoroughly. Your cooperation and contributions would be greatly appreciated.
### Fixed
- No longer unexpected deletion-propagation occurs when the parent directory is not empty (#813).
### Revert reversions
- Reverted the reversion of ModuleCheckRemoteSize. Now it is back to the service feature.
## 0.25.51
7th March, 2026
### Reverted
- Reverted to ModuleRedFlag and ModuleInitializerFile to the previous version because of some unexpected issues. (#813)
- I will re-implement them in the future with better design and tests.
## 0.25.50
3rd March, 2026
Note: 0.25.49 has been skipped because of too verbose logging (credentials are logged in verbose level, but I realised that could lead to unexpected exposure on issue reporting). Please bump to 0.25.50 to get the fix if you are on 0.25.49. (No expected behaviour changes except the logging).
### Fixed
- No longer deleted files are not clickable in the Global History pane.
- Diff view now uses more specific classes (#803).
- A message of configuration mismatching slightly added for better understanding.
- Now it says `When replication is initiated manually via the command palette or ribbon, a dialogue box will open to address this.` to make it clear that the user can fix the issue by themselves.
### Refactored
- `ModuleRedFlag` has been refactored to `serviceFeatures/redFlag` and also tested.
- `ModuleInitializerFile` has been refactored to `lib/serviceFeatures/offlineScanner` and also tested.
## 0.25.48
2nd March, 2026

82
vitest.config.p2p.ts Normal file
View File

@@ -0,0 +1,82 @@
import { defineConfig, mergeConfig } from "vitest/config";
import { playwright } from "@vitest/browser-playwright";
import viteConfig from "./vitest.config.common";
import path from "path";
import dotenv from "dotenv";
import { grantClipboardPermissions, writeHandoffFile, readHandoffFile } from "./test/lib/commands";
// Load base and test-specific dotenv files; `.parsed` is undefined when a file
// is missing, which Object.assign tolerates below.
const defEnv = dotenv.config({ path: ".env" }).parsed;
const testEnv = dotenv.config({ path: ".test.env" }).parsed;
// Merge: dotenv files < process.env (so shell-injected vars like P2P_TEST_* take precedence)
const P2P_OVERRIDE_KEYS = [
    "P2P_TEST_ROOM_ID",
    "P2P_TEST_PASSPHRASE",
    "P2P_TEST_HOST_PEER_NAME",
    "P2P_TEST_RELAY",
    "P2P_TEST_APP_ID",
    "P2P_TEST_HANDOFF_FILE",
] as const;
const p2pEnv: Record<string, string> = {};
for (const key of P2P_OVERRIDE_KEYS) {
    // Only copy variables that are actually set (and non-empty) in the shell,
    // matching the original per-variable `if (process.env.X)` guards.
    const value = process.env[key];
    if (value) p2pEnv[key] = value;
}
const env = Object.assign({}, defEnv, testEnv, p2pEnv);
// ENABLE_DEBUGGER / ENABLE_UI are opt-in flags; anything but the literal
// string "true" leaves them off.
const debuggerEnabled = env?.ENABLE_DEBUGGER === "true";
const enableUI = env?.ENABLE_UI === "true";
// Run headless unless a human needs to watch (debugger or UI mode).
const headless = !debuggerEnabled && !enableUI;
// Vitest configuration for the CLI-hosted P2P end-to-end suite.
// Extends the shared common config with a Chromium browser runner, long
// timeouts for peer negotiation, and a mocked Obsidian API surface.
export default mergeConfig(
    viteConfig,
    defineConfig({
        resolve: {
            alias: {
                // Tests run outside Obsidian; route the `obsidian` import to the mock harness.
                obsidian: path.resolve(__dirname, "./test/harness/obsidian-mock.ts"),
            },
        },
        test: {
            env: env,
            // P2P hand-shaking and replication are slow; allow up to 4 minutes.
            testTimeout: 240000,
            hookTimeout: 240000,
            // Suites share relay/room state, so files must run one at a time.
            fileParallelism: false,
            isolate: true,
            watch: false,
            // Run all CLI-host P2P test files (*.p2p.test.ts, *.p2p-up.test.ts, *.p2p-down.test.ts)
            include: ["test/suitep2p/**/*.p2p*.test.ts"],
            browser: {
                isolate: true,
                // Only grantClipboardPermissions is needed; no openWebPeer/acceptWebPeer
                commands: {
                    grantClipboardPermissions,
                    writeHandoffFile,
                    readHandoffFile,
                },
                provider: playwright({
                    launchOptions: {
                        // Relax security so local insecure relay endpoints and
                        // cross-origin peers work; expose GC for memory checks.
                        args: [
                            "--js-flags=--expose-gc",
                            "--allow-insecure-localhost",
                            "--disable-web-security",
                            "--ignore-certificate-errors",
                        ],
                    },
                }),
                enabled: true,
                screenshotFailures: false,
                instances: [
                    {
                        execArgv: ["--js-flags=--expose-gc"],
                        browser: "chromium",
                        headless,
                        isolate: true,
                        // Attach the inspector only in debugger mode, and pause until it connects.
                        inspector: debuggerEnabled ? { waitForDebugger: true, enabled: true } : undefined,
                        printConsoleTrace: true,
                        // Suppress the known Chromium PeerConnection-cap error that
                        // surfaces during rapid reconnect cycles; returning false
                        // tells Vitest not to treat it as a failure. Any other
                        // unhandled error falls through to default handling.
                        onUnhandledError(error) {
                            const msg = error.message || "";
                            if (msg.includes("Cannot create so many PeerConnections")) {
                                return false;
                            }
                        },
                    },
                ],
                headless,
                fileParallelism: false,
                // Show the browser UI when debugging or when explicitly requested.
                // (Was `debuggerEnabled || enableUI ? true : false` — the redundant
                // ternary is dropped; the boolean expression is already a boolean.)
                ui: debuggerEnabled || enableUI,
            },
        },
    })
);