Compare commits

...

50 Commits

Author SHA1 Message Date
vorotamoroz
91c9746886 Merge pull request #900 from vrtmrz/v0_25_62
Releasing v0.25.62
2026-05-14 20:15:02 +09:00
vorotamoroz
75b44b1636 bump 2026-05-14 09:40:12 +00:00
vorotamoroz
f1fe48c1ee add version-bump 2026-05-14 09:35:53 +00:00
vorotamoroz
437e7c0d9c Fixed an issue where a connection could not be established when attempting to connect to a brand-new remote database without going through the set-up wizard or configuration checking 2026-05-14 09:32:34 +00:00
vorotamoroz
5ffa7ec7ee Merge pull request #897 from vrtmrz/v0_25_61
Releasing v0.25.61
2026-05-13 23:06:57 +09:00
vorotamoroz
a1859f5d2e bump 2026-05-13 14:44:38 +01:00
vorotamoroz
785af8cb8f Merge pull request #896 from vrtmrz/address_community_review
Address community review
2026-05-13 22:29:37 +09:00
vorotamoroz
06e1f4aa4a Update subrepo pointer 2026-05-13 14:25:49 +01:00
vorotamoroz
767f22ce9c Merge branch 'address_community_review' of https://github.com/vrtmrz/obsidian-livesync into address_community_review 2026-05-13 14:16:51 +01:00
vorotamoroz
6a9bba702c chore: ran prettier 2026-05-13 14:10:56 +01:00
vorotamoroz
de2397dc3f Adding a rough DI 2026-05-13 14:10:55 +01:00
vorotamoroz
daaad9212e Fix package-lock 2026-05-13 14:10:54 +01:00
vorotamoroz
a6891374a1 chore: Package modernise, update linter 2026-05-13 14:10:01 +01:00
vorotamoroz
b1cadf0549 prettify 2026-05-13 14:07:58 +01:00
vorotamoroz
95f40cc954 (chore): removing DOM Operation 2026-05-13 14:07:58 +01:00
vorotamoroz
8deaf123d6 Update eslint config to ignore file,
fix some type error on LiveSyncBaseCore
2026-05-13 14:06:51 +01:00
vorotamoroz
053813bffb Update for review once 2026-05-13 14:06:51 +01:00
vorotamoroz
cc7af03618 chore: Package modernise, update linter 2026-05-13 14:06:51 +01:00
vorotamoroz
a130e3700e prettify 2026-05-13 14:06:50 +01:00
vorotamoroz
0549e901b2 (chore): removing DOM Operation 2026-05-13 14:06:49 +01:00
vorotamoroz
e9afe06968 Merge pull request #895 from vrtmrz/update_lib
Update lib to fix P2P problems and merging contributions
2026-05-13 22:01:17 +09:00
vorotamoroz
c45aca4794 fixed: fixed subrepo pointer
I thought I’d rebased it, but it turns out everything had been merged.
2026-05-13 13:45:09 +01:00
vorotamoroz
e2c54aaf43 Update lib to fix P2P problems 2026-05-13 13:31:11 +01:00
vorotamoroz
37715d4c9f chore: ran prettier 2026-05-13 11:12:40 +00:00
vorotamoroz
106367fa41 Adding a rough DI 2026-05-13 11:09:04 +00:00
vorotamoroz
538130aa91 Fix package-lock 2026-05-13 11:36:01 +01:00
vorotamoroz
c9d0357fec Merge branch 'address_community_review' of https://github.com/vrtmrz/obsidian-livesync into address_community_review 2026-05-13 11:35:01 +01:00
vorotamoroz
d05c76da36 Update eslint config to ignore file,
fix some type error on LiveSyncBaseCore
2026-05-13 11:33:46 +01:00
vorotamoroz
d2eb6ecbaf Update for review once 2026-05-13 11:33:46 +01:00
vorotamoroz
25a6fde212 chore: Package modernise, update linter 2026-05-13 11:33:45 +01:00
vorotamoroz
e8f8b680ef prettify 2026-05-13 11:33:03 +01:00
vorotamoroz
6c30f2b863 (chore): removing DOM Operation 2026-05-13 11:33:03 +01:00
vorotamoroz
8dda24a689 Merge pull request #882 from vrtmrz/p2p-rpc
feat: use new p2p-rpc wrapper
2026-05-13 19:24:26 +09:00
vorotamoroz
fbbb63906a Merge branch 'main' into p2p-rpc 2026-05-13 19:22:03 +09:00
vorotamoroz
1e66a7f144 Merge pull request #894 from vrtmrz/fix_unexpected_error_on_startup
fixed: fixed unexpected error during startup
2026-05-13 19:16:18 +09:00
vorotamoroz
df79d81475 fixed: fixed unexpected error during startup 2026-05-13 10:14:47 +00:00
vorotamoroz
ad71355859 Merge pull request #893 from brian-spackman/fix-fractional-mtime-on-linux
fix: truncate sub-millisecond CLI mtimes to prevent mobile crash
2026-05-13 19:12:56 +09:00
vorotamoroz
95dc079fad Merge pull request #843 from andrewleech/daemon-sync
cli: implement continuous sync daemon mode
2026-05-13 18:53:59 +09:00
vorotamoroz
770d4af4a0 Update eslint config to ignore file,
fix some type error on LiveSyncBaseCore
2026-05-13 10:15:45 +01:00
vorotamoroz
3b311248cb Update for review once 2026-05-13 08:02:50 +01:00
Andrew Leech
67996f6d0a cli: fix stale stat.size in NodeVaultAdapter causing corrupted file errors
chokidar stats are captured at poll time and may not reflect the file's
final byte length by the time vault.read() is called. The downstream
integrity check compares stat.size to content length; a mismatch causes
other LiveSync clients to reject the file as corrupted.

Fix by updating file.stat.size from the actual content in read() and
readBinary().

Co-authored-by: Joysimple <Joysimple@users.noreply.github.com>
2026-05-13 16:56:08 +10:00
vorotamoroz
5772811a45 chore: Package modernise, update linter 2026-05-13 04:40:32 +01:00
vorotamoroz
55529cd71e prettify 2026-05-13 03:58:08 +01:00
vorotamoroz
2e9b8b7b62 (chore): removing DOM Operation 2026-05-13 03:55:11 +01:00
Andrew Leech
4ab2e41d18 cli daemon: set disableCheckingConfigMismatch for headless operation
The config mismatch dialog's defaultAction is "Dismiss" which blocks
replication. Since the daemon cannot resolve mismatches interactively,
skip the check entirely and accept the remote configuration as-is.
2026-05-13 11:21:06 +10:00
Andrew Leech
c0ad8ee15a cli: add configurable ignore rules and deployment artifacts
IgnoreRules (src/apps/cli/serviceModules/IgnoreRules.ts):
- Reads .livesync/ignore for user-defined glob patterns
- Applies gitignore matchBase semantics: patterns without / get **/ prefix,
  patterns ending with / get ** appended for directory contents
- Supports `import: .gitignore` directive to merge gitignore patterns
- Rejects negation patterns with a warning (not fully supportable)
- Integrated into both daemon and mirror commands via isTargetFile handler

Wiring:
- IgnoreRules loaded before LiveSyncBaseCore construction so beginWatch()
  receives rules when it fires during onLoad/onFirstInitialise
- Passed through initialiseServiceModulesCLI -> StorageEventManagerCLI ->
  CLIStorageEventManagerAdapter -> CLIWatchAdapter

Deployment:
- src/apps/cli/deploy/livesync-cli.service - systemd unit template
- src/apps/cli/deploy/install.sh - user/system install script

Testing:
- src/apps/cli/test/test-daemon-linux.sh - e2e tests for ignore rules
- src/apps/cli/serviceModules/IgnoreRules.unit.spec.ts - 15 unit tests
- src/apps/cli/commands/daemonCommand.unit.spec.ts - 7 unit tests
2026-05-13 11:21:06 +10:00
Andrew Leech
e6ae516493 cli: implement local→CouchDB file watching via chokidar
- Add chokidar ^4.0.0 as dependency (root package.json, runtime-package.json)
- Mark chokidar as external in vite.config.ts (not bundled, loaded at runtime)
- Implement CLIWatchAdapter.beginWatch() with chokidar:
  - ignoreInitial: true (startup files handled by mirror scan)
  - awaitWriteFinish to prevent partial-write events
  - Excludes dotfiles and .livesync/ directory at watcher level
  - Maps add/change/unlink/addDir/unlinkDir to IStorageEventWatchHandlers
  - Fatal error handler: logs clearly and releases watcher resources
- Add close() to CLIWatchAdapter, StorageEventManagerCLI for clean shutdown
- Register onUnload hook in CLIServiceModules to close watcher on shutdown
2026-05-13 11:21:06 +10:00
Andrew Leech
a4d5ef4620 cli: implement daemon startup sequence and CouchDB→local sync
- Add daemon command to help text and --interval/-i flag for polling mode
- Capture original sync settings before suspendAllSync() clobbers them
- Implement daemon startup: mirror scan → restore settings → applySettings()
  which triggers the full suspend/resume lifecycle and starts the _changes feed
- Guard processSynchroniseResult no-op to non-daemon commands so default
  handler writes incoming CouchDB changes to the local filesystem
- Polling mode: restore settings + clearInterval-safe try/catch error handling
- Warn when both liveSync and syncOnStart are false after restore (no-op config)
- Fix: only block indefinitely if daemon startup succeeded
2026-05-13 11:21:06 +10:00
Brian Spackman
3f7bb047ac fix: floor sub-millisecond CLI mtimes to prevent mobile crash
On Linux, fs.Stats.mtimeMs and ctimeMs return floats with sub-millisecond
precision derived from the kernel's nanosecond filesystem mtime. Stored
raw, this produces document timestamps like 1778511180024.462 in CouchDB
rather than integer milliseconds.

Mobile clients running LiveSync 0.25.60 have been observed to crash when
processing change-feed updates carrying non-integer millisecond timestamps
from CLI-written documents. Desktop and mobile GUI plugins write integer
milliseconds, so the crash only manifests when the headless CLI on Linux
is the source. Whether the issue was introduced in 0.25.60 or had been
latent in earlier versions hasn't been investigated; 0.25.60 is the
version where the crash was confirmed and the fix verified.

Floor the values at every stat-read site (six across three adapters and
one command) so CLI-written documents carry integer-millisecond
timestamps consistent with the rest of the mesh.
2026-05-12 18:00:25 -06:00
vorotamoroz
a6be20695a feat: use new p2p-rpc wrapper 2026-05-11 03:49:35 +01:00
46 changed files with 3372 additions and 397 deletions

View File

@@ -2,7 +2,6 @@
import esbuild from "esbuild"; import esbuild from "esbuild";
import process from "process"; import process from "process";
import builtins from "builtin-modules";
import sveltePlugin from "esbuild-svelte"; import sveltePlugin from "esbuild-svelte";
import { sveltePreprocess } from "svelte-preprocess"; import { sveltePreprocess } from "svelte-preprocess";
import fs from "node:fs"; import fs from "node:fs";

View File

@@ -1,103 +1,83 @@
import typescriptEslint from "@typescript-eslint/eslint-plugin";
import svelte from "eslint-plugin-svelte";
import _import from "eslint-plugin-import";
import { fixupPluginRules } from "@eslint/compat";
import tsParser from "@typescript-eslint/parser"; import tsParser from "@typescript-eslint/parser";
import path from "node:path"; import obsidianmd from "eslint-plugin-obsidianmd";
import { fileURLToPath } from "node:url"; import globals from "globals";
import js from "@eslint/js"; import { defineConfig, globalIgnores } from "eslint/config";
import { FlatCompat } from "@eslint/eslintrc"; import * as sveltePlugin from "eslint-plugin-svelte";
const __filename = fileURLToPath(import.meta.url); export default defineConfig([
const __dirname = path.dirname(__filename); globalIgnores([
const compat = new FlatCompat({ "**/node_modules/*",
baseDirectory: __dirname, "**/jest.config.js",
recommendedConfig: js.configs.recommended, "src/lib/coverage",
allConfig: js.configs.all, "src/lib/browsertest",
}); "**/test.ts",
"**/tests.ts",
export default [ "**/**test.ts",
"**/**.test.ts",
"**/*.unit.spec.ts",
"**/esbuild.*.mjs",
"**/terser.*.mjs",
"**/node_modules",
"**/build",
"**/.eslintrc.js.bak",
"src/lib/src/patches/pouchdb-utils",
"**/esbuild.config.mjs",
"**/rollup.config.js",
"modules/octagonal-wheels/rollup.config.js",
"modules/octagonal-wheels/dist/**/*",
"src/lib/test",
"src/lib/_tools",
"src/lib/src/cli",
"**/main.js",
"src/apps/**/*",
".prettierrc.*.mjs",
".prettierrc.mjs",
"*.config.mjs",
"src/apps/**/*",
"src/lib/src/services/implements/browser/**",
"src/lib/src/services/implements/headless/**",
"src/lib/src/API",
]),
...sveltePlugin.configs["flat/base"],
...obsidianmd.configs.recommended,
{ {
ignores: [ files: ["**/*.ts"],
"**/node_modules/*",
"**/jest.config.js",
"src/lib/coverage",
"src/lib/browsertest",
"**/test.ts",
"**/tests.ts",
"**/**test.ts",
"**/**.test.ts",
"**/esbuild.*.mjs",
"**/terser.*.mjs",
"**/node_modules",
"**/build",
"**/.eslintrc.js.bak",
"src/lib/src/patches/pouchdb-utils",
"**/esbuild.config.mjs",
"**/rollup.config.js",
"modules/octagonal-wheels/rollup.config.js",
"modules/octagonal-wheels/dist/**/*",
"src/lib/test",
"src/lib/_tools",
"src/lib/src/cli",
"**/main.js",
"src/apps/**/*",
".prettierrc.*.mjs",
".prettierrc.mjs",
"*.config.mjs"
],
},
...compat.extends(
"eslint:recommended",
"plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended"
),
{
plugins: {
"@typescript-eslint": typescriptEslint,
svelte,
import: fixupPluginRules(_import),
},
languageOptions: { languageOptions: {
globals: { ...globals.browser },
parser: tsParser, parser: tsParser,
ecmaVersion: 5,
sourceType: "module",
parserOptions: { parserOptions: {
project: ["tsconfig.json"], project: "./tsconfig.json",
}, },
}, },
rules: { rules: {
"no-unused-vars": "off", "no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": ["error", { args: "none" }],
"@typescript-eslint/no-unused-vars": [
"error",
{
args: "none",
},
],
"no-unused-labels": "off", "no-unused-labels": "off",
"@typescript-eslint/ban-ts-comment": "off", "@typescript-eslint/ban-ts-comment": "off",
"no-prototype-builtins": "off", "no-prototype-builtins": "off",
"@typescript-eslint/no-empty-function": "off", "@typescript-eslint/no-empty-function": "off",
"require-await": "error", "require-await": "error",
"obsidianmd/rule-custom-message": "off", // Temporary
"obsidianmd/ui/sentence-case": "off", // Temporary
"@typescript-eslint/require-await": "warn", "@typescript-eslint/require-await": "warn",
"@typescript-eslint/no-misused-promises": "warn", "@typescript-eslint/no-misused-promises": "warn",
"@typescript-eslint/no-floating-promises": "warn", "@typescript-eslint/no-floating-promises": "warn",
"no-async-promise-executor": "warn", "no-async-promise-executor": "warn",
"@typescript-eslint/no-explicit-any": "off", "@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/no-unnecessary-type-assertion": "error", "@typescript-eslint/no-unnecessary-type-assertion": "error",
"no-constant-condition": ["error", { checkLoops: false }],
"no-constant-condition": [
"error",
{
checkLoops: false,
},
],
}, },
}, },
]; {
files: ["**/*.svelte"],
languageOptions: {
parserOptions: {
parser: tsParser,
},
},
rules: {
"no-unused-vars": ["error", { argsIgnorePattern: "^_", varsIgnorePattern: "^_" }],
"obsidianmd/no-plugin-as-component": "off", // Temporary
},
},
]);

View File

@@ -1,10 +1,10 @@
{ {
"id": "obsidian-livesync", "id": "obsidian-livesync",
"name": "Self-hosted LiveSync", "name": "Self-hosted LiveSync",
"version": "0.25.60", "version": "0.25.62",
"minAppVersion": "0.9.12", "minAppVersion": "1.7.2",
"description": "Community implementation of self-hosted livesync. Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.", "description": "Community implementation of self-hosted livesync. Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.",
"author": "vorotamoroz", "author": "vorotamoroz",
"authorUrl": "https://github.com/vrtmrz", "authorUrl": "https://github.com/vrtmrz",
"isDesktopOnly": false "isDesktopOnly": false
} }

1392
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{ {
"name": "obsidian-livesync", "name": "obsidian-livesync",
"version": "0.25.60", "version": "0.25.62",
"description": "Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.", "description": "Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.",
"main": "main.js", "main": "main.js",
"type": "module", "type": "module",
@@ -54,21 +54,21 @@
"test:docker-all:start": "npm run test:docker-all:up && sleep 5 && npm run test:docker-all:init", "test:docker-all:start": "npm run test:docker-all:up && sleep 5 && npm run test:docker-all:init",
"test:docker-all:stop": "npm run test:docker-all:down", "test:docker-all:stop": "npm run test:docker-all:down",
"test:full": "npm run test:docker-all:start && vitest run --coverage && npm run test:docker-all:stop", "test:full": "npm run test:docker-all:start && vitest run --coverage && npm run test:docker-all:stop",
"test:p2p": "bash test/suitep2p/run-p2p-tests.sh" "test:p2p": "bash test/suitep2p/run-p2p-tests.sh",
"version": "node version-bump.mjs && git add manifest.json versions.json"
}, },
"keywords": [], "keywords": [],
"author": "vorotamoroz", "author": "vorotamoroz",
"license": "MIT", "license": "MIT",
"devDependencies": { "devDependencies": {
"@chialab/esbuild-plugin-worker": "^0.19.0", "@chialab/esbuild-plugin-worker": "^0.19.0",
"@eslint/compat": "^2.0.2",
"@eslint/eslintrc": "^3.3.4",
"@eslint/js": "^9.39.3", "@eslint/js": "^9.39.3",
"@sveltejs/vite-plugin-svelte": "^6.2.4", "@sveltejs/vite-plugin-svelte": "^6.2.4",
"@tsconfig/svelte": "^5.0.8", "@tsconfig/svelte": "^5.0.8",
"@types/deno": "^2.5.0", "@types/deno": "^2.5.0",
"@types/diff-match-patch": "^1.0.36", "@types/diff-match-patch": "^1.0.36",
"@types/markdown-it": "^14.1.2", "@types/markdown-it": "^14.1.2",
"@types/micromatch": "^4.0.10",
"@types/node": "^24.10.13", "@types/node": "^24.10.13",
"@types/pouchdb": "^6.4.2", "@types/pouchdb": "^6.4.2",
"@types/pouchdb-adapter-http": "^6.1.6", "@types/pouchdb-adapter-http": "^6.1.6",
@@ -83,18 +83,15 @@
"@vitest/browser": "^4.1.1", "@vitest/browser": "^4.1.1",
"@vitest/browser-playwright": "^4.1.1", "@vitest/browser-playwright": "^4.1.1",
"@vitest/coverage-v8": "^4.1.1", "@vitest/coverage-v8": "^4.1.1",
"builtin-modules": "5.0.0",
"dotenv": "^17.3.1",
"dotenv-cli": "^11.0.0", "dotenv-cli": "^11.0.0",
"esbuild": "0.25.0", "esbuild": "0.25.0",
"esbuild-plugin-inline-worker": "^0.1.1", "esbuild-plugin-inline-worker": "^0.1.1",
"esbuild-svelte": "^0.9.4", "esbuild-svelte": "^0.9.4",
"eslint": "^9.39.3", "eslint": "^9.39.3",
"eslint-plugin-import": "^2.32.0", "eslint-plugin-obsidianmd": "^0.3.0",
"eslint-plugin-svelte": "^3.15.0", "eslint-plugin-svelte": "^3.15.0",
"events": "^3.3.0", "events": "^3.3.0",
"glob": "^13.0.6", "globals": "^14.0.0",
"obsidian": "^1.12.3",
"playwright": "^1.58.2", "playwright": "^1.58.2",
"postcss": "^8.5.6", "postcss": "^8.5.6",
"postcss-load-config": "^6.0.1", "postcss-load-config": "^6.0.1",
@@ -115,6 +112,7 @@
"svelte-check": "^4.4.3", "svelte-check": "^4.4.3",
"svelte-preprocess": "^6.0.3", "svelte-preprocess": "^6.0.3",
"terser": "^5.39.0", "terser": "^5.39.0",
"tinyglobby": "^0.2.15",
"transform-pouch": "^2.0.0", "transform-pouch": "^2.0.0",
"tslib": "^2.8.1", "tslib": "^2.8.1",
"tsx": "^4.21.0", "tsx": "^4.21.0",
@@ -133,11 +131,14 @@
"@smithy/protocol-http": "^5.3.9", "@smithy/protocol-http": "^5.3.9",
"@smithy/querystring-builder": "^4.2.9", "@smithy/querystring-builder": "^4.2.9",
"@trystero-p2p/nostr": "^0.23.0", "@trystero-p2p/nostr": "^0.23.0",
"chokidar": "^4.0.0",
"commander": "^14.0.3", "commander": "^14.0.3",
"obsidian": "^1.12.3",
"diff-match-patch": "^1.0.5", "diff-match-patch": "^1.0.5",
"fflate": "^0.8.2", "fflate": "^0.8.2",
"idb": "^8.0.3", "idb": "^8.0.3",
"markdown-it": "^14.1.1", "markdown-it": "^14.1.1",
"micromatch": "^4.0.0",
"minimatch": "^10.2.2", "minimatch": "^10.2.2",
"octagonal-wheels": "^0.1.45", "octagonal-wheels": "^0.1.45",
"pouchdb-adapter-leveldb": "^9.0.0", "pouchdb-adapter-leveldb": "^9.0.0",

View File

@@ -1,4 +1,5 @@
import { LOG_LEVEL_INFO } from "octagonal-wheels/common/logger"; import { LOG_LEVEL_INFO } from "octagonal-wheels/common/logger";
import type PouchDB from "pouchdb-core";
import type { SimpleStore } from "octagonal-wheels/databases/SimpleStoreBase"; import type { SimpleStore } from "octagonal-wheels/databases/SimpleStoreBase";
import type { HasSettings, ObsidianLiveSyncSettings, EntryDoc } from "./lib/src/common/types"; import type { HasSettings, ObsidianLiveSyncSettings, EntryDoc } from "./lib/src/common/types";
import { __$checkInstanceBinding } from "./lib/src/dev/checks"; import { __$checkInstanceBinding } from "./lib/src/dev/checks";
@@ -123,7 +124,7 @@ export class LiveSyncBaseCore<
for (const module of this.modules) { for (const module of this.modules) {
if (module.constructor === constructor) return module as T; if (module.constructor === constructor) return module as T;
} }
throw new Error(`Module ${constructor} not found or not loaded.`); throw new Error(`Module ${constructor.name} not found or not loaded.`);
} }
/** /**
@@ -160,8 +161,10 @@ export class LiveSyncBaseCore<
module.onBindFunction(this, this.services); module.onBindFunction(this, this.services);
__$checkInstanceBinding(module); // Check if all functions are properly bound, and log warnings if not. __$checkInstanceBinding(module); // Check if all functions are properly bound, and log warnings if not.
} else { } else {
// module should not be never.
const moduleName = (module as unknown)?.constructor?.name ?? "unknown";
this.services.API.addLog( this.services.API.addLog(
`Module ${(module as any)?.constructor?.name ?? "unknown"} does not have onBindFunction, skipping binding.`, `Module ${moduleName} does not have onBindFunction, skipping binding.`,
LOG_LEVEL_INFO LOG_LEVEL_INFO
); );
} }

View File

@@ -297,9 +297,11 @@ Options:
--force, -f Overwrite existing file on init-settings --force, -f Overwrite existing file on init-settings
--verbose, -v Enable verbose logging --verbose, -v Enable verbose logging
--debug, -d Enable debug logging (includes verbose) --debug, -d Enable debug logging (includes verbose)
--help, -h Show help message --interval <N>, -i <N> (daemon only) Poll CouchDB every N seconds instead of using the _changes feed
--help, -h Show this help message
Commands: Commands:
daemon (default) Run mirror scan then continuously sync CouchDB <-> local filesystem
init-settings [path] Create settings JSON from DEFAULT_SETTINGS init-settings [path] Create settings JSON from DEFAULT_SETTINGS
sync Run one replication cycle and exit sync Run one replication cycle and exit
p2p-peers <timeout> Show discovered peers as [peer]<TAB><peer-id><TAB><peer-name> p2p-peers <timeout> Show discovered peers as [peer]<TAB><peer-id><TAB><peer-name>
@@ -406,6 +408,86 @@ In other words, it performs the following actions:
Note: `mirror` does not respect file deletions. If a file is deleted in storage, it will be restored on the next `mirror` run. To delete a file, use the `rm` command instead. This is a little inconvenient, but it is intentional behaviour (if we handle this automatically in `mirror`, we should be against a ton of edge cases). Note: `mirror` does not respect file deletions. If a file is deleted in storage, it will be restored on the next `mirror` run. To delete a file, use the `rm` command instead. This is a little inconvenient, but it is intentional behaviour (if we handle this automatically in `mirror`, we should be against a ton of edge cases).
##### daemon
`daemon` is the default command when no command is specified. It runs an initial mirror scan and then continuously syncs changes in both directions:
- **CouchDB → local filesystem**: via the `_changes` feed (LiveSync mode, default) or periodic polling (`--interval N`).
- **local filesystem → CouchDB**: via chokidar file watching. Any file created, modified, or deleted in the vault directory is pushed to CouchDB.
In **LiveSync mode** the `_changes` feed delivers remote changes as they arrive, with sub-second latency. In **polling mode** (`--interval N`) the CLI polls CouchDB every N seconds. Use polling mode if your CouchDB instance does not support long-lived HTTP connections, or if you need predictable network usage.
The daemon exits cleanly on `SIGINT` or `SIGTERM`.
```bash
# LiveSync mode (default — _changes feed, near-real-time)
livesync-cli /path/to/vault
# Polling mode — poll every 60 seconds
livesync-cli /path/to/vault --interval 60
```
### .livesync/ignore
Place a `.livesync/ignore` file in your vault root to exclude files from sync in both directions (local → CouchDB and CouchDB → local).
**Format:**
- Lines beginning with `#` are comments.
- Blank lines are ignored.
- All other lines are [minimatch](https://github.com/isaacs/minimatch) glob patterns, relative to the vault root.
- The directive `import: .gitignore` (exactly this string) reads `.gitignore` from the vault root and merges its non-comment, non-blank lines into the ignore rules.
- Negation patterns (lines starting with `!`) are not supported and will cause an error on load.
**Example `.livesync/ignore`:**
```
# Ignore temporary files
*.tmp
*.swp
# Ignore build output
build/
dist/
# Merge patterns from .gitignore
import: .gitignore
```
Patterns apply in both directions: the chokidar watcher will not emit events for matched files, and the `isTargetFile` filter will exclude them from CouchDB → local sync.
Changes to this file require a daemon restart to take effect.
### Systemd Installation
The `deploy/` directory contains a systemd unit template and an install script.
**Automated install (user service, recommended):**
```bash
bash src/apps/cli/deploy/install.sh --vault /path/to/vault
```
**With polling interval:**
```bash
bash src/apps/cli/deploy/install.sh --vault /path/to/vault --interval 60
```
**System-wide install** (requires root / sudo for `/etc/systemd/system/`):
```bash
bash src/apps/cli/deploy/install.sh --system --vault /path/to/vault
```
The script:
1. Builds the CLI (`npm install` + `npm run build`).
2. Installs the binary to `~/.local/bin/livesync-cli` (user) or `/usr/local/bin/livesync-cli` (system).
3. Writes the unit file to `~/.config/systemd/user/livesync-cli.service` (user) or `/etc/systemd/system/livesync-cli.service` (system).
4. Runs `systemctl [--user] daemon-reload && systemctl [--user] enable --now livesync-cli`.
**Manual setup** — if you prefer to manage the unit yourself, copy `deploy/livesync-cli.service`, replace `LIVESYNC_BIN` and `LIVESYNC_VAULT_PATH` with the actual binary path and vault path, then install to the appropriate systemd directory.
### Planned options: ### Planned options:
- `--immediate`: Perform sync after the command (e.g. `push`, `pull`, `put`, `rm`). - `--immediate`: Perform sync after the command (e.g. `push`, `pull`, `put`, `rm`).

View File

@@ -39,12 +39,6 @@ export class NodeFileSystemAdapter implements IFileSystemAdapter<NodeFile, NodeF
async getAbstractFileByPath(p: FilePath | string): Promise<NodeFile | null> { async getAbstractFileByPath(p: FilePath | string): Promise<NodeFile | null> {
const pathStr = this.normalisePath(p); const pathStr = this.normalisePath(p);
const cached = this.fileCache.get(pathStr);
if (cached) {
return cached;
}
return await this.refreshFile(pathStr); return await this.refreshFile(pathStr);
} }
@@ -104,14 +98,15 @@ export class NodeFileSystemAdapter implements IFileSystemAdapter<NodeFile, NodeF
path: pathStr as FilePath, path: pathStr as FilePath,
stat: { stat: {
size: stat.size, size: stat.size,
mtime: stat.mtimeMs, mtime: Math.floor(stat.mtimeMs),
ctime: stat.ctimeMs, ctime: Math.floor(stat.ctimeMs),
type: "file", type: "file",
}, },
}; };
this.fileCache.set(pathStr, file); this.fileCache.set(pathStr, file);
return file; return file;
} catch { } catch {
// Evict so a deleted file is not returned by subsequent cache scans.
this.fileCache.delete(pathStr); this.fileCache.delete(pathStr);
return null; return null;
} }
@@ -137,8 +132,8 @@ export class NodeFileSystemAdapter implements IFileSystemAdapter<NodeFile, NodeF
path: entryRelativePath as FilePath, path: entryRelativePath as FilePath,
stat: { stat: {
size: stat.size, size: stat.size,
mtime: stat.mtimeMs, mtime: Math.floor(stat.mtimeMs),
ctime: stat.ctimeMs, ctime: Math.floor(stat.ctimeMs),
type: "file", type: "file",
}, },
}; };

View File

@@ -28,8 +28,8 @@ export class NodeStorageAdapter implements IStorageAdapter<NodeStat> {
const stat = await fs.stat(this.resolvePath(p)); const stat = await fs.stat(this.resolvePath(p));
return { return {
size: stat.size, size: stat.size,
mtime: stat.mtimeMs, mtime: Math.floor(stat.mtimeMs),
ctime: stat.ctimeMs, ctime: Math.floor(stat.ctimeMs),
type: stat.isDirectory() ? "folder" : "file", type: stat.isDirectory() ? "folder" : "file",
}; };
} catch { } catch {

View File

@@ -15,7 +15,12 @@ export class NodeVaultAdapter implements IVaultAdapter<NodeFile> {
} }
async read(file: NodeFile): Promise<string> { async read(file: NodeFile): Promise<string> {
return await fs.readFile(this.resolvePath(file.path), "utf-8"); const content = await fs.readFile(this.resolvePath(file.path), "utf-8");
// Correct stale stat.size — chokidar stats may be from a poll before the final write.
// The downstream document integrity check compares stat.size to content length, so
// they must agree or other clients reject the file as corrupted.
file.stat.size = Buffer.byteLength(content, "utf-8");
return content;
} }
async cachedRead(file: NodeFile): Promise<string> { async cachedRead(file: NodeFile): Promise<string> {
@@ -25,6 +30,8 @@ export class NodeVaultAdapter implements IVaultAdapter<NodeFile> {
async readBinary(file: NodeFile): Promise<ArrayBuffer> { async readBinary(file: NodeFile): Promise<ArrayBuffer> {
const buffer = await fs.readFile(this.resolvePath(file.path)); const buffer = await fs.readFile(this.resolvePath(file.path));
// Same correction as read() — ensure stat.size matches actual byte length.
file.stat.size = buffer.length;
return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength) as ArrayBuffer; return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength) as ArrayBuffer;
} }
@@ -66,8 +73,8 @@ export class NodeVaultAdapter implements IVaultAdapter<NodeFile> {
path: p as any, path: p as any,
stat: { stat: {
size: stat.size, size: stat.size,
mtime: stat.mtimeMs, mtime: Math.floor(stat.mtimeMs),
ctime: stat.ctimeMs, ctime: Math.floor(stat.ctimeMs),
type: "file", type: "file",
}, },
}; };
@@ -89,8 +96,8 @@ export class NodeVaultAdapter implements IVaultAdapter<NodeFile> {
path: p as any, path: p as any,
stat: { stat: {
size: stat.size, size: stat.size,
mtime: stat.mtimeMs, mtime: Math.floor(stat.mtimeMs),
ctime: stat.ctimeMs, ctime: Math.floor(stat.ctimeMs),
type: "file", type: "file",
}, },
}; };

View File

@@ -0,0 +1,312 @@
import { describe, expect, it, vi, beforeEach, afterEach } from "vitest";
import { runCommand } from "./runCommand";
import type { CLIOptions } from "./types";
// Mock performFullScan so daemon tests don't require a real CouchDB connection.
vi.mock("@lib/serviceFeatures/offlineScanner", () => ({
performFullScan: vi.fn(async () => true),
}));
// Mock UnresolvedErrorManager to avoid event-hub side effects.
vi.mock("@lib/services/base/UnresolvedErrorManager", () => ({
UnresolvedErrorManager: class UnresolvedErrorManager {
showError() {}
clearError() {}
clearErrors() {}
},
}));
import * as offlineScanner from "@lib/serviceFeatures/offlineScanner";
function createCoreMock() {
return {
services: {
control: {
activated: Promise.resolve(),
applySettings: vi.fn(async () => {}),
},
setting: {
applyPartial: vi.fn(async () => {}),
currentSettings: vi.fn(() => ({ liveSync: true, syncOnStart: false })),
},
replication: {
replicate: vi.fn(async () => true),
},
appLifecycle: {
onUnload: {
addHandler: vi.fn(),
},
},
},
serviceModules: {
fileHandler: {
dbToStorage: vi.fn(async () => true),
storeFileToDB: vi.fn(async () => true),
},
storageAccess: {
readFileAuto: vi.fn(async () => ""),
writeFileAuto: vi.fn(async () => {}),
},
databaseFileAccess: {
fetch: vi.fn(async () => undefined),
},
},
} as any;
}
function makeDaemonOptions(interval?: number): CLIOptions {
return {
command: "daemon",
commandArgs: [],
databasePath: "/tmp/vault",
verbose: false,
force: false,
interval,
};
}
const baseContext = {
vaultPath: "/tmp/vault",
settingsPath: "/tmp/vault/.livesync/settings.json",
originalSyncSettings: {
liveSync: true,
syncOnStart: false,
periodicReplication: false,
syncOnSave: false,
syncOnEditorSave: false,
syncOnFileOpen: false,
syncAfterMerge: false,
},
} as any;
// Behaviour of the `daemon` command: startup replication and mirror scan,
// LiveSync vs. polling mode, exponential back-off, and shutdown cleanup.
describe("daemon command", () => {
    beforeEach(() => {
        // Reset all spies/mocks and install fake timers so the polling loop
        // can be stepped deterministically with advanceTimersByTimeAsync.
        vi.restoreAllMocks();
        vi.useFakeTimers();
    });
    afterEach(() => {
        vi.useRealTimers();
    });
    it("calls performFullScan during startup", async () => {
        const core = createCoreMock();
        vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true);
        await runCommand(makeDaemonOptions(), { ...baseContext, core });
        expect(offlineScanner.performFullScan).toHaveBeenCalledTimes(1);
    });
    it("returns false when performFullScan fails", async () => {
        const core = createCoreMock();
        vi.mocked(offlineScanner.performFullScan).mockResolvedValue(false);
        const result = await runCommand(makeDaemonOptions(), { ...baseContext, core });
        expect(result).toBe(false);
    });
    it("polling mode: calls setTimeout when interval option is set", async () => {
        const core = createCoreMock();
        vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true);
        const setTimeoutSpy = vi.spyOn(globalThis, "setTimeout");
        await runCommand(makeDaemonOptions(30), { ...baseContext, core });
        expect(setTimeoutSpy).toHaveBeenCalledTimes(1);
        // Interval should be in milliseconds (30s → 30000ms)
        expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), 30000);
    });
    it("polling mode: applies settings with suspendFileWatching=false before setting interval", async () => {
        const core = createCoreMock();
        vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true);
        await runCommand(makeDaemonOptions(10), { ...baseContext, core });
        expect(core.services.setting.applyPartial).toHaveBeenCalledWith(
            expect.objectContaining({ suspendFileWatching: false }),
            true
        );
        expect(core.services.control.applySettings).toHaveBeenCalledTimes(1);
    });
    it("liveSync mode: calls applyPartial and applySettings", async () => {
        const core = createCoreMock();
        vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true);
        await runCommand(makeDaemonOptions(), { ...baseContext, core });
        // The original (pre-suspend) sync settings must be restored verbatim,
        // with file watching re-enabled.
        expect(core.services.setting.applyPartial).toHaveBeenCalledWith(
            expect.objectContaining({
                ...baseContext.originalSyncSettings,
                suspendFileWatching: false,
            }),
            true
        );
        expect(core.services.control.applySettings).toHaveBeenCalledTimes(1);
    });
    it("liveSync mode: logs warning when both liveSync and syncOnStart are false", async () => {
        const core = createCoreMock();
        core.services.setting.currentSettings = vi.fn(() => ({
            liveSync: false,
            syncOnStart: false,
        }));
        vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true);
        // Daemon logs go through console.error to keep stdout clean.
        const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {});
        const result = await runCommand(makeDaemonOptions(), { ...baseContext, core });
        expect(result).toBe(true);
        const warningCalls = consoleSpy.mock.calls.filter(
            (args) => typeof args[0] === "string" && args[0].includes("liveSync and syncOnStart are both disabled")
        );
        expect(warningCalls.length).toBeGreaterThan(0);
    });
    it("liveSync mode: no warning when liveSync is true", async () => {
        const core = createCoreMock();
        core.services.setting.currentSettings = vi.fn(() => ({
            liveSync: true,
            syncOnStart: false,
        }));
        vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true);
        const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {});
        await runCommand(makeDaemonOptions(), { ...baseContext, core });
        const warningCalls = consoleSpy.mock.calls.filter(
            (args) => typeof args[0] === "string" && args[0].includes("liveSync and syncOnStart are both disabled")
        );
        expect(warningCalls.length).toBe(0);
    });
    it("calls replicate before performFullScan", async () => {
        const core = createCoreMock();
        // Record invocation order: the mirror scan must only run on a
        // database that has already been replicated.
        const callOrder: string[] = [];
        core.services.replication.replicate = vi.fn(async () => {
            callOrder.push("replicate");
            return true;
        });
        vi.mocked(offlineScanner.performFullScan).mockImplementation(async () => {
            callOrder.push("performFullScan");
            return true;
        });
        await runCommand(makeDaemonOptions(), { ...baseContext, core });
        expect(callOrder).toEqual(["replicate", "performFullScan"]);
    });
    it("returns false when initial replication fails", async () => {
        const core = createCoreMock();
        core.services.replication.replicate = vi.fn(async () => false);
        vi.mocked(offlineScanner.performFullScan).mockClear();
        const result = await runCommand(makeDaemonOptions(), { ...baseContext, core });
        expect(result).toBe(false);
        // performFullScan should NOT have been called
        expect(offlineScanner.performFullScan).not.toHaveBeenCalled();
    });
    it("polling mode: registers onUnload handler that clears timeout", async () => {
        const core = createCoreMock();
        vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true);
        await runCommand(makeDaemonOptions(10), { ...baseContext, core });
        // onUnload handler should have been registered
        expect(core.services.appLifecycle.onUnload.addHandler).toHaveBeenCalledTimes(1);
        const handler = core.services.appLifecycle.onUnload.addHandler.mock.calls[0][0];
        // Invoking the handler must cancel the pending poll timer.
        const clearTimeoutSpy = vi.spyOn(globalThis, "clearTimeout");
        await handler();
        expect(clearTimeoutSpy).toHaveBeenCalledTimes(1);
    });
    it("polling backoff: interval escalates on failure, caps at 300000ms, then halves on recovery", async () => {
        const core = createCoreMock();
        vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true);
        vi.spyOn(console, "error").mockImplementation(() => {});
        // startup replicate (call 1) succeeds; poll calls 2–7 fail; call 8 succeeds.
        let callCount = 0;
        core.services.replication.replicate = vi.fn(async () => {
            callCount++;
            if (callCount === 1) return true; // initial startup replicate
            if (callCount <= 7) throw new Error("network failure");
            return true; // recovery
        });
        const baseMs = 30 * 1000;
        const setTimeoutSpy = vi.spyOn(globalThis, "setTimeout");
        await runCommand(makeDaemonOptions(30), { ...baseContext, core });
        // After runCommand returns the first setTimeout has been scheduled.
        // setTimeoutSpy.mock.calls[0] is the initial schedule (baseMs).
        expect(setTimeoutSpy.mock.calls[0][1]).toBe(baseMs);
        // Advance through 6 failure polls. After each failure the next setTimeout
        // should be scheduled with a larger (or capped) interval.
        // formula: min(base * 2^n, 300000). base=30000ms.
        // failure 1: 30000*2=60000, failure 2: 30000*4=120000,
        // failure 3: 30000*8=240000, failure 4: 30000*16=480000→capped, 5→cap, 6→cap
        const expectedIntervals = [
            baseMs * 2, // after failure 1: 60000
            baseMs * 4, // after failure 2: 120000
            baseMs * 8, // after failure 3: 240000
            300_000, // after failure 4 (would be 480000, capped)
            300_000, // after failure 5 (cap)
            300_000, // after failure 6 (cap)
        ];
        for (const expected of expectedIntervals) {
            // Advance by exactly the most recently scheduled delay so one
            // poll fires, then assert the follow-up schedule it created.
            const prevCallCount = setTimeoutSpy.mock.calls.length;
            await vi.advanceTimersByTimeAsync(setTimeoutSpy.mock.calls[prevCallCount - 1][1] as number);
            const newCallCount = setTimeoutSpy.mock.calls.length;
            expect(newCallCount).toBeGreaterThan(prevCallCount);
            expect(setTimeoutSpy.mock.calls[newCallCount - 1][1]).toBe(expected);
        }
        // Now trigger the success poll — interval should halve each time toward base.
        // After failure 6, consecutiveFailures=6, currentIntervalMs=300000.
        // On success: consecutiveFailures=5, currentIntervalMs=150000.
        const prevCallCount = setTimeoutSpy.mock.calls.length;
        await vi.advanceTimersByTimeAsync(setTimeoutSpy.mock.calls[prevCallCount - 1][1] as number);
        const afterSuccessCallCount = setTimeoutSpy.mock.calls.length;
        expect(afterSuccessCallCount).toBeGreaterThan(prevCallCount);
        // The interval after one success should be halved (300000 / 2 = 150000).
        expect(setTimeoutSpy.mock.calls[afterSuccessCallCount - 1][1]).toBe(150_000);
    });
    it("polling error handling: replicate rejection is caught and console.error is called", async () => {
        const core = createCoreMock();
        vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true);
        const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {});
        // Make replicate succeed on the initial call (startup), then fail on the poll.
        let callCount = 0;
        core.services.replication.replicate = vi.fn(async () => {
            callCount++;
            if (callCount === 1) return true; // startup replicate
            throw new Error("network failure");
        });
        const intervalMs = 30 * 1000;
        await runCommand(makeDaemonOptions(30), { ...baseContext, core });
        // Advance time to trigger the first poll callback and flush its async work.
        await vi.advanceTimersByTimeAsync(intervalMs);
        // No unhandled rejection — the error was caught internally.
        const errorCalls = consoleSpy.mock.calls.filter(
            (args) => typeof args[0] === "string" && args[0].includes("Poll error")
        );
        expect(errorCalls.length).toBeGreaterThan(0);
    });
});

View File

@@ -15,6 +15,106 @@ export async function runCommand(options: CLIOptions, context: CLICommandContext
await core.services.control.activated; await core.services.control.activated;
if (options.command === "daemon") { if (options.command === "daemon") {
const log = (msg: unknown) => console.error(`[Daemon] ${msg}`);
// Skip the config mismatch dialog — the daemon cannot resolve it interactively
// and the default "Dismiss" action would block replication. The daemon should
// accept whatever configuration the remote has.
await core.services.setting.applyPartial({ disableCheckingConfigMismatch: true }, true);
// 1. Replicate CouchDB → local PouchDB so the mirror scan has content to work with.
log("Replicating from CouchDB...");
const replResult = await core.services.replication.replicate(true);
if (!replResult) {
console.error("[Daemon] Initial CouchDB replication failed, cannot continue");
return false;
}
log("CouchDB replication complete");
// 2. Mirror scan to reconcile PouchDB ↔ local filesystem.
const errorManager = new UnresolvedErrorManager(core.services.appLifecycle);
log("Running mirror scan...");
const scanOk = await performFullScan(core as any, log, errorManager, false, true);
if (!scanOk) {
console.error("[Daemon] Mirror scan failed, cannot continue");
return false;
}
log("Mirror scan complete");
// 3. Re-enable sync.
const restoreSyncSettings = async () => {
await core.services.setting.applyPartial(
{
...context.originalSyncSettings,
suspendFileWatching: false,
},
true
);
// applySettings fires the full lifecycle: onSuspending → onResumed.
// ModuleReplicatorCouchDB starts continuous replication on onResumed
// via fireAndForget.
await core.services.control.applySettings();
// Lifecycle events (onSuspending) may re-enable suspension flags.
// Clear them explicitly after the lifecycle completes. applyPartial
// with true is a direct store write — it does not re-trigger lifecycle.
await core.services.setting.applyPartial(
{
suspendFileWatching: false,
suspendParseReplicationResult: false,
},
true
);
};
if (options.interval) {
log(`Polling mode: syncing every ${options.interval}s`);
await restoreSyncSettings();
const baseIntervalMs = options.interval * 1000;
let currentIntervalMs = baseIntervalMs;
let consecutiveFailures = 0;
const maxIntervalMs = 5 * 60 * 1000; // 5 minutes cap
const poll = async () => {
try {
await core.services.replication.replicate(true);
if (consecutiveFailures > 0) {
consecutiveFailures--;
currentIntervalMs = Math.max(currentIntervalMs / 2, baseIntervalMs);
log(`Replication recovered`);
}
} catch (err) {
consecutiveFailures++;
currentIntervalMs = Math.min(baseIntervalMs * Math.pow(2, consecutiveFailures), maxIntervalMs);
console.error(`[Daemon] Poll error (${consecutiveFailures} consecutive):`, err);
if (consecutiveFailures >= 5) {
console.error(
`[Daemon] Warning: ${consecutiveFailures} consecutive failures, backing off to ${Math.round(currentIntervalMs / 1000)}s`
);
}
}
pollTimer = setTimeout(poll, currentIntervalMs);
};
let pollTimer: ReturnType<typeof setTimeout> = setTimeout(poll, currentIntervalMs);
core.services.appLifecycle.onUnload.addHandler(async () => {
clearTimeout(pollTimer);
return true;
});
} else {
log("LiveSync mode: restoring sync settings and starting _changes feed");
await restoreSyncSettings();
// The applySettings() lifecycle fires onResumed → ModuleReplicatorCouchDB which
// starts continuous replication via fireAndForget(openReplication). Don't call
// openReplication directly — it races with the handler and causes dedup/termination.
log("LiveSync active");
const currentSettings = core.services.setting.currentSettings();
if (!currentSettings.liveSync && !currentSettings.syncOnStart) {
console.error(
"[Daemon] Warning: liveSync and syncOnStart are both disabled in settings. " +
"No sync will occur. Set liveSync=true in your settings file for continuous sync, " +
"or use --interval for polling mode."
);
}
}
return true; return true;
} }
@@ -83,8 +183,8 @@ export async function runCommand(options: CLIOptions, context: CLICommandContext
console.log(`[Command] push ${sourcePath} -> ${destinationDatabasePath}`); console.log(`[Command] push ${sourcePath} -> ${destinationDatabasePath}`);
await core.serviceModules.storageAccess.writeFileAuto(destinationDatabasePath, toArrayBuffer(sourceData), { await core.serviceModules.storageAccess.writeFileAuto(destinationDatabasePath, toArrayBuffer(sourceData), {
mtime: sourceStat.mtimeMs, mtime: Math.floor(sourceStat.mtimeMs),
ctime: sourceStat.ctimeMs, ctime: Math.floor(sourceStat.ctimeMs),
}); });
const destinationPathWithPrefix = destinationDatabasePath as FilePathWithPrefix; const destinationPathWithPrefix = destinationDatabasePath as FilePathWithPrefix;
const stored = await core.serviceModules.fileHandler.storeFileToDB(destinationPathWithPrefix, true); const stored = await core.serviceModules.fileHandler.storeFileToDB(destinationPathWithPrefix, true);

View File

@@ -1,5 +1,6 @@
import { LiveSyncBaseCore } from "../../../LiveSyncBaseCore"; import { LiveSyncBaseCore } from "../../../LiveSyncBaseCore";
import { ServiceContext } from "@lib/services/base/ServiceBase"; import { ServiceContext } from "@lib/services/base/ServiceBase";
import type { ObsidianLiveSyncSettings } from "@lib/common/types";
export type CLICommand = export type CLICommand =
| "daemon" | "daemon"
@@ -29,15 +30,27 @@ export interface CLIOptions {
force?: boolean; force?: boolean;
command: CLICommand; command: CLICommand;
commandArgs: string[]; commandArgs: string[];
interval?: number;
} }
export interface CLICommandContext { export interface CLICommandContext {
databasePath: string; databasePath: string;
core: LiveSyncBaseCore<ServiceContext, any>; core: LiveSyncBaseCore<ServiceContext, any>;
settingsPath: string; settingsPath: string;
originalSyncSettings: Pick<
ObsidianLiveSyncSettings,
| "liveSync"
| "syncOnStart"
| "periodicReplication"
| "syncOnSave"
| "syncOnEditorSave"
| "syncOnFileOpen"
| "syncAfterMerge"
>;
} }
export const VALID_COMMANDS = new Set([ export const VALID_COMMANDS = new Set([
"daemon",
"sync", "sync",
"p2p-peers", "p2p-peers",
"p2p-sync", "p2p-sync",

187
src/apps/cli/deploy/install.sh Executable file
View File

@@ -0,0 +1,187 @@
#!/usr/bin/env bash
# install.sh — install livesync-cli as a systemd service
#
# Usage:
#   install.sh [--user] [--system] [--vault <path>] [--interval <N>]
#
# Defaults: user install, prompts for vault path if not supplied.

# Abort on any error, unset variable, or failure anywhere in a pipeline.
set -euo pipefail

# Resolve the repository layout relative to this script's own location so it
# works regardless of the caller's working directory.
SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd -- "$SCRIPT_DIR/../../.." && pwd)"
CLI_DIR="$REPO_ROOT/src/apps/cli"
SERVICE_TEMPLATE="$SCRIPT_DIR/livesync-cli.service"

# ── Argument parsing ────────────────────────────────────────────────────────
INSTALL_MODE="user"   # "user" or "system"
VAULT_PATH=""
INTERVAL=""           # empty = LiveSync (_changes feed) mode
FORCE=0

while [[ $# -gt 0 ]]; do
    case "$1" in
        --user)
            INSTALL_MODE="user"
            shift
            ;;
        --system)
            INSTALL_MODE="system"
            shift
            ;;
        --vault)
            if [[ -z "${2:-}" ]]; then
                echo "Error: --vault requires a path argument" >&2
                exit 1
            fi
            VAULT_PATH="$2"
            shift 2
            ;;
        --interval)
            if [[ -z "${2:-}" ]]; then
                echo "Error: --interval requires a numeric argument" >&2
                exit 1
            fi
            INTERVAL="$2"
            # Positive integer only: no sign, no decimals, no leading zero.
            if ! [[ "$INTERVAL" =~ ^[1-9][0-9]*$ ]]; then
                echo "Error: --interval requires a positive integer, got '$INTERVAL'" >&2
                exit 1
            fi
            shift 2
            ;;
        --force|-f)
            FORCE=1
            shift
            ;;
        --help|-h)
            cat <<EOF
Usage: install.sh [--user|--system] [--vault <path>] [--interval <N>] [--force]
  --user       Install as a user systemd service (default, ~/.config/systemd/user/)
  --system     Install as a system systemd service (/etc/systemd/system/)
  --vault      Path to the vault directory (prompted if omitted)
  --interval   Poll CouchDB every N seconds instead of using the _changes feed
  --force      Overwrite existing service unit without prompting
EOF
            exit 0
            ;;
        *)
            echo "Error: Unknown argument: $1" >&2
            exit 1
            ;;
    esac
done
# ── Vault path ──────────────────────────────────────────────────────────────
if [[ -z "$VAULT_PATH" ]]; then
    # No --vault given: prompt, but only when stdin is a terminal.
    if [ ! -t 0 ]; then
        echo "Error: --vault is required in non-interactive mode" >&2
        exit 1
    fi
    printf 'Vault path: '
    read -r VAULT_PATH
fi
# Canonicalise to an absolute path; `cd` fails if the directory is missing.
_orig_vault="$VAULT_PATH"
if ! VAULT_PATH="$(cd -- "$VAULT_PATH" 2>/dev/null && pwd)"; then
    echo "Error: vault directory does not exist: $_orig_vault" >&2
    exit 1
fi
echo "[INFO] Vault: $VAULT_PATH"
echo "[INFO] Install mode: $INSTALL_MODE"

# ── Build ────────────────────────────────────────────────────────────────────
echo "[INFO] Building CLI from $REPO_ROOT..."
(cd "$REPO_ROOT" && npm install --silent)
(cd "$CLI_DIR" && npm run build)
BUILT_CJS="$CLI_DIR/dist/index.cjs"
if [[ ! -f "$BUILT_CJS" ]]; then
    echo "Error: build output not found: $BUILT_CJS" >&2
    exit 1
fi

# ── Install binary ───────────────────────────────────────────────────────────
# NOTE(review): system mode writes to /usr/local/bin and /etc/systemd/system,
# which normally requires root — confirm the script is expected to be run with
# sufficient privileges in that mode (no explicit root check is performed).
if [[ "$INSTALL_MODE" == "user" ]]; then
    BIN_DIR="$HOME/.local/bin"
    UNIT_DIR="$HOME/.config/systemd/user"
    SYSTEMCTL_FLAGS="--user"
else
    BIN_DIR="/usr/local/bin"
    UNIT_DIR="/etc/systemd/system"
    SYSTEMCTL_FLAGS=""
fi
mkdir -p "$BIN_DIR"
LIVESYNC_BIN="$BIN_DIR/livesync-cli"
LIVESYNC_JS="$BIN_DIR/livesync-cli.js"
# Copy the CJS bundle so the wrapper is self-contained and independent of the
# build directory location.
cp "$BUILT_CJS" "$LIVESYNC_JS"
# Write a bash wrapper that invokes node on the installed bundle.
# "\$@" is escaped so it expands at wrapper runtime, not here.
cat > "$LIVESYNC_BIN" <<WRAPPER
#!/usr/bin/env bash
exec node "$LIVESYNC_JS" "\$@"
WRAPPER
chmod +x "$LIVESYNC_BIN"
echo "[INFO] Installed bundle: $LIVESYNC_JS"
echo "[INFO] Installed binary: $LIVESYNC_BIN"
# ── Write systemd unit ───────────────────────────────────────────────────────
mkdir -p "$UNIT_DIR"
UNIT_PATH="$UNIT_DIR/livesync-cli.service"
# Quote both paths inside the ExecStart value; systemd splits on whitespace.
EXEC_START="\"$LIVESYNC_BIN\" \"$VAULT_PATH\""
if [[ -n "$INTERVAL" ]]; then
    EXEC_START="\"$LIVESYNC_BIN\" \"$VAULT_PATH\" --interval $INTERVAL"
fi
# Check for existing service and offer to overwrite.
if [[ -f "$UNIT_PATH" ]] && [[ "$FORCE" -eq 0 ]]; then
    if [ ! -t 0 ]; then
        echo "Error: service unit already exists at $UNIT_PATH; use --force to overwrite" >&2
        exit 1
    fi
    printf 'Service unit already exists at %s. Overwrite? [y/N]: ' "$UNIT_PATH"
    read -r CONFIRM
    case "$CONFIRM" in
        [yY]|[yY][eE][sS]) : ;;
        *)
            echo "[INFO] Aborted. Existing unit left in place."
            exit 0
            ;;
    esac
fi
# In awk gsub(), '&' in the replacement means "matched text"; escape any literal '&'
# in path variables before passing them as awk replacement strings.
AWK_BIN="${LIVESYNC_BIN//&/\\&}"
AWK_VAULT="${VAULT_PATH//&/\\&}"
# Replace the template's ExecStart line wholesale with the computed command,
# and substitute the placeholder tokens anywhere else in the template.
awk -v bin="$AWK_BIN" -v vault="$AWK_VAULT" -v exec_start="ExecStart=$EXEC_START" \
    '/^ExecStart=/ { print exec_start; next } {gsub("LIVESYNC_BIN", bin); gsub("LIVESYNC_VAULT_PATH", vault); print}' \
    "$SERVICE_TEMPLATE" > "$UNIT_PATH"
echo "[INFO] Installed unit: $UNIT_PATH"

# ── Enable service ───────────────────────────────────────────────────────────
if ! command -v systemctl >/dev/null 2>&1; then
    echo "[WARN] systemctl not found — skipping service activation"
    echo "[INFO] To enable manually, copy $UNIT_PATH to the correct systemd directory and run:"
    echo "  systemctl $SYSTEMCTL_FLAGS daemon-reload"
    echo "  systemctl $SYSTEMCTL_FLAGS enable --now livesync-cli"
    exit 0
fi
# SYSTEMCTL_FLAGS is intentionally unquoted: an empty value must expand to no
# argument (system mode) and "--user" to a single argument (user mode).
# shellcheck disable=SC2086
systemctl $SYSTEMCTL_FLAGS daemon-reload
# shellcheck disable=SC2086
systemctl $SYSTEMCTL_FLAGS enable --now livesync-cli
echo ""
echo "[Done] livesync-cli service installed and started."
echo ""
# shellcheck disable=SC2086
systemctl $SYSTEMCTL_FLAGS status livesync-cli --no-pager || true
View File

@@ -0,0 +1,17 @@
[Unit]
Description=Self-hosted LiveSync CLI Daemon
# Start after the network is up — the daemon replicates against a remote CouchDB.
# NOTE(review): user-session systemd managers often do not provide
# network-online.target — confirm this works for --user installs.
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
# Placeholders below are rewritten by install.sh (the ExecStart line is
# replaced wholesale with the quoted binary/vault paths).
ExecStart=LIVESYNC_BIN LIVESYNC_VAULT_PATH
# Restart automatically on crashes, with a 10s pause between attempts.
Restart=on-failure
RestartSec=10
# Allow up to 5 minutes for the initial replication/mirror scan at startup.
TimeoutStartSec=300
StandardOutput=journal
StandardError=journal
LimitNOFILE=65536
[Install]
# default.target works for both user and system managers.
WantedBy=default.target

View File

@@ -26,6 +26,7 @@ import { VALID_COMMANDS } from "./commands/types";
import type { CLICommand, CLIOptions } from "./commands/types"; import type { CLICommand, CLIOptions } from "./commands/types";
import { getPathFromUXFileInfo } from "@lib/common/typeUtils"; import { getPathFromUXFileInfo } from "@lib/common/typeUtils";
import { stripAllPrefixes } from "@lib/string_and_binary/path"; import { stripAllPrefixes } from "@lib/string_and_binary/path";
import { IgnoreRules } from "./serviceModules/IgnoreRules";
const SETTINGS_FILE = ".livesync/settings.json"; const SETTINGS_FILE = ".livesync/settings.json";
ensureGlobalNodeLocalStorage(); ensureGlobalNodeLocalStorage();
@@ -43,7 +44,8 @@ Arguments:
database-path Path to the local database directory database-path Path to the local database directory
Commands: Commands:
sync Run one replication cycle and exit daemon (default) Run mirror scan then continuously sync CouchDB <-> local filesystem
sync Run one replication cycle and exit
p2p-peers <timeout> Show discovered peers as [peer]\t<peer-id>\t<peer-name> p2p-peers <timeout> Show discovered peers as [peer]\t<peer-id>\t<peer-name>
p2p-sync <peer> <timeout> p2p-sync <peer> <timeout>
Sync with the specified peer-id or peer-name Sync with the specified peer-id or peer-name
@@ -60,24 +62,30 @@ Commands:
rm <path> Mark a file as deleted in local database rm <path> Mark a file as deleted in local database
resolve <path> <rev> Resolve conflicts by keeping <rev> and deleting others resolve <path> <rev> Resolve conflicts by keeping <rev> and deleting others
mirror [vault-path] Mirror database contents to the local file system (vault-path defaults to database-path) mirror [vault-path] Mirror database contents to the local file system (vault-path defaults to database-path)
Options:
--interval <N>, -i <N> (daemon only) Poll CouchDB every N seconds instead of using the _changes feed
Examples: Examples:
livesync-cli ./my-database sync livesync-cli ./my-database Run daemon (LiveSync mode)
livesync-cli ./my-database p2p-peers 5 livesync-cli ./my-database --interval 30 Run daemon (polling every 30s)
livesync-cli ./my-database p2p-sync my-peer-name 15 livesync-cli ./my-database sync
livesync-cli ./my-database p2p-host livesync-cli ./my-database p2p-peers 5
livesync-cli ./my-database --settings ./custom-settings.json push ./note.md folder/note.md livesync-cli ./my-database p2p-sync my-peer-name 15
livesync-cli ./my-database pull folder/note.md ./exports/note.md livesync-cli ./my-database p2p-host
livesync-cli ./my-database pull-rev folder/note.md ./exports/note.old.md 3-abcdef livesync-cli ./my-database --settings ./custom-settings.json push ./note.md folder/note.md
livesync-cli ./my-database setup "obsidian://setuplivesync?settings=..." livesync-cli ./my-database pull folder/note.md ./exports/note.md
echo "Hello" | livesync-cli ./my-database put notes/hello.md livesync-cli ./my-database pull-rev folder/note.md ./exports/note.old.md 3-abcdef
livesync-cli ./my-database cat notes/hello.md livesync-cli ./my-database setup "obsidian://setuplivesync?settings=..."
livesync-cli ./my-database cat-rev notes/hello.md 3-abcdef echo "Hello" | livesync-cli ./my-database put notes/hello.md
livesync-cli ./my-database ls notes/ livesync-cli ./my-database cat notes/hello.md
livesync-cli ./my-database info notes/hello.md livesync-cli ./my-database cat-rev notes/hello.md 3-abcdef
livesync-cli ./my-database rm notes/hello.md livesync-cli ./my-database ls notes/
livesync-cli ./my-database resolve notes/hello.md 3-abcdef livesync-cli ./my-database info notes/hello.md
livesync-cli init-settings ./data.json livesync-cli ./my-database rm notes/hello.md
livesync-cli ./my-database --verbose livesync-cli ./my-database resolve notes/hello.md 3-abcdef
livesync-cli init-settings ./data.json
livesync-cli ./my-database --verbose
`); `);
} }
@@ -94,6 +102,7 @@ export function parseArgs(): CLIOptions {
let verbose = false; let verbose = false;
let debug = false; let debug = false;
let force = false; let force = false;
let interval: number | undefined;
let command: CLICommand = "daemon"; let command: CLICommand = "daemon";
const commandArgs: string[] = []; const commandArgs: string[] = [];
@@ -110,6 +119,21 @@ export function parseArgs(): CLIOptions {
settingsPath = args[i]; settingsPath = args[i];
break; break;
} }
case "--interval":
case "-i": {
i++;
if (!args[i]) {
console.error(`Error: Missing value for ${token}`);
process.exit(1);
}
const n = parseInt(args[i], 10);
if (!Number.isInteger(n) || n <= 0) {
console.error(`Error: --interval requires a positive integer, got '${args[i]}'`);
process.exit(1);
}
interval = n;
break;
}
case "--debug": case "--debug":
case "-d": case "-d":
// debugging automatically enables verbose logging, as it is intended for debugging issues. // debugging automatically enables verbose logging, as it is intended for debugging issues.
@@ -164,6 +188,7 @@ export function parseArgs(): CLIOptions {
force, force,
command, command,
commandArgs, commandArgs,
interval,
}; };
} }
@@ -197,6 +222,9 @@ async function createDefaultSettingsFile(options: CLIOptions) {
export async function main() { export async function main() {
const options = parseArgs(); const options = parseArgs();
if (options.interval && options.command !== "daemon") {
console.error(`Warning: --interval is only used in daemon mode, ignored for '${options.command}'`);
}
const avoidStdoutNoise = const avoidStdoutNoise =
options.command === "cat" || options.command === "cat" ||
options.command === "cat-rev" || options.command === "cat-rev" ||
@@ -248,6 +276,17 @@ export async function main() {
infoLog(`Settings: ${settingsPath}`); infoLog(`Settings: ${settingsPath}`);
infoLog(""); infoLog("");
// For daemon and mirror mode, load ignore rules before the core is constructed so that
// chokidar's ignored option is populated when beginWatch() fires during onLoad().
const watchEnabled = options.command === "daemon";
const vaultPath =
options.command === "mirror" && options.commandArgs[0] ? path.resolve(options.commandArgs[0]) : databasePath;
let ignoreRules: IgnoreRules | undefined;
if (options.command === "daemon" || options.command === "mirror") {
ignoreRules = new IgnoreRules(vaultPath);
await ignoreRules.load();
}
// Create service context and hub // Create service context and hub
const context = new NodeServiceContext(databasePath); const context = new NodeServiceContext(databasePath);
const serviceHubInstance = new NodeServiceHub<NodeServiceContext>(databasePath, context); const serviceHubInstance = new NodeServiceHub<NodeServiceContext>(databasePath, context);
@@ -278,11 +317,14 @@ export async function main() {
} }
console.error(`${prefix} ${message}`); console.error(`${prefix} ${message}`);
}); });
// Prevent replication result to be processed automatically. // Prevent replication result from being processed automatically in non-daemon commands.
serviceHubInstance.replication.processSynchroniseResult.addHandler(async () => { // In daemon mode the default handler must run so changes are applied to the filesystem.
console.error(`[Info] Replication result received, but not processed automatically in CLI mode.`); if (options.command !== "daemon") {
return await Promise.resolve(true); serviceHubInstance.replication.processSynchroniseResult.addHandler(async () => {
}, -100); console.error(`[Info] Replication result received, but not processed automatically in CLI mode.`);
return await Promise.resolve(true);
}, -100);
}
// Setup settings handlers // Setup settings handlers
const settingService = serviceHubInstance.setting; const settingService = serviceHubInstance.setting;
@@ -324,11 +366,7 @@ export async function main() {
const core = new LiveSyncBaseCore( const core = new LiveSyncBaseCore(
serviceHubInstance, serviceHubInstance,
(core: LiveSyncBaseCore<NodeServiceContext, any>, serviceHub: InjectableServiceHub<NodeServiceContext>) => { (core: LiveSyncBaseCore<NodeServiceContext, any>, serviceHub: InjectableServiceHub<NodeServiceContext>) => {
const mirrorVaultPath = return initialiseServiceModulesCLI(vaultPath, core, serviceHub, ignoreRules, watchEnabled);
options.command === "mirror" && options.commandArgs[0]
? path.resolve(options.commandArgs[0])
: databasePath;
return initialiseServiceModulesCLI(mirrorVaultPath, core, serviceHub);
}, },
(core) => [ (core) => [
// No modules need to be registered for P2P replication in CLI. Directly using Replicators in p2p.ts // No modules need to be registered for P2P replication in CLI. Directly using Replicators in p2p.ts
@@ -344,8 +382,25 @@ export async function main() {
if (parts.some((part) => part.startsWith("."))) { if (parts.some((part) => part.startsWith("."))) {
return await Promise.resolve(false); return await Promise.resolve(false);
} }
// PouchDB LevelDB database directory lives in the vault directory.
if (parts[0]?.endsWith("-livesync-v2")) {
return await Promise.resolve(false);
}
return await Promise.resolve(true); return await Promise.resolve(true);
}, -1 /* highest priority */); }, -1 /* highest priority */);
// Apply user-defined ignore rules for daemon mode (lower priority, runs after dotfile check).
if (ignoreRules) {
const rules = ignoreRules;
core.services.vault.isTargetFile.addHandler(async (target) => {
const targetPath = stripAllPrefixes(getPathFromUXFileInfo(target));
if (rules.shouldIgnore(targetPath)) {
return false;
}
// undefined = pass through to next handler in chain
return undefined;
}, 0);
}
} }
); );
@@ -366,6 +421,25 @@ export async function main() {
process.on("SIGINT", () => shutdown("SIGINT")); process.on("SIGINT", () => shutdown("SIGINT"));
process.on("SIGTERM", () => shutdown("SIGTERM")); process.on("SIGTERM", () => shutdown("SIGTERM"));
// Save the settings file before any lifecycle events can mutate and persist them.
// suspendAllSync and other lifecycle hooks clobber sync settings in memory, and
// various code paths persist the clobbered state to disk. We restore on shutdown.
const settingsBackup = await fs.readFile(settingsPath, "utf-8").catch(() => null);
// Restore settings file on any exit to undo lifecycle mutations.
// Write to a temp path first so a crash mid-write doesn't leave a truncated file.
process.on("exit", () => {
if (settingsBackup) {
const tmpPath = settingsPath + ".tmp";
try {
require("fs").writeFileSync(tmpPath, settingsBackup, "utf-8");
require("fs").renameSync(tmpPath, settingsPath);
} catch (err) {
console.error("[Settings] Failed to restore settings on exit:", err);
}
}
});
// Start the core // Start the core
try { try {
infoLog(`[Starting] Initializing LiveSync...`); infoLog(`[Starting] Initializing LiveSync...`);
@@ -375,6 +449,18 @@ export async function main() {
console.error(`[Error] Failed to initialize LiveSync`); console.error(`[Error] Failed to initialize LiveSync`);
process.exit(1); process.exit(1);
} }
// Capture sync settings before suspendAllSync() clobbers them.
// Used by daemon mode to restore the correct sync behaviour after the mirror scan.
const settingsBeforeSuspend = core.services.setting.currentSettings();
const originalSyncSettings = {
liveSync: settingsBeforeSuspend.liveSync,
syncOnStart: settingsBeforeSuspend.syncOnStart,
periodicReplication: settingsBeforeSuspend.periodicReplication,
syncOnSave: settingsBeforeSuspend.syncOnSave,
syncOnEditorSave: settingsBeforeSuspend.syncOnEditorSave,
syncOnFileOpen: settingsBeforeSuspend.syncOnFileOpen,
syncAfterMerge: settingsBeforeSuspend.syncAfterMerge,
};
await core.services.setting.suspendAllSync(); await core.services.setting.suspendAllSync();
await core.services.control.onReady(); await core.services.control.onReady();
@@ -400,7 +486,7 @@ export async function main() {
infoLog(""); infoLog("");
} }
const result = await runCommand(options, { databasePath, core, settingsPath }); const result = await runCommand(options, { databasePath, core, settingsPath, originalSyncSettings });
if (!result) { if (!result) {
console.error(`[Error] Command '${options.command}' failed`); console.error(`[Error] Command '${options.command}' failed`);
process.exitCode = 1; process.exitCode = 1;
@@ -408,7 +494,7 @@ export async function main() {
infoLog(`[Done] Command '${options.command}' completed`); infoLog(`[Done] Command '${options.command}' completed`);
} }
if (options.command === "daemon") { if (options.command === "daemon" && result) {
// Keep the process running // Keep the process running
await new Promise(() => {}); await new Promise(() => {});
} else { } else {

View File

@@ -85,4 +85,67 @@ describe("CLI parseArgs", () => {
expect(parsed.command).toBe("p2p-host"); expect(parsed.command).toBe("p2p-host");
expect(parsed.commandArgs).toEqual([]); expect(parsed.commandArgs).toEqual([]);
}); });
it("parses --interval flag with valid integer", () => {
process.argv = ["node", "livesync-cli", "./vault", "--interval", "30"];
const parsed = parseArgs();
expect(parsed.command).toBe("daemon");
expect(parsed.interval).toBe(30);
});
it("parses -i shorthand for --interval", () => {
process.argv = ["node", "livesync-cli", "./vault", "-i", "10"];
const parsed = parseArgs();
expect(parsed.interval).toBe(10);
});
it("exits 1 when --interval has no value", () => {
process.argv = ["node", "livesync-cli", "./vault", "--interval"];
const exitMock = mockProcessExit();
vi.spyOn(console, "error").mockImplementation(() => {});
expect(() => parseArgs()).toThrowError("__EXIT__:1");
expect(exitMock).toHaveBeenCalledWith(1);
});
it("exits 1 when --interval is not a positive integer", () => {
process.argv = ["node", "livesync-cli", "./vault", "--interval", "0"];
const exitMock = mockProcessExit();
vi.spyOn(console, "error").mockImplementation(() => {});
expect(() => parseArgs()).toThrowError("__EXIT__:1");
expect(exitMock).toHaveBeenCalledWith(1);
});
it("exits 1 when --interval is negative", () => {
process.argv = ["node", "livesync-cli", "./vault", "--interval", "-5"];
const exitMock = mockProcessExit();
vi.spyOn(console, "error").mockImplementation(() => {});
expect(() => parseArgs()).toThrowError("__EXIT__:1");
});
it("exits 1 when --interval is not numeric", () => {
process.argv = ["node", "livesync-cli", "./vault", "--interval", "abc"];
const exitMock = mockProcessExit();
vi.spyOn(console, "error").mockImplementation(() => {});
expect(() => parseArgs()).toThrowError("__EXIT__:1");
});
it("parses explicit daemon command", () => {
process.argv = ["node", "livesync-cli", "./vault", "daemon"];
const parsed = parseArgs();
expect(parsed.command).toBe("daemon");
expect(parsed.databasePath).toBe("./vault");
});
it("defaults to daemon when no command specified", () => {
process.argv = ["node", "livesync-cli", "./vault"];
const parsed = parseArgs();
expect(parsed.command).toBe("daemon");
});
it("parses explicit daemon command with --interval", () => {
process.argv = ["node", "livesync-cli", "./vault", "daemon", "--interval", "30"];
const parsed = parseArgs();
expect(parsed.command).toBe("daemon");
expect(parsed.interval).toBe(30);
});
}); });

View File

@@ -11,8 +11,11 @@ import type {
} from "@lib/managers/adapters"; } from "@lib/managers/adapters";
import type { FileEventItemSentinel } from "@lib/managers/StorageEventManager"; import type { FileEventItemSentinel } from "@lib/managers/StorageEventManager";
import type { NodeFile, NodeFolder } from "../adapters/NodeTypes"; import type { NodeFile, NodeFolder } from "../adapters/NodeTypes";
import type { Stats } from "fs";
import * as fs from "fs/promises"; import * as fs from "fs/promises";
import * as path from "path"; import * as path from "path";
import { watch as chokidarWatch, type FSWatcher } from "chokidar";
import type { IgnoreRules } from "../serviceModules/IgnoreRules";
/** /**
* CLI-specific type guard adapter * CLI-specific type guard adapter
@@ -56,22 +59,11 @@ class CLIPersistenceAdapter implements IStorageEventPersistenceAdapter {
} }
/** /**
* CLI-specific status adapter (console logging) * CLI-specific status adapter (no-op — daemon uses journald for status)
*/ */
class CLIStatusAdapter implements IStorageEventStatusAdapter { class CLIStatusAdapter implements IStorageEventStatusAdapter {
private lastUpdate = 0; updateStatus(_status: { batched: number; processing: number; totalQueued: number }): void {
private updateInterval = 5000; // Update every 5 seconds // intentional no-op
updateStatus(status: { batched: number; processing: number; totalQueued: number }): void {
const now = Date.now();
if (now - this.lastUpdate > this.updateInterval) {
if (status.totalQueued > 0 || status.processing > 0) {
// console.log(
// `[StorageEventManager] Batched: ${status.batched}, Processing: ${status.processing}, Total Queued: ${status.totalQueued}`
// );
}
this.lastUpdate = now;
}
} }
} }
@@ -100,15 +92,101 @@ class CLIConverterAdapter implements IStorageEventConverterAdapter<NodeFile> {
} }
/** /**
* CLI-specific watch adapter (optional file watching with chokidar) * CLI-specific watch adapter using chokidar for real-time filesystem monitoring.
*/ */
class CLIWatchAdapter implements IStorageEventWatchAdapter { class CLIWatchAdapter implements IStorageEventWatchAdapter {
constructor(private basePath: string) {} private _watcher: FSWatcher | undefined;
constructor(
private basePath: string,
private ignoreRules?: IgnoreRules,
private watchEnabled: boolean = false
) {}
private _toNodeFile(filePath: string, stats: Stats | undefined): NodeFile {
return {
path: path.relative(this.basePath, filePath) as FilePath,
stat: {
ctime: stats?.ctimeMs ?? Date.now(),
mtime: stats?.mtimeMs ?? Date.now(),
size: stats?.size ?? 0,
type: "file",
},
};
}
private _toNodeFolder(dirPath: string): NodeFolder {
return {
path: path.relative(this.basePath, dirPath) as FilePath,
isFolder: true,
};
}
async beginWatch(handlers: IStorageEventWatchHandlers): Promise<void> { async beginWatch(handlers: IStorageEventWatchHandlers): Promise<void> {
// File watching is not activated in the CLI. if (!this.watchEnabled) return;
// Because the CLI is designed for push/pull operations, not real-time sync. const baseIgnored: Array<RegExp | string | ((p: string) => boolean)> = [
// console.error("[CLIWatchAdapter] File watching is not enabled in CLI version"); /(^|[/\\])\./,
/(^|[/\\])[^/\\]*-livesync-v2([/\\]|$)/,
];
// Bind rules to a local const before the closure — chokidar v4 requires a
// MatchFunction, not glob strings, for custom patterns.
const rules = this.ignoreRules;
const ignored = rules
? [...baseIgnored, (p: string) => rules.shouldIgnore(path.relative(this.basePath, p))]
: baseIgnored;
const watcher = chokidarWatch(this.basePath, {
ignored,
ignoreInitial: true,
persistent: true,
awaitWriteFinish: {
stabilityThreshold: 500,
pollInterval: 100,
},
});
watcher.on("add", (filePath, stats) => {
const nodeFile = this._toNodeFile(filePath, stats);
handlers.onCreate(nodeFile);
});
watcher.on("change", (filePath, stats) => {
const nodeFile = this._toNodeFile(filePath, stats);
handlers.onChange(nodeFile);
});
watcher.on("unlink", (filePath) => {
const nodeFile = this._toNodeFile(filePath, undefined);
handlers.onDelete(nodeFile);
});
watcher.on("addDir", (dirPath) => {
const nodeFolder = this._toNodeFolder(dirPath);
handlers.onCreate(nodeFolder);
});
watcher.on("unlinkDir", (dirPath) => {
const nodeFolder = this._toNodeFolder(dirPath);
handlers.onDelete(nodeFolder);
});
watcher.on("error", (err) => {
console.error("[CLIWatchAdapter] Fatal watcher error — file watching stopped:", err);
console.error("[CLIWatchAdapter] Exiting for systemd restart.");
void watcher.close();
this._watcher = undefined;
// Use exit(1) rather than SIGTERM so systemd Restart=on-failure engages.
process.exit(1);
});
await new Promise<void>((resolve) => watcher.once("ready", resolve));
this._watcher = watcher;
}
close(): Promise<void> {
if (this._watcher) {
return this._watcher.close();
}
return Promise.resolve(); return Promise.resolve();
} }
} }
@@ -123,11 +201,15 @@ export class CLIStorageEventManagerAdapter implements IStorageEventManagerAdapte
readonly status: CLIStatusAdapter; readonly status: CLIStatusAdapter;
readonly converter: CLIConverterAdapter; readonly converter: CLIConverterAdapter;
constructor(basePath: string) { constructor(basePath: string, ignoreRules?: IgnoreRules, watchEnabled: boolean = false) {
this.typeGuard = new CLITypeGuardAdapter(); this.typeGuard = new CLITypeGuardAdapter();
this.persistence = new CLIPersistenceAdapter(basePath); this.persistence = new CLIPersistenceAdapter(basePath);
this.watch = new CLIWatchAdapter(basePath); this.watch = new CLIWatchAdapter(basePath, ignoreRules, watchEnabled);
this.status = new CLIStatusAdapter(); this.status = new CLIStatusAdapter();
this.converter = new CLIConverterAdapter(); this.converter = new CLIConverterAdapter();
} }
close(): Promise<void> {
return this.watch.close();
}
} }

View File

@@ -0,0 +1,123 @@
import { describe, expect, it, vi, beforeEach } from "vitest";
import type { IStorageEventWatchHandlers } from "@lib/managers/adapters";
import type { NodeFile } from "../adapters/NodeTypes";

// ── chokidar mock ──────────────────────────────────────────────────────────────
// Must be hoisted before imports that pull in chokidar.
// NOTE(review): vitest hoists vi.mock() calls to the top of the module; the
// factory only dereferences `mockWatcher` lazily, which is why this works even
// though the declaration appears after the (hoisted) mock call in source order.
const mockWatcher = {
    // on() returns the watcher itself so chained `.on(...).on(...)` registration
    // behaves like the real chokidar watcher.
    on: vi.fn().mockReturnThis(),
    // once("ready", cb) fires the callback synchronously so beginWatch()'s
    // await on the "ready" event resolves immediately in tests.
    once: vi.fn((event: string, cb: () => void) => {
        if (event === "ready") cb();
        return mockWatcher;
    }),
    close: vi.fn(() => Promise.resolve()),
};
vi.mock("chokidar", () => ({
    watch: vi.fn(() => mockWatcher),
}));

import * as chokidar from "chokidar";
import { CLIStorageEventManagerAdapter } from "./CLIStorageEventManagerAdapter";

// ── helpers ───────────────────────────────────────────────────────────────────

// Builds a fresh set of spy handlers for each test case.
function makeHandlers(): IStorageEventWatchHandlers {
    return {
        onCreate: vi.fn(),
        onChange: vi.fn(),
        onDelete: vi.fn(),
        onRename: vi.fn(),
    } as any;
}

// ── tests ─────────────────────────────────────────────────────────────────────

describe("CLIStorageEventManagerAdapter", () => {
    beforeEach(() => {
        // clearAllMocks() wipes mock implementations too, so the default
        // once() behaviour must be reinstalled for every test.
        vi.clearAllMocks();
        // Restore the default once() behaviour (ready fires synchronously).
        mockWatcher.once.mockImplementation((event: string, cb: () => void) => {
            if (event === "ready") cb();
            return mockWatcher;
        });
    });

    it("beginWatch is no-op when watchEnabled=false", async () => {
        const adapter = new CLIStorageEventManagerAdapter("/base", undefined, false);
        const handlers = makeHandlers();
        await adapter.watch.beginWatch(handlers);
        expect(chokidar.watch).not.toHaveBeenCalled();
    });

    it("beginWatch calls chokidar.watch when watchEnabled=true", async () => {
        const adapter = new CLIStorageEventManagerAdapter("/base", undefined, true);
        const handlers = makeHandlers();
        await adapter.watch.beginWatch(handlers);
        expect(chokidar.watch).toHaveBeenCalledTimes(1);
        expect(chokidar.watch).toHaveBeenCalledWith("/base", expect.objectContaining({ ignoreInitial: true }));
    });

    it("add event produces NodeFile with correct relative path via onCreate", async () => {
        const basePath = "/vault/base";
        const adapter = new CLIStorageEventManagerAdapter(basePath, undefined, true);
        const handlers = makeHandlers();
        await adapter.watch.beginWatch(handlers);

        // Find the callback registered for the "add" event.
        const addCall = mockWatcher.on.mock.calls.find(([event]) => event === "add");
        expect(addCall).toBeDefined();
        const addCallback = addCall![1] as (filePath: string, stats: any) => void;

        // Simulate chokidar reporting a new file with stats.
        const fakeStats = { ctimeMs: 1000, mtimeMs: 2000, size: 42 };
        addCallback(`${basePath}/subdir/note.md`, fakeStats);

        expect(handlers.onCreate).toHaveBeenCalledTimes(1);
        const created = (handlers.onCreate as ReturnType<typeof vi.fn>).mock.calls[0][0] as NodeFile;
        // The adapter is expected to strip the base path, leaving a vault-relative path.
        expect(created.path).toBe("subdir/note.md");
        expect(created.stat?.size).toBe(42);
    });

    it("close() calls watcher.close()", async () => {
        const adapter = new CLIStorageEventManagerAdapter("/base", undefined, true);
        const handlers = makeHandlers();
        await adapter.watch.beginWatch(handlers);
        await adapter.close();
        expect(mockWatcher.close).toHaveBeenCalledTimes(1);
    });

    it("close() is safe when no watcher was started", async () => {
        const adapter = new CLIStorageEventManagerAdapter("/base", undefined, false);
        // Should not throw.
        await expect(adapter.close()).resolves.toBeUndefined();
        expect(mockWatcher.close).not.toHaveBeenCalled();
    });

    it("error event triggers process.exit(1)", async () => {
        const adapter = new CLIStorageEventManagerAdapter("/base", undefined, true);
        const handlers = makeHandlers();
        await adapter.watch.beginWatch(handlers);

        // Stub process.exit so the watcher's fatal-error path doesn't kill the test run.
        const processExitSpy = vi.spyOn(process, "exit").mockImplementation((() => {}) as any);
        const errorCall = mockWatcher.on.mock.calls.find(([event]) => event === "error");
        expect(errorCall).toBeDefined();
        const errorCallback = errorCall![1] as (err: Error) => void;
        errorCallback(new Error("disk failure"));
        expect(processExitSpy).toHaveBeenCalledWith(1);
        processExitSpy.mockRestore();
    });
});

View File

@@ -2,6 +2,7 @@ import { StorageEventManagerBase, type StorageEventManagerBaseDependencies } fro
import { CLIStorageEventManagerAdapter } from "./CLIStorageEventManagerAdapter"; import { CLIStorageEventManagerAdapter } from "./CLIStorageEventManagerAdapter";
import type { IMinimumLiveSyncCommands, LiveSyncBaseCore } from "../../../LiveSyncBaseCore"; import type { IMinimumLiveSyncCommands, LiveSyncBaseCore } from "../../../LiveSyncBaseCore";
import type { ServiceContext } from "@lib/services/base/ServiceBase"; import type { ServiceContext } from "@lib/services/base/ServiceBase";
import type { IgnoreRules } from "../serviceModules/IgnoreRules";
// import type { IMinimumLiveSyncCommands } from "@lib/services/base/IService"; // import type { IMinimumLiveSyncCommands } from "@lib/services/base/IService";
export class StorageEventManagerCLI extends StorageEventManagerBase<CLIStorageEventManagerAdapter> { export class StorageEventManagerCLI extends StorageEventManagerBase<CLIStorageEventManagerAdapter> {
@@ -10,9 +11,11 @@ export class StorageEventManagerCLI extends StorageEventManagerBase<CLIStorageEv
constructor( constructor(
basePath: string, basePath: string,
core: LiveSyncBaseCore<ServiceContext, IMinimumLiveSyncCommands>, core: LiveSyncBaseCore<ServiceContext, IMinimumLiveSyncCommands>,
dependencies: StorageEventManagerBaseDependencies dependencies: StorageEventManagerBaseDependencies,
ignoreRules?: IgnoreRules,
watchEnabled?: boolean
) { ) {
const adapter = new CLIStorageEventManagerAdapter(basePath); const adapter = new CLIStorageEventManagerAdapter(basePath, ignoreRules, watchEnabled);
super(adapter, dependencies); super(adapter, dependencies);
this.core = core; this.core = core;
} }
@@ -25,4 +28,11 @@ export class StorageEventManagerCLI extends StorageEventManagerBase<CLIStorageEv
// No-op in CLI version // No-op in CLI version
// Internal file handling is not needed // Internal file handling is not needed
} }
/**
* Close the file watcher. Call this during graceful shutdown.
*/
close(): Promise<void> {
return this.adapter.close();
}
} }

View File

@@ -4,6 +4,7 @@
"version": "0.0.0", "version": "0.0.0",
"description": "Runtime dependencies for Self-hosted LiveSync CLI Docker image", "description": "Runtime dependencies for Self-hosted LiveSync CLI Docker image",
"dependencies": { "dependencies": {
"chokidar": "^4.0.0",
"commander": "^14.0.3", "commander": "^14.0.3",
"werift": "^0.22.9", "werift": "^0.22.9",
"pouchdb-adapter-http": "^9.0.0", "pouchdb-adapter-http": "^9.0.0",

View File

@@ -9,6 +9,7 @@ import { ServiceFileAccessCLI } from "./ServiceFileAccessImpl";
import { ServiceDatabaseFileAccessCLI } from "./DatabaseFileAccess"; import { ServiceDatabaseFileAccessCLI } from "./DatabaseFileAccess";
import { StorageEventManagerCLI } from "../managers/StorageEventManagerCLI"; import { StorageEventManagerCLI } from "../managers/StorageEventManagerCLI";
import type { ServiceModules } from "@lib/interfaces/ServiceModule"; import type { ServiceModules } from "@lib/interfaces/ServiceModule";
import type { IgnoreRules } from "./IgnoreRules";
/** /**
* Initialize service modules for CLI version * Initialize service modules for CLI version
@@ -22,7 +23,9 @@ import type { ServiceModules } from "@lib/interfaces/ServiceModule";
export function initialiseServiceModulesCLI( export function initialiseServiceModulesCLI(
basePath: string, basePath: string,
core: LiveSyncBaseCore<ServiceContext, any>, core: LiveSyncBaseCore<ServiceContext, any>,
services: InjectableServiceHub<ServiceContext> services: InjectableServiceHub<ServiceContext>,
ignoreRules?: IgnoreRules,
watchEnabled: boolean = false
): ServiceModules { ): ServiceModules {
const storageAccessManager = new StorageAccessManager(); const storageAccessManager = new StorageAccessManager();
@@ -36,12 +39,24 @@ export function initialiseServiceModulesCLI(
}); });
// CLI-specific storage event manager // CLI-specific storage event manager
const storageEventManager = new StorageEventManagerCLI(basePath, core, { const storageEventManager = new StorageEventManagerCLI(
fileProcessing: services.fileProcessing, basePath,
setting: services.setting, core,
vaultService: services.vault, {
storageAccessManager: storageAccessManager, fileProcessing: services.fileProcessing,
APIService: services.API, setting: services.setting,
vaultService: services.vault,
storageAccessManager: storageAccessManager,
APIService: services.API,
},
ignoreRules,
watchEnabled
);
// Close the file watcher during graceful shutdown so the process can exit cleanly.
services.appLifecycle.onUnload.addHandler(async () => {
await storageEventManager.close();
return true;
}); });
// Storage access using CLI file system adapter // Storage access using CLI file system adapter

View File

@@ -0,0 +1,131 @@
import * as fs from "fs/promises";
import * as path from "path";
import { minimatch } from "minimatch";
/**
* Loads and evaluates ignore rules from `.livesync/ignore` inside the vault.
*
* File format:
* - Lines starting with `#` are comments.
* - Blank lines are ignored.
* - `import: .gitignore` (exactly) — merges patterns from the vault's `.gitignore`.
* - All other lines are minimatch glob patterns relative to the vault root.
*
* Negation patterns (lines starting with `!`) are not supported. Loading a
* ruleset containing them throws an error — use separate include/exclude files
* instead.
*
* Missing files (`.livesync/ignore` or `.gitignore`) are silently skipped.
*/
export class IgnoreRules {
    /** Normalised minimatch globs; empty until load() succeeds. */
    private patterns: string[] = [];

    constructor(private vaultPath: string) {}

    /**
     * Reads `.livesync/ignore` (and optionally `.gitignore`) and populates the
     * pattern list. Safe to call multiple times — each call replaces the
     * previous state. Does not throw if files are absent.
     *
     * @throws if any pattern line begins with `!` (negation is unsupported).
     */
    async load(): Promise<void> {
        this.patterns = [];
        const ignorePath = path.join(this.vaultPath, ".livesync", "ignore");

        let content: string;
        try {
            content = await fs.readFile(ignorePath, "utf-8");
        } catch {
            // File absent or unreadable — behave as an empty ruleset.
            return;
        }

        for (const rawLine of content.split(/\r?\n/)) {
            const line = rawLine.trim();
            if (line === "" || line.startsWith("#")) {
                continue;
            }
            // NOTE: Only the exact string "import: .gitignore" is recognised.
            // Any future generalisation of this directive must validate that
            // the resolved path stays within the vault directory.
            if (line === "import: .gitignore") {
                await this._mergeGitignore();
            } else if (line.startsWith("import:")) {
                console.error(
                    `[IgnoreRules] Warning: unrecognised directive '${line}' — only 'import: .gitignore' is supported`
                );
            } else {
                this._register(line);
            }
        }

        if (this.patterns.length > 0) {
            console.error(`[IgnoreRules] Loaded ${this.patterns.length} ignore patterns`);
        }
    }

    // Reads the vault's .gitignore and registers each non-comment, non-blank
    // line as a pattern. A missing or unreadable .gitignore is silently skipped.
    private async _mergeGitignore(): Promise<void> {
        const gitignorePath = path.join(this.vaultPath, ".gitignore");
        let body: string;
        try {
            body = await fs.readFile(gitignorePath, "utf-8");
        } catch {
            return;
        }
        for (const rawLine of body.split(/\r?\n/)) {
            const entry = rawLine.trim();
            if (entry === "" || entry.startsWith("#")) {
                continue;
            }
            this._register(entry);
        }
    }

    // Validates a raw pattern line and stores its normalised glob form.
    // Negation is rejected up front so a half-loaded ruleset never silently
    // un-ignores files.
    private _register(pattern: string): void {
        if (pattern.startsWith("!")) {
            throw new Error(
                `[IgnoreRules] Negation pattern '${pattern}' is not supported. ` +
                    `Remove it from .livesync/ignore or use a separate include/exclude file.`
            );
        }
        this.patterns.push(this._asGlob(pattern));
    }

    // Converts a gitignore-style pattern into a minimatch glob:
    //   "build/"          → "**/build/**"  — the directory, at any depth, and
    //                                        everything inside it
    //   "*.tmp" (no "/")  → "**/*.tmp"     — basename match in any subdirectory,
    //                                        as gitignore does
    //   "docs/private.md" → unchanged      — contains "/" (but doesn't end with
    //                                        one): path-specific, used as-is
    private _asGlob(pattern: string): string {
        if (pattern.endsWith("/")) {
            return `**/${pattern}**`;
        }
        if (!pattern.includes("/")) {
            return `**/${pattern}`;
        }
        return pattern;
    }

    /**
     * Returns `true` if the given vault-relative path matches any loaded
     * ignore pattern.
     *
     * @param relativePath - Path relative to the vault root, using forward
     *                       slashes or the OS separator.
     */
    shouldIgnore(relativePath: string): boolean {
        if (this.patterns.length === 0) {
            return false;
        }
        // minimatch expects forward slashes; normalise Windows separators.
        const candidate = relativePath.replace(/\\/g, "/");
        return this.patterns.some((glob) => minimatch(candidate, glob, { dot: true }));
    }
}

View File

@@ -0,0 +1,169 @@
import * as fs from "node:fs/promises";
import * as os from "node:os";
import * as path from "node:path";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { IgnoreRules } from "./IgnoreRules";

// NOTE(review): `beforeEach` is imported but unused in this file — candidate
// for removal in a later cleanup.
describe("IgnoreRules", () => {
    // Temp vault directories created during the run; removed in afterEach.
    const tempDirs: string[] = [];

    // Creates an isolated vault root under the OS temp directory.
    async function createVault(): Promise<string> {
        const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "livesync-ignorerules-"));
        tempDirs.push(tempDir);
        return tempDir;
    }

    // Writes `.livesync/ignore` with the given content inside the vault.
    async function writeIgnoreFile(vaultPath: string, content: string): Promise<void> {
        const ignoreDir = path.join(vaultPath, ".livesync");
        await fs.mkdir(ignoreDir, { recursive: true });
        await fs.writeFile(path.join(ignoreDir, "ignore"), content, "utf-8");
    }

    afterEach(async () => {
        // splice(0) empties the list in place so each directory is removed once.
        await Promise.all(tempDirs.splice(0).map((dir) => fs.rm(dir, { recursive: true, force: true })));
    });

    describe("pattern normalisation", () => {
        it("adds **/ prefix to basename patterns (no slash)", async () => {
            const vaultPath = await createVault();
            await writeIgnoreFile(vaultPath, "*.tmp\n");
            const rules = new IgnoreRules(vaultPath);
            await rules.load();
            expect(rules.shouldIgnore("notes/scratch.tmp")).toBe(true);
            expect(rules.shouldIgnore("scratch.tmp")).toBe(true);
            expect(rules.shouldIgnore("deep/nested/file.tmp")).toBe(true);
        });

        it("appends ** to directory patterns ending with / and prepends **/", async () => {
            const vaultPath = await createVault();
            await writeIgnoreFile(vaultPath, "build/\n");
            const rules = new IgnoreRules(vaultPath);
            await rules.load();
            expect(rules.shouldIgnore("build/output.js")).toBe(true);
            expect(rules.shouldIgnore("build/nested/file.js")).toBe(true);
            // `build/` matches at any depth, not just the vault root.
            expect(rules.shouldIgnore("subproject/build/output.js")).toBe(true);
        });

        it("leaves patterns containing / as-is", async () => {
            const vaultPath = await createVault();
            await writeIgnoreFile(vaultPath, "docs/private.md\n");
            const rules = new IgnoreRules(vaultPath);
            await rules.load();
            expect(rules.shouldIgnore("docs/private.md")).toBe(true);
            // Path-specific patterns are anchored; they do not float to subdirs.
            expect(rules.shouldIgnore("other/docs/private.md")).toBe(false);
        });
    });

    describe("shouldIgnore", () => {
        it("matches **/*.tmp against notes/scratch.tmp", async () => {
            const vaultPath = await createVault();
            await writeIgnoreFile(vaultPath, "*.tmp\n");
            const rules = new IgnoreRules(vaultPath);
            await rules.load();
            expect(rules.shouldIgnore("notes/scratch.tmp")).toBe(true);
        });

        it("does not match notes/readme.md against **/*.tmp", async () => {
            const vaultPath = await createVault();
            await writeIgnoreFile(vaultPath, "*.tmp\n");
            const rules = new IgnoreRules(vaultPath);
            await rules.load();
            expect(rules.shouldIgnore("notes/readme.md")).toBe(false);
        });

        it("returns false when no patterns are loaded", async () => {
            const vaultPath = await createVault();
            const rules = new IgnoreRules(vaultPath);
            // No load() call — patterns are empty
            expect(rules.shouldIgnore("anything.md")).toBe(false);
        });
    });

    describe("negation patterns", () => {
        it("throws when a negation pattern is encountered", async () => {
            const vaultPath = await createVault();
            await writeIgnoreFile(vaultPath, "*.tmp\n!important.tmp\n");
            const rules = new IgnoreRules(vaultPath);
            await expect(rules.load()).rejects.toThrow(/Negation pattern/);
        });

        it("throws when a .gitignore imported via directive contains negation", async () => {
            const vaultPath = await createVault();
            await writeIgnoreFile(vaultPath, "import: .gitignore\n");
            await fs.writeFile(path.join(vaultPath, ".gitignore"), "*.log\n!keep.log\n", "utf-8");
            const rules = new IgnoreRules(vaultPath);
            await expect(rules.load()).rejects.toThrow(/Negation pattern/);
        });
    });

    describe("unrecognised import: directives", () => {
        it("warns and skips unrecognised import: forms (does not add as literal pattern)", async () => {
            const vaultPath = await createVault();
            // Typo: "import:.gitignore" instead of "import: .gitignore"
            await writeIgnoreFile(vaultPath, "*.tmp\nimport:.gitignore\n");
            const rules = new IgnoreRules(vaultPath);
            await rules.load();
            // *.tmp still loaded; import:.gitignore is skipped (not treated as a literal pattern)
            expect(rules.shouldIgnore("scratch.tmp")).toBe(true);
            expect(rules.shouldIgnore("import:.gitignore")).toBe(false);
        });
    });

    describe("load() with missing file", () => {
        it("returns without error when .livesync/ignore is absent", async () => {
            const vaultPath = await createVault();
            // No ignore file created
            const rules = new IgnoreRules(vaultPath);
            await expect(rules.load()).resolves.toBeUndefined();
            expect(rules.shouldIgnore("anything.md")).toBe(false);
        });
    });

    describe("load() with comments and blank lines", () => {
        it("skips # comment lines and blank lines", async () => {
            const vaultPath = await createVault();
            // Includes a whitespace-only line to exercise trimming.
            await writeIgnoreFile(vaultPath, "# This is a comment\n\n   \n*.tmp\n# another comment\nbuild/\n");
            const rules = new IgnoreRules(vaultPath);
            await rules.load();
            expect(rules.shouldIgnore("scratch.tmp")).toBe(true);
            expect(rules.shouldIgnore("build/output.js")).toBe(true);
            expect(rules.shouldIgnore("readme.md")).toBe(false);
        });
    });

    describe("import: .gitignore directive", () => {
        it("reads and normalises patterns from .gitignore", async () => {
            const vaultPath = await createVault();
            await writeIgnoreFile(vaultPath, "import: .gitignore\n");
            await fs.writeFile(path.join(vaultPath, ".gitignore"), "*.log\nnode_modules/\n", "utf-8");
            const rules = new IgnoreRules(vaultPath);
            await rules.load();
            expect(rules.shouldIgnore("app.log")).toBe(true);
            expect(rules.shouldIgnore("node_modules/package.json")).toBe(true);
            // Imported patterns receive the same normalisation as native ones.
            expect(rules.shouldIgnore("src/node_modules/package.json")).toBe(true);
            expect(rules.shouldIgnore("src/index.ts")).toBe(false);
        });

        it("merges .gitignore patterns with other patterns", async () => {
            const vaultPath = await createVault();
            await writeIgnoreFile(vaultPath, "*.tmp\nimport: .gitignore\n");
            await fs.writeFile(path.join(vaultPath, ".gitignore"), "*.log\n", "utf-8");
            const rules = new IgnoreRules(vaultPath);
            await rules.load();
            expect(rules.shouldIgnore("scratch.tmp")).toBe(true);
            expect(rules.shouldIgnore("error.log")).toBe(true);
        });
    });

    describe("import: .gitignore with missing .gitignore", () => {
        it("does not throw when .gitignore is absent", async () => {
            const vaultPath = await createVault();
            await writeIgnoreFile(vaultPath, "*.tmp\nimport: .gitignore\n");
            // No .gitignore created
            const rules = new IgnoreRules(vaultPath);
            await expect(rules.load()).resolves.toBeUndefined();
            // The *.tmp pattern from the ignore file still works
            expect(rules.shouldIgnore("scratch.tmp")).toBe(true);
        });
    });
});

View File

@@ -0,0 +1,166 @@
#!/usr/bin/env bash
# Test: daemon-related ignore rules behaviour
#
# Tests that are runnable without a long-running daemon process are exercised
# here using the `mirror` command, which calls the same `isTargetFile` handler
# stack that the daemon uses.
#
# Covered cases:
#   1. .livesync/ignore with *.tmp pattern → ignored file is NOT synced to DB
#   2. .livesync/ignore missing → no error, normal sync continues
#   3. import: .gitignore directive → patterns from .gitignore are merged
#
# Environment:
#   RUN_BUILD=0 skips the `npm run build` step (useful for fast re-runs).
#   Relies on run_cli / cli_test_* helpers sourced from test-helpers.sh.

set -euo pipefail

SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
CLI_DIR="$(cd -- "$SCRIPT_DIR/.." && pwd)"
cd "$CLI_DIR"

source "$SCRIPT_DIR/test-helpers.sh"

display_test_info

RUN_BUILD="${RUN_BUILD:-1}"

cli_test_init_cli_cmd

# Everything lives under one temp dir that the trap removes on ANY exit.
WORK_DIR="$(mktemp -d "${TMPDIR:-/tmp}/livesync-cli-daemon-test.XXXXXX")"
trap 'rm -rf "$WORK_DIR"' EXIT

SETTINGS_FILE="$WORK_DIR/data.json"
VAULT_DIR="$WORK_DIR/vault"
mkdir -p "$VAULT_DIR/notes"

if [[ "$RUN_BUILD" == "1" ]]; then
    echo "[INFO] building CLI..."
    npm run build
fi

echo "[INFO] generating settings -> $SETTINGS_FILE"
cli_test_init_settings_file "$SETTINGS_FILE"
cli_test_mark_settings_configured "$SETTINGS_FILE"

PASS=0
FAIL=0
assert_pass() { echo "[PASS] $1"; PASS=$((PASS + 1)); }
assert_fail() { echo "[FAIL] $1" >&2; FAIL=$((FAIL + 1)); }

# ─────────────────────────────────────────────────────────────────────────────
# Case 1: .livesync/ignore with *.tmp → matched file should NOT appear in DB
# ─────────────────────────────────────────────────────────────────────────────
echo ""
echo "=== Case 1: .livesync/ignore *.tmp → ignored file not synced to DB ==="

mkdir -p "$VAULT_DIR/.livesync"
printf '*.tmp\n' > "$VAULT_DIR/.livesync/ignore"

# Also write a normal file so we can confirm mirror ran at all.
printf 'normal content\n' > "$VAULT_DIR/notes/normal.md"
# Write the file that should be ignored.
printf 'tmp content\n' > "$VAULT_DIR/notes/scratch.tmp"

run_cli "$VAULT_DIR" --settings "$SETTINGS_FILE" mirror

# The normal file should be in the DB.
RESULT_NORMAL="$WORK_DIR/case1-normal.txt"
if run_cli "$VAULT_DIR" --settings "$SETTINGS_FILE" pull notes/normal.md "$RESULT_NORMAL" 2>/dev/null; then
    if cmp -s "$VAULT_DIR/notes/normal.md" "$RESULT_NORMAL"; then
        assert_pass "normal.md was synced to DB"
    else
        assert_fail "normal.md content mismatch after mirror"
    fi
else
    assert_fail "normal.md was not found in DB after mirror"
fi

# The .tmp file should NOT be in the DB.
DB_LIST="$WORK_DIR/case1-ls.txt"
run_cli "$VAULT_DIR" --settings "$SETTINGS_FILE" ls > "$DB_LIST"
if grep -q "scratch.tmp" "$DB_LIST"; then
    assert_fail "scratch.tmp (ignored) was unexpectedly synced to DB"
    echo "--- DB listing ---" >&2; cat "$DB_LIST" >&2
else
    assert_pass "scratch.tmp (*.tmp pattern) was NOT synced to DB"
fi

# ─────────────────────────────────────────────────────────────────────────────
# Case 2: .livesync/ignore absent → no error, normal sync continues
# ─────────────────────────────────────────────────────────────────────────────
echo ""
echo "=== Case 2: .livesync/ignore absent → no error, sync continues ==="

# Fresh vault and settings so Case 1 state cannot leak into this case.
VAULT_DIR2="$WORK_DIR/vault2"
mkdir -p "$VAULT_DIR2/notes"
SETTINGS_FILE2="$WORK_DIR/data2.json"
cli_test_init_settings_file "$SETTINGS_FILE2"
cli_test_mark_settings_configured "$SETTINGS_FILE2"

# No .livesync directory at all.
printf 'hello\n' > "$VAULT_DIR2/notes/hello.md"

# mirror should succeed without error.
# Temporarily disable errexit so we can capture mirror's exit status instead
# of aborting the whole script on failure.
set +e
MIRROR_OUTPUT="$WORK_DIR/case2-mirror.txt"
run_cli "$VAULT_DIR2" --settings "$SETTINGS_FILE2" mirror >"$MIRROR_OUTPUT" 2>&1
MIRROR_EXIT=$?
set -e

if [[ "$MIRROR_EXIT" -ne 0 ]]; then
    assert_fail "mirror exited non-zero ($MIRROR_EXIT) when .livesync/ignore is absent"
    cat "$MIRROR_OUTPUT" >&2
else
    assert_pass "mirror succeeded when .livesync/ignore is absent"
fi

# The normal file should have been synced.
RESULT_HELLO="$WORK_DIR/case2-hello.txt"
if run_cli "$VAULT_DIR2" --settings "$SETTINGS_FILE2" pull notes/hello.md "$RESULT_HELLO" 2>/dev/null; then
    assert_pass "file synced normally when .livesync/ignore is absent"
else
    assert_fail "file was not synced when .livesync/ignore is absent"
fi

# ─────────────────────────────────────────────────────────────────────────────
# Case 3: import: .gitignore merges patterns
# ─────────────────────────────────────────────────────────────────────────────
echo ""
echo "=== Case 3: import: .gitignore directive merges patterns ==="

# Fresh vault and settings again, isolated from the earlier cases.
VAULT_DIR3="$WORK_DIR/vault3"
mkdir -p "$VAULT_DIR3/notes"
SETTINGS_FILE3="$WORK_DIR/data3.json"
cli_test_init_settings_file "$SETTINGS_FILE3"
cli_test_mark_settings_configured "$SETTINGS_FILE3"

mkdir -p "$VAULT_DIR3/.livesync"
printf 'import: .gitignore\n' > "$VAULT_DIR3/.livesync/ignore"
printf '# gitignore comment\n*.log\nbuild/\n' > "$VAULT_DIR3/.gitignore"

printf 'regular note\n' > "$VAULT_DIR3/notes/regular.md"
printf 'log content\n' > "$VAULT_DIR3/notes/debug.log"

run_cli "$VAULT_DIR3" --settings "$SETTINGS_FILE3" mirror

DB_LIST3="$WORK_DIR/case3-ls.txt"
run_cli "$VAULT_DIR3" --settings "$SETTINGS_FILE3" ls > "$DB_LIST3"

if grep -q "debug.log" "$DB_LIST3"; then
    assert_fail "debug.log (ignored via .gitignore import) was unexpectedly synced to DB"
    echo "--- DB listing ---" >&2; cat "$DB_LIST3" >&2
else
    assert_pass "debug.log (*.log from imported .gitignore) was NOT synced to DB"
fi

# regular.md should still be present.
if grep -q "regular.md" "$DB_LIST3"; then
    assert_pass "regular.md was synced normally alongside .gitignore import rules"
else
    assert_fail "regular.md was NOT synced — .gitignore import may have been too broad"
fi

# ─────────────────────────────────────────────────────────────────────────────
# Summary
# ─────────────────────────────────────────────────────────────────────────────
echo ""
echo "Results: PASS=$PASS FAIL=$FAIL"
if [[ "$FAIL" -gt 0 ]]; then
    exit 1
fi

View File

@@ -29,7 +29,8 @@ export async function runScenario(remoteType: RemoteType, encrypt: boolean): Pro
const dbPrefix = remoteType === "COUCHDB" ? requireEnv("COUCHDB_DBNAME", "dbname") : ""; const dbPrefix = remoteType === "COUCHDB" ? requireEnv("COUCHDB_DBNAME", "dbname") : "";
const dbname = remoteType === "COUCHDB" ? `${dbPrefix}-${dbSuffix}` : ""; const dbname = remoteType === "COUCHDB" ? `${dbPrefix}-${dbSuffix}` : "";
const minioEndpoint = remoteType === "MINIO" ? requireEnv("MINIO_ENDPOINT", "minioEndpoint").replace(/\/$/, "") : ""; const minioEndpoint =
remoteType === "MINIO" ? requireEnv("MINIO_ENDPOINT", "minioEndpoint").replace(/\/$/, "") : "";
const minioAccessKey = remoteType === "MINIO" ? requireEnv("MINIO_ACCESS_KEY", "accessKey") : ""; const minioAccessKey = remoteType === "MINIO" ? requireEnv("MINIO_ACCESS_KEY", "accessKey") : "";
const minioSecretKey = remoteType === "MINIO" ? requireEnv("MINIO_SECRET_KEY", "secretKey") : ""; const minioSecretKey = remoteType === "MINIO" ? requireEnv("MINIO_SECRET_KEY", "secretKey") : "";
const minioBucketBase = remoteType === "MINIO" ? requireEnv("MINIO_BUCKET_NAME", "bucketName") : ""; const minioBucketBase = remoteType === "MINIO" ? requireEnv("MINIO_BUCKET_NAME", "bucketName") : "";

View File

@@ -11,11 +11,55 @@ const defaultExternal = [
"crypto", "crypto",
"pouchdb-adapter-leveldb", "pouchdb-adapter-leveldb",
"commander", "commander",
"chokidar",
"punycode", "punycode",
"werift", "werift",
]; ];
// Polyfill FileReader at the very top of the CJS bundle. octagonal-wheels uses
// FileReader for base64 conversion when Uint8Array.toBase64 (TC39 proposal) is
// unavailable. Node.js has neither, so we inject a minimal FileReader shim before
// any module-scope code evaluates.
// NOTE: only readAsDataURL is implemented; every other FileReader method throws
// so that an accidental use fails loudly instead of silently misbehaving.
const fileReaderPolyfillBanner = `
if (typeof globalThis.FileReader === "undefined") {
globalThis.FileReader = class FileReader {
constructor() { this.result = null; this.onload = null; this.onerror = null; }
readAsDataURL(blob) {
blob.arrayBuffer().then((buf) => {
var b64 = require("buffer").Buffer.from(buf).toString("base64");
this.result = "data:" + (blob.type || "application/octet-stream") + ";base64," + b64;
if (this.onload) this.onload({ target: this });
}).catch((err) => { if (this.onerror) this.onerror({ target: this, error: err }); });
}
readAsArrayBuffer() { throw new Error("FileReader.readAsArrayBuffer is not implemented in this polyfill"); }
readAsBinaryString() { throw new Error("FileReader.readAsBinaryString is not implemented in this polyfill"); }
readAsText() { throw new Error("FileReader.readAsText is not implemented in this polyfill"); }
abort() { throw new Error("FileReader.abort is not implemented in this polyfill"); }
};
}
`;
// Vite plugin: prepends the FileReader polyfill to every emitted chunk whose
// file name starts with "entrypoint", inserting it after the shebang line (if
// any) so the shim is in place before any module-scope code runs.
function injectBanner(): import("vite").Plugin {
return {
name: "inject-banner",
generateBundle(_options, bundle) {
for (const chunk of Object.values(bundle)) {
// Only mutate real code chunks for the entrypoint; assets are untouched.
if (chunk.type === "chunk" && chunk.fileName.startsWith("entrypoint")) {
// Insert after the shebang line if present, otherwise at the top.
if (chunk.code.startsWith("#!")) {
const newline = chunk.code.indexOf("\n");
chunk.code =
chunk.code.slice(0, newline + 1) + fileReaderPolyfillBanner + chunk.code.slice(newline + 1);
} else {
chunk.code = fileReaderPolyfillBanner + chunk.code;
}
}
}
},
};
}
export default defineConfig({ export default defineConfig({
plugins: [svelte()], plugins: [svelte(), injectBanner()],
resolve: { resolve: {
alias: { alias: {
"@lib/worker/bgWorker.ts": "../../lib/src/worker/bgWorker.mock.ts", "@lib/worker/bgWorker.ts": "../../lib/src/worker/bgWorker.mock.ts",

View File

@@ -41,7 +41,7 @@ async function renderHistoryList(): Promise<VaultHistoryItem[]> {
const [items, lastUsedId] = await Promise.all([historyStore.getVaultHistory(), historyStore.getLastUsedVaultId()]); const [items, lastUsedId] = await Promise.all([historyStore.getVaultHistory(), historyStore.getLastUsedVaultId()]);
listEl.innerHTML = ""; listEl.replaceChildren();
emptyEl.classList.toggle("is-hidden", items.length > 0); emptyEl.classList.toggle("is-hidden", items.length > 0);
for (const item of items) { for (const item of items) {

Submodule src/lib updated: 97530553a6...ed4502e003

View File

@@ -1,5 +1,6 @@
import { Notice, Plugin, type App, type PluginManifest } from "./deps"; import { getLanguage, Notice, Plugin, type App, type PluginManifest } from "./deps";
import { setGetLanguage } from "./lib/src/common/coreEnvFunctions.ts";
setGetLanguage(getLanguage);
import { LiveSyncCommands } from "./features/LiveSyncCommands.ts"; import { LiveSyncCommands } from "./features/LiveSyncCommands.ts";
import { HiddenFileSync } from "./features/HiddenFileSync/CmdHiddenFileSync.ts"; import { HiddenFileSync } from "./features/HiddenFileSync/CmdHiddenFileSync.ts";
import { ConfigSync } from "./features/ConfigSync/CmdConfigSync.ts"; import { ConfigSync } from "./features/ConfigSync/CmdConfigSync.ts";

View File

@@ -1,6 +1,6 @@
import { TFile, Modal, App, DIFF_DELETE, DIFF_EQUAL, DIFF_INSERT, diff_match_patch } from "../../../deps.ts"; import { TFile, Modal, App, DIFF_DELETE, DIFF_EQUAL, DIFF_INSERT, diff_match_patch } from "../../../deps.ts";
import { getPathFromTFile, isValidPath } from "../../../common/utils.ts"; import { getPathFromTFile, isValidPath } from "../../../common/utils.ts";
import { decodeBinary, escapeStringToHTML, readString } from "../../../lib/src/string_and_binary/convert.ts"; import { decodeBinary, readString } from "../../../lib/src/string_and_binary/convert.ts";
import ObsidianLiveSyncPlugin from "../../../main.ts"; import ObsidianLiveSyncPlugin from "../../../main.ts";
import { import {
type DocumentID, type DocumentID,
@@ -145,22 +145,66 @@ export class DocumentHistoryModal extends Modal {
return v; return v;
} }
prepareContentView(usePreformatted = true) {
this.contentView.empty();
this.contentView.toggleClass("op-pre", usePreformatted);
}
appendTextDiff(diff: [number, string][]) {
for (const [operation, text] of diff) {
if (operation == DIFF_DELETE) {
this.contentView.createSpan({ text, cls: "history-deleted" });
} else if (operation == DIFF_EQUAL) {
this.contentView.createSpan({ text, cls: "history-normal" });
} else if (operation == DIFF_INSERT) {
this.contentView.createSpan({ text, cls: "history-added" });
}
}
}
appendImageDiff(baseSrc: string, overlaySrc?: string) {
const wrap = this.contentView.createDiv({ cls: "ls-imgdiff-wrap" });
const overlay = wrap.createDiv({ cls: "overlay" });
overlay.createEl("img", { cls: "img-base" }, (img) => {
img.src = baseSrc;
});
if (overlaySrc) {
overlay.createEl("img", { cls: "img-overlay" }, (img) => {
img.src = overlaySrc;
});
}
}
appendDeletedNotice(usePreformatted = true) {
const notice = "(At this revision, the file has been deleted)";
if (usePreformatted) {
this.contentView.appendText(`${notice}\n`);
} else {
this.contentView.createDiv({ text: notice });
}
}
async showExactRev(rev: string) { async showExactRev(rev: string) {
const db = this.core.localDatabase; const db = this.core.localDatabase;
const w = await db.getDBEntry(this.file, { rev: rev }, false, false, true); const w = await db.getDBEntry(this.file, { rev: rev }, false, false, true);
this.currentText = ""; this.currentText = "";
this.currentDeleted = false; this.currentDeleted = false;
this.prepareContentView();
if (w === false) { if (w === false) {
this.currentDeleted = true; this.currentDeleted = true;
this.info.innerHTML = ""; this.info.empty();
this.contentView.innerHTML = `Could not read this revision<br>(${rev})`; this.contentView.appendText("Could not read this revision");
this.contentView.createEl("br");
this.contentView.appendText(`(${rev})`);
} else { } else {
this.currentDoc = w; this.currentDoc = w;
this.info.innerHTML = `Modified:${new Date(w.mtime).toLocaleString()}`; this.info.setText(`Modified:${new Date(w.mtime).toLocaleString()}`);
let result = undefined;
const w1data = readDocument(w); const w1data = readDocument(w);
this.currentDeleted = !!w.deleted; this.currentDeleted = !!w.deleted;
// this.currentText = w1data; if (typeof w1data == "string") {
this.currentText = w1data;
}
let rendered = false;
if (this.showDiff) { if (this.showDiff) {
const prevRevIdx = this.revs_info.length - 1 - ((this.range.value as any) / 1 - 1); const prevRevIdx = this.revs_info.length - 1 - ((this.range.value as any) / 1 - 1);
if (prevRevIdx >= 0 && prevRevIdx < this.revs_info.length) { if (prevRevIdx >= 0 && prevRevIdx < this.revs_info.length) {
@@ -168,58 +212,55 @@ export class DocumentHistoryModal extends Modal {
const w2 = await db.getDBEntry(this.file, { rev: oldRev }, false, false, true); const w2 = await db.getDBEntry(this.file, { rev: oldRev }, false, false, true);
if (w2 != false) { if (w2 != false) {
if (typeof w1data == "string") { if (typeof w1data == "string") {
result = ""; const w2data = readDocument(w2);
const dmp = new diff_match_patch(); if (typeof w2data == "string") {
const w2data = readDocument(w2) as string; const dmp = new diff_match_patch();
const diff = dmp.diff_main(w2data, w1data); const diff = dmp.diff_main(w2data, w1data);
dmp.diff_cleanupSemantic(diff); dmp.diff_cleanupSemantic(diff);
for (const v of diff) { if (this.currentDeleted) {
const x1 = v[0]; this.appendDeletedNotice();
const x2 = v[1];
if (x1 == DIFF_DELETE) {
result += "<span class='history-deleted'>" + escapeStringToHTML(x2) + "</span>";
} else if (x1 == DIFF_EQUAL) {
result += "<span class='history-normal'>" + escapeStringToHTML(x2) + "</span>";
} else if (x1 == DIFF_INSERT) {
result += "<span class='history-added'>" + escapeStringToHTML(x2) + "</span>";
} }
this.appendTextDiff(diff);
rendered = true;
} }
result = result.replace(/\n/g, "<br>");
} else if (isImage(this.file)) { } else if (isImage(this.file)) {
const src = this.generateBlobURL("base", w1data); const src = this.generateBlobURL("base", w1data);
const overlay = this.generateBlobURL( const overlay = this.generateBlobURL(
"overlay", "overlay",
readDocument(w2) as Uint8Array<ArrayBuffer> readDocument(w2) as Uint8Array<ArrayBuffer>
); );
result = `<div class='ls-imgdiff-wrap'> this.prepareContentView(false);
<div class='overlay'> if (this.currentDeleted) {
<img class='img-base' src="${src}"> this.appendDeletedNotice(false);
<img class='img-overlay' src='${overlay}'> }
</div> this.appendImageDiff(src, overlay);
</div>`; rendered = true;
this.contentView.removeClass("op-pre");
} }
} }
} }
} }
if (result == undefined) { if (!rendered) {
if (typeof w1data != "string") { if (typeof w1data != "string") {
if (isImage(this.file)) { if (isImage(this.file)) {
const src = this.generateBlobURL("base", w1data); const src = this.generateBlobURL("base", w1data);
result = `<div class='ls-imgdiff-wrap'> this.prepareContentView(false);
<div class='overlay'> if (this.currentDeleted) {
<img class='img-base' src="${src}"> this.appendDeletedNotice(false);
</div> }
</div>`; this.appendImageDiff(src);
this.contentView.removeClass("op-pre"); } else {
if (this.currentDeleted) {
this.appendDeletedNotice();
}
this.contentView.appendText("Binary file");
} }
} else { } else {
result = escapeStringToHTML(w1data); if (this.currentDeleted) {
this.appendDeletedNotice();
}
this.contentView.appendText(w1data);
} }
} }
if (result == undefined) result = typeof w1data == "string" ? escapeStringToHTML(w1data) : "Binary file";
this.contentView.innerHTML =
(this.currentDeleted ? "(At this revision, the file has been deleted)\n" : "") + result;
} }
// Reset diff navigation after content changes // Reset diff navigation after content changes
this.resetDiffNavigation(); this.resetDiffNavigation();
@@ -245,8 +286,7 @@ export class DocumentHistoryModal extends Modal {
if (direction === "next") { if (direction === "next") {
this.currentDiffIndex = (this.currentDiffIndex + 1) % diffElements.length; this.currentDiffIndex = (this.currentDiffIndex + 1) % diffElements.length;
} else { } else {
this.currentDiffIndex = this.currentDiffIndex = this.currentDiffIndex <= 0 ? diffElements.length - 1 : this.currentDiffIndex - 1;
this.currentDiffIndex <= 0 ? diffElements.length - 1 : this.currentDiffIndex - 1;
} }
const target = diffElements[this.currentDiffIndex]; const target = diffElements[this.currentDiffIndex];

View File

@@ -1,7 +1,6 @@
import { App, Modal } from "../../../deps.ts"; import { App, Modal } from "../../../deps.ts";
import { DIFF_DELETE, DIFF_EQUAL, DIFF_INSERT } from "diff-match-patch"; import { DIFF_DELETE, DIFF_EQUAL, DIFF_INSERT } from "diff-match-patch";
import { CANCELLED, LEAVE_TO_SUBSEQUENT, type diff_result } from "../../../lib/src/common/types.ts"; import { CANCELLED, LEAVE_TO_SUBSEQUENT, type diff_result } from "../../../lib/src/common/types.ts";
import { escapeStringToHTML } from "../../../lib/src/string_and_binary/convert.ts";
import { delay } from "../../../lib/src/common/utils.ts"; import { delay } from "../../../lib/src/common/utils.ts";
import { eventHub } from "../../../common/events.ts"; import { eventHub } from "../../../common/events.ts";
import { globalSlipBoard } from "../../../lib/src/bureau/bureau.ts"; import { globalSlipBoard } from "../../../lib/src/bureau/bureau.ts";
@@ -44,6 +43,25 @@ export class ConflictResolveModal extends Modal {
// sendValue("close-resolve-conflict:" + this.filename, false); // sendValue("close-resolve-conflict:" + this.filename, false);
} }
appendDiffFragment(container: HTMLDivElement, text: string, cls: string) {
const lines = text.split("\n");
lines.forEach((line, index) => {
const span = container.createSpan({ cls });
span.textContent = line;
if (index < lines.length - 1) {
container.createSpan({ cls: "ls-mark-cr" });
container.createEl("br");
}
});
}
appendVersionInfo(container: HTMLDivElement, cls: string, name: string, date: string) {
const line = container.createSpan({ cls });
line.createSpan({ text: name, cls: "conflict-dev-name" });
line.appendText(`: ${date}`);
container.createEl("br");
}
override onOpen() { override onOpen() {
const { contentEl } = this; const { contentEl } = this;
// Send cancel signal for the previous merge dialogue // Send cancel signal for the previous merge dialogue
@@ -64,25 +82,21 @@ export class ConflictResolveModal extends Modal {
const div = contentEl.createDiv(""); const div = contentEl.createDiv("");
div.addClass("op-scrollable"); div.addClass("op-scrollable");
div.addClass("ls-dialog"); div.addClass("ls-dialog");
let diff = ""; let diffLength = 0;
for (const v of this.result.diff) { for (const v of this.result.diff) {
const x1 = v[0]; const x1 = v[0];
const x2 = v[1]; const x2 = v[1];
diffLength += x2.length;
if (diffLength > 100 * 1024) {
continue;
}
if (x1 == DIFF_DELETE) { if (x1 == DIFF_DELETE) {
diff += this.appendDiffFragment(div, x2, "deleted");
"<span class='deleted'>" + div.createEl("span", { text: x2, cls: "deleted normal conflict-dev-name" });
escapeStringToHTML(x2).replace(/\n/g, "<span class='ls-mark-cr'></span>\n") +
"</span>";
} else if (x1 == DIFF_EQUAL) { } else if (x1 == DIFF_EQUAL) {
diff += this.appendDiffFragment(div, x2, "normal");
"<span class='normal'>" +
escapeStringToHTML(x2).replace(/\n/g, "<span class='ls-mark-cr'></span>\n") +
"</span>";
} else if (x1 == DIFF_INSERT) { } else if (x1 == DIFF_INSERT) {
diff += this.appendDiffFragment(div, x2, "added");
"<span class='added'>" +
escapeStringToHTML(x2).replace(/\n/g, "<span class='ls-mark-cr'></span>\n") +
"</span>";
} }
} }
@@ -92,8 +106,8 @@ export class ConflictResolveModal extends Modal {
new Date(this.result.left.mtime).toLocaleString() + (this.result.left.deleted ? " (Deleted)" : ""); new Date(this.result.left.mtime).toLocaleString() + (this.result.left.deleted ? " (Deleted)" : "");
const date2 = const date2 =
new Date(this.result.right.mtime).toLocaleString() + (this.result.right.deleted ? " (Deleted)" : ""); new Date(this.result.right.mtime).toLocaleString() + (this.result.right.deleted ? " (Deleted)" : "");
div2.innerHTML = `<span class='deleted'><span class='conflict-dev-name'>${this.localName}</span>: ${date1}</span><br> this.appendVersionInfo(div2, "deleted", this.localName, date1);
<span class='added'><span class='conflict-dev-name'>${this.remoteName}</span>: ${date2}</span><br>`; this.appendVersionInfo(div2, "added", this.remoteName, date2);
contentEl.createEl("button", { text: `Use ${this.localName}` }, (e) => contentEl.createEl("button", { text: `Use ${this.localName}` }, (e) =>
e.addEventListener("click", () => this.sendResponse(this.result.right.rev)) e.addEventListener("click", () => this.sendResponse(this.result.right.rev))
).style.marginRight = "4px"; ).style.marginRight = "4px";
@@ -108,11 +122,9 @@ export class ConflictResolveModal extends Modal {
contentEl.createEl("button", { text: !this.pluginPickMode ? "Not now" : "Cancel" }, (e) => contentEl.createEl("button", { text: !this.pluginPickMode ? "Not now" : "Cancel" }, (e) =>
e.addEventListener("click", () => this.sendResponse(CANCELLED)) e.addEventListener("click", () => this.sendResponse(CANCELLED))
).style.marginRight = "4px"; ).style.marginRight = "4px";
diff = diff.replace(/\n/g, "<br>"); if (diffLength > 100 * 1024) {
if (diff.length > 100 * 1024) { div.empty();
div.innerText = "(Too large diff to display)"; div.innerText = "(Too large diff to display)";
} else {
div.innerHTML = diff;
} }
} }

View File

@@ -43,10 +43,13 @@ export function paneChangeLog(this: ObsidianLiveSyncSettingTab, paneEl: HTMLElem
// tmpDiv.addClass("sls-header-button"); // tmpDiv.addClass("sls-header-button");
tmpDiv.addClass("op-warn-info"); tmpDiv.addClass("op-warn-info");
tmpDiv.innerHTML = `<p>${$msg("obsidianLiveSyncSettingTab.msgNewVersionNote")}</p><button>${$msg("obsidianLiveSyncSettingTab.optionOkReadEverything")}</button>`; tmpDiv.createEl("p", { text: $msg("obsidianLiveSyncSettingTab.msgNewVersionNote") });
const readEverythingButton = tmpDiv.createEl("button", {
text: $msg("obsidianLiveSyncSettingTab.optionOkReadEverything"),
});
if (lastVersion > (this.editingSettings?.lastReadUpdates || 0)) { if (lastVersion > (this.editingSettings?.lastReadUpdates || 0)) {
const informationButtonDiv = informationDivEl.appendChild(tmpDiv); const informationButtonDiv = informationDivEl.appendChild(tmpDiv);
informationButtonDiv.querySelector("button")?.addEventListener("click", () => { readEverythingButton.addEventListener("click", () => {
fireAndForget(async () => { fireAndForget(async () => {
this.editingSettings.lastReadUpdates = lastVersion; this.editingSettings.lastReadUpdates = lastVersion;
await this.saveAllDirtySettings(); await this.saveAllDirtySettings();

View File

@@ -121,13 +121,13 @@ export function paneSetup(
const repo = "vrtmrz/obsidian-livesync"; const repo = "vrtmrz/obsidian-livesync";
const topPath = $msg("obsidianLiveSyncSettingTab.linkTroubleshooting"); const topPath = $msg("obsidianLiveSyncSettingTab.linkTroubleshooting");
const rawRepoURI = `https://raw.githubusercontent.com/${repo}/main`; const rawRepoURI = `https://raw.githubusercontent.com/${repo}/main`;
this.createEl( this.createEl(paneEl, "div", "", (el) => {
paneEl, el.createEl("a", { text: $msg("obsidianLiveSyncSettingTab.linkOpenInBrowser") }, (anchor) => {
"div", anchor.href = `https://github.com/${repo}/blob/main${topPath}`;
"", anchor.target = "_blank";
(el) => anchor.rel = "noopener";
(el.innerHTML = `<a href='https://github.com/${repo}/blob/main${topPath}' target="_blank">${$msg("obsidianLiveSyncSettingTab.linkOpenInBrowser")}</a>`) });
); });
const troubleShootEl = this.createEl(paneEl, "div", { const troubleShootEl = this.createEl(paneEl, "div", {
text: "", text: "",
cls: "sls-troubleshoot-preview", cls: "sls-troubleshoot-preview",

View File

@@ -13,7 +13,7 @@ export const checkConfig = async (
Logger($msg("obsidianLiveSyncSettingTab.logCheckingDbConfig"), LOG_LEVEL_INFO); Logger($msg("obsidianLiveSyncSettingTab.logCheckingDbConfig"), LOG_LEVEL_INFO);
let isSuccessful = true; let isSuccessful = true;
const emptyDiv = createDiv(); const emptyDiv = createDiv();
emptyDiv.innerHTML = "<span></span>"; emptyDiv.createSpan();
checkResultDiv?.replaceChildren(...[emptyDiv]); checkResultDiv?.replaceChildren(...[emptyDiv]);
const addResult = (msg: string, classes?: string[]) => { const addResult = (msg: string, classes?: string[]) => {
const tmpDiv = createDiv(); const tmpDiv = createDiv();
@@ -21,7 +21,7 @@ export const checkConfig = async (
if (classes) { if (classes) {
tmpDiv.addClasses(classes); tmpDiv.addClasses(classes);
} }
tmpDiv.innerHTML = `${msg}`; tmpDiv.textContent = msg;
checkResultDiv?.appendChild(tmpDiv); checkResultDiv?.appendChild(tmpDiv);
}; };
try { try {
@@ -47,9 +47,10 @@ export const checkConfig = async (
if (!checkResultDiv) return; if (!checkResultDiv) return;
const tmpDiv = createDiv(); const tmpDiv = createDiv();
tmpDiv.addClass("ob-btn-config-fix"); tmpDiv.addClass("ob-btn-config-fix");
tmpDiv.innerHTML = `<label>${title}</label><button>${$msg("obsidianLiveSyncSettingTab.btnFix")}</button>`; tmpDiv.createEl("label", { text: title });
const fixButton = tmpDiv.createEl("button", { text: $msg("obsidianLiveSyncSettingTab.btnFix") });
const x = checkResultDiv.appendChild(tmpDiv); const x = checkResultDiv.appendChild(tmpDiv);
x.querySelector("button")?.addEventListener("click", () => { fixButton.addEventListener("click", () => {
fireAndForget(async () => { fireAndForget(async () => {
Logger($msg("obsidianLiveSyncSettingTab.logCouchDbConfigSet", { title, key, value })); Logger($msg("obsidianLiveSyncSettingTab.logCouchDbConfigSet", { title, key, value }));
const res = await requestToCouchDBWithCredentials( const res = await requestToCouchDBWithCredentials(

View File

@@ -4,10 +4,10 @@
import Decision from "@/lib/src/UI/components/Decision.svelte"; import Decision from "@/lib/src/UI/components/Decision.svelte";
import Instruction from "@/lib/src/UI/components/Instruction.svelte"; import Instruction from "@/lib/src/UI/components/Instruction.svelte";
import UserDecisions from "@/lib/src/UI/components/UserDecisions.svelte"; import UserDecisions from "@/lib/src/UI/components/UserDecisions.svelte";
const TYPE_CLOSE = "close"; const TYPE_CLOSE = "close";
type ResultType = typeof TYPE_CLOSE; type ResultType = typeof TYPE_CLOSE;
type Props = { type Props = {
setResult: (result: ResultType) => void; setResult: (_result: ResultType) => void;
}; };
const { setResult }: Props = $props(); const { setResult }: Props = $props();
</script> </script>

View File

@@ -61,10 +61,12 @@ export class ModuleLiveSyncMain extends AbstractModule {
eventHub.onEvent(EVENT_SETTING_SAVED, (settings: ObsidianLiveSyncSettings) => { eventHub.onEvent(EVENT_SETTING_SAVED, (settings: ObsidianLiveSyncSettings) => {
fireAndForget(async () => { fireAndForget(async () => {
try { try {
await this.core.services.control.applySettings(); const lang = this.core.services.setting.currentSettings()?.displayLanguage;
const lang = this.core.services.setting.currentSettings()?.displayLanguage ?? undefined;
if (lang !== undefined) { if (lang !== undefined) {
setLang(this.core.services.setting.currentSettings()?.displayLanguage); setLang(lang);
}
if (this.core.services.database.isDatabaseReady()) {
await this.core.services.control.applySettings();
} }
eventHub.emitEvent(EVENT_REQUEST_RELOAD_SETTING_TAB); eventHub.emitEvent(EVENT_REQUEST_RELOAD_SETTING_TAB);
} catch (e) { } catch (e) {

View File

@@ -3,11 +3,22 @@ import { createServiceFeature } from "@lib/interfaces/ServiceModule";
import { SUPPORTED_I18N_LANGS, type I18N_LANGS } from "@lib/common/rosetta"; import { SUPPORTED_I18N_LANGS, type I18N_LANGS } from "@lib/common/rosetta";
import { $msg, setLang } from "@lib/common/i18n"; import { $msg, setLang } from "@lib/common/i18n";
function tryGetLanguage() {
try {
// Note: 1.8.7+ is required. but it is 18, Feb., 2025. we want to fallback on earlier versions, so we catch the error here.
// eslint-disable-next-line obsidianmd/no-unsupported-api
return getLanguage();
} catch (e) {
console.error("Failed to get Obsidian language, defaulting to 'def'", e);
return "en";
}
}
export const enableI18nFeature = createServiceFeature(async ({ services: { setting, API } }) => { export const enableI18nFeature = createServiceFeature(async ({ services: { setting, API } }) => {
let isChanged = false; let isChanged = false;
const settings = setting.currentSettings(); const settings = setting.currentSettings();
if (settings.displayLanguage == "") { if (settings.displayLanguage == "") {
const obsidianLanguage = getLanguage(); const obsidianLanguage = tryGetLanguage();
if ( if (
SUPPORTED_I18N_LANGS.indexOf(obsidianLanguage) !== -1 && // Check if the language is supported SUPPORTED_I18N_LANGS.indexOf(obsidianLanguage) !== -1 && // Check if the language is supported
obsidianLanguage != settings.displayLanguage // Check if the language is different from the current setting obsidianLanguage != settings.displayLanguage // Check if the language is different from the current setting

View File

@@ -5,6 +5,7 @@ import { type UseP2PReplicatorResult } from "@/lib/src/replication/trystero/UseP
import { P2PLogCollector } from "@/lib/src/replication/trystero/P2PLogCollector"; import { P2PLogCollector } from "@/lib/src/replication/trystero/P2PLogCollector";
import { P2PReplicatorPaneView, VIEW_TYPE_P2P } from "@/features/P2PSync/P2PReplicator/P2PReplicatorPaneView"; import { P2PReplicatorPaneView, VIEW_TYPE_P2P } from "@/features/P2PSync/P2PReplicator/P2PReplicatorPaneView";
import type { LiveSyncCore } from "@/main"; import type { LiveSyncCore } from "@/main";
import type { WorkspaceLeaf } from "@/deps";
/** /**
* ServiceFeature: P2P Replicator lifecycle management. * ServiceFeature: P2P Replicator lifecycle management.
@@ -43,7 +44,7 @@ export function useP2PReplicatorUI(
// Register view, commands and ribbon if a view factory is provided // Register view, commands and ribbon if a view factory is provided
const viewType = VIEW_TYPE_P2P; const viewType = VIEW_TYPE_P2P;
const factory = (leaf: any) => { const factory = (leaf: WorkspaceLeaf) => {
return new P2PReplicatorPaneView(leaf, core, { return new P2PReplicatorPaneView(leaf, core, {
replicator: getReplicator(), replicator: getReplicator(),
p2pLogCollector, p2pLogCollector,

View File

@@ -38,10 +38,20 @@ export class ObsidianVaultAdapter implements IVaultAdapter<TFile> {
} }
async delete(file: TFile | TFolder, force = false): Promise<void> { async delete(file: TFile | TFolder, force = false): Promise<void> {
// if ("trashFile" in this.app.fileManager) {
// // eslint-disable-next-line obsidianmd/no-unsupported-api
// return await this.app.fileManager.trashFile(file);
// }
//TODO: need fix
return await this.app.vault.delete(file, force); return await this.app.vault.delete(file, force);
} }
async trash(file: TFile | TFolder, force = false): Promise<void> { async trash(file: TFile | TFolder, force = false): Promise<void> {
// if ("trashFile" in this.app.fileManager) {
// // eslint-disable-next-line obsidianmd/no-unsupported-api
// return await this.app.fileManager.trashFile(file);
// }
//TODO: need fix
return await this.app.vault.trash(file, force); return await this.app.vault.trash(file, force);
} }

View File

@@ -3,6 +3,57 @@ Since 19th July, 2025 (beta1 in 0.25.0-beta1, 13th July, 2025)
The head note of 0.25 is now in [updates_old.md](https://github.com/vrtmrz/obsidian-livesync/blob/main/updates_old.md). Because 0.25 got a lot of updates, thankfully, compatibility is kept and we do not need breaking changes! In other words, when get enough stabled. The next version will be v1.0.0. Even though it my hope. The head note of 0.25 is now in [updates_old.md](https://github.com/vrtmrz/obsidian-livesync/blob/main/updates_old.md). Because 0.25 got a lot of updates, thankfully, compatibility is kept and we do not need breaking changes! In other words, when get enough stabled. The next version will be v1.0.0. Even though it my hope.
## 0.25.62
14th May, 2026
### Fixed
- Fixed an issue where a connection could not be established when attempting to connect to a brand-new remote database without going through the set-up wizard or configuration checking (#660).
## 0.25.61
13th May, 2026
Reviews have started on the Obsidian Community, haven't they? It was quite a struggle, what with having to fix the outdated ESLint.
I am a bit nervous, but it is far better than just plodding along aimlessly, so let us get on with it. If you spot any issues, please let me know straight away.
From now on, I am avoiding committing directly to the main branch. This is because you lot have all been sending so many PRs. I wanted to keep things harmonious.
That said, I am still not used to rebasing, so there are some parts where the commit history is a right mess. I will work on improving that.
### Improved
- P2P synchronisation has been made more robust
Now the foundation for P2P synchronisation has been rewritten, and unit tests have been added. The foundation has been separated into a transport layer, a signalling-and-connection layer, and an RPC layer, and each layer has been unit-tested. As a result, P2P synchronisation now uses a robust shim built on RPC-ed PouchDB synchronisation, in contrast to the previous implementation.
This P2P synchronisation is not compatible with previous versions in terms of connectivity. All devices must be updated.
### Fixed
- Baffling errors no longer occur when a settings update is triggered during the early stages of initialisation.
- Network error notice pop-ups are now suppressed when 'NetworkWarningStyle' is set to 'Hidden'. (Thank you so much @SeleiXi!)
### New features
- Diff navigation buttons have been added to the diff view, making it easier to move between differences. (Thank you so much @SeleiXi! #871)
### Translations
- Chinese (Simplified) translations for settings and the Setup Wizard have been added. (Thank you so much @zombiek731!)
- Common UI controls and signal words are now localised into Chinese (Simplified). (Thank you so much @zombiek731!)
- i18n runtime behaviour and locale coverage have been improved. (Thank you so much @52sanmao!)
### CLI
#### New features
- Daemon synchronisation is now supported. (Thank you so much @andrewleech! #843)
- `HeadlessConfirm` has been implemented with sensible defaults, enabling unattended operation in headless environments. (Thank you so much @andrewleech!)
- The CLI onboarding experience has been improved. (Thank you so much @OriBoharon! #872)
#### Fixed
- Sub-millisecond CLI mtimes are now truncated to prevent mobile crash. (Thank you so much @brian-spackman! #893)
## 0.25.60 ## 0.25.60
29th April, 2026 29th April, 2026

17
version-bump.mjs Normal file
View File

@@ -0,0 +1,17 @@
import { readFileSync, writeFileSync } from "fs";
const targetVersion = process.env.npm_package_version;
// read minAppVersion from manifest.json and bump version to target version
const manifest = JSON.parse(readFileSync("manifest.json", "utf8"));
const { minAppVersion } = manifest;
manifest.version = targetVersion;
writeFileSync("manifest.json", JSON.stringify(manifest, null, "\t"));
// update versions.json with target version and minAppVersion from manifest.json
// but only if the target version is not already in versions.json
const versions = JSON.parse(readFileSync('versions.json', 'utf8'));
if (!Object.values(versions).includes(minAppVersion)) {
versions[targetVersion] = minAppVersion;
writeFileSync('versions.json', JSON.stringify(versions, null, '\t'));
}

View File

@@ -1,4 +1,6 @@
{ {
"0.25.61": "1.7.2",
"0.25.60": "1.7.2",
"1.0.1": "0.9.12", "1.0.1": "0.9.12",
"1.0.0": "0.9.7" "1.0.0": "0.9.7"
} }

View File

@@ -2,7 +2,8 @@ import { defineConfig, mergeConfig } from "vitest/config";
import { playwright } from "@vitest/browser-playwright"; import { playwright } from "@vitest/browser-playwright";
import viteConfig from "./vitest.config.common"; import viteConfig from "./vitest.config.common";
import path from "path"; import path from "path";
import dotenv from "dotenv"; import { existsSync, readFileSync } from "node:fs";
import { parseEnv } from "node:util";
import { grantClipboardPermissions, writeHandoffFile, readHandoffFile } from "./test/lib/commands"; import { grantClipboardPermissions, writeHandoffFile, readHandoffFile } from "./test/lib/commands";
// P2P test environment variables // P2P test environment variables
@@ -22,8 +23,9 @@ import { grantClipboardPermissions, writeHandoffFile, readHandoffFile } from "./
// General test options (also read from env): // General test options (also read from env):
// ENABLE_DEBUGGER - Set to "true" to attach a debugger and pause before tests // ENABLE_DEBUGGER - Set to "true" to attach a debugger and pause before tests
// ENABLE_UI - Set to "true" to open a visible browser window during tests // ENABLE_UI - Set to "true" to open a visible browser window during tests
const defEnv = dotenv.config({ path: ".env" }).parsed; const loadEnvFile = (path: string) => (existsSync(path) ? parseEnv(readFileSync(path, "utf-8")) : undefined);
const testEnv = dotenv.config({ path: ".test.env" }).parsed; const defEnv = loadEnvFile(".env");
const testEnv = loadEnvFile(".test.env");
// Merge: dotenv files < process.env (so shell-injected vars like P2P_TEST_* take precedence)
const p2pEnv: Record<string, string> = {};
if (process.env.P2P_TEST_ROOM_ID) p2pEnv.P2P_TEST_ROOM_ID = process.env.P2P_TEST_ROOM_ID;

30
vitest.config.rpc-unit.ts Normal file
View File

@@ -0,0 +1,30 @@
import { defineConfig, mergeConfig } from "vitest/config";
import viteConfig from "./vitest.config.common";

// Vitest project for the RPC unit tests.
// Built on top of the shared common config; stubs out `obsidian` and
// enforces V8 coverage thresholds over src/lib/src/rpc.
const rpcUnitConfig = defineConfig({
    resolve: {
        // The `obsidian` module only exists inside the Obsidian runtime;
        // alias it to an empty stub so these tests bundle standalone.
        alias: { obsidian: "" },
    },
    test: {
        name: "rpc-unit-tests",
        include: ["src/lib/src/rpc/**/*.unit.spec.ts"],
        exclude: ["test/**"],
        coverage: {
            include: ["src/lib/src/rpc/**/*.ts"],
            exclude: ["**/*.unit.spec.ts", "**/index.ts"],
            provider: "v8",
            reporter: ["text", "json", "html", ["text", { file: "coverage-rpc-text.txt" }]],
            thresholds: { lines: 90, functions: 90, branches: 75, statements: 90 },
        },
    },
});

export default mergeConfig(viteConfig, rpcUnitConfig);

View File

@@ -2,10 +2,13 @@ import { defineConfig, mergeConfig } from "vitest/config";
import { playwright } from "@vitest/browser-playwright"; import { playwright } from "@vitest/browser-playwright";
import viteConfig from "./vitest.config.common"; import viteConfig from "./vitest.config.common";
import path from "path"; import path from "path";
import dotenv from "dotenv"; import { existsSync, readFileSync } from "node:fs";
import { parseEnv } from "node:util";
import { grantClipboardPermissions, openWebPeer, closeWebPeer, acceptWebPeer } from "./test/lib/commands"; import { grantClipboardPermissions, openWebPeer, closeWebPeer, acceptWebPeer } from "./test/lib/commands";
const defEnv = dotenv.config({ path: ".env" }).parsed;
const testEnv = dotenv.config({ path: ".test.env" }).parsed; const loadEnvFile = (path: string) => (existsSync(path) ? parseEnv(readFileSync(path, "utf-8")) : undefined);
const defEnv = loadEnvFile(".env");
const testEnv = loadEnvFile(".test.env");
const env = Object.assign({}, defEnv, testEnv);
const debuggerEnabled = env?.ENABLE_DEBUGGER === "true";
const enableUI = env?.ENABLE_UI === "true";