diff --git a/.github/ISSUE_TEMPLATE/issue-report.md b/.github/ISSUE_TEMPLATE/issue-report.md index 2ca8a66..d12287f 100644 --- a/.github/ISSUE_TEMPLATE/issue-report.md +++ b/.github/ISSUE_TEMPLATE/issue-report.md @@ -2,7 +2,7 @@ name: Issue report about: Create a report to help us improve title: '' -labels: 'bug' +labels: 'uncategorised' assignees: '' --- diff --git a/.github/workflows/cli-deno-tests.yml b/.github/workflows/cli-deno-tests.yml index b2b4d13..9230910 100644 --- a/.github/workflows/cli-deno-tests.yml +++ b/.github/workflows/cli-deno-tests.yml @@ -17,9 +17,48 @@ permissions: contents: read jobs: + prepare: + runs-on: ubuntu-latest + outputs: + task_matrix: ${{ steps.select.outputs.task_matrix }} + steps: + - name: Select task matrix + id: select + shell: bash + run: | + set -euo pipefail + SELECTED_TASK="${{ github.event_name == 'workflow_dispatch' && inputs.test_task || 'test' }}" + echo "[INFO] Selected task set: $SELECTED_TASK" + + case "$SELECTED_TASK" in + test) + TASK_MATRIX='["test:setup-put-cat","test:mirror","test:push-pull","test:sync-two-local","test:sync-locked-remote","test:p2p-host","test:p2p-peers","test:p2p-sync","test:p2p-three-nodes","test:p2p-upload-download","test:e2e-couchdb","test:e2e-matrix"]' + ;; + test:local) + TASK_MATRIX='["test:setup-put-cat","test:mirror"]' + ;; + test:e2e-matrix) + TASK_MATRIX='["test:e2e-matrix"]' + ;; + test:p2p-sync) + TASK_MATRIX='["test:p2p-sync"]' + ;; + *) + echo "[ERROR] Unknown task set: $SELECTED_TASK" >&2 + exit 1 + ;; + esac + + echo "task_matrix=$TASK_MATRIX" >> "$GITHUB_OUTPUT" + test: + needs: prepare runs-on: ubuntu-latest timeout-minutes: 60 + strategy: + fail-fast: false + matrix: + task: ${{ fromJson(needs.prepare.outputs.task_matrix) }} steps: - name: Checkout uses: actions/checkout@v4 @@ -64,7 +103,7 @@ jobs: LIVESYNC_DOCKER_MODE: native LIVESYNC_CLI_RETRY: 3 run: | - TASK="${{ github.event_name == 'workflow_dispatch' && inputs.test_task || 'test' }}" + TASK="${{ matrix.task }}" echo 
"[INFO] Running Deno task: $TASK" deno task "$TASK" diff --git a/.prettierrc.mjs b/.prettierrc.mjs index d0c0d3e..33b5690 100644 --- a/.prettierrc.mjs +++ b/.prettierrc.mjs @@ -13,7 +13,7 @@ const prettierConfig = { tabWidth: 4, printWidth: 120, semi: true, - endOfLine: "cr", + endOfLine: "lf", ...localPrettierConfig, }; diff --git a/devs.md b/devs.md index d44a68b..d9d9ef1 100644 --- a/devs.md +++ b/devs.md @@ -63,6 +63,9 @@ npm test # Run vitest tests (requires Docker services) ### Environment Setup +- Clone with submodules: `git clone --recurse-submodules ` +- If you already cloned without them, run: `git submodule update --init --recursive` +- The shared common library is provided by the `src/lib` submodule, and builds will fail if it is missing - Create `.env` file with `PATHS_TEST_INSTALL` pointing to test vault plug-in directories (`:` separated on Unix, `;` on Windows) - Development builds auto-copy to these paths on build diff --git a/eslint.config.mjs b/eslint.config.mjs index b2fbf76..41cb90b 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -38,6 +38,7 @@ export default [ "modules/octagonal-wheels/rollup.config.js", "modules/octagonal-wheels/dist/**/*", "src/lib/test", + "src/lib/_tools", "src/lib/src/cli", "**/main.js", "src/apps/**/*", diff --git a/package-lock.json b/package-lock.json index 0f1f682..4c2fba3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -16,11 +16,13 @@ "@smithy/protocol-http": "^5.3.9", "@smithy/querystring-builder": "^4.2.9", "@trystero-p2p/nostr": "^0.23.0", + "chokidar": "^4.0.0", "commander": "^14.0.3", "diff-match-patch": "^1.0.5", "fflate": "^0.8.2", "idb": "^8.0.3", "markdown-it": "^14.1.1", + "micromatch": "^4.0.0", "minimatch": "^10.2.2", "octagonal-wheels": "^0.1.45", "pouchdb-adapter-leveldb": "^9.0.0", @@ -38,6 +40,7 @@ "@types/deno": "^2.5.0", "@types/diff-match-patch": "^1.0.36", "@types/markdown-it": "^14.1.2", + "@types/micromatch": "^4.0.10", "@types/node": "^24.10.13", "@types/pouchdb": "^6.4.2", 
"@types/pouchdb-adapter-http": "^6.1.6", @@ -984,7 +987,6 @@ "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@babel/code-frame": "^7.29.0", "@babel/generator": "^7.29.0", @@ -2378,7 +2380,8 @@ "resolved": "https://registry.npmjs.org/@marijn/find-cluster-break/-/find-cluster-break-1.0.2.tgz", "integrity": "sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==", "dev": true, - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/@minhducsun2002/leb128": { "version": "1.0.0", @@ -4224,7 +4227,6 @@ "integrity": "sha512-ou/d51QSdTyN26D7h6dSpusAKaZkAiGM55/AKYi+9AGZw7q85hElbjK3kEyzXHhLSnRISHOYzVge6x0jRZ7DXA==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@sveltejs/vite-plugin-svelte-inspector": "^5.0.0", "deepmerge": "^4.3.1", @@ -4298,6 +4300,13 @@ "@babel/types": "^7.0.0" } }, + "node_modules/@types/braces": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@types/braces/-/braces-3.0.5.tgz", + "integrity": "sha512-SQFof9H+LXeWNz8wDe7oN5zu7ket0qwMu5vZubW4GCJ8Kkeh6nBWUz87+KTz/G3Kqsrp0j/W253XJb3KMEeg3w==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/chai": { "version": "5.2.3", "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz", @@ -4417,6 +4426,16 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/micromatch": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@types/micromatch/-/micromatch-4.0.10.tgz", + "integrity": "sha512-5jOhFDElqr4DKTrTEbnW8DZ4Hz5LRUEmyrGpCMrD/NphYv3nUnaF08xmSLx1rGGnyEs/kFnhiw6dCgcDqMr5PQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/braces": "*" + } + }, "node_modules/@types/minimatch": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", @@ -4738,7 +4757,6 @@ "integrity": 
"sha512-klQbnPAAiGYFyI02+znpBRLyjL4/BrBd0nyWkdC0s/6xFLkXYQ8OoRrSkqacS1ddVxf/LDyODIKbQ5TgKAf/Fg==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@typescript-eslint/scope-manager": "8.56.1", "@typescript-eslint/types": "8.56.1", @@ -4943,7 +4961,6 @@ "integrity": "sha512-gjjrFC4+kPVK/fN9URDJWrssU5Gqh8Az8pKG/NSfQ2V+ky8b/y1BgBg0Ug13+hOGp5pzInonmGRPn7vOgSLgzA==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@blazediff/core": "1.9.1", "@vitest/mocker": "4.1.1", @@ -4967,7 +4984,6 @@ "integrity": "sha512-dtVSBZZha2k/7P7EAXXrEAoxuIKl8Yv9f2Dk4GN/DGfmhf4DQvkvu+57okR2wq/gan1xppKjL/aBxK/kbYrbGw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@vitest/browser": "4.1.1", "@vitest/mocker": "4.1.1", @@ -5409,7 +5425,6 @@ "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", "dev": true, "license": "MIT", - "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -6123,7 +6138,6 @@ "version": "3.0.3", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, "license": "MIT", "dependencies": { "fill-range": "^7.1.1" @@ -6152,7 +6166,6 @@ } ], "license": "MIT", - "peer": true, "dependencies": { "baseline-browser-mapping": "^2.9.0", "caniuse-lite": "^1.0.30001759", @@ -6385,7 +6398,6 @@ "version": "4.0.3", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", - "dev": true, "license": "MIT", "dependencies": { "readdirp": "^4.0.1" @@ -6648,7 +6660,8 @@ "resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz", "integrity": "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==", "dev": true, - "license": "MIT" + "license": "MIT", + "peer": true }, 
"node_modules/cross-spawn": { "version": "7.0.6", @@ -7441,7 +7454,6 @@ "dev": true, "hasInstallScript": true, "license": "MIT", - "peer": true, "bin": { "esbuild": "bin/esbuild" }, @@ -7555,7 +7567,6 @@ "integrity": "sha512-XoMjdBOwe/esVgEvLmNsD3IRHkm7fbKIUGvrleloJXUZgDHig2IPWNniv+GwjyJXzuNqVjlr5+4yVUZjycJwfQ==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", @@ -8255,7 +8266,6 @@ "version": "7.1.1", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, "license": "MIT", "dependencies": { "to-regex-range": "^5.0.1" @@ -9358,7 +9368,6 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, "license": "MIT", "engines": { "node": ">=0.12.0" @@ -9695,7 +9704,6 @@ "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", "dev": true, "license": "MIT", - "peer": true, "bin": { "jiti": "lib/jiti-cli.mjs" } @@ -10409,7 +10417,6 @@ "version": "4.0.8", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, "license": "MIT", "dependencies": { "braces": "^3.0.3", @@ -11119,7 +11126,6 @@ "version": "2.3.2", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.2.tgz", "integrity": "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==", - "dev": true, "license": "MIT", "engines": { "node": ">=8.6" @@ -11203,7 +11209,6 @@ "integrity": 
"sha512-vA30H8Nvkq/cPBnNw4Q8TWz1EJyqgpuinBcHET0YVJVFldr8JDNiU9LaWAE1KqSkRYazuaBhTpB5ZzShOezQ6A==", "dev": true, "license": "Apache-2.0", - "peer": true, "dependencies": { "playwright-core": "1.58.2" }, @@ -11270,7 +11275,6 @@ } ], "license": "MIT", - "peer": true, "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", @@ -11296,7 +11300,6 @@ } ], "license": "MIT", - "peer": true, "dependencies": { "lilconfig": "^3.1.1" }, @@ -11943,7 +11946,6 @@ "version": "4.1.2", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", - "dev": true, "license": "MIT", "engines": { "node": ">= 14.18.0" @@ -12956,7 +12958,8 @@ "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.3.tgz", "integrity": "sha512-i/n8VsZydrugj3Iuzll8+x/00GH2vnYsk1eomD8QiRrSAeW6ItbCQDtfXCeJHd0iwiNagqjQkvpvREEPtW3IoQ==", "dev": true, - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/sublevel-pouchdb": { "version": "9.0.0", @@ -13025,7 +13028,6 @@ "integrity": "sha512-0a/huwc8e2es+7KFi70esqsReRfRbrT8h1cJSY/+z1lF0yKM6TT+//HYu28Yxstr50H7ifaqZRDGd0KuKDxP7w==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", @@ -13336,7 +13338,6 @@ "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">=12" }, @@ -13358,7 +13359,6 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, "license": "MIT", "dependencies": { "is-number": "^7.0.0" @@ -13455,7 +13455,6 @@ "integrity": 
"sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "esbuild": "~0.27.0", "get-tsconfig": "^4.7.5" @@ -14086,7 +14085,6 @@ "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", "dev": true, "license": "Apache-2.0", - "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -14236,7 +14234,6 @@ "integrity": "sha512-Bby3NOsna2jsjfLVOHKes8sGwgl4TT0E6vvpYgnAYDIF/tie7MRaFthmKuHx1NSXjiTueXH3do80FMQgvEktRg==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "esbuild": "^0.27.0", "fdir": "^6.5.0", @@ -14873,7 +14870,6 @@ "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">=12" }, @@ -14907,7 +14903,6 @@ "integrity": "sha512-yF+o4POL41rpAzj5KVILUxm1GCjKnELvaqmU9TLLUbMfDzuN0UpUR9uaDs+mCtjPe+uYPksXDRLQGGPvj1cTmA==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@vitest/expect": "4.1.1", "@vitest/mocker": "4.1.1", @@ -15015,7 +15010,8 @@ "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==", "dev": true, - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/wait-port": { "version": "1.1.0", @@ -15667,7 +15663,6 @@ "integrity": "sha512-AvbaCLOO2Otw/lW5bmh9d/WEdcDFdQp2Z2ZUH3pX9U2ihyUY0nvLv7J6TrWowklRGPYbB/IuIMfYgxaCPg5Bpg==", "dev": true, "license": "ISC", - "peer": true, "bin": { "yaml": "bin.mjs" }, diff --git a/package.json b/package.json index 479780a..bc96572 100644 --- a/package.json +++ b/package.json @@ -69,6 +69,7 @@ "@types/deno": "^2.5.0", "@types/diff-match-patch": "^1.0.36", "@types/markdown-it": "^14.1.2", + "@types/micromatch": "^4.0.10", "@types/node": "^24.10.13", 
"@types/pouchdb": "^6.4.2", "@types/pouchdb-adapter-http": "^6.1.6", @@ -133,11 +134,13 @@ "@smithy/protocol-http": "^5.3.9", "@smithy/querystring-builder": "^4.2.9", "@trystero-p2p/nostr": "^0.23.0", + "chokidar": "^4.0.0", "commander": "^14.0.3", "diff-match-patch": "^1.0.5", "fflate": "^0.8.2", "idb": "^8.0.3", "markdown-it": "^14.1.1", + "micromatch": "^4.0.0", "minimatch": "^10.2.2", "octagonal-wheels": "^0.1.45", "pouchdb-adapter-leveldb": "^9.0.0", diff --git a/src/apps/cli/.gitignore b/src/apps/cli/.gitignore index 69dd5ab..24987cb 100644 --- a/src/apps/cli/.gitignore +++ b/src/apps/cli/.gitignore @@ -3,4 +3,6 @@ test/* !test/*.sh test/test-init.local.sh node_modules -.*.json \ No newline at end of file +.*.json +*.env +!.test.env \ No newline at end of file diff --git a/src/apps/cli/README.md b/src/apps/cli/README.md index b3272c6..d45f985 100644 --- a/src/apps/cli/README.md +++ b/src/apps/cli/README.md @@ -95,13 +95,24 @@ livesync-cli ./my-db pull folder/note.md ./note.md ### Build from source ```bash -# Install dependencies (ensure you are in repository root directory, not src/apps/cli) -# due to shared dependencies with webapp and main library +# Clone with submodules, because the shared core lives in src/lib +git clone --recurse-submodules +cd obsidian-livesync + +# If you already cloned without submodules, run this once instead +git submodule update --init --recursive + +# Install dependencies from the repository root npm install -# Build the project (ensure you are in `src/apps/cli` directory) + +# Build the CLI from its package directory +cd src/apps/cli npm run build ``` +If `src/lib` is missing, `npm run build` now stops early with a targeted message +instead of a low-level Vite `ENOENT` error. 
+ Run the CLI: ```bash @@ -286,9 +297,11 @@ Options: --force, -f Overwrite existing file on init-settings --verbose, -v Enable verbose logging --debug, -d Enable debug logging (includes verbose) - --help, -h Show help message + --interval , -i (daemon only) Poll CouchDB every N seconds instead of using the _changes feed + --help, -h Show this help message Commands: + daemon (default) Run mirror scan then continuously sync CouchDB <-> local filesystem init-settings [path] Create settings JSON from DEFAULT_SETTINGS sync Run one replication cycle and exit p2p-peers Show discovered peers as [peer] @@ -395,6 +408,86 @@ In other words, it performs the following actions: Note: `mirror` does not respect file deletions. If a file is deleted in storage, it will be restored on the next `mirror` run. To delete a file, use the `rm` command instead. This is a little inconvenient, but it is intentional behaviour (if we handle this automatically in `mirror`, we should be against a ton of edge cases). +##### daemon + +`daemon` is the default command when no command is specified. It runs an initial mirror scan and then continuously syncs changes in both directions: + +- **CouchDB → local filesystem**: via the `_changes` feed (LiveSync mode, default) or periodic polling (`--interval N`). +- **local filesystem → CouchDB**: via chokidar file watching. Any file created, modified, or deleted in the vault directory is pushed to CouchDB. + +In **LiveSync mode** the `_changes` feed delivers remote changes as they arrive, with sub-second latency. In **polling mode** (`--interval N`) the CLI polls CouchDB every N seconds. Use polling mode if your CouchDB instance does not support long-lived HTTP connections, or if you need predictable network usage. + +The daemon exits cleanly on `SIGINT` or `SIGTERM`. 
+ +```bash +# LiveSync mode (default — _changes feed, near-real-time) +livesync-cli /path/to/vault + +# Polling mode — poll every 60 seconds +livesync-cli /path/to/vault --interval 60 +``` + +### .livesync/ignore + +Place a `.livesync/ignore` file in your vault root to exclude files from sync in both directions (local → CouchDB and CouchDB → local). + +**Format:** + +- Lines beginning with `#` are comments. +- Blank lines are ignored. +- All other lines are [minimatch](https://github.com/isaacs/minimatch) glob patterns, relative to the vault root. +- The directive `import: .gitignore` (exactly this string) reads `.gitignore` from the vault root and merges its non-comment, non-blank lines into the ignore rules. +- Negation patterns (lines starting with `!`) are not supported and will cause an error on load. + +**Example `.livesync/ignore`:** + +``` +# Ignore temporary files +*.tmp +*.swp + +# Ignore build output +build/ +dist/ + +# Merge patterns from .gitignore +import: .gitignore +``` + +Patterns apply in both directions: the chokidar watcher will not emit events for matched files, and the `isTargetFile` filter will exclude them from CouchDB → local sync. + +Changes to this file require a daemon restart to take effect. + +### Systemd Installation + +The `deploy/` directory contains a systemd unit template and an install script. + +**Automated install (user service, recommended):** + +```bash +bash src/apps/cli/deploy/install.sh --vault /path/to/vault +``` + +**With polling interval:** + +```bash +bash src/apps/cli/deploy/install.sh --vault /path/to/vault --interval 60 +``` + +**System-wide install** (requires root / sudo for `/etc/systemd/system/`): + +```bash +bash src/apps/cli/deploy/install.sh --system --vault /path/to/vault +``` + +The script: +1. Builds the CLI (`npm install` + `npm run build`). +2. Installs the binary to `~/.local/bin/livesync-cli` (user) or `/usr/local/bin/livesync-cli` (system). +3. 
Writes the unit file to `~/.config/systemd/user/livesync-cli.service` (user) or `/etc/systemd/system/livesync-cli.service` (system). +4. Runs `systemctl [--user] daemon-reload && systemctl [--user] enable --now livesync-cli`. + +**Manual setup** — if you prefer to manage the unit yourself, copy `deploy/livesync-cli.service`, replace `LIVESYNC_BIN` and `LIVESYNC_VAULT_PATH` with the actual binary path and vault path, then install to the appropriate systemd directory. + ### Planned options: - `--immediate`: Perform sync after the command (e.g. `push`, `pull`, `put`, `rm`). diff --git a/src/apps/cli/adapters/NodeFileSystemAdapter.ts b/src/apps/cli/adapters/NodeFileSystemAdapter.ts index b90ad73..34d434e 100644 --- a/src/apps/cli/adapters/NodeFileSystemAdapter.ts +++ b/src/apps/cli/adapters/NodeFileSystemAdapter.ts @@ -39,12 +39,6 @@ export class NodeFileSystemAdapter implements IFileSystemAdapter { const pathStr = this.normalisePath(p); - - const cached = this.fileCache.get(pathStr); - if (cached) { - return cached; - } - return await this.refreshFile(pathStr); } @@ -104,14 +98,15 @@ export class NodeFileSystemAdapter implements IFileSystemAdapter { const stat = await fs.stat(this.resolvePath(p)); return { size: stat.size, - mtime: stat.mtimeMs, - ctime: stat.ctimeMs, + mtime: Math.floor(stat.mtimeMs), + ctime: Math.floor(stat.ctimeMs), type: stat.isDirectory() ? "folder" : "file", }; } catch { diff --git a/src/apps/cli/adapters/NodeVaultAdapter.ts b/src/apps/cli/adapters/NodeVaultAdapter.ts index 947ad01..e313f39 100644 --- a/src/apps/cli/adapters/NodeVaultAdapter.ts +++ b/src/apps/cli/adapters/NodeVaultAdapter.ts @@ -15,7 +15,12 @@ export class NodeVaultAdapter implements IVaultAdapter { } async read(file: NodeFile): Promise { - return await fs.readFile(this.resolvePath(file.path), "utf-8"); + const content = await fs.readFile(this.resolvePath(file.path), "utf-8"); + // Correct stale stat.size — chokidar stats may be from a poll before the final write. 
+ // The downstream document integrity check compares stat.size to content length, so + // they must agree or other clients reject the file as corrupted. + file.stat.size = Buffer.byteLength(content, "utf-8"); + return content; } async cachedRead(file: NodeFile): Promise { @@ -25,6 +30,8 @@ export class NodeVaultAdapter implements IVaultAdapter { async readBinary(file: NodeFile): Promise { const buffer = await fs.readFile(this.resolvePath(file.path)); + // Same correction as read() — ensure stat.size matches actual byte length. + file.stat.size = buffer.length; return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength) as ArrayBuffer; } @@ -66,8 +73,8 @@ export class NodeVaultAdapter implements IVaultAdapter { path: p as any, stat: { size: stat.size, - mtime: stat.mtimeMs, - ctime: stat.ctimeMs, + mtime: Math.floor(stat.mtimeMs), + ctime: Math.floor(stat.ctimeMs), type: "file", }, }; @@ -89,8 +96,8 @@ export class NodeVaultAdapter implements IVaultAdapter { path: p as any, stat: { size: stat.size, - mtime: stat.mtimeMs, - ctime: stat.ctimeMs, + mtime: Math.floor(stat.mtimeMs), + ctime: Math.floor(stat.ctimeMs), type: "file", }, }; diff --git a/src/apps/cli/commands/daemonCommand.unit.spec.ts b/src/apps/cli/commands/daemonCommand.unit.spec.ts new file mode 100644 index 0000000..1adb967 --- /dev/null +++ b/src/apps/cli/commands/daemonCommand.unit.spec.ts @@ -0,0 +1,312 @@ +import { describe, expect, it, vi, beforeEach, afterEach } from "vitest"; +import { runCommand } from "./runCommand"; +import type { CLIOptions } from "./types"; + +// Mock performFullScan so daemon tests don't require a real CouchDB connection. +vi.mock("@lib/serviceFeatures/offlineScanner", () => ({ + performFullScan: vi.fn(async () => true), +})); + +// Mock UnresolvedErrorManager to avoid event-hub side effects. 
+vi.mock("@lib/services/base/UnresolvedErrorManager", () => ({ + UnresolvedErrorManager: class UnresolvedErrorManager { + showError() {} + clearError() {} + clearErrors() {} + }, +})); + +import * as offlineScanner from "@lib/serviceFeatures/offlineScanner"; + +function createCoreMock() { + return { + services: { + control: { + activated: Promise.resolve(), + applySettings: vi.fn(async () => {}), + }, + setting: { + applyPartial: vi.fn(async () => {}), + currentSettings: vi.fn(() => ({ liveSync: true, syncOnStart: false })), + }, + replication: { + replicate: vi.fn(async () => true), + }, + appLifecycle: { + onUnload: { + addHandler: vi.fn(), + }, + }, + }, + serviceModules: { + fileHandler: { + dbToStorage: vi.fn(async () => true), + storeFileToDB: vi.fn(async () => true), + }, + storageAccess: { + readFileAuto: vi.fn(async () => ""), + writeFileAuto: vi.fn(async () => {}), + }, + databaseFileAccess: { + fetch: vi.fn(async () => undefined), + }, + }, + } as any; +} + +function makeDaemonOptions(interval?: number): CLIOptions { + return { + command: "daemon", + commandArgs: [], + databasePath: "/tmp/vault", + verbose: false, + force: false, + interval, + }; +} + +const baseContext = { + vaultPath: "/tmp/vault", + settingsPath: "/tmp/vault/.livesync/settings.json", + originalSyncSettings: { + liveSync: true, + syncOnStart: false, + periodicReplication: false, + syncOnSave: false, + syncOnEditorSave: false, + syncOnFileOpen: false, + syncAfterMerge: false, + }, +} as any; + +describe("daemon command", () => { + beforeEach(() => { + vi.restoreAllMocks(); + vi.useFakeTimers(); + }); + + afterEach(() => { + vi.useRealTimers(); + }); + + it("calls performFullScan during startup", async () => { + const core = createCoreMock(); + vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true); + + await runCommand(makeDaemonOptions(), { ...baseContext, core }); + + expect(offlineScanner.performFullScan).toHaveBeenCalledTimes(1); + }); + + it("returns false when 
performFullScan fails", async () => { + const core = createCoreMock(); + vi.mocked(offlineScanner.performFullScan).mockResolvedValue(false); + + const result = await runCommand(makeDaemonOptions(), { ...baseContext, core }); + + expect(result).toBe(false); + }); + + it("polling mode: calls setTimeout when interval option is set", async () => { + const core = createCoreMock(); + vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true); + const setTimeoutSpy = vi.spyOn(globalThis, "setTimeout"); + + await runCommand(makeDaemonOptions(30), { ...baseContext, core }); + + expect(setTimeoutSpy).toHaveBeenCalledTimes(1); + // Interval should be in milliseconds (30s → 30000ms) + expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), 30000); + }); + + it("polling mode: applies settings with suspendFileWatching=false before setting interval", async () => { + const core = createCoreMock(); + vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true); + + await runCommand(makeDaemonOptions(10), { ...baseContext, core }); + + expect(core.services.setting.applyPartial).toHaveBeenCalledWith( + expect.objectContaining({ suspendFileWatching: false }), + true + ); + expect(core.services.control.applySettings).toHaveBeenCalledTimes(1); + }); + + it("liveSync mode: calls applyPartial and applySettings", async () => { + const core = createCoreMock(); + vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true); + + await runCommand(makeDaemonOptions(), { ...baseContext, core }); + + expect(core.services.setting.applyPartial).toHaveBeenCalledWith( + expect.objectContaining({ + ...baseContext.originalSyncSettings, + suspendFileWatching: false, + }), + true + ); + expect(core.services.control.applySettings).toHaveBeenCalledTimes(1); + }); + + it("liveSync mode: logs warning when both liveSync and syncOnStart are false", async () => { + const core = createCoreMock(); + core.services.setting.currentSettings = vi.fn(() => ({ + liveSync: false, + syncOnStart: 
false, + })); + vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true); + const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {}); + + const result = await runCommand(makeDaemonOptions(), { ...baseContext, core }); + + expect(result).toBe(true); + const warningCalls = consoleSpy.mock.calls.filter( + (args) => typeof args[0] === "string" && args[0].includes("liveSync and syncOnStart are both disabled") + ); + expect(warningCalls.length).toBeGreaterThan(0); + }); + + it("liveSync mode: no warning when liveSync is true", async () => { + const core = createCoreMock(); + core.services.setting.currentSettings = vi.fn(() => ({ + liveSync: true, + syncOnStart: false, + })); + vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true); + const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {}); + + await runCommand(makeDaemonOptions(), { ...baseContext, core }); + + const warningCalls = consoleSpy.mock.calls.filter( + (args) => typeof args[0] === "string" && args[0].includes("liveSync and syncOnStart are both disabled") + ); + expect(warningCalls.length).toBe(0); + }); + + it("calls replicate before performFullScan", async () => { + const core = createCoreMock(); + const callOrder: string[] = []; + core.services.replication.replicate = vi.fn(async () => { + callOrder.push("replicate"); + return true; + }); + vi.mocked(offlineScanner.performFullScan).mockImplementation(async () => { + callOrder.push("performFullScan"); + return true; + }); + + await runCommand(makeDaemonOptions(), { ...baseContext, core }); + + expect(callOrder).toEqual(["replicate", "performFullScan"]); + }); + + it("returns false when initial replication fails", async () => { + const core = createCoreMock(); + core.services.replication.replicate = vi.fn(async () => false); + vi.mocked(offlineScanner.performFullScan).mockClear(); + + const result = await runCommand(makeDaemonOptions(), { ...baseContext, core }); + + expect(result).toBe(false); + // 
performFullScan should NOT have been called + expect(offlineScanner.performFullScan).not.toHaveBeenCalled(); + }); + + it("polling mode: registers onUnload handler that clears timeout", async () => { + const core = createCoreMock(); + vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true); + + await runCommand(makeDaemonOptions(10), { ...baseContext, core }); + + // onUnload handler should have been registered + expect(core.services.appLifecycle.onUnload.addHandler).toHaveBeenCalledTimes(1); + const handler = core.services.appLifecycle.onUnload.addHandler.mock.calls[0][0]; + + // Get the timeout ID that was created + const clearTimeoutSpy = vi.spyOn(globalThis, "clearTimeout"); + await handler(); + expect(clearTimeoutSpy).toHaveBeenCalledTimes(1); + }); + + it("polling backoff: interval escalates on failure, caps at 300000ms, then halves on recovery", async () => { + const core = createCoreMock(); + vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true); + vi.spyOn(console, "error").mockImplementation(() => {}); + + // startup replicate (call 1) succeeds; poll calls 2–7 fail; call 8 succeeds. + let callCount = 0; + core.services.replication.replicate = vi.fn(async () => { + callCount++; + if (callCount === 1) return true; // initial startup replicate + if (callCount <= 7) throw new Error("network failure"); + return true; // recovery + }); + + const baseMs = 30 * 1000; + const setTimeoutSpy = vi.spyOn(globalThis, "setTimeout"); + + await runCommand(makeDaemonOptions(30), { ...baseContext, core }); + + // After runCommand returns the first setTimeout has been scheduled. + // setTimeoutSpy.mock.calls[0] is the initial schedule (baseMs). + expect(setTimeoutSpy.mock.calls[0][1]).toBe(baseMs); + + // Advance through 6 failure polls. After each failure the next setTimeout + // should be scheduled with a larger (or capped) interval. + // formula: min(base * 2^n, 300000). base=30000ms. 
+ // failure 1: 30000*2=60000, failure 2: 30000*4=120000, + // failure 3: 30000*8=240000, failure 4: 30000*16=480000→capped, 5→cap, 6→cap + const expectedIntervals = [ + baseMs * 2, // after failure 1: 60000 + baseMs * 4, // after failure 2: 120000 + baseMs * 8, // after failure 3: 240000 + 300_000, // after failure 4 (would be 480000, capped) + 300_000, // after failure 5 (cap) + 300_000, // after failure 6 (cap) + ]; + + for (const expected of expectedIntervals) { + const prevCallCount = setTimeoutSpy.mock.calls.length; + await vi.advanceTimersByTimeAsync(setTimeoutSpy.mock.calls[prevCallCount - 1][1] as number); + const newCallCount = setTimeoutSpy.mock.calls.length; + expect(newCallCount).toBeGreaterThan(prevCallCount); + expect(setTimeoutSpy.mock.calls[newCallCount - 1][1]).toBe(expected); + } + + // Now trigger the success poll — interval should halve each time toward base. + // After failure 6, consecutiveFailures=6, currentIntervalMs=300000. + // On success: consecutiveFailures=5, currentIntervalMs=150000. + const prevCallCount = setTimeoutSpy.mock.calls.length; + await vi.advanceTimersByTimeAsync(setTimeoutSpy.mock.calls[prevCallCount - 1][1] as number); + const afterSuccessCallCount = setTimeoutSpy.mock.calls.length; + expect(afterSuccessCallCount).toBeGreaterThan(prevCallCount); + // The interval after one success should be halved (300000 / 2 = 150000). + expect(setTimeoutSpy.mock.calls[afterSuccessCallCount - 1][1]).toBe(150_000); + }); + + it("polling error handling: replicate rejection is caught and console.error is called", async () => { + const core = createCoreMock(); + vi.mocked(offlineScanner.performFullScan).mockResolvedValue(true); + const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {}); + + // Make replicate succeed on the initial call (startup), then fail on the poll. 
+ let callCount = 0; + core.services.replication.replicate = vi.fn(async () => { + callCount++; + if (callCount === 1) return true; // startup replicate + throw new Error("network failure"); + }); + + const intervalMs = 30 * 1000; + await runCommand(makeDaemonOptions(30), { ...baseContext, core }); + + // Advance time to trigger the first poll callback and flush its async work. + await vi.advanceTimersByTimeAsync(intervalMs); + + // No unhandled rejection — the error was caught internally. + const errorCalls = consoleSpy.mock.calls.filter( + (args) => typeof args[0] === "string" && args[0].includes("Poll error") + ); + expect(errorCalls.length).toBeGreaterThan(0); + }); +}); diff --git a/src/apps/cli/commands/runCommand.ts b/src/apps/cli/commands/runCommand.ts index e188c23..c90fa94 100644 --- a/src/apps/cli/commands/runCommand.ts +++ b/src/apps/cli/commands/runCommand.ts @@ -15,6 +15,96 @@ export async function runCommand(options: CLIOptions, context: CLICommandContext await core.services.control.activated; if (options.command === "daemon") { + const log = (msg: unknown) => console.error(`[Daemon] ${msg}`); + + // Skip the config mismatch dialog — the daemon cannot resolve it interactively + // and the default "Dismiss" action would block replication. The daemon should + // accept whatever configuration the remote has. + await core.services.setting.applyPartial({ disableCheckingConfigMismatch: true }, true); + + // 1. Replicate CouchDB → local PouchDB so the mirror scan has content to work with. + log("Replicating from CouchDB..."); + const replResult = await core.services.replication.replicate(true); + if (!replResult) { + console.error("[Daemon] Initial CouchDB replication failed, cannot continue"); + return false; + } + log("CouchDB replication complete"); + + // 2. Mirror scan to reconcile PouchDB ↔ local filesystem. 
+ const errorManager = new UnresolvedErrorManager(core.services.appLifecycle); + log("Running mirror scan..."); + const scanOk = await performFullScan(core as any, log, errorManager, false, true); + if (!scanOk) { + console.error("[Daemon] Mirror scan failed, cannot continue"); + return false; + } + log("Mirror scan complete"); + + // 3. Re-enable sync. + const restoreSyncSettings = async () => { + await core.services.setting.applyPartial({ + ...context.originalSyncSettings, + suspendFileWatching: false, + }, true); + // applySettings fires the full lifecycle: onSuspending → onResumed. + // ModuleReplicatorCouchDB starts continuous replication on onResumed + // via fireAndForget. + await core.services.control.applySettings(); + // Lifecycle events (onSuspending) may re-enable suspension flags. + // Clear them explicitly after the lifecycle completes. applyPartial + // with true is a direct store write — it does not re-trigger lifecycle. + await core.services.setting.applyPartial({ + suspendFileWatching: false, + suspendParseReplicationResult: false, + }, true); + }; + if (options.interval) { + log(`Polling mode: syncing every ${options.interval}s`); + await restoreSyncSettings(); + const baseIntervalMs = options.interval * 1000; + let currentIntervalMs = baseIntervalMs; + let consecutiveFailures = 0; + const maxIntervalMs = 5 * 60 * 1000; // 5 minutes cap + + const poll = async () => { + try { + await core.services.replication.replicate(true); + if (consecutiveFailures > 0) { + consecutiveFailures--; + currentIntervalMs = Math.max(currentIntervalMs / 2, baseIntervalMs); + log(`Replication recovered`); + } + } catch (err) { + consecutiveFailures++; + currentIntervalMs = Math.min(baseIntervalMs * Math.pow(2, consecutiveFailures), maxIntervalMs); + console.error(`[Daemon] Poll error (${consecutiveFailures} consecutive):`, err); + if (consecutiveFailures >= 5) { + console.error(`[Daemon] Warning: ${consecutiveFailures} consecutive failures, backing off to 
${Math.round(currentIntervalMs / 1000)}s`);
+                    }
+                }
+                pollTimer = setTimeout(poll, currentIntervalMs);
+            };
+            let pollTimer: ReturnType<typeof setTimeout> = setTimeout(poll, currentIntervalMs);
+            core.services.appLifecycle.onUnload.addHandler(async () => {
+                clearTimeout(pollTimer);
+                return true;
+            });
+        } else {
+            log("LiveSync mode: restoring sync settings and starting _changes feed");
+            await restoreSyncSettings();
+            // The applySettings() lifecycle fires onResumed → ModuleReplicatorCouchDB which
+            // starts continuous replication via fireAndForget(openReplication). Don't call
+            // openReplication directly — it races with the handler and causes dedup/termination.
+            log("LiveSync active");
+            const currentSettings = core.services.setting.currentSettings();
+            if (!currentSettings.liveSync && !currentSettings.syncOnStart) {
+                console.error("[Daemon] Warning: liveSync and syncOnStart are both disabled in settings. " +
+                    "No sync will occur. Set liveSync=true in your settings file for continuous sync, " +
+                    "or use --interval for polling mode.");
+            }
+        }
+
         return true;
     }
 
@@ -83,8 +173,8 @@ export async function runCommand(options: CLIOptions, context: CLICommandContext
         console.log(`[Command] push ${sourcePath} -> ${destinationDatabasePath}`);
 
         await core.serviceModules.storageAccess.writeFileAuto(destinationDatabasePath, toArrayBuffer(sourceData), {
-            mtime: sourceStat.mtimeMs,
-            ctime: sourceStat.ctimeMs,
+            mtime: Math.floor(sourceStat.mtimeMs),
+            ctime: Math.floor(sourceStat.ctimeMs),
         });
         const destinationPathWithPrefix = destinationDatabasePath as FilePathWithPrefix;
         const stored = await core.serviceModules.fileHandler.storeFileToDB(destinationPathWithPrefix, true);
diff --git a/src/apps/cli/commands/types.ts b/src/apps/cli/commands/types.ts
index f63f751..ca01152 100644
--- a/src/apps/cli/commands/types.ts
+++ b/src/apps/cli/commands/types.ts
@@ -1,5 +1,6 @@
 import { LiveSyncBaseCore } from "../../../LiveSyncBaseCore";
 import { ServiceContext } from "@lib/services/base/ServiceBase";
+import type { ObsidianLiveSyncSettings } from "@lib/common/types";
 
 export type CLICommand =
     | "daemon"
@@ -29,15 +30,18 @@ export interface CLIOptions {
     force?: boolean;
     command: CLICommand;
     commandArgs: string[];
+    interval?: number;
 }
 
 export interface CLICommandContext {
     databasePath: string;
     core: LiveSyncBaseCore;
     settingsPath: string;
+    originalSyncSettings: Pick<ObsidianLiveSyncSettings, "liveSync" | "syncOnStart" | "periodicReplication" | "syncOnSave" | "syncOnEditorSave" | "syncOnFileOpen" | "syncAfterMerge">;
 }
 
 export const VALID_COMMANDS = new Set([
+    "daemon",
     "sync",
     "p2p-peers",
     "p2p-sync",
diff --git a/src/apps/cli/deploy/install.sh b/src/apps/cli/deploy/install.sh
new file mode 100755
index 0000000..d0d3a2e
--- /dev/null
+++ b/src/apps/cli/deploy/install.sh
@@ -0,0 +1,187 @@
+#!/usr/bin/env bash
+# install.sh — install livesync-cli as a systemd service
+#
+# Usage:
+#   install.sh [--user] [--system] [--vault <path>] [--interval <seconds>]
+#
+# Defaults: user install, prompts for vault path if not supplied.
+set -euo pipefail
+
+SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
+REPO_ROOT="$(cd -- "$SCRIPT_DIR/../../.." && pwd)"
+CLI_DIR="$REPO_ROOT/src/apps/cli"
+SERVICE_TEMPLATE="$SCRIPT_DIR/livesync-cli.service"
+
+# ── Argument parsing ────────────────────────────────────────────────────────
+INSTALL_MODE="user"
+VAULT_PATH=""
+INTERVAL=""
+FORCE=0
+
+while [[ $# -gt 0 ]]; do
+    case "$1" in
+        --user)
+            INSTALL_MODE="user"
+            shift
+            ;;
+        --system)
+            INSTALL_MODE="system"
+            shift
+            ;;
+        --vault)
+            if [[ -z "${2:-}" ]]; then
+                echo "Error: --vault requires a path argument" >&2
+                exit 1
+            fi
+            VAULT_PATH="$2"
+            shift 2
+            ;;
+        --interval)
+            if [[ -z "${2:-}" ]]; then
+                echo "Error: --interval requires a numeric argument" >&2
+                exit 1
+            fi
+            INTERVAL="$2"
+            if ! 
[[ "$INTERVAL" =~ ^[1-9][0-9]*$ ]]; then + echo "Error: --interval requires a positive integer, got '$INTERVAL'" >&2 + exit 1 + fi + shift 2 + ;; + --force|-f) + FORCE=1 + shift + ;; + --help|-h) + cat <] [--interval ] [--force] + + --user Install as a user systemd service (default, ~/.config/systemd/user/) + --system Install as a system systemd service (/etc/systemd/system/) + --vault Path to the vault directory (prompted if omitted) + --interval Poll CouchDB every N seconds instead of using the _changes feed + --force Overwrite existing service unit without prompting +EOF + exit 0 + ;; + *) + echo "Error: Unknown argument: $1" >&2 + exit 1 + ;; + esac +done + +# ── Vault path ────────────────────────────────────────────────────────────── +if [[ -z "$VAULT_PATH" ]]; then + if [ ! -t 0 ]; then + echo "Error: --vault is required in non-interactive mode" >&2 + exit 1 + fi + printf 'Vault path: ' + read -r VAULT_PATH +fi + +_orig_vault="$VAULT_PATH" +if ! VAULT_PATH="$(cd -- "$VAULT_PATH" 2>/dev/null && pwd)"; then + echo "Error: vault directory does not exist: $_orig_vault" >&2 + exit 1 +fi + +echo "[INFO] Vault: $VAULT_PATH" +echo "[INFO] Install mode: $INSTALL_MODE" + +# ── Build ──────────────────────────────────────────────────────────────────── +echo "[INFO] Building CLI from $REPO_ROOT..." +(cd "$REPO_ROOT" && npm install --silent) +(cd "$CLI_DIR" && npm run build) + +BUILT_CJS="$CLI_DIR/dist/index.cjs" +if [[ ! 
-f "$BUILT_CJS" ]]; then + echo "Error: build output not found: $BUILT_CJS" >&2 + exit 1 +fi + +# ── Install binary ─────────────────────────────────────────────────────────── +if [[ "$INSTALL_MODE" == "user" ]]; then + BIN_DIR="$HOME/.local/bin" + UNIT_DIR="$HOME/.config/systemd/user" + SYSTEMCTL_FLAGS="--user" +else + BIN_DIR="/usr/local/bin" + UNIT_DIR="/etc/systemd/system" + SYSTEMCTL_FLAGS="" +fi + +mkdir -p "$BIN_DIR" + +LIVESYNC_BIN="$BIN_DIR/livesync-cli" +LIVESYNC_JS="$BIN_DIR/livesync-cli.js" + +# Copy the CJS bundle so the wrapper is self-contained and independent of the +# build directory location. +cp "$BUILT_CJS" "$LIVESYNC_JS" + +# Write a bash wrapper that invokes node on the installed bundle. +cat > "$LIVESYNC_BIN" <&2 + exit 1 + fi + printf 'Service unit already exists at %s. Overwrite? [y/N]: ' "$UNIT_PATH" + read -r CONFIRM + case "$CONFIRM" in + [yY]|[yY][eE][sS]) : ;; + *) + echo "[INFO] Aborted. Existing unit left in place." + exit 0 + ;; + esac +fi + +# In awk gsub(), '&' in the replacement means "matched text"; escape any literal '&' +# in path variables before passing them as awk replacement strings. +AWK_BIN="${LIVESYNC_BIN//&/\\&}" +AWK_VAULT="${VAULT_PATH//&/\\&}" +awk -v bin="$AWK_BIN" -v vault="$AWK_VAULT" -v exec_start="ExecStart=$EXEC_START" \ + '/^ExecStart=/ { print exec_start; next } {gsub("LIVESYNC_BIN", bin); gsub("LIVESYNC_VAULT_PATH", vault); print}' \ + "$SERVICE_TEMPLATE" > "$UNIT_PATH" + +echo "[INFO] Installed unit: $UNIT_PATH" + +# ── Enable service ─────────────────────────────────────────────────────────── +if ! 
command -v systemctl >/dev/null 2>&1; then + echo "[WARN] systemctl not found — skipping service activation" + echo "[INFO] To enable manually, copy $UNIT_PATH to the correct systemd directory and run:" + echo " systemctl $SYSTEMCTL_FLAGS daemon-reload" + echo " systemctl $SYSTEMCTL_FLAGS enable --now livesync-cli" + exit 0 +fi + +# shellcheck disable=SC2086 +systemctl $SYSTEMCTL_FLAGS daemon-reload +# shellcheck disable=SC2086 +systemctl $SYSTEMCTL_FLAGS enable --now livesync-cli + +echo "" +echo "[Done] livesync-cli service installed and started." +echo "" +# shellcheck disable=SC2086 +systemctl $SYSTEMCTL_FLAGS status livesync-cli --no-pager || true diff --git a/src/apps/cli/deploy/livesync-cli.service b/src/apps/cli/deploy/livesync-cli.service new file mode 100644 index 0000000..b76e786 --- /dev/null +++ b/src/apps/cli/deploy/livesync-cli.service @@ -0,0 +1,17 @@ +[Unit] +Description=Self-hosted LiveSync CLI Daemon +After=network-online.target +Wants=network-online.target + +[Service] +Type=simple +ExecStart=LIVESYNC_BIN LIVESYNC_VAULT_PATH +Restart=on-failure +RestartSec=10 +TimeoutStartSec=300 +StandardOutput=journal +StandardError=journal +LimitNOFILE=65536 + +[Install] +WantedBy=default.target diff --git a/src/apps/cli/main.ts b/src/apps/cli/main.ts index 97483d5..535d137 100644 --- a/src/apps/cli/main.ts +++ b/src/apps/cli/main.ts @@ -26,6 +26,7 @@ import { VALID_COMMANDS } from "./commands/types"; import type { CLICommand, CLIOptions } from "./commands/types"; import { getPathFromUXFileInfo } from "@lib/common/typeUtils"; import { stripAllPrefixes } from "@lib/string_and_binary/path"; +import { IgnoreRules } from "./serviceModules/IgnoreRules"; const SETTINGS_FILE = ".livesync/settings.json"; ensureGlobalNodeLocalStorage(); @@ -43,7 +44,8 @@ Arguments: database-path Path to the local database directory Commands: - sync Run one replication cycle and exit + daemon (default) Run mirror scan then continuously sync CouchDB <-> local filesystem + sync Run one 
replication cycle and exit
     p2p-peers               Show discovered peers as [peer]\t\t
     p2p-sync          Sync with the specified peer-id or peer-name
@@ -60,24 +62,30 @@ Commands:
     rm                Mark a file as deleted in local database
     resolve      Resolve conflicts by keeping  and deleting others
     mirror [vault-path]     Mirror database contents to the local file system (vault-path defaults to database-path)
+
+Options:
+  --interval <seconds>, -i <seconds>   (daemon only) Poll CouchDB every N seconds instead of using the _changes feed
+
 Examples:
-  livesync-cli ./my-database sync
-  livesync-cli ./my-database p2p-peers 5
-  livesync-cli ./my-database p2p-sync my-peer-name 15
-  livesync-cli ./my-database p2p-host
-  livesync-cli ./my-database --settings ./custom-settings.json push ./note.md folder/note.md
-  livesync-cli ./my-database pull folder/note.md ./exports/note.md
-  livesync-cli ./my-database pull-rev folder/note.md ./exports/note.old.md 3-abcdef
-  livesync-cli ./my-database setup "obsidian://setuplivesync?settings=..."
-  echo "Hello" | livesync-cli ./my-database put notes/hello.md
-  livesync-cli ./my-database cat notes/hello.md
-  livesync-cli ./my-database cat-rev notes/hello.md 3-abcdef
-  livesync-cli ./my-database ls notes/
-  livesync-cli ./my-database info notes/hello.md
-  livesync-cli ./my-database rm notes/hello.md
-  livesync-cli ./my-database resolve notes/hello.md 3-abcdef
-  livesync-cli init-settings ./data.json
-  livesync-cli ./my-database --verbose
+  livesync-cli ./my-database                          Run daemon (LiveSync mode)
+  livesync-cli ./my-database --interval 30            Run daemon (polling every 30s)
+  livesync-cli ./my-database sync
+  livesync-cli ./my-database p2p-peers 5
+  livesync-cli ./my-database p2p-sync my-peer-name 15
+  livesync-cli ./my-database p2p-host
+  livesync-cli ./my-database --settings ./custom-settings.json push ./note.md folder/note.md
+  livesync-cli ./my-database pull folder/note.md ./exports/note.md
+  livesync-cli ./my-database pull-rev folder/note.md ./exports/note.old.md 3-abcdef
+  livesync-cli ./my-database setup 
"obsidian://setuplivesync?settings=..." + echo "Hello" | livesync-cli ./my-database put notes/hello.md + livesync-cli ./my-database cat notes/hello.md + livesync-cli ./my-database cat-rev notes/hello.md 3-abcdef + livesync-cli ./my-database ls notes/ + livesync-cli ./my-database info notes/hello.md + livesync-cli ./my-database rm notes/hello.md + livesync-cli ./my-database resolve notes/hello.md 3-abcdef + livesync-cli init-settings ./data.json + livesync-cli ./my-database --verbose `); } @@ -94,6 +102,7 @@ export function parseArgs(): CLIOptions { let verbose = false; let debug = false; let force = false; + let interval: number | undefined; let command: CLICommand = "daemon"; const commandArgs: string[] = []; @@ -110,6 +119,21 @@ export function parseArgs(): CLIOptions { settingsPath = args[i]; break; } + case "--interval": + case "-i": { + i++; + if (!args[i]) { + console.error(`Error: Missing value for ${token}`); + process.exit(1); + } + const n = parseInt(args[i], 10); + if (!Number.isInteger(n) || n <= 0) { + console.error(`Error: --interval requires a positive integer, got '${args[i]}'`); + process.exit(1); + } + interval = n; + break; + } case "--debug": case "-d": // debugging automatically enables verbose logging, as it is intended for debugging issues. 
@@ -164,6 +188,7 @@ export function parseArgs(): CLIOptions { force, command, commandArgs, + interval, }; } @@ -197,6 +222,9 @@ async function createDefaultSettingsFile(options: CLIOptions) { export async function main() { const options = parseArgs(); + if (options.interval && options.command !== "daemon") { + console.error(`Warning: --interval is only used in daemon mode, ignored for '${options.command}'`); + } const avoidStdoutNoise = options.command === "cat" || options.command === "cat-rev" || @@ -248,6 +276,20 @@ export async function main() { infoLog(`Settings: ${settingsPath}`); infoLog(""); + // For daemon and mirror mode, load ignore rules before the core is constructed so that + // chokidar's ignored option is populated when beginWatch() fires during onLoad(). + const watchEnabled = options.command === "daemon"; + const vaultPath = + options.command === "mirror" && options.commandArgs[0] + ? path.resolve(options.commandArgs[0]) + : databasePath; + let ignoreRules: IgnoreRules | undefined; + if (options.command === "daemon" || options.command === "mirror") { + ignoreRules = new IgnoreRules(vaultPath); + await ignoreRules.load(); + } + + // Create service context and hub const context = new NodeServiceContext(databasePath); const serviceHubInstance = new NodeServiceHub(databasePath, context); @@ -278,11 +320,14 @@ export async function main() { } console.error(`${prefix} ${message}`); }); - // Prevent replication result to be processed automatically. - serviceHubInstance.replication.processSynchroniseResult.addHandler(async () => { - console.error(`[Info] Replication result received, but not processed automatically in CLI mode.`); - return await Promise.resolve(true); - }, -100); + // Prevent replication result from being processed automatically in non-daemon commands. + // In daemon mode the default handler must run so changes are applied to the filesystem. 
+ if (options.command !== "daemon") { + serviceHubInstance.replication.processSynchroniseResult.addHandler(async () => { + console.error(`[Info] Replication result received, but not processed automatically in CLI mode.`); + return await Promise.resolve(true); + }, -100); + } // Setup settings handlers const settingService = serviceHubInstance.setting; @@ -324,11 +369,7 @@ export async function main() { const core = new LiveSyncBaseCore( serviceHubInstance, (core: LiveSyncBaseCore, serviceHub: InjectableServiceHub) => { - const mirrorVaultPath = - options.command === "mirror" && options.commandArgs[0] - ? path.resolve(options.commandArgs[0]) - : databasePath; - return initialiseServiceModulesCLI(mirrorVaultPath, core, serviceHub); + return initialiseServiceModulesCLI(vaultPath, core, serviceHub, ignoreRules, watchEnabled); }, (core) => [ // No modules need to be registered for P2P replication in CLI. Directly using Replicators in p2p.ts @@ -344,8 +385,25 @@ export async function main() { if (parts.some((part) => part.startsWith("."))) { return await Promise.resolve(false); } + // PouchDB LevelDB database directory lives in the vault directory. + if (parts[0]?.endsWith("-livesync-v2")) { + return await Promise.resolve(false); + } return await Promise.resolve(true); }, -1 /* highest priority */); + + // Apply user-defined ignore rules for daemon mode (lower priority, runs after dotfile check). + if (ignoreRules) { + const rules = ignoreRules; + core.services.vault.isTargetFile.addHandler(async (target) => { + const targetPath = stripAllPrefixes(getPathFromUXFileInfo(target)); + if (rules.shouldIgnore(targetPath)) { + return false; + } + // undefined = pass through to next handler in chain + return undefined; + }, 0); + } } ); @@ -366,6 +424,25 @@ export async function main() { process.on("SIGINT", () => shutdown("SIGINT")); process.on("SIGTERM", () => shutdown("SIGTERM")); + // Save the settings file before any lifecycle events can mutate and persist them. 
+ // suspendAllSync and other lifecycle hooks clobber sync settings in memory, and + // various code paths persist the clobbered state to disk. We restore on shutdown. + const settingsBackup = await fs.readFile(settingsPath, "utf-8").catch(() => null); + + // Restore settings file on any exit to undo lifecycle mutations. + // Write to a temp path first so a crash mid-write doesn't leave a truncated file. + process.on("exit", () => { + if (settingsBackup) { + const tmpPath = settingsPath + ".tmp"; + try { + require("fs").writeFileSync(tmpPath, settingsBackup, "utf-8"); + require("fs").renameSync(tmpPath, settingsPath); + } catch (err) { + console.error("[Settings] Failed to restore settings on exit:", err); + } + } + }); + // Start the core try { infoLog(`[Starting] Initializing LiveSync...`); @@ -375,6 +452,18 @@ export async function main() { console.error(`[Error] Failed to initialize LiveSync`); process.exit(1); } + // Capture sync settings before suspendAllSync() clobbers them. + // Used by daemon mode to restore the correct sync behaviour after the mirror scan. 
+ const settingsBeforeSuspend = core.services.setting.currentSettings(); + const originalSyncSettings = { + liveSync: settingsBeforeSuspend.liveSync, + syncOnStart: settingsBeforeSuspend.syncOnStart, + periodicReplication: settingsBeforeSuspend.periodicReplication, + syncOnSave: settingsBeforeSuspend.syncOnSave, + syncOnEditorSave: settingsBeforeSuspend.syncOnEditorSave, + syncOnFileOpen: settingsBeforeSuspend.syncOnFileOpen, + syncAfterMerge: settingsBeforeSuspend.syncAfterMerge, + }; await core.services.setting.suspendAllSync(); await core.services.control.onReady(); @@ -400,7 +489,7 @@ export async function main() { infoLog(""); } - const result = await runCommand(options, { databasePath, core, settingsPath }); + const result = await runCommand(options, { databasePath, core, settingsPath, originalSyncSettings }); if (!result) { console.error(`[Error] Command '${options.command}' failed`); process.exitCode = 1; @@ -408,7 +497,7 @@ export async function main() { infoLog(`[Done] Command '${options.command}' completed`); } - if (options.command === "daemon") { + if (options.command === "daemon" && result) { // Keep the process running await new Promise(() => {}); } else { diff --git a/src/apps/cli/main.unit.spec.ts b/src/apps/cli/main.unit.spec.ts index 4c35ae9..2b70a44 100644 --- a/src/apps/cli/main.unit.spec.ts +++ b/src/apps/cli/main.unit.spec.ts @@ -85,4 +85,67 @@ describe("CLI parseArgs", () => { expect(parsed.command).toBe("p2p-host"); expect(parsed.commandArgs).toEqual([]); }); + + it("parses --interval flag with valid integer", () => { + process.argv = ["node", "livesync-cli", "./vault", "--interval", "30"]; + const parsed = parseArgs(); + expect(parsed.command).toBe("daemon"); + expect(parsed.interval).toBe(30); + }); + + it("parses -i shorthand for --interval", () => { + process.argv = ["node", "livesync-cli", "./vault", "-i", "10"]; + const parsed = parseArgs(); + expect(parsed.interval).toBe(10); + }); + + it("exits 1 when --interval has no value", () => 
{ + process.argv = ["node", "livesync-cli", "./vault", "--interval"]; + const exitMock = mockProcessExit(); + vi.spyOn(console, "error").mockImplementation(() => {}); + expect(() => parseArgs()).toThrowError("__EXIT__:1"); + expect(exitMock).toHaveBeenCalledWith(1); + }); + + it("exits 1 when --interval is not a positive integer", () => { + process.argv = ["node", "livesync-cli", "./vault", "--interval", "0"]; + const exitMock = mockProcessExit(); + vi.spyOn(console, "error").mockImplementation(() => {}); + expect(() => parseArgs()).toThrowError("__EXIT__:1"); + expect(exitMock).toHaveBeenCalledWith(1); + }); + + it("exits 1 when --interval is negative", () => { + process.argv = ["node", "livesync-cli", "./vault", "--interval", "-5"]; + const exitMock = mockProcessExit(); + vi.spyOn(console, "error").mockImplementation(() => {}); + expect(() => parseArgs()).toThrowError("__EXIT__:1"); + }); + + it("exits 1 when --interval is not numeric", () => { + process.argv = ["node", "livesync-cli", "./vault", "--interval", "abc"]; + const exitMock = mockProcessExit(); + vi.spyOn(console, "error").mockImplementation(() => {}); + expect(() => parseArgs()).toThrowError("__EXIT__:1"); + }); + + it("parses explicit daemon command", () => { + process.argv = ["node", "livesync-cli", "./vault", "daemon"]; + const parsed = parseArgs(); + expect(parsed.command).toBe("daemon"); + expect(parsed.databasePath).toBe("./vault"); + }); + + it("defaults to daemon when no command specified", () => { + process.argv = ["node", "livesync-cli", "./vault"]; + const parsed = parseArgs(); + expect(parsed.command).toBe("daemon"); + }); + + it("parses explicit daemon command with --interval", () => { + process.argv = ["node", "livesync-cli", "./vault", "daemon", "--interval", "30"]; + const parsed = parseArgs(); + expect(parsed.command).toBe("daemon"); + expect(parsed.interval).toBe(30); + }); }); diff --git a/src/apps/cli/managers/CLIStorageEventManagerAdapter.ts 
b/src/apps/cli/managers/CLIStorageEventManagerAdapter.ts index 1334b6a..9abc5fd 100644 --- a/src/apps/cli/managers/CLIStorageEventManagerAdapter.ts +++ b/src/apps/cli/managers/CLIStorageEventManagerAdapter.ts @@ -11,8 +11,11 @@ import type { } from "@lib/managers/adapters"; import type { FileEventItemSentinel } from "@lib/managers/StorageEventManager"; import type { NodeFile, NodeFolder } from "../adapters/NodeTypes"; +import type { Stats } from "fs"; import * as fs from "fs/promises"; import * as path from "path"; +import { watch as chokidarWatch, type FSWatcher } from "chokidar"; +import type { IgnoreRules } from "../serviceModules/IgnoreRules"; /** * CLI-specific type guard adapter @@ -56,22 +59,11 @@ class CLIPersistenceAdapter implements IStorageEventPersistenceAdapter { } /** - * CLI-specific status adapter (console logging) + * CLI-specific status adapter (no-op — daemon uses journald for status) */ class CLIStatusAdapter implements IStorageEventStatusAdapter { - private lastUpdate = 0; - private updateInterval = 5000; // Update every 5 seconds - - updateStatus(status: { batched: number; processing: number; totalQueued: number }): void { - const now = Date.now(); - if (now - this.lastUpdate > this.updateInterval) { - if (status.totalQueued > 0 || status.processing > 0) { - // console.log( - // `[StorageEventManager] Batched: ${status.batched}, Processing: ${status.processing}, Total Queued: ${status.totalQueued}` - // ); - } - this.lastUpdate = now; - } + updateStatus(_status: { batched: number; processing: number; totalQueued: number }): void { + // intentional no-op } } @@ -100,15 +92,97 @@ class CLIConverterAdapter implements IStorageEventConverterAdapter { } /** - * CLI-specific watch adapter (optional file watching with chokidar) + * CLI-specific watch adapter using chokidar for real-time filesystem monitoring. 
 */
 class CLIWatchAdapter implements IStorageEventWatchAdapter {
-    constructor(private basePath: string) {}
+    private _watcher: FSWatcher | undefined;
+
+    constructor(private basePath: string, private ignoreRules?: IgnoreRules, private watchEnabled: boolean = false) {}
+
+    private _toNodeFile(filePath: string, stats: Stats | undefined): NodeFile {
+        return {
+            path: path.relative(this.basePath, filePath) as FilePath,
+            stat: {
+                ctime: stats?.ctimeMs ?? Date.now(),
+                mtime: stats?.mtimeMs ?? Date.now(),
+                size: stats?.size ?? 0,
+                type: "file",
+            },
+        };
+    }
+
+    private _toNodeFolder(dirPath: string): NodeFolder {
+        return {
+            path: path.relative(this.basePath, dirPath) as FilePath,
+            isFolder: true,
+        };
+    }
 
     async beginWatch(handlers: IStorageEventWatchHandlers): Promise<void> {
-        // File watching is not activated in the CLI.
-        // Because the CLI is designed for push/pull operations, not real-time sync.
-        // console.error("[CLIWatchAdapter] File watching is not enabled in CLI version");
+        if (!this.watchEnabled) return;
+        const baseIgnored: Array<RegExp | ((p: string) => boolean)> = [
+            /(^|[/\\])\./,
+            /(^|[/\\])[^/\\]*-livesync-v2([/\\]|$)/,
+        ];
+        // Bind rules to a local const before the closure — chokidar v4 requires a
+        // MatchFunction, not glob strings, for custom patterns.
+        const rules = this.ignoreRules;
+        const ignored = rules
+            ? 
[...baseIgnored, (p: string) => rules.shouldIgnore(path.relative(this.basePath, p))] + : baseIgnored; + + const watcher = chokidarWatch(this.basePath, { + ignored, + ignoreInitial: true, + persistent: true, + awaitWriteFinish: { + stabilityThreshold: 500, + pollInterval: 100, + }, + }); + + watcher.on("add", (filePath, stats) => { + const nodeFile = this._toNodeFile(filePath, stats); + handlers.onCreate(nodeFile); + }); + + watcher.on("change", (filePath, stats) => { + const nodeFile = this._toNodeFile(filePath, stats); + handlers.onChange(nodeFile); + }); + + watcher.on("unlink", (filePath) => { + const nodeFile = this._toNodeFile(filePath, undefined); + handlers.onDelete(nodeFile); + }); + + watcher.on("addDir", (dirPath) => { + const nodeFolder = this._toNodeFolder(dirPath); + handlers.onCreate(nodeFolder); + }); + + watcher.on("unlinkDir", (dirPath) => { + const nodeFolder = this._toNodeFolder(dirPath); + handlers.onDelete(nodeFolder); + }); + + watcher.on("error", (err) => { + console.error("[CLIWatchAdapter] Fatal watcher error — file watching stopped:", err); + console.error("[CLIWatchAdapter] Exiting for systemd restart."); + void watcher.close(); + this._watcher = undefined; + // Use exit(1) rather than SIGTERM so systemd Restart=on-failure engages. 
+            process.exit(1);
+        });
+
+        await new Promise((resolve) => watcher.once("ready", resolve));
+        this._watcher = watcher;
+    }
+
+    close(): Promise<void> {
+        if (this._watcher) {
+            return this._watcher.close();
+        }
         return Promise.resolve();
     }
 }
@@ -123,11 +197,15 @@ export class CLIStorageEventManagerAdapter implements IStorageEventManagerAdapte
     readonly status: CLIStatusAdapter;
     readonly converter: CLIConverterAdapter;
 
-    constructor(basePath: string) {
+    constructor(basePath: string, ignoreRules?: IgnoreRules, watchEnabled: boolean = false) {
         this.typeGuard = new CLITypeGuardAdapter();
         this.persistence = new CLIPersistenceAdapter(basePath);
-        this.watch = new CLIWatchAdapter(basePath);
+        this.watch = new CLIWatchAdapter(basePath, ignoreRules, watchEnabled);
         this.status = new CLIStatusAdapter();
         this.converter = new CLIConverterAdapter();
     }
+
+    close(): Promise<void> {
+        return this.watch.close();
+    }
 }
diff --git a/src/apps/cli/managers/CLIStorageEventManagerAdapter.unit.spec.ts b/src/apps/cli/managers/CLIStorageEventManagerAdapter.unit.spec.ts
new file mode 100644
index 0000000..edfb222
--- /dev/null
+++ b/src/apps/cli/managers/CLIStorageEventManagerAdapter.unit.spec.ts
@@ -0,0 +1,126 @@
+import { describe, expect, it, vi, beforeEach } from "vitest";
+import type { IStorageEventWatchHandlers } from "@lib/managers/adapters";
+import type { NodeFile } from "../adapters/NodeTypes";
+
+// ── chokidar mock ──────────────────────────────────────────────────────────────
+// Must be hoisted before imports that pull in chokidar. 
+ +const mockWatcher = { + on: vi.fn().mockReturnThis(), + once: vi.fn((event: string, cb: () => void) => { + if (event === "ready") cb(); + return mockWatcher; + }), + close: vi.fn(() => Promise.resolve()), +}; + +vi.mock("chokidar", () => ({ + watch: vi.fn(() => mockWatcher), +})); + +import * as chokidar from "chokidar"; +import { CLIStorageEventManagerAdapter } from "./CLIStorageEventManagerAdapter"; + +// ── helpers ─────────────────────────────────────────────────────────────────── + +function makeHandlers(): IStorageEventWatchHandlers { + return { + onCreate: vi.fn(), + onChange: vi.fn(), + onDelete: vi.fn(), + onRename: vi.fn(), + } as any; +} + +// ── tests ───────────────────────────────────────────────────────────────────── + +describe("CLIStorageEventManagerAdapter", () => { + beforeEach(() => { + vi.clearAllMocks(); + // Restore the default once() behaviour (ready fires synchronously). + mockWatcher.once.mockImplementation((event: string, cb: () => void) => { + if (event === "ready") cb(); + return mockWatcher; + }); + }); + + it("beginWatch is no-op when watchEnabled=false", async () => { + const adapter = new CLIStorageEventManagerAdapter("/base", undefined, false); + const handlers = makeHandlers(); + + await adapter.watch.beginWatch(handlers); + + expect(chokidar.watch).not.toHaveBeenCalled(); + }); + + it("beginWatch calls chokidar.watch when watchEnabled=true", async () => { + const adapter = new CLIStorageEventManagerAdapter("/base", undefined, true); + const handlers = makeHandlers(); + + await adapter.watch.beginWatch(handlers); + + expect(chokidar.watch).toHaveBeenCalledTimes(1); + expect(chokidar.watch).toHaveBeenCalledWith( + "/base", + expect.objectContaining({ ignoreInitial: true }) + ); + }); + + it("add event produces NodeFile with correct relative path via onCreate", async () => { + const basePath = "/vault/base"; + const adapter = new CLIStorageEventManagerAdapter(basePath, undefined, true); + const handlers = makeHandlers(); + + 
await adapter.watch.beginWatch(handlers); + + // Find the callback registered for the "add" event. + const addCall = mockWatcher.on.mock.calls.find(([event]) => event === "add"); + expect(addCall).toBeDefined(); + const addCallback = addCall![1] as (filePath: string, stats: any) => void; + + const fakeStats = { ctimeMs: 1000, mtimeMs: 2000, size: 42 }; + addCallback(`${basePath}/subdir/note.md`, fakeStats); + + expect(handlers.onCreate).toHaveBeenCalledTimes(1); + const created = (handlers.onCreate as ReturnType).mock.calls[0][0] as NodeFile; + expect(created.path).toBe("subdir/note.md"); + expect(created.stat?.size).toBe(42); + }); + + it("close() calls watcher.close()", async () => { + const adapter = new CLIStorageEventManagerAdapter("/base", undefined, true); + const handlers = makeHandlers(); + + await adapter.watch.beginWatch(handlers); + await adapter.close(); + + expect(mockWatcher.close).toHaveBeenCalledTimes(1); + }); + + it("close() is safe when no watcher was started", async () => { + const adapter = new CLIStorageEventManagerAdapter("/base", undefined, false); + + // Should not throw. 
+ await expect(adapter.close()).resolves.toBeUndefined(); + expect(mockWatcher.close).not.toHaveBeenCalled(); + }); + + it("error event triggers process.exit(1)", async () => { + const adapter = new CLIStorageEventManagerAdapter("/base", undefined, true); + const handlers = makeHandlers(); + + await adapter.watch.beginWatch(handlers); + + const processExitSpy = vi.spyOn(process, "exit").mockImplementation((() => {}) as any); + + const errorCall = mockWatcher.on.mock.calls.find(([event]) => event === "error"); + expect(errorCall).toBeDefined(); + const errorCallback = errorCall![1] as (err: Error) => void; + + errorCallback(new Error("disk failure")); + + expect(processExitSpy).toHaveBeenCalledWith(1); + + processExitSpy.mockRestore(); + }); +}); diff --git a/src/apps/cli/managers/StorageEventManagerCLI.ts b/src/apps/cli/managers/StorageEventManagerCLI.ts index d1f2504..7838ef3 100644 --- a/src/apps/cli/managers/StorageEventManagerCLI.ts +++ b/src/apps/cli/managers/StorageEventManagerCLI.ts @@ -2,6 +2,7 @@ import { StorageEventManagerBase, type StorageEventManagerBaseDependencies } fro import { CLIStorageEventManagerAdapter } from "./CLIStorageEventManagerAdapter"; import type { IMinimumLiveSyncCommands, LiveSyncBaseCore } from "../../../LiveSyncBaseCore"; import type { ServiceContext } from "@lib/services/base/ServiceBase"; +import type { IgnoreRules } from "../serviceModules/IgnoreRules"; // import type { IMinimumLiveSyncCommands } from "@lib/services/base/IService"; export class StorageEventManagerCLI extends StorageEventManagerBase { @@ -10,9 +11,11 @@ export class StorageEventManagerCLI extends StorageEventManagerBase, - dependencies: StorageEventManagerBaseDependencies + dependencies: StorageEventManagerBaseDependencies, + ignoreRules?: IgnoreRules, + watchEnabled?: boolean ) { - const adapter = new CLIStorageEventManagerAdapter(basePath); + const adapter = new CLIStorageEventManagerAdapter(basePath, ignoreRules, watchEnabled); super(adapter, dependencies); 
this.core = core; } @@ -25,4 +28,11 @@ export class StorageEventManagerCLI extends StorageEventManagerBase { + return this.adapter.close(); + } } diff --git a/src/apps/cli/package.json b/src/apps/cli/package.json index 4deaade..18768a9 100644 --- a/src/apps/cli/package.json +++ b/src/apps/cli/package.json @@ -6,6 +6,7 @@ "type": "module", "scripts": { "dev": "vite", + "prebuild": "node scripts/check-submodule.mjs", "build": "vite build", "preview": "vite preview", "cli": "node dist/index.cjs", diff --git a/src/apps/cli/runtime-package.json b/src/apps/cli/runtime-package.json index 5791992..305d966 100644 --- a/src/apps/cli/runtime-package.json +++ b/src/apps/cli/runtime-package.json @@ -4,6 +4,7 @@ "version": "0.0.0", "description": "Runtime dependencies for Self-hosted LiveSync CLI Docker image", "dependencies": { + "chokidar": "^4.0.0", "commander": "^14.0.3", "werift": "^0.22.9", "pouchdb-adapter-http": "^9.0.0", diff --git a/src/apps/cli/scripts/check-submodule.mjs b/src/apps/cli/scripts/check-submodule.mjs new file mode 100644 index 0000000..6235507 --- /dev/null +++ b/src/apps/cli/scripts/check-submodule.mjs @@ -0,0 +1,36 @@ +import fs from "node:fs"; +import path from "node:path"; +import process from "node:process"; + +const cliDir = process.cwd(); +const repoRoot = path.resolve(cliDir, "../../.."); +const requiredFiles = [ + path.join(repoRoot, "src/lib/src/common/types.ts"), +]; + +const missingFiles = requiredFiles.filter((filePath) => !fs.existsSync(filePath)); + +if (missingFiles.length === 0) { + process.exit(0); +} + +console.error("[CLI Build Error] Required shared sources were not found."); +console.error("This repository uses Git submodules, and the CLI depends on src/lib."); +console.error(""); +console.error("Missing file(s):"); +for (const filePath of missingFiles) { + console.error(` - ${path.relative(repoRoot, filePath)}`); +} +console.error(""); +console.error("Initialize submodules, then retry the CLI build:"); +console.error(" git 
submodule update --init --recursive"); +console.error(""); +console.error("For a fresh clone, prefer:"); +console.error(" git clone --recurse-submodules "); +console.error(""); +console.error("Then run:"); +console.error(" npm install"); +console.error(" cd src/apps/cli"); +console.error(" npm run build"); + +process.exit(1); diff --git a/src/apps/cli/serviceModules/CLIServiceModules.ts b/src/apps/cli/serviceModules/CLIServiceModules.ts index 8cf0f40..6c4cce5 100644 --- a/src/apps/cli/serviceModules/CLIServiceModules.ts +++ b/src/apps/cli/serviceModules/CLIServiceModules.ts @@ -9,6 +9,7 @@ import { ServiceFileAccessCLI } from "./ServiceFileAccessImpl"; import { ServiceDatabaseFileAccessCLI } from "./DatabaseFileAccess"; import { StorageEventManagerCLI } from "../managers/StorageEventManagerCLI"; import type { ServiceModules } from "@lib/interfaces/ServiceModule"; +import type { IgnoreRules } from "./IgnoreRules"; /** * Initialize service modules for CLI version @@ -22,7 +23,9 @@ import type { ServiceModules } from "@lib/interfaces/ServiceModule"; export function initialiseServiceModulesCLI( basePath: string, core: LiveSyncBaseCore, - services: InjectableServiceHub + services: InjectableServiceHub, + ignoreRules?: IgnoreRules, + watchEnabled: boolean = false, ): ServiceModules { const storageAccessManager = new StorageAccessManager(); @@ -42,6 +45,12 @@ export function initialiseServiceModulesCLI( vaultService: services.vault, storageAccessManager: storageAccessManager, APIService: services.API, + }, ignoreRules, watchEnabled); + + // Close the file watcher during graceful shutdown so the process can exit cleanly. 
+ services.appLifecycle.onUnload.addHandler(async () => { + await storageEventManager.close(); + return true; }); // Storage access using CLI file system adapter diff --git a/src/apps/cli/serviceModules/IgnoreRules.ts b/src/apps/cli/serviceModules/IgnoreRules.ts new file mode 100644 index 0000000..9764fd2 --- /dev/null +++ b/src/apps/cli/serviceModules/IgnoreRules.ts @@ -0,0 +1,129 @@ +import * as fs from "fs/promises"; +import * as path from "path"; + +import { minimatch } from "minimatch"; + +/** + * Loads and evaluates ignore rules from `.livesync/ignore` inside the vault. + * + * File format: + * - Lines starting with `#` are comments. + * - Blank lines are ignored. + * - `import: .gitignore` (exactly) — merges patterns from the vault's `.gitignore`. + * - All other lines are minimatch glob patterns relative to the vault root. + * + * Negation patterns (lines starting with `!`) are not supported. Loading a + * ruleset containing them throws an error — use separate include/exclude files + * instead. + * + * Missing files (`.livesync/ignore` or `.gitignore`) are silently skipped. + */ +export class IgnoreRules { + private patterns: string[] = []; + + constructor(private vaultPath: string) {} + + /** + * Reads `.livesync/ignore` (and optionally `.gitignore`) and populates the + * pattern list. Safe to call multiple times — each call replaces the + * previous state. Does not throw if files are absent. + * + * @throws if any pattern line begins with `!` (negation is unsupported). + */ + async load(): Promise { + this.patterns = []; + const ignorePath = path.join(this.vaultPath, ".livesync", "ignore"); + let rawLines: string[]; + try { + const content = await fs.readFile(ignorePath, "utf-8"); + rawLines = content.split(/\r?\n/); + } catch { + // File absent or unreadable — treat as empty ruleset. 
+ return; + } + + for (const line of rawLines) { + const trimmed = line.trim(); + if (!trimmed || trimmed.startsWith("#")) { + continue; + } + // NOTE: Only the exact string "import: .gitignore" is recognised. + // Any future generalisation of this directive must validate that + // the resolved path stays within the vault directory. + if (trimmed === "import: .gitignore") { + await this._importGitignore(); + continue; + } + if (trimmed.startsWith("import:")) { + console.error(`[IgnoreRules] Warning: unrecognised directive '${trimmed}' — only 'import: .gitignore' is supported`); + continue; + } + this._addPattern(trimmed); + } + if (this.patterns.length > 0) { + console.error(`[IgnoreRules] Loaded ${this.patterns.length} ignore patterns`); + } + } + + // Normalises a single gitignore-style pattern: + // - Patterns ending with `/` (directory patterns like `build/`) are + // converted to `build/**` so they match all files inside that directory. + // - Patterns without a `/` are prefixed with `**/` to give them matchBase + // semantics (e.g. `*.tmp` → `**/*.tmp`), matching the basename in any + // subdirectory as gitignore does. + // - Patterns that already contain a `/` (but don't end with one) are + // path-specific and used as-is. 
+ private _normalisePattern(pattern: string): string { + if (pattern.endsWith("/")) { + return "**/" + pattern + "**"; + } else if (!pattern.includes("/")) { + return "**/" + pattern; + } + return pattern; + } + + private async _importGitignore(): Promise { + const gitignorePath = path.join(this.vaultPath, ".gitignore"); + let content: string; + try { + content = await fs.readFile(gitignorePath, "utf-8"); + } catch { + return; + } + this._parseLines(content); + } + + private _parseLines(content: string): void { + for (const line of content.split(/\r?\n/)) { + const trimmed = line.trim(); + if (!trimmed || trimmed.startsWith("#")) continue; + this._addPattern(trimmed); + } + } + + private _addPattern(raw: string): void { + if (raw.startsWith("!")) { + throw new Error( + `[IgnoreRules] Negation pattern '${raw}' is not supported. ` + + `Remove it from .livesync/ignore or use a separate include/exclude file.` + ); + } + this.patterns.push(this._normalisePattern(raw)); + } + + /** + * Returns `true` if the given vault-relative path matches any loaded + * ignore pattern. + * + * @param relativePath - Path relative to the vault root, using forward + * slashes or the OS separator. + */ + shouldIgnore(relativePath: string): boolean { + if (this.patterns.length === 0) { + return false; + } + // Normalise to forward slashes for minimatch. 
+ const normalised = relativePath.replace(/\\/g, "/"); + return this.patterns.some((p) => minimatch(normalised, p, { dot: true })); + } +} diff --git a/src/apps/cli/serviceModules/IgnoreRules.unit.spec.ts b/src/apps/cli/serviceModules/IgnoreRules.unit.spec.ts new file mode 100644 index 0000000..59bfb12 --- /dev/null +++ b/src/apps/cli/serviceModules/IgnoreRules.unit.spec.ts @@ -0,0 +1,172 @@ +import * as fs from "node:fs/promises"; +import * as os from "node:os"; +import * as path from "node:path"; +import { afterEach, beforeEach, describe, expect, it } from "vitest"; +import { IgnoreRules } from "./IgnoreRules"; + +describe("IgnoreRules", () => { + const tempDirs: string[] = []; + + async function createVault(): Promise { + const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "livesync-ignorerules-")); + tempDirs.push(tempDir); + return tempDir; + } + + async function writeIgnoreFile(vaultPath: string, content: string): Promise { + const ignoreDir = path.join(vaultPath, ".livesync"); + await fs.mkdir(ignoreDir, { recursive: true }); + await fs.writeFile(path.join(ignoreDir, "ignore"), content, "utf-8"); + } + + afterEach(async () => { + await Promise.all(tempDirs.splice(0).map((dir) => fs.rm(dir, { recursive: true, force: true }))); + }); + + describe("pattern normalisation", () => { + it("adds **/ prefix to basename patterns (no slash)", async () => { + const vaultPath = await createVault(); + await writeIgnoreFile(vaultPath, "*.tmp\n"); + const rules = new IgnoreRules(vaultPath); + await rules.load(); + expect(rules.shouldIgnore("notes/scratch.tmp")).toBe(true); + expect(rules.shouldIgnore("scratch.tmp")).toBe(true); + expect(rules.shouldIgnore("deep/nested/file.tmp")).toBe(true); + }); + + it("appends ** to directory patterns ending with / and prepends **/", async () => { + const vaultPath = await createVault(); + await writeIgnoreFile(vaultPath, "build/\n"); + const rules = new IgnoreRules(vaultPath); + await rules.load(); + 
expect(rules.shouldIgnore("build/output.js")).toBe(true); + expect(rules.shouldIgnore("build/nested/file.js")).toBe(true); + expect(rules.shouldIgnore("subproject/build/output.js")).toBe(true); + }); + + it("leaves patterns containing / as-is", async () => { + const vaultPath = await createVault(); + await writeIgnoreFile(vaultPath, "docs/private.md\n"); + const rules = new IgnoreRules(vaultPath); + await rules.load(); + expect(rules.shouldIgnore("docs/private.md")).toBe(true); + expect(rules.shouldIgnore("other/docs/private.md")).toBe(false); + }); + }); + + describe("shouldIgnore", () => { + it("matches **/*.tmp against notes/scratch.tmp", async () => { + const vaultPath = await createVault(); + await writeIgnoreFile(vaultPath, "*.tmp\n"); + const rules = new IgnoreRules(vaultPath); + await rules.load(); + expect(rules.shouldIgnore("notes/scratch.tmp")).toBe(true); + }); + + it("does not match notes/readme.md against **/*.tmp", async () => { + const vaultPath = await createVault(); + await writeIgnoreFile(vaultPath, "*.tmp\n"); + const rules = new IgnoreRules(vaultPath); + await rules.load(); + expect(rules.shouldIgnore("notes/readme.md")).toBe(false); + }); + + it("returns false when no patterns are loaded", async () => { + const vaultPath = await createVault(); + const rules = new IgnoreRules(vaultPath); + // No load() call — patterns are empty + expect(rules.shouldIgnore("anything.md")).toBe(false); + }); + }); + + describe("negation patterns", () => { + it("throws when a negation pattern is encountered", async () => { + const vaultPath = await createVault(); + await writeIgnoreFile(vaultPath, "*.tmp\n!important.tmp\n"); + const rules = new IgnoreRules(vaultPath); + await expect(rules.load()).rejects.toThrow(/Negation pattern/); + }); + + it("throws when a .gitignore imported via directive contains negation", async () => { + const vaultPath = await createVault(); + await writeIgnoreFile(vaultPath, "import: .gitignore\n"); + await 
fs.writeFile(path.join(vaultPath, ".gitignore"), "*.log\n!keep.log\n", "utf-8"); + const rules = new IgnoreRules(vaultPath); + await expect(rules.load()).rejects.toThrow(/Negation pattern/); + }); + }); + + describe("unrecognised import: directives", () => { + it("warns and skips unrecognised import: forms (does not add as literal pattern)", async () => { + const vaultPath = await createVault(); + // Typo: "import:.gitignore" instead of "import: .gitignore" + await writeIgnoreFile(vaultPath, "*.tmp\nimport:.gitignore\n"); + const rules = new IgnoreRules(vaultPath); + await rules.load(); + // *.tmp still loaded; import:.gitignore is skipped (not treated as a literal pattern) + expect(rules.shouldIgnore("scratch.tmp")).toBe(true); + expect(rules.shouldIgnore("import:.gitignore")).toBe(false); + }); + }); + + describe("load() with missing file", () => { + it("returns without error when .livesync/ignore is absent", async () => { + const vaultPath = await createVault(); + // No ignore file created + const rules = new IgnoreRules(vaultPath); + await expect(rules.load()).resolves.toBeUndefined(); + expect(rules.shouldIgnore("anything.md")).toBe(false); + }); + }); + + describe("load() with comments and blank lines", () => { + it("skips # comment lines and blank lines", async () => { + const vaultPath = await createVault(); + await writeIgnoreFile( + vaultPath, + "# This is a comment\n\n \n*.tmp\n# another comment\nbuild/\n" + ); + const rules = new IgnoreRules(vaultPath); + await rules.load(); + expect(rules.shouldIgnore("scratch.tmp")).toBe(true); + expect(rules.shouldIgnore("build/output.js")).toBe(true); + expect(rules.shouldIgnore("readme.md")).toBe(false); + }); + }); + + describe("import: .gitignore directive", () => { + it("reads and normalises patterns from .gitignore", async () => { + const vaultPath = await createVault(); + await writeIgnoreFile(vaultPath, "import: .gitignore\n"); + await fs.writeFile(path.join(vaultPath, ".gitignore"), "*.log\nnode_modules/\n", 
"utf-8"); + const rules = new IgnoreRules(vaultPath); + await rules.load(); + expect(rules.shouldIgnore("app.log")).toBe(true); + expect(rules.shouldIgnore("node_modules/package.json")).toBe(true); + expect(rules.shouldIgnore("src/node_modules/package.json")).toBe(true); + expect(rules.shouldIgnore("src/index.ts")).toBe(false); + }); + + it("merges .gitignore patterns with other patterns", async () => { + const vaultPath = await createVault(); + await writeIgnoreFile(vaultPath, "*.tmp\nimport: .gitignore\n"); + await fs.writeFile(path.join(vaultPath, ".gitignore"), "*.log\n", "utf-8"); + const rules = new IgnoreRules(vaultPath); + await rules.load(); + expect(rules.shouldIgnore("scratch.tmp")).toBe(true); + expect(rules.shouldIgnore("error.log")).toBe(true); + }); + }); + + describe("import: .gitignore with missing .gitignore", () => { + it("does not throw when .gitignore is absent", async () => { + const vaultPath = await createVault(); + await writeIgnoreFile(vaultPath, "*.tmp\nimport: .gitignore\n"); + // No .gitignore created + const rules = new IgnoreRules(vaultPath); + await expect(rules.load()).resolves.toBeUndefined(); + // The *.tmp pattern from the ignore file still works + expect(rules.shouldIgnore("scratch.tmp")).toBe(true); + }); + }); +}); diff --git a/src/apps/cli/test/test-daemon-linux.sh b/src/apps/cli/test/test-daemon-linux.sh new file mode 100755 index 0000000..96db2c7 --- /dev/null +++ b/src/apps/cli/test/test-daemon-linux.sh @@ -0,0 +1,166 @@ +#!/usr/bin/env bash +# Test: daemon-related ignore rules behaviour +# +# Tests that are runnable without a long-running daemon process are exercised +# here using the `mirror` command, which calls the same `isTargetFile` handler +# stack that the daemon uses. +# +# Covered cases: +# 1. .livesync/ignore with *.tmp pattern → ignored file is NOT synced to DB +# 2. .livesync/ignore missing → no error, normal sync continues +# 3. 
import: .gitignore directive → patterns from .gitignore are merged +# +set -euo pipefail + +SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)" +CLI_DIR="$(cd -- "$SCRIPT_DIR/.." && pwd)" +cd "$CLI_DIR" +source "$SCRIPT_DIR/test-helpers.sh" +display_test_info + +RUN_BUILD="${RUN_BUILD:-1}" +cli_test_init_cli_cmd + +WORK_DIR="$(mktemp -d "${TMPDIR:-/tmp}/livesync-cli-daemon-test.XXXXXX")" +trap 'rm -rf "$WORK_DIR"' EXIT + +SETTINGS_FILE="$WORK_DIR/data.json" +VAULT_DIR="$WORK_DIR/vault" +mkdir -p "$VAULT_DIR/notes" + +if [[ "$RUN_BUILD" == "1" ]]; then + echo "[INFO] building CLI..." + npm run build +fi + +echo "[INFO] generating settings -> $SETTINGS_FILE" +cli_test_init_settings_file "$SETTINGS_FILE" +cli_test_mark_settings_configured "$SETTINGS_FILE" + +PASS=0 +FAIL=0 + +assert_pass() { echo "[PASS] $1"; PASS=$((PASS + 1)); } +assert_fail() { echo "[FAIL] $1" >&2; FAIL=$((FAIL + 1)); } + +# ───────────────────────────────────────────────────────────────────────────── +# Case 1: .livesync/ignore with *.tmp → matched file should NOT appear in DB +# ───────────────────────────────────────────────────────────────────────────── +echo "" +echo "=== Case 1: .livesync/ignore *.tmp → ignored file not synced to DB ===" + +mkdir -p "$VAULT_DIR/.livesync" +printf '*.tmp\n' > "$VAULT_DIR/.livesync/ignore" + +# Also write a normal file so we can confirm mirror ran at all. +printf 'normal content\n' > "$VAULT_DIR/notes/normal.md" +# Write the file that should be ignored. +printf 'tmp content\n' > "$VAULT_DIR/notes/scratch.tmp" + +run_cli "$VAULT_DIR" --settings "$SETTINGS_FILE" mirror + +# The normal file should be in the DB. 
+RESULT_NORMAL="$WORK_DIR/case1-normal.txt" +if run_cli "$VAULT_DIR" --settings "$SETTINGS_FILE" pull notes/normal.md "$RESULT_NORMAL" 2>/dev/null; then + if cmp -s "$VAULT_DIR/notes/normal.md" "$RESULT_NORMAL"; then + assert_pass "normal.md was synced to DB" + else + assert_fail "normal.md content mismatch after mirror" + fi +else + assert_fail "normal.md was not found in DB after mirror" +fi + +# The .tmp file should NOT be in the DB. +DB_LIST="$WORK_DIR/case1-ls.txt" +run_cli "$VAULT_DIR" --settings "$SETTINGS_FILE" ls > "$DB_LIST" +if grep -q "scratch.tmp" "$DB_LIST"; then + assert_fail "scratch.tmp (ignored) was unexpectedly synced to DB" + echo "--- DB listing ---" >&2; cat "$DB_LIST" >&2 +else + assert_pass "scratch.tmp (*.tmp pattern) was NOT synced to DB" +fi + +# ───────────────────────────────────────────────────────────────────────────── +# Case 2: .livesync/ignore absent → no error, normal sync continues +# ───────────────────────────────────────────────────────────────────────────── +echo "" +echo "=== Case 2: .livesync/ignore absent → no error, sync continues ===" + +VAULT_DIR2="$WORK_DIR/vault2" +mkdir -p "$VAULT_DIR2/notes" +SETTINGS_FILE2="$WORK_DIR/data2.json" +cli_test_init_settings_file "$SETTINGS_FILE2" +cli_test_mark_settings_configured "$SETTINGS_FILE2" + +# No .livesync directory at all. +printf 'hello\n' > "$VAULT_DIR2/notes/hello.md" + +# mirror should succeed without error. +set +e +MIRROR_OUTPUT="$WORK_DIR/case2-mirror.txt" +run_cli "$VAULT_DIR2" --settings "$SETTINGS_FILE2" mirror >"$MIRROR_OUTPUT" 2>&1 +MIRROR_EXIT=$? +set -e + +if [[ "$MIRROR_EXIT" -ne 0 ]]; then + assert_fail "mirror exited non-zero ($MIRROR_EXIT) when .livesync/ignore is absent" + cat "$MIRROR_OUTPUT" >&2 +else + assert_pass "mirror succeeded when .livesync/ignore is absent" +fi + +# The normal file should have been synced. 
+RESULT_HELLO="$WORK_DIR/case2-hello.txt" +if run_cli "$VAULT_DIR2" --settings "$SETTINGS_FILE2" pull notes/hello.md "$RESULT_HELLO" 2>/dev/null; then + assert_pass "file synced normally when .livesync/ignore is absent" +else + assert_fail "file was not synced when .livesync/ignore is absent" +fi + +# ───────────────────────────────────────────────────────────────────────────── +# Case 3: import: .gitignore merges patterns +# ───────────────────────────────────────────────────────────────────────────── +echo "" +echo "=== Case 3: import: .gitignore directive merges patterns ===" + +VAULT_DIR3="$WORK_DIR/vault3" +mkdir -p "$VAULT_DIR3/notes" +SETTINGS_FILE3="$WORK_DIR/data3.json" +cli_test_init_settings_file "$SETTINGS_FILE3" +cli_test_mark_settings_configured "$SETTINGS_FILE3" + +mkdir -p "$VAULT_DIR3/.livesync" +printf 'import: .gitignore\n' > "$VAULT_DIR3/.livesync/ignore" +printf '# gitignore comment\n*.log\nbuild/\n' > "$VAULT_DIR3/.gitignore" + +printf 'regular note\n' > "$VAULT_DIR3/notes/regular.md" +printf 'log content\n' > "$VAULT_DIR3/notes/debug.log" + +run_cli "$VAULT_DIR3" --settings "$SETTINGS_FILE3" mirror + +DB_LIST3="$WORK_DIR/case3-ls.txt" +run_cli "$VAULT_DIR3" --settings "$SETTINGS_FILE3" ls > "$DB_LIST3" + +if grep -q "debug.log" "$DB_LIST3"; then + assert_fail "debug.log (ignored via .gitignore import) was unexpectedly synced to DB" + echo "--- DB listing ---" >&2; cat "$DB_LIST3" >&2 +else + assert_pass "debug.log (*.log from imported .gitignore) was NOT synced to DB" +fi + +# regular.md should still be present. 
+if grep -q "regular.md" "$DB_LIST3"; then + assert_pass "regular.md was synced normally alongside .gitignore import rules" +else + assert_fail "regular.md was NOT synced — .gitignore import may have been too broad" +fi + +# ───────────────────────────────────────────────────────────────────────────── +# Summary +# ───────────────────────────────────────────────────────────────────────────── +echo "" +echo "Results: PASS=$PASS FAIL=$FAIL" +if [[ "$FAIL" -gt 0 ]]; then + exit 1 +fi diff --git a/src/apps/cli/testdeno/.test.env b/src/apps/cli/testdeno/.test.env new file mode 100644 index 0000000..4ce5fb8 --- /dev/null +++ b/src/apps/cli/testdeno/.test.env @@ -0,0 +1,9 @@ +hostname=http://127.0.0.1:5989/ +dbname=livesync-test-db-ci +username=admin +password=testpassword +minioEndpoint=http://127.0.0.1:9000 +accessKey=minioadmin +secretKey=minioadmin +bucketName=livesync-test-bucket-ci +LIVESYNC_TEST_TEE=1 \ No newline at end of file diff --git a/src/apps/cli/testdeno/deno.json b/src/apps/cli/testdeno/deno.json index c056f9c..7f0d035 100644 --- a/src/apps/cli/testdeno/deno.json +++ b/src/apps/cli/testdeno/deno.json @@ -1,19 +1,19 @@ { "tasks": { - "test": "deno test -A --no-check test-*.ts", - "test:local": "deno test -A --no-check test-setup-put-cat.ts test-mirror.ts", - "test:push-pull": "deno test -A --no-check test-push-pull.ts", - "test:setup-put-cat": "deno test -A --no-check test-setup-put-cat.ts", - "test:mirror": "deno test -A --no-check test-mirror.ts", - "test:sync-two-local": "deno test -A --no-check test-sync-two-local-databases.ts", - "test:sync-locked-remote": "deno test -A --no-check test-sync-locked-remote.ts", - "test:p2p-host": "deno test -A --no-check test-p2p-host.ts", - "test:p2p-peers": "deno test -A --no-check test-p2p-peers-local-relay.ts", - "test:p2p-sync": "deno test -A --no-check test-p2p-sync.ts", - "test:p2p-three-nodes": "deno test -A --no-check test-p2p-three-nodes-conflict.ts", - "test:p2p-upload-download": "deno test -A --no-check 
test-p2p-upload-download-repro.ts", - "test:e2e-couchdb": "deno test -A --no-check test-e2e-two-vaults-couchdb.ts", - "test:e2e-matrix": "deno test -A --no-check test-e2e-two-vaults-matrix.ts" + "test": "deno test --env-file=.test.env -A --no-check test-*.ts", + "test:local": "deno test --env-file=.test.env -A --no-check test-setup-put-cat.ts test-mirror.ts", + "test:push-pull": "deno test --env-file=.test.env -A --no-check test-push-pull.ts", + "test:setup-put-cat": "deno test --env-file=.test.env -A --no-check test-setup-put-cat.ts", + "test:mirror": "deno test --env-file=.test.env -A --no-check test-mirror.ts", + "test:sync-two-local": "deno test --env-file=.test.env -A --no-check test-sync-two-local-databases.ts", + "test:sync-locked-remote": "deno test --env-file=.test.env -A --no-check test-sync-locked-remote.ts", + "test:p2p-host": "deno test --env-file=.test.env -A --no-check test-p2p-host.ts", + "test:p2p-peers": "deno test --env-file=.test.env -A --no-check test-p2p-peers-local-relay.ts", + "test:p2p-sync": "deno test --env-file=.test.env -A --no-check test-p2p-sync.ts", + "test:p2p-three-nodes": "deno test --env-file=.test.env -A --no-check test-p2p-three-nodes-conflict.ts", + "test:p2p-upload-download": "deno test --env-file=.test.env -A --no-check test-p2p-upload-download-repro.ts", + "test:e2e-couchdb": "deno test --env-file=.test.env -A --no-check test-e2e-two-vaults-couchdb.ts", + "test:e2e-matrix": "deno test --env-file=.test.env -A --no-check test-e2e-two-vaults-matrix.ts" }, "imports": { "@std/assert": "jsr:@std/assert@^1.0.13", diff --git a/src/apps/cli/testdeno/test-e2e-two-vaults-couchdb.ts b/src/apps/cli/testdeno/test-e2e-two-vaults-couchdb.ts index f1b60f1..6f5244b 100644 --- a/src/apps/cli/testdeno/test-e2e-two-vaults-couchdb.ts +++ b/src/apps/cli/testdeno/test-e2e-two-vaults-couchdb.ts @@ -1,6 +1,5 @@ import { assert } from "@std/assert"; import { TempDir } from "./helpers/temp.ts"; -import { loadEnvFile } from "./helpers/env.ts"; import { 
runCli, runCliOrFail, @@ -11,31 +10,29 @@ import { } from "./helpers/cli.ts"; import { applyRemoteSyncSettings, initSettingsFile } from "./helpers/settings.ts"; import { startCouchdb, startMinio, stopCouchdb, stopMinio } from "./helpers/docker.ts"; -import { join } from "@std/path"; - -const TEST_ENV = join(import.meta.dirname!, "..", ".test.env"); type RemoteType = "COUCHDB" | "MINIO"; -function requireEnv(env: Record, key: string): string { - const value = env[key]?.trim(); - if (!value) throw new Error(`Required env var is missing: ${key}`); - return value; +function requireEnv(...keys: string[]): string { + for (const key of keys) { + const value = Deno.env.get(key)?.trim(); + if (value) return value; + } + throw new Error(`Required env var is missing: ${keys.join(" or ")}`); } export async function runScenario(remoteType: RemoteType, encrypt: boolean): Promise { - const env = await loadEnvFile(TEST_ENV); const dbSuffix = `${Date.now()}-${Math.floor(Math.random() * 100000)}`; - const couchdbUri = remoteType === "COUCHDB" ? requireEnv(env, "hostname").replace(/\/$/, "") : ""; - const couchdbUser = remoteType === "COUCHDB" ? requireEnv(env, "username") : ""; - const couchdbPassword = remoteType === "COUCHDB" ? requireEnv(env, "password") : ""; - const dbPrefix = remoteType === "COUCHDB" ? requireEnv(env, "dbname") : ""; + const couchdbUri = remoteType === "COUCHDB" ? requireEnv("COUCHDB_URI", "hostname").replace(/\/$/, "") : ""; + const couchdbUser = remoteType === "COUCHDB" ? requireEnv("COUCHDB_USER", "username") : ""; + const couchdbPassword = remoteType === "COUCHDB" ? requireEnv("COUCHDB_PASSWORD", "password") : ""; + const dbPrefix = remoteType === "COUCHDB" ? requireEnv("COUCHDB_DBNAME", "dbname") : ""; const dbname = remoteType === "COUCHDB" ? `${dbPrefix}-${dbSuffix}` : ""; - const minioEndpoint = remoteType === "MINIO" ? requireEnv(env, "minioEndpoint").replace(/\/$/, "") : ""; - const minioAccessKey = remoteType === "MINIO" ? 
requireEnv(env, "accessKey") : ""; - const minioSecretKey = remoteType === "MINIO" ? requireEnv(env, "secretKey") : ""; - const minioBucketBase = remoteType === "MINIO" ? requireEnv(env, "bucketName") : ""; + const minioEndpoint = remoteType === "MINIO" ? requireEnv("MINIO_ENDPOINT", "minioEndpoint").replace(/\/$/, "") : ""; + const minioAccessKey = remoteType === "MINIO" ? requireEnv("MINIO_ACCESS_KEY", "accessKey") : ""; + const minioSecretKey = remoteType === "MINIO" ? requireEnv("MINIO_SECRET_KEY", "secretKey") : ""; + const minioBucketBase = remoteType === "MINIO" ? requireEnv("MINIO_BUCKET_NAME", "bucketName") : ""; const minioBucket = remoteType === "MINIO" ? `${minioBucketBase}-${dbSuffix}` : ""; const passphrase = "e2e-passphrase"; diff --git a/src/apps/cli/testdeno/test-sync-locked-remote.ts b/src/apps/cli/testdeno/test-sync-locked-remote.ts index 1dfc568..d8b2e3d 100644 --- a/src/apps/cli/testdeno/test-sync-locked-remote.ts +++ b/src/apps/cli/testdeno/test-sync-locked-remote.ts @@ -6,30 +6,26 @@ */ import { assert, assertStringIncludes } from "@std/assert"; -import { join } from "@std/path"; -import { loadEnvFile } from "./helpers/env.ts"; import { TempDir } from "./helpers/temp.ts"; import { runCli } from "./helpers/cli.ts"; import { applyCouchdbSettings, initSettingsFile } from "./helpers/settings.ts"; import { createCouchdbDatabase, startCouchdb, stopCouchdb, updateCouchdbDoc } from "./helpers/docker.ts"; -const TEST_ENV = join(import.meta.dirname!, "..", ".test.env"); const MILESTONE_DOC = "_local/obsydian_livesync_milestone"; -function requireEnv(env: Record, key: string): string { - const value = env[key]?.trim(); - if (!value) { - throw new Error(`Required env var is missing: ${key}`); +function requireEnv(...keys: string[]): string { + for (const key of keys) { + const value = Deno.env.get(key)?.trim(); + if (value) return value; } - return value; + throw new Error(`Required env var is missing: ${keys.join(" or ")}`); } Deno.test("sync: 
actionable error against locked remote DB", async () => { - const env = await loadEnvFile(TEST_ENV); - const couchdbUri = requireEnv(env, "hostname").replace(/\/$/, ""); - const couchdbUser = requireEnv(env, "username"); - const couchdbPassword = requireEnv(env, "password"); - const dbPrefix = requireEnv(env, "dbname"); + const couchdbUri = requireEnv("COUCHDB_URI", "hostname").replace(/\/$/, ""); + const couchdbUser = requireEnv("COUCHDB_USER", "username"); + const couchdbPassword = requireEnv("COUCHDB_PASSWORD", "password"); + const dbPrefix = requireEnv("COUCHDB_DBNAME", "dbname"); const dbname = `${dbPrefix}-locked-${Date.now()}-${Math.floor(Math.random() * 100000)}`; await using workDir = await TempDir.create("livesync-cli-locked-test"); diff --git a/src/apps/cli/testdeno/test-sync-two-local-databases.ts b/src/apps/cli/testdeno/test-sync-two-local-databases.ts index c14ee08..5717d40 100644 --- a/src/apps/cli/testdeno/test-sync-two-local-databases.ts +++ b/src/apps/cli/testdeno/test-sync-two-local-databases.ts @@ -23,13 +23,11 @@ * deno test -A test-sync-two-local-databases.ts */ -import { join } from "@std/path"; import { assertEquals, assert } from "@std/assert"; import { TempDir } from "./helpers/temp.ts"; -import { CLI_DIR, runCliOrFail, jsonFieldIsNa } from "./helpers/cli.ts"; +import { runCliOrFail, jsonFieldIsNa } from "./helpers/cli.ts"; import { applyCouchdbSettings, initSettingsFile } from "./helpers/settings.ts"; import { startCouchdb, stopCouchdb } from "./helpers/docker.ts"; -import { loadEnvFile } from "./helpers/env.ts"; // --------------------------------------------------------------------------- // Load configuration @@ -41,20 +39,7 @@ async function resolveConfig(): Promise<{ password: string; baseDbname: string; } | null> { - let env: Record = {}; - - // 1. Explicit environment variables take priority - if (Deno.env.get("COUCHDB_URI")) { - env = Object.fromEntries(Deno.env.toObject()); - } else { - // 2. 
TEST_ENV_FILE env var - const envFile = Deno.env.get("TEST_ENV_FILE") ?? join(CLI_DIR, ".test.env"); - try { - env = await loadEnvFile(envFile); - } catch { - return null; // no config available — skip - } - } + const env = Deno.env.toObject(); const uri = (env["COUCHDB_URI"] ?? env["hostname"] ?? "").replace(/\/$/, ""); const user = env["COUCHDB_USER"] ?? env["username"] ?? ""; diff --git a/src/apps/cli/vite.config.ts b/src/apps/cli/vite.config.ts index e78642c..74c4ba6 100644 --- a/src/apps/cli/vite.config.ts +++ b/src/apps/cli/vite.config.ts @@ -11,11 +11,54 @@ const defaultExternal = [ "crypto", "pouchdb-adapter-leveldb", "commander", + "chokidar", "punycode", "werift", ]; +// Polyfill FileReader at the very top of the CJS bundle. octagonal-wheels uses +// FileReader for base64 conversion when Uint8Array.toBase64 (TC39 proposal) is +// unavailable. Node.js has neither, so we inject a minimal FileReader shim before +// any module-scope code evaluates. +const fileReaderPolyfillBanner = ` +if (typeof globalThis.FileReader === "undefined") { + globalThis.FileReader = class FileReader { + constructor() { this.result = null; this.onload = null; this.onerror = null; } + readAsDataURL(blob) { + blob.arrayBuffer().then((buf) => { + var b64 = require("buffer").Buffer.from(buf).toString("base64"); + this.result = "data:" + (blob.type || "application/octet-stream") + ";base64," + b64; + if (this.onload) this.onload({ target: this }); + }).catch((err) => { if (this.onerror) this.onerror({ target: this, error: err }); }); + } + readAsArrayBuffer() { throw new Error("FileReader.readAsArrayBuffer is not implemented in this polyfill"); } + readAsBinaryString() { throw new Error("FileReader.readAsBinaryString is not implemented in this polyfill"); } + readAsText() { throw new Error("FileReader.readAsText is not implemented in this polyfill"); } + abort() { throw new Error("FileReader.abort is not implemented in this polyfill"); } + }; +} +`; + +function injectBanner(): 
import("vite").Plugin { + return { + name: "inject-banner", + generateBundle(_options, bundle) { + for (const chunk of Object.values(bundle)) { + if (chunk.type === "chunk" && chunk.fileName.startsWith("entrypoint")) { + // Insert after the shebang line if present, otherwise at the top. + if (chunk.code.startsWith("#!")) { + const newline = chunk.code.indexOf("\n"); + chunk.code = chunk.code.slice(0, newline + 1) + fileReaderPolyfillBanner + chunk.code.slice(newline + 1); + } else { + chunk.code = fileReaderPolyfillBanner + chunk.code; + } + } + } + }, + }; +} + export default defineConfig({ - plugins: [svelte()], + plugins: [svelte(), injectBanner()], resolve: { alias: { "@lib/worker/bgWorker.ts": "../../lib/src/worker/bgWorker.mock.ts", diff --git a/src/modules/features/DocumentHistory/DocumentHistoryModal.ts b/src/modules/features/DocumentHistory/DocumentHistoryModal.ts index f28f263..7e7560a 100644 --- a/src/modules/features/DocumentHistory/DocumentHistoryModal.ts +++ b/src/modules/features/DocumentHistory/DocumentHistoryModal.ts @@ -66,6 +66,11 @@ export class DocumentHistoryModal extends Modal { currentDeleted = false; initialRev?: string; + // Diff navigation state + currentDiffIndex = -1; + diffNavContainer!: HTMLDivElement; + diffNavIndicator!: HTMLSpanElement; + constructor( app: App, core: LiveSyncBaseCore, @@ -216,6 +221,64 @@ export class DocumentHistoryModal extends Modal { this.contentView.innerHTML = (this.currentDeleted ? "(At this revision, the file has been deleted)\n" : "") + result; } + // Reset diff navigation after content changes + this.resetDiffNavigation(); + if (this.showDiff) { + this.navigateDiff("next"); + } + } + + /** + * Navigate to the previous or next diff block in the content view. + * Only effective when diff highlighting is enabled. 
+ */ + navigateDiff(direction: "prev" | "next") { + const diffElements = this.contentView.querySelectorAll(".history-added, .history-deleted"); + if (diffElements.length === 0) return; + + // Remove previous focus highlight + const prevFocused = this.contentView.querySelector(".diff-focused"); + if (prevFocused) { + prevFocused.classList.remove("diff-focused"); + } + + if (direction === "next") { + this.currentDiffIndex = (this.currentDiffIndex + 1) % diffElements.length; + } else { + this.currentDiffIndex = + this.currentDiffIndex <= 0 ? diffElements.length - 1 : this.currentDiffIndex - 1; + } + + const target = diffElements[this.currentDiffIndex]; + target.classList.add("diff-focused"); + target.scrollIntoView({ behavior: "smooth", block: "center" }); + + this.diffNavIndicator.textContent = `${this.currentDiffIndex + 1}/${diffElements.length}`; + } + + /** + * Reset the diff navigation index and update the indicator. + */ + resetDiffNavigation() { + this.currentDiffIndex = -1; + if (this.diffNavIndicator) { + if (this.showDiff) { + const diffElements = this.contentView.querySelectorAll(".history-added, .history-deleted"); + this.diffNavIndicator.textContent = diffElements.length > 0 ? `0/${diffElements.length}` : "\u2014"; + } else { + this.diffNavIndicator.textContent = "\u2014"; + } + } + this.updateDiffNavVisibility(); + } + + /** + * Show or hide the diff navigation buttons based on the showDiff state. + */ + updateDiffNavVisibility() { + if (this.diffNavContainer) { + this.diffNavContainer.style.display = this.showDiff ? 
"flex" : "none"; + } } override onOpen() { @@ -236,25 +299,47 @@ export class DocumentHistoryModal extends Modal { void scheduleOnceIfDuplicated("loadRevs", () => this.loadRevs()); }); }); - contentEl - .createDiv("", (e) => { - e.createEl("label", {}, (label) => { - label.appendChild( - createEl("input", { type: "checkbox" }, (checkbox) => { - if (this.showDiff) { - checkbox.checked = true; - } - checkbox.addEventListener("input", (evt: any) => { - this.showDiff = checkbox.checked; - localStorage.setItem("ols-history-highlightdiff", this.showDiff == true ? "1" : ""); - void scheduleOnceIfDuplicated("loadRevs", () => this.loadRevs()); - }); - }) - ); - label.appendText("Highlight diff"); - }); - }) - .addClass("op-info"); + const diffOptionsRow = contentEl.createDiv(""); + diffOptionsRow.addClass("op-info"); + diffOptionsRow.addClass("diff-options-row"); + + diffOptionsRow.createEl("label", {}, (label) => { + label.appendChild( + createEl("input", { type: "checkbox" }, (checkbox) => { + if (this.showDiff) { + checkbox.checked = true; + } + checkbox.addEventListener("input", (evt: any) => { + this.showDiff = checkbox.checked; + localStorage.setItem("ols-history-highlightdiff", this.showDiff == true ? "1" : ""); + this.updateDiffNavVisibility(); + void scheduleOnceIfDuplicated("loadRevs", () => this.loadRevs()); + }); + }) + ); + label.appendText("Highlight diff"); + }); + + // Diff navigation buttons + this.diffNavContainer = diffOptionsRow.createDiv(""); + this.diffNavContainer.addClass("diff-nav"); + this.diffNavContainer.style.display = this.showDiff ? 
"flex" : "none"; + + this.diffNavContainer.createEl("button", { text: "\u25B2 Prev" }, (e) => { + e.addClass("diff-nav-btn"); + e.addEventListener("click", () => { + this.navigateDiff("prev"); + }); + }); + this.diffNavContainer.createEl("button", { text: "\u25BC Next" }, (e) => { + e.addClass("diff-nav-btn"); + e.addEventListener("click", () => { + this.navigateDiff("next"); + }); + }); + this.diffNavIndicator = this.diffNavContainer.createEl("span", { text: "\u2014" }); + this.diffNavIndicator.addClass("diff-nav-indicator"); + this.info = contentEl.createDiv(""); this.info.addClass("op-info"); fireAndForget(async () => await this.loadFile(this.initialRev)); diff --git a/src/modules/features/ModuleSetupObsidian.ts b/src/modules/features/ModuleSetupObsidian.ts deleted file mode 100644 index 2c715d5..0000000 --- a/src/modules/features/ModuleSetupObsidian.ts +++ /dev/null @@ -1,208 +0,0 @@ -import { type ObsidianLiveSyncSettings, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "../../lib/src/common/types.ts"; -import { configURIBase } from "../../common/types.ts"; -// import { PouchDB } from "../../lib/src/pouchdb/pouchdb-browser.js"; -import { fireAndForget } from "../../lib/src/common/utils.ts"; -import { - EVENT_REQUEST_COPY_SETUP_URI, - EVENT_REQUEST_OPEN_P2P_SETTINGS, - EVENT_REQUEST_OPEN_SETUP_URI, - EVENT_REQUEST_SHOW_SETUP_QR, - eventHub, -} from "../../common/events.ts"; -import { $msg } from "../../lib/src/common/i18n.ts"; -// import { performDoctorConsultation, RebuildOptions } from "@/lib/src/common/configForDoc.ts"; -import type { LiveSyncCore } from "../../main.ts"; -import { - encodeQR, - encodeSettingsToQRCodeData, - encodeSettingsToSetupURI, - OutputFormat, -} from "../../lib/src/API/processSetting.ts"; -import { SetupManager, UserMode } from "./SetupManager.ts"; -import { AbstractModule } from "../AbstractModule.ts"; - -export class ModuleSetupObsidian extends AbstractModule { - private _setupManager!: SetupManager; - private _everyOnload(): Promise { - 
this._setupManager = this.core.getModule(SetupManager); - try { - this.registerObsidianProtocolHandler("setuplivesync", async (conf: any) => { - if (conf.settings) { - await this._setupManager.onUseSetupURI( - UserMode.Unknown, - `${configURIBase}${encodeURIComponent(conf.settings)}` - ); - } else if (conf.settingsQR) { - await this._setupManager.decodeQR(conf.settingsQR); - } - }); - } catch (e) { - this._log( - "Failed to register protocol handler. This feature may not work in some environments.", - LOG_LEVEL_NOTICE - ); - this._log(e, LOG_LEVEL_VERBOSE); - } - this.addCommand({ - id: "livesync-setting-qr", - name: "Show settings as a QR code", - callback: () => fireAndForget(this.encodeQR()), - }); - - this.addCommand({ - id: "livesync-copysetupuri", - name: "Copy settings as a new setup URI", - callback: () => fireAndForget(this.command_copySetupURI()), - }); - this.addCommand({ - id: "livesync-copysetupuri-short", - name: "Copy settings as a new setup URI (With customization sync)", - callback: () => fireAndForget(this.command_copySetupURIWithSync()), - }); - - this.addCommand({ - id: "livesync-copysetupurifull", - name: "Copy settings as a new setup URI (Full)", - callback: () => fireAndForget(this.command_copySetupURIFull()), - }); - - this.addCommand({ - id: "livesync-opensetupuri", - name: "Use the copied setup URI (Formerly Open setup URI)", - callback: () => fireAndForget(this.command_openSetupURI()), - }); - - eventHub.onEvent(EVENT_REQUEST_OPEN_SETUP_URI, () => fireAndForget(() => this.command_openSetupURI())); - eventHub.onEvent(EVENT_REQUEST_COPY_SETUP_URI, () => fireAndForget(() => this.command_copySetupURI())); - eventHub.onEvent(EVENT_REQUEST_SHOW_SETUP_QR, () => fireAndForget(() => this.encodeQR())); - eventHub.onEvent(EVENT_REQUEST_OPEN_P2P_SETTINGS, () => - fireAndForget(() => { - return this._setupManager.onP2PManualSetup(UserMode.Update, this.settings, false); - }) - ); - return Promise.resolve(true); - } - async encodeQR() { - const 
settingString = encodeSettingsToQRCodeData(this.settings); - const codeSVG = encodeQR(settingString, OutputFormat.SVG); - if (codeSVG == "") { - return ""; - } - const msg = $msg("Setup.QRCode", { qr_image: codeSVG }); - await this.core.confirm.confirmWithMessage("Settings QR Code", msg, ["OK"], "OK"); - return await Promise.resolve(codeSVG); - } - - async askEncryptingPassphrase(): Promise { - const encryptingPassphrase = await this.core.confirm.askString( - "Encrypt your settings", - "The passphrase to encrypt the setup URI", - "", - true - ); - return encryptingPassphrase; - } - - async command_copySetupURI(stripExtra = true) { - const encryptingPassphrase = await this.askEncryptingPassphrase(); - if (encryptingPassphrase === false) return; - const encryptedURI = await encodeSettingsToSetupURI( - this.settings, - encryptingPassphrase, - [...((stripExtra ? ["pluginSyncExtendedSetting"] : []) as (keyof ObsidianLiveSyncSettings)[])], - true - ); - if (await this.services.UI.promptCopyToClipboard("Setup URI", encryptedURI)) { - this._log("Setup URI copied to clipboard", LOG_LEVEL_NOTICE); - } - // await navigator.clipboard.writeText(encryptedURI); - } - - async command_copySetupURIFull() { - const encryptingPassphrase = await this.askEncryptingPassphrase(); - if (encryptingPassphrase === false) return; - const encryptedURI = await encodeSettingsToSetupURI(this.settings, encryptingPassphrase, [], false); - await navigator.clipboard.writeText(encryptedURI); - this._log("Setup URI copied to clipboard", LOG_LEVEL_NOTICE); - } - - async command_copySetupURIWithSync() { - await this.command_copySetupURI(false); - } - async command_openSetupURI() { - await this._setupManager.onUseSetupURI(UserMode.Unknown); - } - - // TODO: Where to implement these? 
- - // async askSyncWithRemoteConfig(tryingSettings: ObsidianLiveSyncSettings): Promise { - // const buttons = { - // fetch: $msg("Setup.FetchRemoteConf.Buttons.Fetch"), - // no: $msg("Setup.FetchRemoteConf.Buttons.Skip"), - // } as const; - // const fetchRemoteConf = await this.core.confirm.askSelectStringDialogue( - // $msg("Setup.FetchRemoteConf.Message"), - // Object.values(buttons), - // { defaultAction: buttons.fetch, timeout: 0, title: $msg("Setup.FetchRemoteConf.Title") } - // ); - // if (fetchRemoteConf == buttons.no) { - // return tryingSettings; - // } - - // const newSettings = JSON.parse(JSON.stringify(tryingSettings)) as ObsidianLiveSyncSettings; - // const remoteConfig = await this.services.tweakValue.fetchRemotePreferred(newSettings); - // if (remoteConfig) { - // this._log("Remote configuration found.", LOG_LEVEL_NOTICE); - // const resultSettings = { - // ...DEFAULT_SETTINGS, - // ...tryingSettings, - // ...remoteConfig, - // } satisfies ObsidianLiveSyncSettings; - // return resultSettings; - // } else { - // this._log("Remote configuration not applied.", LOG_LEVEL_NOTICE); - // return { - // ...DEFAULT_SETTINGS, - // ...tryingSettings, - // } satisfies ObsidianLiveSyncSettings; - // } - // } - // async askPerformDoctor( - // tryingSettings: ObsidianLiveSyncSettings - // ): Promise<{ settings: ObsidianLiveSyncSettings; shouldRebuild: boolean; isModified: boolean }> { - // const buttons = { - // yes: $msg("Setup.Doctor.Buttons.Yes"), - // no: $msg("Setup.Doctor.Buttons.No"), - // } as const; - // const performDoctor = await this.core.confirm.askSelectStringDialogue( - // $msg("Setup.Doctor.Message"), - // Object.values(buttons), - // { defaultAction: buttons.yes, timeout: 0, title: $msg("Setup.Doctor.Title") } - // ); - // if (performDoctor == buttons.no) { - // return { settings: tryingSettings, shouldRebuild: false, isModified: false }; - // } - - // const newSettings = JSON.parse(JSON.stringify(tryingSettings)) as ObsidianLiveSyncSettings; - // 
const { settings, shouldRebuild, isModified } = await performDoctorConsultation(this.core, newSettings, { - // localRebuild: RebuildOptions.AutomaticAcceptable, // Because we are in the setup wizard, we can skip the confirmation. - // remoteRebuild: RebuildOptions.SkipEvenIfRequired, - // activateReason: "New settings from URI", - // }); - // if (isModified) { - // this._log("Doctor has fixed some issues!", LOG_LEVEL_NOTICE); - // return { - // settings, - // shouldRebuild, - // isModified, - // }; - // } else { - // this._log("Doctor detected no issues!", LOG_LEVEL_NOTICE); - // return { settings: tryingSettings, shouldRebuild: false, isModified: false }; - // } - // } - - override onBindFunction(core: LiveSyncCore, services: typeof core.services): void { - services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this)); - } -} diff --git a/src/modules/main/ModuleLiveSyncMain.ts b/src/modules/main/ModuleLiveSyncMain.ts index d392c3e..b1765b2 100644 --- a/src/modules/main/ModuleLiveSyncMain.ts +++ b/src/modules/main/ModuleLiveSyncMain.ts @@ -61,10 +61,12 @@ export class ModuleLiveSyncMain extends AbstractModule { eventHub.onEvent(EVENT_SETTING_SAVED, (settings: ObsidianLiveSyncSettings) => { fireAndForget(async () => { try { - await this.core.services.control.applySettings(); - const lang = this.core.services.setting.currentSettings()?.displayLanguage ?? 
undefined; + const lang = this.core.services.setting.currentSettings()?.displayLanguage; if (lang !== undefined) { - setLang(this.core.services.setting.currentSettings()?.displayLanguage); + setLang(lang); + } + if (this.core.services.database.isDatabaseReady()) { + await this.core.services.control.applySettings(); } eventHub.emitEvent(EVENT_REQUEST_RELOAD_SETTING_TAB); } catch (e) { diff --git a/styles.css b/styles.css index a3b1792..59ad096 100644 --- a/styles.css +++ b/styles.css @@ -484,4 +484,45 @@ div.workspace-leaf-content[data-type=bases] .livesync-status { white-space: pre-wrap; word-break: break-all; +} + +/* Diff navigation */ +.diff-options-row { + display: flex; + align-items: center; + gap: 8px; +} + +.diff-nav { + display: flex; + align-items: center; + gap: 4px; + margin-left: auto; +} + +.diff-nav-btn { + padding: 2px 8px; + font-size: 0.85em; + cursor: pointer; + border: 1px solid var(--background-modifier-border); + border-radius: 4px; + background-color: var(--background-secondary); + color: var(--text-normal); +} + +.diff-nav-btn:hover { + background-color: var(--background-modifier-hover); +} + +.diff-nav-indicator { + font-size: 0.85em; + color: var(--text-muted); + min-width: 3em; + text-align: center; +} + +.diff-focused { + outline: 2px solid var(--interactive-accent); + outline-offset: 1px; + border-radius: 2px; } \ No newline at end of file diff --git a/updates.md b/updates.md index cd1dac7..53a435b 100644 --- a/updates.md +++ b/updates.md @@ -5,11 +5,16 @@ The head note of 0.25 is now in [updates_old.md](https://github.com/vrtmrz/obsid ## Unreleased -### P2P Synchronisation +### Improved -Now the foundation for P2P synchronisation has been rewritten, and the unit tests have been added. The foundation has been separated into the transport layer, signalling-and-connection layer, and, an RPC layers. And each layer has been unit-tested. 
As the result, the P2P synchronisation now uses the robust shim that uses RPC-ed PouchDB synchronisation in contrast to previous implementation.
+- P2P synchronisation has been made more robust
+  Now the foundation for P2P synchronisation has been rewritten, and unit tests have been added. The foundation has been separated into a transport layer, a signalling-and-connection layer, and an RPC layer, and each layer has been unit-tested. As a result, P2P synchronisation now uses a robust shim built on RPC-ed PouchDB synchronisation, in contrast to the previous implementation.
 
 This P2P synchronisation is not compatible with previous versions in terms of connectivity. All devices must be updated.
 
+### Fixed
+
+- Baffling errors no longer occur when a settings update is triggered during the early stage of initialisation.
+
 ## 0.25.60
 
 29th April, 2026