Mirror of https://github.com/vrtmrz/obsidian-livesync.git
Synced 2026-02-22 20:18:48 +00:00

Compare commits (49 commits)
| SHA1 |
|---|
| 3cd9b9e06d |
| 7c43c61b85 |
| 465af4f3aa |
| 0a1e3dcd51 |
| b97756d0cf |
| acf4bc3737 |
| 88838872e7 |
| 7d3827d335 |
| 92d3a0cfa2 |
| bba26624ad |
| b82f497cab |
| 37f4d13e75 |
| 7965f5342c |
| 9cdc14dda8 |
| 4f46276ebf |
| 931d360fb1 |
| f68c1855da |
| dff654b6e5 |
| 7e85bcbf08 |
| 38a695ea12 |
| a502b0cd0c |
| 934f708753 |
| 0e574c6cb1 |
| 7375a85b07 |
| 4c3393d8b2 |
| 02aa9319c3 |
| 1a72e46d53 |
| d755579968 |
| b74ee9df77 |
| daa04bcea8 |
| b96b2f24a6 |
| 5569ab62df |
| d84b6c4f15 |
| 336f2c8a4d |
| b52ceec36a |
| 1e6400cf79 |
| 1ff1ac951b |
| aa6d771d17 |
| 512c238415 |
| 55ffeeda10 |
| 65f18b4160 |
| b0c1d6a1bf |
| ca19f2f2ed |
| ac0378ca4b |
| f06f8d1eb6 |
| 77074cb92f |
| 1274b6f683 |
| c54ae58c0f |
| 3f54921e90 |
.github/workflows/harness-ci.yml (new file, vendored, 56 lines)

@@ -0,0 +1,56 @@
# Run tests by Harnessed CI
name: harness-ci

on:
  workflow_dispatch:
    inputs:
      testsuite:
        description: 'Run specific test suite (leave empty to run all)'
        type: choice
        options:
          - ''
          - 'suite/'
          - 'suitep2p/'
        default: ''

permissions:
  contents: read

jobs:
  test:
    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: recursive

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '24.x'
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Install test dependencies (Playwright Chromium)
        run: npm run test:install-dependencies

      - name: Start test services (CouchDB + MinIO + Nostr Relay + WebPeer)
        run: npm run test:docker-all:start

      - name: Run tests suite
        if: ${{ inputs.testsuite == '' || inputs.testsuite == 'suite/' }}
        env:
          CI: true
        run: npm run test suite/

      - name: Run P2P tests suite
        if: ${{ inputs.testsuite == '' || inputs.testsuite == 'suitep2p/' }}
        env:
          CI: true
        run: npm run test suitep2p/

      - name: Stop test services
        if: always()
        run: npm run test:docker-all:stop
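The workflow above runs only on `workflow_dispatch`, so it must be triggered by hand. A minimal sketch of dispatching it through GitHub's REST API follows; the endpoint is GitHub's documented workflow-dispatch route, while the token, ref, and chosen input are placeholders, not part of this changeset:

```typescript
// Hedged sketch: trigger harness-ci remotely. Requires a token with workflow permissions.
await fetch(
    "https://api.github.com/repos/vrtmrz/obsidian-livesync/actions/workflows/harness-ci.yml/dispatches",
    {
        method: "POST",
        headers: {
            Authorization: `Bearer ${process.env.GITHUB_TOKEN}`, // placeholder token
            Accept: "application/vnd.github+json",
        },
        // `testsuite` matches the workflow_dispatch input declared above.
        body: JSON.stringify({ ref: "main", inputs: { testsuite: "suite/" } }),
    }
);
```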
.github/workflows/unit-ci.yml (new file, vendored, 33 lines)

@@ -0,0 +1,33 @@
# Run Unit test without Harnesses
name: unit-ci

on:
  workflow_dispatch:

permissions:
  contents: read

jobs:
  test:
    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: recursive

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '24.x'
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Install test dependencies (Playwright Chromium)
        run: npm run test:install-dependencies

      - name: Run unit tests suite
        run: npm run test:unit
.gitignore (vendored, +7 lines)

@@ -21,3 +21,10 @@ data.json

# environment variables
.env

# local config files
*.local

cov_profile/**

coverage

.prettierrc (deleted)

@@ -1,7 +0,0 @@
-{
-    "trailingComma": "es5",
-    "tabWidth": 4,
-    "printWidth": 120,
-    "semi": true,
-    "endOfLine": "lf"
-}
.prettierrc.mjs (new file, 20 lines)

@@ -0,0 +1,20 @@
import { readFileSync } from "fs";
let localPrettierConfig = {};

try {
    const localConfig = readFileSync(".prettierrc.local", "utf-8");
    localPrettierConfig = JSON.parse(localConfig);
    console.log("Using local Prettier config from .prettierrc.local");
} catch (e) {
    // no local config
}
const prettierConfig = {
    trailingComma: "es5",
    tabWidth: 4,
    printWidth: 120,
    semi: true,
    endOfLine: "cr",
    ...localPrettierConfig,
};

export default prettierConfig;
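Because `localPrettierConfig` is spread last, any key found in an optional `.prettierrc.local` (plain JSON, since it is read with `JSON.parse`) overrides the defaults above. A minimal sketch of the merge semantics; the local values are hypothetical:

```typescript
// Illustration only: how the spread in .prettierrc.mjs resolves overrides.
const defaults = { tabWidth: 4, printWidth: 120, endOfLine: "cr" };
const localPrettierConfig = { endOfLine: "lf" }; // hypothetical .prettierrc.local content
const effective = { ...defaults, ...localPrettierConfig };
// -> { tabWidth: 4, printWidth: 120, endOfLine: "lf" }
```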
.test.env (new file, 11 lines)

@@ -0,0 +1,11 @@
hostname=http://localhost:5989/
dbname=livesync-test-db2
minioEndpoint=http://127.0.0.1:9000
username=admin
password=testpassword
accessKey=minioadmin
secretKey=minioadmin
bucketName=livesync-test-bucket
# ENABLE_DEBUGGER=true
# PRINT_LIVESYNC_LOGS=true
# ENABLE_UI=true
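These values are injected into the test scripts as environment variables via `dotenv-cli -e .env -e .test.env` (see the `test:docker-*` scripts in package.json below), so test code and shell scripts can read them from the process environment. A sketch, assuming the keys above:

```typescript
// Sketch: consuming .test.env values inside a process launched through dotenv-cli.
const couchDbUrl = process.env.hostname; // "http://localhost:5989/"
const dbName = process.env.dbname;       // "livesync-test-db2"
const bucket = process.env.bucketName;   // "livesync-test-bucket"
```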
README.md

@@ -97,6 +97,9 @@ The project has been in continual progress and harmony thanks to:
 
 May those who have contributed be honoured and remembered for their kindness and generosity.
 
+## Development Guide
+
+Please refer to [Development Guide](devs.md) for development setup, testing infrastructure, code conventions, and more.
 
 ## License
 
 Licensed under the MIT License.
devs.md (new file, 140 lines)

@@ -0,0 +1,140 @@
# Self-hosted LiveSync Development Guide

## Project Overview

Self-hosted LiveSync is an Obsidian plugin for synchronising vaults across devices using CouchDB, MinIO/S3, or peer-to-peer WebRTC. The codebase uses a modular architecture with TypeScript, Svelte, and PouchDB.

## Architecture

### Module System

The plugin uses a dynamic module system to reduce coupling and improve maintainability:

- **Service Hub**: Central registry for services using dependency injection
    - Services are registered on the hub and accessed via `this.services` (in most modules); a minimal sketch follows this list
- **Module Loading**: All modules extend `AbstractModule` or `AbstractObsidianModule` (which extends `AbstractModule`). Modules are loaded in `main.ts` and in some other modules
- **Module Categories** (by directory):
    - `core/` - Platform-independent core functionality
    - `coreObsidian/` - Obsidian-specific core (e.g., `ModuleFileAccessObsidian`)
    - `essential/` - Required modules (e.g., `ModuleMigration`, `ModuleKeyValueDB`)
    - `features/` - Optional features (e.g., `ModuleLog`, `ModuleObsidianSettings`)
    - `extras/` - Development/testing tools (e.g., `ModuleDev`, `ModuleIntegratedTest`)
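A minimal sketch of the pattern, combining the handler registration and service calls that appear elsewhere in this changeset; the module name and handler body are hypothetical:

```typescript
// Hypothetical module: `onInitialise.addHandler`, `saveSettingData`, and `_log`
// appear in this changeset; everything else here is illustrative.
export class ModuleHello extends AbstractObsidianModule {
    async _onInitialise(): Promise<boolean> {
        this._log("Service hub is ready", LOG_LEVEL_INFO);
        await this.services.setting.saveSettingData(); // call a service registered on the hub
        return true; // returning false would abort an $every-style chain
    }

    onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
        services.appLifecycle.onInitialise.addHandler(this._onInitialise.bind(this));
    }
}
```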
### Key Architectural Components

- **LiveSyncLocalDB** (`src/lib/src/pouchdb/`): Local PouchDB database wrapper
- **Replicators** (`src/lib/src/replication/`): CouchDB, Journal, and MinIO sync engines
- **Service Hub** (`src/modules/services/`): Central service registry using dependency injection
- **Common Library** (`src/lib/`): Platform-independent sync logic, shared with other tools

### File Structure Conventions

- **Platform-specific code**: Use the `.platform.ts` suffix (replaced with `.obsidian.ts` in production builds via esbuild)
- **Development code**: Use the `.dev.ts` suffix (replaced with `.prod.ts` in production)
- **Path aliases**: `@/*` maps to `src/*`, `@lib/*` maps to `src/lib/src/*`; an import sketch follows this list
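A short sketch of how the aliases resolve; the imported names are taken from files elsewhere in this changeset:

```typescript
// `@/*` -> src/*, `@lib/*` -> src/lib/src/*
import { eventHub } from "@/common/events";  // resolves to src/common/events.ts
import { Logger } from "@lib/common/logger"; // resolves to src/lib/src/common/logger.ts
```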
## Build & Development Workflow

### Commands

```bash
npm run check    # TypeScript and Svelte type checking
npm run dev      # Development build with auto-rebuild (uses .env for test vault paths)
npm run build    # Production build
npm run buildDev # Development build (one-time)
npm run bakei18n # Pre-build step: compile i18n resources (YAML → JSON → TS)
npm test         # Run vitest tests (requires Docker services)
```

### Environment Setup

- Create a `.env` file with `PATHS_TEST_INSTALL` pointing to test vault plug-in directories (`:`-separated on Unix, `;` on Windows)
- Development builds are auto-copied to these paths on build

### Testing Infrastructure

- **Deno Tests**: Unit tests for platform-independent code (e.g., `HashManager.test.ts`)
- **Vitest** (`vitest.config.ts`): E2E tests in a browser-based harness using Playwright
- **Docker Services**: Tests require CouchDB, MinIO (S3), and P2P services:

    ```bash
    npm run test:docker-all:start # Start all test services
    npm run test:full             # Run tests with coverage
    npm run test:docker-all:stop  # Stop services
    ```

    If some services are not needed, start only the required ones (e.g., `test:docker-couchdb:start`).
    Note that the start scripts fail if the services are already running; stop them first.

- **Test Structure**:
    - `test/suite/` - Integration tests for sync operations
    - `test/unit/` - Unit tests (via Vitest, as the harness is browser-based)
    - `test/harness/` - Mock implementations (e.g., `obsidian-mock.ts`)
## Code Conventions

### Internationalisation (i18n)

- **Translation Workflow**:
    1. Edit YAML files in `src/lib/src/common/messagesYAML/` (human-editable)
    2. Run `npm run bakei18n` to compile: YAML → JSON → TypeScript constants
    3. Use the `$t()` and `$msg()` functions for translations; `$f` is also available for formatted messages via tagged template literals
- **Usage**:

    ```typescript
    $msg("dialog.someKey"); // Typed key with autocomplete
    $t("Some message"); // Direct translation
    $f`Hello, ${userName}`; // Formatted message
    ```

- **Supported languages**: `def` (English), `de`, `es`, `ja`, `ko`, `ru`, `zh`, `zh-tw`
### File Path Handling

- Use tagged types from `types.ts`: `FilePath`, `FilePathWithPrefix`, `DocumentID`
- Prefix constants: `CHeader` (chunks), `ICHeader`/`ICHeaderEnd` (internal data)
- Path utilities in `src/lib/src/string_and_binary/path.ts`: `addPrefix()`, `stripAllPrefixes()`, `shouldBeIgnored()`; a usage sketch follows this list
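A usage sketch of the utilities above. The exact signatures are assumptions; only the names and the tagged-type convention come from this guide:

```typescript
// Hedged sketch: `as FilePath` reflects the tagged-type convention; the prefix
// value and return shapes are illustrative, not confirmed signatures.
import { addPrefix, stripAllPrefixes, shouldBeIgnored } from "@lib/string_and_binary/path";
import type { FilePath } from "@lib/common/types";

const path = "notes/daily.md" as FilePath;    // brand a plain string as a FilePath
const prefixed = addPrefix(path, ".hidden/"); // attach a prefix (value is illustrative)
stripAllPrefixes(prefixed);                   // -> back to "notes/daily.md"
shouldBeIgnored(path);                        // -> false for an ordinary note (assumed)
```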
### Logging & Debugging

- Use `this._log(msg, LOG_LEVEL_INFO)` in modules (the message is automatically prefixed with the module name); a short sketch follows this list
- Log levels: `LOG_LEVEL_DEBUG`, `LOG_LEVEL_VERBOSE`, `LOG_LEVEL_INFO`, `LOG_LEVEL_NOTICE`, `LOG_LEVEL_URGENT`
    - `LOG_LEVEL_NOTICE` and above are reported to the user via Obsidian notices
    - `LOG_LEVEL_DEBUG` is for debugging only and is not shown in default builds
- Dev mode creates an `ls-debug/` folder in `.obsidian/` for debug outputs (e.g., missing translations); this causes a significant performance overhead
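A short sketch of the convention; the level constants are those listed above, and `this._log(error, LOG_LEVEL_VERBOSE)` mirrors a call that appears later in this changeset:

```typescript
this._log("Synchronisation started", LOG_LEVEL_INFO);       // log output only
this._log("Remote database unreachable", LOG_LEVEL_NOTICE); // also raises an Obsidian notice
this._log(error, LOG_LEVEL_VERBOSE);                        // detail for verbose logging
```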
## Common Patterns

### Module Implementation

```typescript
export class ModuleExample extends AbstractObsidianModule {
    async _everyOnloadStart(): Promise<boolean> {
        /* ... */
    }

    onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
        services.appLifecycle.handleOnInitialise(this._everyOnloadStart.bind(this));
    }
}
```
### Settings Management

- Settings are defined in `src/lib/src/common/types.ts` (`ObsidianLiveSyncSettings`)
- Configuration metadata lives in `src/lib/src/common/settingConstants.ts`
- Use `this.services.setting.saveSettingData()` instead of calling plugin methods directly; a minimal sketch follows this list
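A minimal sketch; `someOption` is a hypothetical settings key:

```typescript
this.settings.someOption = true;               // mutate the in-memory settings object
await this.services.setting.saveSettingData(); // persist through the settings service
```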
### Database Operations

- Local database operations go through `LiveSyncLocalDB` (wraps PouchDB)
- Document types: `EntryDoc` (files), `EntryLeaf` (chunks), `PluginDataEntry` (plugin sync)
## Important Files

- [main.ts](src/main.ts) - Plugin entry point, module registration
- [esbuild.config.mjs](esbuild.config.mjs) - Build configuration with platform/dev file replacement
- [package.json](package.json) - Scripts reference and dependencies

## Contribution Guidelines

- Follow existing code style and conventions
- Bump dependencies with care: after an update, inspect the artifacts with diff tools and confirm that only expected changes appear in the build output (to avoid unexpected vulnerabilities)
- When adding new features, bear in mind that this project is an OSS implementation, and avoid proprietary services or APIs that may limit usage
    - For example, any functionality that connects to a new type of server should either have an OSS implementation available for that server, or be managed under defined responsibilities and/or limitations without disrupting existing functionality, with its scope for surveillance reduced by some means (e.g., client-side encryption, or auditing the server ourselves)
eslint.config.mjs

@@ -40,7 +40,10 @@ export default [
             "src/lib/test",
             "src/lib/src/cli",
             "**/main.js",
-            "src/lib/apps/webpeer/*"
+            "src/lib/apps/webpeer/*",
+            ".prettierrc.*.mjs",
+            ".prettierrc.mjs",
+            "*.config.mjs"
         ],
     },
     ...compat.extends(
manifest.json

@@ -1,7 +1,7 @@
 {
     "id": "obsidian-livesync",
     "name": "Self-hosted LiveSync",
-    "version": "0.25.31",
+    "version": "0.25.41",
     "minAppVersion": "0.9.12",
     "description": "Community implementation of self-hosted livesync. Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.",
     "author": "vorotamoroz",
package-lock.json (generated, 5664 lines): file diff suppressed because it is too large.
package.json (47 lines changed)

@@ -1,16 +1,16 @@
 {
     "name": "obsidian-livesync",
-    "version": "0.25.31",
+    "version": "0.25.41",
     "description": "Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.",
     "main": "main.js",
     "type": "module",
     "scripts": {
-        "bakei18n": "npx tsx ./src/lib/_tools/bakei18n.ts",
+        "bakei18n": "npm run i18n:yaml2json && npm run i18n:bakejson",
+        "i18n:bakejson": "npx tsx ./src/lib/_tools/bakei18n.ts",
+        "i18n:yaml2json": "npx tsx ./src/lib/_tools/yaml2json.ts",
+        "i18n:json2yaml": "npx tsx ./src/lib/_tools/json2yaml.ts",
-        "prettyjson": "prettier --config ./.prettierrc ./src/lib/src/common/messagesJson/*.json --write --log-level error",
-        "postbakei18n": "prettier --config ./.prettierrc ./src/lib/src/common/messages/*.ts --write --log-level error",
+        "prettyjson": "prettier --config ./.prettierrc.mjs ./src/lib/src/common/messagesJson/*.json --write --log-level error",
+        "postbakei18n": "prettier --config ./.prettierrc.mjs ./src/lib/src/common/messages/*.ts --write --log-level error",
+        "posti18n:yaml2json": "npm run prettyjson",
         "predev": "npm run bakei18n",
         "dev": "node --env-file=.env esbuild.config.mjs",

@@ -22,9 +22,35 @@
         "tsc-check": "tsc --noEmit",
         "pretty": "npm run prettyNoWrite -- --write --log-level error",
         "prettyCheck": "npm run prettyNoWrite -- --check",
-        "prettyNoWrite": "prettier --config ./.prettierrc \"**/*.js\" \"**/*.ts\" \"**/*.json\" ",
+        "prettyNoWrite": "prettier --config ./.prettierrc.mjs \"**/*.js\" \"**/*.ts\" \"**/*.json\" ",
         "check": "npm run lint && npm run svelte-check",
-        "unittest": "deno test -A --no-check --coverage=cov_profile --v8-flags=--expose-gc --trace-leaks ./src/"
+        "unittest": "deno test -A --no-check --coverage=cov_profile --v8-flags=--expose-gc --trace-leaks ./src/",
+        "test": "vitest run",
+        "test:unit": "vitest run --config vitest.config.unit.ts",
+        "test:unit:coverage": "vitest run --config vitest.config.unit.ts --coverage",
+        "test:install-playwright": "npx playwright install chromium",
+        "test:install-dependencies": "npm run test:install-playwright",
+        "test:coverage": "vitest run --coverage",
+        "test:docker-couchdb:up": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/couchdb-start.sh",
+        "test:docker-couchdb:init": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/couchdb-init.sh",
+        "test:docker-couchdb:start": "npm run test:docker-couchdb:up && sleep 5 && npm run test:docker-couchdb:init",
+        "test:docker-couchdb:down": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/couchdb-stop.sh",
+        "test:docker-couchdb:stop": "npm run test:docker-couchdb:down",
+        "test:docker-s3:up": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/minio-start.sh",
+        "test:docker-s3:init": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/minio-init.sh",
+        "test:docker-s3:start": "npm run test:docker-s3:up && sleep 3 && npm run test:docker-s3:init",
+        "test:docker-s3:down": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/minio-stop.sh",
+        "test:docker-s3:stop": "npm run test:docker-s3:down",
+        "test:docker-p2p:up": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/p2p-start.sh",
+        "test:docker-p2p:init": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/p2p-init.sh",
+        "test:docker-p2p:down": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/p2p-stop.sh",
+        "test:docker-p2p:stop": "npm run test:docker-p2p:down",
+        "test:docker-all:up": "npm run test:docker-couchdb:up && npm run test:docker-s3:up && npm run test:docker-p2p:up",
+        "test:docker-all:init": "npm run test:docker-couchdb:init && npm run test:docker-s3:init && npm run test:docker-p2p:init",
+        "test:docker-all:down": "npm run test:docker-couchdb:down && npm run test:docker-s3:down && npm run test:docker-p2p:down",
+        "test:docker-all:start": "npm run test:docker-all:up && sleep 5 && npm run test:docker-all:init",
+        "test:docker-all:stop": "npm run test:docker-all:down",
+        "test:full": "npm run test:docker-all:start && vitest run --coverage && npm run test:docker-all:stop"
     },
     "keywords": [],
     "author": "vorotamoroz",

@@ -49,7 +75,12 @@
         "@types/transform-pouch": "^1.0.6",
         "@typescript-eslint/eslint-plugin": "8.46.2",
         "@typescript-eslint/parser": "8.46.2",
+        "@vitest/browser": "^4.0.16",
+        "@vitest/browser-playwright": "^4.0.16",
+        "@vitest/coverage-v8": "^4.0.16",
         "builtin-modules": "5.0.0",
+        "dotenv": "^17.2.3",
+        "dotenv-cli": "^11.0.0",
         "esbuild": "0.25.0",
         "esbuild-plugin-inline-worker": "^0.1.1",
         "esbuild-svelte": "^0.9.3",

@@ -59,6 +90,7 @@
         "events": "^3.3.0",
         "glob": "^11.0.3",
         "obsidian": "^1.8.7",
+        "playwright": "^1.57.0",
         "postcss": "^8.5.3",
         "postcss-load-config": "^6.0.1",
         "pouchdb-adapter-http": "^9.0.0",

@@ -81,6 +113,9 @@
         "tslib": "^2.8.1",
         "tsx": "^4.20.6",
         "typescript": "5.9.3",
+        "vite": "^7.3.0",
+        "vitest": "^4.0.16",
+        "webdriverio": "^9.23.0",
         "yaml": "^2.8.0"
     },
     "dependencies": {
src/common/KeyValueDB.ts

@@ -1,43 +1,99 @@
 import { deleteDB, type IDBPDatabase, openDB } from "idb";
 import type { KeyValueDatabase } from "../lib/src/interfaces/KeyValueDatabase.ts";
+import { serialized } from "octagonal-wheels/concurrency/lock";
+import { Logger } from "octagonal-wheels/common/logger";
 const databaseCache: { [key: string]: IDBPDatabase<any> } = {};
-export const OpenKeyValueDatabase = async (dbKey: string): Promise<KeyValueDatabase> => {
+export { OpenKeyValueDatabase } from "./KeyValueDBv2.ts";
+
+export const _OpenKeyValueDatabase = async (dbKey: string): Promise<KeyValueDatabase> => {
     if (dbKey in databaseCache) {
         databaseCache[dbKey].close();
         delete databaseCache[dbKey];
     }
     const storeKey = dbKey;
-    const dbPromise = openDB(dbKey, 1, {
-        upgrade(db, _oldVersion, _newVersion, _transaction, _event) {
-            return db.createObjectStore(storeKey);
-        },
-    });
-    const db = await dbPromise;
-    databaseCache[dbKey] = db;
+    let db: IDBPDatabase<any> | null = null;
+    const _openDB = () => {
+        return serialized("keyvaluedb-" + dbKey, async () => {
+            const dbInstance = await openDB(dbKey, 1, {
+                upgrade(db, _oldVersion, _newVersion, _transaction, _event) {
+                    return db.createObjectStore(storeKey);
+                },
+                blocking(currentVersion, blockedVersion, event) {
+                    Logger(
+                        `Blocking database open for ${dbKey}: currentVersion=${currentVersion}, blockedVersion=${blockedVersion}`
+                    );
+                    databaseCache[dbKey]?.close();
+                    delete databaseCache[dbKey];
+                },
+                blocked(currentVersion, blockedVersion, event) {
+                    Logger(
+                        `Database open blocked for ${dbKey}: currentVersion=${currentVersion}, blockedVersion=${blockedVersion}`
+                    );
+                },
+                terminated() {
+                    Logger(`Database connection terminated for ${dbKey}`);
+                },
+            });
+            databaseCache[dbKey] = dbInstance;
+            return dbInstance;
+        });
+    };
+    const closeDB = () => {
+        if (db) {
+            db.close();
+            delete databaseCache[dbKey];
+            db = null;
+        }
+    };
+    db = await _openDB();
     return {
         async get<T>(key: IDBValidKey): Promise<T> {
+            if (!db) {
+                db = await _openDB();
+                databaseCache[dbKey] = db;
+            }
             return await db.get(storeKey, key);
         },
         async set<T>(key: IDBValidKey, value: T) {
+            if (!db) {
+                db = await _openDB();
+                databaseCache[dbKey] = db;
+            }
             return await db.put(storeKey, value, key);
         },
         async del(key: IDBValidKey) {
+            if (!db) {
+                db = await _openDB();
+                databaseCache[dbKey] = db;
+            }
             return await db.delete(storeKey, key);
         },
         async clear() {
+            if (!db) {
+                db = await _openDB();
+                databaseCache[dbKey] = db;
+            }
             return await db.clear(storeKey);
         },
         async keys(query?: IDBValidKey | IDBKeyRange, count?: number) {
+            if (!db) {
+                db = await _openDB();
+                databaseCache[dbKey] = db;
+            }
             return await db.getAllKeys(storeKey, query, count);
         },
         close() {
             delete databaseCache[dbKey];
-            return db.close();
+            return Promise.resolve(closeDB());
         },
         async destroy() {
             delete databaseCache[dbKey];
             db.close();
-            await deleteDB(dbKey);
+            // await closeDB();
+            await deleteDB(dbKey, {
+                blocked() {
+                    console.warn(`Database delete blocked for ${dbKey}`);
+                },
+            });
         },
     };
 };
src/common/KeyValueDBv2.ts (new file, 154 lines)

@@ -0,0 +1,154 @@
import { LOG_LEVEL_VERBOSE, Logger } from "@/lib/src/common/logger";
import type { KeyValueDatabase } from "@/lib/src/interfaces/KeyValueDatabase";
import { deleteDB, openDB, type IDBPDatabase } from "idb";
import { serialized } from "octagonal-wheels/concurrency/lock";

const databaseCache = new Map<string, IDBKeyValueDatabase>();

export async function OpenKeyValueDatabase(dbKey: string): Promise<KeyValueDatabase> {
    return await serialized(`OpenKeyValueDatabase-${dbKey}`, async () => {
        const cachedDB = databaseCache.get(dbKey);
        if (cachedDB) {
            if (!cachedDB.isDestroyed) {
                return cachedDB;
            }
            await cachedDB.ensuredDestroyed;
            databaseCache.delete(dbKey);
        }
        const newDB = new IDBKeyValueDatabase(dbKey);
        try {
            await newDB.getIsReady();
            databaseCache.set(dbKey, newDB);
            return newDB;
        } catch (e) {
            databaseCache.delete(dbKey);
            throw e;
        }
    });
}

export class IDBKeyValueDatabase implements KeyValueDatabase {
    protected _dbPromise: Promise<IDBPDatabase<any>> | null = null;
    protected dbKey: string;
    protected storeKey: string;
    protected _isDestroyed: boolean = false;
    protected destroyedPromise: Promise<void> | null = null;

    get isDestroyed() {
        return this._isDestroyed;
    }
    get ensuredDestroyed(): Promise<void> {
        if (this.destroyedPromise) {
            return this.destroyedPromise;
        }
        return Promise.resolve();
    }

    async getIsReady(): Promise<boolean> {
        await this.ensureDB();
        return this.isDestroyed === false;
    }

    protected ensureDB() {
        if (this._isDestroyed) {
            throw new Error("Database is destroyed");
        }
        if (this._dbPromise) {
            return this._dbPromise;
        }
        this._dbPromise = openDB(this.dbKey, undefined, {
            upgrade: (db, _oldVersion, _newVersion, _transaction, _event) => {
                if (!db.objectStoreNames.contains(this.storeKey)) {
                    return db.createObjectStore(this.storeKey);
                }
            },
            blocking: (currentVersion, blockedVersion, event) => {
                Logger(
                    `Blocking database open for ${this.dbKey}: currentVersion=${currentVersion}, blockedVersion=${blockedVersion}`,
                    LOG_LEVEL_VERBOSE
                );
                // This `this` is not this openDB instance, previously opened DB. Let it be closed in the terminated handler.
                void this.closeDB(true);
            },
            blocked: (currentVersion, blockedVersion, event) => {
                Logger(
                    `Database open blocked for ${this.dbKey}: currentVersion=${currentVersion}, blockedVersion=${blockedVersion}`,
                    LOG_LEVEL_VERBOSE
                );
            },
            terminated: () => {
                Logger(`Database connection terminated for ${this.dbKey}`, LOG_LEVEL_VERBOSE);
                this._dbPromise = null;
            },
        }).catch((e) => {
            this._dbPromise = null;
            throw e;
        });
        return this._dbPromise;
    }
    protected async closeDB(setDestroyed: boolean = false) {
        if (this._dbPromise) {
            const tempPromise = this._dbPromise;
            this._dbPromise = null;
            try {
                const dbR = await tempPromise;
                dbR.close();
            } catch (e) {
                Logger(`Error closing database`);
                Logger(e, LOG_LEVEL_VERBOSE);
            }
        }
        this._dbPromise = null;
        if (setDestroyed) {
            this._isDestroyed = true;
            this.destroyedPromise = Promise.resolve();
        }
    }
    get DB(): Promise<IDBPDatabase<any>> {
        if (this._isDestroyed) {
            return Promise.reject(new Error("Database is destroyed"));
        }
        return this.ensureDB();
    }

    constructor(dbKey: string) {
        this.dbKey = dbKey;
        this.storeKey = dbKey;
    }
    async get<U>(key: IDBValidKey): Promise<U> {
        const db = await this.DB;
        return await db.get(this.storeKey, key);
    }
    async set<U>(key: IDBValidKey, value: U): Promise<IDBValidKey> {
        const db = await this.DB;
        await db.put(this.storeKey, value, key);
        return key;
    }
    async del(key: IDBValidKey): Promise<void> {
        const db = await this.DB;
        return await db.delete(this.storeKey, key);
    }
    async clear(): Promise<void> {
        const db = await this.DB;
        return await db.clear(this.storeKey);
    }
    async keys(query?: IDBValidKey | IDBKeyRange, count?: number): Promise<IDBValidKey[]> {
        const db = await this.DB;
        return await db.getAllKeys(this.storeKey, query, count);
    }
    async close(): Promise<void> {
        await this.closeDB();
    }
    async destroy(): Promise<void> {
        this._isDestroyed = true;
        this.destroyedPromise = (async () => {
            await this.closeDB();
            await deleteDB(this.dbKey, {
                blocked: () => {
                    Logger(`Database delete blocked for ${this.dbKey}`);
                },
            });
        })();
        await this.destroyedPromise;
    }
}
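A minimal usage sketch of the factory and class above; the database name is a placeholder:

```typescript
// Sketch based on the KeyValueDatabase surface shown above.
const kv = await OpenKeyValueDatabase("example-db");
await kv.set("greeting", "hello");
const value = await kv.get<string>("greeting"); // -> "hello"
await kv.close();   // safe: the connection is re-established lazily by ensureDB() on next use
await kv.destroy(); // closes the handle and deletes the underlying IndexedDB database
```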
@@ -1,4 +1,4 @@
-import { ItemView } from "obsidian";
+import { ItemView } from "@/deps.ts";
 import { type mount, unmount } from "svelte";
 
 export abstract class SvelteItemView extends ItemView {
src/common/events.ts

@@ -23,6 +23,9 @@ export const EVENT_REQUEST_RUN_DOCTOR = "request-run-doctor";
 export const EVENT_REQUEST_RUN_FIX_INCOMPLETE = "request-run-fix-incomplete";
 export const EVENT_ON_UNRESOLVED_ERROR = "on-unresolved-error";
 
+export const EVENT_ANALYSE_DB_USAGE = "analyse-db-usage";
+export const EVENT_REQUEST_PERFORM_GC_V3 = "request-perform-gc-v3";
+export const EVENT_REQUEST_CHECK_REMOTE_SIZE = "request-check-remote-size";
 // export const EVENT_FILE_CHANGED = "file-changed";
 
 declare global {

@@ -42,6 +45,9 @@ declare global {
         [EVENT_REQUEST_RUN_DOCTOR]: string;
         [EVENT_REQUEST_RUN_FIX_INCOMPLETE]: undefined;
         [EVENT_ON_UNRESOLVED_ERROR]: undefined;
+        [EVENT_ANALYSE_DB_USAGE]: undefined;
+        [EVENT_REQUEST_CHECK_REMOTE_SIZE]: undefined;
+        [EVENT_REQUEST_PERFORM_GC_V3]: undefined;
     }
 }
src/deps.ts

@@ -24,6 +24,13 @@ export {
     parseYaml,
     ItemView,
     WorkspaceLeaf,
+    Menu,
+    request,
+    getLanguage,
+    ButtonComponent,
+    TextComponent,
+    ToggleComponent,
+    DropdownComponent,
 } from "obsidian";
 export type {
     DataWriteOptions,

@@ -32,6 +39,7 @@ export type {
     RequestUrlResponse,
     MarkdownFileInfo,
     ListedFiles,
+    ValueComponent,
 } from "obsidian";
 import { normalizePath as normalizePath_ } from "obsidian";
 const normalizePath = normalizePath_ as <T extends string | FilePath>(from: T) => T;
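Several hunks in this changeset replace direct `obsidian` imports with imports from this module; funnelling the Obsidian API through one re-export point gives the browser-based test harness (see `test/harness/obsidian-mock.ts` above) a single seam to substitute, which is presumably the motivation. The convention in brief:

```typescript
// Before (removed in the surrounding hunks): import { Menu } from "obsidian";
// After: consume the re-export instead.
import { Menu, ButtonComponent } from "@/deps.ts";
```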
CmdConfigSync.ts

@@ -1803,16 +1803,16 @@ export class ConfigSync extends LiveSyncCommands {
         return files;
     }
     onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
-        services.fileProcessing.handleOptionalFileEvent(this._anyProcessOptionalFileEvent.bind(this));
-        services.conflict.handleGetOptionalConflictCheckMethod(this._anyGetOptionalConflictCheckMethod.bind(this));
-        services.replication.handleProcessVirtualDocuments(this._anyModuleParsedReplicationResultItem.bind(this));
-        services.setting.handleOnRealiseSetting(this._everyRealizeSettingSyncMode.bind(this));
-        services.appLifecycle.handleOnResuming(this._everyOnResumeProcess.bind(this));
-        services.appLifecycle.handleOnResumed(this._everyAfterResumeProcess.bind(this));
-        services.replication.handleBeforeReplicate(this._everyBeforeReplicate.bind(this));
-        services.databaseEvents.handleDatabaseInitialised(this._everyOnDatabaseInitialized.bind(this));
-        services.setting.handleSuspendExtraSync(this._allSuspendExtraSync.bind(this));
-        services.setting.handleSuggestOptionalFeatures(this._allAskUsingOptionalSyncFeature.bind(this));
-        services.setting.handleEnableOptionalFeature(this._allConfigureOptionalSyncFeature.bind(this));
+        services.fileProcessing.processOptionalFileEvent.addHandler(this._anyProcessOptionalFileEvent.bind(this));
+        services.conflict.getOptionalConflictCheckMethod.addHandler(this._anyGetOptionalConflictCheckMethod.bind(this));
+        services.replication.processVirtualDocument.addHandler(this._anyModuleParsedReplicationResultItem.bind(this));
+        services.setting.onRealiseSetting.addHandler(this._everyRealizeSettingSyncMode.bind(this));
+        services.appLifecycle.onResuming.addHandler(this._everyOnResumeProcess.bind(this));
+        services.appLifecycle.onResumed.addHandler(this._everyAfterResumeProcess.bind(this));
+        services.replication.onBeforeReplicate.addHandler(this._everyBeforeReplicate.bind(this));
+        services.databaseEvents.onDatabaseInitialised.addHandler(this._everyOnDatabaseInitialized.bind(this));
+        services.setting.suspendExtraSync.addHandler(this._allSuspendExtraSync.bind(this));
+        services.setting.suggestOptionalFeatures.addHandler(this._allAskUsingOptionalSyncFeature.bind(this));
+        services.setting.enableOptionalFeature.addHandler(this._allConfigureOptionalSyncFeature.bind(this));
     }
 }
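This and the later `onBindFunction` hunks apply the same mechanical migration: every `services.<area>.handleXxx(fn)` registration becomes `services.<area>.xxx.addHandler(fn)`, so each hook is now an object exposing `addHandler` rather than a `handle`-prefixed method. Schematically, taking one line from the hunk above:

```typescript
// before: services.appLifecycle.handleOnResuming(this._everyOnResumeProcess.bind(this));
// after:
services.appLifecycle.onResuming.addHandler(this._everyOnResumeProcess.bind(this));
```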
@@ -10,7 +10,7 @@
     import { getDocData, timeDeltaToHumanReadable, unique } from "../../lib/src/common/utils";
     import type ObsidianLiveSyncPlugin from "../../main";
     // import { askString } from "../../common/utils";
-    import { Menu } from "obsidian";
+    import { Menu } from "@/deps.ts";
 
     export let list: IPluginDataExDisplay[] = [];
     export let thisTerm = "";
@@ -10,7 +10,7 @@
         pluginV2Progress,
     } from "./CmdConfigSync.ts";
     import PluginCombo from "./PluginCombo.svelte";
-    import { Menu, type PluginManifest } from "obsidian";
+    import { Menu, type PluginManifest } from "@/deps.ts";
     import { unique } from "../../lib/src/common/utils";
     import {
         MODE_SELECTIVE,
@@ -1914,16 +1914,16 @@ ${messageFetch}${messageOverwrite}${messageMerge}
     onBindFunction(core: LiveSyncCore, services: typeof core.services) {
         // No longer needed on initialisation
         // services.databaseEvents.handleOnDatabaseInitialisation(this._everyOnInitializeDatabase.bind(this));
-        services.appLifecycle.handleOnSettingLoaded(this._everyOnloadAfterLoadSettings.bind(this));
-        services.fileProcessing.handleOptionalFileEvent(this._anyProcessOptionalFileEvent.bind(this));
-        services.conflict.handleGetOptionalConflictCheckMethod(this._anyGetOptionalConflictCheckMethod.bind(this));
-        services.replication.handleProcessOptionalSynchroniseResult(this._anyProcessOptionalSyncFiles.bind(this));
-        services.setting.handleOnRealiseSetting(this._everyRealizeSettingSyncMode.bind(this));
-        services.appLifecycle.handleOnResuming(this._everyOnResumeProcess.bind(this));
-        services.replication.handleBeforeReplicate(this._everyBeforeReplicate.bind(this));
-        services.databaseEvents.handleDatabaseInitialised(this._everyOnDatabaseInitialized.bind(this));
-        services.setting.handleSuspendExtraSync(this._allSuspendExtraSync.bind(this));
-        services.setting.handleSuggestOptionalFeatures(this._allAskUsingOptionalSyncFeature.bind(this));
-        services.setting.handleEnableOptionalFeature(this._allConfigureOptionalSyncFeature.bind(this));
+        services.appLifecycle.onSettingLoaded.addHandler(this._everyOnloadAfterLoadSettings.bind(this));
+        services.fileProcessing.processOptionalFileEvent.addHandler(this._anyProcessOptionalFileEvent.bind(this));
+        services.conflict.getOptionalConflictCheckMethod.addHandler(this._anyGetOptionalConflictCheckMethod.bind(this));
+        services.replication.processOptionalSynchroniseResult.addHandler(this._anyProcessOptionalSyncFiles.bind(this));
+        services.setting.onRealiseSetting.addHandler(this._everyRealizeSettingSyncMode.bind(this));
+        services.appLifecycle.onResuming.addHandler(this._everyOnResumeProcess.bind(this));
+        services.replication.onBeforeReplicate.addHandler(this._everyBeforeReplicate.bind(this));
+        services.databaseEvents.onDatabaseInitialised.addHandler(this._everyOnDatabaseInitialized.bind(this));
+        services.setting.suspendExtraSync.addHandler(this._allSuspendExtraSync.bind(this));
+        services.setting.suggestOptionalFeatures.addHandler(this._allAskUsingOptionalSyncFeature.bind(this));
+        services.setting.enableOptionalFeature.addHandler(this._allConfigureOptionalSyncFeature.bind(this));
     }
 }
@@ -7,12 +7,18 @@ import {
     type DocumentID,
     type EntryDoc,
     type EntryLeaf,
+    type FilePathWithPrefix,
     type MetaEntry,
 } from "../../lib/src/common/types";
+import { getNoFromRev } from "../../lib/src/pouchdb/LiveSyncLocalDB";
 import { LiveSyncCommands } from "../LiveSyncCommands";
 import { serialized } from "octagonal-wheels/concurrency/lock_v2";
 import { arrayToChunkedArray } from "octagonal-wheels/collection";
+import { EVENT_ANALYSE_DB_USAGE, EVENT_REQUEST_PERFORM_GC_V3, eventHub } from "@/common/events";
+import type { LiveSyncCouchDBReplicator } from "@/lib/src/replication/couchdb/LiveSyncReplicator";
+import { delay, parseHeaderValues } from "@/lib/src/common/utils";
+import { generateCredentialObject } from "@/lib/src/replication/httplib";
+import { _requestToCouchDB } from "@/common/utils";
 const DB_KEY_SEQ = "gc-seq";
 const DB_KEY_CHUNK_SET = "chunk-set";
 const DB_KEY_DOC_USAGE_MAP = "doc-usage-map";

@@ -27,6 +33,24 @@ export class LocalDatabaseMaintenance extends LiveSyncCommands {
     }
     onload(): void | Promise<void> {
         // NO OP.
+        this.plugin.addCommand({
+            id: "analyse-database",
+            name: "Analyse Database Usage (advanced)",
+            icon: "database-search",
+            callback: async () => {
+                await this.analyseDatabase();
+            },
+        });
+        this.plugin.addCommand({
+            id: "gc-v3",
+            name: "Garbage Collection V3 (advanced, beta)",
+            icon: "trash-2",
+            callback: async () => {
+                await this.gcv3();
+            },
+        });
+        eventHub.onEvent(EVENT_ANALYSE_DB_USAGE, () => this.analyseDatabase());
+        eventHub.onEvent(EVENT_REQUEST_PERFORM_GC_V3, () => this.gcv3());
     }
     async allChunks(includeDeleted: boolean = false) {
         const p = this._progress("", LOG_LEVEL_NOTICE);
@@ -485,4 +509,458 @@ Success: ${successCount}, Errored: ${errored}`;
         const kvDB = this.plugin.kvDB;
         await kvDB.set(DB_KEY_CHUNK_SET, chunkSet);
     }
+
+    // Analyse the database and report chunk usage.
+    async analyseDatabase() {
+        if (!this.isAvailable()) return;
+        const db = this.localDatabase.localDatabase;
+        // Map of chunk ID to its info
+        type ChunkInfo = {
+            id: DocumentID;
+            refCount: number;
+            length: number;
+        };
+        const chunkMap = new Map<DocumentID, Set<ChunkInfo>>();
+        // Map of document ID to its info
+        type DocumentInfo = {
+            id: DocumentID;
+            rev: Rev;
+            chunks: Set<ChunkID>;
+            uniqueChunks: Set<ChunkID>;
+            sharedChunks: Set<ChunkID>;
+            path: FilePathWithPrefix;
+        };
+        const docMap = new Map<DocumentID, Set<DocumentInfo>>();
+        const info = await db.info();
+        // Total number of revisions to process (approximate)
+        const maxSeq = new Number(info.update_seq);
+        let processed = 0;
+        let read = 0;
+        let errored = 0;
+        // Fetch Tasks
+        const ft = [] as ReturnType<typeof fetchRevision>[];
+        // Fetch a specific revision of a document and make note of its chunks, or add chunk info.
+        const fetchRevision = async (id: DocumentID, rev: Rev, seq: string | number) => {
+            try {
+                processed++;
+                const doc = await db.get(id, { rev: rev });
+                if (doc) {
+                    if ("children" in doc) {
+                        const id = doc._id;
+                        const rev = doc._rev;
+                        const children = (doc.children || []) as DocumentID[];
+                        const set = docMap.get(id) || new Set();
+                        set.add({
+                            id,
+                            rev,
+                            chunks: new Set(children),
+                            uniqueChunks: new Set(),
+                            sharedChunks: new Set(),
+                            path: doc.path,
+                        });
+                        docMap.set(id, set);
+                    } else if (doc.type === EntryTypes.CHUNK) {
+                        const id = doc._id as DocumentID;
+                        if (chunkMap.has(id)) {
+                            return;
+                        }
+                        if (doc._deleted) {
+                            // Deleted chunk, skip (possibly resurrected later)
+                            return;
+                        }
+                        const length = doc.data.length;
+                        const set = chunkMap.get(id) || new Set();
+                        set.add({ id, length, refCount: 0 });
+                        chunkMap.set(id, set);
+                    }
+                    read++;
+                } else {
+                    this._log(`Analysing Database: not found: ${id} / ${rev}`);
+                    errored++;
+                }
+            } catch (error) {
+                this._log(`Error fetching document ${id} / ${rev}: $`, LOG_LEVEL_NOTICE);
+                this._log(error, LOG_LEVEL_VERBOSE);
+                errored++;
+            }
+            if (processed % 100 == 0) {
+                this._log(`Analysing database: ${read} (${errored}) / ${maxSeq} `, LOG_LEVEL_NOTICE, "db-analyse");
+            }
+        };
+
+        // Enumerate all documents and their revisions.
+        const IDs = this.localDatabase.findEntryNames("", "", {});
+        for await (const id of IDs) {
+            const revList = await this.localDatabase.getRaw(id as DocumentID, {
+                revs: true,
+                revs_info: true,
+                conflicts: true,
+            });
+            const revInfos = revList._revs_info || [];
+            for (const revInfo of revInfos) {
+                // All available revisions should be processed.
+                // If the revision is not available, it means the revision is already tombstoned.
+                if (revInfo.status == "available") {
+                    // Schedule fetch task
+                    ft.push(fetchRevision(id as DocumentID, revInfo.rev, 0));
+                }
+            }
+        }
+        // Wait for all fetch tasks to complete.
+        await Promise.all(ft);
+        // Reference count marking and unique/shared chunk classification.
+        for (const [, docRevs] of docMap) {
+            for (const docRev of docRevs) {
+                for (const chunkId of docRev.chunks) {
+                    const chunkInfos = chunkMap.get(chunkId);
+                    if (chunkInfos) {
+                        for (const chunkInfo of chunkInfos) {
+                            if (chunkInfo.refCount === 0) {
+                                docRev.uniqueChunks.add(chunkId);
+                            } else {
+                                docRev.sharedChunks.add(chunkId);
+                            }
+                            chunkInfo.refCount++;
+                        }
+                    }
+                }
+            }
+        }
+        // Prepare results
+        const result = [];
+        // Calculate total size of chunks in the given set.
+        const getTotalSize = (ids: Set<DocumentID>) => {
+            return [...ids].reduce((acc, chunkId) => {
+                const chunkInfos = chunkMap.get(chunkId);
+                if (chunkInfos) {
+                    for (const chunkInfo of chunkInfos) {
+                        acc += chunkInfo.length;
+                    }
+                }
+                return acc;
+            }, 0);
+        };
+
+        // Compile results for each document revision
+        for (const doc of docMap.values()) {
+            for (const rev of doc) {
+                const title = `${rev.path} (${rev.rev})`;
+                const id = rev.id;
+                const revStr = `${getNoFromRev(rev.rev)}`;
+                const revHash = rev.rev.split("-")[1].substring(0, 6);
+                const path = rev.path;
+                const uniqueChunkCount = rev.uniqueChunks.size;
+                const sharedChunkCount = rev.sharedChunks.size;
+                const uniqueChunkSize = getTotalSize(rev.uniqueChunks);
+                const sharedChunkSize = getTotalSize(rev.sharedChunks);
+                result.push({
+                    title,
+                    path,
+                    rev: revStr,
+                    revHash,
+                    id,
+                    uniqueChunkCount: uniqueChunkCount,
+                    sharedChunkCount,
+                    uniqueChunkSize: uniqueChunkSize,
+                    sharedChunkSize: sharedChunkSize,
+                });
+            }
+        }
+
+        const titleMap = {
+            title: "Title",
+            id: "Document ID",
+            path: "Path",
+            rev: "Revision No",
+            revHash: "Revision Hash",
+            uniqueChunkCount: "Unique Chunk Count",
+            sharedChunkCount: "Shared Chunk Count",
+            uniqueChunkSize: "Unique Chunk Size",
+            sharedChunkSize: "Shared Chunk Size",
+        } as const;
+        // Enumerate orphan chunks (not referenced by any document)
+        const orphanChunks = [...chunkMap.entries()].filter(([chunkId, infos]) => {
+            const totalRefCount = [...infos].reduce((acc, info) => acc + info.refCount, 0);
+            return totalRefCount === 0;
+        });
+        const orphanChunkSize = orphanChunks.reduce((acc, [chunkId, infos]) => {
+            for (const info of infos) {
+                acc += info.length;
+            }
+            return acc;
+        }, 0);
+        result.push({
+            title: "__orphan",
+            id: "__orphan",
+            path: "__orphan",
+            rev: "1",
+            revHash: "xxxxx",
+            uniqueChunkCount: orphanChunks.length,
+            sharedChunkCount: 0,
+            uniqueChunkSize: orphanChunkSize,
+            sharedChunkSize: 0,
+        } as any);
+
+        const csvSrc = result.map((e) => {
+            return [
+                `${e.title.replace(/"/g, '""')}"`,
+                `${e.id}`,
+                `${e.path}`,
+                `${e.rev}`,
+                `${e.revHash}`,
+                `${e.uniqueChunkCount}`,
+                `${e.sharedChunkCount}`,
+                `${e.uniqueChunkSize}`,
+                `${e.sharedChunkSize}`,
+            ].join("\t");
+        });
+        // Add title row
+        csvSrc.unshift(Object.values(titleMap).join("\t"));
+        const csv = csvSrc.join("\n");
+
+        // Prompt to copy to clipboard
+        await this.services.UI.promptCopyToClipboard("Database Analysis data (TSV):", csv);
+    }
+
+    async compactDatabase() {
+        const replicator = this.plugin.replicator as LiveSyncCouchDBReplicator;
+        const remote = await replicator.connectRemoteCouchDBWithSetting(this.settings, false, false, true);
+        if (!remote) {
+            this._notice("Failed to connect to remote for compaction.", "gc-compact");
+            return;
+        }
+        if (typeof remote == "string") {
+            this._notice(`Failed to connect to remote for compaction. ${remote}`, "gc-compact");
+            return;
+        }
+        const compactResult = await remote.db.compact({
+            interval: 1000,
+        });
+        // Probably no need to wait, but just in case.
+        let timeout = 2 * 60 * 1000; // 2 minutes
+        do {
+            const status = await remote.db.info();
+            if ("compact_running" in status && status?.compact_running) {
+                this._notice("Compaction in progress on remote database...", "gc-compact");
+                await delay(2000);
+                timeout -= 2000;
+                if (timeout <= 0) {
+                    this._notice("Compaction on remote database timed out.", "gc-compact");
+                    break;
+                }
+            } else {
+                break;
+            }
+        } while (true);
+        if (compactResult && "ok" in compactResult) {
+            this._notice("Compaction on remote database completed successfully.", "gc-compact");
+        } else {
+            this._notice("Compaction on remote database failed.", "gc-compact");
+        }
+    }
+
+    /**
+     * Compact the database by temporarily setting the revision limit to 1.
+     * @returns
+     */
+    async compactDatabaseWithRevLimit() {
+        // Temporarily set revs_limit to 1, perform compaction, and restore the original revs_limit.
+        // Very dangerous operation, so now suppressed.
+        return false;
+        const replicator = this.plugin.replicator as LiveSyncCouchDBReplicator;
+        const remote = await replicator.connectRemoteCouchDBWithSetting(this.settings, false, false, true);
+        if (!remote) {
+            this._notice("Failed to connect to remote for compaction.");
+            return;
+        }
+        if (typeof remote == "string") {
+            this._notice(`Failed to connect to remote for compaction. ${remote}`);
+            return;
+        }
+        const customHeaders = parseHeaderValues(this.settings.couchDB_CustomHeaders);
+        const credential = generateCredentialObject(this.settings);
+        const request = async (path: string, method: string = "GET", body: any = undefined) => {
+            const req = await _requestToCouchDB(
+                this.settings.couchDB_URI + (this.settings.couchDB_DBNAME ? `/${this.settings.couchDB_DBNAME}` : ""),
+                credential,
+                window.origin,
+                path,
+                body,
+                method,
+                customHeaders
+            );
+            return req;
+        };
+        let revsLimit = "";
+        const req = await request(`_revs_limit`, "GET");
+        if (req.status == 200) {
+            revsLimit = req.text.trim();
+            this._info(`Remote database _revs_limit: ${revsLimit}`);
+        } else {
+            this._notice(`Failed to get remote database _revs_limit. Status: ${req.status}`);
+            return;
+        }
+        const req2 = await request(`_revs_limit`, "PUT", 1);
+        if (req2.status == 200) {
+            this._info(`Set remote database _revs_limit to 1 for compaction.`);
+        }
+        try {
+            await this.compactDatabase();
+        } finally {
+            // Restore revs_limit
+            if (revsLimit) {
+                const req3 = await request(`_revs_limit`, "PUT", parseInt(revsLimit));
+                if (req3.status == 200) {
+                    this._info(`Restored remote database _revs_limit to ${revsLimit}.`);
+                } else {
+                    this._notice(
+                        `Failed to restore remote database _revs_limit. Status: ${req3.status} / ${req3.text}`
+                    );
+                }
+            }
+        }
+    }
+    async gcv3() {
+        if (!this.isAvailable()) return;
+        const replicator = this.plugin.replicator as LiveSyncCouchDBReplicator;
+        // Start one-shot replication to ensure all changes are synced before GC.
+        const r0 = await replicator.openOneShotReplication(this.settings, false, false, "sync");
+        if (!r0) {
+            this._notice(
+                "Failed to start one-shot replication before Garbage Collection. Garbage Collection Cancelled."
+            );
+            return;
+        }
+
+        // Delete the chunk, but first verify the following:
+        // Fetch the list of accepted nodes from the replicator.
+        const OPTION_CANCEL = "Cancel Garbage Collection";
+        const info = await this.plugin.replicator.getConnectedDeviceList();
+        if (!info) {
+            this._notice("No connected device information found. Cancelling Garbage Collection.");
+            return;
+        }
+        const { accepted_nodes, node_info } = info;
+        //1. Compare accepted_nodes and node_info, and confirm whether it is acceptable to delete nodes not present in node_info.
+        const infoMissingNodes = [] as string[];
+        for (const node of accepted_nodes) {
+            if (!(node in node_info)) {
+                infoMissingNodes.push(node);
+            }
+        }
+        if (infoMissingNodes.length > 0) {
+            const message = `The following accepted nodes are missing its node information:\n- ${infoMissingNodes.join("\n- ")}\n\nThis indicates that they have not been connected for some time or have been left on an older version.
+It is preferable to update all devices if possible. If you have any devices that are no longer in use, you can clear all accepted nodes by locking the remote once.`;
+
+            const OPTION_IGNORE = "Ignore and Proceed";
+            // const OPTION_DELETE = "Delete them and proceed";
+            const buttons = [OPTION_CANCEL, OPTION_IGNORE] as const;
+            const result = await this.plugin.confirm.askSelectStringDialogue(message, buttons, {
+                title: "Node Information Missing",
+                defaultAction: OPTION_CANCEL,
+            });
+            if (result === OPTION_CANCEL) {
+                this._notice("Garbage Collection cancelled by user.");
+                return;
+            } else if (result === OPTION_IGNORE) {
+                this._notice("Proceeding with Garbage Collection, ignoring missing nodes.");
+            }
+        }
+
+        //2. Check whether the progress values in NodeData are roughly the same (only the numerical part is needed).
+        const progressValues = Object.values(node_info)
+            .map((e) => e.progress.split("-")[0])
+            .map((e) => parseInt(e));
+        const maxProgress = Math.max(...progressValues);
+        const minProgress = Math.min(...progressValues);
+        const progressDifference = maxProgress - minProgress;
+        const OPTION_PROCEED = "Proceed Garbage Collection";
+        // - If they differ significantly, the node may not have completed synchronisation, potentially causing conflicts. Display a confirmation dialog as a precaution.
+        // - If they are not significantly different, display the standard confirmation dialogue message.
+
+        const detail = `> [!INFO]- The connected devices have been detected as follows:
+${Object.entries(node_info)
+    .map(
+        ([nodeId, nodeData]) =>
+            `> - Device: ${nodeData.device_name} (Node ID: ${nodeId})
+> - Obsidian version: ${nodeData.app_version}
+> - Plug-in version: ${nodeData.plugin_version}
+> - Progress: ${nodeData.progress.split("-")[0]}`
+    )
+    .join("\n")}
+`;
+        const message =
+            progressDifference != 0
+                ? `Some devices have differing progress values (max: ${maxProgress}, min: ${minProgress}).
+This may indicate that some devices have not completed synchronisation, which could lead to conflicts. Strongly recommend confirming that all devices are synchronised before proceeding.`
+                : `All devices have the same progress value (${maxProgress}). Your devices seem to be synchronised. And be able to proceed with Garbage Collection.`;
+        const buttons = [OPTION_PROCEED, OPTION_CANCEL] as const;
+        const defaultAction = progressDifference != 0 ? OPTION_CANCEL : OPTION_PROCEED;
+        const result = await this.plugin.confirm.askSelectStringDialogue(message + "\n\n" + detail, buttons, {
+            title: "Garbage Collection Confirmation",
+            defaultAction,
+        });
+        if (result !== OPTION_PROCEED) {
+            this._notice("Garbage Collection cancelled by user.");
+            return;
+        }
+        this._notice("Proceeding with Garbage Collection.");
+        //- 3. Once OK is confirmed in the dialogue, execute the chunk deletion. This is performed on the local database and immediately reflected on the remote. After reflecting on the remote, perform compaction.
+        const gcStartTime = Date.now();
+        // Perform Garbage Collection (new implementation).
+        const localDatabase = this.localDatabase.localDatabase;
+        const usedChunks = new Set<DocumentID>();
+        const allChunks = new Map<DocumentID, string>();
+
+        const IDs = this.localDatabase.findEntryNames("", "", {});
+        let i = 0;
+        const doc_count = (await localDatabase.info()).doc_count;
+        for await (const id of IDs) {
+            const doc = await this.localDatabase.getRaw(id as DocumentID);
+            i++;
+            if (i % 100 == 0) {
+                this._notice(`Garbage Collection: Scanned ${i} / ~${doc_count} `, "gc-scanning");
+            }
+            if (!doc) continue;
+            if ("children" in doc) {
+                const children = (doc.children || []) as DocumentID[];
+                for (const chunkId of children) {
+                    usedChunks.add(chunkId);
+                }
+            } else if (doc.type === EntryTypes.CHUNK) {
+                allChunks.set(doc._id as DocumentID, doc._rev);
+            }
+        }
+        this._notice(
+            `Garbage Collection: Scanning completed. Total chunks: ${allChunks.size}, Used chunks: ${usedChunks.size}`,
+            "gc-scanning"
+        );
+
+        const unusedChunks = [...allChunks.keys()].filter((e) => !usedChunks.has(e));
+        this._notice(`Garbage Collection: Found ${unusedChunks.length} unused chunks to delete.`, "gc-scanning");
+        const deleteChunkDocs = unusedChunks.map(
+            (chunkId) =>
+                ({
+                    _id: chunkId,
+                    _deleted: true,
+                    _rev: allChunks.get(chunkId),
+                }) as EntryLeaf
+        );
+        const response = await localDatabase.bulkDocs(deleteChunkDocs);
+        const deletedCount = response.filter((e) => "ok" in e).length;
+        const gcEndTime = Date.now();
+        this._notice(
+            `Garbage Collection completed. Deleted chunks: ${deletedCount} / ${unusedChunks.length}. Time taken: ${(gcEndTime - gcStartTime) / 1000} seconds.`
+        );
+        // Send changes to remote
+        const r = await replicator.openOneShotReplication(this.settings, false, false, "pushOnly");
+        // Wait for replication to complete
+        if (!r) {
+            this._notice("Failed to start replication after Garbage Collection.");
+            return;
+        }
+        // Perform compaction
+        await this.compactDatabase();
+        this.clearHash();
+    }
+}
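Taken together, `gcv3()` follows a deliberately conservative sequence: a one-shot "sync" replication to align local and remote, two user confirmations (missing node information, then a per-device progress comparison), a full scan that partitions chunks into used and unused sets, tombstoning of the unused chunks via `bulkDocs`, a "pushOnly" replication to propagate the deletions, and finally `compactDatabase()` to reclaim space on the remote.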
@@ -271,11 +271,11 @@ export class P2PReplicator extends LiveSyncCommands implements P2PReplicatorBase
     }

     override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
-        services.replicator.handleGetNewReplicator(this._anyNewReplicator.bind(this));
-        services.databaseEvents.handleOnDatabaseInitialisation(this._everyOnInitializeDatabase.bind(this));
-        services.appLifecycle.handleOnInitialise(this._everyOnloadStart.bind(this));
-        services.appLifecycle.handleOnSuspending(this._everyBeforeSuspendProcess.bind(this));
-        services.appLifecycle.handleOnResumed(this._everyAfterResumeProcess.bind(this));
-        services.setting.handleSuspendExtraSync(this._allSuspendExtraSync.bind(this));
+        services.replicator.getNewReplicator.addHandler(this._anyNewReplicator.bind(this));
+        services.databaseEvents.onDatabaseInitialisation.addHandler(this._everyOnInitializeDatabase.bind(this));
+        services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
+        services.appLifecycle.onSuspending.addHandler(this._everyBeforeSuspendProcess.bind(this));
+        services.appLifecycle.onResumed.addHandler(this._everyAfterResumeProcess.bind(this));
+        services.setting.suspendExtraSync.addHandler(this._allSuspendExtraSync.bind(this));
     }
 }
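Note: this hunk is representative of the whole change set: direct `handleXxx(...)` registration calls are replaced by explicit handler objects exposing `addHandler` (multicast events every module may subscribe to) and, elsewhere, `setHandler` (single-implementation points). A hedged sketch of the assumed shape of these primitives (not the plugin's actual classes):

    type Handler<A extends unknown[], R> = (...args: A) => R;

    class HandlerSlot<A extends unknown[], R> {
        private handler?: Handler<A, R>;
        // Exactly one implementation; the last registration wins.
        setHandler(h: Handler<A, R>): void {
            this.handler = h;
        }
        invoke(...args: A): R {
            if (!this.handler) throw new Error("No handler registered");
            return this.handler(...args);
        }
    }

    class MulticastEvent<A extends unknown[]> {
        private handlers: Handler<A, unknown>[] = [];
        // Many modules may subscribe to the same event.
        addHandler(h: Handler<A, unknown>): void {
            this.handlers.push(h);
        }
        async emit(...args: A): Promise<void> {
            for (const h of this.handlers) await h(...args);
        }
    }

Under this shape, `onBindFunction` becomes purely declarative: each module lists which service points it serves, and the core invokes the slot or event rather than calling modules directly.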
@@ -1,4 +1,4 @@
-import { Menu, WorkspaceLeaf } from "obsidian";
+import { Menu, WorkspaceLeaf } from "@/deps.ts";
 import ReplicatorPaneComponent from "./P2PReplicatorPane.svelte";
 import type ObsidianLiveSyncPlugin from "../../../main.ts";
 import { mount } from "svelte";
2	src/lib
Submodule src/lib updated: 9393ff1e08...cd32d3d326
474	src/main.ts
@@ -23,10 +23,9 @@ import type { IObsidianModule } from "./modules/AbstractObsidianModule.ts";

 import { ModuleDev } from "./modules/extras/ModuleDev.ts";
 import { ModuleFileAccessObsidian } from "./modules/coreObsidian/ModuleFileAccessObsidian.ts";
 import { ModuleInputUIObsidian } from "./modules/coreObsidian/ModuleInputUIObsidian.ts";
 import { ModuleMigration } from "./modules/essential/ModuleMigration.ts";

-import { ModuleCheckRemoteSize } from "./modules/coreFeatures/ModuleCheckRemoteSize.ts";
+import { ModuleCheckRemoteSize } from "./modules/essentialObsidian/ModuleCheckRemoteSize.ts";
 import { ModuleConflictResolver } from "./modules/coreFeatures/ModuleConflictResolver.ts";
 import { ModuleInteractiveConflictResolver } from "./modules/features/ModuleInteractiveConflictResolver.ts";
 import { ModuleLog } from "./modules/features/ModuleLog.ts";
@@ -70,31 +69,7 @@ import { P2PReplicator } from "./features/P2PSync/CmdP2PReplicator.ts";
 import type { LiveSyncManagers } from "./lib/src/managers/LiveSyncManagers.ts";
 import { ObsidianServiceHub } from "./modules/services/ObsidianServices.ts";
 import type { InjectableServiceHub } from "./lib/src/services/InjectableServices.ts";
-
-// function throwShouldBeOverridden(): never {
-//     throw new Error("This function should be overridden by the module.");
-// }
-// const InterceptiveAll = Promise.resolve(true);
-// const InterceptiveEvery = Promise.resolve(true);
-// const InterceptiveAny = Promise.resolve(undefined);
-
-/**
- * All $prefixed functions are hooked by the modules. Be careful when calling them directly.
- * Please refer to the module's source code to understand each function.
- * $$    : Completely overridden functions.
- * $all  : Process all modules and return all results.
- * $every: Process all modules until the first failure.
- * $any  : Process all modules until the first success.
- * $     : Other interceptive points. You should manually assign the module.
- * All of the above are performed in the injectModules function.
- *
- * No longer used! See AppLifecycleService in Services.ts.
- * For a while, just commented out some previously used code. (Sorry, some are deleted...)
- * 'Convention over configuration' was a lie for me. At the least, it sorely lacked refactorability.
- *
- * Still, some modules are separated and connected by the `ThroughHole` class.
- * However, it is not a good design. I am going to manage the modules in a more explicit way.
- */
+import type { ServiceContext } from "./lib/src/services/ServiceHub.ts";

 export default class ObsidianLiveSyncPlugin
     extends Plugin
@@ -108,7 +83,7 @@ export default class ObsidianLiveSyncPlugin
     /**
      * The service hub for managing all services.
      */
-    _services: InjectableServiceHub = new ObsidianServiceHub(this);
+    _services: InjectableServiceHub<ServiceContext> = new ObsidianServiceHub(this);
     get services() {
         return this._services;
     }
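Note: the only change in this hunk is the added type parameter. A hedged sketch of the idea: parameterising the hub over a context type gives every service a shared, typed context instead of an untyped one (the shapes below are illustrative, not the plugin's actual definitions):

    interface ServiceHubLike<C> {
        readonly context: C;
    }
    // Hypothetical usage: an Obsidian-flavoured hub fixes C to its own context type.
    type ExampleContext = { pluginName: string };
    class ExampleHub implements ServiceHubLike<ExampleContext> {
        constructor(public readonly context: ExampleContext) {}
    }
    const hub = new ExampleHub({ pluginName: "obsidian-livesync" });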
@@ -161,7 +136,6 @@ export default class ObsidianLiveSyncPlugin
             new ModuleObsidianSettingsAsMarkdown(this, this),
             new ModuleObsidianSettingDialogue(this, this),
             new ModuleLog(this, this),
             new ModuleInputUIObsidian(this, this),
             new ModuleObsidianMenu(this, this),
             new ModuleRebuilder(this),
             new ModuleSetupObsidian(this, this),
@@ -170,9 +144,7 @@ export default class ObsidianLiveSyncPlugin
             new ModuleRedFlag(this),
             new ModuleInteractiveConflictResolver(this, this),
             new ModuleObsidianGlobalHistory(this, this),
-            // Common modules
-            // Note: Platform-dependent functions are not entirely dependent on the core only, as they are from platform-dependent modules. Stubbing is sometimes required.
-            new ModuleCheckRemoteSize(this),
+            new ModuleCheckRemoteSize(this, this),
             // Test and Dev Modules
             new ModuleDev(this, this),
             new ModuleReplicateTest(this, this),
@@ -186,17 +158,15 @@ export default class ObsidianLiveSyncPlugin
         }
         throw new Error(`Module ${constructor} not found or not loaded.`);
     }
-    // injected = injectModules(this, [...this.modules, ...this.addOns] as ICoreModule[]);
     // <-- Module System

     // Following are plugged by the modules.

     settings!: ObsidianLiveSyncSettings;
     localDatabase!: LiveSyncLocalDB;
     managers!: LiveSyncManagers;
     simpleStore!: SimpleStore<CheckPointInfo>;
     replicator!: LiveSyncAbstractReplicator;
-    confirm!: Confirm;
+    get confirm(): Confirm {
+        return this.services.UI.confirm;
+    }
     storageAccess!: StorageAccess;
     databaseFileAccess!: DatabaseFileAccess;
     fileHandler!: ModuleFileHandler;
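Note: `confirm` changes from a module-assigned field to a getter that resolves through the service hub at call time, so there is no startup window in which the field is still unassigned. A minimal sketch of the pattern (the `Confirm` shape is an assumption):

    type Confirm = { askYesNo(message: string): Promise<boolean> }; // assumed minimal shape

    class Example {
        constructor(private services: { UI: { confirm: Confirm } }) {}
        // Before: `confirm!: Confirm;` had to be assigned by a module during load.
        // After: always delegates to the UI service when accessed.
        get confirm(): Confirm {
            return this.services.UI.confirm;
        }
    }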
@@ -234,436 +204,6 @@ export default class ObsidianLiveSyncPlugin
         syncStatus: "CLOSED" as DatabaseConnectingStatus,
     });

-    // --> Events
-
-    /*
-    LifeCycle of the plugin
-    0. onload (Obsidian kicks.)
-    1. onLiveSyncLoad
-    2. (event) EVENT_PLUGIN_LOADED
-    3. $everyOnloadStart
-        -- Load settings
-        -- Open database
-        --
-    4. $everyOnloadAfterLoadSettings
-    5. $everyOnload
-    6. (addOns) onload
-    --
-    onLiveSyncReady
-    -- $everyOnLayoutReady
-    -- EVENT_LAYOUT_READY
-    (initializeDatabase)
-    -- $everyOnFirstInitialize
-    -- realizeSettingSyncMode
-    -- waitForReplicationOnce (if syncOnStart and not LiveSync)
-    -- scanStat (not waiting for the result)
-
-    ---
-
-    Finalization
-    0. onunload (Obsidian kicks.)
-    1. onLiveSyncUnload
-    2. (event) EVENT_PLUGIN_UNLOADED
-    3. $allStartOnUnload
-    4. $allOnUnload
-    5. (addOns) onunload
-    6. localDatabase.onunload
-    7. replicator.closeReplication
-    8. localDatabase.close
-    9. (event) EVENT_PLATFORM_UNLOADED
-
-    */
-
-    // $everyOnLayoutReady(): Promise<boolean> {
-    //     //TODO: AppLifecycleService.onLayoutReady
-    //     return InterceptiveEvery;
-    // }
-    // $everyOnFirstInitialize(): Promise<boolean> {
-    //     //TODO: AppLifecycleService.onFirstInitialize
-    //     return InterceptiveEvery;
-    // }
-
-    // Some module should call this function to start the plugin.
-    // $$onLiveSyncReady(): Promise<false | undefined> {
-    //     //TODO: AppLifecycleService.onLiveSyncReady
-    //     throwShouldBeOverridden();
-    // }
-    // $$wireUpEvents(): void {
-    //     //TODO: AppLifecycleService.wireUpEvents
-    //     throwShouldBeOverridden();
-    // }
-    // $$onLiveSyncLoad(): Promise<void> {
-    //     //TODO: AppLifecycleService.onLoad
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$onLiveSyncUnload(): Promise<void> {
-    //     //TODO: AppLifecycleService.onAppUnload
-    //     throwShouldBeOverridden();
-    // }
-
-    // $allScanStat(): Promise<boolean> {
-    //     //TODO: AppLifecycleService.scanStartupIssues
-    //     return InterceptiveAll;
-    // }
-    // $everyOnloadStart(): Promise<boolean> {
-    //     //TODO: AppLifecycleService.onInitialise
-    //     return InterceptiveEvery;
-    // }
-
-    // $everyOnloadAfterLoadSettings(): Promise<boolean> {
-    //     //TODO: AppLifecycleService.onApplyStartupLoaded
-    //     return InterceptiveEvery;
-    // }
-
-    // $everyOnload(): Promise<boolean> {
-    //     //TODO: AppLifecycleService.onLoaded
-    //     return InterceptiveEvery;
-    // }
-
-    // $anyHandlerProcessesFileEvent(item: FileEventItem): Promise<boolean | undefined> {
-    //     //TODO: FileProcessingService.processFileEvent
-    //     return InterceptiveAny;
-    // }
-
-    // $allStartOnUnload(): Promise<boolean> {
-    //     //TODO: AppLifecycleService.onBeforeUnload
-    //     return InterceptiveAll;
-    // }
-    // $allOnUnload(): Promise<boolean> {
-    //     //TODO: AppLifecycleService.onUnload
-    //     return InterceptiveAll;
-    // }
-
-    // $$openDatabase(): Promise<boolean> {
-    //     // DatabaseService.openDatabase
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$realizeSettingSyncMode(): Promise<void> {
-    //     // SettingService.realiseSetting
-    //     throwShouldBeOverridden();
-    // }
-    // $$performRestart() {
-    //     // AppLifecycleService.performRestart
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$clearUsedPassphrase(): void {
-    //     // SettingService.clearUsedPassphrase
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$decryptSettings(settings: ObsidianLiveSyncSettings): Promise<ObsidianLiveSyncSettings> {
-    //     // SettingService.decryptSettings
-    //     throwShouldBeOverridden();
-    // }
-    // $$adjustSettings(settings: ObsidianLiveSyncSettings): Promise<ObsidianLiveSyncSettings> {
-    //     // SettingService.adjustSettings
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$loadSettings(): Promise<void> {
-    //     // SettingService.loadSettings
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$saveDeviceAndVaultName(): void {
-    //     // SettingService.saveDeviceAndVaultName
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$saveSettingData(): Promise<void> {
-    //     // SettingService.saveSettingData
-    //     throwShouldBeOverridden();
-    // }
-
-    // $anyProcessOptionalFileEvent(path: FilePath): Promise<boolean | undefined> {
-    //     // FileProcessingService.processOptionalFileEvent
-    //     return InterceptiveAny;
-    // }
-
-    // $everyCommitPendingFileEvent(): Promise<boolean> {
-    //     // FileProcessingService.commitPendingFileEvent
-    //     return InterceptiveEvery;
-    // }
-
-    // ->
-    // $anyGetOptionalConflictCheckMethod(path: FilePathWithPrefix): Promise<boolean | undefined | "newer"> {
-    //     return InterceptiveAny;
-    // }
-
-    // $$queueConflictCheckIfOpen(file: FilePathWithPrefix): Promise<void> {
-    //     // ConflictEventManager.queueCheckForConflictIfOpen
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$queueConflictCheck(file: FilePathWithPrefix): Promise<void> {
-    //     // ConflictEventManager.queueCheckForConflict
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$waitForAllConflictProcessed(): Promise<boolean> {
-    //     // ConflictEventManager.ensureAllConflictProcessed
-    //     throwShouldBeOverridden();
-    // }
-
-    //<-- Conflict Check
-
-    // $anyProcessOptionalSyncFiles(doc: LoadedEntry): Promise<boolean | undefined> {
-    //     // ReplicationService.processOptionalSyncFile
-    //     return InterceptiveAny;
-    // }
-
-    // $anyProcessReplicatedDoc(doc: MetaEntry): Promise<boolean | undefined> {
-    //     // ReplicationService.processReplicatedDocument
-    //     return InterceptiveAny;
-    // }
-
-    //---> Sync
-    // $$parseReplicationResult(docs: Array<PouchDB.Core.ExistingDocument<EntryDoc>>): void {
-    //     // ReplicationService.parseSynchroniseResult
-    //     throwShouldBeOverridden();
-    // }
-
-    // $anyModuleParsedReplicationResultItem(docs: PouchDB.Core.ExistingDocument<EntryDoc>): Promise<boolean | undefined> {
-    //     // ReplicationService.processVirtualDocument
-    //     return InterceptiveAny;
-    // }
-    // $everyBeforeRealizeSetting(): Promise<boolean> {
-    //     // SettingEventManager.beforeRealiseSetting
-    //     return InterceptiveEvery;
-    // }
-    // $everyAfterRealizeSetting(): Promise<boolean> {
-    //     // SettingEventManager.onSettingRealised
-    //     return InterceptiveEvery;
-    // }
-    // $everyRealizeSettingSyncMode(): Promise<boolean> {
-    //     // SettingEventManager.onRealiseSetting
-    //     return InterceptiveEvery;
-    // }
-
-    // $everyBeforeSuspendProcess(): Promise<boolean> {
-    //     // AppLifecycleService.onSuspending
-    //     return InterceptiveEvery;
-    // }
-    // $everyOnResumeProcess(): Promise<boolean> {
-    //     // AppLifecycleService.onResuming
-    //     return InterceptiveEvery;
-    // }
-    // $everyAfterResumeProcess(): Promise<boolean> {
-    //     // AppLifecycleService.onResumed
-    //     return InterceptiveEvery;
-    // }
-
-    // $$fetchRemotePreferredTweakValues(trialSetting: RemoteDBSettings): Promise<TweakValues | false> {
-    //     //TODO: TweakValueService.fetchRemotePreferred
-    //     throwShouldBeOverridden();
-    // }
-    // $$checkAndAskResolvingMismatchedTweaks(preferred: Partial<TweakValues>): Promise<[TweakValues | boolean, boolean]> {
-    //     //TODO: TweakValueService.checkAndAskResolvingMismatched
-    //     throwShouldBeOverridden();
-    // }
-    // $$askResolvingMismatchedTweaks(preferredSource: TweakValues): Promise<"OK" | "CHECKAGAIN" | "IGNORE"> {
-    //     //TODO: TweakValueService.askResolvingMismatched
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$checkAndAskUseRemoteConfiguration(
-    //     settings: RemoteDBSettings
-    // ): Promise<{ result: false | TweakValues; requireFetch: boolean }> {
-    //     // TweakValueService.checkAndAskUseRemoteConfiguration
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$askUseRemoteConfiguration(
-    //     trialSetting: RemoteDBSettings,
-    //     preferred: TweakValues
-    // ): Promise<{ result: false | TweakValues; requireFetch: boolean }> {
-    //     // TweakValueService.askUseRemoteConfiguration
-    //     throwShouldBeOverridden();
-    // }
-    // $everyBeforeReplicate(showMessage: boolean): Promise<boolean> {
-    //     // ReplicationService.beforeReplicate
-    //     return InterceptiveEvery;
-    // }
-
-    // $$canReplicate(showMessage: boolean = false): Promise<boolean> {
-    //     // ReplicationService.isReplicationReady
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$replicate(showMessage: boolean = false): Promise<boolean | void> {
-    //     // ReplicationService.replicate
-    //     throwShouldBeOverridden();
-    // }
-    // $$replicateByEvent(showMessage: boolean = false): Promise<boolean | void> {
-    //     // ReplicationService.replicateByEvent
-    //     throwShouldBeOverridden();
-    // }
-
-    // $everyOnDatabaseInitialized(showingNotice: boolean): Promise<boolean> {
-    //     // DatabaseEventService.onDatabaseInitialised
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$initializeDatabase(
-    //     showingNotice: boolean = false,
-    //     reopenDatabase = true,
-    //     ignoreSuspending: boolean = false
-    // ): Promise<boolean> {
-    //     // DatabaseEventService.initializeDatabase
-    //     throwShouldBeOverridden();
-    // }
-
-    // $anyAfterConnectCheckFailed(): Promise<boolean | "CHECKAGAIN" | undefined> {
-    //     // ReplicationService.checkConnectionFailure
-    //     return InterceptiveAny;
-    // }
-
-    // $$replicateAllToServer(
-    //     showingNotice: boolean = false,
-    //     sendChunksInBulkDisabled: boolean = false
-    // ): Promise<boolean> {
-    //     // RemoteService.replicateAllToRemote
-    //     throwShouldBeOverridden();
-    // }
-    // $$replicateAllFromServer(showingNotice: boolean = false): Promise<boolean> {
-    //     // RemoteService.replicateAllFromRemote
-    //     throwShouldBeOverridden();
-    // }
-
-    // Remote Governing
-    // $$markRemoteLocked(lockByClean: boolean = false): Promise<void> {
-    //     // RemoteService.markLocked
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$markRemoteUnlocked(): Promise<void> {
-    //     // RemoteService.markUnlocked
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$markRemoteResolved(): Promise<void> {
-    //     // RemoteService.markResolved
-    //     throwShouldBeOverridden();
-    // }
-
-    // <-- Remote Governing
-
-    // $$isFileSizeExceeded(size: number): boolean {
-    //     // VaultService.isFileSizeTooLarge
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$performFullScan(showingNotice?: boolean, ignoreSuspending?: boolean): Promise<void> {
-    //     // VaultService.scanVault
-    //     throwShouldBeOverridden();
-    // }
-
-    // $anyResolveConflictByUI(
-    //     filename: FilePathWithPrefix,
-    //     conflictCheckResult: diff_result
-    // ): Promise<boolean | undefined> {
-    //     // ConflictService.resolveConflictByUserInteraction
-    //     return InterceptiveAny;
-    // }
-    // $$resolveConflictByDeletingRev(
-    //     path: FilePathWithPrefix,
-    //     deleteRevision: string,
-    //     subTitle = ""
-    // ): Promise<typeof MISSING_OR_ERROR | typeof AUTO_MERGED> {
-    //     // ConflictService.resolveByDeletingRevision
-    //     throwShouldBeOverridden();
-    // }
-    // $$resolveConflict(filename: FilePathWithPrefix): Promise<void> {
-    //     // ConflictService.resolveConflict
-    //     throwShouldBeOverridden();
-    // }
-    // $anyResolveConflictByNewest(filename: FilePathWithPrefix): Promise<boolean> {
-    //     // ConflictService.resolveByNewest
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$resetLocalDatabase(): Promise<void> {
-    //     // DatabaseService.resetDatabase
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$tryResetRemoteDatabase(): Promise<void> {
-    //     // RemoteService.tryResetDatabase
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$tryCreateRemoteDatabase(): Promise<void> {
-    //     // RemoteService.tryCreateDatabase
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$isIgnoredByIgnoreFiles(file: string | UXFileInfoStub): Promise<boolean> {
-    //     // VaultService.isIgnoredByIgnoreFiles
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$isTargetFile(file: string | UXFileInfoStub, keepFileCheckList = false): Promise<boolean> {
-    //     // VaultService.isTargetFile
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$askReload(message?: string) {
-    //     // AppLifecycleService.askRestart
-    //     throwShouldBeOverridden();
-    // }
-    // $$scheduleAppReload() {
-    //     // AppLifecycleService.scheduleRestart
-    //     throwShouldBeOverridden();
-    // }
-
-    //--- Setup
-    // $allSuspendAllSync(): Promise<boolean> {
-    //     // SettingEventManager.suspendAllSync
-    //     return InterceptiveAll;
-    // }
-    // $allSuspendExtraSync(): Promise<boolean> {
-    //     // SettingEventManager.suspendExtraSync
-    //     return InterceptiveAll;
-    // }
-
-    // $allAskUsingOptionalSyncFeature(opt: { enableFetch?: boolean; enableOverwrite?: boolean }): Promise<boolean> {
-    //     // SettingEventManager.suggestOptionalFeatures
-    //     throwShouldBeOverridden();
-    // }
-    // $anyConfigureOptionalSyncFeature(mode: string): Promise<void> {
-    //     // SettingEventManager.enableOptionalFeature
-    //     throwShouldBeOverridden();
-    // }
-
-    // $$showView(viewType: string): Promise<void> {
-    //     // UIManager.showWindow
-    //     throwShouldBeOverridden();
-    // }
-
-    // For development: ensure reliability MORE AND MORE. May this plug-in help all of us.
-    // $everyModuleTest(): Promise<boolean> {
-    //     return InterceptiveEvery;
-    // }
-    // $everyModuleTestMultiDevice(): Promise<boolean> {
-    //     return InterceptiveEvery;
-    // }
-    // $$addTestResult(name: string, key: string, result: boolean, summary?: string, message?: string): void {
-    //     throwShouldBeOverridden();
-    // }
-
-    // _isThisModuleEnabled(): boolean {
-    //     return true;
-    // }
-
-    // $anyGetAppId(): Promise<string | undefined> {
-    //     // APIService.getAppId
-    //     return InterceptiveAny;
-    // }
-
-    // Plug-in's overridable functions
     onload() {
         void this.services.appLifecycle.onLoad();
     }
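Note: the removed block documents the old interceptive points; their semantics survive in the service layer. A hedged sketch of the three multicast strategies the `$all`, `$every`, and `$any` prefixes named (helper names are illustrative, not the plugin's actual code):

    type AsyncHandler<A extends unknown[], R> = (...args: A) => Promise<R>;

    // $all: run every handler for its side effects; failures do not abort the chain.
    async function runAll<A extends unknown[]>(handlers: AsyncHandler<A, unknown>[], ...args: A): Promise<boolean> {
        for (const h of handlers) {
            try {
                await h(...args);
            } catch {
                /* logged and ignored in the real implementation */
            }
        }
        return true;
    }

    // $every: stop at the first failure; the first falsy result wins.
    async function runEvery<A extends unknown[]>(handlers: AsyncHandler<A, boolean>[], ...args: A): Promise<boolean> {
        for (const h of handlers) {
            if (!(await h(...args))) return false;
        }
        return true;
    }

    // $any: stop at the first success; the first truthy result wins.
    async function runAny<A extends unknown[], R>(
        handlers: AsyncHandler<A, R | undefined>[],
        ...args: A
    ): Promise<R | undefined> {
        for (const h of handlers) {
            const ret = await h(...args);
            if (ret) return ret;
        }
        return undefined;
    }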
@@ -2,138 +2,6 @@ import { LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE, Logger } from "oct
 import type { LOG_LEVEL } from "../lib/src/common/types";
 import type { LiveSyncCore } from "../main";
+import { __$checkInstanceBinding } from "../lib/src/dev/checks";
-// import { unique } from "octagonal-wheels/collection";
-// import type { IObsidianModule } from "./AbstractObsidianModule.ts";
-// import type {
-//     ICoreModuleBase,
-//     AllInjectableProps,
-//     AllExecuteProps,
-//     EveryExecuteProps,
-//     AnyExecuteProps,
-//     ICoreModule,
-// } from "./ModuleTypes";
-
-// function isOverridableKey(key: string): key is keyof ICoreModuleBase {
-//     return key.startsWith("$");
-// }
-
-// function isInjectableKey(key: string): key is keyof AllInjectableProps {
-//     return key.startsWith("$$");
-// }
-
-// function isAllExecuteKey(key: string): key is keyof AllExecuteProps {
-//     return key.startsWith("$all");
-// }
-// function isEveryExecuteKey(key: string): key is keyof EveryExecuteProps {
-//     return key.startsWith("$every");
-// }
-// function isAnyExecuteKey(key: string): key is keyof AnyExecuteProps {
-//     return key.startsWith("$any");
-// }
-/**
- * All $prefixed functions are hooked by the modules. Be careful when calling them directly.
- * Please refer to the module's source code to understand each function.
- * $$    : Completely overridden functions.
- * $all  : Process all modules and return all results.
- * $every: Process all modules until the first failure.
- * $any  : Process all modules until the first success.
- * $     : Other interceptive points. You should manually assign the module.
- * All of the above are performed in the injectModules function.
- */
-// export function injectModules<T extends ICoreModule>(target: T, modules: ICoreModule[]) {
-//     const allKeys = unique([
-//         ...Object.keys(Object.getOwnPropertyDescriptors(target)),
-//         ...Object.keys(Object.getOwnPropertyDescriptors(Object.getPrototypeOf(target))),
-//     ]).filter((e) => e.startsWith("$")) as (keyof ICoreModule)[];
-//     const moduleMap = new Map<string, IObsidianModule[]>();
-//     for (const module of modules) {
-//         for (const key of allKeys) {
-//             if (isOverridableKey(key)) {
-//                 if (key in module) {
-//                     const list = moduleMap.get(key) || [];
-//                     if (typeof module[key] === "function") {
-//                         module[key] = module[key].bind(module) as any;
-//                     }
-//                     list.push(module);
-//                     moduleMap.set(key, list);
-//                 }
-//             }
-//         }
-//     }
-//     Logger(`Injecting modules for ${target.constructor.name}`, LOG_LEVEL_VERBOSE);
-//     for (const key of allKeys) {
-//         const modules = moduleMap.get(key) || [];
-//         if (isInjectableKey(key)) {
-//             if (modules.length == 0) {
-//                 throw new Error(`No module injected for ${key}. This is a fatal error.`);
-//             }
-//             target[key] = modules[0][key]! as any;
-//             Logger(`[${modules[0].constructor.name}]: Injected ${key}`, LOG_LEVEL_VERBOSE);
-//         } else if (isAllExecuteKey(key)) {
-//             const modules = moduleMap.get(key) || [];
-//             target[key] = async (...args: any) => {
-//                 for (const module of modules) {
-//                     try {
-//                         //@ts-ignore
-//                         await module[key]!(...args);
-//                     } catch (ex) {
-//                         Logger(`[${module.constructor.name}]: All handler for ${key} failed`, LOG_LEVEL_VERBOSE);
-//                         Logger(ex, LOG_LEVEL_VERBOSE);
-//                     }
-//                 }
-//                 return true;
-//             };
-//             for (const module of modules) {
-//                 Logger(`[${module.constructor.name}]: Injected (All) ${key}`, LOG_LEVEL_VERBOSE);
-//             }
-//         } else if (isEveryExecuteKey(key)) {
-//             target[key] = async (...args: any) => {
-//                 for (const module of modules) {
-//                     try {
-//                         //@ts-ignore:2556
-//                         const ret = await module[key]!(...args);
-//                         if (ret !== undefined && !ret) {
-//                             // Failed; return that falsy value.
-//                             return ret;
-//                         }
-//                     } catch (ex) {
-//                         Logger(`[${module.constructor.name}]: Every handler for ${key} failed`);
-//                         Logger(ex, LOG_LEVEL_VERBOSE);
-//                     }
-//                 }
-//                 return true;
-//             };
-//             for (const module of modules) {
-//                 Logger(`[${module.constructor.name}]: Injected (Every) ${key}`, LOG_LEVEL_VERBOSE);
-//             }
-//         } else if (isAnyExecuteKey(key)) {
-//             //@ts-ignore
-//             target[key] = async (...args: any[]) => {
-//                 for (const module of modules) {
-//                     try {
-//                         //@ts-ignore:2556
-//                         const ret = await module[key](...args);
-//                         // If a truthy value is returned, return it.
-//                         if (ret) {
-//                             return ret;
-//                         }
-//                     } catch (ex) {
-//                         Logger(`[${module.constructor.name}]: Any handler for ${key} failed`);
-//                         Logger(ex, LOG_LEVEL_VERBOSE);
-//                     }
-//                 }
-//                 return false;
-//             };
-//             for (const module of modules) {
-//                 Logger(`[${module.constructor.name}]: Injected (Any) ${key}`, LOG_LEVEL_VERBOSE);
-//             }
-//         } else {
-//             Logger(`No injected handler for ${key}`, LOG_LEVEL_VERBOSE);
-//         }
-//     }
-//     Logger(`Injected modules for ${target.constructor.name}`, LOG_LEVEL_VERBOSE);
-//     return true;
-// }

 export abstract class AbstractModule {
     _log = (msg: any, level: LOG_LEVEL = LOG_LEVEL_INFO, key?: string) => {
@@ -76,11 +76,11 @@ export class ModuleDatabaseFileAccess extends AbstractModule implements Database
     async checkIsTargetFile(file: UXFileInfoStub | FilePathWithPrefix): Promise<boolean> {
         const path = getStoragePathFromUXFileInfo(file);
         if (!(await this.services.vault.isTargetFile(path))) {
-            this._log(`File is not target`, LOG_LEVEL_VERBOSE);
+            this._log(`File is not target: ${path}`, LOG_LEVEL_VERBOSE);
             return false;
         }
         if (shouldBeIgnored(path)) {
-            this._log(`File should be ignored`, LOG_LEVEL_VERBOSE);
+            this._log(`File should be ignored: ${path}`, LOG_LEVEL_VERBOSE);
             return false;
         }
         return true;
@@ -346,7 +346,7 @@ export class ModuleDatabaseFileAccess extends AbstractModule implements Database
         return ret;
     }
     onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
-        services.appLifecycle.handleOnLoaded(this._everyOnload.bind(this));
-        services.test.handleTest(this._everyModuleTest.bind(this));
+        services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
+        services.test.test.addHandler(this._everyModuleTest.bind(this));
     }
 }
@@ -266,7 +266,10 @@ export class ModuleFileHandler extends AbstractModule {
             // Check that the file is not corrupted.
             // (Zero is a special case; it may be created by some APIs and might be acceptable.)
             if (docRead.size != 0 && docRead.size !== readAsBlob(docRead).size) {
-                this._log(`File ${path} seems to be corrupted! Writing prevented.`, LOG_LEVEL_NOTICE);
+                this._log(
+                    `File ${path} seems to be corrupted! Writing prevented. (${docRead.size} != ${readAsBlob(docRead).size})`,
+                    LOG_LEVEL_NOTICE
+                );
                 return false;
             }
         }
@@ -433,8 +436,8 @@ export class ModuleFileHandler extends AbstractModule {
         );
     }
     onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
-        services.appLifecycle.handleOnInitialise(this._everyOnloadStart.bind(this));
-        services.fileProcessing.handleProcessFileEvent(this._anyHandlerProcessesFileEvent.bind(this));
-        services.replication.handleProcessSynchroniseResult(this._anyProcessReplicatedDoc.bind(this));
+        services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
+        services.fileProcessing.processFileEvent.addHandler(this._anyHandlerProcessesFileEvent.bind(this));
+        services.replication.processSynchroniseResult.addHandler(this._anyProcessReplicatedDoc.bind(this));
     }
 }
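Note: the improved message now includes both sizes, which makes mismatches diagnosable from the log alone. The guard itself compares the entry's recorded size with the size of the content reassembled from chunks; a minimal sketch, assuming `readAsBlob` materialises the entry as a Blob:

    function isPossiblyCorrupted(recordedSize: number, assembled: Blob): boolean {
        // Zero-byte entries are a special case: some APIs legitimately create them.
        if (recordedSize === 0) return false;
        // If metadata and reassembled content disagree, refuse to write the file.
        return recordedSize !== assembled.size;
    }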
@@ -39,8 +39,8 @@ export class ModuleLocalDatabaseObsidian extends AbstractModule {
         return this.localDatabase != null && this.localDatabase.isReady;
     }
     onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
-        services.database.handleIsDatabaseReady(this._isDatabaseReady.bind(this));
-        services.appLifecycle.handleOnInitialise(this._everyOnloadStart.bind(this));
-        services.database.handleOpenDatabase(this._openDatabase.bind(this));
+        services.database.isDatabaseReady.setHandler(this._isDatabaseReady.bind(this));
+        services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
+        services.database.openDatabase.setHandler(this._openDatabase.bind(this));
     }
 }
@@ -32,10 +32,10 @@ export class ModulePeriodicProcess extends AbstractModule {
     }

     onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
-        services.appLifecycle.handleOnUnload(this._allOnUnload.bind(this));
-        services.setting.handleBeforeRealiseSetting(this._everyBeforeRealizeSetting.bind(this));
-        services.setting.handleSettingRealised(this._everyAfterRealizeSetting.bind(this));
-        services.appLifecycle.handleOnSuspending(this._everyBeforeSuspendProcess.bind(this));
-        services.appLifecycle.handleOnResumed(this._everyAfterResumeProcess.bind(this));
+        services.appLifecycle.onUnload.addHandler(this._allOnUnload.bind(this));
+        services.setting.onBeforeRealiseSetting.addHandler(this._everyBeforeRealizeSetting.bind(this));
+        services.setting.onSettingRealised.addHandler(this._everyAfterRealizeSetting.bind(this));
+        services.appLifecycle.onSuspending.addHandler(this._everyBeforeSuspendProcess.bind(this));
+        services.appLifecycle.onResumed.addHandler(this._everyAfterResumeProcess.bind(this));
     }
 }
@@ -18,6 +18,6 @@ export class ModulePouchDB extends AbstractModule {
         return new PouchDB(name, optionPass);
     }
     onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
-        services.database.handleCreatePouchDBInstance(this._createPouchDBInstance.bind(this));
+        services.database.createPouchDBInstance.setHandler(this._createPouchDBInstance.bind(this));
     }
 }
@@ -1,5 +1,6 @@
 import { delay } from "octagonal-wheels/promises";
 import {
+    DEFAULT_SETTINGS,
     FLAGMD_REDFLAG2_HR,
     FLAGMD_REDFLAG3_HR,
     LOG_LEVEL_NOTICE,
@@ -58,7 +59,7 @@ Please enable them from the settings screen after setup is complete.`,
     async rebuildRemote() {
         await this.services.setting.suspendExtraSync();
         this.core.settings.isConfigured = true;
-
+        this.core.settings.notifyThresholdOfRemoteStorageSize = DEFAULT_SETTINGS.notifyThresholdOfRemoteStorageSize;
         await this.services.setting.realiseSetting();
         await this.services.remote.markLocked();
         await this.services.remote.tryResetDatabase();
@@ -79,6 +80,7 @@ Please enable them from the settings screen after setup is complete.`,
         await this.services.setting.suspendExtraSync();
         // await this.askUseNewAdapter();
         this.core.settings.isConfigured = true;
+        this.core.settings.notifyThresholdOfRemoteStorageSize = DEFAULT_SETTINGS.notifyThresholdOfRemoteStorageSize;
         await this.services.setting.realiseSetting();
         await this.resetLocalDatabase();
         await delay(1000);
@@ -191,6 +193,33 @@ Please enable them from the settings screen after setup is complete.`,
         await this.services.setting.suspendExtraSync();
         // await this.askUseNewAdapter();
         this.core.settings.isConfigured = true;
+        this.core.settings.notifyThresholdOfRemoteStorageSize = DEFAULT_SETTINGS.notifyThresholdOfRemoteStorageSize;
+        if (this.core.settings.maxMTimeForReflectEvents > 0) {
+            const date = new Date(this.core.settings.maxMTimeForReflectEvents);
+
+            const ask = `Your settings restrict file reflection times to no later than ${date}.
+
+**This is a recovery configuration.**
+
+This operation should only be performed on an empty vault.
+Are you sure you wish to proceed?`;
+            const PROCEED = "I understand, proceed";
+            const CANCEL = "Cancel operation";
+            const CLEARANDPROCEED = "Clear restriction and proceed";
+            const choices = [PROCEED, CLEARANDPROCEED, CANCEL] as const;
+            const ret = await this.core.confirm.askSelectStringDialogue(ask, choices, {
+                title: "Confirm restricted fetch",
+                defaultAction: CANCEL,
+                timeout: 0,
+            });
+            if (ret == CLEARANDPROCEED) {
+                this.core.settings.maxMTimeForReflectEvents = 0;
+                await this.core.saveSettings();
+            }
+            if (ret == CANCEL) {
+                return;
+            }
+        }
         await this.suspendReflectingDatabase();
         await this.services.setting.realiseSetting();
         await this.resetLocalDatabase();
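Note: the added block refuses to run a restricted fetch silently: when `maxMTimeForReflectEvents` is set, the user must explicitly proceed, clear the restriction, or cancel, and cancelling is the default. A compact sketch of this three-way confirmation pattern (the dialogue function is assumed to resolve to the chosen string):

    type Choice = "proceed" | "clear" | "cancel";

    async function confirmRestrictedFetch(
        ask: (message: string, choices: readonly Choice[], defaultChoice: Choice) => Promise<Choice>,
        clearRestriction: () => Promise<void>
    ): Promise<boolean> {
        const ret = await ask(
            "This is a recovery configuration. Proceed only on an empty vault.",
            ["proceed", "clear", "cancel"] as const,
            "cancel" // destructive paths should never be the default
        );
        if (ret === "clear") await clearRestriction();
        return ret !== "cancel"; // true means: go ahead with the fetch
    }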
@@ -237,7 +266,7 @@ Please enable them from the settings screen after setup is complete.`,
     async fetchRemoteChunks() {
         if (
             !this.core.settings.doNotSuspendOnFetching &&
             this.core.settings.readChunksOnline &&
             !this.core.settings.useOnlyLocalChunk &&
             this.core.settings.remoteType == REMOTE_COUCHDB
         ) {
             this._log(`Fetching chunks`, LOG_LEVEL_NOTICE);
@@ -272,10 +301,10 @@ Please enable them from the settings screen after setup is complete.`,
         this._log(`Done!`, LOG_LEVEL_NOTICE, "resolveAllConflictedFilesByNewerOnes");
     }
     onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
-        services.appLifecycle.handleOnLoaded(this._everyOnload.bind(this));
-        services.database.handleResetDatabase(this._resetLocalDatabase.bind(this));
-        services.remote.handleTryResetDatabase(this._tryResetRemoteDatabase.bind(this));
-        services.remote.handleTryCreateDatabase(this._tryCreateRemoteDatabase.bind(this));
-        services.setting.handleSuspendAllSync(this._allSuspendAllSync.bind(this));
+        services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
+        services.database.resetDatabase.setHandler(this._resetLocalDatabase.bind(this));
+        services.remote.tryResetDatabase.setHandler(this._tryResetRemoteDatabase.bind(this));
+        services.remote.tryCreateDatabase.setHandler(this._tryCreateRemoteDatabase.bind(this));
+        services.setting.suspendAllSync.addHandler(this._allSuspendAllSync.bind(this));
     }
 }
@@ -14,34 +14,15 @@ import { isLockAcquired, shareRunningResult, skipIfDuplicated } from "octagonal-
 import { balanceChunkPurgedDBs } from "@/lib/src/pouchdb/chunks";
 import { purgeUnreferencedChunks } from "@/lib/src/pouchdb/chunks";
 import { LiveSyncCouchDBReplicator } from "../../lib/src/replication/couchdb/LiveSyncReplicator";
-import { throttle } from "octagonal-wheels/function";
-import { arrayToChunkedArray } from "octagonal-wheels/collection";
-import {
-    SYNCINFO_ID,
-    VER,
-    type EntryBody,
-    type EntryDoc,
-    type EntryLeaf,
-    type LoadedEntry,
-    type MetaEntry,
-    type RemoteType,
-} from "../../lib/src/common/types";
-import { QueueProcessor } from "octagonal-wheels/concurrency/processor";
-import {
-    getPath,
-    isChunk,
-    isValidPath,
-    rateLimitedSharedExecution,
-    scheduleTask,
-    updatePreviousExecutionTime,
-} from "../../common/utils";
-import { isAnyNote } from "../../lib/src/common/utils";
+import { type EntryDoc, type RemoteType } from "../../lib/src/common/types";
+import { rateLimitedSharedExecution, scheduleTask, updatePreviousExecutionTime } from "../../common/utils";
 import { EVENT_FILE_SAVED, EVENT_ON_UNRESOLVED_ERROR, EVENT_SETTING_SAVED, eventHub } from "../../common/events";
 import type { LiveSyncAbstractReplicator } from "../../lib/src/replication/LiveSyncAbstractReplicator";

 import { $msg } from "../../lib/src/common/i18n";
 import { clearHandlers } from "../../lib/src/replication/SyncParamsHandler";
 import type { LiveSyncCore } from "../../main";
+import { ReplicateResultProcessor } from "./ReplicateResultProcessor";

 const KEY_REPLICATION_ON_EVENT = "replicationOnEvent";
 const REPLICATION_ON_EVENT_FORECASTED_TIME = 5000;
@@ -49,6 +30,7 @@ const REPLICATION_ON_EVENT_FORECASTED_TIME = 5000;
 export class ModuleReplicator extends AbstractModule {
     _replicatorType?: RemoteType;
     _previousErrors = new Set<string>();
+    processor: ReplicateResultProcessor = new ReplicateResultProcessor(this);

     showError(msg: string, max_log_level: LOG_LEVEL = LEVEL_NOTICE) {
         const level = this._previousErrors.has(msg) ? LEVEL_INFO : max_log_level;
@@ -73,6 +55,11 @@ export class ModuleReplicator extends AbstractModule {
             if (this._replicatorType !== setting.remoteType) {
                 void this.setReplicator();
             }
+            if (this.core.settings.suspendParseReplicationResult) {
+                this.processor.suspend();
+            } else {
+                this.processor.resume();
+            }
         });

         return Promise.resolve(true);
@@ -103,6 +90,10 @@ export class ModuleReplicator extends AbstractModule {
     _everyOnInitializeDatabase(db: LiveSyncLocalDB): Promise<boolean> {
         return this.setReplicator();
     }
+    _everyOnDatabaseInitialized(showNotice: boolean): Promise<boolean> {
+        fireAndForget(() => this.processor.restoreFromSnapshotOnce());
+        return Promise.resolve(true);
+    }

     _everyOnResetDatabase(db: LiveSyncLocalDB): Promise<boolean> {
         return this.setReplicator();
@@ -128,7 +119,7 @@ export class ModuleReplicator extends AbstractModule {
             this.showError("Failed to initialise the encryption key, preventing replication.");
             return false;
         }
-        await this.loadQueuedFiles();
+        await this.processor.restoreFromSnapshotOnce();
         this.clearErrors();
         return true;
     }
@@ -287,249 +278,10 @@ Even if you choose to clean up, you will see this option again if you exit Obsid
         }
         return await shareRunningResult(`replication`, () => this.services.replication.replicate());
     }

     _parseReplicationResult(docs: Array<PouchDB.Core.ExistingDocument<EntryDoc>>): void {
-        if (this.settings.suspendParseReplicationResult && !this.replicationResultProcessor.isSuspended) {
-            this.replicationResultProcessor.suspend();
-        }
-        this.replicationResultProcessor.enqueueAll(docs);
-        if (!this.settings.suspendParseReplicationResult && this.replicationResultProcessor.isSuspended) {
-            this.replicationResultProcessor.resume();
-        }
+        this.processor.enqueueAll(docs);
     }
-    _saveQueuedFiles = throttle(() => {
-        const saveData = this.replicationResultProcessor._queue
-            .filter((e) => e !== undefined && e !== null)
-            .map((e) => e?._id ?? ("" as string)) as string[];
-        const kvDBKey = "queued-files";
-        // localStorage.setItem(lsKey, saveData);
-        fireAndForget(() => this.core.kvDB.set(kvDBKey, saveData));
-    }, 100);
-    saveQueuedFiles() {
-        this._saveQueuedFiles();
-    }
-    async loadQueuedFiles() {
-        if (this.settings.suspendParseReplicationResult) return;
-        if (!this.settings.isConfigured) return;
-        try {
-            const kvDBKey = "queued-files";
-            // const ids = [...new Set(JSON.parse(localStorage.getItem(lsKey) || "[]"))] as string[];
-            const ids = [...new Set((await this.core.kvDB.get<string[]>(kvDBKey)) ?? [])];
-            const batchSize = 100;
-            const chunkedIds = arrayToChunkedArray(ids, batchSize);
-
-            // suspendParseReplicationResult is false here, so resume the processor if it has been suspended.
-            if (this.replicationResultProcessor.isSuspended) {
-                this.replicationResultProcessor.resume();
-            }
-            for await (const idsBatch of chunkedIds) {
-                const ret = await this.localDatabase.allDocsRaw<EntryDoc>({
-                    keys: idsBatch,
-                    include_docs: true,
-                    limit: 100,
-                });
-                const docs = ret.rows
-                    .filter((e) => e.doc)
-                    .map((e) => e.doc) as PouchDB.Core.ExistingDocument<EntryDoc>[];
-                const errors = ret.rows.filter((e) => !e.doc && !e.value.deleted);
-                if (errors.length > 0) {
-                    Logger("Some queued processes were not resurrected");
-                    Logger(JSON.stringify(errors), LOG_LEVEL_VERBOSE);
-                }
-                this.replicationResultProcessor.enqueueAll(docs);
-            }
-        } catch (e) {
-            Logger(`Failed to load queued files.`, LOG_LEVEL_NOTICE);
-            Logger(e, LOG_LEVEL_VERBOSE);
-        } finally {
-            // Check again before awaiting.
-            if (this.replicationResultProcessor.isSuspended) {
-                this.replicationResultProcessor.resume();
-            }
-        }
-        // Wait for all queued files to be processed.
-        try {
-            await this.replicationResultProcessor.waitForAllProcessed();
-        } catch (e) {
-            Logger(`Failed to wait for all queued files to be processed.`, LOG_LEVEL_NOTICE);
-            Logger(e, LOG_LEVEL_VERBOSE);
-        }
-    }
-
-    replicationResultProcessor = new QueueProcessor(
-        async (docs: PouchDB.Core.ExistingDocument<EntryDoc>[]) => {
-            if (this.settings.suspendParseReplicationResult) return;
-            const change = docs[0];
-            if (!change) return;
-            if (isChunk(change._id)) {
-                this.localDatabase.onNewLeaf(change as EntryLeaf);
-                return;
-            }
-            if (await this.services.replication.processVirtualDocument(change)) return;
-            // Does any add-on need this item?
-            // for (const proc of this.core.addOns) {
-            //     if (await proc.parseReplicationResultItem(change)) {
-            //         return;
-            //     }
-            // }
-            if (change.type == "versioninfo") {
-                if (change.version > VER) {
-                    this.core.replicator.closeReplication();
-                    Logger(
-                        `Remote database updated to an incompatible version. Update your Self-hosted LiveSync plugin.`,
-                        LOG_LEVEL_NOTICE
-                    );
-                }
-                return;
-            }
-            if (
-                change._id == SYNCINFO_ID || // Synchronisation information data
-                change._id.startsWith("_design") // design document
-            ) {
-                return;
-            }
-            if (isAnyNote(change)) {
-                const docPath = getPath(change);
-                if (!(await this.services.vault.isTargetFile(docPath))) {
-                    Logger(`Skipped: ${docPath}`, LOG_LEVEL_VERBOSE);
-                    return;
-                }
-                if (this.databaseQueuedProcessor._isSuspended) {
-                    Logger(`Processing scheduled: ${docPath}`, LOG_LEVEL_INFO);
-                }
-                const size = change.size;
-                if (this.services.vault.isFileSizeTooLarge(size)) {
-                    Logger(
-                        `Processing ${docPath} has been skipped due to file size exceeding the limit`,
-                        LOG_LEVEL_NOTICE
-                    );
-                    return;
-                }
-                this.databaseQueuedProcessor.enqueue(change);
-            }
-            return;
-        },
-        {
-            batchSize: 1,
-            suspended: true,
-            concurrentLimit: 100,
-            delay: 0,
-            totalRemainingReactiveSource: this.core.replicationResultCount,
-        }
-    )
-        .replaceEnqueueProcessor((queue, newItem) => {
-            const q = queue.filter((e) => e._id != newItem._id);
-            return [...q, newItem];
-        })
-        .startPipeline()
-        .onUpdateProgress(() => {
-            this.saveQueuedFiles();
-        });
-
-    async checkIsChangeRequiredForDatabaseProcessing(dbDoc: LoadedEntry): Promise<boolean> {
-        const path = getPath(dbDoc);
-        try {
-            const savedDoc = await this.localDatabase.getRaw<LoadedEntry>(dbDoc._id, {
-                conflicts: true,
-                revs_info: true,
-            });
-            const newRev = dbDoc._rev ?? "";
-            const latestRev = savedDoc._rev ?? "";
-            const revisions = savedDoc._revs_info?.map((e) => e.rev) ?? [];
-            if (savedDoc._conflicts && savedDoc._conflicts.length > 0) {
-                // There are conflicts, so we have to process it.
-                return true;
-            }
-            if (newRev == latestRev) {
-                // The latest revision. We need to process it.
-                return true;
-            }
-            const index = revisions.indexOf(newRev);
-            if (index >= 0) {
-                // The revision has been inserted before.
-                return false; // Already processed.
-            }
-            return true; // This mostly should not happen, but we have to process it just in case.
-        } catch (e: any) {
-            if ("status" in e && e.status == 404) {
-                // Not existing, so we have to process it.
-                return true;
-            } else {
-                Logger(
-                    `Failed to get existing document for ${path} (${dbDoc._id.substring(0, 8)}, ${dbDoc._rev?.substring(0, 10)})`,
-                    LOG_LEVEL_NOTICE
-                );
-                Logger(e, LOG_LEVEL_VERBOSE);
-                return true;
-            }
-        }
-        return true;
-    }
-
-    databaseQueuedProcessor = new QueueProcessor(
-        async (docs: EntryBody[]) => {
-            const dbDoc = docs[0] as LoadedEntry; // It has no `data`.
-            const path = getPath(dbDoc);
-            // If the document already exists with any revision, confirm whether we have to process it.
-            const isRequired = await this.checkIsChangeRequiredForDatabaseProcessing(dbDoc);
-            if (!isRequired) {
-                Logger(`Skipped (Not latest): ${path} (${dbDoc._id.substring(0, 8)})`, LOG_LEVEL_VERBOSE);
-                return;
-            }
-            // If `Read chunks online` is disabled, chunks should have been transferred before this point.
-            // However, in some cases chunks arrive afterwards. So, if chunks are missing, we have to wait for them.
-            const doc = await this.localDatabase.getDBEntryFromMeta({ ...dbDoc }, false, true);
-            if (!doc) {
-                Logger(
-                    `Something went wrong while gathering content of ${path} (${dbDoc._id.substring(0, 8)}, ${dbDoc._rev?.substring(0, 10)})`,
-                    LOG_LEVEL_NOTICE
-                );
-                return;
-            }
-
-            if (await this.services.replication.processOptionalSynchroniseResult(dbDoc)) {
-                // Already processed.
-            } else if (isValidPath(getPath(doc))) {
-                this.storageApplyingProcessor.enqueue(doc as MetaEntry);
-            } else {
-                Logger(`Skipped: ${path} (${doc._id.substring(0, 8)})`, LOG_LEVEL_VERBOSE);
-            }
-            return;
-        },
-        {
-            suspended: true,
-            batchSize: 1,
-            concurrentLimit: 10,
-            yieldThreshold: 1,
-            delay: 0,
-            totalRemainingReactiveSource: this.core.databaseQueueCount,
-        }
-    )
-        .replaceEnqueueProcessor((queue, newItem) => {
-            const q = queue.filter((e) => e._id != newItem._id);
-            return [...q, newItem];
-        })
-        .startPipeline();
-
-    storageApplyingProcessor = new QueueProcessor(
-        async (docs: MetaEntry[]) => {
-            const entry = docs[0];
-            await this.services.replication.processSynchroniseResult(entry);
-            return;
-        },
-        {
-            suspended: true,
-            batchSize: 1,
-            concurrentLimit: 6,
-            yieldThreshold: 1,
-            delay: 0,
-            totalRemainingReactiveSource: this.core.storageApplyingCount,
-        }
-    )
-        .replaceEnqueueProcessor((queue, newItem) => {
-            const q = queue.filter((e) => e._id != newItem._id);
-            return [...q, newItem];
-        })
-        .startPipeline();

     _everyBeforeSuspendProcess(): Promise<boolean> {
         this.core.replicator?.closeReplication();
@@ -577,18 +329,19 @@ Even if you choose to clean up, you will see this option again if you exit Obsid
     }

     onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
-        services.replicator.handleGetActiveReplicator(this._getReplicator.bind(this));
-        services.databaseEvents.handleOnDatabaseInitialisation(this._everyOnInitializeDatabase.bind(this));
-        services.databaseEvents.handleOnResetDatabase(this._everyOnResetDatabase.bind(this));
-        services.appLifecycle.handleOnSettingLoaded(this._everyOnloadAfterLoadSettings.bind(this));
-        services.replication.handleParseSynchroniseResult(this._parseReplicationResult.bind(this));
-        services.appLifecycle.handleOnSuspending(this._everyBeforeSuspendProcess.bind(this));
-        services.replication.handleBeforeReplicate(this._everyBeforeReplicate.bind(this));
-        services.replication.handleIsReplicationReady(this._canReplicate.bind(this));
-        services.replication.handleReplicate(this._replicate.bind(this));
-        services.replication.handleReplicateByEvent(this._replicateByEvent.bind(this));
-        services.remote.handleReplicateAllToRemote(this._replicateAllToServer.bind(this));
-        services.remote.handleReplicateAllFromRemote(this._replicateAllFromServer.bind(this));
-        services.appLifecycle.reportUnresolvedMessages(this._reportUnresolvedMessages.bind(this));
+        services.replicator.getActiveReplicator.setHandler(this._getReplicator.bind(this));
+        services.databaseEvents.onDatabaseInitialisation.addHandler(this._everyOnInitializeDatabase.bind(this));
+        services.databaseEvents.onDatabaseInitialised.addHandler(this._everyOnDatabaseInitialized.bind(this));
+        services.databaseEvents.onResetDatabase.addHandler(this._everyOnResetDatabase.bind(this));
+        services.appLifecycle.onSettingLoaded.addHandler(this._everyOnloadAfterLoadSettings.bind(this));
+        services.replication.parseSynchroniseResult.setHandler(this._parseReplicationResult.bind(this));
+        services.appLifecycle.onSuspending.addHandler(this._everyBeforeSuspendProcess.bind(this));
+        services.replication.onBeforeReplicate.addHandler(this._everyBeforeReplicate.bind(this));
+        services.replication.isReplicationReady.setHandler(this._canReplicate.bind(this));
+        services.replication.replicate.setHandler(this._replicate.bind(this));
+        services.replication.replicateByEvent.setHandler(this._replicateByEvent.bind(this));
+        services.remote.replicateAllToRemote.setHandler(this._replicateAllToServer.bind(this));
+        services.remote.replicateAllFromRemote.setHandler(this._replicateAllFromServer.bind(this));
+        services.appLifecycle.getUnresolvedMessages.addHandler(this._reportUnresolvedMessages.bind(this));
     }
 }
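Note: this registration block completes the replacement of the old `loadQueuedFiles` / `saveQueuedFiles` pair: instead of persisting queued document IDs and re-fetching them on startup, the new `ReplicateResultProcessor` (listed in full below) snapshots both queued and in-flight changes into the KV store and restores them once after the database initialises. A minimal sketch of that persist-and-restore idea (the KV interface is an assumption):

    type KV = {
        get<T>(key: string): Promise<T | undefined>;
        set<T>(key: string, value: T): Promise<void>;
    };

    class PersistentQueue<T> {
        private queued: T[] = [];
        constructor(private kv: KV, private key: string) {}
        async push(item: T) {
            this.queued.push(item);
            await this.kv.set(this.key, this.queued); // snapshot after every mutation (throttled in practice)
        }
        async restoreOnce(): Promise<T[]> {
            const saved = (await this.kv.get<T[]>(this.key)) ?? [];
            this.queued = [...saved, ...this.queued]; // re-run interrupted items first
            return this.queued;
        }
    }

Snapshotting whole documents rather than bare IDs avoids the old failure mode in which a queued ID could no longer be resolved ("Some queued processes were not resurrected").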
@@ -36,7 +36,7 @@ export class ModuleReplicatorCouchDB extends AbstractModule {
         return Promise.resolve(true);
     }
     onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
-        services.replicator.handleGetNewReplicator(this._anyNewReplicator.bind(this));
-        services.appLifecycle.handleOnResumed(this._everyAfterResumeProcess.bind(this));
+        services.replicator.getNewReplicator.addHandler(this._anyNewReplicator.bind(this));
+        services.appLifecycle.onResumed.addHandler(this._everyAfterResumeProcess.bind(this));
     }
 }

@@ -13,6 +13,6 @@ export class ModuleReplicatorMinIO extends AbstractModule {
         return Promise.resolve(false);
     }
     onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
-        services.replicator.handleGetNewReplicator(this._anyNewReplicator.bind(this));
+        services.replicator.getNewReplicator.addHandler(this._anyNewReplicator.bind(this));
     }
 }

@@ -28,7 +28,7 @@ export class ModuleReplicatorP2P extends AbstractModule {
         return Promise.resolve(true);
     }
     onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
-        services.replicator.handleGetNewReplicator(this._anyNewReplicator.bind(this));
-        services.appLifecycle.handleOnResumed(this._everyAfterResumeProcess.bind(this));
+        services.replicator.getNewReplicator.addHandler(this._anyNewReplicator.bind(this));
+        services.appLifecycle.onResumed.addHandler(this._everyAfterResumeProcess.bind(this));
     }
 }

@@ -174,12 +174,12 @@ export class ModuleTargetFilter extends AbstractModule {
     }

     onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
-        services.vault.handleMarkFileListPossiblyChanged(this._markFileListPossiblyChanged.bind(this));
-        services.path.handleId2Path(this._id2path.bind(this));
-        services.path.handlePath2Id(this._path2id.bind(this));
-        services.appLifecycle.handleOnLoaded(this._everyOnload.bind(this));
-        services.vault.handleIsFileSizeTooLarge(this._isFileSizeExceeded.bind(this));
-        services.vault.handleIsIgnoredByIgnoreFile(this._isIgnoredByIgnoreFiles.bind(this));
-        services.vault.handleIsTargetFile(this._isTargetFile.bind(this));
+        services.vault.markFileListPossiblyChanged.setHandler(this._markFileListPossiblyChanged.bind(this));
+        services.path.id2path.setHandler(this._id2path.bind(this));
+        services.path.path2id.setHandler(this._path2id.bind(this));
+        services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
+        services.vault.isFileSizeTooLarge.setHandler(this._isFileSizeExceeded.bind(this));
+        services.vault.isIgnoredByIgnoreFile.setHandler(this._isIgnoredByIgnoreFiles.bind(this));
+        services.vault.isTargetFile.setHandler(this._isTargetFile.bind(this));
     }
 }
480	src/modules/core/ReplicateResultProcessor.ts (new file)
@@ -0,0 +1,480 @@
|
||||
import {
|
||||
SYNCINFO_ID,
|
||||
VER,
|
||||
type AnyEntry,
|
||||
type EntryDoc,
|
||||
type EntryLeaf,
|
||||
type LoadedEntry,
|
||||
type MetaEntry,
|
||||
} from "@/lib/src/common/types";
|
||||
import type { ModuleReplicator } from "./ModuleReplicator";
|
||||
import { getPath, isChunk, isValidPath } from "@/common/utils";
|
||||
import type { LiveSyncCore } from "@/main";
|
||||
import {
|
||||
LOG_LEVEL_DEBUG,
|
||||
LOG_LEVEL_INFO,
|
||||
LOG_LEVEL_NOTICE,
|
||||
LOG_LEVEL_VERBOSE,
|
||||
Logger,
|
||||
type LOG_LEVEL,
|
||||
} from "@/lib/src/common/logger";
|
||||
import { fireAndForget, isAnyNote, throttle } from "@/lib/src/common/utils";
|
||||
import { Semaphore } from "octagonal-wheels/concurrency/semaphore_v2";
|
||||
import { serialized } from "octagonal-wheels/concurrency/lock";
|
||||
import type { ReactiveSource } from "octagonal-wheels/dataobject/reactive_v2";
|
||||
|
||||
const KV_KEY_REPLICATION_RESULT_PROCESSOR_SNAPSHOT = "replicationResultProcessorSnapshot";
|
||||
type ReplicateResultProcessorState = {
|
||||
queued: PouchDB.Core.ExistingDocument<EntryDoc>[];
|
||||
processing: PouchDB.Core.ExistingDocument<EntryDoc>[];
|
||||
};
|
||||
function shortenId(id: string): string {
|
||||
return id.length > 10 ? id.substring(0, 10) : id;
|
||||
}
|
||||
function shortenRev(rev: string | undefined): string {
|
||||
if (!rev) return "undefined";
|
||||
return rev.length > 10 ? rev.substring(0, 10) : rev;
|
||||
}
|
||||
export class ReplicateResultProcessor {
|
||||
private log(message: string, level: LOG_LEVEL = LOG_LEVEL_INFO) {
|
||||
Logger(`[ReplicateResultProcessor] ${message}`, level);
|
||||
}
|
||||
private logError(e: any) {
|
||||
Logger(e, LOG_LEVEL_VERBOSE);
|
||||
}
|
||||
private replicator: ModuleReplicator;
|
||||
|
||||
constructor(replicator: ModuleReplicator) {
|
||||
this.replicator = replicator;
|
||||
}
|
||||
|
||||
get localDatabase() {
|
||||
return this.replicator.core.localDatabase;
|
||||
}
|
||||
get services() {
|
||||
return this.replicator.core.services;
|
||||
}
|
||||
get core(): LiveSyncCore {
|
||||
return this.replicator.core;
|
||||
}
|
||||
|
||||
public suspend() {
|
||||
this._suspended = true;
|
||||
}
|
||||
public resume() {
|
||||
this._suspended = false;
|
||||
fireAndForget(() => this.runProcessQueue());
|
||||
}
|
||||
|
||||
// Whether the processing is suspended
|
||||
// If true, the processing queue processor bails the loop.
|
||||
private _suspended: boolean = false;
|
||||
|
||||
public get isSuspended() {
|
||||
return (
|
||||
this._suspended ||
|
||||
!this.core.services.appLifecycle.isReady ||
|
||||
this.replicator.settings.suspendParseReplicationResult ||
|
||||
this.core.services.appLifecycle.isSuspended()
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Take a snapshot of the current processing state.
|
||||
* This snapshot is stored in the KV database for recovery on restart.
|
||||
*/
|
||||
protected async _takeSnapshot() {
|
||||
const snapshot = {
|
||||
queued: this._queuedChanges.slice(),
|
||||
processing: this._processingChanges.slice(),
|
||||
} satisfies ReplicateResultProcessorState;
|
||||
await this.core.kvDB.set(KV_KEY_REPLICATION_RESULT_PROCESSOR_SNAPSHOT, snapshot);
|
||||
this.log(
|
||||
`Snapshot taken. Queued: ${snapshot.queued.length}, Processing: ${snapshot.processing.length}`,
|
||||
LOG_LEVEL_DEBUG
|
||||
);
|
||||
this.reportStatus();
|
||||
}
|
||||
/**
|
||||
* Trigger taking a snapshot.
|
||||
*/
|
||||
protected _triggerTakeSnapshot() {
|
||||
fireAndForget(() => this._takeSnapshot());
|
||||
}
|
||||
/**
|
||||
* Throttled version of triggerTakeSnapshot.
|
||||
*/
|
||||
protected triggerTakeSnapshot = throttle(() => this._triggerTakeSnapshot(), 50);
|
||||
|
||||
/**
|
||||
* Restore from snapshot.
|
||||
*/
|
||||
public async restoreFromSnapshot() {
|
||||
const snapshot = await this.core.kvDB.get<ReplicateResultProcessorState>(
|
||||
KV_KEY_REPLICATION_RESULT_PROCESSOR_SNAPSHOT
|
||||
);
|
||||
if (snapshot) {
|
||||
// Restoring the snapshot re-runs processing for both queued and processing items.
|
||||
const newQueue = [...snapshot.processing, ...snapshot.queued, ...this._queuedChanges];
|
||||
this._queuedChanges = [];
|
||||
this.enqueueAll(newQueue);
|
||||
this.log(
|
||||
`Restored from snapshot (${snapshot.processing.length + snapshot.queued.length} items)`,
|
||||
LOG_LEVEL_INFO
|
||||
);
|
||||
// await this._takeSnapshot();
|
||||
}
|
||||
}
|
||||
|
||||
private _restoreFromSnapshot: Promise<void> | undefined = undefined;
|
||||
|
||||
/**
|
||||
* Restore from snapshot only once.
|
||||
* @returns Promise that resolves when restoration is complete.
|
||||
*/
|
||||
public restoreFromSnapshotOnce() {
|
||||
if (!this._restoreFromSnapshot) {
|
||||
this._restoreFromSnapshot = this.restoreFromSnapshot();
|
||||
}
|
||||
return this._restoreFromSnapshot;
|
||||
}
|
||||
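`restoreFromSnapshotOnce` is promise memoization: the first caller kicks off the restore, and every later caller awaits the same in-flight promise. The idiom in isolation (a hypothetical `init` used purely for illustration):

let inFlight: Promise<void> | undefined;
function runOnce(init: () => Promise<void>): Promise<void> {
    // Reuse the first invocation's promise; concurrent callers share it.
    if (!inFlight) inFlight = init();
    return inFlight;
}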

    /**
     * Perform the given procedure while counting the concurrency.
     * @param proc async procedure to perform
     * @param countValue reactive source to count concurrency
     * @returns result of the procedure
     */
    async withCounting<T>(proc: () => Promise<T>, countValue: ReactiveSource<number>) {
        countValue.value++;
        try {
            return await proc();
        } finally {
            countValue.value--;
        }
    }
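`withCounting` is a try/finally wrapper that keeps a reactive counter equal to the number of in-flight invocations, which the status display can observe. A hedged usage sketch (assuming only that `ReactiveSource` carries a numeric `value`, as the import above suggests):

// Hypothetical counter; in the plugin this is e.g. core.databaseQueueCount.
const inFlight = { value: 0 } as ReactiveSource<number>;
await processor.withCounting(async () => {
    // Long-running work; inFlight.value is 1 while this runs.
}, inFlight);
// inFlight.value is back to 0 here, even if the work threw.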

    /**
     * Report the current status.
     */
    protected reportStatus() {
        this.core.replicationResultCount.value = this._queuedChanges.length + this._processingChanges.length;
    }

    /**
     * Enqueue all the given changes for processing.
     * @param changes Changes to enqueue
     */

    public enqueueAll(changes: PouchDB.Core.ExistingDocument<EntryDoc>[]) {
        for (const change of changes) {
            // Check if the change is a non-document change (e.g., chunk, versioninfo, syncinfo), and if so, process it directly.
            const isProcessed = this.processIfNonDocumentChange(change);
            if (!isProcessed) {
                this.enqueueChange(change);
            }
        }
    }
    /**
     * Process the change if it is not a document change.
     * @param change Change to process
     * @returns True if the change was processed; false otherwise
     */
    protected processIfNonDocumentChange(change: PouchDB.Core.ExistingDocument<EntryDoc>) {
        if (!change) {
            this.log(`Received empty change`, LOG_LEVEL_VERBOSE);
            return true;
        }
        if (isChunk(change._id)) {
            // Emit event for new chunk
            this.localDatabase.onNewLeaf(change as EntryLeaf);
            this.log(`Processed chunk: ${shortenId(change._id)}`, LOG_LEVEL_DEBUG);
            return true;
        }
        if (change.type == "versioninfo") {
            this.log(`Version info document received: ${change._id}`, LOG_LEVEL_VERBOSE);
            if (change.version > VER) {
                // Incompatible version, stop replication.
                this.core.replicator.closeReplication();
                this.log(
                    `Remote database updated to an incompatible version. Please update your Self-hosted LiveSync plugin.`,
                    LOG_LEVEL_NOTICE
                );
            }
            return true;
        }
        if (
            change._id == SYNCINFO_ID || // Synchronisation information data
            change._id.startsWith("_design") // design document
        ) {
            this.log(`Skipped system document: ${change._id}`, LOG_LEVEL_VERBOSE);
            return true;
        }
        return false;
    }

    /**
     * Queue of changes to be processed.
     */
    private _queuedChanges: PouchDB.Core.ExistingDocument<EntryDoc>[] = [];

    /**
     * List of changes being processed.
     */
    private _processingChanges: PouchDB.Core.ExistingDocument<EntryDoc>[] = [];

    /**
     * Enqueue the given document change for processing.
     * @param doc Document change to enqueue
     * @returns
     */
    protected enqueueChange(doc: PouchDB.Core.ExistingDocument<EntryDoc>) {
        const old = this._queuedChanges.find((e) => e._id == doc._id);
        const path = "path" in doc ? getPath(doc) : "<unknown>";
        const docNote = `${path} (${shortenId(doc._id)}, ${shortenRev(doc._rev)})`;
        if (old) {
            if (old._rev == doc._rev) {
                this.log(`[Enqueue] skipped (Already queued): ${docNote}`, LOG_LEVEL_VERBOSE);
                return;
            }

            const oldRev = old._rev ?? "";
            const isDeletedBefore = old._deleted === true || ("deleted" in old && old.deleted === true);
            const isDeletedNow = doc._deleted === true || ("deleted" in doc && doc.deleted === true);

            // Replace the old queued change (this effectively batches updates: processing always runs with the latest version, so we can simply replace the entry when the change is of the same type).
            if (isDeletedBefore === isDeletedNow) {
                this._queuedChanges = this._queuedChanges.filter((e) => e._id != doc._id);
                this.log(`[Enqueue] requeued: ${docNote} (from rev: ${shortenRev(oldRev)})`, LOG_LEVEL_VERBOSE);
            }
        }
        // Enqueue the change
        this._queuedChanges.push(doc);
        this.triggerTakeSnapshot();
        this.triggerProcessQueue();
    }
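The net effect of `enqueueChange` is revision coalescing: while a document sits in the queue, a newer revision of the same kind (both deletions, or both updates) replaces the older entry instead of queueing twice, because processing always reads the latest state anyway. Roughly, as a simplified standalone sketch:

type Queued = { _id: string; deleted: boolean };
function coalesce(queue: Queued[], incoming: Queued): Queued[] {
    const prev = queue.find((e) => e._id === incoming._id);
    // Only drop the previous entry when the deletion state matches; a delete
    // following an update (or vice versa) must keep both, in order.
    const next =
        prev && prev.deleted === incoming.deleted
            ? queue.filter((e) => e._id !== incoming._id)
            : queue;
    return [...next, incoming];
}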

    /**
     * Trigger processing of the queued changes.
     */
    protected triggerProcessQueue() {
        fireAndForget(() => this.runProcessQueue());
    }

    /**
     * Semaphore to limit concurrent processing.
     * This complements the per-id serialisation with a concurrency cap (max 10 documents being processed at the same time).
     */
    private _semaphore = Semaphore(10);

    /**
     * Flag indicating whether the process queue is currently running.
     */
    private _isRunningProcessQueue: boolean = false;

    /**
     * Process the queued changes.
     */
    private async runProcessQueue() {
        // Avoid re-entrance, processing while suspended, or spinning the loop on an empty queue.
        if (this._isRunningProcessQueue) return;
        if (this.isSuspended) return;
        if (this._queuedChanges.length == 0) return;
        try {
            this._isRunningProcessQueue = true;
            while (this._queuedChanges.length > 0) {
                // If suspended, bail out of the loop. Some concurrent tasks may still be running.
                if (this.isSuspended) {
                    this.log(
                        `Processing has been suspended. Remaining items in queue: ${this._queuedChanges.length}`,
                        LOG_LEVEL_INFO
                    );
                    break;
                }

                // Acquire semaphore for new processing slot
                // (per-document serialisation caps concurrency).
                const releaser = await this._semaphore.acquire();
                releaser();
                // Dequeue the next change
                const doc = this._queuedChanges.shift();
                if (doc) {
                    this._processingChanges.push(doc);
                    void this.parseDocumentChange(doc);
                }
                // Take snapshot (to be restored on next startup if needed)
                this.triggerTakeSnapshot();
            }
        } finally {
            this._isRunningProcessQueue = false;
        }
    }
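Note the acquire-then-release at the top of the loop: the slot is not held for the whole parse; it only blocks dequeuing while all ten slots are taken by `applyToDatabase` calls, which re-acquire a slot around the actual work. A stripped-down version of this backpressure idiom, assuming the same octagonal-wheels Semaphore API (acquire resolves to a releaser function):

import { Semaphore } from "octagonal-wheels/concurrency/semaphore_v2";

const gate = Semaphore(10);
async function drain(jobs: (() => Promise<void>)[]) {
    while (jobs.length > 0) {
        // Block only while all slots are busy, then release immediately;
        // each worker re-acquires a slot around its own work.
        (await gate.acquire())();
        const job = jobs.shift()!;
        void (async () => {
            const release = await gate.acquire();
            try {
                await job();
            } finally {
                release();
            }
        })();
    }
}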

    // Phase 1: parse replication result
    /**
     * Parse the given document change.
     * @param change
     * @returns
     */
    async parseDocumentChange(change: PouchDB.Core.ExistingDocument<EntryDoc>) {
        try {
            if (isAnyNote(change)) {
                const docMtime = change.mtime ?? 0;
                const maxMTime = this.replicator.settings.maxMTimeForReflectEvents;
                if (maxMTime > 0 && docMtime > maxMTime) {
                    const docPath = getPath(change);
                    this.log(
                        `Processing ${docPath} has been skipped due to modification time (${new Date(
                            docMtime * 1000
                        ).toISOString()}) exceeding the limit`,
                        LOG_LEVEL_INFO
                    );
                    return;
                }
            }
            // If the document is a virtual document, process it in the virtual document processor.
            if (await this.services.replication.processVirtualDocument(change)) return;
            // If the document is a note, check whether it is a target file and within the size limit.
            if (isAnyNote(change)) {
                const docPath = getPath(change);
                if (!(await this.services.vault.isTargetFile(docPath))) {
                    this.log(`Skipped: ${docPath}`, LOG_LEVEL_VERBOSE);
                    return;
                }
                const size = change.size;
                // Note that this size check relies on the size recorded in the metadata, not the actual content size.
                if (this.services.vault.isFileSizeTooLarge(size)) {
                    this.log(
                        `Processing ${docPath} has been skipped due to file size exceeding the limit`,
                        LOG_LEVEL_NOTICE
                    );
                    return;
                }
                return await this.applyToDatabase(change);
            }
            this.log(`Skipped unexpected non-note document: ${change._id}`, LOG_LEVEL_INFO);
            return;
        } finally {
            // Remove from processing queue
            this._processingChanges = this._processingChanges.filter((e) => e !== change);
            this.triggerTakeSnapshot();
        }
    }

    // Phase 2: apply the document to database
    protected applyToDatabase(doc: PouchDB.Core.ExistingDocument<AnyEntry>) {
        return this.withCounting(async () => {
            let releaser: Awaited<ReturnType<typeof this._semaphore.acquire>> | undefined = undefined;
            try {
                releaser = await this._semaphore.acquire();
                await this._applyToDatabase(doc);
            } catch (e) {
                this.log(`Error while processing replication result`, LOG_LEVEL_NOTICE);
                this.logError(e);
            } finally {
                // Release the semaphore slot (removing it from the "in-progress" set, so the snapshot will not include it)
                if (releaser) {
                    releaser();
                }
            }
        }, this.replicator.core.databaseQueueCount);
    }
    // Phase 2.1: process the document and apply to storage
    // This function is serialised per document to avoid race conditions on the same document.
    private _applyToDatabase(doc_: PouchDB.Core.ExistingDocument<AnyEntry>) {
        const dbDoc = doc_ as LoadedEntry; // It has no `data`
        const path = getPath(dbDoc);
        return serialized(`replication-process:${dbDoc._id}`, async () => {
            const docNote = `${path} (${shortenId(dbDoc._id)}, ${shortenRev(dbDoc._rev)})`;
            const isRequired = await this.checkIsChangeRequiredForDatabaseProcessing(dbDoc);
            if (!isRequired) {
                this.log(`Skipped (Not latest): ${docNote}`, LOG_LEVEL_VERBOSE);
                return;
            }
            // If `Read chunks online` is disabled, chunks should have been transferred before this point.
            // However, in some cases chunks arrive afterwards. So, if chunks are missing, we have to wait for them.
            // (If `Use Only Local Chunks` is enabled, we should not attempt to fetch chunks online automatically).

            const isDeleted = dbDoc._deleted === true || ("deleted" in dbDoc && dbDoc.deleted === true);
            // Gather full document if not deleted
            const doc = isDeleted
                ? { ...dbDoc, data: "" }
                : await this.localDatabase.getDBEntryFromMeta({ ...dbDoc }, false, true);
            if (!doc) {
                // Failed to gather content
                this.log(`Failed to gather content of ${docNote}`, LOG_LEVEL_NOTICE);
                return;
            }
            // Check if another processor wants to handle this document; if so, skip processing here.
            if (await this.services.replication.processOptionalSynchroniseResult(dbDoc)) {
                // Already processed
                this.log(`Processed by other processor: ${docNote}`, LOG_LEVEL_DEBUG);
            } else if (isValidPath(getPath(doc))) {
                // Apply to storage if the path is valid
                await this.applyToStorage(doc as MetaEntry);
                this.log(`Processed: ${docNote}`, LOG_LEVEL_DEBUG);
            } else {
                // Should be processed, but has an invalid path
                this.log(`Unprocessed (Invalid path): ${docNote}`, LOG_LEVEL_VERBOSE);
            }
            return;
        });
    }
    /**
     * Phase 3: Apply the given entry to storage.
     * @param entry
     * @returns
     */
    protected applyToStorage(entry: MetaEntry) {
        return this.withCounting(async () => {
            await this.services.replication.processSynchroniseResult(entry);
        }, this.replicator.core.storageApplyingCount);
    }

    /**
     * Check whether processing is required for the given document.
     * @param dbDoc Document to check
     * @returns True if processing is required; false otherwise
     */
    protected async checkIsChangeRequiredForDatabaseProcessing(dbDoc: LoadedEntry): Promise<boolean> {
        const path = getPath(dbDoc);
        try {
            const savedDoc = await this.localDatabase.getRaw<LoadedEntry>(dbDoc._id, {
                conflicts: true,
                revs_info: true,
            });
            const newRev = dbDoc._rev ?? "";
            const latestRev = savedDoc._rev ?? "";
            const revisions = savedDoc._revs_info?.map((e) => e.rev) ?? [];
            if (savedDoc._conflicts && savedDoc._conflicts.length > 0) {
                // There are conflicts, so we have to process it.
                // (It may be auto-resolved, or user intervention may be required).
                return true;
            }
            if (newRev == latestRev) {
                // This is the latest revision; we can simply process it.
                return true;
            }
            const index = revisions.indexOf(newRev);
            if (index >= 0) {
                // The revision has been inserted before.
                return false; // The document has already been processed (while no conflict existed).
            }
            return true; // This mostly should not happen, but we have to process it just in case.
        } catch (e: any) {
            if ("status" in e && e.status == 404) {
                // getRaw failed because the document does not exist; this should not normally happen during replication.
                // If the processing was triggered for some other reason, we **probably** have to process it.
                // Note that this is not a common case.
                return true;
            } else {
                this.log(
                    `Failed to get existing document for ${path} (${shortenId(dbDoc._id)}, ${shortenRev(dbDoc._rev)}) `,
                    LOG_LEVEL_NOTICE
                );
                this.logError(e);
                return false;
            }
        }
    }
}
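`checkIsChangeRequiredForDatabaseProcessing` leans on PouchDB's `revs_info`: if the incoming `_rev` already appears in the stored revision history and no conflict is open, the change was applied earlier and can be dropped. The same test as a standalone sketch over a PouchDB-style document:

type RevInfo = { rev: string };
type Saved = { _rev?: string; _conflicts?: string[]; _revs_info?: RevInfo[] };
function isAlreadyApplied(incomingRev: string, saved: Saved): boolean {
    if (saved._conflicts && saved._conflicts.length > 0) return false; // conflicts always need work
    if (incomingRev === (saved._rev ?? "")) return false; // the latest revision: process it
    const history = saved._revs_info?.map((e) => e.rev) ?? [];
    return history.includes(incomingRev); // an ancestor revision: already processed
}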
@@ -75,8 +75,8 @@ export class ModuleConflictChecker extends AbstractModule {
        }
    );
    onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
        services.conflict.handleQueueCheckForIfOpen(this._queueConflictCheckIfOpen.bind(this));
        services.conflict.handleQueueCheckFor(this._queueConflictCheck.bind(this));
        services.conflict.handleEnsureAllProcessed(this._waitForAllConflictProcessed.bind(this));
        services.conflict.queueCheckForIfOpen.setHandler(this._queueConflictCheckIfOpen.bind(this));
        services.conflict.queueCheckFor.setHandler(this._queueConflictCheck.bind(this));
        services.conflict.ensureAllProcessed.setHandler(this._waitForAllConflictProcessed.bind(this));
    }
}

@@ -213,8 +213,8 @@ export class ModuleConflictResolver extends AbstractModule {
    }

    onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
        services.conflict.handleResolveByDeletingRevision(this._resolveConflictByDeletingRev.bind(this));
        services.conflict.handleResolve(this._resolveConflict.bind(this));
        services.conflict.handleResolveByNewest(this._anyResolveConflictByNewest.bind(this));
        services.conflict.resolveByDeletingRevision.setHandler(this._resolveConflictByDeletingRev.bind(this));
        services.conflict.resolve.setHandler(this._resolveConflict.bind(this));
        services.conflict.resolveByNewest.setHandler(this._anyResolveConflictByNewest.bind(this));
    }
}

@@ -3,6 +3,7 @@ import { normalizePath } from "../../deps.ts";
import {
    FlagFilesHumanReadable,
    FlagFilesOriginal,
    REMOTE_MINIO,
    TweakValuesShouldMatchedTemplate,
    type ObsidianLiveSyncSettings,
} from "../../lib/src/common/types.ts";
@@ -86,7 +87,7 @@ export class ModuleRedFlag extends AbstractModule {
        const remoteTweaks = await this.services.tweakValue.fetchRemotePreferred(config);
        if (!remoteTweaks) {
            const choice = await this.core.confirm.askSelectStringDialogue(
                "Could not fetch remote configuration. What do you want to do?",
                "Could not fetch configuration from remote. If you are new to Self-hosted LiveSync, this might be expected. If not, you should check your network or server settings.",
                [SKIP_FETCH, RETRY_FETCH] as const,
                {
                    defaultAction: RETRY_FETCH,
@@ -203,12 +204,13 @@ export class ModuleRedFlag extends AbstractModule {
            return false;
        }
        const { vault, extra } = method;

        // If the remote is MinIO, makeLocalChunkBeforeSync is not available (because there is no deduplication on sending).
        const makeLocalChunkBeforeSyncAvailable = this.settings.remoteType !== REMOTE_MINIO;
        const mapVaultStateToAction = {
            identical: {
                // If both are identical, there is no need to make local files/chunks before sync;
                // just for efficiency, chunks should be made before sync.
                makeLocalChunkBeforeSync: true,
                makeLocalChunkBeforeSync: makeLocalChunkBeforeSyncAvailable,
                makeLocalFilesBeforeSync: false,
            },
            independent: {
@@ -320,6 +322,6 @@ export class ModuleRedFlag extends AbstractModule {
    }
    onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
        super.onBindFunction(core, services);
        services.appLifecycle.handleLayoutReady(this._everyOnLayoutReady.bind(this));
        services.appLifecycle.onLayoutReady.addHandler(this._everyOnLayoutReady.bind(this));
    }
}

@@ -15,8 +15,8 @@ export class ModuleRemoteGovernor extends AbstractModule {
        return await this.core.replicator.markRemoteResolved(this.settings);
    }
    onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
        services.remote.handleMarkLocked(this._markRemoteLocked.bind(this));
        services.remote.handleMarkUnlocked(this._markRemoteUnlocked.bind(this));
        services.remote.handleMarkResolved(this._markRemoteResolved.bind(this));
        services.remote.markLocked.setHandler(this._markRemoteLocked.bind(this));
        services.remote.markUnlocked.setHandler(this._markRemoteUnlocked.bind(this));
        services.remote.markResolved.setHandler(this._markRemoteResolved.bind(this));
    }
}

@@ -285,11 +285,15 @@ export class ModuleResolvingMismatchedTweaks extends AbstractModule {
    }

    onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
        services.tweakValue.handleFetchRemotePreferred(this._fetchRemotePreferredTweakValues.bind(this));
        services.tweakValue.handleCheckAndAskResolvingMismatched(this._checkAndAskResolvingMismatchedTweaks.bind(this));
        services.tweakValue.handleAskResolvingMismatched(this._askResolvingMismatchedTweaks.bind(this));
        services.tweakValue.handleCheckAndAskUseRemoteConfiguration(this._checkAndAskUseRemoteConfiguration.bind(this));
        services.tweakValue.handleAskUseRemoteConfiguration(this._askUseRemoteConfiguration.bind(this));
        services.replication.handleCheckConnectionFailure(this._anyAfterConnectCheckFailed.bind(this));
        services.tweakValue.fetchRemotePreferred.setHandler(this._fetchRemotePreferredTweakValues.bind(this));
        services.tweakValue.checkAndAskResolvingMismatched.setHandler(
            this._checkAndAskResolvingMismatchedTweaks.bind(this)
        );
        services.tweakValue.askResolvingMismatched.setHandler(this._askResolvingMismatchedTweaks.bind(this));
        services.tweakValue.checkAndAskUseRemoteConfiguration.setHandler(
            this._checkAndAskUseRemoteConfiguration.bind(this)
        );
        services.tweakValue.askUseRemoteConfiguration.setHandler(this._askUseRemoteConfiguration.bind(this));
        services.replication.checkConnectionFailure.addHandler(this._anyAfterConnectCheckFailed.bind(this));
    }
}

@@ -1,4 +1,4 @@
import { TFile, TFolder, type ListedFiles } from "obsidian";
import { TFile, TFolder, type ListedFiles } from "@/deps.ts";
import { SerializedFileAccess } from "./storageLib/SerializedFileAccess";
import { AbstractObsidianModule } from "../AbstractObsidianModule.ts";
import { LOG_LEVEL_INFO, LOG_LEVEL_VERBOSE } from "octagonal-wheels/common/logger";
@@ -52,12 +52,16 @@ export class ModuleFileAccessObsidian extends AbstractObsidianModule implements
    }
    vaultAccess!: SerializedFileAccess;
    vaultManager: StorageEventManager = new StorageEventManagerObsidian(this.plugin, this.core, this);

    restoreState() {
        return this.vaultManager.restoreState();
    }
    private _everyOnload(): Promise<boolean> {
        this.core.storageAccess = this;
        return Promise.resolve(true);
    }
    _everyOnFirstInitialize(): Promise<boolean> {
        this.vaultManager.beginWatch();
    async _everyOnFirstInitialize(): Promise<boolean> {
        await this.vaultManager.beginWatch();
        return Promise.resolve(true);
    }

@@ -65,8 +69,8 @@ export class ModuleFileAccessObsidian extends AbstractObsidianModule implements
    //     this.vaultManager.flushQueue();
    // }

    _everyCommitPendingFileEvent(): Promise<boolean> {
        this.vaultManager.flushQueue();
    async _everyCommitPendingFileEvent(): Promise<boolean> {
        await this.vaultManager.waitForIdle();
        return Promise.resolve(true);
    }

@@ -382,11 +386,11 @@ export class ModuleFileAccessObsidian extends AbstractObsidianModule implements
        super(plugin, core);
    }
    onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
        services.vault.handleIsStorageInsensitive(this._isStorageInsensitive.bind(this));
        services.setting.handleShouldCheckCaseInsensitively(this._shouldCheckCaseInsensitive.bind(this));
        services.appLifecycle.handleFirstInitialise(this._everyOnFirstInitialize.bind(this));
        services.appLifecycle.handleOnInitialise(this._everyOnloadStart.bind(this));
        services.appLifecycle.handleOnLoaded(this._everyOnload.bind(this));
        services.fileProcessing.handleCommitPendingFileEvents(this._everyCommitPendingFileEvent.bind(this));
        services.vault.isStorageInsensitive.setHandler(this._isStorageInsensitive.bind(this));
        services.setting.shouldCheckCaseInsensitively.setHandler(this._shouldCheckCaseInsensitive.bind(this));
        services.appLifecycle.onFirstInitialise.addHandler(this._everyOnFirstInitialize.bind(this));
        services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
        services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
        services.fileProcessing.commitPendingFileEvents.addHandler(this._everyCommitPendingFileEvent.bind(this));
    }
}

@@ -1,118 +0,0 @@
// ModuleInputUIObsidian.ts
import { AbstractObsidianModule } from "../AbstractObsidianModule.ts";
import { scheduleTask } from "octagonal-wheels/concurrency/task";
import { disposeMemoObject, memoIfNotExist, memoObject, retrieveMemoObject } from "../../common/utils.ts";
import {
    askSelectString,
    askString,
    askYesNo,
    confirmWithMessage,
    confirmWithMessageWithWideButton,
} from "./UILib/dialogs.ts";
import { Notice } from "../../deps.ts";
import type { Confirm } from "../../lib/src/interfaces/Confirm.ts";
import { setConfirmInstance } from "../../lib/src/PlatformAPIs/obsidian/Confirm.ts";
import { $msg } from "src/lib/src/common/i18n.ts";
import type { LiveSyncCore } from "../../main.ts";

// This module cannot be a common module because it depends on Obsidian's API.
// However, we have to make a compatible one for other platforms.

export class ModuleInputUIObsidian extends AbstractObsidianModule implements Confirm {
    private _everyOnload(): Promise<boolean> {
        this.core.confirm = this;
        setConfirmInstance(this);
        return Promise.resolve(true);
    }

    askYesNo(message: string): Promise<"yes" | "no"> {
        return askYesNo(this.app, message);
    }
    askString(title: string, key: string, placeholder: string, isPassword: boolean = false): Promise<string | false> {
        return askString(this.app, title, key, placeholder, isPassword);
    }

    async askYesNoDialog(
        message: string,
        opt: { title?: string; defaultOption?: "Yes" | "No"; timeout?: number } = { title: "Confirmation" }
    ): Promise<"yes" | "no"> {
        const defaultTitle = $msg("moduleInputUIObsidian.defaultTitleConfirmation");
        const yesLabel = $msg("moduleInputUIObsidian.optionYes");
        const noLabel = $msg("moduleInputUIObsidian.optionNo");
        const defaultOption = opt.defaultOption === "Yes" ? yesLabel : noLabel;
        const ret = await confirmWithMessageWithWideButton(
            this.plugin,
            opt.title || defaultTitle,
            message,
            [yesLabel, noLabel],
            defaultOption,
            opt.timeout
        );
        return ret === yesLabel ? "yes" : "no";
    }

    askSelectString(message: string, items: string[]): Promise<string> {
        return askSelectString(this.app, message, items);
    }

    askSelectStringDialogue<T extends readonly string[]>(
        message: string,
        buttons: T,
        opt: { title?: string; defaultAction: T[number]; timeout?: number }
    ): Promise<T[number] | false> {
        const defaultTitle = $msg("moduleInputUIObsidian.defaultTitleSelect");
        return confirmWithMessageWithWideButton(
            this.plugin,
            opt.title || defaultTitle,
            message,
            buttons,
            opt.defaultAction,
            opt.timeout
        );
    }

    askInPopup(key: string, dialogText: string, anchorCallback: (anchor: HTMLAnchorElement) => void) {
        const fragment = createFragment((doc) => {
            const [beforeText, afterText] = dialogText.split("{HERE}", 2);
            doc.createEl("span", undefined, (a) => {
                a.appendText(beforeText);
                a.appendChild(
                    a.createEl("a", undefined, (anchor) => {
                        anchorCallback(anchor);
                    })
                );
                a.appendText(afterText);
            });
        });
        const popupKey = "popup-" + key;
        scheduleTask(popupKey, 1000, async () => {
            const popup = await memoIfNotExist(popupKey, () => new Notice(fragment, 0));
            const isShown = popup?.noticeEl?.isShown();
            if (!isShown) {
                memoObject(popupKey, new Notice(fragment, 0));
            }
            scheduleTask(popupKey + "-close", 20000, () => {
                const popup = retrieveMemoObject<Notice>(popupKey);
                if (!popup) return;
                if (popup?.noticeEl?.isShown()) {
                    popup.hide();
                }
                disposeMemoObject(popupKey);
            });
        });
    }

    confirmWithMessage(
        title: string,
        contentMd: string,
        buttons: string[],
        defaultAction: (typeof buttons)[number],
        timeout?: number
    ): Promise<(typeof buttons)[number] | false> {
        return confirmWithMessage(this.plugin, title, contentMd, buttons, defaultAction, timeout);
    }

    onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
        services.appLifecycle.handleOnLoaded(this._everyOnload.bind(this));
    }
}
@@ -1,4 +1,4 @@
import { ButtonComponent } from "obsidian";
import { ButtonComponent } from "@/deps.ts";
import { App, FuzzySuggestModal, MarkdownRenderer, Modal, Plugin, Setting } from "../../../deps.ts";
import { EVENT_PLUGIN_UNLOADED, eventHub } from "../../../common/events.ts";


@@ -169,7 +169,7 @@ export class SerializedFileAccess {

    getAbstractFileByPathInsensitive(path: FilePath | string): TAbstractFile | null {
        //@ts-ignore
        return app.vault.getAbstractFileByPathInsensitive(path);
        return this.app.vault.getAbstractFileByPathInsensitive(path);
    }

    getAbstractFileByPath(path: FilePath | string): TAbstractFile | null {

@@ -9,25 +9,20 @@ import {
    LOG_LEVEL_VERBOSE,
    type FileEventType,
    type FilePath,
    type FilePathWithPrefix,
    type UXFileInfoStub,
    type UXInternalFileInfoStub,
} from "../../../lib/src/common/types.ts";
import { delay, fireAndForget } from "../../../lib/src/common/utils.ts";
import { delay, fireAndForget, throttle } from "../../../lib/src/common/utils.ts";
import { type FileEventItem } from "../../../common/types.ts";
import { serialized, skipIfDuplicated } from "octagonal-wheels/concurrency/lock";
import {
    finishAllWaitingForTimeout,
    finishWaitingForTimeout,
    isWaitingForTimeout,
    waitForTimeout,
} from "octagonal-wheels/concurrency/task";
import { isWaitingForTimeout } from "octagonal-wheels/concurrency/task";
import { Semaphore } from "octagonal-wheels/concurrency/semaphore";
import type { LiveSyncCore } from "../../../main.ts";
import { InternalFileToUXFileInfoStub, TFileToUXFileInfoStub } from "./utilObsidian.ts";
import ObsidianLiveSyncPlugin from "../../../main.ts";
import type { StorageAccess } from "../../interfaces/StorageAccess.ts";
import { HiddenFileSync } from "../../../features/HiddenFileSync/CmdHiddenFileSync.ts";
import { promiseWithResolvers, type PromiseWithResolvers } from "octagonal-wheels/promises";
// import { InternalFileToUXFileInfo } from "../platforms/obsidian.ts";

export type FileEvent = {
@@ -36,14 +31,29 @@ export type FileEvent = {
    oldPath?: string;
    cachedData?: string;
    skipBatchWait?: boolean;
    cancelled?: boolean;
};
type WaitInfo = {
    since: number;
    type: FileEventType;
    canProceed: PromiseWithResolvers<boolean>;
    timerHandler: ReturnType<typeof setTimeout>;
    event: FileEventItem;
};
const TYPE_SENTINEL_FLUSH = "SENTINEL_FLUSH";
type FileEventItemSentinelFlush = {
    type: typeof TYPE_SENTINEL_FLUSH;
};
type FileEventItemSentinel = FileEventItemSentinelFlush;

export abstract class StorageEventManager {
    abstract beginWatch(): void;
    abstract flushQueue(): void;
    abstract beginWatch(): Promise<void>;

    abstract appendQueue(items: FileEvent[], ctx?: any): Promise<void>;
    abstract cancelQueue(key: string): void;

    abstract isWaiting(filename: FilePath): boolean;
    abstract waitForIdle(): Promise<void>;
    abstract restoreState(): Promise<void>;
}

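The abstract contract shifts from an imperative `flushQueue()` to `waitForIdle()` plus `restoreState()`: callers no longer force pending batches out; they await quiescence, and a persisted queue can be replayed on startup. A toy outline of an implementation under those assumptions (illustrative only; the Obsidian implementation below is the real one):

class NaiveStorageEventManager extends StorageEventManager {
    private queue: FileEvent[] = [];
    private inFlight = 0;
    async beginWatch() { /* subscribe to vault events here */ }
    async appendQueue(items: FileEvent[]) { this.queue.push(...items); }
    cancelQueue(_key: string) { /* mark matching queued items as cancelled */ }
    isWaiting(_filename: FilePath) { return false; }
    async waitForIdle() {
        // Poll until nothing is queued or running; a real implementation
        // awaits per-item promises instead of polling.
        while (this.queue.length > 0 || this.inFlight > 0) {
            await new Promise((r) => setTimeout(r, 10));
        }
    }
    async restoreState() { /* reload a persisted queue snapshot here */ }
}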
export class StorageEventManagerObsidian extends StorageEventManager {
@@ -66,6 +76,13 @@ export class StorageEventManagerObsidian extends StorageEventManager {
    // Necessary evil.
    cmdHiddenFileSync: HiddenFileSync;

    /**
     * Snapshot restoration promise.
     * The snapshot will be restored before starting to watch vault changes.
     * By design, this is called from the initialisation process, which is implemented in `ModuleInitializerFile.ts`.
     */
    snapShotRestored: Promise<void> | null = null;

    constructor(plugin: ObsidianLiveSyncPlugin, core: LiveSyncCore, storageAccess: StorageAccess) {
        super();
        this.storageAccess = storageAccess;
@@ -73,7 +90,18 @@ export class StorageEventManagerObsidian extends StorageEventManager {
        this.core = core;
        this.cmdHiddenFileSync = this.plugin.getAddOn(HiddenFileSync.name) as HiddenFileSync;
    }
    beginWatch() {

    /**
     * Restore the previous snapshot if it exists.
     * @returns
     */
    restoreState(): Promise<void> {
        this.snapShotRestored = this._restoreFromSnapshot();
        return this.snapShotRestored;
    }

    async beginWatch() {
        await this.snapShotRestored;
        const plugin = this.plugin;
        this.watchVaultChange = this.watchVaultChange.bind(this);
        this.watchVaultCreate = this.watchVaultCreate.bind(this);
@@ -88,8 +116,6 @@ export class StorageEventManagerObsidian extends StorageEventManager {
        //@ts-ignore : Internal API
        plugin.registerEvent(plugin.app.vault.on("raw", this.watchVaultRawEvents));
        plugin.registerEvent(plugin.app.workspace.on("editor-change", this.watchEditorChange));

        // plugin.fileEventQueue.startPipeline();
    }
    watchEditorChange(editor: any, info: any) {
        if (!("path" in info)) {
@@ -212,13 +238,16 @@ export class StorageEventManagerObsidian extends StorageEventManager {
            null
        );
    }

    // Cache the file and wait until it can be processed.
    async appendQueue(params: FileEvent[], ctx?: any) {
        if (!this.core.settings.isConfigured) return;
        if (this.core.settings.suspendFileWatching) return;
        if (this.core.settings.maxMTimeForReflectEvents > 0) {
            return;
        }
        this.core.services.vault.markFileListPossiblyChanged();
        // Flag that the file list may need to be reloaded
        const processFiles = new Set<FilePath>();
        for (const param of params) {
            if (shouldBeIgnored(param.file.path)) {
                continue;
@@ -261,7 +290,7 @@ export class StorageEventManagerObsidian extends StorageEventManager {
            if (param.cachedData) {
                cache = param.cachedData;
            }
            this.enqueue({
            void this.enqueue({
                type,
                args: {
                    file: file,
@@ -272,123 +301,291 @@ export class StorageEventManagerObsidian extends StorageEventManager {
                skipBatchWait: param.skipBatchWait,
                key: atomicKey,
            });
            processFiles.add(file.path as FilePath);
            if (oldPath) {
                processFiles.add(oldPath as FilePath);
            }
        }
        for (const path of processFiles) {
            fireAndForget(() => this.startStandingBy(path));
        }
    }
    bufferedQueuedItems = [] as FileEventItem[];
    private bufferedQueuedItems = [] as (FileEventItem | FileEventItemSentinel)[];

    /**
     * Immediately take a snapshot.
     */
    private _triggerTakeSnapshot() {
        void this._takeSnapshot();
    }
    /**
     * Trigger taking a snapshot after the throttle period.
     */
    triggerTakeSnapshot = throttle(() => this._triggerTakeSnapshot(), 100);

    enqueue(newItem: FileEventItem) {
        const filename = newItem.args.file.path;
        if (this.shouldBatchSave) {
            Logger(`Request cancel for waiting of previous ${filename}`, LOG_LEVEL_DEBUG);
            finishWaitingForTimeout(`storage-event-manager-batchsave-${filename}`);
        }
        this.bufferedQueuedItems.push(newItem);
        // When deleting or renaming, the queue must be flushed once before subsequent processing to prevent unexpected race conditions.
        if (newItem.type == "DELETE") {
            return this.flushQueue();
            // If the sentinel has been pushed, runQueuedEvents will wait for idle before processing the delete.
            this.bufferedQueuedItems.push({
                type: TYPE_SENTINEL_FLUSH,
            });
        }
        this.updateStatus();
        this.bufferedQueuedItems.push(newItem);

        fireAndForget(() => this._takeSnapshot().then(() => this.runQueuedEvents()));
    }

    // Limit concurrent processing to reduce the IO load: file-processing plus the scheduler (1), so file events can be processed in 4 slots.
    concurrentProcessing = Semaphore(5);

    private _waitingMap = new Map<string, WaitInfo>();
    private _waitForIdle: Promise<void> | null = null;

    /**
     * Wait until all queued events are processed.
     * Events queued afterwards are not awaited, but no new events will be added.
     * @returns
     */
    waitForIdle(): Promise<void> {
        if (this._waitingMap.size === 0) {
            return Promise.resolve();
        }
        if (this._waitForIdle) {
            return this._waitForIdle;
        }
        const promises = [...this._waitingMap.entries()].map(([key, waitInfo]) => {
            return new Promise<void>((resolve) => {
                waitInfo.canProceed.promise
                    .then(() => {
                        Logger(`Processing ${key}: Wait for idle completed`, LOG_LEVEL_DEBUG);
                        // No op
                    })
                    .catch((e) => {
                        Logger(`Processing ${key}: Wait for idle error`, LOG_LEVEL_INFO);
                        Logger(e, LOG_LEVEL_VERBOSE);
                        // No op
                    })
                    .finally(() => {
                        resolve();
                    });
                this._proceedWaiting(key);
            });
        });
        const waitPromise = Promise.all(promises).then(() => {
            this._waitForIdle = null;
            Logger(`All wait for idle completed`, LOG_LEVEL_VERBOSE);
        });
        this._waitForIdle = waitPromise;
        return waitPromise;
    }
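`waitForIdle` snapshots the waiter map once: entries added after the call are not awaited, and the shared `_waitForIdle` promise lets concurrent callers piggyback on one settle pass. In miniature:

const waiters = new Map<string, Promise<unknown>>();
let idle: Promise<void> | null = null;
function waitForAllCurrent(): Promise<void> {
    if (waiters.size === 0) return Promise.resolve();
    if (idle) return idle; // concurrent callers share a single settle pass
    // Swallow rejections so one failed waiter cannot break idle detection.
    const snapshot = [...waiters.values()].map((p) => p.catch(() => undefined));
    idle = Promise.all(snapshot).then(() => {
        idle = null;
    });
    return idle;
}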
    /**
     * Proceed the wait for the given key immediately.
     */
    private _proceedWaiting(key: string) {
        const waitInfo = this._waitingMap.get(key);
        if (waitInfo) {
            waitInfo.canProceed.resolve(true);
            clearTimeout(waitInfo.timerHandler);
            this._waitingMap.delete(key);
        }
        this.triggerTakeSnapshot();
    }
    /**
     * Cancel waiting for the given key.
     */
    private _cancelWaiting(key: string) {
        const waitInfo = this._waitingMap.get(key);
        if (waitInfo) {
            waitInfo.canProceed.resolve(false);
            clearTimeout(waitInfo.timerHandler);
            this._waitingMap.delete(key);
        }
        this.triggerTakeSnapshot();
    }
    /**
     * Add a wait for the given key.
     * @param key
     * @param event
     * @param waitedSince Optional timestamp of when waiting began, used to calculate the remaining delay.
     */
    private _addWaiting(key: string, event: FileEventItem, waitedSince?: number): WaitInfo {
        if (this._waitingMap.has(key)) {
            // Already waiting
            throw new Error(`Already waiting for key: ${key}`);
        }
        const resolver = promiseWithResolvers<boolean>();
        const now = Date.now();
        const since = waitedSince ?? now;
        const elapsed = now - since;
        const maxDelay = this.batchSaveMaximumDelay * 1000;
        const remainingDelay = Math.max(0, maxDelay - elapsed);
        const nextDelay = Math.min(remainingDelay, this.batchSaveMinimumDelay * 1000);
        // x*<------- maxDelay --------->*
        // x*<-- minDelay -->*
        // x*           x<-- nextDelay -->*
        // x*           x<-- Capped-->*
        // x*           x.......*
        // x: event
        // *: save
        // At each event (x), save (*) within maxDelay at the latest, while maintaining the minimum delay between saves.

        if (elapsed >= maxDelay) {
            // Already exceeded the maximum delay, do not wait.
            Logger(`Processing ${key}: Batch save maximum delay already exceeded: ${event.type}`, LOG_LEVEL_DEBUG);
        } else {
            Logger(`Processing ${key}: Adding waiting for batch save: ${event.type} (${nextDelay}ms)`, LOG_LEVEL_DEBUG);
        }
        const waitInfo: WaitInfo = {
            since: since,
            type: event.type,
            event: event,
            canProceed: resolver,
            timerHandler: setTimeout(() => {
                Logger(`Processing ${key}: Batch save timeout reached: ${event.type}`, LOG_LEVEL_DEBUG);
                this._proceedWaiting(key);
            }, nextDelay),
        };
        this._waitingMap.set(key, waitInfo);
        this.triggerTakeSnapshot();
        return waitInfo;
    }
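The delay arithmetic caps each individual wait at `batchSaveMinimumDelay` while guaranteeing a save no later than `batchSaveMaximumDelay` after the first buffered event. A worked example, assuming a 5-second minimum and a 60-second maximum:

// Assumed settings: batchSaveMinimumDelay = 5 (s), batchSaveMaximumDelay = 60 (s).
const elapsed = 57_000; // ms since the first buffered event for this key
const remainingDelay = Math.max(0, 60_000 - elapsed); // 3000
const nextDelay = Math.min(remainingDelay, 5_000); // 3000: the deadline wins
// So an event arriving at 57 s waits only 3 s more, landing exactly on the
// 60-second deadline instead of drifting past it.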
    /**
     * Process the given file event.
     */
    async processFileEvent(fei: FileEventItem) {
        const releaser = await this.concurrentProcessing.acquire();
        try {
            this.updateStatus();
            const filename = fei.args.file.path;
            const waitingKey = `${filename}`;
            const previous = this._waitingMap.get(waitingKey);
            let isShouldBeCancelled = fei.skipBatchWait || false;
            let previousPromise: Promise<boolean> = Promise.resolve(true);
            let waitPromise: Promise<boolean> = Promise.resolve(true);
            // 1. Check if there is a previous wait for the same file
            if (previous) {
                previousPromise = previous.canProceed.promise;
                if (isShouldBeCancelled) {
                    Logger(
                        `Processing ${filename}: Requested to perform immediately, cancelling previous waiting: ${fei.type}`,
                        LOG_LEVEL_DEBUG
                    );
                }
                if (!isShouldBeCancelled && fei.type === "DELETE") {
                    // For DELETE, cancel any previous waiting and proceed immediately.
                    // That is because once the file is deleted, we cannot read it anymore.
                    Logger(
                        `Processing ${filename}: DELETE requested, cancelling previous waiting: ${fei.type}`,
                        LOG_LEVEL_DEBUG
                    );
                    isShouldBeCancelled = true;
                }
                if (!isShouldBeCancelled && previous.type === fei.type) {
                    // For the same type, we can cancel the previous waiting and proceed immediately.
                    Logger(`Processing ${filename}: Cancelling previous waiting: ${fei.type}`, LOG_LEVEL_DEBUG);
                    isShouldBeCancelled = true;
                }
                // 2. Wait for the previous to complete
                if (isShouldBeCancelled) {
                    this._cancelWaiting(waitingKey);
                    Logger(`Processing ${filename}: Previous cancelled: ${fei.type}`, LOG_LEVEL_DEBUG);
                    isShouldBeCancelled = true;
                }
                if (!isShouldBeCancelled) {
                    Logger(`Processing ${filename}: Waiting for previous to complete: ${fei.type}`, LOG_LEVEL_DEBUG);
                    this._proceedWaiting(waitingKey);
                    Logger(`Processing ${filename}: Previous completed: ${fei.type}`, LOG_LEVEL_DEBUG);
                }
            }
            await previousPromise;
            // 3. Check if shouldBatchSave is true
            if (this.shouldBatchSave && !fei.skipBatchWait) {
                // If the type is CREATE or CHANGED, set up a wait
                if (fei.type == "CREATE" || fei.type == "CHANGED") {
                    // 3.2. If so, register the wait and await it, or until the timeout
                    // (`since` is copied from the previous wait, if any, to cap the maximum total wait time)
                    // console.warn(`Since:`, previous?.since);
                    const info = this._addWaiting(waitingKey, fei, previous?.since);
                    waitPromise = info.canProceed.promise;
                } else if (fei.type == "DELETE") {
                    // For DELETE, cancel any previous waiting and proceed immediately
                }
                Logger(`Processing ${filename}: Waiting for batch save: ${fei.type}`, LOG_LEVEL_DEBUG);
                const canProceed = await waitPromise;
                if (!canProceed) {
                    // 3.2.1. If cancelled by a newer queued event, skip the subsequent processing.
                    Logger(`Processing ${filename}: Cancelled by new queue: ${fei.type}`, LOG_LEVEL_DEBUG);
                    return;
                }
            }
            // await this.handleFileEvent(fei);
            await this.requestProcessQueue(fei);
        } finally {
            await this._takeSnapshot();
            releaser();
        }
    }
    concurrentProcessing = Semaphore(5);
    waitedSince = new Map<FilePath | FilePathWithPrefix, number>();
    async startStandingBy(filename: FilePath) {
        // If already waited upon, there is no need to start again (we are looping inside the function)
        await skipIfDuplicated(`storage-event-manager-${filename}`, async () => {
            Logger(`Processing ${filename}: Starting`, LOG_LEVEL_DEBUG);
            const release = await this.concurrentProcessing.acquire();
            try {
                Logger(`Processing ${filename}: Started`, LOG_LEVEL_DEBUG);
                let noMoreFiles = false;
                do {
                    const target = this.bufferedQueuedItems.find((e) => e.args.file.path == filename);
                    if (target === undefined) {
                        noMoreFiles = true;
                        break;
                    }
                    const operationType = target.type;
    async _takeSnapshot() {
        const processingEvents = [...this._waitingMap.values()].map((e) => e.event);
        const waitingEvents = this.bufferedQueuedItems;
        const snapShot = [...processingEvents, ...waitingEvents];
        await this.core.kvDB.set("storage-event-manager-snapshot", snapShot);
        Logger(`Storage operation snapshot taken: ${snapShot.length} items`, LOG_LEVEL_DEBUG);
        this.updateStatus();
    }
    async _restoreFromSnapshot() {
        const snapShot = await this.core.kvDB.get<(FileEventItem | FileEventItemSentinel)[]>(
            "storage-event-manager-snapshot"
        );
        if (snapShot && Array.isArray(snapShot) && snapShot.length > 0) {
            // console.warn(`Restoring snapshot: ${snapShot.length} items`);
            Logger(`Restoring storage operation snapshot: ${snapShot.length} items`, LOG_LEVEL_VERBOSE);
            // Restore the snapshot
            // Note: Mark all items as skipBatchWait to prevent applying offline batch saving.
            this.bufferedQueuedItems = snapShot.map((e) => ({ ...e, skipBatchWait: true }));
            this.updateStatus();
            await this.runQueuedEvents();
        } else {
            Logger(`No snapshot to restore`, LOG_LEVEL_VERBOSE);
            // console.warn(`No snapshot to restore`);
        }
    }
    runQueuedEvents() {
        return skipIfDuplicated("storage-event-manager-run-queued-events", async () => {
            do {
                if (this.bufferedQueuedItems.length === 0) {
                    break;
                }
                // 1. Get the first queued item

                    // if (target.waitedFrom + this.batchSaveMaximumDelay > now) {
                    //     this.requestProcessQueue(target);
                    //     continue;
                    // }
                    const type = target.type;
                    // If already cancelled by another operation, skip this.
                    if (target.cancelled) {
                        Logger(`Processing ${filename}: Cancelled (scheduled): ${operationType}`, LOG_LEVEL_DEBUG);
                        this.cancelStandingBy(target);
                        continue;
                    }
                    if (!target.skipBatchWait) {
                        if (this.shouldBatchSave && (type == "CREATE" || type == "CHANGED")) {
                            const waitedSince = this.waitedSince.get(filename);
                            let canWait = true;
                            const now = Date.now();
                            if (waitedSince !== undefined) {
                                if (waitedSince + this.batchSaveMaximumDelay * 1000 < now) {
                                    Logger(
                                        `Processing ${filename}: Could not wait any longer: ${operationType}`,
                                        LOG_LEVEL_INFO
                                    );
                                    canWait = false;
                                }
                            }
                            if (canWait) {
                                if (waitedSince === undefined) this.waitedSince.set(filename, now);
                                target.batched = true;
                                Logger(
                                    `Processing ${filename}: Waiting for batch save delay: ${operationType}`,
                                    LOG_LEVEL_DEBUG
                                );
                                this.updateStatus();
                                const result = await waitForTimeout(
                                    `storage-event-manager-batchsave-${filename}`,
                                    this.batchSaveMinimumDelay * 1000
                                );
                                if (!result) {
                                    Logger(
                                        `Processing ${filename}: Cancelled by new queue: ${operationType}`,
                                        LOG_LEVEL_DEBUG
                                    );
                                    // If we could not wait until the timeout, we probably got a new queue entry; therefore the one currently being processed should be cancelled
                                    this.cancelStandingBy(target);
                                    continue;
                                }
                            }
                        }
                    } else {
                        Logger(
                            `Processing ${filename}: Requested to perform immediately ${filename}: ${operationType}`,
                            LOG_LEVEL_DEBUG
                        );
                    }
                    Logger(`Processing ${filename}: Request main to process: ${operationType}`, LOG_LEVEL_DEBUG);
                    await this.requestProcessQueue(target);
                } while (!noMoreFiles);
            } finally {
                release();
            }
            Logger(`Processing ${filename}: Finished`, LOG_LEVEL_DEBUG);
                const fei = this.bufferedQueuedItems.shift()!;
                await this._takeSnapshot();
                this.updateStatus();
                // 2. Consume one semaphore slot to enqueue processing, then release it immediately.
                // (This just limits the total concurrent processing count; skipping batched items is handled in processFileEvent.)
                const releaser = await this.concurrentProcessing.acquire();
                releaser();
                this.updateStatus();
                // 3. Check for a sentinel flush.
                // If it is the sentinel, wait for idle and continue.
                if (fei.type === TYPE_SENTINEL_FLUSH) {
                    Logger(`Waiting for idle`, LOG_LEVEL_VERBOSE);
                    // Flush all waiting batch queues
                    await this.waitForIdle();
                    this.updateStatus();
                    continue;
                }
                // 4. Process the event; this should be fire-and-forget so it does not block queue processing for each file.
                fireAndForget(() => this.processFileEvent(fei));
            } while (this.bufferedQueuedItems.length > 0);
        });
    }

    cancelStandingBy(fei: FileEventItem) {
        this.bufferedQueuedItems.remove(fei);
        this.updateStatus();
    }
    processingCount = 0;
    async requestProcessQueue(fei: FileEventItem) {
        try {
            this.processingCount++;
            this.bufferedQueuedItems.remove(fei);
            // this.bufferedQueuedItems.remove(fei);
            this.updateStatus();
            this.waitedSince.delete(fei.args.file.path);
            // this.waitedSince.delete(fei.args.file.path);
            await this.handleFileEvent(fei);
            await this._takeSnapshot();
        } finally {
            this.processingCount--;
            this.updateStatus();
@@ -397,27 +594,26 @@ export class StorageEventManagerObsidian extends StorageEventManager {
    isWaiting(filename: FilePath) {
        return isWaitingForTimeout(`storage-event-manager-batchsave-${filename}`);
    }
    flushQueue() {
        this.bufferedQueuedItems.forEach((e) => (e.skipBatchWait = true));
        finishAllWaitingForTimeout("storage-event-manager-batchsave-", true);
    }
    cancelQueue(key: string) {
        this.bufferedQueuedItems.forEach((e) => {
            if (e.key === key) e.skipBatchWait = true;
        });
    }

    updateStatus() {
        const allItems = this.bufferedQueuedItems.filter((e) => !e.cancelled);
        const batchedCount = allItems.filter((e) => e.batched && !e.skipBatchWait).length;
        const allFileEventItems = this.bufferedQueuedItems.filter((e): e is FileEventItem => "args" in e);
        const allItems = allFileEventItems.filter((e) => !e.cancelled);
        const totalItems = allItems.length + this.concurrentProcessing.waiting;
        const processing = this.processingCount;
        const batchedCount = this._waitingMap.size;
        this.core.batched.value = batchedCount;
        this.core.processing.value = this.processingCount;
        this.core.totalQueued.value = allItems.length - batchedCount;
        this.core.processing.value = processing;
        this.core.totalQueued.value = totalItems + batchedCount + processing;
    }
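With the reworked `updateStatus`, the counters partition differently: `batched` now counts wait-map entries and `totalQueued` sums queued, batched, and in-flight items. A worked example under assumed numbers:

// Hypothetical moment: 3 buffered events, 2 keys waiting out their batch
// delay, 1 write in flight, nothing blocked on the semaphore.
const totalItems = 3 + 0; // buffered items + semaphore waiters
const batchedCount = 2; // _waitingMap.size
const processing = 1; // processingCount
const totalQueued = totalItems + batchedCount + processing; // 6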
    async handleFileEvent(queue: FileEventItem): Promise<any> {
        const file = queue.args.file;
        const lockKey = `handleFile:${file.path}`;
        return await serialized(lockKey, async () => {
        const ret = await serialized(lockKey, async () => {
            if (queue.cancelled) {
                Logger(`File event cancelled before processing: ${file.path}`, LOG_LEVEL_INFO);
                return;
            }
            if (queue.type == "INTERNAL" || file.isInternal) {
                await this.core.services.fileProcessing.processOptionalFileEvent(file.path as unknown as FilePath);
            } else {
@@ -444,9 +640,11 @@ export class StorageEventManagerObsidian extends StorageEventManager {
            }
        }
        });
        this.updateStatus();
        return ret;
    }

    cancelRelativeEvent(item: FileEventItem): void {
        this.cancelQueue(item.key);
        this._cancelWaiting(item.args.file.path);
    }
}

@@ -1,6 +1,6 @@
import { unique } from "octagonal-wheels/collection";
import { throttle } from "octagonal-wheels/function";
import { eventHub } from "../../common/events.ts";
import { EVENT_ON_UNRESOLVED_ERROR, eventHub } from "../../common/events.ts";
import { BASE_IS_NEW, compareFileFreshness, EVEN, getPath, isValidPath, TARGET_IS_NEW } from "../../common/utils.ts";
import {
    type FilePathWithPrefixLC,
@@ -13,6 +13,7 @@ import {
    LOG_LEVEL_INFO,
    LOG_LEVEL_DEBUG,
    type UXFileInfoStub,
    type LOG_LEVEL,
} from "../../lib/src/common/types.ts";
import { isAnyNote } from "../../lib/src/common/utils.ts";
import { stripAllPrefixes } from "../../lib/src/string_and_binary/path.ts";
@@ -21,34 +22,51 @@ import { withConcurrency } from "octagonal-wheels/iterable/map";
import type { InjectableServiceHub } from "../../lib/src/services/InjectableServices.ts";
import type { LiveSyncCore } from "../../main.ts";
export class ModuleInitializerFile extends AbstractModule {
    private _detectedErrors = new Set<string>();

    private logDetectedError(message: string, logLevel: LOG_LEVEL = LOG_LEVEL_INFO, key?: string) {
        this._detectedErrors.add(message);
        eventHub.emitEvent(EVENT_ON_UNRESOLVED_ERROR);
        this._log(message, logLevel, key);
    }
    private resetDetectedError(message: string) {
        eventHub.emitEvent(EVENT_ON_UNRESOLVED_ERROR);
        this._detectedErrors.delete(message);
    }
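`logDetectedError` and `resetDetectedError` form an error latch: each known failure message is held in a set while unresolved and cleared once its guard passes, and `EVENT_ON_UNRESOLVED_ERROR` fires on every transition so the UI can re-query the unresolved messages. The pattern in isolation (illustrative event bus; the real code goes through `eventHub`):

const unresolved = new Set<string>();
const listeners: (() => void)[] = [];
function raise(message: string) {
    unresolved.add(message);
    listeners.forEach((h) => h()); // cf. eventHub.emitEvent(EVENT_ON_UNRESOLVED_ERROR)
}
function resolve(message: string) {
    if (unresolved.delete(message)) listeners.forEach((h) => h());
}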
private async _performFullScan(showingNotice?: boolean, ignoreSuspending: boolean = false): Promise<boolean> {
|
||||
this._log("Opening the key-value database", LOG_LEVEL_VERBOSE);
|
||||
const isInitialized = (await this.core.kvDB.get<boolean>("initialized")) || false;
|
||||
// synchronize all files between database and storage.
|
||||
|
||||
const ERR_NOT_CONFIGURED =
|
||||
"LiveSync is not configured yet. Synchronising between the storage and the local database is now prevented.";
|
||||
if (!this.settings.isConfigured) {
|
||||
if (showingNotice) {
|
||||
this._log(
|
||||
"LiveSync is not configured yet. Synchronising between the storage and the local database is now prevented.",
|
||||
LOG_LEVEL_NOTICE,
|
||||
"syncAll"
|
||||
);
|
||||
}
|
||||
this.logDetectedError(ERR_NOT_CONFIGURED, showingNotice ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO, "syncAll");
|
||||
return false;
|
||||
}
|
||||
this.resetDetectedError(ERR_NOT_CONFIGURED);
|
||||
|
||||
const ERR_SUSPENDING =
|
||||
"Now suspending file watching. Synchronising between the storage and the local database is now prevented.";
|
||||
if (!ignoreSuspending && this.settings.suspendFileWatching) {
|
||||
if (showingNotice) {
|
||||
this._log(
|
||||
"Now suspending file watching. Synchronising between the storage and the local database is now prevented.",
|
||||
LOG_LEVEL_NOTICE,
|
||||
"syncAll"
|
||||
);
|
||||
}
|
||||
this.logDetectedError(ERR_SUSPENDING, showingNotice ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO, "syncAll");
|
||||
return false;
|
||||
}
|
||||
const MSG_IN_REMEDIATION = `Started in remediation Mode! (Max mtime for reflect events is set). Synchronising between the storage and the local database is now prevented.`;
|
||||
this.resetDetectedError(ERR_SUSPENDING);
|
||||
if (this.settings.maxMTimeForReflectEvents > 0) {
|
||||
this.logDetectedError(MSG_IN_REMEDIATION, LOG_LEVEL_NOTICE, "syncAll");
|
||||
return false;
|
||||
}
|
||||
this.resetDetectedError(MSG_IN_REMEDIATION);
|
||||
|
||||
if (showingNotice) {
|
||||
this._log("Initializing", LOG_LEVEL_NOTICE, "syncAll");
|
||||
}
|
||||
if (isInitialized) {
|
||||
this._log("Restoring storage state", LOG_LEVEL_VERBOSE);
|
||||
await this.core.storageAccess.restoreState();
|
||||
}
|
||||
|
||||
this._log("Initialize and checking database files");
|
||||
this._log("Checking deleted files");
|
||||
@@ -379,10 +397,12 @@ export class ModuleInitializerFile extends AbstractModule {
|
||||
if (this.localDatabase.isReady) {
|
||||
await this.services.vault.scanVault(showingNotice, ignoreSuspending);
|
||||
}
|
||||
const ERR_INITIALISATION_FAILED = `Initializing database has been failed on some module!`;
|
||||
if (!(await this.services.databaseEvents.onDatabaseInitialised(showingNotice))) {
|
||||
this._log(`Initializing database has been failed on some module!`, LOG_LEVEL_NOTICE);
|
||||
this.logDetectedError(ERR_INITIALISATION_FAILED, LOG_LEVEL_NOTICE);
|
||||
return false;
|
||||
}
|
||||
this.resetDetectedError(ERR_INITIALISATION_FAILED);
|
||||
this.services.appLifecycle.markIsReady();
|
||||
// run queued event once.
|
||||
await this.services.fileProcessing.commitPendingFileEvents();
|
||||
@@ -392,8 +412,12 @@ export class ModuleInitializerFile extends AbstractModule {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
private _reportDetectedErrors(): Promise<string[]> {
|
||||
return Promise.resolve(Array.from(this._detectedErrors));
|
||||
}
|
||||
onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
|
||||
services.databaseEvents.handleInitialiseDatabase(this._initializeDatabase.bind(this));
|
||||
services.vault.handleScanVault(this._performFullScan.bind(this));
|
||||
services.appLifecycle.getUnresolvedMessages.addHandler(this._reportDetectedErrors.bind(this));
|
||||
services.databaseEvents.initialiseDatabase.setHandler(this._initializeDatabase.bind(this));
|
||||
services.vault.scanVault.setHandler(this._performFullScan.bind(this));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,11 +4,12 @@ import type { LiveSyncLocalDB } from "../../lib/src/pouchdb/LiveSyncLocalDB.ts";
import { LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "octagonal-wheels/common/logger";
import { AbstractModule } from "../AbstractModule.ts";
import type { LiveSyncCore } from "../../main.ts";
import type { SimpleStore } from "octagonal-wheels/databases/SimpleStoreBase";

export class ModuleKeyValueDB extends AbstractModule {
tryCloseKvDB() {
async tryCloseKvDB() {
try {
this.core.kvDB?.close();
await this.core.kvDB?.close();
return true;
} catch (e) {
this._log("Failed to close KeyValueDB", LOG_LEVEL_VERBOSE);
@@ -19,7 +20,7 @@ export class ModuleKeyValueDB extends AbstractModule {
async openKeyValueDB(): Promise<boolean> {
await delay(10);
try {
this.tryCloseKvDB();
await this.tryCloseKvDB();
await delay(10);
await yieldMicrotask();
this.core.kvDB = await OpenKeyValueDatabase(this.services.vault.getVaultName() + "-livesync-kv");
@@ -33,12 +34,12 @@ export class ModuleKeyValueDB extends AbstractModule {
}
return true;
}
_onDBUnload(db: LiveSyncLocalDB) {
if (this.core.kvDB) this.core.kvDB.close();
async _onDBUnload(db: LiveSyncLocalDB) {
if (this.core.kvDB) await this.core.kvDB.close();
return Promise.resolve(true);
}
_onDBClose(db: LiveSyncLocalDB) {
if (this.core.kvDB) this.core.kvDB.close();
async _onDBClose(db: LiveSyncLocalDB) {
if (this.core.kvDB) await this.core.kvDB.close();
return Promise.resolve(true);
}

@@ -50,32 +51,33 @@ export class ModuleKeyValueDB extends AbstractModule {
return Promise.resolve(true);
}
_getSimpleStore<T>(kind: string) {
const getDB = () => this.core.kvDB;
const prefix = `${kind}-`;
return {
get: async (key: string): Promise<T> => {
return await this.core.kvDB.get(`${prefix}${key}`);
return await getDB().get(`${prefix}${key}`);
},
set: async (key: string, value: any): Promise<void> => {
await this.core.kvDB.set(`${prefix}${key}`, value);
await getDB().set(`${prefix}${key}`, value);
},
delete: async (key: string): Promise<void> => {
await this.core.kvDB.del(`${prefix}${key}`);
await getDB().del(`${prefix}${key}`);
},
keys: async (
from: string | undefined,
to: string | undefined,
count?: number | undefined
): Promise<string[]> => {
const ret = this.core.kvDB.keys(
const ret = await getDB().keys(
IDBKeyRange.bound(`${prefix}${from || ""}`, `${prefix}${to || ""}`),
count
);
return (await ret)
return ret
.map((e) => e.toString())
.filter((e) => e.startsWith(prefix))
.map((e) => e.substring(prefix.length));
},
};
} satisfies SimpleStore<T>;
}
_everyOnInitializeDatabase(db: LiveSyncLocalDB): Promise<boolean> {
return this.openKeyValueDB();
@@ -99,11 +101,11 @@ export class ModuleKeyValueDB extends AbstractModule {
return true;
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.databaseEvents.handleOnUnloadDatabase(this._onDBUnload.bind(this));
services.databaseEvents.handleOnCloseDatabase(this._onDBClose.bind(this));
services.databaseEvents.handleOnDatabaseInitialisation(this._everyOnInitializeDatabase.bind(this));
services.databaseEvents.handleOnResetDatabase(this._everyOnResetDatabase.bind(this));
services.database.handleOpenSimpleStore(this._getSimpleStore.bind(this));
services.appLifecycle.handleOnSettingLoaded(this._everyOnloadAfterLoadSettings.bind(this));
services.databaseEvents.onUnloadDatabase.addHandler(this._onDBUnload.bind(this));
services.databaseEvents.onCloseDatabase.addHandler(this._onDBClose.bind(this));
services.databaseEvents.onDatabaseInitialisation.addHandler(this._everyOnInitializeDatabase.bind(this));
services.databaseEvents.onResetDatabase.addHandler(this._everyOnResetDatabase.bind(this));
services.database.openSimpleStore.setHandler(this._getSimpleStore.bind(this));
services.appLifecycle.onSettingLoaded.addHandler(this._everyOnloadAfterLoadSettings.bind(this));
}
}

@@ -355,7 +355,7 @@ export class ModuleMigration extends AbstractModule {
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
super.onBindFunction(core, services);
services.appLifecycle.handleLayoutReady(this._everyOnLayoutReady.bind(this));
services.appLifecycle.handleFirstInitialise(this._everyOnFirstInitialize.bind(this));
services.appLifecycle.onLayoutReady.addHandler(this._everyOnLayoutReady.bind(this));
services.appLifecycle.onFirstInitialise.addHandler(this._everyOnFirstInitialize.bind(this));
}
}

@@ -1,11 +1,17 @@
import { LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "octagonal-wheels/common/logger";
import { AbstractModule } from "../AbstractModule.ts";
import { sizeToHumanReadable } from "octagonal-wheels/number";
import { $msg } from "src/lib/src/common/i18n.ts";
import type { LiveSyncCore } from "../../main.ts";
import { AbstractObsidianModule } from "../AbstractObsidianModule.ts";
import { EVENT_REQUEST_CHECK_REMOTE_SIZE, eventHub } from "@/common/events.ts";

export class ModuleCheckRemoteSize extends AbstractModule {
async _allScanStat(): Promise<boolean> {
export class ModuleCheckRemoteSize extends AbstractObsidianModule {
checkRemoteSize(): Promise<boolean> {
this.settings.notifyThresholdOfRemoteStorageSize = 1;
return this._allScanStat();
}

private async _allScanStat(): Promise<boolean> {
if (this.core.managers.networkManager.isOnline === false) {
this._log("Network is offline, skipping remote size check.", LOG_LEVEL_INFO);
return true;
@@ -109,7 +115,20 @@ export class ModuleCheckRemoteSize extends AbstractModule {
}
return true;
}

private _everyOnloadStart(): Promise<boolean> {
this.addCommand({
id: "livesync-reset-remote-size-threshold-and-check",
name: "Reset notification threshold and check the remote database usage",
callback: async () => {
await this.checkRemoteSize();
},
});
eventHub.onEvent(EVENT_REQUEST_CHECK_REMOTE_SIZE, () => this.checkRemoteSize());
return Promise.resolve(true);
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.handleOnScanningStartupIssues(this._allScanStat.bind(this));
services.appLifecycle.onScanningStartupIssues.addHandler(this._allScanStat.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
}
}
@@ -58,7 +58,20 @@ export class ModuleObsidianAPI extends AbstractObsidianModule {
async __fetchByAPI(url: string, authHeader: string, opts?: RequestInit): Promise<Response> {
const body = opts?.body as string;

const transformedHeaders = { ...(opts?.headers as Record<string, string>) };
const optHeaders = {} as Record<string, string>;
if (opts && "headers" in opts) {
if (opts.headers instanceof Headers) {
// For Compatibility, mostly headers.entries() is supported, but not all environments.
opts.headers.forEach((value, key) => {
optHeaders[key] = value;
});
} else {
for (const [key, value] of Object.entries(opts.headers as Record<string, string>)) {
optHeaders[key] = value;
}
}
}
const transformedHeaders = { ...optHeaders };
if (authHeader != "") transformedHeaders["authorization"] = authHeader;
delete transformedHeaders["host"];
delete transformedHeaders["Host"];
@@ -132,7 +145,6 @@ export class ModuleObsidianAPI extends AbstractObsidianModule {
return "Network is offline";
}
// let authHeader = await this._authHeader.getAuthorizationHeader(auth);

const conf: PouchDB.HttpAdapter.HttpAdapterConfiguration = {
adapter: "http",
auth: "username" in auth ? auth : undefined,
@@ -166,7 +178,6 @@ export class ModuleObsidianAPI extends AbstractObsidianModule {
if (!("username" in auth)) {
headers.append("authorization", authHeader);
}

try {
this.plugin.requestCount.value = this.plugin.requestCount.value + 1;
const response: Response = await (useRequestAPI
@@ -301,19 +312,34 @@ export class ModuleObsidianAPI extends AbstractObsidianModule {
return `${"appId" in this.app ? this.app.appId : ""}`;
}

private _reportUnresolvedMessages(): Promise<string[]> {
private _reportUnresolvedMessages(): Promise<(string | Error)[]> {
return Promise.resolve([...this._previousErrors]);
}

private _getAppVersion(): string {
const navigatorString = globalThis.navigator?.userAgent ?? "";
const match = navigatorString.match(/obsidian\/([0-9]+\.[0-9]+\.[0-9]+)/);
if (match && match.length >= 2) {
return match[1];
}
return "0.0.0";
}

private _getPluginVersion(): string {
return this.plugin.manifest.version;
}

onBindFunction(core: LiveSyncCore, services: typeof core.services) {
services.API.handleGetCustomFetchHandler(this._customFetchHandler.bind(this));
services.API.handleIsLastPostFailedDueToPayloadSize(this._getLastPostFailedBySize.bind(this));
services.remote.handleConnect(this._connectRemoteCouchDB.bind(this));
services.API.handleIsMobile(this._isMobile.bind(this));
services.vault.handleGetVaultName(this._getVaultName.bind(this));
services.vault.handleVaultName(this._vaultName.bind(this));
services.vault.handleGetActiveFilePath(this._getActiveFilePath.bind(this));
services.API.handleGetAppID(this._anyGetAppId.bind(this));
services.appLifecycle.reportUnresolvedMessages(this._reportUnresolvedMessages.bind(this));
services.API.getCustomFetchHandler.setHandler(this._customFetchHandler.bind(this));
services.API.isLastPostFailedDueToPayloadSize.setHandler(this._getLastPostFailedBySize.bind(this));
services.remote.connect.setHandler(this._connectRemoteCouchDB.bind(this));
services.API.isMobile.setHandler(this._isMobile.bind(this));
services.vault.getVaultName.setHandler(this._getVaultName.bind(this));
services.vault.vaultName.setHandler(this._vaultName.bind(this));
services.vault.getActiveFilePath.setHandler(this._getActiveFilePath.bind(this));
services.API.getAppID.setHandler(this._anyGetAppId.bind(this));
services.API.getAppVersion.setHandler(this._getAppVersion.bind(this));
services.API.getPluginVersion.setHandler(this._getPluginVersion.bind(this));
services.appLifecycle.getUnresolvedMessages.addHandler(this._reportUnresolvedMessages.bind(this));
}
}

@@ -67,6 +67,11 @@ export class ModuleObsidianEvents extends AbstractObsidianModule {
// eslint-disable-next-line @typescript-eslint/no-this-alias
const _this = this;
//@ts-ignore
if (!window.CodeMirrorAdapter) {
this._log("CodeMirrorAdapter is not available");
return;
}
//@ts-ignore
window.CodeMirrorAdapter.commands.save = () => {
//@ts-ignore
_this.app.commands.executeCommandById("editor:save-file");
@@ -239,10 +244,10 @@ export class ModuleObsidianEvents extends AbstractObsidianModule {
}
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.handleLayoutReady(this._everyOnLayoutReady.bind(this));
services.appLifecycle.handleOnInitialise(this._everyOnloadStart.bind(this));
services.appLifecycle.handlePerformRestart(this._performRestart.bind(this));
services.appLifecycle.handleAskRestart(this._askReload.bind(this));
services.appLifecycle.handleScheduleRestart(this._scheduleAppReload.bind(this));
services.appLifecycle.onLayoutReady.addHandler(this._everyOnLayoutReady.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
services.appLifecycle.performRestart.setHandler(this._performRestart.bind(this));
services.appLifecycle.askRestart.setHandler(this._askReload.bind(this));
services.appLifecycle.scheduleRestart.setHandler(this._scheduleAppReload.bind(this));
}
}

@@ -131,8 +131,8 @@ export class ModuleObsidianMenu extends AbstractObsidianModule {
}
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.handleOnInitialise(this._everyOnloadStart.bind(this));
services.appLifecycle.handleOnLoaded(this._everyOnload.bind(this));
services.API.handleShowWindow(this._showView.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
services.API.showWindow.setHandler(this._showView.bind(this));
}
}

@@ -12,7 +12,7 @@ export class ModuleExtraSyncObsidian extends AbstractObsidianModule {
}

onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.setting.handleGetDeviceAndVaultName(this._getDeviceAndVaultName.bind(this));
services.setting.handleSetDeviceAndVaultName(this._setDeviceAndVaultName.bind(this));
services.setting.getDeviceAndVaultName.setHandler(this._getDeviceAndVaultName.bind(this));
services.setting.setDeviceAndVaultName.setHandler(this._setDeviceAndVaultName.bind(this));
}
}

@@ -157,10 +157,10 @@ export class ModuleDev extends AbstractObsidianModule {
return this.testDone();
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.handleLayoutReady(this._everyOnLayoutReady.bind(this));
services.appLifecycle.handleOnInitialise(this._everyOnloadStart.bind(this));
services.appLifecycle.handleOnSettingLoaded(this._everyOnloadAfterLoadSettings.bind(this));
services.test.handleTest(this._everyModuleTest.bind(this));
services.test.handleAddTestResult(this._addTestResult.bind(this));
services.appLifecycle.onLayoutReady.addHandler(this._everyOnLayoutReady.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
services.appLifecycle.onSettingLoaded.addHandler(this._everyOnloadAfterLoadSettings.bind(this));
services.test.test.addHandler(this._everyModuleTest.bind(this));
services.test.addTestResult.setHandler(this._addTestResult.bind(this));
}
}

@@ -441,6 +441,6 @@ Line4:D`;
return Promise.resolve(true);
}
onBindFunction(core: typeof this.core, services: typeof core.services): void {
services.test.handleTestMultiDevice(this._everyModuleTestMultiDevice.bind(this));
services.test.testMultiDevice.addHandler(this._everyModuleTestMultiDevice.bind(this));
}
}

@@ -4,7 +4,7 @@ import { LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "octagonal-w
import { eventHub } from "../../common/events";
import { getWebCrypto } from "../../lib/src/mods.ts";
import { uint8ArrayToHexString } from "octagonal-wheels/binary/hex";
import { parseYaml, requestUrl, stringifyYaml } from "obsidian";
import { parseYaml, requestUrl, stringifyYaml } from "@/deps.ts";
import type { FilePath } from "../../lib/src/common/types.ts";
import { scheduleTask } from "octagonal-wheels/concurrency/task";
import { getFileRegExp } from "../../lib/src/common/utils.ts";
@@ -581,8 +581,8 @@ ABCDEFGHIJKLMNOPQRSTUVWXYZ`;
return this.testDone();
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.handleOnSettingLoaded(this._everyOnloadAfterLoadSettings.bind(this));
services.replication.handleBeforeReplicate(this._everyBeforeReplicate.bind(this));
services.test.handleTestMultiDevice(this._everyModuleTestMultiDevice.bind(this));
services.appLifecycle.onSettingLoaded.addHandler(this._everyOnloadAfterLoadSettings.bind(this));
services.replication.onBeforeReplicate.addHandler(this._everyBeforeReplicate.bind(this));
services.test.testMultiDevice.addHandler(this._everyModuleTestMultiDevice.bind(this));
}
}

@@ -1,4 +1,4 @@
import { ItemView, WorkspaceLeaf } from "obsidian";
import { ItemView, WorkspaceLeaf } from "@/deps.ts";
import TestPaneComponent from "./TestPane.svelte";
import type ObsidianLiveSyncPlugin from "../../../main.ts";
import type { ModuleDev } from "../ModuleDev.ts";

@@ -90,10 +90,8 @@ export class ConflictResolveModal extends Modal {
new Date(this.result.left.mtime).toLocaleString() + (this.result.left.deleted ? " (Deleted)" : "");
const date2 =
new Date(this.result.right.mtime).toLocaleString() + (this.result.right.deleted ? " (Deleted)" : "");
div2.setHTMLUnsafe(`
<span class='deleted'><span class='conflict-dev-name'>${this.localName}</span>: ${date1}</span><br>
<span class='added'><span class='conflict-dev-name'>${this.remoteName}</span>: ${date2}</span><br>
`);
div2.innerHTML = `<span class='deleted'><span class='conflict-dev-name'>${this.localName}</span>: ${date1}</span><br>
<span class='added'><span class='conflict-dev-name'>${this.remoteName}</span>: ${date2}</span><br>`;
contentEl.createEl("button", { text: `Use ${this.localName}` }, (e) =>
e.addEventListener("click", () => this.sendResponse(this.result.right.rev))
).style.marginRight = "4px";
@@ -109,11 +107,10 @@ export class ConflictResolveModal extends Modal {
e.addEventListener("click", () => this.sendResponse(CANCELLED))
).style.marginRight = "4px";
diff = diff.replace(/\n/g, "<br>");
// div.innerHTML = diff;
if (diff.length > 100 * 1024) {
div.setText("(Too large diff to display)");
div.innerText = "(Too large diff to display)";
} else {
div.setHTMLUnsafe(diff);
div.innerHTML = diff;
}
}

@@ -1,4 +1,4 @@
import { WorkspaceLeaf } from "obsidian";
import { WorkspaceLeaf } from "@/deps.ts";
import LogPaneComponent from "./LogPane.svelte";
import type ObsidianLiveSyncPlugin from "../../../main.ts";
import { SvelteItemView } from "../../../common/SvelteItemView.ts";

@@ -20,6 +20,6 @@ export class ModuleObsidianGlobalHistory extends AbstractObsidianModule {
void this.services.API.showWindow(VIEW_TYPE_GLOBAL_HISTORY);
}
onBindFunction(core: typeof this.core, services: typeof core.services): void {
services.appLifecycle.handleOnInitialise(this._everyOnloadStart.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
}
}

@@ -131,36 +131,42 @@ export class ModuleInteractiveConflictResolver extends AbstractObsidianModule {
async _allScanStat(): Promise<boolean> {
const notes: { path: string; mtime: number }[] = [];
this._log(`Checking conflicted files`, LOG_LEVEL_VERBOSE);
for await (const doc of this.localDatabase.findAllDocs({ conflicts: true })) {
if (!("_conflicts" in doc)) continue;
notes.push({ path: getPath(doc), mtime: doc.mtime });
}
if (notes.length > 0) {
this.core.confirm.askInPopup(
`conflicting-detected-on-safety`,
`Some files have been left conflicted! Press {HERE} to resolve them, or you can do it later by "Pick a file to resolve conflict`,
(anchor) => {
anchor.text = "HERE";
anchor.addEventListener("click", () => {
fireAndForget(() => this.allConflictCheck());
});
}
);
this._log(
`Some files have been left conflicted! Please resolve them by "Pick a file to resolve conflict". The list is written in the log.`,
LOG_LEVEL_VERBOSE
);
for (const note of notes) {
this._log(`Conflicted: ${note.path}`);
try {
for await (const doc of this.localDatabase.findAllDocs({ conflicts: true })) {
if (!("_conflicts" in doc)) continue;
notes.push({ path: getPath(doc), mtime: doc.mtime });
}
} else {
this._log(`There are no conflicting files`, LOG_LEVEL_VERBOSE);
if (notes.length > 0) {
this.core.confirm.askInPopup(
`conflicting-detected-on-safety`,
`Some files have been left conflicted! Press {HERE} to resolve them, or you can do it later by "Pick a file to resolve conflict`,
(anchor) => {
anchor.text = "HERE";
anchor.addEventListener("click", () => {
fireAndForget(() => this.allConflictCheck());
});
}
);
this._log(
`Some files have been left conflicted! Please resolve them by "Pick a file to resolve conflict". The list is written in the log.`,
LOG_LEVEL_VERBOSE
);
for (const note of notes) {
this._log(`Conflicted: ${note.path}`);
}
} else {
this._log(`There are no conflicting files`, LOG_LEVEL_VERBOSE);
}
} catch (e) {
this._log(`Error while scanning conflicted files: ${e}`, LOG_LEVEL_NOTICE);
this._log(e, LOG_LEVEL_VERBOSE);
return false;
}
return true;
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.handleOnScanningStartupIssues(this._allScanStat.bind(this));
services.appLifecycle.handleOnInitialise(this._everyOnloadStart.bind(this));
services.conflict.handleResolveByUserInteraction(this._anyResolveConflictByUI.bind(this));
services.appLifecycle.onScanningStartupIssues.addHandler(this._allScanStat.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
services.conflict.resolveByUserInteraction.addHandler(this._anyResolveConflictByUI.bind(this));
}
}

@@ -15,7 +15,6 @@ import {
hiddenFilesEventCount,
hiddenFilesProcessingCount,
type LogEntry,
logStore,
logMessages,
} from "../../lib/src/mock_and_interop/stores.ts";
import { eventHub } from "../../lib/src/hub/hub.ts";
@@ -28,35 +27,37 @@ import {
import { AbstractObsidianModule } from "../AbstractObsidianModule.ts";
import { addIcon, normalizePath, Notice } from "../../deps.ts";
import { LOG_LEVEL_NOTICE, setGlobalLogFunction } from "octagonal-wheels/common/logger";
import { QueueProcessor } from "octagonal-wheels/concurrency/processor";
import { LogPaneView, VIEW_TYPE_LOG } from "./Log/LogPaneView.ts";
import { serialized } from "octagonal-wheels/concurrency/lock";
import { $msg } from "src/lib/src/common/i18n.ts";
import { P2PLogCollector } from "../../lib/src/replication/trystero/P2PReplicatorCore.ts";
import type { LiveSyncCore } from "../../main.ts";
import { LiveSyncError } from "@/lib/src/common/LSError.ts";
import { isValidPath } from "@/common/utils.ts";
import {
isValidFilenameInAndroid,
isValidFilenameInDarwin,
isValidFilenameInWidows,
} from "@/lib/src/string_and_binary/path.ts";

// This module cannot be a core module because it depends on the Obsidian UI.

// DI the log again.
const recentLogEntries = reactiveSource<LogEntry[]>([]);
setGlobalLogFunction((message: any, level?: number, key?: string) => {
const messageX =
message instanceof Error
? new LiveSyncError("[Error Logged]: " + message.message, { cause: message })
: message;
const entry = { message: messageX, level, key } as LogEntry;
logStore.enqueue(entry);
recentLogEntries.value = [...recentLogEntries.value, entry];
});
let recentLogs = [] as string[];

// Recent log splicer
const recentLogProcessor = new QueueProcessor(
(logs: string[]) => {
recentLogs = [...recentLogs, ...logs].splice(-200);
logMessages.value = recentLogs;
},
{ batchSize: 25, delay: 10, suspended: false, concurrentLimit: 1 }
).resumePipeLine();
function addLog(log: string) {
recentLogs = [...recentLogs, log].splice(-200);
logMessages.value = recentLogs;
}
// logStore.intercept(e => e.slice(Math.min(e.length - 200, 0)));

const showDebugLog = false;
@@ -229,19 +230,45 @@ export class ModuleLog extends AbstractObsidianModule {
}

async getActiveFileStatus() {
const reason = [] as string[];
const reasonWarn = [] as string[];
const thisFile = this.app.workspace.getActiveFile();
if (!thisFile) return "";
const validPath = isValidPath(thisFile.path);
if (!validPath) {
reason.push("This file has an invalid path under the current settings");
} else {
// The most narrow check: Filename validity on Windows
const validOnWindows = isValidFilenameInWidows(thisFile.name);
const validOnDarwin = isValidFilenameInDarwin(thisFile.name);
const validOnAndroid = isValidFilenameInAndroid(thisFile.name);
const labels = [];
if (!validOnWindows) labels.push("🪟");
if (!validOnDarwin) labels.push("🍎");
if (!validOnAndroid) labels.push("🤖");
if (labels.length > 0) {
reasonWarn.push("Some platforms may be unable to process this file correctly: " + labels.join(" "));
}
}
// Case Sensitivity
if (this.services.setting.shouldCheckCaseInsensitively()) {
const f = this.core.storageAccess
.getFiles()
.map((e) => e.path)
.filter((e) => e.toLowerCase() == thisFile.path.toLowerCase());
if (f.length > 1) return "Not synchronised: There are multiple files with the same name";
if (f.length > 1) {
reason.push("There are multiple files with the same name (case-insensitive match)");
}
}
if (!(await this.services.vault.isTargetFile(thisFile.path))) return "Not synchronised: not a target file";
if (this.services.vault.isFileSizeTooLarge(thisFile.stat.size)) return "Not synchronised: File size exceeded";
return "";
if (!(await this.services.vault.isTargetFile(thisFile.path))) {
reason.push("This file is ignored by the ignore rules");
}
if (this.services.vault.isFileSizeTooLarge(thisFile.stat.size)) {
reason.push("This file size exceeds the configured limit");
}
const result = reason.length > 0 ? "Not synchronised: " + reason.join(", ") : "";
const warnResult = reasonWarn.length > 0 ? "Warning: " + reasonWarn.join(", ") : "";
return [result, warnResult].filter((e) => e).join("\n");
}
async setFileStatus() {
const fileStatus = await this.getActiveFileStatus();
@@ -341,16 +368,12 @@ export class ModuleLog extends AbstractObsidianModule {
return Promise.resolve(true);
}
private _everyOnloadAfterLoadSettings(): Promise<boolean> {
logStore
.pipeTo(
new QueueProcessor((logs) => logs.forEach((e) => this.__addLog(e.message, e.level, e.key)), {
suspended: false,
batchSize: 20,
concurrentLimit: 1,
delay: 0,
})
)
.startPipeline();
recentLogEntries.onChanged((entries) => {
if (entries.value.length === 0) return;
const newEntries = [...entries.value];
recentLogEntries.value = [];
newEntries.forEach((e) => this.__addLog(e.message, e.level, e.key));
});
eventHub.onEvent(EVENT_FILE_RENAMED, (data) => {
void this.setFileStatus();
});
@@ -432,7 +455,7 @@ export class ModuleLog extends AbstractObsidianModule {
if (this.settings?.writeLogToTheFile) {
this.writeLogToTheFile(now, vaultName, newMessage);
}
recentLogProcessor.enqueue(newMessage);
addLog(newMessage);
this.logLines.push({ ttl: now.getTime() + 3000, message: newMessage });

if (level >= LOG_LEVEL_NOTICE) {
@@ -472,9 +495,9 @@ export class ModuleLog extends AbstractObsidianModule {
}
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.handleOnInitialise(this._everyOnloadStart.bind(this));
services.appLifecycle.handleOnSettingLoaded(this._everyOnloadAfterLoadSettings.bind(this));
services.appLifecycle.handleOnLoaded(this._everyOnload.bind(this));
services.appLifecycle.handleOnBeforeUnload(this._allStartOnUnload.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
services.appLifecycle.onSettingLoaded.addHandler(this._everyOnloadAfterLoadSettings.bind(this));
services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
services.appLifecycle.onBeforeUnload.addHandler(this._allStartOnUnload.bind(this));
}
}

@@ -1,4 +1,4 @@
import { type TFile } from "obsidian";
import { type TFile } from "@/deps.ts";
import { eventHub } from "../../common/events.ts";
import { EVENT_REQUEST_SHOW_HISTORY } from "../../common/obsidianEvents.ts";
import type { FilePathWithPrefix, LoadedEntry, DocumentID } from "../../lib/src/common/types.ts";
@@ -52,6 +52,6 @@ export class ModuleObsidianDocumentHistory extends AbstractObsidianModule {
}
}
onBindFunction(core: typeof this.core, services: typeof core.services): void {
services.appLifecycle.handleOnInitialise(this._everyOnloadStart.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
}
}

@@ -14,7 +14,7 @@ import {
import { LOG_LEVEL_NOTICE, LOG_LEVEL_URGENT } from "octagonal-wheels/common/logger";
import { $msg, setLang } from "../../lib/src/common/i18n.ts";
import { isCloudantURI } from "../../lib/src/pouchdb/utils_couchdb.ts";
import { getLanguage } from "obsidian";
import { getLanguage } from "@/deps.ts";
import { SUPPORTED_I18N_LANGS, type I18N_LANGS } from "../../lib/src/common/rosetta.ts";
import { decryptString, encryptString } from "@/lib/src/encryption/stringEncryption.ts";
import type { LiveSyncCore } from "../../main.ts";
@@ -323,13 +323,13 @@ export class ModuleObsidianSettings extends AbstractObsidianModule {

onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
super.onBindFunction(core, services);
services.appLifecycle.handleLayoutReady(this._everyOnLayoutReady.bind(this));
services.setting.handleClearUsedPassphrase(this._clearUsedPassphrase.bind(this));
services.setting.handleDecryptSettings(this._decryptSettings.bind(this));
services.setting.handleAdjustSettings(this._adjustSettings.bind(this));
services.setting.handleLoadSettings(this._loadSettings.bind(this));
services.setting.handleCurrentSettings(this._currentSettings.bind(this));
services.setting.handleSaveDeviceAndVaultName(this._saveDeviceAndVaultName.bind(this));
services.setting.handleSaveSettingData(this._saveSettingData.bind(this));
services.appLifecycle.onLayoutReady.addHandler(this._everyOnLayoutReady.bind(this));
services.setting.clearUsedPassphrase.setHandler(this._clearUsedPassphrase.bind(this));
services.setting.decryptSettings.setHandler(this._decryptSettings.bind(this));
services.setting.adjustSettings.setHandler(this._adjustSettings.bind(this));
services.setting.loadSettings.setHandler(this._loadSettings.bind(this));
services.setting.currentSettings.setHandler(this._currentSettings.bind(this));
services.setting.saveDeviceAndVaultName.setHandler(this._saveDeviceAndVaultName.bind(this));
services.setting.saveSettingData.setHandler(this._saveSettingData.bind(this));
}
}

@@ -243,6 +243,6 @@ We can perform a command in this file.
}
}
onBindFunction(core: typeof this.plugin, services: typeof core.services): void {
services.appLifecycle.handleOnInitialise(this._everyOnloadStart.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
}
}

@@ -30,6 +30,6 @@ export class ModuleObsidianSettingDialogue extends AbstractObsidianModule {
return `${"appId" in this.app ? this.app.appId : ""}`;
}
onBindFunction(core: typeof this.plugin, services: typeof core.services): void {
services.appLifecycle.handleOnInitialise(this._everyOnloadStart.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
}
}

@@ -195,6 +195,6 @@ export class ModuleSetupObsidian extends AbstractObsidianModule {
// }

onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.handleOnLoaded(this._everyOnload.bind(this));
services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
}
}

@@ -6,7 +6,7 @@ import {
ButtonComponent,
type TextAreaComponent,
type ValueComponent,
} from "obsidian";
} from "@/deps.ts";
import { unique } from "octagonal-wheels/collection";
import {
LEVEL_ADVANCED,

@@ -22,6 +22,9 @@ export function paneAdvanced(this: ObsidianLiveSyncSettingTab, paneEl: HTMLEleme
new Setting(paneEl)
.setClass("wizardHidden")
.autoWireToggle("readChunksOnline", { onUpdate: this.onlyOnCouchDB });
new Setting(paneEl)
.setClass("wizardHidden")
.autoWireToggle("useOnlyLocalChunk", { onUpdate: this.onlyOnCouchDB });

new Setting(paneEl).setClass("wizardHidden").autoWireNumeric("concurrencyOfReadChunksOnline", {
clampMin: 10,

@@ -26,7 +26,13 @@ import { addPrefix, shouldBeIgnored, stripAllPrefixes } from "../../../lib/src/s
import { $msg } from "../../../lib/src/common/i18n.ts";
import { Semaphore } from "octagonal-wheels/concurrency/semaphore";
import { LiveSyncSetting as Setting } from "./LiveSyncSetting.ts";
import { EVENT_REQUEST_RUN_DOCTOR, EVENT_REQUEST_RUN_FIX_INCOMPLETE, eventHub } from "../../../common/events.ts";
import {
EVENT_ANALYSE_DB_USAGE,
EVENT_REQUEST_CHECK_REMOTE_SIZE,
EVENT_REQUEST_RUN_DOCTOR,
EVENT_REQUEST_RUN_FIX_INCOMPLETE,
eventHub,
} from "../../../common/events.ts";
import { ICHeader, ICXHeader, PSCHeader } from "../../../common/types.ts";
import { HiddenFileSync } from "../../../features/HiddenFileSync/CmdHiddenFileSync.ts";
import { EVENT_REQUEST_SHOW_HISTORY } from "../../../common/obsidianEvents.ts";
@@ -182,6 +188,24 @@ ${stringifyYaml({
}
})
);
new Setting(paneEl)
.setName("Analyse database usage")
.setDesc(
"Analyse database usage and generate a TSV report for diagnosis yourself. You can paste the generated report with any spreadsheet you like."
)
.addButton((button) =>
button.setButtonText("Analyse").onClick(() => {
eventHub.emitEvent(EVENT_ANALYSE_DB_USAGE);
})
);
new Setting(paneEl)
.setName("Reset notification threshold and check the remote database usage")
.setDesc("Reset the remote storage size threshold and check the remote storage size again.")
.addButton((button) =>
button.setButtonText("Check").onClick(() => {
eventHub.emitEvent(EVENT_REQUEST_CHECK_REMOTE_SIZE);
})
);
new Setting(paneEl).autoWireToggle("writeLogToTheFile");
});

@@ -1,4 +1,4 @@
import { LocalDatabaseMaintenance } from "../../../features/LocalDatabaseMainte/CmdLocalDatabaseMainte.ts";
import { EVENT_REQUEST_PERFORM_GC_V3, eventHub } from "@/common/events.ts";
import { LOG_LEVEL_NOTICE, Logger } from "../../../lib/src/common/logger.ts";
import { FlagFilesHumanReadable, FLAGMD_REDFLAG } from "../../../lib/src/common/types.ts";
import { fireAndForget } from "../../../lib/src/common/utils.ts";
@@ -187,92 +187,106 @@ export function paneMaintenance(
)
.addOnUpdate(this.onlyOnMinIO);
});
void addPanel(paneEl, "Garbage Collection (Beta2)", (e) => e, this.onlyOnP2POrCouchDB).then((paneEl) => {
void addPanel(paneEl, "Garbage Collection V3 (Beta)", (e) => e, this.onlyOnP2POrCouchDB).then((paneEl) => {
new Setting(paneEl)
.setName("Scan garbage")
.setDesc("Scan for garbage chunks in the database.")
.setName("Perform Garbage Collection")
.setDesc("Perform Garbage Collection to remove unused chunks and reduce database size.")
.addButton((button) =>
button
.setButtonText("Scan")
// .setWarning()
.setButtonText("Perform Garbage Collection")
.setDisabled(false)
.onClick(async () => {
await this.plugin
.getAddOn<LocalDatabaseMaintenance>(LocalDatabaseMaintenance.name)
?.trackChanges(false, true);
})
)
.addButton((button) =>
button.setButtonText("Rescan").onClick(async () => {
await this.plugin
.getAddOn<LocalDatabaseMaintenance>(LocalDatabaseMaintenance.name)
?.trackChanges(true, true);
})
);
new Setting(paneEl)
.setName("Collect garbage")
.setDesc("Remove all unused chunks from the local database.")
.addButton((button) =>
button
.setButtonText("Collect")
.setWarning()
.setDisabled(false)
.onClick(async () => {
await this.plugin
.getAddOn<LocalDatabaseMaintenance>(LocalDatabaseMaintenance.name)
?.performGC(true);
})
);
new Setting(paneEl)
.setName("Commit File Deletion")
.setDesc("Completely delete all deleted documents from the local database.")
.addButton((button) =>
button
.setButtonText("Delete")
.setWarning()
.setDisabled(false)
.onClick(async () => {
await this.plugin
.getAddOn<LocalDatabaseMaintenance>(LocalDatabaseMaintenance.name)
?.commitFileDeletion();
.onClick(() => {
this.closeSetting();
eventHub.emitEvent(EVENT_REQUEST_PERFORM_GC_V3);
})
);
});
void addPanel(paneEl, "Garbage Collection (Old and Experimental)", (e) => e, this.onlyOnP2POrCouchDB).then(
(paneEl) => {
new Setting(paneEl)
.setName("Remove all orphaned chunks")
.setDesc("Remove all orphaned chunks from the local database.")
.addButton((button) =>
button
.setButtonText("Remove")
.setWarning()
.setDisabled(false)
.onClick(async () => {
await this.plugin
.getAddOn<LocalDatabaseMaintenance>(LocalDatabaseMaintenance.name)
?.removeUnusedChunks();
})
);
// void addPanel(paneEl, "Garbage Collection (Beta2)", (e) => e, this.onlyOnP2POrCouchDB).then((paneEl) => {
//     new Setting(paneEl)
//         .setName("Scan garbage")
//         .setDesc("Scan for garbage chunks in the database.")
//         .addButton((button) =>
//             button
//                 .setButtonText("Scan")
//                 // .setWarning()
//                 .setDisabled(false)
//                 .onClick(async () => {
//                     await this.plugin
//                         .getAddOn<LocalDatabaseMaintenance>(LocalDatabaseMaintenance.name)
//                         ?.trackChanges(false, true);
//                 })
//         )
//         .addButton((button) =>
//             button.setButtonText("Rescan").onClick(async () => {
//                 await this.plugin
//                     .getAddOn<LocalDatabaseMaintenance>(LocalDatabaseMaintenance.name)
//                     ?.trackChanges(true, true);
//             })
//         );
//     new Setting(paneEl)
//         .setName("Collect garbage")
//         .setDesc("Remove all unused chunks from the local database.")
//         .addButton((button) =>
//             button
//                 .setButtonText("Collect")
//                 .setWarning()
//                 .setDisabled(false)
//                 .onClick(async () => {
//                     await this.plugin
//                         .getAddOn<LocalDatabaseMaintenance>(LocalDatabaseMaintenance.name)
//                         ?.performGC(true);
//                 })
//         );
//     new Setting(paneEl)
//         .setName("Commit File Deletion")
//         .setDesc("Completely delete all deleted documents from the local database.")
//         .addButton((button) =>
//             button
//                 .setButtonText("Delete")
//                 .setWarning()
//                 .setDisabled(false)
//                 .onClick(async () => {
//                     await this.plugin
//                         .getAddOn<LocalDatabaseMaintenance>(LocalDatabaseMaintenance.name)
//                         ?.commitFileDeletion();
//                 })
//         );
// });
// void addPanel(paneEl, "Garbage Collection (Old and Experimental)", (e) => e, this.onlyOnP2POrCouchDB).then(
//     (paneEl) => {
//         new Setting(paneEl)
//             .setName("Remove all orphaned chunks")
//             .setDesc("Remove all orphaned chunks from the local database.")
//             .addButton((button) =>
//                 button
//                     .setButtonText("Remove")
//                     .setWarning()
//                     .setDisabled(false)
//                     .onClick(async () => {
//                         await this.plugin
//                             .getAddOn<LocalDatabaseMaintenance>(LocalDatabaseMaintenance.name)
//                             ?.removeUnusedChunks();
//                     })
//             );

new Setting(paneEl)
.setName("Resurrect deleted chunks")
.setDesc(
"If you have deleted chunks before fully synchronised and missed some chunks, you possibly can resurrect them."
)
.addButton((button) =>
button
.setButtonText("Try resurrect")
.setWarning()
.setDisabled(false)
.onClick(async () => {
await this.plugin
.getAddOn<LocalDatabaseMaintenance>(LocalDatabaseMaintenance.name)
?.resurrectChunks();
})
);
}
);
//         new Setting(paneEl)
//             .setName("Resurrect deleted chunks")
//             .setDesc(
//                 "If you have deleted chunks before fully synchronised and missed some chunks, you possibly can resurrect them."
//             )
//             .addButton((button) =>
//                 button
//                     .setButtonText("Try resurrect")
//                     .setWarning()
//                     .setDisabled(false)
//                     .onClick(async () => {
//                         await this.plugin
//                             .getAddOn<LocalDatabaseMaintenance>(LocalDatabaseMaintenance.name)
//                             ?.resurrectChunks();
//                     })
//             );
//     }
// );

void addPanel(paneEl, "Rebuilding Operations (Remote Only)", () => {}, this.onlyOnCouchDBOrMinIO).then((paneEl) => {
|
||||
new Setting(paneEl)
|
||||
|
||||
@@ -173,7 +173,62 @@ export function panePatches(this: ObsidianLiveSyncSettingTab, paneEl: HTMLElemen
|
||||
void addPanel(paneEl, "Compatibility (Trouble addressed)").then((paneEl) => {
|
||||
new Setting(paneEl).autoWireToggle("disableCheckingConfigMismatch");
|
||||
});
|
||||
void addPanel(paneEl, "Remediation").then((paneEl) => {
|
||||
let dateEl: HTMLSpanElement;
|
||||
new Setting(paneEl)
|
||||
.addText((text) => {
|
||||
const updateDateText = () => {
|
||||
if (this.editingSettings.maxMTimeForReflectEvents == 0) {
|
||||
dateEl.textContent = `No limit configured`;
|
||||
} else {
|
||||
const date = new Date(this.editingSettings.maxMTimeForReflectEvents);
|
||||
dateEl.textContent = `Limit: ${date.toLocaleString()} (${this.editingSettings.maxMTimeForReflectEvents})`;
|
||||
}
|
||||
this.requestUpdate();
|
||||
};
|
||||
text.inputEl.before((dateEl = document.createElement("span")));
|
||||
text.inputEl.type = "datetime-local";
|
||||
if (this.editingSettings.maxMTimeForReflectEvents > 0) {
|
||||
const date = new Date(this.editingSettings.maxMTimeForReflectEvents);
|
||||
const isoString = date.toISOString().slice(0, 16);
|
||||
text.setValue(isoString);
|
||||
} else {
|
||||
text.setValue("");
|
||||
}
|
||||
text.onChange((value) => {
|
||||
if (value == "") {
|
||||
this.editingSettings.maxMTimeForReflectEvents = 0;
|
||||
updateDateText();
|
||||
return;
|
||||
}
|
||||
const date = new Date(value);
|
||||
if (!isNaN(date.getTime())) {
|
||||
this.editingSettings.maxMTimeForReflectEvents = date.getTime();
|
||||
}
|
||||
updateDateText();
|
||||
});
|
||||
updateDateText();
|
||||
return text;
|
||||
})
|
||||
.setAuto("maxMTimeForReflectEvents")
|
||||
.addApplyButton(["maxMTimeForReflectEvents"]);
|
||||
|
||||
this.addOnSaved("maxMTimeForReflectEvents", async (key) => {
|
||||
const buttons = ["Restart Now", "Later"] as const;
|
||||
const reboot = await this.plugin.confirm.askSelectStringDialogue(
|
||||
"Restarting Obsidian is strongly recommended. Until restart, some changes may not take effect, and display may be inconsistent. Are you sure to restart now?",
|
||||
buttons,
|
||||
{
|
||||
title: "Remediation Setting Changed",
|
||||
defaultAction: "Restart Now",
|
||||
}
|
||||
);
|
||||
if (reboot !== "Later") {
|
||||
Logger("Remediation setting changed. Restarting Obsidian...", LOG_LEVEL_NOTICE);
|
||||
this.services.appLifecycle.performRestart();
|
||||
}
|
||||
});
|
||||
});
|
||||
void addPanel(paneEl, "Remote Database Tweak (In sunset)").then((paneEl) => {
|
||||
// new Setting(paneEl).autoWireToggle("useEden").setClass("wizardHidden");
|
||||
// const onlyUsingEden = visibleOnly(() => this.isConfiguredAs("useEden", true));
|
||||
|
||||
@@ -12,7 +12,7 @@ import type { ObsidianLiveSyncSettingTab } from "./ObsidianLiveSyncSettingTab.ts
import type { PageFunctions } from "./SettingPane.ts";
import { visibleOnly } from "./SettingPane.ts";
import { DEFAULT_SETTINGS } from "../../../lib/src/common/types.ts";
import { request } from "obsidian";
import { request } from "@/deps.ts";
import { SetupManager, UserMode } from "../SetupManager.ts";
export function paneSetup(
this: ObsidianLiveSyncSettingTab,

@@ -93,6 +93,9 @@
keys: () => {
return Promise.resolve(Array.from(map.keys()));
},
get db() {
return Promise.resolve(this);
},
} as SimpleStore<any>;

const dummyPouch = new PouchDB<EntryDoc>("dummy");

@@ -10,6 +10,7 @@ import type {
import type { CustomRegExp } from "../../lib/src/common/utils";

export interface StorageAccess {
restoreState(): Promise<void>;
processWriteFile<T>(file: UXFileInfoStub | FilePathWithPrefix, proc: () => Promise<T>): Promise<T>;
processReadFile<T>(file: UXFileInfoStub | FilePathWithPrefix, proc: () => Promise<T>): Promise<T>;
isFileProcessing(file: UXFileInfoStub | FilePathWithPrefix): boolean;

@@ -16,6 +16,7 @@ import { AbstractModule } from "../AbstractModule.ts";
import { EVENT_PLATFORM_UNLOADED } from "../../lib/src/PlatformAPIs/base/APIBase.ts";
import type { InjectableServiceHub } from "../../lib/src/services/InjectableServices.ts";
import type { LiveSyncCore } from "../../main.ts";
import { initialiseWorkerModule } from "@/lib/src/worker/bgWorker.ts";

export class ModuleLiveSyncMain extends AbstractModule {
async _onLiveSyncReady() {
@@ -80,6 +81,7 @@ export class ModuleLiveSyncMain extends AbstractModule {
}

async _onLiveSyncLoad(): Promise<boolean> {
initialiseWorkerModule();
await this.services.appLifecycle.onWireUpEvents();
// debugger;
eventHub.emitEvent(EVENT_PLUGIN_LOADED, this.core);
@@ -206,17 +208,17 @@ export class ModuleLiveSyncMain extends AbstractModule {

onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
super.onBindFunction(core, services);
services.appLifecycle.handleIsSuspended(this._isSuspended.bind(this));
services.appLifecycle.handleSetSuspended(this._setSuspended.bind(this));
services.appLifecycle.handleIsReady(this._isReady.bind(this));
services.appLifecycle.handleMarkIsReady(this._markIsReady.bind(this));
services.appLifecycle.handleResetIsReady(this._resetIsReady.bind(this));
services.appLifecycle.handleHasUnloaded(this._isUnloaded.bind(this));
services.appLifecycle.handleIsReloadingScheduled(this._isReloadingScheduled.bind(this));
services.appLifecycle.handleOnReady(this._onLiveSyncReady.bind(this));
services.appLifecycle.handleOnWireUpEvents(this._wireUpEvents.bind(this));
services.appLifecycle.handleOnLoad(this._onLiveSyncLoad.bind(this));
services.appLifecycle.handleOnAppUnload(this._onLiveSyncUnload.bind(this));
services.setting.handleRealiseSetting(this._realizeSettingSyncMode.bind(this));
services.appLifecycle.isSuspended.setHandler(this._isSuspended.bind(this));
services.appLifecycle.setSuspended.setHandler(this._setSuspended.bind(this));
services.appLifecycle.isReady.setHandler(this._isReady.bind(this));
services.appLifecycle.markIsReady.setHandler(this._markIsReady.bind(this));
services.appLifecycle.resetIsReady.setHandler(this._resetIsReady.bind(this));
services.appLifecycle.hasUnloaded.setHandler(this._isUnloaded.bind(this));
services.appLifecycle.isReloadingScheduled.setHandler(this._isReloadingScheduled.bind(this));
services.appLifecycle.onReady.addHandler(this._onLiveSyncReady.bind(this));
services.appLifecycle.onWireUpEvents.addHandler(this._wireUpEvents.bind(this));
services.appLifecycle.onLoad.addHandler(this._onLiveSyncLoad.bind(this));
services.appLifecycle.onAppUnload.addHandler(this._onLiveSyncUnload.bind(this));
services.setting.realiseSetting.setHandler(this._realizeSettingSyncMode.bind(this));
}
}

@@ -1,45 +0,0 @@
# Dynamic Load Modules

## Introduction

Self-hosted LiveSync has gradually but steadily become very feature-rich, and these features have created a very heavy `Main` class. This is very difficult to understand and maintain, especially for new or future contributors.
Since some of the features are not used by all users, we should limit the inter-dependencies between modules, and also the side effects between modules.
Hence, to make the code more readable and maintainable, I decided to split the code into multiple modules.

I also got a little greedy here: I now have another objective, which is to reduce the difficulty of porting to other platforms.

Therefore, almost all features of the plug-in can be implemented as modules, and the `Main` class is responsible for loading these modules.

## Modules

### Sorts

Modules can be sorted along two axes:

- `CoreModule` and `ObsidianModule`
- `Core`, `Essential`, and `Feature` ...

### How it works

After instantiating `Core` and the modules, you should call `injectModules`. Then each function is injected into its stub on the `Core` class according to the following rules:

| Function prefix | Description                                                       |
| --------------- | ----------------------------------------------------------------- |
| `$$`            | Completely overridden functions.                                   |
| `$all`          | Process all modules and return all results.                        |
| `$every`        | Process all modules until the first failure.                       |
| `$any`          | Process all modules until the first success.                       |
| `$`             | Other interceptive points. You should manually assign the module.  |

Note 1: the `Core` class should implement the same function as the module. If not, the module will be ignored.

Basically, each module has the `Core` class as its `core` property, and you should call any injected function via `this.core.$xxxxxx`. This rule also applies to functions a module implements itself, because other modules may inject the same function again for a specific purpose. A minimal sketch of the `$every` rule is shown below.
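
The following is only an illustrative sketch of `$every`-style dispatch as described in the table above; the handler type and function names are assumptions, not the plug-in's actual API.

```typescript
// Hypothetical handler type: each module contributes one async step.
type EveryHandler = () => Promise<boolean>;

// `$every` semantics: run every module's handler in order and stop at
// the first one that reports failure (returns false).
async function invokeEvery(handlers: EveryHandler[]): Promise<boolean> {
    for (const handler of handlers) {
        if (!(await handler())) return false;
    }
    return true;
}
```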

### CoreModule

This module is independent from Obsidian and can be used on any platform. However, it can call (or use) functions implemented in `ObsidianModule`.
To port the plug-in, you should implement shim functions for `ObsidianModule`, as sketched below.
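
A hypothetical shim, assuming a port would supply platform-native implementations of functions that `ObsidianModule` provides on Obsidian; the interface and member names are illustrative only.

```typescript
// Illustrative shim surface for a non-Obsidian port (names are assumptions).
interface PlatformShim {
    getVaultName(): string;
    isMobile(): boolean;
}

// A Node.js-flavoured implementation of the shim.
const nodeShim: PlatformShim = {
    getVaultName: () => process.env.VAULT_NAME ?? "default-vault",
    isMobile: () => false,
};
```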
|
||||
### ObsidianModule
|
||||
|
||||
(TBW)
|
||||
@@ -1,7 +1,9 @@
+import { ServiceContext, type ServiceInstances } from "@/lib/src/services/ServiceHub.ts";
 import {
     InjectableAPIService,
     InjectableAppLifecycleService,
     InjectableConflictService,
+    InjectableDatabaseEventService,
     InjectableDatabaseService,
     InjectableFileProcessingService,
     InjectablePathService,
@@ -17,79 +19,96 @@ import { InjectableServiceHub } from "../../lib/src/services/InjectableServices.
 import { ConfigServiceBrowserCompat } from "../../lib/src/services/Services.ts";
 import type ObsidianLiveSyncPlugin from "../../main.ts";
 import { ObsidianUIService } from "./ObsidianUIService.ts";
+import type { App, Plugin } from "@/deps";
+
+export class ObsidianServiceContext extends ServiceContext {
+    app: App;
+    plugin: Plugin;
+    liveSyncPlugin: ObsidianLiveSyncPlugin;
+    constructor(app: App, plugin: Plugin, liveSyncPlugin: ObsidianLiveSyncPlugin) {
+        super();
+        this.app = app;
+        this.plugin = plugin;
+        this.liveSyncPlugin = liveSyncPlugin;
+    }
+}
 
 // All Services will be migrated to be based on Plain Services, not Injectable Services.
 // This is a migration step.
 
-export class ObsidianAPIService extends InjectableAPIService {
+export class ObsidianAPIService extends InjectableAPIService<ObsidianServiceContext> {
     getPlatform(): string {
         return "obsidian";
     }
 }
-export class ObsidianPathService extends InjectablePathService {}
-export class ObsidianDatabaseService extends InjectableDatabaseService {}
+export class ObsidianPathService extends InjectablePathService<ObsidianServiceContext> {}
+export class ObsidianDatabaseService extends InjectableDatabaseService<ObsidianServiceContext> {}
+export class ObsidianDatabaseEventService extends InjectableDatabaseEventService<ObsidianServiceContext> {}
 
 // InjectableReplicatorService
-export class ObsidianReplicatorService extends InjectableReplicatorService {}
+export class ObsidianReplicatorService extends InjectableReplicatorService<ObsidianServiceContext> {}
 // InjectableFileProcessingService
-export class ObsidianFileProcessingService extends InjectableFileProcessingService {}
+export class ObsidianFileProcessingService extends InjectableFileProcessingService<ObsidianServiceContext> {}
 // InjectableReplicationService
-export class ObsidianReplicationService extends InjectableReplicationService {}
+export class ObsidianReplicationService extends InjectableReplicationService<ObsidianServiceContext> {}
 // InjectableRemoteService
-export class ObsidianRemoteService extends InjectableRemoteService {}
+export class ObsidianRemoteService extends InjectableRemoteService<ObsidianServiceContext> {}
 // InjectableConflictService
-export class ObsidianConflictService extends InjectableConflictService {}
+export class ObsidianConflictService extends InjectableConflictService<ObsidianServiceContext> {}
 // InjectableAppLifecycleService
-export class ObsidianAppLifecycleService extends InjectableAppLifecycleService {}
+export class ObsidianAppLifecycleService extends InjectableAppLifecycleService<ObsidianServiceContext> {}
 // InjectableSettingService
-export class ObsidianSettingService extends InjectableSettingService {}
+export class ObsidianSettingService extends InjectableSettingService<ObsidianServiceContext> {}
 // InjectableTweakValueService
-export class ObsidianTweakValueService extends InjectableTweakValueService {}
+export class ObsidianTweakValueService extends InjectableTweakValueService<ObsidianServiceContext> {}
 // InjectableVaultService
-export class ObsidianVaultService extends InjectableVaultService {}
+export class ObsidianVaultService extends InjectableVaultService<ObsidianServiceContext> {}
 // InjectableTestService
-export class ObsidianTestService extends InjectableTestService {}
-
-export class ObsidianConfigService extends ConfigServiceBrowserCompat {}
+export class ObsidianTestService extends InjectableTestService<ObsidianServiceContext> {}
+export class ObsidianConfigService extends ConfigServiceBrowserCompat<ObsidianServiceContext> {}
 
 // InjectableServiceHub
 
-export class ObsidianServiceHub extends InjectableServiceHub {
-    protected _api: ObsidianAPIService = new ObsidianAPIService(this._serviceBackend, this._throughHole);
-    protected _path: ObsidianPathService = new ObsidianPathService(this._serviceBackend, this._throughHole);
-    protected _database: ObsidianDatabaseService = new ObsidianDatabaseService(this._serviceBackend, this._throughHole);
-    protected _replicator: ObsidianReplicatorService = new ObsidianReplicatorService(
-        this._serviceBackend,
-        this._throughHole
-    );
-    protected _fileProcessing: ObsidianFileProcessingService = new ObsidianFileProcessingService(
-        this._serviceBackend,
-        this._throughHole
-    );
-    protected _replication: ObsidianReplicationService = new ObsidianReplicationService(
-        this._serviceBackend,
-        this._throughHole
-    );
-    protected _remote: ObsidianRemoteService = new ObsidianRemoteService(this._serviceBackend, this._throughHole);
-    protected _conflict: ObsidianConflictService = new ObsidianConflictService(this._serviceBackend, this._throughHole);
-    protected _appLifecycle: ObsidianAppLifecycleService = new ObsidianAppLifecycleService(
-        this._serviceBackend,
-        this._throughHole
-    );
-    protected _setting: ObsidianSettingService = new ObsidianSettingService(this._serviceBackend, this._throughHole);
-    protected _tweakValue: ObsidianTweakValueService = new ObsidianTweakValueService(
-        this._serviceBackend,
-        this._throughHole
-    );
-    protected _vault: ObsidianVaultService = new ObsidianVaultService(this._serviceBackend, this._throughHole);
-    protected _test: ObsidianTestService = new ObsidianTestService(this._serviceBackend, this._throughHole);
-
-    private _plugin: ObsidianLiveSyncPlugin;
+export class ObsidianServiceHub extends InjectableServiceHub<ObsidianServiceContext> {
     constructor(plugin: ObsidianLiveSyncPlugin) {
-        const config = new ObsidianConfigService();
-        super({
-            ui: new ObsidianUIService(plugin),
-        });
-        this._plugin = plugin;
+        const context = new ObsidianServiceContext(plugin.app, plugin, plugin);
+
+        const API = new ObsidianAPIService(context);
+        const appLifecycle = new ObsidianAppLifecycleService(context);
+        const conflict = new ObsidianConflictService(context);
+        const database = new ObsidianDatabaseService(context);
+        const fileProcessing = new ObsidianFileProcessingService(context);
+        const replication = new ObsidianReplicationService(context);
+        const replicator = new ObsidianReplicatorService(context);
+        const remote = new ObsidianRemoteService(context);
+        const setting = new ObsidianSettingService(context);
+        const tweakValue = new ObsidianTweakValueService(context);
+        const vault = new ObsidianVaultService(context);
+        const test = new ObsidianTestService(context);
+        const databaseEvents = new ObsidianDatabaseEventService(context);
+        const path = new ObsidianPathService(context);
+        const ui = new ObsidianUIService(context);
+        const config = new ObsidianConfigService(context, vault);
+        // Using 'satisfies' to ensure all services are provided
+        const serviceInstancesToInit = {
+            appLifecycle: appLifecycle,
+            conflict: conflict,
+            database: database,
+            databaseEvents: databaseEvents,
+            fileProcessing: fileProcessing,
+            replication: replication,
+            replicator: replicator,
+            remote: remote,
+            setting: setting,
+            tweakValue: tweakValue,
+            vault: vault,
+            test: test,
+            ui: ui,
+            path: path,
+            API: API,
+            config: config,
+        } satisfies Required<ServiceInstances<ObsidianServiceContext>>;
+
+        super(context, serviceInstancesToInit);
     }
 }

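The `satisfies Required<ServiceInstances<...>>` check above makes the compiler reject a hub that forgets (or misnames) a service, without widening the inferred type. A tiny self-contained illustration (the `Services` shape here is a hypothetical stand-in, not the plug-in's actual type):

```ts
// Hypothetical shape, for illustration only.
type Services = { ui: object; path: object; config: object };

const instances = {
    ui: {},
    path: {},
    config: {},
    // Removing any key above, or adding an unknown one, is a compile-time error.
} satisfies Required<Services>;
```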
@@ -1,19 +1,129 @@
 import { UIService } from "../../lib/src/services/Services";
-import type ObsidianLiveSyncPlugin from "../../main";
+import { Notice, type App, type Plugin } from "@/deps";
 import { SvelteDialogManager } from "../features/SetupWizard/ObsidianSvelteDialog";
 import DialogueToCopy from "../../lib/src/UI/dialogues/DialogueToCopy.svelte";
+import type { ObsidianServiceContext } from "./ObsidianServices";
+import type ObsidianLiveSyncPlugin from "@/main";
+import type { Confirm } from "@/lib/src/interfaces/Confirm";
+import {
+    askSelectString,
+    askString,
+    askYesNo,
+    confirmWithMessage,
+    confirmWithMessageWithWideButton,
+} from "../coreObsidian/UILib/dialogs";
+import { $msg } from "@/lib/src/common/i18n";
+import { disposeMemoObject, memoIfNotExist, memoObject, retrieveMemoObject, scheduleTask } from "@/common/utils";
+export class ObsidianConfirm implements Confirm {
+    private _app: App;
+    private _plugin: Plugin;
+    constructor(app: App, plugin: Plugin) {
+        this._app = app;
+        this._plugin = plugin;
+    }
+    askYesNo(message: string): Promise<"yes" | "no"> {
+        return askYesNo(this._app, message);
+    }
+    askString(title: string, key: string, placeholder: string, isPassword: boolean = false): Promise<string | false> {
+        return askString(this._app, title, key, placeholder, isPassword);
+    }
+
+    async askYesNoDialog(
+        message: string,
+        opt: { title?: string; defaultOption?: "Yes" | "No"; timeout?: number } = { title: "Confirmation" }
+    ): Promise<"yes" | "no"> {
+        const defaultTitle = $msg("moduleInputUIObsidian.defaultTitleConfirmation");
+        const yesLabel = $msg("moduleInputUIObsidian.optionYes");
+        const noLabel = $msg("moduleInputUIObsidian.optionNo");
+        const defaultOption = opt.defaultOption === "Yes" ? yesLabel : noLabel;
+        const ret = await confirmWithMessageWithWideButton(
+            this._plugin,
+            opt.title || defaultTitle,
+            message,
+            [yesLabel, noLabel],
+            defaultOption,
+            opt.timeout
+        );
+        return ret === yesLabel ? "yes" : "no";
+    }
+
+    askSelectString(message: string, items: string[]): Promise<string> {
+        return askSelectString(this._app, message, items);
+    }
+
+    askSelectStringDialogue<T extends readonly string[]>(
+        message: string,
+        buttons: T,
+        opt: { title?: string; defaultAction: T[number]; timeout?: number }
+    ): Promise<T[number] | false> {
+        const defaultTitle = $msg("moduleInputUIObsidian.defaultTitleSelect");
+        return confirmWithMessageWithWideButton(
+            this._plugin,
+            opt.title || defaultTitle,
+            message,
+            buttons,
+            opt.defaultAction,
+            opt.timeout
+        );
+    }
+
+    askInPopup(key: string, dialogText: string, anchorCallback: (anchor: HTMLAnchorElement) => void) {
+        const fragment = createFragment((doc) => {
+            const [beforeText, afterText] = dialogText.split("{HERE}", 2);
+            doc.createEl("span", undefined, (a) => {
+                a.appendText(beforeText);
+                a.appendChild(
+                    a.createEl("a", undefined, (anchor) => {
+                        anchorCallback(anchor);
+                    })
+                );
+                a.appendText(afterText);
+            });
+        });
+        const popupKey = "popup-" + key;
+        scheduleTask(popupKey, 1000, async () => {
+            const popup = await memoIfNotExist(popupKey, () => new Notice(fragment, 0));
+            const isShown = popup?.noticeEl?.isShown();
+            if (!isShown) {
+                memoObject(popupKey, new Notice(fragment, 0));
+            }
+            scheduleTask(popupKey + "-close", 20000, () => {
+                const popup = retrieveMemoObject<Notice>(popupKey);
+                if (!popup) return;
+                if (popup?.noticeEl?.isShown()) {
+                    popup.hide();
+                }
+                disposeMemoObject(popupKey);
+            });
+        });
+    }
+
+    confirmWithMessage(
+        title: string,
+        contentMd: string,
+        buttons: string[],
+        defaultAction: (typeof buttons)[number],
+        timeout?: number
+    ): Promise<(typeof buttons)[number] | false> {
+        return confirmWithMessage(this._plugin, title, contentMd, buttons, defaultAction, timeout);
+    }
+}
 
-export class ObsidianUIService extends UIService {
+export class ObsidianUIService extends UIService<ObsidianServiceContext> {
     private _dialogManager: SvelteDialogManager;
-    private _plugin: ObsidianLiveSyncPlugin;
+    private _plugin: Plugin;
+    private _liveSyncPlugin: ObsidianLiveSyncPlugin;
+    private _confirmInstance: ObsidianConfirm;
     get dialogManager() {
         return this._dialogManager;
     }
-    constructor(plugin: ObsidianLiveSyncPlugin) {
-        super();
-        this._dialogManager = new SvelteDialogManager(plugin);
-        this._plugin = plugin;
+    constructor(context: ObsidianServiceContext) {
+        super(context);
+        this._liveSyncPlugin = context.liveSyncPlugin;
+        this._dialogManager = new SvelteDialogManager(this._liveSyncPlugin);
+        this._plugin = context.plugin;
+        this._confirmInstance = new ObsidianConfirm(this._plugin.app, this._plugin);
     }
 
     async promptCopyToClipboard(title: string, value: string): Promise<boolean> {
         const param = {
             title: title,
@@ -25,16 +135,22 @@ export class ObsidianUIService extends UIService {
         }
         return true;
     }
 
     showMarkdownDialog<T extends string[]>(
         title: string,
         contentMD: string,
         buttons: T,
         defaultAction?: (typeof buttons)[number]
     ): Promise<(typeof buttons)[number] | false> {
-        return this._plugin.confirm.askSelectStringDialogue(contentMD, buttons, {
+        // TODO: implement `confirm` to this service
+        return this._liveSyncPlugin.confirm.askSelectStringDialogue(contentMD, buttons, {
             title,
             defaultAction: defaultAction ?? buttons[0],
             timeout: 0,
         });
     }
+
+    get confirm(): Confirm {
+        return this._confirmInstance;
+    }
 }

test/harness/harness.ts (Normal file, 147 lines)
@@ -0,0 +1,147 @@
import { App } from "@/deps.ts";
import ObsidianLiveSyncPlugin from "@/main";
import { DEFAULT_SETTINGS, type ObsidianLiveSyncSettings } from "@/lib/src/common/types";
import { LOG_LEVEL_VERBOSE, setGlobalLogFunction } from "@lib/common/logger";
import { SettingCache } from "./obsidian-mock";
import { delay, promiseWithResolvers } from "octagonal-wheels/promises";
import { EVENT_LAYOUT_READY, eventHub } from "@/common/events";
import { EVENT_PLATFORM_UNLOADED } from "@/lib/src/PlatformAPIs/base/APIBase";
import { env } from "../suite/variables";

export type LiveSyncHarness = {
    app: App;
    plugin: ObsidianLiveSyncPlugin;
    dispose: () => Promise<void>;
    disposalPromise: Promise<void>;
    isDisposed: () => boolean;
};
const isLiveSyncLogEnabled = env?.PRINT_LIVESYNC_LOGS === "true";
function overrideLogFunction(vaultName: string) {
    setGlobalLogFunction((msg, level, key) => {
        if (!isLiveSyncLogEnabled) {
            return;
        }
        if (level && level < LOG_LEVEL_VERBOSE) {
            return;
        }
        if (msg instanceof Error) {
            console.error(msg.stack);
        } else {
            console.log(
                `[${vaultName}] :: [${key ?? "Global"}][${level ?? 1}]: ${msg instanceof Error ? msg.stack : msg}`
            );
        }
    });
}

export async function generateHarness(
    paramVaultName?: string,
    settings?: Partial<ObsidianLiveSyncSettings>
): Promise<LiveSyncHarness> {
    // return await serialized("harness-generation-lock", async () => {
    // Dispose previous harness to avoid multiple harness running at the same time
    // if (previousHarness && !previousHarness.isDisposed()) {
    //     console.log(`Previous harness detected, waiting for disposal...`);
    //     await previousHarness.disposalPromise;
    //     previousHarness = null;
    //     await delay(100);
    // }
    const vaultName = paramVaultName ?? "TestVault" + Date.now();
    const setting = {
        ...DEFAULT_SETTINGS,
        ...settings,
    };
    overrideLogFunction(vaultName);
    //@ts-ignore Mocked in harness
    const app = new App(vaultName);
    // setting and vault name
    SettingCache.set(app, setting);
    SettingCache.set(app.vault, vaultName);

    //@ts-ignore
    const manifest_version = `${MANIFEST_VERSION || "0.0.0-harness"}`;
    overrideLogFunction(vaultName);
    const manifest = {
        id: "obsidian-livesync",
        name: "Self-hosted LiveSync (Harnessed)",
        version: manifest_version,
        minAppVersion: "0.15.0",
        description: "Testing",
        author: "vrtmrz",
        authorUrl: "",
        isDesktopOnly: false,
    };

    const plugin = new ObsidianLiveSyncPlugin(app, manifest);
    overrideLogFunction(vaultName);
    // Initial load
    await delay(100);
    await plugin.onload();
    let isDisposed = false;
    const waitPromise = promiseWithResolvers<void>();
    eventHub.once(EVENT_PLATFORM_UNLOADED, async () => {
        console.log(`Harness for vault '${vaultName}' disposed.`);
        await delay(100);
        eventHub.offAll();
        isDisposed = true;
        waitPromise.resolve();
    });
    eventHub.once(EVENT_LAYOUT_READY, () => {
        plugin.app.vault.trigger("layout-ready");
    });
    const harness: LiveSyncHarness = {
        app,
        plugin,
        dispose: async () => {
            await plugin.onunload();
            return waitPromise.promise;
        },
        disposalPromise: waitPromise.promise,
        isDisposed: () => isDisposed,
    };
    await delay(100);
    console.log(`Harness for vault '${vaultName}' is ready.`);
    // previousHarness = harness;
    return harness;
}
export async function waitForReady(harness: LiveSyncHarness): Promise<void> {
    for (let i = 0; i < 10; i++) {
        if (harness.plugin.services.appLifecycle.isReady()) {
            console.log("App Lifecycle is ready");
            return;
        }
        await delay(100);
    }
    throw new Error(`Initialisation Timed out!`);
}

export async function waitForIdle(harness: LiveSyncHarness): Promise<void> {
    for (let i = 0; i < 20; i++) {
        await delay(25);
        const processing =
            harness.plugin.databaseQueueCount.value +
            harness.plugin.processingFileEventCount.value +
            harness.plugin.pendingFileEventCount.value +
            harness.plugin.totalQueued.value +
            harness.plugin.batched.value +
            harness.plugin.processing.value +
            harness.plugin.storageApplyingCount.value;

        if (processing === 0) {
            if (i > 0) {
                console.log(`Idle after ${i} loops`);
            }
            return;
        }
    }
}
export async function waitForClosed(harness: LiveSyncHarness): Promise<void> {
    await delay(100);
    for (let i = 0; i < 10; i++) {
        if (harness.plugin.services.appLifecycle.hasUnloaded()) {
            console.log("App Lifecycle has unloaded");
            return;
        }
        await delay(100);
    }
}
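A sketch of how these helpers might be combined in a test. The vitest-style `test` wrapper and the import path are illustrative assumptions; only `generateHarness`, `waitForReady`, `waitForIdle`, and `dispose` come from the file above:

```ts
import { test } from "vitest"; // assumed runner; adjust to the suite's actual setup
import { generateHarness, waitForReady, waitForIdle } from "../harness/harness";

test("plugin processes a created note", async () => {
    const harness = await generateHarness("HarnessDemoVault");
    await waitForReady(harness);

    // Drive the mocked vault and let the plugin settle before asserting.
    await harness.app.vault.create("demo.md", "# hello");
    await waitForIdle(harness);

    await harness.dispose();
});
```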
test/harness/obsidian-mock.ts (Normal file, 995 lines)
@@ -0,0 +1,995 @@
/* eslint-disable @typescript-eslint/no-unsafe-function-type */
export const SettingCache = new Map<any, any>();
//@ts-ignore obsidian global
globalThis.activeDocument = document;

declare const hostPlatform: string | undefined;

// import { interceptFetchForLogging } from "../harness/utils/intercept";
// interceptFetchForLogging();
globalThis.process = {
    platform: (hostPlatform || "win32") as any,
} as any;
console.warn(`[Obsidian Mock] process.platform is set to ${globalThis.process.platform}`);
export class TAbstractFile {
    vault: Vault;
    path: string;
    name: string;
    parent: TFolder | null;

    constructor(vault: Vault, path: string, name: string, parent: TFolder | null) {
        this.vault = vault;
        this.path = path;
        this.name = name;
        this.parent = parent;
    }
}

export class TFile extends TAbstractFile {
    stat: {
        ctime: number;
        mtime: number;
        size: number;
    } = { ctime: Date.now(), mtime: Date.now(), size: 0 };

    get extension(): string {
        return this.name.split(".").pop() || "";
    }

    get basename(): string {
        const parts = this.name.split(".");
        if (parts.length > 1) parts.pop();
        return parts.join(".");
    }
}

export class TFolder extends TAbstractFile {
    children: TAbstractFile[] = [];

    get isRoot(): boolean {
        return this.path === "" || this.path === "/";
    }
}

export class EventRef {}

// class StorageMap<T, U> extends Map<T, U> {
//     constructor(saveName?: string) {
//         super();
//         if (saveName) {
//             this.saveName = saveName;
//             void this.restore(saveName);
//         }
//     }
//     private saveName: string = "";
//     async restore(saveName: string) {
//         this.saveName = saveName;
//         const db = await OpenKeyValueDatabase(saveName);
//         const data = await db.get<{ [key: string]: U }>("data");
//         if (data) {
//             for (const key of Object.keys(data)) {
//                 this.set(key as any as T, data[key]);
//             }
//         }
//         db.close();
//         return this;
//     }
//     saving: boolean = false;
//     async save() {
//         if (this.saveName === "") {
//             return;
//         }
//         if (this.saving) {
//             return;
//         }
//         try {
//             this.saving = true;
//             const db = await OpenKeyValueDatabase(this.saveName);
//             const data: { [key: string]: U } = {};
//             for (const [key, value] of this.entries()) {
//                 data[key as any as string] = value;
//             }
//             await db.set("data", data);
//             db.close();
//         } finally {
//             this.saving = false;
//         }
//     }
//     set(key: T, value: U): this {
//         super.set(key, value);
//         void this.save();
//         return this;
//     }
// }

export class Vault {
    adapter: DataAdapter;
    vaultName: string = "MockVault";
    private files: Map<string, TAbstractFile> = new Map();
    private contents: Map<string, string | ArrayBuffer> = new Map();
    private root: TFolder;
    private listeners: Map<string, Set<Function>> = new Map();

    constructor(vaultName?: string) {
        if (vaultName) {
            this.vaultName = vaultName;
        }
        this.files = new Map();
        this.contents = new Map();
        this.adapter = new DataAdapter(this);
        this.root = new TFolder(this, "", "", null);
        this.files.set("", this.root);
    }

    getAbstractFileByPath(path: string): TAbstractFile | null {
        if (path === "/") path = "";
        const file = this.files.get(path);
        return file || null;
    }
    getAbstractFileByPathInsensitive(path: string): TAbstractFile | null {
        const lowerPath = path.toLowerCase();
        for (const [p, file] of this.files.entries()) {
            if (p.toLowerCase() === lowerPath) {
                return file;
            }
        }
        return null;
    }

    getFiles(): TFile[] {
        return Array.from(this.files.values()).filter((f) => f instanceof TFile);
    }

    async _adapterRead(path: string): Promise<string | null> {
        await Promise.resolve();
        const file = this.contents.get(path);
        if (typeof file === "string") {
            return file;
        }
        if (file instanceof ArrayBuffer) {
            return new TextDecoder().decode(file);
        }
        return null;
    }

    async _adapterReadBinary(path: string): Promise<ArrayBuffer | null> {
        await Promise.resolve();
        const file = this.contents.get(path);
        if (file instanceof ArrayBuffer) {
            return file;
        }
        if (typeof file === "string") {
            return new TextEncoder().encode(file).buffer;
        }
        return null;
    }

    async read(file: TFile): Promise<string> {
        await Promise.resolve();
        const content = this.contents.get(file.path);
        if (typeof content === "string") return content;
        if (content instanceof ArrayBuffer) {
            return new TextDecoder().decode(content);
        }
        return "";
    }

    async readBinary(file: TFile): Promise<ArrayBuffer> {
        await Promise.resolve();
        const content = this.contents.get(file.path);
        if (content instanceof ArrayBuffer) return content;
        if (typeof content === "string") {
            return new TextEncoder().encode(content).buffer;
        }
        return new ArrayBuffer(0);
    }

    private async _create(path: string, data: string | ArrayBuffer, options?: DataWriteOptions): Promise<TFile> {
        if (this.files.has(path)) throw new Error("File already exists");
        const name = path.split("/").pop() || "";
        const parentPath = path.includes("/") ? path.substring(0, path.lastIndexOf("/")) : "";
        let parent = this.getAbstractFileByPath(parentPath);
        if (!parent || !(parent instanceof TFolder)) {
            parent = await this.createFolder(parentPath);
        }

        const file = new TFile(this, path, name, parent as TFolder);
        file.stat.size = typeof data === "string" ? new TextEncoder().encode(data).length : data.byteLength;
        file.stat.ctime = options?.ctime ?? Date.now();
        file.stat.mtime = options?.mtime ?? Date.now();
        this.files.set(path, file);
        this.contents.set(path, data);
        (parent as TFolder).children.push(file);
        // console.dir(this.files);

        this.trigger("create", file);
        return file;
    }
    async create(path: string, data: string, options?: DataWriteOptions): Promise<TFile> {
        return await this._create(path, data, options);
    }
    async createBinary(path: string, data: ArrayBuffer, options?: DataWriteOptions): Promise<TFile> {
        return await this._create(path, data, options);
    }

    async _modify(file: TFile, data: string | ArrayBuffer, options?: DataWriteOptions): Promise<void> {
        await Promise.resolve();
        this.contents.set(file.path, data);
        file.stat.mtime = options?.mtime ?? Date.now();
        file.stat.ctime = options?.ctime ?? file.stat.ctime ?? Date.now();
        file.stat.size = typeof data === "string" ? data.length : data.byteLength;
        console.warn(`[Obsidian Mock ${this.vaultName}] Modified file at path: '${file.path}'`);
        this.files.set(file.path, file);
        this.trigger("modify", file);
    }
    async modify(file: TFile, data: string, options?: DataWriteOptions): Promise<void> {
        return await this._modify(file, data, options);
    }
    async modifyBinary(file: TFile, data: ArrayBuffer, options?: DataWriteOptions): Promise<void> {
        return await this._modify(file, data, options);
    }

    async createFolder(path: string): Promise<TFolder> {
        if (path === "") return this.root;
        if (this.files.has(path)) {
            const f = this.files.get(path);
            if (f instanceof TFolder) return f;
            throw new Error("Path is a file");
        }
        const name = path.split("/").pop() || "";
        const parentPath = path.includes("/") ? path.substring(0, path.lastIndexOf("/")) : "";
        const parent = await this.createFolder(parentPath);
        const folder = new TFolder(this, path, name, parent);
        this.files.set(path, folder);
        parent.children.push(folder);
        return folder;
    }

    async delete(file: TAbstractFile, force?: boolean): Promise<void> {
        await Promise.resolve();
        this.files.delete(file.path);
        this.contents.delete(file.path);
        if (file.parent) {
            file.parent.children = file.parent.children.filter((c) => c !== file);
        }
        this.trigger("delete", file);
    }

    async trash(file: TAbstractFile, system: boolean): Promise<void> {
        await Promise.resolve();
        return this.delete(file);
    }

    on(name: string, callback: (...args: any[]) => any, ctx?: any): EventRef {
        if (!this.listeners.has(name)) {
            this.listeners.set(name, new Set());
        }
        const boundCallback = ctx ? callback.bind(ctx) : callback;
        this.listeners.get(name)!.add(boundCallback);
        return { name, callback: boundCallback } as any;
    }

    off(name: string, callback: any) {
        this.listeners.get(name)?.delete(callback);
    }

    offref(ref: EventRef) {
        const { name, callback } = ref as any;
        this.off(name, callback);
    }

    trigger(name: string, ...args: any[]) {
        this.listeners.get(name)?.forEach((cb) => cb(...args));
    }

    getName(): string {
        return this.vaultName;
    }
}

export class DataAdapter {
    vault: Vault;
    constructor(vault: Vault) {
        this.vault = vault;
    }
    stat(path: string): Promise<{ ctime: number; mtime: number; size: number }> {
        const file = this.vault.getAbstractFileByPath(path);
        if (file && file instanceof TFile) {
            return Promise.resolve({
                ctime: file.stat.ctime,
                mtime: file.stat.mtime,
                size: file.stat.size,
            });
        }
        return Promise.reject(new Error("File not found"));
    }
    async list(path: string): Promise<{ files: string[]; folders: string[] }> {
        await Promise.resolve();
        const abstractFile = this.vault.getAbstractFileByPath(path);
        if (abstractFile instanceof TFolder) {
            const files: string[] = [];
            const folders: string[] = [];
            for (const child of abstractFile.children) {
                if (child instanceof TFile) files.push(child.path);
                else if (child instanceof TFolder) folders.push(child.path);
            }
            return { files, folders };
        }
        return { files: [], folders: [] };
    }
    async _write(path: string, data: string | ArrayBuffer, options?: DataWriteOptions): Promise<void> {
        const file = this.vault.getAbstractFileByPath(path);
        if (file instanceof TFile) {
            if (typeof data === "string") {
                await this.vault.modify(file, data, options);
            } else {
                await this.vault.modifyBinary(file, data, options);
            }
        } else {
            if (typeof data === "string") {
                await this.vault.create(path, data, options);
            } else {
                await this.vault.createBinary(path, data, options);
            }
        }
    }
    async write(path: string, data: string, options?: DataWriteOptions): Promise<void> {
        return await this._write(path, data, options);
    }
    async writeBinary(path: string, data: ArrayBuffer, options?: DataWriteOptions): Promise<void> {
        return await this._write(path, data, options);
    }

    async read(path: string): Promise<string> {
        const file = this.vault.getAbstractFileByPath(path);
        if (file instanceof TFile) return await this.vault.read(file);
        throw new Error("File not found");
    }
    async readBinary(path: string): Promise<ArrayBuffer> {
        const file = this.vault.getAbstractFileByPath(path);
        if (file instanceof TFile) return await this.vault.readBinary(file);
        throw new Error("File not found");
    }

    async exists(path: string): Promise<boolean> {
        await Promise.resolve();
        return this.vault.getAbstractFileByPath(path) !== null;
    }
    async mkdir(path: string): Promise<void> {
        await this.vault.createFolder(path);
    }
    async remove(path: string): Promise<void> {
        const file = this.vault.getAbstractFileByPath(path);
        if (file) await this.vault.delete(file);
    }
}

class Events {
    _eventEmitter = new EventTarget();
    _events = new Map<any, any>();
    _eventTarget(cb: any) {
        const x = this._events.get(cb);
        if (x) {
            return x;
        }
        // Wrap the callback so listeners receive the CustomEvent's detail payload.
        const callback = (evt: any) => {
            cb(evt?.detail ?? undefined);
        };
        this._events.set(cb, callback);
        return callback;
    }
    on(name: string, cb: any, ctx?: any) {
        this._eventEmitter.addEventListener(name, this._eventTarget(cb));
    }
    trigger(name: string, args: any) {
        const evt = new CustomEvent(name, {
            detail: args,
        });
        this._eventEmitter.dispatchEvent(evt);
    }
}

class Workspace extends Events {
    getActiveFile() {
        return null;
    }
    getMostRecentLeaf() {
        return null;
    }

    onLayoutReady(cb: any) {
        // cb();
        // console.log("[Obsidian Mock] Workspace onLayoutReady registered");
        // this._eventEmitter.addEventListener("layout-ready", () => {
        //     console.log("[Obsidian Mock] Workspace layout-ready event triggered");
        setTimeout(() => {
            cb();
        }, 200);
        // });
    }
    getLeavesOfType() {
        return [];
    }
    getLeaf() {
        return { setViewState: () => Promise.resolve(), revealLeaf: () => Promise.resolve() };
    }
    revealLeaf() {
        return Promise.resolve();
    }
    containerEl: HTMLElement = document.createElement("div");
}
export class App {
    vaultName: string = "MockVault";
    constructor(vaultName?: string) {
        if (vaultName) {
            this.vaultName = vaultName;
        }
        this.vault = new Vault(this.vaultName);
    }
    vault: Vault;
    workspace: Workspace = new Workspace();
    metadataCache: any = {
        on: (name: string, cb: any, ctx?: any) => {},
        getFileCache: () => null,
    };
}

export class Plugin {
    app: App;
    manifest: any;
    settings: any;
    commands: Map<string, any> = new Map();
    constructor(app: App, manifest: any) {
        this.app = app;
        this.manifest = manifest;
    }
    async loadData(): Promise<any> {
        await Promise.resolve();
        return SettingCache.get(this.app) ?? {};
    }
    async saveData(data: any): Promise<void> {
        await Promise.resolve();
        SettingCache.set(this.app, data);
    }
    onload() {}
    onunload() {}
    addSettingTab(tab: any) {}
    addCommand(command: any) {
        this.commands.set(command.id, command);
    }
    addStatusBarItem() {
        return {
            setText: () => {},
            setClass: () => {},
            addClass: () => {},
        };
    }
    addRibbonIcon() {
        const icon = {
            setAttribute: () => icon,
            addClass: () => icon,
            onclick: () => {},
        };
        return icon;
    }
    registerView(type: string, creator: any) {}
    registerObsidianProtocolHandler(handler: any) {}
    registerEvent(handler: any) {}
    registerDomEvent(target: any, eventName: string, handler: any) {}
}

export class Notice {
    constructor(message: string) {
        console.log("Notice:", message);
    }
}

export class Modal {
    app: App;
    contentEl: HTMLElement;
    titleEl: HTMLElement;
    modalEl: HTMLElement;
    isOpen: boolean = false;

    constructor(app: App) {
        this.app = app;
        this.contentEl = document.createElement("div");
        this.contentEl.className = "modal-content";
        this.titleEl = document.createElement("div");
        this.titleEl.className = "modal-title";
        this.modalEl = document.createElement("div");
        this.modalEl.className = "modal";
        this.modalEl.style.display = "none";
        this.modalEl.appendChild(this.titleEl);
        this.modalEl.appendChild(this.contentEl);
    }
    open() {
        this.isOpen = true;
        this.modalEl.style.display = "block";
        if (!this.modalEl.parentElement) {
            document.body.appendChild(this.modalEl);
        }
        this.onOpen();
    }
    close() {
        this.isOpen = false;
        this.modalEl.style.display = "none";
        this.onClose();
    }
    onOpen() {}
    onClose() {}
    setPlaceholder(p: string) {}
    setTitle(t: string) {
        this.titleEl.textContent = t;
    }
}

export class PluginSettingTab {
    app: App;
    plugin: Plugin;
    containerEl: HTMLElement;
    constructor(app: App, plugin: Plugin) {
        this.app = app;
        this.plugin = plugin;
        this.containerEl = document.createElement("div");
    }
    display() {}
}

export function normalizePath(path: string): string {
    return path.replace(/\\/g, "/").replace(/\/+$/, "");
}

export const Platform = {
    isDesktop: true,
    isMobile: false,
};

export class Menu {
    addItem(cb: (item: MenuItem) => any) {
        cb(new MenuItem());
        return this;
    }
    showAtMouseEvent(evt: MouseEvent) {}
}
export class MenuItem {
    setTitle(title: string) {
        return this;
    }
    setIcon(icon: string) {
        return this;
    }
    onClick(cb: (evt: MouseEvent) => any) {
        return this;
    }
}
export class MenuSeparator {}

export class Component {
    load() {}
    unload() {}
}

export class ButtonComponent extends Component {
    buttonEl: HTMLButtonElement = document.createElement("button");
    private clickHandler: ((evt: MouseEvent) => any) | null = null;

    constructor() {
        super();
        this.buttonEl = document.createElement("button");
        this.buttonEl.type = "button";
    }

    setButtonText(text: string) {
        this.buttonEl.textContent = text;
        return this;
    }

    setCta() {
        this.buttonEl.classList.add("mod-cta");
        return this;
    }

    onClick(cb: (evt: MouseEvent) => any) {
        this.clickHandler = cb;
        this.buttonEl.removeEventListener("click", this.clickHandler);
        this.buttonEl.addEventListener("click", (evt) => cb(evt as MouseEvent));
        return this;
    }

    setClass(c: string) {
        this.buttonEl.classList.add(c);
        return this;
    }

    setTooltip(tooltip: string) {
        this.buttonEl.title = tooltip;
        return this;
    }

    setDisabled(disabled: boolean) {
        this.buttonEl.disabled = disabled;
        return this;
    }
}

export class TextComponent extends Component {
    inputEl: HTMLInputElement = document.createElement("input");
    private changeHandler: ((value: string) => any) | null = null;

    constructor() {
        super();
        this.inputEl = document.createElement("input");
        this.inputEl.type = "text";
    }

    onChange(cb: (value: string) => any) {
        this.changeHandler = cb;
        this.inputEl.removeEventListener("change", this.handleChange);
        this.inputEl.addEventListener("change", this.handleChange);
        this.inputEl.addEventListener("input", (evt) => {
            const target = evt.target as HTMLInputElement;
            cb(target.value);
        });
        return this;
    }

    private handleChange = (evt: Event) => {
        if (this.changeHandler) {
            const target = evt.target as HTMLInputElement;
            this.changeHandler(target.value);
        }
    };

    setValue(v: string) {
        this.inputEl.value = v;
        return this;
    }

    setPlaceholder(p: string) {
        this.inputEl.placeholder = p;
        return this;
    }

    setDisabled(disabled: boolean) {
        this.inputEl.disabled = disabled;
        return this;
    }
}

export class ToggleComponent extends Component {
    inputEl: HTMLInputElement = document.createElement("input");
    private changeHandler: ((value: boolean) => any) | null = null;

    constructor() {
        super();
        this.inputEl = document.createElement("input");
        this.inputEl.type = "checkbox";
    }

    onChange(cb: (value: boolean) => any) {
        this.changeHandler = cb;
        this.inputEl.addEventListener("change", (evt) => {
            const target = evt.target as HTMLInputElement;
            cb(target.checked);
        });
        return this;
    }

    setValue(v: boolean) {
        this.inputEl.checked = v;
        return this;
    }

    setDisabled(disabled: boolean) {
        this.inputEl.disabled = disabled;
        return this;
    }
}

export class DropdownComponent extends Component {
    selectEl: HTMLSelectElement = document.createElement("select");
    private changeHandler: ((value: string) => any) | null = null;

    constructor() {
        super();
        this.selectEl = document.createElement("select");
    }

    addOption(v: string, d: string) {
        const option = document.createElement("option");
        option.value = v;
        option.textContent = d;
        this.selectEl.appendChild(option);
        return this;
    }

    addOptions(o: Record<string, string>) {
        for (const [value, display] of Object.entries(o)) {
            this.addOption(value, display);
        }
        return this;
    }

    onChange(cb: (value: string) => any) {
        this.changeHandler = cb;
        this.selectEl.addEventListener("change", (evt) => {
            const target = evt.target as HTMLSelectElement;
            cb(target.value);
        });
        return this;
    }

    setValue(v: string) {
        this.selectEl.value = v;
        return this;
    }

    setDisabled(disabled: boolean) {
        this.selectEl.disabled = disabled;
        return this;
    }
}

export class SliderComponent extends Component {
    inputEl: HTMLInputElement = document.createElement("input");
    private changeHandler: ((value: number) => any) | null = null;

    constructor() {
        super();
        this.inputEl = document.createElement("input");
        this.inputEl.type = "range";
    }

    onChange(cb: (value: number) => any) {
        this.changeHandler = cb;
        this.inputEl.addEventListener("change", (evt) => {
            const target = evt.target as HTMLInputElement;
            cb(parseFloat(target.value));
        });
        this.inputEl.addEventListener("input", (evt) => {
            const target = evt.target as HTMLInputElement;
            cb(parseFloat(target.value));
        });
        return this;
    }

    setValue(v: number) {
        this.inputEl.value = String(v);
        return this;
    }

    setMin(min: number) {
        this.inputEl.min = String(min);
        return this;
    }

    setMax(max: number) {
        this.inputEl.max = String(max);
        return this;
    }

    setStep(step: number) {
        this.inputEl.step = String(step);
        return this;
    }

    setDisabled(disabled: boolean) {
        this.inputEl.disabled = disabled;
        return this;
    }
}

export class Setting {
    nameEl: HTMLElement;
    descEl: HTMLElement;
    controlEl: HTMLElement;
    infoEl: HTMLElement;

    constructor(containerEl: HTMLElement) {
        this.nameEl = containerEl.createDiv();
        this.descEl = containerEl.createDiv();
        this.controlEl = containerEl.createDiv();
        this.infoEl = containerEl.createDiv();
    }
    setName(name: string) {
        this.nameEl.setText(name);
        return this;
    }
    setDesc(desc: string) {
        this.descEl.setText(desc);
        return this;
    }
    setClass(c: string) {
        this.controlEl.addClass(c);
        return this;
    }
    addText(cb: (text: TextComponent) => any) {
        const component = new TextComponent();
        this.controlEl.appendChild(component.inputEl);
        cb(component);
        return this;
    }
    addToggle(cb: (toggle: ToggleComponent) => any) {
        const component = new ToggleComponent();
        cb(component);
        return this;
    }
    addButton(cb: (btn: ButtonComponent) => any) {
        const btn = new ButtonComponent();
        this.controlEl.appendChild(btn.buttonEl);
        cb(btn);
        return this;
    }
    addDropdown(cb: (dropdown: DropdownComponent) => any) {
        const component = new DropdownComponent();
        cb(component);
        return this;
    }
    addSlider(cb: (slider: SliderComponent) => any) {
        const component = new SliderComponent();
        cb(component);
        return this;
    }
}

// HTMLElement extensions
if (typeof HTMLElement !== "undefined") {
    const proto = HTMLElement.prototype as any;
    proto.createDiv = function (o?: any) {
        const div = document.createElement("div");
        if (o?.cls) div.addClass(o.cls);
        if (o?.text) div.setText(o.text);
        this.appendChild(div);
        return div;
    };
    proto.createEl = function (tag: string, o?: any) {
        const el = document.createElement(tag);
        if (o?.cls) el.addClass(o.cls);
        if (o?.text) el.setText(o.text);
        this.appendChild(el);
        return el;
    };
    proto.createSpan = function (o?: any) {
        return this.createEl("span", o);
    };
    proto.empty = function () {
        this.innerHTML = "";
    };
    proto.setText = function (t: string) {
        this.textContent = t;
    };
    proto.addClass = function (c: string) {
        this.classList.add(c);
    };
    proto.removeClass = function (c: string) {
        this.classList.remove(c);
    };
    proto.toggleClass = function (c: string, b: boolean) {
        this.classList.toggle(c, b);
    };
    proto.hasClass = function (c: string) {
        return this.classList.contains(c);
    };
}

export class Editor {}

export class FuzzySuggestModal<T> {
    constructor(app: App) {}
    setPlaceholder(p: string) {}
    open() {}
    close() {}
    private __dummy(_: T): never {
        throw new Error("Not implemented.");
    }
}
export class MarkdownRenderer {
    static render(app: App, md: string, el: HTMLElement, path: string, component: Component) {
        el.innerHTML = md;
        return Promise.resolve();
    }
}
export class MarkdownView {}
export class TextAreaComponent extends Component {}
export class ItemView {}
export class WorkspaceLeaf {}

export function sanitizeHTMLToDom(html: string) {
    const div = document.createElement("div");
    div.innerHTML = html;
    return div;
}

export function addIcon() {}
export const debounce = (fn: any) => fn;
export async function request(options: any) {
    const result = await requestUrl(options);
    return result.text;
}

export async function requestUrl({
    body,
    headers,
    method,
    url,
    contentType,
}: RequestUrlParam): Promise<RequestUrlResponse> {
    // console.log("[requestUrl] Mock called:", { method, url, contentType });
    const reqHeadersObj: Record<string, string> = {};
    for (const key of Object.keys(headers || {})) {
        reqHeadersObj[key.toLowerCase()] = headers[key];
    }
    if (contentType) {
        reqHeadersObj["content-type"] = contentType;
    }
    reqHeadersObj["Cache-Control"] = "no-cache, no-store, must-revalidate";
    reqHeadersObj["Pragma"] = "no-cache";
    reqHeadersObj["Expires"] = "0";
    const result = await fetch(url, {
        method: method,
        headers: {
            ...reqHeadersObj,
        },
        body: body,
    });
    const headersObj: Record<string, string> = {};
    result.headers.forEach((value, key) => {
        headersObj[key] = value;
    });
    let json = undefined;
    let text = undefined;
    let arrayBuffer = undefined;
    try {
        const isJson = result.headers.get("content-type")?.includes("application/json");
        arrayBuffer = await result.arrayBuffer();
        const isText = result.headers.get("content-type")?.startsWith("text/");
        if (isText || isJson) {
            text = new TextDecoder().decode(arrayBuffer);
        }
        if (isJson) {
            json = JSON.parse(text || "{}");
        }
    } catch (e) {
        console.warn("Failed to parse response:", e);
        // ignore
    }
    return {
        status: result.status,
        headers: headersObj,
        text: text,
        json: json,
        arrayBuffer: arrayBuffer,
    };
}
export function stringifyYaml(obj: any) {
    return JSON.stringify(obj);
}
export function parseYaml(s: string) {
    return JSON.parse(s);
}
export function getLanguage() {
    return "en";
}
export function setIcon(el: HTMLElement, icon: string) {}
export function arrayBufferToBase64(buffer: ArrayBuffer): string {
    return btoa(String.fromCharCode(...new Uint8Array(buffer)));
}
export function base64ToArrayBuffer(base64: string): ArrayBuffer {
    return Uint8Array.from(atob(base64), (c) => c.charCodeAt(0)).buffer;
}

export type DataWriteOptions = any;
export type PluginManifest = any;
export type RequestUrlParam = any;
export type RequestUrlResponse = any;
export type MarkdownFileInfo = any;
export type ListedFiles = {
    files: string[];
    folders: string[];
};

export type ValueComponent = any;
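The mock is only useful if the plug-in's `obsidian` imports resolve to it at test time. A hedged sketch of how that could be wired up via a module alias in the Vitest config (the alias target and config shape are assumptions; the repository may wire this differently):

```ts
// vitest.config.ts (illustrative sketch, not the repository's actual config)
import { defineConfig } from "vitest/config";
import { resolve } from "node:path";

export default defineConfig({
    resolve: {
        alias: {
            // Redirect the real "obsidian" package to the harness mock.
            obsidian: resolve(__dirname, "test/harness/obsidian-mock.ts"),
        },
    },
});
```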
test/harness/utils/intercept.ts (Normal file, 51 lines)
@@ -0,0 +1,51 @@
export function interceptFetchForLogging() {
    const originalFetch = globalThis.fetch;
    globalThis.fetch = async (...params: any[]) => {
        const paramObj = params[0];
        const initObj = params[1];
        const url = typeof paramObj === "string" ? paramObj : paramObj.url;
        const method = initObj?.method || "GET";
        const headers = initObj?.headers || {};
        const body = initObj?.body || null;
        const headersObj: Record<string, string> = {};
        if (headers instanceof Headers) {
            headers.forEach((value, key) => {
                headersObj[key] = value;
            });
        }
        console.dir({
            mockedFetch: {
                url,
                method,
                headers: headersObj,
            },
        });
        try {
            const res = await originalFetch.apply(globalThis, params as any);
            console.log(`[Obsidian Mock] Fetch response: ${res.status} ${res.statusText} for ${method} ${url}`);
            const resClone = res.clone();
            const contentType = resClone.headers.get("content-type") || "";
            const isJson = contentType.includes("application/json");
            if (isJson) {
                const data = await resClone.json();
                console.dir({ mockedFetchResponseJson: data });
            } else {
                const ab = await resClone.arrayBuffer();
                const text = new TextDecoder().decode(ab);
                const isText = /^text\//.test(contentType);
                if (isText) {
                    console.dir({
                        mockedFetchResponseText: ab.byteLength < 1000 ? text : text.slice(0, 1000) + "...(truncated)",
                    });
                } else {
                    console.log(`[Obsidian Mock] Fetch response is of content-type ${contentType}, not logging body.`);
                }
            }
            return res;
        } catch (e) {
            // console.error("[Obsidian Mock] Fetch error:", e);
            console.error(`[Obsidian Mock] Fetch failed for ${method} ${url}, error:`, e);
            throw e;
        }
    };
}
test/lib/commands.ts (Normal file, 145 lines)
@@ -0,0 +1,145 @@
|
||||
import type { P2PSyncSetting } from "@/lib/src/common/types";
|
||||
import { delay } from "octagonal-wheels/promises";
|
||||
import type { BrowserContext, Page } from "playwright";
|
||||
import type { Plugin } from "vitest/config";
|
||||
import type { BrowserCommand } from "vitest/node";
|
||||
import { serialized } from "octagonal-wheels/concurrency/lock";
|
||||
export const grantClipboardPermissions: BrowserCommand = async (ctx) => {
|
||||
if (ctx.provider.name === "playwright") {
|
||||
await ctx.context.grantPermissions(["clipboard-read", "clipboard-write"]);
|
||||
console.log("Granted clipboard permissions");
|
||||
return;
|
||||
}
|
||||
};
|
||||
let peerPage: Page | undefined;
|
||||
let peerPageContext: BrowserContext | undefined;
|
let previousName = "";

// Fill a form field once it appears on the page.
async function setValue(page: Page, selector: string, value: string) {
    const e = await page.waitForSelector(selector);
    await e.fill(value);
}

// Close the dedicated web-peer page and its browser context, if they exist.
async function closePeerContexts() {
    const peerPageLocal = peerPage;
    const peerPageContextLocal = peerPageContext;
    if (peerPageLocal) {
        await peerPageLocal.close();
    }
    if (peerPageContextLocal) {
        await peerPageContextLocal.close();
    }
}

// Open (or reuse) a dedicated browser context hosting the web peer UI,
// configure it from the given P2P settings, and click "Connect" until the UI
// reports a connection.
export const openWebPeer: BrowserCommand<[P2PSyncSetting, serverPeerName: string]> = async (
    ctx,
    setting: P2PSyncSetting,
    serverPeerName: string = "p2p-livesync-web-peer"
) => {
    if (ctx.provider.name === "playwright") {
        const previousPage = ctx.page;
        if (peerPage !== undefined) {
            if (previousName === serverPeerName) {
                console.log(`WebPeer for ${serverPeerName} already opened`);
                return;
            }
            console.log(`Closing previous WebPeer for ${previousName}`);
            await closePeerContexts();
        }
        console.log(`Opening webPeer`);
        return serialized("webpeer", async () => {
            const browser = ctx.context.browser()!;
            const context = await browser.newContext();
            peerPageContext = context;
            peerPage = await context.newPage();
            previousName = serverPeerName;
            console.log(`Navigating...`);
            await peerPage.goto("http://localhost:8081");
            await peerPage.waitForLoadState();
            console.log(`Navigated!`);
            await setValue(peerPage, "#app > main [placeholder*=wss]", setting.P2P_relays);
            await setValue(peerPage, "#app > main [placeholder*=anything]", setting.P2P_roomID);
            await setValue(peerPage, "#app > main [placeholder*=password]", setting.P2P_passphrase);
            await setValue(peerPage, "#app > main [placeholder*=iphone]", serverPeerName);
            // await peerPage.getByTitle("Enable P2P Replicator").setChecked(true);
            await peerPage.getByRole("checkbox").first().setChecked(true);
            // (await peerPage.waitForSelector("Save and Apply")).click();
            await peerPage.getByText("Save and Apply").click();
            await delay(100);
            await peerPage.reload();
            await delay(500);
            // Retry "Connect" until the UI shows "Disconnect" (i.e. connected), up to ten attempts.
            for (let i = 0; i < 10; i++) {
                await delay(100);
                const btn = peerPage.getByRole("button").filter({ hasText: /^connect/i });
                if ((await peerPage.getByText(/disconnect/i).count()) > 0) {
                    break;
                }
                await btn.click();
            }
            await previousPage.bringToFront();
            ctx.context.on("close", async () => {
                console.log("Browser context is closing, closing peer page if exists");
                await closePeerContexts();
            });
            console.log("Web peer page opened");
        });
    }
};

export const closeWebPeer: BrowserCommand = async (ctx) => {
    if (ctx.provider.name === "playwright") {
        return serialized("webpeer", async () => {
            await closePeerContexts();
            peerPage = undefined;
            peerPageContext = undefined;
            previousName = "";
            console.log("Web peer page closed");
        });
    }
};

// Click any visible "Accept" button on the web peer page, e.g. on pairing dialogs.
export const acceptWebPeer: BrowserCommand = async (ctx) => {
    if (peerPage) {
        // Detect dialogs
        const buttonsOnDialogs = await peerPage.$$("popup .buttons button");
        for (const b of buttonsOnDialogs) {
            const text = (await b.innerText()).toLowerCase();
            // console.log(`Dialog button found: ${text}`);
            if (text === "accept") {
                console.log("Accepting dialog");
                await b.click({ timeout: 300 });
                await delay(500);
            }
        }
        const buttons = peerPage.getByRole("button").filter({ hasText: /^accept$/i });
        const a = await buttons.all();
        for (const b of a) {
            await b.click({ timeout: 300 });
        }
    }
    return false;
};

export default function BrowserCommands(): Plugin {
    return {
        name: "vitest:custom-commands",
        config() {
            return {
                test: {
                    browser: {
                        commands: {
                            grantClipboardPermissions,
                            openWebPeer,
                            closeWebPeer,
                            acceptWebPeer,
                        },
                    },
                },
            };
        },
    };
}

declare module "vitest/browser" {
    interface BrowserCommands {
        grantClipboardPermissions: () => Promise<void>;
        openWebPeer: (setting: P2PSyncSetting, serverPeerName: string) => Promise<void>;
        closeWebPeer: () => Promise<void>;
        acceptWebPeer: () => Promise<boolean>;
    }
}
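For orientation, here is a minimal sketch of how a browser test might drive these commands through vitest's `commands` proxy. The settings values, file name, and the `P2PSyncSetting` import path below are illustrative assumptions, not values taken from this change:

// sketch.test.ts (hypothetical)
import { commands } from "vitest/browser";
import { it } from "vitest";
import type { P2PSyncSetting } from "@/lib/src/common/types"; // assumed import path

it("pairs with the web peer", async () => {
    // Hypothetical P2P settings; real suites derive these from their harness.
    const setting = {
        P2P_relays: "ws://localhost:4000",
        P2P_roomID: "test-room",
        P2P_passphrase: "dev-passphrase",
    } as P2PSyncSetting;
    await commands.openWebPeer(setting, "p2p-livesync-web-peer"); // open or reuse the peer page
    // ... trigger replication from the plugin under test here ...
    await commands.acceptWebPeer(); // accept any pairing dialog the peer raises
    await commands.closeWebPeer(); // tear down so later suites start clean
});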
70
test/lib/ui.ts
Normal file
@@ -0,0 +1,70 @@
import { page } from "vitest/browser";
import { delay } from "@/lib/src/common/utils";

// Poll until a visible modal whose title matches `dialogText` appears, or the timeout elapses.
export async function waitForDialogShown(dialogText: string, timeout = 500) {
    const ttl = Date.now() + timeout;
    while (Date.now() < ttl) {
        try {
            await delay(50);
            const dialog = page
                .getByText(dialogText)
                .elements()
                .filter((e) => e.classList.contains("modal-title"))
                .filter((e) => e.checkVisibility());
            if (dialog.length === 0) {
                continue;
            }
            return true;
        } catch (e) {
            // Ignore
        }
    }
    return false;
}

// Poll until no visible modal with a matching title remains, or the timeout elapses.
export async function waitForDialogHidden(dialogText: string | RegExp, timeout = 500) {
    const ttl = Date.now() + timeout;
    while (Date.now() < ttl) {
        try {
            await delay(50);
            const dialog = page
                .getByText(dialogText)
                .elements()
                .filter((e) => e.classList.contains("modal-title"))
                .filter((e) => e.checkVisibility());
            if (dialog.length > 0) {
                // console.log(`Still exists ${dialogText.toString()}`);
                continue;
            }
            return true;
        } catch (e) {
            // Ignore
        }
    }
    return false;
}

// Poll for a visible button matching `buttonText` and click it; returns false if none appeared in time.
export async function waitForButtonClick(buttonText: string | RegExp, timeout = 500) {
    const ttl = Date.now() + timeout;
    while (Date.now() < ttl) {
        try {
            await delay(100);
            const buttons = page
                .getByText(buttonText)
                .elements()
                .filter((e) => e.checkVisibility() && e.tagName.toLowerCase() === "button");
            if (buttons.length === 0) {
                // console.log(`Could not find ${buttonText.toString()}`);
                continue;
            }
            console.log(`Button detected: ${buttonText.toString()}`);
            // console.dir(buttons[0])
            await page.elementLocator(buttons[0]).click();
            await delay(100);
            return true;
        } catch (e) {
            console.error(e);
            // Ignore
        }
    }
    return false;
}
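As a usage sketch (the dialog title and button label here are hypothetical), the three pollers above compose naturally when a test needs to dismiss a modal before continuing:

// Hypothetical flow: wait for a confirmation modal, press OK, then wait until it is gone.
if (await waitForDialogShown("Confirm deletion", 2000)) {
    await waitForButtonClick(/^ok$/i, 2000);
    await waitForDialogHidden("Confirm deletion", 2000);
}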
21
test/lib/util.ts
Normal file
@@ -0,0 +1,21 @@
import { delay } from "@/lib/src/common/utils";

// Await `task`, but run `followup` between polls (e.g. to nudge the UI) until the
// task settles. Racing against an already-resolved sentinel tells us, without
// blocking, whether the task has completed yet. Throws once `timeout` ms elapse.
export async function waitTaskWithFollowups<T>(
    task: Promise<T>,
    followup: () => Promise<void>,
    timeout: number = 10000,
    interval: number = 1000
): Promise<T> {
    const symbolNotCompleted = Symbol("notCompleted");
    const isCompleted = () => Promise.race([task, Promise.resolve(symbolNotCompleted)]);
    const ttl = Date.now() + timeout;
    do {
        const state = await isCompleted();
        if (state !== symbolNotCompleted) {
            return state;
        }
        await followup();
        await delay(interval);
    } while (Date.now() < ttl);
    throw new Error("Task did not complete in time");
}
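A brief usage sketch (names and import paths assumed for illustration; `startReplication()` stands in for any long-running promise that occasionally needs a nudge):

import { commands } from "vitest/browser"; // custom commands registered by the test plugin
import { waitTaskWithFollowups } from "../lib/util"; // path assumed

// Keep accepting dialogs on the web peer while replication runs, polling the
// main task once per second and giving up after 30 seconds.
const result = await waitTaskWithFollowups(
    startReplication(), // hypothetical long-running task
    async () => {
        await commands.acceptWebPeer(); // follow-up executed between polls
    },
    30000,
    1000
);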
33
test/shell/couchdb-init.sh
Executable file
@@ -0,0 +1,33 @@
#!/bin/bash
if [[ -z "$hostname" ]]; then
    echo "ERROR: Hostname missing"
    exit 1
fi
if [[ -z "$username" ]]; then
    echo "ERROR: Username missing"
    exit 1
fi

if [[ -z "$password" ]]; then
    echo "ERROR: Password missing"
    exit 1
fi
if [[ -z "$node" ]]; then
    echo "INFO: defaulting to _local"
    node=_local
fi

echo "-- Configuring CouchDB by REST APIs... -->"

until (curl -X POST "${hostname}/_cluster_setup" -H "Content-Type: application/json" -d "{\"action\":\"enable_single_node\",\"username\":\"${username}\",\"password\":\"${password}\",\"bind_address\":\"0.0.0.0\",\"port\":5984,\"singlenode\":true}" --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/chttpd/require_valid_user" -H "Content-Type: application/json" -d '"true"' --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/chttpd_auth/require_valid_user" -H "Content-Type: application/json" -d '"true"' --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/httpd/WWW-Authenticate" -H "Content-Type: application/json" -d '"Basic realm=\"couchdb\""' --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/httpd/enable_cors" -H "Content-Type: application/json" -d '"true"' --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/chttpd/enable_cors" -H "Content-Type: application/json" -d '"true"' --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/chttpd/max_http_request_size" -H "Content-Type: application/json" -d '"4294967296"' --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/couchdb/max_document_size" -H "Content-Type: application/json" -d '"50000000"' --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/cors/credentials" -H "Content-Type: application/json" -d '"true"' --user "${username}:${password}"); do sleep 5; done
until (curl -X PUT "${hostname}/_node/${node}/_config/cors/origins" -H "Content-Type: application/json" -d '"*"' --user "${username}:${password}"); do sleep 5; done

echo "<-- Configuring CouchDB by REST APIs Done!"
3
test/shell/couchdb-start.sh
Executable file
@@ -0,0 +1,3 @@
#!/bin/bash
set -e
docker run -d --name couchdb-test -p 5989:5984 -e COUCHDB_USER=$username -e COUCHDB_PASSWORD=$password couchdb:3.5.0
3
test/shell/couchdb-stop.sh
Executable file
@@ -0,0 +1,3 @@
#!/bin/bash
docker stop couchdb-test
docker rm couchdb-test
47
test/shell/minio-init.sh
Executable file
@@ -0,0 +1,47 @@
#!/bin/bash
set -e
cat >/tmp/mybucket-rw.json <<EOF
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": ["s3:GetBucketLocation","s3:ListBucket"],
            "Resource": ["arn:aws:s3:::$bucketName"]
        },
        {
            "Effect": "Allow",
            "Action": ["s3:GetObject","s3:PutObject","s3:DeleteObject"],
            "Resource": ["arn:aws:s3:::$bucketName/*"]
        }
    ]
}
EOF
# echo "<CORSConfiguration>
# <CORSRule>
#     <AllowedOrigin>http://localhost:63315</AllowedOrigin>
#     <AllowedOrigin>http://localhost:63316</AllowedOrigin>
#     <AllowedOrigin>http://localhost</AllowedOrigin>
#     <AllowedMethod>GET</AllowedMethod>
#     <AllowedMethod>PUT</AllowedMethod>
#     <AllowedMethod>POST</AllowedMethod>
#     <AllowedMethod>DELETE</AllowedMethod>
#     <AllowedMethod>HEAD</AllowedMethod>
#     <AllowedHeader>*</AllowedHeader>
# </CORSRule>
# </CORSConfiguration>" > /tmp/cors.xml
# docker run --rm --network host -v /tmp/mybucket-rw.json:/tmp/mybucket-rw.json --entrypoint=/bin/sh minio/mc -c "
# mc alias set myminio $minioEndpoint $username $password
# mc mb --ignore-existing myminio/$bucketName
# mc admin policy create myminio my-custom-policy /tmp/mybucket-rw.json
# echo 'Creating service account for user $username with access key $accessKey'
# mc admin user svcacct add --access-key '$accessKey' --secret-key '$secretKey' myminio '$username'
# mc admin policy attach myminio my-custom-policy --user '$accessKey'
# echo 'Verifying policy and user creation:'
# mc admin user svcacct info myminio '$accessKey'
# "

docker run --rm --network host -v /tmp/mybucket-rw.json:/tmp/mybucket-rw.json --entrypoint=/bin/sh minio/mc -c "
mc alias set myminio $minioEndpoint $accessKey $secretKey
mc mb --ignore-existing myminio/$bucketName
"
2
test/shell/minio-start.sh
Executable file
@@ -0,0 +1,2 @@
#!/bin/bash
docker run -d --name minio-test -p 9000:9000 -p 9001:9001 -e MINIO_ROOT_USER=$accessKey -e MINIO_ROOT_PASSWORD=$secretKey -e MINIO_SERVER_URL=$minioEndpoint minio/minio server /data --console-address ':9001'
3
test/shell/minio-stop.sh
Executable file
@@ -0,0 +1,3 @@
#!/bin/bash
docker stop minio-test
docker rm minio-test
2
test/shell/p2p-init.sh
Executable file
@@ -0,0 +1,2 @@
#!/bin/bash
echo "P2P Init - No additional initialization required."
8
test/shell/p2p-start.sh
Executable file
@@ -0,0 +1,8 @@
#!/bin/bash
set -e
script_dir=$(dirname "$0")
webpeer_dir=$script_dir/../../src/lib/apps/webpeer

docker run -d --name relay-test -p 4000:8080 scsibug/nostr-rs-relay:latest
npm run --prefix $webpeer_dir build
docker run -d --name webpeer-test -p 8081:8043 -v $webpeer_dir/dist:/srv/http pierrezemb/gostatic
5
test/shell/p2p-stop.sh
Executable file
@@ -0,0 +1,5 @@
#!/bin/bash
docker stop relay-test
docker rm relay-test
docker stop webpeer-test
docker rm webpeer-test
129
test/suite/db_common.ts
Normal file
@@ -0,0 +1,129 @@
import { compareMTime, EVEN } from "@/common/utils";
import { TFile, type DataWriteOptions } from "@/deps";
import type { FilePath } from "@/lib/src/common/types";
import { isDocContentSame, readContent } from "@/lib/src/common/utils";
import { waitForIdle, type LiveSyncHarness } from "../harness/harness";
import { expect } from "vitest";

export const defaultFileOption = {
    mtime: new Date(2026, 0, 1, 0, 1, 2, 3).getTime(),
} as const satisfies DataWriteOptions;

// Create (or recreate) a file through the vault, verify the write landed, and
// flush pending file events so the local database catches up.
export async function storeFile(
    harness: LiveSyncHarness,
    path: string,
    content: string | Blob,
    deleteBeforeSend = false,
    fileOptions = defaultFileOption
) {
    if (deleteBeforeSend && harness.app.vault.getAbstractFileByPath(path)) {
        console.log(`Deleting existing file ${path}`);
        await harness.app.vault.delete(harness.app.vault.getAbstractFileByPath(path) as TFile);
    }
    // Create file via vault
    if (content instanceof Blob) {
        console.log(`Creating binary file ${path}`);
        await harness.app.vault.createBinary(path, await content.arrayBuffer(), fileOptions);
    } else {
        await harness.app.vault.create(path, content, fileOptions);
    }

    // Ensure file is created
    const file = harness.app.vault.getAbstractFileByPath(path);
    expect(file).toBeInstanceOf(TFile);
    if (file instanceof TFile) {
        expect(compareMTime(file.stat.mtime, fileOptions?.mtime ?? defaultFileOption.mtime)).toBe(EVEN);
        if (content instanceof Blob) {
            const readContent = await harness.app.vault.readBinary(file);
            expect(await isDocContentSame(readContent, content)).toBe(true);
        } else {
            const readContent = await harness.app.vault.read(file);
            expect(readContent).toBe(content);
        }
    }
    await harness.plugin.services.fileProcessing.commitPendingFileEvents();
    await waitForIdle(harness);
    return file;
}

export async function readFromLocalDB(harness: LiveSyncHarness, path: string) {
    const entry = await harness.plugin.localDatabase.getDBEntry(path as FilePath);
    expect(entry).not.toBe(false);
    return entry;
}

// Read a file back through the vault, asserting its mtime when one is expected.
export async function readFromVault(
    harness: LiveSyncHarness,
    path: string,
    isBinary: boolean = false,
    fileOptions = defaultFileOption
): Promise<string | ArrayBuffer> {
    const file = harness.app.vault.getAbstractFileByPath(path);
    expect(file).toBeInstanceOf(TFile);
    if (file instanceof TFile) {
        // console.log(`MTime: ${file.stat.mtime}, Expected: ${fileOptions.mtime}`);
        if (fileOptions.mtime !== undefined) {
            expect(compareMTime(file.stat.mtime, fileOptions.mtime)).toBe(EVEN);
        }
        const content = isBinary ? await harness.app.vault.readBinary(file) : await harness.app.vault.read(file);
        return content;
    }

    throw new Error("File not found in vault");
}

// Assert that the local database holds an entry for `path` that matches `content`.
export async function checkStoredFileInDB(
    harness: LiveSyncHarness,
    path: string,
    content: string | Blob,
    fileOptions = defaultFileOption
) {
    const entry = await readFromLocalDB(harness, path);
    if (entry === false) {
        throw new Error("DB Content not found");
    }
    const contentToCheck = content instanceof Blob ? await content.arrayBuffer() : content;
    const isDocSame = await isDocContentSame(readContent(entry), contentToCheck);
    if (fileOptions.mtime !== undefined) {
        expect(compareMTime(entry.mtime, fileOptions.mtime)).toBe(EVEN);
    }
    expect(isDocSame).toBe(true);
    return Promise.resolve();
}

// Write a file, wait for processing, and verify both the vault copy and the DB entry.
export async function testFileWrite(
    harness: LiveSyncHarness,
    path: string,
    content: string | Blob,
    skipCheckToBeWritten = false,
    fileOptions = defaultFileOption
) {
    const file = await storeFile(harness, path, content, false, fileOptions);
    expect(file).toBeInstanceOf(TFile);
    await harness.plugin.services.fileProcessing.commitPendingFileEvents();
    await waitForIdle(harness);
    const vaultFile = await readFromVault(harness, path, content instanceof Blob, fileOptions);
    expect(await isDocContentSame(vaultFile, content)).toBe(true);
    await harness.plugin.services.fileProcessing.commitPendingFileEvents();
    await waitForIdle(harness);
    if (skipCheckToBeWritten) {
        return Promise.resolve();
    }
    await checkStoredFileInDB(harness, path, content);
    return Promise.resolve();
}

// Verify that both the vault and the local database hold the expected content.
export async function testFileRead(
    harness: LiveSyncHarness,
    path: string,
    expectedContent: string | Blob,
    fileOptions = defaultFileOption
) {
    await waitForIdle(harness);
    const file = await readFromVault(harness, path, expectedContent instanceof Blob, fileOptions);
    const isDocSame = await isDocContentSame(file, expectedContent);
    expect(isDocSame).toBe(true);
    // Check local database entry
    const entry = await readFromLocalDB(harness, path);
    expect(entry).not.toBe(false);
    if (entry === false) {
        throw new Error("DB Content not found");
    }
    const isDBDocSame = await isDocContentSame(readContent(entry), expectedContent);
    expect(isDBDocSame).toBe(true);
    return await Promise.resolve();
}
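To illustrate how these helpers compose, a sketch of a round-trip check (the path and content are hypothetical, and `harness` is assumed to come from `generateHarness` as in the suites below):

import { it } from "vitest";
import { testFileRead, testFileWrite } from "./db_common"; // assuming a sibling suite file

it("round-trips a note through the vault and the local database", async () => {
    const path = "example-note.md"; // hypothetical file name
    const content = "# Example\nHello!";
    // Write via the vault, flush pending events, and confirm the DB entry matches.
    await testFileWrite(harness, path, content);
    // Independently re-read both the vault copy and the DB entry.
    await testFileRead(harness, path, content);
});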
125
test/suite/onlylocaldb.test.ts
Normal file
@@ -0,0 +1,125 @@
import { beforeAll, describe, expect, it, test } from "vitest";
import { generateHarness, waitForIdle, waitForReady, type LiveSyncHarness } from "../harness/harness";
import { TFile } from "@/deps.ts";
import { DEFAULT_SETTINGS, type FilePath, type ObsidianLiveSyncSettings } from "@/lib/src/common/types";
import { isDocContentSame, readContent } from "@/lib/src/common/utils";
import { DummyFileSourceInisialised, generateBinaryFile, generateFile, init } from "../utils/dummyfile";

const localdb_test_setting = {
    ...DEFAULT_SETTINGS,
    isConfigured: true,
    handleFilenameCaseSensitive: false,
} as ObsidianLiveSyncSettings;

describe.skip("Plugin Integration Test (Local Database)", async () => {
    let harness: LiveSyncHarness;
    const vaultName = "TestVault" + Date.now();

    beforeAll(async () => {
        await DummyFileSourceInisialised;
        harness = await generateHarness(vaultName, localdb_test_setting);
        await waitForReady(harness);
    });

    it("should be instantiated and defined", async () => {
        expect(harness.plugin).toBeDefined();
        expect(harness.plugin.app).toBe(harness.app);
        return await Promise.resolve();
    });

    it("should have services initialized", async () => {
        expect(harness.plugin.services).toBeDefined();
        return await Promise.resolve();
    });
    it("should have local database initialized", async () => {
        expect(harness.plugin.localDatabase).toBeDefined();
        expect(harness.plugin.localDatabase.isReady).toBe(true);
        return await Promise.resolve();
    });

    it("should store the changes into the local database", async () => {
        const path = "test-store6.md";
        const content = "Hello, World!";
        if (harness.app.vault.getAbstractFileByPath(path)) {
            console.log(`Deleting existing file ${path}`);
            await harness.app.vault.delete(harness.app.vault.getAbstractFileByPath(path) as TFile);
        }
        // Create file via vault
        await harness.app.vault.create(path, content);

        const file = harness.app.vault.getAbstractFileByPath(path);
        expect(file).toBeInstanceOf(TFile);

        if (file instanceof TFile) {
            const readContent = await harness.app.vault.read(file);
            expect(readContent).toBe(content);
        }
        await harness.plugin.services.fileProcessing.commitPendingFileEvents();
        await waitForIdle(harness);
        // await delay(100); // Wait a bit for the local database to process

        const entry = await harness.plugin.localDatabase.getDBEntry(path as FilePath);
        expect(entry).not.toBe(false);
        if (entry) {
            expect(readContent(entry)).toBe(content);
        }
        return await Promise.resolve();
    });
    test.each([10, 100, 1000, 10000, 50000, 100000])("should handle large file of size %i bytes", async (size) => {
        const path = `test-large-file-${size}.md`;
        const content = Array.from(generateFile(size)).join("");
        if (harness.app.vault.getAbstractFileByPath(path)) {
            console.log(`Deleting existing file ${path}`);
            await harness.app.vault.delete(harness.app.vault.getAbstractFileByPath(path) as TFile);
        }
        // Create file via vault
        await harness.app.vault.create(path, content);
        const file = harness.app.vault.getAbstractFileByPath(path);
        expect(file).toBeInstanceOf(TFile);
        if (file instanceof TFile) {
            const readContent = await harness.app.vault.read(file);
            expect(readContent).toBe(content);
        }
        await harness.plugin.services.fileProcessing.commitPendingFileEvents();
        await waitForIdle(harness);

        const entry = await harness.plugin.localDatabase.getDBEntry(path as FilePath);
        expect(entry).not.toBe(false);
        if (entry) {
            expect(readContent(entry)).toBe(content);
        }
        return await Promise.resolve();
    });

    const binaryMap = Array.from({ length: 7 }, (_, i) => Math.pow(2, i * 4));
    test.each(binaryMap)("should handle binary file of size %i bytes", async (size) => {
        const path = `test-binary-file-${size}.bin`;
        const content = new Blob([...generateBinaryFile(size)], { type: "application/octet-stream" });
        if (harness.app.vault.getAbstractFileByPath(path)) {
            console.log(`Deleting existing file ${path}`);
            await harness.app.vault.delete(harness.app.vault.getAbstractFileByPath(path) as TFile);
        }
        // Create file via vault
        await harness.app.vault.createBinary(path, await content.arrayBuffer());
        const file = harness.app.vault.getAbstractFileByPath(path);
        expect(file).toBeInstanceOf(TFile);
        if (file instanceof TFile) {
            const readContent = await harness.app.vault.readBinary(file);
            expect(await isDocContentSame(readContent, content)).toBe(true);
        }

        await harness.plugin.services.fileProcessing.commitPendingFileEvents();
        await waitForIdle(harness);
        const entry = await harness.plugin.localDatabase.getDBEntry(path as FilePath);
        expect(entry).not.toBe(false);
        if (entry) {
            const entryContent = await readContent(entry);
            if (!(entryContent instanceof ArrayBuffer)) {
                throw new Error("Entry content is not an ArrayBuffer");
            }
            // const expectedContent = await content.arrayBuffer();
            expect(await isDocContentSame(entryContent, content)).toBe(true);
        }
        return await Promise.resolve();
    });
});
Some files were not shown because too many files have changed in this diff.