Compare commits

...

44 Commits

Author SHA1 Message Date
vorotamoroz
4f987e7c2b ### Fixed
- Hidden file synchronisation now works!
- Now Hidden file synchronisation respects `.ignore` files.
- Replicator initialisation during rebuilding now works correctly.

### Refactored

- Some method names have been changed for better clarity, e.g., `_isTargetFileByLocalDB` is now `_isTargetAcceptedByLocalDB`.
2026-02-21 14:05:32 +09:00
vorotamoroz
556ce471f8 ## 0.25.43-patched-8 2026-02-20 14:28:28 +00:00
vorotamoroz
32b6717114 keep a note 2026-02-19 10:38:42 +00:00
vorotamoroz
e0e72fae72 Fixed: saving device name 2026-02-19 10:37:19 +00:00
vorotamoroz
203dd17421 for 0.25.43-patched-7, please refer to the updates.md 2026-02-19 10:23:45 +00:00
vorotamoroz
1bde2b2ff1 Fixed an issue where the StorageEventManager
Builds with Vite are now being tested
2026-02-19 04:18:18 +00:00
vorotamoroz
2bf1c775ee ## 0.25.43-patched-6
### Fixed

- Unlocking the remote database after rebuilding has been fixed.

### Refactored
- `StorageEventManagerBase` has now been separated from `StorageEventManagerObsidian` according to their concerns.
- `FileAccessBase` has now been separated from `FileAccessObsidian` according to their concerns.
2026-02-18 12:13:05 +00:00
vorotamoroz
4658e3735d Fix Shim 2026-02-17 10:56:05 +00:00
vorotamoroz
627edc96bf bump for beta 2026-02-17 10:14:13 +00:00
vorotamoroz
0a1917e83c Refactor for 0.25.43-patched-5 (very long, please refer to updates.md) 2026-02-17 10:14:04 +00:00
vorotamoroz
3201399bdf bump 2026-02-16 11:51:09 +00:00
vorotamoroz
2ae70e8f07 Refactor: DatabaseService and Replicator 2026-02-16 11:51:03 +00:00
vorotamoroz
2b9bb1ed06 beta bump 2026-02-16 06:51:52 +00:00
vorotamoroz
e63e3e6725 ### Refactor
- Module dependency refined. (For details, please refer to updates.md)
2026-02-16 06:50:31 +00:00
vorotamoroz
6e9ac6a9f9 - Application LifeCycle has now started in Main, not ServiceHub. 2026-02-14 15:21:00 +09:00
vorotamoroz
fb59c4a723 Refactored; please refer to updates.md 2026-02-13 12:02:31 +00:00
vorotamoroz
1b5ca9e52c Refactor (write notes later) 2026-02-12 08:56:30 +00:00
vorotamoroz
787627a156 Refactor: Move some functions from modules to services 2026-02-12 06:27:29 +00:00
vorotamoroz
b1bba7685e Add note. 2026-02-12 03:39:56 +00:00
vorotamoroz
cdfc0ccead wow, auditing... bump. 2026-02-05 12:15:35 +00:00
vorotamoroz
0635cad350 bake i18n 2026-02-05 12:13:12 +00:00
vorotamoroz
6fd1fa6313 Fix typos and make P2P sync not experimental 2026-02-05 12:07:29 +00:00
vorotamoroz
12b1f881dc ### Fixed
- Encryption/decryption issues when using Object Storage as remote have been fixed.
2026-02-05 11:47:01 +00:00
vorotamoroz
bf3efab1af Merge pull request #733 from dayne/patch-2
Update regions setup-flyio-on-the-fly-v2.ipynb
2026-02-02 13:51:48 +09:00
vorotamoroz
da72fda221 Merge pull request #777 from oenhu/patch-1
Update README_cn.md
2026-02-02 13:42:14 +09:00
vorotamoroz
665501f485 Merge pull request #778 from oenhu/patch-2
Create tech_info_cn.md
2026-02-02 13:40:17 +09:00
vorotamoroz
6ee332fff8 Merge pull request #779 from oenhu/main
refactor: replace hardcoded strings with i18n keys
2026-02-02 13:38:01 +09:00
vorotamoroz
f66447cb59 Fix task 2026-02-02 13:00:28 +09:00
vorotamoroz
eb3120a8fd Fix CI 2026-02-02 12:57:49 +09:00
vorotamoroz
5fa39b3c6e Fix typo 2026-02-02 12:28:26 +09:00
vorotamoroz
91c35a88dd separate CIs 2026-02-02 12:27:37 +09:00
vorotamoroz
49f4d79f4f Update dependencies (Mostly translations) 2026-02-02 12:27:15 +09:00
vorotamoroz
abfd010467 Add the detail 2026-02-02 11:39:28 +09:00
vorotamoroz
cde1013359 Reducing ambiguity 2026-02-02 11:32:52 +09:00
vorotamoroz
9c7f9e4316 Tidy 2026-02-02 11:31:40 +09:00
vorotamoroz
aceda16c64 Add beta policy note 2026-02-02 11:31:06 +09:00
vorotamoroz
3656e5c725 Merge branch 'beta' 2026-02-02 11:00:12 +09:00
vorotamoroz
6915b160a2 Bump to stable 2026-02-02 10:57:15 +09:00
vorotamoroz
c2b7081215 Add some notes 2026-01-30 07:39:03 +00:00
oenhu
a9f1bbff9f refactor: replace hardcoded strings with i18n keys 2026-01-09 00:29:40 +08:00
oenhu
f86815e420 Create tech_info_cn.md
Create Chinese version
2026-01-08 15:07:06 +08:00
oenhu
fd16b166ef Update README_cn.md
Update the translation content based on the latest English README.md.
2026-01-08 01:41:07 +08:00
Dayne Broderson
c76187c6d2 Update setup-flyio-on-the-fly-v2.ipynb
remove extraneous newline
2025-10-29 17:07:34 -08:00
Dayne Broderson
b3b3ad843c Update regions setup-flyio-on-the-fly-v2.ipynb
Updating the regions list pulled from recent `fly platform regions`
2025-10-22 09:26:13 -08:00
114 changed files with 4767 additions and 5817 deletions

View File

@@ -38,9 +38,15 @@ jobs:
- name: Install test dependencies (Playwright Chromium)
run: npm run test:install-dependencies
- name: Start test services (CouchDB + MinIO + Nostr Relay + WebPeer)
run: npm run test:docker-all:start
- name: Start test services (CouchDB)
run: npm run test:docker-couchdb:start
if: ${{ inputs.testsuite == '' || inputs.testsuite == 'suite/' }}
- name: Start test services (MinIO)
run: npm run test:docker-s3:start
if: ${{ inputs.testsuite == '' || inputs.testsuite == 'suite/' }}
- name: Start test services (Nostr Relay + WebPeer)
run: npm run test:docker-p2p:start
if: ${{ inputs.testsuite == '' || inputs.testsuite == 'suitep2p/' }}
- name: Run tests suite
if: ${{ inputs.testsuite == '' || inputs.testsuite == 'suite/' }}
env:
@@ -51,6 +57,12 @@ jobs:
env:
CI: true
run: npm run test suitep2p/
- name: Stop test services
if: always()
run: npm run test:docker-all:stop
- name: Stop test services (CouchDB)
run: npm run test:docker-couchdb:stop
if: ${{ inputs.testsuite == '' || inputs.testsuite == 'suite/' }}
- name: Stop test services (MinIO)
run: npm run test:docker-s3:stop
if: ${{ inputs.testsuite == '' || inputs.testsuite == 'suite/' }}
- name: Stop test services (Nostr Relay + WebPeer)
run: npm run test:docker-p2p:stop
if: ${{ inputs.testsuite == '' || inputs.testsuite == 'suitep2p/' }}

View File

@@ -1,9 +1,12 @@
# Self-hosted LiveSync
Self-hosted LiveSync is a community-implemented live synchronisation plugin.
It uses a self-hosted or purchased CouchDB as the relay server. Compatible with every platform that Obsidian supports.
It relies on robust server-side solutions such as CouchDB or object storage (e.g., MinIO, S3, R2) to keep data synchronisation reliable. Compatible with every platform that Obsidian supports.
Note: This plugin is not compatible with the official "Obsidian Sync" service.
In addition, it now supports peer-to-peer synchronisation over WebRTC (experimental), letting you synchronise notes directly between devices without relying on a server.
>[!IMPORTANT]
>This plugin is not compatible with the official "Obsidian Sync" service.
![obsidian_live_sync_demo](https://user-images.githubusercontent.com/45774780/137355323-f57a8b09-abf2-4501-836c-8cb7d2ff24a3.gif)
@@ -11,119 +14,94 @@ Self-hosted LiveSync (自搭建在线同步) 是一个社区实现的在线同
## Features
- Visual conflict resolver
- Near-real-time, bidirectional synchronisation across multiple devices
- Works with CouchDB and compatible services such as IBM Cloudant
- End-to-end encryption support
- Plugin synchronisation (Beta)
- Receive WebClips from [obsidian-livesync-webclip](https://chrome.google.com/webstore/detail/obsidian-livesync-webclip/jfpaflmpckblieefkegjncjoceapakdf) (end-to-end encryption does not apply to this feature)
- Synchronise the vault efficiently with minimal traffic
- Handle conflicting modifications effectively
  - Simple conflicts are merged automatically
- Open-source solutions on the server side
  - Compatible solutions are supported
- End-to-end encryption support
- Synchronise settings, snippets, themes, and plugins via [Customisation Sync (Beta)](docs/settings.md#6-customization-sync-advanced) or [Hidden File Sync](docs/settings.md#7-hidden-files-advanced).
- Peer-to-peer synchronisation over WebRTC, without specifying a `host` (experimental).
  - This feature is still experimental. Please use it with care.
  - WebRTC is a peer-to-peer synchronisation method, so **at least one device must be online for synchronisation to take place**.
  - Rather than keeping one of your devices online as a stable peer, you can use two pseudo-peers:
    - [livesync-serverpeer](https://github.com/vrtmrz/livesync-serverpeer): a pseudo-client that runs on a server to receive and send data between devices.
    - [webpeer](https://github.com/vrtmrz/livesync-commonlib/tree/main/apps/webpeer): a pseudo-client for receiving and sending data between devices.
      - A pre-built instance is now live at [fancy-syncing.vrtmrz.net/webpeer](https://fancy-syncing.vrtmrz.net/webpeer/) (hosted on the vrtmrz blog site). This is also a peer-to-peer instance and is free to use.
  - For more information, please refer to the [explanatory article in English](https://fancy-syncing.vrtmrz.net/blog/0034-p2p-sync-en.html) or the [one in Japanese](https://fancy-syncing.vrtmrz.net/blog/0034-p2p-sync).
Suitable for researchers, engineers, or developers who need to keep their notes fully self-hosted for security reasons, and for anyone who likes the reassurance of their notes being completely private.
This plugin is suitable for researchers, engineers, and developers who need to keep their notes fully self-hosted for security reasons, and for anyone who likes the reassurance of their notes being completely private.
## Important notices
- Do not use this plugin together with other synchronisation solutions (including iCloud and Obsidian Sync). Before enabling this plugin, make sure to disable all other synchronisation methods to avoid content corruption or duplication. If you want to synchronise to multiple services, do so one at a time and never enable two synchronisation methods at the same time.
  This also means that you must not place your vault inside a cloud-synchronised folder (such as an iCloud or Dropbox folder).
- This is a synchronisation plugin, not a backup solution. Do not rely on it for backups.
- If the device runs out of storage, database corruption may occur.
- Hidden files and any other invisible files are not stored in the database, so they will not be synchronised (**and may be deleted**).
>[!IMPORTANT]
> - Always back up your vault before installing or upgrading this plugin.
> - Do not enable this plugin together with other synchronisation solutions (including iCloud and Obsidian Sync).
> - For backups, we also provide a plugin called [Differential ZIP Backup](https://github.com/vrtmrz/diffzip).
## How to use
### Prepare your database
### Done in 3 minutes: set up CouchDB on fly.io
First, prepare your database. IBM Cloudant is the preferred choice for testing. Alternatively, you can install CouchDB on your own server. For more information, please refer to the following:
1. [Setup IBM Cloudant](docs/setup_cloudant.md)
2. [Setup your CouchDB](docs/setup_own_server_cn.md)
**Recommended for beginners setting up for the first time.**
[![LiveSync Setup onto Fly.io SpeedRun 2024 using Google Colab](https://img.youtube.com/vi/7sa_I1832Xc/0.jpg)](https://www.youtube.com/watch?v=7sa_I1832Xc)
Note: More setup methods are being collected! Currently under discussion: [using fly.io](https://github.com/vrtmrz/obsidian-livesync/discussions/85)
1. [Setup CouchDB on fly.io](docs/setup_flyio.md)
2. Configure the plugin via [Quick Setup](docs/quick_setup.md).
### The first device
### Manual setup
1. Install the plugin on your device.
2. Configure the remote database information.
    1. Fill in your server information on the `Remote Database configuration` settings pane.
    2. Enabling `End to End Encryption` is recommended. After entering a passphrase, click `Apply`.
    3. Click `Test Database Connection` and make sure the plugin shows `Connected to (your database name)`.
    4. Click `Check database configuration` and make sure all checks pass.
3. Configure when synchronisation happens on the `Sync Settings` tab. (You can also do this later.)
    1. Enable `LiveSync` if you want real-time synchronisation.
    2. Otherwise, set the synchronisation method according to your needs. By default, no automatic synchronisation is enabled, which means you need to trigger synchronisation manually.
    3. Other options are also here. Enabling `Use Trash for deleted files` is recommended, but you can also leave everything unchanged.
4. Configure miscellaneous features.
    1. Enabling `Show status inside editor` displays the status in the top-right corner of the editor. (Recommended.)
5. Go back to the editor and wait for the initial scan to finish.
6. When the status stops changing and the ⏹️ icon shows COMPLETED (with no ⏳ and 🧩 icons), you are ready to synchronise with the server.
7. Press the replicate icon on the ribbon, or run `Replicate now` from the command palette. This sends all of your data to the server.
8. Open the command palette, run `Copy setup URI`, and set a passphrase. This exports your configuration to the clipboard as a link for importing it on your other devices.
1. Set up the server.
    1. [Set up CouchDB on fly.io](docs/setup_flyio.md)
    2. [Set up your own CouchDB](docs/setup_own_server.md)
2. Configure the plugin via [Quick Setup](docs/quick_setup.md).
> [!TIP]
> Fly.io is no longer free. However, despite some issues, we can still use IBM Cloudant. Please refer to [Setup IBM Cloudant](docs/setup_cloudant.md).
> We can also use peer-to-peer synchronisation, which requires no server, or very inexpensive object storage (Cloudflare R2 can be used for free).
> Most importantly, though, we can choose a server that we trust ourselves. Setting up your own server is therefore recommended.
> CouchDB can run on a Raspberry Pi. Just be sure to keep the server secure.
**Important: Do not share this link publicly; it contains all of your credentials!** (Although nobody can read it without the passphrase.)
### Subsequent devices
Note: If you synchronise with a non-empty vault, the modification dates and times of the files must match each other. Otherwise, extra transfers may occur or files may be corrupted.
For simplicity, we strongly recommend synchronising into a completely empty vault.
## Information in the status bar
1. Install the plugin.
2. Open the link you exported from the first device.
3. The plugin asks whether you are sure you want to apply the configuration. Answer `Yes` and follow the instructions below:
    1. Answer `Yes` to `Keep local DB?`.
    *Note: If you want to keep your existing local vault, you must answer `No` to this question and `No` to `Rebuild the database?`.*
    2. Answer `Yes` to `Keep remote DB?`.
    3. Answer `Yes` to `Replicate once?`.
    Once done, all of your settings will be imported from the first device.
4. Your notes should be synchronised soon.
## Files look corrupted...
Open the setup link again and answer as follows:
- If your local database looks corrupted (your local Obsidian files look strange)
    - Answer `No` to `Keep local DB?`
- If your remote database looks corrupted (replication was interrupted)
    - Answer `No` to `Keep remote DB?`
If you answer `No` to both, your database will be rebuilt from the content on your device, and the remote database will lock out the other devices; you will have to synchronise all of your devices again. (At this point, almost all files will be synchronised by timestamp, so you can safely use your existing vault.)
## Test server
Setting up Cloudant or a local CouchDB instance is a bit complicated, so I have set up a [self-hosted-livesync test server](https://olstaste.vrtmrz.net/). Feel free to try it out!
Note: Please read the "Limitations" section carefully. Do not send your private vault.
## Status bar information
The synchronisation status is shown in the status bar.
The synchronisation status is shown in the status bar with the following icons:
- Activity indicator
    - 📲 Network request
- Status
    - ⏹️ Ready
    - 💤 LiveSync enabled, waiting for changes
    - ⚡️ Synchronising
    - ⚠ An error has occurred.
- ↑ Number of uploaded chunks and metadata
- ↓ Number of downloaded chunks and metadata
- ⏳ Number of pending processes
- 🧩 Number of files waiting for chunks
If you have deleted or renamed files, please wait until the ⏳ icon disappears.
    - ⏹️ Stopped
    - 💤 LiveSync enabled, waiting for changes
    - ⚡️ Synchronising
    - ⚠ An error has occurred
- Statistics
    - ↑ Uploaded chunks and metadata
    - ↓ Downloaded chunks and metadata
- Progress indicators
    - 📥 Unprocessed transferred items
    - 📄 Database operations in progress
    - 💾 Storage write operations in progress
    - ⏳ Storage read operations in progress
    - 🛫 Pending storage read operations
    - 📬 Batched storage read operations
    - ⚙️ Hidden-file storage operations in progress or pending
    - 🧩 Chunks being waited for
    - 🔌 Customisation items (settings, snippets, and plugins) being processed
To avoid corruption of files and the database, please wait until all progress indicators have disappeared, as far as possible, before closing Obsidian (the plugin will also try to resume the synchronisation progress). This is especially important if you have deleted or renamed files.
## Tips
- If a folder becomes empty after replication, it is deleted by default. You can turn this behaviour off; check the [settings](docs/settings.md).
- LiveSync mode may increase battery consumption on mobile devices. Periodic synchronisation plus conditional automatic synchronisation is recommended.
- Obsidian on mobile platforms cannot connect to non-secure (HTTP) or locally signed servers, even if the root certificate is installed on the device.
- There is no configuration like "exclude_folders".
- During synchronisation, files are compared by modification time and the older one is overwritten by the newer one. The plugin then checks for conflicts and opens a dialogue if a merge is required.
- In rare cases, files in the database may become corrupted. When a received file looks corrupted, the plugin will not write it to local storage. If a local version of the file exists on your device, you can overwrite the corrupted version by editing the local file and synchronising it. However, if the file does not exist on any of your devices, it cannot be rescued. In that case, you can delete such corrupted files from the settings dialogue.
- To stop the plugin's boot-up sequence (for example, to fix database issues), you can create a "redflag.md" file at the root of the vault.
- Q: The database is growing; how can I shrink it?
  A: Each document keeps its last 100 revisions, which are used for detecting and resolving conflicts. Imagine a device that has been offline for a while coming back online: it has to compare its notes with those stored remotely. If there is a historical revision that both once shared, the file can be updated safely (just like git's fast-forward). Even if the file is not in the revision history, we only need to check the differences made after the last revision the two devices have in common, which is like git's conflict resolution. So to fundamentally solve the problem of an oversized database, the database would have to be redesigned like an enlarged git repository.
- More technical information is in [Technical Information](docs/tech_info.md).
- If you want to synchronise files without Obsidian, you can use [filesystem-livesync](https://github.com/vrtmrz/filesystem-livesync).
- A WebClipper is also available on the Chrome Web Store: [obsidian-livesync-webclip](https://chrome.google.com/webstore/detail/obsidian-livesync-webclip/jfpaflmpckblieefkegjncjoceapakdf)
## Tips and troubleshooting
If you are having problems setting up the plugin, please refer to [Tips and Troubleshooting](docs/troubleshooting.md).
Repository: [obsidian-livesync-webclip](https://github.com/vrtmrz/obsidian-livesync-webclip) (documentation under construction)
## Acknowledgements
This project keeps moving forward smoothly thanks to:
- The many [contributors](https://github.com/vrtmrz/obsidian-livesync/graphs/contributors).
- The many [GitHub sponsors](https://github.com/sponsors/vrtmrz#sponsors).
- JetBrains Community Programs / support for open-source projects. <img src="https://resources.jetbrains.com/storage/products/company/brand/logos/jetbrains.png" alt="JetBrains logo" height="24">
## License
May everyone who has contributed be honoured and remembered for their kindness and generosity.
The source code is licensed under the MIT License.
The source code is licensed under the MIT License.
## License
This project is licensed under the MIT License.

19
devs.md
View File

@@ -132,6 +132,25 @@ export class ModuleExample extends AbstractObsidianModule {
- [esbuild.config.mjs](esbuild.config.mjs) - Build configuration with platform/dev file replacement
- [package.json](package.json) - Scripts reference and dependencies
## Beta Policy
- Beta versions are denoted by appending `-patched-N` to the base version number.
- `The base version` mostly corresponds to the stable release version.
- e.g., v0.25.41-patched-1 is equivalent to v0.25.42-beta1.
- This notation is due to SemVer incompatibility of Obsidian's plugin system.
- Hence, this release is `0.25.41-patched-1`.
- Each beta version may include larger changes, but bug fixes will often not be included.
- I think that, in most cases, bug fixes will result in stable releases.
- They will not be released per branch or backported; they will simply be released.
- Bug fixes for previous versions will be applied to the latest beta version.
This means that if xx.yy.02-patched-1 exists and there is a defect in xx.yy.01, the fix is applied to xx.yy.02-patched-1, yielding xx.yy.02-patched-2.
If the fix is required immediately, it is released as xx.yy.02 (with xx.yy.01-patched-1).
- This procedure remains unchanged from the current one.
- At the very least, I am using the latest beta.
- However, I will not be using a beta continuously for a week after it has been released. It is probably closer to an RC in nature.
In short, the situation remains unchanged for me, but it means you all become a little safer. Thank you for your understanding!
## Contribution Guidelines
- Follow existing code style and conventions

168
docs/datastructure.md Normal file
View File

@@ -0,0 +1,168 @@
# Data Structures of Self-Hosted LiveSync
## Overview
Self-hosted LiveSync uses the following types of documents:
- Metadata
- Legacy Metadata
- Binary Metadata
- Plain Metadata
- Chunk
- Versioning
- Synchronise Information
- Synchronise Parameters
- Milestone Information
## Description of Each Data Structure
All documents inherit from the `DatabaseEntry` interface. This is necessary for conflict resolution and deletion flags.
```ts
export interface DatabaseEntry {
    _id: DocumentID;
    _rev?: string;
    _deleted?: boolean;
}
```
### Versioning Document
This document stores version information for Self-hosted LiveSync.
The ID is fixed as `obsydian_livesync_version` [VERSIONING_DOCID]. Yes, the typo has become a curse.
When Self-hosted LiveSync detects changes to this document via Replication, it reads the version information and checks compatibility.
If there are major changes, synchronisation may be stopped.
Please refer to negotiation.ts.
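A hypothetical sketch of that check is shown below; the `version` field name and the maximum supported value are assumptions for illustration, and the real logic lives in negotiation.ts.
```ts
import PouchDB from "pouchdb";

// Hypothetical sketch of the compatibility check described above. The field name
// `version` and the maximum supported value are assumptions for illustration;
// see negotiation.ts for the actual logic.
interface VersionInfo {
    _id: string;
    version: number;
}

const VERSIONING_DOCID = "obsydian_livesync_version"; // the historical typo is intentional
const MAX_SUPPORTED_VERSION = 2; // assumed value, for illustration only

async function isRemoteCompatible(db: PouchDB.Database): Promise<boolean> {
    try {
        const doc = await db.get<VersionInfo>(VERSIONING_DOCID);
        // Stop synchronisation when the remote reports a version newer than we understand.
        return doc.version <= MAX_SUPPORTED_VERSION;
    } catch (ex: any) {
        if (ex?.status === 404) return true; // no versioning document yet: nothing to negotiate
        throw ex;
    }
}
```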
### Synchronise Information Document
This document stores information that should be verified in synchronisation settings.
The ID is fixed as `syncinfo` [SYNCINFO_ID].
The information stored in this document is only the conditions necessary for synchronisation to succeed, and as of v0.25.43, only a random string is stored.
This document is only used during rebuilds from the settings screen for CouchDB-based synchronisation, making it like an appendix. It may be removed in the future.
### Synchronise Parameters Document
This document stores synchronisation parameters.
Synchronisation parameters include the protocol version and salt used for encryption, but do not include chunking settings.
The ID is fixed as `_local/obsidian_livesync_sync_parameters` [DOCID_SYNC_PARAMETERS] or `_obsidian_livesync_journal_sync_parameters.json` [DOCID_JOURNAL_SYNC_PARAMETERS].
This document exists only on the remote and not locally.
This document stores the following information.
It is read each time before connecting and is used to verify that the E2EE settings match.
A mismatch cannot be ignored, and synchronisation will be stopped.
```ts
export interface SyncParameters extends DatabaseEntry {
    _id: typeof DOCID_SYNC_PARAMETERS;
    type: (typeof EntryTypes)["SYNC_PARAMETERS"];
    protocolVersion: ProtocolVersion;
    pbkdf2salt: string;
}
```
#### protocolVersion
This field indicates the protocol version used by the remote. Mostly, this value should be `2` (ProtocolVersions.ADVANCED_E2EE), which indicates safer E2EE support.
#### pbkdf2salt
This field stores the salt used for PBKDF2 key derivation on the remote. This salt and the passphrase provide the E2EE encryption keys.
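As a rough illustration, a key could be derived from the passphrase and this salt with the Web Crypto API as in the sketch below. The iteration count, hash, AES-GCM key type, and the Base64 salt encoding are assumptions, not the plugin's actual parameters; see encryption.ts for the real derivation.
```ts
// Minimal sketch of PBKDF2 key derivation from the passphrase and the remote's
// pbkdf2salt, using the Web Crypto API. Iteration count, hash, AES-GCM key type,
// and the Base64 salt encoding are assumptions for illustration only.
async function deriveE2EEKey(passphrase: string, saltBase64: string): Promise<CryptoKey> {
    const enc = new TextEncoder();
    const salt = Uint8Array.from(atob(saltBase64), (c) => c.charCodeAt(0));
    const keyMaterial = await crypto.subtle.importKey(
        "raw",
        enc.encode(passphrase),
        "PBKDF2",
        false,
        ["deriveKey"]
    );
    return crypto.subtle.deriveKey(
        { name: "PBKDF2", salt, iterations: 100_000, hash: "SHA-256" },
        keyMaterial,
        { name: "AES-GCM", length: 256 },
        false,
        ["encrypt", "decrypt"]
    );
}
```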
### Milestone Information Document
This document stores information about how the remote accepts and recognises clients.
The ID is fixed as `_local/obsidian_livesync_milestone` [MILESTONE_DOCID].
This document exists only on the remote and not locally.
This document is used to indicate synchronisation progress and includes the version range of accepted chunks for each node and adjustment values for each node.
Tweak mismatches are determined based on the information in this document.
For details, please refer to LiveSyncReplicator.ts, LiveSyncJournalReplicator.ts, and LiveSyncDBFunctions.ts.
```ts
export interface EntryMilestoneInfo extends DatabaseEntry {
    _id: typeof MILESTONE_DOCID;
    type: EntryTypes["MILESTONE_INFO"];
    created: number;
    accepted_nodes: string[];
    node_info: { [key: NodeKey]: NodeData };
    locked: boolean;
    cleaned?: boolean;
    node_chunk_info: { [key: NodeKey]: ChunkVersionRange };
    tweak_values: { [key: NodeKey]: TweakValues };
}
```
### locked
If any client has requested that the remote be locked, this is set to true.
When set to true, clients will stop synchronising unless they are included in accepted_nodes.
### cleaned
If any client has cleaned up the remote, this is set to true.
In this case, clients will stop synchronising, as they need to rebuild again.
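Putting the two flags together, a client-side check might look like the following sketch, based on the EntryMilestoneInfo fields above; how the node ID is obtained and how synchronisation is actually halted are not shown.
```ts
// Sketch of the decision described above, using the fields of EntryMilestoneInfo.
// The surrounding plumbing (obtaining nodeID, stopping replication) is assumed.
type MilestoneLike = { locked: boolean; cleaned?: boolean; accepted_nodes: string[] };

function canContinueSync(milestone: MilestoneLike, nodeID: string): boolean {
    if (milestone.cleaned) {
        // The remote has been cleaned up: this client has to rebuild before syncing again.
        return false;
    }
    if (milestone.locked && !milestone.accepted_nodes.includes(nodeID)) {
        // The remote is locked and this client is not in the accepted list.
        return false;
    }
    return true;
}
```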
### Metadata Document
Metadata documents store metadata for Obsidian notes.
```ts
export interface MetadataDocument extends DatabaseEntry {
    _id: DocumentID;
    ctime: number;
    mtime: number;
    size: number;
    deleted?: boolean;
    eden: Record<string, EdenChunk>; // Obsolete
    path: FilePathWithPrefix;
    children: string[];
    type: EntryTypes["NOTE_LEGACY" | "NOTE_BINARY" | "NOTE_PLAIN"];
}
```
### type
This field indicates the type of Metadata document.
Note that, by convention, Self-hosted LiveSync does not save the MIME type of the file, but distinguishes file types with this field.
Possible values are as follows:
- NOTE_LEGACY: Legacy metadata document
  - Please do not use this type.
- NOTE_BINARY: Binary metadata document (newnote)
- NOTE_PLAIN: Plain metadata document (plain)
#### children
This field stores an array of Chunk Document IDs.
#### \_id, path
\_id is generated based on the path of the Obsidian note.
- If the path starts with `_`, it is converted to `/_` for convenience.
- If Case Sensitive is disabled, it is converted to lowercase.
The path field stores the path as is. However, when Obfuscation is enabled, it contains `f:{obfuscated path}` instead.
When Property Encryption is enabled, the path field stores all properties, including children, mtime, ctime, and size, in an encrypted state. Please refer to encryption.ts.
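For the plain (non-obfuscated, non-encrypted) case, the rules above can be sketched as follows; this is illustrative only and omits obfuscation and property encryption.
```ts
// Sketch of the plain path-to-_id rules listed above: a leading `_` is escaped
// as `/_`, and the path is lowercased when case sensitivity is disabled.
function pathToPlainId(path: string, caseSensitive: boolean): string {
    let id = path.startsWith("_") ? `/${path}` : path;
    if (!caseSensitive) id = id.toLowerCase();
    return id;
}

// e.g. pathToPlainId("_templates/Daily.md", false) -> "/_templates/daily.md"
```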
### Chunk Document
```ts
export type EntryLeaf = DatabaseEntry & {
    _id: DocumentID;
    type: EntryTypes["CHUNK"];
    data: string;
};
```
Chunk documents store parts of note content.
- The type field is always `leaf` (i.e., `EntryTypes["CHUNK"]`).
- The data field stores the chunk content.
- The \_id field is generated based on a hash of the content and the passphrase.
Hash functions used include xxHash and SHA-1, depending on settings.
Chunking methods used include Contextual Chunking and Rabin-Karp Chunking, depending on settings.
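As a rough sketch of how such an ID could be formed, using the `h:` chunk prefix seen in the source and the SHA-1 option mentioned above; the exact concatenation, encoding, and xxHash variant used by the plugin are not shown here.
```ts
import { createHash } from "node:crypto";

// Illustrative only: a content-addressed chunk ID derived from the passphrase
// and the chunk data with SHA-1, prefixed with the chunk header `h:`.
function makeChunkId(passphrase: string, data: string): string {
    const digest = createHash("sha1").update(passphrase).update(data).digest("base64");
    return `h:${digest}`;
}
```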

16
docs/tech_info_cn.md Normal file
View File

@@ -0,0 +1,16 @@
# Architecture
## How this plugin synchronises
![Synchronization](../images/1.png)
1. When a note is created or modified, Obsidian fires events. Self-hosted LiveSync captures these events and reflects the changes into the local PouchDB.
2. PouchDB synchronises the changes to the remote CouchDB, either automatically or manually.
3. Other devices watch the remote CouchDB for changes and pick up the latest updates.
4. Self-hosted LiveSync reflects the synchronised change sets into the Obsidian vault.
Note: The diagram is simplified and shows only one-way synchronisation between two devices. In practice, synchronisation is bidirectional and happens between multiple devices at the same time.
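The continuous replication in steps 2 and 3 can be pictured with PouchDB's live sync API. This is a minimal sketch, not the plugin's actual code; the database name, URL, and credentials are placeholders.
```ts
import PouchDB from "pouchdb";

// Minimal sketch of steps 2-3: continuous, bidirectional replication between the
// local PouchDB and a remote CouchDB. Names, URL, and credentials are placeholders.
const localDB = new PouchDB("obsidian-livesync-local");
const remoteDB = new PouchDB("https://couch.example.com/vault", {
    auth: { username: "user", password: "password" },
});

localDB
    .sync(remoteDB, { live: true, retry: true })
    .on("change", (info) => console.log("replicated:", info.direction))
    .on("error", (err) => console.error("replication error:", err));
```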
## How bandwidth consumption is reduced
![dedupe](../images/2.png)

View File

@@ -1,5 +1,4 @@
# Tips and Troubleshooting
- [Tips and Troubleshooting](#tips-and-troubleshooting)
- [Tips](#tips)
- [CORS avoidance](#cors-avoidance)
@@ -14,7 +13,12 @@
- [Notable bugs and fixes](#notable-bugs-and-fixes)
- [Binary files get bigger on iOS](#binary-files-get-bigger-on-ios)
- [Some setting name has been changed](#some-setting-name-has-been-changed)
- [FAQ](#faq)
- [Questions and Answers](#questions-and-answers)
- [How should I share the settings between multiple devices?](#how-should-i-share-the-settings-between-multiple-devices)
- [What should I enter for the passphrase of Setup-URI?](#what-should-i-enter-for-the-passphrase-of-setup-uri)
- [Why the settings of Self-hosted LiveSync itself is disabled in default?](#why-the-settings-of-self-hosted-livesync-itself-is-disabled-in-default)
- [The plug-in says `something went wrong`.](#the-plug-in-says-something-went-wrong)
- [A large number of files were deleted, and were synchronised!](#a-large-number-of-files-were-deleted-and-were-synchronised)
- [Why `Use an old adapter for compatibility` is somehow enabled in my vault?](#why-use-an-old-adapter-for-compatibility-is-somehow-enabled-in-my-vault)
- [ZIP (or any extensions) files were not synchronised. Why?](#zip-or-any-extensions-files-were-not-synchronised-why)
- [I hope to report the issue, but you said you needs `Report`. How to make it?](#i-hope-to-report-the-issue-but-you-said-you-needs-report-how-to-make-it)
@@ -32,6 +36,7 @@
- [While using Cloudflare Tunnels, often Obsidian API fallback and `524` error occurs.](#while-using-cloudflare-tunnels-often-obsidian-api-fallback-and-524-error-occurs)
- [On the mobile device, cannot synchronise on the local network!](#on-the-mobile-device-cannot-synchronise-on-the-local-network)
- [I think that something bad happening on the vault...](#i-think-that-something-bad-happening-on-the-vault)
- [Flag Files](#flag-files)
- [Old tips](#old-tips)
<!-- - -->
@@ -39,19 +44,21 @@
## Tips
### CORS avoidance
If we are unable to configure CORS properly for any reason (for example, if we cannot configure non-administered network devices), we may choose to ignore CORS.
To use the Obsidian API (also known as the Non-Native API) to bypass CORS, we can enable the toggle ``Use Request API to avoid `inevitable` CORS problem``.
<!-- Add **Long explanation of CORS** here for integrity -->
### CORS configuration with reverse proxy
- IMPORTANT: CouchDB handles CORS by itself. Do not process CORS on the reverse
proxy.
- Do not process `Option` requests on the reverse proxy!
- Make sure `host` and `X-Forwarded-For` headers are forwarded to the CouchDB.
- If you are using a subdirectory, make sure to handle it properly. More
detailed information is in the
[CouchDB documentation](https://docs.couchdb.org/en/stable/best-practices/reverse-proxies.html).
- Do not process `Option` requests on the reverse proxy!
- Make sure `host` and `X-Forwarded-For` headers are forwarded to the CouchDB.
- If you are using a subdirectory, make sure to handle it properly. More
detailed information is in the
[CouchDB documentation](https://docs.couchdb.org/en/stable/best-practices/reverse-proxies.html).
Minimal configurations are as follows:
@@ -170,7 +177,56 @@ Probably, we can accept that.
| Setup Wizard | Minimal Setup |
| Check database configuration | Check and Fix database configuration |
## FAQ
## Questions and Answers
### How should I share the settings between multiple devices?
- Device setup:
- Using `Setup URI` is the most straightforward way.
- Setting changes during use:
- Use `Sync settings via Markdown files` on the `🔄️ Sync settings` pane.
### What should I enter for the passphrase of Setup-URI?
- Anything you like is OK. However, the recommendation is as follows:
- Include the vault (group) information.
- Include the date of operation.
- Anything random for your security.
- For example, `MyVault-20240901-r4nd0mStr1ng`.
- Why?
- The Setup-URI is encoded; that means it cannot indicate the actual settings. Hence, if you use the same passphrase for multiple vaults, you may accidentally mix up vaults.
### Why the settings of Self-hosted LiveSync itself is disabled in default?
Basically, if we configure all `additionalSuffixOfDatabaseName` the same, we can synchronise this file between multiple devices.
(`additionalSuffixOfDatabaseName` should be unique in each device, not in the synchronised vaults).
However, if we synchronise the settings of Self-hosted LiveSync itself, we may encounter some unexpected behaviours.
For example, if a setting that 'let Self-hosted LiveSync setting be excluded' is synced, it is very unlikely that things will recover automatically after this, and there is little chance we will even notice this, even if we change our minds and change the settings back on other devices. It could get even worse if incompatible changes are automatically reflected; everything will break.
### The plug-in says `something went wrong`.
There are many cases where this is really unclear. One possibility is that the chunk fetch did not go well.
1. Restarting Obsidian sometimes helps (fetch-order problem).
2. If there actually are no chunks, please perform `Recreate missing chunks for all files` on the `🧰 Hatch` pane on the other devices, and synchronise again (restarting Obsidian may also help).
3. If the problem persists, please perform `Verify and repair all files` on the `🧰 Hatch` pane. If our local database and storage do not match, we will be asked which one to apply.
### A large number of files were deleted, and were synchronised!
1. Backup everything important.
- Your local vault.
- Your CouchDB database (this can be done by replicating to another database).
2. Prepare an empty vault.
3. Place `redflag.md` at the top of the vault.
4. Apply the settings **BUT DO NOT PROCEED TO RESTORE YET**.
- You can use `Setup URI`, QR Code, or manually apply the settings.
5. Set `Maximum file modification time for reflected file events` in `Remediation` on the `🩹 Patches` pane.
- If you know when the files were deleted, set the time a bit before that.
- If not, bisecting may help us.
6. Delete `redflag.md`.
7. Perform `Reset synchronisation on This Device` on the `🎛️ Maintenance` pane.
This mode is very fragile. Please be careful.
### Why `Use an old adapter for compatibility` is somehow enabled in my vault?
@@ -248,15 +304,17 @@ files. Only it takes a bit of time and traffics.
### How to launch the DevTools
#### On Desktop Devices
We can launch the DevTools by pressing `ctrl`+`shift`+`i` (`Command`+`shift`+`i` on Mac).
#### On Android
Please refer to [Remote debug Android devices](https://developer.chrome.com/docs/devtools/remote-debugging/).
Once the DevTools have been launched, everything operates the same as on a PC.
#### On iOS, iPadOS devices
If we have a Mac, we can inspect from Safari on the Mac. Please refer to [Inspecting iOS and iPadOS](https://developer.apple.com/documentation/safari-developer-tools/inspecting-ios).
If we have a Mac, we can inspect from Safari on the Mac. Please refer to [Inspecting iOS and iPadOS](https://developer.apple.com/documentation/safari-developer-tools/inspecting-ios).
### How can I use the DevTools?
@@ -302,13 +360,20 @@ self-signed certificate.
### I think that something bad happening on the vault...
Place `redflag.md` on top of the vault, and restart Obsidian. The most simple
Place the [flag file](#flag-files) on top of the vault, and restart Obsidian. The most simple
way is to create a new note and rename it to `redflag`. Of course, we can put it
without Obsidian.
If there is `redflag.md`, Self-hosted LiveSync suspends all database and storage
For example, if there is `redflag.md`, Self-hosted LiveSync suspends all database and storage
processes.
### Flag Files
The flag file is a simple Markdown file designed to prevent storage events and database events in self-hosted LiveSync.
Its very existence is significant; it may be left blank, or it may contain text; either is acceptable.
This file is in Markdown format so that it can be placed in the Vault externally, even if Obsidian fails to launch.
There are some options to use `redflag.md`.
| Filename | Human-Friendly Name | Description |

View File

@@ -1,7 +1,7 @@
{
"id": "obsidian-livesync",
"name": "Self-hosted LiveSync",
"version": "0.25.41-patched-1",
"version": "0.25.43-patched-9",
"minAppVersion": "0.9.12",
"description": "Community implementation of self-hosted livesync. Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.",
"author": "vorotamoroz",

3359
package-lock.json generated

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "obsidian-livesync",
"version": "0.25.41",
"version": "0.25.43-patched-9",
"description": "Reflect your vault changes to some other devices immediately. Please make sure to disable other synchronize solutions to avoid content corruption or duplication.",
"main": "main.js",
"type": "module",
@@ -16,6 +16,8 @@
"dev": "node --env-file=.env esbuild.config.mjs",
"prebuild": "npm run bakei18n",
"build": "node esbuild.config.mjs production",
"buildVite": "npx dotenv-cli -e .env -- vite build --mode production",
"buildViteOriginal": "npx dotenv-cli -e .env -- vite build --mode original",
"buildDev": "node esbuild.config.mjs dev",
"lint": "eslint src",
"svelte-check": "svelte-check --tsconfig ./tsconfig.json",
@@ -43,6 +45,7 @@
"test:docker-s3:stop": "npm run test:docker-s3:down",
"test:docker-p2p:up": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/p2p-start.sh",
"test:docker-p2p:init": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/p2p-init.sh",
"test:docker-p2p:start": "npm run test:docker-p2p:up && sleep 3 && npm run test:docker-p2p:init",
"test:docker-p2p:down": "npx dotenv-cli -e .env -e .test.env -- ./test/shell/p2p-stop.sh",
"test:docker-p2p:stop": "npm run test:docker-p2p:down",
"test:docker-all:up": "npm run test:docker-couchdb:up && npm run test:docker-s3:up && npm run test:docker-p2p:up",
@@ -105,6 +108,7 @@
"pouchdb-replication": "^9.0.0",
"pouchdb-utils": "^9.0.0",
"prettier": "3.5.2",
"rollup-plugin-copy": "^3.5.0",
"svelte": "5.41.1",
"svelte-check": "^4.3.3",
"svelte-preprocess": "^6.0.3",

View File

@@ -66,7 +66,7 @@
"outputs": [],
"source": [
"# see https://fly.io/docs/reference/regions/\n",
"region = \"nrt/Tokyo, Japan\" #@param [\"ams/Amsterdam, Netherlands\",\"arn/Stockholm, Sweden\",\"atl/Atlanta, Georgia (US)\",\"bog/Bogotá, Colombia\",\"bos/Boston, Massachusetts (US)\",\"cdg/Paris, France\",\"den/Denver, Colorado (US)\",\"dfw/Dallas, Texas (US)\",\"ewr/Secaucus, NJ (US)\",\"eze/Ezeiza, Argentina\",\"gdl/Guadalajara, Mexico\",\"gig/Rio de Janeiro, Brazil\",\"gru/Sao Paulo, Brazil\",\"hkg/Hong Kong, Hong Kong\",\"iad/Ashburn, Virginia (US)\",\"jnb/Johannesburg, South Africa\",\"lax/Los Angeles, California (US)\",\"lhr/London, United Kingdom\",\"mad/Madrid, Spain\",\"mia/Miami, Florida (US)\",\"nrt/Tokyo, Japan\",\"ord/Chicago, Illinois (US)\",\"otp/Bucharest, Romania\",\"phx/Phoenix, Arizona (US)\",\"qro/Querétaro, Mexico\",\"scl/Santiago, Chile\",\"sea/Seattle, Washington (US)\",\"sin/Singapore, Singapore\",\"sjc/San Jose, California (US)\",\"syd/Sydney, Australia\",\"waw/Warsaw, Poland\",\"yul/Montreal, Canada\",\"yyz/Toronto, Canada\" ] {allow-input: true}\n",
"region = \"nrt/Tokyo, Japan\" #@param [\"jnb/Johannesburg, South Africa\",\"bom/Mumbai, India\",\"sin/Singapore, Singapore\",\"syd/Sydney, Australia\",\"nrt/Tokyo, Japan\",\"ams/Amsterdam, Netherlands\",\"fra/Frankfurt, Germany\",\"lhr/London, United Kingdom\",\"cdg/Paris, France\",\"arn/Stockholm, Sweden\",\"iad/Ashburn, Virginia (US)\",\"ord/Chicago, Illinois (US)\",\"dfw/Dallas, Texas (US)\",\"lax/Los Angeles, California (US)\",\"sjc/San Jose, California (US)\",\"ewr/Secaucus, NJ (US)\",\"yyz/Toronto, Canada\",\"gru/Sao Paulo, Brazil\"] {allow-input: true}\n",
"%env region={region.split(\"/\")[0]}\n",
"#%env appame=\n",
"#%env username=\n",

View File

@@ -20,6 +20,7 @@
"vite": "^7.3.0"
},
"imports": {
"../../src/worker/bgWorker.ts": "../../src/worker/bgWorker.mock.ts"
"../../src/worker/bgWorker.ts": "../../src/worker/bgWorker.mock.ts",
"@lib/worker/bgWorker.ts": "@lib/worker/bgWorker.mock.ts"
}
}

View File

@@ -34,7 +34,11 @@ import { TrysteroReplicator } from "@lib/replication/trystero/TrysteroReplicator
import { SETTING_KEY_P2P_DEVICE_NAME } from "@lib/common/types";
import { ServiceContext } from "@lib/services/base/ServiceBase";
import type { InjectableServiceHub } from "@lib/services/InjectableServices";
import { Menu } from "@/lib/src/services/implements/browser/Menu";
import { Menu } from "@lib/services/implements/browser/Menu";
import type { InjectableVaultServiceCompat } from "@lib/services/implements/injectable/InjectableVaultService";
import { SimpleStoreIDBv2 } from "octagonal-wheels/databases/SimpleStoreIDBv2";
import type { InjectableAPIService } from "@/lib/src/services/implements/injectable/InjectableAPIService";
import type { BrowserAPIService } from "@/lib/src/services/implements/browser/BrowserAPIService";
function addToList(item: string, list: string) {
return unique(
@@ -79,7 +83,10 @@ export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
constructor() {
const browserServiceHub = new BrowserServiceHub<ServiceContext>();
this.services = browserServiceHub;
this.services.vault.getVaultName.setHandler(() => "p2p-livesync-web-peer");
(this.services.API as BrowserAPIService<ServiceContext>).getSystemVaultName.setHandler(
() => "p2p-livesync-web-peer"
);
}
async init() {
// const { simpleStoreAPI } = await getWrappedSynchromesh();
@@ -95,11 +102,10 @@ export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
Logger(ex, LOG_LEVEL_VERBOSE);
}
}
const repStore = this.services.database.openSimpleStore<any>("p2p-livesync-web-peer");
const repStore = SimpleStoreIDBv2.open<any>("p2p-livesync-web-peer");
this._simpleStore = repStore;
let _settings = (await repStore.get("settings")) || ({ ...P2P_DEFAULT_SETTINGS } as P2PSyncSetting);
this.services.setting.settings = _settings as any;
this.plugin = {
saveSettings: async () => {
await repStore.set("settings", _settings);
@@ -141,9 +147,9 @@ export class P2PReplicatorShim implements P2PReplicatorBase, CommandShim {
simpleStore(): SimpleStore<any> {
return this._simpleStore;
}
handleReplicatedDocuments(docs: EntryDoc[]): Promise<void> {
handleReplicatedDocuments(docs: EntryDoc[]): Promise<boolean> {
// No op. This is a client and does not need to process the docs
return Promise.resolve();
return Promise.resolve(true);
}
getPluginShim() {

View File

@@ -4,7 +4,7 @@ import { type mount, unmount } from "svelte";
export abstract class SvelteItemView extends ItemView {
abstract instantiateComponent(target: HTMLElement): ReturnType<typeof mount> | Promise<ReturnType<typeof mount>>;
component?: ReturnType<typeof mount>;
async onOpen() {
override async onOpen() {
await super.onOpen();
this.contentEl.empty();
await this._dismountComponent();
@@ -17,7 +17,7 @@ export abstract class SvelteItemView extends ItemView {
this.component = undefined;
}
}
async onClose() {
override async onClose() {
await super.onClose();
if (this.component) {
await unmount(this.component);

View File

@@ -21,7 +21,6 @@ export const EVENT_REQUEST_CLOSE_P2P = "request-close-p2p";
export const EVENT_REQUEST_RUN_DOCTOR = "request-run-doctor";
export const EVENT_REQUEST_RUN_FIX_INCOMPLETE = "request-run-fix-incomplete";
export const EVENT_ON_UNRESOLVED_ERROR = "on-unresolved-error";
export const EVENT_ANALYSE_DB_USAGE = "analyse-db-usage";
export const EVENT_REQUEST_PERFORM_GC_V3 = "request-perform-gc-v3";
@@ -44,7 +43,6 @@ declare global {
[EVENT_REQUEST_SHOW_SETUP_QR]: undefined;
[EVENT_REQUEST_RUN_DOCTOR]: string;
[EVENT_REQUEST_RUN_FIX_INCOMPLETE]: undefined;
[EVENT_ON_UNRESOLVED_ERROR]: undefined;
[EVENT_ANALYSE_DB_USAGE]: undefined;
[EVENT_REQUEST_CHECK_REMOTE_SIZE]: undefined;
[EVENT_REQUEST_PERFORM_GC_V3]: undefined;

View File

@@ -49,21 +49,16 @@ export type queueItem = {
warned?: boolean;
};
// Hidden items (Now means `chunk`)
export const CHeader = "h:";
// Plug-in Stored Container (Obsolete)
export const PSCHeader = "ps:";
export const PSCHeaderEnd = "ps;";
// Internal data Container
export const ICHeader = "i:";
export const ICHeaderEnd = "i;";
export const ICHeaderLength = ICHeader.length;
// Internal data Container (eXtended)
export const ICXHeader = "ix:";
export const FileWatchEventQueueMax = 10;
export { configURIBase, configURIBaseQR } from "../lib/src/common/types.ts";
export {
CHeader,
PSCHeader,
PSCHeaderEnd,
ICHeader,
ICHeaderEnd,
ICHeaderLength,
ICXHeader,
} from "../lib/src/common/models/fileaccess.const.ts";

View File

@@ -23,7 +23,7 @@ import {
type UXFileInfo,
type UXFileInfoStub,
} from "../lib/src/common/types.ts";
import { CHeader, ICHeader, ICHeaderLength, ICXHeader, PSCHeader } from "./types.ts";
export { ICHeader, ICXHeader } from "./types.ts";
import type ObsidianLiveSyncPlugin from "../main.ts";
import { writeString } from "../lib/src/string_and_binary/convert.ts";
import { fireAndForget } from "../lib/src/common/utils.ts";
@@ -31,7 +31,6 @@ import { sameChangePairs } from "./stores.ts";
import { scheduleTask } from "octagonal-wheels/concurrency/task";
import { EVENT_PLUGIN_UNLOADED, eventHub } from "./events.ts";
import { promiseWithResolver, type PromiseWithResolvers } from "octagonal-wheels/promises";
import { AuthorizationHeaderGenerator } from "../lib/src/replication/httplib.ts";
import type { KeyValueDatabase } from "../lib/src/interfaces/KeyValueDatabase.ts";
@@ -63,37 +62,18 @@ export function id2path(id: DocumentID, entry?: EntryHasPath): FilePathWithPrefi
const fixedPath = temp.join(":") as FilePathWithPrefix;
return fixedPath;
}
export function getPath(entry: AnyEntry) {
return id2path(entry._id, entry);
}
export function getPathWithoutPrefix(entry: AnyEntry) {
const f = getPath(entry);
return stripAllPrefixes(f);
}
export function getPathFromTFile(file: TAbstractFile) {
return file.path as FilePath;
}
export function isInternalFile(file: UXFileInfoStub | string | FilePathWithPrefix) {
if (typeof file == "string") return file.startsWith(ICHeader);
if (file.isInternal) return true;
return false;
}
export function getPathFromUXFileInfo(file: UXFileInfoStub | string | FilePathWithPrefix) {
if (typeof file == "string") return file as FilePathWithPrefix;
return file.path;
}
export function getStoragePathFromUXFileInfo(file: UXFileInfoStub | string | FilePathWithPrefix) {
if (typeof file == "string") return stripAllPrefixes(file as FilePathWithPrefix);
return stripAllPrefixes(file.path);
}
export function getDatabasePathFromUXFileInfo(file: UXFileInfoStub | string | FilePathWithPrefix) {
if (typeof file == "string" && file.startsWith(ICXHeader)) return file as FilePathWithPrefix;
const prefix = isInternalFile(file) ? ICHeader : "";
if (typeof file == "string") return (prefix + stripAllPrefixes(file as FilePathWithPrefix)) as FilePathWithPrefix;
return (prefix + stripAllPrefixes(file.path)) as FilePathWithPrefix;
}
import {
isInternalFile,
getPathFromUXFileInfo,
getStoragePathFromUXFileInfo,
getDatabasePathFromUXFileInfo,
} from "@lib/common/typeUtils.ts";
export { isInternalFile, getPathFromUXFileInfo, getStoragePathFromUXFileInfo, getDatabasePathFromUXFileInfo };
const memos: { [key: string]: any } = {};
export function memoObject<T>(key: string, obj: T): T {
@@ -137,32 +117,14 @@ export function trimPrefix(target: string, prefix: string) {
return target.startsWith(prefix) ? target.substring(prefix.length) : target;
}
/**
* returns is internal chunk of file
* @param id ID
* @returns
*/
export function isInternalMetadata(id: FilePath | FilePathWithPrefix | DocumentID): boolean {
return id.startsWith(ICHeader);
}
export function stripInternalMetadataPrefix<T extends FilePath | FilePathWithPrefix | DocumentID>(id: T): T {
return id.substring(ICHeaderLength) as T;
}
export function id2InternalMetadataId(id: DocumentID): DocumentID {
return (ICHeader + id) as DocumentID;
}
// const CHeaderLength = CHeader.length;
export function isChunk(str: string): boolean {
return str.startsWith(CHeader);
}
export function isPluginMetadata(str: string): boolean {
return str.startsWith(PSCHeader);
}
export function isCustomisationSyncMetadata(str: string): boolean {
return str.startsWith(ICXHeader);
}
export {
isInternalMetadata,
id2InternalMetadataId,
isChunk,
isCustomisationSyncMetadata,
isPluginMetadata,
stripInternalMetadataPrefix,
} from "@lib/common/typeUtils.ts";
export class PeriodicProcessor {
_process: () => Promise<any>;
@@ -189,7 +151,7 @@ export class PeriodicProcessor {
() =>
fireAndForget(async () => {
await this.process();
if (this._plugin.services?.appLifecycle?.hasUnloaded()) {
if (this._plugin.services?.control?.hasUnloaded()) {
this.disable();
}
}),
@@ -292,10 +254,8 @@ export function requestToCouchDBWithCredentials(
return _requestToCouchDB(baseUri, credentials, origin, uri, body, method, customHeaders);
}
export const BASE_IS_NEW = Symbol("base");
export const TARGET_IS_NEW = Symbol("target");
export const EVEN = Symbol("even");
import { BASE_IS_NEW, EVEN, TARGET_IS_NEW } from "@lib/common/models/shared.const.symbols.ts";
export { BASE_IS_NEW, EVEN, TARGET_IS_NEW };
// Why 2000? : ZIP FILE Does not have enough resolution.
const resolution = 2000;
export function compareMTime(
@@ -430,30 +390,6 @@ export function displayRev(rev: string) {
return `${number}-${hash.substring(0, 6)}`;
}
type DocumentProps = {
id: DocumentID;
rev?: string;
prefixedPath: FilePathWithPrefix;
path: FilePath;
isDeleted: boolean;
revDisplay: string;
shortenedId: string;
shortenedPath: string;
};
export function getDocProps(doc: AnyEntry): DocumentProps {
const id = doc._id;
const shortenedId = id.substring(0, 10);
const prefixedPath = getPath(doc);
const path = stripAllPrefixes(prefixedPath);
const rev = doc._rev;
const revDisplay = rev ? displayRev(rev) : "0-NOREVS";
// const prefix = prefixedPath.substring(0, prefixedPath.length - path.length);
const shortenedPath = path.substring(0, 10);
const isDeleted = doc._deleted || doc.deleted || false;
return { id, rev, revDisplay, prefixedPath, path, isDeleted, shortenedId, shortenedPath };
}
export function getLogLevel(showNotice: boolean) {
return showNotice ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO;
}
@@ -522,47 +458,3 @@ export function onlyInNTimes(n: number, proc: (progress: number) => any) {
}
};
}
const waitingTasks = {} as Record<string, { task?: PromiseWithResolvers<any>; previous: number; leastNext: number }>;
export function rateLimitedSharedExecution<T>(key: string, interval: number, proc: () => Promise<T>): Promise<T> {
if (!(key in waitingTasks)) {
waitingTasks[key] = { task: undefined, previous: 0, leastNext: 0 };
}
if (waitingTasks[key].task) {
// Extend the previous execution time.
waitingTasks[key].leastNext = Date.now() + interval;
return waitingTasks[key].task.promise;
}
const previous = waitingTasks[key].previous;
const delay = previous == 0 ? 0 : Math.max(interval - (Date.now() - previous), 0);
const task = promiseWithResolver<T>();
void task.promise.finally(() => {
if (waitingTasks[key].task === task) {
waitingTasks[key].task = undefined;
waitingTasks[key].previous = Math.max(Date.now(), waitingTasks[key].leastNext);
}
});
waitingTasks[key] = {
task,
previous: Date.now(),
leastNext: Date.now() + interval,
};
void scheduleTask("thin-out-" + key, delay, async () => {
try {
task.resolve(await proc());
} catch (ex) {
task.reject(ex);
}
});
return task.promise;
}
export function updatePreviousExecutionTime(key: string, timeDelta: number = 0) {
if (!(key in waitingTasks)) {
waitingTasks[key] = { task: undefined, previous: 0, leastNext: 0 };
}
waitingTasks[key].leastNext = Math.max(Date.now() + timeDelta, waitingTasks[key].leastNext);
}

View File

@@ -40,6 +40,7 @@ export type {
MarkdownFileInfo,
ListedFiles,
ValueComponent,
Stat,
} from "obsidian";
import { normalizePath as normalizePath_ } from "obsidian";
const normalizePath = normalizePath_ as <T extends string | FilePath>(from: T) => T;

View File

@@ -1802,7 +1802,7 @@ export class ConfigSync extends LiveSyncCommands {
}
return files;
}
onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
override onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
services.fileProcessing.processOptionalFileEvent.addHandler(this._anyProcessOptionalFileEvent.bind(this));
services.conflict.getOptionalConflictCheckMethod.addHandler(this._anyGetOptionalConflictCheckMethod.bind(this));
services.replication.processVirtualDocument.addHandler(this._anyModuleParsedReplicationResultItem.bind(this));

View File

@@ -14,7 +14,7 @@ export class PluginDialogModal extends Modal {
this.plugin = plugin;
}
onOpen() {
override onOpen() {
const { contentEl } = this;
this.contentEl.style.overflow = "auto";
this.contentEl.style.display = "flex";
@@ -28,7 +28,7 @@ export class PluginDialogModal extends Modal {
}
}
onClose() {
override onClose() {
if (this.component) {
void unmount(this.component);
this.component = undefined;

View File

@@ -50,7 +50,7 @@ export class JsonResolveModal extends Modal {
this.callback = undefined;
}
onOpen() {
override onOpen() {
const { contentEl } = this;
this.titleEl.setText(this.title);
contentEl.empty();
@@ -74,7 +74,7 @@ export class JsonResolveModal extends Modal {
return;
}
onClose() {
override onClose() {
const { contentEl } = this;
contentEl.empty();
// contentEl.empty();

View File

@@ -30,19 +30,18 @@ import {
import {
compareMTime,
unmarkChanges,
getPath,
isInternalMetadata,
markChangesAreSame,
PeriodicProcessor,
TARGET_IS_NEW,
scheduleTask,
getDocProps,
getLogLevel,
autosaveCache,
type MapLike,
onlyInNTimes,
BASE_IS_NEW,
EVEN,
displayRev,
} from "../../common/utils.ts";
import { serialized, skipIfDuplicated } from "octagonal-wheels/concurrency/lock";
import { JsonResolveModal } from "../HiddenFileCommon/JsonResolveModal.ts";
@@ -139,6 +138,7 @@ export class HiddenFileSync extends LiveSyncCommands {
this.updateSettingCache();
});
}
// We cannot initialise autosaveCache because kvDB is not ready yet
// async _everyOnInitializeDatabase(db: LiveSyncLocalDB): Promise<boolean> {
// this._fileInfoLastProcessed = await autosaveCache(this.kvDB, "hidden-file-lastProcessed");
@@ -243,7 +243,7 @@ export class HiddenFileSync extends LiveSyncCommands {
if (isInternalMetadata(doc._id)) {
if (this.isThisModuleEnabled()) {
//system file
const filename = getPath(doc);
const filename = this.getPath(doc);
if (await this.services.vault.isTargetFile(filename)) {
// this.procInternalFile(filename);
await this.processReplicationResult(doc);
@@ -843,9 +843,32 @@ Offline Changed files: ${processFiles.length}`;
// <-- Conflict processing
// --> Event Source Handler (Database)
getDocProps(doc: LoadedEntry) {
/*
type DocumentProps = {
id: DocumentID;
rev?: string;
prefixedPath: FilePathWithPrefix;
path: FilePath;
isDeleted: boolean;
revDisplay: string;
shortenedId: string;
shortenedPath: string;
};
*/
const id = doc._id;
const shortenedId = id.substring(0, 10);
const prefixedPath = this.getPath(doc);
const path = stripAllPrefixes(prefixedPath);
const rev = doc._rev;
const revDisplay = rev ? displayRev(rev) : "0-NOREVS";
// const prefix = prefixedPath.substring(0, prefixedPath.length - path.length);
const shortenedPath = path.substring(0, 10);
const isDeleted = doc._deleted || doc.deleted || false;
return { id, rev, revDisplay, prefixedPath, path, isDeleted, shortenedId, shortenedPath };
}
async processReplicationResult(doc: LoadedEntry): Promise<boolean> {
const info = getDocProps(doc);
const info = this.getDocProps(doc);
const path = info.path;
const headerLine = `Tracking DB ${info.path} (${info.revDisplay}) :`;
const ret = await this.trackDatabaseFileModification(path, headerLine);
@@ -1007,7 +1030,7 @@ Offline Changed files: ${processFiles.length}`;
p.log("Enumerating database files...");
const currentDatabaseFiles = await this.getAllDatabaseFiles();
const allDatabaseMap = Object.fromEntries(
currentDatabaseFiles.map((e) => [stripAllPrefixes(getPath(e)), e])
currentDatabaseFiles.map((e) => [stripAllPrefixes(this.getPath(e)), e])
);
const currentDatabaseFileNames = [...Object.keys(allDatabaseMap)] as FilePath[];
const untrackedLocal = currentStorageFiles.filter((e) => !this._fileInfoLastProcessed.has(e));
@@ -1250,14 +1273,14 @@ Offline Changed files: ${files.length}`;
: currentStorageFilesAll;
p.log("Enumerating database files...");
const allDatabaseFiles = await this.getAllDatabaseFiles();
const allDatabaseMap = new Map(allDatabaseFiles.map((e) => [stripAllPrefixes(getPath(e)), e]));
const allDatabaseMap = new Map(allDatabaseFiles.map((e) => [stripAllPrefixes(this.getPath(e)), e]));
const currentDatabaseFiles = targetFiles
? allDatabaseFiles.filter((e) => targetFiles.some((f) => f == stripAllPrefixes(getPath(e))))
? allDatabaseFiles.filter((e) => targetFiles.some((f) => f == stripAllPrefixes(this.getPath(e))))
: allDatabaseFiles;
const allFileNames = new Set([
...currentStorageFiles,
...currentDatabaseFiles.map((e) => stripAllPrefixes(getPath(e))),
...currentDatabaseFiles.map((e) => stripAllPrefixes(this.getPath(e))),
]);
const storageToDatabase = [] as FilePath[];
const databaseToStorage = [] as MetaEntry[];
@@ -1340,7 +1363,7 @@ Offline Changed files: ${files.length}`;
// However, in perspective of performance and future-proofing, I feel somewhat justified in doing it here.
const currentFiles = targetFiles
? allFiles.filter((e) => targetFiles.some((f) => f == stripAllPrefixes(getPath(e))))
? allFiles.filter((e) => targetFiles.some((f) => f == stripAllPrefixes(this.getPath(e))))
: allFiles;
p.once(`Database to Storage: ${currentFiles.length} files.`);
@@ -1383,7 +1406,7 @@ Offline Changed files: ${files.length}`;
const onlyNew = direction == "pull";
p.log(`Started: Database --> Storage ${onlyNew ? "(Only New)" : ""}`);
const updatedEntries = await this.rebuildFromDatabase(showMessage, targetFiles, onlyNew);
const updatedFiles = updatedEntries.map((e) => stripAllPrefixes(getPath(e)));
const updatedFiles = updatedEntries.map((e) => stripAllPrefixes(this.getPath(e)));
// making doubly sure, No more losing files.
await this.adoptCurrentStorageFilesAsProcessed(updatedFiles);
await this.adoptCurrentDatabaseFilesAsProcessed(updatedFiles);
@@ -1911,7 +1934,7 @@ ${messageFetch}${messageOverwrite}${messageMerge}
*/
// <-- Local Storage SubFunctions
onBindFunction(core: LiveSyncCore, services: typeof core.services) {
override onBindFunction(core: LiveSyncCore, services: typeof core.services) {
// No longer needed on initialisation
// services.databaseEvents.handleOnDatabaseInitialisation(this._everyOnInitializeDatabase.bind(this));
services.appLifecycle.onSettingLoaded.addHandler(this._everyOnloadAfterLoadSettings.bind(this));

View File

@@ -1,5 +1,4 @@
import { LOG_LEVEL_VERBOSE, Logger } from "octagonal-wheels/common/logger";
import { getPath } from "../common/utils.ts";
import { LOG_LEVEL_VERBOSE } from "octagonal-wheels/common/logger";
import {
LOG_LEVEL_INFO,
LOG_LEVEL_NOTICE,
@@ -13,6 +12,7 @@ import type ObsidianLiveSyncPlugin from "../main.ts";
import { MARK_DONE } from "../modules/features/ModuleLog.ts";
import type { LiveSyncCore } from "../main.ts";
import { __$checkInstanceBinding } from "../lib/src/dev/checks.ts";
import { createInstanceLogFunction } from "@/lib/src/services/lib/logUtils.ts";
let noticeIndex = 0;
export abstract class LiveSyncCommands {
@@ -36,13 +36,15 @@ export abstract class LiveSyncCommands {
async path2id(filename: FilePathWithPrefix | FilePath, prefix?: string): Promise<DocumentID> {
return await this.services.path.path2id(filename, prefix);
}
getPath(entry: AnyEntry): FilePathWithPrefix {
return getPath(entry);
return this.services.path.getPath(entry);
}
constructor(plugin: ObsidianLiveSyncPlugin) {
this.plugin = plugin;
this.onBindFunction(plugin, plugin.services);
this._log = createInstanceLogFunction(this.constructor.name, this.services.API);
__$checkInstanceBinding(this);
}
abstract onunload(): void;
@@ -58,13 +60,7 @@ export abstract class LiveSyncCommands {
return this.services.database.isDatabaseReady();
}
_log = (msg: any, level: LOG_LEVEL = LOG_LEVEL_INFO, key?: string) => {
if (typeof msg === "string" && level !== LOG_LEVEL_NOTICE) {
msg = `[${this.constructor.name}]\u{200A} ${msg}`;
}
// console.log(msg);
Logger(msg, level, key);
};
_log: ReturnType<typeof createInstanceLogFunction>;
_verbose = (msg: any, key?: string) => {
this._log(msg, LOG_LEVEL_VERBOSE, key);

View File

@@ -40,9 +40,6 @@ export class P2PReplicator extends LiveSyncCommands implements P2PReplicatorBase
getSettings(): P2PSyncSetting {
return this.plugin.settings;
}
get settings() {
return this.plugin.settings;
}
getDB() {
return this.plugin.localDatabase.localDatabase;
}
@@ -65,7 +62,7 @@ export class P2PReplicator extends LiveSyncCommands implements P2PReplicatorBase
// this.onBindFunction(plugin, plugin.services);
}
async handleReplicatedDocuments(docs: EntryDoc[]): Promise<void> {
async handleReplicatedDocuments(docs: EntryDoc[]): Promise<boolean> {
// console.log("Processing Replicated Docs", docs);
return await this.services.replication.parseSynchroniseResult(
docs as PouchDB.Core.ExistingDocument<EntryDoc>[]
@@ -107,7 +104,7 @@ export class P2PReplicator extends LiveSyncCommands implements P2PReplicatorBase
}
init() {
this._simpleStore = this.services.database.openSimpleStore("p2p-sync");
this._simpleStore = this.services.keyValueDB.openSimpleStore("p2p-sync");
return Promise.resolve(this);
}

View File

@@ -33,18 +33,15 @@
const initialSettings = { ...plugin.settings };
let settings = $state<P2PSyncSetting>(initialSettings);
// const vaultName = service.vault.getVaultName();
// const dbKey = `${vaultName}-p2p-device-name`;
const initialDeviceName = cmdSync.getConfig(SETTING_KEY_P2P_DEVICE_NAME) ?? plugin.services.vault.getVaultName();
let deviceName = $state<string>(initialDeviceName);
let deviceName = $state<string>("");
let eP2PEnabled = $state<boolean>(initialSettings.P2P_Enabled);
let eRelay = $state<string>(initialSettings.P2P_relays);
let eRoomId = $state<string>(initialSettings.P2P_roomID);
let ePassword = $state<string>(initialSettings.P2P_passphrase);
let eAppId = $state<string>(initialSettings.P2P_AppID);
let eDeviceName = $state<string>(initialDeviceName);
let eDeviceName = $state<string>("");
let eAutoAccept = $state<boolean>(initialSettings.P2P_AutoAccepting == AutoAccepting.ALL);
let eAutoStart = $state<boolean>(initialSettings.P2P_AutoStart);
let eAutoBroadcast = $state<boolean>(initialSettings.P2P_AutoBroadcast);
@@ -103,6 +100,11 @@
let serverInfo = $state<P2PServerInfo | undefined>(undefined);
let replicatorInfo = $state<P2PReplicatorStatus | undefined>(undefined);
const applyLoadSettings = (d: P2PSyncSetting, force: boolean) => {
if(force){
const initDeviceName = cmdSync.getConfig(SETTING_KEY_P2P_DEVICE_NAME) ?? plugin.services.vault.getVaultName();
deviceName = initDeviceName;
eDeviceName = initDeviceName;
}
const { P2P_relays, P2P_roomID, P2P_passphrase, P2P_AppID, P2P_AutoAccepting } = d;
if (force || !isP2PEnabledModified) eP2PEnabled = d.P2P_Enabled;
if (force || !isRelayModified) eRelay = P2P_relays;

View File

@@ -35,11 +35,11 @@ function removeFromList(item: string, list: string) {
export class P2PReplicatorPaneView extends SvelteItemView {
plugin: ObsidianLiveSyncPlugin;
icon = "waypoints";
override icon = "waypoints";
title: string = "";
navigation = false;
override navigation = false;
getIcon(): string {
override getIcon(): string {
return "waypoints";
}
get replicator() {

Submodule src/lib updated: a02102b131...d038ee5149

View File

@@ -1,76 +1,69 @@
import { Plugin } from "./deps";
import { Plugin, type App, type PluginManifest } from "./deps";
import {
type EntryDoc,
type ObsidianLiveSyncSettings,
type DatabaseConnectingStatus,
type HasSettings,
LOG_LEVEL_INFO,
} from "./lib/src/common/types.ts";
import { type SimpleStore } from "./lib/src/common/utils.ts";
import { LiveSyncLocalDB, type LiveSyncLocalDBEnv } from "./lib/src/pouchdb/LiveSyncLocalDB.ts";
import {
LiveSyncAbstractReplicator,
type LiveSyncReplicatorEnv,
} from "./lib/src/replication/LiveSyncAbstractReplicator.js";
import { type KeyValueDatabase } from "./lib/src/interfaces/KeyValueDatabase.ts";
import { type LiveSyncLocalDBEnv } from "./lib/src/pouchdb/LiveSyncLocalDB.ts";
import { type LiveSyncReplicatorEnv } from "./lib/src/replication/LiveSyncAbstractReplicator.js";
import { LiveSyncCommands } from "./features/LiveSyncCommands.ts";
import { HiddenFileSync } from "./features/HiddenFileSync/CmdHiddenFileSync.ts";
import { ConfigSync } from "./features/ConfigSync/CmdConfigSync.ts";
import { reactiveSource, type ReactiveValue } from "octagonal-wheels/dataobject/reactive";
import { type LiveSyncJournalReplicatorEnv } from "./lib/src/replication/journal/LiveSyncJournalReplicator.js";
import { type LiveSyncCouchDBReplicatorEnv } from "./lib/src/replication/couchdb/LiveSyncReplicator.js";
import type { CheckPointInfo } from "./lib/src/replication/journal/JournalSyncTypes.js";
import type { IObsidianModule } from "./modules/AbstractObsidianModule.ts";
import { ModuleDev } from "./modules/extras/ModuleDev.ts";
import { ModuleFileAccessObsidian } from "./modules/coreObsidian/ModuleFileAccessObsidian.ts";
import { ModuleMigration } from "./modules/essential/ModuleMigration.ts";
import { ModuleCheckRemoteSize } from "./modules/essentialObsidian/ModuleCheckRemoteSize.ts";
import { ModuleConflictResolver } from "./modules/coreFeatures/ModuleConflictResolver.ts";
import { ModuleInteractiveConflictResolver } from "./modules/features/ModuleInteractiveConflictResolver.ts";
import { ModuleLog } from "./modules/features/ModuleLog.ts";
import { ModuleObsidianSettings } from "./modules/features/ModuleObsidianSetting.ts";
import { ModuleRedFlag } from "./modules/coreFeatures/ModuleRedFlag.ts";
import { ModuleObsidianMenu } from "./modules/essentialObsidian/ModuleObsidianMenu.ts";
import { ModuleSetupObsidian } from "./modules/features/ModuleSetupObsidian.ts";
import { SetupManager } from "./modules/features/SetupManager.ts";
import type { StorageAccess } from "./modules/interfaces/StorageAccess.ts";
import type { StorageAccess } from "@lib/interfaces/StorageAccess.ts";
import type { Confirm } from "./lib/src/interfaces/Confirm.ts";
import type { Rebuilder } from "./modules/interfaces/DatabaseRebuilder.ts";
import type { DatabaseFileAccess } from "./modules/interfaces/DatabaseFileAccess.ts";
import { ModuleDatabaseFileAccess } from "./modules/core/ModuleDatabaseFileAccess.ts";
import { ModuleFileHandler } from "./modules/core/ModuleFileHandler.ts";
import type { Rebuilder } from "@lib/interfaces/DatabaseRebuilder.ts";
import type { DatabaseFileAccess } from "@lib/interfaces/DatabaseFileAccess.ts";
import { ModuleObsidianAPI } from "./modules/essentialObsidian/ModuleObsidianAPI.ts";
import { ModuleObsidianEvents } from "./modules/essentialObsidian/ModuleObsidianEvents.ts";
import { type AbstractModule } from "./modules/AbstractModule.ts";
import { AbstractModule } from "./modules/AbstractModule.ts";
import { ModuleObsidianSettingDialogue } from "./modules/features/ModuleObsidianSettingTab.ts";
import { ModuleObsidianDocumentHistory } from "./modules/features/ModuleObsidianDocumentHistory.ts";
import { ModuleObsidianGlobalHistory } from "./modules/features/ModuleGlobalHistory.ts";
import { ModuleObsidianSettingsAsMarkdown } from "./modules/features/ModuleObsidianSettingAsMarkdown.ts";
import { ModuleInitializerFile } from "./modules/essential/ModuleInitializerFile.ts";
import { ModuleKeyValueDB } from "./modules/essential/ModuleKeyValueDB.ts";
import { ModulePouchDB } from "./modules/core/ModulePouchDB.ts";
import { ModuleReplicator } from "./modules/core/ModuleReplicator.ts";
import { ModuleReplicatorCouchDB } from "./modules/core/ModuleReplicatorCouchDB.ts";
import { ModuleReplicatorMinIO } from "./modules/core/ModuleReplicatorMinIO.ts";
import { ModuleTargetFilter } from "./modules/core/ModuleTargetFilter.ts";
import { ModulePeriodicProcess } from "./modules/core/ModulePeriodicProcess.ts";
import { ModuleRemoteGovernor } from "./modules/coreFeatures/ModuleRemoteGovernor.ts";
import { ModuleLocalDatabaseObsidian } from "./modules/core/ModuleLocalDatabaseObsidian.ts";
import { ModuleConflictChecker } from "./modules/coreFeatures/ModuleConflictChecker.ts";
import { ModuleResolvingMismatchedTweaks } from "./modules/coreFeatures/ModuleResolveMismatchedTweaks.ts";
import { ModuleIntegratedTest } from "./modules/extras/ModuleIntegratedTest.ts";
import { ModuleRebuilder } from "./modules/core/ModuleRebuilder.ts";
import { ModuleReplicateTest } from "./modules/extras/ModuleReplicateTest.ts";
import { ModuleLiveSyncMain } from "./modules/main/ModuleLiveSyncMain.ts";
import { ModuleExtraSyncObsidian } from "./modules/extraFeaturesObsidian/ModuleExtraSyncObsidian.ts";
import { LocalDatabaseMaintenance } from "./features/LocalDatabaseMainte/CmdLocalDatabaseMainte.ts";
import { P2PReplicator } from "./features/P2PSync/CmdP2PReplicator.ts";
import type { LiveSyncManagers } from "./lib/src/managers/LiveSyncManagers.ts";
import type { InjectableServiceHub } from "./lib/src/services/implements/injectable/InjectableServiceHub.ts";
import { ObsidianServiceHub } from "./modules/services/ObsidianServiceHub.ts";
import type { ServiceContext } from "./lib/src/services/base/ServiceBase.ts";
// import type { InjectableServiceHub } from "./lib/src/services/InjectableServices.ts";
import { ServiceRebuilder } from "@lib/serviceModules/Rebuilder.ts";
import type { IFileHandler } from "@lib/interfaces/FileHandler.ts";
import { ServiceDatabaseFileAccess } from "@/serviceModules/DatabaseFileAccess.ts";
import { ServiceFileAccessObsidian } from "@/serviceModules/ServiceFileAccessImpl.ts";
import { StorageAccessManager } from "@lib/managers/StorageProcessingManager.ts";
import { __$checkInstanceBinding } from "./lib/src/dev/checks.ts";
import { ServiceFileHandler } from "./serviceModules/FileHandler.ts";
import { FileAccessObsidian } from "./serviceModules/FileAccessObsidian.ts";
import { StorageEventManagerObsidian } from "./managers/StorageEventManagerObsidian.ts";
import { onLayoutReadyFeatures } from "./serviceFeatures/onLayoutReady.ts";
import type { ServiceModules } from "./types.ts";
export default class ObsidianLiveSyncPlugin
extends Plugin
@@ -84,16 +77,58 @@ export default class ObsidianLiveSyncPlugin
/**
* The service hub for managing all services.
*/
_services: InjectableServiceHub<ServiceContext> = new ObsidianServiceHub(this);
_services: InjectableServiceHub<ServiceContext> | undefined = undefined;
get services() {
if (!this._services) {
throw new Error("Services not initialised yet");
}
return this._services;
}
/**
* Bind functions to the service hub (for migration purpose).
*/
// bindFunctions = (this.serviceHub as ObsidianServiceHub).bindFunctions.bind(this.serviceHub);
// --> Module System
/**
* Service Modules
*/
protected _serviceModules: ServiceModules;
get serviceModules() {
return this._serviceModules;
}
/**
* addOns: Non-essential and graphical features
*/
addOns = [] as LiveSyncCommands[];
/**
* The modules of the plug-in. Modules are responsible for specific features or functionalities of the plug-in, such as file handling, conflict resolution, replication, etc.
*/
private modules = [
// Move to registerModules
] as (IObsidianModule | AbstractModule)[];
/**
* Register an add-on to the plug-in.
* Add-ons are features that are not essential to the core functionality of the plug-in.
* @param addOn
*/
private _registerAddOn(addOn: LiveSyncCommands) {
this.addOns.push(addOn);
this.services.appLifecycle.onUnload.addHandler(() => Promise.resolve(addOn.onunload()).then(() => true));
}
private registerAddOns() {
this._registerAddOn(new ConfigSync(this));
this._registerAddOn(new HiddenFileSync(this));
this._registerAddOn(new LocalDatabaseMaintenance(this));
this._registerAddOn(new P2PReplicator(this));
}
/**
* Get an add-on by its class name. Returns undefined if not found.
* @param cls
* @returns
*/
getAddOn<T extends LiveSyncCommands>(cls: string) {
for (const addon of this.addOns) {
if (addon.constructor.name == cls) return addon as T;
@@ -101,58 +136,12 @@ export default class ObsidianLiveSyncPlugin
return undefined;
}
// Keep this order so the dialogues are displayed in order.
addOns = [
new ConfigSync(this),
new HiddenFileSync(this),
new LocalDatabaseMaintenance(this),
new P2PReplicator(this),
] as LiveSyncCommands[];
modules = [
new ModuleLiveSyncMain(this),
new ModuleExtraSyncObsidian(this, this),
// Only on Obsidian
new ModuleDatabaseFileAccess(this),
// Common
new ModulePouchDB(this),
new ModuleConflictChecker(this),
new ModuleLocalDatabaseObsidian(this),
new ModuleReplicatorMinIO(this),
new ModuleReplicatorCouchDB(this),
new ModuleReplicator(this),
new ModuleFileHandler(this),
new ModuleConflictResolver(this),
new ModuleRemoteGovernor(this),
new ModuleTargetFilter(this),
new ModulePeriodicProcess(this),
// Obsidian modules
new ModuleKeyValueDB(this),
new ModuleInitializerFile(this),
new ModuleObsidianAPI(this, this),
new ModuleObsidianEvents(this, this),
new ModuleFileAccessObsidian(this, this),
new ModuleObsidianSettings(this, this),
new ModuleResolvingMismatchedTweaks(this),
new ModuleObsidianSettingsAsMarkdown(this, this),
new ModuleObsidianSettingDialogue(this, this),
new ModuleLog(this, this),
new ModuleObsidianMenu(this, this),
new ModuleRebuilder(this),
new ModuleSetupObsidian(this, this),
new ModuleObsidianDocumentHistory(this, this),
new ModuleMigration(this),
new ModuleRedFlag(this),
new ModuleInteractiveConflictResolver(this, this),
new ModuleObsidianGlobalHistory(this, this),
new ModuleCheckRemoteSize(this, this),
// Test and Dev Modules
new ModuleDev(this, this),
new ModuleReplicateTest(this, this),
new ModuleIntegratedTest(this, this),
new SetupManager(this, this),
] as (IObsidianModule | AbstractModule)[];
/**
* Get a module by its class. Throws an error if not found.
* Mostly used for getting SetupManager.
* @param constructor
* @returns
*/
getModule<T extends IObsidianModule>(constructor: new (...args: any[]) => T): T {
for (const module of this.modules) {
if (module.constructor === constructor) return module as T;
@@ -160,61 +149,301 @@ export default class ObsidianLiveSyncPlugin
throw new Error(`Module ${constructor} not found or not loaded.`);
}
settings!: ObsidianLiveSyncSettings;
localDatabase!: LiveSyncLocalDB;
managers!: LiveSyncManagers;
simpleStore!: SimpleStore<CheckPointInfo>;
replicator!: LiveSyncAbstractReplicator;
/**
* Register a module to the plug-in.
* @param module The module to register.
*/
private _registerModule(module: IObsidianModule) {
this.modules.push(module);
}
private registerModules() {
this._registerModule(new ModuleLiveSyncMain(this));
this._registerModule(new ModuleConflictChecker(this));
this._registerModule(new ModuleReplicatorMinIO(this));
this._registerModule(new ModuleReplicatorCouchDB(this));
this._registerModule(new ModuleReplicator(this));
this._registerModule(new ModuleConflictResolver(this));
this._registerModule(new ModuleTargetFilter(this));
this._registerModule(new ModulePeriodicProcess(this));
this._registerModule(new ModuleInitializerFile(this));
this._registerModule(new ModuleObsidianAPI(this, this));
this._registerModule(new ModuleObsidianEvents(this, this));
this._registerModule(new ModuleResolvingMismatchedTweaks(this));
this._registerModule(new ModuleObsidianSettingsAsMarkdown(this));
this._registerModule(new ModuleObsidianSettingDialogue(this, this));
this._registerModule(new ModuleLog(this, this));
this._registerModule(new ModuleObsidianMenu(this));
this._registerModule(new ModuleSetupObsidian(this));
this._registerModule(new ModuleObsidianDocumentHistory(this, this));
this._registerModule(new ModuleMigration(this));
this._registerModule(new ModuleRedFlag(this));
this._registerModule(new ModuleInteractiveConflictResolver(this, this));
this._registerModule(new ModuleObsidianGlobalHistory(this, this));
this._registerModule(new ModuleCheckRemoteSize(this));
// Test and Dev Modules
this._registerModule(new ModuleDev(this, this));
this._registerModule(new ModuleReplicateTest(this, this));
this._registerModule(new ModuleIntegratedTest(this, this));
this._registerModule(new SetupManager(this));
}
/**
* Bind module functions to services.
*/
private bindModuleFunctions() {
for (const module of this.modules) {
if (module instanceof AbstractModule) {
module.onBindFunction(this, this.services);
__$checkInstanceBinding(module); // Check if all functions are properly bound, and log warnings if not.
} else {
this.services.API.addLog(
`Module ${module.constructor.name} does not have onBindFunction, skipping binding.`,
LOG_LEVEL_INFO
);
}
}
}
/**
* @obsolete Use services.UI.confirm instead. The confirm function to show a confirmation dialog to the user.
*/
get confirm(): Confirm {
return this.services.UI.confirm;
}
storageAccess!: StorageAccess;
databaseFileAccess!: DatabaseFileAccess;
fileHandler!: ModuleFileHandler;
rebuilder!: Rebuilder;
kvDB!: KeyValueDatabase;
getDatabase(): PouchDB.Database<EntryDoc> {
return this.localDatabase.localDatabase;
/**
* @obsolete Use services.setting.currentSettings instead. The current settings of the plug-in.
*/
get settings() {
return this.services.setting.settings;
}
/**
* @obsolete Use services.setting.settings instead. Set the settings of the plug-in.
*/
set settings(value: ObsidianLiveSyncSettings) {
this.services.setting.settings = value;
}
/**
* @obsolete Use services.setting.currentSettings instead. Get the settings of the plug-in.
* @returns The current settings of the plug-in.
*/
getSettings(): ObsidianLiveSyncSettings {
return this.settings;
}
requestCount = reactiveSource(0);
responseCount = reactiveSource(0);
totalQueued = reactiveSource(0);
batched = reactiveSource(0);
processing = reactiveSource(0);
databaseQueueCount = reactiveSource(0);
storageApplyingCount = reactiveSource(0);
replicationResultCount = reactiveSource(0);
conflictProcessQueueCount = reactiveSource(0);
pendingFileEventCount = reactiveSource(0);
processingFileEventCount = reactiveSource(0);
_totalProcessingCount?: ReactiveValue<number>;
replicationStat = reactiveSource({
sent: 0,
arrived: 0,
maxPullSeq: 0,
maxPushSeq: 0,
lastSyncPullSeq: 0,
lastSyncPushSeq: 0,
syncStatus: "CLOSED" as DatabaseConnectingStatus,
});
onload() {
void this.services.appLifecycle.onLoad();
/**
* @obsolete Use services.database.localDatabase instead. The local database instance.
*/
get localDatabase() {
return this.services.database.localDatabase;
}
/**
* @obsolete Use services.database.managers instead. The database managers, including entry manager, revision manager, etc.
*/
get managers() {
return this.services.database.managers;
}
/**
* @obsolete Use services.database.localDatabase instead. Get the PouchDB database instance. Note that this is not the same as the local database instance, which is a wrapper around the PouchDB database.
* @returns The PouchDB database instance.
*/
getDatabase(): PouchDB.Database<EntryDoc> {
return this.localDatabase.localDatabase;
}
/**
* @obsolete Use services.keyValueDB.simpleStore instead. A simple key-value store for storing non-file data, such as checkpoints, sync status, etc.
*/
get simpleStore() {
return this.services.keyValueDB.simpleStore as SimpleStore<CheckPointInfo>;
}
/**
* @obsolete Use services.replicator.getActiveReplicator instead. Get the active replicator instance. Note that there can be multiple replicators, but only one can be active at a time.
*/
get replicator() {
return this.services.replicator.getActiveReplicator()!;
}
/**
* @obsolete Use services.keyValueDB.kvDB instead. Get the key-value database instance. This is used for storing large data that cannot be stored in the simple store, such as file metadata, etc.
*/
get kvDB() {
return this.services.keyValueDB.kvDB;
}
/// Modules which now rely on service modules
/**
* Storage Accessor for handling file operations.
* @obsolete Use serviceModules.storageAccess instead.
*/
get storageAccess(): StorageAccess {
return this.serviceModules.storageAccess;
}
/**
* Database File Accessor for handling file operations related to the database, such as exporting the database, importing from a file, etc.
* @obsolete Use serviceModules.databaseFileAccess instead.
*/
get databaseFileAccess(): DatabaseFileAccess {
return this.serviceModules.databaseFileAccess;
}
/**
* File Handler for handling file operations related to replication, such as resolving conflicts, applying changes from replication, etc.
* @obsolete Use serviceModules.fileHandler instead.
*/
get fileHandler(): IFileHandler {
return this.serviceModules.fileHandler;
}
/**
* Rebuilder for handling database rebuilding operations.
* @obsolete Use serviceModules.rebuilder instead.
*/
get rebuilder(): Rebuilder {
return this.serviceModules.rebuilder;
}
// requestCount = reactiveSource(0);
// responseCount = reactiveSource(0);
// totalQueued = reactiveSource(0);
// batched = reactiveSource(0);
// processing = reactiveSource(0);
// databaseQueueCount = reactiveSource(0);
// storageApplyingCount = reactiveSource(0);
// replicationResultCount = reactiveSource(0);
// pendingFileEventCount = reactiveSource(0);
// processingFileEventCount = reactiveSource(0);
// _totalProcessingCount?: ReactiveValue<number>;
// replicationStat = reactiveSource({
// sent: 0,
// arrived: 0,
// maxPullSeq: 0,
// maxPushSeq: 0,
// lastSyncPullSeq: 0,
// lastSyncPushSeq: 0,
// syncStatus: "CLOSED" as DatabaseConnectingStatus,
// });
private initialiseServices() {
this._services = new ObsidianServiceHub(this);
}
/**
* Initialise service modules.
*/
private initialiseServiceModules() {
const storageAccessManager = new StorageAccessManager();
// If we want to support another platform, implement its counterpart of ObsidianXXXXXService.
const vaultAccess = new FileAccessObsidian(this.app, {
storageAccessManager: storageAccessManager,
vaultService: this.services.vault,
settingService: this.services.setting,
APIService: this.services.API,
});
const storageEventManager = new StorageEventManagerObsidian(this, this, {
fileProcessing: this.services.fileProcessing,
setting: this.services.setting,
vaultService: this.services.vault,
storageAccessManager: storageAccessManager,
APIService: this.services.API,
});
const storageAccess = new ServiceFileAccessObsidian({
API: this.services.API,
setting: this.services.setting,
fileProcessing: this.services.fileProcessing,
vault: this.services.vault,
appLifecycle: this.services.appLifecycle,
storageEventManager: storageEventManager,
storageAccessManager: storageAccessManager,
vaultAccess: vaultAccess,
});
const databaseFileAccess = new ServiceDatabaseFileAccess({
API: this.services.API,
database: this.services.database,
path: this.services.path,
storageAccess: storageAccess,
vault: this.services.vault,
});
const fileHandler = new ServiceFileHandler({
API: this.services.API,
databaseFileAccess: databaseFileAccess,
conflict: this.services.conflict,
setting: this.services.setting,
fileProcessing: this.services.fileProcessing,
vault: this.services.vault,
path: this.services.path,
replication: this.services.replication,
storageAccess: storageAccess,
});
const rebuilder = new ServiceRebuilder({
API: this.services.API,
database: this.services.database,
appLifecycle: this.services.appLifecycle,
setting: this.services.setting,
remote: this.services.remote,
databaseEvents: this.services.databaseEvents,
replication: this.services.replication,
replicator: this.services.replicator,
UI: this.services.UI,
vault: this.services.vault,
fileHandler: fileHandler,
storageAccess: storageAccess,
control: this.services.control,
});
return {
rebuilder,
fileHandler,
databaseFileAccess,
storageAccess,
};
}
/**
* @obsolete Use services.setting.saveSettingData instead. Save the settings to the disk. This is usually called after changing the settings in the code, to persist the changes.
*/
async saveSettings() {
await this.services.setting.saveSettingData();
}
onunload() {
return void this.services.appLifecycle.onAppUnload();
/**
* Initialise ServiceFeatures.
* (Please refer to `serviceFeatures` for more details)
*/
initialiseServiceFeatures() {
for (const feature of onLayoutReadyFeatures) {
const curriedFeature = () => feature(this);
this.services.appLifecycle.onLayoutReady.addHandler(curriedFeature);
}
}
constructor(app: App, manifest: PluginManifest) {
super(app, manifest);
this.initialiseServices();
this.registerModules();
this.registerAddOns();
this._serviceModules = this.initialiseServiceModules();
this.initialiseServiceFeatures();
this.bindModuleFunctions();
}
private async _startUp() {
if (!(await this.services.control.onLoad())) return;
const onReady = this.services.control.onReady.bind(this.services.control);
this.app.workspace.onLayoutReady(onReady);
}
override onload() {
void this._startUp();
}
override onunload() {
return void this.services.control.onUnload();
}
// <-- Plug-in's overrideable functions
}
// For now,
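The rewritten `main.ts` above now creates the service hub in the constructor and guards access through the `services` getter, so any access before initialisation fails loudly. A self-contained sketch of that guard-plus-eager-construction pattern, with placeholder `Hub`/`Host` types standing in for `InjectableServiceHub` and the plug-in class:

// Illustrative only: a lazily declared hub, guarded by a getter and created eagerly in the constructor.
class Hub {
    greet() {
        return "ready";
    }
}
class Host {
    private _hub: Hub | undefined = undefined;
    get hub(): Hub {
        if (!this._hub) {
            throw new Error("Services not initialised yet");
        }
        return this._hub;
    }
    constructor(makeHub: () => Hub) {
        // Initialise in the constructor, as the plug-in now does,
        // so later accessors never hit the guard in normal operation.
        this._hub = makeHub();
    }
}
const host = new Host(() => new Hub());
console.log(host.hub.greet()); // "ready"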

View File

@@ -0,0 +1,210 @@
import type { FileEventItem } from "@/common/types";
import { HiddenFileSync } from "@/features/HiddenFileSync/CmdHiddenFileSync";
import type { FilePath, UXFileInfoStub, UXFolderInfo, UXInternalFileInfoStub } from "@lib/common/types";
import type { FileEvent } from "@lib/interfaces/StorageEventManager";
import { TFile, type TAbstractFile, TFolder } from "@/deps";
import { LOG_LEVEL_DEBUG } from "octagonal-wheels/common/logger";
import type ObsidianLiveSyncPlugin from "@/main";
import type { LiveSyncCore } from "@/main";
import {
StorageEventManagerBase,
type FileEventItemSentinel,
type StorageEventManagerBaseDependencies,
} from "@lib/managers/StorageEventManager";
import { InternalFileToUXFileInfoStub, TFileToUXFileInfoStub } from "@/modules/coreObsidian/storageLib/utilObsidian";
export class StorageEventManagerObsidian extends StorageEventManagerBase {
plugin: ObsidianLiveSyncPlugin;
core: LiveSyncCore;
// Necessary evil.
cmdHiddenFileSync: HiddenFileSync;
override isFile(file: UXFileInfoStub | UXInternalFileInfoStub | UXFolderInfo | TFile): boolean {
if (file instanceof TFile) {
return true;
}
if (super.isFile(file)) {
return true;
}
return !file.isFolder;
}
override isFolder(file: UXFileInfoStub | UXInternalFileInfoStub | UXFolderInfo | TFolder): boolean {
if (file instanceof TFolder) {
return true;
}
if (super.isFolder(file)) {
return true;
}
return !!file.isFolder;
}
constructor(plugin: ObsidianLiveSyncPlugin, core: LiveSyncCore, dependencies: StorageEventManagerBaseDependencies) {
super(dependencies);
this.plugin = plugin;
this.core = core;
this.cmdHiddenFileSync = this.plugin.getAddOn(HiddenFileSync.name) as HiddenFileSync;
}
async beginWatch() {
await this.snapShotRestored;
const plugin = this.plugin;
this.watchVaultChange = this.watchVaultChange.bind(this);
this.watchVaultCreate = this.watchVaultCreate.bind(this);
this.watchVaultDelete = this.watchVaultDelete.bind(this);
this.watchVaultRename = this.watchVaultRename.bind(this);
this.watchVaultRawEvents = this.watchVaultRawEvents.bind(this);
this.watchEditorChange = this.watchEditorChange.bind(this);
plugin.registerEvent(plugin.app.vault.on("modify", this.watchVaultChange));
plugin.registerEvent(plugin.app.vault.on("delete", this.watchVaultDelete));
plugin.registerEvent(plugin.app.vault.on("rename", this.watchVaultRename));
plugin.registerEvent(plugin.app.vault.on("create", this.watchVaultCreate));
//@ts-ignore : Internal API
plugin.registerEvent(plugin.app.vault.on("raw", this.watchVaultRawEvents));
plugin.registerEvent(plugin.app.workspace.on("editor-change", this.watchEditorChange));
}
watchEditorChange(editor: any, info: any) {
if (!("path" in info)) {
return;
}
if (!this.shouldBatchSave) {
return;
}
const file = info?.file as TFile;
if (!file) return;
if (this.storageAccess.isFileProcessing(file.path as FilePath)) {
// this._log(`Editor change skipped because the file is being processed: ${file.path}`, LOG_LEVEL_VERBOSE);
return;
}
if (!this.isWaiting(file.path as FilePath)) {
return;
}
const data = info?.data as string;
const fi: FileEvent = {
type: "CHANGED",
file: TFileToUXFileInfoStub(file),
cachedData: data,
};
void this.appendQueue([fi]);
}
watchVaultCreate(file: TAbstractFile, ctx?: any) {
if (file instanceof TFolder) return;
if (this.storageAccess.isFileProcessing(file.path as FilePath)) {
// this._log(`File create skipped because the file is being processed: ${file.path}`, LOG_LEVEL_VERBOSE);
return;
}
const fileInfo = TFileToUXFileInfoStub(file);
void this.appendQueue([{ type: "CREATE", file: fileInfo }], ctx);
}
watchVaultChange(file: TAbstractFile, ctx?: any) {
if (file instanceof TFolder) return;
if (this.storageAccess.isFileProcessing(file.path as FilePath)) {
// this._log(`File change skipped because the file is being processed: ${file.path}`, LOG_LEVEL_VERBOSE);
return;
}
const fileInfo = TFileToUXFileInfoStub(file);
void this.appendQueue([{ type: "CHANGED", file: fileInfo }], ctx);
}
watchVaultDelete(file: TAbstractFile, ctx?: any) {
if (file instanceof TFolder) return;
if (this.storageAccess.isFileProcessing(file.path as FilePath)) {
// this._log(`File delete skipped because the file is being processed: ${file.path}`, LOG_LEVEL_VERBOSE);
return;
}
const fileInfo = TFileToUXFileInfoStub(file, true);
void this.appendQueue([{ type: "DELETE", file: fileInfo }], ctx);
}
watchVaultRename(file: TAbstractFile, oldFile: string, ctx?: any) {
// vault Rename will not be raised for self-events (Self-hosted LiveSync will not handle 'rename').
if (file instanceof TFile) {
const fileInfo = TFileToUXFileInfoStub(file);
void this.appendQueue(
[
{
type: "DELETE",
file: {
path: oldFile as FilePath,
name: file.name,
stat: {
mtime: file.stat.mtime,
ctime: file.stat.ctime,
size: file.stat.size,
type: "file",
},
deleted: true,
},
skipBatchWait: true,
},
{ type: "CREATE", file: fileInfo, skipBatchWait: true },
],
ctx
);
}
}
// Watch raw events (Internal API)
watchVaultRawEvents(path: FilePath) {
if (this.storageAccess.isFileProcessing(path)) {
// this._log(`Raw file event skipped because the file is being processed: ${path}`, LOG_LEVEL_VERBOSE);
return;
}
// Only for internal files.
if (!this.settings) return;
// if (this.plugin.settings.useIgnoreFiles && this.plugin.ignoreFiles.some(e => path.endsWith(e.trim()))) {
if (this.settings.useIgnoreFiles) {
// If it is one of the ignore files, refresh the cached one.
// (Calling $$isTargetFile will refresh the cache)
void this.vaultService.isTargetFile(path).then(() => this._watchVaultRawEvents(path));
} else {
void this._watchVaultRawEvents(path);
}
}
async _watchVaultRawEvents(path: FilePath) {
if (!this.settings.syncInternalFiles && !this.settings.usePluginSync) return;
if (!this.settings.watchInternalFileChanges) return;
if (!path.startsWith(this.plugin.app.vault.configDir)) return;
if (path.endsWith("/")) {
// Folder
return;
}
const isTargetFile = await this.cmdHiddenFileSync.isTargetFile(path);
if (!isTargetFile) return;
void this.appendQueue(
[
{
type: "INTERNAL",
file: InternalFileToUXFileInfoStub(path),
skipBatchWait: true, // Internal files should be processed immediately.
},
],
null
);
}
async _saveSnapshot(snapshot: (FileEventItem | FileEventItemSentinel)[]) {
await this.core.kvDB.set("storage-event-manager-snapshot", snapshot);
this._log(`Storage operation snapshot saved: ${snapshot.length} items`, LOG_LEVEL_DEBUG);
}
async _loadSnapshot() {
const snapShot = await this.core.kvDB.get<(FileEventItem | FileEventItemSentinel)[]>(
"storage-event-manager-snapshot"
);
return snapShot;
}
updateStatus() {
const allFileEventItems = this.bufferedQueuedItems.filter((e): e is FileEventItem => "args" in e);
const allItems = allFileEventItems.filter((e) => !e.cancelled);
const totalItems = allItems.length + this.concurrentProcessing.waiting;
const processing = this.processingCount;
const batchedCount = this._waitingMap.size;
this.fileProcessing.batched.value = batchedCount;
this.fileProcessing.processing.value = processing;
this.fileProcessing.totalQueued.value = totalItems + batchedCount + processing;
}
}
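The `_saveSnapshot`/`_loadSnapshot` pair above persists the pending file-event queue through `core.kvDB` so unprocessed events can be restored after a reload. A minimal sketch of that round trip against an in-memory stand-in for the key-value DB (the `MapKV` and `Snapshot` types are illustrative only):

// Illustrative only: snapshot round trip, using a Map as a stand-in for the key-value DB.
type Snapshot = { path: string; type: "CREATE" | "CHANGED" | "DELETE" }[];
class MapKV {
    private store = new Map<string, unknown>();
    async set(key: string, value: unknown): Promise<void> {
        this.store.set(key, value);
    }
    async get<T>(key: string): Promise<T | undefined> {
        return this.store.get(key) as T | undefined;
    }
}
async function demo() {
    const kv = new MapKV();
    const pending: Snapshot = [{ path: "notes/today.md", type: "CHANGED" }];
    await kv.set("storage-event-manager-snapshot", pending);
    const restored = await kv.get<Snapshot>("storage-event-manager-snapshot");
    console.log(restored?.length); // 1
}
void demo();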

View File

@@ -1,16 +1,22 @@
import { LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE, Logger } from "octagonal-wheels/common/logger";
import type { LOG_LEVEL } from "../lib/src/common/types";
import type { LiveSyncCore } from "../main";
import { __$checkInstanceBinding } from "../lib/src/dev/checks";
import { LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE, Logger } from "octagonal-wheels/common/logger";
import type { AnyEntry, FilePathWithPrefix } from "@lib/common/types";
import type { LiveSyncCore } from "@/main";
import { stripAllPrefixes } from "@lib/string_and_binary/path";
import { createInstanceLogFunction } from "@lib/services/lib/logUtils";
export abstract class AbstractModule {
_log = (msg: any, level: LOG_LEVEL = LOG_LEVEL_INFO, key?: string) => {
if (typeof msg === "string" && level !== LOG_LEVEL_NOTICE) {
msg = `[${this.constructor.name}]\u{200A} ${msg}`;
_log = createInstanceLogFunction(this.constructor.name, this.services.API);
get services() {
if (!this.core._services) {
throw new Error("Services are not ready yet.");
}
// console.log(msg);
Logger(msg, level, key);
};
return this.core._services;
}
addCommand = this.services.API.addCommand.bind(this.services.API);
registerView = this.services.API.registerWindow.bind(this.services.API);
addRibbonIcon = this.services.API.addRibbonIcon.bind(this.services.API);
registerObsidianProtocolHandler = this.services.API.registerProtocolHandler.bind(this.services.API);
get localDatabase() {
return this.core.localDatabase;
@@ -22,13 +28,19 @@ export abstract class AbstractModule {
this.core.settings = value;
}
getPath(entry: AnyEntry): FilePathWithPrefix {
return this.services.path.getPath(entry);
}
getPathWithoutPrefix(entry: AnyEntry): FilePathWithPrefix {
return stripAllPrefixes(this.services.path.getPath(entry));
}
onBindFunction(core: LiveSyncCore, services: typeof core.services) {
// Override if needed.
}
constructor(public core: LiveSyncCore) {
this.onBindFunction(core, core.services);
Logger(`[${this.constructor.name}] Loaded`, LOG_LEVEL_VERBOSE);
__$checkInstanceBinding(this);
}
saveSettings = this.core.saveSettings.bind(this.core);
@@ -59,7 +71,13 @@ export abstract class AbstractModule {
return this.testDone();
}
get services() {
return this.core._services;
isMainReady() {
return this.services.appLifecycle.isReady();
}
isMainSuspended() {
return this.services.appLifecycle.isSuspended();
}
isDatabaseReady() {
return this.services.database.isDatabaseReady();
}
}
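After this refactor, modules contribute behaviour by overriding `onBindFunction(core, services)` and attaching handlers to service hooks (the removed modules further below show the original `addHandler` call sites). A self-contained sketch of that pattern, with a toy `Hook` type standing in for the real service hooks:

// Illustrative only: the addHandler/onBindFunction style used by the modules, reduced to a toy hook.
class Hook<T> {
    private handlers: ((arg: T) => Promise<boolean>)[] = [];
    addHandler(handler: (arg: T) => Promise<boolean>) {
        this.handlers.push(handler);
    }
    async fire(arg: T): Promise<boolean> {
        for (const handler of this.handlers) {
            if (!(await handler(arg))) return false;
        }
        return true;
    }
}
const onLoaded = new Hook<void>();
class ToyModule {
    onBindFunction(hooks: { onLoaded: Hook<void> }) {
        hooks.onLoaded.addHandler(this._everyOnload.bind(this));
    }
    private _everyOnload(): Promise<boolean> {
        return Promise.resolve(true);
    }
}
new ToyModule().onBindFunction({ onLoaded });
void onLoaded.fire(); // resolves to true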

View File

@@ -10,43 +10,17 @@ export type ModuleKeys = keyof IObsidianModule;
export type ChainableModuleProps = ChainableExecuteFunction<ObsidianLiveSyncPlugin>;
export abstract class AbstractObsidianModule extends AbstractModule {
addCommand = this.plugin.addCommand.bind(this.plugin);
registerView = this.plugin.registerView.bind(this.plugin);
addRibbonIcon = this.plugin.addRibbonIcon.bind(this.plugin);
registerObsidianProtocolHandler = this.plugin.registerObsidianProtocolHandler.bind(this.plugin);
get localDatabase() {
return this.plugin.localDatabase;
}
get settings() {
return this.plugin.settings;
}
set settings(value) {
this.plugin.settings = value;
}
get app() {
return this.plugin.app;
}
constructor(
public plugin: ObsidianLiveSyncPlugin,
public core: LiveSyncCore
core: LiveSyncCore
) {
super(core);
}
saveSettings = this.plugin.saveSettings.bind(this.plugin);
isMainReady() {
return this.services.appLifecycle.isReady();
}
isMainSuspended() {
return this.services.appLifecycle.isSuspended();
}
isDatabaseReady() {
return this.services.database.isDatabaseReady();
}
// Should be overridden.
isThisModuleEnabled() {
return true;

View File

@@ -1,352 +0,0 @@
import { LOG_LEVEL_VERBOSE } from "octagonal-wheels/common/logger";
import { EVENT_FILE_SAVED, eventHub } from "../../common/events";
import {
getDatabasePathFromUXFileInfo,
getStoragePathFromUXFileInfo,
isInternalMetadata,
markChangesAreSame,
} from "../../common/utils";
import type {
UXFileInfoStub,
FilePathWithPrefix,
UXFileInfo,
MetaEntry,
LoadedEntry,
FilePath,
SavingEntry,
DocumentID,
} from "../../lib/src/common/types";
import type { DatabaseFileAccess } from "../interfaces/DatabaseFileAccess";
import { isPlainText, shouldBeIgnored, stripAllPrefixes } from "../../lib/src/string_and_binary/path";
import {
createBlob,
createTextBlob,
delay,
determineTypeFromBlob,
isDocContentSame,
readContent,
} from "../../lib/src/common/utils";
import { serialized } from "octagonal-wheels/concurrency/lock";
import { AbstractModule } from "../AbstractModule.ts";
import { ICHeader } from "../../common/types.ts";
import type { LiveSyncCore } from "../../main.ts";
export class ModuleDatabaseFileAccess extends AbstractModule implements DatabaseFileAccess {
private _everyOnload(): Promise<boolean> {
this.core.databaseFileAccess = this;
return Promise.resolve(true);
}
private async _everyModuleTest(): Promise<boolean> {
if (!this.settings.enableDebugTools) return Promise.resolve(true);
const testString = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nullam nec purus nec nunc";
// Before the test, we need to delete it completely.
const conflicts = await this.getConflictedRevs("autoTest.md" as FilePathWithPrefix);
for (const rev of conflicts) {
await this.delete("autoTest.md" as FilePathWithPrefix, rev);
}
await this.delete("autoTest.md" as FilePathWithPrefix);
// OK, begin!
await this._test(
"storeContent",
async () => await this.storeContent("autoTest.md" as FilePathWithPrefix, testString)
);
// For test, we need to clear the caches.
this.localDatabase.clearCaches();
await this._test("readContent", async () => {
const content = await this.fetch("autoTest.md" as FilePathWithPrefix);
if (!content) return "File not found";
if (content.deleted) return "File is deleted";
return (await content.body.text()) == testString
? true
: `Content is not same ${await content.body.text()}`;
});
await this._test("delete", async () => await this.delete("autoTest.md" as FilePathWithPrefix));
await this._test("read deleted content", async () => {
const content = await this.fetch("autoTest.md" as FilePathWithPrefix);
if (!content) return true;
if (content.deleted) return true;
return `Still exist !:${await content.body.text()},${JSON.stringify(content, undefined, 2)}`;
});
await delay(100);
return this.testDone();
}
async checkIsTargetFile(file: UXFileInfoStub | FilePathWithPrefix): Promise<boolean> {
const path = getStoragePathFromUXFileInfo(file);
if (!(await this.services.vault.isTargetFile(path))) {
this._log(`File is not target: ${path}`, LOG_LEVEL_VERBOSE);
return false;
}
if (shouldBeIgnored(path)) {
this._log(`File should be ignored: ${path}`, LOG_LEVEL_VERBOSE);
return false;
}
return true;
}
async delete(file: UXFileInfoStub | FilePathWithPrefix, rev?: string): Promise<boolean> {
if (!(await this.checkIsTargetFile(file))) {
return true;
}
const fullPath = getDatabasePathFromUXFileInfo(file);
try {
this._log(`deleteDB By path:${fullPath}`);
return await this.deleteFromDBbyPath(fullPath, rev);
} catch (ex) {
this._log(`Failed to delete ${fullPath}`);
this._log(ex, LOG_LEVEL_VERBOSE);
return false;
}
}
async createChunks(file: UXFileInfo, force: boolean = false, skipCheck?: boolean): Promise<boolean> {
return await this.__store(file, force, skipCheck, true);
}
async store(file: UXFileInfo, force: boolean = false, skipCheck?: boolean): Promise<boolean> {
return await this.__store(file, force, skipCheck, false);
}
async storeContent(path: FilePathWithPrefix, content: string): Promise<boolean> {
const blob = createTextBlob(content);
const bytes = (await blob.arrayBuffer()).byteLength;
const isInternal = path.startsWith(".") ? true : undefined;
const dummyUXFileInfo: UXFileInfo = {
name: path.split("/").pop() as string,
path: path,
stat: {
size: bytes,
ctime: Date.now(),
mtime: Date.now(),
type: "file",
},
body: blob,
isInternal,
};
return await this.__store(dummyUXFileInfo, true, false, false);
}
private async __store(
file: UXFileInfo,
force: boolean = false,
skipCheck?: boolean,
onlyChunks?: boolean
): Promise<boolean> {
if (!skipCheck) {
if (!(await this.checkIsTargetFile(file))) {
return true;
}
}
if (!file) {
this._log("File seems bad", LOG_LEVEL_VERBOSE);
return false;
}
// const path = getPathFromUXFileInfo(file);
const isPlain = isPlainText(file.name);
const possiblyLarge = !isPlain;
const content = file.body;
const datatype = determineTypeFromBlob(content);
const idPrefix = file.isInternal ? ICHeader : "";
const fullPath = getStoragePathFromUXFileInfo(file);
const fullPathOnDB = getDatabasePathFromUXFileInfo(file);
if (possiblyLarge) this._log(`Processing: ${fullPath}`, LOG_LEVEL_VERBOSE);
// if (isInternalMetadata(fullPath)) {
// this._log(`Internal file: ${fullPath}`, LOG_LEVEL_VERBOSE);
// return false;
// }
if (file.isInternal) {
if (file.deleted) {
file.stat = {
size: 0,
ctime: Date.now(),
mtime: Date.now(),
type: "file",
};
} else if (file.stat == undefined) {
const stat = await this.core.storageAccess.statHidden(file.path);
if (!stat) {
// Whether the file was actually deleted should already have been determined by this point, so this is an unexpected case; we should raise an error.
this._log(`Internal file not found: ${fullPath}`, LOG_LEVEL_VERBOSE);
return false;
}
file.stat = stat;
}
}
const idMain = await this.services.path.path2id(fullPath);
const id = (idPrefix + idMain) as DocumentID;
const d: SavingEntry = {
_id: id,
path: fullPathOnDB,
data: content,
ctime: file.stat.ctime,
mtime: file.stat.mtime,
size: file.stat.size,
children: [],
datatype: datatype,
type: datatype,
eden: {},
};
// upsert should be locked
const msg = `STORAGE -> DB (${datatype}) `;
const isNotChanged = await serialized("file-" + fullPath, async () => {
if (force) {
this._log(msg + "Force writing " + fullPath, LOG_LEVEL_VERBOSE);
return false;
}
// Commented out temporarily: this checks whether the file was created by ourselves.
// if (this.core.storageAccess.recentlyTouched(file)) {
// return true;
// }
try {
const old = await this.localDatabase.getDBEntry(d.path, undefined, false, true, false);
if (old !== false) {
const oldData = { data: old.data, deleted: old._deleted || old.deleted };
const newData = { data: d.data, deleted: d._deleted || d.deleted };
if (oldData.deleted != newData.deleted) return false;
if (!(await isDocContentSame(old.data, newData.data))) return false;
this._log(
msg + "Skipped (not changed) " + fullPath + (d._deleted || d.deleted ? " (deleted)" : ""),
LOG_LEVEL_VERBOSE
);
markChangesAreSame(old, d.mtime, old.mtime);
return true;
// d._rev = old._rev;
}
} catch (ex) {
this._log(
msg +
"Error, Could not check the diff for the old one." +
(force ? "force writing." : "") +
fullPath +
(d._deleted || d.deleted ? " (deleted)" : ""),
LOG_LEVEL_VERBOSE
);
this._log(ex, LOG_LEVEL_VERBOSE);
return !force;
}
return false;
});
if (isNotChanged) {
this._log(msg + " Skip " + fullPath, LOG_LEVEL_VERBOSE);
return true;
}
const ret = await this.localDatabase.putDBEntry(d, onlyChunks);
if (ret !== false) {
this._log(msg + fullPath);
eventHub.emitEvent(EVENT_FILE_SAVED);
}
return ret != false;
}
async getConflictedRevs(file: UXFileInfoStub | FilePathWithPrefix): Promise<string[]> {
if (!(await this.checkIsTargetFile(file))) {
return [];
}
const filename = getDatabasePathFromUXFileInfo(file);
const doc = await this.localDatabase.getDBEntryMeta(filename, { conflicts: true }, true);
if (doc === false) {
return [];
}
return doc._conflicts || [];
}
async fetch(
file: UXFileInfoStub | FilePathWithPrefix,
rev?: string,
waitForReady?: boolean,
skipCheck = false
): Promise<UXFileInfo | false> {
if (skipCheck && !(await this.checkIsTargetFile(file))) {
return false;
}
const entry = await this.fetchEntry(file, rev, waitForReady, true);
if (entry === false) {
return false;
}
const data = createBlob(readContent(entry));
const path = stripAllPrefixes(entry.path);
const fileInfo: UXFileInfo = {
name: path.split("/").pop() as string,
path: path,
stat: {
size: entry.size,
ctime: entry.ctime,
mtime: entry.mtime,
type: "file",
},
body: data,
deleted: entry.deleted || entry._deleted,
};
if (isInternalMetadata(entry.path)) {
fileInfo.isInternal = true;
}
return fileInfo;
}
async fetchEntryMeta(
file: UXFileInfoStub | FilePathWithPrefix,
rev?: string,
skipCheck = false
): Promise<MetaEntry | false> {
const dbFileName = getDatabasePathFromUXFileInfo(file);
if (skipCheck && !(await this.checkIsTargetFile(file))) {
return false;
}
const doc = await this.localDatabase.getDBEntryMeta(dbFileName, rev ? { rev: rev } : undefined, true);
if (doc === false) {
return false;
}
return doc as MetaEntry;
}
async fetchEntryFromMeta(
meta: MetaEntry,
waitForReady: boolean = true,
skipCheck = false
): Promise<LoadedEntry | false> {
if (skipCheck && !(await this.checkIsTargetFile(meta.path))) {
return false;
}
const doc = await this.localDatabase.getDBEntryFromMeta(meta as LoadedEntry, false, waitForReady);
if (doc === false) {
return false;
}
return doc;
}
async fetchEntry(
file: UXFileInfoStub | FilePathWithPrefix,
rev?: string,
waitForReady: boolean = true,
skipCheck = false
): Promise<LoadedEntry | false> {
if (skipCheck && !(await this.checkIsTargetFile(file))) {
return false;
}
const entry = await this.fetchEntryMeta(file, rev, true);
if (entry === false) {
return false;
}
const doc = await this.fetchEntryFromMeta(entry, waitForReady, true);
return doc;
}
async deleteFromDBbyPath(fullPath: FilePath | FilePathWithPrefix, rev?: string): Promise<boolean> {
if (!(await this.checkIsTargetFile(fullPath))) {
this._log(`storeFromStorage: File is not target: ${fullPath}`);
return true;
}
const opt = rev ? { rev: rev } : undefined;
const ret = await this.localDatabase.deleteDBEntry(fullPath, opt);
eventHub.emitEvent(EVENT_FILE_SAVED);
return ret;
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
services.test.test.addHandler(this._everyModuleTest.bind(this));
}
}
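The removed `__store` above skips a database write when the stored entry has the same deleted flag and the same content as the incoming file. A compact sketch of that skip decision, with the content comparison reduced to plain string equality (the real code uses `isDocContentSame` over blobs):

// Illustrative only: decide whether an incoming file actually needs to be written to the database.
type Doc = { data: string; deleted: boolean };
function isWriteNeeded(oldDoc: Doc | undefined, newDoc: Doc, force = false): boolean {
    if (force) return true;            // force writing, as in the module above
    if (!oldDoc) return true;          // nothing stored yet
    if (oldDoc.deleted !== newDoc.deleted) return true; // deletion state changed
    return oldDoc.data !== newDoc.data; // content changed (the real code uses isDocContentSame)
}
console.log(isWriteNeeded({ data: "a", deleted: false }, { data: "a", deleted: false })); // false
console.log(isWriteNeeded({ data: "a", deleted: false }, { data: "b", deleted: false })); // true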

View File

@@ -1,443 +0,0 @@
import { LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "octagonal-wheels/common/logger";
import { serialized } from "octagonal-wheels/concurrency/lock";
import type { FileEventItem } from "../../common/types";
import type {
FilePath,
FilePathWithPrefix,
MetaEntry,
UXFileInfo,
UXFileInfoStub,
UXInternalFileInfoStub,
} from "../../lib/src/common/types";
import { AbstractModule } from "../AbstractModule.ts";
import {
compareFileFreshness,
EVEN,
getPath,
getPathWithoutPrefix,
getStoragePathFromUXFileInfo,
markChangesAreSame,
} from "../../common/utils";
import { getDocDataAsArray, isDocContentSame, readAsBlob, readContent } from "../../lib/src/common/utils";
import { shouldBeIgnored } from "../../lib/src/string_and_binary/path";
import { Semaphore } from "octagonal-wheels/concurrency/semaphore";
import { eventHub } from "../../common/events.ts";
import type { LiveSyncCore } from "../../main.ts";
export class ModuleFileHandler extends AbstractModule {
get db() {
return this.core.databaseFileAccess;
}
get storage() {
return this.core.storageAccess;
}
_everyOnloadStart(): Promise<boolean> {
this.core.fileHandler = this;
return Promise.resolve(true);
}
async readFileFromStub(file: UXFileInfoStub | UXFileInfo) {
if ("body" in file && file.body) {
return file;
}
const readFile = await this.storage.readStubContent(file);
if (!readFile) {
throw new Error(`File ${file.path} is not exist on the storage`);
}
return readFile;
}
async storeFileToDB(
info: UXFileInfoStub | UXFileInfo | UXInternalFileInfoStub | FilePathWithPrefix,
force: boolean = false,
onlyChunks: boolean = false
): Promise<boolean> {
const file = typeof info === "string" ? this.storage.getFileStub(info) : info;
if (file == null) {
this._log(`File ${info} is not exist on the storage`, LOG_LEVEL_VERBOSE);
return false;
}
// const file = item.args.file;
if (file.isInternal) {
this._log(
`Internal file ${file.path} is not allowed to be processed on processFileEvent`,
LOG_LEVEL_VERBOSE
);
return false;
}
// First, check the file on the database
const entry = await this.db.fetchEntry(file, undefined, true, true);
if (!entry || entry.deleted || entry._deleted) {
// If the file does not exist on the database, it should be created.
const readFile = await this.readFileFromStub(file);
if (!onlyChunks) {
return await this.db.store(readFile);
} else {
return await this.db.createChunks(readFile, false, true);
}
}
// The entry exists on the database; check the difference between the file and the entry.
let shouldApplied = false;
if (!force && !onlyChunks) {
// 1. If the timestamps differ significantly, then it should be updated.
// Note: This checks only the mtime with the resolution reduced to 2 seconds.
// The 2-second resolution is for the ZIP file's mtime; without it, we could not back up the vault as a ZIP file.
// This is hardcoded on `compareMtime` of `src/common/utils.ts`.
if (compareFileFreshness(file, entry) !== EVEN) {
shouldApplied = true;
}
// 2. if not, the content should be checked.
let readFile: UXFileInfo | undefined = undefined;
if (!shouldApplied) {
readFile = await this.readFileFromStub(file);
if (!readFile) {
this._log(`File ${file.path} is not exist on the storage`, LOG_LEVEL_NOTICE);
return false;
}
if (await isDocContentSame(getDocDataAsArray(entry.data), readFile.body)) {
// Timestamp is different but the content is the same; therefore, the two timestamps should be treated as the same.
// So, mark the changes as the same.
markChangesAreSame(readFile, readFile.stat.mtime, entry.mtime);
} else {
shouldApplied = true;
}
}
if (!shouldApplied) {
this._log(`File ${file.path} is not changed`, LOG_LEVEL_VERBOSE);
return true;
}
if (!readFile) readFile = await this.readFileFromStub(file);
// If the file is changed, then the file should be stored.
if (onlyChunks) {
return await this.db.createChunks(readFile, false, true);
} else {
return await this.db.store(readFile, false, true);
}
} else {
// If force is true, then it should be updated.
const readFile = await this.readFileFromStub(file);
if (onlyChunks) {
return await this.db.createChunks(readFile, true, true);
} else {
return await this.db.store(readFile, true, true);
}
}
}
async deleteFileFromDB(info: UXFileInfoStub | UXInternalFileInfoStub | FilePath): Promise<boolean> {
const file = typeof info === "string" ? this.storage.getFileStub(info) : info;
if (file == null) {
this._log(`File ${info} is not exist on the storage`, LOG_LEVEL_VERBOSE);
return false;
}
// const file = item.args.file;
if (file.isInternal) {
this._log(
`Internal file ${file.path} is not allowed to be processed on processFileEvent`,
LOG_LEVEL_VERBOSE
);
return false;
}
// First, check the file on the database
const entry = await this.db.fetchEntry(file, undefined, true, true);
if (!entry || entry.deleted || entry._deleted) {
this._log(`File ${file.path} is not exist or already deleted on the database`, LOG_LEVEL_VERBOSE);
return false;
}
// Check whether the file is already conflicted; if so, only the conflicted revision should be deleted.
const conflictedRevs = await this.db.getConflictedRevs(file);
if (conflictedRevs.length > 0) {
// If conflicted, then it should be deleted. entry._rev should be our own file's rev.
// TODO: I BELIEVED SO. BUT I NOTICED THAT I AM NOT SURE. I SHOULD CHECK THIS.
// ANYWAY, I SHOULD DELETE THE FILE. ACTUALLY, WE SIMPLY DELETED THE FILE IN PREVIOUS VERSIONS.
return await this.db.delete(file, entry._rev);
}
// Otherwise, the file should be deleted simply. This is the previous behaviour.
return await this.db.delete(file);
}
async deleteRevisionFromDB(
info: UXFileInfoStub | FilePath | FilePathWithPrefix,
rev: string
): Promise<boolean | undefined> {
// TODO: Possibly check for conflicts.
return await this.db.delete(info, rev);
}
async resolveConflictedByDeletingRevision(
info: UXFileInfoStub | FilePath,
rev: string
): Promise<boolean | undefined> {
const path = getStoragePathFromUXFileInfo(info);
if (!(await this.deleteRevisionFromDB(info, rev))) {
this._log(`Failed to delete the conflicted revision ${rev} of ${path}`, LOG_LEVEL_VERBOSE);
return false;
}
if (!(await this.dbToStorageWithSpecificRev(info, rev, true))) {
this._log(`Failed to apply the resolved revision ${rev} of ${path} to the storage`, LOG_LEVEL_VERBOSE);
return false;
}
}
async dbToStorageWithSpecificRev(
info: UXFileInfoStub | UXFileInfo | FilePath | null,
rev: string,
force?: boolean
): Promise<boolean> {
const file = typeof info === "string" ? this.storage.getFileStub(info) : info;
if (file == null) {
this._log(`File ${info} is not exist on the storage`, LOG_LEVEL_VERBOSE);
return false;
}
const docEntry = await this.db.fetchEntryMeta(file, rev, true);
if (!docEntry) {
this._log(`File ${file.path} is not exist on the database`, LOG_LEVEL_VERBOSE);
return false;
}
return await this.dbToStorage(docEntry, file, force);
}
async dbToStorage(
entryInfo: MetaEntry | FilePathWithPrefix,
info: UXFileInfoStub | UXFileInfo | FilePath | null,
force?: boolean
): Promise<boolean> {
const file = typeof info === "string" ? this.storage.getFileStub(info) : info;
const mode = file == null ? "create" : "modify";
const pathFromEntryInfo = typeof entryInfo === "string" ? entryInfo : getPath(entryInfo);
const docEntry = await this.db.fetchEntryMeta(pathFromEntryInfo, undefined, true);
if (!docEntry) {
this._log(`File ${pathFromEntryInfo} is not exist on the database`, LOG_LEVEL_VERBOSE);
return false;
}
const path = getPath(docEntry);
// 1. Check if it is already conflicted.
const revs = await this.db.getConflictedRevs(path);
if (revs.length > 0) {
// Some conflicts exist.
if (this.settings.writeDocumentsIfConflicted) {
// If configured to write the document even if conflicted, then it should be written.
// NO OP
} else {
// If not, it should be checked and will be processed later (i.e., after the conflict is resolved).
await this.services.conflict.queueCheckForIfOpen(path);
return true;
}
}
// 2. Check if the file already exists on the storage.
const existDoc = this.storage.getStub(path);
if (existDoc && existDoc.isFolder) {
this._log(`Folder ${path} is already exist on the storage as a folder`, LOG_LEVEL_VERBOSE);
// We can do nothing, and other modules should also do nothing.
return true;
}
// Check existence of both file and docEntry.
const existOnDB = !(docEntry._deleted || docEntry.deleted || false);
const existOnStorage = existDoc != null;
if (!existOnDB && !existOnStorage) {
this._log(`File ${path} seems to be deleted, but already not on storage`, LOG_LEVEL_VERBOSE);
return true;
}
if (!existOnDB && existOnStorage) {
// Deletion has been transferred. Storage files will be deleted.
// Note: If the folder becomes empty, the folder will be deleted if not configured to keep it.
// This behaviour is implemented on the `ModuleFileAccessObsidian`.
// And it does not care whether the file was actually deleted.
await this.storage.deleteVaultItem(path);
return true;
}
// Okay, the file exists on the database. Let's check whether the file exists on the storage.
const docRead = await this.db.fetchEntryFromMeta(docEntry);
if (!docRead) {
this._log(`File ${path} is not exist on the database`, LOG_LEVEL_VERBOSE);
return false;
}
// If we want to process size-mismatched files (e.g., files created by some integrations), enable the toggle.
if (!this.settings.processSizeMismatchedFiles) {
// Check that the file is not corrupted
// (Zero is a special case; it may be created by some APIs and might be acceptable).
if (docRead.size != 0 && docRead.size !== readAsBlob(docRead).size) {
this._log(
`File ${path} seems to be corrupted! Writing prevented. (${docRead.size} != ${readAsBlob(docRead).size})`,
LOG_LEVEL_NOTICE
);
return false;
}
}
const docData = readContent(docRead);
if (existOnStorage && !force) {
// The file exists on the storage. Let's check the difference between the file and the entry.
// But, if force is true, then it should be updated.
// Ok, we have to compare.
let shouldApplied = false;
// 1. If the timestamps differ significantly, then it should be updated.
// Note: This checks only the mtime with the resolution reduced to 2 seconds.
// The 2-second resolution is for the ZIP file's mtime; without it, we could not back up the vault as a ZIP file.
// This is hardcoded on `compareMtime` of `src/common/utils.ts`.
if (compareFileFreshness(existDoc, docEntry) !== EVEN) {
shouldApplied = true;
}
// 2. if not, the content should be checked.
if (!shouldApplied) {
const readFile = await this.readFileFromStub(existDoc);
if (await isDocContentSame(docData, readFile.body)) {
// The content is the same, so we do not need to update the file.
shouldApplied = false;
// Timestamp is different but the content is the same; therefore, the two timestamps should be treated as the same.
// So, mark the changes as the same.
markChangesAreSame(docRead, docRead.mtime, existDoc.stat.mtime);
} else {
shouldApplied = true;
}
}
if (!shouldApplied) {
this._log(`File ${docRead.path} is not changed`, LOG_LEVEL_VERBOSE);
return true;
}
// Let's apply the changes.
} else {
this._log(
`File ${docRead.path} ${existOnStorage ? "(new) " : ""} ${force ? " (forced)" : ""}`,
LOG_LEVEL_VERBOSE
);
}
await this.storage.ensureDir(path);
const ret = await this.storage.writeFileAuto(path, docData, { ctime: docRead.ctime, mtime: docRead.mtime });
await this.storage.touched(path);
this.storage.triggerFileEvent(mode, path);
return ret;
}
private async _anyHandlerProcessesFileEvent(item: FileEventItem): Promise<boolean> {
const eventItem = item.args;
const type = item.type;
const path = eventItem.file.path;
if (!(await this.services.vault.isTargetFile(path))) {
this._log(`File ${path} is not the target file`, LOG_LEVEL_VERBOSE);
return false;
}
if (shouldBeIgnored(path)) {
this._log(`File ${path} should be ignored`, LOG_LEVEL_VERBOSE);
return false;
}
const lockKey = `processFileEvent-${path}`;
return await serialized(lockKey, async () => {
switch (type) {
case "CREATE":
case "CHANGED":
return await this.storeFileToDB(item.args.file);
case "DELETE":
return await this.deleteFileFromDB(item.args.file);
case "INTERNAL":
// This should be handled by another module.
return false;
default:
this._log(`Unsupported event type: ${type}`, LOG_LEVEL_VERBOSE);
return false;
}
});
}
async _anyProcessReplicatedDoc(entry: MetaEntry): Promise<boolean> {
return await serialized(entry.path, async () => {
if (!(await this.services.vault.isTargetFile(entry.path))) {
this._log(`File ${entry.path} is not the target file`, LOG_LEVEL_VERBOSE);
return false;
}
if (this.services.vault.isFileSizeTooLarge(entry.size)) {
this._log(`File ${entry.path} is too large (on database) to be processed`, LOG_LEVEL_VERBOSE);
return false;
}
if (shouldBeIgnored(entry.path)) {
this._log(`File ${entry.path} should be ignored`, LOG_LEVEL_VERBOSE);
return false;
}
const path = getPath(entry);
const targetFile = this.storage.getStub(getPathWithoutPrefix(entry));
if (targetFile && targetFile.isFolder) {
this._log(`${getPath(entry)} is already exist as the folder`);
// Nothing to do, and other modules should also do nothing.
return true;
} else {
if (targetFile && this.services.vault.isFileSizeTooLarge(targetFile.stat.size)) {
this._log(`File ${targetFile.path} is too large (on storage) to be processed`, LOG_LEVEL_VERBOSE);
return false;
}
this._log(
`Processing ${path} (${entry._id.substring(0, 8)} :${entry._rev?.substring(0, 5)}) : Started...`,
LOG_LEVEL_VERBOSE
);
// Before writing (or skipping), the merging dialogue should be cancelled.
eventHub.emitEvent("conflict-cancelled", path);
const ret = await this.dbToStorage(entry, targetFile);
this._log(`Processing ${path} (${entry._id.substring(0, 8)} :${entry._rev?.substring(0, 5)}) : Done`);
return ret;
}
});
}
async createAllChunks(showingNotice?: boolean): Promise<void> {
this._log("Collecting local files on the storage", LOG_LEVEL_VERBOSE);
const semaphore = Semaphore(10);
let processed = 0;
const filesStorageSrc = this.storage.getFiles();
const incProcessed = () => {
processed++;
if (processed % 25 == 0)
this._log(
`Creating missing chunks: ${processed} of ${total} files`,
showingNotice ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO,
"chunkCreation"
);
};
const total = filesStorageSrc.length;
const procAllChunks = filesStorageSrc.map(async (file) => {
if (!(await this.services.vault.isTargetFile(file))) {
incProcessed();
return true;
}
if (this.services.vault.isFileSizeTooLarge(file.stat.size)) {
incProcessed();
return true;
}
if (shouldBeIgnored(file.path)) {
incProcessed();
return true;
}
const release = await semaphore.acquire();
incProcessed();
try {
await this.storeFileToDB(file, false, true);
} catch (ex) {
this._log(ex, LOG_LEVEL_VERBOSE);
} finally {
release();
}
});
await Promise.all(procAllChunks);
this._log(
`Creating chunks Done: ${processed} of ${total} files`,
showingNotice ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO,
"chunkCreation"
);
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
services.fileProcessing.processFileEvent.addHandler(this._anyHandlerProcessesFileEvent.bind(this));
services.replication.processSynchroniseResult.addHandler(this._anyProcessReplicatedDoc.bind(this));
}
}

View File

@@ -1,46 +0,0 @@
import { $msg } from "../../lib/src/common/i18n";
import { LiveSyncLocalDB } from "../../lib/src/pouchdb/LiveSyncLocalDB.ts";
import { initializeStores } from "../../common/stores.ts";
import { AbstractModule } from "../AbstractModule.ts";
import { LiveSyncManagers } from "../../lib/src/managers/LiveSyncManagers.ts";
import type { LiveSyncCore } from "../../main.ts";
export class ModuleLocalDatabaseObsidian extends AbstractModule {
_everyOnloadStart(): Promise<boolean> {
return Promise.resolve(true);
}
private async _openDatabase(): Promise<boolean> {
if (this.localDatabase != null) {
await this.localDatabase.close();
}
const vaultName = this.services.vault.getVaultName();
this._log($msg("moduleLocalDatabase.logWaitingForReady"));
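// Pass getters so the managers always read the current database and settings, even after they are re-created.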
const getDB = () => this.core.localDatabase.localDatabase;
const getSettings = () => this.core.settings;
this.core.managers = new LiveSyncManagers({
get database() {
return getDB();
},
getActiveReplicator: () => this.core.replicator,
id2path: this.services.path.id2path,
// path2id: this.core.$$path2id.bind(this.core),
path2id: this.services.path.path2id,
get settings() {
return getSettings();
},
});
this.core.localDatabase = new LiveSyncLocalDB(vaultName, this.core);
initializeStores(vaultName);
return await this.localDatabase.initializeDatabase();
}
_isDatabaseReady(): boolean {
return this.localDatabase != null && this.localDatabase.isReady;
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.database.isDatabaseReady.setHandler(this._isDatabaseReady.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
services.database.openDatabase.setHandler(this._openDatabase.bind(this));
}
}

View File

@@ -31,7 +31,7 @@ export class ModulePeriodicProcess extends AbstractModule {
return this.resumePeriodic();
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.onUnload.addHandler(this._allOnUnload.bind(this));
services.setting.onBeforeRealiseSetting.addHandler(this._everyBeforeRealizeSetting.bind(this));
services.setting.onSettingRealised.addHandler(this._everyAfterRealizeSetting.bind(this));

View File

@@ -1,23 +0,0 @@
import { AbstractModule } from "../AbstractModule";
import { PouchDB } from "../../lib/src/pouchdb/pouchdb-browser";
import type { LiveSyncCore } from "../../main";
import { ExtraSuffixIndexedDB } from "../../lib/src/common/types";
export class ModulePouchDB extends AbstractModule {
_createPouchDBInstance<T extends object>(
name?: string,
options?: PouchDB.Configuration.DatabaseConfiguration
): PouchDB.Database<T> {
const optionPass = options ?? {};
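// The IndexedDB adapter gets a suffixed database name so it does not collide with data created by the default adapter.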
if (this.settings.useIndexedDBAdapter) {
optionPass.adapter = "indexeddb";
//@ts-ignore :missing def
optionPass.purged_infos_limit = 1;
return new PouchDB(name + ExtraSuffixIndexedDB, optionPass);
}
return new PouchDB(name, optionPass);
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.database.createPouchDBInstance.setHandler(this._createPouchDBInstance.bind(this));
}
}

View File

@@ -1,310 +0,0 @@
import { delay } from "octagonal-wheels/promises";
import {
DEFAULT_SETTINGS,
FLAGMD_REDFLAG2_HR,
FLAGMD_REDFLAG3_HR,
LOG_LEVEL_NOTICE,
LOG_LEVEL_VERBOSE,
REMOTE_COUCHDB,
REMOTE_MINIO,
} from "../../lib/src/common/types.ts";
import { AbstractModule } from "../AbstractModule.ts";
import type { Rebuilder } from "../interfaces/DatabaseRebuilder.ts";
import type { LiveSyncCouchDBReplicator } from "../../lib/src/replication/couchdb/LiveSyncReplicator.ts";
import { fetchAllUsedChunks } from "@/lib/src/pouchdb/chunks.ts";
import { EVENT_DATABASE_REBUILT, eventHub } from "src/common/events.ts";
import type { LiveSyncCore } from "../../main.ts";
export class ModuleRebuilder extends AbstractModule implements Rebuilder {
private _everyOnload(): Promise<boolean> {
this.core.rebuilder = this;
return Promise.resolve(true);
}
async $performRebuildDB(
method: "localOnly" | "remoteOnly" | "rebuildBothByThisDevice" | "localOnlyWithChunks"
): Promise<void> {
if (method == "localOnly") {
await this.$fetchLocal();
}
if (method == "localOnlyWithChunks") {
await this.$fetchLocal(true);
}
if (method == "remoteOnly") {
await this.$rebuildRemote();
}
if (method == "rebuildBothByThisDevice") {
await this.$rebuildEverything();
}
}
async informOptionalFeatures() {
await this.core.services.UI.showMarkdownDialog(
"All optional features are disabled",
`Customisation Sync and Hidden File Sync will both be disabled.
Please enable them from the settings screen after setup is complete.`,
["OK"]
);
}
async askUsingOptionalFeature(opt: { enableFetch?: boolean; enableOverwrite?: boolean }) {
if (
(await this.core.confirm.askYesNoDialog(
"Do you want to enable extra features? If you are new to Self-hosted LiveSync, try the core feature first!",
{ title: "Enable extra features", defaultOption: "No", timeout: 15 }
)) == "yes"
) {
await this.services.setting.suggestOptionalFeatures(opt);
}
}
async rebuildRemote() {
await this.services.setting.suspendExtraSync();
this.core.settings.isConfigured = true;
this.core.settings.notifyThresholdOfRemoteStorageSize = DEFAULT_SETTINGS.notifyThresholdOfRemoteStorageSize;
await this.services.setting.realiseSetting();
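// Lock the remote so other devices do not keep using it as-is, reset it, then re-apply the lock on the fresh database.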
await this.services.remote.markLocked();
await this.services.remote.tryResetDatabase();
await this.services.remote.markLocked();
await delay(500);
// await this.askUsingOptionalFeature({ enableOverwrite: true });
await delay(1000);
await this.services.remote.replicateAllToRemote(true);
await delay(1000);
await this.services.remote.replicateAllToRemote(true, true);
await this.informOptionalFeatures();
}
$rebuildRemote(): Promise<void> {
return this.rebuildRemote();
}
async rebuildEverything() {
await this.services.setting.suspendExtraSync();
// await this.askUseNewAdapter();
this.core.settings.isConfigured = true;
this.core.settings.notifyThresholdOfRemoteStorageSize = DEFAULT_SETTINGS.notifyThresholdOfRemoteStorageSize;
await this.services.setting.realiseSetting();
await this.resetLocalDatabase();
await delay(1000);
await this.services.databaseEvents.initialiseDatabase(true, true, true);
await this.services.remote.markLocked();
await this.services.remote.tryResetDatabase();
await this.services.remote.markLocked();
await delay(500);
// We do not have any other devices' data, so we do not need to ask for overwriting.
// await this.askUsingOptionalFeature({ enableOverwrite: false });
await delay(1000);
await this.services.remote.replicateAllToRemote(true);
await delay(1000);
await this.services.remote.replicateAllToRemote(true, true);
await this.informOptionalFeatures();
}
$rebuildEverything(): Promise<void> {
return this.rebuildEverything();
}
$fetchLocal(makeLocalChunkBeforeSync?: boolean, preventMakeLocalFilesBeforeSync?: boolean): Promise<void> {
return this.fetchLocal(makeLocalChunkBeforeSync, preventMakeLocalFilesBeforeSync);
}
async scheduleRebuild(): Promise<void> {
try {
await this.core.storageAccess.writeFileAuto(FLAGMD_REDFLAG2_HR, "");
} catch (ex) {
this._log("Could not create red_flag_rebuild.md", LOG_LEVEL_NOTICE);
this._log(ex, LOG_LEVEL_VERBOSE);
}
this.services.appLifecycle.performRestart();
}
async scheduleFetch(): Promise<void> {
try {
await this.core.storageAccess.writeFileAuto(FLAGMD_REDFLAG3_HR, "");
} catch (ex) {
this._log("Could not create red_flag_fetch.md", LOG_LEVEL_NOTICE);
this._log(ex, LOG_LEVEL_VERBOSE);
}
this.services.appLifecycle.performRestart();
}
private async _tryResetRemoteDatabase(): Promise<void> {
await this.core.replicator.tryResetRemoteDatabase(this.settings);
}
private async _tryCreateRemoteDatabase(): Promise<void> {
await this.core.replicator.tryCreateRemoteDatabase(this.settings);
}
private async _resetLocalDatabase(): Promise<boolean> {
this.core.storageAccess.clearTouched();
return await this.localDatabase.resetDatabase();
}
async suspendAllSync() {
this.core.settings.liveSync = false;
this.core.settings.periodicReplication = false;
this.core.settings.syncOnSave = false;
this.core.settings.syncOnEditorSave = false;
this.core.settings.syncOnStart = false;
this.core.settings.syncOnFileOpen = false;
this.core.settings.syncAfterMerge = false;
await this.services.setting.suspendExtraSync();
}
async suspendReflectingDatabase() {
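// While fetching, stop applying replication results and stop watching storage changes (skipped for MinIO remotes and when doNotSuspendOnFetching is set).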
if (this.core.settings.doNotSuspendOnFetching) return;
if (this.core.settings.remoteType == REMOTE_MINIO) return;
this._log(
`Suspending reflection: Database and storage changes will not be reflected in each other until fetching has completely finished.`,
LOG_LEVEL_NOTICE
);
this.core.settings.suspendParseReplicationResult = true;
this.core.settings.suspendFileWatching = true;
await this.core.saveSettings();
}
async resumeReflectingDatabase() {
if (this.core.settings.doNotSuspendOnFetching) return;
if (this.core.settings.remoteType == REMOTE_MINIO) return;
this._log(`Database and storage reflection has been resumed!`, LOG_LEVEL_NOTICE);
this.core.settings.suspendParseReplicationResult = false;
this.core.settings.suspendFileWatching = false;
await this.services.vault.scanVault(true);
await this.services.replication.onBeforeReplicate(false); //TODO: Check actual need of this.
await this.core.saveSettings();
}
// No longer needed; both adapters have their own advantages and disadvantages.
// async askUseNewAdapter() {
// if (!this.core.settings.useIndexedDBAdapter) {
// const message = `Now this core has been configured to use the old database adapter for keeping compatibility. Do you want to deactivate it?`;
// const CHOICE_YES = "Yes, disable and use latest";
// const CHOICE_NO = "No, keep compatibility";
// const choices = [CHOICE_YES, CHOICE_NO];
//
// const ret = await this.core.confirm.confirmWithMessage(
// "Database adapter",
// message,
// choices,
// CHOICE_YES,
// 10
// );
// if (ret == CHOICE_YES) {
// this.core.settings.useIndexedDBAdapter = true;
// }
// }
// }
async fetchLocal(makeLocalChunkBeforeSync?: boolean, preventMakeLocalFilesBeforeSync?: boolean) {
await this.services.setting.suspendExtraSync();
// await this.askUseNewAdapter();
this.core.settings.isConfigured = true;
this.core.settings.notifyThresholdOfRemoteStorageSize = DEFAULT_SETTINGS.notifyThresholdOfRemoteStorageSize;
if (this.core.settings.maxMTimeForReflectEvents > 0) {
const date = new Date(this.core.settings.maxMTimeForReflectEvents);
const ask = `Your settings restrict file reflection times to no later than ${date}.
**This is a recovery configuration.**
This operation should only be performed on an empty vault.
Are you sure you wish to proceed?`;
const PROCEED = "I understand, proceed";
const CANCEL = "Cancel operation";
const CLEARANDPROCEED = "Clear restriction and proceed";
const choices = [PROCEED, CLEARANDPROCEED, CANCEL] as const;
const ret = await this.core.confirm.askSelectStringDialogue(ask, choices, {
title: "Confirm restricted fetch",
defaultAction: CANCEL,
timeout: 0,
});
if (ret == CLEARANDPROCEED) {
this.core.settings.maxMTimeForReflectEvents = 0;
await this.core.saveSettings();
}
if (ret == CANCEL) {
return;
}
}
await this.suspendReflectingDatabase();
await this.services.setting.realiseSetting();
await this.resetLocalDatabase();
await delay(1000);
await this.services.database.openDatabase();
// this.core.isReady = true;
this.services.appLifecycle.markIsReady();
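// Decide how to seed the local database before pulling from the remote: create chunks only, scan the whole vault, or rely on the remote data alone.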
if (makeLocalChunkBeforeSync) {
await this.core.fileHandler.createAllChunks(true);
} else if (!preventMakeLocalFilesBeforeSync) {
await this.services.databaseEvents.initialiseDatabase(true, true, true);
} else {
// Do not create local file entries before sync (i.e., use the remote information as-is).
}
await this.services.remote.markResolved();
await delay(500);
await this.services.remote.replicateAllFromRemote(true);
await delay(1000);
await this.services.remote.replicateAllFromRemote(true);
await this.resumeReflectingDatabase();
await this.informOptionalFeatures();
// No longer enabled here:
// await this.askUsingOptionalFeature({ enableFetch: true });
}
async fetchLocalWithRebuild() {
return await this.fetchLocal(true);
}
private async _allSuspendAllSync(): Promise<boolean> {
await this.suspendAllSync();
return true;
}
async resetLocalDatabase() {
if (this.core.settings.isConfigured && this.core.settings.additionalSuffixOfDatabaseName == "") {
// Discard the non-suffixed database
await this.services.database.resetDatabase();
}
const suffix = this.services.API.getAppID() || "";
this.core.settings.additionalSuffixOfDatabaseName = suffix;
await this.services.database.resetDatabase();
eventHub.emitEvent(EVENT_DATABASE_REBUILT);
}
async fetchRemoteChunks() {
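// Fetch all chunks referenced by the local database from the remote CouchDB in advance (CouchDB remotes only).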
if (
!this.core.settings.doNotSuspendOnFetching &&
!this.core.settings.useOnlyLocalChunk &&
this.core.settings.remoteType == REMOTE_COUCHDB
) {
this._log(`Fetching chunks`, LOG_LEVEL_NOTICE);
const replicator = this.services.replicator.getActiveReplicator() as LiveSyncCouchDBReplicator;
const remoteDB = await replicator.connectRemoteCouchDBWithSetting(
this.settings,
this.services.API.isMobile(),
true
);
if (typeof remoteDB == "string") {
this._log(remoteDB, LOG_LEVEL_NOTICE);
} else {
await fetchAllUsedChunks(this.localDatabase.localDatabase, remoteDB.db);
}
this._log(`Fetching chunks done`, LOG_LEVEL_NOTICE);
}
}
async resolveAllConflictedFilesByNewerOnes() {
this._log(`Resolving conflicts by newer ones`, LOG_LEVEL_NOTICE);
const files = this.core.storageAccess.getFileNames();
let i = 0;
for (const file of files) {
if (i++ % 10 == 0)
this._log(
`Checking and processing ${i} / ${files.length}`,
LOG_LEVEL_NOTICE,
"resolveAllConflictedFilesByNewerOnes"
);
await this.services.conflict.resolveByNewest(file);
}
this._log(`Done!`, LOG_LEVEL_NOTICE, "resolveAllConflictedFilesByNewerOnes");
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
services.database.resetDatabase.setHandler(this._resetLocalDatabase.bind(this));
services.remote.tryResetDatabase.setHandler(this._tryResetRemoteDatabase.bind(this));
services.remote.tryCreateDatabase.setHandler(this._tryCreateRemoteDatabase.bind(this));
services.setting.suspendAllSync.addHandler(this._allSuspendAllSync.bind(this));
}
}

View File

@@ -1,48 +1,69 @@
import { fireAndForget, yieldMicrotask } from "octagonal-wheels/promises";
import type { LiveSyncLocalDB } from "../../lib/src/pouchdb/LiveSyncLocalDB";
import { fireAndForget } from "octagonal-wheels/promises";
import { AbstractModule } from "../AbstractModule";
import {
Logger,
LOG_LEVEL_NOTICE,
LOG_LEVEL_INFO,
LOG_LEVEL_VERBOSE,
LEVEL_NOTICE,
LEVEL_INFO,
type LOG_LEVEL,
} from "octagonal-wheels/common/logger";
import { isLockAcquired, shareRunningResult, skipIfDuplicated } from "octagonal-wheels/concurrency/lock";
import { balanceChunkPurgedDBs } from "@/lib/src/pouchdb/chunks";
import { purgeUnreferencedChunks } from "@/lib/src/pouchdb/chunks";
import { Logger, LOG_LEVEL_NOTICE, LOG_LEVEL_INFO } from "octagonal-wheels/common/logger";
import { skipIfDuplicated } from "octagonal-wheels/concurrency/lock";
import { balanceChunkPurgedDBs } from "@lib/pouchdb/chunks";
import { purgeUnreferencedChunks } from "@lib/pouchdb/chunks";
import { LiveSyncCouchDBReplicator } from "../../lib/src/replication/couchdb/LiveSyncReplicator";
import { type EntryDoc, type RemoteType } from "../../lib/src/common/types";
import { rateLimitedSharedExecution, scheduleTask, updatePreviousExecutionTime } from "../../common/utils";
import { EVENT_FILE_SAVED, EVENT_ON_UNRESOLVED_ERROR, EVENT_SETTING_SAVED, eventHub } from "../../common/events";
import type { LiveSyncAbstractReplicator } from "../../lib/src/replication/LiveSyncAbstractReplicator";
import { scheduleTask } from "../../common/utils";
import { EVENT_FILE_SAVED, EVENT_SETTING_SAVED, eventHub } from "../../common/events";
import { $msg } from "../../lib/src/common/i18n";
import { clearHandlers } from "../../lib/src/replication/SyncParamsHandler";
import type { LiveSyncCore } from "../../main";
import { ReplicateResultProcessor } from "./ReplicateResultProcessor";
import { UnresolvedErrorManager } from "@lib/services/base/UnresolvedErrorManager";
import { clearHandlers } from "@lib/replication/SyncParamsHandler";
import type { NecessaryServices } from "@/serviceFeatures/types";
const KEY_REPLICATION_ON_EVENT = "replicationOnEvent";
const REPLICATION_ON_EVENT_FORECASTED_TIME = 5000;
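// onBeforeReplicate check: replication is allowed only while the network manager reports the device as online.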
function isOnlineAndCanReplicate(
errorManager: UnresolvedErrorManager,
host: NecessaryServices<"database", any>,
showMessage: boolean
): Promise<boolean> {
const errorMessage = "Network is offline";
const manager = host.services.database.managers.networkManager;
if (!manager.isOnline) {
errorManager.showError(errorMessage, showMessage ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO);
return Promise.resolve(false);
}
errorManager.clearError(errorMessage);
return Promise.resolve(true);
}
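// onBeforeReplicate check: an active replicator must exist and its PBKDF2 salt (used for key derivation) must be initialised before replication starts.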
async function canReplicateWithPBKDF2(
errorManager: UnresolvedErrorManager,
host: NecessaryServices<"replicator" | "setting", any>,
showMessage: boolean
): Promise<boolean> {
const currentSettings = host.services.setting.currentSettings();
// TODO: check using PBKDF2 salt?
const errorMessage = $msg("Replicator.Message.InitialiseFatalError");
const replicator = host.services.replicator.getActiveReplicator();
if (!replicator) {
errorManager.showError(errorMessage, showMessage ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO);
return false;
}
errorManager.clearError(errorMessage);
const ensureMessage = "Failed to initialise the encryption key, preventing replication.";
const ensureResult = await replicator.ensurePBKDF2Salt(currentSettings, showMessage, true);
if (!ensureResult) {
errorManager.showError(ensureMessage, showMessage ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO);
return false;
}
errorManager.clearError(ensureMessage);
return ensureResult; // is true.
}
export class ModuleReplicator extends AbstractModule {
_replicatorType?: RemoteType;
_previousErrors = new Set<string>();
processor: ReplicateResultProcessor = new ReplicateResultProcessor(this);
showError(msg: string, max_log_level: LOG_LEVEL = LEVEL_NOTICE) {
const level = this._previousErrors.has(msg) ? LEVEL_INFO : max_log_level;
this._log(msg, level);
if (!this._previousErrors.has(msg)) {
this._previousErrors.add(msg);
eventHub.emitEvent(EVENT_ON_UNRESOLVED_ERROR);
}
}
processor: ReplicateResultProcessor = new ReplicateResultProcessor(this);
private _unresolvedErrorManager: UnresolvedErrorManager = new UnresolvedErrorManager(
this.core.services.appLifecycle
);
clearErrors() {
this._previousErrors.clear();
eventHub.emitEvent(EVENT_ON_UNRESOLVED_ERROR);
this._unresolvedErrorManager.clearErrors();
}
private _everyOnloadAfterLoadSettings(): Promise<boolean> {
@@ -52,9 +73,6 @@ export class ModuleReplicator extends AbstractModule {
}
});
eventHub.onEvent(EVENT_SETTING_SAVED, (setting) => {
if (this._replicatorType !== setting.remoteType) {
void this.setReplicator();
}
if (this.core.settings.suspendParseReplicationResult) {
this.processor.suspend();
} else {
@@ -65,74 +83,23 @@ export class ModuleReplicator extends AbstractModule {
return Promise.resolve(true);
}
async setReplicator() {
const replicator = await this.services.replicator.getNewReplicator();
if (!replicator) {
this.showError($msg("Replicator.Message.InitialiseFatalError"), LOG_LEVEL_NOTICE);
return false;
}
if (this.core.replicator) {
await this.core.replicator.closeReplication();
this._log("Replicator closed for changing", LOG_LEVEL_VERBOSE);
}
this.core.replicator = replicator;
this._replicatorType = this.settings.remoteType;
await yieldMicrotask();
// Clear any existing sync parameter handlers (means clearing key-deriving salt).
_onReplicatorInitialised(): Promise<boolean> {
// For now, we only need to clear the error related to replicator initialisation, but in the future, if there are more things to do when the replicator is initialised, we can add them here.
clearHandlers();
return true;
return Promise.resolve(true);
}
_getReplicator(): LiveSyncAbstractReplicator {
return this.core.replicator;
}
_everyOnInitializeDatabase(db: LiveSyncLocalDB): Promise<boolean> {
return this.setReplicator();
}
_everyOnDatabaseInitialized(showNotice: boolean): Promise<boolean> {
fireAndForget(() => this.processor.restoreFromSnapshotOnce());
return Promise.resolve(true);
}
_everyOnResetDatabase(db: LiveSyncLocalDB): Promise<boolean> {
return this.setReplicator();
}
async ensureReplicatorPBKDF2Salt(showMessage: boolean = false): Promise<boolean> {
// Checking salt
const replicator = this.services.replicator.getActiveReplicator();
if (!replicator) {
this.showError($msg("Replicator.Message.InitialiseFatalError"), LOG_LEVEL_NOTICE);
return false;
}
return await replicator.ensurePBKDF2Salt(this.settings, showMessage, true);
}
async _everyBeforeReplicate(showMessage: boolean): Promise<boolean> {
// Checking salt
if (!this.core.managers.networkManager.isOnline) {
this.showError("Network is offline", showMessage ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO);
return false;
}
// showMessage is false because the error message is shown here instead. (And it is a fatal error; there is no way to hide it.)
if (!(await this.ensureReplicatorPBKDF2Salt(false))) {
this.showError("Failed to initialise the encryption key, preventing replication.");
return false;
}
await this.processor.restoreFromSnapshotOnce();
this.clearErrors();
return true;
}
private async _replicate(showMessage: boolean = false): Promise<boolean | void> {
try {
updatePreviousExecutionTime(KEY_REPLICATION_ON_EVENT, REPLICATION_ON_EVENT_FORECASTED_TIME);
return await this.$$_replicate(showMessage);
} finally {
updatePreviousExecutionTime(KEY_REPLICATION_ON_EVENT);
}
}
/**
* obsolete method. No longer maintained and will be removed in the future.
* @deprecated v0.24.17
@@ -192,156 +159,129 @@ Even if you choose to clean up, you will see this option again if you exit Obsid
});
}
async _canReplicate(showMessage: boolean = false): Promise<boolean> {
if (!this.services.appLifecycle.isReady()) {
Logger(`Not ready`);
private async onReplicationFailed(showMessage: boolean = false): Promise<boolean> {
const activeReplicator = this.services.replicator.getActiveReplicator();
if (!activeReplicator) {
Logger(`No active replicator found`, LOG_LEVEL_INFO);
return false;
}
if (isLockAcquired("cleanup")) {
Logger($msg("Replicator.Message.Cleaned"), LOG_LEVEL_NOTICE);
return false;
}
if (this.settings.versionUpFlash != "") {
Logger($msg("Replicator.Message.VersionUpFlash"), LOG_LEVEL_NOTICE);
return false;
}
if (!(await this.services.fileProcessing.commitPendingFileEvents())) {
this.showError($msg("Replicator.Message.Pending"), LOG_LEVEL_NOTICE);
return false;
}
if (!this.core.managers.networkManager.isOnline) {
this.showError("Network is offline", showMessage ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO);
return false;
}
if (!(await this.services.replication.onBeforeReplicate(showMessage))) {
this.showError($msg("Replicator.Message.SomeModuleFailed"), LOG_LEVEL_NOTICE);
return false;
}
this.clearErrors();
return true;
}
async $$_replicate(showMessage: boolean = false): Promise<boolean | void> {
const checkBeforeReplicate = await this.services.replication.isReplicationReady(showMessage);
if (!checkBeforeReplicate) return false;
// <-- This could be a module.
const ret = await this.core.replicator.openReplication(this.settings, false, showMessage, false);
if (!ret) {
if (this.core.replicator.tweakSettingsMismatched && this.core.replicator.preferredTweakValue) {
await this.services.tweakValue.askResolvingMismatched(this.core.replicator.preferredTweakValue);
} else {
if (this.core.replicator?.remoteLockedAndDeviceNotAccepted) {
if (this.core.replicator.remoteCleaned && this.settings.useIndexedDBAdapter) {
await this.cleaned(showMessage);
} else {
const message = $msg("Replicator.Dialogue.Locked.Message");
const CHOICE_FETCH = $msg("Replicator.Dialogue.Locked.Action.Fetch");
const CHOICE_DISMISS = $msg("Replicator.Dialogue.Locked.Action.Dismiss");
const CHOICE_UNLOCK = $msg("Replicator.Dialogue.Locked.Action.Unlock");
const ret = await this.core.confirm.askSelectStringDialogue(
message,
[CHOICE_FETCH, CHOICE_UNLOCK, CHOICE_DISMISS],
{
title: $msg("Replicator.Dialogue.Locked.Title"),
defaultAction: CHOICE_DISMISS,
timeout: 60,
}
);
if (ret == CHOICE_FETCH) {
this._log($msg("Replicator.Dialogue.Locked.Message.Fetch"), LOG_LEVEL_NOTICE);
await this.core.rebuilder.scheduleFetch();
this.services.appLifecycle.scheduleRestart();
return;
} else if (ret == CHOICE_UNLOCK) {
await this.core.replicator.markRemoteResolved(this.settings);
this._log($msg("Replicator.Dialogue.Locked.Message.Unlocked"), LOG_LEVEL_NOTICE);
return;
if (activeReplicator.tweakSettingsMismatched && activeReplicator.preferredTweakValue) {
await this.services.tweakValue.askResolvingMismatched(activeReplicator.preferredTweakValue);
} else {
if (activeReplicator.remoteLockedAndDeviceNotAccepted) {
if (activeReplicator.remoteCleaned && this.settings.useIndexedDBAdapter) {
await this.cleaned(showMessage);
} else {
const message = $msg("Replicator.Dialogue.Locked.Message");
const CHOICE_FETCH = $msg("Replicator.Dialogue.Locked.Action.Fetch");
const CHOICE_DISMISS = $msg("Replicator.Dialogue.Locked.Action.Dismiss");
const CHOICE_UNLOCK = $msg("Replicator.Dialogue.Locked.Action.Unlock");
const ret = await this.core.confirm.askSelectStringDialogue(
message,
[CHOICE_FETCH, CHOICE_UNLOCK, CHOICE_DISMISS],
{
title: $msg("Replicator.Dialogue.Locked.Title"),
defaultAction: CHOICE_DISMISS,
timeout: 60,
}
);
if (ret == CHOICE_FETCH) {
this._log($msg("Replicator.Dialogue.Locked.Message.Fetch"), LOG_LEVEL_NOTICE);
await this.core.rebuilder.scheduleFetch();
this.services.appLifecycle.scheduleRestart();
return false;
} else if (ret == CHOICE_UNLOCK) {
await activeReplicator.markRemoteResolved(this.settings);
this._log($msg("Replicator.Dialogue.Locked.Message.Unlocked"), LOG_LEVEL_NOTICE);
return false;
}
}
}
}
return ret;
// TODO: Check the true/false return value again. This will become the result of performReplication.
return false;
}
private async _replicateByEvent(): Promise<boolean | void> {
const least = this.settings.syncMinimumInterval;
if (least > 0) {
return rateLimitedSharedExecution(KEY_REPLICATION_ON_EVENT, least, async () => {
return await this.services.replication.replicate();
});
}
return await shareRunningResult(`replication`, () => this.services.replication.replicate());
}
// private async _replicateByEvent(): Promise<boolean | void> {
// const least = this.settings.syncMinimumInterval;
// if (least > 0) {
// return rateLimitedSharedExecution(KEY_REPLICATION_ON_EVENT, least, async () => {
// return await this.services.replication.replicate();
// });
// }
// return await shareRunningResult(`replication`, () => this.services.replication.replicate());
// }
_parseReplicationResult(docs: Array<PouchDB.Core.ExistingDocument<EntryDoc>>): void {
_parseReplicationResult(docs: Array<PouchDB.Core.ExistingDocument<EntryDoc>>): Promise<boolean> {
this.processor.enqueueAll(docs);
}
_everyBeforeSuspendProcess(): Promise<boolean> {
this.core.replicator?.closeReplication();
return Promise.resolve(true);
}
private async _replicateAllToServer(
showingNotice: boolean = false,
sendChunksInBulkDisabled: boolean = false
): Promise<boolean> {
if (!this.services.appLifecycle.isReady()) return false;
if (!(await this.services.replication.onBeforeReplicate(showingNotice))) {
Logger($msg("Replicator.Message.SomeModuleFailed"), LOG_LEVEL_NOTICE);
return false;
}
if (!sendChunksInBulkDisabled) {
if (this.core.replicator instanceof LiveSyncCouchDBReplicator) {
if (
(await this.core.confirm.askYesNoDialog("Do you want to send all chunks before replication?", {
defaultOption: "No",
timeout: 20,
})) == "yes"
) {
await this.core.replicator.sendChunks(this.core.settings, undefined, true, 0);
}
}
}
const ret = await this.core.replicator.replicateAllToServer(this.settings, showingNotice);
if (ret) return true;
const checkResult = await this.services.replication.checkConnectionFailure();
if (checkResult == "CHECKAGAIN") return await this.services.remote.replicateAllToRemote(showingNotice);
return !checkResult;
}
async _replicateAllFromServer(showingNotice: boolean = false): Promise<boolean> {
if (!this.services.appLifecycle.isReady()) return false;
const ret = await this.core.replicator.replicateAllFromServer(this.settings, showingNotice);
if (ret) return true;
const checkResult = await this.services.replication.checkConnectionFailure();
if (checkResult == "CHECKAGAIN") return await this.services.remote.replicateAllFromRemote(showingNotice);
return !checkResult;
}
// _everyBeforeSuspendProcess(): Promise<boolean> {
// this.core.replicator?.closeReplication();
// return Promise.resolve(true);
// }
private _reportUnresolvedMessages(): Promise<string[]> {
return Promise.resolve([...this._previousErrors]);
}
// private async _replicateAllToServer(
// showingNotice: boolean = false,
// sendChunksInBulkDisabled: boolean = false
// ): Promise<boolean> {
// if (!this.services.appLifecycle.isReady()) return false;
// if (!(await this.services.replication.onBeforeReplicate(showingNotice))) {
// Logger($msg("Replicator.Message.SomeModuleFailed"), LOG_LEVEL_NOTICE);
// return false;
// }
// if (!sendChunksInBulkDisabled) {
// if (this.core.replicator instanceof LiveSyncCouchDBReplicator) {
// if (
// (await this.core.confirm.askYesNoDialog("Do you want to send all chunks before replication?", {
// defaultOption: "No",
// timeout: 20,
// })) == "yes"
// ) {
// await this.core.replicator.sendChunks(this.core.settings, undefined, true, 0);
// }
// }
// }
// const ret = await this.core.replicator.replicateAllToServer(this.settings, showingNotice);
// if (ret) return true;
// const checkResult = await this.services.replication.checkConnectionFailure();
// if (checkResult == "CHECKAGAIN") return await this.services.remote.replicateAllToRemote(showingNotice);
// return !checkResult;
// }
// async _replicateAllFromServer(showingNotice: boolean = false): Promise<boolean> {
// if (!this.services.appLifecycle.isReady()) return false;
// const ret = await this.core.replicator.replicateAllFromServer(this.settings, showingNotice);
// if (ret) return true;
// const checkResult = await this.services.replication.checkConnectionFailure();
// if (checkResult == "CHECKAGAIN") return await this.services.remote.replicateAllFromRemote(showingNotice);
// return !checkResult;
// }
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.replicator.getActiveReplicator.setHandler(this._getReplicator.bind(this));
services.databaseEvents.onDatabaseInitialisation.addHandler(this._everyOnInitializeDatabase.bind(this));
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.replicator.onReplicatorInitialised.addHandler(this._onReplicatorInitialised.bind(this));
services.databaseEvents.onDatabaseInitialised.addHandler(this._everyOnDatabaseInitialized.bind(this));
services.databaseEvents.onResetDatabase.addHandler(this._everyOnResetDatabase.bind(this));
services.appLifecycle.onSettingLoaded.addHandler(this._everyOnloadAfterLoadSettings.bind(this));
services.replication.parseSynchroniseResult.setHandler(this._parseReplicationResult.bind(this));
services.appLifecycle.onSuspending.addHandler(this._everyBeforeSuspendProcess.bind(this));
services.replication.onBeforeReplicate.addHandler(this._everyBeforeReplicate.bind(this));
services.replication.isReplicationReady.setHandler(this._canReplicate.bind(this));
services.replication.replicate.setHandler(this._replicate.bind(this));
services.replication.replicateByEvent.setHandler(this._replicateByEvent.bind(this));
services.remote.replicateAllToRemote.setHandler(this._replicateAllToServer.bind(this));
services.remote.replicateAllFromRemote.setHandler(this._replicateAllFromServer.bind(this));
services.appLifecycle.getUnresolvedMessages.addHandler(this._reportUnresolvedMessages.bind(this));
services.replication.parseSynchroniseResult.addHandler(this._parseReplicationResult.bind(this));
// --> These handlers can be separated.
const isOnlineAndCanReplicateWithHost = isOnlineAndCanReplicate.bind(null, this._unresolvedErrorManager, {
services: {
database: services.database,
},
serviceModules: {},
});
const canReplicateWithPBKDF2WithHost = canReplicateWithPBKDF2.bind(null, this._unresolvedErrorManager, {
services: {
replicator: services.replicator,
setting: services.setting,
},
serviceModules: {},
});
services.replication.onBeforeReplicate.addHandler(isOnlineAndCanReplicateWithHost, 10);
services.replication.onBeforeReplicate.addHandler(canReplicateWithPBKDF2WithHost, 20);
// <-- End of handlers that can be separated.
services.replication.onBeforeReplicate.addHandler(this._everyBeforeReplicate.bind(this), 100);
services.replication.onReplicationFailed.addHandler(this.onReplicationFailed.bind(this));
}
}

View File

@@ -35,7 +35,7 @@ export class ModuleReplicatorCouchDB extends AbstractModule {
return Promise.resolve(true);
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.replicator.getNewReplicator.addHandler(this._anyNewReplicator.bind(this));
services.appLifecycle.onResumed.addHandler(this._everyAfterResumeProcess.bind(this));
}

View File

@@ -12,7 +12,7 @@ export class ModuleReplicatorMinIO extends AbstractModule {
}
return Promise.resolve(false);
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.replicator.getNewReplicator.addHandler(this._anyNewReplicator.bind(this));
}
}

View File

@@ -27,7 +27,7 @@ export class ModuleReplicatorP2P extends AbstractModule {
return Promise.resolve(true);
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.replicator.getNewReplicator.addHandler(this._anyNewReplicator.bind(this));
services.appLifecycle.onResumed.addHandler(this._everyAfterResumeProcess.bind(this));
}

View File

@@ -1,185 +1,155 @@
import { LRUCache } from "octagonal-wheels/memory/LRUCache";
import {
getStoragePathFromUXFileInfo,
id2path,
isInternalMetadata,
path2id,
stripInternalMetadataPrefix,
useMemo,
} from "../../common/utils";
import {
LOG_LEVEL_VERBOSE,
type DocumentID,
type EntryHasPath,
type FilePath,
type FilePathWithPrefix,
type ObsidianLiveSyncSettings,
type UXFileInfoStub,
} from "../../lib/src/common/types";
import { addPrefix, isAcceptedAll } from "../../lib/src/string_and_binary/path";
import { getStoragePathFromUXFileInfo } from "../../common/utils";
import { LOG_LEVEL_DEBUG, LOG_LEVEL_VERBOSE, type UXFileInfoStub } from "../../lib/src/common/types";
import { isAcceptedAll } from "../../lib/src/string_and_binary/path";
import { AbstractModule } from "../AbstractModule";
import { EVENT_REQUEST_RELOAD_SETTING_TAB, EVENT_SETTING_SAVED, eventHub } from "../../common/events";
import { isDirty } from "../../lib/src/common/utils";
import type { LiveSyncCore } from "../../main";
import { Computed } from "octagonal-wheels/dataobject/Computed";
export class ModuleTargetFilter extends AbstractModule {
reloadIgnoreFiles() {
ignoreFiles: string[] = [];
private refreshSettings() {
this.ignoreFiles = this.settings.ignoreFiles.split(",").map((e) => e.trim());
}
private _everyOnload(): Promise<boolean> {
this.reloadIgnoreFiles();
eventHub.onEvent(EVENT_SETTING_SAVED, (evt: ObsidianLiveSyncSettings) => {
this.reloadIgnoreFiles();
});
eventHub.onEvent(EVENT_REQUEST_RELOAD_SETTING_TAB, () => {
this.reloadIgnoreFiles();
});
return Promise.resolve(true);
}
_id2path(id: DocumentID, entry?: EntryHasPath, stripPrefix?: boolean): FilePathWithPrefix {
const tempId = id2path(id, entry);
if (stripPrefix && isInternalMetadata(tempId)) {
const out = stripInternalMetadataPrefix(tempId);
return out;
}
return tempId;
}
async _path2id(filename: FilePathWithPrefix | FilePath, prefix?: string): Promise<DocumentID> {
const destPath = addPrefix(filename, prefix ?? "");
return await path2id(
destPath,
this.settings.usePathObfuscation ? this.settings.passphrase : "",
!this.settings.handleFilenameCaseSensitive
);
}
private _isFileSizeExceeded(size: number) {
if (this.settings.syncMaxSizeInMB > 0 && size > 0) {
if (this.settings.syncMaxSizeInMB * 1024 * 1024 < size) {
return true;
}
}
return false;
private _everyOnload(): Promise<boolean> {
void this.refreshSettings();
return Promise.resolve(true);
}
_markFileListPossiblyChanged(): void {
this.totalFileEventCount++;
}
totalFileEventCount = 0;
get fileListPossiblyChanged() {
if (isDirty("totalFileEventCount", this.totalFileEventCount)) {
return true;
}
return false;
}
private async _isTargetFile(file: string | UXFileInfoStub, keepFileCheckList = false) {
const fileCount = useMemo<Record<string, number>>(
{
key: "fileCount", // forceUpdate: !keepFileCheckList,
},
(ctx, prev) => {
if (keepFileCheckList && prev) return prev;
if (!keepFileCheckList && prev && !this.fileListPossiblyChanged) {
return prev;
fileCountMap = new Computed({
evaluation: (fileEventCount: number) => {
const vaultFiles = this.core.storageAccess.getFileNames().sort();
const fileCountMap: Record<string, number> = {};
for (const file of vaultFiles) {
const lc = file.toLowerCase();
if (!fileCountMap[lc]) {
fileCountMap[lc] = 1;
} else {
fileCountMap[lc]++;
}
const fileList = (ctx.get("fileList") ?? []) as FilePathWithPrefix[];
// const fileNameList = (ctx.get("fileNameList") ?? []) as FilePath[];
// const fileNames =
const vaultFiles = this.core.storageAccess.getFileNames().sort();
if (prev && vaultFiles.length == fileList.length) {
const fl3 = new Set([...fileList, ...vaultFiles]);
if (fileList.length == fl3.size && vaultFiles.length == fl3.size) {
return prev;
}
}
ctx.set("fileList", vaultFiles);
const fileCount: Record<string, number> = {};
for (const file of vaultFiles) {
const lc = file.toLowerCase();
if (!fileCount[lc]) {
fileCount[lc] = 1;
} else {
fileCount[lc]++;
}
}
return fileCount;
}
);
return fileCountMap;
},
requiresUpdate: (args, previousArgs, previousResult) => {
if (!previousResult) return true;
if (previousResult instanceof Error) return true;
if (!previousArgs) return true;
if (args[0] === previousArgs[0]) {
return false;
}
return true;
},
});
totalFileEventCount = 0;
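// Accept a file only if its lower-cased path does not collide with another file while case-insensitive checking applies.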
private async _isTargetAcceptedByFileNameDuplication(file: string | UXFileInfoStub) {
await this.fileCountMap.updateValue(this.totalFileEventCount);
const fileCountMap = this.fileCountMap.value;
if (!fileCountMap) {
this._log("File count map is not ready yet.");
return false;
}
const filepath = getStoragePathFromUXFileInfo(file);
const lc = filepath.toLowerCase();
if (this.services.setting.shouldCheckCaseInsensitively()) {
if (lc in fileCount && fileCount[lc] > 1) {
if (this.services.vault.shouldCheckCaseInsensitively()) {
if (lc in fileCountMap && fileCountMap[lc] > 1) {
this._log("File is duplicated (case-insensitive): " + filepath);
return false;
}
}
const fileNameLC = getStoragePathFromUXFileInfo(file).split("/").pop()?.toLowerCase();
if (this.settings.useIgnoreFiles) {
if (this.ignoreFiles.some((e) => e.toLowerCase() == fileNameLC)) {
// We must reload the ignore file because it may have changed.
await this.readIgnoreFile(filepath);
}
if (await this.services.vault.isIgnoredByIgnoreFile(file)) {
return false;
}
}
if (!this.localDatabase?.isTargetFile(filepath)) return false;
this._log("File is not duplicated: " + filepath, LOG_LEVEL_DEBUG);
return true;
}
ignoreFileCache = new LRUCache<string, string[] | false>(300, 250000, true);
ignoreFiles = [] as string[];
async readIgnoreFile(path: string) {
private ignoreFileCacheMap = new Map<string, string[] | undefined | false>();
private invalidateIgnoreFileCache(path: string) {
// This erases `/path/to/.ignorefile` from the cache, so the next access will reload it.
// This method should be called when an edit to the ignore file is detected.
// Do not check whether it exists in the cache; just delete it.
const key = path.toLowerCase();
this.ignoreFileCacheMap.delete(key);
}
private async getIgnoreFile(path: string): Promise<string[] | false> {
const key = path.toLowerCase();
const cached = this.ignoreFileCacheMap.get(key);
if (cached !== undefined) {
// If cached is not undefined, it is a cache hit (string[] for an existing file, false for a missing one).
return cached;
}
try {
// this._log(`[ignore]Reading ignore file: ${path}`, LOG_LEVEL_VERBOSE);
// load the ignore file
if (!(await this.core.storageAccess.isExistsIncludeHidden(path))) {
this.ignoreFileCache.set(path, false);
// this._log(`[ignore]Ignore file not found: ${path}`, LOG_LEVEL_VERBOSE);
// The file does not exist; cache it as missing (false).
this.ignoreFileCacheMap.set(key, false);
return false;
}
const file = await this.core.storageAccess.readHiddenFileText(path);
const gitignore = file.split(/\r?\n/g);
this.ignoreFileCache.set(path, gitignore);
this._log(`[ignore]Ignore file loaded: ${path}`, LOG_LEVEL_VERBOSE);
const gitignore = file
.split(/\r?\n/g)
.map((e) => e.replace(/\r$/, ""))
.map((e) => e.trim());
this.ignoreFileCacheMap.set(key, gitignore);
this._log(`[ignore] Ignore file loaded: ${path}`, LOG_LEVEL_VERBOSE);
return gitignore;
} catch (ex) {
this._log(`[ignore]Failed to read ignore file ${path}`);
// Failed to read the ignore file; reset the cache entry so it will be retried next time.
this._log(`[ignore] Failed to read ignore file ${path}`);
this._log(ex, LOG_LEVEL_VERBOSE);
this.ignoreFileCache.set(path, false);
this.ignoreFileCacheMap.set(key, undefined);
return false;
}
}
async getIgnoreFile(path: string) {
if (this.ignoreFileCache.has(path)) {
return this.ignoreFileCache.get(path) ?? false;
} else {
return await this.readIgnoreFile(path);
}
}
private async _isIgnoredByIgnoreFiles(file: string | UXFileInfoStub): Promise<boolean> {
if (!this.settings.useIgnoreFiles) {
return false;
}
const filepath = getStoragePathFromUXFileInfo(file);
if (this.ignoreFileCache.has(filepath)) {
// Renew
await this.readIgnoreFile(filepath);
}
if (!(await isAcceptedAll(filepath, this.ignoreFiles, (filename) => this.getIgnoreFile(filename)))) {
return true;
}
return false;
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
private async _isTargetAcceptedByLocalDB(file: string | UXFileInfoStub) {
const filepath = getStoragePathFromUXFileInfo(file);
if (!this.localDatabase?.isTargetFile(filepath)) {
this._log("File is not target by local DB: " + filepath);
return false;
}
this._log("File is target by local DB: " + filepath, LOG_LEVEL_DEBUG);
return await Promise.resolve(true);
}
private async _isTargetAcceptedFinally(file: string | UXFileInfoStub) {
this._log("File is target finally: " + getStoragePathFromUXFileInfo(file), LOG_LEVEL_DEBUG);
return await Promise.resolve(true);
}
private async _isTargetAcceptedByIgnoreFiles(file: string | UXFileInfoStub): Promise<boolean> {
if (!this.settings.useIgnoreFiles) {
return true;
}
const filepath = getStoragePathFromUXFileInfo(file);
this.invalidateIgnoreFileCache(filepath);
this._log("Checking ignore files for: " + filepath, LOG_LEVEL_DEBUG);
if (!(await isAcceptedAll(filepath, this.ignoreFiles, (filename) => this.getIgnoreFile(filename)))) {
this._log("File is ignored by ignore files: " + filepath);
return false;
}
this._log("File is not ignored by ignore files: " + filepath, LOG_LEVEL_DEBUG);
return true;
}
private async _isTargetIgnoredByIgnoreFiles(file: string | UXFileInfoStub) {
const result = await this._isTargetAcceptedByIgnoreFiles(file);
return !result;
}
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.vault.markFileListPossiblyChanged.setHandler(this._markFileListPossiblyChanged.bind(this));
services.path.id2path.setHandler(this._id2path.bind(this));
services.path.path2id.setHandler(this._path2id.bind(this));
services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
services.vault.isFileSizeTooLarge.setHandler(this._isFileSizeExceeded.bind(this));
services.vault.isIgnoredByIgnoreFile.setHandler(this._isIgnoredByIgnoreFiles.bind(this));
services.vault.isTargetFile.setHandler(this._isTargetFile.bind(this));
services.vault.isIgnoredByIgnoreFile.setHandler(this._isTargetIgnoredByIgnoreFiles.bind(this));
services.vault.isTargetFile.addHandler(this._isTargetAcceptedByFileNameDuplication.bind(this), 10);
services.vault.isTargetFile.addHandler(this._isTargetAcceptedByIgnoreFiles.bind(this), 20);
services.vault.isTargetFile.addHandler(this._isTargetAcceptedByLocalDB.bind(this), 30);
services.vault.isTargetFile.addHandler(this._isTargetAcceptedFinally.bind(this), 100);
services.setting.onSettingRealised.addHandler(this.refreshSettings.bind(this));
}
}
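Note: the `isTargetFile` registrations above combine several acceptance checks with ascending priorities (10, 20, 30, 100). The following is a minimal, hypothetical sketch of how such a prioritised, short-circuiting handler chain could be evaluated; the `addHandler`/priority semantics are inferred from the registrations above, not the actual InjectableServices API.
// Hypothetical sketch of a prioritised AND-chain of async acceptance handlers.
type AcceptanceHandler<A extends unknown[]> = (...args: A) => Promise<boolean>;
class AcceptanceChain<A extends unknown[]> {
    private handlers: { priority: number; handler: AcceptanceHandler<A> }[] = [];
    addHandler(handler: AcceptanceHandler<A>, priority = 50): void {
        this.handlers.push({ priority, handler });
        // Lower priority values run first (10 -> 20 -> 30 -> 100, as registered above).
        this.handlers.sort((a, b) => a.priority - b.priority);
    }
    async invoke(...args: A): Promise<boolean> {
        for (const { handler } of this.handlers) {
            // The first handler that rejects short-circuits the whole chain.
            if (!(await handler(...args))) return false;
        }
        return true;
    }
}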

View File

@@ -6,9 +6,9 @@ import {
type EntryLeaf,
type LoadedEntry,
type MetaEntry,
} from "@/lib/src/common/types";
} from "@lib/common/types";
import type { ModuleReplicator } from "./ModuleReplicator";
import { getPath, isChunk, isValidPath } from "@/common/utils";
import { isChunk, isValidPath } from "@/common/utils";
import type { LiveSyncCore } from "@/main";
import {
LOG_LEVEL_DEBUG,
@@ -17,8 +17,8 @@ import {
LOG_LEVEL_VERBOSE,
Logger,
type LOG_LEVEL,
} from "@/lib/src/common/logger";
import { fireAndForget, isAnyNote, throttle } from "@/lib/src/common/utils";
} from "@lib/common/logger";
import { fireAndForget, isAnyNote, throttle } from "@lib/common/utils";
import { Semaphore } from "octagonal-wheels/concurrency/semaphore_v2";
import { serialized } from "octagonal-wheels/concurrency/lock";
import type { ReactiveSource } from "octagonal-wheels/dataobject/reactive_v2";
@@ -58,6 +58,10 @@ export class ReplicateResultProcessor {
return this.replicator.core;
}
getPath(entry: AnyEntry): string {
return this.services.path.getPath(entry);
}
public suspend() {
this._suspended = true;
}
@@ -158,7 +162,8 @@ export class ReplicateResultProcessor {
* Report the current status.
*/
protected reportStatus() {
this.core.replicationResultCount.value = this._queuedChanges.length + this._processingChanges.length;
this.services.replication.replicationResultCount.value =
this._queuedChanges.length + this._processingChanges.length;
}
/**
@@ -230,7 +235,7 @@ export class ReplicateResultProcessor {
*/
protected enqueueChange(doc: PouchDB.Core.ExistingDocument<EntryDoc>) {
const old = this._queuedChanges.find((e) => e._id == doc._id);
const path = "path" in doc ? getPath(doc) : "<unknown>";
const path = "path" in doc ? this.getPath(doc) : "<unknown>";
const docNote = `${path} (${shortenId(doc._id)}, ${shortenRev(doc._rev)})`;
if (old) {
if (old._rev == doc._rev) {
@@ -322,7 +327,7 @@ export class ReplicateResultProcessor {
const docMtime = change.mtime ?? 0;
const maxMTime = this.replicator.settings.maxMTimeForReflectEvents;
if (maxMTime > 0 && docMtime > maxMTime) {
const docPath = getPath(change);
const docPath = this.getPath(change);
this.log(
`Processing ${docPath} has been skipped due to modification time (${new Date(
docMtime * 1000
@@ -336,7 +341,7 @@ export class ReplicateResultProcessor {
if (await this.services.replication.processVirtualDocument(change)) return;
// If the document is version info, check compatibility and return.
if (isAnyNote(change)) {
const docPath = getPath(change);
const docPath = this.getPath(change);
if (!(await this.services.vault.isTargetFile(docPath))) {
this.log(`Skipped: ${docPath}`, LOG_LEVEL_VERBOSE);
return;
@@ -377,13 +382,13 @@ export class ReplicateResultProcessor {
releaser();
}
}
}, this.replicator.core.databaseQueueCount);
}, this.services.replication.databaseQueueCount);
}
// Phase 2.1: process the document and apply to storage
// This function is serialized per document to avoid race-condition for the same document.
private _applyToDatabase(doc_: PouchDB.Core.ExistingDocument<AnyEntry>) {
const dbDoc = doc_ as LoadedEntry; // It has no `data`
const path = getPath(dbDoc);
const path = this.getPath(dbDoc);
return serialized(`replication-process:${dbDoc._id}`, async () => {
const docNote = `${path} (${shortenId(dbDoc._id)}, ${shortenRev(dbDoc._rev)})`;
const isRequired = await this.checkIsChangeRequiredForDatabaseProcessing(dbDoc);
@@ -409,7 +414,7 @@ export class ReplicateResultProcessor {
if (await this.services.replication.processOptionalSynchroniseResult(dbDoc)) {
// Already processed
this.log(`Processed by other processor: ${docNote}`, LOG_LEVEL_DEBUG);
} else if (isValidPath(getPath(doc))) {
} else if (isValidPath(this.getPath(doc))) {
// Apply to storage if the path is valid
await this.applyToStorage(doc as MetaEntry);
this.log(`Processed: ${docNote}`, LOG_LEVEL_DEBUG);
@@ -428,7 +433,7 @@ export class ReplicateResultProcessor {
protected applyToStorage(entry: MetaEntry) {
return this.withCounting(async () => {
await this.services.replication.processSynchroniseResult(entry);
}, this.replicator.core.storageApplyingCount);
}, this.services.replication.storageApplyingCount);
}
/**
@@ -437,7 +442,7 @@ export class ReplicateResultProcessor {
* @returns True if processing is required; false otherwise
*/
protected async checkIsChangeRequiredForDatabaseProcessing(dbDoc: LoadedEntry): Promise<boolean> {
const path = getPath(dbDoc);
const path = this.getPath(dbDoc);
try {
const savedDoc = await this.localDatabase.getRaw<LoadedEntry>(dbDoc._id, {
conflicts: true,

View File

@@ -71,10 +71,10 @@ export class ModuleConflictChecker extends AbstractModule {
delay: 0,
keepResultUntilDownstreamConnected: true,
pipeTo: this.conflictResolveQueue,
totalRemainingReactiveSource: this.core.conflictProcessQueueCount,
totalRemainingReactiveSource: this.services.conflict.conflictProcessQueueCount,
}
);
onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
override onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
services.conflict.queueCheckForIfOpen.setHandler(this._queueConflictCheckIfOpen.bind(this));
services.conflict.queueCheckFor.setHandler(this._queueConflictCheck.bind(this));
services.conflict.ensureAllProcessed.setHandler(this._waitForAllConflictProcessed.bind(this));

View File

@@ -211,10 +211,30 @@ export class ModuleConflictResolver extends AbstractModule {
}
return true;
}
private async _resolveAllConflictedFilesByNewerOnes() {
this._log(`Resolving conflicts by newer ones`, LOG_LEVEL_NOTICE);
onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
const files = this.core.storageAccess.getFileNames();
let i = 0;
for (const file of files) {
if (i++ % 10 == 0)
this._log(
`Checking and processing ${i} / ${files.length}`,
LOG_LEVEL_NOTICE,
"resolveAllConflictedFilesByNewerOnes"
);
await this.services.conflict.resolveByNewest(file);
}
this._log(`Done!`, LOG_LEVEL_NOTICE, "resolveAllConflictedFilesByNewerOnes");
}
override onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
services.conflict.resolveByDeletingRevision.setHandler(this._resolveConflictByDeletingRev.bind(this));
services.conflict.resolve.setHandler(this._resolveConflict.bind(this));
services.conflict.resolveByNewest.setHandler(this._anyResolveConflictByNewest.bind(this));
services.conflict.resolveAllConflictedFilesByNewerOnes.setHandler(
this._resolveAllConflictedFilesByNewerOnes.bind(this)
);
}
}

View File

@@ -12,8 +12,8 @@ import type { LiveSyncCore } from "../../main.ts";
import FetchEverything from "../features/SetupWizard/dialogs/FetchEverything.svelte";
import RebuildEverything from "../features/SetupWizard/dialogs/RebuildEverything.svelte";
import { extractObject } from "octagonal-wheels/object";
import { SvelteDialogManagerBase } from "@/lib/src/UI/svelteDialog.ts";
import type { ServiceContext } from "@/lib/src/services/base/ServiceBase.ts";
import { SvelteDialogManagerBase } from "@lib/UI/svelteDialog.ts";
import type { ServiceContext } from "@lib/services/base/ServiceBase.ts";
export class ModuleRedFlag extends AbstractModule {
async isFlagFileExist(path: string) {
@@ -324,7 +324,7 @@ export class ModuleRedFlag extends AbstractModule {
}
return true;
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
super.onBindFunction(core, services);
services.appLifecycle.onLayoutReady.addHandler(this._everyOnLayoutReady.bind(this));
}

View File

@@ -1,22 +0,0 @@
import type { InjectableServiceHub } from "../../lib/src/services/InjectableServices.ts";
import type { LiveSyncCore } from "../../main.ts";
import { AbstractModule } from "../AbstractModule.ts";
export class ModuleRemoteGovernor extends AbstractModule {
private async _markRemoteLocked(lockByClean: boolean = false): Promise<void> {
return await this.core.replicator.markRemoteLocked(this.settings, true, lockByClean);
}
private async _markRemoteUnlocked(): Promise<void> {
return await this.core.replicator.markRemoteLocked(this.settings, false, false);
}
private async _markRemoteResolved(): Promise<void> {
return await this.core.replicator.markRemoteResolved(this.settings);
}
onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
services.remote.markLocked.setHandler(this._markRemoteLocked.bind(this));
services.remote.markUnlocked.setHandler(this._markRemoteUnlocked.bind(this));
services.remote.markResolved.setHandler(this._markRemoteResolved.bind(this));
}
}

View File

@@ -284,7 +284,7 @@ export class ModuleResolvingMismatchedTweaks extends AbstractModule {
return { result: false, requireFetch: false };
}
onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
override onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
services.tweakValue.fetchRemotePreferred.setHandler(this._fetchRemotePreferredTweakValues.bind(this));
services.tweakValue.checkAndAskResolvingMismatched.setHandler(
this._checkAndAskResolvingMismatchedTweaks.bind(this)

View File

@@ -1,396 +0,0 @@
import { TFile, TFolder, type ListedFiles } from "@/deps.ts";
import { SerializedFileAccess } from "./storageLib/SerializedFileAccess";
import { AbstractObsidianModule } from "../AbstractObsidianModule.ts";
import { LOG_LEVEL_INFO, LOG_LEVEL_VERBOSE } from "octagonal-wheels/common/logger";
import type {
FilePath,
FilePathWithPrefix,
UXDataWriteOptions,
UXFileInfo,
UXFileInfoStub,
UXFolderInfo,
UXStat,
} from "../../lib/src/common/types";
import { TFileToUXFileInfoStub, TFolderToUXFileInfoStub } from "./storageLib/utilObsidian.ts";
import { StorageEventManagerObsidian, type StorageEventManager } from "./storageLib/StorageEventManager";
import type { StorageAccess } from "../interfaces/StorageAccess";
import { createBlob, type CustomRegExp } from "../../lib/src/common/utils";
import { serialized } from "octagonal-wheels/concurrency/lock_v2";
import type { LiveSyncCore } from "../../main.ts";
import type ObsidianLiveSyncPlugin from "../../main.ts";
import type { InjectableServiceHub } from "../../lib/src/services/InjectableServices.ts";
const fileLockPrefix = "file-lock:";
export class ModuleFileAccessObsidian extends AbstractObsidianModule implements StorageAccess {
processingFiles: Set<FilePathWithPrefix> = new Set();
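// Serialise reads and writes per file path, and record the path as "in processing" while the callback runs.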
processWriteFile<T>(file: UXFileInfoStub | FilePathWithPrefix, proc: () => Promise<T>): Promise<T> {
const path = typeof file === "string" ? file : file.path;
return serialized(`${fileLockPrefix}${path}`, async () => {
try {
this.processingFiles.add(path);
return await proc();
} finally {
this.processingFiles.delete(path);
}
});
}
processReadFile<T>(file: UXFileInfoStub | FilePathWithPrefix, proc: () => Promise<T>): Promise<T> {
const path = typeof file === "string" ? file : file.path;
return serialized(`${fileLockPrefix}${path}`, async () => {
try {
this.processingFiles.add(path);
return await proc();
} finally {
this.processingFiles.delete(path);
}
});
}
isFileProcessing(file: UXFileInfoStub | FilePathWithPrefix): boolean {
const path = typeof file === "string" ? file : file.path;
return this.processingFiles.has(path);
}
vaultAccess!: SerializedFileAccess;
vaultManager: StorageEventManager = new StorageEventManagerObsidian(this.plugin, this.core, this);
restoreState() {
return this.vaultManager.restoreState();
}
private _everyOnload(): Promise<boolean> {
this.core.storageAccess = this;
return Promise.resolve(true);
}
async _everyOnFirstInitialize(): Promise<boolean> {
await this.vaultManager.beginWatch();
return Promise.resolve(true);
}
// $$flushFileEventQueue(): void {
// this.vaultManager.flushQueue();
// }
async _everyCommitPendingFileEvent(): Promise<boolean> {
await this.vaultManager.waitForIdle();
return Promise.resolve(true);
}
_everyOnloadStart(): Promise<boolean> {
this.vaultAccess = new SerializedFileAccess(this.app, this.plugin, this);
return Promise.resolve(true);
}
_isStorageInsensitive(): boolean {
return this.vaultAccess.isStorageInsensitive();
}
_shouldCheckCaseInsensitive(): boolean {
if (this.services.vault.isStorageInsensitive()) return false;
return !this.settings.handleFilenameCaseSensitive;
}
async writeFileAuto(path: string, data: string | ArrayBuffer, opt?: UXDataWriteOptions): Promise<boolean> {
const file = this.vaultAccess.getAbstractFileByPath(path);
if (file instanceof TFile) {
return this.vaultAccess.vaultModify(file, data, opt);
} else if (file === null) {
if (!path.endsWith(".md")) {
// Very rare case: we encountered this with the `writing-goals-history.csv` file.
// That file does not appear in the File Explorer, but it exists in the vault.
// Hence, we cannot retrieve it from the vault via getAbstractFileByPath, and we cannot write it via vaultModify;
// doing so raises a `File already exists` error.
// Therefore, we need to write it via adapterWrite.
// There may be other files like this, so they are also written via adapterWrite
// (limited to non-md files). This is a workaround, and it is unclear whether it is the right solution.
// Obsidian may have been patched since, but writing directly is probably the safer approach anyway.
// However, it is uncertain whether changes to such a file trigger a file-change event.
await this.vaultAccess.adapterWrite(path, data, opt);
// For safety, check existence
return await this.vaultAccess.adapterExists(path);
} else {
return (await this.vaultAccess.vaultCreate(path, data, opt)) instanceof TFile;
}
} else {
this._log(`Could not write file (Possibly already exists as a folder): ${path}`, LOG_LEVEL_VERBOSE);
return false;
}
}
readFileAuto(path: string): Promise<string | ArrayBuffer> {
const file = this.vaultAccess.getAbstractFileByPath(path);
if (file instanceof TFile) {
return this.vaultAccess.vaultRead(file);
} else {
throw new Error(`Could not read file (Possibly does not exist): ${path}`);
}
}
readFileText(path: string): Promise<string> {
const file = this.vaultAccess.getAbstractFileByPath(path);
if (file instanceof TFile) {
return this.vaultAccess.vaultRead(file);
} else {
throw new Error(`Could not read file (Possibly does not exist): ${path}`);
}
}
isExists(path: string): Promise<boolean> {
return Promise.resolve(this.vaultAccess.getAbstractFileByPath(path) instanceof TFile);
}
async writeHiddenFileAuto(path: string, data: string | ArrayBuffer, opt?: UXDataWriteOptions): Promise<boolean> {
try {
await this.vaultAccess.adapterWrite(path, data, opt);
return true;
} catch (e) {
this._log(`Could not write hidden file: ${path}`, LOG_LEVEL_VERBOSE);
this._log(e, LOG_LEVEL_VERBOSE);
return false;
}
}
async appendHiddenFile(path: string, data: string, opt?: UXDataWriteOptions): Promise<boolean> {
try {
await this.vaultAccess.adapterAppend(path, data, opt);
return true;
} catch (e) {
this._log(`Could not append hidden file: ${path}`, LOG_LEVEL_VERBOSE);
this._log(e, LOG_LEVEL_VERBOSE);
return false;
}
}
stat(path: string): Promise<UXStat | null> {
const file = this.vaultAccess.getAbstractFileByPath(path);
if (file === null) return Promise.resolve(null);
if (file instanceof TFile) {
return Promise.resolve({
ctime: file.stat.ctime,
mtime: file.stat.mtime,
size: file.stat.size,
type: "file",
});
} else {
throw new Error(`Could not stat file (Possibly does not exist): ${path}`);
}
}
statHidden(path: string): Promise<UXStat | null> {
return this.vaultAccess.tryAdapterStat(path);
}
async removeHidden(path: string): Promise<boolean> {
try {
await this.vaultAccess.adapterRemove(path);
if ((await this.vaultAccess.tryAdapterStat(path)) !== null) {
return false;
}
return true;
} catch (e) {
this._log(`Could not remove hidden file: ${path}`, LOG_LEVEL_VERBOSE);
this._log(e, LOG_LEVEL_VERBOSE);
return false;
}
}
async readHiddenFileAuto(path: string): Promise<string | ArrayBuffer> {
return await this.vaultAccess.adapterReadAuto(path);
}
async readHiddenFileText(path: string): Promise<string> {
return await this.vaultAccess.adapterRead(path);
}
async readHiddenFileBinary(path: string): Promise<ArrayBuffer> {
return await this.vaultAccess.adapterReadBinary(path);
}
async isExistsIncludeHidden(path: string): Promise<boolean> {
return (await this.vaultAccess.tryAdapterStat(path)) !== null;
}
async ensureDir(path: string): Promise<boolean> {
try {
await this.vaultAccess.ensureDirectory(path);
return true;
} catch (e) {
this._log(`Could not ensure directory: ${path}`, LOG_LEVEL_VERBOSE);
this._log(e, LOG_LEVEL_VERBOSE);
return false;
}
}
triggerFileEvent(event: string, path: string): void {
const file = this.vaultAccess.getAbstractFileByPath(path);
if (file === null) return;
this.vaultAccess.trigger(event, file);
}
async triggerHiddenFile(path: string): Promise<void> {
//@ts-ignore internal function
await this.app.vault.adapter.reconcileInternalFile(path);
}
// getFileStub(file: TFile): UXFileInfoStub {
// return TFileToUXFileInfoStub(file);
// }
getFileStub(path: string): UXFileInfoStub | null {
const file = this.vaultAccess.getAbstractFileByPath(path);
if (file instanceof TFile) {
return TFileToUXFileInfoStub(file);
} else {
return null;
}
}
async readStubContent(stub: UXFileInfoStub): Promise<UXFileInfo | false> {
const file = this.vaultAccess.getAbstractFileByPath(stub.path);
if (!(file instanceof TFile)) {
this._log(`Could not read file (Possibly does not exist or a folder): ${stub.path}`, LOG_LEVEL_VERBOSE);
return false;
}
const data = await this.vaultAccess.vaultReadAuto(file);
return {
...stub,
...TFileToUXFileInfoStub(file),
body: createBlob(data),
};
}
getStub(path: string): UXFileInfoStub | UXFolderInfo | null {
const file = this.vaultAccess.getAbstractFileByPath(path);
if (file instanceof TFile) {
return TFileToUXFileInfoStub(file);
} else if (file instanceof TFolder) {
return TFolderToUXFileInfoStub(file);
}
return null;
}
getFiles(): UXFileInfoStub[] {
return this.vaultAccess.getFiles().map((f) => TFileToUXFileInfoStub(f));
}
getFileNames(): FilePath[] {
return this.vaultAccess.getFiles().map((f) => f.path as FilePath);
}
async getFilesIncludeHidden(
basePath: string,
includeFilter?: CustomRegExp[],
excludeFilter?: CustomRegExp[],
skipFolder: string[] = [".git", ".trash", "node_modules"]
): Promise<FilePath[]> {
let w: ListedFiles;
try {
w = await this.app.vault.adapter.list(basePath);
} catch (ex) {
this._log(`Could not traverse(getFilesIncludeHidden):${basePath}`, LOG_LEVEL_INFO);
this._log(ex, LOG_LEVEL_VERBOSE);
return [];
}
skipFolder = skipFolder.map((e) => e.toLowerCase());
let files = [] as string[];
for (const file of w.files) {
if (includeFilter && includeFilter.length > 0) {
if (!includeFilter.some((e) => e.test(file))) continue;
}
if (excludeFilter && excludeFilter.some((ee) => ee.test(file))) {
continue;
}
if (await this.services.vault.isIgnoredByIgnoreFile(file)) continue;
files.push(file);
}
for (const v of w.folders) {
const folderName = (v.split("/").pop() ?? "").toLowerCase();
if (skipFolder.some((e) => folderName === e)) {
continue;
}
if (excludeFilter && excludeFilter.some((e) => e.test(v))) {
continue;
}
if (await this.services.vault.isIgnoredByIgnoreFile(v)) {
continue;
}
// OK, deep dive!
files = files.concat(await this.getFilesIncludeHidden(v, includeFilter, excludeFilter, skipFolder));
}
return files as FilePath[];
}
async touched(file: UXFileInfoStub | FilePathWithPrefix): Promise<void> {
const path = typeof file === "string" ? file : file.path;
await this.vaultAccess.touch(path as FilePath);
}
recentlyTouched(file: UXFileInfoStub | FilePathWithPrefix): boolean {
const xFile = typeof file === "string" ? (this.vaultAccess.getAbstractFileByPath(file) as TFile) : file;
if (xFile === null) return false;
if (xFile instanceof TFolder) return false;
return this.vaultAccess.recentlyTouched(xFile);
}
clearTouched(): void {
this.vaultAccess.clearTouched();
}
delete(file: FilePathWithPrefix | UXFileInfoStub | string, force: boolean): Promise<void> {
const xPath = typeof file === "string" ? file : file.path;
const xFile = this.vaultAccess.getAbstractFileByPath(xPath);
if (xFile === null) return Promise.resolve();
if (!(xFile instanceof TFile) && !(xFile instanceof TFolder)) return Promise.resolve();
return this.vaultAccess.delete(xFile, force);
}
trash(file: FilePathWithPrefix | UXFileInfoStub | string, system: boolean): Promise<void> {
const xPath = typeof file === "string" ? file : file.path;
const xFile = this.vaultAccess.getAbstractFileByPath(xPath);
if (xFile === null) return Promise.resolve();
if (!(xFile instanceof TFile) && !(xFile instanceof TFolder)) return Promise.resolve();
return this.vaultAccess.trash(xFile, system);
}
// $readFileBinary(path: string): Promise<ArrayBuffer> {
// const file = this.vaultAccess.getAbstractFileByPath(path);
// if (file instanceof TFile) {
// return this.vaultAccess.vaultReadBinary(file);
// } else {
// throw new Error(`Could not read file (Possibly does not exist): ${path}`);
// }
// }
// async $appendFileAuto(path: string, data: string | ArrayBuffer, opt?: DataWriteOptions): Promise<boolean> {
// const file = this.vaultAccess.getAbstractFileByPath(path);
// if (file instanceof TFile) {
// return this.vaultAccess.a(file, data, opt);
// } else if (file !== null) {
// return await this.vaultAccess.vaultCreate(path, data, opt) instanceof TFile;
// } else {
// this._log(`Could not append file (Possibly already exists as a folder): ${path}`, LOG_LEVEL_VERBOSE);
// return false;
// }
// }
async __deleteVaultItem(file: TFile | TFolder) {
if (file instanceof TFile) {
if (!(await this.services.vault.isTargetFile(file.path))) return;
}
const dir = file.parent;
if (this.settings.trashInsteadDelete) {
await this.vaultAccess.trash(file, false);
} else {
await this.vaultAccess.delete(file, true);
}
this._log(`xxx <- STORAGE (deleted) ${file.path}`);
if (dir) {
this._log(`files: ${dir.children.length}`);
if (dir.children.length == 0) {
if (!this.settings.doNotDeleteFolder) {
this._log(
`All files under the parent directory (${dir.path}) have been deleted, so delete this one.`
);
await this.__deleteVaultItem(dir);
}
}
}
}
async deleteVaultItem(fileSrc: FilePathWithPrefix | UXFileInfoStub | UXFolderInfo): Promise<void> {
const path = typeof fileSrc === "string" ? fileSrc : fileSrc.path;
const file = this.vaultAccess.getAbstractFileByPath(path);
if (file === null) return;
if (file instanceof TFile || file instanceof TFolder) {
return await this.__deleteVaultItem(file);
}
}
constructor(plugin: ObsidianLiveSyncPlugin, core: LiveSyncCore) {
super(plugin, core);
}
onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
services.vault.isStorageInsensitive.setHandler(this._isStorageInsensitive.bind(this));
services.setting.shouldCheckCaseInsensitively.setHandler(this._shouldCheckCaseInsensitive.bind(this));
services.appLifecycle.onFirstInitialise.addHandler(this._everyOnFirstInitialize.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
services.fileProcessing.commitPendingFileEvents.addHandler(this._everyCommitPendingFileEvent.bind(this));
}
}
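
The `processWriteFile`/`processReadFile` pair above serialises storage access per path and records which files the plugin itself is touching, so the event manager can skip self-caused events. Below is a minimal sketch of that pattern, assuming a simple promise-chain mutex in place of the `serialized` helper from octagonal-wheels (behaviour assumed equivalent):

```ts
// Minimal per-path lock: each key holds a promise chain, so operations on the
// same path run one at a time while different paths proceed in parallel.
const locks = new Map<string, Promise<unknown>>();

function serializedSketch<T>(key: string, proc: () => Promise<T>): Promise<T> {
    const prev = locks.get(key) ?? Promise.resolve();
    const next = prev.then(proc, proc); // run after the previous task, whether it succeeded or failed
    locks.set(key, next.catch(() => {})); // keep the chain alive even if this task rejects
    return next;
}

const processingFiles = new Set<string>();

// Mirror of processWriteFile: mark the path as "being processed" for the duration,
// so storage event handlers can ignore changes the plugin itself caused.
async function processWriteFileSketch<T>(path: string, proc: () => Promise<T>): Promise<T> {
    return serializedSketch(`file-lock:${path}`, async () => {
        try {
            processingFiles.add(path);
            return await proc();
        } finally {
            processingFiles.delete(path);
        }
    });
}
```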

View File

@@ -13,7 +13,7 @@ class AutoClosableModal extends Modal {
this._closeByUnload = this._closeByUnload.bind(this);
eventHub.once(EVENT_PLUGIN_UNLOADED, this._closeByUnload);
}
onClose() {
override onClose() {
eventHub.off(EVENT_PLUGIN_UNLOADED, this._closeByUnload);
}
}
@@ -43,7 +43,7 @@ export class InputStringDialog extends AutoClosableModal {
this.isPassword = isPassword;
}
onOpen() {
override onOpen() {
const { contentEl } = this;
this.titleEl.setText(this.title);
const formEl = contentEl.createDiv();
@@ -75,7 +75,7 @@ export class InputStringDialog extends AutoClosableModal {
);
}
onClose() {
override onClose() {
super.onClose();
const { contentEl } = this;
contentEl.empty();
@@ -87,7 +87,7 @@ export class InputStringDialog extends AutoClosableModal {
}
}
export class PopoverSelectString extends FuzzySuggestModal<string> {
app: App;
_app: App;
callback: ((e: string) => void) | undefined = () => {};
getItemsFun: () => string[] = () => {
return ["yes", "no"];
@@ -101,7 +101,7 @@ export class PopoverSelectString extends FuzzySuggestModal<string> {
callback: (e: string) => void
) {
super(app);
this.app = app;
this._app = app;
this.setPlaceholder((placeholder ?? "y/n) ") + note);
if (getItemsFun) this.getItemsFun = getItemsFun;
this.callback = callback;
@@ -120,7 +120,7 @@ export class PopoverSelectString extends FuzzySuggestModal<string> {
this.callback?.(item);
this.callback = undefined;
}
onClose(): void {
override onClose(): void {
setTimeout(() => {
if (this.callback) {
this.callback("");
@@ -184,7 +184,7 @@ export class MessageBox<T extends readonly string[]> extends AutoClosableModal {
}
}
onOpen() {
override onOpen() {
const { contentEl } = this;
this.titleEl.setText(this.title);
const div = contentEl.createDiv();
@@ -242,7 +242,7 @@ export class MessageBox<T extends readonly string[]> extends AutoClosableModal {
}
}
onClose() {
override onClose() {
super.onClose();
const { contentEl } = this;
contentEl.empty();
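
The changes above add TypeScript `override` modifiers to the dialog hooks and rename the shadowing `app` field to `_app` so it no longer hides the `Modal`'s own property. With `noImplicitOverride` enabled in `tsconfig.json`, the modifier turns a base-class mismatch into a compile error; a small illustration with hypothetical `Base`/`Child` classes (not from the plugin):

```ts
class Base {
    onClose(): void {}
}

class Child extends Base {
    // If Base.onClose were renamed or removed, this line would fail to compile
    // instead of silently becoming a new, never-called method.
    override onClose(): void {
        super.onClose();
    }
}
```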

View File

@@ -1,231 +0,0 @@
import { type App, TFile, type DataWriteOptions, TFolder, TAbstractFile } from "../../../deps.ts";
import { Logger } from "../../../lib/src/common/logger.ts";
import { isPlainText } from "../../../lib/src/string_and_binary/path.ts";
import type { FilePath, HasSettings, UXFileInfoStub } from "../../../lib/src/common/types.ts";
import { createBinaryBlob, isDocContentSame } from "../../../lib/src/common/utils.ts";
import type { InternalFileInfo } from "../../../common/types.ts";
import { markChangesAreSame } from "../../../common/utils.ts";
import type { StorageAccess } from "../../interfaces/StorageAccess.ts";
function toArrayBuffer(arr: Uint8Array<ArrayBuffer> | ArrayBuffer | DataView<ArrayBuffer>): ArrayBuffer {
if (arr instanceof Uint8Array) {
return arr.buffer;
}
if (arr instanceof DataView) {
return arr.buffer;
}
return arr;
}
export class SerializedFileAccess {
app: App;
plugin: HasSettings<{ handleFilenameCaseSensitive: boolean }>;
storageAccess: StorageAccess;
constructor(app: App, plugin: SerializedFileAccess["plugin"], storageAccess: StorageAccess) {
this.app = app;
this.plugin = plugin;
this.storageAccess = storageAccess;
}
async tryAdapterStat(file: TFile | string) {
const path = file instanceof TFile ? file.path : file;
return await this.storageAccess.processReadFile(path as FilePath, async () => {
if (!(await this.app.vault.adapter.exists(path))) return null;
return this.app.vault.adapter.stat(path);
});
}
async adapterStat(file: TFile | string) {
const path = file instanceof TFile ? file.path : file;
return await this.storageAccess.processReadFile(path as FilePath, () => this.app.vault.adapter.stat(path));
}
async adapterExists(file: TFile | string) {
const path = file instanceof TFile ? file.path : file;
return await this.storageAccess.processReadFile(path as FilePath, () => this.app.vault.adapter.exists(path));
}
async adapterRemove(file: TFile | string) {
const path = file instanceof TFile ? file.path : file;
return await this.storageAccess.processReadFile(path as FilePath, () => this.app.vault.adapter.remove(path));
}
async adapterRead(file: TFile | string) {
const path = file instanceof TFile ? file.path : file;
return await this.storageAccess.processReadFile(path as FilePath, () => this.app.vault.adapter.read(path));
}
async adapterReadBinary(file: TFile | string) {
const path = file instanceof TFile ? file.path : file;
return await this.storageAccess.processReadFile(path as FilePath, () =>
this.app.vault.adapter.readBinary(path)
);
}
async adapterReadAuto(file: TFile | string) {
const path = file instanceof TFile ? file.path : file;
if (isPlainText(path)) {
return await this.storageAccess.processReadFile(path as FilePath, () => this.app.vault.adapter.read(path));
}
return await this.storageAccess.processReadFile(path as FilePath, () =>
this.app.vault.adapter.readBinary(path)
);
}
async adapterWrite(
file: TFile | string,
data: string | ArrayBuffer | Uint8Array<ArrayBuffer>,
options?: DataWriteOptions
) {
const path = file instanceof TFile ? file.path : file;
if (typeof data === "string") {
return await this.storageAccess.processWriteFile(path as FilePath, () =>
this.app.vault.adapter.write(path, data, options)
);
} else {
return await this.storageAccess.processWriteFile(path as FilePath, () =>
this.app.vault.adapter.writeBinary(path, toArrayBuffer(data), options)
);
}
}
async vaultCacheRead(file: TFile) {
return await this.storageAccess.processReadFile(file.path as FilePath, () => this.app.vault.cachedRead(file));
}
async vaultRead(file: TFile) {
return await this.storageAccess.processReadFile(file.path as FilePath, () => this.app.vault.read(file));
}
async vaultReadBinary(file: TFile) {
return await this.storageAccess.processReadFile(file.path as FilePath, () => this.app.vault.readBinary(file));
}
async vaultReadAuto(file: TFile) {
const path = file.path;
if (isPlainText(path)) {
return await this.storageAccess.processReadFile(path as FilePath, () => this.app.vault.read(file));
}
return await this.storageAccess.processReadFile(path as FilePath, () => this.app.vault.readBinary(file));
}
async vaultModify(file: TFile, data: string | ArrayBuffer | Uint8Array<ArrayBuffer>, options?: DataWriteOptions) {
if (typeof data === "string") {
return await this.storageAccess.processWriteFile(file.path as FilePath, async () => {
const oldData = await this.app.vault.read(file);
if (data === oldData) {
if (options && options.mtime) markChangesAreSame(file.path, file.stat.mtime, options.mtime);
return true;
}
await this.app.vault.modify(file, data, options);
return true;
});
} else {
return await this.storageAccess.processWriteFile(file.path as FilePath, async () => {
const oldData = await this.app.vault.readBinary(file);
if (await isDocContentSame(createBinaryBlob(oldData), createBinaryBlob(data))) {
if (options && options.mtime) markChangesAreSame(file.path, file.stat.mtime, options.mtime);
return true;
}
await this.app.vault.modifyBinary(file, toArrayBuffer(data), options);
return true;
});
}
}
async vaultCreate(
path: string,
data: string | ArrayBuffer | Uint8Array<ArrayBuffer>,
options?: DataWriteOptions
): Promise<TFile> {
if (typeof data === "string") {
return await this.storageAccess.processWriteFile(path as FilePath, () =>
this.app.vault.create(path, data, options)
);
} else {
return await this.storageAccess.processWriteFile(path as FilePath, () =>
this.app.vault.createBinary(path, toArrayBuffer(data), options)
);
}
}
trigger(name: string, ...data: any[]) {
return this.app.vault.trigger(name, ...data);
}
async adapterAppend(normalizedPath: string, data: string, options?: DataWriteOptions) {
return await this.app.vault.adapter.append(normalizedPath, data, options);
}
async delete(file: TFile | TFolder, force = false) {
return await this.storageAccess.processWriteFile(file.path as FilePath, () =>
this.app.vault.delete(file, force)
);
}
async trash(file: TFile | TFolder, force = false) {
return await this.storageAccess.processWriteFile(file.path as FilePath, () =>
this.app.vault.trash(file, force)
);
}
isStorageInsensitive(): boolean {
//@ts-ignore
return this.app.vault.adapter.insensitive ?? true;
}
getAbstractFileByPathInsensitive(path: FilePath | string): TAbstractFile | null {
//@ts-ignore
return this.app.vault.getAbstractFileByPathInsensitive(path);
}
getAbstractFileByPath(path: FilePath | string): TAbstractFile | null {
if (!this.plugin.settings.handleFilenameCaseSensitive || this.isStorageInsensitive()) {
return this.getAbstractFileByPathInsensitive(path);
}
return this.app.vault.getAbstractFileByPath(path);
}
getFiles() {
return this.app.vault.getFiles();
}
async ensureDirectory(fullPath: string) {
const pathElements = fullPath.split("/");
pathElements.pop();
let c = "";
for (const v of pathElements) {
c += v;
try {
await this.app.vault.adapter.mkdir(c);
} catch (ex: any) {
if (ex?.message == "Folder already exists.") {
// Skip if already exists.
} else {
Logger("Folder Create Error");
Logger(ex);
}
}
c += "/";
}
}
touchedFiles: string[] = [];
_statInternal(file: FilePath) {
return this.app.vault.adapter.stat(file);
}
async touch(file: TFile | FilePath) {
const path = file instanceof TFile ? (file.path as FilePath) : file;
const statOrg = file instanceof TFile ? file.stat : await this._statInternal(path);
const stat = statOrg || { mtime: 0, size: 0 };
const key = `${path}-${stat.mtime}-${stat.size}`;
this.touchedFiles.unshift(key);
this.touchedFiles = this.touchedFiles.slice(0, 100);
}
recentlyTouched(file: TFile | InternalFileInfo | UXFileInfoStub) {
const key =
"stat" in file
? `${file.path}-${file.stat.mtime}-${file.stat.size}`
: `${file.path}-${file.mtime}-${file.size}`;
if (this.touchedFiles.indexOf(key) == -1) return false;
return true;
}
clearTouched() {
this.touchedFiles = [];
}
}
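
`vaultModify` above reads the current content first and only calls `modify()` when it actually differs, marking mtime-only differences as "the same change" so they do not bounce back through synchronisation. A minimal sketch of that check, with `readFile`/`writeFile` as hypothetical stand-ins for the vault API:

```ts
// Write only when the content has really changed; identical content is treated
// as already written (the real code additionally calls markChangesAreSame here
// so a differing mtime alone is not mistaken for a new change).
async function writeIfChanged(
    path: string,
    data: string,
    readFile: (p: string) => Promise<string>,
    writeFile: (p: string, d: string) => Promise<void>
): Promise<boolean> {
    const oldData = await readFile(path);
    if (oldData === data) {
        return true; // nothing to do, and no file event will be emitted
    }
    await writeFile(path, data);
    return true;
}
```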

View File

@@ -1,650 +0,0 @@
import { TAbstractFile, TFile, TFolder } from "../../../deps.ts";
import { Logger } from "../../../lib/src/common/logger.ts";
import { shouldBeIgnored } from "../../../lib/src/string_and_binary/path.ts";
import {
DEFAULT_SETTINGS,
LOG_LEVEL_DEBUG,
LOG_LEVEL_INFO,
LOG_LEVEL_NOTICE,
LOG_LEVEL_VERBOSE,
type FileEventType,
type FilePath,
type UXFileInfoStub,
type UXInternalFileInfoStub,
} from "../../../lib/src/common/types.ts";
import { delay, fireAndForget, throttle } from "../../../lib/src/common/utils.ts";
import { type FileEventItem } from "../../../common/types.ts";
import { serialized, skipIfDuplicated } from "octagonal-wheels/concurrency/lock";
import { isWaitingForTimeout } from "octagonal-wheels/concurrency/task";
import { Semaphore } from "octagonal-wheels/concurrency/semaphore";
import type { LiveSyncCore } from "../../../main.ts";
import { InternalFileToUXFileInfoStub, TFileToUXFileInfoStub } from "./utilObsidian.ts";
import ObsidianLiveSyncPlugin from "../../../main.ts";
import type { StorageAccess } from "../../interfaces/StorageAccess.ts";
import { HiddenFileSync } from "../../../features/HiddenFileSync/CmdHiddenFileSync.ts";
import { promiseWithResolvers, type PromiseWithResolvers } from "octagonal-wheels/promises";
// import { InternalFileToUXFileInfo } from "../platforms/obsidian.ts";
export type FileEvent = {
type: FileEventType;
file: UXFileInfoStub | UXInternalFileInfoStub;
oldPath?: string;
cachedData?: string;
skipBatchWait?: boolean;
cancelled?: boolean;
};
type WaitInfo = {
since: number;
type: FileEventType;
canProceed: PromiseWithResolvers<boolean>;
timerHandler: ReturnType<typeof setTimeout>;
event: FileEventItem;
};
const TYPE_SENTINEL_FLUSH = "SENTINEL_FLUSH";
type FileEventItemSentinelFlush = {
type: typeof TYPE_SENTINEL_FLUSH;
};
type FileEventItemSentinel = FileEventItemSentinelFlush;
export abstract class StorageEventManager {
abstract beginWatch(): Promise<void>;
abstract appendQueue(items: FileEvent[], ctx?: any): Promise<void>;
abstract isWaiting(filename: FilePath): boolean;
abstract waitForIdle(): Promise<void>;
abstract restoreState(): Promise<void>;
}
export class StorageEventManagerObsidian extends StorageEventManager {
plugin: ObsidianLiveSyncPlugin;
core: LiveSyncCore;
storageAccess: StorageAccess;
get services() {
return this.core.services;
}
get shouldBatchSave() {
return this.core.settings?.batchSave && this.core.settings?.liveSync != true;
}
get batchSaveMinimumDelay(): number {
return this.core.settings?.batchSaveMinimumDelay ?? DEFAULT_SETTINGS.batchSaveMinimumDelay;
}
get batchSaveMaximumDelay(): number {
return this.core.settings?.batchSaveMaximumDelay ?? DEFAULT_SETTINGS.batchSaveMaximumDelay;
}
// Necessary evil.
cmdHiddenFileSync: HiddenFileSync;
/**
* Snapshot restoration promise.
* Snapshot will be restored before starting to watch vault changes.
* By design, this is called from the initialisation process, implemented in `ModuleInitializerFile.ts`.
*/
snapShotRestored: Promise<void> | null = null;
constructor(plugin: ObsidianLiveSyncPlugin, core: LiveSyncCore, storageAccess: StorageAccess) {
super();
this.storageAccess = storageAccess;
this.plugin = plugin;
this.core = core;
this.cmdHiddenFileSync = this.plugin.getAddOn(HiddenFileSync.name) as HiddenFileSync;
}
/**
* Restore the previous snapshot, if one exists.
* @returns
*/
restoreState(): Promise<void> {
this.snapShotRestored = this._restoreFromSnapshot();
return this.snapShotRestored;
}
async beginWatch() {
await this.snapShotRestored;
const plugin = this.plugin;
this.watchVaultChange = this.watchVaultChange.bind(this);
this.watchVaultCreate = this.watchVaultCreate.bind(this);
this.watchVaultDelete = this.watchVaultDelete.bind(this);
this.watchVaultRename = this.watchVaultRename.bind(this);
this.watchVaultRawEvents = this.watchVaultRawEvents.bind(this);
this.watchEditorChange = this.watchEditorChange.bind(this);
plugin.registerEvent(plugin.app.vault.on("modify", this.watchVaultChange));
plugin.registerEvent(plugin.app.vault.on("delete", this.watchVaultDelete));
plugin.registerEvent(plugin.app.vault.on("rename", this.watchVaultRename));
plugin.registerEvent(plugin.app.vault.on("create", this.watchVaultCreate));
//@ts-ignore : Internal API
plugin.registerEvent(plugin.app.vault.on("raw", this.watchVaultRawEvents));
plugin.registerEvent(plugin.app.workspace.on("editor-change", this.watchEditorChange));
}
watchEditorChange(editor: any, info: any) {
if (!("path" in info)) {
return;
}
if (!this.shouldBatchSave) {
return;
}
const file = info?.file as TFile;
if (!file) return;
if (this.storageAccess.isFileProcessing(file.path as FilePath)) {
// Logger(`Editor change skipped because the file is being processed: ${file.path}`, LOG_LEVEL_VERBOSE);
return;
}
if (!this.isWaiting(file.path as FilePath)) {
return;
}
const data = info?.data as string;
const fi: FileEvent = {
type: "CHANGED",
file: TFileToUXFileInfoStub(file),
cachedData: data,
};
void this.appendQueue([fi]);
}
watchVaultCreate(file: TAbstractFile, ctx?: any) {
if (file instanceof TFolder) return;
if (this.storageAccess.isFileProcessing(file.path as FilePath)) {
// Logger(`File create skipped because the file is being processed: ${file.path}`, LOG_LEVEL_VERBOSE);
return;
}
const fileInfo = TFileToUXFileInfoStub(file);
void this.appendQueue([{ type: "CREATE", file: fileInfo }], ctx);
}
watchVaultChange(file: TAbstractFile, ctx?: any) {
if (file instanceof TFolder) return;
if (this.storageAccess.isFileProcessing(file.path as FilePath)) {
// Logger(`File change skipped because the file is being processed: ${file.path}`, LOG_LEVEL_VERBOSE);
return;
}
const fileInfo = TFileToUXFileInfoStub(file);
void this.appendQueue([{ type: "CHANGED", file: fileInfo }], ctx);
}
watchVaultDelete(file: TAbstractFile, ctx?: any) {
if (file instanceof TFolder) return;
if (this.storageAccess.isFileProcessing(file.path as FilePath)) {
// Logger(`File delete skipped because the file is being processed: ${file.path}`, LOG_LEVEL_VERBOSE);
return;
}
const fileInfo = TFileToUXFileInfoStub(file, true);
void this.appendQueue([{ type: "DELETE", file: fileInfo }], ctx);
}
watchVaultRename(file: TAbstractFile, oldFile: string, ctx?: any) {
// The vault `rename` event is not raised for self-caused events (Self-hosted LiveSync does not handle 'rename' directly).
if (file instanceof TFile) {
const fileInfo = TFileToUXFileInfoStub(file);
void this.appendQueue(
[
{
type: "DELETE",
file: {
path: oldFile as FilePath,
name: file.name,
stat: {
mtime: file.stat.mtime,
ctime: file.stat.ctime,
size: file.stat.size,
type: "file",
},
deleted: true,
},
skipBatchWait: true,
},
{ type: "CREATE", file: fileInfo, skipBatchWait: true },
],
ctx
);
}
}
// Watch raw events (Internal API)
watchVaultRawEvents(path: FilePath) {
if (this.storageAccess.isFileProcessing(path)) {
// Logger(`Raw file event skipped because the file is being processed: ${path}`, LOG_LEVEL_VERBOSE);
return;
}
// Only for internal files.
if (!this.plugin.settings) return;
// if (this.plugin.settings.useIgnoreFiles && this.plugin.ignoreFiles.some(e => path.endsWith(e.trim()))) {
if (this.plugin.settings.useIgnoreFiles) {
// If it is one of the ignore files, refresh the cached entry.
// (Calling $$isTargetFile will refresh the cache.)
void this.services.vault.isTargetFile(path).then(() => this._watchVaultRawEvents(path));
} else {
void this._watchVaultRawEvents(path);
}
}
async _watchVaultRawEvents(path: FilePath) {
if (!this.plugin.settings.syncInternalFiles && !this.plugin.settings.usePluginSync) return;
if (!this.plugin.settings.watchInternalFileChanges) return;
if (!path.startsWith(this.plugin.app.vault.configDir)) return;
if (path.endsWith("/")) {
// Folder
return;
}
const isTargetFile = await this.cmdHiddenFileSync.isTargetFile(path);
if (!isTargetFile) return;
void this.appendQueue(
[
{
type: "INTERNAL",
file: InternalFileToUXFileInfoStub(path),
skipBatchWait: true, // Internal files should be processed immediately.
},
],
null
);
}
// Cache the file content and wait until it can be processed.
async appendQueue(params: FileEvent[], ctx?: any) {
if (!this.core.settings.isConfigured) return;
if (this.core.settings.suspendFileWatching) return;
if (this.core.settings.maxMTimeForReflectEvents > 0) {
return;
}
this.core.services.vault.markFileListPossiblyChanged();
// Flag the file list as possibly changed so it will be reloaded.
for (const param of params) {
if (shouldBeIgnored(param.file.path)) {
continue;
}
const atomicKey = [0, 0, 0, 0, 0, 0].map((e) => `${Math.floor(Math.random() * 100000)}`).join("-");
const type = param.type;
const file = param.file;
const oldPath = param.oldPath;
if (type !== "INTERNAL") {
const size = (file as UXFileInfoStub).stat.size;
if (this.services.vault.isFileSizeTooLarge(size) && (type == "CREATE" || type == "CHANGED")) {
Logger(
`The storage file has been changed but exceeds the maximum size. Skipping: ${param.file.path}`,
LOG_LEVEL_NOTICE
);
continue;
}
}
if (file instanceof TFolder) continue;
// TODO: Confirm why only TFolder instances are skipped here.
// Possibly the following line is needed...
// if (file?.isFolder) continue;
if (!(await this.services.vault.isTargetFile(file.path))) continue;
// Stop using the cache to prevent corruption.
// let cache: null | string | ArrayBuffer;
// A new file, or something has changed: cache the changes.
// if (file instanceof TFile && (type == "CREATE" || type == "CHANGED")) {
if (file instanceof TFile || !file.isFolder) {
if (type == "CREATE" || type == "CHANGED") {
// Wait a little while to let the writer mark the file as `touched`.
await delay(10);
if (this.core.storageAccess.recentlyTouched(file.path)) {
continue;
}
}
}
let cache: string | undefined = undefined;
if (param.cachedData) {
cache = param.cachedData;
}
void this.enqueue({
type,
args: {
file: file,
oldPath,
cache,
ctx,
},
skipBatchWait: param.skipBatchWait,
key: atomicKey,
});
}
}
private bufferedQueuedItems = [] as (FileEventItem | FileEventItemSentinel)[];
/**
* Immediately take snapshot.
*/
private _triggerTakeSnapshot() {
void this._takeSnapshot();
}
/**
* Trigger taking snapshot after throttled period.
*/
triggerTakeSnapshot = throttle(() => this._triggerTakeSnapshot(), 100);
enqueue(newItem: FileEventItem) {
if (newItem.type == "DELETE") {
// If a sentinel has been pushed, runQueuedEvents will wait for idle before processing the delete.
this.bufferedQueuedItems.push({
type: TYPE_SENTINEL_FLUSH,
});
}
this.updateStatus();
this.bufferedQueuedItems.push(newItem);
fireAndForget(() => this._takeSnapshot().then(() => this.runQueuedEvents()));
}
// Limit concurrent processing to reduce the I/O load: file processing plus the scheduler (1), so file events can be processed in 4 slots.
concurrentProcessing = Semaphore(5);
private _waitingMap = new Map<string, WaitInfo>();
private _waitForIdle: Promise<void> | null = null;
/**
* Wait until all queued events are processed.
* Events queued after this call are not waited for, but no new events will be added while waiting.
* @returns
*/
waitForIdle(): Promise<void> {
if (this._waitingMap.size === 0) {
return Promise.resolve();
}
if (this._waitForIdle) {
return this._waitForIdle;
}
const promises = [...this._waitingMap.entries()].map(([key, waitInfo]) => {
return new Promise<void>((resolve) => {
waitInfo.canProceed.promise
.then(() => {
Logger(`Processing ${key}: Wait for idle completed`, LOG_LEVEL_DEBUG);
// No op
})
.catch((e) => {
Logger(`Processing ${key}: Wait for idle error`, LOG_LEVEL_INFO);
Logger(e, LOG_LEVEL_VERBOSE);
//no op
})
.finally(() => {
resolve();
});
this._proceedWaiting(key);
});
});
const waitPromise = Promise.all(promises).then(() => {
this._waitForIdle = null;
Logger(`All wait for idle completed`, LOG_LEVEL_VERBOSE);
});
this._waitForIdle = waitPromise;
return waitPromise;
}
/**
* Proceed waiting for the given key immediately.
*/
private _proceedWaiting(key: string) {
const waitInfo = this._waitingMap.get(key);
if (waitInfo) {
waitInfo.canProceed.resolve(true);
clearTimeout(waitInfo.timerHandler);
this._waitingMap.delete(key);
}
this.triggerTakeSnapshot();
}
/**
* Cancel waiting for the given key.
*/
private _cancelWaiting(key: string) {
const waitInfo = this._waitingMap.get(key);
if (waitInfo) {
waitInfo.canProceed.resolve(false);
clearTimeout(waitInfo.timerHandler);
this._waitingMap.delete(key);
}
this.triggerTakeSnapshot();
}
/**
* Add waiting for the given key.
* @param key
* @param event
* @param waitedSince Optional waited since timestamp to calculate the remaining delay.
*/
private _addWaiting(key: string, event: FileEventItem, waitedSince?: number): WaitInfo {
if (this._waitingMap.has(key)) {
// Already waiting
throw new Error(`Already waiting for key: ${key}`);
}
const resolver = promiseWithResolvers<boolean>();
const now = Date.now();
const since = waitedSince ?? now;
const elapsed = now - since;
const maxDelay = this.batchSaveMaximumDelay * 1000;
const remainingDelay = Math.max(0, maxDelay - elapsed);
const nextDelay = Math.min(remainingDelay, this.batchSaveMinimumDelay * 1000);
// x*<------- maxDelay --------->*
// x*<-- minDelay -->*
// x* x<-- nextDelay -->*
// x* x<-- Capped-->*
// x* x.......*
// x: event
// *: save
// When an event (x) occurs, save (*) at least within maxDelay, while maintaining the minimum delay between saves.
if (elapsed >= maxDelay) {
// Already exceeded maximum delay, do not wait.
Logger(`Processing ${key}: Batch save maximum delay already exceeded: ${event.type}`, LOG_LEVEL_DEBUG);
} else {
Logger(`Processing ${key}: Adding waiting for batch save: ${event.type} (${nextDelay}ms)`, LOG_LEVEL_DEBUG);
}
const waitInfo: WaitInfo = {
since: since,
type: event.type,
event: event,
canProceed: resolver,
timerHandler: setTimeout(() => {
Logger(`Processing ${key}: Batch save timeout reached: ${event.type}`, LOG_LEVEL_DEBUG);
this._proceedWaiting(key);
}, nextDelay),
};
this._waitingMap.set(key, waitInfo);
this.triggerTakeSnapshot();
return waitInfo;
}
/**
* Process the given file event.
*/
async processFileEvent(fei: FileEventItem) {
const releaser = await this.concurrentProcessing.acquire();
try {
this.updateStatus();
const filename = fei.args.file.path;
const waitingKey = `${filename}`;
const previous = this._waitingMap.get(waitingKey);
let isShouldBeCancelled = fei.skipBatchWait || false;
let previousPromise: Promise<boolean> = Promise.resolve(true);
let waitPromise: Promise<boolean> = Promise.resolve(true);
// 1. Check if there is previous waiting for the same file
if (previous) {
previousPromise = previous.canProceed.promise;
if (isShouldBeCancelled) {
Logger(
`Processing ${filename}: Requested to perform immediately, cancelling previous waiting: ${fei.type}`,
LOG_LEVEL_DEBUG
);
}
if (!isShouldBeCancelled && fei.type === "DELETE") {
// For DELETE, cancel any previous waiting and proceed immediately
// That is because once the file is deleted, it can no longer be read.
Logger(
`Processing ${filename}: DELETE requested, cancelling previous waiting: ${fei.type}`,
LOG_LEVEL_DEBUG
);
isShouldBeCancelled = true;
}
if (!isShouldBeCancelled && previous.type === fei.type) {
// For the same type, we can cancel the previous waiting and proceed immediately.
Logger(`Processing ${filename}: Cancelling previous waiting: ${fei.type}`, LOG_LEVEL_DEBUG);
isShouldBeCancelled = true;
}
// 2. wait for the previous to complete
if (isShouldBeCancelled) {
this._cancelWaiting(waitingKey);
Logger(`Processing ${filename}: Previous cancelled: ${fei.type}`, LOG_LEVEL_DEBUG);
isShouldBeCancelled = true;
}
if (!isShouldBeCancelled) {
Logger(`Processing ${filename}: Waiting for previous to complete: ${fei.type}`, LOG_LEVEL_DEBUG);
this._proceedWaiting(waitingKey);
Logger(`Processing ${filename}: Previous completed: ${fei.type}`, LOG_LEVEL_DEBUG);
}
}
await previousPromise;
// 3. Check if shouldBatchSave is true
if (this.shouldBatchSave && !fei.skipBatchWait) {
// if type is CREATE or CHANGED, set waiting
if (fei.type == "CREATE" || fei.type == "CHANGED") {
// 3.2. If true, register the wait, then wait until it is released or the timeout is reached.
// (`since` is copied from the previous waiting entry, if any, to cap the maximum total wait time.)
// console.warn(`Since:`, previous?.since);
const info = this._addWaiting(waitingKey, fei, previous?.since);
waitPromise = info.canProceed.promise;
} else if (fei.type == "DELETE") {
// For DELETE, cancel any previous waiting and proceed immediately
}
Logger(`Processing ${filename}: Waiting for batch save: ${fei.type}`, LOG_LEVEL_DEBUG);
const canProceed = await waitPromise;
if (!canProceed) {
// 3.2.1. If cancelled by new queue, cancel subsequent process.
Logger(`Processing ${filename}: Cancelled by new queue: ${fei.type}`, LOG_LEVEL_DEBUG);
return;
}
}
// await this.handleFileEvent(fei);
await this.requestProcessQueue(fei);
} finally {
await this._takeSnapshot();
releaser();
}
}
async _takeSnapshot() {
const processingEvents = [...this._waitingMap.values()].map((e) => e.event);
const waitingEvents = this.bufferedQueuedItems;
const snapShot = [...processingEvents, ...waitingEvents];
await this.core.kvDB.set("storage-event-manager-snapshot", snapShot);
Logger(`Storage operation snapshot taken: ${snapShot.length} items`, LOG_LEVEL_DEBUG);
this.updateStatus();
}
async _restoreFromSnapshot() {
const snapShot = await this.core.kvDB.get<(FileEventItem | FileEventItemSentinel)[]>(
"storage-event-manager-snapshot"
);
if (snapShot && Array.isArray(snapShot) && snapShot.length > 0) {
// console.warn(`Restoring snapshot: ${snapShot.length} items`);
Logger(`Restoring storage operation snapshot: ${snapShot.length} items`, LOG_LEVEL_VERBOSE);
// Restore the snapshot
// Note: Mark all items as skipBatchWait to prevent applying offline batch saving.
this.bufferedQueuedItems = snapShot.map((e) => ({ ...e, skipBatchWait: true }));
this.updateStatus();
await this.runQueuedEvents();
} else {
Logger(`No snapshot to restore`, LOG_LEVEL_VERBOSE);
// console.warn(`No snapshot to restore`);
}
}
runQueuedEvents() {
return skipIfDuplicated("storage-event-manager-run-queued-events", async () => {
do {
if (this.bufferedQueuedItems.length === 0) {
break;
}
// 1. Get the first queued item
const fei = this.bufferedQueuedItems.shift()!;
await this._takeSnapshot();
this.updateStatus();
// 2. Consume one semaphore slot to enqueue processing, then release it immediately.
// (This only limits the total concurrent processing count; batch skipping is handled in processFileEvent.)
const releaser = await this.concurrentProcessing.acquire();
releaser();
this.updateStatus();
// 3. Check if sentinel flush
// If sentinel, wait for idle and continue.
if (fei.type === TYPE_SENTINEL_FLUSH) {
Logger(`Waiting for idle`, LOG_LEVEL_VERBOSE);
// Flush all waiting batch queues
await this.waitForIdle();
this.updateStatus();
continue;
}
// 4. Process the event; this should be fire-and-forget so it does not block queue processing for each file.
fireAndForget(() => this.processFileEvent(fei));
} while (this.bufferedQueuedItems.length > 0);
});
}
processingCount = 0;
async requestProcessQueue(fei: FileEventItem) {
try {
this.processingCount++;
// this.bufferedQueuedItems.remove(fei);
this.updateStatus();
// this.waitedSince.delete(fei.args.file.path);
await this.handleFileEvent(fei);
await this._takeSnapshot();
} finally {
this.processingCount--;
this.updateStatus();
}
}
isWaiting(filename: FilePath) {
return isWaitingForTimeout(`storage-event-manager-batchsave-${filename}`);
}
updateStatus() {
const allFileEventItems = this.bufferedQueuedItems.filter((e): e is FileEventItem => "args" in e);
const allItems = allFileEventItems.filter((e) => !e.cancelled);
const totalItems = allItems.length + this.concurrentProcessing.waiting;
const processing = this.processingCount;
const batchedCount = this._waitingMap.size;
this.core.batched.value = batchedCount;
this.core.processing.value = processing;
this.core.totalQueued.value = totalItems + batchedCount + processing;
}
async handleFileEvent(queue: FileEventItem): Promise<any> {
const file = queue.args.file;
const lockKey = `handleFile:${file.path}`;
const ret = await serialized(lockKey, async () => {
if (queue.cancelled) {
Logger(`File event cancelled before processing: ${file.path}`, LOG_LEVEL_INFO);
return;
}
if (queue.type == "INTERNAL" || file.isInternal) {
await this.core.services.fileProcessing.processOptionalFileEvent(file.path as unknown as FilePath);
} else {
const key = `file-last-proc-${queue.type}-${file.path}`;
const last = Number((await this.core.kvDB.get(key)) || 0);
if (queue.type == "DELETE") {
await this.core.services.fileProcessing.processFileEvent(queue);
} else {
if (file.stat.mtime == last) {
Logger(`File has been already scanned on ${queue.type}, skip: ${file.path}`, LOG_LEVEL_VERBOSE);
// Should we cancel the related operations? (e.g. rename)
// this.cancelRelativeEvent(queue);
return;
}
if (!(await this.core.services.fileProcessing.processFileEvent(queue))) {
Logger(
`STORAGE -> DB: Handler failed, cancel the relative operations: ${file.path}`,
LOG_LEVEL_INFO
);
// Cancel running queues and remove one side of an atomic operation (e.g. rename).
this.cancelRelativeEvent(queue);
return;
}
}
}
});
this.updateStatus();
return ret;
}
cancelRelativeEvent(item: FileEventItem): void {
this._cancelWaiting(item.args.file.path);
}
}
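
The delay computed in `_addWaiting` above caps batching in two directions: each new event may wait up to `batchSaveMinimumDelay`, but the total wait since the first buffered event never exceeds `batchSaveMaximumDelay`. A worked sketch of that arithmetic (standalone function, names chosen for illustration; settings are in seconds, timers in milliseconds):

```ts
// Returns how long the next batch-save timer should run for a file whose first
// buffered event happened at `sinceMs`.
function nextBatchDelay(sinceMs: number, nowMs: number, minDelaySec: number, maxDelaySec: number): number {
    const elapsed = nowMs - sinceMs;
    const remaining = Math.max(0, maxDelaySec * 1000 - elapsed); // time left until the hard deadline
    return Math.min(remaining, minDelaySec * 1000); // but never wait longer than the minimum delay per event
}

// With min 5 s / max 60 s: a burst of edits keeps restarting the 5 s timer,
// until 60 s have passed since the first edit, at which point the save fires immediately.
console.log(nextBatchDelay(0, 3_000, 5, 60)); // 5000
console.log(nextBatchDelay(0, 58_000, 5, 60)); // 2000
console.log(nextBatchDelay(0, 61_000, 5, 60)); // 0
```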

View File

@@ -2,7 +2,6 @@
import { TFile, type TAbstractFile, type TFolder } from "../../../deps.ts";
import { ICHeader } from "../../../common/types.ts";
import type { SerializedFileAccess } from "./SerializedFileAccess.ts";
import { addPrefix, isPlainText } from "../../../lib/src/string_and_binary/path.ts";
import { LOG_LEVEL_VERBOSE, Logger } from "octagonal-wheels/common/logger";
import { createBlob } from "../../../lib/src/common/utils.ts";
@@ -15,6 +14,7 @@ import type {
UXInternalFileInfoStub,
} from "../../../lib/src/common/types.ts";
import type { LiveSyncCore } from "../../../main.ts";
import type { FileAccessObsidian } from "@/serviceModules/FileAccessObsidian.ts";
export async function TFileToUXFileInfo(
core: LiveSyncCore,
@@ -51,7 +51,7 @@ export async function TFileToUXFileInfo(
export async function InternalFileToUXFileInfo(
fullPath: string,
vaultAccess: SerializedFileAccess,
vaultAccess: FileAccessObsidian,
prefix: string = ICHeader
): Promise<UXFileInfo> {
const name = fullPath.split("/").pop() as string;

View File

@@ -1,7 +1,7 @@
import { unique } from "octagonal-wheels/collection";
import { throttle } from "octagonal-wheels/function";
import { EVENT_ON_UNRESOLVED_ERROR, eventHub } from "../../common/events.ts";
import { BASE_IS_NEW, compareFileFreshness, EVEN, getPath, isValidPath, TARGET_IS_NEW } from "../../common/utils.ts";
import { BASE_IS_NEW, compareFileFreshness, EVEN, isValidPath, TARGET_IS_NEW } from "../../common/utils.ts";
import {
type FilePathWithPrefixLC,
type FilePathWithPrefix,
@@ -78,7 +78,7 @@ export class ModuleInitializerFile extends AbstractModule {
const _filesStorage = [] as typeof filesStorageSrc;
for (const f of filesStorageSrc) {
if (await this.services.vault.isTargetFile(f.path, f != filesStorageSrc[0])) {
if (await this.services.vault.isTargetFile(f.path)) {
_filesStorage.push(f);
}
}
@@ -120,9 +120,9 @@ export class ModuleInitializerFile extends AbstractModule {
showingNotice ? LOG_LEVEL_NOTICE : LOG_LEVEL_INFO,
"syncAll"
);
const path = getPath(doc);
const path = this.getPath(doc);
if (isValidPath(path) && (await this.services.vault.isTargetFile(path, true))) {
if (isValidPath(path) && (await this.services.vault.isTargetFile(path))) {
if (!isMetaEntry(doc)) {
this._log(`Invalid entry: ${path}`, LOG_LEVEL_INFO);
continue;
@@ -132,7 +132,7 @@ export class ModuleInitializerFile extends AbstractModule {
}
const databaseFileNameMap = Object.fromEntries(
_DBEntries.map((e) => [getPath(e), e] as [FilePathWithPrefix, MetaEntry])
_DBEntries.map((e) => [this.getPath(e), e] as [FilePathWithPrefix, MetaEntry])
);
const databaseFileNames = Object.keys(databaseFileNameMap) as FilePathWithPrefix[];
const databaseFileNameCapsPair = databaseFileNames.map(
@@ -224,7 +224,7 @@ export class ModuleInitializerFile extends AbstractModule {
runAll("UPDATE STORAGE", filesExistOnlyInDatabase, async (e) => {
const w = databaseFileNameMap[databaseFileNameCI2CS[e]];
// Exists in database but not in storage.
const path = getPath(w) ?? e;
const path = this.getPath(w) ?? e;
if (w && !(w.deleted || w._deleted)) {
if (!this.services.vault.isFileSizeTooLarge(w.size)) {
// Prevent applying the conflicted state to the storage.
@@ -275,7 +275,7 @@ export class ModuleInitializerFile extends AbstractModule {
await this.syncFileBetweenDBandStorage(file, doc);
} else {
this._log(
`SYNC DATABASE AND STORAGE: ${getPath(doc)} has been skipped due to file size exceeding the limit`,
`SYNC DATABASE AND STORAGE: ${this.getPath(doc)} has been skipped due to file size exceeding the limit`,
logLevel
);
}
@@ -365,7 +365,7 @@ export class ModuleInitializerFile extends AbstractModule {
if (isAnyNote(doc)) {
if (doc.deleted && doc.mtime - limit < 0) {
notes.push({
path: getPath(doc),
path: this.getPath(doc),
mtime: doc.mtime,
ttl: (doc.mtime - limit) / 1000 / 86400,
doc: doc,
@@ -393,7 +393,13 @@ export class ModuleInitializerFile extends AbstractModule {
ignoreSuspending: boolean = false
): Promise<boolean> {
this.services.appLifecycle.resetIsReady();
if (!reopenDatabase || (await this.services.database.openDatabase())) {
if (
!reopenDatabase ||
(await this.services.database.openDatabase({
databaseEvents: this.services.databaseEvents,
replicator: this.services.replicator,
}))
) {
if (this.localDatabase.isReady) {
await this.services.vault.scanVault(showingNotice, ignoreSuspending);
}
@@ -415,7 +421,7 @@ export class ModuleInitializerFile extends AbstractModule {
private _reportDetectedErrors(): Promise<string[]> {
return Promise.resolve(Array.from(this._detectedErrors));
}
onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
override onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
services.appLifecycle.getUnresolvedMessages.addHandler(this._reportDetectedErrors.bind(this));
services.databaseEvents.initialiseDatabase.setHandler(this._initializeDatabase.bind(this));
services.vault.scanVault.setHandler(this._performFullScan.bind(this));

View File

@@ -1,114 +0,0 @@
import { delay, yieldMicrotask } from "octagonal-wheels/promises";
import { OpenKeyValueDatabase } from "../../common/KeyValueDB.ts";
import type { LiveSyncLocalDB } from "../../lib/src/pouchdb/LiveSyncLocalDB.ts";
import { LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "octagonal-wheels/common/logger";
import { AbstractModule } from "../AbstractModule.ts";
import type { LiveSyncCore } from "../../main.ts";
import type { SimpleStore } from "octagonal-wheels/databases/SimpleStoreBase";
import type { InjectableServiceHub } from "@/lib/src/services/InjectableServices.ts";
import type { ObsidianDatabaseService } from "../services/ObsidianServices.ts";
export class ModuleKeyValueDB extends AbstractModule {
async tryCloseKvDB() {
try {
await this.core.kvDB?.close();
return true;
} catch (e) {
this._log("Failed to close KeyValueDB", LOG_LEVEL_VERBOSE);
this._log(e);
return false;
}
}
async openKeyValueDB(): Promise<boolean> {
await delay(10);
try {
await this.tryCloseKvDB();
await delay(10);
await yieldMicrotask();
this.core.kvDB = await OpenKeyValueDatabase(this.services.vault.getVaultName() + "-livesync-kv");
await yieldMicrotask();
await delay(100);
} catch (e) {
this.core.kvDB = undefined!;
this._log("Failed to open KeyValueDB", LOG_LEVEL_NOTICE);
this._log(e, LOG_LEVEL_VERBOSE);
return false;
}
return true;
}
async _onDBUnload(db: LiveSyncLocalDB) {
if (this.core.kvDB) await this.core.kvDB.close();
return Promise.resolve(true);
}
async _onDBClose(db: LiveSyncLocalDB) {
if (this.core.kvDB) await this.core.kvDB.close();
return Promise.resolve(true);
}
private async _everyOnloadAfterLoadSettings(): Promise<boolean> {
if (!(await this.openKeyValueDB())) {
return false;
}
this.core.simpleStore = this.services.database.openSimpleStore<any>("os");
return Promise.resolve(true);
}
_getSimpleStore<T>(kind: string) {
const getDB = () => this.core.kvDB;
const prefix = `${kind}-`;
return {
get: async (key: string): Promise<T> => {
return await getDB().get(`${prefix}${key}`);
},
set: async (key: string, value: any): Promise<void> => {
await getDB().set(`${prefix}${key}`, value);
},
delete: async (key: string): Promise<void> => {
await getDB().del(`${prefix}${key}`);
},
keys: async (
from: string | undefined,
to: string | undefined,
count?: number | undefined
): Promise<string[]> => {
const ret = await getDB().keys(
IDBKeyRange.bound(`${prefix}${from || ""}`, `${prefix}${to || ""}`),
count
);
return ret
.map((e) => e.toString())
.filter((e) => e.startsWith(prefix))
.map((e) => e.substring(prefix.length));
},
db: Promise.resolve(getDB()),
} satisfies SimpleStore<T>;
}
_everyOnInitializeDatabase(db: LiveSyncLocalDB): Promise<boolean> {
return this.openKeyValueDB();
}
async _everyOnResetDatabase(db: LiveSyncLocalDB): Promise<boolean> {
try {
const kvDBKey = "queued-files";
await this.core.kvDB.del(kvDBKey);
// localStorage.removeItem(lsKey);
await this.core.kvDB.destroy();
await yieldMicrotask();
this.core.kvDB = await OpenKeyValueDatabase(this.services.vault.getVaultName() + "-livesync-kv");
await delay(100);
} catch (e) {
this.core.kvDB = undefined!;
this._log("Failed to reset KeyValueDB", LOG_LEVEL_NOTICE);
this._log(e, LOG_LEVEL_VERBOSE);
return false;
}
return true;
}
onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
services.databaseEvents.onUnloadDatabase.addHandler(this._onDBUnload.bind(this));
services.databaseEvents.onCloseDatabase.addHandler(this._onDBClose.bind(this));
services.databaseEvents.onDatabaseInitialisation.addHandler(this._everyOnInitializeDatabase.bind(this));
services.databaseEvents.onResetDatabase.addHandler(this._everyOnResetDatabase.bind(this));
(services.database as ObsidianDatabaseService).openSimpleStore.setHandler(this._getSimpleStore.bind(this));
services.appLifecycle.onSettingLoaded.addHandler(this._everyOnloadAfterLoadSettings.bind(this));
}
}
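
`_getSimpleStore` above carves per-kind namespaces out of the single KeyValueDB by prefixing keys and bounding range queries with `IDBKeyRange`, then stripping the prefix again on the way out. A self-contained sketch of the same idea; `KVLike` is an assumed minimal interface for illustration, not the plugin's actual type:

```ts
// Assumed minimal key-value API, matching the calls made in _getSimpleStore.
interface KVLike {
    get(key: string): Promise<any>;
    set(key: string, value: any): Promise<void>;
    del(key: string): Promise<void>;
    keys(range: IDBKeyRange, count?: number): Promise<IDBValidKey[]>;
}

function prefixedStore<T>(db: KVLike, kind: string) {
    const prefix = `${kind}-`;
    return {
        get: (key: string) => db.get(`${prefix}${key}`) as Promise<T>,
        set: (key: string, value: T) => db.set(`${prefix}${key}`, value),
        delete: (key: string) => db.del(`${prefix}${key}`),
        // Range query stays inside this kind's namespace, then the prefix is stripped.
        keys: async (from?: string, to?: string, count?: number) => {
            const raw = await db.keys(IDBKeyRange.bound(`${prefix}${from ?? ""}`, `${prefix}${to ?? ""}`), count);
            return raw
                .map((e) => e.toString())
                .filter((e) => e.startsWith(prefix))
                .map((e) => e.substring(prefix.length));
        },
    };
}
```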

View File

@@ -10,7 +10,7 @@ import {
import { AbstractModule } from "../AbstractModule.ts";
import { $msg } from "src/lib/src/common/i18n.ts";
import { performDoctorConsultation, RebuildOptions } from "../../lib/src/common/configForDoc.ts";
import { getPath, isValidPath } from "../../common/utils.ts";
import { isValidPath } from "../../common/utils.ts";
import { isMetaEntry } from "../../lib/src/common/types.ts";
import { isDeletedEntry, isDocContentSame, isLoadedEntry, readAsBlob } from "../../lib/src/common/utils.ts";
import { countCompromisedChunks } from "../../lib/src/pouchdb/negotiation.ts";
@@ -128,12 +128,12 @@ export class ModuleMigration extends AbstractModule {
const errorFiles = [] as ErrorInfo[];
for await (const metaDoc of this.localDatabase.findAllNormalDocs({ conflicts: true })) {
const path = getPath(metaDoc);
const path = this.getPath(metaDoc);
if (!isValidPath(path)) {
continue;
}
if (!(await this.services.vault.isTargetFile(path, true))) {
if (!(await this.services.vault.isTargetFile(path))) {
continue;
}
if (!isMetaEntry(metaDoc)) {
@@ -353,7 +353,7 @@ export class ModuleMigration extends AbstractModule {
});
return Promise.resolve(true);
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
super.onBindFunction(core, services);
services.appLifecycle.onLayoutReady.addHandler(this._everyOnLayoutReady.bind(this));
services.appLifecycle.onFirstInitialise.addHandler(this._everyOnFirstInitialize.bind(this));

View File

@@ -28,7 +28,10 @@ export class ObsHttpHandler extends FetchHttpHandler {
this.reverseProxyNoSignUrl = reverseProxyNoSignUrl;
}
// eslint-disable-next-line require-await
async handle(request: HttpRequest, { abortSignal }: HttpHandlerOptions = {}): Promise<{ response: HttpResponse }> {
override async handle(
request: HttpRequest,
{ abortSignal }: HttpHandlerOptions = {}
): Promise<{ response: HttpResponse }> {
if (abortSignal?.aborted) {
const abortError = new Error("Request aborted");
abortError.name = "AbortError";
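
The override keeps the early abort check shown above: if the caller's `AbortSignal` is already aborted, the handler rejects with an `AbortError` before doing any network work, which is the behaviour `fetch` callers expect. A tiny sketch of that guard as a standalone helper (hypothetical, not part of the plugin):

```ts
// Throw a fetch-compatible AbortError when the signal is already aborted.
function throwIfAborted(abortSignal?: AbortSignal): void {
    if (abortSignal?.aborted) {
        const abortError = new Error("Request aborted");
        abortError.name = "AbortError";
        throw abortError;
    }
}
```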

View File

@@ -2,10 +2,10 @@ import { LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "octagonal-w
import { sizeToHumanReadable } from "octagonal-wheels/number";
import { $msg } from "src/lib/src/common/i18n.ts";
import type { LiveSyncCore } from "../../main.ts";
import { AbstractObsidianModule } from "../AbstractObsidianModule.ts";
import { EVENT_REQUEST_CHECK_REMOTE_SIZE, eventHub } from "@/common/events.ts";
import { AbstractModule } from "../AbstractModule.ts";
export class ModuleCheckRemoteSize extends AbstractObsidianModule {
export class ModuleCheckRemoteSize extends AbstractModule {
checkRemoteSize(): Promise<boolean> {
this.settings.notifyThresholdOfRemoteStorageSize = 1;
return this._allScanStat();
@@ -127,7 +127,7 @@ export class ModuleCheckRemoteSize extends AbstractObsidianModule {
eventHub.onEvent(EVENT_REQUEST_CHECK_REMOTE_SIZE, () => this.checkRemoteSize());
return Promise.resolve(true);
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.onScanningStartupIssues.addHandler(this._allScanStat.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
}

View File

@@ -8,12 +8,11 @@ import {
type LOG_LEVEL,
} from "octagonal-wheels/common/logger";
import { Notice, requestUrl, type RequestUrlParam, type RequestUrlResponse } from "../../deps.ts";
import { type CouchDBCredentials, type EntryDoc, type FilePath } from "../../lib/src/common/types.ts";
import { getPathFromTFile } from "../../common/utils.ts";
import { type CouchDBCredentials, type EntryDoc } from "../../lib/src/common/types.ts";
import { isCloudantURI, isValidRemoteCouchDBURI } from "../../lib/src/pouchdb/utils_couchdb.ts";
import { replicationFilter } from "@/lib/src/pouchdb/compress.ts";
import { disableEncryption } from "@/lib/src/pouchdb/encryption.ts";
import { enableEncryption } from "@/lib/src/pouchdb/encryption.ts";
import { replicationFilter } from "@lib/pouchdb/compress.ts";
import { disableEncryption } from "@lib/pouchdb/encryption.ts";
import { enableEncryption } from "@lib/pouchdb/encryption.ts";
import { setNoticeClass } from "../../lib/src/mock_and_interop/wrapper.ts";
import { PouchDB } from "../../lib/src/pouchdb/pouchdb-browser.ts";
import { AuthorizationHeaderGenerator } from "../../lib/src/replication/httplib.ts";
@@ -97,7 +96,7 @@ export class ModuleObsidianAPI extends AbstractObsidianModule {
const size = body ? ` (${body.length})` : "";
try {
const r = await this.__fetchByAPI(url, authHeader, opts);
this.plugin.requestCount.value = this.plugin.requestCount.value + 1;
this.services.API.requestCount.value = this.services.API.requestCount.value + 1;
if (method == "POST" || method == "PUT") {
this.last_successful_post = r.status - (r.status % 100) == 200;
} else {
@@ -114,7 +113,7 @@ export class ModuleObsidianAPI extends AbstractObsidianModule {
this._log(ex);
throw ex;
} finally {
this.plugin.responseCount.value = this.plugin.responseCount.value + 1;
this.services.API.responseCount.value = this.services.API.responseCount.value + 1;
}
}
@@ -172,7 +171,7 @@ export class ModuleObsidianAPI extends AbstractObsidianModule {
headers.append("authorization", authHeader);
}
try {
this.plugin.requestCount.value = this.plugin.requestCount.value + 1;
this.services.API.requestCount.value = this.services.API.requestCount.value + 1;
const response: Response = await (useRequestAPI
? this.__fetchByAPI(url.toString(), authHeader, { ...opts, headers })
: fetch(url, { ...opts, headers }));
@@ -246,7 +245,7 @@ export class ModuleObsidianAPI extends AbstractObsidianModule {
this._log(ex);
throw ex;
} finally {
this.plugin.responseCount.value = this.plugin.responseCount.value + 1;
this.services.API.responseCount.value = this.services.API.responseCount.value + 1;
}
// return await fetch(url, opts);
@@ -279,33 +278,13 @@ export class ModuleObsidianAPI extends AbstractObsidianModule {
}
}
_vaultName(): string {
return this.app.vault.getName();
}
_getVaultName(): string {
return (
this.services.vault.vaultName() +
(this.settings?.additionalSuffixOfDatabaseName ? "-" + this.settings.additionalSuffixOfDatabaseName : "")
);
}
_getActiveFilePath(): FilePath | undefined {
const file = this.app.workspace.getActiveFile();
if (file) {
return getPathFromTFile(file);
}
return undefined;
}
private _reportUnresolvedMessages(): Promise<(string | Error)[]> {
return Promise.resolve([...this._previousErrors]);
}
onBindFunction(core: LiveSyncCore, services: typeof core.services) {
override onBindFunction(core: LiveSyncCore, services: typeof core.services) {
services.API.isLastPostFailedDueToPayloadSize.setHandler(this._getLastPostFailedBySize.bind(this));
services.remote.connect.setHandler(this._connectRemoteCouchDB.bind(this));
services.vault.getVaultName.setHandler(this._getVaultName.bind(this));
services.vault.vaultName.setHandler(this._vaultName.bind(this));
services.vault.getActiveFilePath.setHandler(this._getActiveFilePath.bind(this));
services.appLifecycle.getUnresolvedMessages.addHandler(this._reportUnresolvedMessages.bind(this));
}
}
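The request/response counters that moved from `this.plugin` onto `services.API` always bracket a request the same way: increment before sending, and increment the response counter in `finally`, so their difference is the number of in-flight calls. A hedged sketch of that pattern (counter shape simplified to plain objects with a `value` field):

// Sketch of the bookkeeping shown above: requestCount - responseCount > 0
// means a call is still in flight.
const counters = { requestCount: { value: 0 }, responseCount: { value: 0 } };

async function countedFetch(url: string, init?: RequestInit): Promise<Response> {
    counters.requestCount.value += 1;
    try {
        return await fetch(url, init);
    } finally {
        counters.responseCount.value += 1;
    }
}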

View File

@@ -5,7 +5,7 @@ import { scheduleTask } from "octagonal-wheels/concurrency/task";
import { type TFile } from "../../deps.ts";
import { fireAndForget } from "octagonal-wheels/promises";
import { type FilePathWithPrefix } from "../../lib/src/common/types.ts";
import { reactive, reactiveSource } from "octagonal-wheels/dataobject/reactive";
import { reactive, reactiveSource, type ReactiveSource } from "octagonal-wheels/dataobject/reactive";
import {
collectingChunks,
pluginScanningCount,
@@ -31,13 +31,8 @@ export class ModuleObsidianEvents extends AbstractObsidianModule {
return Promise.resolve(true);
}
private _performRestart(): void {
this.__performAppReload();
}
__performAppReload() {
//@ts-ignore
this.app.commands.executeCommandById("app:reload");
this.services.appLifecycle.performRestart();
}
initialCallback: any;
@@ -50,7 +45,7 @@ export class ModuleObsidianEvents extends AbstractObsidianModule {
this.initialCallback = save;
saveCommandDefinition.callback = () => {
scheduleTask("syncOnEditorSave", 250, () => {
if (this.services.appLifecycle.hasUnloaded()) {
if (this.services.control.hasUnloaded()) {
this._log("Unload and remove the handler.", LOG_LEVEL_VERBOSE);
saveCommandDefinition.callback = this.initialCallback;
this.initialCallback = undefined;
@@ -74,7 +69,7 @@ export class ModuleObsidianEvents extends AbstractObsidianModule {
//@ts-ignore
window.CodeMirrorAdapter.commands.save = () => {
//@ts-ignore
_this.app.commands.executeCommandById("editor:save-file");
void _this.app.commands.executeCommandById("editor:save-file");
// _this.app.performCommand('editor:save-file');
};
}
@@ -193,19 +188,25 @@ export class ModuleObsidianEvents extends AbstractObsidianModule {
}
});
}
// Process counting for app reload scheduling
_totalProcessingCount?: ReactiveSource<number> = undefined;
private _scheduleAppReload() {
if (!this.core._totalProcessingCount) {
if (!this._totalProcessingCount) {
const __tick = reactiveSource(0);
this.core._totalProcessingCount = reactive(() => {
const dbCount = this.core.databaseQueueCount.value;
const replicationCount = this.core.replicationResultCount.value;
const storageApplyingCount = this.core.storageApplyingCount.value;
this._totalProcessingCount = reactive(() => {
const dbCount = this.services.replication.databaseQueueCount.value;
const replicationCount = this.services.replication.replicationResultCount.value;
const storageApplyingCount = this.services.replication.storageApplyingCount.value;
const chunkCount = collectingChunks.value;
const pluginScanCount = pluginScanningCount.value;
const hiddenFilesCount = hiddenFilesEventCount.value + hiddenFilesProcessingCount.value;
const conflictProcessCount = this.core.conflictProcessQueueCount.value;
const e = this.core.pendingFileEventCount.value;
const proc = this.core.processingFileEventCount.value;
const conflictProcessCount = this.services.conflict.conflictProcessQueueCount.value;
// Now `pendingFileEventCount` and `processingFileEventCount` are no longer used
// const e = this.core.pendingFileEventCount.value;
// const proc = this.core.processingFileEventCount.value;
const e = 0;
const proc = 0;
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const __ = __tick.value;
return (
@@ -227,7 +228,7 @@ export class ModuleObsidianEvents extends AbstractObsidianModule {
);
let stableCheck = 3;
this.core._totalProcessingCount.onChanged((e) => {
this._totalProcessingCount.onChanged((e) => {
if (e.value == 0) {
if (stableCheck-- <= 0) {
this.__performAppReload();
@@ -243,11 +244,14 @@ export class ModuleObsidianEvents extends AbstractObsidianModule {
});
}
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
_isReloadingScheduled(): boolean {
return this._totalProcessingCount !== undefined;
}
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.onLayoutReady.addHandler(this._everyOnLayoutReady.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
services.appLifecycle.performRestart.setHandler(this._performRestart.bind(this));
services.appLifecycle.askRestart.setHandler(this._askReload.bind(this));
services.appLifecycle.scheduleRestart.setHandler(this._scheduleAppReload.bind(this));
services.appLifecycle.isReloadingScheduled.setHandler(this._isReloadingScheduled.bind(this));
}
}
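The reload scheduling above folds several reactive counters into one aggregate and reloads only after the aggregate has stayed at zero for a few change notifications. A minimal sketch using the `reactive`/`reactiveSource` helpers from the imports; the counter sources here are stand-ins for the real service counters, and the reset-on-activity branch is an assumption:

import { reactive, reactiveSource } from "octagonal-wheels/dataobject/reactive";

// Stand-in counters; in the module these come from services.replication etc.
const databaseQueueCount = reactiveSource(0);
const replicationResultCount = reactiveSource(0);
const storageApplyingCount = reactiveSource(0);

function performAppReload() {
    // In the real module this is services.appLifecycle.performRestart().
}

// Aggregate everything still in flight; once it has stayed at zero for a few
// consecutive change notifications, reloading is treated as safe.
const totalProcessingCount = reactive(
    () => databaseQueueCount.value + replicationResultCount.value + storageApplyingCount.value
);
let stableCheck = 3;
totalProcessingCount.onChanged((e) => {
    if (e.value === 0) {
        if (stableCheck-- <= 0) performAppReload();
    } else {
        stableCheck = 3; // assumed: activity resumed, restart the countdown
    }
});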

View File

@@ -1,11 +1,11 @@
import { fireAndForget } from "octagonal-wheels/promises";
import { addIcon, type Editor, type MarkdownFileInfo, type MarkdownView } from "../../deps.ts";
import { LOG_LEVEL_NOTICE, type FilePathWithPrefix } from "../../lib/src/common/types.ts";
import { AbstractObsidianModule } from "../AbstractObsidianModule.ts";
import { $msg } from "src/lib/src/common/i18n.ts";
import type { LiveSyncCore } from "../../main.ts";
import { AbstractModule } from "../AbstractModule.ts";
export class ModuleObsidianMenu extends AbstractObsidianModule {
export class ModuleObsidianMenu extends AbstractModule {
_everyOnloadStart(): Promise<boolean> {
// UI
addIcon(
@@ -59,7 +59,7 @@ export class ModuleObsidianMenu extends AbstractObsidianModule {
this.settings.liveSync = true;
this._log("LiveSync Enabled.", LOG_LEVEL_NOTICE);
}
await this.services.setting.realiseSetting();
await this.services.control.applySettings();
await this.services.setting.saveSettingData();
},
});
@@ -74,7 +74,7 @@ export class ModuleObsidianMenu extends AbstractObsidianModule {
this.services.appLifecycle.setSuspended(true);
this._log("Self-hosted LiveSync suspended", LOG_LEVEL_NOTICE);
}
await this.services.setting.realiseSetting();
await this.services.control.applySettings();
await this.services.setting.saveSettingData();
},
});
@@ -105,16 +105,8 @@ export class ModuleObsidianMenu extends AbstractObsidianModule {
});
return Promise.resolve(true);
}
private __onWorkspaceReady() {
void this.services.appLifecycle.onReady();
}
private _everyOnload(): Promise<boolean> {
this.app.workspace.onLayoutReady(this.__onWorkspaceReady.bind(this));
return Promise.resolve(true);
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
}
}
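Both commands above follow the same flow: mutate the setting, re-apply the runtime sync mode via `services.control.applySettings()`, then persist with `services.setting.saveSettingData()`. A small hedged sketch of that flow with the service surface reduced to the two calls actually used:

// Sketch only: toggle LiveSync, apply the new mode, then persist the settings.
async function setLiveSyncEnabled(
    enabled: boolean,
    settings: { liveSync: boolean },
    services: {
        control: { applySettings(): Promise<void> };
        setting: { saveSettingData(): Promise<void> };
    }
): Promise<void> {
    settings.liveSync = enabled;
    await services.control.applySettings();
    await services.setting.saveSettingData();
}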

View File

@@ -1,18 +0,0 @@
import type { LiveSyncCore } from "../../main.ts";
import { AbstractObsidianModule } from "../AbstractObsidianModule.ts";
export class ModuleExtraSyncObsidian extends AbstractObsidianModule {
deviceAndVaultName: string = "";
_getDeviceAndVaultName(): string {
return this.deviceAndVaultName;
}
_setDeviceAndVaultName(name: string): void {
this.deviceAndVaultName = name;
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.setting.getDeviceAndVaultName.setHandler(this._getDeviceAndVaultName.bind(this));
services.setting.setDeviceAndVaultName.setHandler(this._setDeviceAndVaultName.bind(this));
}
}
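The removed getters/setters are backed by per-vault localStorage entries (see `_saveDeviceAndVaultName` in the settings module further down). A sketch of that storage scheme, with the key prefix taken from that module, so the device name stays local and is never synchronised through data.json:

// Sketch: persist the device-and-vault name per vault, outside data.json.
const LS_PREFIX = "obsidian-live-sync-vaultanddevicename-";

function saveDeviceAndVaultName(vaultName: string, deviceAndVaultName: string): void {
    localStorage.setItem(LS_PREFIX + vaultName, deviceAndVaultName || "");
}
function loadDeviceAndVaultName(vaultName: string): string {
    return localStorage.getItem(LS_PREFIX + vaultName) || "";
}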

View File

@@ -156,7 +156,7 @@ export class ModuleDev extends AbstractObsidianModule {
// this.addTestResult("Test of test3", true);
return this.testDone();
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.onLayoutReady.addHandler(this._everyOnLayoutReady.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
services.appLifecycle.onSettingLoaded.addHandler(this._everyOnloadAfterLoadSettings.bind(this));

View File

@@ -440,7 +440,7 @@ Line4:D`;
return Promise.resolve(true);
}
onBindFunction(core: typeof this.core, services: typeof core.services): void {
override onBindFunction(core: typeof this.core, services: typeof core.services): void {
services.test.testMultiDevice.addHandler(this._everyModuleTestMultiDevice.bind(this));
}
}

View File

@@ -511,10 +511,11 @@ ABCDEFGHIJKLMNOPQRSTUVWXYZ`;
return this.__assertStorageContent((this.testRootPath + "task.md") as FilePath, mergedDoc, false, true);
}
// No longer tested
async checkConflictResolution() {
this._log("Before testing conflicted files, resolve all once", LOG_LEVEL_NOTICE);
await this.core.rebuilder.resolveAllConflictedFilesByNewerOnes();
await this.core.rebuilder.resolveAllConflictedFilesByNewerOnes();
await this.services.conflict.resolveAllConflictedFilesByNewerOnes();
await this.services.conflict.resolveAllConflictedFilesByNewerOnes();
await this.services.replication.replicate();
await delay(1000);
if (!(await this.testConflictAutomatic())) {
@@ -580,7 +581,7 @@ ABCDEFGHIJKLMNOPQRSTUVWXYZ`;
await this._test("Conflict resolution", async () => await this.checkConflictResolution());
return this.testDone();
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.onSettingLoaded.addHandler(this._everyOnloadAfterLoadSettings.bind(this));
services.replication.onBeforeReplicate.addHandler(this._everyBeforeReplicate.bind(this));
services.test.testMultiDevice.addHandler(this._everyModuleTestMultiDevice.bind(this));

View File

@@ -8,11 +8,11 @@ export class TestPaneView extends ItemView {
component?: TestPaneComponent;
plugin: ObsidianLiveSyncPlugin;
moduleDev: ModuleDev;
icon = "view-log";
override icon = "view-log";
title: string = "Self-hosted LiveSync Test and Results";
navigation = true;
override navigation = true;
getIcon(): string {
override getIcon(): string {
return "view-log";
}
@@ -30,7 +30,7 @@ export class TestPaneView extends ItemView {
return "Self-hosted LiveSync Test and Results";
}
async onOpen() {
override async onOpen() {
this.component = new TestPaneComponent({
target: this.contentEl,
props: {
@@ -41,7 +41,7 @@ export class TestPaneView extends ItemView {
await Promise.resolve();
}
async onClose() {
override async onClose() {
this.component?.$destroy();
await Promise.resolve();
}

View File

@@ -214,7 +214,7 @@ export class DocumentHistoryModal extends Modal {
}
}
onOpen() {
override onOpen() {
const { contentEl } = this;
this.titleEl.setText("Document History");
contentEl.empty();
@@ -299,7 +299,7 @@ export class DocumentHistoryModal extends Modal {
});
});
}
onClose() {
override onClose() {
const { contentEl } = this;
contentEl.empty();
this.BlobURLs.forEach((value) => {

View File

@@ -6,7 +6,6 @@
import { diff_match_patch } from "../../../deps.ts";
import { DocumentHistoryModal } from "../DocumentHistory/DocumentHistoryModal.ts";
import { isPlainText, stripAllPrefixes } from "../../../lib/src/string_and_binary/path.ts";
import { getPath } from "../../../common/utils.ts";
export let plugin: ObsidianLiveSyncPlugin;
let showDiffInfo = false;
@@ -44,6 +43,9 @@
};
let history = [] as HistoryData[];
let loading = false;
function getPath(entry: AnyEntry): FilePathWithPrefix {
return plugin.services.path.getPath(entry);
}
async function fetchChanges(): Promise<HistoryData[]> {
try {
@@ -219,69 +221,69 @@
{/if}
<table>
<tbody>
<tr>
    <th> Date </th>
    <th> Path </th>
    <th> Rev </th>
    <th> Stat </th>
    {#if showChunkCorrected}
        <th> Chunks </th>
    {/if}
</tr>
<tr>
    <td colspan="5" class="more">
        {#if loading}
            <div class=""></div>
        {:else}
            <div><button on:click={() => nextWeek()}>+1 week</button></div>
        {/if}
    </td>
</tr>
{#each history as entry}
    <tr>
        <td class="mtime">
            {entry.mtimeDisp}
        </td>
        <td class="path">
            <div class="filenames">
                <span class="path">/{entry.dirname.split("/").join(`/`)}</span>
                <!-- svelte-ignore a11y-click-events-have-key-events -->
                <!-- svelte-ignore a11y-no-static-element-interactions -->
                <!-- svelte-ignore a11y-missing-attribute -->
                <span class="filename"><a on:click={() => openFile(entry.path)}>{entry.filename}</a></span>
            </div>
        </td>
        <td>
            <span class="rev">
                {#if entry.isPlain}
                    <!-- svelte-ignore a11y-click-events-have-key-events -->
                    <!-- svelte-ignore a11y-no-static-element-interactions -->
                    <!-- svelte-ignore a11y-missing-attribute -->
                    <a on:click={() => showHistory(entry.path, entry?.rev || "")}>{entry.rev}</a>
                {:else}
                    {entry.rev}
                {/if}
            </span>
        </td>
        <td>
            {entry.changes}
        </td>
        {#if showChunkCorrected}
            <td>
                {entry.chunks}
            </td>
        {/if}
    </tr>
{/each}
<tr>
    <td colspan="5" class="more">
        {#if loading}
            <div class=""></div>
        {:else}
            <div><button on:click={() => prevWeek()}>+1 week</button></div>
        {/if}
    </td>
</tr>
</tbody>
</table>
</div>

View File

@@ -16,11 +16,11 @@ export class GlobalHistoryView extends SvelteItemView {
}
plugin: ObsidianLiveSyncPlugin;
icon = "clock";
override icon = "clock";
title: string = "";
navigation = true;
override navigation = true;
getIcon(): string {
override getIcon(): string {
return "clock";
}

View File

@@ -44,7 +44,7 @@ export class ConflictResolveModal extends Modal {
// sendValue("close-resolve-conflict:" + this.filename, false);
}
onOpen() {
override onOpen() {
const { contentEl } = this;
// Send cancel signal for the previous merge dialogue
// if not there, simply be ignored.
@@ -119,7 +119,7 @@ export class ConflictResolveModal extends Modal {
this.close();
}
onClose() {
override onClose() {
const { contentEl } = this;
contentEl.empty();
if (this.offEvent) {

View File

@@ -19,11 +19,11 @@ export class LogPaneView extends SvelteItemView {
}
plugin: ObsidianLiveSyncPlugin;
icon = "view-log";
override icon = "view-log";
title: string = "";
navigation = false;
override navigation = false;
getIcon(): string {
override getIcon(): string {
return "view-log";
}

View File

@@ -19,7 +19,7 @@ export class ModuleObsidianGlobalHistory extends AbstractObsidianModule {
showGlobalHistory() {
void this.services.API.showWindow(VIEW_TYPE_GLOBAL_HISTORY);
}
onBindFunction(core: typeof this.core, services: typeof core.services): void {
override onBindFunction(core: typeof this.core, services: typeof core.services): void {
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
}
}

View File

@@ -11,7 +11,7 @@ import {
} from "../../lib/src/common/types.ts";
import { ConflictResolveModal } from "./InteractiveConflictResolving/ConflictResolveModal.ts";
import { AbstractObsidianModule } from "../AbstractObsidianModule.ts";
import { displayRev, getPath, getPathWithoutPrefix } from "../../common/utils.ts";
import { displayRev } from "../../common/utils.ts";
import { fireAndForget } from "octagonal-wheels/promises";
import { serialized } from "octagonal-wheels/concurrency/lock";
import type { LiveSyncCore } from "../../main.ts";
@@ -110,7 +110,12 @@ export class ModuleInteractiveConflictResolver extends AbstractObsidianModule {
const notes: { id: DocumentID; path: FilePathWithPrefix; dispPath: string; mtime: number }[] = [];
for await (const doc of this.localDatabase.findAllDocs({ conflicts: true })) {
if (!("_conflicts" in doc)) continue;
notes.push({ id: doc._id, path: getPath(doc), dispPath: getPathWithoutPrefix(doc), mtime: doc.mtime });
notes.push({
id: doc._id,
path: this.getPath(doc),
dispPath: this.getPathWithoutPrefix(doc),
mtime: doc.mtime,
});
}
notes.sort((a, b) => b.mtime - a.mtime);
const notesList = notes.map((e) => e.dispPath);
@@ -134,7 +139,7 @@ export class ModuleInteractiveConflictResolver extends AbstractObsidianModule {
try {
for await (const doc of this.localDatabase.findAllDocs({ conflicts: true })) {
if (!("_conflicts" in doc)) continue;
notes.push({ path: getPath(doc), mtime: doc.mtime });
notes.push({ path: this.getPath(doc), mtime: doc.mtime });
}
if (notes.length > 0) {
this.core.confirm.askInPopup(
@@ -164,7 +169,7 @@ export class ModuleInteractiveConflictResolver extends AbstractObsidianModule {
}
return true;
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.onScanningStartupIssues.addHandler(this._allScanStat.bind(this));
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
services.conflict.resolveByUserInteraction.addHandler(this._anyResolveConflictByUI.bind(this));
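The conflict listing above walks every document with `findAllDocs({ conflicts: true })`, keeps only those carrying a `_conflicts` array, and sorts them newest-first. A condensed, hedged sketch (the document shape is simplified; the real code resolves display paths via `this.getPath(doc)`):

// Sketch of collecting conflicted documents, newest first.
type ConflictedNote = { path: string; mtime: number };

async function listConflicted(db: {
    findAllDocs(opt: { conflicts: boolean }): AsyncIterable<{ _conflicts?: string[]; path: string; mtime: number }>;
}): Promise<ConflictedNote[]> {
    const notes: ConflictedNote[] = [];
    for await (const doc of db.findAllDocs({ conflicts: true })) {
        if (!("_conflicts" in doc)) continue; // no conflicts on this document
        notes.push({ path: doc.path, mtime: doc.mtime });
    }
    return notes.sort((a, b) => b.mtime - a.mtime);
}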

View File

@@ -32,26 +32,29 @@ import { serialized } from "octagonal-wheels/concurrency/lock";
import { $msg } from "src/lib/src/common/i18n.ts";
import { P2PLogCollector } from "../../lib/src/replication/trystero/P2PReplicatorCore.ts";
import type { LiveSyncCore } from "../../main.ts";
import { LiveSyncError } from "@/lib/src/common/LSError.ts";
import { LiveSyncError } from "@lib/common/LSError.ts";
import { isValidPath } from "@/common/utils.ts";
import {
isValidFilenameInAndroid,
isValidFilenameInDarwin,
isValidFilenameInWidows,
} from "@/lib/src/string_and_binary/path.ts";
} from "@lib/string_and_binary/path.ts";
import { MARK_LOG_SEPARATOR } from "@lib/services/lib/logUtils.ts";
// This module cannot be a core module because it depends on the Obsidian UI.
// Re-inject the log function (DI) again.
const recentLogEntries = reactiveSource<LogEntry[]>([]);
setGlobalLogFunction((message: any, level?: number, key?: string) => {
const globalLogFunction = (message: any, level?: number, key?: string) => {
const messageX =
message instanceof Error
? new LiveSyncError("[Error Logged]: " + message.message, { cause: message })
: message;
const entry = { message: messageX, level, key } as LogEntry;
recentLogEntries.value = [...recentLogEntries.value, entry];
});
};
setGlobalLogFunction(globalLogFunction);
let recentLogs = [] as string[];
function addLog(log: string) {
@@ -63,8 +66,6 @@ function addLog(log: string) {
const showDebugLog = false;
export const MARK_DONE = "\u{2009}\u{2009}";
export class ModuleLog extends AbstractObsidianModule {
registerView = this.plugin.registerView.bind(this.plugin);
statusBar?: HTMLElement;
statusDiv?: HTMLElement;
@@ -101,12 +102,12 @@ export class ModuleLog extends AbstractObsidianModule {
});
return computed(() => formatted.value);
}
const labelReplication = padLeftSpComputed(this.core.replicationResultCount, `📥`);
const labelDBCount = padLeftSpComputed(this.core.databaseQueueCount, `📄`);
const labelStorageCount = padLeftSpComputed(this.core.storageApplyingCount, `💾`);
const labelReplication = padLeftSpComputed(this.services.replication.replicationResultCount, `📥`);
const labelDBCount = padLeftSpComputed(this.services.replication.databaseQueueCount, `📄`);
const labelStorageCount = padLeftSpComputed(this.services.replication.storageApplyingCount, `💾`);
const labelChunkCount = padLeftSpComputed(collectingChunks, `🧩`);
const labelPluginScanCount = padLeftSpComputed(pluginScanningCount, `🔌`);
const labelConflictProcessCount = padLeftSpComputed(this.core.conflictProcessQueueCount, `🔩`);
const labelConflictProcessCount = padLeftSpComputed(this.services.conflict.conflictProcessQueueCount, `🔩`);
const hiddenFilesCount = reactive(() => hiddenFilesEventCount.value - hiddenFilesProcessingCount.value);
const labelHiddenFilesCount = padLeftSpComputed(hiddenFilesCount, `⚙️`);
const queueCountLabelX = reactive(() => {
@@ -115,12 +116,12 @@ export class ModuleLog extends AbstractObsidianModule {
const queueCountLabel = () => queueCountLabelX.value;
const requestingStatLabel = computed(() => {
const diff = this.core.requestCount.value - this.core.responseCount.value;
const diff = this.services.API.requestCount.value - this.services.API.responseCount.value;
return diff != 0 ? "📲 " : "";
});
const replicationStatLabel = computed(() => {
const e = this.core.replicationStat.value;
const e = this.services.replicator.replicationStatics.value;
const sent = e.sent;
const arrived = e.arrived;
const maxPullSeq = e.maxPullSeq;
@@ -172,9 +173,9 @@ export class ModuleLog extends AbstractObsidianModule {
}
return { w, sent, pushLast, arrived, pullLast };
});
const labelProc = padLeftSpComputed(this.core.processing, ``);
const labelPend = padLeftSpComputed(this.core.totalQueued, `🛫`);
const labelInBatchDelay = padLeftSpComputed(this.core.batched, `📬`);
const labelProc = padLeftSpComputed(this.services.fileProcessing.processing, ``);
const labelPend = padLeftSpComputed(this.services.fileProcessing.totalQueued, `🛫`);
const labelInBatchDelay = padLeftSpComputed(this.services.fileProcessing.batched, `📬`);
const waitingLabel = computed(() => {
return `${labelProc()}${labelPend()}${labelInBatchDelay()}`;
});
@@ -251,7 +252,7 @@ export class ModuleLog extends AbstractObsidianModule {
}
}
// Case Sensitivity
if (this.services.setting.shouldCheckCaseInsensitively()) {
if (this.services.vault.shouldCheckCaseInsensitively()) {
const f = this.core.storageAccess
.getFiles()
.map((e) => e.path)
@@ -306,9 +307,9 @@ export class ModuleLog extends AbstractObsidianModule {
// const recent = logMessages.value;
const newMsg = message;
let newLog = this.settings?.showOnlyIconsOnEditor ? "" : status;
const moduleTagEnd = newLog.indexOf(`]\u{200A}`);
const moduleTagEnd = newLog.indexOf(`]${MARK_LOG_SEPARATOR}`);
if (moduleTagEnd != -1) {
newLog = newLog.substring(moduleTagEnd + 2);
newLog = newLog.substring(moduleTagEnd + MARK_LOG_SEPARATOR.length + 1);
}
this.statusBar?.setText(newMsg.split("\n")[0]);
@@ -494,7 +495,8 @@ export class ModuleLog extends AbstractObsidianModule {
}
}
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.API.addLog.setHandler(globalLogFunction);
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
services.appLifecycle.onSettingLoaded.addHandler(this._everyOnloadAfterLoadSettings.bind(this));
services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
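The status labels above are all built the same way: each counter becomes a computed label that renders only while non-zero, and the in-flight marker compares `requestCount` against `responseCount`. A minimal sketch with stand-in counter sources (padding and exact formatting are simplified relative to `padLeftSpComputed`):

import { reactive, reactiveSource } from "octagonal-wheels/dataobject/reactive";

// Stand-ins for the service counters used in the real module.
const replicationResultCount = reactiveSource(0);
const requestCount = reactiveSource(0);
const responseCount = reactiveSource(0);

const labelReplication = reactive(() =>
    replicationResultCount.value ? ` 📥${replicationResultCount.value}` : ""
);
const requestingStatLabel = reactive(() =>
    requestCount.value - responseCount.value !== 0 ? "📲 " : ""
);
const statusLine = reactive(() => `${requestingStatLabel.value}${labelReplication.value}`);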

View File

@@ -4,7 +4,6 @@ import { EVENT_REQUEST_SHOW_HISTORY } from "../../common/obsidianEvents.ts";
import type { FilePathWithPrefix, LoadedEntry, DocumentID } from "../../lib/src/common/types.ts";
import { AbstractObsidianModule } from "../AbstractObsidianModule.ts";
import { DocumentHistoryModal } from "./DocumentHistory/DocumentHistoryModal.ts";
import { getPath } from "../../common/utils.ts";
import { fireAndForget } from "octagonal-wheels/promises";
export class ModuleObsidianDocumentHistory extends AbstractObsidianModule {
@@ -41,7 +40,7 @@ export class ModuleObsidianDocumentHistory extends AbstractObsidianModule {
async fileHistory() {
const notes: { id: DocumentID; path: FilePathWithPrefix; dispPath: string; mtime: number }[] = [];
for await (const doc of this.localDatabase.findAllDocs()) {
notes.push({ id: doc._id, path: getPath(doc), dispPath: getPath(doc), mtime: doc.mtime });
notes.push({ id: doc._id, path: this.getPath(doc), dispPath: this.getPath(doc), mtime: doc.mtime });
}
notes.sort((a, b) => b.mtime - a.mtime);
const notesList = notes.map((e) => e.dispPath);
@@ -51,7 +50,7 @@ export class ModuleObsidianDocumentHistory extends AbstractObsidianModule {
this.showHistory(targetId.path, targetId.id);
}
}
onBindFunction(core: typeof this.core, services: typeof core.services): void {
override onBindFunction(core: typeof this.core, services: typeof core.services): void {
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
}
}

View File

@@ -1,335 +0,0 @@
import { AbstractObsidianModule } from "../AbstractObsidianModule.ts";
// import { PouchDB } from "../../lib/src/pouchdb/pouchdb-browser";
import { EVENT_REQUEST_RELOAD_SETTING_TAB, EVENT_SETTING_SAVED, eventHub } from "../../common/events.ts";
import {
type BucketSyncSetting,
ChunkAlgorithmNames,
type ConfigPassphraseStore,
type CouchDBConnection,
DEFAULT_SETTINGS,
type ObsidianLiveSyncSettings,
SALT_OF_PASSPHRASE,
SETTING_KEY_P2P_DEVICE_NAME,
} from "../../lib/src/common/types";
import { LOG_LEVEL_NOTICE, LOG_LEVEL_URGENT } from "octagonal-wheels/common/logger";
import { $msg, setLang } from "../../lib/src/common/i18n.ts";
import { isCloudantURI } from "../../lib/src/pouchdb/utils_couchdb.ts";
import { getLanguage } from "@/deps.ts";
import { SUPPORTED_I18N_LANGS, type I18N_LANGS } from "../../lib/src/common/rosetta.ts";
import { decryptString, encryptString } from "@/lib/src/encryption/stringEncryption.ts";
import type { LiveSyncCore } from "../../main.ts";
export class ModuleObsidianSettings extends AbstractObsidianModule {
async _everyOnLayoutReady(): Promise<boolean> {
let isChanged = false;
if (this.settings.displayLanguage == "") {
const obsidianLanguage = getLanguage();
if (
SUPPORTED_I18N_LANGS.indexOf(obsidianLanguage) !== -1 && // Check if the language is supported
obsidianLanguage != this.settings.displayLanguage // Check if the language is different from the current setting
) {
// Check if the current setting is not empty (Means migrated or installed).
this.settings.displayLanguage = obsidianLanguage as I18N_LANGS;
isChanged = true;
setLang(this.settings.displayLanguage);
} else if (this.settings.displayLanguage == "") {
this.settings.displayLanguage = "def";
setLang(this.settings.displayLanguage);
await this.services.setting.saveSettingData();
}
}
if (isChanged) {
const revert = $msg("dialog.yourLanguageAvailable.btnRevertToDefault");
if (
(await this.core.confirm.askSelectStringDialogue($msg(`dialog.yourLanguageAvailable`), ["OK", revert], {
defaultAction: "OK",
title: $msg(`dialog.yourLanguageAvailable.Title`),
})) == revert
) {
this.settings.displayLanguage = "def";
setLang(this.settings.displayLanguage);
}
await this.services.setting.saveSettingData();
}
return true;
}
getPassphrase(settings: ObsidianLiveSyncSettings) {
const methods: Record<ConfigPassphraseStore, () => Promise<string | false>> = {
"": () => Promise.resolve("*"),
LOCALSTORAGE: () => Promise.resolve(localStorage.getItem("ls-setting-passphrase") ?? false),
ASK_AT_LAUNCH: () => this.core.confirm.askString("Passphrase", "passphrase", ""),
};
const method = settings.configPassphraseStore;
const methodFunc = method in methods ? methods[method] : methods[""];
return methodFunc();
}
_saveDeviceAndVaultName(): void {
const lsKey = "obsidian-live-sync-vaultanddevicename-" + this.services.vault.getVaultName();
localStorage.setItem(lsKey, this.services.setting.getDeviceAndVaultName() || "");
}
usedPassphrase = "";
private _clearUsedPassphrase(): void {
this.usedPassphrase = "";
}
async decryptConfigurationItem(encrypted: string, passphrase: string) {
const dec = await decryptString(encrypted, passphrase + SALT_OF_PASSPHRASE);
if (dec) {
this.usedPassphrase = passphrase;
return dec;
}
return false;
}
async encryptConfigurationItem(src: string, settings: ObsidianLiveSyncSettings) {
if (this.usedPassphrase != "") {
return await encryptString(src, this.usedPassphrase + SALT_OF_PASSPHRASE);
}
const passphrase = await this.getPassphrase(settings);
if (passphrase === false) {
this._log(
"Failed to obtain passphrase when saving data.json! Please verify the configuration.",
LOG_LEVEL_URGENT
);
return "";
}
const dec = await encryptString(src, passphrase + SALT_OF_PASSPHRASE);
if (dec) {
this.usedPassphrase = passphrase;
return dec;
}
return "";
}
get appId() {
return `${"appId" in this.app ? this.app.appId : ""}`;
}
async _saveSettingData() {
this.services.setting.saveDeviceAndVaultName();
const settings = { ...this.settings };
settings.deviceAndVaultName = "";
if (settings.P2P_DevicePeerName && settings.P2P_DevicePeerName.trim() !== "") {
console.log("Saving device peer name to small config");
this.services.config.setSmallConfig(SETTING_KEY_P2P_DEVICE_NAME, settings.P2P_DevicePeerName.trim());
settings.P2P_DevicePeerName = "";
}
if (this.usedPassphrase == "" && !(await this.getPassphrase(settings))) {
this._log("Failed to retrieve passphrase. data.json contains unencrypted items!", LOG_LEVEL_NOTICE);
} else {
if (
settings.couchDB_PASSWORD != "" ||
settings.couchDB_URI != "" ||
settings.couchDB_USER != "" ||
settings.couchDB_DBNAME
) {
const connectionSetting: CouchDBConnection & BucketSyncSetting = {
couchDB_DBNAME: settings.couchDB_DBNAME,
couchDB_PASSWORD: settings.couchDB_PASSWORD,
couchDB_URI: settings.couchDB_URI,
couchDB_USER: settings.couchDB_USER,
accessKey: settings.accessKey,
bucket: settings.bucket,
endpoint: settings.endpoint,
region: settings.region,
secretKey: settings.secretKey,
useCustomRequestHandler: settings.useCustomRequestHandler,
bucketCustomHeaders: settings.bucketCustomHeaders,
couchDB_CustomHeaders: settings.couchDB_CustomHeaders,
useJWT: settings.useJWT,
jwtKey: settings.jwtKey,
jwtAlgorithm: settings.jwtAlgorithm,
jwtKid: settings.jwtKid,
jwtExpDuration: settings.jwtExpDuration,
jwtSub: settings.jwtSub,
useRequestAPI: settings.useRequestAPI,
bucketPrefix: settings.bucketPrefix,
forcePathStyle: settings.forcePathStyle,
};
settings.encryptedCouchDBConnection = await this.encryptConfigurationItem(
JSON.stringify(connectionSetting),
settings
);
settings.couchDB_PASSWORD = "";
settings.couchDB_DBNAME = "";
settings.couchDB_URI = "";
settings.couchDB_USER = "";
settings.accessKey = "";
settings.bucket = "";
settings.region = "";
settings.secretKey = "";
settings.endpoint = "";
}
if (settings.encrypt && settings.passphrase != "") {
settings.encryptedPassphrase = await this.encryptConfigurationItem(settings.passphrase, settings);
settings.passphrase = "";
}
}
await this.core.saveData(settings);
eventHub.emitEvent(EVENT_SETTING_SAVED, settings);
}
tryDecodeJson(encoded: string | false): object | false {
try {
if (!encoded) return false;
return JSON.parse(encoded);
} catch {
return false;
}
}
async _decryptSettings(settings: ObsidianLiveSyncSettings): Promise<ObsidianLiveSyncSettings> {
const passphrase = await this.getPassphrase(settings);
if (passphrase === false) {
this._log("No passphrase found for data.json! Verify configuration before syncing.", LOG_LEVEL_URGENT);
} else {
if (settings.encryptedCouchDBConnection) {
const keys = [
"couchDB_URI",
"couchDB_USER",
"couchDB_PASSWORD",
"couchDB_DBNAME",
"accessKey",
"bucket",
"endpoint",
"region",
"secretKey",
] as (keyof CouchDBConnection | keyof BucketSyncSetting)[];
const decrypted = this.tryDecodeJson(
await this.decryptConfigurationItem(settings.encryptedCouchDBConnection, passphrase)
) as CouchDBConnection & BucketSyncSetting;
if (decrypted) {
for (const key of keys) {
if (key in decrypted) {
//@ts-ignore
settings[key] = decrypted[key];
}
}
} else {
this._log(
"Failed to decrypt passphrase from data.json! Ensure configuration is correct before syncing with remote.",
LOG_LEVEL_URGENT
);
for (const key of keys) {
//@ts-ignore
settings[key] = "";
}
}
}
if (settings.encrypt && settings.encryptedPassphrase) {
const encrypted = settings.encryptedPassphrase;
const decrypted = await this.decryptConfigurationItem(encrypted, passphrase);
if (decrypted) {
settings.passphrase = decrypted;
} else {
this._log(
"Failed to decrypt passphrase from data.json! Ensure configuration is correct before syncing with remote.",
LOG_LEVEL_URGENT
);
settings.passphrase = "";
}
}
}
return settings;
}
/**
* This method mutates the settings object.
* @param settings
* @returns
*/
_adjustSettings(settings: ObsidianLiveSyncSettings): Promise<ObsidianLiveSyncSettings> {
// Adjust settings as needed
// Delete this feature to avoid problems on mobile.
settings.disableRequestURI = true;
// GC is disabled.
settings.gcDelay = 0;
// So, use history is always enabled.
settings.useHistory = true;
if ("workingEncrypt" in settings) delete settings.workingEncrypt;
if ("workingPassphrase" in settings) delete settings.workingPassphrase;
// Splitter configurations have been replaced with chunkSplitterVersion.
if (settings.chunkSplitterVersion == "") {
if (settings.enableChunkSplitterV2) {
if (settings.useSegmenter) {
settings.chunkSplitterVersion = "v2-segmenter";
} else {
settings.chunkSplitterVersion = "v2";
}
} else {
settings.chunkSplitterVersion = "";
}
} else if (!(settings.chunkSplitterVersion in ChunkAlgorithmNames)) {
settings.chunkSplitterVersion = "";
}
return Promise.resolve(settings);
}
async _loadSettings(): Promise<void> {
const settings = Object.assign({}, DEFAULT_SETTINGS, await this.core.loadData()) as ObsidianLiveSyncSettings;
if (typeof settings.isConfigured == "undefined") {
// If migrated, mark true
if (JSON.stringify(settings) !== JSON.stringify(DEFAULT_SETTINGS)) {
settings.isConfigured = true;
} else {
settings.additionalSuffixOfDatabaseName = this.appId;
settings.isConfigured = false;
}
}
this.settings = await this.services.setting.decryptSettings(settings);
setLang(this.settings.displayLanguage);
await this.services.setting.adjustSettings(this.settings);
const lsKey = "obsidian-live-sync-vaultanddevicename-" + this.services.vault.getVaultName();
if (this.settings.deviceAndVaultName != "") {
if (!localStorage.getItem(lsKey)) {
this.services.setting.setDeviceAndVaultName(this.settings.deviceAndVaultName);
this.services.setting.saveDeviceAndVaultName();
this.settings.deviceAndVaultName = "";
}
}
if (isCloudantURI(this.settings.couchDB_URI) && this.settings.customChunkSize != 0) {
this._log(
"Configuration issues detected and automatically resolved. However, unsynchronized data may exist. Consider rebuilding if necessary.",
LOG_LEVEL_NOTICE
);
this.settings.customChunkSize = 0;
}
this.services.setting.setDeviceAndVaultName(localStorage.getItem(lsKey) || "");
if (this.services.setting.getDeviceAndVaultName() == "") {
if (this.settings.usePluginSync) {
this._log("Device name missing. Disabling plug-in sync.", LOG_LEVEL_NOTICE);
this.settings.usePluginSync = false;
}
}
// this.core.ignoreFiles = this.settings.ignoreFiles.split(",").map(e => e.trim());
eventHub.emitEvent(EVENT_REQUEST_RELOAD_SETTING_TAB);
}
private _currentSettings(): ObsidianLiveSyncSettings {
return this.settings;
}
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
super.onBindFunction(core, services);
services.appLifecycle.onLayoutReady.addHandler(this._everyOnLayoutReady.bind(this));
services.setting.clearUsedPassphrase.setHandler(this._clearUsedPassphrase.bind(this));
services.setting.decryptSettings.setHandler(this._decryptSettings.bind(this));
services.setting.adjustSettings.setHandler(this._adjustSettings.bind(this));
services.setting.loadSettings.setHandler(this._loadSettings.bind(this));
services.setting.currentSettings.setHandler(this._currentSettings.bind(this));
services.setting.saveDeviceAndVaultName.setHandler(this._saveDeviceAndVaultName.bind(this));
services.setting.saveSettingData.setHandler(this._saveSettingData.bind(this));
}
}
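For reference, the settings module removed here sealed sensitive connection fields before writing data.json. A hedged sketch of that round-trip using the same `encryptString`/`decryptString` helpers and `SALT_OF_PASSPHRASE` salt; error handling is reduced to returning `false`:

import { decryptString, encryptString } from "@/lib/src/encryption/stringEncryption.ts";
import { SALT_OF_PASSPHRASE } from "../../lib/src/common/types";

// Sketch: credentials are serialised, encrypted with the user's passphrase
// (salted), stored as one string, and the plain fields are then blanked.
async function sealConnection(connection: object, passphrase: string): Promise<string> {
    return await encryptString(JSON.stringify(connection), passphrase + SALT_OF_PASSPHRASE);
}
async function unsealConnection<T>(encrypted: string, passphrase: string): Promise<T | false> {
    const json = await decryptString(encrypted, passphrase + SALT_OF_PASSPHRASE);
    try {
        return json ? (JSON.parse(json) as T) : false;
    } catch {
        return false;
    }
}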

View File

@@ -1,4 +1,3 @@
import { AbstractObsidianModule } from "../AbstractObsidianModule.ts";
// import { PouchDB } from "../../lib/src/pouchdb/pouchdb-browser";
import { isObjectDifferent } from "octagonal-wheels/object";
import { EVENT_SETTING_SAVED, eventHub } from "../../common/events";
@@ -6,9 +5,13 @@ import { fireAndForget } from "octagonal-wheels/promises";
import { DEFAULT_SETTINGS, type FilePathWithPrefix, type ObsidianLiveSyncSettings } from "../../lib/src/common/types";
import { parseYaml, stringifyYaml } from "../../deps";
import { LOG_LEVEL_DEBUG, LOG_LEVEL_INFO, LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE } from "octagonal-wheels/common/logger";
import { AbstractModule } from "../AbstractModule.ts";
import type { ServiceContext } from "@lib/services/base/ServiceBase.ts";
import type { InjectableServiceHub } from "@lib/services/InjectableServices.ts";
import type { LiveSyncCore } from "@/main.ts";
const SETTING_HEADER = "````yaml:livesync-setting\n";
const SETTING_FOOTER = "\n````";
export class ModuleObsidianSettingsAsMarkdown extends AbstractObsidianModule {
export class ModuleObsidianSettingsAsMarkdown extends AbstractModule {
_everyOnloadStart(): Promise<boolean> {
this.addCommand({
id: "livesync-export-config",
@@ -242,7 +245,8 @@ We can perform a command in this file.
this._log(`Markdown setting: ${filename} has been updated!`, LOG_LEVEL_VERBOSE);
}
}
onBindFunction(core: typeof this.plugin, services: typeof core.services): void {
override onBindFunction(core: LiveSyncCore, services: InjectableServiceHub<ServiceContext>): void {
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
}
}

View File

@@ -29,7 +29,7 @@ export class ModuleObsidianSettingDialogue extends AbstractObsidianModule {
get appId() {
return `${"appId" in this.app ? this.app.appId : ""}`;
}
onBindFunction(core: typeof this.plugin, services: typeof core.services): void {
override onBindFunction(core: typeof this.plugin, services: typeof core.services): void {
services.appLifecycle.onInitialise.addHandler(this._everyOnloadStart.bind(this));
}
}

View File

@@ -9,7 +9,6 @@ import {
EVENT_REQUEST_SHOW_SETUP_QR,
eventHub,
} from "../../common/events.ts";
import { AbstractObsidianModule } from "../AbstractObsidianModule.ts";
import { $msg } from "../../lib/src/common/i18n.ts";
// import { performDoctorConsultation, RebuildOptions } from "@/lib/src/common/configForDoc.ts";
import type { LiveSyncCore } from "../../main.ts";
@@ -20,11 +19,12 @@ import {
OutputFormat,
} from "../../lib/src/API/processSetting.ts";
import { SetupManager, UserMode } from "./SetupManager.ts";
import { AbstractModule } from "../AbstractModule.ts";
export class ModuleSetupObsidian extends AbstractObsidianModule {
export class ModuleSetupObsidian extends AbstractModule {
private _setupManager!: SetupManager;
private _everyOnload(): Promise<boolean> {
this._setupManager = this.plugin.getModule(SetupManager);
this._setupManager = this.core.getModule(SetupManager);
this.registerObsidianProtocolHandler("setuplivesync", async (conf: any) => {
if (conf.settings) {
await this._setupManager.onUseSetupURI(
@@ -194,7 +194,7 @@ export class ModuleSetupObsidian extends AbstractObsidianModule {
// }
// }
onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
override onBindFunction(core: LiveSyncCore, services: typeof core.services): void {
services.appLifecycle.onLoaded.addHandler(this._everyOnload.bind(this));
}
}

View File

@@ -58,7 +58,7 @@ export class LiveSyncSetting extends Setting {
}
}
setDesc(desc: string | DocumentFragment): this {
override setDesc(desc: string | DocumentFragment): this {
this.descBuf = desc;
DEV: {
this._createDocStub("desc", desc);
@@ -66,7 +66,7 @@ export class LiveSyncSetting extends Setting {
super.setDesc(desc);
return this;
}
setName(name: string | DocumentFragment): this {
override setName(name: string | DocumentFragment): this {
this.nameBuf = name;
DEV: {
this._createDocStub("name", name);

View File

@@ -16,7 +16,7 @@ import {
import { delay, isObjectDifferent, sizeToHumanReadable } from "../../../lib/src/common/utils.ts";
import { versionNumberString2Number } from "../../../lib/src/string_and_binary/convert.ts";
import { Logger } from "../../../lib/src/common/logger.ts";
import { checkSyncInfo } from "@/lib/src/pouchdb/negotiation.ts";
import { checkSyncInfo } from "@lib/pouchdb/negotiation.ts";
import { testCrypt } from "octagonal-wheels/encryption/encryption";
import ObsidianLiveSyncPlugin from "../../../main.ts";
import { scheduleTask } from "../../../common/utils.ts";
@@ -374,7 +374,7 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
this.initialSettings = undefined;
}
hide() {
override hide() {
this.isShown = false;
}
isShown: boolean = false;
@@ -424,8 +424,6 @@ export class ObsidianLiveSyncSettingTab extends PluginSettingTab {
//@ts-ignore
manifestVersion: string = MANIFEST_VERSION || "-";
//@ts-ignore
updateInformation: string = UPDATE_INFO || "";
lastVersion = ~~(versionNumberString2Number(this.manifestVersion) / 1000);

View File

@@ -21,7 +21,7 @@ import {
} from "../../../lib/src/common/utils.ts";
import { Logger } from "../../../lib/src/common/logger.ts";
import { isCloudantURI } from "../../../lib/src/pouchdb/utils_couchdb.ts";
import { getPath, requestToCouchDBWithCredentials } from "../../../common/utils.ts";
import { requestToCouchDBWithCredentials } from "../../../common/utils.ts";
import { addPrefix, shouldBeIgnored, stripAllPrefixes } from "../../../lib/src/string_and_binary/path.ts";
import { $msg } from "../../../lib/src/common/i18n.ts";
import { Semaphore } from "octagonal-wheels/concurrency/semaphore";
@@ -48,7 +48,7 @@ export function paneHatch(this: ObsidianLiveSyncSettingTab, paneEl: HTMLElement,
.setDesc($msg("Setting.TroubleShooting.Doctor.Desc"))
.addButton((button) =>
button
.setButtonText("Run Doctor")
.setButtonText($msg("Run Doctor"))
.setCta()
.setDisabled(false)
.onClick(() => {
@@ -69,9 +69,9 @@ export function paneHatch(this: ObsidianLiveSyncSettingTab, paneEl: HTMLElement,
eventHub.emitEvent(EVENT_REQUEST_RUN_FIX_INCOMPLETE);
})
);
new Setting(paneEl).setName("Prepare the 'report' to create an issue").addButton((button) =>
new Setting(paneEl).setName($msg("Prepare the 'report' to create an issue")).addButton((button) =>
button
.setButtonText("Copy Report to clipboard")
.setButtonText($msg("Copy Report to clipboard"))
.setCta()
.setDisabled(false)
.onClick(async () => {
@@ -189,20 +189,22 @@ ${stringifyYaml({
})
);
new Setting(paneEl)
.setName("Analyse database usage")
.setName($msg("Analyse database usage"))
.setDesc(
"Analyse database usage and generate a TSV report for diagnosis yourself. You can paste the generated report with any spreadsheet you like."
$msg(
"Analyse database usage and generate a TSV report for diagnosis yourself. You can paste the generated report with any spreadsheet you like."
)
)
.addButton((button) =>
button.setButtonText("Analyse").onClick(() => {
button.setButtonText($msg("Analyse")).onClick(() => {
eventHub.emitEvent(EVENT_ANALYSE_DB_USAGE);
})
);
new Setting(paneEl)
.setName("Reset notification threshold and check the remote database usage")
.setDesc("Reset the remote storage size threshold and check the remote storage size again.")
.setName($msg("Reset notification threshold and check the remote database usage"))
.setDesc($msg("Reset the remote storage size threshold and check the remote storage size again."))
.addButton((button) =>
button.setButtonText("Check").onClick(() => {
button.setButtonText($msg("Check")).onClick(() => {
eventHub.emitEvent(EVENT_REQUEST_CHECK_REMOTE_SIZE);
})
);
@@ -359,7 +361,7 @@ ${stringifyYaml({
.setButtonText("Resolve All")
.setCta()
.onClick(async () => {
await this.plugin.rebuilder.resolveAllConflictedFilesByNewerOnes();
await this.services.conflict.resolveAllConflictedFilesByNewerOnes();
})
);
@@ -386,7 +388,7 @@ ${stringifyYaml({
const adn = this.plugin.localDatabase.findAllDocs();
for await (const i of adn) {
const path = getPath(i);
const path = this.services.path.getPath(i);
if (path.startsWith(ICXHeader)) continue;
if (path.startsWith(PSCHeader)) continue;
if (!this.plugin.settings.syncInternalFiles && path.startsWith(ICHeader)) continue;

View File

@@ -32,7 +32,7 @@ export function paneMaintenance(
(e) => {
e.addEventListener("click", () => {
fireAndForget(async () => {
await this.services.remote.markResolved();
await this.services.replication.markResolved();
this.display();
});
});
@@ -59,7 +59,7 @@ export function paneMaintenance(
(e) => {
e.addEventListener("click", () => {
fireAndForget(async () => {
await this.services.remote.markUnlocked();
await this.services.replication.markUnlocked();
this.display();
});
});
@@ -78,7 +78,7 @@ export function paneMaintenance(
.setDisabled(false)
.setWarning()
.onClick(async () => {
await this.services.remote.markLocked();
await this.services.replication.markLocked();
})
)
.addOnUpdate(this.onlyOnCouchDBOrMinIO);

View File

@@ -92,7 +92,7 @@ export function paneRemoteConfig(
}
{
void addPanel(paneEl, $msg("obsidianLiveSyncSettingTab.titleRemoteServer"), () => {}).then((paneEl) => {
const setting = new Setting(paneEl).setName("Active Remote Configuration");
const setting = new Setting(paneEl).setName($msg("Active Remote Configuration"));
const el = setting.controlEl.createDiv({});
el.setText(`${remoteNameMap[this.editingSettings.remoteType] || " - "}`);

View File

@@ -31,10 +31,10 @@ export function paneSetup(
});
new Setting(paneEl)
.setName("Rerun Onboarding Wizard")
.setDesc("Rerun the onboarding wizard to set up Self-hosted LiveSync again.")
.setName($msg("Rerun Onboarding Wizard"))
.setDesc($msg("Rerun the onboarding wizard to set up Self-hosted LiveSync again."))
.addButton((text) => {
text.setButtonText("Rerun Wizard").onClick(async () => {
text.setButtonText($msg("Rerun Wizard")).onClick(async () => {
const setupManager = this.plugin.getModule(SetupManager);
await setupManager.onOnboard(UserMode.ExistingUser);
// await this.plugin.moduleSetupObsidian.onBoardingWizard(true);

View File

@@ -105,7 +105,7 @@ export function paneSyncSettings(
if (!this.editingSettings.isConfigured) {
this.editingSettings.isConfigured = true;
await this.saveAllDirtySettings();
await this.services.setting.realiseSetting();
await this.services.control.applySettings();
await this.rebuildDB("localOnly");
// this.resetEditingSettings();
if (
@@ -124,13 +124,13 @@ export function paneSyncSettings(
await this.confirmRebuild();
} else {
await this.saveAllDirtySettings();
await this.services.setting.realiseSetting();
await this.services.control.applySettings();
this.services.appLifecycle.askRestart();
}
}
} else {
await this.saveAllDirtySettings();
await this.services.setting.realiseSetting();
await this.services.control.applySettings();
}
});
});
@@ -169,7 +169,7 @@ export function paneSyncSettings(
}
await this.saveSettings(["liveSync", "periodicReplication"]);
await this.services.setting.realiseSetting();
await this.services.control.applySettings();
});
new Setting(paneEl)

View File

@@ -8,7 +8,6 @@ import {
REMOTE_P2P,
} from "../../lib/src/common/types.ts";
import { generatePatchObj, isObjectDifferent } from "../../lib/src/common/utils.ts";
import { AbstractObsidianModule } from "../AbstractObsidianModule.ts";
import Intro from "./SetupWizard/dialogs/Intro.svelte";
import SelectMethodNewUser from "./SetupWizard/dialogs/SelectMethodNewUser.svelte";
import SelectMethodExisting from "./SetupWizard/dialogs/SelectMethodExisting.svelte";
@@ -23,6 +22,7 @@ import SetupRemoteBucket from "./SetupWizard/dialogs/SetupRemoteBucket.svelte";
import SetupRemoteP2P from "./SetupWizard/dialogs/SetupRemoteP2P.svelte";
import SetupRemoteE2EE from "./SetupWizard/dialogs/SetupRemoteE2EE.svelte";
import { decodeSettingsFromQRCodeData } from "../../lib/src/API/processSetting.ts";
import { AbstractModule } from "../AbstractModule.ts";
/**
* User modes for onboarding and setup
@@ -50,7 +50,7 @@ export const enum UserMode {
/**
* Setup Manager to handle onboarding and configuration setup
*/
export class SetupManager extends AbstractObsidianModule {
export class SetupManager extends AbstractModule {
// /**
// * Dialog manager for handling Svelte dialogs
// */

View File

@@ -6,7 +6,7 @@
import Options from "@/lib/src/UI/components/Options.svelte";
import Instruction from "@/lib/src/UI/components/Instruction.svelte";
import UserDecisions from "@/lib/src/UI/components/UserDecisions.svelte";
const TYPE_COUCHDB = "couchdb";
const TYPE_COUCHDB = "couchdb";
const TYPE_BUCKET = "bucket";
const TYPE_P2P = "p2p";
const TYPE_CANCELLED = "cancelled";
@@ -44,9 +44,9 @@
Synchronisation utilising journal files. You must have set up an S3/MinIO/R2 compatible object storage.
</Option>
<Option selectedValue={TYPE_P2P} title="Peer-to-Peer only" bind:value={userType}>
This is an experimental feature enabling direct synchronisation between devices. No server is required, but
both devices must be online at the same time for synchronisation to occur, and some features may be limited.
Internet connection is only required to signalling (detecting peers) and not for data transfer.
This feature enables direct synchronisation between devices. No server is required, but both devices must be
online at the same time for synchronisation to occur, and some features may be limited. An Internet connection
is only required for signalling (detecting peers), not for data transfer.
</Option>
</Options>
</Instruction>

View File

@@ -1,34 +0,0 @@
import type {
FilePathWithPrefix,
LoadedEntry,
MetaEntry,
UXFileInfo,
UXFileInfoStub,
} from "../../lib/src/common/types";
export interface DatabaseFileAccess {
delete: (file: UXFileInfoStub | FilePathWithPrefix, rev?: string) => Promise<boolean>;
store: (file: UXFileInfo, force?: boolean, skipCheck?: boolean) => Promise<boolean>;
storeContent(path: FilePathWithPrefix, content: string): Promise<boolean>;
createChunks: (file: UXFileInfo, force?: boolean, skipCheck?: boolean) => Promise<boolean>;
fetch: (
file: UXFileInfoStub | FilePathWithPrefix,
rev?: string,
waitForReady?: boolean,
skipCheck?: boolean
) => Promise<UXFileInfo | false>;
fetchEntryFromMeta: (meta: MetaEntry, waitForReady?: boolean, skipCheck?: boolean) => Promise<LoadedEntry | false>;
fetchEntryMeta: (
file: UXFileInfoStub | FilePathWithPrefix,
rev?: string,
skipCheck?: boolean
) => Promise<MetaEntry | false>;
fetchEntry: (
file: UXFileInfoStub | FilePathWithPrefix,
rev?: string,
waitForReady?: boolean,
skipCheck?: boolean
) => Promise<LoadedEntry | false>;
getConflictedRevs: (file: UXFileInfoStub | FilePathWithPrefix) => Promise<string[]>;
// storeFromStorage: (file: UXFileInfoStub | FilePathWithPrefix, force?: boolean) => Promise<boolean>;
}

View File

@@ -1,12 +0,0 @@
export interface Rebuilder {
$performRebuildDB(
method: "localOnly" | "remoteOnly" | "rebuildBothByThisDevice" | "localOnlyWithChunks"
): Promise<void>;
$rebuildRemote(): Promise<void>;
$rebuildEverything(): Promise<void>;
$fetchLocal(makeLocalChunkBeforeSync?: boolean, preventMakeLocalFilesBeforeSync?: boolean): Promise<void>;
scheduleRebuild(): Promise<void>;
scheduleFetch(): Promise<void>;
resolveAllConflictedFilesByNewerOnes(): Promise<void>;
}

View File

@@ -1,61 +0,0 @@
import type {
FilePath,
FilePathWithPrefix,
UXDataWriteOptions,
UXFileInfo,
UXFileInfoStub,
UXFolderInfo,
UXStat,
} from "../../lib/src/common/types";
import type { CustomRegExp } from "../../lib/src/common/utils";
export interface StorageAccess {
restoreState(): Promise<void>;
processWriteFile<T>(file: UXFileInfoStub | FilePathWithPrefix, proc: () => Promise<T>): Promise<T>;
processReadFile<T>(file: UXFileInfoStub | FilePathWithPrefix, proc: () => Promise<T>): Promise<T>;
isFileProcessing(file: UXFileInfoStub | FilePathWithPrefix): boolean;
deleteVaultItem(file: FilePathWithPrefix | UXFileInfoStub | UXFolderInfo): Promise<void>;
writeFileAuto(path: string, data: string | ArrayBuffer, opt?: UXDataWriteOptions): Promise<boolean>;
readFileAuto(path: string): Promise<string | ArrayBuffer>;
readFileText(path: string): Promise<string>;
isExists(path: string): Promise<boolean>;
writeHiddenFileAuto(path: string, data: string | ArrayBuffer, opt?: UXDataWriteOptions): Promise<boolean>;
appendHiddenFile(path: string, data: string, opt?: UXDataWriteOptions): Promise<boolean>;
stat(path: string): Promise<UXStat | null>;
statHidden(path: string): Promise<UXStat | null>;
removeHidden(path: string): Promise<boolean>;
readHiddenFileAuto(path: string): Promise<string | ArrayBuffer>;
readHiddenFileBinary(path: string): Promise<ArrayBuffer>;
readHiddenFileText(path: string): Promise<string>;
isExistsIncludeHidden(path: string): Promise<boolean>;
// This could also work for the hidden files.
ensureDir(path: string): Promise<boolean>;
triggerFileEvent(event: string, path: string): void;
triggerHiddenFile(path: string): Promise<void>;
getFileStub(path: string): UXFileInfoStub | null;
readStubContent(stub: UXFileInfoStub): Promise<UXFileInfo | false>;
getStub(path: string): UXFileInfoStub | UXFolderInfo | null;
getFiles(): UXFileInfoStub[];
getFileNames(): FilePathWithPrefix[];
touched(file: UXFileInfoStub | FilePathWithPrefix): Promise<void>;
recentlyTouched(file: UXFileInfoStub | FilePathWithPrefix): boolean;
clearTouched(): void;
// -- Low-Level
delete(file: FilePathWithPrefix | UXFileInfoStub | string, force: boolean): Promise<void>;
trash(file: FilePathWithPrefix | UXFileInfoStub | string, system: boolean): Promise<void>;
getFilesIncludeHidden(
basePath: string,
includeFilter?: CustomRegExp[],
excludeFilter?: CustomRegExp[],
skipFolder?: string[]
): Promise<FilePath[]>;
}

View File

@@ -3,20 +3,16 @@ import { LOG_LEVEL_NOTICE, LOG_LEVEL_VERBOSE, VER, type ObsidianLiveSyncSettings
import {
EVENT_LAYOUT_READY,
EVENT_PLUGIN_LOADED,
EVENT_PLUGIN_UNLOADED,
EVENT_REQUEST_RELOAD_SETTING_TAB,
EVENT_SETTING_SAVED,
eventHub,
} from "../../common/events.ts";
import { $msg, setLang } from "../../lib/src/common/i18n.ts";
import { versionNumberString2Number } from "../../lib/src/string_and_binary/convert.ts";
import { cancelAllPeriodicTask, cancelAllTasks } from "octagonal-wheels/concurrency/task";
import { stopAllRunningProcessors } from "octagonal-wheels/concurrency/processor";
import { AbstractModule } from "../AbstractModule.ts";
import { EVENT_PLATFORM_UNLOADED } from "@lib/events/coreEvents";
import type { InjectableServiceHub } from "@lib/services/implements/injectable/InjectableServiceHub.ts";
import type { LiveSyncCore } from "../../main.ts";
import { initialiseWorkerModule } from "@/lib/src/worker/bgWorker.ts";
import { initialiseWorkerModule } from "@lib/worker/bgWorker.ts";
export class ModuleLiveSyncMain extends AbstractModule {
async _onLiveSyncReady() {
@@ -49,7 +45,7 @@ export class ModuleLiveSyncMain extends AbstractModule {
}
if (!(await this.core.services.appLifecycle.onFirstInitialise())) return false;
// await this.core.$$realizeSettingSyncMode();
await this.services.setting.realiseSetting();
await this.services.control.applySettings();
fireAndForget(async () => {
this._log($msg("moduleLiveSyncMain.logAdditionalSafetyScan"), LOG_LEVEL_VERBOSE);
if (!(await this.services.appLifecycle.onScanningStartupIssues())) {
@@ -65,7 +61,7 @@ export class ModuleLiveSyncMain extends AbstractModule {
eventHub.onEvent(EVENT_SETTING_SAVED, (settings: ObsidianLiveSyncSettings) => {
fireAndForget(async () => {
try {
await this.core.services.setting.realiseSetting();
await this.core.services.control.applySettings();
const lang = this.core.services.setting.currentSettings()?.displayLanguage ?? undefined;
if (lang !== undefined) {
setLang(this.core.services.setting.currentSettings()?.displayLanguage);
@@ -126,7 +122,10 @@ export class ModuleLiveSyncMain extends AbstractModule {
await this.saveSettings();
}
localStorage.setItem(lsKey, `${VER}`);
await this.services.database.openDatabase();
await this.services.database.openDatabase({
databaseEvents: this.services.databaseEvents,
replicator: this.services.replicator,
});
// this.core.$$realizeSettingSyncMode = this.core.$$realizeSettingSyncMode.bind(this);
// this.$$parseReplicationResult = this.$$parseReplicationResult.bind(this);
// this.$$replicate = this.$$replicate.bind(this);
@@ -136,89 +135,82 @@ export class ModuleLiveSyncMain extends AbstractModule {
return true;
}
async _onLiveSyncUnload(): Promise<void> {
eventHub.emitEvent(EVENT_PLUGIN_UNLOADED);
await this.services.appLifecycle.onBeforeUnload();
cancelAllPeriodicTask();
cancelAllTasks();
stopAllRunningProcessors();
await this.services.appLifecycle.onUnload();
this._unloaded = true;
for (const addOn of this.core.addOns) {
addOn.onunload();
}
if (this.localDatabase != null) {
this.localDatabase.onunload();
if (this.core.replicator) {
this.core.replicator?.closeReplication();
}
await this.localDatabase.close();
}
eventHub.emitEvent(EVENT_PLATFORM_UNLOADED);
eventHub.offAll();
this._log($msg("moduleLiveSyncMain.logUnloadingPlugin"));
return;
}
// async _onLiveSyncUnload(): Promise<void> {
// eventHub.emitEvent(EVENT_PLUGIN_UNLOADED);
// await this.services.appLifecycle.onBeforeUnload();
// cancelAllPeriodicTask();
// cancelAllTasks();
// stopAllRunningProcessors();
// await this.services.appLifecycle.onUnload();
// this._unloaded = true;
// for (const addOn of this.core.addOns) {
// addOn.onunload();
// }
// if (this.localDatabase != null) {
// this.localDatabase.onunload();
// if (this.core.replicator) {
// this.core.replicator?.closeReplication();
// }
// await this.localDatabase.close();
// }
// eventHub.emitEvent(EVENT_PLATFORM_UNLOADED);
// eventHub.offAll();
// this._log($msg("moduleLiveSyncMain.logUnloadingPlugin"));
// return;
// }
private async _realizeSettingSyncMode(): Promise<void> {
await this.services.appLifecycle.onSuspending();
await this.services.setting.onBeforeRealiseSetting();
this.localDatabase.refreshSettings();
await this.services.fileProcessing.commitPendingFileEvents();
await this.services.setting.onRealiseSetting();
// disable all sync temporarily.
if (this.services.appLifecycle.isSuspended()) return;
await this.services.appLifecycle.onResuming();
await this.services.appLifecycle.onResumed();
await this.services.setting.onSettingRealised();
return;
}
// private async _realizeSettingSyncMode(): Promise<void> {
// await this.services.appLifecycle.onSuspending();
// await this.services.setting.onBeforeRealiseSetting();
// this.localDatabase.refreshSettings();
// await this.services.fileProcessing.commitPendingFileEvents();
// await this.services.setting.onRealiseSetting();
// // disable all sync temporarily.
// if (this.services.appLifecycle.isSuspended()) return;
// await this.services.appLifecycle.onResuming();
// await this.services.appLifecycle.onResumed();
// await this.services.setting.onSettingRealised();
// return;
// }
_isReloadingScheduled(): boolean {
return this.core._totalProcessingCount !== undefined;
}
// isReady = false;
isReady = false;
// _isReady(): boolean {
// return this.isReady;
// }
_isReady(): boolean {
return this.isReady;
}
// _markIsReady(): void {
// this.isReady = true;
// }
_markIsReady(): void {
this.isReady = true;
}
// _resetIsReady(): void {
// this.isReady = false;
// }
_resetIsReady(): void {
this.isReady = false;
}
// _suspended = false;
// _isSuspended(): boolean {
// return this._suspended || !this.settings?.isConfigured;
// }
_suspended = false;
_isSuspended(): boolean {
return this._suspended || !this.settings?.isConfigured;
}
// _setSuspended(value: boolean) {
// this._suspended = value;
// }
_setSuspended(value: boolean) {
this._suspended = value;
}
// _unloaded = false;
// _isUnloaded(): boolean {
// return this._unloaded;
// }
_unloaded = false;
_isUnloaded(): boolean {
return this._unloaded;
}
onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
override onBindFunction(core: LiveSyncCore, services: InjectableServiceHub): void {
super.onBindFunction(core, services);
services.appLifecycle.isSuspended.setHandler(this._isSuspended.bind(this));
services.appLifecycle.setSuspended.setHandler(this._setSuspended.bind(this));
services.appLifecycle.isReady.setHandler(this._isReady.bind(this));
services.appLifecycle.markIsReady.setHandler(this._markIsReady.bind(this));
services.appLifecycle.resetIsReady.setHandler(this._resetIsReady.bind(this));
services.appLifecycle.hasUnloaded.setHandler(this._isUnloaded.bind(this));
services.appLifecycle.isReloadingScheduled.setHandler(this._isReloadingScheduled.bind(this));
// services.appLifecycle.isSuspended.setHandler(this._isSuspended.bind(this));
// services.appLifecycle.setSuspended.setHandler(this._setSuspended.bind(this));
// services.appLifecycle.isReady.setHandler(this._isReady.bind(this));
// services.appLifecycle.markIsReady.setHandler(this._markIsReady.bind(this));
// services.appLifecycle.resetIsReady.setHandler(this._resetIsReady.bind(this));
// services.appLifecycle.hasUnloaded.setHandler(this._isUnloaded.bind(this));
services.appLifecycle.onReady.addHandler(this._onLiveSyncReady.bind(this));
services.appLifecycle.onWireUpEvents.addHandler(this._wireUpEvents.bind(this));
services.appLifecycle.onLoad.addHandler(this._onLiveSyncLoad.bind(this));
services.appLifecycle.onAppUnload.addHandler(this._onLiveSyncUnload.bind(this));
services.setting.realiseSetting.setHandler(this._realizeSettingSyncMode.bind(this));
}
}

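In `onBindFunction` above, query-style lifecycle functions such as `isSuspended` and `isReady` are bound with `setHandler`, which installs a single implementation, while lifecycle events such as `onReady`, `onLoad` and `onAppUnload` use `addHandler`, which appends another subscriber. The sketch below only illustrates that split; the `SingleHandler`/`MultiHandler` classes are hypothetical stand-ins and make no assumptions about the plugin's actual handler utilities.

```ts
// Hypothetical stand-ins for the handler utilities, shown only to illustrate
// the setHandler (one implementation) vs addHandler (many subscribers) split.
class SingleHandler<A extends unknown[], R> {
    private impl?: (...args: A) => R;
    setHandler(handler: (...args: A) => R): void {
        this.impl = handler; // a later call replaces the previous implementation
    }
    invoke(...args: A): R {
        if (!this.impl) throw new Error("no handler bound");
        return this.impl(...args);
    }
}

class MultiHandler<A extends unknown[]> {
    private handlers: ((...args: A) => Promise<unknown>)[] = [];
    addHandler(handler: (...args: A) => Promise<unknown>): void {
        this.handlers.push(handler); // every subscriber is kept
    }
    async invokeAll(...args: A): Promise<void> {
        for (const h of this.handlers) {
            await h(...args);
        }
    }
}
```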
View File

@@ -0,0 +1,108 @@
import { InjectableAPIService } from "@lib/services/implements/injectable/InjectableAPIService";
import type { ObsidianServiceContext } from "@lib/services/implements/obsidian/ObsidianServiceContext";
import { Platform, type Command, type ViewCreator } from "obsidian";
import { ObsHttpHandler } from "../essentialObsidian/APILib/ObsHttpHandler";
import { ObsidianConfirm } from "./ObsidianConfirm";
import type { Confirm } from "@lib/interfaces/Confirm";
// All Services will be migrated to be based on Plain Services, not Injectable Services.
// This is a migration step.
export class ObsidianAPIService extends InjectableAPIService<ObsidianServiceContext> {
_customHandler: ObsHttpHandler | undefined;
_confirmInstance: Confirm;
constructor(context: ObsidianServiceContext) {
super(context);
this._confirmInstance = new ObsidianConfirm(context);
}
getCustomFetchHandler(): ObsHttpHandler {
if (!this._customHandler) this._customHandler = new ObsHttpHandler(undefined, undefined);
return this._customHandler;
}
async showWindow(viewType: string): Promise<void> {
const leaves = this.app.workspace.getLeavesOfType(viewType);
if (leaves.length == 0) {
await this.app.workspace.getLeaf(true).setViewState({
type: viewType,
active: true,
});
} else {
await leaves[0].setViewState({
type: viewType,
active: true,
});
}
if (leaves.length > 0) {
await this.app.workspace.revealLeaf(leaves[0]);
}
}
private get app() {
return this.context.app;
}
override getPlatform(): string {
if (Platform.isAndroidApp) {
return "android-app";
} else if (Platform.isIosApp) {
return "ios";
} else if (Platform.isMacOS) {
return "macos";
} else if (Platform.isMobileApp) {
return "mobile-app";
} else if (Platform.isMobile) {
return "mobile";
} else if (Platform.isSafari) {
return "safari";
} else if (Platform.isDesktop) {
return "desktop";
} else if (Platform.isDesktopApp) {
return "desktop-app";
} else {
return "unknown-obsidian";
}
}
override isMobile(): boolean {
//@ts-ignore : internal API
return this.app.isMobile;
}
override getAppID(): string {
return `${"appId" in this.app ? this.app.appId : ""}`;
}
override getSystemVaultName(): string {
return this.app.vault.getName();
}
override getAppVersion(): string {
const navigatorString = globalThis.navigator?.userAgent ?? "";
const match = navigatorString.match(/obsidian\/([0-9]+\.[0-9]+\.[0-9]+)/);
if (match && match.length >= 2) {
return match[1];
}
return "0.0.0";
}
override getPluginVersion(): string {
return this.context.plugin.manifest.version;
}
get confirm(): Confirm {
return this._confirmInstance;
}
addCommand<TCommand extends Command>(command: TCommand): TCommand {
return this.context.plugin.addCommand(command) as TCommand;
}
registerWindow(type: string, factory: ViewCreator): void {
return this.context.plugin.registerView(type, factory);
}
addRibbonIcon(icon: string, title: string, callback: (evt: MouseEvent) => any): HTMLElement {
return this.context.plugin.addRibbonIcon(icon, title, callback);
}
registerProtocolHandler(action: string, handler: (params: Record<string, string>) => any): void {
return this.context.plugin.registerObsidianProtocolHandler(action, handler);
}
}

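`getAppVersion` above derives the host version from the user-agent string rather than from an API call, falling back to `"0.0.0"` when nothing matches. A quick check of that regular expression against a fabricated user-agent string (for illustration only):

```ts
// The user-agent string below is made up; real Obsidian builds embed a token
// of the form "obsidian/x.y.z" that the regex captures.
const ua = "Mozilla/5.0 (Macintosh) obsidian/1.7.4 Chrome/128.0.0.0 Electron/32.0.0 Safari/537.36";
const match = ua.match(/obsidian\/([0-9]+\.[0-9]+\.[0-9]+)/);
console.log(match && match.length >= 2 ? match[1] : "0.0.0"); // -> "1.7.4"
```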
View File

@@ -0,0 +1,15 @@
import { AppLifecycleServiceBase } from "@/lib/src/services/implements/injectable/InjectableAppLifecycleService";
import type { ObsidianServiceContext } from "@/lib/src/services/implements/obsidian/ObsidianServiceContext";
declare module "obsidian" {
interface App {
commands: {
executeCommandById: (id: string) => Promise<void>;
};
}
}
// InjectableAppLifecycleService
export class ObsidianAppLifecycleService<T extends ObsidianServiceContext> extends AppLifecycleServiceBase<T> {
performRestart(): void {
void this.context.plugin.app.commands.executeCommandById("app:reload");
}
}

View File

@@ -0,0 +1,16 @@
import { initializeStores } from "@/common/stores";
// import { InjectableDatabaseService } from "@/lib/src/services/implements/injectable/InjectableDatabaseService";
import type { ObsidianServiceContext } from "@/lib/src/services/implements/obsidian/ObsidianServiceContext";
import { DatabaseService, type DatabaseServiceDependencies } from "@lib/services/base/DatabaseService.ts";
export class ObsidianDatabaseService<T extends ObsidianServiceContext> extends DatabaseService<T> {
private __onOpenDatabase(vaultName: string) {
initializeStores(vaultName);
return Promise.resolve(true);
}
constructor(context: T, dependencies: DatabaseServiceDependencies) {
super(context, dependencies);
this.onOpenDatabase.addHandler(this.__onOpenDatabase.bind(this));
}
}

View File

@@ -0,0 +1,8 @@
import type { ObsidianServiceContext } from "@lib/services/implements/obsidian/ObsidianServiceContext";
import { normalizePath } from "@/deps";
import { PathService } from "@/lib/src/services/base/PathService";
export class ObsidianPathService extends PathService<ObsidianServiceContext> {
protected normalizePath(path: string): string {
return normalizePath(path);
}
}

View File

@@ -1,24 +1,26 @@
import { InjectableServiceHub } from "@/lib/src/services/implements/injectable/InjectableServiceHub";
import { ObsidianServiceContext } from "@/lib/src/services/implements/obsidian/ObsidianServiceContext";
import type { ServiceInstances } from "@/lib/src/services/ServiceHub";
import { InjectableServiceHub } from "@lib/services/implements/injectable/InjectableServiceHub";
import { ObsidianServiceContext } from "@lib/services/implements/obsidian/ObsidianServiceContext";
import type { ServiceInstances } from "@lib/services/ServiceHub";
import type ObsidianLiveSyncPlugin from "@/main";
import {
ObsidianAPIService,
ObsidianAppLifecycleService,
ObsidianConflictService,
ObsidianDatabaseService,
ObsidianFileProcessingService,
ObsidianReplicationService,
ObsidianReplicatorService,
ObsidianRemoteService,
ObsidianSettingService,
ObsidianTweakValueService,
ObsidianVaultService,
ObsidianTestService,
ObsidianDatabaseEventService,
ObsidianPathService,
ObsidianConfigService,
ObsidianKeyValueDBService,
ObsidianControlService,
} from "./ObsidianServices";
import { ObsidianSettingService } from "./ObsidianSettingService";
import { ObsidianDatabaseService } from "./ObsidianDatabaseService";
import { ObsidianAPIService } from "./ObsidianAPIService";
import { ObsidianAppLifecycleService } from "./ObsidianAppLifecycleService";
import { ObsidianPathService } from "./ObsidianPathService";
import { ObsidianVaultService } from "./ObsidianVaultService";
import { ObsidianUIService } from "./ObsidianUIService";
// InjectableServiceHub
@@ -28,26 +30,71 @@ export class ObsidianServiceHub extends InjectableServiceHub<ObsidianServiceCont
const context = new ObsidianServiceContext(plugin.app, plugin, plugin);
const API = new ObsidianAPIService(context);
const appLifecycle = new ObsidianAppLifecycleService(context);
const conflict = new ObsidianConflictService(context);
const database = new ObsidianDatabaseService(context);
const fileProcessing = new ObsidianFileProcessingService(context);
const replication = new ObsidianReplicationService(context);
const replicator = new ObsidianReplicatorService(context);
const remote = new ObsidianRemoteService(context);
const setting = new ObsidianSettingService(context);
const tweakValue = new ObsidianTweakValueService(context);
const vault = new ObsidianVaultService(context);
const setting = new ObsidianSettingService(context, {
APIService: API,
});
const appLifecycle = new ObsidianAppLifecycleService(context, {
settingService: setting,
});
const vault = new ObsidianVaultService(context, {
settingService: setting,
APIService: API,
});
const test = new ObsidianTestService(context);
const databaseEvents = new ObsidianDatabaseEventService(context);
const path = new ObsidianPathService(context);
const config = new ObsidianConfigService(context, vault);
const path = new ObsidianPathService(context, {
settingService: setting,
});
const database = new ObsidianDatabaseService(context, {
path: path,
vault: vault,
setting: setting,
});
const keyValueDB = new ObsidianKeyValueDBService(context, {
appLifecycle: appLifecycle,
databaseEvents: databaseEvents,
vault: vault,
});
const config = new ObsidianConfigService(context, {
settingService: setting,
APIService: API,
});
const replicator = new ObsidianReplicatorService(context, {
settingService: setting,
appLifecycleService: appLifecycle,
databaseEventService: databaseEvents,
});
const replication = new ObsidianReplicationService(context, {
APIService: API,
appLifecycleService: appLifecycle,
databaseEventService: databaseEvents,
replicatorService: replicator,
settingService: setting,
fileProcessingService: fileProcessing,
databaseService: database,
});
const control = new ObsidianControlService(context, {
appLifecycleService: appLifecycle,
databaseService: database,
fileProcessingService: fileProcessing,
settingService: setting,
APIService: API,
replicatorService: replicator,
});
const ui = new ObsidianUIService(context, {
appLifecycle,
config,
replicator,
APIService: API,
control: control,
});
// Using 'satisfies' to ensure all services are provided
const serviceInstancesToInit = {
appLifecycle: appLifecycle,
@@ -66,6 +113,8 @@ export class ObsidianServiceHub extends InjectableServiceHub<ObsidianServiceCont
path: path,
API: API,
config: config,
keyValueDB: keyValueDB,
control: control,
} satisfies Required<ServiceInstances<ObsidianServiceContext>>;
super(context, serviceInstancesToInit);

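The hub constructs every service eagerly and then validates the collection with `satisfies Required<ServiceInstances<ObsidianServiceContext>>`, so a forgotten service becomes a compile-time error instead of a runtime failure. A reduced sketch of that pattern, with illustrative service names rather than the plugin's full `ServiceInstances` shape:

```ts
// Illustrative shape; the real interface is ServiceInstances<ObsidianServiceContext>.
interface Services {
    setting: object;
    database: object;
    control?: object; // optional in the interface
}

const instances = {
    setting: {},
    database: {},
    control: {},
    // Omitting 'control' would fail to compile: Required<Services> makes every
    // key mandatory, while 'satisfies' still preserves the inferred literal
    // type of 'instances' for later use.
} satisfies Required<Services>;
```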
View File

@@ -1,104 +1,16 @@
import { InjectableAPIService } from "@lib/services/implements/injectable/InjectableAPIService";
import { InjectableAppLifecycleService } from "@lib/services/implements/injectable/InjectableAppLifecycleService";
import { InjectableConflictService } from "@lib/services/implements/injectable/InjectableConflictService";
import { InjectableDatabaseEventService } from "@lib/services/implements/injectable/InjectableDatabaseEventService";
import { InjectableDatabaseService } from "@lib/services/implements/injectable/InjectableDatabaseService";
import { InjectableFileProcessingService } from "@lib/services/implements/injectable/InjectableFileProcessingService";
import { InjectablePathService } from "@lib/services/implements/injectable/InjectablePathService";
import { InjectableRemoteService } from "@lib/services/implements/injectable/InjectableRemoteService";
import { InjectableReplicationService } from "@lib/services/implements/injectable/InjectableReplicationService";
import { InjectableReplicatorService } from "@lib/services/implements/injectable/InjectableReplicatorService";
import { InjectableSettingService } from "@lib/services/implements/injectable/InjectableSettingService";
import { InjectableTestService } from "@lib/services/implements/injectable/InjectableTestService";
import { InjectableTweakValueService } from "@lib/services/implements/injectable/InjectableTweakValueService";
import { InjectableVaultService } from "@lib/services/implements/injectable/InjectableVaultService";
import { ConfigServiceBrowserCompat } from "@lib/services/implements/browser/ConfigServiceBrowserCompat";
import type { ObsidianServiceContext } from "@lib/services/implements/obsidian/ObsidianServiceContext.ts";
import { Platform } from "@/deps";
import type { SimpleStore } from "@/lib/src/common/utils";
import type { IDatabaseService } from "@/lib/src/services/base/IService";
import { handlers } from "@/lib/src/services/lib/HandlerUtils";
import { ObsHttpHandler } from "../essentialObsidian/APILib/ObsHttpHandler";
import { KeyValueDBService } from "@lib/services/base/KeyValueDBService";
import { ControlService } from "@lib/services/base/ControlService";
// All Services will be migrated to be based on Plain Services, not Injectable Services.
// This is a migration step.
export class ObsidianAPIService extends InjectableAPIService<ObsidianServiceContext> {
_customHandler: ObsHttpHandler | undefined;
getCustomFetchHandler(): ObsHttpHandler {
if (!this._customHandler) this._customHandler = new ObsHttpHandler(undefined, undefined);
return this._customHandler;
}
async showWindow(viewType: string): Promise<void> {
const leaves = this.app.workspace.getLeavesOfType(viewType);
if (leaves.length == 0) {
await this.app.workspace.getLeaf(true).setViewState({
type: viewType,
active: true,
});
} else {
await leaves[0].setViewState({
type: viewType,
active: true,
});
}
if (leaves.length > 0) {
await this.app.workspace.revealLeaf(leaves[0]);
}
}
private get app() {
return this.context.app;
}
getPlatform(): string {
if (Platform.isAndroidApp) {
return "android-app";
} else if (Platform.isIosApp) {
return "ios";
} else if (Platform.isMacOS) {
return "macos";
} else if (Platform.isMobileApp) {
return "mobile-app";
} else if (Platform.isMobile) {
return "mobile";
} else if (Platform.isSafari) {
return "safari";
} else if (Platform.isDesktop) {
return "desktop";
} else if (Platform.isDesktopApp) {
return "desktop-app";
} else {
return "unknown-obsidian";
}
}
override isMobile(): boolean {
//@ts-ignore : internal API
return this.app.isMobile;
}
override getAppID(): string {
return `${"appId" in this.app ? this.app.appId : ""}`;
}
override getAppVersion(): string {
const navigatorString = globalThis.navigator?.userAgent ?? "";
const match = navigatorString.match(/obsidian\/([0-9]+\.[0-9]+\.[0-9]+)/);
if (match && match.length >= 2) {
return match[1];
}
return "0.0.0";
}
override getPluginVersion(): string {
return this.context.plugin.manifest.version;
}
}
export class ObsidianPathService extends InjectablePathService<ObsidianServiceContext> {}
export class ObsidianDatabaseService extends InjectableDatabaseService<ObsidianServiceContext> {
openSimpleStore = handlers<IDatabaseService>().binder("openSimpleStore") as (<T>(
kind: string
) => SimpleStore<T>) & { setHandler: (handler: IDatabaseService["openSimpleStore"], override?: boolean) => void };
}
export class ObsidianDatabaseEventService extends InjectableDatabaseEventService<ObsidianServiceContext> {}
// InjectableReplicatorService
@@ -111,14 +23,12 @@ export class ObsidianReplicationService extends InjectableReplicationService<Obs
export class ObsidianRemoteService extends InjectableRemoteService<ObsidianServiceContext> {}
// InjectableConflictService
export class ObsidianConflictService extends InjectableConflictService<ObsidianServiceContext> {}
// InjectableAppLifecycleService
export class ObsidianAppLifecycleService extends InjectableAppLifecycleService<ObsidianServiceContext> {}
// InjectableSettingService
export class ObsidianSettingService extends InjectableSettingService<ObsidianServiceContext> {}
// InjectableTweakValueService
export class ObsidianTweakValueService extends InjectableTweakValueService<ObsidianServiceContext> {}
// InjectableVaultService
export class ObsidianVaultService extends InjectableVaultService<ObsidianServiceContext> {}
// InjectableTestService
export class ObsidianTestService extends InjectableTestService<ObsidianServiceContext> {}
export class ObsidianConfigService extends ConfigServiceBrowserCompat<ObsidianServiceContext> {}
export class ObsidianKeyValueDBService extends KeyValueDBService<ObsidianServiceContext> {}
export class ObsidianControlService extends ControlService<ObsidianServiceContext> {}

View File

@@ -0,0 +1,35 @@
import { type ObsidianLiveSyncSettings } from "@lib/common/types";
import { EVENT_REQUEST_RELOAD_SETTING_TAB, EVENT_SETTING_SAVED } from "@lib/events/coreEvents";
import { eventHub } from "@lib/hub/hub";
import { SettingService, type SettingServiceDependencies } from "@lib/services/base/SettingService";
import type { ObsidianServiceContext } from "@lib/services/implements/obsidian/ObsidianServiceContext";
export class ObsidianSettingService<T extends ObsidianServiceContext> extends SettingService<T> {
constructor(context: T, dependencies: SettingServiceDependencies) {
super(context, dependencies);
this.onSettingSaved.addHandler((settings) => {
eventHub.emitEvent(EVENT_SETTING_SAVED, settings);
return Promise.resolve(true);
});
this.onSettingLoaded.addHandler((settings) => {
eventHub.emitEvent(EVENT_REQUEST_RELOAD_SETTING_TAB);
return Promise.resolve(true);
});
}
protected setItem(key: string, value: string) {
return localStorage.setItem(key, value);
}
protected getItem(key: string): string {
return localStorage.getItem(key) ?? "";
}
protected deleteItem(key: string): void {
localStorage.removeItem(key);
}
protected override async saveData(data: ObsidianLiveSyncSettings): Promise<void> {
return await this.context.liveSyncPlugin.saveData(data);
}
protected override async loadData(): Promise<ObsidianLiveSyncSettings | undefined> {
return await this.context.liveSyncPlugin.loadData();
}
}

View File

@@ -2,29 +2,35 @@ import type { ConfigService } from "@lib/services/base/ConfigService";
import type { AppLifecycleService } from "@lib/services/base/AppLifecycleService";
import type { ReplicatorService } from "@lib/services/base/ReplicatorService";
import { UIService } from "@lib/services//implements/base/UIService";
import { ObsidianServiceContext } from "@/lib/src/services/implements/obsidian/ObsidianServiceContext";
import { ObsidianServiceContext } from "@lib/services/implements/obsidian/ObsidianServiceContext";
import { ObsidianSvelteDialogManager } from "./SvelteDialogObsidian";
import { ObsidianConfirm } from "./ObsidianConfirm";
import DialogToCopy from "@lib/UI/dialogues/DialogueToCopy.svelte";
import type { IAPIService, IControlService } from "@lib/services/base/IService";
export type ObsidianUIServiceDependencies<T extends ObsidianServiceContext = ObsidianServiceContext> = {
appLifecycle: AppLifecycleService<T>;
config: ConfigService<T>;
replicator: ReplicatorService<T>;
APIService: IAPIService;
control: IControlService;
};
export class ObsidianUIService extends UIService<ObsidianServiceContext> {
override get dialogToCopy() {
return DialogToCopy;
}
constructor(context: ObsidianServiceContext, dependents: ObsidianUIServiceDependencies<ObsidianServiceContext>) {
const obsidianConfirm = new ObsidianConfirm(context);
const obsidianConfirm = dependents.APIService.confirm;
const obsidianSvelteDialogManager = new ObsidianSvelteDialogManager<ObsidianServiceContext>(context, {
appLifecycle: dependents.appLifecycle,
config: dependents.config,
replicator: dependents.replicator,
confirm: obsidianConfirm,
control: dependents.control,
});
super(context, {
appLifecycle: dependents.appLifecycle,
dialogManager: obsidianSvelteDialogManager,
confirm: obsidianConfirm,
APIService: dependents.APIService,
});
}
}

View File

@@ -0,0 +1,33 @@
import { getPathFromTFile } from "@/common/utils";
import { InjectableVaultService } from "@/lib/src/services/implements/injectable/InjectableVaultService";
import type { ObsidianServiceContext } from "@/lib/src/services/implements/obsidian/ObsidianServiceContext";
import type { FilePath } from "@/lib/src/common/types";
declare module "obsidian" {
interface DataAdapter {
insensitive?: boolean;
}
}
// InjectableVaultService
export class ObsidianVaultService extends InjectableVaultService<ObsidianServiceContext> {
override vaultName(): string {
return this.context.app.vault.getName();
}
getActiveFilePath(): FilePath | undefined {
const file = this.context.app.workspace.getActiveFile();
if (file) {
return getPathFromTFile(file);
}
return undefined;
}
isStorageInsensitive(): boolean {
return this.context.app.vault.adapter.insensitive ?? true;
}
override shouldCheckCaseInsensitively(): boolean {
// If the storage is case-insensitive, always return false, because there is no need to check again.
if (this.isStorageInsensitive()) return false;
return super.shouldCheckCaseInsensitively(); // Check the setting
}
}

Some files were not shown because too many files have changed in this diff.