@abraca/dabra 0.8.0 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/abracadabra-provider.cjs +7075 -2858
- package/dist/abracadabra-provider.cjs.map +1 -1
- package/dist/abracadabra-provider.esm.js +7019 -2836
- package/dist/abracadabra-provider.esm.js.map +1 -1
- package/dist/index.d.ts +406 -116
- package/package.json +2 -1
- package/src/AbracadabraClient.ts +79 -2
- package/src/AbracadabraProvider.ts +26 -4
- package/src/BackgroundSyncManager.ts +400 -0
- package/src/BackgroundSyncPersistence.ts +107 -0
- package/src/CryptoIdentityKeystore.ts +65 -2
- package/src/DocKeyManager.ts +107 -0
- package/src/E2EAbracadabraProvider.ts +200 -0
- package/src/E2EOfflineStore.ts +55 -0
- package/src/EncryptedY.ts +145 -0
- package/src/OfflineStore.ts +23 -0
- package/src/TreeTimestamps.ts +51 -0
- package/src/index.ts +10 -0
- package/src/types.ts +9 -2
package/src/AbracadabraClient.ts
CHANGED
|
@@ -9,8 +9,13 @@ import type {
|
|
|
9
9
|
ServerInfo,
|
|
10
10
|
InviteRow,
|
|
11
11
|
} from "./types.ts";
|
|
12
|
+
import type { DocEncryptionInfo } from "./types.ts";
|
|
12
13
|
import type { DocumentCache } from "./DocumentCache.ts";
|
|
13
14
|
|
|
15
|
+
function fromBase64(b64: string): Uint8Array {
|
|
16
|
+
return Uint8Array.from(atob(b64), (c) => c.charCodeAt(0));
|
|
17
|
+
}
|
|
18
|
+
|
|
14
19
|
export interface AbracadabraClientConfig {
|
|
15
20
|
/** Server base URL (http or https). WebSocket URL is derived automatically. */
|
|
16
21
|
url: string;
|
|
@@ -166,8 +171,8 @@ export class AbracadabraClient {
|
|
|
166
171
|
}
|
|
167
172
|
|
|
168
173
|
/** Add a new Ed25519 public key to the current user (multi-device). */
|
|
169
|
-
async addKey(opts: { publicKey: string; deviceName?: string }): Promise<void> {
|
|
170
|
-
await this.request("POST", "/auth/keys", { body: opts });
|
|
174
|
+
async addKey(opts: { publicKey: string; deviceName?: string; x25519Key?: string }): Promise<void> {
|
|
175
|
+
await this.request("POST", "/auth/keys", { body: { publicKey: opts.publicKey, deviceName: opts.deviceName, x25519Key: opts.x25519Key } });
|
|
171
176
|
}
|
|
172
177
|
|
|
173
178
|
/** List all registered public keys for the current user. */
|
|
@@ -181,6 +186,78 @@ export class AbracadabraClient {
|
|
|
181
186
|
await this.request("DELETE", `/auth/keys/${encodeURIComponent(keyId)}`);
|
|
182
187
|
}
|
|
183
188
|
|
|
189
|
+
// ── Encryption ───────────────────────────────────────────────────────────
|
|
190
|
+
|
|
191
|
+
/** Get encryption info for a document. */
|
|
192
|
+
async getDocEncryption(docId: string): Promise<DocEncryptionInfo> {
|
|
193
|
+
return this.request<DocEncryptionInfo>("GET", `/docs/${encodeURIComponent(docId)}/encryption`);
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
/** Set the encryption mode for a document (no downgrade). */
|
|
197
|
+
async setDocEncryption(docId: string, mode: "none" | "cse" | "e2e"): Promise<void> {
|
|
198
|
+
await this.request("PATCH", `/docs/${encodeURIComponent(docId)}/encryption`, { body: { mode } });
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
/** Get the caller's key envelope for a document (for decrypting the DocKey). */
|
|
202
|
+
async getMyKeyEnvelope(docId: string): Promise<{ encrypted_key: string; key_epoch: number } | null> {
|
|
203
|
+
try {
|
|
204
|
+
return await this.request<{ encrypted_key: string; key_epoch: number }>(
|
|
205
|
+
"GET",
|
|
206
|
+
`/docs/${encodeURIComponent(docId)}/key-envelope`,
|
|
207
|
+
);
|
|
208
|
+
} catch (e: unknown) {
|
|
209
|
+
if (typeof e === "object" && e !== null && "status" in e && (e as { status: number }).status === 404) {
|
|
210
|
+
return null;
|
|
211
|
+
}
|
|
212
|
+
throw e;
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
/** Upload key envelopes for a document (Owner only). */
|
|
217
|
+
async uploadKeyEnvelopes(
|
|
218
|
+
docId: string,
|
|
219
|
+
opts: { key_epoch: number; envelopes: { recipient_key_id: string; encrypted_key: string }[] },
|
|
220
|
+
): Promise<void> {
|
|
221
|
+
await this.request("POST", `/docs/${encodeURIComponent(docId)}/key-envelopes`, { body: opts });
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
/** Get the X25519 public key for a user. */
|
|
225
|
+
async getUserX25519Key(userId: string): Promise<string | null> {
|
|
226
|
+
try {
|
|
227
|
+
const res = await this.request<{ x25519_key: string }>(
|
|
228
|
+
"GET",
|
|
229
|
+
`/users/${encodeURIComponent(userId)}/x25519-key`,
|
|
230
|
+
);
|
|
231
|
+
return res.x25519_key;
|
|
232
|
+
} catch (e: unknown) {
|
|
233
|
+
if (typeof e === "object" && e !== null && "status" in e && (e as { status: number }).status === 404) {
|
|
234
|
+
return null;
|
|
235
|
+
}
|
|
236
|
+
throw e;
|
|
237
|
+
}
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
/** List all non-revoked keys for a user (Owner/Admin or self). */
|
|
241
|
+
async listUserKeys(userId: string): Promise<{ id: string; publicKey: string; x25519Key: string | null }[]> {
|
|
242
|
+
const res = await this.request<{ keys: { id: string; publicKey: string; x25519Key: string | null }[] }>(
|
|
243
|
+
"GET",
|
|
244
|
+
`/users/${encodeURIComponent(userId)}/keys`,
|
|
245
|
+
);
|
|
246
|
+
return res.keys;
|
|
247
|
+
}
|
|
248
|
+
|
|
249
|
+
/** Fetch encrypted E2E update blobs since a given sequence number. */
|
|
250
|
+
async getE2EUpdatesSince(docId: string, sinceSeq: number): Promise<{ seq: number; data: Uint8Array }[]> {
|
|
251
|
+
const res = await this.request<{ updates: { seq: number; data: string }[] }>(
|
|
252
|
+
"GET",
|
|
253
|
+
`/docs/${encodeURIComponent(docId)}/e2e-updates?since_seq=${sinceSeq}`,
|
|
254
|
+
);
|
|
255
|
+
return res.updates.map((u) => ({
|
|
256
|
+
seq: u.seq,
|
|
257
|
+
data: fromBase64(u.data),
|
|
258
|
+
}));
|
|
259
|
+
}
|
|
260
|
+
|
|
184
261
|
/** Clear token from memory and storage. */
|
|
185
262
|
logout(): void {
|
|
186
263
|
this.token = null;
|
|
@@ -13,6 +13,8 @@ import type {
|
|
|
13
13
|
} from "./types.ts";
|
|
14
14
|
import { AuthenticationMessage } from "./OutgoingMessages/AuthenticationMessage.ts";
|
|
15
15
|
import type { AbracadabraClient } from "./AbracadabraClient.ts";
|
|
16
|
+
import type { DocKeyManager } from "./DocKeyManager.ts";
|
|
17
|
+
import type { CryptoIdentityKeystore } from "./CryptoIdentityKeystore.ts";
|
|
16
18
|
|
|
17
19
|
export interface AbracadabraProviderConfiguration
|
|
18
20
|
extends Omit<AbracadabraBaseProviderConfiguration, "url" | "websocketProvider"> {
|
|
@@ -57,6 +59,11 @@ export interface AbracadabraProviderConfiguration
|
|
|
57
59
|
/** WebSocket URL. Derived from client.wsUrl if client is provided. */
|
|
58
60
|
url?: string;
|
|
59
61
|
|
|
62
|
+
/** DocKeyManager for E2E/CSE encryption key management. */
|
|
63
|
+
docKeyManager?: DocKeyManager;
|
|
64
|
+
/** Keystore for X25519 key derivation. */
|
|
65
|
+
keystore?: CryptoIdentityKeystore;
|
|
66
|
+
|
|
60
67
|
/** Shared WebSocket connection (use when multiplexing multiple root documents). */
|
|
61
68
|
websocketProvider?: AbracadabraWS;
|
|
62
69
|
}
|
|
@@ -158,7 +165,7 @@ export class AbracadabraProvider extends AbracadabraBaseProvider {
|
|
|
158
165
|
* Used to namespace the IndexedDB key so docs from different servers
|
|
159
166
|
* never share the same database.
|
|
160
167
|
*/
|
|
161
|
-
|
|
168
|
+
protected static deriveServerOrigin(
|
|
162
169
|
config: AbracadabraProviderConfiguration,
|
|
163
170
|
client: AbracadabraClient | null,
|
|
164
171
|
): string | undefined {
|
|
@@ -208,8 +215,16 @@ export class AbracadabraProvider extends AbracadabraBaseProvider {
|
|
|
208
215
|
override authenticatedHandler(scope: string) {
  super.authenticatedHandler(scope);

  // Map the server-issued scope (including the legacy "read-write"/"readonly"
  // spellings) onto the EffectiveRole union; unknown scopes degrade to "viewer".
  switch (scope) {
    case "service":
    case "admin":
    case "owner":
    case "editor":
    case "viewer":
      this.effectiveRole = scope as import("./types.ts").EffectiveRole;
      break;
    case "read-write":
      this.effectiveRole = "editor";
      break;
    case "readonly":
    default:
      this.effectiveRole = "viewer";
      break;
  }

  this.offlineStore?.savePermissionSnapshot(this.effectiveRole);
}
|
|
@@ -283,7 +298,7 @@ export class AbracadabraProvider extends AbracadabraBaseProvider {
|
|
|
283
298
|
}
|
|
284
299
|
|
|
285
300
|
get canWrite(): boolean {
  // Any non-null role above "viewer" (editor/owner/admin/service) may write.
  const role = this.effectiveRole;
  return role != null && role !== "viewer";
}
|
|
288
303
|
|
|
289
304
|
/** The AbracadabraClient instance for REST API access, if configured. */
|
|
@@ -291,6 +306,11 @@ export class AbracadabraProvider extends AbracadabraBaseProvider {
|
|
|
291
306
|
return this._client;
|
|
292
307
|
}
|
|
293
308
|
|
|
309
|
+
/** The OfflineStore instance, or null if offline storage is disabled. */
|
|
310
|
+
get store(): OfflineStore | null {
|
|
311
|
+
return this.offlineStore;
|
|
312
|
+
}
|
|
313
|
+
|
|
294
314
|
// ── Stateless message interception ────────────────────────────────────────
|
|
295
315
|
|
|
296
316
|
/**
|
|
@@ -441,6 +461,8 @@ export class AbracadabraProvider extends AbracadabraBaseProvider {
|
|
|
441
461
|
client: this._client ?? undefined,
|
|
442
462
|
cryptoIdentity: this.abracadabraConfig.cryptoIdentity,
|
|
443
463
|
signChallenge: this.abracadabraConfig.signChallenge,
|
|
464
|
+
docKeyManager: this.abracadabraConfig.docKeyManager,
|
|
465
|
+
keystore: this.abracadabraConfig.keystore,
|
|
444
466
|
});
|
|
445
467
|
|
|
446
468
|
this.childProviders.set(childId, childProvider);
|
|
@@ -0,0 +1,400 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* BackgroundSyncManager
|
|
3
|
+
*
|
|
4
|
+
* Proactively syncs all documents in the root tree map to IndexedDB so they
|
|
5
|
+
* are available offline even if the user has never opened them directly.
|
|
6
|
+
*
|
|
7
|
+
* Key behaviours:
|
|
8
|
+
* - Priority queue: sorted by updatedAt desc → never-synced first → errored last
|
|
9
|
+
* - Concurrency: semaphore (default 2 parallel WS connections)
|
|
10
|
+
* - Provider selection: non-E2E via rootProvider.loadChild(); E2E via
|
|
11
|
+
* E2EAbracadabraProvider directly when docKeyManager + keystore are present
|
|
12
|
+
* - File pre-caching: after sync, walks the Y.Doc for file refs and warms
|
|
13
|
+
* the FileBlobStore cache (opt-in via prefetchFiles, default true)
|
|
14
|
+
* - Cover images: at start of syncAll(), pre-caches all cover upload IDs
|
|
15
|
+
* from the root tree map immediately (fire-and-forget)
|
|
16
|
+
* - Periodic sync: startPeriodicSync() returns a cleanup function
|
|
17
|
+
*
|
|
18
|
+
* Note: This is in-page only (no Service Worker). The existing
|
|
19
|
+
* flushPendingUpdates() handles offline → reconnect for open docs.
|
|
20
|
+
*/
|
|
21
|
+
|
|
22
|
+
import * as Y from "yjs";
|
|
23
|
+
import type { AbracadabraProvider } from "./AbracadabraProvider.ts";
|
|
24
|
+
import type { AbracadabraClient } from "./AbracadabraClient.ts";
|
|
25
|
+
import type { FileBlobStore } from "./FileBlobStore.ts";
|
|
26
|
+
import {
|
|
27
|
+
BackgroundSyncPersistence,
|
|
28
|
+
type DocSyncState,
|
|
29
|
+
} from "./BackgroundSyncPersistence.ts";
|
|
30
|
+
import EventEmitter from "./EventEmitter.ts";
|
|
31
|
+
import { E2EAbracadabraProvider } from "./E2EAbracadabraProvider.ts";
|
|
32
|
+
|
|
33
|
+
export interface BackgroundSyncManagerOptions {
|
|
34
|
+
/** Max parallel WS connections for background sync. Default: 2. */
|
|
35
|
+
concurrency?: number;
|
|
36
|
+
/** Timeout (ms) waiting for a provider to sync. Default: 15 000. */
|
|
37
|
+
syncTimeout?: number;
|
|
38
|
+
/** Pre-cache file blobs after syncing a doc. Default: true. */
|
|
39
|
+
prefetchFiles?: boolean;
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
/**
|
|
43
|
+
* Simple semaphore for capping concurrency.
|
|
44
|
+
*/
|
|
45
|
+
class Semaphore {
|
|
46
|
+
private slots: number;
|
|
47
|
+
private queue: Array<() => void> = [];
|
|
48
|
+
|
|
49
|
+
constructor(limit: number) {
|
|
50
|
+
this.slots = limit;
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
acquire(): Promise<void> {
|
|
54
|
+
if (this.slots > 0) {
|
|
55
|
+
this.slots--;
|
|
56
|
+
return Promise.resolve();
|
|
57
|
+
}
|
|
58
|
+
return new Promise((resolve) => this.queue.push(resolve));
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
release(): void {
|
|
62
|
+
const next = this.queue.shift();
|
|
63
|
+
if (next) {
|
|
64
|
+
next();
|
|
65
|
+
} else {
|
|
66
|
+
this.slots++;
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
export class BackgroundSyncManager extends EventEmitter {
|
|
72
|
+
private readonly rootProvider: AbracadabraProvider;
|
|
73
|
+
private readonly client: AbracadabraClient;
|
|
74
|
+
private readonly fileBlobStore: FileBlobStore | null;
|
|
75
|
+
private readonly opts: Required<BackgroundSyncManagerOptions>;
|
|
76
|
+
|
|
77
|
+
private readonly persistence: BackgroundSyncPersistence;
|
|
78
|
+
private readonly semaphore: Semaphore;
|
|
79
|
+
private readonly syncStates = new Map<string, DocSyncState>();
|
|
80
|
+
|
|
81
|
+
private _destroyed = false;
|
|
82
|
+
|
|
83
|
+
constructor(
|
|
84
|
+
rootProvider: AbracadabraProvider,
|
|
85
|
+
client: AbracadabraClient,
|
|
86
|
+
fileBlobStore?: FileBlobStore | null,
|
|
87
|
+
opts?: BackgroundSyncManagerOptions,
|
|
88
|
+
) {
|
|
89
|
+
super();
|
|
90
|
+
this.rootProvider = rootProvider;
|
|
91
|
+
this.client = client;
|
|
92
|
+
this.fileBlobStore = fileBlobStore ?? null;
|
|
93
|
+
this.opts = {
|
|
94
|
+
concurrency: opts?.concurrency ?? 2,
|
|
95
|
+
syncTimeout: opts?.syncTimeout ?? 15_000,
|
|
96
|
+
prefetchFiles: opts?.prefetchFiles ?? true,
|
|
97
|
+
};
|
|
98
|
+
|
|
99
|
+
// Derive server origin from client URL for IDB namespacing
|
|
100
|
+
let serverOrigin = "default";
|
|
101
|
+
try {
|
|
102
|
+
serverOrigin = new URL((client as any).baseUrl ?? "").hostname;
|
|
103
|
+
} catch {}
|
|
104
|
+
|
|
105
|
+
this.persistence = new BackgroundSyncPersistence(serverOrigin);
|
|
106
|
+
this.semaphore = new Semaphore(this.opts.concurrency);
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
// ── Public API ────────────────────────────────────────────────────────────
|
|
110
|
+
|
|
111
|
+
/** Sync all documents in the root tree. */
|
|
112
|
+
async syncAll(): Promise<void> {
|
|
113
|
+
if (this._destroyed) return;
|
|
114
|
+
|
|
115
|
+
const treeMap = this.rootProvider.document.getMap("doc-tree") as Y.Map<any>;
|
|
116
|
+
const entries = Array.from(treeMap.entries()) as Array<[string, any]>;
|
|
117
|
+
|
|
118
|
+
if (entries.length === 0) return;
|
|
119
|
+
|
|
120
|
+
// Pre-cache cover images immediately (fire-and-forget)
|
|
121
|
+
this._prefetchCovers(entries).catch(() => null);
|
|
122
|
+
|
|
123
|
+
// Build the priority queue
|
|
124
|
+
const queue = this._buildQueue(entries);
|
|
125
|
+
|
|
126
|
+
// Sync all docs respecting concurrency limit
|
|
127
|
+
await Promise.all(queue.map((docId) => this._syncWithSemaphore(docId)));
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
/** Sync a single document by ID. */
|
|
131
|
+
async syncDoc(docId: string): Promise<DocSyncState> {
|
|
132
|
+
const state = await this._doSyncDoc(docId);
|
|
133
|
+
this.syncStates.set(docId, state);
|
|
134
|
+
await this.persistence.setState(state).catch(() => null);
|
|
135
|
+
this.emit("stateChanged", { docId, state });
|
|
136
|
+
return state;
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
/** Return a snapshot of all known sync states. */
|
|
140
|
+
getSyncStatus(): Map<string, DocSyncState> {
|
|
141
|
+
return new Map(this.syncStates);
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
/**
|
|
145
|
+
* Start periodic background sync.
|
|
146
|
+
* @param intervalMs Interval between sync runs. Default: 5 minutes.
|
|
147
|
+
* @returns Cleanup function to stop the periodic sync.
|
|
148
|
+
*/
|
|
149
|
+
startPeriodicSync(intervalMs = 5 * 60 * 1000): () => void {
|
|
150
|
+
const handle = setInterval(() => {
|
|
151
|
+
if (!this._destroyed) {
|
|
152
|
+
this.syncAll().catch(() => null);
|
|
153
|
+
}
|
|
154
|
+
}, intervalMs);
|
|
155
|
+
return () => clearInterval(handle);
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
destroy(): void {
|
|
159
|
+
this._destroyed = true;
|
|
160
|
+
this.removeAllListeners();
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
// ── Internal ──────────────────────────────────────────────────────────────
|
|
164
|
+
|
|
165
|
+
/**
|
|
166
|
+
* Build a priority-sorted list of doc IDs:
|
|
167
|
+
* 1. Never-synced docs first (lastSynced === null, status !== "error")
|
|
168
|
+
* 2. Synced docs sorted by updatedAt desc (most-recently-edited first)
|
|
169
|
+
* 3. Errored docs last
|
|
170
|
+
*/
|
|
171
|
+
private _buildQueue(entries: Array<[string, any]>): string[] {
|
|
172
|
+
type Sortable = { docId: string; priority: number };
|
|
173
|
+
|
|
174
|
+
const items: Sortable[] = entries.map(([docId, v]) => {
|
|
175
|
+
const state = this.syncStates.get(docId);
|
|
176
|
+
const updatedAt: number = v?.updatedAt ?? v?.createdAt ?? 0;
|
|
177
|
+
|
|
178
|
+
let priority: number;
|
|
179
|
+
if (!state || state.status === "pending") {
|
|
180
|
+
// Never synced — high priority (large number so it sorts first when DESC)
|
|
181
|
+
priority = Number.MAX_SAFE_INTEGER - updatedAt;
|
|
182
|
+
} else if (state.status === "error") {
|
|
183
|
+
// Errored — lowest priority (large negative offset)
|
|
184
|
+
priority = -1;
|
|
185
|
+
} else {
|
|
186
|
+
// Synced — sort by updatedAt desc (most recent = highest priority)
|
|
187
|
+
priority = updatedAt;
|
|
188
|
+
}
|
|
189
|
+
return { docId, priority };
|
|
190
|
+
});
|
|
191
|
+
|
|
192
|
+
// Sort descending by priority
|
|
193
|
+
items.sort((a, b) => b.priority - a.priority);
|
|
194
|
+
return items.map((i) => i.docId);
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
private async _syncWithSemaphore(docId: string): Promise<void> {
|
|
198
|
+
if (this._destroyed) return;
|
|
199
|
+
await this.semaphore.acquire();
|
|
200
|
+
try {
|
|
201
|
+
const state = await this._doSyncDoc(docId);
|
|
202
|
+
this.syncStates.set(docId, state);
|
|
203
|
+
await this.persistence.setState(state).catch(() => null);
|
|
204
|
+
this.emit("stateChanged", { docId, state });
|
|
205
|
+
} finally {
|
|
206
|
+
this.semaphore.release();
|
|
207
|
+
}
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
private async _doSyncDoc(docId: string): Promise<DocSyncState> {
|
|
211
|
+
// Mark as syncing
|
|
212
|
+
const syncing: DocSyncState = {
|
|
213
|
+
docId,
|
|
214
|
+
status: "syncing",
|
|
215
|
+
lastSynced: this.syncStates.get(docId)?.lastSynced ?? null,
|
|
216
|
+
isE2E: false,
|
|
217
|
+
};
|
|
218
|
+
this.syncStates.set(docId, syncing);
|
|
219
|
+
this.emit("stateChanged", { docId, state: syncing });
|
|
220
|
+
|
|
221
|
+
try {
|
|
222
|
+
// Check encryption type
|
|
223
|
+
let isE2E = false;
|
|
224
|
+
try {
|
|
225
|
+
const enc = await this.client.getDocEncryption(docId);
|
|
226
|
+
isE2E = enc.mode === "e2e";
|
|
227
|
+
} catch {
|
|
228
|
+
// If we can't check, treat as non-E2E
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
if (isE2E) {
|
|
232
|
+
return await this._syncE2EDoc(docId);
|
|
233
|
+
} else {
|
|
234
|
+
return await this._syncNonE2EDoc(docId);
|
|
235
|
+
}
|
|
236
|
+
} catch (err) {
|
|
237
|
+
const error = err instanceof Error ? err.message : String(err);
|
|
238
|
+
return {
|
|
239
|
+
docId,
|
|
240
|
+
status: "error",
|
|
241
|
+
lastSynced: this.syncStates.get(docId)?.lastSynced ?? null,
|
|
242
|
+
error,
|
|
243
|
+
isE2E: false,
|
|
244
|
+
};
|
|
245
|
+
}
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
private async _syncNonE2EDoc(docId: string): Promise<DocSyncState> {
|
|
249
|
+
// loadChild() returns cached provider if already open
|
|
250
|
+
const childProvider = await this.rootProvider.loadChild(docId);
|
|
251
|
+
|
|
252
|
+
// Wait for ready (offline snapshot loaded) then synced (server sync done)
|
|
253
|
+
await childProvider.ready;
|
|
254
|
+
await this._waitForSynced(childProvider);
|
|
255
|
+
|
|
256
|
+
// Prefetch file blobs
|
|
257
|
+
if (this.opts.prefetchFiles && this.fileBlobStore) {
|
|
258
|
+
this._prefetchDocFiles(docId, childProvider.document).catch(() => null);
|
|
259
|
+
}
|
|
260
|
+
|
|
261
|
+
return { docId, status: "synced", lastSynced: Date.now(), isE2E: false };
|
|
262
|
+
}
|
|
263
|
+
|
|
264
|
+
private async _syncE2EDoc(docId: string): Promise<DocSyncState> {
|
|
265
|
+
// Attempt E2E sync only when docKeyManager and keystore are available
|
|
266
|
+
const docKeyManager = (this.rootProvider as any).abracadabraConfig
|
|
267
|
+
?.docKeyManager;
|
|
268
|
+
const keystore = (this.rootProvider as any).abracadabraConfig?.keystore;
|
|
269
|
+
|
|
270
|
+
if (!docKeyManager || !keystore) {
|
|
271
|
+
// No key management configured — skip silently
|
|
272
|
+
return { docId, status: "skipped", lastSynced: null, isE2E: true };
|
|
273
|
+
}
|
|
274
|
+
|
|
275
|
+
const childDoc = new Y.Doc({ guid: docId });
|
|
276
|
+
const childProvider = new E2EAbracadabraProvider({
|
|
277
|
+
name: docId,
|
|
278
|
+
document: childDoc,
|
|
279
|
+
client: this.client,
|
|
280
|
+
docKeyManager,
|
|
281
|
+
keystore,
|
|
282
|
+
});
|
|
283
|
+
|
|
284
|
+
try {
|
|
285
|
+
await childProvider.ready;
|
|
286
|
+
await this._waitForSynced(childProvider);
|
|
287
|
+
|
|
288
|
+
if (this.opts.prefetchFiles && this.fileBlobStore) {
|
|
289
|
+
this._prefetchDocFiles(docId, childDoc).catch(() => null);
|
|
290
|
+
}
|
|
291
|
+
|
|
292
|
+
return { docId, status: "synced", lastSynced: Date.now(), isE2E: true };
|
|
293
|
+
} finally {
|
|
294
|
+
childProvider.destroy();
|
|
295
|
+
}
|
|
296
|
+
}
|
|
297
|
+
|
|
298
|
+
/**
|
|
299
|
+
* Wait for a provider to emit the "synced" event (with state=true),
|
|
300
|
+
* timing out after opts.syncTimeout ms.
|
|
301
|
+
*/
|
|
302
|
+
private _waitForSynced(provider: any): Promise<void> {
|
|
303
|
+
// If already synced, resolve immediately
|
|
304
|
+
if (provider.synced === true) return Promise.resolve();
|
|
305
|
+
|
|
306
|
+
return new Promise<void>((resolve, reject) => {
|
|
307
|
+
const timer = setTimeout(() => {
|
|
308
|
+
provider.off("synced", onSynced);
|
|
309
|
+
reject(new Error(`Sync timeout after ${this.opts.syncTimeout}ms`));
|
|
310
|
+
}, this.opts.syncTimeout);
|
|
311
|
+
|
|
312
|
+
function onSynced(state: boolean | { state: boolean }) {
|
|
313
|
+
// Hocuspocus passes `{ state }` or a boolean directly
|
|
314
|
+
const synced =
|
|
315
|
+
typeof state === "boolean" ? state : (state as any)?.state ?? state;
|
|
316
|
+
if (synced) {
|
|
317
|
+
clearTimeout(timer);
|
|
318
|
+
provider.off("synced", onSynced);
|
|
319
|
+
resolve();
|
|
320
|
+
}
|
|
321
|
+
}
|
|
322
|
+
|
|
323
|
+
provider.on("synced", onSynced);
|
|
324
|
+
});
|
|
325
|
+
}
|
|
326
|
+
|
|
327
|
+
/**
|
|
328
|
+
* Pre-cache all cover images referenced in the root tree map.
|
|
329
|
+
*/
|
|
330
|
+
private async _prefetchCovers(entries: Array<[string, any]>): Promise<void> {
|
|
331
|
+
if (!this.fileBlobStore) return;
|
|
332
|
+
for (const [docId, v] of entries) {
|
|
333
|
+
const coverUploadId = v?.meta?.coverUploadId;
|
|
334
|
+
if (coverUploadId) {
|
|
335
|
+
this.fileBlobStore.getBlobUrl(docId, coverUploadId).catch(() => null);
|
|
336
|
+
}
|
|
337
|
+
}
|
|
338
|
+
}
|
|
339
|
+
|
|
340
|
+
/**
|
|
341
|
+
* Pre-cache file blobs referenced in a synced Y.Doc.
|
|
342
|
+
* Walks:
|
|
343
|
+
* - ydoc.getXmlFragment('default') for `fileBlock` nodes (TipTap)
|
|
344
|
+
* - ydoc.getMap('doc-tree') for meta.coverUploadId entries
|
|
345
|
+
*/
|
|
346
|
+
private async _prefetchDocFiles(
|
|
347
|
+
docId: string,
|
|
348
|
+
ydoc: Y.Doc,
|
|
349
|
+
): Promise<void> {
|
|
350
|
+
if (!this.fileBlobStore) return;
|
|
351
|
+
|
|
352
|
+
const refs = this._extractFileRefs(ydoc);
|
|
353
|
+
for (const { refDocId, uploadId } of refs) {
|
|
354
|
+
this.fileBlobStore
|
|
355
|
+
.getBlobUrl(refDocId ?? docId, uploadId)
|
|
356
|
+
.catch(() => null);
|
|
357
|
+
}
|
|
358
|
+
}
|
|
359
|
+
|
|
360
|
+
private _extractFileRefs(
|
|
361
|
+
ydoc: Y.Doc,
|
|
362
|
+
): Array<{ refDocId: string | null; uploadId: string }> {
|
|
363
|
+
const refs: Array<{ refDocId: string | null; uploadId: string }> = [];
|
|
364
|
+
|
|
365
|
+
// Walk XML fragment for fileBlock nodes
|
|
366
|
+
try {
|
|
367
|
+
const fragment = ydoc.getXmlFragment("default");
|
|
368
|
+
this._walkXml(fragment, refs);
|
|
369
|
+
} catch {}
|
|
370
|
+
|
|
371
|
+
// Walk doc-tree meta for cover upload IDs
|
|
372
|
+
try {
|
|
373
|
+
const treeMap = ydoc.getMap("doc-tree") as Y.Map<any>;
|
|
374
|
+
for (const [id, v] of treeMap.entries()) {
|
|
375
|
+
const coverUploadId = v?.meta?.coverUploadId;
|
|
376
|
+
if (coverUploadId) {
|
|
377
|
+
refs.push({ refDocId: id, uploadId: coverUploadId });
|
|
378
|
+
}
|
|
379
|
+
}
|
|
380
|
+
} catch {}
|
|
381
|
+
|
|
382
|
+
return refs;
|
|
383
|
+
}
|
|
384
|
+
|
|
385
|
+
private _walkXml(
|
|
386
|
+
node: Y.XmlFragment | Y.XmlElement,
|
|
387
|
+
refs: Array<{ refDocId: string | null; uploadId: string }>,
|
|
388
|
+
): void {
|
|
389
|
+
for (const child of node.toArray()) {
|
|
390
|
+
if (child instanceof Y.XmlElement) {
|
|
391
|
+
if (child.nodeName === "fileBlock") {
|
|
392
|
+
const uploadId = child.getAttribute("uploadId");
|
|
393
|
+
const refDocId = child.getAttribute("docId") ?? null;
|
|
394
|
+
if (uploadId) refs.push({ refDocId, uploadId });
|
|
395
|
+
}
|
|
396
|
+
this._walkXml(child, refs);
|
|
397
|
+
}
|
|
398
|
+
}
|
|
399
|
+
}
|
|
400
|
+
}
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* BackgroundSyncPersistence
|
|
3
|
+
*
|
|
4
|
+
* Minimal IndexedDB store (`abracadabra:bgsync:{serverOrigin}`) that persists
|
|
5
|
+
* per-document sync state across page reloads. A single object store,
|
|
6
|
+
* `sync_state`, is keyed by `docId`.
|
|
7
|
+
*
|
|
8
|
+
* Falls back to a no-op when IndexedDB is unavailable (SSR / Node.js).
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
export interface DocSyncState {
|
|
12
|
+
docId: string;
|
|
13
|
+
/** Current lifecycle status of this document's background sync. */
|
|
14
|
+
status: "pending" | "syncing" | "synced" | "error" | "skipped";
|
|
15
|
+
/** Unix ms of the last successful sync, or null if never synced. */
|
|
16
|
+
lastSynced: number | null;
|
|
17
|
+
/** Human-readable error message if status === "error". */
|
|
18
|
+
error?: string;
|
|
19
|
+
/** Whether the document uses E2E encryption. */
|
|
20
|
+
isE2E: boolean;
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
const DB_VERSION = 1;
|
|
24
|
+
|
|
25
|
+
function idbAvailable(): boolean {
|
|
26
|
+
return typeof globalThis !== "undefined" && "indexedDB" in globalThis;
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
function openDb(serverOrigin: string): Promise<IDBDatabase> {
|
|
30
|
+
return new Promise((resolve, reject) => {
|
|
31
|
+
const req = globalThis.indexedDB.open(
|
|
32
|
+
`abracadabra:bgsync:${serverOrigin}`,
|
|
33
|
+
DB_VERSION,
|
|
34
|
+
);
|
|
35
|
+
req.onupgradeneeded = (event) => {
|
|
36
|
+
const db = (event.target as IDBOpenDBRequest).result;
|
|
37
|
+
if (!db.objectStoreNames.contains("sync_state")) {
|
|
38
|
+
db.createObjectStore("sync_state", { keyPath: "docId" });
|
|
39
|
+
}
|
|
40
|
+
};
|
|
41
|
+
req.onsuccess = () => resolve(req.result);
|
|
42
|
+
req.onerror = () => reject(req.error);
|
|
43
|
+
});
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
function idbPromise<T>(request: IDBRequest<T>): Promise<T> {
|
|
47
|
+
return new Promise((resolve, reject) => {
|
|
48
|
+
request.onsuccess = () => resolve(request.result);
|
|
49
|
+
request.onerror = () => reject(request.error);
|
|
50
|
+
});
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
export class BackgroundSyncPersistence {
|
|
54
|
+
private dbPromise: Promise<IDBDatabase | null> | null = null;
|
|
55
|
+
private readonly serverOrigin: string;
|
|
56
|
+
|
|
57
|
+
constructor(serverOrigin: string) {
|
|
58
|
+
this.serverOrigin = serverOrigin;
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
private getDb(): Promise<IDBDatabase | null> {
|
|
62
|
+
if (!idbAvailable()) return Promise.resolve(null);
|
|
63
|
+
if (!this.dbPromise) {
|
|
64
|
+
this.dbPromise = openDb(this.serverOrigin).catch(() => null);
|
|
65
|
+
}
|
|
66
|
+
return this.dbPromise;
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
async getState(docId: string): Promise<DocSyncState | null> {
|
|
70
|
+
const db = await this.getDb();
|
|
71
|
+
if (!db) return null;
|
|
72
|
+
const tx = db.transaction("sync_state", "readonly");
|
|
73
|
+
const result = await idbPromise<DocSyncState | undefined>(
|
|
74
|
+
tx.objectStore("sync_state").get(docId),
|
|
75
|
+
);
|
|
76
|
+
return result ?? null;
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
async getAllStates(): Promise<DocSyncState[]> {
|
|
80
|
+
const db = await this.getDb();
|
|
81
|
+
if (!db) return [];
|
|
82
|
+
return new Promise((resolve, reject) => {
|
|
83
|
+
const tx = db.transaction("sync_state", "readonly");
|
|
84
|
+
const req = tx.objectStore("sync_state").getAll();
|
|
85
|
+
req.onsuccess = () => resolve(req.result as DocSyncState[]);
|
|
86
|
+
req.onerror = () => reject(req.error);
|
|
87
|
+
});
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
async setState(state: DocSyncState): Promise<void> {
|
|
91
|
+
const db = await this.getDb();
|
|
92
|
+
if (!db) return;
|
|
93
|
+
const tx = db.transaction("sync_state", "readwrite");
|
|
94
|
+
await idbPromise(tx.objectStore("sync_state").put(state));
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
async deleteState(docId: string): Promise<void> {
|
|
98
|
+
const db = await this.getDb();
|
|
99
|
+
if (!db) return;
|
|
100
|
+
const tx = db.transaction("sync_state", "readwrite");
|
|
101
|
+
await idbPromise(tx.objectStore("sync_state").delete(docId));
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
destroy(): void {
|
|
105
|
+
// No cleanup needed — shared IDB connection managed by browser
|
|
106
|
+
}
|
|
107
|
+
}
|