loro-repo 0.4.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,881 +1,456 @@
  import { Flock } from "@loro-dev/flock";
- import { LoroAdaptor } from "loro-adaptors/loro";
- import { CrdtType, bytesToHex } from "loro-protocol";
- import { LoroWebsocketClient } from "loro-websocket";
  import { LoroDoc } from "loro-crdt";
- import { FlockAdaptor } from "loro-adaptors/flock";
  import { promises } from "node:fs";
  import * as path from "node:path";
  import { randomUUID } from "node:crypto";
+ import { LoroAdaptor } from "loro-adaptors/loro";
+ import { bytesToHex } from "loro-protocol";
+ import { LoroWebsocketClient } from "loro-websocket";
+ import { FlockAdaptor } from "loro-adaptors/flock";
 
- //#region src/loro-adaptor.ts
- function createRepoFlockAdaptorFromDoc(flock, config = {}) {
- return new FlockAdaptor(flock, config);
- }
-
- //#endregion
- //#region src/internal/debug.ts
- const getEnv = () => {
- if (typeof globalThis !== "object" || globalThis === null) return;
- return globalThis.process?.env;
- };
- const rawNamespaceConfig = (getEnv()?.LORO_REPO_DEBUG ?? "").trim();
- const normalizedNamespaces = rawNamespaceConfig.length > 0 ? rawNamespaceConfig.split(/[\s,]+/).map((token) => token.toLowerCase()).filter(Boolean) : [];
- const wildcardTokens = new Set([
- "*",
- "1",
- "true",
- "all"
- ]);
- const namespaceSet = new Set(normalizedNamespaces);
- const hasWildcard = namespaceSet.size > 0 && normalizedNamespaces.some((token) => wildcardTokens.has(token));
- const isDebugEnabled = (namespace) => {
- if (!namespaceSet.size) return false;
- if (!namespace) return hasWildcard;
- const normalized = namespace.toLowerCase();
- if (hasWildcard) return true;
- if (namespaceSet.has(normalized)) return true;
- const [root] = normalized.split(":");
- return namespaceSet.has(root);
- };
- const createDebugLogger = (namespace) => {
- const normalized = namespace.toLowerCase();
- return (...args) => {
- if (!isDebugEnabled(normalized)) return;
- const prefix = `[loro-repo:${namespace}]`;
- if (args.length === 0) {
- console.info(prefix);
- return;
- }
- console.info(prefix, ...args);
+ //#region src/transport/broadcast-channel.ts
+ function deferred() {
+ let resolve;
+ return {
+ promise: new Promise((res) => {
+ resolve = res;
+ }),
+ resolve
  };
- };
-
- //#endregion
- //#region src/transport/websocket.ts
- const debug = createDebugLogger("transport:websocket");
- function withTimeout(promise, timeoutMs) {
- if (!timeoutMs || timeoutMs <= 0) return promise;
- return new Promise((resolve, reject) => {
- const timer = setTimeout(() => {
- reject(/* @__PURE__ */ new Error(`Operation timed out after ${timeoutMs}ms`));
- }, timeoutMs);
- promise.then((value) => {
- clearTimeout(timer);
- resolve(value);
- }).catch((error) => {
- clearTimeout(timer);
- reject(error);
- });
- });
  }
72
- function normalizeRoomId(roomId, fallback) {
73
- if (typeof roomId === "string" && roomId.length > 0) return roomId;
74
- if (roomId instanceof Uint8Array && roomId.length > 0) try {
75
- return bytesToHex(roomId);
21
+ function randomInstanceId() {
22
+ if (typeof crypto !== "undefined" && typeof crypto.randomUUID === "function") return crypto.randomUUID();
23
+ return Math.random().toString(36).slice(2);
24
+ }
25
+ function ensureBroadcastChannel() {
26
+ if (typeof BroadcastChannel === "undefined") throw new Error("BroadcastChannel API is not available in this environment");
27
+ return BroadcastChannel;
28
+ }
29
+ function encodeDocChannelId(docId) {
30
+ try {
31
+ return encodeURIComponent(docId);
76
32
  } catch {
77
- return fallback;
33
+ return docId.replace(/[^a-z0-9_-]/gi, "_");
78
34
  }
79
- return fallback;
80
35
  }
81
- function bytesEqual(a, b) {
82
- if (a === b) return true;
83
- if (!a || !b) return false;
84
- if (a.length !== b.length) return false;
85
- for (let i = 0; i < a.length; i += 1) if (a[i] !== b[i]) return false;
86
- return true;
36
+ function postChannelMessage(channel, message) {
37
+ channel.postMessage(message);
87
38
  }
88
39
  /**
89
- * loro-websocket backed {@link TransportAdapter} implementation for LoroRepo.
40
+ * TransportAdapter that relies on the BroadcastChannel API to fan out metadata
41
+ * and document updates between browser tabs within the same origin.
90
42
  */
91
- var WebSocketTransportAdapter = class {
92
- options;
93
- client;
94
- metadataSession;
95
- docSessions = /* @__PURE__ */ new Map();
96
- constructor(options) {
97
- this.options = options;
43
+ var BroadcastChannelTransportAdapter = class {
44
+ instanceId = randomInstanceId();
45
+ namespace;
46
+ metaChannelName;
47
+ connected = false;
48
+ metaState;
49
+ docStates = /* @__PURE__ */ new Map();
50
+ constructor(options = {}) {
51
+ ensureBroadcastChannel();
52
+ this.namespace = options.namespace ?? "loro-repo";
53
+ this.metaChannelName = options.metaChannelName ?? `${this.namespace}-meta`;
98
54
  }
99
- async connect(_options) {
100
- const client = this.ensureClient();
101
- debug("connect requested", { status: client.getStatus() });
102
- try {
103
- await client.connect();
104
- debug("client.connect resolved");
105
- await client.waitConnected();
106
- debug("client.waitConnected resolved", { status: client.getStatus() });
107
- } catch (error) {
108
- debug("connect failed", error);
109
- throw error;
110
- }
55
+ async connect() {
56
+ this.connected = true;
111
57
  }
112
58
  async close() {
113
- debug("close requested", {
114
- docSessions: this.docSessions.size,
115
- metadataSession: Boolean(this.metadataSession)
116
- });
117
- for (const [docId] of this.docSessions) await this.leaveDocSession(docId).catch(() => {});
118
- this.docSessions.clear();
119
- await this.teardownMetadataSession().catch(() => {});
120
- if (this.client) {
121
- const client = this.client;
122
- this.client = void 0;
123
- client.destroy();
124
- debug("websocket client destroyed");
59
+ this.connected = false;
60
+ if (this.metaState) {
61
+ for (const entry of this.metaState.listeners) entry.unsubscribe();
62
+ this.metaState.channel.close();
63
+ this.metaState = void 0;
125
64
  }
126
- debug("close completed");
65
+ for (const [docId] of this.docStates) this.teardownDocChannel(docId);
66
+ this.docStates.clear();
127
67
  }
128
68
  isConnected() {
129
- return this.client?.getStatus() === "connected";
69
+ return this.connected;
130
70
  }
131
- async syncMeta(flock, options) {
132
- if (!this.options.metadataRoomId) {
133
- debug("syncMeta skipped; metadata room not configured");
134
- return { ok: true };
135
- }
136
- debug("syncMeta requested", { roomId: this.options.metadataRoomId });
137
- try {
138
- await withTimeout((await this.ensureMetadataSession(flock, {
139
- roomId: this.options.metadataRoomId,
140
- auth: this.options.metadataAuth
141
- })).firstSynced, options?.timeout);
142
- debug("syncMeta completed", { roomId: this.options.metadataRoomId });
143
- return { ok: true };
144
- } catch (error) {
145
- debug("syncMeta failed", error);
146
- return { ok: false };
147
- }
71
+ async syncMeta(flock, _options) {
72
+ const subscription = this.joinMetaRoom(flock);
73
+ subscription.firstSyncedWithRemote.catch(() => void 0);
74
+ await subscription.firstSyncedWithRemote;
75
+ subscription.unsubscribe();
76
+ return { ok: true };
148
77
  }
149
- joinMetaRoom(flock, params) {
150
- const fallback = this.options.metadataRoomId ?? "";
151
- const roomId = normalizeRoomId(params?.roomId, fallback);
152
- if (!roomId) throw new Error("Metadata room id not configured");
153
- const auth = params?.auth ?? this.options.metadataAuth;
154
- debug("joinMetaRoom requested", {
155
- roomId,
156
- hasAuth: Boolean(auth && auth.length)
78
+ joinMetaRoom(flock, _params) {
79
+ const state = this.ensureMetaChannel();
80
+ const { promise, resolve } = deferred();
81
+ const listener = {
82
+ flock,
83
+ muted: false,
84
+ unsubscribe: flock.subscribe(() => {
85
+ if (listener.muted) return;
86
+ Promise.resolve(flock.exportJson()).then((bundle) => {
87
+ postChannelMessage(state.channel, {
88
+ kind: "meta-export",
89
+ from: this.instanceId,
90
+ bundle
91
+ });
92
+ });
93
+ }),
94
+ resolveFirst: resolve,
95
+ firstSynced: promise
96
+ };
97
+ state.listeners.add(listener);
98
+ postChannelMessage(state.channel, {
99
+ kind: "meta-request",
100
+ from: this.instanceId
157
101
  });
158
- const ensure = this.ensureMetadataSession(flock, {
159
- roomId,
160
- auth
102
+ Promise.resolve(flock.exportJson()).then((bundle) => {
103
+ postChannelMessage(state.channel, {
104
+ kind: "meta-export",
105
+ from: this.instanceId,
106
+ bundle
107
+ });
161
108
  });
162
- const firstSynced = ensure.then((session) => session.firstSynced);
163
- const getConnected = () => this.isConnected();
164
- const subscription = {
109
+ queueMicrotask(() => resolve());
110
+ return {
165
111
  unsubscribe: () => {
166
- ensure.then((session) => {
167
- session.refCount = Math.max(0, session.refCount - 1);
168
- debug("metadata session refCount decremented", {
169
- roomId: session.roomId,
170
- refCount: session.refCount
171
- });
172
- if (session.refCount === 0) {
173
- debug("tearing down metadata session due to refCount=0", { roomId: session.roomId });
174
- this.teardownMetadataSession(session).catch(() => {});
175
- }
176
- });
112
+ listener.unsubscribe();
113
+ state.listeners.delete(listener);
114
+ if (!state.listeners.size) {
115
+ state.channel.removeEventListener("message", state.onMessage);
116
+ state.channel.close();
117
+ this.metaState = void 0;
118
+ }
177
119
  },
178
- firstSyncedWithRemote: firstSynced,
120
+ firstSyncedWithRemote: listener.firstSynced,
179
121
  get connected() {
180
- return getConnected();
122
+ return true;
181
123
  }
182
124
  };
183
- ensure.then((session) => {
184
- session.refCount += 1;
185
- debug("metadata session refCount incremented", {
186
- roomId: session.roomId,
187
- refCount: session.refCount
188
- });
189
- });
190
- return subscription;
191
125
  }
192
- async syncDoc(docId, doc, options) {
193
- debug("syncDoc requested", { docId });
194
- try {
195
- const session = await this.ensureDocSession(docId, doc, {});
196
- await withTimeout(session.firstSynced, options?.timeout);
197
- debug("syncDoc completed", {
198
- docId,
199
- roomId: session.roomId
200
- });
201
- return { ok: true };
202
- } catch (error) {
203
- debug("syncDoc failed", {
204
- docId,
205
- error
206
- });
207
- return { ok: false };
208
- }
126
+ async syncDoc(docId, doc, _options) {
127
+ const subscription = this.joinDocRoom(docId, doc);
128
+ subscription.firstSyncedWithRemote.catch(() => void 0);
129
+ await subscription.firstSyncedWithRemote;
130
+ subscription.unsubscribe();
131
+ return { ok: true };
209
132
  }
210
- joinDocRoom(docId, doc, params) {
211
- debug("joinDocRoom requested", {
133
+ joinDocRoom(docId, doc, _params) {
134
+ const state = this.ensureDocChannel(docId);
135
+ const { promise, resolve } = deferred();
136
+ const listener = {
137
+ doc,
138
+ muted: false,
139
+ unsubscribe: doc.subscribe(() => {
140
+ if (listener.muted) return;
141
+ const payload = doc.export({ mode: "update" });
142
+ postChannelMessage(state.channel, {
143
+ kind: "doc-update",
144
+ docId,
145
+ from: this.instanceId,
146
+ mode: "update",
147
+ payload
148
+ });
149
+ }),
150
+ resolveFirst: resolve,
151
+ firstSynced: promise
152
+ };
153
+ state.listeners.add(listener);
154
+ postChannelMessage(state.channel, {
155
+ kind: "doc-request",
212
156
  docId,
213
- roomParamType: params?.roomId ? typeof params.roomId === "string" ? "string" : "uint8array" : void 0,
214
- hasAuthOverride: Boolean(params?.auth && params.auth.length)
157
+ from: this.instanceId
215
158
  });
216
- const ensure = this.ensureDocSession(docId, doc, params ?? {});
217
- const firstSynced = ensure.then((session) => session.firstSynced);
218
- const getConnected = () => this.isConnected();
219
- const subscription = {
159
+ postChannelMessage(state.channel, {
160
+ kind: "doc-update",
161
+ docId,
162
+ from: this.instanceId,
163
+ mode: "snapshot",
164
+ payload: doc.export({ mode: "snapshot" })
165
+ });
166
+ queueMicrotask(() => resolve());
167
+ return {
220
168
  unsubscribe: () => {
221
- ensure.then((session) => {
222
- session.refCount = Math.max(0, session.refCount - 1);
223
- debug("doc session refCount decremented", {
224
- docId,
225
- roomId: session.roomId,
226
- refCount: session.refCount
227
- });
228
- if (session.refCount === 0) this.leaveDocSession(docId).catch(() => {});
229
- });
169
+ listener.unsubscribe();
170
+ state.listeners.delete(listener);
171
+ if (!state.listeners.size) this.teardownDocChannel(docId);
230
172
  },
231
- firstSyncedWithRemote: firstSynced,
173
+ firstSyncedWithRemote: listener.firstSynced,
232
174
  get connected() {
233
- return getConnected();
175
+ return true;
234
176
  }
235
177
  };
236
- ensure.then((session) => {
237
- session.refCount += 1;
238
- debug("doc session refCount incremented", {
239
- docId,
240
- roomId: session.roomId,
241
- refCount: session.refCount
242
- });
243
- });
244
- return subscription;
245
178
  }
246
- ensureClient() {
247
- if (this.client) {
248
- debug("reusing websocket client", { status: this.client.getStatus() });
249
- return this.client;
250
- }
251
- const { url, client: clientOptions } = this.options;
252
- debug("creating websocket client", {
253
- url,
254
- clientOptionsKeys: clientOptions ? Object.keys(clientOptions) : []
255
- });
256
- const client = new LoroWebsocketClient({
257
- url,
258
- ...clientOptions
259
- });
260
- this.client = client;
261
- return client;
179
+ ensureMetaChannel() {
180
+ if (this.metaState) return this.metaState;
181
+ const channel = new (ensureBroadcastChannel())(this.metaChannelName);
182
+ const listeners = /* @__PURE__ */ new Set();
183
+ const onMessage = (event) => {
184
+ const message = event.data;
185
+ if (!message || message.from === this.instanceId) return;
186
+ if (message.kind === "meta-export") for (const entry of listeners) {
187
+ entry.muted = true;
188
+ entry.flock.importJson(message.bundle);
189
+ entry.muted = false;
190
+ entry.resolveFirst();
191
+ }
192
+ else if (message.kind === "meta-request") {
193
+ const first = listeners.values().next().value;
194
+ if (!first) return;
195
+ Promise.resolve(first.flock.exportJson()).then((bundle) => {
196
+ postChannelMessage(channel, {
197
+ kind: "meta-export",
198
+ from: this.instanceId,
199
+ bundle
200
+ });
201
+ });
202
+ }
203
+ };
204
+ channel.addEventListener("message", onMessage);
205
+ this.metaState = {
206
+ channel,
207
+ listeners,
208
+ onMessage
209
+ };
210
+ return this.metaState;
262
211
  }
263
- async ensureMetadataSession(flock, params) {
264
- debug("ensureMetadataSession invoked", {
265
- roomId: params.roomId,
266
- hasAuth: Boolean(params.auth && params.auth.length)
267
- });
268
- const client = this.ensureClient();
269
- await client.waitConnected();
270
- debug("websocket client ready for metadata session", { status: client.getStatus() });
271
- if (this.metadataSession && this.metadataSession.flock === flock && this.metadataSession.roomId === params.roomId && bytesEqual(this.metadataSession.auth, params.auth)) {
272
- debug("reusing metadata session", {
273
- roomId: this.metadataSession.roomId,
274
- refCount: this.metadataSession.refCount
275
- });
276
- return this.metadataSession;
277
- }
278
- if (this.metadataSession) {
279
- debug("tearing down previous metadata session", { roomId: this.metadataSession.roomId });
280
- await this.teardownMetadataSession(this.metadataSession).catch(() => {});
281
- }
282
- const configuredType = this.options.metadataCrdtType;
283
- if (configuredType && configuredType !== CrdtType.Flock) throw new Error(`metadataCrdtType must be ${CrdtType.Flock} when syncing Flock metadata`);
284
- const adaptor = createRepoFlockAdaptorFromDoc(flock, this.options.metadataAdaptorConfig ?? {});
285
- debug("joining metadata room", {
286
- roomId: params.roomId,
287
- hasAuth: Boolean(params.auth && params.auth.length)
288
- });
289
- const room = await client.join({
290
- roomId: params.roomId,
291
- crdtAdaptor: adaptor,
292
- auth: params.auth
293
- });
294
- const firstSynced = room.waitForReachingServerVersion();
295
- firstSynced.then(() => {
296
- debug("metadata session firstSynced resolved", { roomId: params.roomId });
297
- }, (error) => {
298
- debug("metadata session firstSynced rejected", {
299
- roomId: params.roomId,
300
- error
301
- });
302
- });
303
- const session = {
304
- adaptor,
305
- room,
306
- firstSynced,
307
- flock,
308
- roomId: params.roomId,
309
- auth: params.auth,
310
- refCount: 0
212
+ ensureDocChannel(docId) {
213
+ const existing = this.docStates.get(docId);
214
+ if (existing) return existing;
215
+ const channel = new (ensureBroadcastChannel())(`${this.namespace}-doc-${encodeDocChannelId(docId)}`);
216
+ const listeners = /* @__PURE__ */ new Set();
217
+ const onMessage = (event) => {
218
+ const message = event.data;
219
+ if (!message || message.from === this.instanceId) return;
220
+ if (message.kind === "doc-update") for (const entry of listeners) {
221
+ entry.muted = true;
222
+ entry.doc.import(message.payload);
223
+ entry.muted = false;
224
+ entry.resolveFirst();
225
+ }
226
+ else if (message.kind === "doc-request") {
227
+ const first = listeners.values().next().value;
228
+ if (!first) return;
229
+ const payload = message.docId === docId ? first.doc.export({ mode: "snapshot" }) : void 0;
230
+ if (!payload) return;
231
+ postChannelMessage(channel, {
232
+ kind: "doc-update",
233
+ docId,
234
+ from: this.instanceId,
235
+ mode: "snapshot",
236
+ payload
237
+ });
238
+ }
311
239
  };
312
- this.metadataSession = session;
313
- return session;
240
+ channel.addEventListener("message", onMessage);
241
+ const state = {
242
+ channel,
243
+ listeners,
244
+ onMessage
245
+ };
246
+ this.docStates.set(docId, state);
247
+ return state;
314
248
  }
315
- async teardownMetadataSession(session) {
316
- const target = session ?? this.metadataSession;
317
- if (!target) return;
318
- debug("teardownMetadataSession invoked", { roomId: target.roomId });
319
- if (this.metadataSession === target) this.metadataSession = void 0;
320
- const { adaptor, room } = target;
321
- try {
322
- await room.leave();
323
- debug("metadata room left", { roomId: target.roomId });
324
- } catch (error) {
325
- debug("metadata room leave failed; destroying", {
326
- roomId: target.roomId,
327
- error
328
- });
329
- await room.destroy().catch(() => {});
330
- }
331
- adaptor.destroy();
332
- debug("metadata session destroyed", { roomId: target.roomId });
333
- }
334
- async ensureDocSession(docId, doc, params) {
335
- debug("ensureDocSession invoked", { docId });
336
- const client = this.ensureClient();
337
- await client.waitConnected();
338
- debug("websocket client ready for doc session", {
339
- docId,
340
- status: client.getStatus()
341
- });
342
- const existing = this.docSessions.get(docId);
343
- const derivedRoomId = this.options.docRoomId?.(docId) ?? docId;
344
- const roomId = normalizeRoomId(params.roomId, derivedRoomId);
345
- const auth = params.auth ?? this.options.docAuth?.(docId);
346
- debug("doc session params resolved", {
347
- docId,
348
- roomId,
349
- hasAuth: Boolean(auth && auth.length)
350
- });
351
- if (existing && existing.doc === doc && existing.roomId === roomId) {
352
- debug("reusing doc session", {
353
- docId,
354
- roomId,
355
- refCount: existing.refCount
356
- });
357
- return existing;
358
- }
359
- if (existing) {
360
- debug("doc session mismatch; leaving existing session", {
361
- docId,
362
- previousRoomId: existing.roomId,
363
- nextRoomId: roomId
364
- });
365
- await this.leaveDocSession(docId).catch(() => {});
366
- }
367
- const adaptor = new LoroAdaptor(doc);
368
- debug("joining doc room", {
369
- docId,
370
- roomId,
371
- hasAuth: Boolean(auth && auth.length)
372
- });
373
- const room = await client.join({
374
- roomId,
375
- crdtAdaptor: adaptor,
376
- auth
377
- });
378
- const firstSynced = room.waitForReachingServerVersion();
379
- firstSynced.then(() => {
380
- debug("doc session firstSynced resolved", {
381
- docId,
382
- roomId
383
- });
384
- }, (error) => {
385
- debug("doc session firstSynced rejected", {
386
- docId,
387
- roomId,
388
- error
389
- });
390
- });
391
- const session = {
392
- adaptor,
393
- room,
394
- firstSynced,
395
- doc,
396
- roomId,
397
- refCount: 0
398
- };
399
- this.docSessions.set(docId, session);
400
- return session;
401
- }
402
- async leaveDocSession(docId) {
403
- const session = this.docSessions.get(docId);
404
- if (!session) {
405
- debug("leaveDocSession invoked but no session found", { docId });
406
- return;
407
- }
408
- this.docSessions.delete(docId);
409
- debug("leaving doc session", {
410
- docId,
411
- roomId: session.roomId
412
- });
413
- try {
414
- await session.room.leave();
415
- debug("doc room left", {
416
- docId,
417
- roomId: session.roomId
418
- });
419
- } catch (error) {
420
- debug("doc room leave failed; destroying", {
421
- docId,
422
- roomId: session.roomId,
423
- error
424
- });
425
- await session.room.destroy().catch(() => {});
426
- }
427
- session.adaptor.destroy();
428
- debug("doc session destroyed", {
429
- docId,
430
- roomId: session.roomId
431
- });
+ teardownDocChannel(docId) {
+ const state = this.docStates.get(docId);
+ if (!state) return;
+ for (const entry of state.listeners) entry.unsubscribe();
+ state.channel.removeEventListener("message", state.onMessage);
+ state.channel.close();
+ this.docStates.delete(docId);
  }
  };
 
  //#endregion
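The added region above introduces BroadcastChannelTransportAdapter, a same-origin, tab-to-tab transport that needs no server round trip. A minimal usage sketch, assuming the class is re-exported from the package root (the diff only shows dist/index.js, not the package's export map):

import { LoroDoc } from "loro-crdt";
import { BroadcastChannelTransportAdapter } from "loro-repo"; // assumed export path

const transport = new BroadcastChannelTransportAdapter({ namespace: "my-app" });
await transport.connect();

const doc = new LoroDoc();
// Join the per-document channel; sibling tabs answer the initial "doc-request"
// with a snapshot, and later local edits are re-broadcast as incremental updates.
const room = transport.joinDocRoom("todo-list", doc);
await room.firstSyncedWithRemote;
// ... mutate `doc` ...
room.unsubscribe();
await transport.close();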
436
- //#region src/transport/broadcast-channel.ts
437
- function deferred() {
438
- let resolve;
439
- return {
440
- promise: new Promise((res) => {
441
- resolve = res;
442
- }),
443
- resolve
444
- };
445
- }
446
- function randomInstanceId() {
447
- if (typeof crypto !== "undefined" && typeof crypto.randomUUID === "function") return crypto.randomUUID();
448
- return Math.random().toString(36).slice(2);
449
- }
450
- function ensureBroadcastChannel() {
451
- if (typeof BroadcastChannel === "undefined") throw new Error("BroadcastChannel API is not available in this environment");
452
- return BroadcastChannel;
453
- }
454
- function encodeDocChannelId(docId) {
455
- try {
456
- return encodeURIComponent(docId);
260
+ //#region src/storage/indexeddb.ts
261
+ const DEFAULT_DB_NAME = "loro-repo";
262
+ const DEFAULT_DB_VERSION = 1;
263
+ const DEFAULT_DOC_STORE = "docs";
264
+ const DEFAULT_META_STORE = "meta";
265
+ const DEFAULT_ASSET_STORE = "assets";
266
+ const DEFAULT_DOC_UPDATE_STORE = "doc-updates";
267
+ const DEFAULT_META_KEY = "snapshot";
268
+ const textDecoder$1 = new TextDecoder();
269
+ function describeUnknown(cause) {
270
+ if (typeof cause === "string") return cause;
271
+ if (typeof cause === "number" || typeof cause === "boolean") return String(cause);
272
+ if (typeof cause === "bigint") return cause.toString();
273
+ if (typeof cause === "symbol") return cause.description ?? cause.toString();
274
+ if (typeof cause === "function") return `[function ${cause.name ?? "anonymous"}]`;
275
+ if (cause && typeof cause === "object") try {
276
+ return JSON.stringify(cause);
457
277
  } catch {
458
- return docId.replace(/[^a-z0-9_-]/gi, "_");
278
+ return "[object]";
459
279
  }
280
+ return String(cause);
460
281
  }
461
- function postChannelMessage(channel, message) {
462
- channel.postMessage(message);
463
- }
464
- /**
465
- * TransportAdapter that relies on the BroadcastChannel API to fan out metadata
466
- * and document updates between browser tabs within the same origin.
467
- */
468
- var BroadcastChannelTransportAdapter = class {
469
- instanceId = randomInstanceId();
470
- namespace;
471
- metaChannelName;
472
- connected = false;
473
- metaState;
474
- docStates = /* @__PURE__ */ new Map();
282
+ var IndexedDBStorageAdaptor = class {
283
+ idb;
284
+ dbName;
285
+ version;
286
+ docStore;
287
+ docUpdateStore;
288
+ metaStore;
289
+ assetStore;
290
+ metaKey;
291
+ dbPromise;
292
+ closed = false;
475
293
  constructor(options = {}) {
476
- ensureBroadcastChannel();
477
- this.namespace = options.namespace ?? "loro-repo";
478
- this.metaChannelName = options.metaChannelName ?? `${this.namespace}-meta`;
294
+ const idbFactory = globalThis.indexedDB;
295
+ if (!idbFactory) throw new Error("IndexedDB is not available in this environment");
296
+ this.idb = idbFactory;
297
+ this.dbName = options.dbName ?? DEFAULT_DB_NAME;
298
+ this.version = options.version ?? DEFAULT_DB_VERSION;
299
+ this.docStore = options.docStoreName ?? DEFAULT_DOC_STORE;
300
+ this.docUpdateStore = options.docUpdateStoreName ?? DEFAULT_DOC_UPDATE_STORE;
301
+ this.metaStore = options.metaStoreName ?? DEFAULT_META_STORE;
302
+ this.assetStore = options.assetStoreName ?? DEFAULT_ASSET_STORE;
303
+ this.metaKey = options.metaKey ?? DEFAULT_META_KEY;
479
304
  }
480
- async connect() {
481
- this.connected = true;
305
+ async save(payload) {
306
+ const db = await this.ensureDb();
307
+ switch (payload.type) {
308
+ case "doc-snapshot": {
309
+ const snapshot = payload.snapshot.slice();
310
+ await this.storeMergedSnapshot(db, payload.docId, snapshot);
311
+ break;
312
+ }
313
+ case "doc-update": {
314
+ const update = payload.update.slice();
315
+ await this.appendDocUpdate(db, payload.docId, update);
316
+ break;
317
+ }
318
+ case "asset": {
319
+ const bytes = payload.data.slice();
320
+ await this.putBinary(db, this.assetStore, payload.assetId, bytes);
321
+ break;
322
+ }
323
+ case "meta": {
324
+ const bytes = payload.update.slice();
325
+ await this.putBinary(db, this.metaStore, this.metaKey, bytes);
326
+ break;
327
+ }
328
+ default: throw new Error("Unsupported storage payload type");
329
+ }
482
330
  }
483
- async close() {
484
- this.connected = false;
485
- if (this.metaState) {
486
- for (const entry of this.metaState.listeners) entry.unsubscribe();
487
- this.metaState.channel.close();
488
- this.metaState = void 0;
331
+ async deleteAsset(assetId) {
332
+ const db = await this.ensureDb();
333
+ await this.deleteKey(db, this.assetStore, assetId);
334
+ }
335
+ async loadDoc(docId) {
336
+ const db = await this.ensureDb();
337
+ const snapshot = await this.getBinaryFromDb(db, this.docStore, docId);
338
+ const pendingUpdates = await this.getDocUpdates(db, docId);
339
+ if (!snapshot && pendingUpdates.length === 0) return;
340
+ let doc;
341
+ try {
342
+ doc = snapshot ? LoroDoc.fromSnapshot(snapshot) : new LoroDoc();
343
+ } catch (error) {
344
+ throw this.createError(`Failed to hydrate document snapshot for "${docId}"`, error);
489
345
  }
490
- for (const [docId] of this.docStates) this.teardownDocChannel(docId);
491
- this.docStates.clear();
346
+ let appliedUpdates = false;
347
+ for (const update of pendingUpdates) try {
348
+ doc.import(update);
349
+ appliedUpdates = true;
350
+ } catch (error) {
351
+ throw this.createError(`Failed to apply queued document update for "${docId}"`, error);
352
+ }
353
+ if (appliedUpdates) {
354
+ let consolidated;
355
+ try {
356
+ consolidated = doc.export({ mode: "snapshot" });
357
+ } catch (error) {
358
+ throw this.createError(`Failed to export consolidated snapshot for "${docId}"`, error);
359
+ }
360
+ await this.writeSnapshot(db, docId, consolidated);
361
+ await this.clearDocUpdates(db, docId);
362
+ }
363
+ return doc;
492
364
  }
493
- isConnected() {
494
- return this.connected;
365
+ async loadMeta() {
366
+ const bytes = await this.getBinary(this.metaStore, this.metaKey);
367
+ if (!bytes) return void 0;
368
+ try {
369
+ const json = textDecoder$1.decode(bytes);
370
+ const bundle = JSON.parse(json);
371
+ const flock = new Flock();
372
+ flock.importJson(bundle);
373
+ return flock;
374
+ } catch (error) {
375
+ throw this.createError("Failed to hydrate metadata snapshot", error);
376
+ }
495
377
  }
496
- async syncMeta(flock, _options) {
497
- const subscription = this.joinMetaRoom(flock);
498
- subscription.firstSyncedWithRemote.catch(() => void 0);
499
- await subscription.firstSyncedWithRemote;
500
- subscription.unsubscribe();
501
- return { ok: true };
378
+ async loadAsset(assetId) {
379
+ return await this.getBinary(this.assetStore, assetId) ?? void 0;
502
380
  }
503
- joinMetaRoom(flock, _params) {
504
- const state = this.ensureMetaChannel();
505
- const { promise, resolve } = deferred();
506
- const listener = {
507
- flock,
508
- muted: false,
509
- unsubscribe: flock.subscribe(() => {
510
- if (listener.muted) return;
511
- Promise.resolve(flock.exportJson()).then((bundle) => {
512
- postChannelMessage(state.channel, {
513
- kind: "meta-export",
514
- from: this.instanceId,
515
- bundle
516
- });
517
- });
518
- }),
519
- resolveFirst: resolve,
520
- firstSynced: promise
521
- };
522
- state.listeners.add(listener);
523
- postChannelMessage(state.channel, {
524
- kind: "meta-request",
525
- from: this.instanceId
526
- });
527
- Promise.resolve(flock.exportJson()).then((bundle) => {
528
- postChannelMessage(state.channel, {
529
- kind: "meta-export",
530
- from: this.instanceId,
531
- bundle
381
+ async close() {
382
+ this.closed = true;
383
+ const db = await this.dbPromise;
384
+ if (db) db.close();
385
+ this.dbPromise = void 0;
386
+ }
387
+ async ensureDb() {
388
+ if (this.closed) throw new Error("IndexedDBStorageAdaptor has been closed");
389
+ if (!this.dbPromise) this.dbPromise = new Promise((resolve, reject) => {
390
+ const request = this.idb.open(this.dbName, this.version);
391
+ request.addEventListener("upgradeneeded", () => {
392
+ const db = request.result;
393
+ this.ensureStore(db, this.docStore);
394
+ this.ensureStore(db, this.docUpdateStore);
395
+ this.ensureStore(db, this.metaStore);
396
+ this.ensureStore(db, this.assetStore);
532
397
  });
398
+ request.addEventListener("success", () => resolve(request.result), { once: true });
399
+ request.addEventListener("error", () => {
400
+ reject(this.createError(`Failed to open IndexedDB database "${this.dbName}"`, request.error));
401
+ }, { once: true });
533
402
  });
534
- queueMicrotask(() => resolve());
535
- return {
536
- unsubscribe: () => {
537
- listener.unsubscribe();
538
- state.listeners.delete(listener);
539
- if (!state.listeners.size) {
540
- state.channel.removeEventListener("message", state.onMessage);
541
- state.channel.close();
542
- this.metaState = void 0;
543
- }
544
- },
545
- firstSyncedWithRemote: listener.firstSynced,
546
- get connected() {
547
- return true;
548
- }
549
- };
403
+ return this.dbPromise;
550
404
  }
551
- async syncDoc(docId, doc, _options) {
552
- const subscription = this.joinDocRoom(docId, doc);
553
- subscription.firstSyncedWithRemote.catch(() => void 0);
554
- await subscription.firstSyncedWithRemote;
555
- subscription.unsubscribe();
556
- return { ok: true };
405
+ ensureStore(db, storeName) {
406
+ const names = db.objectStoreNames;
407
+ if (this.storeExists(names, storeName)) return;
408
+ db.createObjectStore(storeName);
557
409
  }
558
- joinDocRoom(docId, doc, _params) {
559
- const state = this.ensureDocChannel(docId);
560
- const { promise, resolve } = deferred();
561
- const listener = {
562
- doc,
563
- muted: false,
564
- unsubscribe: doc.subscribe(() => {
565
- if (listener.muted) return;
566
- const payload = doc.export({ mode: "update" });
567
- postChannelMessage(state.channel, {
568
- kind: "doc-update",
569
- docId,
570
- from: this.instanceId,
571
- mode: "update",
572
- payload
573
- });
574
- }),
575
- resolveFirst: resolve,
576
- firstSynced: promise
577
- };
578
- state.listeners.add(listener);
579
- postChannelMessage(state.channel, {
580
- kind: "doc-request",
581
- docId,
582
- from: this.instanceId
410
+ storeExists(names, storeName) {
411
+ if (typeof names.contains === "function") return names.contains(storeName);
412
+ const length = names.length ?? 0;
413
+ for (let index = 0; index < length; index += 1) if (names.item?.(index) === storeName) return true;
414
+ return false;
415
+ }
416
+ async storeMergedSnapshot(db, docId, incoming) {
417
+ await this.runInTransaction(db, this.docStore, "readwrite", async (store) => {
418
+ const existingRaw = await this.wrapRequest(store.get(docId), "read");
419
+ const existing = await this.normalizeBinary(existingRaw);
420
+ const merged = this.mergeSnapshots(docId, existing, incoming);
421
+ await this.wrapRequest(store.put(merged, docId), "write");
583
422
  });
584
- postChannelMessage(state.channel, {
585
- kind: "doc-update",
586
- docId,
587
- from: this.instanceId,
588
- mode: "snapshot",
589
- payload: doc.export({ mode: "snapshot" })
423
+ }
424
+ mergeSnapshots(docId, existing, incoming) {
425
+ try {
426
+ const doc = existing ? LoroDoc.fromSnapshot(existing) : new LoroDoc();
427
+ doc.import(incoming);
428
+ return doc.export({ mode: "snapshot" });
429
+ } catch (error) {
430
+ throw this.createError(`Failed to merge snapshot for "${docId}"`, error);
431
+ }
432
+ }
433
+ async appendDocUpdate(db, docId, update) {
434
+ await this.runInTransaction(db, this.docUpdateStore, "readwrite", async (store) => {
435
+ const raw = await this.wrapRequest(store.get(docId), "read");
436
+ const queue = await this.normalizeUpdateQueue(raw);
437
+ queue.push(update.slice());
438
+ await this.wrapRequest(store.put({ updates: queue }, docId), "write");
590
439
  });
591
- queueMicrotask(() => resolve());
592
- return {
593
- unsubscribe: () => {
594
- listener.unsubscribe();
595
- state.listeners.delete(listener);
596
- if (!state.listeners.size) this.teardownDocChannel(docId);
597
- },
598
- firstSyncedWithRemote: listener.firstSynced,
599
- get connected() {
600
- return true;
601
- }
602
- };
603
440
  }
604
- ensureMetaChannel() {
605
- if (this.metaState) return this.metaState;
606
- const channel = new (ensureBroadcastChannel())(this.metaChannelName);
607
- const listeners = /* @__PURE__ */ new Set();
608
- const onMessage = (event) => {
609
- const message = event.data;
610
- if (!message || message.from === this.instanceId) return;
611
- if (message.kind === "meta-export") for (const entry of listeners) {
612
- entry.muted = true;
613
- entry.flock.importJson(message.bundle);
614
- entry.muted = false;
615
- entry.resolveFirst();
616
- }
617
- else if (message.kind === "meta-request") {
618
- const first = listeners.values().next().value;
619
- if (!first) return;
620
- Promise.resolve(first.flock.exportJson()).then((bundle) => {
621
- postChannelMessage(channel, {
622
- kind: "meta-export",
623
- from: this.instanceId,
624
- bundle
625
- });
626
- });
627
- }
628
- };
629
- channel.addEventListener("message", onMessage);
630
- this.metaState = {
631
- channel,
632
- listeners,
633
- onMessage
634
- };
635
- return this.metaState;
441
+ async getDocUpdates(db, docId) {
442
+ const raw = await this.runInTransaction(db, this.docUpdateStore, "readonly", (store) => this.wrapRequest(store.get(docId), "read"));
443
+ return this.normalizeUpdateQueue(raw);
636
444
  }
637
- ensureDocChannel(docId) {
638
- const existing = this.docStates.get(docId);
639
- if (existing) return existing;
640
- const channel = new (ensureBroadcastChannel())(`${this.namespace}-doc-${encodeDocChannelId(docId)}`);
641
- const listeners = /* @__PURE__ */ new Set();
642
- const onMessage = (event) => {
643
- const message = event.data;
644
- if (!message || message.from === this.instanceId) return;
645
- if (message.kind === "doc-update") for (const entry of listeners) {
646
- entry.muted = true;
647
- entry.doc.import(message.payload);
648
- entry.muted = false;
649
- entry.resolveFirst();
650
- }
651
- else if (message.kind === "doc-request") {
652
- const first = listeners.values().next().value;
653
- if (!first) return;
654
- const payload = message.docId === docId ? first.doc.export({ mode: "snapshot" }) : void 0;
655
- if (!payload) return;
656
- postChannelMessage(channel, {
657
- kind: "doc-update",
658
- docId,
659
- from: this.instanceId,
660
- mode: "snapshot",
661
- payload
662
- });
663
- }
664
- };
665
- channel.addEventListener("message", onMessage);
666
- const state = {
667
- channel,
668
- listeners,
669
- onMessage
670
- };
671
- this.docStates.set(docId, state);
672
- return state;
445
+ async clearDocUpdates(db, docId) {
446
+ await this.runInTransaction(db, this.docUpdateStore, "readwrite", (store) => this.wrapRequest(store.delete(docId), "delete"));
673
447
  }
674
- teardownDocChannel(docId) {
675
- const state = this.docStates.get(docId);
676
- if (!state) return;
677
- for (const entry of state.listeners) entry.unsubscribe();
678
- state.channel.removeEventListener("message", state.onMessage);
679
- state.channel.close();
680
- this.docStates.delete(docId);
448
+ async writeSnapshot(db, docId, snapshot) {
449
+ await this.putBinary(db, this.docStore, docId, snapshot.slice());
681
450
  }
682
- };
683
-
684
- //#endregion
685
- //#region src/storage/indexeddb.ts
686
- const DEFAULT_DB_NAME = "loro-repo";
687
- const DEFAULT_DB_VERSION = 1;
688
- const DEFAULT_DOC_STORE = "docs";
689
- const DEFAULT_META_STORE = "meta";
690
- const DEFAULT_ASSET_STORE = "assets";
691
- const DEFAULT_DOC_UPDATE_STORE = "doc-updates";
692
- const DEFAULT_META_KEY = "snapshot";
693
- const textDecoder$1 = new TextDecoder();
694
- function describeUnknown(cause) {
695
- if (typeof cause === "string") return cause;
696
- if (typeof cause === "number" || typeof cause === "boolean") return String(cause);
697
- if (typeof cause === "bigint") return cause.toString();
698
- if (typeof cause === "symbol") return cause.description ?? cause.toString();
699
- if (typeof cause === "function") return `[function ${cause.name ?? "anonymous"}]`;
700
- if (cause && typeof cause === "object") try {
701
- return JSON.stringify(cause);
702
- } catch {
703
- return "[object]";
704
- }
705
- return String(cause);
706
- }
707
- var IndexedDBStorageAdaptor = class {
708
- idb;
709
- dbName;
710
- version;
711
- docStore;
712
- docUpdateStore;
713
- metaStore;
714
- assetStore;
715
- metaKey;
716
- dbPromise;
717
- closed = false;
718
- constructor(options = {}) {
719
- const idbFactory = globalThis.indexedDB;
720
- if (!idbFactory) throw new Error("IndexedDB is not available in this environment");
721
- this.idb = idbFactory;
722
- this.dbName = options.dbName ?? DEFAULT_DB_NAME;
723
- this.version = options.version ?? DEFAULT_DB_VERSION;
724
- this.docStore = options.docStoreName ?? DEFAULT_DOC_STORE;
725
- this.docUpdateStore = options.docUpdateStoreName ?? DEFAULT_DOC_UPDATE_STORE;
726
- this.metaStore = options.metaStoreName ?? DEFAULT_META_STORE;
727
- this.assetStore = options.assetStoreName ?? DEFAULT_ASSET_STORE;
728
- this.metaKey = options.metaKey ?? DEFAULT_META_KEY;
729
- }
730
- async save(payload) {
731
- const db = await this.ensureDb();
732
- switch (payload.type) {
733
- case "doc-snapshot": {
734
- const snapshot = payload.snapshot.slice();
735
- await this.storeMergedSnapshot(db, payload.docId, snapshot);
736
- break;
737
- }
738
- case "doc-update": {
739
- const update = payload.update.slice();
740
- await this.appendDocUpdate(db, payload.docId, update);
741
- break;
742
- }
743
- case "asset": {
744
- const bytes = payload.data.slice();
745
- await this.putBinary(db, this.assetStore, payload.assetId, bytes);
746
- break;
747
- }
748
- case "meta": {
749
- const bytes = payload.update.slice();
750
- await this.putBinary(db, this.metaStore, this.metaKey, bytes);
751
- break;
752
- }
753
- default: throw new Error("Unsupported storage payload type");
754
- }
755
- }
756
- async deleteAsset(assetId) {
757
- const db = await this.ensureDb();
758
- await this.deleteKey(db, this.assetStore, assetId);
759
- }
760
- async loadDoc(docId) {
761
- const db = await this.ensureDb();
762
- const snapshot = await this.getBinaryFromDb(db, this.docStore, docId);
763
- const pendingUpdates = await this.getDocUpdates(db, docId);
764
- if (!snapshot && pendingUpdates.length === 0) return;
765
- let doc;
766
- try {
767
- doc = snapshot ? LoroDoc.fromSnapshot(snapshot) : new LoroDoc();
768
- } catch (error) {
769
- throw this.createError(`Failed to hydrate document snapshot for "${docId}"`, error);
770
- }
771
- let appliedUpdates = false;
772
- for (const update of pendingUpdates) try {
773
- doc.import(update);
774
- appliedUpdates = true;
775
- } catch (error) {
776
- throw this.createError(`Failed to apply queued document update for "${docId}"`, error);
777
- }
778
- if (appliedUpdates) {
779
- let consolidated;
780
- try {
781
- consolidated = doc.export({ mode: "snapshot" });
782
- } catch (error) {
783
- throw this.createError(`Failed to export consolidated snapshot for "${docId}"`, error);
784
- }
785
- await this.writeSnapshot(db, docId, consolidated);
786
- await this.clearDocUpdates(db, docId);
787
- }
788
- return doc;
789
- }
790
- async loadMeta() {
791
- const bytes = await this.getBinary(this.metaStore, this.metaKey);
792
- if (!bytes) return void 0;
793
- try {
794
- const json = textDecoder$1.decode(bytes);
795
- const bundle = JSON.parse(json);
796
- const flock = new Flock();
797
- flock.importJson(bundle);
798
- return flock;
799
- } catch (error) {
800
- throw this.createError("Failed to hydrate metadata snapshot", error);
801
- }
802
- }
803
- async loadAsset(assetId) {
804
- return await this.getBinary(this.assetStore, assetId) ?? void 0;
805
- }
806
- async close() {
807
- this.closed = true;
808
- const db = await this.dbPromise;
809
- if (db) db.close();
810
- this.dbPromise = void 0;
811
- }
812
- async ensureDb() {
813
- if (this.closed) throw new Error("IndexedDBStorageAdaptor has been closed");
814
- if (!this.dbPromise) this.dbPromise = new Promise((resolve, reject) => {
815
- const request = this.idb.open(this.dbName, this.version);
816
- request.addEventListener("upgradeneeded", () => {
817
- const db = request.result;
818
- this.ensureStore(db, this.docStore);
819
- this.ensureStore(db, this.docUpdateStore);
820
- this.ensureStore(db, this.metaStore);
821
- this.ensureStore(db, this.assetStore);
822
- });
823
- request.addEventListener("success", () => resolve(request.result), { once: true });
824
- request.addEventListener("error", () => {
825
- reject(this.createError(`Failed to open IndexedDB database "${this.dbName}"`, request.error));
826
- }, { once: true });
827
- });
828
- return this.dbPromise;
829
- }
830
- ensureStore(db, storeName) {
831
- const names = db.objectStoreNames;
832
- if (this.storeExists(names, storeName)) return;
833
- db.createObjectStore(storeName);
834
- }
835
- storeExists(names, storeName) {
836
- if (typeof names.contains === "function") return names.contains(storeName);
837
- const length = names.length ?? 0;
838
- for (let index = 0; index < length; index += 1) if (names.item?.(index) === storeName) return true;
839
- return false;
840
- }
841
- async storeMergedSnapshot(db, docId, incoming) {
842
- await this.runInTransaction(db, this.docStore, "readwrite", async (store) => {
843
- const existingRaw = await this.wrapRequest(store.get(docId), "read");
844
- const existing = await this.normalizeBinary(existingRaw);
845
- const merged = this.mergeSnapshots(docId, existing, incoming);
846
- await this.wrapRequest(store.put(merged, docId), "write");
847
- });
848
- }
849
- mergeSnapshots(docId, existing, incoming) {
850
- try {
851
- const doc = existing ? LoroDoc.fromSnapshot(existing) : new LoroDoc();
852
- doc.import(incoming);
853
- return doc.export({ mode: "snapshot" });
854
- } catch (error) {
855
- throw this.createError(`Failed to merge snapshot for "${docId}"`, error);
856
- }
857
- }
858
- async appendDocUpdate(db, docId, update) {
859
- await this.runInTransaction(db, this.docUpdateStore, "readwrite", async (store) => {
860
- const raw = await this.wrapRequest(store.get(docId), "read");
861
- const queue = await this.normalizeUpdateQueue(raw);
862
- queue.push(update.slice());
863
- await this.wrapRequest(store.put({ updates: queue }, docId), "write");
864
- });
865
- }
866
- async getDocUpdates(db, docId) {
867
- const raw = await this.runInTransaction(db, this.docUpdateStore, "readonly", (store) => this.wrapRequest(store.get(docId), "read"));
868
- return this.normalizeUpdateQueue(raw);
869
- }
870
- async clearDocUpdates(db, docId) {
871
- await this.runInTransaction(db, this.docUpdateStore, "readwrite", (store) => this.wrapRequest(store.delete(docId), "delete"));
872
- }
873
- async writeSnapshot(db, docId, snapshot) {
874
- await this.putBinary(db, this.docStore, docId, snapshot.slice());
875
- }
876
- async getBinaryFromDb(db, storeName, key) {
877
- const value = await this.runInTransaction(db, storeName, "readonly", (store) => this.wrapRequest(store.get(key), "read"));
878
- return this.normalizeBinary(value);
451
+ async getBinaryFromDb(db, storeName, key) {
452
+ const value = await this.runInTransaction(db, storeName, "readonly", (store) => this.wrapRequest(store.get(key), "read"));
453
+ return this.normalizeBinary(value);
879
454
  }
880
455
  async normalizeUpdateQueue(value) {
881
456
  if (value == null) return [];
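The first hunk ends inside the new IndexedDB-backed storage adaptor; its constructor options and the save/loadDoc/loadMeta/loadAsset/close surface are already visible above. A sketch of how it might be wired up, again assuming a root export (the import path is an assumption):

import { LoroDoc } from "loro-crdt";
import { IndexedDBStorageAdaptor } from "loro-repo"; // assumed export path

// Store names fall back to "docs", "doc-updates", "meta" and "assets".
const storage = new IndexedDBStorageAdaptor({ dbName: "my-app" });
const doc = new LoroDoc();
doc.getText("note").insert(0, "hello");

// Queue an incremental update; loadDoc() later replays the queue and
// compacts it back into a single stored snapshot.
await storage.save({ type: "doc-update", docId: "note-1", update: doc.export({ mode: "update" }) });
const restored = await storage.loadDoc("note-1"); // LoroDoc or undefined
await storage.close();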
@@ -1085,241 +660,476 @@ async function writeFileAtomic(targetPath, data) {
1085
660
  }
1086
661
 
1087
662
  //#endregion
1088
- //#region src/internal/event-bus.ts
1089
- var RepoEventBus = class {
1090
- watchers = /* @__PURE__ */ new Set();
1091
- eventByStack = [];
1092
- watch(listener, filter = {}) {
1093
- const entry = {
1094
- listener,
1095
- filter
1096
- };
1097
- this.watchers.add(entry);
1098
- return { unsubscribe: () => {
1099
- this.watchers.delete(entry);
1100
- } };
1101
- }
1102
- emit(event) {
1103
- for (const entry of this.watchers) if (this.shouldNotify(entry.filter, event)) entry.listener(event);
1104
- }
1105
- clear() {
1106
- this.watchers.clear();
1107
- this.eventByStack.length = 0;
1108
- }
1109
- pushEventBy(by) {
1110
- this.eventByStack.push(by);
1111
- }
1112
- popEventBy() {
1113
- this.eventByStack.pop();
1114
- }
1115
- resolveEventBy(defaultBy) {
1116
- const index = this.eventByStack.length - 1;
1117
- return index >= 0 ? this.eventByStack[index] : defaultBy;
1118
- }
1119
- shouldNotify(filter, event) {
1120
- if (!filter.docIds && !filter.kinds && !filter.metadataFields && !filter.by) return true;
1121
- if (filter.kinds && !filter.kinds.includes(event.kind)) return false;
1122
- if (filter.by && !filter.by.includes(event.by)) return false;
1123
- const docId = (() => {
1124
- if (event.kind === "doc-metadata" || event.kind === "doc-frontiers") return event.docId;
1125
- if (event.kind === "asset-link" || event.kind === "asset-unlink") return event.docId;
1126
- })();
1127
- if (filter.docIds && docId && !filter.docIds.includes(docId)) return false;
1128
- if (filter.docIds && !docId) return false;
1129
- if (filter.metadataFields && event.kind === "doc-metadata") {
1130
- if (!Object.keys(event.patch).some((key) => filter.metadataFields?.includes(key))) return false;
663
+ //#region src/internal/debug.ts
664
+ const getEnv = () => {
665
+ if (typeof globalThis !== "object" || globalThis === null) return;
666
+ return globalThis.process?.env;
667
+ };
668
+ const rawNamespaceConfig = (getEnv()?.LORO_REPO_DEBUG ?? "").trim();
669
+ const normalizedNamespaces = rawNamespaceConfig.length > 0 ? rawNamespaceConfig.split(/[\s,]+/).map((token) => token.toLowerCase()).filter(Boolean) : [];
670
+ const wildcardTokens = new Set([
671
+ "*",
672
+ "1",
673
+ "true",
674
+ "all"
675
+ ]);
676
+ const namespaceSet = new Set(normalizedNamespaces);
677
+ const hasWildcard = namespaceSet.size > 0 && normalizedNamespaces.some((token) => wildcardTokens.has(token));
678
+ const isDebugEnabled = (namespace) => {
679
+ if (!namespaceSet.size) return false;
680
+ if (!namespace) return hasWildcard;
681
+ const normalized = namespace.toLowerCase();
682
+ if (hasWildcard) return true;
683
+ if (namespaceSet.has(normalized)) return true;
684
+ const [root] = normalized.split(":");
685
+ return namespaceSet.has(root);
686
+ };
687
+ const createDebugLogger = (namespace) => {
688
+ const normalized = namespace.toLowerCase();
689
+ return (...args) => {
690
+ if (!isDebugEnabled(normalized)) return;
691
+ const prefix = `[loro-repo:${namespace}]`;
692
+ if (args.length === 0) {
693
+ console.info(prefix);
694
+ return;
1131
695
  }
1132
- return true;
1133
- }
696
+ console.info(prefix, ...args);
697
+ };
1134
698
  };
1135
699
 
1136
700
  //#endregion
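The relocated debug helper reads LORO_REPO_DEBUG once at module load, so the variable has to be set before the bundle is evaluated. For example (namespace names taken from the code above):

// "*", "1", "true" and "all" enable every namespace; tokens may be separated
// by commas or whitespace, and a root token such as "transport" also matches
// child namespaces like "transport:websocket" (matching is case-insensitive).
process.env.LORO_REPO_DEBUG = "transport:websocket";
// Enabled loggers print through console.info with a "[loro-repo:<namespace>]" prefix.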
1137
- //#region src/utils.ts
1138
- async function streamToUint8Array(stream) {
1139
- const reader = stream.getReader();
1140
- const chunks = [];
1141
- let total = 0;
1142
- while (true) {
1143
- const { done, value } = await reader.read();
1144
- if (done) break;
1145
- if (value) {
1146
- chunks.push(value);
1147
- total += value.byteLength;
1148
- }
1149
- }
1150
- const buffer = new Uint8Array(total);
1151
- let offset = 0;
1152
- for (const chunk of chunks) {
1153
- buffer.set(chunk, offset);
1154
- offset += chunk.byteLength;
1155
- }
1156
- return buffer;
1157
- }
1158
- async function assetContentToUint8Array(content) {
1159
- if (content instanceof Uint8Array) return content;
1160
- if (ArrayBuffer.isView(content)) return new Uint8Array(content.buffer.slice(content.byteOffset, content.byteOffset + content.byteLength));
1161
- if (typeof Blob !== "undefined" && content instanceof Blob) return new Uint8Array(await content.arrayBuffer());
1162
- if (typeof ReadableStream !== "undefined" && content instanceof ReadableStream) return streamToUint8Array(content);
1163
- throw new TypeError("Unsupported asset content type");
1164
- }
1165
- function bytesToHex$1(bytes) {
1166
- return Array.from(bytes, (byte) => byte.toString(16).padStart(2, "0")).join("");
701
+ //#region src/transport/websocket.ts
702
+ const debug = createDebugLogger("transport:websocket");
703
+ function withTimeout(promise, timeoutMs) {
704
+ if (!timeoutMs || timeoutMs <= 0) return promise;
705
+ return new Promise((resolve, reject) => {
706
+ const timer = setTimeout(() => {
707
+ reject(/* @__PURE__ */ new Error(`Operation timed out after ${timeoutMs}ms`));
708
+ }, timeoutMs);
709
+ promise.then((value) => {
710
+ clearTimeout(timer);
711
+ resolve(value);
712
+ }).catch((error) => {
713
+ clearTimeout(timer);
714
+ reject(error);
715
+ });
716
+ });
1167
717
  }
1168
- async function computeSha256(bytes) {
1169
- const globalCrypto = globalThis.crypto;
1170
- if (globalCrypto?.subtle && typeof globalCrypto.subtle.digest === "function") {
1171
- const digest = await globalCrypto.subtle.digest("SHA-256", bytes);
1172
- return bytesToHex$1(new Uint8Array(digest));
1173
- }
1174
- try {
1175
- const { createHash } = await import("node:crypto");
1176
- const hash = createHash("sha256");
1177
- hash.update(bytes);
1178
- return hash.digest("hex");
718
+ function normalizeRoomId(roomId, fallback) {
719
+ if (typeof roomId === "string" && roomId.length > 0) return roomId;
720
+ if (roomId instanceof Uint8Array && roomId.length > 0) try {
721
+ return bytesToHex(roomId);
1179
722
  } catch {
1180
- throw new Error("SHA-256 digest is not available in this environment");
723
+ return fallback;
1181
724
  }
725
+ return fallback;
1182
726
  }
1183
- function cloneJsonValue(value) {
1184
- if (value === null) return null;
1185
- if (typeof value === "string" || typeof value === "boolean") return value;
1186
- if (typeof value === "number") return Number.isFinite(value) ? value : void 0;
1187
- if (Array.isArray(value)) {
1188
- const arr = [];
1189
- for (const entry of value) {
1190
- const cloned = cloneJsonValue(entry);
1191
- if (cloned !== void 0) arr.push(cloned);
727
+ function bytesEqual(a, b) {
728
+ if (a === b) return true;
729
+ if (!a || !b) return false;
730
+ if (a.length !== b.length) return false;
731
+ for (let i = 0; i < a.length; i += 1) if (a[i] !== b[i]) return false;
732
+ return true;
733
+ }
734
+ /**
735
+ * loro-websocket backed {@link TransportAdapter} implementation for LoroRepo.
736
+ * It uses loro-protocol as the underlying protocol for the transport.
737
+ */
738
+ var WebSocketTransportAdapter = class {
739
+ options;
740
+ client;
741
+ metadataSession;
742
+ docSessions = /* @__PURE__ */ new Map();
743
+ constructor(options) {
744
+ this.options = options;
745
+ }
746
+ async connect(_options) {
747
+ const client = this.ensureClient();
748
+ debug("connect requested", { status: client.getStatus() });
749
+ try {
750
+ await client.connect();
751
+ debug("client.connect resolved");
752
+ await client.waitConnected();
753
+ debug("client.waitConnected resolved", { status: client.getStatus() });
754
+ } catch (error) {
755
+ debug("connect failed", error);
756
+ throw error;
1192
757
  }
1193
- return arr;
1194
758
  }
1195
- if (value && typeof value === "object") {
1196
- const input = value;
1197
- const obj = {};
1198
- for (const [key, entry] of Object.entries(input)) {
1199
- const cloned = cloneJsonValue(entry);
1200
- if (cloned !== void 0) obj[key] = cloned;
759
+ async close() {
760
+ debug("close requested", {
761
+ docSessions: this.docSessions.size,
762
+ metadataSession: Boolean(this.metadataSession)
763
+ });
764
+ for (const [docId] of this.docSessions) await this.leaveDocSession(docId).catch(() => {});
765
+ this.docSessions.clear();
766
+ await this.teardownMetadataSession().catch(() => {});
767
+ if (this.client) {
768
+ const client = this.client;
769
+ this.client = void 0;
770
+ client.destroy();
771
+ debug("websocket client destroyed");
1201
772
  }
1202
- return obj;
773
+ debug("close completed");
1203
774
  }
1204
- }
1205
- function cloneJsonObject(value) {
1206
- const cloned = cloneJsonValue(value);
1207
- if (cloned && typeof cloned === "object" && !Array.isArray(cloned)) return cloned;
1208
- return {};
1209
- }
1210
- function asJsonObject(value) {
1211
- const cloned = cloneJsonValue(value);
1212
- if (cloned && typeof cloned === "object" && !Array.isArray(cloned)) return cloned;
1213
- }
1214
- function isJsonObjectValue(value) {
1215
- return Boolean(value && typeof value === "object" && !Array.isArray(value));
1216
- }
1217
- function stableStringify(value) {
1218
- if (value === null) return "null";
1219
- if (typeof value === "string") return JSON.stringify(value);
1220
- if (typeof value === "number" || typeof value === "boolean") return JSON.stringify(value);
1221
- if (Array.isArray(value)) return `[${value.map(stableStringify).join(",")}]`;
1222
- if (!isJsonObjectValue(value)) return "null";
1223
- return `{${Object.keys(value).sort().map((key) => `${JSON.stringify(key)}:${stableStringify(value[key])}`).join(",")}}`;
1224
- }
1225
- function jsonEquals(a, b) {
1226
- if (a === void 0 && b === void 0) return true;
1227
- if (a === void 0 || b === void 0) return false;
1228
- return stableStringify(a) === stableStringify(b);
1229
- }
1230
- function diffJsonObjects(previous, next) {
1231
- const patch = {};
1232
- const keys = /* @__PURE__ */ new Set();
1233
- if (previous) for (const key of Object.keys(previous)) keys.add(key);
1234
- for (const key of Object.keys(next)) keys.add(key);
1235
- for (const key of keys) {
1236
- const prevValue = previous ? previous[key] : void 0;
1237
- const nextValue = next[key];
1238
- if (!jsonEquals(prevValue, nextValue)) {
1239
- if (nextValue === void 0 && previous && key in previous) {
1240
- patch[key] = null;
1241
- continue;
775
+ isConnected() {
776
+ return this.client?.getStatus() === "connected";
777
+ }
778
+ async syncMeta(flock, options) {
779
+ debug("syncMeta requested", { roomId: this.options.metadataRoomId });
780
+ try {
781
+ let auth;
782
+ if (this.options.metadataAuth) if (typeof this.options.metadataAuth === "function") auth = await this.options.metadataAuth();
783
+ else auth = this.options.metadataAuth;
784
+ await withTimeout((await this.ensureMetadataSession(flock, {
785
+ roomId: this.options.metadataRoomId ?? "repo:meta",
786
+ auth
787
+ })).firstSynced, options?.timeout);
788
+ debug("syncMeta completed", { roomId: this.options.metadataRoomId });
789
+ return { ok: true };
790
+ } catch (error) {
791
+ debug("syncMeta failed", error);
792
+ return { ok: false };
793
+ }
794
+ }
795
+ joinMetaRoom(flock, params) {
796
+ const fallback = this.options.metadataRoomId ?? "";
797
+ const roomId = normalizeRoomId(params?.roomId, fallback);
798
+ if (!roomId) throw new Error("Metadata room id not configured");
799
+ const session = (async () => {
800
+ let auth;
801
+ const authWay = params?.auth ?? this.options.metadataAuth;
802
+ if (typeof authWay === "function") auth = await authWay();
803
+ else auth = authWay;
804
+ debug("joinMetaRoom requested", {
805
+ roomId,
806
+ hasAuth: Boolean(auth && auth.length)
807
+ });
808
+ return this.ensureMetadataSession(flock, {
809
+ roomId,
810
+ auth
811
+ });
812
+ })();
813
+ const firstSynced = session.then((session$1) => session$1.firstSynced);
814
+ const getConnected = () => this.isConnected();
815
+ const subscription = {
816
+ unsubscribe: () => {
817
+ session.then((session$1) => {
818
+ session$1.refCount = Math.max(0, session$1.refCount - 1);
819
+ debug("metadata session refCount decremented", {
820
+ roomId: session$1.roomId,
821
+ refCount: session$1.refCount
822
+ });
823
+ if (session$1.refCount === 0) {
824
+ debug("tearing down metadata session due to refCount=0", { roomId: session$1.roomId });
825
+ this.teardownMetadataSession(session$1).catch(() => {});
826
+ }
827
+ });
828
+ },
829
+ firstSyncedWithRemote: firstSynced,
830
+ get connected() {
831
+ return getConnected();
1242
832
  }
1243
- const cloned = cloneJsonValue(nextValue);
1244
- if (cloned !== void 0) patch[key] = cloned;
833
+ };
834
+ session.then((session$1) => {
835
+ session$1.refCount += 1;
836
+ debug("metadata session refCount incremented", {
837
+ roomId: session$1.roomId,
838
+ refCount: session$1.refCount
839
+ });
840
+ });
841
+ return subscription;
842
+ }
843
+ async syncDoc(docId, doc, options) {
844
+ debug("syncDoc requested", { docId });
845
+ try {
846
+ const session = await this.ensureDocSession(docId, doc, {});
847
+ await withTimeout(session.firstSynced, options?.timeout);
848
+ debug("syncDoc completed", {
849
+ docId,
850
+ roomId: session.roomId
851
+ });
852
+ return { ok: true };
853
+ } catch (error) {
854
+ debug("syncDoc failed", {
855
+ docId,
856
+ error
857
+ });
858
+ return { ok: false };
1245
859
  }
1246
860
  }
1247
- return patch;
1248
- }
1249
- function assetMetaToJson(meta) {
1250
- const json = {
1251
- assetId: meta.assetId,
1252
- size: meta.size,
1253
- createdAt: meta.createdAt
1254
- };
1255
- if (meta.mime !== void 0) json.mime = meta.mime;
1256
- if (meta.policy !== void 0) json.policy = meta.policy;
1257
- if (meta.tag !== void 0) json.tag = meta.tag;
1258
- return json;
1259
- }
1260
- function assetMetaFromJson(value) {
1261
- const obj = asJsonObject(value);
1262
- if (!obj) return void 0;
1263
- const assetId = typeof obj.assetId === "string" ? obj.assetId : void 0;
1264
- if (!assetId) return void 0;
1265
- const size = typeof obj.size === "number" ? obj.size : void 0;
1266
- const createdAt = typeof obj.createdAt === "number" ? obj.createdAt : void 0;
1267
- if (size === void 0 || createdAt === void 0) return void 0;
1268
- return {
1269
- assetId,
1270
- size,
1271
- createdAt,
1272
- ...typeof obj.mime === "string" ? { mime: obj.mime } : {},
1273
- ...typeof obj.policy === "string" ? { policy: obj.policy } : {},
1274
- ...typeof obj.tag === "string" ? { tag: obj.tag } : {}
1275
- };
1276
- }
1277
- function assetMetadataEqual(a, b) {
1278
- if (!a && !b) return true;
1279
- if (!a || !b) return false;
1280
- return stableStringify(assetMetaToJson(a)) === stableStringify(assetMetaToJson(b));
1281
- }
1282
- function cloneRepoAssetMetadata(meta) {
1283
- return {
1284
- assetId: meta.assetId,
1285
- size: meta.size,
1286
- createdAt: meta.createdAt,
1287
- ...meta.mime !== void 0 ? { mime: meta.mime } : {},
1288
- ...meta.policy !== void 0 ? { policy: meta.policy } : {},
1289
- ...meta.tag !== void 0 ? { tag: meta.tag } : {}
1290
- };
1291
- }
1292
- function toReadableStream(bytes) {
1293
- return new ReadableStream({ start(controller) {
1294
- controller.enqueue(bytes);
1295
- controller.close();
1296
- } });
1297
- }
1298
- function canonicalizeFrontiers(frontiers) {
1299
- const json = [...frontiers].sort((a, b) => {
1300
- if (a.peer < b.peer) return -1;
1301
- if (a.peer > b.peer) return 1;
1302
- return a.counter - b.counter;
1303
- }).map((f) => ({
1304
- peer: f.peer,
1305
- counter: f.counter
1306
- }));
1307
- return {
1308
- json,
1309
- key: stableStringify(json)
1310
- };
1311
- }
1312
- function includesFrontiers(vv, frontiers) {
1313
- for (const { peer, counter } of frontiers) if ((vv.get(peer) ?? 0) <= counter) return false;
1314
- return true;
1315
- }
1316
- function matchesQuery(docId, _metadata, query) {
1317
- if (!query) return true;
1318
- if (query.prefix && !docId.startsWith(query.prefix)) return false;
1319
- if (query.start && docId < query.start) return false;
1320
- if (query.end && docId > query.end) return false;
1321
- return true;
1322
- }
861
+ joinDocRoom(docId, doc, params) {
862
+ debug("joinDocRoom requested", {
863
+ docId,
864
+ roomParamType: params?.roomId ? typeof params.roomId === "string" ? "string" : "uint8array" : void 0,
865
+ hasAuthOverride: Boolean(params?.auth && params.auth.length)
866
+ });
867
+ const ensure = this.ensureDocSession(docId, doc, params ?? {});
868
+ const firstSynced = ensure.then((session) => session.firstSynced);
869
+ const getConnected = () => this.isConnected();
870
+ const subscription = {
871
+ unsubscribe: () => {
872
+ ensure.then((session) => {
873
+ session.refCount = Math.max(0, session.refCount - 1);
874
+ debug("doc session refCount decremented", {
875
+ docId,
876
+ roomId: session.roomId,
877
+ refCount: session.refCount
878
+ });
879
+ if (session.refCount === 0) this.leaveDocSession(docId).catch(() => {});
880
+ });
881
+ },
882
+ firstSyncedWithRemote: firstSynced,
883
+ get connected() {
884
+ return getConnected();
885
+ }
886
+ };
887
+ ensure.then((session) => {
888
+ session.refCount += 1;
889
+ debug("doc session refCount incremented", {
890
+ docId,
891
+ roomId: session.roomId,
892
+ refCount: session.refCount
893
+ });
894
+ });
895
+ return subscription;
896
+ }
897
+ ensureClient() {
898
+ if (this.client) {
899
+ debug("reusing websocket client", { status: this.client.getStatus() });
900
+ return this.client;
901
+ }
902
+ const { url, client: clientOptions } = this.options;
903
+ debug("creating websocket client", {
904
+ url,
905
+ clientOptionsKeys: clientOptions ? Object.keys(clientOptions) : []
906
+ });
907
+ const client = new LoroWebsocketClient({
908
+ url,
909
+ ...clientOptions
910
+ });
911
+ this.client = client;
912
+ return client;
913
+ }
914
+ async ensureMetadataSession(flock, params) {
915
+ debug("ensureMetadataSession invoked", {
916
+ roomId: params.roomId,
917
+ hasAuth: Boolean(params.auth && params.auth.length)
918
+ });
919
+ const client = this.ensureClient();
920
+ await client.waitConnected();
921
+ debug("websocket client ready for metadata session", { status: client.getStatus() });
922
+ if (this.metadataSession && this.metadataSession.flock === flock && this.metadataSession.roomId === params.roomId && bytesEqual(this.metadataSession.auth, params.auth)) {
923
+ debug("reusing metadata session", {
924
+ roomId: this.metadataSession.roomId,
925
+ refCount: this.metadataSession.refCount
926
+ });
927
+ return this.metadataSession;
928
+ }
929
+ if (this.metadataSession) {
930
+ debug("tearing down previous metadata session", { roomId: this.metadataSession.roomId });
931
+ await this.teardownMetadataSession(this.metadataSession).catch(() => {});
932
+ }
933
+ const adaptor = new FlockAdaptor(flock, this.options.metadataAdaptorConfig);
934
+ debug("joining metadata room", {
935
+ roomId: params.roomId,
936
+ hasAuth: Boolean(params.auth && params.auth.length)
937
+ });
938
+ const room = await client.join({
939
+ roomId: params.roomId,
940
+ crdtAdaptor: adaptor,
941
+ auth: params.auth
942
+ });
943
+ const firstSynced = room.waitForReachingServerVersion();
944
+ firstSynced.then(() => {
945
+ debug("metadata session firstSynced resolved", { roomId: params.roomId });
946
+ }, (error) => {
947
+ debug("metadata session firstSynced rejected", {
948
+ roomId: params.roomId,
949
+ error
950
+ });
951
+ });
952
+ const session = {
953
+ adaptor,
954
+ room,
955
+ firstSynced,
956
+ flock,
957
+ roomId: params.roomId,
958
+ auth: params.auth,
959
+ refCount: 0
960
+ };
961
+ this.metadataSession = session;
962
+ return session;
963
+ }
964
+ async teardownMetadataSession(session) {
965
+ const target = session ?? this.metadataSession;
966
+ if (!target) return;
967
+ debug("teardownMetadataSession invoked", { roomId: target.roomId });
968
+ if (this.metadataSession === target) this.metadataSession = void 0;
969
+ const { adaptor, room } = target;
970
+ try {
971
+ await room.leave();
972
+ debug("metadata room left", { roomId: target.roomId });
973
+ } catch (error) {
974
+ debug("metadata room leave failed; destroying", {
975
+ roomId: target.roomId,
976
+ error
977
+ });
978
+ await room.destroy().catch(() => {});
979
+ }
980
+ adaptor.destroy();
981
+ debug("metadata session destroyed", { roomId: target.roomId });
982
+ }
983
+ async ensureDocSession(docId, doc, params) {
984
+ debug("ensureDocSession invoked", { docId });
985
+ const client = this.ensureClient();
986
+ await client.waitConnected();
987
+ debug("websocket client ready for doc session", {
988
+ docId,
989
+ status: client.getStatus()
990
+ });
991
+ const existing = this.docSessions.get(docId);
992
+ const derivedRoomId = this.options.docRoomId?.(docId) ?? docId;
993
+ const roomId = normalizeRoomId(params.roomId, derivedRoomId);
994
+ let auth;
995
+ auth = await (params.auth ?? this.options.docAuth?.(docId));
996
+ debug("doc session params resolved", {
997
+ docId,
998
+ roomId,
999
+ hasAuth: Boolean(auth && auth.length)
1000
+ });
1001
+ if (existing && existing.doc === doc && existing.roomId === roomId) {
1002
+ debug("reusing doc session", {
1003
+ docId,
1004
+ roomId,
1005
+ refCount: existing.refCount
1006
+ });
1007
+ return existing;
1008
+ }
1009
+ if (existing) {
1010
+ debug("doc session mismatch; leaving existing session", {
1011
+ docId,
1012
+ previousRoomId: existing.roomId,
1013
+ nextRoomId: roomId
1014
+ });
1015
+ await this.leaveDocSession(docId).catch(() => {});
1016
+ }
1017
+ const adaptor = new LoroAdaptor(doc);
1018
+ debug("joining doc room", {
1019
+ docId,
1020
+ roomId,
1021
+ hasAuth: Boolean(auth && auth.length)
1022
+ });
1023
+ const room = await client.join({
1024
+ roomId,
1025
+ crdtAdaptor: adaptor,
1026
+ auth
1027
+ });
1028
+ const firstSynced = room.waitForReachingServerVersion();
1029
+ firstSynced.then(() => {
1030
+ debug("doc session firstSynced resolved", {
1031
+ docId,
1032
+ roomId
1033
+ });
1034
+ }, (error) => {
1035
+ debug("doc session firstSynced rejected", {
1036
+ docId,
1037
+ roomId,
1038
+ error
1039
+ });
1040
+ });
1041
+ const session = {
1042
+ adaptor,
1043
+ room,
1044
+ firstSynced,
1045
+ doc,
1046
+ roomId,
1047
+ refCount: 0
1048
+ };
1049
+ this.docSessions.set(docId, session);
1050
+ return session;
1051
+ }
1052
+ async leaveDocSession(docId) {
1053
+ const session = this.docSessions.get(docId);
1054
+ if (!session) {
1055
+ debug("leaveDocSession invoked but no session found", { docId });
1056
+ return;
1057
+ }
1058
+ this.docSessions.delete(docId);
1059
+ debug("leaving doc session", {
1060
+ docId,
1061
+ roomId: session.roomId
1062
+ });
1063
+ try {
1064
+ await session.room.leave();
1065
+ debug("doc room left", {
1066
+ docId,
1067
+ roomId: session.roomId
1068
+ });
1069
+ } catch (error) {
1070
+ debug("doc room leave failed; destroying", {
1071
+ docId,
1072
+ roomId: session.roomId,
1073
+ error
1074
+ });
1075
+ await session.room.destroy().catch(() => {});
1076
+ }
1077
+ session.adaptor.destroy();
1078
+ debug("doc session destroyed", {
1079
+ docId,
1080
+ roomId: session.roomId
1081
+ });
1082
+ }
1083
+ };
1084
+
1085
+ //#endregion
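A minimal usage sketch for the websocket transport defined in the region above, based only on what is visible in this diff: joinDocRoom(), syncDoc(), the subscription's firstSyncedWithRemote / connected / unsubscribe, and the refCount-based session teardown. The transport's class name and constructor are not shown in this hunk, so `transport` below is a hypothetical, already-constructed instance, and the LoroDoc edit is an illustrative assumption.

    import { LoroDoc } from "loro-crdt";

    // `transport` is assumed to be an instance of the websocket transport above,
    // configured elsewhere with its url / room / auth options.
    async function collaborate(transport) {
      const doc = new LoroDoc();
      // joinDocRoom() joins (or reuses) the per-doc room and bumps its refCount.
      const sub = transport.joinDocRoom("doc:readme", doc, {});
      await sub.firstSyncedWithRemote;          // resolves once the server version is reached
      console.log("connected:", sub.connected); // delegates to the client's connection status
      doc.getText("body").insert(0, "hello");   // local edits now flow through the joined room
      doc.commit();
      // Alternatively, a one-shot sync with a timeout; it resolves to { ok: boolean }.
      const { ok } = await transport.syncDoc("doc:readme", doc, { timeout: 5000 });
      console.log("syncDoc ok:", ok);
      sub.unsubscribe();                        // refCount reaches 0 -> leaveDocSession()
    }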
1086
+ //#region src/internal/event-bus.ts
1087
+ var RepoEventBus = class {
1088
+ watchers = /* @__PURE__ */ new Set();
1089
+ eventByStack = [];
1090
+ watch(listener, filter = {}) {
1091
+ const entry = {
1092
+ listener,
1093
+ filter
1094
+ };
1095
+ this.watchers.add(entry);
1096
+ return { unsubscribe: () => {
1097
+ this.watchers.delete(entry);
1098
+ } };
1099
+ }
1100
+ emit(event) {
1101
+ for (const entry of this.watchers) if (this.shouldNotify(entry.filter, event)) entry.listener(event);
1102
+ }
1103
+ clear() {
1104
+ this.watchers.clear();
1105
+ this.eventByStack.length = 0;
1106
+ }
1107
+ pushEventBy(by) {
1108
+ this.eventByStack.push(by);
1109
+ }
1110
+ popEventBy() {
1111
+ this.eventByStack.pop();
1112
+ }
1113
+ resolveEventBy(defaultBy) {
1114
+ const index = this.eventByStack.length - 1;
1115
+ return index >= 0 ? this.eventByStack[index] : defaultBy;
1116
+ }
1117
+ shouldNotify(filter, event) {
1118
+ if (!filter.docIds && !filter.kinds && !filter.metadataFields && !filter.by) return true;
1119
+ if (filter.kinds && !filter.kinds.includes(event.kind)) return false;
1120
+ if (filter.by && !filter.by.includes(event.by)) return false;
1121
+ const docId = (() => {
1122
+ if (event.kind === "doc-metadata" || event.kind === "doc-frontiers") return event.docId;
1123
+ if (event.kind === "asset-link" || event.kind === "asset-unlink") return event.docId;
1124
+ })();
1125
+ if (filter.docIds && docId && !filter.docIds.includes(docId)) return false;
1126
+ if (filter.docIds && !docId) return false;
1127
+ if (filter.metadataFields && event.kind === "doc-metadata") {
1128
+ if (!Object.keys(event.patch).some((key) => filter.metadataFields?.includes(key))) return false;
1129
+ }
1130
+ return true;
1131
+ }
1132
+ };
1323
1133
 
1324
1134
  //#endregion
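A small sketch of the RepoEventBus added above. It is an internal helper of this bundle rather than a documented public API, and the event and `by` values here are illustrative; the filter semantics (kinds, docIds, metadataFields, by) follow shouldNotify() as written.

    const bus = new RepoEventBus();

    // Watch only "doc-metadata" events for one doc, and only when "title" changed.
    const sub = bus.watch(
      (event) => console.log("metadata changed", event.docId, event.patch),
      { kinds: ["doc-metadata"], docIds: ["doc:readme"], metadataFields: ["title"] }
    );

    // Delivered: kind, docId and one of the patched fields all match the filter.
    bus.emit({ kind: "doc-metadata", by: "local", docId: "doc:readme", patch: { title: "Hello" } });

    // Dropped: the patch touches no watched field.
    bus.emit({ kind: "doc-metadata", by: "local", docId: "doc:readme", patch: { tag: "x" } });

    sub.unsubscribe();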
1325
1135
  //#region src/internal/logging.ts
@@ -1338,23 +1148,18 @@ var DocManager = class {
1338
1148
  getMetaFlock;
1339
1149
  eventBus;
1340
1150
  persistMeta;
1341
- state;
1342
1151
  docs = /* @__PURE__ */ new Map();
1343
1152
  docSubscriptions = /* @__PURE__ */ new Map();
1344
1153
  docFrontierUpdates = /* @__PURE__ */ new Map();
1345
1154
  docPersistedVersions = /* @__PURE__ */ new Map();
1346
- get docFrontierKeys() {
1347
- return this.state.docFrontierKeys;
1348
- }
1349
1155
  constructor(options) {
1350
1156
  this.storage = options.storage;
1351
1157
  this.docFrontierDebounceMs = options.docFrontierDebounceMs;
1352
1158
  this.getMetaFlock = options.getMetaFlock;
1353
1159
  this.eventBus = options.eventBus;
1354
1160
  this.persistMeta = options.persistMeta;
1355
- this.state = options.state;
1356
1161
  }
1357
- async openCollaborativeDoc(docId) {
1162
+ async openPersistedDoc(docId) {
1358
1163
  return await this.ensureDoc(docId);
1359
1164
  }
1360
1165
  async openDetachedDoc(docId) {
@@ -1401,38 +1206,27 @@ var DocManager = class {
1401
1206
  }
1402
1207
  async updateDocFrontiers(docId, doc, defaultBy) {
1403
1208
  const frontiers = doc.oplogFrontiers();
1404
- const { json, key } = canonicalizeFrontiers(frontiers);
1405
- const existingKeys = this.docFrontierKeys.get(docId) ?? /* @__PURE__ */ new Set();
1209
+ const vv = doc.version();
1210
+ const existingFrontiers = this.readFrontiersFromFlock(docId);
1406
1211
  let mutated = false;
1407
1212
  const metaFlock = this.metaFlock;
1408
- const vv = doc.version();
1409
- for (const entry of existingKeys) {
1410
- if (entry === key) continue;
1411
- let oldFrontiers;
1412
- try {
1413
- oldFrontiers = JSON.parse(entry);
1414
- } catch {
1415
- continue;
1416
- }
1417
- if (includesFrontiers(vv, oldFrontiers)) {
1418
- metaFlock.delete([
1419
- "f",
1420
- docId,
1421
- entry
1422
- ]);
1423
- mutated = true;
1424
- }
1425
- }
1426
- if (!existingKeys.has(key)) {
1213
+ for (const f of frontiers) if (existingFrontiers.get(f.peer) !== f.counter) {
1427
1214
  metaFlock.put([
1428
1215
  "f",
1429
1216
  docId,
1430
- key
1431
- ], json);
1217
+ f.peer
1218
+ ], f.counter);
1432
1219
  mutated = true;
1433
1220
  }
1434
1221
  if (mutated) {
1435
- this.refreshDocFrontierKeys(docId);
1222
+ for (const [peer, counter] of existingFrontiers) {
1223
+ const docCounterEnd = vv.get(peer);
1224
+ if (docCounterEnd != null && docCounterEnd > counter) metaFlock.delete([
1225
+ "f",
1226
+ docId,
1227
+ peer
1228
+ ]);
1229
+ }
1436
1230
  await this.persistMeta();
1437
1231
  }
1438
1232
  const by = this.eventBus.resolveEventBy(defaultBy);
@@ -1484,37 +1278,22 @@ var DocManager = class {
1484
1278
  this.docFrontierUpdates.clear();
1485
1279
  this.docs.clear();
1486
1280
  this.docPersistedVersions.clear();
1487
- this.docFrontierKeys.clear();
1488
1281
  }
1489
- hydrateFrontierKeys() {
1490
- const nextFrontierKeys = /* @__PURE__ */ new Map();
1491
- const frontierRows = this.metaFlock.scan({ prefix: ["f"] });
1492
- for (const row of frontierRows) {
1493
- if (!Array.isArray(row.key) || row.key.length < 3) continue;
1494
- const docId = row.key[1];
1495
- const frontierKey = row.key[2];
1496
- if (typeof docId !== "string" || typeof frontierKey !== "string") continue;
1497
- const set = nextFrontierKeys.get(docId) ?? /* @__PURE__ */ new Set();
1498
- set.add(frontierKey);
1499
- nextFrontierKeys.set(docId, set);
1500
- }
1501
- this.docFrontierKeys.clear();
1502
- for (const [docId, keys] of nextFrontierKeys) this.docFrontierKeys.set(docId, keys);
1282
+ get metaFlock() {
1283
+ return this.getMetaFlock();
1503
1284
  }
1504
- refreshDocFrontierKeys(docId) {
1285
+ readFrontiersFromFlock(docId) {
1505
1286
  const rows = this.metaFlock.scan({ prefix: ["f", docId] });
1506
- const keys = /* @__PURE__ */ new Set();
1287
+ const frontiers = /* @__PURE__ */ new Map();
1507
1288
  for (const row of rows) {
1508
1289
  if (!Array.isArray(row.key) || row.key.length < 3) continue;
1509
- if (row.value === void 0 || row.value === null) continue;
1510
- const frontierKey = row.key[2];
1511
- if (typeof frontierKey === "string") keys.add(frontierKey);
1290
+ const peer = row.key[2];
1291
+ const counter = row.value;
1292
+ if (typeof peer !== "string") continue;
1293
+ if (typeof counter !== "number" || !Number.isFinite(counter)) continue;
1294
+ frontiers.set(peer, counter);
1512
1295
  }
1513
- if (keys.size > 0) this.docFrontierKeys.set(docId, keys);
1514
- else this.docFrontierKeys.delete(docId);
1515
- }
1516
- get metaFlock() {
1517
- return this.getMetaFlock();
1296
+ return frontiers;
1518
1297
  }
1519
1298
  registerDoc(docId, doc) {
1520
1299
  this.docs.set(docId, doc);
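A sketch of the frontier bookkeeping introduced by the two hunks above: updateDocFrontiers() now writes one flock row per peer under ["f", docId, peer] with that peer's latest counter as the value (pruning rows already covered by the doc's version vector), and readFrontiersFromFlock() rebuilds a Map by scanning that prefix. The docId and peer values below are made up for illustration; peers are stored as strings and counters as finite numbers, as the read path requires.

    // Inspect the per-peer frontier rows for one document.
    // `metaFlock` is assumed to be the repo's metadata Flock instance, obtained elsewhere.
    const frontiers = new Map();
    for (const row of metaFlock.scan({ prefix: ["f", "doc:readme"] })) {
      const peer = row.key[2];                  // e.g. "8412317628451"
      if (typeof peer === "string" && typeof row.value === "number") {
        frontiers.set(peer, row.value);         // latest counter seen for that peer
      }
    }
    // frontiers might now look like: Map { "8412317628451" => 17, "220044001122" => 4 }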
@@ -1627,6 +1406,176 @@ var DocManager = class {
1627
1406
  }
1628
1407
  };
1629
1408
 
1409
+ //#endregion
1410
+ //#region src/utils.ts
1411
+ async function streamToUint8Array(stream) {
1412
+ const reader = stream.getReader();
1413
+ const chunks = [];
1414
+ let total = 0;
1415
+ while (true) {
1416
+ const { done, value } = await reader.read();
1417
+ if (done) break;
1418
+ if (value) {
1419
+ chunks.push(value);
1420
+ total += value.byteLength;
1421
+ }
1422
+ }
1423
+ const buffer = new Uint8Array(total);
1424
+ let offset = 0;
1425
+ for (const chunk of chunks) {
1426
+ buffer.set(chunk, offset);
1427
+ offset += chunk.byteLength;
1428
+ }
1429
+ return buffer;
1430
+ }
1431
+ async function assetContentToUint8Array(content) {
1432
+ if (content instanceof Uint8Array) return content;
1433
+ if (ArrayBuffer.isView(content)) return new Uint8Array(content.buffer.slice(content.byteOffset, content.byteOffset + content.byteLength));
1434
+ if (typeof Blob !== "undefined" && content instanceof Blob) return new Uint8Array(await content.arrayBuffer());
1435
+ if (typeof ReadableStream !== "undefined" && content instanceof ReadableStream) return streamToUint8Array(content);
1436
+ throw new TypeError("Unsupported asset content type");
1437
+ }
1438
+ function bytesToHex$1(bytes) {
1439
+ return Array.from(bytes, (byte) => byte.toString(16).padStart(2, "0")).join("");
1440
+ }
1441
+ async function computeSha256(bytes) {
1442
+ const globalCrypto = globalThis.crypto;
1443
+ if (globalCrypto?.subtle && typeof globalCrypto.subtle.digest === "function") {
1444
+ const digest = await globalCrypto.subtle.digest("SHA-256", bytes);
1445
+ return bytesToHex$1(new Uint8Array(digest));
1446
+ }
1447
+ try {
1448
+ const { createHash } = await import("node:crypto");
1449
+ const hash = createHash("sha256");
1450
+ hash.update(bytes);
1451
+ return hash.digest("hex");
1452
+ } catch {
1453
+ throw new Error("SHA-256 digest is not available in this environment");
1454
+ }
1455
+ }
1456
+ function cloneJsonValue(value) {
1457
+ if (value === null) return null;
1458
+ if (typeof value === "string" || typeof value === "boolean") return value;
1459
+ if (typeof value === "number") return Number.isFinite(value) ? value : void 0;
1460
+ if (Array.isArray(value)) {
1461
+ const arr = [];
1462
+ for (const entry of value) {
1463
+ const cloned = cloneJsonValue(entry);
1464
+ if (cloned !== void 0) arr.push(cloned);
1465
+ }
1466
+ return arr;
1467
+ }
1468
+ if (value && typeof value === "object") {
1469
+ const input = value;
1470
+ const obj = {};
1471
+ for (const [key, entry] of Object.entries(input)) {
1472
+ const cloned = cloneJsonValue(entry);
1473
+ if (cloned !== void 0) obj[key] = cloned;
1474
+ }
1475
+ return obj;
1476
+ }
1477
+ }
1478
+ function cloneJsonObject(value) {
1479
+ const cloned = cloneJsonValue(value);
1480
+ if (cloned && typeof cloned === "object" && !Array.isArray(cloned)) return cloned;
1481
+ return {};
1482
+ }
1483
+ function asJsonObject(value) {
1484
+ const cloned = cloneJsonValue(value);
1485
+ if (cloned && typeof cloned === "object" && !Array.isArray(cloned)) return cloned;
1486
+ }
1487
+ function isJsonObjectValue(value) {
1488
+ return Boolean(value && typeof value === "object" && !Array.isArray(value));
1489
+ }
1490
+ function stableStringify(value) {
1491
+ if (value === null) return "null";
1492
+ if (typeof value === "string") return JSON.stringify(value);
1493
+ if (typeof value === "number" || typeof value === "boolean") return JSON.stringify(value);
1494
+ if (Array.isArray(value)) return `[${value.map(stableStringify).join(",")}]`;
1495
+ if (!isJsonObjectValue(value)) return "null";
1496
+ return `{${Object.keys(value).sort().map((key) => `${JSON.stringify(key)}:${stableStringify(value[key])}`).join(",")}}`;
1497
+ }
1498
+ function jsonEquals(a, b) {
1499
+ if (a === void 0 && b === void 0) return true;
1500
+ if (a === void 0 || b === void 0) return false;
1501
+ return stableStringify(a) === stableStringify(b);
1502
+ }
1503
+ function diffJsonObjects(previous, next) {
1504
+ const patch = {};
1505
+ const keys = /* @__PURE__ */ new Set();
1506
+ if (previous) for (const key of Object.keys(previous)) keys.add(key);
1507
+ for (const key of Object.keys(next)) keys.add(key);
1508
+ for (const key of keys) {
1509
+ const prevValue = previous ? previous[key] : void 0;
1510
+ const nextValue = next[key];
1511
+ if (!jsonEquals(prevValue, nextValue)) {
1512
+ if (nextValue === void 0 && previous && key in previous) {
1513
+ patch[key] = null;
1514
+ continue;
1515
+ }
1516
+ const cloned = cloneJsonValue(nextValue);
1517
+ if (cloned !== void 0) patch[key] = cloned;
1518
+ }
1519
+ }
1520
+ return patch;
1521
+ }
1522
+ function assetMetaToJson(meta) {
1523
+ const json = {
1524
+ assetId: meta.assetId,
1525
+ size: meta.size,
1526
+ createdAt: meta.createdAt
1527
+ };
1528
+ if (meta.mime !== void 0) json.mime = meta.mime;
1529
+ if (meta.policy !== void 0) json.policy = meta.policy;
1530
+ if (meta.tag !== void 0) json.tag = meta.tag;
1531
+ return json;
1532
+ }
1533
+ function assetMetaFromJson(value) {
1534
+ const obj = asJsonObject(value);
1535
+ if (!obj) return void 0;
1536
+ const assetId = typeof obj.assetId === "string" ? obj.assetId : void 0;
1537
+ if (!assetId) return void 0;
1538
+ const size = typeof obj.size === "number" ? obj.size : void 0;
1539
+ const createdAt = typeof obj.createdAt === "number" ? obj.createdAt : void 0;
1540
+ if (size === void 0 || createdAt === void 0) return void 0;
1541
+ return {
1542
+ assetId,
1543
+ size,
1544
+ createdAt,
1545
+ ...typeof obj.mime === "string" ? { mime: obj.mime } : {},
1546
+ ...typeof obj.policy === "string" ? { policy: obj.policy } : {},
1547
+ ...typeof obj.tag === "string" ? { tag: obj.tag } : {}
1548
+ };
1549
+ }
1550
+ function assetMetadataEqual(a, b) {
1551
+ if (!a && !b) return true;
1552
+ if (!a || !b) return false;
1553
+ return stableStringify(assetMetaToJson(a)) === stableStringify(assetMetaToJson(b));
1554
+ }
1555
+ function cloneRepoAssetMetadata(meta) {
1556
+ return {
1557
+ assetId: meta.assetId,
1558
+ size: meta.size,
1559
+ createdAt: meta.createdAt,
1560
+ ...meta.mime !== void 0 ? { mime: meta.mime } : {},
1561
+ ...meta.policy !== void 0 ? { policy: meta.policy } : {},
1562
+ ...meta.tag !== void 0 ? { tag: meta.tag } : {}
1563
+ };
1564
+ }
1565
+ function toReadableStream(bytes) {
1566
+ return new ReadableStream({ start(controller) {
1567
+ controller.enqueue(bytes);
1568
+ controller.close();
1569
+ } });
1570
+ }
1571
+ function matchesQuery(docId, _metadata, query) {
1572
+ if (!query) return true;
1573
+ if (query.prefix && !docId.startsWith(query.prefix)) return false;
1574
+ if (query.start && docId < query.start) return false;
1575
+ if (query.end && docId > query.end) return false;
1576
+ return true;
1577
+ }
1578
+
1630
1579
  //#endregion
1631
1580
  //#region src/internal/metadata-manager.ts
1632
1581
  var MetadataManager = class {
@@ -2424,13 +2373,11 @@ var FlockHydrator = class {
2424
2373
  const nextMetadata = this.readAllDocMetadata();
2425
2374
  this.metadataManager.replaceAll(nextMetadata, by);
2426
2375
  this.assetManager.hydrateFromFlock(by);
2427
- this.docManager.hydrateFrontierKeys();
2428
2376
  }
2429
2377
  applyEvents(events, by) {
2430
2378
  if (!events.length) return;
2431
2379
  const docMetadataIds = /* @__PURE__ */ new Set();
2432
2380
  const docAssetIds = /* @__PURE__ */ new Set();
2433
- const docFrontiersIds = /* @__PURE__ */ new Set();
2434
2381
  const assetIds = /* @__PURE__ */ new Set();
2435
2382
  for (const event of events) {
2436
2383
  const key = event.key;
@@ -2447,15 +2394,11 @@ var FlockHydrator = class {
2447
2394
  const assetId = key[2];
2448
2395
  if (typeof docId === "string") docAssetIds.add(docId);
2449
2396
  if (typeof assetId === "string") assetIds.add(assetId);
2450
- } else if (root === "f") {
2451
- const docId = key[1];
2452
- if (typeof docId === "string") docFrontiersIds.add(docId);
2453
2397
  }
2454
2398
  }
2455
2399
  for (const assetId of assetIds) this.assetManager.refreshAssetMetadataEntry(assetId, by);
2456
2400
  for (const docId of docMetadataIds) this.metadataManager.refreshFromFlock(docId, by);
2457
2401
  for (const docId of docAssetIds) this.assetManager.refreshDocAssetsEntry(docId, by);
2458
- for (const docId of docFrontiersIds) this.docManager.refreshDocFrontierKeys(docId);
2459
2402
  }
2460
2403
  readAllDocMetadata() {
2461
2404
  const nextMetadata = /* @__PURE__ */ new Map();
@@ -2666,8 +2609,7 @@ function createRepoState() {
2666
2609
  docAssets: /* @__PURE__ */ new Map(),
2667
2610
  assets: /* @__PURE__ */ new Map(),
2668
2611
  orphanedAssets: /* @__PURE__ */ new Map(),
2669
- assetToDocRefs: /* @__PURE__ */ new Map(),
2670
- docFrontierKeys: /* @__PURE__ */ new Map()
2612
+ assetToDocRefs: /* @__PURE__ */ new Map()
2671
2613
  };
2672
2614
  }
2673
2615
 
@@ -2703,8 +2645,7 @@ var LoroRepo = class LoroRepo {
2703
2645
  docFrontierDebounceMs,
2704
2646
  getMetaFlock: () => this.metaFlock,
2705
2647
  eventBus: this.eventBus,
2706
- persistMeta: () => this.persistMeta(),
2707
- state: this.state
2648
+ persistMeta: () => this.persistMeta()
2708
2649
  });
2709
2650
  this.metadataManager = new MetadataManager({
2710
2651
  getMetaFlock: () => this.metaFlock,
@@ -2793,7 +2734,7 @@ var LoroRepo = class LoroRepo {
2793
2734
  */
2794
2735
  async openPersistedDoc(docId) {
2795
2736
  return {
2796
- doc: await this.docManager.openCollaborativeDoc(docId),
2737
+ doc: await this.docManager.openPersistedDoc(docId),
2797
2738
  syncOnce: () => {
2798
2739
  return this.sync({
2799
2740
  scope: "doc",