loro-repo 0.0.0

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/index.cjs ADDED
@@ -0,0 +1,2173 @@
1
+ let __loro_dev_flock = require("@loro-dev/flock");
2
+ let loro_crdt = require("loro-crdt");
3
+ let loro_adaptors = require("loro-adaptors");
4
+ let loro_protocol = require("loro-protocol");
5
+ let loro_websocket = require("loro-websocket");
6
+
7
+ //#region src/loro-adaptor.ts
8
+ function createRepoFlockAdaptorFromDoc(flock, config = {}) {
9
+ return new loro_adaptors.FlockAdaptor(flock, config);
10
+ }
11
+
12
+ //#endregion
13
+ //#region src/transport/websocket.ts
14
+ function withTimeout(promise, timeoutMs) {
15
+ if (!timeoutMs || timeoutMs <= 0) return promise;
16
+ return new Promise((resolve, reject) => {
17
+ const timer = setTimeout(() => {
18
+ reject(/* @__PURE__ */ new Error(`Operation timed out after ${timeoutMs}ms`));
19
+ }, timeoutMs);
20
+ promise.then((value) => {
21
+ clearTimeout(timer);
22
+ resolve(value);
23
+ }).catch((error) => {
24
+ clearTimeout(timer);
25
+ reject(error);
26
+ });
27
+ });
28
+ }
29
+ function normalizeRoomId(roomId, fallback) {
30
+ if (typeof roomId === "string" && roomId.length > 0) return roomId;
31
+ if (roomId instanceof Uint8Array && roomId.length > 0) try {
32
+ return (0, loro_protocol.bytesToHex)(roomId);
33
+ } catch {
34
+ return fallback;
35
+ }
36
+ return fallback;
37
+ }
38
+ function bytesEqual(a, b) {
39
+ if (a === b) return true;
40
+ if (!a || !b) return false;
41
+ if (a.length !== b.length) return false;
42
+ for (let i = 0; i < a.length; i += 1) if (a[i] !== b[i]) return false;
43
+ return true;
44
+ }
45
+ /**
46
+ * loro-websocket backed {@link TransportAdapter} implementation for LoroRepo.
47
+ */
48
+ var WebSocketTransportAdapter = class {
49
+ options;
50
+ client;
51
+ metadataSession;
52
+ docSessions = /* @__PURE__ */ new Map();
53
+ constructor(options) {
54
+ this.options = options;
55
+ }
56
+ async connect(_options) {
57
+ const client = this.ensureClient();
58
+ await client.connect();
59
+ await client.waitConnected();
60
+ }
61
+ async close() {
62
+ for (const [docId] of this.docSessions) await this.leaveDocSession(docId).catch(() => {});
63
+ this.docSessions.clear();
64
+ await this.teardownMetadataSession().catch(() => {});
65
+ if (this.client) {
66
+ this.client.destroy();
67
+ this.client = void 0;
68
+ }
69
+ }
70
+ isConnected() {
71
+ return this.client?.getStatus() === "connected";
72
+ }
73
+ async syncMeta(flock, options) {
74
+ if (!this.options.metadataRoomId) return { ok: true };
75
+ try {
76
+ await withTimeout((await this.ensureMetadataSession(flock, {
77
+ roomId: this.options.metadataRoomId,
78
+ auth: this.options.metadataAuth
79
+ })).firstSynced, options?.timeout);
80
+ return { ok: true };
81
+ } catch {
82
+ return { ok: false };
83
+ }
84
+ }
85
+ joinMetaRoom(flock, params) {
86
+ const fallback = this.options.metadataRoomId ?? "";
87
+ const roomId = normalizeRoomId(params?.roomId, fallback);
88
+ if (!roomId) throw new Error("Metadata room id not configured");
89
+ const auth = params?.auth ?? this.options.metadataAuth;
90
+ const ensure = this.ensureMetadataSession(flock, {
91
+ roomId,
92
+ auth
93
+ });
94
+ const firstSynced = ensure.then((session) => session.firstSynced);
95
+ const getConnected = () => this.isConnected();
96
+ const subscription = {
97
+ unsubscribe: () => {
98
+ ensure.then((session) => {
99
+ session.refCount = Math.max(0, session.refCount - 1);
100
+ if (session.refCount === 0) this.teardownMetadataSession(session).catch(() => {});
101
+ });
102
+ },
103
+ firstSyncedWithRemote: firstSynced,
104
+ get connected() {
105
+ return getConnected();
106
+ }
107
+ };
108
+ ensure.then((session) => {
109
+ session.refCount += 1;
110
+ });
111
+ return subscription;
112
+ }
113
+ async syncDoc(docId, doc, options) {
114
+ try {
115
+ await withTimeout((await this.ensureDocSession(docId, doc, {})).firstSynced, options?.timeout);
116
+ return { ok: true };
117
+ } catch {
118
+ return { ok: false };
119
+ }
120
+ }
121
+ joinDocRoom(docId, doc, params) {
122
+ const ensure = this.ensureDocSession(docId, doc, params ?? {});
123
+ const firstSynced = ensure.then((session) => session.firstSynced);
124
+ const getConnected = () => this.isConnected();
125
+ const subscription = {
126
+ unsubscribe: () => {
127
+ ensure.then((session) => {
128
+ session.refCount = Math.max(0, session.refCount - 1);
129
+ if (session.refCount === 0) this.leaveDocSession(docId).catch(() => {});
130
+ });
131
+ },
132
+ firstSyncedWithRemote: firstSynced,
133
+ get connected() {
134
+ return getConnected();
135
+ }
136
+ };
137
+ ensure.then((session) => {
138
+ session.refCount += 1;
139
+ });
140
+ return subscription;
141
+ }
142
+ ensureClient() {
143
+ if (this.client) return this.client;
144
+ const { url, client: clientOptions } = this.options;
145
+ const client = new loro_websocket.LoroWebsocketClient({
146
+ url,
147
+ ...clientOptions
148
+ });
149
+ this.client = client;
150
+ return client;
151
+ }
152
+ async ensureMetadataSession(flock, params) {
153
+ const client = this.ensureClient();
154
+ await client.waitConnected();
155
+ if (this.metadataSession && this.metadataSession.flock === flock && this.metadataSession.roomId === params.roomId && bytesEqual(this.metadataSession.auth, params.auth)) return this.metadataSession;
156
+ if (this.metadataSession) await this.teardownMetadataSession(this.metadataSession).catch(() => {});
157
+ const configuredType = this.options.metadataCrdtType;
158
+ if (configuredType && configuredType !== loro_protocol.CrdtType.Flock) throw new Error(`metadataCrdtType must be ${loro_protocol.CrdtType.Flock} when syncing Flock metadata`);
159
+ const adaptor = createRepoFlockAdaptorFromDoc(flock, this.options.metadataAdaptorConfig ?? {});
160
+ const room = await client.join({
161
+ roomId: params.roomId,
162
+ crdtAdaptor: adaptor,
163
+ auth: params.auth
164
+ });
165
+ const session = {
166
+ adaptor,
167
+ room,
168
+ firstSynced: room.waitForReachingServerVersion(),
169
+ flock,
170
+ roomId: params.roomId,
171
+ auth: params.auth,
172
+ refCount: 0
173
+ };
174
+ this.metadataSession = session;
175
+ return session;
176
+ }
177
+ async teardownMetadataSession(session) {
178
+ const target = session ?? this.metadataSession;
179
+ if (!target) return;
180
+ if (this.metadataSession === target) this.metadataSession = void 0;
181
+ const { adaptor, room } = target;
182
+ try {
183
+ await room.leave();
184
+ } catch {
185
+ await room.destroy().catch(() => {});
186
+ }
187
+ adaptor.destroy();
188
+ }
189
+ async ensureDocSession(docId, doc, params) {
190
+ const client = this.ensureClient();
191
+ await client.waitConnected();
192
+ const existing = this.docSessions.get(docId);
193
+ const derivedRoomId = this.options.docRoomId?.(docId) ?? docId;
194
+ const roomId = normalizeRoomId(params.roomId, derivedRoomId);
195
+ const auth = params.auth ?? this.options.docAuth?.(docId);
196
+ if (existing && existing.doc === doc && existing.roomId === roomId) return existing;
197
+ if (existing) await this.leaveDocSession(docId).catch(() => {});
198
+ const adaptor = (0, loro_adaptors.createLoroAdaptorFromDoc)(doc);
199
+ const room = await client.join({
200
+ roomId,
201
+ crdtAdaptor: adaptor,
202
+ auth
203
+ });
204
+ const session = {
205
+ adaptor,
206
+ room,
207
+ firstSynced: room.waitForReachingServerVersion(),
208
+ doc,
209
+ roomId,
210
+ refCount: 0
211
+ };
212
+ this.docSessions.set(docId, session);
213
+ return session;
214
+ }
215
+ async leaveDocSession(docId) {
216
+ const session = this.docSessions.get(docId);
217
+ if (!session) return;
218
+ this.docSessions.delete(docId);
219
+ try {
220
+ await session.room.leave();
221
+ } catch {
222
+ await session.room.destroy().catch(() => {});
223
+ }
224
+ session.adaptor.destroy();
225
+ }
226
+ };
227
+
228
+ //#endregion
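Editor's usage sketch (not part of the published bundle). Assuming the package re-exports WebSocketTransportAdapter under that name and that a loro-websocket server is reachable at the placeholder URL (both assumptions), a one-shot document sync against the API shown above could look like this:

const { LoroDoc } = require("loro-crdt");

async function demoWebSocketSync(WebSocketTransportAdapter) { // class passed in; the export name is an assumption
  const adapter = new WebSocketTransportAdapter({ url: "wss://example.invalid/sync" }); // placeholder endpoint
  await adapter.connect();
  const doc = new LoroDoc();
  const result = await adapter.syncDoc("demo-doc", doc, { timeout: 5000 }); // resolves to { ok: boolean }
  if (!result.ok) console.warn("initial sync did not complete within the timeout");
  await adapter.close();
}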
229
+ //#region src/transport/broadcast-channel.ts
230
+ function deferred() {
231
+ let resolve;
232
+ return {
233
+ promise: new Promise((res) => {
234
+ resolve = res;
235
+ }),
236
+ resolve
237
+ };
238
+ }
239
+ function randomInstanceId() {
240
+ if (typeof crypto !== "undefined" && typeof crypto.randomUUID === "function") return crypto.randomUUID();
241
+ return Math.random().toString(36).slice(2);
242
+ }
243
+ function ensureBroadcastChannel() {
244
+ if (typeof BroadcastChannel === "undefined") throw new Error("BroadcastChannel API is not available in this environment");
245
+ return BroadcastChannel;
246
+ }
247
+ function encodeDocChannelId(docId) {
248
+ try {
249
+ return encodeURIComponent(docId);
250
+ } catch {
251
+ return docId.replace(/[^a-z0-9_-]/gi, "_");
252
+ }
253
+ }
254
+ function postChannelMessage(channel, message) {
255
+ channel.postMessage(message);
256
+ }
257
+ /**
258
+ * TransportAdapter that relies on the BroadcastChannel API to fan out metadata
259
+ * and document updates between browser tabs within the same origin.
260
+ */
261
+ var BroadcastChannelTransportAdapter = class {
262
+ instanceId = randomInstanceId();
263
+ namespace;
264
+ metaChannelName;
265
+ connected = false;
266
+ metaState;
267
+ docStates = /* @__PURE__ */ new Map();
268
+ constructor(options = {}) {
269
+ ensureBroadcastChannel();
270
+ this.namespace = options.namespace ?? "loro-repo";
271
+ this.metaChannelName = options.metaChannelName ?? `${this.namespace}-meta`;
272
+ }
273
+ async connect() {
274
+ this.connected = true;
275
+ }
276
+ async close() {
277
+ this.connected = false;
278
+ if (this.metaState) {
279
+ for (const entry of this.metaState.listeners) entry.unsubscribe();
280
+ this.metaState.channel.close();
281
+ this.metaState = void 0;
282
+ }
283
+ for (const [docId] of this.docStates) this.teardownDocChannel(docId);
284
+ this.docStates.clear();
285
+ }
286
+ isConnected() {
287
+ return this.connected;
288
+ }
289
+ async syncMeta(flock, _options) {
290
+ const subscription = this.joinMetaRoom(flock);
291
+ subscription.firstSyncedWithRemote.catch(() => void 0);
292
+ await subscription.firstSyncedWithRemote;
293
+ subscription.unsubscribe();
294
+ return { ok: true };
295
+ }
296
+ joinMetaRoom(flock, _params) {
297
+ const state = this.ensureMetaChannel();
298
+ const { promise, resolve } = deferred();
299
+ const listener = {
300
+ flock,
301
+ muted: false,
302
+ unsubscribe: flock.subscribe(() => {
303
+ if (listener.muted) return;
304
+ Promise.resolve(flock.exportJson()).then((bundle) => {
305
+ postChannelMessage(state.channel, {
306
+ kind: "meta-export",
307
+ from: this.instanceId,
308
+ bundle
309
+ });
310
+ });
311
+ }),
312
+ resolveFirst: resolve,
313
+ firstSynced: promise
314
+ };
315
+ state.listeners.add(listener);
316
+ postChannelMessage(state.channel, {
317
+ kind: "meta-request",
318
+ from: this.instanceId
319
+ });
320
+ Promise.resolve(flock.exportJson()).then((bundle) => {
321
+ postChannelMessage(state.channel, {
322
+ kind: "meta-export",
323
+ from: this.instanceId,
324
+ bundle
325
+ });
326
+ });
327
+ queueMicrotask(() => resolve());
328
+ return {
329
+ unsubscribe: () => {
330
+ listener.unsubscribe();
331
+ state.listeners.delete(listener);
332
+ if (!state.listeners.size) {
333
+ state.channel.removeEventListener("message", state.onMessage);
334
+ state.channel.close();
335
+ this.metaState = void 0;
336
+ }
337
+ },
338
+ firstSyncedWithRemote: listener.firstSynced,
339
+ get connected() {
340
+ return true;
341
+ }
342
+ };
343
+ }
344
+ async syncDoc(docId, doc, _options) {
345
+ const subscription = this.joinDocRoom(docId, doc);
346
+ subscription.firstSyncedWithRemote.catch(() => void 0);
347
+ await subscription.firstSyncedWithRemote;
348
+ subscription.unsubscribe();
349
+ return { ok: true };
350
+ }
351
+ joinDocRoom(docId, doc, _params) {
352
+ const state = this.ensureDocChannel(docId);
353
+ const { promise, resolve } = deferred();
354
+ const listener = {
355
+ doc,
356
+ muted: false,
357
+ unsubscribe: doc.subscribe(() => {
358
+ if (listener.muted) return;
359
+ const payload = doc.export({ mode: "update" });
360
+ postChannelMessage(state.channel, {
361
+ kind: "doc-update",
362
+ docId,
363
+ from: this.instanceId,
364
+ mode: "update",
365
+ payload
366
+ });
367
+ }),
368
+ resolveFirst: resolve,
369
+ firstSynced: promise
370
+ };
371
+ state.listeners.add(listener);
372
+ postChannelMessage(state.channel, {
373
+ kind: "doc-request",
374
+ docId,
375
+ from: this.instanceId
376
+ });
377
+ postChannelMessage(state.channel, {
378
+ kind: "doc-update",
379
+ docId,
380
+ from: this.instanceId,
381
+ mode: "snapshot",
382
+ payload: doc.export({ mode: "snapshot" })
383
+ });
384
+ queueMicrotask(() => resolve());
385
+ return {
386
+ unsubscribe: () => {
387
+ listener.unsubscribe();
388
+ state.listeners.delete(listener);
389
+ if (!state.listeners.size) this.teardownDocChannel(docId);
390
+ },
391
+ firstSyncedWithRemote: listener.firstSynced,
392
+ get connected() {
393
+ return true;
394
+ }
395
+ };
396
+ }
397
+ ensureMetaChannel() {
398
+ if (this.metaState) return this.metaState;
399
+ const channel = new (ensureBroadcastChannel())(this.metaChannelName);
400
+ const listeners = /* @__PURE__ */ new Set();
401
+ const onMessage = (event) => {
402
+ const message = event.data;
403
+ if (!message || message.from === this.instanceId) return;
404
+ if (message.kind === "meta-export") for (const entry of listeners) {
405
+ entry.muted = true;
406
+ entry.flock.importJson(message.bundle);
407
+ entry.muted = false;
408
+ entry.resolveFirst();
409
+ }
410
+ else if (message.kind === "meta-request") {
411
+ const first = listeners.values().next().value;
412
+ if (!first) return;
413
+ Promise.resolve(first.flock.exportJson()).then((bundle) => {
414
+ postChannelMessage(channel, {
415
+ kind: "meta-export",
416
+ from: this.instanceId,
417
+ bundle
418
+ });
419
+ });
420
+ }
421
+ };
422
+ channel.addEventListener("message", onMessage);
423
+ this.metaState = {
424
+ channel,
425
+ listeners,
426
+ onMessage
427
+ };
428
+ return this.metaState;
429
+ }
430
+ ensureDocChannel(docId) {
431
+ const existing = this.docStates.get(docId);
432
+ if (existing) return existing;
433
+ const channel = new (ensureBroadcastChannel())(`${this.namespace}-doc-${encodeDocChannelId(docId)}`);
434
+ const listeners = /* @__PURE__ */ new Set();
435
+ const onMessage = (event) => {
436
+ const message = event.data;
437
+ if (!message || message.from === this.instanceId) return;
438
+ if (message.kind === "doc-update") for (const entry of listeners) {
439
+ entry.muted = true;
440
+ entry.doc.import(message.payload);
441
+ entry.muted = false;
442
+ entry.resolveFirst();
443
+ }
444
+ else if (message.kind === "doc-request") {
445
+ const first = listeners.values().next().value;
446
+ if (!first) return;
447
+ const payload = message.docId === docId ? first.doc.export({ mode: "snapshot" }) : void 0;
448
+ if (!payload) return;
449
+ postChannelMessage(channel, {
450
+ kind: "doc-update",
451
+ docId,
452
+ from: this.instanceId,
453
+ mode: "snapshot",
454
+ payload
455
+ });
456
+ }
457
+ };
458
+ channel.addEventListener("message", onMessage);
459
+ const state = {
460
+ channel,
461
+ listeners,
462
+ onMessage
463
+ };
464
+ this.docStates.set(docId, state);
465
+ return state;
466
+ }
467
+ teardownDocChannel(docId) {
468
+ const state = this.docStates.get(docId);
469
+ if (!state) return;
470
+ for (const entry of state.listeners) entry.unsubscribe();
471
+ state.channel.removeEventListener("message", state.onMessage);
472
+ state.channel.close();
473
+ this.docStates.delete(docId);
474
+ }
475
+ };
476
+
477
+ //#endregion
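Editor's usage sketch (not part of the published bundle). Assuming BroadcastChannelTransportAdapter is exported under that name, tabs on the same origin can share a document as shown below; joinDocRoom keeps the tab subscribed, while syncDoc performs a one-shot exchange and then unsubscribes.

const { LoroDoc } = require("loro-crdt");

async function demoBroadcastChannelSync(BroadcastChannelTransportAdapter) { // class passed in; the export name is an assumption
  const adapter = new BroadcastChannelTransportAdapter({ namespace: "demo" });
  await adapter.connect();
  const doc = new LoroDoc();
  const room = adapter.joinDocRoom("demo-doc", doc); // returns { unsubscribe, firstSyncedWithRemote, connected }
  await room.firstSyncedWithRemote;
  doc.getText("body").insert(0, "hello from this tab"); // edits fan out to other subscribed tabs
  room.unsubscribe();
  await adapter.close();
}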
478
+ //#region src/storage/indexeddb.ts
479
+ const DEFAULT_DB_NAME = "loro-repo";
480
+ const DEFAULT_DB_VERSION = 1;
481
+ const DEFAULT_DOC_STORE = "docs";
482
+ const DEFAULT_META_STORE = "meta";
483
+ const DEFAULT_ASSET_STORE = "assets";
484
+ const DEFAULT_DOC_UPDATE_STORE = "doc-updates";
485
+ const DEFAULT_META_KEY = "snapshot";
486
+ const textDecoder = new TextDecoder();
487
+ function describeUnknown(cause) {
488
+ if (typeof cause === "string") return cause;
489
+ if (typeof cause === "number" || typeof cause === "boolean") return String(cause);
490
+ if (typeof cause === "bigint") return cause.toString();
491
+ if (typeof cause === "symbol") return cause.description ?? cause.toString();
492
+ if (typeof cause === "function") return `[function ${cause.name ?? "anonymous"}]`;
493
+ if (cause && typeof cause === "object") try {
494
+ return JSON.stringify(cause);
495
+ } catch {
496
+ return "[object]";
497
+ }
498
+ return String(cause);
499
+ }
500
+ var IndexedDBStorageAdaptor = class {
501
+ idb;
502
+ dbName;
503
+ version;
504
+ docStore;
505
+ docUpdateStore;
506
+ metaStore;
507
+ assetStore;
508
+ metaKey;
509
+ dbPromise;
510
+ closed = false;
511
+ constructor(options = {}) {
512
+ const idbFactory = globalThis.indexedDB;
513
+ if (!idbFactory) throw new Error("IndexedDB is not available in this environment");
514
+ this.idb = idbFactory;
515
+ this.dbName = options.dbName ?? DEFAULT_DB_NAME;
516
+ this.version = options.version ?? DEFAULT_DB_VERSION;
517
+ this.docStore = options.docStoreName ?? DEFAULT_DOC_STORE;
518
+ this.docUpdateStore = options.docUpdateStoreName ?? DEFAULT_DOC_UPDATE_STORE;
519
+ this.metaStore = options.metaStoreName ?? DEFAULT_META_STORE;
520
+ this.assetStore = options.assetStoreName ?? DEFAULT_ASSET_STORE;
521
+ this.metaKey = options.metaKey ?? DEFAULT_META_KEY;
522
+ }
523
+ async save(payload) {
524
+ const db = await this.ensureDb();
525
+ switch (payload.type) {
526
+ case "doc-snapshot": {
527
+ const snapshot = payload.snapshot.slice();
528
+ await this.storeMergedSnapshot(db, payload.docId, snapshot);
529
+ break;
530
+ }
531
+ case "doc-update": {
532
+ const update = payload.update.slice();
533
+ await this.appendDocUpdate(db, payload.docId, update);
534
+ break;
535
+ }
536
+ case "asset": {
537
+ const bytes = payload.data.slice();
538
+ await this.putBinary(db, this.assetStore, payload.assetId, bytes);
539
+ break;
540
+ }
541
+ case "meta": {
542
+ const bytes = payload.update.slice();
543
+ await this.putBinary(db, this.metaStore, this.metaKey, bytes);
544
+ break;
545
+ }
546
+ default: throw new Error("Unsupported storage payload type");
547
+ }
548
+ }
549
+ async deleteAsset(assetId) {
550
+ const db = await this.ensureDb();
551
+ await this.deleteKey(db, this.assetStore, assetId);
552
+ }
553
+ async loadDoc(docId) {
554
+ const db = await this.ensureDb();
555
+ const snapshot = await this.getBinaryFromDb(db, this.docStore, docId);
556
+ const pendingUpdates = await this.getDocUpdates(db, docId);
557
+ if (!snapshot && pendingUpdates.length === 0) return;
558
+ let doc;
559
+ try {
560
+ doc = snapshot ? loro_crdt.LoroDoc.fromSnapshot(snapshot) : new loro_crdt.LoroDoc();
561
+ } catch (error) {
562
+ throw this.createError(`Failed to hydrate document snapshot for "${docId}"`, error);
563
+ }
564
+ let appliedUpdates = false;
565
+ for (const update of pendingUpdates) try {
566
+ doc.import(update);
567
+ appliedUpdates = true;
568
+ } catch (error) {
569
+ throw this.createError(`Failed to apply queued document update for "${docId}"`, error);
570
+ }
571
+ if (appliedUpdates) {
572
+ let consolidated;
573
+ try {
574
+ consolidated = doc.export({ mode: "snapshot" });
575
+ } catch (error) {
576
+ throw this.createError(`Failed to export consolidated snapshot for "${docId}"`, error);
577
+ }
578
+ await this.writeSnapshot(db, docId, consolidated);
579
+ await this.clearDocUpdates(db, docId);
580
+ }
581
+ return doc;
582
+ }
583
+ async loadMeta() {
584
+ const bytes = await this.getBinary(this.metaStore, this.metaKey);
585
+ if (!bytes) return void 0;
586
+ try {
587
+ const json = textDecoder.decode(bytes);
588
+ const bundle = JSON.parse(json);
589
+ const flock = new __loro_dev_flock.Flock();
590
+ flock.importJson(bundle);
591
+ return flock;
592
+ } catch (error) {
593
+ throw this.createError("Failed to hydrate metadata snapshot", error);
594
+ }
595
+ }
596
+ async loadAsset(assetId) {
597
+ return await this.getBinary(this.assetStore, assetId) ?? void 0;
598
+ }
599
+ async close() {
600
+ this.closed = true;
601
+ const db = await this.dbPromise;
602
+ if (db) db.close();
603
+ this.dbPromise = void 0;
604
+ }
605
+ async ensureDb() {
606
+ if (this.closed) throw new Error("IndexedDBStorageAdaptor has been closed");
607
+ if (!this.dbPromise) this.dbPromise = new Promise((resolve, reject) => {
608
+ const request = this.idb.open(this.dbName, this.version);
609
+ request.addEventListener("upgradeneeded", () => {
610
+ const db = request.result;
611
+ this.ensureStore(db, this.docStore);
612
+ this.ensureStore(db, this.docUpdateStore);
613
+ this.ensureStore(db, this.metaStore);
614
+ this.ensureStore(db, this.assetStore);
615
+ });
616
+ request.addEventListener("success", () => resolve(request.result), { once: true });
617
+ request.addEventListener("error", () => {
618
+ reject(this.createError(`Failed to open IndexedDB database "${this.dbName}"`, request.error));
619
+ }, { once: true });
620
+ });
621
+ return this.dbPromise;
622
+ }
623
+ ensureStore(db, storeName) {
624
+ const names = db.objectStoreNames;
625
+ if (this.storeExists(names, storeName)) return;
626
+ db.createObjectStore(storeName);
627
+ }
628
+ storeExists(names, storeName) {
629
+ if (typeof names.contains === "function") return names.contains(storeName);
630
+ const length = names.length ?? 0;
631
+ for (let index = 0; index < length; index += 1) if (names.item?.(index) === storeName) return true;
632
+ return false;
633
+ }
634
+ async storeMergedSnapshot(db, docId, incoming) {
635
+ await this.runInTransaction(db, this.docStore, "readwrite", async (store) => {
636
+ const existingRaw = await this.wrapRequest(store.get(docId), "read");
637
+ const existing = await this.normalizeBinary(existingRaw);
638
+ const merged = this.mergeSnapshots(docId, existing, incoming);
639
+ await this.wrapRequest(store.put(merged, docId), "write");
640
+ });
641
+ }
642
+ mergeSnapshots(docId, existing, incoming) {
643
+ try {
644
+ const doc = existing ? loro_crdt.LoroDoc.fromSnapshot(existing) : new loro_crdt.LoroDoc();
645
+ doc.import(incoming);
646
+ return doc.export({ mode: "snapshot" });
647
+ } catch (error) {
648
+ throw this.createError(`Failed to merge snapshot for "${docId}"`, error);
649
+ }
650
+ }
651
+ async appendDocUpdate(db, docId, update) {
652
+ await this.runInTransaction(db, this.docUpdateStore, "readwrite", async (store) => {
653
+ const raw = await this.wrapRequest(store.get(docId), "read");
654
+ const queue = await this.normalizeUpdateQueue(raw);
655
+ queue.push(update.slice());
656
+ await this.wrapRequest(store.put({ updates: queue }, docId), "write");
657
+ });
658
+ }
659
+ async getDocUpdates(db, docId) {
660
+ const raw = await this.runInTransaction(db, this.docUpdateStore, "readonly", (store) => this.wrapRequest(store.get(docId), "read"));
661
+ return this.normalizeUpdateQueue(raw);
662
+ }
663
+ async clearDocUpdates(db, docId) {
664
+ await this.runInTransaction(db, this.docUpdateStore, "readwrite", (store) => this.wrapRequest(store.delete(docId), "delete"));
665
+ }
666
+ async writeSnapshot(db, docId, snapshot) {
667
+ await this.putBinary(db, this.docStore, docId, snapshot.slice());
668
+ }
669
+ async getBinaryFromDb(db, storeName, key) {
670
+ const value = await this.runInTransaction(db, storeName, "readonly", (store) => this.wrapRequest(store.get(key), "read"));
671
+ return this.normalizeBinary(value);
672
+ }
673
+ async normalizeUpdateQueue(value) {
674
+ if (value == null) return [];
675
+ const list = Array.isArray(value) ? value : typeof value === "object" && value !== null ? value.updates : void 0;
676
+ if (!Array.isArray(list)) return [];
677
+ const queue = [];
678
+ for (const entry of list) {
679
+ const bytes = await this.normalizeBinary(entry);
680
+ if (bytes) queue.push(bytes);
681
+ }
682
+ return queue;
683
+ }
684
+ async putBinary(db, storeName, key, value) {
685
+ await this.runInTransaction(db, storeName, "readwrite", (store) => this.wrapRequest(store.put(value, key), "write"));
686
+ }
687
+ async deleteKey(db, storeName, key) {
688
+ await this.runInTransaction(db, storeName, "readwrite", (store) => this.wrapRequest(store.delete(key), "delete"));
689
+ }
690
+ async getBinary(storeName, key) {
691
+ const db = await this.ensureDb();
692
+ return this.getBinaryFromDb(db, storeName, key);
693
+ }
694
+ runInTransaction(db, storeName, mode, executor) {
695
+ const tx = db.transaction(storeName, mode);
696
+ const store = tx.objectStore(storeName);
697
+ const completion = new Promise((resolve, reject) => {
698
+ tx.addEventListener("complete", () => resolve(), { once: true });
699
+ tx.addEventListener("abort", () => reject(this.createError("IndexedDB transaction aborted", tx.error)), { once: true });
700
+ tx.addEventListener("error", () => reject(this.createError("IndexedDB transaction failed", tx.error)), { once: true });
701
+ });
702
+ return Promise.all([executor(store), completion]).then(([result]) => result);
703
+ }
704
+ wrapRequest(request, action) {
705
+ return new Promise((resolve, reject) => {
706
+ request.addEventListener("success", () => resolve(request.result), { once: true });
707
+ request.addEventListener("error", () => reject(this.createError(`IndexedDB request failed during ${action}`, request.error)), { once: true });
708
+ });
709
+ }
710
+ async normalizeBinary(value) {
711
+ if (value == null) return void 0;
712
+ if (value instanceof Uint8Array) return value.slice();
713
+ if (ArrayBuffer.isView(value)) return new Uint8Array(value.buffer, value.byteOffset, value.byteLength).slice();
714
+ if (value instanceof ArrayBuffer) return new Uint8Array(value.slice(0));
715
+ if (typeof value === "object" && value !== null && "arrayBuffer" in value) {
716
+ const candidate = value;
717
+ if (typeof candidate.arrayBuffer === "function") {
718
+ const buffer = await candidate.arrayBuffer();
719
+ return new Uint8Array(buffer);
720
+ }
721
+ }
722
+ }
723
+ createError(message, cause) {
724
+ if (cause instanceof Error) return new Error(`${message}: ${cause.message}`, { cause });
725
+ if (cause !== void 0 && cause !== null) return /* @__PURE__ */ new Error(`${message}: ${describeUnknown(cause)}`);
726
+ return new Error(message);
727
+ }
728
+ };
729
+
730
+ //#endregion
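Editor's usage sketch (not part of the published bundle). Assuming IndexedDBStorageAdaptor is exported under that name and IndexedDB is available in the host environment, a snapshot round trip through the adaptor shown above could look like this:

const { LoroDoc } = require("loro-crdt");

async function demoIndexedDbRoundTrip(IndexedDBStorageAdaptor) { // class passed in; the export name is an assumption
  const storage = new IndexedDBStorageAdaptor({ dbName: "demo-repo" });
  const doc = new LoroDoc();
  doc.getText("body").insert(0, "hello");
  // Persist a full snapshot, then hydrate a fresh LoroDoc from what was stored.
  await storage.save({ type: "doc-snapshot", docId: "demo-doc", snapshot: doc.export({ mode: "snapshot" }) });
  const restored = await storage.loadDoc("demo-doc"); // LoroDoc instance, or undefined if nothing was stored
  await storage.close();
  return restored;
}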
731
+ //#region src/index.ts
732
+ const textEncoder = new TextEncoder();
733
+ const DEFAULT_DOC_FRONTIER_DEBOUNCE_MS = 1e3;
734
+ function logAsyncError(context) {
735
+ return (error) => {
736
+ if (error instanceof Error) console.error(`[loro-repo] ${context} failed: ${error.message}`, error);
737
+ else console.error(`[loro-repo] ${context} failed with non-error reason:`, error);
738
+ };
739
+ }
740
+ async function streamToUint8Array(stream) {
741
+ const reader = stream.getReader();
742
+ const chunks = [];
743
+ let total = 0;
744
+ while (true) {
745
+ const { done, value } = await reader.read();
746
+ if (done) break;
747
+ if (value) {
748
+ chunks.push(value);
749
+ total += value.byteLength;
750
+ }
751
+ }
752
+ const buffer = new Uint8Array(total);
753
+ let offset = 0;
754
+ for (const chunk of chunks) {
755
+ buffer.set(chunk, offset);
756
+ offset += chunk.byteLength;
757
+ }
758
+ return buffer;
759
+ }
760
+ async function assetContentToUint8Array(content) {
761
+ if (content instanceof Uint8Array) return content;
762
+ if (ArrayBuffer.isView(content)) return new Uint8Array(content.buffer.slice(content.byteOffset, content.byteOffset + content.byteLength));
763
+ if (typeof Blob !== "undefined" && content instanceof Blob) return new Uint8Array(await content.arrayBuffer());
764
+ if (typeof ReadableStream !== "undefined" && content instanceof ReadableStream) return streamToUint8Array(content);
765
+ throw new TypeError("Unsupported asset content type");
766
+ }
767
+ function bytesToHex(bytes) {
768
+ return Array.from(bytes, (byte) => byte.toString(16).padStart(2, "0")).join("");
769
+ }
770
+ async function computeSha256(bytes) {
771
+ const globalCrypto = globalThis.crypto;
772
+ if (globalCrypto?.subtle && typeof globalCrypto.subtle.digest === "function") {
773
+ const digest = await globalCrypto.subtle.digest("SHA-256", bytes);
774
+ return bytesToHex(new Uint8Array(digest));
775
+ }
776
+ try {
777
+ const { createHash } = await import("node:crypto");
778
+ const hash = createHash("sha256");
779
+ hash.update(bytes);
780
+ return hash.digest("hex");
781
+ } catch {
782
+ throw new Error("SHA-256 digest is not available in this environment");
783
+ }
784
+ }
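Asset ids are content-addressed: uploadAsset and linkAsset (further below) hash the asset bytes with computeSha256 and use the hex digest as the assetId. A standalone sketch of the same derivation, assuming Node's WebCrypto is available:

const { webcrypto } = require("node:crypto");

async function deriveAssetId(bytes) { // hypothetical helper mirroring computeSha256 + bytesToHex above
  const digest = await webcrypto.subtle.digest("SHA-256", bytes);
  return Array.from(new Uint8Array(digest), (b) => b.toString(16).padStart(2, "0")).join("");
}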
785
+ function cloneJsonValue(value) {
786
+ if (value === null) return null;
787
+ if (typeof value === "string" || typeof value === "boolean") return value;
788
+ if (typeof value === "number") return Number.isFinite(value) ? value : void 0;
789
+ if (Array.isArray(value)) {
790
+ const arr = [];
791
+ for (const entry of value) {
792
+ const cloned = cloneJsonValue(entry);
793
+ if (cloned !== void 0) arr.push(cloned);
794
+ }
795
+ return arr;
796
+ }
797
+ if (value && typeof value === "object") {
798
+ const input = value;
799
+ const obj = {};
800
+ for (const [key, entry] of Object.entries(input)) {
801
+ const cloned = cloneJsonValue(entry);
802
+ if (cloned !== void 0) obj[key] = cloned;
803
+ }
804
+ return obj;
805
+ }
806
+ }
807
+ function cloneJsonObject(value) {
808
+ const cloned = cloneJsonValue(value);
809
+ if (cloned && typeof cloned === "object" && !Array.isArray(cloned)) return cloned;
810
+ return {};
811
+ }
812
+ function asJsonObject(value) {
813
+ const cloned = cloneJsonValue(value);
814
+ if (cloned && typeof cloned === "object" && !Array.isArray(cloned)) return cloned;
815
+ }
816
+ function isJsonObjectValue(value) {
817
+ return typeof value === "object" && value !== null && !Array.isArray(value);
818
+ }
819
+ function stableStringify(value) {
820
+ if (value === null || typeof value !== "object") return JSON.stringify(value);
821
+ if (Array.isArray(value)) return `[${value.map(stableStringify).join(",")}]`;
822
+ if (!isJsonObjectValue(value)) return JSON.stringify(value);
823
+ return `{${Object.keys(value).sort().map((key) => `${JSON.stringify(key)}:${stableStringify(value[key])}`).join(",")}}`;
824
+ }
825
+ function jsonEquals(a, b) {
826
+ if (a === void 0 && b === void 0) return true;
827
+ if (a === void 0 || b === void 0) return false;
828
+ return stableStringify(a) === stableStringify(b);
829
+ }
830
+ function diffJsonObjects(previous, next) {
831
+ const patch = {};
832
+ const keys = new Set([...Object.keys(next), ...previous ? Object.keys(previous) : []]);
833
+ for (const key of keys) {
834
+ const prevValue = previous ? previous[key] : void 0;
835
+ if (!Object.prototype.hasOwnProperty.call(next, key)) {
836
+ patch[key] = null;
837
+ continue;
838
+ }
839
+ const nextValue = next[key];
840
+ if (!jsonEquals(prevValue, nextValue)) {
841
+ const cloned = cloneJsonValue(nextValue);
842
+ if (cloned !== void 0) patch[key] = cloned;
843
+ }
844
+ }
845
+ return patch;
846
+ }
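A worked example of the patch shape produced by diffJsonObjects (illustrative values): changed keys carry the new value, and keys missing from `next` are reported as null, which is how doc-metadata events describe deletions.

diffJsonObjects({ title: "a", draft: true }, { title: "b" });
// => { title: "b", draft: null }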
847
+ function assetMetaToJson(meta) {
848
+ const json = {
849
+ assetId: meta.assetId,
850
+ size: meta.size,
851
+ createdAt: meta.createdAt
852
+ };
853
+ if (meta.mime !== void 0) json.mime = meta.mime;
854
+ if (meta.policy !== void 0) json.policy = meta.policy;
855
+ if (meta.tag !== void 0) json.tag = meta.tag;
856
+ return json;
857
+ }
858
+ function assetMetaFromJson(value) {
859
+ const obj = asJsonObject(value);
860
+ if (!obj) return void 0;
861
+ const assetId = typeof obj.assetId === "string" ? obj.assetId : void 0;
862
+ if (!assetId) return void 0;
863
+ const size = typeof obj.size === "number" ? obj.size : void 0;
864
+ const createdAt = typeof obj.createdAt === "number" ? obj.createdAt : void 0;
865
+ if (size === void 0 || createdAt === void 0) return void 0;
866
+ return {
867
+ assetId,
868
+ size,
869
+ createdAt,
870
+ ...typeof obj.mime === "string" ? { mime: obj.mime } : {},
871
+ ...typeof obj.policy === "string" ? { policy: obj.policy } : {},
872
+ ...typeof obj.tag === "string" ? { tag: obj.tag } : {}
873
+ };
874
+ }
875
+ function assetMetadataEqual(a, b) {
876
+ if (!a && !b) return true;
877
+ if (!a || !b) return false;
878
+ return stableStringify(assetMetaToJson(a)) === stableStringify(assetMetaToJson(b));
879
+ }
880
+ function cloneRepoAssetMetadata(meta) {
881
+ return {
882
+ assetId: meta.assetId,
883
+ size: meta.size,
884
+ createdAt: meta.createdAt,
885
+ ...meta.mime !== void 0 ? { mime: meta.mime } : {},
886
+ ...meta.policy !== void 0 ? { policy: meta.policy } : {},
887
+ ...meta.tag !== void 0 ? { tag: meta.tag } : {}
888
+ };
889
+ }
890
+ function toReadableStream(bytes) {
891
+ return new ReadableStream({ start(controller) {
892
+ controller.enqueue(bytes);
893
+ controller.close();
894
+ } });
895
+ }
896
+ function computeVersionVector(doc) {
897
+ const candidate = doc;
898
+ if (typeof candidate.frontiers === "function" && typeof candidate.frontiersToVV === "function") {
899
+ const frontiers = candidate.frontiers();
900
+ return candidate.frontiersToVV(frontiers);
901
+ }
902
+ if (typeof candidate.version === "function") return candidate.version();
903
+ return {};
904
+ }
905
+ function emptyFrontiers() {
906
+ return [];
907
+ }
908
+ function getDocFrontiers(doc) {
909
+ const candidate = doc;
910
+ if (typeof candidate.frontiers === "function") {
911
+ const result = candidate.frontiers();
912
+ if (result) return result;
913
+ }
914
+ return emptyFrontiers();
915
+ }
916
+ function versionVectorToJson(vv) {
917
+ const map = vv.toJSON();
918
+ const record = {};
919
+ if (map instanceof Map) {
920
+ const entries = Array.from(map.entries()).sort(([a], [b]) => String(a).localeCompare(String(b)));
921
+ for (const [peer, counter] of entries) {
922
+ if (typeof counter !== "number" || !Number.isFinite(counter)) continue;
923
+ const key = typeof peer === "string" ? peer : JSON.stringify(peer);
924
+ record[key] = counter;
925
+ }
926
+ }
927
+ return record;
928
+ }
929
+ function canonicalizeVersionVector(vv) {
930
+ const json = versionVectorToJson(vv);
931
+ return {
932
+ json,
933
+ key: stableStringify(json)
934
+ };
935
+ }
936
+ var RepoDocHandleImpl = class {
937
+ doc;
938
+ whenSyncedWithRemote;
939
+ docId;
940
+ onClose;
941
+ constructor(docId, doc, whenSyncedWithRemote, onClose) {
942
+ this.docId = docId;
943
+ this.doc = doc;
944
+ this.whenSyncedWithRemote = whenSyncedWithRemote;
945
+ this.onClose = onClose;
946
+ }
947
+ async close() {
948
+ await this.onClose(this.docId, this.doc);
949
+ }
950
+ };
951
+ function matchesQuery(docId, _metadata, query) {
952
+ if (!query) return true;
953
+ if (query.prefix && !docId.startsWith(query.prefix)) return false;
954
+ if (query.start && docId < query.start) return false;
955
+ if (query.end && docId > query.end) return false;
956
+ return true;
957
+ }
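matchesQuery (above) filters listDoc results purely by docId: an optional prefix plus an inclusive start/end range compared lexicographically. For example (illustrative ids):

matchesQuery("notes/2024-01", {}, { prefix: "notes/" }); // true
matchesQuery("notes/2024-01", {}, { start: "notes/2024-06" }); // false: below the range start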
958
+ var LoroRepo = class {
959
+ options;
960
+ transport;
961
+ storage;
962
+ assetTransport;
963
+ docFactory;
964
+ metaFlock = new __loro_dev_flock.Flock();
965
+ metadata = /* @__PURE__ */ new Map();
966
+ docs = /* @__PURE__ */ new Map();
967
+ docRefs = /* @__PURE__ */ new Map();
968
+ docSubscriptions = /* @__PURE__ */ new Map();
969
+ docAssets = /* @__PURE__ */ new Map();
970
+ assets = /* @__PURE__ */ new Map();
971
+ orphanedAssets = /* @__PURE__ */ new Map();
972
+ assetToDocRefs = /* @__PURE__ */ new Map();
973
+ docFrontierKeys = /* @__PURE__ */ new Map();
974
+ docFrontierUpdates = /* @__PURE__ */ new Map();
975
+ docPersistedVersions = /* @__PURE__ */ new Map();
976
+ docFrontierDebounceMs;
977
+ watchers = /* @__PURE__ */ new Set();
978
+ eventByStack = [];
979
+ metaRoomSubscription;
980
+ unsubscribeMetaFlock;
981
+ readyPromise;
982
+ constructor(options) {
983
+ this.options = options;
984
+ this.transport = options.transportAdapter;
985
+ this.storage = options.storageAdapter;
986
+ this.assetTransport = options.assetTransportAdapter;
987
+ this.docFactory = options.docFactory ?? (async () => new loro_crdt.LoroDoc());
988
+ const configuredDebounce = options.docFrontierDebounceMs;
989
+ this.docFrontierDebounceMs = typeof configuredDebounce === "number" && Number.isFinite(configuredDebounce) && configuredDebounce >= 0 ? configuredDebounce : DEFAULT_DOC_FRONTIER_DEBOUNCE_MS;
990
+ }
991
+ async ready() {
992
+ if (!this.readyPromise) this.readyPromise = this.initialize();
993
+ await this.readyPromise;
994
+ }
995
+ async initialize() {
996
+ if (this.storage) {
997
+ const snapshot = await this.storage.loadMeta();
998
+ if (snapshot) this.metaFlock = snapshot;
999
+ }
1000
+ this.hydrateMetadataFromFlock("sync");
1001
+ }
1002
+ async sync(options = {}) {
1003
+ await this.ready();
1004
+ const { scope = "full", docIds } = options;
1005
+ if (!this.transport) return;
1006
+ if (!this.transport.isConnected()) await this.transport.connect();
1007
+ if (scope === "meta" || scope === "full") {
1008
+ this.pushEventBy("sync");
1009
+ const recordedEvents = [];
1010
+ const unsubscribe = this.metaFlock.subscribe((batch) => {
1011
+ if (batch.source === "local") return;
1012
+ recordedEvents.push(...batch.events);
1013
+ });
1014
+ try {
1015
+ if (!(await this.transport.syncMeta(this.metaFlock)).ok) throw new Error("Metadata sync failed");
1016
+ if (recordedEvents.length > 0) this.applyMetaFlockEvents(recordedEvents, "sync");
1017
+ else this.hydrateMetadataFromFlock("sync");
1018
+ await this.persistMeta();
1019
+ } finally {
1020
+ unsubscribe();
1021
+ this.popEventBy();
1022
+ }
1023
+ }
1024
+ if (scope === "doc" || scope === "full") {
1025
+ const targets = docIds ?? Array.from(this.metadata.keys());
1026
+ for (const docId of targets) {
1027
+ const doc = await this.ensureDoc(docId);
1028
+ this.pushEventBy("sync");
1029
+ try {
1030
+ if (!(await this.transport.syncDoc(docId, doc)).ok) throw new Error(`Document sync failed for ${docId}`);
1031
+ } finally {
1032
+ this.popEventBy();
1033
+ }
1034
+ await this.persistDoc(docId, doc);
1035
+ await this.updateDocFrontiers(docId, doc, "sync");
1036
+ }
1037
+ }
1038
+ }
1039
+ refreshDocMetadataEntry(docId, by) {
1040
+ const previous = this.metadata.get(docId);
1041
+ const next = this.readDocMetadataFromFlock(docId);
1042
+ if (!next) {
1043
+ if (previous) {
1044
+ this.metadata.delete(docId);
1045
+ this.emit({
1046
+ kind: "doc-metadata",
1047
+ docId,
1048
+ patch: {},
1049
+ by
1050
+ });
1051
+ }
1052
+ return;
1053
+ }
1054
+ this.metadata.set(docId, next);
1055
+ const patch = diffJsonObjects(previous, next);
1056
+ if (!previous || Object.keys(patch).length > 0) this.emit({
1057
+ kind: "doc-metadata",
1058
+ docId,
1059
+ patch,
1060
+ by
1061
+ });
1062
+ }
1063
+ refreshDocAssetsEntry(docId, by) {
1064
+ const mapping = this.readDocAssetsFromFlock(docId);
1065
+ const previous = this.docAssets.get(docId);
1066
+ if (!mapping.size) {
1067
+ if (previous?.size) {
1068
+ this.docAssets.delete(docId);
1069
+ for (const assetId of previous.keys()) {
1070
+ this.emit({
1071
+ kind: "asset-unlink",
1072
+ docId,
1073
+ assetId,
1074
+ by
1075
+ });
1076
+ if (!Array.from(this.docAssets.values()).some((assets) => assets.has(assetId))) {
1077
+ const record = this.assets.get(assetId);
1078
+ if (record) {
1079
+ const deletedAt = this.orphanedAssets.get(assetId)?.deletedAt ?? Date.now();
1080
+ this.orphanedAssets.set(assetId, {
1081
+ metadata: record.metadata,
1082
+ deletedAt
1083
+ });
1084
+ }
1085
+ }
1086
+ }
1087
+ }
1088
+ return;
1089
+ }
1090
+ this.docAssets.set(docId, mapping);
1091
+ const added = [];
1092
+ const removed = [];
1093
+ if (previous) {
1094
+ for (const assetId of previous.keys()) if (!mapping.has(assetId)) removed.push(assetId);
1095
+ }
1096
+ for (const assetId of mapping.keys()) if (!previous || !previous.has(assetId)) added.push(assetId);
1097
+ for (const assetId of removed) {
1098
+ this.emit({
1099
+ kind: "asset-unlink",
1100
+ docId,
1101
+ assetId,
1102
+ by
1103
+ });
1104
+ if (!Array.from(this.docAssets.values()).some((assets) => assets.has(assetId))) {
1105
+ const record = this.assets.get(assetId);
1106
+ if (record) {
1107
+ const deletedAt = this.orphanedAssets.get(assetId)?.deletedAt ?? Date.now();
1108
+ this.orphanedAssets.set(assetId, {
1109
+ metadata: record.metadata,
1110
+ deletedAt
1111
+ });
1112
+ }
1113
+ }
1114
+ }
1115
+ for (const assetId of added) this.emit({
1116
+ kind: "asset-link",
1117
+ docId,
1118
+ assetId,
1119
+ by
1120
+ });
1121
+ }
1122
+ refreshAssetMetadataEntry(assetId, by) {
1123
+ const previous = this.assets.get(assetId);
1124
+ const metadata = assetMetaFromJson(this.metaFlock.get(["a", assetId]));
1125
+ if (!metadata) {
1126
+ this.handleAssetRemoval(assetId, by);
1127
+ return;
1128
+ }
1129
+ const existingData = previous?.data;
1130
+ this.rememberAsset(metadata, existingData);
1131
+ for (const assets of this.docAssets.values()) if (assets.has(assetId)) assets.set(assetId, cloneRepoAssetMetadata(metadata));
1132
+ if (!previous || !assetMetadataEqual(previous.metadata, metadata)) this.emit({
1133
+ kind: "asset-metadata",
1134
+ asset: this.createAssetDownload(assetId, metadata, existingData),
1135
+ by
1136
+ });
1137
+ }
1138
+ refreshDocFrontierKeys(docId) {
1139
+ const rows = this.metaFlock.scan({ prefix: ["f", docId] });
1140
+ const keys = /* @__PURE__ */ new Set();
1141
+ for (const row of rows) {
1142
+ if (!Array.isArray(row.key) || row.key.length < 3) continue;
1143
+ const frontierKey = row.key[2];
1144
+ if (typeof frontierKey === "string") keys.add(frontierKey);
1145
+ }
1146
+ if (keys.size > 0) this.docFrontierKeys.set(docId, keys);
1147
+ else this.docFrontierKeys.delete(docId);
1148
+ }
1149
+ readDocMetadataFromFlock(docId) {
1150
+ const rows = this.metaFlock.scan({ prefix: ["m", docId] });
1151
+ if (!rows.length) return void 0;
1152
+ const docMeta = {};
1153
+ let populated = false;
1154
+ for (const row of rows) {
1155
+ if (!Array.isArray(row.key) || row.key.length < 2) continue;
1156
+ if (row.key.length === 2) {
1157
+ const obj = asJsonObject(row.value);
1158
+ if (!obj) continue;
1159
+ for (const [field, value] of Object.entries(obj)) {
1160
+ const cloned = cloneJsonValue(value);
1161
+ if (cloned !== void 0) {
1162
+ docMeta[field] = cloned;
1163
+ populated = true;
1164
+ }
1165
+ }
1166
+ continue;
1167
+ }
1168
+ const fieldKey = row.key[2];
1169
+ if (typeof fieldKey !== "string") continue;
1170
+ if (fieldKey === "$tombstone") {
1171
+ docMeta.tombstone = Boolean(row.value);
1172
+ populated = true;
1173
+ continue;
1174
+ }
1175
+ const jsonValue = cloneJsonValue(row.value);
1176
+ if (jsonValue === void 0) continue;
1177
+ docMeta[fieldKey] = jsonValue;
1178
+ populated = true;
1179
+ }
1180
+ return populated ? docMeta : void 0;
1181
+ }
1182
+ readDocAssetsFromFlock(docId) {
1183
+ const rows = this.metaFlock.scan({ prefix: ["ld", docId] });
1184
+ const mapping = /* @__PURE__ */ new Map();
1185
+ for (const row of rows) {
1186
+ if (!Array.isArray(row.key) || row.key.length < 3) continue;
1187
+ const assetId = row.key[2];
1188
+ if (typeof assetId !== "string") continue;
1189
+ if (!(row.value !== void 0 && row.value !== null && row.value !== false)) continue;
1190
+ let metadata = this.assets.get(assetId)?.metadata;
1191
+ if (!metadata) {
1192
+ metadata = this.readAssetMetadataFromFlock(assetId);
1193
+ if (!metadata) continue;
1194
+ this.rememberAsset(metadata);
1195
+ }
1196
+ mapping.set(assetId, cloneRepoAssetMetadata(metadata));
1197
+ }
1198
+ return mapping;
1199
+ }
1200
+ readAssetMetadataFromFlock(assetId) {
1201
+ return assetMetaFromJson(this.metaFlock.get(["a", assetId]));
1202
+ }
1203
+ handleAssetRemoval(assetId, by) {
1204
+ const record = this.assets.get(assetId);
1205
+ if (!record) return;
1206
+ this.assets.delete(assetId);
1207
+ const deletedAt = this.orphanedAssets.get(assetId)?.deletedAt ?? Date.now();
1208
+ this.orphanedAssets.set(assetId, {
1209
+ metadata: record.metadata,
1210
+ deletedAt
1211
+ });
1212
+ const affectedDocs = [];
1213
+ for (const [docId, assets] of this.docAssets) if (assets.delete(assetId)) {
1214
+ if (assets.size === 0) this.docAssets.delete(docId);
1215
+ affectedDocs.push(docId);
1216
+ }
1217
+ for (const docId of affectedDocs) this.emit({
1218
+ kind: "asset-unlink",
1219
+ docId,
1220
+ assetId,
1221
+ by
1222
+ });
1223
+ }
1224
+ emit(event) {
1225
+ for (const entry of this.watchers) if (this.shouldNotify(entry.filter, event)) entry.listener(event);
1226
+ }
1227
+ async joinMetaRoom(params) {
1228
+ await this.ready();
1229
+ if (!this.transport) throw new Error("Transport adapter not configured");
1230
+ if (!this.transport.isConnected()) await this.transport.connect();
1231
+ if (this.metaRoomSubscription) return this.metaRoomSubscription;
1232
+ this.ensureMetaLiveMonitor();
1233
+ const subscription = this.transport.joinMetaRoom(this.metaFlock, params);
1234
+ const wrapped = {
1235
+ unsubscribe: () => {
1236
+ subscription.unsubscribe();
1237
+ if (this.metaRoomSubscription === wrapped) this.metaRoomSubscription = void 0;
1238
+ if (this.unsubscribeMetaFlock) {
1239
+ this.unsubscribeMetaFlock();
1240
+ this.unsubscribeMetaFlock = void 0;
1241
+ }
1242
+ },
1243
+ firstSyncedWithRemote: subscription.firstSyncedWithRemote,
1244
+ get connected() {
1245
+ return subscription.connected;
1246
+ }
1247
+ };
1248
+ this.metaRoomSubscription = wrapped;
1249
+ subscription.firstSyncedWithRemote.then(async () => {
1250
+ const by = this.resolveEventBy("live");
1251
+ this.hydrateMetadataFromFlock(by);
1252
+ await this.persistMeta();
1253
+ }).catch(logAsyncError("meta room first sync"));
1254
+ return wrapped;
1255
+ }
1256
+ async joinDocRoom(docId, params) {
1257
+ await this.ready();
1258
+ if (!this.transport) throw new Error("Transport adapter not configured");
1259
+ if (!this.transport.isConnected()) await this.transport.connect();
1260
+ const doc = await this.ensureDoc(docId);
1261
+ const subscription = this.transport.joinDocRoom(docId, doc, params);
1262
+ subscription.firstSyncedWithRemote.catch(logAsyncError(`doc ${docId} first sync`));
1263
+ return subscription;
1264
+ }
1265
+ async close() {
1266
+ for (const unsubscribe of this.docSubscriptions.values()) try {
1267
+ unsubscribe();
1268
+ } catch {}
1269
+ this.docSubscriptions.clear();
1270
+ this.metaRoomSubscription?.unsubscribe();
1271
+ this.metaRoomSubscription = void 0;
1272
+ if (this.unsubscribeMetaFlock) {
1273
+ this.unsubscribeMetaFlock();
1274
+ this.unsubscribeMetaFlock = void 0;
1275
+ }
1276
+ const pendingDocIds = Array.from(this.docFrontierUpdates.keys());
1277
+ for (const docId of pendingDocIds) try {
1278
+ await this.flushScheduledDocFrontierUpdate(docId);
1279
+ } catch (error) {
1280
+ logAsyncError(`doc ${docId} frontier flush on close`)(error);
1281
+ }
1282
+ this.docFrontierUpdates.clear();
1283
+ this.watchers.clear();
1284
+ this.docs.clear();
1285
+ this.docRefs.clear();
1286
+ this.metadata.clear();
1287
+ this.docAssets.clear();
1288
+ this.assets.clear();
1289
+ this.docFrontierKeys.clear();
1290
+ this.docPersistedVersions.clear();
1291
+ this.readyPromise = void 0;
1292
+ await this.transport?.close();
1293
+ }
1294
+ async upsertDocMeta(docId, patch, _options = {}) {
1295
+ await this.ready();
1296
+ const base = this.metadata.get(docId);
1297
+ const next = base ? cloneJsonObject(base) : {};
1298
+ const outPatch = {};
1299
+ let changed = false;
1300
+ const patchObject = patch;
1301
+ for (const key of Object.keys(patchObject)) {
1302
+ const rawValue = patchObject[key];
1303
+ if (rawValue === void 0) continue;
1304
+ let canonical;
1305
+ if (key === "tombstone") canonical = Boolean(rawValue);
1306
+ else canonical = cloneJsonValue(rawValue);
1307
+ if (canonical === void 0) continue;
1308
+ if (jsonEquals(base ? base[key] : void 0, canonical)) continue;
1309
+ const storageKey = key === "tombstone" ? "$tombstone" : key;
1310
+ this.metaFlock.put([
1311
+ "m",
1312
+ docId,
1313
+ storageKey
1314
+ ], canonical);
1315
+ const stored = cloneJsonValue(canonical) ?? canonical;
1316
+ next[key] = stored;
1317
+ outPatch[key] = cloneJsonValue(stored) ?? stored;
1318
+ changed = true;
1319
+ }
1320
+ if (!changed) {
1321
+ if (!this.metadata.has(docId)) this.metadata.set(docId, next);
1322
+ return;
1323
+ }
1324
+ this.metadata.set(docId, next);
1325
+ await this.persistMeta();
1326
+ this.emit({
1327
+ kind: "doc-metadata",
1328
+ docId,
1329
+ patch: cloneJsonObject(outPatch),
1330
+ by: "local"
1331
+ });
1332
+ }
1333
+ async getDocMeta(docId) {
1334
+ await this.ready();
1335
+ const metadata = this.metadata.get(docId);
1336
+ return metadata ? cloneJsonObject(metadata) : void 0;
1337
+ }
1338
+ async listDoc(query) {
1339
+ await this.ready();
1340
+ const entries = [];
1341
+ for (const [docId, metadata] of this.metadata.entries()) {
1342
+ if (!matchesQuery(docId, metadata, query)) continue;
1343
+ entries.push({
1344
+ docId,
1345
+ meta: cloneJsonObject(metadata)
1346
+ });
1347
+ }
1348
+ entries.sort((a, b) => a.docId < b.docId ? -1 : a.docId > b.docId ? 1 : 0);
1349
+ if (query?.limit !== void 0) return entries.slice(0, query.limit);
1350
+ return entries;
1351
+ }
1352
+ getMetaReplica() {
1353
+ return this.metaFlock;
1354
+ }
1355
+ watch(listener, filter = {}) {
1356
+ const entry = {
1357
+ listener,
1358
+ filter
1359
+ };
1360
+ this.watchers.add(entry);
1361
+ return { unsubscribe: () => {
1362
+ this.watchers.delete(entry);
1363
+ } };
1364
+ }
1365
+ /**
1366
+ * Opens the repo-managed collaborative document, registers it for persistence,
1367
+ * and schedules a doc-level sync so `whenSyncedWithRemote` resolves after remote backfills.
1368
+ */
1369
+ async openCollaborativeDoc(docId) {
1370
+ const doc = await this.ensureDoc(docId);
1371
+ const refs = this.docRefs.get(docId) ?? 0;
1372
+ this.docRefs.set(docId, refs + 1);
1373
+ return new RepoDocHandleImpl(docId, doc, this.whenDocInSyncWithRemote(docId), async (id, instance) => this.onDocHandleClose(id, instance));
1374
+ }
1375
+ /**
1376
+ * Opens a detached `LoroDoc` snapshot that never registers with the repo, meaning
1377
+ * it neither participates in remote subscriptions nor persists edits back to storage.
1378
+ */
1379
+ async openDetachedDoc(docId) {
1380
+ await this.ready();
1381
+ return new RepoDocHandleImpl(docId, await this.materializeDetachedDoc(docId), Promise.resolve(), async () => {});
1382
+ }
1383
+ async uploadAsset(params) {
1384
+ await this.ready();
1385
+ const bytes = await assetContentToUint8Array(params.content);
1386
+ const assetId = await computeSha256(bytes);
1387
+ if (params.assetId && params.assetId !== assetId) throw new Error("Provided assetId does not match content digest");
1388
+ const existing = this.assets.get(assetId);
1389
+ if (existing) {
1390
+ if (!existing.data) {
1391
+ const clone = bytes.slice();
1392
+ existing.data = clone;
1393
+ if (this.storage) await this.storage.save({
1394
+ type: "asset",
1395
+ assetId,
1396
+ data: clone.slice()
1397
+ });
1398
+ }
1399
+ let metadataMutated = false;
1400
+ const metadata$1 = { ...existing.metadata };
1401
+ if (params.mime && metadata$1.mime !== params.mime) {
1402
+ metadata$1.mime = params.mime;
1403
+ metadataMutated = true;
1404
+ }
1405
+ if (params.policy && metadata$1.policy !== params.policy) {
1406
+ metadata$1.policy = params.policy;
1407
+ metadataMutated = true;
1408
+ }
1409
+ if (params.tag && metadata$1.tag !== params.tag) {
1410
+ metadata$1.tag = params.tag;
1411
+ metadataMutated = true;
1412
+ }
1413
+ if (params.createdAt !== void 0 && metadata$1.createdAt !== params.createdAt) {
1414
+ metadata$1.createdAt = params.createdAt;
1415
+ metadataMutated = true;
1416
+ }
1417
+ if (metadataMutated) {
1418
+ existing.metadata = metadata$1;
1419
+ this.metaFlock.put(["a", assetId], assetMetaToJson(metadata$1));
1420
+ await this.persistMeta();
1421
+ this.emit({
1422
+ kind: "asset-metadata",
1423
+ asset: this.createAssetDownload(assetId, metadata$1, existing.data),
1424
+ by: "local"
1425
+ });
1426
+ }
1427
+ this.rememberAsset(existing.metadata);
1428
+ return assetId;
1429
+ }
1430
+ const metadata = {
1431
+ assetId,
1432
+ size: bytes.byteLength,
1433
+ createdAt: params.createdAt ?? Date.now(),
1434
+ ...params.mime ? { mime: params.mime } : {},
1435
+ ...params.policy ? { policy: params.policy } : {},
1436
+ ...params.tag ? { tag: params.tag } : {}
1437
+ };
1438
+ if (this.assetTransport) {
1439
+ let shouldUpload = true;
1440
+ if (typeof this.assetTransport.ensure === "function") shouldUpload = !await this.assetTransport.ensure(assetId);
1441
+ if (shouldUpload) await this.assetTransport.upload(assetId, bytes, {
1442
+ mime: params.mime,
1443
+ policy: params.policy,
1444
+ tag: params.tag
1445
+ });
1446
+ }
1447
+ const storedBytes = bytes.slice();
1448
+ if (this.storage) await this.storage.save({
1449
+ type: "asset",
1450
+ assetId,
1451
+ data: storedBytes.slice()
1452
+ });
1453
+ this.rememberAsset(metadata, storedBytes);
1454
+ for (const assets of this.docAssets.values()) if (assets.has(assetId)) assets.set(assetId, metadata);
1455
+ this.metaFlock.put(["a", assetId], assetMetaToJson(metadata));
1456
+ await this.persistMeta();
1457
+ this.emit({
1458
+ kind: "asset-metadata",
1459
+ asset: this.createAssetDownload(assetId, metadata, storedBytes),
1460
+ by: "local"
1461
+ });
1462
+ return assetId;
1463
+ }
1464
+ async whenDocInSyncWithRemote(docId) {
1465
+ await this.ready();
1466
+ await this.ensureDoc(docId);
1467
+ await this.sync({
1468
+ scope: "doc",
1469
+ docIds: [docId]
1470
+ });
1471
+ }
1472
+ async linkAsset(docId, params) {
1473
+ await this.ready();
1474
+ const bytes = await assetContentToUint8Array(params.content);
1475
+ const assetId = await computeSha256(bytes);
1476
+ if (params.assetId && params.assetId !== assetId) throw new Error("Provided assetId does not match content digest");
1477
+ let metadata;
1478
+ let storedBytes;
1479
+ let created = false;
1480
+ const existing = this.assets.get(assetId);
1481
+ if (existing) {
1482
+ metadata = existing.metadata;
1483
+ if (!existing.data) {
1484
+ const clone = bytes.slice();
1485
+ existing.data = clone;
1486
+ if (this.storage) await this.storage.save({
1487
+ type: "asset",
1488
+ assetId,
1489
+ data: clone.slice()
1490
+ });
1491
+ }
1492
+ let nextMetadata = metadata;
1493
+ let metadataMutated = false;
1494
+ if (params.mime && params.mime !== nextMetadata.mime) {
1495
+ nextMetadata = {
1496
+ ...nextMetadata,
1497
+ mime: params.mime
1498
+ };
1499
+ metadataMutated = true;
1500
+ }
1501
+ if (params.policy && params.policy !== nextMetadata.policy) {
1502
+ nextMetadata = {
1503
+ ...nextMetadata,
1504
+ policy: params.policy
1505
+ };
1506
+ metadataMutated = true;
1507
+ }
1508
+ if (params.tag && params.tag !== nextMetadata.tag) {
1509
+ nextMetadata = {
1510
+ ...nextMetadata,
1511
+ tag: params.tag
1512
+ };
1513
+ metadataMutated = true;
1514
+ }
1515
+ if (params.createdAt !== void 0 && params.createdAt !== nextMetadata.createdAt) {
1516
+ nextMetadata = {
1517
+ ...nextMetadata,
1518
+ createdAt: params.createdAt
1519
+ };
1520
+ metadataMutated = true;
1521
+ }
1522
+ if (metadataMutated) {
1523
+ existing.metadata = nextMetadata;
1524
+ metadata = nextMetadata;
1525
+ this.metaFlock.put(["a", assetId], assetMetaToJson(metadata));
1526
+ await this.persistMeta();
1527
+ this.emit({
1528
+ kind: "asset-metadata",
1529
+ asset: this.createAssetDownload(assetId, metadata, existing.data),
1530
+ by: "local"
1531
+ });
1532
+ } else metadata = existing.metadata;
1533
+ storedBytes = existing.data;
1534
+ this.rememberAsset(metadata);
1535
+ } else {
1536
+ metadata = {
1537
+ assetId,
1538
+ size: bytes.byteLength,
1539
+ createdAt: params.createdAt ?? Date.now(),
1540
+ ...params.mime ? { mime: params.mime } : {},
1541
+ ...params.policy ? { policy: params.policy } : {},
1542
+ ...params.tag ? { tag: params.tag } : {}
1543
+ };
1544
+ if (this.assetTransport) {
1545
+ let shouldUpload = true;
1546
+ if (typeof this.assetTransport.ensure === "function") shouldUpload = !await this.assetTransport.ensure(assetId);
1547
+ if (shouldUpload) await this.assetTransport.upload(assetId, bytes, {
1548
+ mime: params.mime,
1549
+ policy: params.policy,
1550
+ tag: params.tag
1551
+ });
1552
+ }
1553
+ storedBytes = bytes.slice();
1554
+ if (this.storage) await this.storage.save({
1555
+ type: "asset",
1556
+ assetId,
1557
+ data: storedBytes.slice()
1558
+ });
1559
+ this.rememberAsset(metadata, storedBytes);
1560
+ for (const assets of this.docAssets.values()) if (assets.has(assetId)) assets.set(assetId, metadata);
1561
+ this.metaFlock.put(["a", assetId], assetMetaToJson(metadata));
1562
+ created = true;
1563
+ }
1564
+ const mapping = this.docAssets.get(docId) ?? /* @__PURE__ */ new Map();
1565
+ mapping.set(assetId, metadata);
1566
+ this.docAssets.set(docId, mapping);
1567
+ const refs = this.assetToDocRefs.get(assetId) ?? /* @__PURE__ */ new Set();
1568
+ refs.add(docId);
1569
+ this.assetToDocRefs.set(assetId, refs);
1570
+ this.metaFlock.put([
1571
+ "ld",
1572
+ docId,
1573
+ assetId
1574
+ ], true);
1575
+ await this.persistMeta();
1576
+ this.emit({
1577
+ kind: "asset-link",
1578
+ docId,
1579
+ assetId,
1580
+ by: "local"
1581
+ });
1582
+ if (created) this.emit({
1583
+ kind: "asset-metadata",
1584
+ asset: this.createAssetDownload(assetId, metadata, storedBytes ?? bytes),
1585
+ by: "local"
1586
+ });
1587
+ return assetId;
1588
+ }
1589
+ async fetchAsset(assetId) {
1590
+ await this.ready();
1591
+ const { metadata, bytes } = await this.materializeAsset(assetId);
1592
+ return this.createAssetDownload(assetId, metadata, bytes);
1593
+ }
1594
+ async unlinkAsset(docId, assetId) {
1595
+ await this.ready();
1596
+ const mapping = this.docAssets.get(docId);
1597
+ if (!mapping || !mapping.has(assetId)) return;
1598
+ mapping.delete(assetId);
1599
+ if (mapping.size === 0) this.docAssets.delete(docId);
1600
+ this.metaFlock.delete([
1601
+ "ld",
1602
+ docId,
1603
+ assetId
1604
+ ]);
1605
+ const refs = this.assetToDocRefs.get(assetId);
1606
+ if (refs) {
1607
+ refs.delete(docId);
1608
+ if (refs.size === 0) {
1609
+ this.assetToDocRefs.delete(assetId);
1610
+ const record = this.assets.get(assetId);
1611
+ if (record) this.orphanedAssets.set(assetId, {
1612
+ metadata: record.metadata,
1613
+ deletedAt: Date.now()
1614
+ });
1615
+ this.metaFlock.delete(["a", assetId]);
1616
+ this.assets.delete(assetId);
1617
+ }
1618
+ }
1619
+ await this.persistMeta();
1620
+ this.emit({
1621
+ kind: "asset-unlink",
1622
+ docId,
1623
+ assetId,
1624
+ by: "local"
1625
+ });
1626
+ }
1627
+ async listAssets(docId) {
1628
+ await this.ready();
1629
+ const mapping = this.docAssets.get(docId);
1630
+ if (!mapping) return [];
1631
+ return Array.from(mapping.values()).map((asset) => ({ ...asset }));
1632
+ }
1633
+ async ensureAsset(assetId) {
1634
+ return this.fetchAsset(assetId);
1635
+ }
1636
+ createAssetDownload(assetId, metadata, initialBytes) {
1637
+ let cached = initialBytes ? initialBytes.slice() : void 0;
1638
+ return {
1639
+ assetId,
1640
+ size: metadata.size,
1641
+ createdAt: metadata.createdAt,
1642
+ mime: metadata.mime,
1643
+ policy: metadata.policy,
1644
+ tag: metadata.tag,
1645
+ content: async () => {
1646
+ if (!cached) cached = (await this.materializeAsset(assetId)).bytes.slice();
1647
+ return toReadableStream(cached.slice());
1648
+ }
1649
+ };
1650
+ }
1651
+ async materializeAsset(assetId) {
1652
+ let record = this.assets.get(assetId);
1653
+ if (record?.data) return {
1654
+ metadata: record.metadata,
1655
+ bytes: record.data.slice()
1656
+ };
1657
+ if (record && this.storage) {
1658
+ const stored = await this.storage.loadAsset(assetId);
1659
+ if (stored) {
1660
+ const clone = stored.slice();
1661
+ record.data = clone.slice();
1662
+ return {
1663
+ metadata: record.metadata,
1664
+ bytes: clone
1665
+ };
1666
+ }
1667
+ }
1668
+ if (!record && this.storage) {
1669
+ const stored = await this.storage.loadAsset(assetId);
1670
+ if (stored) {
1671
+ const metadata$1 = this.getAssetMetadata(assetId);
1672
+ if (!metadata$1) throw new Error(`Missing metadata for asset ${assetId}`);
1673
+ const clone = stored.slice();
1674
+ this.assets.set(assetId, {
1675
+ metadata: metadata$1,
1676
+ data: clone.slice()
1677
+ });
1678
+ this.updateDocAssetMetadata(assetId, metadata$1);
1679
+ return {
1680
+ metadata: metadata$1,
1681
+ bytes: clone
1682
+ };
1683
+ }
1684
+ }
1685
+ if (!this.assetTransport) throw new Error(`Asset ${assetId} is not available locally`);
1686
+ const remote = await this.assetTransport.fetch(assetId);
1687
+ if (!remote) throw new Error(`Asset ${assetId} missing from remote store`);
1688
+ const remoteBytes = await streamToUint8Array(await remote.content());
1689
+ const metadata = {
1690
+ assetId,
1691
+ size: remote.size,
1692
+ createdAt: remote.createdAt,
1693
+ ...remote.mime ? { mime: remote.mime } : {},
1694
+ ...remote.policy ? { policy: remote.policy } : {},
1695
+ ...remote.tag ? { tag: remote.tag } : {}
1696
+ };
1697
+ this.assets.set(assetId, {
1698
+ metadata,
1699
+ data: remoteBytes.slice()
1700
+ });
1701
+ this.updateDocAssetMetadata(assetId, metadata);
1702
+ this.metaFlock.put(["a", assetId], assetMetaToJson(metadata));
1703
+ await this.persistMeta();
1704
+ if (this.storage) await this.storage.save({
1705
+ type: "asset",
1706
+ assetId,
1707
+ data: remoteBytes.slice()
1708
+ });
1709
+ return {
1710
+ metadata,
1711
+ bytes: remoteBytes
1712
+ };
1713
+ }
1714
+ updateDocAssetMetadata(assetId, metadata) {
1715
+ for (const assets of this.docAssets.values()) if (assets.has(assetId)) assets.set(assetId, metadata);
1716
+ }
1717
+ async gcAssets(options = {}) {
1718
+ await this.ready();
1719
+ const { minKeepMs = 0 } = options;
1720
+ const now = Date.now();
1721
+ let removed = 0;
1722
+ for (const [assetId, orphan] of Array.from(this.orphanedAssets.entries())) {
1723
+ if (now - orphan.deletedAt < minKeepMs) continue;
1724
+ this.orphanedAssets.delete(assetId);
1725
+ if (this.storage?.deleteAsset) try {
1726
+ await this.storage.deleteAsset(assetId);
1727
+ } catch (error) {
1728
+ logAsyncError(`asset ${assetId} delete`)(error);
1729
+ }
1730
+ removed += 1;
1731
+ }
1732
+ return removed;
1733
+ }
1734
+ async onDocHandleClose(docId, doc) {
1735
+ const refs = this.docRefs.get(docId) ?? 0;
1736
+ if (refs <= 1) this.docRefs.delete(docId);
1737
+ else this.docRefs.set(docId, refs - 1);
1738
+ await this.persistDocUpdate(docId, doc);
1739
+ if (!await this.flushScheduledDocFrontierUpdate(docId)) await this.updateDocFrontiers(docId, doc, "local");
1740
+ }
1741
+ async ensureDoc(docId) {
1742
+ await this.ready();
1743
+ const cached = this.docs.get(docId);
1744
+ if (cached) {
1745
+ this.ensureDocSubscription(docId, cached);
1746
+ if (!this.docPersistedVersions.has(docId)) this.docPersistedVersions.set(docId, cached.version());
1747
+ return cached;
1748
+ }
1749
+ if (this.storage) {
1750
+ const stored = await this.storage.loadDoc(docId);
1751
+ if (stored) {
1752
+ this.registerDoc(docId, stored);
1753
+ return stored;
1754
+ }
1755
+ }
1756
+ const created = await this.docFactory(docId);
1757
+ this.registerDoc(docId, created);
1758
+ return created;
1759
+ }
1760
+ async materializeDetachedDoc(docId) {
1761
+ const doc = await this.docFactory(docId);
1762
+ const snapshot = await this.exportDocSnapshot(docId);
1763
+ if (snapshot) doc.import(snapshot);
1764
+ return doc;
1765
+ }
1766
+ async exportDocSnapshot(docId) {
1767
+ const cached = this.docs.get(docId);
1768
+ if (cached) return cached.export({ mode: "snapshot" });
1769
+ if (!this.storage) return;
1770
+ return (await this.storage.loadDoc(docId))?.export({ mode: "snapshot" });
1771
+ }
1772
+ async persistMeta() {
1773
+ if (!this.storage) return;
1774
+ const bundle = this.metaFlock.exportJson();
1775
+ const encoded = textEncoder.encode(JSON.stringify(bundle));
1776
+ await this.storage.save({
1777
+ type: "meta",
1778
+ update: encoded
1779
+ });
1780
+ }
1781
+ async persistDoc(docId, doc) {
1782
+ const previousVersion = this.docPersistedVersions.get(docId);
1783
+ const nextVersion = doc.version();
1784
+ if (!this.storage) {
1785
+ this.docPersistedVersions.set(docId, nextVersion);
1786
+ return;
1787
+ }
1788
+ const snapshot = doc.export({ mode: "snapshot" });
1789
+ this.docPersistedVersions.set(docId, nextVersion);
1790
+ try {
1791
+ await this.storage.save({
1792
+ type: "doc-snapshot",
1793
+ docId,
1794
+ snapshot
1795
+ });
1796
+ } catch (error) {
1797
+ if (previousVersion) this.docPersistedVersions.set(docId, previousVersion);
1798
+ else this.docPersistedVersions.delete(docId);
1799
+ throw error;
1800
+ }
1801
+ }
1802
+ async persistDocUpdate(docId, doc) {
1803
+ const previousVersion = this.docPersistedVersions.get(docId);
1804
+ const nextVersion = doc.version();
1805
+ if (!this.storage) {
1806
+ this.docPersistedVersions.set(docId, nextVersion);
1807
+ return;
1808
+ }
1809
+ if (!previousVersion) {
1810
+ await this.persistDoc(docId, doc);
1811
+ this.docPersistedVersions.set(docId, nextVersion);
1812
+ return;
1813
+ }
1814
+ const update = doc.export({
1815
+ mode: "update",
1816
+ from: previousVersion
1817
+ });
1818
+ if (!update.length) {
1819
+ this.docPersistedVersions.set(docId, nextVersion);
1820
+ return;
1821
+ }
1822
+ this.docPersistedVersions.set(docId, nextVersion);
1823
+ try {
1824
+ await this.storage.save({
1825
+ type: "doc-update",
1826
+ docId,
1827
+ update
1828
+ });
1829
+ } catch (error) {
1830
+ this.docPersistedVersions.set(docId, previousVersion);
1831
+ throw error;
1832
+ }
1833
+ }
1834
+ pushEventBy(by) {
1835
+ this.eventByStack.push(by);
1836
+ }
1837
+ popEventBy() {
1838
+ this.eventByStack.pop();
1839
+ }
1840
+ resolveEventBy(defaultBy) {
1841
+ const index = this.eventByStack.length - 1;
1842
+ return index >= 0 ? this.eventByStack[index] : defaultBy;
1843
+ }
1844
+ ensureMetaLiveMonitor() {
1845
+ if (this.unsubscribeMetaFlock) return;
1846
+ this.unsubscribeMetaFlock = this.metaFlock.subscribe((batch) => {
1847
+ if (batch.source === "local") return;
1848
+ const by = this.resolveEventBy("live");
1849
+ (async () => {
1850
+ this.applyMetaFlockEvents(batch.events, by);
1851
+ await this.persistMeta();
1852
+ })().catch(logAsyncError("meta live monitor sync"));
1853
+ });
1854
+ }
1855
+ applyMetaFlockEvents(events, by) {
1856
+ if (!events.length) return;
1857
+ const docMetadataIds = /* @__PURE__ */ new Set();
1858
+ const docAssetIds = /* @__PURE__ */ new Set();
1859
+ const docFrontiersIds = /* @__PURE__ */ new Set();
1860
+ const assetIds = /* @__PURE__ */ new Set();
1861
+ for (const event of events) {
1862
+ const key = event.key;
1863
+ if (!Array.isArray(key) || key.length === 0) continue;
1864
+ const root = key[0];
1865
+ if (root === "m") {
1866
+ const docId = key[1];
1867
+ if (typeof docId === "string") docMetadataIds.add(docId);
1868
+ } else if (root === "a") {
1869
+ const assetId = key[1];
1870
+ if (typeof assetId === "string") assetIds.add(assetId);
1871
+ } else if (root === "ld") {
1872
+ const docId = key[1];
1873
+ const assetId = key[2];
1874
+ if (typeof docId === "string") docAssetIds.add(docId);
1875
+ if (typeof assetId === "string") assetIds.add(assetId);
1876
+ } else if (root === "f") {
1877
+ const docId = key[1];
1878
+ if (typeof docId === "string") docFrontiersIds.add(docId);
1879
+ }
1880
+ }
1881
+ for (const assetId of assetIds) this.refreshAssetMetadataEntry(assetId, by);
1882
+ for (const docId of docMetadataIds) this.refreshDocMetadataEntry(docId, by);
1883
+ for (const docId of docAssetIds) this.refreshDocAssetsEntry(docId, by);
1884
+ for (const docId of docFrontiersIds) this.refreshDocFrontierKeys(docId);
1885
+ }
1886
+ registerDoc(docId, doc) {
1887
+ this.docs.set(docId, doc);
1888
+ this.docPersistedVersions.set(docId, doc.version());
1889
+ this.ensureDocSubscription(docId, doc);
1890
+ }
1891
+ ensureDocSubscription(docId, doc) {
1892
+ if (this.docSubscriptions.has(docId)) return;
1893
+ const unsubscribe = doc.subscribe((batch) => {
1894
+ const stackBy = this.resolveEventBy("local");
1895
+ const by = stackBy === "local" && batch.by === "import" ? "live" : stackBy;
1896
+ this.onDocEvent(docId, doc, batch, by);
1897
+ });
1898
+ if (typeof unsubscribe === "function") this.docSubscriptions.set(docId, unsubscribe);
1899
+ }
1900
+ rememberAsset(metadata, bytes) {
1901
+ const data = bytes ? bytes.slice() : this.assets.get(metadata.assetId)?.data;
1902
+ this.assets.set(metadata.assetId, {
1903
+ metadata,
1904
+ data
1905
+ });
1906
+ this.orphanedAssets.delete(metadata.assetId);
1907
+ }
1908
+ scheduleDocFrontierUpdate(docId, doc, by) {
1909
+ const existing = this.docFrontierUpdates.get(docId);
1910
+ const effectiveBy = existing ? this.mergeRepoEventBy(existing.by, by) : by;
1911
+ if (existing) clearTimeout(existing.timeout);
1912
+ const delay = this.docFrontierDebounceMs > 0 ? this.docFrontierDebounceMs : 0;
1913
+ const timeout = setTimeout(() => this.runScheduledDocFrontierUpdate(docId), delay);
1914
+ this.docFrontierUpdates.set(docId, {
1915
+ timeout,
1916
+ doc,
1917
+ by: effectiveBy
1918
+ });
1919
+ }
1920
+ mergeRepoEventBy(current, next) {
1921
+ if (current === next) return current;
1922
+ if (current === "live" || next === "live") return "live";
1923
+ if (current === "sync" || next === "sync") return "sync";
1924
+ return "local";
1925
+ }
1926
+ runScheduledDocFrontierUpdate(docId) {
1927
+ const pending = this.docFrontierUpdates.get(docId);
1928
+ if (!pending) return;
1929
+ this.docFrontierUpdates.delete(docId);
1930
+ this.pushEventBy(pending.by);
1931
+ (async () => {
1932
+ try {
1933
+ await this.updateDocFrontiers(docId, pending.doc, pending.by);
1934
+ } finally {
1935
+ this.popEventBy();
1936
+ }
1937
+ })().catch(logAsyncError(`doc ${docId} frontier debounce`));
1938
+ }
1939
+ async flushScheduledDocFrontierUpdate(docId) {
1940
+ const pending = this.docFrontierUpdates.get(docId);
1941
+ if (!pending) return false;
1942
+ clearTimeout(pending.timeout);
1943
+ this.docFrontierUpdates.delete(docId);
1944
+ this.pushEventBy(pending.by);
1945
+ try {
1946
+ await this.updateDocFrontiers(docId, pending.doc, pending.by);
1947
+ } finally {
1948
+ this.popEventBy();
1949
+ }
1950
+ return true;
1951
+ }
1952
+ onDocEvent(docId, doc, _batch, by) {
1953
+ (async () => {
1954
+ const a = this.persistDocUpdate(docId, doc);
1955
+ if (by === "local") {
1956
+ this.scheduleDocFrontierUpdate(docId, doc, by);
1957
+ await a;
1958
+ return;
1959
+ }
1960
+ const b = this.flushScheduledDocFrontierUpdate(docId);
1961
+ const c = this.updateDocFrontiers(docId, doc, by);
1962
+ await Promise.all([
1963
+ a,
1964
+ b,
1965
+ c
1966
+ ]);
1967
+ })().catch(logAsyncError(`doc ${docId} event processing`));
1968
+ }
1969
+ getAssetMetadata(assetId) {
1970
+ const record = this.assets.get(assetId);
1971
+ if (record) return record.metadata;
1972
+ for (const assets of this.docAssets.values()) {
1973
+ const metadata = assets.get(assetId);
1974
+ if (metadata) return metadata;
1975
+ }
1976
+ }
1977
+ async updateDocFrontiers(docId, doc, defaultBy = "local") {
1978
+ const { json, key } = canonicalizeVersionVector(computeVersionVector(doc));
1979
+ const existingKeys = this.docFrontierKeys.get(docId) ?? /* @__PURE__ */ new Set();
1980
+ let mutated = false;
1981
+ if (existingKeys.size !== 1 || !existingKeys.has(key)) {
1982
+ for (const entry of existingKeys) this.metaFlock.delete([
1983
+ "f",
1984
+ docId,
1985
+ entry
1986
+ ]);
1987
+ this.metaFlock.put([
1988
+ "f",
1989
+ docId,
1990
+ key
1991
+ ], json);
1992
+ this.docFrontierKeys.set(docId, new Set([key]));
1993
+ mutated = true;
1994
+ }
1995
+ if (mutated) await this.persistMeta();
1996
+ const by = this.resolveEventBy(defaultBy);
1997
+ const frontiers = getDocFrontiers(doc);
1998
+ this.emit({
1999
+ kind: "doc-frontiers",
2000
+ docId,
2001
+ frontiers,
2002
+ by
2003
+ });
2004
+ }
2005
+ hydrateMetadataFromFlock(by) {
2006
+ const prevMetadata = new Map(this.metadata);
2007
+ const prevDocAssets = new Map(this.docAssets);
2008
+ const prevAssets = new Map(this.assets);
2009
+ const nextMetadata = /* @__PURE__ */ new Map();
2010
+ const metadataRows = this.metaFlock.scan({ prefix: ["m"] });
2011
+ for (const row of metadataRows) {
2012
+ if (!Array.isArray(row.key) || row.key.length < 2) continue;
2013
+ const docId = row.key[1];
2014
+ if (typeof docId !== "string") continue;
2015
+ let docMeta = nextMetadata.get(docId);
2016
+ if (!docMeta) {
2017
+ docMeta = {};
2018
+ nextMetadata.set(docId, docMeta);
2019
+ }
2020
+ if (row.key.length === 2) {
2021
+ const obj = asJsonObject(row.value);
2022
+ if (!obj) continue;
2023
+ for (const [field, value] of Object.entries(obj)) {
2024
+ const cloned = cloneJsonValue(value);
2025
+ if (cloned !== void 0) docMeta[field] = cloned;
2026
+ }
2027
+ continue;
2028
+ }
2029
+ const fieldKey = row.key[2];
2030
+ if (typeof fieldKey !== "string") continue;
2031
+ if (fieldKey === "$tombstone") {
2032
+ docMeta.tombstone = Boolean(row.value);
2033
+ continue;
2034
+ }
2035
+ const jsonValue = cloneJsonValue(row.value);
2036
+ if (jsonValue === void 0) continue;
2037
+ docMeta[fieldKey] = jsonValue;
2038
+ }
2039
+ const nextAssets = /* @__PURE__ */ new Map();
2040
+ const assetRows = this.metaFlock.scan({ prefix: ["a"] });
2041
+ for (const row of assetRows) {
2042
+ if (!Array.isArray(row.key) || row.key.length < 2) continue;
2043
+ const assetId = row.key[1];
2044
+ if (typeof assetId !== "string") continue;
2045
+ const metadata = assetMetaFromJson(row.value);
2046
+ if (!metadata) continue;
2047
+ const existing = this.assets.get(assetId);
2048
+ nextAssets.set(assetId, {
2049
+ metadata,
2050
+ data: existing?.data
2051
+ });
2052
+ }
2053
+ const nextDocAssets = /* @__PURE__ */ new Map();
2054
+ const linkRows = this.metaFlock.scan({ prefix: ["ld"] });
2055
+ for (const row of linkRows) {
2056
+ if (!Array.isArray(row.key) || row.key.length < 3) continue;
2057
+ const docId = row.key[1];
2058
+ const assetId = row.key[2];
2059
+ if (typeof docId !== "string" || typeof assetId !== "string") continue;
2060
+ const metadata = nextAssets.get(assetId)?.metadata;
2061
+ if (!metadata) continue;
2062
+ const mapping = nextDocAssets.get(docId) ?? /* @__PURE__ */ new Map();
2063
+ mapping.set(assetId, metadata);
2064
+ nextDocAssets.set(docId, mapping);
2065
+ }
2066
+ const nextFrontierKeys = /* @__PURE__ */ new Map();
2067
+ const frontierRows = this.metaFlock.scan({ prefix: ["f"] });
2068
+ for (const row of frontierRows) {
2069
+ if (!Array.isArray(row.key) || row.key.length < 3) continue;
2070
+ const docId = row.key[1];
2071
+ const frontierKey = row.key[2];
2072
+ if (typeof docId !== "string" || typeof frontierKey !== "string") continue;
2073
+ const set = nextFrontierKeys.get(docId) ?? /* @__PURE__ */ new Set();
2074
+ set.add(frontierKey);
2075
+ nextFrontierKeys.set(docId, set);
2076
+ }
2077
+ const removedAssets = [];
2078
+ for (const [assetId, record] of prevAssets) if (!nextAssets.has(assetId)) removedAssets.push([assetId, record]);
2079
+ if (removedAssets.length > 0) {
2080
+ const now = Date.now();
2081
+ for (const [assetId, record] of removedAssets) {
2082
+ const deletedAt = this.orphanedAssets.get(assetId)?.deletedAt ?? now;
2083
+ this.orphanedAssets.set(assetId, {
2084
+ metadata: record.metadata,
2085
+ deletedAt
2086
+ });
2087
+ }
2088
+ }
2089
+ this.metadata.clear();
2090
+ for (const [docId, meta] of nextMetadata) this.metadata.set(docId, meta);
2091
+ this.docAssets.clear();
2092
+ for (const [docId, assets] of nextDocAssets) this.docAssets.set(docId, assets);
2093
+ this.assetToDocRefs.clear();
2094
+ for (const [docId, assets] of nextDocAssets) for (const assetId of assets.keys()) {
2095
+ const refs = this.assetToDocRefs.get(assetId) ?? /* @__PURE__ */ new Set();
2096
+ refs.add(docId);
2097
+ this.assetToDocRefs.set(assetId, refs);
2098
+ }
2099
+ this.assets.clear();
2100
+ for (const record of nextAssets.values()) this.rememberAsset(record.metadata, record.data);
2101
+ this.docFrontierKeys.clear();
2102
+ for (const [docId, keys] of nextFrontierKeys) this.docFrontierKeys.set(docId, keys);
2103
+ const docIds = new Set([...prevMetadata.keys(), ...nextMetadata.keys()]);
2104
+ for (const docId of docIds) {
2105
+ const previous = prevMetadata.get(docId);
2106
+ const current = nextMetadata.get(docId);
2107
+ if (!current) {
2108
+ if (previous) this.emit({
2109
+ kind: "doc-metadata",
2110
+ docId,
2111
+ patch: {},
2112
+ by
2113
+ });
2114
+ continue;
2115
+ }
2116
+ const patch = diffJsonObjects(previous, current);
2117
+ if (Object.keys(patch).length > 0) this.emit({
2118
+ kind: "doc-metadata",
2119
+ docId,
2120
+ patch,
2121
+ by
2122
+ });
2123
+ }
2124
+ for (const [assetId, record] of nextAssets) {
2125
+ const previous = prevAssets.get(assetId)?.metadata;
2126
+ if (!assetMetadataEqual(previous, record.metadata)) this.emit({
2127
+ kind: "asset-metadata",
2128
+ asset: this.createAssetDownload(assetId, record.metadata, record.data),
2129
+ by
2130
+ });
2131
+ }
2132
+ for (const [docId, assets] of nextDocAssets) {
2133
+ const previous = prevDocAssets.get(docId);
2134
+ for (const assetId of assets.keys()) if (!previous || !previous.has(assetId)) this.emit({
2135
+ kind: "asset-link",
2136
+ docId,
2137
+ assetId,
2138
+ by
2139
+ });
2140
+ }
2141
+ for (const [docId, assets] of prevDocAssets) {
2142
+ const current = nextDocAssets.get(docId);
2143
+ for (const assetId of assets.keys()) if (!current || !current.has(assetId)) this.emit({
2144
+ kind: "asset-unlink",
2145
+ docId,
2146
+ assetId,
2147
+ by
2148
+ });
2149
+ }
2150
+ }
2151
+ shouldNotify(filter, event) {
2152
+ if (!filter.docIds && !filter.kinds && !filter.metadataFields && !filter.by) return true;
2153
+ if (filter.kinds && !filter.kinds.includes(event.kind)) return false;
2154
+ if (filter.by && !filter.by.includes(event.by)) return false;
2155
+ const docId = (() => {
2156
+ if (event.kind === "doc-metadata" || event.kind === "doc-frontiers") return event.docId;
2157
+ if (event.kind === "asset-link" || event.kind === "asset-unlink") return event.docId;
2158
+ })();
2159
+ if (filter.docIds && docId && !filter.docIds.includes(docId)) return false;
2160
+ if (filter.docIds && !docId) return false;
2161
+ if (filter.metadataFields && event.kind === "doc-metadata") {
2162
+ if (!Object.keys(event.patch).some((key) => filter.metadataFields?.includes(key))) return false;
2163
+ }
2164
+ return true;
2165
+ }
2166
+ };
2167
+
2168
+ //#endregion
2169
+ exports.BroadcastChannelTransportAdapter = BroadcastChannelTransportAdapter;
2170
+ exports.IndexedDBStorageAdaptor = IndexedDBStorageAdaptor;
2171
+ exports.LoroRepo = LoroRepo;
2172
+ exports.WebSocketTransportAdapter = WebSocketTransportAdapter;
2173
+ //# sourceMappingURL=index.cjs.map
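
A minimal usage sketch of the exported API follows, for orientation only. The repository methods it calls (linkAsset, listAssets, fetchAsset, whenDocInSyncWithRemote, unlinkAsset, gcAssets) appear in the bundle above; the constructor option names passed to LoroRepo, WebSocketTransportAdapter, and IndexedDBStorageAdaptor are assumptions and may not match the package's real option names.

// Hedged sketch: constructor option names (storage, transport, url) are assumptions;
// the method calls mirror the bundled code above.
const { LoroRepo, WebSocketTransportAdapter, IndexedDBStorageAdaptor } = require("loro-repo");

async function assetRoundTrip() {
  const repo = new LoroRepo({
    storage: new IndexedDBStorageAdaptor(),                                     // assumed option name
    transport: new WebSocketTransportAdapter({ url: "wss://example.invalid" })  // assumed options shape
  });

  // Link raw bytes to a document; the returned asset id is the SHA-256 digest of
  // the content, and a mismatching params.assetId is rejected by linkAsset.
  const assetId = await repo.linkAsset("doc-1", {
    content: new Uint8Array([1, 2, 3]),
    mime: "application/octet-stream"
  });

  // Linked assets are listed per document; fetchAsset resolves the bytes from the
  // in-memory cache, persistent storage, or the remote asset transport.
  const linked = await repo.listAssets("doc-1");
  const download = await repo.fetchAsset(assetId);
  const stream = await download.content(); // ReadableStream over the asset bytes

  // Wait for the doc to sync with the remote, then unlink; once no document
  // references the asset it becomes orphaned and gcAssets can reclaim it.
  await repo.whenDocInSyncWithRemote("doc-1");
  await repo.unlinkAsset("doc-1", assetId);
  await repo.gcAssets({ minKeepMs: 0 });
}

assetRoundTrip().catch(console.error);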