loro-repo 0.1.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,16 +1,58 @@
1
1
  import { Flock } from "@loro-dev/flock";
2
- import { LoroDoc } from "loro-crdt";
3
2
  import { FlockAdaptor, LoroAdaptor } from "loro-adaptors";
4
3
  import { CrdtType, bytesToHex } from "loro-protocol";
5
4
  import { LoroWebsocketClient } from "loro-websocket";
5
+ import { LoroDoc } from "loro-crdt";
6
+ import { promises } from "node:fs";
7
+ import * as path from "node:path";
8
+ import { randomUUID } from "node:crypto";
6
9
 
7
10
  //#region src/loro-adaptor.ts
8
11
  function createRepoFlockAdaptorFromDoc(flock, config = {}) {
9
12
  return new FlockAdaptor(flock, config);
10
13
  }
11
14
 
15
+ //#endregion
16
+ //#region src/internal/debug.ts
17
+ const getEnv = () => {
18
+ if (typeof globalThis !== "object" || globalThis === null) return;
19
+ return globalThis.process?.env;
20
+ };
21
+ const rawNamespaceConfig = (getEnv()?.LORO_REPO_DEBUG ?? "").trim();
22
+ const normalizedNamespaces = rawNamespaceConfig.length > 0 ? rawNamespaceConfig.split(/[\s,]+/).map((token) => token.toLowerCase()).filter(Boolean) : [];
23
+ const wildcardTokens = new Set([
24
+ "*",
25
+ "1",
26
+ "true",
27
+ "all"
28
+ ]);
29
+ const namespaceSet = new Set(normalizedNamespaces);
30
+ const hasWildcard = namespaceSet.size > 0 && normalizedNamespaces.some((token) => wildcardTokens.has(token));
31
+ const isDebugEnabled = (namespace) => {
32
+ if (!namespaceSet.size) return false;
33
+ if (!namespace) return hasWildcard;
34
+ const normalized = namespace.toLowerCase();
35
+ if (hasWildcard) return true;
36
+ if (namespaceSet.has(normalized)) return true;
37
+ const [root] = normalized.split(":");
38
+ return namespaceSet.has(root);
39
+ };
40
+ const createDebugLogger = (namespace) => {
41
+ const normalized = namespace.toLowerCase();
42
+ return (...args) => {
43
+ if (!isDebugEnabled(normalized)) return;
44
+ const prefix = `[loro-repo:${namespace}]`;
45
+ if (args.length === 0) {
46
+ console.info(prefix);
47
+ return;
48
+ }
49
+ console.info(prefix, ...args);
50
+ };
51
+ };
52
+
12
53
  //#endregion
13
54
  //#region src/transport/websocket.ts
55
+ const debug = createDebugLogger("transport:websocket");
14
56
  function withTimeout(promise, timeoutMs) {
15
57
  if (!timeoutMs || timeoutMs <= 0) return promise;
16
58
  return new Promise((resolve, reject) => {
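
Reviewer note on the hunk above: 0.3.0 adds an internal debug logger (src/internal/debug.ts) and wires it into the websocket transport. A minimal usage sketch follows; the env-var parsing and the `[loro-repo:...]` prefix come straight from the diff, while the way the package is imported in the snippet is an assumption about a typical Node ESM setup, not something the diff shows.

    // Sketch only. LORO_REPO_DEBUG is read once when the module is evaluated, so it must
    // be set before loro-repo is imported (hence the dynamic import here). Tokens are
    // split on commas/whitespace and matched case-insensitively against the full
    // namespace ("transport:websocket"), its root ("transport"), or a wildcard token
    // ("*", "1", "true", "all").
    process.env.LORO_REPO_DEBUG = "transport:websocket";
    const loroRepo = await import("loro-repo"); // assumed package entry point for the bundle above
    // Matching calls now log via console.info with a "[loro-repo:transport:websocket]" prefix.
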
@@ -55,30 +97,51 @@ var WebSocketTransportAdapter = class {
55
97
  }
56
98
  async connect(_options) {
57
99
  const client = this.ensureClient();
58
- await client.connect();
59
- await client.waitConnected();
100
+ debug("connect requested", { status: client.getStatus() });
101
+ try {
102
+ await client.connect();
103
+ debug("client.connect resolved");
104
+ await client.waitConnected();
105
+ debug("client.waitConnected resolved", { status: client.getStatus() });
106
+ } catch (error) {
107
+ debug("connect failed", error);
108
+ throw error;
109
+ }
60
110
  }
61
111
  async close() {
112
+ debug("close requested", {
113
+ docSessions: this.docSessions.size,
114
+ metadataSession: Boolean(this.metadataSession)
115
+ });
62
116
  for (const [docId] of this.docSessions) await this.leaveDocSession(docId).catch(() => {});
63
117
  this.docSessions.clear();
64
118
  await this.teardownMetadataSession().catch(() => {});
65
119
  if (this.client) {
66
- this.client.destroy();
120
+ const client = this.client;
67
121
  this.client = void 0;
122
+ client.destroy();
123
+ debug("websocket client destroyed");
68
124
  }
125
+ debug("close completed");
69
126
  }
70
127
  isConnected() {
71
128
  return this.client?.getStatus() === "connected";
72
129
  }
73
130
  async syncMeta(flock, options) {
74
- if (!this.options.metadataRoomId) return { ok: true };
131
+ if (!this.options.metadataRoomId) {
132
+ debug("syncMeta skipped; metadata room not configured");
133
+ return { ok: true };
134
+ }
135
+ debug("syncMeta requested", { roomId: this.options.metadataRoomId });
75
136
  try {
76
137
  await withTimeout((await this.ensureMetadataSession(flock, {
77
138
  roomId: this.options.metadataRoomId,
78
139
  auth: this.options.metadataAuth
79
140
  })).firstSynced, options?.timeout);
141
+ debug("syncMeta completed", { roomId: this.options.metadataRoomId });
80
142
  return { ok: true };
81
- } catch {
143
+ } catch (error) {
144
+ debug("syncMeta failed", error);
82
145
  return { ok: false };
83
146
  }
84
147
  }
@@ -87,6 +150,10 @@ var WebSocketTransportAdapter = class {
87
150
  const roomId = normalizeRoomId(params?.roomId, fallback);
88
151
  if (!roomId) throw new Error("Metadata room id not configured");
89
152
  const auth = params?.auth ?? this.options.metadataAuth;
153
+ debug("joinMetaRoom requested", {
154
+ roomId,
155
+ hasAuth: Boolean(auth && auth.length)
156
+ });
90
157
  const ensure = this.ensureMetadataSession(flock, {
91
158
  roomId,
92
159
  auth
@@ -97,7 +164,14 @@ var WebSocketTransportAdapter = class {
97
164
  unsubscribe: () => {
98
165
  ensure.then((session) => {
99
166
  session.refCount = Math.max(0, session.refCount - 1);
100
- if (session.refCount === 0) this.teardownMetadataSession(session).catch(() => {});
167
+ debug("metadata session refCount decremented", {
168
+ roomId: session.roomId,
169
+ refCount: session.refCount
170
+ });
171
+ if (session.refCount === 0) {
172
+ debug("tearing down metadata session due to refCount=0", { roomId: session.roomId });
173
+ this.teardownMetadataSession(session).catch(() => {});
174
+ }
101
175
  });
102
176
  },
103
177
  firstSyncedWithRemote: firstSynced,
@@ -107,18 +181,37 @@ var WebSocketTransportAdapter = class {
107
181
  };
108
182
  ensure.then((session) => {
109
183
  session.refCount += 1;
184
+ debug("metadata session refCount incremented", {
185
+ roomId: session.roomId,
186
+ refCount: session.refCount
187
+ });
110
188
  });
111
189
  return subscription;
112
190
  }
113
191
  async syncDoc(docId, doc, options) {
192
+ debug("syncDoc requested", { docId });
114
193
  try {
115
- await withTimeout((await this.ensureDocSession(docId, doc, {})).firstSynced, options?.timeout);
194
+ const session = await this.ensureDocSession(docId, doc, {});
195
+ await withTimeout(session.firstSynced, options?.timeout);
196
+ debug("syncDoc completed", {
197
+ docId,
198
+ roomId: session.roomId
199
+ });
116
200
  return { ok: true };
117
- } catch {
201
+ } catch (error) {
202
+ debug("syncDoc failed", {
203
+ docId,
204
+ error
205
+ });
118
206
  return { ok: false };
119
207
  }
120
208
  }
121
209
  joinDocRoom(docId, doc, params) {
210
+ debug("joinDocRoom requested", {
211
+ docId,
212
+ roomParamType: params?.roomId ? typeof params.roomId === "string" ? "string" : "uint8array" : void 0,
213
+ hasAuthOverride: Boolean(params?.auth && params.auth.length)
214
+ });
122
215
  const ensure = this.ensureDocSession(docId, doc, params ?? {});
123
216
  const firstSynced = ensure.then((session) => session.firstSynced);
124
217
  const getConnected = () => this.isConnected();
@@ -126,6 +219,11 @@ var WebSocketTransportAdapter = class {
126
219
  unsubscribe: () => {
127
220
  ensure.then((session) => {
128
221
  session.refCount = Math.max(0, session.refCount - 1);
222
+ debug("doc session refCount decremented", {
223
+ docId,
224
+ roomId: session.roomId,
225
+ refCount: session.refCount
226
+ });
129
227
  if (session.refCount === 0) this.leaveDocSession(docId).catch(() => {});
130
228
  });
131
229
  },
@@ -136,12 +234,24 @@ var WebSocketTransportAdapter = class {
136
234
  };
137
235
  ensure.then((session) => {
138
236
  session.refCount += 1;
237
+ debug("doc session refCount incremented", {
238
+ docId,
239
+ roomId: session.roomId,
240
+ refCount: session.refCount
241
+ });
139
242
  });
140
243
  return subscription;
141
244
  }
142
245
  ensureClient() {
143
- if (this.client) return this.client;
246
+ if (this.client) {
247
+ debug("reusing websocket client", { status: this.client.getStatus() });
248
+ return this.client;
249
+ }
144
250
  const { url, client: clientOptions } = this.options;
251
+ debug("creating websocket client", {
252
+ url,
253
+ clientOptionsKeys: clientOptions ? Object.keys(clientOptions) : []
254
+ });
145
255
  const client = new LoroWebsocketClient({
146
256
  url,
147
257
  ...clientOptions
@@ -150,22 +260,49 @@ var WebSocketTransportAdapter = class {
150
260
  return client;
151
261
  }
152
262
  async ensureMetadataSession(flock, params) {
263
+ debug("ensureMetadataSession invoked", {
264
+ roomId: params.roomId,
265
+ hasAuth: Boolean(params.auth && params.auth.length)
266
+ });
153
267
  const client = this.ensureClient();
154
268
  await client.waitConnected();
155
- if (this.metadataSession && this.metadataSession.flock === flock && this.metadataSession.roomId === params.roomId && bytesEqual(this.metadataSession.auth, params.auth)) return this.metadataSession;
156
- if (this.metadataSession) await this.teardownMetadataSession(this.metadataSession).catch(() => {});
269
+ debug("websocket client ready for metadata session", { status: client.getStatus() });
270
+ if (this.metadataSession && this.metadataSession.flock === flock && this.metadataSession.roomId === params.roomId && bytesEqual(this.metadataSession.auth, params.auth)) {
271
+ debug("reusing metadata session", {
272
+ roomId: this.metadataSession.roomId,
273
+ refCount: this.metadataSession.refCount
274
+ });
275
+ return this.metadataSession;
276
+ }
277
+ if (this.metadataSession) {
278
+ debug("tearing down previous metadata session", { roomId: this.metadataSession.roomId });
279
+ await this.teardownMetadataSession(this.metadataSession).catch(() => {});
280
+ }
157
281
  const configuredType = this.options.metadataCrdtType;
158
282
  if (configuredType && configuredType !== CrdtType.Flock) throw new Error(`metadataCrdtType must be ${CrdtType.Flock} when syncing Flock metadata`);
159
283
  const adaptor = createRepoFlockAdaptorFromDoc(flock, this.options.metadataAdaptorConfig ?? {});
284
+ debug("joining metadata room", {
285
+ roomId: params.roomId,
286
+ hasAuth: Boolean(params.auth && params.auth.length)
287
+ });
160
288
  const room = await client.join({
161
289
  roomId: params.roomId,
162
290
  crdtAdaptor: adaptor,
163
291
  auth: params.auth
164
292
  });
293
+ const firstSynced = room.waitForReachingServerVersion();
294
+ firstSynced.then(() => {
295
+ debug("metadata session firstSynced resolved", { roomId: params.roomId });
296
+ }, (error) => {
297
+ debug("metadata session firstSynced rejected", {
298
+ roomId: params.roomId,
299
+ error
300
+ });
301
+ });
165
302
  const session = {
166
303
  adaptor,
167
304
  room,
168
- firstSynced: room.waitForReachingServerVersion(),
305
+ firstSynced,
169
306
  flock,
170
307
  roomId: params.roomId,
171
308
  auth: params.auth,
@@ -177,34 +314,83 @@ var WebSocketTransportAdapter = class {
177
314
  async teardownMetadataSession(session) {
178
315
  const target = session ?? this.metadataSession;
179
316
  if (!target) return;
317
+ debug("teardownMetadataSession invoked", { roomId: target.roomId });
180
318
  if (this.metadataSession === target) this.metadataSession = void 0;
181
319
  const { adaptor, room } = target;
182
320
  try {
183
321
  await room.leave();
184
- } catch {
322
+ debug("metadata room left", { roomId: target.roomId });
323
+ } catch (error) {
324
+ debug("metadata room leave failed; destroying", {
325
+ roomId: target.roomId,
326
+ error
327
+ });
185
328
  await room.destroy().catch(() => {});
186
329
  }
187
330
  adaptor.destroy();
331
+ debug("metadata session destroyed", { roomId: target.roomId });
188
332
  }
189
333
  async ensureDocSession(docId, doc, params) {
334
+ debug("ensureDocSession invoked", { docId });
190
335
  const client = this.ensureClient();
191
336
  await client.waitConnected();
337
+ debug("websocket client ready for doc session", {
338
+ docId,
339
+ status: client.getStatus()
340
+ });
192
341
  const existing = this.docSessions.get(docId);
193
342
  const derivedRoomId = this.options.docRoomId?.(docId) ?? docId;
194
343
  const roomId = normalizeRoomId(params.roomId, derivedRoomId);
195
344
  const auth = params.auth ?? this.options.docAuth?.(docId);
196
- if (existing && existing.doc === doc && existing.roomId === roomId) return existing;
197
- if (existing) await this.leaveDocSession(docId).catch(() => {});
345
+ debug("doc session params resolved", {
346
+ docId,
347
+ roomId,
348
+ hasAuth: Boolean(auth && auth.length)
349
+ });
350
+ if (existing && existing.doc === doc && existing.roomId === roomId) {
351
+ debug("reusing doc session", {
352
+ docId,
353
+ roomId,
354
+ refCount: existing.refCount
355
+ });
356
+ return existing;
357
+ }
358
+ if (existing) {
359
+ debug("doc session mismatch; leaving existing session", {
360
+ docId,
361
+ previousRoomId: existing.roomId,
362
+ nextRoomId: roomId
363
+ });
364
+ await this.leaveDocSession(docId).catch(() => {});
365
+ }
198
366
  const adaptor = new LoroAdaptor(doc);
367
+ debug("joining doc room", {
368
+ docId,
369
+ roomId,
370
+ hasAuth: Boolean(auth && auth.length)
371
+ });
199
372
  const room = await client.join({
200
373
  roomId,
201
374
  crdtAdaptor: adaptor,
202
375
  auth
203
376
  });
377
+ const firstSynced = room.waitForReachingServerVersion();
378
+ firstSynced.then(() => {
379
+ debug("doc session firstSynced resolved", {
380
+ docId,
381
+ roomId
382
+ });
383
+ }, (error) => {
384
+ debug("doc session firstSynced rejected", {
385
+ docId,
386
+ roomId,
387
+ error
388
+ });
389
+ });
204
390
  const session = {
205
391
  adaptor,
206
392
  room,
207
- firstSynced: room.waitForReachingServerVersion(),
393
+ firstSynced,
208
394
  doc,
209
395
  roomId,
210
396
  refCount: 0
@@ -214,14 +400,34 @@ var WebSocketTransportAdapter = class {
214
400
  }
215
401
  async leaveDocSession(docId) {
216
402
  const session = this.docSessions.get(docId);
217
- if (!session) return;
403
+ if (!session) {
404
+ debug("leaveDocSession invoked but no session found", { docId });
405
+ return;
406
+ }
218
407
  this.docSessions.delete(docId);
408
+ debug("leaving doc session", {
409
+ docId,
410
+ roomId: session.roomId
411
+ });
219
412
  try {
220
413
  await session.room.leave();
221
- } catch {
414
+ debug("doc room left", {
415
+ docId,
416
+ roomId: session.roomId
417
+ });
418
+ } catch (error) {
419
+ debug("doc room leave failed; destroying", {
420
+ docId,
421
+ roomId: session.roomId,
422
+ error
423
+ });
222
424
  await session.room.destroy().catch(() => {});
223
425
  }
224
426
  session.adaptor.destroy();
427
+ debug("doc session destroyed", {
428
+ docId,
429
+ roomId: session.roomId
430
+ });
225
431
  }
226
432
  };
227
433
 
@@ -483,7 +689,7 @@ const DEFAULT_META_STORE = "meta";
483
689
  const DEFAULT_ASSET_STORE = "assets";
484
690
  const DEFAULT_DOC_UPDATE_STORE = "doc-updates";
485
691
  const DEFAULT_META_KEY = "snapshot";
486
- const textDecoder = new TextDecoder();
692
+ const textDecoder$1 = new TextDecoder();
487
693
  function describeUnknown(cause) {
488
694
  if (typeof cause === "string") return cause;
489
695
  if (typeof cause === "number" || typeof cause === "boolean") return String(cause);
@@ -584,7 +790,7 @@ var IndexedDBStorageAdaptor = class {
584
790
  const bytes = await this.getBinary(this.metaStore, this.metaKey);
585
791
  if (!bytes) return void 0;
586
792
  try {
587
- const json = textDecoder.decode(bytes);
793
+ const json = textDecoder$1.decode(bytes);
588
794
  const bundle = JSON.parse(json);
589
795
  const flock = new Flock();
590
796
  flock.importJson(bundle);
@@ -728,15 +934,206 @@ var IndexedDBStorageAdaptor = class {
728
934
  };
729
935
 
730
936
  //#endregion
731
- //#region src/index.ts
732
- const textEncoder = new TextEncoder();
733
- const DEFAULT_DOC_FRONTIER_DEBOUNCE_MS = 1e3;
734
- function logAsyncError(context) {
735
- return (error) => {
736
- if (error instanceof Error) console.error(`[loro-repo] ${context} failed: ${error.message}`, error);
737
- else console.error(`[loro-repo] ${context} failed with non-error reason:`, error);
738
- };
937
+ //#region src/storage/filesystem.ts
938
+ const textDecoder = new TextDecoder();
939
+ var FileSystemStorageAdaptor = class {
940
+ baseDir;
941
+ docsDir;
942
+ assetsDir;
943
+ metaPath;
944
+ initPromise;
945
+ updateCounter = 0;
946
+ constructor(options = {}) {
947
+ this.baseDir = path.resolve(options.baseDir ?? path.join(process.cwd(), ".loro-repo"));
948
+ this.docsDir = path.join(this.baseDir, options.docsDirName ?? "docs");
949
+ this.assetsDir = path.join(this.baseDir, options.assetsDirName ?? "assets");
950
+ this.metaPath = path.join(this.baseDir, options.metaFileName ?? "meta.json");
951
+ this.initPromise = this.ensureLayout();
952
+ }
953
+ async save(payload) {
954
+ await this.initPromise;
955
+ switch (payload.type) {
956
+ case "doc-snapshot":
957
+ await this.writeDocSnapshot(payload.docId, payload.snapshot);
958
+ return;
959
+ case "doc-update":
960
+ await this.enqueueDocUpdate(payload.docId, payload.update);
961
+ return;
962
+ case "asset":
963
+ await this.writeAsset(payload.assetId, payload.data);
964
+ return;
965
+ case "meta":
966
+ await writeFileAtomic(this.metaPath, payload.update);
967
+ return;
968
+ default: throw new Error(`Unsupported payload type: ${payload.type}`);
969
+ }
970
+ }
971
+ async deleteAsset(assetId) {
972
+ await this.initPromise;
973
+ await removeIfExists(this.assetPath(assetId));
974
+ }
975
+ async loadDoc(docId) {
976
+ await this.initPromise;
977
+ const snapshotBytes = await readFileIfExists(this.docSnapshotPath(docId));
978
+ const updateDir = this.docUpdatesDir(docId);
979
+ const updateFiles = await listFiles(updateDir);
980
+ if (!snapshotBytes && updateFiles.length === 0) return;
981
+ const doc = snapshotBytes ? LoroDoc.fromSnapshot(snapshotBytes) : new LoroDoc();
982
+ if (updateFiles.length === 0) return doc;
983
+ const updatePaths = updateFiles.map((file) => path.join(updateDir, file));
984
+ for (const updatePath of updatePaths) {
985
+ const update = await readFileIfExists(updatePath);
986
+ if (!update) continue;
987
+ doc.import(update);
988
+ }
989
+ await Promise.all(updatePaths.map((filePath) => removeIfExists(filePath)));
990
+ const consolidated = doc.export({ mode: "snapshot" });
991
+ await this.writeDocSnapshot(docId, consolidated);
992
+ return doc;
993
+ }
994
+ async loadMeta() {
995
+ await this.initPromise;
996
+ const bytes = await readFileIfExists(this.metaPath);
997
+ if (!bytes) return void 0;
998
+ try {
999
+ const bundle = JSON.parse(textDecoder.decode(bytes));
1000
+ const flock = new Flock();
1001
+ flock.importJson(bundle);
1002
+ return flock;
1003
+ } catch (error) {
1004
+ throw new Error("Failed to hydrate metadata snapshot", { cause: error });
1005
+ }
1006
+ }
1007
+ async loadAsset(assetId) {
1008
+ await this.initPromise;
1009
+ return readFileIfExists(this.assetPath(assetId));
1010
+ }
1011
+ async ensureLayout() {
1012
+ await Promise.all([
1013
+ ensureDir(this.baseDir),
1014
+ ensureDir(this.docsDir),
1015
+ ensureDir(this.assetsDir)
1016
+ ]);
1017
+ }
1018
+ async writeDocSnapshot(docId, snapshot) {
1019
+ await ensureDir(this.docDir(docId));
1020
+ await writeFileAtomic(this.docSnapshotPath(docId), snapshot);
1021
+ }
1022
+ async enqueueDocUpdate(docId, update) {
1023
+ const dir = this.docUpdatesDir(docId);
1024
+ await ensureDir(dir);
1025
+ const counter = this.updateCounter = (this.updateCounter + 1) % 1e6;
1026
+ const fileName = `${Date.now().toString().padStart(13, "0")}-${counter.toString().padStart(6, "0")}.bin`;
1027
+ await writeFileAtomic(path.join(dir, fileName), update);
1028
+ }
1029
+ async writeAsset(assetId, data) {
1030
+ const filePath = this.assetPath(assetId);
1031
+ await ensureDir(path.dirname(filePath));
1032
+ await writeFileAtomic(filePath, data);
1033
+ }
1034
+ docDir(docId) {
1035
+ return path.join(this.docsDir, encodeComponent(docId));
1036
+ }
1037
+ docSnapshotPath(docId) {
1038
+ return path.join(this.docDir(docId), "snapshot.bin");
1039
+ }
1040
+ docUpdatesDir(docId) {
1041
+ return path.join(this.docDir(docId), "updates");
1042
+ }
1043
+ assetPath(assetId) {
1044
+ return path.join(this.assetsDir, encodeComponent(assetId));
1045
+ }
1046
+ };
1047
+ function encodeComponent(value) {
1048
+ return Buffer.from(value, "utf8").toString("base64url");
739
1049
  }
1050
+ async function ensureDir(dir) {
1051
+ await promises.mkdir(dir, { recursive: true });
1052
+ }
1053
+ async function readFileIfExists(filePath) {
1054
+ try {
1055
+ const data = await promises.readFile(filePath);
1056
+ return new Uint8Array(data.buffer, data.byteOffset, data.byteLength).slice();
1057
+ } catch (error) {
1058
+ if (error.code === "ENOENT") return;
1059
+ throw error;
1060
+ }
1061
+ }
1062
+ async function removeIfExists(filePath) {
1063
+ try {
1064
+ await promises.rm(filePath);
1065
+ } catch (error) {
1066
+ if (error.code === "ENOENT") return;
1067
+ throw error;
1068
+ }
1069
+ }
1070
+ async function listFiles(dir) {
1071
+ try {
1072
+ return (await promises.readdir(dir)).sort();
1073
+ } catch (error) {
1074
+ if (error.code === "ENOENT") return [];
1075
+ throw error;
1076
+ }
1077
+ }
1078
+ async function writeFileAtomic(targetPath, data) {
1079
+ const dir = path.dirname(targetPath);
1080
+ await ensureDir(dir);
1081
+ const tempPath = path.join(dir, `.tmp-${randomUUID()}`);
1082
+ await promises.writeFile(tempPath, data);
1083
+ await promises.rename(tempPath, targetPath);
1084
+ }
1085
+
1086
+ //#endregion
1087
+ //#region src/internal/event-bus.ts
1088
+ var RepoEventBus = class {
1089
+ watchers = /* @__PURE__ */ new Set();
1090
+ eventByStack = [];
1091
+ watch(listener, filter = {}) {
1092
+ const entry = {
1093
+ listener,
1094
+ filter
1095
+ };
1096
+ this.watchers.add(entry);
1097
+ return { unsubscribe: () => {
1098
+ this.watchers.delete(entry);
1099
+ } };
1100
+ }
1101
+ emit(event) {
1102
+ for (const entry of this.watchers) if (this.shouldNotify(entry.filter, event)) entry.listener(event);
1103
+ }
1104
+ clear() {
1105
+ this.watchers.clear();
1106
+ this.eventByStack.length = 0;
1107
+ }
1108
+ pushEventBy(by) {
1109
+ this.eventByStack.push(by);
1110
+ }
1111
+ popEventBy() {
1112
+ this.eventByStack.pop();
1113
+ }
1114
+ resolveEventBy(defaultBy) {
1115
+ const index = this.eventByStack.length - 1;
1116
+ return index >= 0 ? this.eventByStack[index] : defaultBy;
1117
+ }
1118
+ shouldNotify(filter, event) {
1119
+ if (!filter.docIds && !filter.kinds && !filter.metadataFields && !filter.by) return true;
1120
+ if (filter.kinds && !filter.kinds.includes(event.kind)) return false;
1121
+ if (filter.by && !filter.by.includes(event.by)) return false;
1122
+ const docId = (() => {
1123
+ if (event.kind === "doc-metadata" || event.kind === "doc-frontiers") return event.docId;
1124
+ if (event.kind === "asset-link" || event.kind === "asset-unlink") return event.docId;
1125
+ })();
1126
+ if (filter.docIds && docId && !filter.docIds.includes(docId)) return false;
1127
+ if (filter.docIds && !docId) return false;
1128
+ if (filter.metadataFields && event.kind === "doc-metadata") {
1129
+ if (!Object.keys(event.patch).some((key) => filter.metadataFields?.includes(key))) return false;
1130
+ }
1131
+ return true;
1132
+ }
1133
+ };
1134
+
1135
+ //#endregion
1136
+ //#region src/utils.ts
740
1137
  async function streamToUint8Array(stream) {
741
1138
  const reader = stream.getReader();
742
1139
  const chunks = [];
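
Reviewer note on the hunk above: it introduces a Node filesystem storage adaptor (src/storage/filesystem.ts) alongside the existing IndexedDB one, plus an internal RepoEventBus. A hedged construction sketch follows; the class name matches the bundle, but the diff does not show the package's export map, so treat the import and the example doc id as assumptions.

    import { FileSystemStorageAdaptor } from "loro-repo"; // assumed export name

    // All options are optional; the defaults from the constructor above are shown explicitly.
    const storage = new FileSystemStorageAdaptor({
      baseDir: ".loro-repo",   // resolved against process.cwd() by default
      docsDirName: "docs",
      assetsDirName: "assets",
      metaFileName: "meta.json",
    });

    // Per the code above: doc and asset ids are base64url-encoded into file names,
    // writes go through a temp file + rename, and loadDoc() replays any queued update
    // files on top of the snapshot, deletes them, and rewrites a consolidated snapshot.
    const doc = await storage.loadDoc("example-doc"); // resolves to undefined if nothing is stored yet
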
@@ -814,12 +1211,14 @@ function asJsonObject(value) {
814
1211
  if (cloned && typeof cloned === "object" && !Array.isArray(cloned)) return cloned;
815
1212
  }
816
1213
  function isJsonObjectValue(value) {
817
- return typeof value === "object" && value !== null && !Array.isArray(value);
1214
+ return Boolean(value && typeof value === "object" && !Array.isArray(value));
818
1215
  }
819
1216
  function stableStringify(value) {
820
- if (value === null || typeof value !== "object") return JSON.stringify(value);
1217
+ if (value === null) return "null";
1218
+ if (typeof value === "string") return JSON.stringify(value);
1219
+ if (typeof value === "number" || typeof value === "boolean") return JSON.stringify(value);
821
1220
  if (Array.isArray(value)) return `[${value.map(stableStringify).join(",")}]`;
822
- if (!isJsonObjectValue(value)) return JSON.stringify(value);
1221
+ if (!isJsonObjectValue(value)) return "null";
823
1222
  return `{${Object.keys(value).sort().map((key) => `${JSON.stringify(key)}:${stableStringify(value[key])}`).join(",")}}`;
824
1223
  }
825
1224
  function jsonEquals(a, b) {
@@ -829,15 +1228,17 @@ function jsonEquals(a, b) {
829
1228
  }
830
1229
  function diffJsonObjects(previous, next) {
831
1230
  const patch = {};
832
- const keys = new Set([...Object.keys(next), ...previous ? Object.keys(previous) : []]);
1231
+ const keys = /* @__PURE__ */ new Set();
1232
+ if (previous) for (const key of Object.keys(previous)) keys.add(key);
1233
+ for (const key of Object.keys(next)) keys.add(key);
833
1234
  for (const key of keys) {
834
1235
  const prevValue = previous ? previous[key] : void 0;
835
- if (!Object.prototype.hasOwnProperty.call(next, key)) {
836
- patch[key] = null;
837
- continue;
838
- }
839
1236
  const nextValue = next[key];
840
1237
  if (!jsonEquals(prevValue, nextValue)) {
1238
+ if (nextValue === void 0 && previous && key in previous) {
1239
+ patch[key] = null;
1240
+ continue;
1241
+ }
841
1242
  const cloned = cloneJsonValue(nextValue);
842
1243
  if (cloned !== void 0) patch[key] = cloned;
843
1244
  }
@@ -893,61 +1294,24 @@ function toReadableStream(bytes) {
893
1294
  controller.close();
894
1295
  } });
895
1296
  }
896
- function computeVersionVector(doc) {
897
- const candidate = doc;
898
- if (typeof candidate.frontiers === "function" && typeof candidate.frontiersToVV === "function") {
899
- const frontiers = candidate.frontiers();
900
- return candidate.frontiersToVV(frontiers);
901
- }
902
- if (typeof candidate.version === "function") return candidate.version();
903
- return {};
904
- }
905
- function emptyFrontiers() {
906
- return [];
907
- }
908
- function getDocFrontiers(doc) {
909
- const candidate = doc;
910
- if (typeof candidate.frontiers === "function") {
911
- const result = candidate.frontiers();
912
- if (result) return result;
913
- }
914
- return emptyFrontiers();
915
- }
916
- function versionVectorToJson(vv) {
917
- const map = vv.toJSON();
918
- const record = {};
919
- if (map instanceof Map) {
920
- const entries = Array.from(map.entries()).sort(([a], [b]) => String(a).localeCompare(String(b)));
921
- for (const [peer, counter] of entries) {
922
- if (typeof counter !== "number" || !Number.isFinite(counter)) continue;
923
- const key = typeof peer === "string" ? peer : JSON.stringify(peer);
924
- record[key] = counter;
925
- }
926
- }
927
- return record;
928
- }
929
- function canonicalizeVersionVector(vv) {
930
- const json = versionVectorToJson(vv);
1297
+ function canonicalizeFrontiers(frontiers) {
1298
+ const json = [...frontiers].sort((a, b) => {
1299
+ if (a.peer < b.peer) return -1;
1300
+ if (a.peer > b.peer) return 1;
1301
+ return a.counter - b.counter;
1302
+ }).map((f) => ({
1303
+ peer: f.peer,
1304
+ counter: f.counter
1305
+ }));
931
1306
  return {
932
1307
  json,
933
1308
  key: stableStringify(json)
934
1309
  };
935
1310
  }
936
- var RepoDocHandleImpl = class {
937
- doc;
938
- whenSyncedWithRemote;
939
- docId;
940
- onClose;
941
- constructor(docId, doc, whenSyncedWithRemote, onClose) {
942
- this.docId = docId;
943
- this.doc = doc;
944
- this.whenSyncedWithRemote = whenSyncedWithRemote;
945
- this.onClose = onClose;
946
- }
947
- async close() {
948
- await this.onClose(this.docId, this.doc);
949
- }
950
- };
1311
+ function includesFrontiers(vv, frontiers) {
1312
+ for (const { peer, counter } of frontiers) if ((vv.get(peer) ?? 0) <= counter) return false;
1313
+ return true;
1314
+ }
951
1315
  function matchesQuery(docId, _metadata, query) {
952
1316
  if (!query) return true;
953
1317
  if (query.prefix && !docId.startsWith(query.prefix)) return false;
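
Reviewer note on the hunk above: the version-vector helpers are replaced by frontier-based ones. canonicalizeFrontiers produces an order-independent key (via stableStringify) for the per-doc "f" rows used by updateDocFrontiers in the next hunk, and includesFrontiers checks whether a version vector already covers a stored frontier. A small self-contained illustration of the canonical ordering; this is a local re-statement, and none of these helpers are shown as exported in this diff.

    // Illustration only: mirrors canonicalizeFrontiers from the bundle above.
    const canonicalize = (frontiers) =>
      [...frontiers]
        .sort((a, b) => (a.peer < b.peer ? -1 : a.peer > b.peer ? 1 : a.counter - b.counter))
        .map(({ peer, counter }) => ({ peer, counter }));

    console.log(canonicalize([
      { peer: "b", counter: 9 },
      { peer: "a", counter: 3 },
    ]));
    // -> [ { peer: "a", counter: 3 }, { peer: "b", counter: 9 } ]
    // stableStringify of this array becomes the frontier key; per includesFrontiers, a
    // stored frontier counts as covered (and its row is deleted) only when
    // vv.get(peer) > counter for every entry.
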
@@ -955,345 +1319,372 @@ function matchesQuery(docId, _metadata, query) {
955
1319
  if (query.end && docId > query.end) return false;
956
1320
  return true;
957
1321
  }
958
- var LoroRepo = class {
959
- options;
960
- transport;
1322
+
1323
+ //#endregion
1324
+ //#region src/internal/logging.ts
1325
+ function logAsyncError(context) {
1326
+ return (error) => {
1327
+ if (error instanceof Error) console.error(`[loro-repo] ${context} failed: ${error.message}`, error);
1328
+ else console.error(`[loro-repo] ${context} failed with non-error reason:`, error);
1329
+ };
1330
+ }
1331
+
1332
+ //#endregion
1333
+ //#region src/internal/doc-manager.ts
1334
+ var DocManager = class {
961
1335
  storage;
962
- assetTransport;
963
- docFactory;
964
- metaFlock = new Flock();
965
- metadata = /* @__PURE__ */ new Map();
1336
+ docFrontierDebounceMs;
1337
+ getMetaFlock;
1338
+ eventBus;
1339
+ persistMeta;
1340
+ state;
966
1341
  docs = /* @__PURE__ */ new Map();
967
- docRefs = /* @__PURE__ */ new Map();
968
1342
  docSubscriptions = /* @__PURE__ */ new Map();
969
- docAssets = /* @__PURE__ */ new Map();
970
- assets = /* @__PURE__ */ new Map();
971
- orphanedAssets = /* @__PURE__ */ new Map();
972
- assetToDocRefs = /* @__PURE__ */ new Map();
973
- docFrontierKeys = /* @__PURE__ */ new Map();
974
1343
  docFrontierUpdates = /* @__PURE__ */ new Map();
975
1344
  docPersistedVersions = /* @__PURE__ */ new Map();
976
- docFrontierDebounceMs;
977
- watchers = /* @__PURE__ */ new Set();
978
- eventByStack = [];
979
- metaRoomSubscription;
980
- unsubscribeMetaFlock;
981
- readyPromise;
1345
+ get docFrontierKeys() {
1346
+ return this.state.docFrontierKeys;
1347
+ }
982
1348
  constructor(options) {
983
- this.options = options;
984
- this.transport = options.transportAdapter;
985
- this.storage = options.storageAdapter;
986
- this.assetTransport = options.assetTransportAdapter;
987
- this.docFactory = options.docFactory ?? (async () => new LoroDoc());
988
- const configuredDebounce = options.docFrontierDebounceMs;
989
- this.docFrontierDebounceMs = typeof configuredDebounce === "number" && Number.isFinite(configuredDebounce) && configuredDebounce >= 0 ? configuredDebounce : DEFAULT_DOC_FRONTIER_DEBOUNCE_MS;
1349
+ this.storage = options.storage;
1350
+ this.docFrontierDebounceMs = options.docFrontierDebounceMs;
1351
+ this.getMetaFlock = options.getMetaFlock;
1352
+ this.eventBus = options.eventBus;
1353
+ this.persistMeta = options.persistMeta;
1354
+ this.state = options.state;
990
1355
  }
991
- async ready() {
992
- if (!this.readyPromise) this.readyPromise = this.initialize();
993
- await this.readyPromise;
1356
+ async openCollaborativeDoc(docId) {
1357
+ return await this.ensureDoc(docId);
994
1358
  }
995
- async initialize() {
1359
+ async openDetachedDoc(docId) {
1360
+ return await this.materializeDetachedDoc(docId);
1361
+ }
1362
+ async ensureDoc(docId) {
1363
+ const cached = this.docs.get(docId);
1364
+ if (cached) {
1365
+ this.ensureDocSubscription(docId, cached);
1366
+ if (!this.docPersistedVersions.has(docId)) this.docPersistedVersions.set(docId, cached.version());
1367
+ return cached;
1368
+ }
996
1369
  if (this.storage) {
997
- const snapshot = await this.storage.loadMeta();
998
- if (snapshot) this.metaFlock = snapshot;
1370
+ const stored = await this.storage.loadDoc(docId);
1371
+ if (stored) {
1372
+ this.registerDoc(docId, stored);
1373
+ return stored;
1374
+ }
999
1375
  }
1000
- this.hydrateMetadataFromFlock("sync");
1376
+ const created = new LoroDoc();
1377
+ this.registerDoc(docId, created);
1378
+ return created;
1001
1379
  }
1002
- async sync(options = {}) {
1003
- await this.ready();
1004
- const { scope = "full", docIds } = options;
1005
- if (!this.transport) return;
1006
- if (!this.transport.isConnected()) await this.transport.connect();
1007
- if (scope === "meta" || scope === "full") {
1008
- this.pushEventBy("sync");
1009
- const recordedEvents = [];
1010
- const unsubscribe = this.metaFlock.subscribe((batch) => {
1011
- if (batch.source === "local") return;
1012
- recordedEvents.push(...batch.events);
1013
- });
1014
- try {
1015
- if (!(await this.transport.syncMeta(this.metaFlock)).ok) throw new Error("Metadata sync failed");
1016
- if (recordedEvents.length > 0) this.applyMetaFlockEvents(recordedEvents, "sync");
1017
- else this.hydrateMetadataFromFlock("sync");
1018
- await this.persistMeta();
1019
- } finally {
1020
- unsubscribe();
1021
- this.popEventBy();
1022
- }
1380
+ async persistDoc(docId, doc) {
1381
+ const previousVersion = this.docPersistedVersions.get(docId);
1382
+ const snapshot = doc.export({ mode: "snapshot" });
1383
+ const nextVersion = doc.version();
1384
+ if (!this.storage) {
1385
+ this.docPersistedVersions.set(docId, nextVersion);
1386
+ return;
1023
1387
  }
1024
- if (scope === "doc" || scope === "full") {
1025
- const targets = docIds ?? Array.from(this.metadata.keys());
1026
- for (const docId of targets) {
1027
- const doc = await this.ensureDoc(docId);
1028
- this.pushEventBy("sync");
1029
- try {
1030
- if (!(await this.transport.syncDoc(docId, doc)).ok) throw new Error(`Document sync failed for ${docId}`);
1031
- } finally {
1032
- this.popEventBy();
1033
- }
1034
- await this.persistDoc(docId, doc);
1035
- await this.updateDocFrontiers(docId, doc, "sync");
1036
- }
1388
+ this.docPersistedVersions.set(docId, nextVersion);
1389
+ try {
1390
+ await this.storage.save({
1391
+ type: "doc-snapshot",
1392
+ docId,
1393
+ snapshot
1394
+ });
1395
+ } catch (error) {
1396
+ if (previousVersion) this.docPersistedVersions.set(docId, previousVersion);
1397
+ else this.docPersistedVersions.delete(docId);
1398
+ throw error;
1037
1399
  }
1038
1400
  }
1039
- refreshDocMetadataEntry(docId, by) {
1040
- const previous = this.metadata.get(docId);
1041
- const next = this.readDocMetadataFromFlock(docId);
1042
- if (!next) {
1043
- if (previous) {
1044
- this.metadata.delete(docId);
1045
- this.emit({
1046
- kind: "doc-metadata",
1047
- docId,
1048
- patch: {},
1049
- by
1050
- });
1401
+ async updateDocFrontiers(docId, doc, defaultBy) {
1402
+ const frontiers = doc.oplogFrontiers();
1403
+ const { json, key } = canonicalizeFrontiers(frontiers);
1404
+ const existingKeys = this.docFrontierKeys.get(docId) ?? /* @__PURE__ */ new Set();
1405
+ let mutated = false;
1406
+ const metaFlock = this.metaFlock;
1407
+ const vv = doc.version();
1408
+ for (const entry of existingKeys) {
1409
+ if (entry === key) continue;
1410
+ let oldFrontiers;
1411
+ try {
1412
+ oldFrontiers = JSON.parse(entry);
1413
+ } catch {
1414
+ continue;
1051
1415
  }
1052
- return;
1053
- }
1054
- this.metadata.set(docId, next);
1055
- const patch = diffJsonObjects(previous, next);
1056
- if (!previous || Object.keys(patch).length > 0) this.emit({
1057
- kind: "doc-metadata",
1058
- docId,
1059
- patch,
1060
- by
1061
- });
1062
- }
1063
- refreshDocAssetsEntry(docId, by) {
1064
- const mapping = this.readDocAssetsFromFlock(docId);
1065
- const previous = this.docAssets.get(docId);
1066
- if (!mapping.size) {
1067
- if (previous?.size) {
1068
- this.docAssets.delete(docId);
1069
- for (const assetId of previous.keys()) {
1070
- this.emit({
1071
- kind: "asset-unlink",
1072
- docId,
1073
- assetId,
1074
- by
1075
- });
1076
- if (!Array.from(this.docAssets.values()).some((assets) => assets.has(assetId))) {
1077
- const record = this.assets.get(assetId);
1078
- if (record) {
1079
- const deletedAt = this.orphanedAssets.get(assetId)?.deletedAt ?? Date.now();
1080
- this.orphanedAssets.set(assetId, {
1081
- metadata: record.metadata,
1082
- deletedAt
1083
- });
1084
- }
1085
- }
1086
- }
1416
+ if (includesFrontiers(vv, oldFrontiers)) {
1417
+ metaFlock.delete([
1418
+ "f",
1419
+ docId,
1420
+ entry
1421
+ ]);
1422
+ mutated = true;
1087
1423
  }
1088
- return;
1089
- }
1090
- this.docAssets.set(docId, mapping);
1091
- const added = [];
1092
- const removed = [];
1093
- if (previous) {
1094
- for (const assetId of previous.keys()) if (!mapping.has(assetId)) removed.push(assetId);
1095
1424
  }
1096
- for (const assetId of mapping.keys()) if (!previous || !previous.has(assetId)) added.push(assetId);
1097
- for (const assetId of removed) {
1098
- this.emit({
1099
- kind: "asset-unlink",
1425
+ if (!existingKeys.has(key)) {
1426
+ metaFlock.put([
1427
+ "f",
1100
1428
  docId,
1101
- assetId,
1102
- by
1103
- });
1104
- if (!Array.from(this.docAssets.values()).some((assets) => assets.has(assetId))) {
1105
- const record = this.assets.get(assetId);
1106
- if (record) {
1107
- const deletedAt = this.orphanedAssets.get(assetId)?.deletedAt ?? Date.now();
1108
- this.orphanedAssets.set(assetId, {
1109
- metadata: record.metadata,
1110
- deletedAt
1111
- });
1112
- }
1113
- }
1429
+ key
1430
+ ], json);
1431
+ mutated = true;
1114
1432
  }
1115
- for (const assetId of added) this.emit({
1116
- kind: "asset-link",
1433
+ if (mutated) {
1434
+ this.refreshDocFrontierKeys(docId);
1435
+ await this.persistMeta();
1436
+ }
1437
+ const by = this.eventBus.resolveEventBy(defaultBy);
1438
+ this.eventBus.emit({
1439
+ kind: "doc-frontiers",
1117
1440
  docId,
1118
- assetId,
1441
+ frontiers,
1119
1442
  by
1120
1443
  });
1121
1444
  }
1122
- refreshAssetMetadataEntry(assetId, by) {
1123
- const previous = this.assets.get(assetId);
1124
- const metadata = assetMetaFromJson(this.metaFlock.get(["a", assetId]));
1125
- if (!metadata) {
1126
- this.handleAssetRemoval(assetId, by);
1127
- return;
1445
+ async flushScheduledDocFrontierUpdate(docId) {
1446
+ const pending = this.docFrontierUpdates.get(docId);
1447
+ if (!pending) return false;
1448
+ clearTimeout(pending.timeout);
1449
+ this.docFrontierUpdates.delete(docId);
1450
+ this.eventBus.pushEventBy(pending.by);
1451
+ try {
1452
+ await this.updateDocFrontiers(docId, pending.doc, pending.by);
1453
+ } finally {
1454
+ this.eventBus.popEventBy();
1128
1455
  }
1129
- const existingData = previous?.data;
1130
- this.rememberAsset(metadata, existingData);
1131
- for (const assets of this.docAssets.values()) if (assets.has(assetId)) assets.set(assetId, cloneRepoAssetMetadata(metadata));
1132
- if (!previous || !assetMetadataEqual(previous.metadata, metadata)) this.emit({
1133
- kind: "asset-metadata",
1134
- asset: this.createAssetDownload(assetId, metadata, existingData),
1135
- by
1136
- });
1456
+ return true;
1457
+ }
1458
+ async unloadDoc(docId) {
1459
+ const doc = this.docs.get(docId);
1460
+ if (!doc) return;
1461
+ await this.flushScheduledDocFrontierUpdate(docId);
1462
+ await this.persistDocUpdate(docId, doc);
1463
+ await this.updateDocFrontiers(docId, doc, "local");
1464
+ this.docSubscriptions.get(docId)?.();
1465
+ this.docSubscriptions.delete(docId);
1466
+ this.docs.delete(docId);
1467
+ this.docPersistedVersions.delete(docId);
1468
+ }
1469
+ async flush() {
1470
+ const promises$1 = [];
1471
+ for (const [docId, doc] of this.docs) promises$1.push((async () => {
1472
+ await this.persistDocUpdate(docId, doc);
1473
+ await this.flushScheduledDocFrontierUpdate(docId);
1474
+ })());
1475
+ await Promise.all(promises$1);
1476
+ }
1477
+ async close() {
1478
+ await this.flush();
1479
+ for (const unsubscribe of this.docSubscriptions.values()) try {
1480
+ unsubscribe();
1481
+ } catch {}
1482
+ this.docSubscriptions.clear();
1483
+ this.docFrontierUpdates.clear();
1484
+ this.docs.clear();
1485
+ this.docPersistedVersions.clear();
1486
+ this.docFrontierKeys.clear();
1487
+ }
1488
+ hydrateFrontierKeys() {
1489
+ const nextFrontierKeys = /* @__PURE__ */ new Map();
1490
+ const frontierRows = this.metaFlock.scan({ prefix: ["f"] });
1491
+ for (const row of frontierRows) {
1492
+ if (!Array.isArray(row.key) || row.key.length < 3) continue;
1493
+ const docId = row.key[1];
1494
+ const frontierKey = row.key[2];
1495
+ if (typeof docId !== "string" || typeof frontierKey !== "string") continue;
1496
+ const set = nextFrontierKeys.get(docId) ?? /* @__PURE__ */ new Set();
1497
+ set.add(frontierKey);
1498
+ nextFrontierKeys.set(docId, set);
1499
+ }
1500
+ this.docFrontierKeys.clear();
1501
+ for (const [docId, keys] of nextFrontierKeys) this.docFrontierKeys.set(docId, keys);
1137
1502
  }
1138
1503
  refreshDocFrontierKeys(docId) {
1139
1504
  const rows = this.metaFlock.scan({ prefix: ["f", docId] });
1140
1505
  const keys = /* @__PURE__ */ new Set();
1141
1506
  for (const row of rows) {
1142
1507
  if (!Array.isArray(row.key) || row.key.length < 3) continue;
1508
+ if (row.value === void 0 || row.value === null) continue;
1143
1509
  const frontierKey = row.key[2];
1144
1510
  if (typeof frontierKey === "string") keys.add(frontierKey);
1145
1511
  }
1146
1512
  if (keys.size > 0) this.docFrontierKeys.set(docId, keys);
1147
1513
  else this.docFrontierKeys.delete(docId);
1148
1514
  }
1149
- readDocMetadataFromFlock(docId) {
1150
- const rows = this.metaFlock.scan({ prefix: ["m", docId] });
1151
- if (!rows.length) return void 0;
1152
- const docMeta = {};
1153
- let populated = false;
1154
- for (const row of rows) {
1155
- if (!Array.isArray(row.key) || row.key.length < 2) continue;
1156
- if (row.key.length === 2) {
1157
- const obj = asJsonObject(row.value);
1158
- if (!obj) continue;
1159
- for (const [field, value] of Object.entries(obj)) {
1160
- const cloned = cloneJsonValue(value);
1161
- if (cloned !== void 0) {
1162
- docMeta[field] = cloned;
1163
- populated = true;
1164
- }
1165
- }
1166
- continue;
1167
- }
1168
- const fieldKey = row.key[2];
1169
- if (typeof fieldKey !== "string") continue;
1170
- if (fieldKey === "$tombstone") {
1171
- docMeta.tombstone = Boolean(row.value);
1172
- populated = true;
1173
- continue;
1174
- }
1175
- const jsonValue = cloneJsonValue(row.value);
1176
- if (jsonValue === void 0) continue;
1177
- docMeta[fieldKey] = jsonValue;
1178
- populated = true;
1179
- }
1180
- return populated ? docMeta : void 0;
1181
- }
1182
- readDocAssetsFromFlock(docId) {
1183
- const rows = this.metaFlock.scan({ prefix: ["ld", docId] });
1184
- const mapping = /* @__PURE__ */ new Map();
1185
- for (const row of rows) {
1186
- if (!Array.isArray(row.key) || row.key.length < 3) continue;
1187
- const assetId = row.key[2];
1188
- if (typeof assetId !== "string") continue;
1189
- if (!(row.value !== void 0 && row.value !== null && row.value !== false)) continue;
1190
- let metadata = this.assets.get(assetId)?.metadata;
1191
- if (!metadata) {
1192
- metadata = this.readAssetMetadataFromFlock(assetId);
1193
- if (!metadata) continue;
1194
- this.rememberAsset(metadata);
1195
- }
1196
- mapping.set(assetId, cloneRepoAssetMetadata(metadata));
1197
- }
1198
- return mapping;
1515
+ get metaFlock() {
1516
+ return this.getMetaFlock();
1199
1517
  }
1200
- readAssetMetadataFromFlock(assetId) {
1201
- return assetMetaFromJson(this.metaFlock.get(["a", assetId]));
1518
+ registerDoc(docId, doc) {
1519
+ this.docs.set(docId, doc);
1520
+ this.docPersistedVersions.set(docId, doc.version());
1521
+ this.ensureDocSubscription(docId, doc);
1202
1522
  }
1203
- handleAssetRemoval(assetId, by) {
1204
- const record = this.assets.get(assetId);
1205
- if (!record) return;
1206
- this.assets.delete(assetId);
1207
- const deletedAt = this.orphanedAssets.get(assetId)?.deletedAt ?? Date.now();
1208
- this.orphanedAssets.set(assetId, {
1209
- metadata: record.metadata,
1210
- deletedAt
1523
+ ensureDocSubscription(docId, doc) {
1524
+ if (this.docSubscriptions.has(docId)) return;
1525
+ const unsubscribe = doc.subscribe((batch) => {
1526
+ const stackBy = this.eventBus.resolveEventBy("local");
1527
+ const by = stackBy === "local" && batch.by === "import" ? "live" : stackBy;
1528
+ this.onDocEvent(docId, doc, batch, by);
1211
1529
  });
1212
- const affectedDocs = [];
1213
- for (const [docId, assets] of this.docAssets) if (assets.delete(assetId)) {
1214
- if (assets.size === 0) this.docAssets.delete(docId);
1215
- affectedDocs.push(docId);
1216
- }
1217
- for (const docId of affectedDocs) this.emit({
1218
- kind: "asset-unlink",
1219
- docId,
1220
- assetId,
1221
- by
1530
+ if (typeof unsubscribe === "function") this.docSubscriptions.set(docId, unsubscribe);
1531
+ }
1532
+ scheduleDocFrontierUpdate(docId, doc, by) {
1533
+ const existing = this.docFrontierUpdates.get(docId);
1534
+ const effectiveBy = existing ? this.mergeRepoEventBy(existing.by, by) : by;
1535
+ if (existing) clearTimeout(existing.timeout);
1536
+ const delay = this.docFrontierDebounceMs > 0 ? this.docFrontierDebounceMs : 0;
1537
+ const timeout = setTimeout(() => this.runScheduledDocFrontierUpdate(docId), delay);
1538
+ this.docFrontierUpdates.set(docId, {
1539
+ timeout,
1540
+ doc,
1541
+ by: effectiveBy
1222
1542
  });
1223
1543
  }
1224
- emit(event) {
1225
- for (const entry of this.watchers) if (this.shouldNotify(entry.filter, event)) entry.listener(event);
1544
+ mergeRepoEventBy(current, next) {
1545
+ if (current === next) return current;
1546
+ if (current === "live" || next === "live") return "live";
1547
+ if (current === "sync" || next === "sync") return "sync";
1548
+ return "local";
1226
1549
  }
1227
- async joinMetaRoom(params) {
1228
- await this.ready();
1229
- if (!this.transport) throw new Error("Transport adapter not configured");
1230
- if (!this.transport.isConnected()) await this.transport.connect();
1231
- if (this.metaRoomSubscription) return this.metaRoomSubscription;
1232
- this.ensureMetaLiveMonitor();
1233
- const subscription = this.transport.joinMetaRoom(this.metaFlock, params);
1234
- const wrapped = {
1235
- unsubscribe: () => {
1236
- subscription.unsubscribe();
1237
- if (this.metaRoomSubscription === wrapped) this.metaRoomSubscription = void 0;
1238
- if (this.unsubscribeMetaFlock) {
1239
- this.unsubscribeMetaFlock();
1240
- this.unsubscribeMetaFlock = void 0;
1241
- }
1242
- },
1243
- firstSyncedWithRemote: subscription.firstSyncedWithRemote,
1244
- get connected() {
1245
- return subscription.connected;
1550
+ runScheduledDocFrontierUpdate(docId) {
1551
+ const pending = this.docFrontierUpdates.get(docId);
1552
+ if (!pending) return;
1553
+ this.docFrontierUpdates.delete(docId);
1554
+ this.eventBus.pushEventBy(pending.by);
1555
+ (async () => {
1556
+ try {
1557
+ await this.updateDocFrontiers(docId, pending.doc, pending.by);
1558
+ } finally {
1559
+ this.eventBus.popEventBy();
1246
1560
  }
1247
- };
1248
- this.metaRoomSubscription = wrapped;
1249
- subscription.firstSyncedWithRemote.then(async () => {
1250
- const by = this.resolveEventBy("live");
1251
- this.hydrateMetadataFromFlock(by);
1252
- await this.persistMeta();
1253
- }).catch(logAsyncError("meta room first sync"));
1254
- return wrapped;
1561
+ })().catch(logAsyncError(`doc ${docId} frontier debounce`));
1255
1562
  }
1256
- async joinDocRoom(docId, params) {
1257
- await this.ready();
1258
- if (!this.transport) throw new Error("Transport adapter not configured");
1259
- if (!this.transport.isConnected()) await this.transport.connect();
1260
- const doc = await this.ensureDoc(docId);
1261
- const subscription = this.transport.joinDocRoom(docId, doc, params);
1262
- subscription.firstSyncedWithRemote.catch(logAsyncError(`doc ${docId} first sync`));
1263
- return subscription;
1563
+ async materializeDetachedDoc(docId) {
1564
+ const snapshot = await this.exportDocSnapshot(docId);
1565
+ if (snapshot) return LoroDoc.fromSnapshot(snapshot);
1566
+ return new LoroDoc();
1264
1567
  }
1265
- async close() {
1266
- for (const unsubscribe of this.docSubscriptions.values()) try {
1267
- unsubscribe();
1268
- } catch {}
1269
- this.docSubscriptions.clear();
1270
- this.metaRoomSubscription?.unsubscribe();
1271
- this.metaRoomSubscription = void 0;
1272
- if (this.unsubscribeMetaFlock) {
1273
- this.unsubscribeMetaFlock();
1274
- this.unsubscribeMetaFlock = void 0;
1568
+ async exportDocSnapshot(docId) {
1569
+ const cached = this.docs.get(docId);
1570
+ if (cached) return cached.export({ mode: "snapshot" });
1571
+ if (!this.storage) return;
1572
+ return (await this.storage.loadDoc(docId))?.export({ mode: "snapshot" });
1573
+ }
1574
+ async persistDocUpdate(docId, doc) {
1575
+ const previousVersion = this.docPersistedVersions.get(docId);
1576
+ const nextVersion = doc.oplogVersion();
1577
+ if (!this.storage) {
1578
+ this.docPersistedVersions.set(docId, nextVersion);
1579
+ return;
1275
1580
  }
1276
- const pendingDocIds = Array.from(this.docFrontierUpdates.keys());
1277
- for (const docId of pendingDocIds) try {
1278
- await this.flushScheduledDocFrontierUpdate(docId);
1581
+ if (!previousVersion) {
1582
+ await this.persistDoc(docId, doc);
1583
+ this.docPersistedVersions.set(docId, nextVersion);
1584
+ return;
1585
+ }
1586
+ if (previousVersion.compare(nextVersion) === 0) return;
1587
+ const update = doc.export({
1588
+ mode: "update",
1589
+ from: previousVersion
1590
+ });
1591
+ this.docPersistedVersions.set(docId, nextVersion);
1592
+ try {
1593
+ await this.storage.save({
1594
+ type: "doc-update",
1595
+ docId,
1596
+ update
1597
+ });
1279
1598
  } catch (error) {
1280
- logAsyncError(`doc ${docId} frontier flush on close`)(error);
1599
+ this.docPersistedVersions.set(docId, previousVersion);
1600
+ throw error;
1281
1601
  }
1282
- this.docFrontierUpdates.clear();
1283
- this.watchers.clear();
1284
- this.docs.clear();
1285
- this.docRefs.clear();
1286
- this.metadata.clear();
1287
- this.docAssets.clear();
1288
- this.assets.clear();
1289
- this.docFrontierKeys.clear();
1290
- this.docPersistedVersions.clear();
1291
- this.readyPromise = void 0;
1292
- await this.transport?.close();
1293
1602
  }
1294
- async upsertDocMeta(docId, patch, _options = {}) {
1295
- await this.ready();
1296
- const base = this.metadata.get(docId);
1603
+ onDocEvent(docId, doc, _batch, by) {
1604
+ (async () => {
1605
+ const persist = this.persistDocUpdate(docId, doc);
1606
+ if (by === "local") {
1607
+ this.scheduleDocFrontierUpdate(docId, doc, by);
1608
+ await persist;
1609
+ return;
1610
+ }
1611
+ const flushed = this.flushScheduledDocFrontierUpdate(docId);
1612
+ const updated = (async () => {
1613
+ this.eventBus.pushEventBy(by);
1614
+ try {
1615
+ await this.updateDocFrontiers(docId, doc, by);
1616
+ } finally {
1617
+ this.eventBus.popEventBy();
1618
+ }
1619
+ })();
1620
+ await Promise.all([
1621
+ persist,
1622
+ flushed,
1623
+ updated
1624
+ ]);
1625
+ })().catch(logAsyncError(`doc ${docId} event processing`));
1626
+ }
1627
+ };
1628
+
1629
+ //#endregion
1630
+ //#region src/internal/metadata-manager.ts
1631
+ var MetadataManager = class {
1632
+ getMetaFlock;
1633
+ eventBus;
1634
+ persistMeta;
1635
+ state;
1636
+ constructor(options) {
1637
+ this.getMetaFlock = options.getMetaFlock;
1638
+ this.eventBus = options.eventBus;
1639
+ this.persistMeta = options.persistMeta;
1640
+ this.state = options.state;
1641
+ }
1642
+ getDocIds() {
1643
+ return Array.from(this.state.metadata.keys());
1644
+ }
1645
+ entries() {
1646
+ return this.state.metadata.entries();
1647
+ }
1648
+ get(docId) {
1649
+ const metadata = this.state.metadata.get(docId);
1650
+ return metadata ? cloneJsonObject(metadata) : void 0;
1651
+ }
1652
+ listDoc(query) {
1653
+ if (query?.limit !== void 0 && query.limit <= 0) return [];
1654
+ const { startKey, endKey } = this.computeDocRangeKeys(query);
1655
+ if (startKey && endKey && startKey >= endKey) return [];
1656
+ const scanOptions = { prefix: ["m"] };
1657
+ if (startKey) scanOptions.start = {
1658
+ kind: "inclusive",
1659
+ key: ["m", startKey]
1660
+ };
1661
+ if (endKey) scanOptions.end = {
1662
+ kind: "exclusive",
1663
+ key: ["m", endKey]
1664
+ };
1665
+ const rows = this.metaFlock.scan(scanOptions);
1666
+ const seen = /* @__PURE__ */ new Set();
1667
+ const entries = [];
1668
+ for (const row of rows) {
1669
+ if (query?.limit !== void 0 && entries.length >= query.limit) break;
1670
+ if (!Array.isArray(row.key) || row.key.length < 2) continue;
1671
+ const docId = row.key[1];
1672
+ if (typeof docId !== "string") continue;
1673
+ if (seen.has(docId)) continue;
1674
+ seen.add(docId);
1675
+ const metadata = this.state.metadata.get(docId);
1676
+ if (!metadata) continue;
1677
+ if (!matchesQuery(docId, metadata, query)) continue;
1678
+ entries.push({
1679
+ docId,
1680
+ meta: cloneJsonObject(metadata)
1681
+ });
1682
+ if (query?.limit !== void 0 && entries.length >= query.limit) break;
1683
+ }
1684
+ return entries;
1685
+ }
1686
+ async upsert(docId, patch) {
1687
+ const base = this.state.metadata.get(docId);
1297
1688
  const next = base ? cloneJsonObject(base) : {};
1298
1689
  const outPatch = {};
1299
1690
  let changed = false;
@@ -1318,70 +1709,159 @@ var LoroRepo = class {
1318
1709
  changed = true;
1319
1710
  }
1320
1711
  if (!changed) {
1321
- if (!this.metadata.has(docId)) this.metadata.set(docId, next);
1712
+ if (!this.state.metadata.has(docId)) this.state.metadata.set(docId, next);
1322
1713
  return;
1323
1714
  }
1324
- this.metadata.set(docId, next);
1715
+ this.state.metadata.set(docId, next);
1325
1716
  await this.persistMeta();
1326
- this.emit({
1717
+ this.eventBus.emit({
1327
1718
  kind: "doc-metadata",
1328
1719
  docId,
1329
1720
  patch: cloneJsonObject(outPatch),
1330
1721
  by: "local"
1331
1722
  });
1332
1723
  }
1333
- async getDocMeta(docId) {
1334
- await this.ready();
1335
- const metadata = this.metadata.get(docId);
1336
- return metadata ? cloneJsonObject(metadata) : void 0;
1724
+ refreshFromFlock(docId, by) {
1725
+ const previous = this.state.metadata.get(docId);
1726
+ const next = this.readDocMetadataFromFlock(docId);
1727
+ if (!next) {
1728
+ if (previous) {
1729
+ this.state.metadata.delete(docId);
1730
+ this.eventBus.emit({
1731
+ kind: "doc-metadata",
1732
+ docId,
1733
+ patch: {},
1734
+ by
1735
+ });
1736
+ }
1737
+ return;
1738
+ }
1739
+ this.state.metadata.set(docId, next);
1740
+ const patch = diffJsonObjects(previous, next);
1741
+ if (!previous || Object.keys(patch).length > 0) this.eventBus.emit({
1742
+ kind: "doc-metadata",
1743
+ docId,
1744
+ patch,
1745
+ by
1746
+ });
1337
1747
  }
1338
- async listDoc(query) {
1339
- await this.ready();
1340
- const entries = [];
1341
- for (const [docId, metadata] of this.metadata.entries()) {
1342
- if (!matchesQuery(docId, metadata, query)) continue;
1343
- entries.push({
1748
+ replaceAll(nextMetadata, by) {
1749
+ const prevMetadata = new Map(this.state.metadata);
1750
+ this.state.metadata.clear();
1751
+ for (const [docId, meta] of nextMetadata) this.state.metadata.set(docId, meta);
1752
+ const docIds = new Set([...prevMetadata.keys(), ...nextMetadata.keys()]);
1753
+ for (const docId of docIds) {
1754
+ const previous = prevMetadata.get(docId);
1755
+ const current = nextMetadata.get(docId);
1756
+ if (!current) {
1757
+ if (previous) this.eventBus.emit({
1758
+ kind: "doc-metadata",
1759
+ docId,
1760
+ patch: {},
1761
+ by
1762
+ });
1763
+ continue;
1764
+ }
1765
+ const patch = diffJsonObjects(previous, current);
1766
+ if (!previous || Object.keys(patch).length > 0) this.eventBus.emit({
1767
+ kind: "doc-metadata",
1344
1768
  docId,
1345
- meta: cloneJsonObject(metadata)
1769
+ patch,
1770
+ by
1346
1771
  });
1347
1772
  }
1348
- entries.sort((a, b) => a.docId < b.docId ? -1 : a.docId > b.docId ? 1 : 0);
1349
- if (query?.limit !== void 0) return entries.slice(0, query.limit);
1350
- return entries;
1351
1773
  }
1352
- getMetaReplica() {
1353
- return this.metaFlock;
1774
+ clear() {
1775
+ this.state.metadata.clear();
1354
1776
  }
1355
- watch(listener, filter = {}) {
1356
- const entry = {
1357
- listener,
1358
- filter
1777
+ computeDocRangeKeys(query) {
1778
+ if (!query) return {};
1779
+ const prefix = query.prefix && query.prefix.length > 0 ? query.prefix : void 0;
1780
+ let startKey = query.start;
1781
+ if (prefix) startKey = !startKey || prefix > startKey ? prefix : startKey;
1782
+ let endKey = query.end;
1783
+ const prefixEnd = this.nextLexicographicString(prefix);
1784
+ if (prefixEnd) endKey = !endKey || prefixEnd < endKey ? prefixEnd : endKey;
1785
+ return {
1786
+ startKey,
1787
+ endKey
1359
1788
  };
1360
- this.watchers.add(entry);
1361
- return { unsubscribe: () => {
1362
- this.watchers.delete(entry);
1363
- } };
1364
1789
  }
1365
- /**
1366
- * Opens the repo-managed collaborative document, registers it for persistence,
1367
- * and schedules a doc-level sync so `whenSyncedWithRemote` resolves after remote backfills.
1368
- */
1369
- async openCollaborativeDoc(docId) {
1370
- const doc = await this.ensureDoc(docId);
1371
- const refs = this.docRefs.get(docId) ?? 0;
1372
- this.docRefs.set(docId, refs + 1);
1373
- return new RepoDocHandleImpl(docId, doc, this.whenDocInSyncWithRemote(docId), async (id, instance) => this.onDocHandleClose(id, instance));
1790
+ nextLexicographicString(value) {
1791
+ if (!value) return void 0;
1792
+ for (let i = value.length - 1; i >= 0; i -= 1) {
1793
+ const code = value.charCodeAt(i);
1794
+ if (code < 65535) return `${value.slice(0, i)}${String.fromCharCode(code + 1)}`;
1795
+ }
1374
1796
  }
1375
- /**
1376
- * Opens a detached `LoroDoc` snapshot that never registers with the repo, meaning
1377
- * it neither participates in remote subscriptions nor persists edits back to storage.
1378
- */
1379
- async openDetachedDoc(docId) {
1380
- await this.ready();
1381
- return new RepoDocHandleImpl(docId, await this.materializeDetachedDoc(docId), Promise.resolve(), async () => {});
1797
+ readDocMetadataFromFlock(docId) {
1798
+ const rows = this.metaFlock.scan({ prefix: ["m", docId] });
1799
+ if (!rows.length) return void 0;
1800
+ const docMeta = {};
1801
+ let populated = false;
1802
+ for (const row of rows) {
1803
+ if (!Array.isArray(row.key) || row.key.length < 2) continue;
1804
+ if (row.key.length === 2) {
1805
+ const obj = asJsonObject(row.value);
1806
+ if (!obj) continue;
1807
+ for (const [field, value] of Object.entries(obj)) {
1808
+ const cloned = cloneJsonValue(value);
1809
+ if (cloned !== void 0) {
1810
+ docMeta[field] = cloned;
1811
+ populated = true;
1812
+ }
1813
+ }
1814
+ continue;
1815
+ }
1816
+ const fieldKey = row.key[2];
1817
+ if (typeof fieldKey !== "string") continue;
1818
+ if (fieldKey === "$tombstone") {
1819
+ docMeta.tombstone = Boolean(row.value);
1820
+ populated = true;
1821
+ continue;
1822
+ }
1823
+ const jsonValue = cloneJsonValue(row.value);
1824
+ if (jsonValue === void 0) continue;
1825
+ docMeta[fieldKey] = jsonValue;
1826
+ populated = true;
1827
+ }
1828
+ return populated ? docMeta : void 0;
1829
+ }
1830
+ get metaFlock() {
1831
+ return this.getMetaFlock();
1832
+ }
1833
+ };
1834
+
1835
+ //#endregion
1836
+ //#region src/internal/asset-manager.ts
1837
+ var AssetManager = class {
1838
+ storage;
1839
+ assetTransport;
1840
+ getMetaFlock;
1841
+ eventBus;
1842
+ persistMeta;
1843
+ state;
1844
+ get docAssets() {
1845
+ return this.state.docAssets;
1846
+ }
1847
+ get assets() {
1848
+ return this.state.assets;
1849
+ }
1850
+ get orphanedAssets() {
1851
+ return this.state.orphanedAssets;
1852
+ }
1853
+ get assetToDocRefs() {
1854
+ return this.state.assetToDocRefs;
1855
+ }
1856
+ constructor(options) {
1857
+ this.storage = options.storage;
1858
+ this.assetTransport = options.assetTransport;
1859
+ this.getMetaFlock = options.getMetaFlock;
1860
+ this.eventBus = options.eventBus;
1861
+ this.persistMeta = options.persistMeta;
1862
+ this.state = options.state;
1382
1863
  }
1383
1864
  async uploadAsset(params) {
1384
- await this.ready();
1385
1865
  const bytes = await assetContentToUint8Array(params.content);
1386
1866
  const assetId = await computeSha256(bytes);
1387
1867
  if (params.assetId && params.assetId !== assetId) throw new Error("Provided assetId does not match content digest");
@@ -1418,7 +1898,7 @@ var LoroRepo = class {
1418
1898
  existing.metadata = metadata$1;
1419
1899
  this.metaFlock.put(["a", assetId], assetMetaToJson(metadata$1));
1420
1900
  await this.persistMeta();
1421
- this.emit({
1901
+ this.eventBus.emit({
1422
1902
  kind: "asset-metadata",
1423
1903
  asset: this.createAssetDownload(assetId, metadata$1, existing.data),
1424
1904
  by: "local"
@@ -1451,26 +1931,17 @@ var LoroRepo = class {
1451
1931
  data: storedBytes.slice()
1452
1932
  });
1453
1933
  this.rememberAsset(metadata, storedBytes);
1454
- for (const assets of this.docAssets.values()) if (assets.has(assetId)) assets.set(assetId, metadata);
1934
+ this.updateDocAssetMetadata(assetId, metadata);
1455
1935
  this.metaFlock.put(["a", assetId], assetMetaToJson(metadata));
1456
1936
  await this.persistMeta();
1457
- this.emit({
1937
+ this.eventBus.emit({
1458
1938
  kind: "asset-metadata",
1459
1939
  asset: this.createAssetDownload(assetId, metadata, storedBytes),
1460
1940
  by: "local"
1461
1941
  });
1462
1942
  return assetId;
1463
1943
  }
1464
- async whenDocInSyncWithRemote(docId) {
1465
- await this.ready();
1466
- await this.ensureDoc(docId);
1467
- await this.sync({
1468
- scope: "doc",
1469
- docIds: [docId]
1470
- });
1471
- }
1472
1944
  async linkAsset(docId, params) {
1473
- await this.ready();
1474
1945
  const bytes = await assetContentToUint8Array(params.content);
1475
1946
  const assetId = await computeSha256(bytes);
1476
1947
  if (params.assetId && params.assetId !== assetId) throw new Error("Provided assetId does not match content digest");
@@ -1524,7 +1995,7 @@ var LoroRepo = class {
1524
1995
  metadata = nextMetadata;
1525
1996
  this.metaFlock.put(["a", assetId], assetMetaToJson(metadata));
1526
1997
  await this.persistMeta();
1527
- this.emit({
1998
+ this.eventBus.emit({
1528
1999
  kind: "asset-metadata",
1529
2000
  asset: this.createAssetDownload(assetId, metadata, existing.data),
1530
2001
  by: "local"
@@ -1557,7 +2028,7 @@ var LoroRepo = class {
1557
2028
  data: storedBytes.slice()
1558
2029
  });
1559
2030
  this.rememberAsset(metadata, storedBytes);
1560
- for (const assets of this.docAssets.values()) if (assets.has(assetId)) assets.set(assetId, metadata);
2031
+ this.updateDocAssetMetadata(assetId, metadata);
1561
2032
  this.metaFlock.put(["a", assetId], assetMetaToJson(metadata));
1562
2033
  created = true;
1563
2034
  }
@@ -1573,26 +2044,20 @@ var LoroRepo = class {
1573
2044
  assetId
1574
2045
  ], true);
1575
2046
  await this.persistMeta();
1576
- this.emit({
2047
+ this.eventBus.emit({
1577
2048
  kind: "asset-link",
1578
2049
  docId,
1579
2050
  assetId,
1580
2051
  by: "local"
1581
2052
  });
1582
- if (created) this.emit({
2053
+ if (created) this.eventBus.emit({
1583
2054
  kind: "asset-metadata",
1584
2055
  asset: this.createAssetDownload(assetId, metadata, storedBytes ?? bytes),
1585
2056
  by: "local"
1586
2057
  });
1587
2058
  return assetId;
1588
2059
  }
1589
- async fetchAsset(assetId) {
1590
- await this.ready();
1591
- const { metadata, bytes } = await this.materializeAsset(assetId);
1592
- return this.createAssetDownload(assetId, metadata, bytes);
1593
- }
1594
2060
  async unlinkAsset(docId, assetId) {
1595
- await this.ready();
1596
2061
  const mapping = this.docAssets.get(docId);
1597
2062
  if (!mapping || !mapping.has(assetId)) return;
1598
2063
  mapping.delete(assetId);
@@ -1617,7 +2082,7 @@ var LoroRepo = class {
1617
2082
  }
1618
2083
  }
1619
2084
  await this.persistMeta();
1620
- this.emit({
2085
+ this.eventBus.emit({
1621
2086
  kind: "asset-unlink",
1622
2087
  docId,
1623
2088
  assetId,
@@ -1625,7 +2090,6 @@ var LoroRepo = class {
1625
2090
  });
1626
2091
  }
1627
2092
  async listAssets(docId) {
1628
- await this.ready();
1629
2093
  const mapping = this.docAssets.get(docId);
1630
2094
  if (!mapping) return [];
1631
2095
  return Array.from(mapping.values()).map((asset) => ({ ...asset }));
@@ -1633,26 +2097,241 @@ var LoroRepo = class {
1633
2097
  async ensureAsset(assetId) {
1634
2098
  return this.fetchAsset(assetId);
1635
2099
  }
1636
- createAssetDownload(assetId, metadata, initialBytes) {
1637
- let cached = initialBytes ? initialBytes.slice() : void 0;
1638
- return {
1639
- assetId,
1640
- size: metadata.size,
1641
- createdAt: metadata.createdAt,
1642
- mime: metadata.mime,
1643
- policy: metadata.policy,
1644
- tag: metadata.tag,
1645
- content: async () => {
1646
- if (!cached) cached = (await this.materializeAsset(assetId)).bytes.slice();
1647
- return toReadableStream(cached.slice());
1648
- }
1649
- };
2100
+ async fetchAsset(assetId) {
2101
+ const { metadata, bytes } = await this.materializeAsset(assetId);
2102
+ return this.createAssetDownload(assetId, metadata, bytes);
1650
2103
  }
1651
- async materializeAsset(assetId) {
1652
- let record = this.assets.get(assetId);
1653
- if (record?.data) return {
1654
- metadata: record.metadata,
1655
- bytes: record.data.slice()
2104
+ async gcAssets(options = {}) {
2105
+ const { minKeepMs = 0 } = options;
2106
+ const now = Date.now();
2107
+ let removed = 0;
2108
+ for (const [assetId, orphan] of Array.from(this.orphanedAssets.entries())) {
2109
+ if (now - orphan.deletedAt < minKeepMs) continue;
2110
+ this.orphanedAssets.delete(assetId);
2111
+ if (this.storage?.deleteAsset) try {
2112
+ await this.storage.deleteAsset(assetId);
2113
+ } catch (error) {
2114
+ logAsyncError(`asset ${assetId} delete`)(error);
2115
+ }
2116
+ removed += 1;
2117
+ }
2118
+ return removed;
2119
+ }
2120
+ refreshDocAssetsEntry(docId, by) {
2121
+ const mapping = this.readDocAssetsFromFlock(docId);
2122
+ const previous = this.docAssets.get(docId);
2123
+ if (!mapping.size) {
2124
+ if (previous?.size) {
2125
+ this.docAssets.delete(docId);
2126
+ for (const assetId of previous.keys()) {
2127
+ this.removeDocAssetReference(assetId, docId);
2128
+ this.eventBus.emit({
2129
+ kind: "asset-unlink",
2130
+ docId,
2131
+ assetId,
2132
+ by
2133
+ });
2134
+ }
2135
+ }
2136
+ return;
2137
+ }
2138
+ this.docAssets.set(docId, mapping);
2139
+ const removed = [];
2140
+ if (previous) {
2141
+ for (const assetId of previous.keys()) if (!mapping.has(assetId)) removed.push(assetId);
2142
+ }
2143
+ for (const assetId of removed) {
2144
+ this.removeDocAssetReference(assetId, docId);
2145
+ this.eventBus.emit({
2146
+ kind: "asset-unlink",
2147
+ docId,
2148
+ assetId,
2149
+ by
2150
+ });
2151
+ }
2152
+ for (const assetId of mapping.keys()) {
2153
+ const isNew = !previous || !previous.has(assetId);
2154
+ this.addDocReference(assetId, docId);
2155
+ if (isNew) this.eventBus.emit({
2156
+ kind: "asset-link",
2157
+ docId,
2158
+ assetId,
2159
+ by
2160
+ });
2161
+ }
2162
+ }
2163
+ refreshAssetMetadataEntry(assetId, by) {
2164
+ const previous = this.assets.get(assetId);
2165
+ const metadata = assetMetaFromJson(this.metaFlock.get(["a", assetId]));
2166
+ if (!metadata) {
2167
+ this.handleAssetRemoval(assetId, by);
2168
+ return;
2169
+ }
2170
+ const existingData = previous?.data;
2171
+ this.rememberAsset(metadata, existingData);
2172
+ this.updateDocAssetMetadata(assetId, cloneRepoAssetMetadata(metadata));
2173
+ if (!previous || !assetMetadataEqual(previous.metadata, metadata)) this.eventBus.emit({
2174
+ kind: "asset-metadata",
2175
+ asset: this.createAssetDownload(assetId, metadata, existingData),
2176
+ by
2177
+ });
2178
+ }
2179
+ hydrateFromFlock(by) {
2180
+ const prevDocAssets = new Map(this.docAssets);
2181
+ const prevAssets = new Map(this.assets);
2182
+ const nextAssets = /* @__PURE__ */ new Map();
2183
+ const assetRows = this.metaFlock.scan({ prefix: ["a"] });
2184
+ for (const row of assetRows) {
2185
+ if (!Array.isArray(row.key) || row.key.length < 2) continue;
2186
+ const assetId = row.key[1];
2187
+ if (typeof assetId !== "string") continue;
2188
+ const metadata = assetMetaFromJson(row.value);
2189
+ if (!metadata) continue;
2190
+ const existing = this.assets.get(assetId);
2191
+ nextAssets.set(assetId, {
2192
+ metadata,
2193
+ data: existing?.data
2194
+ });
2195
+ }
2196
+ const nextDocAssets = /* @__PURE__ */ new Map();
2197
+ const linkRows = this.metaFlock.scan({ prefix: ["ld"] });
2198
+ for (const row of linkRows) {
2199
+ if (!Array.isArray(row.key) || row.key.length < 3) continue;
2200
+ const docId = row.key[1];
2201
+ const assetId = row.key[2];
2202
+ if (typeof docId !== "string" || typeof assetId !== "string") continue;
2203
+ const metadata = nextAssets.get(assetId)?.metadata;
2204
+ if (!metadata) continue;
2205
+ const mapping = nextDocAssets.get(docId) ?? /* @__PURE__ */ new Map();
2206
+ mapping.set(assetId, metadata);
2207
+ nextDocAssets.set(docId, mapping);
2208
+ }
2209
+ const removedAssets = [];
2210
+ for (const [assetId, record] of prevAssets) if (!nextAssets.has(assetId)) removedAssets.push([assetId, record]);
2211
+ if (removedAssets.length > 0) {
2212
+ const now = Date.now();
2213
+ for (const [assetId, record] of removedAssets) {
2214
+ const deletedAt = this.orphanedAssets.get(assetId)?.deletedAt ?? now;
2215
+ this.orphanedAssets.set(assetId, {
2216
+ metadata: record.metadata,
2217
+ deletedAt
2218
+ });
2219
+ }
2220
+ }
2221
+ this.docAssets.clear();
2222
+ for (const [docId, assets] of nextDocAssets) this.docAssets.set(docId, assets);
2223
+ this.assetToDocRefs.clear();
2224
+ for (const [docId, assets] of nextDocAssets) for (const assetId of assets.keys()) {
2225
+ const refs = this.assetToDocRefs.get(assetId) ?? /* @__PURE__ */ new Set();
2226
+ refs.add(docId);
2227
+ this.assetToDocRefs.set(assetId, refs);
2228
+ }
2229
+ this.assets.clear();
2230
+ for (const record of nextAssets.values()) this.rememberAsset(record.metadata, record.data);
2231
+ for (const [assetId, record] of nextAssets) {
2232
+ const previous = prevAssets.get(assetId)?.metadata;
2233
+ if (!assetMetadataEqual(previous, record.metadata)) this.eventBus.emit({
2234
+ kind: "asset-metadata",
2235
+ asset: this.createAssetDownload(assetId, record.metadata, record.data),
2236
+ by
2237
+ });
2238
+ }
2239
+ for (const [docId, assets] of nextDocAssets) {
2240
+ const previous = prevDocAssets.get(docId);
2241
+ for (const assetId of assets.keys()) if (!previous || !previous.has(assetId)) this.eventBus.emit({
2242
+ kind: "asset-link",
2243
+ docId,
2244
+ assetId,
2245
+ by
2246
+ });
2247
+ }
2248
+ for (const [docId, assets] of prevDocAssets) {
2249
+ const current = nextDocAssets.get(docId);
2250
+ for (const assetId of assets.keys()) if (!current || !current.has(assetId)) this.eventBus.emit({
2251
+ kind: "asset-unlink",
2252
+ docId,
2253
+ assetId,
2254
+ by
2255
+ });
2256
+ }
2257
+ }
2258
+ clear() {
2259
+ this.docAssets.clear();
2260
+ this.assets.clear();
2261
+ this.orphanedAssets.clear();
2262
+ this.assetToDocRefs.clear();
2263
+ }
2264
+ readDocAssetsFromFlock(docId) {
2265
+ const rows = this.metaFlock.scan({ prefix: ["ld", docId] });
2266
+ const mapping = /* @__PURE__ */ new Map();
2267
+ for (const row of rows) {
2268
+ if (!Array.isArray(row.key) || row.key.length < 3) continue;
2269
+ const assetId = row.key[2];
2270
+ if (typeof assetId !== "string") continue;
2271
+ if (!(row.value !== void 0 && row.value !== null && row.value !== false)) continue;
2272
+ let metadata = this.assets.get(assetId)?.metadata;
2273
+ if (!metadata) {
2274
+ metadata = this.readAssetMetadataFromFlock(assetId);
2275
+ if (!metadata) continue;
2276
+ this.rememberAsset(metadata);
2277
+ }
2278
+ mapping.set(assetId, cloneRepoAssetMetadata(metadata));
2279
+ }
2280
+ return mapping;
2281
+ }
2282
+ readAssetMetadataFromFlock(assetId) {
2283
+ return assetMetaFromJson(this.metaFlock.get(["a", assetId]));
2284
+ }
2285
+ handleAssetRemoval(assetId, by) {
2286
+ const record = this.assets.get(assetId);
2287
+ if (!record) return;
2288
+ this.assets.delete(assetId);
2289
+ this.markAssetAsOrphan(assetId, record.metadata);
2290
+ const refs = this.assetToDocRefs.get(assetId);
2291
+ if (refs) {
2292
+ this.assetToDocRefs.delete(assetId);
2293
+ for (const docId of refs) {
2294
+ const assets = this.docAssets.get(docId);
2295
+ if (assets?.delete(assetId) && assets.size === 0) this.docAssets.delete(docId);
2296
+ this.eventBus.emit({
2297
+ kind: "asset-unlink",
2298
+ docId,
2299
+ assetId,
2300
+ by
2301
+ });
2302
+ }
2303
+ return;
2304
+ }
2305
+ for (const [docId, assets] of this.docAssets) if (assets.delete(assetId)) {
2306
+ if (assets.size === 0) this.docAssets.delete(docId);
2307
+ this.eventBus.emit({
2308
+ kind: "asset-unlink",
2309
+ docId,
2310
+ assetId,
2311
+ by
2312
+ });
2313
+ }
2314
+ }
2315
+ createAssetDownload(assetId, metadata, initialBytes) {
2316
+ let cached = initialBytes ? initialBytes.slice() : void 0;
2317
+ return {
2318
+ assetId,
2319
+ size: metadata.size,
2320
+ createdAt: metadata.createdAt,
2321
+ mime: metadata.mime,
2322
+ policy: metadata.policy,
2323
+ tag: metadata.tag,
2324
+ content: async () => {
2325
+ if (!cached) cached = (await this.materializeAsset(assetId)).bytes.slice();
2326
+ return toReadableStream(cached.slice());
2327
+ }
2328
+ };
2329
+ }
2330
+ async materializeAsset(assetId) {
2331
+ let record = this.assets.get(assetId);
2332
+ if (record?.data) return {
2333
+ metadata: record.metadata,
2334
+ bytes: record.data.slice()
1656
2335
  };
1657
2336
  if (record && this.storage) {
1658
2337
  const stored = await this.storage.loadAsset(assetId);
@@ -1712,147 +2391,77 @@ var LoroRepo = class {
1712
2391
  };
1713
2392
  }
1714
2393
  updateDocAssetMetadata(assetId, metadata) {
1715
- for (const assets of this.docAssets.values()) if (assets.has(assetId)) assets.set(assetId, metadata);
1716
- }
1717
- async gcAssets(options = {}) {
1718
- await this.ready();
1719
- const { minKeepMs = 0 } = options;
1720
- const now = Date.now();
1721
- let removed = 0;
1722
- for (const [assetId, orphan] of Array.from(this.orphanedAssets.entries())) {
1723
- if (now - orphan.deletedAt < minKeepMs) continue;
1724
- this.orphanedAssets.delete(assetId);
1725
- if (this.storage?.deleteAsset) try {
1726
- await this.storage.deleteAsset(assetId);
1727
- } catch (error) {
1728
- logAsyncError(`asset ${assetId} delete`)(error);
1729
- }
1730
- removed += 1;
2394
+ const refs = this.assetToDocRefs.get(assetId);
2395
+ if (!refs) return;
2396
+ for (const docId of refs) {
2397
+ const assets = this.docAssets.get(docId);
2398
+ if (assets) assets.set(assetId, metadata);
1731
2399
  }
1732
- return removed;
1733
2400
  }
1734
- async onDocHandleClose(docId, doc) {
1735
- const refs = this.docRefs.get(docId) ?? 0;
1736
- if (refs <= 1) this.docRefs.delete(docId);
1737
- else this.docRefs.set(docId, refs - 1);
1738
- await this.persistDocUpdate(docId, doc);
1739
- if (!await this.flushScheduledDocFrontierUpdate(docId)) await this.updateDocFrontiers(docId, doc, "local");
2401
+ rememberAsset(metadata, bytes) {
2402
+ const data = bytes ? bytes.slice() : this.assets.get(metadata.assetId)?.data;
2403
+ this.assets.set(metadata.assetId, {
2404
+ metadata,
2405
+ data
2406
+ });
2407
+ this.orphanedAssets.delete(metadata.assetId);
1740
2408
  }
1741
- async ensureDoc(docId) {
1742
- await this.ready();
1743
- const cached = this.docs.get(docId);
1744
- if (cached) {
1745
- this.ensureDocSubscription(docId, cached);
1746
- if (!this.docPersistedVersions.has(docId)) this.docPersistedVersions.set(docId, cached.version());
1747
- return cached;
1748
- }
1749
- if (this.storage) {
1750
- const stored = await this.storage.loadDoc(docId);
1751
- if (stored) {
1752
- this.registerDoc(docId, stored);
1753
- return stored;
1754
- }
2409
+ addDocReference(assetId, docId) {
2410
+ const refs = this.assetToDocRefs.get(assetId) ?? /* @__PURE__ */ new Set();
2411
+ refs.add(docId);
2412
+ this.assetToDocRefs.set(assetId, refs);
2413
+ }
2414
+ removeDocAssetReference(assetId, docId) {
2415
+ const refs = this.assetToDocRefs.get(assetId);
2416
+ if (!refs) return;
2417
+ refs.delete(docId);
2418
+ if (refs.size === 0) {
2419
+ this.assetToDocRefs.delete(assetId);
2420
+ this.markAssetAsOrphan(assetId);
1755
2421
  }
1756
- const created = await this.docFactory(docId);
1757
- this.registerDoc(docId, created);
1758
- return created;
1759
2422
  }
1760
- async materializeDetachedDoc(docId) {
1761
- const doc = await this.docFactory(docId);
1762
- const snapshot = await this.exportDocSnapshot(docId);
1763
- if (snapshot) doc.import(snapshot);
1764
- return doc;
1765
- }
1766
- async exportDocSnapshot(docId) {
1767
- const cached = this.docs.get(docId);
1768
- if (cached) return cached.export({ mode: "snapshot" });
1769
- if (!this.storage) return;
1770
- return (await this.storage.loadDoc(docId))?.export({ mode: "snapshot" });
1771
- }
1772
- async persistMeta() {
1773
- if (!this.storage) return;
1774
- const bundle = this.metaFlock.exportJson();
1775
- const encoded = textEncoder.encode(JSON.stringify(bundle));
1776
- await this.storage.save({
1777
- type: "meta",
1778
- update: encoded
2423
+ markAssetAsOrphan(assetId, metadataOverride) {
2424
+ const metadata = metadataOverride ?? this.assets.get(assetId)?.metadata;
2425
+ if (!metadata) return;
2426
+ const deletedAt = this.orphanedAssets.get(assetId)?.deletedAt ?? Date.now();
2427
+ this.orphanedAssets.set(assetId, {
2428
+ metadata,
2429
+ deletedAt
1779
2430
  });
1780
2431
  }
1781
- async persistDoc(docId, doc) {
1782
- const previousVersion = this.docPersistedVersions.get(docId);
1783
- const nextVersion = doc.version();
1784
- if (!this.storage) {
1785
- this.docPersistedVersions.set(docId, nextVersion);
1786
- return;
1787
- }
1788
- const snapshot = doc.export({ mode: "snapshot" });
1789
- this.docPersistedVersions.set(docId, nextVersion);
1790
- try {
1791
- await this.storage.save({
1792
- type: "doc-snapshot",
1793
- docId,
1794
- snapshot
1795
- });
1796
- } catch (error) {
1797
- if (previousVersion) this.docPersistedVersions.set(docId, previousVersion);
1798
- else this.docPersistedVersions.delete(docId);
1799
- throw error;
1800
- }
1801
- }
1802
- async persistDocUpdate(docId, doc) {
1803
- const previousVersion = this.docPersistedVersions.get(docId);
1804
- const nextVersion = doc.version();
1805
- if (!this.storage) {
1806
- this.docPersistedVersions.set(docId, nextVersion);
1807
- return;
1808
- }
1809
- if (!previousVersion) {
1810
- await this.persistDoc(docId, doc);
1811
- this.docPersistedVersions.set(docId, nextVersion);
1812
- return;
1813
- }
1814
- const update = doc.export({
1815
- mode: "update",
1816
- from: previousVersion
1817
- });
1818
- if (!update.length) {
1819
- this.docPersistedVersions.set(docId, nextVersion);
1820
- return;
1821
- }
1822
- this.docPersistedVersions.set(docId, nextVersion);
1823
- try {
1824
- await this.storage.save({
1825
- type: "doc-update",
1826
- docId,
1827
- update
1828
- });
1829
- } catch (error) {
1830
- this.docPersistedVersions.set(docId, previousVersion);
1831
- throw error;
2432
+ getAssetMetadata(assetId) {
2433
+ const record = this.assets.get(assetId);
2434
+ if (record) return record.metadata;
2435
+ for (const assets of this.docAssets.values()) {
2436
+ const metadata = assets.get(assetId);
2437
+ if (metadata) return metadata;
1832
2438
  }
1833
2439
  }
1834
- pushEventBy(by) {
1835
- this.eventByStack.push(by);
1836
- }
1837
- popEventBy() {
1838
- this.eventByStack.pop();
1839
- }
1840
- resolveEventBy(defaultBy) {
1841
- const index = this.eventByStack.length - 1;
1842
- return index >= 0 ? this.eventByStack[index] : defaultBy;
1843
- }
1844
- ensureMetaLiveMonitor() {
1845
- if (this.unsubscribeMetaFlock) return;
1846
- this.unsubscribeMetaFlock = this.metaFlock.subscribe((batch) => {
1847
- if (batch.source === "local") return;
1848
- const by = this.resolveEventBy("live");
1849
- (async () => {
1850
- this.applyMetaFlockEvents(batch.events, by);
1851
- await this.persistMeta();
1852
- })().catch(logAsyncError("meta live monitor sync"));
1853
- });
2440
+ get metaFlock() {
2441
+ return this.getMetaFlock();
1854
2442
  }
1855
- applyMetaFlockEvents(events, by) {
2443
+ };
2444
+
2445
+ //#endregion
2446
+ //#region src/internal/flock-hydrator.ts
2447
+ var FlockHydrator = class {
2448
+ getMetaFlock;
2449
+ metadataManager;
2450
+ assetManager;
2451
+ docManager;
2452
+ constructor(options) {
2453
+ this.getMetaFlock = options.getMetaFlock;
2454
+ this.metadataManager = options.metadataManager;
2455
+ this.assetManager = options.assetManager;
2456
+ this.docManager = options.docManager;
2457
+ }
2458
+ hydrateAll(by) {
2459
+ const nextMetadata = this.readAllDocMetadata();
2460
+ this.metadataManager.replaceAll(nextMetadata, by);
2461
+ this.assetManager.hydrateFromFlock(by);
2462
+ this.docManager.hydrateFrontierKeys();
2463
+ }
2464
+ applyEvents(events, by) {
1856
2465
  if (!events.length) return;
1857
2466
  const docMetadataIds = /* @__PURE__ */ new Set();
1858
2467
  const docAssetIds = /* @__PURE__ */ new Set();
@@ -1878,134 +2487,12 @@ var LoroRepo = class {
1878
2487
  if (typeof docId === "string") docFrontiersIds.add(docId);
1879
2488
  }
1880
2489
  }
1881
- for (const assetId of assetIds) this.refreshAssetMetadataEntry(assetId, by);
1882
- for (const docId of docMetadataIds) this.refreshDocMetadataEntry(docId, by);
1883
- for (const docId of docAssetIds) this.refreshDocAssetsEntry(docId, by);
1884
- for (const docId of docFrontiersIds) this.refreshDocFrontierKeys(docId);
1885
- }
1886
- registerDoc(docId, doc) {
1887
- this.docs.set(docId, doc);
1888
- this.docPersistedVersions.set(docId, doc.version());
1889
- this.ensureDocSubscription(docId, doc);
1890
- }
1891
- ensureDocSubscription(docId, doc) {
1892
- if (this.docSubscriptions.has(docId)) return;
1893
- const unsubscribe = doc.subscribe((batch) => {
1894
- const stackBy = this.resolveEventBy("local");
1895
- const by = stackBy === "local" && batch.by === "import" ? "live" : stackBy;
1896
- this.onDocEvent(docId, doc, batch, by);
1897
- });
1898
- if (typeof unsubscribe === "function") this.docSubscriptions.set(docId, unsubscribe);
1899
- }
1900
- rememberAsset(metadata, bytes) {
1901
- const data = bytes ? bytes.slice() : this.assets.get(metadata.assetId)?.data;
1902
- this.assets.set(metadata.assetId, {
1903
- metadata,
1904
- data
1905
- });
1906
- this.orphanedAssets.delete(metadata.assetId);
1907
- }
1908
- scheduleDocFrontierUpdate(docId, doc, by) {
1909
- const existing = this.docFrontierUpdates.get(docId);
1910
- const effectiveBy = existing ? this.mergeRepoEventBy(existing.by, by) : by;
1911
- if (existing) clearTimeout(existing.timeout);
1912
- const delay = this.docFrontierDebounceMs > 0 ? this.docFrontierDebounceMs : 0;
1913
- const timeout = setTimeout(() => this.runScheduledDocFrontierUpdate(docId), delay);
1914
- this.docFrontierUpdates.set(docId, {
1915
- timeout,
1916
- doc,
1917
- by: effectiveBy
1918
- });
1919
- }
1920
- mergeRepoEventBy(current, next) {
1921
- if (current === next) return current;
1922
- if (current === "live" || next === "live") return "live";
1923
- if (current === "sync" || next === "sync") return "sync";
1924
- return "local";
1925
- }
1926
- runScheduledDocFrontierUpdate(docId) {
1927
- const pending = this.docFrontierUpdates.get(docId);
1928
- if (!pending) return;
1929
- this.docFrontierUpdates.delete(docId);
1930
- this.pushEventBy(pending.by);
1931
- (async () => {
1932
- try {
1933
- await this.updateDocFrontiers(docId, pending.doc, pending.by);
1934
- } finally {
1935
- this.popEventBy();
1936
- }
1937
- })().catch(logAsyncError(`doc ${docId} frontier debounce`));
1938
- }
1939
- async flushScheduledDocFrontierUpdate(docId) {
1940
- const pending = this.docFrontierUpdates.get(docId);
1941
- if (!pending) return false;
1942
- clearTimeout(pending.timeout);
1943
- this.docFrontierUpdates.delete(docId);
1944
- this.pushEventBy(pending.by);
1945
- try {
1946
- await this.updateDocFrontiers(docId, pending.doc, pending.by);
1947
- } finally {
1948
- this.popEventBy();
1949
- }
1950
- return true;
1951
- }
1952
- onDocEvent(docId, doc, _batch, by) {
1953
- (async () => {
1954
- const a = this.persistDocUpdate(docId, doc);
1955
- if (by === "local") {
1956
- this.scheduleDocFrontierUpdate(docId, doc, by);
1957
- await a;
1958
- return;
1959
- }
1960
- const b = this.flushScheduledDocFrontierUpdate(docId);
1961
- const c = this.updateDocFrontiers(docId, doc, by);
1962
- await Promise.all([
1963
- a,
1964
- b,
1965
- c
1966
- ]);
1967
- })().catch(logAsyncError(`doc ${docId} event processing`));
1968
- }
1969
- getAssetMetadata(assetId) {
1970
- const record = this.assets.get(assetId);
1971
- if (record) return record.metadata;
1972
- for (const assets of this.docAssets.values()) {
1973
- const metadata = assets.get(assetId);
1974
- if (metadata) return metadata;
1975
- }
2490
+ for (const assetId of assetIds) this.assetManager.refreshAssetMetadataEntry(assetId, by);
2491
+ for (const docId of docMetadataIds) this.metadataManager.refreshFromFlock(docId, by);
2492
+ for (const docId of docAssetIds) this.assetManager.refreshDocAssetsEntry(docId, by);
2493
+ for (const docId of docFrontiersIds) this.docManager.refreshDocFrontierKeys(docId);
1976
2494
  }
1977
- async updateDocFrontiers(docId, doc, defaultBy = "local") {
1978
- const { json, key } = canonicalizeVersionVector(computeVersionVector(doc));
1979
- const existingKeys = this.docFrontierKeys.get(docId) ?? /* @__PURE__ */ new Set();
1980
- let mutated = false;
1981
- if (existingKeys.size !== 1 || !existingKeys.has(key)) {
1982
- for (const entry of existingKeys) this.metaFlock.delete([
1983
- "f",
1984
- docId,
1985
- entry
1986
- ]);
1987
- this.metaFlock.put([
1988
- "f",
1989
- docId,
1990
- key
1991
- ], json);
1992
- this.docFrontierKeys.set(docId, new Set([key]));
1993
- mutated = true;
1994
- }
1995
- if (mutated) await this.persistMeta();
1996
- const by = this.resolveEventBy(defaultBy);
1997
- const frontiers = getDocFrontiers(doc);
1998
- this.emit({
1999
- kind: "doc-frontiers",
2000
- docId,
2001
- frontiers,
2002
- by
2003
- });
2004
- }
2005
- hydrateMetadataFromFlock(by) {
2006
- const prevMetadata = new Map(this.metadata);
2007
- const prevDocAssets = new Map(this.docAssets);
2008
- const prevAssets = new Map(this.assets);
2495
+ readAllDocMetadata() {
2009
2496
  const nextMetadata = /* @__PURE__ */ new Map();
2010
2497
  const metadataRows = this.metaFlock.scan({ prefix: ["m"] });
2011
2498
  for (const row of metadataRows) {
@@ -2036,135 +2523,404 @@ var LoroRepo = class {
2036
2523
  if (jsonValue === void 0) continue;
2037
2524
  docMeta[fieldKey] = jsonValue;
2038
2525
  }
2039
- const nextAssets = /* @__PURE__ */ new Map();
2040
- const assetRows = this.metaFlock.scan({ prefix: ["a"] });
2041
- for (const row of assetRows) {
2042
- if (!Array.isArray(row.key) || row.key.length < 2) continue;
2043
- const assetId = row.key[1];
2044
- if (typeof assetId !== "string") continue;
2045
- const metadata = assetMetaFromJson(row.value);
2046
- if (!metadata) continue;
2047
- const existing = this.assets.get(assetId);
2048
- nextAssets.set(assetId, {
2049
- metadata,
2050
- data: existing?.data
2526
+ return nextMetadata;
2527
+ }
2528
+ get metaFlock() {
2529
+ return this.getMetaFlock();
2530
+ }
2531
+ };
2532
+
2533
+ //#endregion
2534
+ //#region src/internal/sync-runner.ts
2535
+ /**
2536
+ * Syncs data between the storage and transport layers.
2537
+ */
2538
+ var SyncRunner = class {
2539
+ storage;
2540
+ transport;
2541
+ eventBus;
2542
+ docManager;
2543
+ metadataManager;
2544
+ assetManager;
2545
+ flockHydrator;
2546
+ getMetaFlock;
2547
+ replaceMetaFlock;
2548
+ persistMeta;
2549
+ readyPromise;
2550
+ metaRoomSubscription;
2551
+ unsubscribeMetaFlock;
2552
+ docSubscriptions = /* @__PURE__ */ new Map();
2553
+ constructor(options) {
2554
+ this.storage = options.storage;
2555
+ this.transport = options.transport;
2556
+ this.eventBus = options.eventBus;
2557
+ this.docManager = options.docManager;
2558
+ this.metadataManager = options.metadataManager;
2559
+ this.assetManager = options.assetManager;
2560
+ this.flockHydrator = options.flockHydrator;
2561
+ this.getMetaFlock = options.getMetaFlock;
2562
+ this.replaceMetaFlock = options.mergeFlock;
2563
+ this.persistMeta = options.persistMeta;
2564
+ }
2565
+ async ready() {
2566
+ if (!this.readyPromise) this.readyPromise = this.initialize();
2567
+ await this.readyPromise;
2568
+ }
2569
+ async sync(options = {}) {
2570
+ await this.ready();
2571
+ const { scope = "full", docIds } = options;
2572
+ if (!this.transport) return;
2573
+ if (!this.transport.isConnected()) await this.transport.connect();
2574
+ if (scope === "meta" || scope === "full") {
2575
+ this.eventBus.pushEventBy("sync");
2576
+ const recordedEvents = [];
2577
+ const unsubscribe = this.metaFlock.subscribe((batch) => {
2578
+ if (batch.source === "local") return;
2579
+ recordedEvents.push(...batch.events);
2051
2580
  });
2581
+ try {
2582
+ if (!(await this.transport.syncMeta(this.metaFlock)).ok) throw new Error("Metadata sync failed");
2583
+ if (recordedEvents.length > 0) this.flockHydrator.applyEvents(recordedEvents, "sync");
2584
+ else this.flockHydrator.hydrateAll("sync");
2585
+ await this.persistMeta();
2586
+ } finally {
2587
+ unsubscribe();
2588
+ this.eventBus.popEventBy();
2589
+ }
2052
2590
  }
2053
- const nextDocAssets = /* @__PURE__ */ new Map();
2054
- const linkRows = this.metaFlock.scan({ prefix: ["ld"] });
2055
- for (const row of linkRows) {
2056
- if (!Array.isArray(row.key) || row.key.length < 3) continue;
2057
- const docId = row.key[1];
2058
- const assetId = row.key[2];
2059
- if (typeof docId !== "string" || typeof assetId !== "string") continue;
2060
- const metadata = nextAssets.get(assetId)?.metadata;
2061
- if (!metadata) continue;
2062
- const mapping = nextDocAssets.get(docId) ?? /* @__PURE__ */ new Map();
2063
- mapping.set(assetId, metadata);
2064
- nextDocAssets.set(docId, mapping);
2065
- }
2066
- const nextFrontierKeys = /* @__PURE__ */ new Map();
2067
- const frontierRows = this.metaFlock.scan({ prefix: ["f"] });
2068
- for (const row of frontierRows) {
2069
- if (!Array.isArray(row.key) || row.key.length < 3) continue;
2070
- const docId = row.key[1];
2071
- const frontierKey = row.key[2];
2072
- if (typeof docId !== "string" || typeof frontierKey !== "string") continue;
2073
- const set = nextFrontierKeys.get(docId) ?? /* @__PURE__ */ new Set();
2074
- set.add(frontierKey);
2075
- nextFrontierKeys.set(docId, set);
2591
+ if (scope === "doc" || scope === "full") {
2592
+ const targets = docIds ?? this.metadataManager.getDocIds();
2593
+ for (const docId of targets) {
2594
+ const doc = await this.docManager.ensureDoc(docId);
2595
+ this.eventBus.pushEventBy("sync");
2596
+ try {
2597
+ if (!(await this.transport.syncDoc(docId, doc)).ok) throw new Error(`Document sync failed for ${docId}`);
2598
+ } finally {
2599
+ this.eventBus.popEventBy();
2600
+ }
2601
+ await this.docManager.persistDoc(docId, doc);
2602
+ await this.docManager.updateDocFrontiers(docId, doc, "sync");
2603
+ }
2076
2604
  }
2077
- const removedAssets = [];
2078
- for (const [assetId, record] of prevAssets) if (!nextAssets.has(assetId)) removedAssets.push([assetId, record]);
2079
- if (removedAssets.length > 0) {
2080
- const now = Date.now();
2081
- for (const [assetId, record] of removedAssets) {
2082
- const deletedAt = this.orphanedAssets.get(assetId)?.deletedAt ?? now;
2083
- this.orphanedAssets.set(assetId, {
2084
- metadata: record.metadata,
2085
- deletedAt
2086
- });
2605
+ }
2606
+ async joinMetaRoom(params) {
2607
+ await this.ready();
2608
+ if (!this.transport) throw new Error("Transport adapter not configured");
2609
+ if (!this.transport.isConnected()) await this.transport.connect();
2610
+ if (this.metaRoomSubscription) return this.metaRoomSubscription;
2611
+ this.ensureMetaLiveMonitor();
2612
+ const subscription = this.transport.joinMetaRoom(this.metaFlock, params);
2613
+ const wrapped = {
2614
+ unsubscribe: () => {
2615
+ subscription.unsubscribe();
2616
+ if (this.metaRoomSubscription === wrapped) this.metaRoomSubscription = void 0;
2617
+ if (this.unsubscribeMetaFlock) {
2618
+ this.unsubscribeMetaFlock();
2619
+ this.unsubscribeMetaFlock = void 0;
2620
+ }
2621
+ },
2622
+ firstSyncedWithRemote: subscription.firstSyncedWithRemote,
2623
+ get connected() {
2624
+ return subscription.connected;
2087
2625
  }
2626
+ };
2627
+ this.metaRoomSubscription = wrapped;
2628
+ subscription.firstSyncedWithRemote.then(async () => {
2629
+ const by = this.eventBus.resolveEventBy("live");
2630
+ this.flockHydrator.hydrateAll(by);
2631
+ await this.persistMeta();
2632
+ }).catch(logAsyncError("meta room first sync"));
2633
+ return wrapped;
2634
+ }
2635
+ async joinDocRoom(docId, params) {
2636
+ await this.ready();
2637
+ if (!this.transport) throw new Error("Transport adapter not configured");
2638
+ if (!this.transport.isConnected()) await this.transport.connect();
2639
+ const existing = this.docSubscriptions.get(docId);
2640
+ if (existing) return existing;
2641
+ const doc = await this.docManager.ensureDoc(docId);
2642
+ const subscription = this.transport.joinDocRoom(docId, doc, params);
2643
+ const wrapped = {
2644
+ unsubscribe: () => {
2645
+ subscription.unsubscribe();
2646
+ if (this.docSubscriptions.get(docId) === wrapped) this.docSubscriptions.delete(docId);
2647
+ },
2648
+ firstSyncedWithRemote: subscription.firstSyncedWithRemote,
2649
+ get connected() {
2650
+ return subscription.connected;
2651
+ }
2652
+ };
2653
+ this.docSubscriptions.set(docId, wrapped);
2654
+ subscription.firstSyncedWithRemote.catch(logAsyncError(`doc ${docId} first sync`));
2655
+ return wrapped;
2656
+ }
2657
+ async destroy() {
2658
+ await this.docManager.close();
2659
+ this.metaRoomSubscription?.unsubscribe();
2660
+ this.metaRoomSubscription = void 0;
2661
+ for (const sub of this.docSubscriptions.values()) sub.unsubscribe();
2662
+ this.docSubscriptions.clear();
2663
+ if (this.unsubscribeMetaFlock) {
2664
+ this.unsubscribeMetaFlock();
2665
+ this.unsubscribeMetaFlock = void 0;
2088
2666
  }
2089
- this.metadata.clear();
2090
- for (const [docId, meta] of nextMetadata) this.metadata.set(docId, meta);
2091
- this.docAssets.clear();
2092
- for (const [docId, assets] of nextDocAssets) this.docAssets.set(docId, assets);
2093
- this.assetToDocRefs.clear();
2094
- for (const [docId, assets] of nextDocAssets) for (const assetId of assets.keys()) {
2095
- const refs = this.assetToDocRefs.get(assetId) ?? /* @__PURE__ */ new Set();
2096
- refs.add(docId);
2097
- this.assetToDocRefs.set(assetId, refs);
2667
+ this.eventBus.clear();
2668
+ this.metadataManager.clear();
2669
+ this.assetManager.clear();
2670
+ this.readyPromise = void 0;
2671
+ await this.transport?.close();
2672
+ }
2673
+ async initialize() {
2674
+ if (this.storage) {
2675
+ const snapshot = await this.storage.loadMeta();
2676
+ if (snapshot) this.replaceMetaFlock(snapshot);
2098
2677
  }
2099
- this.assets.clear();
2100
- for (const record of nextAssets.values()) this.rememberAsset(record.metadata, record.data);
2101
- this.docFrontierKeys.clear();
2102
- for (const [docId, keys] of nextFrontierKeys) this.docFrontierKeys.set(docId, keys);
2103
- const docIds = new Set([...prevMetadata.keys(), ...nextMetadata.keys()]);
2104
- for (const docId of docIds) {
2105
- const previous = prevMetadata.get(docId);
2106
- const current = nextMetadata.get(docId);
2107
- if (!current) {
2108
- if (previous) this.emit({
2109
- kind: "doc-metadata",
2110
- docId,
2111
- patch: {},
2112
- by
2678
+ this.flockHydrator.hydrateAll("sync");
2679
+ }
2680
+ ensureMetaLiveMonitor() {
2681
+ if (this.unsubscribeMetaFlock) return;
2682
+ this.unsubscribeMetaFlock = this.metaFlock.subscribe((batch) => {
2683
+ if (batch.source === "local") return;
2684
+ const by = this.eventBus.resolveEventBy("live");
2685
+ (async () => {
2686
+ this.flockHydrator.applyEvents(batch.events, by);
2687
+ await this.persistMeta();
2688
+ })().catch(logAsyncError("meta live monitor sync"));
2689
+ });
2690
+ }
2691
+ get metaFlock() {
2692
+ return this.getMetaFlock();
2693
+ }
2694
+ };
2695
+
2696
+ //#endregion
2697
+ //#region src/internal/repo-state.ts
2698
+ function createRepoState() {
2699
+ return {
2700
+ metadata: /* @__PURE__ */ new Map(),
2701
+ docAssets: /* @__PURE__ */ new Map(),
2702
+ assets: /* @__PURE__ */ new Map(),
2703
+ orphanedAssets: /* @__PURE__ */ new Map(),
2704
+ assetToDocRefs: /* @__PURE__ */ new Map(),
2705
+ docFrontierKeys: /* @__PURE__ */ new Map()
2706
+ };
2707
+ }
2708
+
2709
+ //#endregion
2710
+ //#region src/index.ts
2711
+ const textEncoder = new TextEncoder();
2712
+ const DEFAULT_DOC_FRONTIER_DEBOUNCE_MS = 1e3;
2713
+ var LoroRepo = class LoroRepo {
2714
+ options;
2715
+ _destroyed = false;
2716
+ transport;
2717
+ storage;
2718
+ metaFlock = new Flock();
2719
+ eventBus;
2720
+ docManager;
2721
+ metadataManager;
2722
+ assetManager;
2723
+ assetTransport;
2724
+ flockHydrator;
2725
+ state;
2726
+ syncRunner;
2727
+ constructor(options) {
2728
+ this.options = options;
2729
+ this.transport = options.transportAdapter;
2730
+ this.storage = options.storageAdapter;
2731
+ this.assetTransport = options.assetTransportAdapter;
2732
+ this.eventBus = new RepoEventBus();
2733
+ this.state = createRepoState();
2734
+ const configuredDebounce = options.docFrontierDebounceMs;
2735
+ const docFrontierDebounceMs = typeof configuredDebounce === "number" && Number.isFinite(configuredDebounce) && configuredDebounce >= 0 ? configuredDebounce : DEFAULT_DOC_FRONTIER_DEBOUNCE_MS;
2736
+ this.docManager = new DocManager({
2737
+ storage: this.storage,
2738
+ docFrontierDebounceMs,
2739
+ getMetaFlock: () => this.metaFlock,
2740
+ eventBus: this.eventBus,
2741
+ persistMeta: () => this.persistMeta(),
2742
+ state: this.state
2743
+ });
2744
+ this.metadataManager = new MetadataManager({
2745
+ getMetaFlock: () => this.metaFlock,
2746
+ eventBus: this.eventBus,
2747
+ persistMeta: () => this.persistMeta(),
2748
+ state: this.state
2749
+ });
2750
+ this.assetManager = new AssetManager({
2751
+ storage: this.storage,
2752
+ assetTransport: this.assetTransport,
2753
+ getMetaFlock: () => this.metaFlock,
2754
+ eventBus: this.eventBus,
2755
+ persistMeta: () => this.persistMeta(),
2756
+ state: this.state
2757
+ });
2758
+ this.flockHydrator = new FlockHydrator({
2759
+ getMetaFlock: () => this.metaFlock,
2760
+ metadataManager: this.metadataManager,
2761
+ assetManager: this.assetManager,
2762
+ docManager: this.docManager
2763
+ });
2764
+ this.syncRunner = new SyncRunner({
2765
+ storage: this.storage,
2766
+ transport: this.transport,
2767
+ eventBus: this.eventBus,
2768
+ docManager: this.docManager,
2769
+ metadataManager: this.metadataManager,
2770
+ assetManager: this.assetManager,
2771
+ flockHydrator: this.flockHydrator,
2772
+ getMetaFlock: () => this.metaFlock,
2773
+ mergeFlock: (snapshot) => {
2774
+ this.metaFlock.merge(snapshot);
2775
+ },
2776
+ persistMeta: () => this.persistMeta()
2777
+ });
2778
+ }
2779
+ static async create(options) {
2780
+ const repo = new LoroRepo(options);
2781
+ await repo.storage?.init?.();
2782
+ await repo.ready();
2783
+ return repo;
2784
+ }
2785
+ /**
2786
+ * Load meta from storage.
2787
+ *
2788
+ * You need to call this before all other operations to make the app function correctly.
2789
+ * Though `LoroRepo.create` already does this implicitly.
2790
+ */
2791
+ async ready() {
2792
+ await this.syncRunner.ready();
2793
+ }
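/* Usage sketch (illustrative; not part of the shipped bundle). `storageAdapter` and
   `transportAdapter` stand in for pre-configured adapter instances, e.g. the
   `IndexedDBStorageAdaptor` or `WebSocketTransportAdapter` exported by this package;
   the constructor also accepts `assetTransportAdapter` and `docFrontierDebounceMs`.

   import { LoroRepo } from "loro-repo";

   const repo = await LoroRepo.create({ storageAdapter, transportAdapter });
   // `create` already runs `storage.init()` and `ready()`, so the repo is usable
   // right away; awaiting `repo.ready()` again is effectively a no-op.
*/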
2794
+ /**
2795
+ * Sync selected data via the transport adaptor
2796
+ * @param options
2797
+ */
2798
+ async sync(options = {}) {
2799
+ await this.syncRunner.sync(options);
2800
+ }
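/* Usage sketch (illustrative). `sync` accepts a scope of "meta", "doc", or "full"
   (the default) plus an optional `docIds` list for doc-scoped syncs; it returns
   early when no transport adapter is configured.

   await repo.sync();                                          // metadata + all known docs
   await repo.sync({ scope: "meta" });                         // metadata (Flock) only
   await repo.sync({ scope: "doc", docIds: ["notes/today"] }); // selected docs only
*/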
2801
+ /**
2802
+ * Start syncing the metadata (Flock) room. It will establish a realtime connection to the transport adaptor.
2803
+ * All changes on the room will be synced to the Flock, and all changes on the Flock will be synced to the room.
2804
+ * @param params
2805
+ * @returns
2806
+ */
2807
+ async joinMetaRoom(params) {
2808
+ return this.syncRunner.joinMetaRoom(params);
2809
+ }
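/* Usage sketch (illustrative). The returned subscription exposes `unsubscribe()`,
   a `firstSyncedWithRemote` promise, and a `connected` getter; the shape of
   `params` (e.g. auth) depends on the transport adapter.

   const metaRoom = await repo.joinMetaRoom(params);
   await metaRoom.firstSyncedWithRemote; // remote metadata has been hydrated locally
   // ... later
   metaRoom.unsubscribe();
*/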
2810
+ /**
2811
+ * Start syncing the given doc. It will establish a realtime connection to the transport adaptor.
2812
+ * All changes on the doc will be synced to the transport, and all changes on the transport will be synced to the doc.
2813
+ *
2814
+ * All the changes on the room will be reflected on the same doc you get from `repo.openCollaborativeDoc(docId)`
2815
+ * @param docId
2816
+ * @param params
2817
+ * @returns
2818
+ */
2819
+ async joinDocRoom(docId, params) {
2820
+ return this.syncRunner.joinDocRoom(docId, params);
2821
+ }
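/* Usage sketch (illustrative). Joining a doc room is idempotent per docId: the
   runner caches the subscription and returns the same wrapper on repeat calls.

   const docRoom = await repo.joinDocRoom("notes/today", params);
   await docRoom.firstSyncedWithRemote; // resolves after the first remote exchange
   docRoom.unsubscribe();               // also clears the cached subscription
*/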
2822
+ /**
2823
+ * Opens a document that is automatically persisted to the configured storage adapter.
2824
+ *
2825
+ * - Edits are saved to storage (debounced).
2826
+ * - Frontiers are synced to the metadata (Flock).
2827
+ * - Realtime collaboration is NOT enabled by default; use `joinDocRoom` to connect.
2828
+ */
2829
+ async openPersistedDoc(docId) {
2830
+ return {
2831
+ doc: await this.docManager.openCollaborativeDoc(docId),
2832
+ syncOnce: () => {
2833
+ return this.sync({
2834
+ scope: "doc",
2835
+ docIds: [docId]
2113
2836
  });
2114
- continue;
2837
+ },
2838
+ joinRoom: (auth) => {
2839
+ return this.syncRunner.joinDocRoom(docId, { auth });
2115
2840
  }
2116
- const patch = diffJsonObjects(previous, current);
2117
- if (Object.keys(patch).length > 0) this.emit({
2118
- kind: "doc-metadata",
2119
- docId,
2120
- patch,
2121
- by
2122
- });
2123
- }
2124
- for (const [assetId, record] of nextAssets) {
2125
- const previous = prevAssets.get(assetId)?.metadata;
2126
- if (!assetMetadataEqual(previous, record.metadata)) this.emit({
2127
- kind: "asset-metadata",
2128
- asset: this.createAssetDownload(assetId, record.metadata, record.data),
2129
- by
2130
- });
2131
- }
2132
- for (const [docId, assets] of nextDocAssets) {
2133
- const previous = prevDocAssets.get(docId);
2134
- for (const assetId of assets.keys()) if (!previous || !previous.has(assetId)) this.emit({
2135
- kind: "asset-link",
2136
- docId,
2137
- assetId,
2138
- by
2139
- });
2140
- }
2141
- for (const [docId, assets] of prevDocAssets) {
2142
- const current = nextDocAssets.get(docId);
2143
- for (const assetId of assets.keys()) if (!current || !current.has(assetId)) this.emit({
2144
- kind: "asset-unlink",
2145
- docId,
2146
- assetId,
2147
- by
2148
- });
2149
- }
2841
+ };
2150
2842
  }
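/* Usage sketch (illustrative). `authToken` is a hypothetical credential accepted by
   the transport adapter.

   const { doc, syncOnce, joinRoom } = await repo.openPersistedDoc("notes/today");
   // `doc` is the repo-managed handle; local edits are persisted (debounced) and
   // frontiers are written into the metadata Flock.
   await syncOnce();                       // one-shot doc-scoped sync over the transport
   const room = await joinRoom(authToken); // opt in to realtime collaboration
   await room.firstSyncedWithRemote;
*/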
2151
- shouldNotify(filter, event) {
2152
- if (!filter.docIds && !filter.kinds && !filter.metadataFields && !filter.by) return true;
2153
- if (filter.kinds && !filter.kinds.includes(event.kind)) return false;
2154
- if (filter.by && !filter.by.includes(event.by)) return false;
2155
- const docId = (() => {
2156
- if (event.kind === "doc-metadata" || event.kind === "doc-frontiers") return event.docId;
2157
- if (event.kind === "asset-link" || event.kind === "asset-unlink") return event.docId;
2158
- })();
2159
- if (filter.docIds && docId && !filter.docIds.includes(docId)) return false;
2160
- if (filter.docIds && !docId) return false;
2161
- if (filter.metadataFields && event.kind === "doc-metadata") {
2162
- if (!Object.keys(event.patch).some((key) => filter.metadataFields?.includes(key))) return false;
2163
- }
2164
- return true;
2843
+ async upsertDocMeta(docId, patch) {
2844
+ await this.metadataManager.upsert(docId, patch);
2845
+ }
2846
+ async getDocMeta(docId) {
2847
+ return this.metadataManager.get(docId);
2848
+ }
2849
+ async listDoc(query) {
2850
+ return this.metadataManager.listDoc(query);
2851
+ }
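/* Usage sketch (illustrative). Metadata values are plain JSON; the query supports
   `prefix`/`start`/`end` key ranges and a `limit`, as suggested by the range helpers
   elsewhere in this diff.

   await repo.upsertDocMeta("notes/today", { title: "Today", tags: ["journal"] });
   const meta = await repo.getDocMeta("notes/today");   // cloned JSON object, or undefined
   const entries = await repo.listDoc({ prefix: "notes/", limit: 20 });
   // entries: [{ docId, meta }, ...]
*/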
2852
+ getMeta() {
2853
+ return this.metaFlock;
2854
+ }
2855
+ watch(listener, filter = {}) {
2856
+ return this.eventBus.watch(listener, filter);
2857
+ }
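/* Usage sketch (illustrative). Events carry a `kind` ("doc-metadata", "doc-frontiers",
   "asset-metadata", "asset-link", "asset-unlink") and a `by` origin ("local", "sync",
   "live"); the filter fields mirror the 0.1.0 `shouldNotify` logic, and the returned
   subscription is assumed to expose `unsubscribe()` as in 0.1.0.

   const subscription = repo.watch(
     (event) => console.log(event.kind, event.by),
     { docIds: ["notes/today"], kinds: ["doc-metadata"], by: ["live", "sync"] }
   );
   // ... later
   subscription.unsubscribe();
*/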
2858
+ /**
2859
+ * Opens a detached `LoroDoc` snapshot.
2860
+ *
2861
+ * - **No Persistence**: Edits to this document are NOT saved to storage.
2862
+ * - **No Sync**: This document does not participate in realtime updates.
2863
+ * - **Use Case**: Ideal for read-only history inspection, temporary drafts, or conflict resolution without affecting the main state.
2864
+ */
2865
+ async openDetachedDoc(docId) {
2866
+ return this.docManager.openDetachedDoc(docId);
2867
+ }
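/* Usage sketch (illustrative). A detached handle wraps a snapshot copy of the doc,
   so edits stay local to the handle and are never persisted or synced.

   const detached = await repo.openDetachedDoc("notes/today");
   // Inspect history or experiment freely; discard the handle when done.
*/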
2868
+ /**
2869
+ * Explicitly unloads a document from memory.
2870
+ *
2871
+ * - **Persists Immediately**: Forces a save of the document's current state to storage.
2872
+ * - **Frees Memory**: Removes the document from the internal cache.
2873
+ * - **Note**: If the document is currently being synced (via `joinDocRoom`), you should also unsubscribe from the room to fully release resources.
2874
+ */
2875
+ async unloadDoc(docId) {
2876
+ await this.docManager.unloadDoc(docId);
2877
+ }
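/* Usage sketch (illustrative). Unloading persists the current state and evicts the
   doc from the in-memory cache; leave any doc room first to fully release resources.

   const room = await repo.joinDocRoom("notes/today");
   // ... collaborate ...
   room.unsubscribe();
   await repo.unloadDoc("notes/today");
*/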
2878
+ async flush() {
2879
+ await this.docManager.flush();
2880
+ }
2881
+ async uploadAsset(params) {
2882
+ return this.assetManager.uploadAsset(params);
2883
+ }
2884
+ async linkAsset(docId, params) {
2885
+ return this.assetManager.linkAsset(docId, params);
2886
+ }
2887
+ async fetchAsset(assetId) {
2888
+ return this.assetManager.fetchAsset(assetId);
2889
+ }
2890
+ async unlinkAsset(docId, assetId) {
2891
+ await this.assetManager.unlinkAsset(docId, assetId);
2892
+ }
2893
+ async listAssets(docId) {
2894
+ return this.assetManager.listAssets(docId);
2895
+ }
2896
+ async ensureAsset(assetId) {
2897
+ return this.assetManager.ensureAsset(assetId);
2898
+ }
2899
+ async gcAssets(options = {}) {
2900
+ return this.assetManager.gcAssets(options);
2901
+ }
2902
+ async persistMeta() {
2903
+ if (!this.storage) return;
2904
+ const bundle = this.metaFlock.exportJson();
2905
+ const encoded = textEncoder.encode(JSON.stringify(bundle));
2906
+ await this.storage.save({
2907
+ type: "meta",
2908
+ update: encoded
2909
+ });
2910
+ }
2911
+ get destroyed() {
2912
+ return this._destroyed;
2913
+ }
2914
+ async destroy() {
2915
+ if (this._destroyed) return;
2916
+ this._destroyed = true;
2917
+ await this.syncRunner.destroy();
2918
+ this.assetTransport?.close?.();
2919
+ this.storage?.close?.();
2920
+ await this.transport?.close();
2165
2921
  }
2166
2922
  };
2167
2923
 
2168
2924
  //#endregion
2169
- export { BroadcastChannelTransportAdapter, IndexedDBStorageAdaptor, LoroRepo, WebSocketTransportAdapter };
2925
+ export { BroadcastChannelTransportAdapter, FileSystemStorageAdaptor, IndexedDBStorageAdaptor, LoroRepo, WebSocketTransportAdapter };
2170
2926
  //# sourceMappingURL=index.js.map