loro-repo 0.4.0 → 0.5.1
This diff compares the published contents of two publicly available package versions as they appear in their public registry, and is provided for informational purposes only.
- package/README.md +9 -9
- package/dist/chunk.cjs +30 -0
- package/dist/index.cjs +250 -1424
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +5 -629
- package/dist/index.d.ts +5 -630
- package/dist/index.js +248 -1389
- package/dist/index.js.map +1 -1
- package/dist/storage/filesystem.cjs +164 -0
- package/dist/storage/filesystem.cjs.map +1 -0
- package/dist/storage/filesystem.d.cts +49 -0
- package/dist/storage/filesystem.d.ts +49 -0
- package/dist/storage/filesystem.js +158 -0
- package/dist/storage/filesystem.js.map +1 -0
- package/dist/storage/indexeddb.cjs +261 -0
- package/dist/storage/indexeddb.cjs.map +1 -0
- package/dist/storage/indexeddb.d.cts +54 -0
- package/dist/storage/indexeddb.d.ts +54 -0
- package/dist/storage/indexeddb.js +258 -0
- package/dist/storage/indexeddb.js.map +1 -0
- package/dist/transport/broadcast-channel.cjs +252 -0
- package/dist/transport/broadcast-channel.cjs.map +1 -0
- package/dist/transport/broadcast-channel.d.cts +45 -0
- package/dist/transport/broadcast-channel.d.ts +45 -0
- package/dist/transport/broadcast-channel.js +251 -0
- package/dist/transport/broadcast-channel.js.map +1 -0
- package/dist/transport/websocket.cjs +435 -0
- package/dist/transport/websocket.cjs.map +1 -0
- package/dist/transport/websocket.d.cts +69 -0
- package/dist/transport/websocket.d.ts +69 -0
- package/dist/transport/websocket.js +430 -0
- package/dist/transport/websocket.js.map +1 -0
- package/dist/types.d.cts +419 -0
- package/dist/types.d.ts +419 -0
- package/package.json +28 -4
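
The storage and transport implementations that 0.4.0 bundled into dist/index.cjs now ship as dedicated dist files. A minimal sketch of how they might be imported once 0.5.1 is installed, assuming package.json maps these files to matching subpath exports (the exact export names are not shown in this summary):

```ts
// Hypothetical subpath imports inferred from the new dist/ layout above;
// consult the updated package.json "exports" field for the actual paths.
import { WebSocketTransportAdapter } from "loro-repo/transport/websocket";
import { BroadcastChannelTransportAdapter } from "loro-repo/transport/broadcast-channel";
import { IndexedDBStorageAdaptor } from "loro-repo/storage/indexeddb";
import { FileSystemStorageAdaptor } from "loro-repo/storage/filesystem";
```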
package/dist/index.cjs
CHANGED
|
@@ -1,1357 +1,56 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
var
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
};
|
|
54
|
-
const rawNamespaceConfig = (getEnv()?.LORO_REPO_DEBUG ?? "").trim();
|
|
55
|
-
const normalizedNamespaces = rawNamespaceConfig.length > 0 ? rawNamespaceConfig.split(/[\s,]+/).map((token) => token.toLowerCase()).filter(Boolean) : [];
|
|
56
|
-
const wildcardTokens = new Set([
|
|
57
|
-
"*",
|
|
58
|
-
"1",
|
|
59
|
-
"true",
|
|
60
|
-
"all"
|
|
61
|
-
]);
|
|
62
|
-
const namespaceSet = new Set(normalizedNamespaces);
|
|
63
|
-
const hasWildcard = namespaceSet.size > 0 && normalizedNamespaces.some((token) => wildcardTokens.has(token));
|
|
64
|
-
const isDebugEnabled = (namespace) => {
|
|
65
|
-
if (!namespaceSet.size) return false;
|
|
66
|
-
if (!namespace) return hasWildcard;
|
|
67
|
-
const normalized = namespace.toLowerCase();
|
|
68
|
-
if (hasWildcard) return true;
|
|
69
|
-
if (namespaceSet.has(normalized)) return true;
|
|
70
|
-
const [root] = normalized.split(":");
|
|
71
|
-
return namespaceSet.has(root);
|
|
72
|
-
};
|
|
73
|
-
const createDebugLogger = (namespace) => {
|
|
74
|
-
const normalized = namespace.toLowerCase();
|
|
75
|
-
return (...args) => {
|
|
76
|
-
if (!isDebugEnabled(normalized)) return;
|
|
77
|
-
const prefix = `[loro-repo:${namespace}]`;
|
|
78
|
-
if (args.length === 0) {
|
|
79
|
-
console.info(prefix);
|
|
80
|
-
return;
|
|
81
|
-
}
|
|
82
|
-
console.info(prefix, ...args);
|
|
83
|
-
};
|
|
84
|
-
};
|
|
85
|
-
|
|
86
|
-
//#endregion
|
|
87
|
-
//#region src/transport/websocket.ts
|
|
88
|
-
const debug = createDebugLogger("transport:websocket");
|
|
89
|
-
function withTimeout(promise, timeoutMs) {
|
|
90
|
-
if (!timeoutMs || timeoutMs <= 0) return promise;
|
|
91
|
-
return new Promise((resolve, reject) => {
|
|
92
|
-
const timer = setTimeout(() => {
|
|
93
|
-
reject(/* @__PURE__ */ new Error(`Operation timed out after ${timeoutMs}ms`));
|
|
94
|
-
}, timeoutMs);
|
|
95
|
-
promise.then((value) => {
|
|
96
|
-
clearTimeout(timer);
|
|
97
|
-
resolve(value);
|
|
98
|
-
}).catch((error) => {
|
|
99
|
-
clearTimeout(timer);
|
|
100
|
-
reject(error);
|
|
101
|
-
});
|
|
102
|
-
});
|
|
103
|
-
}
|
|
104
|
-
function normalizeRoomId(roomId, fallback) {
|
|
105
|
-
if (typeof roomId === "string" && roomId.length > 0) return roomId;
|
|
106
|
-
if (roomId instanceof Uint8Array && roomId.length > 0) try {
|
|
107
|
-
return (0, loro_protocol.bytesToHex)(roomId);
|
|
108
|
-
} catch {
|
|
109
|
-
return fallback;
|
|
110
|
-
}
|
|
111
|
-
return fallback;
|
|
112
|
-
}
|
|
113
|
-
function bytesEqual(a, b) {
|
|
114
|
-
if (a === b) return true;
|
|
115
|
-
if (!a || !b) return false;
|
|
116
|
-
if (a.length !== b.length) return false;
|
|
117
|
-
for (let i = 0; i < a.length; i += 1) if (a[i] !== b[i]) return false;
|
|
118
|
-
return true;
|
|
119
|
-
}
|
|
120
|
-
/**
|
|
121
|
-
* loro-websocket backed {@link TransportAdapter} implementation for LoroRepo.
|
|
122
|
-
*/
|
|
123
|
-
var WebSocketTransportAdapter = class {
|
|
124
|
-
options;
|
|
125
|
-
client;
|
|
126
|
-
metadataSession;
|
|
127
|
-
docSessions = /* @__PURE__ */ new Map();
|
|
128
|
-
constructor(options) {
|
|
129
|
-
this.options = options;
|
|
130
|
-
}
|
|
131
|
-
async connect(_options) {
|
|
132
|
-
const client = this.ensureClient();
|
|
133
|
-
debug("connect requested", { status: client.getStatus() });
|
|
134
|
-
try {
|
|
135
|
-
await client.connect();
|
|
136
|
-
debug("client.connect resolved");
|
|
137
|
-
await client.waitConnected();
|
|
138
|
-
debug("client.waitConnected resolved", { status: client.getStatus() });
|
|
139
|
-
} catch (error) {
|
|
140
|
-
debug("connect failed", error);
|
|
141
|
-
throw error;
|
|
142
|
-
}
|
|
143
|
-
}
|
|
144
|
-
async close() {
|
|
145
|
-
debug("close requested", {
|
|
146
|
-
docSessions: this.docSessions.size,
|
|
147
|
-
metadataSession: Boolean(this.metadataSession)
|
|
148
|
-
});
|
|
149
|
-
for (const [docId] of this.docSessions) await this.leaveDocSession(docId).catch(() => {});
|
|
150
|
-
this.docSessions.clear();
|
|
151
|
-
await this.teardownMetadataSession().catch(() => {});
|
|
152
|
-
if (this.client) {
|
|
153
|
-
const client = this.client;
|
|
154
|
-
this.client = void 0;
|
|
155
|
-
client.destroy();
|
|
156
|
-
debug("websocket client destroyed");
|
|
157
|
-
}
|
|
158
|
-
debug("close completed");
|
|
159
|
-
}
|
|
160
|
-
isConnected() {
|
|
161
|
-
return this.client?.getStatus() === "connected";
|
|
162
|
-
}
|
|
163
|
-
async syncMeta(flock, options) {
|
|
164
|
-
if (!this.options.metadataRoomId) {
|
|
165
|
-
debug("syncMeta skipped; metadata room not configured");
|
|
166
|
-
return { ok: true };
|
|
167
|
-
}
|
|
168
|
-
debug("syncMeta requested", { roomId: this.options.metadataRoomId });
|
|
169
|
-
try {
|
|
170
|
-
await withTimeout((await this.ensureMetadataSession(flock, {
|
|
171
|
-
roomId: this.options.metadataRoomId,
|
|
172
|
-
auth: this.options.metadataAuth
|
|
173
|
-
})).firstSynced, options?.timeout);
|
|
174
|
-
debug("syncMeta completed", { roomId: this.options.metadataRoomId });
|
|
175
|
-
return { ok: true };
|
|
176
|
-
} catch (error) {
|
|
177
|
-
debug("syncMeta failed", error);
|
|
178
|
-
return { ok: false };
|
|
179
|
-
}
|
|
180
|
-
}
|
|
181
|
-
joinMetaRoom(flock, params) {
|
|
182
|
-
const fallback = this.options.metadataRoomId ?? "";
|
|
183
|
-
const roomId = normalizeRoomId(params?.roomId, fallback);
|
|
184
|
-
if (!roomId) throw new Error("Metadata room id not configured");
|
|
185
|
-
const auth = params?.auth ?? this.options.metadataAuth;
|
|
186
|
-
debug("joinMetaRoom requested", {
|
|
187
|
-
roomId,
|
|
188
|
-
hasAuth: Boolean(auth && auth.length)
|
|
189
|
-
});
|
|
190
|
-
const ensure = this.ensureMetadataSession(flock, {
|
|
191
|
-
roomId,
|
|
192
|
-
auth
|
|
193
|
-
});
|
|
194
|
-
const firstSynced = ensure.then((session) => session.firstSynced);
|
|
195
|
-
const getConnected = () => this.isConnected();
|
|
196
|
-
const subscription = {
|
|
197
|
-
unsubscribe: () => {
|
|
198
|
-
ensure.then((session) => {
|
|
199
|
-
session.refCount = Math.max(0, session.refCount - 1);
|
|
200
|
-
debug("metadata session refCount decremented", {
|
|
201
|
-
roomId: session.roomId,
|
|
202
|
-
refCount: session.refCount
|
|
203
|
-
});
|
|
204
|
-
if (session.refCount === 0) {
|
|
205
|
-
debug("tearing down metadata session due to refCount=0", { roomId: session.roomId });
|
|
206
|
-
this.teardownMetadataSession(session).catch(() => {});
|
|
207
|
-
}
|
|
208
|
-
});
|
|
209
|
-
},
|
|
210
|
-
firstSyncedWithRemote: firstSynced,
|
|
211
|
-
get connected() {
|
|
212
|
-
return getConnected();
|
|
213
|
-
}
|
|
214
|
-
};
|
|
215
|
-
ensure.then((session) => {
|
|
216
|
-
session.refCount += 1;
|
|
217
|
-
debug("metadata session refCount incremented", {
|
|
218
|
-
roomId: session.roomId,
|
|
219
|
-
refCount: session.refCount
|
|
220
|
-
});
|
|
221
|
-
});
|
|
222
|
-
return subscription;
|
|
223
|
-
}
|
|
224
|
-
async syncDoc(docId, doc, options) {
|
|
225
|
-
debug("syncDoc requested", { docId });
|
|
226
|
-
try {
|
|
227
|
-
const session = await this.ensureDocSession(docId, doc, {});
|
|
228
|
-
await withTimeout(session.firstSynced, options?.timeout);
|
|
229
|
-
debug("syncDoc completed", {
|
|
230
|
-
docId,
|
|
231
|
-
roomId: session.roomId
|
|
232
|
-
});
|
|
233
|
-
return { ok: true };
|
|
234
|
-
} catch (error) {
|
|
235
|
-
debug("syncDoc failed", {
|
|
236
|
-
docId,
|
|
237
|
-
error
|
|
238
|
-
});
|
|
239
|
-
return { ok: false };
|
|
240
|
-
}
|
|
241
|
-
}
|
|
242
|
-
joinDocRoom(docId, doc, params) {
|
|
243
|
-
debug("joinDocRoom requested", {
|
|
244
|
-
docId,
|
|
245
|
-
roomParamType: params?.roomId ? typeof params.roomId === "string" ? "string" : "uint8array" : void 0,
|
|
246
|
-
hasAuthOverride: Boolean(params?.auth && params.auth.length)
|
|
247
|
-
});
|
|
248
|
-
const ensure = this.ensureDocSession(docId, doc, params ?? {});
|
|
249
|
-
const firstSynced = ensure.then((session) => session.firstSynced);
|
|
250
|
-
const getConnected = () => this.isConnected();
|
|
251
|
-
const subscription = {
|
|
252
|
-
unsubscribe: () => {
|
|
253
|
-
ensure.then((session) => {
|
|
254
|
-
session.refCount = Math.max(0, session.refCount - 1);
|
|
255
|
-
debug("doc session refCount decremented", {
|
|
256
|
-
docId,
|
|
257
|
-
roomId: session.roomId,
|
|
258
|
-
refCount: session.refCount
|
|
259
|
-
});
|
|
260
|
-
if (session.refCount === 0) this.leaveDocSession(docId).catch(() => {});
|
|
261
|
-
});
|
|
262
|
-
},
|
|
263
|
-
firstSyncedWithRemote: firstSynced,
|
|
264
|
-
get connected() {
|
|
265
|
-
return getConnected();
|
|
266
|
-
}
|
|
267
|
-
};
|
|
268
|
-
ensure.then((session) => {
|
|
269
|
-
session.refCount += 1;
|
|
270
|
-
debug("doc session refCount incremented", {
|
|
271
|
-
docId,
|
|
272
|
-
roomId: session.roomId,
|
|
273
|
-
refCount: session.refCount
|
|
274
|
-
});
|
|
275
|
-
});
|
|
276
|
-
return subscription;
|
|
277
|
-
}
|
|
278
|
-
ensureClient() {
|
|
279
|
-
if (this.client) {
|
|
280
|
-
debug("reusing websocket client", { status: this.client.getStatus() });
|
|
281
|
-
return this.client;
|
|
282
|
-
}
|
|
283
|
-
const { url, client: clientOptions } = this.options;
|
|
284
|
-
debug("creating websocket client", {
|
|
285
|
-
url,
|
|
286
|
-
clientOptionsKeys: clientOptions ? Object.keys(clientOptions) : []
|
|
287
|
-
});
|
|
288
|
-
const client = new loro_websocket.LoroWebsocketClient({
|
|
289
|
-
url,
|
|
290
|
-
...clientOptions
|
|
291
|
-
});
|
|
292
|
-
this.client = client;
|
|
293
|
-
return client;
|
|
294
|
-
}
|
|
295
|
-
async ensureMetadataSession(flock, params) {
|
|
296
|
-
debug("ensureMetadataSession invoked", {
|
|
297
|
-
roomId: params.roomId,
|
|
298
|
-
hasAuth: Boolean(params.auth && params.auth.length)
|
|
299
|
-
});
|
|
300
|
-
const client = this.ensureClient();
|
|
301
|
-
await client.waitConnected();
|
|
302
|
-
debug("websocket client ready for metadata session", { status: client.getStatus() });
|
|
303
|
-
if (this.metadataSession && this.metadataSession.flock === flock && this.metadataSession.roomId === params.roomId && bytesEqual(this.metadataSession.auth, params.auth)) {
|
|
304
|
-
debug("reusing metadata session", {
|
|
305
|
-
roomId: this.metadataSession.roomId,
|
|
306
|
-
refCount: this.metadataSession.refCount
|
|
307
|
-
});
|
|
308
|
-
return this.metadataSession;
|
|
309
|
-
}
|
|
310
|
-
if (this.metadataSession) {
|
|
311
|
-
debug("tearing down previous metadata session", { roomId: this.metadataSession.roomId });
|
|
312
|
-
await this.teardownMetadataSession(this.metadataSession).catch(() => {});
|
|
313
|
-
}
|
|
314
|
-
const configuredType = this.options.metadataCrdtType;
|
|
315
|
-
if (configuredType && configuredType !== loro_protocol.CrdtType.Flock) throw new Error(`metadataCrdtType must be ${loro_protocol.CrdtType.Flock} when syncing Flock metadata`);
|
|
316
|
-
const adaptor = createRepoFlockAdaptorFromDoc(flock, this.options.metadataAdaptorConfig ?? {});
|
|
317
|
-
debug("joining metadata room", {
|
|
318
|
-
roomId: params.roomId,
|
|
319
|
-
hasAuth: Boolean(params.auth && params.auth.length)
|
|
320
|
-
});
|
|
321
|
-
const room = await client.join({
|
|
322
|
-
roomId: params.roomId,
|
|
323
|
-
crdtAdaptor: adaptor,
|
|
324
|
-
auth: params.auth
|
|
325
|
-
});
|
|
326
|
-
const firstSynced = room.waitForReachingServerVersion();
|
|
327
|
-
firstSynced.then(() => {
|
|
328
|
-
debug("metadata session firstSynced resolved", { roomId: params.roomId });
|
|
329
|
-
}, (error) => {
|
|
330
|
-
debug("metadata session firstSynced rejected", {
|
|
331
|
-
roomId: params.roomId,
|
|
332
|
-
error
|
|
333
|
-
});
|
|
334
|
-
});
|
|
335
|
-
const session = {
|
|
336
|
-
adaptor,
|
|
337
|
-
room,
|
|
338
|
-
firstSynced,
|
|
339
|
-
flock,
|
|
340
|
-
roomId: params.roomId,
|
|
341
|
-
auth: params.auth,
|
|
342
|
-
refCount: 0
|
|
343
|
-
};
|
|
344
|
-
this.metadataSession = session;
|
|
345
|
-
return session;
|
|
346
|
-
}
|
|
347
|
-
async teardownMetadataSession(session) {
|
|
348
|
-
const target = session ?? this.metadataSession;
|
|
349
|
-
if (!target) return;
|
|
350
|
-
debug("teardownMetadataSession invoked", { roomId: target.roomId });
|
|
351
|
-
if (this.metadataSession === target) this.metadataSession = void 0;
|
|
352
|
-
const { adaptor, room } = target;
|
|
353
|
-
try {
|
|
354
|
-
await room.leave();
|
|
355
|
-
debug("metadata room left", { roomId: target.roomId });
|
|
356
|
-
} catch (error) {
|
|
357
|
-
debug("metadata room leave failed; destroying", {
|
|
358
|
-
roomId: target.roomId,
|
|
359
|
-
error
|
|
360
|
-
});
|
|
361
|
-
await room.destroy().catch(() => {});
|
|
362
|
-
}
|
|
363
|
-
adaptor.destroy();
|
|
364
|
-
debug("metadata session destroyed", { roomId: target.roomId });
|
|
365
|
-
}
|
|
366
|
-
async ensureDocSession(docId, doc, params) {
|
|
367
|
-
debug("ensureDocSession invoked", { docId });
|
|
368
|
-
const client = this.ensureClient();
|
|
369
|
-
await client.waitConnected();
|
|
370
|
-
debug("websocket client ready for doc session", {
|
|
371
|
-
docId,
|
|
372
|
-
status: client.getStatus()
|
|
373
|
-
});
|
|
374
|
-
const existing = this.docSessions.get(docId);
|
|
375
|
-
const derivedRoomId = this.options.docRoomId?.(docId) ?? docId;
|
|
376
|
-
const roomId = normalizeRoomId(params.roomId, derivedRoomId);
|
|
377
|
-
const auth = params.auth ?? this.options.docAuth?.(docId);
|
|
378
|
-
debug("doc session params resolved", {
|
|
379
|
-
docId,
|
|
380
|
-
roomId,
|
|
381
|
-
hasAuth: Boolean(auth && auth.length)
|
|
382
|
-
});
|
|
383
|
-
if (existing && existing.doc === doc && existing.roomId === roomId) {
|
|
384
|
-
debug("reusing doc session", {
|
|
385
|
-
docId,
|
|
386
|
-
roomId,
|
|
387
|
-
refCount: existing.refCount
|
|
388
|
-
});
|
|
389
|
-
return existing;
|
|
390
|
-
}
|
|
391
|
-
if (existing) {
|
|
392
|
-
debug("doc session mismatch; leaving existing session", {
|
|
393
|
-
docId,
|
|
394
|
-
previousRoomId: existing.roomId,
|
|
395
|
-
nextRoomId: roomId
|
|
396
|
-
});
|
|
397
|
-
await this.leaveDocSession(docId).catch(() => {});
|
|
398
|
-
}
|
|
399
|
-
const adaptor = new loro_adaptors_loro.LoroAdaptor(doc);
|
|
400
|
-
debug("joining doc room", {
|
|
401
|
-
docId,
|
|
402
|
-
roomId,
|
|
403
|
-
hasAuth: Boolean(auth && auth.length)
|
|
404
|
-
});
|
|
405
|
-
const room = await client.join({
|
|
406
|
-
roomId,
|
|
407
|
-
crdtAdaptor: adaptor,
|
|
408
|
-
auth
|
|
409
|
-
});
|
|
410
|
-
const firstSynced = room.waitForReachingServerVersion();
|
|
411
|
-
firstSynced.then(() => {
|
|
412
|
-
debug("doc session firstSynced resolved", {
|
|
413
|
-
docId,
|
|
414
|
-
roomId
|
|
415
|
-
});
|
|
416
|
-
}, (error) => {
|
|
417
|
-
debug("doc session firstSynced rejected", {
|
|
418
|
-
docId,
|
|
419
|
-
roomId,
|
|
420
|
-
error
|
|
421
|
-
});
|
|
422
|
-
});
|
|
423
|
-
const session = {
|
|
424
|
-
adaptor,
|
|
425
|
-
room,
|
|
426
|
-
firstSynced,
|
|
427
|
-
doc,
|
|
428
|
-
roomId,
|
|
429
|
-
refCount: 0
|
|
430
|
-
};
|
|
431
|
-
this.docSessions.set(docId, session);
|
|
432
|
-
return session;
|
|
433
|
-
}
|
|
434
|
-
async leaveDocSession(docId) {
|
|
435
|
-
const session = this.docSessions.get(docId);
|
|
436
|
-
if (!session) {
|
|
437
|
-
debug("leaveDocSession invoked but no session found", { docId });
|
|
438
|
-
return;
|
|
439
|
-
}
|
|
440
|
-
this.docSessions.delete(docId);
|
|
441
|
-
debug("leaving doc session", {
|
|
442
|
-
docId,
|
|
443
|
-
roomId: session.roomId
|
|
444
|
-
});
|
|
445
|
-
try {
|
|
446
|
-
await session.room.leave();
|
|
447
|
-
debug("doc room left", {
|
|
448
|
-
docId,
|
|
449
|
-
roomId: session.roomId
|
|
450
|
-
});
|
|
451
|
-
} catch (error) {
|
|
452
|
-
debug("doc room leave failed; destroying", {
|
|
453
|
-
docId,
|
|
454
|
-
roomId: session.roomId,
|
|
455
|
-
error
|
|
456
|
-
});
|
|
457
|
-
await session.room.destroy().catch(() => {});
|
|
458
|
-
}
|
|
459
|
-
session.adaptor.destroy();
|
|
460
|
-
debug("doc session destroyed", {
|
|
461
|
-
docId,
|
|
462
|
-
roomId: session.roomId
|
|
463
|
-
});
|
|
464
|
-
}
|
|
465
|
-
};
|
|
466
|
-
|
|
467
|
-
//#endregion
|
|
468
|
-
//#region src/transport/broadcast-channel.ts
|
|
469
|
-
function deferred() {
|
|
470
|
-
let resolve;
|
|
471
|
-
return {
|
|
472
|
-
promise: new Promise((res) => {
|
|
473
|
-
resolve = res;
|
|
474
|
-
}),
|
|
475
|
-
resolve
|
|
476
|
-
};
|
|
477
|
-
}
|
|
478
|
-
function randomInstanceId() {
|
|
479
|
-
if (typeof crypto !== "undefined" && typeof crypto.randomUUID === "function") return crypto.randomUUID();
|
|
480
|
-
return Math.random().toString(36).slice(2);
|
|
481
|
-
}
|
|
482
|
-
function ensureBroadcastChannel() {
|
|
483
|
-
if (typeof BroadcastChannel === "undefined") throw new Error("BroadcastChannel API is not available in this environment");
|
|
484
|
-
return BroadcastChannel;
|
|
485
|
-
}
|
|
486
|
-
function encodeDocChannelId(docId) {
|
|
487
|
-
try {
|
|
488
|
-
return encodeURIComponent(docId);
|
|
489
|
-
} catch {
|
|
490
|
-
return docId.replace(/[^a-z0-9_-]/gi, "_");
|
|
491
|
-
}
|
|
492
|
-
}
|
|
493
|
-
function postChannelMessage(channel, message) {
|
|
494
|
-
channel.postMessage(message);
|
|
495
|
-
}
|
|
496
|
-
/**
|
|
497
|
-
* TransportAdapter that relies on the BroadcastChannel API to fan out metadata
|
|
498
|
-
* and document updates between browser tabs within the same origin.
|
|
499
|
-
*/
|
|
500
|
-
var BroadcastChannelTransportAdapter = class {
|
|
501
|
-
instanceId = randomInstanceId();
|
|
502
|
-
namespace;
|
|
503
|
-
metaChannelName;
|
|
504
|
-
connected = false;
|
|
505
|
-
metaState;
|
|
506
|
-
docStates = /* @__PURE__ */ new Map();
|
|
507
|
-
constructor(options = {}) {
|
|
508
|
-
ensureBroadcastChannel();
|
|
509
|
-
this.namespace = options.namespace ?? "loro-repo";
|
|
510
|
-
this.metaChannelName = options.metaChannelName ?? `${this.namespace}-meta`;
|
|
511
|
-
}
|
|
512
|
-
async connect() {
|
|
513
|
-
this.connected = true;
|
|
514
|
-
}
|
|
515
|
-
async close() {
|
|
516
|
-
this.connected = false;
|
|
517
|
-
if (this.metaState) {
|
|
518
|
-
for (const entry of this.metaState.listeners) entry.unsubscribe();
|
|
519
|
-
this.metaState.channel.close();
|
|
520
|
-
this.metaState = void 0;
|
|
521
|
-
}
|
|
522
|
-
for (const [docId] of this.docStates) this.teardownDocChannel(docId);
|
|
523
|
-
this.docStates.clear();
|
|
524
|
-
}
|
|
525
|
-
isConnected() {
|
|
526
|
-
return this.connected;
|
|
527
|
-
}
|
|
528
|
-
async syncMeta(flock, _options) {
|
|
529
|
-
const subscription = this.joinMetaRoom(flock);
|
|
530
|
-
subscription.firstSyncedWithRemote.catch(() => void 0);
|
|
531
|
-
await subscription.firstSyncedWithRemote;
|
|
532
|
-
subscription.unsubscribe();
|
|
533
|
-
return { ok: true };
|
|
534
|
-
}
|
|
535
|
-
joinMetaRoom(flock, _params) {
|
|
536
|
-
const state = this.ensureMetaChannel();
|
|
537
|
-
const { promise, resolve } = deferred();
|
|
538
|
-
const listener = {
|
|
539
|
-
flock,
|
|
540
|
-
muted: false,
|
|
541
|
-
unsubscribe: flock.subscribe(() => {
|
|
542
|
-
if (listener.muted) return;
|
|
543
|
-
Promise.resolve(flock.exportJson()).then((bundle) => {
|
|
544
|
-
postChannelMessage(state.channel, {
|
|
545
|
-
kind: "meta-export",
|
|
546
|
-
from: this.instanceId,
|
|
547
|
-
bundle
|
|
548
|
-
});
|
|
549
|
-
});
|
|
550
|
-
}),
|
|
551
|
-
resolveFirst: resolve,
|
|
552
|
-
firstSynced: promise
|
|
553
|
-
};
|
|
554
|
-
state.listeners.add(listener);
|
|
555
|
-
postChannelMessage(state.channel, {
|
|
556
|
-
kind: "meta-request",
|
|
557
|
-
from: this.instanceId
|
|
558
|
-
});
|
|
559
|
-
Promise.resolve(flock.exportJson()).then((bundle) => {
|
|
560
|
-
postChannelMessage(state.channel, {
|
|
561
|
-
kind: "meta-export",
|
|
562
|
-
from: this.instanceId,
|
|
563
|
-
bundle
|
|
564
|
-
});
|
|
565
|
-
});
|
|
566
|
-
queueMicrotask(() => resolve());
|
|
567
|
-
return {
|
|
568
|
-
unsubscribe: () => {
|
|
569
|
-
listener.unsubscribe();
|
|
570
|
-
state.listeners.delete(listener);
|
|
571
|
-
if (!state.listeners.size) {
|
|
572
|
-
state.channel.removeEventListener("message", state.onMessage);
|
|
573
|
-
state.channel.close();
|
|
574
|
-
this.metaState = void 0;
|
|
575
|
-
}
|
|
576
|
-
},
|
|
577
|
-
firstSyncedWithRemote: listener.firstSynced,
|
|
578
|
-
get connected() {
|
|
579
|
-
return true;
|
|
580
|
-
}
|
|
581
|
-
};
|
|
582
|
-
}
|
|
583
|
-
async syncDoc(docId, doc, _options) {
|
|
584
|
-
const subscription = this.joinDocRoom(docId, doc);
|
|
585
|
-
subscription.firstSyncedWithRemote.catch(() => void 0);
|
|
586
|
-
await subscription.firstSyncedWithRemote;
|
|
587
|
-
subscription.unsubscribe();
|
|
588
|
-
return { ok: true };
|
|
589
|
-
}
|
|
590
|
-
joinDocRoom(docId, doc, _params) {
|
|
591
|
-
const state = this.ensureDocChannel(docId);
|
|
592
|
-
const { promise, resolve } = deferred();
|
|
593
|
-
const listener = {
|
|
594
|
-
doc,
|
|
595
|
-
muted: false,
|
|
596
|
-
unsubscribe: doc.subscribe(() => {
|
|
597
|
-
if (listener.muted) return;
|
|
598
|
-
const payload = doc.export({ mode: "update" });
|
|
599
|
-
postChannelMessage(state.channel, {
|
|
600
|
-
kind: "doc-update",
|
|
601
|
-
docId,
|
|
602
|
-
from: this.instanceId,
|
|
603
|
-
mode: "update",
|
|
604
|
-
payload
|
|
605
|
-
});
|
|
606
|
-
}),
|
|
607
|
-
resolveFirst: resolve,
|
|
608
|
-
firstSynced: promise
|
|
609
|
-
};
|
|
610
|
-
state.listeners.add(listener);
|
|
611
|
-
postChannelMessage(state.channel, {
|
|
612
|
-
kind: "doc-request",
|
|
613
|
-
docId,
|
|
614
|
-
from: this.instanceId
|
|
615
|
-
});
|
|
616
|
-
postChannelMessage(state.channel, {
|
|
617
|
-
kind: "doc-update",
|
|
618
|
-
docId,
|
|
619
|
-
from: this.instanceId,
|
|
620
|
-
mode: "snapshot",
|
|
621
|
-
payload: doc.export({ mode: "snapshot" })
|
|
622
|
-
});
|
|
623
|
-
queueMicrotask(() => resolve());
|
|
624
|
-
return {
|
|
625
|
-
unsubscribe: () => {
|
|
626
|
-
listener.unsubscribe();
|
|
627
|
-
state.listeners.delete(listener);
|
|
628
|
-
if (!state.listeners.size) this.teardownDocChannel(docId);
|
|
629
|
-
},
|
|
630
|
-
firstSyncedWithRemote: listener.firstSynced,
|
|
631
|
-
get connected() {
|
|
632
|
-
return true;
|
|
633
|
-
}
|
|
634
|
-
};
|
|
635
|
-
}
|
|
636
|
-
ensureMetaChannel() {
|
|
637
|
-
if (this.metaState) return this.metaState;
|
|
638
|
-
const channel = new (ensureBroadcastChannel())(this.metaChannelName);
|
|
639
|
-
const listeners = /* @__PURE__ */ new Set();
|
|
640
|
-
const onMessage = (event) => {
|
|
641
|
-
const message = event.data;
|
|
642
|
-
if (!message || message.from === this.instanceId) return;
|
|
643
|
-
if (message.kind === "meta-export") for (const entry of listeners) {
|
|
644
|
-
entry.muted = true;
|
|
645
|
-
entry.flock.importJson(message.bundle);
|
|
646
|
-
entry.muted = false;
|
|
647
|
-
entry.resolveFirst();
|
|
648
|
-
}
|
|
649
|
-
else if (message.kind === "meta-request") {
|
|
650
|
-
const first = listeners.values().next().value;
|
|
651
|
-
if (!first) return;
|
|
652
|
-
Promise.resolve(first.flock.exportJson()).then((bundle) => {
|
|
653
|
-
postChannelMessage(channel, {
|
|
654
|
-
kind: "meta-export",
|
|
655
|
-
from: this.instanceId,
|
|
656
|
-
bundle
|
|
657
|
-
});
|
|
658
|
-
});
|
|
659
|
-
}
|
|
660
|
-
};
|
|
661
|
-
channel.addEventListener("message", onMessage);
|
|
662
|
-
this.metaState = {
|
|
663
|
-
channel,
|
|
664
|
-
listeners,
|
|
665
|
-
onMessage
|
|
666
|
-
};
|
|
667
|
-
return this.metaState;
|
|
668
|
-
}
|
|
669
|
-
ensureDocChannel(docId) {
|
|
670
|
-
const existing = this.docStates.get(docId);
|
|
671
|
-
if (existing) return existing;
|
|
672
|
-
const channel = new (ensureBroadcastChannel())(`${this.namespace}-doc-${encodeDocChannelId(docId)}`);
|
|
673
|
-
const listeners = /* @__PURE__ */ new Set();
|
|
674
|
-
const onMessage = (event) => {
|
|
675
|
-
const message = event.data;
|
|
676
|
-
if (!message || message.from === this.instanceId) return;
|
|
677
|
-
if (message.kind === "doc-update") for (const entry of listeners) {
|
|
678
|
-
entry.muted = true;
|
|
679
|
-
entry.doc.import(message.payload);
|
|
680
|
-
entry.muted = false;
|
|
681
|
-
entry.resolveFirst();
|
|
682
|
-
}
|
|
683
|
-
else if (message.kind === "doc-request") {
|
|
684
|
-
const first = listeners.values().next().value;
|
|
685
|
-
if (!first) return;
|
|
686
|
-
const payload = message.docId === docId ? first.doc.export({ mode: "snapshot" }) : void 0;
|
|
687
|
-
if (!payload) return;
|
|
688
|
-
postChannelMessage(channel, {
|
|
689
|
-
kind: "doc-update",
|
|
690
|
-
docId,
|
|
691
|
-
from: this.instanceId,
|
|
692
|
-
mode: "snapshot",
|
|
693
|
-
payload
|
|
694
|
-
});
|
|
695
|
-
}
|
|
696
|
-
};
|
|
697
|
-
channel.addEventListener("message", onMessage);
|
|
698
|
-
const state = {
|
|
699
|
-
channel,
|
|
700
|
-
listeners,
|
|
701
|
-
onMessage
|
|
702
|
-
};
|
|
703
|
-
this.docStates.set(docId, state);
|
|
704
|
-
return state;
|
|
705
|
-
}
|
|
706
|
-
teardownDocChannel(docId) {
|
|
707
|
-
const state = this.docStates.get(docId);
|
|
708
|
-
if (!state) return;
|
|
709
|
-
for (const entry of state.listeners) entry.unsubscribe();
|
|
710
|
-
state.channel.removeEventListener("message", state.onMessage);
|
|
711
|
-
state.channel.close();
|
|
712
|
-
this.docStates.delete(docId);
|
|
713
|
-
}
|
|
714
|
-
};
|
|
715
|
-
|
|
716
|
-
//#endregion
|
|
717
|
-
//#region src/storage/indexeddb.ts
|
|
718
|
-
const DEFAULT_DB_NAME = "loro-repo";
|
|
719
|
-
const DEFAULT_DB_VERSION = 1;
|
|
720
|
-
const DEFAULT_DOC_STORE = "docs";
|
|
721
|
-
const DEFAULT_META_STORE = "meta";
|
|
722
|
-
const DEFAULT_ASSET_STORE = "assets";
|
|
723
|
-
const DEFAULT_DOC_UPDATE_STORE = "doc-updates";
|
|
724
|
-
const DEFAULT_META_KEY = "snapshot";
|
|
725
|
-
const textDecoder$1 = new TextDecoder();
|
|
726
|
-
function describeUnknown(cause) {
|
|
727
|
-
if (typeof cause === "string") return cause;
|
|
728
|
-
if (typeof cause === "number" || typeof cause === "boolean") return String(cause);
|
|
729
|
-
if (typeof cause === "bigint") return cause.toString();
|
|
730
|
-
if (typeof cause === "symbol") return cause.description ?? cause.toString();
|
|
731
|
-
if (typeof cause === "function") return `[function ${cause.name ?? "anonymous"}]`;
|
|
732
|
-
if (cause && typeof cause === "object") try {
|
|
733
|
-
return JSON.stringify(cause);
|
|
734
|
-
} catch {
|
|
735
|
-
return "[object]";
|
|
736
|
-
}
|
|
737
|
-
return String(cause);
|
|
738
|
-
}
|
|
739
|
-
var IndexedDBStorageAdaptor = class {
|
|
740
|
-
idb;
|
|
741
|
-
dbName;
|
|
742
|
-
version;
|
|
743
|
-
docStore;
|
|
744
|
-
docUpdateStore;
|
|
745
|
-
metaStore;
|
|
746
|
-
assetStore;
|
|
747
|
-
metaKey;
|
|
748
|
-
dbPromise;
|
|
749
|
-
closed = false;
|
|
750
|
-
constructor(options = {}) {
|
|
751
|
-
const idbFactory = globalThis.indexedDB;
|
|
752
|
-
if (!idbFactory) throw new Error("IndexedDB is not available in this environment");
|
|
753
|
-
this.idb = idbFactory;
|
|
754
|
-
this.dbName = options.dbName ?? DEFAULT_DB_NAME;
|
|
755
|
-
this.version = options.version ?? DEFAULT_DB_VERSION;
|
|
756
|
-
this.docStore = options.docStoreName ?? DEFAULT_DOC_STORE;
|
|
757
|
-
this.docUpdateStore = options.docUpdateStoreName ?? DEFAULT_DOC_UPDATE_STORE;
|
|
758
|
-
this.metaStore = options.metaStoreName ?? DEFAULT_META_STORE;
|
|
759
|
-
this.assetStore = options.assetStoreName ?? DEFAULT_ASSET_STORE;
|
|
760
|
-
this.metaKey = options.metaKey ?? DEFAULT_META_KEY;
|
|
761
|
-
}
|
|
762
|
-
async save(payload) {
|
|
763
|
-
const db = await this.ensureDb();
|
|
764
|
-
switch (payload.type) {
|
|
765
|
-
case "doc-snapshot": {
|
|
766
|
-
const snapshot = payload.snapshot.slice();
|
|
767
|
-
await this.storeMergedSnapshot(db, payload.docId, snapshot);
|
|
768
|
-
break;
|
|
769
|
-
}
|
|
770
|
-
case "doc-update": {
|
|
771
|
-
const update = payload.update.slice();
|
|
772
|
-
await this.appendDocUpdate(db, payload.docId, update);
|
|
773
|
-
break;
|
|
774
|
-
}
|
|
775
|
-
case "asset": {
|
|
776
|
-
const bytes = payload.data.slice();
|
|
777
|
-
await this.putBinary(db, this.assetStore, payload.assetId, bytes);
|
|
778
|
-
break;
|
|
779
|
-
}
|
|
780
|
-
case "meta": {
|
|
781
|
-
const bytes = payload.update.slice();
|
|
782
|
-
await this.putBinary(db, this.metaStore, this.metaKey, bytes);
|
|
783
|
-
break;
|
|
784
|
-
}
|
|
785
|
-
default: throw new Error("Unsupported storage payload type");
|
|
786
|
-
}
|
|
787
|
-
}
|
|
788
|
-
async deleteAsset(assetId) {
|
|
789
|
-
const db = await this.ensureDb();
|
|
790
|
-
await this.deleteKey(db, this.assetStore, assetId);
|
|
791
|
-
}
|
|
792
|
-
async loadDoc(docId) {
|
|
793
|
-
const db = await this.ensureDb();
|
|
794
|
-
const snapshot = await this.getBinaryFromDb(db, this.docStore, docId);
|
|
795
|
-
const pendingUpdates = await this.getDocUpdates(db, docId);
|
|
796
|
-
if (!snapshot && pendingUpdates.length === 0) return;
|
|
797
|
-
let doc;
|
|
798
|
-
try {
|
|
799
|
-
doc = snapshot ? loro_crdt.LoroDoc.fromSnapshot(snapshot) : new loro_crdt.LoroDoc();
|
|
800
|
-
} catch (error) {
|
|
801
|
-
throw this.createError(`Failed to hydrate document snapshot for "${docId}"`, error);
|
|
802
|
-
}
|
|
803
|
-
let appliedUpdates = false;
|
|
804
|
-
for (const update of pendingUpdates) try {
|
|
805
|
-
doc.import(update);
|
|
806
|
-
appliedUpdates = true;
|
|
807
|
-
} catch (error) {
|
|
808
|
-
throw this.createError(`Failed to apply queued document update for "${docId}"`, error);
|
|
809
|
-
}
|
|
810
|
-
if (appliedUpdates) {
|
|
811
|
-
let consolidated;
|
|
812
|
-
try {
|
|
813
|
-
consolidated = doc.export({ mode: "snapshot" });
|
|
814
|
-
} catch (error) {
|
|
815
|
-
throw this.createError(`Failed to export consolidated snapshot for "${docId}"`, error);
|
|
816
|
-
}
|
|
817
|
-
await this.writeSnapshot(db, docId, consolidated);
|
|
818
|
-
await this.clearDocUpdates(db, docId);
|
|
819
|
-
}
|
|
820
|
-
return doc;
|
|
821
|
-
}
|
|
822
|
-
async loadMeta() {
|
|
823
|
-
const bytes = await this.getBinary(this.metaStore, this.metaKey);
|
|
824
|
-
if (!bytes) return void 0;
|
|
825
|
-
try {
|
|
826
|
-
const json = textDecoder$1.decode(bytes);
|
|
827
|
-
const bundle = JSON.parse(json);
|
|
828
|
-
const flock = new __loro_dev_flock.Flock();
|
|
829
|
-
flock.importJson(bundle);
|
|
830
|
-
return flock;
|
|
831
|
-
} catch (error) {
|
|
832
|
-
throw this.createError("Failed to hydrate metadata snapshot", error);
|
|
833
|
-
}
|
|
834
|
-
}
|
|
835
|
-
async loadAsset(assetId) {
|
|
836
|
-
return await this.getBinary(this.assetStore, assetId) ?? void 0;
|
|
837
|
-
}
|
|
838
|
-
async close() {
|
|
839
|
-
this.closed = true;
|
|
840
|
-
const db = await this.dbPromise;
|
|
841
|
-
if (db) db.close();
|
|
842
|
-
this.dbPromise = void 0;
|
|
843
|
-
}
|
|
844
|
-
async ensureDb() {
|
|
845
|
-
if (this.closed) throw new Error("IndexedDBStorageAdaptor has been closed");
|
|
846
|
-
if (!this.dbPromise) this.dbPromise = new Promise((resolve, reject) => {
|
|
847
|
-
const request = this.idb.open(this.dbName, this.version);
|
|
848
|
-
request.addEventListener("upgradeneeded", () => {
|
|
849
|
-
const db = request.result;
|
|
850
|
-
this.ensureStore(db, this.docStore);
|
|
851
|
-
this.ensureStore(db, this.docUpdateStore);
|
|
852
|
-
this.ensureStore(db, this.metaStore);
|
|
853
|
-
this.ensureStore(db, this.assetStore);
|
|
854
|
-
});
|
|
855
|
-
request.addEventListener("success", () => resolve(request.result), { once: true });
|
|
856
|
-
request.addEventListener("error", () => {
|
|
857
|
-
reject(this.createError(`Failed to open IndexedDB database "${this.dbName}"`, request.error));
|
|
858
|
-
}, { once: true });
|
|
859
|
-
});
|
|
860
|
-
return this.dbPromise;
|
|
861
|
-
}
|
|
862
|
-
ensureStore(db, storeName) {
|
|
863
|
-
const names = db.objectStoreNames;
|
|
864
|
-
if (this.storeExists(names, storeName)) return;
|
|
865
|
-
db.createObjectStore(storeName);
|
|
866
|
-
}
|
|
867
|
-
storeExists(names, storeName) {
|
|
868
|
-
if (typeof names.contains === "function") return names.contains(storeName);
|
|
869
|
-
const length = names.length ?? 0;
|
|
870
|
-
for (let index = 0; index < length; index += 1) if (names.item?.(index) === storeName) return true;
|
|
871
|
-
return false;
|
|
872
|
-
}
|
|
873
|
-
async storeMergedSnapshot(db, docId, incoming) {
|
|
874
|
-
await this.runInTransaction(db, this.docStore, "readwrite", async (store) => {
|
|
875
|
-
const existingRaw = await this.wrapRequest(store.get(docId), "read");
|
|
876
|
-
const existing = await this.normalizeBinary(existingRaw);
|
|
877
|
-
const merged = this.mergeSnapshots(docId, existing, incoming);
|
|
878
|
-
await this.wrapRequest(store.put(merged, docId), "write");
|
|
879
|
-
});
|
|
880
|
-
}
|
|
881
|
-
mergeSnapshots(docId, existing, incoming) {
|
|
882
|
-
try {
|
|
883
|
-
const doc = existing ? loro_crdt.LoroDoc.fromSnapshot(existing) : new loro_crdt.LoroDoc();
|
|
884
|
-
doc.import(incoming);
|
|
885
|
-
return doc.export({ mode: "snapshot" });
|
|
886
|
-
} catch (error) {
|
|
887
|
-
throw this.createError(`Failed to merge snapshot for "${docId}"`, error);
|
|
888
|
-
}
|
|
889
|
-
}
|
|
890
|
-
async appendDocUpdate(db, docId, update) {
|
|
891
|
-
await this.runInTransaction(db, this.docUpdateStore, "readwrite", async (store) => {
|
|
892
|
-
const raw = await this.wrapRequest(store.get(docId), "read");
|
|
893
|
-
const queue = await this.normalizeUpdateQueue(raw);
|
|
894
|
-
queue.push(update.slice());
|
|
895
|
-
await this.wrapRequest(store.put({ updates: queue }, docId), "write");
|
|
896
|
-
});
|
|
897
|
-
}
|
|
898
|
-
async getDocUpdates(db, docId) {
|
|
899
|
-
const raw = await this.runInTransaction(db, this.docUpdateStore, "readonly", (store) => this.wrapRequest(store.get(docId), "read"));
|
|
900
|
-
return this.normalizeUpdateQueue(raw);
|
|
901
|
-
}
|
|
902
|
-
async clearDocUpdates(db, docId) {
|
|
903
|
-
await this.runInTransaction(db, this.docUpdateStore, "readwrite", (store) => this.wrapRequest(store.delete(docId), "delete"));
|
|
904
|
-
}
|
|
905
|
-
async writeSnapshot(db, docId, snapshot) {
|
|
906
|
-
await this.putBinary(db, this.docStore, docId, snapshot.slice());
|
|
907
|
-
}
|
|
908
|
-
async getBinaryFromDb(db, storeName, key) {
|
|
909
|
-
const value = await this.runInTransaction(db, storeName, "readonly", (store) => this.wrapRequest(store.get(key), "read"));
|
|
910
|
-
return this.normalizeBinary(value);
|
|
911
|
-
}
|
|
912
|
-
async normalizeUpdateQueue(value) {
|
|
913
|
-
if (value == null) return [];
|
|
914
|
-
const list = Array.isArray(value) ? value : typeof value === "object" && value !== null ? value.updates : void 0;
|
|
915
|
-
if (!Array.isArray(list)) return [];
|
|
916
|
-
const queue = [];
|
|
917
|
-
for (const entry of list) {
|
|
918
|
-
const bytes = await this.normalizeBinary(entry);
|
|
919
|
-
if (bytes) queue.push(bytes);
|
|
920
|
-
}
|
|
921
|
-
return queue;
|
|
922
|
-
}
|
|
923
|
-
async putBinary(db, storeName, key, value) {
|
|
924
|
-
await this.runInTransaction(db, storeName, "readwrite", (store) => this.wrapRequest(store.put(value, key), "write"));
|
|
925
|
-
}
|
|
926
|
-
async deleteKey(db, storeName, key) {
|
|
927
|
-
await this.runInTransaction(db, storeName, "readwrite", (store) => this.wrapRequest(store.delete(key), "delete"));
|
|
928
|
-
}
|
|
929
|
-
async getBinary(storeName, key) {
|
|
930
|
-
const db = await this.ensureDb();
|
|
931
|
-
return this.getBinaryFromDb(db, storeName, key);
|
|
932
|
-
}
|
|
933
|
-
runInTransaction(db, storeName, mode, executor) {
|
|
934
|
-
const tx = db.transaction(storeName, mode);
|
|
935
|
-
const store = tx.objectStore(storeName);
|
|
936
|
-
const completion = new Promise((resolve, reject) => {
|
|
937
|
-
tx.addEventListener("complete", () => resolve(), { once: true });
|
|
938
|
-
tx.addEventListener("abort", () => reject(this.createError("IndexedDB transaction aborted", tx.error)), { once: true });
|
|
939
|
-
tx.addEventListener("error", () => reject(this.createError("IndexedDB transaction failed", tx.error)), { once: true });
|
|
940
|
-
});
|
|
941
|
-
return Promise.all([executor(store), completion]).then(([result]) => result);
|
|
942
|
-
}
|
|
943
|
-
wrapRequest(request, action) {
|
|
944
|
-
return new Promise((resolve, reject) => {
|
|
945
|
-
request.addEventListener("success", () => resolve(request.result), { once: true });
|
|
946
|
-
request.addEventListener("error", () => reject(this.createError(`IndexedDB request failed during ${action}`, request.error)), { once: true });
|
|
947
|
-
});
|
|
948
|
-
}
|
|
949
|
-
async normalizeBinary(value) {
|
|
950
|
-
if (value == null) return void 0;
|
|
951
|
-
if (value instanceof Uint8Array) return value.slice();
|
|
952
|
-
if (ArrayBuffer.isView(value)) return new Uint8Array(value.buffer, value.byteOffset, value.byteLength).slice();
|
|
953
|
-
if (value instanceof ArrayBuffer) return new Uint8Array(value.slice(0));
|
|
954
|
-
if (typeof value === "object" && value !== null && "arrayBuffer" in value) {
|
|
955
|
-
const candidate = value;
|
|
956
|
-
if (typeof candidate.arrayBuffer === "function") {
|
|
957
|
-
const buffer = await candidate.arrayBuffer();
|
|
958
|
-
return new Uint8Array(buffer);
|
|
959
|
-
}
|
|
960
|
-
}
|
|
961
|
-
}
|
|
962
|
-
createError(message, cause) {
|
|
963
|
-
if (cause instanceof Error) return new Error(`${message}: ${cause.message}`, { cause });
|
|
964
|
-
if (cause !== void 0 && cause !== null) return /* @__PURE__ */ new Error(`${message}: ${describeUnknown(cause)}`);
|
|
965
|
-
return new Error(message);
|
|
966
|
-
}
|
|
967
|
-
};
|
|
968
|
-
|
|
969
|
-
//#endregion
|
|
970
|
-
//#region src/storage/filesystem.ts
|
|
971
|
-
const textDecoder = new TextDecoder();
|
|
972
|
-
var FileSystemStorageAdaptor = class {
|
|
973
|
-
baseDir;
|
|
974
|
-
docsDir;
|
|
975
|
-
assetsDir;
|
|
976
|
-
metaPath;
|
|
977
|
-
initPromise;
|
|
978
|
-
updateCounter = 0;
|
|
979
|
-
constructor(options = {}) {
|
|
980
|
-
this.baseDir = node_path.resolve(options.baseDir ?? node_path.join(process.cwd(), ".loro-repo"));
|
|
981
|
-
this.docsDir = node_path.join(this.baseDir, options.docsDirName ?? "docs");
|
|
982
|
-
this.assetsDir = node_path.join(this.baseDir, options.assetsDirName ?? "assets");
|
|
983
|
-
this.metaPath = node_path.join(this.baseDir, options.metaFileName ?? "meta.json");
|
|
984
|
-
this.initPromise = this.ensureLayout();
|
|
985
|
-
}
|
|
986
|
-
async save(payload) {
|
|
987
|
-
await this.initPromise;
|
|
988
|
-
switch (payload.type) {
|
|
989
|
-
case "doc-snapshot":
|
|
990
|
-
await this.writeDocSnapshot(payload.docId, payload.snapshot);
|
|
991
|
-
return;
|
|
992
|
-
case "doc-update":
|
|
993
|
-
await this.enqueueDocUpdate(payload.docId, payload.update);
|
|
994
|
-
return;
|
|
995
|
-
case "asset":
|
|
996
|
-
await this.writeAsset(payload.assetId, payload.data);
|
|
997
|
-
return;
|
|
998
|
-
case "meta":
|
|
999
|
-
await writeFileAtomic(this.metaPath, payload.update);
|
|
1000
|
-
return;
|
|
1001
|
-
default: throw new Error(`Unsupported payload type: ${payload.type}`);
|
|
1002
|
-
}
|
|
1003
|
-
}
|
|
1004
|
-
async deleteAsset(assetId) {
|
|
1005
|
-
await this.initPromise;
|
|
1006
|
-
await removeIfExists(this.assetPath(assetId));
|
|
1007
|
-
}
|
|
1008
|
-
async loadDoc(docId) {
|
|
1009
|
-
await this.initPromise;
|
|
1010
|
-
const snapshotBytes = await readFileIfExists(this.docSnapshotPath(docId));
|
|
1011
|
-
const updateDir = this.docUpdatesDir(docId);
|
|
1012
|
-
const updateFiles = await listFiles(updateDir);
|
|
1013
|
-
if (!snapshotBytes && updateFiles.length === 0) return;
|
|
1014
|
-
const doc = snapshotBytes ? loro_crdt.LoroDoc.fromSnapshot(snapshotBytes) : new loro_crdt.LoroDoc();
|
|
1015
|
-
if (updateFiles.length === 0) return doc;
|
|
1016
|
-
const updatePaths = updateFiles.map((file) => node_path.join(updateDir, file));
|
|
1017
|
-
for (const updatePath of updatePaths) {
|
|
1018
|
-
const update = await readFileIfExists(updatePath);
|
|
1019
|
-
if (!update) continue;
|
|
1020
|
-
doc.import(update);
|
|
1021
|
-
}
|
|
1022
|
-
await Promise.all(updatePaths.map((filePath) => removeIfExists(filePath)));
|
|
1023
|
-
const consolidated = doc.export({ mode: "snapshot" });
|
|
1024
|
-
await this.writeDocSnapshot(docId, consolidated);
|
|
1025
|
-
return doc;
|
|
1026
|
-
}
|
|
1027
|
-
async loadMeta() {
|
|
1028
|
-
await this.initPromise;
|
|
1029
|
-
const bytes = await readFileIfExists(this.metaPath);
|
|
1030
|
-
if (!bytes) return void 0;
|
|
1031
|
-
try {
|
|
1032
|
-
const bundle = JSON.parse(textDecoder.decode(bytes));
|
|
1033
|
-
const flock = new __loro_dev_flock.Flock();
|
|
1034
|
-
flock.importJson(bundle);
|
|
1035
|
-
return flock;
|
|
1036
|
-
} catch (error) {
|
|
1037
|
-
throw new Error("Failed to hydrate metadata snapshot", { cause: error });
|
|
1038
|
-
}
|
|
1039
|
-
}
|
|
1040
|
-
async loadAsset(assetId) {
|
|
1041
|
-
await this.initPromise;
|
|
1042
|
-
return readFileIfExists(this.assetPath(assetId));
|
|
1043
|
-
}
|
|
1044
|
-
async ensureLayout() {
|
|
1045
|
-
await Promise.all([
|
|
1046
|
-
ensureDir(this.baseDir),
|
|
1047
|
-
ensureDir(this.docsDir),
|
|
1048
|
-
ensureDir(this.assetsDir)
|
|
1049
|
-
]);
|
|
1050
|
-
}
|
|
1051
|
-
async writeDocSnapshot(docId, snapshot) {
|
|
1052
|
-
await ensureDir(this.docDir(docId));
|
|
1053
|
-
await writeFileAtomic(this.docSnapshotPath(docId), snapshot);
|
|
1054
|
-
}
|
|
1055
|
-
async enqueueDocUpdate(docId, update) {
|
|
1056
|
-
const dir = this.docUpdatesDir(docId);
|
|
1057
|
-
await ensureDir(dir);
|
|
1058
|
-
const counter = this.updateCounter = (this.updateCounter + 1) % 1e6;
|
|
1059
|
-
const fileName = `${Date.now().toString().padStart(13, "0")}-${counter.toString().padStart(6, "0")}.bin`;
|
|
1060
|
-
await writeFileAtomic(node_path.join(dir, fileName), update);
|
|
1061
|
-
}
|
|
1062
|
-
async writeAsset(assetId, data) {
|
|
1063
|
-
const filePath = this.assetPath(assetId);
|
|
1064
|
-
await ensureDir(node_path.dirname(filePath));
|
|
1065
|
-
await writeFileAtomic(filePath, data);
|
|
1066
|
-
}
|
|
1067
|
-
docDir(docId) {
|
|
1068
|
-
return node_path.join(this.docsDir, encodeComponent(docId));
|
|
1069
|
-
}
|
|
1070
|
-
docSnapshotPath(docId) {
|
|
1071
|
-
return node_path.join(this.docDir(docId), "snapshot.bin");
|
|
1072
|
-
}
|
|
1073
|
-
docUpdatesDir(docId) {
|
|
1074
|
-
return node_path.join(this.docDir(docId), "updates");
|
|
1075
|
-
}
|
|
1076
|
-
assetPath(assetId) {
|
|
1077
|
-
return node_path.join(this.assetsDir, encodeComponent(assetId));
|
|
1078
|
-
}
|
|
1079
|
-
};
|
|
1080
|
-
function encodeComponent(value) {
|
|
1081
|
-
return Buffer.from(value, "utf8").toString("base64url");
|
|
1082
|
-
}
|
|
1083
|
-
async function ensureDir(dir) {
|
|
1084
|
-
await node_fs.promises.mkdir(dir, { recursive: true });
|
|
1085
|
-
}
|
|
1086
|
-
async function readFileIfExists(filePath) {
|
|
1087
|
-
try {
|
|
1088
|
-
const data = await node_fs.promises.readFile(filePath);
|
|
1089
|
-
return new Uint8Array(data.buffer, data.byteOffset, data.byteLength).slice();
|
|
1090
|
-
} catch (error) {
|
|
1091
|
-
if (error.code === "ENOENT") return;
|
|
1092
|
-
throw error;
|
|
1093
|
-
}
|
|
1094
|
-
}
|
|
1095
|
-
async function removeIfExists(filePath) {
|
|
1096
|
-
try {
|
|
1097
|
-
await node_fs.promises.rm(filePath);
|
|
1098
|
-
} catch (error) {
|
|
1099
|
-
if (error.code === "ENOENT") return;
|
|
1100
|
-
throw error;
|
|
1101
|
-
}
|
|
1102
|
-
}
|
|
1103
|
-
async function listFiles(dir) {
|
|
1104
|
-
try {
|
|
1105
|
-
return (await node_fs.promises.readdir(dir)).sort();
|
|
1106
|
-
} catch (error) {
|
|
1107
|
-
if (error.code === "ENOENT") return [];
|
|
1108
|
-
throw error;
|
|
1109
|
-
}
|
|
1110
|
-
}
|
|
1111
|
-
async function writeFileAtomic(targetPath, data) {
|
|
1112
|
-
const dir = node_path.dirname(targetPath);
|
|
1113
|
-
await ensureDir(dir);
|
|
1114
|
-
const tempPath = node_path.join(dir, `.tmp-${(0, node_crypto.randomUUID)()}`);
|
|
1115
|
-
await node_fs.promises.writeFile(tempPath, data);
|
|
1116
|
-
await node_fs.promises.rename(tempPath, targetPath);
|
|
1117
|
-
}
|
|
1118
|
-
|
|
1119
|
-
//#endregion
|
|
1120
|
-
//#region src/internal/event-bus.ts
|
|
1121
|
-
var RepoEventBus = class {
|
|
1122
|
-
watchers = /* @__PURE__ */ new Set();
|
|
1123
|
-
eventByStack = [];
|
|
1124
|
-
watch(listener, filter = {}) {
|
|
1125
|
-
const entry = {
|
|
1126
|
-
listener,
|
|
1127
|
-
filter
|
|
1128
|
-
};
|
|
1129
|
-
this.watchers.add(entry);
|
|
1130
|
-
return { unsubscribe: () => {
|
|
1131
|
-
this.watchers.delete(entry);
|
|
1132
|
-
} };
|
|
1133
|
-
}
|
|
1134
|
-
emit(event) {
|
|
1135
|
-
for (const entry of this.watchers) if (this.shouldNotify(entry.filter, event)) entry.listener(event);
|
|
1136
|
-
}
|
|
1137
|
-
clear() {
|
|
1138
|
-
this.watchers.clear();
|
|
1139
|
-
this.eventByStack.length = 0;
|
|
1140
|
-
}
|
|
1141
|
-
pushEventBy(by) {
|
|
1142
|
-
this.eventByStack.push(by);
|
|
1143
|
-
}
|
|
1144
|
-
popEventBy() {
|
|
1145
|
-
this.eventByStack.pop();
|
|
1146
|
-
}
|
|
1147
|
-
resolveEventBy(defaultBy) {
|
|
1148
|
-
const index = this.eventByStack.length - 1;
|
|
1149
|
-
return index >= 0 ? this.eventByStack[index] : defaultBy;
|
|
1150
|
-
}
|
|
1151
|
-
shouldNotify(filter, event) {
|
|
1152
|
-
if (!filter.docIds && !filter.kinds && !filter.metadataFields && !filter.by) return true;
|
|
1153
|
-
if (filter.kinds && !filter.kinds.includes(event.kind)) return false;
|
|
1154
|
-
if (filter.by && !filter.by.includes(event.by)) return false;
|
|
1155
|
-
const docId = (() => {
|
|
1156
|
-
if (event.kind === "doc-metadata" || event.kind === "doc-frontiers") return event.docId;
|
|
1157
|
-
if (event.kind === "asset-link" || event.kind === "asset-unlink") return event.docId;
|
|
1158
|
-
})();
|
|
1159
|
-
if (filter.docIds && docId && !filter.docIds.includes(docId)) return false;
|
|
1160
|
-
if (filter.docIds && !docId) return false;
|
|
1161
|
-
if (filter.metadataFields && event.kind === "doc-metadata") {
|
|
1162
|
-
if (!Object.keys(event.patch).some((key) => filter.metadataFields?.includes(key))) return false;
|
|
1163
|
-
}
|
|
1164
|
-
return true;
|
|
1165
|
-
}
|
|
1166
|
-
};
|
|
1167
|
-
|
|
1168
|
-
//#endregion
|
|
1169
|
-
//#region src/utils.ts
|
|
1170
|
-
async function streamToUint8Array(stream) {
|
|
1171
|
-
const reader = stream.getReader();
|
|
1172
|
-
const chunks = [];
|
|
1173
|
-
let total = 0;
|
|
1174
|
-
while (true) {
|
|
1175
|
-
const { done, value } = await reader.read();
|
|
1176
|
-
if (done) break;
|
|
1177
|
-
if (value) {
|
|
1178
|
-
chunks.push(value);
|
|
1179
|
-
total += value.byteLength;
|
|
1180
|
-
}
|
|
1181
|
-
}
|
|
1182
|
-
const buffer = new Uint8Array(total);
|
|
1183
|
-
let offset = 0;
|
|
1184
|
-
for (const chunk of chunks) {
|
|
1185
|
-
buffer.set(chunk, offset);
|
|
1186
|
-
offset += chunk.byteLength;
|
|
1187
|
-
}
|
|
1188
|
-
return buffer;
|
|
1189
|
-
}
|
|
1190
|
-
async function assetContentToUint8Array(content) {
|
|
1191
|
-
if (content instanceof Uint8Array) return content;
|
|
1192
|
-
if (ArrayBuffer.isView(content)) return new Uint8Array(content.buffer.slice(content.byteOffset, content.byteOffset + content.byteLength));
|
|
1193
|
-
if (typeof Blob !== "undefined" && content instanceof Blob) return new Uint8Array(await content.arrayBuffer());
|
|
1194
|
-
if (typeof ReadableStream !== "undefined" && content instanceof ReadableStream) return streamToUint8Array(content);
|
|
1195
|
-
throw new TypeError("Unsupported asset content type");
|
|
1196
|
-
}
|
|
1197
|
-
function bytesToHex(bytes) {
|
|
1198
|
-
return Array.from(bytes, (byte) => byte.toString(16).padStart(2, "0")).join("");
|
|
1199
|
-
}
|
|
1200
|
-
async function computeSha256(bytes) {
|
|
1201
|
-
const globalCrypto = globalThis.crypto;
|
|
1202
|
-
if (globalCrypto?.subtle && typeof globalCrypto.subtle.digest === "function") {
|
|
1203
|
-
const digest = await globalCrypto.subtle.digest("SHA-256", bytes);
|
|
1204
|
-
return bytesToHex(new Uint8Array(digest));
|
|
1205
|
-
}
|
|
1206
|
-
try {
|
|
1207
|
-
const { createHash } = await import("node:crypto");
|
|
1208
|
-
const hash = createHash("sha256");
|
|
1209
|
-
hash.update(bytes);
|
|
1210
|
-
return hash.digest("hex");
|
|
1211
|
-
} catch {
|
|
1212
|
-
throw new Error("SHA-256 digest is not available in this environment");
|
|
1213
|
-
}
|
|
1214
|
-
}
|
|
1215
|
-
function cloneJsonValue(value) {
|
|
1216
|
-
if (value === null) return null;
|
|
1217
|
-
if (typeof value === "string" || typeof value === "boolean") return value;
|
|
1218
|
-
if (typeof value === "number") return Number.isFinite(value) ? value : void 0;
|
|
1219
|
-
if (Array.isArray(value)) {
|
|
1220
|
-
const arr = [];
|
|
1221
|
-
for (const entry of value) {
|
|
1222
|
-
const cloned = cloneJsonValue(entry);
|
|
1223
|
-
if (cloned !== void 0) arr.push(cloned);
|
|
1224
|
-
}
|
|
1225
|
-
return arr;
|
|
1226
|
-
}
|
|
1227
|
-
if (value && typeof value === "object") {
|
|
1228
|
-
const input = value;
|
|
1229
|
-
const obj = {};
|
|
1230
|
-
for (const [key, entry] of Object.entries(input)) {
|
|
1231
|
-
const cloned = cloneJsonValue(entry);
|
|
1232
|
-
if (cloned !== void 0) obj[key] = cloned;
|
|
1233
|
-
}
|
|
1234
|
-
return obj;
|
|
1235
|
-
}
|
|
1236
|
-
}
|
|
1237
|
-
function cloneJsonObject(value) {
|
|
1238
|
-
const cloned = cloneJsonValue(value);
|
|
1239
|
-
if (cloned && typeof cloned === "object" && !Array.isArray(cloned)) return cloned;
|
|
1240
|
-
return {};
|
|
1241
|
-
}
|
|
1242
|
-
function asJsonObject(value) {
|
|
1243
|
-
const cloned = cloneJsonValue(value);
|
|
1244
|
-
if (cloned && typeof cloned === "object" && !Array.isArray(cloned)) return cloned;
|
|
1245
|
-
}
|
|
1246
|
-
function isJsonObjectValue(value) {
|
|
1247
|
-
return Boolean(value && typeof value === "object" && !Array.isArray(value));
|
|
1248
|
-
}
|
|
1249
|
-
function stableStringify(value) {
|
|
1250
|
-
if (value === null) return "null";
|
|
1251
|
-
if (typeof value === "string") return JSON.stringify(value);
|
|
1252
|
-
if (typeof value === "number" || typeof value === "boolean") return JSON.stringify(value);
|
|
1253
|
-
if (Array.isArray(value)) return `[${value.map(stableStringify).join(",")}]`;
|
|
1254
|
-
if (!isJsonObjectValue(value)) return "null";
|
|
1255
|
-
return `{${Object.keys(value).sort().map((key) => `${JSON.stringify(key)}:${stableStringify(value[key])}`).join(",")}}`;
|
|
1256
|
-
}
|
|
1257
|
-
function jsonEquals(a, b) {
|
|
1258
|
-
if (a === void 0 && b === void 0) return true;
|
|
1259
|
-
if (a === void 0 || b === void 0) return false;
|
|
1260
|
-
return stableStringify(a) === stableStringify(b);
|
|
1261
|
-
}
|
|
1262
|
-
- function diffJsonObjects(previous, next) {
- const patch = {};
- const keys = /* @__PURE__ */ new Set();
- if (previous) for (const key of Object.keys(previous)) keys.add(key);
- for (const key of Object.keys(next)) keys.add(key);
- for (const key of keys) {
- const prevValue = previous ? previous[key] : void 0;
- const nextValue = next[key];
- if (!jsonEquals(prevValue, nextValue)) {
- if (nextValue === void 0 && previous && key in previous) {
- patch[key] = null;
- continue;
- }
- const cloned = cloneJsonValue(nextValue);
- if (cloned !== void 0) patch[key] = cloned;
- }
- }
- return patch;
- }
- function assetMetaToJson(meta) {
- const json = {
- assetId: meta.assetId,
- size: meta.size,
- createdAt: meta.createdAt
- };
- if (meta.mime !== void 0) json.mime = meta.mime;
- if (meta.policy !== void 0) json.policy = meta.policy;
- if (meta.tag !== void 0) json.tag = meta.tag;
- return json;
- }
- function assetMetaFromJson(value) {
- const obj = asJsonObject(value);
- if (!obj) return void 0;
- const assetId = typeof obj.assetId === "string" ? obj.assetId : void 0;
- if (!assetId) return void 0;
- const size = typeof obj.size === "number" ? obj.size : void 0;
- const createdAt = typeof obj.createdAt === "number" ? obj.createdAt : void 0;
- if (size === void 0 || createdAt === void 0) return void 0;
- return {
- assetId,
- size,
- createdAt,
- ...typeof obj.mime === "string" ? { mime: obj.mime } : {},
- ...typeof obj.policy === "string" ? { policy: obj.policy } : {},
- ...typeof obj.tag === "string" ? { tag: obj.tag } : {}
- };
- }
- function assetMetadataEqual(a, b) {
- if (!a && !b) return true;
- if (!a || !b) return false;
- return stableStringify(assetMetaToJson(a)) === stableStringify(assetMetaToJson(b));
- }
- function cloneRepoAssetMetadata(meta) {
- return {
- assetId: meta.assetId,
- size: meta.size,
- createdAt: meta.createdAt,
- ...meta.mime !== void 0 ? { mime: meta.mime } : {},
- ...meta.policy !== void 0 ? { policy: meta.policy } : {},
- ...meta.tag !== void 0 ? { tag: meta.tag } : {}
- };
- }
- function toReadableStream(bytes) {
- return new ReadableStream({ start(controller) {
- controller.enqueue(bytes);
- controller.close();
- } });
- }
- function canonicalizeFrontiers(frontiers) {
- const json = [...frontiers].sort((a, b) => {
- if (a.peer < b.peer) return -1;
- if (a.peer > b.peer) return 1;
- return a.counter - b.counter;
- }).map((f) => ({
- peer: f.peer,
- counter: f.counter
- }));
- return {
- json,
- key: stableStringify(json)
- };
- }
- function includesFrontiers(vv, frontiers) {
- for (const { peer, counter } of frontiers) if ((vv.get(peer) ?? 0) <= counter) return false;
- return true;
- }
- function matchesQuery(docId, _metadata, query) {
- if (!query) return true;
- if (query.prefix && !docId.startsWith(query.prefix)) return false;
- if (query.start && docId < query.start) return false;
- if (query.end && docId > query.end) return false;
- return true;
- }
+ const require_chunk = require('./chunk.cjs');
+ let __loro_dev_flock = require("@loro-dev/flock");
+ __loro_dev_flock = require_chunk.__toESM(__loro_dev_flock);
+ let loro_crdt = require("loro-crdt");
+ loro_crdt = require_chunk.__toESM(loro_crdt);
+
+ //#region src/internal/event-bus.ts
+ var RepoEventBus = class {
+ watchers = /* @__PURE__ */ new Set();
+ eventByStack = [];
+ watch(listener, filter = {}) {
+ const entry = {
+ listener,
+ filter
+ };
+ this.watchers.add(entry);
+ return { unsubscribe: () => {
+ this.watchers.delete(entry);
+ } };
+ }
+ emit(event) {
+ for (const entry of this.watchers) if (this.shouldNotify(entry.filter, event)) entry.listener(event);
+ }
+ clear() {
+ this.watchers.clear();
+ this.eventByStack.length = 0;
+ }
+ pushEventBy(by) {
+ this.eventByStack.push(by);
+ }
+ popEventBy() {
+ this.eventByStack.pop();
+ }
+ resolveEventBy(defaultBy) {
+ const index = this.eventByStack.length - 1;
+ return index >= 0 ? this.eventByStack[index] : defaultBy;
+ }
+ shouldNotify(filter, event) {
+ if (!filter.docIds && !filter.kinds && !filter.metadataFields && !filter.by) return true;
+ if (filter.kinds && !filter.kinds.includes(event.kind)) return false;
+ if (filter.by && !filter.by.includes(event.by)) return false;
+ const docId = (() => {
+ if (event.kind === "doc-metadata" || event.kind === "doc-frontiers") return event.docId;
+ if (event.kind === "asset-link" || event.kind === "asset-unlink") return event.docId;
+ })();
+ if (filter.docIds && docId && !filter.docIds.includes(docId)) return false;
+ if (filter.docIds && !docId) return false;
+ if (filter.metadataFields && event.kind === "doc-metadata") {
+ if (!Object.keys(event.patch).some((key) => filter.metadataFields?.includes(key))) return false;
+ }
+ return true;
+ }
+ };

  //#endregion
  //#region src/internal/logging.ts
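The RepoEventBus added above notifies every watcher whose filter matches an emitted event; filters may narrow by docIds, kinds, metadataFields, or by. A minimal TypeScript usage sketch (the document id, field name, and "local" origin are illustrative values, not taken from the package):

    const bus = new RepoEventBus();
    // Only metadata events for one document reach this listener.
    const sub = bus.watch(
      (event) => console.log("metadata patch:", event.patch),
      { docIds: ["notes/today"], kinds: ["doc-metadata"] }
    );
    bus.emit({ kind: "doc-metadata", docId: "notes/today", patch: { title: "Draft" }, by: "local" });
    sub.unsubscribe(); // stop receiving events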
@@ -1370,23 +69,18 @@ var DocManager = class {
  getMetaFlock;
  eventBus;
  persistMeta;
- state;
  docs = /* @__PURE__ */ new Map();
  docSubscriptions = /* @__PURE__ */ new Map();
  docFrontierUpdates = /* @__PURE__ */ new Map();
  docPersistedVersions = /* @__PURE__ */ new Map();
- get docFrontierKeys() {
- return this.state.docFrontierKeys;
- }
  constructor(options) {
  this.storage = options.storage;
  this.docFrontierDebounceMs = options.docFrontierDebounceMs;
  this.getMetaFlock = options.getMetaFlock;
  this.eventBus = options.eventBus;
  this.persistMeta = options.persistMeta;
- this.state = options.state;
  }
- async
+ async openPersistedDoc(docId) {
  return await this.ensureDoc(docId);
  }
  async openDetachedDoc(docId) {
@@ -1433,38 +127,27 @@ var DocManager = class {
  }
  async updateDocFrontiers(docId, doc, defaultBy) {
  const frontiers = doc.oplogFrontiers();
- const
- const
+ const vv = doc.version();
+ const existingFrontiers = this.readFrontiersFromFlock(docId);
  let mutated = false;
  const metaFlock = this.metaFlock;
- const
- for (const entry of existingKeys) {
- if (entry === key) continue;
- let oldFrontiers;
- try {
- oldFrontiers = JSON.parse(entry);
- } catch {
- continue;
- }
- if (includesFrontiers(vv, oldFrontiers)) {
- metaFlock.delete([
- "f",
- docId,
- entry
- ]);
- mutated = true;
- }
- }
- if (!existingKeys.has(key)) {
+ for (const f of frontiers) if (existingFrontiers.get(f.peer) !== f.counter) {
  metaFlock.put([
  "f",
  docId,
-
- ],
+ f.peer
+ ], f.counter);
  mutated = true;
  }
  if (mutated) {
-
+ for (const [peer, counter] of existingFrontiers) {
+ const docCounterEnd = vv.get(peer);
+ if (docCounterEnd != null && docCounterEnd > counter) metaFlock.delete([
+ "f",
+ docId,
+ peer
+ ]);
+ }
  await this.persistMeta();
  }
  const by = this.eventBus.resolveEventBy(defaultBy);
@@ -1516,37 +199,22 @@ var DocManager = class {
  this.docFrontierUpdates.clear();
  this.docs.clear();
  this.docPersistedVersions.clear();
- this.docFrontierKeys.clear();
  }
-
-
- const frontierRows = this.metaFlock.scan({ prefix: ["f"] });
- for (const row of frontierRows) {
- if (!Array.isArray(row.key) || row.key.length < 3) continue;
- const docId = row.key[1];
- const frontierKey = row.key[2];
- if (typeof docId !== "string" || typeof frontierKey !== "string") continue;
- const set = nextFrontierKeys.get(docId) ?? /* @__PURE__ */ new Set();
- set.add(frontierKey);
- nextFrontierKeys.set(docId, set);
- }
- this.docFrontierKeys.clear();
- for (const [docId, keys] of nextFrontierKeys) this.docFrontierKeys.set(docId, keys);
+ get metaFlock() {
+ return this.getMetaFlock();
  }
-
+ readFrontiersFromFlock(docId) {
  const rows = this.metaFlock.scan({ prefix: ["f", docId] });
- const
+ const frontiers = /* @__PURE__ */ new Map();
  for (const row of rows) {
  if (!Array.isArray(row.key) || row.key.length < 3) continue;
-
- const
- if (typeof
+ const peer = row.key[2];
+ const counter = row.value;
+ if (typeof peer !== "string") continue;
+ if (typeof counter !== "number" || !Number.isFinite(counter)) continue;
+ frontiers.set(peer, counter);
  }
-
- else this.docFrontierKeys.delete(docId);
- }
- get metaFlock() {
- return this.getMetaFlock();
+ return frontiers;
  }
  registerDoc(docId, doc) {
  this.docs.set(docId, doc);
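With the rewrite above, a document's frontiers are persisted as one flock row per peer under the key ["f", docId, peer] with the counter as the value, and rows whose counter the document's version vector has surpassed are deleted. A rough TypeScript sketch of that round trip against a toy put/scan store (peer ids, doc id, and counters are made up):

    type Row = { key: (string | number)[]; value: unknown };
    const rows: Row[] = [];
    const put = (key: Row["key"], value: unknown) => rows.push({ key, value });

    // Write one row per peer, as updateDocFrontiers now does.
    for (const f of [{ peer: "peer-a", counter: 12 }, { peer: "peer-b", counter: 4 }]) {
      put(["f", "doc-1", f.peer], f.counter);
    }

    // Rebuild a Map<peer, counter>, mirroring readFrontiersFromFlock.
    const frontiers = new Map<string, number>();
    for (const row of rows) {
      if (row.key.length < 3 || row.key[0] !== "f" || row.key[1] !== "doc-1") continue;
      const peer = row.key[2];
      if (typeof peer !== "string" || typeof row.value !== "number") continue;
      frontiers.set(peer, row.value);
    }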
@@ -1659,6 +327,176 @@ var DocManager = class {
  }
  };

+ //#endregion
+ //#region src/utils.ts
+ async function streamToUint8Array(stream) {
+ const reader = stream.getReader();
+ const chunks = [];
+ let total = 0;
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+ if (value) {
+ chunks.push(value);
+ total += value.byteLength;
+ }
+ }
+ const buffer = new Uint8Array(total);
+ let offset = 0;
+ for (const chunk of chunks) {
+ buffer.set(chunk, offset);
+ offset += chunk.byteLength;
+ }
+ return buffer;
+ }
+ async function assetContentToUint8Array(content) {
+ if (content instanceof Uint8Array) return content;
+ if (ArrayBuffer.isView(content)) return new Uint8Array(content.buffer.slice(content.byteOffset, content.byteOffset + content.byteLength));
+ if (typeof Blob !== "undefined" && content instanceof Blob) return new Uint8Array(await content.arrayBuffer());
+ if (typeof ReadableStream !== "undefined" && content instanceof ReadableStream) return streamToUint8Array(content);
+ throw new TypeError("Unsupported asset content type");
+ }
+ function bytesToHex(bytes) {
+ return Array.from(bytes, (byte) => byte.toString(16).padStart(2, "0")).join("");
+ }
+ async function computeSha256(bytes) {
+ const globalCrypto = globalThis.crypto;
+ if (globalCrypto?.subtle && typeof globalCrypto.subtle.digest === "function") {
+ const digest = await globalCrypto.subtle.digest("SHA-256", bytes);
+ return bytesToHex(new Uint8Array(digest));
+ }
+ try {
+ const { createHash } = await import("node:crypto");
+ const hash = createHash("sha256");
+ hash.update(bytes);
+ return hash.digest("hex");
+ } catch {
+ throw new Error("SHA-256 digest is not available in this environment");
+ }
+ }
+ function cloneJsonValue(value) {
+ if (value === null) return null;
+ if (typeof value === "string" || typeof value === "boolean") return value;
+ if (typeof value === "number") return Number.isFinite(value) ? value : void 0;
+ if (Array.isArray(value)) {
+ const arr = [];
+ for (const entry of value) {
+ const cloned = cloneJsonValue(entry);
+ if (cloned !== void 0) arr.push(cloned);
+ }
+ return arr;
+ }
+ if (value && typeof value === "object") {
+ const input = value;
+ const obj = {};
+ for (const [key, entry] of Object.entries(input)) {
+ const cloned = cloneJsonValue(entry);
+ if (cloned !== void 0) obj[key] = cloned;
+ }
+ return obj;
+ }
+ }
+ function cloneJsonObject(value) {
+ const cloned = cloneJsonValue(value);
+ if (cloned && typeof cloned === "object" && !Array.isArray(cloned)) return cloned;
+ return {};
+ }
+ function asJsonObject(value) {
+ const cloned = cloneJsonValue(value);
+ if (cloned && typeof cloned === "object" && !Array.isArray(cloned)) return cloned;
+ }
+ function isJsonObjectValue(value) {
+ return Boolean(value && typeof value === "object" && !Array.isArray(value));
+ }
+ function stableStringify(value) {
+ if (value === null) return "null";
+ if (typeof value === "string") return JSON.stringify(value);
+ if (typeof value === "number" || typeof value === "boolean") return JSON.stringify(value);
+ if (Array.isArray(value)) return `[${value.map(stableStringify).join(",")}]`;
+ if (!isJsonObjectValue(value)) return "null";
+ return `{${Object.keys(value).sort().map((key) => `${JSON.stringify(key)}:${stableStringify(value[key])}`).join(",")}}`;
+ }
+ function jsonEquals(a, b) {
+ if (a === void 0 && b === void 0) return true;
+ if (a === void 0 || b === void 0) return false;
+ return stableStringify(a) === stableStringify(b);
+ }
+ function diffJsonObjects(previous, next) {
+ const patch = {};
+ const keys = /* @__PURE__ */ new Set();
+ if (previous) for (const key of Object.keys(previous)) keys.add(key);
+ for (const key of Object.keys(next)) keys.add(key);
+ for (const key of keys) {
+ const prevValue = previous ? previous[key] : void 0;
+ const nextValue = next[key];
+ if (!jsonEquals(prevValue, nextValue)) {
+ if (nextValue === void 0 && previous && key in previous) {
+ patch[key] = null;
+ continue;
+ }
+ const cloned = cloneJsonValue(nextValue);
+ if (cloned !== void 0) patch[key] = cloned;
+ }
+ }
+ return patch;
+ }
+ function assetMetaToJson(meta) {
+ const json = {
+ assetId: meta.assetId,
+ size: meta.size,
+ createdAt: meta.createdAt
+ };
+ if (meta.mime !== void 0) json.mime = meta.mime;
+ if (meta.policy !== void 0) json.policy = meta.policy;
+ if (meta.tag !== void 0) json.tag = meta.tag;
+ return json;
+ }
+ function assetMetaFromJson(value) {
+ const obj = asJsonObject(value);
+ if (!obj) return void 0;
+ const assetId = typeof obj.assetId === "string" ? obj.assetId : void 0;
+ if (!assetId) return void 0;
+ const size = typeof obj.size === "number" ? obj.size : void 0;
+ const createdAt = typeof obj.createdAt === "number" ? obj.createdAt : void 0;
+ if (size === void 0 || createdAt === void 0) return void 0;
+ return {
+ assetId,
+ size,
+ createdAt,
+ ...typeof obj.mime === "string" ? { mime: obj.mime } : {},
+ ...typeof obj.policy === "string" ? { policy: obj.policy } : {},
+ ...typeof obj.tag === "string" ? { tag: obj.tag } : {}
+ };
+ }
+ function assetMetadataEqual(a, b) {
+ if (!a && !b) return true;
+ if (!a || !b) return false;
+ return stableStringify(assetMetaToJson(a)) === stableStringify(assetMetaToJson(b));
+ }
+ function cloneRepoAssetMetadata(meta) {
+ return {
+ assetId: meta.assetId,
+ size: meta.size,
+ createdAt: meta.createdAt,
+ ...meta.mime !== void 0 ? { mime: meta.mime } : {},
+ ...meta.policy !== void 0 ? { policy: meta.policy } : {},
+ ...meta.tag !== void 0 ? { tag: meta.tag } : {}
+ };
+ }
+ function toReadableStream(bytes) {
+ return new ReadableStream({ start(controller) {
+ controller.enqueue(bytes);
+ controller.close();
+ } });
+ }
+ function matchesQuery(docId, _metadata, query) {
+ if (!query) return true;
+ if (query.prefix && !docId.startsWith(query.prefix)) return false;
+ if (query.start && docId < query.start) return false;
+ if (query.end && docId > query.end) return false;
+ return true;
+ }
+
  //#endregion
  //#region src/internal/metadata-manager.ts
  var MetadataManager = class {
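The src/utils.ts helpers above drive metadata comparison: stableStringify sorts object keys, so jsonEquals ignores key order, and diffJsonObjects reports removed keys as null. A small illustration with invented field names:

    stableStringify({ b: 1, a: 2 });            // '{"a":2,"b":1}' – key order does not matter
    jsonEquals({ a: 2, b: 1 }, { b: 1, a: 2 }); // true
    diffJsonObjects({ title: "Old", done: true }, { title: "New" });
    // => { title: "New", done: null } – deleted keys are tombstoned with null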
@@ -2456,13 +1294,11 @@ var FlockHydrator = class {
  const nextMetadata = this.readAllDocMetadata();
  this.metadataManager.replaceAll(nextMetadata, by);
  this.assetManager.hydrateFromFlock(by);
- this.docManager.hydrateFrontierKeys();
  }
  applyEvents(events, by) {
  if (!events.length) return;
  const docMetadataIds = /* @__PURE__ */ new Set();
  const docAssetIds = /* @__PURE__ */ new Set();
- const docFrontiersIds = /* @__PURE__ */ new Set();
  const assetIds = /* @__PURE__ */ new Set();
  for (const event of events) {
  const key = event.key;
@@ -2479,15 +1315,11 @@ var FlockHydrator = class {
  const assetId = key[2];
  if (typeof docId === "string") docAssetIds.add(docId);
  if (typeof assetId === "string") assetIds.add(assetId);
- } else if (root === "f") {
- const docId = key[1];
- if (typeof docId === "string") docFrontiersIds.add(docId);
  }
  }
  for (const assetId of assetIds) this.assetManager.refreshAssetMetadataEntry(assetId, by);
  for (const docId of docMetadataIds) this.metadataManager.refreshFromFlock(docId, by);
  for (const docId of docAssetIds) this.assetManager.refreshDocAssetsEntry(docId, by);
- for (const docId of docFrontiersIds) this.docManager.refreshDocFrontierKeys(docId);
  }
  readAllDocMetadata() {
  const nextMetadata = /* @__PURE__ */ new Map();
@@ -2698,8 +1530,7 @@ function createRepoState() {
  docAssets: /* @__PURE__ */ new Map(),
  assets: /* @__PURE__ */ new Map(),
  orphanedAssets: /* @__PURE__ */ new Map(),
- assetToDocRefs: /* @__PURE__ */ new Map()
- docFrontierKeys: /* @__PURE__ */ new Map()
+ assetToDocRefs: /* @__PURE__ */ new Map()
  };
  }

@@ -2735,8 +1566,7 @@ var LoroRepo = class LoroRepo {
  docFrontierDebounceMs,
  getMetaFlock: () => this.metaFlock,
  eventBus: this.eventBus,
- persistMeta: () => this.persistMeta()
- state: this.state
+ persistMeta: () => this.persistMeta()
  });
  this.metadataManager = new MetadataManager({
  getMetaFlock: () => this.metaFlock,
@@ -2825,7 +1655,7 @@ var LoroRepo = class LoroRepo {
  */
  async openPersistedDoc(docId) {
  return {
- doc: await this.docManager.
+ doc: await this.docManager.openPersistedDoc(docId),
  syncOnce: () => {
  return this.sync({
  scope: "doc",
@@ -2919,9 +1749,5 @@ var LoroRepo = class LoroRepo {
  };

  //#endregion
- exports.BroadcastChannelTransportAdapter = BroadcastChannelTransportAdapter;
- exports.FileSystemStorageAdaptor = FileSystemStorageAdaptor;
- exports.IndexedDBStorageAdaptor = IndexedDBStorageAdaptor;
  exports.LoroRepo = LoroRepo;
- exports.WebSocketTransportAdapter = WebSocketTransportAdapter;
  //# sourceMappingURL=index.cjs.map