loro-repo 0.5.0 → 0.5.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -9
- package/dist/chunk.cjs +30 -0
- package/dist/index.cjs +10 -1130
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +3 -608
- package/dist/index.d.ts +3 -608
- package/dist/index.js +13 -1100
- package/dist/index.js.map +1 -1
- package/dist/storage/filesystem.cjs +164 -0
- package/dist/storage/filesystem.cjs.map +1 -0
- package/dist/storage/filesystem.d.cts +49 -0
- package/dist/storage/filesystem.d.ts +49 -0
- package/dist/storage/filesystem.js +158 -0
- package/dist/storage/filesystem.js.map +1 -0
- package/dist/storage/indexeddb.cjs +261 -0
- package/dist/storage/indexeddb.cjs.map +1 -0
- package/dist/storage/indexeddb.d.cts +54 -0
- package/dist/storage/indexeddb.d.ts +54 -0
- package/dist/storage/indexeddb.js +258 -0
- package/dist/storage/indexeddb.js.map +1 -0
- package/dist/transport/broadcast-channel.cjs +252 -0
- package/dist/transport/broadcast-channel.cjs.map +1 -0
- package/dist/transport/broadcast-channel.d.cts +45 -0
- package/dist/transport/broadcast-channel.d.ts +45 -0
- package/dist/transport/broadcast-channel.js +251 -0
- package/dist/transport/broadcast-channel.js.map +1 -0
- package/dist/transport/websocket.cjs +435 -0
- package/dist/transport/websocket.cjs.map +1 -0
- package/dist/transport/websocket.d.cts +69 -0
- package/dist/transport/websocket.d.ts +69 -0
- package/dist/transport/websocket.js +430 -0
- package/dist/transport/websocket.js.map +1 -0
- package/dist/types.d.cts +419 -0
- package/dist/types.d.ts +419 -0
- package/package.json +27 -2
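
The largest addition in this release is an IndexedDB-backed storage adapter, shown in full in the hunks below. A minimal usage sketch follows; it assumes the new file is exposed as a `loro-repo/storage/indexeddb` subpath export (the package.json changes are only summarized above, so the exact specifier is an assumption) and uses loro-crdt's standard snapshot export/import API. The payload fields (`type`, `docId`, `snapshot`) match the `save()` switch in the implementation.

```ts
import { LoroDoc } from "loro-crdt";
// Assumed subpath export; check the updated package.json "exports" map.
import { IndexedDBStorageAdaptor } from "loro-repo/storage/indexeddb";

async function demo(): Promise<void> {
  const storage = new IndexedDBStorageAdaptor({ dbName: "my-app" });

  // Persist a full snapshot for one document.
  const doc = new LoroDoc();
  doc.getText("title").insert(0, "hello");
  await storage.save({
    type: "doc-snapshot",
    docId: "doc-1",
    snapshot: doc.export({ mode: "snapshot" }),
  });

  // Rehydrate it later; resolves to undefined when nothing was stored.
  const restored = await storage.loadDoc("doc-1");
  console.log(restored?.getText("title").toString());

  await storage.close();
}
```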

package/dist/storage/indexeddb.cjs
@@ -0,0 +1,261 @@
+const require_chunk = require('../chunk.cjs');
+let __loro_dev_flock = require("@loro-dev/flock");
+__loro_dev_flock = require_chunk.__toESM(__loro_dev_flock);
+let loro_crdt = require("loro-crdt");
+loro_crdt = require_chunk.__toESM(loro_crdt);
+
+//#region src/storage/indexeddb.ts
+const DEFAULT_DB_NAME = "loro-repo";
+const DEFAULT_DB_VERSION = 1;
+const DEFAULT_DOC_STORE = "docs";
+const DEFAULT_META_STORE = "meta";
+const DEFAULT_ASSET_STORE = "assets";
+const DEFAULT_DOC_UPDATE_STORE = "doc-updates";
+const DEFAULT_META_KEY = "snapshot";
+const textDecoder = new TextDecoder();
+function describeUnknown(cause) {
+if (typeof cause === "string") return cause;
+if (typeof cause === "number" || typeof cause === "boolean") return String(cause);
+if (typeof cause === "bigint") return cause.toString();
+if (typeof cause === "symbol") return cause.description ?? cause.toString();
+if (typeof cause === "function") return `[function ${cause.name ?? "anonymous"}]`;
+if (cause && typeof cause === "object") try {
+return JSON.stringify(cause);
+} catch {
+return "[object]";
+}
+return String(cause);
+}
+var IndexedDBStorageAdaptor = class {
+idb;
+dbName;
+version;
+docStore;
+docUpdateStore;
+metaStore;
+assetStore;
+metaKey;
+dbPromise;
+closed = false;
+constructor(options = {}) {
+const idbFactory = globalThis.indexedDB;
+if (!idbFactory) throw new Error("IndexedDB is not available in this environment");
+this.idb = idbFactory;
+this.dbName = options.dbName ?? DEFAULT_DB_NAME;
+this.version = options.version ?? DEFAULT_DB_VERSION;
+this.docStore = options.docStoreName ?? DEFAULT_DOC_STORE;
+this.docUpdateStore = options.docUpdateStoreName ?? DEFAULT_DOC_UPDATE_STORE;
+this.metaStore = options.metaStoreName ?? DEFAULT_META_STORE;
+this.assetStore = options.assetStoreName ?? DEFAULT_ASSET_STORE;
+this.metaKey = options.metaKey ?? DEFAULT_META_KEY;
+}
+async save(payload) {
+const db = await this.ensureDb();
+switch (payload.type) {
+case "doc-snapshot": {
+const snapshot = payload.snapshot.slice();
+await this.storeMergedSnapshot(db, payload.docId, snapshot);
+break;
+}
+case "doc-update": {
+const update = payload.update.slice();
+await this.appendDocUpdate(db, payload.docId, update);
+break;
+}
+case "asset": {
+const bytes = payload.data.slice();
+await this.putBinary(db, this.assetStore, payload.assetId, bytes);
+break;
+}
+case "meta": {
+const bytes = payload.update.slice();
+await this.putBinary(db, this.metaStore, this.metaKey, bytes);
+break;
+}
+default: throw new Error("Unsupported storage payload type");
+}
+}
+async deleteAsset(assetId) {
+const db = await this.ensureDb();
+await this.deleteKey(db, this.assetStore, assetId);
+}
+async loadDoc(docId) {
+const db = await this.ensureDb();
+const snapshot = await this.getBinaryFromDb(db, this.docStore, docId);
+const pendingUpdates = await this.getDocUpdates(db, docId);
+if (!snapshot && pendingUpdates.length === 0) return;
+let doc;
+try {
+doc = snapshot ? loro_crdt.LoroDoc.fromSnapshot(snapshot) : new loro_crdt.LoroDoc();
+} catch (error) {
+throw this.createError(`Failed to hydrate document snapshot for "${docId}"`, error);
+}
+let appliedUpdates = false;
+for (const update of pendingUpdates) try {
+doc.import(update);
+appliedUpdates = true;
+} catch (error) {
+throw this.createError(`Failed to apply queued document update for "${docId}"`, error);
+}
+if (appliedUpdates) {
+let consolidated;
+try {
+consolidated = doc.export({ mode: "snapshot" });
+} catch (error) {
+throw this.createError(`Failed to export consolidated snapshot for "${docId}"`, error);
+}
+await this.writeSnapshot(db, docId, consolidated);
+await this.clearDocUpdates(db, docId);
+}
+return doc;
+}
+async loadMeta() {
+const bytes = await this.getBinary(this.metaStore, this.metaKey);
+if (!bytes) return void 0;
+try {
+const json = textDecoder.decode(bytes);
+const bundle = JSON.parse(json);
+const flock = new __loro_dev_flock.Flock();
+flock.importJson(bundle);
+return flock;
+} catch (error) {
+throw this.createError("Failed to hydrate metadata snapshot", error);
+}
+}
+async loadAsset(assetId) {
+return await this.getBinary(this.assetStore, assetId) ?? void 0;
+}
+async close() {
+this.closed = true;
+const db = await this.dbPromise;
+if (db) db.close();
+this.dbPromise = void 0;
+}
+async ensureDb() {
+if (this.closed) throw new Error("IndexedDBStorageAdaptor has been closed");
+if (!this.dbPromise) this.dbPromise = new Promise((resolve, reject) => {
+const request = this.idb.open(this.dbName, this.version);
+request.addEventListener("upgradeneeded", () => {
+const db = request.result;
+this.ensureStore(db, this.docStore);
+this.ensureStore(db, this.docUpdateStore);
+this.ensureStore(db, this.metaStore);
+this.ensureStore(db, this.assetStore);
+});
+request.addEventListener("success", () => resolve(request.result), { once: true });
+request.addEventListener("error", () => {
+reject(this.createError(`Failed to open IndexedDB database "${this.dbName}"`, request.error));
+}, { once: true });
+});
+return this.dbPromise;
+}
+ensureStore(db, storeName) {
+const names = db.objectStoreNames;
+if (this.storeExists(names, storeName)) return;
+db.createObjectStore(storeName);
+}
+storeExists(names, storeName) {
+if (typeof names.contains === "function") return names.contains(storeName);
+const length = names.length ?? 0;
+for (let index = 0; index < length; index += 1) if (names.item?.(index) === storeName) return true;
+return false;
+}
+async storeMergedSnapshot(db, docId, incoming) {
+await this.runInTransaction(db, this.docStore, "readwrite", async (store) => {
+const existingRaw = await this.wrapRequest(store.get(docId), "read");
+const existing = await this.normalizeBinary(existingRaw);
+const merged = this.mergeSnapshots(docId, existing, incoming);
+await this.wrapRequest(store.put(merged, docId), "write");
+});
+}
+mergeSnapshots(docId, existing, incoming) {
+try {
+const doc = existing ? loro_crdt.LoroDoc.fromSnapshot(existing) : new loro_crdt.LoroDoc();
+doc.import(incoming);
+return doc.export({ mode: "snapshot" });
+} catch (error) {
+throw this.createError(`Failed to merge snapshot for "${docId}"`, error);
+}
+}
+async appendDocUpdate(db, docId, update) {
+await this.runInTransaction(db, this.docUpdateStore, "readwrite", async (store) => {
+const raw = await this.wrapRequest(store.get(docId), "read");
+const queue = await this.normalizeUpdateQueue(raw);
+queue.push(update.slice());
+await this.wrapRequest(store.put({ updates: queue }, docId), "write");
+});
+}
+async getDocUpdates(db, docId) {
+const raw = await this.runInTransaction(db, this.docUpdateStore, "readonly", (store) => this.wrapRequest(store.get(docId), "read"));
+return this.normalizeUpdateQueue(raw);
+}
+async clearDocUpdates(db, docId) {
+await this.runInTransaction(db, this.docUpdateStore, "readwrite", (store) => this.wrapRequest(store.delete(docId), "delete"));
+}
+async writeSnapshot(db, docId, snapshot) {
+await this.putBinary(db, this.docStore, docId, snapshot.slice());
+}
+async getBinaryFromDb(db, storeName, key) {
+const value = await this.runInTransaction(db, storeName, "readonly", (store) => this.wrapRequest(store.get(key), "read"));
+return this.normalizeBinary(value);
+}
+async normalizeUpdateQueue(value) {
+if (value == null) return [];
+const list = Array.isArray(value) ? value : typeof value === "object" && value !== null ? value.updates : void 0;
+if (!Array.isArray(list)) return [];
+const queue = [];
+for (const entry of list) {
+const bytes = await this.normalizeBinary(entry);
+if (bytes) queue.push(bytes);
+}
+return queue;
+}
+async putBinary(db, storeName, key, value) {
+await this.runInTransaction(db, storeName, "readwrite", (store) => this.wrapRequest(store.put(value, key), "write"));
+}
+async deleteKey(db, storeName, key) {
+await this.runInTransaction(db, storeName, "readwrite", (store) => this.wrapRequest(store.delete(key), "delete"));
+}
+async getBinary(storeName, key) {
+const db = await this.ensureDb();
+return this.getBinaryFromDb(db, storeName, key);
+}
+runInTransaction(db, storeName, mode, executor) {
+const tx = db.transaction(storeName, mode);
+const store = tx.objectStore(storeName);
+const completion = new Promise((resolve, reject) => {
+tx.addEventListener("complete", () => resolve(), { once: true });
+tx.addEventListener("abort", () => reject(this.createError("IndexedDB transaction aborted", tx.error)), { once: true });
+tx.addEventListener("error", () => reject(this.createError("IndexedDB transaction failed", tx.error)), { once: true });
+});
+return Promise.all([executor(store), completion]).then(([result]) => result);
+}
+wrapRequest(request, action) {
+return new Promise((resolve, reject) => {
+request.addEventListener("success", () => resolve(request.result), { once: true });
+request.addEventListener("error", () => reject(this.createError(`IndexedDB request failed during ${action}`, request.error)), { once: true });
+});
+}
+async normalizeBinary(value) {
+if (value == null) return void 0;
+if (value instanceof Uint8Array) return value.slice();
+if (ArrayBuffer.isView(value)) return new Uint8Array(value.buffer, value.byteOffset, value.byteLength).slice();
+if (value instanceof ArrayBuffer) return new Uint8Array(value.slice(0));
+if (typeof value === "object" && value !== null && "arrayBuffer" in value) {
+const candidate = value;
+if (typeof candidate.arrayBuffer === "function") {
+const buffer = await candidate.arrayBuffer();
+return new Uint8Array(buffer);
+}
+}
+}
+createError(message, cause) {
+if (cause instanceof Error) return new Error(`${message}: ${cause.message}`, { cause });
+if (cause !== void 0 && cause !== null) return /* @__PURE__ */ new Error(`${message}: ${describeUnknown(cause)}`);
+return new Error(message);
+}
+};
+
+//#endregion
+exports.IndexedDBStorageAdaptor = IndexedDBStorageAdaptor;
+//# sourceMappingURL=indexeddb.cjs.map
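
One design point worth noting in the adapter above: `doc-update` payloads are appended to a per-document queue in the `doc-updates` store rather than merged immediately, and `loadDoc()` replays that queue, writes the consolidated snapshot back to the `docs` store, and then clears the queue. A sketch of that incremental write path, under the same assumed import specifier and assuming loro-crdt's `subscribeLocalUpdates` hook (which is not part of this diff):

```ts
import { LoroDoc } from "loro-crdt";
import { IndexedDBStorageAdaptor } from "loro-repo/storage/indexeddb"; // assumed subpath

async function persistIncrementally(docId: string): Promise<LoroDoc | undefined> {
  const storage = new IndexedDBStorageAdaptor();
  const doc = new LoroDoc();

  // Queue each local update batch; the adapter stores it under the docId key
  // as { updates: Uint8Array[] } in the "doc-updates" object store.
  doc.subscribeLocalUpdates((update) => {
    void storage.save({ type: "doc-update", docId, update });
  });

  doc.getText("body").insert(0, "draft");
  doc.commit();

  // In a later session, loadDoc() folds the queued updates into a fresh
  // snapshot and clears the queue, so startup cost stays bounded.
  return storage.loadDoc(docId);
}
```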

package/dist/storage/indexeddb.cjs.map
@@ -0,0 +1 @@
{"version":3,"file":"indexeddb.cjs","names":["doc: LoroDoc","LoroDoc","consolidated: Uint8Array","Flock","queue: Uint8Array[]"],"sources":["../../src/storage/indexeddb.ts"],"sourcesContent":["import { Flock } from \"@loro-dev/flock\";\nimport { LoroDoc } from \"loro-crdt\";\n\nimport type {\n AssetId,\n StorageAdapter,\n StorageSavePayload,\n} from \"../types\";\nimport type { ExportBundle } from \"@loro-dev/flock\";\n\nconst DEFAULT_DB_NAME = \"loro-repo\";\nconst DEFAULT_DB_VERSION = 1;\nconst DEFAULT_DOC_STORE = \"docs\";\nconst DEFAULT_META_STORE = \"meta\";\nconst DEFAULT_ASSET_STORE = \"assets\";\nconst DEFAULT_DOC_UPDATE_STORE = \"doc-updates\";\nconst DEFAULT_META_KEY = \"snapshot\";\n\ntype EventListenerOptions = {\n once?: boolean;\n};\n\ntype IDBFactory = {\n open(name: string, version?: number): IDBOpenDBRequest;\n};\n\ntype IDBOpenDBRequest = {\n result: IDBDatabase;\n error: unknown;\n onupgradeneeded: ((event: unknown) => void) | null;\n onsuccess: ((event: unknown) => void) | null;\n onerror: ((event: unknown) => void) | null;\n addEventListener(\n type: string,\n listener: (event: unknown) => void,\n options?: EventListenerOptions,\n ): void;\n};\n\ntype ObjectStoreNames = {\n contains?(name: string): boolean;\n length?: number;\n item?(index: number): string | null;\n};\n\ntype IDBDatabase = {\n close(): void;\n createObjectStore(name: string): IDBObjectStore;\n transaction(\n storeName: string,\n mode?: IDBTransactionMode,\n ): IDBTransaction;\n objectStoreNames: ObjectStoreNames;\n};\n\ntype IDBTransactionMode = \"readonly\" | \"readwrite\";\n\ntype IDBTransaction = {\n objectStore(name: string): IDBObjectStore;\n oncomplete: ((event: unknown) => void) | null;\n onerror: ((event: unknown) => void) | null;\n onabort: ((event: unknown) => void) | null;\n error: unknown;\n addEventListener(\n type: string,\n listener: (event: unknown) => void,\n options?: EventListenerOptions,\n ): void;\n};\n\ntype IDBObjectStore = {\n put(value: unknown, key?: unknown): IDBRequest<unknown>;\n get(key: unknown): IDBRequest<unknown>;\n delete(key: unknown): IDBRequest<unknown>;\n};\n\ntype IDBRequest<T> = {\n onsuccess: ((event: unknown) => void) | null;\n onerror: ((event: unknown) => void) | null;\n result: T;\n error: unknown;\n addEventListener(\n type: string,\n listener: (event: unknown) => void,\n options?: EventListenerOptions,\n ): void;\n};\n\nconst textDecoder = new TextDecoder();\n\nfunction describeUnknown(cause: unknown): string {\n if (typeof cause === \"string\") return cause;\n if (typeof cause === \"number\" || typeof cause === \"boolean\") {\n return String(cause);\n }\n if (typeof cause === \"bigint\") {\n return cause.toString();\n }\n if (typeof cause === \"symbol\") {\n return cause.description ?? cause.toString();\n }\n if (typeof cause === \"function\") {\n return `[function ${cause.name ?? 
\"anonymous\"}]`;\n }\n if (cause && typeof cause === \"object\") {\n try {\n return JSON.stringify(cause);\n } catch {\n return \"[object]\";\n }\n }\n return String(cause);\n}\n\nexport interface IndexedDBStorageAdaptorOptions {\n readonly dbName?: string;\n readonly version?: number;\n readonly docStoreName?: string;\n readonly docUpdateStoreName?: string;\n readonly metaStoreName?: string;\n readonly assetStoreName?: string;\n readonly metaKey?: string;\n}\n\nexport class IndexedDBStorageAdaptor implements StorageAdapter {\n private readonly idb: IDBFactory;\n private readonly dbName: string;\n private readonly version: number;\n private readonly docStore: string;\n private readonly docUpdateStore: string;\n private readonly metaStore: string;\n private readonly assetStore: string;\n private readonly metaKey: string;\n private dbPromise?: Promise<IDBDatabase>;\n private closed = false;\n\n constructor(options: IndexedDBStorageAdaptorOptions = {}) {\n const idbFactory = (globalThis as { indexedDB?: IDBFactory }).indexedDB;\n if (!idbFactory) {\n throw new Error(\"IndexedDB is not available in this environment\");\n }\n this.idb = idbFactory;\n this.dbName = options.dbName ?? DEFAULT_DB_NAME;\n this.version = options.version ?? DEFAULT_DB_VERSION;\n this.docStore = options.docStoreName ?? DEFAULT_DOC_STORE;\n this.docUpdateStore = options.docUpdateStoreName ?? DEFAULT_DOC_UPDATE_STORE;\n this.metaStore = options.metaStoreName ?? DEFAULT_META_STORE;\n this.assetStore = options.assetStoreName ?? DEFAULT_ASSET_STORE;\n this.metaKey = options.metaKey ?? DEFAULT_META_KEY;\n }\n\n async save(payload: StorageSavePayload): Promise<void> {\n const db = await this.ensureDb();\n switch (payload.type) {\n case \"doc-snapshot\": {\n const snapshot = payload.snapshot.slice();\n await this.storeMergedSnapshot(db, payload.docId, snapshot);\n break;\n }\n case \"doc-update\": {\n const update = payload.update.slice();\n await this.appendDocUpdate(db, payload.docId, update);\n break;\n }\n case \"asset\": {\n const bytes = payload.data.slice();\n await this.putBinary(db, this.assetStore, payload.assetId, bytes);\n break;\n }\n case \"meta\": {\n const bytes = payload.update.slice();\n await this.putBinary(db, this.metaStore, this.metaKey, bytes);\n break;\n }\n default:\n throw new Error(\"Unsupported storage payload type\");\n }\n }\n\n async deleteAsset(assetId: AssetId): Promise<void> {\n const db = await this.ensureDb();\n await this.deleteKey(db, this.assetStore, assetId);\n }\n\n async loadDoc(docId: string): Promise<LoroDoc | undefined> {\n const db = await this.ensureDb();\n const snapshot = await this.getBinaryFromDb(db, this.docStore, docId);\n const pendingUpdates = await this.getDocUpdates(db, docId);\n\n if (!snapshot && pendingUpdates.length === 0) {\n return undefined;\n }\n\n let doc: LoroDoc;\n try {\n doc = snapshot ? 
LoroDoc.fromSnapshot(snapshot) : new LoroDoc();\n } catch (error) {\n throw this.createError(\n `Failed to hydrate document snapshot for \"${docId}\"`,\n error,\n );\n }\n\n let appliedUpdates = false;\n for (const update of pendingUpdates) {\n try {\n doc.import(update);\n appliedUpdates = true;\n } catch (error) {\n throw this.createError(\n `Failed to apply queued document update for \"${docId}\"`,\n error,\n );\n }\n }\n\n if (appliedUpdates) {\n let consolidated: Uint8Array;\n try {\n consolidated = doc.export({ mode: \"snapshot\" });\n } catch (error) {\n throw this.createError(\n `Failed to export consolidated snapshot for \"${docId}\"`,\n error,\n );\n }\n await this.writeSnapshot(db, docId, consolidated);\n await this.clearDocUpdates(db, docId);\n }\n\n return doc;\n }\n\n async loadMeta(): Promise<Flock | undefined> {\n const bytes = await this.getBinary(this.metaStore, this.metaKey);\n if (!bytes) return undefined;\n try {\n const json = textDecoder.decode(bytes);\n const bundle = JSON.parse(json) as ExportBundle;\n const flock = new Flock();\n flock.importJson(bundle);\n return flock;\n } catch (error) {\n throw this.createError(\"Failed to hydrate metadata snapshot\", error);\n }\n }\n\n async loadAsset(assetId: AssetId): Promise<Uint8Array | undefined> {\n const bytes = await this.getBinary(this.assetStore, assetId);\n return bytes ?? undefined;\n }\n\n async close(): Promise<void> {\n this.closed = true;\n const db = await this.dbPromise;\n if (db) {\n db.close();\n }\n this.dbPromise = undefined;\n }\n\n private async ensureDb(): Promise<IDBDatabase> {\n if (this.closed) {\n throw new Error(\"IndexedDBStorageAdaptor has been closed\");\n }\n if (!this.dbPromise) {\n this.dbPromise = new Promise((resolve, reject) => {\n const request = this.idb.open(this.dbName, this.version);\n request.addEventListener(\"upgradeneeded\", () => {\n const db = request.result;\n this.ensureStore(db, this.docStore);\n this.ensureStore(db, this.docUpdateStore);\n this.ensureStore(db, this.metaStore);\n this.ensureStore(db, this.assetStore);\n });\n request.addEventListener(\n \"success\",\n () => resolve(request.result),\n { once: true },\n );\n request.addEventListener(\n \"error\",\n () => {\n reject(\n this.createError(\n `Failed to open IndexedDB database \"${this.dbName}\"`,\n request.error,\n ),\n );\n },\n { once: true },\n );\n });\n }\n return this.dbPromise;\n }\n\n private ensureStore(db: IDBDatabase, storeName: string): void {\n const names = db.objectStoreNames;\n if (this.storeExists(names, storeName)) return;\n db.createObjectStore(storeName);\n }\n\n private storeExists(names: ObjectStoreNames, storeName: string): boolean {\n if (typeof names.contains === \"function\") {\n return names.contains(storeName);\n }\n const length = names.length ?? 
0;\n for (let index = 0; index < length; index += 1) {\n const value = names.item?.(index);\n if (value === storeName) return true;\n }\n return false;\n }\n\n private async storeMergedSnapshot(\n db: IDBDatabase,\n docId: string,\n incoming: Uint8Array,\n ): Promise<void> {\n await this.runInTransaction(db, this.docStore, \"readwrite\", async (store) => {\n const existingRaw = await this.wrapRequest(store.get(docId), \"read\");\n const existing = await this.normalizeBinary(existingRaw);\n const merged = this.mergeSnapshots(docId, existing, incoming);\n await this.wrapRequest(store.put(merged, docId), \"write\");\n });\n }\n\n private mergeSnapshots(\n docId: string,\n existing: Uint8Array | undefined,\n incoming: Uint8Array,\n ): Uint8Array {\n try {\n const doc = existing ? LoroDoc.fromSnapshot(existing) : new LoroDoc();\n doc.import(incoming);\n return doc.export({ mode: \"snapshot\" });\n } catch (error) {\n throw this.createError(`Failed to merge snapshot for \"${docId}\"`, error);\n }\n }\n\n private async appendDocUpdate(\n db: IDBDatabase,\n docId: string,\n update: Uint8Array,\n ): Promise<void> {\n await this.runInTransaction(\n db,\n this.docUpdateStore,\n \"readwrite\",\n async (store) => {\n const raw = await this.wrapRequest(store.get(docId), \"read\");\n const queue = await this.normalizeUpdateQueue(raw);\n queue.push(update.slice());\n await this.wrapRequest(store.put({ updates: queue }, docId), \"write\");\n },\n );\n }\n\n private async getDocUpdates(\n db: IDBDatabase,\n docId: string,\n ): Promise<Uint8Array[]> {\n const raw = await this.runInTransaction(\n db,\n this.docUpdateStore,\n \"readonly\",\n (store) => this.wrapRequest(store.get(docId), \"read\"),\n );\n return this.normalizeUpdateQueue(raw);\n }\n\n private async clearDocUpdates(\n db: IDBDatabase,\n docId: string,\n ): Promise<void> {\n await this.runInTransaction(\n db,\n this.docUpdateStore,\n \"readwrite\",\n (store) => this.wrapRequest(store.delete(docId), \"delete\"),\n );\n }\n\n private async writeSnapshot(\n db: IDBDatabase,\n docId: string,\n snapshot: Uint8Array,\n ): Promise<void> {\n await this.putBinary(db, this.docStore, docId, snapshot.slice());\n }\n\n private async getBinaryFromDb(\n db: IDBDatabase,\n storeName: string,\n key: string,\n ): Promise<Uint8Array | undefined> {\n const value = await this.runInTransaction(\n db,\n storeName,\n \"readonly\",\n (store) => this.wrapRequest(store.get(key), \"read\"),\n );\n return this.normalizeBinary(value);\n }\n\n private async normalizeUpdateQueue(value: unknown): Promise<Uint8Array[]> {\n if (value == null) return [];\n const list = Array.isArray(value)\n ? value\n : typeof value === \"object\" && value !== null\n ? 
(value as { updates?: unknown }).updates\n : undefined;\n\n if (!Array.isArray(list)) return [];\n\n const queue: Uint8Array[] = [];\n for (const entry of list) {\n const bytes = await this.normalizeBinary(entry);\n if (bytes) {\n queue.push(bytes);\n }\n }\n return queue;\n }\n\n private async putBinary(\n db: IDBDatabase,\n storeName: string,\n key: string,\n value: Uint8Array,\n ): Promise<void> {\n await this.runInTransaction(db, storeName, \"readwrite\", (store) =>\n this.wrapRequest(store.put(value, key), \"write\"),\n );\n }\n\n private async deleteKey(\n db: IDBDatabase,\n storeName: string,\n key: string,\n ): Promise<void> {\n await this.runInTransaction(db, storeName, \"readwrite\", (store) =>\n this.wrapRequest(store.delete(key), \"delete\"),\n );\n }\n\n private async getBinary(\n storeName: string,\n key: string,\n ): Promise<Uint8Array | undefined> {\n const db = await this.ensureDb();\n return this.getBinaryFromDb(db, storeName, key);\n }\n\n private runInTransaction<T>(\n db: IDBDatabase,\n storeName: string,\n mode: IDBTransactionMode,\n executor: (store: IDBObjectStore) => Promise<T>,\n ): Promise<T> {\n const tx = db.transaction(storeName, mode);\n const store = tx.objectStore(storeName);\n const completion = new Promise<void>((resolve, reject) => {\n tx.addEventListener(\n \"complete\",\n () => resolve(),\n { once: true },\n );\n tx.addEventListener(\n \"abort\",\n () =>\n reject(\n this.createError(\"IndexedDB transaction aborted\", tx.error),\n ),\n { once: true },\n );\n tx.addEventListener(\n \"error\",\n () =>\n reject(\n this.createError(\"IndexedDB transaction failed\", tx.error),\n ),\n { once: true },\n );\n });\n return Promise.all([executor(store), completion]).then(([result]) => result);\n }\n\n private wrapRequest<T>(\n request: IDBRequest<T>,\n action: string,\n ): Promise<T> {\n return new Promise<T>((resolve, reject) => {\n request.addEventListener(\n \"success\",\n () => resolve(request.result),\n { once: true },\n );\n request.addEventListener(\n \"error\",\n () =>\n reject(\n this.createError(\n `IndexedDB request failed during ${action}`,\n request.error,\n ),\n ),\n { once: true },\n );\n });\n }\n\n private async normalizeBinary(value: unknown): Promise<Uint8Array | undefined> {\n if (value == null) return undefined;\n if (value instanceof Uint8Array) {\n return value.slice();\n }\n if (ArrayBuffer.isView(value)) {\n return new Uint8Array(\n value.buffer,\n value.byteOffset,\n value.byteLength,\n ).slice();\n }\n if (value instanceof ArrayBuffer) {\n return new Uint8Array(value.slice(0));\n }\n if (\n typeof value === \"object\" &&\n value !== null &&\n \"arrayBuffer\" in value\n ) {\n const candidate = value as {\n arrayBuffer?: unknown;\n };\n if (typeof candidate.arrayBuffer === \"function\") {\n const buffer = await candidate.arrayBuffer();\n return new Uint8Array(buffer);\n }\n }\n return undefined;\n }\n\n private createError(message: string, cause: unknown): Error {\n if (cause instanceof Error) {\n return new Error(`${message}: ${cause.message}`, { cause });\n }\n if (cause !== undefined && cause !== null) {\n return new Error(`${message}: ${describeUnknown(cause)}`);\n }\n return new Error(message);\n 
}\n}\n"],"mappings":";;;;;;;AAUA,MAAM,kBAAkB;AACxB,MAAM,qBAAqB;AAC3B,MAAM,oBAAoB;AAC1B,MAAM,qBAAqB;AAC3B,MAAM,sBAAsB;AAC5B,MAAM,2BAA2B;AACjC,MAAM,mBAAmB;AAwEzB,MAAM,cAAc,IAAI,aAAa;AAErC,SAAS,gBAAgB,OAAwB;AAC/C,KAAI,OAAO,UAAU,SAAU,QAAO;AACtC,KAAI,OAAO,UAAU,YAAY,OAAO,UAAU,UAChD,QAAO,OAAO,MAAM;AAEtB,KAAI,OAAO,UAAU,SACnB,QAAO,MAAM,UAAU;AAEzB,KAAI,OAAO,UAAU,SACnB,QAAO,MAAM,eAAe,MAAM,UAAU;AAE9C,KAAI,OAAO,UAAU,WACnB,QAAO,aAAa,MAAM,QAAQ,YAAY;AAEhD,KAAI,SAAS,OAAO,UAAU,SAC5B,KAAI;AACF,SAAO,KAAK,UAAU,MAAM;SACtB;AACN,SAAO;;AAGX,QAAO,OAAO,MAAM;;AAatB,IAAa,0BAAb,MAA+D;CAC7D,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAQ;CACR,AAAQ,SAAS;CAEjB,YAAY,UAA0C,EAAE,EAAE;EACxD,MAAM,aAAc,WAA0C;AAC9D,MAAI,CAAC,WACH,OAAM,IAAI,MAAM,iDAAiD;AAEnE,OAAK,MAAM;AACX,OAAK,SAAS,QAAQ,UAAU;AAChC,OAAK,UAAU,QAAQ,WAAW;AAClC,OAAK,WAAW,QAAQ,gBAAgB;AACxC,OAAK,iBAAiB,QAAQ,sBAAsB;AACpD,OAAK,YAAY,QAAQ,iBAAiB;AAC1C,OAAK,aAAa,QAAQ,kBAAkB;AAC5C,OAAK,UAAU,QAAQ,WAAW;;CAGpC,MAAM,KAAK,SAA4C;EACrD,MAAM,KAAK,MAAM,KAAK,UAAU;AAChC,UAAQ,QAAQ,MAAhB;GACE,KAAK,gBAAgB;IACnB,MAAM,WAAW,QAAQ,SAAS,OAAO;AACzC,UAAM,KAAK,oBAAoB,IAAI,QAAQ,OAAO,SAAS;AAC3D;;GAEF,KAAK,cAAc;IACjB,MAAM,SAAS,QAAQ,OAAO,OAAO;AACrC,UAAM,KAAK,gBAAgB,IAAI,QAAQ,OAAO,OAAO;AACrD;;GAEF,KAAK,SAAS;IACZ,MAAM,QAAQ,QAAQ,KAAK,OAAO;AAClC,UAAM,KAAK,UAAU,IAAI,KAAK,YAAY,QAAQ,SAAS,MAAM;AACjE;;GAEF,KAAK,QAAQ;IACX,MAAM,QAAQ,QAAQ,OAAO,OAAO;AACpC,UAAM,KAAK,UAAU,IAAI,KAAK,WAAW,KAAK,SAAS,MAAM;AAC7D;;GAEF,QACE,OAAM,IAAI,MAAM,mCAAmC;;;CAIzD,MAAM,YAAY,SAAiC;EACjD,MAAM,KAAK,MAAM,KAAK,UAAU;AAChC,QAAM,KAAK,UAAU,IAAI,KAAK,YAAY,QAAQ;;CAGpD,MAAM,QAAQ,OAA6C;EACzD,MAAM,KAAK,MAAM,KAAK,UAAU;EAChC,MAAM,WAAW,MAAM,KAAK,gBAAgB,IAAI,KAAK,UAAU,MAAM;EACrE,MAAM,iBAAiB,MAAM,KAAK,cAAc,IAAI,MAAM;AAE1D,MAAI,CAAC,YAAY,eAAe,WAAW,EACzC;EAGF,IAAIA;AACJ,MAAI;AACF,SAAM,WAAWC,kBAAQ,aAAa,SAAS,GAAG,IAAIA,mBAAS;WACxD,OAAO;AACd,SAAM,KAAK,YACT,4CAA4C,MAAM,IAClD,MACD;;EAGH,IAAI,iBAAiB;AACrB,OAAK,MAAM,UAAU,eACnB,KAAI;AACF,OAAI,OAAO,OAAO;AAClB,oBAAiB;WACV,OAAO;AACd,SAAM,KAAK,YACT,+CAA+C,MAAM,IACrD,MACD;;AAIL,MAAI,gBAAgB;GAClB,IAAIC;AACJ,OAAI;AACF,mBAAe,IAAI,OAAO,EAAE,MAAM,YAAY,CAAC;YACxC,OAAO;AACd,UAAM,KAAK,YACT,+CAA+C,MAAM,IACrD,MACD;;AAEH,SAAM,KAAK,cAAc,IAAI,OAAO,aAAa;AACjD,SAAM,KAAK,gBAAgB,IAAI,MAAM;;AAGvC,SAAO;;CAGT,MAAM,WAAuC;EAC3C,MAAM,QAAQ,MAAM,KAAK,UAAU,KAAK,WAAW,KAAK,QAAQ;AAChE,MAAI,CAAC,MAAO,QAAO;AACnB,MAAI;GACF,MAAM,OAAO,YAAY,OAAO,MAAM;GACtC,MAAM,SAAS,KAAK,MAAM,KAAK;GAC/B,MAAM,QAAQ,IAAIC,wBAAO;AACzB,SAAM,WAAW,OAAO;AACxB,UAAO;WACA,OAAO;AACd,SAAM,KAAK,YAAY,uCAAuC,MAAM;;;CAIxE,MAAM,UAAU,SAAmD;AAEjE,SADc,MAAM,KAAK,UAAU,KAAK,YAAY,QAAQ,IAC5C;;CAGlB,MAAM,QAAuB;AAC3B,OAAK,SAAS;EACd,MAAM,KAAK,MAAM,KAAK;AACtB,MAAI,GACF,IAAG,OAAO;AAEZ,OAAK,YAAY;;CAGnB,MAAc,WAAiC;AAC7C,MAAI,KAAK,OACP,OAAM,IAAI,MAAM,0CAA0C;AAE5D,MAAI,CAAC,KAAK,UACR,MAAK,YAAY,IAAI,SAAS,SAAS,WAAW;GAChD,MAAM,UAAU,KAAK,IAAI,KAAK,KAAK,QAAQ,KAAK,QAAQ;AACxD,WAAQ,iBAAiB,uBAAuB;IAC9C,MAAM,KAAK,QAAQ;AACnB,SAAK,YAAY,IAAI,KAAK,SAAS;AACnC,SAAK,YAAY,IAAI,KAAK,eAAe;AACzC,SAAK,YAAY,IAAI,KAAK,UAAU;AACpC,SAAK,YAAY,IAAI,KAAK,WAAW;KACrC;AACF,WAAQ,iBACN,iBACM,QAAQ,QAAQ,OAAO,EAC7B,EAAE,MAAM,MAAM,CACf;AACD,WAAQ,iBACN,eACM;AACJ,WACE,KAAK,YACH,sCAAsC,KAAK,OAAO,IAClD,QAAQ,MACT,CACF;MAEH,EAAE,MAAM,MAAM,CACf;IACD;AAEJ,SAAO,KAAK;;CAGd,AAAQ,YAAY,IAAiB,WAAyB;EAC5D,MAAM,QAAQ,GAAG;AACjB,MAAI,KAAK,YAAY,OAAO,UAAU,CAAE;AACxC,KAAG,kBAAkB,UAAU;;CAGjC,AAAQ,YAAY,OAAyB,WAA4B;AACvE,MAAI,OAAO,MAAM,aAAa,WAC5B,QAAO,MAAM,SAAS,UAAU;EAElC,MAAM,SAAS,MAAM,UAAU;AAC/B,OAAK,IAAI,QAAQ,GAAG,QAAQ,QAAQ,SAAS,EAE3C,KADc,MAAM,OAAO,MAAM,KACnB,UAAW,QAAO;AAElC,SAAO;;CAGT,MAAc,oBACZ,IACA,OACA,
UACe;AACf,QAAM,KAAK,iBAAiB,IAAI,KAAK,UAAU,aAAa,OAAO,UAAU;GAC3E,MAAM,cAAc,MAAM,KAAK,YAAY,MAAM,IAAI,MAAM,EAAE,OAAO;GACpE,MAAM,WAAW,MAAM,KAAK,gBAAgB,YAAY;GACxD,MAAM,SAAS,KAAK,eAAe,OAAO,UAAU,SAAS;AAC7D,SAAM,KAAK,YAAY,MAAM,IAAI,QAAQ,MAAM,EAAE,QAAQ;IACzD;;CAGJ,AAAQ,eACN,OACA,UACA,UACY;AACZ,MAAI;GACF,MAAM,MAAM,WAAWF,kBAAQ,aAAa,SAAS,GAAG,IAAIA,mBAAS;AACrE,OAAI,OAAO,SAAS;AACpB,UAAO,IAAI,OAAO,EAAE,MAAM,YAAY,CAAC;WAChC,OAAO;AACd,SAAM,KAAK,YAAY,iCAAiC,MAAM,IAAI,MAAM;;;CAI5E,MAAc,gBACZ,IACA,OACA,QACe;AACf,QAAM,KAAK,iBACT,IACA,KAAK,gBACL,aACA,OAAO,UAAU;GACf,MAAM,MAAM,MAAM,KAAK,YAAY,MAAM,IAAI,MAAM,EAAE,OAAO;GAC5D,MAAM,QAAQ,MAAM,KAAK,qBAAqB,IAAI;AAClD,SAAM,KAAK,OAAO,OAAO,CAAC;AAC1B,SAAM,KAAK,YAAY,MAAM,IAAI,EAAE,SAAS,OAAO,EAAE,MAAM,EAAE,QAAQ;IAExE;;CAGH,MAAc,cACZ,IACA,OACuB;EACvB,MAAM,MAAM,MAAM,KAAK,iBACrB,IACA,KAAK,gBACL,aACC,UAAU,KAAK,YAAY,MAAM,IAAI,MAAM,EAAE,OAAO,CACtD;AACD,SAAO,KAAK,qBAAqB,IAAI;;CAGvC,MAAc,gBACZ,IACA,OACe;AACf,QAAM,KAAK,iBACT,IACA,KAAK,gBACL,cACC,UAAU,KAAK,YAAY,MAAM,OAAO,MAAM,EAAE,SAAS,CAC3D;;CAGH,MAAc,cACZ,IACA,OACA,UACe;AACf,QAAM,KAAK,UAAU,IAAI,KAAK,UAAU,OAAO,SAAS,OAAO,CAAC;;CAGlE,MAAc,gBACZ,IACA,WACA,KACiC;EACjC,MAAM,QAAQ,MAAM,KAAK,iBACvB,IACA,WACA,aACC,UAAU,KAAK,YAAY,MAAM,IAAI,IAAI,EAAE,OAAO,CACpD;AACD,SAAO,KAAK,gBAAgB,MAAM;;CAGpC,MAAc,qBAAqB,OAAuC;AACxE,MAAI,SAAS,KAAM,QAAO,EAAE;EAC5B,MAAM,OAAO,MAAM,QAAQ,MAAM,GAC7B,QACA,OAAO,UAAU,YAAY,UAAU,OACpC,MAAgC,UACjC;AAEN,MAAI,CAAC,MAAM,QAAQ,KAAK,CAAE,QAAO,EAAE;EAEnC,MAAMG,QAAsB,EAAE;AAC9B,OAAK,MAAM,SAAS,MAAM;GACxB,MAAM,QAAQ,MAAM,KAAK,gBAAgB,MAAM;AAC/C,OAAI,MACF,OAAM,KAAK,MAAM;;AAGrB,SAAO;;CAGT,MAAc,UACZ,IACA,WACA,KACA,OACe;AACf,QAAM,KAAK,iBAAiB,IAAI,WAAW,cAAc,UACvD,KAAK,YAAY,MAAM,IAAI,OAAO,IAAI,EAAE,QAAQ,CACjD;;CAGH,MAAc,UACZ,IACA,WACA,KACe;AACf,QAAM,KAAK,iBAAiB,IAAI,WAAW,cAAc,UACvD,KAAK,YAAY,MAAM,OAAO,IAAI,EAAE,SAAS,CAC9C;;CAGH,MAAc,UACZ,WACA,KACiC;EACjC,MAAM,KAAK,MAAM,KAAK,UAAU;AAChC,SAAO,KAAK,gBAAgB,IAAI,WAAW,IAAI;;CAGjD,AAAQ,iBACN,IACA,WACA,MACA,UACY;EACZ,MAAM,KAAK,GAAG,YAAY,WAAW,KAAK;EAC1C,MAAM,QAAQ,GAAG,YAAY,UAAU;EACvC,MAAM,aAAa,IAAI,SAAe,SAAS,WAAW;AACxD,MAAG,iBACD,kBACM,SAAS,EACf,EAAE,MAAM,MAAM,CACf;AACD,MAAG,iBACD,eAEE,OACE,KAAK,YAAY,iCAAiC,GAAG,MAAM,CAC5D,EACH,EAAE,MAAM,MAAM,CACf;AACD,MAAG,iBACD,eAEE,OACE,KAAK,YAAY,gCAAgC,GAAG,MAAM,CAC3D,EACH,EAAE,MAAM,MAAM,CACf;IACD;AACF,SAAO,QAAQ,IAAI,CAAC,SAAS,MAAM,EAAE,WAAW,CAAC,CAAC,MAAM,CAAC,YAAY,OAAO;;CAG9E,AAAQ,YACN,SACA,QACY;AACZ,SAAO,IAAI,SAAY,SAAS,WAAW;AACzC,WAAQ,iBACN,iBACM,QAAQ,QAAQ,OAAO,EAC7B,EAAE,MAAM,MAAM,CACf;AACD,WAAQ,iBACN,eAEE,OACE,KAAK,YACH,mCAAmC,UACnC,QAAQ,MACT,CACF,EACH,EAAE,MAAM,MAAM,CACf;IACD;;CAGJ,MAAc,gBAAgB,OAAiD;AAC7E,MAAI,SAAS,KAAM,QAAO;AAC1B,MAAI,iBAAiB,WACnB,QAAO,MAAM,OAAO;AAEtB,MAAI,YAAY,OAAO,MAAM,CAC3B,QAAO,IAAI,WACT,MAAM,QACN,MAAM,YACN,MAAM,WACP,CAAC,OAAO;AAEX,MAAI,iBAAiB,YACnB,QAAO,IAAI,WAAW,MAAM,MAAM,EAAE,CAAC;AAEvC,MACE,OAAO,UAAU,YACjB,UAAU,QACV,iBAAiB,OACjB;GACA,MAAM,YAAY;AAGlB,OAAI,OAAO,UAAU,gBAAgB,YAAY;IAC/C,MAAM,SAAS,MAAM,UAAU,aAAa;AAC5C,WAAO,IAAI,WAAW,OAAO;;;;CAMnC,AAAQ,YAAY,SAAiB,OAAuB;AAC1D,MAAI,iBAAiB,MACnB,QAAO,IAAI,MAAM,GAAG,QAAQ,IAAI,MAAM,WAAW,EAAE,OAAO,CAAC;AAE7D,MAAI,UAAU,UAAa,UAAU,KACnC,wBAAO,IAAI,MAAM,GAAG,QAAQ,IAAI,gBAAgB,MAAM,GAAG;AAE3D,SAAO,IAAI,MAAM,QAAQ"}

package/dist/storage/indexeddb.d.cts
@@ -0,0 +1,54 @@
+import { C as StorageAdapter, r as AssetId, w as StorageSavePayload } from "../types.cjs";
+import { Flock } from "@loro-dev/flock";
+import { LoroDoc } from "loro-crdt";
+
+//#region src/storage/indexeddb.d.ts
+interface IndexedDBStorageAdaptorOptions {
+readonly dbName?: string;
+readonly version?: number;
+readonly docStoreName?: string;
+readonly docUpdateStoreName?: string;
+readonly metaStoreName?: string;
+readonly assetStoreName?: string;
+readonly metaKey?: string;
+}
+declare class IndexedDBStorageAdaptor implements StorageAdapter {
+private readonly idb;
+private readonly dbName;
+private readonly version;
+private readonly docStore;
+private readonly docUpdateStore;
+private readonly metaStore;
+private readonly assetStore;
+private readonly metaKey;
+private dbPromise?;
+private closed;
+constructor(options?: IndexedDBStorageAdaptorOptions);
+save(payload: StorageSavePayload): Promise<void>;
+deleteAsset(assetId: AssetId): Promise<void>;
+loadDoc(docId: string): Promise<LoroDoc | undefined>;
+loadMeta(): Promise<Flock | undefined>;
+loadAsset(assetId: AssetId): Promise<Uint8Array | undefined>;
+close(): Promise<void>;
+private ensureDb;
+private ensureStore;
+private storeExists;
+private storeMergedSnapshot;
+private mergeSnapshots;
+private appendDocUpdate;
+private getDocUpdates;
+private clearDocUpdates;
+private writeSnapshot;
+private getBinaryFromDb;
+private normalizeUpdateQueue;
+private putBinary;
+private deleteKey;
+private getBinary;
+private runInTransaction;
+private wrapRequest;
+private normalizeBinary;
+private createError;
+}
+//#endregion
+export { IndexedDBStorageAdaptor, IndexedDBStorageAdaptorOptions };
+//# sourceMappingURL=indexeddb.d.cts.map

package/dist/storage/indexeddb.d.ts
@@ -0,0 +1,54 @@
+import { C as StorageAdapter, r as AssetId, w as StorageSavePayload } from "../types.js";
+import { Flock } from "@loro-dev/flock";
+import { LoroDoc } from "loro-crdt";
+
+//#region src/storage/indexeddb.d.ts
+interface IndexedDBStorageAdaptorOptions {
+readonly dbName?: string;
+readonly version?: number;
+readonly docStoreName?: string;
+readonly docUpdateStoreName?: string;
+readonly metaStoreName?: string;
+readonly assetStoreName?: string;
+readonly metaKey?: string;
+}
+declare class IndexedDBStorageAdaptor implements StorageAdapter {
+private readonly idb;
+private readonly dbName;
+private readonly version;
+private readonly docStore;
+private readonly docUpdateStore;
+private readonly metaStore;
+private readonly assetStore;
+private readonly metaKey;
+private dbPromise?;
+private closed;
+constructor(options?: IndexedDBStorageAdaptorOptions);
+save(payload: StorageSavePayload): Promise<void>;
+deleteAsset(assetId: AssetId): Promise<void>;
+loadDoc(docId: string): Promise<LoroDoc | undefined>;
+loadMeta(): Promise<Flock | undefined>;
+loadAsset(assetId: AssetId): Promise<Uint8Array | undefined>;
+close(): Promise<void>;
+private ensureDb;
+private ensureStore;
+private storeExists;
+private storeMergedSnapshot;
+private mergeSnapshots;
+private appendDocUpdate;
+private getDocUpdates;
+private clearDocUpdates;
+private writeSnapshot;
+private getBinaryFromDb;
+private normalizeUpdateQueue;
+private putBinary;
+private deleteKey;
+private getBinary;
+private runInTransaction;
+private wrapRequest;
+private normalizeBinary;
+private createError;
+}
+//#endregion
+export { IndexedDBStorageAdaptor, IndexedDBStorageAdaptorOptions };
+//# sourceMappingURL=indexeddb.d.ts.map
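
The declaration files above also document the constructor options. Every field is optional and falls back to the defaults visible in the implementation ("loro-repo", version 1, "docs", "doc-updates", "meta", "assets", "snapshot"), so a fully customized setup looks like this sketch (same assumed subpath import):

```ts
import { IndexedDBStorageAdaptor } from "loro-repo/storage/indexeddb"; // assumed subpath

// Useful for isolating tests or running several repos in one origin.
const storage = new IndexedDBStorageAdaptor({
  dbName: "my-app-loro",
  version: 1,
  docStoreName: "docs",
  docUpdateStoreName: "doc-updates",
  metaStoreName: "meta",
  assetStoreName: "assets",
  metaKey: "snapshot",
});
```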

package/dist/storage/indexeddb.js
@@ -0,0 +1,258 @@
+import { Flock } from "@loro-dev/flock";
+import { LoroDoc } from "loro-crdt";
+
+//#region src/storage/indexeddb.ts
+const DEFAULT_DB_NAME = "loro-repo";
+const DEFAULT_DB_VERSION = 1;
+const DEFAULT_DOC_STORE = "docs";
+const DEFAULT_META_STORE = "meta";
+const DEFAULT_ASSET_STORE = "assets";
+const DEFAULT_DOC_UPDATE_STORE = "doc-updates";
+const DEFAULT_META_KEY = "snapshot";
+const textDecoder = new TextDecoder();
+function describeUnknown(cause) {
+if (typeof cause === "string") return cause;
+if (typeof cause === "number" || typeof cause === "boolean") return String(cause);
+if (typeof cause === "bigint") return cause.toString();
+if (typeof cause === "symbol") return cause.description ?? cause.toString();
+if (typeof cause === "function") return `[function ${cause.name ?? "anonymous"}]`;
+if (cause && typeof cause === "object") try {
+return JSON.stringify(cause);
+} catch {
+return "[object]";
+}
+return String(cause);
+}
+var IndexedDBStorageAdaptor = class {
+idb;
+dbName;
+version;
+docStore;
+docUpdateStore;
+metaStore;
+assetStore;
+metaKey;
+dbPromise;
+closed = false;
+constructor(options = {}) {
+const idbFactory = globalThis.indexedDB;
+if (!idbFactory) throw new Error("IndexedDB is not available in this environment");
+this.idb = idbFactory;
+this.dbName = options.dbName ?? DEFAULT_DB_NAME;
+this.version = options.version ?? DEFAULT_DB_VERSION;
+this.docStore = options.docStoreName ?? DEFAULT_DOC_STORE;
+this.docUpdateStore = options.docUpdateStoreName ?? DEFAULT_DOC_UPDATE_STORE;
+this.metaStore = options.metaStoreName ?? DEFAULT_META_STORE;
+this.assetStore = options.assetStoreName ?? DEFAULT_ASSET_STORE;
+this.metaKey = options.metaKey ?? DEFAULT_META_KEY;
+}
+async save(payload) {
+const db = await this.ensureDb();
+switch (payload.type) {
+case "doc-snapshot": {
+const snapshot = payload.snapshot.slice();
+await this.storeMergedSnapshot(db, payload.docId, snapshot);
+break;
+}
+case "doc-update": {
+const update = payload.update.slice();
+await this.appendDocUpdate(db, payload.docId, update);
+break;
+}
+case "asset": {
+const bytes = payload.data.slice();
+await this.putBinary(db, this.assetStore, payload.assetId, bytes);
+break;
+}
+case "meta": {
+const bytes = payload.update.slice();
+await this.putBinary(db, this.metaStore, this.metaKey, bytes);
+break;
+}
+default: throw new Error("Unsupported storage payload type");
+}
+}
+async deleteAsset(assetId) {
+const db = await this.ensureDb();
+await this.deleteKey(db, this.assetStore, assetId);
+}
+async loadDoc(docId) {
+const db = await this.ensureDb();
+const snapshot = await this.getBinaryFromDb(db, this.docStore, docId);
+const pendingUpdates = await this.getDocUpdates(db, docId);
+if (!snapshot && pendingUpdates.length === 0) return;
+let doc;
+try {
+doc = snapshot ? LoroDoc.fromSnapshot(snapshot) : new LoroDoc();
+} catch (error) {
+throw this.createError(`Failed to hydrate document snapshot for "${docId}"`, error);
+}
+let appliedUpdates = false;
+for (const update of pendingUpdates) try {
+doc.import(update);
+appliedUpdates = true;
+} catch (error) {
+throw this.createError(`Failed to apply queued document update for "${docId}"`, error);
+}
+if (appliedUpdates) {
+let consolidated;
+try {
+consolidated = doc.export({ mode: "snapshot" });
+} catch (error) {
+throw this.createError(`Failed to export consolidated snapshot for "${docId}"`, error);
+}
+await this.writeSnapshot(db, docId, consolidated);
+await this.clearDocUpdates(db, docId);
+}
+return doc;
+}
+async loadMeta() {
+const bytes = await this.getBinary(this.metaStore, this.metaKey);
+if (!bytes) return void 0;
+try {
+const json = textDecoder.decode(bytes);
+const bundle = JSON.parse(json);
+const flock = new Flock();
+flock.importJson(bundle);
+return flock;
+} catch (error) {
+throw this.createError("Failed to hydrate metadata snapshot", error);
+}
+}
+async loadAsset(assetId) {
+return await this.getBinary(this.assetStore, assetId) ?? void 0;
+}
+async close() {
+this.closed = true;
+const db = await this.dbPromise;
+if (db) db.close();
+this.dbPromise = void 0;
+}
+async ensureDb() {
+if (this.closed) throw new Error("IndexedDBStorageAdaptor has been closed");
+if (!this.dbPromise) this.dbPromise = new Promise((resolve, reject) => {
+const request = this.idb.open(this.dbName, this.version);
+request.addEventListener("upgradeneeded", () => {
+const db = request.result;
+this.ensureStore(db, this.docStore);
+this.ensureStore(db, this.docUpdateStore);
+this.ensureStore(db, this.metaStore);
+this.ensureStore(db, this.assetStore);
+});
+request.addEventListener("success", () => resolve(request.result), { once: true });
+request.addEventListener("error", () => {
+reject(this.createError(`Failed to open IndexedDB database "${this.dbName}"`, request.error));
+}, { once: true });
+});
+return this.dbPromise;
+}
+ensureStore(db, storeName) {
+const names = db.objectStoreNames;
+if (this.storeExists(names, storeName)) return;
+db.createObjectStore(storeName);
+}
+storeExists(names, storeName) {
+if (typeof names.contains === "function") return names.contains(storeName);
+const length = names.length ?? 0;
+for (let index = 0; index < length; index += 1) if (names.item?.(index) === storeName) return true;
+return false;
+}
+async storeMergedSnapshot(db, docId, incoming) {
+await this.runInTransaction(db, this.docStore, "readwrite", async (store) => {
+const existingRaw = await this.wrapRequest(store.get(docId), "read");
+const existing = await this.normalizeBinary(existingRaw);
+const merged = this.mergeSnapshots(docId, existing, incoming);
+await this.wrapRequest(store.put(merged, docId), "write");
+});
+}
+mergeSnapshots(docId, existing, incoming) {
+try {
+const doc = existing ? LoroDoc.fromSnapshot(existing) : new LoroDoc();
+doc.import(incoming);
+return doc.export({ mode: "snapshot" });
+} catch (error) {
+throw this.createError(`Failed to merge snapshot for "${docId}"`, error);
+}
+}
+async appendDocUpdate(db, docId, update) {
+await this.runInTransaction(db, this.docUpdateStore, "readwrite", async (store) => {
+const raw = await this.wrapRequest(store.get(docId), "read");
+const queue = await this.normalizeUpdateQueue(raw);
+queue.push(update.slice());
+await this.wrapRequest(store.put({ updates: queue }, docId), "write");
+});
+}
+async getDocUpdates(db, docId) {
+const raw = await this.runInTransaction(db, this.docUpdateStore, "readonly", (store) => this.wrapRequest(store.get(docId), "read"));
+return this.normalizeUpdateQueue(raw);
+}
+async clearDocUpdates(db, docId) {
+await this.runInTransaction(db, this.docUpdateStore, "readwrite", (store) => this.wrapRequest(store.delete(docId), "delete"));
+}
+async writeSnapshot(db, docId, snapshot) {
+await this.putBinary(db, this.docStore, docId, snapshot.slice());
+}
+async getBinaryFromDb(db, storeName, key) {
+const value = await this.runInTransaction(db, storeName, "readonly", (store) => this.wrapRequest(store.get(key), "read"));
+return this.normalizeBinary(value);
+}
+async normalizeUpdateQueue(value) {
+if (value == null) return [];
+const list = Array.isArray(value) ? value : typeof value === "object" && value !== null ? value.updates : void 0;
+if (!Array.isArray(list)) return [];
+const queue = [];
+for (const entry of list) {
+const bytes = await this.normalizeBinary(entry);
+if (bytes) queue.push(bytes);
+}
+return queue;
+}
+async putBinary(db, storeName, key, value) {
+await this.runInTransaction(db, storeName, "readwrite", (store) => this.wrapRequest(store.put(value, key), "write"));
+}
+async deleteKey(db, storeName, key) {
+await this.runInTransaction(db, storeName, "readwrite", (store) => this.wrapRequest(store.delete(key), "delete"));
+}
+async getBinary(storeName, key) {
+const db = await this.ensureDb();
+return this.getBinaryFromDb(db, storeName, key);
+}
+runInTransaction(db, storeName, mode, executor) {
+const tx = db.transaction(storeName, mode);
+const store = tx.objectStore(storeName);
+const completion = new Promise((resolve, reject) => {
+tx.addEventListener("complete", () => resolve(), { once: true });
+tx.addEventListener("abort", () => reject(this.createError("IndexedDB transaction aborted", tx.error)), { once: true });
+tx.addEventListener("error", () => reject(this.createError("IndexedDB transaction failed", tx.error)), { once: true });
+});
+return Promise.all([executor(store), completion]).then(([result]) => result);
+}
+wrapRequest(request, action) {
+return new Promise((resolve, reject) => {
+request.addEventListener("success", () => resolve(request.result), { once: true });
+request.addEventListener("error", () => reject(this.createError(`IndexedDB request failed during ${action}`, request.error)), { once: true });
+});
+}
+async normalizeBinary(value) {
+if (value == null) return void 0;
+if (value instanceof Uint8Array) return value.slice();
+if (ArrayBuffer.isView(value)) return new Uint8Array(value.buffer, value.byteOffset, value.byteLength).slice();
+if (value instanceof ArrayBuffer) return new Uint8Array(value.slice(0));
+if (typeof value === "object" && value !== null && "arrayBuffer" in value) {
+const candidate = value;
+if (typeof candidate.arrayBuffer === "function") {
+const buffer = await candidate.arrayBuffer();
+return new Uint8Array(buffer);
+}
+}
+}
+createError(message, cause) {
+if (cause instanceof Error) return new Error(`${message}: ${cause.message}`, { cause });
+if (cause !== void 0 && cause !== null) return /* @__PURE__ */ new Error(`${message}: ${describeUnknown(cause)}`);
+return new Error(message);
+}
+};
+
+//#endregion
+export { IndexedDBStorageAdaptor };
+//# sourceMappingURL=indexeddb.js.map
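
Beyond documents, the adapter also covers binary assets (stored in the "assets" object store, keyed by asset id) and the repo metadata blob. A short asset round-trip sketch under the same assumed subpath import; note that `assetId` is typed as `AssetId` in the declarations, and a plain string is used here only for illustration:

```ts
import { IndexedDBStorageAdaptor } from "loro-repo/storage/indexeddb"; // assumed subpath

async function roundTripAsset(assetId: string, data: Uint8Array): Promise<Uint8Array | undefined> {
  const storage = new IndexedDBStorageAdaptor();

  await storage.save({ type: "asset", assetId, data }); // put into the "assets" store
  const copy = await storage.loadAsset(assetId);        // Uint8Array | undefined
  await storage.deleteAsset(assetId);                   // remove the key again

  await storage.close();
  return copy;
}
```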