loro-repo 0.5.2 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +40 -1
- package/dist/index.cjs +644 -133
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +48 -5
- package/dist/index.d.ts +48 -5
- package/dist/index.js +644 -133
- package/dist/index.js.map +1 -1
- package/dist/storage/filesystem.cjs +60 -10
- package/dist/storage/filesystem.cjs.map +1 -1
- package/dist/storage/filesystem.d.cts +8 -2
- package/dist/storage/filesystem.d.ts +8 -2
- package/dist/storage/filesystem.js +60 -10
- package/dist/storage/filesystem.js.map +1 -1
- package/dist/storage/indexeddb.cjs +51 -9
- package/dist/storage/indexeddb.cjs.map +1 -1
- package/dist/storage/indexeddb.d.cts +7 -1
- package/dist/storage/indexeddb.d.ts +7 -1
- package/dist/storage/indexeddb.js +51 -9
- package/dist/storage/indexeddb.js.map +1 -1
- package/dist/transport/broadcast-channel.cjs +131 -1
- package/dist/transport/broadcast-channel.cjs.map +1 -1
- package/dist/transport/broadcast-channel.d.cts +20 -3
- package/dist/transport/broadcast-channel.d.ts +20 -3
- package/dist/transport/broadcast-channel.js +130 -1
- package/dist/transport/broadcast-channel.js.map +1 -1
- package/dist/transport/websocket.cjs +348 -24
- package/dist/transport/websocket.cjs.map +1 -1
- package/dist/transport/websocket.d.cts +47 -5
- package/dist/transport/websocket.d.ts +47 -5
- package/dist/transport/websocket.js +349 -24
- package/dist/transport/websocket.js.map +1 -1
- package/dist/types.d.cts +121 -4
- package/dist/types.d.ts +121 -4
- package/package.json +7 -7
package/dist/storage/filesystem.cjs

@@ -16,14 +16,19 @@ var FileSystemStorageAdaptor = class {
   baseDir;
   docsDir;
   assetsDir;
-  metaPath;
+  metaDir;
+  metaUpdatesDir;
+  legacyMetaPath;
   initPromise;
   updateCounter = 0;
+  metaUpdateCounter = 0;
   constructor(options = {}) {
     this.baseDir = node_path.resolve(options.baseDir ?? node_path.join(process.cwd(), ".loro-repo"));
     this.docsDir = node_path.join(this.baseDir, options.docsDirName ?? "docs");
     this.assetsDir = node_path.join(this.baseDir, options.assetsDirName ?? "assets");
-    this.metaPath = node_path.join(this.baseDir, options.metaFileName ?? "meta.json");
+    this.metaDir = node_path.join(this.baseDir, "meta");
+    this.metaUpdatesDir = node_path.join(this.metaDir, "updates");
+    this.legacyMetaPath = node_path.join(this.baseDir, options.metaFileName ?? "meta.json");
     this.initPromise = this.ensureLayout();
   }
   async save(payload) {
@@ -39,7 +44,7 @@ var FileSystemStorageAdaptor = class {
         await this.writeAsset(payload.assetId, payload.data);
         return;
       case "meta":
-        await writeFileAtomic(this.metaPath, payload.update);
+        await this.enqueueMetaUpdate(payload.update);
         return;
       default: throw new Error(`Unsupported payload type: ${payload.type}`);
     }
@@ -67,18 +72,36 @@ var FileSystemStorageAdaptor = class {
     await this.writeDocSnapshot(docId, consolidated);
     return doc;
   }
+  async deleteDoc(docId) {
+    await this.initPromise;
+    await removeIfExists(this.docSnapshotPath(docId));
+    await removeDirIfExists(this.docUpdatesDir(docId));
+    await removeDirIfExists(this.docDir(docId));
+  }
   async loadMeta() {
     await this.initPromise;
-    const bytes = await readFileIfExists(this.metaPath);
-    if (!bytes) return void 0;
+    const updatePaths = (await listFiles(this.metaUpdatesDir)).map((file) => node_path.join(this.metaUpdatesDir, file));
+    const updates = [];
+    for (const updatePath of updatePaths) {
+      const bytes = await readFileIfExists(updatePath);
+      if (bytes) updates.push(bytes);
+    }
+    const legacy = await readFileIfExists(this.legacyMetaPath);
+    const totalUpdates = updates.length + (legacy ? 1 : 0);
+    if (totalUpdates === 0) return void 0;
+    const flock = new __loro_dev_flock.Flock();
     try {
-      const bundle = JSON.parse(textDecoder.decode(bytes));
-      const flock = new __loro_dev_flock.Flock();
-      flock.importJson(bundle);
-      return flock;
+      if (legacy) flock.importJson(JSON.parse(textDecoder.decode(legacy)));
+      for (const bytes of updates) flock.importJson(JSON.parse(textDecoder.decode(bytes)));
     } catch (error) {
       throw new Error("Failed to hydrate metadata snapshot", { cause: error });
     }
+    if (totalUpdates > 1 || legacy) {
+      const snapshot = flock.exportJson();
+      const encoded = new TextEncoder().encode(JSON.stringify(snapshot));
+      await this.compactMeta(encoded, updatePaths, Boolean(legacy));
+    }
+    return flock;
   }
   async loadAsset(assetId) {
     await this.initPromise;
@@ -88,7 +111,8 @@ var FileSystemStorageAdaptor = class {
     await Promise.all([
       ensureDir(this.baseDir),
       ensureDir(this.docsDir),
-      ensureDir(this.assetsDir)
+      ensureDir(this.assetsDir),
+      ensureDir(this.metaUpdatesDir)
     ]);
   }
   async writeDocSnapshot(docId, snapshot) {
@@ -107,6 +131,21 @@ var FileSystemStorageAdaptor = class {
     await ensureDir(node_path.dirname(filePath));
     await writeFileAtomic(filePath, data);
   }
+  async enqueueMetaUpdate(update) {
+    const dir = this.metaUpdatesDir;
+    await ensureDir(dir);
+    const counter = this.metaUpdateCounter = (this.metaUpdateCounter + 1) % 1e6;
+    const fileName = `${Date.now().toString().padStart(13, "0")}-${counter.toString().padStart(6, "0")}.json`;
+    await writeFileAtomic(node_path.join(dir, fileName), update);
+  }
+  async compactMeta(update, previousUpdates, hadLegacy) {
+    const targetPath = node_path.join(this.metaUpdatesDir, "000000-full.json");
+    await ensureDir(this.metaUpdatesDir);
+    await writeFileAtomic(targetPath, update);
+    const removals = hadLegacy ? [this.legacyMetaPath] : [];
+    removals.push(...previousUpdates.filter((filePath) => node_path.resolve(filePath) !== node_path.resolve(targetPath)));
+    await Promise.all(removals.map((filePath) => removeIfExists(filePath)));
+  }
   docDir(docId) {
     return node_path.join(this.docsDir, encodeComponent(docId));
   }
@@ -143,6 +182,17 @@ async function removeIfExists(filePath) {
     throw error;
   }
 }
+async function removeDirIfExists(dir) {
+  try {
+    await node_fs.promises.rm(dir, {
+      recursive: true,
+      force: true
+    });
+  } catch (error) {
+    if (error.code === "ENOENT") return;
+    throw error;
+  }
+}
 async function listFiles(dir) {
   try {
     return (await node_fs.promises.readdir(dir)).sort();
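The compiled changes above boil down to a new on-disk layout and a small API addition for the filesystem adaptor: "meta" payloads are appended as individual files under `<baseDir>/meta/updates/` instead of overwriting a single `meta.json`, `loadMeta()` replays those files (plus any pre-0.6.0 `meta.json`) into a Flock and compacts the result when there is more than one source, and a new `deleteDoc()` removes a document's snapshot and queued updates. A minimal usage sketch, assuming the adaptor is importable from a `loro-repo/storage/filesystem` subpath export (the import specifier and doc id are illustrative, not taken from the package docs):

```ts
import { FileSystemStorageAdaptor } from "loro-repo/storage/filesystem";

const storage = new FileSystemStorageAdaptor({ baseDir: "./.loro-repo" });

// loadMeta() now scans <baseDir>/meta/updates/*.json, folds in a legacy
// <baseDir>/meta.json when one exists, and (if anything needed merging)
// writes the compacted result back as meta/updates/000000-full.json.
const meta = await storage.loadMeta();
console.log(meta ? "metadata hydrated" : "no metadata on disk yet");

// New in 0.6.0: drop a document's snapshot.bin, its updates/ queue, and its
// directory under <baseDir>/docs/.
await storage.deleteDoc("example-doc");
```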
package/dist/storage/filesystem.cjs.map

(Source map regenerated to match the new filesystem.cjs; the single-line JSON diff is omitted here.)
package/dist/storage/filesystem.d.cts

@@ -1,4 +1,4 @@
-import {
+import { D as StorageSavePayload, E as StorageAdapter, r as AssetId } from "../types.cjs";
 import { Flock } from "@loro-dev/flock";
 import { LoroDoc } from "loro-crdt";
 
@@ -26,19 +26,25 @@ declare class FileSystemStorageAdaptor implements StorageAdapter {
     private readonly baseDir;
     private readonly docsDir;
     private readonly assetsDir;
-    private readonly metaPath;
+    private readonly metaDir;
+    private readonly metaUpdatesDir;
+    private readonly legacyMetaPath;
     private readonly initPromise;
     private updateCounter;
+    private metaUpdateCounter;
     constructor(options?: FileSystemStorageAdaptorOptions);
     save(payload: StorageSavePayload): Promise<void>;
     deleteAsset(assetId: AssetId): Promise<void>;
     loadDoc(docId: string): Promise<LoroDoc | undefined>;
+    deleteDoc(docId: string): Promise<void>;
     loadMeta(): Promise<Flock | undefined>;
     loadAsset(assetId: AssetId): Promise<Uint8Array | undefined>;
     private ensureLayout;
     private writeDocSnapshot;
     private enqueueDocUpdate;
     private writeAsset;
+    private enqueueMetaUpdate;
+    private compactMeta;
     private docDir;
     private docSnapshotPath;
     private docUpdatesDir;
package/dist/storage/filesystem.d.ts

@@ -1,4 +1,4 @@
-import {
+import { D as StorageSavePayload, E as StorageAdapter, r as AssetId } from "../types.js";
 import { Flock } from "@loro-dev/flock";
 import { LoroDoc } from "loro-crdt";
 
@@ -26,19 +26,25 @@ declare class FileSystemStorageAdaptor implements StorageAdapter {
     private readonly baseDir;
     private readonly docsDir;
     private readonly assetsDir;
-    private readonly metaPath;
+    private readonly metaDir;
+    private readonly metaUpdatesDir;
+    private readonly legacyMetaPath;
     private readonly initPromise;
     private updateCounter;
+    private metaUpdateCounter;
     constructor(options?: FileSystemStorageAdaptorOptions);
     save(payload: StorageSavePayload): Promise<void>;
     deleteAsset(assetId: AssetId): Promise<void>;
     loadDoc(docId: string): Promise<LoroDoc | undefined>;
+    deleteDoc(docId: string): Promise<void>;
     loadMeta(): Promise<Flock | undefined>;
     loadAsset(assetId: AssetId): Promise<Uint8Array | undefined>;
     private ensureLayout;
     private writeDocSnapshot;
     private enqueueDocUpdate;
     private writeAsset;
+    private enqueueMetaUpdate;
+    private compactMeta;
     private docDir;
     private docSnapshotPath;
     private docUpdatesDir;
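Both declaration files gain the same members. One detail visible in the compiled output above: queued update files (document updates as `.bin`, metadata updates as `.json`) are named with a zero-padded millisecond timestamp followed by a wrapping per-instance counter, so a plain lexicographic sort of the directory listing replays them in write order. A standalone sketch of that naming scheme (this helper is illustrative, not something the package exports):

```ts
// Mirrors the `${timestamp}-${counter}` pattern in enqueueDocUpdate and
// enqueueMetaUpdate; the wrap-around matches the `% 1e6` in the diff.
function updateFileName(counter: number, ext: "bin" | "json"): string {
  const timestamp = Date.now().toString().padStart(13, "0");
  const sequence = (counter % 1_000_000).toString().padStart(6, "0");
  return `${timestamp}-${sequence}.${ext}`;
}

console.log(updateFileName(42, "json")); // e.g. "1717171717171-000042.json"
```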
package/dist/storage/filesystem.js

@@ -10,14 +10,19 @@ var FileSystemStorageAdaptor = class {
   baseDir;
   docsDir;
   assetsDir;
-  metaPath;
+  metaDir;
+  metaUpdatesDir;
+  legacyMetaPath;
   initPromise;
   updateCounter = 0;
+  metaUpdateCounter = 0;
   constructor(options = {}) {
     this.baseDir = path.resolve(options.baseDir ?? path.join(process.cwd(), ".loro-repo"));
     this.docsDir = path.join(this.baseDir, options.docsDirName ?? "docs");
     this.assetsDir = path.join(this.baseDir, options.assetsDirName ?? "assets");
-    this.metaPath = path.join(this.baseDir, options.metaFileName ?? "meta.json");
+    this.metaDir = path.join(this.baseDir, "meta");
+    this.metaUpdatesDir = path.join(this.metaDir, "updates");
+    this.legacyMetaPath = path.join(this.baseDir, options.metaFileName ?? "meta.json");
     this.initPromise = this.ensureLayout();
   }
   async save(payload) {
@@ -33,7 +38,7 @@ var FileSystemStorageAdaptor = class {
         await this.writeAsset(payload.assetId, payload.data);
         return;
       case "meta":
-        await writeFileAtomic(this.metaPath, payload.update);
+        await this.enqueueMetaUpdate(payload.update);
         return;
       default: throw new Error(`Unsupported payload type: ${payload.type}`);
     }
@@ -61,18 +66,36 @@ var FileSystemStorageAdaptor = class {
     await this.writeDocSnapshot(docId, consolidated);
     return doc;
   }
+  async deleteDoc(docId) {
+    await this.initPromise;
+    await removeIfExists(this.docSnapshotPath(docId));
+    await removeDirIfExists(this.docUpdatesDir(docId));
+    await removeDirIfExists(this.docDir(docId));
+  }
   async loadMeta() {
     await this.initPromise;
-    const bytes = await readFileIfExists(this.metaPath);
-    if (!bytes) return void 0;
+    const updatePaths = (await listFiles(this.metaUpdatesDir)).map((file) => path.join(this.metaUpdatesDir, file));
+    const updates = [];
+    for (const updatePath of updatePaths) {
+      const bytes = await readFileIfExists(updatePath);
+      if (bytes) updates.push(bytes);
+    }
+    const legacy = await readFileIfExists(this.legacyMetaPath);
+    const totalUpdates = updates.length + (legacy ? 1 : 0);
+    if (totalUpdates === 0) return void 0;
+    const flock = new Flock();
     try {
-      const bundle = JSON.parse(textDecoder.decode(bytes));
-      const flock = new Flock();
-      flock.importJson(bundle);
-      return flock;
+      if (legacy) flock.importJson(JSON.parse(textDecoder.decode(legacy)));
+      for (const bytes of updates) flock.importJson(JSON.parse(textDecoder.decode(bytes)));
     } catch (error) {
       throw new Error("Failed to hydrate metadata snapshot", { cause: error });
     }
+    if (totalUpdates > 1 || legacy) {
+      const snapshot = flock.exportJson();
+      const encoded = new TextEncoder().encode(JSON.stringify(snapshot));
+      await this.compactMeta(encoded, updatePaths, Boolean(legacy));
+    }
+    return flock;
   }
   async loadAsset(assetId) {
     await this.initPromise;
@@ -82,7 +105,8 @@ var FileSystemStorageAdaptor = class {
     await Promise.all([
       ensureDir(this.baseDir),
       ensureDir(this.docsDir),
-      ensureDir(this.assetsDir)
+      ensureDir(this.assetsDir),
+      ensureDir(this.metaUpdatesDir)
     ]);
   }
   async writeDocSnapshot(docId, snapshot) {
@@ -101,6 +125,21 @@ var FileSystemStorageAdaptor = class {
     await ensureDir(path.dirname(filePath));
     await writeFileAtomic(filePath, data);
   }
+  async enqueueMetaUpdate(update) {
+    const dir = this.metaUpdatesDir;
+    await ensureDir(dir);
+    const counter = this.metaUpdateCounter = (this.metaUpdateCounter + 1) % 1e6;
+    const fileName = `${Date.now().toString().padStart(13, "0")}-${counter.toString().padStart(6, "0")}.json`;
+    await writeFileAtomic(path.join(dir, fileName), update);
+  }
+  async compactMeta(update, previousUpdates, hadLegacy) {
+    const targetPath = path.join(this.metaUpdatesDir, "000000-full.json");
+    await ensureDir(this.metaUpdatesDir);
+    await writeFileAtomic(targetPath, update);
+    const removals = hadLegacy ? [this.legacyMetaPath] : [];
+    removals.push(...previousUpdates.filter((filePath) => path.resolve(filePath) !== path.resolve(targetPath)));
+    await Promise.all(removals.map((filePath) => removeIfExists(filePath)));
+  }
   docDir(docId) {
     return path.join(this.docsDir, encodeComponent(docId));
   }
@@ -137,6 +176,17 @@ async function removeIfExists(filePath) {
     throw error;
   }
 }
+async function removeDirIfExists(dir) {
+  try {
+    await promises.rm(dir, {
+      recursive: true,
+      force: true
+    });
+  } catch (error) {
+    if (error.code === "ENOENT") return;
+    throw error;
+  }
+}
 async function listFiles(dir) {
   try {
     return (await promises.readdir(dir)).sort();
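The ESM build mirrors the CJS changes, including the migration path for existing repos: the first `loadMeta()` call on a 0.5.x layout imports the legacy `meta.json`, writes the compacted `meta/updates/000000-full.json`, and then deletes the legacy file along with any replayed update files. A small inspection sketch for a Node ESM context (the helper name and its logging are illustrative only):

```ts
import { promises as fs } from "node:fs";
import * as path from "node:path";

// Reports which metadata layout is present under a loro-repo base directory.
async function describeMetaLayout(baseDir: string): Promise<void> {
  const legacyPath = path.join(baseDir, "meta.json");        // 0.5.x layout
  const updatesDir = path.join(baseDir, "meta", "updates");  // 0.6.0 layout
  const hasLegacy = await fs.access(legacyPath).then(() => true, () => false);
  const updates = await fs.readdir(updatesDir).catch(() => [] as string[]);
  console.log({ hasLegacy, updates });
}

// Right after one loadMeta() on an upgraded repo, expect hasLegacy === false
// and a single compacted "000000-full.json" entry in updates.
await describeMetaLayout("./.loro-repo");
```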
package/dist/storage/filesystem.js.map

(Source map regenerated to match the new filesystem.js; the single-line JSON diff is omitted here.)
M,WAAW,YAAY,CAAC,KAAK,eAAe,GAAG,EAAE;AACvD,WAAS,KACP,GAAG,gBAAgB,QAAQ,aAAa,KAAK,QAAQ,SAAS,KAAK,KAAK,QAAQ,WAAW,CAAC,CAC7F;AACD,QAAM,QAAQ,IAAI,SAAS,KAAK,aAAa,eAAe,SAAS,CAAC,CAAC;;CAGzE,AAAQ,OAAO,OAAuB;AACpC,SAAO,KAAK,KAAK,KAAK,SAAS,gBAAgB,MAAM,CAAC;;CAGxD,AAAQ,gBAAgB,OAAuB;AAC7C,SAAO,KAAK,KAAK,KAAK,OAAO,MAAM,EAAE,eAAe;;CAGtD,AAAQ,cAAc,OAAuB;AAC3C,SAAO,KAAK,KAAK,KAAK,OAAO,MAAM,EAAE,UAAU;;CAGjD,AAAQ,UAAU,SAA0B;AAC1C,SAAO,KAAK,KAAK,KAAK,WAAW,gBAAgB,QAAQ,CAAC;;;AAI9D,SAAS,gBAAgB,OAAuB;AAC9C,QAAO,OAAO,KAAK,OAAO,OAAO,CAAC,SAAS,YAAY;;AAGzD,eAAe,UAAU,KAA4B;AACnD,OAAMC,SAAG,MAAM,KAAK,EAAE,WAAW,MAAM,CAAC;;AAG1C,eAAe,iBAAiB,UAAmD;AACjF,KAAI;EACF,MAAM,OAAO,MAAMA,SAAG,SAAS,SAAS;AACxC,SAAO,IAAI,WAAW,KAAK,QAAQ,KAAK,YAAY,KAAK,WAAW,CAAC,OAAO;UACrE,OAAO;AACd,MAAK,MAAgC,SAAS,SAC5C;AAEF,QAAM;;;AAIV,eAAe,eAAe,UAAiC;AAC7D,KAAI;AACF,QAAMA,SAAG,GAAG,SAAS;UACd,OAAO;AACd,MAAK,MAAgC,SAAS,SAC5C;AAEF,QAAM;;;AAIV,eAAe,kBAAkB,KAA4B;AAC3D,KAAI;AACF,QAAMA,SAAG,GAAG,KAAK;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;UAC3C,OAAO;AACd,MAAK,MAAgC,SAAS,SAC5C;AAEF,QAAM;;;AAIV,eAAe,UAAU,KAAgC;AACvD,KAAI;AAEF,UADgB,MAAMA,SAAG,QAAQ,IAAI,EACtB,MAAM;UACd,OAAO;AACd,MAAK,MAAgC,SAAS,SAC5C,QAAO,EAAE;AAEX,QAAM;;;AAIV,eAAe,gBACb,YACA,MACe;CACf,MAAM,MAAM,KAAK,QAAQ,WAAW;AACpC,OAAM,UAAU,IAAI;CACpB,MAAM,WAAW,KAAK,KAAK,KAAK,QAAQ,YAAY,GAAG;AACvD,OAAMA,SAAG,UAAU,UAAU,KAAK;AAClC,OAAMA,SAAG,OAAO,UAAU,WAAW"}
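The source map above embeds (in sourcesContent) the full TypeScript of the rewritten filesystem adapter: per-document snapshot and update files, per-asset blobs, and a new meta/updates queue that is compacted on load, with the old meta.json treated as a legacy input. Below is a minimal sketch of driving that adapter directly, based only on the StorageAdapter surface visible in the embedded source; the import specifier, base directory, and doc id are assumptions for illustration, and the export mode names come from loro-crdt rather than from this package.

// Illustrative only; the subpath import is assumed from the dist layout.
import { FileSystemStorageAdaptor } from "loro-repo/storage/filesystem";
import { LoroDoc } from "loro-crdt";

const storage = new FileSystemStorageAdaptor({ baseDir: "/tmp/example-loro-repo" });

const doc = new LoroDoc();
doc.getText("text").insert(0, "hello");

// Queues a <timestamp>-<counter>.bin file under docs/<base64url(docId)>/updates/.
await storage.save({
  type: "doc-update",
  docId: "example-doc",
  update: doc.export({ mode: "update" }),
});

// Replays any queued updates, then consolidates them into docs/<...>/snapshot.bin.
const loaded = await storage.loadDoc("example-doc");

// 0.6.0 persists metadata as queued JSON updates under meta/updates/ and
// compacts them (plus any legacy meta.json) on the next loadMeta().
const meta = await storage.loadMeta();

// Removes snapshot.bin and the updates/ queue for that document.
await storage.deleteDoc("example-doc");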
@@ -6,13 +6,15 @@ loro_crdt = require_chunk.__toESM(loro_crdt);
  6   6 |
  7   7 | //#region src/storage/indexeddb.ts
  8   8 | const DEFAULT_DB_NAME = "loro-repo";
  9     | - const DEFAULT_DB_VERSION =
      9 | + const DEFAULT_DB_VERSION = 2;
 10  10 | const DEFAULT_DOC_STORE = "docs";
 11  11 | const DEFAULT_META_STORE = "meta";
 12  12 | const DEFAULT_ASSET_STORE = "assets";
 13  13 | const DEFAULT_DOC_UPDATE_STORE = "doc-updates";
     14 | + const DEFAULT_META_UPDATE_STORE = "meta-updates";
 14  15 | const DEFAULT_META_KEY = "snapshot";
 15  16 | const textDecoder = new TextDecoder();
     17 | + const textEncoder = new TextEncoder();
 16  18 | function describeUnknown(cause) {
 17  19 | if (typeof cause === "string") return cause;
 18  20 | if (typeof cause === "number" || typeof cause === "boolean") return String(cause);
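The version bump to DEFAULT_DB_VERSION = 2, together with the new "meta-updates" store name, means an existing database is migrated the next time the adapter opens it: the version change fires IndexedDB's upgradeneeded event, where the adapter's ensureStore calls (see the @@ -140 hunk further down) create the missing object store. A minimal sketch of that standard upgrade path, using only the browser IndexedDB API and the names shown above (the package does this internally; this is illustrative, not its code):

// Sketch of the upgrade the adapter relies on when moving from a 0.5.x database.
const request = indexedDB.open("loro-repo", 2);
request.onupgradeneeded = () => {
  const db = request.result;
  if (!db.objectStoreNames.contains("meta-updates")) {
    // No keyPath: keys are supplied explicitly, matching the put(value, "queue") calls below.
    db.createObjectStore("meta-updates");
  }
};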
@@ -33,6 +35,7 @@ var IndexedDBStorageAdaptor = class {
 33  35 | docStore;
 34  36 | docUpdateStore;
 35  37 | metaStore;
     38 | + metaUpdateStore;
 36  39 | assetStore;
 37  40 | metaKey;
 38  41 | dbPromise;
@@ -46,6 +49,7 @@ var IndexedDBStorageAdaptor = class {
 46  49 | this.docStore = options.docStoreName ?? DEFAULT_DOC_STORE;
 47  50 | this.docUpdateStore = options.docUpdateStoreName ?? DEFAULT_DOC_UPDATE_STORE;
 48  51 | this.metaStore = options.metaStoreName ?? DEFAULT_META_STORE;
     52 | + this.metaUpdateStore = options.metaUpdateStoreName ?? DEFAULT_META_UPDATE_STORE;
 49  53 | this.assetStore = options.assetStoreName ?? DEFAULT_ASSET_STORE;
 50  54 | this.metaKey = options.metaKey ?? DEFAULT_META_KEY;
 51  55 | }
@@ -69,7 +73,7 @@ var IndexedDBStorageAdaptor = class {
 69  73 | }
 70  74 | case "meta": {
 71  75 | const bytes = payload.update.slice();
 72     | - await this.
     76 | + await this.appendMetaUpdate(db, bytes);
 73  77 | break;
 74  78 | }
 75  79 | default: throw new Error("Unsupported storage payload type");
@@ -109,18 +113,36 @@ var IndexedDBStorageAdaptor = class {
109 113 | }
110 114 | return doc;
111 115 | }
    116 | + async deleteDoc(docId) {
    117 | + const db = await this.ensureDb();
    118 | + await Promise.all([this.deleteKey(db, this.docStore, docId), this.clearDocUpdates(db, docId)]);
    119 | + }
112 120 | async loadMeta() {
113     | - const
114     | -
    121 | + const db = await this.ensureDb();
    122 | + const legacy = await this.getBinaryFromDb(db, this.metaStore, this.metaKey);
    123 | + const queuedUpdates = await this.getMetaUpdates(db);
    124 | + if (!legacy && queuedUpdates.length === 0) return void 0;
    125 | + const flock = new __loro_dev_flock.Flock();
115 126 | try {
116     | -
117     | -
118     | -
119     | -
120     | -
    127 | + if (legacy) {
    128 | + const json = textDecoder.decode(legacy);
    129 | + const bundle = JSON.parse(json);
    130 | + flock.importJson(bundle);
    131 | + }
    132 | + for (const bytes of queuedUpdates) {
    133 | + const json = textDecoder.decode(bytes);
    134 | + const bundle = JSON.parse(json);
    135 | + flock.importJson(bundle);
    136 | + }
121 137 | } catch (error) {
122 138 | throw this.createError("Failed to hydrate metadata snapshot", error);
123 139 | }
    140 | + if (legacy || queuedUpdates.length > 1) {
    141 | + const snapshot = flock.exportJson();
    142 | + const encoded = textEncoder.encode(JSON.stringify(snapshot));
    143 | + await this.writeMetaSnapshot(db, encoded);
    144 | + } else if (queuedUpdates.length === 1) await this.writeMetaSnapshot(db, queuedUpdates[0]);
    145 | + return flock;
124 146 | }
125 147 | async loadAsset(assetId) {
126 148 | return await this.getBinary(this.assetStore, assetId) ?? void 0;
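The rewritten loadMeta() now hydrates metadata from two places: the legacy single record in the "meta" store and the queued updates in "meta-updates". It merges everything into one Flock and then compacts. The effect on stored state, assuming both sources held data before the call (a sketch inferred from the code above, not an official migration note):

before   meta store:          "snapshot" -> legacy ExportBundle bytes
         meta-updates store:  "queue"    -> { updates: [u1, u2, ...] }
after    meta store:          "snapshot" deleted
         meta-updates store:  "queue"    -> { updates: [single compacted flock.exportJson() bundle] }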
@@ -140,6 +162,7 @@ var IndexedDBStorageAdaptor = class {
140 162 | this.ensureStore(db, this.docStore);
141 163 | this.ensureStore(db, this.docUpdateStore);
142 164 | this.ensureStore(db, this.metaStore);
    165 | + this.ensureStore(db, this.metaUpdateStore);
143 166 | this.ensureStore(db, this.assetStore);
144 167 | });
145 168 | request.addEventListener("success", () => resolve(request.result), { once: true });
@@ -189,6 +212,25 @@ var IndexedDBStorageAdaptor = class {
189 212 | const raw = await this.runInTransaction(db, this.docUpdateStore, "readonly", (store) => this.wrapRequest(store.get(docId), "read"));
190 213 | return this.normalizeUpdateQueue(raw);
191 214 | }
    215 | + async appendMetaUpdate(db, update) {
    216 | + await this.runInTransaction(db, this.metaUpdateStore, "readwrite", async (store) => {
    217 | + const raw = await this.wrapRequest(store.get("queue"), "read");
    218 | + const queue = await this.normalizeUpdateQueue(raw);
    219 | + queue.push(update.slice());
    220 | + await this.wrapRequest(store.put({ updates: queue }, "queue"), "write");
    221 | + });
    222 | + }
    223 | + async getMetaUpdates(db) {
    224 | + const raw = await this.runInTransaction(db, this.metaUpdateStore, "readonly", (store) => this.wrapRequest(store.get("queue"), "read"));
    225 | + return this.normalizeUpdateQueue(raw);
    226 | + }
    227 | + async writeMetaSnapshot(db, update) {
    228 | + const bytes = update.slice();
    229 | + await this.runInTransaction(db, this.metaUpdateStore, "readwrite", async (store) => {
    230 | + await this.wrapRequest(store.put({ updates: [bytes] }, "queue"), "write");
    231 | + });
    232 | + await this.runInTransaction(db, this.metaStore, "readwrite", (store) => this.wrapRequest(store.delete(this.metaKey), "delete"));
    233 | + }
192 234 | async clearDocUpdates(db, docId) {
193 235 | await this.runInTransaction(db, this.docUpdateStore, "readwrite", (store) => this.wrapRequest(store.delete(docId), "delete"));
194 236 | }
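All three new helpers read and write a single record under the fixed key "queue" in the meta-updates store. Its shape, inferred from the put() calls above (the interface name here is ours; the package does not export this type):

// Inferred shape of the meta-update queue record.
interface MetaUpdateQueueRecord {
  // Each entry is one JSON-encoded Flock export bundle, stored as raw bytes.
  updates: Uint8Array[];
}
// appendMetaUpdate pushes onto `updates`; writeMetaSnapshot replaces the whole
// array with a single compacted entry and deletes the legacy "snapshot" record
// from the old meta store.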