@fireproof/core 0.18.0 → 0.19.4-dev
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +29 -15
- package/chunk-7OGPZSGT.js +39 -0
- package/chunk-7OGPZSGT.js.map +1 -0
- package/chunk-H3A2HMMM.js +164 -0
- package/chunk-H3A2HMMM.js.map +1 -0
- package/chunk-HCXR2M5B.js +202 -0
- package/chunk-HCXR2M5B.js.map +1 -0
- package/chunk-QHSXUST7.js +208 -0
- package/chunk-QHSXUST7.js.map +1 -0
- package/chunk-VZGT7ZYP.js +22 -0
- package/chunk-VZGT7ZYP.js.map +1 -0
- package/index.cjs +4649 -0
- package/index.cjs.map +1 -0
- package/index.d.cts +911 -0
- package/index.d.ts +911 -0
- package/index.js +2923 -0
- package/index.js.map +1 -0
- package/metafile-cjs.json +1 -0
- package/metafile-esm.json +1 -0
- package/node-sys-container-E7LADX2Z.js +29 -0
- package/node-sys-container-E7LADX2Z.js.map +1 -0
- package/package.json +19 -109
- package/sqlite-data-store-YS4U7AQ4.js +120 -0
- package/sqlite-data-store-YS4U7AQ4.js.map +1 -0
- package/sqlite-meta-store-FJZSZG4R.js +137 -0
- package/sqlite-meta-store-FJZSZG4R.js.map +1 -0
- package/sqlite-wal-store-6JZ4URNS.js +123 -0
- package/sqlite-wal-store-6JZ4URNS.js.map +1 -0
- package/store-file-HMHPQTUV.js +193 -0
- package/store-file-HMHPQTUV.js.map +1 -0
- package/store-indexdb-MRVZG4OG.js +20 -0
- package/store-indexdb-MRVZG4OG.js.map +1 -0
- package/store-sql-5XMJ5OWJ.js +406 -0
- package/store-sql-5XMJ5OWJ.js.map +1 -0
- package/dist/browser/fireproof.cjs +0 -1172
- package/dist/browser/fireproof.cjs.map +0 -1
- package/dist/browser/fireproof.d.cts +0 -268
- package/dist/browser/fireproof.d.ts +0 -268
- package/dist/browser/fireproof.global.js +0 -24178
- package/dist/browser/fireproof.global.js.map +0 -1
- package/dist/browser/fireproof.js +0 -1147
- package/dist/browser/fireproof.js.map +0 -1
- package/dist/browser/metafile-cjs.json +0 -1
- package/dist/browser/metafile-esm.json +0 -1
- package/dist/browser/metafile-iife.json +0 -1
- package/dist/memory/fireproof.cjs +0 -1172
- package/dist/memory/fireproof.cjs.map +0 -1
- package/dist/memory/fireproof.d.cts +0 -268
- package/dist/memory/fireproof.d.ts +0 -268
- package/dist/memory/fireproof.global.js +0 -24178
- package/dist/memory/fireproof.global.js.map +0 -1
- package/dist/memory/fireproof.js +0 -1147
- package/dist/memory/fireproof.js.map +0 -1
- package/dist/memory/metafile-cjs.json +0 -1
- package/dist/memory/metafile-esm.json +0 -1
- package/dist/memory/metafile-iife.json +0 -1
- package/dist/node/fireproof.cjs +0 -1172
- package/dist/node/fireproof.cjs.map +0 -1
- package/dist/node/fireproof.d.cts +0 -268
- package/dist/node/fireproof.d.ts +0 -268
- package/dist/node/fireproof.global.js +0 -38540
- package/dist/node/fireproof.global.js.map +0 -1
- package/dist/node/fireproof.js +0 -1138
- package/dist/node/fireproof.js.map +0 -1
- package/dist/node/metafile-cjs.json +0 -1
- package/dist/node/metafile-esm.json +0 -1
- package/dist/node/metafile-iife.json +0 -1
package/index.js
ADDED
@@ -0,0 +1,2923 @@
|
|
1
|
+
import {
|
2
|
+
guardVersion
|
3
|
+
} from "./chunk-QHSXUST7.js";
|
4
|
+
import {
|
5
|
+
NotFoundError,
|
6
|
+
isNotFoundError
|
7
|
+
} from "./chunk-VZGT7ZYP.js";
|
8
|
+
import {
|
9
|
+
dataDir,
|
10
|
+
ensureLogger,
|
11
|
+
exception2Result,
|
12
|
+
exceptionWrapper,
|
13
|
+
getKey,
|
14
|
+
getName,
|
15
|
+
getStore,
|
16
|
+
runtime_exports
|
17
|
+
} from "./chunk-HCXR2M5B.js";
|
18
|
+
import {
|
19
|
+
SysContainer,
|
20
|
+
__export,
|
21
|
+
falsyToUndef,
|
22
|
+
isFalsy,
|
23
|
+
throwFalsy
|
24
|
+
} from "./chunk-H3A2HMMM.js";
|
25
|
+
|
26
|
+
// src/database.ts
|
27
|
+
import { uuidv7 } from "uuidv7";
|
28
|
+
import { ResolveOnce as ResolveOnce5 } from "@adviser/cement";
|
29
|
+
|
30
|
+
// src/write-queue.ts
|
31
|
+
/**
 * Serializes calls to `worker` over a FIFO of pending tasks.
 *
 * @param worker    async function receiving an array of tasks.
 * @param payload   max batch size pulled per drain cycle (default: unlimited).
 * @param unbounded when true, each task in a batch gets its own worker([task])
 *                  call and they run in parallel; when false, the whole batch
 *                  shares one worker(tasks) call and its single outcome.
 * @returns {{push(task): Promise}} push resolves/rejects with the worker result.
 */
function writeQueue(worker, payload = Infinity, unbounded = false) {
  const pending = [];
  let draining = false;

  async function drain() {
    if (draining || pending.length === 0) return;
    draining = true;
    const batch = pending.splice(0, payload);
    if (unbounded) {
      // Independent, parallel worker calls; each entry settles on its own.
      await Promise.all(
        batch.map(async (entry) => {
          try {
            entry.resolve(await worker([entry.task]));
          } catch (failure) {
            entry.reject(failure);
          }
        })
      );
    } else {
      // One worker call for the whole batch; every entry shares its outcome.
      try {
        const outcome = await worker(batch.map((entry) => entry.task));
        for (const entry of batch) entry.resolve(outcome);
      } catch (failure) {
        for (const entry of batch) entry.reject(failure);
      }
    }
    draining = false;
    // Re-drive in case tasks arrived while we were busy.
    void drain();
  }

  return {
    push(task) {
      return new Promise((resolve, reject) => {
        pending.push({ task, resolve, reject });
        void drain();
      });
    }
  };
}
|
69
|
+
|
70
|
+
// src/crdt.ts
|
71
|
+
import { ResolveOnce as ResolveOnce4 } from "@adviser/cement";
|
72
|
+
|
73
|
+
// src/crdt-helpers.ts
|
74
|
+
import { encode as encode3, decode as decode3, Block as Block2 } from "multiformats/block";
|
75
|
+
import { parse as parse2 } from "multiformats/link";
|
76
|
+
import { sha256 as hasher2 } from "multiformats/hashes/sha2";
|
77
|
+
import * as codec2 from "@ipld/dag-cbor";
|
78
|
+
import { put, get, entries, root } from "@web3-storage/pail/crdt";
|
79
|
+
import { EventFetcher, vis } from "@web3-storage/pail/clock";
|
80
|
+
import * as Batch from "@web3-storage/pail/crdt/batch";
|
81
|
+
|
82
|
+
// src/blockstore/index.ts
|
83
|
+
// Aggregated public surface of the blockstore package.
// This is bundler-generated (tsup/esbuild) re-export glue: each entry is a
// lazy getter so circular references between modules resolve at access time.
var blockstore_exports = {};
__export(blockstore_exports, {
  BaseBlockstore: () => BaseBlockstore,
  CarTransaction: () => CarTransaction,
  CompactionFetcher: () => CompactionFetcher,
  ConnectREST: () => ConnectREST,
  ConnectionBase: () => ConnectionBase,
  DataStore: () => DataStore,
  EncryptedBlockstore: () => EncryptedBlockstore,
  Loadable: () => Loadable,
  Loader: () => Loader,
  MetaStore: () => MetaStore,
  NotFoundError: () => NotFoundError,
  RemoteWAL: () => RemoteWAL,
  isNotFoundError: () => isNotFoundError,
  parseCarFile: () => parseCarFile,
  registerStoreProtocol: () => registerStoreProtocol,
  testStoreFactory: () => testStoreFactory,
  toStoreRuntime: () => toStoreRuntime,
  toURL: () => toURL
});
|
104
|
+
|
105
|
+
// src/blockstore/connection-base.ts
|
106
|
+
import { EventBlock, decodeEventBlock } from "@web3-storage/pail/clock";
|
107
|
+
import { MemoryBlockstore } from "@web3-storage/pail/block";
|
108
|
+
|
109
|
+
// src/blockstore/task-manager.ts
|
110
|
+
var TaskManager = class {
  /**
   * Serializes application of remote meta events onto the local store.
   * Events are deduplicated via their Merkle-clock parent links: once a CID is
   * recorded in `eventsWeHandled`, it (and anything that listed it as a parent)
   * is dropped from the queue.
   */
  constructor(loader) {
    // CIDs already applied, or implied-applied because a newer event named them as parents.
    this.eventsWeHandled = /* @__PURE__ */ new Set();
    this.queue = [];
    this.isProcessing = false;
    this.loader = loader;
    this.logger = ensureLogger(loader.logger, "TaskManager");
  }
  async handleEvent(eventBlock) {
    const cid = eventBlock.cid.toString();
    // A new event's parents are by definition superseded — mark them handled.
    const parents = eventBlock.value.parents.map((cid2) => cid2.toString());
    for (const parent of parents) {
      this.eventsWeHandled.add(parent);
    }
    this.queue.push({ cid, eventBlock, retries: 0 });
    this.queue = this.queue.filter(({ cid: cid2 }) => !this.eventsWeHandled.has(cid2));
    void this.processQueue();
  }
  async processQueue() {
    if (this.isProcessing) return;
    this.isProcessing = true;
    const filteredQueue = this.queue.filter(({ cid }) => !this.eventsWeHandled.has(cid));
    const first = filteredQueue[0];
    if (!first) {
      // BUGFIX: clear the flag before bailing out. The previous code returned
      // with isProcessing still true, so once the queue drained no later
      // handleEvent() could ever be processed again.
      this.isProcessing = false;
      return;
    }
    try {
      this.loader?.remoteMetaStore?.handleByteHeads([first.eventBlock.value.data.dbMeta]);
      this.eventsWeHandled.add(first.cid);
      this.queue = this.queue.filter(({ cid }) => !this.eventsWeHandled.has(cid));
    } catch (err) {
      if (first.retries++ > 3) {
        this.logger.Error().Str("cid", first.cid).Msg("failed to process event block after 3 retries");
        this.queue = this.queue.filter(({ cid }) => cid !== first.cid);
      }
      // Brief back-off before surfacing the failure; finally() re-drives the queue.
      await new Promise((resolve) => setTimeout(resolve, 50));
      throw this.logger.Error().Err(err).Msg("failed to process event block").AsError();
    } finally {
      this.isProcessing = false;
      if (this.queue.length > 0) {
        void this.processQueue();
      }
    }
  }
};
|
155
|
+
|
156
|
+
// src/blockstore/connection-base.ts
|
157
|
+
var ConnectionBase = class {
  /**
   * Shared plumbing for remote connections (meta + storage sync).
   * Concrete transports subclass this (see ConnectREST below).
   */
  constructor(logger) {
    // readonly ready: Promise<unknown>;
    // todo move to LRU blockstore https://github.com/web3-storage/w3clock/blob/main/src/worker/block.js
    this.eventBlocks = new MemoryBlockstore();
    this.parents = [];
    this.loaded = Promise.resolve();
    this.logger = ensureLogger(logger, "ConnectionBase");
  }
  /** Re-pull remote meta and flush the pending write-ahead log. */
  async refresh() {
    await throwFalsy(throwFalsy(this.loader).remoteMetaStore).load("main");
    const wal = await throwFalsy(this.loader).remoteWAL();
    await wal._process();
  }
  connect({ loader }) {
    if (!loader) throw this.logger.Error().Msg("loader is required").AsError();
    this.connectMeta({ loader });
    this.connectStorage({ loader });
  }
  connectMeta({ loader }) {
    if (!loader) throw this.logger.Error().Msg("loader is required").AsError();
    this.loader = loader;
    this.taskManager = new TaskManager(loader);
    // Deliberately not awaited: subclasses may kick off async work here.
    this.onConnect();
    this.logger.Warn().Msg("connectMeta: connecting to remote meta store is disabled");
  }
  /** Subclass hook; base implementation is a no-op. */
  async onConnect() {
    return;
  }
  connectStorage({ loader }) {
    if (!loader) throw this.logger.Error().Msg("loader is required").AsError();
    this.loader = loader;
    this.logger.Warn().Msg("connectStorage: connecting to remote meta store is disabled");
  }
  /** Wrap a db-meta payload in a Merkle-clock event and cache it locally. */
  async createEventBlock(bytes) {
    const payload = { dbMeta: bytes };
    const event = await EventBlock.create(payload, this.parents);
    await this.eventBlocks.put(event.cid, event.bytes);
    return event;
  }
  async decodeEventBlock(bytes) {
    return await decodeEventBlock(bytes);
  }
  // NOTE(review): dashboard URL helpers (getDashboardURL/openDashboard) were
  // present only as commented-out code slated to move to connect; omitted here.
};
|
239
|
+
|
240
|
+
// src/blockstore/connect-rest.ts
|
241
|
+
var ConnectREST = class extends ConnectionBase {
  /**
   * Minimal REST transport: CAR files are PUT/GET at `<base>/cars/<cid>.car`.
   * Meta sync is not supported over plain REST (metaUpload/metaDownload are stubs).
   */
  constructor(base, logger) {
    super(ensureLogger(logger, "ConnectREST"));
    this.baseUrl = new URL(base);
  }
  async dataUpload(bytes, params) {
    const carCid = params.car.toString();
    const uploadURL = new URL(`/cars/${carCid}.car`, this.baseUrl);
    const done = await fetch(uploadURL, { method: "PUT", body: bytes });
    if (!done.ok) {
      // BUGFIX/consistency: previously this threw the logger *builder* itself
      // (missing .AsError()), unlike every other throw site in this file.
      throw this.logger.Error().Msg("failed to upload data " + done.statusText).AsError();
    }
  }
  async dataDownload(params) {
    const { car } = params;
    const fetchFromUrl = new URL(`/cars/${car.toString()}.car`, this.baseUrl);
    const response = await fetch(fetchFromUrl);
    if (!response.ok) {
      // A missing CAR is treated as absence, not as an error.
      return void 0;
    }
    const bytes = new Uint8Array(await response.arrayBuffer());
    return bytes;
  }
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  async metaUpload(bytes, params) {
    return void 0;
  }
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  async metaDownload(params) {
    return [];
  }
};
|
273
|
+
|
274
|
+
// src/blockstore/store-factory.ts
|
275
|
+
import { KeyedResolvOnce } from "@adviser/cement";
|
276
|
+
|
277
|
+
// src/runtime/files.ts
|
278
|
+
import * as UnixFS from "@ipld/unixfs";
|
279
|
+
import * as raw from "multiformats/codecs/raw";
|
280
|
+
import { withMaxChunkSize } from "@ipld/unixfs/file/chunker/fixed";
|
281
|
+
import { withWidth } from "@ipld/unixfs/file/layout/balanced";
|
282
|
+
import { exporter } from "ipfs-unixfs-exporter";
|
283
|
+
// Back-pressure strategy for the UnixFS encoder's internal TransformStream.
var queuingStrategy = UnixFS.withCapacity();
// Encoder configuration: raw-codec leaves, fixed 1 MiB chunks,
// balanced tree layout with up to 1024 links per node.
var settings = UnixFS.configure({
  fileChunkEncoder: raw,
  smallFileEncoder: raw,
  chunker: withMaxChunkSize(1024 * 1024),
  fileLayout: withWidth(1024)
});
|
290
|
+
/**
 * Drains a ReadableStream and returns its chunks as an array, in order.
 * @param collectable any stream exposing pipeTo().
 */
async function collect(collectable) {
  const gathered = [];
  const sink = new WritableStream({
    write(part) {
      gathered.push(part);
    }
  });
  await collectable.pipeTo(sink);
  return gathered;
}
|
301
|
+
/**
 * Encodes a Blob as a list of UnixFS blocks.
 * The encoder emits the root last, so the final block's CID identifies the file.
 */
async function encodeFile(blob) {
  const blocks = await collect(createFileEncoderStream(blob));
  return { cid: blocks.at(-1).cid, blocks };
}
|
306
|
+
/**
 * Re-assembles a File from UnixFS blocks (inverse of encodeFile).
 * @param blocks block source passed to the exporter.
 * @param cid    root CID of the file.
 * @param meta   { size, type } recorded alongside the file.
 */
async function decodeFile(blocks, cid, meta) {
  const entry = await exporter(cid.toString(), blocks, { length: meta.size });
  const parts = [];
  for await (const part of entry.content()) {
    parts.push(part);
  }
  // lastModified pinned to 0 so decoding is deterministic.
  return new File(parts, entry.name, { type: meta.type, lastModified: 0 });
}
|
314
|
+
/**
 * Returns a ReadableStream of UnixFS blocks for `blob`.
 * The writer side runs fire-and-forget and completes as the consumer drains.
 */
function createFileEncoderStream(blob) {
  const { readable, writable } = new TransformStream({}, queuingStrategy);
  const unixfsWriter = UnixFS.createWriter({ writable, settings });
  const builder = new UnixFSFileBuilder("", blob);
  void (async () => {
    await builder.finalize(unixfsWriter);
    await unixfsWriter.close();
  })();
  return readable;
}
|
324
|
+
var UnixFSFileBuilder = class {
  #file;
  /** Adapter that streams one Blob's contents into a UnixFS file writer. */
  constructor(name, file) {
    this.name = name;
    this.#file = file;
  }
  /** Pipes the blob into `writer`; resolves with the finished file's link. */
  async finalize(writer) {
    const fileWriter = UnixFS.createFileWriter(writer);
    const sink = new WritableStream({
      async write(chunk) {
        await fileWriter.write(chunk);
      }
    });
    await this.#file.stream().pipeTo(sink);
    return await fileWriter.close();
  }
};
|
342
|
+
|
343
|
+
// src/blockstore/store.ts
|
344
|
+
import pLimit2 from "p-limit";
|
345
|
+
import { format, parse } from "@ipld/dag-json";
|
346
|
+
import { ResolveOnce as ResolveOnce2, Result } from "@adviser/cement";
|
347
|
+
|
348
|
+
// src/blockstore/loader.ts
|
349
|
+
import pLimit from "p-limit";
|
350
|
+
import { CarReader } from "@ipld/car";
|
351
|
+
import { ResolveOnce } from "@adviser/cement";
|
352
|
+
|
353
|
+
// src/blockstore/types.ts
|
354
|
+
// Type-narrowing helper compiled down from TypeScript: identity at runtime.
function toCIDBlock(block) {
  return block;
}
|
357
|
+
|
358
|
+
// src/blockstore/loader-helpers.ts
|
359
|
+
import { encode, decode } from "multiformats/block";
|
360
|
+
import { sha256 as hasher } from "multiformats/hashes/sha2";
|
361
|
+
import * as raw2 from "multiformats/codecs/raw";
|
362
|
+
import * as CBW from "@ipld/car/buffer-writer";
|
363
|
+
import * as codec from "@ipld/dag-cbor";
|
364
|
+
/**
 * Serializes blockstore `t` into a CAR file with the given roots.
 * Two passes: first compute the exact byte size, then write into one buffer.
 * @returns a block wrapping the raw CAR bytes (so the CAR itself has a CID).
 */
async function encodeCarFile(roots, t) {
  const headerSize = CBW.headerLength({ roots });
  let size = headerSize;
  for (const { cid, bytes } of t.entries()) {
    size += CBW.blockLength({ cid, bytes });
  }
  const writer = CBW.createWriter(new Uint8Array(size), { headerSize });
  for (const r of roots) {
    writer.addRoot(r);
  }
  for (const { cid, bytes } of t.entries()) {
    writer.write({ cid, bytes });
  }
  writer.close();
  return await encode({ value: writer.bytes, hasher, codec: raw2 });
}
|
382
|
+
/** Encodes the fireproof CAR header, wrapped under an `fp` key, as dag-cbor. */
async function encodeCarHeader(fp) {
  const value = { fp };
  return await encode({ value, hasher, codec });
}
|
389
|
+
/**
 * Reads the fireproof header out of a CAR file.
 * The first root block must decode (dag-cbor) to `{ fp: ... }`.
 * @throws when the header block is missing or carries no `fp` field.
 */
async function parseCarFile(reader, logger) {
  const roots = await reader.getRoots();
  const header = await reader.get(roots[0]);
  if (!header) throw logger.Error().Msg("missing header block").AsError();
  const { value } = await decode({ bytes: header.bytes, hasher, codec });
  const fpvalue = value;
  // BUGFIX: the old guard (`fpvalue && !fpvalue.fp`) skipped the error when
  // fpvalue itself was falsy and then crashed with a TypeError on the read
  // below. Treat a missing wrapper the same as a missing `fp` field.
  if (!fpvalue || !fpvalue.fp) {
    throw logger.Error().Msg("missing fp").AsError();
  }
  return fpvalue.fp;
}
|
400
|
+
|
401
|
+
// src/blockstore/encrypt-helpers.ts
|
402
|
+
import { sha256 } from "multiformats/hashes/sha2";
|
403
|
+
import { CID as CID2 } from "multiformats";
|
404
|
+
import { encode as encode2, decode as decode2, create as mfCreate } from "multiformats/block";
|
405
|
+
import * as dagcbor from "@ipld/dag-cbor";
|
406
|
+
import { MemoryBlockstore as MemoryBlockstore2 } from "@web3-storage/pail/block";
|
407
|
+
import { bf } from "prolly-trees/utils";
|
408
|
+
import { nocache as cache } from "prolly-trees/cache";
|
409
|
+
import { create, load } from "prolly-trees/cid-set";
|
410
|
+
|
411
|
+
// src/blockstore/encrypt-codec.ts
|
412
|
+
import { CID } from "multiformats";
|
413
|
+
// Builds an AES-GCM block codec. Wire format of an encrypted block:
//   [12-byte IV][ciphertext of: 4-byte LE cid-length | cid bytes | payload]
// `crypto2` supplies importKey/encrypt/decrypt (WebCrypto-shaped); `randomBytes2`
// supplies IV material.
function makeCodec(ilogger, crypto2, randomBytes2) {
  const logger = ensureLogger(ilogger, "makeCodec");
  // Encode a uint32 as 4 little-endian bytes (byte 0 = LSB).
  const enc32 = (value) => {
    value = +value;
    const buff = new Uint8Array(4);
    buff[3] = value >>> 24;
    buff[2] = value >>> 16;
    buff[1] = value >>> 8;
    buff[0] = value & 255;
    return buff;
  };
  // Read the little-endian uint32 occupying the LAST 4 bytes of `buffer`.
  // (Callers pass a 4-byte subarray, so offset is 0 in practice.)
  const readUInt32LE = (buffer) => {
    const offset = buffer.byteLength - 4;
    return (buffer[offset] | buffer[offset + 1] << 8 | buffer[offset + 2] << 16) + buffer[offset + 3] * 16777216;
  };
  // Concatenate Uint8Arrays/ArrayBuffers into one contiguous Uint8Array.
  const concat = (buffers) => {
    const uint8Arrays = buffers.map((b) => b instanceof ArrayBuffer ? new Uint8Array(b) : b);
    const totalLength = uint8Arrays.reduce((sum, arr) => sum + arr.length, 0);
    const result = new Uint8Array(totalLength);
    let offset = 0;
    for (const arr of uint8Arrays) {
      result.set(arr, offset);
      offset += arr.length;
    }
    return result;
  };
  // Block codec surface: encode = IV || ciphertext; decode splits them again.
  const encode4 = ({ iv, bytes }) => concat([iv, bytes]);
  const decode4 = (bytes) => {
    const iv = bytes.subarray(0, 12);
    bytes = bytes.slice(12);
    return { iv, bytes };
  };
  // Multicodec code for this private codec (0x300000 + 1337).
  const code = 3145728 + 1337;
  // Import raw key material as a non-extractable AES-GCM key.
  async function subtleKey(key) {
    return await crypto2.importKey(
      "raw",
      // raw or jwk
      key,
      // raw data
      "AES-GCM",
      false,
      // extractable
      ["encrypt", "decrypt"]
    );
  }
  // Decrypt one block and split the plaintext back into { cid, bytes }.
  const decrypt = async ({ key, value }) => {
    const { bytes: inBytes, iv } = value;
    const cryKey = await subtleKey(key);
    const deBytes = await crypto2.decrypt(
      {
        name: "AES-GCM",
        iv,
        tagLength: 128
      },
      cryKey,
      inBytes
    );
    const bytes = new Uint8Array(deBytes);
    // Plaintext layout: 4-byte LE cid length, then the cid, then the payload.
    const len = readUInt32LE(bytes.subarray(0, 4));
    const cid = CID.decode(bytes.subarray(4, 4 + len));
    return { cid, bytes: bytes.subarray(4 + len) };
  };
  // Encrypt { cid, bytes } under a fresh 12-byte IV.
  const encrypt = async ({ key, cid, bytes }) => {
    const len = enc32(cid.bytes.byteLength);
    const iv = randomBytes2(12);
    const msg = concat([len, cid.bytes, bytes]);
    try {
      const cryKey = await subtleKey(key);
      const deBytes = await crypto2.encrypt(
        {
          name: "AES-GCM",
          iv,
          tagLength: 128
        },
        cryKey,
        msg
      );
      bytes = new Uint8Array(deBytes);
    } catch (e) {
      throw logger.Error().Err(e).Msg("encrypt failed").AsError();
    }
    return { value: { bytes, iv } };
  };
  // Curried helpers with the key pre-bound.
  const cryptoFn = (key) => {
    return { encrypt: (opts) => encrypt({ ...opts, key }), decrypt: (opts) => decrypt({ ...opts, key }) };
  };
  const name = "jchris@encrypted-block:aes-gcm";
  return { encode: encode4, decode: decode4, code, name, encrypt, decrypt, crypto: cryptoFn };
}
|
502
|
+
|
503
|
+
// src/blockstore/encrypt-helpers.ts
|
504
|
+
/**
 * Membership test by stringified CID — CID instances from different parses
 * are not reference-equal, so compare their canonical string forms.
 */
function carLogIncludesGroup(list, cidMatch) {
  const target = cidMatch.toString();
  return list.some((candidate) => candidate.toString() === target);
}
|
509
|
+
// Builds the encrypt/decrypt async generators used for CAR-level encryption.
// Encryption yields: every encrypted data block, then the prolly-tree cid-set
// blocks indexing them, and finally a dag-cbor head block [encryptedRoot, treeRoot].
function makeEncDec(logger, crypto2, randomBytes2) {
  const codec4 = makeCodec(logger, crypto2, randomBytes2);
  const encrypt = async function* ({
    get: get2,
    cids,
    hasher: hasher4,
    key,
    cache: cache3,
    chunker: chunker2,
    root: root3
  }) {
    const set = /* @__PURE__ */ new Set();
    let eroot;
    // Make sure the root itself gets encrypted along with the rest.
    if (!carLogIncludesGroup(cids, root3)) cids.push(root3);
    for (const cid of cids) {
      const unencrypted = await get2(cid);
      if (!unencrypted) throw logger.Error().Ref("cid", cid).Msg("missing cid block").AsError();
      const encrypted = await codec4.encrypt({ ...unencrypted, key });
      const block2 = await encode2({ ...encrypted, codec: codec4, hasher: hasher4 });
      yield block2;
      set.add(block2.cid.toString());
      // Remember which encrypted block corresponds to the plaintext root.
      if (unencrypted.cid.equals(root3)) eroot = block2.cid;
    }
    if (!eroot) throw logger.Error().Msg("cids does not include root").AsError();
    // Build a prolly-tree cid-set over all encrypted block CIDs.
    const list = [...set].map((s) => CID2.parse(s));
    let last;
    for await (const node of create({ list, get: get2, cache: cache3, chunker: chunker2, hasher: hasher4, codec: dagcbor })) {
      const block2 = await node.block;
      yield block2;
      last = block2;
    }
    if (!last) throw logger.Error().Msg("missing last block").AsError();
    // Head block links the encrypted root and the cid-set tree root.
    const head = [eroot, last.cid];
    const block = await encode2({ value: head, codec: dagcbor, hasher: hasher4 });
    yield block;
  };
  const decrypt = async function* ({
    root: root3,
    get: get2,
    key,
    cache: cache3,
    chunker: chunker2,
    hasher: hasher4
  }) {
    // Fetch + dag-cbor decode (for the head and cid-set nodes).
    const getWithDecode = async (cid) => get2(cid).then(async (block) => {
      if (!block) return;
      const decoded = await decode2({ ...block, codec: dagcbor, hasher: hasher4 });
      return decoded;
    });
    // Fetch + envelope decode with the encrypted-block codec.
    const getWithDecrypt = async (cid) => get2(cid).then(async (block) => {
      if (!block) return;
      const decoded = await decode2({ ...block, codec: codec4, hasher: hasher4 });
      return decoded;
    });
    const decodedRoot = await getWithDecode(root3);
    if (!decodedRoot) throw logger.Error().Msg("missing root").AsError();
    if (!decodedRoot.bytes) throw logger.Error().Msg("missing bytes").AsError();
    // Head block is [encryptedRoot, cidSetTreeRoot] — see encrypt above.
    const {
      value: [eroot, tree]
    } = decodedRoot;
    const rootBlock = await get2(eroot);
    if (!rootBlock) throw logger.Error().Msg("missing root block").AsError();
    const cidset = await load({ cid: tree, get: getWithDecode, cache: cache3, chunker: chunker2, codec: codec4, hasher: hasher4 });
    const { result: nodes } = await cidset.getAllEntries();
    // Decrypt an envelope block back into a plaintext block with its original CID.
    const unwrap = async (eblock) => {
      if (!eblock) throw logger.Error().Msg("missing block").AsError();
      if (!eblock.value) {
        eblock = await decode2({ ...eblock, codec: codec4, hasher: hasher4 });
        if (!eblock.value) throw logger.Error().Msg("missing value").AsError();
      }
      const { bytes, cid } = await codec4.decrypt({ ...eblock, key }).catch((e) => {
        throw e;
      });
      const block = await mfCreate({ cid, bytes, hasher: hasher4, codec: codec4 });
      return block;
    };
    // Decrypt all non-root blocks concurrently; the root is yielded last.
    const promises = [];
    for (const { cid } of nodes) {
      if (!rootBlock.cid.equals(cid)) promises.push(getWithDecrypt(cid).then(unwrap));
    }
    yield* promises;
    yield unwrap(rootBlock);
  };
  return { encrypt, decrypt };
}
|
594
|
+
// Prolly-tree content-defined chunker with boundary factor 30.
var chunker = bf(30);
|
595
|
+
/**
 * Decodes a hex string (two chars per byte) into its binary form.
 * Odd-length input throws (invalid typed-array length), matching prior behavior.
 */
function hexStringToUint8Array(hexString) {
  const out = new Uint8Array(hexString.length / 2);
  for (let byteIndex = 0; byteIndex < out.length; byteIndex++) {
    const pair = hexString.substring(byteIndex * 2, byteIndex * 2 + 2);
    out[byteIndex] = parseInt(pair, 16);
  }
  return out;
}
|
603
|
+
/**
 * Encrypts every block in `t` under the hex `key` and packs the result
 * (plus its cid-set index and head block) into a single CAR file.
 */
async function encryptedEncodeCarFile(logger, crypto2, key, rootCid, t) {
  const keyBytes = hexStringToUint8Array(key);
  const cipherBlocks = new MemoryBlockstore2();
  const toEncrypt = [];
  // Collect all CIDs and sanity-check that each block is actually readable.
  for (const { cid, bytes } of t.entries()) {
    toEncrypt.push(cid);
    const present = await t.get(cid);
    if (!present) throw logger.Error().Ref("cid", cid).Int("bytes", bytes.length).Msg("missing cid block").AsError();
  }
  const { encrypt } = makeEncDec(logger, crypto2, crypto2.randomBytes);
  let tail = null;
  for await (const block of encrypt({
    cids: toEncrypt,
    get: t.get.bind(t),
    key: keyBytes,
    hasher: sha256,
    chunker,
    cache,
    root: rootCid
  })) {
    await cipherBlocks.put(block.cid, block.bytes);
    tail = block;
  }
  if (!tail) throw logger.Error().Msg("no blocks encrypted").AsError();
  // The generator yields the head block last; it becomes the CAR root.
  return await encodeCarFile([tail.cid], cipherBlocks);
}
|
630
|
+
/** Decrypts an encrypted CAR; its first root is the encrypted tree's head. */
async function decodeEncryptedCar(logger, crypto2, key, reader) {
  const [carRoot] = await reader.getRoots();
  return await decodeCarBlocks(logger, crypto2, carRoot, reader.get.bind(reader), key);
}
|
635
|
+
/**
 * Decrypts every block reachable from `root3` and returns the plaintext
 * blockstore plus the plaintext root (yielded last by the decrypt generator).
 */
async function decodeCarBlocks(logger, crypto2, root3, get2, keyMaterial) {
  // The decrypt generator takes the key as a tightly-sized ArrayBuffer.
  const keyUint8 = hexStringToUint8Array(keyMaterial);
  const decryptionKey = keyUint8.buffer.slice(0, keyUint8.byteLength);
  const plainBlocks = new MemoryBlockstore2();
  const { decrypt } = makeEncDec(logger, crypto2, crypto2.randomBytes);
  let tail = null;
  for await (const block of decrypt({
    root: root3,
    get: get2,
    key: decryptionKey,
    hasher: sha256,
    chunker,
    cache
  })) {
    await plainBlocks.put(block.cid, block.bytes);
    tail = block;
  }
  if (!tail) throw logger.Error().Msg("no blocks decrypted").AsError();
  return { blocks: plainBlocks, root: tail.cid };
}
|
655
|
+
|
656
|
+
// src/blockstore/transaction.ts
|
657
|
+
import { MemoryBlockstore as MemoryBlockstore3 } from "@web3-storage/pail/block";
|
658
|
+
|
659
|
+
// src/runtime/crypto.ts
|
660
|
+
function randomBytes(size) {
|
661
|
+
const bytes = new Uint8Array(size);
|
662
|
+
if (size > 0) {
|
663
|
+
crypto.getRandomValues(bytes);
|
664
|
+
}
|
665
|
+
return bytes;
|
666
|
+
}
|
667
|
+
function digestSHA256(data) {
|
668
|
+
return Promise.resolve(crypto.subtle.digest("SHA-256", data));
|
669
|
+
}
|
670
|
+
/**
 * Fills any missing crypto hooks with WebCrypto-backed defaults.
 * `||` (not `??`) is intentional and matches prior behavior: any falsy
 * override falls back to the default implementation.
 */
function toCryptoOpts(cryptoOpts = {}) {
  return {
    importKey: cryptoOpts.importKey || crypto.subtle.importKey.bind(crypto.subtle),
    encrypt: cryptoOpts.encrypt || crypto.subtle.encrypt.bind(crypto.subtle),
    decrypt: cryptoOpts.decrypt || crypto.subtle.decrypt.bind(crypto.subtle),
    randomBytes: cryptoOpts.randomBytes || randomBytes,
    digestSHA256: cryptoOpts.digestSHA256 || digestSHA256
  };
}
|
680
|
+
|
681
|
+
// src/blockstore/transaction.ts
|
682
|
+
var CarTransaction = class extends MemoryBlockstore3 {
  /**
   * In-memory staging blockstore for one transaction.
   * Reads fall back to the parent blockstore; writes stay local.
   */
  constructor(parent, opts = { add: true }) {
    super();
    if (opts.add) {
      parent.transactions.add(this);
    }
    this.parent = parent;
  }
  async get(cid) {
    const local = await this.superGet(cid);
    if (local) return local;
    return falsyToUndef(await this.parent.get(cid));
  }
  /** Escape hatch to the in-memory store only (no parent fallback). */
  async superGet(cid) {
    return super.get(cid);
  }
};
|
697
|
+
/**
 * Merges caller options over runtime defaults; caller-provided keys win via
 * the spread, then logger/crypto/store/storeRuntime are always normalized.
 */
function defaultedBlockstoreRuntime(opts, component, ctx) {
  const logger = ensureLogger(opts, component, ctx);
  const store = opts.store || {};
  const defaults = {
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    applyMeta: (meta, snap) => Promise.resolve(),
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    compact: async (blocks) => ({}),
    autoCompact: 100,
    public: false,
    name: void 0,
    threshold: 1e3 * 1e3
  };
  return {
    ...defaults,
    ...opts,
    logger,
    crypto: toCryptoOpts(opts.crypto),
    store,
    storeRuntime: toStoreRuntime(store, logger)
  };
}
|
720
|
+
// A name implies a persistent (encrypted) store; nameless stores stay in memory.
var blockstoreFactory = function(opts) {
  return opts.name ? new EncryptedBlockstore(opts) : new BaseBlockstore(opts);
};
|
727
|
+
var BaseBlockstore = class {
  /** Unencrypted in-memory blockstore backed by a set of CarTransactions. */
  constructor(ebOpts = {}) {
    this.transactions = /* @__PURE__ */ new Set();
    this.ebOpts = defaultedBlockstoreRuntime(ebOpts, "BaseBlockstore");
    this.logger = this.ebOpts.logger;
  }
  // ready: Promise<void>;
  /** Nothing to load for the in-memory variant. */
  ready() {
    return Promise.resolve();
  }
  async close() {
    // no-op: no underlying resources to release
  }
  async destroy() {
    // no-op
  }
  /** Scans every transaction's private store; undefined when not found. */
  async get(cid) {
    if (!cid) throw this.logger.Error().Msg("required cid").AsError();
    for (const txn of this.transactions) {
      const found = await txn.superGet(cid);
      if (found) return found;
    }
  }
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  async put(cid, block) {
    throw this.logger.Error().Msg("use a transaction to put").AsError();
  }
  // TransactionMeta
  async transaction(fn, _opts = {}) {
    const txn = new CarTransaction(this);
    const meta = await fn(txn);
    this.lastTxMeta = meta;
    return { t: txn, meta };
  }
  /** Yields each distinct block across all transactions exactly once. */
  async *entries() {
    const emitted = /* @__PURE__ */ new Set();
    for (const txn of this.transactions) {
      for await (const blk of txn.entries()) {
        const key = blk.cid.toString();
        if (emitted.has(key)) continue;
        emitted.add(key);
        yield blk;
      }
    }
  }
};
|
770
|
+
// Named, persistent blockstore. Delegates durable storage to a Loader,
// which writes CAR files (encrypted unless configured public).
var EncryptedBlockstore = class extends BaseBlockstore {
  constructor(ebOpts) {
    super(ebOpts);
    // guards against re-entrant compaction runs
    this.compacting = false;
    this.logger = ensureLogger(ebOpts, "EncryptedBlockstore");
    const { name } = ebOpts;
    if (!name) {
      throw this.logger.Error().Msg("name required").AsError();
    }
    this.name = name;
    this.loader = new Loader(this.name, ebOpts);
  }
  ready() {
    return this.loader.ready();
  }
  close() {
    return this.loader.close();
  }
  destroy() {
    return this.loader.destroy();
  }
  // Check in-memory transactions first, then the loader's CAR-backed storage.
  async get(cid) {
    const got = await super.get(cid);
    if (got) return got;
    if (!this.loader) {
      return;
    }
    return falsyToUndef(await this.loader.getBlock(cid));
  }
  // Run `fn` in a transaction and commit the resulting blocks as CAR file(s).
  // Schedules auto-compaction once the car log grows past `autoCompact`.
  async transaction(fn, opts = { noLoader: false }) {
    const { t, meta: done } = await super.transaction(fn);
    const cars = await this.loader.commit(t, done, opts);
    if (this.ebOpts.autoCompact && this.loader.carLog.length > this.ebOpts.autoCompact) {
      // fire-and-forget; compact() itself refuses to run concurrently
      setTimeout(() => void this.compact(), 10);
    }
    if (cars) {
      // committed blocks are now reachable through the loader cache
      this.transactions.delete(t);
      return { meta: done, cars, t };
    }
    throw this.logger.Error().Msg("failed to commit car files").AsError();
  }
  // Read one file block out of a file CAR; throws when the block is missing.
  async getFile(car, cid, isPublic = false) {
    await this.ready();
    if (!this.loader) throw this.logger.Error().Msg("loader required to get file, database must be named").AsError();
    const reader = await this.loader.loadFileCar(car, isPublic);
    const block = await reader.get(cid);
    if (!block) throw this.logger.Error().Str("cid", cid.toString()).Msg(`Missing block`).AsError();
    return block.bytes;
  }
  // Rewrite live blocks into a single compacted CAR commit.
  async compact() {
    await this.ready();
    if (!this.loader) throw this.logger.Error().Msg("loader required to compact").AsError();
    if (this.loader.carLog.length < 2) return;
    const compactFn = this.ebOpts.compact || ((blocks) => this.defaultCompact(blocks, this.logger));
    if (!compactFn || this.compacting) return;
    const blockLog = new CompactionFetcher(this);
    this.compacting = true;
    const meta = await compactFn(blockLog);
    await this.loader?.commit(blockLog.loggedBlocks, meta, {
      compact: true,
      noLoader: true
    });
    // NOTE(review): if compactFn or commit throws, `compacting` stays true
    // and permanently blocks future compaction — consider try/finally.
    this.compacting = false;
  }
  // Default strategy: copy every block (loader storage + open transactions)
  // into the compaction transaction and reuse the last transaction meta.
  async defaultCompact(blocks, logger) {
    if (!this.loader) {
      throw logger.Error().Msg("no loader").AsError();
    }
    if (!this.lastTxMeta) {
      throw logger.Error().Msg("no lastTxMeta").AsError();
    }
    for await (const blk of this.loader.entries(false)) {
      blocks.loggedBlocks.putSync(blk.cid, blk.bytes);
    }
    for (const t of this.transactions) {
      for await (const blk of t.entries()) {
        blocks.loggedBlocks.putSync(blk.cid, blk.bytes);
      }
    }
    return this.lastTxMeta;
  }
  // Iterate all blocks known to the loader (cached view).
  async *entries() {
    for await (const blk of this.loader.entries()) {
      yield blk;
    }
  }
};
|
857
|
+
// Block fetcher used during compaction: every block that is read is also
// copied into `loggedBlocks`, so the compacted CAR contains everything
// the compaction function touched.
var CompactionFetcher = class {
  constructor(blocks) {
    this.blockstore = blocks;
    this.loggedBlocks = new CarTransaction(blocks);
  }
  async get(cid) {
    const found = await this.blockstore.get(cid);
    if (found) {
      this.loggedBlocks.putSync(cid, found.bytes);
    }
    return falsyToUndef(found);
  }
};
|
868
|
+
|
869
|
+
// src/blockstore/commit-queue.ts
|
870
|
+
// Serializes async jobs: at most one enqueued function runs at a time,
// strictly in FIFO order. Each job's result or failure is forwarded to
// the promise returned by its own enqueue() call.
var CommitQueue = class {
  constructor() {
    this.queue = [];
    this.processing = false;
  }
  // Queue `fn` and settle with its outcome once it has run.
  async enqueue(fn) {
    return new Promise((resolve, reject) => {
      const job = async () => {
        try {
          resolve(await fn());
        } catch (err) {
          reject(err);
        } finally {
          // Release the slot before kicking off the next job.
          this.processing = false;
          this.processNext();
        }
      };
      this.queue.push(job);
      if (!this.processing) {
        this.processNext();
      }
    });
  }
  // Start the next queued job unless one is already running.
  processNext() {
    if (this.processing || this.queue.length === 0) {
      return;
    }
    this.processing = true;
    const job = this.queue.shift();
    if (job) {
      void job();
    }
  }
};
|
903
|
+
|
904
|
+
// src/blockstore/loader.ts
|
905
|
+
import * as CBW2 from "@ipld/car/buffer-writer";
|
906
|
+
// True when the CID group `cids` already appears in the car log,
// compared by string serialization of the whole group.
function carLogIncludesGroup2(list, cids) {
  const target = cids.toString();
  return list.some((group) => group.toString() === target);
}
|
911
|
+
// Deduplicate CIDs by their string form, dropping any whose string is in
// `remove`. First-occurrence order is preserved (Map insertion order);
// for duplicate strings the last object encountered is the one kept.
function uniqueCids(list, remove = /* @__PURE__ */ new Set()) {
  const seen = /* @__PURE__ */ new Map();
  for (const cid of list) {
    const key = cid.toString();
    if (!remove.has(key)) {
      seen.set(key, cid);
    }
  }
  return [...seen.values()];
}
|
919
|
+
// Render a byte sequence as a lowercase hex string, two digits per byte.
function toHexString(byteArray) {
  let hex = "";
  for (const byte of byteArray) {
    hex += byte.toString(16).padStart(2, "0");
  }
  return hex;
}
|
922
|
+
// Minimal shape shared by loaders: a database name plus its CAR log.
var Loadable = class {
  constructor() {
    this.name = "";
    this.carLog = [];
  }
};
|
928
|
+
// Orchestrates durable storage for a named database: commits transactions
// as CAR files, tracks the car log, caches readers/blocks, and manages the
// (optional) encryption key. All commits are serialized via CommitQueue.
var Loader = class {
  constructor(name, ebOpts) {
    this.commitQueue = new CommitQueue();
    this.isCompacting = false;
    // carCid string -> Promise<reader>; memoizes in-flight and loaded CARs
    this.carReaders = /* @__PURE__ */ new Map();
    // CID strings of CARs superseded by compaction
    this.seenCompacted = /* @__PURE__ */ new Set();
    this.processedCars = /* @__PURE__ */ new Set();
    // array of CID groups, newest first (see updateCarLog)
    this.carLog = [];
    // block-cid string -> block; read-through cache for getBlock
    this.getBlockCache = /* @__PURE__ */ new Map();
    this.seenMeta = /* @__PURE__ */ new Set();
    // concurrency 1: meta merges are applied strictly one at a time
    this.writeLimit = pLimit(1);
    this.onceReady = new ResolveOnce();
    this.name = name;
    this.ebOpts = defaultedBlockstoreRuntime(
      {
        ...ebOpts,
        name
      },
      "Loader"
    );
    this.logger = this.ebOpts.logger;
  }
  // readonly id = uuidv4();
  async carStore() {
    return this.ebOpts.storeRuntime.makeDataStore(this);
  }
  // NOTE(review): uses the same makeDataStore factory as carStore — confirm
  // this is intended rather than a dedicated file-store factory.
  async fileStore() {
    return this.ebOpts.storeRuntime.makeDataStore(this);
  }
  async remoteWAL() {
    return this.ebOpts.storeRuntime.makeRemoteWAL(this);
  }
  async metaStore() {
    return this.ebOpts.storeRuntime.makeMetaStore(this);
  }
  // One-time init: load meta (from options or the meta store) and merge it.
  async ready() {
    return this.onceReady.once(async () => {
      const metas = this.ebOpts.meta ? [this.ebOpts.meta] : await (await this.metaStore()).load("main");
      if (metas) {
        await this.handleDbMetasFromStore(metas);
      }
    });
  }
  async close() {
    const toClose = await Promise.all([this.carStore(), this.metaStore(), this.fileStore(), this.remoteWAL()]);
    await Promise.all(toClose.map((store) => store.close()));
  }
  async destroy() {
    const toDestroy = await Promise.all([this.carStore(), this.metaStore(), this.fileStore(), this.remoteWAL()]);
    await Promise.all(toDestroy.map((store) => store.destroy()));
  }
  // async snapToCar(carCid: AnyLink | string) {
  //   await this.ready
  //   if (typeof carCid === 'string') {
  //     carCid = CID.parse(carCid)
  //   }
  //   const carHeader = await this.loadCarHeaderFromMeta({ car: carCid, key: this.key || null })
  //   this.carLog = [carCid, ...carHeader.cars]
  //   await this.getMoreReaders(carHeader.cars)
  //   await this._applyCarHeader(carHeader, true)
  // }
  // Merge each meta sequentially (writeLimit has concurrency 1).
  async handleDbMetasFromStore(metas) {
    for (const meta of metas) {
      await this.writeLimit(async () => {
        await this.mergeDbMetaIntoClock(meta);
      });
    }
  }
  // Fold one DbMeta into local state: dedupe by cars-string, absorb the key,
  // extend the car log with the meta's header, and apply its app-level meta.
  async mergeDbMetaIntoClock(meta) {
    if (this.isCompacting) {
      throw this.logger.Error().Msg("cannot merge while compacting").AsError();
    }
    if (this.seenMeta.has(meta.cars.toString())) return;
    this.seenMeta.add(meta.cars.toString());
    if (meta.key) {
      await this.setKey(meta.key);
    }
    if (carLogIncludesGroup2(this.carLog, meta.cars)) {
      return;
    }
    const carHeader = await this.loadCarHeaderFromMeta(meta);
    carHeader.compact.map((c) => c.toString()).forEach(this.seenCompacted.add, this.seenCompacted);
    await this.getMoreReaders(carHeader.cars.flat());
    this.carLog = [...uniqueCids([meta.cars, ...this.carLog, ...carHeader.cars], this.seenCompacted)];
    await this.ebOpts.applyMeta?.(carHeader.meta);
  }
  async ingestKeyFromMeta(meta) {
    const { key } = meta;
    if (key) {
      await this.setKey(key);
    }
  }
  // The header lives in the first CAR of the meta's group.
  async loadCarHeaderFromMeta({ cars: cids }) {
    const reader = await this.loadCar(cids[0]);
    return await parseCarFile(reader, this.logger);
  }
  // Lazily create a random 32-byte key unless the store is public.
  async _getKey() {
    if (this.key) return this.key;
    if (!this.ebOpts.public) {
      await this.setKey(toHexString(this.ebOpts.crypto.randomBytes(32)));
    }
    return this.key || void 0;
  }
  async commitFiles(t, done, opts = { noLoader: false, compact: false }) {
    return this.commitQueue.enqueue(() => this._commitInternalFiles(t, done, opts));
  }
  // can these skip the queue? or have a file queue?
  async _commitInternalFiles(t, done, opts = { noLoader: false, compact: false }) {
    await this.ready();
    const { files: roots } = this.makeFileCarHeader(done);
    const cids = [];
    const cars = await this.prepareCarFilesFiles(roots, t, !!opts.public);
    for (const car of cars) {
      const { cid, bytes } = car;
      await (await this.fileStore()).save({ cid, bytes });
      // file uploads are deferred to the WAL for remote sync
      await (await this.remoteWAL()).enqueueFile(cid, !!opts.public);
      cids.push(cid);
    }
    return cids;
  }
  async loadFileCar(cid, isPublic = false) {
    return await this.storesLoadCar(cid, await this.fileStore(), this.remoteFileStore, isPublic);
  }
  async commit(t, done, opts = { noLoader: false, compact: false }) {
    return this.commitQueue.enqueue(() => this._commitInternal(t, done, opts));
  }
  // Copy a transaction's blocks into the read cache (first write wins).
  async cacheTransaction(t) {
    for await (const block of t.entries()) {
      const sBlock = block.cid.toString();
      if (!this.getBlockCache.has(sBlock)) {
        this.getBlockCache.set(sBlock, block);
      }
    }
  }
  // Cache all blocks of a CAR once; processedCars prevents rework.
  async cacheCarReader(carCidStr, reader) {
    if (this.processedCars.has(carCidStr)) return;
    this.processedCars.add(carCidStr);
    for await (const block of reader.blocks()) {
      const sBlock = block.cid.toString();
      if (!this.getBlockCache.has(sBlock)) {
        this.getBlockCache.set(sBlock, block);
      }
    }
  }
  // Core commit path: encode header + blocks into CAR(s), save them,
  // publish new meta (WAL + meta store), then update the car log.
  async _commitInternal(t, done, opts = { noLoader: false, compact: false }) {
    await this.ready();
    const fp = this.makeCarHeader(done, this.carLog, !!opts.compact);
    const rootBlock = await encodeCarHeader(fp);
    const cars = await this.prepareCarFiles(rootBlock, t, !!opts.public);
    const cids = [];
    for (const car of cars) {
      const { cid, bytes } = car;
      await (await this.carStore()).save({ cid, bytes });
      cids.push(cid);
    }
    await this.cacheTransaction(t);
    const newDbMeta = { cars: cids, key: this.key || null };
    await (await this.remoteWAL()).enqueue(newDbMeta, opts);
    await (await this.metaStore()).save(newDbMeta);
    await this.updateCarLog(cids, fp, !!opts.compact);
    return cids;
  }
  // NOTE(review): encrypted path roots only at roots[0] while the plain path
  // passes the whole roots array — confirm the asymmetry is intended.
  async prepareCarFilesFiles(roots, t, isPublic) {
    const theKey = isPublic ? null : await this._getKey();
    const car = theKey && this.ebOpts.crypto ? await encryptedEncodeCarFile(this.logger, this.ebOpts.crypto, theKey, roots[0], t) : await encodeCarFile(roots, t);
    return [car];
  }
  // Split a transaction into CAR files no larger than `threshold` bytes.
  // The first CAR is rooted at the header block; each subsequent CAR is
  // rooted at its own first block.
  async prepareCarFiles(rootBlock, t, isPublic) {
    const theKey = isPublic ? void 0 : await this._getKey();
    const carFiles = [];
    const threshold = this.ebOpts.threshold || 1e3 * 1e3;
    let clonedt = new CarTransaction(t.parent, { add: false });
    clonedt.putSync(rootBlock.cid, rootBlock.bytes);
    let newsize = CBW2.blockLength(toCIDBlock(rootBlock));
    let cidRootBlock = rootBlock;
    for (const { cid, bytes } of t.entries()) {
      newsize += CBW2.blockLength(toCIDBlock({ cid, bytes }));
      if (newsize >= threshold) {
        carFiles.push(await this.createCarFile(theKey, cidRootBlock.cid, clonedt));
        clonedt = new CarTransaction(t.parent, { add: false });
        clonedt.putSync(cid, bytes);
        cidRootBlock = { cid, bytes };
        newsize = CBW2.blockLength(toCIDBlock({ cid, bytes }));
      } else {
        clonedt.putSync(cid, bytes);
      }
    }
    carFiles.push(await this.createCarFile(theKey, cidRootBlock.cid, clonedt));
    return carFiles;
  }
  async createCarFile(theKey, cid, t) {
    try {
      return theKey && this.ebOpts.crypto ? await encryptedEncodeCarFile(this.logger, this.ebOpts.crypto, theKey, cid, t) : await encodeCarFile([cid], t);
    } catch (e) {
      console.error("error creating car file", e);
      throw e;
    }
  }
  // Collect the CIDs of all file entries in a transaction result.
  makeFileCarHeader(result) {
    const files = [];
    for (const [, meta] of Object.entries(result.files || {})) {
      if (meta && typeof meta === "object" && "cid" in meta && meta !== null) {
        files.push(meta.cid);
      }
    }
    return { ...result, files };
  }
  // Normal commits prepend to the log; compacting commits collapse it and
  // delete the CARs referenced by the previous compaction.
  async updateCarLog(cids, fp, compact) {
    if (compact) {
      const previousCompactCid = fp.compact[fp.compact.length - 1];
      fp.compact.map((c) => c.toString()).forEach(this.seenCompacted.add, this.seenCompacted);
      this.carLog = [...uniqueCids([...this.carLog, ...fp.cars, cids], this.seenCompacted)];
      await this.removeCidsForCompact(previousCompactCid[0]);
    } else {
      this.carLog.unshift(cids);
    }
  }
  // Delete every CAR listed as compacted in the given CAR's header.
  async removeCidsForCompact(cid) {
    const carHeader = await this.loadCarHeaderFromMeta({
      cars: [cid]
    });
    for (const cids of carHeader.compact) {
      for (const cid2 of cids) {
        await (await this.carStore()).remove(cid2);
      }
    }
  }
  // async flushCars() {
  //   await this.ready
  //   // for each cid in car log, make a dbMeta
  //   for (const cid of this.carLog) {
  //     const dbMeta = { car: cid, key: this.key || null } as DbMeta
  //     await this.remoteWAL!.enqueue(dbMeta, { public: false })
  //   }
  // }
  // cache3=true: cached blocks only. cache3=false: cached blocks, then every
  // not-yet-cached block from each CAR in the log.
  // NOTE(review): both branches start by yielding the whole cache — confirm
  // the cache3=false branch should include cached blocks.
  async *entries(cache3 = true) {
    await this.ready();
    if (cache3) {
      for (const [, block] of this.getBlockCache) {
        yield block;
      }
    } else {
      for (const [, block] of this.getBlockCache) {
        yield block;
      }
      for (const cids of this.carLog) {
        for (const cid of cids) {
          const reader = await this.loadCar(cid);
          if (!reader) throw this.logger.Error().Ref("cid", cid).Msg("missing car reader").AsError();
          for await (const block of reader.blocks()) {
            const sCid = block.cid.toString();
            if (!this.getBlockCache.has(sCid)) {
              yield block;
            }
          }
        }
      }
    }
  }
  // Find a block: cache first, then CARs in the log (5 at a time), and
  // finally the compacted CARs referenced by the oldest CAR's header.
  // Returns undefined when not found anywhere.
  async getBlock(cid) {
    await this.ready();
    const sCid = cid.toString();
    if (this.getBlockCache.has(sCid)) return this.getBlockCache.get(sCid);
    // Load a CAR and see whether caching it surfaced the wanted block.
    const getCarCid = async (carCid) => {
      if (this.getBlockCache.has(sCid)) return this.getBlockCache.get(sCid);
      const reader = await this.loadCar(carCid);
      if (!reader) {
        throw this.logger.Error().Ref("cid", carCid).Msg("missing car reader").AsError();
      }
      await this.cacheCarReader(carCid.toString(), reader).catch(() => {
        return;
      });
      if (this.getBlockCache.has(sCid)) return this.getBlockCache.get(sCid);
      throw this.logger.Error().Str("cid", sCid).Msg("block not in reader").AsError();
    };
    // Fallback: search the compacted CARs listed in a CAR's header.
    const getCompactCarCids = async (carCid) => {
      const reader = await this.loadCar(carCid);
      if (!reader) {
        throw this.logger.Error().Str("cid", carCid.toString()).Msg("missing car reader").AsError();
      }
      const header = await parseCarFile(reader, this.logger);
      const compacts = header.compact;
      let got2;
      const batchSize2 = 5;
      for (let i = 0; i < compacts.length; i += batchSize2) {
        const promises = [];
        for (let j = i; j < Math.min(i + batchSize2, compacts.length); j++) {
          for (const cid2 of compacts[j]) {
            promises.push(getCarCid(cid2));
          }
        }
        try {
          // first CAR that yields the block wins; failures are expected
          got2 = await Promise.any(promises);
        } catch {
        }
        if (got2) break;
      }
      if (this.getBlockCache.has(sCid)) return this.getBlockCache.get(sCid);
      throw this.logger.Error().Str("cid", sCid).Msg("block not in compact reader").AsError();
    };
    let got;
    const batchSize = 5;
    for (let i = 0; i < this.carLog.length; i += batchSize) {
      const batch = this.carLog.slice(i, i + batchSize);
      const promises = batch.flatMap((slice) => slice.map(getCarCid));
      try {
        got = await Promise.any(promises);
      } catch {
      }
      if (got) break;
    }
    if (!got) {
      try {
        got = await getCompactCarCids(this.carLog[this.carLog.length - 1][0]);
      } catch {
      }
    }
    return got;
  }
  makeCarHeader(meta, cars, compact = false) {
    const coreHeader = compact ? { cars: [], compact: cars } : { cars, compact: [] };
    return { ...coreHeader, meta };
  }
  async loadCar(cid) {
    // NOTE(review): carStore is a method, so this guard can never fire.
    if (!this.carStore) {
      throw this.logger.Error().Msg("car store not initialized").AsError();
    }
    const loaded = await this.storesLoadCar(cid, await this.carStore(), this.remoteCarStore);
    return loaded;
  }
  //What if instead it returns an Array of CarHeader
  // Memoized CAR load: local store first, then (on local failure) the remote
  // store with write-back to local. The memoized promise is replaced by one
  // that also caches the CAR's blocks; failures evict the memo entry.
  async storesLoadCar(cid, local, remote, publicFiles) {
    const cidsString = cid.toString();
    if (!this.carReaders.has(cidsString)) {
      this.carReaders.set(
        cidsString,
        (async () => {
          let loadedCar = void 0;
          try {
            this.logger.Debug().Str("cid", cidsString).Msg("loading car");
            loadedCar = await local.load(cid);
            this.logger.Debug().Bool("loadedCar", loadedCar).Msg("loaded");
          } catch (e) {
            if (remote) {
              const remoteCar = await remote.load(cid);
              if (remoteCar) {
                this.logger.Debug().Ref("cid", remoteCar.cid).Msg("saving remote car locally");
                await local.save(remoteCar);
                loadedCar = remoteCar;
              }
            } else {
              this.logger.Error().Str("cid", cidsString).Err(e).Msg("loading car");
            }
          }
          if (!loadedCar) {
            throw this.logger.Error().Url(local.url).Str("cid", cidsString).Msg("missing car files").AsError();
          }
          const rawReader = await CarReader.fromBytes(loadedCar.bytes);
          const readerP = publicFiles ? Promise.resolve(rawReader) : this.ensureDecryptedReader(rawReader);
          const cachedReaderP = readerP.then(async (reader) => {
            await this.cacheCarReader(cidsString, reader).catch(() => {
              return;
            });
            return reader;
          });
          this.carReaders.set(cidsString, cachedReaderP);
          return readerP;
        })().catch((e) => {
          this.carReaders.delete(cidsString);
          throw e;
        })
      );
    }
    return this.carReaders.get(cidsString);
  }
  // Wrap a raw reader with on-the-fly decryption unless public/keyless.
  async ensureDecryptedReader(reader) {
    const theKey = await this._getKey();
    if (this.ebOpts.public || !(theKey && this.ebOpts.crypto)) {
      return reader;
    }
    const { blocks, root: root3 } = await decodeEncryptedCar(this.logger, this.ebOpts.crypto, theKey, reader);
    return {
      getRoots: () => [root3],
      get: blocks.get.bind(blocks),
      blocks: blocks.entries.bind(blocks)
    };
  }
  // Set the encryption key exactly once (changing it is an error) and derive
  // keyId as the hex SHA-256 of the key string.
  async setKey(key) {
    if (this.key && this.key !== key)
      throw this.logger.Error().Str("this.key", this.key).Str("key", key).Msg("setting key").AsError();
    this.key = key;
    const encoder = new TextEncoder();
    const data = encoder.encode(key);
    const hashBuffer = await this.ebOpts.crypto.digestSHA256(data);
    const hashArray = Array.from(new Uint8Array(hashBuffer));
    this.keyId = hashArray.map((b) => b.toString(16).padStart(2, "0")).join("");
  }
  // Prefetch up to 5 CARs concurrently that we have no reader for yet.
  async getMoreReaders(cids) {
    const limit = pLimit(5);
    const missing = cids.filter((cid) => !this.carReaders.has(cid.toString()));
    await Promise.all(missing.map((cid) => limit(() => this.loadCar(cid))));
  }
};
|
1331
|
+
|
1332
|
+
// src/blockstore/store.ts
|
1333
|
+
// Base class for URL-bound stores: holds name/url/logger and lifecycle
// listener lists consumed by subclasses on start()/close().
var VersionedStore = class {
  constructor(name, url, logger) {
    this._onStarted = [];
    this._onClosed = [];
    Object.assign(this, { name, url, logger });
  }
  // Register a callback run after a successful start().
  onStarted(fn) {
    this._onStarted.push(fn);
  }
  // Register a callback run when the store closes.
  onClosed(fn) {
    this._onClosed.push(fn);
  }
};
|
1348
|
+
// Shared UTF-8 codecs used by MetaStore to (de)serialize header bytes.
var textEncoder = new TextEncoder();
var textDecoder = new TextDecoder();
|
1350
|
+
// Store for database meta (the clock head): headers are serialized with
// format/parse and persisted per-branch through the configured gateway.
var MetaStore = class extends VersionedStore {
  constructor(name, url, logger, gateway) {
    super(name, url, ensureLogger(logger, "MetaStore", {}));
    this.tag = "header-base";
    this.gateway = gateway;
  }
  // Serialize a DbMeta ({ cars, key? }) to its on-disk string form;
  // the key is omitted entirely when falsy.
  makeHeader({ cars, key }) {
    const toEncode = { cars };
    if (key) toEncode.key = key;
    return format(toEncode);
  }
  // Inverse of makeHeader.
  parseHeader(headerData) {
    const got = parse(headerData);
    return got;
  }
  // Start the gateway, notify listeners, then verify the store version.
  async start() {
    this.logger.Debug().Msg("starting");
    const res = await this.gateway.start(this.url);
    if (res.isErr()) {
      return res;
    }
    this._onStarted.forEach((fn) => fn());
    return guardVersion(this.url);
  }
  // Load meta for `branch` (default "main"). Returns undefined when the
  // branch does not exist yet; throws on any other gateway error.
  async load(branch) {
    this.logger.Debug().Str("branch", branch || "").Msg("loading");
    const url = await this.gateway.buildUrl(this.url, branch || "main");
    if (url.isErr()) {
      throw this.logger.Error().Err(url.Err()).Str("branch", branch || "").Str("url", this.url.toString()).Msg("got error from gateway.buildUrl").AsError();
    }
    const bytes = await this.gateway.get(url.Ok());
    if (bytes.isErr()) {
      if (isNotFoundError(bytes)) {
        return void 0;
      }
      throw this.logger.Error().Err(bytes.Err()).Msg("gateway get").AsError();
    }
    try {
      // One header per branch, wrapped in an array to match the multi-meta
      // shape Loader.handleDbMetasFromStore consumes.
      return [this.parseHeader(textDecoder.decode(bytes.Ok()))];
    } catch (e) {
      throw this.logger.Error().Err(e).Msg("parseHeader").AsError();
    }
  }
  // Persist meta for `branch`; throws on URL-build or put failure.
  async save(meta, branch = "main") {
    this.logger.Debug().Str("branch", branch).Any("meta", meta).Msg("saving meta");
    const bytes = this.makeHeader(meta);
    const url = await this.gateway.buildUrl(this.url, branch);
    if (url.isErr()) {
      throw this.logger.Error().Err(url.Err()).Str("branch", branch).Url(this.url).Msg("got error from gateway.buildUrl").AsError();
    }
    const res = await this.gateway.put(url.Ok(), textEncoder.encode(bytes));
    if (res.isErr()) {
      throw this.logger.Error().Err(res.Err()).Msg("got error from gateway.put").AsError();
    }
    return res.Ok();
  }
  async close() {
    await this.gateway.close(this.url);
    this._onClosed.forEach((fn) => fn());
    return Result.Ok(void 0);
  }
  async destroy() {
    return this.gateway.destroy(this.url);
  }
};
|
1415
|
+
// Store for CAR files (binary block data), persisted through a gateway.
var DataStore = class extends VersionedStore {
  constructor(name, url, logger, gateway) {
    super(
      name,
      url,
      ensureLogger(logger, "DataStore", {
        url: () => url.toString()
      })
    );
    this.tag = "car-base";
    this.gateway = gateway;
  }
  // Start the gateway, notify listeners, then verify the store version;
  // closes the gateway again when the version check fails.
  async start() {
    this.logger.Debug().Msg("starting-gateway");
    const res = await this.gateway.start(this.url);
    if (res.isErr()) {
      this.logger.Error().Err(res.Err()).Msg("started-gateway");
      return res;
    }
    this._onStarted.forEach((fn) => fn());
    const version = guardVersion(this.url);
    if (version.isErr()) {
      // BUGFIX: previously logged res.Err() — the *successful* start result —
      // instead of the actual version-guard error.
      this.logger.Error().Err(version.Err()).Msg("guardVersion");
      await this.close();
      return version;
    }
    this.logger.Debug().Msg("started");
    return version;
  }
  // Load the raw CAR bytes for `cid`; rethrows the gateway error on failure.
  async load(cid) {
    this.logger.Debug().Any("cid", cid).Msg("loading");
    const url = await this.gateway.buildUrl(this.url, cid.toString());
    if (url.isErr()) {
      throw this.logger.Error().Err(url.Err()).Str("cid", cid.toString()).Msg("got error from gateway.buildUrl").AsError();
    }
    const res = await this.gateway.get(url.Ok());
    if (res.isErr()) {
      throw res.Err();
    }
    return { cid, bytes: res.Ok() };
  }
  // Persist a CAR file ({ cid, bytes }); `opts` is currently unused.
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  async save(car, opts) {
    this.logger.Debug().Any("cid", car.cid.toString()).Msg("saving");
    const url = await this.gateway.buildUrl(this.url, car.cid.toString());
    if (url.isErr()) {
      throw this.logger.Error().Err(url.Err()).Ref("cid", car.cid).Msg("got error from gateway.buildUrl").AsError();
    }
    const res = await this.gateway.put(url.Ok(), car.bytes);
    if (res.isErr()) {
      throw this.logger.Error().Err(res.Err()).Msg("got error from gateway.put").AsError();
    }
    return res.Ok();
  }
  // Delete the CAR file for `cid`; returns the gateway Result (errors are
  // returned, not thrown, matching callers that ignore missing files).
  async remove(cid) {
    const url = await this.gateway.buildUrl(this.url, cid.toString());
    if (url.isErr()) {
      return url;
    }
    return this.gateway.delete(url.Ok());
  }
  async close() {
    await this.gateway.close(this.url);
    this._onClosed.forEach((fn) => fn());
    return Result.Ok(void 0);
  }
  destroy() {
    return this.gateway.destroy(this.url);
  }
};
|
1485
|
+
var RemoteWAL = class extends VersionedStore {
|
1486
|
+
constructor(loader, url, logger, gateway) {
|
1487
|
+
super(loader.name, url, ensureLogger(logger, "RemoteWAL"));
|
1488
|
+
this.tag = "rwal-base";
|
1489
|
+
this._ready = new ResolveOnce2();
|
1490
|
+
this.walState = { operations: [], noLoaderOps: [], fileOperations: [] };
|
1491
|
+
this.processing = void 0;
|
1492
|
+
this.processQueue = new CommitQueue();
|
1493
|
+
this.loader = loader;
|
1494
|
+
this.gateway = gateway;
|
1495
|
+
}
|
1496
|
+
async ready() {
|
1497
|
+
return this._ready.once(async () => {
|
1498
|
+
const walState = await this.load().catch((e) => {
|
1499
|
+
this.logger.Error().Any("error", e).Msg("error loading wal");
|
1500
|
+
return void 0;
|
1501
|
+
});
|
1502
|
+
if (!walState) {
|
1503
|
+
this.walState.operations = [];
|
1504
|
+
this.walState.fileOperations = [];
|
1505
|
+
} else {
|
1506
|
+
this.walState.operations = walState.operations || [];
|
1507
|
+
this.walState.fileOperations = walState.fileOperations || [];
|
1508
|
+
}
|
1509
|
+
});
|
1510
|
+
}
|
1511
|
+
async enqueue(dbMeta, opts) {
|
1512
|
+
await this.ready();
|
1513
|
+
if (opts.noLoader) {
|
1514
|
+
this.walState.noLoaderOps.push(dbMeta);
|
1515
|
+
} else {
|
1516
|
+
this.walState.operations.push(dbMeta);
|
1517
|
+
}
|
1518
|
+
await this.save(this.walState);
|
1519
|
+
void this._process();
|
1520
|
+
}
|
1521
|
+
async enqueueFile(fileCid, publicFile = false) {
|
1522
|
+
await this.ready();
|
1523
|
+
this.walState.fileOperations.push({ cid: fileCid, public: publicFile });
|
1524
|
+
}
|
1525
|
+
async _process() {
|
1526
|
+
await this.ready();
|
1527
|
+
if (!this.loader.remoteCarStore) return;
|
1528
|
+
await this.processQueue.enqueue(async () => {
|
1529
|
+
await this._doProcess();
|
1530
|
+
if (this.walState.operations.length || this.walState.fileOperations.length || this.walState.noLoaderOps.length) {
|
1531
|
+
setTimeout(() => void this._process(), 0);
|
1532
|
+
}
|
1533
|
+
});
|
1534
|
+
}
|
1535
|
+
async _doProcess() {
|
1536
|
+
if (!this.loader.remoteCarStore) return;
|
1537
|
+
const rmlp = (async () => {
|
1538
|
+
const operations = [...this.walState.operations];
|
1539
|
+
const fileOperations = [...this.walState.fileOperations];
|
1540
|
+
const uploads = [];
|
1541
|
+
const noLoaderOps = [...this.walState.noLoaderOps];
|
1542
|
+
const limit = pLimit2(5);
|
1543
|
+
if (operations.length + fileOperations.length + noLoaderOps.length === 0) return;
|
1544
|
+
for (const dbMeta of noLoaderOps) {
|
1545
|
+
const uploadP = limit(async () => {
|
1546
|
+
for (const cid of dbMeta.cars) {
|
1547
|
+
const car = await (await this.loader.carStore()).load(cid);
|
1548
|
+
if (!car) {
|
1549
|
+
if (carLogIncludesGroup2(this.loader.carLog, dbMeta.cars))
|
1550
|
+
throw this.logger.Error().Ref("cid", cid).Msg("missing local car").AsError();
|
1551
|
+
} else {
|
1552
|
+
await throwFalsy(this.loader.remoteCarStore).save(car);
|
1553
|
+
}
|
1554
|
+
this.walState.noLoaderOps = this.walState.noLoaderOps.filter((op) => op !== dbMeta);
|
1555
|
+
}
|
1556
|
+
});
|
1557
|
+
uploads.push(uploadP);
|
1558
|
+
}
|
1559
|
+
for (const dbMeta of operations) {
|
1560
|
+
const uploadP = limit(async () => {
|
1561
|
+
for (const cid of dbMeta.cars) {
|
1562
|
+
const car = await (await this.loader.carStore()).load(cid).catch(() => null);
|
1563
|
+
if (!car) {
|
1564
|
+
if (carLogIncludesGroup2(this.loader.carLog, dbMeta.cars))
|
1565
|
+
throw this.logger.Error().Ref("cid", cid).Msg(`missing local car`).AsError();
|
1566
|
+
} else {
|
1567
|
+
await throwFalsy(this.loader.remoteCarStore).save(car);
|
1568
|
+
}
|
1569
|
+
}
|
1570
|
+
this.walState.operations = this.walState.operations.filter((op) => op !== dbMeta);
|
1571
|
+
});
|
1572
|
+
uploads.push(uploadP);
|
1573
|
+
}
|
1574
|
+
if (fileOperations.length) {
|
1575
|
+
const dbLoader = this.loader;
|
1576
|
+
for (const { cid: fileCid, public: publicFile } of fileOperations) {
|
1577
|
+
const uploadP = limit(async () => {
|
1578
|
+
const fileBlock = await (await dbLoader.fileStore()).load(fileCid);
|
1579
|
+
await dbLoader.remoteFileStore?.save(fileBlock, { public: publicFile });
|
1580
|
+
this.walState.fileOperations = this.walState.fileOperations.filter((op) => op.cid !== fileCid);
|
1581
|
+
});
|
1582
|
+
uploads.push(uploadP);
|
1583
|
+
}
|
1584
|
+
}
|
1585
|
+
try {
|
1586
|
+
const res = await Promise.allSettled(uploads);
|
1587
|
+
const errors = res.filter((r) => r.status === "rejected");
|
1588
|
+
if (errors.length) {
|
1589
|
+
throw this.logger.Error().Any(
|
1590
|
+
"errors",
|
1591
|
+
errors.map((e) => e.reason)
|
1592
|
+
).Msg("error uploading").AsError();
|
1593
|
+
errors[0].reason;
|
1594
|
+
}
|
1595
|
+
if (operations.length) {
|
1596
|
+
const lastOp = operations[operations.length - 1];
|
1597
|
+
await this.loader.remoteMetaStore?.save(lastOp).catch((e) => {
|
1598
|
+
this.walState.operations.push(lastOp);
|
1599
|
+
throw this.logger.Error().Any("error", e).Msg("error saving remote meta").AsError();
|
1600
|
+
});
|
1601
|
+
}
|
1602
|
+
} finally {
|
1603
|
+
await this.save(this.walState);
|
1604
|
+
}
|
1605
|
+
})();
|
1606
|
+
await rmlp;
|
1607
|
+
}
|
1608
|
+
async start() {
|
1609
|
+
const res = await this.gateway.start(this.url);
|
1610
|
+
if (res.isErr()) {
|
1611
|
+
return res;
|
1612
|
+
}
|
1613
|
+
const ver = guardVersion(this.url);
|
1614
|
+
if (ver.isErr()) {
|
1615
|
+
await this.close();
|
1616
|
+
return ver;
|
1617
|
+
}
|
1618
|
+
const ready = await exception2Result(() => this.ready());
|
1619
|
+
this._onStarted.forEach((fn) => fn());
|
1620
|
+
if (ready.isErr()) {
|
1621
|
+
await this.close();
|
1622
|
+
return ready;
|
1623
|
+
}
|
1624
|
+
return ready;
|
1625
|
+
}
|
1626
|
+
  // Read and parse the persisted WAL state from the gateway under the fixed
  // key "main". Returns undefined when the key does not exist yet; throws on
  // any other gateway or parse error.
  async load() {
    this.logger.Debug().Msg("loading");
    const filepath = await this.gateway.buildUrl(this.url, "main");
    if (filepath.isErr()) {
      throw this.logger.Error().Err(filepath.Err()).Str("url", this.url.toString()).Msg("error building url").AsError();
    }
    const bytes = await this.gateway.get(filepath.Ok());
    if (bytes.isErr()) {
      // a missing WAL file is the normal first-run case, not an error
      if (isNotFoundError(bytes)) {
        return void 0;
      }
      throw this.logger.Error().Err(bytes.Err()).Msg("error get").AsError();
    }
    try {
      // `parse` here is the dag-json parser paired with `format` in save()
      return bytes && parse(textDecoder.decode(bytes.Ok()));
    } catch (e) {
      throw this.logger.Error().Err(e).Msg("error parse").AsError();
    }
  }
  // Serialize the given WAL state and write it to the gateway under "main",
  // overwriting the previous snapshot. Throws on URL-build, encode, or put
  // failure.
  async save(state) {
    const filepath = await this.gateway.buildUrl(this.url, "main");
    if (filepath.isErr()) {
      throw this.logger.Error().Err(filepath.Err()).Str("url", this.url.toString()).Msg("error building url").AsError();
    }
    let encoded;
    try {
      encoded = format(state);
    } catch (e) {
      throw this.logger.Error().Err(e).Any("state", state).Msg("error format").AsError();
    }
    const res = await this.gateway.put(filepath.Ok(), textEncoder.encode(encoded));
    if (res.isErr()) {
      throw this.logger.Error().Err(res.Err()).Str("filePath", filepath.Ok().toString()).Msg("error saving").AsError();
    }
  }
|
1661
|
+
async close() {
|
1662
|
+
await this.gateway.close(this.url);
|
1663
|
+
this._onClosed.forEach((fn) => fn());
|
1664
|
+
return Result.Ok(void 0);
|
1665
|
+
}
|
1666
|
+
destroy() {
|
1667
|
+
return this.gateway.destroy(this.url);
|
1668
|
+
}
|
1669
|
+
};
|
1670
|
+
|
1671
|
+
// src/blockstore/store-factory.ts
|
1672
|
+
// Normalize the `index` query parameter on a store URL: set it when an index
// name is provided, otherwise guarantee it is absent. Mutates and returns the
// same URL instance.
function ensureIsIndex(url, isIndex) {
  if (!isIndex) {
    url.searchParams.delete("index");
    return url;
  }
  url.searchParams.set("index", isIndex);
  return url;
}
|
1681
|
+
// Coerce a string path or URL into a URL with the `index` parameter
// normalized. Bare filesystem paths (anything that fails URL parsing) are
// wrapped as file:// URLs.
function toURL(pathOrUrl, isIndex) {
  if (pathOrUrl instanceof URL) {
    return ensureIsIndex(pathOrUrl, isIndex);
  }
  let url;
  try {
    url = new URL(pathOrUrl);
  } catch (e) {
    // not an absolute URL — treat it as a local path
    url = new URL(`file://${pathOrUrl}`);
  }
  return ensureIsIndex(url, isIndex);
}
|
1691
|
+
// Resolve the effective store URL for a loader: use the explicitly
// configured URL when present, otherwise derive a path from the database
// name and the configured base data directory.
function buildURL(optURL, loader) {
  const storeOpts = loader.ebOpts.store;
  return toURL(optURL || dataDir(loader.name, storeOpts.stores?.base), storeOpts.isIndex);
}
|
1695
|
+
var storeFactory = /* @__PURE__ */ new Map();
|
1696
|
+
// Register a gateway factory bundle for a URL protocol (e.g. "file:").
// Registration is first-wins: a duplicate protocol throws.
function registerStoreProtocol(item) {
  const { protocol } = item;
  if (storeFactory.has(protocol)) {
    throw new Error(`protocol ${protocol} already registered`);
  }
  storeFactory.set(protocol, item);
}
|
1702
|
+
// Look up the gateway factory registered for url.protocol and invoke `run`
// with it. Throws — listing the known protocols — when none is registered.
function runStoreFactory(url, logger, run) {
  const item = storeFactory.get(url.protocol);
  if (!item) {
    // Array.from materializes the registered protocol names for the error
    // message; the original `Array(storeFactory.keys())` wrapped the iterator
    // object itself in a one-element array, producing a useless diagnostic.
    throw logger.Error().Url(url).Str("protocol", url.protocol).Any("keys", Array.from(storeFactory.keys())).Msg(`unsupported protocol`).AsError();
  }
  logger.Debug().Str("protocol", url.protocol).Msg("run");
  return run(item);
}
|
1710
|
+
// Dynamic-import the data gateway for a protocol at most once per protocol.
var onceLoadDataGateway = new KeyedResolvOnce();
function loadDataGateway(url, logger) {
  return onceLoadDataGateway.get(url.protocol).once(async () => {
    return runStoreFactory(url, logger, async (item) => item.data(logger));
  });
}
// One started DataStore per fully-qualified store URL.
var onceDataStoreFactory = new KeyedResolvOnce();
async function dataStoreFactory(loader) {
  const url = buildURL(loader.ebOpts.store.stores?.data, loader);
  const logger = ensureLogger(loader.logger, "dataStoreFactory", { url: url.toString() });
  url.searchParams.set("store", "data");
  // memo key is the URL *after* the store=data marker is applied
  return onceDataStoreFactory.get(url.toString()).once(async () => {
    const gateway = await loadDataGateway(url, logger);
    const store = new DataStore(loader.name, url, loader.logger, gateway);
    await store.start();
    logger.Debug().Str("prepared", store.url.toString()).Msg("produced");
    return store;
  });
}
// Dynamic-import the meta gateway for a protocol at most once per protocol.
var onceLoadMetaGateway = new KeyedResolvOnce();
function loadMetaGateway(url, logger) {
  return onceLoadMetaGateway.get(url.protocol).once(async () => {
    return runStoreFactory(url, logger, async (item) => item.meta(logger));
  });
}
// One started MetaStore per fully-qualified store URL.
var onceMetaStoreFactory = new KeyedResolvOnce();
async function metaStoreFactory(loader) {
  const url = buildURL(loader.ebOpts.store.stores?.meta, loader);
  // NOTE(review): here the url context is a lazy thunk, while the data/wal
  // factories pass an eager string — probably harmless, but inconsistent.
  const logger = ensureLogger(loader.logger, "metaStoreFactory", { url: () => url.toString() });
  url.searchParams.set("store", "meta");
  return onceMetaStoreFactory.get(url.toString()).once(async () => {
    logger.Debug().Str("protocol", url.protocol).Msg("pre-protocol switch");
    const gateway = await loadMetaGateway(url, logger);
    const store = new MetaStore(loader.name, url, loader.logger, gateway);
    logger.Debug().Msg("pre-start");
    await store.start();
    logger.Debug().Msg("post-start");
    return store;
  });
}
|
1750
|
+
// Dynamic-import the WAL gateway for a protocol at most once per protocol.
var onceWalGateway = new KeyedResolvOnce();
function loadWalGateway(url, logger) {
  return onceWalGateway.get(url.protocol).once(async () => {
    return runStoreFactory(url, logger, async (item) => item.wal(logger));
  });
}
// One started RemoteWAL per fully-qualified store URL.
var onceRemoteWalFactory = new KeyedResolvOnce();
async function remoteWalFactory(loader) {
  // NOTE(review): derives the URL from stores?.meta (there is no dedicated
  // wal setting here) before switching store=wal — presumably the WAL is
  // colocated with the meta store; confirm that is intentional.
  const url = buildURL(loader.ebOpts.store.stores?.meta, loader);
  const logger = ensureLogger(loader.logger, "remoteWalFactory", { url: url.toString() });
  url.searchParams.set("store", "wal");
  return onceRemoteWalFactory.get(url.toString()).once(async () => {
    const gateway = await loadWalGateway(url, logger);
    logger.Debug().Str("prepared", url.toString()).Msg("produced");
    const store = new RemoteWAL(loader, url, loader.logger, gateway);
    await store.start();
    return store;
  });
}
// Build a test store for the given URL's protocol (used by the test suite).
async function testStoreFactory(url, ilogger) {
  const logger = ensureLogger(
    {
      logger: ilogger
    },
    "testStoreFactory"
  );
  return runStoreFactory(url, logger, async (item) => item.test(logger));
}
|
1778
|
+
// Assemble the store runtime for a database: each make* hook prefers an
// override supplied in the loader's options and falls back to the default
// URL-driven factory above. File codecs likewise fall back to the bundled
// encodeFile/decodeFile.
function toStoreRuntime(opts, ilogger) {
  const logger = ensureLogger(ilogger, "toStoreRuntime", {});
  return {
    makeMetaStore: (loader) => {
      logger.Debug().Str("fromOpts", "" + !!loader.ebOpts.store.makeMetaStore).Msg("makeMetaStore");
      return (loader.ebOpts.store.makeMetaStore || metaStoreFactory)(loader);
    },
    makeDataStore: (loader) => {
      logger.Debug().Str("fromOpts", "" + !!loader.ebOpts.store.makeDataStore).Msg("makeDataStore");
      return (loader.ebOpts.store.makeDataStore || dataStoreFactory)(loader);
    },
    makeRemoteWAL: (loader) => {
      logger.Debug().Str("fromOpts", "" + !!loader.ebOpts.store.makeRemoteWAL).Msg("makeRemoteWAL");
      return (loader.ebOpts.store.makeRemoteWAL || remoteWalFactory)(loader);
    },
    encodeFile: opts.encodeFile || encodeFile,
    decodeFile: opts.decodeFile || decodeFile
  };
}
|
1797
|
+
// Built-in protocol registrations. Each gateway class is pulled in via a
// lazy dynamic import so unused backends are never loaded.
registerStoreProtocol({
  protocol: "file:",
  data: async (logger) => {
    const { FileDataGateway } = await import("./store-file-HMHPQTUV.js");
    return new FileDataGateway(logger);
  },
  meta: async (logger) => {
    const { FileMetaGateway } = await import("./store-file-HMHPQTUV.js");
    return new FileMetaGateway(logger);
  },
  wal: async (logger) => {
    const { FileWALGateway } = await import("./store-file-HMHPQTUV.js");
    return new FileWALGateway(logger);
  },
  test: async (logger) => {
    const { FileTestStore } = await import("./store-file-HMHPQTUV.js");
    return new FileTestStore(logger);
  }
});
registerStoreProtocol({
  protocol: "indexdb:",
  data: async (logger) => {
    const { IndexDBDataGateway } = await import("./store-indexdb-MRVZG4OG.js");
    return new IndexDBDataGateway(logger);
  },
  meta: async (logger) => {
    const { IndexDBMetaGateway } = await import("./store-indexdb-MRVZG4OG.js");
    return new IndexDBMetaGateway(logger);
  },
  wal: async (logger) => {
    // NOTE(review): the indexdb backend has no dedicated WAL gateway and
    // reuses IndexDBMetaGateway here — looks deliberate, but confirm.
    const { IndexDBMetaGateway } = await import("./store-indexdb-MRVZG4OG.js");
    return new IndexDBMetaGateway(logger);
  },
  test: async (logger) => {
    const { IndexDBTestStore } = await import("./store-indexdb-MRVZG4OG.js");
    return new IndexDBTestStore(logger);
  }
});
registerStoreProtocol({
  protocol: "sqlite:",
  data: async (logger) => {
    const { SQLDataGateway } = await import("./store-sql-5XMJ5OWJ.js");
    return new SQLDataGateway(logger);
  },
  meta: async (logger) => {
    const { SQLMetaGateway } = await import("./store-sql-5XMJ5OWJ.js");
    return new SQLMetaGateway(logger);
  },
  wal: async (logger) => {
    const { SQLWalGateway } = await import("./store-sql-5XMJ5OWJ.js");
    return new SQLWalGateway(logger);
  },
  test: async (logger) => {
    const { SQLTestStore } = await import("./store-sql-5XMJ5OWJ.js");
    return new SQLTestStore(logger);
  }
});
|
1854
|
+
|
1855
|
+
// src/crdt-helpers.ts
|
1856
|
+
// Timing hooks around the compaction phases. Both are deliberate no-ops in
// this build; the tag parameter is accepted so call sites stay valid.
function time(tag) {
  // intentionally empty
}
function timeEnd(tag) {
  // intentionally empty
}
|
1860
|
+
// Coerce a document key to its string form. Only string and number keys are
// accepted; any other type is rejected through the supplied logger.
function toString(key, logger) {
  const kind = typeof key;
  if (kind === "string" || kind === "number") {
    return key.toString();
  }
  throw logger.Error().Msg("Invalid key type").AsError();
}
|
1869
|
+
// Apply a set of document updates to the CRDT clock at `head`. Multiple
// updates go through a pail Batch; a single update uses a direct put. The
// resulting event block and additions are copied into the transaction
// blockstore, and the new head is returned.
async function applyBulkUpdateToCrdt(store, tblocks, head, updates, logger) {
  let result = null;
  if (updates.length > 1) {
    const batch = await Batch.create(tblocks, head);
    for (const update of updates) {
      const link = await writeDocContent(store, tblocks, update, logger);
      await batch.put(toString(update.id, logger), link);
    }
    result = await batch.commit();
  } else if (updates.length === 1) {
    const link = await writeDocContent(store, tblocks, updates[0], logger);
    result = await put(tblocks, head, toString(updates[0].id, logger), link);
  }
  // updates.length === 0 falls through with result === null
  if (!result) throw logger.Error().Uint64("updates.len", updates.length).Msg("Missing result").AsError();
  if (result.event) {
    for (const { cid, bytes } of [
      ...result.additions,
      // ...result.removals,
      result.event
    ]) {
      tblocks.putSync(cid, bytes);
    }
  }
  return { head: result.head };
}
// Encode one document update as a dag-cbor block: tombstone `{del: true}`
// for deletes, `{doc: value}` otherwise (after encoding any attachments via
// processFiles). Returns the CID of the stored value block.
async function writeDocContent(store, blocks, update, logger) {
  let value;
  if (update.del) {
    value = { del: true };
  } else {
    if (!update.value) throw logger.Error().Msg("Missing value").AsError();
    await processFiles(store, blocks, update.value, logger);
    value = { doc: update.value };
  }
  const block = await encode3({ value, hasher: hasher2, codec: codec2 });
  blocks.putSync(block.cid, block.bytes);
  return block.cid;
}
|
1907
|
+
// Encode a document's attachments: `_files` privately, `_publicFiles` with
// public visibility.
async function processFiles(store, blocks, doc, logger) {
  const { _files, _publicFiles } = doc;
  if (_files) {
    await processFileset(logger, store, blocks, _files);
  }
  if (_publicFiles) {
    await processFileset(logger, store, blocks, _publicFiles, true);
  }
}
|
1915
|
+
// Encode every File object in `files` into blocks inside a fresh
// CarTransaction, replace each entry with its {cid, type, size} metadata,
// and commit the car. Entries that are already metadata (have cid/type/
// size/car) are passed through; newly-encoded entries get the resulting
// `car` reference attached afterwards.
async function processFileset(logger, store, blocks, files, publicFiles = false) {
  const dbBlockstore = blocks.parent;
  if (!dbBlockstore.loader) throw logger.Error().Msg("Missing loader, database name is required").AsError();
  const t = new CarTransaction(dbBlockstore);
  // names of entries encoded in this pass — only these get the car ref
  const didPut = [];
  for (const filename in files) {
    if (File === files[filename].constructor) {
      const file = files[filename];
      const { cid, blocks: fileBlocks } = await store.encodeFile(file);
      didPut.push(filename);
      for (const block of fileBlocks) {
        t.putSync(block.cid, block.bytes);
      }
      // replace the File object with its persisted metadata
      files[filename] = { cid, type: file.type, size: file.size };
    } else {
      // already-persisted metadata: normalize to just the known fields
      const { cid, type, size, car } = files[filename];
      if (cid && type && size && car) {
        files[filename] = { cid, type, size, car };
      }
    }
  }
  if (didPut.length) {
    const car = await dbBlockstore.loader.commitFiles(t, { files }, {
      public: publicFiles
    });
    if (car) {
      for (const name of didPut) {
        files[name] = { car, ...files[name] };
      }
    }
  }
}
|
1947
|
+
// Resolve `key` against the CRDT clock at `head` and load its value block.
// Throws when the database is empty or the key is absent.
async function getValueFromCrdt(blocks, head, key, logger) {
  if (!head.length) throw logger.Debug().Msg("Getting from an empty database").AsError();
  const link = await get(blocks, head, key);
  if (!link) throw logger.Error().Str("key", key).Msg(`Missing key`).AsError();
  return await getValueFromLink(blocks, link, logger);
}
// Attach lazy file accessors to a freshly-loaded document value.
function readFiles(blocks, { doc }) {
  if (!doc) return;
  if (doc._files) {
    readFileset(blocks, doc._files);
  }
  if (doc._publicFiles) {
    readFileset(blocks, doc._publicFiles, true);
  }
}
// For each file entry with a cid: expose a w3s gateway URL when public, and
// a lazy `file()` loader that decodes the blocks out of the entry's car.
function readFileset(blocks, files, isPublic = false) {
  for (const filename in files) {
    const fileMeta = files[filename];
    if (fileMeta.cid) {
      if (isPublic) {
        fileMeta.url = `https://${fileMeta.cid.toString()}.ipfs.w3s.link/`;
      }
      if (fileMeta.car) {
        fileMeta.file = async () => await blocks.ebOpts.storeRuntime.decodeFile(
          {
            get: async (cid) => {
              return await blocks.getFile(throwFalsy(fileMeta.car), cid, isPublic);
            }
          },
          fileMeta.cid,
          fileMeta
        );
      }
    }
    files[filename] = fileMeta;
  }
}
// Load and decode the value block behind `link`, tag it with its own cid,
// and wire up file accessors before returning.
async function getValueFromLink(blocks, link, logger) {
  const block = await blocks.get(link);
  if (!block) throw logger.Error().Str("link", link.toString()).Msg(`Missing linked block`).AsError();
  const { value } = await decode3({ bytes: block.bytes, hasher: hasher2, codec: codec2 });
  const cvalue = {
    ...value,
    cid: link
  };
  readFiles(blocks, cvalue);
  return cvalue;
}
|
1995
|
+
// EventFetcher variant that tolerates missing events: instead of failing the
// whole clock traversal it logs the dangling link and yields
// `{ value: undefined }` so callers can skip it.
var DirtyEventFetcher = class extends EventFetcher {
  async get(link) {
    try {
      // `await` is essential: returning the promise un-awaited would hand a
      // rejected promise straight to the caller and the catch below would
      // never run for async failures.
      return await super.get(link);
    } catch (e) {
      console.error("missing event", link.toString(), e);
      return { value: void 0 };
    }
  }
};
|
2005
|
+
// Collect document updates between `since` and `head` by walking clock
// events backwards. `opts.dirty` swaps in the lenient DirtyEventFetcher;
// `opts.limit` caps the number of updates. Results are reversed so the
// oldest change comes first.
async function clockChangesSince(blocks, head, since, opts, logger) {
  const eventsFetcher = opts.dirty ? new DirtyEventFetcher(blocks) : new EventFetcher(blocks);
  const keys = /* @__PURE__ */ new Set();
  const updates = await gatherUpdates(
    blocks,
    eventsFetcher,
    head,
    since,
    [],
    keys,
    /* @__PURE__ */ new Set(),
    opts.limit || Infinity,
    logger
  );
  return { result: updates.reverse(), head };
}
// Recursive clock walk. `keys` dedupes documents (newest op wins since we
// walk newest-first); `didLinks` prevents revisiting shared ancestor events;
// the walk stops early when it reaches a `since` link or exhausts `limit`.
async function gatherUpdates(blocks, eventsFetcher, head, since, updates = [], keys, didLinks, limit, logger) {
  if (limit <= 0) return updates;
  const sHead = head.map((l) => l.toString());
  for (const link of since) {
    if (sHead.includes(link.toString())) {
      return updates;
    }
  }
  for (const link of head) {
    if (didLinks.has(link.toString())) continue;
    didLinks.add(link.toString());
    const { value: event } = await eventsFetcher.get(link);
    // DirtyEventFetcher yields undefined for missing events — skip them
    if (!event) continue;
    const { type } = event.data;
    let ops = [];
    if (type === "batch") {
      ops = event.data.ops;
    } else if (type === "put") {
      ops = [event.data];
    }
    // iterate ops newest-first so the latest value for a key is kept
    for (let i = ops.length - 1; i >= 0; i--) {
      const { key, value } = ops[i];
      if (!keys.has(key)) {
        const docValue = await getValueFromLink(blocks, value, logger);
        updates.push({ id: key, value: docValue.doc, del: docValue.del, clock: link });
        limit--;
        keys.add(key);
      }
    }
    if (event.parents) {
      updates = await gatherUpdates(blocks, eventsFetcher, event.parents, since, updates, keys, didLinks, limit, logger);
    }
  }
  return updates;
}
// Stream every live {id, value, del} entry under `head`.
async function* getAllEntries(blocks, head, logger) {
  for await (const [key, link] of entries(blocks, head)) {
    const docValue = await getValueFromLink(blocks, link, logger);
    yield { id: key, value: docValue.doc, del: docValue.del };
  }
}
|
2062
|
+
// Stream the textual clock visualisation produced by pail's vis().
async function* clockVis(blocks, head) {
  yield* vis(blocks, head);
}
|
2067
|
+
// Module-level re-entrancy guard: at most one compaction walk at a time.
var isCompacting = false;
// Walk the entire CRDT reachable from `head` — head blocks, all entries, the
// vis graph, the prolly root, and the change log — so every live block is
// touched via blockLog and recorded in loggedBlocks for the compacted car.
async function doCompact(blockLog, head, logger) {
  if (isCompacting) {
    return;
  }
  isCompacting = true;
  // try/finally so a failing walk step cannot leave the guard stuck on,
  // which would silently disable compaction for the rest of the process.
  try {
    time("compact head");
    for (const cid of head) {
      const bl = await blockLog.get(cid);
      if (!bl) throw logger.Error().Ref("cid", cid).Msg("Missing head block").AsError();
    }
    timeEnd("compact head");
    time("compact all entries");
    // iterate purely for the side effect of loading every entry block
    for await (const _entry of getAllEntries(blockLog, head, logger)) {
      continue;
    }
    timeEnd("compact all entries");
    time("compact clock vis");
    for await (const _line of vis(blockLog, head)) {
      // drain for side effects only
    }
    timeEnd("compact clock vis");
    time("compact root");
    const result = await root(blockLog, head);
    timeEnd("compact root");
    time("compact root blocks");
    for (const { cid, bytes } of [...result.additions, ...result.removals]) {
      blockLog.loggedBlocks.putSync(cid, bytes);
    }
    timeEnd("compact root blocks");
    time("compact changes");
    await clockChangesSince(blockLog, head, [], {}, logger);
    timeEnd("compact changes");
  } finally {
    isCompacting = false;
  }
}
|
2101
|
+
// Fetch a block by its stringified CID and decode it as dag-cbor, returning
// a multiformats Block carrying both the decoded value and raw bytes.
async function getBlock(blocks, cidString) {
  const block = await blocks.get(parse2(cidString));
  if (!block) throw new Error(`Missing block ${cidString}`);
  const { cid, value } = await decode3({ bytes: block.bytes, codec: codec2, hasher: hasher2 });
  return new Block2({ cid, value, bytes: block.bytes });
}
|
2107
|
+
|
2108
|
+
// src/indexer-helpers.ts
|
2109
|
+
import { create as create3 } from "multiformats/block";
|
2110
|
+
import { sha256 as hasher3 } from "multiformats/hashes/sha2";
|
2111
|
+
import * as codec3 from "@ipld/dag-cbor";
|
2112
|
+
import charwise from "charwise";
|
2113
|
+
import * as DbIndex from "prolly-trees/db-index";
|
2114
|
+
import { bf as bf2, simpleCompare } from "prolly-trees/utils";
|
2115
|
+
import { nocache as cache2 } from "prolly-trees/cache";
|
2116
|
+
// Mutable holder for one prolly-tree index side: callers assign `cid`
// (persisted root CID) and `root` (loaded DbIndex node); both start unset.
var IndexTree = class {
};
|
2118
|
+
// Compare two index tie-breaker refs. NaN on the left sorts first and
// Infinity sorts last (range-sentinel values from prefix queries); NaN on
// the right is rejected. Everything else defers to simpleCompare.
function refCompare(a, b) {
  if (Number.isNaN(a)) return -1;
  if (Number.isNaN(b)) throw new Error("ref may not be Infinity or NaN");
  return a === Infinity ? 1 : simpleCompare(a, b);
}
|
2124
|
+
// Composite index-key comparator: order by encoded key first, then break
// ties with refCompare on the doc-id ref.
function compare(a, b) {
  const [aKey, aRef] = a;
  const [bKey, bRef] = b;
  const comp = simpleCompare(aKey, bKey);
  if (comp !== 0) return comp;
  return refCompare(aRef, bRef);
}
// Prolly-tree options for the by-key and by-id index sides.
var byKeyOpts = { cache: cache2, chunker: bf2(30), codec: codec3, hasher: hasher3, compare };
var byIdOpts = { cache: cache2, chunker: bf2(30), codec: codec3, hasher: hasher3, compare: simpleCompare };
// Run the user's map function over changed docs and produce index entries.
// The map fn may either call its emit callback (possibly several times) or
// simply return a value; deletions and missing values are skipped. Keys are
// charwise-encoded and paired with the doc id for stable ordering.
function indexEntriesForChanges(changes, mapFn) {
  const indexEntries = [];
  changes.forEach(({ id: key, value, del }) => {
    if (del || !value) return;
    let mapCalled = false;
    const mapReturn = mapFn({ ...value, _id: key }, (k, v) => {
      mapCalled = true;
      if (typeof k === "undefined") return;
      indexEntries.push({
        key: [charwise.encode(k), key],
        value: v || null
      });
    });
    // return-value style: only used when emit was never called
    if (!mapCalled && mapReturn) {
      indexEntries.push({
        key: [charwise.encode(mapReturn), key],
        value: null
      });
    }
  });
  return indexEntries;
}
|
2155
|
+
// Adapt a transaction blockstore into the async block getter that
// prolly-trees expects, decoding with the index codec/hasher.
function makeProllyGetBlock(blocks) {
  return async (address) => {
    const found = await blocks.get(address);
    if (!found) {
      throw new Error(`Missing block ${address.toString()}`);
    }
    const { cid, bytes } = found;
    return create3({ cid, bytes, hasher: hasher3, codec: codec3 });
  };
}
|
2163
|
+
// Apply a batch of index entries to a prolly-tree index. Three cases:
// no existing root and no cid → create a fresh tree from the entries;
// a cid but no loaded root → load it first, then bulk-update;
// a loaded root → bulk-update directly. Returns the new {root, cid} pair
// (both undefined when the bulk removes everything).
async function bulkIndex(tblocks, inIndex, indexEntries, opts) {
  if (!indexEntries.length) return inIndex;
  if (!inIndex.root) {
    if (!inIndex.cid) {
      let returnRootBlock = void 0;
      let returnNode = void 0;
      // DbIndex.create yields the tree nodes bottom-up; the last node seen
      // is the root
      for await (const node of await DbIndex.create({
        get: makeProllyGetBlock(tblocks),
        list: indexEntries,
        ...opts
      })) {
        const block = await node.block;
        await tblocks.put(block.cid, block.bytes);
        returnRootBlock = block;
        returnNode = node;
      }
      if (!returnNode || !returnRootBlock) throw new Error("failed to create index");
      return { root: returnNode, cid: returnRootBlock.cid };
    } else {
      // hydrate the persisted tree before falling through to bulk()
      inIndex.root = await DbIndex.load({ cid: inIndex.cid, get: makeProllyGetBlock(tblocks), ...opts });
    }
  }
  const { root: root3, blocks: newBlocks } = await inIndex.root.bulk(indexEntries);
  if (root3) {
    for await (const block of newBlocks) {
      await tblocks.put(block.cid, block.bytes);
    }
    return { root: root3, cid: (await root3.block).cid };
  } else {
    return { root: void 0, cid: void 0 };
  }
}
|
2195
|
+
// Hydrate a persisted prolly-tree index root by CID.
async function loadIndex(tblocks, cid, opts) {
  return await DbIndex.load({ cid, get: makeProllyGetBlock(tblocks), ...opts });
}
// Post-process raw index results: apply descending order, limit, optional
// doc inclusion (fetched live from the CRDT), then decode the charwise keys
// back to user values.
async function applyQuery(crdt, resp, query) {
  if (query.descending) {
    resp.result = resp.result.reverse();
  }
  if (query.limit) {
    resp.result = resp.result.slice(0, query.limit);
  }
  if (query.includeDocs) {
    resp.result = await Promise.all(
      resp.result.map(async (row) => {
        const val = await crdt.get(row.id);
        const doc = val ? { ...val.doc, _id: row.id } : void 0;
        return { ...row, doc };
      })
    );
  }
  return {
    rows: resp.result.map(({ key, ...row }) => {
      return {
        key: charwise.decode(key),
        ...row
      };
    })
  };
}
// Encode a [start, end] range with charwise so it sorts like stored keys.
function encodeRange(range) {
  return [charwise.encode(range[0]), charwise.encode(range[1])];
}
// Encode a single lookup key with charwise.
function encodeKey(key) {
  return charwise.encode(key);
}
|
2229
|
+
|
2230
|
+
// src/indexer.ts
|
2231
|
+
// Get-or-create the named index on a database's CRDT. Re-requesting an
// existing index re-applies the map fn / meta (which validates they have not
// changed). `mapFn` and `meta` are mutually exclusive.
function index({ _crdt }, name, mapFn, meta) {
  if (mapFn && meta) throw _crdt.logger.Error().Msg("cannot provide both mapFn and meta").AsError();
  if (mapFn && mapFn.constructor.name !== "Function") throw _crdt.logger.Error().Msg("mapFn must be a function").AsError();
  if (_crdt.indexers.has(name)) {
    const idx = _crdt.indexers.get(name);
    idx.applyMapFn(name, mapFn, meta);
  } else {
    const idx = new Index(_crdt, name, mapFn, meta);
    _crdt.indexers.set(name, idx);
  }
  return _crdt.indexers.get(name);
}
|
2243
|
+
var Index = class {
|
2244
|
+
  // Build an index bound to a CRDT. Either `mapFn` (live function) or `meta`
  // (persisted {map, byId, byKey, head}) configures it via applyMapFn; an
  // invalid configuration is captured in this.initError rather than thrown.
  constructor(crdt, name, mapFn, meta) {
    this.mapFnString = "";
    this.byKey = new IndexTree();
    this.byId = new IndexTree();
    this.includeDocsDefault = false;
    this.logger = ensureLogger(crdt.logger, "Index");
    this.blockstore = crdt.indexBlockstore;
    this.crdt = crdt;
    this.applyMapFn(name, mapFn, meta);
    this.name = name;
    if (!(this.mapFnString || this.initError)) throw this.logger.Error().Msg("missing mapFnString").AsError();
  }
  // Readiness/lifecycle fan out to both the index blockstore and the CRDT;
  // the trailing .then(() => {}) erases the tuple result to void.
  ready() {
    return Promise.all([this.blockstore.ready(), this.crdt.ready()]).then(() => {
    });
  }
  close() {
    return Promise.all([this.blockstore.close(), this.crdt.close()]).then(() => {
    });
  }
  destroy() {
    return Promise.all([this.blockstore.destroy(), this.crdt.destroy()]).then(() => {
    });
  }
  // (Re)configure the index from either a map function or persisted meta.
  // Conflicting reconfiguration (different name, head, or map source) is
  // rejected; errors are stored in this.initError instead of propagating.
  applyMapFn(name, mapFn, meta) {
    if (mapFn && meta) throw this.logger.Error().Msg("cannot provide both mapFn and meta").AsError();
    if (this.name && this.name !== name) throw this.logger.Error().Msg("cannot change name").AsError();
    this.name = name;
    try {
      if (meta) {
        // persisted-meta path: heads must match any already-applied head
        if (this.indexHead && this.indexHead.map((c) => c.toString()).join() !== meta.head.map((c) => c.toString()).join()) {
          throw this.logger.Error().Msg("cannot apply different head meta").AsError();
        }
        if (this.mapFnString) {
          // a differing map string is only warned about, not applied
          if (this.mapFnString !== meta.map) {
            this.logger.Warn().Msg(`cannot apply different mapFn meta: old mapFnString ${this.mapFnString} new mapFnString ${meta.map}`);
          } else {
            this.byId.cid = meta.byId;
            this.byKey.cid = meta.byKey;
            this.indexHead = meta.head;
          }
        } else {
          this.mapFnString = meta.map;
          this.byId.cid = meta.byId;
          this.byKey.cid = meta.byKey;
          this.indexHead = meta.head;
        }
      } else {
        if (this.mapFn) {
          if (mapFn) {
            if (this.mapFn.toString() !== mapFn.toString()) {
              throw this.logger.Error().Msg("cannot apply different mapFn app2").AsError();
            }
          }
        } else {
          // no fn supplied: default to indexing the field named `name`
          if (!mapFn) {
            mapFn = (doc) => doc[name] ?? void 0;
          }
          if (this.mapFnString) {
            if (this.mapFnString !== mapFn.toString()) {
              throw this.logger.Error().Msg("cannot apply different mapFn app").AsError();
            }
          } else {
            this.mapFnString = mapFn.toString();
          }
          this.mapFn = mapFn;
        }
      }
      // heuristic: arrow-function map fns default query() to includeDocs
      const matches = /=>\s*(.*)/.test(this.mapFnString);
      this.includeDocsDefault = matches;
    } catch (e) {
      this.initError = e;
    }
  }
|
2318
|
+
// Run a query against the byKey index, refreshing (_updateIndex) and loading
// (_hydrateIndex) it first. Dispatches on opts: range, single key, multiple
// keys, prefix, or a full scan when no selector is given.
// NOTE(review): opts is mutated in place (includeDocs default, prefix
// normalization) — callers should not reuse the same opts object.
async query(opts = {}) {
  await this.ready();
  await this._updateIndex();
  await this._hydrateIndex();
  // No persisted index root yet: nothing has been indexed, return empty.
  if (!this.byKey.root) {
    return await applyQuery(this.crdt, { result: [] }, opts);
  }
  // Arrow-style map functions imply "return whole documents" by default.
  if (this.includeDocsDefault && opts.includeDocs === void 0) opts.includeDocs = true;
  if (opts.range) {
    const eRange = encodeRange(opts.range);
    return await applyQuery(this.crdt, await throwFalsy(this.byKey.root).range(eRange[0], eRange[1]), opts);
  }
  if (opts.key) {
    const encodedKey = encodeKey(opts.key);
    return await applyQuery(this.crdt, await throwFalsy(this.byKey.root).get(encodedKey), opts);
  }
  if (Array.isArray(opts.keys)) {
    // One index lookup per key, in parallel; row lists are concatenated in
    // the order the keys were given.
    const results = await Promise.all(
      opts.keys.map(async (key) => {
        const encodedKey = encodeKey(key);
        return (await applyQuery(this.crdt, await throwFalsy(this.byKey.root).get(encodedKey), opts)).rows;
      })
    );
    return { rows: results.flat() };
  }
  if (opts.prefix) {
    if (!Array.isArray(opts.prefix)) opts.prefix = [opts.prefix];
    // NaN/Infinity act as the low/high sentinels for the key component —
    // presumably encodeRange orders them below/above all real values; the
    // resulting range covers every key under the prefix (TODO confirm).
    const start = [...opts.prefix, NaN];
    const end = [...opts.prefix, Infinity];
    const encodedR = encodeRange([start, end]);
    return await applyQuery(this.crdt, await this.byKey.root.range(...encodedR), opts);
  }
  // No selector at all: full scan of the byKey index.
  const all = await this.byKey.root.getAllEntries();
  return await applyQuery(
    this.crdt,
    {
      // @ts-expect-error getAllEntries returns a different type than range
      result: all.result.map(({ key: [k, id], value }) => ({
        key: k,
        id,
        value
      }))
    },
    opts
  );
}
_resetIndex() {
|
2365
|
+
this.byId = new IndexTree();
|
2366
|
+
this.byKey = new IndexTree();
|
2367
|
+
this.indexHead = void 0;
|
2368
|
+
}
|
2369
|
+
async _hydrateIndex() {
|
2370
|
+
if (this.byId.root && this.byKey.root) return;
|
2371
|
+
if (!this.byId.cid || !this.byKey.cid) return;
|
2372
|
+
this.byId.root = await loadIndex(this.blockstore, this.byId.cid, byIdOpts);
|
2373
|
+
this.byKey.root = await loadIndex(this.blockstore, this.byKey.cid, byKeyOpts);
|
2374
|
+
}
|
2375
|
+
// Bring the index up to date with the CRDT: run every document change since
// indexHead through the map function and write the resulting entries into
// the byId and byKey trees inside a single blockstore transaction.
// Throws any deferred applyMapFn error (this.initError) first.
async _updateIndex() {
  await this.ready();
  if (this.initError) throw this.initError;
  if (!this.mapFn) throw this.logger.Error().Msg("No map function defined").AsError();
  let result, head;
  if (!this.indexHead || this.indexHead.length === 0) {
    // First run: index every document.
    ({ result, head } = await this.crdt.allDocs());
  } else {
    // Incremental run: only documents changed since the last indexed head.
    ({ result, head } = await this.crdt.changes(this.indexHead));
  }
  if (result.length === 0) {
    this.indexHead = head;
  }
  // For changed documents that were already indexed, the stale entries must
  // be removed (del: true) before the fresh entries are written.
  let staleKeyIndexEntries = [];
  let removeIdIndexEntries = [];
  if (this.byId.root) {
    const removeIds = result.map(({ id: key }) => key);
    const { result: oldChangeEntries } = await this.byId.root.getMany(removeIds);
    staleKeyIndexEntries = oldChangeEntries.map((key) => ({ key, del: true }));
    // key[1] is presumably the document-id component of the stored pair —
    // see indexEntriesForChanges; TODO confirm.
    removeIdIndexEntries = oldChangeEntries.map((key) => ({ key: key[1], del: true }));
  }
  const indexEntries = indexEntriesForChanges(result, this.mapFn);
  // byId entries map key[1] back to the full composite key for cleanup.
  const byIdIndexEntries = indexEntries.map(({ key }) => ({
    key: key[1],
    value: key
  }));
  // Snapshot the meta of every sibling index so a single write captures the
  // state of all registered indexes.
  const indexerMeta = { indexes: /* @__PURE__ */ new Map() };
  for (const [name, indexer] of this.crdt.indexers) {
    if (indexer.indexHead) {
      indexerMeta.indexes?.set(name, {
        byId: indexer.byId.cid,
        byKey: indexer.byKey.cid,
        head: indexer.indexHead,
        map: indexer.mapFnString,
        name: indexer.name
      });
    }
  }
  if (result.length === 0) {
    // Nothing changed: skip the transaction entirely.
    return indexerMeta;
  }
  const { meta } = await this.blockstore.transaction(async (tblocks) => {
    // Deletions are applied together with insertions in one bulk pass.
    this.byId = await bulkIndex(tblocks, this.byId, removeIdIndexEntries.concat(byIdIndexEntries), byIdOpts);
    this.byKey = await bulkIndex(tblocks, this.byKey, staleKeyIndexEntries.concat(indexEntries), byKeyOpts);
    this.indexHead = head;
    if (this.byId.cid && this.byKey.cid) {
      const idxMeta = {
        byId: this.byId.cid,
        byKey: this.byKey.cid,
        head,
        map: this.mapFnString,
        name: this.name
      };
      indexerMeta.indexes?.set(this.name, idxMeta);
    }
    return indexerMeta;
  });
  return meta;
}
};
|
2435
|
+
|
2436
|
+
// src/crdt-clock.ts
|
2437
|
+
import { advance } from "@web3-storage/pail/clock";
|
2438
|
+
import { root as root2 } from "@web3-storage/pail/crdt";
|
2439
|
+
import { ResolveOnce as ResolveOnce3 } from "@adviser/cement";
|
2440
|
+
|
2441
|
+
// src/apply-head-queue.ts
|
2442
|
+
function applyHeadQueue(worker, logger) {
|
2443
|
+
const queue = [];
|
2444
|
+
let isProcessing = false;
|
2445
|
+
async function* process() {
|
2446
|
+
if (isProcessing || queue.length === 0) return;
|
2447
|
+
isProcessing = true;
|
2448
|
+
const allUpdates = [];
|
2449
|
+
try {
|
2450
|
+
while (queue.length > 0) {
|
2451
|
+
queue.sort((a, b) => b.updates ? 1 : -1);
|
2452
|
+
const task = queue.shift();
|
2453
|
+
if (!task) continue;
|
2454
|
+
await worker(task.newHead, task.prevHead, task.updates !== null).catch((e) => {
|
2455
|
+
throw logger.Error().Err(e).Msg("int_applyHead worker error").AsError();
|
2456
|
+
});
|
2457
|
+
if (task.updates) {
|
2458
|
+
allUpdates.push(...task.updates);
|
2459
|
+
}
|
2460
|
+
if (!queue.some((t) => t.updates) || task.updates) {
|
2461
|
+
const allTasksHaveUpdates = queue.every((task2) => task2.updates !== null);
|
2462
|
+
yield { updates: allUpdates, all: allTasksHaveUpdates };
|
2463
|
+
allUpdates.length = 0;
|
2464
|
+
}
|
2465
|
+
}
|
2466
|
+
} finally {
|
2467
|
+
isProcessing = false;
|
2468
|
+
const generator = process();
|
2469
|
+
let result = await generator.next();
|
2470
|
+
while (!result.done) {
|
2471
|
+
result = await generator.next();
|
2472
|
+
}
|
2473
|
+
}
|
2474
|
+
}
|
2475
|
+
return {
|
2476
|
+
push(task) {
|
2477
|
+
queue.push(task);
|
2478
|
+
return process();
|
2479
|
+
},
|
2480
|
+
size() {
|
2481
|
+
return queue.length;
|
2482
|
+
}
|
2483
|
+
};
|
2484
|
+
}
|
2485
|
+
|
2486
|
+
// src/crdt-clock.ts
|
2487
|
+
// Merkle-clock wrapper around a blockstore: tracks the current head (a list
// of CIDs), funnels all head changes through a serializing applyHeadQueue,
// and fans out change notifications to registered watchers.
var CRDTClock = class {
  constructor(blockstore) {
    // todo: track local and remote clocks independently, merge on read
    // that way we can drop the whole remote if we need to
    // should go with making sure the local clock only references locally available blockstore on write
    this.head = [];
    // zoomers: callbacks fired on every head change (indexes use this to reset).
    this.zoomers = /* @__PURE__ */ new Set();
    // watchers: callbacks that receive the list of document updates.
    this.watchers = /* @__PURE__ */ new Set();
    // emptyWatchers: callbacks fired without payload on every notification.
    this.emptyWatchers = /* @__PURE__ */ new Set();
    this._ready = new ResolveOnce3();
    this.blockstore = blockstore;
    this.logger = ensureLogger(blockstore.logger, "CRDTClock");
    // All head mutations are serialized through this queue into int_applyHead.
    this.applyHeadQueue = applyHeadQueue(this.int_applyHead.bind(this), this.logger);
  }
  // Resolve once the blockstore is ready; memoized via ResolveOnce.
  async ready() {
    return this._ready.once(async () => {
      await this.blockstore.ready();
    });
  }
  async close() {
    await this.blockstore.close();
  }
  setHead(head) {
    this.head = head;
  }
  // Queue a head change and, when the first batch result arrives, notify
  // zoomers/watchers via processUpdates. Returns after the first yield.
  async applyHead(newHead, prevHead, updates) {
    for await (const { updates: updatesAcc, all } of this.applyHeadQueue.push({
      newHead,
      prevHead,
      updates
    })) {
      return this.processUpdates(updatesAcc, all, prevHead);
    }
  }
  async processUpdates(updatesAcc, all, prevHead) {
    let internalUpdates = updatesAcc;
    // If some queued tasks lacked updates, recompute the delta from the
    // clock itself so watchers still see a complete change set.
    if (this.watchers.size && !all) {
      const changes = await clockChangesSince(throwFalsy(this.blockstore), this.head, prevHead, {}, this.logger);
      internalUpdates = changes.result;
    }
    this.zoomers.forEach((fn) => fn());
    this.notifyWatchers(internalUpdates || []);
  }
  notifyWatchers(updates) {
    this.emptyWatchers.forEach((fn) => fn());
    this.watchers.forEach((fn) => fn(updates || []));
  }
  // Register a watcher that receives document updates.
  onTick(fn) {
    this.watchers.add(fn);
  }
  // Register a watcher fired on every notification, without payload.
  onTock(fn) {
    this.emptyWatchers.add(fn);
  }
  // Register a callback fired on every head change.
  onZoom(fn) {
    this.zoomers.add(fn);
  }
  // Worker invoked (serially) by applyHeadQueue. Merges newHead into the
  // current head, advancing the clock inside a blockstore transaction.
  async int_applyHead(newHead, prevHead, localUpdates) {
    // Canonicalize both heads so string comparison is order-insensitive.
    const ogHead = sortClockHead(this.head);
    newHead = sortClockHead(newHead);
    if (compareClockHeads(ogHead, newHead)) {
      return;
    }
    const ogPrev = sortClockHead(prevHead);
    // Fast path: our head is exactly the expected previous head, so newHead
    // can be adopted without a merge.
    if (compareClockHeads(ogHead, ogPrev)) {
      this.setHead(newHead);
      return;
    }
    // Remote-originated heads (no local updates) skip the loader.
    const noLoader = !localUpdates;
    if (!this.blockstore) {
      throw this.logger.Error().Msg("missing blockstore").AsError();
    }
    await validateBlocks(this.logger, newHead, this.blockstore);
    const { meta } = await this.blockstore.transaction(
      async (tblocks) => {
        const advancedHead = await advanceBlocks(this.logger, newHead, tblocks, this.head);
        const result = await root2(tblocks, advancedHead);
        for (const { cid, bytes } of [
          ...result.additions
          // ...result.removals
        ]) {
          tblocks.putSync(cid, bytes);
        }
        return { head: advancedHead };
      },
      { noLoader }
    );
    this.setHead(meta.head);
  }
};
// Canonicalize a clock head by ordering its CIDs by their string form.
// NOTE: sorts the given array in place and returns it (Array#sort semantics).
function sortClockHead(clockHead) {
  const byStringForm = (a, b) => a.toString().localeCompare(b.toString());
  return clockHead.sort(byStringForm);
}
// Verify that every CID in newHead is retrievable from the blockstore.
// Throws (rejects) if the blockstore is missing or any block cannot be read.
//
// FIXES: the original used a fire-and-forget `newHead.map(async ...)` — the
// per-CID checks were never awaited, so a missing block produced an unhandled
// rejection instead of failing the caller. The missing-blockstore throw also
// lacked `.AsError()`, throwing the log builder rather than an Error
// (inconsistent with every other throw in this file).
async function validateBlocks(logger, newHead, blockstore) {
  if (!blockstore) throw logger.Error().Msg("missing blockstore").AsError();
  // All lookups run in parallel; the first missing block rejects the whole call.
  await Promise.all(
    newHead.map(async (cid) => {
      const got = await blockstore.get(cid);
      if (!got) {
        throw logger.Error().Str("cid", cid.toString()).Msg("int_applyHead missing block").AsError();
      }
    })
  );
}
// Two clock heads are equal when their string renderings match. Assumes both
// arrays were already canonicalized with sortClockHead.
function compareClockHeads(head1, head2) {
  return `${head1}` === `${head2}`;
}
// Fold each CID of newHead into the current clock head via pail's advance().
// A CID that cannot be advanced (e.g. its blocks are unavailable) is logged
// at debug level and skipped; the remaining CIDs are still applied.
async function advanceBlocks(logger, newHead, tblocks, head) {
  let current = head;
  for (const cid of newHead) {
    try {
      current = await advance(tblocks, current, cid);
    } catch (e) {
      logger.Debug().Err(e).Msg("failed to advance head");
    }
  }
  return current;
}
|
2603
|
+
// src/crdt.ts
|
2604
|
+
// Core CRDT document store: owns a data blockstore, a separate blockstore for
// indexes, and a CRDTClock that serializes head changes. Databases and
// indexes are layered on top of this class.
var CRDT = class {
  constructor(name, opts = {}) {
    this.onceReady = new ResolveOnce4();
    // name -> Index instances registered via index(); see applyMeta below.
    this.indexers = /* @__PURE__ */ new Map();
    this.name = name;
    this.logger = ensureLogger(opts, "CRDT");
    this.opts = opts;
    this.blockstore = blockstoreFactory({
      name,
      // Remote/persisted meta carries a clock head; merge it into our clock.
      applyMeta: async (meta) => {
        const crdtMeta = meta;
        if (!crdtMeta.head) throw this.logger.Error().Msg("missing head").AsError();
        await this.clock.applyHead(crdtMeta.head, []);
      },
      // Compaction callback: compact up to the current head, which stays valid.
      compact: async (blocks) => {
        await doCompact(blocks, this.clock.head, this.logger);
        return { head: this.clock.head };
      },
      autoCompact: this.opts.autoCompact || 100,
      crypto: this.opts.crypto,
      // isIndex is forced off for the data store (the index store sets it).
      store: { ...this.opts.store, isIndex: void 0 },
      public: this.opts.public,
      meta: this.opts.meta,
      threshold: this.opts.threshold
    });
    this.indexBlockstore = blockstoreFactory({
      name,
      // Index meta lists persisted indexes; re-register each one.
      applyMeta: async (meta) => {
        const idxCarMeta = meta;
        if (!idxCarMeta.indexes) throw this.logger.Error().Msg("missing indexes").AsError();
        for (const [name2, idx] of Object.entries(idxCarMeta.indexes)) {
          index({ _crdt: this }, name2, void 0, idx);
        }
      },
      crypto: this.opts.crypto,
      store: { ...this.opts.store, isIndex: this.opts.store?.isIndex || "idx" },
      public: this.opts.public
    });
    this.clock = new CRDTClock(this.blockstore);
    // Any head change invalidates the in-memory state of every index.
    this.clock.onZoom(() => {
      for (const idx of this.indexers.values()) {
        idx._resetIndex();
      }
    });
  }
  // Memoized readiness: both blockstores and the clock must be ready.
  async ready() {
    return this.onceReady.once(async () => {
      await Promise.all([this.blockstore.ready(), this.indexBlockstore.ready(), this.clock.ready()]);
    });
  }
  async close() {
    await Promise.all([this.blockstore.close(), this.indexBlockstore.close(), this.clock.close()]);
  }
  async destroy() {
    await Promise.all([this.blockstore.destroy(), this.indexBlockstore.destroy()]);
  }
  // Apply a batch of document updates in one transaction, then merge the
  // resulting head into the clock. Returns the transaction meta ({ head }).
  async bulk(updates) {
    await this.ready();
    const prevHead = [...this.clock.head];
    const done = await this.blockstore.transaction(async (blocks) => {
      const { head } = await applyBulkUpdateToCrdt(
        this.blockstore.ebOpts.storeRuntime,
        blocks,
        this.clock.head,
        updates,
        this.logger
      );
      // Kick off file-attachment processing for each written document.
      updates = updates.map((dupdate) => {
        readFiles(this.blockstore, { doc: dupdate.value });
        return dupdate;
      });
      return { head };
    });
    await this.clock.applyHead(done.meta.head, prevHead, updates);
    return done.meta;
  }
  // if (snap) await this.clock.applyHead(crdtMeta.head, this.clock.head)
  // Materialize every document entry at the current head.
  async allDocs() {
    await this.ready();
    const result = [];
    for await (const entry of getAllEntries(this.blockstore, this.clock.head, this.logger)) {
      result.push(entry);
    }
    return { result, head: this.clock.head };
  }
  // Render a textual visualization of the clock (debugging aid).
  async vis() {
    await this.ready();
    const txt = [];
    for await (const line of clockVis(this.blockstore, this.clock.head)) {
      txt.push(line);
    }
    return txt.join("\n");
  }
  async getBlock(cidString) {
    await this.ready();
    return await getBlock(this.blockstore, cidString);
  }
  // Fetch a single document; deleted documents read as undefined.
  async get(key) {
    await this.ready();
    const result = await getValueFromCrdt(this.blockstore, this.clock.head, key, this.logger);
    if (result.del) return void 0;
    return result;
  }
  // Document changes since a given head (empty head = from the beginning).
  async changes(since = [], opts = {}) {
    await this.ready();
    return await clockChangesSince(this.blockstore, this.clock.head, since, opts, this.logger);
  }
  async compact() {
    const blocks = this.blockstore;
    return await blocks.compact();
  }
};
|
2717
|
+
// src/database.ts
|
2718
|
+
// Public document database API over a CRDT: get/put/del, change feeds,
// queries via indexes, and subscription callbacks. Writes are funneled
// through a write queue into CRDT.bulk.
var Database = class {
  constructor(name, opts) {
    this.opts = {};
    // True once an onTick watcher has been installed for subscribers.
    this._listening = false;
    // Subscribers that receive updated documents.
    this._listeners = /* @__PURE__ */ new Set();
    // Subscribers notified without payload on every clock notification.
    this._noupdate_listeners = /* @__PURE__ */ new Set();
    this._ready = new ResolveOnce5();
    this.name = name;
    this.opts = opts || this.opts;
    this.logger = ensureLogger(this.opts, "Database");
    this._crdt = new CRDT(name, this.opts);
    this.blockstore = this._crdt.blockstore;
    // Serialize writes: each queued batch is applied via CRDT.bulk.
    this._writeQueue = writeQueue(async (updates) => {
      return await this._crdt.bulk(updates);
    });
    // Clock "tock" events drive the no-update listeners.
    this._crdt.clock.onTock(() => {
      this._no_update_notify();
    });
  }
  static {
    // Shared registry used by fireproof() to memoize Database instances.
    this.databases = /* @__PURE__ */ new Map();
  }
  async close() {
    await this.ready();
    await this._crdt.close();
    await this.blockstore.close();
  }
  async destroy() {
    await this.ready();
    await this._crdt.destroy();
    await this.blockstore.destroy();
  }
  // Memoized startup: system container, CRDT, and blockstore.
  async ready() {
    return this._ready.once(async () => {
      await SysContainer.start();
      await this._crdt.ready();
      await this.blockstore.ready();
    });
  }
  // Fetch a document by id; throws NotFoundError when absent or on any
  // underlying read error (the original error message is folded in).
  async get(id) {
    this.logger.Debug().Str("id", id).Msg("get-pre-ready");
    await this.ready();
    this.logger.Debug().Str("id", id).Msg("get-post-ready");
    const got = await this._crdt.get(id).catch((e) => {
      throw new NotFoundError(`Not found: ${id} - ${e.message}`);
    });
    if (!got) throw new NotFoundError(`Not found: ${id}`);
    const { doc } = got;
    return { ...doc, _id: id };
  }
  // Write a document; a missing _id gets a fresh uuidv7. Returns the id and
  // the resulting clock head.
  async put(doc) {
    this.logger.Debug().Str("id", doc._id).Msg("put-pre-ready");
    await this.ready();
    this.logger.Debug().Str("id", doc._id).Msg("put-post-ready");
    const { _id, ...value } = doc;
    const docId = _id || uuidv7();
    const result = await this._writeQueue.push({
      id: docId,
      value: {
        ...value,
        _id: docId
      }
    });
    return { id: docId, clock: result?.head };
  }
  // Tombstone a document by id.
  async del(id) {
    await this.ready();
    const result = await this._writeQueue.push({ id, del: true });
    return { id, clock: result?.head };
  }
  // Change feed since a given clock head; deleted docs appear as
  // { _id, _deleted: true }.
  async changes(since = [], opts = {}) {
    await this.ready();
    const { result, head } = await this._crdt.changes(since, opts);
    const rows = result.map(({ id: key, value, del, clock }) => ({
      key,
      value: del ? { _id: key, _deleted: true } : { _id: key, ...value },
      clock
    }));
    return { rows, clock: head };
  }
  // Snapshot of all documents at the current head.
  async allDocs() {
    await this.ready();
    const { result, head } = await this._crdt.allDocs();
    const rows = result.map(({ id: key, value, del }) => ({
      key,
      value: del ? { _id: key, _deleted: true } : { _id: key, ...value }
    }));
    return { rows, clock: head };
  }
  // Alias for allDocs().
  async allDocuments() {
    return this.allDocs();
  }
  // Register a subscriber. With updates=true the listener receives changed
  // documents; otherwise it is only pinged (no payload). Returns an
  // unsubscribe function.
  subscribe(listener, updates) {
    if (updates) {
      if (!this._listening) {
        // Install the clock watcher lazily, once, for all update listeners.
        this._listening = true;
        this._crdt.clock.onTick((updates2) => {
          void this._notify(updates2);
        });
      }
      this._listeners.add(listener);
      return () => {
        this._listeners.delete(listener);
      };
    } else {
      this._noupdate_listeners.add(listener);
      return () => {
        this._noupdate_listeners.delete(listener);
      };
    }
  }
  // todo if we add this onto dbs in fireproof.ts then we can make index.ts a separate package
  // Query by field name, or by a map function (named via makeName).
  async query(field, opts = {}) {
    await this.ready();
    const _crdt = this._crdt;
    const idx = typeof field === "string" ? index({ _crdt }, field) : index({ _crdt }, makeName(field.toString()), field);
    return await idx.query(opts);
  }
  async compact() {
    await this.ready();
    await this._crdt.compact();
  }
  // Deliver updated documents to every update listener, sequentially; a
  // throwing listener is logged and does not stop delivery to the rest.
  async _notify(updates) {
    await this.ready();
    if (this._listeners.size) {
      const docs = updates.map(({ id, value }) => ({ ...value, _id: id }));
      for (const listener of this._listeners) {
        await (async () => await listener(docs))().catch((e) => {
          this.logger.Error().Err(e).Msg("subscriber error");
        });
      }
    }
  }
  // Ping every no-update listener (empty payload), same error policy.
  async _no_update_notify() {
    await this.ready();
    if (this._noupdate_listeners.size) {
      for (const listener of this._noupdate_listeners) {
        await (async () => await listener([]))().catch((e) => {
          this.logger.Error().Err(e).Msg("subscriber error");
        });
      }
    }
  }
};
// Normalize an optional record into a deterministic list of single-entry
// objects ordered by key; used to build stable cache keys in fireproof().
// Falsy input yields an empty list.
function toSortedArray(set) {
  if (!set) return [];
  const entries = Object.entries(set);
  entries.sort(([keyA], [keyB]) => keyA.localeCompare(keyB));
  return entries.map(([key, value]) => ({ [key]: value }));
}
// Memoizing Database factory: databases are cached per (name, store-shape)
// fingerprint so repeated calls with equivalent options share one instance.
// Function-valued options contribute only their presence (boolean) to the key.
function fireproof(name, opts) {
  const store = opts?.store;
  const fingerprint = {
    name,
    stores: toSortedArray(store?.stores),
    makeMetaStore: !!store?.makeMetaStore,
    makeDataStore: !!store?.makeDataStore,
    makeRemoteWAL: !!store?.makeRemoteWAL,
    encodeFile: !!store?.encodeFile,
    decodeFile: !!store?.decodeFile
  };
  const key = JSON.stringify(toSortedArray(fingerprint));
  const cached = Database.databases.get(key);
  if (cached) return cached;
  const db = new Database(name, opts);
  Database.databases.set(key, db);
  return db;
}
// Derive an index name from a map function's source text. Two-parameter
// functions (e.g. "(doc, emit) => ...") keep the full source as their name;
// single-parameter arrow functions are named by their body ("doc.age");
// anything else falls back to the raw source string.
function makeName(fnString) {
  const twoArgPattern = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g;
  const paramMatches = Array.from(fnString.matchAll(twoArgPattern), (m) => m[1].trim());
  if (paramMatches.length !== 0) {
    return fnString;
  }
  const arrowBody = /=>\s*(.*)/.exec(fnString);
  return arrowBody ? arrowBody[1] : fnString;
}
|
2899
|
+
// src/version.ts
|
2900
|
+
// The version string is injected at build time as the sole key of this
// object literal; extract it back out as the package version constant.
var PACKAGE_VERSION = Object.keys({ "0.19.4-dev": "xxxx" }).at(0);
export {
|
2904
|
+
CRDT,
|
2905
|
+
Database,
|
2906
|
+
Index,
|
2907
|
+
PACKAGE_VERSION,
|
2908
|
+
blockstore_exports as blockstore,
|
2909
|
+
blockstore_exports as bs,
|
2910
|
+
ensureLogger,
|
2911
|
+
exception2Result,
|
2912
|
+
exceptionWrapper,
|
2913
|
+
falsyToUndef,
|
2914
|
+
fireproof,
|
2915
|
+
getKey,
|
2916
|
+
getName,
|
2917
|
+
getStore,
|
2918
|
+
index,
|
2919
|
+
isFalsy,
|
2920
|
+
runtime_exports as rt,
|
2921
|
+
throwFalsy
|
2922
|
+
};
|
2923
|
+
//# sourceMappingURL=index.js.map
|