@fireproof/core 0.18.0 → 0.19.5-dev

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67) hide show
  1. package/README.md +29 -15
  2. package/chunk-7OGPZSGT.js +39 -0
  3. package/chunk-7OGPZSGT.js.map +1 -0
  4. package/chunk-H3A2HMMM.js +164 -0
  5. package/chunk-H3A2HMMM.js.map +1 -0
  6. package/chunk-HCXR2M5B.js +202 -0
  7. package/chunk-HCXR2M5B.js.map +1 -0
  8. package/chunk-QHSXUST7.js +208 -0
  9. package/chunk-QHSXUST7.js.map +1 -0
  10. package/chunk-VZGT7ZYP.js +22 -0
  11. package/chunk-VZGT7ZYP.js.map +1 -0
  12. package/index.cjs +4649 -0
  13. package/index.cjs.map +1 -0
  14. package/index.d.cts +911 -0
  15. package/index.d.ts +911 -0
  16. package/index.js +2923 -0
  17. package/index.js.map +1 -0
  18. package/metafile-cjs.json +1 -0
  19. package/metafile-esm.json +1 -0
  20. package/node-sys-container-E7LADX2Z.js +29 -0
  21. package/node-sys-container-E7LADX2Z.js.map +1 -0
  22. package/package.json +23 -109
  23. package/sqlite-data-store-YS4U7AQ4.js +120 -0
  24. package/sqlite-data-store-YS4U7AQ4.js.map +1 -0
  25. package/sqlite-meta-store-FJZSZG4R.js +137 -0
  26. package/sqlite-meta-store-FJZSZG4R.js.map +1 -0
  27. package/sqlite-wal-store-6JZ4URNS.js +123 -0
  28. package/sqlite-wal-store-6JZ4URNS.js.map +1 -0
  29. package/store-file-HMHPQTUV.js +193 -0
  30. package/store-file-HMHPQTUV.js.map +1 -0
  31. package/store-indexdb-MRVZG4OG.js +20 -0
  32. package/store-indexdb-MRVZG4OG.js.map +1 -0
  33. package/store-sql-5XMJ5OWJ.js +406 -0
  34. package/store-sql-5XMJ5OWJ.js.map +1 -0
  35. package/dist/browser/fireproof.cjs +0 -1172
  36. package/dist/browser/fireproof.cjs.map +0 -1
  37. package/dist/browser/fireproof.d.cts +0 -268
  38. package/dist/browser/fireproof.d.ts +0 -268
  39. package/dist/browser/fireproof.global.js +0 -24178
  40. package/dist/browser/fireproof.global.js.map +0 -1
  41. package/dist/browser/fireproof.js +0 -1147
  42. package/dist/browser/fireproof.js.map +0 -1
  43. package/dist/browser/metafile-cjs.json +0 -1
  44. package/dist/browser/metafile-esm.json +0 -1
  45. package/dist/browser/metafile-iife.json +0 -1
  46. package/dist/memory/fireproof.cjs +0 -1172
  47. package/dist/memory/fireproof.cjs.map +0 -1
  48. package/dist/memory/fireproof.d.cts +0 -268
  49. package/dist/memory/fireproof.d.ts +0 -268
  50. package/dist/memory/fireproof.global.js +0 -24178
  51. package/dist/memory/fireproof.global.js.map +0 -1
  52. package/dist/memory/fireproof.js +0 -1147
  53. package/dist/memory/fireproof.js.map +0 -1
  54. package/dist/memory/metafile-cjs.json +0 -1
  55. package/dist/memory/metafile-esm.json +0 -1
  56. package/dist/memory/metafile-iife.json +0 -1
  57. package/dist/node/fireproof.cjs +0 -1172
  58. package/dist/node/fireproof.cjs.map +0 -1
  59. package/dist/node/fireproof.d.cts +0 -268
  60. package/dist/node/fireproof.d.ts +0 -268
  61. package/dist/node/fireproof.global.js +0 -38540
  62. package/dist/node/fireproof.global.js.map +0 -1
  63. package/dist/node/fireproof.js +0 -1138
  64. package/dist/node/fireproof.js.map +0 -1
  65. package/dist/node/metafile-cjs.json +0 -1
  66. package/dist/node/metafile-esm.json +0 -1
  67. package/dist/node/metafile-iife.json +0 -1
@@ -1,1172 +0,0 @@
1
- "use strict";
2
- var __create = Object.create;
3
- var __defProp = Object.defineProperty;
4
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
- var __getOwnPropNames = Object.getOwnPropertyNames;
6
- var __getProtoOf = Object.getPrototypeOf;
7
- var __hasOwnProp = Object.prototype.hasOwnProperty;
8
- var __export = (target, all) => {
9
- for (var name in all)
10
- __defProp(target, name, { get: all[name], enumerable: true });
11
- };
12
- var __copyProps = (to, from, except, desc) => {
13
- if (from && typeof from === "object" || typeof from === "function") {
14
- for (let key of __getOwnPropNames(from))
15
- if (!__hasOwnProp.call(to, key) && key !== except)
16
- __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
17
- }
18
- return to;
19
- };
20
- var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
- // If the importer is in node compatibility mode or this is not an ESM
22
- // file that has been converted to a CommonJS file using a Babel-
23
- // compatible transform (i.e. "__esModule" has not been set), then set
24
- // "default" to the CommonJS "module.exports" for node compatibility.
25
- isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
- mod
27
- ));
28
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
-
30
- // src/fireproof.ts
31
- var fireproof_exports = {};
32
- __export(fireproof_exports, {
33
- Database: () => Database,
34
- fireproof: () => fireproof
35
- });
36
- module.exports = __toCommonJS(fireproof_exports);
37
-
38
- // src/database.ts
39
- var import_uuidv7 = require("uuidv7");
40
-
41
- // src/write-queue.ts
42
- function writeQueue(worker, payload = Infinity, unbounded = false) {
43
- const queue = [];
44
- let isProcessing = false;
45
- async function process() {
46
- if (isProcessing || queue.length === 0)
47
- return;
48
- isProcessing = true;
49
- const tasksToProcess = queue.splice(0, payload);
50
- const updates = tasksToProcess.map((item) => item.task);
51
- if (unbounded) {
52
- const promises = updates.map(async (update, index2) => {
53
- try {
54
- const result = await worker([update]);
55
- tasksToProcess[index2].resolve(result);
56
- } catch (error) {
57
- tasksToProcess[index2].reject(error);
58
- }
59
- });
60
- await Promise.all(promises);
61
- } else {
62
- try {
63
- const result = await worker(updates);
64
- tasksToProcess.forEach((task) => task.resolve(result));
65
- } catch (error) {
66
- tasksToProcess.forEach((task) => task.reject(error));
67
- }
68
- }
69
- isProcessing = false;
70
- void process();
71
- }
72
- return {
73
- push(task) {
74
- return new Promise((resolve, reject) => {
75
- queue.push({ task, resolve, reject });
76
- void process();
77
- });
78
- }
79
- };
80
- }
81
-
82
- // src/crdt.ts
83
- var import_encrypted_blockstore2 = require("@fireproof/encrypted-blockstore");
84
-
85
- // src/eb-node.ts
86
- var crypto = __toESM(require("@fireproof/encrypted-blockstore/crypto-node"));
87
- var store = __toESM(require("@fireproof/encrypted-blockstore/store-node"));
88
-
89
- // src/crdt-helpers.ts
90
- var import_block = require("multiformats/block");
91
- var import_link = require("multiformats/link");
92
- var import_sha2 = require("multiformats/hashes/sha2");
93
- var codec = __toESM(require("@ipld/dag-cbor"), 1);
94
- var import_crdt = require("@web3-storage/pail/crdt");
95
- var import_clock = require("@web3-storage/pail/clock");
96
- var Batch = __toESM(require("@web3-storage/pail/crdt/batch"), 1);
97
- var import_encrypted_blockstore = require("@fireproof/encrypted-blockstore");
98
-
99
- // src/files.ts
100
- var UnixFS = __toESM(require("@ipld/unixfs"), 1);
101
- var raw = __toESM(require("multiformats/codecs/raw"), 1);
102
- var import_fixed = require("@ipld/unixfs/file/chunker/fixed");
103
- var import_balanced = require("@ipld/unixfs/file/layout/balanced");
104
- var import_ipfs_unixfs_exporter = require("ipfs-unixfs-exporter");
105
- var queuingStrategy = UnixFS.withCapacity();
106
- var settings = UnixFS.configure({
107
- fileChunkEncoder: raw,
108
- smallFileEncoder: raw,
109
- // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call
110
- chunker: (0, import_fixed.withMaxChunkSize)(1024 * 1024),
111
- // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call
112
- fileLayout: (0, import_balanced.withWidth)(1024)
113
- });
114
- async function encodeFile(blob) {
115
- const readable = createFileEncoderStream(blob);
116
- const blocks = await collect(readable);
117
- return { cid: blocks.at(-1).cid, blocks };
118
- }
119
- async function decodeFile(blocks, cid, meta) {
120
- const entry = await (0, import_ipfs_unixfs_exporter.exporter)(cid.toString(), blocks, { length: meta.size });
121
- const chunks = [];
122
- for await (const chunk of entry.content())
123
- chunks.push(chunk);
124
- return new File(chunks, entry.name, { type: meta.type, lastModified: 0 });
125
- }
126
- function createFileEncoderStream(blob) {
127
- const { readable, writable } = new TransformStream({}, queuingStrategy);
128
- const unixfsWriter = UnixFS.createWriter({ writable, settings });
129
- const fileBuilder = new UnixFSFileBuilder("", blob);
130
- void (async () => {
131
- await fileBuilder.finalize(unixfsWriter);
132
- await unixfsWriter.close();
133
- })();
134
- return readable;
135
- }
136
- async function collect(collectable) {
137
- const chunks = [];
138
- await collectable.pipeTo(
139
- new WritableStream({
140
- write(chunk) {
141
- chunks.push(chunk);
142
- }
143
- })
144
- );
145
- return chunks;
146
- }
147
- var UnixFSFileBuilder = class {
148
- #file;
149
- name;
150
- constructor(name, file) {
151
- this.name = name;
152
- this.#file = file;
153
- }
154
- async finalize(writer) {
155
- const unixfsFileWriter = UnixFS.createFileWriter(writer);
156
- await this.#file.stream().pipeTo(
157
- new WritableStream({
158
- async write(chunk) {
159
- await unixfsFileWriter.write(chunk);
160
- }
161
- })
162
- );
163
- return await unixfsFileWriter.close();
164
- }
165
- };
166
-
167
- // src/crdt-helpers.ts
168
- function time(tag) {
169
- }
170
- function timeEnd(tag) {
171
- }
172
- async function applyBulkUpdateToCrdt(tblocks, head, updates) {
173
- let result = null;
174
- if (updates.length > 1) {
175
- const batch = await Batch.create(tblocks, head);
176
- for (const update of updates) {
177
- const link = await writeDocContent(tblocks, update);
178
- await batch.put(update.key, link);
179
- }
180
- result = await batch.commit();
181
- } else {
182
- for (const update of updates) {
183
- const link = await writeDocContent(tblocks, update);
184
- result = await (0, import_crdt.put)(tblocks, head, update.key, link);
185
- const resRoot = result.root.toString();
186
- const isReturned = result.additions.some((a) => a.cid.toString() === resRoot);
187
- if (!isReturned) {
188
- const hasRoot = await tblocks.get(result.root);
189
- if (!hasRoot) {
190
- throw new Error(
191
- `missing root in additions: ${result.additions.length} ${resRoot} keys: ${updates.map((u) => u.key).toString()}`
192
- );
193
- }
194
- }
195
- }
196
- }
197
- if (!result)
198
- throw new Error("Missing result");
199
- if (result.event) {
200
- for (const { cid, bytes } of [...result.additions, ...result.removals, result.event]) {
201
- tblocks.putSync(cid, bytes);
202
- }
203
- }
204
- return { head: result.head };
205
- }
206
- async function writeDocContent(blocks, update) {
207
- let value;
208
- if (update.del) {
209
- value = { del: true };
210
- } else {
211
- await processFiles(blocks, update.value);
212
- value = { doc: update.value };
213
- }
214
- const block = await (0, import_block.encode)({ value, hasher: import_sha2.sha256, codec });
215
- blocks.putSync(block.cid, block.bytes);
216
- return block.cid;
217
- }
218
- async function processFiles(blocks, doc) {
219
- if (doc._files) {
220
- await processFileset(blocks, doc._files);
221
- }
222
- if (doc._publicFiles) {
223
- await processFileset(blocks, doc._publicFiles, true);
224
- }
225
- }
226
- async function processFileset(blocks, files, publicFiles = false) {
227
- const dbBlockstore = blocks.parent;
228
- const t = new import_encrypted_blockstore.CarTransaction(dbBlockstore);
229
- const didPut = [];
230
- for (const filename in files) {
231
- if (File === files[filename].constructor) {
232
- const file = files[filename];
233
- const { cid, blocks: fileBlocks } = await encodeFile(file);
234
- didPut.push(filename);
235
- for (const block of fileBlocks) {
236
- t.putSync(block.cid, block.bytes);
237
- }
238
- files[filename] = { cid, type: file.type, size: file.size };
239
- } else {
240
- const { cid, type, size, car } = files[filename];
241
- if (cid && type && size && car) {
242
- files[filename] = { cid, type, size, car };
243
- }
244
- }
245
- }
246
- if (didPut.length) {
247
- const car = await dbBlockstore.loader?.commitFiles(t, { files }, {
248
- public: publicFiles
249
- });
250
- if (car) {
251
- for (const name of didPut) {
252
- files[name] = { car, ...files[name] };
253
- }
254
- }
255
- }
256
- }
257
- async function getValueFromCrdt(blocks, head, key) {
258
- if (!head.length)
259
- throw new Error("Getting from an empty database");
260
- const link = await (0, import_crdt.get)(blocks, head, key);
261
- if (!link)
262
- throw new Error(`Missing key ${key}`);
263
- return await getValueFromLink(blocks, link);
264
- }
265
- function readFiles(blocks, { doc }) {
266
- if (!doc)
267
- return;
268
- if (doc._files) {
269
- readFileset(blocks, doc._files);
270
- }
271
- if (doc._publicFiles) {
272
- readFileset(blocks, doc._publicFiles, true);
273
- }
274
- }
275
- function readFileset(blocks, files, isPublic = false) {
276
- for (const filename in files) {
277
- const fileMeta = files[filename];
278
- if (fileMeta.cid) {
279
- if (isPublic) {
280
- fileMeta.url = `https://${fileMeta.cid.toString()}.ipfs.w3s.link/`;
281
- }
282
- if (fileMeta.car) {
283
- fileMeta.file = async () => await decodeFile(
284
- {
285
- get: async (cid) => {
286
- return await blocks.getFile(fileMeta.car, cid, isPublic);
287
- }
288
- },
289
- fileMeta.cid,
290
- fileMeta
291
- );
292
- }
293
- }
294
- files[filename] = fileMeta;
295
- }
296
- }
297
- async function getValueFromLink(blocks, link) {
298
- const block = await blocks.get(link);
299
- if (!block)
300
- throw new Error(`Missing linked block ${link.toString()}`);
301
- const { value } = await (0, import_block.decode)({ bytes: block.bytes, hasher: import_sha2.sha256, codec });
302
- value.cid = link;
303
- readFiles(blocks, value);
304
- return value;
305
- }
306
- var DirtyEventFetcher = class extends import_clock.EventFetcher {
307
- // @ts-ignore
308
- async get(link) {
309
- try {
310
- return await super.get(link);
311
- } catch (e) {
312
- console.error("missing event", link.toString(), e);
313
- return { value: null };
314
- }
315
- }
316
- };
317
- async function clockChangesSince(blocks, head, since, opts) {
318
- const eventsFetcher = opts.dirty ? new DirtyEventFetcher(blocks) : new import_clock.EventFetcher(blocks);
319
- const keys = /* @__PURE__ */ new Set();
320
- const updates = await gatherUpdates(
321
- blocks,
322
- eventsFetcher,
323
- head,
324
- since,
325
- [],
326
- keys,
327
- /* @__PURE__ */ new Set(),
328
- opts.limit || Infinity
329
- );
330
- return { result: updates.reverse(), head };
331
- }
332
- async function gatherUpdates(blocks, eventsFetcher, head, since, updates = [], keys, didLinks, limit) {
333
- if (limit <= 0)
334
- return updates;
335
- const sHead = head.map((l) => l.toString());
336
- for (const link of since) {
337
- if (sHead.includes(link.toString())) {
338
- return updates;
339
- }
340
- }
341
- for (const link of head) {
342
- if (didLinks.has(link.toString()))
343
- continue;
344
- didLinks.add(link.toString());
345
- const { value: event } = await eventsFetcher.get(link);
346
- if (!event)
347
- continue;
348
- const { type } = event.data;
349
- let ops = [];
350
- if (type === "batch") {
351
- ops = event.data.ops;
352
- } else if (type === "put") {
353
- ops = [event.data];
354
- }
355
- for (let i = ops.length - 1; i >= 0; i--) {
356
- const { key, value } = ops[i];
357
- if (!keys.has(key)) {
358
- const docValue = await getValueFromLink(blocks, value);
359
- updates.push({ key, value: docValue.doc, del: docValue.del, clock: link });
360
- limit--;
361
- keys.add(key);
362
- }
363
- }
364
- if (event.parents) {
365
- updates = await gatherUpdates(
366
- blocks,
367
- eventsFetcher,
368
- event.parents,
369
- since,
370
- updates,
371
- keys,
372
- didLinks,
373
- limit
374
- );
375
- }
376
- }
377
- return updates;
378
- }
379
- async function* getAllEntries(blocks, head) {
380
- for await (const [key, link] of (0, import_crdt.entries)(blocks, head)) {
381
- const docValue = await getValueFromLink(blocks, link);
382
- yield { key, value: docValue.doc, del: docValue.del };
383
- }
384
- }
385
- async function* clockVis(blocks, head) {
386
- for await (const line of (0, import_clock.vis)(blocks, head)) {
387
- yield line;
388
- }
389
- }
390
- var isCompacting = false;
391
- async function doCompact(blockLog, head) {
392
- if (isCompacting) {
393
- return;
394
- }
395
- isCompacting = true;
396
- time("compact head");
397
- for (const cid of head) {
398
- const bl = await blockLog.get(cid);
399
- if (!bl)
400
- throw new Error("Missing head block: " + cid.toString());
401
- }
402
- timeEnd("compact head");
403
- time("compact all entries");
404
- for await (const _entry of getAllEntries(blockLog, head)) {
405
- }
406
- timeEnd("compact all entries");
407
- time("compact clock vis");
408
- for await (const _line of (0, import_clock.vis)(blockLog, head)) {
409
- }
410
- timeEnd("compact clock vis");
411
- time("compact root");
412
- const result = await (0, import_crdt.root)(blockLog, head);
413
- timeEnd("compact root");
414
- time("compact root blocks");
415
- for (const { cid, bytes } of [...result.additions, ...result.removals]) {
416
- blockLog.loggedBlocks.putSync(cid, bytes);
417
- }
418
- timeEnd("compact root blocks");
419
- time("compact changes");
420
- await clockChangesSince(blockLog, head, [], {});
421
- timeEnd("compact changes");
422
- isCompacting = false;
423
- }
424
- async function getBlock(blocks, cidString) {
425
- const block = await blocks.get((0, import_link.parse)(cidString));
426
- if (!block)
427
- throw new Error(`Missing block ${cidString}`);
428
- const { cid, value } = await (0, import_block.decode)({ bytes: block.bytes, codec, hasher: import_sha2.sha256 });
429
- return new import_block.Block({ cid, value, bytes: block.bytes });
430
- }
431
-
432
- // src/indexer-helpers.ts
433
- var import_block2 = require("multiformats/block");
434
- var import_sha22 = require("multiformats/hashes/sha2");
435
- var codec2 = __toESM(require("@ipld/dag-cbor"), 1);
436
- var import_charwise = __toESM(require("charwise"), 1);
437
- var DbIndex = __toESM(require("prolly-trees/db-index"), 1);
438
- var import_utils = require("prolly-trees/utils");
439
- var import_cache = require("prolly-trees/cache");
440
- var IndexTree = class {
441
- cid = null;
442
- root = null;
443
- };
444
- var refCompare = (aRef, bRef) => {
445
- if (Number.isNaN(aRef))
446
- return -1;
447
- if (Number.isNaN(bRef))
448
- throw new Error("ref may not be Infinity or NaN");
449
- if (aRef === Infinity)
450
- return 1;
451
- return (0, import_utils.simpleCompare)(aRef, bRef);
452
- };
453
- var compare = (a, b) => {
454
- const [aKey, aRef] = a;
455
- const [bKey, bRef] = b;
456
- const comp = (0, import_utils.simpleCompare)(aKey, bKey);
457
- if (comp !== 0)
458
- return comp;
459
- return refCompare(aRef, bRef);
460
- };
461
- var byKeyOpts = { cache: import_cache.nocache, chunker: (0, import_utils.bf)(30), codec: codec2, hasher: import_sha22.sha256, compare };
462
- var byIdOpts = { cache: import_cache.nocache, chunker: (0, import_utils.bf)(30), codec: codec2, hasher: import_sha22.sha256, compare: import_utils.simpleCompare };
463
- function indexEntriesForChanges(changes, mapFn) {
464
- const indexEntries = [];
465
- changes.forEach(({ key: _id, value, del }) => {
466
- if (del || !value)
467
- return;
468
- let mapCalled = false;
469
- const mapReturn = mapFn({ _id, ...value }, (k, v) => {
470
- mapCalled = true;
471
- if (typeof k === "undefined")
472
- return;
473
- indexEntries.push({
474
- // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-call
475
- key: [import_charwise.default.encode(k), _id],
476
- value: v || null
477
- });
478
- });
479
- if (!mapCalled && mapReturn) {
480
- indexEntries.push({
481
- // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-call
482
- key: [import_charwise.default.encode(mapReturn), _id],
483
- value: null
484
- });
485
- }
486
- });
487
- return indexEntries;
488
- }
489
- function makeProllyGetBlock(blocks) {
490
- return async (address) => {
491
- const block = await blocks.get(address);
492
- if (!block)
493
- throw new Error(`Missing block ${address.toString()}`);
494
- const { cid, bytes } = block;
495
- return (0, import_block2.create)({ cid, bytes, hasher: import_sha22.sha256, codec: codec2 });
496
- };
497
- }
498
- async function bulkIndex(tblocks, inIndex, indexEntries, opts) {
499
- if (!indexEntries.length)
500
- return inIndex;
501
- if (!inIndex.root) {
502
- if (!inIndex.cid) {
503
- let returnRootBlock = null;
504
- let returnNode = null;
505
- for await (const node of await DbIndex.create({ get: makeProllyGetBlock(tblocks), list: indexEntries, ...opts })) {
506
- const block = await node.block;
507
- await tblocks.put(block.cid, block.bytes);
508
- returnRootBlock = block;
509
- returnNode = node;
510
- }
511
- if (!returnNode || !returnRootBlock)
512
- throw new Error("failed to create index");
513
- return { root: returnNode, cid: returnRootBlock.cid };
514
- } else {
515
- inIndex.root = await DbIndex.load({ cid: inIndex.cid, get: makeProllyGetBlock(tblocks), ...opts });
516
- }
517
- }
518
- const { root: root3, blocks: newBlocks } = await inIndex.root.bulk(indexEntries);
519
- if (root3) {
520
- for await (const block of newBlocks) {
521
- await tblocks.put(block.cid, block.bytes);
522
- }
523
- return { root: root3, cid: (await root3.block).cid };
524
- } else {
525
- return { root: null, cid: null };
526
- }
527
- }
528
- async function loadIndex(tblocks, cid, opts) {
529
- return await DbIndex.load({ cid, get: makeProllyGetBlock(tblocks), ...opts });
530
- }
531
- async function applyQuery(crdt, resp, query) {
532
- if (query.descending) {
533
- resp.result = resp.result.reverse();
534
- }
535
- if (query.limit) {
536
- resp.result = resp.result.slice(0, query.limit);
537
- }
538
- if (query.includeDocs) {
539
- resp.result = await Promise.all(
540
- resp.result.map(async (row) => {
541
- const val = await crdt.get(row.id);
542
- const doc = val ? { _id: row.id, ...val.doc } : null;
543
- return { ...row, doc };
544
- })
545
- );
546
- }
547
- return {
548
- rows: resp.result.map((row) => {
549
- row.key = import_charwise.default.decode(row.key);
550
- if (row.row && !row.value) {
551
- row.value = row.row;
552
- delete row.row;
553
- }
554
- return row;
555
- })
556
- };
557
- }
558
- function encodeRange(range) {
559
- return range.map((key) => import_charwise.default.encode(key));
560
- }
561
- function encodeKey(key) {
562
- return import_charwise.default.encode(key);
563
- }
564
-
565
- // src/index.ts
566
- function index({ _crdt }, name, mapFn, meta) {
567
- if (mapFn && meta)
568
- throw new Error("cannot provide both mapFn and meta");
569
- if (mapFn && mapFn.constructor.name !== "Function")
570
- throw new Error("mapFn must be a function");
571
- if (_crdt.indexers.has(name)) {
572
- const idx = _crdt.indexers.get(name);
573
- idx.applyMapFn(name, mapFn, meta);
574
- } else {
575
- const idx = new Index(_crdt, name, mapFn, meta);
576
- _crdt.indexers.set(name, idx);
577
- }
578
- return _crdt.indexers.get(name);
579
- }
580
- var Index = class {
581
- blockstore;
582
- crdt;
583
- name = null;
584
- mapFn = null;
585
- mapFnString = "";
586
- byKey = new IndexTree();
587
- byId = new IndexTree();
588
- indexHead = void 0;
589
- includeDocsDefault = false;
590
- initError = null;
591
- ready;
592
- constructor(crdt, name, mapFn, meta) {
593
- this.blockstore = crdt.indexBlockstore;
594
- this.crdt = crdt;
595
- this.applyMapFn(name, mapFn, meta);
596
- if (!(this.mapFnString || this.initError))
597
- throw new Error("missing mapFnString");
598
- this.ready = this.blockstore.ready.then(() => {
599
- });
600
- }
601
- applyMapFn(name, mapFn, meta) {
602
- if (mapFn && meta)
603
- throw new Error("cannot provide both mapFn and meta");
604
- if (this.name && this.name !== name)
605
- throw new Error("cannot change name");
606
- this.name = name;
607
- try {
608
- if (meta) {
609
- if (this.indexHead && this.indexHead.map((c) => c.toString()).join() !== meta.head.map((c) => c.toString()).join()) {
610
- throw new Error("cannot apply meta to existing index");
611
- }
612
- if (this.mapFnString) {
613
- if (this.mapFnString !== meta.map) {
614
- console.log(
615
- "cannot apply different mapFn meta: old mapFnString",
616
- this.mapFnString,
617
- "new mapFnString",
618
- meta.map
619
- );
620
- } else {
621
- this.byId.cid = meta.byId;
622
- this.byKey.cid = meta.byKey;
623
- this.indexHead = meta.head;
624
- }
625
- } else {
626
- this.mapFnString = meta.map;
627
- this.byId.cid = meta.byId;
628
- this.byKey.cid = meta.byKey;
629
- this.indexHead = meta.head;
630
- }
631
- } else {
632
- if (this.mapFn) {
633
- if (mapFn) {
634
- if (this.mapFn.toString() !== mapFn.toString())
635
- throw new Error("cannot apply different mapFn app2");
636
- }
637
- } else {
638
- if (!mapFn) {
639
- mapFn = (doc) => doc[name] ?? void 0;
640
- }
641
- if (this.mapFnString) {
642
- if (this.mapFnString !== mapFn.toString())
643
- throw new Error("cannot apply different mapFn app");
644
- } else {
645
- this.mapFnString = mapFn.toString();
646
- }
647
- this.mapFn = mapFn;
648
- }
649
- }
650
- const matches = /=>\s*(.*)/.test(this.mapFnString);
651
- this.includeDocsDefault = matches;
652
- } catch (e) {
653
- this.initError = e;
654
- }
655
- }
656
- async query(opts = {}) {
657
- await this._updateIndex();
658
- await this._hydrateIndex();
659
- if (!this.byKey.root)
660
- return await applyQuery(this.crdt, { result: [] }, opts);
661
- if (this.includeDocsDefault && opts.includeDocs === void 0)
662
- opts.includeDocs = true;
663
- if (opts.range) {
664
- const { result: result2, ...all2 } = await this.byKey.root.range(...encodeRange(opts.range));
665
- return await applyQuery(this.crdt, { result: result2, ...all2 }, opts);
666
- }
667
- if (opts.key) {
668
- const encodedKey = encodeKey(opts.key);
669
- return await applyQuery(this.crdt, await this.byKey.root.get(encodedKey), opts);
670
- }
671
- if (Array.isArray(opts.keys)) {
672
- const results = await Promise.all(
673
- opts.keys.map(async (key) => {
674
- const encodedKey = encodeKey(key);
675
- return (await applyQuery(this.crdt, await this.byKey.root.get(encodedKey), opts)).rows;
676
- })
677
- );
678
- return { rows: results.flat() };
679
- }
680
- if (opts.prefix) {
681
- if (!Array.isArray(opts.prefix))
682
- opts.prefix = [opts.prefix];
683
- const start = [...opts.prefix, NaN];
684
- const end = [...opts.prefix, Infinity];
685
- const encodedR = encodeRange([start, end]);
686
- return await applyQuery(this.crdt, await this.byKey.root.range(...encodedR), opts);
687
- }
688
- const { result, ...all } = await this.byKey.root.getAllEntries();
689
- return await applyQuery(
690
- this.crdt,
691
- {
692
- result: result.map(({ key: [k, id], value }) => ({ key: k, id, value })),
693
- ...all
694
- },
695
- opts
696
- );
697
- }
698
- _resetIndex() {
699
- this.byId = new IndexTree();
700
- this.byKey = new IndexTree();
701
- this.indexHead = void 0;
702
- }
703
- async _hydrateIndex() {
704
- if (this.byId.root && this.byKey.root)
705
- return;
706
- if (!this.byId.cid || !this.byKey.cid)
707
- return;
708
- this.byId.root = await loadIndex(this.blockstore, this.byId.cid, byIdOpts);
709
- this.byKey.root = await loadIndex(this.blockstore, this.byKey.cid, byKeyOpts);
710
- }
711
- async _updateIndex() {
712
- await this.ready;
713
- if (this.initError)
714
- throw this.initError;
715
- if (!this.mapFn)
716
- throw new Error("No map function defined");
717
- let result, head;
718
- if (!this.indexHead || this.indexHead.length === 0) {
719
- ;
720
- ({ result, head } = await this.crdt.allDocs());
721
- } else {
722
- ;
723
- ({ result, head } = await this.crdt.changes(this.indexHead));
724
- }
725
- if (result.length === 0) {
726
- this.indexHead = head;
727
- return { byId: this.byId, byKey: this.byKey };
728
- }
729
- let staleKeyIndexEntries = [];
730
- let removeIdIndexEntries = [];
731
- if (this.byId.root) {
732
- const removeIds = result.map(({ key }) => key);
733
- const { result: oldChangeEntries } = await this.byId.root.getMany(removeIds);
734
- staleKeyIndexEntries = oldChangeEntries.map((key) => ({ key, del: true }));
735
- removeIdIndexEntries = oldChangeEntries.map((key) => ({ key: key[1], del: true }));
736
- }
737
- const indexEntries = indexEntriesForChanges(result, this.mapFn);
738
- const byIdIndexEntries = indexEntries.map(({ key }) => ({
739
- key: key[1],
740
- value: key
741
- }));
742
- const indexerMeta = { indexes: /* @__PURE__ */ new Map() };
743
- for (const [name, indexer] of this.crdt.indexers) {
744
- if (indexer.indexHead) {
745
- indexerMeta.indexes.set(name, {
746
- byId: indexer.byId.cid,
747
- byKey: indexer.byKey.cid,
748
- head: indexer.indexHead,
749
- map: indexer.mapFnString,
750
- name: indexer.name
751
- });
752
- }
753
- }
754
- return await this.blockstore.transaction(async (tblocks) => {
755
- this.byId = await bulkIndex(
756
- tblocks,
757
- this.byId,
758
- removeIdIndexEntries.concat(byIdIndexEntries),
759
- byIdOpts
760
- );
761
- this.byKey = await bulkIndex(
762
- tblocks,
763
- this.byKey,
764
- staleKeyIndexEntries.concat(indexEntries),
765
- byKeyOpts
766
- );
767
- this.indexHead = head;
768
- const idxMeta = {
769
- byId: this.byId.cid,
770
- byKey: this.byKey.cid,
771
- head,
772
- map: this.mapFnString,
773
- name: this.name
774
- };
775
- indexerMeta.indexes.set(this.name, idxMeta);
776
- return indexerMeta;
777
- });
778
- }
779
- };
780
-
781
- // src/crdt-clock.ts
782
- var import_clock2 = require("@web3-storage/pail/clock");
783
- var import_crdt2 = require("@web3-storage/pail/crdt");
784
-
785
- // src/apply-head-queue.ts
786
- function applyHeadQueue(worker) {
787
- const queue = [];
788
- let isProcessing = false;
789
- async function* process() {
790
- if (isProcessing || queue.length === 0)
791
- return;
792
- isProcessing = true;
793
- const allUpdates = [];
794
- try {
795
- while (queue.length > 0) {
796
- queue.sort((a, b) => b.updates ? 1 : -1);
797
- const task = queue.shift();
798
- if (!task)
799
- continue;
800
- await worker(task.newHead, task.prevHead, task.updates !== null).catch((e) => {
801
- console.error("int_applyHead worker error", e);
802
- throw e;
803
- });
804
- if (task.updates) {
805
- allUpdates.push(...task.updates);
806
- }
807
- if (!queue.some((t) => t.updates) || task.updates) {
808
- const allTasksHaveUpdates = queue.every((task2) => task2.updates !== null);
809
- yield { updates: allUpdates, all: allTasksHaveUpdates };
810
- allUpdates.length = 0;
811
- }
812
- }
813
- } finally {
814
- isProcessing = false;
815
- const generator = process();
816
- let result = await generator.next();
817
- while (!result.done) {
818
- result = await generator.next();
819
- }
820
- }
821
- }
822
- return {
823
- push(task) {
824
- queue.push(task);
825
- return process();
826
- },
827
- size() {
828
- return queue.length;
829
- }
830
- };
831
- }
832
-
833
- // src/crdt-clock.ts
834
var CRDTClock = class {
  // Tracks the current CRDT clock head (an array of event CIDs) and fans out
  // change notifications to subscribers. All head advances are funneled
  // through applyHeadQueue so int_applyHead runs one task at a time.
  // todo: track local and remote clocks independently, merge on read
  // that way we can drop the whole remote if we need to
  // should go with making sure the local clock only references locally available blockstore on write
  head = [];
  // callbacks invoked on every processed head change — see onZoom
  zoomers = /* @__PURE__ */ new Set();
  // callbacks that receive the list of changed documents — see onTick
  watchers = /* @__PURE__ */ new Set();
  // callbacks notified without a payload — see onTock
  emptyWatchers = /* @__PURE__ */ new Set();
  // assigned by the owner after construction; int_applyHead throws if unset
  blockstore = null;
  applyHeadQueue;
  constructor() {
    this.applyHeadQueue = applyHeadQueue(this.int_applyHead.bind(this));
  }
  setHead(head) {
    this.head = head;
  }
  // Queue a head advance and, for each yielded batch, notify subscribers.
  // `updates = null` marks an advance with no local document updates.
  async applyHead(newHead, prevHead, updates = null) {
    for await (const { updates: updatesAcc, all } of this.applyHeadQueue.push({
      newHead,
      prevHead,
      updates
    })) {
      // NOTE(review): intentionally not awaited here — notification appears
      // to be fire-and-forget; confirm before changing.
      this.processUpdates(updatesAcc, all, prevHead);
    }
  }
  async processUpdates(updatesAcc, all, prevHead) {
    let internalUpdates = updatesAcc;
    // If some queued tasks carried no updates (`!all`) and anyone is
    // watching, recompute the change set from the clock instead of trusting
    // the partial accumulator.
    if (this.watchers.size && !all) {
      const changes = await clockChangesSince(this.blockstore, this.head, prevHead, {});
      internalUpdates = changes.result;
    }
    this.zoomers.forEach((fn) => fn());
    this.notifyWatchers(internalUpdates || []);
  }
  notifyWatchers(updates) {
    this.emptyWatchers.forEach((fn) => fn());
    this.watchers.forEach((fn) => fn(updates || []));
  }
  // Subscribe to document-update notifications.
  onTick(fn) {
    this.watchers.add(fn);
  }
  // Subscribe to head changes without an update payload.
  onTock(fn) {
    this.emptyWatchers.add(fn);
  }
  // Subscribe to head-change events (used to invalidate derived state).
  onZoom(fn) {
    this.zoomers.add(fn);
  }
  // applyHeadQueue worker: merge newHead into this.head via the pail clock,
  // persisting any new clock blocks inside a blockstore transaction.
  async int_applyHead(newHead, prevHead, localUpdates) {
    const ogHead = sortClockHead(this.head);
    newHead = sortClockHead(newHead);
    // already at this head — nothing to do
    if (compareClockHeads(ogHead, newHead)) {
      return;
    }
    const ogPrev = sortClockHead(prevHead);
    // fast-forward: current head equals the task's previous head
    if (compareClockHeads(ogHead, ogPrev)) {
      this.setHead(newHead);
      return;
    }
    let head = this.head;
    // advances with no local updates skip the loader
    const noLoader = !localUpdates;
    if (!this.blockstore)
      throw new Error("missing blockstore");
    await validateBlocks(newHead, this.blockstore);
    await this.blockstore.transaction(
      async (tblocks) => {
        head = await advanceBlocks(newHead, tblocks, head);
        const result = await (0, import_crdt2.root)(tblocks, head);
        // persist blocks added/removed while recomputing the CRDT root
        for (const { cid, bytes } of [...result.additions, ...result.removals]) {
          tblocks.putSync(cid, bytes);
        }
        return { head };
      },
      { noLoader }
    );
    this.setHead(head);
  }
};
911
function sortClockHead(clockHead) {
  // Return the head CIDs in a canonical order (lexicographic by string form)
  // so that two heads can be compared by value.
  // Sort a copy: Array.prototype.sort mutates in place, and callers pass
  // live arrays such as `this.head` — the original silently reordered them.
  // All call sites use only the return value, so copying is safe.
  return [...clockHead].sort((a, b) => a.toString().localeCompare(b.toString()));
}
914
async function validateBlocks(newHead, blockstore) {
  // Ensure every CID in the incoming head is present in the blockstore,
  // rejecting with "int_applyHead missing block: <cid>" on the first gap.
  // Await all lookups: the original fired the async callbacks via `.map`
  // without awaiting, so missing-block errors were lost as unhandled
  // rejections and validation never actually failed the caller.
  await Promise.all(
    newHead.map(async (cid) => {
      const got = await blockstore.get(cid);
      if (!got) {
        throw new Error("int_applyHead missing block: " + cid.toString());
      }
    })
  );
}
922
function compareClockHeads(head1, head2) {
  // Two clock heads are considered equal when their string renderings match
  // (assumes both were put in canonical order via sortClockHead first).
  const leftKey = head1.toString();
  const rightKey = head2.toString();
  return leftKey === rightKey;
}
925
async function advanceBlocks(newHead, tblocks, head) {
  // Best-effort clock merge: advance the head once per incoming CID.
  let merged = head;
  for (const cid of newHead) {
    try {
      merged = await (0, import_clock2.advance)(tblocks, merged, cid);
    } catch (e) {
      // deliberately best-effort: a CID that cannot be advanced is skipped
      // and the remaining CIDs are still applied
    }
  }
  return merged;
}
935
-
936
- // src/crdt.ts
937
var CRDT = class {
  // Core CRDT document store: wraps two EncryptedBlockstores (data + index),
  // a CRDTClock for head tracking, and the pail CRDT helpers for reads/writes.
  name;
  opts = {};
  // resolves once both blockstores are ready; awaited by every public method
  ready;
  blockstore;
  indexBlockstore;
  // named indexes registered elsewhere; reset whenever the clock zooms
  indexers = /* @__PURE__ */ new Map();
  clock = new CRDTClock();
  constructor(name, opts) {
    this.name = name || null;
    this.opts = opts || this.opts;
    this.blockstore = new import_encrypted_blockstore2.EncryptedBlockstore({
      name,
      // incoming meta carries a head to merge into our clock
      applyMeta: async (meta) => {
        const crdtMeta = meta;
        await this.clock.applyHead(crdtMeta.head, []);
      },
      compact: async (blocks) => {
        await doCompact(blocks, this.clock.head);
        return { head: this.clock.head };
      },
      // compaction threshold; defaults to 100 when unset (or 0 — `||`)
      autoCompact: this.opts.autoCompact || 100,
      crypto: this.opts.crypto || crypto,
      store: this.opts.store || store,
      public: this.opts.public,
      meta: this.opts.meta
    });
    this.clock.blockstore = this.blockstore;
    this.indexBlockstore = new import_encrypted_blockstore2.EncryptedBlockstore({
      // indexes persist under "<name>.idx" only when persistIndexes is set
      name: this.opts.persistIndexes && this.name ? this.name + ".idx" : void 0,
      applyMeta: async (meta) => {
        const idxCarMeta = meta;
        // re-register each persisted index definition by name
        for (const [name2, idx] of Object.entries(idxCarMeta.indexes)) {
          index({ _crdt: this }, name2, void 0, idx);
        }
      },
      crypto,
      public: this.opts.public,
      store
    });
    this.ready = Promise.all([this.blockstore.ready, this.indexBlockstore.ready]).then(() => {
    });
    // any processed head change invalidates all derived indexes
    this.clock.onZoom(() => {
      for (const idx of this.indexers.values()) {
        idx._resetIndex();
      }
    });
  }
  // Apply a batch of {key, value, del?} updates in one transaction, then
  // advance the clock head; returns the transaction meta (new head).
  async bulk(updates) {
    await this.ready;
    const prevHead = [...this.clock.head];
    const meta = await this.blockstore.transaction(
      async (blocks) => {
        const { head } = await applyBulkUpdateToCrdt(blocks, this.clock.head, updates);
        updates = updates.map(({ key, value, del, clock }) => {
          // hand each written doc to the file processor (not awaited)
          readFiles(this.blockstore, { doc: value });
          return { key, value, del, clock };
        });
        return { head };
      }
    );
    await this.clock.applyHead(meta.head, prevHead, updates);
    return meta;
  }
  // if (snap) await this.clock.applyHead(crdtMeta.head, this.clock.head)
  // Collect every entry at the current head.
  async allDocs() {
    await this.ready;
    const result = [];
    for await (const entry of getAllEntries(this.blockstore, this.clock.head)) {
      result.push(entry);
    }
    return { result, head: this.clock.head };
  }
  // Render the clock history as newline-joined text.
  async vis() {
    await this.ready;
    const txt = [];
    for await (const line of clockVis(this.blockstore, this.clock.head)) {
      txt.push(line);
    }
    return txt.join("\n");
  }
  async getBlock(cidString) {
    await this.ready;
    return await getBlock(this.blockstore, cidString);
  }
  // Read one value by key; deleted entries resolve to null.
  async get(key) {
    await this.ready;
    const result = await getValueFromCrdt(this.blockstore, this.clock.head, key);
    if (result.del)
      return null;
    return result;
  }
  async changes(since = [], opts = {}) {
    await this.ready;
    return await clockChangesSince(this.blockstore, this.clock.head, since, opts);
  }
  async compact() {
    return await this.blockstore.compact();
  }
};
1037
-
1038
- // src/database.ts
1039
var Database = class {
  // Public document-database facade over a CRDT instance: write queue,
  // subscriptions, and document-shaped ({ _id, ... }) results.
  // instance cache keyed by name; shared with the fireproof() factory
  static databases = /* @__PURE__ */ new Map();
  name;
  opts = {};
  // whether the clock onTick watcher has been attached (lazy, once)
  _listening = false;
  // subscribers that receive changed docs
  _listeners = /* @__PURE__ */ new Set();
  // subscribers notified with an empty payload on head changes
  _noupdate_listeners = /* @__PURE__ */ new Set();
  _crdt;
  _writeQueue;
  blockstore;
  constructor(name, opts) {
    this.name = name || null;
    this.opts = opts || this.opts;
    this._crdt = new CRDT(name, this.opts);
    this.blockstore = this._crdt.blockstore;
    // serialize writes through the CRDT bulk API
    this._writeQueue = writeQueue(async (updates) => {
      return await this._crdt.bulk(updates);
    });
    this._crdt.clock.onTock(() => {
      this._no_update_notify();
    });
  }
  // Fetch one document by id; throws "Not found: <id>" when absent.
  async get(id) {
    const got = await this._crdt.get(id).catch((e) => {
      e.message = `Not found: ${id} - ` + e.message;
      throw e;
    });
    if (!got)
      throw new Error(`Not found: ${id}`);
    const { doc } = got;
    return { _id: id, ...doc };
  }
  // Insert or update a document; generates a uuidv7 id when _id is absent.
  async put(doc) {
    const { _id, ...value } = doc;
    const docId = _id || (0, import_uuidv7.uuidv7)();
    const result = await this._writeQueue.push({ key: docId, value });
    return { id: docId, clock: result?.head };
  }
  async del(id) {
    const result = await this._writeQueue.push({ key: id, del: true });
    return { id, clock: result?.head };
  }
  // Changes since a clock; deleted docs appear as { _id, _deleted: true }.
  async changes(since = [], opts = {}) {
    const { result, head } = await this._crdt.changes(since, opts);
    const rows = result.map(({ key, value, del, clock }) => ({
      key,
      value: del ? { _id: key, _deleted: true } : { _id: key, ...value },
      clock
    }));
    return { rows, clock: head };
  }
  async allDocs() {
    const { result, head } = await this._crdt.allDocs();
    const rows = result.map(({ key, value, del }) => ({
      key,
      value: del ? { _id: key, _deleted: true } : { _id: key, ...value }
    }));
    return { rows, clock: head };
  }
  // alias for allDocs
  async allDocuments() {
    return this.allDocs();
  }
  // Subscribe to changes; returns an unsubscribe function. With `updates`
  // truthy the listener receives changed docs; otherwise it is notified
  // with no payload via the clock's tock events.
  subscribe(listener, updates) {
    if (updates) {
      if (!this._listening) {
        this._listening = true;
        this._crdt.clock.onTick((updates2) => {
          void this._notify(updates2);
        });
      }
      this._listeners.add(listener);
      return () => {
        this._listeners.delete(listener);
      };
    } else {
      this._noupdate_listeners.add(listener);
      return () => {
        this._noupdate_listeners.delete(listener);
      };
    }
  }
  // todo if we add this onto dbs in fireproof.ts then we can make index.ts a separate package
  // Query by field name (string) or map function; creates/reuses an index.
  async query(field, opts = {}) {
    const idx = typeof field === "string" ? index({ _crdt: this._crdt }, field) : index({ _crdt: this._crdt }, makeName(field.toString()), field);
    return await idx.query(opts);
  }
  async compact() {
    await this._crdt.compact();
  }
  // Invoke doc listeners sequentially; listener errors are logged, not thrown.
  async _notify(updates) {
    if (this._listeners.size) {
      const docs = updates.map(({ key, value }) => ({ _id: key, ...value }));
      for (const listener of this._listeners) {
        await (async () => await listener(docs))().catch((e) => {
          console.error("subscriber error", e);
        });
      }
    }
  }
  async _no_update_notify() {
    if (this._noupdate_listeners.size) {
      for (const listener of this._noupdate_listeners) {
        await (async () => await listener([]))().catch((e) => {
          console.error("subscriber error", e);
        });
      }
    }
  }
};
1148
function fireproof(name, opts) {
  // Factory: return the cached Database for this name, creating it on first
  // use. Note that `opts` only takes effect on the call that creates the
  // instance; later calls return the cached one unchanged.
  const existing = Database.databases.get(name);
  if (existing) {
    return existing;
  }
  const db = new Database(name, opts);
  Database.databases.set(name, db);
  return db;
}
1154
function makeName(fnString) {
  // Derive an index name from a map function's source text. A function whose
  // parameter list contains two parameters (or a destructuring pattern plus
  // a parameter), e.g. "(doc, emit)", keeps its full source as the name;
  // otherwise a single-parameter arrow is named by its body expression.
  const paramPairRe = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g;
  const paramPairs = Array.from(fnString.matchAll(paramPairRe), (m) => m[1].trim());
  if (paramPairs.length > 0) {
    return fnString;
  }
  const arrowBody = /=>\s*(.*)/.exec(fnString);
  return arrowBody ? arrowBody[1] : fnString;
}
1167
// Annotate the CommonJS export names for ESM import in node:
// The `0 &&` expression below is dead code at runtime (never evaluated);
// Node's static CJS export scanner reads it to expose `Database` and
// `fireproof` as named ESM imports.
0 && (module.exports = {
  Database,
  fireproof
});
//# sourceMappingURL=fireproof.cjs.map