@fireproof/core 0.18.0 → 0.19.4-dev

Sign up to get free protection for your applications and to get access to all the features.
Files changed (67) hide show
  1. package/README.md +29 -15
  2. package/chunk-7OGPZSGT.js +39 -0
  3. package/chunk-7OGPZSGT.js.map +1 -0
  4. package/chunk-H3A2HMMM.js +164 -0
  5. package/chunk-H3A2HMMM.js.map +1 -0
  6. package/chunk-HCXR2M5B.js +202 -0
  7. package/chunk-HCXR2M5B.js.map +1 -0
  8. package/chunk-QHSXUST7.js +208 -0
  9. package/chunk-QHSXUST7.js.map +1 -0
  10. package/chunk-VZGT7ZYP.js +22 -0
  11. package/chunk-VZGT7ZYP.js.map +1 -0
  12. package/index.cjs +4649 -0
  13. package/index.cjs.map +1 -0
  14. package/index.d.cts +911 -0
  15. package/index.d.ts +911 -0
  16. package/index.js +2923 -0
  17. package/index.js.map +1 -0
  18. package/metafile-cjs.json +1 -0
  19. package/metafile-esm.json +1 -0
  20. package/node-sys-container-E7LADX2Z.js +29 -0
  21. package/node-sys-container-E7LADX2Z.js.map +1 -0
  22. package/package.json +19 -109
  23. package/sqlite-data-store-YS4U7AQ4.js +120 -0
  24. package/sqlite-data-store-YS4U7AQ4.js.map +1 -0
  25. package/sqlite-meta-store-FJZSZG4R.js +137 -0
  26. package/sqlite-meta-store-FJZSZG4R.js.map +1 -0
  27. package/sqlite-wal-store-6JZ4URNS.js +123 -0
  28. package/sqlite-wal-store-6JZ4URNS.js.map +1 -0
  29. package/store-file-HMHPQTUV.js +193 -0
  30. package/store-file-HMHPQTUV.js.map +1 -0
  31. package/store-indexdb-MRVZG4OG.js +20 -0
  32. package/store-indexdb-MRVZG4OG.js.map +1 -0
  33. package/store-sql-5XMJ5OWJ.js +406 -0
  34. package/store-sql-5XMJ5OWJ.js.map +1 -0
  35. package/dist/browser/fireproof.cjs +0 -1172
  36. package/dist/browser/fireproof.cjs.map +0 -1
  37. package/dist/browser/fireproof.d.cts +0 -268
  38. package/dist/browser/fireproof.d.ts +0 -268
  39. package/dist/browser/fireproof.global.js +0 -24178
  40. package/dist/browser/fireproof.global.js.map +0 -1
  41. package/dist/browser/fireproof.js +0 -1147
  42. package/dist/browser/fireproof.js.map +0 -1
  43. package/dist/browser/metafile-cjs.json +0 -1
  44. package/dist/browser/metafile-esm.json +0 -1
  45. package/dist/browser/metafile-iife.json +0 -1
  46. package/dist/memory/fireproof.cjs +0 -1172
  47. package/dist/memory/fireproof.cjs.map +0 -1
  48. package/dist/memory/fireproof.d.cts +0 -268
  49. package/dist/memory/fireproof.d.ts +0 -268
  50. package/dist/memory/fireproof.global.js +0 -24178
  51. package/dist/memory/fireproof.global.js.map +0 -1
  52. package/dist/memory/fireproof.js +0 -1147
  53. package/dist/memory/fireproof.js.map +0 -1
  54. package/dist/memory/metafile-cjs.json +0 -1
  55. package/dist/memory/metafile-esm.json +0 -1
  56. package/dist/memory/metafile-iife.json +0 -1
  57. package/dist/node/fireproof.cjs +0 -1172
  58. package/dist/node/fireproof.cjs.map +0 -1
  59. package/dist/node/fireproof.d.cts +0 -268
  60. package/dist/node/fireproof.d.ts +0 -268
  61. package/dist/node/fireproof.global.js +0 -38540
  62. package/dist/node/fireproof.global.js.map +0 -1
  63. package/dist/node/fireproof.js +0 -1138
  64. package/dist/node/fireproof.js.map +0 -1
  65. package/dist/node/metafile-cjs.json +0 -1
  66. package/dist/node/metafile-esm.json +0 -1
  67. package/dist/node/metafile-iife.json +0 -1
@@ -1,1172 +0,0 @@
1
- "use strict";
2
- var __create = Object.create;
3
- var __defProp = Object.defineProperty;
4
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
- var __getOwnPropNames = Object.getOwnPropertyNames;
6
- var __getProtoOf = Object.getPrototypeOf;
7
- var __hasOwnProp = Object.prototype.hasOwnProperty;
8
- var __export = (target, all) => {
9
- for (var name in all)
10
- __defProp(target, name, { get: all[name], enumerable: true });
11
- };
12
- var __copyProps = (to, from, except, desc) => {
13
- if (from && typeof from === "object" || typeof from === "function") {
14
- for (let key of __getOwnPropNames(from))
15
- if (!__hasOwnProp.call(to, key) && key !== except)
16
- __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
17
- }
18
- return to;
19
- };
20
- var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
- // If the importer is in node compatibility mode or this is not an ESM
22
- // file that has been converted to a CommonJS file using a Babel-
23
- // compatible transform (i.e. "__esModule" has not been set), then set
24
- // "default" to the CommonJS "module.exports" for node compatibility.
25
- isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
- mod
27
- ));
28
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
-
30
- // src/fireproof.ts
31
- var fireproof_exports = {};
32
- __export(fireproof_exports, {
33
- Database: () => Database,
34
- fireproof: () => fireproof
35
- });
36
- module.exports = __toCommonJS(fireproof_exports);
37
-
38
- // src/database.ts
39
- var import_uuidv7 = require("uuidv7");
40
-
41
- // src/write-queue.ts
42
// src/write-queue.ts
// Serializes calls to `worker`. Tasks are queued FIFO and drained in batches
// of up to `payload`. In `unbounded` mode each update goes to the worker in
// its own call (all in flight at once); otherwise the whole batch is handed
// over in one call and every task in it settles with that call's outcome.
function writeQueue(worker, payload = Infinity, unbounded = false) {
  const pending = [];
  let draining = false;

  // Drain loop: pull the next batch, run it through the worker, settle the
  // associated promises, then re-schedule to pick up anything that arrived.
  async function drain() {
    if (draining || pending.length === 0) return;
    draining = true;
    const batch = pending.splice(0, payload);
    const updates = batch.map((entry) => entry.task);
    if (unbounded) {
      // One worker call per update; each task resolves/rejects independently.
      await Promise.all(
        updates.map(async (update, i) => {
          try {
            batch[i].resolve(await worker([update]));
          } catch (err) {
            batch[i].reject(err);
          }
        })
      );
    } else {
      // Single worker call for the whole batch; all tasks share its result.
      try {
        const result = await worker(updates);
        for (const entry of batch) entry.resolve(result);
      } catch (err) {
        for (const entry of batch) entry.reject(err);
      }
    }
    draining = false;
    void drain();
  }

  return {
    // Enqueue one task; resolves with the worker's result for its batch.
    push(task) {
      return new Promise((resolve, reject) => {
        pending.push({ task, resolve, reject });
        void drain();
      });
    }
  };
}
81
-
82
- // src/crdt.ts
83
- var import_encrypted_blockstore2 = require("@fireproof/encrypted-blockstore");
84
-
85
- // src/eb-web.ts
86
- var crypto = __toESM(require("@fireproof/encrypted-blockstore/crypto-web"), 1);
87
- var import_store_web = require("@fireproof/encrypted-blockstore/store-web");
88
- var store = {
89
- makeDataStore: import_store_web.makeDataStore,
90
- makeMetaStore: import_store_web.makeMetaStore,
91
- makeRemoteWAL: import_store_web.makeRemoteWAL
92
- };
93
-
94
- // src/crdt-helpers.ts
95
- var import_block = require("multiformats/block");
96
- var import_link = require("multiformats/link");
97
- var import_sha2 = require("multiformats/hashes/sha2");
98
- var codec = __toESM(require("@ipld/dag-cbor"), 1);
99
- var import_crdt = require("@web3-storage/pail/crdt");
100
- var import_clock = require("@web3-storage/pail/clock");
101
- var Batch = __toESM(require("@web3-storage/pail/crdt/batch"), 1);
102
- var import_encrypted_blockstore = require("@fireproof/encrypted-blockstore");
103
-
104
- // src/files.ts
105
- var UnixFS = __toESM(require("@ipld/unixfs"), 1);
106
- var raw = __toESM(require("multiformats/codecs/raw"), 1);
107
- var import_fixed = require("@ipld/unixfs/file/chunker/fixed");
108
- var import_balanced = require("@ipld/unixfs/file/layout/balanced");
109
- var import_ipfs_unixfs_exporter = require("ipfs-unixfs-exporter");
110
- var queuingStrategy = UnixFS.withCapacity();
111
- var settings = UnixFS.configure({
112
- fileChunkEncoder: raw,
113
- smallFileEncoder: raw,
114
- // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call
115
- chunker: (0, import_fixed.withMaxChunkSize)(1024 * 1024),
116
- // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call
117
- fileLayout: (0, import_balanced.withWidth)(1024)
118
- });
119
- async function encodeFile(blob) {
120
- const readable = createFileEncoderStream(blob);
121
- const blocks = await collect(readable);
122
- return { cid: blocks.at(-1).cid, blocks };
123
- }
124
- async function decodeFile(blocks, cid, meta) {
125
- const entry = await (0, import_ipfs_unixfs_exporter.exporter)(cid.toString(), blocks, { length: meta.size });
126
- const chunks = [];
127
- for await (const chunk of entry.content())
128
- chunks.push(chunk);
129
- return new File(chunks, entry.name, { type: meta.type, lastModified: 0 });
130
- }
131
- function createFileEncoderStream(blob) {
132
- const { readable, writable } = new TransformStream({}, queuingStrategy);
133
- const unixfsWriter = UnixFS.createWriter({ writable, settings });
134
- const fileBuilder = new UnixFSFileBuilder("", blob);
135
- void (async () => {
136
- await fileBuilder.finalize(unixfsWriter);
137
- await unixfsWriter.close();
138
- })();
139
- return readable;
140
- }
141
// Drains a ReadableStream-like `collectable` into an array of its chunks.
async function collect(collectable) {
  const out = [];
  const sink = new WritableStream({
    write(chunk) {
      out.push(chunk);
    }
  });
  await collectable.pipeTo(sink);
  return out;
}
152
- var UnixFSFileBuilder = class {
153
- #file;
154
- name;
155
- constructor(name, file) {
156
- this.name = name;
157
- this.#file = file;
158
- }
159
- async finalize(writer) {
160
- const unixfsFileWriter = UnixFS.createFileWriter(writer);
161
- await this.#file.stream().pipeTo(
162
- new WritableStream({
163
- async write(chunk) {
164
- await unixfsFileWriter.write(chunk);
165
- }
166
- })
167
- );
168
- return await unixfsFileWriter.close();
169
- }
170
- };
171
-
172
- // src/crdt-helpers.ts
173
// src/crdt-helpers.ts
// Timing stubs: intentionally empty in this build. The `tag` parameter is
// kept so call sites need no changes.
function time(tag) {
  // no-op
}
function timeEnd(tag) {
  // no-op
}
177
- async function applyBulkUpdateToCrdt(tblocks, head, updates) {
178
- let result = null;
179
- if (updates.length > 1) {
180
- const batch = await Batch.create(tblocks, head);
181
- for (const update of updates) {
182
- const link = await writeDocContent(tblocks, update);
183
- await batch.put(update.key, link);
184
- }
185
- result = await batch.commit();
186
- } else {
187
- for (const update of updates) {
188
- const link = await writeDocContent(tblocks, update);
189
- result = await (0, import_crdt.put)(tblocks, head, update.key, link);
190
- const resRoot = result.root.toString();
191
- const isReturned = result.additions.some((a) => a.cid.toString() === resRoot);
192
- if (!isReturned) {
193
- const hasRoot = await tblocks.get(result.root);
194
- if (!hasRoot) {
195
- throw new Error(
196
- `missing root in additions: ${result.additions.length} ${resRoot} keys: ${updates.map((u) => u.key).toString()}`
197
- );
198
- }
199
- }
200
- }
201
- }
202
- if (!result)
203
- throw new Error("Missing result");
204
- if (result.event) {
205
- for (const { cid, bytes } of [...result.additions, ...result.removals, result.event]) {
206
- tblocks.putSync(cid, bytes);
207
- }
208
- }
209
- return { head: result.head };
210
- }
211
- async function writeDocContent(blocks, update) {
212
- let value;
213
- if (update.del) {
214
- value = { del: true };
215
- } else {
216
- await processFiles(blocks, update.value);
217
- value = { doc: update.value };
218
- }
219
- const block = await (0, import_block.encode)({ value, hasher: import_sha2.sha256, codec });
220
- blocks.putSync(block.cid, block.bytes);
221
- return block.cid;
222
- }
223
- async function processFiles(blocks, doc) {
224
- if (doc._files) {
225
- await processFileset(blocks, doc._files);
226
- }
227
- if (doc._publicFiles) {
228
- await processFileset(blocks, doc._publicFiles, true);
229
- }
230
- }
231
- async function processFileset(blocks, files, publicFiles = false) {
232
- const dbBlockstore = blocks.parent;
233
- const t = new import_encrypted_blockstore.CarTransaction(dbBlockstore);
234
- const didPut = [];
235
- for (const filename in files) {
236
- if (File === files[filename].constructor) {
237
- const file = files[filename];
238
- const { cid, blocks: fileBlocks } = await encodeFile(file);
239
- didPut.push(filename);
240
- for (const block of fileBlocks) {
241
- t.putSync(block.cid, block.bytes);
242
- }
243
- files[filename] = { cid, type: file.type, size: file.size };
244
- } else {
245
- const { cid, type, size, car } = files[filename];
246
- if (cid && type && size && car) {
247
- files[filename] = { cid, type, size, car };
248
- }
249
- }
250
- }
251
- if (didPut.length) {
252
- const car = await dbBlockstore.loader?.commitFiles(t, { files }, {
253
- public: publicFiles
254
- });
255
- if (car) {
256
- for (const name of didPut) {
257
- files[name] = { car, ...files[name] };
258
- }
259
- }
260
- }
261
- }
262
- async function getValueFromCrdt(blocks, head, key) {
263
- if (!head.length)
264
- throw new Error("Getting from an empty database");
265
- const link = await (0, import_crdt.get)(blocks, head, key);
266
- if (!link)
267
- throw new Error(`Missing key ${key}`);
268
- return await getValueFromLink(blocks, link);
269
- }
270
- function readFiles(blocks, { doc }) {
271
- if (!doc)
272
- return;
273
- if (doc._files) {
274
- readFileset(blocks, doc._files);
275
- }
276
- if (doc._publicFiles) {
277
- readFileset(blocks, doc._publicFiles, true);
278
- }
279
- }
280
- function readFileset(blocks, files, isPublic = false) {
281
- for (const filename in files) {
282
- const fileMeta = files[filename];
283
- if (fileMeta.cid) {
284
- if (isPublic) {
285
- fileMeta.url = `https://${fileMeta.cid.toString()}.ipfs.w3s.link/`;
286
- }
287
- if (fileMeta.car) {
288
- fileMeta.file = async () => await decodeFile(
289
- {
290
- get: async (cid) => {
291
- return await blocks.getFile(fileMeta.car, cid, isPublic);
292
- }
293
- },
294
- fileMeta.cid,
295
- fileMeta
296
- );
297
- }
298
- }
299
- files[filename] = fileMeta;
300
- }
301
- }
302
- async function getValueFromLink(blocks, link) {
303
- const block = await blocks.get(link);
304
- if (!block)
305
- throw new Error(`Missing linked block ${link.toString()}`);
306
- const { value } = await (0, import_block.decode)({ bytes: block.bytes, hasher: import_sha2.sha256, codec });
307
- value.cid = link;
308
- readFiles(blocks, value);
309
- return value;
310
- }
311
- var DirtyEventFetcher = class extends import_clock.EventFetcher {
312
- // @ts-ignore
313
- async get(link) {
314
- try {
315
- return await super.get(link);
316
- } catch (e) {
317
- console.error("missing event", link.toString(), e);
318
- return { value: null };
319
- }
320
- }
321
- };
322
- async function clockChangesSince(blocks, head, since, opts) {
323
- const eventsFetcher = opts.dirty ? new DirtyEventFetcher(blocks) : new import_clock.EventFetcher(blocks);
324
- const keys = /* @__PURE__ */ new Set();
325
- const updates = await gatherUpdates(
326
- blocks,
327
- eventsFetcher,
328
- head,
329
- since,
330
- [],
331
- keys,
332
- /* @__PURE__ */ new Set(),
333
- opts.limit || Infinity
334
- );
335
- return { result: updates.reverse(), head };
336
- }
337
// Walks the clock DAG from `head` back toward `since`, collecting the newest
// update per key (at most `limit` in total). `keys` dedupes keys across the
// whole recursive walk; `didLinks` prevents revisiting shared ancestor events.
// Returns the accumulated `updates` array (newest-first within each event).
async function gatherUpdates(blocks, eventsFetcher, head, since, updates = [], keys, didLinks, limit) {
  if (limit <= 0)
    return updates;
  const sHead = head.map((l) => l.toString());
  // Stop as soon as we reach a head the caller has already seen.
  for (const link of since) {
    if (sHead.includes(link.toString())) {
      return updates;
    }
  }
  for (const link of head) {
    // bug fix: `limit` was decremented below but only checked on function
    // entry, so one call could collect far more than `limit` entries.
    if (limit <= 0)
      break;
    if (didLinks.has(link.toString()))
      continue;
    didLinks.add(link.toString());
    const { value: event } = await eventsFetcher.get(link);
    if (!event)
      continue;
    const { type } = event.data;
    let ops = [];
    if (type === "batch") {
      ops = event.data.ops;
    } else if (type === "put") {
      ops = [event.data];
    }
    // Newest op wins per key, so scan each event's ops in reverse.
    for (let i = ops.length - 1; i >= 0; i--) {
      if (limit <= 0)
        break; // bug fix: stop collecting once the limit is spent
      const { key, value } = ops[i];
      if (!keys.has(key)) {
        const docValue = await getValueFromLink(blocks, value);
        updates.push({ key, value: docValue.doc, del: docValue.del, clock: link });
        limit--;
        keys.add(key);
      }
    }
    if (event.parents) {
      updates = await gatherUpdates(
        blocks,
        eventsFetcher,
        event.parents,
        since,
        updates,
        keys,
        didLinks,
        limit
      );
    }
  }
  return updates;
}
384
- async function* getAllEntries(blocks, head) {
385
- for await (const [key, link] of (0, import_crdt.entries)(blocks, head)) {
386
- const docValue = await getValueFromLink(blocks, link);
387
- yield { key, value: docValue.doc, del: docValue.del };
388
- }
389
- }
390
- async function* clockVis(blocks, head) {
391
- for await (const line of (0, import_clock.vis)(blocks, head)) {
392
- yield line;
393
- }
394
- }
395
- var isCompacting = false;
396
- async function doCompact(blockLog, head) {
397
- if (isCompacting) {
398
- return;
399
- }
400
- isCompacting = true;
401
- time("compact head");
402
- for (const cid of head) {
403
- const bl = await blockLog.get(cid);
404
- if (!bl)
405
- throw new Error("Missing head block: " + cid.toString());
406
- }
407
- timeEnd("compact head");
408
- time("compact all entries");
409
- for await (const _entry of getAllEntries(blockLog, head)) {
410
- }
411
- timeEnd("compact all entries");
412
- time("compact clock vis");
413
- for await (const _line of (0, import_clock.vis)(blockLog, head)) {
414
- }
415
- timeEnd("compact clock vis");
416
- time("compact root");
417
- const result = await (0, import_crdt.root)(blockLog, head);
418
- timeEnd("compact root");
419
- time("compact root blocks");
420
- for (const { cid, bytes } of [...result.additions, ...result.removals]) {
421
- blockLog.loggedBlocks.putSync(cid, bytes);
422
- }
423
- timeEnd("compact root blocks");
424
- time("compact changes");
425
- await clockChangesSince(blockLog, head, [], {});
426
- timeEnd("compact changes");
427
- isCompacting = false;
428
- }
429
- async function getBlock(blocks, cidString) {
430
- const block = await blocks.get((0, import_link.parse)(cidString));
431
- if (!block)
432
- throw new Error(`Missing block ${cidString}`);
433
- const { cid, value } = await (0, import_block.decode)({ bytes: block.bytes, codec, hasher: import_sha2.sha256 });
434
- return new import_block.Block({ cid, value, bytes: block.bytes });
435
- }
436
-
437
- // src/indexer-helpers.ts
438
- var import_block2 = require("multiformats/block");
439
- var import_sha22 = require("multiformats/hashes/sha2");
440
- var codec2 = __toESM(require("@ipld/dag-cbor"), 1);
441
- var import_charwise = __toESM(require("charwise"), 1);
442
- var DbIndex = __toESM(require("prolly-trees/db-index"), 1);
443
- var import_utils = require("prolly-trees/utils");
444
- var import_cache = require("prolly-trees/cache");
445
- var IndexTree = class {
446
- cid = null;
447
- root = null;
448
- };
449
// Orders the "ref" component of index keys. NaN on the left sorts before
// everything (range-start sentinel) while NaN on the right is rejected;
// Infinity on the left sorts after everything (range-end sentinel).
var refCompare = (aRef, bRef) => {
  if (Number.isNaN(aRef)) {
    return -1;
  }
  if (Number.isNaN(bRef)) {
    throw new Error("ref may not be Infinity or NaN");
  }
  if (aRef === Infinity) {
    return 1;
  }
  return (0, import_utils.simpleCompare)(aRef, bRef);
};
458
// Composite index-key comparator: compare the encoded key parts first and
// fall back to the ref parts only on a tie.
var compare = (a, b) => {
  const [aKey, aRef] = a;
  const [bKey, bRef] = b;
  const keyOrder = (0, import_utils.simpleCompare)(aKey, bKey);
  return keyOrder !== 0 ? keyOrder : refCompare(aRef, bRef);
};
466
- var byKeyOpts = { cache: import_cache.nocache, chunker: (0, import_utils.bf)(30), codec: codec2, hasher: import_sha22.sha256, compare };
467
- var byIdOpts = { cache: import_cache.nocache, chunker: (0, import_utils.bf)(30), codec: codec2, hasher: import_sha22.sha256, compare: import_utils.simpleCompare };
468
- function indexEntriesForChanges(changes, mapFn) {
469
- const indexEntries = [];
470
- changes.forEach(({ key: _id, value, del }) => {
471
- if (del || !value)
472
- return;
473
- let mapCalled = false;
474
- const mapReturn = mapFn({ _id, ...value }, (k, v) => {
475
- mapCalled = true;
476
- if (typeof k === "undefined")
477
- return;
478
- indexEntries.push({
479
- // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-call
480
- key: [import_charwise.default.encode(k), _id],
481
- value: v || null
482
- });
483
- });
484
- if (!mapCalled && mapReturn) {
485
- indexEntries.push({
486
- // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-call
487
- key: [import_charwise.default.encode(mapReturn), _id],
488
- value: null
489
- });
490
- }
491
- });
492
- return indexEntries;
493
- }
494
- function makeProllyGetBlock(blocks) {
495
- return async (address) => {
496
- const block = await blocks.get(address);
497
- if (!block)
498
- throw new Error(`Missing block ${address.toString()}`);
499
- const { cid, bytes } = block;
500
- return (0, import_block2.create)({ cid, bytes, hasher: import_sha22.sha256, codec: codec2 });
501
- };
502
- }
503
- async function bulkIndex(tblocks, inIndex, indexEntries, opts) {
504
- if (!indexEntries.length)
505
- return inIndex;
506
- if (!inIndex.root) {
507
- if (!inIndex.cid) {
508
- let returnRootBlock = null;
509
- let returnNode = null;
510
- for await (const node of await DbIndex.create({ get: makeProllyGetBlock(tblocks), list: indexEntries, ...opts })) {
511
- const block = await node.block;
512
- await tblocks.put(block.cid, block.bytes);
513
- returnRootBlock = block;
514
- returnNode = node;
515
- }
516
- if (!returnNode || !returnRootBlock)
517
- throw new Error("failed to create index");
518
- return { root: returnNode, cid: returnRootBlock.cid };
519
- } else {
520
- inIndex.root = await DbIndex.load({ cid: inIndex.cid, get: makeProllyGetBlock(tblocks), ...opts });
521
- }
522
- }
523
- const { root: root3, blocks: newBlocks } = await inIndex.root.bulk(indexEntries);
524
- if (root3) {
525
- for await (const block of newBlocks) {
526
- await tblocks.put(block.cid, block.bytes);
527
- }
528
- return { root: root3, cid: (await root3.block).cid };
529
- } else {
530
- return { root: null, cid: null };
531
- }
532
- }
533
- async function loadIndex(tblocks, cid, opts) {
534
- return await DbIndex.load({ cid, get: makeProllyGetBlock(tblocks), ...opts });
535
- }
536
- async function applyQuery(crdt, resp, query) {
537
- if (query.descending) {
538
- resp.result = resp.result.reverse();
539
- }
540
- if (query.limit) {
541
- resp.result = resp.result.slice(0, query.limit);
542
- }
543
- if (query.includeDocs) {
544
- resp.result = await Promise.all(
545
- resp.result.map(async (row) => {
546
- const val = await crdt.get(row.id);
547
- const doc = val ? { _id: row.id, ...val.doc } : null;
548
- return { ...row, doc };
549
- })
550
- );
551
- }
552
- return {
553
- rows: resp.result.map((row) => {
554
- row.key = import_charwise.default.decode(row.key);
555
- if (row.row && !row.value) {
556
- row.value = row.row;
557
- delete row.row;
558
- }
559
- return row;
560
- })
561
- };
562
- }
563
// Charwise-encode every bound of a query range.
function encodeRange(range) {
  return Array.from(range, (bound) => import_charwise.default.encode(bound));
}
// Charwise-encode a single query key.
function encodeKey(key) {
  return import_charwise.default.encode(key);
}
569
-
570
- // src/index.ts
571
- function index({ _crdt }, name, mapFn, meta) {
572
- if (mapFn && meta)
573
- throw new Error("cannot provide both mapFn and meta");
574
- if (mapFn && mapFn.constructor.name !== "Function")
575
- throw new Error("mapFn must be a function");
576
- if (_crdt.indexers.has(name)) {
577
- const idx = _crdt.indexers.get(name);
578
- idx.applyMapFn(name, mapFn, meta);
579
- } else {
580
- const idx = new Index(_crdt, name, mapFn, meta);
581
- _crdt.indexers.set(name, idx);
582
- }
583
- return _crdt.indexers.get(name);
584
- }
585
- var Index = class {
586
- blockstore;
587
- crdt;
588
- name = null;
589
- mapFn = null;
590
- mapFnString = "";
591
- byKey = new IndexTree();
592
- byId = new IndexTree();
593
- indexHead = void 0;
594
- includeDocsDefault = false;
595
- initError = null;
596
- ready;
597
- constructor(crdt, name, mapFn, meta) {
598
- this.blockstore = crdt.indexBlockstore;
599
- this.crdt = crdt;
600
- this.applyMapFn(name, mapFn, meta);
601
- if (!(this.mapFnString || this.initError))
602
- throw new Error("missing mapFnString");
603
- this.ready = this.blockstore.ready.then(() => {
604
- });
605
- }
606
- applyMapFn(name, mapFn, meta) {
607
- if (mapFn && meta)
608
- throw new Error("cannot provide both mapFn and meta");
609
- if (this.name && this.name !== name)
610
- throw new Error("cannot change name");
611
- this.name = name;
612
- try {
613
- if (meta) {
614
- if (this.indexHead && this.indexHead.map((c) => c.toString()).join() !== meta.head.map((c) => c.toString()).join()) {
615
- throw new Error("cannot apply meta to existing index");
616
- }
617
- if (this.mapFnString) {
618
- if (this.mapFnString !== meta.map) {
619
- console.log(
620
- "cannot apply different mapFn meta: old mapFnString",
621
- this.mapFnString,
622
- "new mapFnString",
623
- meta.map
624
- );
625
- } else {
626
- this.byId.cid = meta.byId;
627
- this.byKey.cid = meta.byKey;
628
- this.indexHead = meta.head;
629
- }
630
- } else {
631
- this.mapFnString = meta.map;
632
- this.byId.cid = meta.byId;
633
- this.byKey.cid = meta.byKey;
634
- this.indexHead = meta.head;
635
- }
636
- } else {
637
- if (this.mapFn) {
638
- if (mapFn) {
639
- if (this.mapFn.toString() !== mapFn.toString())
640
- throw new Error("cannot apply different mapFn app2");
641
- }
642
- } else {
643
- if (!mapFn) {
644
- mapFn = (doc) => doc[name] ?? void 0;
645
- }
646
- if (this.mapFnString) {
647
- if (this.mapFnString !== mapFn.toString())
648
- throw new Error("cannot apply different mapFn app");
649
- } else {
650
- this.mapFnString = mapFn.toString();
651
- }
652
- this.mapFn = mapFn;
653
- }
654
- }
655
- const matches = /=>\s*(.*)/.test(this.mapFnString);
656
- this.includeDocsDefault = matches;
657
- } catch (e) {
658
- this.initError = e;
659
- }
660
- }
661
- async query(opts = {}) {
662
- await this._updateIndex();
663
- await this._hydrateIndex();
664
- if (!this.byKey.root)
665
- return await applyQuery(this.crdt, { result: [] }, opts);
666
- if (this.includeDocsDefault && opts.includeDocs === void 0)
667
- opts.includeDocs = true;
668
- if (opts.range) {
669
- const { result: result2, ...all2 } = await this.byKey.root.range(...encodeRange(opts.range));
670
- return await applyQuery(this.crdt, { result: result2, ...all2 }, opts);
671
- }
672
- if (opts.key) {
673
- const encodedKey = encodeKey(opts.key);
674
- return await applyQuery(this.crdt, await this.byKey.root.get(encodedKey), opts);
675
- }
676
- if (Array.isArray(opts.keys)) {
677
- const results = await Promise.all(
678
- opts.keys.map(async (key) => {
679
- const encodedKey = encodeKey(key);
680
- return (await applyQuery(this.crdt, await this.byKey.root.get(encodedKey), opts)).rows;
681
- })
682
- );
683
- return { rows: results.flat() };
684
- }
685
- if (opts.prefix) {
686
- if (!Array.isArray(opts.prefix))
687
- opts.prefix = [opts.prefix];
688
- const start = [...opts.prefix, NaN];
689
- const end = [...opts.prefix, Infinity];
690
- const encodedR = encodeRange([start, end]);
691
- return await applyQuery(this.crdt, await this.byKey.root.range(...encodedR), opts);
692
- }
693
- const { result, ...all } = await this.byKey.root.getAllEntries();
694
- return await applyQuery(
695
- this.crdt,
696
- {
697
- result: result.map(({ key: [k, id], value }) => ({ key: k, id, value })),
698
- ...all
699
- },
700
- opts
701
- );
702
- }
703
- _resetIndex() {
704
- this.byId = new IndexTree();
705
- this.byKey = new IndexTree();
706
- this.indexHead = void 0;
707
- }
708
- async _hydrateIndex() {
709
- if (this.byId.root && this.byKey.root)
710
- return;
711
- if (!this.byId.cid || !this.byKey.cid)
712
- return;
713
- this.byId.root = await loadIndex(this.blockstore, this.byId.cid, byIdOpts);
714
- this.byKey.root = await loadIndex(this.blockstore, this.byKey.cid, byKeyOpts);
715
- }
716
- async _updateIndex() {
717
- await this.ready;
718
- if (this.initError)
719
- throw this.initError;
720
- if (!this.mapFn)
721
- throw new Error("No map function defined");
722
- let result, head;
723
- if (!this.indexHead || this.indexHead.length === 0) {
724
- ;
725
- ({ result, head } = await this.crdt.allDocs());
726
- } else {
727
- ;
728
- ({ result, head } = await this.crdt.changes(this.indexHead));
729
- }
730
- if (result.length === 0) {
731
- this.indexHead = head;
732
- return { byId: this.byId, byKey: this.byKey };
733
- }
734
- let staleKeyIndexEntries = [];
735
- let removeIdIndexEntries = [];
736
- if (this.byId.root) {
737
- const removeIds = result.map(({ key }) => key);
738
- const { result: oldChangeEntries } = await this.byId.root.getMany(removeIds);
739
- staleKeyIndexEntries = oldChangeEntries.map((key) => ({ key, del: true }));
740
- removeIdIndexEntries = oldChangeEntries.map((key) => ({ key: key[1], del: true }));
741
- }
742
- const indexEntries = indexEntriesForChanges(result, this.mapFn);
743
- const byIdIndexEntries = indexEntries.map(({ key }) => ({
744
- key: key[1],
745
- value: key
746
- }));
747
- const indexerMeta = { indexes: /* @__PURE__ */ new Map() };
748
- for (const [name, indexer] of this.crdt.indexers) {
749
- if (indexer.indexHead) {
750
- indexerMeta.indexes.set(name, {
751
- byId: indexer.byId.cid,
752
- byKey: indexer.byKey.cid,
753
- head: indexer.indexHead,
754
- map: indexer.mapFnString,
755
- name: indexer.name
756
- });
757
- }
758
- }
759
- return await this.blockstore.transaction(async (tblocks) => {
760
- this.byId = await bulkIndex(
761
- tblocks,
762
- this.byId,
763
- removeIdIndexEntries.concat(byIdIndexEntries),
764
- byIdOpts
765
- );
766
- this.byKey = await bulkIndex(
767
- tblocks,
768
- this.byKey,
769
- staleKeyIndexEntries.concat(indexEntries),
770
- byKeyOpts
771
- );
772
- this.indexHead = head;
773
- const idxMeta = {
774
- byId: this.byId.cid,
775
- byKey: this.byKey.cid,
776
- head,
777
- map: this.mapFnString,
778
- name: this.name
779
- };
780
- indexerMeta.indexes.set(this.name, idxMeta);
781
- return indexerMeta;
782
- });
783
- }
784
- };
785
-
786
- // src/crdt-clock.ts
787
- var import_clock2 = require("@web3-storage/pail/clock");
788
- var import_crdt2 = require("@web3-storage/pail/crdt");
789
-
790
- // src/apply-head-queue.ts
791
// src/apply-head-queue.ts
// Serializes calls to the clock-head `worker`. Tasks carrying document
// updates are sorted toward the front of the queue, and batches of applied
// updates are yielded back to the caller as they land.
function applyHeadQueue(worker) {
  const tasks = [];
  let busy = false;

  async function* process() {
    if (busy || tasks.length === 0) return;
    busy = true;
    const gathered = [];
    try {
      while (tasks.length > 0) {
        // Move tasks that carry updates ahead of pure head-merge tasks.
        tasks.sort((a, b) => (b.updates ? 1 : -1));
        const task = tasks.shift();
        if (!task) continue;
        await worker(task.newHead, task.prevHead, task.updates !== null).catch((e) => {
          console.error("int_applyHead worker error", e);
          throw e;
        });
        if (task.updates) {
          gathered.push(...task.updates);
        }
        // Flush when this task carried updates, or when nothing left in the
        // queue does. NOTE: the yielded `updates` array is reused — consumers
        // must copy it before resuming the generator.
        const queueHasUpdates = tasks.some((t) => t.updates);
        if (task.updates || !queueHasUpdates) {
          const all = tasks.every((t) => t.updates !== null);
          yield { updates: gathered, all };
          gathered.length = 0;
        }
      }
    } finally {
      busy = false;
      // Drain anything that was pushed while we were yielding.
      const tail = process();
      let step = await tail.next();
      while (!step.done) {
        step = await tail.next();
      }
    }
  }

  return {
    // Enqueue a task and return the generator that drains the queue.
    push(task) {
      tasks.push(task);
      return process();
    },
    // Number of tasks still waiting.
    size() {
      return tasks.length;
    }
  };
}
837
-
838
- // src/crdt-clock.ts
839
// src/crdt-clock.ts
var CRDTClock = class {
  // todo: track local and remote clocks independently, merge on read
  // that way we can drop the whole remote if we need to
  // should go with making sure the local clock only references locally available blockstore on write
  head = [];
  zoomers = /* @__PURE__ */ new Set();
  watchers = /* @__PURE__ */ new Set();
  emptyWatchers = /* @__PURE__ */ new Set();
  blockstore = null;
  applyHeadQueue;
  constructor() {
    // The queue serializes head application; bind keeps `this` for the worker.
    this.applyHeadQueue = applyHeadQueue(this.int_applyHead.bind(this));
  }
  setHead(head) {
    this.head = head;
  }
  // Push a head change through the queue and notify listeners per batch.
  async applyHead(newHead, prevHead, updates = null) {
    const task = { newHead, prevHead, updates };
    for await (const batch of this.applyHeadQueue.push(task)) {
      // intentionally not awaited: notification must not block the drain
      this.processUpdates(batch.updates, batch.all, prevHead);
    }
  }
  async processUpdates(updatesAcc, all, prevHead) {
    let internalUpdates = updatesAcc;
    if (this.watchers.size && !all) {
      // Some queued tasks carried no updates; recompute the delta from the clock.
      const changes = await clockChangesSince(this.blockstore, this.head, prevHead, {});
      internalUpdates = changes.result;
    }
    this.zoomers.forEach((fn) => fn());
    this.notifyWatchers(internalUpdates || []);
  }
  notifyWatchers(updates) {
    this.emptyWatchers.forEach((fn) => fn());
    this.watchers.forEach((fn) => fn(updates || []));
  }
  // Listeners that receive the changed entries.
  onTick(fn) {
    this.watchers.add(fn);
  }
  // Listeners that fire on clock activity with no payload.
  onTock(fn) {
    this.emptyWatchers.add(fn);
  }
  // Listeners fired whenever the head is replaced (indexes reset on zoom).
  onZoom(fn) {
    this.zoomers.add(fn);
  }
  // Queue worker: merge newHead into our head inside a blockstore transaction.
  async int_applyHead(newHead, prevHead, localUpdates) {
    const ourHead = sortClockHead(this.head);
    newHead = sortClockHead(newHead);
    // Already at this head: nothing to do.
    if (compareClockHeads(ourHead, newHead)) {
      return;
    }
    const sortedPrev = sortClockHead(prevHead);
    // Fast-forward: the caller advanced from exactly our current head.
    if (compareClockHeads(ourHead, sortedPrev)) {
      this.setHead(newHead);
      return;
    }
    let head = this.head;
    const noLoader = !localUpdates;
    if (!this.blockstore)
      throw new Error("missing blockstore");
    await validateBlocks(newHead, this.blockstore);
    await this.blockstore.transaction(
      async (tblocks) => {
        head = await advanceBlocks(newHead, tblocks, head);
        const result = await (0, import_crdt2.root)(tblocks, head);
        for (const { cid, bytes } of [...result.additions, ...result.removals]) {
          tblocks.putSync(cid, bytes);
        }
        return { head };
      },
      { noLoader }
    );
    this.setHead(head);
  }
};
916
// Sort a clock head in place into canonical order by each CID's string
// form, so two heads can be compared via their serialization. Returns the
// same (mutated) array.
function sortClockHead(clockHead) {
  const byCidString = (left, right) => left.toString().localeCompare(right.toString());
  return clockHead.sort(byCidString);
}
919
// Ensure every CID in newHead resolves in the blockstore before the clock
// advances. Throws if any block is missing.
// BUG FIX: the original fired the per-CID lookups via Array.prototype.map
// with async callbacks but never awaited the resulting promises, so a
// missing block produced an unhandled rejection while the caller proceeded
// as if validation had passed. Await them all so failures propagate.
async function validateBlocks(newHead, blockstore) {
  await Promise.all(
    newHead.map(async (cid) => {
      const got = await blockstore.get(cid);
      if (!got) {
        throw new Error("int_applyHead missing block: " + cid.toString());
      }
    })
  );
}
927
// Two heads are considered equal when their serialized CID lists match.
// Assumes both heads are already in canonical order (see sortClockHead).
function compareClockHeads(head1, head2) {
  return String(head1) === String(head2);
}
930
// Fold each CID of newHead into the clock via pail's advance(). A CID that
// fails to apply is skipped — errors are deliberately swallowed so one bad
// entry does not block the rest of the head.
async function advanceBlocks(newHead, tblocks, head) {
  for (const cid of newHead) {
    try {
      head = await (0, import_clock2.advance)(tblocks, head, cid);
    } catch {
      // best-effort: an unresolvable CID simply doesn't advance the head
    }
  }
  return head;
}
940
-
941
- // src/crdt.ts
942
// src/crdt.ts
var CRDT = class {
  name;
  opts = {};
  ready;
  blockstore;
  indexBlockstore;
  indexers = /* @__PURE__ */ new Map();
  clock = new CRDTClock();
  constructor(name, opts) {
    this.name = name || null;
    this.opts = opts || this.opts;
    // Primary data blockstore. Remote/loaded metadata is folded into the
    // clock via applyMeta; compaction rewrites blocks for the current head.
    this.blockstore = new import_encrypted_blockstore2.EncryptedBlockstore({
      name,
      applyMeta: async (meta) => {
        const crdtMeta = meta;
        await this.clock.applyHead(crdtMeta.head, []);
      },
      compact: async (blocks) => {
        await doCompact(blocks, this.clock.head);
        return { head: this.clock.head };
      },
      autoCompact: this.opts.autoCompact || 100,
      crypto: this.opts.crypto || crypto,
      store: this.opts.store || store,
      public: this.opts.public,
      meta: this.opts.meta
    });
    this.clock.blockstore = this.blockstore;
    // Separate blockstore for index data; it only gets a persistent name
    // when persistIndexes is enabled for a named database.
    this.indexBlockstore = new import_encrypted_blockstore2.EncryptedBlockstore({
      name: this.opts.persistIndexes && this.name ? this.name + ".idx" : void 0,
      applyMeta: async (meta) => {
        const idxCarMeta = meta;
        for (const [idxName, idx] of Object.entries(idxCarMeta.indexes)) {
          index({ _crdt: this }, idxName, void 0, idx);
        }
      },
      crypto,
      public: this.opts.public,
      store
    });
    this.ready = Promise.all([this.blockstore.ready, this.indexBlockstore.ready]).then(() => {
    });
    // A zoom (head replacement) invalidates every derived index.
    this.clock.onZoom(() => {
      for (const idx of this.indexers.values()) {
        idx._resetIndex();
      }
    });
  }
  // Apply a batch of document writes inside one transaction, then publish
  // the resulting head to the clock.
  async bulk(updates) {
    await this.ready;
    const prevHead = [...this.clock.head];
    const meta = await this.blockstore.transaction(
      async (blocks) => {
        const { head } = await applyBulkUpdateToCrdt(blocks, this.clock.head, updates);
        updates = updates.map(({ key, value, del, clock }) => {
          // Register inline file attachments on the doc with the blockstore.
          readFiles(this.blockstore, { doc: value });
          return { key, value, del, clock };
        });
        return { head };
      }
    );
    await this.clock.applyHead(meta.head, prevHead, updates);
    return meta;
  }
  // if (snap) await this.clock.applyHead(crdtMeta.head, this.clock.head)
  // Materialize every entry reachable from the current head.
  async allDocs() {
    await this.ready;
    const result = [];
    for await (const entry of getAllEntries(this.blockstore, this.clock.head)) {
      result.push(entry);
    }
    return { result, head: this.clock.head };
  }
  // Render a textual visualization of the clock for debugging.
  async vis() {
    await this.ready;
    const lines = [];
    for await (const line of clockVis(this.blockstore, this.clock.head)) {
      lines.push(line);
    }
    return lines.join("\n");
  }
  async getBlock(cidString) {
    await this.ready;
    return await getBlock(this.blockstore, cidString);
  }
  // Read one value at the current head; tombstoned entries come back null.
  async get(key) {
    await this.ready;
    const result = await getValueFromCrdt(this.blockstore, this.clock.head, key);
    return result.del ? null : result;
  }
  async changes(since = [], opts = {}) {
    await this.ready;
    return await clockChangesSince(this.blockstore, this.clock.head, since, opts);
  }
  async compact() {
    return await this.blockstore.compact();
  }
};
1042
-
1043
- // src/database.ts
1044
// src/database.ts
var Database = class {
  static databases = /* @__PURE__ */ new Map();
  name;
  opts = {};
  _listening = false;
  _listeners = /* @__PURE__ */ new Set();
  _noupdate_listeners = /* @__PURE__ */ new Set();
  _crdt;
  _writeQueue;
  blockstore;
  constructor(name, opts) {
    this.name = name || null;
    this.opts = opts || this.opts;
    this._crdt = new CRDT(name, this.opts);
    this.blockstore = this._crdt.blockstore;
    // All writes funnel through one queue so bulk() calls never interleave.
    this._writeQueue = writeQueue(async (updates) => this._crdt.bulk(updates));
    // "Tock" fires on clock activity that carried no document updates.
    this._crdt.clock.onTock(() => {
      this._no_update_notify();
    });
  }
  // Fetch one document by id; throws `Not found: <id>` when absent.
  async get(id) {
    const got = await this._crdt.get(id).catch((e) => {
      e.message = `Not found: ${id} - ` + e.message;
      throw e;
    });
    if (!got)
      throw new Error(`Not found: ${id}`);
    return { _id: id, ...got.doc };
  }
  // Write a document; mints a fresh uuidv7 when `_id` is missing.
  async put(doc) {
    const { _id, ...value } = doc;
    const key = _id || (0, import_uuidv7.uuidv7)();
    const result = await this._writeQueue.push({ key, value });
    return { id: key, clock: result?.head };
  }
  // Tombstone a document by id.
  async del(id) {
    const result = await this._writeQueue.push({ key: id, del: true });
    return { id, clock: result?.head };
  }
  // Changes feed since a clock; deletions surface as `_deleted` stubs.
  async changes(since = [], opts = {}) {
    const { result, head } = await this._crdt.changes(since, opts);
    const rows = result.map(({ key, value, del, clock }) => ({
      key,
      value: del ? { _id: key, _deleted: true } : { _id: key, ...value },
      clock
    }));
    return { rows, clock: head };
  }
  async allDocs() {
    const { result, head } = await this._crdt.allDocs();
    const rows = result.map(({ key, value, del }) => ({
      key,
      value: del ? { _id: key, _deleted: true } : { _id: key, ...value }
    }));
    return { rows, clock: head };
  }
  // Alias kept for compatibility with older callers.
  async allDocuments() {
    return this.allDocs();
  }
  // Register a listener and return its unsubscribe function. With `updates`
  // truthy the listener receives changed docs; otherwise it fires with no
  // payload on empty clock ticks.
  subscribe(listener, updates) {
    if (!updates) {
      this._noupdate_listeners.add(listener);
      return () => {
        this._noupdate_listeners.delete(listener);
      };
    }
    if (!this._listening) {
      // Attach the clock watcher lazily, on the first updates-subscriber.
      this._listening = true;
      this._crdt.clock.onTick((clockUpdates) => {
        void this._notify(clockUpdates);
      });
    }
    this._listeners.add(listener);
    return () => {
      this._listeners.delete(listener);
    };
  }
  // todo if we add this onto dbs in fireproof.ts then we can make index.ts a separate package
  async query(field, opts = {}) {
    let idx;
    if (typeof field === "string") {
      idx = index({ _crdt: this._crdt }, field);
    } else {
      idx = index({ _crdt: this._crdt }, makeName(field.toString()), field);
    }
    return await idx.query(opts);
  }
  async compact() {
    await this._crdt.compact();
  }
  // Deliver changed docs to subscribers; listener errors are logged, never thrown.
  async _notify(updates) {
    if (!this._listeners.size)
      return;
    const docs = updates.map(({ key, value }) => ({ _id: key, ...value }));
    for (const listener of this._listeners) {
      try {
        await listener(docs);
      } catch (e) {
        console.error("subscriber error", e);
      }
    }
  }
  async _no_update_notify() {
    if (!this._noupdate_listeners.size)
      return;
    for (const listener of this._noupdate_listeners) {
      try {
        await listener([]);
      } catch (e) {
        console.error("subscriber error", e);
      }
    }
  }
};
1153
// Factory/registry entry point: return the Database registered under `name`,
// creating it on first use. Note `opts` only applies when the database is
// first created; later calls with different opts reuse the existing instance.
function fireproof(name, opts) {
  let db = Database.databases.get(name);
  if (!db) {
    db = new Database(name, opts);
    Database.databases.set(name, db);
  }
  return db;
}
1159
// Derive an index name from a map-function's source text. A function that
// takes two arguments (e.g. `(doc, emit) => ...`) keeps its full source as
// the name; a single-argument arrow is named by its body expression; anything
// else falls back to the full source string.
function makeName(fnString) {
  const twoArgPattern = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g;
  const twoArgMatches = Array.from(fnString.matchAll(twoArgPattern), (m) => m[1].trim());
  if (twoArgMatches.length > 0) {
    return fnString;
  }
  const arrowBody = /=>\s*(.*)/.exec(fnString);
  return arrowBody ? arrowBody[1] : fnString;
}
1172
- //# sourceMappingURL=fireproof.cjs.map