@fireproof/core 0.18.0 → 0.19.4-dev

Sign up to get free protection for your applications and to get access to all the features.
Files changed (67) hide show
  1. package/README.md +29 -15
  2. package/chunk-7OGPZSGT.js +39 -0
  3. package/chunk-7OGPZSGT.js.map +1 -0
  4. package/chunk-H3A2HMMM.js +164 -0
  5. package/chunk-H3A2HMMM.js.map +1 -0
  6. package/chunk-HCXR2M5B.js +202 -0
  7. package/chunk-HCXR2M5B.js.map +1 -0
  8. package/chunk-QHSXUST7.js +208 -0
  9. package/chunk-QHSXUST7.js.map +1 -0
  10. package/chunk-VZGT7ZYP.js +22 -0
  11. package/chunk-VZGT7ZYP.js.map +1 -0
  12. package/index.cjs +4649 -0
  13. package/index.cjs.map +1 -0
  14. package/index.d.cts +911 -0
  15. package/index.d.ts +911 -0
  16. package/index.js +2923 -0
  17. package/index.js.map +1 -0
  18. package/metafile-cjs.json +1 -0
  19. package/metafile-esm.json +1 -0
  20. package/node-sys-container-E7LADX2Z.js +29 -0
  21. package/node-sys-container-E7LADX2Z.js.map +1 -0
  22. package/package.json +19 -109
  23. package/sqlite-data-store-YS4U7AQ4.js +120 -0
  24. package/sqlite-data-store-YS4U7AQ4.js.map +1 -0
  25. package/sqlite-meta-store-FJZSZG4R.js +137 -0
  26. package/sqlite-meta-store-FJZSZG4R.js.map +1 -0
  27. package/sqlite-wal-store-6JZ4URNS.js +123 -0
  28. package/sqlite-wal-store-6JZ4URNS.js.map +1 -0
  29. package/store-file-HMHPQTUV.js +193 -0
  30. package/store-file-HMHPQTUV.js.map +1 -0
  31. package/store-indexdb-MRVZG4OG.js +20 -0
  32. package/store-indexdb-MRVZG4OG.js.map +1 -0
  33. package/store-sql-5XMJ5OWJ.js +406 -0
  34. package/store-sql-5XMJ5OWJ.js.map +1 -0
  35. package/dist/browser/fireproof.cjs +0 -1172
  36. package/dist/browser/fireproof.cjs.map +0 -1
  37. package/dist/browser/fireproof.d.cts +0 -268
  38. package/dist/browser/fireproof.d.ts +0 -268
  39. package/dist/browser/fireproof.global.js +0 -24178
  40. package/dist/browser/fireproof.global.js.map +0 -1
  41. package/dist/browser/fireproof.js +0 -1147
  42. package/dist/browser/fireproof.js.map +0 -1
  43. package/dist/browser/metafile-cjs.json +0 -1
  44. package/dist/browser/metafile-esm.json +0 -1
  45. package/dist/browser/metafile-iife.json +0 -1
  46. package/dist/memory/fireproof.cjs +0 -1172
  47. package/dist/memory/fireproof.cjs.map +0 -1
  48. package/dist/memory/fireproof.d.cts +0 -268
  49. package/dist/memory/fireproof.d.ts +0 -268
  50. package/dist/memory/fireproof.global.js +0 -24178
  51. package/dist/memory/fireproof.global.js.map +0 -1
  52. package/dist/memory/fireproof.js +0 -1147
  53. package/dist/memory/fireproof.js.map +0 -1
  54. package/dist/memory/metafile-cjs.json +0 -1
  55. package/dist/memory/metafile-esm.json +0 -1
  56. package/dist/memory/metafile-iife.json +0 -1
  57. package/dist/node/fireproof.cjs +0 -1172
  58. package/dist/node/fireproof.cjs.map +0 -1
  59. package/dist/node/fireproof.d.cts +0 -268
  60. package/dist/node/fireproof.d.ts +0 -268
  61. package/dist/node/fireproof.global.js +0 -38540
  62. package/dist/node/fireproof.global.js.map +0 -1
  63. package/dist/node/fireproof.js +0 -1138
  64. package/dist/node/fireproof.js.map +0 -1
  65. package/dist/node/metafile-cjs.json +0 -1
  66. package/dist/node/metafile-esm.json +0 -1
  67. package/dist/node/metafile-iife.json +0 -1
@@ -1,1138 +0,0 @@
1
- // src/database.ts
2
- import { uuidv7 } from "uuidv7";
3
-
4
- // src/write-queue.ts
5
/**
 * Serialize writes through `worker`. Tasks are queued and drained in
 * batches of up to `payload` items; while `unbounded` is true each item is
 * sent to the worker individually (concurrently), otherwise one worker call
 * handles the whole batch and every caller shares its outcome.
 */
function writeQueue(worker, payload = Infinity, unbounded = false) {
  const pending = [];
  let busy = false;
  async function drain() {
    // Only one drain loop at a time; re-entered via the tail call below.
    if (busy || pending.length === 0) return;
    busy = true;
    const batch = pending.splice(0, payload);
    const updates = batch.map((entry) => entry.task);
    if (unbounded) {
      // Fan out: one worker call per update, settling each caller separately.
      await Promise.all(
        updates.map(async (update, i) => {
          try {
            batch[i].resolve(await worker([update]));
          } catch (err) {
            batch[i].reject(err);
          }
        })
      );
    } else {
      // Single worker call for the batch; all callers get the same result.
      try {
        const result = await worker(updates);
        for (const entry of batch) entry.resolve(result);
      } catch (err) {
        for (const entry of batch) entry.reject(err);
      }
    }
    busy = false;
    void drain();
  }
  return {
    push(task) {
      return new Promise((resolve, reject) => {
        pending.push({ task, resolve, reject });
        void drain();
      });
    }
  };
}
44
-
45
- // src/crdt.ts
46
- import {
47
- EncryptedBlockstore
48
- } from "@fireproof/encrypted-blockstore";
49
-
50
- // src/eb-node.ts
51
- import * as crypto from "@fireproof/encrypted-blockstore/crypto-node";
52
- import * as store from "@fireproof/encrypted-blockstore/store-node";
53
-
54
- // src/crdt-helpers.ts
55
- import { encode, decode, Block } from "multiformats/block";
56
- import { parse } from "multiformats/link";
57
- import { sha256 as hasher } from "multiformats/hashes/sha2";
58
- import * as codec from "@ipld/dag-cbor";
59
- import { put, get, entries, root } from "@web3-storage/pail/crdt";
60
- import { EventFetcher, vis } from "@web3-storage/pail/clock";
61
- import * as Batch from "@web3-storage/pail/crdt/batch";
62
- import {
63
- CarTransaction
64
- } from "@fireproof/encrypted-blockstore";
65
-
66
- // src/files.ts
67
- import * as UnixFS from "@ipld/unixfs";
68
- import * as raw from "multiformats/codecs/raw";
69
- import { withMaxChunkSize } from "@ipld/unixfs/file/chunker/fixed";
70
- import { withWidth } from "@ipld/unixfs/file/layout/balanced";
71
- import { exporter } from "ipfs-unixfs-exporter";
72
// UnixFS encoder configuration for turning browser File/Blob objects into
// IPLD blocks: raw leaves, fixed 1 MiB max chunks, width-1024 balanced layout.
var queuingStrategy = UnixFS.withCapacity();
var settings = UnixFS.configure({
  fileChunkEncoder: raw,
  smallFileEncoder: raw,
  // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call
  chunker: withMaxChunkSize(1024 * 1024),
  // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call
  fileLayout: withWidth(1024)
});
81
// Encode a Blob/File into UnixFS blocks. The CID of the last emitted block
// is used as the file's CID.
async function encodeFile(blob) {
  const blocks = await collect(createFileEncoderStream(blob));
  return { cid: blocks.at(-1).cid, blocks };
}
86
// Reassemble a File from its UnixFS blocks using the stored metadata.
async function decodeFile(blocks, cid, meta) {
  const entry = await exporter(cid.toString(), blocks, { length: meta.size });
  const parts = [];
  for await (const part of entry.content()) {
    parts.push(part);
  }
  // lastModified pinned to 0 so reconstruction is deterministic.
  return new File(parts, entry.name, { type: meta.type, lastModified: 0 });
}
93
// Wire a Blob through a UnixFS writer and hand back the readable side, which
// yields the encoded blocks while the caller drains it.
function createFileEncoderStream(blob) {
  const { readable, writable } = new TransformStream({}, queuingStrategy);
  const unixfsWriter = UnixFS.createWriter({ writable, settings });
  const fileBuilder = new UnixFSFileBuilder("", blob);
  // Fire-and-forget: encoding proceeds concurrently with the consumer.
  // NOTE(review): a failure in this IIFE becomes an unhandled rejection —
  // confirm consumers surface stream errors.
  void (async () => {
    await fileBuilder.finalize(unixfsWriter);
    await unixfsWriter.close();
  })();
  return readable;
}
103
// Drain a ReadableStream and return all of its chunks as an array.
async function collect(collectable) {
  const out = [];
  const sink = new WritableStream({
    write(chunk) {
      out.push(chunk);
    }
  });
  await collectable.pipeTo(sink);
  return out;
}
114
// Adapter that streams one Blob/File into a UnixFS file writer.
var UnixFSFileBuilder = class {
  #file;
  name;
  constructor(name, file) {
    this.name = name;
    this.#file = file;
  }
  // Pipe the file's bytes into `writer`; returns the result of closing the
  // per-file writer.
  async finalize(writer) {
    const unixfsFileWriter = UnixFS.createFileWriter(writer);
    await this.#file.stream().pipeTo(
      new WritableStream({
        async write(chunk) {
          await unixfsFileWriter.write(chunk);
        }
      })
    );
    return await unixfsFileWriter.close();
  }
};
133
-
134
- // src/crdt-helpers.ts
135
// No-op timing stubs. doCompact brackets each phase with these, so profiling
// (e.g. console.time/timeEnd) can be re-enabled without touching call sites.
function time(tag) {
}
function timeEnd(tag) {
}
139
// Apply a set of document writes to the CRDT: pail's batch API for multiple
// updates, a single `put` otherwise. Returns the new clock head.
async function applyBulkUpdateToCrdt(tblocks, head, updates) {
  let result = null;
  if (updates.length > 1) {
    const batch = await Batch.create(tblocks, head);
    for (const update of updates) {
      const link = await writeDocContent(tblocks, update);
      await batch.put(update.key, link);
    }
    result = await batch.commit();
  } else {
    for (const update of updates) {
      const link = await writeDocContent(tblocks, update);
      result = await put(tblocks, head, update.key, link);
      // Sanity check: the new root must be reachable — either among the
      // freshly-added blocks or already present in the transaction store.
      const resRoot = result.root.toString();
      const isReturned = result.additions.some((a) => a.cid.toString() === resRoot);
      if (!isReturned) {
        const hasRoot = await tblocks.get(result.root);
        if (!hasRoot) {
          throw new Error(
            `missing root in additions: ${result.additions.length} ${resRoot} keys: ${updates.map((u) => u.key).toString()}`
          );
        }
      }
    }
  }
  if (!result)
    throw new Error("Missing result");
  if (result.event) {
    // Persist every block the CRDT operation produced, plus the clock event.
    for (const { cid, bytes } of [...result.additions, ...result.removals, result.event]) {
      tblocks.putSync(cid, bytes);
    }
  }
  return { head: result.head };
}
173
// Encode one update as a dag-cbor block: `{ del: true }` for deletes,
// `{ doc: value }` otherwise (persisting any attached files first).
// Returns the CID of the stored block.
async function writeDocContent(blocks, update) {
  let value;
  if (update.del) {
    value = { del: true };
  } else {
    await processFiles(blocks, update.value);
    value = { doc: update.value };
  }
  const block = await encode({ value, hasher, codec });
  blocks.putSync(block.cid, block.bytes);
  return block.cid;
}
185
// Persist any filesets attached to the document (private, then public).
async function processFiles(blocks, doc) {
  if (doc._files) await processFileset(blocks, doc._files);
  if (doc._publicFiles) await processFileset(blocks, doc._publicFiles, true);
}
193
// Encode freshly-attached File objects into blocks inside a side
// transaction, commit them via the blockstore's loader, and rewrite each
// entry to its persisted `{ cid, type, size, car }` form.
async function processFileset(blocks, files, publicFiles = false) {
  const dbBlockstore = blocks.parent;
  const t = new CarTransaction(dbBlockstore);
  const didPut = [];
  for (const filename in files) {
    if (File === files[filename].constructor) {
      // Raw browser File: encode and stage its blocks for the commit below.
      const file = files[filename];
      const { cid, blocks: fileBlocks } = await encodeFile(file);
      didPut.push(filename);
      for (const block of fileBlocks) {
        t.putSync(block.cid, block.bytes);
      }
      files[filename] = { cid, type: file.type, size: file.size };
    } else {
      // Already-persisted metadata: keep only the recognized fields.
      const { cid, type, size, car } = files[filename];
      if (cid && type && size && car) {
        files[filename] = { cid, type, size, car };
      }
    }
  }
  if (didPut.length) {
    const car = await dbBlockstore.loader?.commitFiles(t, { files }, {
      public: publicFiles
    });
    if (car) {
      // Stamp the CAR reference onto every entry written in this pass.
      for (const name of didPut) {
        files[name] = { car, ...files[name] };
      }
    }
  }
}
224
// Look up `key` at the given clock head and materialize its stored value.
async function getValueFromCrdt(blocks, head, key) {
  if (!head.length) {
    throw new Error("Getting from an empty database");
  }
  const link = await get(blocks, head, key);
  if (!link) {
    throw new Error(`Missing key ${key}`);
  }
  return getValueFromLink(blocks, link);
}
232
// Hydrate lazy file accessors on a freshly-read document's filesets.
function readFiles(blocks, { doc }) {
  if (!doc) return;
  if (doc._files) readFileset(blocks, doc._files);
  if (doc._publicFiles) readFileset(blocks, doc._publicFiles, true);
}
242
// For each stored file entry, add a w3s gateway URL (public files only) and
// a lazy `file()` loader that decodes the file from its CAR on demand.
function readFileset(blocks, files, isPublic = false) {
  for (const filename in files) {
    const fileMeta = files[filename];
    if (fileMeta.cid) {
      if (isPublic) {
        fileMeta.url = `https://${fileMeta.cid.toString()}.ipfs.w3s.link/`;
      }
      if (fileMeta.car) {
        // Deferred decode: blocks are fetched from the CAR only when called.
        fileMeta.file = async () => await decodeFile(
          {
            get: async (cid) => {
              return await blocks.getFile(fileMeta.car, cid, isPublic);
            }
          },
          fileMeta.cid,
          fileMeta
        );
      }
    }
    files[filename] = fileMeta;
  }
}
264
// Fetch and decode the dag-cbor document block behind `link`, tagging the
// decoded value with its CID and hydrating any file attachments.
async function getValueFromLink(blocks, link) {
  const block = await blocks.get(link);
  if (!block)
    throw new Error(`Missing linked block ${link.toString()}`);
  const { value } = await decode({ bytes: block.bytes, hasher, codec });
  value.cid = link;
  readFiles(blocks, value);
  return value;
}
273
// EventFetcher variant that tolerates missing clock events: instead of
// throwing it logs and yields `{ value: null }`, so "dirty" change walks can
// proceed past gaps in the event log.
var DirtyEventFetcher = class extends EventFetcher {
  // @ts-ignore
  async get(link) {
    try {
      return await super.get(link);
    } catch (e) {
      console.error("missing event", link.toString(), e);
      return { value: null };
    }
  }
};
284
// Collect the per-key updates between `since` and `head`. The walk gathers
// newest-first; the result is reversed so callers see oldest-first.
// opts.dirty tolerates missing events; opts.limit caps collected keys.
async function clockChangesSince(blocks, head, since, opts) {
  const eventsFetcher = opts.dirty ? new DirtyEventFetcher(blocks) : new EventFetcher(blocks);
  const keys = /* @__PURE__ */ new Set();
  const updates = await gatherUpdates(
    blocks,
    eventsFetcher,
    head,
    since,
    [],
    keys,
    /* @__PURE__ */ new Set(),
    opts.limit || Infinity
  );
  return { result: updates.reverse(), head };
}
299
// Depth-first walk of the clock DAG from `head` back toward `since`,
// accumulating the newest write per key. `didLinks` dedupes visited events;
// `limit` caps collected keys but is only checked on entry to each
// recursion, so it is approximate.
async function gatherUpdates(blocks, eventsFetcher, head, since, updates = [], keys, didLinks, limit) {
  if (limit <= 0)
    return updates;
  const sHead = head.map((l) => l.toString());
  for (const link of since) {
    // Stop once the current frontier includes one of the `since` links.
    if (sHead.includes(link.toString())) {
      return updates;
    }
  }
  for (const link of head) {
    if (didLinks.has(link.toString()))
      continue;
    didLinks.add(link.toString());
    const { value: event } = await eventsFetcher.get(link);
    if (!event)
      continue;
    const { type } = event.data;
    let ops = [];
    if (type === "batch") {
      ops = event.data.ops;
    } else if (type === "put") {
      ops = [event.data];
    }
    // Newest op wins per key: iterate ops in reverse and skip seen keys.
    for (let i = ops.length - 1; i >= 0; i--) {
      const { key, value } = ops[i];
      if (!keys.has(key)) {
        const docValue = await getValueFromLink(blocks, value);
        updates.push({ key, value: docValue.doc, del: docValue.del, clock: link });
        limit--;
        keys.add(key);
      }
    }
    if (event.parents) {
      // Recurse toward the DAG roots with the shared accumulators.
      updates = await gatherUpdates(
        blocks,
        eventsFetcher,
        event.parents,
        since,
        updates,
        keys,
        didLinks,
        limit
      );
    }
  }
  return updates;
}
346
// Stream every key in the CRDT at `head` as { key, value, del } rows.
async function* getAllEntries(blocks, head) {
  for await (const [key, link] of entries(blocks, head)) {
    const stored = await getValueFromLink(blocks, link);
    yield { key, value: stored.doc, del: stored.del };
  }
}
352
// Pass through pail's clock visualization lines.
async function* clockVis(blocks, head) {
  yield* vis(blocks, head);
}
357
var isCompacting = false;
// Walk the whole database at `head` (head blocks, all entries, clock vis,
// root, changes) so every reachable block lands in `blockLog.loggedBlocks`,
// which the caller persists as the compacted state. Re-entrant calls are
// dropped via the module-level `isCompacting` guard.
async function doCompact(blockLog, head) {
  if (isCompacting) {
    return;
  }
  isCompacting = true;
  try {
    time("compact head");
    for (const cid of head) {
      const bl = await blockLog.get(cid);
      if (!bl)
        throw new Error("Missing head block: " + cid.toString());
    }
    timeEnd("compact head");
    time("compact all entries");
    // Iterated purely for the side effect of logging every touched block.
    for await (const _entry of getAllEntries(blockLog, head)) {
    }
    timeEnd("compact all entries");
    time("compact clock vis");
    for await (const _line of vis(blockLog, head)) {
    }
    timeEnd("compact clock vis");
    time("compact root");
    const result = await root(blockLog, head);
    timeEnd("compact root");
    time("compact root blocks");
    for (const { cid, bytes } of [...result.additions, ...result.removals]) {
      blockLog.loggedBlocks.putSync(cid, bytes);
    }
    timeEnd("compact root blocks");
    time("compact changes");
    await clockChangesSince(blockLog, head, [], {});
    timeEnd("compact changes");
  } finally {
    // Fix: previously an error anywhere above left `isCompacting` stuck at
    // true, permanently disabling compaction for the process lifetime.
    isCompacting = false;
  }
}
391
// Fetch a block by its CID string and rewrap it as a decoded multiformats
// Block (cid + decoded value + raw bytes).
async function getBlock(blocks, cidString) {
  const found = await blocks.get(parse(cidString));
  if (!found) {
    throw new Error(`Missing block ${cidString}`);
  }
  const { cid, value } = await decode({ bytes: found.bytes, codec, hasher });
  return new Block({ cid, value, bytes: found.bytes });
}
398
-
399
- // src/indexer-helpers.ts
400
- import { create as create2 } from "multiformats/block";
401
- import { sha256 as hasher2 } from "multiformats/hashes/sha2";
402
- import * as codec2 from "@ipld/dag-cbor";
403
- import charwise from "charwise";
404
- import * as DbIndex from "prolly-trees/db-index";
405
- import { bf, simpleCompare } from "prolly-trees/utils";
406
- import { nocache as cache } from "prolly-trees/cache";
407
// Holder for one prolly-tree index: `cid` is the persisted root CID (or
// null), `root` the loaded tree node (or null until hydrated).
var IndexTree = class {
  constructor() {
    this.cid = null;
    this.root = null;
  }
};
411
// Compare the doc-id half of an index key. NaN sorts before everything and
// Infinity after, which lets prefix queries use [..., NaN] / [..., Infinity]
// as range sentinels (see the `prefix` branch of Index.query).
var refCompare = (aRef, bRef) => {
  if (Number.isNaN(aRef))
    return -1;
  if (Number.isNaN(bRef))
    throw new Error("ref may not be Infinity or NaN");
  if (aRef === Infinity)
    return 1;
  return simpleCompare(aRef, bRef);
};
420
// Order index entries by encoded key first, breaking ties on the doc-id ref.
var compare = (a, b) => {
  const [aKey, aRef] = a;
  const [bKey, bRef] = b;
  const byKeyPart = simpleCompare(aKey, bKey);
  return byKeyPart !== 0 ? byKeyPart : refCompare(aRef, bRef);
};
428
// Prolly-tree options: the by-key tree orders [encodedKey, docId] pairs with
// the composite comparator; the by-id tree orders plain doc ids.
var byKeyOpts = { cache, chunker: bf(30), codec: codec2, hasher: hasher2, compare };
var byIdOpts = { cache, chunker: bf(30), codec: codec2, hasher: hasher2, compare: simpleCompare };
430
// Run the user's map function over changed docs and build index entries
// keyed by [charwise(mapKey), docId]. Supports both emit-style map functions
// (which call the second argument) and return-style ones.
function indexEntriesForChanges(changes, mapFn) {
  const indexEntries = [];
  changes.forEach(({ key: _id, value, del }) => {
    // Deleted or empty docs contribute nothing to the index.
    if (del || !value)
      return;
    let mapCalled = false;
    const mapReturn = mapFn({ _id, ...value }, (k, v) => {
      mapCalled = true;
      // emit(undefined) is a no-op.
      if (typeof k === "undefined")
        return;
      indexEntries.push({
        // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-call
        key: [charwise.encode(k), _id],
        value: v || null
      });
    });
    // Return-style map: index the return value only when emit was never used.
    if (!mapCalled && mapReturn) {
      indexEntries.push({
        // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-call
        key: [charwise.encode(mapReturn), _id],
        value: null
      });
    }
  });
  return indexEntries;
}
456
// Build the async block-getter callback prolly-trees expects, backed by the
// given blockstore.
function makeProllyGetBlock(blocks) {
  return async (address) => {
    const found = await blocks.get(address);
    if (!found) {
      throw new Error(`Missing block ${address.toString()}`);
    }
    return create2({ cid: found.cid, bytes: found.bytes, hasher: hasher2, codec: codec2 });
  };
}
465
// Merge `indexEntries` into a prolly-tree index: create it on first use,
// hydrate the root from `inIndex.cid` when only the CID is known, otherwise
// bulk-update the loaded root. Returns the new { root, cid } pair.
async function bulkIndex(tblocks, inIndex, indexEntries, opts) {
  if (!indexEntries.length)
    return inIndex;
  if (!inIndex.root) {
    if (!inIndex.cid) {
      // Brand-new index: stream creation; the last yielded node/block is
      // kept as the tree root.
      let returnRootBlock = null;
      let returnNode = null;
      for await (const node of await DbIndex.create({ get: makeProllyGetBlock(tblocks), list: indexEntries, ...opts })) {
        const block = await node.block;
        await tblocks.put(block.cid, block.bytes);
        returnRootBlock = block;
        returnNode = node;
      }
      if (!returnNode || !returnRootBlock)
        throw new Error("failed to create index");
      return { root: returnNode, cid: returnRootBlock.cid };
    } else {
      // Persisted but not loaded: hydrate the root before updating.
      inIndex.root = await DbIndex.load({ cid: inIndex.cid, get: makeProllyGetBlock(tblocks), ...opts });
    }
  }
  const { root: root3, blocks: newBlocks } = await inIndex.root.bulk(indexEntries);
  if (root3) {
    for (const block of newBlocks) {
      await tblocks.put(block.cid, block.bytes);
    }
    return { root: root3, cid: (await root3.block).cid };
  } else {
    // Every entry was removed: the index is now empty.
    return { root: null, cid: null };
  }
}
495
// Hydrate a persisted prolly-tree index root from its CID.
async function loadIndex(tblocks, cid, opts) {
  return DbIndex.load({ cid, get: makeProllyGetBlock(tblocks), ...opts });
}
498
// Post-process raw prolly-tree query results: apply descending/limit,
// optionally join each row to its current document, decode charwise keys,
// and normalize the legacy `row` field to `value`.
async function applyQuery(crdt, resp, query) {
  if (query.descending) {
    resp.result = resp.result.reverse();
  }
  if (query.limit) {
    resp.result = resp.result.slice(0, query.limit);
  }
  if (query.includeDocs) {
    resp.result = await Promise.all(
      resp.result.map(async (row) => {
        const val = await crdt.get(row.id);
        const doc = val ? { _id: row.id, ...val.doc } : null;
        return { ...row, doc };
      })
    );
  }
  return {
    rows: resp.result.map((row) => {
      row.key = charwise.decode(row.key);
      if (row.row && !row.value) {
        row.value = row.row;
        delete row.row;
      }
      return row;
    })
  };
}
525
// Charwise-encode every bound of a query key range.
function encodeRange(range) {
  return range.map((bound) => charwise.encode(bound));
}
528
// Charwise-encode a single query key for prolly-tree lookups.
function encodeKey(key) {
  const encoded = charwise.encode(key);
  return encoded;
}
531
-
532
- // src/index.ts
533
// Look up or create the named index on a database's CRDT. Re-registering an
// existing name re-applies the map function / meta to the cached instance.
function index({ _crdt }, name, mapFn, meta) {
  if (mapFn && meta)
    throw new Error("cannot provide both mapFn and meta");
  // NOTE(review): constructor.name check rejects async map functions
  // ("AsyncFunction" !== "Function") — confirm that is intended.
  if (mapFn && mapFn.constructor.name !== "Function")
    throw new Error("mapFn must be a function");
  if (_crdt.indexers.has(name)) {
    const idx = _crdt.indexers.get(name);
    idx.applyMapFn(name, mapFn, meta);
  } else {
    const idx = new Index(_crdt, name, mapFn, meta);
    _crdt.indexers.set(name, idx);
  }
  return _crdt.indexers.get(name);
}
547
// A secondary index over a CRDT, maintained as two prolly-trees: byKey maps
// [charwise(mapKey), docId] -> value, byId maps docId -> its byKey key (so
// stale entries can be removed on update). `indexHead` records the clock
// head the index is current up to.
var Index = class {
  blockstore;
  crdt;
  name = null;
  mapFn = null;
  mapFnString = "";
  byKey = new IndexTree();
  byId = new IndexTree();
  indexHead = void 0;
  includeDocsDefault = false;
  initError = null;
  ready;
  constructor(crdt, name, mapFn, meta) {
    this.blockstore = crdt.indexBlockstore;
    this.crdt = crdt;
    this.applyMapFn(name, mapFn, meta);
    if (!(this.mapFnString || this.initError))
      throw new Error("missing mapFnString");
    this.ready = this.blockstore.ready.then(() => {
    });
  }
  // Configure the index either from a live map function or from persisted
  // meta. Errors are captured in `initError` rather than thrown, and
  // re-thrown later by _updateIndex.
  applyMapFn(name, mapFn, meta) {
    if (mapFn && meta)
      throw new Error("cannot provide both mapFn and meta");
    if (this.name && this.name !== name)
      throw new Error("cannot change name");
    this.name = name;
    try {
      if (meta) {
        // Meta may only attach to a fresh index or one at the same head.
        if (this.indexHead && this.indexHead.map((c) => c.toString()).join() !== meta.head.map((c) => c.toString()).join()) {
          throw new Error("cannot apply meta to existing index");
        }
        if (this.mapFnString) {
          // Mismatched map source is logged and ignored, not fatal.
          if (this.mapFnString !== meta.map) {
            console.log(
              "cannot apply different mapFn meta: old mapFnString",
              this.mapFnString,
              "new mapFnString",
              meta.map
            );
          } else {
            this.byId.cid = meta.byId;
            this.byKey.cid = meta.byKey;
            this.indexHead = meta.head;
          }
        } else {
          this.mapFnString = meta.map;
          this.byId.cid = meta.byId;
          this.byKey.cid = meta.byKey;
          this.indexHead = meta.head;
        }
      } else {
        if (this.mapFn) {
          if (mapFn) {
            if (this.mapFn.toString() !== mapFn.toString())
              throw new Error("cannot apply different mapFn app2");
          }
        } else {
          // Default map function: index the field named after the index.
          if (!mapFn) {
            mapFn = (doc) => doc[name] ?? void 0;
          }
          if (this.mapFnString) {
            if (this.mapFnString !== mapFn.toString())
              throw new Error("cannot apply different mapFn app");
          } else {
            this.mapFnString = mapFn.toString();
          }
          this.mapFn = mapFn;
        }
      }
      // Arrow-bodied map functions default includeDocs to true.
      const matches = /=>\s*(.*)/.test(this.mapFnString);
      this.includeDocsDefault = matches;
    } catch (e) {
      this.initError = e;
    }
  }
  // Bring the index up to date, hydrate the trees, then dispatch on the
  // query shape: range, single key, key list, prefix, or full scan.
  async query(opts = {}) {
    await this._updateIndex();
    await this._hydrateIndex();
    if (!this.byKey.root)
      return await applyQuery(this.crdt, { result: [] }, opts);
    if (this.includeDocsDefault && opts.includeDocs === void 0)
      opts.includeDocs = true;
    if (opts.range) {
      const { result: result2, ...all2 } = await this.byKey.root.range(...encodeRange(opts.range));
      return await applyQuery(this.crdt, { result: result2, ...all2 }, opts);
    }
    if (opts.key) {
      const encodedKey = encodeKey(opts.key);
      return await applyQuery(this.crdt, await this.byKey.root.get(encodedKey), opts);
    }
    if (Array.isArray(opts.keys)) {
      const results = await Promise.all(
        opts.keys.map(async (key) => {
          const encodedKey = encodeKey(key);
          return (await applyQuery(this.crdt, await this.byKey.root.get(encodedKey), opts)).rows;
        })
      );
      return { rows: results.flat() };
    }
    if (opts.prefix) {
      if (!Array.isArray(opts.prefix))
        opts.prefix = [opts.prefix];
      // NaN/Infinity sentinels bracket every possible doc-id suffix
      // (see refCompare).
      const start = [...opts.prefix, NaN];
      const end = [...opts.prefix, Infinity];
      const encodedR = encodeRange([start, end]);
      return await applyQuery(this.crdt, await this.byKey.root.range(...encodedR), opts);
    }
    const { result, ...all } = await this.byKey.root.getAllEntries();
    return await applyQuery(
      this.crdt,
      {
        result: result.map(({ key: [k, id], value }) => ({ key: k, id, value })),
        ...all
      },
      opts
    );
  }
  // Drop all index state so the next query rebuilds from scratch.
  _resetIndex() {
    this.byId = new IndexTree();
    this.byKey = new IndexTree();
    this.indexHead = void 0;
  }
  // Load tree roots from their CIDs if persisted but not yet in memory.
  async _hydrateIndex() {
    if (this.byId.root && this.byKey.root)
      return;
    if (!this.byId.cid || !this.byKey.cid)
      return;
    this.byId.root = await loadIndex(this.blockstore, this.byId.cid, byIdOpts);
    this.byKey.root = await loadIndex(this.blockstore, this.byKey.cid, byKeyOpts);
  }
  // Fold CRDT changes since `indexHead` into both trees inside one index
  // blockstore transaction; returns the meta describing all indexes.
  async _updateIndex() {
    await this.ready;
    if (this.initError)
      throw this.initError;
    if (!this.mapFn)
      throw new Error("No map function defined");
    let result, head;
    if (!this.indexHead || this.indexHead.length === 0) {
      ;
      ({ result, head } = await this.crdt.allDocs());
    } else {
      ;
      ({ result, head } = await this.crdt.changes(this.indexHead));
    }
    if (result.length === 0) {
      this.indexHead = head;
      return { byId: this.byId, byKey: this.byKey };
    }
    // Collect the old entries for changed docs so they can be tombstoned.
    let staleKeyIndexEntries = [];
    let removeIdIndexEntries = [];
    if (this.byId.root) {
      const removeIds = result.map(({ key }) => key);
      const { result: oldChangeEntries } = await this.byId.root.getMany(removeIds);
      staleKeyIndexEntries = oldChangeEntries.map((key) => ({ key, del: true }));
      removeIdIndexEntries = oldChangeEntries.map((key) => ({ key: key[1], del: true }));
    }
    const indexEntries = indexEntriesForChanges(result, this.mapFn);
    const byIdIndexEntries = indexEntries.map(({ key }) => ({
      key: key[1],
      value: key
    }));
    // Snapshot meta for every sibling index so the stored meta is complete.
    const indexerMeta = { indexes: /* @__PURE__ */ new Map() };
    for (const [name, indexer] of this.crdt.indexers) {
      if (indexer.indexHead) {
        indexerMeta.indexes.set(name, {
          byId: indexer.byId.cid,
          byKey: indexer.byKey.cid,
          head: indexer.indexHead,
          map: indexer.mapFnString,
          name: indexer.name
        });
      }
    }
    return await this.blockstore.transaction(async (tblocks) => {
      this.byId = await bulkIndex(
        tblocks,
        this.byId,
        removeIdIndexEntries.concat(byIdIndexEntries),
        byIdOpts
      );
      this.byKey = await bulkIndex(
        tblocks,
        this.byKey,
        staleKeyIndexEntries.concat(indexEntries),
        byKeyOpts
      );
      this.indexHead = head;
      const idxMeta = {
        byId: this.byId.cid,
        byKey: this.byKey.cid,
        head,
        map: this.mapFnString,
        name: this.name
      };
      indexerMeta.indexes.set(this.name, idxMeta);
      return indexerMeta;
    });
  }
};
747
-
748
- // src/crdt-clock.ts
749
- import { advance } from "@web3-storage/pail/clock";
750
- import { root as root2 } from "@web3-storage/pail/crdt";
751
-
752
- // src/apply-head-queue.ts
753
// Serializes head-apply work: tasks queue up and a single async-generator
// processor drains them, yielding accumulated updates so the caller can
// notify watchers between batches.
function applyHeadQueue(worker) {
  const queue = [];
  let isProcessing = false;
  async function* process() {
    if (isProcessing || queue.length === 0)
      return;
    isProcessing = true;
    const allUpdates = [];
    try {
      while (queue.length > 0) {
        // NOTE(review): this comparator ignores `a`, so it is not a
        // consistent sort — it appears intended to favor tasks carrying
        // updates; confirm before changing.
        queue.sort((a, b) => b.updates ? 1 : -1);
        const task = queue.shift();
        if (!task)
          continue;
        await worker(task.newHead, task.prevHead, task.updates !== null).catch((e) => {
          console.error("int_applyHead worker error", e);
          throw e;
        });
        if (task.updates) {
          allUpdates.push(...task.updates);
        }
        // Flush the accumulator to the consumer when no update-carrying
        // tasks remain, or right after processing one.
        if (!queue.some((t) => t.updates) || task.updates) {
          const allTasksHaveUpdates = queue.every((task2) => task2.updates !== null);
          yield { updates: allUpdates, all: allTasksHaveUpdates };
          allUpdates.length = 0;
        }
      }
    } finally {
      isProcessing = false;
      // Drain anything that was pushed while we were processing.
      const generator = process();
      let result = await generator.next();
      while (!result.done) {
        result = await generator.next();
      }
    }
  }
  return {
    push(task) {
      queue.push(task);
      return process();
    },
    size() {
      return queue.length;
    }
  };
}
799
-
800
- // src/crdt-clock.ts
801
// Tracks the Merkle-clock head for a CRDT and fans out change notifications.
// zoomers fire when the head moves (indexes reset); watchers receive the
// concrete updates; emptyWatchers are pinged without a payload.
var CRDTClock = class {
  // todo: track local and remote clocks independently, merge on read
  // that way we can drop the whole remote if we need to
  // should go with making sure the local clock only references locally available blockstore on write
  head = [];
  zoomers = /* @__PURE__ */ new Set();
  watchers = /* @__PURE__ */ new Set();
  emptyWatchers = /* @__PURE__ */ new Set();
  blockstore = null;
  applyHeadQueue;
  constructor() {
    this.applyHeadQueue = applyHeadQueue(this.int_applyHead.bind(this));
  }
  setHead(head) {
    this.head = head;
  }
  // Queue a head transition; for each drained batch, notify listeners.
  async applyHead(newHead, prevHead, updates = null) {
    for await (const { updates: updatesAcc, all } of this.applyHeadQueue.push({
      newHead,
      prevHead,
      updates
    })) {
      this.processUpdates(updatesAcc, all, prevHead);
    }
  }
  async processUpdates(updatesAcc, all, prevHead) {
    let internalUpdates = updatesAcc;
    // If anyone is watching and the batch may be incomplete, recompute the
    // delta since prevHead directly from the clock.
    if (this.watchers.size && !all) {
      const changes = await clockChangesSince(this.blockstore, this.head, prevHead, {});
      internalUpdates = changes.result;
    }
    this.zoomers.forEach((fn) => fn());
    this.notifyWatchers(internalUpdates || []);
  }
  notifyWatchers(updates) {
    this.emptyWatchers.forEach((fn) => fn());
    this.watchers.forEach((fn) => fn(updates || []));
  }
  onTick(fn) {
    this.watchers.add(fn);
  }
  onTock(fn) {
    this.emptyWatchers.add(fn);
  }
  onZoom(fn) {
    this.zoomers.add(fn);
  }
  // Worker for applyHeadQueue: merge newHead into our head inside a
  // blockstore transaction, short-circuiting no-op and fast-forward cases.
  async int_applyHead(newHead, prevHead, localUpdates) {
    // Note: sortClockHead sorts in place, so this.head ends up sorted too.
    const ogHead = sortClockHead(this.head);
    newHead = sortClockHead(newHead);
    if (compareClockHeads(ogHead, newHead)) {
      return;
    }
    const ogPrev = sortClockHead(prevHead);
    if (compareClockHeads(ogHead, ogPrev)) {
      // Fast-forward: our head is exactly the expected previous head.
      this.setHead(newHead);
      return;
    }
    let head = this.head;
    const noLoader = !localUpdates;
    if (!this.blockstore)
      throw new Error("missing blockstore");
    await validateBlocks(newHead, this.blockstore);
    await this.blockstore.transaction(
      async (tblocks) => {
        head = await advanceBlocks(newHead, tblocks, head);
        const result = await root2(tblocks, head);
        for (const { cid, bytes } of [...result.additions, ...result.removals]) {
          tblocks.putSync(cid, bytes);
        }
        return { head };
      },
      { noLoader }
    );
    this.setHead(head);
  }
};
878
// Sort a clock head in place by each CID's string form, so heads can be
// compared deterministically. Returns the same (now sorted) array.
function sortClockHead(clockHead) {
  return clockHead.sort((left, right) => String(left).localeCompare(String(right)));
}
881
// Ensure every CID in `newHead` is present in the blockstore before the
// clock is advanced to it. Rejects if any block is missing.
async function validateBlocks(newHead, blockstore) {
  // Fix: the lookups were previously fired via an unawaited Array.map, so a
  // missing block produced an unhandled rejection and never failed the
  // caller — the head was applied without validation.
  await Promise.all(
    newHead.map(async (cid) => {
      const got = await blockstore.get(cid);
      if (!got) {
        throw new Error("int_applyHead missing block: " + cid.toString());
      }
    })
  );
}
889
// Two heads are equal when their CID lists stringify identically
// (order-sensitive — callers sort first via sortClockHead).
function compareClockHeads(head1, head2) {
  return String(head1) === String(head2);
}
892
- async function advanceBlocks(newHead, tblocks, head) {
893
- for (const cid of newHead) {
894
- try {
895
- head = await advance(tblocks, head, cid);
896
- } catch (e) {
897
- continue;
898
- }
899
- }
900
- return head;
901
- }
902
-
903
- // src/crdt.ts
904
- var CRDT = class {
905
- name;
906
- opts = {};
907
- ready;
908
- blockstore;
909
- indexBlockstore;
910
- indexers = /* @__PURE__ */ new Map();
911
- clock = new CRDTClock();
912
- constructor(name, opts) {
913
- this.name = name || null;
914
- this.opts = opts || this.opts;
915
- this.blockstore = new EncryptedBlockstore({
916
- name,
917
- applyMeta: async (meta) => {
918
- const crdtMeta = meta;
919
- await this.clock.applyHead(crdtMeta.head, []);
920
- },
921
- compact: async (blocks) => {
922
- await doCompact(blocks, this.clock.head);
923
- return { head: this.clock.head };
924
- },
925
- autoCompact: this.opts.autoCompact || 100,
926
- crypto: this.opts.crypto || crypto,
927
- store: this.opts.store || store,
928
- public: this.opts.public,
929
- meta: this.opts.meta
930
- });
931
- this.clock.blockstore = this.blockstore;
932
- this.indexBlockstore = new EncryptedBlockstore({
933
- name: this.opts.persistIndexes && this.name ? this.name + ".idx" : void 0,
934
- applyMeta: async (meta) => {
935
- const idxCarMeta = meta;
936
- for (const [name2, idx] of Object.entries(idxCarMeta.indexes)) {
937
- index({ _crdt: this }, name2, void 0, idx);
938
- }
939
- },
940
- crypto,
941
- public: this.opts.public,
942
- store
943
- });
944
- this.ready = Promise.all([this.blockstore.ready, this.indexBlockstore.ready]).then(() => {
945
- });
946
- this.clock.onZoom(() => {
947
- for (const idx of this.indexers.values()) {
948
- idx._resetIndex();
949
- }
950
- });
951
- }
952
- async bulk(updates) {
953
- await this.ready;
954
- const prevHead = [...this.clock.head];
955
- const meta = await this.blockstore.transaction(
956
- async (blocks) => {
957
- const { head } = await applyBulkUpdateToCrdt(blocks, this.clock.head, updates);
958
- updates = updates.map(({ key, value, del, clock }) => {
959
- readFiles(this.blockstore, { doc: value });
960
- return { key, value, del, clock };
961
- });
962
- return { head };
963
- }
964
- );
965
- await this.clock.applyHead(meta.head, prevHead, updates);
966
- return meta;
967
- }
968
- // if (snap) await this.clock.applyHead(crdtMeta.head, this.clock.head)
969
- async allDocs() {
970
- await this.ready;
971
- const result = [];
972
- for await (const entry of getAllEntries(this.blockstore, this.clock.head)) {
973
- result.push(entry);
974
- }
975
- return { result, head: this.clock.head };
976
- }
977
- async vis() {
978
- await this.ready;
979
- const txt = [];
980
- for await (const line of clockVis(this.blockstore, this.clock.head)) {
981
- txt.push(line);
982
- }
983
- return txt.join("\n");
984
- }
985
- async getBlock(cidString) {
986
- await this.ready;
987
- return await getBlock(this.blockstore, cidString);
988
- }
989
- async get(key) {
990
- await this.ready;
991
- const result = await getValueFromCrdt(this.blockstore, this.clock.head, key);
992
- if (result.del)
993
- return null;
994
- return result;
995
- }
996
- async changes(since = [], opts = {}) {
997
- await this.ready;
998
- return await clockChangesSince(this.blockstore, this.clock.head, since, opts);
999
- }
1000
- async compact() {
1001
- return await this.blockstore.compact();
1002
- }
1003
- };
1004
-
1005
- // src/database.ts
1006
- var Database = class {
1007
- static databases = /* @__PURE__ */ new Map();
1008
- name;
1009
- opts = {};
1010
- _listening = false;
1011
- _listeners = /* @__PURE__ */ new Set();
1012
- _noupdate_listeners = /* @__PURE__ */ new Set();
1013
- _crdt;
1014
- _writeQueue;
1015
- blockstore;
1016
- constructor(name, opts) {
1017
- this.name = name || null;
1018
- this.opts = opts || this.opts;
1019
- this._crdt = new CRDT(name, this.opts);
1020
- this.blockstore = this._crdt.blockstore;
1021
- this._writeQueue = writeQueue(async (updates) => {
1022
- return await this._crdt.bulk(updates);
1023
- });
1024
- this._crdt.clock.onTock(() => {
1025
- this._no_update_notify();
1026
- });
1027
- }
1028
- async get(id) {
1029
- const got = await this._crdt.get(id).catch((e) => {
1030
- e.message = `Not found: ${id} - ` + e.message;
1031
- throw e;
1032
- });
1033
- if (!got)
1034
- throw new Error(`Not found: ${id}`);
1035
- const { doc } = got;
1036
- return { _id: id, ...doc };
1037
- }
1038
- async put(doc) {
1039
- const { _id, ...value } = doc;
1040
- const docId = _id || uuidv7();
1041
- const result = await this._writeQueue.push({ key: docId, value });
1042
- return { id: docId, clock: result?.head };
1043
- }
1044
- async del(id) {
1045
- const result = await this._writeQueue.push({ key: id, del: true });
1046
- return { id, clock: result?.head };
1047
- }
1048
- async changes(since = [], opts = {}) {
1049
- const { result, head } = await this._crdt.changes(since, opts);
1050
- const rows = result.map(({ key, value, del, clock }) => ({
1051
- key,
1052
- value: del ? { _id: key, _deleted: true } : { _id: key, ...value },
1053
- clock
1054
- }));
1055
- return { rows, clock: head };
1056
- }
1057
- async allDocs() {
1058
- const { result, head } = await this._crdt.allDocs();
1059
- const rows = result.map(({ key, value, del }) => ({
1060
- key,
1061
- value: del ? { _id: key, _deleted: true } : { _id: key, ...value }
1062
- }));
1063
- return { rows, clock: head };
1064
- }
1065
- async allDocuments() {
1066
- return this.allDocs();
1067
- }
1068
- subscribe(listener, updates) {
1069
- if (updates) {
1070
- if (!this._listening) {
1071
- this._listening = true;
1072
- this._crdt.clock.onTick((updates2) => {
1073
- void this._notify(updates2);
1074
- });
1075
- }
1076
- this._listeners.add(listener);
1077
- return () => {
1078
- this._listeners.delete(listener);
1079
- };
1080
- } else {
1081
- this._noupdate_listeners.add(listener);
1082
- return () => {
1083
- this._noupdate_listeners.delete(listener);
1084
- };
1085
- }
1086
- }
1087
- // todo if we add this onto dbs in fireproof.ts then we can make index.ts a separate package
1088
- async query(field, opts = {}) {
1089
- const idx = typeof field === "string" ? index({ _crdt: this._crdt }, field) : index({ _crdt: this._crdt }, makeName(field.toString()), field);
1090
- return await idx.query(opts);
1091
- }
1092
- async compact() {
1093
- await this._crdt.compact();
1094
- }
1095
- async _notify(updates) {
1096
- if (this._listeners.size) {
1097
- const docs = updates.map(({ key, value }) => ({ _id: key, ...value }));
1098
- for (const listener of this._listeners) {
1099
- await (async () => await listener(docs))().catch((e) => {
1100
- console.error("subscriber error", e);
1101
- });
1102
- }
1103
- }
1104
- }
1105
- async _no_update_notify() {
1106
- if (this._noupdate_listeners.size) {
1107
- for (const listener of this._noupdate_listeners) {
1108
- await (async () => await listener([]))().catch((e) => {
1109
- console.error("subscriber error", e);
1110
- });
1111
- }
1112
- }
1113
- }
1114
- };
1115
- function fireproof(name, opts) {
1116
- if (!Database.databases.has(name)) {
1117
- Database.databases.set(name, new Database(name, opts));
1118
- }
1119
- return Database.databases.get(name);
1120
- }
1121
- function makeName(fnString) {
1122
- const regex = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g;
1123
- let found = null;
1124
- const matches = Array.from(fnString.matchAll(regex), (match) => match[1].trim());
1125
- if (matches.length === 0) {
1126
- found = /=>\s*(.*)/.exec(fnString);
1127
- }
1128
- if (!found) {
1129
- return fnString;
1130
- } else {
1131
- return found[1];
1132
- }
1133
- }
1134
- export {
1135
- Database,
1136
- fireproof
1137
- };
1138
- //# sourceMappingURL=fireproof.js.map