@fireproof/core 0.18.0 → 0.19.4-dev

Sign up to get free protection for your applications and to get access to all the features.
Files changed (67) hide show
  1. package/README.md +29 -15
  2. package/chunk-7OGPZSGT.js +39 -0
  3. package/chunk-7OGPZSGT.js.map +1 -0
  4. package/chunk-H3A2HMMM.js +164 -0
  5. package/chunk-H3A2HMMM.js.map +1 -0
  6. package/chunk-HCXR2M5B.js +202 -0
  7. package/chunk-HCXR2M5B.js.map +1 -0
  8. package/chunk-QHSXUST7.js +208 -0
  9. package/chunk-QHSXUST7.js.map +1 -0
  10. package/chunk-VZGT7ZYP.js +22 -0
  11. package/chunk-VZGT7ZYP.js.map +1 -0
  12. package/index.cjs +4649 -0
  13. package/index.cjs.map +1 -0
  14. package/index.d.cts +911 -0
  15. package/index.d.ts +911 -0
  16. package/index.js +2923 -0
  17. package/index.js.map +1 -0
  18. package/metafile-cjs.json +1 -0
  19. package/metafile-esm.json +1 -0
  20. package/node-sys-container-E7LADX2Z.js +29 -0
  21. package/node-sys-container-E7LADX2Z.js.map +1 -0
  22. package/package.json +19 -109
  23. package/sqlite-data-store-YS4U7AQ4.js +120 -0
  24. package/sqlite-data-store-YS4U7AQ4.js.map +1 -0
  25. package/sqlite-meta-store-FJZSZG4R.js +137 -0
  26. package/sqlite-meta-store-FJZSZG4R.js.map +1 -0
  27. package/sqlite-wal-store-6JZ4URNS.js +123 -0
  28. package/sqlite-wal-store-6JZ4URNS.js.map +1 -0
  29. package/store-file-HMHPQTUV.js +193 -0
  30. package/store-file-HMHPQTUV.js.map +1 -0
  31. package/store-indexdb-MRVZG4OG.js +20 -0
  32. package/store-indexdb-MRVZG4OG.js.map +1 -0
  33. package/store-sql-5XMJ5OWJ.js +406 -0
  34. package/store-sql-5XMJ5OWJ.js.map +1 -0
  35. package/dist/browser/fireproof.cjs +0 -1172
  36. package/dist/browser/fireproof.cjs.map +0 -1
  37. package/dist/browser/fireproof.d.cts +0 -268
  38. package/dist/browser/fireproof.d.ts +0 -268
  39. package/dist/browser/fireproof.global.js +0 -24178
  40. package/dist/browser/fireproof.global.js.map +0 -1
  41. package/dist/browser/fireproof.js +0 -1147
  42. package/dist/browser/fireproof.js.map +0 -1
  43. package/dist/browser/metafile-cjs.json +0 -1
  44. package/dist/browser/metafile-esm.json +0 -1
  45. package/dist/browser/metafile-iife.json +0 -1
  46. package/dist/memory/fireproof.cjs +0 -1172
  47. package/dist/memory/fireproof.cjs.map +0 -1
  48. package/dist/memory/fireproof.d.cts +0 -268
  49. package/dist/memory/fireproof.d.ts +0 -268
  50. package/dist/memory/fireproof.global.js +0 -24178
  51. package/dist/memory/fireproof.global.js.map +0 -1
  52. package/dist/memory/fireproof.js +0 -1147
  53. package/dist/memory/fireproof.js.map +0 -1
  54. package/dist/memory/metafile-cjs.json +0 -1
  55. package/dist/memory/metafile-esm.json +0 -1
  56. package/dist/memory/metafile-iife.json +0 -1
  57. package/dist/node/fireproof.cjs +0 -1172
  58. package/dist/node/fireproof.cjs.map +0 -1
  59. package/dist/node/fireproof.d.cts +0 -268
  60. package/dist/node/fireproof.d.ts +0 -268
  61. package/dist/node/fireproof.global.js +0 -38540
  62. package/dist/node/fireproof.global.js.map +0 -1
  63. package/dist/node/fireproof.js +0 -1138
  64. package/dist/node/fireproof.js.map +0 -1
  65. package/dist/node/metafile-cjs.json +0 -1
  66. package/dist/node/metafile-esm.json +0 -1
  67. package/dist/node/metafile-iife.json +0 -1
@@ -1,1147 +0,0 @@
1
- // src/database.ts
2
- import { uuidv7 } from "uuidv7";
3
-
4
- // src/write-queue.ts
5
// src/write-queue.ts
// Serializing write queue: tasks are drained in batches of up to `payload`.
// In unbounded mode the worker runs once per update so each task settles
// independently; otherwise one worker call handles the whole batch and every
// task in it shares the same outcome.
function writeQueue(worker, payload = Infinity, unbounded = false) {
  const pending = [];
  let busy = false;
  // Drain up to `payload` queued tasks, then reschedule until empty.
  async function drain() {
    if (busy || pending.length === 0) return;
    busy = true;
    const batch = pending.splice(0, payload);
    const updates = batch.map((entry) => entry.task);
    if (unbounded) {
      // One worker invocation per update; settle each task on its own.
      await Promise.all(
        batch.map(async (entry, i) => {
          try {
            entry.resolve(await worker([updates[i]]));
          } catch (err) {
            entry.reject(err);
          }
        })
      );
    } else {
      // Single worker invocation for the batch; all tasks share the result.
      try {
        const outcome = await worker(updates);
        for (const entry of batch) entry.resolve(outcome);
      } catch (err) {
        for (const entry of batch) entry.reject(err);
      }
    }
    busy = false;
    // Pick up anything enqueued while we were working.
    void drain();
  }
  return {
    // Enqueue one update; resolves/rejects with the worker's result for it.
    push(task) {
      return new Promise((resolve, reject) => {
        pending.push({ task, resolve, reject });
        void drain();
      });
    }
  };
}
44
-
45
- // src/crdt.ts
46
- import {
47
- EncryptedBlockstore
48
- } from "@fireproof/encrypted-blockstore";
49
-
50
- // src/eb-web.ts
51
- import * as crypto from "@fireproof/encrypted-blockstore/crypto-web";
52
- import {
53
- makeDataStore,
54
- makeMetaStore,
55
- makeRemoteWAL
56
- } from "@fireproof/encrypted-blockstore/store-web";
57
// Web storage backend bundle; handed to EncryptedBlockstore as the default
// `store` option (see the CRDT constructor below in this file).
var store = {
  makeDataStore,
  makeMetaStore,
  makeRemoteWAL
};
62
-
63
- // src/crdt-helpers.ts
64
- import { encode, decode, Block } from "multiformats/block";
65
- import { parse } from "multiformats/link";
66
- import { sha256 as hasher } from "multiformats/hashes/sha2";
67
- import * as codec from "@ipld/dag-cbor";
68
- import { put, get, entries, root } from "@web3-storage/pail/crdt";
69
- import { EventFetcher, vis } from "@web3-storage/pail/clock";
70
- import * as Batch from "@web3-storage/pail/crdt/batch";
71
- import {
72
- CarTransaction
73
- } from "@fireproof/encrypted-blockstore";
74
-
75
- // src/files.ts
76
- import * as UnixFS from "@ipld/unixfs";
77
- import * as raw from "multiformats/codecs/raw";
78
- import { withMaxChunkSize } from "@ipld/unixfs/file/chunker/fixed";
79
- import { withWidth } from "@ipld/unixfs/file/layout/balanced";
80
- import { exporter } from "ipfs-unixfs-exporter";
81
// Shared UnixFS writer configuration: raw-codec leaves, fixed 1 MiB chunks,
// balanced tree layout with width 1024.
var queuingStrategy = UnixFS.withCapacity();
var settings = UnixFS.configure({
  fileChunkEncoder: raw,
  smallFileEncoder: raw,
  chunker: withMaxChunkSize(1024 * 1024),
  fileLayout: withWidth(1024)
});
90
// Encode a Blob into UnixFS blocks. The final block emitted by the encoder
// stream is the root, so its CID addresses the whole file.
async function encodeFile(blob) {
  const stream = createFileEncoderStream(blob);
  const blocks = await collect(stream);
  return { cid: blocks.at(-1).cid, blocks };
}
95
// Reassemble a File from UnixFS blocks addressed by `cid`.
// `meta` supplies the stored size and MIME type; lastModified is pinned to 0
// so decoding is deterministic.
async function decodeFile(blocks, cid, meta) {
  const entry = await exporter(cid.toString(), blocks, { length: meta.size });
  const chunks = [];
  for await (const chunk of entry.content()) {
    chunks.push(chunk);
  }
  return new File(chunks, entry.name, { type: meta.type, lastModified: 0 });
}
102
// Pipe a Blob through a UnixFS writer and expose the encoded blocks as a
// ReadableStream. Encoding runs in a detached async task; its completion is
// observed by the consumer draining the returned stream.
function createFileEncoderStream(blob) {
  const { readable, writable } = new TransformStream({}, queuingStrategy);
  const writer = UnixFS.createWriter({ writable, settings });
  const builder = new UnixFSFileBuilder("", blob);
  void (async () => {
    await builder.finalize(writer);
    await writer.close();
  })();
  return readable;
}
112
// Drain a pipeable stream into an array of its chunks.
async function collect(collectable) {
  const out = [];
  const sink = new WritableStream({
    write(chunk) {
      out.push(chunk);
    }
  });
  await collectable.pipeTo(sink);
  return out;
}
123
// Wraps a Blob so it can be streamed into a UnixFS file writer.
var UnixFSFileBuilder = class {
  #file;
  name;
  constructor(name, file) {
    this.name = name;
    this.#file = file;
  }
  // Stream the wrapped Blob into a fresh UnixFS file writer and return the
  // writer's close() result.
  async finalize(writer) {
    const fileWriter = UnixFS.createFileWriter(writer);
    const sink = new WritableStream({
      async write(chunk) {
        await fileWriter.write(chunk);
      }
    });
    await this.#file.stream().pipeTo(sink);
    return await fileWriter.close();
  }
};
142
-
143
- // src/crdt-helpers.ts
144
// src/crdt-helpers.ts
// No-op timing hooks used around compaction phases in doCompact below;
// presumably stand-ins for console.time/timeEnd in this build — TODO confirm.
function time(tag) {
}
function timeEnd(tag) {
}
148
// Apply document updates to the CRDT, batching when more than one is given.
// Persists all touched blocks plus the clock event, and returns the new head.
async function applyBulkUpdateToCrdt(tblocks, head, updates) {
  let result = null;
  if (updates.length > 1) {
    // Multi-update path: one batched commit producing a single clock event.
    const batch = await Batch.create(tblocks, head);
    for (const update of updates) {
      const link = await writeDocContent(tblocks, update);
      await batch.put(update.key, link);
    }
    result = await batch.commit();
  } else {
    for (const update of updates) {
      const link = await writeDocContent(tblocks, update);
      result = await put(tblocks, head, update.key, link);
      // Sanity check: the new root must appear in the additions or already
      // exist in the transaction's blocks.
      const rootCid = result.root.toString();
      const returned = result.additions.some((a) => a.cid.toString() === rootCid);
      if (!returned) {
        const present = await tblocks.get(result.root);
        if (!present) {
          throw new Error(
            `missing root in additions: ${result.additions.length} ${rootCid} keys: ${updates.map((u) => u.key).toString()}`
          );
        }
      }
    }
  }
  if (!result) throw new Error("Missing result");
  if (result.event) {
    // Stage every added/removed block and the clock event itself.
    for (const { cid, bytes } of [...result.additions, ...result.removals, result.event]) {
      tblocks.putSync(cid, bytes);
    }
  }
  return { head: result.head };
}
182
// Encode one update as a dag-cbor block: { del: true } for deletions,
// { doc } otherwise (with any attached files processed first).
// Stages the block and returns its CID.
async function writeDocContent(blocks, update) {
  let value;
  if (update.del) {
    value = { del: true };
  } else {
    await processFiles(blocks, update.value);
    value = { doc: update.value };
  }
  const encoded = await encode({ value, hasher, codec });
  blocks.putSync(encoded.cid, encoded.bytes);
  return encoded.cid;
}
194
// Process a document's private (`_files`) and public (`_publicFiles`)
// attachment sets, if present.
async function processFiles(blocks, doc) {
  if (doc._files) await processFileset(blocks, doc._files);
  if (doc._publicFiles) await processFileset(blocks, doc._publicFiles, true);
}
202
// Encode fresh File objects into UnixFS blocks inside a CAR transaction,
// commit them through the parent blockstore's loader, and record the CAR
// CID on each newly-encoded entry. Entries that already carry full
// cid/type/size/car metadata are normalized to just those fields.
async function processFileset(blocks, files, publicFiles = false) {
  const dbBlockstore = blocks.parent;
  const tx = new CarTransaction(dbBlockstore);
  const encodedNames = [];
  for (const filename in files) {
    if (File === files[filename].constructor) {
      // Raw browser File: encode it and stage its blocks in the transaction.
      const file = files[filename];
      const { cid, blocks: fileBlocks } = await encodeFile(file);
      encodedNames.push(filename);
      for (const block of fileBlocks) {
        tx.putSync(block.cid, block.bytes);
      }
      files[filename] = { cid, type: file.type, size: file.size };
    } else {
      // Previously-encoded metadata: keep only the known fields.
      const { cid, type, size, car } = files[filename];
      if (cid && type && size && car) {
        files[filename] = { cid, type, size, car };
      }
    }
  }
  if (encodedNames.length) {
    const car = await dbBlockstore.loader?.commitFiles(tx, { files }, {
      public: publicFiles
    });
    if (car) {
      for (const name of encodedNames) {
        files[name] = { car, ...files[name] };
      }
    }
  }
}
233
// Look up `key` in the CRDT at `head` and return its decoded document value.
// Throws when the head is empty or the key is absent.
async function getValueFromCrdt(blocks, head, key) {
  if (!head.length) throw new Error("Getting from an empty database");
  const link = await get(blocks, head, key);
  if (!link) throw new Error(`Missing key ${key}`);
  return await getValueFromLink(blocks, link);
}
241
// Hydrate lazy file accessors on a freshly-decoded document's filesets.
function readFiles(blocks, { doc }) {
  if (!doc) return;
  if (doc._files) readFileset(blocks, doc._files);
  if (doc._publicFiles) readFileset(blocks, doc._publicFiles, true);
}
251
// For every file entry that has a CID: public entries get a w3s gateway URL,
// and entries whose CAR is known get a lazy `file()` accessor that decodes
// the file from the blockstore on demand.
function readFileset(blocks, files, isPublic = false) {
  for (const filename in files) {
    const fileMeta = files[filename];
    if (fileMeta.cid) {
      if (isPublic) {
        fileMeta.url = `https://${fileMeta.cid.toString()}.ipfs.w3s.link/`;
      }
      if (fileMeta.car) {
        fileMeta.file = async () =>
          await decodeFile(
            {
              // Minimal block-getter facade backed by the CAR loader.
              get: async (cid) => await blocks.getFile(fileMeta.car, cid, isPublic)
            },
            fileMeta.cid,
            fileMeta
          );
      }
    }
    files[filename] = fileMeta;
  }
}
273
// Fetch and decode the dag-cbor document block behind `link`, tag the value
// with its CID, and hydrate any file accessors before returning it.
async function getValueFromLink(blocks, link) {
  const block = await blocks.get(link);
  if (!block) throw new Error(`Missing linked block ${link.toString()}`);
  const { value } = await decode({ bytes: block.bytes, hasher, codec });
  value.cid = link;
  readFiles(blocks, value);
  return value;
}
282
// EventFetcher variant that tolerates missing events: a failed fetch is
// logged and yields { value: null } instead of throwing, so dirty reads can
// continue past gaps in the event log.
var DirtyEventFetcher = class extends EventFetcher {
  // @ts-ignore
  async get(link) {
    try {
      return await super.get(link);
    } catch (e) {
      console.error("missing event", link.toString(), e);
      return { value: null };
    }
  }
};
293
// Collect the updates between `since` and `head`. The DAG walk gathers
// newest-first; the result is reversed so callers see oldest-first.
// opts.dirty tolerates missing events; opts.limit caps gathered updates.
async function clockChangesSince(blocks, head, since, opts) {
  const fetcher = opts.dirty ? new DirtyEventFetcher(blocks) : new EventFetcher(blocks);
  const seenKeys = /* @__PURE__ */ new Set();
  const updates = await gatherUpdates(
    blocks,
    fetcher,
    head,
    since,
    [],
    seenKeys,
    /* @__PURE__ */ new Set(),
    opts.limit || Infinity
  );
  return { result: updates.reverse(), head };
}
308
// Walk the event DAG from `head` back toward `since`, accumulating the most
// recent update per key (first occurrence wins since the walk is
// newest-first). Recurses into parents until a `since` link is reached or
// `limit` is exhausted. `didLinks` guards against revisiting events.
async function gatherUpdates(blocks, eventsFetcher, head, since, updates = [], keys, didLinks, limit) {
  if (limit <= 0) return updates;
  const headStrings = head.map((l) => l.toString());
  // Stop as soon as any `since` link is part of the current head.
  for (const link of since) {
    if (headStrings.includes(link.toString())) {
      return updates;
    }
  }
  for (const link of head) {
    const linkString = link.toString();
    if (didLinks.has(linkString)) continue;
    didLinks.add(linkString);
    const { value: event } = await eventsFetcher.get(link);
    if (!event) continue;
    // Normalize batch and single-put events into a flat ops list.
    const { type } = event.data;
    let ops = [];
    if (type === "batch") {
      ops = event.data.ops;
    } else if (type === "put") {
      ops = [event.data];
    }
    // Reverse iteration records the most recent write per key first.
    for (let i = ops.length - 1; i >= 0; i--) {
      const { key, value } = ops[i];
      if (!keys.has(key)) {
        const docValue = await getValueFromLink(blocks, value);
        updates.push({ key, value: docValue.doc, del: docValue.del, clock: link });
        limit--;
        keys.add(key);
      }
    }
    if (event.parents) {
      updates = await gatherUpdates(
        blocks,
        eventsFetcher,
        event.parents,
        since,
        updates,
        keys,
        didLinks,
        limit
      );
    }
  }
  return updates;
}
355
// Yield every { key, value, del } document entry reachable from `head`.
async function* getAllEntries(blocks, head) {
  for await (const [key, link] of entries(blocks, head)) {
    const docValue = await getValueFromLink(blocks, link);
    yield { key, value: docValue.doc, del: docValue.del };
  }
}
361
// Yield the clock's textual visualization line by line.
async function* clockVis(blocks, head) {
  yield* vis(blocks, head);
}
366
// Reentrancy guard: compaction must not run twice concurrently.
var isCompacting = false;
// Compact by replaying reads: validate the head blocks, touch every entry
// and clock-vis line, recompute the root, and replay the clock changes.
// The blocks logged by these reads (blockLog.loggedBlocks) are what the
// compacted store is rebuilt from.
async function doCompact(blockLog, head) {
  if (isCompacting) {
    return;
  }
  isCompacting = true;
  // BUG FIX: the original reset isCompacting only on the success path, so
  // any throw below left it stuck at true, silently disabling compaction
  // for the rest of the process. finally guarantees the reset.
  try {
    time("compact head");
    for (const cid of head) {
      const bl = await blockLog.get(cid);
      if (!bl) throw new Error("Missing head block: " + cid.toString());
    }
    timeEnd("compact head");
    time("compact all entries");
    // Drain all entries purely for their side effect on the block log.
    for await (const _entry of getAllEntries(blockLog, head)) {
    }
    timeEnd("compact all entries");
    time("compact clock vis");
    for await (const _line of vis(blockLog, head)) {
    }
    timeEnd("compact clock vis");
    time("compact root");
    const result = await root(blockLog, head);
    timeEnd("compact root");
    time("compact root blocks");
    for (const { cid, bytes } of [...result.additions, ...result.removals]) {
      blockLog.loggedBlocks.putSync(cid, bytes);
    }
    timeEnd("compact root blocks");
    time("compact changes");
    await clockChangesSince(blockLog, head, [], {});
    timeEnd("compact changes");
  } finally {
    isCompacting = false;
  }
}
400
// Load a block by its CID string and decode it as dag-cbor.
async function getBlock(blocks, cidString) {
  const block = await blocks.get(parse(cidString));
  if (!block) throw new Error(`Missing block ${cidString}`);
  const { cid, value } = await decode({ bytes: block.bytes, codec, hasher });
  return new Block({ cid, value, bytes: block.bytes });
}
407
-
408
- // src/indexer-helpers.ts
409
- import { create as create2 } from "multiformats/block";
410
- import { sha256 as hasher2 } from "multiformats/hashes/sha2";
411
- import * as codec2 from "@ipld/dag-cbor";
412
- import charwise from "charwise";
413
- import * as DbIndex from "prolly-trees/db-index";
414
- import { bf, simpleCompare } from "prolly-trees/utils";
415
- import { nocache as cache } from "prolly-trees/cache";
416
// Holds one prolly-tree index: the persisted root CID and the loaded
// in-memory root node; either may be unset until first use.
var IndexTree = class {
  cid = null;
  root = null;
};
420
// Compare the tiebreaker component of index keys. A NaN left operand sorts
// first and an Infinity left operand sorts last — these sentinels bound the
// prefix-query ranges built in Index.query.
var refCompare = (aRef, bRef) => {
  if (Number.isNaN(aRef)) return -1;
  if (Number.isNaN(bRef)) throw new Error("ref may not be Infinity or NaN");
  if (aRef === Infinity) return 1;
  return simpleCompare(aRef, bRef);
};
429
// Order [key, ref] index pairs: primary on the key, tiebreak via refCompare.
var compare = (a, b) => {
  const [aKey, aRef] = a;
  const [bKey, bRef] = b;
  const primary = simpleCompare(aKey, bKey);
  return primary !== 0 ? primary : refCompare(aRef, bRef);
};
437
// Prolly-tree options for the by-key and by-id indexes: no cache, bf(30)
// chunker, dag-cbor codec with sha256; by-id uses the plain comparator.
var byKeyOpts = { cache, chunker: bf(30), codec: codec2, hasher: hasher2, compare };
var byIdOpts = { cache, chunker: bf(30), codec: codec2, hasher: hasher2, compare: simpleCompare };
439
// Run the map function over each non-deleted change, collecting emitted
// [charwise(key), docId] entries. If the map function returns a value
// without ever calling emit(), that return value is indexed instead.
function indexEntriesForChanges(changes, mapFn) {
  const indexEntries = [];
  changes.forEach(({ key: _id, value, del }) => {
    if (del || !value) return;
    let mapCalled = false;
    const mapReturn = mapFn({ _id, ...value }, (k, v) => {
      mapCalled = true;
      // emit(undefined) is a no-op.
      if (typeof k === "undefined") return;
      indexEntries.push({
        key: [charwise.encode(k), _id],
        value: v || null
      });
    });
    if (!mapCalled && mapReturn) {
      indexEntries.push({
        key: [charwise.encode(mapReturn), _id],
        value: null
      });
    }
  });
  return indexEntries;
}
465
// Adapt a transaction blockstore into the async block getter shape that
// prolly-trees expects (address -> decoded block).
function makeProllyGetBlock(blocks) {
  return async (address) => {
    const block = await blocks.get(address);
    if (!block) throw new Error(`Missing block ${address.toString()}`);
    const { cid, bytes } = block;
    return create2({ cid, bytes, hasher: hasher2, codec: codec2 });
  };
}
474
// Apply indexEntries to the index described by inIndex. Builds the tree from
// scratch on first use, lazily hydrates it when only the CID is persisted,
// and otherwise bulk-updates the loaded root. Returns the new { root, cid }.
async function bulkIndex(tblocks, inIndex, indexEntries, opts) {
  if (!indexEntries.length) return inIndex;
  if (!inIndex.root) {
    if (!inIndex.cid) {
      // Fresh index: create the tree, persisting every node; the last node
      // visited is the root.
      let returnRootBlock = null;
      let returnNode = null;
      for await (const node of await DbIndex.create({ get: makeProllyGetBlock(tblocks), list: indexEntries, ...opts })) {
        const block = await node.block;
        await tblocks.put(block.cid, block.bytes);
        returnRootBlock = block;
        returnNode = node;
      }
      if (!returnNode || !returnRootBlock) throw new Error("failed to create index");
      return { root: returnNode, cid: returnRootBlock.cid };
    }
    // Root persisted but not loaded: hydrate it, then fall through to bulk.
    inIndex.root = await DbIndex.load({ cid: inIndex.cid, get: makeProllyGetBlock(tblocks), ...opts });
  }
  const { root: newRoot, blocks: newBlocks } = await inIndex.root.bulk(indexEntries);
  if (newRoot) {
    for await (const block of newBlocks) {
      await tblocks.put(block.cid, block.bytes);
    }
    return { root: newRoot, cid: (await newRoot.block).cid };
  }
  // No root left after the bulk update: the index is empty.
  return { root: null, cid: null };
}
504
// Hydrate a persisted prolly-tree index root by CID.
async function loadIndex(tblocks, cid, opts) {
  return await DbIndex.load({ cid, get: makeProllyGetBlock(tblocks), ...opts });
}
507
// Post-process raw index results per the query options (descending, limit,
// includeDocs), decode charwise keys, and return user-facing rows.
async function applyQuery(crdt, resp, query) {
  if (query.descending) {
    resp.result = resp.result.reverse();
  }
  if (query.limit) {
    resp.result = resp.result.slice(0, query.limit);
  }
  if (query.includeDocs) {
    // Fetch each row's document; missing docs become null.
    resp.result = await Promise.all(
      resp.result.map(async (row) => {
        const val = await crdt.get(row.id);
        const doc = val ? { _id: row.id, ...val.doc } : null;
        return { ...row, doc };
      })
    );
  }
  const rows = resp.result.map((row) => {
    row.key = charwise.decode(row.key);
    // Normalize legacy `row.row` payloads onto `row.value`.
    if (row.row && !row.value) {
      row.value = row.row;
      delete row.row;
    }
    return row;
  });
  return { rows };
}
534
// Charwise-encode range bounds and single keys for prolly-tree lookups.
function encodeRange(range) {
  return range.map((key) => charwise.encode(key));
}
function encodeKey(key) {
  return charwise.encode(key);
}
540
-
541
- // src/index.ts
542
// src/index.ts
// Get or create the named index on a database's CRDT. Re-invoking for an
// existing index re-applies the mapFn or persisted meta to it.
function index({ _crdt }, name, mapFn, meta) {
  if (mapFn && meta) throw new Error("cannot provide both mapFn and meta");
  if (mapFn && mapFn.constructor.name !== "Function") throw new Error("mapFn must be a function");
  if (_crdt.indexers.has(name)) {
    const existing = _crdt.indexers.get(name);
    existing.applyMapFn(name, mapFn, meta);
  } else {
    _crdt.indexers.set(name, new Index(_crdt, name, mapFn, meta));
  }
  return _crdt.indexers.get(name);
}
556
// A secondary index over a CRDT, maintained as two prolly-trees:
// byKey ([mapped key, docId] -> value) for queries, and byId (docId ->
// index key) for locating stale entries when documents change.
var Index = class {
  blockstore;
  crdt;
  // Index name; fixed after the first applyMapFn call.
  name = null;
  mapFn = null;
  // Serialized map function; used for equality checks and persisted in meta.
  mapFnString = "";
  byKey = new IndexTree();
  byId = new IndexTree();
  // Clock head the index was last updated to; undefined until first update.
  indexHead = void 0;
  includeDocsDefault = false;
  // Error captured in applyMapFn; deferred and rethrown on first query.
  initError = null;
  ready;
  constructor(crdt, name, mapFn, meta) {
    this.blockstore = crdt.indexBlockstore;
    this.crdt = crdt;
    this.applyMapFn(name, mapFn, meta);
    if (!(this.mapFnString || this.initError))
      throw new Error("missing mapFnString");
    this.ready = this.blockstore.ready.then(() => {
    });
  }
  // Attach either a live map function or persisted index meta (never both).
  // Errors are captured into this.initError rather than thrown, so index()
  // can be called during applyMeta without crashing the blockstore.
  applyMapFn(name, mapFn, meta) {
    if (mapFn && meta)
      throw new Error("cannot provide both mapFn and meta");
    if (this.name && this.name !== name)
      throw new Error("cannot change name");
    this.name = name;
    try {
      if (meta) {
        // Persisted meta path: refuse to overwrite a diverged head.
        if (this.indexHead && this.indexHead.map((c) => c.toString()).join() !== meta.head.map((c) => c.toString()).join()) {
          throw new Error("cannot apply meta to existing index");
        }
        if (this.mapFnString) {
          // Only adopt the meta's CIDs/head when its map matches ours.
          if (this.mapFnString !== meta.map) {
            console.log(
              "cannot apply different mapFn meta: old mapFnString",
              this.mapFnString,
              "new mapFnString",
              meta.map
            );
          } else {
            this.byId.cid = meta.byId;
            this.byKey.cid = meta.byKey;
            this.indexHead = meta.head;
          }
        } else {
          // No map yet: adopt everything from the persisted meta.
          this.mapFnString = meta.map;
          this.byId.cid = meta.byId;
          this.byKey.cid = meta.byKey;
          this.indexHead = meta.head;
        }
      } else {
        if (this.mapFn) {
          // Re-application with a live map: it must be identical.
          if (mapFn) {
            if (this.mapFn.toString() !== mapFn.toString())
              throw new Error("cannot apply different mapFn app2");
          }
        } else {
          // Default map: index the document field named after the index.
          if (!mapFn) {
            mapFn = (doc) => doc[name] ?? void 0;
          }
          if (this.mapFnString) {
            if (this.mapFnString !== mapFn.toString())
              throw new Error("cannot apply different mapFn app");
          } else {
            this.mapFnString = mapFn.toString();
          }
          this.mapFn = mapFn;
        }
      }
      // Arrow-expression maps (`=> expr`) default queries to includeDocs.
      const matches = /=>\s*(.*)/.test(this.mapFnString);
      this.includeDocsDefault = matches;
    } catch (e) {
      this.initError = e;
    }
  }
  // Bring the index up to date, then answer the query via the byKey tree.
  // Supports range, single key, multiple keys, prefix, or full scan.
  async query(opts = {}) {
    await this._updateIndex();
    await this._hydrateIndex();
    if (!this.byKey.root)
      return await applyQuery(this.crdt, { result: [] }, opts);
    if (this.includeDocsDefault && opts.includeDocs === void 0)
      opts.includeDocs = true;
    if (opts.range) {
      const { result: result2, ...all2 } = await this.byKey.root.range(...encodeRange(opts.range));
      return await applyQuery(this.crdt, { result: result2, ...all2 }, opts);
    }
    if (opts.key) {
      const encodedKey = encodeKey(opts.key);
      return await applyQuery(this.crdt, await this.byKey.root.get(encodedKey), opts);
    }
    if (Array.isArray(opts.keys)) {
      const results = await Promise.all(
        opts.keys.map(async (key) => {
          const encodedKey = encodeKey(key);
          return (await applyQuery(this.crdt, await this.byKey.root.get(encodedKey), opts)).rows;
        })
      );
      return { rows: results.flat() };
    }
    if (opts.prefix) {
      if (!Array.isArray(opts.prefix))
        opts.prefix = [opts.prefix];
      // NaN sorts before and Infinity after every tiebreaker (see refCompare),
      // so [prefix, NaN]..[prefix, Infinity] spans all entries under prefix.
      const start = [...opts.prefix, NaN];
      const end = [...opts.prefix, Infinity];
      const encodedR = encodeRange([start, end]);
      return await applyQuery(this.crdt, await this.byKey.root.range(...encodedR), opts);
    }
    // Full scan: unpack the [key, id] compound keys into row fields.
    const { result, ...all } = await this.byKey.root.getAllEntries();
    return await applyQuery(
      this.crdt,
      {
        result: result.map(({ key: [k, id], value }) => ({ key: k, id, value })),
        ...all
      },
      opts
    );
  }
  // Drop all in-memory and persisted index state; next query rebuilds.
  _resetIndex() {
    this.byId = new IndexTree();
    this.byKey = new IndexTree();
    this.indexHead = void 0;
  }
  // Load both trees from their persisted CIDs when not already in memory.
  async _hydrateIndex() {
    if (this.byId.root && this.byKey.root)
      return;
    if (!this.byId.cid || !this.byKey.cid)
      return;
    this.byId.root = await loadIndex(this.blockstore, this.byId.cid, byIdOpts);
    this.byKey.root = await loadIndex(this.blockstore, this.byKey.cid, byKeyOpts);
  }
  // Fold CRDT changes since indexHead into both trees inside one
  // blockstore transaction, and return the meta for all indexers.
  async _updateIndex() {
    await this.ready;
    if (this.initError)
      throw this.initError;
    if (!this.mapFn)
      throw new Error("No map function defined");
    let result, head;
    if (!this.indexHead || this.indexHead.length === 0) {
      // First build: index every document.
      ;
      ({ result, head } = await this.crdt.allDocs());
    } else {
      // Incremental: only documents changed since the last indexed head.
      ;
      ({ result, head } = await this.crdt.changes(this.indexHead));
    }
    if (result.length === 0) {
      this.indexHead = head;
      return { byId: this.byId, byKey: this.byKey };
    }
    // For changed docs, find their old index keys (via byId) and mark both
    // the stale byKey entries and the byId entries for deletion.
    let staleKeyIndexEntries = [];
    let removeIdIndexEntries = [];
    if (this.byId.root) {
      const removeIds = result.map(({ key }) => key);
      const { result: oldChangeEntries } = await this.byId.root.getMany(removeIds);
      staleKeyIndexEntries = oldChangeEntries.map((key) => ({ key, del: true }));
      removeIdIndexEntries = oldChangeEntries.map((key) => ({ key: key[1], del: true }));
    }
    const indexEntries = indexEntriesForChanges(result, this.mapFn);
    const byIdIndexEntries = indexEntries.map(({ key }) => ({
      key: key[1],
      value: key
    }));
    // Snapshot the meta of every other indexer so the transaction's meta
    // describes the full index set.
    const indexerMeta = { indexes: /* @__PURE__ */ new Map() };
    for (const [name, indexer] of this.crdt.indexers) {
      if (indexer.indexHead) {
        indexerMeta.indexes.set(name, {
          byId: indexer.byId.cid,
          byKey: indexer.byKey.cid,
          head: indexer.indexHead,
          map: indexer.mapFnString,
          name: indexer.name
        });
      }
    }
    return await this.blockstore.transaction(async (tblocks) => {
      this.byId = await bulkIndex(
        tblocks,
        this.byId,
        removeIdIndexEntries.concat(byIdIndexEntries),
        byIdOpts
      );
      this.byKey = await bulkIndex(
        tblocks,
        this.byKey,
        staleKeyIndexEntries.concat(indexEntries),
        byKeyOpts
      );
      this.indexHead = head;
      const idxMeta = {
        byId: this.byId.cid,
        byKey: this.byKey.cid,
        head,
        map: this.mapFnString,
        name: this.name
      };
      indexerMeta.indexes.set(this.name, idxMeta);
      return indexerMeta;
    });
  }
};
756
-
757
- // src/crdt-clock.ts
758
- import { advance } from "@web3-storage/pail/clock";
759
- import { root as root2 } from "@web3-storage/pail/crdt";
760
-
761
- // src/apply-head-queue.ts
762
// src/apply-head-queue.ts
// Serializes head-merge work: tasks are queued and drained one at a time by
// `worker`, with accumulated document updates yielded back to the caller as
// an async generator.
function applyHeadQueue(worker) {
  const queue = [];
  let isProcessing = false;
  async function* process() {
    // Single-flight guard: only one drain loop runs at a time.
    if (isProcessing || queue.length === 0)
      return;
    isProcessing = true;
    const allUpdates = [];
    try {
      while (queue.length > 0) {
        // NOTE(review): comparator ignores `a`; appears intended to move
        // tasks carrying updates to the front of the queue — confirm.
        queue.sort((a, b) => b.updates ? 1 : -1);
        const task = queue.shift();
        if (!task)
          continue;
        // Run the worker; log and rethrow so the error reaches the
        // iterating caller.
        await worker(task.newHead, task.prevHead, task.updates !== null).catch((e) => {
          console.error("int_applyHead worker error", e);
          throw e;
        });
        if (task.updates) {
          allUpdates.push(...task.updates);
        }
        // Yield when no queued task still carries updates, or when this
        // task did; `all` reports whether every remaining task has updates.
        if (!queue.some((t) => t.updates) || task.updates) {
          const allTasksHaveUpdates = queue.every((task2) => task2.updates !== null);
          yield { updates: allUpdates, all: allTasksHaveUpdates };
          allUpdates.length = 0;
        }
      }
    } finally {
      isProcessing = false;
      // Drain anything enqueued while we were processing; this nested
      // generator is consumed to completion here, not yielded outward.
      const generator = process();
      let result = await generator.next();
      while (!result.done) {
        result = await generator.next();
      }
    }
  }
  return {
    // Enqueue a task and return the generator that drains the queue.
    push(task) {
      queue.push(task);
      return process();
    },
    size() {
      return queue.length;
    }
  };
}
808
-
809
- // src/crdt-clock.ts
810
// src/crdt-clock.ts
// Tracks the CRDT's clock head and notifies listeners when it advances.
// Head merges are funneled through applyHeadQueue so they run one at a time.
var CRDTClock = class {
  // todo: track local and remote clocks independently, merge on read
  // that way we can drop the whole remote if we need to
  // should go with making sure the local clock only references locally available blockstore on write
  head = [];
  // zoomers: called whenever the head moves (indexes reset on these).
  zoomers = /* @__PURE__ */ new Set();
  // watchers: receive the list of document updates for each head change.
  watchers = /* @__PURE__ */ new Set();
  // emptyWatchers: notified on every change, without the updates payload.
  emptyWatchers = /* @__PURE__ */ new Set();
  // Assigned by the owning CRDT after construction.
  blockstore = null;
  applyHeadQueue;
  constructor() {
    this.applyHeadQueue = applyHeadQueue(this.int_applyHead.bind(this));
  }
  setHead(head) {
    this.head = head;
  }
  // Queue a head merge; for each batch the queue yields, fan the updates
  // out to listeners. `updates` is null for remote/meta-driven changes.
  async applyHead(newHead, prevHead, updates = null) {
    for await (const { updates: updatesAcc, all } of this.applyHeadQueue.push({
      newHead,
      prevHead,
      updates
    })) {
      // Fire-and-forget: notification is not awaited by the merge loop.
      this.processUpdates(updatesAcc, all, prevHead);
    }
  }
  async processUpdates(updatesAcc, all, prevHead) {
    let internalUpdates = updatesAcc;
    // When some queued tasks had no updates (remote changes), recompute the
    // actual changed set from the clock so watchers see everything.
    if (this.watchers.size && !all) {
      const changes = await clockChangesSince(this.blockstore, this.head, prevHead, {});
      internalUpdates = changes.result;
    }
    this.zoomers.forEach((fn) => fn());
    this.notifyWatchers(internalUpdates || []);
  }
  notifyWatchers(updates) {
    this.emptyWatchers.forEach((fn) => fn());
    this.watchers.forEach((fn) => fn(updates || []));
  }
  onTick(fn) {
    this.watchers.add(fn);
  }
  onTock(fn) {
    this.emptyWatchers.add(fn);
  }
  onZoom(fn) {
    this.zoomers.add(fn);
  }
  // Merge `newHead` into the current head inside a blockstore transaction.
  // Runs serially via applyHeadQueue.
  async int_applyHead(newHead, prevHead, localUpdates) {
    const ogHead = sortClockHead(this.head);
    newHead = sortClockHead(newHead);
    // Already at this head: nothing to do.
    if (compareClockHeads(ogHead, newHead)) {
      return;
    }
    const ogPrev = sortClockHead(prevHead);
    // Fast-forward: current head equals the writer's previous head, so the
    // new head can be adopted directly without advancing the clock.
    if (compareClockHeads(ogHead, ogPrev)) {
      this.setHead(newHead);
      return;
    }
    let head = this.head;
    // Remote (non-local) changes skip the loader when writing the merge.
    const noLoader = !localUpdates;
    if (!this.blockstore)
      throw new Error("missing blockstore");
    await validateBlocks(newHead, this.blockstore);
    await this.blockstore.transaction(
      async (tblocks) => {
        head = await advanceBlocks(newHead, tblocks, head);
        const result = await root2(tblocks, head);
        for (const { cid, bytes } of [...result.additions, ...result.removals]) {
          tblocks.putSync(cid, bytes);
        }
        return { head };
      },
      { noLoader }
    );
    this.setHead(head);
  }
};
887
// Sort clock CIDs by their string form so heads compare deterministically.
// Sorts in place and returns the same array.
function sortClockHead(clockHead) {
  return clockHead.sort((x, y) => x.toString().localeCompare(y.toString()));
}
890
// Verify every head CID is present in the blockstore; rejects with an error
// naming the first missing block.
// BUG FIX: the original fired the lookups via newHead.map(async ...) without
// awaiting them, so the function resolved before any check completed and a
// missing block only surfaced as an unhandled promise rejection. Awaiting
// Promise.all makes the validation actually gate the caller.
async function validateBlocks(newHead, blockstore) {
  await Promise.all(
    newHead.map(async (cid) => {
      const got = await blockstore.get(cid);
      if (!got) {
        throw new Error("int_applyHead missing block: " + cid.toString());
      }
    })
  );
}
898
// Two heads are considered equal when their string serializations match
// (callers sort both heads first, making the comparison order-insensitive).
function compareClockHeads(head1, head2) {
  return String(head1) === String(head2);
}
901
// Fold each incoming event CID into the clock head, one at a time.
// A CID that cannot be advanced is skipped and leaves the head unchanged —
// this best-effort behavior is deliberate (partial merges still progress).
async function advanceBlocks(newHead, tblocks, head) {
  for (const eventCid of newHead) {
    try {
      head = await advance(tblocks, head, eventCid);
    } catch (e) {
      // deliberately ignored: un-advanceable events do not abort the merge
    }
  }
  return head;
}
911
-
912
// src/crdt.ts
// CRDT: the core document store. Pairs an EncryptedBlockstore (persistence)
// with a CRDTClock (merge ordering) and exposes bulk/get/changes/allDocs.
var CRDT = class {
  // Database name; null for unnamed instances.
  name;
  // Raw configuration; the constructor reads autoCompact, crypto, store,
  // public, meta and persistIndexes from it.
  opts = {};
  // Resolves once both blockstores have finished initializing.
  ready;
  // Primary encrypted blockstore holding document data.
  blockstore;
  // Separate blockstore for index data (named "<name>.idx" when persisted).
  indexBlockstore;
  // Registered index instances; reset whenever the clock "zooms".
  indexers = /* @__PURE__ */ new Map();
  // Merkle clock tracking the current head of this CRDT.
  clock = new CRDTClock();
  constructor(name, opts) {
    this.name = name || null;
    this.opts = opts || this.opts;
    this.blockstore = new EncryptedBlockstore({
      name,
      // Loaded/remote metadata carries a head that is merged into our clock.
      applyMeta: async (meta) => {
        const crdtMeta = meta;
        await this.clock.applyHead(crdtMeta.head, []);
      },
      // Compaction rewrites the blocks reachable from the current head.
      compact: async (blocks) => {
        await doCompact(blocks, this.clock.head);
        return { head: this.clock.head };
      },
      // NOTE(review): `|| 100` also maps an explicit autoCompact of 0 to 100.
      autoCompact: this.opts.autoCompact || 100,
      crypto: this.opts.crypto || crypto,
      store: this.opts.store || store,
      public: this.opts.public,
      meta: this.opts.meta
    });
    this.clock.blockstore = this.blockstore;
    this.indexBlockstore = new EncryptedBlockstore({
      name: this.opts.persistIndexes && this.name ? this.name + ".idx" : void 0,
      // Index metadata re-registers each persisted index by name.
      applyMeta: async (meta) => {
        const idxCarMeta = meta;
        for (const [name2, idx] of Object.entries(idxCarMeta.indexes)) {
          index({ _crdt: this }, name2, void 0, idx);
        }
      },
      crypto,
      public: this.opts.public,
      store
    });
    this.ready = Promise.all([this.blockstore.ready, this.indexBlockstore.ready]).then(() => {
    });
    // A wholesale head replacement invalidates every derived index.
    this.clock.onZoom(() => {
      for (const idx of this.indexers.values()) {
        idx._resetIndex();
      }
    });
  }
  // Apply a batch of {key, value, del, clock} updates in one transaction,
  // then advance the clock; returns the transaction meta (new head).
  async bulk(updates) {
    await this.ready;
    const prevHead = [...this.clock.head];
    const meta = await this.blockstore.transaction(
      async (blocks) => {
        const { head } = await applyBulkUpdateToCrdt(blocks, this.clock.head, updates);
        updates = updates.map(({ key, value, del, clock }) => {
          // Resolve any file attachments referenced by the written documents.
          readFiles(this.blockstore, { doc: value });
          return { key, value, del, clock };
        });
        return { head };
      }
    );
    await this.clock.applyHead(meta.head, prevHead, updates);
    return meta;
  }
  // if (snap) await this.clock.applyHead(crdtMeta.head, this.clock.head)
  // Collect every entry reachable from the current head.
  async allDocs() {
    await this.ready;
    const result = [];
    for await (const entry of getAllEntries(this.blockstore, this.clock.head)) {
      result.push(entry);
    }
    return { result, head: this.clock.head };
  }
  // Render a textual visualization of the clock history.
  async vis() {
    await this.ready;
    const txt = [];
    for await (const line of clockVis(this.blockstore, this.clock.head)) {
      txt.push(line);
    }
    return txt.join("\n");
  }
  // Fetch a raw block by its CID string.
  async getBlock(cidString) {
    await this.ready;
    return await getBlock(this.blockstore, cidString);
  }
  // Read a single value; returns null when the key exists but is deleted.
  async get(key) {
    await this.ready;
    const result = await getValueFromCrdt(this.blockstore, this.clock.head, key);
    if (result.del)
      return null;
    return result;
  }
  // Changes since the given clock (empty clock = everything).
  async changes(since = [], opts = {}) {
    await this.ready;
    return await clockChangesSince(this.blockstore, this.clock.head, since, opts);
  }
  // Force a blockstore compaction now.
  async compact() {
    return await this.blockstore.compact();
  }
};
1013
-
1014
// src/database.ts
// Database: the public document API. Wraps a CRDT with a serializing write
// queue, subscription plumbing, and a process-wide per-name instance cache
// (consumed by fireproof()).
var Database = class {
  // Cache of named Database instances shared across the process.
  static databases = /* @__PURE__ */ new Map();
  name;
  opts = {};
  // True once the clock onTick listener has been attached (lazily, in subscribe).
  _listening = false;
  // Listeners that receive changed documents.
  _listeners = /* @__PURE__ */ new Set();
  // Listeners fired with no payload after each clock "tock".
  _noupdate_listeners = /* @__PURE__ */ new Set();
  _crdt;
  _writeQueue;
  blockstore;
  constructor(name, opts) {
    this.name = name || null;
    this.opts = opts || this.opts;
    this._crdt = new CRDT(name, this.opts);
    this.blockstore = this._crdt.blockstore;
    // Serialize writes through a queue so concurrent puts never interleave bulks.
    this._writeQueue = writeQueue(async (updates) => {
      return await this._crdt.bulk(updates);
    });
    this._crdt.clock.onTock(() => {
      this._no_update_notify();
    });
  }
  // Fetch a document by id; throws "Not found: <id>" when missing or on error.
  async get(id) {
    const got = await this._crdt.get(id).catch((e) => {
      e.message = `Not found: ${id} - ` + e.message;
      throw e;
    });
    if (!got)
      throw new Error(`Not found: ${id}`);
    const { doc } = got;
    return { _id: id, ...doc };
  }
  // Write a document; a uuidv7 id is generated when _id is absent.
  async put(doc) {
    const { _id, ...value } = doc;
    const docId = _id || uuidv7();
    const result = await this._writeQueue.push({ key: docId, value });
    return { id: docId, clock: result?.head };
  }
  // Tombstone a document by id.
  async del(id) {
    const result = await this._writeQueue.push({ key: id, del: true });
    return { id, clock: result?.head };
  }
  // Changes since a clock, shaped as rows with _deleted markers for tombstones.
  async changes(since = [], opts = {}) {
    const { result, head } = await this._crdt.changes(since, opts);
    const rows = result.map(({ key, value, del, clock }) => ({
      key,
      value: del ? { _id: key, _deleted: true } : { _id: key, ...value },
      clock
    }));
    return { rows, clock: head };
  }
  // All documents; deleted ones are represented as { _id, _deleted: true }.
  async allDocs() {
    const { result, head } = await this._crdt.allDocs();
    const rows = result.map(({ key, value, del }) => ({
      key,
      value: del ? { _id: key, _deleted: true } : { _id: key, ...value }
    }));
    return { rows, clock: head };
  }
  // Alias for allDocs().
  async allDocuments() {
    return this.allDocs();
  }
  // Register a listener and return an unsubscribe function. With updates=true
  // the listener receives changed docs on every tick; otherwise it fires with
  // no payload after each tock.
  subscribe(listener, updates) {
    if (updates) {
      if (!this._listening) {
        this._listening = true;
        this._crdt.clock.onTick((updates2) => {
          void this._notify(updates2);
        });
      }
      this._listeners.add(listener);
      return () => {
        this._listeners.delete(listener);
      };
    } else {
      this._noupdate_listeners.add(listener);
      return () => {
        this._noupdate_listeners.delete(listener);
      };
    }
  }
  // todo if we add this onto dbs in fireproof.ts then we can make index.ts a separate package
  // Run an index query; `field` is either a key name or a map function
  // (whose source text becomes the index name via makeName).
  async query(field, opts = {}) {
    const idx = typeof field === "string" ? index({ _crdt: this._crdt }, field) : index({ _crdt: this._crdt }, makeName(field.toString()), field);
    return await idx.query(opts);
  }
  // Force a compaction of the underlying CRDT blockstore.
  async compact() {
    await this._crdt.compact();
  }
  // Deliver changed docs to update listeners; listener errors are logged, not thrown.
  async _notify(updates) {
    if (this._listeners.size) {
      const docs = updates.map(({ key, value }) => ({ _id: key, ...value }));
      for (const listener of this._listeners) {
        await (async () => await listener(docs))().catch((e) => {
          console.error("subscriber error", e);
        });
      }
    }
  }
  // Fire the no-payload listeners; errors are logged, not thrown.
  async _no_update_notify() {
    if (this._noupdate_listeners.size) {
      for (const listener of this._noupdate_listeners) {
        await (async () => await listener([]))().catch((e) => {
          console.error("subscriber error", e);
        });
      }
    }
  }
};
1124
// Factory returning one shared Database instance per name.
// Note: `opts` are only honored the first time a given name is seen;
// later calls return the cached instance unchanged.
function fireproof(name, opts) {
  let db = Database.databases.get(name);
  if (!db) {
    db = new Database(name, opts);
    Database.databases.set(name, db);
  }
  return db;
}
1130
// Derive an index name from a map function's source text.
// If the source contains a two-argument parameter list like "(doc, emit)" or
// "([a, b], emit)", the full source is used as the name; otherwise, for a
// single-argument arrow function, the arrow body becomes the name. Anything
// else falls back to the full source text.
function makeName(fnString) {
  const paramPairRe = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g;
  const paramPairs = Array.from(fnString.matchAll(paramPairRe), (m) => m[1].trim());
  let arrowBody = null;
  if (paramPairs.length === 0) {
    arrowBody = /=>\s*(.*)/.exec(fnString);
  }
  return arrowBody ? arrowBody[1] : fnString;
}
1143
// Public module API: the Database class and the fireproof() factory.
export {
  Database,
  fireproof
};
1147
- //# sourceMappingURL=fireproof.js.map