@fireproof/core 0.16.1 → 0.16.3

Sign up to get free protection for your applications and to get access to all the features.
Files changed (56) hide show
  1. package/README.md +14 -7
  2. package/dist/browser/fireproof.cjs +251 -34623
  3. package/dist/browser/fireproof.cjs.map +1 -7
  4. package/dist/browser/fireproof.d.cts +286 -0
  5. package/dist/browser/fireproof.d.ts +286 -0
  6. package/dist/browser/fireproof.global.js +21282 -0
  7. package/dist/browser/fireproof.global.js.map +1 -0
  8. package/dist/browser/fireproof.js +1114 -0
  9. package/dist/browser/fireproof.js.map +1 -0
  10. package/dist/browser/metafile-cjs.json +1 -0
  11. package/dist/browser/metafile-esm.json +1 -0
  12. package/dist/browser/metafile-iife.json +1 -0
  13. package/dist/memory/fireproof.cjs +1143 -0
  14. package/dist/memory/fireproof.cjs.map +1 -0
  15. package/dist/memory/fireproof.d.cts +286 -0
  16. package/dist/memory/fireproof.d.ts +286 -0
  17. package/dist/memory/fireproof.global.js +21282 -0
  18. package/dist/memory/fireproof.global.js.map +1 -0
  19. package/dist/memory/fireproof.js +1114 -0
  20. package/dist/memory/fireproof.js.map +1 -0
  21. package/dist/memory/metafile-cjs.json +1 -0
  22. package/dist/memory/metafile-esm.json +1 -0
  23. package/dist/memory/metafile-iife.json +1 -0
  24. package/dist/node/fireproof.cjs +210 -43910
  25. package/dist/node/fireproof.cjs.map +1 -7
  26. package/dist/node/fireproof.d.cts +286 -0
  27. package/dist/node/fireproof.d.ts +286 -0
  28. package/dist/node/fireproof.global.js +21338 -0
  29. package/dist/node/fireproof.global.js.map +1 -0
  30. package/dist/node/fireproof.js +1114 -0
  31. package/dist/node/fireproof.js.map +1 -0
  32. package/dist/node/metafile-cjs.json +1 -0
  33. package/dist/node/metafile-esm.json +1 -0
  34. package/dist/node/metafile-iife.json +1 -0
  35. package/package.json +38 -33
  36. package/dist/browser/fireproof.esm.js +0 -35509
  37. package/dist/browser/fireproof.esm.js.map +0 -7
  38. package/dist/browser/fireproof.iife.js +0 -35517
  39. package/dist/browser/fireproof.iife.js.map +0 -7
  40. package/dist/node/fireproof.esm.js +0 -44848
  41. package/dist/node/fireproof.esm.js.map +0 -7
  42. package/dist/types/apply-head-queue.d.ts +0 -15
  43. package/dist/types/crdt-clock.d.ts +0 -20
  44. package/dist/types/crdt-helpers.d.ts +0 -14
  45. package/dist/types/crdt.d.ts +0 -27
  46. package/dist/types/database.d.ts +0 -48
  47. package/dist/types/eb-edge.d.ts +0 -0
  48. package/dist/types/eb-node.d.ts +0 -3
  49. package/dist/types/eb-web.d.ts +0 -3
  50. package/dist/types/files.d.ts +0 -12
  51. package/dist/types/fireproof.d.ts +0 -3
  52. package/dist/types/index.d.ts +0 -31
  53. package/dist/types/indexer-helpers.d.ts +0 -57
  54. package/dist/types/types.d.ts +0 -136
  55. package/dist/types/version.d.ts +0 -1
  56. package/dist/types/write-queue.d.ts +0 -7
@@ -0,0 +1,1114 @@
1
+ // src/database.ts
2
+ import { uuidv7 } from "uuidv7";
3
+
4
+ // src/write-queue.ts
5
// Serializes writes: queued tasks are drained in batches of up to `payload`
// through `worker`. In unbounded mode each task gets its own worker call.
function writeQueue(worker, payload = Infinity, unbounded = false) {
  const queue = [];
  let busy = false;
  async function drain() {
    if (busy || !queue.length) return;
    busy = true;
    const batch = queue.splice(0, payload);
    const updates = batch.map(({ task }) => task);
    if (unbounded) {
      // Independent worker call per update; each waiter settles on its own.
      await Promise.all(
        updates.map(async (update, i) => {
          try {
            batch[i].resolve(await worker([update]));
          } catch (error) {
            batch[i].reject(error);
          }
        })
      );
    } else {
      // One worker call for the whole batch; all waiters share the outcome.
      try {
        const result = await worker(updates);
        for (const entry of batch) entry.resolve(result);
      } catch (error) {
        for (const entry of batch) entry.reject(error);
      }
    }
    busy = false;
    void drain(); // keep draining anything pushed while we were working
  }
  return {
    push(task) {
      return new Promise((resolve, reject) => {
        queue.push({ task, resolve, reject });
        void drain();
      });
    }
  };
}
44
+
45
+ // src/crdt.ts
46
+ import {
47
+ EncryptedBlockstore
48
+ } from "@fireproof/encrypted-blockstore";
49
+
50
+ // src/eb-web.ts
51
+ import * as crypto from "@fireproof/encrypted-blockstore/crypto-web";
52
+ import * as store from "@fireproof/encrypted-blockstore/store-web";
53
+
54
+ // src/crdt-helpers.ts
55
+ import { encode, decode, Block } from "multiformats/block";
56
+ import { parse } from "multiformats/link";
57
+ import { sha256 as hasher } from "multiformats/hashes/sha2";
58
+ import * as codec from "@ipld/dag-cbor";
59
+ import { put, get, entries, root } from "@alanshaw/pail/crdt";
60
+ import { EventFetcher, vis } from "@alanshaw/pail/clock";
61
+ import {
62
+ CarTransaction
63
+ } from "@fireproof/encrypted-blockstore";
64
+
65
+ // src/files.ts
66
+ import * as UnixFS from "@ipld/unixfs";
67
+ import * as raw from "multiformats/codecs/raw";
68
+ import { withMaxChunkSize } from "@ipld/unixfs/file/chunker/fixed";
69
+ import { withWidth } from "@ipld/unixfs/file/layout/balanced";
70
+ import { exporter } from "ipfs-unixfs-exporter";
71
+ var queuingStrategy = UnixFS.withCapacity();
72
+ var settings = UnixFS.configure({
73
+ fileChunkEncoder: raw,
74
+ smallFileEncoder: raw,
75
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call
76
+ chunker: withMaxChunkSize(1024 * 1024),
77
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call
78
+ fileLayout: withWidth(1024)
79
+ });
80
// Chunk a Blob into UnixFS blocks; the root block (emitted last) carries the
// CID that addresses the whole file.
async function encodeFile(blob) {
  const stream = createFileEncoderStream(blob);
  const blocks = await collect(stream);
  const rootBlock = blocks.at(-1);
  return { cid: rootBlock.cid, blocks };
}
85
// Reassemble a File from its UnixFS blocks using the stored metadata.
async function decodeFile(blocks, cid, meta) {
  const entry = await exporter(cid.toString(), blocks, { length: meta.size });
  const chunks = [];
  for await (const chunk of entry.content()) {
    chunks.push(chunk);
  }
  // lastModified pinned to 0 so decoding is deterministic
  return new File(chunks, entry.name, { type: meta.type, lastModified: 0 });
}
92
// Pipe a Blob through a UnixFS writer and hand back the readable block side.
function createFileEncoderStream(blob) {
  const { readable, writable } = new TransformStream({}, queuingStrategy);
  const writer = UnixFS.createWriter({ writable, settings });
  const builder = new UnixFSFileBuilder("", blob);
  // Fire-and-forget: encoding runs while the caller consumes `readable`.
  void (async () => {
    await builder.finalize(writer);
    await writer.close();
  })();
  return readable;
}
102
// Drain a ReadableStream into an array of its chunks.
async function collect(collectable) {
  const out = [];
  const sink = new WritableStream({
    write(chunk) {
      out.push(chunk);
    }
  });
  await collectable.pipeTo(sink);
  return out;
}
113
// Wraps a Blob so it can be streamed through a UnixFS file writer.
var UnixFSFileBuilder = class {
  #file;
  name;
  constructor(name, file) {
    this.name = name;
    this.#file = file;
  }
  // Stream the blob's bytes into `writer`; returns the UnixFS close result.
  async finalize(writer) {
    const fileWriter = UnixFS.createFileWriter(writer);
    const sink = new WritableStream({
      async write(chunk) {
        await fileWriter.write(chunk);
      }
    });
    await this.#file.stream().pipeTo(sink);
    return await fileWriter.close();
  }
};
132
+
133
+ // src/crdt-helpers.ts
134
// Apply each update to the pail CRDT, persisting the blocks each put
// produces, and return the advanced clock head.
async function applyBulkUpdateToCrdt(tblocks, head, updates, options) {
  let result;
  for (const update of updates) {
    const link = await writeDocContent(tblocks, update);
    result = await put(tblocks, head, update.key, link, options);
    const resRoot = result.root.toString();
    const isReturned = result.additions.some((a) => a.cid.toString() === resRoot);
    if (!isReturned) {
      // The new root was not among the put's additions; it must already be
      // retrievable, otherwise the CRDT state would be unreadable.
      const hasRoot = await tblocks.get(result.root);
      if (!hasRoot) {
        // NOTE(review): the original also assigned `result.head = head`
        // immediately after this throw — unreachable dead code, removed.
        throw new Error(
          `missing root in additions: ${result.additions.length} ${resRoot} keys: ${updates.map((u) => u.key).toString()}`
        );
      }
    }
    if (result.event) {
      // Persist the event plus all new blocks, then advance the head.
      for (const { cid, bytes } of [...result.additions, result.event]) {
        tblocks.putSync(cid, bytes);
      }
      head = result.head;
    }
  }
  return { head };
}
159
// Encode one update's payload (doc or deletion tombstone) as a dag-cbor
// block and return its CID.
async function writeDocContent(blocks, update) {
  let value;
  if (update.del) {
    value = { del: true }; // tombstone
  } else {
    await processFiles(blocks, update.value); // swap File objects for refs
    value = { doc: update.value };
  }
  const block = await encode({ value, hasher, codec });
  blocks.putSync(block.cid, block.bytes);
  return block.cid;
}
171
// Replace any attached File objects on the doc with content-addressed refs;
// `_publicFiles` are committed publicly.
async function processFiles(blocks, doc) {
  if (doc._files) {
    await processFileset(blocks, doc._files);
  }
  if (doc._publicFiles) {
    await processFileset(blocks, doc._publicFiles, true);
  }
}
179
// Encode each File in `files` into blocks inside a fresh CarTransaction,
// replace the entries with { cid, type, size } metadata, then commit the
// transaction and tag each written entry with the car it landed in.
async function processFileset(blocks, files, publicFiles = false) {
  const dbBlockstore = blocks.parent;
  const t = new CarTransaction(dbBlockstore);
  const didPut = [];
  for (const filename in files) {
    // only raw File objects need encoding; already-converted metas pass through
    if (File === files[filename].constructor) {
      const file = files[filename];
      const { cid, blocks: fileBlocks } = await encodeFile(file);
      didPut.push(filename);
      for (const block of fileBlocks) {
        t.putSync(block.cid, block.bytes);
      }
      files[filename] = { cid, type: file.type, size: file.size };
    }
  }
  if (!didPut.length) return;
  const car = await dbBlockstore.loader?.commitFiles(t, { files }, {
    public: publicFiles
  });
  if (car) {
    for (const name of didPut) {
      files[name] = { car, ...files[name] };
    }
  }
}
205
// Look up `key` at the given clock head and decode its stored value.
async function getValueFromCrdt(blocks, head, key) {
  if (!head.length) throw new Error("Getting from an empty database");
  const link = await get(blocks, head, key);
  if (!link) throw new Error(`Missing key ${key}`);
  return await getValueFromLink(blocks, link);
}
213
// Attach lazy file accessors to a decoded doc value (no-op for tombstones
// and docs without filesets).
function readFiles(blocks, { doc }) {
  if (!doc) return;
  if (doc._files) readFileset(blocks, doc._files);
  if (doc._publicFiles) readFileset(blocks, doc._publicFiles, true);
}
223
// For each stored file meta, add a w3s gateway URL (public sets only) and,
// when a car reference exists, a lazy `file()` loader that decodes the file
// from its car on demand.
function readFileset(blocks, files, isPublic = false) {
  for (const filename in files) {
    const fileMeta = files[filename];
    if (fileMeta.cid) {
      if (isPublic) {
        fileMeta.url = `https://${fileMeta.cid.toString()}.ipfs.w3s.link/`;
      }
      if (fileMeta.car) {
        // Defer decoding until the caller actually asks for the File.
        fileMeta.file = async () =>
          await decodeFile(
            {
              get: async (cid) => await blocks.getFile(fileMeta.car, cid, isPublic)
            },
            fileMeta.cid,
            fileMeta
          );
      }
    }
    files[filename] = fileMeta;
  }
}
245
// Fetch and decode the dag-cbor block behind `link`, wiring up lazy file
// readers on the decoded value.
async function getValueFromLink(blocks, link) {
  const block = await blocks.get(link);
  if (!block) throw new Error(`Missing linked block ${link.toString()}`);
  const { value } = await decode({ bytes: block.bytes, hasher, codec });
  readFiles(blocks, value);
  return value;
}
253
// EventFetcher that tolerates missing events: logs the failure and yields a
// null value instead of throwing, so partial clocks can still be traversed.
var DirtyEventFetcher = class extends EventFetcher {
  // @ts-ignore
  async get(link) {
    try {
      return await super.get(link);
    } catch (err) {
      console.error("missing event", link.toString(), err);
      return { value: null };
    }
  }
};
264
// Collect the updates between `since` and `head`, returned oldest-first.
async function clockChangesSince(blocks, head, since, opts) {
  const eventsFetcher = opts.dirty ? new DirtyEventFetcher(blocks) : new EventFetcher(blocks);
  const keys = /* @__PURE__ */ new Set();
  const seenLinks = /* @__PURE__ */ new Set();
  const limit = opts.limit || Infinity;
  const updates = await gatherUpdates(blocks, eventsFetcher, head, since, [], keys, seenLinks, limit);
  return { result: updates.reverse(), head };
}
279
// Walk the event DAG from `head` back toward `since`, accumulating the most
// recent update per key until `limit` new keys have been gathered.
// `didLinks` dedupes visited events across recursive calls.
async function gatherUpdates(blocks, eventsFetcher, head, since, updates = [], keys, didLinks, limit) {
  if (limit <= 0) return updates;
  // Stop as soon as the `since` horizon is part of the current head.
  const sHead = head.map((l) => l.toString());
  for (const link of since) {
    if (sHead.includes(link.toString())) {
      return updates;
    }
  }
  for (const link of head) {
    const linkStr = link.toString();
    if (didLinks.has(linkStr)) continue; // already visited this event
    didLinks.add(linkStr);
    const { value: event } = await eventsFetcher.get(link);
    if (!event) continue;
    const { key, value } = event.data;
    if (keys.has(key)) {
      // A newer update for this key was already recorded; keep walking parents.
      if (event.parents) {
        updates = await gatherUpdates(blocks, eventsFetcher, event.parents, since, updates, keys, didLinks, limit);
      }
    } else {
      keys.add(key);
      const docValue = await getValueFromLink(blocks, value);
      updates.push({ key, value: docValue.doc, del: docValue.del, clock: link });
      limit--;
      if (event.parents) {
        updates = await gatherUpdates(blocks, eventsFetcher, event.parents, since, updates, keys, didLinks, limit);
      }
    }
  }
  return updates;
}
330
// Yield every { key, value, del } entry reachable from `head`.
async function* getAllEntries(blocks, head) {
  for await (const [key, link] of entries(blocks, head)) {
    const docValue = await getValueFromLink(blocks, link);
    yield { key, value: docValue.doc, del: docValue.del };
  }
}
336
// Yield the pail clock visualization, line by line.
async function* clockVis(blocks, head) {
  yield* vis(blocks, head);
}
341
var isCompacting = false;
// Verify the full reachable graph at `head`, then persist a compacted root.
// Guarded by a module-level flag so only one compaction runs at a time; the
// flag is released in `finally` so a failed compaction cannot wedge future
// ones (the original leaked the lock on throw).
async function doCompact(blockLog, head) {
  if (isCompacting) {
    console.log("already compacting");
    return;
  }
  isCompacting = true;
  try {
    // Every head block must be present before we start.
    for (const cid of head) {
      const bl = await blockLog.get(cid);
      if (!bl) throw new Error("Missing head block: " + cid.toString());
    }
    // Touch every entry to pull its blocks through the block log.
    for await (const _entry of getAllEntries(blockLog, head)) {
      // traversal only
    }
    // Confirm each entry's value block is retrievable.
    for await (const [, link] of entries(blockLog, head)) {
      const bl = await blockLog.get(link);
      if (!bl) throw new Error("Missing entry block: " + link.toString());
    }
    // Walk the clock visualization for the same reason.
    for await (const _line of vis(blockLog, head)) {
      // traversal only
    }
    const result = await root(blockLog, head);
    for (const { cid, bytes } of [...result.additions, ...result.removals]) {
      blockLog.loggedBlocks.putSync(cid, bytes);
    }
    await clockChangesSince(blockLog, head, [], {});
  } finally {
    isCompacting = false;
  }
}
369
// Resolve a CID string to a decoded multiformats Block.
async function getBlock(blocks, cidString) {
  const block = await blocks.get(parse(cidString));
  if (!block) throw new Error(`Missing block ${cidString}`);
  const { cid, value } = await decode({ bytes: block.bytes, codec, hasher });
  return new Block({ cid, value, bytes: block.bytes });
}
376
+
377
+ // src/indexer-helpers.ts
378
+ import { create } from "multiformats/block";
379
+ import { sha256 as hasher2 } from "multiformats/hashes/sha2";
380
+ import * as codec2 from "@ipld/dag-cbor";
381
+ import charwise from "charwise";
382
+ import * as DbIndex from "prolly-trees/db-index";
383
+ import { bf, simpleCompare } from "prolly-trees/utils";
384
+ import { nocache as cache } from "prolly-trees/cache";
385
// Holds one prolly-tree index: its root CID and (once hydrated) root node.
var IndexTree = class {
  cid = null;
  root = null;
};
389
// Compare index refs. NaN on the left sorts first (range-start sentinel),
// Infinity on the left sorts last; a NaN right-hand ref is a caller error.
var refCompare = (aRef, bRef) => {
  if (Number.isNaN(aRef)) return -1;
  if (Number.isNaN(bRef)) throw new Error("ref may not be Infinity or NaN");
  return aRef === Infinity ? 1 : simpleCompare(aRef, bRef);
};
398
// Order index entries by encoded key first, breaking ties by ref.
var compare = (a, b) => {
  const [aKey, aRef] = a;
  const [bKey, bRef] = b;
  const keyOrder = simpleCompare(aKey, bKey);
  return keyOrder !== 0 ? keyOrder : refCompare(aRef, bRef);
};
406
+ var byKeyOpts = { cache, chunker: bf(30), codec: codec2, hasher: hasher2, compare };
407
+ var byIdOpts = { cache, chunker: bf(30), codec: codec2, hasher: hasher2, compare: simpleCompare };
408
// Run the user's map function over changed docs, producing index entries
// keyed by [charwise(mapKey), docId]. Deletions and empty values are skipped.
function indexEntriesForChanges(changes, mapFn) {
  const indexEntries = [];
  for (const { key: _id, value, del } of changes) {
    if (del || !value) continue;
    let mapCalled = false;
    const mapReturn = mapFn({ _id, ...value }, (k, v) => {
      mapCalled = true;
      if (typeof k === "undefined") return;
      indexEntries.push({
        key: [charwise.encode(k), _id],
        value: v || null
      });
    });
    // Convenience: a map function that returns a value without calling emit
    // indexes that return value directly.
    if (!mapCalled && mapReturn) {
      indexEntries.push({
        key: [charwise.encode(mapReturn), _id],
        value: null
      });
    }
  }
  return indexEntries;
}
434
// Adapter: prolly-trees fetch blocks through this async getter.
function makeProllyGetBlock(blocks) {
  return async (address) => {
    const block = await blocks.get(address);
    if (!block) throw new Error(`Missing block ${address.toString()}`);
    const { cid, bytes } = block;
    return create({ cid, bytes, hasher: hasher2, codec: codec2 });
  };
}
443
// Apply `indexEntries` to the prolly-tree `inIndex`, creating or hydrating
// the tree as needed, and return the updated { root, cid } pair.
async function bulkIndex(tblocks, inIndex, indexEntries, opts) {
  if (!indexEntries.length) return inIndex;
  if (!inIndex.root) {
    if (!inIndex.cid) {
      // Brand-new index: build the tree from scratch and persist its blocks.
      let returnRootBlock = null;
      let returnNode = null;
      const creator = await DbIndex.create({ get: makeProllyGetBlock(tblocks), list: indexEntries, ...opts });
      for await (const node of creator) {
        const block = await node.block;
        await tblocks.put(block.cid, block.bytes);
        returnRootBlock = block; // last node emitted is the root
        returnNode = node;
      }
      if (!returnNode || !returnRootBlock) throw new Error("failed to create index");
      return { root: returnNode, cid: returnRootBlock.cid };
    }
    // Tree exists on disk but is not hydrated yet.
    inIndex.root = await DbIndex.load({ cid: inIndex.cid, get: makeProllyGetBlock(tblocks), ...opts });
  }
  const { root: newRoot, blocks: newBlocks } = await inIndex.root.bulk(indexEntries);
  if (!newRoot) return { root: null, cid: null }; // every entry was removed
  for await (const block of newBlocks) {
    await tblocks.put(block.cid, block.bytes);
  }
  return { root: newRoot, cid: (await newRoot.block).cid };
}
473
// Hydrate a prolly-tree index root from its CID.
async function loadIndex(tblocks, cid, opts) {
  return await DbIndex.load({ cid, get: makeProllyGetBlock(tblocks), ...opts });
}
476
// Post-process raw index results: ordering, limit, optional doc loading,
// and charwise key decoding.
async function applyQuery(crdt, resp, query) {
  if (query.descending) {
    resp.result = resp.result.reverse();
  }
  if (query.limit) {
    resp.result = resp.result.slice(0, query.limit);
  }
  if (query.includeDocs) {
    // Fetch each row's document by id; deleted/missing docs become null.
    resp.result = await Promise.all(
      resp.result.map(async (row) => {
        const val = await crdt.get(row.id);
        const doc = val ? { _id: row.id, ...val.doc } : null;
        return { ...row, doc };
      })
    );
  }
  const rows = resp.result.map((row) => {
    row.key = charwise.decode(row.key);
    if (row.row && !row.value) {
      // normalize legacy `row` field onto `value`
      row.value = row.row;
      delete row.row;
    }
    return row;
  });
  return { rows };
}
503
// Charwise-encode both ends of a [start, end] range.
function encodeRange(range) {
  return range.map((bound) => charwise.encode(bound));
}
506
// Charwise-encode a single index key.
function encodeKey(key) {
  return charwise.encode(key);
}
509
+
510
+ // src/index.ts
511
// Get-or-create the named index on a database's CRDT. Re-registering with a
// map function or meta revalidates against the existing definition.
function index({ _crdt }, name, mapFn, meta) {
  if (mapFn && meta) throw new Error("cannot provide both mapFn and meta");
  if (mapFn && mapFn.constructor.name !== "Function") throw new Error("mapFn must be a function");
  const existing = _crdt.indexers.get(name);
  if (existing) {
    existing.applyMapFn(name, mapFn, meta);
  } else {
    _crdt.indexers.set(name, new Index(_crdt, name, mapFn, meta));
  }
  return _crdt.indexers.get(name);
}
525
// A secondary index over a CRDT, maintained as two prolly-trees: `byKey`
// maps charwise-encoded map output to doc ids, and `byId` maps doc ids back
// to their index keys (so stale entries can be removed on update). Kept in
// sync with the CRDT clock head via `_updateIndex`.
var Index = class {
  blockstore;
  crdt;
  name = null;
  mapFn = null;
  mapFnString = "";
  byKey = new IndexTree();
  byId = new IndexTree();
  indexHead = void 0;
  includeDocsDefault = false;
  initError = null;
  ready;
  constructor(crdt, name, mapFn, meta) {
    this.blockstore = crdt.indexBlockstore;
    this.crdt = crdt;
    this.applyMapFn(name, mapFn, meta);
    if (!(this.mapFnString || this.initError)) throw new Error("missing mapFnString");
    // resolves (to undefined) once the index blockstore is usable
    this.ready = this.blockstore.ready.then(() => {
    });
  }
  // (Re)bind the index definition from either a map function or persisted
  // meta; incompatible redefinitions throw (recorded in `initError`).
  applyMapFn(name, mapFn, meta) {
    if (mapFn && meta) throw new Error("cannot provide both mapFn and meta");
    if (this.name && this.name !== name) throw new Error("cannot change name");
    this.name = name;
    try {
      if (meta) {
        // Restoring from persisted metadata.
        if (this.indexHead && this.indexHead.map((c) => c.toString()).join() !== meta.head.map((c) => c.toString()).join()) {
          throw new Error("cannot apply meta to existing index");
        }
        if (this.mapFnString) {
          if (this.mapFnString !== meta.map) {
            console.log(
              "cannot apply different mapFn meta: old mapFnString",
              this.mapFnString,
              "new mapFnString",
              meta.map
            );
          } else {
            this.byId.cid = meta.byId;
            this.byKey.cid = meta.byKey;
            this.indexHead = meta.head;
          }
        } else {
          this.mapFnString = meta.map;
          this.byId.cid = meta.byId;
          this.byKey.cid = meta.byKey;
          this.indexHead = meta.head;
        }
      } else {
        if (this.mapFn) {
          // Already have a live function: only an identical one is allowed.
          if (mapFn && this.mapFn.toString() !== mapFn.toString()) {
            throw new Error("cannot apply different mapFn app2");
          }
        } else {
          mapFn = mapFn || makeMapFnFromName(name);
          if (this.mapFnString) {
            if (this.mapFnString !== mapFn.toString()) throw new Error("cannot apply different mapFn app");
          } else {
            this.mapFnString = mapFn.toString();
          }
          this.mapFn = mapFn;
        }
      }
      // Arrow-bodied map functions default to returning docs with queries.
      this.includeDocsDefault = /=>\s*(.*)/.test(this.mapFnString);
    } catch (err) {
      this.initError = err;
    }
  }
  // Update and hydrate the trees, then answer the query against `byKey`.
  async query(opts = {}) {
    await this._updateIndex();
    await this._hydrateIndex();
    if (!this.byKey.root) return await applyQuery(this.crdt, { result: [] }, opts);
    if (this.includeDocsDefault && opts.includeDocs === void 0) opts.includeDocs = true;
    if (opts.range) {
      const { result: rangeRows, ...rest } = await this.byKey.root.range(...encodeRange(opts.range));
      return await applyQuery(this.crdt, { result: rangeRows, ...rest }, opts);
    }
    if (opts.key) {
      const encodedKey = encodeKey(opts.key);
      return await applyQuery(this.crdt, await this.byKey.root.get(encodedKey), opts);
    }
    if (Array.isArray(opts.keys)) {
      // One lookup per key; flatten all row sets into a single result.
      const results = await Promise.all(
        opts.keys.map(async (key) => {
          const encodedKey = encodeKey(key);
          return (await applyQuery(this.crdt, await this.byKey.root.get(encodedKey), opts)).rows;
        })
      );
      return { rows: results.flat() };
    }
    if (opts.prefix) {
      if (!Array.isArray(opts.prefix)) opts.prefix = [opts.prefix];
      // NaN sorts before and Infinity after every ref, bracketing the prefix.
      const start = [...opts.prefix, NaN];
      const end = [...opts.prefix, Infinity];
      const encodedR = encodeRange([start, end]);
      return await applyQuery(this.crdt, await this.byKey.root.range(...encodedR), opts);
    }
    const { result, ...all } = await this.byKey.root.getAllEntries();
    const rows = result.map(({ key: [k, id], value }) => ({ key: k, id, value }));
    return await applyQuery(this.crdt, { result: rows, ...all }, opts);
  }
  // Drop in-memory state so the index is rebuilt from the CRDT.
  _resetIndex() {
    this.byId = new IndexTree();
    this.byKey = new IndexTree();
    this.indexHead = void 0;
  }
  // Load the prolly-tree roots from their CIDs when not already in memory.
  async _hydrateIndex() {
    if (this.byId.root && this.byKey.root) return;
    if (!this.byId.cid || !this.byKey.cid) return;
    this.byId.root = await loadIndex(this.blockstore, this.byId.cid, byIdOpts);
    this.byKey.root = await loadIndex(this.blockstore, this.byKey.cid, byKeyOpts);
  }
  // Fold CRDT changes since `indexHead` into both trees, persisting updated
  // index meta for every indexer in one blockstore transaction.
  async _updateIndex() {
    await this.ready;
    if (this.initError) throw this.initError;
    if (!this.mapFn) throw new Error("No map function defined");
    let result, head;
    if (!this.indexHead || this.indexHead.length === 0) {
      // First run: index everything.
      ({ result, head } = await this.crdt.allDocs());
    } else {
      ({ result, head } = await this.crdt.changes(this.indexHead));
    }
    if (result.length === 0) {
      this.indexHead = head;
      return { byId: this.byId, byKey: this.byKey };
    }
    // Entries for docs that changed must be deleted from both trees first.
    let staleKeyIndexEntries = [];
    let removeIdIndexEntries = [];
    if (this.byId.root) {
      const removeIds = result.map(({ key }) => key);
      const { result: oldChangeEntries } = await this.byId.root.getMany(removeIds);
      staleKeyIndexEntries = oldChangeEntries.map((key) => ({ key, del: true }));
      removeIdIndexEntries = oldChangeEntries.map((key) => ({ key: key[1], del: true }));
    }
    const indexEntries = indexEntriesForChanges(result, this.mapFn);
    const byIdIndexEntries = indexEntries.map(({ key }) => ({
      key: key[1], // doc id
      value: key
    }));
    // Snapshot every sibling indexer's meta so it persists alongside ours.
    const indexerMeta = { indexes: /* @__PURE__ */ new Map() };
    for (const [idxName, indexer] of this.crdt.indexers) {
      if (indexer.indexHead) {
        indexerMeta.indexes.set(idxName, {
          byId: indexer.byId.cid,
          byKey: indexer.byKey.cid,
          head: indexer.indexHead,
          map: indexer.mapFnString,
          name: indexer.name
        });
      }
    }
    return await this.blockstore.transaction(async (tblocks) => {
      this.byId = await bulkIndex(tblocks, this.byId, removeIdIndexEntries.concat(byIdIndexEntries), byIdOpts);
      this.byKey = await bulkIndex(tblocks, this.byKey, staleKeyIndexEntries.concat(indexEntries), byKeyOpts);
      this.indexHead = head;
      indexerMeta.indexes.set(this.name, {
        byId: this.byId.cid,
        byKey: this.byKey.cid,
        head,
        map: this.mapFnString,
        name: this.name
      });
      return indexerMeta;
    });
  }
};
725
// Default map function: index on the named top-level field; null is
// normalized to undefined so null fields are not indexed.
function makeMapFnFromName(name) {
  return (doc) => {
    const fieldValue = doc[name];
    return fieldValue === null || fieldValue === undefined ? void 0 : fieldValue;
  };
}
728
+
729
+ // src/crdt-clock.ts
730
+ import { advance } from "@alanshaw/pail/clock";
731
+ import { root as root2 } from "@alanshaw/pail/crdt";
732
+
733
+ // src/apply-head-queue.ts
734
+ function applyHeadQueue(worker) {
735
+ const queue = [];
736
+ let isProcessing = false;
737
+ async function* process() {
738
+ if (isProcessing || queue.length === 0)
739
+ return;
740
+ isProcessing = true;
741
+ const allUpdates = [];
742
+ try {
743
+ while (queue.length > 0) {
744
+ queue.sort((a, b) => b.updates ? 1 : -1);
745
+ const task = queue.shift();
746
+ if (!task)
747
+ continue;
748
+ await worker(task.newHead, task.prevHead);
749
+ if (task.updates) {
750
+ allUpdates.push(...task.updates);
751
+ }
752
+ if (!queue.some((t) => t.updates) || task.updates) {
753
+ const allTasksHaveUpdates = queue.every((task2) => task2.updates !== null);
754
+ yield { updates: allUpdates, all: allTasksHaveUpdates };
755
+ allUpdates.length = 0;
756
+ }
757
+ }
758
+ } finally {
759
+ isProcessing = false;
760
+ const generator = process();
761
+ let result = await generator.next();
762
+ while (!result.done) {
763
+ result = await generator.next();
764
+ }
765
+ }
766
+ }
767
+ return {
768
+ push(task) {
769
+ queue.push(task);
770
+ return process();
771
+ }
772
+ };
773
+ }
774
+
775
+ // src/crdt-clock.ts
776
// Tracks the CRDT clock head and fans out change notifications.
// todo: track local and remote clocks independently, merge on read
// that way we can drop the whole remote if we need to
// should go with making sure the local clock only references locally available blockstore on write
var CRDTClock = class {
  head = [];
  zoomers = /* @__PURE__ */ new Set(); // fired when the head moves (indexes reset)
  watchers = /* @__PURE__ */ new Set(); // receive the update payloads
  emptyWatchers = /* @__PURE__ */ new Set(); // notified on every tick, no payload
  blockstore = null;
  applyHeadQueue;
  constructor() {
    this.applyHeadQueue = applyHeadQueue(this.int_applyHead.bind(this));
  }
  setHead(head) {
    this.head = head;
  }
  // Queue a head transition; each drained batch notifies watchers.
  async applyHead(newHead, prevHead, updates = null) {
    for await (const { updates: updatesAcc, all } of this.applyHeadQueue.push({
      newHead,
      prevHead,
      updates
    })) {
      // fire-and-forget: notification does not block the queue
      this.processUpdates(updatesAcc, all, prevHead);
    }
  }
  async processUpdates(updatesAcc, all, prevHead) {
    let internalUpdates = updatesAcc;
    if (this.watchers.size && !all) {
      // Partial batch: recompute the real delta from the clock itself.
      const changes = await clockChangesSince(this.blockstore, this.head, prevHead, {});
      internalUpdates = changes.result;
    }
    this.zoomers.forEach((fn) => fn());
    this.notifyWatchers(internalUpdates || []);
  }
  notifyWatchers(updates) {
    this.emptyWatchers.forEach((fn) => fn());
    this.watchers.forEach((fn) => fn(updates || []));
  }
  onTick(fn) {
    this.watchers.add(fn);
  }
  onTock(fn) {
    this.emptyWatchers.add(fn);
  }
  onZoom(fn) {
    this.zoomers.add(fn);
  }
  // Merge `newHead` into our head via pail `advance`, persisting the blocks
  // the merge produces. No-ops when the head is unchanged; fast-forwards
  // when nothing moved since `prevHead`.
  async int_applyHead(newHead, prevHead) {
    const ogHead = sortClockHead(this.head);
    newHead = sortClockHead(newHead);
    if (compareClockHeads(ogHead, newHead)) {
      return; // nothing new
    }
    const ogPrev = sortClockHead(prevHead);
    if (compareClockHeads(ogHead, ogPrev)) {
      this.setHead(newHead); // fast-forward
      return;
    }
    let head = this.head;
    const noLoader = false;
    if (!this.blockstore) throw new Error("missing blockstore");
    await validateBlocks(newHead, this.blockstore);
    await this.blockstore.transaction(
      async (tblocks) => {
        head = await advanceBlocks(newHead, tblocks, head);
        const result = await root2(tblocks, head);
        for (const { cid, bytes } of [...result.additions, ...result.removals]) {
          tblocks.putSync(cid, bytes);
        }
        return { head };
      },
      { noLoader }
    );
    this.setHead(head);
  }
};
853
// Deterministically order a clock head by CID string (sorts in place).
function sortClockHead(clockHead) {
  const byString = (a, b) => a.toString().localeCompare(b.toString());
  return clockHead.sort(byString);
}
856
// Ensure every CID in `newHead` is present in the blockstore before the
// clock advances. The original fired the async checks via `.map` without
// awaiting them, so missing blocks never failed the caller and surfaced as
// unhandled rejections; the lookups are now awaited in parallel.
async function validateBlocks(newHead, blockstore) {
  await Promise.all(
    newHead.map(async (cid) => {
      const got = await blockstore.get(cid);
      if (!got) {
        throw new Error("int_applyHead missing block: " + cid.toString());
      }
    })
  );
}
864
// Heads match when their CID lists stringify identically.
function compareClockHeads(head1, head2) {
  return String(head1) === String(head2);
}
867
// Advance the clock by each new head CID, logging and skipping any that fail.
async function advanceBlocks(newHead, tblocks, head) {
  for (const cid of newHead) {
    try {
      head = await advance(tblocks, head, cid);
    } catch (err) {
      console.error("failed to advance", cid.toString(), err);
    }
  }
  return head;
}
878
+
879
// src/crdt.ts
// CRDT: the core document store. Pairs an EncryptedBlockstore (data) with a
// CRDTClock (merkle-clock head) plus a secondary blockstore for index data.
var CRDT = class {
  name;
  // caller-supplied options; defaults to {}
  opts = {};
  // resolves once both blockstores have finished loading
  ready;
  blockstore;
  indexBlockstore;
  // live index instances keyed by name; reset whenever the clock "zooms"
  indexers = /* @__PURE__ */ new Map();
  clock = new CRDTClock();
  constructor(name, opts) {
    this.name = name || null;
    this.opts = opts || this.opts;
    // Primary data store. applyMeta merges an incoming (presumably remote)
    // head into the local clock; compact rewrites storage down to the
    // blocks reachable from the current head.
    this.blockstore = new EncryptedBlockstore({
      name,
      applyMeta: async (meta) => {
        const crdtMeta = meta;
        // empty updates array: a meta-driven head merge carries no local writes
        await this.clock.applyHead(crdtMeta.head, []);
      },
      compact: async (blocks) => {
        await doCompact(blocks, this.clock.head);
        return { head: this.clock.head };
      },
      // auto-compact after this many transactions (default 100)
      autoCompact: this.opts.autoCompact || 100,
      crypto,
      store,
      public: this.opts.public,
      meta: this.opts.meta
    });
    this.clock.blockstore = this.blockstore;
    // Secondary store for index data; only given a persistent name when
    // opts.persistIndexes is set and the database itself is named.
    this.indexBlockstore = new EncryptedBlockstore({
      name: this.opts.persistIndexes && this.name ? this.name + ".idx" : void 0,
      applyMeta: async (meta) => {
        const idxCarMeta = meta;
        // re-register every index definition carried by the incoming meta
        for (const [name2, idx] of Object.entries(idxCarMeta.indexes)) {
          index({ _crdt: this }, name2, void 0, idx);
        }
      },
      crypto,
      public: this.opts.public,
      store
    });
    this.ready = Promise.all([this.blockstore.ready, this.indexBlockstore.ready]).then(() => {
    });
    // A zoom replaces the head wholesale, so incremental index state is stale.
    this.clock.onZoom(() => {
      for (const idx of this.indexers.values()) {
        idx._resetIndex();
      }
    });
  }
  // Apply a batch of {key, value, del, clock} updates inside one blockstore
  // transaction, then advance the clock from the pre-transaction head.
  async bulk(updates, options) {
    await this.ready;
    const prevHead = [...this.clock.head];
    const meta = await this.blockstore.transaction(
      async (blocks) => {
        const { head } = await applyBulkUpdateToCrdt(blocks, this.clock.head, updates, options);
        updates = updates.map(({ key, value, del, clock }) => {
          // NOTE(review): fire-and-forget — presumably processes any file
          // attachments referenced by the doc; confirm in readFiles
          readFiles(this.blockstore, { doc: value });
          return { key, value, del, clock };
        });
        return { head };
      }
    );
    await this.clock.applyHead(meta.head, prevHead, updates);
    return meta;
  }
  // if (snap) await this.clock.applyHead(crdtMeta.head, this.clock.head)
  // Enumerate every entry visible at the current head.
  async allDocs() {
    await this.ready;
    const result = [];
    for await (const entry of getAllEntries(this.blockstore, this.clock.head)) {
      result.push(entry);
    }
    return { result, head: this.clock.head };
  }
  // Text visualization of the clock history, one line per step.
  async vis() {
    await this.ready;
    const txt = [];
    for await (const line of clockVis(this.blockstore, this.clock.head)) {
      txt.push(line);
    }
    return txt.join("\n");
  }
  // Fetch a raw block by its CID string.
  async getBlock(cidString) {
    await this.ready;
    return await getBlock(this.blockstore, cidString);
  }
  // Read one value by key; returns null when the entry is a deletion.
  async get(key) {
    await this.ready;
    const result = await getValueFromCrdt(this.blockstore, this.clock.head, key);
    if (result.del)
      return null;
    return result;
  }
  // Changes since the given clock (empty array = from the beginning).
  async changes(since = [], opts = {}) {
    await this.ready;
    return await clockChangesSince(this.blockstore, this.clock.head, since, opts);
  }
  // Force a compaction of the primary blockstore.
  async compact() {
    return await this.blockstore.compact();
  }
};
980
+
981
// src/database.ts
// Database: the public document API. Wraps a CRDT instance and serializes
// writes through a queue; exposes get/put/del/changes/allDocs/subscribe/query.
var Database = class {
  // global registry used by fireproof() to return one shared instance per name
  static databases = /* @__PURE__ */ new Map();
  name;
  opts = {};
  // true once the clock's onTick handler has been installed (first subscribe)
  _listening = false;
  // listeners that receive the changed docs on each update
  _listeners = /* @__PURE__ */ new Set();
  // listeners pinged with an empty list after each clock "tock"
  _noupdate_listeners = /* @__PURE__ */ new Set();
  _crdt;
  _writeQueue;
  blockstore;
  constructor(name, opts) {
    this.name = name || null;
    this.opts = opts || this.opts;
    this._crdt = new CRDT(name, this.opts);
    this.blockstore = this._crdt.blockstore;
    // serialize writes: queued updates are applied via CRDT.bulk
    this._writeQueue = writeQueue(async (updates) => {
      return await this._crdt.bulk(updates);
    });
    this._crdt.clock.onTock(() => {
      this._no_update_notify();
    });
  }
  // Fetch a document by id; throws "Not found: <id>" when absent or deleted.
  async get(id) {
    const got = await this._crdt.get(id).catch((e) => {
      // prepend context before rethrowing the underlying error
      e.message = `Not found: ${id} - ` + e.message;
      throw e;
    });
    if (!got)
      throw new Error(`Not found: ${id}`);
    const { doc } = got;
    return { _id: id, ...doc };
  }
  // Insert or update a document; generates a uuidv7 _id when none is given.
  async put(doc) {
    const { _id, ...value } = doc;
    const docId = _id || uuidv7();
    const result = await this._writeQueue.push({ key: docId, value });
    return { id: docId, clock: result?.head };
  }
  // Tombstone a document by id.
  async del(id) {
    const result = await this._writeQueue.push({ key: id, del: true });
    return { id, clock: result?.head };
  }
  // Changes feed since a given clock; deletions surface as {_deleted: true}.
  async changes(since = [], opts = {}) {
    const { result, head } = await this._crdt.changes(since, opts);
    const rows = result.map(({ key, value, del, clock }) => ({
      key,
      value: del ? { _id: key, _deleted: true } : { _id: key, ...value },
      clock
    }));
    return { rows, clock: head };
  }
  // All current documents; deletions surface as {_deleted: true}.
  async allDocs() {
    const { result, head } = await this._crdt.allDocs();
    const rows = result.map(({ key, value, del }) => ({
      key,
      value: del ? { _id: key, _deleted: true } : { _id: key, ...value }
    }));
    return { rows, clock: head };
  }
  // Alias for allDocs.
  async allDocuments() {
    return this.allDocs();
  }
  // Register a listener; returns an unsubscribe function. With updates=true
  // the listener receives the changed docs; otherwise it is pinged with no
  // payload after each clock tock.
  subscribe(listener, updates) {
    if (updates) {
      if (!this._listening) {
        // install the clock tick handler lazily, once
        this._listening = true;
        this._crdt.clock.onTick((updates2) => {
          void this._notify(updates2);
        });
      }
      this._listeners.add(listener);
      return () => {
        this._listeners.delete(listener);
      };
    } else {
      this._noupdate_listeners.add(listener);
      return () => {
        this._noupdate_listeners.delete(listener);
      };
    }
  }
  // todo if we add this onto dbs in fireproof.ts then we can make index.ts a separate package
  // Query an index by field name, or by a map function (named via makeName).
  async query(field, opts = {}) {
    const idx = typeof field === "string" ? index({ _crdt: this._crdt }, field) : index({ _crdt: this._crdt }, makeName(field.toString()), field);
    return await idx.query(opts);
  }
  async compact() {
    await this._crdt.compact();
  }
  // Deliver changed docs to subscribers; listener errors are logged, not thrown.
  async _notify(updates) {
    if (this._listeners.size) {
      const docs = updates.map(({ key, value }) => ({ _id: key, ...value }));
      for (const listener of this._listeners) {
        await (async () => await listener(docs))().catch((e) => {
          console.error("subscriber error", e);
        });
      }
    }
  }
  // Ping no-update subscribers with an empty list; errors logged, not thrown.
  async _no_update_notify() {
    if (this._noupdate_listeners.size) {
      for (const listener of this._noupdate_listeners) {
        await (async () => await listener([]))().catch((e) => {
          console.error("subscriber error", e);
        });
      }
    }
  }
};
1091
// Return the Database registered under `name`, creating it on first use.
// NOTE(review): opts only take effect on first creation for a given name;
// later calls with different opts receive the cached instance unchanged.
function fireproof(name, opts) {
  const registry = Database.databases;
  if (!registry.has(name)) {
    registry.set(name, new Database(name, opts));
  }
  return registry.get(name);
}
1097
// Derive a stable index name from a map-function's source text.
// A function taking two parameters (e.g. "(doc, emit)" or "([a, b], emit)")
// keeps its full source as the name; a single-parameter arrow function is
// named by its body expression; anything else falls back to the full source.
function makeName(fnString) {
  // matches a parenthesized parameter list containing two arguments
  const twoArgPattern = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g;
  const hasTwoArgs = [...fnString.matchAll(twoArgPattern)].length > 0;
  if (hasTwoArgs) {
    return fnString;
  }
  const arrowBody = /=>\s*(.*)/.exec(fnString);
  return arrowBody ? arrowBody[1] : fnString;
}
1110
+ export {
1111
+ Database,
1112
+ fireproof
1113
+ };
1114
+ //# sourceMappingURL=fireproof.js.map