@fireproof/core 0.3.11 → 0.3.13

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/README.md +1 -1
  2. package/dist/bundle.js +2168 -0
  3. package/dist/src/blockstore.d.ts +115 -0
  4. package/dist/src/blockstore.d.ts.map +1 -0
  5. package/dist/src/clock.d.ts +98 -0
  6. package/dist/src/clock.d.ts.map +1 -0
  7. package/dist/src/crypto.d.ts +18 -0
  8. package/dist/src/crypto.d.ts.map +1 -0
  9. package/dist/src/db-index.d.ts +116 -0
  10. package/dist/src/db-index.d.ts.map +1 -0
  11. package/dist/src/fireproof.d.ts +167 -0
  12. package/dist/src/fireproof.d.ts.map +1 -0
  13. package/dist/src/hydrator.d.ts +6 -0
  14. package/dist/src/hydrator.d.ts.map +1 -0
  15. package/dist/src/index.d.ts +6 -0
  16. package/dist/src/index.d.ts.map +1 -0
  17. package/dist/src/listener.d.ts +36 -0
  18. package/dist/src/listener.d.ts.map +1 -0
  19. package/dist/src/prolly.d.ts +83 -0
  20. package/dist/src/prolly.d.ts.map +1 -0
  21. package/dist/src/sha1.d.ts +9 -0
  22. package/dist/src/sha1.d.ts.map +1 -0
  23. package/dist/src/valet.d.ts +34 -0
  24. package/dist/src/valet.d.ts.map +1 -0
  25. package/dist/tsconfig.tsbuildinfo +1 -0
  26. package/package.json +39 -5
  27. package/src/blockstore.js +24 -23
  28. package/src/clock.js +4 -3
  29. package/src/crypto.js +1 -0
  30. package/src/db-index.js +23 -18
  31. package/src/fireproof.js +31 -26
  32. package/src/hydrator.js +3 -3
  33. package/src/index.js +6 -0
  34. package/src/listener.js +9 -8
  35. package/src/prolly.js +12 -25
  36. package/src/sha1.js +2 -1
  37. package/src/valet.js +22 -20
  38. package/hooks/use-fireproof.js +0 -135
  39. package/index.js +0 -6
  40. package/scripts/keygen.js +0 -3
  41. package/test/block.js +0 -65
  42. package/test/clock.test.js +0 -694
  43. package/test/db-index.test.js +0 -261
  44. package/test/fireproof.test.js +0 -493
  45. package/test/fulltext.test.js +0 -66
  46. package/test/helpers.js +0 -45
  47. package/test/hydrator.test.js +0 -81
  48. package/test/listener.test.js +0 -102
  49. package/test/prolly.test.js +0 -190
  50. package/test/proofs.test.js +0 -53
  51. package/test/reproduce-fixture-bug.test.js +0 -65
  52. package/test/valet.test.js +0 -59
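For orientation, here is a minimal usage sketch of the API surface added by the new bundle, based on the Fireproof class, put/get, and changesSince methods shown in the diff below; the import specifier and named export are assumptions, not confirmed by this diff:

const { Fireproof } = require('@fireproof/core') // assumed entry point

async function demo () {
  // create a database with default IndexedDB-backed storage and a random encryption key
  const db = Fireproof.storage('my-db')

  // add a document; resolves to { id, clock, ... } per the JSDoc in the bundle
  const ok = await db.put({ _id: 'alice', hello: 'world' })

  // read it back by id
  const doc = await db.get('alice')

  // list changes made since a previous clock head: { rows, clock, proof }
  const changes = await db.changesSince(ok.clock)

  return { doc, changes }
}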
package/dist/bundle.js ADDED
@@ -0,0 +1,2168 @@
1
+ 'use strict';
2
+
3
+ var crypto = require('crypto');
4
+ var Block = require('multiformats/block');
5
+ var sha2 = require('multiformats/hashes/sha2');
6
+ var dagcbor = require('@ipld/dag-cbor');
7
+ var utils = require('prolly-trees/utils');
8
+ var map = require('prolly-trees/map');
9
+ var cache = require('prolly-trees/cache');
10
+ var link = require('multiformats/link');
11
+ var multiformats = require('multiformats');
12
+ var car = require('@ipld/car');
13
+ var cid = require('multiformats/cid');
14
+ var CBW = require('@ipld/car/buffer-writer');
15
+ var raw = require('multiformats/codecs/raw');
16
+ var idb = require('idb');
17
+ var cargoQueue = require('async/cargoQueue.js');
18
+ var codec = require('encrypted-block');
19
+ var cidSet = require('prolly-trees/cid-set');
20
+ var buffer = require('buffer');
21
+ var charwise = require('charwise');
22
+ var dbIndex = require('prolly-trees/db-index');
23
+
24
+ function _interopNamespaceDefault(e) {
25
+ var n = Object.create(null);
26
+ if (e) {
27
+ Object.keys(e).forEach(function (k) {
28
+ if (k !== 'default') {
29
+ var d = Object.getOwnPropertyDescriptor(e, k);
30
+ Object.defineProperty(n, k, d.get ? d : {
31
+ enumerable: true,
32
+ get: function () { return e[k]; }
33
+ });
34
+ }
35
+ });
36
+ }
37
+ n.default = e;
38
+ return Object.freeze(n);
39
+ }
40
+
41
+ var Block__namespace = /*#__PURE__*/_interopNamespaceDefault(Block);
42
+ var dagcbor__namespace = /*#__PURE__*/_interopNamespaceDefault(dagcbor);
43
+ var CBW__namespace = /*#__PURE__*/_interopNamespaceDefault(CBW);
44
+ var raw__namespace = /*#__PURE__*/_interopNamespaceDefault(raw);
45
+ var codec__namespace = /*#__PURE__*/_interopNamespaceDefault(codec);
46
+
47
+ // @ts-nocheck
48
+
49
+ /**
50
+ * @template T
51
+ * @typedef {{ parents: EventLink<T>[], data: T }} EventView
52
+ */
53
+
54
+ /**
55
+ * @template T
56
+ * @typedef {import('multiformats').BlockView<EventView<T>>} EventBlockView
57
+ */
58
+
59
+ /**
60
+ * @template T
61
+ * @typedef {import('multiformats').Link<EventView<T>>} EventLink
62
+ */
63
+
64
+ /**
65
+ * Advance the clock by adding an event.
66
+ *
67
+ * @template T
68
+ * @param {import('./blockstore').BlockFetcher} blocks Block storage.
69
+ * @param {EventLink<T>[]} head The head of the clock.
70
+ * @param {EventLink<T>} event The event to add.
71
+ * @returns {Promise<EventLink<T>[]>} The new head of the clock.
72
+ */
73
+ async function advance (blocks, head, event) {
74
+ /** @type {EventFetcher<T>} */
75
+ const events = new EventFetcher(blocks);
76
+ const headmap = new Map(head.map((cid) => [cid.toString(), cid]));
77
+
78
+ // Check if the headmap already includes the event, return head if it does
79
+ if (headmap.has(event.toString())) return { head, cids: events.cids }
80
+
81
+ // Does event contain the clock?
82
+ let changed = false;
83
+ for (const cid of head) {
84
+ if (await contains(events, event, cid)) {
85
+ headmap.delete(cid.toString());
86
+ headmap.set(event.toString(), event);
87
+ changed = true;
88
+ }
89
+ }
90
+
91
+ // If the headmap has been changed, return the new headmap values
92
+ if (changed) {
93
+ return { head: [...headmap.values()], cids: events.cids }
94
+ }
95
+
96
+ // Does clock contain the event?
97
+ for (const p of head) {
98
+ if (await contains(events, p, event)) {
99
+ return { head, cids: events.cids }
100
+ }
101
+ }
102
+
103
+ // Return the head concatenated with the new event if it passes both checks
104
+ return { head: head.concat(event), cids: events.cids }
105
+ }
106
+
107
+ /**
108
+ * @template T
109
+ * @implements {EventBlockView<T>}
110
+ */
111
+ class EventBlock extends Block.Block {
112
+ /**
113
+ * @param {object} config
114
+ * @param {EventLink<T>} config.cid
115
+ * @param {Event} config.value
116
+ * @param {Uint8Array} config.bytes
117
+ */
118
+ constructor ({ cid, value, bytes }) {
119
+ // @ts-expect-error
120
+ super({ cid, value, bytes });
121
+ }
122
+
123
+ /**
124
+ * @template T
125
+ * @param {T} data
126
+ * @param {EventLink<T>[]} [parents]
127
+ */
128
+ static create (data, parents) {
129
+ return encodeEventBlock({ data, parents: parents ?? [] })
130
+ }
131
+ }
132
+
133
+ /** @template T */
134
+ class EventFetcher {
135
+ /** @param {import('./blockstore').BlockFetcher} blocks */
136
+ constructor (blocks) {
137
+ /** @private */
138
+ this._blocks = blocks;
139
+ this._cids = new utils.CIDCounter();
140
+ this._cache = new Map();
141
+ }
142
+
143
+ /**
144
+ * @param {EventLink<T>} link
145
+ * @returns {Promise<EventBlockView<T>>}
146
+ */
147
+ async get (link) {
148
+ const slink = link.toString();
149
+ // console.log('get', link.toString())
150
+ if (this._cache.has(slink)) return this._cache.get(slink)
151
+ const block = await this._blocks.get(link);
152
+ this._cids.add({ address: link });
153
+ if (!block) throw new Error(`missing block: ${link}`)
154
+ const got = decodeEventBlock(block.bytes);
155
+ this._cache.set(slink, got);
156
+ return got
157
+ }
158
+
159
+ async all () {
160
+ await Promise.all([...this._cids]);
161
+ return this._cids
162
+ }
163
+ }
164
+
165
+ /**
166
+ * @template T
167
+ * @param {EventView<T>} value
168
+ * @returns {Promise<EventBlockView<T>>}
169
+ */
170
+ async function encodeEventBlock (value) {
171
+ // TODO: sort parents
172
+ const { cid, bytes } = await Block.encode({ value, codec: dagcbor__namespace, hasher: sha2.sha256 });
173
+ // @ts-expect-error
174
+ return new Block.Block({ cid, value, bytes })
175
+ }
176
+
177
+ /**
178
+ * @template T
179
+ * @param {Uint8Array} bytes
180
+ * @returns {Promise<EventBlockView<T>>}
181
+ */
182
+ async function decodeEventBlock (bytes) {
183
+ const { cid, value } = await Block.decode({ bytes, codec: dagcbor__namespace, hasher: sha2.sha256 });
184
+ // @ts-expect-error
185
+ return new Block.Block({ cid, value, bytes })
186
+ }
187
+
188
+ /**
189
+ * Returns true if event "a" contains event "b". Breadth first search.
190
+ * @template T
191
+ * @param {EventFetcher} events
192
+ * @param {EventLink<T>} a
193
+ * @param {EventLink<T>} b
194
+ */
195
+ async function contains (events, a, b) {
196
+ if (a.toString() === b.toString()) return true
197
+ const [{ value: aevent }, { value: bevent }] = await Promise.all([events.get(a), events.get(b)]);
198
+ const links = [...aevent.parents];
199
+ while (links.length) {
200
+ const link = links.shift();
201
+ if (!link) break
202
+ if (link.toString() === b.toString()) return true
203
+ // if any of b's parents are this link, then b cannot exist in any of the
204
+ // tree below, since that would create a cycle.
205
+ if (bevent.parents.some((p) => link.toString() === p.toString())) continue
206
+ const { value: event } = await events.get(link);
207
+ links.push(...event.parents);
208
+ }
209
+ return false
210
+ }
211
+
212
+ /**
213
+ * @template T
214
+ * @param {import('./blockstore').BlockFetcher} blocks Block storage.
215
+ * @param {EventLink<T>[]} head
216
+ * @param {object} [options]
217
+ * @param {(b: EventBlockView<T>) => string} [options.renderNodeLabel]
218
+ */
219
+ async function * vis$1 (blocks, head, options = {}) {
220
+ const renderNodeLabel = options.renderNodeLabel ?? ((b) => b.value.data.value);
221
+ const events = new EventFetcher(blocks);
222
+ yield 'digraph clock {';
223
+ yield ' node [shape=point fontname="Courier"]; head;';
224
+ const hevents = await Promise.all(head.map((link) => events.get(link)));
225
+ const links = [];
226
+ const nodes = new Set();
227
+ for (const e of hevents) {
228
+ nodes.add(e.cid.toString());
229
+ yield ` node [shape=oval fontname="Courier"]; ${e.cid} [label="${renderNodeLabel(e)}"];`;
230
+ yield ` head -> ${e.cid};`;
231
+ for (const p of e.value.parents) {
232
+ yield ` ${e.cid} -> ${p};`;
233
+ }
234
+ links.push(...e.value.parents);
235
+ }
236
+ while (links.length) {
237
+ const link = links.shift();
238
+ if (!link) break
239
+ if (nodes.has(link.toString())) continue
240
+ nodes.add(link.toString());
241
+ const block = await events.get(link);
242
+ yield ` node [shape=oval]; ${link} [label="${renderNodeLabel(block)}" fontname="Courier"];`;
243
+ for (const p of block.value.parents) {
244
+ yield ` ${link} -> ${p};`;
245
+ }
246
+ links.push(...block.value.parents);
247
+ }
248
+ yield '}';
249
+ }
250
+
251
+ async function findEventsToSync (blocks, head) {
252
+ // const callTag = Math.random().toString(36).substring(7)
253
+ const events = new EventFetcher(blocks);
254
+ // console.time(callTag + '.findCommonAncestorWithSortedEvents')
255
+ const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
256
+ // console.timeEnd(callTag + '.findCommonAncestorWithSortedEvents')
257
+ // console.log('sorted', sorted.length)
258
+ // console.time(callTag + '.contains')
259
+ const toSync = await asyncFilter(sorted, async (uks) => !(await contains(events, ancestor, uks.cid)));
260
+ // console.timeEnd(callTag + '.contains')
261
+
262
+ return { cids: events.cids, events: toSync }
263
+ }
264
+
265
+ const asyncFilter = async (arr, predicate) =>
266
+ Promise.all(arr.map(predicate)).then((results) => arr.filter((_v, index) => results[index]));
267
+
268
+ async function findCommonAncestorWithSortedEvents (events, children) {
269
+ // const callTag = Math.random().toString(36).substring(7)
270
+ // console.time(callTag + '.findCommonAncestor')
271
+ const ancestor = await findCommonAncestor(events, children);
272
+ // console.timeEnd(callTag + '.findCommonAncestor')
273
+ if (!ancestor) {
274
+ throw new Error('failed to find common ancestor event')
275
+ }
276
+ // console.time(callTag + '.findSortedEvents')
277
+ const sorted = await findSortedEvents(events, children, ancestor);
278
+ // console.timeEnd(callTag + '.findSortedEvents')
279
+ return { ancestor, sorted }
280
+ }
281
+
282
+ /**
283
+ * Find the common ancestor event of the passed children. A common ancestor is
284
+ * the first single event in the DAG that _all_ paths from children lead to.
285
+ *
286
+ * @param {import('./clock').EventFetcher} events
287
+ * @param {import('./clock').EventLink<EventData>[]} children
288
+ */
289
+ async function findCommonAncestor (events, children) {
290
+ if (!children.length) return
291
+ const candidates = children.map((c) => [c]);
292
+ while (true) {
293
+ let changed = false;
294
+ for (const c of candidates) {
295
+ const candidate = await findAncestorCandidate(events, c[c.length - 1]);
296
+ if (!candidate) continue
297
+ changed = true;
298
+ c.push(candidate);
299
+ const ancestor = findCommonString(candidates);
300
+ if (ancestor) return ancestor
301
+ }
302
+ if (!changed) return
303
+ }
304
+ }
305
+
306
+ /**
307
+ * @param {import('./clock').EventFetcher} events
308
+ * @param {import('./clock').EventLink<EventData>} root
309
+ */
310
+ async function findAncestorCandidate (events, root) {
311
+ const { value: event } = await events.get(root);
312
+ if (!event.parents.length) return root
313
+ return event.parents.length === 1 ? event.parents[0] : findCommonAncestor(events, event.parents)
314
+ }
315
+
316
+ /**
317
+ * @template {{ toString: () => string }} T
318
+ * @param {Array<T[]>} arrays
319
+ */
320
+ function findCommonString (arrays) {
321
+ arrays = arrays.map((a) => [...a]);
322
+ for (const arr of arrays) {
323
+ for (const item of arr) {
324
+ let matched = true;
325
+ for (const other of arrays) {
326
+ if (arr === other) continue
327
+ matched = other.some((i) => String(i) === String(item));
328
+ if (!matched) break
329
+ }
330
+ if (matched) return item
331
+ }
332
+ }
333
+ }
334
+
335
+ /**
336
+ * Find and sort events between the head(s) and the tail.
337
+ * @param {import('./clock').EventFetcher} events
338
+ * @param {import('./clock').EventLink<EventData>[]} head
339
+ * @param {import('./clock').EventLink<EventData>} tail
340
+ */
341
+ async function findSortedEvents (events, head, tail) {
342
+ // const callTag = Math.random().toString(36).substring(7)
343
+ // get weighted events - heavier events happened first
344
+ /** @type {Map<string, { event: import('./clock').EventBlockView<EventData>, weight: number }>} */
345
+ const weights = new Map();
346
+ const all = await Promise.all(head.map((h) => findEvents(events, h, tail)));
347
+ for (const arr of all) {
348
+ for (const { event, depth } of arr) {
349
+ // console.log('event value', event.value.data.value)
350
+ const info = weights.get(event.cid.toString());
351
+ if (info) {
352
+ info.weight += depth;
353
+ } else {
354
+ weights.set(event.cid.toString(), { event, weight: depth });
355
+ }
356
+ }
357
+ }
358
+
359
+ // group events into buckets by weight
360
+ /** @type {Map<number, import('./clock').EventBlockView<EventData>[]>} */
361
+ const buckets = new Map();
362
+ for (const { event, weight } of weights.values()) {
363
+ const bucket = buckets.get(weight);
364
+ if (bucket) {
365
+ bucket.push(event);
366
+ } else {
367
+ buckets.set(weight, [event]);
368
+ }
369
+ }
370
+
371
+ // sort by weight, and by CID within weight
372
+ const sorted = Array.from(buckets)
373
+ .sort((a, b) => b[0] - a[0])
374
+ .flatMap(([, es]) => es.sort((a, b) => (String(a.cid) < String(b.cid) ? -1 : 1)));
375
+ // console.log('sorted', sorted.map(s => s.value.data.value))
376
+
377
+ return sorted
378
+ }
379
+
380
+ /**
381
+ * @param {import('./clock').EventFetcher} events
382
+ * @param {import('./clock').EventLink<EventData>} start
383
+ * @param {import('./clock').EventLink<EventData>} end
384
+ * @returns {Promise<Array<{ event: import('./clock').EventBlockView<EventData>, depth: number }>>}
385
+ */
386
+ async function findEvents (events, start, end, depth = 0) {
387
+ // console.log('findEvents', start)
388
+ const event = await events.get(start);
389
+ const acc = [{ event, depth }];
390
+ const { parents } = event.value;
391
+ if (parents.length === 1 && String(parents[0]) === String(end)) return acc
392
+ const rest = await Promise.all(parents.map((p) => findEvents(events, p, end, depth + 1)));
393
+ return acc.concat(...rest)
394
+ }
395
+
396
+ // @ts-nocheck
397
+
398
+ const createBlock = (bytes, cid) => Block.create({ cid, bytes, hasher: sha2.sha256, codec: codec__namespace });
399
+
400
+ const encrypt = async function * ({ get, cids, hasher, key, cache, chunker, root }) {
401
+ const set = new Set();
402
+ let eroot;
403
+ for (const string of cids) {
404
+ const cid = multiformats.CID.parse(string);
405
+ const unencrypted = await get(cid);
406
+ const block = await Block.encode({ ...await codec__namespace.encrypt({ ...unencrypted, key }), codec: codec__namespace, hasher });
407
+ // console.log(`encrypting ${string} as ${block.cid}`)
408
+ yield block;
409
+ set.add(block.cid.toString());
410
+ if (unencrypted.cid.equals(root)) eroot = block.cid;
411
+ }
412
+ if (!eroot) throw new Error('cids does not include root')
413
+ const list = [...set].map(s => multiformats.CID.parse(s));
414
+ let last;
415
+ for await (const node of cidSet.create({ list, get, cache, chunker, hasher, codec: dagcbor__namespace })) {
416
+ const block = await node.block;
417
+ yield block;
418
+ last = block;
419
+ }
420
+ const head = [eroot, last.cid];
421
+ const block = await Block.encode({ value: head, codec: dagcbor__namespace, hasher });
422
+ yield block;
423
+ };
424
+
425
+ const decrypt = async function * ({ root, get, key, cache, chunker, hasher }) {
426
+ const o = { ...await get(root), codec: dagcbor__namespace, hasher };
427
+ const decodedRoot = await Block.decode(o);
428
+ // console.log('decodedRoot', decodedRoot)
429
+ const { value: [eroot, tree] } = decodedRoot;
430
+ const rootBlock = await get(eroot); // should I decrypt?
431
+ const cidset = await cidSet.load({ cid: tree, get, cache, chunker, codec: codec__namespace, hasher });
432
+ const { result: nodes } = await cidset.getAllEntries();
433
+ const unwrap = async (eblock) => {
434
+ const { bytes, cid } = await codec__namespace.decrypt({ ...eblock, key }).catch(e => {
435
+ console.log('ekey', e);
436
+ throw new Error('bad key: ' + key.toString('hex'))
437
+ });
438
+ const block = await createBlock(bytes, cid);
439
+ return block
440
+ };
441
+ const promises = [];
442
+ for (const { cid } of nodes) {
443
+ if (!rootBlock.cid.equals(cid)) promises.push(get(cid).then(unwrap));
444
+ }
445
+ yield * promises;
446
+ yield unwrap(rootBlock);
447
+ };
448
+
449
+ // @ts-nocheck
450
+ // from https://github.com/duzun/sync-sha1/blob/master/rawSha1.js
451
+ // MIT License Copyright (c) 2020 Dumitru Uzun
452
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
453
+ // of this software and associated documentation files (the "Software"), to deal
454
+ // in the Software without restriction, including without limitation the rights
455
+ // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
456
+ // copies of the Software, and to permit persons to whom the Software is
457
+ // furnished to do so, subject to the following conditions:
458
+
459
+ // The above copyright notice and this permission notice shall be included in all
460
+ // copies or substantial portions of the Software.
461
+
462
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
463
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
464
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
465
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
466
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
467
+ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
468
+ // SOFTWARE.
469
+
470
+ // import {
471
+ // isLittleEndian, switchEndianness32
472
+ // } from 'string-encode'
473
+
474
+ /**
475
+ * SHA1 on binary array
476
+ *
477
+ * @param {Uint8Array} b Data to hash
478
+ *
479
+ * @return {Uint8Array} sha1 hash
480
+ */
481
+ function rawSha1 (b) {
482
+ let i = b.byteLength;
483
+ let bs = 0;
484
+ let A; let B; let C; let D; let G;
485
+ const H = Uint32Array.from([A = 0x67452301, B = 0xEFCDAB89, ~A, ~B, 0xC3D2E1F0]);
486
+ const W = new Uint32Array(80);
487
+ const nrWords = (i / 4 + 2) | 15;
488
+ const words = new Uint32Array(nrWords + 1);
489
+ let j;
490
+
491
+ words[nrWords] = i * 8;
492
+ words[i >> 2] |= 0x80 << (~i << 3);
493
+ for (;i--;) {
494
+ words[i >> 2] |= b[i] << (~i << 3);
495
+ }
496
+
497
+ for (A = H.slice(); bs < nrWords; bs += 16, A.set(H)) {
498
+ for (i = 0; i < 80;
499
+ A[0] = (
500
+ G = ((b = A[0]) << 5 | b >>> 27) +
501
+ A[4] +
502
+ (W[i] = (i < 16) ? words[bs + i] : G << 1 | G >>> 31) +
503
+ 0x5A827999,
504
+ B = A[1],
505
+ C = A[2],
506
+ D = A[3],
507
+ G + ((j = i / 5 >> 2) // eslint-disable-line no-cond-assign
508
+ ? j !== 2
509
+ ? (B ^ C ^ D) + (j & 2 ? 0x6FE0483D : 0x14577208)
510
+ : (B & C | B & D | C & D) + 0x34994343
511
+ : B & C | ~B & D
512
+ )
513
+ )
514
+ , A[1] = b
515
+ , A[2] = B << 30 | B >>> 2
516
+ , A[3] = C
517
+ , A[4] = D
518
+ , ++i
519
+ ) {
520
+ G = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
521
+ }
522
+
523
+ for (i = 5; i;) H[--i] = H[i] + A[i];
524
+ }
525
+
526
+ // if (isLittleEndian()) {
527
+ // H = H.map(switchEndianness32)
528
+ // }
529
+
530
+ return new Uint8Array(H.buffer, H.byteOffset, H.byteLength)
531
+ }
532
+
533
+ // @ts-nocheck
534
+ const chunker = utils.bf(3);
535
+
536
+ const NO_ENCRYPT =
537
+ typeof process !== 'undefined' ? process.env.NO_ENCRYPT : ({ url: (typeof document === 'undefined' ? require('u' + 'rl').pathToFileURL(__filename).href : (document.currentScript && document.currentScript.src || new URL('bundle.js', document.baseURI).href)) }) && undefined.VITE_NO_ENCRYPT;
538
+
539
+ class Valet {
540
+ idb = null
541
+ name = null
542
+ uploadQueue = null
543
+ alreadyEnqueued = new Set()
544
+ keyMaterial = null
545
+ keyId = 'null'
546
+
547
+ /**
548
+ * Function installed by the database to upload car files
549
+ * @type {null|function(string, Uint8Array):Promise<void>}
550
+ */
551
+ uploadFunction = null
552
+
553
+ constructor (name = 'default', keyMaterial) {
554
+ this.name = name;
555
+ this.setKeyMaterial(keyMaterial);
556
+ this.uploadQueue = cargoQueue(async (tasks, callback) => {
557
+ console.log(
558
+ 'queue worker',
559
+ tasks.length,
560
+ tasks.reduce((acc, t) => acc + t.value.length, 0)
561
+ );
562
+ if (this.uploadFunction) {
563
+ // todo we can coalesce these into a single car file
564
+ return await this.withDB(async db => {
565
+ for (const task of tasks) {
566
+ await this.uploadFunction(task.carCid, task.value);
567
+ // update the indexedb to mark this car as no longer pending
568
+ const carMeta = await db.get('cidToCar', task.carCid);
569
+ delete carMeta.pending;
570
+ await db.put('cidToCar', carMeta);
571
+ }
572
+ })
573
+ }
574
+ callback();
575
+ });
576
+
577
+ this.uploadQueue.drain(async () => {
578
+ return await this.withDB(async db => {
579
+ const carKeys = (await db.getAllFromIndex('cidToCar', 'pending')).map(c => c.car);
580
+ for (const carKey of carKeys) {
581
+ await this.uploadFunction(carKey, await db.get('cars', carKey));
582
+ const carMeta = await db.get('cidToCar', carKey);
583
+ delete carMeta.pending;
584
+ await db.put('cidToCar', carMeta);
585
+ }
586
+ })
587
+ });
588
+ }
589
+
590
+ getKeyMaterial () {
591
+ return this.keyMaterial
592
+ }
593
+
594
+ setKeyMaterial (km) {
595
+ if (km && !NO_ENCRYPT) {
596
+ const hex = Uint8Array.from(buffer.Buffer.from(km, 'hex'));
597
+ this.keyMaterial = km;
598
+ const hash = rawSha1(hex);
599
+ this.keyId = buffer.Buffer.from(hash).toString('hex');
600
+ } else {
601
+ this.keyMaterial = null;
602
+ this.keyId = 'null';
603
+ }
604
+ // console.trace('keyId', this.name, this.keyId)
605
+ }
606
+
607
+ /**
608
+ * Group the blocks into a car and write it to the valet.
609
+ * @param {InnerBlockstore} innerBlockstore
610
+ * @param {Set<string>} cids
611
+ * @returns {Promise<void>}
612
+ * @memberof Valet
613
+ */
614
+ async writeTransaction (innerBlockstore, cids) {
615
+ if (innerBlockstore.lastCid) {
616
+ if (this.keyMaterial) {
617
+ // console.log('encrypting car', innerBlockstore.label)
618
+ const newCar = await blocksToEncryptedCarBlock(innerBlockstore.lastCid, innerBlockstore, this.keyMaterial);
619
+ await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
620
+ } else {
621
+ const newCar = await blocksToCarBlock(innerBlockstore.lastCid, innerBlockstore);
622
+ await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
623
+ }
624
+ }
625
+ }
626
+
627
+ withDB = async dbWorkFun => {
628
+ if (!this.idb) {
629
+ this.idb = await idb.openDB(`fp.${this.keyId}.${this.name}.valet`, 2, {
630
+ upgrade (db, oldVersion, newVersion, transaction) {
631
+ if (oldVersion < 1) {
632
+ db.createObjectStore('cars'); // todo use database name
633
+ const cidToCar = db.createObjectStore('cidToCar', { keyPath: 'car' });
634
+ cidToCar.createIndex('cids', 'cids', { multiEntry: true });
635
+ }
636
+ if (oldVersion < 2) {
637
+ const cidToCar = transaction.objectStore('cidToCar');
638
+ cidToCar.createIndex('pending', 'pending');
639
+ }
640
+ }
641
+ });
642
+ }
643
+ return await dbWorkFun(this.idb)
644
+ }
645
+
646
+ /**
647
+ *
648
+ * @param {string} carCid
649
+ * @param {*} value
650
+ */
651
+ async parkCar (carCid, value, cids) {
652
+ await this.withDB(async db => {
653
+ const tx = db.transaction(['cars', 'cidToCar'], 'readwrite');
654
+ await tx.objectStore('cars').put(value, carCid);
655
+ await tx.objectStore('cidToCar').put({ pending: 'y', car: carCid, cids: Array.from(cids) });
656
+ return await tx.done
657
+ });
658
+
659
+ // upload to web3.storage if we have credentials
660
+ if (this.uploadFunction) {
661
+ if (this.alreadyEnqueued.has(carCid)) {
662
+ // console.log('already enqueued', carCid)
663
+ return
664
+ }
665
+ // don't await this, it will be done in the queue
666
+ // console.log('add to queue', carCid, value.length)
667
+ this.uploadQueue.push({ carCid, value });
668
+ this.alreadyEnqueued.add(carCid);
669
+ }
670
+ }
671
+
672
+ remoteBlockFunction = null
673
+
674
+ async getBlock (dataCID) {
675
+ return await this.withDB(async db => {
676
+ const tx = db.transaction(['cars', 'cidToCar'], 'readonly');
677
+ const indexResp = await tx.objectStore('cidToCar').index('cids').get(dataCID);
678
+ const carCid = indexResp?.car;
679
+ if (!carCid) {
680
+ throw new Error('Missing block: ' + dataCID)
681
+ }
682
+ const carBytes = await tx.objectStore('cars').get(carCid);
683
+ const reader = await car.CarReader.fromBytes(carBytes);
684
+ if (this.keyMaterial) {
685
+ const roots = await reader.getRoots();
686
+ const readerGetWithCodec = async cid => {
687
+ const got = await reader.get(cid);
688
+ // console.log('got.', cid.toString())
689
+ let useCodec = codec__namespace;
690
+ if (cid.toString().indexOf('bafy') === 0) {
691
+ useCodec = dagcbor__namespace;
692
+ }
693
+ const decoded = await Block__namespace.decode({
694
+ ...got,
695
+ codec: useCodec,
696
+ hasher: sha2.sha256
697
+ });
698
+ // console.log('decoded', decoded.value)
699
+ return decoded
700
+ };
701
+ const { blocks } = await blocksFromEncryptedCarBlock(roots[0], readerGetWithCodec, this.keyMaterial);
702
+ const block = blocks.find(b => b.cid.toString() === dataCID);
703
+ if (block) {
704
+ return block.bytes
705
+ }
706
+ } else {
707
+ const gotBlock = await reader.get(cid.CID.parse(dataCID));
708
+ if (gotBlock) {
709
+ return gotBlock.bytes
710
+ }
711
+ }
712
+ })
713
+ }
714
+ }
715
+
716
+ const blocksToCarBlock = async (lastCid, blocks) => {
717
+ let size = 0;
718
+ const headerSize = CBW__namespace.headerLength({ roots: [lastCid] });
719
+ size += headerSize;
720
+ if (!Array.isArray(blocks)) {
721
+ blocks = Array.from(blocks.entries());
722
+ }
723
+ for (const { cid, bytes } of blocks) {
724
+ size += CBW__namespace.blockLength({ cid, bytes });
725
+ }
726
+ const buffer = new Uint8Array(size);
727
+ const writer = await CBW__namespace.createWriter(buffer, { headerSize });
728
+
729
+ writer.addRoot(lastCid);
730
+
731
+ for (const { cid, bytes } of blocks) {
732
+ writer.write({ cid, bytes });
733
+ }
734
+ await writer.close();
735
+ return await Block__namespace.encode({ value: writer.bytes, hasher: sha2.sha256, codec: raw__namespace })
736
+ };
737
+
738
+ const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blocks, keyMaterial) => {
739
+ const encryptionKey = buffer.Buffer.from(keyMaterial, 'hex');
740
+ const encryptedBlocks = [];
741
+ const theCids = [];
742
+ for (const { cid } of blocks.entries()) {
743
+ theCids.push(cid.toString());
744
+ }
745
+
746
+ let last;
747
+ for await (const block of encrypt({
748
+ cids: theCids,
749
+ get: async cid => blocks.get(cid), // maybe we can just use blocks.get
750
+ key: encryptionKey,
751
+ hasher: sha2.sha256,
752
+ chunker,
753
+ cache: cache.nocache,
754
+ // codec: dagcbor, // should be crypto?
755
+ root: innerBlockStoreClockRootCid
756
+ })) {
757
+ encryptedBlocks.push(block);
758
+ last = block;
759
+ }
760
+ // console.log('last', last.cid.toString(), 'for clock', innerBlockStoreClockRootCid.toString())
761
+ const encryptedCar = await blocksToCarBlock(last.cid, encryptedBlocks);
762
+ return encryptedCar
763
+ };
764
+ // { root, get, key, cache, chunker, hasher }
765
+
766
+ const memoizeDecryptedCarBlocks = new Map();
767
+ const blocksFromEncryptedCarBlock = async (cid, get, keyMaterial) => {
768
+ if (memoizeDecryptedCarBlocks.has(cid.toString())) {
769
+ return memoizeDecryptedCarBlocks.get(cid.toString())
770
+ } else {
771
+ const blocksPromise = (async () => {
772
+ const decryptionKey = buffer.Buffer.from(keyMaterial, 'hex');
773
+ // console.log('decrypting', keyMaterial, cid.toString())
774
+ const cids = new Set();
775
+ const decryptedBlocks = [];
776
+ for await (const block of decrypt({
777
+ root: cid,
778
+ get,
779
+ key: decryptionKey,
780
+ chunker,
781
+ hasher: sha2.sha256,
782
+ cache: cache.nocache
783
+ // codec: dagcbor
784
+ })) {
785
+ decryptedBlocks.push(block);
786
+ cids.add(block.cid.toString());
787
+ }
788
+ return { blocks: decryptedBlocks, cids }
789
+ })();
790
+ memoizeDecryptedCarBlocks.set(cid.toString(), blocksPromise);
791
+ return blocksPromise
792
+ }
793
+ };
794
+
795
+ // @ts-nocheck
796
+
797
+ // const sleep = ms => new Promise(r => setTimeout(r, ms))
798
+
799
+ const husherMap = new Map();
800
+ const husher = (id, workFn) => {
801
+ if (!husherMap.has(id)) {
802
+ husherMap.set(
803
+ id,
804
+ workFn().finally(() => setTimeout(() => husherMap.delete(id), 100))
805
+ );
806
+ }
807
+ return husherMap.get(id)
808
+ };
809
+
810
+ /**
811
+ * @typedef {Object} AnyBlock
812
+ * @property {import('./link').AnyLink} cid - The CID of the block
813
+ * @property {Uint8Array} bytes - The block's data
814
+ *
815
+ * @typedef {Object} Blockstore
816
+ * @property {function(import('./link').AnyLink): Promise<AnyBlock|undefined>} get - A function to retrieve a block by CID
817
+ * @property {function(import('./link').AnyLink, Uint8Array): Promise<void>} put - A function to store a block's data and CID
818
+ *
819
+ * A blockstore that caches writes to a transaction and only persists them when committed.
820
+ * @implements {Blockstore}
821
+ */
822
+ class TransactionBlockstore {
823
+ /** @type {Map<string, Uint8Array>} */
824
+ committedBlocks = new Map()
825
+
826
+ valet = null
827
+
828
+ instanceId = 'blkz.' + Math.random().toString(36).substring(2, 4)
829
+ inflightTransactions = new Set()
830
+
831
+ constructor (name, encryptionKey) {
832
+ this.valet = new Valet(name, encryptionKey);
833
+ }
834
+
835
+ /**
836
+ * Get a block from the store.
837
+ *
838
+ * @param {import('./link').AnyLink} cid
839
+ * @returns {Promise<AnyBlock | undefined>}
840
+ */
841
+ async get (cid) {
842
+ const key = cid.toString();
843
+ // it is safe to read from the in-flight transactions because they are immutable
844
+ const bytes = await Promise.any([this.transactionsGet(key), this.committedGet(key)]).catch(e => {
845
+ // console.log('networkGet', cid.toString(), e)
846
+ return this.networkGet(key)
847
+ });
848
+ if (!bytes) throw new Error('Missing block: ' + key)
849
+ return { cid, bytes }
850
+ }
851
+
852
+ // this iterates over the in-flight transactions
853
+ // and returns the first matching block it finds
854
+ async transactionsGet (key) {
855
+ for (const transaction of this.inflightTransactions) {
856
+ const got = await transaction.get(key);
857
+ if (got && got.bytes) return got.bytes
858
+ }
859
+ throw new Error('Missing block: ' + key)
860
+ }
861
+
862
+ async committedGet (key) {
863
+ const old = this.committedBlocks.get(key);
864
+ if (old) return old
865
+ const got = await this.valet.getBlock(key);
866
+ // console.log('committedGet: ' + key)
867
+ this.committedBlocks.set(key, got);
868
+ return got
869
+ }
870
+
871
+ async clearCommittedCache () {
872
+ this.committedBlocks.clear();
873
+ }
874
+
875
+ async networkGet (key) {
876
+ if (this.valet.remoteBlockFunction) {
877
+ // todo why is this on valet?
878
+ const value = await husher(key, async () => await this.valet.remoteBlockFunction(key));
879
+ if (value) {
880
+ // console.log('networkGot: ' + key, value.length)
881
+ doTransaction('networkGot: ' + key, this, async innerBlockstore => {
882
+ await innerBlockstore.put(multiformats.CID.parse(key), value);
883
+ });
884
+ return value
885
+ }
886
+ } else {
887
+ return false
888
+ }
889
+ }
890
+
891
+ /**
892
+ * Add a block to the store. Usually bound to a transaction by a closure.
893
+ * It sets the lastCid property to the CID of the block that was put.
894
+ * This is used by the transaction as the head of the car when written to the valet.
895
+ * We don't have to worry about which transaction we are when we are here because
896
+ * we are the transactionBlockstore.
897
+ *
898
+ * @param {import('./link').AnyLink} cid
899
+ * @param {Uint8Array} bytes
900
+ */
901
+ put (cid, bytes) {
902
+ throw new Error('use a transaction to put')
903
+ }
904
+
905
+ /**
906
+ * Iterate over all blocks in the store.
907
+ *
908
+ * @yields {AnyBlock}
909
+ * @returns {AsyncGenerator<AnyBlock>}
910
+ */
911
+ // * entries () {
912
+ // // needs transaction blocks?
913
+ // // for (const [str, bytes] of this.blocks) {
914
+ // // yield { cid: parse(str), bytes }
915
+ // // }
916
+ // for (const [str, bytes] of this.committedBlocks) {
917
+ // yield { cid: parse(str), bytes }
918
+ // }
919
+ // }
920
+
921
+ /**
922
+ * Begin a transaction. Ensures the uncommitted blocks are empty at the beginning.
923
+ * Returns the blocks to read and write during the transaction.
924
+ * @returns {InnerBlockstore}
925
+ * @memberof TransactionBlockstore
926
+ */
927
+ begin (label = '') {
928
+ const innerTransactionBlockstore = new InnerBlockstore(label, this);
929
+ this.inflightTransactions.add(innerTransactionBlockstore);
930
+ return innerTransactionBlockstore
931
+ }
932
+
933
+ /**
934
+ * Commit the transaction. Writes the blocks to the store.
935
+ * @returns {Promise<void>}
936
+ * @memberof TransactionBlockstore
937
+ */
938
+ async commit (innerBlockstore) {
939
+ await this.doCommit(innerBlockstore);
940
+ }
941
+
942
+ // first get the transaction blockstore from the map of transaction blockstores
943
+ // then copy it to committedBlocks
944
+ // then write the transaction blockstore to a car
945
+ // then write the car to the valet
946
+ // then remove the transaction blockstore from the map of transaction blockstores
947
+ doCommit = async innerBlockstore => {
948
+ const cids = new Set();
949
+ for (const { cid, bytes } of innerBlockstore.entries()) {
950
+ const stringCid = cid.toString(); // unnecessary string conversion, can we fix upstream?
951
+ if (this.committedBlocks.has(stringCid)) ; else {
952
+ this.committedBlocks.set(stringCid, bytes);
953
+ cids.add(stringCid);
954
+ }
955
+ }
956
+ if (cids.size > 0) {
957
+ // console.log(innerBlockstore.label, 'committing', cids.size, 'blocks')
958
+ await this.valet.writeTransaction(innerBlockstore, cids);
959
+ }
960
+ }
961
+
962
+ /**
963
+ * Retire the transaction. Clears the uncommitted blocks.
964
+ * @returns {void}
965
+ * @memberof TransactionBlockstore
966
+ */
967
+ retire (innerBlockstore) {
968
+ this.inflightTransactions.delete(innerBlockstore);
969
+ }
970
+ }
971
+
972
+ /**
973
+ * Runs a function on an inner blockstore, then persists the change to a car writer
974
+ * or other outer blockstore.
975
+ * @param {string} label
976
+ * @param {TransactionBlockstore} blockstore
977
+ * @param {(innerBlockstore: Blockstore) => Promise<any>} doFun
978
+ * @returns {Promise<any>}
979
+ * @memberof TransactionBlockstore
980
+ */
981
+ const doTransaction = async (label, blockstore, doFun) => {
982
+ if (!blockstore.commit) return await doFun(blockstore)
983
+ const innerBlockstore = blockstore.begin(label);
984
+ try {
985
+ const result = await doFun(innerBlockstore);
986
+ await blockstore.commit(innerBlockstore);
987
+ return result
988
+ } catch (e) {
989
+ console.error(`Transaction ${label} failed`, e, e.stack);
990
+ throw e
991
+ } finally {
992
+ blockstore.retire(innerBlockstore);
993
+ }
994
+ };
995
+
996
+ /** @implements {BlockFetcher} */
997
+ class InnerBlockstore {
998
+ /** @type {Map<string, Uint8Array>} */
999
+ blocks = new Map()
1000
+ lastCid = null
1001
+ label = ''
1002
+ parentBlockstore = null
1003
+
1004
+ constructor (label, parentBlockstore) {
1005
+ this.label = label;
1006
+ this.parentBlockstore = parentBlockstore;
1007
+ }
1008
+
1009
+ /**
1010
+ * @param {import('./link').AnyLink} cid
1011
+ * @returns {Promise<AnyBlock | undefined>}
1012
+ */
1013
+ async get (cid) {
1014
+ const key = cid.toString();
1015
+ let bytes = this.blocks.get(key);
1016
+ if (bytes) {
1017
+ return { cid, bytes }
1018
+ }
1019
+ bytes = await this.parentBlockstore.committedGet(key);
1020
+ if (bytes) {
1021
+ return { cid, bytes }
1022
+ }
1023
+ }
1024
+
1025
+ /**
1026
+ * @param {import('./link').AnyLink} cid
1027
+ * @param {Uint8Array} bytes
1028
+ */
1029
+ put (cid, bytes) {
1030
+ // console.log('put', cid)
1031
+ this.blocks.set(cid.toString(), bytes);
1032
+ this.lastCid = cid;
1033
+ }
1034
+
1035
+ * entries () {
1036
+ for (const [str, bytes] of this.blocks) {
1037
+ yield { cid: link.parse(str), bytes };
1038
+ }
1039
+ }
1040
+ }
1041
+
1042
+ // @ts-nocheck
1043
+ const blockOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare: utils.simpleCompare };
1044
+
1045
+ const withLog = async (label, fn) => {
1046
+ const resp = await fn();
1047
+ // console.log('withLog', label, !!resp)
1048
+ return resp
1049
+ };
1050
+
1051
+ // should also return a CIDCounter
1052
+ const makeGetBlock = (blocks) => {
1053
+ // const cids = new CIDCounter() // this could be used for proofs of mutations
1054
+ const getBlockFn = async (address) => {
1055
+ const { cid, bytes } = await withLog(address, () => blocks.get(address));
1056
+ // cids.add({ address: cid })
1057
+ return Block.create({ cid, bytes, hasher: sha2.sha256, codec: dagcbor__namespace })
1058
+ };
1059
+ return {
1060
+ // cids,
1061
+ getBlock: getBlockFn
1062
+ }
1063
+ };
1064
+
1065
+ /**
1066
+ *
1067
+ * @param {*} param0
1068
+ * @returns
1069
+ */
1070
+ async function createAndSaveNewEvent ({
1071
+ inBlocks,
1072
+ bigPut,
1073
+ root,
1074
+ event: inEvent,
1075
+ head,
1076
+ additions,
1077
+ removals = []
1078
+ }) {
1079
+ let cids;
1080
+ const { key, value, del } = inEvent;
1081
+ const data = {
1082
+ root: (root
1083
+ ? {
1084
+ cid: root.cid,
1085
+ bytes: root.bytes, // can we remove this?
1086
+ value: root.value // can we remove this?
1087
+ }
1088
+ : null),
1089
+ key
1090
+ };
1091
+
1092
+ if (del) {
1093
+ data.value = null;
1094
+ data.type = 'del';
1095
+ } else {
1096
+ data.value = value;
1097
+ data.type = 'put';
1098
+ }
1099
+ /** @type {EventData} */
1100
+
1101
+ const event = await EventBlock.create(data, head);
1102
+ bigPut(event)
1103
+ ;({ head, cids } = await advance(inBlocks, head, event.cid));
1104
+
1105
+ return {
1106
+ root,
1107
+ additions,
1108
+ removals,
1109
+ head,
1110
+ clockCIDs: cids,
1111
+ event
1112
+ }
1113
+ }
1114
+
1115
+ const makeGetAndPutBlock = (inBlocks) => {
1116
+ // const mblocks = new MemoryBlockstore()
1117
+ // const blocks = new MultiBlockFetcher(mblocks, inBlocks)
1118
+ const { getBlock, cids } = makeGetBlock(inBlocks);
1119
+ const put = inBlocks.put.bind(inBlocks);
1120
+ const bigPut = async (block, additions) => {
1121
+ // console.log('bigPut', block.cid.toString())
1122
+ const { cid, bytes } = block;
1123
+ put(cid, bytes);
1124
+ // mblocks.putSync(cid, bytes)
1125
+ if (additions) {
1126
+ additions.set(cid.toString(), block);
1127
+ }
1128
+ };
1129
+ return { getBlock, bigPut, blocks: inBlocks, cids }
1130
+ };
1131
+
1132
+ const bulkFromEvents = (sorted, event) => {
1133
+ if (event) {
1134
+ const update = { value: { data: { key: event.key } } };
1135
+ if (event.del) {
1136
+ update.value.data.type = 'del';
1137
+ } else {
1138
+ update.value.data.type = 'put';
1139
+ update.value.data.value = event.value;
1140
+ }
1141
+ sorted.push(update);
1142
+ }
1143
+ const bulk = new Map();
1144
+ for (const { value: event } of sorted) {
1145
+ const {
1146
+ data: { type, value, key }
1147
+ } = event;
1148
+ const bulkEvent = type === 'put' ? { key, value } : { key, del: true };
1149
+ bulk.set(bulkEvent.key, bulkEvent); // last wins
1150
+ }
1151
+ return Array.from(bulk.values())
1152
+ };
1153
+
1154
+ // Get the value of the root from the ancestor event
1155
+ /**
1156
+ *
1157
+ * @param {EventFetcher} events
1158
+ * @param {Link} ancestor
1159
+ * @param {*} getBlock
1160
+ * @returns
1161
+ */
1162
+ const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
1163
+ // console.log('prollyRootFromAncestor', ancestor)
1164
+ const event = await events.get(ancestor);
1165
+ const { root } = event.value.data;
1166
+ // console.log('prollyRootFromAncestor', root.cid, JSON.stringify(root.value))
1167
+ if (root) {
1168
+ return map.load({ cid: root.cid, get: getBlock, ...blockOpts })
1169
+ } else {
1170
+ return null
1171
+ }
1172
+ };
1173
+
1174
+ const doProllyBulk = async (inBlocks, head, event) => {
1175
+ const { getBlock, blocks } = makeGetAndPutBlock(inBlocks);
1176
+ let bulkSorted = [];
1177
+ let prollyRootNode = null;
1178
+ if (head.length) {
1179
+ // Otherwise, we find the common ancestor and update the root and other blocks
1180
+ const events = new EventFetcher(blocks);
1181
+ // todo this is returning more events than necessary, lets define the desired semantics from the top down
1182
+ // good semantics mean we can cache the results of this call
1183
+ const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
1184
+ bulkSorted = sorted;
1185
+ // console.log('sorted', JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
1186
+ prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock);
1187
+ // console.log('event', event)
1188
+ }
1189
+
1190
+ const bulkOperations = bulkFromEvents(bulkSorted, event);
1191
+
1192
+ // if prolly root node is null, we need to create a new one
1193
+ if (!prollyRootNode) {
1194
+ let root;
1195
+ const newBlocks = [];
1196
+ // if all operations are deletes, we can just return an empty root
1197
+ if (bulkOperations.every((op) => op.del)) {
1198
+ return { root: null, blocks: [] }
1199
+ }
1200
+ for await (const node of map.create({ get: getBlock, list: bulkOperations, ...blockOpts })) {
1201
+ root = await node.block;
1202
+ newBlocks.push(root);
1203
+ }
1204
+ return { root, blocks: newBlocks }
1205
+ } else {
1206
+ return await prollyRootNode.bulk(bulkOperations) // { root: newProllyRootNode, blocks: newBlocks }
1207
+ }
1208
+ };
1209
+
1210
+ /**
1211
+ * Put a value (a CID) for the given key. If the key exists, its value is overwritten.
1212
+ *
1213
+ * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1214
+ * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1215
+ * @param {string} key The key of the value to put.
1216
+ * @param {CID} value The value to put.
1217
+ * @param {object} [options]
1218
+ * @returns {Promise<Result>}
1219
+ */
1220
+ async function put (inBlocks, head, event, options) {
1221
+ const { bigPut } = makeGetAndPutBlock(inBlocks);
1222
+
1223
+ // If the head is empty, we create a new event and return the root and addition blocks
1224
+ if (!head.length) {
1225
+ const additions = new Map();
1226
+ const { root, blocks } = await doProllyBulk(inBlocks, head, event);
1227
+ for (const b of blocks) {
1228
+ bigPut(b, additions);
1229
+ }
1230
+ return createAndSaveNewEvent({ inBlocks, bigPut, root, event, head, additions: Array.from(additions.values()) })
1231
+ }
1232
+ const { root: newProllyRootNode, blocks: newBlocks } = await doProllyBulk(inBlocks, head, event);
1233
+
1234
+ if (!newProllyRootNode) {
1235
+ return createAndSaveNewEvent({
1236
+ inBlocks,
1237
+ bigPut,
1238
+ root: null,
1239
+ event,
1240
+ head,
1241
+ additions: []
1242
+ })
1243
+ } else {
1244
+ const prollyRootBlock = await newProllyRootNode.block;
1245
+ const additions = new Map(); // ; const removals = new Map()
1246
+ bigPut(prollyRootBlock, additions);
1247
+ for (const nb of newBlocks) {
1248
+ bigPut(nb, additions);
1249
+ }
1250
+ // additions are new blocks
1251
+ return createAndSaveNewEvent({
1252
+ inBlocks,
1253
+ bigPut,
1254
+ root: prollyRootBlock,
1255
+ event,
1256
+ head,
1257
+ additions: Array.from(additions.values()) /*, todo? Array.from(removals.values()) */
1258
+ })
1259
+ }
1260
+ }
1261
+
1262
+ /**
1263
+ * Determine the effective prolly root given the current merkle clock head.
1264
+ *
1265
+ * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1266
+ * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1267
+ */
1268
+ async function root (inBlocks, head) {
1269
+ if (!head.length) {
1270
+ throw new Error('no head')
1271
+ }
1272
+ const { root: newProllyRootNode, blocks: newBlocks, cids } = await doProllyBulk(inBlocks, head);
1273
+ // todo maybe these should go to a temp blockstore?
1274
+ await doTransaction('root', inBlocks, async (transactionBlockstore) => {
1275
+ const { bigPut } = makeGetAndPutBlock(transactionBlockstore);
1276
+ for (const nb of newBlocks) {
1277
+ bigPut(nb);
1278
+ }
1279
+ });
1280
+ return { cids, node: newProllyRootNode }
1281
+ }
1282
+
1283
+ /**
1284
+ * Get the list of events not known by the `since` event
1285
+ * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1286
+ * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1287
+ * @param {import('./clock').EventLink<EventData>} since Event to compare against.
1288
+ * @returns {Promise<{clockCIDs: CIDCounter, result: EventData[]}>}
1289
+ */
1290
+ async function eventsSince (blocks, head, since) {
1291
+ if (!head.length) {
1292
+ throw new Error('no head')
1293
+ }
1294
+ const sinceHead = [...since, ...head]; // ?
1295
+ const { cids, events: unknownSorted3 } = await findEventsToSync(blocks, sinceHead);
1296
+ return { clockCIDs: cids, result: unknownSorted3.map(({ value: { data } }) => data) }
1297
+ }
1298
+
1299
+ /**
1300
+ *
1301
+ * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1302
+ * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1303
+ *
1304
+ * @returns {Promise<{clockCIDs: CIDCounter, result: EventData[]}>}
1305
+ *
1306
+ */
1307
+ async function getAll (blocks, head) {
1308
+ // todo use the root node left around from put, etc
1309
+ // move load to a central place
1310
+ if (!head.length) {
1311
+ return { clockCIDs: new utils.CIDCounter(), cids: new utils.CIDCounter(), result: [] }
1312
+ }
1313
+ const { node: prollyRootNode, cids: clockCIDs } = await root(blocks, head);
1314
+ if (!prollyRootNode) {
1315
+ return { clockCIDs, cids: new utils.CIDCounter(), result: [] }
1316
+ }
1317
+ const { result, cids } = await prollyRootNode.getAllEntries(); // todo params
1318
+ return { clockCIDs, cids, result: result.map(({ key, value }) => ({ key, value })) }
1319
+ }
1320
+
1321
+ /**
1322
+ * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1323
+ * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1324
+ * @param {string} key The key of the value to retrieve.
1325
+ */
1326
+ async function get (blocks, head, key) {
1327
+ // instead pass root from db? and always update on change
1328
+ if (!head.length) {
1329
+ return { cids: new utils.CIDCounter(), result: null }
1330
+ }
1331
+ const { node: prollyRootNode, cids: clockCIDs } = await root(blocks, head);
1332
+ if (!prollyRootNode) {
1333
+ return { clockCIDs, cids: new utils.CIDCounter(), result: null }
1334
+ }
1335
+ const { result, cids } = await prollyRootNode.get(key);
1336
+ return { result, cids, clockCIDs }
1337
+ }
1338
+
1339
+ async function * vis (blocks, head) {
1340
+ if (!head.length) {
1341
+ return { cids: new utils.CIDCounter(), result: null }
1342
+ }
1343
+ const { node: prollyRootNode, cids } = await root(blocks, head);
1344
+ const lines = [];
1345
+ for await (const line of prollyRootNode.vis()) {
1346
+ yield line;
1347
+ lines.push(line);
1348
+ }
1349
+ return { vis: lines.join('\n'), cids }
1350
+ }
1351
+
1352
+ async function visMerkleTree (blocks, head) {
1353
+ if (!head.length) {
1354
+ return { cids: new utils.CIDCounter(), result: null }
1355
+ }
1356
+ const { node: prollyRootNode, cids } = await root(blocks, head);
1357
+ const lines = [];
1358
+ for await (const line of prollyRootNode.vis()) {
1359
+ lines.push(line);
1360
+ }
1361
+ return { vis: lines.join('\n'), cids }
1362
+ }
1363
+
1364
+ async function visMerkleClock (blocks, head) {
1365
+ const lines = [];
1366
+ for await (const line of vis$1(blocks, head)) {
1367
+ // yield line
1368
+ lines.push(line);
1369
+ }
1370
+ return { vis: lines.join('\n') }
1371
+ }
1372
+
1373
+ // @ts-nocheck
1374
+ // import { CID } from 'multiformats/dist/types/src/cid.js'
1375
+
1376
+ // const sleep = ms => new Promise(resolve => setTimeout(resolve, ms))
1377
+
1378
+ // class Proof {}
1379
+
1380
+ /**
1381
+ * @class Fireproof
1382
+ * @classdesc Fireproof stores data in IndexedDB and provides a Merkle clock.
1383
+ * This is the main class for saving and loading JSON and other documents with the database. You can find additional examples and
1384
+ * usage guides in the repository README.
1385
+ *
1386
+ * @param {import('./blockstore.js').TransactionBlockstore} blocks - The block storage instance to use documents and indexes
1387
+ * @param {CID[]} clock - The Merkle clock head to use for the Fireproof instance.
1388
+ * @param {object} [config] - Optional configuration options for the Fireproof instance.
1389
+ * @param {object} [authCtx] - Optional authorization context object to use for any authentication checks.
1390
+ *
1391
+ */
1392
+ class Fireproof {
1393
+ listeners = new Set()
1394
+
1395
+ /**
1396
+ * @function storage
1397
+ * @memberof Fireproof
1398
+ * Creates a new Fireproof instance with default storage settings
1399
+ * Most apps should use this and not worry about the details.
1400
+ * @static
1401
+ * @returns {Fireproof} - a new Fireproof instance
1402
+ */
1403
+ static storage = (name = 'global') => {
1404
+ const instanceKey = crypto.randomBytes(32).toString('hex'); // pass null to disable encryption
1405
+ // pick a random key from const validatedKeys
1406
+ // const instanceKey = validatedKeys[Math.floor(Math.random() * validatedKeys.length)]
1407
+ return new Fireproof(new TransactionBlockstore(name, instanceKey), [], { name })
1408
+ }
1409
+
1410
+ constructor (blocks, clock, config, authCtx = {}) {
1411
+ this.name = config?.name || 'global';
1412
+ this.instanceId = `fp.${this.name}.${Math.random().toString(36).substring(2, 7)}`;
1413
+ this.blocks = blocks;
1414
+ this.clock = clock;
1415
+ this.config = config;
1416
+ this.authCtx = authCtx;
1417
+ this.indexes = new Map();
1418
+ }
1419
+
1420
+ /**
1421
+ * Renders the Fireproof instance as a JSON object.
1422
+ * @returns {Object} - The JSON representation of the Fireproof instance. Includes clock heads for the database and its indexes.
1423
+ * @memberof Fireproof
1424
+ * @instance
1425
+ */
1426
+ toJSON () {
1427
+ // todo this also needs to return the index roots...
1428
+ return {
1429
+ clock: this.clockToJSON(),
1430
+ name: this.name,
1431
+ key: this.blocks.valet.getKeyMaterial(),
1432
+ indexes: [...this.indexes.values()].map(index => index.toJSON())
1433
+ }
1434
+ }
1435
+
1436
+ clockToJSON () {
1437
+ return this.clock.map(cid => cid.toString())
1438
+ }
1439
+
1440
+ hydrate ({ clock, name, key }) {
1441
+ this.name = name;
1442
+ this.clock = clock;
1443
+ this.blocks.valet.setKeyMaterial(key);
1444
+ this.indexBlocks = null;
1445
+ }
1446
+
1447
+ /**
1448
+ * Triggers a notification to all listeners
1449
+ * of the Fireproof instance so they can repaint UI, etc.
1450
+ * @param {CID[] } clock
1451
+ * Clock to use for the snapshot.
1452
+ * @returns {Promise<void>}
1453
+ * @memberof Fireproof
1454
+ * @instance
1455
+ */
1456
+ async notifyReset () {
1457
+ await this.notifyListeners({ _reset: true, _clock: this.clockToJSON() });
1458
+ }
1459
+
1460
+ // used by indexes etc. to notify database listeners of new availability
1461
+ async notifyExternal (source = 'unknown') {
1462
+ await this.notifyListeners({ _external: source, _clock: this.clockToJSON() });
1463
+ }
1464
+
1465
+ /**
1466
+ * Returns the changes made to the Fireproof instance since the specified event.
1467
+ * @function changesSince
1468
+ * @param {CID[]} [event] - The clock head to retrieve changes since. If null or undefined, retrieves all changes.
1469
+ * @returns {Object<{rows : Object[], clock: CID[]}>} An object containing the rows and the head of the instance's clock.
1470
+ * @memberof Fireproof
1471
+ * @instance
1472
+ */
1473
+ async changesSince (event) {
1474
+ // console.log('changesSince', this.instanceId, event, this.clock)
1475
+ let rows, dataCIDs, clockCIDs;
1476
+ // if (!event) event = []
1477
+ if (event) {
1478
+ const resp = await eventsSince(this.blocks, this.clock, event);
1479
+ const docsMap = new Map();
1480
+ for (const { key, type, value } of resp.result.map(decodeEvent)) {
1481
+ if (type === 'del') {
1482
+ docsMap.set(key, { key, del: true });
1483
+ } else {
1484
+ docsMap.set(key, { key, value });
1485
+ }
1486
+ }
1487
+ rows = Array.from(docsMap.values());
1488
+ clockCIDs = resp.cids;
1489
+ // console.log('change rows', this.instanceId, rows)
1490
+ } else {
1491
+ const allResp = await getAll(this.blocks, this.clock);
1492
+ rows = allResp.result.map(({ key, value }) => (decodeEvent({ key, value })));
1493
+ dataCIDs = allResp.cids;
1494
+ // console.log('dbdoc rows', this.instanceId, rows)
1495
+ }
1496
+ return {
1497
+ rows,
1498
+ clock: this.clockToJSON(),
1499
+ proof: { data: await cidsToProof(dataCIDs), clock: await cidsToProof(clockCIDs) }
1500
+ }
1501
+ }
1502
+
1503
+ async allDocuments () {
1504
+ const allResp = await getAll(this.blocks, this.clock);
1505
+ const rows = allResp.result.map(({ key, value }) => (decodeEvent({ key, value }))).map(({ key, value }) => ({ key, value: { _id: key, ...value } }));
1506
+ return {
1507
+ rows,
1508
+ clock: this.clockToJSON(),
1509
+ proof: await cidsToProof(allResp.cids)
1510
+ }
1511
+ }
1512
+
1513
+ /**
1514
+ * Runs validation on the specified document using the Fireproof instance's configuration. Throws an error if the document is invalid.
1515
+ *
1516
+ * @param {Object} doc - The document to validate.
1517
+ * @returns {Promise<void>}
1518
+ * @throws {Error} - Throws an error if the document is invalid.
1519
+ * @memberof Fireproof
1520
+ * @instance
1521
+ */
1522
+ async runValidation (doc) {
1523
+ if (this.config && this.config.validateChange) {
1524
+ const oldDoc = await this.get(doc._id)
1525
+ .then((doc) => doc)
1526
+ .catch(() => ({}));
1527
+ this.config.validateChange(doc, oldDoc, this.authCtx);
1528
+ }
1529
+ }
1530
+
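// NOTE (editorial sketch, not part of the published bundle; kept as a comment so the
// class body stays valid): runValidation only acts when the instance has a config with
// a validateChange function (how that config is supplied is not shown in this section,
// so treat the wiring as an assumption). A validator receives the new document, the
// previous version (or {} if none), and the auth context, and throws to reject a write:
//
//   const validateChange = (newDoc, oldDoc, authCtx) => {
//     if (newDoc.ownerId && oldDoc.ownerId && newDoc.ownerId !== oldDoc.ownerId) {
//       throw new Error('ownerId may not be changed')
//     }
//   }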
1531
+ /**
1532
+ * Retrieves the document with the specified ID from the database
1533
+ *
1534
+ * @param {string} key - the ID of the document to retrieve
1535
+ * @param {Object} [opts] - options
1536
+ * @returns {Promise<{_id: string}>} - the document with the specified ID
1537
+ * @memberof Fireproof
1538
+ * @instance
1539
+ */
1540
+ async get (key, opts = {}) {
1541
+ const clock = opts.clock || this.clock;
1542
+ const resp = await get(this.blocks, clock, charwise.encode(key));
1543
+
1544
+ // this tombstone is temporary until we can get the prolly tree to delete
1545
+ if (!resp || resp.result === null) {
1546
+ throw new Error('Not found')
1547
+ }
1548
+ const doc = resp.result;
1549
+ if (opts.mvcc === true) {
1550
+ doc._clock = this.clockToJSON();
1551
+ }
1552
+ doc._proof = {
1553
+ data: await cidsToProof(resp.cids),
1554
+ clock: this.clockToJSON()
1555
+ };
1556
+ doc._id = key;
1557
+ return doc
1558
+ }
1559
+
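// NOTE (editorial sketch, not part of the published bundle; kept as a comment so the
// class body stays valid): reading a document, with and without MVCC info. `db` stands
// for a Fireproof instance created elsewhere.
//
//   const doc = await db.get('my-doc-id')
//   // doc._id is set and doc._proof.data lists the CIDs backing the read
//   const tracked = await db.get('my-doc-id', { mvcc: true })
//   // tracked._clock holds the clock head the read was made against; passing it back
//   // on a later put() enables the conflict check in putToProllyTree below.
//   // A missing document rejects with Error('Not found').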
1560
+ /**
1561
+ * Adds a new document to the database, or updates an existing document. Returns the ID of the document and the new clock head.
1562
+ *
1563
+ * @param {Object} doc - the document to be added
1564
+ * @param {string} doc._id - the document ID. If not provided, a random ID will be generated.
1565
+ * @param {CID[]} doc._clock - the document ID. If not provided, a random ID will be generated.
1566
+ * @param {Proof} doc._proof - CIDs referenced by the update
1567
+ * @returns {Promise<{ id: string, clock: CID[] }>} - The result of adding the document to the database
1568
+ * @memberof Fireproof
1569
+ * @instance
1570
+ */
1571
+ async put ({ _id, _proof, ...doc }) {
1572
+ const id = _id || 'f' + Math.random().toString(36).slice(2);
1573
+ await this.runValidation({ _id: id, ...doc });
1574
+ return await this.putToProllyTree({ key: id, value: doc }, doc._clock)
1575
+ }
1576
+
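// NOTE (editorial sketch, not part of the published bundle; kept as a comment so the
// class body stays valid): adding and updating documents with put. `db` stands for a
// Fireproof instance created elsewhere.
//
//   const created = await db.put({ hello: 'world' })
//   // no _id supplied, so one is generated: created.id -> 'f' + random string
//   const updated = await db.put({ _id: created.id, hello: 'world', done: true })
//   // both calls resolve to { id, clock, proof }, where clock is the new head as
//   // strings (clockToJSON) and proof lists the CIDs touched by the write.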
1577
+ /**
1578
+ * Deletes a document from the database
1579
+ * @param {string | any} docOrId - the document ID, or a document object carrying _id (and optionally _clock)
1580
+ * @returns {Promise<{ id: string, clock: CID[] }>} - The result of deleting the document from the database
1581
+ * @memberof Fireproof
1582
+ * @instance
1583
+ */
1584
+ async del (docOrId) {
1585
+ let id;
1586
+ let clock = null;
1587
+ if (docOrId._id) {
1588
+ id = docOrId._id;
1589
+ clock = docOrId._clock;
1590
+ } else {
1591
+ id = docOrId;
1592
+ }
1593
+ await this.runValidation({ _id: id, _deleted: true });
1594
+ return await this.putToProllyTree({ key: id, del: true }, clock) // not working at prolly tree layer?
1595
+ // this tombstone is temporary until we can get the prolly tree to delete
1596
+ // return await this.putToProllyTree({ key: id, value: null }, clock)
1597
+ }
1598
+
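// NOTE (editorial sketch, not part of the published bundle; kept as a comment so the
// class body stays valid): deleting by ID or by document. `db` stands for a Fireproof
// instance created elsewhere.
//
//   await db.del('my-doc-id')                        // by ID
//   const doc = await db.get('other-id', { mvcc: true })
//   const result = await db.del(doc)                 // by document; reuses doc._id and doc._clock
//   // result -> { id, clock, proof }, the same shape as put()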
1599
+ /**
1600
+ * Updates the underlying storage with the specified event.
1601
+ * @private
1602
+ * @param {{del?: true, key : string, value?: any}} decodedEvent - the event to add
1603
+ * @returns {Promise<{ proof:{}, id: string, clock: CID[] }>} - The result of adding the event to storage
1604
+ */
1605
+ async putToProllyTree (decodedEvent, clock = null) {
1606
+ const event = encodeEvent(decodedEvent);
1607
+ if (clock && JSON.stringify(clock) !== JSON.stringify(this.clockToJSON())) {
1608
+ // we need to check and see what version of the document exists at the clock specified
1609
+ // if it is the same as the one we are trying to put, then we can proceed
1610
+ const resp = await eventsSince(this.blocks, this.clock, event.value._clock);
1611
+ const missedChange = resp.result.find(({ key }) => key === event.key);
1612
+ if (missedChange) {
1613
+ throw new Error('MVCC conflict, document is changed, please reload the document and try again.')
1614
+ }
1615
+ }
1616
+ const result = await doTransaction(
1617
+ 'putToProllyTree',
1618
+ this.blocks,
1619
+ async (blocks) => await put(blocks, this.clock, event)
1620
+ );
1621
+ if (!result) {
1622
+ console.error('failed', event);
1623
+ throw new Error('failed to put at storage layer')
1624
+ }
1625
+ // console.log('new clock head', this.instanceId, result.head.toString())
1626
+ this.clock = result.head; // do we want to do this as a finally block
1627
+ await this.notifyListeners([decodedEvent]); // this type is odd
1628
+ return {
1629
+ id: decodedEvent.key,
1630
+ clock: this.clockToJSON(),
1631
+ proof: { data: await cidsToProof(result.cids), clock: await cidsToProof(result.clockCIDs) }
1632
+ }
1633
+ // todo should include additions (or split clock)
1634
+ }
1635
+
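// NOTE (editorial sketch, not part of the published bundle; kept as a comment so the
// class body stays valid): how the clock comparison above surfaces as an MVCC conflict.
// `db` stands for a Fireproof instance created elsewhere.
//
//   const mine = await db.get('shared-doc', { mvcc: true })      // remembers _clock
//   await db.put({ _id: 'shared-doc', winner: 'someone else' })  // concurrent write
//   try {
//     await db.put({ ...mine, winner: 'me' })                    // stale _clock included
//   } catch (e) {
//     // 'MVCC conflict, document is changed, please reload the document and try again.'
//   }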
1636
+ // /**
1637
+ // * Advances the clock to the specified event and updates the root CID
1638
+ // * Will be used by replication
1639
+ // */
1640
+ // async advance (event) {
1641
+ // this.clock = await advance(this.blocks, this.clock, event)
1642
+ // this.rootCid = await root(this.blocks, this.clock)
1643
+ // return this.clock
1644
+ // }
1645
+
1646
+ async * vis () {
1647
+ return yield * vis(this.blocks, this.clock)
1648
+ }
1649
+
1650
+ async visTree () {
1651
+ return await visMerkleTree(this.blocks, this.clock)
1652
+ }
1653
+
1654
+ async visClock () {
1655
+ return await visMerkleClock(this.blocks, this.clock)
1656
+ }
1657
+
1658
+ /**
1659
+ * Registers a Listener to be called when the Fireproof instance's clock is updated.
1660
+ * Receives live changes from the database after they are committed.
1661
+ * @param {Function} listener - The listener to be called when the clock is updated.
1662
+ * @returns {Function} - A function that can be called to unregister the listener.
1663
+ * @memberof Fireproof
1664
+ */
1665
+ registerListener (listener) {
1666
+ this.listeners.add(listener);
1667
+ return () => {
1668
+ this.listeners.delete(listener);
1669
+ }
1670
+ }
1671
+
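// NOTE (editorial sketch, not part of the published bundle; kept as a comment so the
// class body stays valid): listening for commits directly on the database. `db` stands
// for a Fireproof instance created elsewhere; for topic routing see the Listener class
// later in this bundle.
//
//   const unregister = db.registerListener(changes => {
//     // called after each put/del with the decoded events, and with
//     // { _reset: true, _clock } or { _external: source, _clock } for resets
//     console.log('database changed', changes)
//   })
//   // later: stop receiving updates
//   unregister()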
1672
+ async notifyListeners (changes) {
1673
+ // await sleep(10)
1674
+ for (const listener of this.listeners) {
1675
+ await listener(changes);
1676
+ }
1677
+ }
1678
+
1679
+ setCarUploader (carUploaderFn) {
1680
+ // console.log('registering car uploader')
1681
+ // https://en.wikipedia.org/wiki/Law_of_Demeter - this is a violation of the law of demeter
1682
+ this.blocks.valet.uploadFunction = carUploaderFn;
1683
+ }
1684
+
1685
+ setRemoteBlockReader (remoteBlockReaderFn) {
1686
+ // console.log('registering remote block reader')
1687
+ this.blocks.valet.remoteBlockFunction = remoteBlockReaderFn;
1688
+ }
1689
+ }
1690
+
1691
+ async function cidsToProof (cids) {
1692
+ if (!cids || !cids.all) return []
1693
+ const all = await cids.all();
1694
+ return [...all].map((cid) => cid.toString())
1695
+ }
1696
+
1697
+ function decodeEvent (event) {
1698
+ const decodedKey = charwise.decode(event.key);
1699
+ return { ...event, key: decodedKey }
1700
+ }
1701
+
1702
+ function encodeEvent (event) {
1703
+ if (!(event && event.key)) return
1704
+ const encodedKey = charwise.encode(event.key);
1705
+ return { ...event, key: encodedKey }
1706
+ }
1707
+
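// NOTE (editorial sketch, not part of the published bundle): decodeEvent/encodeEvent
// above exist because document keys are stored charwise-encoded so that keys collate
// correctly inside the prolly tree. A minimal illustration of the round trip, using the
// same `charwise` module this bundle already requires:
function exampleCharwiseRoundTrip () {
  const stored = charwise.encode('my-doc-id')   // collation-safe string form
  const event = { key: stored, value: { hello: 'world' } }
  const decoded = decodeEvent(event)            // -> { key: 'my-doc-id', value: {...} }
  const encoded = encodeEvent(decoded)          // -> key re-encoded for storage
  return { decoded, encoded }
}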
1708
+ // @ts-nocheck
1709
+
1710
+ const compare = (a, b) => {
1711
+ const [aKey, aRef] = a;
1712
+ const [bKey, bRef] = b;
1713
+ const comp = utils.simpleCompare(aKey, bKey);
1714
+ if (comp !== 0) return comp
1715
+ return refCompare(aRef, bRef)
1716
+ };
1717
+
1718
+ const refCompare = (aRef, bRef) => {
1719
+ if (Number.isNaN(aRef)) return -1
1720
+ if (Number.isNaN(bRef)) throw new Error('ref may not be Infinity or NaN')
1721
+ if (aRef === Infinity) return 1 // need to test this on equal docids!
1722
+ // if (!Number.isFinite(bRef)) throw new Error('ref may not be Infinity or NaN')
1723
+ return utils.simpleCompare(aRef, bRef)
1724
+ };
1725
+
1726
+ const dbIndexOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare };
1727
+ const idIndexOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare: utils.simpleCompare };
1728
+
1729
+ const makeDoc = ({ key, value }) => ({ _id: key, ...value });
1730
+
1731
+ /**
1732
+ * JSDoc for the result row type.
1733
+ * @typedef {Object} ChangeEvent
1734
+ * @property {string} key - The key of the document.
1735
+ * @property {Object} value - The new value of the document.
1736
+ * @property {boolean} [del] - Is the row deleted?
1737
+ * @memberof DbIndex
1738
+ */
1739
+
1740
+ /**
1741
+ * JSDoc for the DbIndex entry type.
1742
+ * @typedef {Object} DbIndexEntry
1743
+ * @property {string[]} key - The key for the DbIndex entry.
1744
+ * @property {Object} value - The value of the document.
1745
+ * @property {boolean} [del] - Is the row deleted?
1746
+ * @memberof DbIndex
1747
+ */
1748
+
1749
+ /**
1750
+ * Transforms a set of changes to DbIndex entries using a map function.
1751
+ *
1752
+ * @param {ChangeEvent[]} changes
1753
+ * @param {Function} mapFn
1754
+ * @returns {DbIndexEntry[]} The DbIndex entries generated by the map function.
1755
+ * @private
1756
+ * @memberof DbIndex
1757
+ */
1758
+ const indexEntriesForChanges = (changes, mapFn) => {
1759
+ const indexEntries = [];
1760
+ changes.forEach(({ key, value, del }) => {
1761
+ if (del || !value) return
1762
+ mapFn(makeDoc({ key, value }), (k, v) => {
1763
+ if (typeof v === 'undefined' || typeof k === 'undefined') return
1764
+ indexEntries.push({
1765
+ key: [charwise.encode(k), key],
1766
+ value: v
1767
+ });
1768
+ });
1769
+ });
1770
+ return indexEntries
1771
+ };
1772
+
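// NOTE (editorial sketch, not part of the published bundle): what the helper above
// produces for a typical map function. The change rows come from database.changesSince()
// and the map function is the one supplied to DbIndex.
function exampleIndexEntries () {
  const changes = [
    { key: 'todo-1', value: { type: 'todo', title: 'wash car' } },
    { key: 'todo-2', value: { type: 'todo', title: 'walk dog' } },
    { key: 'other-1', del: true }
  ]
  const mapFn = (doc, emit) => { if (doc.type === 'todo') emit(doc.title, doc.title.length) }
  return indexEntriesForChanges(changes, mapFn)
  // -> [ { key: [charwise.encode('wash car'), 'todo-1'], value: 8 },
  //      { key: [charwise.encode('walk dog'), 'todo-2'], value: 8 } ]
  // deleted rows and rows where the map function emits nothing are skipped
}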
1773
+ /**
1774
+ * Represents a DbIndex for a Fireproof database.
1775
+ *
1776
+ * @class DbIndex
1777
+ * @classdesc A DbIndex can be used to order and filter the documents in a Fireproof database.
1778
+ *
1779
+ * @param {Fireproof} database - The Fireproof database instance to index.
1780
+ * @param {Function} mapFn - The map function to apply to each entry in the database.
1781
+ *
1782
+ */
1783
+ class DbIndex {
1784
+ constructor (database, mapFn, clock, opts = {}) {
1785
+ // console.log('DbIndex constructor', database.constructor.name, typeof mapFn, clock)
1786
+ /**
1787
+ * The database instance to index.
1788
+ * @type {Fireproof}
1789
+ */
1790
+ this.database = database;
1791
+ if (!database.indexBlocks) {
1792
+ database.indexBlocks = new TransactionBlockstore(database.name + '.indexes', database.blocks.valet.getKeyMaterial());
1793
+ }
1794
+ /**
1795
+ * The map function to apply to each entry in the database.
1796
+ * @type {Function}
1797
+ */
1798
+
1799
+ if (typeof mapFn === 'string') {
1800
+ this.mapFnString = mapFn;
1801
+ } else {
1802
+ this.mapFn = mapFn;
1803
+ this.mapFnString = mapFn.toString();
1804
+ }
1805
+ this.name = opts.name || this.makeName();
1806
+ this.indexById = { root: null, cid: null };
1807
+ this.indexByKey = { root: null, cid: null };
1808
+ this.dbHead = null;
1809
+ if (clock) {
1810
+ this.indexById.cid = clock.byId;
1811
+ this.indexByKey.cid = clock.byKey;
1812
+ this.dbHead = clock.db;
1813
+ }
1814
+ this.instanceId = this.database.instanceId + `.DbIndex.${Math.random().toString(36).substring(2, 7)}`;
1815
+ this.updateIndexPromise = null;
1816
+ if (!opts.temporary) { DbIndex.registerWithDatabase(this, this.database); }
1817
+ }
1818
+
1819
+ makeName () {
1820
+ const regex = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g;
1821
+ const matches = Array.from(this.mapFnString.matchAll(regex), match => match[1].trim());
1822
+ return matches[1]
1823
+ }
1824
+
1825
+ static registerWithDatabase (inIndex, database) {
1826
+ if (!database.indexes.has(inIndex.mapFnString)) {
1827
+ database.indexes.set(inIndex.mapFnString, inIndex);
1828
+ } else {
1829
+ // merge our inIndex code with the inIndex clock or vice versa
1830
+ const existingIndex = database.indexes.get(inIndex.mapFnString);
1831
+ // keep the code instance, discard the clock instance
1832
+ if (existingIndex.mapFn) { // this one also has other config
1833
+ existingIndex.dbHead = inIndex.dbHead;
1834
+ existingIndex.indexById.cid = inIndex.indexById.cid;
1835
+ existingIndex.indexByKey.cid = inIndex.indexByKey.cid;
1836
+ } else {
1837
+ inIndex.dbHead = existingIndex.dbHead;
1838
+ inIndex.indexById.cid = existingIndex.indexById.cid;
1839
+ inIndex.indexByKey.cid = existingIndex.indexByKey.cid;
1840
+ database.indexes.set(inIndex.mapFnString, inIndex);
1841
+ }
1842
+ }
1843
+ }
1844
+
1845
+ toJSON () {
1846
+ const indexJson = { name: this.name, code: this.mapFnString, clock: { db: null, byId: null, byKey: null } };
1847
+ indexJson.clock.db = this.dbHead?.map(cid => cid.toString());
1848
+ indexJson.clock.byId = this.indexById.cid?.toString();
1849
+ indexJson.clock.byKey = this.indexByKey.cid?.toString();
1850
+ return indexJson
1851
+ }
1852
+
1853
+ static fromJSON (database, { code, clock, name }) {
1854
+ // console.log('DbIndex.fromJSON', database.constructor.name, code, clock)
1855
+ return new DbIndex(database, code, clock, { name })
1856
+ }
1857
+
1858
+ /**
1859
+ * JSDoc for Query type.
1860
+ * @typedef {Object} DbQuery
1861
+ * @property {string[]} [range] - The range to query.
1862
+ * @memberof DbIndex
1863
+ */
1864
+
1865
+ /**
1866
+ * Query object can have {range}
1867
+ * @param {DbQuery} query - the query range to use
1868
+ * @returns {Promise<{proof: {}, rows: Array<{id: string, key: string, value: any}>}>}
1869
+ * @memberof DbIndex
1870
+ * @instance
1871
+ */
1872
+ async query (query, update = true) {
1873
+ // const callId = Math.random().toString(36).substring(2, 7)
1874
+ // todo pass a root to query a snapshot
1875
+ // console.time(callId + '.updateIndex')
1876
+ update && await this.updateIndex(this.database.indexBlocks);
1877
+ // console.timeEnd(callId + '.updateIndex')
1878
+ // console.time(callId + '.doIndexQuery')
1879
+ // console.log('query', query)
1880
+ const response = await doIndexQuery(this.database.indexBlocks, this.indexByKey, query);
1881
+ // console.timeEnd(callId + '.doIndexQuery')
1882
+ return {
1883
+ proof: { index: await cidsToProof(response.cids) },
1884
+ rows: response.result.map(({ id, key, row }) => {
1885
+ return ({ id, key: charwise.decode(key), value: row })
1886
+ })
1887
+ }
1888
+ }
1889
+
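// NOTE (editorial sketch, not part of the published bundle; kept as a comment so the
// class body stays valid): defining an index and querying it. `db` stands for a
// Fireproof instance; DbIndex is exported from this bundle as `Index`.
//
//   const byTitle = new DbIndex(db, (doc, map) => { if (doc.type === 'todo') map(doc.title, doc) })
//   const everything = await byTitle.query({})                       // all entries, in key order
//   const firstTen = await byTitle.query({ range: ['a', 'z'], limit: 10 })
//   // each result: { proof: { index: [...] }, rows: [{ id, key, value }, ...] }
//   // pass `false` as the second argument to query a stale index without updating it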
1890
+ /**
1891
+ * Update the DbIndex with the latest changes
1892
+ * @private
1893
+ * @returns {Promise<void>}
1894
+ */
1895
+
1896
+ async updateIndex (blocks) {
1897
+ // todo this could enqueue the request and give fresh ones to all second comers -- right now it gives out stale promises while working
1898
+ // what would it do in a world where all indexes provide a database snapshot to query?
1899
+ if (this.updateIndexPromise) return this.updateIndexPromise
1900
+ this.updateIndexPromise = this.innerUpdateIndex(blocks);
1901
+ this.updateIndexPromise.finally(() => { this.updateIndexPromise = null; });
1902
+ return this.updateIndexPromise
1903
+ }
1904
+
1905
+ async innerUpdateIndex (inBlocks) {
1906
+ // console.log('dbHead', this.dbHead)
1907
+ // console.time(callTag + '.changesSince')
1908
+ const result = await this.database.changesSince(this.dbHead); // {key, value, del}
1909
+ // console.timeEnd(callTag + '.changesSince')
1910
+ // console.log('result.rows.length', result.rows.length)
1911
+
1912
+ // console.time(callTag + '.doTransactionupdateIndex')
1913
+ // console.log('updateIndex changes length', result.rows.length)
1914
+
1915
+ if (result.rows.length === 0) {
1916
+ // console.log('updateIndex < no changes', result.clock)
1917
+ this.dbHead = result.clock;
1918
+ return
1919
+ }
1920
+ await doTransaction('updateIndex', inBlocks, async (blocks) => {
1921
+ let oldIndexEntries = [];
1922
+ let removeByIdIndexEntries = [];
1923
+ await loadIndex(blocks, this.indexById, idIndexOpts);
1924
+ await loadIndex(blocks, this.indexByKey, dbIndexOpts);
1925
+ if (this.dbHead) {
1926
+ const oldChangeEntries = await this.indexById.root.getMany(result.rows.map(({ key }) => key));
1927
+ oldIndexEntries = oldChangeEntries.result.map((key) => ({ key, del: true }));
1928
+ removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }));
1929
+ }
1930
+ if (!this.mapFn) {
1931
+ throw new Error('No live map function installed for index, cannot update. Make sure your index definition runs before any queries.' + (this.mapFnString ? ' Your code should match the stored map function source:\n' + this.mapFnString : ''))
1932
+ }
1933
+ const indexEntries = indexEntriesForChanges(result.rows, this.mapFn);
1934
+ const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }));
1935
+ this.indexById = await bulkIndex(blocks, this.indexById, removeByIdIndexEntries.concat(byIdIndexEntries), idIndexOpts);
1936
+ this.indexByKey = await bulkIndex(blocks, this.indexByKey, oldIndexEntries.concat(indexEntries), dbIndexOpts);
1937
+ this.dbHead = result.clock;
1938
+ });
1939
+ this.database.notifyExternal('dbIndex');
1940
+ // console.timeEnd(callTag + '.doTransactionupdateIndex')
1941
+ // console.log(`updateIndex ${callTag} <`, this.instanceId, this.dbHead?.toString(), this.indexByKey.cid?.toString(), this.indexById.cid?.toString())
1942
+ }
1943
+ }
1944
+
1945
+ /**
1946
+ * Update the DbIndex with the given entries
1947
+ * @param {import('./blockstore.js').Blockstore} blocks
1948
+ * @param {{root, cid}} inIndex
1949
+ * @param {DbIndexEntry[]} indexEntries
1950
+ * @private
1951
+ */
1952
+ async function bulkIndex (blocks, inIndex, indexEntries, opts) {
1953
+ if (!indexEntries.length) return inIndex
1954
+ const putBlock = blocks.put.bind(blocks);
1955
+ const { getBlock } = makeGetBlock(blocks);
1956
+ let returnRootBlock;
1957
+ let returnNode;
1958
+ if (!inIndex.root) {
1959
+ const cid = inIndex.cid;
1960
+ if (!cid) {
1961
+ for await (const node of await dbIndex.create({ get: getBlock, list: indexEntries, ...opts })) {
1962
+ const block = await node.block;
1963
+ await putBlock(block.cid, block.bytes);
1964
+ returnRootBlock = block;
1965
+ returnNode = node;
1966
+ }
1967
+ return { root: returnNode, cid: returnRootBlock.cid }
1968
+ }
1969
+ inIndex.root = await dbIndex.load({ cid, get: getBlock, ...dbIndexOpts });
1970
+ }
1971
+ const { root, blocks: newBlocks } = await inIndex.root.bulk(indexEntries);
1972
+ returnRootBlock = await root.block;
1973
+ returnNode = root;
1974
+ for await (const block of newBlocks) {
1975
+ await putBlock(block.cid, block.bytes);
1976
+ }
1977
+ await putBlock(returnRootBlock.cid, returnRootBlock.bytes);
1978
+ return { root: returnNode, cid: returnRootBlock.cid }
1979
+ }
1980
+
1981
+ async function loadIndex (blocks, index, indexOpts) {
1982
+ if (!index.root) {
1983
+ const cid = index.cid;
1984
+ if (!cid) return
1985
+ const { getBlock } = makeGetBlock(blocks);
1986
+ index.root = await dbIndex.load({ cid, get: getBlock, ...indexOpts });
1987
+ }
1988
+ return index.root
1989
+ }
1990
+
1991
+ async function applyLimit (results, limit) {
1992
+ results.result = results.result.slice(0, limit);
1993
+ return results
1994
+ }
1995
+
1996
+ async function doIndexQuery (blocks, indexByKey, query = {}) {
1997
+ await loadIndex(blocks, indexByKey, dbIndexOpts);
1998
+ if (!indexByKey.root) return { result: [] }
1999
+ if (query.range) {
2000
+ const encodedRange = query.range.map((key) => charwise.encode(key));
2001
+ return applyLimit(await indexByKey.root.range(...encodedRange), query.limit)
2002
+ } else if (query.key) {
2003
+ const encodedKey = charwise.encode(query.key);
2004
+ return indexByKey.root.get(encodedKey)
2005
+ } else {
2006
+ const { result, ...all } = await indexByKey.root.getAllEntries();
2007
+ return applyLimit({ result: result.map(({ key: [k, id], value }) => ({ key: k, id, row: value })), ...all }, query.limit)
2008
+ }
2009
+ }
2010
+
2011
+ // @ts-nocheck
2012
+ /**
2013
+ * A Fireproof database Listener allows you to react to events in the database.
2014
+ *
2015
+ * @class Listener
2016
+ * @classdesc A Listener attaches to a Fireproof database and runs a routing function on each change, sending the results to subscribers.
2017
+ *
2018
+ * @param {Fireproof} database - The Fireproof database instance to index.
2019
+ * @param {Function} routingFn - The routing function to apply to each entry in the database.
2020
+ */
2021
+ // import { ChangeEvent } from './db-index'
2022
+
2023
+ class Listener {
2024
+ subcribers = new Map()
2025
+ doStopListening = null
2026
+
2027
+ constructor (database, routingFn) {
2028
+ /**
2029
+ * The database instance to index.
2030
+ * @type {Fireproof}
2031
+ */
2032
+ this.database = database;
2033
+ this.doStopListening = database.registerListener(changes => this.onChanges(changes));
2034
+ /**
2035
+ * The routing function to apply to each change in the database.
2036
+ * @type {Function}
2037
+ */
2038
+ this.routingFn =
2039
+ routingFn ||
2040
+ function (_, emit) {
2041
+ emit('*');
2042
+ };
2043
+ this.dbHead = null;
2044
+ }
2045
+
2046
+ /**
2047
+ * Subscribe to a topic emitted by the event function.
2048
+ * @param {string} topic - The topic to subscribe to.
2049
+ * @param {Function} subscriber - The function to call when the topic is emitted.
2050
+ * @returns {Function} A function to unsubscribe from the topic.
2051
+ * @memberof Listener
2052
+ * @instance
2053
+ */
2054
+ on (topic, subscriber, since) {
2055
+ const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
2056
+ listOfTopicSubscribers.push(subscriber);
2057
+ if (typeof since !== 'undefined') {
2058
+ this.database.changesSince(since).then(({ rows: changes }) => {
2059
+ const keys = topicsForChanges(changes, this.routingFn).get(topic);
2060
+ if (keys) keys.forEach(key => subscriber(key));
2061
+ });
2062
+ }
2063
+ return () => {
2064
+ const index = listOfTopicSubscribers.indexOf(subscriber);
2065
+ if (index > -1) listOfTopicSubscribers.splice(index, 1);
2066
+ }
2067
+ }
2068
+
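// NOTE (editorial sketch, not part of the published bundle; kept as a comment so the
// class body stays valid): routing changes to topic subscribers. `db` stands for a
// Fireproof instance created elsewhere.
//
//   const listener = new Listener(db, (doc, emit) => {
//     if (doc.type === 'todo') emit('todos')
//     emit('all')
//   })
//   const unsubscribe = listener.on('todos', key => {
//     // called with the _id of each changed document routed to 'todos'
//     console.log('todo changed:', key)
//   })
//   // pass a clock as the third argument to also replay changes since that point
//   unsubscribe()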
2069
+ onChanges (changes) {
2070
+ if (Array.isArray(changes)) {
2071
+ const seenTopics = topicsForChanges(changes, this.routingFn);
2072
+ for (const [topic, keys] of seenTopics) {
2073
+ const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
2074
+ listOfTopicSubscribers.forEach(subscriber => keys.forEach(key => subscriber(key)));
2075
+ }
2076
+ } else {
2077
+ // non-arrays go to all subscribers
2078
+ for (const [, listOfTopicSubscribers] of this.subcribers) {
2079
+ listOfTopicSubscribers.forEach(subscriber => subscriber(changes));
2080
+ }
2081
+ }
2082
+ }
2083
+ }
2084
+
2085
+ function getTopicList (subscribersMap, name) {
2086
+ let topicList = subscribersMap.get(name);
2087
+ if (!topicList) {
2088
+ topicList = [];
2089
+ subscribersMap.set(name, topicList);
2090
+ }
2091
+ return topicList
2092
+ }
2093
+
2094
+ /**
2095
+ * Transforms a set of changes to events using an emitter function.
2096
+ *
2097
+ * @param {ChangeEvent[]} changes
2098
+ * @param {Function} routingFn
2099
+ * @returns {Map<string, string[]>} The topics emitted by the routing function, each mapped to the keys of the changed documents.
2100
+ * @private
2101
+ */
2102
+ const topicsForChanges = (changes, routingFn) => {
2103
+ const seenTopics = new Map();
2104
+ changes.forEach(({ key, value, del }) => {
2105
+ if (del || !value) value = { _deleted: true };
2106
+ routingFn(({ _id: key, ...value }), t => {
2107
+ const topicList = getTopicList(seenTopics, t);
2108
+ topicList.push(key);
2109
+ });
2110
+ });
2111
+ return seenTopics
2112
+ };
2113
+
2114
+ const parseCID = cid => typeof cid === 'string' ? multiformats.CID.parse(cid) : cid;
2115
+
2116
+ class Hydrator {
2117
+ static fromJSON (json, database) {
2118
+ database.hydrate({ clock: json.clock.map(c => parseCID(c)), name: json.name, key: json.key });
2119
+ if (json.indexes) {
2120
+ for (const { name, code, clock: { byId, byKey, db } } of json.indexes) {
2121
+ DbIndex.fromJSON(database, {
2122
+ clock: {
2123
+ byId: byId ? parseCID(byId) : null,
2124
+ byKey: byKey ? parseCID(byKey) : null,
2125
+ db: db ? db.map(c => parseCID(c)) : null
2126
+ },
2127
+ code,
2128
+ name
2129
+ });
2130
+ }
2131
+ }
2132
+ return database
2133
+ }
2134
+
2135
+ static snapshot (database, clock) {
2136
+ const definition = database.toJSON();
2137
+ const withBlocks = new Fireproof(database.blocks);
2138
+ if (clock) {
2139
+ definition.clock = clock.map(c => parseCID(c));
2140
+ definition.indexes.forEach(index => {
2141
+ index.clock.byId = null;
2142
+ index.clock.byKey = null;
2143
+ index.clock.db = null;
2144
+ });
2145
+ }
2146
+ const snappedDb = this.fromJSON(definition, withBlocks)
2147
+ ;([...database.indexes.values()]).forEach(index => {
2148
+ snappedDb.indexes.get(index.mapFnString).mapFn = index.mapFn;
2149
+ });
2150
+ return snappedDb
2151
+ }
2152
+
2153
+ static async zoom (database, clock) {
2154
+ ([...database.indexes.values()]).forEach(index => {
2155
+ index.indexById = { root: null, cid: null };
2156
+ index.indexByKey = { root: null, cid: null };
2157
+ index.dbHead = null;
2158
+ });
2159
+ database.clock = clock.map(c => parseCID(c));
2160
+ await database.notifyReset(); // hmm... indexes should listen to this? might be more complex than worth it. so far this is the only caller
2161
+ return database
2162
+ }
2163
+ }
2164
+
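// NOTE (editorial sketch, not part of the published bundle): persisting a database
// definition with toJSON() and restoring it through Hydrator. The fresh instance is
// assumed to share the same underlying blockstore as the original.
async function exampleRehydrate (db, freshDb) {
  const saved = db.toJSON()                    // { clock, name, key, indexes }
  const restored = Hydrator.fromJSON(saved, freshDb)
  // restored now carries the clock, key material and index definitions of `db`;
  // index map functions arrive as source strings, so re-registering the live function
  // (by constructing the same DbIndex again) is needed before updateIndex can run.
  const snapshot = Hydrator.snapshot(db)       // queryable copy at the current clock
  const rewound = await Hydrator.zoom(db, saved.clock)  // move `db` itself to a clock
  return { restored, snapshot, rewound }
}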
2165
+ exports.Fireproof = Fireproof;
2166
+ exports.Hydrator = Hydrator;
2167
+ exports.Index = DbIndex;
2168
+ exports.Listener = Listener;