@fireproof/core 0.3.13 → 0.3.14

This diff shows the published contents of two package versions as they appear in a supported public registry. It is provided for informational purposes only and reflects the changes between those versions.
@@ -44,41 +44,11 @@ var CBW__namespace = /*#__PURE__*/_interopNamespaceDefault(CBW);
44
44
  var raw__namespace = /*#__PURE__*/_interopNamespaceDefault(raw);
45
45
  var codec__namespace = /*#__PURE__*/_interopNamespaceDefault(codec);
46
46
 
47
- // @ts-nocheck
48
-
49
- /**
50
- * @template T
51
- * @typedef {{ parents: EventLink<T>[], data: T }} EventView
52
- */
53
-
54
- /**
55
- * @template T
56
- * @typedef {import('multiformats').BlockView<EventView<T>>} EventBlockView
57
- */
58
-
59
- /**
60
- * @template T
61
- * @typedef {import('multiformats').Link<EventView<T>>} EventLink
62
- */
63
-
64
- /**
65
- * Advance the clock by adding an event.
66
- *
67
- * @template T
68
- * @param {import('./blockstore').BlockFetcher} blocks Block storage.
69
- * @param {EventLink<T>[]} head The head of the clock.
70
- * @param {EventLink<T>} event The event to add.
71
- * @returns {Promise<EventLink<T>[]>} The new head of the clock.
72
- */
73
- async function advance (blocks, head, event) {
74
- /** @type {EventFetcher<T>} */
47
+ async function advance(blocks, head, event) {
75
48
  const events = new EventFetcher(blocks);
76
49
  const headmap = new Map(head.map((cid) => [cid.toString(), cid]));
77
-
78
- // Check if the headmap already includes the event, return head if it does
79
- if (headmap.has(event.toString())) return { head, cids: events.cids }
80
-
81
- // Does event contain the clock?
50
+ if (headmap.has(event.toString()))
51
+ return { head, cids: await events.all() };
82
52
  let changed = false;
83
53
  for (const cid of head) {
84
54
  if (await contains(events, event, cid)) {
@@ -87,27 +57,16 @@ async function advance (blocks, head, event) {
87
57
  changed = true;
88
58
  }
89
59
  }
90
-
91
- // If the headmap has been changed, return the new headmap values
92
60
  if (changed) {
93
- return { head: [...headmap.values()], cids: events.cids }
61
+ return { head: [...headmap.values()], cids: await events.all() };
94
62
  }
95
-
96
- // Does clock contain the event?
97
63
  for (const p of head) {
98
64
  if (await contains(events, p, event)) {
99
- return { head, cids: events.cids }
65
+ return { head, cids: await events.all() };
100
66
  }
101
67
  }
102
-
103
- // Return the head concatenated with the new event if it passes both checks
104
- return { head: head.concat(event), cids: events.cids }
68
+ return { head: head.concat(event), cids: await events.all() };
105
69
  }
106
-
107
- /**
108
- * @template T
109
- * @implements {EventBlockView<T>}
110
- */
111
70
  class EventBlock extends Block.Block {
112
71
  /**
113
72
  * @param {object} config
@@ -115,115 +74,79 @@ class EventBlock extends Block.Block {
115
74
  * @param {Event} config.value
116
75
  * @param {Uint8Array} config.bytes
117
76
  */
118
- constructor ({ cid, value, bytes }) {
119
- // @ts-expect-error
77
+ constructor({ cid, value, bytes }) {
120
78
  super({ cid, value, bytes });
121
79
  }
122
-
123
80
  /**
124
81
  * @template T
125
82
  * @param {T} data
126
83
  * @param {EventLink<T>[]} [parents]
127
84
  */
128
- static create (data, parents) {
129
- return encodeEventBlock({ data, parents: parents ?? [] })
85
+ static create(data, parents) {
86
+ return encodeEventBlock({ data, parents: parents ?? [] });
130
87
  }
131
88
  }
132
-
133
- /** @template T */
134
89
  class EventFetcher {
135
- /** @param {import('./blockstore').BlockFetcher} blocks */
136
- constructor (blocks) {
137
- /** @private */
90
+ /** @param {import('./blockstore').TransactionBlockstore} blocks */
91
+ constructor(blocks) {
138
92
  this._blocks = blocks;
139
93
  this._cids = new utils.CIDCounter();
140
- this._cache = new Map();
94
+ this._cache = /* @__PURE__ */ new Map();
141
95
  }
142
-
143
96
  /**
144
97
  * @param {EventLink<T>} link
145
98
  * @returns {Promise<EventBlockView<T>>}
146
99
  */
147
- async get (link) {
100
+ async get(link) {
148
101
  const slink = link.toString();
149
- // console.log('get', link.toString())
150
- if (this._cache.has(slink)) return this._cache.get(slink)
102
+ if (this._cache.has(slink))
103
+ return this._cache.get(slink);
151
104
  const block = await this._blocks.get(link);
152
105
  this._cids.add({ address: link });
153
- if (!block) throw new Error(`missing block: ${link}`)
106
+ if (!block)
107
+ throw new Error(`missing block: ${link}`);
154
108
  const got = decodeEventBlock(block.bytes);
155
109
  this._cache.set(slink, got);
156
- return got
110
+ return got;
157
111
  }
158
-
159
- async all () {
160
- await Promise.all([...this._cids]);
161
- return this._cids
112
+ async all() {
113
+ return this._cids.all();
162
114
  }
163
115
  }
164
-
165
- /**
166
- * @template T
167
- * @param {EventView<T>} value
168
- * @returns {Promise<EventBlockView<T>>}
169
- */
170
- async function encodeEventBlock (value) {
171
- // TODO: sort parents
116
+ async function encodeEventBlock(value) {
172
117
  const { cid, bytes } = await Block.encode({ value, codec: dagcbor__namespace, hasher: sha2.sha256 });
173
- // @ts-expect-error
174
- return new Block.Block({ cid, value, bytes })
118
+ return new Block.Block({ cid, value, bytes });
175
119
  }
176
-
177
- /**
178
- * @template T
179
- * @param {Uint8Array} bytes
180
- * @returns {Promise<EventBlockView<T>>}
181
- */
182
- async function decodeEventBlock (bytes) {
120
+ async function decodeEventBlock(bytes) {
183
121
  const { cid, value } = await Block.decode({ bytes, codec: dagcbor__namespace, hasher: sha2.sha256 });
184
- // @ts-expect-error
185
- return new Block.Block({ cid, value, bytes })
122
+ return new Block.Block({ cid, value, bytes });
186
123
  }
187
-
188
- /**
189
- * Returns true if event "a" contains event "b". Breadth first search.
190
- * @template T
191
- * @param {EventFetcher} events
192
- * @param {EventLink<T>} a
193
- * @param {EventLink<T>} b
194
- */
195
- async function contains (events, a, b) {
196
- if (a.toString() === b.toString()) return true
124
+ async function contains(events, a, b) {
125
+ if (a.toString() === b.toString())
126
+ return true;
197
127
  const [{ value: aevent }, { value: bevent }] = await Promise.all([events.get(a), events.get(b)]);
198
128
  const links = [...aevent.parents];
199
129
  while (links.length) {
200
130
  const link = links.shift();
201
- if (!link) break
202
- if (link.toString() === b.toString()) return true
203
- // if any of b's parents are this link, then b cannot exist in any of the
204
- // tree below, since that would create a cycle.
205
- if (bevent.parents.some((p) => link.toString() === p.toString())) continue
131
+ if (!link)
132
+ break;
133
+ if (link.toString() === b.toString())
134
+ return true;
135
+ if (bevent.parents.some((p) => link.toString() === p.toString()))
136
+ continue;
206
137
  const { value: event } = await events.get(link);
207
138
  links.push(...event.parents);
208
139
  }
209
- return false
140
+ return false;
210
141
  }
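
The contains function above answers "is event b an ancestor of event a?" with a breadth-first walk over parent links, skipping any branch that passes through one of b's own parents (which would imply a cycle); advance and findEventsToSync both lean on it. A minimal standalone sketch of the same walk, with plain string ids and a parents table standing in for CIDs and the EventFetcher (all names here are illustrative, not part of the package):

    const parentsOf = { c: ['b'], b: ['a'], a: [] } // a <- b <- c
    function containsSync (a, b) {
      if (a === b) return true
      const links = [...parentsOf[a]]
      while (links.length) {
        const link = links.shift()
        if (!link) break
        if (link === b) return true
        // skip subtrees below b's own parents; they cannot contain b without a cycle
        if (parentsOf[b].some((p) => link === p)) continue
        links.push(...parentsOf[link])
      }
      return false
    }
    console.log(containsSync('c', 'a')) // true: a is an ancestor of c
    console.log(containsSync('a', 'c')) // false
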
211
-
212
- /**
213
- * @template T
214
- * @param {import('./blockstore').BlockFetcher} blocks Block storage.
215
- * @param {EventLink<T>[]} head
216
- * @param {object} [options]
217
- * @param {(b: EventBlockView<T>) => string} [options.renderNodeLabel]
218
- */
219
- async function * vis$1 (blocks, head, options = {}) {
142
+ async function* vis$1(blocks, head, options = {}) {
220
143
  const renderNodeLabel = options.renderNodeLabel ?? ((b) => b.value.data.value);
221
144
  const events = new EventFetcher(blocks);
222
- yield 'digraph clock {';
145
+ yield "digraph clock {";
223
146
  yield ' node [shape=point fontname="Courier"]; head;';
224
147
  const hevents = await Promise.all(head.map((link) => events.get(link)));
225
148
  const links = [];
226
- const nodes = new Set();
149
+ const nodes = /* @__PURE__ */ new Set();
227
150
  for (const e of hevents) {
228
151
  nodes.add(e.cid.toString());
229
152
  yield ` node [shape=oval fontname="Courier"]; ${e.cid} [label="${renderNodeLabel(e)}"];`;
@@ -235,8 +158,10 @@ async function * vis$1 (blocks, head, options = {}) {
235
158
  }
236
159
  while (links.length) {
237
160
  const link = links.shift();
238
- if (!link) break
239
- if (nodes.has(link.toString())) continue
161
+ if (!link)
162
+ break;
163
+ if (nodes.has(link.toString()))
164
+ continue;
240
165
  nodes.add(link.toString());
241
166
  const block = await events.get(link);
242
167
  yield ` node [shape=oval]; ${link} [label="${renderNodeLabel(block)}" fontname="Courier"];`;
@@ -245,108 +170,71 @@ async function * vis$1 (blocks, head, options = {}) {
245
170
  }
246
171
  links.push(...block.value.parents);
247
172
  }
248
- yield '}';
173
+ yield "}";
249
174
  }
250
-
251
- async function findEventsToSync (blocks, head) {
252
- // const callTag = Math.random().toString(36).substring(7)
175
+ async function findEventsToSync(blocks, head) {
253
176
  const events = new EventFetcher(blocks);
254
- // console.time(callTag + '.findCommonAncestorWithSortedEvents')
255
177
  const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
256
- // console.timeEnd(callTag + '.findCommonAncestorWithSortedEvents')
257
- // console.log('sorted', sorted.length)
258
- // console.time(callTag + '.contains')
259
- const toSync = await asyncFilter(sorted, async (uks) => !(await contains(events, ancestor, uks.cid)));
260
- // console.timeEnd(callTag + '.contains')
261
-
262
- return { cids: events.cids, events: toSync }
178
+ const toSync = await asyncFilter(sorted, async (uks) => !await contains(events, ancestor, uks.cid));
179
+ return { cids: events.all(), events: toSync };
263
180
  }
264
-
265
- const asyncFilter = async (arr, predicate) =>
266
- Promise.all(arr.map(predicate)).then((results) => arr.filter((_v, index) => results[index]));
267
-
268
- async function findCommonAncestorWithSortedEvents (events, children) {
269
- // const callTag = Math.random().toString(36).substring(7)
270
- // console.time(callTag + '.findCommonAncestor')
181
+ const asyncFilter = async (arr, predicate) => Promise.all(arr.map(predicate)).then((results) => arr.filter((_v, index) => results[index]));
182
+ async function findCommonAncestorWithSortedEvents(events, children) {
271
183
  const ancestor = await findCommonAncestor(events, children);
272
- // console.timeEnd(callTag + '.findCommonAncestor')
273
184
  if (!ancestor) {
274
- throw new Error('failed to find common ancestor event')
185
+ throw new Error("failed to find common ancestor event");
275
186
  }
276
- // console.time(callTag + '.findSortedEvents')
277
187
  const sorted = await findSortedEvents(events, children, ancestor);
278
- // console.timeEnd(callTag + '.findSortedEvents')
279
- return { ancestor, sorted }
188
+ return { ancestor, sorted };
280
189
  }
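
The one-line asyncFilter helper a few lines above runs the async predicate over every element in parallel, then filters the array by the resolved booleans. A quick standalone usage sketch with made-up values:

    const asyncFilter = async (arr, predicate) =>
      Promise.all(arr.map(predicate)).then((results) => arr.filter((_v, index) => results[index]))
    asyncFilter([1, 2, 3, 4], async (n) => n % 2 === 0).then(console.log) // [ 2, 4 ]
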
281
-
282
- /**
283
- * Find the common ancestor event of the passed children. A common ancestor is
284
- * the first single event in the DAG that _all_ paths from children lead to.
285
- *
286
- * @param {import('./clock').EventFetcher} events
287
- * @param {import('./clock').EventLink<EventData>[]} children
288
- */
289
- async function findCommonAncestor (events, children) {
290
- if (!children.length) return
190
+ async function findCommonAncestor(events, children) {
191
+ if (!children.length)
192
+ return;
291
193
  const candidates = children.map((c) => [c]);
292
194
  while (true) {
293
195
  let changed = false;
294
196
  for (const c of candidates) {
295
197
  const candidate = await findAncestorCandidate(events, c[c.length - 1]);
296
- if (!candidate) continue
198
+ if (!candidate)
199
+ continue;
297
200
  changed = true;
298
201
  c.push(candidate);
299
202
  const ancestor = findCommonString(candidates);
300
- if (ancestor) return ancestor
203
+ if (ancestor)
204
+ return ancestor;
301
205
  }
302
- if (!changed) return
206
+ if (!changed)
207
+ return;
303
208
  }
304
209
  }
305
-
306
- /**
307
- * @param {import('./clock').EventFetcher} events
308
- * @param {import('./clock').EventLink<EventData>} root
309
- */
310
- async function findAncestorCandidate (events, root) {
210
+ async function findAncestorCandidate(events, root) {
311
211
  const { value: event } = await events.get(root);
312
- if (!event.parents.length) return root
313
- return event.parents.length === 1 ? event.parents[0] : findCommonAncestor(events, event.parents)
212
+ if (!event.parents.length)
213
+ return root;
214
+ return event.parents.length === 1 ? event.parents[0] : findCommonAncestor(events, event.parents);
314
215
  }
315
-
316
- /**
317
- * @template {{ toString: () => string }} T
318
- * @param {Array<T[]>} arrays
319
- */
320
- function findCommonString (arrays) {
216
+ function findCommonString(arrays) {
321
217
  arrays = arrays.map((a) => [...a]);
322
218
  for (const arr of arrays) {
323
219
  for (const item of arr) {
324
220
  let matched = true;
325
221
  for (const other of arrays) {
326
- if (arr === other) continue
222
+ if (arr === other)
223
+ continue;
327
224
  matched = other.some((i) => String(i) === String(item));
328
- if (!matched) break
225
+ if (!matched)
226
+ break;
329
227
  }
330
- if (matched) return item
228
+ if (matched)
229
+ return item;
331
230
  }
332
231
  }
333
232
  }
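
findCommonString scans each candidate list for the first item that appears (by string equality) in every other list; findCommonAncestor feeds it growing ancestor chains until a shared entry emerges. A standalone run, with the body copied from the output above and plain strings in place of CIDs (toy data only):

    function findCommonString (arrays) {
      arrays = arrays.map((a) => [...a])
      for (const arr of arrays) {
        for (const item of arr) {
          let matched = true
          for (const other of arrays) {
            if (arr === other) continue
            matched = other.some((i) => String(i) === String(item))
            if (!matched) break
          }
          if (matched) return item
        }
      }
    }
    console.log(findCommonString([['e', 'c', 'a'], ['d', 'c', 'a']])) // 'c'
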
334
-
335
- /**
336
- * Find and sort events between the head(s) and the tail.
337
- * @param {import('./clock').EventFetcher} events
338
- * @param {import('./clock').EventLink<EventData>[]} head
339
- * @param {import('./clock').EventLink<EventData>} tail
340
- */
341
- async function findSortedEvents (events, head, tail) {
342
- // const callTag = Math.random().toString(36).substring(7)
343
- // get weighted events - heavier events happened first
344
- /** @type {Map<string, { event: import('./clock').EventBlockView<EventData>, weight: number }>} */
345
- const weights = new Map();
233
+ async function findSortedEvents(events, head, tail) {
234
+ const weights = /* @__PURE__ */ new Map();
346
235
  const all = await Promise.all(head.map((h) => findEvents(events, h, tail)));
347
236
  for (const arr of all) {
348
237
  for (const { event, depth } of arr) {
349
- // console.log('event value', event.value.data.value)
350
238
  const info = weights.get(event.cid.toString());
351
239
  if (info) {
352
240
  info.weight += depth;
@@ -355,10 +243,7 @@ async function findSortedEvents (events, head, tail) {
355
243
  }
356
244
  }
357
245
  }
358
-
359
- // group events into buckets by weight
360
- /** @type {Map<number, import('./clock').EventBlockView<EventData>[]>} */
361
- const buckets = new Map();
246
+ const buckets = /* @__PURE__ */ new Map();
362
247
  for (const { event, weight } of weights.values()) {
363
248
  const bucket = buckets.get(weight);
364
249
  if (bucket) {
@@ -367,254 +252,168 @@ async function findSortedEvents (events, head, tail) {
367
252
  buckets.set(weight, [event]);
368
253
  }
369
254
  }
370
-
371
- // sort by weight, and by CID within weight
372
- const sorted = Array.from(buckets)
373
- .sort((a, b) => b[0] - a[0])
374
- .flatMap(([, es]) => es.sort((a, b) => (String(a.cid) < String(b.cid) ? -1 : 1)));
375
- // console.log('sorted', sorted.map(s => s.value.data.value))
376
-
377
- return sorted
255
+ const sorted = Array.from(buckets).sort((a, b) => b[0] - a[0]).flatMap(([, es]) => es.sort((a, b) => String(a.cid) < String(b.cid) ? -1 : 1));
256
+ return sorted;
378
257
  }
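
findSortedEvents weights each event by how deep it sits behind the head(s), groups events into buckets by weight, and emits heavier (older) buckets first, breaking ties by CID string. The same bucket-and-sort step in isolation, with plain strings standing in for event blocks and their CIDs (the weights are made up):

    const weights = new Map([
      ['cid-a', { event: 'cid-a', weight: 3 }],
      ['cid-b', { event: 'cid-b', weight: 1 }],
      ['cid-c', { event: 'cid-c', weight: 3 }]
    ])
    const buckets = new Map()
    for (const { event, weight } of weights.values()) {
      const bucket = buckets.get(weight)
      if (bucket) bucket.push(event)
      else buckets.set(weight, [event])
    }
    const sorted = Array.from(buckets)
      .sort((a, b) => b[0] - a[0])
      .flatMap(([, es]) => es.sort((a, b) => (String(a) < String(b) ? -1 : 1)))
    console.log(sorted) // [ 'cid-a', 'cid-c', 'cid-b' ]
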
379
-
380
- /**
381
- * @param {import('./clock').EventFetcher} events
382
- * @param {import('./clock').EventLink<EventData>} start
383
- * @param {import('./clock').EventLink<EventData>} end
384
- * @returns {Promise<Array<{ event: import('./clock').EventBlockView<EventData>, depth: number }>>}
385
- */
386
- async function findEvents (events, start, end, depth = 0) {
387
- // console.log('findEvents', start)
258
+ async function findEvents(events, start, end, depth = 0) {
388
259
  const event = await events.get(start);
389
260
  const acc = [{ event, depth }];
390
261
  const { parents } = event.value;
391
- if (parents.length === 1 && String(parents[0]) === String(end)) return acc
262
+ if (parents.length === 1 && String(parents[0]) === String(end))
263
+ return acc;
392
264
  const rest = await Promise.all(parents.map((p) => findEvents(events, p, end, depth + 1)));
393
- return acc.concat(...rest)
265
+ return acc.concat(...rest);
394
266
  }
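
findEvents recursively collects every event between a head and the common-ancestor end, tagging each with its depth; those depths are what findSortedEvents above sums into weights. A standalone model of the walk, again with string ids and a parents table instead of CIDs and the EventFetcher:

    const parentsOf = { d: ['c'], c: ['b'], b: ['a'], a: [] } // a <- b <- c <- d
    async function findEventsModel (start, end, depth = 0) {
      const acc = [{ event: start, depth }]
      const parents = parentsOf[start]
      if (parents.length === 1 && parents[0] === end) return acc
      const rest = await Promise.all(parents.map((p) => findEventsModel(p, end, depth + 1)))
      return acc.concat(...rest)
    }
    findEventsModel('d', 'a').then(console.log)
    // [ { event: 'd', depth: 0 }, { event: 'c', depth: 1 }, { event: 'b', depth: 2 } ]
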
395
267
 
396
- // @ts-nocheck
397
-
398
268
  const createBlock = (bytes, cid) => Block.create({ cid, bytes, hasher: sha2.sha256, codec: codec__namespace });
399
-
400
- const encrypt = async function * ({ get, cids, hasher, key, cache, chunker, root }) {
401
- const set = new Set();
269
+ const encrypt = async function* ({ get, cids, hasher: hasher2, key, cache, chunker, root }) {
270
+ const set = /* @__PURE__ */ new Set();
402
271
  let eroot;
403
272
  for (const string of cids) {
404
273
  const cid = multiformats.CID.parse(string);
405
274
  const unencrypted = await get(cid);
406
- const block = await Block.encode({ ...await codec__namespace.encrypt({ ...unencrypted, key }), codec: codec__namespace, hasher });
407
- // console.log(`encrypting ${string} as ${block.cid}`)
408
- yield block;
409
- set.add(block.cid.toString());
410
- if (unencrypted.cid.equals(root)) eroot = block.cid;
411
- }
412
- if (!eroot) throw new Error('cids does not include root')
413
- const list = [...set].map(s => multiformats.CID.parse(s));
275
+ const block2 = await Block.encode({ ...await codec__namespace.encrypt({ ...unencrypted, key }), codec: codec__namespace, hasher: hasher2 });
276
+ yield block2;
277
+ set.add(block2.cid.toString());
278
+ if (unencrypted.cid.equals(root))
279
+ eroot = block2.cid;
280
+ }
281
+ if (!eroot)
282
+ throw new Error("cids does not include root");
283
+ const list = [...set].map((s) => multiformats.CID.parse(s));
414
284
  let last;
415
- for await (const node of cidSet.create({ list, get, cache, chunker, hasher, codec: dagcbor__namespace })) {
416
- const block = await node.block;
417
- yield block;
418
- last = block;
285
+ for await (const node of cidSet.create({ list, get, cache, chunker, hasher: hasher2, codec: dagcbor__namespace })) {
286
+ const block2 = await node.block;
287
+ yield block2;
288
+ last = block2;
419
289
  }
420
290
  const head = [eroot, last.cid];
421
- const block = await Block.encode({ value: head, codec: dagcbor__namespace, hasher });
291
+ const block = await Block.encode({ value: head, codec: dagcbor__namespace, hasher: hasher2 });
422
292
  yield block;
423
293
  };
424
-
425
- const decrypt = async function * ({ root, get, key, cache, chunker, hasher }) {
426
- const o = { ...await get(root), codec: dagcbor__namespace, hasher };
294
+ const decrypt = async function* ({ root, get, key, cache, chunker, hasher: hasher2 }) {
295
+ const o = { ...await get(root), codec: dagcbor__namespace, hasher: hasher2 };
427
296
  const decodedRoot = await Block.decode(o);
428
- // console.log('decodedRoot', decodedRoot)
429
297
  const { value: [eroot, tree] } = decodedRoot;
430
- const rootBlock = await get(eroot); // should I decrypt?
431
- const cidset = await cidSet.load({ cid: tree, get, cache, chunker, codec: codec__namespace, hasher });
298
+ const rootBlock = await get(eroot);
299
+ const cidset = await cidSet.load({ cid: tree, get, cache, chunker, codec: codec__namespace, hasher: hasher2 });
432
300
  const { result: nodes } = await cidset.getAllEntries();
433
301
  const unwrap = async (eblock) => {
434
- const { bytes, cid } = await codec__namespace.decrypt({ ...eblock, key }).catch(e => {
435
- console.log('ekey', e);
436
- throw new Error('bad key: ' + key.toString('hex'))
302
+ const { bytes, cid } = await codec__namespace.decrypt({ ...eblock, key }).catch((e) => {
303
+ console.log("ekey", e);
304
+ throw new Error("bad key: " + key.toString("hex"));
437
305
  });
438
306
  const block = await createBlock(bytes, cid);
439
- return block
307
+ return block;
440
308
  };
441
309
  const promises = [];
442
310
  for (const { cid } of nodes) {
443
- if (!rootBlock.cid.equals(cid)) promises.push(get(cid).then(unwrap));
311
+ if (!rootBlock.cid.equals(cid))
312
+ promises.push(get(cid).then(unwrap));
444
313
  }
445
- yield * promises;
314
+ yield* promises;
446
315
  yield unwrap(rootBlock);
447
316
  };
448
317
 
449
- // @ts-nocheck
450
- // from https://github.com/duzun/sync-sha1/blob/master/rawSha1.js
451
- // MIT License Copyright (c) 2020 Dumitru Uzun
452
- // Permission is hereby granted, free of charge, to any person obtaining a copy
453
- // of this software and associated documentation files (the "Software"), to deal
454
- // in the Software without restriction, including without limitation the rights
455
- // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
456
- // copies of the Software, and to permit persons to whom the Software is
457
- // furnished to do so, subject to the following conditions:
458
-
459
- // The above copyright notice and this permission notice shall be included in all
460
- // copies or substantial portions of the Software.
461
-
462
- // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
463
- // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
464
- // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
465
- // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
466
- // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
467
- // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
468
- // SOFTWARE.
469
-
470
- // import {
471
- // isLittleEndian, switchEndianness32
472
- // } from 'string-encode'
473
-
474
- /**
475
- * SHA1 on binary array
476
- *
477
- * @param {Uint8Array} b Data to hash
478
- *
479
- * @return {Uint8Array} sha1 hash
480
- */
481
- function rawSha1 (b) {
318
+ function rawSha1(b) {
482
319
  let i = b.byteLength;
483
320
  let bs = 0;
484
- let A; let B; let C; let D; let G;
485
- const H = Uint32Array.from([A = 0x67452301, B = 0xEFCDAB89, ~A, ~B, 0xC3D2E1F0]);
321
+ let A;
322
+ let B;
323
+ let C;
324
+ let D;
325
+ let G;
326
+ const H = Uint32Array.from([A = 1732584193, B = 4023233417, ~A, ~B, 3285377520]);
486
327
  const W = new Uint32Array(80);
487
- const nrWords = (i / 4 + 2) | 15;
328
+ const nrWords = i / 4 + 2 | 15;
488
329
  const words = new Uint32Array(nrWords + 1);
489
330
  let j;
490
-
491
331
  words[nrWords] = i * 8;
492
- words[i >> 2] |= 0x80 << (~i << 3);
493
- for (;i--;) {
332
+ words[i >> 2] |= 128 << (~i << 3);
333
+ for (; i--; ) {
494
334
  words[i >> 2] |= b[i] << (~i << 3);
495
335
  }
496
-
497
336
  for (A = H.slice(); bs < nrWords; bs += 16, A.set(H)) {
498
- for (i = 0; i < 80;
499
- A[0] = (
500
- G = ((b = A[0]) << 5 | b >>> 27) +
501
- A[4] +
502
- (W[i] = (i < 16) ? words[bs + i] : G << 1 | G >>> 31) +
503
- 0x5A827999,
504
- B = A[1],
505
- C = A[2],
506
- D = A[3],
507
- G + ((j = i / 5 >> 2) // eslint-disable-line no-cond-assign
508
- ? j !== 2
509
- ? (B ^ C ^ D) + (j & 2 ? 0x6FE0483D : 0x14577208)
510
- : (B & C | B & D | C & D) + 0x34994343
511
- : B & C | ~B & D
512
- )
513
- )
514
- , A[1] = b
515
- , A[2] = B << 30 | B >>> 2
516
- , A[3] = C
517
- , A[4] = D
518
- , ++i
519
- ) {
337
+ for (i = 0; i < 80; A[0] = (G = ((b = A[0]) << 5 | b >>> 27) + A[4] + (W[i] = i < 16 ? words[bs + i] : G << 1 | G >>> 31) + 1518500249, B = A[1], C = A[2], D = A[3], G + ((j = i / 5 >> 2) ? j !== 2 ? (B ^ C ^ D) + (j & 2 ? 1876969533 : 341275144) : (B & C | B & D | C & D) + 882459459 : B & C | ~B & D)), A[1] = b, A[2] = B << 30 | B >>> 2, A[3] = C, A[4] = D, ++i) {
520
338
  G = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
521
339
  }
522
-
523
- for (i = 5; i;) H[--i] = H[i] + A[i];
340
+ for (i = 5; i; )
341
+ H[--i] = H[i] + A[i];
524
342
  }
525
-
526
- // if (isLittleEndian()) {
527
- // H = H.map(switchEndianness32)
528
- // }
529
-
530
- return new Uint8Array(H.buffer, H.byteOffset, H.byteLength)
343
+ return new Uint8Array(H.buffer, H.byteOffset, H.byteLength);
531
344
  }
532
345
 
533
- // @ts-nocheck
534
346
  const chunker = utils.bf(3);
535
-
536
- const NO_ENCRYPT =
537
- typeof process !== 'undefined' ? process.env.NO_ENCRYPT : ({ url: (typeof document === 'undefined' ? require('u' + 'rl').pathToFileURL(__filename).href : (document.currentScript && document.currentScript.src || new URL('bundle.js', document.baseURI).href)) }) && undefined.VITE_NO_ENCRYPT;
538
-
347
+ const NO_ENCRYPT = process.env?.NO_ENCRYPT;
539
348
  class Valet {
540
- idb = null
541
- name = null
542
- uploadQueue = null
543
- alreadyEnqueued = new Set()
544
- keyMaterial = null
545
- keyId = 'null'
546
-
349
+ idb = null;
350
+ name = null;
351
+ uploadQueue = null;
352
+ alreadyEnqueued = /* @__PURE__ */ new Set();
353
+ keyMaterial = null;
354
+ keyId = "null";
547
355
  /**
548
356
  * Function installed by the database to upload car files
549
357
  * @type {null|function(string, Uint8Array):Promise<void>}
550
358
  */
551
- uploadFunction = null
552
-
553
- constructor (name = 'default', keyMaterial) {
359
+ uploadFunction = null;
360
+ constructor(name = "default", keyMaterial) {
554
361
  this.name = name;
555
362
  this.setKeyMaterial(keyMaterial);
556
363
  this.uploadQueue = cargoQueue(async (tasks, callback) => {
557
364
  console.log(
558
- 'queue worker',
365
+ "queue worker",
559
366
  tasks.length,
560
367
  tasks.reduce((acc, t) => acc + t.value.length, 0)
561
368
  );
562
369
  if (this.uploadFunction) {
563
- // todo we can coalesce these into a single car file
564
- return await this.withDB(async db => {
370
+ return await this.withDB(async (db) => {
565
371
  for (const task of tasks) {
566
372
  await this.uploadFunction(task.carCid, task.value);
567
- // update the indexedb to mark this car as no longer pending
568
- const carMeta = await db.get('cidToCar', task.carCid);
373
+ const carMeta = await db.get("cidToCar", task.carCid);
569
374
  delete carMeta.pending;
570
- await db.put('cidToCar', carMeta);
375
+ await db.put("cidToCar", carMeta);
571
376
  }
572
- })
377
+ });
573
378
  }
574
379
  callback();
575
380
  });
576
-
577
381
  this.uploadQueue.drain(async () => {
578
- return await this.withDB(async db => {
579
- const carKeys = (await db.getAllFromIndex('cidToCar', 'pending')).map(c => c.car);
382
+ return await this.withDB(async (db) => {
383
+ const carKeys = (await db.getAllFromIndex("cidToCar", "pending")).map((c) => c.car);
580
384
  for (const carKey of carKeys) {
581
- await this.uploadFunction(carKey, await db.get('cars', carKey));
582
- const carMeta = await db.get('cidToCar', carKey);
385
+ await this.uploadFunction(carKey, await db.get("cars", carKey));
386
+ const carMeta = await db.get("cidToCar", carKey);
583
387
  delete carMeta.pending;
584
- await db.put('cidToCar', carMeta);
388
+ await db.put("cidToCar", carMeta);
585
389
  }
586
- })
390
+ });
587
391
  });
588
392
  }
589
-
590
- getKeyMaterial () {
591
- return this.keyMaterial
393
+ getKeyMaterial() {
394
+ return this.keyMaterial;
592
395
  }
593
-
594
- setKeyMaterial (km) {
396
+ setKeyMaterial(km) {
595
397
  if (km && !NO_ENCRYPT) {
596
- const hex = Uint8Array.from(buffer.Buffer.from(km, 'hex'));
398
+ const hex = Uint8Array.from(buffer.Buffer.from(km, "hex"));
597
399
  this.keyMaterial = km;
598
400
  const hash = rawSha1(hex);
599
- this.keyId = buffer.Buffer.from(hash).toString('hex');
401
+ this.keyId = buffer.Buffer.from(hash).toString("hex");
600
402
  } else {
601
403
  this.keyMaterial = null;
602
- this.keyId = 'null';
404
+ this.keyId = "null";
603
405
  }
604
- // console.trace('keyId', this.name, this.keyId)
605
406
  }
606
-
607
407
  /**
608
408
  * Group the blocks into a car and write it to the valet.
609
- * @param {InnerBlockstore} innerBlockstore
409
+ * @param {import('./blockstore.js').InnerBlockstore} innerBlockstore
610
410
  * @param {Set<string>} cids
611
411
  * @returns {Promise<void>}
612
412
  * @memberof Valet
613
413
  */
614
- async writeTransaction (innerBlockstore, cids) {
414
+ async writeTransaction(innerBlockstore, cids) {
615
415
  if (innerBlockstore.lastCid) {
616
416
  if (this.keyMaterial) {
617
- // console.log('encrypting car', innerBlockstore.label)
618
417
  const newCar = await blocksToEncryptedCarBlock(innerBlockstore.lastCid, innerBlockstore, this.keyMaterial);
619
418
  await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
620
419
  } else {
@@ -623,71 +422,61 @@ class Valet {
623
422
  }
624
423
  }
625
424
  }
626
-
627
- withDB = async dbWorkFun => {
425
+ withDB = async (dbWorkFun) => {
628
426
  if (!this.idb) {
629
427
  this.idb = await idb.openDB(`fp.${this.keyId}.${this.name}.valet`, 2, {
630
- upgrade (db, oldVersion, newVersion, transaction) {
428
+ upgrade(db, oldVersion, newVersion, transaction) {
631
429
  if (oldVersion < 1) {
632
- db.createObjectStore('cars'); // todo use database name
633
- const cidToCar = db.createObjectStore('cidToCar', { keyPath: 'car' });
634
- cidToCar.createIndex('cids', 'cids', { multiEntry: true });
430
+ db.createObjectStore("cars");
431
+ const cidToCar = db.createObjectStore("cidToCar", { keyPath: "car" });
432
+ cidToCar.createIndex("cids", "cids", { multiEntry: true });
635
433
  }
636
434
  if (oldVersion < 2) {
637
- const cidToCar = transaction.objectStore('cidToCar');
638
- cidToCar.createIndex('pending', 'pending');
435
+ const cidToCar = transaction.objectStore("cidToCar");
436
+ cidToCar.createIndex("pending", "pending");
639
437
  }
640
438
  }
641
439
  });
642
440
  }
643
- return await dbWorkFun(this.idb)
644
- }
645
-
441
+ return await dbWorkFun(this.idb);
442
+ };
646
443
  /**
647
444
  *
648
445
  * @param {string} carCid
649
446
  * @param {*} value
650
447
  */
651
- async parkCar (carCid, value, cids) {
652
- await this.withDB(async db => {
653
- const tx = db.transaction(['cars', 'cidToCar'], 'readwrite');
654
- await tx.objectStore('cars').put(value, carCid);
655
- await tx.objectStore('cidToCar').put({ pending: 'y', car: carCid, cids: Array.from(cids) });
656
- return await tx.done
448
+ async parkCar(carCid, value, cids) {
449
+ await this.withDB(async (db) => {
450
+ const tx = db.transaction(["cars", "cidToCar"], "readwrite");
451
+ await tx.objectStore("cars").put(value, carCid);
452
+ await tx.objectStore("cidToCar").put({ pending: "y", car: carCid, cids: Array.from(cids) });
453
+ return await tx.done;
657
454
  });
658
-
659
- // upload to web3.storage if we have credentials
660
455
  if (this.uploadFunction) {
661
456
  if (this.alreadyEnqueued.has(carCid)) {
662
- // console.log('already enqueued', carCid)
663
- return
457
+ return;
664
458
  }
665
- // don't await this, it will be done in the queue
666
- // console.log('add to queue', carCid, value.length)
667
459
  this.uploadQueue.push({ carCid, value });
668
460
  this.alreadyEnqueued.add(carCid);
669
461
  }
670
462
  }
671
-
672
- remoteBlockFunction = null
673
-
674
- async getBlock (dataCID) {
675
- return await this.withDB(async db => {
676
- const tx = db.transaction(['cars', 'cidToCar'], 'readonly');
677
- const indexResp = await tx.objectStore('cidToCar').index('cids').get(dataCID);
463
+ remoteBlockFunction = null;
464
+ async getBlock(dataCID) {
465
+ return await this.withDB(async (db) => {
466
+ const tx = db.transaction(["cars", "cidToCar"], "readonly");
467
+ const indexResp = await tx.objectStore("cidToCar").index("cids").get(dataCID);
678
468
  const carCid = indexResp?.car;
679
469
  if (!carCid) {
680
- throw new Error('Missing block: ' + dataCID)
470
+ throw new Error("Missing block: " + dataCID);
681
471
  }
682
- const carBytes = await tx.objectStore('cars').get(carCid);
472
+ const carBytes = await tx.objectStore("cars").get(carCid);
683
473
  const reader = await car.CarReader.fromBytes(carBytes);
684
474
  if (this.keyMaterial) {
685
475
  const roots = await reader.getRoots();
686
- const readerGetWithCodec = async cid => {
476
+ const readerGetWithCodec = async (cid) => {
687
477
  const got = await reader.get(cid);
688
- // console.log('got.', cid.toString())
689
478
  let useCodec = codec__namespace;
690
- if (cid.toString().indexOf('bafy') === 0) {
479
+ if (cid.toString().indexOf("bafy") === 0) {
691
480
  useCodec = dagcbor__namespace;
692
481
  }
693
482
  const decoded = await Block__namespace.decode({
@@ -695,24 +484,22 @@ class Valet {
695
484
  codec: useCodec,
696
485
  hasher: sha2.sha256
697
486
  });
698
- // console.log('decoded', decoded.value)
699
- return decoded
487
+ return decoded;
700
488
  };
701
489
  const { blocks } = await blocksFromEncryptedCarBlock(roots[0], readerGetWithCodec, this.keyMaterial);
702
- const block = blocks.find(b => b.cid.toString() === dataCID);
490
+ const block = blocks.find((b) => b.cid.toString() === dataCID);
703
491
  if (block) {
704
- return block.bytes
492
+ return block.bytes;
705
493
  }
706
494
  } else {
707
495
  const gotBlock = await reader.get(cid.CID.parse(dataCID));
708
496
  if (gotBlock) {
709
- return gotBlock.bytes
497
+ return gotBlock.bytes;
710
498
  }
711
499
  }
712
- })
500
+ });
713
501
  }
714
502
  }
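
The Valet keeps whole CAR files in one IndexedDB object store and a per-CAR index record in another, so any data CID can be mapped back to the CAR that contains it. A hedged sketch of that layout using the idb openDB API, mirroring the upgrade() inside withDB above; the database name (keyId "null", name "default") and the sample CID string are illustrative, and the snippet assumes an ES module with indexedDB available:

    import { openDB } from 'idb'

    const db = await openDB('fp.null.default.valet', 2, {
      upgrade (db, oldVersion, newVersion, transaction) {
        if (oldVersion < 1) {
          db.createObjectStore('cars') // CAR bytes keyed by car CID
          const cidToCar = db.createObjectStore('cidToCar', { keyPath: 'car' })
          cidToCar.createIndex('cids', 'cids', { multiEntry: true }) // data CID -> car record
        }
        if (oldVersion < 2) {
          transaction.objectStore('cidToCar').createIndex('pending', 'pending') // cars awaiting upload
        }
      }
    })
    // Which car holds a given data CID? (the same lookup getBlock performs)
    const entry = await db.transaction('cidToCar').objectStore('cidToCar').index('cids').get('<data-cid>')
    console.log(entry && entry.car)
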
715
-
716
503
  const blocksToCarBlock = async (lastCid, blocks) => {
717
504
  let size = 0;
718
505
  const headerSize = CBW__namespace.headerLength({ roots: [lastCid] });
@@ -725,28 +512,25 @@ const blocksToCarBlock = async (lastCid, blocks) => {
725
512
  }
726
513
  const buffer = new Uint8Array(size);
727
514
  const writer = await CBW__namespace.createWriter(buffer, { headerSize });
728
-
729
515
  writer.addRoot(lastCid);
730
-
731
516
  for (const { cid, bytes } of blocks) {
732
517
  writer.write({ cid, bytes });
733
518
  }
734
519
  await writer.close();
735
- return await Block__namespace.encode({ value: writer.bytes, hasher: sha2.sha256, codec: raw__namespace })
520
+ return await Block__namespace.encode({ value: writer.bytes, hasher: sha2.sha256, codec: raw__namespace });
736
521
  };
737
-
738
522
  const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blocks, keyMaterial) => {
739
- const encryptionKey = buffer.Buffer.from(keyMaterial, 'hex');
523
+ const encryptionKey = buffer.Buffer.from(keyMaterial, "hex");
740
524
  const encryptedBlocks = [];
741
525
  const theCids = [];
742
526
  for (const { cid } of blocks.entries()) {
743
527
  theCids.push(cid.toString());
744
528
  }
745
-
746
529
  let last;
747
530
  for await (const block of encrypt({
748
531
  cids: theCids,
749
- get: async cid => blocks.get(cid), // maybe we can just use blocks.get
532
+ get: async (cid) => blocks.get(cid),
533
+ // maybe we can just use blocks.get
750
534
  key: encryptionKey,
751
535
  hasher: sha2.sha256,
752
536
  chunker,
@@ -757,21 +541,17 @@ const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blocks, ke
757
541
  encryptedBlocks.push(block);
758
542
  last = block;
759
543
  }
760
- // console.log('last', last.cid.toString(), 'for clock', innerBlockStoreClockRootCid.toString())
761
544
  const encryptedCar = await blocksToCarBlock(last.cid, encryptedBlocks);
762
- return encryptedCar
545
+ return encryptedCar;
763
546
  };
764
- // { root, get, key, cache, chunker, hasher }
765
-
766
- const memoizeDecryptedCarBlocks = new Map();
547
+ const memoizeDecryptedCarBlocks = /* @__PURE__ */ new Map();
767
548
  const blocksFromEncryptedCarBlock = async (cid, get, keyMaterial) => {
768
549
  if (memoizeDecryptedCarBlocks.has(cid.toString())) {
769
- return memoizeDecryptedCarBlocks.get(cid.toString())
550
+ return memoizeDecryptedCarBlocks.get(cid.toString());
770
551
  } else {
771
552
  const blocksPromise = (async () => {
772
- const decryptionKey = buffer.Buffer.from(keyMaterial, 'hex');
773
- // console.log('decrypting', keyMaterial, cid.toString())
774
- const cids = new Set();
553
+ const decryptionKey = buffer.Buffer.from(keyMaterial, "hex");
554
+ const cids = /* @__PURE__ */ new Set();
775
555
  const decryptedBlocks = [];
776
556
  for await (const block of decrypt({
777
557
  root: cid,
@@ -785,18 +565,14 @@ const blocksFromEncryptedCarBlock = async (cid, get, keyMaterial) => {
785
565
  decryptedBlocks.push(block);
786
566
  cids.add(block.cid.toString());
787
567
  }
788
- return { blocks: decryptedBlocks, cids }
568
+ return { blocks: decryptedBlocks, cids };
789
569
  })();
790
570
  memoizeDecryptedCarBlocks.set(cid.toString(), blocksPromise);
791
- return blocksPromise
571
+ return blocksPromise;
792
572
  }
793
573
  };
794
574
 
795
- // @ts-nocheck
796
-
797
- // const sleep = ms => new Promise(r => setTimeout(r, ms))
798
-
799
- const husherMap = new Map();
575
+ const husherMap = /* @__PURE__ */ new Map();
800
576
  const husher = (id, workFn) => {
801
577
  if (!husherMap.has(id)) {
802
578
  husherMap.set(
@@ -804,90 +580,66 @@ const husher = (id, workFn) => {
804
580
  workFn().finally(() => setTimeout(() => husherMap.delete(id), 100))
805
581
  );
806
582
  }
807
- return husherMap.get(id)
583
+ return husherMap.get(id);
808
584
  };
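
husher collapses concurrent requests for the same id into a single in-flight promise and drops the cache entry 100 ms after the work settles; networkGet below uses it so a remote block is only fetched once at a time. The same pattern in isolation (the work function here is a stand-in):

    const husherMap = new Map()
    const husher = (id, workFn) => {
      if (!husherMap.has(id)) {
        husherMap.set(id, workFn().finally(() => setTimeout(() => husherMap.delete(id), 100)))
      }
      return husherMap.get(id)
    }
    let calls = 0
    const fetchOnce = () => new Promise((resolve) => setTimeout(() => resolve(++calls), 10))
    Promise.all([husher('cid-1', fetchOnce), husher('cid-1', fetchOnce)])
      .then((results) => console.log(results, calls)) // [ 1, 1 ] 1
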
809
-
810
- /**
811
- * @typedef {Object} AnyBlock
812
- * @property {import('./link').AnyLink} cid - The CID of the block
813
- * @property {Uint8Array} bytes - The block's data
814
- *
815
- * @typedef {Object} Blockstore
816
- * @property {function(import('./link').AnyLink): Promise<AnyBlock|undefined>} get - A function to retrieve a block by CID
817
- * @property {function(import('./link').AnyLink, Uint8Array): Promise<void>} put - A function to store a block's data and CID
818
- *
819
- * A blockstore that caches writes to a transaction and only persists them when committed.
820
- * @implements {Blockstore}
821
- */
822
585
  class TransactionBlockstore {
823
586
  /** @type {Map<string, Uint8Array>} */
824
- committedBlocks = new Map()
825
-
826
- valet = null
827
-
828
- instanceId = 'blkz.' + Math.random().toString(36).substring(2, 4)
829
- inflightTransactions = new Set()
830
-
831
- constructor (name, encryptionKey) {
587
+ committedBlocks = /* @__PURE__ */ new Map();
588
+ valet = null;
589
+ instanceId = "blkz." + Math.random().toString(36).substring(2, 4);
590
+ inflightTransactions = /* @__PURE__ */ new Set();
591
+ constructor(name, encryptionKey) {
832
592
  this.valet = new Valet(name, encryptionKey);
833
593
  }
834
-
835
594
  /**
836
595
  * Get a block from the store.
837
596
  *
838
597
  * @param {import('./link').AnyLink} cid
839
598
  * @returns {Promise<AnyBlock | undefined>}
840
599
  */
841
- async get (cid) {
600
+ async get(cid) {
842
601
  const key = cid.toString();
843
- // it is safe to read from the in-flight transactions becauase they are immutable
844
- const bytes = await Promise.any([this.transactionsGet(key), this.committedGet(key)]).catch(e => {
845
- // console.log('networkGet', cid.toString(), e)
846
- return this.networkGet(key)
602
+ const bytes = await Promise.any([this.transactionsGet(key), this.committedGet(key)]).catch((e) => {
603
+ return this.networkGet(key);
847
604
  });
848
- if (!bytes) throw new Error('Missing block: ' + key)
849
- return { cid, bytes }
605
+ if (!bytes)
606
+ throw new Error("Missing block: " + key);
607
+ return { cid, bytes };
850
608
  }
851
-
852
609
  // this iterates over the in-flight transactions
853
610
  // and returns the first matching block it finds
854
- async transactionsGet (key) {
611
+ async transactionsGet(key) {
855
612
  for (const transaction of this.inflightTransactions) {
856
613
  const got = await transaction.get(key);
857
- if (got && got.bytes) return got.bytes
614
+ if (got && got.bytes)
615
+ return got.bytes;
858
616
  }
859
- throw new Error('Missing block: ' + key)
617
+ throw new Error("Missing block: " + key);
860
618
  }
861
-
862
- async committedGet (key) {
619
+ async committedGet(key) {
863
620
  const old = this.committedBlocks.get(key);
864
- if (old) return old
621
+ if (old)
622
+ return old;
865
623
  const got = await this.valet.getBlock(key);
866
- // console.log('committedGet: ' + key)
867
624
  this.committedBlocks.set(key, got);
868
- return got
625
+ return got;
869
626
  }
870
-
871
- async clearCommittedCache () {
627
+ async clearCommittedCache() {
872
628
  this.committedBlocks.clear();
873
629
  }
874
-
875
- async networkGet (key) {
630
+ async networkGet(key) {
876
631
  if (this.valet.remoteBlockFunction) {
877
- // todo why is this on valet?
878
632
  const value = await husher(key, async () => await this.valet.remoteBlockFunction(key));
879
633
  if (value) {
880
- // console.log('networkGot: ' + key, value.length)
881
- doTransaction('networkGot: ' + key, this, async innerBlockstore => {
634
+ doTransaction("networkGot: " + key, this, async (innerBlockstore) => {
882
635
  await innerBlockstore.put(multiformats.CID.parse(key), value);
883
636
  });
884
- return value
637
+ return value;
885
638
  }
886
639
  } else {
887
- return false
640
+ return false;
888
641
  }
889
642
  }
890
-
891
643
  /**
892
644
  * Add a block to the store. Usually bound to a transaction by a closure.
893
645
  * It sets the lastCid property to the CID of the block that was put.
@@ -898,10 +650,9 @@ class TransactionBlockstore {
898
650
  * @param {import('./link').AnyLink} cid
899
651
  * @param {Uint8Array} bytes
900
652
  */
901
- put (cid, bytes) {
902
- throw new Error('use a transaction to put')
653
+ put(cid, bytes) {
654
+ throw new Error("use a transaction to put");
903
655
  }
904
-
905
656
  /**
906
657
  * Iterate over all blocks in the store.
907
658
  *
@@ -917,160 +668,126 @@ class TransactionBlockstore {
917
668
  // yield { cid: parse(str), bytes }
918
669
  // }
919
670
  // }
920
-
921
671
  /**
922
672
  * Begin a transaction. Ensures the uncommited blocks are empty at the begining.
923
673
  * Returns the blocks to read and write during the transaction.
924
674
  * @returns {InnerBlockstore}
925
675
  * @memberof TransactionBlockstore
926
676
  */
927
- begin (label = '') {
677
+ begin(label = "") {
928
678
  const innerTransactionBlockstore = new InnerBlockstore(label, this);
929
679
  this.inflightTransactions.add(innerTransactionBlockstore);
930
- return innerTransactionBlockstore
680
+ return innerTransactionBlockstore;
931
681
  }
932
-
933
682
  /**
934
683
  * Commit the transaction. Writes the blocks to the store.
935
684
  * @returns {Promise<void>}
936
685
  * @memberof TransactionBlockstore
937
686
  */
938
- async commit (innerBlockstore) {
687
+ async commit(innerBlockstore) {
939
688
  await this.doCommit(innerBlockstore);
940
689
  }
941
-
942
690
  // first get the transaction blockstore from the map of transaction blockstores
943
691
  // then copy it to committedBlocks
944
692
  // then write the transaction blockstore to a car
945
693
  // then write the car to the valet
946
694
  // then remove the transaction blockstore from the map of transaction blockstores
947
- doCommit = async innerBlockstore => {
948
- const cids = new Set();
695
+ doCommit = async (innerBlockstore) => {
696
+ const cids = /* @__PURE__ */ new Set();
949
697
  for (const { cid, bytes } of innerBlockstore.entries()) {
950
- const stringCid = cid.toString(); // unnecessary string conversion, can we fix upstream?
698
+ const stringCid = cid.toString();
951
699
  if (this.committedBlocks.has(stringCid)) ; else {
952
700
  this.committedBlocks.set(stringCid, bytes);
953
701
  cids.add(stringCid);
954
702
  }
955
703
  }
956
704
  if (cids.size > 0) {
957
- // console.log(innerBlockstore.label, 'committing', cids.size, 'blocks')
958
705
  await this.valet.writeTransaction(innerBlockstore, cids);
959
706
  }
960
- }
961
-
707
+ };
962
708
  /**
963
709
  * Retire the transaction. Clears the uncommited blocks.
964
710
  * @returns {void}
965
711
  * @memberof TransactionBlockstore
966
712
  */
967
- retire (innerBlockstore) {
713
+ retire(innerBlockstore) {
968
714
  this.inflightTransactions.delete(innerBlockstore);
969
715
  }
970
716
  }
971
-
972
- /**
973
- * Runs a function on an inner blockstore, then persists the change to a car writer
974
- * or other outer blockstore.
975
- * @param {string} label
976
- * @param {TransactionBlockstore} blockstore
977
- * @param {(innerBlockstore: Blockstore) => Promise<any>} doFun
978
- * @returns {Promise<any>}
979
- * @memberof TransactionBlockstore
980
- */
981
717
  const doTransaction = async (label, blockstore, doFun) => {
982
- if (!blockstore.commit) return await doFun(blockstore)
718
+ if (!blockstore.commit)
719
+ return await doFun(blockstore);
983
720
  const innerBlockstore = blockstore.begin(label);
984
721
  try {
985
722
  const result = await doFun(innerBlockstore);
986
723
  await blockstore.commit(innerBlockstore);
987
- return result
724
+ return result;
988
725
  } catch (e) {
989
726
  console.error(`Transaction ${label} failed`, e, e.stack);
990
- throw e
727
+ throw e;
991
728
  } finally {
992
729
  blockstore.retire(innerBlockstore);
993
730
  }
994
731
  };
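
doTransaction wraps a unit of work in an inner blockstore: begin an in-flight store, run the work, commit it into the parent on success, and always retire the inner store. A toy model of the same begin/commit/retire shape using plain Maps (ToyStore and every name in it are invented for illustration, not package APIs):

    class ToyStore {
      committed = new Map()
      inflight = new Set()
      begin (label = '') {
        const inner = new Map()
        this.inflight.add(inner)
        return inner
      }
      async commit (inner) {
        for (const [key, bytes] of inner) this.committed.set(key, bytes)
      }
      retire (inner) {
        this.inflight.delete(inner)
      }
    }
    const toyTransaction = async (label, store, doFun) => {
      const inner = store.begin(label)
      try {
        const result = await doFun(inner)
        await store.commit(inner)
        return result
      } catch (e) {
        console.error(`Transaction ${label} failed`, e)
        throw e
      } finally {
        store.retire(inner)
      }
    }
    const store = new ToyStore()
    toyTransaction('demo', store, async (inner) => inner.set('cid-1', Uint8Array.of(1)))
      .then(() => console.log(store.committed.size, store.inflight.size)) // 1 0
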
995
-
996
- /** @implements {BlockFetcher} */
997
732
  class InnerBlockstore {
998
733
  /** @type {Map<string, Uint8Array>} */
999
- blocks = new Map()
1000
- lastCid = null
1001
- label = ''
1002
- parentBlockstore = null
1003
-
1004
- constructor (label, parentBlockstore) {
734
+ blocks = /* @__PURE__ */ new Map();
735
+ lastCid = null;
736
+ label = "";
737
+ parentBlockstore = null;
738
+ constructor(label, parentBlockstore) {
1005
739
  this.label = label;
1006
740
  this.parentBlockstore = parentBlockstore;
1007
741
  }
1008
-
1009
742
  /**
1010
743
  * @param {import('./link').AnyLink} cid
1011
744
  * @returns {Promise<AnyBlock | undefined>}
1012
745
  */
1013
- async get (cid) {
746
+ async get(cid) {
1014
747
  const key = cid.toString();
1015
748
  let bytes = this.blocks.get(key);
1016
749
  if (bytes) {
1017
- return { cid, bytes }
750
+ return { cid, bytes };
1018
751
  }
1019
752
  bytes = await this.parentBlockstore.committedGet(key);
1020
753
  if (bytes) {
1021
- return { cid, bytes }
754
+ return { cid, bytes };
1022
755
  }
1023
756
  }
1024
-
1025
757
  /**
1026
758
  * @param {import('./link').AnyLink} cid
1027
759
  * @param {Uint8Array} bytes
1028
760
  */
1029
- put (cid, bytes) {
1030
- // console.log('put', cid)
761
+ async put(cid, bytes) {
1031
762
  this.blocks.set(cid.toString(), bytes);
1032
763
  this.lastCid = cid;
1033
764
  }
1034
-
1035
- * entries () {
765
+ *entries() {
1036
766
  for (const [str, bytes] of this.blocks) {
1037
767
  yield { cid: link.parse(str), bytes };
1038
768
  }
1039
769
  }
1040
770
  }
1041
771
 
1042
- // @ts-nocheck
1043
772
  const blockOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare: utils.simpleCompare };
1044
-
1045
773
  const withLog = async (label, fn) => {
1046
774
  const resp = await fn();
1047
- // console.log('withLog', label, !!resp)
1048
- return resp
775
+ return resp;
1049
776
  };
1050
-
1051
- // should also return a CIDCounter
1052
777
  const makeGetBlock = (blocks) => {
1053
- // const cids = new CIDCounter() // this could be used for proofs of mutations
1054
778
  const getBlockFn = async (address) => {
1055
779
  const { cid, bytes } = await withLog(address, () => blocks.get(address));
1056
- // cids.add({ address: cid })
1057
- return Block.create({ cid, bytes, hasher: sha2.sha256, codec: dagcbor__namespace })
780
+ return Block.create({ cid, bytes, hasher: sha2.sha256, codec: dagcbor__namespace });
1058
781
  };
1059
782
  return {
1060
783
  // cids,
1061
784
  getBlock: getBlockFn
1062
- }
785
+ };
1063
786
  };
1064
-
1065
- /**
1066
- *
1067
- * @param {*} param0
1068
- * @returns
1069
- */
1070
- async function createAndSaveNewEvent ({
787
+ async function createAndSaveNewEvent({
1071
788
  inBlocks,
1072
789
  bigPut,
1073
- root,
790
+ root: root2,
1074
791
  event: inEvent,
1075
792
  head,
1076
793
  additions,
@@ -1079,158 +796,113 @@ async function createAndSaveNewEvent ({
1079
796
  let cids;
1080
797
  const { key, value, del } = inEvent;
1081
798
  const data = {
1082
- root: (root
1083
- ? {
1084
- cid: root.cid,
1085
- bytes: root.bytes, // can we remove this?
1086
- value: root.value // can we remove this?
1087
- }
1088
- : null),
799
+ root: root2 ? {
800
+ cid: root2.cid,
801
+ bytes: root2.bytes,
802
+ // can we remove this?
803
+ value: root2.value
804
+ // can we remove this?
805
+ } : null,
1089
806
  key
1090
807
  };
1091
-
1092
808
  if (del) {
1093
809
  data.value = null;
1094
- data.type = 'del';
810
+ data.type = "del";
1095
811
  } else {
1096
812
  data.value = value;
1097
- data.type = 'put';
813
+ data.type = "put";
1098
814
  }
1099
- /** @type {EventData} */
1100
-
1101
815
  const event = await EventBlock.create(data, head);
1102
- bigPut(event)
1103
- ;({ head, cids } = await advance(inBlocks, head, event.cid));
1104
-
816
+ bigPut(event);
817
+ ({ head, cids } = await advance(inBlocks, head, event.cid));
1105
818
  return {
1106
- root,
819
+ root: root2,
1107
820
  additions,
1108
821
  removals,
1109
822
  head,
1110
823
  clockCIDs: cids,
1111
824
  event
1112
- }
825
+ };
1113
826
  }
1114
-
1115
827
  const makeGetAndPutBlock = (inBlocks) => {
1116
- // const mblocks = new MemoryBlockstore()
1117
- // const blocks = new MultiBlockFetcher(mblocks, inBlocks)
1118
828
  const { getBlock, cids } = makeGetBlock(inBlocks);
1119
- const put = inBlocks.put.bind(inBlocks);
829
+ const put2 = inBlocks.put.bind(inBlocks);
1120
830
  const bigPut = async (block, additions) => {
1121
- // console.log('bigPut', block.cid.toString())
1122
831
  const { cid, bytes } = block;
1123
- put(cid, bytes);
1124
- // mblocks.putSync(cid, bytes)
832
+ put2(cid, bytes);
1125
833
  if (additions) {
1126
834
  additions.set(cid.toString(), block);
1127
835
  }
1128
836
  };
1129
- return { getBlock, bigPut, blocks: inBlocks, cids }
837
+ return { getBlock, bigPut, blocks: inBlocks, cids };
1130
838
  };
1131
-
1132
839
  const bulkFromEvents = (sorted, event) => {
1133
840
  if (event) {
1134
841
  const update = { value: { data: { key: event.key } } };
1135
842
  if (event.del) {
1136
- update.value.data.type = 'del';
843
+ update.value.data.type = "del";
1137
844
  } else {
1138
- update.value.data.type = 'put';
845
+ update.value.data.type = "put";
1139
846
  update.value.data.value = event.value;
1140
847
  }
1141
848
  sorted.push(update);
1142
849
  }
1143
- const bulk = new Map();
1144
- for (const { value: event } of sorted) {
850
+ const bulk = /* @__PURE__ */ new Map();
851
+ for (const { value: event2 } of sorted) {
1145
852
  const {
1146
853
  data: { type, value, key }
1147
- } = event;
1148
- const bulkEvent = type === 'put' ? { key, value } : { key, del: true };
1149
- bulk.set(bulkEvent.key, bulkEvent); // last wins
854
+ } = event2;
855
+ const bulkEvent = type === "put" ? { key, value } : { key, del: true };
856
+ bulk.set(bulkEvent.key, bulkEvent);
1150
857
  }
1151
- return Array.from(bulk.values())
858
+ return Array.from(bulk.values());
1152
859
  };
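
bulkFromEvents flattens the sorted clock events into one operation per key, letting the latest event for a key win before the batch is handed to the prolly-root update in doProllyBulk. The same reduction run on made-up events:

    const sorted = [
      { value: { data: { type: 'put', key: 'a', value: 1 } } },
      { value: { data: { type: 'put', key: 'a', value: 2 } } },
      { value: { data: { type: 'del', key: 'b' } } }
    ]
    const bulk = new Map()
    for (const { value: event } of sorted) {
      const { data: { type, value, key } } = event
      const bulkEvent = type === 'put' ? { key, value } : { key, del: true }
      bulk.set(bulkEvent.key, bulkEvent) // last wins
    }
    console.log(Array.from(bulk.values())) // [ { key: 'a', value: 2 }, { key: 'b', del: true } ]
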
1153
-
1154
- // Get the value of the root from the ancestor event
1155
- /**
1156
- *
1157
- * @param {EventFetcher} events
1158
- * @param {Link} ancestor
1159
- * @param {*} getBlock
1160
- * @returns
1161
- */
1162
860
  const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
1163
- // console.log('prollyRootFromAncestor', ancestor)
1164
861
  const event = await events.get(ancestor);
1165
- const { root } = event.value.data;
1166
- // console.log('prollyRootFromAncestor', root.cid, JSON.stringify(root.value))
1167
- if (root) {
1168
- return map.load({ cid: root.cid, get: getBlock, ...blockOpts })
862
+ const { root: root2 } = event.value.data;
863
+ if (root2) {
864
+ return map.load({ cid: root2.cid, get: getBlock, ...blockOpts });
1169
865
  } else {
1170
- return null
866
+ return null;
1171
867
  }
1172
868
  };
1173
-
1174
869
  const doProllyBulk = async (inBlocks, head, event) => {
1175
870
  const { getBlock, blocks } = makeGetAndPutBlock(inBlocks);
1176
871
  let bulkSorted = [];
1177
872
  let prollyRootNode = null;
1178
873
  if (head.length) {
1179
- // Otherwise, we find the common ancestor and update the root and other blocks
1180
874
  const events = new EventFetcher(blocks);
1181
- // todo this is returning more events than necessary, lets define the desired semantics from the top down
1182
- // good semantics mean we can cache the results of this call
1183
875
  const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
1184
876
  bulkSorted = sorted;
1185
- // console.log('sorted', JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
1186
877
  prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock);
1187
- // console.log('event', event)
1188
878
  }
1189
-
1190
879
  const bulkOperations = bulkFromEvents(bulkSorted, event);
1191
-
1192
- // if prolly root node is null, we need to create a new one
1193
880
  if (!prollyRootNode) {
1194
- let root;
881
+ let root2;
1195
882
  const newBlocks = [];
1196
- // if all operations are deletes, we can just return an empty root
1197
883
  if (bulkOperations.every((op) => op.del)) {
1198
- return { root: null, blocks: [] }
884
+ return { root: null, blocks: [] };
1199
885
  }
1200
886
  for await (const node of map.create({ get: getBlock, list: bulkOperations, ...blockOpts })) {
1201
- root = await node.block;
1202
- newBlocks.push(root);
887
+ root2 = await node.block;
888
+ newBlocks.push(root2);
1203
889
  }
1204
- return { root, blocks: newBlocks }
890
+ return { root: root2, blocks: newBlocks };
1205
891
  } else {
1206
- return await prollyRootNode.bulk(bulkOperations) // { root: newProllyRootNode, blocks: newBlocks }
892
+ return await prollyRootNode.bulk(bulkOperations);
1207
893
  }
1208
894
  };
1209
-
1210
- /**
1211
- * Put a value (a CID) for the given key. If the key exists it's value is overwritten.
1212
- *
1213
- * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1214
- * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1215
- * @param {string} key The key of the value to put.
1216
- * @param {CID} value The value to put.
1217
- * @param {object} [options]
1218
- * @returns {Promise<Result>}
1219
- */
1220
- async function put (inBlocks, head, event, options) {
895
+ async function put(inBlocks, head, event, options) {
1221
896
  const { bigPut } = makeGetAndPutBlock(inBlocks);
1222
-
1223
- // If the head is empty, we create a new event and return the root and addition blocks
1224
897
  if (!head.length) {
1225
- const additions = new Map();
1226
- const { root, blocks } = await doProllyBulk(inBlocks, head, event);
898
+ const additions = /* @__PURE__ */ new Map();
899
+ const { root: root2, blocks } = await doProllyBulk(inBlocks, head, event);
1227
900
  for (const b of blocks) {
1228
901
  bigPut(b, additions);
1229
902
  }
1230
- return createAndSaveNewEvent({ inBlocks, bigPut, root, event, head, additions: Array.from(additions.values()) })
903
+ return createAndSaveNewEvent({ inBlocks, bigPut, root: root2, event, head, additions: Array.from(additions.values()) });
1231
904
  }
1232
905
  const { root: newProllyRootNode, blocks: newBlocks } = await doProllyBulk(inBlocks, head, event);
1233
-
1234
906
  if (!newProllyRootNode) {
1235
907
  return createAndSaveNewEvent({
1236
908
  inBlocks,
@@ -1239,106 +911,71 @@ async function put (inBlocks, head, event, options) {
1239
911
  event,
1240
912
  head,
1241
913
  additions: []
1242
- })
914
+ });
1243
915
  } else {
1244
916
  const prollyRootBlock = await newProllyRootNode.block;
1245
- const additions = new Map(); // ; const removals = new Map()
917
+ const additions = /* @__PURE__ */ new Map();
1246
918
  bigPut(prollyRootBlock, additions);
1247
919
  for (const nb of newBlocks) {
1248
920
  bigPut(nb, additions);
1249
921
  }
1250
- // additions are new blocks
1251
922
  return createAndSaveNewEvent({
1252
923
  inBlocks,
1253
924
  bigPut,
1254
925
  root: prollyRootBlock,
1255
926
  event,
1256
927
  head,
1257
- additions: Array.from(additions.values()) /*, todo? Array.from(removals.values()) */
1258
- })
928
+ additions: Array.from(additions.values())
929
+ /*, todo? Array.from(removals.values()) */
930
+ });
1259
931
  }
1260
932
  }
1261
-
1262
- /**
1263
- * Determine the effective prolly root given the current merkle clock head.
1264
- *
1265
- * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1266
- * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1267
- */
1268
- async function root (inBlocks, head) {
933
+ async function root(inBlocks, head) {
1269
934
  if (!head.length) {
1270
- throw new Error('no head')
935
+ throw new Error("no head");
1271
936
  }
1272
937
  const { root: newProllyRootNode, blocks: newBlocks, cids } = await doProllyBulk(inBlocks, head);
1273
- // todo maybe these should go to a temp blockstore?
1274
- await doTransaction('root', inBlocks, async (transactionBlockstore) => {
938
+ await doTransaction("root", inBlocks, async (transactionBlockstore) => {
1275
939
  const { bigPut } = makeGetAndPutBlock(transactionBlockstore);
1276
940
  for (const nb of newBlocks) {
1277
941
  bigPut(nb);
1278
942
  }
1279
943
  });
1280
- return { cids, node: newProllyRootNode }
944
+ return { cids, node: newProllyRootNode };
1281
945
  }
1282
-
1283
- /**
1284
- * Get the list of events not known by the `since` event
1285
- * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1286
- * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1287
- * @param {import('./clock').EventLink<EventData>} since Event to compare against.
1288
- * @returns {Promise<{clockCIDs: CIDCounter, result: EventData[]}>}
1289
- */
1290
- async function eventsSince (blocks, head, since) {
946
+ async function eventsSince(blocks, head, since) {
1291
947
  if (!head.length) {
1292
- throw new Error('no head')
948
+ throw new Error("no head");
1293
949
  }
1294
- const sinceHead = [...since, ...head]; // ?
950
+ const sinceHead = [...since, ...head];
1295
951
  const { cids, events: unknownSorted3 } = await findEventsToSync(blocks, sinceHead);
1296
- return { clockCIDs: cids, result: unknownSorted3.map(({ value: { data } }) => data) }
952
+ return { clockCIDs: cids, result: unknownSorted3.map(({ value: { data } }) => data) };
1297
953
  }
1298
-
1299
- /**
1300
- *
1301
- * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1302
- * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1303
- *
1304
- * @returns {Promise<{clockCIDs: CIDCounter, result: EventData[]}>}
1305
- *
1306
- */
1307
- async function getAll (blocks, head) {
1308
- // todo use the root node left around from put, etc
1309
- // move load to a central place
954
+ async function getAll(blocks, head) {
1310
955
  if (!head.length) {
1311
- return { clockCIDs: new utils.CIDCounter(), cids: new utils.CIDCounter(), result: [] }
956
+ return { clockCIDs: new utils.CIDCounter(), cids: new utils.CIDCounter(), result: [] };
1312
957
  }
1313
958
  const { node: prollyRootNode, cids: clockCIDs } = await root(blocks, head);
1314
959
  if (!prollyRootNode) {
1315
- return { clockCIDs, cids: new utils.CIDCounter(), result: [] }
960
+ return { clockCIDs, cids: new utils.CIDCounter(), result: [] };
1316
961
  }
1317
- const { result, cids } = await prollyRootNode.getAllEntries(); // todo params
1318
- return { clockCIDs, cids, result: result.map(({ key, value }) => ({ key, value })) }
962
+ const { result, cids } = await prollyRootNode.getAllEntries();
963
+ return { clockCIDs, cids, result: result.map(({ key, value }) => ({ key, value })) };
1319
964
  }
1320
-
1321
- /**
1322
- * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1323
- * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1324
- * @param {string} key The key of the value to retrieve.
1325
- */
1326
- async function get (blocks, head, key) {
1327
- // instead pass root from db? and always update on change
965
+ async function get(blocks, head, key) {
1328
966
  if (!head.length) {
1329
- return { cids: new utils.CIDCounter(), result: null }
967
+ return { cids: new utils.CIDCounter(), result: null };
1330
968
  }
1331
969
  const { node: prollyRootNode, cids: clockCIDs } = await root(blocks, head);
1332
970
  if (!prollyRootNode) {
1333
- return { clockCIDs, cids: new utils.CIDCounter(), result: null }
971
+ return { clockCIDs, cids: new utils.CIDCounter(), result: null };
1334
972
  }
1335
973
  const { result, cids } = await prollyRootNode.get(key);
1336
- return { result, cids, clockCIDs }
974
+ return { result, cids, clockCIDs };
1337
975
  }
1338
-
1339
- async function * vis (blocks, head) {
976
+ async function* vis(blocks, head) {
1340
977
  if (!head.length) {
1341
- return { cids: new utils.CIDCounter(), result: null }
978
+ return { cids: new utils.CIDCounter(), result: null };
1342
979
  }
1343
980
  const { node: prollyRootNode, cids } = await root(blocks, head);
1344
981
  const lines = [];
@@ -1346,52 +983,29 @@ async function * vis (blocks, head) {
1346
983
  yield line;
1347
984
  lines.push(line);
1348
985
  }
1349
- return { vis: lines.join('\n'), cids }
986
+ return { vis: lines.join("\n"), cids };
1350
987
  }
1351
-
1352
- async function visMerkleTree (blocks, head) {
988
+ async function visMerkleTree(blocks, head) {
1353
989
  if (!head.length) {
1354
- return { cids: new utils.CIDCounter(), result: null }
990
+ return { cids: new utils.CIDCounter(), result: null };
1355
991
  }
1356
992
  const { node: prollyRootNode, cids } = await root(blocks, head);
1357
993
  const lines = [];
1358
994
  for await (const line of prollyRootNode.vis()) {
1359
995
  lines.push(line);
1360
996
  }
1361
- return { vis: lines.join('\n'), cids }
997
+ return { vis: lines.join("\n"), cids };
1362
998
  }
1363
-
1364
- async function visMerkleClock (blocks, head) {
999
+ async function visMerkleClock(blocks, head) {
1365
1000
  const lines = [];
1366
1001
  for await (const line of vis$1(blocks, head)) {
1367
- // yield line
1368
1002
  lines.push(line);
1369
1003
  }
1370
- return { vis: lines.join('\n') }
1004
+ return { vis: lines.join("\n") };
1371
1005
  }
1372
1006
 
1373
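For orientation, the prolly-tree helpers above all share one calling convention: a block store plus the current Merkle clock head, returning the requested data alongside the CIDs that were touched so callers can assemble proofs. A rough sketch of that convention follows; it is illustrative only, the names `blocks`, `head`, and `encodedKey` are placeholders, and none of these functions are exported from the bundle.

    // Orientation sketch only: these helpers are internal to the bundle and not exported.
    // `blocks` stands for a TransactionBlockstore, `head` for the Merkle clock head (CID[]).
    const { node, cids } = await root(blocks, head);               // effective prolly root for this head
    const { result: all, clockCIDs } = await getAll(blocks, head); // every { key, value } entry plus touched CIDs
    const one = await get(blocks, head, encodedKey);               // { result, cids, clockCIDs }; keys are charwise-encoded
    // With an empty head, getAll/get short-circuit to empty CIDCounters and an empty/null result.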
- // @ts-nocheck
1374
- // import { CID } from 'multiformats/dist/types/src/cid.js'
1375
-
1376
- // const sleep = ms => new Promise(resolve => setTimeout(resolve, ms))
1377
-
1378
- // class Proof {}
1379
-
1380
- /**
1381
- * @class Fireproof
1382
- * @classdesc Fireproof stores data in IndexedDB and provides a Merkle clock.
1383
- * This is the main class for saving and loading JSON and other documents with the database. You can find additional examples and
1384
- * usage guides in the repository README.
1385
- *
1386
- * @param {import('./blockstore.js').TransactionBlockstore} blocks - The block storage instance to use documents and indexes
1387
- * @param {CID[]} clock - The Merkle clock head to use for the Fireproof instance.
1388
- * @param {object} [config] - Optional configuration options for the Fireproof instance.
1389
- * @param {object} [authCtx] - Optional authorization context object to use for any authentication checks.
1390
- *
1391
- */
1392
1007
  class Fireproof {
1393
- listeners = new Set()
1394
-
1008
+ listeners = /* @__PURE__ */ new Set();
1395
1009
  /**
1396
1010
  * @function storage
1397
1011
  * @memberof Fireproof
@@ -1400,116 +1014,104 @@ class Fireproof {
1400
1014
  * @static
1401
1015
  * @returns {Fireproof} - a new Fireproof instance
1402
1016
  */
1403
- static storage = (name = 'global') => {
1404
- const instanceKey = crypto.randomBytes(32).toString('hex'); // pass null to disable encryption
1405
- // pick a random key from const validatedKeys
1406
- // const instanceKey = validatedKeys[Math.floor(Math.random() * validatedKeys.length)]
1407
- return new Fireproof(new TransactionBlockstore(name, instanceKey), [], { name })
1408
- }
1409
-
1410
- constructor (blocks, clock, config, authCtx = {}) {
1411
- this.name = config?.name || 'global';
1017
+ static storage = (name = "global") => {
1018
+ const instanceKey = crypto.randomBytes(32).toString("hex");
1019
+ return new Fireproof(new TransactionBlockstore(name, instanceKey), [], { name });
1020
+ };
1021
+ constructor(blocks, clock, config, authCtx = {}) {
1022
+ this.name = config?.name || "global";
1412
1023
  this.instanceId = `fp.${this.name}.${Math.random().toString(36).substring(2, 7)}`;
1413
1024
  this.blocks = blocks;
1414
1025
  this.clock = clock;
1415
1026
  this.config = config;
1416
1027
  this.authCtx = authCtx;
1417
- this.indexes = new Map();
1028
+ this.indexes = /* @__PURE__ */ new Map();
1418
1029
  }
1419
-
1420
1030
  /**
1421
1031
  * Renders the Fireproof instance as a JSON object.
1422
1032
  * @returns {Object} - The JSON representation of the Fireproof instance. Includes clock heads for the database and its indexes.
1423
1033
  * @memberof Fireproof
1424
1034
  * @instance
1425
1035
  */
1426
- toJSON () {
1427
- // todo this also needs to return the index roots...
1036
+ toJSON() {
1428
1037
  return {
1429
1038
  clock: this.clockToJSON(),
1430
1039
  name: this.name,
1431
1040
  key: this.blocks.valet.getKeyMaterial(),
1432
- indexes: [...this.indexes.values()].map(index => index.toJSON())
1433
- }
1041
+ indexes: [...this.indexes.values()].map((index) => index.toJSON())
1042
+ };
1434
1043
  }
1435
-
1436
- clockToJSON () {
1437
- return this.clock.map(cid => cid.toString())
1044
+ /**
1045
+ * Returns the Merkle clock heads for the Fireproof instance.
1046
+ * @returns {string[]} - The Merkle clock heads for the Fireproof instance.
1047
+ * @memberof Fireproof
1048
+ * @instance
1049
+ */
1050
+ clockToJSON() {
1051
+ return this.clock.map((cid) => cid.toString());
1438
1052
  }
1439
-
1440
- hydrate ({ clock, name, key }) {
1053
+ hydrate({ clock, name, key }) {
1441
1054
  this.name = name;
1442
1055
  this.clock = clock;
1443
1056
  this.blocks.valet.setKeyMaterial(key);
1444
1057
  this.indexBlocks = null;
1445
1058
  }
1446
-
1447
1059
  /**
1448
1060
  * Triggers a notification to all listeners
1449
1061
  * of the Fireproof instance so they can repaint UI, etc.
1450
- * @param {CID[] } clock
1451
- * Clock to use for the snapshot.
1452
1062
  * @returns {Promise<void>}
1453
1063
  * @memberof Fireproof
1454
1064
  * @instance
1455
1065
  */
1456
- async notifyReset () {
1066
+ async notifyReset() {
1457
1067
  await this.notifyListeners({ _reset: true, _clock: this.clockToJSON() });
1458
1068
  }
1459
-
1460
1069
  // used by indexes etc. to notify database listeners of new availability
1461
- async notifyExternal (source = 'unknown') {
1070
+ async notifyExternal(source = "unknown") {
1462
1071
  await this.notifyListeners({ _external: source, _clock: this.clockToJSON() });
1463
1072
  }
1464
-
1465
1073
  /**
1466
1074
  * Returns the changes made to the Fireproof instance since the specified event.
1467
1075
  * @function changesSince
1468
1076
  * @param {CID[]} [event] - The clock head to retrieve changes since. If null or undefined, retrieves all changes.
1469
- * @returns {Object<{rows : Object[], clock: CID[]}>} An object containing the rows and the head of the instance's clock.
1077
+ * @returns {Promise<{rows : Object[], clock: CID[], proof: {}}>} An object containing the rows and the head of the instance's clock.
1470
1078
  * @memberof Fireproof
1471
1079
  * @instance
1472
1080
  */
1473
- async changesSince (event) {
1474
- // console.log('changesSince', this.instanceId, event, this.clock)
1081
+ async changesSince(event) {
1475
1082
  let rows, dataCIDs, clockCIDs;
1476
- // if (!event) event = []
1477
1083
  if (event) {
1478
1084
  const resp = await eventsSince(this.blocks, this.clock, event);
1479
- const docsMap = new Map();
1085
+ const docsMap = /* @__PURE__ */ new Map();
1480
1086
  for (const { key, type, value } of resp.result.map(decodeEvent)) {
1481
- if (type === 'del') {
1087
+ if (type === "del") {
1482
1088
  docsMap.set(key, { key, del: true });
1483
1089
  } else {
1484
1090
  docsMap.set(key, { key, value });
1485
1091
  }
1486
1092
  }
1487
1093
  rows = Array.from(docsMap.values());
1488
- clockCIDs = resp.cids;
1489
- // console.log('change rows', this.instanceId, rows)
1094
+ clockCIDs = resp.clockCIDs;
1490
1095
  } else {
1491
1096
  const allResp = await getAll(this.blocks, this.clock);
1492
- rows = allResp.result.map(({ key, value }) => (decodeEvent({ key, value })));
1097
+ rows = allResp.result.map(({ key, value }) => decodeEvent({ key, value }));
1493
1098
  dataCIDs = allResp.cids;
1494
- // console.log('dbdoc rows', this.instanceId, rows)
1495
1099
  }
1496
1100
  return {
1497
1101
  rows,
1498
1102
  clock: this.clockToJSON(),
1499
1103
  proof: { data: await cidsToProof(dataCIDs), clock: await cidsToProof(clockCIDs) }
1500
- }
1104
+ };
1501
1105
  }
1502
-
1503
- async allDocuments () {
1106
+ async allDocuments() {
1504
1107
  const allResp = await getAll(this.blocks, this.clock);
1505
- const rows = allResp.result.map(({ key, value }) => (decodeEvent({ key, value }))).map(({ key, value }) => ({ key, value: { _id: key, ...value } }));
1108
+ const rows = allResp.result.map(({ key, value }) => decodeEvent({ key, value })).map(({ key, value }) => ({ key, value: { _id: key, ...value } }));
1506
1109
  return {
1507
1110
  rows,
1508
1111
  clock: this.clockToJSON(),
1509
1112
  proof: await cidsToProof(allResp.cids)
1510
- }
1113
+ };
1511
1114
  }
1512
-
1513
1115
  /**
1514
1116
  * Runs validation on the specified document using the Fireproof instance's configuration. Throws an error if the document is invalid.
1515
1117
  *
@@ -1519,15 +1121,12 @@ class Fireproof {
1519
1121
  * @memberof Fireproof
1520
1122
  * @instance
1521
1123
  */
1522
- async runValidation (doc) {
1124
+ async runValidation(doc) {
1523
1125
  if (this.config && this.config.validateChange) {
1524
- const oldDoc = await this.get(doc._id)
1525
- .then((doc) => doc)
1526
- .catch(() => ({}));
1126
+ const oldDoc = await this.get(doc._id).then((doc2) => doc2).catch(() => ({}));
1527
1127
  this.config.validateChange(doc, oldDoc, this.authCtx);
1528
1128
  }
1529
1129
  }
1530
-
1531
1130
  /**
1532
1131
  * Retrieves the document with the specified ID from the database
1533
1132
  *
@@ -1537,13 +1136,11 @@ class Fireproof {
1537
1136
  * @memberof Fireproof
1538
1137
  * @instance
1539
1138
  */
1540
- async get (key, opts = {}) {
1139
+ async get(key, opts = {}) {
1541
1140
  const clock = opts.clock || this.clock;
1542
1141
  const resp = await get(this.blocks, clock, charwise.encode(key));
1543
-
1544
- // this tombstone is temporary until we can get the prolly tree to delete
1545
1142
  if (!resp || resp.result === null) {
1546
- throw new Error('Not found')
1143
+ throw new Error("Not found");
1547
1144
  }
1548
1145
  const doc = resp.result;
1549
1146
  if (opts.mvcc === true) {
@@ -1554,9 +1151,8 @@ class Fireproof {
1554
1151
  clock: this.clockToJSON()
1555
1152
  };
1556
1153
  doc._id = key;
1557
- return doc
1154
+ return doc;
1558
1155
  }
1559
-
1560
1156
  /**
1561
1157
  * Adds a new document to the database, or updates an existing document. Returns the ID of the document and the new clock head.
1562
1158
  *
@@ -1568,12 +1164,11 @@ class Fireproof {
1568
1164
  * @memberof Fireproof
1569
1165
  * @instance
1570
1166
  */
1571
- async put ({ _id, _proof, ...doc }) {
1572
- const id = _id || 'f' + Math.random().toString(36).slice(2);
1167
+ async put({ _id, _proof, ...doc }) {
1168
+ const id = _id || "f" + Math.random().toString(36).slice(2);
1573
1169
  await this.runValidation({ _id: id, ...doc });
1574
- return await this.putToProllyTree({ key: id, value: doc }, doc._clock)
1170
+ return await this.putToProllyTree({ key: id, value: doc }, doc._clock);
1575
1171
  }
1576
-
1577
1172
  /**
1578
1173
  * Deletes a document from the database
1579
1174
  * @param {string | any} docOrId - the document ID
@@ -1581,7 +1176,7 @@ class Fireproof {
1581
1176
  * @memberof Fireproof
1582
1177
  * @instance
1583
1178
  */
1584
- async del (docOrId) {
1179
+ async del(docOrId) {
1585
1180
  let id;
1586
1181
  let clock = null;
1587
1182
  if (docOrId._id) {
@@ -1591,48 +1186,40 @@ class Fireproof {
1591
1186
  id = docOrId;
1592
1187
  }
1593
1188
  await this.runValidation({ _id: id, _deleted: true });
1594
- return await this.putToProllyTree({ key: id, del: true }, clock) // not working at prolly tree layer?
1595
- // this tombstone is temporary until we can get the prolly tree to delete
1596
- // return await this.putToProllyTree({ key: id, value: null }, clock)
1189
+ return await this.putToProllyTree({ key: id, del: true }, clock);
1597
1190
  }
1598
-
1599
1191
  /**
1600
1192
  * Updates the underlying storage with the specified event.
1601
1193
  * @private
1602
1194
  * @param {{del?: true, key : string, value?: any}} decodedEvent - the event to add
1603
1195
  * @returns {Promise<{ proof:{}, id: string, clock: CID[] }>} - The result of adding the event to storage
1604
1196
  */
1605
- async putToProllyTree (decodedEvent, clock = null) {
1197
+ async putToProllyTree(decodedEvent, clock = null) {
1606
1198
  const event = encodeEvent(decodedEvent);
1607
1199
  if (clock && JSON.stringify(clock) !== JSON.stringify(this.clockToJSON())) {
1608
- // we need to check and see what version of the document exists at the clock specified
1609
- // if it is the same as the one we are trying to put, then we can proceed
1610
1200
  const resp = await eventsSince(this.blocks, this.clock, event.value._clock);
1611
1201
  const missedChange = resp.result.find(({ key }) => key === event.key);
1612
1202
  if (missedChange) {
1613
- throw new Error('MVCC conflict, document is changed, please reload the document and try again.')
1203
+ throw new Error("MVCC conflict, document is changed, please reload the document and try again.");
1614
1204
  }
1615
1205
  }
1616
1206
  const result = await doTransaction(
1617
- 'putToProllyTree',
1207
+ "putToProllyTree",
1618
1208
  this.blocks,
1619
1209
  async (blocks) => await put(blocks, this.clock, event)
1620
1210
  );
1621
1211
  if (!result) {
1622
- console.error('failed', event);
1623
- throw new Error('failed to put at storage layer')
1212
+ console.error("failed", event);
1213
+ throw new Error("failed to put at storage layer");
1624
1214
  }
1625
- // console.log('new clock head', this.instanceId, result.head.toString())
1626
- this.clock = result.head; // do we want to do this as a finally block
1627
- await this.notifyListeners([decodedEvent]); // this type is odd
1215
+ this.clock = result.head;
1216
+ await this.notifyListeners([decodedEvent]);
1628
1217
  return {
1629
1218
  id: decodedEvent.key,
1630
1219
  clock: this.clockToJSON(),
1631
1220
  proof: { data: await cidsToProof(result.cids), clock: await cidsToProof(result.clockCIDs) }
1632
- }
1633
- // todo should include additions (or split clock)
1221
+ };
1634
1222
  }
1635
-
1636
1223
  // /**
1637
1224
  // * Advances the clock to the specified event and updates the root CID
1638
1225
  // * Will be used by replication
@@ -1642,19 +1229,15 @@ class Fireproof {
1642
1229
  // this.rootCid = await root(this.blocks, this.clock)
1643
1230
  // return this.clock
1644
1231
  // }
1645
-
1646
- async * vis () {
1647
- return yield * vis(this.blocks, this.clock)
1232
+ async *vis() {
1233
+ return yield* vis(this.blocks, this.clock);
1648
1234
  }
1649
-
1650
- async visTree () {
1651
- return await visMerkleTree(this.blocks, this.clock)
1235
+ async visTree() {
1236
+ return await visMerkleTree(this.blocks, this.clock);
1652
1237
  }
1653
-
1654
- async visClock () {
1655
- return await visMerkleClock(this.blocks, this.clock)
1238
+ async visClock() {
1239
+ return await visMerkleClock(this.blocks, this.clock);
1656
1240
  }
1657
-
1658
1241
  /**
1659
1242
  * Registers a Listener to be called when the Fireproof instance's clock is updated.
1660
1243
  * Receives live changes from the database after they are committed.
@@ -1662,141 +1245,84 @@ class Fireproof {
1662
1245
  * @returns {Function} - A function that can be called to unregister the listener.
1663
1246
  * @memberof Fireproof
1664
1247
  */
1665
- registerListener (listener) {
1248
+ registerListener(listener) {
1666
1249
  this.listeners.add(listener);
1667
1250
  return () => {
1668
1251
  this.listeners.delete(listener);
1669
- }
1252
+ };
1670
1253
  }
1671
-
1672
- async notifyListeners (changes) {
1673
- // await sleep(10)
1254
+ async notifyListeners(changes) {
1674
1255
  for (const listener of this.listeners) {
1675
1256
  await listener(changes);
1676
1257
  }
1677
1258
  }
1678
-
1679
- setCarUploader (carUploaderFn) {
1680
- // console.log('registering car uploader')
1681
- // https://en.wikipedia.org/wiki/Law_of_Demeter - this is a violation of the law of demeter
1259
+ setCarUploader(carUploaderFn) {
1682
1260
  this.blocks.valet.uploadFunction = carUploaderFn;
1683
1261
  }
1684
-
1685
- setRemoteBlockReader (remoteBlockReaderFn) {
1686
- // console.log('registering remote block reader')
1262
+ setRemoteBlockReader(remoteBlockReaderFn) {
1687
1263
  this.blocks.valet.remoteBlockFunction = remoteBlockReaderFn;
1688
1264
  }
1689
1265
  }
1690
-
1691
- async function cidsToProof (cids) {
1692
- if (!cids || !cids.all) return []
1266
+ async function cidsToProof(cids) {
1267
+ if (!cids || !cids.all)
1268
+ return [];
1693
1269
  const all = await cids.all();
1694
- return [...all].map((cid) => cid.toString())
1270
+ return [...all].map((cid) => cid.toString());
1695
1271
  }
1696
-
1697
- function decodeEvent (event) {
1272
+ function decodeEvent(event) {
1698
1273
  const decodedKey = charwise.decode(event.key);
1699
- return { ...event, key: decodedKey }
1274
+ return { ...event, key: decodedKey };
1700
1275
  }
1701
-
1702
- function encodeEvent (event) {
1703
- if (!(event && event.key)) return
1276
+ function encodeEvent(event) {
1277
+ if (!(event && event.key))
1278
+ return;
1704
1279
  const encodedKey = charwise.encode(event.key);
1705
- return { ...event, key: encodedKey }
1280
+ return { ...event, key: encodedKey };
1706
1281
  }
1707
1282
 
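Taken together, the hunks above cover the public surface of the Fireproof class in this release: `storage`, `put`/`get`/`del`, `changesSince`/`allDocuments`, the MVCC guard in `putToProllyTree`, and listener registration. One behavioral fix is visible in `changesSince`: the event branch now reads `resp.clockCIDs`, matching the `{ clockCIDs, result }` shape that `eventsSince` returns, where 0.3.13 read `resp.cids`. A minimal usage sketch of that surface follows; the document fields are made up, and the assumption that `get(..., { mvcc: true })` attaches the read clock as `doc._clock` (which `put` then forwards) is an inference from this diff, not verbatim package documentation.

    // Minimal sketch of the API visible in this diff; document contents are illustrative.
    const db = Fireproof.storage('my-db');                 // random key material, empty clock
    const ok = await db.put({ _id: 'todo-1', title: 'write docs' });
    // ok => { id, clock, proof: { data, clock } }
    const doc = await db.get('todo-1', { mvcc: true });    // presumably carries doc._clock for MVCC-checked puts
    await db.put({ ...doc, done: true });                  // a concurrent change to 'todo-1' would throw the MVCC conflict error
    const everything = await db.changesSince();            // no argument: all rows via getAll
    const unregister = db.registerListener((changes) => console.log('committed', changes));
    await db.del('todo-1');                                // tombstones via { key, del: true }
    unregister();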
1708
- // @ts-nocheck
1709
-
1710
1283
  const compare = (a, b) => {
1711
1284
  const [aKey, aRef] = a;
1712
1285
  const [bKey, bRef] = b;
1713
1286
  const comp = utils.simpleCompare(aKey, bKey);
1714
- if (comp !== 0) return comp
1715
- return refCompare(aRef, bRef)
1287
+ if (comp !== 0)
1288
+ return comp;
1289
+ return refCompare(aRef, bRef);
1716
1290
  };
1717
-
1718
1291
  const refCompare = (aRef, bRef) => {
1719
- if (Number.isNaN(aRef)) return -1
1720
- if (Number.isNaN(bRef)) throw new Error('ref may not be Infinity or NaN')
1721
- if (aRef === Infinity) return 1 // need to test this on equal docids!
1722
- // if (!Number.isFinite(bRef)) throw new Error('ref may not be Infinity or NaN')
1723
- return utils.simpleCompare(aRef, bRef)
1292
+ if (Number.isNaN(aRef))
1293
+ return -1;
1294
+ if (Number.isNaN(bRef))
1295
+ throw new Error("ref may not be Infinity or NaN");
1296
+ if (aRef === Infinity)
1297
+ return 1;
1298
+ return utils.simpleCompare(aRef, bRef);
1724
1299
  };
1725
-
1726
1300
  const dbIndexOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare };
1727
1301
  const idIndexOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare: utils.simpleCompare };
1728
-
1729
1302
  const makeDoc = ({ key, value }) => ({ _id: key, ...value });
1730
-
1731
- /**
1732
- * JDoc for the result row type.
1733
- * @typedef {Object} ChangeEvent
1734
- * @property {string} key - The key of the document.
1735
- * @property {Object} value - The new value of the document.
1736
- * @property {boolean} [del] - Is the row deleted?
1737
- * @memberof DbIndex
1738
- */
1739
-
1740
- /**
1741
- * JDoc for the result row type.
1742
- * @typedef {Object} DbIndexEntry
1743
- * @property {string[]} key - The key for the DbIndex entry.
1744
- * @property {Object} value - The value of the document.
1745
- * @property {boolean} [del] - Is the row deleted?
1746
- * @memberof DbIndex
1747
- */
1748
-
1749
- /**
1750
- * Transforms a set of changes to DbIndex entries using a map function.
1751
- *
1752
- * @param {ChangeEvent[]} changes
1753
- * @param {Function} mapFn
1754
- * @returns {DbIndexEntry[]} The DbIndex entries generated by the map function.
1755
- * @private
1756
- * @memberof DbIndex
1757
- */
1758
1303
  const indexEntriesForChanges = (changes, mapFn) => {
1759
1304
  const indexEntries = [];
1760
1305
  changes.forEach(({ key, value, del }) => {
1761
- if (del || !value) return
1306
+ if (del || !value)
1307
+ return;
1762
1308
  mapFn(makeDoc({ key, value }), (k, v) => {
1763
- if (typeof v === 'undefined' || typeof k === 'undefined') return
1309
+ if (typeof v === "undefined" || typeof k === "undefined")
1310
+ return;
1764
1311
  indexEntries.push({
1765
1312
  key: [charwise.encode(k), key],
1766
1313
  value: v
1767
1314
  });
1768
1315
  });
1769
1316
  });
1770
- return indexEntries
1317
+ return indexEntries;
1771
1318
  };
1772
-
1773
- /**
1774
- * Represents an DbIndex for a Fireproof database.
1775
- *
1776
- * @class DbIndex
1777
- * @classdesc An DbIndex can be used to order and filter the documents in a Fireproof database.
1778
- *
1779
- * @param {Fireproof} database - The Fireproof database instance to DbIndex.
1780
- * @param {Function} mapFn - The map function to apply to each entry in the database.
1781
- *
1782
- */
1783
1319
  class DbIndex {
1784
- constructor (database, mapFn, clock, opts = {}) {
1785
- // console.log('DbIndex constructor', database.constructor.name, typeof mapFn, clock)
1786
- /**
1787
- * The database instance to DbIndex.
1788
- * @type {Fireproof}
1789
- */
1320
+ constructor(database, mapFn, clock, opts = {}) {
1790
1321
  this.database = database;
1791
1322
  if (!database.indexBlocks) {
1792
- database.indexBlocks = new TransactionBlockstore(database.name + '.indexes', database.blocks.valet.getKeyMaterial());
1323
+ database.indexBlocks = new TransactionBlockstore(database.name + ".indexes", database.blocks.valet.getKeyMaterial());
1793
1324
  }
1794
- /**
1795
- * The map function to apply to each entry in the database.
1796
- * @type {Function}
1797
- */
1798
-
1799
- if (typeof mapFn === 'string') {
1325
+ if (typeof mapFn === "string") {
1800
1326
  this.mapFnString = mapFn;
1801
1327
  } else {
1802
1328
  this.mapFn = mapFn;
@@ -1813,23 +1339,21 @@ class DbIndex {
1813
1339
  }
1814
1340
  this.instanceId = this.database.instanceId + `.DbIndex.${Math.random().toString(36).substring(2, 7)}`;
1815
1341
  this.updateIndexPromise = null;
1816
- if (!opts.temporary) { DbIndex.registerWithDatabase(this, this.database); }
1342
+ if (!opts.temporary) {
1343
+ DbIndex.registerWithDatabase(this, this.database);
1344
+ }
1817
1345
  }
1818
-
1819
- makeName () {
1346
+ makeName() {
1820
1347
  const regex = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g;
1821
- const matches = Array.from(this.mapFnString.matchAll(regex), match => match[1].trim());
1822
- return matches[1]
1348
+ const matches = Array.from(this.mapFnString.matchAll(regex), (match) => match[1].trim());
1349
+ return matches[1];
1823
1350
  }
1824
-
1825
- static registerWithDatabase (inIndex, database) {
1351
+ static registerWithDatabase(inIndex, database) {
1826
1352
  if (!database.indexes.has(inIndex.mapFnString)) {
1827
1353
  database.indexes.set(inIndex.mapFnString, inIndex);
1828
1354
  } else {
1829
- // merge our inIndex code with the inIndex clock or vice versa
1830
1355
  const existingIndex = database.indexes.get(inIndex.mapFnString);
1831
- // keep the code instance, discard the clock instance
1832
- if (existingIndex.mapFn) { // this one also has other config
1356
+ if (existingIndex.mapFn) {
1833
1357
  existingIndex.dbHead = inIndex.dbHead;
1834
1358
  existingIndex.indexById.cid = inIndex.indexById.cid;
1835
1359
  existingIndex.indexByKey.cid = inIndex.indexByKey.cid;
@@ -1841,27 +1365,22 @@ class DbIndex {
1841
1365
  }
1842
1366
  }
1843
1367
  }
1844
-
1845
- toJSON () {
1368
+ toJSON() {
1846
1369
  const indexJson = { name: this.name, code: this.mapFnString, clock: { db: null, byId: null, byKey: null } };
1847
- indexJson.clock.db = this.dbHead?.map(cid => cid.toString());
1370
+ indexJson.clock.db = this.dbHead?.map((cid) => cid.toString());
1848
1371
  indexJson.clock.byId = this.indexById.cid?.toString();
1849
1372
  indexJson.clock.byKey = this.indexByKey.cid?.toString();
1850
- return indexJson
1373
+ return indexJson;
1851
1374
  }
1852
-
1853
- static fromJSON (database, { code, clock, name }) {
1854
- // console.log('DbIndex.fromJSON', database.constructor.name, code, clock)
1855
- return new DbIndex(database, code, clock, { name })
1375
+ static fromJSON(database, { code, clock, name }) {
1376
+ return new DbIndex(database, code, clock, { name });
1856
1377
  }
1857
-
1858
1378
  /**
1859
1379
  * JSDoc for Query type.
1860
1380
  * @typedef {Object} DbQuery
1861
1381
  * @property {string[]} [range] - The range to query.
1862
1382
  * @memberof DbIndex
1863
1383
  */
1864
-
1865
1384
  /**
1866
1385
  * Query object can have {range}
1867
1386
  * @param {DbQuery} query - the query range to use
@@ -1869,55 +1388,37 @@ class DbIndex {
1869
1388
  * @memberof DbIndex
1870
1389
  * @instance
1871
1390
  */
1872
- async query (query, update = true) {
1873
- // const callId = Math.random().toString(36).substring(2, 7)
1874
- // todo pass a root to query a snapshot
1875
- // console.time(callId + '.updateIndex')
1391
+ async query(query, update = true) {
1876
1392
  update && await this.updateIndex(this.database.indexBlocks);
1877
- // console.timeEnd(callId + '.updateIndex')
1878
- // console.time(callId + '.doIndexQuery')
1879
- // console.log('query', query)
1880
1393
  const response = await doIndexQuery(this.database.indexBlocks, this.indexByKey, query);
1881
- // console.timeEnd(callId + '.doIndexQuery')
1882
1394
  return {
1883
1395
  proof: { index: await cidsToProof(response.cids) },
1884
1396
  rows: response.result.map(({ id, key, row }) => {
1885
- return ({ id, key: charwise.decode(key), value: row })
1397
+ return { id, key: charwise.decode(key), value: row };
1886
1398
  })
1887
- }
1399
+ };
1888
1400
  }
1889
-
1890
1401
  /**
1891
1402
  * Update the DbIndex with the latest changes
1892
1403
  * @private
1893
1404
  * @returns {Promise<void>}
1894
1405
  */
1895
-
1896
- async updateIndex (blocks) {
1897
- // todo this could enqueue the request and give fresh ones to all second comers -- right now it gives out stale promises while working
1898
- // what would it do in a world where all indexes provide a database snapshot to query?
1899
- if (this.updateIndexPromise) return this.updateIndexPromise
1406
+ async updateIndex(blocks) {
1407
+ if (this.updateIndexPromise)
1408
+ return this.updateIndexPromise;
1900
1409
  this.updateIndexPromise = this.innerUpdateIndex(blocks);
1901
- this.updateIndexPromise.finally(() => { this.updateIndexPromise = null; });
1902
- return this.updateIndexPromise
1410
+ this.updateIndexPromise.finally(() => {
1411
+ this.updateIndexPromise = null;
1412
+ });
1413
+ return this.updateIndexPromise;
1903
1414
  }
1904
-
1905
- async innerUpdateIndex (inBlocks) {
1906
- // console.log('dbHead', this.dbHead)
1907
- // console.time(callTag + '.changesSince')
1908
- const result = await this.database.changesSince(this.dbHead); // {key, value, del}
1909
- // console.timeEnd(callTag + '.changesSince')
1910
- // console.log('result.rows.length', result.rows.length)
1911
-
1912
- // console.time(callTag + '.doTransactionupdateIndex')
1913
- // console.log('updateIndex changes length', result.rows.length)
1914
-
1415
+ async innerUpdateIndex(inBlocks) {
1416
+ const result = await this.database.changesSince(this.dbHead);
1915
1417
  if (result.rows.length === 0) {
1916
- // console.log('updateIndex < no changes', result.clock)
1917
1418
  this.dbHead = result.clock;
1918
- return
1419
+ return;
1919
1420
  }
1920
- await doTransaction('updateIndex', inBlocks, async (blocks) => {
1421
+ await doTransaction("updateIndex", inBlocks, async (blocks) => {
1921
1422
  let oldIndexEntries = [];
1922
1423
  let removeByIdIndexEntries = [];
1923
1424
  await loadIndex(blocks, this.indexById, idIndexOpts);
@@ -1928,7 +1429,7 @@ class DbIndex {
1928
1429
  removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }));
1929
1430
  }
1930
1431
  if (!this.mapFn) {
1931
- throw new Error('No live map function installed for index, cannot update. Make sure your index definition runs before any queries.' + (this.mapFnString ? ' Your code should match the stored map function source:\n' + this.mapFnString : ''))
1432
+ throw new Error("No live map function installed for index, cannot update. Make sure your index definition runs before any queries." + (this.mapFnString ? " Your code should match the stored map function source:\n" + this.mapFnString : ""));
1932
1433
  }
1933
1434
  const indexEntries = indexEntriesForChanges(result.rows, this.mapFn);
1934
1435
  const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }));
@@ -1936,21 +1437,12 @@ class DbIndex {
1936
1437
  this.indexByKey = await bulkIndex(blocks, this.indexByKey, oldIndexEntries.concat(indexEntries), dbIndexOpts);
1937
1438
  this.dbHead = result.clock;
1938
1439
  });
1939
- this.database.notifyExternal('dbIndex');
1940
- // console.timeEnd(callTag + '.doTransactionupdateIndex')
1941
- // console.log(`updateIndex ${callTag} <`, this.instanceId, this.dbHead?.toString(), this.indexByKey.cid?.toString(), this.indexById.cid?.toString())
1440
+ this.database.notifyExternal("dbIndex");
1942
1441
  }
1943
1442
  }
1944
-
1945
- /**
1946
- * Update the DbIndex with the given entries
1947
- * @param {import('./blockstore.js').Blockstore} blocks
1948
- * @param {{root, cid}} inIndex
1949
- * @param {DbIndexEntry[]} indexEntries
1950
- * @private
1951
- */
1952
- async function bulkIndex (blocks, inIndex, indexEntries, opts) {
1953
- if (!indexEntries.length) return inIndex
1443
+ async function bulkIndex(blocks, inIndex, indexEntries, opts) {
1444
+ if (!indexEntries.length)
1445
+ return inIndex;
1954
1446
  const putBlock = blocks.put.bind(blocks);
1955
1447
  const { getBlock } = makeGetBlock(blocks);
1956
1448
  let returnRootBlock;
@@ -1964,7 +1456,7 @@ async function bulkIndex (blocks, inIndex, indexEntries, opts) {
1964
1456
  returnRootBlock = block;
1965
1457
  returnNode = node;
1966
1458
  }
1967
- return { root: returnNode, cid: returnRootBlock.cid }
1459
+ return { root: returnNode, cid: returnRootBlock.cid };
1968
1460
  }
1969
1461
  inIndex.root = await dbIndex.load({ cid, get: getBlock, ...dbIndexOpts });
1970
1462
  }
@@ -1975,74 +1467,53 @@ async function bulkIndex (blocks, inIndex, indexEntries, opts) {
1975
1467
  await putBlock(block.cid, block.bytes);
1976
1468
  }
1977
1469
  await putBlock(returnRootBlock.cid, returnRootBlock.bytes);
1978
- return { root: returnNode, cid: returnRootBlock.cid }
1470
+ return { root: returnNode, cid: returnRootBlock.cid };
1979
1471
  }
1980
-
1981
- async function loadIndex (blocks, index, indexOpts) {
1472
+ async function loadIndex(blocks, index, indexOpts) {
1982
1473
  if (!index.root) {
1983
1474
  const cid = index.cid;
1984
- if (!cid) return
1475
+ if (!cid)
1476
+ return;
1985
1477
  const { getBlock } = makeGetBlock(blocks);
1986
1478
  index.root = await dbIndex.load({ cid, get: getBlock, ...indexOpts });
1987
1479
  }
1988
- return index.root
1480
+ return index.root;
1989
1481
  }
1990
-
1991
- async function applyLimit (results, limit) {
1482
+ async function applyLimit(results, limit) {
1992
1483
  results.result = results.result.slice(0, limit);
1993
- return results
1484
+ return results;
1994
1485
  }
1995
-
1996
- async function doIndexQuery (blocks, indexByKey, query = {}) {
1486
+ async function doIndexQuery(blocks, indexByKey, query = {}) {
1997
1487
  await loadIndex(blocks, indexByKey, dbIndexOpts);
1998
- if (!indexByKey.root) return { result: [] }
1488
+ if (!indexByKey.root)
1489
+ return { result: [] };
1999
1490
  if (query.range) {
2000
1491
  const encodedRange = query.range.map((key) => charwise.encode(key));
2001
- return applyLimit(await indexByKey.root.range(...encodedRange), query.limit)
1492
+ return applyLimit(await indexByKey.root.range(...encodedRange), query.limit);
2002
1493
  } else if (query.key) {
2003
1494
  const encodedKey = charwise.encode(query.key);
2004
- return indexByKey.root.get(encodedKey)
1495
+ return indexByKey.root.get(encodedKey);
2005
1496
  } else {
2006
1497
  const { result, ...all } = await indexByKey.root.getAllEntries();
2007
- return applyLimit({ result: result.map(({ key: [k, id], value }) => ({ key: k, id, row: value })), ...all }, query.limit)
1498
+ return applyLimit({ result: result.map(({ key: [k, id], value }) => ({ key: k, id, row: value })), ...all }, query.limit);
2008
1499
  }
2009
1500
  }
2010
1501
 
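The DbIndex hunks above show the map/query cycle: `indexEntriesForChanges` feeds each changed document to the map function's `(doc, emit)` callback, `updateIndex` folds the results into the by-id and by-key prolly indexes inside a single transaction, and `query` refreshes the index on demand before reading it. The bundle exports the class as `Index`. A hedged sketch, with made-up document fields:

    // Sketch only: `db` is a Fireproof instance and the document fields are illustrative.
    const byAge = new Index(db, (doc, emit) => emit(doc.age, doc.name)); // entries with undefined key or value are skipped
    const { rows, proof } = await byAge.query({ range: [30, 40], limit: 10 });
    // rows => [{ id, key, value }] with charwise-decoded keys; proof.index lists the index CIDs touched
    await byAge.query({ range: [30, 40] }, false); // second argument false skips updateIndex and reads the stored index as-is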
2011
- // @ts-nocheck
2012
- /**
2013
- * A Fireproof database Listener allows you to react to events in the database.
2014
- *
2015
- * @class Listener
2016
- * @classdesc An listener attaches to a Fireproof database and runs a routing function on each change, sending the results to subscribers.
2017
- *
2018
- * @param {Fireproof} database - The Fireproof database instance to index.
2019
- * @param {Function} routingFn - The routing function to apply to each entry in the database.
2020
- */
2021
- // import { ChangeEvent } from './db-index'
2022
-
2023
1502
  class Listener {
2024
- subcribers = new Map()
2025
- doStopListening = null
2026
-
2027
- constructor (database, routingFn) {
2028
- /** routingFn
2029
- * The database instance to index.
2030
- * @type {Fireproof}
2031
- */
1503
+ subcribers = /* @__PURE__ */ new Map();
1504
+ doStopListening = null;
1505
+ /**
1506
+ * @param {import('./fireproof.js').Fireproof} database
1507
+ * @param {(_: any, emit: any) => void} routingFn
1508
+ */
1509
+ constructor(database, routingFn) {
2032
1510
  this.database = database;
2033
- this.doStopListening = database.registerListener(changes => this.onChanges(changes));
2034
- /**
2035
- * The map function to apply to each entry in the database.
2036
- * @type {Function}
2037
- */
2038
- this.routingFn =
2039
- routingFn ||
2040
- function (_, emit) {
2041
- emit('*');
2042
- };
1511
+ this.doStopListening = database.registerListener((changes) => this.onChanges(changes));
1512
+ this.routingFn = routingFn || function(_, emit) {
1513
+ emit("*");
1514
+ };
2043
1515
  this.dbHead = null;
2044
1516
  }
2045
-
2046
1517
  /**
2047
1518
  * Subscribe to a topic emitted by the event function.
2048
1519
  * @param {string} topic - The topic to subscribe to.
@@ -2050,115 +1521,112 @@ class Listener {
2050
1521
  * @returns {Function} A function to unsubscribe from the topic.
2051
1522
  * @memberof Listener
2052
1523
  * @instance
1524
+ * @param {any} since
2053
1525
  */
2054
- on (topic, subscriber, since) {
1526
+ on(topic, subscriber, since) {
2055
1527
  const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
2056
1528
  listOfTopicSubscribers.push(subscriber);
2057
- if (typeof since !== 'undefined') {
1529
+ if (typeof since !== "undefined") {
2058
1530
  this.database.changesSince(since).then(({ rows: changes }) => {
2059
1531
  const keys = topicsForChanges(changes, this.routingFn).get(topic);
2060
- if (keys) keys.forEach(key => subscriber(key));
1532
+ if (keys)
1533
+ keys.forEach((key) => subscriber(key));
2061
1534
  });
2062
1535
  }
2063
1536
  return () => {
2064
1537
  const index = listOfTopicSubscribers.indexOf(subscriber);
2065
- if (index > -1) listOfTopicSubscribers.splice(index, 1);
2066
- }
1538
+ if (index > -1)
1539
+ listOfTopicSubscribers.splice(index, 1);
1540
+ };
2067
1541
  }
2068
-
2069
- onChanges (changes) {
1542
+ /**
1543
+ * @typedef {import('./db-index').ChangeEvent} ChangeEvent
1544
+ */
1545
+ /**
1546
+ * @param {ChangeEvent[]} changes
1547
+ */
1548
+ onChanges(changes) {
2070
1549
  if (Array.isArray(changes)) {
2071
1550
  const seenTopics = topicsForChanges(changes, this.routingFn);
2072
1551
  for (const [topic, keys] of seenTopics) {
2073
1552
  const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
2074
- listOfTopicSubscribers.forEach(subscriber => keys.forEach(key => subscriber(key)));
1553
+ listOfTopicSubscribers.forEach(
1554
+ (subscriber) => keys.forEach((key) => subscriber(key))
1555
+ );
2075
1556
  }
2076
1557
  } else {
2077
- // non-arrays go to all subscribers
2078
1558
  for (const [, listOfTopicSubscribers] of this.subcribers) {
2079
- listOfTopicSubscribers.forEach(subscriber => subscriber(changes));
1559
+ listOfTopicSubscribers.forEach((subscriber) => subscriber(changes));
2080
1560
  }
2081
1561
  }
2082
1562
  }
2083
1563
  }
2084
-
2085
- function getTopicList (subscribersMap, name) {
1564
+ function getTopicList(subscribersMap, name) {
2086
1565
  let topicList = subscribersMap.get(name);
2087
1566
  if (!topicList) {
2088
1567
  topicList = [];
2089
1568
  subscribersMap.set(name, topicList);
2090
1569
  }
2091
- return topicList
1570
+ return topicList;
2092
1571
  }
2093
-
2094
- /**
2095
- * Transforms a set of changes to events using an emitter function.
2096
- *
2097
- * @param {ChangeEvent[]} changes
2098
- * @param {Function} routingFn
2099
- * @returns {Array<string>} The topics emmitted by the event function.
2100
- * @private
2101
- */
2102
1572
  const topicsForChanges = (changes, routingFn) => {
2103
- const seenTopics = new Map();
1573
+ const seenTopics = /* @__PURE__ */ new Map();
2104
1574
  changes.forEach(({ key, value, del }) => {
2105
- if (del || !value) value = { _deleted: true };
2106
- routingFn(({ _id: key, ...value }), t => {
1575
+ if (del || !value)
1576
+ value = { _deleted: true };
1577
+ routingFn({ _id: key, ...value }, (t) => {
2107
1578
  const topicList = getTopicList(seenTopics, t);
2108
1579
  topicList.push(key);
2109
1580
  });
2110
1581
  });
2111
- return seenTopics
1582
+ return seenTopics;
2112
1583
  };
2113
1584
 
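The Listener above is a small fan-out layer: every committed change is pushed through the routing function, which `emit`s topic names, and subscribers registered with `on` receive the changed keys for their topic (non-array notifications such as `_reset` go to all subscribers as-is). A usage sketch; the topic names are illustrative:

    // Sketch only: route committed changes onto topics.
    const listener = new Listener(db, (doc, emit) => {
      emit('*');                          // the default routing function emits only '*'
      if (doc.done) emit('completed');
    });
    const off = listener.on('completed', (key) => console.log('completed:', key));
    // A third `since` argument replays matching keys from changesSince(since) before going live.
    off();                                // unsubscribe this handler from the topic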
2114
- const parseCID = cid => typeof cid === 'string' ? multiformats.CID.parse(cid) : cid;
2115
-
1585
+ const parseCID = (cid) => typeof cid === "string" ? multiformats.CID.parse(cid) : cid;
2116
1586
  class Hydrator {
2117
- static fromJSON (json, database) {
2118
- database.hydrate({ clock: json.clock.map(c => parseCID(c)), name: json.name, key: json.key });
1587
+ static fromJSON(json, database) {
1588
+ database.hydrate({ clock: json.clock.map((c) => parseCID(c)), name: json.name, key: json.key });
2119
1589
  if (json.indexes) {
2120
1590
  for (const { name, code, clock: { byId, byKey, db } } of json.indexes) {
2121
1591
  DbIndex.fromJSON(database, {
2122
1592
  clock: {
2123
1593
  byId: byId ? parseCID(byId) : null,
2124
1594
  byKey: byKey ? parseCID(byKey) : null,
2125
- db: db ? db.map(c => parseCID(c)) : null
1595
+ db: db ? db.map((c) => parseCID(c)) : null
2126
1596
  },
2127
1597
  code,
2128
1598
  name
2129
1599
  });
2130
1600
  }
2131
1601
  }
2132
- return database
1602
+ return database;
2133
1603
  }
2134
-
2135
- static snapshot (database, clock) {
1604
+ static snapshot(database, clock) {
2136
1605
  const definition = database.toJSON();
2137
1606
  const withBlocks = new Fireproof(database.blocks);
2138
1607
  if (clock) {
2139
- definition.clock = clock.map(c => parseCID(c));
2140
- definition.indexes.forEach(index => {
1608
+ definition.clock = clock.map((c) => parseCID(c));
1609
+ definition.indexes.forEach((index) => {
2141
1610
  index.clock.byId = null;
2142
1611
  index.clock.byKey = null;
2143
1612
  index.clock.db = null;
2144
1613
  });
2145
1614
  }
2146
- const snappedDb = this.fromJSON(definition, withBlocks)
2147
- ;([...database.indexes.values()]).forEach(index => {
1615
+ const snappedDb = this.fromJSON(definition, withBlocks);
1616
+ [...database.indexes.values()].forEach((index) => {
2148
1617
  snappedDb.indexes.get(index.mapFnString).mapFn = index.mapFn;
2149
1618
  });
2150
- return snappedDb
1619
+ return snappedDb;
2151
1620
  }
2152
-
2153
- static async zoom (database, clock) {
2154
- ([...database.indexes.values()]).forEach(index => {
1621
+ static async zoom(database, clock) {
1622
+ [...database.indexes.values()].forEach((index) => {
2155
1623
  index.indexById = { root: null, cid: null };
2156
1624
  index.indexByKey = { root: null, cid: null };
2157
1625
  index.dbHead = null;
2158
1626
  });
2159
- database.clock = clock.map(c => parseCID(c));
2160
- await database.notifyReset(); // hmm... indexes should listen to this? might be more complex than worth it. so far this is the only caller
2161
- return database
1627
+ database.clock = clock.map((c) => parseCID(c));
1628
+ await database.notifyReset();
1629
+ return database;
2162
1630
  }
2163
1631
  }
2164
1632
 
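Hydrator closes the loop on `toJSON`: `fromJSON` restores the clock, key material, and index definitions onto a database, `snapshot` builds a parallel instance over the same blockstore (optionally pinned to an earlier clock, with index heads cleared so they rebuild), and `zoom` rewinds the live database, resets its indexes, and notifies listeners. A sketch, assuming the named blockstore for `db.name` still holds the referenced blocks and that `earlierClock` is a clock captured from a previous result:

    // Sketch only: persist a database definition and come back to it later.
    const saved = JSON.stringify(db.toJSON());                      // { clock, name, key, indexes }
    const restored = Hydrator.fromJSON(JSON.parse(saved), Fireproof.storage(db.name));
    const past = Hydrator.snapshot(db, earlierClock);               // parallel instance pinned to earlierClock
    await Hydrator.zoom(db, earlierClock);                          // rewind db itself and reset its index heads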
@@ -2166,3 +1634,4 @@ exports.Fireproof = Fireproof;
2166
1634
  exports.Hydrator = Hydrator;
2167
1635
  exports.Index = DbIndex;
2168
1636
  exports.Listener = Listener;
1637
+ //# sourceMappingURL=index.js.map