@fireproof/core 0.3.12 → 0.3.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/bundle.js +1933 -1917
  2. package/package.json +1 -1
package/dist/bundle.js CHANGED
@@ -1,2152 +1,2168 @@
- (function (exports, crypto, Block, sha2, dagcbor, utils, map, cache, link, multiformats, car, cid, CBW, raw, idb, cargoQueue, codec, cidSet, buffer, charwise, dbIndex) {
- 'use strict';
-
- function _interopNamespaceDefault(e) {
- var n = Object.create(null);
- if (e) {
- Object.keys(e).forEach(function (k) {
- if (k !== 'default') {
- var d = Object.getOwnPropertyDescriptor(e, k);
- Object.defineProperty(n, k, d.get ? d : {
- enumerable: true,
- get: function () { return e[k]; }
- });
- }
- });
- }
- n.default = e;
- return Object.freeze(n);
- }
-
- var Block__namespace = /*#__PURE__*/_interopNamespaceDefault(Block);
- var dagcbor__namespace = /*#__PURE__*/_interopNamespaceDefault(dagcbor);
- var CBW__namespace = /*#__PURE__*/_interopNamespaceDefault(CBW);
- var raw__namespace = /*#__PURE__*/_interopNamespaceDefault(raw);
- var codec__namespace = /*#__PURE__*/_interopNamespaceDefault(codec);
-
- // @ts-nocheck
-
- /**
- * @template T
- * @typedef {{ parents: EventLink<T>[], data: T }} EventView
- */
-
- /**
- * @template T
- * @typedef {import('multiformats').BlockView<EventView<T>>} EventBlockView
- */
-
- /**
- * @template T
- * @typedef {import('multiformats').Link<EventView<T>>} EventLink
- */
-
- /**
- * Advance the clock by adding an event.
- *
- * @template T
- * @param {import('./blockstore').BlockFetcher} blocks Block storage.
- * @param {EventLink<T>[]} head The head of the clock.
- * @param {EventLink<T>} event The event to add.
- * @returns {Promise<EventLink<T>[]>} The new head of the clock.
- */
- async function advance (blocks, head, event) {
- /** @type {EventFetcher<T>} */
- const events = new EventFetcher(blocks);
- const headmap = new Map(head.map((cid) => [cid.toString(), cid]));
-
- // Check if the headmap already includes the event, return head if it does
- if (headmap.has(event.toString())) return { head, cids: events.cids }
-
- // Does event contain the clock?
- let changed = false;
- for (const cid of head) {
- if (await contains(events, event, cid)) {
- headmap.delete(cid.toString());
- headmap.set(event.toString(), event);
- changed = true;
- }
- }
-
- // If the headmap has been changed, return the new headmap values
- if (changed) {
- return { head: [...headmap.values()], cids: events.cids }
- }
-
- // Does clock contain the event?
- for (const p of head) {
- if (await contains(events, p, event)) {
- return { head, cids: events.cids }
+ 'use strict';
+
+ var crypto = require('crypto');
+ var Block = require('multiformats/block');
+ var sha2 = require('multiformats/hashes/sha2');
+ var dagcbor = require('@ipld/dag-cbor');
+ var utils = require('prolly-trees/utils');
+ var map = require('prolly-trees/map');
+ var cache = require('prolly-trees/cache');
+ var link = require('multiformats/link');
+ var multiformats = require('multiformats');
+ var car = require('@ipld/car');
+ var cid = require('multiformats/cid');
+ var CBW = require('@ipld/car/buffer-writer');
+ var raw = require('multiformats/codecs/raw');
+ var idb = require('idb');
+ var cargoQueue = require('async/cargoQueue.js');
+ var codec = require('encrypted-block');
+ var cidSet = require('prolly-trees/cid-set');
+ var buffer = require('buffer');
+ var charwise = require('charwise');
+ var dbIndex = require('prolly-trees/db-index');
+
+ function _interopNamespaceDefault(e) {
+ var n = Object.create(null);
+ if (e) {
+ Object.keys(e).forEach(function (k) {
+ if (k !== 'default') {
+ var d = Object.getOwnPropertyDescriptor(e, k);
+ Object.defineProperty(n, d.get ? d : {
+ enumerable: true,
+ get: function () { return e[k]; }
+ });
80
34
  }
81
- }
82
-
83
- // Return the head concatenated with the new event if it passes both checks
84
- return { head: head.concat(event), cids: events.cids }
35
+ });
85
36
  }
86
-
87
- /**
88
- * @template T
89
- * @implements {EventBlockView<T>}
90
- */
91
- class EventBlock extends Block.Block {
92
- /**
93
- * @param {object} config
94
- * @param {EventLink<T>} config.cid
95
- * @param {Event} config.value
96
- * @param {Uint8Array} config.bytes
97
- */
98
- constructor ({ cid, value, bytes }) {
99
- // @ts-expect-error
100
- super({ cid, value, bytes });
101
- }
102
-
103
- /**
104
- * @template T
105
- * @param {T} data
106
- * @param {EventLink<T>[]} [parents]
107
- */
108
- static create (data, parents) {
109
- return encodeEventBlock({ data, parents: parents ?? [] })
37
+ n.default = e;
38
+ return Object.freeze(n);
39
+ }
40
+
41
+ var Block__namespace = /*#__PURE__*/_interopNamespaceDefault(Block);
42
+ var dagcbor__namespace = /*#__PURE__*/_interopNamespaceDefault(dagcbor);
43
+ var CBW__namespace = /*#__PURE__*/_interopNamespaceDefault(CBW);
44
+ var raw__namespace = /*#__PURE__*/_interopNamespaceDefault(raw);
45
+ var codec__namespace = /*#__PURE__*/_interopNamespaceDefault(codec);
46
+
47
+ // @ts-nocheck
48
+
49
+ /**
50
+ * @template T
51
+ * @typedef {{ parents: EventLink<T>[], data: T }} EventView
52
+ */
53
+
54
+ /**
55
+ * @template T
56
+ * @typedef {import('multiformats').BlockView<EventView<T>>} EventBlockView
57
+ */
58
+
59
+ /**
60
+ * @template T
61
+ * @typedef {import('multiformats').Link<EventView<T>>} EventLink
62
+ */
63
+
64
+ /**
65
+ * Advance the clock by adding an event.
66
+ *
67
+ * @template T
68
+ * @param {import('./blockstore').BlockFetcher} blocks Block storage.
69
+ * @param {EventLink<T>[]} head The head of the clock.
70
+ * @param {EventLink<T>} event The event to add.
71
+ * @returns {Promise<EventLink<T>[]>} The new head of the clock.
72
+ */
73
+ async function advance (blocks, head, event) {
74
+ /** @type {EventFetcher<T>} */
75
+ const events = new EventFetcher(blocks);
76
+ const headmap = new Map(head.map((cid) => [cid.toString(), cid]));
77
+
78
+ // Check if the headmap already includes the event, return head if it does
79
+ if (headmap.has(event.toString())) return { head, cids: events.cids }
80
+
81
+ // Does event contain the clock?
82
+ let changed = false;
83
+ for (const cid of head) {
84
+ if (await contains(events, event, cid)) {
85
+ headmap.delete(cid.toString());
86
+ headmap.set(event.toString(), event);
87
+ changed = true;
110
88
  }
111
89
  }
112
90
 
113
- /** @template T */
114
- class EventFetcher {
115
- /** @param {import('./blockstore').BlockFetcher} blocks */
116
- constructor (blocks) {
117
- /** @private */
118
- this._blocks = blocks;
119
- this._cids = new utils.CIDCounter();
120
- this._cache = new Map();
121
- }
122
-
123
- /**
124
- * @param {EventLink<T>} link
125
- * @returns {Promise<EventBlockView<T>>}
126
- */
127
- async get (link) {
128
- const slink = link.toString();
129
- // console.log('get', link.toString())
130
- if (this._cache.has(slink)) return this._cache.get(slink)
131
- const block = await this._blocks.get(link);
132
- this._cids.add({ address: link });
133
- if (!block) throw new Error(`missing block: ${link}`)
134
- const got = decodeEventBlock(block.bytes);
135
- this._cache.set(slink, got);
136
- return got
137
- }
91
+ // If the headmap has been changed, return the new headmap values
92
+ if (changed) {
93
+ return { head: [...headmap.values()], cids: events.cids }
94
+ }
138
95
 
139
- async all () {
140
- await Promise.all([...this._cids]);
141
- return this._cids
96
+ // Does clock contain the event?
97
+ for (const p of head) {
98
+ if (await contains(events, p, event)) {
99
+ return { head, cids: events.cids }
142
100
  }
143
101
  }
144
102
 
145
- /**
146
- * @template T
147
- * @param {EventView<T>} value
148
- * @returns {Promise<EventBlockView<T>>}
149
- */
150
- async function encodeEventBlock (value) {
151
- // TODO: sort parents
152
- const { cid, bytes } = await Block.encode({ value, codec: dagcbor__namespace, hasher: sha2.sha256 });
153
- // @ts-expect-error
154
- return new Block.Block({ cid, value, bytes })
155
- }
103
+ // Return the head concatenated with the new event if it passes both checks
104
+ return { head: head.concat(event), cids: events.cids }
105
+ }
156
106
 
107
+ /**
108
+ * @template T
109
+ * @implements {EventBlockView<T>}
110
+ */
111
+ class EventBlock extends Block.Block {
157
112
  /**
158
- * @template T
159
- * @param {Uint8Array} bytes
160
- * @returns {Promise<EventBlockView<T>>}
113
+ * @param {object} config
114
+ * @param {EventLink<T>} config.cid
115
+ * @param {Event} config.value
116
+ * @param {Uint8Array} config.bytes
161
117
  */
162
- async function decodeEventBlock (bytes) {
163
- const { cid, value } = await Block.decode({ bytes, codec: dagcbor__namespace, hasher: sha2.sha256 });
118
+ constructor ({ cid, value, bytes }) {
164
119
  // @ts-expect-error
165
- return new Block.Block({ cid, value, bytes })
120
+ super({ cid, value, bytes });
166
121
  }
167
122
 
168
123
  /**
169
- * Returns true if event "a" contains event "b". Breadth first search.
170
124
  * @template T
171
- * @param {EventFetcher} events
172
- * @param {EventLink<T>} a
173
- * @param {EventLink<T>} b
125
+ * @param {T} data
126
+ * @param {EventLink<T>[]} [parents]
174
127
  */
175
- async function contains (events, a, b) {
176
- if (a.toString() === b.toString()) return true
177
- const [{ value: aevent }, { value: bevent }] = await Promise.all([events.get(a), events.get(b)]);
178
- const links = [...aevent.parents];
179
- while (links.length) {
180
- const link = links.shift();
181
- if (!link) break
182
- if (link.toString() === b.toString()) return true
183
- // if any of b's parents are this link, then b cannot exist in any of the
184
- // tree below, since that would create a cycle.
185
- if (bevent.parents.some((p) => link.toString() === p.toString())) continue
186
- const { value: event } = await events.get(link);
187
- links.push(...event.parents);
188
- }
189
- return false
128
+ static create (data, parents) {
129
+ return encodeEventBlock({ data, parents: parents ?? [] })
130
+ }
131
+ }
132
+
133
+ /** @template T */
134
+ class EventFetcher {
135
+ /** @param {import('./blockstore').BlockFetcher} blocks */
136
+ constructor (blocks) {
137
+ /** @private */
138
+ this._blocks = blocks;
139
+ this._cids = new utils.CIDCounter();
140
+ this._cache = new Map();
190
141
  }
191
142
 
192
143
  /**
193
- * @template T
194
- * @param {import('./blockstore').BlockFetcher} blocks Block storage.
195
- * @param {EventLink<T>[]} head
196
- * @param {object} [options]
197
- * @param {(b: EventBlockView<T>) => string} [options.renderNodeLabel]
144
+ * @param {EventLink<T>} link
145
+ * @returns {Promise<EventBlockView<T>>}
198
146
  */
199
- async function * vis$1 (blocks, head, options = {}) {
200
- const renderNodeLabel = options.renderNodeLabel ?? ((b) => b.value.data.value);
201
- const events = new EventFetcher(blocks);
202
- yield 'digraph clock {';
203
- yield ' node [shape=point fontname="Courier"]; head;';
204
- const hevents = await Promise.all(head.map((link) => events.get(link)));
205
- const links = [];
206
- const nodes = new Set();
207
- for (const e of hevents) {
208
- nodes.add(e.cid.toString());
209
- yield ` node [shape=oval fontname="Courier"]; ${e.cid} [label="${renderNodeLabel(e)}"];`;
210
- yield ` head -> ${e.cid};`;
211
- for (const p of e.value.parents) {
212
- yield ` ${e.cid} -> ${p};`;
213
- }
214
- links.push(...e.value.parents);
215
- }
216
- while (links.length) {
217
- const link = links.shift();
218
- if (!link) break
219
- if (nodes.has(link.toString())) continue
220
- nodes.add(link.toString());
221
- const block = await events.get(link);
222
- yield ` node [shape=oval]; ${link} [label="${renderNodeLabel(block)}" fontname="Courier"];`;
223
- for (const p of block.value.parents) {
224
- yield ` ${link} -> ${p};`;
225
- }
226
- links.push(...block.value.parents);
227
- }
228
- yield '}';
147
+ async get (link) {
148
+ const slink = link.toString();
149
+ // console.log('get', link.toString())
150
+ if (this._cache.has(slink)) return this._cache.get(slink)
151
+ const block = await this._blocks.get(link);
152
+ this._cids.add({ address: link });
153
+ if (!block) throw new Error(`missing block: ${link}`)
154
+ const got = decodeEventBlock(block.bytes);
155
+ this._cache.set(slink, got);
156
+ return got
229
157
  }
230
158
 
231
- async function findEventsToSync (blocks, head) {
232
- // const callTag = Math.random().toString(36).substring(7)
233
- const events = new EventFetcher(blocks);
234
- // console.time(callTag + '.findCommonAncestorWithSortedEvents')
235
- const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
236
- // console.timeEnd(callTag + '.findCommonAncestorWithSortedEvents')
237
- // console.log('sorted', sorted.length)
238
- // console.time(callTag + '.contains')
239
- const toSync = await asyncFilter(sorted, async (uks) => !(await contains(events, ancestor, uks.cid)));
240
- // console.timeEnd(callTag + '.contains')
241
-
242
- return { cids: events.cids, events: toSync }
159
+ async all () {
160
+ await Promise.all([...this._cids]);
161
+ return this._cids
243
162
  }
244
-
245
- const asyncFilter = async (arr, predicate) =>
246
- Promise.all(arr.map(predicate)).then((results) => arr.filter((_v, index) => results[index]));
247
-
248
- async function findCommonAncestorWithSortedEvents (events, children) {
249
- // const callTag = Math.random().toString(36).substring(7)
250
- // console.time(callTag + '.findCommonAncestor')
251
- const ancestor = await findCommonAncestor(events, children);
252
- // console.timeEnd(callTag + '.findCommonAncestor')
253
- if (!ancestor) {
254
- throw new Error('failed to find common ancestor event')
255
- }
256
- // console.time(callTag + '.findSortedEvents')
257
- const sorted = await findSortedEvents(events, children, ancestor);
258
- // console.timeEnd(callTag + '.findSortedEvents')
259
- return { ancestor, sorted }
163
+ }
164
+
165
+ /**
166
+ * @template T
167
+ * @param {EventView<T>} value
168
+ * @returns {Promise<EventBlockView<T>>}
169
+ */
170
+ async function encodeEventBlock (value) {
171
+ // TODO: sort parents
172
+ const { cid, bytes } = await Block.encode({ value, codec: dagcbor__namespace, hasher: sha2.sha256 });
173
+ // @ts-expect-error
174
+ return new Block.Block({ cid, value, bytes })
175
+ }
176
+
177
+ /**
178
+ * @template T
179
+ * @param {Uint8Array} bytes
180
+ * @returns {Promise<EventBlockView<T>>}
181
+ */
182
+ async function decodeEventBlock (bytes) {
183
+ const { cid, value } = await Block.decode({ bytes, codec: dagcbor__namespace, hasher: sha2.sha256 });
184
+ // @ts-expect-error
185
+ return new Block.Block({ cid, value, bytes })
186
+ }
187
+
188
+ /**
189
+ * Returns true if event "a" contains event "b". Breadth first search.
190
+ * @template T
191
+ * @param {EventFetcher} events
192
+ * @param {EventLink<T>} a
193
+ * @param {EventLink<T>} b
194
+ */
195
+ async function contains (events, a, b) {
196
+ if (a.toString() === b.toString()) return true
197
+ const [{ value: aevent }, { value: bevent }] = await Promise.all([events.get(a), events.get(b)]);
198
+ const links = [...aevent.parents];
199
+ while (links.length) {
200
+ const link = links.shift();
201
+ if (!link) break
202
+ if (link.toString() === b.toString()) return true
203
+ // if any of b's parents are this link, then b cannot exist in any of the
204
+ // tree below, since that would create a cycle.
205
+ if (bevent.parents.some((p) => link.toString() === p.toString())) continue
206
+ const { value: event } = await events.get(link);
207
+ links.push(...event.parents);
260
208
  }
261
-
262
- /**
263
- * Find the common ancestor event of the passed children. A common ancestor is
264
- * the first single event in the DAG that _all_ paths from children lead to.
265
- *
266
- * @param {import('./clock').EventFetcher} events
267
- * @param {import('./clock').EventLink<EventData>[]} children
268
- */
269
- async function findCommonAncestor (events, children) {
270
- if (!children.length) return
271
- const candidates = children.map((c) => [c]);
272
- while (true) {
273
- let changed = false;
274
- for (const c of candidates) {
275
- const candidate = await findAncestorCandidate(events, c[c.length - 1]);
276
- if (!candidate) continue
277
- changed = true;
278
- c.push(candidate);
279
- const ancestor = findCommonString(candidates);
280
- if (ancestor) return ancestor
281
- }
282
- if (!changed) return
283
- }
209
+ return false
210
+ }
211
+
212
+ /**
213
+ * @template T
214
+ * @param {import('./blockstore').BlockFetcher} blocks Block storage.
215
+ * @param {EventLink<T>[]} head
216
+ * @param {object} [options]
217
+ * @param {(b: EventBlockView<T>) => string} [options.renderNodeLabel]
218
+ */
219
+ async function * vis$1 (blocks, head, options = {}) {
220
+ const renderNodeLabel = options.renderNodeLabel ?? ((b) => b.value.data.value);
221
+ const events = new EventFetcher(blocks);
222
+ yield 'digraph clock {';
223
+ yield ' node [shape=point fontname="Courier"]; head;';
224
+ const hevents = await Promise.all(head.map((link) => events.get(link)));
225
+ const links = [];
226
+ const nodes = new Set();
227
+ for (const e of hevents) {
228
+ nodes.add(e.cid.toString());
229
+ yield ` node [shape=oval fontname="Courier"]; ${e.cid} [label="${renderNodeLabel(e)}"];`;
230
+ yield ` head -> ${e.cid};`;
231
+ for (const p of e.value.parents) {
232
+ yield ` ${e.cid} -> ${p};`;
233
+ }
234
+ links.push(...e.value.parents);
284
235
  }
285
-
286
- /**
287
- * @param {import('./clock').EventFetcher} events
288
- * @param {import('./clock').EventLink<EventData>} root
289
- */
290
- async function findAncestorCandidate (events, root) {
291
- const { value: event } = await events.get(root);
292
- if (!event.parents.length) return root
293
- return event.parents.length === 1 ? event.parents[0] : findCommonAncestor(events, event.parents)
236
+ while (links.length) {
237
+ const link = links.shift();
238
+ if (!link) break
239
+ if (nodes.has(link.toString())) continue
240
+ nodes.add(link.toString());
241
+ const block = await events.get(link);
242
+ yield ` node [shape=oval]; ${link} [label="${renderNodeLabel(block)}" fontname="Courier"];`;
243
+ for (const p of block.value.parents) {
244
+ yield ` ${link} -> ${p};`;
245
+ }
246
+ links.push(...block.value.parents);
294
247
  }
295
-
296
- /**
297
- * @template {{ toString: () => string }} T
298
- * @param {Array<T[]>} arrays
299
- */
300
- function findCommonString (arrays) {
301
- arrays = arrays.map((a) => [...a]);
302
- for (const arr of arrays) {
303
- for (const item of arr) {
304
- let matched = true;
305
- for (const other of arrays) {
306
- if (arr === other) continue
307
- matched = other.some((i) => String(i) === String(item));
308
- if (!matched) break
309
- }
310
- if (matched) return item
311
- }
312
- }
248
+ yield '}';
249
+ }
250
+
251
+ async function findEventsToSync (blocks, head) {
252
+ // const callTag = Math.random().toString(36).substring(7)
253
+ const events = new EventFetcher(blocks);
254
+ // console.time(callTag + '.findCommonAncestorWithSortedEvents')
255
+ const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
256
+ // console.timeEnd(callTag + '.findCommonAncestorWithSortedEvents')
257
+ // console.log('sorted', sorted.length)
258
+ // console.time(callTag + '.contains')
259
+ const toSync = await asyncFilter(sorted, async (uks) => !(await contains(events, ancestor, uks.cid)));
260
+ // console.timeEnd(callTag + '.contains')
261
+
262
+ return { cids: events.cids, events: toSync }
263
+ }
264
+
265
+ const asyncFilter = async (arr, predicate) =>
266
+ Promise.all(arr.map(predicate)).then((results) => arr.filter((_v, index) => results[index]));
267
+
268
+ async function findCommonAncestorWithSortedEvents (events, children) {
269
+ // const callTag = Math.random().toString(36).substring(7)
270
+ // console.time(callTag + '.findCommonAncestor')
271
+ const ancestor = await findCommonAncestor(events, children);
272
+ // console.timeEnd(callTag + '.findCommonAncestor')
273
+ if (!ancestor) {
274
+ throw new Error('failed to find common ancestor event')
313
275
  }
314
-
315
- /**
316
- * Find and sort events between the head(s) and the tail.
317
- * @param {import('./clock').EventFetcher} events
318
- * @param {import('./clock').EventLink<EventData>[]} head
319
- * @param {import('./clock').EventLink<EventData>} tail
320
- */
321
- async function findSortedEvents (events, head, tail) {
322
- // const callTag = Math.random().toString(36).substring(7)
323
- // get weighted events - heavier events happened first
324
- /** @type {Map<string, { event: import('./clock').EventBlockView<EventData>, weight: number }>} */
325
- const weights = new Map();
326
- const all = await Promise.all(head.map((h) => findEvents(events, h, tail)));
327
- for (const arr of all) {
328
- for (const { event, depth } of arr) {
329
- // console.log('event value', event.value.data.value)
330
- const info = weights.get(event.cid.toString());
331
- if (info) {
332
- info.weight += depth;
333
- } else {
334
- weights.set(event.cid.toString(), { event, weight: depth });
335
- }
276
+ // console.time(callTag + '.findSortedEvents')
277
+ const sorted = await findSortedEvents(events, children, ancestor);
278
+ // console.timeEnd(callTag + '.findSortedEvents')
279
+ return { ancestor, sorted }
280
+ }
281
+
282
+ /**
283
+ * Find the common ancestor event of the passed children. A common ancestor is
284
+ * the first single event in the DAG that _all_ paths from children lead to.
285
+ *
286
+ * @param {import('./clock').EventFetcher} events
287
+ * @param {import('./clock').EventLink<EventData>[]} children
288
+ */
289
+ async function findCommonAncestor (events, children) {
290
+ if (!children.length) return
291
+ const candidates = children.map((c) => [c]);
292
+ while (true) {
293
+ let changed = false;
294
+ for (const c of candidates) {
295
+ const candidate = await findAncestorCandidate(events, c[c.length - 1]);
296
+ if (!candidate) continue
297
+ changed = true;
298
+ c.push(candidate);
299
+ const ancestor = findCommonString(candidates);
300
+ if (ancestor) return ancestor
301
+ }
302
+ if (!changed) return
303
+ }
304
+ }
305
+
306
+ /**
307
+ * @param {import('./clock').EventFetcher} events
308
+ * @param {import('./clock').EventLink<EventData>} root
309
+ */
310
+ async function findAncestorCandidate (events, root) {
311
+ const { value: event } = await events.get(root);
312
+ if (!event.parents.length) return root
313
+ return event.parents.length === 1 ? event.parents[0] : findCommonAncestor(events, event.parents)
314
+ }
315
+
316
+ /**
317
+ * @template {{ toString: () => string }} T
318
+ * @param {Array<T[]>} arrays
319
+ */
320
+ function findCommonString (arrays) {
321
+ arrays = arrays.map((a) => [...a]);
322
+ for (const arr of arrays) {
323
+ for (const item of arr) {
324
+ let matched = true;
325
+ for (const other of arrays) {
326
+ if (arr === other) continue
327
+ matched = other.some((i) => String(i) === String(item));
328
+ if (!matched) break
336
329
  }
330
+ if (matched) return item
337
331
  }
338
-
339
- // group events into buckets by weight
340
- /** @type {Map<number, import('./clock').EventBlockView<EventData>[]>} */
341
- const buckets = new Map();
342
- for (const { event, weight } of weights.values()) {
343
- const bucket = buckets.get(weight);
344
- if (bucket) {
345
- bucket.push(event);
332
+ }
333
+ }
334
+
335
+ /**
336
+ * Find and sort events between the head(s) and the tail.
337
+ * @param {import('./clock').EventFetcher} events
338
+ * @param {import('./clock').EventLink<EventData>[]} head
339
+ * @param {import('./clock').EventLink<EventData>} tail
340
+ */
341
+ async function findSortedEvents (events, head, tail) {
342
+ // const callTag = Math.random().toString(36).substring(7)
343
+ // get weighted events - heavier events happened first
344
+ /** @type {Map<string, { event: import('./clock').EventBlockView<EventData>, weight: number }>} */
345
+ const weights = new Map();
346
+ const all = await Promise.all(head.map((h) => findEvents(events, h, tail)));
347
+ for (const arr of all) {
348
+ for (const { event, depth } of arr) {
349
+ // console.log('event value', event.value.data.value)
350
+ const info = weights.get(event.cid.toString());
351
+ if (info) {
352
+ info.weight += depth;
346
353
  } else {
347
- buckets.set(weight, [event]);
354
+ weights.set(event.cid.toString(), { event, weight: depth });
348
355
  }
349
356
  }
350
-
351
- // sort by weight, and by CID within weight
352
- const sorted = Array.from(buckets)
353
- .sort((a, b) => b[0] - a[0])
354
- .flatMap(([, es]) => es.sort((a, b) => (String(a.cid) < String(b.cid) ? -1 : 1)));
355
- // console.log('sorted', sorted.map(s => s.value.data.value))
356
-
357
- return sorted
358
357
  }
359
358
 
360
- /**
361
- * @param {import('./clock').EventFetcher} events
362
- * @param {import('./clock').EventLink<EventData>} start
363
- * @param {import('./clock').EventLink<EventData>} end
364
- * @returns {Promise<Array<{ event: import('./clock').EventBlockView<EventData>, depth: number }>>}
365
- */
366
- async function findEvents (events, start, end, depth = 0) {
367
- // console.log('findEvents', start)
368
- const event = await events.get(start);
369
- const acc = [{ event, depth }];
370
- const { parents } = event.value;
371
- if (parents.length === 1 && String(parents[0]) === String(end)) return acc
372
- const rest = await Promise.all(parents.map((p) => findEvents(events, p, end, depth + 1)));
373
- return acc.concat(...rest)
374
- }
375
-
376
- // @ts-nocheck
377
-
378
- const createBlock = (bytes, cid) => Block.create({ cid, bytes, hasher: sha2.sha256, codec: codec__namespace });
379
-
380
- const encrypt = async function * ({ get, cids, hasher, key, cache, chunker, root }) {
381
- const set = new Set();
382
- let eroot;
383
- for (const string of cids) {
384
- const cid = multiformats.CID.parse(string);
385
- const unencrypted = await get(cid);
386
- const block = await Block.encode({ ...await codec__namespace.encrypt({ ...unencrypted, key }), codec: codec__namespace, hasher });
387
- // console.log(`encrypting ${string} as ${block.cid}`)
388
- yield block;
389
- set.add(block.cid.toString());
390
- if (unencrypted.cid.equals(root)) eroot = block.cid;
391
- }
392
- if (!eroot) throw new Error('cids does not include root')
393
- const list = [...set].map(s => multiformats.CID.parse(s));
394
- let last;
395
- for await (const node of cidSet.create({ list, get, cache, chunker, hasher, codec: dagcbor__namespace })) {
396
- const block = await node.block;
397
- yield block;
398
- last = block;
359
+ // group events into buckets by weight
360
+ /** @type {Map<number, import('./clock').EventBlockView<EventData>[]>} */
361
+ const buckets = new Map();
362
+ for (const { event, weight } of weights.values()) {
363
+ const bucket = buckets.get(weight);
364
+ if (bucket) {
365
+ bucket.push(event);
366
+ } else {
367
+ buckets.set(weight, [event]);
399
368
  }
400
- const head = [eroot, last.cid];
401
- const block = await Block.encode({ value: head, codec: dagcbor__namespace, hasher });
402
- yield block;
403
- };
369
+ }
404
370
 
405
- const decrypt = async function * ({ root, get, key, cache, chunker, hasher }) {
406
- const o = { ...await get(root), codec: dagcbor__namespace, hasher };
407
- const decodedRoot = await Block.decode(o);
408
- // console.log('decodedRoot', decodedRoot)
409
- const { value: [eroot, tree] } = decodedRoot;
410
- const rootBlock = await get(eroot); // should I decrypt?
411
- const cidset = await cidSet.load({ cid: tree, get, cache, chunker, codec: codec__namespace, hasher });
412
- const { result: nodes } = await cidset.getAllEntries();
413
- const unwrap = async (eblock) => {
414
- const { bytes, cid } = await codec__namespace.decrypt({ ...eblock, key }).catch(e => {
415
- console.log('ekey', e);
416
- throw new Error('bad key: ' + key.toString('hex'))
417
- });
418
- const block = await createBlock(bytes, cid);
419
- return block
420
- };
421
- const promises = [];
422
- for (const { cid } of nodes) {
423
- if (!rootBlock.cid.equals(cid)) promises.push(get(cid).then(unwrap));
424
- }
425
- yield * promises;
426
- yield unwrap(rootBlock);
371
+ // sort by weight, and by CID within weight
372
+ const sorted = Array.from(buckets)
373
+ .sort((a, b) => b[0] - a[0])
374
+ .flatMap(([, es]) => es.sort((a, b) => (String(a.cid) < String(b.cid) ? -1 : 1)));
375
+ // console.log('sorted', sorted.map(s => s.value.data.value))
376
+
377
+ return sorted
378
+ }
379
+
380
+ /**
381
+ * @param {import('./clock').EventFetcher} events
382
+ * @param {import('./clock').EventLink<EventData>} start
383
+ * @param {import('./clock').EventLink<EventData>} end
384
+ * @returns {Promise<Array<{ event: import('./clock').EventBlockView<EventData>, depth: number }>>}
385
+ */
386
+ async function findEvents (events, start, end, depth = 0) {
387
+ // console.log('findEvents', start)
388
+ const event = await events.get(start);
389
+ const acc = [{ event, depth }];
390
+ const { parents } = event.value;
391
+ if (parents.length === 1 && String(parents[0]) === String(end)) return acc
392
+ const rest = await Promise.all(parents.map((p) => findEvents(events, p, end, depth + 1)));
393
+ return acc.concat(...rest)
394
+ }
395
+
396
+ // @ts-nocheck
397
+
398
+ const createBlock = (bytes, cid) => Block.create({ cid, bytes, hasher: sha2.sha256, codec: codec__namespace });
399
+
400
+ const encrypt = async function * ({ get, cids, hasher, key, cache, chunker, root }) {
401
+ const set = new Set();
402
+ let eroot;
403
+ for (const string of cids) {
404
+ const cid = multiformats.CID.parse(string);
405
+ const unencrypted = await get(cid);
406
+ const block = await Block.encode({ ...await codec__namespace.encrypt({ ...unencrypted, key }), codec: codec__namespace, hasher });
407
+ // console.log(`encrypting ${string} as ${block.cid}`)
408
+ yield block;
409
+ set.add(block.cid.toString());
410
+ if (unencrypted.cid.equals(root)) eroot = block.cid;
411
+ }
412
+ if (!eroot) throw new Error('cids does not include root')
413
+ const list = [...set].map(s => multiformats.CID.parse(s));
414
+ let last;
415
+ for await (const node of cidSet.create({ list, get, cache, chunker, hasher, codec: dagcbor__namespace })) {
416
+ const block = await node.block;
417
+ yield block;
418
+ last = block;
419
+ }
420
+ const head = [eroot, last.cid];
421
+ const block = await Block.encode({ value: head, codec: dagcbor__namespace, hasher });
422
+ yield block;
423
+ };
424
+
425
+ const decrypt = async function * ({ root, get, key, cache, chunker, hasher }) {
426
+ const o = { ...await get(root), codec: dagcbor__namespace, hasher };
427
+ const decodedRoot = await Block.decode(o);
428
+ // console.log('decodedRoot', decodedRoot)
429
+ const { value: [eroot, tree] } = decodedRoot;
430
+ const rootBlock = await get(eroot); // should I decrypt?
431
+ const cidset = await cidSet.load({ cid: tree, get, cache, chunker, codec: codec__namespace, hasher });
432
+ const { result: nodes } = await cidset.getAllEntries();
433
+ const unwrap = async (eblock) => {
434
+ const { bytes, cid } = await codec__namespace.decrypt({ ...eblock, key }).catch(e => {
435
+ console.log('ekey', e);
436
+ throw new Error('bad key: ' + key.toString('hex'))
437
+ });
438
+ const block = await createBlock(bytes, cid);
439
+ return block
427
440
  };
441
+ const promises = [];
442
+ for (const { cid } of nodes) {
443
+ if (!rootBlock.cid.equals(cid)) promises.push(get(cid).then(unwrap));
444
+ }
445
+ yield * promises;
446
+ yield unwrap(rootBlock);
447
+ };
448
+
449
+ // @ts-nocheck
450
+ // from https://github.com/duzun/sync-sha1/blob/master/rawSha1.js
451
+ // MIT License Copyright (c) 2020 Dumitru Uzun
452
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
453
+ // of this software and associated documentation files (the "Software"), to deal
454
+ // in the Software without restriction, including without limitation the rights
455
+ // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
456
+ // copies of the Software, and to permit persons to whom the Software is
457
+ // furnished to do so, subject to the following conditions:
458
+
459
+ // The above copyright notice and this permission notice shall be included in all
460
+ // copies or substantial portions of the Software.
461
+
462
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
463
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
464
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
465
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
466
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
467
+ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
468
+ // SOFTWARE.
469
+
470
+ // import {
471
+ // isLittleEndian, switchEndianness32
472
+ // } from 'string-encode'
473
+
474
+ /**
475
+ * SHA1 on binary array
476
+ *
477
+ * @param {Uint8Array} b Data to hash
478
+ *
479
+ * @return {Uint8Array} sha1 hash
480
+ */
481
+ function rawSha1 (b) {
482
+ let i = b.byteLength;
483
+ let bs = 0;
484
+ let A; let B; let C; let D; let G;
485
+ const H = Uint32Array.from([A = 0x67452301, B = 0xEFCDAB89, ~A, ~B, 0xC3D2E1F0]);
486
+ const W = new Uint32Array(80);
487
+ const nrWords = (i / 4 + 2) | 15;
488
+ const words = new Uint32Array(nrWords + 1);
489
+ let j;
490
+
491
+ words[nrWords] = i * 8;
492
+ words[i >> 2] |= 0x80 << (~i << 3);
493
+ for (;i--;) {
494
+ words[i >> 2] |= b[i] << (~i << 3);
495
+ }
428
496
 
429
- // @ts-nocheck
430
- // from https://github.com/duzun/sync-sha1/blob/master/rawSha1.js
431
- // MIT License Copyright (c) 2020 Dumitru Uzun
432
- // Permission is hereby granted, free of charge, to any person obtaining a copy
433
- // of this software and associated documentation files (the "Software"), to deal
434
- // in the Software without restriction, including without limitation the rights
435
- // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
436
- // copies of the Software, and to permit persons to whom the Software is
437
- // furnished to do so, subject to the following conditions:
438
-
439
- // The above copyright notice and this permission notice shall be included in all
440
- // copies or substantial portions of the Software.
441
-
442
- // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
443
- // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
444
- // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
445
- // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
446
- // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
447
- // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
448
- // SOFTWARE.
449
-
450
- // import {
451
- // isLittleEndian, switchEndianness32
452
- // } from 'string-encode'
453
-
454
- /**
455
- * SHA1 on binary array
456
- *
457
- * @param {Uint8Array} b Data to hash
458
- *
459
- * @return {Uint8Array} sha1 hash
460
- */
461
- function rawSha1 (b) {
462
- let i = b.byteLength;
463
- let bs = 0;
464
- let A; let B; let C; let D; let G;
465
- const H = Uint32Array.from([A = 0x67452301, B = 0xEFCDAB89, ~A, ~B, 0xC3D2E1F0]);
466
- const W = new Uint32Array(80);
467
- const nrWords = (i / 4 + 2) | 15;
468
- const words = new Uint32Array(nrWords + 1);
469
- let j;
470
-
471
- words[nrWords] = i * 8;
472
- words[i >> 2] |= 0x80 << (~i << 3);
473
- for (;i--;) {
474
- words[i >> 2] |= b[i] << (~i << 3);
475
- }
476
-
477
- for (A = H.slice(); bs < nrWords; bs += 16, A.set(H)) {
478
- for (i = 0; i < 80;
479
- A[0] = (
480
- G = ((b = A[0]) << 5 | b >>> 27) +
481
- A[4] +
482
- (W[i] = (i < 16) ? words[bs + i] : G << 1 | G >>> 31) +
483
- 0x5A827999,
484
- B = A[1],
485
- C = A[2],
486
- D = A[3],
487
- G + ((j = i / 5 >> 2) // eslint-disable-line no-cond-assign
488
- ? j !== 2
489
- ? (B ^ C ^ D) + (j & 2 ? 0x6FE0483D : 0x14577208)
490
- : (B & C | B & D | C & D) + 0x34994343
491
- : B & C | ~B & D
492
- )
497
+ for (A = H.slice(); bs < nrWords; bs += 16, A.set(H)) {
498
+ for (i = 0; i < 80;
499
+ A[0] = (
500
+ G = ((b = A[0]) << 5 | b >>> 27) +
501
+ A[4] +
502
+ (W[i] = (i < 16) ? words[bs + i] : G << 1 | G >>> 31) +
503
+ 0x5A827999,
504
+ B = A[1],
505
+ C = A[2],
506
+ D = A[3],
507
+ G + ((j = i / 5 >> 2) // eslint-disable-line no-cond-assign
508
+ ? j !== 2
509
+ ? (B ^ C ^ D) + (j & 2 ? 0x6FE0483D : 0x14577208)
510
+ : (B & C | B & D | C & D) + 0x34994343
511
+ : B & C | ~B & D
493
512
  )
494
- , A[1] = b
495
- , A[2] = B << 30 | B >>> 2
496
- , A[3] = C
497
- , A[4] = D
498
- , ++i
499
- ) {
500
- G = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
501
- }
502
-
503
- for (i = 5; i;) H[--i] = H[i] + A[i];
513
+ )
514
+ , A[1] = b
515
+ , A[2] = B << 30 | B >>> 2
516
+ , A[3] = C
517
+ , A[4] = D
518
+ , ++i
519
+ ) {
520
+ G = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
504
521
  }
505
522
 
506
- // if (isLittleEndian()) {
507
- // H = H.map(switchEndianness32)
508
- // }
509
-
510
- return new Uint8Array(H.buffer, H.byteOffset, H.byteLength)
523
+ for (i = 5; i;) H[--i] = H[i] + A[i];
511
524
  }
512
525
 
513
- // @ts-nocheck
514
- const chunker = utils.bf(3);
526
+ // if (isLittleEndian()) {
527
+ // H = H.map(switchEndianness32)
528
+ // }
515
529
 
516
- const NO_ENCRYPT =
517
- typeof process !== 'undefined' ? process.env.NO_ENCRYPT : ({ url: (document.currentScript && document.currentScript.src || new URL('bundle.js', document.baseURI).href) }) && undefined.VITE_NO_ENCRYPT;
530
+ return new Uint8Array(H.buffer, H.byteOffset, H.byteLength)
531
+ }
518
532
 
519
- class Valet {
520
- idb = null
521
- name = null
522
- uploadQueue = null
523
- alreadyEnqueued = new Set()
524
- keyMaterial = null
525
- keyId = 'null'
533
+ // @ts-nocheck
534
+ const chunker = utils.bf(3);
526
535
 
527
- /**
528
- * Function installed by the database to upload car files
529
- * @type {null|function(string, Uint8Array):Promise<void>}
530
- */
531
- uploadFunction = null
532
-
533
- constructor (name = 'default', keyMaterial) {
534
- this.name = name;
535
- this.setKeyMaterial(keyMaterial);
536
- this.uploadQueue = cargoQueue(async (tasks, callback) => {
537
- console.log(
538
- 'queue worker',
539
- tasks.length,
540
- tasks.reduce((acc, t) => acc + t.value.length, 0)
541
- );
542
- if (this.uploadFunction) {
543
- // todo we can coalesce these into a single car file
544
- return await this.withDB(async db => {
545
- for (const task of tasks) {
546
- await this.uploadFunction(task.carCid, task.value);
547
- // update the indexedb to mark this car as no longer pending
548
- const carMeta = await db.get('cidToCar', task.carCid);
549
- delete carMeta.pending;
550
- await db.put('cidToCar', carMeta);
551
- }
552
- })
553
- }
554
- callback();
555
- });
536
+ const NO_ENCRYPT =
537
+ typeof process !== 'undefined' ? process.env.NO_ENCRYPT : ({ url: (typeof document === 'undefined' ? require('u' + 'rl').pathToFileURL(__filename).href : (document.currentScript && document.currentScript.src || new URL('bundle.js', document.baseURI).href)) }) && undefined.VITE_NO_ENCRYPT;
538
+
539
+ class Valet {
540
+ idb = null
541
+ name = null
542
+ uploadQueue = null
543
+ alreadyEnqueued = new Set()
544
+ keyMaterial = null
545
+ keyId = 'null'
556
546
 
557
- this.uploadQueue.drain(async () => {
547
+ /**
548
+ * Function installed by the database to upload car files
549
+ * @type {null|function(string, Uint8Array):Promise<void>}
550
+ */
551
+ uploadFunction = null
552
+
553
+ constructor (name = 'default', keyMaterial) {
554
+ this.name = name;
555
+ this.setKeyMaterial(keyMaterial);
556
+ this.uploadQueue = cargoQueue(async (tasks, callback) => {
557
+ console.log(
558
+ 'queue worker',
559
+ tasks.length,
560
+ tasks.reduce((acc, t) => acc + t.value.length, 0)
561
+ );
562
+ if (this.uploadFunction) {
563
+ // todo we can coalesce these into a single car file
558
564
  return await this.withDB(async db => {
559
- const carKeys = (await db.getAllFromIndex('cidToCar', 'pending')).map(c => c.car);
560
- for (const carKey of carKeys) {
561
- await this.uploadFunction(carKey, await db.get('cars', carKey));
562
- const carMeta = await db.get('cidToCar', carKey);
565
+ for (const task of tasks) {
566
+ await this.uploadFunction(task.carCid, task.value);
567
+ // update the indexedb to mark this car as no longer pending
568
+ const carMeta = await db.get('cidToCar', task.carCid);
563
569
  delete carMeta.pending;
564
570
  await db.put('cidToCar', carMeta);
565
571
  }
566
572
  })
567
- });
568
- }
569
-
570
- getKeyMaterial () {
571
- return this.keyMaterial
572
- }
573
-
574
- setKeyMaterial (km) {
575
- if (km && !NO_ENCRYPT) {
576
- const hex = Uint8Array.from(buffer.Buffer.from(km, 'hex'));
577
- this.keyMaterial = km;
578
- const hash = rawSha1(hex);
579
- this.keyId = buffer.Buffer.from(hash).toString('hex');
580
- } else {
581
- this.keyMaterial = null;
582
- this.keyId = 'null';
583
573
  }
584
- // console.trace('keyId', this.name, this.keyId)
585
- }
574
+ callback();
575
+ });
586
576
 
587
- /**
588
- * Group the blocks into a car and write it to the valet.
589
- * @param {InnerBlockstore} innerBlockstore
590
- * @param {Set<string>} cids
591
- * @returns {Promise<void>}
592
- * @memberof Valet
593
- */
594
- async writeTransaction (innerBlockstore, cids) {
595
- if (innerBlockstore.lastCid) {
596
- if (this.keyMaterial) {
597
- // console.log('encrypting car', innerBlockstore.label)
598
- const newCar = await blocksToEncryptedCarBlock(innerBlockstore.lastCid, innerBlockstore, this.keyMaterial);
599
- await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
600
- } else {
601
- const newCar = await blocksToCarBlock(innerBlockstore.lastCid, innerBlockstore);
602
- await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
577
+ this.uploadQueue.drain(async () => {
578
+ return await this.withDB(async db => {
579
+ const carKeys = (await db.getAllFromIndex('cidToCar', 'pending')).map(c => c.car);
580
+ for (const carKey of carKeys) {
581
+ await this.uploadFunction(carKey, await db.get('cars', carKey));
582
+ const carMeta = await db.get('cidToCar', carKey);
583
+ delete carMeta.pending;
584
+ await db.put('cidToCar', carMeta);
603
585
  }
604
- }
605
- }
586
+ })
587
+ });
588
+ }
606
589
 
607
- withDB = async dbWorkFun => {
608
- if (!this.idb) {
609
- this.idb = await idb.openDB(`fp.${this.keyId}.${this.name}.valet`, 2, {
610
- upgrade (db, oldVersion, newVersion, transaction) {
611
- if (oldVersion < 1) {
612
- db.createObjectStore('cars'); // todo use database name
613
- const cidToCar = db.createObjectStore('cidToCar', { keyPath: 'car' });
614
- cidToCar.createIndex('cids', 'cids', { multiEntry: true });
615
- }
616
- if (oldVersion < 2) {
617
- const cidToCar = transaction.objectStore('cidToCar');
618
- cidToCar.createIndex('pending', 'pending');
619
- }
620
- }
621
- });
622
- }
623
- return await dbWorkFun(this.idb)
624
- }
590
+ getKeyMaterial () {
591
+ return this.keyMaterial
592
+ }
625
593
 
626
- /**
627
- *
628
- * @param {string} carCid
629
- * @param {*} value
630
- */
631
- async parkCar (carCid, value, cids) {
632
- await this.withDB(async db => {
633
- const tx = db.transaction(['cars', 'cidToCar'], 'readwrite');
634
- await tx.objectStore('cars').put(value, carCid);
635
- await tx.objectStore('cidToCar').put({ pending: 'y', car: carCid, cids: Array.from(cids) });
636
- return await tx.done
637
- });
594
+ setKeyMaterial (km) {
595
+ if (km && !NO_ENCRYPT) {
596
+ const hex = Uint8Array.from(buffer.Buffer.from(km, 'hex'));
597
+ this.keyMaterial = km;
598
+ const hash = rawSha1(hex);
599
+ this.keyId = buffer.Buffer.from(hash).toString('hex');
600
+ } else {
601
+ this.keyMaterial = null;
602
+ this.keyId = 'null';
603
+ }
604
+ // console.trace('keyId', this.name, this.keyId)
605
+ }
638
606
 
639
- // upload to web3.storage if we have credentials
640
- if (this.uploadFunction) {
641
- if (this.alreadyEnqueued.has(carCid)) {
642
- // console.log('already enqueued', carCid)
643
- return
644
- }
645
- // don't await this, it will be done in the queue
646
- // console.log('add to queue', carCid, value.length)
647
- this.uploadQueue.push({ carCid, value });
648
- this.alreadyEnqueued.add(carCid);
607
+ /**
608
+ * Group the blocks into a car and write it to the valet.
609
+ * @param {InnerBlockstore} innerBlockstore
610
+ * @param {Set<string>} cids
611
+ * @returns {Promise<void>}
612
+ * @memberof Valet
613
+ */
614
+ async writeTransaction (innerBlockstore, cids) {
615
+ if (innerBlockstore.lastCid) {
616
+ if (this.keyMaterial) {
617
+ // console.log('encrypting car', innerBlockstore.label)
618
+ const newCar = await blocksToEncryptedCarBlock(innerBlockstore.lastCid, innerBlockstore, this.keyMaterial);
619
+ await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
620
+ } else {
621
+ const newCar = await blocksToCarBlock(innerBlockstore.lastCid, innerBlockstore);
622
+ await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
649
623
  }
650
624
  }
625
+ }
651
626
 
652
- remoteBlockFunction = null
653
-
654
- async getBlock (dataCID) {
655
- return await this.withDB(async db => {
656
- const tx = db.transaction(['cars', 'cidToCar'], 'readonly');
657
- const indexResp = await tx.objectStore('cidToCar').index('cids').get(dataCID);
658
- const carCid = indexResp?.car;
659
- if (!carCid) {
660
- throw new Error('Missing block: ' + dataCID)
661
- }
662
- const carBytes = await tx.objectStore('cars').get(carCid);
663
- const reader = await car.CarReader.fromBytes(carBytes);
664
- if (this.keyMaterial) {
665
- const roots = await reader.getRoots();
666
- const readerGetWithCodec = async cid => {
667
- const got = await reader.get(cid);
668
- // console.log('got.', cid.toString())
669
- let useCodec = codec__namespace;
670
- if (cid.toString().indexOf('bafy') === 0) {
671
- useCodec = dagcbor__namespace;
672
- }
673
- const decoded = await Block__namespace.decode({
674
- ...got,
675
- codec: useCodec,
676
- hasher: sha2.sha256
677
- });
678
- // console.log('decoded', decoded.value)
679
- return decoded
680
- };
681
- const { blocks } = await blocksFromEncryptedCarBlock(roots[0], readerGetWithCodec, this.keyMaterial);
682
- const block = blocks.find(b => b.cid.toString() === dataCID);
683
- if (block) {
684
- return block.bytes
627
+ withDB = async dbWorkFun => {
628
+ if (!this.idb) {
629
+ this.idb = await idb.openDB(`fp.${this.keyId}.${this.name}.valet`, 2, {
630
+ upgrade (db, oldVersion, newVersion, transaction) {
631
+ if (oldVersion < 1) {
632
+ db.createObjectStore('cars'); // todo use database name
633
+ const cidToCar = db.createObjectStore('cidToCar', { keyPath: 'car' });
634
+ cidToCar.createIndex('cids', 'cids', { multiEntry: true });
685
635
  }
686
- } else {
687
- const gotBlock = await reader.get(cid.CID.parse(dataCID));
688
- if (gotBlock) {
689
- return gotBlock.bytes
636
+ if (oldVersion < 2) {
637
+ const cidToCar = transaction.objectStore('cidToCar');
638
+ cidToCar.createIndex('pending', 'pending');
690
639
  }
691
640
  }
692
- })
641
+ });
693
642
  }
643
+ return await dbWorkFun(this.idb)
694
644
  }
695
645
 
696
- const blocksToCarBlock = async (lastCid, blocks) => {
697
- let size = 0;
698
- const headerSize = CBW__namespace.headerLength({ roots: [lastCid] });
699
- size += headerSize;
700
- if (!Array.isArray(blocks)) {
701
- blocks = Array.from(blocks.entries());
702
- }
703
- for (const { cid, bytes } of blocks) {
704
- size += CBW__namespace.blockLength({ cid, bytes });
646
+ /**
647
+ *
648
+ * @param {string} carCid
649
+ * @param {*} value
650
+ */
651
+ async parkCar (carCid, value, cids) {
652
+ await this.withDB(async db => {
653
+ const tx = db.transaction(['cars', 'cidToCar'], 'readwrite');
654
+ await tx.objectStore('cars').put(value, carCid);
655
+ await tx.objectStore('cidToCar').put({ pending: 'y', car: carCid, cids: Array.from(cids) });
656
+ return await tx.done
657
+ });
658
+
659
+ // upload to web3.storage if we have credentials
660
+ if (this.uploadFunction) {
661
+ if (this.alreadyEnqueued.has(carCid)) {
662
+ // console.log('already enqueued', carCid)
663
+ return
664
+ }
665
+ // don't await this, it will be done in the queue
666
+ // console.log('add to queue', carCid, value.length)
667
+ this.uploadQueue.push({ carCid, value });
668
+ this.alreadyEnqueued.add(carCid);
705
669
  }
706
- const buffer = new Uint8Array(size);
707
- const writer = await CBW__namespace.createWriter(buffer, { headerSize });
670
+ }
708
671
 
709
- writer.addRoot(lastCid);
672
+ remoteBlockFunction = null
710
673
 
711
- for (const { cid, bytes } of blocks) {
712
- writer.write({ cid, bytes });
713
- }
714
- await writer.close();
715
- return await Block__namespace.encode({ value: writer.bytes, hasher: sha2.sha256, codec: raw__namespace })
716
- };
674
+ async getBlock (dataCID) {
675
+ return await this.withDB(async db => {
676
+ const tx = db.transaction(['cars', 'cidToCar'], 'readonly');
677
+ const indexResp = await tx.objectStore('cidToCar').index('cids').get(dataCID);
678
+ const carCid = indexResp?.car;
679
+ if (!carCid) {
680
+ throw new Error('Missing block: ' + dataCID)
681
+ }
682
+ const carBytes = await tx.objectStore('cars').get(carCid);
683
+ const reader = await car.CarReader.fromBytes(carBytes);
684
+ if (this.keyMaterial) {
685
+ const roots = await reader.getRoots();
686
+ const readerGetWithCodec = async cid => {
687
+ const got = await reader.get(cid);
688
+ // console.log('got.', cid.toString())
689
+ let useCodec = codec__namespace;
690
+ if (cid.toString().indexOf('bafy') === 0) {
691
+ useCodec = dagcbor__namespace;
692
+ }
693
+ const decoded = await Block__namespace.decode({
694
+ ...got,
695
+ codec: useCodec,
696
+ hasher: sha2.sha256
697
+ });
698
+ // console.log('decoded', decoded.value)
699
+ return decoded
700
+ };
701
+ const { blocks } = await blocksFromEncryptedCarBlock(roots[0], readerGetWithCodec, this.keyMaterial);
702
+ const block = blocks.find(b => b.cid.toString() === dataCID);
703
+ if (block) {
704
+ return block.bytes
705
+ }
706
+ } else {
707
+ const gotBlock = await reader.get(cid.CID.parse(dataCID));
708
+ if (gotBlock) {
709
+ return gotBlock.bytes
710
+ }
711
+ }
712
+ })
713
+ }
714
+ }
715
+
716
+ const blocksToCarBlock = async (lastCid, blocks) => {
717
+ let size = 0;
718
+ const headerSize = CBW__namespace.headerLength({ roots: [lastCid] });
719
+ size += headerSize;
720
+ if (!Array.isArray(blocks)) {
721
+ blocks = Array.from(blocks.entries());
722
+ }
723
+ for (const { cid, bytes } of blocks) {
724
+ size += CBW__namespace.blockLength({ cid, bytes });
725
+ }
726
+ const buffer = new Uint8Array(size);
727
+ const writer = await CBW__namespace.createWriter(buffer, { headerSize });
717
728
 
718
- const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blocks, keyMaterial) => {
719
- const encryptionKey = buffer.Buffer.from(keyMaterial, 'hex');
720
- const encryptedBlocks = [];
721
- const theCids = [];
722
- for (const { cid } of blocks.entries()) {
723
- theCids.push(cid.toString());
724
- }
729
+ writer.addRoot(lastCid);
725
730
 
726
- let last;
727
- for await (const block of encrypt({
728
- cids: theCids,
729
- get: async cid => blocks.get(cid), // maybe we can just use blocks.get
730
- key: encryptionKey,
731
- hasher: sha2.sha256,
732
- chunker,
733
- cache: cache.nocache,
734
- // codec: dagcbor, // should be crypto?
735
- root: innerBlockStoreClockRootCid
736
- })) {
737
- encryptedBlocks.push(block);
738
- last = block;
739
- }
740
- // console.log('last', last.cid.toString(), 'for clock', innerBlockStoreClockRootCid.toString())
741
- const encryptedCar = await blocksToCarBlock(last.cid, encryptedBlocks);
742
- return encryptedCar
743
- };
744
- // { root, get, key, cache, chunker, hasher }
731
+ for (const { cid, bytes } of blocks) {
732
+ writer.write({ cid, bytes });
733
+ }
734
+ await writer.close();
735
+ return await Block__namespace.encode({ value: writer.bytes, hasher: sha2.sha256, codec: raw__namespace })
736
+ };
737
+
738
+ const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blocks, keyMaterial) => {
739
+ const encryptionKey = buffer.Buffer.from(keyMaterial, 'hex');
740
+ const encryptedBlocks = [];
741
+ const theCids = [];
742
+ for (const { cid } of blocks.entries()) {
743
+ theCids.push(cid.toString());
744
+ }
745
745
 
746
- const memoizeDecryptedCarBlocks = new Map();
747
- const blocksFromEncryptedCarBlock = async (cid, get, keyMaterial) => {
748
- if (memoizeDecryptedCarBlocks.has(cid.toString())) {
749
- return memoizeDecryptedCarBlocks.get(cid.toString())
750
- } else {
751
- const blocksPromise = (async () => {
752
- const decryptionKey = buffer.Buffer.from(keyMaterial, 'hex');
753
- // console.log('decrypting', keyMaterial, cid.toString())
754
- const cids = new Set();
755
- const decryptedBlocks = [];
756
- for await (const block of decrypt({
757
- root: cid,
758
- get,
759
- key: decryptionKey,
760
- chunker,
761
- hasher: sha2.sha256,
762
- cache: cache.nocache
763
- // codec: dagcbor
764
- })) {
765
- decryptedBlocks.push(block);
766
- cids.add(block.cid.toString());
767
- }
768
- return { blocks: decryptedBlocks, cids }
769
- })();
770
- memoizeDecryptedCarBlocks.set(cid.toString(), blocksPromise);
771
- return blocksPromise
772
- }
773
- };
746
+ let last;
747
+ for await (const block of encrypt({
748
+ cids: theCids,
749
+ get: async cid => blocks.get(cid), // maybe we can just use blocks.get
750
+ key: encryptionKey,
751
+ hasher: sha2.sha256,
752
+ chunker,
753
+ cache: cache.nocache,
754
+ // codec: dagcbor, // should be crypto?
755
+ root: innerBlockStoreClockRootCid
756
+ })) {
757
+ encryptedBlocks.push(block);
758
+ last = block;
759
+ }
760
+ // console.log('last', last.cid.toString(), 'for clock', innerBlockStoreClockRootCid.toString())
761
+ const encryptedCar = await blocksToCarBlock(last.cid, encryptedBlocks);
762
+ return encryptedCar
763
+ };
764
+ // { root, get, key, cache, chunker, hasher }
765
+
766
+ const memoizeDecryptedCarBlocks = new Map();
767
+ const blocksFromEncryptedCarBlock = async (cid, get, keyMaterial) => {
768
+ if (memoizeDecryptedCarBlocks.has(cid.toString())) {
769
+ return memoizeDecryptedCarBlocks.get(cid.toString())
770
+ } else {
771
+ const blocksPromise = (async () => {
772
+ const decryptionKey = buffer.Buffer.from(keyMaterial, 'hex');
773
+ // console.log('decrypting', keyMaterial, cid.toString())
774
+ const cids = new Set();
775
+ const decryptedBlocks = [];
776
+ for await (const block of decrypt({
777
+ root: cid,
778
+ get,
779
+ key: decryptionKey,
780
+ chunker,
781
+ hasher: sha2.sha256,
782
+ cache: cache.nocache
783
+ // codec: dagcbor
784
+ })) {
785
+ decryptedBlocks.push(block);
786
+ cids.add(block.cid.toString());
787
+ }
788
+ return { blocks: decryptedBlocks, cids }
789
+ })();
790
+ memoizeDecryptedCarBlocks.set(cid.toString(), blocksPromise);
791
+ return blocksPromise
792
+ }
793
+ };
774
794
 
775
- // @ts-nocheck
795
+ // @ts-nocheck
776
796
 
777
- // const sleep = ms => new Promise(r => setTimeout(r, ms))
797
+ // const sleep = ms => new Promise(r => setTimeout(r, ms))
778
798
 
779
- const husherMap = new Map();
780
- const husher = (id, workFn) => {
781
- if (!husherMap.has(id)) {
782
- husherMap.set(
783
- id,
784
- workFn().finally(() => setTimeout(() => husherMap.delete(id), 100))
785
- );
786
- }
787
- return husherMap.get(id)
788
- };
799
+ const husherMap = new Map();
800
+ const husher = (id, workFn) => {
801
+ if (!husherMap.has(id)) {
802
+ husherMap.set(
803
+ id,
804
+ workFn().finally(() => setTimeout(() => husherMap.delete(id), 100))
805
+ );
806
+ }
807
+ return husherMap.get(id)
808
+ };
809
+
810
+ /**
811
+ * @typedef {Object} AnyBlock
812
+ * @property {import('./link').AnyLink} cid - The CID of the block
813
+ * @property {Uint8Array} bytes - The block's data
814
+ *
815
+ * @typedef {Object} Blockstore
816
+ * @property {function(import('./link').AnyLink): Promise<AnyBlock|undefined>} get - A function to retrieve a block by CID
817
+ * @property {function(import('./link').AnyLink, Uint8Array): Promise<void>} put - A function to store a block's data and CID
818
+ *
819
+ * A blockstore that caches writes to a transaction and only persists them when committed.
820
+ * @implements {Blockstore}
821
+ */
822
+ class TransactionBlockstore {
823
+ /** @type {Map<string, Uint8Array>} */
824
+ committedBlocks = new Map()
825
+
826
+ valet = null
827
+
828
+ instanceId = 'blkz.' + Math.random().toString(36).substring(2, 4)
829
+ inflightTransactions = new Set()
830
+
831
+ constructor (name, encryptionKey) {
832
+ this.valet = new Valet(name, encryptionKey);
833
+ }
789
834
 
790
835
  /**
791
- * @typedef {Object} AnyBlock
792
- * @property {import('./link').AnyLink} cid - The CID of the block
793
- * @property {Uint8Array} bytes - The block's data
836
+ * Get a block from the store.
794
837
  *
795
- * @typedef {Object} Blockstore
796
- * @property {function(import('./link').AnyLink): Promise<AnyBlock|undefined>} get - A function to retrieve a block by CID
797
- * @property {function(import('./link').AnyLink, Uint8Array): Promise<void>} put - A function to store a block's data and CID
798
- *
799
- * A blockstore that caches writes to a transaction and only persists them when committed.
800
- * @implements {Blockstore}
838
+ * @param {import('./link').AnyLink} cid
839
+ * @returns {Promise<AnyBlock | undefined>}
801
840
  */
802
- class TransactionBlockstore {
803
- /** @type {Map<string, Uint8Array>} */
804
- committedBlocks = new Map()
805
-
806
- valet = null
807
-
808
- instanceId = 'blkz.' + Math.random().toString(36).substring(2, 4)
809
- inflightTransactions = new Set()
810
-
811
- constructor (name, encryptionKey) {
812
- this.valet = new Valet(name, encryptionKey);
813
- }
814
-
815
- /**
816
- * Get a block from the store.
817
- *
818
- * @param {import('./link').AnyLink} cid
819
- * @returns {Promise<AnyBlock | undefined>}
820
- */
821
- async get (cid) {
822
- const key = cid.toString();
823
- // it is safe to read from the in-flight transactions because they are immutable
824
- const bytes = await Promise.any([this.transactionsGet(key), this.committedGet(key)]).catch(e => {
825
- // console.log('networkGet', cid.toString(), e)
826
- return this.networkGet(key)
827
- });
828
- if (!bytes) throw new Error('Missing block: ' + key)
829
- return { cid, bytes }
830
- }
841
+ async get (cid) {
842
+ const key = cid.toString();
843
+ // it is safe to read from the in-flight transactions becauase they are immutable
844
+ const bytes = await Promise.any([this.transactionsGet(key), this.committedGet(key)]).catch(e => {
845
+ // console.log('networkGet', cid.toString(), e)
846
+ return this.networkGet(key)
847
+ });
848
+ if (!bytes) throw new Error('Missing block: ' + key)
849
+ return { cid, bytes }
850
+ }
831
851
 
832
- // this iterates over the in-flight transactions
833
- // and returns the first matching block it finds
834
- async transactionsGet (key) {
835
- for (const transaction of this.inflightTransactions) {
836
- const got = await transaction.get(key);
837
- if (got && got.bytes) return got.bytes
838
- }
839
- throw new Error('Missing block: ' + key)
852
+ // this iterates over the in-flight transactions
853
+ // and returns the first matching block it finds
854
+ async transactionsGet (key) {
855
+ for (const transaction of this.inflightTransactions) {
856
+ const got = await transaction.get(key);
857
+ if (got && got.bytes) return got.bytes
840
858
  }
859
+ throw new Error('Missing block: ' + key)
860
+ }
841
861
 
842
- async committedGet (key) {
843
- const old = this.committedBlocks.get(key);
844
- if (old) return old
845
- const got = await this.valet.getBlock(key);
846
- // console.log('committedGet: ' + key)
847
- this.committedBlocks.set(key, got);
848
- return got
849
- }
862
+ async committedGet (key) {
863
+ const old = this.committedBlocks.get(key);
864
+ if (old) return old
865
+ const got = await this.valet.getBlock(key);
866
+ // console.log('committedGet: ' + key)
867
+ this.committedBlocks.set(key, got);
868
+ return got
869
+ }
850
870
 
851
- async clearCommittedCache () {
852
- this.committedBlocks.clear();
853
- }
871
+ async clearCommittedCache () {
872
+ this.committedBlocks.clear();
873
+ }
854
874
 
855
- async networkGet (key) {
856
- if (this.valet.remoteBlockFunction) {
857
- // todo why is this on valet?
858
- const value = await husher(key, async () => await this.valet.remoteBlockFunction(key));
859
- if (value) {
860
- // console.log('networkGot: ' + key, value.length)
861
- doTransaction('networkGot: ' + key, this, async innerBlockstore => {
862
- await innerBlockstore.put(multiformats.CID.parse(key), value);
863
- });
864
- return value
865
- }
866
- } else {
867
- return false
875
+ async networkGet (key) {
876
+ if (this.valet.remoteBlockFunction) {
877
+ // todo why is this on valet?
878
+ const value = await husher(key, async () => await this.valet.remoteBlockFunction(key));
879
+ if (value) {
880
+ // console.log('networkGot: ' + key, value.length)
881
+ doTransaction('networkGot: ' + key, this, async innerBlockstore => {
882
+ await innerBlockstore.put(multiformats.CID.parse(key), value);
883
+ });
884
+ return value
868
885
  }
886
+ } else {
887
+ return false
869
888
  }
889
+ }
870
890
 
871
- /**
872
- * Add a block to the store. Usually bound to a transaction by a closure.
873
- * It sets the lastCid property to the CID of the block that was put.
874
- * This is used by the transaction as the head of the car when written to the valet.
875
- * We don't have to worry about which transaction we are when we are here because
876
- * we are the transactionBlockstore.
877
- *
878
- * @param {import('./link').AnyLink} cid
879
- * @param {Uint8Array} bytes
880
- */
881
- put (cid, bytes) {
882
- throw new Error('use a transaction to put')
883
- }
891
+ /**
892
+ * Add a block to the store. Usually bound to a transaction by a closure.
893
+ * It sets the lastCid property to the CID of the block that was put.
894
+ * This is used by the transaction as the head of the car when written to the valet.
895
+ * We don't have to worry about which transaction we are when we are here because
896
+ * we are the transactionBlockstore.
897
+ *
898
+ * @param {import('./link').AnyLink} cid
899
+ * @param {Uint8Array} bytes
900
+ */
901
+ put (cid, bytes) {
902
+ throw new Error('use a transaction to put')
903
+ }
884
904
 
885
- /**
886
- * Iterate over all blocks in the store.
887
- *
888
- * @yields {AnyBlock}
889
- * @returns {AsyncGenerator<AnyBlock>}
890
- */
891
- // * entries () {
892
- // // needs transaction blocks?
893
- // // for (const [str, bytes] of this.blocks) {
894
- // // yield { cid: parse(str), bytes }
895
- // // }
896
- // for (const [str, bytes] of this.committedBlocks) {
897
- // yield { cid: parse(str), bytes }
898
- // }
899
- // }
905
+ /**
906
+ * Iterate over all blocks in the store.
907
+ *
908
+ * @yields {AnyBlock}
909
+ * @returns {AsyncGenerator<AnyBlock>}
910
+ */
911
+ // * entries () {
912
+ // // needs transaction blocks?
913
+ // // for (const [str, bytes] of this.blocks) {
914
+ // // yield { cid: parse(str), bytes }
915
+ // // }
916
+ // for (const [str, bytes] of this.committedBlocks) {
917
+ // yield { cid: parse(str), bytes }
918
+ // }
919
+ // }
900
920
 
901
- /**
902
- * Begin a transaction. Ensures the uncommitted blocks are empty at the beginning.
903
- * Returns the blocks to read and write during the transaction.
904
- * @returns {InnerBlockstore}
905
- * @memberof TransactionBlockstore
906
- */
907
- begin (label = '') {
908
- const innerTransactionBlockstore = new InnerBlockstore(label, this);
909
- this.inflightTransactions.add(innerTransactionBlockstore);
910
- return innerTransactionBlockstore
911
- }
921
+ /**
922
+ * Begin a transaction. Ensures the uncommitted blocks are empty at the beginning.
923
+ * Returns the blocks to read and write during the transaction.
924
+ * @returns {InnerBlockstore}
925
+ * @memberof TransactionBlockstore
926
+ */
927
+ begin (label = '') {
928
+ const innerTransactionBlockstore = new InnerBlockstore(label, this);
929
+ this.inflightTransactions.add(innerTransactionBlockstore);
930
+ return innerTransactionBlockstore
931
+ }
912
932
 
913
- /**
914
- * Commit the transaction. Writes the blocks to the store.
915
- * @returns {Promise<void>}
916
- * @memberof TransactionBlockstore
917
- */
918
- async commit (innerBlockstore) {
919
- await this.doCommit(innerBlockstore);
920
- }
933
+ /**
934
+ * Commit the transaction. Writes the blocks to the store.
935
+ * @returns {Promise<void>}
936
+ * @memberof TransactionBlockstore
937
+ */
938
+ async commit (innerBlockstore) {
939
+ await this.doCommit(innerBlockstore);
940
+ }
921
941
 
922
- // first get the transaction blockstore from the map of transaction blockstores
923
- // then copy it to committedBlocks
924
- // then write the transaction blockstore to a car
925
- // then write the car to the valet
926
- // then remove the transaction blockstore from the map of transaction blockstores
927
- doCommit = async innerBlockstore => {
928
- const cids = new Set();
929
- for (const { cid, bytes } of innerBlockstore.entries()) {
930
- const stringCid = cid.toString(); // unnecessary string conversion, can we fix upstream?
931
- if (this.committedBlocks.has(stringCid)) ; else {
932
- this.committedBlocks.set(stringCid, bytes);
933
- cids.add(stringCid);
934
- }
935
- }
936
- if (cids.size > 0) {
937
- // console.log(innerBlockstore.label, 'committing', cids.size, 'blocks')
938
- await this.valet.writeTransaction(innerBlockstore, cids);
942
+ // first get the transaction blockstore from the map of transaction blockstores
943
+ // then copy it to committedBlocks
944
+ // then write the transaction blockstore to a car
945
+ // then write the car to the valet
946
+ // then remove the transaction blockstore from the map of transaction blockstores
947
+ doCommit = async innerBlockstore => {
948
+ const cids = new Set();
949
+ for (const { cid, bytes } of innerBlockstore.entries()) {
950
+ const stringCid = cid.toString(); // unnecessary string conversion, can we fix upstream?
951
+ if (this.committedBlocks.has(stringCid)) ; else {
952
+ this.committedBlocks.set(stringCid, bytes);
953
+ cids.add(stringCid);
939
954
  }
940
955
  }
941
-
942
- /**
943
- * Retire the transaction. Clears the uncommitted blocks.
944
- * @returns {void}
945
- * @memberof TransactionBlockstore
946
- */
947
- retire (innerBlockstore) {
948
- this.inflightTransactions.delete(innerBlockstore);
956
+ if (cids.size > 0) {
957
+ // console.log(innerBlockstore.label, 'committing', cids.size, 'blocks')
958
+ await this.valet.writeTransaction(innerBlockstore, cids);
949
959
  }
950
960
  }
951
961
 
952
962
  /**
953
- * Runs a function on an inner blockstore, then persists the change to a car writer
954
- * or other outer blockstore.
955
- * @param {string} label
956
- * @param {TransactionBlockstore} blockstore
957
- * @param {(innerBlockstore: Blockstore) => Promise<any>} doFun
958
- * @returns {Promise<any>}
963
+ * Retire the transaction. Clears the uncommitted blocks.
964
+ * @returns {void}
959
965
  * @memberof TransactionBlockstore
960
966
  */
961
- const doTransaction = async (label, blockstore, doFun) => {
962
- if (!blockstore.commit) return await doFun(blockstore)
963
- const innerBlockstore = blockstore.begin(label);
964
- try {
965
- const result = await doFun(innerBlockstore);
966
- await blockstore.commit(innerBlockstore);
967
- return result
968
- } catch (e) {
969
- console.error(`Transaction ${label} failed`, e, e.stack);
970
- throw e
971
- } finally {
972
- blockstore.retire(innerBlockstore);
973
- }
974
- };
967
+ retire (innerBlockstore) {
968
+ this.inflightTransactions.delete(innerBlockstore);
969
+ }
970
+ }
971
+
972
+ /**
973
+ * Runs a function on an inner blockstore, then persists the change to a car writer
974
+ * or other outer blockstore.
975
+ * @param {string} label
976
+ * @param {TransactionBlockstore} blockstore
977
+ * @param {(innerBlockstore: Blockstore) => Promise<any>} doFun
978
+ * @returns {Promise<any>}
979
+ * @memberof TransactionBlockstore
980
+ */
981
+ const doTransaction = async (label, blockstore, doFun) => {
982
+ if (!blockstore.commit) return await doFun(blockstore)
983
+ const innerBlockstore = blockstore.begin(label);
984
+ try {
985
+ const result = await doFun(innerBlockstore);
986
+ await blockstore.commit(innerBlockstore);
987
+ return result
988
+ } catch (e) {
989
+ console.error(`Transaction ${label} failed`, e, e.stack);
990
+ throw e
991
+ } finally {
992
+ blockstore.retire(innerBlockstore);
993
+ }
994
+ };
995
+
996
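A hedged usage sketch of doTransaction (inside an async function; `blocks` is assumed to be a TransactionBlockstore and `cid`/`bytes` are placeholders): writes made inside the callback land in an InnerBlockstore and are only committed to the valet when the callback resolves; if it throws, the transaction is retired without committing.

const storedCid = await doTransaction('example-write', blocks, async innerBlockstore => {
  await innerBlockstore.put(cid, bytes)
  return cid // the callback's return value is passed through to the caller
})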
+ /** @implements {BlockFetcher} */
997
+ class InnerBlockstore {
998
+ /** @type {Map<string, Uint8Array>} */
999
+ blocks = new Map()
1000
+ lastCid = null
1001
+ label = ''
1002
+ parentBlockstore = null
1003
+
1004
+ constructor (label, parentBlockstore) {
1005
+ this.label = label;
1006
+ this.parentBlockstore = parentBlockstore;
1007
+ }
975
1008
 
976
- /** @implements {BlockFetcher} */
977
- class InnerBlockstore {
978
- /** @type {Map<string, Uint8Array>} */
979
- blocks = new Map()
980
- lastCid = null
981
- label = ''
982
- parentBlockstore = null
983
-
984
- constructor (label, parentBlockstore) {
985
- this.label = label;
986
- this.parentBlockstore = parentBlockstore;
1009
+ /**
1010
+ * @param {import('./link').AnyLink} cid
1011
+ * @returns {Promise<AnyBlock | undefined>}
1012
+ */
1013
+ async get (cid) {
1014
+ const key = cid.toString();
1015
+ let bytes = this.blocks.get(key);
1016
+ if (bytes) {
1017
+ return { cid, bytes }
987
1018
  }
988
-
989
- /**
990
- * @param {import('./link').AnyLink} cid
991
- * @returns {Promise<AnyBlock | undefined>}
992
- */
993
- async get (cid) {
994
- const key = cid.toString();
995
- let bytes = this.blocks.get(key);
996
- if (bytes) {
997
- return { cid, bytes }
998
- }
999
- bytes = await this.parentBlockstore.committedGet(key);
1000
- if (bytes) {
1001
- return { cid, bytes }
1002
- }
1019
+ bytes = await this.parentBlockstore.committedGet(key);
1020
+ if (bytes) {
1021
+ return { cid, bytes }
1003
1022
  }
1023
+ }
1004
1024
 
1005
- /**
1006
- * @param {import('./link').AnyLink} cid
1007
- * @param {Uint8Array} bytes
1008
- */
1009
- put (cid, bytes) {
1010
- // console.log('put', cid)
1011
- this.blocks.set(cid.toString(), bytes);
1012
- this.lastCid = cid;
1013
- }
1025
+ /**
1026
+ * @param {import('./link').AnyLink} cid
1027
+ * @param {Uint8Array} bytes
1028
+ */
1029
+ put (cid, bytes) {
1030
+ // console.log('put', cid)
1031
+ this.blocks.set(cid.toString(), bytes);
1032
+ this.lastCid = cid;
1033
+ }
1014
1034
 
1015
- * entries () {
1016
- for (const [str, bytes] of this.blocks) {
1017
- yield { cid: link.parse(str), bytes };
1018
- }
1035
+ * entries () {
1036
+ for (const [str, bytes] of this.blocks) {
1037
+ yield { cid: link.parse(str), bytes };
1019
1038
  }
1020
1039
  }
1021
-
1022
- // @ts-nocheck
1023
- const blockOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare: utils.simpleCompare };
1024
-
1025
- const withLog = async (label, fn) => {
1026
- const resp = await fn();
1027
- // console.log('withLog', label, !!resp)
1028
- return resp
1040
+ }
1041
+
1042
+ // @ts-nocheck
1043
+ const blockOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare: utils.simpleCompare };
1044
+
1045
+ const withLog = async (label, fn) => {
1046
+ const resp = await fn();
1047
+ // console.log('withLog', label, !!resp)
1048
+ return resp
1049
+ };
1050
+
1051
+ // should also return a CIDCounter
1052
+ const makeGetBlock = (blocks) => {
1053
+ // const cids = new CIDCounter() // this could be used for proofs of mutations
1054
+ const getBlockFn = async (address) => {
1055
+ const { cid, bytes } = await withLog(address, () => blocks.get(address));
1056
+ // cids.add({ address: cid })
1057
+ return Block.create({ cid, bytes, hasher: sha2.sha256, codec: dagcbor__namespace })
1029
1058
  };
1030
-
1031
- // should also return a CIDCounter
1032
- const makeGetBlock = (blocks) => {
1033
- // const cids = new CIDCounter() // this could be used for proofs of mutations
1034
- const getBlockFn = async (address) => {
1035
- const { cid, bytes } = await withLog(address, () => blocks.get(address));
1036
- // cids.add({ address: cid })
1037
- return Block.create({ cid, bytes, hasher: sha2.sha256, codec: dagcbor__namespace })
1038
- };
1039
- return {
1040
- // cids,
1041
- getBlock: getBlockFn
1042
- }
1059
+ return {
1060
+ // cids,
1061
+ getBlock: getBlockFn
1062
+ }
1063
+ };
1064
+
1065
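A minimal sketch of how the returned getBlock is consumed (mirroring prollyRootFromAncestor below; `blocks` and `rootCid` are assumed to exist): the raw { cid, bytes } from the blockstore is decoded into a verified multiformats Block so prolly-trees can traverse the tree.

const { getBlock } = makeGetBlock(blocks)
const rootNode = await map.load({ cid: rootCid, get: getBlock, ...blockOpts })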
+ /**
1066
+ *
1067
+ * @param {*} param0
1068
+ * @returns
1069
+ */
1070
+ async function createAndSaveNewEvent ({
1071
+ inBlocks,
1072
+ bigPut,
1073
+ root,
1074
+ event: inEvent,
1075
+ head,
1076
+ additions,
1077
+ removals = []
1078
+ }) {
1079
+ let cids;
1080
+ const { key, value, del } = inEvent;
1081
+ const data = {
1082
+ root: (root
1083
+ ? {
1084
+ cid: root.cid,
1085
+ bytes: root.bytes, // can we remove this?
1086
+ value: root.value // can we remove this?
1087
+ }
1088
+ : null),
1089
+ key
1043
1090
  };
1044
1091
 
1045
- /**
1046
- *
1047
- * @param {*} param0
1048
- * @returns
1049
- */
1050
- async function createAndSaveNewEvent ({
1051
- inBlocks,
1052
- bigPut,
1092
+ if (del) {
1093
+ data.value = null;
1094
+ data.type = 'del';
1095
+ } else {
1096
+ data.value = value;
1097
+ data.type = 'put';
1098
+ }
1099
+ /** @type {EventData} */
1100
+
1101
+ const event = await EventBlock.create(data, head);
1102
+ bigPut(event)
1103
+ ;({ head, cids } = await advance(inBlocks, head, event.cid));
1104
+
1105
+ return {
1053
1106
  root,
1054
- event: inEvent,
1055
- head,
1056
1107
  additions,
1057
- removals = []
1058
- }) {
1059
- let cids;
1060
- const { key, value, del } = inEvent;
1061
- const data = {
1062
- root: (root
1063
- ? {
1064
- cid: root.cid,
1065
- bytes: root.bytes, // can we remove this?
1066
- value: root.value // can we remove this?
1067
- }
1068
- : null),
1069
- key
1070
- };
1071
-
1072
- if (del) {
1073
- data.value = null;
1074
- data.type = 'del';
1108
+ removals,
1109
+ head,
1110
+ clockCIDs: cids,
1111
+ event
1112
+ }
1113
+ }
1114
+
1115
+ const makeGetAndPutBlock = (inBlocks) => {
1116
+ // const mblocks = new MemoryBlockstore()
1117
+ // const blocks = new MultiBlockFetcher(mblocks, inBlocks)
1118
+ const { getBlock, cids } = makeGetBlock(inBlocks);
1119
+ const put = inBlocks.put.bind(inBlocks);
1120
+ const bigPut = async (block, additions) => {
1121
+ // console.log('bigPut', block.cid.toString())
1122
+ const { cid, bytes } = block;
1123
+ put(cid, bytes);
1124
+ // mblocks.putSync(cid, bytes)
1125
+ if (additions) {
1126
+ additions.set(cid.toString(), block);
1127
+ }
1128
+ };
1129
+ return { getBlock, bigPut, blocks: inBlocks, cids }
1130
+ };
1131
+
1132
+ const bulkFromEvents = (sorted, event) => {
1133
+ if (event) {
1134
+ const update = { value: { data: { key: event.key } } };
1135
+ if (event.del) {
1136
+ update.value.data.type = 'del';
1075
1137
  } else {
1076
- data.value = value;
1077
- data.type = 'put';
1138
+ update.value.data.type = 'put';
1139
+ update.value.data.value = event.value;
1078
1140
  }
1079
- /** @type {EventData} */
1141
+ sorted.push(update);
1142
+ }
1143
+ const bulk = new Map();
1144
+ for (const { value: event } of sorted) {
1145
+ const {
1146
+ data: { type, value, key }
1147
+ } = event;
1148
+ const bulkEvent = type === 'put' ? { key, value } : { key, del: true };
1149
+ bulk.set(bulkEvent.key, bulkEvent); // last wins
1150
+ }
1151
+ return Array.from(bulk.values())
1152
+ };
1153
+
1154
+ // Get the value of the root from the ancestor event
1155
+ /**
1156
+ *
1157
+ * @param {EventFetcher} events
1158
+ * @param {Link} ancestor
1159
+ * @param {*} getBlock
1160
+ * @returns
1161
+ */
1162
+ const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
1163
+ // console.log('prollyRootFromAncestor', ancestor)
1164
+ const event = await events.get(ancestor);
1165
+ const { root } = event.value.data;
1166
+ // console.log('prollyRootFromAncestor', root.cid, JSON.stringify(root.value))
1167
+ if (root) {
1168
+ return map.load({ cid: root.cid, get: getBlock, ...blockOpts })
1169
+ } else {
1170
+ return null
1171
+ }
1172
+ };
1173
+
1174
+ const doProllyBulk = async (inBlocks, head, event) => {
1175
+ const { getBlock, blocks } = makeGetAndPutBlock(inBlocks);
1176
+ let bulkSorted = [];
1177
+ let prollyRootNode = null;
1178
+ if (head.length) {
1179
+ // Otherwise, we find the common ancestor and update the root and other blocks
1180
+ const events = new EventFetcher(blocks);
1181
+ // todo this is returning more events than necessary, lets define the desired semantics from the top down
1182
+ // good semantics mean we can cache the results of this call
1183
+ const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
1184
+ bulkSorted = sorted;
1185
+ // console.log('sorted', JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
1186
+ prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock);
1187
+ // console.log('event', event)
1188
+ }
1080
1189
 
1081
- const event = await EventBlock.create(data, head);
1082
- bigPut(event)
1083
- ;({ head, cids } = await advance(inBlocks, head, event.cid));
1190
+ const bulkOperations = bulkFromEvents(bulkSorted, event);
1084
1191
 
1085
- return {
1086
- root,
1087
- additions,
1088
- removals,
1089
- head,
1090
- clockCIDs: cids,
1091
- event
1192
+ // if prolly root node is null, we need to create a new one
1193
+ if (!prollyRootNode) {
1194
+ let root;
1195
+ const newBlocks = [];
1196
+ // if all operations are deletes, we can just return an empty root
1197
+ if (bulkOperations.every((op) => op.del)) {
1198
+ return { root: null, blocks: [] }
1092
1199
  }
1200
+ for await (const node of map.create({ get: getBlock, list: bulkOperations, ...blockOpts })) {
1201
+ root = await node.block;
1202
+ newBlocks.push(root);
1203
+ }
1204
+ return { root, blocks: newBlocks }
1205
+ } else {
1206
+ return await prollyRootNode.bulk(bulkOperations) // { root: newProllyRootNode, blocks: newBlocks }
1207
+ }
1208
+ };
1209
+
1210
+ /**
1211
+ * Put a value (a CID) for the given key. If the key exists its value is overwritten.
1212
+ *
1213
+ * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1214
+ * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1215
+ * @param {string} key The key of the value to put.
1216
+ * @param {CID} value The value to put.
1217
+ * @param {object} [options]
1218
+ * @returns {Promise<Result>}
1219
+ */
1220
+ async function put (inBlocks, head, event, options) {
1221
+ const { bigPut } = makeGetAndPutBlock(inBlocks);
1222
+
1223
+ // If the head is empty, we create a new event and return the root and addition blocks
1224
+ if (!head.length) {
1225
+ const additions = new Map();
1226
+ const { root, blocks } = await doProllyBulk(inBlocks, head, event);
1227
+ for (const b of blocks) {
1228
+ bigPut(b, additions);
1229
+ }
1230
+ return createAndSaveNewEvent({ inBlocks, bigPut, root, event, head, additions: Array.from(additions.values()) })
1231
+ }
1232
+ const { root: newProllyRootNode, blocks: newBlocks } = await doProllyBulk(inBlocks, head, event);
1233
+
1234
+ if (!newProllyRootNode) {
1235
+ return createAndSaveNewEvent({
1236
+ inBlocks,
1237
+ bigPut,
1238
+ root: null,
1239
+ event,
1240
+ head,
1241
+ additions: []
1242
+ })
1243
+ } else {
1244
+ const prollyRootBlock = await newProllyRootNode.block;
1245
+ const additions = new Map(); // ; const removals = new Map()
1246
+ bigPut(prollyRootBlock, additions);
1247
+ for (const nb of newBlocks) {
1248
+ bigPut(nb, additions);
1249
+ }
1250
+ // additions are new blocks
1251
+ return createAndSaveNewEvent({
1252
+ inBlocks,
1253
+ bigPut,
1254
+ root: prollyRootBlock,
1255
+ event,
1256
+ head,
1257
+ additions: Array.from(additions.values()) /*, todo? Array.from(removals.values()) */
1258
+ })
1259
+ }
1260
+ }
1261
+
1262
+ /**
1263
+ * Determine the effective prolly root given the current merkle clock head.
1264
+ *
1265
+ * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1266
+ * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1267
+ */
1268
+ async function root (inBlocks, head) {
1269
+ if (!head.length) {
1270
+ throw new Error('no head')
1271
+ }
1272
+ const { root: newProllyRootNode, blocks: newBlocks, cids } = await doProllyBulk(inBlocks, head);
1273
+ // todo maybe these should go to a temp blockstore?
1274
+ await doTransaction('root', inBlocks, async (transactionBlockstore) => {
1275
+ const { bigPut } = makeGetAndPutBlock(transactionBlockstore);
1276
+ for (const nb of newBlocks) {
1277
+ bigPut(nb);
1278
+ }
1279
+ });
1280
+ return { cids, node: newProllyRootNode }
1281
+ }
1282
+
1283
+ /**
1284
+ * Get the list of events not known by the `since` event
1285
+ * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1286
+ * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1287
+ * @param {import('./clock').EventLink<EventData>} since Event to compare against.
1288
+ * @returns {Promise<{clockCIDs: CIDCounter, result: EventData[]}>}
1289
+ */
1290
+ async function eventsSince (blocks, head, since) {
1291
+ if (!head.length) {
1292
+ throw new Error('no head')
1293
+ }
1294
+ const sinceHead = [...since, ...head]; // ?
1295
+ const { cids, events: unknownSorted3 } = await findEventsToSync(blocks, sinceHead);
1296
+ return { clockCIDs: cids, result: unknownSorted3.map(({ value: { data } }) => data) }
1297
+ }
1298
+
1299
+ /**
1300
+ *
1301
+ * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1302
+ * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1303
+ *
1304
+ * @returns {Promise<{clockCIDs: CIDCounter, result: EventData[]}>}
1305
+ *
1306
+ */
1307
+ async function getAll (blocks, head) {
1308
+ // todo use the root node left around from put, etc
1309
+ // move load to a central place
1310
+ if (!head.length) {
1311
+ return { clockCIDs: new utils.CIDCounter(), cids: new utils.CIDCounter(), result: [] }
1093
1312
  }
1313
+ const { node: prollyRootNode, cids: clockCIDs } = await root(blocks, head);
1314
+ if (!prollyRootNode) {
1315
+ return { clockCIDs, cids: new utils.CIDCounter(), result: [] }
1316
+ }
1317
+ const { result, cids } = await prollyRootNode.getAllEntries(); // todo params
1318
+ return { clockCIDs, cids, result: result.map(({ key, value }) => ({ key, value })) }
1319
+ }
1320
+
1321
+ /**
1322
+ * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1323
+ * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1324
+ * @param {string} key The key of the value to retrieve.
1325
+ */
1326
+ async function get (blocks, head, key) {
1327
+ // instead pass root from db? and always update on change
1328
+ if (!head.length) {
1329
+ return { cids: new utils.CIDCounter(), result: null }
1330
+ }
1331
+ const { node: prollyRootNode, cids: clockCIDs } = await root(blocks, head);
1332
+ if (!prollyRootNode) {
1333
+ return { clockCIDs, cids: new utils.CIDCounter(), result: null }
1334
+ }
1335
+ const { result, cids } = await prollyRootNode.get(key);
1336
+ return { result, cids, clockCIDs }
1337
+ }
1094
1338
 
1095
- const makeGetAndPutBlock = (inBlocks) => {
1096
- // const mblocks = new MemoryBlockstore()
1097
- // const blocks = new MultiBlockFetcher(mblocks, inBlocks)
1098
- const { getBlock, cids } = makeGetBlock(inBlocks);
1099
- const put = inBlocks.put.bind(inBlocks);
1100
- const bigPut = async (block, additions) => {
1101
- // console.log('bigPut', block.cid.toString())
1102
- const { cid, bytes } = block;
1103
- put(cid, bytes);
1104
- // mblocks.putSync(cid, bytes)
1105
- if (additions) {
1106
- additions.set(cid.toString(), block);
1107
- }
1108
- };
1109
- return { getBlock, bigPut, blocks: inBlocks, cids }
1110
- };
1339
+ async function * vis (blocks, head) {
1340
+ if (!head.length) {
1341
+ return { cids: new utils.CIDCounter(), result: null }
1342
+ }
1343
+ const { node: prollyRootNode, cids } = await root(blocks, head);
1344
+ const lines = [];
1345
+ for await (const line of prollyRootNode.vis()) {
1346
+ yield line;
1347
+ lines.push(line);
1348
+ }
1349
+ return { vis: lines.join('\n'), cids }
1350
+ }
1111
1351
 
1112
- const bulkFromEvents = (sorted, event) => {
1113
- if (event) {
1114
- const update = { value: { data: { key: event.key } } };
1115
- if (event.del) {
1116
- update.value.data.type = 'del';
1117
- } else {
1118
- update.value.data.type = 'put';
1119
- update.value.data.value = event.value;
1120
- }
1121
- sorted.push(update);
1122
- }
1123
- const bulk = new Map();
1124
- for (const { value: event } of sorted) {
1125
- const {
1126
- data: { type, value, key }
1127
- } = event;
1128
- const bulkEvent = type === 'put' ? { key, value } : { key, del: true };
1129
- bulk.set(bulkEvent.key, bulkEvent); // last wins
1130
- }
1131
- return Array.from(bulk.values())
1132
- };
1352
+ async function visMerkleTree (blocks, head) {
1353
+ if (!head.length) {
1354
+ return { cids: new utils.CIDCounter(), result: null }
1355
+ }
1356
+ const { node: prollyRootNode, cids } = await root(blocks, head);
1357
+ const lines = [];
1358
+ for await (const line of prollyRootNode.vis()) {
1359
+ lines.push(line);
1360
+ }
1361
+ return { vis: lines.join('\n'), cids }
1362
+ }
1363
+
1364
+ async function visMerkleClock (blocks, head) {
1365
+ const lines = [];
1366
+ for await (const line of vis$1(blocks, head)) {
1367
+ // yield line
1368
+ lines.push(line);
1369
+ }
1370
+ return { vis: lines.join('\n') }
1371
+ }
1372
+
1373
+ // @ts-nocheck
1374
+ // import { CID } from 'multiformats/dist/types/src/cid.js'
1375
+
1376
+ // const sleep = ms => new Promise(resolve => setTimeout(resolve, ms))
1377
+
1378
+ // class Proof {}
1379
+
1380
+ /**
1381
+ * @class Fireproof
1382
+ * @classdesc Fireproof stores data in IndexedDB and provides a Merkle clock.
1383
+ * This is the main class for saving and loading JSON and other documents with the database. You can find additional examples and
1384
+ * usage guides in the repository README.
1385
+ *
1386
+ * @param {import('./blockstore.js').TransactionBlockstore} blocks - The block storage instance to use documents and indexes
1387
+ * @param {CID[]} clock - The Merkle clock head to use for the Fireproof instance.
1388
+ * @param {object} [config] - Optional configuration options for the Fireproof instance.
1389
+ * @param {object} [authCtx] - Optional authorization context object to use for any authentication checks.
1390
+ *
1391
+ */
1392
+ class Fireproof {
1393
+ listeners = new Set()
1394
+
1395
+ /**
1396
+ * @function storage
1397
+ * @memberof Fireproof
1398
+ * Creates a new Fireproof instance with default storage settings
1399
+ * Most apps should use this and not worry about the details.
1400
+ * @static
1401
+ * @returns {Fireproof} - a new Fireproof instance
1402
+ */
1403
+ static storage = (name = 'global') => {
1404
+ const instanceKey = crypto.randomBytes(32).toString('hex'); // pass null to disable encryption
1405
+ // pick a random key from const validatedKeys
1406
+ // const instanceKey = validatedKeys[Math.floor(Math.random() * validatedKeys.length)]
1407
+ return new Fireproof(new TransactionBlockstore(name, instanceKey), [], { name })
1408
+ }
1409
+
1410
+ constructor (blocks, clock, config, authCtx = {}) {
1411
+ this.name = config?.name || 'global';
1412
+ this.instanceId = `fp.${this.name}.${Math.random().toString(36).substring(2, 7)}`;
1413
+ this.blocks = blocks;
1414
+ this.clock = clock;
1415
+ this.config = config;
1416
+ this.authCtx = authCtx;
1417
+ this.indexes = new Map();
1418
+ }
1133
1419
 
1134
- // Get the value of the root from the ancestor event
1135
1420
  /**
1136
- *
1137
- * @param {EventFetcher} events
1138
- * @param {Link} ancestor
1139
- * @param {*} getBlock
1140
- * @returns
1421
+ * Renders the Fireproof instance as a JSON object.
1422
+ * @returns {Object} - The JSON representation of the Fireproof instance. Includes clock heads for the database and its indexes.
1423
+ * @memberof Fireproof
1424
+ * @instance
1141
1425
  */
1142
- const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
1143
- // console.log('prollyRootFromAncestor', ancestor)
1144
- const event = await events.get(ancestor);
1145
- const { root } = event.value.data;
1146
- // console.log('prollyRootFromAncestor', root.cid, JSON.stringify(root.value))
1147
- if (root) {
1148
- return map.load({ cid: root.cid, get: getBlock, ...blockOpts })
1149
- } else {
1150
- return null
1426
+ toJSON () {
1427
+ // todo this also needs to return the index roots...
1428
+ return {
1429
+ clock: this.clockToJSON(),
1430
+ name: this.name,
1431
+ key: this.blocks.valet.getKeyMaterial(),
1432
+ indexes: [...this.indexes.values()].map(index => index.toJSON())
1151
1433
  }
1152
- };
1434
+ }
1153
1435
 
1154
- const doProllyBulk = async (inBlocks, head, event) => {
1155
- const { getBlock, blocks } = makeGetAndPutBlock(inBlocks);
1156
- let bulkSorted = [];
1157
- let prollyRootNode = null;
1158
- if (head.length) {
1159
- // Otherwise, we find the common ancestor and update the root and other blocks
1160
- const events = new EventFetcher(blocks);
1161
- // todo this is returning more events than necessary, let's define the desired semantics from the top down
1162
- // good semantics mean we can cache the results of this call
1163
- const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
1164
- bulkSorted = sorted;
1165
- // console.log('sorted', JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
1166
- prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock);
1167
- // console.log('event', event)
1168
- }
1436
+ clockToJSON () {
1437
+ return this.clock.map(cid => cid.toString())
1438
+ }
1169
1439
 
1170
- const bulkOperations = bulkFromEvents(bulkSorted, event);
1440
+ hydrate ({ clock, name, key }) {
1441
+ this.name = name;
1442
+ this.clock = clock;
1443
+ this.blocks.valet.setKeyMaterial(key);
1444
+ this.indexBlocks = null;
1445
+ }
1171
1446
 
1172
- // if prolly root node is null, we need to create a new one
1173
- if (!prollyRootNode) {
1174
- let root;
1175
- const newBlocks = [];
1176
- // if all operations are deletes, we can just return an empty root
1177
- if (bulkOperations.every((op) => op.del)) {
1178
- return { root: null, blocks: [] }
1179
- }
1180
- for await (const node of map.create({ get: getBlock, list: bulkOperations, ...blockOpts })) {
1181
- root = await node.block;
1182
- newBlocks.push(root);
1183
- }
1184
- return { root, blocks: newBlocks }
1185
- } else {
1186
- return await prollyRootNode.bulk(bulkOperations) // { root: newProllyRootNode, blocks: newBlocks }
1187
- }
1188
- };
1447
+ /**
1448
+ * Triggers a notification to all listeners
1449
+ * of the Fireproof instance so they can repaint UI, etc.
1450
+ * @param {CID[] } clock
1451
+ * Clock to use for the snapshot.
1452
+ * @returns {Promise<void>}
1453
+ * @memberof Fireproof
1454
+ * @instance
1455
+ */
1456
+ async notifyReset () {
1457
+ await this.notifyListeners({ _reset: true, _clock: this.clockToJSON() });
1458
+ }
1459
+
1460
+ // used be indexes etc to notify database listeners of new availability
1461
+ async notifyExternal (source = 'unknown') {
1462
+ await this.notifyListeners({ _external: source, _clock: this.clockToJSON() });
1463
+ }
1189
1464
 
1190
1465
  /**
1191
- * Put a value (a CID) for the given key. If the key exists its value is overwritten.
1192
- *
1193
- * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1194
- * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1195
- * @param {string} key The key of the value to put.
1196
- * @param {CID} value The value to put.
1197
- * @param {object} [options]
1198
- * @returns {Promise<Result>}
1466
+ * Returns the changes made to the Fireproof instance since the specified event.
1467
+ * @function changesSince
1468
+ * @param {CID[]} [event] - The clock head to retrieve changes since. If null or undefined, retrieves all changes.
1469
+ * @returns {Object<{rows : Object[], clock: CID[]}>} An object containing the rows and the head of the instance's clock.
1470
+ * @memberof Fireproof
1471
+ * @instance
1199
1472
  */
1200
- async function put (inBlocks, head, event, options) {
1201
- const { bigPut } = makeGetAndPutBlock(inBlocks);
1202
-
1203
- // If the head is empty, we create a new event and return the root and addition blocks
1204
- if (!head.length) {
1205
- const additions = new Map();
1206
- const { root, blocks } = await doProllyBulk(inBlocks, head, event);
1207
- for (const b of blocks) {
1208
- bigPut(b, additions);
1473
+ async changesSince (event) {
1474
+ // console.log('changesSince', this.instanceId, event, this.clock)
1475
+ let rows, dataCIDs, clockCIDs;
1476
+ // if (!event) event = []
1477
+ if (event) {
1478
+ const resp = await eventsSince(this.blocks, this.clock, event);
1479
+ const docsMap = new Map();
1480
+ for (const { key, type, value } of resp.result.map(decodeEvent)) {
1481
+ if (type === 'del') {
1482
+ docsMap.set(key, { key, del: true });
1483
+ } else {
1484
+ docsMap.set(key, { key, value });
1485
+ }
1209
1486
  }
1210
- return createAndSaveNewEvent({ inBlocks, bigPut, root, event, head, additions: Array.from(additions.values()) })
1211
- }
1212
- const { root: newProllyRootNode, blocks: newBlocks } = await doProllyBulk(inBlocks, head, event);
1213
-
1214
- if (!newProllyRootNode) {
1215
- return createAndSaveNewEvent({
1216
- inBlocks,
1217
- bigPut,
1218
- root: null,
1219
- event,
1220
- head,
1221
- additions: []
1222
- })
1487
+ rows = Array.from(docsMap.values());
1488
+ clockCIDs = resp.cids;
1489
+ // console.log('change rows', this.instanceId, rows)
1223
1490
  } else {
1224
- const prollyRootBlock = await newProllyRootNode.block;
1225
- const additions = new Map(); // ; const removals = new Map()
1226
- bigPut(prollyRootBlock, additions);
1227
- for (const nb of newBlocks) {
1228
- bigPut(nb, additions);
1229
- }
1230
- // additions are new blocks
1231
- return createAndSaveNewEvent({
1232
- inBlocks,
1233
- bigPut,
1234
- root: prollyRootBlock,
1235
- event,
1236
- head,
1237
- additions: Array.from(additions.values()) /*, todo? Array.from(removals.values()) */
1238
- })
1491
+ const allResp = await getAll(this.blocks, this.clock);
1492
+ rows = allResp.result.map(({ key, value }) => (decodeEvent({ key, value })));
1493
+ dataCIDs = allResp.cids;
1494
+ // console.log('dbdoc rows', this.instanceId, rows)
1495
+ }
1496
+ return {
1497
+ rows,
1498
+ clock: this.clockToJSON(),
1499
+ proof: { data: await cidsToProof(dataCIDs), clock: await cidsToProof(clockCIDs) }
1239
1500
  }
1240
1501
  }
1241
1502
 
1242
- /**
1243
- * Determine the effective prolly root given the current merkle clock head.
1244
- *
1245
- * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1246
- * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1247
- */
1248
- async function root (inBlocks, head) {
1249
- if (!head.length) {
1250
- throw new Error('no head')
1503
+ async allDocuments () {
1504
+ const allResp = await getAll(this.blocks, this.clock);
1505
+ const rows = allResp.result.map(({ key, value }) => (decodeEvent({ key, value }))).map(({ key, value }) => ({ key, value: { _id: key, ...value } }));
1506
+ return {
1507
+ rows,
1508
+ clock: this.clockToJSON(),
1509
+ proof: await cidsToProof(allResp.cids)
1251
1510
  }
1252
- const { root: newProllyRootNode, blocks: newBlocks, cids } = await doProllyBulk(inBlocks, head);
1253
- // todo maybe these should go to a temp blockstore?
1254
- await doTransaction('root', inBlocks, async (transactionBlockstore) => {
1255
- const { bigPut } = makeGetAndPutBlock(transactionBlockstore);
1256
- for (const nb of newBlocks) {
1257
- bigPut(nb);
1258
- }
1259
- });
1260
- return { cids, node: newProllyRootNode }
1261
1511
  }
1262
1512
 
1263
1513
  /**
1264
- * Get the list of events not known by the `since` event
1265
- * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1266
- * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1267
- * @param {import('./clock').EventLink<EventData>} since Event to compare against.
1268
- * @returns {Promise<{clockCIDs: CIDCounter, result: EventData[]}>}
1514
+ * Runs validation on the specified document using the Fireproof instance's configuration. Throws an error if the document is invalid.
1515
+ *
1516
+ * @param {Object} doc - The document to validate.
1517
+ * @returns {Promise<void>}
1518
+ * @throws {Error} - Throws an error if the document is invalid.
1519
+ * @memberof Fireproof
1520
+ * @instance
1269
1521
  */
1270
- async function eventsSince (blocks, head, since) {
1271
- if (!head.length) {
1272
- throw new Error('no head')
1522
+ async runValidation (doc) {
1523
+ if (this.config && this.config.validateChange) {
1524
+ const oldDoc = await this.get(doc._id)
1525
+ .then((doc) => doc)
1526
+ .catch(() => ({}));
1527
+ this.config.validateChange(doc, oldDoc, this.authCtx);
1273
1528
  }
1274
- const sinceHead = [...since, ...head]; // ?
1275
- const { cids, events: unknownSorted3 } = await findEventsToSync(blocks, sinceHead);
1276
- return { clockCIDs: cids, result: unknownSorted3.map(({ value: { data } }) => data) }
1277
1529
  }
1278
1530
 
1279
1531
  /**
1532
+ * Retrieves the document with the specified ID from the database
1280
1533
  *
1281
- * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1282
- * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1283
- *
1284
- * @returns {Promise<{clockCIDs: CIDCounter, result: EventData[]}>}
1285
- *
1534
+ * @param {string} key - the ID of the document to retrieve
1535
+ * @param {Object} [opts] - options
1536
+ * @returns {Promise<{_id: string}>} - the document with the specified ID
1537
+ * @memberof Fireproof
1538
+ * @instance
1286
1539
  */
1287
- async function getAll (blocks, head) {
1288
- // todo use the root node left around from put, etc
1289
- // move load to a central place
1290
- if (!head.length) {
1291
- return { clockCIDs: new utils.CIDCounter(), cids: new utils.CIDCounter(), result: [] }
1540
+ async get (key, opts = {}) {
1541
+ const clock = opts.clock || this.clock;
1542
+ const resp = await get(this.blocks, clock, charwise.encode(key));
1543
+
1544
+ // this tombstone is temporary until we can get the prolly tree to delete
1545
+ if (!resp || resp.result === null) {
1546
+ throw new Error('Not found')
1292
1547
  }
1293
- const { node: prollyRootNode, cids: clockCIDs } = await root(blocks, head);
1294
- if (!prollyRootNode) {
1295
- return { clockCIDs, cids: new utils.CIDCounter(), result: [] }
1548
+ const doc = resp.result;
1549
+ if (opts.mvcc === true) {
1550
+ doc._clock = this.clockToJSON();
1296
1551
  }
1297
- const { result, cids } = await prollyRootNode.getAllEntries(); // todo params
1298
- return { clockCIDs, cids, result: result.map(({ key, value }) => ({ key, value })) }
1552
+ doc._proof = {
1553
+ data: await cidsToProof(resp.cids),
1554
+ clock: this.clockToJSON()
1555
+ };
1556
+ doc._id = key;
1557
+ return doc
1299
1558
  }
1300
1559
 
1301
1560
  /**
1302
- * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
1303
- * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
1304
- * @param {string} key The key of the value to retrieve.
1561
+ * Adds a new document to the database, or updates an existing document. Returns the ID of the document and the new clock head.
1562
+ *
1563
+ * @param {Object} doc - the document to be added
1564
+ * @param {string} doc._id - the document ID. If not provided, a random ID will be generated.
1565
+ * @param {CID[]} doc._clock - the document's MVCC clock (as returned by get with opts.mvcc). If provided and the document has changed since that clock, the put is rejected.
1566
+ * @param {Proof} doc._proof - CIDs referenced by the update
1567
+ * @returns {Promise<{ id: string, clock: CID[] }>} - The result of adding the document to the database
1568
+ * @memberof Fireproof
1569
+ * @instance
1305
1570
  */
1306
- async function get (blocks, head, key) {
1307
- // instead pass root from db? and always update on change
1308
- if (!head.length) {
1309
- return { cids: new utils.CIDCounter(), result: null }
1310
- }
1311
- const { node: prollyRootNode, cids: clockCIDs } = await root(blocks, head);
1312
- if (!prollyRootNode) {
1313
- return { clockCIDs, cids: new utils.CIDCounter(), result: null }
1314
- }
1315
- const { result, cids } = await prollyRootNode.get(key);
1316
- return { result, cids, clockCIDs }
1571
+ async put ({ _id, _proof, ...doc }) {
1572
+ const id = _id || 'f' + Math.random().toString(36).slice(2);
1573
+ await this.runValidation({ _id: id, ...doc });
1574
+ return await this.putToProllyTree({ key: id, value: doc }, doc._clock)
1317
1575
  }
1318
1576
 
1319
- async function * vis (blocks, head) {
1320
- if (!head.length) {
1321
- return { cids: new utils.CIDCounter(), result: null }
1322
- }
1323
- const { node: prollyRootNode, cids } = await root(blocks, head);
1324
- const lines = [];
1325
- for await (const line of prollyRootNode.vis()) {
1326
- yield line;
1327
- lines.push(line);
1577
+ /**
1578
+ * Deletes a document from the database
1579
+ * @param {string | any} docOrId - the document ID
1580
+ * @returns {Promise<{ id: string, clock: CID[] }>} - The result of deleting the document from the database
1581
+ * @memberof Fireproof
1582
+ * @instance
1583
+ */
1584
+ async del (docOrId) {
1585
+ let id;
1586
+ let clock = null;
1587
+ if (docOrId._id) {
1588
+ id = docOrId._id;
1589
+ clock = docOrId._clock;
1590
+ } else {
1591
+ id = docOrId;
1328
1592
  }
1329
- return { vis: lines.join('\n'), cids }
1593
+ await this.runValidation({ _id: id, _deleted: true });
1594
+ return await this.putToProllyTree({ key: id, del: true }, clock) // not working at prolly tree layer?
1595
+ // this tombstone is temporary until we can get the prolly tree to delete
1596
+ // return await this.putToProllyTree({ key: id, value: null }, clock)
1330
1597
  }
1331
1598
 
1332
- async function visMerkleTree (blocks, head) {
1333
- if (!head.length) {
1334
- return { cids: new utils.CIDCounter(), result: null }
1599
+ /**
1600
+ * Updates the underlying storage with the specified event.
1601
+ * @private
1602
+ * @param {{del?: true, key : string, value?: any}} decodedEvent - the event to add
1603
+ * @returns {Promise<{ proof:{}, id: string, clock: CID[] }>} - The result of adding the event to storage
1604
+ */
1605
+ async putToProllyTree (decodedEvent, clock = null) {
1606
+ const event = encodeEvent(decodedEvent);
1607
+ if (clock && JSON.stringify(clock) !== JSON.stringify(this.clockToJSON())) {
1608
+ // we need to check and see what version of the document exists at the clock specified
1609
+ // if it is the same as the one we are trying to put, then we can proceed
1610
+ const resp = await eventsSince(this.blocks, this.clock, event.value._clock);
1611
+ const missedChange = resp.result.find(({ key }) => key === event.key);
1612
+ if (missedChange) {
1613
+ throw new Error('MVCC conflict, document is changed, please reload the document and try again.')
1614
+ }
1335
1615
  }
1336
- const { node: prollyRootNode, cids } = await root(blocks, head);
1337
- const lines = [];
1338
- for await (const line of prollyRootNode.vis()) {
1339
- lines.push(line);
1616
+ const result = await doTransaction(
1617
+ 'putToProllyTree',
1618
+ this.blocks,
1619
+ async (blocks) => await put(blocks, this.clock, event)
1620
+ );
1621
+ if (!result) {
1622
+ console.error('failed', event);
1623
+ throw new Error('failed to put at storage layer')
1624
+ }
1625
+ // console.log('new clock head', this.instanceId, result.head.toString())
1626
+ this.clock = result.head; // do we want to do this as a finally block
1627
+ await this.notifyListeners([decodedEvent]); // this type is odd
1628
+ return {
1629
+ id: decodedEvent.key,
1630
+ clock: this.clockToJSON(),
1631
+ proof: { data: await cidsToProof(result.cids), clock: await cidsToProof(result.clockCIDs) }
1340
1632
  }
1341
- return { vis: lines.join('\n'), cids }
1633
+ // todo should include additions (or split clock)
1342
1634
  }
1343
1635
 
1344
- async function visMerkleClock (blocks, head) {
1345
- const lines = [];
1346
- for await (const line of vis$1(blocks, head)) {
1347
- // yield line
1348
- lines.push(line);
1349
- }
1350
- return { vis: lines.join('\n') }
1636
+ // /**
1637
+ // * Advances the clock to the specified event and updates the root CID
1638
+ // * Will be used by replication
1639
+ // */
1640
+ // async advance (event) {
1641
+ // this.clock = await advance(this.blocks, this.clock, event)
1642
+ // this.rootCid = await root(this.blocks, this.clock)
1643
+ // return this.clock
1644
+ // }
1645
+
1646
+ async * vis () {
1647
+ return yield * vis(this.blocks, this.clock)
1351
1648
  }
1352
1649
 
1353
- // @ts-nocheck
1354
- // import { CID } from 'multiformats/dist/types/src/cid.js'
1355
-
1356
- // const sleep = ms => new Promise(resolve => setTimeout(resolve, ms))
1650
+ async visTree () {
1651
+ return await visMerkleTree(this.blocks, this.clock)
1652
+ }
1357
1653
 
1358
- // class Proof {}
1654
+ async visClock () {
1655
+ return await visMerkleClock(this.blocks, this.clock)
1656
+ }
1359
1657
 
1360
1658
  /**
1361
- * @class Fireproof
1362
- * @classdesc Fireproof stores data in IndexedDB and provides a Merkle clock.
1363
- * This is the main class for saving and loading JSON and other documents with the database. You can find additional examples and
1364
- * usage guides in the repository README.
1365
- *
1366
- * @param {import('./blockstore.js').TransactionBlockstore} blocks - The block storage instance to use documents and indexes
1367
- * @param {CID[]} clock - The Merkle clock head to use for the Fireproof instance.
1368
- * @param {object} [config] - Optional configuration options for the Fireproof instance.
1369
- * @param {object} [authCtx] - Optional authorization context object to use for any authentication checks.
1370
- *
1659
+ * Registers a Listener to be called when the Fireproof instance's clock is updated.
1660
+ * Receives live changes from the database after they are committed.
1661
+ * @param {Function} listener - The listener to be called when the clock is updated.
1662
+ * @returns {Function} - A function that can be called to unregister the listener.
1663
+ * @memberof Fireproof
1371
1664
  */
1372
- class Fireproof {
1373
- listeners = new Set()
1374
-
1375
- /**
1376
- * @function storage
1377
- * @memberof Fireproof
1378
- * Creates a new Fireproof instance with default storage settings
1379
- * Most apps should use this and not worry about the details.
1380
- * @static
1381
- * @returns {Fireproof} - a new Fireproof instance
1382
- */
1383
- static storage = (name = 'global') => {
1384
- const instanceKey = crypto.randomBytes(32).toString('hex'); // pass null to disable encryption
1385
- // pick a random key from const validatedKeys
1386
- // const instanceKey = validatedKeys[Math.floor(Math.random() * validatedKeys.length)]
1387
- return new Fireproof(new TransactionBlockstore(name, instanceKey), [], { name })
1388
- }
1389
-
1390
- constructor (blocks, clock, config, authCtx = {}) {
1391
- this.name = config?.name || 'global';
1392
- this.instanceId = `fp.${this.name}.${Math.random().toString(36).substring(2, 7)}`;
1393
- this.blocks = blocks;
1394
- this.clock = clock;
1395
- this.config = config;
1396
- this.authCtx = authCtx;
1397
- this.indexes = new Map();
1398
- }
1399
-
1400
- /**
1401
- * Renders the Fireproof instance as a JSON object.
1402
- * @returns {Object} - The JSON representation of the Fireproof instance. Includes clock heads for the database and its indexes.
1403
- * @memberof Fireproof
1404
- * @instance
1405
- */
1406
- toJSON () {
1407
- // todo this also needs to return the index roots...
1408
- return {
1409
- clock: this.clockToJSON(),
1410
- name: this.name,
1411
- key: this.blocks.valet.getKeyMaterial(),
1412
- indexes: [...this.indexes.values()].map(index => index.toJSON())
1413
- }
1414
- }
1415
-
1416
- clockToJSON () {
1417
- return this.clock.map(cid => cid.toString())
1418
- }
1419
-
1420
- hydrate ({ clock, name, key }) {
1421
- this.name = name;
1422
- this.clock = clock;
1423
- this.blocks.valet.setKeyMaterial(key);
1424
- this.indexBlocks = null;
1425
- }
1426
-
1427
- /**
1428
- * Triggers a notification to all listeners
1429
- * of the Fireproof instance so they can repaint UI, etc.
1430
- * @param {CID[] } clock
1431
- * Clock to use for the snapshot.
1432
- * @returns {Promise<void>}
1433
- * @memberof Fireproof
1434
- * @instance
1435
- */
1436
- async notifyReset () {
1437
- await this.notifyListeners({ _reset: true, _clock: this.clockToJSON() });
1438
- }
1439
-
1440
- // used by indexes etc to notify database listeners of new availability
1441
- async notifyExternal (source = 'unknown') {
1442
- await this.notifyListeners({ _external: source, _clock: this.clockToJSON() });
1665
+ registerListener (listener) {
1666
+ this.listeners.add(listener);
1667
+ return () => {
1668
+ this.listeners.delete(listener);
1443
1669
  }
1670
+ }
1444
1671
 
1445
- /**
1446
- * Returns the changes made to the Fireproof instance since the specified event.
1447
- * @function changesSince
1448
- * @param {CID[]} [event] - The clock head to retrieve changes since. If null or undefined, retrieves all changes.
1449
- * @returns {Object<{rows : Object[], clock: CID[]}>} An object containing the rows and the head of the instance's clock.
1450
- * @memberof Fireproof
1451
- * @instance
1452
- */
1453
- async changesSince (event) {
1454
- // console.log('changesSince', this.instanceId, event, this.clock)
1455
- let rows, dataCIDs, clockCIDs;
1456
- // if (!event) event = []
1457
- if (event) {
1458
- const resp = await eventsSince(this.blocks, this.clock, event);
1459
- const docsMap = new Map();
1460
- for (const { key, type, value } of resp.result.map(decodeEvent)) {
1461
- if (type === 'del') {
1462
- docsMap.set(key, { key, del: true });
1463
- } else {
1464
- docsMap.set(key, { key, value });
1465
- }
1466
- }
1467
- rows = Array.from(docsMap.values());
1468
- clockCIDs = resp.cids;
1469
- // console.log('change rows', this.instanceId, rows)
1470
- } else {
1471
- const allResp = await getAll(this.blocks, this.clock);
1472
- rows = allResp.result.map(({ key, value }) => (decodeEvent({ key, value })));
1473
- dataCIDs = allResp.cids;
1474
- // console.log('dbdoc rows', this.instanceId, rows)
1475
- }
1476
- return {
1477
- rows,
1478
- clock: this.clockToJSON(),
1479
- proof: { data: await cidsToProof(dataCIDs), clock: await cidsToProof(clockCIDs) }
1480
- }
1672
+ async notifyListeners (changes) {
1673
+ // await sleep(10)
1674
+ for (const listener of this.listeners) {
1675
+ await listener(changes);
1481
1676
  }
1677
+ }
1482
1678
 
1483
- async allDocuments () {
1484
- const allResp = await getAll(this.blocks, this.clock);
1485
- const rows = allResp.result.map(({ key, value }) => (decodeEvent({ key, value }))).map(({ key, value }) => ({ key, value: { _id: key, ...value } }));
1486
- return {
1487
- rows,
1488
- clock: this.clockToJSON(),
1489
- proof: await cidsToProof(allResp.cids)
1490
- }
1491
- }
1679
+ setCarUploader (carUploaderFn) {
1680
+ // console.log('registering car uploader')
1681
+ // https://en.wikipedia.org/wiki/Law_of_Demeter - this is a violation of the law of demeter
1682
+ this.blocks.valet.uploadFunction = carUploaderFn;
1683
+ }
1492
1684
 
1685
+ setRemoteBlockReader (remoteBlockReaderFn) {
1686
+ // console.log('registering remote block reader')
1687
+ this.blocks.valet.remoteBlockFunction = remoteBlockReaderFn;
1688
+ }
1689
+ }
1690
+
1691
+ async function cidsToProof (cids) {
1692
+ if (!cids || !cids.all) return []
1693
+ const all = await cids.all();
1694
+ return [...all].map((cid) => cid.toString())
1695
+ }
1696
+
1697
+ function decodeEvent (event) {
1698
+ const decodedKey = charwise.decode(event.key);
1699
+ return { ...event, key: decodedKey }
1700
+ }
1701
+
1702
+ function encodeEvent (event) {
1703
+ if (!(event && event.key)) return
1704
+ const encodedKey = charwise.encode(event.key);
1705
+ return { ...event, key: encodedKey }
1706
+ }
1707
+
1708
+ // @ts-nocheck
1709
+
1710
+ const compare = (a, b) => {
1711
+ const [aKey, aRef] = a;
1712
+ const [bKey, bRef] = b;
1713
+ const comp = utils.simpleCompare(aKey, bKey);
1714
+ if (comp !== 0) return comp
1715
+ return refCompare(aRef, bRef)
1716
+ };
1717
+
1718
+ const refCompare = (aRef, bRef) => {
1719
+ if (Number.isNaN(aRef)) return -1
1720
+ if (Number.isNaN(bRef)) throw new Error('ref may not be Infinity or NaN')
1721
+ if (aRef === Infinity) return 1 // need to test this on equal docids!
1722
+ // if (!Number.isFinite(bRef)) throw new Error('ref may not be Infinity or NaN')
1723
+ return utils.simpleCompare(aRef, bRef)
1724
+ };
1725
+
1726
+ const dbIndexOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare };
1727
+ const idIndexOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare: utils.simpleCompare };
1728
+
1729
+ const makeDoc = ({ key, value }) => ({ _id: key, ...value });
1730
+
1731
+ /**
1732
+ * JSDoc for the result row type.
1733
+ * @typedef {Object} ChangeEvent
1734
+ * @property {string} key - The key of the document.
1735
+ * @property {Object} value - The new value of the document.
1736
+ * @property {boolean} [del] - Is the row deleted?
1737
+ * @memberof DbIndex
1738
+ */
1739
+
1740
+ /**
1741
+ * JSDoc for the result row type.
1742
+ * @typedef {Object} DbIndexEntry
1743
+ * @property {string[]} key - The key for the DbIndex entry.
1744
+ * @property {Object} value - The value of the document.
1745
+ * @property {boolean} [del] - Is the row deleted?
1746
+ * @memberof DbIndex
1747
+ */
1748
+
1749
+ /**
1750
+ * Transforms a set of changes to DbIndex entries using a map function.
1751
+ *
1752
+ * @param {ChangeEvent[]} changes
1753
+ * @param {Function} mapFn
1754
+ * @returns {DbIndexEntry[]} The DbIndex entries generated by the map function.
1755
+ * @private
1756
+ * @memberof DbIndex
1757
+ */
1758
+ const indexEntriesForChanges = (changes, mapFn) => {
1759
+ const indexEntries = [];
1760
+ changes.forEach(({ key, value, del }) => {
1761
+ if (del || !value) return
1762
+ mapFn(makeDoc({ key, value }), (k, v) => {
1763
+ if (typeof v === 'undefined' || typeof k === 'undefined') return
1764
+ indexEntries.push({
1765
+ key: [charwise.encode(k), key],
1766
+ value: v
1767
+ });
1768
+ });
1769
+ });
1770
+ return indexEntries
1771
+ };
1772
+
1773
+ /**
1774
+ * Represents a DbIndex for a Fireproof database.
1775
+ *
1776
+ * @class DbIndex
1777
+ * @classdesc A DbIndex can be used to order and filter the documents in a Fireproof database.
1778
+ *
1779
+ * @param {Fireproof} database - The Fireproof database instance to index.
1780
+ * @param {Function} mapFn - The map function to apply to each entry in the database.
1781
+ *
1782
+ */
1783
+ class DbIndex {
1784
+ constructor (database, mapFn, clock, opts = {}) {
1785
+ // console.log('DbIndex constructor', database.constructor.name, typeof mapFn, clock)
1493
1786
  /**
1494
- * Runs validation on the specified document using the Fireproof instance's configuration. Throws an error if the document is invalid.
1495
- *
1496
- * @param {Object} doc - The document to validate.
1497
- * @returns {Promise<void>}
1498
- * @throws {Error} - Throws an error if the document is invalid.
1499
- * @memberof Fireproof
1500
- * @instance
1787
+ * The database instance to index.
1788
+ * @type {Fireproof}
1501
1789
  */
1502
- async runValidation (doc) {
1503
- if (this.config && this.config.validateChange) {
1504
- const oldDoc = await this.get(doc._id)
1505
- .then((doc) => doc)
1506
- .catch(() => ({}));
1507
- this.config.validateChange(doc, oldDoc, this.authCtx);
1508
- }
1790
+ this.database = database;
1791
+ if (!database.indexBlocks) {
1792
+ database.indexBlocks = new TransactionBlockstore(database.name + '.indexes', database.blocks.valet.getKeyMaterial());
1509
1793
  }
1510
-
1511
1794
  /**
1512
- * Retrieves the document with the specified ID from the database
1513
- *
1514
- * @param {string} key - the ID of the document to retrieve
1515
- * @param {Object} [opts] - options
1516
- * @returns {Promise<{_id: string}>} - the document with the specified ID
1517
- * @memberof Fireproof
1518
- * @instance
1795
+ * The map function to apply to each entry in the database.
1796
+ * @type {Function}
1519
1797
  */
1520
- async get (key, opts = {}) {
1521
- const clock = opts.clock || this.clock;
1522
- const resp = await get(this.blocks, clock, charwise.encode(key));
1523
1798
 
1524
- // this tombstone is temporary until we can get the prolly tree to delete
1525
- if (!resp || resp.result === null) {
1526
- throw new Error('Not found')
1527
- }
1528
- const doc = resp.result;
1529
- if (opts.mvcc === true) {
1530
- doc._clock = this.clockToJSON();
1531
- }
1532
- doc._proof = {
1533
- data: await cidsToProof(resp.cids),
1534
- clock: this.clockToJSON()
1535
- };
1536
- doc._id = key;
1537
- return doc
1538
- }
1799
+ if (typeof mapFn === 'string') {
1800
+ this.mapFnString = mapFn;
1801
+ } else {
1802
+ this.mapFn = mapFn;
1803
+ this.mapFnString = mapFn.toString();
1804
+ }
1805
+ this.name = opts.name || this.makeName();
1806
+ this.indexById = { root: null, cid: null };
1807
+ this.indexByKey = { root: null, cid: null };
1808
+ this.dbHead = null;
1809
+ if (clock) {
1810
+ this.indexById.cid = clock.byId;
1811
+ this.indexByKey.cid = clock.byKey;
1812
+ this.dbHead = clock.db;
1813
+ }
1814
+ this.instanceId = this.database.instanceId + `.DbIndex.${Math.random().toString(36).substring(2, 7)}`;
1815
+ this.updateIndexPromise = null;
1816
+ if (!opts.temporary) { DbIndex.registerWithDatabase(this, this.database); }
1817
+ }
1539
1818
 
1540
- /**
1541
- * Adds a new document to the database, or updates an existing document. Returns the ID of the document and the new clock head.
1542
- *
1543
- * @param {Object} doc - the document to be added
1544
- * @param {string} doc._id - the document ID. If not provided, a random ID will be generated.
1545
- * @param {CID[]} doc._clock - the document ID. If not provided, a random ID will be generated.
1546
- * @param {Proof} doc._proof - CIDs referenced by the update
1547
- * @returns {Promise<{ id: string, clock: CID[] }>} - The result of adding the document to the database
1548
- * @memberof Fireproof
1549
- * @instance
1550
- */
1551
- async put ({ _id, _proof, ...doc }) {
1552
- const id = _id || 'f' + Math.random().toString(36).slice(2);
1553
- await this.runValidation({ _id: id, ...doc });
1554
- return await this.putToProllyTree({ key: id, value: doc }, doc._clock)
1555
- }
1819
+ makeName () {
1820
+ const regex = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g;
1821
+ const matches = Array.from(this.mapFnString.matchAll(regex), match => match[1].trim());
1822
+ return matches[1]
1823
+ }
1556
1824
 
1557
- /**
1558
- * Deletes a document from the database
1559
- * @param {string | any} docOrId - the document ID
1560
- * @returns {Promise<{ id: string, clock: CID[] }>} - The result of deleting the document from the database
1561
- * @memberof Fireproof
1562
- * @instance
1563
- */
1564
- async del (docOrId) {
1565
- let id;
1566
- let clock = null;
1567
- if (docOrId._id) {
1568
- id = docOrId._id;
1569
- clock = docOrId._clock;
1825
+ static registerWithDatabase (inIndex, database) {
1826
+ if (!database.indexes.has(inIndex.mapFnString)) {
1827
+ database.indexes.set(inIndex.mapFnString, inIndex);
1828
+ } else {
1829
+ // merge our inIndex code with the inIndex clock or vice versa
1830
+ const existingIndex = database.indexes.get(inIndex.mapFnString);
1831
+ // keep the code instance, discard the clock instance
1832
+ if (existingIndex.mapFn) { // this one also has other config
1833
+ existingIndex.dbHead = inIndex.dbHead;
1834
+ existingIndex.indexById.cid = inIndex.indexById.cid;
1835
+ existingIndex.indexByKey.cid = inIndex.indexByKey.cid;
1570
1836
  } else {
1571
- id = docOrId;
1572
- }
1573
- await this.runValidation({ _id: id, _deleted: true });
1574
- return await this.putToProllyTree({ key: id, del: true }, clock) // not working at prolly tree layer?
1575
- // this tombstone is temporary until we can get the prolly tree to delete
1576
- // return await this.putToProllyTree({ key: id, value: null }, clock)
1577
- }
1578
-
1579
- /**
1580
- * Updates the underlying storage with the specified event.
1581
- * @private
1582
- * @param {{del?: true, key : string, value?: any}} decodedEvent - the event to add
1583
- * @returns {Promise<{ proof:{}, id: string, clock: CID[] }>} - The result of adding the event to storage
1584
- */
1585
- async putToProllyTree (decodedEvent, clock = null) {
1586
- const event = encodeEvent(decodedEvent);
1587
- if (clock && JSON.stringify(clock) !== JSON.stringify(this.clockToJSON())) {
1588
- // we need to check and see what version of the document exists at the clock specified
1589
- // if it is the same as the one we are trying to put, then we can proceed
1590
- const resp = await eventsSince(this.blocks, this.clock, event.value._clock);
1591
- const missedChange = resp.result.find(({ key }) => key === event.key);
1592
- if (missedChange) {
1593
- throw new Error('MVCC conflict, document is changed, please reload the document and try again.')
1594
- }
1595
- }
1596
- const result = await doTransaction(
1597
- 'putToProllyTree',
1598
- this.blocks,
1599
- async (blocks) => await put(blocks, this.clock, event)
1600
- );
1601
- if (!result) {
1602
- console.error('failed', event);
1603
- throw new Error('failed to put at storage layer')
1604
- }
1605
- // console.log('new clock head', this.instanceId, result.head.toString())
1606
- this.clock = result.head; // do we want to do this as a finally block
1607
- await this.notifyListeners([decodedEvent]); // this type is odd
1608
- return {
1609
- id: decodedEvent.key,
1610
- clock: this.clockToJSON(),
1611
- proof: { data: await cidsToProof(result.cids), clock: await cidsToProof(result.clockCIDs) }
1612
- }
1613
- // todo should include additions (or split clock)
1614
- }
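The removed hunk above documents the CRUD and MVCC flow: get with { mvcc: true } attaches the read clock to the document, and put forwards that _clock so putToProllyTree can detect a concurrent change. A sketch of that flow, assuming the same public API is kept by the relocated code in 0.3.13 and that `db` is an existing Fireproof instance:

// Sketch only: `db` is an assumed Fireproof instance; API taken from the hunk above.
const doc = await db.get('some-doc-id', { mvcc: true }); // doc._clock records the read clock
doc.count = (doc.count || 0) + 1;
try {
  await db.put(doc); // put() passes doc._clock along, so a concurrent change is rejected
} catch (err) {
  // 'MVCC conflict, document is changed, please reload the document and try again.'
  console.error(err.message);
}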
1615
-
1616
- // /**
1617
- // * Advances the clock to the specified event and updates the root CID
1618
- // * Will be used by replication
1619
- // */
1620
- // async advance (event) {
1621
- // this.clock = await advance(this.blocks, this.clock, event)
1622
- // this.rootCid = await root(this.blocks, this.clock)
1623
- // return this.clock
1624
- // }
1625
-
1626
- async * vis () {
1627
- return yield * vis(this.blocks, this.clock)
1628
- }
1629
-
1630
- async visTree () {
1631
- return await visMerkleTree(this.blocks, this.clock)
1632
- }
1633
-
1634
- async visClock () {
1635
- return await visMerkleClock(this.blocks, this.clock)
1636
- }
1637
-
1638
- /**
1639
- * Registers a Listener to be called when the Fireproof instance's clock is updated.
1640
- * Recieves live changes from the database after they are committed.
1641
- * @param {Function} listener - The listener to be called when the clock is updated.
1642
- * @returns {Function} - A function that can be called to unregister the listener.
1643
- * @memberof Fireproof
1644
- */
1645
- registerListener (listener) {
1646
- this.listeners.add(listener);
1647
- return () => {
1648
- this.listeners.delete(listener);
1649
- }
1650
- }
1651
-
1652
- async notifyListeners (changes) {
1653
- // await sleep(10)
1654
- for (const listener of this.listeners) {
1655
- await listener(changes);
1837
+ inIndex.dbHead = existingIndex.dbHead;
1838
+ inIndex.indexById.cid = existingIndex.indexById.cid;
1839
+ inIndex.indexByKey.cid = existingIndex.indexByKey.cid;
1840
+ database.indexes.set(inIndex.mapFnString, inIndex);
1656
1841
  }
1657
1842
  }
1658
-
1659
- setCarUploader (carUploaderFn) {
1660
- // console.log('registering car uploader')
1661
- // https://en.wikipedia.org/wiki/Law_of_Demeter - this is a violation of the law of demeter
1662
- this.blocks.valet.uploadFunction = carUploaderFn;
1663
- }
1664
-
1665
- setRemoteBlockReader (remoteBlockReaderFn) {
1666
- // console.log('registering remote block reader')
1667
- this.blocks.valet.remoteBlockFunction = remoteBlockReaderFn;
1668
- }
1669
- }
1670
-
1671
- async function cidsToProof (cids) {
1672
- if (!cids || !cids.all) return []
1673
- const all = await cids.all();
1674
- return [...all].map((cid) => cid.toString())
1675
1843
  }
1676
1844
 
1677
- function decodeEvent (event) {
1678
- const decodedKey = charwise.decode(event.key);
1679
- return { ...event, key: decodedKey }
1845
+ toJSON () {
1846
+ const indexJson = { name: this.name, code: this.mapFnString, clock: { db: null, byId: null, byKey: null } };
1847
+ indexJson.clock.db = this.dbHead?.map(cid => cid.toString());
1848
+ indexJson.clock.byId = this.indexById.cid?.toString();
1849
+ indexJson.clock.byKey = this.indexByKey.cid?.toString();
1850
+ return indexJson
1680
1851
  }
1681
1852
 
1682
- function encodeEvent (event) {
1683
- if (!(event && event.key)) return
1684
- const encodedKey = charwise.encode(event.key);
1685
- return { ...event, key: encodedKey }
1853
+ static fromJSON (database, { code, clock, name }) {
1854
+ // console.log('DbIndex.fromJSON', database.constructor.name, code, clock)
1855
+ return new DbIndex(database, code, clock, { name })
1686
1856
  }
1687
1857
 
1688
- // @ts-nocheck
1689
-
1690
- const compare = (a, b) => {
1691
- const [aKey, aRef] = a;
1692
- const [bKey, bRef] = b;
1693
- const comp = utils.simpleCompare(aKey, bKey);
1694
- if (comp !== 0) return comp
1695
- return refCompare(aRef, bRef)
1696
- };
1697
-
1698
- const refCompare = (aRef, bRef) => {
1699
- if (Number.isNaN(aRef)) return -1
1700
- if (Number.isNaN(bRef)) throw new Error('ref may not be Infinity or NaN')
1701
- if (aRef === Infinity) return 1 // need to test this on equal docids!
1702
- // if (!Number.isFinite(bRef)) throw new Error('ref may not be Infinity or NaN')
1703
- return utils.simpleCompare(aRef, bRef)
1704
- };
1705
-
1706
- const dbIndexOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare };
1707
- const idIndexOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare: utils.simpleCompare };
1708
-
1709
- const makeDoc = ({ key, value }) => ({ _id: key, ...value });
1710
-
1711
1858
  /**
1712
- * JDoc for the result row type.
1713
- * @typedef {Object} ChangeEvent
1714
- * @property {string} key - The key of the document.
1715
- * @property {Object} value - The new value of the document.
1716
- * @property {boolean} [del] - Is the row deleted?
1859
+ * JSDoc for Query type.
1860
+ * @typedef {Object} DbQuery
1861
+ * @property {string[]} [range] - The range to query.
1717
1862
  * @memberof DbIndex
1718
1863
  */
1719
1864
 
1720
1865
  /**
1721
- * JDoc for the result row type.
1722
- * @typedef {Object} DbIndexEntry
1723
- * @property {string[]} key - The key for the DbIndex entry.
1724
- * @property {Object} value - The value of the document.
1725
- * @property {boolean} [del] - Is the row deleted?
1866
+ * Query object can have {range}, {key}, and an optional {limit}
1867
+ * @param {DbQuery} query - the query range to use
1868
+ * @returns {Promise<{proof: {}, rows: Array<{id: string, key: string, value: any}>}>}
1726
1869
  * @memberof DbIndex
1870
+ * @instance
1727
1871
  */
1872
+ async query (query, update = true) {
1873
+ // const callId = Math.random().toString(36).substring(2, 7)
1874
+ // todo pass a root to query a snapshot
1875
+ // console.time(callId + '.updateIndex')
1876
+ update && await this.updateIndex(this.database.indexBlocks);
1877
+ // console.timeEnd(callId + '.updateIndex')
1878
+ // console.time(callId + '.doIndexQuery')
1879
+ // console.log('query', query)
1880
+ const response = await doIndexQuery(this.database.indexBlocks, this.indexByKey, query);
1881
+ // console.timeEnd(callId + '.doIndexQuery')
1882
+ return {
1883
+ proof: { index: await cidsToProof(response.cids) },
1884
+ rows: response.result.map(({ id, key, row }) => {
1885
+ return ({ id, key: charwise.decode(key), value: row })
1886
+ })
1887
+ }
1888
+ }
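The query method above accepts a range, a single key, or no constraint at all, with an optional limit (see doIndexQuery further down). A usage sketch, assuming `db` is an existing Fireproof instance and the map function emits one key per document; DbIndex is exported from this bundle as `Index`:

// Sketch only: `db` is an assumed Fireproof instance.
const byAge = new DbIndex(db, (doc, emit) => {
  if (typeof doc.age !== 'undefined') emit(doc.age, doc.name);
});
// Rows come back as { id, key, value }, with keys decoded from their charwise encoding.
const { rows } = await byAge.query({ range: [30, 40], limit: 10 });
rows.forEach(({ id, key, value }) => console.log(id, key, value));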
1728
1889
 
1729
1890
  /**
1730
- * Transforms a set of changes to DbIndex entries using a map function.
1731
- *
1732
- * @param {ChangeEvent[]} changes
1733
- * @param {Function} mapFn
1734
- * @returns {DbIndexEntry[]} The DbIndex entries generated by the map function.
1891
+ * Update the DbIndex with the latest changes
1735
1892
  * @private
1736
- * @memberof DbIndex
1893
+ * @returns {Promise<void>}
1737
1894
  */
1738
- const indexEntriesForChanges = (changes, mapFn) => {
1739
- const indexEntries = [];
1740
- changes.forEach(({ key, value, del }) => {
1741
- if (del || !value) return
1742
- mapFn(makeDoc({ key, value }), (k, v) => {
1743
- if (typeof v === 'undefined' || typeof k === 'undefined') return
1744
- indexEntries.push({
1745
- key: [charwise.encode(k), key],
1746
- value: v
1747
- });
1748
- });
1749
- });
1750
- return indexEntries
1751
- };
1752
1895
 
1753
- /**
1754
- * Represents an DbIndex for a Fireproof database.
1755
- *
1756
- * @class DbIndex
1757
- * @classdesc An DbIndex can be used to order and filter the documents in a Fireproof database.
1758
- *
1759
- * @param {Fireproof} database - The Fireproof database instance to DbIndex.
1760
- * @param {Function} mapFn - The map function to apply to each entry in the database.
1761
- *
1762
- */
1763
- class DbIndex {
1764
- constructor (database, mapFn, clock, opts = {}) {
1765
- // console.log('DbIndex constructor', database.constructor.name, typeof mapFn, clock)
1766
- /**
1767
- * The database instance to DbIndex.
1768
- * @type {Fireproof}
1769
- */
1770
- this.database = database;
1771
- if (!database.indexBlocks) {
1772
- database.indexBlocks = new TransactionBlockstore(database.name + '.indexes', database.blocks.valet.getKeyMaterial());
1773
- }
1774
- /**
1775
- * The map function to apply to each entry in the database.
1776
- * @type {Function}
1777
- */
1896
+ async updateIndex (blocks) {
1897
+ // todo this could enqueue the request and give fresh ones to all second comers -- right now it gives out stale promises while working
1898
+ // what would it do in a world where all indexes provide a database snapshot to query?
1899
+ if (this.updateIndexPromise) return this.updateIndexPromise
1900
+ this.updateIndexPromise = this.innerUpdateIndex(blocks);
1901
+ this.updateIndexPromise.finally(() => { this.updateIndexPromise = null; });
1902
+ return this.updateIndexPromise
1903
+ }
1778
1904
 
1779
- if (typeof mapFn === 'string') {
1780
- this.mapFnString = mapFn;
1781
- } else {
1782
- this.mapFn = mapFn;
1783
- this.mapFnString = mapFn.toString();
1905
+ async innerUpdateIndex (inBlocks) {
1906
+ // console.log('dbHead', this.dbHead)
1907
+ // console.time(callTag + '.changesSince')
1908
+ const result = await this.database.changesSince(this.dbHead); // {key, value, del}
1909
+ // console.timeEnd(callTag + '.changesSince')
1910
+ // console.log('result.rows.length', result.rows.length)
1911
+
1912
+ // console.time(callTag + '.doTransactionupdateIndex')
1913
+ // console.log('updateIndex changes length', result.rows.length)
1914
+
1915
+ if (result.rows.length === 0) {
1916
+ // console.log('updateIndex < no changes', result.clock)
1917
+ this.dbHead = result.clock;
1918
+ return
1919
+ }
1920
+ await doTransaction('updateIndex', inBlocks, async (blocks) => {
1921
+ let oldIndexEntries = [];
1922
+ let removeByIdIndexEntries = [];
1923
+ await loadIndex(blocks, this.indexById, idIndexOpts);
1924
+ await loadIndex(blocks, this.indexByKey, dbIndexOpts);
1925
+ if (this.dbHead) {
1926
+ const oldChangeEntries = await this.indexById.root.getMany(result.rows.map(({ key }) => key));
1927
+ oldIndexEntries = oldChangeEntries.result.map((key) => ({ key, del: true }));
1928
+ removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }));
1784
1929
  }
1785
- this.name = opts.name || this.makeName();
1786
- this.indexById = { root: null, cid: null };
1787
- this.indexByKey = { root: null, cid: null };
1788
- this.dbHead = null;
1789
- if (clock) {
1790
- this.indexById.cid = clock.byId;
1791
- this.indexByKey.cid = clock.byKey;
1792
- this.dbHead = clock.db;
1930
+ if (!this.mapFn) {
1931
+ throw new Error('No live map function installed for index, cannot update. Make sure your index definition runs before any queries.' + (this.mapFnString ? ' Your code should match the stored map function source:\n' + this.mapFnString : ''))
1793
1932
  }
1794
- this.instanceId = this.database.instanceId + `.DbIndex.${Math.random().toString(36).substring(2, 7)}`;
1795
- this.updateIndexPromise = null;
1796
- if (!opts.temporary) { DbIndex.registerWithDatabase(this, this.database); }
1797
- }
1798
-
1799
- makeName () {
1800
- const regex = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g;
1801
- const matches = Array.from(this.mapFnString.matchAll(regex), match => match[1].trim());
1802
- return matches[1]
1803
- }
1804
-
1805
- static registerWithDatabase (inIndex, database) {
1806
- if (!database.indexes.has(inIndex.mapFnString)) {
1807
- database.indexes.set(inIndex.mapFnString, inIndex);
1808
- } else {
1809
- // merge our inIndex code with the inIndex clock or vice versa
1810
- const existingIndex = database.indexes.get(inIndex.mapFnString);
1811
- // keep the code instance, discard the clock instance
1812
- if (existingIndex.mapFn) { // this one also has other config
1813
- existingIndex.dbHead = inIndex.dbHead;
1814
- existingIndex.indexById.cid = inIndex.indexById.cid;
1815
- existingIndex.indexByKey.cid = inIndex.indexByKey.cid;
1816
- } else {
1817
- inIndex.dbHead = existingIndex.dbHead;
1818
- inIndex.indexById.cid = existingIndex.indexById.cid;
1819
- inIndex.indexByKey.cid = existingIndex.indexByKey.cid;
1820
- database.indexes.set(inIndex.mapFnString, inIndex);
1821
- }
1933
+ const indexEntries = indexEntriesForChanges(result.rows, this.mapFn);
1934
+ const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }));
1935
+ this.indexById = await bulkIndex(blocks, this.indexById, removeByIdIndexEntries.concat(byIdIndexEntries), idIndexOpts);
1936
+ this.indexByKey = await bulkIndex(blocks, this.indexByKey, oldIndexEntries.concat(indexEntries), dbIndexOpts);
1937
+ this.dbHead = result.clock;
1938
+ });
1939
+ this.database.notifyExternal('dbIndex');
1940
+ // console.timeEnd(callTag + '.doTransactionupdateIndex')
1941
+ // console.log(`updateIndex ${callTag} <`, this.instanceId, this.dbHead?.toString(), this.indexByKey.cid?.toString(), this.indexById.cid?.toString())
1942
+ }
1943
+ }
1944
+
1945
+ /**
1946
+ * Update the DbIndex with the given entries
1947
+ * @param {import('./blockstore.js').Blockstore} blocks
1948
+ * @param {{root, cid}} inIndex
1949
+ * @param {DbIndexEntry[]} indexEntries
1950
+ * @private
1951
+ */
1952
+ async function bulkIndex (blocks, inIndex, indexEntries, opts) {
1953
+ if (!indexEntries.length) return inIndex
1954
+ const putBlock = blocks.put.bind(blocks);
1955
+ const { getBlock } = makeGetBlock(blocks);
1956
+ let returnRootBlock;
1957
+ let returnNode;
1958
+ if (!inIndex.root) {
1959
+ const cid = inIndex.cid;
1960
+ if (!cid) {
1961
+ for await (const node of await dbIndex.create({ get: getBlock, list: indexEntries, ...opts })) {
1962
+ const block = await node.block;
1963
+ await putBlock(block.cid, block.bytes);
1964
+ returnRootBlock = block;
1965
+ returnNode = node;
1822
1966
  }
1967
+ return { root: returnNode, cid: returnRootBlock.cid }
1823
1968
  }
1824
-
1825
- toJSON () {
1826
- const indexJson = { name: this.name, code: this.mapFnString, clock: { db: null, byId: null, byKey: null } };
1827
- indexJson.clock.db = this.dbHead?.map(cid => cid.toString());
1828
- indexJson.clock.byId = this.indexById.cid?.toString();
1829
- indexJson.clock.byKey = this.indexByKey.cid?.toString();
1830
- return indexJson
1831
- }
1832
-
1833
- static fromJSON (database, { code, clock, name }) {
1834
- // console.log('DbIndex.fromJSON', database.constructor.name, code, clock)
1835
- return new DbIndex(database, code, clock, { name })
1836
- }
1837
-
1838
- /**
1839
- * JSDoc for Query type.
1840
- * @typedef {Object} DbQuery
1841
- * @property {string[]} [range] - The range to query.
1842
- * @memberof DbIndex
1843
- */
1844
-
1845
- /**
1846
- * Query object can have {range}
1847
- * @param {DbQuery} query - the query range to use
1848
- * @returns {Promise<{proof: {}, rows: Array<{id: string, key: string, value: any}>}>}
1849
- * @memberof DbIndex
1850
- * @instance
1969
+ inIndex.root = await dbIndex.load({ cid, get: getBlock, ...dbIndexOpts });
1970
+ }
1971
+ const { root, blocks: newBlocks } = await inIndex.root.bulk(indexEntries);
1972
+ returnRootBlock = await root.block;
1973
+ returnNode = root;
1974
+ for await (const block of newBlocks) {
1975
+ await putBlock(block.cid, block.bytes);
1976
+ }
1977
+ await putBlock(returnRootBlock.cid, returnRootBlock.bytes);
1978
+ return { root: returnNode, cid: returnRootBlock.cid }
1979
+ }
1980
+
1981
+ async function loadIndex (blocks, index, indexOpts) {
1982
+ if (!index.root) {
1983
+ const cid = index.cid;
1984
+ if (!cid) return
1985
+ const { getBlock } = makeGetBlock(blocks);
1986
+ index.root = await dbIndex.load({ cid, get: getBlock, ...indexOpts });
1987
+ }
1988
+ return index.root
1989
+ }
1990
+
1991
+ async function applyLimit (results, limit) {
1992
+ results.result = results.result.slice(0, limit);
1993
+ return results
1994
+ }
1995
+
1996
+ async function doIndexQuery (blocks, indexByKey, query = {}) {
1997
+ await loadIndex(blocks, indexByKey, dbIndexOpts);
1998
+ if (!indexByKey.root) return { result: [] }
1999
+ if (query.range) {
2000
+ const encodedRange = query.range.map((key) => charwise.encode(key));
2001
+ return applyLimit(await indexByKey.root.range(...encodedRange), query.limit)
2002
+ } else if (query.key) {
2003
+ const encodedKey = charwise.encode(query.key);
2004
+ return indexByKey.root.get(encodedKey)
2005
+ } else {
2006
+ const { result, ...all } = await indexByKey.root.getAllEntries();
2007
+ return applyLimit({ result: result.map(({ key: [k, id], value }) => ({ key: k, id, row: value })), ...all }, query.limit)
2008
+ }
2009
+ }
2010
+
2011
+ // @ts-nocheck
2012
+ /**
2013
+ * A Fireproof database Listener allows you to react to events in the database.
2014
+ *
2015
+ * @class Listener
2016
+ * @classdesc A listener attaches to a Fireproof database and runs a routing function on each change, sending the results to subscribers.
2017
+ *
2018
+ * @param {Fireproof} database - The Fireproof database instance to index.
2019
+ * @param {Function} routingFn - The routing function to apply to each entry in the database.
2020
+ */
2021
+ // import { ChangeEvent } from './db-index'
2022
+
2023
+ class Listener {
2024
+ subcribers = new Map()
2025
+ doStopListening = null
2026
+
2027
+ constructor (database, routingFn) {
2028
+ /** routingFn
2029
+ * The database instance to index.
2030
+ * @type {Fireproof}
1851
2031
  */
1852
- async query (query, update = true) {
1853
- // const callId = Math.random().toString(36).substring(2, 7)
1854
- // todo pass a root to query a snapshot
1855
- // console.time(callId + '.updateIndex')
1856
- update && await this.updateIndex(this.database.indexBlocks);
1857
- // console.timeEnd(callId + '.updateIndex')
1858
- // console.time(callId + '.doIndexQuery')
1859
- // console.log('query', query)
1860
- const response = await doIndexQuery(this.database.indexBlocks, this.indexByKey, query);
1861
- // console.timeEnd(callId + '.doIndexQuery')
1862
- return {
1863
- proof: { index: await cidsToProof(response.cids) },
1864
- rows: response.result.map(({ id, key, row }) => {
1865
- return ({ id, key: charwise.decode(key), value: row })
1866
- })
1867
- }
1868
- }
1869
-
2032
+ this.database = database;
2033
+ this.doStopListening = database.registerListener(changes => this.onChanges(changes));
1870
2034
  /**
1871
- * Update the DbIndex with the latest changes
1872
- * @private
1873
- * @returns {Promise<void>}
2035
+ * The routing function to apply to each change in the database.
2036
+ * @type {Function}
1874
2037
  */
1875
-
1876
- async updateIndex (blocks) {
1877
- // todo this could enqueue the request and give fresh ones to all second comers -- right now it gives out stale promises while working
1878
- // what would it do in a world where all indexes provide a database snapshot to query?
1879
- if (this.updateIndexPromise) return this.updateIndexPromise
1880
- this.updateIndexPromise = this.innerUpdateIndex(blocks);
1881
- this.updateIndexPromise.finally(() => { this.updateIndexPromise = null; });
1882
- return this.updateIndexPromise
1883
- }
1884
-
1885
- async innerUpdateIndex (inBlocks) {
1886
- // console.log('dbHead', this.dbHead)
1887
- // console.time(callTag + '.changesSince')
1888
- const result = await this.database.changesSince(this.dbHead); // {key, value, del}
1889
- // console.timeEnd(callTag + '.changesSince')
1890
- // console.log('result.rows.length', result.rows.length)
1891
-
1892
- // console.time(callTag + '.doTransactionupdateIndex')
1893
- // console.log('updateIndex changes length', result.rows.length)
1894
-
1895
- if (result.rows.length === 0) {
1896
- // console.log('updateIndex < no changes', result.clock)
1897
- this.dbHead = result.clock;
1898
- return
1899
- }
1900
- await doTransaction('updateIndex', inBlocks, async (blocks) => {
1901
- let oldIndexEntries = [];
1902
- let removeByIdIndexEntries = [];
1903
- await loadIndex(blocks, this.indexById, idIndexOpts);
1904
- await loadIndex(blocks, this.indexByKey, dbIndexOpts);
1905
- if (this.dbHead) {
1906
- const oldChangeEntries = await this.indexById.root.getMany(result.rows.map(({ key }) => key));
1907
- oldIndexEntries = oldChangeEntries.result.map((key) => ({ key, del: true }));
1908
- removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }));
1909
- }
1910
- if (!this.mapFn) {
1911
- throw new Error('No live map function installed for index, cannot update. Make sure your index definition runs before any queries.' + (this.mapFnString ? ' Your code should match the stored map function source:\n' + this.mapFnString : ''))
1912
- }
1913
- const indexEntries = indexEntriesForChanges(result.rows, this.mapFn);
1914
- const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }));
1915
- this.indexById = await bulkIndex(blocks, this.indexById, removeByIdIndexEntries.concat(byIdIndexEntries), idIndexOpts);
1916
- this.indexByKey = await bulkIndex(blocks, this.indexByKey, oldIndexEntries.concat(indexEntries), dbIndexOpts);
1917
- this.dbHead = result.clock;
1918
- });
1919
- this.database.notifyExternal('dbIndex');
1920
- // console.timeEnd(callTag + '.doTransactionupdateIndex')
1921
- // console.log(`updateIndex ${callTag} <`, this.instanceId, this.dbHead?.toString(), this.indexByKey.cid?.toString(), this.indexById.cid?.toString())
1922
- }
2038
+ this.routingFn =
2039
+ routingFn ||
2040
+ function (_, emit) {
2041
+ emit('*');
2042
+ };
2043
+ this.dbHead = null;
1923
2044
  }
1924
2045
 
1925
2046
  /**
1926
- * Update the DbIndex with the given entries
1927
- * @param {import('./blockstore.js').Blockstore} blocks
1928
- * @param {{root, cid}} inIndex
1929
- * @param {DbIndexEntry[]} indexEntries
1930
- * @private
2047
+ * Subscribe to a topic emitted by the event function.
2048
+ * @param {string} topic - The topic to subscribe to.
2049
+ * @param {Function} subscriber - The function to call when the topic is emitted.
2050
+ * @returns {Function} A function to unsubscribe from the topic.
2051
+ * @memberof Listener
2052
+ * @instance
1931
2053
  */
1932
- async function bulkIndex (blocks, inIndex, indexEntries, opts) {
1933
- if (!indexEntries.length) return inIndex
1934
- const putBlock = blocks.put.bind(blocks);
1935
- const { getBlock } = makeGetBlock(blocks);
1936
- let returnRootBlock;
1937
- let returnNode;
1938
- if (!inIndex.root) {
1939
- const cid = inIndex.cid;
1940
- if (!cid) {
1941
- for await (const node of await dbIndex.create({ get: getBlock, list: indexEntries, ...opts })) {
1942
- const block = await node.block;
1943
- await putBlock(block.cid, block.bytes);
1944
- returnRootBlock = block;
1945
- returnNode = node;
1946
- }
1947
- return { root: returnNode, cid: returnRootBlock.cid }
1948
- }
1949
- inIndex.root = await dbIndex.load({ cid, get: getBlock, ...dbIndexOpts });
1950
- }
1951
- const { root, blocks: newBlocks } = await inIndex.root.bulk(indexEntries);
1952
- returnRootBlock = await root.block;
1953
- returnNode = root;
1954
- for await (const block of newBlocks) {
1955
- await putBlock(block.cid, block.bytes);
1956
- }
1957
- await putBlock(returnRootBlock.cid, returnRootBlock.bytes);
1958
- return { root: returnNode, cid: returnRootBlock.cid }
1959
- }
1960
-
1961
- async function loadIndex (blocks, index, indexOpts) {
1962
- if (!index.root) {
1963
- const cid = index.cid;
1964
- if (!cid) return
1965
- const { getBlock } = makeGetBlock(blocks);
1966
- index.root = await dbIndex.load({ cid, get: getBlock, ...indexOpts });
2054
+ on (topic, subscriber, since) {
2055
+ const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
2056
+ listOfTopicSubscribers.push(subscriber);
2057
+ if (typeof since !== 'undefined') {
2058
+ this.database.changesSince(since).then(({ rows: changes }) => {
2059
+ const keys = topicsForChanges(changes, this.routingFn).get(topic);
2060
+ if (keys) keys.forEach(key => subscriber(key));
2061
+ });
1967
2062
  }
1968
- return index.root
1969
- }
1970
-
1971
- async function applyLimit (results, limit) {
1972
- results.result = results.result.slice(0, limit);
1973
- return results
1974
- }
1975
-
1976
- async function doIndexQuery (blocks, indexByKey, query = {}) {
1977
- await loadIndex(blocks, indexByKey, dbIndexOpts);
1978
- if (!indexByKey.root) return { result: [] }
1979
- if (query.range) {
1980
- const encodedRange = query.range.map((key) => charwise.encode(key));
1981
- return applyLimit(await indexByKey.root.range(...encodedRange), query.limit)
1982
- } else if (query.key) {
1983
- const encodedKey = charwise.encode(query.key);
1984
- return indexByKey.root.get(encodedKey)
1985
- } else {
1986
- const { result, ...all } = await indexByKey.root.getAllEntries();
1987
- return applyLimit({ result: result.map(({ key: [k, id], value }) => ({ key: k, id, row: value })), ...all }, query.limit)
2063
+ return () => {
2064
+ const index = listOfTopicSubscribers.indexOf(subscriber);
2065
+ if (index > -1) listOfTopicSubscribers.splice(index, 1);
1988
2066
  }
1989
2067
  }
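The on() method above returns an unsubscribe closure, and each subscriber is called with the key of every changed document routed to its topic. A usage sketch, assuming `db` is an existing Fireproof instance:

// Sketch only: Listener is this bundle's export; `db` is an assumed Fireproof instance.
const listener = new Listener(db, (doc, emit) => {
  // Route each changed document to one or more topics.
  emit(doc._deleted ? 'deleted' : 'updated');
});
const unsubscribe = listener.on('updated', key => {
  console.log('document updated:', key); // subscriber receives the changed key
});
// Later:
unsubscribe();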
1990
2068
 
1991
- // @ts-nocheck
1992
- /**
1993
- * A Fireproof database Listener allows you to react to events in the database.
1994
- *
1995
- * @class Listener
1996
- * @classdesc An listener attaches to a Fireproof database and runs a routing function on each change, sending the results to subscribers.
1997
- *
1998
- * @param {Fireproof} database - The Fireproof database instance to index.
1999
- * @param {Function} routingFn - The routing function to apply to each entry in the database.
2000
- */
2001
- // import { ChangeEvent } from './db-index'
2002
-
2003
- class Listener {
2004
- subcribers = new Map()
2005
- doStopListening = null
2006
-
2007
- constructor (database, routingFn) {
2008
- /** routingFn
2009
- * The database instance to index.
2010
- * @type {Fireproof}
2011
- */
2012
- this.database = database;
2013
- this.doStopListening = database.registerListener(changes => this.onChanges(changes));
2014
- /**
2015
- * The map function to apply to each entry in the database.
2016
- * @type {Function}
2017
- */
2018
- this.routingFn =
2019
- routingFn ||
2020
- function (_, emit) {
2021
- emit('*');
2022
- };
2023
- this.dbHead = null;
2024
- }
2025
-
2026
- /**
2027
- * Subscribe to a topic emitted by the event function.
2028
- * @param {string} topic - The topic to subscribe to.
2029
- * @param {Function} subscriber - The function to call when the topic is emitted.
2030
- * @returns {Function} A function to unsubscribe from the topic.
2031
- * @memberof Listener
2032
- * @instance
2033
- */
2034
- on (topic, subscriber, since) {
2035
- const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
2036
- listOfTopicSubscribers.push(subscriber);
2037
- if (typeof since !== 'undefined') {
2038
- this.database.changesSince(since).then(({ rows: changes }) => {
2039
- const keys = topicsForChanges(changes, this.routingFn).get(topic);
2040
- if (keys) keys.forEach(key => subscriber(key));
2041
- });
2042
- }
2043
- return () => {
2044
- const index = listOfTopicSubscribers.indexOf(subscriber);
2045
- if (index > -1) listOfTopicSubscribers.splice(index, 1);
2069
+ onChanges (changes) {
2070
+ if (Array.isArray(changes)) {
2071
+ const seenTopics = topicsForChanges(changes, this.routingFn);
2072
+ for (const [topic, keys] of seenTopics) {
2073
+ const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
2074
+ listOfTopicSubscribers.forEach(subscriber => keys.forEach(key => subscriber(key)));
2046
2075
  }
2047
- }
2048
-
2049
- onChanges (changes) {
2050
- if (Array.isArray(changes)) {
2051
- const seenTopics = topicsForChanges(changes, this.routingFn);
2052
- for (const [topic, keys] of seenTopics) {
2053
- const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
2054
- listOfTopicSubscribers.forEach(subscriber => keys.forEach(key => subscriber(key)));
2055
- }
2056
- } else {
2057
- // non-arrays go to all subscribers
2058
- for (const [, listOfTopicSubscribers] of this.subcribers) {
2059
- listOfTopicSubscribers.forEach(subscriber => subscriber(changes));
2060
- }
2076
+ } else {
2077
+ // non-arrays go to all subscribers
2078
+ for (const [, listOfTopicSubscribers] of this.subcribers) {
2079
+ listOfTopicSubscribers.forEach(subscriber => subscriber(changes));
2061
2080
  }
2062
2081
  }
2063
2082
  }
2083
+ }
2064
2084
 
2065
- function getTopicList (subscribersMap, name) {
2066
- let topicList = subscribersMap.get(name);
2067
- if (!topicList) {
2068
- topicList = [];
2069
- subscribersMap.set(name, topicList);
2070
- }
2071
- return topicList
2085
+ function getTopicList (subscribersMap, name) {
2086
+ let topicList = subscribersMap.get(name);
2087
+ if (!topicList) {
2088
+ topicList = [];
2089
+ subscribersMap.set(name, topicList);
2072
2090
  }
2073
-
2074
- /**
2075
- * Transforms a set of changes to events using an emitter function.
2076
- *
2077
- * @param {ChangeEvent[]} changes
2078
- * @param {Function} routingFn
2079
- * @returns {Array<string>} The topics emmitted by the event function.
2080
- * @private
2081
- */
2082
- const topicsForChanges = (changes, routingFn) => {
2083
- const seenTopics = new Map();
2084
- changes.forEach(({ key, value, del }) => {
2085
- if (del || !value) value = { _deleted: true };
2086
- routingFn(({ _id: key, ...value }), t => {
2087
- const topicList = getTopicList(seenTopics, t);
2088
- topicList.push(key);
2089
- });
2091
+ return topicList
2092
+ }
2093
+
2094
+ /**
2095
+ * Transforms a set of changes to events using an emitter function.
2096
+ *
2097
+ * @param {ChangeEvent[]} changes
2098
+ * @param {Function} routingFn
2099
+ * @returns {Map<string, string[]>} The topics emitted by the routing function, mapped to the keys that changed.
2100
+ * @private
2101
+ */
2102
+ const topicsForChanges = (changes, routingFn) => {
2103
+ const seenTopics = new Map();
2104
+ changes.forEach(({ key, value, del }) => {
2105
+ if (del || !value) value = { _deleted: true };
2106
+ routingFn(({ _id: key, ...value }), t => {
2107
+ const topicList = getTopicList(seenTopics, t);
2108
+ topicList.push(key);
2090
2109
  });
2091
- return seenTopics
2092
- };
2093
-
2094
- const parseCID = cid => typeof cid === 'string' ? multiformats.CID.parse(cid) : cid;
2095
-
2096
- class Hydrator {
2097
- static fromJSON (json, database) {
2098
- database.hydrate({ clock: json.clock.map(c => parseCID(c)), name: json.name, key: json.key });
2099
- if (json.indexes) {
2100
- for (const { name, code, clock: { byId, byKey, db } } of json.indexes) {
2101
- DbIndex.fromJSON(database, {
2102
- clock: {
2103
- byId: byId ? parseCID(byId) : null,
2104
- byKey: byKey ? parseCID(byKey) : null,
2105
- db: db ? db.map(c => parseCID(c)) : null
2106
- },
2107
- code,
2108
- name
2109
- });
2110
- }
2111
- }
2112
- return database
2113
- }
2114
-
2115
- static snapshot (database, clock) {
2116
- const definition = database.toJSON();
2117
- const withBlocks = new Fireproof(database.blocks);
2118
- if (clock) {
2119
- definition.clock = clock.map(c => parseCID(c));
2120
- definition.indexes.forEach(index => {
2121
- index.clock.byId = null;
2122
- index.clock.byKey = null;
2123
- index.clock.db = null;
2110
+ });
2111
+ return seenTopics
2112
+ };
2113
+
2114
+ const parseCID = cid => typeof cid === 'string' ? multiformats.CID.parse(cid) : cid;
2115
+
2116
+ class Hydrator {
2117
+ static fromJSON (json, database) {
2118
+ database.hydrate({ clock: json.clock.map(c => parseCID(c)), name: json.name, key: json.key });
2119
+ if (json.indexes) {
2120
+ for (const { name, code, clock: { byId, byKey, db } } of json.indexes) {
2121
+ DbIndex.fromJSON(database, {
2122
+ clock: {
2123
+ byId: byId ? parseCID(byId) : null,
2124
+ byKey: byKey ? parseCID(byKey) : null,
2125
+ db: db ? db.map(c => parseCID(c)) : null
2126
+ },
2127
+ code,
2128
+ name
2124
2129
  });
2125
2130
  }
2126
- const snappedDb = this.fromJSON(definition, withBlocks)
2127
- ;([...database.indexes.values()]).forEach(index => {
2128
- snappedDb.indexes.get(index.mapFnString).mapFn = index.mapFn;
2129
- });
2130
- return snappedDb
2131
2131
  }
2132
+ return database
2133
+ }
2132
2134
 
2133
- static async zoom (database, clock) {
2134
- ([...database.indexes.values()]).forEach(index => {
2135
- index.indexById = { root: null, cid: null };
2136
- index.indexByKey = { root: null, cid: null };
2137
- index.dbHead = null;
2135
+ static snapshot (database, clock) {
2136
+ const definition = database.toJSON();
2137
+ const withBlocks = new Fireproof(database.blocks);
2138
+ if (clock) {
2139
+ definition.clock = clock.map(c => parseCID(c));
2140
+ definition.indexes.forEach(index => {
2141
+ index.clock.byId = null;
2142
+ index.clock.byKey = null;
2143
+ index.clock.db = null;
2138
2144
  });
2139
- database.clock = clock.map(c => parseCID(c));
2140
- await database.notifyReset(); // hmm... indexes should listen to this? might be more complex than worth it. so far this is the only caller
2141
- return database
2142
2145
  }
2146
+ const snappedDb = this.fromJSON(definition, withBlocks)
2147
+ ;([...database.indexes.values()]).forEach(index => {
2148
+ snappedDb.indexes.get(index.mapFnString).mapFn = index.mapFn;
2149
+ });
2150
+ return snappedDb
2143
2151
  }
2144
2152
 
2145
- exports.Fireproof = Fireproof;
2146
- exports.Hydrator = Hydrator;
2147
- exports.Index = DbIndex;
2148
- exports.Listener = Listener;
2149
-
2150
- return exports;
2153
+ static async zoom (database, clock) {
2154
+ ([...database.indexes.values()]).forEach(index => {
2155
+ index.indexById = { root: null, cid: null };
2156
+ index.indexByKey = { root: null, cid: null };
2157
+ index.dbHead = null;
2158
+ });
2159
+ database.clock = clock.map(c => parseCID(c));
2160
+ await database.notifyReset(); // hmm... indexes should listen to this? might be more complex than worth it. so far this is the only caller
2161
+ return database
2162
+ }
2163
+ }
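Hydrator above rebuilds a database (and its index definitions) from a JSON definition, or re-points an existing instance at another clock. A sketch, assuming `db` is an existing Fireproof instance and that db.toJSON() yields the { name, key, clock, indexes } shape consumed by fromJSON:

// Sketch only: Fireproof and Hydrator are this bundle's exports; `db` is assumed.
const definition = db.toJSON();
// Rehydrate the same definition onto a fresh instance that shares the blockstore.
const restored = Hydrator.fromJSON(definition, new Fireproof(db.blocks));
// Or move an existing instance to an earlier clock, resetting its indexes.
const rewound = await Hydrator.zoom(db, definition.clock);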
2151
2164
 
2152
- })({}, crypto, Block, sha2, dagcbor, utils, map, cache, link, multiformats, car, cid, CBW, raw, idb, cargoQueue, codec, cidSet, buffer, charwise, dbIndex);
2165
+ exports.Fireproof = Fireproof;
2166
+ exports.Hydrator = Hydrator;
2167
+ exports.Index = DbIndex;
2168
+ exports.Listener = Listener;