@fireproof/core 0.3.10 → 0.3.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/bundle.js +2152 -0
- package/dist/src/blockstore.d.ts +115 -0
- package/dist/src/blockstore.d.ts.map +1 -0
- package/dist/src/clock.d.ts +98 -0
- package/dist/src/clock.d.ts.map +1 -0
- package/dist/src/crypto.d.ts +18 -0
- package/dist/src/crypto.d.ts.map +1 -0
- package/dist/src/db-index.d.ts +116 -0
- package/dist/src/db-index.d.ts.map +1 -0
- package/dist/src/fireproof.d.ts +167 -0
- package/dist/src/fireproof.d.ts.map +1 -0
- package/dist/src/hydrator.d.ts +6 -0
- package/dist/src/hydrator.d.ts.map +1 -0
- package/dist/src/index.d.ts +6 -0
- package/dist/src/index.d.ts.map +1 -0
- package/dist/src/listener.d.ts +36 -0
- package/dist/src/listener.d.ts.map +1 -0
- package/dist/src/prolly.d.ts +83 -0
- package/dist/src/prolly.d.ts.map +1 -0
- package/dist/src/sha1.d.ts +9 -0
- package/dist/src/sha1.d.ts.map +1 -0
- package/dist/src/valet.d.ts +34 -0
- package/dist/src/valet.d.ts.map +1 -0
- package/dist/tsconfig.tsbuildinfo +1 -0
- package/package.json +39 -5
- package/src/blockstore.js +3 -2
- package/src/clock.js +4 -3
- package/src/crypto.js +1 -0
- package/src/db-index.js +10 -5
- package/src/fireproof.js +15 -10
- package/src/hydrator.js +3 -3
- package/src/index.js +6 -0
- package/src/listener.js +2 -1
- package/src/prolly.js +12 -25
- package/src/sha1.js +2 -1
- package/src/valet.js +7 -5
- package/hooks/use-fireproof.js +0 -135
- package/index.js +0 -6
- package/scripts/keygen.js +0 -3
- package/test/block.js +0 -65
- package/test/clock.test.js +0 -694
- package/test/db-index.test.js +0 -261
- package/test/fireproof.test.js +0 -493
- package/test/fulltext.test.js +0 -66
- package/test/helpers.js +0 -45
- package/test/hydrator.test.js +0 -81
- package/test/listener.test.js +0 -102
- package/test/prolly.test.js +0 -190
- package/test/proofs.test.js +0 -53
- package/test/reproduce-fixture-bug.test.js +0 -65
- package/test/valet.test.js +0 -59
package/dist/bundle.js
ADDED
@@ -0,0 +1,2152 @@
|
|
1
|
+
(function (exports, crypto, Block, sha2, dagcbor, utils, map, cache, link, multiformats, car, cid, CBW, raw, idb, cargoQueue, codec, cidSet, buffer, charwise, dbIndex) {
|
2
|
+
'use strict';
|
3
|
+
|
4
|
+
/**
 * Build a frozen ES-module-style namespace object for a CJS export object.
 * Named exports are re-exposed (preserving getters) and the original module
 * is attached under `default`.
 * @param {object|null|undefined} e - the CJS module exports
 * @returns {object} frozen namespace with `default` pointing at `e`
 */
function _interopNamespaceDefault(e) {
  const ns = Object.create(null);
  if (e) {
    for (const key of Object.keys(e)) {
      if (key === 'default') continue;
      const desc = Object.getOwnPropertyDescriptor(e, key);
      // Keep live-binding getters as-is; otherwise forward reads lazily.
      Object.defineProperty(ns, key, desc.get
        ? desc
        : { enumerable: true, get: () => e[key] });
    }
  }
  ns.default = e;
  return Object.freeze(ns);
}
|
20
|
+
|
21
|
+
// Namespaced (ESM-style) views of the injected CJS dependencies, built once
// at module load. The /*#__PURE__*/ annotations let bundlers tree-shake them.
var Block__namespace = /*#__PURE__*/_interopNamespaceDefault(Block);
var dagcbor__namespace = /*#__PURE__*/_interopNamespaceDefault(dagcbor);
var CBW__namespace = /*#__PURE__*/_interopNamespaceDefault(CBW);
var raw__namespace = /*#__PURE__*/_interopNamespaceDefault(raw);
var codec__namespace = /*#__PURE__*/_interopNamespaceDefault(codec);
|
26
|
+
|
27
|
+
// @ts-nocheck
|
28
|
+
|
29
|
+
/**
|
30
|
+
* @template T
|
31
|
+
* @typedef {{ parents: EventLink<T>[], data: T }} EventView
|
32
|
+
*/
|
33
|
+
|
34
|
+
/**
|
35
|
+
* @template T
|
36
|
+
* @typedef {import('multiformats').BlockView<EventView<T>>} EventBlockView
|
37
|
+
*/
|
38
|
+
|
39
|
+
/**
|
40
|
+
* @template T
|
41
|
+
* @typedef {import('multiformats').Link<EventView<T>>} EventLink
|
42
|
+
*/
|
43
|
+
|
44
|
+
/**
|
45
|
+
* Advance the clock by adding an event.
|
46
|
+
*
|
47
|
+
* @template T
|
48
|
+
* @param {import('./blockstore').BlockFetcher} blocks Block storage.
|
49
|
+
* @param {EventLink<T>[]} head The head of the clock.
|
50
|
+
* @param {EventLink<T>} event The event to add.
|
51
|
+
* @returns {Promise<{ head: EventLink<T>[], cids: any }>} The new head of the clock plus the CIDs of the events read while advancing.
|
52
|
+
*/
|
53
|
+
// Merge a new event into the clock head. Note: despite the JSDoc above, this
// returns `{ head, cids }`, not a bare head array — `cids` is the fetch
// counter from the EventFetcher used during the merge.
async function advance (blocks, head, event) {
  /** @type {EventFetcher<T>} */
  const events = new EventFetcher(blocks);
  // Index the current head entries by their string CID for O(1) membership.
  const headmap = new Map(head.map((cid) => [cid.toString(), cid]));

  // Check if the headmap already includes the event, return head if it does
  if (headmap.has(event.toString())) return { head, cids: events.cids }

  // Does event contain the clock? Every head entry reachable from the new
  // event is superseded by it.
  let changed = false;
  for (const cid of head) {
    if (await contains(events, event, cid)) {
      headmap.delete(cid.toString());
      headmap.set(event.toString(), event);
      changed = true;
    }
  }

  // If the headmap has been changed, return the new headmap values
  if (changed) {
    return { head: [...headmap.values()], cids: events.cids }
  }

  // Does clock contain the event? If the event is already reachable from any
  // head entry it adds no information, so the head is unchanged.
  for (const p of head) {
    if (await contains(events, p, event)) {
      return { head, cids: events.cids }
    }
  }

  // Return the head concatenated with the new event if it passes both checks
  // (the event is concurrent with the existing head).
  return { head: head.concat(event), cids: events.cids }
}
|
86
|
+
|
87
|
+
/**
|
88
|
+
* @template T
|
89
|
+
* @implements {EventBlockView<T>}
|
90
|
+
*/
|
91
|
+
class EventBlock extends Block.Block {
  // Thin wrapper over multiformats Block specialized to clock events
  // ({ parents, data } payloads encoded as dag-cbor).
  /**
   * @param {object} config
   * @param {EventLink<T>} config.cid
   * @param {Event} config.value
   * @param {Uint8Array} config.bytes
   */
  constructor ({ cid, value, bytes }) {
    // @ts-expect-error
    super({ cid, value, bytes });
  }

  /**
   * Encode a new event block for `data` with the given parent links.
   * @template T
   * @param {T} data
   * @param {EventLink<T>[]} [parents] - defaults to no parents (a root event)
   * @returns {Promise<EventBlockView<T>>} promise of the encoded block
   */
  static create (data, parents) {
    return encodeEventBlock({ data, parents: parents ?? [] })
  }
}
|
112
|
+
|
113
|
+
/** @template T */
|
114
|
+
class EventFetcher {
  /**
   * Caching reader for clock event blocks that also records every CID it
   * touches in a CIDCounter.
   * @param {import('./blockstore').BlockFetcher} blocks
   */
  constructor (blocks) {
    /** @private */
    this._blocks = blocks;
    this._cids = new utils.CIDCounter();
    this._cache = new Map();
  }

  /**
   * The CID counter of everything fetched through this instance.
   * Fix: callers (`advance`, `findEventsToSync`) read `events.cids`, but the
   * class only defined the private `_cids` field, so they always got
   * `undefined`. Expose it via a read-only getter.
   */
  get cids () {
    return this._cids
  }

  /**
   * Fetch and decode an event block, memoizing the decode promise by CID.
   * @param {EventLink<T>} link
   * @returns {Promise<EventBlockView<T>>}
   * @throws {Error} when the underlying blockstore has no block for `link`
   */
  async get (link) {
    const slink = link.toString();
    if (this._cache.has(slink)) return this._cache.get(slink)
    const block = await this._blocks.get(link);
    this._cids.add({ address: link });
    if (!block) throw new Error(`missing block: ${link}`)
    // Cache the decode *promise* so concurrent gets share one decode.
    const got = decodeEventBlock(block.bytes);
    this._cache.set(slink, got);
    return got
  }

  /** Wait for all in-flight CID bookkeeping, then return the counter. */
  async all () {
    await Promise.all([...this._cids]);
    return this._cids
  }
}
|
144
|
+
|
145
|
+
/**
|
146
|
+
* @template T
|
147
|
+
* @param {EventView<T>} value
|
148
|
+
* @returns {Promise<EventBlockView<T>>}
|
149
|
+
*/
|
150
|
+
/**
 * Encode an event payload as a dag-cbor / sha-256 block.
 * @template T
 * @param {EventView<T>} value - `{ parents, data }` event payload
 * @returns {Promise<EventBlockView<T>>}
 */
async function encodeEventBlock (value) {
  // TODO: sort parents (unsorted parents mean equal events can encode to
  // different CIDs)
  const { cid, bytes } = await Block.encode({ value, codec: dagcbor__namespace, hasher: sha2.sha256 });
  // @ts-expect-error
  return new Block.Block({ cid, value, bytes })
}
|
156
|
+
|
157
|
+
/**
|
158
|
+
* @template T
|
159
|
+
* @param {Uint8Array} bytes
|
160
|
+
* @returns {Promise<EventBlockView<T>>}
|
161
|
+
*/
|
162
|
+
/**
 * Decode raw bytes back into an event block (dag-cbor / sha-256).
 * @template T
 * @param {Uint8Array} bytes
 * @returns {Promise<EventBlockView<T>>}
 */
async function decodeEventBlock (bytes) {
  const { cid, value } = await Block.decode({ bytes, codec: dagcbor__namespace, hasher: sha2.sha256 });
  // @ts-expect-error
  return new Block.Block({ cid, value, bytes })
}
|
167
|
+
|
168
|
+
/**
|
169
|
+
* Returns true if event "a" contains event "b". Breadth first search.
|
170
|
+
* @template T
|
171
|
+
* @param {EventFetcher} events
|
172
|
+
* @param {EventLink<T>} a
|
173
|
+
* @param {EventLink<T>} b
|
174
|
+
*/
|
175
|
+
/**
 * Returns true if event "a" contains event "b". Breadth first search.
 * @template T
 * @param {EventFetcher} events
 * @param {EventLink<T>} a - potential descendant
 * @param {EventLink<T>} b - potential ancestor
 * @returns {Promise<boolean>}
 */
async function contains (events, a, b) {
  if (a.toString() === b.toString()) return true
  const [{ value: aevent }, { value: bevent }] = await Promise.all([events.get(a), events.get(b)]);
  const links = [...aevent.parents];
  // Fix: track visited links. Without this, diamond-shaped histories enqueue
  // the same subtree once per path, making the walk exponential in DAG depth.
  const seen = new Set();
  while (links.length) {
    const link = links.shift();
    if (!link) break
    if (link.toString() === b.toString()) return true
    if (seen.has(link.toString())) continue
    seen.add(link.toString());
    // if any of b's parents are this link, then b cannot exist in any of the
    // tree below, since that would create a cycle.
    if (bevent.parents.some((p) => link.toString() === p.toString())) continue
    const { value: event } = await events.get(link);
    links.push(...event.parents);
  }
  return false
}
|
191
|
+
|
192
|
+
/**
|
193
|
+
* @template T
|
194
|
+
* @param {import('./blockstore').BlockFetcher} blocks Block storage.
|
195
|
+
* @param {EventLink<T>[]} head
|
196
|
+
* @param {object} [options]
|
197
|
+
* @param {(b: EventBlockView<T>) => string} [options.renderNodeLabel]
|
198
|
+
*/
|
199
|
+
/**
 * Stream a Graphviz "digraph" rendering of the clock DAG, one line per yield.
 * Head events are emitted first, then ancestors breadth-first; each node is
 * deduplicated via the `nodes` set.
 * @template T
 * @param {import('./blockstore').BlockFetcher} blocks Block storage.
 * @param {EventLink<T>[]} head
 * @param {object} [options]
 * @param {(b: EventBlockView<T>) => string} [options.renderNodeLabel]
 */
async function * vis$1 (blocks, head, options = {}) {
  // Default label assumes event data has a `.value` — TODO confirm for all
  // event shapes.
  const renderNodeLabel = options.renderNodeLabel ?? ((b) => b.value.data.value);
  const events = new EventFetcher(blocks);
  yield 'digraph clock {';
  yield ' node [shape=point fontname="Courier"]; head;';
  const hevents = await Promise.all(head.map((link) => events.get(link)));
  const links = [];
  const nodes = new Set();
  // Emit the head nodes and their edges from the synthetic "head" point.
  for (const e of hevents) {
    nodes.add(e.cid.toString());
    yield ` node [shape=oval fontname="Courier"]; ${e.cid} [label="${renderNodeLabel(e)}"];`;
    yield ` head -> ${e.cid};`;
    for (const p of e.value.parents) {
      yield ` ${e.cid} -> ${p};`;
    }
    links.push(...e.value.parents);
  }
  // BFS over the ancestry, emitting each node and its parent edges once.
  while (links.length) {
    const link = links.shift();
    if (!link) break
    if (nodes.has(link.toString())) continue
    nodes.add(link.toString());
    const block = await events.get(link);
    yield ` node [shape=oval]; ${link} [label="${renderNodeLabel(block)}" fontname="Courier"];`;
    for (const p of block.value.parents) {
      yield ` ${link} -> ${p};`;
    }
    links.push(...block.value.parents);
  }
  yield '}';
}
|
230
|
+
|
231
|
+
/**
 * Compute the events that need syncing: everything between `head` and the
 * common ancestor that the ancestor does not already contain.
 * @param {import('./blockstore').BlockFetcher} blocks
 * @param {import('./clock').EventLink<EventData>[]} head
 * @returns {Promise<{cids: any, events: Array}>} fetched-CID counter plus the
 *   sorted events to sync
 */
async function findEventsToSync (blocks, head) {
  const events = new EventFetcher(blocks);
  const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
  // Keep only events not reachable from the ancestor.
  const toSync = await asyncFilter(sorted, async (uks) => !(await contains(events, ancestor, uks.cid)));
  return { cids: events.cids, events: toSync }
}
|
244
|
+
|
245
|
+
// Filter an array with an async predicate. All predicates run in parallel;
// the original array order is preserved.
const asyncFilter = async (arr, predicate) => {
  const keep = await Promise.all(arr.map(predicate));
  return arr.filter((_item, i) => keep[i]);
};
|
247
|
+
|
248
|
+
/**
 * Find the common ancestor of `children` and the deterministically-sorted
 * list of events between the heads and that ancestor.
 * @param {import('./clock').EventFetcher} events
 * @param {import('./clock').EventLink<EventData>[]} children
 * @returns {Promise<{ancestor: *, sorted: Array}>}
 * @throws {Error} when the heads share no common ancestor
 */
async function findCommonAncestorWithSortedEvents (events, children) {
  const ancestor = await findCommonAncestor(events, children);
  if (!ancestor) {
    throw new Error('failed to find common ancestor event')
  }
  const sorted = await findSortedEvents(events, children, ancestor);
  return { ancestor, sorted }
}
|
261
|
+
|
262
|
+
/**
|
263
|
+
* Find the common ancestor event of the passed children. A common ancestor is
|
264
|
+
* the first single event in the DAG that _all_ paths from children lead to.
|
265
|
+
*
|
266
|
+
* @param {import('./clock').EventFetcher} events
|
267
|
+
* @param {import('./clock').EventLink<EventData>[]} children
|
268
|
+
*/
|
269
|
+
/**
 * Find the common ancestor event of the passed children. A common ancestor is
 * the first single event in the DAG that _all_ paths from children lead to.
 * Grows one ancestor path per child in round-robin fashion until some event
 * appears on every path, or no path can be extended further.
 * @param {import('./clock').EventFetcher} events
 * @param {import('./clock').EventLink<EventData>[]} children
 * @returns {Promise<import('./clock').EventLink<EventData>|undefined>}
 */
async function findCommonAncestor (events, children) {
  if (!children.length) return
  // One candidate path per child, each starting at that child.
  const candidates = children.map((c) => [c]);
  while (true) {
    let changed = false;
    for (const c of candidates) {
      // Extend this path by one ancestor step from its current tip.
      const candidate = await findAncestorCandidate(events, c[c.length - 1]);
      if (!candidate) continue
      changed = true;
      c.push(candidate);
      const ancestor = findCommonString(candidates);
      if (ancestor) return ancestor
    }
    // No path could be extended: the heads share no ancestor — give up.
    if (!changed) return
  }
}
|
285
|
+
|
286
|
+
/**
|
287
|
+
* @param {import('./clock').EventFetcher} events
|
288
|
+
* @param {import('./clock').EventLink<EventData>} root
|
289
|
+
*/
|
290
|
+
/**
 * Pick the next ancestor step from `root`: the root itself when it has no
 * parents, its sole parent when it has one, or (recursively) the common
 * ancestor of its parents when it has several.
 * @param {import('./clock').EventFetcher} events
 * @param {import('./clock').EventLink<EventData>} root
 */
async function findAncestorCandidate (events, root) {
  const { value: event } = await events.get(root);
  if (!event.parents.length) return root
  return event.parents.length === 1 ? event.parents[0] : findCommonAncestor(events, event.parents)
}
|
295
|
+
|
296
|
+
/**
|
297
|
+
* @template {{ toString: () => string }} T
|
298
|
+
* @param {Array<T[]>} arrays
|
299
|
+
*/
|
300
|
+
/**
 * Return the first item (comparing via String()) that appears in every one of
 * the given arrays, scanning arrays and their items in order; undefined when
 * no such item exists.
 * @template {{ toString: () => string }} T
 * @param {Array<T[]>} arrays
 * @returns {T|undefined}
 */
function findCommonString (arrays) {
  // Work on shallow copies so callers' arrays are never touched.
  const copies = arrays.map((a) => [...a]);
  for (const candidateList of copies) {
    for (const candidate of candidateList) {
      const key = String(candidate);
      const everywhere = copies.every(
        (other) => other === candidateList || other.some((i) => String(i) === key)
      );
      if (everywhere) return candidate
    }
  }
}
|
314
|
+
|
315
|
+
/**
|
316
|
+
* Find and sort events between the head(s) and the tail.
|
317
|
+
* @param {import('./clock').EventFetcher} events
|
318
|
+
* @param {import('./clock').EventLink<EventData>[]} head
|
319
|
+
* @param {import('./clock').EventLink<EventData>} tail
|
320
|
+
*/
|
321
|
+
/**
 * Find and sort events between the head(s) and the tail.
 * Events are weighted by the sum of their depths along every path from each
 * head (heavier events happened first), bucketed by weight, then sorted by
 * descending weight with CID string order breaking ties — giving a
 * deterministic total order for replicas.
 * @param {import('./clock').EventFetcher} events
 * @param {import('./clock').EventLink<EventData>[]} head
 * @param {import('./clock').EventLink<EventData>} tail
 */
async function findSortedEvents (events, head, tail) {
  // get weighted events - heavier events happened first
  /** @type {Map<string, { event: import('./clock').EventBlockView<EventData>, weight: number }>} */
  const weights = new Map();
  const all = await Promise.all(head.map((h) => findEvents(events, h, tail)));
  for (const arr of all) {
    for (const { event, depth } of arr) {
      const info = weights.get(event.cid.toString());
      if (info) {
        // Seen via another path: accumulate depth so shared ancestors weigh more.
        info.weight += depth;
      } else {
        weights.set(event.cid.toString(), { event, weight: depth });
      }
    }
  }

  // group events into buckets by weight
  /** @type {Map<number, import('./clock').EventBlockView<EventData>[]>} */
  const buckets = new Map();
  for (const { event, weight } of weights.values()) {
    const bucket = buckets.get(weight);
    if (bucket) {
      bucket.push(event);
    } else {
      buckets.set(weight, [event]);
    }
  }

  // sort by weight, and by CID within weight
  const sorted = Array.from(buckets)
    .sort((a, b) => b[0] - a[0])
    .flatMap(([, es]) => es.sort((a, b) => (String(a.cid) < String(b.cid) ? -1 : 1)));

  return sorted
}
|
359
|
+
|
360
|
+
/**
|
361
|
+
* @param {import('./clock').EventFetcher} events
|
362
|
+
* @param {import('./clock').EventLink<EventData>} start
|
363
|
+
* @param {import('./clock').EventLink<EventData>} end
|
364
|
+
* @returns {Promise<Array<{ event: import('./clock').EventBlockView<EventData>, depth: number }>>}
|
365
|
+
*/
|
366
|
+
/**
 * Recursively collect `start` and its ancestors (with their depth from
 * `start`), stopping along a branch once its single parent is `end`.
 * @param {import('./clock').EventFetcher} events
 * @param {import('./clock').EventLink<EventData>} start
 * @param {import('./clock').EventLink<EventData>} end
 * @returns {Promise<Array<{ event: import('./clock').EventBlockView<EventData>, depth: number }>>}
 */
async function findEvents (events, start, end, depth = 0) {
  const event = await events.get(start);
  const found = [{ event, depth }];
  const parents = event.value.parents;
  const reachedTail = parents.length === 1 && String(parents[0]) === String(end);
  if (reachedTail) return found
  // Walk all parent branches in parallel, one level deeper.
  const branches = await Promise.all(parents.map((p) => findEvents(events, p, end, depth + 1)));
  return found.concat(...branches)
}
|
375
|
+
|
376
|
+
// @ts-nocheck
|
377
|
+
|
378
|
+
// Rebuild a Block from bytes + cid using the crypto codec and sha-256
// (Block.create re-hashes the bytes, so a cid/bytes mismatch is rejected).
const createBlock = (bytes, cid) => Block.create({ cid, bytes, hasher: sha2.sha256, codec: codec__namespace });
|
379
|
+
|
380
|
+
/**
 * Async generator: encrypt every block named in `cids`, then yield an IPLD
 * set of the encrypted CIDs, and finally a dag-cbor "header" block holding
 * [encryptedRoot, setRoot]. Yield order matters — the last block yielded is
 * the CAR root used by blocksToEncryptedCarBlock.
 * @param {object} opts
 * @param {function} opts.get - fetch an unencrypted block by CID
 * @param {Iterable<string>} opts.cids - string CIDs to encrypt (must include root)
 * @param {*} opts.hasher @param {*} opts.key @param {*} opts.cache
 * @param {*} opts.chunker @param {*} opts.root - CID of the clock root
 */
const encrypt = async function * ({ get, cids, hasher, key, cache, chunker, root }) {
  const set = new Set();
  let eroot;
  for (const string of cids) {
    const cid = multiformats.CID.parse(string);
    const unencrypted = await get(cid);
    const block = await Block.encode({ ...await codec__namespace.encrypt({ ...unencrypted, key }), codec: codec__namespace, hasher });
    yield block;
    set.add(block.cid.toString());
    // Remember which *encrypted* CID corresponds to the plaintext root.
    if (unencrypted.cid.equals(root)) eroot = block.cid;
  }
  if (!eroot) throw new Error('cids does not include root')
  const list = [...set].map(s => multiformats.CID.parse(s));
  let last;
  // Persist the set of encrypted CIDs as an IPLD collection; its final node
  // is the set root.
  for await (const node of cidSet.create({ list, get, cache, chunker, hasher, codec: dagcbor__namespace })) {
    const block = await node.block;
    yield block;
    last = block;
  }
  const head = [eroot, last.cid];
  const block = await Block.encode({ value: head, codec: dagcbor__namespace, hasher });
  yield block;
};
|
404
|
+
|
405
|
+
/**
 * Async generator: inverse of `encrypt`. Reads the header block at `root`
 * ([encryptedRoot, setTree]), loads the CID set, and yields every decrypted
 * block — the (decrypted) root block is yielded last.
 * @param {object} opts
 * @param {*} opts.root - CID of the dag-cbor header block
 * @param {function} opts.get @param {*} opts.key @param {*} opts.cache
 * @param {*} opts.chunker @param {*} opts.hasher
 */
const decrypt = async function * ({ root, get, key, cache, chunker, hasher }) {
  const o = { ...await get(root), codec: dagcbor__namespace, hasher };
  const decodedRoot = await Block.decode(o);
  const { value: [eroot, tree] } = decodedRoot;
  const rootBlock = await get(eroot); // should I decrypt?
  const cidset = await cidSet.load({ cid: tree, get, cache, chunker, codec: codec__namespace, hasher });
  const { result: nodes } = await cidset.getAllEntries();
  // Decrypt one encrypted block and re-verify it against its plaintext CID.
  const unwrap = async (eblock) => {
    const { bytes, cid } = await codec__namespace.decrypt({ ...eblock, key }).catch(e => {
      // NOTE(review): rethrows with a generic message — the original decrypt
      // error (`e`) is only logged, not chained.
      console.log('ekey', e);
      throw new Error('bad key: ' + key.toString('hex'))
    });
    const block = await createBlock(bytes, cid);
    return block
  };
  const promises = [];
  for (const { cid } of nodes) {
    if (!rootBlock.cid.equals(cid)) promises.push(get(cid).then(unwrap));
  }
  // Async generators await yielded promises, so consumers receive blocks,
  // not promises. The root block is intentionally yielded last.
  yield * promises;
  yield unwrap(rootBlock);
};
|
428
|
+
|
429
|
+
// @ts-nocheck
|
430
|
+
// from https://github.com/duzun/sync-sha1/blob/master/rawSha1.js
|
431
|
+
// MIT License Copyright (c) 2020 Dumitru Uzun
|
432
|
+
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
433
|
+
// of this software and associated documentation files (the "Software"), to deal
|
434
|
+
// in the Software without restriction, including without limitation the rights
|
435
|
+
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
436
|
+
// copies of the Software, and to permit persons to whom the Software is
|
437
|
+
// furnished to do so, subject to the following conditions:
|
438
|
+
|
439
|
+
// The above copyright notice and this permission notice shall be included in all
|
440
|
+
// copies or substantial portions of the Software.
|
441
|
+
|
442
|
+
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
443
|
+
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
444
|
+
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
445
|
+
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
446
|
+
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
447
|
+
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
448
|
+
// SOFTWARE.
|
449
|
+
|
450
|
+
// import {
|
451
|
+
// isLittleEndian, switchEndianness32
|
452
|
+
// } from 'string-encode'
|
453
|
+
|
454
|
+
/**
|
455
|
+
* SHA1 on binary array
|
456
|
+
*
|
457
|
+
* @param {Uint8Array} b Data to hash
|
458
|
+
*
|
459
|
+
* @return {Uint8Array} sha1 hash
|
460
|
+
*/
|
461
|
+
/**
 * SHA1 on binary array — synchronous, dependency-free implementation
 * (vendored from duzun/sync-sha1; see license above).
 *
 * NOTE(review): per the commented-out endianness block below, the returned
 * bytes are in the platform's native 32-bit word order, not guaranteed
 * canonical big-endian SHA-1 — here it is only used to derive a stable key
 * id, where any deterministic digest suffices. Confirm before reusing for
 * interop.
 *
 * @param {Uint8Array} b Data to hash
 * @return {Uint8Array} sha1 hash (20 bytes, 5 x 32-bit words)
 */
function rawSha1 (b) {
  let i = b.byteLength;
  let bs = 0;
  let A; let B; let C; let D; let G;
  // Initial SHA-1 state; ~A and ~B reproduce the standard h2/h3 constants.
  const H = Uint32Array.from([A = 0x67452301, B = 0xEFCDAB89, ~A, ~B, 0xC3D2E1F0]);
  const W = new Uint32Array(80);
  const nrWords = (i / 4 + 2) | 15;
  const words = new Uint32Array(nrWords + 1);
  let j;

  // Message padding: bit length in the final word, 0x80 terminator, then the
  // input bytes packed into 32-bit words.
  words[nrWords] = i * 8;
  words[i >> 2] |= 0x80 << (~i << 3);
  for (;i--;) {
    words[i >> 2] |= b[i] << (~i << 3);
  }

  // Process the message in 16-word chunks. The inner for-loop's *update
  // expression* performs the whole SHA-1 round via the comma operator:
  // rotations, round function selection by j = i/5>>2, and register rotation.
  // Note `b` is deliberately reused as a scratch register here.
  for (A = H.slice(); bs < nrWords; bs += 16, A.set(H)) {
    for (i = 0; i < 80;
      A[0] = (
        G = ((b = A[0]) << 5 | b >>> 27) +
          A[4] +
          (W[i] = (i < 16) ? words[bs + i] : G << 1 | G >>> 31) +
          0x5A827999,
        B = A[1],
        C = A[2],
        D = A[3],
        G + ((j = i / 5 >> 2) // eslint-disable-line no-cond-assign
          ? j !== 2
            ? (B ^ C ^ D) + (j & 2 ? 0x6FE0483D : 0x14577208)
            : (B & C | B & D | C & D) + 0x34994343
          : B & C | ~B & D
        )
      )
      , A[1] = b
      , A[2] = B << 30 | B >>> 2
      , A[3] = C
      , A[4] = D
      , ++i
    ) {
      // Message schedule expansion (rotate applied in the round expression).
      G = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
    }

    // Fold this chunk's working registers back into the running state.
    for (i = 5; i;) H[--i] = H[i] + A[i];
  }

  // if (isLittleEndian()) {
  //   H = H.map(switchEndianness32)
  // }

  return new Uint8Array(H.buffer, H.byteOffset, H.byteLength)
}
|
512
|
+
|
513
|
+
// @ts-nocheck
|
514
|
+
// Shared chunker for the encrypted-CID IPLD sets — utils.bf(3) from the
// prolly-trees utils; TODO confirm the exact chunk-boundary semantics of bf(3).
const chunker = utils.bf(3);
|
515
|
+
|
516
|
+
// Opt-out flag for at-rest encryption. In Node it is read from the
// NO_ENCRYPT environment variable; in the browser from a Vite-style global.
// Fix: the bundled output evaluated `undefined.VITE_NO_ENCRYPT` in the
// non-Node branch (a mangled `import.meta.env` reference left by the
// bundler), which threw a TypeError at module load in any environment
// without `process`. Read the flag defensively instead.
const NO_ENCRYPT =
  typeof process !== 'undefined' ? process.env.NO_ENCRYPT : globalThis.VITE_NO_ENCRYPT;
|
518
|
+
|
519
|
+
class Valet {
  // Stores CAR files in IndexedDB (optionally encrypted) and maintains a
  // CID -> CAR index, plus an async upload queue for remote persistence.
  idb = null // lazily-opened IndexedDB handle, see withDB
  name = null // database name, part of the IndexedDB key
  uploadQueue = null // cargoQueue feeding uploadFunction
  alreadyEnqueued = new Set() // CAR CIDs already queued, to avoid duplicates
  keyMaterial = null // hex-encoded encryption key, or null when unencrypted
  keyId = 'null' // hex sha1 of the key material; part of the DB name

  /**
   * Function installed by the database to upload car files
   * @type {null|function(string, Uint8Array):Promise<void>}
   */
  uploadFunction = null

  /**
   * @param {string} [name] - valet/database name
   * @param {string} [keyMaterial] - hex key material; enables encryption
   */
  constructor (name = 'default', keyMaterial) {
    this.name = name;
    this.setKeyMaterial(keyMaterial);
    // Batch upload worker: uploads each queued CAR then clears its
    // `pending` marker in the index.
    this.uploadQueue = cargoQueue(async (tasks, callback) => {
      console.log(
        'queue worker',
        tasks.length,
        tasks.reduce((acc, t) => acc + t.value.length, 0)
      );
      if (this.uploadFunction) {
        // todo we can coalesce these into a single car file
        return await this.withDB(async db => {
          for (const task of tasks) {
            await this.uploadFunction(task.carCid, task.value);
            // update the indexedb to mark this car as no longer pending
            const carMeta = await db.get('cidToCar', task.carCid);
            delete carMeta.pending;
            await db.put('cidToCar', carMeta);
          }
        })
      }
      callback();
    });

    // When the queue drains, retry anything still marked pending (e.g. CARs
    // parked before uploadFunction was installed).
    this.uploadQueue.drain(async () => {
      return await this.withDB(async db => {
        const carKeys = (await db.getAllFromIndex('cidToCar', 'pending')).map(c => c.car);
        for (const carKey of carKeys) {
          await this.uploadFunction(carKey, await db.get('cars', carKey));
          const carMeta = await db.get('cidToCar', carKey);
          delete carMeta.pending;
          await db.put('cidToCar', carMeta);
        }
      })
    });
  }

  // Returns the hex key material, or null when encryption is disabled.
  getKeyMaterial () {
    return this.keyMaterial
  }

  // Install (or clear) the hex key material and derive keyId = sha1(key) hex.
  // A falsy `km` or the NO_ENCRYPT flag disables encryption.
  setKeyMaterial (km) {
    if (km && !NO_ENCRYPT) {
      const hex = Uint8Array.from(buffer.Buffer.from(km, 'hex'));
      this.keyMaterial = km;
      const hash = rawSha1(hex);
      this.keyId = buffer.Buffer.from(hash).toString('hex');
    } else {
      this.keyMaterial = null;
      this.keyId = 'null';
    }
    // console.trace('keyId', this.name, this.keyId)
  }

  /**
   * Group the blocks into a car and write it to the valet.
   * No-op when the blockstore has no lastCid (nothing was written).
   * @param {InnerBlockstore} innerBlockstore
   * @param {Set<string>} cids
   * @returns {Promise<void>}
   * @memberof Valet
   */
  async writeTransaction (innerBlockstore, cids) {
    if (innerBlockstore.lastCid) {
      if (this.keyMaterial) {
        // console.log('encrypting car', innerBlockstore.label)
        const newCar = await blocksToEncryptedCarBlock(innerBlockstore.lastCid, innerBlockstore, this.keyMaterial);
        await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
      } else {
        const newCar = await blocksToCarBlock(innerBlockstore.lastCid, innerBlockstore);
        await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
      }
    }
  }

  // Run `dbWorkFun` against the (lazily opened, schema-upgraded) IndexedDB.
  // DB name includes keyId, so changing keys switches to a different store.
  withDB = async dbWorkFun => {
    if (!this.idb) {
      this.idb = await idb.openDB(`fp.${this.keyId}.${this.name}.valet`, 2, {
        upgrade (db, oldVersion, newVersion, transaction) {
          if (oldVersion < 1) {
            db.createObjectStore('cars'); // todo use database name
            const cidToCar = db.createObjectStore('cidToCar', { keyPath: 'car' });
            cidToCar.createIndex('cids', 'cids', { multiEntry: true });
          }
          if (oldVersion < 2) {
            const cidToCar = transaction.objectStore('cidToCar');
            cidToCar.createIndex('pending', 'pending');
          }
        }
      });
    }
    return await dbWorkFun(this.idb)
  }

  /**
   * Persist a CAR file locally (marked pending) and enqueue it for upload.
   * @param {string} carCid
   * @param {*} value - CAR bytes
   * @param {Iterable<string>} cids - data CIDs contained in the CAR
   */
  async parkCar (carCid, value, cids) {
    await this.withDB(async db => {
      const tx = db.transaction(['cars', 'cidToCar'], 'readwrite');
      await tx.objectStore('cars').put(value, carCid);
      await tx.objectStore('cidToCar').put({ pending: 'y', car: carCid, cids: Array.from(cids) });
      return await tx.done
    });

    // upload to web3.storage if we have credentials
    if (this.uploadFunction) {
      if (this.alreadyEnqueued.has(carCid)) {
        // console.log('already enqueued', carCid)
        return
      }
      // don't await this, it will be done in the queue
      // console.log('add to queue', carCid, value.length)
      this.uploadQueue.push({ carCid, value });
      this.alreadyEnqueued.add(carCid);
    }
  }

  // Hook for fetching blocks from a remote source; unused in this file.
  remoteBlockFunction = null

  /**
   * Look up a data CID: find its CAR via the index, read the CAR, and return
   * the block bytes (decrypting the whole CAR when a key is set).
   * Returns undefined when the CAR exists but the block is absent from it.
   * @param {string} dataCID
   * @throws {Error} when no CAR contains the CID
   */
  async getBlock (dataCID) {
    return await this.withDB(async db => {
      const tx = db.transaction(['cars', 'cidToCar'], 'readonly');
      const indexResp = await tx.objectStore('cidToCar').index('cids').get(dataCID);
      const carCid = indexResp?.car;
      if (!carCid) {
        throw new Error('Missing block: ' + dataCID)
      }
      const carBytes = await tx.objectStore('cars').get(carCid);
      const reader = await car.CarReader.fromBytes(carBytes);
      if (this.keyMaterial) {
        const roots = await reader.getRoots();
        const readerGetWithCodec = async cid => {
          const got = await reader.get(cid);
          // Heuristic codec pick: 'bafy' prefix is treated as dag-cbor, else
          // the crypto codec — NOTE(review): prefix-based dispatch; confirm
          // it holds for all CIDs written by this valet.
          let useCodec = codec__namespace;
          if (cid.toString().indexOf('bafy') === 0) {
            useCodec = dagcbor__namespace;
          }
          const decoded = await Block__namespace.decode({
            ...got,
            codec: useCodec,
            hasher: sha2.sha256
          });
          return decoded
        };
        const { blocks } = await blocksFromEncryptedCarBlock(roots[0], readerGetWithCodec, this.keyMaterial);
        const block = blocks.find(b => b.cid.toString() === dataCID);
        if (block) {
          return block.bytes
        }
      } else {
        const gotBlock = await reader.get(cid.CID.parse(dataCID));
        if (gotBlock) {
          return gotBlock.bytes
        }
      }
    })
  }
}
|
695
|
+
|
696
|
+
/**
 * Serialize blocks into a single CAR file with `lastCid` as the root, and
 * wrap the CAR bytes in a raw-codec block.
 * @param {*} lastCid - root CID for the CAR header
 * @param {Array|{entries: function}} blocks - an array of {cid, bytes} or a
 *   store exposing .entries()
 * @returns {Promise<*>} the raw block whose bytes are the CAR file
 */
const blocksToCarBlock = async (lastCid, blocks) => {
  const blockList = Array.isArray(blocks) ? blocks : Array.from(blocks.entries());
  // Pre-compute the exact byte size so the writer buffer is sized once.
  const headerSize = CBW__namespace.headerLength({ roots: [lastCid] });
  let size = headerSize;
  for (const { cid, bytes } of blockList) {
    size += CBW__namespace.blockLength({ cid, bytes });
  }
  // Named carBuffer to avoid shadowing the `buffer` module parameter.
  const carBuffer = new Uint8Array(size);
  const writer = await CBW__namespace.createWriter(carBuffer, { headerSize });
  writer.addRoot(lastCid);
  for (const { cid, bytes } of blockList) {
    writer.write({ cid, bytes });
  }
  await writer.close();
  return await Block__namespace.encode({ value: writer.bytes, hasher: sha2.sha256, codec: raw__namespace })
};
|
717
|
+
|
718
|
+
/**
 * Encrypt every block in the store with the given hex key material, then pack
 * the encrypted blocks into a CAR rooted at the final block yielded by
 * `encrypt` (the [encryptedRoot, setRoot] header).
 * @param {*} innerBlockStoreClockRootCid - plaintext clock root CID
 * @param {{entries: function, get: function}} blocks - source blockstore
 * @param {string} keyMaterial - hex-encoded key
 * @returns {Promise<*>} the encrypted CAR wrapped in a raw block
 */
const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blocks, keyMaterial) => {
  const encryptionKey = buffer.Buffer.from(keyMaterial, 'hex');
  const encryptedBlocks = [];
  const theCids = [];
  // Collect every CID currently in the store; encrypt() requires the root to
  // be among them.
  for (const { cid } of blocks.entries()) {
    theCids.push(cid.toString());
  }

  let last;
  for await (const block of encrypt({
    cids: theCids,
    get: async cid => blocks.get(cid), // maybe we can just use blocks.get
    key: encryptionKey,
    hasher: sha2.sha256,
    chunker,
    cache: cache.nocache,
    // codec: dagcbor, // should be crypto?
    root: innerBlockStoreClockRootCid
  })) {
    encryptedBlocks.push(block);
    last = block;
  }
  // `last` is the header block yielded last by encrypt(); it becomes the
  // CAR root so decrypt() can find everything from it.
  const encryptedCar = await blocksToCarBlock(last.cid, encryptedBlocks);
  return encryptedCar
};
|
744
|
+
// { root, get, key, cache, chunker, hasher }
|
745
|
+
|
746
|
+
// Memoizes the *promise* of decryption per root CID, so concurrent callers
// share one decryption pass and repeat lookups are free.
const memoizeDecryptedCarBlocks = new Map();
/**
 * Decrypt all blocks reachable from an encrypted CAR root.
 * @param {*} cid - CID of the encrypted header block (CAR root)
 * @param {function} get - codec-aware block fetcher
 * @param {string} keyMaterial - hex-encoded key
 * @returns {Promise<{blocks: Array, cids: Set<string>}>}
 */
const blocksFromEncryptedCarBlock = async (cid, get, keyMaterial) => {
  if (memoizeDecryptedCarBlocks.has(cid.toString())) {
    return memoizeDecryptedCarBlocks.get(cid.toString())
  } else {
    const blocksPromise = (async () => {
      const decryptionKey = buffer.Buffer.from(keyMaterial, 'hex');
      const cids = new Set();
      const decryptedBlocks = [];
      for await (const block of decrypt({
        root: cid,
        get,
        key: decryptionKey,
        chunker,
        hasher: sha2.sha256,
        cache: cache.nocache
        // codec: dagcbor
      })) {
        decryptedBlocks.push(block);
        cids.add(block.cid.toString());
      }
      return { blocks: decryptedBlocks, cids }
    })();
    // Cache before awaiting so racing callers reuse the in-flight promise.
    memoizeDecryptedCarBlocks.set(cid.toString(), blocksPromise);
    return blocksPromise
  }
};
|
774
|
+
|
775
|
+
// @ts-nocheck
|
776
|
+
|
777
|
+
// const sleep = ms => new Promise(r => setTimeout(r, ms))
|
778
|
+
|
779
|
+
// In-flight work promises keyed by id, used to collapse duplicate requests.
const husherMap = new Map();
/**
 * De-duplicate concurrent work: the first caller for `id` starts `workFn`,
 * and every caller within the window receives that same promise. The entry
 * is evicted 100ms after the work settles, allowing a fresh attempt later.
 * @param {string} id - de-duplication key
 * @param {() => Promise<any>} workFn - work to run at most once per window
 * @returns {Promise<any>} the shared promise for this id
 */
const husher = (id, workFn) => {
  const pending = husherMap.get(id);
  if (pending) return pending
  const work = workFn().finally(() => setTimeout(() => husherMap.delete(id), 100));
  husherMap.set(id, work);
  return work
};
|
789
|
+
|
790
|
+
/**
 * @typedef {Object} AnyBlock
 * @property {import('./link').AnyLink} cid - The CID of the block
 * @property {Uint8Array} bytes - The block's data
 *
 * @typedef {Object} Blockstore
 * @property {function(import('./link').AnyLink): Promise<AnyBlock|undefined>} get - A function to retrieve a block by CID
 * @property {function(import('./link').AnyLink, Uint8Array): Promise<void>} put - A function to store a block's data and CID
 *
 * A blockstore that caches writes to a transaction and only persists them when committed.
 * Reads consult, in order: in-flight transactions, the committed cache / valet,
 * and finally the network (when a remoteBlockFunction is configured).
 * @implements {Blockstore}
 */
class TransactionBlockstore {
  /** @type {Map<string, Uint8Array>} in-memory cache of committed blocks, keyed by CID string */
  committedBlocks = new Map()

  // Persistence layer (CAR files + optional encryption); created in the constructor.
  valet = null

  // Random tag used to tell instances apart in debug output.
  instanceId = 'blkz.' + Math.random().toString(36).substring(2, 4)
  // InnerBlockstore transactions that have begun but not yet been retired.
  inflightTransactions = new Set()

  /**
   * @param {string} name - database name passed to the Valet
   * @param {string|null} encryptionKey - hex key material; null disables encryption
   */
  constructor (name, encryptionKey) {
    this.valet = new Valet(name, encryptionKey);
  }

  /**
   * Get a block from the store.
   *
   * @param {import('./link').AnyLink} cid
   * @returns {Promise<AnyBlock | undefined>}
   * @throws {Error} when the block cannot be found in any tier
   */
  async get (cid) {
    const key = cid.toString();
    // it is safe to read from the in-flight transactions because they are immutable
    const bytes = await Promise.any([this.transactionsGet(key), this.committedGet(key)]).catch(e => {
      // Both local lookups rejected; fall back to the network as a last resort.
      return this.networkGet(key)
    });
    if (!bytes) throw new Error('Missing block: ' + key)
    return { cid, bytes }
  }

  // this iterates over the in-flight transactions
  // and returns the first matching block it finds
  async transactionsGet (key) {
    for (const transaction of this.inflightTransactions) {
      const got = await transaction.get(key);
      if (got && got.bytes) return got.bytes
    }
    throw new Error('Missing block: ' + key)
  }

  // Committed-read path: serve from the in-memory cache, else load from the
  // valet and cache the result for subsequent reads.
  async committedGet (key) {
    const old = this.committedBlocks.get(key);
    if (old) return old
    const got = await this.valet.getBlock(key);
    // console.log('committedGet: ' + key)
    this.committedBlocks.set(key, got);
    return got
  }

  // Drop the in-memory committed cache (the valet still has the data).
  async clearCommittedCache () {
    this.committedBlocks.clear();
  }

  // Last-resort read: fetch the block from the remote and commit it locally
  // so later reads are served from the cache. Returns false when no remote
  // fetcher is configured.
  async networkGet (key) {
    if (this.valet.remoteBlockFunction) {
      // todo why is this on valet?
      // husher collapses concurrent fetches of the same key into one request
      const value = await husher(key, async () => await this.valet.remoteBlockFunction(key));
      if (value) {
        // console.log('networkGot: ' + key, value.length)
        // NOTE(review): this transaction is not awaited (fire-and-forget);
        // confirm callers tolerate the local commit landing asynchronously.
        doTransaction('networkGot: ' + key, this, async innerBlockstore => {
          await innerBlockstore.put(multiformats.CID.parse(key), value);
        });
        return value
      }
    } else {
      return false
    }
  }

  /**
   * Add a block to the store. Usually bound to a transaction by a closure.
   * It sets the lastCid property to the CID of the block that was put.
   * This is used by the transaction as the head of the car when written to the valet.
   * We don't have to worry about which transaction we are when we are here because
   * we are the transactionBlockstore.
   *
   * @param {import('./link').AnyLink} cid
   * @param {Uint8Array} bytes
   * @throws {Error} always — writes must go through an InnerBlockstore transaction
   */
  put (cid, bytes) {
    throw new Error('use a transaction to put')
  }

  /**
   * Iterate over all blocks in the store.
   *
   * @yields {AnyBlock}
   * @returns {AsyncGenerator<AnyBlock>}
   */
  // * entries () {
  //   // needs transaction blocks?
  //   // for (const [str, bytes] of this.blocks) {
  //   //   yield { cid: parse(str), bytes }
  //   // }
  //   for (const [str, bytes] of this.committedBlocks) {
  //     yield { cid: parse(str), bytes }
  //   }
  // }

  /**
   * Begin a transaction. Ensures the uncommitted blocks are empty at the beginning.
   * Returns the blocks to read and write during the transaction.
   * @param {string} [label] - debug label for the transaction
   * @returns {InnerBlockstore}
   * @memberof TransactionBlockstore
   */
  begin (label = '') {
    const innerTransactionBlockstore = new InnerBlockstore(label, this);
    this.inflightTransactions.add(innerTransactionBlockstore);
    return innerTransactionBlockstore
  }

  /**
   * Commit the transaction. Writes the blocks to the store.
   * @returns {Promise<void>}
   * @memberof TransactionBlockstore
   */
  async commit (innerBlockstore) {
    await this.doCommit(innerBlockstore);
  }

  // first get the transaction blockstore from the map of transaction blockstores
  // then copy it to committedBlocks
  // then write the transaction blockstore to a car
  // then write the car to the valet
  // then remove the transaction blockstore from the map of transaction blockstores
  doCommit = async innerBlockstore => {
    const cids = new Set();
    for (const { cid, bytes } of innerBlockstore.entries()) {
      const stringCid = cid.toString(); // unnecessary string conversion, can we fix upstream?
      // Blocks already committed are skipped (empty-statement then-branch).
      if (this.committedBlocks.has(stringCid)) ; else {
        this.committedBlocks.set(stringCid, bytes);
        cids.add(stringCid);
      }
    }
    if (cids.size > 0) {
      // Only write a CAR when the transaction produced new blocks.
      await this.valet.writeTransaction(innerBlockstore, cids);
    }
  }

  /**
   * Retire the transaction. Clears the uncommitted blocks.
   * @returns {void}
   * @memberof TransactionBlockstore
   */
  retire (innerBlockstore) {
    this.inflightTransactions.delete(innerBlockstore);
  }
}
|
951
|
+
|
952
|
+
/**
 * Runs a function on an inner blockstore, then persists the change to a car writer
 * or other outer blockstore. The inner transaction is always retired, whether
 * the work succeeds or fails.
 * @param {string} label - debug label for the transaction
 * @param {TransactionBlockstore} blockstore
 * @param {(innerBlockstore: Blockstore) => Promise<any>} doFun
 * @returns {Promise<any>} whatever doFun resolves to
 * @memberof TransactionBlockstore
 */
const doTransaction = async (label, blockstore, doFun) => {
  // Stores with no commit support are used directly, without a transaction.
  if (!blockstore.commit) return await doFun(blockstore)
  const inner = blockstore.begin(label);
  let result;
  try {
    result = await doFun(inner);
    await blockstore.commit(inner);
  } catch (e) {
    console.error(`Transaction ${label} failed`, e, e.stack);
    throw e
  } finally {
    blockstore.retire(inner);
  }
  return result
};
|
975
|
+
|
976
|
+
/** @implements {BlockFetcher} */
class InnerBlockstore {
  /** @type {Map<string, Uint8Array>} uncommitted writes, keyed by CID string */
  blocks = new Map()
  // CID of the most recent put; used as the root of the CAR written on commit.
  lastCid = null
  label = ''
  parentBlockstore = null

  constructor (label, parentBlockstore) {
    this.label = label;
    this.parentBlockstore = parentBlockstore;
  }

  /**
   * Read a block: first from this transaction's own staged writes, then from
   * the parent's committed store. Resolves undefined when not found.
   * @param {import('./link').AnyLink} cid
   * @returns {Promise<AnyBlock | undefined>}
   */
  async get (cid) {
    const key = cid.toString();
    const staged = this.blocks.get(key);
    if (staged) {
      return { cid, bytes: staged }
    }
    const committed = await this.parentBlockstore.committedGet(key);
    if (committed) {
      return { cid, bytes: committed }
    }
  }

  /**
   * Stage a block write in this transaction and remember its CID as the
   * latest root candidate.
   * @param {import('./link').AnyLink} cid
   * @param {Uint8Array} bytes
   */
  put (cid, bytes) {
    this.blocks.set(cid.toString(), bytes);
    this.lastCid = cid;
  }

  /** Yield every staged block as { cid, bytes }. */
  * entries () {
    for (const [encoded, bytes] of this.blocks) {
      yield { cid: link.parse(encoded), bytes };
    }
  }
}
|
1021
|
+
|
1022
|
+
// @ts-nocheck
// Shared prolly-tree options: no block cache, content-defined chunking
// (utils.bf(3)), dag-cbor codec, sha256 hashing, simple key comparison.
const blockOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare: utils.simpleCompare };
|
1024
|
+
|
1025
|
+
/**
 * Await `fn` and return its result. The label exists only for the
 * (currently disabled) debug logging.
 * @param {string} label
 * @param {() => any} fn - sync or async producer
 * @returns {Promise<any>}
 */
const withLog = async (label, fn) => {
  const response = await fn();
  // console.log('withLog', label, !!response)
  return response
};
|
1030
|
+
|
1031
|
+
// should also return a CIDCounter
/**
 * Build a block-fetching closure over `blocks` that returns fully decoded
 * multiformats Block instances (dag-cbor codec, sha256 hasher).
 * @param {Blockstore} blocks - store whose get(address) resolves { cid, bytes }
 * @returns {{getBlock: (address) => Promise<Block>}}
 */
const makeGetBlock = (blocks) => {
  // const cids = new CIDCounter() // this could be used for proofs of mutations
  const getBlock = async (address) => {
    const found = await withLog(address, () => blocks.get(address));
    // cids.add({ address: found.cid })
    return Block.create({ cid: found.cid, bytes: found.bytes, hasher: sha2.sha256, codec: dagcbor__namespace })
  };
  return { getBlock }
};
|
1044
|
+
|
1045
|
+
/**
 * Create a clock event recording a single put/del (plus the new prolly root),
 * persist it via `bigPut`, and advance the merkle clock head.
 * @param {Object} param0 - { inBlocks, bigPut, root, event, head, additions, removals }
 * @returns {Promise<{root, additions, removals, head, clockCIDs, event}>}
 */
async function createAndSaveNewEvent ({
  inBlocks,
  bigPut,
  root,
  event: inEvent,
  head,
  additions,
  removals = []
}) {
  const { key, value, del } = inEvent;
  // The event embeds the new prolly root, or null when the tree is empty.
  const data = {
    root: (root
      ? {
          cid: root.cid,
          bytes: root.bytes, // can we remove this?
          value: root.value // can we remove this?
        }
      : null),
    key
  };

  if (del) {
    data.value = null;
    data.type = 'del';
  } else {
    data.value = value;
    data.type = 'put';
  }
  /** @type {EventData} */

  const event = await EventBlock.create(data, head);
  bigPut(event);
  const advanced = await advance(inBlocks, head, event.cid);
  head = advanced.head;
  const cids = advanced.cids;

  return {
    root,
    additions,
    removals,
    head,
    clockCIDs: cids,
    event
  }
}
|
1094
|
+
|
1095
|
+
/**
 * Wire up read/write helpers for a blockstore: a decoded-block getter plus a
 * `bigPut` that writes a block and optionally records it in an additions map
 * (keyed by CID string) so callers can report what changed.
 * @param {Blockstore} inBlocks
 * @returns {{getBlock, bigPut, blocks, cids}}
 */
const makeGetAndPutBlock = (inBlocks) => {
  // const mblocks = new MemoryBlockstore()
  // const blocks = new MultiBlockFetcher(mblocks, inBlocks)
  const { getBlock, cids } = makeGetBlock(inBlocks);
  const rawPut = inBlocks.put.bind(inBlocks);
  const bigPut = async (block, additions) => {
    rawPut(block.cid, block.bytes);
    // mblocks.putSync(cid, bytes)
    if (additions) additions.set(block.cid.toString(), block);
  };
  return { getBlock, bigPut, blocks: inBlocks, cids }
};
|
1111
|
+
|
1112
|
+
/**
 * Flatten sorted clock events (plus an optional extra event) into a
 * last-write-wins list of prolly bulk operations.
 * @param {Array<{value: {data: {type, key, value?}}}>} sorted - events in causal order (mutated: the extra event is appended)
 * @param {{key, value?, del?}} [event] - a not-yet-encoded event applied last
 * @returns {Array<{key, value} | {key, del: true}>}
 */
const bulkFromEvents = (sorted, event) => {
  if (event) {
    // Wrap the raw event so it shares the shape of the decoded clock events.
    const data = { key: event.key };
    if (event.del) {
      data.type = 'del';
    } else {
      data.type = 'put';
      data.value = event.value;
    }
    sorted.push({ value: { data } });
  }
  const latest = new Map();
  for (const { value: { data: { type, value, key } } } of sorted) {
    latest.set(key, type === 'put' ? { key, value } : { key, del: true }); // last wins
  }
  return Array.from(latest.values())
};
|
1133
|
+
|
1134
|
+
// Get the value of the root from the ancestor event
/**
 * Load the prolly tree root recorded in the ancestor clock event.
 * @param {EventFetcher} events
 * @param {Link} ancestor
 * @param {*} getBlock - decoded-block fetcher for map.load
 * @returns {Promise<any|null>} the loaded tree, or null when the ancestor recorded an empty tree
 */
const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
  const ancestorEvent = await events.get(ancestor);
  const { root } = ancestorEvent.value.data;
  if (!root) return null
  return map.load({ cid: root.cid, get: getBlock, ...blockOpts })
};
|
1153
|
+
|
1154
|
+
/**
 * Compute the prolly-tree mutation for `event` plus any events since the
 * common ancestor of `head`. Returns the new root and the blocks to persist;
 * { root: null, blocks: [] } means the resulting tree is empty.
 * @param {Blockstore} inBlocks
 * @param {EventLink[]} head - merkle clock head (may be empty)
 * @param {{key, value?, del?}} [event] - extra operation applied last
 */
const doProllyBulk = async (inBlocks, head, event) => {
  const { getBlock, blocks } = makeGetAndPutBlock(inBlocks);
  let sortedEvents = [];
  let rootNode = null;
  if (head.length) {
    // Otherwise, we find the common ancestor and update the root and other blocks
    const events = new EventFetcher(blocks);
    // todo this is returning more events than necessary, lets define the desired semantics from the top down
    // good semantics mean we can cache the results of this call
    const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
    sortedEvents = sorted;
    rootNode = await prollyRootFromAncestor(events, ancestor, getBlock);
  }

  const bulkOperations = bulkFromEvents(sortedEvents, event);

  if (!rootNode) {
    // No existing tree: if every operation is a delete the tree stays empty.
    if (bulkOperations.every((op) => op.del)) {
      return { root: null, blocks: [] }
    }
    // Build a fresh tree; the last node yielded is the root.
    let root;
    const createdBlocks = [];
    for await (const node of map.create({ get: getBlock, list: bulkOperations, ...blockOpts })) {
      root = await node.block;
      createdBlocks.push(root);
    }
    return { root, blocks: createdBlocks }
  }
  return await rootNode.bulk(bulkOperations) // { root: newProllyRootNode, blocks: newBlocks }
};
|
1189
|
+
|
1190
|
+
/**
 * Put a value (a CID) for the given key. If the key exists it's value is overwritten.
 *
 * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
 * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
 * @param {string} key The key of the value to put.
 * @param {CID} value The value to put.
 * @param {object} [options]
 * @returns {Promise<Result>}
 */
async function put (inBlocks, head, event, options) {
  const { bigPut } = makeGetAndPutBlock(inBlocks);

  // Fresh database: build the first tree and record every block as an addition.
  if (!head.length) {
    const additions = new Map();
    const { root, blocks } = await doProllyBulk(inBlocks, head, event);
    for (const block of blocks) {
      bigPut(block, additions);
    }
    return createAndSaveNewEvent({ inBlocks, bigPut, root, event, head, additions: Array.from(additions.values()) })
  }

  const { root: newProllyRootNode, blocks: newBlocks } = await doProllyBulk(inBlocks, head, event);

  if (!newProllyRootNode) {
    // The tree became empty (all deletes): record a null root.
    return createAndSaveNewEvent({
      inBlocks,
      bigPut,
      root: null,
      event,
      head,
      additions: []
    })
  }

  const prollyRootBlock = await newProllyRootNode.block;
  const additions = new Map(); // ; const removals = new Map()
  bigPut(prollyRootBlock, additions);
  for (const nb of newBlocks) {
    bigPut(nb, additions);
  }
  // additions are new blocks
  return createAndSaveNewEvent({
    inBlocks,
    bigPut,
    root: prollyRootBlock,
    event,
    head,
    additions: Array.from(additions.values()) /*, todo? Array.from(removals.values()) */
  })
}
|
1241
|
+
|
1242
|
+
/**
 * Determine the effective prolly root given the current merkle clock head.
 *
 * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
 * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
 * @returns {Promise<{cids, node}>} the root node plus read-proof CIDs
 * @throws {Error} when head is empty
 */
async function root (inBlocks, head) {
  if (!head.length) {
    throw new Error('no head')
  }
  // NOTE(review): `cids` is only defined when doProllyBulk delegates to the
  // existing tree's bulk(); the fresh-tree branch returns no `cids`, so this
  // can be undefined — confirm callers (getAll/get/vis) tolerate that.
  const { root: newProllyRootNode, blocks: newBlocks, cids } = await doProllyBulk(inBlocks, head);
  // todo maybe these should go to a temp blockstore?
  await doTransaction('root', inBlocks, async (transactionBlockstore) => {
    const { bigPut } = makeGetAndPutBlock(transactionBlockstore);
    for (const nb of newBlocks) {
      bigPut(nb);
    }
  });
  return { cids, node: newProllyRootNode }
}
|
1262
|
+
|
1263
|
+
/**
 * Get the list of events not known by the `since` event
 * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
 * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
 * @param {import('./clock').EventLink<EventData>} since Event to compare against (spread here, so in practice an array of event links).
 * @returns {Promise<{clockCIDs: CIDCounter, result: EventData[]}>}
 * @throws {Error} when head is empty
 */
async function eventsSince (blocks, head, since) {
  if (!head.length) {
    throw new Error('no head')
  }
  // Sync against the union of the two heads to surface only unknown events.
  const combinedHead = [...since, ...head];
  const { cids, events } = await findEventsToSync(blocks, combinedHead);
  const result = events.map((event) => event.value.data);
  return { clockCIDs: cids, result }
}
|
1278
|
+
|
1279
|
+
/**
 * List every key/value entry reachable from the current head.
 * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
 * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
 *
 * @returns {Promise<{clockCIDs: CIDCounter, cids: CIDCounter, result: EventData[]}>}
 *
 */
async function getAll (blocks, head) {
  // todo use the root node left around from put, etc
  // move load to a central place
  if (!head.length) {
    return { clockCIDs: new utils.CIDCounter(), cids: new utils.CIDCounter(), result: [] }
  }
  const { node, cids: clockCIDs } = await root(blocks, head);
  if (!node) {
    // Empty tree: nothing to list.
    return { clockCIDs, cids: new utils.CIDCounter(), result: [] }
  }
  const { result, cids } = await node.getAllEntries(); // todo params
  const rows = result.map(({ key, value }) => ({ key, value }));
  return { clockCIDs, cids, result: rows }
}
|
1300
|
+
|
1301
|
+
/**
 * Read a single key from the prolly tree at the given head.
 * @param {import('./blockstore.js').BlockFetcher} blocks Bucket block storage.
 * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
 * @param {string} key The key of the value to retrieve.
 * @returns {Promise<{result, cids?, clockCIDs?}>} result is null when absent
 */
async function get (blocks, head, key) {
  // instead pass root from db? and always update on change
  if (!head.length) {
    return { cids: new utils.CIDCounter(), result: null }
  }
  const { node, cids: clockCIDs } = await root(blocks, head);
  if (!node) {
    return { clockCIDs, cids: new utils.CIDCounter(), result: null }
  }
  const { result, cids } = await node.get(key);
  return { result, cids, clockCIDs }
}
|
1318
|
+
|
1319
|
+
/**
 * Stream a line-by-line visualization of the prolly tree; the generator's
 * return value carries the joined text plus proof CIDs.
 * @param {import('./blockstore.js').BlockFetcher} blocks
 * @param {import('./clock').EventLink<EventData>[]} head
 */
async function * vis (blocks, head) {
  if (!head.length) {
    return { cids: new utils.CIDCounter(), result: null }
  }
  const { node, cids } = await root(blocks, head);
  const collected = [];
  for await (const line of node.vis()) {
    yield line;
    collected.push(line);
  }
  return { vis: collected.join('\n'), cids }
}
|
1331
|
+
|
1332
|
+
/**
 * Render the prolly-tree visualization as a single string (non-streaming
 * variant of `vis`).
 * @param {import('./blockstore.js').BlockFetcher} blocks
 * @param {import('./clock').EventLink<EventData>[]} head
 */
async function visMerkleTree (blocks, head) {
  if (!head.length) {
    return { cids: new utils.CIDCounter(), result: null }
  }
  const { node, cids } = await root(blocks, head);
  const collected = [];
  for await (const line of node.vis()) {
    collected.push(line);
  }
  return { vis: collected.join('\n'), cids }
}
|
1343
|
+
|
1344
|
+
/**
 * Render the merkle clock visualization as a single string.
 * @param {import('./blockstore.js').BlockFetcher} blocks
 * @param {import('./clock').EventLink<EventData>[]} head
 * @returns {Promise<{vis: string}>}
 */
async function visMerkleClock (blocks, head) {
  const lines = [];
  for await (const line of vis$1(blocks, head)) {
    // yield line
    lines.push(line);
  }
  return { vis: lines.join('\n') }
}
|
1352
|
+
|
1353
|
+
// @ts-nocheck
|
1354
|
+
// import { CID } from 'multiformats/dist/types/src/cid.js'
|
1355
|
+
|
1356
|
+
// const sleep = ms => new Promise(resolve => setTimeout(resolve, ms))
|
1357
|
+
|
1358
|
+
// class Proof {}
|
1359
|
+
|
1360
|
+
/**
|
1361
|
+
* @class Fireproof
|
1362
|
+
* @classdesc Fireproof stores data in IndexedDB and provides a Merkle clock.
|
1363
|
+
* This is the main class for saving and loading JSON and other documents with the database. You can find additional examples and
|
1364
|
+
* usage guides in the repository README.
|
1365
|
+
*
|
1366
|
+
* @param {import('./blockstore.js').TransactionBlockstore} blocks - The block storage instance to use documents and indexes
|
1367
|
+
* @param {CID[]} clock - The Merkle clock head to use for the Fireproof instance.
|
1368
|
+
* @param {object} [config] - Optional configuration options for the Fireproof instance.
|
1369
|
+
* @param {object} [authCtx] - Optional authorization context object to use for any authentication checks.
|
1370
|
+
*
|
1371
|
+
*/
|
1372
|
+
class Fireproof {
|
1373
|
+
listeners = new Set()
|
1374
|
+
|
1375
|
+
/**
|
1376
|
+
* @function storage
|
1377
|
+
* @memberof Fireproof
|
1378
|
+
* Creates a new Fireproof instance with default storage settings
|
1379
|
+
* Most apps should use this and not worry about the details.
|
1380
|
+
* @static
|
1381
|
+
* @returns {Fireproof} - a new Fireproof instance
|
1382
|
+
*/
|
1383
|
+
static storage = (name = 'global') => {
|
1384
|
+
const instanceKey = crypto.randomBytes(32).toString('hex'); // pass null to disable encryption
|
1385
|
+
// pick a random key from const validatedKeys
|
1386
|
+
// const instanceKey = validatedKeys[Math.floor(Math.random() * validatedKeys.length)]
|
1387
|
+
return new Fireproof(new TransactionBlockstore(name, instanceKey), [], { name })
|
1388
|
+
}
|
1389
|
+
|
1390
|
+
/**
 * @param {import('./blockstore.js').TransactionBlockstore} blocks - block storage
 * @param {CID[]} clock - initial merkle clock head
 * @param {object} [config] - options (name, validateChange, ...)
 * @param {object} [authCtx] - authorization context passed to validators
 */
constructor (blocks, clock, config, authCtx = {}) {
  this.name = config?.name || 'global';
  // Debug tag identifying this instance in logs.
  this.instanceId = `fp.${this.name}.${Math.random().toString(36).substring(2, 7)}`;
  this.blocks = blocks;
  this.clock = clock;
  this.config = config;
  this.authCtx = authCtx;
  // Registry of named database indexes (serialized by toJSON).
  this.indexes = new Map();
}
|
1399
|
+
|
1400
|
+
/**
 * Renders the Fireproof instance as a JSON object.
 * @returns {Object} - The JSON representation of the Fireproof instance. Includes clock heads for the database and its indexes,
 * the database name, and the valet's key material.
 * @memberof Fireproof
 * @instance
 */
toJSON () {
  // todo this also needs to return the index roots...
  return {
    clock: this.clockToJSON(),
    name: this.name,
    key: this.blocks.valet.getKeyMaterial(),
    indexes: [...this.indexes.values()].map(index => index.toJSON())
  }
}
|
1415
|
+
|
1416
|
+
/**
 * Serialize the merkle clock head as an array of CID strings.
 * @returns {string[]}
 * @memberof Fireproof
 * @instance
 */
clockToJSON () {
  return this.clock.map(cid => cid.toString())
}
|
1419
|
+
|
1420
|
+
/**
 * Restore this instance from a serialized snapshot (the inverse of toJSON's
 * clock/name/key fields). Resets the index blockstore.
 * @param {{clock: CID[], name: string, key: string}} param0 - snapshot fields
 * @memberof Fireproof
 * @instance
 */
hydrate ({ clock, name, key }) {
  this.name = name;
  this.clock = clock;
  this.blocks.valet.setKeyMaterial(key);
  // NOTE(review): indexBlocks is nulled so indexes are rebuilt lazily — confirm
  // against the index implementation elsewhere in this file.
  this.indexBlocks = null;
}
|
1426
|
+
|
1427
|
+
/**
 * Triggers a notification to all listeners
 * of the Fireproof instance so they can repaint UI, etc.
 * Sends a reset marker plus the current clock head.
 * @returns {Promise<void>}
 * @memberof Fireproof
 * @instance
 */
async notifyReset () {
  await this.notifyListeners({ _reset: true, _clock: this.clockToJSON() });
}
|
1439
|
+
|
1440
|
+
// used by indexes etc to notify database listeners of new availability
// (sends an external-source marker plus the current clock head)
async notifyExternal (source = 'unknown') {
  await this.notifyListeners({ _external: source, _clock: this.clockToJSON() });
}
|
1444
|
+
|
1445
|
+
/**
|
1446
|
+
* Returns the changes made to the Fireproof instance since the specified event.
|
1447
|
+
* @function changesSince
|
1448
|
+
* @param {CID[]} [event] - The clock head to retrieve changes since. If null or undefined, retrieves all changes.
|
1449
|
+
* @returns {Object<{rows : Object[], clock: CID[]}>} An object containing the rows and the head of the instance's clock.
|
1450
|
+
* @memberof Fireproof
|
1451
|
+
* @instance
|
1452
|
+
*/
|
1453
|
+
async changesSince (event) {
|
1454
|
+
// console.log('changesSince', this.instanceId, event, this.clock)
|
1455
|
+
let rows, dataCIDs, clockCIDs;
|
1456
|
+
// if (!event) event = []
|
1457
|
+
if (event) {
|
1458
|
+
const resp = await eventsSince(this.blocks, this.clock, event);
|
1459
|
+
const docsMap = new Map();
|
1460
|
+
for (const { key, type, value } of resp.result.map(decodeEvent)) {
|
1461
|
+
if (type === 'del') {
|
1462
|
+
docsMap.set(key, { key, del: true });
|
1463
|
+
} else {
|
1464
|
+
docsMap.set(key, { key, value });
|
1465
|
+
}
|
1466
|
+
}
|
1467
|
+
rows = Array.from(docsMap.values());
|
1468
|
+
clockCIDs = resp.cids;
|
1469
|
+
// console.log('change rows', this.instanceId, rows)
|
1470
|
+
} else {
|
1471
|
+
const allResp = await getAll(this.blocks, this.clock);
|
1472
|
+
rows = allResp.result.map(({ key, value }) => (decodeEvent({ key, value })));
|
1473
|
+
dataCIDs = allResp.cids;
|
1474
|
+
// console.log('dbdoc rows', this.instanceId, rows)
|
1475
|
+
}
|
1476
|
+
return {
|
1477
|
+
rows,
|
1478
|
+
clock: this.clockToJSON(),
|
1479
|
+
proof: { data: await cidsToProof(dataCIDs), clock: await cidsToProof(clockCIDs) }
|
1480
|
+
}
|
1481
|
+
}
|
1482
|
+
|
1483
|
+
async allDocuments () {
|
1484
|
+
const allResp = await getAll(this.blocks, this.clock);
|
1485
|
+
const rows = allResp.result.map(({ key, value }) => (decodeEvent({ key, value }))).map(({ key, value }) => ({ key, value: { _id: key, ...value } }));
|
1486
|
+
return {
|
1487
|
+
rows,
|
1488
|
+
clock: this.clockToJSON(),
|
1489
|
+
proof: await cidsToProof(allResp.cids)
|
1490
|
+
}
|
1491
|
+
}
|
1492
|
+
|
1493
|
+
/**
|
1494
|
+
* Runs validation on the specified document using the Fireproof instance's configuration. Throws an error if the document is invalid.
|
1495
|
+
*
|
1496
|
+
* @param {Object} doc - The document to validate.
|
1497
|
+
* @returns {Promise<void>}
|
1498
|
+
* @throws {Error} - Throws an error if the document is invalid.
|
1499
|
+
* @memberof Fireproof
|
1500
|
+
* @instance
|
1501
|
+
*/
|
1502
|
+
async runValidation (doc) {
|
1503
|
+
if (this.config && this.config.validateChange) {
|
1504
|
+
const oldDoc = await this.get(doc._id)
|
1505
|
+
.then((doc) => doc)
|
1506
|
+
.catch(() => ({}));
|
1507
|
+
this.config.validateChange(doc, oldDoc, this.authCtx);
|
1508
|
+
}
|
1509
|
+
}
|
1510
|
+
|
1511
|
+
/**
|
1512
|
+
* Retrieves the document with the specified ID from the database
|
1513
|
+
*
|
1514
|
+
* @param {string} key - the ID of the document to retrieve
|
1515
|
+
* @param {Object} [opts] - options
|
1516
|
+
* @returns {Promise<{_id: string}>} - the document with the specified ID
|
1517
|
+
* @memberof Fireproof
|
1518
|
+
* @instance
|
1519
|
+
*/
|
1520
|
+
async get (key, opts = {}) {
|
1521
|
+
const clock = opts.clock || this.clock;
|
1522
|
+
const resp = await get(this.blocks, clock, charwise.encode(key));
|
1523
|
+
|
1524
|
+
// this tombstone is temporary until we can get the prolly tree to delete
|
1525
|
+
if (!resp || resp.result === null) {
|
1526
|
+
throw new Error('Not found')
|
1527
|
+
}
|
1528
|
+
const doc = resp.result;
|
1529
|
+
if (opts.mvcc === true) {
|
1530
|
+
doc._clock = this.clockToJSON();
|
1531
|
+
}
|
1532
|
+
doc._proof = {
|
1533
|
+
data: await cidsToProof(resp.cids),
|
1534
|
+
clock: this.clockToJSON()
|
1535
|
+
};
|
1536
|
+
doc._id = key;
|
1537
|
+
return doc
|
1538
|
+
}
|
1539
|
+
|
1540
|
+
/**
 * Adds a new document to the database, or updates an existing document. Returns the ID of the document and the new clock head.
 *
 * @param {Object} doc - the document to be added
 * @param {string} doc._id - the document ID. If not provided, a random ID will be generated.
 * @param {CID[]} doc._clock - the clock head to write against (MVCC concurrency check).
 * @param {Proof} doc._proof - CIDs referenced by the update
 * @returns {Promise<{ id: string, clock: CID[] }>} - The result of adding the document to the database
 * @memberof Fireproof
 * @instance
 */
async put ({ _id, _proof, ...doc }) {
  const id = _id || 'f' + Math.random().toString(36).slice(2);
  await this.runValidation({ _id: id, ...doc });
  // NOTE(review): _clock is not destructured out above, so it remains inside
  // `doc` (and is persisted with the document value) while also being used as
  // the MVCC clock here — confirm this is intended.
  return await this.putToProllyTree({ key: id, value: doc }, doc._clock)
}
|
1556
|
+
|
1557
|
+
/**
|
1558
|
+
* Deletes a document from the database
|
1559
|
+
* @param {string | any} docOrId - the document ID
|
1560
|
+
* @returns {Promise<{ id: string, clock: CID[] }>} - The result of deleting the document from the database
|
1561
|
+
* @memberof Fireproof
|
1562
|
+
* @instance
|
1563
|
+
*/
|
1564
|
+
async del (docOrId) {
|
1565
|
+
let id;
|
1566
|
+
let clock = null;
|
1567
|
+
if (docOrId._id) {
|
1568
|
+
id = docOrId._id;
|
1569
|
+
clock = docOrId._clock;
|
1570
|
+
} else {
|
1571
|
+
id = docOrId;
|
1572
|
+
}
|
1573
|
+
await this.runValidation({ _id: id, _deleted: true });
|
1574
|
+
return await this.putToProllyTree({ key: id, del: true }, clock) // not working at prolly tree layer?
|
1575
|
+
// this tombstone is temporary until we can get the prolly tree to delete
|
1576
|
+
// return await this.putToProllyTree({ key: id, value: null }, clock)
|
1577
|
+
}
|
1578
|
+
|
1579
|
+
  /**
   * Updates the underlying storage with the specified event.
   * @private
   * @param {{del?: true, key : string, value?: any}} decodedEvent - the event to add
   * @param {CID[]} [clock] - the clock the caller last read at; triggers an MVCC conflict check when it differs from the current head
   * @returns {Promise<{ proof:{}, id: string, clock: CID[] }>} - The result of adding the event to storage
   */
  async putToProllyTree (decodedEvent, clock = null) {
    // Keys are charwise-encoded before they hit the tree (see encodeEvent).
    const event = encodeEvent(decodedEvent);
    // Clock comparison is by JSON serialization of the CID arrays.
    if (clock && JSON.stringify(clock) !== JSON.stringify(this.clockToJSON())) {
      // we need to check and see what version of the document exists at the clock specified
      // if it is the same as the one we are trying to put, then we can proceed
      // NOTE(review): for del events `event.value` is undefined, so reading
      // `event.value._clock` here would throw — confirm deletes never reach
      // this branch with a stale clock.
      const resp = await eventsSince(this.blocks, this.clock, event.value._clock);
      const missedChange = resp.result.find(({ key }) => key === event.key);
      if (missedChange) {
        throw new Error('MVCC conflict, document is changed, please reload the document and try again.')
      }
    }
    // All block writes happen inside a named transaction against the blockstore.
    const result = await doTransaction(
      'putToProllyTree',
      this.blocks,
      async (blocks) => await put(blocks, this.clock, event)
    );
    if (!result) {
      console.error('failed', event);
      throw new Error('failed to put at storage layer')
    }
    // console.log('new clock head', this.instanceId, result.head.toString())
    // Advance the local head only after the transaction succeeds.
    this.clock = result.head; // do we want to do this as a finally block
    await this.notifyListeners([decodedEvent]); // this type is odd
    return {
      id: decodedEvent.key,
      clock: this.clockToJSON(),
      proof: { data: await cidsToProof(result.cids), clock: await cidsToProof(result.clockCIDs) }
    }
    // todo should include additions (or split clock)
  }
|
1615
|
+
|
1616
|
+
// /**
|
1617
|
+
// * Advances the clock to the specified event and updates the root CID
|
1618
|
+
// * Will be used by replication
|
1619
|
+
// */
|
1620
|
+
// async advance (event) {
|
1621
|
+
// this.clock = await advance(this.blocks, this.clock, event)
|
1622
|
+
// this.rootCid = await root(this.blocks, this.clock)
|
1623
|
+
// return this.clock
|
1624
|
+
// }
|
1625
|
+
|
1626
|
+
  // Streams a visualization of the database's merkle structure; delegates to
  // the module-level `vis` generator over this store's blocks and clock.
  async * vis () {
    return yield * vis(this.blocks, this.clock)
  }
|
1629
|
+
|
1630
|
+
  // Delegates to visMerkleTree to render the prolly tree for the current clock.
  async visTree () {
    return await visMerkleTree(this.blocks, this.clock)
  }
|
1633
|
+
|
1634
|
+
  // Delegates to visMerkleClock to render the merkle clock for the current head.
  async visClock () {
    return await visMerkleClock(this.blocks, this.clock)
  }
|
1637
|
+
|
1638
|
+
/**
|
1639
|
+
* Registers a Listener to be called when the Fireproof instance's clock is updated.
|
1640
|
+
* Recieves live changes from the database after they are committed.
|
1641
|
+
* @param {Function} listener - The listener to be called when the clock is updated.
|
1642
|
+
* @returns {Function} - A function that can be called to unregister the listener.
|
1643
|
+
* @memberof Fireproof
|
1644
|
+
*/
|
1645
|
+
registerListener (listener) {
|
1646
|
+
this.listeners.add(listener);
|
1647
|
+
return () => {
|
1648
|
+
this.listeners.delete(listener);
|
1649
|
+
}
|
1650
|
+
}
|
1651
|
+
|
1652
|
+
async notifyListeners (changes) {
|
1653
|
+
// await sleep(10)
|
1654
|
+
for (const listener of this.listeners) {
|
1655
|
+
await listener(changes);
|
1656
|
+
}
|
1657
|
+
}
|
1658
|
+
|
1659
|
+
  // Wires a CAR upload function into the underlying valet so it can push
  // committed car files to remote storage.
  setCarUploader (carUploaderFn) {
    // console.log('registering car uploader')
    // https://en.wikipedia.org/wiki/Law_of_Demeter - this is a violation of the law of demeter
    this.blocks.valet.uploadFunction = carUploaderFn;
  }
|
1664
|
+
|
1665
|
+
  // Wires a remote block-fetch function into the underlying valet, used when
  // a block is not found locally.
  setRemoteBlockReader (remoteBlockReaderFn) {
    // console.log('registering remote block reader')
    this.blocks.valet.remoteBlockFunction = remoteBlockReaderFn;
  }
|
1669
|
+
}
|
1670
|
+
|
1671
|
+
// Flattens a CID-set accumulator (anything exposing an async `all()`) into an
// array of CID strings for use as a proof. Returns [] when there is nothing
// to prove.
async function cidsToProof (cids) {
  if (!(cids && cids.all)) return []
  const cidList = await cids.all();
  const proof = [];
  for (const cid of cidList) {
    proof.push(cid.toString());
  }
  return proof
}
|
1676
|
+
|
1677
|
+
// Reverses encodeEvent: charwise-decodes the event key back to its original
// form, leaving the other event fields untouched.
function decodeEvent (event) {
  const decodedKey = charwise.decode(event.key);
  return { ...event, key: decodedKey }
}
|
1681
|
+
|
1682
|
+
// Charwise-encodes the event key so keys of mixed types sort consistently in
// the prolly tree. Returns undefined when there is no event or key to encode.
function encodeEvent (event) {
  if (!(event && event.key)) return
  const encodedKey = charwise.encode(event.key);
  return { ...event, key: encodedKey }
}
|
1687
|
+
|
1688
|
+
// @ts-nocheck
|
1689
|
+
|
1690
|
+
// Orders [key, ref] index entries: primarily by key, with refCompare as the
// tiebreak for entries whose keys are equal.
const compare = (a, b) => {
  const [aKey, aRef] = a;
  const [bKey, bRef] = b;
  const keyOrder = utils.simpleCompare(aKey, bKey);
  if (keyOrder === 0) {
    return refCompare(aRef, bRef)
  }
  return keyOrder
};
|
1697
|
+
|
1698
|
+
// Tiebreaker for index entry refs. Guard order matters: a NaN left-hand ref
// sorts first, a NaN right-hand ref is rejected, and an Infinity left-hand
// ref sorts last; everything else falls through to simpleCompare.
const refCompare = (aRef, bRef) => {
  if (Number.isNaN(aRef)) return -1
  if (Number.isNaN(bRef)) throw new Error('ref may not be Infinity or NaN')
  if (aRef === Infinity) return 1 // need to test this on equal docids!
  // if (!Number.isFinite(bRef)) throw new Error('ref may not be Infinity or NaN')
  return utils.simpleCompare(aRef, bRef)
};
|
1705
|
+
|
1706
|
+
// Prolly-tree options for the by-key index: compound [mapKey, docId] keys use
// the pairwise `compare` above.
const dbIndexOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare };
// Prolly-tree options for the by-id index: plain docId keys use simpleCompare.
const idIndexOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare: utils.simpleCompare };
|
1708
|
+
|
1709
|
+
// Reassembles a change row into document shape, exposing the row key as _id
// (a value-provided _id wins over the row key, matching spread order).
const makeDoc = ({ key, value }) => {
  return { _id: key, ...value }
};
|
1710
|
+
|
1711
|
+
/**
|
1712
|
+
* JDoc for the result row type.
|
1713
|
+
* @typedef {Object} ChangeEvent
|
1714
|
+
* @property {string} key - The key of the document.
|
1715
|
+
* @property {Object} value - The new value of the document.
|
1716
|
+
* @property {boolean} [del] - Is the row deleted?
|
1717
|
+
* @memberof DbIndex
|
1718
|
+
*/
|
1719
|
+
|
1720
|
+
/**
|
1721
|
+
* JDoc for the result row type.
|
1722
|
+
* @typedef {Object} DbIndexEntry
|
1723
|
+
* @property {string[]} key - The key for the DbIndex entry.
|
1724
|
+
* @property {Object} value - The value of the document.
|
1725
|
+
* @property {boolean} [del] - Is the row deleted?
|
1726
|
+
* @memberof DbIndex
|
1727
|
+
*/
|
1728
|
+
|
1729
|
+
/**
 * Transforms a set of changes to DbIndex entries using a map function.
 *
 * Deleted or empty rows are skipped; the map function may emit zero or more
 * (key, value) pairs per document via its `emit` callback. Emitted keys are
 * charwise-encoded and paired with the doc id so equal map keys stay distinct.
 *
 * @param {ChangeEvent[]} changes
 * @param {Function} mapFn
 * @returns {DbIndexEntry[]} The DbIndex entries generated by the map function.
 * @private
 * @memberof DbIndex
 */
const indexEntriesForChanges = (changes, mapFn) => {
  const entries = [];
  for (const { key, value, del } of changes) {
    if (del || !value) continue
    mapFn(makeDoc({ key, value }), (mapKey, mapValue) => {
      // emit() with an undefined key or value is a no-op
      if (typeof mapValue === 'undefined' || typeof mapKey === 'undefined') return
      entries.push({
        key: [charwise.encode(mapKey), key],
        value: mapValue
      });
    });
  }
  return entries
};
|
1752
|
+
|
1753
|
+
/**
 * Represents an DbIndex for a Fireproof database.
 *
 * @class DbIndex
 * @classdesc An DbIndex can be used to order and filter the documents in a Fireproof database.
 *
 * Two prolly trees are maintained per index: `indexByKey` keyed by
 * [mapKey, docId] (serves queries), and `indexById` keyed by docId (lets
 * stale entries be located and removed when a document changes).
 *
 * @param {Fireproof} database - The Fireproof database instance to DbIndex.
 * @param {Function} mapFn - The map function to apply to each entry in the database.
 *
 */
class DbIndex {
  constructor (database, mapFn, clock, opts = {}) {
    // console.log('DbIndex constructor', database.constructor.name, typeof mapFn, clock)
    /**
     * The database instance to DbIndex.
     * @type {Fireproof}
     */
    this.database = database;
    // Index blocks live in a sibling blockstore named '<db>.indexes', sharing
    // the database's key material.
    if (!database.indexBlocks) {
      database.indexBlocks = new TransactionBlockstore(database.name + '.indexes', database.blocks.valet.getKeyMaterial());
    }
    /**
     * The map function to apply to each entry in the database.
     * @type {Function}
     */

    // A string mapFn means this index was hydrated from JSON: the source is
    // known but there is no live function until the app re-registers it.
    if (typeof mapFn === 'string') {
      this.mapFnString = mapFn;
    } else {
      this.mapFn = mapFn;
      this.mapFnString = mapFn.toString();
    }
    this.name = opts.name || this.makeName();
    // {root, cid}: root is the loaded prolly tree node, cid its persisted head.
    this.indexById = { root: null, cid: null };
    this.indexByKey = { root: null, cid: null };
    // Database clock head this index has been brought up to (null = never).
    this.dbHead = null;
    if (clock) {
      this.indexById.cid = clock.byId;
      this.indexByKey.cid = clock.byKey;
      this.dbHead = clock.db;
    }
    this.instanceId = this.database.instanceId + `.DbIndex.${Math.random().toString(36).substring(2, 7)}`;
    // Dedup slot for concurrent updateIndex calls (see updateIndex).
    this.updateIndexPromise = null;
    if (!opts.temporary) { DbIndex.registerWithDatabase(this, this.database); }
  }

  // Derives a default name from the map function source by extracting its
  // argument pairs. NOTE(review): returns matches[1] (the SECOND regex match),
  // presumably skipping the map function's own parameter list — confirm.
  makeName () {
    const regex = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g;
    const matches = Array.from(this.mapFnString.matchAll(regex), match => match[1].trim());
    return matches[1]
  }

  // Deduplicates indexes by map-function source. When an index with the same
  // source already exists, the instance carrying a live mapFn wins and
  // absorbs the other's persisted clock state.
  static registerWithDatabase (inIndex, database) {
    if (!database.indexes.has(inIndex.mapFnString)) {
      database.indexes.set(inIndex.mapFnString, inIndex);
    } else {
      // merge our inIndex code with the inIndex clock or vice versa
      const existingIndex = database.indexes.get(inIndex.mapFnString);
      // keep the code instance, discard the clock instance
      if (existingIndex.mapFn) { // this one also has other config
        existingIndex.dbHead = inIndex.dbHead;
        existingIndex.indexById.cid = inIndex.indexById.cid;
        existingIndex.indexByKey.cid = inIndex.indexByKey.cid;
      } else {
        inIndex.dbHead = existingIndex.dbHead;
        inIndex.indexById.cid = existingIndex.indexById.cid;
        inIndex.indexByKey.cid = existingIndex.indexByKey.cid;
        database.indexes.set(inIndex.mapFnString, inIndex);
      }
    }
  }

  // Serializes the index definition: name, map-function source, and the
  // string forms of its three clock heads (db head, by-id, by-key).
  toJSON () {
    const indexJson = { name: this.name, code: this.mapFnString, clock: { db: null, byId: null, byKey: null } };
    indexJson.clock.db = this.dbHead?.map(cid => cid.toString());
    indexJson.clock.byId = this.indexById.cid?.toString();
    indexJson.clock.byKey = this.indexByKey.cid?.toString();
    return indexJson
  }

  // Inverse of toJSON: rebuilds an index from its serialized definition.
  // The map function arrives as source only, so the result has no live mapFn
  // until one is re-registered (see registerWithDatabase).
  static fromJSON (database, { code, clock, name }) {
    // console.log('DbIndex.fromJSON', database.constructor.name, code, clock)
    return new DbIndex(database, code, clock, { name })
  }

  /**
   * JSDoc for Query type.
   * @typedef {Object} DbQuery
   * @property {string[]} [range] - The range to query.
   * @memberof DbIndex
   */

  /**
   * Query object can have {range}
   * @param {DbQuery} query - the query range to use
   * @param {boolean} [update=true] - refresh the index from database changes before querying
   * @returns {Promise<{proof: {}, rows: Array<{id: string, key: string, value: any}>}>}
   * @memberof DbIndex
   * @instance
   */
  async query (query, update = true) {
    // const callId = Math.random().toString(36).substring(2, 7)
    // todo pass a root to query a snapshot
    // console.time(callId + '.updateIndex')
    update && await this.updateIndex(this.database.indexBlocks);
    // console.timeEnd(callId + '.updateIndex')
    // console.time(callId + '.doIndexQuery')
    // console.log('query', query)
    const response = await doIndexQuery(this.database.indexBlocks, this.indexByKey, query);
    // console.timeEnd(callId + '.doIndexQuery')
    return {
      proof: { index: await cidsToProof(response.cids) },
      // decode the charwise map key back to its original form for callers
      rows: response.result.map(({ id, key, row }) => {
        return ({ id, key: charwise.decode(key), value: row })
      })
    }
  }

  /**
   * Update the DbIndex with the latest changes
   * @private
   * @returns {Promise<void>}
   */

  async updateIndex (blocks) {
    // todo this could enqueue the request and give fresh ones to all second comers -- right now it gives out stale promises while working
    // what would it do in a world where all indexes provide a database snapshot to query?
    // Dedup concurrent callers onto one in-flight update; the finally() clears
    // the slot either way, and rejections still propagate to every awaiter.
    if (this.updateIndexPromise) return this.updateIndexPromise
    this.updateIndexPromise = this.innerUpdateIndex(blocks);
    this.updateIndexPromise.finally(() => { this.updateIndexPromise = null; });
    return this.updateIndexPromise
  }

  async innerUpdateIndex (inBlocks) {
    // console.log('dbHead', this.dbHead)
    // console.time(callTag + '.changesSince')
    // Incremental: only rows changed since the last head this index saw.
    const result = await this.database.changesSince(this.dbHead); // {key, value, del}
    // console.timeEnd(callTag + '.changesSince')
    // console.log('result.rows.length', result.rows.length)

    // console.time(callTag + '.doTransactionupdateIndex')
    // console.log('updateIndex changes length', result.rows.length)

    if (result.rows.length === 0) {
      // console.log('updateIndex < no changes', result.clock)
      this.dbHead = result.clock;
      return
    }
    await doTransaction('updateIndex', inBlocks, async (blocks) => {
      let oldIndexEntries = [];
      let removeByIdIndexEntries = [];
      await loadIndex(blocks, this.indexById, idIndexOpts);
      await loadIndex(blocks, this.indexByKey, dbIndexOpts);
      if (this.dbHead) {
        // Look up the changed docs' previous index entries so they can be
        // deleted from both trees before the fresh entries go in.
        const oldChangeEntries = await this.indexById.root.getMany(result.rows.map(({ key }) => key));
        oldIndexEntries = oldChangeEntries.result.map((key) => ({ key, del: true }));
        removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }));
      }
      if (!this.mapFn) {
        throw new Error('No live map function installed for index, cannot update. Make sure your index definition runs before any queries.' + (this.mapFnString ? ' Your code should match the stored map function source:\n' + this.mapFnString : ''))
      }
      const indexEntries = indexEntriesForChanges(result.rows, this.mapFn);
      // by-id entries map docId -> [mapKey, docId], the reverse lookup.
      const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }));
      this.indexById = await bulkIndex(blocks, this.indexById, removeByIdIndexEntries.concat(byIdIndexEntries), idIndexOpts);
      this.indexByKey = await bulkIndex(blocks, this.indexByKey, oldIndexEntries.concat(indexEntries), dbIndexOpts);
      this.dbHead = result.clock;
    });
    this.database.notifyExternal('dbIndex');
    // console.timeEnd(callTag + '.doTransactionupdateIndex')
    // console.log(`updateIndex ${callTag} <`, this.instanceId, this.dbHead?.toString(), this.indexByKey.cid?.toString(), this.indexById.cid?.toString())
  }
}
|
1924
|
+
|
1925
|
+
/**
 * Update the DbIndex with the given entries (puts and deletes), creating the
 * prolly tree if it does not exist yet.
 * @param {import('./blockstore.js').Blockstore} blocks
 * @param {{root, cid}} inIndex - current index handle; `root` is the loaded tree, `cid` its persisted head
 * @param {DbIndexEntry[]} indexEntries
 * @param {Object} opts - prolly-tree options (codec, hasher, chunker, compare) for THIS index
 * @returns {Promise<{root, cid}>} the updated index handle
 * @private
 */
async function bulkIndex (blocks, inIndex, indexEntries, opts) {
  if (!indexEntries.length) return inIndex
  const putBlock = blocks.put.bind(blocks);
  const { getBlock } = makeGetBlock(blocks);
  let returnRootBlock;
  let returnNode;
  if (!inIndex.root) {
    const cid = inIndex.cid;
    if (!cid) {
      // Fresh index: build the tree from scratch, persisting every node.
      // The last node yielded is the root.
      for await (const node of await dbIndex.create({ get: getBlock, list: indexEntries, ...opts })) {
        const block = await node.block;
        await putBlock(block.cid, block.bytes);
        returnRootBlock = block;
        returnNode = node;
      }
      return { root: returnNode, cid: returnRootBlock.cid }
    }
    // Fix: load with the caller's opts (was `...dbIndexOpts`), so the by-id
    // index is not loaded with the by-key compound comparator.
    inIndex.root = await dbIndex.load({ cid, get: getBlock, ...opts });
  }
  // Apply the batch to the existing tree and persist the new blocks plus the
  // new root.
  const { root, blocks: newBlocks } = await inIndex.root.bulk(indexEntries);
  returnRootBlock = await root.block;
  returnNode = root;
  for await (const block of newBlocks) {
    await putBlock(block.cid, block.bytes);
  }
  await putBlock(returnRootBlock.cid, returnRootBlock.bytes);
  return { root: returnNode, cid: returnRootBlock.cid }
}
|
1960
|
+
|
1961
|
+
// Lazily materializes the prolly-tree root for an index handle that only has
// a head CID (e.g. after hydration from JSON). No-op when already loaded;
// returns undefined for an index that has never been written.
async function loadIndex (blocks, index, indexOpts) {
  if (index.root) return index.root
  const { cid } = index;
  if (!cid) return
  const { getBlock } = makeGetBlock(blocks);
  index.root = await dbIndex.load({ cid, get: getBlock, ...indexOpts });
  return index.root
}
|
1970
|
+
|
1971
|
+
/**
 * Truncates a query result to `limit` rows. When `limit` is undefined every
 * row is kept (slice(0, undefined) copies the whole array).
 *
 * Fixed to return a shallow copy instead of mutating the caller's object —
 * callers in this file only use the return value.
 * @param {{result: Array}} results - query result carrying a `result` row array
 * @param {number} [limit] - maximum number of rows to keep
 * @returns {Promise<{result: Array}>} the limited result
 * @private
 */
async function applyLimit (results, limit) {
  return { ...results, result: results.result.slice(0, limit) }
}
|
1975
|
+
|
1976
|
+
// Runs a query against the by-key prolly tree: a range scan, a single-key
// get, or a full scan — the latter two shaped/limited differently (see notes).
async function doIndexQuery (blocks, indexByKey, query = {}) {
  await loadIndex(blocks, indexByKey, dbIndexOpts);
  // Index has never been written to: nothing to return.
  if (!indexByKey.root) return { result: [] }
  if (query.range) {
    const encodedRange = query.range.map((key) => charwise.encode(key));
    // NOTE(review): range results are limited but not reshaped into
    // {key, id, row} like the full-scan branch — confirm callers handle this.
    return applyLimit(await indexByKey.root.range(...encodedRange), query.limit)
  } else if (query.key) {
    const encodedKey = charwise.encode(query.key);
    // NOTE(review): returns the raw tree result, with no reshaping and no
    // limit applied — confirm this shape is what callers expect.
    return indexByKey.root.get(encodedKey)
  } else {
    // Full scan: unpack the [mapKey, docId] compound keys into {key, id, row}.
    const { result, ...all } = await indexByKey.root.getAllEntries();
    return applyLimit({ result: result.map(({ key: [k, id], value }) => ({ key: k, id, row: value })), ...all }, query.limit)
  }
}
|
1990
|
+
|
1991
|
+
// @ts-nocheck
|
1992
|
+
/**
 * A Fireproof database Listener allows you to react to events in the database.
 *
 * @class Listener
 * @classdesc An listener attaches to a Fireproof database and runs a routing function on each change, sending the results to subscribers.
 *
 * @param {Fireproof} database - The Fireproof database instance to index.
 * @param {Function} routingFn - The routing function to apply to each entry in the database.
 */
// import { ChangeEvent } from './db-index'

class Listener {
  // topic name -> array of subscriber callbacks.
  // NOTE(review): 'subcribers' is a typo for 'subscribers'; left unchanged to
  // avoid altering the class's visible surface.
  subcribers = new Map()
  // unregister function returned by database.registerListener
  doStopListening = null

  constructor (database, routingFn) {
    /** routingFn
     * The database instance to index.
     * @type {Fireproof}
     */
    this.database = database;
    // Subscribe to committed changes; onChanges fans them out to topics.
    this.doStopListening = database.registerListener(changes => this.onChanges(changes));
    /**
     * The map function to apply to each entry in the database.
     * @type {Function}
     */
    // Default routing publishes every change under the '*' topic.
    this.routingFn =
      routingFn ||
      function (_, emit) {
        emit('*');
      };
    this.dbHead = null;
  }

  /**
   * Subscribe to a topic emitted by the event function.
   * @param {string} topic - The topic to subscribe to.
   * @param {Function} subscriber - The function to call when the topic is emitted.
   * @param {CID[]} [since] - when provided, changes since this clock are replayed to the new subscriber.
   * @returns {Function} A function to unsubscribe from the topic.
   * @memberof Listener
   * @instance
   */
  on (topic, subscriber, since) {
    const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
    listOfTopicSubscribers.push(subscriber);
    if (typeof since !== 'undefined') {
      // Catch-up replay: route historical changes and deliver matching keys.
      // NOTE(review): this promise is neither awaited nor error-handled, so a
      // failing changesSince is silently dropped.
      this.database.changesSince(since).then(({ rows: changes }) => {
        const keys = topicsForChanges(changes, this.routingFn).get(topic);
        if (keys) keys.forEach(key => subscriber(key));
      });
    }
    return () => {
      const index = listOfTopicSubscribers.indexOf(subscriber);
      if (index > -1) listOfTopicSubscribers.splice(index, 1);
    }
  }

  // Fans a change notification out to subscribers. Array batches are routed
  // per-topic through routingFn; anything else is delivered verbatim to every
  // subscriber of every topic.
  onChanges (changes) {
    if (Array.isArray(changes)) {
      const seenTopics = topicsForChanges(changes, this.routingFn);
      for (const [topic, keys] of seenTopics) {
        const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
        listOfTopicSubscribers.forEach(subscriber => keys.forEach(key => subscriber(key)));
      }
    } else {
      // non-arrays go to all subscribers
      for (const [, listOfTopicSubscribers] of this.subcribers) {
        listOfTopicSubscribers.forEach(subscriber => subscriber(changes));
      }
    }
  }
}
|
2064
|
+
|
2065
|
+
// Returns the subscriber list for a topic, creating (and storing) an empty
// list on first access so callers can push into it directly.
function getTopicList (subscribersMap, name) {
  let topicList = subscribersMap.get(name);
  if (topicList) return topicList
  topicList = [];
  subscribersMap.set(name, topicList);
  return topicList
}
|
2073
|
+
|
2074
|
+
/**
 * Transforms a set of changes to events using an emitter function.
 *
 * Deleted/empty rows are routed as `{_deleted: true}` documents so routing
 * functions can still see them.
 *
 * @param {ChangeEvent[]} changes
 * @param {Function} routingFn
 * @returns {Map<string, string[]>} topic name -> keys of changed documents emitted under that topic.
 * @private
 */
const topicsForChanges = (changes, routingFn) => {
  const seenTopics = new Map();
  for (const change of changes) {
    const { key, del } = change;
    let { value } = change;
    if (del || !value) value = { _deleted: true };
    routingFn({ _id: key, ...value }, (topic) => {
      getTopicList(seenTopics, topic).push(key);
    });
  }
  return seenTopics
};
|
2093
|
+
|
2094
|
+
// Normalizes a CID given as either a string or a CID instance: strings are
// parsed, instances pass through unchanged.
const parseCID = cid => typeof cid === 'string' ? multiformats.CID.parse(cid) : cid;
|
2095
|
+
|
2096
|
+
/**
 * Rebuilds Fireproof databases (and their indexes) from serialized JSON
 * definitions, and produces snapshots pinned to a given clock.
 */
class Hydrator {
  // Applies a serialized definition (clock, name, key, indexes) onto an
  // existing database instance; serialized clock strings are re-parsed into
  // CIDs before use.
  static fromJSON (json, database) {
    database.hydrate({ clock: json.clock.map(c => parseCID(c)), name: json.name, key: json.key });
    if (json.indexes) {
      for (const { name, code, clock: { byId, byKey, db } } of json.indexes) {
        DbIndex.fromJSON(database, {
          clock: {
            byId: byId ? parseCID(byId) : null,
            byKey: byKey ? parseCID(byKey) : null,
            db: db ? db.map(c => parseCID(c)) : null
          },
          code,
          name
        });
      }
    }
    return database
  }

  // Returns a new Fireproof instance sharing this database's blockstore,
  // optionally pinned to an earlier clock. When pinned, the serialized index
  // heads are cleared so the indexes rebuild against the snapshot clock; live
  // map functions are carried over to the snapshot's indexes.
  static snapshot (database, clock) {
    const definition = database.toJSON();
    const withBlocks = new Fireproof(database.blocks);
    if (clock) {
      definition.clock = clock.map(c => parseCID(c));
      definition.indexes.forEach(index => {
        index.clock.byId = null;
        index.clock.byKey = null;
        index.clock.db = null;
      });
    }
    const snappedDb = this.fromJSON(definition, withBlocks)
    ;([...database.indexes.values()]).forEach(index => {
      snappedDb.indexes.get(index.mapFnString).mapFn = index.mapFn;
    });
    return snappedDb
  }

  // Moves this database in place to the given clock, resetting all index
  // state (roots, cids, heads) so it will be recomputed, then notifies reset
  // listeners.
  static async zoom (database, clock) {
    ([...database.indexes.values()]).forEach(index => {
      index.indexById = { root: null, cid: null };
      index.indexByKey = { root: null, cid: null };
      index.dbHead = null;
    });
    database.clock = clock.map(c => parseCID(c));
    await database.notifyReset(); // hmm... indexes should listen to this? might be more complex than worth it. so far this is the only caller
    return database
  }
}
|
2144
|
+
|
2145
|
+
exports.Fireproof = Fireproof;
|
2146
|
+
exports.Hydrator = Hydrator;
|
2147
|
+
exports.Index = DbIndex;
|
2148
|
+
exports.Listener = Listener;
|
2149
|
+
|
2150
|
+
return exports;
|
2151
|
+
|
2152
|
+
})({}, crypto, Block, sha2, dagcbor, utils, map, cache, link, multiformats, car, cid, CBW, raw, idb, cargoQueue, codec, cidSet, buffer, charwise, dbIndex);
|