@fireproof/core 0.3.12 → 0.3.14

This diff shows the changes between publicly released versions of the package as they appear in its public registry. It is provided for informational purposes only.
@@ -0,0 +1,1637 @@
1
+ 'use strict';
2
+
3
+ var crypto = require('crypto');
4
+ var Block = require('multiformats/block');
5
+ var sha2 = require('multiformats/hashes/sha2');
6
+ var dagcbor = require('@ipld/dag-cbor');
7
+ var utils = require('prolly-trees/utils');
8
+ var map = require('prolly-trees/map');
9
+ var cache = require('prolly-trees/cache');
10
+ var link = require('multiformats/link');
11
+ var multiformats = require('multiformats');
12
+ var car = require('@ipld/car');
13
+ var cid = require('multiformats/cid');
14
+ var CBW = require('@ipld/car/buffer-writer');
15
+ var raw = require('multiformats/codecs/raw');
16
+ var idb = require('idb');
17
+ var cargoQueue = require('async/cargoQueue.js');
18
+ var codec = require('encrypted-block');
19
+ var cidSet = require('prolly-trees/cid-set');
20
+ var buffer = require('buffer');
21
+ var charwise = require('charwise');
22
+ var dbIndex = require('prolly-trees/db-index');
23
+
24
+ function _interopNamespaceDefault(e) {
25
+ var n = Object.create(null);
26
+ if (e) {
27
+ Object.keys(e).forEach(function (k) {
28
+ if (k !== 'default') {
29
+ var d = Object.getOwnPropertyDescriptor(e, k);
30
+ Object.defineProperty(n, k, d.get ? d : {
31
+ enumerable: true,
32
+ get: function () { return e[k]; }
33
+ });
34
+ }
35
+ });
36
+ }
37
+ n.default = e;
38
+ return Object.freeze(n);
39
+ }
40
+
41
+ var Block__namespace = /*#__PURE__*/_interopNamespaceDefault(Block);
42
+ var dagcbor__namespace = /*#__PURE__*/_interopNamespaceDefault(dagcbor);
43
+ var CBW__namespace = /*#__PURE__*/_interopNamespaceDefault(CBW);
44
+ var raw__namespace = /*#__PURE__*/_interopNamespaceDefault(raw);
45
+ var codec__namespace = /*#__PURE__*/_interopNamespaceDefault(codec);
46
+
47
+ async function advance(blocks, head, event) {
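+ // Advance the Merkle clock head with a new event: head entries the event descends from are replaced,
+ // an event already covered by the head is a no-op, and anything else is added as a concurrent head.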
48
+ const events = new EventFetcher(blocks);
49
+ const headmap = new Map(head.map((cid) => [cid.toString(), cid]));
50
+ if (headmap.has(event.toString()))
51
+ return { head, cids: await events.all() };
52
+ let changed = false;
53
+ for (const cid of head) {
54
+ if (await contains(events, event, cid)) {
55
+ headmap.delete(cid.toString());
56
+ headmap.set(event.toString(), event);
57
+ changed = true;
58
+ }
59
+ }
60
+ if (changed) {
61
+ return { head: [...headmap.values()], cids: await events.all() };
62
+ }
63
+ for (const p of head) {
64
+ if (await contains(events, p, event)) {
65
+ return { head, cids: await events.all() };
66
+ }
67
+ }
68
+ return { head: head.concat(event), cids: await events.all() };
69
+ }
70
+ class EventBlock extends Block.Block {
71
+ /**
72
+ * @param {object} config
73
+ * @param {EventLink<T>} config.cid
74
+ * @param {Event} config.value
75
+ * @param {Uint8Array} config.bytes
76
+ */
77
+ constructor({ cid, value, bytes }) {
78
+ super({ cid, value, bytes });
79
+ }
80
+ /**
81
+ * @template T
82
+ * @param {T} data
83
+ * @param {EventLink<T>[]} [parents]
84
+ */
85
+ static create(data, parents) {
86
+ return encodeEventBlock({ data, parents: parents ?? [] });
87
+ }
88
+ }
89
+ class EventFetcher {
90
+ /** @param {import('./blockstore').TransactionBlockstore} blocks */
91
+ constructor(blocks) {
92
+ this._blocks = blocks;
93
+ this._cids = new utils.CIDCounter();
94
+ this._cache = /* @__PURE__ */ new Map();
95
+ }
96
+ /**
97
+ * @param {EventLink<T>} link
98
+ * @returns {Promise<EventBlockView<T>>}
99
+ */
100
+ async get(link) {
101
+ const slink = link.toString();
102
+ if (this._cache.has(slink))
103
+ return this._cache.get(slink);
104
+ const block = await this._blocks.get(link);
105
+ this._cids.add({ address: link });
106
+ if (!block)
107
+ throw new Error(`missing block: ${link}`);
108
+ const got = decodeEventBlock(block.bytes);
109
+ this._cache.set(slink, got);
110
+ return got;
111
+ }
112
+ async all() {
113
+ return this._cids.all();
114
+ }
115
+ }
116
+ async function encodeEventBlock(value) {
117
+ const { cid, bytes } = await Block.encode({ value, codec: dagcbor__namespace, hasher: sha2.sha256 });
118
+ return new Block.Block({ cid, value, bytes });
119
+ }
120
+ async function decodeEventBlock(bytes) {
121
+ const { cid, value } = await Block.decode({ bytes, codec: dagcbor__namespace, hasher: sha2.sha256 });
122
+ return new Block.Block({ cid, value, bytes });
123
+ }
124
+ async function contains(events, a, b) {
125
+ if (a.toString() === b.toString())
126
+ return true;
127
+ const [{ value: aevent }, { value: bevent }] = await Promise.all([events.get(a), events.get(b)]);
128
+ const links = [...aevent.parents];
129
+ while (links.length) {
130
+ const link = links.shift();
131
+ if (!link)
132
+ break;
133
+ if (link.toString() === b.toString())
134
+ return true;
135
+ if (bevent.parents.some((p) => link.toString() === p.toString()))
136
+ continue;
137
+ const { value: event } = await events.get(link);
138
+ links.push(...event.parents);
139
+ }
140
+ return false;
141
+ }
142
+ async function* vis$1(blocks, head, options = {}) {
143
+ const renderNodeLabel = options.renderNodeLabel ?? ((b) => b.value.data.value);
144
+ const events = new EventFetcher(blocks);
145
+ yield "digraph clock {";
146
+ yield ' node [shape=point fontname="Courier"]; head;';
147
+ const hevents = await Promise.all(head.map((link) => events.get(link)));
148
+ const links = [];
149
+ const nodes = /* @__PURE__ */ new Set();
150
+ for (const e of hevents) {
151
+ nodes.add(e.cid.toString());
152
+ yield ` node [shape=oval fontname="Courier"]; ${e.cid} [label="${renderNodeLabel(e)}"];`;
153
+ yield ` head -> ${e.cid};`;
154
+ for (const p of e.value.parents) {
155
+ yield ` ${e.cid} -> ${p};`;
156
+ }
157
+ links.push(...e.value.parents);
158
+ }
159
+ while (links.length) {
160
+ const link = links.shift();
161
+ if (!link)
162
+ break;
163
+ if (nodes.has(link.toString()))
164
+ continue;
165
+ nodes.add(link.toString());
166
+ const block = await events.get(link);
167
+ yield ` node [shape=oval]; ${link} [label="${renderNodeLabel(block)}" fontname="Courier"];`;
168
+ for (const p of block.value.parents) {
169
+ yield ` ${link} -> ${p};`;
170
+ }
171
+ links.push(...block.value.parents);
172
+ }
173
+ yield "}";
174
+ }
175
+ async function findEventsToSync(blocks, head) {
176
+ const events = new EventFetcher(blocks);
177
+ const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
178
+ const toSync = await asyncFilter(sorted, async (uks) => !await contains(events, ancestor, uks.cid));
179
+ return { cids: events.all(), events: toSync };
180
+ }
181
+ const asyncFilter = async (arr, predicate) => Promise.all(arr.map(predicate)).then((results) => arr.filter((_v, index) => results[index]));
182
+ async function findCommonAncestorWithSortedEvents(events, children) {
183
+ const ancestor = await findCommonAncestor(events, children);
184
+ if (!ancestor) {
185
+ throw new Error("failed to find common ancestor event");
186
+ }
187
+ const sorted = await findSortedEvents(events, children, ancestor);
188
+ return { ancestor, sorted };
189
+ }
190
+ async function findCommonAncestor(events, children) {
191
+ if (!children.length)
192
+ return;
193
+ const candidates = children.map((c) => [c]);
194
+ while (true) {
195
+ let changed = false;
196
+ for (const c of candidates) {
197
+ const candidate = await findAncestorCandidate(events, c[c.length - 1]);
198
+ if (!candidate)
199
+ continue;
200
+ changed = true;
201
+ c.push(candidate);
202
+ const ancestor = findCommonString(candidates);
203
+ if (ancestor)
204
+ return ancestor;
205
+ }
206
+ if (!changed)
207
+ return;
208
+ }
209
+ }
210
+ async function findAncestorCandidate(events, root) {
211
+ const { value: event } = await events.get(root);
212
+ if (!event.parents.length)
213
+ return root;
214
+ return event.parents.length === 1 ? event.parents[0] : findCommonAncestor(events, event.parents);
215
+ }
216
+ function findCommonString(arrays) {
217
+ arrays = arrays.map((a) => [...a]);
218
+ for (const arr of arrays) {
219
+ for (const item of arr) {
220
+ let matched = true;
221
+ for (const other of arrays) {
222
+ if (arr === other)
223
+ continue;
224
+ matched = other.some((i) => String(i) === String(item));
225
+ if (!matched)
226
+ break;
227
+ }
228
+ if (matched)
229
+ return item;
230
+ }
231
+ }
232
+ }
233
+ async function findSortedEvents(events, head, tail) {
234
+ const weights = /* @__PURE__ */ new Map();
235
+ const all = await Promise.all(head.map((h) => findEvents(events, h, tail)));
236
+ for (const arr of all) {
237
+ for (const { event, depth } of arr) {
238
+ const info = weights.get(event.cid.toString());
239
+ if (info) {
240
+ info.weight += depth;
241
+ } else {
242
+ weights.set(event.cid.toString(), { event, weight: depth });
243
+ }
244
+ }
245
+ }
246
+ const buckets = /* @__PURE__ */ new Map();
247
+ for (const { event, weight } of weights.values()) {
248
+ const bucket = buckets.get(weight);
249
+ if (bucket) {
250
+ bucket.push(event);
251
+ } else {
252
+ buckets.set(weight, [event]);
253
+ }
254
+ }
255
+ const sorted = Array.from(buckets).sort((a, b) => b[0] - a[0]).flatMap(([, es]) => es.sort((a, b) => String(a.cid) < String(b.cid) ? -1 : 1));
256
+ return sorted;
257
+ }
258
+ async function findEvents(events, start, end, depth = 0) {
259
+ const event = await events.get(start);
260
+ const acc = [{ event, depth }];
261
+ const { parents } = event.value;
262
+ if (parents.length === 1 && String(parents[0]) === String(end))
263
+ return acc;
264
+ const rest = await Promise.all(parents.map((p) => findEvents(events, p, end, depth + 1)));
265
+ return acc.concat(...rest);
266
+ }
267
+
268
+ const createBlock = (bytes, cid) => Block.create({ cid, bytes, hasher: sha2.sha256, codec: codec__namespace });
269
+ const encrypt = async function* ({ get, cids, hasher: hasher2, key, cache, chunker, root }) {
270
+ const set = /* @__PURE__ */ new Set();
271
+ let eroot;
272
+ for (const string of cids) {
273
+ const cid = multiformats.CID.parse(string);
274
+ const unencrypted = await get(cid);
275
+ const block2 = await Block.encode({ ...await codec__namespace.encrypt({ ...unencrypted, key }), codec: codec__namespace, hasher: hasher2 });
276
+ yield block2;
277
+ set.add(block2.cid.toString());
278
+ if (unencrypted.cid.equals(root))
279
+ eroot = block2.cid;
280
+ }
281
+ if (!eroot)
282
+ throw new Error("cids does not include root");
283
+ const list = [...set].map((s) => multiformats.CID.parse(s));
284
+ let last;
285
+ for await (const node of cidSet.create({ list, get, cache, chunker, hasher: hasher2, codec: dagcbor__namespace })) {
286
+ const block2 = await node.block;
287
+ yield block2;
288
+ last = block2;
289
+ }
290
+ const head = [eroot, last.cid];
291
+ const block = await Block.encode({ value: head, codec: dagcbor__namespace, hasher: hasher2 });
292
+ yield block;
293
+ };
294
+ const decrypt = async function* ({ root, get, key, cache, chunker, hasher: hasher2 }) {
295
+ const o = { ...await get(root), codec: dagcbor__namespace, hasher: hasher2 };
296
+ const decodedRoot = await Block.decode(o);
297
+ const { value: [eroot, tree] } = decodedRoot;
298
+ const rootBlock = await get(eroot);
299
+ const cidset = await cidSet.load({ cid: tree, get, cache, chunker, codec: codec__namespace, hasher: hasher2 });
300
+ const { result: nodes } = await cidset.getAllEntries();
301
+ const unwrap = async (eblock) => {
302
+ const { bytes, cid } = await codec__namespace.decrypt({ ...eblock, key }).catch((e) => {
303
+ console.log("ekey", e);
304
+ throw new Error("bad key: " + key.toString("hex"));
305
+ });
306
+ const block = await createBlock(bytes, cid);
307
+ return block;
308
+ };
309
+ const promises = [];
310
+ for (const { cid } of nodes) {
311
+ if (!rootBlock.cid.equals(cid))
312
+ promises.push(get(cid).then(unwrap));
313
+ }
314
+ yield* promises;
315
+ yield unwrap(rootBlock);
316
+ };
317
+
318
+ function rawSha1(b) {
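+ // Minimal SHA-1, used only to fingerprint the key material (the keyId in the IndexedDB name); content addressing uses SHA-256.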
319
+ let i = b.byteLength;
320
+ let bs = 0;
321
+ let A;
322
+ let B;
323
+ let C;
324
+ let D;
325
+ let G;
326
+ const H = Uint32Array.from([A = 1732584193, B = 4023233417, ~A, ~B, 3285377520]);
327
+ const W = new Uint32Array(80);
328
+ const nrWords = i / 4 + 2 | 15;
329
+ const words = new Uint32Array(nrWords + 1);
330
+ let j;
331
+ words[nrWords] = i * 8;
332
+ words[i >> 2] |= 128 << (~i << 3);
333
+ for (; i--; ) {
334
+ words[i >> 2] |= b[i] << (~i << 3);
335
+ }
336
+ for (A = H.slice(); bs < nrWords; bs += 16, A.set(H)) {
337
+ for (i = 0; i < 80; A[0] = (G = ((b = A[0]) << 5 | b >>> 27) + A[4] + (W[i] = i < 16 ? words[bs + i] : G << 1 | G >>> 31) + 1518500249, B = A[1], C = A[2], D = A[3], G + ((j = i / 5 >> 2) ? j !== 2 ? (B ^ C ^ D) + (j & 2 ? 1876969533 : 341275144) : (B & C | B & D | C & D) + 882459459 : B & C | ~B & D)), A[1] = b, A[2] = B << 30 | B >>> 2, A[3] = C, A[4] = D, ++i) {
338
+ G = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
339
+ }
340
+ for (i = 5; i; )
341
+ H[--i] = H[i] + A[i];
342
+ }
343
+ return new Uint8Array(H.buffer, H.byteOffset, H.byteLength);
344
+ }
345
+
346
+ const chunker = utils.bf(3);
347
+ const NO_ENCRYPT = process.env?.NO_ENCRYPT;
348
+ class Valet {
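+ // Parks transaction car files in IndexedDB, encrypting them when key material is set, and queues them for upload once an uploadFunction is installed.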
349
+ idb = null;
350
+ name = null;
351
+ uploadQueue = null;
352
+ alreadyEnqueued = /* @__PURE__ */ new Set();
353
+ keyMaterial = null;
354
+ keyId = "null";
355
+ /**
356
+ * Function installed by the database to upload car files
357
+ * @type {null|function(string, Uint8Array):Promise<void>}
358
+ */
359
+ uploadFunction = null;
360
+ constructor(name = "default", keyMaterial) {
361
+ this.name = name;
362
+ this.setKeyMaterial(keyMaterial);
363
+ this.uploadQueue = cargoQueue(async (tasks, callback) => {
364
+ console.log(
365
+ "queue worker",
366
+ tasks.length,
367
+ tasks.reduce((acc, t) => acc + t.value.length, 0)
368
+ );
369
+ if (this.uploadFunction) {
370
+ return await this.withDB(async (db) => {
371
+ for (const task of tasks) {
372
+ await this.uploadFunction(task.carCid, task.value);
373
+ const carMeta = await db.get("cidToCar", task.carCid);
374
+ delete carMeta.pending;
375
+ await db.put("cidToCar", carMeta);
376
+ }
377
+ });
378
+ }
379
+ callback();
380
+ });
381
+ this.uploadQueue.drain(async () => {
382
+ return await this.withDB(async (db) => {
383
+ const carKeys = (await db.getAllFromIndex("cidToCar", "pending")).map((c) => c.car);
384
+ for (const carKey of carKeys) {
385
+ await this.uploadFunction(carKey, await db.get("cars", carKey));
386
+ const carMeta = await db.get("cidToCar", carKey);
387
+ delete carMeta.pending;
388
+ await db.put("cidToCar", carMeta);
389
+ }
390
+ });
391
+ });
392
+ }
393
+ getKeyMaterial() {
394
+ return this.keyMaterial;
395
+ }
396
+ setKeyMaterial(km) {
397
+ if (km && !NO_ENCRYPT) {
398
+ const hex = Uint8Array.from(buffer.Buffer.from(km, "hex"));
399
+ this.keyMaterial = km;
400
+ const hash = rawSha1(hex);
401
+ this.keyId = buffer.Buffer.from(hash).toString("hex");
402
+ } else {
403
+ this.keyMaterial = null;
404
+ this.keyId = "null";
405
+ }
406
+ }
407
+ /**
408
+ * Group the blocks into a car and write it to the valet.
409
+ * @param {import('./blockstore.js').InnerBlockstore} innerBlockstore
410
+ * @param {Set<string>} cids
411
+ * @returns {Promise<void>}
412
+ * @memberof Valet
413
+ */
414
+ async writeTransaction(innerBlockstore, cids) {
415
+ if (innerBlockstore.lastCid) {
416
+ if (this.keyMaterial) {
417
+ const newCar = await blocksToEncryptedCarBlock(innerBlockstore.lastCid, innerBlockstore, this.keyMaterial);
418
+ await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
419
+ } else {
420
+ const newCar = await blocksToCarBlock(innerBlockstore.lastCid, innerBlockstore);
421
+ await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
422
+ }
423
+ }
424
+ }
425
+ withDB = async (dbWorkFun) => {
426
+ if (!this.idb) {
427
+ this.idb = await idb.openDB(`fp.${this.keyId}.${this.name}.valet`, 2, {
428
+ upgrade(db, oldVersion, newVersion, transaction) {
429
+ if (oldVersion < 1) {
430
+ db.createObjectStore("cars");
431
+ const cidToCar = db.createObjectStore("cidToCar", { keyPath: "car" });
432
+ cidToCar.createIndex("cids", "cids", { multiEntry: true });
433
+ }
434
+ if (oldVersion < 2) {
435
+ const cidToCar = transaction.objectStore("cidToCar");
436
+ cidToCar.createIndex("pending", "pending");
437
+ }
438
+ }
439
+ });
440
+ }
441
+ return await dbWorkFun(this.idb);
442
+ };
443
+ /**
444
+ *
445
+ * @param {string} carCid
446
+ * @param {*} value
447
+ */
448
+ async parkCar(carCid, value, cids) {
449
+ await this.withDB(async (db) => {
450
+ const tx = db.transaction(["cars", "cidToCar"], "readwrite");
451
+ await tx.objectStore("cars").put(value, carCid);
452
+ await tx.objectStore("cidToCar").put({ pending: "y", car: carCid, cids: Array.from(cids) });
453
+ return await tx.done;
454
+ });
455
+ if (this.uploadFunction) {
456
+ if (this.alreadyEnqueued.has(carCid)) {
457
+ return;
458
+ }
459
+ this.uploadQueue.push({ carCid, value });
460
+ this.alreadyEnqueued.add(carCid);
461
+ }
462
+ }
463
+ remoteBlockFunction = null;
464
+ async getBlock(dataCID) {
465
+ return await this.withDB(async (db) => {
466
+ const tx = db.transaction(["cars", "cidToCar"], "readonly");
467
+ const indexResp = await tx.objectStore("cidToCar").index("cids").get(dataCID);
468
+ const carCid = indexResp?.car;
469
+ if (!carCid) {
470
+ throw new Error("Missing block: " + dataCID);
471
+ }
472
+ const carBytes = await tx.objectStore("cars").get(carCid);
473
+ const reader = await car.CarReader.fromBytes(carBytes);
474
+ if (this.keyMaterial) {
475
+ const roots = await reader.getRoots();
476
+ const readerGetWithCodec = async (cid) => {
477
+ const got = await reader.get(cid);
478
+ let useCodec = codec__namespace;
479
+ if (cid.toString().indexOf("bafy") === 0) {
480
+ useCodec = dagcbor__namespace;
481
+ }
482
+ const decoded = await Block__namespace.decode({
483
+ ...got,
484
+ codec: useCodec,
485
+ hasher: sha2.sha256
486
+ });
487
+ return decoded;
488
+ };
489
+ const { blocks } = await blocksFromEncryptedCarBlock(roots[0], readerGetWithCodec, this.keyMaterial);
490
+ const block = blocks.find((b) => b.cid.toString() === dataCID);
491
+ if (block) {
492
+ return block.bytes;
493
+ }
494
+ } else {
495
+ const gotBlock = await reader.get(cid.CID.parse(dataCID));
496
+ if (gotBlock) {
497
+ return gotBlock.bytes;
498
+ }
499
+ }
500
+ });
501
+ }
502
+ }
503
+ const blocksToCarBlock = async (lastCid, blocks) => {
504
+ let size = 0;
505
+ const headerSize = CBW__namespace.headerLength({ roots: [lastCid] });
506
+ size += headerSize;
507
+ if (!Array.isArray(blocks)) {
508
+ blocks = Array.from(blocks.entries());
509
+ }
510
+ for (const { cid, bytes } of blocks) {
511
+ size += CBW__namespace.blockLength({ cid, bytes });
512
+ }
513
+ const buffer = new Uint8Array(size);
514
+ const writer = await CBW__namespace.createWriter(buffer, { headerSize });
515
+ writer.addRoot(lastCid);
516
+ for (const { cid, bytes } of blocks) {
517
+ writer.write({ cid, bytes });
518
+ }
519
+ await writer.close();
520
+ return await Block__namespace.encode({ value: writer.bytes, hasher: sha2.sha256, codec: raw__namespace });
521
+ };
522
+ const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blocks, keyMaterial) => {
523
+ const encryptionKey = buffer.Buffer.from(keyMaterial, "hex");
524
+ const encryptedBlocks = [];
525
+ const theCids = [];
526
+ for (const { cid } of blocks.entries()) {
527
+ theCids.push(cid.toString());
528
+ }
529
+ let last;
530
+ for await (const block of encrypt({
531
+ cids: theCids,
532
+ get: async (cid) => blocks.get(cid),
533
+ // maybe we can just use blocks.get
534
+ key: encryptionKey,
535
+ hasher: sha2.sha256,
536
+ chunker,
537
+ cache: cache.nocache,
538
+ // codec: dagcbor, // should be crypto?
539
+ root: innerBlockStoreClockRootCid
540
+ })) {
541
+ encryptedBlocks.push(block);
542
+ last = block;
543
+ }
544
+ const encryptedCar = await blocksToCarBlock(last.cid, encryptedBlocks);
545
+ return encryptedCar;
546
+ };
547
+ const memoizeDecryptedCarBlocks = /* @__PURE__ */ new Map();
548
+ const blocksFromEncryptedCarBlock = async (cid, get, keyMaterial) => {
549
+ if (memoizeDecryptedCarBlocks.has(cid.toString())) {
550
+ return memoizeDecryptedCarBlocks.get(cid.toString());
551
+ } else {
552
+ const blocksPromise = (async () => {
553
+ const decryptionKey = buffer.Buffer.from(keyMaterial, "hex");
554
+ const cids = /* @__PURE__ */ new Set();
555
+ const decryptedBlocks = [];
556
+ for await (const block of decrypt({
557
+ root: cid,
558
+ get,
559
+ key: decryptionKey,
560
+ chunker,
561
+ hasher: sha2.sha256,
562
+ cache: cache.nocache
563
+ // codec: dagcbor
564
+ })) {
565
+ decryptedBlocks.push(block);
566
+ cids.add(block.cid.toString());
567
+ }
568
+ return { blocks: decryptedBlocks, cids };
569
+ })();
570
+ memoizeDecryptedCarBlocks.set(cid.toString(), blocksPromise);
571
+ return blocksPromise;
572
+ }
573
+ };
574
+
575
+ const husherMap = /* @__PURE__ */ new Map();
576
+ const husher = (id, workFn) => {
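+ // Concurrent callers for the same id share one in-flight promise; the entry is evicted 100ms after it settles.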
577
+ if (!husherMap.has(id)) {
578
+ husherMap.set(
579
+ id,
580
+ workFn().finally(() => setTimeout(() => husherMap.delete(id), 100))
581
+ );
582
+ }
583
+ return husherMap.get(id);
584
+ };
585
+ class TransactionBlockstore {
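+ // Buffers writes in in-flight InnerBlockstore transactions; on commit the new blocks are copied into the committed cache and persisted through the Valet as a single car file.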
586
+ /** @type {Map<string, Uint8Array>} */
587
+ committedBlocks = /* @__PURE__ */ new Map();
588
+ valet = null;
589
+ instanceId = "blkz." + Math.random().toString(36).substring(2, 4);
590
+ inflightTransactions = /* @__PURE__ */ new Set();
591
+ constructor(name, encryptionKey) {
592
+ this.valet = new Valet(name, encryptionKey);
593
+ }
594
+ /**
595
+ * Get a block from the store.
596
+ *
597
+ * @param {import('./link').AnyLink} cid
598
+ * @returns {Promise<AnyBlock | undefined>}
599
+ */
600
+ async get(cid) {
601
+ const key = cid.toString();
602
+ const bytes = await Promise.any([this.transactionsGet(key), this.committedGet(key)]).catch((e) => {
603
+ return this.networkGet(key);
604
+ });
605
+ if (!bytes)
606
+ throw new Error("Missing block: " + key);
607
+ return { cid, bytes };
608
+ }
609
+ // this iterates over the in-flight transactions
610
+ // and returns the first matching block it finds
611
+ async transactionsGet(key) {
612
+ for (const transaction of this.inflightTransactions) {
613
+ const got = await transaction.get(key);
614
+ if (got && got.bytes)
615
+ return got.bytes;
616
+ }
617
+ throw new Error("Missing block: " + key);
618
+ }
619
+ async committedGet(key) {
620
+ const old = this.committedBlocks.get(key);
621
+ if (old)
622
+ return old;
623
+ const got = await this.valet.getBlock(key);
624
+ this.committedBlocks.set(key, got);
625
+ return got;
626
+ }
627
+ async clearCommittedCache() {
628
+ this.committedBlocks.clear();
629
+ }
630
+ async networkGet(key) {
631
+ if (this.valet.remoteBlockFunction) {
632
+ const value = await husher(key, async () => await this.valet.remoteBlockFunction(key));
633
+ if (value) {
634
+ doTransaction("networkGot: " + key, this, async (innerBlockstore) => {
635
+ await innerBlockstore.put(multiformats.CID.parse(key), value);
636
+ });
637
+ return value;
638
+ }
639
+ } else {
640
+ return false;
641
+ }
642
+ }
643
+ /**
644
+ * Add a block to the store. Usually bound to a transaction by a closure.
645
+ * It sets the lastCid property to the CID of the block that was put.
646
+ * This is used by the transaction as the head of the car when written to the valet.
647
+ * We don't have to worry about which transaction we are in here because
648
+ * we are the transactionBlockstore.
649
+ *
650
+ * @param {import('./link').AnyLink} cid
651
+ * @param {Uint8Array} bytes
652
+ */
653
+ put(cid, bytes) {
654
+ throw new Error("use a transaction to put");
655
+ }
656
+ /**
657
+ * Iterate over all blocks in the store.
658
+ *
659
+ * @yields {AnyBlock}
660
+ * @returns {AsyncGenerator<AnyBlock>}
661
+ */
662
+ // * entries () {
663
+ // // needs transaction blocks?
664
+ // // for (const [str, bytes] of this.blocks) {
665
+ // // yield { cid: parse(str), bytes }
666
+ // // }
667
+ // for (const [str, bytes] of this.committedBlocks) {
668
+ // yield { cid: parse(str), bytes }
669
+ // }
670
+ // }
671
+ /**
672
+ * Begin a transaction. Ensures the uncommitted blocks are empty at the beginning.
673
+ * Returns the blocks to read and write during the transaction.
674
+ * @returns {InnerBlockstore}
675
+ * @memberof TransactionBlockstore
676
+ */
677
+ begin(label = "") {
678
+ const innerTransactionBlockstore = new InnerBlockstore(label, this);
679
+ this.inflightTransactions.add(innerTransactionBlockstore);
680
+ return innerTransactionBlockstore;
681
+ }
682
+ /**
683
+ * Commit the transaction. Writes the blocks to the store.
684
+ * @returns {Promise<void>}
685
+ * @memberof TransactionBlockstore
686
+ */
687
+ async commit(innerBlockstore) {
688
+ await this.doCommit(innerBlockstore);
689
+ }
690
+ // first get the transaction blockstore from the map of transaction blockstores
691
+ // then copy it to committedBlocks
692
+ // then write the transaction blockstore to a car
693
+ // then write the car to the valet
694
+ // then remove the transaction blockstore from the map of transaction blockstores
695
+ doCommit = async (innerBlockstore) => {
696
+ const cids = /* @__PURE__ */ new Set();
697
+ for (const { cid, bytes } of innerBlockstore.entries()) {
698
+ const stringCid = cid.toString();
699
+ if (this.committedBlocks.has(stringCid)) ; else {
700
+ this.committedBlocks.set(stringCid, bytes);
701
+ cids.add(stringCid);
702
+ }
703
+ }
704
+ if (cids.size > 0) {
705
+ await this.valet.writeTransaction(innerBlockstore, cids);
706
+ }
707
+ };
708
+ /**
709
+ * Retire the transaction. Clears the uncommitted blocks.
710
+ * @returns {void}
711
+ * @memberof TransactionBlockstore
712
+ */
713
+ retire(innerBlockstore) {
714
+ this.inflightTransactions.delete(innerBlockstore);
715
+ }
716
+ }
717
+ const doTransaction = async (label, blockstore, doFun) => {
718
+ if (!blockstore.commit)
719
+ return await doFun(blockstore);
720
+ const innerBlockstore = blockstore.begin(label);
721
+ try {
722
+ const result = await doFun(innerBlockstore);
723
+ await blockstore.commit(innerBlockstore);
724
+ return result;
725
+ } catch (e) {
726
+ console.error(`Transaction ${label} failed`, e, e.stack);
727
+ throw e;
728
+ } finally {
729
+ blockstore.retire(innerBlockstore);
730
+ }
731
+ };
732
+ class InnerBlockstore {
733
+ /** @type {Map<string, Uint8Array>} */
734
+ blocks = /* @__PURE__ */ new Map();
735
+ lastCid = null;
736
+ label = "";
737
+ parentBlockstore = null;
738
+ constructor(label, parentBlockstore) {
739
+ this.label = label;
740
+ this.parentBlockstore = parentBlockstore;
741
+ }
742
+ /**
743
+ * @param {import('./link').AnyLink} cid
744
+ * @returns {Promise<AnyBlock | undefined>}
745
+ */
746
+ async get(cid) {
747
+ const key = cid.toString();
748
+ let bytes = this.blocks.get(key);
749
+ if (bytes) {
750
+ return { cid, bytes };
751
+ }
752
+ bytes = await this.parentBlockstore.committedGet(key);
753
+ if (bytes) {
754
+ return { cid, bytes };
755
+ }
756
+ }
757
+ /**
758
+ * @param {import('./link').AnyLink} cid
759
+ * @param {Uint8Array} bytes
760
+ */
761
+ async put(cid, bytes) {
762
+ this.blocks.set(cid.toString(), bytes);
763
+ this.lastCid = cid;
764
+ }
765
+ *entries() {
766
+ for (const [str, bytes] of this.blocks) {
767
+ yield { cid: link.parse(str), bytes };
768
+ }
769
+ }
770
+ }
771
+
772
+ const blockOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare: utils.simpleCompare };
773
+ const withLog = async (label, fn) => {
774
+ const resp = await fn();
775
+ return resp;
776
+ };
777
+ const makeGetBlock = (blocks) => {
778
+ const getBlockFn = async (address) => {
779
+ const { cid, bytes } = await withLog(address, () => blocks.get(address));
780
+ return Block.create({ cid, bytes, hasher: sha2.sha256, codec: dagcbor__namespace });
781
+ };
782
+ return {
783
+ // cids,
784
+ getBlock: getBlockFn
785
+ };
786
+ };
787
+ async function createAndSaveNewEvent({
788
+ inBlocks,
789
+ bigPut,
790
+ root: root2,
791
+ event: inEvent,
792
+ head,
793
+ additions,
794
+ removals = []
795
+ }) {
796
+ let cids;
797
+ const { key, value, del } = inEvent;
798
+ const data = {
799
+ root: root2 ? {
800
+ cid: root2.cid,
801
+ bytes: root2.bytes,
802
+ // can we remove this?
803
+ value: root2.value
804
+ // can we remove this?
805
+ } : null,
806
+ key
807
+ };
808
+ if (del) {
809
+ data.value = null;
810
+ data.type = "del";
811
+ } else {
812
+ data.value = value;
813
+ data.type = "put";
814
+ }
815
+ const event = await EventBlock.create(data, head);
816
+ bigPut(event);
817
+ ({ head, cids } = await advance(inBlocks, head, event.cid));
818
+ return {
819
+ root: root2,
820
+ additions,
821
+ removals,
822
+ head,
823
+ clockCIDs: cids,
824
+ event
825
+ };
826
+ }
827
+ const makeGetAndPutBlock = (inBlocks) => {
828
+ const { getBlock, cids } = makeGetBlock(inBlocks);
829
+ const put2 = inBlocks.put.bind(inBlocks);
830
+ const bigPut = async (block, additions) => {
831
+ const { cid, bytes } = block;
832
+ put2(cid, bytes);
833
+ if (additions) {
834
+ additions.set(cid.toString(), block);
835
+ }
836
+ };
837
+ return { getBlock, bigPut, blocks: inBlocks, cids };
838
+ };
839
+ const bulkFromEvents = (sorted, event) => {
840
+ if (event) {
841
+ const update = { value: { data: { key: event.key } } };
842
+ if (event.del) {
843
+ update.value.data.type = "del";
844
+ } else {
845
+ update.value.data.type = "put";
846
+ update.value.data.value = event.value;
847
+ }
848
+ sorted.push(update);
849
+ }
850
+ const bulk = /* @__PURE__ */ new Map();
851
+ for (const { value: event2 } of sorted) {
852
+ const {
853
+ data: { type, value, key }
854
+ } = event2;
855
+ const bulkEvent = type === "put" ? { key, value } : { key, del: true };
856
+ bulk.set(bulkEvent.key, bulkEvent);
857
+ }
858
+ return Array.from(bulk.values());
859
+ };
860
+ const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
861
+ const event = await events.get(ancestor);
862
+ const { root: root2 } = event.value.data;
863
+ if (root2) {
864
+ return map.load({ cid: root2.cid, get: getBlock, ...blockOpts });
865
+ } else {
866
+ return null;
867
+ }
868
+ };
869
+ const doProllyBulk = async (inBlocks, head, event) => {
870
+ const { getBlock, blocks } = makeGetAndPutBlock(inBlocks);
871
+ let bulkSorted = [];
872
+ let prollyRootNode = null;
873
+ if (head.length) {
874
+ const events = new EventFetcher(blocks);
875
+ const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
876
+ bulkSorted = sorted;
877
+ prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock);
878
+ }
879
+ const bulkOperations = bulkFromEvents(bulkSorted, event);
880
+ if (!prollyRootNode) {
881
+ let root2;
882
+ const newBlocks = [];
883
+ if (bulkOperations.every((op) => op.del)) {
884
+ return { root: null, blocks: [] };
885
+ }
886
+ for await (const node of map.create({ get: getBlock, list: bulkOperations, ...blockOpts })) {
887
+ root2 = await node.block;
888
+ newBlocks.push(root2);
889
+ }
890
+ return { root: root2, blocks: newBlocks };
891
+ } else {
892
+ return await prollyRootNode.bulk(bulkOperations);
893
+ }
894
+ };
895
+ async function put(inBlocks, head, event, options) {
896
+ const { bigPut } = makeGetAndPutBlock(inBlocks);
897
+ if (!head.length) {
898
+ const additions = /* @__PURE__ */ new Map();
899
+ const { root: root2, blocks } = await doProllyBulk(inBlocks, head, event);
900
+ for (const b of blocks) {
901
+ bigPut(b, additions);
902
+ }
903
+ return createAndSaveNewEvent({ inBlocks, bigPut, root: root2, event, head, additions: Array.from(additions.values()) });
904
+ }
905
+ const { root: newProllyRootNode, blocks: newBlocks } = await doProllyBulk(inBlocks, head, event);
906
+ if (!newProllyRootNode) {
907
+ return createAndSaveNewEvent({
908
+ inBlocks,
909
+ bigPut,
910
+ root: null,
911
+ event,
912
+ head,
913
+ additions: []
914
+ });
915
+ } else {
916
+ const prollyRootBlock = await newProllyRootNode.block;
917
+ const additions = /* @__PURE__ */ new Map();
918
+ bigPut(prollyRootBlock, additions);
919
+ for (const nb of newBlocks) {
920
+ bigPut(nb, additions);
921
+ }
922
+ return createAndSaveNewEvent({
923
+ inBlocks,
924
+ bigPut,
925
+ root: prollyRootBlock,
926
+ event,
927
+ head,
928
+ additions: Array.from(additions.values())
929
+ /*, todo? Array.from(removals.values()) */
930
+ });
931
+ }
932
+ }
933
+ async function root(inBlocks, head) {
934
+ if (!head.length) {
935
+ throw new Error("no head");
936
+ }
937
+ const { root: newProllyRootNode, blocks: newBlocks, cids } = await doProllyBulk(inBlocks, head);
938
+ await doTransaction("root", inBlocks, async (transactionBlockstore) => {
939
+ const { bigPut } = makeGetAndPutBlock(transactionBlockstore);
940
+ for (const nb of newBlocks) {
941
+ bigPut(nb);
942
+ }
943
+ });
944
+ return { cids, node: newProllyRootNode };
945
+ }
946
+ async function eventsSince(blocks, head, since) {
947
+ if (!head.length) {
948
+ throw new Error("no head");
949
+ }
950
+ const sinceHead = [...since, ...head];
951
+ const { cids, events: unknownSorted3 } = await findEventsToSync(blocks, sinceHead);
952
+ return { clockCIDs: cids, result: unknownSorted3.map(({ value: { data } }) => data) };
953
+ }
954
+ async function getAll(blocks, head) {
955
+ if (!head.length) {
956
+ return { clockCIDs: new utils.CIDCounter(), cids: new utils.CIDCounter(), result: [] };
957
+ }
958
+ const { node: prollyRootNode, cids: clockCIDs } = await root(blocks, head);
959
+ if (!prollyRootNode) {
960
+ return { clockCIDs, cids: new utils.CIDCounter(), result: [] };
961
+ }
962
+ const { result, cids } = await prollyRootNode.getAllEntries();
963
+ return { clockCIDs, cids, result: result.map(({ key, value }) => ({ key, value })) };
964
+ }
965
+ async function get(blocks, head, key) {
966
+ if (!head.length) {
967
+ return { cids: new utils.CIDCounter(), result: null };
968
+ }
969
+ const { node: prollyRootNode, cids: clockCIDs } = await root(blocks, head);
970
+ if (!prollyRootNode) {
971
+ return { clockCIDs, cids: new utils.CIDCounter(), result: null };
972
+ }
973
+ const { result, cids } = await prollyRootNode.get(key);
974
+ return { result, cids, clockCIDs };
975
+ }
976
+ async function* vis(blocks, head) {
977
+ if (!head.length) {
978
+ return { cids: new utils.CIDCounter(), result: null };
979
+ }
980
+ const { node: prollyRootNode, cids } = await root(blocks, head);
981
+ const lines = [];
982
+ for await (const line of prollyRootNode.vis()) {
983
+ yield line;
984
+ lines.push(line);
985
+ }
986
+ return { vis: lines.join("\n"), cids };
987
+ }
988
+ async function visMerkleTree(blocks, head) {
989
+ if (!head.length) {
990
+ return { cids: new utils.CIDCounter(), result: null };
991
+ }
992
+ const { node: prollyRootNode, cids } = await root(blocks, head);
993
+ const lines = [];
994
+ for await (const line of prollyRootNode.vis()) {
995
+ lines.push(line);
996
+ }
997
+ return { vis: lines.join("\n"), cids };
998
+ }
999
+ async function visMerkleClock(blocks, head) {
1000
+ const lines = [];
1001
+ for await (const line of vis$1(blocks, head)) {
1002
+ lines.push(line);
1003
+ }
1004
+ return { vis: lines.join("\n") };
1005
+ }
1006
+
1007
+ class Fireproof {
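+ // Document database: state lives in a prolly-tree, causal history in a Merkle clock, and all writes go through TransactionBlockstore transactions.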
1008
+ listeners = /* @__PURE__ */ new Set();
1009
+ /**
1010
+ * @function storage
1011
+ * @memberof Fireproof
1012
+ * Creates a new Fireproof instance with default storage settings
1013
+ * Most apps should use this and not worry about the details.
1014
+ * @static
1015
+ * @returns {Fireproof} - a new Fireproof instance
1016
+ */
1017
+ static storage = (name = "global") => {
1018
+ const instanceKey = crypto.randomBytes(32).toString("hex");
1019
+ return new Fireproof(new TransactionBlockstore(name, instanceKey), [], { name });
1020
+ };
1021
+ constructor(blocks, clock, config, authCtx = {}) {
1022
+ this.name = config?.name || "global";
1023
+ this.instanceId = `fp.${this.name}.${Math.random().toString(36).substring(2, 7)}`;
1024
+ this.blocks = blocks;
1025
+ this.clock = clock;
1026
+ this.config = config;
1027
+ this.authCtx = authCtx;
1028
+ this.indexes = /* @__PURE__ */ new Map();
1029
+ }
1030
+ /**
1031
+ * Renders the Fireproof instance as a JSON object.
1032
+ * @returns {Object} - The JSON representation of the Fireproof instance. Includes clock heads for the database and its indexes.
1033
+ * @memberof Fireproof
1034
+ * @instance
1035
+ */
1036
+ toJSON() {
1037
+ return {
1038
+ clock: this.clockToJSON(),
1039
+ name: this.name,
1040
+ key: this.blocks.valet.getKeyMaterial(),
1041
+ indexes: [...this.indexes.values()].map((index) => index.toJSON())
1042
+ };
1043
+ }
1044
+ /**
1045
+ * Returns the Merkle clock heads for the Fireproof instance.
1046
+ * @returns {string[]} - The Merkle clock heads for the Fireproof instance.
1047
+ * @memberof Fireproof
1048
+ * @instance
1049
+ */
1050
+ clockToJSON() {
1051
+ return this.clock.map((cid) => cid.toString());
1052
+ }
1053
+ hydrate({ clock, name, key }) {
1054
+ this.name = name;
1055
+ this.clock = clock;
1056
+ this.blocks.valet.setKeyMaterial(key);
1057
+ this.indexBlocks = null;
1058
+ }
1059
+ /**
1060
+ * Triggers a notification to all listeners
1061
+ * of the Fireproof instance so they can repaint UI, etc.
1062
+ * @returns {Promise<void>}
1063
+ * @memberof Fireproof
1064
+ * @instance
1065
+ */
1066
+ async notifyReset() {
1067
+ await this.notifyListeners({ _reset: true, _clock: this.clockToJSON() });
1068
+ }
1069
+ // used by indexes etc. to notify database listeners of new availability
1070
+ async notifyExternal(source = "unknown") {
1071
+ await this.notifyListeners({ _external: source, _clock: this.clockToJSON() });
1072
+ }
1073
+ /**
1074
+ * Returns the changes made to the Fireproof instance since the specified event.
1075
+ * @function changesSince
1076
+ * @param {CID[]} [event] - The clock head to retrieve changes since. If null or undefined, retrieves all changes.
1077
+ * @returns {Promise<{rows : Object[], clock: CID[], proof: {}}>} An object containing the rows and the head of the instance's clock.
1078
+ * @memberof Fireproof
1079
+ * @instance
1080
+ */
1081
+ async changesSince(event) {
1082
+ let rows, dataCIDs, clockCIDs;
1083
+ if (event) {
1084
+ const resp = await eventsSince(this.blocks, this.clock, event);
1085
+ const docsMap = /* @__PURE__ */ new Map();
1086
+ for (const { key, type, value } of resp.result.map(decodeEvent)) {
1087
+ if (type === "del") {
1088
+ docsMap.set(key, { key, del: true });
1089
+ } else {
1090
+ docsMap.set(key, { key, value });
1091
+ }
1092
+ }
1093
+ rows = Array.from(docsMap.values());
1094
+ clockCIDs = resp.clockCIDs;
1095
+ } else {
1096
+ const allResp = await getAll(this.blocks, this.clock);
1097
+ rows = allResp.result.map(({ key, value }) => decodeEvent({ key, value }));
1098
+ dataCIDs = allResp.cids;
1099
+ }
1100
+ return {
1101
+ rows,
1102
+ clock: this.clockToJSON(),
1103
+ proof: { data: await cidsToProof(dataCIDs), clock: await cidsToProof(clockCIDs) }
1104
+ };
1105
+ }
1106
+ async allDocuments() {
1107
+ const allResp = await getAll(this.blocks, this.clock);
1108
+ const rows = allResp.result.map(({ key, value }) => decodeEvent({ key, value })).map(({ key, value }) => ({ key, value: { _id: key, ...value } }));
1109
+ return {
1110
+ rows,
1111
+ clock: this.clockToJSON(),
1112
+ proof: await cidsToProof(allResp.cids)
1113
+ };
1114
+ }
1115
+ /**
1116
+ * Runs validation on the specified document using the Fireproof instance's configuration. Throws an error if the document is invalid.
1117
+ *
1118
+ * @param {Object} doc - The document to validate.
1119
+ * @returns {Promise<void>}
1120
+ * @throws {Error} - Throws an error if the document is invalid.
1121
+ * @memberof Fireproof
1122
+ * @instance
1123
+ */
1124
+ async runValidation(doc) {
1125
+ if (this.config && this.config.validateChange) {
1126
+ const oldDoc = await this.get(doc._id).then((doc2) => doc2).catch(() => ({}));
1127
+ this.config.validateChange(doc, oldDoc, this.authCtx);
1128
+ }
1129
+ }
1130
+ /**
1131
+ * Retrieves the document with the specified ID from the database
1132
+ *
1133
+ * @param {string} key - the ID of the document to retrieve
1134
+ * @param {Object} [opts] - options
1135
+ * @returns {Promise<{_id: string}>} - the document with the specified ID
1136
+ * @memberof Fireproof
1137
+ * @instance
1138
+ */
1139
+ async get(key, opts = {}) {
1140
+ const clock = opts.clock || this.clock;
1141
+ const resp = await get(this.blocks, clock, charwise.encode(key));
1142
+ if (!resp || resp.result === null) {
1143
+ throw new Error("Not found");
1144
+ }
1145
+ const doc = resp.result;
1146
+ if (opts.mvcc === true) {
1147
+ doc._clock = this.clockToJSON();
1148
+ }
1149
+ doc._proof = {
1150
+ data: await cidsToProof(resp.cids),
1151
+ clock: this.clockToJSON()
1152
+ };
1153
+ doc._id = key;
1154
+ return doc;
1155
+ }
1156
+ /**
1157
+ * Adds a new document to the database, or updates an existing document. Returns the ID of the document and the new clock head.
1158
+ *
1159
+ * @param {Object} doc - the document to be added
1160
+ * @param {string} doc._id - the document ID. If not provided, a random ID will be generated.
1161
+ * @param {CID[]} doc._clock - the clock head under which the document was read; when provided it is used for MVCC conflict detection.
1162
+ * @param {Proof} doc._proof - CIDs referenced by the update
1163
+ * @returns {Promise<{ id: string, clock: CID[] }>} - The result of adding the document to the database
1164
+ * @memberof Fireproof
1165
+ * @instance
1166
+ */
1167
+ async put({ _id, _proof, ...doc }) {
1168
+ const id = _id || "f" + Math.random().toString(36).slice(2);
1169
+ await this.runValidation({ _id: id, ...doc });
1170
+ return await this.putToProllyTree({ key: id, value: doc }, doc._clock);
1171
+ }
1172
+ /**
1173
+ * Deletes a document from the database
1174
+ * @param {string | any} docOrId - the document (with _id and optional _clock) or the document ID
1175
+ * @returns {Promise<{ id: string, clock: CID[] }>} - The result of deleting the document from the database
1176
+ * @memberof Fireproof
1177
+ * @instance
1178
+ */
1179
+ async del(docOrId) {
1180
+ let id;
1181
+ let clock = null;
1182
+ if (docOrId._id) {
1183
+ id = docOrId._id;
1184
+ clock = docOrId._clock;
1185
+ } else {
1186
+ id = docOrId;
1187
+ }
1188
+ await this.runValidation({ _id: id, _deleted: true });
1189
+ return await this.putToProllyTree({ key: id, del: true }, clock);
1190
+ }
1191
+ /**
1192
+ * Updates the underlying storage with the specified event.
1193
+ * @private
1194
+ * @param {{del?: true, key : string, value?: any}} decodedEvent - the event to add
1195
+ * @returns {Promise<{ proof:{}, id: string, clock: CID[] }>} - The result of adding the event to storage
1196
+ */
1197
+ async putToProllyTree(decodedEvent, clock = null) {
1198
+ const event = encodeEvent(decodedEvent);
1199
+ if (clock && JSON.stringify(clock) !== JSON.stringify(this.clockToJSON())) {
1200
+ const resp = await eventsSince(this.blocks, this.clock, event.value._clock);
1201
+ const missedChange = resp.result.find(({ key }) => key === event.key);
1202
+ if (missedChange) {
1203
+ throw new Error("MVCC conflict, document is changed, please reload the document and try again.");
1204
+ }
1205
+ }
1206
+ const result = await doTransaction(
1207
+ "putToProllyTree",
1208
+ this.blocks,
1209
+ async (blocks) => await put(blocks, this.clock, event)
1210
+ );
1211
+ if (!result) {
1212
+ console.error("failed", event);
1213
+ throw new Error("failed to put at storage layer");
1214
+ }
1215
+ this.clock = result.head;
1216
+ await this.notifyListeners([decodedEvent]);
1217
+ return {
1218
+ id: decodedEvent.key,
1219
+ clock: this.clockToJSON(),
1220
+ proof: { data: await cidsToProof(result.cids), clock: await cidsToProof(result.clockCIDs) }
1221
+ };
1222
+ }
1223
+ // /**
1224
+ // * Advances the clock to the specified event and updates the root CID
1225
+ // * Will be used by replication
1226
+ // */
1227
+ // async advance (event) {
1228
+ // this.clock = await advance(this.blocks, this.clock, event)
1229
+ // this.rootCid = await root(this.blocks, this.clock)
1230
+ // return this.clock
1231
+ // }
1232
+ async *vis() {
1233
+ return yield* vis(this.blocks, this.clock);
1234
+ }
1235
+ async visTree() {
1236
+ return await visMerkleTree(this.blocks, this.clock);
1237
+ }
1238
+ async visClock() {
1239
+ return await visMerkleClock(this.blocks, this.clock);
1240
+ }
1241
+ /**
1242
+ * Registers a Listener to be called when the Fireproof instance's clock is updated.
1243
+ * Receives live changes from the database after they are committed.
1244
+ * @param {Function} listener - The listener to be called when the clock is updated.
1245
+ * @returns {Function} - A function that can be called to unregister the listener.
1246
+ * @memberof Fireproof
1247
+ */
1248
+ registerListener(listener) {
1249
+ this.listeners.add(listener);
1250
+ return () => {
1251
+ this.listeners.delete(listener);
1252
+ };
1253
+ }
1254
+ async notifyListeners(changes) {
1255
+ for (const listener of this.listeners) {
1256
+ await listener(changes);
1257
+ }
1258
+ }
1259
+ setCarUploader(carUploaderFn) {
1260
+ this.blocks.valet.uploadFunction = carUploaderFn;
1261
+ }
1262
+ setRemoteBlockReader(remoteBlockReaderFn) {
1263
+ this.blocks.valet.remoteBlockFunction = remoteBlockReaderFn;
1264
+ }
1265
+ }
1266
+ async function cidsToProof(cids) {
1267
+ if (!cids || !cids.all)
1268
+ return [];
1269
+ const all = await cids.all();
1270
+ return [...all].map((cid) => cid.toString());
1271
+ }
1272
+ function decodeEvent(event) {
1273
+ const decodedKey = charwise.decode(event.key);
1274
+ return { ...event, key: decodedKey };
1275
+ }
1276
+ function encodeEvent(event) {
1277
+ if (!(event && event.key))
1278
+ return;
1279
+ const encodedKey = charwise.encode(event.key);
1280
+ return { ...event, key: encodedKey };
1281
+ }
1282
+
1283
+ const compare = (a, b) => {
1284
+ const [aKey, aRef] = a;
1285
+ const [bKey, bRef] = b;
1286
+ const comp = utils.simpleCompare(aKey, bKey);
1287
+ if (comp !== 0)
1288
+ return comp;
1289
+ return refCompare(aRef, bRef);
1290
+ };
1291
+ const refCompare = (aRef, bRef) => {
1292
+ if (Number.isNaN(aRef))
1293
+ return -1;
1294
+ if (Number.isNaN(bRef))
1295
+ throw new Error("ref may not be Infinity or NaN");
1296
+ if (aRef === Infinity)
1297
+ return 1;
1298
+ return utils.simpleCompare(aRef, bRef);
1299
+ };
1300
+ const dbIndexOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare };
1301
+ const idIndexOpts = { cache: cache.nocache, chunker: utils.bf(3), codec: dagcbor__namespace, hasher: sha2.sha256, compare: utils.simpleCompare };
1302
+ const makeDoc = ({ key, value }) => ({ _id: key, ...value });
1303
+ const indexEntriesForChanges = (changes, mapFn) => {
1304
+ const indexEntries = [];
1305
+ changes.forEach(({ key, value, del }) => {
1306
+ if (del || !value)
1307
+ return;
1308
+ mapFn(makeDoc({ key, value }), (k, v) => {
1309
+ if (typeof v === "undefined" || typeof k === "undefined")
1310
+ return;
1311
+ indexEntries.push({
1312
+ key: [charwise.encode(k), key],
1313
+ value: v
1314
+ });
1315
+ });
1316
+ });
1317
+ return indexEntries;
1318
+ };
1319
+ class DbIndex {
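+ // Keeps two prolly-tree indexes: indexByKey ([indexed key, doc id] -> value) answers queries, and indexById (doc id -> index key) lets an update remove a document's stale entries.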
1320
+ constructor(database, mapFn, clock, opts = {}) {
1321
+ this.database = database;
1322
+ if (!database.indexBlocks) {
1323
+ database.indexBlocks = new TransactionBlockstore(database.name + ".indexes", database.blocks.valet.getKeyMaterial());
1324
+ }
1325
+ if (typeof mapFn === "string") {
1326
+ this.mapFnString = mapFn;
1327
+ } else {
1328
+ this.mapFn = mapFn;
1329
+ this.mapFnString = mapFn.toString();
1330
+ }
1331
+ this.name = opts.name || this.makeName();
1332
+ this.indexById = { root: null, cid: null };
1333
+ this.indexByKey = { root: null, cid: null };
1334
+ this.dbHead = null;
1335
+ if (clock) {
1336
+ this.indexById.cid = clock.byId;
1337
+ this.indexByKey.cid = clock.byKey;
1338
+ this.dbHead = clock.db;
1339
+ }
1340
+ this.instanceId = this.database.instanceId + `.DbIndex.${Math.random().toString(36).substring(2, 7)}`;
1341
+ this.updateIndexPromise = null;
1342
+ if (!opts.temporary) {
1343
+ DbIndex.registerWithDatabase(this, this.database);
1344
+ }
1345
+ }
1346
+ makeName() {
1347
+ const regex = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g;
1348
+ const matches = Array.from(this.mapFnString.matchAll(regex), (match) => match[1].trim());
1349
+ return matches[1];
1350
+ }
1351
+ static registerWithDatabase(inIndex, database) {
1352
+ if (!database.indexes.has(inIndex.mapFnString)) {
1353
+ database.indexes.set(inIndex.mapFnString, inIndex);
1354
+ } else {
1355
+ const existingIndex = database.indexes.get(inIndex.mapFnString);
1356
+ if (existingIndex.mapFn) {
1357
+ existingIndex.dbHead = inIndex.dbHead;
1358
+ existingIndex.indexById.cid = inIndex.indexById.cid;
1359
+ existingIndex.indexByKey.cid = inIndex.indexByKey.cid;
1360
+ } else {
1361
+ inIndex.dbHead = existingIndex.dbHead;
1362
+ inIndex.indexById.cid = existingIndex.indexById.cid;
1363
+ inIndex.indexByKey.cid = existingIndex.indexByKey.cid;
1364
+ database.indexes.set(inIndex.mapFnString, inIndex);
1365
+ }
1366
+ }
1367
+ }
1368
+ toJSON() {
1369
+ const indexJson = { name: this.name, code: this.mapFnString, clock: { db: null, byId: null, byKey: null } };
1370
+ indexJson.clock.db = this.dbHead?.map((cid) => cid.toString());
1371
+ indexJson.clock.byId = this.indexById.cid?.toString();
1372
+ indexJson.clock.byKey = this.indexByKey.cid?.toString();
1373
+ return indexJson;
1374
+ }
1375
+ static fromJSON(database, { code, clock, name }) {
1376
+ return new DbIndex(database, code, clock, { name });
1377
+ }
1378
+ /**
1379
+ * JSDoc for Query type.
1380
+ * @typedef {Object} DbQuery
1381
+ * @property {string[]} [range] - The range to query.
1382
+ * @memberof DbIndex
1383
+ */
1384
+ /**
1385
+ * Query object can have {range}
1386
+ * @param {DbQuery} query - the query range to use
1387
+ * @returns {Promise<{proof: {}, rows: Array<{id: string, key: string, value: any}>}>}
1388
+ * @memberof DbIndex
1389
+ * @instance
1390
+ */
1391
+ async query(query, update = true) {
1392
+ update && await this.updateIndex(this.database.indexBlocks);
1393
+ const response = await doIndexQuery(this.database.indexBlocks, this.indexByKey, query);
1394
+ return {
1395
+ proof: { index: await cidsToProof(response.cids) },
1396
+ rows: response.result.map(({ id, key, row }) => {
1397
+ return { id, key: charwise.decode(key), value: row };
1398
+ })
1399
+ };
1400
+ }
1401
+ /**
1402
+ * Update the DbIndex with the latest changes
1403
+ * @private
1404
+ * @returns {Promise<void>}
1405
+ */
1406
+ async updateIndex(blocks) {
1407
+ if (this.updateIndexPromise)
1408
+ return this.updateIndexPromise;
1409
+ this.updateIndexPromise = this.innerUpdateIndex(blocks);
1410
+ this.updateIndexPromise.finally(() => {
1411
+ this.updateIndexPromise = null;
1412
+ });
1413
+ return this.updateIndexPromise;
1414
+ }
1415
+ async innerUpdateIndex(inBlocks) {
1416
+ const result = await this.database.changesSince(this.dbHead);
1417
+ if (result.rows.length === 0) {
1418
+ this.dbHead = result.clock;
1419
+ return;
1420
+ }
1421
+ await doTransaction("updateIndex", inBlocks, async (blocks) => {
1422
+ let oldIndexEntries = [];
1423
+ let removeByIdIndexEntries = [];
1424
+ await loadIndex(blocks, this.indexById, idIndexOpts);
1425
+ await loadIndex(blocks, this.indexByKey, dbIndexOpts);
1426
+ if (this.dbHead) {
1427
+ const oldChangeEntries = await this.indexById.root.getMany(result.rows.map(({ key }) => key));
1428
+ oldIndexEntries = oldChangeEntries.result.map((key) => ({ key, del: true }));
1429
+ removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }));
1430
+ }
1431
+ if (!this.mapFn) {
1432
+ throw new Error("No live map function installed for index, cannot update. Make sure your index definition runs before any queries." + (this.mapFnString ? " Your code should match the stored map function source:\n" + this.mapFnString : ""));
1433
+ }
1434
+ const indexEntries = indexEntriesForChanges(result.rows, this.mapFn);
1435
+ const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }));
1436
+ this.indexById = await bulkIndex(blocks, this.indexById, removeByIdIndexEntries.concat(byIdIndexEntries), idIndexOpts);
1437
+ this.indexByKey = await bulkIndex(blocks, this.indexByKey, oldIndexEntries.concat(indexEntries), dbIndexOpts);
1438
+ this.dbHead = result.clock;
1439
+ });
1440
+ this.database.notifyExternal("dbIndex");
1441
+ }
1442
+ }
1443
+ async function bulkIndex(blocks, inIndex, indexEntries, opts) {
1444
+ if (!indexEntries.length)
1445
+ return inIndex;
1446
+ const putBlock = blocks.put.bind(blocks);
1447
+ const { getBlock } = makeGetBlock(blocks);
1448
+ let returnRootBlock;
1449
+ let returnNode;
1450
+ if (!inIndex.root) {
1451
+ const cid = inIndex.cid;
1452
+ if (!cid) {
1453
+ for await (const node of await dbIndex.create({ get: getBlock, list: indexEntries, ...opts })) {
1454
+ const block = await node.block;
1455
+ await putBlock(block.cid, block.bytes);
1456
+ returnRootBlock = block;
1457
+ returnNode = node;
1458
+ }
1459
+ return { root: returnNode, cid: returnRootBlock.cid };
1460
+ }
1461
+ inIndex.root = await dbIndex.load({ cid, get: getBlock, ...dbIndexOpts });
1462
+ }
1463
+ const { root, blocks: newBlocks } = await inIndex.root.bulk(indexEntries);
1464
+ returnRootBlock = await root.block;
1465
+ returnNode = root;
1466
+ for await (const block of newBlocks) {
1467
+ await putBlock(block.cid, block.bytes);
1468
+ }
1469
+ await putBlock(returnRootBlock.cid, returnRootBlock.bytes);
1470
+ return { root: returnNode, cid: returnRootBlock.cid };
1471
+ }
1472
+ async function loadIndex(blocks, index, indexOpts) {
1473
+ if (!index.root) {
1474
+ const cid = index.cid;
1475
+ if (!cid)
1476
+ return;
1477
+ const { getBlock } = makeGetBlock(blocks);
1478
+ index.root = await dbIndex.load({ cid, get: getBlock, ...indexOpts });
1479
+ }
1480
+ return index.root;
1481
+ }
1482
+ async function applyLimit(results, limit) {
1483
+ results.result = results.result.slice(0, limit);
1484
+ return results;
1485
+ }
1486
+ async function doIndexQuery(blocks, indexByKey, query = {}) {
1487
+ await loadIndex(blocks, indexByKey, dbIndexOpts);
1488
+ if (!indexByKey.root)
1489
+ return { result: [] };
1490
+ if (query.range) {
1491
+ const encodedRange = query.range.map((key) => charwise.encode(key));
1492
+ return applyLimit(await indexByKey.root.range(...encodedRange), query.limit);
1493
+ } else if (query.key) {
1494
+ const encodedKey = charwise.encode(query.key);
1495
+ return indexByKey.root.get(encodedKey);
1496
+ } else {
1497
+ const { result, ...all } = await indexByKey.root.getAllEntries();
1498
+ return applyLimit({ result: result.map(({ key: [k, id], value }) => ({ key: k, id, row: value })), ...all }, query.limit);
1499
+ }
1500
+ }
1501
+
1502
+ class Listener {
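+ // Routes committed changes to topic subscribers using the supplied routing function (the default emits everything to '*').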
1503
+ subcribers = /* @__PURE__ */ new Map();
1504
+ doStopListening = null;
1505
+ /**
1506
+ * @param {import('./fireproof.js').Fireproof} database
1507
+ * @param {(_: any, emit: any) => void} routingFn
1508
+ */
1509
+ constructor(database, routingFn) {
1510
+ this.database = database;
1511
+ this.doStopListening = database.registerListener((changes) => this.onChanges(changes));
1512
+ this.routingFn = routingFn || function(_, emit) {
1513
+ emit("*");
1514
+ };
1515
+ this.dbHead = null;
1516
+ }
1517
+ /**
1518
+ * Subscribe to a topic emitted by the routing function.
1519
+ * @param {string} topic - The topic to subscribe to.
1520
+ * @param {Function} subscriber - The function to call when the topic is emitted.
1521
+ * @returns {Function} A function to unsubscribe from the topic.
1522
+ * @memberof Listener
1523
+ * @instance
1524
+ * @param {any} since
1525
+ */
1526
+ on(topic, subscriber, since) {
1527
+ const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
1528
+ listOfTopicSubscribers.push(subscriber);
1529
+ if (typeof since !== "undefined") {
1530
+ this.database.changesSince(since).then(({ rows: changes }) => {
1531
+ const keys = topicsForChanges(changes, this.routingFn).get(topic);
1532
+ if (keys)
1533
+ keys.forEach((key) => subscriber(key));
1534
+ });
1535
+ }
1536
+ return () => {
1537
+ const index = listOfTopicSubscribers.indexOf(subscriber);
1538
+ if (index > -1)
1539
+ listOfTopicSubscribers.splice(index, 1);
1540
+ };
1541
+ }
1542
+ /**
1543
+ * @typedef {import('./db-index').ChangeEvent} ChangeEvent
1544
+ */
1545
+ /**
1546
+ * @param {ChangeEvent[]} changes
1547
+ */
1548
+ onChanges(changes) {
1549
+ if (Array.isArray(changes)) {
1550
+ const seenTopics = topicsForChanges(changes, this.routingFn);
1551
+ for (const [topic, keys] of seenTopics) {
1552
+ const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
1553
+ listOfTopicSubscribers.forEach(
1554
+ (subscriber) => keys.forEach((key) => subscriber(key))
1555
+ );
1556
+ }
1557
+ } else {
1558
+ for (const [, listOfTopicSubscribers] of this.subcribers) {
1559
+ listOfTopicSubscribers.forEach((subscriber) => subscriber(changes));
1560
+ }
1561
+ }
1562
+ }
1563
+ }
1564
+ function getTopicList(subscribersMap, name) {
1565
+ let topicList = subscribersMap.get(name);
1566
+ if (!topicList) {
1567
+ topicList = [];
1568
+ subscribersMap.set(name, topicList);
1569
+ }
1570
+ return topicList;
1571
+ }
1572
+ const topicsForChanges = (changes, routingFn) => {
1573
+ const seenTopics = /* @__PURE__ */ new Map();
1574
+ changes.forEach(({ key, value, del }) => {
1575
+ if (del || !value)
1576
+ value = { _deleted: true };
1577
+ routingFn({ _id: key, ...value }, (t) => {
1578
+ const topicList = getTopicList(seenTopics, t);
1579
+ topicList.push(key);
1580
+ });
1581
+ });
1582
+ return seenTopics;
1583
+ };
1584
+
1585
+ const parseCID = (cid) => typeof cid === "string" ? multiformats.CID.parse(cid) : cid;
1586
+ class Hydrator {
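+ // Restores a database (and its indexes) from toJSON output; snapshot clones at an optional clock and zoom moves a live database to another clock.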
1587
+ static fromJSON(json, database) {
1588
+ database.hydrate({ clock: json.clock.map((c) => parseCID(c)), name: json.name, key: json.key });
1589
+ if (json.indexes) {
1590
+ for (const { name, code, clock: { byId, byKey, db } } of json.indexes) {
1591
+ DbIndex.fromJSON(database, {
1592
+ clock: {
1593
+ byId: byId ? parseCID(byId) : null,
1594
+ byKey: byKey ? parseCID(byKey) : null,
1595
+ db: db ? db.map((c) => parseCID(c)) : null
1596
+ },
1597
+ code,
1598
+ name
1599
+ });
1600
+ }
1601
+ }
1602
+ return database;
1603
+ }
1604
+ static snapshot(database, clock) {
1605
+ const definition = database.toJSON();
1606
+ const withBlocks = new Fireproof(database.blocks);
1607
+ if (clock) {
1608
+ definition.clock = clock.map((c) => parseCID(c));
1609
+ definition.indexes.forEach((index) => {
1610
+ index.clock.byId = null;
1611
+ index.clock.byKey = null;
1612
+ index.clock.db = null;
1613
+ });
1614
+ }
1615
+ const snappedDb = this.fromJSON(definition, withBlocks);
1616
+ [...database.indexes.values()].forEach((index) => {
1617
+ snappedDb.indexes.get(index.mapFnString).mapFn = index.mapFn;
1618
+ });
1619
+ return snappedDb;
1620
+ }
1621
+ static async zoom(database, clock) {
1622
+ [...database.indexes.values()].forEach((index) => {
1623
+ index.indexById = { root: null, cid: null };
1624
+ index.indexByKey = { root: null, cid: null };
1625
+ index.dbHead = null;
1626
+ });
1627
+ database.clock = clock.map((c) => parseCID(c));
1628
+ await database.notifyReset();
1629
+ return database;
1630
+ }
1631
+ }
1632
+
1633
+ exports.Fireproof = Fireproof;
1634
+ exports.Hydrator = Hydrator;
1635
+ exports.Index = DbIndex;
1636
+ exports.Listener = Listener;
1637
+ //# sourceMappingURL=index.js.map