@fireproof/core 0.3.12 → 0.3.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/src/index.d.ts +321 -6
- package/dist/src/index.js +1637 -0
- package/dist/src/index.js.map +1 -0
- package/dist/src/index.mjs +1610 -0
- package/dist/src/index.mjs.map +1 -0
- package/hooks/use-fireproof.js +135 -0
- package/package.json +17 -4
- package/src/blockstore.js +10 -5
- package/src/clock.js +31 -16
- package/src/db-index.js +2 -7
- package/src/fireproof.js +13 -8
- package/src/listener.js +26 -13
- package/src/prolly.js +25 -22
- package/src/valet.js +6 -4
- package/dist/bundle.js +0 -2152
- package/dist/src/blockstore.d.ts +0 -115
- package/dist/src/blockstore.d.ts.map +0 -1
- package/dist/src/clock.d.ts +0 -98
- package/dist/src/clock.d.ts.map +0 -1
- package/dist/src/crypto.d.ts +0 -18
- package/dist/src/crypto.d.ts.map +0 -1
- package/dist/src/db-index.d.ts +0 -116
- package/dist/src/db-index.d.ts.map +0 -1
- package/dist/src/fireproof.d.ts +0 -167
- package/dist/src/fireproof.d.ts.map +0 -1
- package/dist/src/hydrator.d.ts +0 -6
- package/dist/src/hydrator.d.ts.map +0 -1
- package/dist/src/index.d.ts.map +0 -1
- package/dist/src/listener.d.ts +0 -36
- package/dist/src/listener.d.ts.map +0 -1
- package/dist/src/prolly.d.ts +0 -83
- package/dist/src/prolly.d.ts.map +0 -1
- package/dist/src/sha1.d.ts +0 -9
- package/dist/src/sha1.d.ts.map +0 -1
- package/dist/src/valet.d.ts +0 -34
- package/dist/src/valet.d.ts.map +0 -1
- package/dist/tsconfig.tsbuildinfo +0 -1
@@ -0,0 +1,1610 @@
|
|
1
|
+
import { randomBytes } from 'crypto';
|
2
|
+
import * as Block from 'multiformats/block';
|
3
|
+
import { decode, Block as Block$1, encode, create as create$1 } from 'multiformats/block';
|
4
|
+
import { sha256 } from 'multiformats/hashes/sha2';
|
5
|
+
import * as dagcbor from '@ipld/dag-cbor';
|
6
|
+
import { CIDCounter, bf, simpleCompare } from 'prolly-trees/utils';
|
7
|
+
import { create as create$2, load as load$1 } from 'prolly-trees/map';
|
8
|
+
import { nocache } from 'prolly-trees/cache';
|
9
|
+
import { parse } from 'multiformats/link';
|
10
|
+
import { CID } from 'multiformats';
|
11
|
+
import { CarReader } from '@ipld/car';
|
12
|
+
import { CID as CID$1 } from 'multiformats/cid';
|
13
|
+
import * as CBW from '@ipld/car/buffer-writer';
|
14
|
+
import * as raw from 'multiformats/codecs/raw';
|
15
|
+
import { openDB } from 'idb';
|
16
|
+
import cargoQueue from 'async/cargoQueue.js';
|
17
|
+
import * as codec from 'encrypted-block';
|
18
|
+
import { create, load } from 'prolly-trees/cid-set';
|
19
|
+
import { Buffer } from 'buffer';
|
20
|
+
import charwise from 'charwise';
|
21
|
+
import { create as create$3, load as load$2 } from 'prolly-trees/db-index';
|
22
|
+
|
23
|
+
/**
 * Advance the Merkle clock: merge `event` into `head`.
 * Returns the new head plus all CIDs touched during traversal.
 */
async function advance(blocks, head, event) {
  const fetcher = new EventFetcher(blocks);
  const byKey = new Map(head.map((cid) => [cid.toString(), cid]));
  // Event is already a head entry: nothing changes.
  if (byKey.has(event.toString())) {
    return { head, cids: await fetcher.all() };
  }
  // If the event descends from any current head, it supersedes that head.
  let replacedAny = false;
  for (const cid of head) {
    if (await contains(fetcher, event, cid)) {
      byKey.delete(cid.toString());
      byKey.set(event.toString(), event);
      replacedAny = true;
    }
  }
  if (replacedAny) {
    return { head: [...byKey.values()], cids: await fetcher.all() };
  }
  // If some head already descends from the event, the head is unchanged.
  for (const existing of head) {
    if (await contains(fetcher, existing, event)) {
      return { head, cids: await fetcher.all() };
    }
  }
  // Otherwise the event is concurrent: append it as an additional head.
  return { head: head.concat(event), cids: await fetcher.all() };
}
|
46
|
+
/**
 * A Merkle-clock event packaged as a multiformats Block
 * (cid + decoded value + raw bytes).
 */
class EventBlock extends Block$1 {
  /**
   * @param {object} config
   * @param {EventLink<T>} config.cid
   * @param {Event} config.value
   * @param {Uint8Array} config.bytes
   */
  constructor({ cid, value, bytes }) {
    // No extra state; just forwards to the multiformats Block constructor.
    super({ cid, value, bytes });
  }
  /**
   * Encode a new event block carrying `data` with the given parent links.
   * @template T
   * @param {T} data
   * @param {EventLink<T>[]} [parents]
   */
  static create(data, parents) {
    return encodeEventBlock({ data, parents: parents ?? [] });
  }
}
|
65
|
+
/**
 * Fetches and decodes event blocks from a blockstore, memoizing per CID
 * and recording every address touched (for sync accounting).
 */
class EventFetcher {
  /** @param {import('./blockstore').TransactionBlockstore} blocks */
  constructor(blocks) {
    this._blocks = blocks;
    this._cids = new CIDCounter();
    this._cache = new Map();
  }
  /**
   * Fetch and decode one event, consulting the memo cache first.
   * @param {EventLink<T>} link
   * @returns {Promise<EventBlockView<T>>}
   */
  async get(link) {
    const key = link.toString();
    const cached = this._cache.get(key);
    if (cached) return cached;
    const block = await this._blocks.get(link);
    this._cids.add({ address: link });
    if (!block) throw new Error(`missing block: ${link}`);
    // Cache the decode promise so concurrent gets share one decode.
    const decoded = decodeEventBlock(block.bytes);
    this._cache.set(key, decoded);
    return decoded;
  }
  /** Every address fetched so far. */
  async all() {
    return this._cids.all();
  }
}
|
92
|
+
// Encode an event value as a dag-cbor block hashed with sha2-256.
async function encodeEventBlock(value) {
  const encoded = await encode({ value, codec: dagcbor, hasher: sha256 });
  return new Block$1({ cid: encoded.cid, value, bytes: encoded.bytes });
}
|
96
|
+
// Decode dag-cbor bytes back into an event block (recomputing the CID).
async function decodeEventBlock(bytes) {
  const decoded = await decode({ bytes, codec: dagcbor, hasher: sha256 });
  return new Block$1({ cid: decoded.cid, value: decoded.value, bytes });
}
|
100
|
+
/**
 * Breadth-first search through `a`'s ancestry to decide whether event `a`
 * descends from event `b`. Branches that are direct parents of `b` are
 * pruned, since they cannot lead back to `b` itself.
 */
async function contains(events, a, b) {
  if (String(a) === String(b)) return true;
  const [{ value: aevent }, { value: bevent }] = await Promise.all([events.get(a), events.get(b)]);
  const queue = [...aevent.parents];
  while (queue.length > 0) {
    const link = queue.shift();
    if (!link) break;
    if (String(link) === String(b)) return true;
    // Shared parent of b: no need to walk past it.
    if (bevent.parents.some((p) => String(link) === String(p))) continue;
    const { value: event } = await events.get(link);
    queue.push(...event.parents);
  }
  return false;
}
|
118
|
+
/**
 * Yield a graphviz "digraph" description of the clock DAG, one line per
 * yield: head markers first, then every reachable ancestor exactly once.
 */
async function* vis$1(blocks, head, options = {}) {
  const renderNodeLabel = options.renderNodeLabel ?? ((b) => b.value.data.value);
  const events = new EventFetcher(blocks);
  yield "digraph clock {";
  yield ' node [shape=point fontname="Courier"]; head;';
  const headBlocks = await Promise.all(head.map((link) => events.get(link)));
  const pending = [];
  const seen = new Set();
  for (const e of headBlocks) {
    seen.add(e.cid.toString());
    yield ` node [shape=oval fontname="Courier"]; ${e.cid} [label="${renderNodeLabel(e)}"];`;
    yield ` head -> ${e.cid};`;
    for (const p of e.value.parents) {
      yield ` ${e.cid} -> ${p};`;
    }
    pending.push(...e.value.parents);
  }
  // Walk the remaining ancestry, emitting each node only once.
  while (pending.length > 0) {
    const link = pending.shift();
    if (!link) break;
    if (seen.has(link.toString())) continue;
    seen.add(link.toString());
    const block = await events.get(link);
    yield ` node [shape=oval]; ${link} [label="${renderNodeLabel(block)}" fontname="Courier"];`;
    for (const p of block.value.parents) {
      yield ` ${link} -> ${p};`;
    }
    pending.push(...block.value.parents);
  }
  yield "}";
}
|
151
|
+
/**
 * Determine which events downstream of the common ancestor still need to
 * be synced. `cids` is the (unawaited) promise of every address touched.
 */
async function findEventsToSync(blocks, head) {
  const events = new EventFetcher(blocks);
  const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
  const toSync = await asyncFilter(sorted, async (ev) => !(await contains(events, ancestor, ev.cid)));
  return { cids: events.all(), events: toSync };
}
|
157
|
+
// Filter with an async predicate: evaluate all predicates in parallel,
// then keep the entries whose predicate resolved truthy.
const asyncFilter = async (arr, predicate) => {
  const keep = await Promise.all(arr.map(predicate));
  return arr.filter((_v, i) => keep[i]);
};
|
158
|
+
/**
 * Find the common ancestor of `children` plus the events since it,
 * sorted for deterministic replay. Throws when no ancestor exists.
 */
async function findCommonAncestorWithSortedEvents(events, children) {
  const ancestor = await findCommonAncestor(events, children);
  if (!ancestor) throw new Error("failed to find common ancestor event");
  const sorted = await findSortedEvents(events, children, ancestor);
  return { ancestor, sorted };
}
|
166
|
+
/**
 * Walk each child's ancestry one step at a time until all candidate
 * chains share an entry; returns undefined when no further progress
 * can be made (no common ancestor reachable).
 */
async function findCommonAncestor(events, children) {
  if (children.length === 0) return;
  const chains = children.map((c) => [c]);
  for (;;) {
    let progressed = false;
    for (const chain of chains) {
      const next = await findAncestorCandidate(events, chain[chain.length - 1]);
      if (!next) continue;
      progressed = true;
      chain.push(next);
      const shared = findCommonString(chains);
      if (shared) return shared;
    }
    if (!progressed) return;
  }
}
|
186
|
+
// One step up the ancestry: a parentless event is its own candidate, a
// single parent is followed directly, and multiple parents recurse into
// findCommonAncestor to collapse the fork first.
async function findAncestorCandidate(events, root) {
  const { value: event } = await events.get(root);
  if (event.parents.length === 0) return root;
  if (event.parents.length === 1) return event.parents[0];
  return findCommonAncestor(events, event.parents);
}
|
192
|
+
/**
 * Return the first item (scanning arrays in order) that appears in every
 * array, comparing by string form; undefined when none exists.
 */
function findCommonString(arrays) {
  const copies = arrays.map((a) => [...a]);
  for (const arr of copies) {
    for (const item of arr) {
      const everywhere = copies.every(
        (other) => other === arr || other.some((i) => String(i) === String(item))
      );
      if (everywhere) return item;
    }
  }
}
|
209
|
+
/**
 * Collect the events between `head` and `tail`, weight each event by the
 * sum of the depths at which it was encountered, then emit heaviest-first
 * with CID-string order breaking ties inside each weight bucket.
 */
async function findSortedEvents(events, head, tail) {
  const weights = new Map();
  const perHead = await Promise.all(head.map((h) => findEvents(events, h, tail)));
  for (const found of perHead) {
    for (const { event, depth } of found) {
      const key = event.cid.toString();
      const info = weights.get(key);
      if (info) {
        info.weight += depth;
      } else {
        weights.set(key, { event, weight: depth });
      }
    }
  }
  // Bucket by total weight so ties can be ordered deterministically.
  const buckets = new Map();
  for (const { event, weight } of weights.values()) {
    const bucket = buckets.get(weight);
    if (bucket) {
      bucket.push(event);
    } else {
      buckets.set(weight, [event]);
    }
  }
  return Array.from(buckets)
    .sort((a, b) => b[0] - a[0])
    .flatMap(([, es]) => es.sort((a, b) => (String(a.cid) < String(b.cid) ? -1 : 1)));
}
|
234
|
+
/**
 * Depth-first collection of `start` and its ancestors down to `end`
 * (exclusive); each entry records the event and its traversal depth.
 */
async function findEvents(events, start, end, depth = 0) {
  const event = await events.get(start);
  const found = [{ event, depth }];
  const { parents } = event.value;
  const reachedEnd = parents.length === 1 && String(parents[0]) === String(end);
  if (reachedEnd) return found;
  const below = await Promise.all(parents.map((p) => findEvents(events, p, end, depth + 1)));
  return found.concat(...below);
}
|
243
|
+
|
244
|
+
// Re-assemble an encrypted-block codec block from raw bytes, verifying
// the bytes against `cid` via multiformats `create`.
const createBlock = (bytes, cid) => create$1({ cid, bytes, hasher: sha256, codec });
|
245
|
+
// Encrypt every block named in `cids`, then yield a cid-set tree built
// over the encrypted CIDs, and finally a dag-cbor header block whose
// value is [encryptedRootCid, cidSetTreeRootCid].
const encrypt = async function* ({ get, cids, hasher: hasher2, key, cache, chunker, root }) {
  const set = /* @__PURE__ */ new Set();
  let eroot;
  for (const string of cids) {
    const cid = CID.parse(string);
    const unencrypted = await get(cid);
    const block2 = await encode({ ...await codec.encrypt({ ...unencrypted, key }), codec, hasher: hasher2 });
    yield block2;
    set.add(block2.cid.toString());
    // Remember which encrypted block corresponds to the plaintext root.
    if (unencrypted.cid.equals(root))
      eroot = block2.cid;
  }
  if (!eroot)
    throw new Error("cids does not include root");
  const list = [...set].map((s) => CID.parse(s));
  let last;
  // Build a cid-set tree over the encrypted block CIDs; yield its nodes.
  for await (const node of create({ list, get, cache, chunker, hasher: hasher2, codec: dagcbor })) {
    const block2 = await node.block;
    yield block2;
    last = block2;
  }
  // Final header block: [encrypted root CID, cid-set tree root CID].
  const head = [eroot, last.cid];
  const block = await encode({ value: head, codec: dagcbor, hasher: hasher2 });
  yield block;
};
|
270
|
+
// Decrypt a CAR produced by `encrypt`: decode the [eroot, tree] header at
// `root`, load the cid-set tree, decrypt every member block (in parallel),
// and yield the decrypted root block last.
const decrypt = async function* ({ root, get, key, cache, chunker, hasher: hasher2 }) {
  const o = { ...await get(root), codec: dagcbor, hasher: hasher2 };
  const decodedRoot = await decode(o);
  // Header value is [encrypted root CID, cid-set tree root CID].
  const { value: [eroot, tree] } = decodedRoot;
  const rootBlock = await get(eroot);
  const cidset = await load({ cid: tree, get, cache, chunker, codec, hasher: hasher2 });
  const { result: nodes } = await cidset.getAllEntries();
  // Decrypt one encrypted block and re-verify it against its CID.
  const unwrap = async (eblock) => {
    const { bytes, cid } = await codec.decrypt({ ...eblock, key }).catch((e) => {
      console.log("ekey", e);
      throw new Error("bad key: " + key.toString("hex"));
    });
    const block = await createBlock(bytes, cid);
    return block;
  };
  const promises = [];
  for (const { cid } of nodes) {
    // The root block is held back so it can be yielded last.
    if (!rootBlock.cid.equals(cid))
      promises.push(get(cid).then(unwrap));
  }
  yield* promises;
  yield unwrap(rootBlock);
};
|
293
|
+
|
294
|
+
// Compact synchronous SHA-1 over a byte array. Returns a Uint8Array view
// over the five 32-bit state words (20 bytes). Used here only to derive a
// stable keyId from key material — not for security-critical hashing.
// NOTE(review): the returned bytes are the raw backing words of H
// (platform byte order); this is consistent for keyId derivation on one
// machine — confirm if portability across endianness ever matters.
function rawSha1(b) {
  let i = b.byteLength;
  let bs = 0;
  let A;
  let B;
  let C;
  let D;
  let G;
  // SHA-1 initialization vector (H0..H4); ~A and ~B reproduce H2 and H3.
  const H = Uint32Array.from([A = 1732584193, B = 4023233417, ~A, ~B, 3285377520]);
  const W = new Uint32Array(80);
  // Message padded out to 16-word chunks, with the bit length appended.
  const nrWords = i / 4 + 2 | 15;
  const words = new Uint32Array(nrWords + 1);
  let j;
  words[nrWords] = i * 8;
  // Append the 0x80 padding byte, then pack the message big-endian.
  words[i >> 2] |= 128 << (~i << 3);
  for (; i--; ) {
    words[i >> 2] |= b[i] << (~i << 3);
  }
  // Process each 16-word chunk. The dense for-update expression interleaves
  // the message schedule (W) with the 80 compression rounds and the
  // per-round constants/functions selected by j = round/20.
  for (A = H.slice(); bs < nrWords; bs += 16, A.set(H)) {
    for (i = 0; i < 80; A[0] = (G = ((b = A[0]) << 5 | b >>> 27) + A[4] + (W[i] = i < 16 ? words[bs + i] : G << 1 | G >>> 31) + 1518500249, B = A[1], C = A[2], D = A[3], G + ((j = i / 5 >> 2) ? j !== 2 ? (B ^ C ^ D) + (j & 2 ? 1876969533 : 341275144) : (B & C | B & D | C & D) + 882459459 : B & C | ~B & D)), A[1] = b, A[2] = B << 30 | B >>> 2, A[3] = C, A[4] = D, ++i) {
      G = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
    }
    // Fold the chunk state back into the running digest.
    for (i = 5; i; )
      H[--i] = H[i] + A[i];
  }
  return new Uint8Array(H.buffer, H.byteOffset, H.byteLength);
}
|
321
|
+
|
322
|
+
// Content-defined chunker shared by the prolly trees in this module.
const chunker = bf(3);
// Opt-out flag for car encryption. Guarded with `typeof` so environments
// without a `process` global (browser bundles) don't throw a
// ReferenceError at module load — `process.env?.NO_ENCRYPT` alone only
// protects against a missing `env`, not a missing `process`.
const NO_ENCRYPT = typeof process !== "undefined" ? process.env?.NO_ENCRYPT : undefined;
|
324
|
+
/**
 * The Valet persists car files (and a cid→car index) in IndexedDB,
 * optionally encrypting car payloads with `keyMaterial`, and batches
 * uploads to a remote via `uploadFunction` when one is installed.
 */
class Valet {
  idb = null;
  name = null;
  uploadQueue = null;
  // Car CIDs already queued for upload (avoids duplicate enqueues).
  alreadyEnqueued = /* @__PURE__ */ new Set();
  // Hex key material used for car encryption; null disables encryption.
  keyMaterial = null;
  // hex(sha1(key bytes)); becomes part of the IndexedDB database name.
  keyId = "null";
  /**
   * Function installed by the database to upload car files
   * @type {null|function(string, Uint8Array):Promise<void>}
   */
  uploadFunction = null;
  constructor(name = "default", keyMaterial) {
    this.name = name;
    this.setKeyMaterial(keyMaterial);
    // Batch uploads: each task is { carCid, value }. After a successful
    // upload the car's "pending" marker is cleared in cidToCar.
    this.uploadQueue = cargoQueue(async (tasks, callback) => {
      console.log(
        "queue worker",
        tasks.length,
        tasks.reduce((acc, t) => acc + t.value.length, 0)
      );
      if (this.uploadFunction) {
        return await this.withDB(async (db) => {
          for (const task of tasks) {
            await this.uploadFunction(task.carCid, task.value);
            const carMeta = await db.get("cidToCar", task.carCid);
            delete carMeta.pending;
            await db.put("cidToCar", carMeta);
          }
        });
      }
      callback();
    });
    // When the queue drains, retry any cars still marked pending.
    this.uploadQueue.drain(async () => {
      return await this.withDB(async (db) => {
        const carKeys = (await db.getAllFromIndex("cidToCar", "pending")).map((c) => c.car);
        for (const carKey of carKeys) {
          await this.uploadFunction(carKey, await db.get("cars", carKey));
          const carMeta = await db.get("cidToCar", carKey);
          delete carMeta.pending;
          await db.put("cidToCar", carMeta);
        }
      });
    });
  }
  // Current hex key material, or null when encryption is disabled.
  getKeyMaterial() {
    return this.keyMaterial;
  }
  // Install key material and derive keyId = hex(sha1(key bytes)).
  // Clears both when km is falsy or the NO_ENCRYPT flag is set.
  setKeyMaterial(km) {
    if (km && !NO_ENCRYPT) {
      const hex = Uint8Array.from(Buffer.from(km, "hex"));
      this.keyMaterial = km;
      const hash = rawSha1(hex);
      this.keyId = Buffer.from(hash).toString("hex");
    } else {
      this.keyMaterial = null;
      this.keyId = "null";
    }
  }
  /**
   * Group the blocks into a car and write it to the valet.
   * Uses the encrypted car format when keyMaterial is set; a transaction
   * with no lastCid (nothing written) is a no-op.
   * @param {import('./blockstore.js').InnerBlockstore} innerBlockstore
   * @param {Set<string>} cids
   * @returns {Promise<void>}
   * @memberof Valet
   */
  async writeTransaction(innerBlockstore, cids) {
    if (innerBlockstore.lastCid) {
      if (this.keyMaterial) {
        const newCar = await blocksToEncryptedCarBlock(innerBlockstore.lastCid, innerBlockstore, this.keyMaterial);
        await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
      } else {
        const newCar = await blocksToCarBlock(innerBlockstore.lastCid, innerBlockstore);
        await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
      }
    }
  }
  // Lazily open the IndexedDB database (name includes keyId so different
  // keys use disjoint stores) and run dbWorkFun against it.
  // Schema v1: "cars" store + "cidToCar" (multiEntry "cids" index);
  // v2 adds the "pending" index consumed by the upload queue.
  withDB = async (dbWorkFun) => {
    if (!this.idb) {
      this.idb = await openDB(`fp.${this.keyId}.${this.name}.valet`, 2, {
        upgrade(db, oldVersion, newVersion, transaction) {
          if (oldVersion < 1) {
            db.createObjectStore("cars");
            const cidToCar = db.createObjectStore("cidToCar", { keyPath: "car" });
            cidToCar.createIndex("cids", "cids", { multiEntry: true });
          }
          if (oldVersion < 2) {
            const cidToCar = transaction.objectStore("cidToCar");
            cidToCar.createIndex("pending", "pending");
          }
        }
      });
    }
    return await dbWorkFun(this.idb);
  };
  /**
   * Store a car and index its cids (marked pending), then enqueue it for
   * upload at most once.
   * @param {string} carCid
   * @param {*} value
   */
  async parkCar(carCid, value, cids) {
    await this.withDB(async (db) => {
      const tx = db.transaction(["cars", "cidToCar"], "readwrite");
      await tx.objectStore("cars").put(value, carCid);
      await tx.objectStore("cidToCar").put({ pending: "y", car: carCid, cids: Array.from(cids) });
      return await tx.done;
    });
    if (this.uploadFunction) {
      if (this.alreadyEnqueued.has(carCid)) {
        return;
      }
      this.uploadQueue.push({ carCid, value });
      this.alreadyEnqueued.add(carCid);
    }
  }
  // Optional fetch-from-remote fallback used by the blockstore.
  remoteBlockFunction = null;
  // Look up which car holds dataCID via the cids index, load that car,
  // and return the block's bytes — decrypting the car first when key
  // material is set. Returns undefined when the car lacks the block.
  async getBlock(dataCID) {
    return await this.withDB(async (db) => {
      const tx = db.transaction(["cars", "cidToCar"], "readonly");
      const indexResp = await tx.objectStore("cidToCar").index("cids").get(dataCID);
      const carCid = indexResp?.car;
      if (!carCid) {
        throw new Error("Missing block: " + dataCID);
      }
      const carBytes = await tx.objectStore("cars").get(carCid);
      const reader = await CarReader.fromBytes(carBytes);
      if (this.keyMaterial) {
        const roots = await reader.getRoots();
        const readerGetWithCodec = async (cid) => {
          const got = await reader.get(cid);
          let useCodec = codec;
          // NOTE(review): the "bafy" prefix is used as a heuristic for
          // dag-cbor-addressed blocks — confirm it holds for all inputs.
          if (cid.toString().indexOf("bafy") === 0) {
            useCodec = dagcbor;
          }
          const decoded = await Block.decode({
            ...got,
            codec: useCodec,
            hasher: sha256
          });
          return decoded;
        };
        const { blocks } = await blocksFromEncryptedCarBlock(roots[0], readerGetWithCodec, this.keyMaterial);
        const block = blocks.find((b) => b.cid.toString() === dataCID);
        if (block) {
          return block.bytes;
        }
      } else {
        const gotBlock = await reader.get(CID$1.parse(dataCID));
        if (gotBlock) {
          return gotBlock.bytes;
        }
      }
    });
  }
}
|
479
|
+
/**
 * Pack blocks into a CARv1 payload with `lastCid` as the single root,
 * returned as a raw-codec block so the CAR itself is content-addressed.
 */
const blocksToCarBlock = async (lastCid, blocks) => {
  const blockList = Array.isArray(blocks) ? blocks : Array.from(blocks.entries());
  // Pre-compute the exact byte size so the writer buffer fits precisely.
  const headerSize = CBW.headerLength({ roots: [lastCid] });
  let size = headerSize;
  for (const { cid, bytes } of blockList) {
    size += CBW.blockLength({ cid, bytes });
  }
  const writer = await CBW.createWriter(new Uint8Array(size), { headerSize });
  writer.addRoot(lastCid);
  for (const { cid, bytes } of blockList) {
    writer.write({ cid, bytes });
  }
  await writer.close();
  return await Block.encode({ value: writer.bytes, hasher: sha256, codec: raw });
};
|
498
|
+
/**
 * Encrypt every block in `blocks` with the hex key material, then pack
 * the encrypted blocks (plus cid-set tree and header) into a CAR block
 * rooted at the header.
 */
const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blocks, keyMaterial) => {
  const encryptionKey = Buffer.from(keyMaterial, "hex");
  const theCids = [];
  for (const { cid } of blocks.entries()) {
    theCids.push(cid.toString());
  }
  const encryptedBlocks = [];
  let last;
  for await (const block of encrypt({
    cids: theCids,
    get: async (cid) => blocks.get(cid),
    // maybe we can just use blocks.get
    key: encryptionKey,
    hasher: sha256,
    chunker,
    cache: nocache,
    // codec: dagcbor, // should be crypto?
    root: innerBlockStoreClockRootCid
  })) {
    encryptedBlocks.push(block);
    last = block;
  }
  // The header block (yielded last by encrypt) becomes the CAR root.
  return await blocksToCarBlock(last.cid, encryptedBlocks);
};
|
523
|
+
// Decrypted CAR contents are memoized per root CID. The promise is stored
// immediately, so concurrent callers share a single decryption pass.
const memoizeDecryptedCarBlocks = /* @__PURE__ */ new Map();
const blocksFromEncryptedCarBlock = async (cid, get, keyMaterial) => {
  const memoKey = cid.toString();
  const existing = memoizeDecryptedCarBlocks.get(memoKey);
  if (existing) return existing;
  const blocksPromise = (async () => {
    const decryptionKey = Buffer.from(keyMaterial, "hex");
    const cids = new Set();
    const decryptedBlocks = [];
    for await (const block of decrypt({
      root: cid,
      get,
      key: decryptionKey,
      chunker,
      hasher: sha256,
      cache: nocache
      // codec: dagcbor
    })) {
      decryptedBlocks.push(block);
      cids.add(block.cid.toString());
    }
    return { blocks: decryptedBlocks, cids };
  })();
  memoizeDecryptedCarBlocks.set(memoKey, blocksPromise);
  return blocksPromise;
};
|
550
|
+
|
551
|
+
// De-duplicate concurrent work by id: the first caller's promise is
// shared with every caller arriving before ~100ms after it settles.
const husherMap = /* @__PURE__ */ new Map();
const husher = (id, workFn) => {
  let pending = husherMap.get(id);
  if (!pending) {
    pending = workFn().finally(() => setTimeout(() => husherMap.delete(id), 100));
    husherMap.set(id, pending);
  }
  return pending;
};
|
561
|
+
/**
 * A blockstore that reads through, in order of availability: in-flight
 * transactions, the committed-block cache / Valet, and finally the
 * network. Writes only happen via transactions (see doTransaction and
 * InnerBlockstore); direct puts throw.
 */
class TransactionBlockstore {
  /** @type {Map<string, Uint8Array>} */
  committedBlocks = /* @__PURE__ */ new Map();
  valet = null;
  // Short random tag, presumably for debugging/log correlation.
  instanceId = "blkz." + Math.random().toString(36).substring(2, 4);
  inflightTransactions = /* @__PURE__ */ new Set();
  constructor(name, encryptionKey) {
    this.valet = new Valet(name, encryptionKey);
  }
  /**
   * Get a block from the store.
   * The first of (in-flight, committed) to resolve wins; if both reject,
   * fall back to the network.
   * @param {import('./link').AnyLink} cid
   * @returns {Promise<AnyBlock | undefined>}
   */
  async get(cid) {
    const key = cid.toString();
    const bytes = await Promise.any([this.transactionsGet(key), this.committedGet(key)]).catch((e) => {
      return this.networkGet(key);
    });
    if (!bytes)
      throw new Error("Missing block: " + key);
    return { cid, bytes };
  }
  // this iterates over the in-flight transactions
  // and returns the first matching block it finds
  async transactionsGet(key) {
    for (const transaction of this.inflightTransactions) {
      const got = await transaction.get(key);
      if (got && got.bytes)
        return got.bytes;
    }
    throw new Error("Missing block: " + key);
  }
  // Cached read of a committed block, falling through to the Valet and
  // caching whatever it returns.
  async committedGet(key) {
    const old = this.committedBlocks.get(key);
    if (old)
      return old;
    const got = await this.valet.getBlock(key);
    this.committedBlocks.set(key, got);
    return got;
  }
  // Drop the in-memory committed cache (persisted cars are unaffected).
  async clearCommittedCache() {
    this.committedBlocks.clear();
  }
  // Fetch a block via the Valet's remote function (de-duplicated per key
  // by husher) and persist it locally. Returns false when no remote
  // function is installed.
  async networkGet(key) {
    if (this.valet.remoteBlockFunction) {
      const value = await husher(key, async () => await this.valet.remoteBlockFunction(key));
      if (value) {
        // NOTE(review): this transaction is intentionally not awaited
        // (fire-and-forget persistence) — confirm failures are tolerable.
        doTransaction("networkGot: " + key, this, async (innerBlockstore) => {
          await innerBlockstore.put(CID.parse(key), value);
        });
        return value;
      }
    } else {
      return false;
    }
  }
  /**
   * Add a block to the store. Usually bound to a transaction by a closure.
   * It sets the lastCid property to the CID of the block that was put.
   * This is used by the transaction as the head of the car when written to the valet.
   * We don't have to worry about which transaction we are when we are here because
   * we are the transactionBlockstore.
   *
   * @param {import('./link').AnyLink} cid
   * @param {Uint8Array} bytes
   */
  put(cid, bytes) {
    throw new Error("use a transaction to put");
  }
  /**
   * Iterate over all blocks in the store.
   *
   * @yields {AnyBlock}
   * @returns {AsyncGenerator<AnyBlock>}
   */
  // * entries () {
  //   // needs transaction blocks?
  //   // for (const [str, bytes] of this.blocks) {
  //   //   yield { cid: parse(str), bytes }
  //   // }
  //   for (const [str, bytes] of this.committedBlocks) {
  //     yield { cid: parse(str), bytes }
  //   }
  // }
  /**
   * Begin a transaction. Ensures the uncommited blocks are empty at the begining.
   * Returns the blocks to read and write during the transaction.
   * @returns {InnerBlockstore}
   * @memberof TransactionBlockstore
   */
  begin(label = "") {
    const innerTransactionBlockstore = new InnerBlockstore(label, this);
    this.inflightTransactions.add(innerTransactionBlockstore);
    return innerTransactionBlockstore;
  }
  /**
   * Commit the transaction. Writes the blocks to the store.
   * @returns {Promise<void>}
   * @memberof TransactionBlockstore
   */
  async commit(innerBlockstore) {
    await this.doCommit(innerBlockstore);
  }
  // first get the transaction blockstore from the map of transaction blockstores
  // then copy it to committedBlocks
  // then write the transaction blockstore to a car
  // then write the car to the valet
  // then remove the transaction blockstore from the map of transaction blockstores
  doCommit = async (innerBlockstore) => {
    const cids = /* @__PURE__ */ new Set();
    for (const { cid, bytes } of innerBlockstore.entries()) {
      const stringCid = cid.toString();
      // Only blocks not already committed are cached and written out.
      if (this.committedBlocks.has(stringCid)) ; else {
        this.committedBlocks.set(stringCid, bytes);
        cids.add(stringCid);
      }
    }
    if (cids.size > 0) {
      await this.valet.writeTransaction(innerBlockstore, cids);
    }
  };
  /**
   * Retire the transaction. Clears the uncommited blocks.
   * @returns {void}
   * @memberof TransactionBlockstore
   */
  retire(innerBlockstore) {
    this.inflightTransactions.delete(innerBlockstore);
  }
}
|
693
|
+
/**
 * Run `doFun` inside a fresh inner transaction on `blockstore`,
 * committing on success and always retiring the inner store. A
 * blockstore without a `commit` method (e.g. an InnerBlockstore)
 * is passed to `doFun` directly.
 */
const doTransaction = async (label, blockstore, doFun) => {
  if (!blockstore.commit) {
    return await doFun(blockstore);
  }
  const inner = blockstore.begin(label);
  try {
    const result = await doFun(inner);
    await blockstore.commit(inner);
    return result;
  } catch (e) {
    console.error(`Transaction ${label} failed`, e, e.stack);
    throw e;
  } finally {
    blockstore.retire(inner);
  }
};
|
708
|
+
/**
 * Per-transaction block buffer. Reads check the local buffer first and
 * fall back to the parent's committed blocks; writes track the most
 * recent CID as `lastCid` (the car root on commit).
 */
class InnerBlockstore {
  /** @type {Map<string, Uint8Array>} */
  blocks = new Map();
  lastCid = null;
  label = "";
  parentBlockstore = null;
  constructor(label, parentBlockstore) {
    this.label = label;
    this.parentBlockstore = parentBlockstore;
  }
  /**
   * @param {import('./link').AnyLink} cid
   * @returns {Promise<AnyBlock | undefined>}
   */
  async get(cid) {
    const key = cid.toString();
    const local = this.blocks.get(key);
    if (local) {
      return { cid, bytes: local };
    }
    const committed = await this.parentBlockstore.committedGet(key);
    if (committed) {
      return { cid, bytes: committed };
    }
  }
  /**
   * Buffer a block; the most recent put becomes `lastCid`.
   * @param {import('./link').AnyLink} cid
   * @param {Uint8Array} bytes
   */
  async put(cid, bytes) {
    this.blocks.set(cid.toString(), bytes);
    this.lastCid = cid;
  }
  /** Iterate the buffered blocks as `{ cid, bytes }`. */
  *entries() {
    for (const [str, bytes] of this.blocks) {
      yield { cid: parse(str), bytes };
    }
  }
}
|
747
|
+
|
748
|
+
// Shared prolly-tree options: dag-cbor blocks, sha2-256, bf(3) chunker.
const blockOpts = { cache: nocache, chunker: bf(3), codec: dagcbor, hasher: sha256, compare: simpleCompare };
|
749
|
+
// Await `fn` and return its result; `label` is accepted for debug
// tracing but is not currently used.
const withLog = async (label, fn) => {
  return await fn();
};
|
753
|
+
/**
 * Wrap a blockstore in a `getBlock` that returns verified multiformats
 * blocks (dag-cbor, sha2-256).
 */
const makeGetBlock = (blocks) => {
  const getBlock = async (address) => {
    const { cid, bytes } = await withLog(address, () => blocks.get(address));
    return create$1({ cid, bytes, hasher: sha256, codec: dagcbor });
  };
  return { getBlock };
};
|
763
|
+
/**
 * Build the clock-event payload for a put/del, encode it as an event
 * block, store it via `bigPut`, and advance the clock head past it.
 * Returns the updated head plus the accounting the caller passed through.
 */
async function createAndSaveNewEvent({
  inBlocks,
  bigPut,
  root: root2,
  event: inEvent,
  head,
  additions,
  removals = []
}) {
  const { key, value, del } = inEvent;
  // Carry the new root (if any) inline; cid/bytes/value duplication is a
  // known candidate for trimming (see original "can we remove this?").
  const data = {
    root: root2
      ? { cid: root2.cid, bytes: root2.bytes, value: root2.value }
      : null,
    key
  };
  if (del) {
    data.value = null;
    data.type = "del";
  } else {
    data.value = value;
    data.type = "put";
  }
  const event = await EventBlock.create(data, head);
  bigPut(event);
  const advanced = await advance(inBlocks, head, event.cid);
  return {
    root: root2,
    additions,
    removals,
    head: advanced.head,
    clockCIDs: advanced.cids,
    event
  };
}
|
803
|
+
/**
 * Wrap a blockstore with the read/write helpers the tree code uses.
 * `bigPut` writes a block and, when an `additions` Map is supplied, records
 * the block under its CID string so the caller can report what was written.
 * NOTE(review): `cids` is destructured from makeGetBlock, which no longer
 * returns it (see the commented-out `// cids,` there) — it is always
 * undefined in the returned object.
 * @param {*} inBlocks - the underlying blockstore
 * @returns {{getBlock: Function, bigPut: Function, blocks: *, cids: undefined}}
 */
const makeGetAndPutBlock = (inBlocks) => {
  const { getBlock, cids } = makeGetBlock(inBlocks);
  const put2 = inBlocks.put.bind(inBlocks);
  const bigPut = async (block, additions) => {
    const { cid, bytes } = block;
    // put2 is not awaited; TransactionBlockstore.put is synchronous in-memory.
    put2(cid, bytes);
    if (additions) {
      additions.set(cid.toString(), block);
    }
  };
  return { getBlock, bigPut, blocks: inBlocks, cids };
};
|
815
|
+
/**
 * Flatten a causally-sorted event list (plus an optional new event) into the
 * bulk operations the prolly tree expects, keeping only the last operation
 * per key. Appends the new event to `sorted` in place, matching the
 * original's observable mutation.
 * @param {Array<{value: {data: {type: string, key: any, value?: any}}}>} sorted
 * @param {{key: any, value?: any, del?: boolean}} [event] - extra event applied last
 * @returns {Array<{key: any, value?: any, del?: true}>} deduped operations
 */
const bulkFromEvents = (sorted, event) => {
  if (event) {
    const data = { key: event.key };
    if (event.del) {
      data.type = "del";
    } else {
      data.type = "put";
      data.value = event.value;
    }
    // Deliberate in-place append so the new event participates in dedup below.
    sorted.push({ value: { data } });
  }
  // Later events win: the Map keeps only the final op seen for each key.
  const latestByKey = new Map();
  for (const entry of sorted) {
    const { type, value, key } = entry.value.data;
    latestByKey.set(key, type === "put" ? { key, value } : { key, del: true });
  }
  return [...latestByKey.values()];
};
|
836
|
+
/**
 * Load the prolly-tree root referenced by the `ancestor` clock event.
 * @param {*} events - EventFetcher over the blockstore
 * @param {*} ancestor - CID of the common-ancestor clock event
 * @param {Function} getBlock - block loader from makeGetBlock
 * @returns {Promise<object|null>} the loaded root node, or null when the
 *   ancestor event recorded no root (database was empty at that point)
 */
const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
  const event = await events.get(ancestor);
  const { root: root2 } = event.value.data;
  if (root2) {
    return load$1({ cid: root2.cid, get: getBlock, ...blockOpts });
  } else {
    return null;
  }
};
|
845
|
+
/**
 * Compute the prolly-tree state for `head` with an optional extra `event`
 * applied on top: find the common ancestor of the head events, replay the
 * sorted history since it as one bulk, and apply it to the ancestor's root.
 * @param {*} inBlocks - blockstore to read from / write new tree blocks into
 * @param {any[]} head - current clock head (may be empty for a fresh db)
 * @param {{key, value?, del?}} [event] - extra operation applied last
 * @returns {Promise<{root: object|null, blocks: any[]}>} new root node and
 *   newly created tree blocks. NOTE(review): when an existing root is found,
 *   the return value is whatever `prollyRootNode.bulk()` yields — presumably
 *   the same {root, blocks} shape; confirm against the prolly-trees API.
 */
const doProllyBulk = async (inBlocks, head, event) => {
  const { getBlock, blocks } = makeGetAndPutBlock(inBlocks);
  let bulkSorted = [];
  let prollyRootNode = null;
  if (head.length) {
    // Resolve the merge base of all heads and the ordered events since it.
    const events = new EventFetcher(blocks);
    const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
    bulkSorted = sorted;
    prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock);
  }
  const bulkOperations = bulkFromEvents(bulkSorted, event);
  if (!prollyRootNode) {
    // No existing tree: build one from scratch from the bulk operations.
    let root2;
    const newBlocks = [];
    if (bulkOperations.every((op) => op.del)) {
      // Only deletes against an empty tree — nothing to create.
      return { root: null, blocks: [] };
    }
    for await (const node of create$2({ get: getBlock, list: bulkOperations, ...blockOpts })) {
      // The last node yielded is the root; keep every created block.
      root2 = await node.block;
      newBlocks.push(root2);
    }
    return { root: root2, blocks: newBlocks };
  } else {
    return await prollyRootNode.bulk(bulkOperations);
  }
};
|
871
|
+
/**
 * Apply one put/del event on top of `head`: run the bulk tree update,
 * persist the new tree blocks, then create and advance a new clock event.
 * @param {*} inBlocks - transaction blockstore to write into
 * @param {any[]} head - current clock head
 * @param {{key: string, value?: any, del?: boolean}} event - the update
 * @param {object} [options] - NOTE(review): never read in this build
 * @returns {Promise<object>} result of createAndSaveNewEvent
 */
async function put(inBlocks, head, event, options) {
  const { bigPut } = makeGetAndPutBlock(inBlocks);
  if (!head.length) {
    // Genesis write: no head yet, so build a fresh tree and first event.
    const additions = /* @__PURE__ */ new Map();
    const { root: root2, blocks } = await doProllyBulk(inBlocks, head, event);
    for (const b of blocks) {
      bigPut(b, additions);
    }
    return createAndSaveNewEvent({ inBlocks, bigPut, root: root2, event, head, additions: Array.from(additions.values()) });
  }
  const { root: newProllyRootNode, blocks: newBlocks } = await doProllyBulk(inBlocks, head, event);
  if (!newProllyRootNode) {
    // Bulk produced no tree (e.g. everything deleted): record a rootless event.
    return createAndSaveNewEvent({
      inBlocks,
      bigPut,
      root: null,
      event,
      head,
      additions: []
    });
  } else {
    // Persist the new root plus every intermediate block, tracking additions.
    const prollyRootBlock = await newProllyRootNode.block;
    const additions = /* @__PURE__ */ new Map();
    bigPut(prollyRootBlock, additions);
    for (const nb of newBlocks) {
      bigPut(nb, additions);
    }
    return createAndSaveNewEvent({
      inBlocks,
      bigPut,
      root: prollyRootBlock,
      event,
      head,
      additions: Array.from(additions.values())
      /*, todo? Array.from(removals.values()) */
    });
  }
}
|
909
|
+
/**
 * Materialize the current prolly-tree root for `head`, persisting any blocks
 * the merge produced inside a "root" transaction.
 * NOTE(review): `cids` is destructured from doProllyBulk, which returns only
 * {root, blocks} in its no-root branch — `cids` may be undefined here, and
 * callers (getAll/get) pass it on as clockCIDs; cidsToProof tolerates that.
 * @param {*} inBlocks - blockstore
 * @param {any[]} head - current clock head (must be non-empty)
 * @returns {Promise<{cids: any, node: object|null}>}
 * @throws {Error} "no head" when head is empty
 */
async function root(inBlocks, head) {
  if (!head.length) {
    throw new Error("no head");
  }
  const { root: newProllyRootNode, blocks: newBlocks, cids } = await doProllyBulk(inBlocks, head);
  await doTransaction("root", inBlocks, async (transactionBlockstore) => {
    const { bigPut } = makeGetAndPutBlock(transactionBlockstore);
    for (const nb of newBlocks) {
      // bigPut is not awaited; the underlying put is synchronous in-memory.
      bigPut(nb);
    }
  });
  return { cids, node: newProllyRootNode };
}
|
922
|
+
/**
 * List change events that are in `head`'s history but not in `since`'s.
 * @param {*} blocks - blockstore
 * @param {any[]} head - current clock head (must be non-empty)
 * @param {any[]} since - clock head to diff against
 * @returns {Promise<{clockCIDs: any, result: object[]}>} event payloads plus
 *   the clock CIDs touched during traversal
 * @throws {Error} "no head" when head is empty
 */
async function eventsSince(blocks, head, since) {
  if (!head.length) {
    throw new Error("no head");
  }
  // Seed the sync traversal with both heads; findEventsToSync returns the
  // events reachable from head that `since` has not seen.
  const sinceHead = [...since, ...head];
  const { cids, events: unknownSorted3 } = await findEventsToSync(blocks, sinceHead);
  return { clockCIDs: cids, result: unknownSorted3.map(({ value: { data } }) => data) };
}
|
930
|
+
/**
 * Read every key/value entry from the tree at `head`.
 * @param {*} blocks - blockstore
 * @param {any[]} head - clock head; empty head yields an empty result
 * @returns {Promise<{clockCIDs, cids, result: Array<{key, value}>}>}
 */
async function getAll(blocks, head) {
  if (!head.length) {
    return { clockCIDs: new CIDCounter(), cids: new CIDCounter(), result: [] };
  }
  const { node: prollyRootNode, cids: clockCIDs } = await root(blocks, head);
  if (!prollyRootNode) {
    // Head exists but the tree is empty (all entries deleted).
    return { clockCIDs, cids: new CIDCounter(), result: [] };
  }
  const { result, cids } = await prollyRootNode.getAllEntries();
  // Strip entries down to plain {key, value} pairs.
  return { clockCIDs, cids, result: result.map(({ key, value }) => ({ key, value })) };
}
|
941
|
+
/**
 * Read a single value by (already-encoded) key from the tree at `head`.
 * @param {*} blocks - blockstore
 * @param {any[]} head - clock head; empty head yields result null
 * @param {*} key - charwise-encoded key (callers encode before calling)
 * @returns {Promise<{result: any, cids?, clockCIDs?}>}
 */
async function get(blocks, head, key) {
  if (!head.length) {
    return { cids: new CIDCounter(), result: null };
  }
  const { node: prollyRootNode, cids: clockCIDs } = await root(blocks, head);
  if (!prollyRootNode) {
    return { clockCIDs, cids: new CIDCounter(), result: null };
  }
  const { result, cids } = await prollyRootNode.get(key);
  return { result, cids, clockCIDs };
}
|
952
|
+
/**
 * Stream a line-by-line visualization of the prolly tree at `head`.
 * Yields each line as produced; the generator's return value carries the
 * joined text and the CIDs visited (visible via a manual iterator's
 * final {done: true, value}).
 * @param {*} blocks - blockstore
 * @param {any[]} head - clock head
 */
async function* vis(blocks, head) {
  if (!head.length) {
    // Empty head: nothing to visualize; return-value only, no yields.
    return { cids: new CIDCounter(), result: null };
  }
  const { node: prollyRootNode, cids } = await root(blocks, head);
  const lines = [];
  for await (const line of prollyRootNode.vis()) {
    yield line;
    lines.push(line);
  }
  return { vis: lines.join("\n"), cids };
}
|
964
|
+
/**
 * Render the prolly tree at `head` as a single newline-joined string
 * (non-streaming counterpart of vis()).
 * @param {*} blocks - blockstore
 * @param {any[]} head - clock head
 * @returns {Promise<{vis: string, cids} | {cids, result: null}>}
 */
async function visMerkleTree(blocks, head) {
  if (!head.length) {
    return { cids: new CIDCounter(), result: null };
  }
  const { node: prollyRootNode, cids } = await root(blocks, head);
  const lines = [];
  for await (const line of prollyRootNode.vis()) {
    lines.push(line);
  }
  return { vis: lines.join("\n"), cids };
}
|
975
|
+
/**
 * Render the merkle clock (event DAG) at `head` as a newline-joined string,
 * using the clock module's vis generator (vis$1).
 * @param {*} blocks - blockstore
 * @param {any[]} head - clock head
 * @returns {Promise<{vis: string}>}
 */
async function visMerkleClock(blocks, head) {
  const lines = [];
  for await (const line of vis$1(blocks, head)) {
    lines.push(line);
  }
  return { vis: lines.join("\n") };
}
|
982
|
+
|
983
|
+
/**
 * Fireproof: a live document database. Its state is a merkle-clock head
 * (`this.clock`, an array of event CIDs); documents live in a prolly tree
 * reachable from that head. All writes funnel through putToProllyTree,
 * which commits inside a blockstore transaction and notifies listeners.
 */
class Fireproof {
  // Callbacks registered via registerListener(); invoked after each commit.
  listeners = /* @__PURE__ */ new Set();
  /**
   * @function storage
   * @memberof Fireproof
   * Creates a new Fireproof instance with default storage settings
   * Most apps should use this and not worry about the details.
   * @static
   * @returns {Fireproof} - a new Fireproof instance
   */
  static storage = (name = "global") => {
    // 32 random bytes, hex-encoded, as key material for the valet.
    const instanceKey = randomBytes(32).toString("hex");
    return new Fireproof(new TransactionBlockstore(name, instanceKey), [], { name });
  };
  /**
   * @param {*} blocks - backing TransactionBlockstore
   * @param {any[]} clock - initial clock head ([] for a new database)
   * @param {object} [config] - settings; recognized keys seen here: name, validateChange
   * @param {object} [authCtx] - opaque context passed to validateChange
   */
  constructor(blocks, clock, config, authCtx = {}) {
    this.name = config?.name || "global";
    // Random suffix so concurrent instances are distinguishable in logs.
    this.instanceId = `fp.${this.name}.${Math.random().toString(36).substring(2, 7)}`;
    this.blocks = blocks;
    this.clock = clock;
    this.config = config;
    this.authCtx = authCtx;
    // mapFn source string -> DbIndex, populated by DbIndex.registerWithDatabase.
    this.indexes = /* @__PURE__ */ new Map();
  }
  /**
   * Renders the Fireproof instance as a JSON object.
   * @returns {Object} - The JSON representation of the Fireproof instance. Includes clock heads for the database and its indexes.
   * @memberof Fireproof
   * @instance
   */
  toJSON() {
    return {
      clock: this.clockToJSON(),
      name: this.name,
      key: this.blocks.valet.getKeyMaterial(),
      indexes: [...this.indexes.values()].map((index) => index.toJSON())
    };
  }
  /**
   * Returns the Merkle clock heads for the Fireproof instance.
   * @returns {string[]} - The Merkle clock heads for the Fireproof instance.
   * @memberof Fireproof
   * @instance
   */
  clockToJSON() {
    return this.clock.map((cid) => cid.toString());
  }
  /**
   * Restore state captured by toJSON(): clock head, name, key material.
   * Clears indexBlocks so index storage is recreated lazily (see DbIndex
   * constructor) under the restored key.
   */
  hydrate({ clock, name, key }) {
    this.name = name;
    this.clock = clock;
    this.blocks.valet.setKeyMaterial(key);
    this.indexBlocks = null;
  }
  /**
   * Triggers a notification to all listeners
   * of the Fireproof instance so they can repaint UI, etc.
   * @returns {Promise<void>}
   * @memberof Fireproof
   * @instance
   */
  async notifyReset() {
    await this.notifyListeners({ _reset: true, _clock: this.clockToJSON() });
  }
  // used be indexes etc to notify database listeners of new availability
  async notifyExternal(source = "unknown") {
    await this.notifyListeners({ _external: source, _clock: this.clockToJSON() });
  }
  /**
   * Returns the changes made to the Fireproof instance since the specified event.
   * @function changesSince
   * @param {CID[]} [event] - The clock head to retrieve changes since. If null or undefined, retrieves all changes.
   * @returns {Promise<{rows : Object[], clock: CID[], proof: {}}>} An object containing the rows and the head of the instance's clock.
   * @memberof Fireproof
   * @instance
   */
  async changesSince(event) {
    let rows, dataCIDs, clockCIDs;
    if (event) {
      const resp = await eventsSince(this.blocks, this.clock, event);
      // Collapse the event stream to the latest state per key.
      const docsMap = /* @__PURE__ */ new Map();
      for (const { key, type, value } of resp.result.map(decodeEvent)) {
        if (type === "del") {
          docsMap.set(key, { key, del: true });
        } else {
          docsMap.set(key, { key, value });
        }
      }
      rows = Array.from(docsMap.values());
      clockCIDs = resp.clockCIDs;
    } else {
      const allResp = await getAll(this.blocks, this.clock);
      rows = allResp.result.map(({ key, value }) => decodeEvent({ key, value }));
      dataCIDs = allResp.cids;
    }
    // cidsToProof returns [] for whichever of dataCIDs/clockCIDs is unset.
    return {
      rows,
      clock: this.clockToJSON(),
      proof: { data: await cidsToProof(dataCIDs), clock: await cidsToProof(clockCIDs) }
    };
  }
  /**
   * List every live document as {key, value} rows, with `_id` folded into
   * each value, plus the current clock and a data proof.
   */
  async allDocuments() {
    const allResp = await getAll(this.blocks, this.clock);
    const rows = allResp.result.map(({ key, value }) => decodeEvent({ key, value })).map(({ key, value }) => ({ key, value: { _id: key, ...value } }));
    return {
      rows,
      clock: this.clockToJSON(),
      proof: await cidsToProof(allResp.cids)
    };
  }
  /**
   * Runs validation on the specified document using the Fireproof instance's configuration. Throws an error if the document is invalid.
   *
   * @param {Object} doc - The document to validate.
   * @returns {Promise<void>}
   * @throws {Error} - Throws an error if the document is invalid.
   * @memberof Fireproof
   * @instance
   */
  async runValidation(doc) {
    if (this.config && this.config.validateChange) {
      // Best-effort fetch of the previous version; missing doc validates against {}.
      const oldDoc = await this.get(doc._id).then((doc2) => doc2).catch(() => ({}));
      this.config.validateChange(doc, oldDoc, this.authCtx);
    }
  }
  /**
   * Retrieves the document with the specified ID from the database
   *
   * @param {string} key - the ID of the document to retrieve
   * @param {Object} [opts] - options
   * @returns {Promise<{_id: string}>} - the document with the specified ID
   * @memberof Fireproof
   * @instance
   */
  async get(key, opts = {}) {
    const clock = opts.clock || this.clock;
    const resp = await get(this.blocks, clock, charwise.encode(key));
    if (!resp || resp.result === null) {
      throw new Error("Not found");
    }
    const doc = resp.result;
    if (opts.mvcc === true) {
      // Attach the read clock so a later put can detect concurrent changes.
      doc._clock = this.clockToJSON();
    }
    doc._proof = {
      data: await cidsToProof(resp.cids),
      clock: this.clockToJSON()
    };
    doc._id = key;
    return doc;
  }
  /**
   * Adds a new document to the database, or updates an existing document. Returns the ID of the document and the new clock head.
   *
   * @param {Object} doc - the document to be added
   * @param {string} doc._id - the document ID. If not provided, a random ID will be generated.
   * @param {CID[]} doc._clock - the document ID. If not provided, a random ID will be generated.
   * @param {Proof} doc._proof - CIDs referenced by the update
   * @returns {Promise<{ id: string, clock: CID[] }>} - The result of adding the document to the database
   * @memberof Fireproof
   * @instance
   */
  async put({ _id, _proof, ...doc }) {
    const id = _id || "f" + Math.random().toString(36).slice(2);
    await this.runValidation({ _id: id, ...doc });
    // NOTE(review): `_clock` is not destructured out, so it remains inside
    // `doc` (and is stored with the value) while also driving the MVCC check.
    return await this.putToProllyTree({ key: id, value: doc }, doc._clock);
  }
  /**
   * Deletes a document from the database
   * @param {string | any} docOrId - the document ID
   * @returns {Promise<{ id: string, clock: CID[] }>} - The result of deleting the document from the database
   * @memberof Fireproof
   * @instance
   */
  async del(docOrId) {
    let id;
    let clock = null;
    if (docOrId._id) {
      // Full document passed: reuse its id and MVCC clock if present.
      id = docOrId._id;
      clock = docOrId._clock;
    } else {
      id = docOrId;
    }
    await this.runValidation({ _id: id, _deleted: true });
    return await this.putToProllyTree({ key: id, del: true }, clock);
  }
  /**
   * Updates the underlying storage with the specified event.
   * @private
   * @param {{del?: true, key : string, value?: any}} decodedEvent - the event to add
   * @param {CID[]|null} [clock] - MVCC clock from the read this write is based on
   * @returns {Promise<{ proof:{}, id: string, clock: CID[] }>} - The result of adding the event to storage
   * @throws {Error} on MVCC conflict or storage failure
   */
  async putToProllyTree(decodedEvent, clock = null) {
    const event = encodeEvent(decodedEvent);
    if (clock && JSON.stringify(clock) !== JSON.stringify(this.clockToJSON())) {
      // The writer read at an older clock: reject only if this key changed since.
      const resp = await eventsSince(this.blocks, this.clock, event.value._clock);
      const missedChange = resp.result.find(({ key }) => key === event.key);
      if (missedChange) {
        throw new Error("MVCC conflict, document is changed, please reload the document and try again.");
      }
    }
    const result = await doTransaction(
      "putToProllyTree",
      this.blocks,
      async (blocks) => await put(blocks, this.clock, event)
    );
    if (!result) {
      console.error("failed", event);
      throw new Error("failed to put at storage layer");
    }
    // Commit: adopt the advanced head, then fan out to listeners.
    this.clock = result.head;
    await this.notifyListeners([decodedEvent]);
    return {
      id: decodedEvent.key,
      clock: this.clockToJSON(),
      proof: { data: await cidsToProof(result.cids), clock: await cidsToProof(result.clockCIDs) }
    };
  }
  // /**
  //  * Advances the clock to the specified event and updates the root CID
  //  * Will be used by replication
  //  */
  // async advance (event) {
  //   this.clock = await advance(this.blocks, this.clock, event)
  //   this.rootCid = await root(this.blocks, this.clock)
  //   return this.clock
  // }
  /** Stream the prolly-tree visualization for the current clock. */
  async *vis() {
    return yield* vis(this.blocks, this.clock);
  }
  /** Render the prolly tree at the current clock as one string. */
  async visTree() {
    return await visMerkleTree(this.blocks, this.clock);
  }
  /** Render the merkle clock DAG at the current clock as one string. */
  async visClock() {
    return await visMerkleClock(this.blocks, this.clock);
  }
  /**
   * Registers a Listener to be called when the Fireproof instance's clock is updated.
   * Recieves live changes from the database after they are committed.
   * @param {Function} listener - The listener to be called when the clock is updated.
   * @returns {Function} - A function that can be called to unregister the listener.
   * @memberof Fireproof
   */
  registerListener(listener) {
    this.listeners.add(listener);
    return () => {
      this.listeners.delete(listener);
    };
  }
  /** Invoke each registered listener in turn (sequentially awaited). */
  async notifyListeners(changes) {
    for (const listener of this.listeners) {
      await listener(changes);
    }
  }
  /** Install the function the valet uses to upload committed CAR files. */
  setCarUploader(carUploaderFn) {
    this.blocks.valet.uploadFunction = carUploaderFn;
  }
  /** Install the function the valet uses to fetch remote blocks. */
  setRemoteBlockReader(remoteBlockReaderFn) {
    this.blocks.valet.remoteBlockFunction = remoteBlockReaderFn;
  }
}
|
1242
|
+
/**
 * Turn a CIDCounter-like object into an array of CID strings for proofs.
 * Anything without an `all()` method (including null/undefined) yields [].
 * @param {{all?: () => Promise<Iterable<any>>}} [cids]
 * @returns {Promise<string[]>}
 */
async function cidsToProof(cids) {
  if (!cids?.all) return [];
  const collected = await cids.all();
  return Array.from(collected, (cid) => cid.toString());
}
|
1248
|
+
/**
 * Decode a stored event's charwise-encoded key back to its application form.
 * Every other field passes through unchanged in a shallow copy.
 * @param {{key: string}} event
 * @returns {object} copy of `event` with the decoded key
 */
function decodeEvent(event) {
  return { ...event, key: charwise.decode(event.key) };
}
|
1252
|
+
/**
 * Encode an event's key with charwise so keys collate correctly in the tree.
 * Returns undefined when there is no event or no key.
 * @param {{key: any}} [event]
 * @returns {object|undefined} copy of `event` with the encoded key
 */
function encodeEvent(event) {
  if (!event?.key) return;
  return { ...event, key: charwise.encode(event.key) };
}
|
1258
|
+
|
1259
|
+
/**
 * Order composite index keys of the form [mapKey, docRef]: first by the map
 * key, then by the document reference as a tiebreaker.
 * @param {[any, any]} a
 * @param {[any, any]} b
 * @returns {number} negative / zero / positive per comparator convention
 */
const compare = (a, b) => {
  const [aKey, aRef] = a;
  const [bKey, bRef] = b;
  const byKey = simpleCompare(aKey, bKey);
  return byKey === 0 ? refCompare(aRef, bRef) : byKey;
};
|
1267
|
+
/**
 * Compare the docRef halves of composite index keys. A NaN left-hand ref
 * sorts first (used as an open lower bound); a NaN right-hand ref is
 * rejected; Infinity on the left sorts last (open upper bound).
 * @param {any} aRef
 * @param {any} bRef
 * @returns {number}
 * @throws {Error} when bRef is NaN
 */
const refCompare = (aRef, bRef) => {
  if (Number.isNaN(aRef)) return -1;
  if (Number.isNaN(bRef)) throw new Error("ref may not be Infinity or NaN");
  return aRef === Infinity ? 1 : simpleCompare(aRef, bRef);
};
|
1276
|
+
// Options for the by-key index tree: entries keyed by [mapKey, docId],
// ordered by the composite `compare` above.
const dbIndexOpts = { cache: nocache, chunker: bf(3), codec: dagcbor, hasher: sha256, compare };
// Options for the by-id index tree: plain docId keys, simple ordering.
const idIndexOpts = { cache: nocache, chunker: bf(3), codec: dagcbor, hasher: sha256, compare: simpleCompare };
// Rebuild a document object from a {key, value} change row for the map function.
const makeDoc = ({ key, value }) => ({ _id: key, ...value });
|
1279
|
+
/**
 * Run the user's map function over a batch of changes and collect the
 * emitted index entries. Deletions and tombstoned values are skipped;
 * emits with an undefined key or value are ignored.
 * @param {Array<{key: string, value?: object, del?: boolean}>} changes
 * @param {(doc: object, emit: (k: any, v: any) => void) => void} mapFn
 * @returns {Array<{key: [any, string], value: any}>} entries keyed by
 *   [charwise-encoded mapKey, docId]
 */
const indexEntriesForChanges = (changes, mapFn) => {
  const entries = [];
  for (const { key, value, del } of changes) {
    if (del || !value) continue;
    const emit = (k, v) => {
      if (typeof k === "undefined" || typeof v === "undefined") return;
      entries.push({ key: [charwise.encode(k), key], value: v });
    };
    mapFn(makeDoc({ key, value }), emit);
  }
  return entries;
};
|
1295
|
+
/**
 * DbIndex: a secondary index over a Fireproof database, maintained as two
 * prolly trees — indexByKey ([mapKey, docId] -> value) for queries, and
 * indexById (docId -> [mapKey, docId]) so stale entries can be removed when
 * a document changes. Updated incrementally from database.changesSince().
 */
class DbIndex {
  /**
   * @param {*} database - the Fireproof instance to index
   * @param {Function|string} mapFn - map function, or its source string when
   *   rehydrating (queries then require the live function to be re-registered)
   * @param {{byId, byKey, db}} [clock] - persisted index heads from toJSON()
   * @param {{name?: string, temporary?: boolean}} [opts]
   */
  constructor(database, mapFn, clock, opts = {}) {
    this.database = database;
    if (!database.indexBlocks) {
      // Index blocks live in a sibling blockstore sharing the db's key material.
      database.indexBlocks = new TransactionBlockstore(database.name + ".indexes", database.blocks.valet.getKeyMaterial());
    }
    if (typeof mapFn === "string") {
      // Rehydrated from JSON: only the source is known until re-registered.
      this.mapFnString = mapFn;
    } else {
      this.mapFn = mapFn;
      this.mapFnString = mapFn.toString();
    }
    this.name = opts.name || this.makeName();
    this.indexById = { root: null, cid: null };
    this.indexByKey = { root: null, cid: null };
    this.dbHead = null;
    if (clock) {
      this.indexById.cid = clock.byId;
      this.indexByKey.cid = clock.byKey;
      this.dbHead = clock.db;
    }
    this.instanceId = this.database.instanceId + `.DbIndex.${Math.random().toString(36).substring(2, 7)}`;
    // Serializes concurrent updateIndex() calls.
    this.updateIndexPromise = null;
    if (!opts.temporary) {
      DbIndex.registerWithDatabase(this, this.database);
    }
  }
  /**
   * Derive a default index name from the map function's source.
   * NOTE(review): returns the SECOND parenthesized two-argument match —
   * presumably the emit(k, v) call rather than the (doc, emit) parameter
   * list; may be undefined for map functions that never match. Confirm.
   */
  makeName() {
    const regex = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g;
    const matches = Array.from(this.mapFnString.matchAll(regex), (match) => match[1].trim());
    return matches[1];
  }
  /**
   * Deduplicate indexes by map-function source on the database. A live
   * (function-bearing) index adopts or donates persisted heads so that
   * hydration order does not matter.
   */
  static registerWithDatabase(inIndex, database) {
    if (!database.indexes.has(inIndex.mapFnString)) {
      database.indexes.set(inIndex.mapFnString, inIndex);
    } else {
      const existingIndex = database.indexes.get(inIndex.mapFnString);
      if (existingIndex.mapFn) {
        // Existing index has the live function: give it the new heads.
        existingIndex.dbHead = inIndex.dbHead;
        existingIndex.indexById.cid = inIndex.indexById.cid;
        existingIndex.indexByKey.cid = inIndex.indexByKey.cid;
      } else {
        // New index has the live function: adopt the persisted heads and take over.
        inIndex.dbHead = existingIndex.dbHead;
        inIndex.indexById.cid = existingIndex.indexById.cid;
        inIndex.indexByKey.cid = existingIndex.indexByKey.cid;
        database.indexes.set(inIndex.mapFnString, inIndex);
      }
    }
  }
  /** Serialize name, map-function source, and index/db heads for persistence. */
  toJSON() {
    const indexJson = { name: this.name, code: this.mapFnString, clock: { db: null, byId: null, byKey: null } };
    indexJson.clock.db = this.dbHead?.map((cid) => cid.toString());
    indexJson.clock.byId = this.indexById.cid?.toString();
    indexJson.clock.byKey = this.indexByKey.cid?.toString();
    return indexJson;
  }
  /** Recreate an index from toJSON() output (map function arrives as source). */
  static fromJSON(database, { code, clock, name }) {
    return new DbIndex(database, code, clock, { name });
  }
  /**
   * JSDoc for Query type.
   * @typedef {Object} DbQuery
   * @property {string[]} [range] - The range to query.
   * @memberof DbIndex
   */
  /**
   * Query object can have {range}
   * @param {DbQuery} query - the query range to use
   * @param {boolean} [update] - refresh the index from the db first (default true)
   * @returns {Promise<{proof: {}, rows: Array<{id: string, key: string, value: any}>}>}
   * @memberof DbIndex
   * @instance
   */
  async query(query, update = true) {
    update && await this.updateIndex(this.database.indexBlocks);
    const response = await doIndexQuery(this.database.indexBlocks, this.indexByKey, query);
    return {
      proof: { index: await cidsToProof(response.cids) },
      rows: response.result.map(({ id, key, row }) => {
        // Decode the charwise map-key back to its application value.
        return { id, key: charwise.decode(key), value: row };
      })
    };
  }
  /**
   * Update the DbIndex with the latest changes
   * @private
   * @returns {Promise<void>}
   */
  async updateIndex(blocks) {
    // Coalesce overlapping calls onto one in-flight update.
    if (this.updateIndexPromise)
      return this.updateIndexPromise;
    this.updateIndexPromise = this.innerUpdateIndex(blocks);
    this.updateIndexPromise.finally(() => {
      this.updateIndexPromise = null;
    });
    return this.updateIndexPromise;
  }
  /** Incrementally apply db changes since dbHead to both index trees. */
  async innerUpdateIndex(inBlocks) {
    const result = await this.database.changesSince(this.dbHead);
    if (result.rows.length === 0) {
      // Nothing changed; just fast-forward our view of the db head.
      this.dbHead = result.clock;
      return;
    }
    await doTransaction("updateIndex", inBlocks, async (blocks) => {
      let oldIndexEntries = [];
      let removeByIdIndexEntries = [];
      await loadIndex(blocks, this.indexById, idIndexOpts);
      await loadIndex(blocks, this.indexByKey, dbIndexOpts);
      if (this.dbHead) {
        // Look up each changed doc's previous index keys so they can be removed.
        const oldChangeEntries = await this.indexById.root.getMany(result.rows.map(({ key }) => key));
        oldIndexEntries = oldChangeEntries.result.map((key) => ({ key, del: true }));
        removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }));
      }
      if (!this.mapFn) {
        throw new Error("No live map function installed for index, cannot update. Make sure your index definition runs before any queries." + (this.mapFnString ? " Your code should match the stored map function source:\n" + this.mapFnString : ""));
      }
      const indexEntries = indexEntriesForChanges(result.rows, this.mapFn);
      // The by-id tree maps docId -> composite [mapKey, docId] key.
      const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }));
      this.indexById = await bulkIndex(blocks, this.indexById, removeByIdIndexEntries.concat(byIdIndexEntries), idIndexOpts);
      this.indexByKey = await bulkIndex(blocks, this.indexByKey, oldIndexEntries.concat(indexEntries), dbIndexOpts);
      this.dbHead = result.clock;
    });
    // Not awaited: fire-and-forget notification to db listeners.
    this.database.notifyExternal("dbIndex");
  }
}
|
1419
|
+
/**
 * Apply a batch of entries to one side of an index ({root, cid}), creating
 * the tree if it does not exist yet, and persist all new blocks.
 * @param {*} blocks - blockstore for index blocks
 * @param {{root: object|null, cid: any|null}} inIndex - current index side
 * @param {Array<{key, value?, del?}>} indexEntries - batch to apply
 * @param {object} opts - tree options (idIndexOpts or dbIndexOpts)
 * @returns {Promise<{root: object, cid: any}>} updated index side
 */
async function bulkIndex(blocks, inIndex, indexEntries, opts) {
  if (!indexEntries.length)
    return inIndex;
  const putBlock = blocks.put.bind(blocks);
  const { getBlock } = makeGetBlock(blocks);
  let returnRootBlock;
  let returnNode;
  if (!inIndex.root) {
    const cid = inIndex.cid;
    if (!cid) {
      // No tree at all yet: create one from the batch. The last node yielded
      // is the root.
      for await (const node of await create$3({ get: getBlock, list: indexEntries, ...opts })) {
        const block = await node.block;
        await putBlock(block.cid, block.bytes);
        returnRootBlock = block;
        returnNode = node;
      }
      return { root: returnNode, cid: returnRootBlock.cid };
    }
    // Persisted but not loaded: load the root before applying the batch.
    // NOTE(review): loads with dbIndexOpts rather than the caller's `opts` —
    // confirm this is intended for the by-id (simpleCompare) index as well.
    inIndex.root = await load$2({ cid, get: getBlock, ...dbIndexOpts });
  }
  const { root, blocks: newBlocks } = await inIndex.root.bulk(indexEntries);
  returnRootBlock = await root.block;
  returnNode = root;
  for await (const block of newBlocks) {
    await putBlock(block.cid, block.bytes);
  }
  await putBlock(returnRootBlock.cid, returnRootBlock.bytes);
  return { root: returnNode, cid: returnRootBlock.cid };
}
|
1448
|
+
/**
 * Ensure an index side ({root, cid}) has its root node loaded from storage.
 * No-op when already loaded; returns undefined when no CID exists yet
 * (the index has never been written).
 * @param {*} blocks - blockstore for index blocks
 * @param {{root: object|null, cid: any|null}} index - index side (mutated)
 * @param {object} indexOpts - tree options matching how the index was built
 * @returns {Promise<object|undefined>} the loaded root node, if any
 */
async function loadIndex(blocks, index, indexOpts) {
  if (!index.root) {
    const cid = index.cid;
    if (!cid)
      return;
    const { getBlock } = makeGetBlock(blocks);
    index.root = await load$2({ cid, get: getBlock, ...indexOpts });
  }
  return index.root;
}
|
1458
|
+
/**
 * Truncate a query response's result array to `limit` entries, in place.
 * A missing limit leaves the results intact (slice(0, undefined) copies all).
 * @param {{result: any[]}} results - query response (mutated and returned)
 * @param {number} [limit] - maximum number of rows to keep
 * @returns {Promise<{result: any[]}>} the same `results` object
 */
async function applyLimit(results, limit) {
  const limited = results.result.slice(0, limit);
  results.result = limited;
  return results;
}
|
1462
|
+
/**
 * Execute a query against the by-key index tree: a {range: [start, end]}
 * scan, a single {key} lookup, or a full scan when neither is given.
 * Range/scan results honor query.limit; key lookups do not.
 * @param {*} blocks - blockstore for index blocks
 * @param {{root, cid}} indexByKey - the by-key index side
 * @param {{range?: any[], key?: any, limit?: number}} [query]
 * @returns {Promise<{result: any[], cids?: any}>}
 */
async function doIndexQuery(blocks, indexByKey, query = {}) {
  await loadIndex(blocks, indexByKey, dbIndexOpts);
  if (!indexByKey.root)
    return { result: [] };
  if (query.range) {
    // Encode the bounds the same way entries were encoded at index time.
    const encodedRange = query.range.map((key) => charwise.encode(key));
    return applyLimit(await indexByKey.root.range(...encodedRange), query.limit);
  } else if (query.key) {
    const encodedKey = charwise.encode(query.key);
    return indexByKey.root.get(encodedKey);
  } else {
    // Full scan: unpack composite [mapKey, docId] keys into row objects.
    const { result, ...all } = await indexByKey.root.getAllEntries();
    return applyLimit({ result: result.map(({ key: [k, id], value }) => ({ key: k, id, row: value })), ...all }, query.limit);
  }
}
|
1477
|
+
|
1478
|
+
/**
 * Listener: routes committed database changes to topic subscribers.
 * A routing function maps each changed document to topic names via emit();
 * subscribers registered for those topics receive the changed document keys.
 */
class Listener {
  // topic name -> array of subscriber callbacks.
  // NOTE(review): the field name `subcribers` (sic) is part of the published
  // object shape and is deliberately left unrenamed.
  subcribers = /* @__PURE__ */ new Map();
  // Unsubscribe function returned by database.registerListener().
  doStopListening = null;
  /**
   * @param {import('./fireproof.js').Fireproof} database
   * @param {(_: any, emit: any) => void} routingFn
   */
  constructor(database, routingFn) {
    this.database = database;
    this.doStopListening = database.registerListener((changes) => this.onChanges(changes));
    // Default routing: every change goes to the catch-all "*" topic.
    this.routingFn = routingFn || function(_, emit) {
      emit("*");
    };
    this.dbHead = null;
  }
  /**
   * Subscribe to a topic emitted by the event function.
   * @param {string} topic - The topic to subscribe to.
   * @param {Function} subscriber - The function to call when the topic is emitted.
   * @returns {Function} A function to unsubscribe from the topic.
   * @memberof Listener
   * @instance
   * @param {any} since
   */
  on(topic, subscriber, since) {
    const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
    listOfTopicSubscribers.push(subscriber);
    if (typeof since !== "undefined") {
      // Replay: deliver keys changed since `since` that route to this topic.
      // NOTE(review): this promise is neither awaited nor error-handled.
      this.database.changesSince(since).then(({ rows: changes }) => {
        const keys = topicsForChanges(changes, this.routingFn).get(topic);
        if (keys)
          keys.forEach((key) => subscriber(key));
      });
    }
    return () => {
      const index = listOfTopicSubscribers.indexOf(subscriber);
      if (index > -1)
        listOfTopicSubscribers.splice(index, 1);
    };
  }
  /**
   * @typedef {import('./db-index').ChangeEvent} ChangeEvent
   */
  /**
   * @param {ChangeEvent[]} changes
   */
  onChanges(changes) {
    if (Array.isArray(changes)) {
      // Normal commit: fan each changed key out to its topics' subscribers.
      const seenTopics = topicsForChanges(changes, this.routingFn);
      for (const [topic, keys] of seenTopics) {
        const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
        listOfTopicSubscribers.forEach(
          (subscriber) => keys.forEach((key) => subscriber(key))
        );
      }
    } else {
      // Reset/external notification object: broadcast to every subscriber.
      for (const [, listOfTopicSubscribers] of this.subcribers) {
        listOfTopicSubscribers.forEach((subscriber) => subscriber(changes));
      }
    }
  }
}
|
1540
|
+
/**
 * Get (or lazily create) the subscriber array for a topic name.
 * Mutates the map: a missing topic is registered with a fresh empty array.
 * @param {Map<string, Function[]>} subscribersMap - Topic -> subscriber list.
 * @param {string} name - Topic name.
 * @returns {Function[]} The (possibly newly created) list for the topic.
 */
function getTopicList(subscribersMap, name) {
  const existing = subscribersMap.get(name);
  if (existing) return existing;
  const fresh = [];
  subscribersMap.set(name, fresh);
  return fresh;
}
|
1548
|
+
/**
 * Group changed document keys by the topics the routing function emits.
 * Deleted or valueless changes are routed as `{ _deleted: true }` documents.
 * @param {ChangeEvent[]} changes - Change events ({ key, value, del }).
 * @param {Function} routingFn - Called as routingFn(doc, emit).
 * @returns {Map<string, string[]>} Topic -> keys routed to that topic.
 */
const topicsForChanges = (changes, routingFn) => {
  const seenTopics = new Map();
  for (const { key, value, del } of changes) {
    // A deleted (or value-less) change is presented as a tombstone document.
    const doc = del || !value ? { _id: key, _deleted: true } : { _id: key, ...value };
    routingFn(doc, (topic) => {
      getTopicList(seenTopics, topic).push(key);
    });
  }
  return seenTopics;
};
|
1560
|
+
|
1561
|
+
/**
 * Normalize a CID-ish value: parse strings into CID instances, pass
 * anything else (assumed to already be a CID) through untouched.
 * @param {string|CID} cid
 * @returns {CID}
 */
const parseCID = (cid) => {
  if (typeof cid === "string") return CID.parse(cid);
  return cid;
};
|
1562
|
+
/**
 * Serialization helpers for Fireproof databases: rebuild a database (and
 * its indexes) from a JSON definition, snapshot it at a given clock, or
 * move an existing database to a different clock.
 */
class Hydrator {
  /**
   * Apply a JSON definition (clock, name, key, indexes) to a database.
   * @param {Object} json - Definition, typically from database.toJSON().
   * @param {Fireproof} database - Target database to hydrate (mutated).
   * @returns {Fireproof} The same database, for chaining.
   */
  static fromJSON(json, database) {
    const { name, key, clock, indexes } = json;
    database.hydrate({ clock: clock.map((c) => parseCID(c)), name, key });
    if (indexes) {
      for (const indexDef of indexes) {
        const { byId, byKey, db } = indexDef.clock;
        // Each stored index clock entry may be absent; normalize to null.
        DbIndex.fromJSON(database, {
          clock: {
            byId: byId ? parseCID(byId) : null,
            byKey: byKey ? parseCID(byKey) : null,
            db: db ? db.map((c) => parseCID(c)) : null
          },
          code: indexDef.code,
          name: indexDef.name
        });
      }
    }
    return database;
  }
  /**
   * Create a read view of a database sharing its block store, optionally
   * pinned at an earlier clock (index clocks are cleared so they rebuild).
   * @param {Fireproof} database - Source database (not mutated).
   * @param {any[]} [clock] - Optional clock to pin the snapshot at.
   * @returns {Fireproof} A new database over the same blocks.
   */
  static snapshot(database, clock) {
    const definition = database.toJSON();
    const withBlocks = new Fireproof(database.blocks);
    if (clock) {
      definition.clock = clock.map((c) => parseCID(c));
      // Index state is only valid for the original head; force a rebuild.
      for (const indexDef of definition.indexes) {
        indexDef.clock.byId = null;
        indexDef.clock.byKey = null;
        indexDef.clock.db = null;
      }
    }
    const snappedDb = this.fromJSON(definition, withBlocks);
    // Map functions don't survive JSON round-tripping; carry them over.
    for (const index of database.indexes.values()) {
      snappedDb.indexes.get(index.mapFnString).mapFn = index.mapFn;
    }
    return snappedDb;
  }
  /**
   * Move a live database to a different clock in place, resetting all
   * index state and notifying listeners of the reset.
   * @param {Fireproof} database - Database to rewind/advance (mutated).
   * @param {any[]} clock - Clock to move to.
   * @returns {Promise<Fireproof>} The same database, after notifyReset.
   */
  static async zoom(database, clock) {
    for (const index of database.indexes.values()) {
      index.indexById = { root: null, cid: null };
      index.indexByKey = { root: null, cid: null };
      index.dbHead = null;
    }
    database.clock = clock.map((c) => parseCID(c));
    await database.notifyReset();
    return database;
  }
}
|
1608
|
+
|
1609
|
+
export { Fireproof, Hydrator, DbIndex as Index, Listener };
|
1610
|
+
//# sourceMappingURL=index.mjs.map
|