@fireproof/core 0.5.9 → 0.5.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1 @@
+ export {};
@@ -0,0 +1,112 @@
+ // @ts-nocheck
+ /**
+  * A Fireproof database Listener allows you to react to events in the database.
+  *
+  * @class Listener
+  * @classdesc A listener attaches to a Fireproof database and runs a routing function on each change, sending the results to subscribers.
+  *
+  * @param {import('./database').Database} database - The Database instance to listen to.
+  * @param {Function} routingFn - The routing function to apply to each entry in the database.
+  */
+ // import { ChangeEvent } from './db-index'
+ /**
+  * @deprecated since version 0.7.0
+  */
+ export class Listener {
+     subcribers = new Map();
+     doStopListening = null;
+     /**
+      * @param {import('./database').Database} database
+      * @param {(_: any, emit: any) => void} routingFn
+      */
+     constructor(database, routingFn = function (/** @type {any} */ _, /** @type {(arg0: string) => void} */ emit) {
+         emit('*');
+     }) {
+         this.database = database;
+         this.doStopListening = database.registerListener((/** @type {any} */ changes) => this.onChanges(changes));
+         /**
+          * The map function to apply to each entry in the database.
+          * @type {Function}
+          */
+         this.routingFn = routingFn;
+         this.dbHead = null;
+     }
+     /**
+      * Subscribe to a topic emitted by the event function.
+      * @param {string} topic - The topic to subscribe to.
+      * @param {Function} subscriber - The function to call when the topic is emitted.
+      * @returns {Function} A function to unsubscribe from the topic.
+      * @memberof Listener
+      * @instance
+      * @param {any} [since] - clock to flush from on launch, pass null for all
+      */
+     on(topic, subscriber, since = undefined) {
+         const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
+         listOfTopicSubscribers.push(subscriber);
+         if (typeof since !== 'undefined') {
+             this.database.changesSince(since).then(({ rows: changes }) => {
+                 const keys = topicsForChanges(changes, this.routingFn).get(topic);
+                 if (keys)
+                     keys.forEach((/** @type {any} */ key) => subscriber(key));
+             });
+         }
+         return () => {
+             const index = listOfTopicSubscribers.indexOf(subscriber);
+             if (index > -1)
+                 listOfTopicSubscribers.splice(index, 1);
+         };
+     }
+     /**
+      * @typedef {import('./db-index').ChangeEvent} ChangeEvent
+      */
+     /**
+      * @param {ChangeEvent[]} changes
+      */
+     onChanges(changes) {
+         if (Array.isArray(changes)) {
+             const seenTopics = topicsForChanges(changes, this.routingFn);
+             for (const [topic, keys] of seenTopics) {
+                 const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
+                 listOfTopicSubscribers.forEach((/** @type {(arg0: any) => any} */ subscriber) => keys.forEach((/** @type {any} */ key) => subscriber(key)));
+             }
+         }
+         else {
+             // non-arrays go to all subscribers
+             for (const [, listOfTopicSubscribers] of this.subcribers) {
+                 listOfTopicSubscribers.forEach((/** @type {(arg0: any) => any} */ subscriber) => subscriber(changes));
+             }
+         }
+     }
+ }
+ /**
+  * @param {Map<any, any>} subscribersMap
+  * @param {string} name
+  */
+ function getTopicList(subscribersMap, name) {
+     let topicList = subscribersMap.get(name);
+     if (!topicList) {
+         topicList = [];
+         subscribersMap.set(name, topicList);
+     }
+     return topicList;
+ }
+ /**
+  * Transforms a set of changes to events using an emitter function.
+  *
+  * @param {ChangeEvent[]} changes
+  * @param {Function} routingFn
+  * @returns {Map<string,string[]>} The topics emitted by the event function.
+  * @private
+  */
+ const topicsForChanges = (changes, routingFn) => {
+     const seenTopics = new Map();
+     changes.forEach(({ key, value, del }) => {
+         if (del || !value)
+             value = { _deleted: true };
+         routingFn({ _id: key, ...value }, (/** @type {any} */ t) => {
+             const topicList = getTopicList(seenTopics, t);
+             topicList.push(key);
+         });
+     });
+     return seenTopics;
+ };
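
For orientation, here is a short usage sketch of the Listener API added above (editorial, not part of the published diff). It assumes @fireproof/core exports Fireproof and Listener and that Fireproof.storage() creates a database, as in the package's 0.5.x examples; treat those names as assumptions.

// Editorial sketch: subscribe to changes routed by document type.
// Assumed: Fireproof.storage() and the Listener export behave as in 0.5.x docs.
import { Fireproof, Listener } from '@fireproof/core';

const database = Fireproof.storage();
const listener = new Listener(database, (doc, emit) => {
    // the routing function sends each changed document to zero or more topics
    if (doc.type) emit(doc.type);
    emit('*'); // wildcard topic receives everything
});

// on() registers a subscriber for a topic and returns an unsubscribe function
const unsubscribe = listener.on('todo', (key) => {
    console.log('todo changed:', key);
});

await database.put({ _id: 'xyz', type: 'todo', text: 'water plants' });
unsubscribe(); // stop receiving keys for this subscriber
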
@@ -0,0 +1,360 @@
+ // @ts-nocheck
+ import { advance, EventFetcher, EventBlock, findCommonAncestorWithSortedEvents, findEventsToSync, vis as visClock } from './clock';
+ // import { create, load } from '../../../../prolly-trees/src/map'
+ // @ts-ignore
+ import { create, load } from 'prolly-trees/map';
+ // @ts-ignore
+ import { nocache as cache } from 'prolly-trees/cache';
+ // @ts-ignore
+ import { CIDCounter, bf, simpleCompare as compare } from 'prolly-trees/utils';
+ import * as codec from '@ipld/dag-cbor';
+ import { sha256 as hasher } from 'multiformats/hashes/sha2';
+ import { doTransaction } from './blockstore';
+ import { create as createBlock } from 'multiformats/block';
+ const blockOpts = { cache, chunker: bf(30), codec, hasher, compare };
+ /**
+  * @typedef {import('./blockstore').TransactionBlockstore} TransactionBlockstore
+  */
+ // const withLog = async (label, fn) => {
+ // const resp = await fn()
+ // // console.log('withLog', label, !!resp)
+ // return resp
+ // }
+ // should also return a CIDCounter
+ export const makeGetBlock = blocks => {
+     // const cids = new CIDCounter() // this could be used for proofs of mutations
+     const getBlockFn = async (address) => {
+         // const { cid, bytes } = await withLog(address, () => blocks.get(address))
+         const { cid, bytes } = await blocks.get(address);
+         // cids.add({ address: cid })
+         return createBlock({ cid, bytes, hasher, codec });
+     };
+     return {
+         // cids,
+         getBlock: getBlockFn
+     };
+ };
+ /**
+  *
+  * @param {*} param0
+  * @returns
+  */
+ async function createAndSaveNewEvent({ inBlocks, bigPut, root, event: inEvent, head, additions, removals = [] }) {
+     let cids;
+     const { key, value, del } = inEvent;
+     const data = {
+         root: root
+             ? {
+                 cid: root.cid,
+                 bytes: root.bytes,
+                 value: root.value // can we remove this?
+             }
+             : null,
+         key
+     };
+     // import('./clock').EventLink<import('./clock').EventData>
+     if (del) {
+         data.value = null;
+         data.type = 'del';
+     }
+     else {
+         data.value = value;
+         data.type = 'put';
+     }
+     /** @type {import('./clock').EventData} */
+     // @ts-ignore
+     const event = await EventBlock.create(data, head);
+     bigPut(event);
+     ({ head, cids } = await advance(inBlocks, head, event.cid));
+     return {
+         root,
+         additions,
+         removals,
+         head,
+         clockCIDs: cids,
+         event
+     };
+ }
+ const makeGetAndPutBlock = inBlocks => {
+     // const mblocks = new MemoryBlockstore()
+     // const blocks = new MultiBlockFetcher(mblocks, inBlocks)
+     const { getBlock, cids } = makeGetBlock(inBlocks);
+     const put = inBlocks.put.bind(inBlocks);
+     const bigPut = async (block, additions) => {
+         // console.log('bigPut', block.cid.toString())
+         const { cid, bytes } = block;
+         put(cid, bytes);
+         // mblocks.putSync(cid, bytes)
+         if (additions) {
+             additions.set(cid.toString(), block);
+         }
+     };
+     return { getBlock, bigPut, blocks: inBlocks, cids };
+ };
+ const bulkFromEvents = (sorted, event) => {
+     if (event) {
+         const update = { value: { data: { key: event.key } } };
+         if (event.del) {
+             update.value.data.type = 'del';
+         }
+         else {
+             update.value.data.type = 'put';
+             update.value.data.value = event.value;
+         }
+         sorted.push(update);
+     }
+     const bulk = new Map();
+     for (const { value: event } of sorted) {
+         const { data: { type, value, key } } = event;
+         const bulkEvent = type === 'put' ? { key, value } : { key, del: true };
+         bulk.set(bulkEvent.key, bulkEvent); // last wins
+     }
+     return Array.from(bulk.values());
+ };
+ // Get the value of the root from the ancestor event
+ /**
+  *
+  * @param {EventFetcher} events
+  * @param {import('./clock').EventLink<import('./clock').EventData>} ancestor
+  * @param {*} getBlock
+  * @returns
+  */
+ const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
+     // console.log('prollyRootFromAncestor', ancestor)
+     const event = await events.get(ancestor);
+     const { root } = event.value.data;
+     // console.log('prollyRootFromAncestor', root.cid, JSON.stringify(root.value))
+     if (root) {
+         return load({ cid: root.cid, get: getBlock, ...blockOpts });
+     }
+     else {
+         return null;
+     }
+ };
+ const doProllyBulk = async (inBlocks, head, event, doFull = false) => {
+     const { getBlock, blocks } = makeGetAndPutBlock(inBlocks);
+     let bulkSorted = [];
+     let prollyRootNode = null;
+     const events = new EventFetcher(blocks);
+     if (head.length) {
+         if (!doFull && head.length === 1) {
+             prollyRootNode = await prollyRootFromAncestor(events, head[0], getBlock);
+         }
+         else {
+             // Otherwise, we find the common ancestor and update the root and other blocks
+             // todo this is returning more events than necessary, let's define the desired semantics from the top down
+             // good semantics mean we can cache the results of this call
+             const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head, doFull);
+             bulkSorted = sorted;
+             // console.log('sorted', JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
+             prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock);
+             // console.log('event', event)
+         }
+     }
+     const bulkOperations = bulkFromEvents(bulkSorted, event);
+     // if prolly root node is null, we need to create a new one
+     if (!prollyRootNode) {
+         let root;
+         const newBlocks = [];
+         // if all operations are deletes, we can just return an empty root
+         if (bulkOperations.every(op => op.del)) {
+             return { root: null, blocks: [], clockCIDs: await events.all() };
+         }
+         for await (const node of create({ get: getBlock, list: bulkOperations, ...blockOpts })) {
+             root = await node.block;
+             newBlocks.push(root);
+         }
+         return { root, blocks: newBlocks, clockCIDs: await events.all() };
+     }
+     else {
+         const writeResp = await prollyRootNode.bulk(bulkOperations); // { root: newProllyRootNode, blocks: newBlocks }
+         writeResp.clockCIDs = await events.all();
+         return writeResp;
+     }
+ };
+ /**
+  * Put a value (a CID) for the given key. If the key exists, its value is overwritten.
+  *
+  * @param {import('./blockstore').Blockstore} inBlocks Bucket block storage.
+  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
+  * @param {{key: string, value: import('./clock').EventLink<import('./clock').EventData>}} event The key of the value to put.
+  * @param {object} [options]
+  * @returns {Promise<any>}
+  */
+ export async function put(inBlocks, head, event, options) {
+     const { bigPut } = makeGetAndPutBlock(inBlocks);
+     // If the head is empty, we create a new event and return the root and addition blocks
+     if (!head.length) {
+         const additions = new Map();
+         const { root, blocks } = await doProllyBulk(inBlocks, head, event);
+         for (const b of blocks) {
+             bigPut(b, additions);
+         }
+         return createAndSaveNewEvent({ inBlocks, bigPut, root, event, head, additions: Array.from(additions.values()) });
+     }
+     const { root: newProllyRootNode, blocks: newBlocks } = await doProllyBulk(inBlocks, head, event);
+     if (!newProllyRootNode) {
+         return createAndSaveNewEvent({
+             inBlocks,
+             bigPut,
+             root: null,
+             event,
+             head,
+             additions: []
+         });
+     }
+     else {
+         const prollyRootBlock = await newProllyRootNode.block;
+         const additions = new Map(); // ; const removals = new Map()
+         bigPut(prollyRootBlock, additions);
+         for (const nb of newBlocks) {
+             bigPut(nb, additions);
+         }
+         // additions are new blocks
+         return createAndSaveNewEvent({
+             inBlocks,
+             bigPut,
+             root: prollyRootBlock,
+             event,
+             head,
+             additions: Array.from(additions.values()) /*, todo? Array.from(removals.values()) */
+         });
+     }
+ }
+ /**
+  * Determine the effective prolly root given the current merkle clock head.
+  *
+  * @param {TransactionBlockstore} inBlocks Bucket block storage.
+  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
+  */
+ export async function root(inBlocks, head, doFull = false) {
+     if (!head.length) {
+         throw new Error('no head');
+     }
+     // console.log('root', head.map(h => h.toString()))
+     const { root: newProllyRootNode, blocks: newBlocks, clockCIDs } = await doProllyBulk(inBlocks, head, null, doFull);
+     // todo maybe these should go to a temp blockstore?
+     await doTransaction('root', inBlocks, async (transactionBlocks) => {
+         const { bigPut } = makeGetAndPutBlock(transactionBlocks);
+         for (const nb of newBlocks) {
+             bigPut(nb);
+         }
+     }, false);
+     return { clockCIDs, node: newProllyRootNode };
+ }
+ /**
+  * Get the list of events not known by the `since` event
+  * @param {TransactionBlockstore} blocks Bucket block storage.
+  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
+  * @param {import('./clock').EventLink<import('./clock').EventData>} since Event to compare against.
+  * @returns {Promise<{clockCIDs: CIDCounter, result: import('./clock').EventData[]}>}
+  */
+ export async function eventsSince(blocks, head, since) {
+     if (!head.length) {
+         // throw new Error('no head')
+         return { clockCIDs: [], result: [] };
+     }
+     // @ts-ignore
+     const sinceHead = [...since, ...head]; // ?
+     const { cids, events: unknownSorted3 } = await findEventsToSync(blocks, sinceHead);
+     return { clockCIDs: cids, result: unknownSorted3.map(({ value: { data } }) => data) };
+ }
+ /**
+  *
+  * @param {TransactionBlockstore} blocks Bucket block storage.
+  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
+  *
+  * @returns {Promise<{root: any, cids: CIDCounter, clockCIDs: CIDCounter, result: import('./clock').EventData[]}>}
+  *
+  */
+ export async function getAll(blocks, head, rootCache = null, doFull = false) {
+     // todo use the root node left around from put, etc
+     // move load to a central place
+     if (!head.length) {
+         return { root: null, clockCIDs: new CIDCounter(), cids: new CIDCounter(), result: [] };
+     }
+     const { node: prollyRootNode, clockCIDs } = await rootOrCache(blocks, head, rootCache, doFull);
+     if (!prollyRootNode) {
+         return { root: null, clockCIDs, cids: new CIDCounter(), result: [] };
+     }
+     const { result, cids } = await prollyRootNode.getAllEntries(); // todo params
+     return { root: prollyRootNode, clockCIDs, cids, result: result.map(({ key, value }) => ({ key, value })) };
+ }
+ async function rootOrCache(blocks, head, rootCache, doFull = false) {
+     let node;
+     let clockCIDs;
+     if (!doFull && rootCache && rootCache.root) {
+         // console.log('get root from cache', rootCache)
+         node = rootCache.root;
+         clockCIDs = rootCache.clockCIDs;
+     }
+     else {
+         // console.log('finding root')
+         // const callTag = Math.random().toString(36).substring(7)
+         // console.time(callTag + '.root')
+         ;
+         ({ node, clockCIDs } = await root(blocks, head, doFull));
+         // console.timeEnd(callTag + '.root')
+         // console.log('found root')
+     }
+     return { node, clockCIDs };
+ }
+ /**
+  * @param {TransactionBlockstore} blocks Bucket block storage.
+  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
+  * @param {string} key The key of the value to retrieve.
+  */
+ export async function get(blocks, head, key, rootCache = null) {
+     // instead pass root from db? and always update on change
+     if (!head.length) {
+         return { cids: new CIDCounter(), result: null };
+     }
+     const { node: prollyRootNode, clockCIDs } = await rootOrCache(blocks, head, rootCache);
+     if (!prollyRootNode) {
+         return { clockCIDs, cids: new CIDCounter(), result: null };
+     }
+     const { result, cids } = await prollyRootNode.get(key);
+     return { result, cids, clockCIDs, root: prollyRootNode };
+ }
+ export async function* vis(blocks, head) {
+     if (!head.length) {
+         return { cids: new CIDCounter(), result: null };
+     }
+     const { node: prollyRootNode, cids } = await root(blocks, head);
+     const lines = [];
+     for await (const line of prollyRootNode.vis()) {
+         yield line;
+         lines.push(line);
+     }
+     return { vis: lines.join('\n'), cids };
+ }
+ export async function visMerkleTree(blocks, head) {
+     // if (!head) return
+     if (head && !Array.isArray(head)) {
+         const getBl = makeGetBlock(blocks);
+         const prollyRootNode = await load({
+             cid: head,
+             get: getBl.getBlock,
+             ...blockOpts
+         });
+         const lines = [];
+         for await (const line of prollyRootNode.vis()) {
+             lines.push(line);
+         }
+         return { vis: lines.join('\n'), cids: new CIDCounter() };
+     }
+     const { node: prollyRootNode, cids } = await root(blocks, head);
+     const lines = [];
+     for await (const line of prollyRootNode.vis()) {
+         lines.push(line);
+     }
+     return { vis: lines.join('\n'), cids };
+ }
+ export async function visMerkleClock(blocks, head) {
+     const lines = [];
+     for await (const line of visClock(blocks, head)) {
+         // yield line
+         lines.push(line);
+     }
+     return { vis: lines.join('\n') };
+ }
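
The module above ties a prolly-tree (content-addressed B-tree) root to a Merkle clock head: put records a key/value change as a clock event and returns an advanced head, while get and getAll read back through the current root (rootOrCache avoids recomputing it). Below is a hedged sketch (editorial, not part of the diff) of how the exports compose; it assumes a blockstore compatible with './blockstore' is already available as blocks (in the package this is a TransactionBlockstore managed by the database layer, which the sketch does not construct) and that the module ships as ./prolly.js.

// Editorial sketch, not from the diff: compose put/get/getAll around a head.
// `blocks` is assumed to be a './blockstore'-compatible store with get/put.
import { put, get, getAll } from './prolly.js';

let head = []; // an empty Merkle clock head means a fresh database

// every write returns the advanced head to use for subsequent operations
const resp = await put(blocks, head, { key: 'xyz', value: { hello: 'world' } });
head = resp.head;

// read one key back through the prolly root
const { result } = await get(blocks, head, 'xyz');
console.log(result); // { hello: 'world' }

// or list every entry reachable from the current head
const { result: allEntries } = await getAll(blocks, head);
console.log(allEntries.map(({ key }) => key)); // ['xyz']

The important detail is that the head is the only handle on state: each put returns a new head, and a reader that keeps an old head will see the older root.
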
@@ -0,0 +1,73 @@
+ // from https://github.com/duzun/sync-sha1/blob/master/rawSha1
+ // MIT License Copyright (c) 2020 Dumitru Uzun
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
+ // of this software and associated documentation files (the "Software"), to deal
+ // in the Software without restriction, including without limitation the rights
+ // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ // copies of the Software, and to permit persons to whom the Software is
+ // furnished to do so, subject to the following conditions:
+ // The above copyright notice and this permission notice shall be included in all
+ // copies or substantial portions of the Software.
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ // SOFTWARE.
+ // import {
+ // isLittleEndian, switchEndianness32
+ // } from 'string-encode'
+ /**
+  * SHA1 on binary array
+  *
+  * @param {Uint8Array} b Data to hash
+  *
+  * @return {Uint8Array} sha1 hash
+  */
+ export function rawSha1(b) {
+     let i = b.byteLength;
+     let bs = 0;
+     let A;
+     let B;
+     let C;
+     let D;
+     let G;
+     const H = Uint32Array.from([A = 0x67452301, B = 0xEFCDAB89, ~A, ~B, 0xC3D2E1F0]);
+     const W = new Uint32Array(80);
+     const nrWords = (i / 4 + 2) | 15;
+     const words = new Uint32Array(nrWords + 1);
+     let j;
+     words[nrWords] = i * 8;
+     words[i >> 2] |= 0x80 << (~i << 3);
+     for (; i--;) {
+         words[i >> 2] |= b[i] << (~i << 3);
+     }
+     for (A = H.slice(); bs < nrWords; bs += 16, A.set(H)) {
+         for (i = 0; i < 80; A[0] = (G = ((b = A[0]) << 5 | b >>> 27) +
+             A[4] +
+             (W[i] = (i < 16) ? words[bs + i] : G << 1 | G >>> 31) +
+             0x5A827999,
+             B = A[1],
+             C = A[2],
+             D = A[3],
+             G + ((j = i / 5 >> 2) // eslint-disable-line no-cond-assign
+                 ? j !== 2
+                     ? (B ^ C ^ D) + (j & 2 ? 0x6FE0483D : 0x14577208)
+                     : (B & C | B & D | C & D) + 0x34994343
+                 : B & C | ~B & D))
+             , A[1] = b
+             , A[2] = B << 30 | B >>> 2
+             , A[3] = C
+             , A[4] = D
+             , ++i) {
+             G = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
+         }
+         for (i = 5; i;)
+             H[--i] = H[i] + A[i];
+     }
+     // if (isLittleEndian()) {
+     // H = H.map(switchEndianness32)
+     // }
+     return new Uint8Array(H.buffer, H.byteOffset, H.byteLength);
+ }
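
rawSha1 is a synchronous SHA-1 over a Uint8Array that returns the 20-byte digest. A small editorial sketch follows (the importing path is an assumption); note that the endianness switch near the end is commented out, so on little-endian hosts the bytes inside each 32-bit word come back in host order rather than the canonical big-endian digest order.

// Editorial sketch: hash some bytes synchronously with the function above.
import { rawSha1 } from './sha1.js'; // filename is an assumption

const digest = rawSha1(new TextEncoder().encode('hello world'));
console.log(digest.byteLength); // 20, the SHA-1 digest length
// Byte order within each 32-bit word follows the host platform because the
// switchEndianness32 step above is commented out.
console.log([...digest].map(b => b.toString(16).padStart(2, '0')).join(''));
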