@fireproof/core 0.3.22 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/blockstore.js +242 -0
- package/dist/clock.js +355 -0
- package/dist/crypto.js +59 -0
- package/dist/database.js +308 -0
- package/dist/db-index.js +314 -0
- package/dist/fireproof.js +83 -0
- package/dist/hooks/use-fireproof.js +100 -0
- package/dist/listener.js +110 -0
- package/dist/prolly.js +316 -0
- package/dist/sha1.js +74 -0
- package/dist/src/blockstore.js +242 -0
- package/dist/src/clock.js +355 -0
- package/dist/src/crypto.js +59 -0
- package/dist/src/database.js +312 -0
- package/dist/src/db-index.js +314 -0
- package/dist/src/fireproof.d.ts +319 -0
- package/dist/src/fireproof.js +38976 -0
- package/dist/src/fireproof.js.map +1 -0
- package/dist/src/fireproof.mjs +38972 -0
- package/dist/src/fireproof.mjs.map +1 -0
- package/dist/src/index.d.ts +1 -1
- package/dist/src/index.js +19 -14
- package/dist/src/index.js.map +1 -1
- package/dist/src/index.mjs +19 -14
- package/dist/src/index.mjs.map +1 -1
- package/dist/src/listener.js +108 -0
- package/dist/src/prolly.js +319 -0
- package/dist/src/sha1.js +74 -0
- package/dist/src/utils.js +16 -0
- package/dist/src/valet.js +262 -0
- package/dist/test/block.js +57 -0
- package/dist/test/clock.test.js +556 -0
- package/dist/test/db-index.test.js +231 -0
- package/dist/test/fireproof.test.js +444 -0
- package/dist/test/fulltext.test.js +61 -0
- package/dist/test/helpers.js +39 -0
- package/dist/test/hydrator.test.js +142 -0
- package/dist/test/listener.test.js +103 -0
- package/dist/test/prolly.test.js +162 -0
- package/dist/test/proofs.test.js +45 -0
- package/dist/test/reproduce-fixture-bug.test.js +57 -0
- package/dist/test/valet.test.js +56 -0
- package/dist/utils.js +16 -0
- package/dist/valet.js +262 -0
- package/hooks/use-fireproof.js +38 -63
- package/package.json +13 -14
- package/src/blockstore.js +8 -4
- package/src/database.js +338 -0
- package/src/db-index.js +3 -3
- package/src/fireproof.js +65 -322
- package/src/listener.js +10 -8
- package/src/prolly.js +10 -6
- package/src/utils.js +16 -0
- package/src/hydrator.js +0 -54
- package/src/index.js +0 -6
package/dist/src/listener.js
ADDED
@@ -0,0 +1,108 @@
/**
 * A Fireproof database Listener allows you to react to events in the database.
 *
 * @class Listener
 * @classdesc An listener attaches to a Fireproof database and runs a routing function on each change, sending the results to subscribers.
 *
 * @param {import('./database.js').Database} database - The Database database instance to index.
 * @param {Function} routingFn - The routing function to apply to each entry in the database.
 */
// import { ChangeEvent } from './db-index'
export class Listener {
    subcribers = new Map();
    doStopListening = null;
    /**
     * @param {import('./database.js').Database} database
     * @param {(_: any, emit: any) => void} routingFn
     */
    constructor(database, routingFn = function (/** @type {any} */ _, /** @type {(arg0: string) => void} */ emit) {
        emit('*');
    }) {
        this.database = database;
        this.doStopListening = database.registerListener((/** @type {any} */ changes) => this.onChanges(changes));
        /**
         * The map function to apply to each entry in the database.
         * @type {Function}
         */
        this.routingFn = routingFn;
        this.dbHead = null;
    }
    /**
     * Subscribe to a topic emitted by the event function.
     * @param {string} topic - The topic to subscribe to.
     * @param {Function} subscriber - The function to call when the topic is emitted.
     * @returns {Function} A function to unsubscribe from the topic.
     * @memberof Listener
     * @instance
     * @param {any} since
     */
    on(topic, subscriber, since) {
        const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
        listOfTopicSubscribers.push(subscriber);
        if (typeof since !== 'undefined') {
            this.database.changesSince(since).then(({ rows: changes }) => {
                const keys = topicsForChanges(changes, this.routingFn).get(topic);
                if (keys)
                    keys.forEach((/** @type {any} */ key) => subscriber(key));
            });
        }
        return () => {
            const index = listOfTopicSubscribers.indexOf(subscriber);
            if (index > -1)
                listOfTopicSubscribers.splice(index, 1);
        };
    }
    /**
     * @typedef {import('./db-index').ChangeEvent} ChangeEvent
     */
    /**
     * @param {ChangeEvent[]} changes
     */
    onChanges(changes) {
        if (Array.isArray(changes)) {
            const seenTopics = topicsForChanges(changes, this.routingFn);
            for (const [topic, keys] of seenTopics) {
                const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
                listOfTopicSubscribers.forEach((/** @type {(arg0: any) => any} */ subscriber) => keys.forEach((/** @type {any} */ key) => subscriber(key)));
            }
        }
        else {
            // non-arrays go to all subscribers
            for (const [, listOfTopicSubscribers] of this.subcribers) {
                listOfTopicSubscribers.forEach((/** @type {(arg0: any) => any} */ subscriber) => subscriber(changes));
            }
        }
    }
}
/**
 * @param {Map<any, any>} subscribersMap
 * @param {string} name
 */
function getTopicList(subscribersMap, name) {
    let topicList = subscribersMap.get(name);
    if (!topicList) {
        topicList = [];
        subscribersMap.set(name, topicList);
    }
    return topicList;
}
/**
 * Transforms a set of changes to events using an emitter function.
 *
 * @param {ChangeEvent[]} changes
 * @param {Function} routingFn
 * @returns {Map<string,string[]>} The topics emmitted by the event function.
 * @private
 */
const topicsForChanges = (changes, routingFn) => {
    const seenTopics = new Map();
    changes.forEach(({ key, value, del }) => {
        if (del || !value)
            value = { _deleted: true };
        routingFn({ _id: key, ...value }, (/** @type {any} */ t) => {
            const topicList = getTopicList(seenTopics, t);
            topicList.push(key);
        });
    });
    return seenTopics;
};
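
For orientation, here is a minimal usage sketch of the Listener API shown above. The stub database object and the completed field are illustrative only, and the import assumes Listener is re-exported from the package root, which this diff does not show.

import { Listener } from '@fireproof/core'

// Minimal stand-in for a Database: just enough surface for the Listener constructor.
// A real Database instance would come from the package's database.js, not shown here.
const database = {
  registerListener: () => () => {},          // Listener subscribes itself here
  changesSince: async () => ({ rows: [] })   // only used when on() is given a `since` value
}

const listener = new Listener(database, (doc, emit) => {
  // routing function: emit zero or more topic names per changed document
  if (doc.completed) emit('completed')
})

// on(topic, subscriber) registers a callback that receives changed document keys;
// the returned function removes the subscription again
const unlisten = listener.on('completed', key => console.log('completed:', key))
unlisten()
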
package/dist/src/prolly.js
ADDED
@@ -0,0 +1,319 @@
import { advance, EventFetcher, EventBlock, findCommonAncestorWithSortedEvents, findEventsToSync, vis as visClock } from './clock.js';
// import { create, load } from '../../../../prolly-trees/src/map.js'
// @ts-ignore
import { create, load } from 'prolly-trees/map';
// @ts-ignore
import { nocache as cache } from 'prolly-trees/cache';
// @ts-ignore
import { CIDCounter, bf, simpleCompare as compare } from 'prolly-trees/utils';
import * as codec from '@ipld/dag-cbor';
import { sha256 as hasher } from 'multiformats/hashes/sha2';
import { doTransaction } from './blockstore.js';
import { create as createBlock } from 'multiformats/block';
const blockOpts = { cache, chunker: bf(3), codec, hasher, compare };
/**
 * @typedef {import('./blockstore.js').TransactionBlockstore} TransactionBlockstore
 */
const withLog = async (label, fn) => {
    const resp = await fn();
    // console.log('withLog', label, !!resp)
    return resp;
};
// should also return a CIDCounter
export const makeGetBlock = (blocks) => {
    // const cids = new CIDCounter() // this could be used for proofs of mutations
    const getBlockFn = async (address) => {
        const { cid, bytes } = await withLog(address, () => blocks.get(address));
        // cids.add({ address: cid })
        return createBlock({ cid, bytes, hasher, codec });
    };
    return {
        // cids,
        getBlock: getBlockFn
    };
};
/**
 *
 * @param {*} param0
 * @returns
 */
async function createAndSaveNewEvent({ inBlocks, bigPut, root, event: inEvent, head, additions, removals = [] }) {
    let cids;
    const { key, value, del } = inEvent;
    const data = {
        root: (root
            ? {
                cid: root.cid,
                bytes: root.bytes,
                value: root.value // can we remove this?
            }
            : null),
        key
    };
    // import('./clock').EventLink<import('./clock').EventData>
    if (del) {
        data.value = null;
        data.type = 'del';
    }
    else {
        data.value = value;
        data.type = 'put';
    }
    /** @type {import('./clock').EventData} */
    // @ts-ignore
    const event = await EventBlock.create(data, head);
    bigPut(event);
    ({ head, cids } = await advance(inBlocks, head, event.cid));
    return {
        root,
        additions,
        removals,
        head,
        clockCIDs: cids,
        event
    };
}
const makeGetAndPutBlock = (inBlocks) => {
    // const mblocks = new MemoryBlockstore()
    // const blocks = new MultiBlockFetcher(mblocks, inBlocks)
    const { getBlock, cids } = makeGetBlock(inBlocks);
    const put = inBlocks.put.bind(inBlocks);
    const bigPut = async (block, additions) => {
        // console.log('bigPut', block.cid.toString())
        const { cid, bytes } = block;
        put(cid, bytes);
        // mblocks.putSync(cid, bytes)
        if (additions) {
            additions.set(cid.toString(), block);
        }
    };
    return { getBlock, bigPut, blocks: inBlocks, cids };
};
const bulkFromEvents = (sorted, event) => {
    if (event) {
        const update = { value: { data: { key: event.key } } };
        if (event.del) {
            update.value.data.type = 'del';
        }
        else {
            update.value.data.type = 'put';
            update.value.data.value = event.value;
        }
        sorted.push(update);
    }
    const bulk = new Map();
    for (const { value: event } of sorted) {
        const { data: { type, value, key } } = event;
        const bulkEvent = type === 'put' ? { key, value } : { key, del: true };
        bulk.set(bulkEvent.key, bulkEvent); // last wins
    }
    return Array.from(bulk.values());
};
// Get the value of the root from the ancestor event
/**
 *
 * @param {EventFetcher} events
 * @param {import('./clock').EventLink<import('./clock').EventData>} ancestor
 * @param {*} getBlock
 * @returns
 */
const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
    // console.log('prollyRootFromAncestor', ancestor)
    const event = await events.get(ancestor);
    const { root } = event.value.data;
    // console.log('prollyRootFromAncestor', root.cid, JSON.stringify(root.value))
    if (root) {
        return load({ cid: root.cid, get: getBlock, ...blockOpts });
    }
    else {
        return null;
    }
};
const doProllyBulk = async (inBlocks, head, event) => {
    const { getBlock, blocks } = makeGetAndPutBlock(inBlocks);
    let bulkSorted = [];
    let prollyRootNode = null;
    if (head.length) {
        // Otherwise, we find the common ancestor and update the root and other blocks
        const events = new EventFetcher(blocks);
        // todo this is returning more events than necessary, lets define the desired semantics from the top down
        // good semantics mean we can cache the results of this call
        const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
        bulkSorted = sorted;
        // console.log('sorted', JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
        prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock);
        // console.log('event', event)
    }
    const bulkOperations = bulkFromEvents(bulkSorted, event);
    // if prolly root node is null, we need to create a new one
    if (!prollyRootNode) {
        let root;
        const newBlocks = [];
        // if all operations are deletes, we can just return an empty root
        if (bulkOperations.every((op) => op.del)) {
            return { root: null, blocks: [] };
        }
        for await (const node of create({ get: getBlock, list: bulkOperations, ...blockOpts })) {
            root = await node.block;
            newBlocks.push(root);
        }
        return { root, blocks: newBlocks };
    }
    else {
        return await prollyRootNode.bulk(bulkOperations); // { root: newProllyRootNode, blocks: newBlocks }
    }
};
/**
 * Put a value (a CID) for the given key. If the key exists it's value is overwritten.
 *
 * @param {import('./blockstore.js').Blockstore} inBlocks Bucket block storage.
 * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
 * @param {{key: string, value: import('./clock').EventLink<import('./clock').EventData>}} event The key of the value to put.
 * @param {object} [options]
 * @returns {Promise<any>}
 */
export async function put(inBlocks, head, event, options) {
    const { bigPut } = makeGetAndPutBlock(inBlocks);
    // If the head is empty, we create a new event and return the root and addition blocks
    if (!head.length) {
        const additions = new Map();
        const { root, blocks } = await doProllyBulk(inBlocks, head, event);
        for (const b of blocks) {
            bigPut(b, additions);
        }
        return createAndSaveNewEvent({ inBlocks, bigPut, root, event, head, additions: Array.from(additions.values()) });
    }
    const { root: newProllyRootNode, blocks: newBlocks } = await doProllyBulk(inBlocks, head, event);
    if (!newProllyRootNode) {
        return createAndSaveNewEvent({
            inBlocks,
            bigPut,
            root: null,
            event,
            head,
            additions: []
        });
    }
    else {
        const prollyRootBlock = await newProllyRootNode.block;
        const additions = new Map(); // ; const removals = new Map()
        bigPut(prollyRootBlock, additions);
        for (const nb of newBlocks) {
            bigPut(nb, additions);
        }
        // additions are new blocks
        return createAndSaveNewEvent({
            inBlocks,
            bigPut,
            root: prollyRootBlock,
            event,
            head,
            additions: Array.from(additions.values()) /*, todo? Array.from(removals.values()) */
        });
    }
}
/**
 * Determine the effective prolly root given the current merkle clock head.
 *
 * @param {TransactionBlockstore} inBlocks Bucket block storage.
 * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
 */
export async function root(inBlocks, head) {
    if (!head.length) {
        throw new Error('no head');
    }
    const { root: newProllyRootNode, blocks: newBlocks, cids } = await doProllyBulk(inBlocks, head);
    // todo maybe these should go to a temp blockstore?
    await doTransaction('root', inBlocks, async (transactionBlocks) => {
        const { bigPut } = makeGetAndPutBlock(transactionBlocks);
        for (const nb of newBlocks) {
            bigPut(nb);
        }
    });
    return { cids, node: newProllyRootNode };
}
/**
 * Get the list of events not known by the `since` event
 * @param {TransactionBlockstore} blocks Bucket block storage.
 * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
 * @param {import('./clock').EventLink<import('./clock').EventData>} since Event to compare against.
 * @returns {Promise<{clockCIDs: CIDCounter, result: import('./clock').EventData[]}>}
 */
export async function eventsSince(blocks, head, since) {
    if (!head.length) {
        throw new Error('no head');
    }
    // @ts-ignore
    const sinceHead = [...since, ...head]; // ?
    const { cids, events: unknownSorted3 } = await findEventsToSync(blocks, sinceHead);
    return { clockCIDs: cids, result: unknownSorted3.map(({ value: { data } }) => data) };
}
/**
 *
 * @param {TransactionBlockstore} blocks Bucket block storage.
 * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
 *
 * @returns {Promise<{cids: CIDCounter, clockCIDs: CIDCounter, result: import('./clock').EventData[]}>}
 *
 */
export async function getAll(blocks, head) {
    // todo use the root node left around from put, etc
    // move load to a central place
    if (!head.length) {
        return { clockCIDs: new CIDCounter(), cids: new CIDCounter(), result: [] };
    }
    const { node: prollyRootNode, cids: clockCIDs } = await root(blocks, head);
    if (!prollyRootNode) {
        return { clockCIDs, cids: new CIDCounter(), result: [] };
    }
    const { result, cids } = await prollyRootNode.getAllEntries(); // todo params
    return { clockCIDs, cids, result: result.map(({ key, value }) => ({ key, value })) };
}
/**
 * @param {TransactionBlockstore} blocks Bucket block storage.
 * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
 * @param {string} key The key of the value to retrieve.
 */
export async function get(blocks, head, key) {
    // instead pass root from db? and always update on change
    if (!head.length) {
        return { cids: new CIDCounter(), result: null };
    }
    const { node: prollyRootNode, cids: clockCIDs } = await root(blocks, head);
    if (!prollyRootNode) {
        return { clockCIDs, cids: new CIDCounter(), result: null };
    }
    const { result, cids } = await prollyRootNode.get(key);
    return { result, cids, clockCIDs };
}
export async function* vis(blocks, head) {
    if (!head.length) {
        return { cids: new CIDCounter(), result: null };
    }
    const { node: prollyRootNode, cids } = await root(blocks, head);
    const lines = [];
    for await (const line of prollyRootNode.vis()) {
        yield line;
        lines.push(line);
    }
    return { vis: lines.join('\n'), cids };
}
export async function visMerkleTree(blocks, head) {
    if (!head.length) {
        return { cids: new CIDCounter(), result: null };
    }
    const { node: prollyRootNode, cids } = await root(blocks, head);
    const lines = [];
    for await (const line of prollyRootNode.vis()) {
        lines.push(line);
    }
    return { vis: lines.join('\n'), cids };
}
export async function visMerkleClock(blocks, head) {
    const lines = [];
    for await (const line of visClock(blocks, head)) {
        // yield line
        lines.push(line);
    }
    return { vis: lines.join('\n') };
}
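
A rough sketch of how these exports fit together. It assumes TransactionBlockstore can be constructed directly from blockstore.js (listed in this diff but not reproduced) and that writes are wrapped in doTransaction, mirroring how root() uses it above; inside the package it is database.js that actually drives these calls. The document key and value are made up.

import { TransactionBlockstore, doTransaction } from './blockstore.js'
import { put, get, getAll } from './prolly.js'

const blocks = new TransactionBlockstore() // constructor arguments, if any, are not shown in this diff
let head = []                              // empty merkle clock head: the first put creates the prolly root

// writes go through a transaction, as root() does above with doTransaction
await doTransaction('put-doc1', blocks, async tblocks => {
  const result = await put(tblocks, head, { key: 'doc1', value: { hello: 'world' } })
  head = result.head // advance the clock to include the new event
})

const { result } = await get(blocks, head, 'doc1')   // -> { hello: 'world' }
const { result: all } = await getAll(blocks, head)   // -> [{ key, value }, ...]
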
package/dist/src/sha1.js
ADDED
@@ -0,0 +1,74 @@
// @ts-nocheck
// from https://github.com/duzun/sync-sha1/blob/master/rawSha1.js
// MIT License Copyright (c) 2020 Dumitru Uzun
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
// import {
//   isLittleEndian, switchEndianness32
// } from 'string-encode'
/**
 * SHA1 on binary array
 *
 * @param {Uint8Array} b Data to hash
 *
 * @return {Uint8Array} sha1 hash
 */
export function rawSha1(b) {
    let i = b.byteLength;
    let bs = 0;
    let A;
    let B;
    let C;
    let D;
    let G;
    const H = Uint32Array.from([A = 0x67452301, B = 0xEFCDAB89, ~A, ~B, 0xC3D2E1F0]);
    const W = new Uint32Array(80);
    const nrWords = (i / 4 + 2) | 15;
    const words = new Uint32Array(nrWords + 1);
    let j;
    words[nrWords] = i * 8;
    words[i >> 2] |= 0x80 << (~i << 3);
    for (; i--;) {
        words[i >> 2] |= b[i] << (~i << 3);
    }
    for (A = H.slice(); bs < nrWords; bs += 16, A.set(H)) {
        for (i = 0; i < 80; A[0] = (G = ((b = A[0]) << 5 | b >>> 27) +
            A[4] +
            (W[i] = (i < 16) ? words[bs + i] : G << 1 | G >>> 31) +
            0x5A827999,
            B = A[1],
            C = A[2],
            D = A[3],
            G + ((j = i / 5 >> 2) // eslint-disable-line no-cond-assign
                ? j !== 2
                    ? (B ^ C ^ D) + (j & 2 ? 0x6FE0483D : 0x14577208)
                    : (B & C | B & D | C & D) + 0x34994343
                : B & C | ~B & D))
            , A[1] = b
            , A[2] = B << 30 | B >>> 2
            , A[3] = C
            , A[4] = D
            , ++i) {
            G = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
        }
        for (i = 5; i;)
            H[--i] = H[i] + A[i];
    }
    // if (isLittleEndian()) {
    // H = H.map(switchEndianness32)
    // }
    return new Uint8Array(H.buffer, H.byteOffset, H.byteLength);
}
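
A short sketch of calling rawSha1 directly; the hex rendering is illustrative and not part of the package.

import { rawSha1 } from './sha1.js'

// hash the UTF-8 bytes of a string; the result is a 20-byte Uint8Array
const digest = rawSha1(new TextEncoder().encode('hello world'))

// illustrative hex output; note the endianness swap in rawSha1 is commented out,
// so on little-endian hosts the byte order differs from canonical SHA-1 digests
const hex = Array.from(digest).map(b => b.toString(16).padStart(2, '0')).join('')
console.log(hex)
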
package/dist/src/utils.js
ADDED
@@ -0,0 +1,16 @@
/* global localStorage */
let storageSupported = false;
try {
    storageSupported = window.localStorage && true;
}
catch (e) { }
export function localGet(key) {
    if (storageSupported) {
        return localStorage && localStorage.getItem(key);
    }
}
export function localSet(key, value) {
    if (storageSupported) {
        return localStorage && localStorage.setItem(key, value);
    }
}
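
These helpers silently no-op when localStorage is unavailable (for example under Node, or when touching window.localStorage throws), so callers need no environment checks. A minimal sketch with a made-up key:

import { localGet, localSet } from './utils.js'

// persist a small JSON value; in environments without localStorage this does nothing
localSet('fireproof:example', JSON.stringify({ seen: true }))

// localGet returns undefined when storage is unsupported, or null when the key is absent
const raw = localGet('fireproof:example')
const value = raw ? JSON.parse(raw) : null
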