@fireproof/core 0.8.0 → 0.10.1-dev
Sign up to get free protection for your applications and to get access to all the features.
- package/README.md +5 -184
- package/dist/fireproof.browser.js +18879 -0
- package/dist/fireproof.browser.js.map +7 -0
- package/dist/fireproof.cjs.js +9305 -0
- package/dist/fireproof.cjs.js.map +7 -0
- package/dist/fireproof.esm.js +9295 -0
- package/dist/fireproof.esm.js.map +7 -0
- package/package.json +57 -105
- package/dist/blockstore.js +0 -268
- package/dist/clock.js +0 -459
- package/dist/crypto.js +0 -63
- package/dist/database.js +0 -434
- package/dist/db-index.js +0 -403
- package/dist/encrypted-block.js +0 -48
- package/dist/fireproof.js +0 -84
- package/dist/import.js +0 -29
- package/dist/listener.js +0 -111
- package/dist/loader.js +0 -13
- package/dist/prolly.js +0 -405
- package/dist/remote.js +0 -102
- package/dist/sha1.js +0 -74
- package/dist/src/fireproof.d.ts +0 -472
- package/dist/src/fireproof.js +0 -81191
- package/dist/src/fireproof.js.map +0 -1
- package/dist/src/fireproof.mjs +0 -81186
- package/dist/src/fireproof.mjs.map +0 -1
- package/dist/storage/base.js +0 -426
- package/dist/storage/blocksToEncryptedCarBlock.js +0 -144
- package/dist/storage/browser.js +0 -62
- package/dist/storage/filesystem.js +0 -67
- package/dist/storage/rest.js +0 -57
- package/dist/storage/ucan.js +0 -0
- package/dist/storage/utils.js +0 -144
- package/dist/sync.js +0 -218
- package/dist/utils.js +0 -16
- package/dist/valet.js +0 -102
- package/src/blockstore.js +0 -283
- package/src/clock.js +0 -486
- package/src/crypto.js +0 -70
- package/src/database.js +0 -469
- package/src/db-index.js +0 -426
- package/src/encrypted-block.js +0 -57
- package/src/fireproof.js +0 -98
- package/src/import.js +0 -34
- package/src/link.d.ts +0 -3
- package/src/loader.js +0 -16
- package/src/prolly.js +0 -445
- package/src/remote.js +0 -113
- package/src/sha1.js +0 -83
- package/src/storage/base.js +0 -463
- package/src/storage/browser.js +0 -67
- package/src/storage/filesystem.js +0 -73
- package/src/storage/rest.js +0 -59
- package/src/storage/ucan.js +0 -0
- package/src/storage/utils.js +0 -152
- package/src/sync.js +0 -237
- package/src/valet.js +0 -105
package/src/prolly.js
DELETED
@@ -1,445 +0,0 @@
|
|
1
|
-
import {
|
2
|
-
advance,
|
3
|
-
EventFetcher,
|
4
|
-
EventBlock,
|
5
|
-
findCommonAncestorWithSortedEvents,
|
6
|
-
findEventsToSync,
|
7
|
-
vis as visClock
|
8
|
-
} from './clock.js'
|
9
|
-
// import { create, load } from '../../../../prolly-trees/src/map.js'
|
10
|
-
// @ts-ignore
|
11
|
-
import { create, load } from 'prolly-trees/map'
|
12
|
-
// @ts-ignore
|
13
|
-
import { nocache as cache } from 'prolly-trees/cache'
|
14
|
-
// @ts-ignore
|
15
|
-
import { CIDCounter, bf, simpleCompare as compare } from 'prolly-trees/utils'
|
16
|
-
import * as codec from '@ipld/dag-cbor'
|
17
|
-
import { sha256 as hasher } from 'multiformats/hashes/sha2'
|
18
|
-
// import { blake2b256 as hasher } from '@multiformats/blake2/blake2b'
|
19
|
-
|
20
|
-
import { doTransaction } from './blockstore.js'
|
21
|
-
import { create as createBlock } from 'multiformats/block'
|
22
|
-
const blockOpts = { cache, chunker: bf(30), codec, hasher, compare } // shared prolly-tree options: bf(30) chunker, dag-cbor codec, sha256 hasher
|
23
|
-
|
24
|
-
// const SYNC_ROOT = 'fireproof' // change this if you want to break sync
|
25
|
-
|
26
|
-
/**
|
27
|
-
* @typedef {import('./blockstore.js').TransactionBlockstore} TransactionBlockstore
|
28
|
-
*/
|
29
|
-
|
30
|
-
// const withLog = async (label, fn) => {
|
31
|
-
// const resp = await fn()
|
32
|
-
// // console.log('withLog', label, !!resp)
|
33
|
-
// return resp
|
34
|
-
// }
|
35
|
-
|
36
|
-
// should also return a CIDCounter
|
37
|
-
/**
 * Wrap a blockstore so reads come back as decoded multiformats Block objects
 * instead of raw { cid, bytes } pairs.
 * @param {*} blocks blockstore exposing async get(address) -> { cid, bytes }
 * @returns {{ getBlock: (address: any) => Promise<any> }}
 */
export const makeGetBlock = blocks => {
  const getBlock = async address => {
    const { cid, bytes } = await blocks.get(address)
    return createBlock({ cid, bytes, hasher, codec })
  }
  return { getBlock }
}
|
50
|
-
|
51
|
-
/**
 * Create a merkle-clock event for a single put/del, persist it, and advance
 * the clock head.
 *
 * @param {object} param0
 * @param {TransactionBlockstore} param0.inBlocks block storage
 * @param {Function} param0.bigPut writes a block (optionally recording it in an additions map)
 * @param {*} param0.root current prolly root node, or null for an empty database
 * @param {{key: string, value?: *, del?: boolean}} param0.event the operation to record
 * @param {import('./clock').EventLink<import('./clock').EventData>[]} param0.head current clock head
 * @param {*} param0.additions blocks added by the prolly write
 * @param {Array} [param0.removals]
 * @returns {Promise<object>} the root, new head, clock CIDs and the created event
 */
async function createAndSaveNewEvent ({ inBlocks, bigPut, root, event: inEvent, head, additions, removals = [] }) {
  const { key, value, del } = inEvent
  const data = {
    root: root ? (await root.address) : null,
    key
  }
  // a delete is recorded as a null value with type 'del'
  if (del) {
    data.value = null
    data.type = 'del'
  } else {
    data.value = value
    data.type = 'put'
  }

  /** @type {import('./clock').EventData} */
  // @ts-ignore
  const event = await EventBlock.create(data, head)
  bigPut(event)
  // @ts-ignore
  const { head: newHead, cids } = await advance(inBlocks, head, event.cid)

  return {
    root,
    additions,
    removals,
    head: newHead,
    clockCIDs: cids,
    event
  }
}
|
114
|
-
|
115
|
-
/**
 * Build the read/write helpers shared by the prolly operations. `bigPut`
 * stores a block in the blockstore and, when an `additions` map is supplied,
 * records it there keyed by CID string.
 */
const makeGetAndPutBlock = inBlocks => {
  // NOTE(review): makeGetBlock does not currently return `cids`, so this is
  // always undefined; kept in the return shape for compatibility.
  const { getBlock, cids } = makeGetBlock(inBlocks)
  const bigPut = async (block, additions) => {
    const { cid, bytes } = block
    inBlocks.put(cid, bytes)
    if (additions) {
      additions.set(cid.toString(), block)
    }
  }
  return { getBlock, bigPut, blocks: inBlocks, cids }
}
|
131
|
-
|
132
|
-
/**
 * Flatten a sorted list of clock events (plus an optional new event) into a
 * deduplicated list of bulk operations for the prolly tree. Later events win
 * when the same key appears more than once.
 */
const bulkFromEvents = (sorted, event) => {
  if (event) {
    const data = { key: event.key }
    if (event.del) {
      data.type = 'del'
    } else {
      data.type = 'put'
      data.value = event.value
    }
    sorted.push({ value: { data } })
  }
  const byKey = new Map()
  for (const { value: { data: { type, value, key } } } of sorted) {
    if (!key) {
      throw new Error('key is required')
    }
    // last wins
    byKey.set(key, type === 'put' ? { key, value } : { key, del: true })
  }
  return Array.from(byKey.values())
}
|
156
|
-
|
157
|
-
// Get the value of the root from the ancestor event
|
158
|
-
/**
 * Load the prolly root referenced by an ancestor clock event.
 *
 * @param {EventFetcher} events
 * @param {import('./clock').EventLink<import('./clock').EventData>} ancestor
 * @param {*} getBlock
 * @returns the loaded root node, or the raw root value when there is none
 *   (false means no common ancestor; null means empty database)
 */
const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
  const ancestorEvent = await events.get(ancestor)
  const { root } = ancestorEvent.value.data
  if (!root) {
    return root
  }
  return load({ cid: root, get: getBlock, ...blockOpts })
}
|
176
|
-
|
177
|
-
/**
 * Compute the prolly-tree changes implied by the clock head plus an optional
 * new event. Returns { root, blocks, clockCIDs } — either a freshly built
 * tree, or the existing root's bulk() response with clockCIDs attached.
 */
const doProllyBulk = async (inBlocks, head, event, doFull = false) => {
  const { getBlock, blocks } = makeGetAndPutBlock(inBlocks) // this is doubled with eventfetcher
  const events = new EventFetcher(blocks)
  let sortedEvents = []
  let prollyRootNode = null

  if (head.length) {
    if (!doFull && head.length === 1) {
      // single head: its event points straight at the current root
      prollyRootNode = await prollyRootFromAncestor(events, head[0], getBlock)
    } else {
      // multiple heads (or full rebuild): merge through the common ancestor
      // todo this is returning more events than necessary, lets define the
      // desired semantics from the top down — good semantics would let us
      // cache the results of this call
      const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head, doFull)
      sortedEvents = sorted
      if (ancestor) {
        prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock)
      }
    }
  }

  const bulkOperations = bulkFromEvents(sortedEvents, event)

  if (prollyRootNode) {
    // apply the operations to the existing tree
    const writeResp = await prollyRootNode.bulk(bulkOperations) // { root, blocks }
    writeResp.clockCIDs = await events.all()
    return writeResp
  }

  // No existing root: if every operation is a delete the database stays empty.
  if (bulkOperations.every(op => op.del)) {
    return { root: null, blocks: [], clockCIDs: await events.all() }
  }
  // Otherwise build a brand new tree from the operations.
  let newRoot
  const newBlocks = []
  for await (const node of create({ get: getBlock, list: bulkOperations, ...blockOpts })) {
    newRoot = node
    newBlocks.push(await node.block)
  }
  return { root: newRoot, blocks: newBlocks, clockCIDs: await events.all() }
}
|
233
|
-
|
234
|
-
/**
 * Put a value (a CID) for the given key. If the key exists its value is
 * overwritten.
 *
 * @param {import('./blockstore.js').Blockstore} inBlocks Bucket block storage.
 * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
 * @param {{key: string, value: import('./clock').EventLink<import('./clock').EventData>}} event The key of the value to put.
 * @param {object} [options]
 * @returns {Promise<any>}
 */
export async function put (inBlocks, head, event, options) {
  const { bigPut } = makeGetAndPutBlock(inBlocks)

  // Empty head: build a brand new tree from this single event.
  if (!head.length) {
    const additions = new Map()
    const { root, blocks } = await doProllyBulk(inBlocks, head, event)
    for (const block of blocks) {
      bigPut(block, additions)
    }
    return createAndSaveNewEvent({ inBlocks, bigPut, root, event, head, additions: Array.from(additions.values()) })
  }

  const { root: newProllyRootNode, blocks: newBlocks } = await doProllyBulk(inBlocks, head, event)

  // A null root means the database is now empty (the write was all deletes).
  if (!newProllyRootNode) {
    return createAndSaveNewEvent({
      inBlocks,
      bigPut,
      root: null,
      event,
      head,
      additions: []
    })
  }

  const additions = new Map() // ; const removals = new Map()
  bigPut(await newProllyRootNode.block, additions)
  for (const block of newBlocks) {
    bigPut(block, additions)
  }
  // additions are the new blocks written for this update
  return createAndSaveNewEvent({
    inBlocks,
    bigPut,
    root: newProllyRootNode, // Block
    event,
    head,
    additions: Array.from(additions.values()) /*, todo? Array.from(removals.values()) */
  })
}
|
284
|
-
|
285
|
-
/**
 * Determine the effective prolly root given the current merkle clock head.
 *
 * @param {TransactionBlockstore} inBlocks Bucket block storage.
 * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
 * @param {boolean} [doFull] force a full merge instead of the fast single-head path
 */
export async function root (inBlocks, head, doFull = false) {
  if (!head.length) {
    throw new Error('no head')
  }
  // todo maybe these blocks should go to a temp blockstore?
  const doRoot = async transactionBlocks => {
    const { bigPut } = makeGetAndPutBlock(transactionBlocks)
    const { root: newProllyRootNode, blocks: newBlocks, clockCIDs } = await doProllyBulk(inBlocks, head, null, doFull)
    for (const block of newBlocks) {
      bigPut(block)
    }
    return { clockCIDs, node: newProllyRootNode, head }
  }
  return await doTransaction('root', inBlocks, doRoot, false)
}
|
316
|
-
|
317
|
-
/**
 * Get the list of events not known by the `since` event.
 * @param {TransactionBlockstore} blocks Bucket block storage.
 * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
 * @param {import('./clock').EventLink<import('./clock').EventData>} since Event to compare against.
 * @returns {Promise<{clockCIDs: CIDCounter, result: import('./clock').EventData[]}>}
 */
export async function eventsSince (blocks, head, since) {
  if (!head.length) {
    return { clockCIDs: [], result: [] }
  }
  // @ts-ignore
  const sinceHead = [...since, ...head].map(h => h.toString()) // ?
  const { cids, events: unknownSorted } = await findEventsToSync(blocks, sinceHead)
  const result = unknownSorted.map(({ value: { data } }) => data)
  return { clockCIDs: cids, result }
}
|
335
|
-
|
336
|
-
/**
 * Read every key/value entry reachable from the current head.
 *
 * @param {TransactionBlockstore} blocks Bucket block storage.
 * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
 * @param {*} [rootCache] optional { root, clockCIDs } cache from a prior call
 * @param {boolean} [doFull]
 * @returns {Promise<{root: any, cids: CIDCounter, clockCIDs: CIDCounter, result: import('./clock').EventData[]}>}
 */
export async function getAll (blocks, head, rootCache = null, doFull = false) {
  if (!head.length) {
    return { root: null, clockCIDs: new CIDCounter(), cids: new CIDCounter(), result: [] }
  }
  const { node: prollyRootNode, clockCIDs } = await rootOrCache(blocks, head, rootCache, doFull)
  if (!prollyRootNode) {
    return { root: null, clockCIDs, cids: new CIDCounter(), result: [] }
  }
  const { result, cids } = await prollyRootNode.getAllEntries() // todo params
  const entries = result.map(({ key, value }) => ({ key, value }))
  return { root: prollyRootNode, clockCIDs, cids, result: entries }
}
|
356
|
-
|
357
|
-
/**
 * Resolve the prolly root, preferring a caller-supplied cache unless a full
 * rebuild was requested.
 */
async function rootOrCache (blocks, head, rootCache, doFull = false) {
  if (!doFull && rootCache && rootCache.root) {
    return { node: rootCache.root, clockCIDs: rootCache.clockCIDs }
  }
  const { node, clockCIDs } = await root(blocks, head, doFull)
  return { node, clockCIDs }
}
|
381
|
-
|
382
|
-
/**
 * @param {TransactionBlockstore} blocks Bucket block storage.
 * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
 * @param {string} key The key of the value to retrieve.
 * @param {*} [rootCache] optional { root, clockCIDs } cache from a prior call
 */
export async function get (blocks, head, key, rootCache = null) {
  // instead pass root from db? and always update on change
  if (!head.length) {
    return { cids: new CIDCounter(), result: null }
  }
  const { node: prollyRootNode, clockCIDs } = await rootOrCache(blocks, head, rootCache)
  if (!prollyRootNode) {
    return { clockCIDs, cids: new CIDCounter(), result: null }
  }
  const { result, cids } = await prollyRootNode.get(key)
  return { result, cids, clockCIDs, root: prollyRootNode }
}
|
401
|
-
|
402
|
-
/**
 * Yield the prolly-tree visualisation line by line; the generator's return
 * value carries the joined text and the CIDs touched while finding the root.
 */
export async function * vis (blocks, head) {
  if (!head.length) {
    return { cids: new CIDCounter(), result: null }
  }
  // FIX: `root()` returns { clockCIDs, node, head } — the previous code
  // destructured a non-existent `cids` property, so the generator's return
  // value always carried `cids: undefined`.
  const { node: prollyRootNode, clockCIDs: cids } = await root(blocks, head)
  const lines = []
  for await (const line of prollyRootNode.vis()) {
    yield line
    lines.push(line)
  }
  return { vis: lines.join('\n'), cids }
}
|
414
|
-
|
415
|
-
/**
 * Render the prolly tree as a string, either from a direct root CID or from a
 * merkle clock head.
 *
 * @param {TransactionBlockstore} blocks Bucket block storage.
 * @param {*} head a clock head array, or a single root CID
 */
export async function visMerkleTree (blocks, head) {
  // `head` as a single CID: load the tree directly from that root.
  if (head && !Array.isArray(head)) {
    const getBl = makeGetBlock(blocks)
    const prollyRootNode = await load({
      cid: head,
      get: getBl.getBlock,
      ...blockOpts
    })
    const lines = []
    for await (const line of prollyRootNode.vis()) {
      lines.push(line)
    }
    return { vis: lines.join('\n'), cids: new CIDCounter() }
  }
  // FIX: `root()` returns { clockCIDs, node, head }; the previous code
  // destructured a non-existent `cids` property, so `cids` was undefined.
  const { node: prollyRootNode, clockCIDs: cids } = await root(blocks, head)
  const lines = []
  for await (const line of prollyRootNode.vis()) {
    lines.push(line)
  }
  return { vis: lines.join('\n'), cids }
}
|
437
|
-
|
438
|
-
/**
 * Render the merkle clock visualisation as a single newline-joined string.
 */
export async function visMerkleClock (blocks, head) {
  const lines = []
  for await (const line of visClock(blocks, head)) lines.push(line)
  return { vis: lines.join('\n') }
}
|
package/src/remote.js
DELETED
@@ -1,113 +0,0 @@
|
|
1
|
-
// when you call database.connect(email)
|
2
|
-
// it will return a promise that resolves when the user is logged in
|
3
|
-
// and sends you an email
|
4
|
-
|
5
|
-
import { create } from '@web3-storage/w3up-client'
|
6
|
-
import * as w3clock from '@web3-storage/clock/client'
|
7
|
-
import { CID } from 'multiformats'
|
8
|
-
|
9
|
-
/**
 * Experimental remote sync over web3.storage: a w3clock holds the remote
 * merkle-clock head, and `connect` provisions the w3up client and space.
 * NOTE(review): `sync` only decides push/pull direction; the actual
 * upload/download paths are still sketched in comments below — confirm
 * before relying on it.
 */
export class Remote {
  // w3up-client instance; null until connect() succeeds
  client = null
  // database name; used to derive the space name ('fp.' + name)
  name = 'unset'
  config = {}

  /**
   * @param {*} database the Fireproof database this remote syncs
   * @param {string} name
   * @param {object} config
   */
  constructor (database, name, config) {
    this.name = name
    this.config = config
    this.database = database
  }

  /**
   * Advance the remote w3clock with the given event CID.
   * Requires connect() to have been called (reads this.client).
   * @param {string} cid string form of the clock event CID to push
   * @returns {Promise<{head: *, adv: *}>} remote head and advance results
   */
  async clock (cid) {
    // const did = this.client.currentSpace()
    const agent = this.client.agent()
    const head = await w3clock.head({ issuer: agent, with: agent.did(), proofs: [] })
    console.log('head', head, JSON.stringify(head.root.data.ocm.out))
    const headCids = head.root.data.ocm.out.ok.head
    // fetch the new event block plus the current remote head blocks locally
    const blocks = await Promise.all([this.database.blocks.get(CID.parse(cid)),
      ...headCids.map(c => this.database.blocks.get(c))])

    console.log('blocks', blocks)
    const adv = await w3clock.advance({ issuer: agent, with: agent.did(), proofs: [] }, CID.parse(cid)
      , { blocks }
    )
    console.log('adv', adv, JSON.stringify(adv.root.data.ocm.out))
    return { head, adv }
  }

  /**
   * Compare the remote clock head, the last-synced head, and the local clock
   * to decide whether to push, pull, or do nothing. Returns true when fully
   * in sync; otherwise currently only logs the chosen direction.
   * @param {string} cid
   */
  async sync (cid) {
    // fetch the remote clock headCids using w3clock.head
    const agent = this.client.agent()
    const head = await w3clock.head({ issuer: agent, with: agent.did(), proofs: [] })
    console.log('head', head, JSON.stringify(head.root.data.ocm.out))
    const headCids = head.root.data.ocm.out.ok.head
    const lastSyncHead = await this.database.blocks.valet.primary.getLastSynced()
    console.log('lastSyncHead', lastSyncHead)
    const headSet = new Set(headCids.map(c => c.toString()))
    const lastSyncSet = new Set(lastSyncHead.map(c => c.toString()))

    // are they the same?
    const same = headSet.size === lastSyncSet.size && [...headSet].every(value => lastSyncSet.has(value))

    // if the headCids and the lastSyncHead are the same, we are in sync and can push
    if (same) {
      const currentHead = this.database.clock
      const currentHeadSet = new Set(currentHead.map(c => c.toString()))

      console.log('synced with cloud', headSet, lastSyncSet)

      // are they the same?
      const currentSame = headSet.size === currentHeadSet.size && [...headSet].every(value => currentHeadSet.has(value))
      if (currentSame) {
        // we are in sync, do nothing
        return true
      } else {
        console.log('push to cloud', headSet, currentHeadSet)
        // we are ahead of the remote, push our clock
        // const lastCompact = this.database.blocks.valet.primary.getLastCompact()
        // get a compact since the last sync
        console.log('we are ahead of the remote, push our clock')
        // const compact = this.database.blocks.valet.primary.getCompactSince(lastSyncHead)
      }
    } else {
      // we are behind, fetch the remote
      console.log('we are behind, fetch the remote')
    }

    // if it is the same as the local (current metadata carcid? `newValetCidCar` / sync clock), do nothing, we are in sync
    // if it is the same as our previously pushed clock event, but our local clock is ahead of it, we need to push our clock
    // - we can store the previous clock event cid in the metadata
    // - sending our updates:
    // - get the _last_sync and _last_compact values from our metadata
    // - if last sync is after last compact
    // - make a merged car file for the syncs
    // - else
    // - upload the car file for the last compact
    // - make a merge car file for any uncompacted car files since the last compact, it should base its cidMap on the compact car file (as we go the sync stream will need to track it's own cidMap)
    // - if there is only one car file, it is the merge car file (already based on last compact)
    // - upload the merge car file
    // - create a new clock block with the current w3clock.head as parent and the merge car file cid as the data
    // - update the remote clock with the new clock block (it doesn't need to fetch the car file, and we dont need to store the clock blocks locally, just the most recent one)
    //
    // else if the remote head is not contained by our clock, it is is ahead of the local sync clock.
    // - get the car file it points to from its data field
    // - merge to the local clock (park that car so we have both carcid indexes)
    // - calculate a new root from the merged head, and update the local clock
  }

  /**
   * Authorize with web3.storage via email, then create, select, and register
   * a space named after this database. Errors are logged, not rethrown.
   * @param {string} email
   */
  async connect (email) {
    try {
      const client = await create()
      await client.authorize(email)
      const claims = await client.capability.access.claim()
      console.log('claims', claims)
      const space = await client.createSpace('fp.' + this.name)
      console.log('space', space)
      await client.setCurrentSpace(space.did())
      await client.registerSpace(email)
      this.client = client
      console.log('client', client)
    } catch (err) {
      console.error('registration failed: ', err)
    }
  }
}
|
package/src/sha1.js
DELETED
@@ -1,83 +0,0 @@
|
|
1
|
-
// @ts-nocheck
|
2
|
-
// from https://github.com/duzun/sync-sha1/blob/master/rawSha1.js
|
3
|
-
// MIT License Copyright (c) 2020 Dumitru Uzun
|
4
|
-
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
5
|
-
// of this software and associated documentation files (the "Software"), to deal
|
6
|
-
// in the Software without restriction, including without limitation the rights
|
7
|
-
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
8
|
-
// copies of the Software, and to permit persons to whom the Software is
|
9
|
-
// furnished to do so, subject to the following conditions:
|
10
|
-
|
11
|
-
// The above copyright notice and this permission notice shall be included in all
|
12
|
-
// copies or substantial portions of the Software.
|
13
|
-
|
14
|
-
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
15
|
-
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
16
|
-
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
17
|
-
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
18
|
-
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
19
|
-
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
20
|
-
// SOFTWARE.
|
21
|
-
|
22
|
-
// import {
|
23
|
-
// isLittleEndian, switchEndianness32
|
24
|
-
// } from 'string-encode'
|
25
|
-
|
26
|
-
/**
 * SHA1 on binary array.
 *
 * NOTE: the digest is returned as the raw bytes of the internal Uint32Array
 * state, so byte order within each 32-bit word follows the platform's
 * endianness (the upstream endianness swap is deliberately commented out —
 * downstream code in this package consumes the words, not the byte stream).
 *
 * FIX: the original reused the `b` parameter as a scratch variable inside a
 * comma-operator loop-update expression; this version keeps the same
 * arithmetic but uses explicit locals and an explicit round loop.
 *
 * @param {Uint8Array} b Data to hash
 *
 * @return {Uint8Array} sha1 hash (20 bytes)
 */
export function rawSha1 (b) {
  let i = b.byteLength
  // Initial hash state (H2 = ~H0 and H3 = ~H1 per the SHA-1 constants).
  const A = 0x67452301
  const B = 0xEFCDAB89
  const H = Uint32Array.from([A, B, ~A, ~B, 0xC3D2E1F0])
  const W = new Uint32Array(80)
  // Pad the message out to a multiple of 16 words, reserving one extra word
  // for the bit length ((n/4 + 2) | 15 rounds up to ...15 mod 16).
  const nrWords = (i / 4 + 2) | 15
  const words = new Uint32Array(nrWords + 1)

  words[nrWords] = i * 8 // message length in bits
  words[i >> 2] |= 0x80 << (~i << 3) // mandatory 0x80 padding byte
  for (; i--;) {
    words[i >> 2] |= b[i] << (~i << 3) // pack bytes big-endian into words
  }

  const state = H.slice()
  for (let bs = 0; bs < nrWords; bs += 16) {
    state.set(H)
    for (i = 0; i < 80; i++) {
      // Message schedule: first 16 words come from the block, the rest are
      // a left-rotated xor of earlier schedule words.
      if (i < 16) {
        W[i] = words[bs + i]
      } else {
        const w = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16]
        W[i] = w << 1 | w >>> 31
      }
      const a = state[0]
      const round = (i / 5) >> 2 // === Math.floor(i / 20): rounds 0..3
      let f
      if (round === 0) {
        f = state[1] & state[2] | ~state[1] & state[3] // Ch
      } else if (round === 2) {
        // Maj; 0x5A827999 + 0x34994343 === K2 (0x8F1BBCDC)
        f = (state[1] & state[2] | state[1] & state[3] | state[2] & state[3]) + 0x34994343
      } else {
        // Parity; the addend turns K0 into K1 (0x6ED9EBA1) or K3 (0xCA62C1D6)
        f = (state[1] ^ state[2] ^ state[3]) + (round & 2 ? 0x6FE0483D : 0x14577208)
      }
      const t = (a << 5 | a >>> 27) + state[4] + W[i] + 0x5A827999 + f
      state[4] = state[3]
      state[3] = state[2]
      state[2] = state[1] << 30 | state[1] >>> 2
      state[1] = a
      state[0] = t // Uint32Array assignment truncates mod 2^32
    }
    for (i = 0; i < 5; i++) H[i] += state[i]
  }

  // if (isLittleEndian()) {
  //   H = H.map(switchEndianness32)
  // }

  return new Uint8Array(H.buffer, H.byteOffset, H.byteLength)
}
|