@fireproof/core 0.5.6 → 0.5.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -0
- package/dist/src/fireproof.d.ts +149 -108
- package/dist/src/fireproof.js +2771 -581
- package/dist/src/fireproof.js.map +1 -1
- package/dist/src/fireproof.mjs +2771 -582
- package/dist/src/fireproof.mjs.map +1 -1
- package/package.json +4 -2
- package/src/blockstore.js +32 -16
- package/src/clock.js +42 -10
- package/src/database.js +52 -13
- package/src/db-index.js +14 -3
- package/src/fireproof.js +2 -40
- package/src/listener.js +1 -1
- package/src/prolly.js +90 -53
- package/src/sync.js +201 -0
- package/src/valet.js +32 -6
package/src/prolly.js
CHANGED
@@ -17,23 +17,24 @@ import * as codec from '@ipld/dag-cbor'
 import { sha256 as hasher } from 'multiformats/hashes/sha2'
 import { doTransaction } from './blockstore.js'
 import { create as createBlock } from 'multiformats/block'
-const blockOpts = { cache, chunker: bf(
+const blockOpts = { cache, chunker: bf(30), codec, hasher, compare }
 
 /**
  * @typedef {import('./blockstore.js').TransactionBlockstore} TransactionBlockstore
  */
 
-const withLog = async (label, fn) => {
-  const resp = await fn()
-  // console.log('withLog', label, !!resp)
-  return resp
-}
+// const withLog = async (label, fn) => {
+//   const resp = await fn()
+//   // console.log('withLog', label, !!resp)
+//   return resp
+// }
 
 // should also return a CIDCounter
-export const makeGetBlock = (blocks) => {
+export const makeGetBlock = blocks => {
   // const cids = new CIDCounter() // this could be used for proofs of mutations
-  const getBlockFn = async
-  const { cid, bytes } = await withLog(address, () => blocks.get(address))
+  const getBlockFn = async address => {
+    // const { cid, bytes } = await withLog(address, () => blocks.get(address))
+    const { cid, bytes } = await blocks.get(address)
     // cids.add({ address: cid })
     return createBlock({ cid, bytes, hasher, codec })
   }
@@ -48,25 +49,17 @@ export const makeGetBlock = (blocks) => {
  * @param {*} param0
  * @returns
  */
-async function createAndSaveNewEvent ({
-  inBlocks,
-  bigPut,
-  root,
-  event: inEvent,
-  head,
-  additions,
-  removals = []
-}) {
+async function createAndSaveNewEvent ({ inBlocks, bigPut, root, event: inEvent, head, additions, removals = [] }) {
   let cids
   const { key, value, del } = inEvent
   const data = {
-    root:
+    root: root
       ? {
           cid: root.cid,
          bytes: root.bytes, // can we remove this?
          value: root.value // can we remove this?
        }
-      : null
+      : null,
    key
  }
  // import('./clock').EventLink<import('./clock').EventData>
@@ -94,7 +87,7 @@ async function createAndSaveNewEvent ({
  }
 }
 
-const makeGetAndPutBlock =
+const makeGetAndPutBlock = inBlocks => {
   // const mblocks = new MemoryBlockstore()
   // const blocks = new MultiBlockFetcher(mblocks, inBlocks)
   const { getBlock, cids } = makeGetBlock(inBlocks)
@@ -153,20 +146,24 @@ const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
   }
 }
 
-const doProllyBulk = async (inBlocks, head, event) => {
+const doProllyBulk = async (inBlocks, head, event, doFull = false) => {
   const { getBlock, blocks } = makeGetAndPutBlock(inBlocks)
   let bulkSorted = []
   let prollyRootNode = null
+  const events = new EventFetcher(blocks)
   if (head.length) {
-
-
+    if (!doFull && head.length === 1) {
+      prollyRootNode = await prollyRootFromAncestor(events, head[0], getBlock)
+    } else {
+      // Otherwise, we find the common ancestor and update the root and other blocks
      // todo this is returning more events than necessary, lets define the desired semantics from the top down
      // good semantics mean we can cache the results of this call
-
-
-
-
+      const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head, doFull)
+      bulkSorted = sorted
+      // console.log('sorted', JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
+      prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock)
      // console.log('event', event)
+    }
   }
 
   const bulkOperations = bulkFromEvents(bulkSorted, event)
@@ -176,16 +173,18 @@ const doProllyBulk = async (inBlocks, head, event) => {
     let root
     const newBlocks = []
     // if all operations are deletes, we can just return an empty root
-    if (bulkOperations.every(
-      return { root: null, blocks: [] }
+    if (bulkOperations.every(op => op.del)) {
+      return { root: null, blocks: [], clockCIDs: await events.all() }
     }
     for await (const node of create({ get: getBlock, list: bulkOperations, ...blockOpts })) {
       root = await node.block
       newBlocks.push(root)
     }
-    return { root, blocks: newBlocks }
+    return { root, blocks: newBlocks, clockCIDs: await events.all() }
   } else {
-
+    const writeResp = await prollyRootNode.bulk(bulkOperations) // { root: newProllyRootNode, blocks: newBlocks }
+    writeResp.clockCIDs = await events.all()
+    return writeResp
   }
 }
 
@@ -194,7 +193,7 @@ const doProllyBulk = async (inBlocks, head, event) => {
  *
  * @param {import('./blockstore.js').Blockstore} inBlocks Bucket block storage.
  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
- * @param {{key: string, value: import('./clock').EventLink<import('./clock').EventData>}} event The key of the value to put.
+ * @param {{key: string, value: import('./clock').EventLink<import('./clock').EventData>}} event The key of the value to put.
  * @param {object} [options]
  * @returns {Promise<any>}
  */
@@ -246,19 +245,25 @@ export async function put (inBlocks, head, event, options) {
  * @param {TransactionBlockstore} inBlocks Bucket block storage.
  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
  */
-export async function root (inBlocks, head) {
+export async function root (inBlocks, head, doFull = false) {
   if (!head.length) {
     throw new Error('no head')
   }
-
+  // console.log('root', head.map(h => h.toString()))
+  const { root: newProllyRootNode, blocks: newBlocks, clockCIDs } = await doProllyBulk(inBlocks, head, null, doFull)
   // todo maybe these should go to a temp blockstore?
-  await doTransaction(
-
-
-
-
-
-
+  await doTransaction(
+    'root',
+    inBlocks,
+    async transactionBlocks => {
+      const { bigPut } = makeGetAndPutBlock(transactionBlocks)
+      for (const nb of newBlocks) {
+        bigPut(nb)
+      }
+    },
+    false
+  )
+  return { clockCIDs, node: newProllyRootNode }
 }
 
 /**
@@ -284,21 +289,40 @@ export async function eventsSince (blocks, head, since) {
  * @param {TransactionBlockstore} blocks Bucket block storage.
  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
  *
- * @returns {Promise<{cids: CIDCounter, clockCIDs: CIDCounter, result: import('./clock').EventData[]}>}
+ * @returns {Promise<{root: any, cids: CIDCounter, clockCIDs: CIDCounter, result: import('./clock').EventData[]}>}
  *
  */
-export async function getAll (blocks, head) {
+export async function getAll (blocks, head, rootCache = null, doFull = false) {
   // todo use the root node left around from put, etc
   // move load to a central place
   if (!head.length) {
-    return { clockCIDs: new CIDCounter(), cids: new CIDCounter(), result: [] }
+    return { root: null, clockCIDs: new CIDCounter(), cids: new CIDCounter(), result: [] }
   }
-  const { node: prollyRootNode,
+  const { node: prollyRootNode, clockCIDs } = await rootOrCache(blocks, head, rootCache, doFull)
+
   if (!prollyRootNode) {
-    return { clockCIDs, cids: new CIDCounter(), result: [] }
+    return { root: null, clockCIDs, cids: new CIDCounter(), result: [] }
   }
   const { result, cids } = await prollyRootNode.getAllEntries() // todo params
-  return { clockCIDs, cids, result: result.map(({ key, value }) => ({ key, value })) }
+  return { root: prollyRootNode, clockCIDs, cids, result: result.map(({ key, value }) => ({ key, value })) }
+}
+
+async function rootOrCache (blocks, head, rootCache, doFull = false) {
+  let node
+  let clockCIDs
+  if (!doFull && rootCache && rootCache.root) {
+    // console.log('get root from cache', rootCache)
+    node = rootCache.root
+    clockCIDs = rootCache.clockCIDs
+  } else {
+    // console.log('finding root')
+    // const callTag = Math.random().toString(36).substring(7)
+    // console.time(callTag + '.root')
+    ;({ node, clockCIDs } = await root(blocks, head, doFull))
+    // console.timeEnd(callTag + '.root')
+    // console.log('found root')
+  }
+  return { node, clockCIDs }
 }
 
 /**
@@ -306,17 +330,19 @@ export async function getAll (blocks, head) {
  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
  * @param {string} key The key of the value to retrieve.
  */
-export async function get (blocks, head, key) {
+export async function get (blocks, head, key, rootCache = null) {
   // instead pass root from db? and always update on change
   if (!head.length) {
     return { cids: new CIDCounter(), result: null }
   }
-
+
+  const { node: prollyRootNode, clockCIDs } = await rootOrCache(blocks, head, rootCache)
+
   if (!prollyRootNode) {
     return { clockCIDs, cids: new CIDCounter(), result: null }
   }
   const { result, cids } = await prollyRootNode.get(key)
-  return { result, cids, clockCIDs }
+  return { result, cids, clockCIDs, root: prollyRootNode }
 }
 
 export async function * vis (blocks, head) {
@@ -333,8 +359,19 @@ export async function * vis (blocks, head) {
 }
 
 export async function visMerkleTree (blocks, head) {
-  if (!head
-
+  // if (!head) return
+  if (head && !Array.isArray(head)) {
+    const getBl = makeGetBlock(blocks)
+    const prollyRootNode = await load({
+      cid: head,
+      get: getBl.getBlock,
+      ...blockOpts
+    })
+    const lines = []
+    for await (const line of prollyRootNode.vis()) {
+      lines.push(line)
+    }
+    return { vis: lines.join('\n'), cids: new CIDCounter() }
   }
   const { node: prollyRootNode, cids } = await root(blocks, head)
   const lines = []
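
The prolly.js changes thread a reusable root through the read path: get and getAll now accept a rootCache argument and return the prolly root node alongside the result, and the new rootOrCache helper skips re-deriving the root from the clock head when a cached one is supplied. The following is a minimal caller-side sketch of that round trip, not code from the package (database.js is the real caller; blocks and head stand in for a TransactionBlockstore and a Merkle clock head):

import { get, getAll } from './prolly.js'

let rootCache = null // becomes { root, clockCIDs } after the first successful read

async function cachedGet (blocks, head, key) {
  const { result, cids, clockCIDs, root } = await get(blocks, head, key, rootCache)
  if (root) rootCache = { root, clockCIDs } // reuse the prolly root on the next read
  return { result, cids }
}

async function cachedGetAll (blocks, head) {
  const { result, root, clockCIDs } = await getAll(blocks, head, rootCache)
  if (root) rootCache = { root, clockCIDs }
  return result
}
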
package/src/sync.js
ADDED
@@ -0,0 +1,201 @@
+import SimplePeer from 'simple-peer'
+import { parseCID } from './database.js'
+import { decodeEventBlock } from './clock.js'
+import { blocksToCarBlock, blocksToEncryptedCarBlock } from './valet.js'
+import { CarReader } from '@ipld/car'
+
+/**
+ * @typedef {import('./database.js').Database} Database
+ */
+export class Sync {
+  /**
+   * @param {Database} database
+   * @param {typeof SimplePeer} [PeerClass]
+   * @memberof Sync
+   * @static
+   */
+  constructor (database, PeerClass = SimplePeer) {
+    this.database = database
+    this.database.blocks.syncs.add(this) // should this happen during setup?
+    this.PeerClass = PeerClass
+    this.pushBacklog = new Promise((resolve, reject) => {
+      this.pushBacklogResolve = resolve
+      this.pushBacklogReject = reject
+    })
+    // this.pushBacklog.then(() => {
+    //   // console.log('sync backlog resolved')
+    //   this.database.notifyReset()
+    // })
+  }
+
+  async offer () {
+    return this.setupPeer(true)
+  }
+
+  async accept (base64offer) {
+    const offer = JSON.parse(atob(base64offer))
+    const p = this.setupPeer(false)
+    this.peer.signal(offer)
+    return p
+  }
+
+  connect (base64accept) {
+    const accept = JSON.parse(atob(base64accept))
+    this.peer.signal(accept)
+  }
+
+  async setupPeer (initiator = false) {
+    this.peer = new this.PeerClass({
+      initiator,
+      trickle: false
+    })
+    this.peer.on('connect', () => this.startSync())
+    this.peer.on('data', data => this.gotData(data))
+    const p = new Promise((resolve, reject) => {
+      this.peer.on('signal', resolve)
+      this.peer.on('error', reject)
+    })
+    return p.then(signal => btoa(JSON.stringify(signal)))
+  }
+
+  async backlog () {
+    return this.pushBacklog
+  }
+
+  async gotData (data) {
+    // console.log('got data', data.toString())
+    let reader = null
+    try {
+      reader = await CarReader.fromBytes(data)
+    } catch (e) {
+      // console.log('not a car', data.toString())
+    }
+    if (reader) {
+      const blz = new Set()
+      for await (const block of reader.blocks()) {
+        blz.add(block)
+      }
+      const roots = await reader.getRoots()
+      // console.log(
+      //   'got car',
+      //   roots.map(c => c.toString()),
+      //   this.database.clock.map(c => c.toString())
+      // )
+      // console.log(
+      //   'got blocks',
+      //   [...blz].map(({ cid }) => cid.toString())
+      // )
+      // @ts-ignore
+      reader.entries = reader.blocks
+      await this.database.blocks.commit({
+        label: 'sync',
+        entries: () => [...blz],
+        get: async cid => await reader.get(cid),
+        lastCid: [...blz][0].cid // doesn't matter
+      }, false)
+      // first arg could be the roots parents?
+      // get the roots parents
+      const parents = await Promise.all(roots.map(async (cid) => {
+        const rbl = await reader.get(cid)
+        const block = await decodeEventBlock(rbl.bytes)
+        return block.value.parents
+      }))
+      this.database.applyClock(parents.flat(), roots)
+      this.database.notifyReset()
+      // console.log('after', this.database.clockToJSON())
+      this.pushBacklogResolve({ ok: true })
+    } else {
+      // data is a json string, parse it
+      const message = JSON.parse(data.toString())
+      // console.log('got message', message)
+      if (message.ok) {
+        this.pushBacklogResolve({ ok: true })
+      } else if (message.clock) {
+        const reqCidDiff = message
+        // this might be a CID diff
+        // console.log('got diff', reqCidDiff)
+        const carBlock = await Sync.makeCar(this.database, null, reqCidDiff.cids)
+        if (!carBlock) {
+          // we are full synced
+          // console.log('we are full synced')
+          this.peer.send(JSON.stringify({ ok: true }))
+          // this.pushBacklogResolve({ ok: true })
+        } else {
+          // console.log('do send', carBlock.bytes.length)
+          this.peer.send(carBlock.bytes)
+          // this.pushBacklogResolve({ ok: true })
+        }
+      }
+    }
+  }
+
+  async sendUpdate (blockstore) {
+    // console.log('send update from', this.database.instanceId)
+    // todo should send updates since last sync
+    const newCar = await blocksToCarBlock(blockstore.lastCid, blockstore)
+    this.peer.send(newCar.bytes)
+  }
+
+  async startSync () {
+    // console.log('start sync', this.peer.initiator)
+    const allCIDs = await this.database.allStoredCIDs()
+    // console.log('allCIDs', allCIDs)
+    const reqCidDiff = {
+      clock: this.database.clockToJSON(),
+      cids: allCIDs.map(cid => cid.toString())
+    }
+    // console.log('send diff', reqCidDiff)
+    this.peer.send(JSON.stringify(reqCidDiff))
+  }
+
+  // get all the cids
+  // tell valet to make a file
+  /**
+   * @param {import("./database.js").Database} database
+   * @param {string} key
+   */
+  static async makeCar (database, key, skip = []) {
+    const allCIDs = await database.allCIDs()
+    const blocks = database.blocks
+    const rootCIDs = database.clock
+
+    const syncCIDs = [...new Set([...rootCIDs, ...allCIDs])].filter(cid => !skip.includes(cid.toString()))
+    // console.log(
+    //   'makeCar',
+    //   rootCIDs.map(c => c.toString()),
+    //   syncCIDs.map(c => c.toString()),
+    //   allCIDs.map(c => c.toString())
+    // )
+    if (syncCIDs.length === 0) {
+      return null
+    }
+
+    if (typeof key === 'undefined') {
+      key = blocks.valet?.getKeyMaterial()
+    }
+    if (key) {
+      return blocksToEncryptedCarBlock(
+        rootCIDs,
+        {
+          entries: () => syncCIDs.map(cid => ({ cid })),
+          get: async cid => await blocks.get(cid)
+        },
+        key
+      )
+    } else {
+      const carBlocks = await Promise.all(
+        syncCIDs.map(async c => {
+          const b = await blocks.get(c)
+          if (typeof b.cid === 'string') {
+            b.cid = parseCID(b.cid)
+          }
+          return b
+        })
+      )
+      // console.log('carblock')
+      return blocksToCarBlock(rootCIDs, {
+        entries: () => carBlocks
+      })
+    }
+  }
+}
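
sync.js is new in 0.5.8 and wires two databases together over a simple-peer WebRTC channel: offer() produces a base64 offer, accept() consumes it and produces a base64 answer, connect() completes signaling, and the connected peers then exchange a CID diff followed by a CAR file of the missing blocks. Below is a hedged sketch of how two databases might be paired; the dbA/dbB instances and the in-process exchange of the signaling strings are assumptions (normally those strings travel out of band):

import { Sync } from './sync.js'

async function pairDatabases (dbA, dbB) {
  const syncA = new Sync(dbA) // initiator
  const syncB = new Sync(dbB) // responder

  const offer = await syncA.offer() // base64 WebRTC offer
  const accept = await syncB.accept(offer) // base64 WebRTC answer
  syncA.connect(accept) // applies the answer; the 'connect' event then triggers startSync()

  // backlog() resolves once each side has received (or acknowledged) the blocks it was missing
  await Promise.all([syncA.backlog(), syncB.backlog()])
}
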
package/src/valet.js
CHANGED
@@ -16,7 +16,7 @@ import { Buffer } from 'buffer'
 // @ts-ignore
 import * as codec from 'encrypted-block'
 import { rawSha1 as sha1sync } from './sha1.js'
-const chunker = bf(
+const chunker = bf(30)
 
 const NO_ENCRYPT = typeof process !== 'undefined' && !!process.env?.NO_ENCRYPT
 // ? process.env.NO_ENCRYPT : import.meta && import.meta.env.VITE_NO_ENCRYPT
@@ -100,12 +100,15 @@ export class Valet {
     if (innerBlockstore.lastCid) {
       if (this.keyMaterial) {
         // console.log('encrypting car', innerBlockstore.label)
+        // should we pass cids in instead of iterating frin innerBlockstore?
         const newCar = await blocksToEncryptedCarBlock(innerBlockstore.lastCid, innerBlockstore, this.keyMaterial)
         await this.parkCar(newCar.cid.toString(), newCar.bytes, cids)
       } else {
         const newCar = await blocksToCarBlock(innerBlockstore.lastCid, innerBlockstore)
         await this.parkCar(newCar.cid.toString(), newCar.bytes, cids)
       }
+    } else {
+      throw new Error('missing lastCid for car header')
     }
   }
 
@@ -128,6 +131,23 @@ export class Valet {
     return await dbWorkFun(this.idb)
   }
 
+  /**
+   * Iterate over all blocks in the store.
+   *
+   * @yields {{cid: string, value: Uint8Array}}
+   * @returns {AsyncGenerator<any, any, any>}
+   */
+  async * cids () {
+    // console.log('valet cids')
+    const db = await this.withDB(async db => db)
+    const tx = db.transaction(['cidToCar'], 'readonly')
+    let cursor = await tx.store.openCursor()
+    while (cursor) {
+      yield { cid: cursor.key, car: cursor.value.car }
+      cursor = await cursor.continue()
+    }
+  }
+
   /**
    *
    * @param {string} carCid
@@ -140,7 +160,7 @@ export class Valet {
       await tx.objectStore('cidToCar').put({ pending: 'y', car: carCid, cids: Array.from(cids) })
       return await tx.done
     })
-
+    // console.log('parked car', carCid, value.length, Array.from(cids))
    // upload to web3.storage if we have credentials
    if (this.uploadFunction) {
      if (this.alreadyEnqueued.has(carCid)) {
@@ -200,9 +220,12 @@ export class Valet {
  }
 }
 
-export const blocksToCarBlock = async (lastCid, blocks) => {
+export const blocksToCarBlock = async (rootCids, blocks) => {
   let size = 0
-
+  if (!Array.isArray(rootCids)) {
+    rootCids = [rootCids]
+  }
+  const headerSize = CBW.headerLength({ roots: rootCids })
   size += headerSize
   if (!Array.isArray(blocks)) {
     blocks = Array.from(blocks.entries())
@@ -214,7 +237,9 @@ export const blocksToCarBlock = async (lastCid, blocks) => {
   const buffer = new Uint8Array(size)
   const writer = await CBW.createWriter(buffer, { headerSize })
 
-
+  for (const cid of rootCids) {
+    writer.addRoot(cid)
+  }
 
   for (const { cid, bytes } of blocks) {
     writer.write({ cid, bytes })
@@ -230,7 +255,8 @@ export const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blo
   for (const { cid } of blocks.entries()) {
     theCids.push(cid.toString())
   }
-
+  // console.log('encrypting', theCids.length, 'blocks', theCids.includes(innerBlockStoreClockRootCid.toString()))
+  // console.log('cids', theCids, innerBlockStoreClockRootCid.toString())
  let last
  for await (const block of encrypt({
    cids: theCids,
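
valet.js gains an async cids() generator that walks the cidToCar index in IndexedDB, which is what lets the database enumerate every stored CID for the sync diff, and blocksToCarBlock now accepts either a single root CID or an array of roots. A small illustrative sketch of iterating the new generator (not from the package; the valet argument is an instance of the Valet class above):

async function listStoredCids (valet) {
  const stored = []
  for await (const { cid, car } of valet.cids()) {
    stored.push({ cid, car }) // cid is the stored block's key, car is the CID of the CAR file that holds it
  }
  return stored
}
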