@fireproof/core 0.5.6 → 0.5.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -0
- package/dist/src/fireproof.d.ts +44 -8
- package/dist/src/fireproof.js +2600 -514
- package/dist/src/fireproof.js.map +1 -1
- package/dist/src/fireproof.mjs +2600 -515
- package/dist/src/fireproof.mjs.map +1 -1
- package/package.json +4 -2
- package/src/blockstore.js +29 -16
- package/src/clock.js +1 -1
- package/src/database.js +21 -3
- package/src/fireproof.js +2 -40
- package/src/listener.js +1 -1
- package/src/prolly.js +11 -8
- package/src/sync.js +194 -0
- package/src/valet.js +31 -5
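
The functional changes in 0.5.7: package.json gains a gptdoc field that compresses the public API into a single string (plus a simple-peer dependency), and a new src/sync.js, exported as Sync, adds peer-to-peer replication over WebRTC. Expanded into ordinary code, the gptdoc usage string reads roughly as below. This is a sketch derived from that string alone; it assumes the static Fireproof.storage factory is what the string's lowercase fireproof.storage refers to, and uses a placeholder listener callback.

import { Fireproof, Listener, Index } from '@fireproof/core'

const db = Fireproof.storage('app-db-name')
const ok = await db.put({ any: 'json' })
const doc = await db.get(ok.id)
await db.del(doc._id)

const all = await db.allDocuments()
const values = all.rows.map(({ key, value }) => value)

const updateReactStateFn = () => {} // stand-in for a real state-setting callback
const listener = new Listener(db)
listener.on('*', updateReactStateFn)

const index = new Index(db, (doc, map) => map(doc.any, { custom: Object.keys(doc) }))
const result = await index.query({ range: ['a', 'z'] })
const keys = result.rows.map(({ key }) => key)
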
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@fireproof/core",
-  "version": "0.5.6",
+  "version": "0.5.7",
   "description": "Cloudless database for apps, the browser, and IPFS",
   "main": "dist/src/fireproof.js",
   "module": "dist/src/fireproof.mjs",
@@ -36,6 +36,7 @@
     "Mikeal Rogers"
   ],
   "license": "Apache-2.0 OR MIT",
+  "gptdoc": "Fireproof/API/Usage: import { Fireproof, Listener, Index } from '@fireproof/core'; const db = fireproof.storage('app-db-name'); const ok = await db.put({ any: 'json' }); const doc = await db.get(ok.id); await db.del(doc._id); const all = await db.allDocuments(); all.rows.map(({key, value}) => value); const listener = new Listener(db); listener.on('*', updateReactStateFn); const index = new Index(db, (doc, map) => map(doc.any, {custom: Object.keys(doc)})); const result = await index.query({range : ['a', 'z']}); result.rows.map(({ key }) => key);",
   "dependencies": {
     "@ipld/car": "^5.1.0",
     "@ipld/dag-cbor": "^9.0.0",
@@ -53,7 +54,8 @@
     "prolly-trees": "1.0.4",
     "randombytes": "^2.1.0",
     "rollup-plugin-commonjs": "^10.1.0",
-    "sade": "^1.8.1"
+    "sade": "^1.8.1",
+    "simple-peer": "^9.11.1"
   },
   "devDependencies": {
     "@rollup/plugin-alias": "^5.0.0",

package/src/blockstore.js
CHANGED
@@ -34,10 +34,12 @@ export class TransactionBlockstore {
   /** @type {Map<string, Uint8Array>} */
   committedBlocks = new Map()
 
+  /** @type {Valet} */
   valet = null
 
   instanceId = 'blkz.' + Math.random().toString(36).substring(2, 4)
   inflightTransactions = new Set()
+  syncs = new Set()
 
   constructor (name, encryptionKey) {
     if (name) {
@@ -75,10 +77,10 @@ export class TransactionBlockstore {
 
   async committedGet (key) {
     const old = this.committedBlocks.get(key)
+    // console.log('committedGet: ' + key + ' ' + this.instanceId, old.length)
     if (old) return old
     if (!this.valet) throw new Error('Missing block: ' + key)
     const got = await this.valet.getBlock(key)
-    // console.log('committedGet: ' + key)
     this.committedBlocks.set(key, got)
     return got
   }
@@ -120,18 +122,24 @@ export class TransactionBlockstore {
   /**
    * Iterate over all blocks in the store.
    *
-   * @yields {
-   * @returns {AsyncGenerator<
+   * @yields {{cid: string, bytes: Uint8Array}}
+   * @returns {AsyncGenerator<any, any, any>}
    */
-
-
-
-
-
-
-
-
-
+  async * entries () {
+    for (const transaction of this.inflightTransactions) {
+      for (const [str, bytes] of transaction) {
+        yield { cid: str, bytes }
+      }
+    }
+    for (const [str, bytes] of this.committedBlocks) {
+      yield { cid: str, bytes }
+    }
+    if (this.valet) {
+      for await (const { cid } of this.valet.cids()) {
+        yield { cid }
+      }
+    }
+  }
 
   /**
    * Begin a transaction. Ensures the uncommited blocks are empty at the begining.
@@ -150,8 +158,13 @@ export class TransactionBlockstore {
    * @returns {Promise<void>}
    * @memberof TransactionBlockstore
    */
-  async commit (innerBlockstore) {
+  async commit (innerBlockstore, doSync = true) {
+    // console.log('commit', doSync, innerBlockstore.label)
     await this.doCommit(innerBlockstore)
+    if (doSync) {
+      // const all =
+      await Promise.all([...this.syncs].map(async sync => sync.sendUpdate(innerBlockstore)))
+    }
   }
 
   // first get the transaction blockstore from the map of transaction blockstores
@@ -170,8 +183,8 @@ export class TransactionBlockstore {
         cids.add(stringCid)
       }
     }
+    // console.log(innerBlockstore.label, 'committing', cids.size, 'blocks', [...cids].map(cid => cid.toString()), this.valet)
     if (cids.size > 0 && this.valet) {
-      // console.log(innerBlockstore.label, 'committing', cids.size, 'blocks')
       await this.valet.writeTransaction(innerBlockstore, cids)
     }
   }
@@ -195,7 +208,7 @@ export class TransactionBlockstore {
  * @returns {Promise<any>}
  * @memberof TransactionBlockstore
  */
-export const doTransaction = async (label, blockstore, doFun) => {
+export const doTransaction = async (label, blockstore, doFun, doSync = true) => {
   // @ts-ignore
   if (!blockstore.commit) return await doFun(blockstore)
   // @ts-ignore
@@ -203,7 +216,7 @@ export const doTransaction = async (label, blockstore, doFun) => {
   try {
     const result = await doFun(innerBlockstore)
     // @ts-ignore
-    await blockstore.commit(innerBlockstore)
+    await blockstore.commit(innerBlockstore, doSync)
     return result
   } catch (e) {
     console.error(`Transaction ${label} failed`, e, e.stack)

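The blockstore picks up two related features: an entries() async generator that walks inflight transactions, then committed blocks, then the valet's parked CIDs, and a doSync flag on commit()/doTransaction() so internal writes (like the prolly root transaction further down) can skip fanning out to registered Sync peers. A caller-side sketch, assuming blocks is a TransactionBlockstore from this package; collectCids here is essentially what the new Database.allStoredCIDs() does.

import { doTransaction } from './blockstore.js' // relative path as used inside the package

// Collect every CID the store can account for; valet-backed entries
// arrive with only a cid, no bytes.
async function collectCids (blocks) {
  const cids = []
  for await (const { cid } of blocks.entries()) {
    cids.push(cid.toString())
  }
  return cids
}

// Commit without notifying Sync peers, the way prolly.js root() now
// calls doTransaction('root', ..., false).
async function commitQuietly (blocks, writes) {
  return doTransaction('quiet', blocks, writes, false)
}
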
package/src/clock.js
CHANGED
@@ -232,7 +232,7 @@ export async function findEventsToSync (blocks, head) {
   const toSync = await asyncFilter(sorted, async (uks) => !(await contains(events, ancestor, uks.cid)))
   // console.timeEnd(callTag + '.contains')
 
-  return { cids: events
+  return { cids: events, events: toSync }
 }
 
 const asyncFilter = async (arr, predicate) =>

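findEventsToSync now returns both the EventFetcher it read through (cids) and the filtered list of events the given head is missing (events), matching how prolly.js collects proof CIDs from the same fetcher. A consuming sketch; blocks and head are placeholders for the arguments used elsewhere in the package.

import { findEventsToSync } from './clock.js' // relative path as used inside the package

const { cids, events } = await findEventsToSync(blocks, head)
const touched = await cids.all() // the EventFetcher's record of every block it read
for (const { cid } of events) {
  // fetch or replay each event not yet covered by `head`
}
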
package/src/database.js
CHANGED
@@ -144,6 +144,7 @@ export class Database {
 
   async allCIDs () {
     const allResp = await getAll(this.blocks, this.clock)
+    // console.log('allcids', allResp.cids, allResp.clockCIDs)
     const cids = await cidsToProof(allResp.cids)
     const clockCids = await cidsToProof(allResp.clockCIDs)
     // console.log('allcids', cids, clockCids)
@@ -151,6 +152,14 @@
     return [...cids, ...clockCids] // need a single block version of clock head, maybe an encoded block for it
   }
 
+  async allStoredCIDs () {
+    const allCIDs = []
+    for await (const { cid } of this.blocks.entries()) {
+      allCIDs.push(cid)
+    }
+    return allCIDs
+  }
+
   /**
    * Runs validation on the specified document using the Fireproof instance's configuration. Throws an error if the document is invalid.
    *
@@ -281,8 +290,15 @@ export class Database {
 
   applyClock (prevClock, newClock) {
     // console.log('applyClock', prevClock, newClock, this.clock)
-    const
-    this.clock
+    const stPrev = prevClock.map(cid => cid.toString())
+    const keptPrevClock = this.clock.filter(cid => stPrev.indexOf(cid.toString()) === -1)
+    const merged = keptPrevClock.concat(newClock)
+    const uniquebyCid = new Map()
+    for (const cid of merged) {
+      uniquebyCid.set(cid.toString(), cid)
+    }
+    this.clock = Array.from(uniquebyCid.values())
+    // console.log('afterClock', this.clock)
   }
 
   // /**
@@ -341,7 +357,9 @@ export class Database {
 }
 
 export async function cidsToProof (cids) {
-  if (!cids
+  if (!cids) return []
+  if (!cids.all) { return [...cids] }
+
   const all = await cids.all()
   return [...all].map(cid => cid.toString())
 }

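Two additions here back the sync flow: allStoredCIDs() drains the blockstore's new entries() generator, and applyClock() becomes a real merge, dropping the previous head it was handed, keeping any concurrent local heads, appending the new head, and de-duplicating by CID string. The merge rule, sketched with plain strings standing in for CID objects:

// Same logic as applyClock, with strings in place of CIDs for readability.
function mergeClock (currentClock, prevClock, newClock) {
  const kept = currentClock.filter(cid => !prevClock.includes(cid))
  return [...new Set([...kept, ...newClock])]
}

mergeClock(['a', 'b'], ['a'], ['c']) // => ['b', 'c']: 'a' replaced by 'c', concurrent 'b' kept
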
package/src/fireproof.js
CHANGED
@@ -1,13 +1,12 @@
 import randomBytes from 'randombytes'
-
 import { Database, parseCID } from './database.js'
 import { Listener } from './listener.js'
 import { DbIndex as Index } from './db-index.js'
 import { TransactionBlockstore } from './blockstore.js'
 import { localGet } from './utils.js'
-import {
+import { Sync } from './sync.js'
 
-export { Index, Listener, Database }
+export { Index, Listener, Database, Sync }
 
 export class Fireproof {
   /**
@@ -85,41 +84,4 @@ export class Fireproof {
     await database.notifyReset() // hmm... indexes should listen to this? might be more complex than worth it. so far this is the only caller
     return database
   }
-
-  // get all the cids
-  // tell valet to make a file
-  static async makeCar (database, key) {
-    const allCIDs = await database.allCIDs()
-    const blocks = database.blocks
-
-    const rootCid = parseCID(allCIDs[allCIDs.length - 1])
-    if (typeof key === 'undefined') {
-      key = blocks.valet?.getKeyMaterial()
-    }
-    if (key) {
-      return blocksToEncryptedCarBlock(
-        rootCid,
-        {
-          entries: () => allCIDs.map(cid => ({ cid })),
-          get: async cid => await blocks.get(cid)
-        },
-        key
-      )
-    } else {
-      const carBlocks = await Promise.all(
-        allCIDs.map(async c => {
-          const b = await blocks.get(c)
-          // console.log('block', b)
-          if (typeof b.cid === 'string') {
-            b.cid = parseCID(b.cid)
-          }
-          // if (b.bytes.constructor.name === 'Buffer') console.log('conver vbuff')
-          return b
-        })
-      )
-      return blocksToCarBlock(rootCid, {
-        entries: () => carBlocks
-      })
-    }
-  }
 }

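makeCar leaves the Fireproof class and reappears as a static on the new Sync class (see src/sync.js below), which is now part of the package's public export. For callers who used it directly, the migration looks roughly like this; Fireproof.storage is assumed as the database factory per the gptdoc string. Note the new version roots the CAR at the database clock rather than the last allCIDs entry, and can skip CIDs the remote peer already has.

import { Fireproof, Sync } from '@fireproof/core'

const db = Fireproof.storage('app-db-name')
// 0.5.6: const car = await Fireproof.makeCar(db, key)
const car = await Sync.makeCar(db) // key defaults to the valet's key material when omitted
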
package/src/listener.js
CHANGED
@@ -41,7 +41,7 @@ export class Listener {
    * @returns {Function} A function to unsubscribe from the topic.
    * @memberof Listener
    * @instance
-   * @param {any} [since] - clock to flush from on launch
+   * @param {any} [since] - clock to flush from on launch, pass null for all
    */
   on (topic, subscriber, since = undefined) {
     const listOfTopicSubscribers = getTopicList(this.subcribers, topic)

package/src/prolly.js
CHANGED
@@ -157,9 +157,9 @@ const doProllyBulk = async (inBlocks, head, event) => {
   const { getBlock, blocks } = makeGetAndPutBlock(inBlocks)
   let bulkSorted = []
   let prollyRootNode = null
+  const events = new EventFetcher(blocks)
   if (head.length) {
     // Otherwise, we find the common ancestor and update the root and other blocks
-    const events = new EventFetcher(blocks)
     // todo this is returning more events than necessary, lets define the desired semantics from the top down
     // good semantics mean we can cache the results of this call
     const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head)
@@ -177,15 +177,17 @@ const doProllyBulk = async (inBlocks, head, event) => {
     const newBlocks = []
     // if all operations are deletes, we can just return an empty root
     if (bulkOperations.every((op) => op.del)) {
-      return { root: null, blocks: [] }
+      return { root: null, blocks: [], clockCIDs: await events.all() }
     }
     for await (const node of create({ get: getBlock, list: bulkOperations, ...blockOpts })) {
       root = await node.block
       newBlocks.push(root)
     }
-    return { root, blocks: newBlocks }
+    return { root, blocks: newBlocks, clockCIDs: await events.all() }
   } else {
-
+    const writeResp = await prollyRootNode.bulk(bulkOperations) // { root: newProllyRootNode, blocks: newBlocks }
+    writeResp.clockCIDs = await events.all()
+    return writeResp
   }
 }
 
@@ -250,15 +252,15 @@ export async function root (inBlocks, head) {
   if (!head.length) {
     throw new Error('no head')
   }
-  const { root: newProllyRootNode, blocks: newBlocks,
+  const { root: newProllyRootNode, blocks: newBlocks, clockCIDs } = await doProllyBulk(inBlocks, head)
   // todo maybe these should go to a temp blockstore?
   await doTransaction('root', inBlocks, async (transactionBlocks) => {
     const { bigPut } = makeGetAndPutBlock(transactionBlocks)
     for (const nb of newBlocks) {
       bigPut(nb)
     }
-  })
-  return {
+  }, false)
+  return { clockCIDs, node: newProllyRootNode }
 }
 
 /**
@@ -293,7 +295,8 @@ export async function getAll (blocks, head) {
   if (!head.length) {
     return { clockCIDs: new CIDCounter(), cids: new CIDCounter(), result: [] }
   }
-  const { node: prollyRootNode,
+  const { node: prollyRootNode, clockCIDs } = await root(blocks, head)
+
   if (!prollyRootNode) {
     return { clockCIDs, cids: new CIDCounter(), result: [] }
   }

package/src/sync.js
ADDED
@@ -0,0 +1,194 @@
+import SimplePeer from 'simple-peer'
+import { parseCID } from './database.js'
+import { blocksToCarBlock, blocksToEncryptedCarBlock } from './valet.js'
+import { CarReader } from '@ipld/car'
+
+/**
+ * @typedef {import('./database.js').Database} Database
+ */
+export class Sync {
+  /**
+   * @param {Database} database
+   * @param {typeof SimplePeer} [PeerClass]
+   * @memberof Sync
+   * @static
+   */
+  constructor (database, PeerClass = SimplePeer) {
+    this.database = database
+    this.database.blocks.syncs.add(this)
+    this.PeerClass = PeerClass
+    this.pushBacklog = new Promise((resolve, reject) => {
+      this.pushBacklogResolve = resolve
+      this.pushBacklogReject = reject
+    })
+    // this.pushBacklog.then(() => {
+    //   // console.log('sync backlog resolved')
+    //   this.database.notifyReset()
+    // })
+  }
+
+  async offer () {
+    return this.setupPeer(true)
+  }
+
+  async accept (base64offer) {
+    const offer = JSON.parse(atob(base64offer))
+    const p = this.setupPeer(false)
+    this.peer.signal(offer)
+    return p
+  }
+
+  connect (base64accept) {
+    const accept = JSON.parse(atob(base64accept))
+    this.peer.signal(accept)
+  }
+
+  async setupPeer (initiator = false) {
+    this.peer = new this.PeerClass({
+      initiator,
+      trickle: false
+    })
+    this.peer.on('connect', () => this.startSync())
+    this.peer.on('data', data => this.gotData(data))
+    const p = new Promise((resolve, reject) => {
+      this.peer.on('signal', resolve)
+      this.peer.on('error', reject)
+    })
+    return p.then(signal => btoa(JSON.stringify(signal)))
+  }
+
+  async backlog () {
+    return this.pushBacklog
+  }
+
+  async gotData (data) {
+    // console.log('got data', data.toString())
+    try {
+      const reader = await CarReader.fromBytes(data)
+      const blz = new Set()
+      for await (const block of reader.blocks()) {
+        blz.add(block)
+      }
+      const roots = await reader.getRoots()
+      // console.log(
+      //   'got car',
+      //   roots.map(c => c.toString()),
+      //   this.database.clock.map(c => c.toString())
+      // )
+      // console.log(
+      //   'got blocks',
+      //   [...blz].map(({ cid }) => cid.toString())
+      // )
+      // @ts-ignore
+      reader.entries = reader.blocks
+      await this.database.blocks.commit({
+        label: 'sync',
+        entries: () => [...blz],
+        get: async cid => await reader.get(cid),
+        lastCid: [...blz][0].cid // doesn't matter
+      }, false)
+      this.database.applyClock([], roots)
+      this.database.notifyReset()
+      // console.log('after', this.database.clockToJSON())
+      this.pushBacklogResolve({ ok: true })
+    } catch (e) {
+      // console.error(e)
+      // if e.message matche 'CBOR' we can ignore it
+      if (!e.message.match(/CBOR|fromBytes/)) {
+        throw e
+      }
+
+      // data is a json string, parse it
+      const message = JSON.parse(data.toString())
+      // console.log('got message', message)
+      if (message.ok) {
+        this.pushBacklogResolve({ ok: true })
+      } else if (message.clock) {
+        const reqCidDiff = message
+        // this might be a CID diff
+        // console.log('got diff', reqCidDiff)
+        const carBlock = await Sync.makeCar(this.database, null, reqCidDiff.cids)
+        if (!carBlock) {
+          // we are full synced
+          // console.log('we are full synced')
+          this.peer.send(JSON.stringify({ ok: true }))
+          // this.pushBacklogResolve({ ok: true })
+        } else {
+          // console.log('do send', carBlock.bytes.length)
+          this.peer.send(carBlock.bytes)
+          // this.pushBacklogResolve({ ok: true })
+        }
+      }
+    }
+  }
+
+  async sendUpdate (blockstore) {
+    console.log('send update from', this.database.instanceId)
+    // todo should send updates since last sync
+    const newCar = await blocksToCarBlock(blockstore.lastCid, blockstore)
+    this.peer.send(newCar.bytes)
+  }
+
+  async startSync () {
+    // console.log('start sync', this.peer.initiator)
+    const allCIDs = await this.database.allStoredCIDs()
+    // console.log('allCIDs', allCIDs)
+    const reqCidDiff = {
+      clock: this.database.clockToJSON(),
+      cids: allCIDs.map(cid => cid.toString())
+    }
+    // console.log('send diff', reqCidDiff)
+    this.peer.send(JSON.stringify(reqCidDiff))
+  }
+
+  // get all the cids
+  // tell valet to make a file
+  /**
+   * @param {import("./database.js").Database} database
+   * @param {string} key
+   */
+  static async makeCar (database, key, skip = []) {
+    const allCIDs = await database.allCIDs()
+    const blocks = database.blocks
+    const rootCIDs = database.clock
+
+    const syncCIDs = [...new Set([...rootCIDs, ...allCIDs])].filter(cid => !skip.includes(cid.toString()))
+    // console.log(
+    //   'makeCar',
+    //   rootCIDs.map(c => c.toString()),
+    //   syncCIDs.map(c => c.toString()),
+    //   allCIDs.map(c => c.toString())
+    // )
+    if (syncCIDs.length === 0) {
+      return null
+    }
+
+    if (typeof key === 'undefined') {
+      key = blocks.valet?.getKeyMaterial()
+    }
+    if (key) {
+      return blocksToEncryptedCarBlock(
+        rootCIDs,
+        {
+          entries: () => syncCIDs.map(cid => ({ cid })),
+          get: async cid => await blocks.get(cid)
+        },
+        key
+      )
+    } else {
+      const carBlocks = await Promise.all(
+        syncCIDs.map(async c => {
+          const b = await blocks.get(c)
+          if (typeof b.cid === 'string') {
+            b.cid = parseCID(b.cid)
+          }
+          return b
+        })
+      )
+      // console.log('carblock')
+      return blocksToCarBlock(rootCIDs, {
+        entries: () => carBlocks
+      })
+    }
+  }
+}

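sync.js is the headline addition: a Sync instance ties a database to a simple-peer WebRTC data channel, exchanges base64 offer/answer blobs out of band, then ships CAR files until both sides acknowledge. A minimal pairing sketch, assuming two local stores standing in for two browsers, copy/paste (or any side channel) for the signaling strings, and a WebRTC-capable environment (in Node you would pass a wrtc-backed PeerClass as the second Sync constructor argument); Fireproof.storage is again assumed as the factory.

import { Fireproof, Sync } from '@fireproof/core'

const dbA = Fireproof.storage('sync-demo-a') // two local stores standing in for two browsers
const dbB = Fireproof.storage('sync-demo-b')

const syncA = new Sync(dbA) // initiator
const syncB = new Sync(dbB) // responder

const offer = await syncA.offer()        // base64 SDP offer, hand to the other peer
const accept = await syncB.accept(offer) // base64 answer, hand back
syncA.connect(accept)                    // both peers connect and run startSync()

// Resolves once each side has either pushed its missing blocks or been told { ok: true }.
await Promise.all([syncA.backlog(), syncB.backlog()])
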
package/src/valet.js
CHANGED
@@ -100,12 +100,15 @@ export class Valet {
     if (innerBlockstore.lastCid) {
       if (this.keyMaterial) {
         // console.log('encrypting car', innerBlockstore.label)
+        // should we pass cids in instead of iterating frin innerBlockstore?
         const newCar = await blocksToEncryptedCarBlock(innerBlockstore.lastCid, innerBlockstore, this.keyMaterial)
         await this.parkCar(newCar.cid.toString(), newCar.bytes, cids)
       } else {
         const newCar = await blocksToCarBlock(innerBlockstore.lastCid, innerBlockstore)
         await this.parkCar(newCar.cid.toString(), newCar.bytes, cids)
       }
+    } else {
+      throw new Error('missing lastCid for car header')
     }
   }
 
@@ -128,6 +131,23 @@ export class Valet {
     return await dbWorkFun(this.idb)
   }
 
+  /**
+   * Iterate over all blocks in the store.
+   *
+   * @yields {{cid: string, value: Uint8Array}}
+   * @returns {AsyncGenerator<any, any, any>}
+   */
+  async * cids () {
+    // console.log('valet cids')
+    const db = await this.withDB(async db => db)
+    const tx = db.transaction(['cidToCar'], 'readonly')
+    let cursor = await tx.store.openCursor()
+    while (cursor) {
+      yield { cid: cursor.key, car: cursor.value.car }
+      cursor = await cursor.continue()
+    }
+  }
+
   /**
    *
    * @param {string} carCid
@@ -140,7 +160,7 @@ export class Valet {
       await tx.objectStore('cidToCar').put({ pending: 'y', car: carCid, cids: Array.from(cids) })
       return await tx.done
     })
-
+    // console.log('parked car', carCid, value.length, Array.from(cids))
     // upload to web3.storage if we have credentials
     if (this.uploadFunction) {
       if (this.alreadyEnqueued.has(carCid)) {
@@ -200,9 +220,12 @@ export class Valet {
   }
 }
 
-export const blocksToCarBlock = async (lastCid, blocks) => {
+export const blocksToCarBlock = async (rootCids, blocks) => {
   let size = 0
-
+  if (!Array.isArray(rootCids)) {
+    rootCids = [rootCids]
+  }
+  const headerSize = CBW.headerLength({ roots: rootCids })
   size += headerSize
   if (!Array.isArray(blocks)) {
     blocks = Array.from(blocks.entries())
@@ -214,7 +237,9 @@ export const blocksToCarBlock = async (lastCid, blocks) => {
   const buffer = new Uint8Array(size)
   const writer = await CBW.createWriter(buffer, { headerSize })
 
-
+  for (const cid of rootCids) {
+    writer.addRoot(cid)
+  }
 
   for (const { cid, bytes } of blocks) {
     writer.write({ cid, bytes })
@@ -230,7 +255,8 @@ export const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blo
   for (const { cid } of blocks.entries()) {
     theCids.push(cid.toString())
   }
-
+  // console.log('encrypting', theCids.length, 'blocks', theCids.includes(innerBlockStoreClockRootCid.toString()))
+  // console.log('cids', theCids, innerBlockStoreClockRootCid.toString())
  let last
  for await (const block of encrypt({
    cids: theCids,

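Finally, the valet grows a cids() cursor over its cidToCar index (this is what TransactionBlockstore.entries() consumes), and blocksToCarBlock now accepts either one root CID or an array of roots, which is how Sync roots a CAR at the whole clock. A small sketch of the multi-root call; headCids and blockList are hypothetical inputs standing in for the clock CIDs and the { cid, bytes } entries to pack.

import { blocksToCarBlock } from './valet.js' // relative path as used inside the package

// headCids, blockList: hypothetical inputs (clock CIDs and { cid, bytes } block entries).
const car = await blocksToCarBlock(headCids, { entries: () => blockList })
// car.cid names the CAR and car.bytes is the encoded archive; passing a single
// CID still works because non-array roots are wrapped as [rootCid].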