@fireproof/core 0.8.0 → 0.10.1-dev
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -184
- package/dist/fireproof.browser.js +18879 -0
- package/dist/fireproof.browser.js.map +7 -0
- package/dist/fireproof.cjs.js +9305 -0
- package/dist/fireproof.cjs.js.map +7 -0
- package/dist/fireproof.esm.js +9295 -0
- package/dist/fireproof.esm.js.map +7 -0
- package/package.json +57 -105
- package/dist/blockstore.js +0 -268
- package/dist/clock.js +0 -459
- package/dist/crypto.js +0 -63
- package/dist/database.js +0 -434
- package/dist/db-index.js +0 -403
- package/dist/encrypted-block.js +0 -48
- package/dist/fireproof.js +0 -84
- package/dist/import.js +0 -29
- package/dist/listener.js +0 -111
- package/dist/loader.js +0 -13
- package/dist/prolly.js +0 -405
- package/dist/remote.js +0 -102
- package/dist/sha1.js +0 -74
- package/dist/src/fireproof.d.ts +0 -472
- package/dist/src/fireproof.js +0 -81191
- package/dist/src/fireproof.js.map +0 -1
- package/dist/src/fireproof.mjs +0 -81186
- package/dist/src/fireproof.mjs.map +0 -1
- package/dist/storage/base.js +0 -426
- package/dist/storage/blocksToEncryptedCarBlock.js +0 -144
- package/dist/storage/browser.js +0 -62
- package/dist/storage/filesystem.js +0 -67
- package/dist/storage/rest.js +0 -57
- package/dist/storage/ucan.js +0 -0
- package/dist/storage/utils.js +0 -144
- package/dist/sync.js +0 -218
- package/dist/utils.js +0 -16
- package/dist/valet.js +0 -102
- package/src/blockstore.js +0 -283
- package/src/clock.js +0 -486
- package/src/crypto.js +0 -70
- package/src/database.js +0 -469
- package/src/db-index.js +0 -426
- package/src/encrypted-block.js +0 -57
- package/src/fireproof.js +0 -98
- package/src/import.js +0 -34
- package/src/link.d.ts +0 -3
- package/src/loader.js +0 -16
- package/src/prolly.js +0 -445
- package/src/remote.js +0 -113
- package/src/sha1.js +0 -83
- package/src/storage/base.js +0 -463
- package/src/storage/browser.js +0 -67
- package/src/storage/filesystem.js +0 -73
- package/src/storage/rest.js +0 -59
- package/src/storage/ucan.js +0 -0
- package/src/storage/utils.js +0 -152
- package/src/sync.js +0 -237
- package/src/valet.js +0 -105
package/src/db-index.js
DELETED
@@ -1,426 +0,0 @@
|
|
1
|
-
// @ts-ignore
|
2
|
-
import { create, load } from 'prolly-trees/db-index'
|
3
|
-
// import { create, load } from '../../../../prolly-trees/src/db-index.js'
|
4
|
-
|
5
|
-
import { sha256 as hasher } from 'multiformats/hashes/sha2'
|
6
|
-
// @ts-ignore
|
7
|
-
import { nocache as cache } from 'prolly-trees/cache'
|
8
|
-
// @ts-ignore
|
9
|
-
import { bf, simpleCompare } from 'prolly-trees/utils'
|
10
|
-
import { makeGetBlock, visMerkleTree } from './prolly.js'
|
11
|
-
// eslint-disable-next-line no-unused-vars
|
12
|
-
import { Database, cidsToProof } from './database.js'
|
13
|
-
|
14
|
-
import * as codec from '@ipld/dag-cbor'
|
15
|
-
// import { create as createBlock } from 'multiformats/block'
|
16
|
-
import { doTransaction } from './blockstore.js'
|
17
|
-
// @ts-ignore
|
18
|
-
import charwise from 'charwise'
|
19
|
-
|
20
|
-
const ALWAYS_REBUILD = false // todo: remove
|
21
|
-
|
22
|
-
/**
 * Compare two [key, ref] index entries: primary order by encoded key,
 * ties broken by the document reference.
 */
const compare = (a, b) => {
  const [aKey, aRef] = a
  const [bKey, bRef] = b
  const byKey = simpleCompare(aKey, bKey)
  return byKey !== 0 ? byKey : refCompare(aRef, bRef)
}

/**
 * Compare two document refs. NaN sorts before everything and Infinity
 * after, which is how the prefix-range sentinels ([...prefix, NaN] /
 * [...prefix, Infinity]) bracket all real doc ids for a given key.
 */
const refCompare = (aRef, bRef) => {
  if (Number.isNaN(aRef)) return -1
  if (Number.isNaN(bRef)) throw new Error('ref may not be Infinity or NaN')
  if (aRef === Infinity) return 1 // need to test this on equal docids!
  return simpleCompare(aRef, bRef)
}
|
37
|
-
|
38
|
-
// Prolly-tree options for the two index trees: the by-key tree orders
// composite [key, docId] entries with the comparator above; the by-id tree
// orders plain doc ids with simpleCompare. Both use the same chunker/codec.
const dbIndexOpts = { cache, chunker: bf(30), codec, hasher, compare }
const idIndexOpts = { cache, chunker: bf(30), codec, hasher, compare: simpleCompare }
|
40
|
-
|
41
|
-
// Rebuild a document from a change row: the row key becomes _id (a value
// that carries its own _id wins, matching spread semantics).
const makeDoc = ({ key, value }) => Object.assign({ _id: key }, value)
|
42
|
-
|
43
|
-
/**
|
44
|
-
* JDoc for the result row type.
|
45
|
-
* @typedef {Object} ChangeEvent
|
46
|
-
* @property {string} key - The key of the document.
|
47
|
-
* @property {Object} value - The new value of the document.
|
48
|
-
* @property {boolean} [del] - Is the row deleted?
|
49
|
-
* @memberof DbIndex
|
50
|
-
*/
|
51
|
-
|
52
|
-
/**
|
53
|
-
* JDoc for the result row type.
|
54
|
-
* @typedef {Object} DbIndexEntry
|
55
|
-
* @property {string[]} key - The key for the DbIndex entry.
|
56
|
-
* @property {Object} value - The value of the document.
|
57
|
-
* @property {boolean} [del] - Is the row deleted?
|
58
|
-
* @memberof DbIndex
|
59
|
-
*/
|
60
|
-
|
61
|
-
/**
|
62
|
-
* Transforms a set of changes to DbIndex entries using a map function.
|
63
|
-
*
|
64
|
-
* @param {ChangeEvent[]} changes
|
65
|
-
* @param {Function} mapFn
|
66
|
-
* @returns {DbIndexEntry[]} The DbIndex entries generated by the map function.
|
67
|
-
* @private
|
68
|
-
* @memberof DbIndex
|
69
|
-
*/
|
70
|
-
/**
 * Transforms a set of changes to DbIndex entries using a map function.
 *
 * @param {ChangeEvent[]} changes - {key, value, del} rows from changesSince
 * @param {Function} mapFn - user map function; may call emit(k, v) or simply
 *   return a key
 * @returns {DbIndexEntry[]} entries keyed by [charwise(key), docId]
 * @private
 * @memberof DbIndex
 */
const indexEntriesForChanges = (changes, mapFn) => {
  const indexEntries = []
  for (const { key: _id, value, del } of changes) {
    // deletions and empty values contribute no index entries
    if (del || !value) continue
    let mapCalled = false
    const emit = (k, v) => {
      mapCalled = true
      if (typeof k === 'undefined') return
      indexEntries.push({
        key: [charwise.encode(k), _id],
        value: v || null
      })
    }
    const mapReturn = mapFn(makeDoc({ key: _id, value }), emit)
    // a map function that returns a key without ever calling emit
    // indexes the doc by its return value
    if (!mapCalled && mapReturn) {
      indexEntries.push({
        key: [charwise.encode(mapReturn), _id],
        value: null
      })
    }
  }
  return indexEntries
}
|
93
|
-
|
94
|
-
/**
|
95
|
-
* Represents an DbIndex for a Fireproof database.
|
96
|
-
*
|
97
|
-
* @class DbIndex
|
98
|
-
* @classdesc An DbIndex can be used to order and filter the documents in a Fireproof database.
|
99
|
-
*
|
100
|
-
* @param {Database} database - The Fireproof database instance to DbIndex.
|
101
|
-
* @param {Function} mapFn - The map function to apply to each entry in the database.
|
102
|
-
*
|
103
|
-
*/
|
104
|
-
export class DbIndex {
  /**
   * @param {Database} database - database whose changes feed this index
   * @param {string|Function} [name] - index name; legacy callers pass the map
   *   function here instead (see the deprecation shim below)
   * @param {string|Function} mapFn - map function, or a plain field-name string
   * @param {{byId, byKey, db}} [clock] - persisted index clock to resume from
   * @param {{temporary?: boolean}} [opts] - temporary indexes are not
   *   registered on the database
   */
  constructor (database, name, mapFn, clock = null, opts = {}) {
    this.database = database
    if (typeof name === 'function') {
      // app is using deprecated API with no name argument: shift every
      // positional argument one slot to the right. Remove in 0.7.
      opts = clock || {}
      clock = mapFn || null
      mapFn = name
      name = null
    }
    this.applyMapFn(mapFn, name)

    // Two prolly-trees: indexById maps docId -> index key (used to remove
    // stale entries on update); indexByKey maps [key, docId] -> value
    // (used to answer range queries).
    this.indexById = { root: null, cid: null }
    this.indexByKey = { root: null, cid: null }
    this.dbHead = null // database clock this index has been updated through
    if (clock) {
      this.indexById.cid = clock.byId
      this.indexByKey.cid = clock.byKey
      this.dbHead = clock.db
    }
    this.instanceId = this.database.instanceId + `.DbIndex.${Math.random().toString(36).substring(2, 7)}`
    this.updateIndexPromise = null // in-flight update; serializes concurrent updateIndex calls
    if (!opts.temporary) {
      DbIndex.registerWithDatabase(this, this.database)
    }
  }

  // Install the map function. Accepts either a function or a string; a
  // simple field-name string becomes a function emitting that field.
  applyMapFn (mapFn, name) {
    if (typeof mapFn === 'string') {
      this.mapFnString = mapFn
      // make a regex that matches strings that only have letters, numbers, and spaces
      const regex = /^[a-zA-Z0-9 ]+$/
      // if the string matches the regex, make a function that returns the value at that key
      if (regex.test(mapFn)) {
        this.mapFn = (doc, emit) => {
          if (doc[mapFn]) emit(doc[mapFn])
        }
        this.includeDocsDefault = true
      }
    } else {
      this.mapFn = mapFn
      this.mapFnString = mapFn.toString()
    }
    // concise arrow-function sources default queries to includeDocs
    const matches = /=>\s*(.*)/.exec(this.mapFnString)
    this.includeDocsDefault = this.includeDocsDefault || (matches && matches.length > 0)
    this.name = name || this.makeName()
  }

  // Derive a human-readable name from the map function source: either the
  // "(key, value)" emit-call argument list, or, for a concise arrow
  // function, everything after the arrow. Falls back to the raw source.
  makeName () {
    const regex = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g
    let matches = Array.from(this.mapFnString.matchAll(regex), match => match[1].trim())
    if (matches.length === 0) {
      // NOTE(review): exec may return null here; handled by the branch below
      matches = /=>\s*(.*)/.exec(this.mapFnString)
    }
    if (matches === null) {
      return this.mapFnString
    } else {
      // it's a concise arrow function, match everything after the arrow
      return matches[1]
    }
  }

  // Register on the database keyed by map-function source, merging with any
  // existing registration for the same source.
  static registerWithDatabase (inIndex, database) {
    if (!database.indexes.has(inIndex.mapFnString)) {
      database.indexes.set(inIndex.mapFnString, inIndex)
    } else {
      // merge our inIndex code with the inIndex clock or vice versa
      const existingIndex = database.indexes.get(inIndex.mapFnString)
      // keep the code instance, discard the clock instance
      if (existingIndex.mapFn) {
        // this one also has other config
        existingIndex.dbHead = inIndex.dbHead
        existingIndex.indexById.cid = inIndex.indexById.cid
        existingIndex.indexByKey.cid = inIndex.indexByKey.cid
      } else {
        inIndex.dbHead = existingIndex.dbHead
        inIndex.indexById.cid = existingIndex.indexById.cid
        inIndex.indexByKey.cid = existingIndex.indexByKey.cid
        database.indexes.set(inIndex.mapFnString, inIndex)
      }
    }
  }

  // Serialize the index definition plus its current clock (CIDs as strings).
  toJSON () {
    const indexJson = { name: this.name, code: this.mapFnString, clock: { db: null, byId: null, byKey: null } }
    indexJson.clock.db = this.dbHead?.map(cid => cid.toString())
    indexJson.clock.byId = this.indexById.cid?.toString()
    indexJson.clock.byKey = this.indexByKey.cid?.toString()
    return indexJson
  }

  // Inverse of toJSON (callers must parse the CIDs first; see Fireproof.fromJSON).
  static fromJSON (database, { code, clock, name }) {
    return new DbIndex(database, name, code, clock)
  }

  // NOTE(review): visKeyTree renders the by-ID tree and visIdTree the
  // by-key tree — the names look swapped; confirm before relying on them.
  async visKeyTree () {
    return await visMerkleTree(this.database.indexBlocks, this.indexById.cid)
  }

  async visIdTree () {
    return await visMerkleTree(this.database.indexBlocks, this.indexByKey.cid)
  }

  /**
   * JSDoc for Query type.
   * @typedef {Object} DbQuery
   * @property {string[]} [range] - The range to query.
   * @memberof DbIndex
   */

  /**
   * Query the index, refreshing it from database changes first by default.
   * @param {DbQuery} query - the query range to use
   * @param {boolean} [update] - pass false to query the index as-is
   * @returns {Promise<{proof: {}, rows: Array<{id: string, key: string, value: any, doc?: any}>}>}
   * @memberof DbIndex
   * @instance
   */
  async query (query = {}, update = true) {
    // todo pass a root to query a snapshot
    update && (await this.updateIndex(this.database.indexBlocks))
    const response = await this.doIndexQuery(query)
    return {
      proof: { index: await cidsToProof(response.cids) },
      rows: response.result.map(({ id, key, row, doc }) => {
        // keys were charwise-encoded on the way in; decode for the caller
        return { id, key: charwise.decode(key), value: row, doc }
      })
    }
  }

  /**
   * Post-process raw tree results: ordering, limit, and doc inclusion.
   * @param {any} resp - {result, cids, ...} from the prolly-tree
   * @param {any} query - {descending, limit, includeDocs}
   * @returns the same resp with result transformed
   */
  async applyQuery (resp, query) {
    if (query.descending) {
      resp.result = resp.result.reverse()
    }
    if (query.limit) {
      resp.result = resp.result.slice(0, query.limit)
    }
    if (query.includeDocs) {
      resp.result = await Promise.all(
        resp.result.map(async row => {
          const doc = await this.database.get(row.id)
          return { ...row, doc }
        })
      )
    }
    return resp
  }

  // Dispatch a query against the by-key tree: prefix, range, single key,
  // or full scan. Prefix queries bracket [prefix, NaN]..[prefix, Infinity],
  // the sentinels refCompare sorts below/above all real doc ids.
  async doIndexQuery (query = {}) {
    await loadIndex(this.database.indexBlocks, this.indexByKey, dbIndexOpts)
    if (!this.indexByKey.root) return { result: [] }
    if (query.includeDocs === undefined) query.includeDocs = this.includeDocsDefault
    if (query.prefix) {
      // ensure prefix is an array
      if (!Array.isArray(query.prefix)) query.prefix = [query.prefix]
      const start = [...query.prefix, NaN]
      const end = [...query.prefix, Infinity]
      const prefixRange = [start, end].map(key => charwise.encode(key))
      return await this.applyQuery(await this.indexByKey.root.range(...prefixRange), query)
    } else if (query.range) {
      const encodedRange = query.range.map(key => charwise.encode(key))
      return await this.applyQuery(await this.indexByKey.root.range(...encodedRange), query)
    } else if (query.key) {
      const encodedKey = charwise.encode(query.key)
      return await this.applyQuery(await this.indexByKey.root.get(encodedKey), query)
    } else {
      const { result, ...all } = await this.indexByKey.root.getAllEntries()
      return await this.applyQuery(
        { result: result.map(({ key: [k, id], value }) => ({ key: k, id, row: value })), ...all },
        query
      )
    }
  }

  /**
   * Update the DbIndex with the latest changes. Concurrent callers are
   * chained behind the in-flight update rather than racing it.
   * @private
   * @returns {Promise<void>}
   */
  async updateIndex (blocks) {
    // todo this could enqueue the request and give fresh ones to all second comers -- right now it gives out stale promises while working
    // what would it do in a world where all indexes provide a database snapshot to query?
    if (this.updateIndexPromise) {
      return this.updateIndexPromise.then(() => {
        this.updateIndexPromise = null
        return this.updateIndex(blocks)
      })
    }
    this.updateIndexPromise = this.innerUpdateIndex(blocks)
    this.updateIndexPromise.finally(() => {
      this.updateIndexPromise = null
    })
    return this.updateIndexPromise
  }

  // Apply all database changes since dbHead to both trees inside a single
  // blockstore transaction.
  async innerUpdateIndex (inBlocks) {
    // todo remove this hack in 0.7.0
    if (ALWAYS_REBUILD) {
      this.indexById = { root: null, cid: null }
      this.indexByKey = { root: null, cid: null }
      this.dbHead = null
    }
    const result = await this.database.changesSince(this.dbHead) // {key, value, del}

    if (result.rows.length === 0) {
      // nothing new: just advance the clock
      this.dbHead = result.clock
      return
    }
    const didT = await doTransaction('updateIndex', inBlocks, async blocks => {
      let oldIndexEntries = []
      let removeByIdIndexEntries = []
      await loadIndex(blocks, this.indexById, idIndexOpts)
      await loadIndex(blocks, this.indexByKey, dbIndexOpts)
      if (this.indexById.root) {
        // look up the previous index keys of every changed doc so their
        // stale entries can be deleted from both trees
        const oldChangeEntries = await this.indexById.root.getMany(result.rows.map(({ key }) => key))
        oldIndexEntries = oldChangeEntries.result.map(key => ({ key, del: true }))
        removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }))
      }
      if (!this.mapFn) {
        throw new Error(
          'No live map function installed for index, cannot update. Make sure your index definition runs before any queries.' +
            (this.mapFnString ? ' Your code should match the stored map function source:\n' + this.mapFnString : '')
        )
      }
      const indexEntries = indexEntriesForChanges(result.rows, this.mapFn)
      const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }))
      this.indexById = await bulkIndex(
        blocks,
        this.indexById,
        removeByIdIndexEntries.concat(byIdIndexEntries),
        idIndexOpts
      )
      this.indexByKey = await bulkIndex(blocks, this.indexByKey, oldIndexEntries.concat(indexEntries), dbIndexOpts)
      this.dbHead = result.clock
    }, false /* don't sync transaction -- todo move this flag to database.indexBlocks, and concept of sync channels */)
    return didT
  }
}
|
372
|
-
|
373
|
-
/**
|
374
|
-
* Update the DbIndex with the given entries
|
375
|
-
* @param {import('./blockstore.js').Blockstore} blocks
|
376
|
-
* @param {{root, cid}} inIndex
|
377
|
-
* @param {DbIndexEntry[]} indexEntries
|
378
|
-
* @private
|
379
|
-
*/
|
380
|
-
/**
 * Update a prolly-tree index with the given entries, creating the tree if
 * it does not exist yet.
 * @param {import('./blockstore.js').Blockstore} blocks
 * @param {{root, cid}} inIndex - current tree handle (root may be unloaded)
 * @param {DbIndexEntry[]} indexEntries - entries to upsert/delete
 * @param {Object} opts - prolly-tree options (codec, hasher, compare, ...)
 * @returns {Promise<{root, cid}>} the updated tree handle
 * @private
 */
async function bulkIndex (blocks, inIndex, indexEntries, opts) {
  if (!indexEntries.length) return inIndex
  const putBlock = blocks.put.bind(blocks)
  const { getBlock } = makeGetBlock(blocks)
  let returnRootBlock
  let returnNode
  if (!inIndex.root) {
    const cid = inIndex.cid
    if (!cid) {
      // no existing tree: create a fresh one from the entries
      for await (const node of await create({ get: getBlock, list: indexEntries, ...opts })) {
        const block = await node.block
        await putBlock(block.cid, block.bytes)
        returnRootBlock = block
        returnNode = node
      }
      return { root: returnNode, cid: returnRootBlock.cid }
    }
    // fix: load with the caller's opts rather than hardcoded dbIndexOpts —
    // the by-id tree uses a different comparator (idIndexOpts), and loading
    // it with the by-key options would apply the wrong ordering.
    inIndex.root = await load({ cid, get: getBlock, ...opts })
  }
  const { root, blocks: newBlocks } = await inIndex.root.bulk(indexEntries)
  if (root) {
    returnRootBlock = await root.block
    returnNode = root
    for await (const block of newBlocks) {
      await putBlock(block.cid, block.bytes)
    }
    await putBlock(returnRootBlock.cid, returnRootBlock.bytes)
    return { root: returnNode, cid: returnRootBlock.cid }
  } else {
    // the tree became empty after deletes
    return { root: null, cid: null }
  }
}
|
413
|
-
|
414
|
-
/**
 * Ensure an index tree is loaded into memory. No-op if the root is already
 * present; returns null when there is no persisted tree to load.
 * @param {import('./blockstore.js').Blockstore} blocks
 * @param {{root, cid}} index - tree handle, mutated in place
 * @param {Object} indexOpts - prolly-tree options for this tree
 * @returns {Promise<Object|null>} the loaded root (or null)
 * @private
 */
async function loadIndex (blocks, index, indexOpts) {
  if (index.root) return index.root
  const cid = index.cid
  if (!cid) return null
  const { getBlock } = makeGetBlock(blocks)
  index.root = await load({ cid, get: getBlock, ...indexOpts })
  return index.root
}
|
package/src/encrypted-block.js
DELETED
@@ -1,57 +0,0 @@
|
|
1
|
-
// from https://github.com/mikeal/encrypted-block
|
2
|
-
import randomBytes from 'randombytes'
|
3
|
-
import aes from 'js-crypto-aes'
|
4
|
-
import { CID } from 'multiformats'
|
5
|
-
|
6
|
-
// Encode a number as 4 little-endian bytes (coerced to uint32).
const enc32 = value => {
  const buff = new Uint8Array(4)
  new DataView(buff.buffer).setUint32(0, (+value) >>> 0, true)
  return buff
}
|
15
|
-
|
16
|
-
// Read an unsigned 32-bit little-endian integer from the LAST 4 bytes
// of the buffer (callers pass a 4-byte subarray, making the offset 0).
const readUInt32LE = (buffer) => {
  const offset = buffer.byteLength - 4
  let value = 0
  for (let i = 0; i < 4; i++) {
    value += buffer[offset + i] * 2 ** (8 * i)
  }
  return value
}
|
23
|
-
|
24
|
-
// Wire format: a 12-byte AES-GCM iv immediately followed by the ciphertext.
const IV_LENGTH = 12
const encode = ({ iv, bytes }) => concat([iv, bytes])
const decode = bytes => {
  const iv = bytes.subarray(0, IV_LENGTH)
  return { iv, bytes: bytes.slice(IV_LENGTH) }
}
|
30
|
-
|
31
|
-
// Multicodec code for this encrypted-block format.
const code = 0x300000 + 1337

/**
 * Concatenate byte arrays into one Uint8Array.
 * Fix: the previous map/spread/flat implementation materialized every byte
 * as an array element (quadratic allocation); write into a preallocated
 * buffer with set() instead — same result, linear time.
 */
const concat = buffers => {
  const total = buffers.reduce((len, b) => len + b.byteLength, 0)
  const out = new Uint8Array(total)
  let offset = 0
  for (const b of buffers) {
    out.set(b, offset)
    offset += b.byteLength
  }
  return out
}
|
34
|
-
|
35
|
-
// Decrypt an encoded value ({iv, bytes}) with the given key. The recovered
// plaintext layout is [4-byte LE cid length][cid bytes][block bytes], so
// both the block and its CID come back out.
const decrypt = async ({ key, value }) => {
  let { bytes, iv } = value
  bytes = await aes.decrypt(bytes, key, { name: 'AES-GCM', iv, tagLength: 16 })
  const len = readUInt32LE(bytes.subarray(0, 4))
  const cid = CID.decode(bytes.subarray(4, 4 + len))
  bytes = bytes.subarray(4 + len)
  return { cid, bytes }
}
// Encrypt a block: length-frame its CID in front of the payload so decrypt
// can recover both, then AES-GCM the whole message under a fresh random
// 12-byte iv. Returns { value: { bytes, iv } } for encode().
const encrypt = async ({ key, cid, bytes }) => {
  const len = enc32(cid.bytes.byteLength)
  const iv = randomBytes(12)
  const msg = concat([len, cid.bytes, bytes])
  bytes = await aes.encrypt(msg, key, { name: 'AES-GCM', iv, tagLength: 16 })
  return { value: { bytes, iv } }
}
|
50
|
-
|
51
|
-
// Bind a key to the encrypt/decrypt pair, yielding a per-key codec.
const crypto = key => {
  const withKey = fn => opts => fn({ key, ...opts })
  return { encrypt: withKey(encrypt), decrypt: withKey(decrypt) }
}

const name = 'mikeal@encrypted-block:aes-gcm'
|
56
|
-
|
57
|
-
export { encode, decode, code, name, encrypt, decrypt, crypto }
|
package/src/fireproof.js
DELETED
@@ -1,98 +0,0 @@
|
|
1
|
-
import { Database, parseCID } from './database.js'
|
2
|
-
import { DbIndex as Index } from './db-index.js'
|
3
|
-
import { Sync } from './sync.js'
|
4
|
-
|
5
|
-
export { Index, Database, Sync }
|
6
|
-
|
7
|
-
export class Fireproof {
  /**
   * @function storage
   * @memberof Fireproof
   * Creates a new Fireproof instance with default storage settings.
   * Most apps should use this and not worry about the details.
   * @static
   * @returns {Database} - a new Fireproof instance
   */
  static storage = (name = null, opts = {}) => {
    return new Database(name, opts)
  }

  /**
   * Rehydrate a database's indexes from persisted header JSON. Prefers the
   * primary header's index definitions, falling back to the secondary.
   * @param {Object} primary - primary storage header (may be null)
   * @param {Object} secondary - secondary storage header (may be null)
   * @param {Database} database - instance to attach the indexes to
   * @returns {Database} the same database, for chaining
   */
  static fromJSON (primary, secondary, database) {
    const json = primary && primary.indexes ? primary : secondary
    if (json.indexes) {
      for (const {
        name,
        code,
        clock: { byId, byKey, db }
      } of json.indexes) {
        Index.fromJSON(database, {
          // CIDs were serialized as strings; parse them back
          clock: {
            byId: byId ? parseCID(byId) : null,
            byKey: byKey ? parseCID(byKey) : null,
            db: db && db.length > 0 ? db.map(c => parseCID(c)) : null
          },
          code,
          name
        })
      }
    }
    return database
  }

  /**
   * Create a copy of the database pinned at `clock` (or its current clock),
   * sharing the original's block storage.
   * @param {Database} database
   * @param {string[]} [clock] - optional clock (CID strings) to pin to
   * @returns {Database} the snapshot database
   */
  static snapshot (database, clock) {
    const definition = database.toJSON()
    definition.clock = database.clockToJSON()
    if (clock) {
      definition.clock = clock.map(c => parseCID(c))
      // pinning to an arbitrary clock invalidates the saved index clocks,
      // so the snapshot's indexes will rebuild from scratch
      definition.indexes.forEach(index => {
        index.clock.byId = null
        index.clock.byKey = null
        index.clock.db = null
      })
    }

    const withBlocks = new Database(database.name)
    withBlocks.blocks = database.blocks
    // the clock can only be installed once the new instance is ready;
    // note this is fire-and-forget, callers should await withBlocks.ready
    withBlocks.ready.then(() => {
      withBlocks.clock = definition.clock.map(c => parseCID(c))
    })

    const snappedDb = Fireproof.fromJSON(definition, null, withBlocks)
    ;[...database.indexes.values()].forEach(index => {
      // live map functions don't survive the JSON round-trip; carry them over
      snappedDb.indexes.get(index.mapFnString).mapFn = index.mapFn
    })
    return snappedDb
  }

  /**
   * Rewind (or fast-forward) the database in place to the given clock,
   * resetting all index state so indexes rebuild from the new clock.
   * @param {Database} database
   * @param {string[]} clock - clock (CID strings) to move to
   * @returns {Promise<Database>} the same database
   */
  static async zoom (database, clock) {
    ;[...database.indexes.values()].forEach(index => {
      index.indexById = { root: null, cid: null }
      index.indexByKey = { root: null, cid: null }
      index.dbHead = null
    })
    database.clock = clock.map(c => parseCID(c))
    await database.notifyReset() // hmm... indexes should listen to this? might be more complex than worth it. so far this is the only caller
    return database
  }
}
|
package/src/import.js
DELETED
@@ -1,34 +0,0 @@
|
|
1
|
-
import { createReadStream } from 'fs'
|
2
|
-
import { join } from 'path'
|
3
|
-
import { parse } from '@jsonlines/core'
|
4
|
-
import cargoQueue from 'async/cargoQueue.js'
|
5
|
-
|
6
|
-
// todo maybe this goes in a utils package for tree-shaking?
|
7
|
-
|
8
|
-
/**
 * Bulk-load a JSON-lines file (one document per line) into a database,
 * batching puts through a cargo queue.
 * Fixes: the `data` handler was needlessly async (its ignored promise hid
 * queue errors) and the completion promise used the deferred anti-pattern
 * (`let res` captured outside the executor); drain is now registered inside
 * a single returned Promise.
 * @param {Database} database - target database (must expose put())
 * @param {string} filename - path relative to process.cwd()
 * @returns {Promise<void>} resolves when the save queue drains
 */
async function loadData (database, filename) {
  const fullFilePath = join(process.cwd(), filename)
  const readableStream = createReadStream(fullFilePath)
  const parseStream = parse()
  readableStream.pipe(parseStream)

  const saveQueue = cargoQueue(async (tasks, callback) => {
    for (const t of tasks) {
      await database.put(t)
    }
    callback()
  })

  parseStream.on('data', data => {
    saveQueue.push(data)
  })

  return new Promise(resolve => {
    // NOTE(review): as before, this never resolves for an empty input file
    // (drain only fires after at least one task) — confirm callers' inputs.
    saveQueue.drain(() => resolve())
  })
}
|
33
|
-
|
34
|
-
export { loadData }
|
package/src/link.d.ts
DELETED
package/src/loader.js
DELETED
@@ -1,16 +0,0 @@
|
|
1
|
-
import { Browser } from './storage/browser.js'
|
2
|
-
import { Rest } from './storage/rest.js'
|
3
|
-
|
4
|
-
// Pick a storage backend for a database: an explicitly injected class wins,
// then the configured type, with browser storage as the default.
export const Loader = {
  appropriate: (name, config = {}) => {
    if (config.StorageClass) return new config.StorageClass(name, config)
    if (config.type === 'rest') return new Rest(name, config)
    return new Browser(name, config)
  }
}
|