@fireproof/core 0.8.0 → 0.10.1-dev
Sign up to get free protection for your applications and to get access to all the features.
- package/README.md +5 -184
- package/dist/fireproof.browser.js +18879 -0
- package/dist/fireproof.browser.js.map +7 -0
- package/dist/fireproof.cjs.js +9305 -0
- package/dist/fireproof.cjs.js.map +7 -0
- package/dist/fireproof.esm.js +9295 -0
- package/dist/fireproof.esm.js.map +7 -0
- package/package.json +57 -105
- package/dist/blockstore.js +0 -268
- package/dist/clock.js +0 -459
- package/dist/crypto.js +0 -63
- package/dist/database.js +0 -434
- package/dist/db-index.js +0 -403
- package/dist/encrypted-block.js +0 -48
- package/dist/fireproof.js +0 -84
- package/dist/import.js +0 -29
- package/dist/listener.js +0 -111
- package/dist/loader.js +0 -13
- package/dist/prolly.js +0 -405
- package/dist/remote.js +0 -102
- package/dist/sha1.js +0 -74
- package/dist/src/fireproof.d.ts +0 -472
- package/dist/src/fireproof.js +0 -81191
- package/dist/src/fireproof.js.map +0 -1
- package/dist/src/fireproof.mjs +0 -81186
- package/dist/src/fireproof.mjs.map +0 -1
- package/dist/storage/base.js +0 -426
- package/dist/storage/blocksToEncryptedCarBlock.js +0 -144
- package/dist/storage/browser.js +0 -62
- package/dist/storage/filesystem.js +0 -67
- package/dist/storage/rest.js +0 -57
- package/dist/storage/ucan.js +0 -0
- package/dist/storage/utils.js +0 -144
- package/dist/sync.js +0 -218
- package/dist/utils.js +0 -16
- package/dist/valet.js +0 -102
- package/src/blockstore.js +0 -283
- package/src/clock.js +0 -486
- package/src/crypto.js +0 -70
- package/src/database.js +0 -469
- package/src/db-index.js +0 -426
- package/src/encrypted-block.js +0 -57
- package/src/fireproof.js +0 -98
- package/src/import.js +0 -34
- package/src/link.d.ts +0 -3
- package/src/loader.js +0 -16
- package/src/prolly.js +0 -445
- package/src/remote.js +0 -113
- package/src/sha1.js +0 -83
- package/src/storage/base.js +0 -463
- package/src/storage/browser.js +0 -67
- package/src/storage/filesystem.js +0 -73
- package/src/storage/rest.js +0 -59
- package/src/storage/ucan.js +0 -0
- package/src/storage/utils.js +0 -152
- package/src/sync.js +0 -237
- package/src/valet.js +0 -105
package/dist/db-index.js
DELETED
@@ -1,403 +0,0 @@
|
|
1
|
-
// @ts-ignore
|
2
|
-
import { create, load } from 'prolly-trees/db-index';
|
3
|
-
// import { create, load } from '../../../../prolly-trees/src/db-index.js'
|
4
|
-
import { sha256 as hasher } from 'multiformats/hashes/sha2';
|
5
|
-
// @ts-ignore
|
6
|
-
import { nocache as cache } from 'prolly-trees/cache';
|
7
|
-
// @ts-ignore
|
8
|
-
import { bf, simpleCompare } from 'prolly-trees/utils';
|
9
|
-
import { makeGetBlock, visMerkleTree } from './prolly.js';
|
10
|
-
// eslint-disable-next-line no-unused-vars
|
11
|
-
import { Database, cidsToProof } from './database.js';
|
12
|
-
import * as codec from '@ipld/dag-cbor';
|
13
|
-
// import { create as createBlock } from 'multiformats/block'
|
14
|
-
import { doTransaction } from './blockstore.js';
|
15
|
-
// @ts-ignore
|
16
|
-
import charwise from 'charwise';
|
17
|
-
const ALWAYS_REBUILD = false; // todo: remove

/**
 * Total order for DbIndex entries: compare the emitted key first and fall
 * back to the document reference so equal keys sort stably by doc id.
 * @param {[any, any]} a - [key, ref] pair.
 * @param {[any, any]} b - [key, ref] pair.
 * @returns {number} negative, zero, or positive ordering value.
 */
const compare = (a, b) => {
  const [leftKey, leftRef] = a;
  const [rightKey, rightRef] = b;
  const keyOrder = simpleCompare(leftKey, rightKey);
  if (comp !== 0 && false) { /* unreachable */ }
  return keyOrder !== 0 ? keyOrder : refCompare(leftRef, rightRef);
};
|
26
|
-
/**
 * Order two document references, treating NaN/Infinity as range sentinels.
 * NaN on the left sorts first; Infinity on the left sorts last; NaN on the
 * right is rejected because it would make the ordering inconsistent.
 * @param {any} left
 * @param {any} right
 * @returns {number}
 * @throws {Error} when the right-hand ref is NaN.
 */
const refCompare = (left, right) => {
  if (Number.isNaN(left)) return -1;
  if (Number.isNaN(right)) throw new Error('ref may not be Infinity or NaN');
  if (left === Infinity) return 1; // need to test this on equal docids!
  return simpleCompare(left, right);
};
|
36
|
-
// Prolly-tree options for the by-key index: entries are [key, ref] pairs,
// so the pairwise `compare` above is used.
const dbIndexOpts = { cache, chunker: bf(30), codec, hasher, compare };
// Options for the by-id index: keys are plain doc ids, so simpleCompare
// suffices. Same chunker/codec/hasher so both trees hash consistently.
const idIndexOpts = { cache, chunker: bf(30), codec, hasher, compare: simpleCompare };
|
38
|
-
/** Reassemble a change row into a document, restoring its `_id` from the row key. */
const makeDoc = (row) => {
  const { key, value } = row;
  return { _id: key, ...value };
};
|
39
|
-
/**
 * JSDoc for the change row type.
 * @typedef {Object} ChangeEvent
 * @property {string} key - The key of the document.
 * @property {Object} value - The new value of the document.
 * @property {boolean} [del] - Is the row deleted?
 * @memberof DbIndex
 */

/**
 * JSDoc for the index row type.
 * @typedef {Object} DbIndexEntry
 * @property {string[]} key - The key for the DbIndex entry.
 * @property {Object} value - The value of the document.
 * @property {boolean} [del] - Is the row deleted?
 * @memberof DbIndex
 */

/**
 * Transforms a set of changes to DbIndex entries using a map function.
 * Deleted or empty rows are skipped. If the map function never calls `emit`
 * but returns a value, that return value is used as the emitted key.
 *
 * @param {ChangeEvent[]} changes
 * @param {Function} mapFn
 * @returns {DbIndexEntry[]} The DbIndex entries generated by the map function.
 * @private
 * @memberof DbIndex
 */
const indexEntriesForChanges = (changes, mapFn) => {
  const entries = [];
  for (const { key: _id, value, del } of changes) {
    // key is _id, value is the document
    if (del || !value) continue;
    let emitted = false;
    const returned = mapFn(makeDoc({ key: _id, value }), (emittedKey, emittedValue) => {
      emitted = true;
      if (typeof emittedKey === 'undefined') return;
      entries.push({
        key: [charwise.encode(emittedKey), _id],
        value: emittedValue || null
      });
    });
    if (!emitted && returned) {
      entries.push({
        key: [charwise.encode(returned), _id],
        value: null
      });
    }
  }
  return entries;
};
|
89
|
-
/**
 * Represents a DbIndex for a Fireproof database.
 *
 * @class DbIndex
 * @classdesc A DbIndex can be used to order and filter the documents in a Fireproof database.
 *
 * @param {Database} database - The Fireproof database instance to DbIndex.
 * @param {Function} mapFn - The map function to apply to each entry in the database.
 *
 */
export class DbIndex {
  /**
   * @param {Database} database - database whose changes feed this index.
   * @param {string|Function|null} name - index name, or (deprecated) the map function.
   * @param {Function|string} mapFn - map function or a plain field-name string.
   * @param {{byId, byKey, db}|null} clock - persisted index clock to resume from.
   * @param {{temporary?: boolean}} opts - pass temporary:true to skip registration.
   */
  constructor(database, name, mapFn, clock = null, opts = {}) {
    this.database = database;
    if (typeof name === 'function') {
      // app is using deprecated API (mapFn passed positionally first);
      // shift the arguments right. Remove in 0.7.
      opts = clock || {};
      clock = mapFn || null;
      mapFn = name;
      name = null;
    }
    this.applyMapFn(mapFn, name);
    // Two prolly-tree indexes: byId maps doc id -> emitted key (for removal
    // on update), byKey maps [key, id] -> value (for queries).
    this.indexById = { root: null, cid: null };
    this.indexByKey = { root: null, cid: null };
    this.dbHead = null; // database clock the index was last updated to
    if (clock) {
      // resume from persisted CIDs; roots are lazily loaded on first use
      this.indexById.cid = clock.byId;
      this.indexByKey.cid = clock.byKey;
      this.dbHead = clock.db;
    }
    this.instanceId = this.database.instanceId + `.DbIndex.${Math.random().toString(36).substring(2, 7)}`;
    this.updateIndexPromise = null; // in-flight update, used to serialize updateIndex calls
    if (!opts.temporary) {
      DbIndex.registerWithDatabase(this, this.database);
    }
  }

  /**
   * Install the map function. A plain alphanumeric string is treated as a
   * field name and compiled to an emit-that-field function; anything else is
   * used as-is. Also derives `includeDocsDefault` and the index name.
   * @param {Function|string} mapFn
   * @param {string|null} name
   */
  applyMapFn(mapFn, name) {
    if (typeof mapFn === 'string') {
      this.mapFnString = mapFn;
      // make a regex that matches strings that only have letters, numbers, and spaces
      const regex = /^[a-zA-Z0-9 ]+$/;
      // if the string matches the regex, make a function that returns the value at that key
      if (regex.test(mapFn)) {
        this.mapFn = (doc, emit) => {
          if (doc[mapFn])
            emit(doc[mapFn]);
        };
        this.includeDocsDefault = true;
      }
    }
    else {
      this.mapFn = mapFn;
      this.mapFnString = mapFn.toString();
    }
    // concise arrow functions ("doc => doc.field") default to includeDocs
    const matches = /=>\s*(.*)/.exec(this.mapFnString);
    this.includeDocsDefault = this.includeDocsDefault || (matches && matches.length > 0);
    this.name = name || this.makeName();
  }

  /**
   * Derive a human-readable index name from the map function source:
   * either its two-argument parameter list or, for a concise arrow
   * function, everything after the arrow. Falls back to the raw source.
   * @returns {string}
   */
  makeName() {
    const regex = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g;
    let matches = Array.from(this.mapFnString.matchAll(regex), match => match[1].trim());
    if (matches.length === 0) {
      matches = /=>\s*(.*)/.exec(this.mapFnString);
    }
    if (matches === null) {
      return this.mapFnString;
    }
    else {
      // it's a concise arrow function, match everything after the arrow
      return matches[1];
    }
  }

  /**
   * Register an index on its database, deduplicating by map-function source.
   * When an index with the same source already exists, the instance that has
   * a live mapFn keeps its code and adopts the other's clock state.
   */
  static registerWithDatabase(inIndex, database) {
    if (!database.indexes.has(inIndex.mapFnString)) {
      database.indexes.set(inIndex.mapFnString, inIndex);
    }
    else {
      // merge our inIndex code with the inIndex clock or vice versa
      const existingIndex = database.indexes.get(inIndex.mapFnString);
      // keep the code instance, discard the clock instance
      if (existingIndex.mapFn) {
        // this one also has other config
        existingIndex.dbHead = inIndex.dbHead;
        existingIndex.indexById.cid = inIndex.indexById.cid;
        existingIndex.indexByKey.cid = inIndex.indexByKey.cid;
      }
      else {
        inIndex.dbHead = existingIndex.dbHead;
        inIndex.indexById.cid = existingIndex.indexById.cid;
        inIndex.indexByKey.cid = existingIndex.indexByKey.cid;
        database.indexes.set(inIndex.mapFnString, inIndex);
      }
    }
  }

  /**
   * Serialize the index definition and clock (CIDs as strings) for headers.
   * @returns {{name: string, code: string, clock: {db, byId, byKey}}}
   */
  toJSON() {
    const indexJson = { name: this.name, code: this.mapFnString, clock: { db: null, byId: null, byKey: null } };
    indexJson.clock.db = this.dbHead?.map(cid => cid.toString());
    indexJson.clock.byId = this.indexById.cid?.toString();
    indexJson.clock.byKey = this.indexByKey.cid?.toString();
    return indexJson;
  }

  /**
   * Rehydrate an index from its serialized form (see toJSON).
   * @param {Database} database
   * @param {{code: string, clock: Object, name: string}} param1
   * @returns {DbIndex}
   */
  static fromJSON(database, { code, clock, name }) {
    // console.log('DbIndex.fromJSON', database.constructor.name, code, clock)
    return new DbIndex(database, name, code, clock);
  }

  // NOTE(review): visKeyTree renders indexById and visIdTree renders
  // indexByKey — the method names and the cids they use look swapped;
  // confirm intent before relying on these.
  async visKeyTree() {
    return await visMerkleTree(this.database.indexBlocks, this.indexById.cid);
  }

  async visIdTree() {
    return await visMerkleTree(this.database.indexBlocks, this.indexByKey.cid);
  }

  /**
   * JSDoc for Query type.
   * @typedef {Object} DbQuery
   * @property {string[]} [range] - The range to query.
   * @memberof DbIndex
   */

  /**
   * Query object can have {range}
   * @param {DbQuery} query - the query range to use
   * @param {boolean} update - set false to query the index as-is without refreshing it first.
   * @returns {Promise<{proof: {}, rows: Array<{id: string, key: string, value: any, doc?: any}>}>}
   * @memberof DbIndex
   * @instance
   */
  async query(query = {}, update = true) {
    // todo pass a root to query a snapshot
    update && (await this.updateIndex(this.database.indexBlocks));
    const response = await this.doIndexQuery(query);
    return {
      proof: { index: await cidsToProof(response.cids) },
      // keys were charwise-encoded on the way in; decode for the caller
      rows: response.result.map(({ id, key, row, doc }) => {
        return { id, key: charwise.decode(key), value: row, doc };
      })
    };
  }

  /**
   * Post-process raw index results: apply descending order, limit, and
   * (optionally) fetch the full document for each row.
   * @param {any} resp - raw prolly-tree query response ({result, ...}).
   * @param {any} query - query options (descending, limit, includeDocs).
   * @returns the response with its result array transformed.
   */
  async applyQuery(resp, query) {
    if (query.descending) {
      resp.result = resp.result.reverse();
    }
    if (query.limit) {
      resp.result = resp.result.slice(0, query.limit);
    }
    if (query.includeDocs) {
      resp.result = await Promise.all(resp.result.map(async (row) => {
        const doc = await this.database.get(row.id);
        return { ...row, doc };
      }));
    }
    return resp;
  }

  /**
   * Run a query against the by-key tree. Supports prefix, range, key, or a
   * full scan. Keys are charwise-encoded to match the stored encoding.
   * @param {DbQuery} query
   */
  async doIndexQuery(query = {}) {
    await loadIndex(this.database.indexBlocks, this.indexByKey, dbIndexOpts);
    if (!this.indexByKey.root)
      return { result: [] };
    if (query.includeDocs === undefined)
      query.includeDocs = this.includeDocsDefault;
    if (query.prefix) {
      // ensure prefix is an array
      if (!Array.isArray(query.prefix))
        query.prefix = [query.prefix];
      // NaN/Infinity bracket all possible doc-id refs under this key prefix
      const start = [...query.prefix, NaN];
      const end = [...query.prefix, Infinity];
      const prefixRange = [start, end].map(key => charwise.encode(key));
      return await this.applyQuery(await this.indexByKey.root.range(...prefixRange), query);
    }
    else if (query.range) {
      const encodedRange = query.range.map(key => charwise.encode(key));
      return await this.applyQuery(await this.indexByKey.root.range(...encodedRange), query);
    }
    else if (query.key) {
      const encodedKey = charwise.encode(query.key);
      return await this.applyQuery(await this.indexByKey.root.get(encodedKey), query);
    }
    else {
      const { result, ...all } = await this.indexByKey.root.getAllEntries();
      return await this.applyQuery({ result: result.map(({ key: [k, id], value }) => ({ key: k, id, row: value })), ...all }, query);
    }
  }

  /**
   * Update the DbIndex with the latest changes. Concurrent callers are
   * serialized: a second caller waits for the in-flight update, then retries.
   * @private
   * @returns {Promise<void>}
   */
  async updateIndex(blocks) {
    // todo this could enqueue the request and give fresh ones to all second comers -- right now it gives out stale promises while working
    // what would it do in a world where all indexes provide a database snapshot to query?
    if (this.updateIndexPromise) {
      return this.updateIndexPromise.then(() => {
        this.updateIndexPromise = null;
        return this.updateIndex(blocks);
      });
    }
    this.updateIndexPromise = this.innerUpdateIndex(blocks);
    this.updateIndexPromise.finally(() => {
      this.updateIndexPromise = null;
    });
    return this.updateIndexPromise;
  }

  /**
   * Incrementally rebuild both trees from the database changes since dbHead:
   * look up each changed doc's previous index entries via the by-id tree,
   * mark them deleted, apply the new map results, then advance dbHead.
   * @param {import('./blockstore.js').Blockstore} inBlocks
   * @private
   */
  async innerUpdateIndex(inBlocks) {
    // todo remove this hack in 0.7.0
    if (ALWAYS_REBUILD) {
      this.indexById = { root: null, cid: null };
      this.indexByKey = { root: null, cid: null };
      this.dbHead = null;
    }
    const result = await this.database.changesSince(this.dbHead); // {key, value, del}
    if (result.rows.length === 0) {
      // nothing changed since last update; just advance the clock
      this.dbHead = result.clock;
      return;
    }
    const didT = await doTransaction('updateIndex', inBlocks, async (blocks) => {
      let oldIndexEntries = [];
      let removeByIdIndexEntries = [];
      await loadIndex(blocks, this.indexById, idIndexOpts);
      await loadIndex(blocks, this.indexByKey, dbIndexOpts);
      if (this.indexById.root) {
        // find the previous emitted keys for every changed doc so the stale
        // by-key entries can be tombstoned
        const oldChangeEntries = await this.indexById.root.getMany(result.rows.map(({ key }) => key));
        oldIndexEntries = oldChangeEntries.result.map(key => ({ key, del: true }));
        removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }));
      }
      if (!this.mapFn) {
        throw new Error('No live map function installed for index, cannot update. Make sure your index definition runs before any queries.' +
          (this.mapFnString ? ' Your code should match the stored map function source:\n' + this.mapFnString : ''));
      }
      const indexEntries = indexEntriesForChanges(result.rows, this.mapFn);
      const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }));
      this.indexById = await bulkIndex(blocks, this.indexById, removeByIdIndexEntries.concat(byIdIndexEntries), idIndexOpts);
      this.indexByKey = await bulkIndex(blocks, this.indexByKey, oldIndexEntries.concat(indexEntries), dbIndexOpts);
      this.dbHead = result.clock;
    }, false /* don't sync transaction -- todo move this flag to database.indexBlocks, and concept of sync channels */);
    return didT;
  }
}
|
349
|
-
/**
 * Update the DbIndex with the given entries.
 * Creates the prolly-tree if it does not exist yet, otherwise loads it and
 * applies the entries as a bulk mutation. All new blocks are persisted to
 * the blockstore before the new root is returned.
 *
 * @param {import('./blockstore.js').Blockstore} blocks
 * @param {{root, cid}} inIndex - current index state (root may be unloaded).
 * @param {DbIndexEntry[]} indexEntries - entries to apply (may include {del: true}).
 * @param {Object} opts - prolly-tree options (idIndexOpts or dbIndexOpts).
 * @returns {Promise<{root, cid}>} the new index state; {root: null, cid: null} if the tree became empty.
 * @private
 */
async function bulkIndex(blocks, inIndex, indexEntries, opts) {
  if (!indexEntries.length)
    return inIndex;
  const putBlock = blocks.put.bind(blocks);
  const { getBlock } = makeGetBlock(blocks);
  let returnRootBlock;
  let returnNode;
  if (!inIndex.root) {
    const cid = inIndex.cid;
    if (!cid) {
      // No existing tree: build a fresh one from the entries.
      for await (const node of await create({ get: getBlock, list: indexEntries, ...opts })) {
        const block = await node.block;
        await putBlock(block.cid, block.bytes);
        returnRootBlock = block;
        returnNode = node;
      }
      return { root: returnNode, cid: returnRootBlock.cid };
    }
    // BUGFIX: this previously spread `dbIndexOpts` unconditionally, so a
    // persisted by-id index was reloaded with the by-key compare function.
    // Use the caller-supplied opts, matching loadIndex below.
    inIndex.root = await load({ cid, get: getBlock, ...opts });
  }
  const { root, blocks: newBlocks } = await inIndex.root.bulk(indexEntries);
  if (root) {
    returnRootBlock = await root.block;
    returnNode = root;
    for await (const block of newBlocks) {
      await putBlock(block.cid, block.bytes);
    }
    await putBlock(returnRootBlock.cid, returnRootBlock.bytes);
    return { root: returnNode, cid: returnRootBlock.cid };
  }
  else {
    // the bulk removed every entry; the index is now empty
    return { root: null, cid: null };
  }
}
|
391
|
-
/**
 * Ensure an index's prolly-tree root is loaded from its persisted CID.
 * Returns the root, or null when there is neither a live root nor a CID.
 * @param {import('./blockstore.js').Blockstore} blocks
 * @param {{root, cid}} index
 * @param {Object} indexOpts - prolly-tree options matching how the tree was written.
 */
async function loadIndex(blocks, index, indexOpts) {
  if (index.root) return index.root;
  const { cid } = index;
  if (!cid) return null; // nothing persisted yet
  const { getBlock } = makeGetBlock(blocks);
  index.root = await load({ cid, get: getBlock, ...indexOpts });
  return index.root;
}
|
package/dist/encrypted-block.js
DELETED
@@ -1,48 +0,0 @@
|
|
1
|
-
// from https://github.com/mikeal/encrypted-block
|
2
|
-
import randomBytes from 'randombytes';
|
3
|
-
import aes from 'js-crypto-aes';
|
4
|
-
import { CID } from 'multiformats';
|
5
|
-
/**
 * Encode a number as 4 little-endian bytes (ToUint32 semantics).
 * @param {number} value
 * @returns {Uint8Array} 4-byte little-endian encoding.
 */
const enc32 = value => {
  const coerced = (+value) >>> 0;
  const out = new Uint8Array(4);
  new DataView(out.buffer).setUint32(0, coerced, true);
  return out;
};
|
14
|
-
/**
 * Read an unsigned 32-bit little-endian integer from the LAST 4 bytes of
 * the buffer (callers pass an exact 4-byte subarray).
 * @param {Uint8Array} buffer
 * @returns {number}
 */
const readUInt32LE = (buffer) => {
  const start = buffer.byteLength - 4;
  let value = buffer[start + 3];
  value = value * 256 + buffer[start + 2];
  value = value * 256 + buffer[start + 1];
  value = value * 256 + buffer[start];
  return value;
};
|
21
|
-
// Serialize an encrypted value as the 12-byte IV followed by the ciphertext.
const encode = ({ iv, bytes }) => concat([iv, bytes]);
|
22
|
-
/**
 * Split a serialized encrypted value back into its 12-byte IV and ciphertext.
 * Inverse of `encode`.
 * @param {Uint8Array} bytes
 * @returns {{iv: Uint8Array, bytes: Uint8Array}}
 */
const decode = bytes => {
  const iv = bytes.subarray(0, 12);
  const ciphertext = bytes.slice(12);
  return { iv, bytes: ciphertext };
};
|
27
|
-
// multicodec code for this encrypted-block format
const code = 0x300000 + 1337;

/**
 * Concatenate byte arrays into one Uint8Array.
 * The previous implementation spread every byte through a plain JS array
 * (`buffers.map(b => [...b]).flat()`), allocating per byte; this version
 * preallocates the output once and copies with `set`.
 * @param {Array<Uint8Array|number[]>} buffers
 * @returns {Uint8Array}
 */
const concat = buffers => {
  const total = buffers.reduce((sum, b) => sum + b.length, 0);
  const out = new Uint8Array(total);
  let offset = 0;
  for (const b of buffers) {
    out.set(b, offset);
    offset += b.length;
  }
  return out;
};
|
29
|
-
/**
 * AES-GCM decrypt an encrypted block and recover the embedded CID.
 * Layout of the plaintext: [4-byte LE CID length][CID bytes][block bytes].
 * @param {{key: Uint8Array, value: {iv: Uint8Array, bytes: Uint8Array}}} param0
 * @returns {Promise<{cid: CID, bytes: Uint8Array}>}
 */
const decrypt = async ({ key, value }) => {
  const { iv } = value;
  const plain = await aes.decrypt(value.bytes, key, { name: 'AES-GCM', iv, tagLength: 16 });
  const cidLength = readUInt32LE(plain.subarray(0, 4));
  const cid = CID.decode(plain.subarray(4, 4 + cidLength));
  const blockBytes = plain.subarray(4 + cidLength);
  return { cid, bytes: blockBytes };
};
|
37
|
-
/**
 * AES-GCM encrypt a block together with its CID.
 * Plaintext layout: [4-byte LE CID length][CID bytes][block bytes];
 * a fresh random 12-byte IV is generated per call.
 * @param {{key: Uint8Array, cid: CID, bytes: Uint8Array}} param0
 * @returns {Promise<{value: {bytes: Uint8Array, iv: Uint8Array}}>}
 */
const encrypt = async ({ key, cid, bytes }) => {
  const header = enc32(cid.bytes.byteLength);
  const iv = randomBytes(12);
  const plaintext = concat([header, cid.bytes, bytes]);
  const sealed = await aes.encrypt(plaintext, key, { name: 'AES-GCM', iv, tagLength: 16 });
  return { value: { bytes: sealed, iv } };
};
|
44
|
-
const crypto = key => {
|
45
|
-
return { encrypt: opts => encrypt({ key, ...opts }), decrypt: opts => decrypt({ key, ...opts }) };
|
46
|
-
};
|
47
|
-
const name = 'mikeal@encrypted-block:aes-gcm';
|
48
|
-
export { encode, decode, code, name, encrypt, decrypt, crypto };
|
package/dist/fireproof.js
DELETED
@@ -1,84 +0,0 @@
|
|
1
|
-
import { Database, parseCID } from './database.js';
|
2
|
-
import { DbIndex as Index } from './db-index.js';
|
3
|
-
import { Sync } from './sync.js';
|
4
|
-
export { Index, Database, Sync };
|
5
|
-
class Fireproof {
  /**
   * @function storage
   * @memberof Fireproof
   * Creates a new Fireproof instance with default storage settings.
   * Most apps should use this and not worry about the details.
   * @static
   * @param {string|null} name - database name.
   * @param {Object} opts - storage options, passed through to Database.
   * @returns {Database} - a new Fireproof instance
   */
  static storage = (name = null, opts = {}) => {
    return new Database(name, opts);
  };

  // static fromConfig (name, primary, secondary, opts = {}) {
  //   console.log('fromConfig', name, primary, secondary, opts)
  //   let clock = []
  //   if (primary && primary.clock) {
  //     clock = clock.concat(primary.clock)
  //   }
  //   if (secondary && secondary.clock) {
  //     clock = clock.concat(secondary.clock)
  //   }
  //   const mergedClock = [...new Set(clock)].map(c => parseCID(c))
  //   opts.primaryHeader = primary
  //   opts.secondaryHeader = secondary
  //   opts.index = primary ? primary.index : {}
  //   const fp = new Database(name, mergedClock, opts)
  //   return Fireproof.fromJSON(primary, secondary, fp)
  // }

  /**
   * Rehydrate any serialized indexes from a stored header onto the database.
   * Prefers the primary header when it carries indexes, else the secondary.
   * @param {Object} primary - primary storage header (may be null).
   * @param {Object} secondary - secondary storage header (may be null).
   * @param {Database} database - database to attach the indexes to.
   * @returns {Database} the same database, for chaining.
   */
  static fromJSON(primary, secondary, database) {
    const json = primary && primary.indexes ? primary : secondary;
    if (json.indexes) {
      for (const { name, code, clock: { byId, byKey, db } } of json.indexes) {
        Index.fromJSON(database, {
          clock: {
            byId: byId ? parseCID(byId) : null,
            byKey: byKey ? parseCID(byKey) : null,
            db: db && db.length > 0 ? db.map(c => parseCID(c)) : null
          },
          code,
          name
        });
      }
    }
    return database;
  }

  /**
   * Create a read-only copy of the database pinned at a clock, sharing the
   * original's block storage. Passing a clock resets the copied indexes so
   * they rebuild against the snapshot head.
   * @param {Database} database
   * @param {string[]} [clock] - clock CIDs to pin to; defaults to the current clock.
   * @returns {Database}
   */
  static snapshot(database, clock) {
    const definition = database.toJSON();
    definition.clock = database.clockToJSON();
    if (clock) {
      definition.clock = clock.map(c => parseCID(c));
      // index clocks are from a different head; force a rebuild
      definition.indexes.forEach(index => {
        index.clock.byId = null;
        index.clock.byKey = null;
        index.clock.db = null;
      });
    }
    const withBlocks = new Database(database.name);
    // share block storage with the live database; only the clock differs
    withBlocks.blocks = database.blocks;
    // NOTE(review): clock is assigned asynchronously once `ready` resolves;
    // reads issued before then may see the default clock — confirm callers
    // await readiness.
    withBlocks.ready.then(() => {
      withBlocks.clock = definition.clock.map(c => parseCID(c));
    });
    const snappedDb = Fireproof.fromJSON(definition, null, withBlocks);
    // carry the live map functions over, since fromJSON only has source strings
    ;
    [...database.indexes.values()].forEach(index => {
      snappedDb.indexes.get(index.mapFnString).mapFn = index.mapFn;
    });
    return snappedDb;
  }

  /**
   * Move an existing database (in place) to a different clock, resetting all
   * index state so it rebuilds from the new head.
   * @param {Database} database
   * @param {string[]} clock - clock CIDs to move to.
   * @returns {Promise<Database>} the same database.
   */
  static async zoom(database, clock) {
    // leading semicolon guards the line-starting `[` against ASI joining it
    // to the previous statement
    ;
    [...database.indexes.values()].forEach(index => {
      index.indexById = { root: null, cid: null };
      index.indexByKey = { root: null, cid: null };
      index.dbHead = null;
    });
    database.clock = clock.map(c => parseCID(c));
    await database.notifyReset(); // hmm... indexes should listen to this? might be more complex than worth it. so far this is the only caller
    return database;
  }
}
export { Fireproof };
|
package/dist/import.js
DELETED
@@ -1,29 +0,0 @@
|
|
1
|
-
import { createReadStream } from 'fs';
|
2
|
-
import { join } from 'path';
|
3
|
-
import { parse } from '@jsonlines/core';
|
4
|
-
import cargoQueue from 'async/cargoQueue.js';
|
5
|
-
// todo maybe this goes in a utils package for tree-shaking?

/**
 * Stream a JSON-lines file into the database, one `put` per line, batched
 * through a cargo queue.
 *
 * Fixes over the previous version: stream/parse errors now reject the
 * returned promise (before, an unreadable or malformed file left the promise
 * pending forever), and the deferred-resolve `new Promise` pattern with an
 * unused `reject` is gone.
 *
 * @param {Database} database - target database (must expose async put()).
 * @param {string} filename - path relative to process.cwd().
 * @returns {Promise<void>} resolves when the queue drains after the last row.
 *   NOTE(review): as before, a file that yields zero rows never drains the
 *   queue — confirm whether empty inputs need handling.
 */
async function loadData(database, filename) {
  const fullFilePath = join(process.cwd(), filename);
  const readableStream = createReadStream(fullFilePath);
  const parseStream = parse();
  readableStream.pipe(parseStream);
  const saveQueue = cargoQueue(async (tasks, callback) => {
    for (const t of tasks) {
      await database.put(t);
    }
    callback();
  });
  parseStream.on('data', (data) => {
    saveQueue.push(data);
  });
  return new Promise((resolve, reject) => {
    readableStream.on('error', reject);
    parseStream.on('error', reject);
    saveQueue.drain(() => {
      resolve();
    });
  });
}
export { loadData };
|
package/dist/listener.js
DELETED
@@ -1,111 +0,0 @@
|
|
1
|
-
/**
 * A Fireproof database Listener allows you to react to events in the database.
 *
 * @class Listener
 * @classdesc A listener attaches to a Fireproof database and runs a routing function on each change, sending the results to subscribers.
 *
 * @param {import('./database.js').Database} database - The Database instance to listen to.
 * @param {Function} routingFn - The routing function to apply to each entry in the database.
 */
// import { ChangeEvent } from './db-index'
/**
 * @deprecated since version 0.7.0
 */
export class Listener {
  // topic -> array of subscriber callbacks.
  // NOTE(review): "subcribers" is misspelled but is a public field name;
  // renaming it would break external consumers, so it is kept as-is.
  subcribers = new Map();
  // unsubscribe function returned by database.registerListener
  doStopListening = null;

  /**
   * @param {import('./database.js').Database} database
   * @param {(_: any, emit: any) => void} routingFn - defaults to routing every
   *   change to the '*' topic.
   */
  constructor(database, routingFn = function (/** @type {any} */ _, /** @type {(arg0: string) => void} */ emit) {
    emit('*');
  }) {
    this.database = database;
    this.doStopListening = database.registerListener((/** @type {any} */ changes) => this.onChanges(changes));
    /**
     * The routing function to apply to each entry in the database.
     * @type {Function}
     */
    this.routingFn = routingFn;
    this.dbHead = null;
  }

  /**
   * Subscribe to a topic emitted by the routing function.
   * @param {string} topic - The topic to subscribe to.
   * @param {Function} subscriber - The function to call when the topic is emitted.
   * @returns {Function} A function to unsubscribe from the topic.
   * @memberof Listener
   * @instance
   * @param {any} [since] - clock to flush from on launch, pass null for all;
   *   leave undefined to receive only future changes.
   */
  on(topic, subscriber, since = undefined) {
    const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
    listOfTopicSubscribers.push(subscriber);
    if (typeof since !== 'undefined') {
      // replay historical changes routed to this topic before live ones arrive
      this.database.changesSince(since).then(({ rows: changes }) => {
        const keys = topicsForChanges(changes, this.routingFn).get(topic);
        if (keys)
          keys.forEach((/** @type {any} */ key) => subscriber(key));
      });
    }
    return () => {
      const index = listOfTopicSubscribers.indexOf(subscriber);
      if (index > -1)
        listOfTopicSubscribers.splice(index, 1);
    };
  }

  /**
   * @typedef {import('./db-index').ChangeEvent} ChangeEvent
   */
  /**
   * Route a batch of changes to topic subscribers. A non-array payload is
   * broadcast unrouted to every subscriber of every topic.
   * @param {ChangeEvent[]} changes
   */
  onChanges(changes) {
    if (Array.isArray(changes)) {
      const seenTopics = topicsForChanges(changes, this.routingFn);
      for (const [topic, keys] of seenTopics) {
        const listOfTopicSubscribers = getTopicList(this.subcribers, topic);
        listOfTopicSubscribers.forEach((/** @type {(arg0: any) => any} */ subscriber) => keys.forEach((/** @type {any} */ key) => subscriber(key)));
      }
    }
    else {
      // non-arrays go to all subscribers
      for (const [, listOfTopicSubscribers] of this.subcribers) {
        listOfTopicSubscribers.forEach((/** @type {(arg0: any) => any} */ subscriber) => subscriber(changes));
      }
    }
  }
}
|
80
|
-
/**
 * Get (or lazily create) the subscriber list for a topic.
 * @param {Map<any, any>} subscribersMap
 * @param {string} name - topic name.
 * @returns {Array} the mutable list stored in the map.
 */
function getTopicList(subscribersMap, name) {
  if (!subscribersMap.has(name)) {
    subscribersMap.set(name, []);
  }
  return subscribersMap.get(name);
}
|
92
|
-
/**
 * Transforms a set of changes to events using an emitter function.
 * Deleted or empty rows are routed as `{ _deleted: true }` documents.
 *
 * @param {ChangeEvent[]} changes
 * @param {Function} routingFn
 * @returns {Map<string,string[]>} topic -> keys emitted by the routing function.
 * @private
 */
const topicsForChanges = (changes, routingFn) => {
  const seenTopics = new Map();
  for (let { key, value, del } of changes) {
    if (del || !value) {
      value = { _deleted: true };
    }
    routingFn({ _id: key, ...value }, (/** @type {any} */ topic) => {
      getTopicList(seenTopics, topic).push(key);
    });
  }
  return seenTopics;
};
|
package/dist/loader.js
DELETED
@@ -1,13 +0,0 @@
|
|
1
|
-
import { Browser } from './storage/browser.js';
|
2
|
-
import { Rest } from './storage/rest.js';
|
3
|
-
/**
 * Select a storage backend for a database by configuration.
 * Precedence: an explicit `config.StorageClass`, then `type: 'rest'`,
 * otherwise the Browser backend.
 */
export const Loader = {
  appropriate: (name, config = {}) => {
    const { StorageClass, type } = config;
    if (StorageClass) {
      return new StorageClass(name, config);
    }
    if (type === 'rest') {
      return new Rest(name, config);
    }
    return new Browser(name, config);
  }
};
|