@fireproof/core 0.5.9 → 0.5.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/hooks/use-fireproof.js +149 -0
- package/dist/src/blockstore.js +259 -0
- package/dist/src/clock.js +385 -0
- package/dist/src/crypto.js +59 -0
- package/dist/src/database.js +379 -0
- package/dist/src/db-index.js +387 -0
- package/dist/src/fireproof.d.ts +25 -5
- package/dist/src/fireproof.js +272 -163
- package/dist/src/fireproof.js.map +1 -1
- package/dist/src/fireproof.mjs +272 -163
- package/dist/src/fireproof.mjs.map +1 -1
- package/dist/src/link.js +1 -0
- package/dist/src/listener.js +112 -0
- package/dist/src/prolly.js +360 -0
- package/dist/src/sha1.js +73 -0
- package/dist/src/sync.js +198 -0
- package/dist/src/utils.js +16 -0
- package/dist/src/valet.js +291 -0
- package/hooks/use-fireproof.js +77 -14
- package/package.json +2 -1
- package/src/database.js +15 -4
- package/src/db-index.js +107 -44
- package/src/listener.js +3 -1
package/dist/src/sync.js
ADDED
@@ -0,0 +1,198 @@
+import SimplePeer from 'simple-peer';
+import { parseCID } from './database.js';
+import { decodeEventBlock } from './clock.js';
+import { blocksToCarBlock, blocksToEncryptedCarBlock } from './valet.js';
+import { CarReader } from '@ipld/car';
+/**
+ * @typedef {import('./database.js').Database} Database
+ */
+export class Sync {
+    PeerClass;
+    database;
+    pushBacklog;
+    pushBacklogResolve;
+    pushBacklogReject;
+    peer;
+    /**
+     * @param {Database} database
+     * @param {typeof SimplePeer} [PeerClass]
+     * @memberof Sync
+     * @static
+     */
+    constructor(database, PeerClass = SimplePeer) {
+        this.database = database;
+        this.database.blocks.syncs.add(this); // should this happen during setup?
+        this.PeerClass = PeerClass;
+        this.pushBacklog = new Promise((resolve, reject) => {
+            this.pushBacklogResolve = resolve;
+            this.pushBacklogReject = reject;
+        });
+        // this.pushBacklog.then(() => {
+        //   // console.log('sync backlog resolved')
+        //   this.database.notifyReset()
+        // })
+    }
+    async offer() {
+        return this.setupPeer(true);
+    }
+    async accept(base64offer) {
+        const offer = JSON.parse(atob(base64offer));
+        const p = this.setupPeer(false);
+        this.peer.signal(offer);
+        return p;
+    }
+    connect(base64accept) {
+        const accept = JSON.parse(atob(base64accept));
+        this.peer.signal(accept);
+    }
+    async setupPeer(initiator = false) {
+        this.peer = new this.PeerClass({
+            initiator,
+            trickle: false
+        });
+        this.peer.on('connect', () => this.startSync());
+        this.peer.on('data', data => this.gotData(data));
+        const p = new Promise((resolve, reject) => {
+            this.peer.on('signal', resolve);
+            this.peer.on('error', reject);
+        });
+        return p.then(signal => btoa(JSON.stringify(signal)));
+    }
+    async backlog() {
+        return this.pushBacklog;
+    }
+    async gotData(data) {
+        // console.log('got data', data.toString())
+        let reader = null;
+        try {
+            reader = await CarReader.fromBytes(data);
+        }
+        catch (e) {
+            // console.log('not a car', data.toString())
+        }
+        if (reader) {
+            const blz = new Set();
+            for await (const block of reader.blocks()) {
+                blz.add(block);
+            }
+            const roots = await reader.getRoots();
+            // console.log(
+            //   'got car',
+            //   roots.map(c => c.toString()),
+            //   this.database.clock.map(c => c.toString())
+            // )
+            // console.log(
+            //   'got blocks',
+            //   [...blz].map(({ cid }) => cid.toString())
+            // )
+            // @ts-ignore
+            reader.entries = reader.blocks;
+            await this.database.blocks.commit({
+                label: 'sync',
+                entries: () => [...blz],
+                get: async (cid) => await reader.get(cid),
+                lastCid: [...blz][0].cid // doesn't matter
+            }, false);
+            // first arg could be the roots parents?
+            // get the roots parents
+            const parents = await Promise.all(roots.map(async (cid) => {
+                const rbl = await reader.get(cid);
+                if (!rbl) {
+                    console.log('missing root block', cid.toString(), reader);
+                    throw new Error('missing root block');
+                }
+                const block = await decodeEventBlock(rbl.bytes);
+                return block.value.parents;
+            }));
+            this.database.applyClock(parents.flat(), roots);
+            this.database.notifyReset();
+            // console.log('after', this.database.clockToJSON())
+            this.pushBacklogResolve({ ok: true });
+        }
+        else {
+            // data is a json string, parse it
+            const message = JSON.parse(data.toString());
+            // console.log('got message', message)
+            if (message.ok) {
+                this.pushBacklogResolve({ ok: true });
+            }
+            else if (message.clock) {
+                const reqCidDiff = message;
+                // this might be a CID diff
+                console.log('got diff', reqCidDiff);
+                const carBlock = await Sync.makeCar(this.database, null, reqCidDiff.cids);
+                if (!carBlock) {
+                    // we are full synced
+                    // console.log('we are full synced')
+                    this.peer.send(JSON.stringify({ ok: true }));
+                    // this.pushBacklogResolve({ ok: true })
+                }
+                else {
+                    // console.log('do send', carBlock.bytes.length)
+                    this.peer.send(carBlock.bytes);
+                    // this.pushBacklogResolve({ ok: true })
+                }
+            }
+        }
+    }
+    async sendUpdate(blockstore) {
+        // console.log('send update from', this.database.instanceId)
+        // todo should send updates since last sync
+        const newCar = await blocksToCarBlock(blockstore.lastCid, blockstore);
+        this.peer.send(newCar.bytes);
+    }
+    async startSync() {
+        // console.log('start sync', this.peer.initiator)
+        const allCIDs = await this.database.allStoredCIDs();
+        // console.log('allCIDs', allCIDs)
+        const reqCidDiff = {
+            clock: this.database.clockToJSON(),
+            cids: allCIDs.map(cid => cid.toString())
+        };
+        // console.log('send diff', reqCidDiff)
+        this.peer.send(JSON.stringify(reqCidDiff));
+    }
+    // get all the cids
+    // tell valet to make a file
+    /**
+     * @param {import("./database.js").Database} database
+     * @param {string} key
+     */
+    static async makeCar(database, key, skip = []) {
+        const allCIDs = await database.allCIDs();
+        const blocks = database.blocks;
+        const rootCIDs = database.clock;
+        const syncCIDs = [...new Set([...rootCIDs, ...allCIDs])].filter(cid => !skip.includes(cid.toString()));
+        // console.log(
+        //   'makeCar',
+        //   rootCIDs.map(c => c.toString()),
+        //   syncCIDs.map(c => c.toString()),
+        //   allCIDs.map(c => c.toString())
+        // )
+        if (syncCIDs.length === 0) {
+            return null;
+        }
+        if (typeof key === 'undefined') {
+            key = blocks.valet?.getKeyMaterial();
+        }
+        if (key) {
+            return blocksToEncryptedCarBlock(rootCIDs, {
+                entries: () => syncCIDs.map(cid => ({ cid })),
+                get: async (cid) => await blocks.get(cid)
+            }, key);
+        }
+        else {
+            const carBlocks = await Promise.all(syncCIDs.map(async (c) => {
+                const b = await blocks.get(c);
+                if (typeof b.cid === 'string') {
+                    b.cid = parseCID(b.cid);
+                }
+                return b;
+            }));
+            // console.log('carblock')
+            return blocksToCarBlock(rootCIDs, {
+                entries: () => carBlocks
+            });
+        }
+    }
+}
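
The new Sync class implements a symmetric WebRTC handshake over simple-peer: offer() creates an initiator and returns a base64 signal, accept() answers it, connect() feeds the answer back, and once the data channel opens each side sends its clock plus known CIDs so the peer can reply with a CAR file of whatever is missing. The sketch below shows one way that handshake could be driven from application code; it is an illustration based on this diff, not documented API, and it assumes Sync is importable from the published dist path and that the two databases normally live in separate browsers, with the base64 strings copied between them out of band.

// Hedged sketch: pairing two databases with the Sync class added in this release.
// Assumptions: the dist/src/sync.js path is importable, and this runs in an async ES module.
import { Fireproof } from '@fireproof/core'
import { Sync } from '@fireproof/core/dist/src/sync.js' // adjust if the package re-exports Sync elsewhere

const databaseA = Fireproof.storage('my-app') // in practice these live in two different browsers/tabs
const databaseB = Fireproof.storage('my-app')

const syncA = new Sync(databaseA) // will be the initiator
const syncB = new Sync(databaseB)

const offer = await syncA.offer()         // base64-encoded WebRTC offer signal
const answer = await syncB.accept(offer)  // base64-encoded answer signal
syncA.connect(answer)                     // completes the handshake; 'connect' fires startSync() on both sides

await Promise.all([syncA.backlog(), syncB.backlog()]) // each resolves { ok: true } after the first sync round
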
package/dist/src/utils.js
ADDED
@@ -0,0 +1,16 @@
+/* global localStorage */
+let storageSupported = false;
+try {
+    storageSupported = window.localStorage && true;
+}
+catch (e) { }
+export function localGet(key) {
+    if (storageSupported) {
+        return localStorage && localStorage.getItem(key);
+    }
+}
+export function localSet(key, value) {
+    if (storageSupported) {
+        return localStorage && localStorage.setItem(key, value);
+    }
+}
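
These helpers wrap localStorage behind a capability check so they can be imported in Node or other window-less environments without throwing; when storage is unavailable they simply return undefined. A minimal usage sketch (the key name here is made up):

import { localGet, localSet } from './utils.js'

localSet('fp.lastDatabaseName', 'my-app')         // silently a no-op when localStorage is missing
const lastName = localGet('fp.lastDatabaseName')  // undefined outside the browser
console.log(lastName || 'no saved database name')
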
package/dist/src/valet.js
ADDED
@@ -0,0 +1,291 @@
+import { CarReader } from '@ipld/car';
+import { CID } from 'multiformats/cid';
+import { sha256 } from 'multiformats/hashes/sha2';
+import * as CBW from '@ipld/car/buffer-writer';
+import * as raw from 'multiformats/codecs/raw';
+import * as Block from 'multiformats/block';
+import * as dagcbor from '@ipld/dag-cbor';
+import { openDB } from 'idb';
+import cargoQueue from 'async/cargoQueue.js';
+// @ts-ignore
+import { bf } from 'prolly-trees/utils';
+// @ts-ignore
+import { nocache as cache } from 'prolly-trees/cache';
+import { encrypt, decrypt } from './crypto.js';
+import { Buffer } from 'buffer';
+// @ts-ignore
+import * as codec from 'encrypted-block';
+import { rawSha1 as sha1sync } from './sha1.js';
+const chunker = bf(30);
+const NO_ENCRYPT = typeof process !== 'undefined' && !!process.env?.NO_ENCRYPT;
+// ? process.env.NO_ENCRYPT : import.meta && import.meta.env.VITE_NO_ENCRYPT
+export class Valet {
+    idb = null;
+    name = null;
+    uploadQueue = null;
+    alreadyEnqueued = new Set();
+    keyMaterial = null;
+    keyId = 'null';
+    /**
+     * Function installed by the database to upload car files
+     * @type {null|function(string, Uint8Array):Promise<void>}
+     */
+    uploadFunction = null;
+    constructor(name = 'default', keyMaterial) {
+        this.name = name;
+        this.setKeyMaterial(keyMaterial);
+        this.uploadQueue = cargoQueue(async (tasks, callback) => {
+            console.log('queue worker', tasks.length, tasks.reduce((acc, t) => acc + t.value.length, 0));
+            if (this.uploadFunction) {
+                // todo we can coalesce these into a single car file
+                return await this.withDB(async (db) => {
+                    for (const task of tasks) {
+                        await this.uploadFunction(task.carCid, task.value);
+                        // update the indexedb to mark this car as no longer pending
+                        const carMeta = await db.get('cidToCar', task.carCid);
+                        delete carMeta.pending;
+                        await db.put('cidToCar', carMeta);
+                    }
+                });
+            }
+            callback();
+        });
+        this.uploadQueue.drain(async () => {
+            return await this.withDB(async (db) => {
+                const carKeys = (await db.getAllFromIndex('cidToCar', 'pending')).map(c => c.car);
+                for (const carKey of carKeys) {
+                    await this.uploadFunction(carKey, await db.get('cars', carKey));
+                    const carMeta = await db.get('cidToCar', carKey);
+                    delete carMeta.pending;
+                    await db.put('cidToCar', carMeta);
+                }
+            });
+        });
+    }
+    getKeyMaterial() {
+        return this.keyMaterial;
+    }
+    setKeyMaterial(km) {
+        if (km && !NO_ENCRYPT) {
+            const hex = Uint8Array.from(Buffer.from(km, 'hex'));
+            this.keyMaterial = km;
+            const hash = sha1sync(hex);
+            this.keyId = Buffer.from(hash).toString('hex');
+        }
+        else {
+            this.keyMaterial = null;
+            this.keyId = 'null';
+        }
+        // console.trace('keyId', this.name, this.keyId)
+    }
+    /**
+     * Group the blocks into a car and write it to the valet.
+     * @param {import('./blockstore.js').InnerBlockstore} innerBlockstore
+     * @param {Set<string>} cids
+     * @returns {Promise<void>}
+     * @memberof Valet
+     */
+    async writeTransaction(innerBlockstore, cids) {
+        if (innerBlockstore.lastCid) {
+            if (this.keyMaterial) {
+                // console.log('encrypting car', innerBlockstore.label)
+                // should we pass cids in instead of iterating frin innerBlockstore?
+                const newCar = await blocksToEncryptedCarBlock(innerBlockstore.lastCid, innerBlockstore, this.keyMaterial);
+                await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
+            }
+            else {
+                const newCar = await blocksToCarBlock(innerBlockstore.lastCid, innerBlockstore);
+                await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
+            }
+        }
+        else {
+            throw new Error('missing lastCid for car header');
+        }
+    }
+    withDB = async (dbWorkFun) => {
+        if (!this.idb) {
+            this.idb = await openDB(`fp.${this.keyId}.${this.name}.valet`, 2, {
+                upgrade(db, oldVersion, newVersion, transaction) {
+                    if (oldVersion < 1) {
+                        db.createObjectStore('cars'); // todo use database name
+                        const cidToCar = db.createObjectStore('cidToCar', { keyPath: 'car' });
+                        cidToCar.createIndex('cids', 'cids', { multiEntry: true });
+                    }
+                    if (oldVersion < 2) {
+                        const cidToCar = transaction.objectStore('cidToCar');
+                        cidToCar.createIndex('pending', 'pending');
+                    }
+                }
+            });
+        }
+        return await dbWorkFun(this.idb);
+    };
+    /**
+     * Iterate over all blocks in the store.
+     *
+     * @yields {{cid: string, value: Uint8Array}}
+     * @returns {AsyncGenerator<any, any, any>}
+     */
+    async *cids() {
+        // console.log('valet cids')
+        const db = await this.withDB(async (db) => db);
+        const tx = db.transaction(['cidToCar'], 'readonly');
+        let cursor = await tx.store.openCursor();
+        while (cursor) {
+            yield { cid: cursor.key, car: cursor.value.car };
+            cursor = await cursor.continue();
+        }
+    }
+    /**
+     *
+     * @param {string} carCid
+     * @param {*} value
+     */
+    async parkCar(carCid, value, cids) {
+        await this.withDB(async (db) => {
+            const tx = db.transaction(['cars', 'cidToCar'], 'readwrite');
+            await tx.objectStore('cars').put(value, carCid);
+            await tx.objectStore('cidToCar').put({ pending: 'y', car: carCid, cids: Array.from(cids) });
+            return await tx.done;
+        });
+        // console.log('parked car', carCid, value.length, Array.from(cids))
+        // upload to web3.storage if we have credentials
+        if (this.uploadFunction) {
+            if (this.alreadyEnqueued.has(carCid)) {
+                // console.log('already enqueued', carCid)
+                return;
+            }
+            // don't await this, it will be done in the queue
+            // console.log('add to queue', carCid, value.length)
+            this.uploadQueue.push({ carCid, value });
+            this.alreadyEnqueued.add(carCid);
+        }
+        else {
+            // console.log('no upload function', carCid, value.length, this.uploadFunction)
+        }
+    }
+    remoteBlockFunction = null;
+    async getBlock(dataCID) {
+        return await this.withDB(async (db) => {
+            const tx = db.transaction(['cars', 'cidToCar'], 'readonly');
+            const indexResp = await tx.objectStore('cidToCar').index('cids').get(dataCID);
+            const carCid = indexResp?.car;
+            if (!carCid) {
+                throw new Error('Missing block: ' + dataCID);
+            }
+            const carBytes = await tx.objectStore('cars').get(carCid);
+            const reader = await CarReader.fromBytes(carBytes);
+            if (this.keyMaterial) {
+                const roots = await reader.getRoots();
+                const readerGetWithCodec = async (cid) => {
+                    const got = await reader.get(cid);
+                    // console.log('got.', cid.toString())
+                    let useCodec = codec;
+                    if (cid.toString().indexOf('bafy') === 0) {
+                        useCodec = dagcbor;
+                    }
+                    const decoded = await Block.decode({
+                        ...got,
+                        codec: useCodec,
+                        hasher: sha256
+                    });
+                    // console.log('decoded', decoded.value)
+                    return decoded;
+                };
+                const { blocks } = await blocksFromEncryptedCarBlock(roots[0], readerGetWithCodec, this.keyMaterial);
+                const block = blocks.find(b => b.cid.toString() === dataCID);
+                if (block) {
+                    return block.bytes;
+                }
+            }
+            else {
+                const gotBlock = await reader.get(CID.parse(dataCID));
+                if (gotBlock) {
+                    return gotBlock.bytes;
+                }
+            }
+        });
+    }
+}
+export const blocksToCarBlock = async (rootCids, blocks) => {
+    let size = 0;
+    if (!Array.isArray(rootCids)) {
+        rootCids = [rootCids];
+    }
+    const headerSize = CBW.headerLength({ roots: rootCids });
+    size += headerSize;
+    if (!Array.isArray(blocks)) {
+        blocks = Array.from(blocks.entries());
+    }
+    for (const { cid, bytes } of blocks) {
+        // console.log(cid, bytes)
+        size += CBW.blockLength({ cid, bytes });
+    }
+    const buffer = new Uint8Array(size);
+    const writer = await CBW.createWriter(buffer, { headerSize });
+    for (const cid of rootCids) {
+        writer.addRoot(cid);
+    }
+    for (const { cid, bytes } of blocks) {
+        writer.write({ cid, bytes });
+    }
+    await writer.close();
+    return await Block.encode({ value: writer.bytes, hasher: sha256, codec: raw });
+};
+export const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blocks, keyMaterial) => {
+    const encryptionKey = Buffer.from(keyMaterial, 'hex');
+    const encryptedBlocks = [];
+    const theCids = [];
+    for (const { cid } of blocks.entries()) {
+        theCids.push(cid.toString());
+    }
+    // console.log('encrypting', theCids.length, 'blocks', theCids.includes(innerBlockStoreClockRootCid.toString()))
+    // console.log('cids', theCids, innerBlockStoreClockRootCid.toString())
+    let last;
+    for await (const block of encrypt({
+        cids: theCids,
+        get: async (cid) => blocks.get(cid),
+        key: encryptionKey,
+        hasher: sha256,
+        chunker,
+        cache,
+        // codec: dagcbor, // should be crypto?
+        root: innerBlockStoreClockRootCid
+    })) {
+        encryptedBlocks.push(block);
+        last = block;
+    }
+    // console.log('last', last.cid.toString(), 'for clock', innerBlockStoreClockRootCid.toString())
+    const encryptedCar = await blocksToCarBlock(last.cid, encryptedBlocks);
+    return encryptedCar;
+};
+// { root, get, key, cache, chunker, hasher }
+const memoizeDecryptedCarBlocks = new Map();
+const blocksFromEncryptedCarBlock = async (cid, get, keyMaterial) => {
+    if (memoizeDecryptedCarBlocks.has(cid.toString())) {
+        return memoizeDecryptedCarBlocks.get(cid.toString());
+    }
+    else {
+        const blocksPromise = (async () => {
+            const decryptionKey = Buffer.from(keyMaterial, 'hex');
+            // console.log('decrypting', keyMaterial, cid.toString())
+            const cids = new Set();
+            const decryptedBlocks = [];
+            for await (const block of decrypt({
+                root: cid,
+                get,
+                key: decryptionKey,
+                chunker,
+                hasher: sha256,
+                cache
+                // codec: dagcbor
+            })) {
+                decryptedBlocks.push(block);
+                cids.add(block.cid.toString());
+            }
+            return { blocks: decryptedBlocks, cids };
+        })();
+        memoizeDecryptedCarBlocks.set(cid.toString(), blocksPromise);
+        return blocksPromise;
+    }
+};
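
The Valet is the storage layer underneath the blockstore: writeTransaction packs each commit into a CAR file (encrypted with the database key material unless NO_ENCRYPT is set), parkCar stores the CAR in IndexedDB and records which block CIDs it contains, and getBlock later resolves any data CID back through that index, decrypting on the fly. Pending CARs are also pushed through an async cargo queue to an optional uploadFunction installed by the database. The sketch below is an illustration of that surface, not documented API; the import path, upload endpoint, and key value are assumptions.

// Hedged sketch of driving the Valet directly (normally the database does this for you).
import { Valet } from '@fireproof/core/dist/src/valet.js' // assumed path; adjust to how you bundle it

const keyMaterial = '00'.repeat(32) // 32 bytes of hex key material; pass nothing to skip encryption
const valet = new Valet('my-app', keyMaterial)

// Optional: install an uploader. Parked CARs are queued here and marked non-pending once sent.
valet.uploadFunction = async (carCid, carBytes) => {
  // hypothetical endpoint; the jsdoc above only promises (string, Uint8Array) => Promise<void>
  await fetch(`https://example.invalid/cars/${carCid}`, { method: 'PUT', body: carBytes })
}

// Reading: look up which CAR holds a data CID, decrypt it if key material is set, return the bytes.
const bytes = await valet.getBlock('bafy...data-cid') // placeholder CID string
console.log('block size', bytes?.length)
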
package/hooks/use-fireproof.js
CHANGED
@@ -1,11 +1,13 @@
 // @ts-ignore
-import { useEffect, useState, createContext } from 'react'
-import { Fireproof,
+import { useEffect, useState, useCallback, createContext } from 'react'
+import { Fireproof, Index } from '@fireproof/core'
 
 /**
 @typedef {Object} FireproofCtxValue
 @property {Function} addSubscriber - A function to add a subscriber with a label and function.
 @property {Fireproof} database - An instance of the Fireproof class.
+@property {Function} useLiveQuery - A hook to return a query result
+@property {Function} useLiveDocument - A hook to return a live document
 @property {boolean} ready - A boolean indicating whether the database is ready.
 @param {string} label - A label for the subscriber.
 @param {Function} fn - A function to be added as a subscriber.
@@ -17,38 +19,36 @@ export const FireproofCtx = createContext({
   ready: false
 })
 
-const inboundSubscriberQueue = new Map()
+// const inboundSubscriberQueue = new Map()
 
 let startedSetup = false
 let database
-let listener
 const initializeDatabase = name => {
   if (database) return
   database = Fireproof.storage(name)
-  listener = new Listener(database)
 }
 
 /**
-
 @function useFireproof
 React hook to initialize a Fireproof database, automatically saving and loading the clock.
 You might need to import { nodePolyfills } from 'vite-plugin-node-polyfills' in your vite.config.ts
+@deprecated - npm install @fireproof/react instead
 @param {string} name - The path to the database file
 @param {function(database): void} [defineDatabaseFn] - Synchronous function that defines the database, run this before any async calls
 @param {function(database): Promise<void>} [setupDatabaseFn] - Asynchronous function that sets up the database, run this to load fixture data etc
-@returns {FireproofCtxValue} {
+@returns {FireproofCtxValue} { useLiveQuery, useLiveDocument, database, ready }
 */
 export function useFireproof (name = 'useFireproof', defineDatabaseFn = () => {}, setupDatabaseFn = async () => {}) {
   const [ready, setReady] = useState(false)
   initializeDatabase(name)
 
+  /**
+   * @deprecated - use database.subscribe instead
+   */
   const addSubscriber = (label, fn) => {
-
-
-
-    const listenerCallback = async event => {
-      if (event._external) return
-      for (const [, fn] of inboundSubscriberQueue) fn()
+    // todo test that the label is not needed
+    return database.subscribe(fn)
+    // inboundSubscriberQueue.set(label, fn)
   }
 
   useEffect(() => {
@@ -61,13 +61,76 @@ export function useFireproof (name = 'useFireproof', defineDatabaseFn = () => {}
         await setupDatabaseFn(database)
       }
       setReady(true)
-      listener.on('*', listenerCallback) // hushed('*', listenerCallback, 250))
     }
     doSetup()
   }, [ready])
 
+  function useLiveDocument (initialDoc) {
+    const id = initialDoc._id
+    const [doc, setDoc] = useState(initialDoc)
+
+    const saveDoc = async newDoc => {
+      await database.put({ _id: id, ...newDoc })
+    }
+    const refreshDoc = useCallback(async () => {
+      // todo add option for mvcc checks
+      const got = await database.get(id).catch(() => initialDoc)
+      setDoc(got)
+    }, [id, initialDoc])
+
+    useEffect(
+      () =>
+        database.subscribe(change => {
+          if (change.find(c => c.key === id)) {
+            refreshDoc() // todo use change.value
+          }
+        }),
+      [id, refreshDoc]
+    )
+
+    useEffect(() => {
+      refreshDoc()
+    }, [])
+
+    return [doc, saveDoc]
+  }
+
+  function useLiveQuery (mapFn, query = null, initialRows = []) {
+    const [rows, setRows] = useState({ rows: initialRows, proof: {} })
+    const [index, setIndex] = useState(null)
+
+    const refreshRows = useCallback(async () => {
+      if (!index) return
+      const got = await index.query(query || {})
+      setRows(got)
+    }, [index, JSON.stringify(query)])
+
+    useEffect(
+      () => {
+        // todo listen to index changes
+        return database.subscribe(() => {
+          refreshRows()
+        })
+      },
+      [refreshRows]
+    )
+
+    useEffect(() => {
+      refreshRows()
+    }, [index])
+
+    useEffect(() => {
+      const index = new Index(database, null, mapFn) // this should only be created once
+      setIndex(index)
+    }, [mapFn.toString()])
+
+    return rows
+  }
+
   return {
     addSubscriber,
+    useLiveQuery,
+    useLiveDocument,
     database,
     ready
   }
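
The hook file now returns two extra hooks from useFireproof: useLiveDocument keeps a single document in React state and re-fetches it whenever a subscribed change touches its _id, and useLiveQuery builds an Index from a map function and re-runs the query on every database change. The component below is a hedged sketch of how they could be used, based only on the code above (the file itself recommends @fireproof/react instead); the import path, component, document fields, and the (doc, map) shape of the map function are assumptions.

// Hedged usage sketch of the new hooks; names and paths here are illustrative.
import React from 'react'
import { useFireproof } from '@fireproof/core/hooks/use-fireproof' // assumed import path

export function TodoList () {
  const { ready, useLiveQuery, useLiveDocument } = useFireproof('todo-app')

  // Rows refresh whenever database.subscribe reports a change.
  const result = useLiveQuery((doc, map) => map(doc.title)) // map-function shape assumed from the Index API
  // A single live document plus a saver that calls database.put under the hood.
  const [profile, saveProfile] = useLiveDocument({ _id: 'profile', name: '' })

  if (!ready) return <p>loading…</p>
  return (
    <div>
      <button onClick={() => saveProfile({ ...profile, lastOpened: Date.now() })}>
        hello {profile.name || 'anonymous'}
      </button>
      <ul>
        {result.rows.map((row, i) => (
          <li key={i}>{JSON.stringify(row)}</li>
        ))}
      </ul>
    </div>
  )
}
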
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@fireproof/core",
-  "version": "0.5.9",
+  "version": "0.5.10",
   "description": "Cloudless database for apps, the browser, and IPFS",
   "main": "dist/src/fireproof.js",
   "module": "dist/src/fireproof.mjs",
@@ -88,6 +88,7 @@
       "examples/**/*.tsx",
       "examples/**/dist",
       "out/**",
+      "packages/react/**",
       "rollup.config.js"
     ]
   },