@fireproof/core 0.6.3-dev2 → 0.6.5
This diff shows the changes between publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
- package/dist/src/database.js +3 -3
- package/dist/src/db-index.js +11 -1
- package/dist/src/fireproof.d.ts +3 -27
- package/dist/src/fireproof.js +543 -764
- package/dist/src/fireproof.js.map +1 -1
- package/dist/src/fireproof.mjs +543 -764
- package/dist/src/fireproof.mjs.map +1 -1
- package/dist/src/import.js +29 -0
- package/dist/src/loader.js +15 -122
- package/dist/src/prolly.js +0 -1
- package/dist/src/storage/base.js +348 -0
- package/dist/src/storage/browser.js +61 -0
- package/dist/src/storage/filesystem.js +65 -0
- package/dist/src/storage/rest.js +58 -0
- package/dist/src/storage/ucan.js +0 -0
- package/dist/src/valet.js +33 -18
- package/package.json +1 -3
- package/src/database.js +3 -3
- package/src/db-index.js +10 -1
- package/src/fireproof.js +3 -4
- package/src/prolly.js +0 -2
- package/src/utils.js +16 -0
- package/src/valet.js +36 -21
- package/src/loader.js +0 -168
package/dist/src/import.js
ADDED
@@ -0,0 +1,29 @@
+import { createReadStream } from 'fs';
+import { join } from 'path';
+import { parse } from '@jsonlines/core';
+import cargoQueue from 'async/cargoQueue.js';
+// todo maybe this goes in a utils package for tree-shaking?
+async function loadData(database, filename) {
+    const fullFilePath = join(process.cwd(), filename);
+    const readableStream = createReadStream(fullFilePath);
+    const parseStream = parse();
+    readableStream.pipe(parseStream);
+    const saveQueue = cargoQueue(async (tasks, callback) => {
+        for (const t of tasks) {
+            await database.put(t);
+        }
+        callback();
+    });
+    parseStream.on('data', async (data) => {
+        saveQueue.push(data);
+    });
+    let res;
+    const p = new Promise((resolve, reject) => {
+        res = resolve;
+    });
+    saveQueue.drain(async (x) => {
+        res();
+    });
+    return p;
+}
+export { loadData };
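The new import helper streams a JSON Lines file from disk and batches `database.put` calls through an async cargo queue, resolving once the queue drains. A minimal usage sketch (the file name and the stub database are illustrative, not part of the package):

```js
import { loadData } from './dist/src/import.js'

// Stand-in database for illustration; loadData only needs an async put(doc),
// so an opened Fireproof database works the same way.
const database = {
  async put (doc) { console.log('put', doc) }
}

// 'my-data.jsonl' is a hypothetical JSON Lines file: one JSON document per line.
await loadData(database, 'my-data.jsonl') // resolves when the cargo queue drains
```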
package/dist/src/loader.js
CHANGED
@@ -1,131 +1,24 @@
-import {
-//
-} from '
-import { mkdir, writeFile } from 'node:fs/promises';
-import { openDB } from 'idb';
-import { join, dirname } from 'path';
-// import { parse } from '@jsonlines/core'
-// import cargoQueue from 'async/cargoQueue.js'
-import { homedir } from 'os';
-const defaultConfig = {
-    dataDir: join(homedir(), '.fireproof'),
-    headerKeyPrefix: 'fp.'
-};
+import { Browser } from './storage/browser.js';
+// import { Filesystem } from './storage/filesystem.js'
+import { Rest } from './storage/rest.js';
 const FORCE_IDB = typeof process !== 'undefined' && !!process.env?.FORCE_IDB;
-/* global
-export
-
-
-        this.keyId = keyId;
-        this.config = config;
-        this.isBrowser = false;
+/* global window */
+export const Loader = {
+    appropriate: (name, config = {}) => {
+        let isBrowser = false;
         try {
-
+            isBrowser = window.localStorage && true;
         }
         catch (e) { }
-
-
-        if (!this.idb) {
-            this.idb = await openDB(`fp.${this.keyId}.${this.name}.valet`, 3, {
-                upgrade(db, oldVersion, newVersion, transaction) {
-                    if (oldVersion < 1) {
-                        db.createObjectStore('cars');
-                    }
-                }
-            });
-        }
-        return await dbWorkFun(this.idb);
-    };
-    async writeCars(cars) {
-        // console.log('writeCars', this.config.dataDir, this.name, cars.map(c => c.cid.toString()))
-        // console.log('writeCars', cars.length)
-        if (FORCE_IDB || this.isBrowser) {
-            await this.writeCarsIDB(cars);
-        }
-        else {
-            const writes = [];
-            for (const { cid, bytes } of cars) {
-                const carFilename = join(this.config.dataDir, this.name, `${cid.toString()}.car`);
-                // console.log('writeCars', carFilename)
-                writes.push(writeSync(carFilename, bytes));
-            }
-            await Promise.all(writes);
-        }
-    }
-    async writeCarsIDB(cars) {
-        return await this.withDB(async (db) => {
-            const tx = db.transaction(['cars'], 'readwrite');
-            for (const { cid, bytes, replaces } of cars) {
-                await tx.objectStore('cars').put(bytes, cid.toString());
-                // todo remove old maps
-                if (replaces) {
-                    await tx.objectStore('cars').delete(replaces.toString());
-                }
-            }
-            return await tx.done;
-        });
-    }
-    async readCar(carCid) {
-        if (FORCE_IDB || this.isBrowser) {
-            return await this.readCarIDB(carCid);
-        }
-        else {
-            const carFilename = join(this.config.dataDir, this.name, `${carCid.toString()}.car`);
-            const got = readFileSync(carFilename);
-            // console.log('readCar', carFilename, got.constructor.name)
-            return got;
+        if (config.type === 'rest') {
+            return new Rest(name, config);
         }
-
-        return await this.withDB(async (db) => {
-            const tx = db.transaction(['cars'], 'readonly');
-            // console.log('getCarReader', carCid)
-            return await tx.objectStore('cars').get(carCid);
-        });
-    }
-    getHeader() {
-        if (this.isBrowser) {
-            return localStorage.getItem(this.config.headerKeyPrefix + this.name);
-        }
-        else {
-            return loadSync(this.headerFilename());
-            // return null
-        }
-    }
-    async saveHeader(stringValue) {
-        // console.log('saveHeader', this.isBrowser)
-        if (this.isBrowser) {
-            // console.log('localStorage!', this.config.headerKeyPrefix)
-            return localStorage.setItem(this.config.headerKeyPrefix + this.name, stringValue);
+        if (FORCE_IDB || isBrowser) {
+            return new Browser(name, config);
         }
         else {
-
-            //
-            try {
-                await writeSync(this.headerFilename(), stringValue);
-            }
-            catch (error) {
-                console.log('error', error);
-            }
-            // console.log('saved clock to', this.headerFilename())
+            return new Browser(name, config);
+            // return new Filesystem(name, config)
         }
     }
-
-        // console.log('headerFilename', this.config.dataDir, this.name)
-        return join(this.config.dataDir, this.name, 'header.json');
-    }
-}
-function loadSync(filename) {
-    try {
-        return readFileSync(filename, 'utf8').toString();
-    }
-    catch (error) {
-        // console.log('error', error)
-        return null;
-    }
-}
-async function writeSync(fullpath, stringValue) {
-    await mkdir(dirname(fullpath), { recursive: true });
-    // writeFileSync(fullpath, stringValue)
-    await writeFile(fullpath, stringValue);
-}
+};
package/dist/src/prolly.js
CHANGED
@@ -8,7 +8,6 @@ import { nocache as cache } from 'prolly-trees/cache';
 import { CIDCounter, bf, simpleCompare as compare } from 'prolly-trees/utils';
 import * as codec from '@ipld/dag-cbor';
 import { sha256 as hasher } from 'multiformats/hashes/sha2';
-// import { blake2b256 as hasher } from '@multiformats/blake2/blake2b'
 import { doTransaction } from './blockstore.js';
 import { create as createBlock } from 'multiformats/block';
 const blockOpts = { cache, chunker: bf(30), codec, hasher, compare };
package/dist/src/storage/base.js
ADDED
@@ -0,0 +1,348 @@
+import randomBytes from 'randombytes';
+// import { randomBytes } from 'crypto'
+import { create, load } from 'ipld-hashmap';
+import { parse } from 'multiformats/link';
+import { CarReader } from '@ipld/car';
+import { CID } from 'multiformats/cid';
+import { sha256 } from 'multiformats/hashes/sha2';
+import * as Block from 'multiformats/block';
+import * as dagcbor from '@ipld/dag-cbor';
+// @ts-ignore
+import { bf, simpleCompare as compare } from 'prolly-trees/utils';
+// @ts-ignore
+import { nocache as cache } from 'prolly-trees/cache';
+import { Buffer } from 'buffer';
+import { rawSha1 as sha1sync } from '../sha1.js';
+// @ts-ignore
+import * as codec from 'encrypted-block';
+import { blocksToCarBlock, blocksToEncryptedCarBlock, blocksFromEncryptedCarBlock } from '../valet.js';
+const chunker = bf(30);
+const blockOpts = { cache, chunker, codec: dagcbor, hasher: sha256, compare };
+const NO_ENCRYPT = typeof process !== 'undefined' && !!process.env?.NO_ENCRYPT;
+const NOT_IMPL = true;
+export class Base {
+    valetRootCarCid = null; // used on initial hydrate, if you change this, set this.valetCarCidMap = null
+    keyMaterial = null;
+    keyId = 'null';
+    constructor(name, config = {}) {
+        this.instanceId = Math.random().toString(36).slice(2);
+        this.name = name;
+        this.config = config;
+        if (!this.config.branches) {
+            this.config.branches = {
+                main: { readonly: false }
+            };
+        }
+        // console.log('this.config', this.instanceId, this.name, this.config)
+        // if there is config.key and config.car,
+        // then we could skip loading the headers if we want.
+        // currently we don't do that, because we only use
+        // the config for first run, and then we use the headers
+        // once they exist
+        this.ready = this.getHeaders().then((blocksReady) => {
+            // console.log('blocksReady base', this.name, blocksReady)
+            return blocksReady;
+        });
+    }
+    setCarCidMapCarCid(carCid) {
+        // console.trace('setCarCidMapCarCid', carCid)
+        if (!carCid)
+            return;
+        this.valetRootCarCid = parse(carCid);
+        this.valetCarCidMap = null;
+    }
+    setKeyMaterial(km) {
+        if (km && !NO_ENCRYPT) {
+            const hex = Uint8Array.from(Buffer.from(km, 'hex'));
+            this.keyMaterial = km;
+            const hash = sha1sync(hex);
+            this.keyId = Buffer.from(hash).toString('hex');
+            // console.log('setKeyMaterial', this.instanceId, this.name, km)
+        }
+        else {
+            // console.log('setKeyMaterial', this.instanceId, this.name, km)
+            this.keyMaterial = null;
+            this.keyId = 'null';
+        }
+    }
+    async saveCar(carCid, value, cids) {
+        const newValetCidCar = await this.updateCarCidMap(carCid, cids);
+        // console.log('writeCars', carCid.toString(), newValetCidCar.cid.toString())
+        const carList = [
+            {
+                cid: carCid,
+                bytes: value,
+                replaces: null
+            },
+            {
+                cid: newValetCidCar.cid,
+                bytes: newValetCidCar.bytes,
+                replaces: null
+                // replaces: this.valetRootCarCid // todo
+            }
+        ];
+        await this.writeCars(carList);
+        this.valetRootCarCid = newValetCidCar.cid;
+        return newValetCidCar;
+    }
+    applyHeaders(headers) {
+        // console.log('applyHeaders', headers.index)
+        this.headers = headers;
+        // console.log('before applied', this.instanceId, this.name, this.keyMaterial, this.valetRootCarCid)
+        for (const [, header] of Object.entries(headers)) {
+            if (header) {
+                // console.log('applyHeaders', this.instanceId, this.name, header.key, header.car)
+                header.key && this.setKeyMaterial(header.key);
+                this.setCarCidMapCarCid(header.car);
+            }
+        }
+        if (!this.valetRootCarCid) {
+            this.setCarCidMapCarCid(this.config.car);
+        }
+        if (!this.keyMaterial) {
+            const nullKey = this.config.key === null;
+            if (nullKey || this.config.key) {
+                this.setKeyMaterial(this.config.key);
+            }
+            else {
+                this.setKeyMaterial(randomBytes(32).toString('hex'));
+            }
+        }
+        // console.log('applied', this.instanceId, this.name, this.keyMaterial, this.valetRootCarCid)
+    }
+    async getHeaders() {
+        const headers = {};
+        for (const [branch] of Object.entries(this.config.branches)) {
+            const got = await this.loadHeader(branch);
+            // console.log('getHeaders', this.name, branch, got)
+            headers[branch] = got;
+        }
+        this.applyHeaders(headers);
+        return headers;
+    }
+    loadHeader(branch = 'main') {
+        throw new Error('not implemented');
+    }
+    async saveHeader(header) {
+        // for each branch, save the header
+        // console.log('saveHeader', this.config.branches)
+        // for (const branch of this.branches) {
+        //   await this.saveBranchHeader(branch)
+        // }
+        for (const [branch, { readonly }] of Object.entries(this.config.branches)) {
+            if (readonly)
+                continue;
+            // console.log('saveHeader', this.instanceId, this.name, branch, header)
+            await this.writeHeader(branch, header);
+        }
+    }
+    prepareHeader(header, json = true) {
+        header.key = this.keyMaterial;
+        header.car = this.valetRootCarCid.toString();
+        // console.log('prepareHeader', this.instanceId, this.name, header.key, this.valetRootCarCid.toString())
+        return json ? JSON.stringify(header) : header;
+    }
+    writeHeader(branch, header) {
+        throw new Error('not implemented');
+    }
+    async getCarCIDForCID(cid) {
+        const cidMap = await this.getCidCarMap();
+        const carCid = cidMap.get(cid.toString());
+        if (carCid) {
+            return { result: carCid };
+        }
+        return { result: null };
+    }
+    async readCar(carCid) {
+        if (NOT_IMPL)
+            throw new Error('not implemented');
+        return new Uint8Array(carCid);
+    }
+    async getLoaderBlock(dataCID) {
+        const { result: carCid } = await this.getCarCIDForCID(dataCID);
+        if (!carCid) {
+            throw new Error('Missing car for: ' + dataCID);
+        }
+        // console.log('getLoaderBlock', dataCID, carCid)
+        const reader = await this.getCarReader(carCid);
+        return { block: await reader.get(dataCID), reader, carCid };
+    }
+    /** Private - internal **/
+    async getCidCarMap() {
+        // console.log('getCidCarMap', this.constructor.name, this.name, this.valetRootCarCid, typeof this.valetCarCidMap)
+        if (this.valetCarCidMap)
+            return this.valetCarCidMap;
+        if (this.valetRootCarCid) {
+            this.valetCarCidMap = await this.mapForIPLDHashmapCarCid(this.valetRootCarCid);
+            return this.valetCarCidMap;
+        }
+        else {
+            this.valetCarCidMap = new Map();
+            return this.valetCarCidMap;
+        }
+    }
+    async mapForIPLDHashmapCarCid(carCid) {
+        // console.log('mapForIPLDHashmapCarCid', carCid)
+        const carMapReader = await this.getWriteableCarReader(carCid);
+        const indexNode = await load(carMapReader, carMapReader.root.cid, {
+            blockHasher: blockOpts.hasher,
+            blockCodec: blockOpts.codec
+        });
+        const theCarMap = new Map();
+        for await (const [key, value] of indexNode.entries()) {
+            // console.log('mapForIPLDHashmapCarCid', key, value)
+            theCarMap.set(key, value);
+        }
+        return theCarMap;
+    }
+    async getWriteableCarReader(carCid) {
+        // console.log('getWriteableCarReader', carCid)
+        const carMapReader = await this.getCarReader(carCid);
+        const theseWriteableBlocks = new VMemoryBlockstore();
+        const combinedReader = {
+            blocks: theseWriteableBlocks,
+            root: carMapReader?.root,
+            put: async (cid, bytes) => {
+                return await theseWriteableBlocks.put(cid, bytes);
+            },
+            get: async (cid) => {
+                try {
+                    const got = await theseWriteableBlocks.get(cid);
+                    return got.bytes;
+                }
+                catch (e) {
+                    if (!carMapReader)
+                        throw e;
+                    const bytes = await carMapReader.get(cid);
+                    await theseWriteableBlocks.put(cid, bytes);
+                    return bytes;
+                }
+            }
+        };
+        return combinedReader;
+    }
+    async getCarReader(carCid) {
+        carCid = carCid.toString();
+        const carBytes = await this.readCar(carCid);
+        // console.log('getCarReader', this.constructor.name, carCid, carBytes.length)
+        const reader = await CarReader.fromBytes(carBytes);
+        if (this.keyMaterial) {
+            const roots = await reader.getRoots();
+            const readerGetWithCodec = async (cid) => {
+                const got = await reader.get(cid);
+                let useCodec = codec;
+                if (cid.toString().indexOf('bafy') === 0) {
+                    useCodec = dagcbor; // todo this is a dirty check
+                }
+                const decoded = await Block.decode({
+                    ...got,
+                    codec: useCodec,
+                    hasher: sha256
+                });
+                return decoded;
+            };
+            const { blocks } = await blocksFromEncryptedCarBlock(roots[0], readerGetWithCodec, this.keyMaterial);
+            const rootBlock = blocks[blocks.length - 1];
+            const blocksIterable = function* () { for (const block of blocks)
+                yield block; };
+            const gat = async (dataCID) => {
+                dataCID = dataCID.toString();
+                return blocks.find(b => b.cid.toString() === dataCID);
+            };
+            return {
+                entries: blocksIterable,
+                root: rootBlock,
+                gat,
+                get: async (dataCID) => {
+                    const block = await gat(dataCID);
+                    if (block) {
+                        return block.bytes;
+                    }
+                }
+            };
+        }
+        else {
+            const gat = async (dataCID) => {
+                return await reader.get(CID.parse(dataCID));
+            };
+            return {
+                // blocks,
+                entries: reader.blocks.bind(reader),
+                root: reader.getRoots()[0],
+                gat,
+                get: async (dataCID) => {
+                    const gotBlock = await gat(dataCID);
+                    if (gotBlock) {
+                        return gotBlock.bytes;
+                    }
+                }
+            };
+        }
+    }
+    writeCars(cars) { }
+    async updateCarCidMap(carCid, cids) {
+        // this hydrates the map if it has not been hydrated
+        const theCarMap = await this.getCidCarMap();
+        for (const cid of cids) {
+            theCarMap.set(cid, carCid);
+        }
+        // todo can we debounce this? -- maybe put it into a queue so we can batch it
+        return await this.persistCarMap(theCarMap);
+    }
+    async persistCarMap(theCarMap) {
+        const ipldLoader = await getEmptyLoader();
+        const indexNode = await create(ipldLoader, {
+            bitWidth: 4,
+            bucketSize: 2,
+            blockHasher: blockOpts.hasher,
+            blockCodec: blockOpts.codec
+        });
+        for (const [key, value] of theCarMap.entries()) {
+            await indexNode.set(key, value);
+        }
+        let newValetCidCar;
+        if (this.keyMaterial) {
+            newValetCidCar = await blocksToEncryptedCarBlock(indexNode.cid, ipldLoader.blocks, this.keyMaterial);
+        }
+        else {
+            newValetCidCar = await blocksToCarBlock(indexNode.cid, ipldLoader.blocks);
+        }
+        return newValetCidCar;
+    }
+}
+async function getEmptyLoader() {
+    const theseWriteableBlocks = new VMemoryBlockstore();
+    return {
+        blocks: theseWriteableBlocks,
+        put: async (cid, bytes) => {
+            return await theseWriteableBlocks.put(cid, bytes);
+        },
+        get: async (cid) => {
+            const got = await theseWriteableBlocks.get(cid);
+            return got.bytes;
+        }
+    };
+}
+export class VMemoryBlockstore {
+    /** @type {Map<string, Uint8Array>} */
+    blocks = new Map();
+    instanceId = Math.random().toString(36).slice(2);
+    async get(cid) {
+        const bytes = this.blocks.get(cid.toString());
+        if (!bytes)
+            throw new Error('block not found ' + cid.toString());
+        return { cid, bytes };
+    }
+    /**
+     * @param {any} cid
+     * @param {Uint8Array} bytes
+     */
+    async put(cid, bytes) {
+        this.blocks.set(cid.toString(), bytes);
+    }
+    *entries() {
+        for (const [str, bytes] of this.blocks) {
+            yield { cid: parse(str), bytes };
+        }
+    }
+}
package/dist/src/storage/browser.js
ADDED
@@ -0,0 +1,61 @@
+import { openDB } from 'idb';
+import { Base } from './base.js';
+const defaultConfig = {
+    headerKeyPrefix: 'fp.'
+};
+/* global localStorage */
+export class Browser extends Base {
+    constructor(name, config = {}) {
+        super(name, Object.assign({}, defaultConfig, config));
+        this.isBrowser = false;
+        try {
+            this.isBrowser = window.localStorage && true;
+        }
+        catch (e) { }
+    }
+    withDB = async (dbWorkFun) => {
+        if (!this.idb) {
+            this.idb = await openDB(`fp.${this.keyId}.${this.name}.valet`, 3, {
+                upgrade(db, oldVersion, newVersion, transaction) {
+                    if (oldVersion < 1) {
+                        db.createObjectStore('cars');
+                    }
+                }
+            });
+        }
+        return await dbWorkFun(this.idb);
+    };
+    async writeCars(cars) {
+        if (this.config.readonly)
+            return;
+        return await this.withDB(async (db) => {
+            const tx = db.transaction(['cars'], 'readwrite');
+            for (const { cid, bytes, replaces } of cars) {
+                await tx.objectStore('cars').put(bytes, cid.toString());
+                // todo remove old maps
+                if (replaces) {
+                    await tx.objectStore('cars').delete(replaces.toString());
+                }
+            }
+            return await tx.done;
+        });
+    }
+    async readCar(carCid) {
+        return await this.withDB(async (db) => {
+            const tx = db.transaction(['cars'], 'readonly');
+            // console.log('getCarReader', carCid)
+            return await tx.objectStore('cars').get(carCid);
+        });
+    }
+    loadHeader(branch = 'main') {
+        return this.isBrowser && localStorage.getItem(this.headerKey(branch));
+    }
+    async writeHeader(branch, header) {
+        if (this.config.readonly)
+            return;
+        return this.isBrowser && localStorage.setItem(this.headerKey(branch), this.prepareHeader(header));
+    }
+    headerKey(branch = 'main') {
+        return this.config.headerKeyPrefix + this.name + '.' + branch;
+    }
+}
package/dist/src/storage/filesystem.js
ADDED
@@ -0,0 +1,65 @@
+import { readFileSync } from 'fs';
+import { mkdir, writeFile } from 'fs/promises';
+import { join, dirname } from 'path';
+import { homedir } from 'os';
+import { Base } from './base.js';
+export const defaultConfig = {
+    dataDir: join(homedir(), '.fireproof')
+};
+export class Filesystem extends Base {
+    constructor(name, config = {}) {
+        const mergedConfig = Object.assign({}, defaultConfig, config);
+        // console.log('Filesystem', name, mergedConfig, header)
+        super(name, mergedConfig);
+    }
+    async writeCars(cars) {
+        if (this.config.readonly)
+            return;
+        const writes = [];
+        for (const { cid, bytes } of cars) {
+            const carFilename = join(this.config.dataDir, this.name, `${cid.toString()}.car`);
+            // console.log('writeCars', carFilename)
+            writes.push(writeSync(carFilename, bytes));
+        }
+        await Promise.all(writes);
+    }
+    async readCar(carCid) {
+        const carFilename = join(this.config.dataDir, this.name, `${carCid.toString()}.car`);
+        const got = readFileSync(carFilename);
+        // console.log('readCar', carFilename, got.constructor.name)
+        return got;
+    }
+    loadHeader(branch = 'main') {
+        const header = loadSync(this.headerFilename(branch));
+        // console.log('fs getHeader', this.headerFilename(), header, typeof header)
+        if (!header)
+            return null;
+        return JSON.parse(header);
+    }
+    async writeHeader(branch, header) {
+        // console.log('saveHeader', this.isBrowser)
+        if (this.config.readonly)
+            return;
+        const pHeader = this.prepareHeader(header);
+        // console.log('writeHeader fs', branch, pHeader)
+        await writeSync(this.headerFilename(branch), pHeader);
+    }
+    headerFilename(branch = 'main') {
+        // console.log('headerFilename', this.config.dataDir, this.name)
+        return join(this.config.dataDir, this.name, branch + '.json');
+    }
+}
+function loadSync(filename) {
+    try {
+        return readFileSync(filename, 'utf8').toString();
+    }
+    catch (error) {
+        // console.log('error', error)
+        return null;
+    }
+}
+async function writeSync(fullpath, stringValue) {
+    await mkdir(dirname(fullpath), { recursive: true });
+    // writeFileSync(fullpath, stringValue)
+    await writeFile(fullpath, stringValue);
+}