@fireproof/core 0.18.0 → 0.19.0-dev-publish
Sign up to get free protection for your applications and to get access to all the features.
- package/README.md +29 -15
- package/chunk-5X6APJDY.js +39 -0
- package/chunk-5X6APJDY.js.map +1 -0
- package/chunk-EVSZA26U.js +208 -0
- package/chunk-EVSZA26U.js.map +1 -0
- package/chunk-H3A2HMMM.js +164 -0
- package/chunk-H3A2HMMM.js.map +1 -0
- package/chunk-UCMXU3DH.js +268 -0
- package/chunk-UCMXU3DH.js.map +1 -0
- package/chunk-VZGT7ZYP.js +22 -0
- package/chunk-VZGT7ZYP.js.map +1 -0
- package/index.cjs +4676 -0
- package/index.cjs.map +1 -0
- package/index.d.cts +992 -0
- package/index.d.ts +992 -0
- package/index.js +2937 -0
- package/index.js.map +1 -0
- package/metafile-cjs.json +1 -0
- package/metafile-esm.json +1 -0
- package/node-sys-container-E7LADX2Z.js +29 -0
- package/node-sys-container-E7LADX2Z.js.map +1 -0
- package/package.json +23 -109
- package/sqlite-data-store-RIH56645.js +120 -0
- package/sqlite-data-store-RIH56645.js.map +1 -0
- package/sqlite-meta-store-6347MWOR.js +137 -0
- package/sqlite-meta-store-6347MWOR.js.map +1 -0
- package/sqlite-wal-store-G5YGK77N.js +123 -0
- package/sqlite-wal-store-G5YGK77N.js.map +1 -0
- package/store-file-D472VFCS.js +193 -0
- package/store-file-D472VFCS.js.map +1 -0
- package/store-indexdb-FRX5PTKR.js +20 -0
- package/store-indexdb-FRX5PTKR.js.map +1 -0
- package/store-sql-MDSU23Y7.js +344 -0
- package/store-sql-MDSU23Y7.js.map +1 -0
- package/dist/browser/fireproof.cjs +0 -1172
- package/dist/browser/fireproof.cjs.map +0 -1
- package/dist/browser/fireproof.d.cts +0 -268
- package/dist/browser/fireproof.d.ts +0 -268
- package/dist/browser/fireproof.global.js +0 -24178
- package/dist/browser/fireproof.global.js.map +0 -1
- package/dist/browser/fireproof.js +0 -1147
- package/dist/browser/fireproof.js.map +0 -1
- package/dist/browser/metafile-cjs.json +0 -1
- package/dist/browser/metafile-esm.json +0 -1
- package/dist/browser/metafile-iife.json +0 -1
- package/dist/memory/fireproof.cjs +0 -1172
- package/dist/memory/fireproof.cjs.map +0 -1
- package/dist/memory/fireproof.d.cts +0 -268
- package/dist/memory/fireproof.d.ts +0 -268
- package/dist/memory/fireproof.global.js +0 -24178
- package/dist/memory/fireproof.global.js.map +0 -1
- package/dist/memory/fireproof.js +0 -1147
- package/dist/memory/fireproof.js.map +0 -1
- package/dist/memory/metafile-cjs.json +0 -1
- package/dist/memory/metafile-esm.json +0 -1
- package/dist/memory/metafile-iife.json +0 -1
- package/dist/node/fireproof.cjs +0 -1172
- package/dist/node/fireproof.cjs.map +0 -1
- package/dist/node/fireproof.d.cts +0 -268
- package/dist/node/fireproof.d.ts +0 -268
- package/dist/node/fireproof.global.js +0 -38540
- package/dist/node/fireproof.global.js.map +0 -1
- package/dist/node/fireproof.js +0 -1138
- package/dist/node/fireproof.js.map +0 -1
- package/dist/node/metafile-cjs.json +0 -1
- package/dist/node/metafile-esm.json +0 -1
- package/dist/node/metafile-iife.json +0 -1
package/index.js
ADDED
@@ -0,0 +1,2937 @@
|
|
1
|
+
import {
|
2
|
+
guardVersion
|
3
|
+
} from "./chunk-EVSZA26U.js";
|
4
|
+
import {
|
5
|
+
NotFoundError,
|
6
|
+
isNotFoundError
|
7
|
+
} from "./chunk-VZGT7ZYP.js";
|
8
|
+
import {
|
9
|
+
dataDir,
|
10
|
+
ensureLogger,
|
11
|
+
exception2Result,
|
12
|
+
exceptionWrapper,
|
13
|
+
getKey,
|
14
|
+
getName,
|
15
|
+
getStore,
|
16
|
+
runtime_exports
|
17
|
+
} from "./chunk-UCMXU3DH.js";
|
18
|
+
import {
|
19
|
+
SysContainer,
|
20
|
+
__export,
|
21
|
+
falsyToUndef,
|
22
|
+
isFalsy,
|
23
|
+
throwFalsy
|
24
|
+
} from "./chunk-H3A2HMMM.js";
|
25
|
+
|
26
|
+
// src/database.ts
|
27
|
+
import { uuidv7 } from "uuidv7";
|
28
|
+
import { ResolveOnce as ResolveOnce5 } from "@adviser/cement";
|
29
|
+
|
30
|
+
// src/write-queue.ts
|
31
|
+
// Create a FIFO write queue that feeds pending tasks to `worker`.
// `payload` caps how many queued tasks are drained per batch (default: all).
// When `unbounded` is true every task gets its own worker call (all in
// flight concurrently); otherwise one worker call serves the whole batch
// and all of its tasks settle with the shared result.
function writeQueue(worker, payload = Infinity, unbounded = false) {
  const pending = [];
  let draining = false;
  async function drain() {
    if (draining) return;
    if (pending.length === 0) return;
    draining = true;
    const batch = pending.splice(0, payload);
    const updates = batch.map((entry) => entry.task);
    if (unbounded) {
      // One worker invocation per update, all concurrent; each task
      // resolves/rejects independently.
      const inflight = updates.map(async (update, i) => {
        try {
          batch[i].resolve(await worker([update]));
        } catch (err) {
          batch[i].reject(err);
        }
      });
      await Promise.all(inflight);
    } else {
      // Single worker invocation for the whole batch.
      try {
        const result = await worker(updates);
        for (const entry of batch) entry.resolve(result);
      } catch (err) {
        for (const entry of batch) entry.reject(err);
      }
    }
    draining = false;
    // Keep draining anything queued while we were busy.
    void drain();
  }
  return {
    // Enqueue one task; resolves/rejects with the worker's outcome.
    push(task) {
      return new Promise((resolve, reject) => {
        pending.push({ task, resolve, reject });
        void drain();
      });
    }
  };
}
|
69
|
+
|
70
|
+
// src/crdt.ts
|
71
|
+
import { ResolveOnce as ResolveOnce4 } from "@adviser/cement";
|
72
|
+
|
73
|
+
// src/crdt-helpers.ts
|
74
|
+
import { encode as encode3, decode as decode3, Block as Block2 } from "multiformats/block";
|
75
|
+
import { parse as parse2 } from "multiformats/link";
|
76
|
+
import { sha256 as hasher2 } from "multiformats/hashes/sha2";
|
77
|
+
import * as codec2 from "@ipld/dag-cbor";
|
78
|
+
import { put, get, entries, root } from "@web3-storage/pail/crdt";
|
79
|
+
import { EventFetcher, vis } from "@web3-storage/pail/clock";
|
80
|
+
import * as Batch from "@web3-storage/pail/crdt/batch";
|
81
|
+
|
82
|
+
// src/blockstore/index.ts
|
83
|
+
// Namespace object for the src/blockstore/index.ts re-exports, populated
// via the bundler's __export helper (getter thunks keep bindings live).
var blockstore_exports = {};
__export(blockstore_exports, {
  BaseBlockstore: () => BaseBlockstore,
  CarTransaction: () => CarTransaction,
  CompactionFetcher: () => CompactionFetcher,
  ConnectREST: () => ConnectREST,
  ConnectionBase: () => ConnectionBase,
  DataStore: () => DataStore,
  EncryptedBlockstore: () => EncryptedBlockstore,
  Loadable: () => Loadable,
  Loader: () => Loader,
  MetaStore: () => MetaStore,
  NotFoundError: () => NotFoundError,
  RemoteWAL: () => RemoteWAL,
  isNotFoundError: () => isNotFoundError,
  parseCarFile: () => parseCarFile,
  registerStoreProtocol: () => registerStoreProtocol,
  testStoreFactory: () => testStoreFactory,
  toStoreRuntime: () => toStoreRuntime,
  toURL: () => toURL
});
|
104
|
+
|
105
|
+
// src/blockstore/connection-base.ts
|
106
|
+
import { EventBlock, decodeEventBlock } from "@web3-storage/pail/clock";
|
107
|
+
import { MemoryBlockstore } from "@web3-storage/pail/block";
|
108
|
+
|
109
|
+
// src/blockstore/task-manager.ts
|
110
|
+
// Serializes application of remote meta (Merkle-clock) events to the loader.
// Incoming events are deduplicated via `eventsWeHandled`: any parent of an
// incoming event is implicitly handled. Failed events are retried with a
// small delay and dropped after repeated failures.
var TaskManager = class {
  constructor(loader) {
    // CIDs of events already applied (or superseded by a child event).
    this.eventsWeHandled = /* @__PURE__ */ new Set();
    this.queue = [];
    this.isProcessing = false;
    this.loader = loader;
    this.logger = ensureLogger(loader.logger, "TaskManager");
  }
  // Queue an incoming event block for processing; its parents count as
  // handled, and already-handled events are pruned from the queue.
  async handleEvent(eventBlock) {
    const cid = eventBlock.cid.toString();
    const parents = eventBlock.value.parents.map((cid2) => cid2.toString());
    for (const parent of parents) {
      this.eventsWeHandled.add(parent);
    }
    this.queue.push({ cid, eventBlock, retries: 0 });
    this.queue = this.queue.filter(({ cid: cid2 }) => !this.eventsWeHandled.has(cid2));
    void this.processQueue();
  }
  // Apply the first unhandled queued event to the remote meta store.
  // Re-entrancy is guarded by `isProcessing`.
  async processQueue() {
    if (this.isProcessing) return;
    this.isProcessing = true;
    const filteredQueue = this.queue.filter(({ cid }) => !this.eventsWeHandled.has(cid));
    const first = filteredQueue[0];
    if (!first) {
      // BUGFIX: previously this early return left isProcessing === true,
      // permanently blocking all future queue processing once the queue
      // drained to empty here (the finally below never ran).
      this.isProcessing = false;
      return;
    }
    try {
      this.loader?.remoteMetaStore?.handleByteHeads([first.eventBlock.value.data.dbMeta]);
      this.eventsWeHandled.add(first.cid);
      this.queue = this.queue.filter(({ cid }) => !this.eventsWeHandled.has(cid));
    } catch (err) {
      // Drop the event after it has failed more than 3 times.
      if (first.retries++ > 3) {
        this.logger.Error().Str("cid", first.cid).Msg("failed to process event block after 3 retries");
        this.queue = this.queue.filter(({ cid }) => cid !== first.cid);
      }
      // Brief backoff before surfacing the failure to the caller.
      await new Promise((resolve) => setTimeout(resolve, 50));
      throw this.logger.Error().Err(err).Msg("failed to process event block").AsError();
    } finally {
      this.isProcessing = false;
      if (this.queue.length > 0) {
        void this.processQueue();
      }
    }
  }
};
|
155
|
+
|
156
|
+
// src/blockstore/connection-base.ts
|
157
|
+
// Abstract base class for remote connections. Subclasses provide the
// actual transport; this class wires a Loader to the connection, tracks
// Merkle-clock event blocks, and exposes connect/refresh helpers.
var ConnectionBase = class {
  constructor(logger) {
    // readonly ready: Promise<unknown>;
    // todo move to LRU blockstore https://github.com/web3-storage/w3clock/blob/main/src/worker/block.js
    this.eventBlocks = new MemoryBlockstore();
    this.parents = [];
    this.loaded = Promise.resolve();
    this.logger = ensureLogger(logger, "ConnectionBase");
  }
  // Re-load the remote "main" meta branch, then flush the pending remote WAL.
  async refresh() {
    await throwFalsy(throwFalsy(this.loader).remoteMetaStore).load("main");
    await (await throwFalsy(this.loader).remoteWAL())._process();
  }
  // Attach both the meta and the storage side of this connection to a loader.
  connect({ loader }) {
    if (!loader) throw this.logger.Error().Msg("loader is required").AsError();
    this.connectMeta({ loader });
    this.connectStorage({ loader });
  }
  connectMeta({ loader }) {
    if (!loader) throw this.logger.Error().Msg("loader is required").AsError();
    this.loader = loader;
    this.taskManager = new TaskManager(loader);
    // NOTE(review): fire-and-forget — onConnect is a subclass hook and any
    // rejection from it is not awaited or handled here.
    this.onConnect();
    this.logger.Warn().Msg("connectMeta: connecting to remote meta store is disabled");
  }
  // Subclass hook invoked from connectMeta; default is a no-op.
  async onConnect() {
    return;
  }
  connectStorage({ loader }) {
    if (!loader) throw this.logger.Error().Msg("loader is required").AsError();
    this.loader = loader;
    this.logger.Warn().Msg("connectStorage: connecting to remote meta store is disabled");
  }
  // Wrap raw dbMeta bytes in a clock event block (with the current parents)
  // and remember it in the local event blockstore.
  async createEventBlock(bytes) {
    const data = {
      dbMeta: bytes
    };
    const event = await EventBlock.create(
      data,
      this.parents
    );
    await this.eventBlocks.put(event.cid, event.bytes);
    return event;
  }
  // Decode raw bytes back into an event block.
  async decodeEventBlock(bytes) {
    const event = await decodeEventBlock(bytes);
    return event;
  }
  // move this stuff to connect
  // async getDashboardURL(compact = true) {
  //   const baseUrl = 'https://dashboard.fireproof.storage/'
  //   if (!this.loader?.remoteCarStore) return new URL('/howto', baseUrl)
  //   // if (compact) {
  //   //   await this.compact()
  //   // }
  //   const currents = await this.loader?.metaStore?.load()
  //   if (!currents) throw new Error("Can't sync empty database: save data first")
  //   if (currents.length > 1)
  //     throw new Error("Can't sync database with split heads: make an update first")
  //   const current = currents[0]
  //   const params = {
  //     car: current.car.toString()
  //   }
  //   if (current.key) {
  //     // @ts-ignore
  //     params.key = current.key.toString()
  //   }
  //   // @ts-ignore
  //   if (this.name) {
  //     // @ts-ignore
  //     params.name = this.name
  //   }
  //   const url = new URL('/import#' + new URLSearchParams(params).toString(), baseUrl)
  //   console.log('Import to dashboard: ' + url.toString())
  //   return url
  // }
  // openDashboard() {
  //   void this.getDashboardURL().then(url => {
  //     if (url) window.open(url.toString(), '_blank')
  //   })
  // }
};
|
239
|
+
|
240
|
+
// src/blockstore/connect-rest.ts
|
241
|
+
// REST transport for CAR data files (PUT/GET against /cars/<cid>.car).
// Meta upload/download are intentionally no-ops in this transport.
var ConnectREST = class extends ConnectionBase {
  constructor(base, logger) {
    super(ensureLogger(logger, "ConnectREST"));
    this.baseUrl = new URL(base);
  }
  // PUT the CAR bytes for params.car; throws on any non-2xx response.
  async dataUpload(bytes, params) {
    const carCid = params.car.toString();
    const uploadURL = new URL(`/cars/${carCid}.car`, this.baseUrl);
    const done = await fetch(uploadURL, { method: "PUT", body: bytes });
    if (!done.ok) {
      // BUGFIX: added .AsError() so an actual Error is thrown — every other
      // throw-site in this module uses it; previously the logger builder
      // object itself was thrown.
      throw this.logger.Error().Msg("failed to upload data " + done.statusText).AsError();
    }
  }
  // GET the CAR bytes for params.car; undefined when the server lacks them.
  async dataDownload(params) {
    const { car } = params;
    const fetchFromUrl = new URL(`/cars/${car.toString()}.car`, this.baseUrl);
    const response = await fetch(fetchFromUrl);
    if (!response.ok) {
      return void 0;
    }
    const bytes = new Uint8Array(await response.arrayBuffer());
    return bytes;
  }
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  async metaUpload(bytes, params) {
    return void 0;
  }
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  async metaDownload(params) {
    return [];
  }
};
|
273
|
+
|
274
|
+
// src/blockstore/store-factory.ts
|
275
|
+
import { KeyedResolvOnce } from "@adviser/cement";
|
276
|
+
|
277
|
+
// src/runtime/files.ts
|
278
|
+
import * as UnixFS from "@ipld/unixfs";
|
279
|
+
import * as raw from "multiformats/codecs/raw";
|
280
|
+
import { withMaxChunkSize } from "@ipld/unixfs/file/chunker/fixed";
|
281
|
+
import { withWidth } from "@ipld/unixfs/file/layout/balanced";
|
282
|
+
import { exporter } from "ipfs-unixfs-exporter";
|
283
|
+
// UnixFS writer configuration: raw-leaf encoding for file chunks, fixed
// 1 MiB chunking, and a balanced DAG layout with fan-out width 1024.
var queuingStrategy = UnixFS.withCapacity();
var settings = UnixFS.configure({
  fileChunkEncoder: raw,
  smallFileEncoder: raw,
  chunker: withMaxChunkSize(1024 * 1024),
  fileLayout: withWidth(1024)
});
|
290
|
+
// Drain a ReadableStream (anything with pipeTo) into an array of its chunks.
async function collect(collectable) {
  const gathered = [];
  const sink = new WritableStream({
    write(part) {
      gathered.push(part);
    }
  });
  await collectable.pipeTo(sink);
  return gathered;
}
|
301
|
+
// Encode a Blob into UnixFS blocks. The stream yields leaf blocks first;
// the final block is the file root, so its CID addresses the whole file.
async function encodeFile(blob) {
  const blocks = await collect(createFileEncoderStream(blob));
  return { cid: blocks.at(-1).cid, blocks };
}
|
306
|
+
// Reassemble a File from UnixFS blocks addressed by `cid`. `meta` carries
// the original size and MIME type; lastModified is pinned to 0 so the
// result is deterministic.
async function decodeFile(blocks, cid, meta) {
  const entry = await exporter(cid.toString(), blocks, { length: meta.size });
  const parts = [];
  for await (const part of entry.content()) {
    parts.push(part);
  }
  return new File(parts, entry.name, { type: meta.type, lastModified: 0 });
}
|
314
|
+
// Stream a Blob through a UnixFS writer and return the readable side that
// yields the resulting blocks. The write side runs as a detached async
// task; its errors surface on the returned stream.
function createFileEncoderStream(blob) {
  const { readable, writable } = new TransformStream({}, queuingStrategy);
  const writer = UnixFS.createWriter({ writable, settings });
  const builder = new UnixFSFileBuilder("", blob);
  void (async () => {
    await builder.finalize(writer);
    await writer.close();
  })();
  return readable;
}
|
324
|
+
// Wraps a Blob so its bytes can be written into a UnixFS DAG under a name.
var UnixFSFileBuilder = class {
  #file;
  constructor(name, file) {
    this.name = name;
    this.#file = file;
  }
  // Pipe the blob's bytes into a fresh UnixFS file writer, close it, and
  // return the file's root entry.
  async finalize(writer) {
    const fileWriter = UnixFS.createFileWriter(writer);
    const sink = new WritableStream({
      async write(chunk) {
        await fileWriter.write(chunk);
      }
    });
    await this.#file.stream().pipeTo(sink);
    return await fileWriter.close();
  }
};
|
342
|
+
|
343
|
+
// src/blockstore/store.ts
|
344
|
+
import pLimit2 from "p-limit";
|
345
|
+
import { format, parse } from "@ipld/dag-json";
|
346
|
+
import { ResolveOnce as ResolveOnce2, Result } from "@adviser/cement";
|
347
|
+
|
348
|
+
// src/blockstore/loader.ts
|
349
|
+
import pLimit from "p-limit";
|
350
|
+
import { CarReader } from "@ipld/car";
|
351
|
+
import { ResolveOnce } from "@adviser/cement";
|
352
|
+
|
353
|
+
// src/blockstore/types.ts
|
354
|
+
// Type-narrowing identity helper: treats a generic block as a CID block.
// No runtime validation is performed.
function toCIDBlock(block) {
  return block;
}
|
357
|
+
|
358
|
+
// src/blockstore/loader-helpers.ts
|
359
|
+
import { encode, decode } from "multiformats/block";
|
360
|
+
import { sha256 as hasher } from "multiformats/hashes/sha2";
|
361
|
+
import * as raw2 from "multiformats/codecs/raw";
|
362
|
+
import * as CBW from "@ipld/car/buffer-writer";
|
363
|
+
import * as codec from "@ipld/dag-cbor";
|
364
|
+
// Serialize all blocks of `t` into a CAR file with the given roots, then
// return the CAR bytes wrapped in a single raw-codec block (so the CAR
// itself is content-addressed).
async function encodeCarFile(roots, t) {
  const headerSize = CBW.headerLength({ roots });
  // First pass: compute the exact buffer size (header + every block).
  let size = headerSize;
  for (const { cid, bytes } of t.entries()) {
    size += CBW.blockLength({ cid, bytes });
  }
  const buffer = new Uint8Array(size);
  const writer = CBW.createWriter(buffer, { headerSize });
  for (const carRoot of roots) {
    writer.addRoot(carRoot);
  }
  // Second pass: emit the blocks into the pre-sized buffer.
  for (const { cid, bytes } of t.entries()) {
    writer.write({ cid, bytes });
  }
  writer.close();
  return await encode({ value: writer.bytes, hasher, codec: raw2 });
}
|
382
|
+
// Wrap a Fireproof CAR header value in a dag-cbor block ({ fp: ... }).
async function encodeCarHeader(fp) {
  return await encode({ value: { fp }, hasher, codec });
}
|
389
|
+
// Read the header block (first root) of a CAR via `reader` and return the
// embedded Fireproof header (`fp`). Throws a descriptive error when the
// header block or the fp payload is missing.
async function parseCarFile(reader, logger) {
  const roots = await reader.getRoots();
  const header = await reader.get(roots[0]);
  if (!header) throw logger.Error().Msg("missing header block").AsError();
  const { value } = await decode({ bytes: header.bytes, hasher, codec });
  const fpvalue = value;
  // BUGFIX: also reject a falsy decoded value. The previous guard
  // (`fpvalue && !fpvalue.fp`) let a null/undefined value fall through to
  // `fpvalue.fp` below, raising a bare TypeError instead of this error.
  if (!fpvalue || !fpvalue.fp) {
    throw logger.Error().Msg("missing fp").AsError();
  }
  return fpvalue.fp;
}
|
400
|
+
|
401
|
+
// src/blockstore/encrypt-helpers.ts
|
402
|
+
import { sha256 } from "multiformats/hashes/sha2";
|
403
|
+
import { CID as CID2 } from "multiformats";
|
404
|
+
import { encode as encode2, decode as decode2, create as mfCreate } from "multiformats/block";
|
405
|
+
import * as dagcbor from "@ipld/dag-cbor";
|
406
|
+
import { MemoryBlockstore as MemoryBlockstore2 } from "@web3-storage/pail/block";
|
407
|
+
import { bf } from "prolly-trees/utils";
|
408
|
+
import { nocache as cache } from "prolly-trees/cache";
|
409
|
+
import { create, load } from "prolly-trees/cid-set";
|
410
|
+
|
411
|
+
// src/blockstore/encrypt-codec.ts
|
412
|
+
import { CID } from "multiformats";
|
413
|
+
// Build the AES-GCM "encrypted block" codec used for CAR encryption.
// `crypto2` supplies WebCrypto-style importKey/encrypt/decrypt hooks and
// `randomBytes2` a CSPRNG (see toCryptoOpts). Wire format of an encrypted
// value: 12-byte IV || AES-GCM ciphertext; the plaintext inside is
// [len:4 LE][cid bytes:len][block bytes].
function makeCodec(ilogger, crypto2, randomBytes2) {
  const logger = ensureLogger(ilogger, "makeCodec");
  // Encode a uint32 into 4 little-endian bytes.
  const enc32 = (value) => {
    value = +value;
    const buff = new Uint8Array(4);
    buff[3] = value >>> 24;
    buff[2] = value >>> 16;
    buff[1] = value >>> 8;
    buff[0] = value & 255;
    return buff;
  };
  // Read a little-endian uint32 from the LAST 4 bytes of `buffer`; callers
  // here always pass a 4-byte subarray, so the offset is effectively 0.
  const readUInt32LE = (buffer) => {
    const offset = buffer.byteLength - 4;
    return (buffer[offset] | buffer[offset + 1] << 8 | buffer[offset + 2] << 16) + buffer[offset + 3] * 16777216;
  };
  // Concatenate ArrayBuffers/Uint8Arrays into one Uint8Array.
  const concat = (buffers) => {
    const uint8Arrays = buffers.map((b) => b instanceof ArrayBuffer ? new Uint8Array(b) : b);
    const totalLength = uint8Arrays.reduce((sum, arr) => sum + arr.length, 0);
    const result = new Uint8Array(totalLength);
    let offset = 0;
    for (const arr of uint8Arrays) {
      result.set(arr, offset);
      offset += arr.length;
    }
    return result;
  };
  // codec.encode: prepend the IV to the ciphertext.
  const encode4 = ({ iv, bytes }) => concat([iv, bytes]);
  // codec.decode: split the 12-byte IV off the front.
  const decode4 = (bytes) => {
    const iv = bytes.subarray(0, 12);
    bytes = bytes.slice(12);
    return { iv, bytes };
  };
  // Private multicodec code for this codec (0x300000 + 1337).
  const code = 3145728 + 1337;
  // Import raw key material as a non-extractable AES-GCM CryptoKey.
  async function subtleKey(key) {
    return await crypto2.importKey(
      "raw",
      // raw or jwk
      key,
      // raw data
      "AES-GCM",
      false,
      // extractable
      ["encrypt", "decrypt"]
    );
  }
  // Decrypt {iv, bytes} and recover the original cid + block bytes from
  // the length-prefixed plaintext.
  const decrypt = async ({ key, value }) => {
    const { bytes: inBytes, iv } = value;
    const cryKey = await subtleKey(key);
    const deBytes = await crypto2.decrypt(
      {
        name: "AES-GCM",
        iv,
        tagLength: 128
      },
      cryKey,
      inBytes
    );
    const bytes = new Uint8Array(deBytes);
    const len = readUInt32LE(bytes.subarray(0, 4));
    const cid = CID.decode(bytes.subarray(4, 4 + len));
    return { cid, bytes: bytes.subarray(4 + len) };
  };
  // Encrypt a block: length-prefix and embed the CID so decrypt can restore
  // the original addressing, then AES-GCM with a fresh random 12-byte IV.
  const encrypt = async ({ key, cid, bytes }) => {
    const len = enc32(cid.bytes.byteLength);
    const iv = randomBytes2(12);
    const msg = concat([len, cid.bytes, bytes]);
    try {
      const cryKey = await subtleKey(key);
      const deBytes = await crypto2.encrypt(
        {
          name: "AES-GCM",
          iv,
          tagLength: 128
        },
        cryKey,
        msg
      );
      bytes = new Uint8Array(deBytes);
    } catch (e) {
      throw logger.Error().Err(e).Msg("encrypt failed").AsError();
    }
    return { value: { bytes, iv } };
  };
  // Bind a key once, returning key-less encrypt/decrypt closures.
  const cryptoFn = (key) => {
    return { encrypt: (opts) => encrypt({ ...opts, key }), decrypt: (opts) => decrypt({ ...opts, key }) };
  };
  const name = "jchris@encrypted-block:aes-gcm";
  return { encode: encode4, decode: decode4, code, name, encrypt, decrypt, crypto: cryptoFn };
}
|
502
|
+
|
503
|
+
// src/blockstore/encrypt-helpers.ts
|
504
|
+
// True when `list` already contains a CID equal (by string form) to `cidMatch`.
function carLogIncludesGroup(list, cidMatch) {
  const wanted = cidMatch.toString();
  return list.some((cid) => cid.toString() === wanted);
}
|
509
|
+
// Build paired encrypt/decrypt async generators over a set of CAR blocks.
// encrypt yields, in order: each encrypted payload block, the prolly-tree
// cid-set blocks indexing them, and finally a dag-cbor head block
// [encryptedRoot, cidSetRoot]. decrypt walks that structure back to
// plaintext blocks (root block is yielded last).
function makeEncDec(logger, crypto2, randomBytes2) {
  const codec4 = makeCodec(logger, crypto2, randomBytes2);
  const encrypt = async function* ({
    get: get2,
    cids,
    hasher: hasher4,
    key,
    cache: cache3,
    chunker: chunker2,
    root: root3
  }) {
    const set = /* @__PURE__ */ new Set();
    let eroot;
    // Ensure the root itself is among the blocks being encrypted.
    if (!carLogIncludesGroup(cids, root3)) cids.push(root3);
    for (const cid of cids) {
      const unencrypted = await get2(cid);
      if (!unencrypted) throw logger.Error().Ref("cid", cid).Msg("missing cid block").AsError();
      const encrypted = await codec4.encrypt({ ...unencrypted, key });
      const block2 = await encode2({ ...encrypted, codec: codec4, hasher: hasher4 });
      yield block2;
      set.add(block2.cid.toString());
      // Remember which encrypted block corresponds to the plaintext root.
      if (unencrypted.cid.equals(root3)) eroot = block2.cid;
    }
    if (!eroot) throw logger.Error().Msg("cids does not include root").AsError();
    const list = [...set].map((s) => CID2.parse(s));
    let last;
    // Build a prolly-tree cid-set over the encrypted block cids.
    for await (const node of create({ list, get: get2, cache: cache3, chunker: chunker2, hasher: hasher4, codec: dagcbor })) {
      const block2 = await node.block;
      yield block2;
      last = block2;
    }
    if (!last) throw logger.Error().Msg("missing last block").AsError();
    // Head block pairing the encrypted root with the cid-set root.
    const head = [eroot, last.cid];
    const block = await encode2({ value: head, codec: dagcbor, hasher: hasher4 });
    yield block;
  };
  const decrypt = async function* ({
    root: root3,
    get: get2,
    key,
    cache: cache3,
    chunker: chunker2,
    hasher: hasher4
  }) {
    // Fetch + dag-cbor decode (used for the head block).
    const getWithDecode = async (cid) => get2(cid).then(async (block) => {
      if (!block) return;
      const decoded = await decode2({ ...block, codec: dagcbor, hasher: hasher4 });
      return decoded;
    });
    // Fetch + encrypted-codec decode (used for encrypted payload blocks).
    const getWithDecrypt = async (cid) => get2(cid).then(async (block) => {
      if (!block) return;
      const decoded = await decode2({ ...block, codec: codec4, hasher: hasher4 });
      return decoded;
    });
    const decodedRoot = await getWithDecode(root3);
    if (!decodedRoot) throw logger.Error().Msg("missing root").AsError();
    if (!decodedRoot.bytes) throw logger.Error().Msg("missing bytes").AsError();
    // Head block holds [encryptedRoot, cidSetRoot].
    const {
      value: [eroot, tree]
    } = decodedRoot;
    const rootBlock = await get2(eroot);
    if (!rootBlock) throw logger.Error().Msg("missing root block").AsError();
    const cidset = await load({ cid: tree, get: getWithDecode, cache: cache3, chunker: chunker2, codec: codec4, hasher: hasher4 });
    const { result: nodes } = await cidset.getAllEntries();
    // Decrypt one encrypted block back into an addressed plaintext block.
    const unwrap = async (eblock) => {
      if (!eblock) throw logger.Error().Msg("missing block").AsError();
      if (!eblock.value) {
        eblock = await decode2({ ...eblock, codec: codec4, hasher: hasher4 });
        if (!eblock.value) throw logger.Error().Msg("missing value").AsError();
      }
      const { bytes, cid } = await codec4.decrypt({ ...eblock, key }).catch((e) => {
        throw e;
      });
      const block = await mfCreate({ cid, bytes, hasher: hasher4, codec: codec4 });
      return block;
    };
    const promises = [];
    for (const { cid } of nodes) {
      // The root block is skipped here and decrypted last, below.
      if (!rootBlock.cid.equals(cid)) promises.push(getWithDecrypt(cid).then(unwrap));
    }
    yield* promises;
    yield unwrap(rootBlock);
  };
  return { encrypt, decrypt };
}
|
594
|
+
var chunker = bf(30);
|
595
|
+
// Decode a hex string (two characters per byte) into a Uint8Array.
function hexStringToUint8Array(hexString) {
  const out = new Uint8Array(hexString.length / 2);
  for (let i = 0; i < out.length; i++) {
    out[i] = Number.parseInt(hexString.substring(2 * i, 2 * i + 2), 16);
  }
  return out;
}
|
603
|
+
// Encrypt every block of transaction `t` with `key` (hex-encoded) and pack
// the encrypted blocks into a CAR file rooted at the final head block
// yielded by the encrypt generator.
async function encryptedEncodeCarFile(logger, crypto2, key, rootCid, t) {
  const encryptionKey = hexStringToUint8Array(key);
  const encryptedBlocks = new MemoryBlockstore2();
  const cidsToEncrypt = [];
  for (const { cid, bytes } of t.entries()) {
    cidsToEncrypt.push(cid);
    // Sanity check: every listed cid must be retrievable from the transaction.
    const g = await t.get(cid);
    if (!g) throw logger.Error().Ref("cid", cid).Int("bytes", bytes.length).Msg("missing cid block").AsError();
  }
  let last = null;
  const { encrypt } = makeEncDec(logger, crypto2, crypto2.randomBytes);
  for await (const block of encrypt({
    cids: cidsToEncrypt,
    get: t.get.bind(t),
    key: encryptionKey,
    hasher: sha256,
    chunker,
    cache,
    root: rootCid
  })) {
    await encryptedBlocks.put(block.cid, block.bytes);
    last = block;
  }
  if (!last) throw logger.Error().Msg("no blocks encrypted").AsError();
  // The last yielded block is the [root, cid-set] head; it becomes the CAR root.
  const encryptedCar = await encodeCarFile([last.cid], encryptedBlocks);
  return encryptedCar;
}
|
630
|
+
// Decrypt an encrypted CAR: delegate to decodeCarBlocks using the CAR's
// first root as the entry point.
async function decodeEncryptedCar(logger, crypto2, key, reader) {
  const [carRoot] = await reader.getRoots();
  return await decodeCarBlocks(logger, crypto2, carRoot, reader.get.bind(reader), key);
}
|
635
|
+
// Decrypt every block reachable from `root3` using `keyMaterial` (hex),
// collecting the plaintext blocks into a fresh in-memory blockstore.
// Returns that blockstore plus the plaintext root cid (the last block
// yielded by the decrypt generator).
async function decodeCarBlocks(logger, crypto2, root3, get2, keyMaterial) {
  const keyBytes = hexStringToUint8Array(keyMaterial);
  // Pass the key as a plain ArrayBuffer trimmed to the key's byte length.
  const decryptionKey = keyBytes.buffer.slice(0, keyBytes.byteLength);
  const decryptedBlocks = new MemoryBlockstore2();
  const { decrypt } = makeEncDec(logger, crypto2, crypto2.randomBytes);
  let last = null;
  for await (const block of decrypt({
    root: root3,
    get: get2,
    key: decryptionKey,
    hasher: sha256,
    chunker,
    cache
  })) {
    await decryptedBlocks.put(block.cid, block.bytes);
    last = block;
  }
  if (!last) throw logger.Error().Msg("no blocks decrypted").AsError();
  return { blocks: decryptedBlocks, root: last.cid };
}
|
655
|
+
|
656
|
+
// src/blockstore/transaction.ts
|
657
|
+
import { MemoryBlockstore as MemoryBlockstore3 } from "@web3-storage/pail/block";
|
658
|
+
|
659
|
+
// src/runtime/crypto.ts
|
660
|
+
function randomBytes(size) {
|
661
|
+
const bytes = new Uint8Array(size);
|
662
|
+
if (size > 0) {
|
663
|
+
crypto.getRandomValues(bytes);
|
664
|
+
}
|
665
|
+
return bytes;
|
666
|
+
}
|
667
|
+
function digestSHA256(data) {
|
668
|
+
return Promise.resolve(crypto.subtle.digest("SHA-256", data));
|
669
|
+
}
|
670
|
+
// Merge caller-supplied crypto hooks with WebCrypto-backed defaults.
// Each hook falls back to the corresponding SubtleCrypto method (bound to
// crypto.subtle) or to the module's randomBytes/digestSHA256 helpers.
function toCryptoOpts(cryptoOpts = {}) {
  return {
    importKey: cryptoOpts.importKey || crypto.subtle.importKey.bind(crypto.subtle),
    encrypt: cryptoOpts.encrypt || crypto.subtle.encrypt.bind(crypto.subtle),
    decrypt: cryptoOpts.decrypt || crypto.subtle.decrypt.bind(crypto.subtle),
    randomBytes: cryptoOpts.randomBytes || randomBytes,
    digestSHA256: cryptoOpts.digestSHA256 || digestSHA256
  };
}
|
680
|
+
|
681
|
+
// src/blockstore/transaction.ts
|
682
|
+
// In-memory blockstore scoped to a single transaction. By default it
// registers itself with the parent blockstore's transaction set. Reads
// fall back to the parent when a CID is not present locally.
var CarTransaction = class extends MemoryBlockstore3 {
  constructor(parent, opts = { add: true }) {
    super();
    if (opts.add) {
      parent.transactions.add(this);
    }
    this.parent = parent;
  }
  // Local-first get: this transaction's own blocks, then the parent chain.
  async get(cid) {
    const local = await this.superGet(cid);
    if (local) return local;
    return falsyToUndef(await this.parent.get(cid));
  }
  // Direct access to this transaction's own blocks, skipping the parent.
  async superGet(cid) {
    return super.get(cid);
  }
};
|
697
|
+
// Build a fully-populated blockstore runtime from partial options: no-op
// applyMeta/compact defaults, compaction/visibility settings, then the
// caller's opts layered on top. logger, crypto, store and storeRuntime are
// always derived here and cannot be overridden by opts.
function defaultedBlockstoreRuntime(opts, component, ctx) {
  const logger = ensureLogger(opts, component, ctx);
  const store = opts.store || {};
  const defaults = {
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    applyMeta: (meta, snap) => {
      return Promise.resolve();
    },
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    compact: async (blocks) => {
      return {};
    },
    autoCompact: 100,
    public: false,
    name: void 0,
    threshold: 1e3 * 1e3
  };
  return {
    ...defaults,
    ...opts,
    logger,
    crypto: toCryptoOpts(opts.crypto),
    store,
    storeRuntime: toStoreRuntime(store, logger)
  };
}
|
720
|
+
// Pick a blockstore implementation: named stores get the encrypted,
// loader-backed variant; anonymous ones a plain in-memory base.
var blockstoreFactory = function(opts) {
  return opts.name ? new EncryptedBlockstore(opts) : new BaseBlockstore(opts);
};
|
727
|
+
// In-memory, transaction-based blockstore with no persistence and no
// encryption. All blocks live inside CarTransaction instances registered
// in `transactions`.
var BaseBlockstore = class {
  constructor(ebOpts = {}) {
    this.transactions = /* @__PURE__ */ new Set();
    this.ebOpts = defaultedBlockstoreRuntime(ebOpts, "BaseBlockstore");
    this.logger = this.ebOpts.logger;
  }
  // ready: Promise<void>;
  // No async setup needed for the in-memory variant.
  ready() {
    return Promise.resolve();
  }
  async close() {
  }
  async destroy() {
  }
  // Scan all registered transactions for `cid`; resolves undefined when absent.
  async get(cid) {
    if (!cid) throw this.logger.Error().Msg("required cid").AsError();
    for (const f of this.transactions) {
      const v = await f.superGet(cid);
      if (v) return v;
    }
  }
  // Direct puts are forbidden; writes must go through transaction().
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  async put(cid, block) {
    throw this.logger.Error().Msg("use a transaction to put").AsError();
  }
  // TransactionMeta
  // Run `fn` inside a fresh CarTransaction and record the meta it returns.
  async transaction(fn, _opts = {}) {
    const t = new CarTransaction(this);
    const done = await fn(t);
    this.lastTxMeta = done;
    return { t, meta: done };
  }
  // Yield each block exactly once across all transactions (dedup by cid string).
  async *entries() {
    const seen = /* @__PURE__ */ new Set();
    for (const t of this.transactions) {
      for await (const blk of t.entries()) {
        if (seen.has(blk.cid.toString())) continue;
        seen.add(blk.cid.toString());
        yield blk;
      }
    }
  }
};
|
770
|
+
var EncryptedBlockstore = class extends BaseBlockstore {
  /**
   * Blockstore backed by a named Loader that persists blocks as (optionally
   * encrypted) CAR files. `ebOpts.name` is mandatory — it keys the stores.
   */
  constructor(ebOpts) {
    super(ebOpts);
    this.compacting = false;
    this.logger = ensureLogger(ebOpts, "EncryptedBlockstore");
    const { name } = ebOpts;
    if (!name) {
      throw this.logger.Error().Msg("name required").AsError();
    }
    this.name = name;
    this.loader = new Loader(this.name, ebOpts);
  }
  ready() {
    return this.loader.ready();
  }
  close() {
    return this.loader.close();
  }
  destroy() {
    return this.loader.destroy();
  }
  /** Check open transactions first (super.get), then the persistent loader. */
  async get(cid) {
    const got = await super.get(cid);
    if (got) return got;
    if (!this.loader) {
      return;
    }
    return falsyToUndef(await this.loader.getBlock(cid));
  }
  /**
   * Run a transaction and commit its blocks to CAR storage. Triggers
   * background compaction once the car log exceeds `autoCompact`.
   */
  async transaction(fn, opts = { noLoader: false }) {
    const { t, meta: done } = await super.transaction(fn);
    const cars = await this.loader.commit(t, done, opts);
    if (this.ebOpts.autoCompact && this.loader.carLog.length > this.ebOpts.autoCompact) {
      // fire-and-forget; reentry is guarded by this.compacting
      setTimeout(() => void this.compact(), 10);
    }
    if (cars) {
      this.transactions.delete(t);
      return { meta: done, cars, t };
    }
    throw this.logger.Error().Msg("failed to commit car files").AsError();
  }
  /** Load the raw bytes of a file block from its file CAR. */
  async getFile(car, cid, isPublic = false) {
    await this.ready();
    if (!this.loader) throw this.logger.Error().Msg("loader required to get file, database must be named").AsError();
    const reader = await this.loader.loadFileCar(car, isPublic);
    const block = await reader.get(cid);
    if (!block) throw this.logger.Error().Str("cid", cid.toString()).Msg(`Missing block`).AsError();
    return block.bytes;
  }
  /**
   * Rewrite the live block set into a single compacted CAR. No-op when there
   * is nothing to compact or a compaction is already in flight.
   */
  async compact() {
    await this.ready();
    if (!this.loader) throw this.logger.Error().Msg("loader required to compact").AsError();
    if (this.loader.carLog.length < 2) return;
    const compactFn = this.ebOpts.compact || ((blocks) => this.defaultCompact(blocks, this.logger));
    if (!compactFn || this.compacting) return;
    const blockLog = new CompactionFetcher(this);
    this.compacting = true;
    try {
      const meta = await compactFn(blockLog);
      await this.loader?.commit(blockLog.loggedBlocks, meta, {
        compact: true,
        noLoader: true
      });
    } finally {
      // BUGFIX: the flag was previously only cleared on success, so a throwing
      // compactFn/commit left `compacting` stuck at true and silently disabled
      // all future compaction. Always reset it.
      this.compacting = false;
    }
  }
  /** Default strategy: copy every live block into the compaction transaction. */
  async defaultCompact(blocks, logger) {
    if (!this.loader) {
      throw logger.Error().Msg("no loader").AsError();
    }
    if (!this.lastTxMeta) {
      throw logger.Error().Msg("no lastTxMeta").AsError();
    }
    for await (const blk of this.loader.entries(false)) {
      blocks.loggedBlocks.putSync(blk.cid, blk.bytes);
    }
    for (const t of this.transactions) {
      for await (const blk of t.entries()) {
        blocks.loggedBlocks.putSync(blk.cid, blk.bytes);
      }
    }
    return this.lastTxMeta;
  }
  /** Stream all persisted blocks via the loader's cache. */
  async *entries() {
    for await (const blk of this.loader.entries()) {
      yield blk;
    }
  }
};
|
857
|
+
var CompactionFetcher = class {
  /**
   * Read-through recorder used during compaction: every block fetched through
   * it is copied into `loggedBlocks`, which becomes the compacted CAR.
   */
  constructor(blocks) {
    this.blockstore = blocks;
    this.loggedBlocks = new CarTransaction(blocks);
  }
  /** Fetch from the underlying blockstore and log the hit for the new CAR. */
  async get(cid) {
    const found = await this.blockstore.get(cid);
    if (found) {
      this.loggedBlocks.putSync(cid, found.bytes);
    }
    return falsyToUndef(found);
  }
};
|
868
|
+
|
869
|
+
// src/blockstore/commit-queue.ts
|
870
|
+
var CommitQueue = class {
  /**
   * Serializes async jobs: at most one runs at a time, in FIFO order.
   * Each caller gets a promise settling with its own job's outcome.
   */
  constructor() {
    this.queue = [];
    this.processing = false;
  }
  /** Schedule `fn`; resolves/rejects with its result once its turn comes. */
  async enqueue(fn) {
    return new Promise((resolve, reject) => {
      const task = async () => {
        try {
          const result = await fn();
          resolve(result);
        } catch (err) {
          reject(err);
        } finally {
          // Free the slot even on failure, then pull the next waiting task.
          this.processing = false;
          this.processNext();
        }
      };
      this.queue.push(task);
      if (this.processing) return;
      this.processNext();
    });
  }
  /** Start the next queued task unless one is already running. */
  processNext() {
    if (this.processing) return;
    if (this.queue.length === 0) return;
    this.processing = true;
    const task = this.queue.shift();
    if (task) {
      task();
    }
  }
};
|
903
|
+
|
904
|
+
// src/blockstore/loader.ts
|
905
|
+
import * as CBW2 from "@ipld/car/buffer-writer";
|
906
|
+
/**
 * True when `cids` (a CAR group) already appears in the car log, compared by
 * the groups' string forms.
 */
function carLogIncludesGroup2(list, cids) {
  const needle = cids.toString();
  return list.some((group) => group.toString() === needle);
}
|
911
|
+
/**
 * Deduplicate CIDs by their string form, dropping any whose string form is in
 * `remove`. First-seen ordering; a later duplicate replaces the stored object
 * under the same key (same as the original Map-overwrite behavior).
 */
function uniqueCids(list, remove = /* @__PURE__ */ new Set()) {
  const keep = new Map();
  for (const cid of list) {
    const key = cid.toString();
    if (!remove.has(key)) keep.set(key, cid);
  }
  return Array.from(keep.values());
}
|
919
|
+
/** Render an iterable of bytes as a lowercase, zero-padded hex string. */
function toHexString(byteArray) {
  let hex = "";
  for (const byte of byteArray) {
    hex += byte.toString(16).padStart(2, "0");
  }
  return hex;
}
|
922
|
+
var Loadable = class {
  /** Minimal shape shared by loaders: a name and an ordered CAR log. */
  constructor() {
    this.name = "";
    this.carLog = [];
  }
};
|
928
|
+
// Orchestrates persistence: commits transactions as CAR files, maintains the
// ordered carLog, caches readers/blocks, and merges remote meta into the local
// clock. Commit/merge ordering here is intricate; comments only, no code changes.
var Loader = class {
  constructor(name, ebOpts) {
    this.commitQueue = new CommitQueue();
    this.isCompacting = false;
    // carReaders maps CID-string -> Promise<reader>; promises are cached so
    // concurrent loads of the same CAR share one fetch.
    this.carReaders = /* @__PURE__ */ new Map();
    this.seenCompacted = /* @__PURE__ */ new Set();
    this.processedCars = /* @__PURE__ */ new Set();
    this.carLog = [];
    // Block cache keyed by CID string; filled by cacheTransaction/cacheCarReader.
    this.getBlockCache = /* @__PURE__ */ new Map();
    this.seenMeta = /* @__PURE__ */ new Set();
    // Serializes meta merges (concurrency of 1).
    this.writeLimit = pLimit(1);
    this.onceReady = new ResolveOnce();
    this.name = name;
    this.ebOpts = defaultedBlockstoreRuntime(
      {
        ...ebOpts,
        name
      },
      "Loader"
    );
    this.logger = this.ebOpts.logger;
  }
  // readonly id = uuidv4();
  // NOTE(review): carStore and fileStore both call makeDataStore(this) —
  // presumably distinguished inside the store runtime; confirm.
  async carStore() {
    return this.ebOpts.storeRuntime.makeDataStore(this);
  }
  async fileStore() {
    return this.ebOpts.storeRuntime.makeDataStore(this);
  }
  async remoteWAL() {
    return this.ebOpts.storeRuntime.makeRemoteWAL(this);
  }
  async metaStore() {
    return this.ebOpts.storeRuntime.makeMetaStore(this);
  }
  // One-shot initialization: load meta (explicit override or "main" branch)
  // and fold it into the local state.
  async ready() {
    return this.onceReady.once(async () => {
      const metas = this.ebOpts.meta ? [this.ebOpts.meta] : await (await this.metaStore()).load("main");
      if (metas) {
        await this.handleDbMetasFromStore(metas);
      }
    });
  }
  async close() {
    const toClose = await Promise.all([this.carStore(), this.metaStore(), this.fileStore(), this.remoteWAL()]);
    await Promise.all(toClose.map((store) => store.close()));
  }
  async destroy() {
    const toDestroy = await Promise.all([this.carStore(), this.metaStore(), this.fileStore(), this.remoteWAL()]);
    await Promise.all(toDestroy.map((store) => store.destroy()));
  }
  // async snapToCar(carCid: AnyLink | string) {
  //   await this.ready
  //   if (typeof carCid === 'string') {
  //     carCid = CID.parse(carCid)
  //   }
  //   const carHeader = await this.loadCarHeaderFromMeta({ car: carCid, key: this.key || null })
  //   this.carLog = [carCid, ...carHeader.cars]
  //   await this.getMoreReaders(carHeader.cars)
  //   await this._applyCarHeader(carHeader, true)
  // }
  // Merge each incoming meta sequentially (writeLimit has concurrency 1).
  async handleDbMetasFromStore(metas) {
    for (const meta of metas) {
      await this.writeLimit(async () => {
        await this.mergeDbMetaIntoClock(meta);
      });
    }
  }
  // Fold one DbMeta into the car log: dedupe via seenMeta, adopt its key,
  // pull its CAR header, track compacted CARs, and apply its meta upstream.
  async mergeDbMetaIntoClock(meta) {
    if (this.isCompacting) {
      throw this.logger.Error().Msg("cannot merge while compacting").AsError();
    }
    if (this.seenMeta.has(meta.cars.toString())) return;
    this.seenMeta.add(meta.cars.toString());
    if (meta.key) {
      await this.setKey(meta.key);
    }
    if (carLogIncludesGroup2(this.carLog, meta.cars)) {
      return;
    }
    const carHeader = await this.loadCarHeaderFromMeta(meta);
    carHeader.compact.map((c) => c.toString()).forEach(this.seenCompacted.add, this.seenCompacted);
    await this.getMoreReaders(carHeader.cars.flat());
    // New group first; uniqueCids drops anything already compacted away.
    this.carLog = [...uniqueCids([meta.cars, ...this.carLog, ...carHeader.cars], this.seenCompacted)];
    await this.ebOpts.applyMeta?.(carHeader.meta);
  }
  async ingestKeyFromMeta(meta) {
    const { key } = meta;
    if (key) {
      await this.setKey(key);
    }
  }
  // The header lives in the first CAR of the group.
  async loadCarHeaderFromMeta({ cars: cids }) {
    const reader = await this.loadCar(cids[0]);
    return await parseCarFile(reader, this.logger);
  }
  // Lazily generate an encryption key (hex of 32 random bytes) unless public.
  async _getKey() {
    if (this.key) return this.key;
    if (!this.ebOpts.public) {
      await this.setKey(toHexString(this.ebOpts.crypto.randomBytes(32)));
    }
    return this.key || void 0;
  }
  async commitFiles(t, done, opts = { noLoader: false, compact: false }) {
    return this.commitQueue.enqueue(() => this._commitInternalFiles(t, done, opts));
  }
  // can these skip the queue? or have a file queue?
  async _commitInternalFiles(t, done, opts = { noLoader: false, compact: false }) {
    await this.ready();
    const { files: roots } = this.makeFileCarHeader(done);
    const cids = [];
    const cars = await this.prepareCarFilesFiles(roots, t, !!opts.public);
    for (const car of cars) {
      const { cid, bytes } = car;
      await (await this.fileStore()).save({ cid, bytes });
      // WAL entry so the file is eventually pushed to the remote store.
      await (await this.remoteWAL()).enqueueFile(cid, !!opts.public);
      cids.push(cid);
    }
    return cids;
  }
  async loadFileCar(cid, isPublic = false) {
    return await this.storesLoadCar(cid, await this.fileStore(), this.remoteFileStore, isPublic);
  }
  // Commits are serialized through the commit queue.
  async commit(t, done, opts = { noLoader: false, compact: false }) {
    return this.commitQueue.enqueue(() => this._commitInternal(t, done, opts));
  }
  // Copy a transaction's blocks into the block cache (first write wins).
  async cacheTransaction(t) {
    for await (const block of t.entries()) {
      const sBlock = block.cid.toString();
      if (!this.getBlockCache.has(sBlock)) {
        this.getBlockCache.set(sBlock, block);
      }
    }
  }
  // Index every block of a CAR reader into the cache, once per CAR.
  async cacheCarReader(carCidStr, reader) {
    if (this.processedCars.has(carCidStr)) return;
    this.processedCars.add(carCidStr);
    for await (const block of reader.blocks()) {
      const sBlock = block.cid.toString();
      if (!this.getBlockCache.has(sBlock)) {
        this.getBlockCache.set(sBlock, block);
      }
    }
  }
  // Core commit path: encode header + blocks into CAR(s), persist, WAL,
  // save meta, then update the car log. Order matters for crash recovery.
  async _commitInternal(t, done, opts = { noLoader: false, compact: false }) {
    await this.ready();
    const fp = this.makeCarHeader(done, this.carLog, !!opts.compact);
    const rootBlock = await encodeCarHeader(fp);
    const cars = await this.prepareCarFiles(rootBlock, t, !!opts.public);
    const cids = [];
    for (const car of cars) {
      const { cid, bytes } = car;
      await (await this.carStore()).save({ cid, bytes });
      cids.push(cid);
    }
    await this.cacheTransaction(t);
    const newDbMeta = { cars: cids, key: this.key || null };
    await (await this.remoteWAL()).enqueue(newDbMeta, opts);
    await (await this.metaStore()).save(newDbMeta);
    await this.updateCarLog(cids, fp, !!opts.compact);
    return cids;
  }
  // File CARs are never split; encrypt when a key and crypto are available.
  async prepareCarFilesFiles(roots, t, isPublic) {
    const theKey = isPublic ? null : await this._getKey();
    const car = theKey && this.ebOpts.crypto ? await encryptedEncodeCarFile(this.logger, this.ebOpts.crypto, theKey, roots[0], t) : await encodeCarFile(roots, t);
    return [car];
  }
  // Split the transaction into CAR files no larger than `threshold` bytes,
  // seeding each new CAR with the block that overflowed the previous one.
  async prepareCarFiles(rootBlock, t, isPublic) {
    const theKey = isPublic ? void 0 : await this._getKey();
    const carFiles = [];
    const threshold = this.ebOpts.threshold || 1e3 * 1e3;
    let clonedt = new CarTransaction(t.parent, { add: false });
    clonedt.putSync(rootBlock.cid, rootBlock.bytes);
    let newsize = CBW2.blockLength(toCIDBlock(rootBlock));
    let cidRootBlock = rootBlock;
    for (const { cid, bytes } of t.entries()) {
      newsize += CBW2.blockLength(toCIDBlock({ cid, bytes }));
      if (newsize >= threshold) {
        carFiles.push(await this.createCarFile(theKey, cidRootBlock.cid, clonedt));
        clonedt = new CarTransaction(t.parent, { add: false });
        clonedt.putSync(cid, bytes);
        cidRootBlock = { cid, bytes };
        newsize = CBW2.blockLength(toCIDBlock({ cid, bytes }));
      } else {
        clonedt.putSync(cid, bytes);
      }
    }
    // Flush the final (possibly only) partial CAR.
    carFiles.push(await this.createCarFile(theKey, cidRootBlock.cid, clonedt));
    return carFiles;
  }
  async createCarFile(theKey, cid, t) {
    try {
      return theKey && this.ebOpts.crypto ? await encryptedEncodeCarFile(this.logger, this.ebOpts.crypto, theKey, cid, t) : await encodeCarFile([cid], t);
    } catch (e) {
      console.error("error creating car file", e);
      throw e;
    }
  }
  // Collect the CIDs of all file entries out of a transaction result.
  makeFileCarHeader(result) {
    const files = [];
    for (const [, meta] of Object.entries(result.files || {})) {
      if (meta && typeof meta === "object" && "cid" in meta && meta !== null) {
        files.push(meta.cid);
      }
    }
    return { ...result, files };
  }
  // After a compacting commit, rebuild the log minus compacted CARs and
  // delete the CARs superseded by the previous compaction.
  async updateCarLog(cids, fp, compact) {
    if (compact) {
      const previousCompactCid = fp.compact[fp.compact.length - 1];
      fp.compact.map((c) => c.toString()).forEach(this.seenCompacted.add, this.seenCompacted);
      this.carLog = [...uniqueCids([...this.carLog, ...fp.cars, cids], this.seenCompacted)];
      await this.removeCidsForCompact(previousCompactCid[0]);
    } else {
      // Normal commit: newest group goes to the front of the log.
      this.carLog.unshift(cids);
    }
  }
  // Physically remove every CAR listed as compacted in `cid`'s header.
  async removeCidsForCompact(cid) {
    const carHeader = await this.loadCarHeaderFromMeta({
      cars: [cid]
    });
    for (const cids of carHeader.compact) {
      for (const cid2 of cids) {
        await (await this.carStore()).remove(cid2);
      }
    }
  }
  // async flushCars() {
  //   await this.ready
  //   // for each cid in car log, make a dbMeta
  //   for (const cid of this.carLog) {
  //     const dbMeta = { car: cid, key: this.key || null } as DbMeta
  //     await this.remoteWAL!.enqueue(dbMeta, { public: false })
  //   }
  // }
  // Iterate all blocks. cache3=true yields only the cache; cache3=false also
  // walks every CAR in the log for blocks not yet cached.
  async *entries(cache3 = true) {
    await this.ready();
    if (cache3) {
      for (const [, block] of this.getBlockCache) {
        yield block;
      }
    } else {
      for (const [, block] of this.getBlockCache) {
        yield block;
      }
      for (const cids of this.carLog) {
        for (const cid of cids) {
          const reader = await this.loadCar(cid);
          if (!reader) throw this.logger.Error().Ref("cid", cid).Msg("missing car reader").AsError();
          for await (const block of reader.blocks()) {
            const sCid = block.cid.toString();
            if (!this.getBlockCache.has(sCid)) {
              yield block;
            }
          }
        }
      }
    }
  }
  // Resolve a block: cache first, then probe CARs in batches of 5 using
  // Promise.any (first CAR containing the block wins), finally fall back to
  // CARs referenced by the oldest group's compact header.
  async getBlock(cid) {
    await this.ready();
    const sCid = cid.toString();
    if (this.getBlockCache.has(sCid)) return this.getBlockCache.get(sCid);
    const getCarCid = async (carCid) => {
      if (this.getBlockCache.has(sCid)) return this.getBlockCache.get(sCid);
      const reader = await this.loadCar(carCid);
      if (!reader) {
        throw this.logger.Error().Ref("cid", carCid).Msg("missing car reader").AsError();
      }
      // Cache errors are swallowed; the cache re-check below decides success.
      await this.cacheCarReader(carCid.toString(), reader).catch(() => {
        return;
      });
      if (this.getBlockCache.has(sCid)) return this.getBlockCache.get(sCid);
      throw this.logger.Error().Str("cid", sCid).Msg("block not in reader").AsError();
    };
    const getCompactCarCids = async (carCid) => {
      const reader = await this.loadCar(carCid);
      if (!reader) {
        throw this.logger.Error().Str("cid", carCid.toString()).Msg("missing car reader").AsError();
      }
      const header = await parseCarFile(reader, this.logger);
      const compacts = header.compact;
      let got2;
      const batchSize2 = 5;
      for (let i = 0; i < compacts.length; i += batchSize2) {
        const promises = [];
        for (let j = i; j < Math.min(i + batchSize2, compacts.length); j++) {
          for (const cid2 of compacts[j]) {
            promises.push(getCarCid(cid2));
          }
        }
        try {
          got2 = await Promise.any(promises);
        } catch {
        }
        if (got2) break;
      }
      if (this.getBlockCache.has(sCid)) return this.getBlockCache.get(sCid);
      throw this.logger.Error().Str("cid", sCid).Msg("block not in compact reader").AsError();
    };
    let got;
    const batchSize = 5;
    for (let i = 0; i < this.carLog.length; i += batchSize) {
      const batch = this.carLog.slice(i, i + batchSize);
      const promises = batch.flatMap((slice) => slice.map(getCarCid));
      try {
        got = await Promise.any(promises);
      } catch {
      }
      if (got) break;
    }
    if (!got) {
      try {
        got = await getCompactCarCids(this.carLog[this.carLog.length - 1][0]);
      } catch {
      }
    }
    return got;
  }
  // A compacting commit records the old log under `compact`; a normal one
  // records it under `cars`.
  makeCarHeader(meta, cars, compact = false) {
    const coreHeader = compact ? { cars: [], compact: cars } : { cars, compact: [] };
    return { ...coreHeader, meta };
  }
  async loadCar(cid) {
    if (!this.carStore) {
      throw this.logger.Error().Msg("car store not initialized").AsError();
    }
    const loaded = await this.storesLoadCar(cid, await this.carStore(), this.remoteCarStore);
    return loaded;
  }
  //What if instead it returns an Array of CarHeader
  // Load a CAR from the local store, falling back to the remote store (and
  // re-saving locally). The in-flight promise is cached in carReaders so
  // concurrent requests share one load; it is evicted on failure.
  async storesLoadCar(cid, local, remote, publicFiles) {
    const cidsString = cid.toString();
    if (!this.carReaders.has(cidsString)) {
      this.carReaders.set(
        cidsString,
        (async () => {
          let loadedCar = void 0;
          try {
            this.logger.Debug().Str("cid", cidsString).Msg("loading car");
            loadedCar = await local.load(cid);
            this.logger.Debug().Bool("loadedCar", loadedCar).Msg("loaded");
          } catch (e) {
            if (remote) {
              const remoteCar = await remote.load(cid);
              if (remoteCar) {
                this.logger.Debug().Ref("cid", remoteCar.cid).Msg("saving remote car locally");
                await local.save(remoteCar);
                loadedCar = remoteCar;
              }
            } else {
              this.logger.Error().Str("cid", cidsString).Err(e).Msg("loading car");
            }
          }
          if (!loadedCar) {
            throw this.logger.Error().Url(local.url).Str("cid", cidsString).Msg("missing car files").AsError();
          }
          const rawReader = await CarReader.fromBytes(loadedCar.bytes);
          const readerP = publicFiles ? Promise.resolve(rawReader) : this.ensureDecryptedReader(rawReader);
          // Replace the cached entry with one that has warmed the block cache.
          const cachedReaderP = readerP.then(async (reader) => {
            await this.cacheCarReader(cidsString, reader).catch(() => {
              return;
            });
            return reader;
          });
          this.carReaders.set(cidsString, cachedReaderP);
          return readerP;
        })().catch((e) => {
          this.carReaders.delete(cidsString);
          throw e;
        })
      );
    }
    return this.carReaders.get(cidsString);
  }
  // Wrap an encrypted CAR reader so callers see decrypted blocks; pass-through
  // when public or no key/crypto is configured.
  async ensureDecryptedReader(reader) {
    const theKey = await this._getKey();
    if (this.ebOpts.public || !(theKey && this.ebOpts.crypto)) {
      return reader;
    }
    const { blocks, root: root3 } = await decodeEncryptedCar(this.logger, this.ebOpts.crypto, theKey, reader);
    return {
      getRoots: () => [root3],
      get: blocks.get.bind(blocks),
      blocks: blocks.entries.bind(blocks)
    };
  }
  // Set the key once; changing an established key is an error. keyId is the
  // hex SHA-256 of the key text.
  async setKey(key) {
    if (this.key && this.key !== key)
      throw this.logger.Error().Str("this.key", this.key).Str("key", key).Msg("setting key").AsError();
    this.key = key;
    const encoder = new TextEncoder();
    const data = encoder.encode(key);
    const hashBuffer = await this.ebOpts.crypto.digestSHA256(data);
    const hashArray = Array.from(new Uint8Array(hashBuffer));
    this.keyId = hashArray.map((b) => b.toString(16).padStart(2, "0")).join("");
  }
  // Prefetch readers for CARs not yet cached, at most 5 in flight.
  async getMoreReaders(cids) {
    const limit = pLimit(5);
    const missing = cids.filter((cid) => !this.carReaders.has(cid.toString()));
    await Promise.all(missing.map((cid) => limit(() => this.loadCar(cid))));
  }
};
|
1331
|
+
|
1332
|
+
// src/blockstore/store.ts
|
1333
|
+
var VersionedStore = class {
  /**
   * Base class for stores addressed by a versioned URL. Holds the store name,
   * URL, logger, and lifecycle callbacks registered via onStarted/onClosed.
   */
  constructor(name, url, logger) {
    this.name = name;
    this.url = url;
    this.logger = logger;
    this._onStarted = [];
    this._onClosed = [];
  }
  /** Register a callback to run after the store starts. */
  onStarted(fn) {
    this._onStarted.push(fn);
  }
  /** Register a callback to run when the store closes. */
  onClosed(fn) {
    this._onClosed.push(fn);
  }
};
|
1348
|
+
// Module-level UTF-8 codecs shared by the stores below for header (de)serialization.
var textEncoder = new TextEncoder();
var textDecoder = new TextDecoder();
|
1350
|
+
// Persists DbMeta headers (car log head + key) per branch through a gateway.
// Result-object error handling is order-sensitive; comments only, no code changes.
var MetaStore = class extends VersionedStore {
  constructor(name, url, logger, gateway) {
    super(name, url, ensureLogger(logger, "MetaStore", {}));
    this.tag = "header-base";
    this.gateway = gateway;
  }
  // Serialize a DbMeta; `key` is omitted entirely when falsy.
  makeHeader({ cars, key }) {
    const toEncode = { cars };
    if (key) toEncode.key = key;
    return format(toEncode);
  }
  parseHeader(headerData) {
    const got = parse(headerData);
    return got;
  }
  // Start the gateway, fire onStarted callbacks, then validate the URL version.
  async start() {
    this.logger.Debug().Msg("starting");
    const res = await this.gateway.start(this.url);
    if (res.isErr()) {
      return res;
    }
    this._onStarted.forEach((fn) => fn());
    return guardVersion(this.url);
  }
  // Load meta for a branch (default "main"). Returns undefined for not-found,
  // a one-element array on success, and throws on any other gateway error.
  async load(branch) {
    this.logger.Debug().Str("branch", branch || "").Msg("loading");
    const url = await this.gateway.buildUrl(this.url, branch || "main");
    if (url.isErr()) {
      throw this.logger.Error().Err(url.Err()).Str("branch", branch || "").Str("url", this.url.toString()).Msg("got error from gateway.buildUrl").AsError();
    }
    const bytes = await this.gateway.get(url.Ok());
    if (bytes.isErr()) {
      if (isNotFoundError(bytes)) {
        return void 0;
      }
      throw this.logger.Error().Err(bytes.Err()).Msg("gateway get").AsError();
    }
    try {
      return [this.parseHeader(textDecoder.decode(bytes.Ok()))];
    } catch (e) {
      throw this.logger.Error().Err(e).Msg("parseHeader").AsError();
    }
  }
  // Write meta for a branch; throws on gateway errors.
  async save(meta, branch = "main") {
    this.logger.Debug().Str("branch", branch).Any("meta", meta).Msg("saving meta");
    const bytes = this.makeHeader(meta);
    const url = await this.gateway.buildUrl(this.url, branch);
    if (url.isErr()) {
      throw this.logger.Error().Err(url.Err()).Str("branch", branch).Url(this.url).Msg("got error from gateway.buildUrl").AsError();
    }
    const res = await this.gateway.put(url.Ok(), textEncoder.encode(bytes));
    if (res.isErr()) {
      throw this.logger.Error().Err(res.Err()).Msg("got error from gateway.put").AsError();
    }
    return res.Ok();
  }
  async close() {
    await this.gateway.close(this.url);
    this._onClosed.forEach((fn) => fn());
    return Result.Ok(void 0);
  }
  async destroy() {
    return this.gateway.destroy(this.url);
  }
};
|
1415
|
+
// Persists CAR payloads addressed by CID through a gateway.
var DataStore = class extends VersionedStore {
  constructor(name, url, logger, gateway) {
    super(
      name,
      url,
      ensureLogger(logger, "DataStore", {
        url: () => url.toString()
      })
    );
    this.tag = "car-base";
    this.gateway = gateway;
  }
  /**
   * Start the gateway, fire onStarted callbacks, then validate the URL version.
   * Closes the store again when the version check fails.
   */
  async start() {
    this.logger.Debug().Msg("starting-gateway");
    const res = await this.gateway.start(this.url);
    if (res.isErr()) {
      this.logger.Error().Err(res.Err()).Msg("started-gateway");
      return res;
    }
    this._onStarted.forEach((fn) => fn());
    const version = guardVersion(this.url);
    if (version.isErr()) {
      // BUGFIX: previously logged res.Err(), but `res` is Ok on this path, so
      // the actual version-check failure was never reported. Log version.Err().
      this.logger.Error().Err(version.Err()).Msg("guardVersion");
      await this.close();
      return version;
    }
    this.logger.Debug().Msg("started");
    return version;
  }
  /** Fetch the CAR bytes for a CID; throws on any gateway error. */
  async load(cid) {
    this.logger.Debug().Any("cid", cid).Msg("loading");
    const url = await this.gateway.buildUrl(this.url, cid.toString());
    if (url.isErr()) {
      throw this.logger.Error().Err(url.Err()).Str("cid", cid.toString()).Msg("got error from gateway.buildUrl").AsError();
    }
    const res = await this.gateway.get(url.Ok());
    if (res.isErr()) {
      throw res.Err();
    }
    return { cid, bytes: res.Ok() };
  }
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  /** Persist a CAR ({ cid, bytes }); throws on gateway errors. */
  async save(car, opts) {
    this.logger.Debug().Any("cid", car.cid.toString()).Msg("saving");
    const url = await this.gateway.buildUrl(this.url, car.cid.toString());
    if (url.isErr()) {
      throw this.logger.Error().Err(url.Err()).Ref("cid", car.cid).Msg("got error from gateway.buildUrl").AsError();
    }
    const res = await this.gateway.put(url.Ok(), car.bytes);
    if (res.isErr()) {
      throw this.logger.Error().Err(res.Err()).Msg("got error from gateway.put").AsError();
    }
    return res.Ok();
  }
  /** Delete a CAR by CID; returns the failed Result instead of throwing. */
  async remove(cid) {
    const url = await this.gateway.buildUrl(this.url, cid.toString());
    if (url.isErr()) {
      return url;
    }
    return this.gateway.delete(url.Ok());
  }
  async close() {
    await this.gateway.close(this.url);
    this._onClosed.forEach((fn) => fn());
    return Result.Ok(void 0);
  }
  destroy() {
    return this.gateway.destroy(this.url);
  }
};
|
1485
|
+
var RemoteWAL = class extends VersionedStore {
|
1486
|
+
constructor(loader, url, logger, gateway) {
|
1487
|
+
super(loader.name, url, ensureLogger(logger, "RemoteWAL"));
|
1488
|
+
this.tag = "rwal-base";
|
1489
|
+
this._ready = new ResolveOnce2();
|
1490
|
+
this.walState = { operations: [], noLoaderOps: [], fileOperations: [] };
|
1491
|
+
this.processing = void 0;
|
1492
|
+
this.processQueue = new CommitQueue();
|
1493
|
+
this.loader = loader;
|
1494
|
+
this.gateway = gateway;
|
1495
|
+
}
|
1496
|
+
async ready() {
|
1497
|
+
return this._ready.once(async () => {
|
1498
|
+
const walState = await this.load().catch((e) => {
|
1499
|
+
this.logger.Error().Any("error", e).Msg("error loading wal");
|
1500
|
+
return void 0;
|
1501
|
+
});
|
1502
|
+
if (!walState) {
|
1503
|
+
this.walState.operations = [];
|
1504
|
+
this.walState.fileOperations = [];
|
1505
|
+
} else {
|
1506
|
+
this.walState.operations = walState.operations || [];
|
1507
|
+
this.walState.fileOperations = walState.fileOperations || [];
|
1508
|
+
}
|
1509
|
+
});
|
1510
|
+
}
|
1511
|
+
async enqueue(dbMeta, opts) {
|
1512
|
+
await this.ready();
|
1513
|
+
if (opts.noLoader) {
|
1514
|
+
this.walState.noLoaderOps.push(dbMeta);
|
1515
|
+
} else {
|
1516
|
+
this.walState.operations.push(dbMeta);
|
1517
|
+
}
|
1518
|
+
await this.save(this.walState);
|
1519
|
+
void this._process();
|
1520
|
+
}
|
1521
|
+
async enqueueFile(fileCid, publicFile = false) {
|
1522
|
+
await this.ready();
|
1523
|
+
this.walState.fileOperations.push({ cid: fileCid, public: publicFile });
|
1524
|
+
}
|
1525
|
+
async _process() {
|
1526
|
+
await this.ready();
|
1527
|
+
if (!this.loader.remoteCarStore) return;
|
1528
|
+
await this.processQueue.enqueue(async () => {
|
1529
|
+
await this._doProcess();
|
1530
|
+
if (this.walState.operations.length || this.walState.fileOperations.length || this.walState.noLoaderOps.length) {
|
1531
|
+
setTimeout(() => void this._process(), 0);
|
1532
|
+
}
|
1533
|
+
});
|
1534
|
+
}
|
1535
|
+
async _doProcess() {
|
1536
|
+
if (!this.loader.remoteCarStore) return;
|
1537
|
+
const rmlp = (async () => {
|
1538
|
+
const operations = [...this.walState.operations];
|
1539
|
+
const fileOperations = [...this.walState.fileOperations];
|
1540
|
+
const uploads = [];
|
1541
|
+
const noLoaderOps = [...this.walState.noLoaderOps];
|
1542
|
+
const limit = pLimit2(5);
|
1543
|
+
if (operations.length + fileOperations.length + noLoaderOps.length === 0) return;
|
1544
|
+
for (const dbMeta of noLoaderOps) {
|
1545
|
+
const uploadP = limit(async () => {
|
1546
|
+
for (const cid of dbMeta.cars) {
|
1547
|
+
const car = await (await this.loader.carStore()).load(cid);
|
1548
|
+
if (!car) {
|
1549
|
+
if (carLogIncludesGroup2(this.loader.carLog, dbMeta.cars))
|
1550
|
+
throw this.logger.Error().Ref("cid", cid).Msg("missing local car").AsError();
|
1551
|
+
} else {
|
1552
|
+
await throwFalsy(this.loader.remoteCarStore).save(car);
|
1553
|
+
}
|
1554
|
+
this.walState.noLoaderOps = this.walState.noLoaderOps.filter((op) => op !== dbMeta);
|
1555
|
+
}
|
1556
|
+
});
|
1557
|
+
uploads.push(uploadP);
|
1558
|
+
}
|
1559
|
+
for (const dbMeta of operations) {
|
1560
|
+
const uploadP = limit(async () => {
|
1561
|
+
for (const cid of dbMeta.cars) {
|
1562
|
+
const car = await (await this.loader.carStore()).load(cid).catch(() => null);
|
1563
|
+
if (!car) {
|
1564
|
+
if (carLogIncludesGroup2(this.loader.carLog, dbMeta.cars))
|
1565
|
+
throw this.logger.Error().Ref("cid", cid).Msg(`missing local car`).AsError();
|
1566
|
+
} else {
|
1567
|
+
await throwFalsy(this.loader.remoteCarStore).save(car);
|
1568
|
+
}
|
1569
|
+
}
|
1570
|
+
this.walState.operations = this.walState.operations.filter((op) => op !== dbMeta);
|
1571
|
+
});
|
1572
|
+
uploads.push(uploadP);
|
1573
|
+
}
|
1574
|
+
if (fileOperations.length) {
|
1575
|
+
const dbLoader = this.loader;
|
1576
|
+
for (const { cid: fileCid, public: publicFile } of fileOperations) {
|
1577
|
+
const uploadP = limit(async () => {
|
1578
|
+
const fileBlock = await (await dbLoader.fileStore()).load(fileCid);
|
1579
|
+
await dbLoader.remoteFileStore?.save(fileBlock, { public: publicFile });
|
1580
|
+
this.walState.fileOperations = this.walState.fileOperations.filter((op) => op.cid !== fileCid);
|
1581
|
+
});
|
1582
|
+
uploads.push(uploadP);
|
1583
|
+
}
|
1584
|
+
}
|
1585
|
+
try {
|
1586
|
+
const res = await Promise.allSettled(uploads);
|
1587
|
+
const errors = res.filter((r) => r.status === "rejected");
|
1588
|
+
if (errors.length) {
|
1589
|
+
throw this.logger.Error().Any(
|
1590
|
+
"errors",
|
1591
|
+
errors.map((e) => e.reason)
|
1592
|
+
).Msg("error uploading").AsError();
|
1593
|
+
errors[0].reason;
|
1594
|
+
}
|
1595
|
+
if (operations.length) {
|
1596
|
+
const lastOp = operations[operations.length - 1];
|
1597
|
+
await this.loader.remoteMetaStore?.save(lastOp).catch((e) => {
|
1598
|
+
this.walState.operations.push(lastOp);
|
1599
|
+
throw this.logger.Error().Any("error", e).Msg("error saving remote meta").AsError();
|
1600
|
+
});
|
1601
|
+
}
|
1602
|
+
} finally {
|
1603
|
+
await this.save(this.walState);
|
1604
|
+
}
|
1605
|
+
})();
|
1606
|
+
await rmlp;
|
1607
|
+
}
|
1608
|
+
async start() {
|
1609
|
+
const res = await this.gateway.start(this.url);
|
1610
|
+
if (res.isErr()) {
|
1611
|
+
return res;
|
1612
|
+
}
|
1613
|
+
const ver = guardVersion(this.url);
|
1614
|
+
if (ver.isErr()) {
|
1615
|
+
await this.close();
|
1616
|
+
return ver;
|
1617
|
+
}
|
1618
|
+
const ready = await exception2Result(() => this.ready());
|
1619
|
+
this._onStarted.forEach((fn) => fn());
|
1620
|
+
if (ready.isErr()) {
|
1621
|
+
await this.close();
|
1622
|
+
return ready;
|
1623
|
+
}
|
1624
|
+
return ready;
|
1625
|
+
}
|
1626
|
+
async load() {
|
1627
|
+
this.logger.Debug().Msg("loading");
|
1628
|
+
const filepath = await this.gateway.buildUrl(this.url, "main");
|
1629
|
+
if (filepath.isErr()) {
|
1630
|
+
throw this.logger.Error().Err(filepath.Err()).Str("url", this.url.toString()).Msg("error building url").AsError();
|
1631
|
+
}
|
1632
|
+
const bytes = await this.gateway.get(filepath.Ok());
|
1633
|
+
if (bytes.isErr()) {
|
1634
|
+
if (isNotFoundError(bytes)) {
|
1635
|
+
return void 0;
|
1636
|
+
}
|
1637
|
+
throw this.logger.Error().Err(bytes.Err()).Msg("error get").AsError();
|
1638
|
+
}
|
1639
|
+
try {
|
1640
|
+
return bytes && parse(textDecoder.decode(bytes.Ok()));
|
1641
|
+
} catch (e) {
|
1642
|
+
throw this.logger.Error().Err(e).Msg("error parse").AsError();
|
1643
|
+
}
|
1644
|
+
}
|
1645
|
+
async save(state) {
|
1646
|
+
const filepath = await this.gateway.buildUrl(this.url, "main");
|
1647
|
+
if (filepath.isErr()) {
|
1648
|
+
throw this.logger.Error().Err(filepath.Err()).Str("url", this.url.toString()).Msg("error building url").AsError();
|
1649
|
+
}
|
1650
|
+
let encoded;
|
1651
|
+
try {
|
1652
|
+
encoded = format(state);
|
1653
|
+
} catch (e) {
|
1654
|
+
throw this.logger.Error().Err(e).Any("state", state).Msg("error format").AsError();
|
1655
|
+
}
|
1656
|
+
const res = await this.gateway.put(filepath.Ok(), textEncoder.encode(encoded));
|
1657
|
+
if (res.isErr()) {
|
1658
|
+
throw this.logger.Error().Err(res.Err()).Str("filePath", filepath.Ok().toString()).Msg("error saving").AsError();
|
1659
|
+
}
|
1660
|
+
}
|
1661
|
+
async close() {
|
1662
|
+
await this.gateway.close(this.url);
|
1663
|
+
this._onClosed.forEach((fn) => fn());
|
1664
|
+
return Result.Ok(void 0);
|
1665
|
+
}
|
1666
|
+
destroy() {
|
1667
|
+
return this.gateway.destroy(this.url);
|
1668
|
+
}
|
1669
|
+
};
|
1670
|
+
|
1671
|
+
// src/blockstore/store-factory.ts
|
1672
|
+
// Set or clear the `index` query parameter so the URL reflects whether
// this is an index store. Mutates and returns the given URL.
function ensureIsIndex(url, isIndex) {
  if (isIndex) {
    url.searchParams.set("index", isIndex);
  } else {
    url.searchParams.delete("index");
  }
  return url;
}
|
1681
|
+
// Coerce a path or URL into a URL, falling back to file:// for bare paths,
// and tag it with the index flag.
function toURL(pathOrUrl, isIndex) {
  if (pathOrUrl instanceof URL) return ensureIsIndex(pathOrUrl, isIndex);
  let url;
  try {
    url = new URL(pathOrUrl);
  } catch {
    // not an absolute URL: treat it as a filesystem path
    url = new URL(`file://${pathOrUrl}`);
  }
  return ensureIsIndex(url, isIndex);
}
|
1691
|
+
var storeFactory = /* @__PURE__ */ new Map();
|
1692
|
+
// Resolve the base URL for a store: an explicit option, a registered
// protocol's overrideBaseURL, or the default data directory.
function buildURL(optURL, loader) {
  const storeOpts = loader.ebOpts.store;
  const overrider = Array.from(storeFactory.values()).find((items) => items.overrideBaseURL);
  let obuUrl;
  if (overrider?.overrideBaseURL) {
    obuUrl = new URL(overrider.overrideBaseURL);
  }
  return toURL(optURL || obuUrl || dataDir(loader.name, storeOpts.stores?.base), storeOpts.isIndex);
}
|
1701
|
+
// Register a gateway factory for a URL protocol; returns an unregister function.
// Registering a factory with overrideBaseURL clears the override on all
// previously registered factories so at most one override is active.
function registerStoreProtocol(item) {
  if (storeFactory.has(item.protocol)) {
    throw new Error(`protocol ${item.protocol} already registered`);
  }
  if (item.overrideBaseURL) {
    for (const registered of storeFactory.values()) {
      registered.overrideBaseURL = void 0;
    }
  }
  storeFactory.set(item.protocol, item);
  return () => {
    storeFactory.delete(item.protocol);
  };
}
|
1715
|
+
// Look up the registered factory for the URL's protocol and invoke `run` with it.
// Throws (logging the known protocols) when the protocol is unregistered.
// Fix: `Array(storeFactory.keys())` produced a one-element array holding the
// iterator object; Array.from materializes the actual protocol keys for the log.
function runStoreFactory(url, logger, run) {
  const item = storeFactory.get(url.protocol);
  if (!item) {
    throw logger.Error().Url(url).Str("protocol", url.protocol).Any("keys", Array.from(storeFactory.keys())).Msg(`unsupported protocol`).AsError();
  }
  logger.Debug().Str("protocol", url.protocol).Msg("run");
  return run(item);
}
|
1723
|
+
var onceLoadDataGateway = new KeyedResolvOnce();
|
1724
|
+
// Resolve (once per protocol) the data gateway for the URL's protocol.
function loadDataGateway(url, logger) {
  return onceLoadDataGateway.get(url.protocol).once(async () => runStoreFactory(url, logger, async (item) => item.data(logger)));
}
|
1729
|
+
var onceDataStoreFactory = new KeyedResolvOnce();
|
1730
|
+
// Create (memoized per URL) a started DataStore for the loader's data store.
async function dataStoreFactory(loader) {
  const storeUrl = buildURL(loader.ebOpts.store.stores?.data, loader);
  const logger = ensureLogger(loader.logger, "dataStoreFactory", { url: storeUrl.toString() });
  storeUrl.searchParams.set("store", "data");
  return onceDataStoreFactory.get(storeUrl.toString()).once(async () => {
    const gateway = await loadDataGateway(storeUrl, logger);
    const store = new DataStore(loader.name, storeUrl, loader.logger, gateway);
    await store.start();
    logger.Debug().Str("prepared", store.url.toString()).Msg("produced");
    return store;
  });
}
|
1742
|
+
var onceLoadMetaGateway = new KeyedResolvOnce();
|
1743
|
+
// Resolve (once per protocol) the meta gateway for the URL's protocol.
function loadMetaGateway(url, logger) {
  return onceLoadMetaGateway.get(url.protocol).once(async () => runStoreFactory(url, logger, async (item) => item.meta(logger)));
}
|
1748
|
+
var onceMetaStoreFactory = new KeyedResolvOnce();
|
1749
|
+
// Create (memoized per URL) a started MetaStore for the loader's meta store.
async function metaStoreFactory(loader) {
  const storeUrl = buildURL(loader.ebOpts.store.stores?.meta, loader);
  const logger = ensureLogger(loader.logger, "metaStoreFactory", { url: () => storeUrl.toString() });
  storeUrl.searchParams.set("store", "meta");
  return onceMetaStoreFactory.get(storeUrl.toString()).once(async () => {
    logger.Debug().Str("protocol", storeUrl.protocol).Msg("pre-protocol switch");
    const gateway = await loadMetaGateway(storeUrl, logger);
    const store = new MetaStore(loader.name, storeUrl, loader.logger, gateway);
    logger.Debug().Msg("pre-start");
    await store.start();
    logger.Debug().Msg("post-start");
    return store;
  });
}
|
1763
|
+
var onceWalGateway = new KeyedResolvOnce();
|
1764
|
+
// Resolve (once per protocol) the WAL gateway for the URL's protocol.
function loadWalGateway(url, logger) {
  return onceWalGateway.get(url.protocol).once(async () => runStoreFactory(url, logger, async (item) => item.wal(logger)));
}
|
1769
|
+
var onceRemoteWalFactory = new KeyedResolvOnce();
|
1770
|
+
// Create (memoized per URL) a started RemoteWAL.
// NOTE(review): the base URL is taken from stores?.meta (there is no stores?.wal
// option here), co-locating the WAL with the meta store — confirm intentional.
async function remoteWalFactory(loader) {
  const storeUrl = buildURL(loader.ebOpts.store.stores?.meta, loader);
  const logger = ensureLogger(loader.logger, "remoteWalFactory", { url: storeUrl.toString() });
  storeUrl.searchParams.set("store", "wal");
  return onceRemoteWalFactory.get(storeUrl.toString()).once(async () => {
    const gateway = await loadWalGateway(storeUrl, logger);
    logger.Debug().Str("prepared", storeUrl.toString()).Msg("produced");
    const store = new RemoteWAL(loader, storeUrl, loader.logger, gateway);
    await store.start();
    return store;
  });
}
|
1782
|
+
// Resolve the protocol's test store using a logger derived from the given one.
async function testStoreFactory(url, ilogger) {
  const logger = ensureLogger({ logger: ilogger }, "testStoreFactory");
  return runStoreFactory(url, logger, async (item) => item.test(logger));
}
|
1791
|
+
// Assemble the store runtime, preferring per-database factory overrides from
// loader.ebOpts.store, and defaulting file codecs from opts.
function toStoreRuntime(opts, ilogger) {
  const logger = ensureLogger(ilogger, "toStoreRuntime", {});
  return {
    makeMetaStore: (loader) => {
      const custom = loader.ebOpts.store.makeMetaStore;
      logger.Debug().Str("fromOpts", "" + !!custom).Msg("makeMetaStore");
      return (custom || metaStoreFactory)(loader);
    },
    makeDataStore: (loader) => {
      const custom = loader.ebOpts.store.makeDataStore;
      logger.Debug().Str("fromOpts", "" + !!custom).Msg("makeDataStore");
      return (custom || dataStoreFactory)(loader);
    },
    makeRemoteWAL: (loader) => {
      const custom = loader.ebOpts.store.makeRemoteWAL;
      logger.Debug().Str("fromOpts", "" + !!custom).Msg("makeRemoteWAL");
      return (custom || remoteWalFactory)(loader);
    },
    encodeFile: opts.encodeFile || encodeFile,
    decodeFile: opts.decodeFile || decodeFile
  };
}
|
1810
|
+
// Filesystem-backed gateways (Node): each is lazily imported on first use so
// browser bundles never load the file store chunk.
registerStoreProtocol({
  protocol: "file:",
  data: async (logger) => {
    const { FileDataGateway } = await import("./store-file-D472VFCS.js");
    return new FileDataGateway(logger);
  },
  meta: async (logger) => {
    const { FileMetaGateway } = await import("./store-file-D472VFCS.js");
    return new FileMetaGateway(logger);
  },
  wal: async (logger) => {
    const { FileWALGateway } = await import("./store-file-D472VFCS.js");
    return new FileWALGateway(logger);
  },
  test: async (logger) => {
    const { FileTestStore } = await import("./store-file-D472VFCS.js");
    return new FileTestStore(logger);
  }
});
|
1829
|
+
// IndexedDB-backed gateways (browser), lazily imported on first use.
// Note: the wal factory reuses IndexDBMetaGateway as the WAL gateway.
registerStoreProtocol({
  protocol: "indexdb:",
  data: async (logger) => {
    const { IndexDBDataGateway } = await import("./store-indexdb-FRX5PTKR.js");
    return new IndexDBDataGateway(logger);
  },
  meta: async (logger) => {
    const { IndexDBMetaGateway } = await import("./store-indexdb-FRX5PTKR.js");
    return new IndexDBMetaGateway(logger);
  },
  wal: async (logger) => {
    const { IndexDBMetaGateway } = await import("./store-indexdb-FRX5PTKR.js");
    return new IndexDBMetaGateway(logger);
  },
  test: async (logger) => {
    const { IndexDBTestStore } = await import("./store-indexdb-FRX5PTKR.js");
    return new IndexDBTestStore(logger);
  }
});
|
1848
|
+
// SQLite-backed gateways, lazily imported on first use.
registerStoreProtocol({
  protocol: "sqlite:",
  data: async (logger) => {
    const { SQLDataGateway } = await import("./store-sql-MDSU23Y7.js");
    return new SQLDataGateway(logger);
  },
  meta: async (logger) => {
    const { SQLMetaGateway } = await import("./store-sql-MDSU23Y7.js");
    return new SQLMetaGateway(logger);
  },
  wal: async (logger) => {
    const { SQLWalGateway } = await import("./store-sql-MDSU23Y7.js");
    return new SQLWalGateway(logger);
  },
  test: async (logger) => {
    const { SQLTestStore } = await import("./store-sql-MDSU23Y7.js");
    return new SQLTestStore(logger);
  }
});
|
1867
|
+
|
1868
|
+
// src/crdt-helpers.ts
|
1869
|
+
// Timing probe — intentionally a no-op (kept so call sites can be re-enabled).
function time(tag) {
}
|
1871
|
+
// Timing probe — intentionally a no-op (kept so call sites can be re-enabled).
function timeEnd(tag) {
}
|
1873
|
+
// Normalize a document key to a string; only string and number keys are valid.
function toString(key, logger) {
  const kind = typeof key;
  if (kind === "string" || kind === "number") {
    return key.toString();
  }
  throw logger.Error().Msg("Invalid key type").AsError();
}
|
1882
|
+
// Apply document updates to the CRDT head: a multi-update uses a Batch commit,
// a single update a direct put. Event/addition blocks from the result are
// copied into the transaction blocks. Returns the new head.
async function applyBulkUpdateToCrdt(store, tblocks, head, updates, logger) {
  let result = null;
  if (updates.length > 1) {
    const batch = await Batch.create(tblocks, head);
    for (const update of updates) {
      const link = await writeDocContent(store, tblocks, update, logger);
      await batch.put(toString(update.id, logger), link);
    }
    result = await batch.commit();
  } else if (updates.length === 1) {
    const [single] = updates;
    const link = await writeDocContent(store, tblocks, single, logger);
    result = await put(tblocks, head, toString(single.id, logger), link);
  }
  if (!result) throw logger.Error().Uint64("updates.len", updates.length).Msg("Missing result").AsError();
  if (result.event) {
    // removals are intentionally not re-staged here
    for (const { cid, bytes } of [...result.additions, result.event]) {
      tblocks.putSync(cid, bytes);
    }
  }
  return { head: result.head };
}
|
1907
|
+
// Encode one document update (or a tombstone for deletes) as a dag-cbor block,
// stage it in `blocks`, and return the block CID.
async function writeDocContent(store, blocks, update, logger) {
  let value;
  if (update.del) {
    value = { del: true }; // tombstone
  } else {
    if (!update.value) throw logger.Error().Msg("Missing value").AsError();
    await processFiles(store, blocks, update.value, logger);
    value = { doc: update.value };
  }
  const block = await encode3({ value, hasher: hasher2, codec: codec2 });
  blocks.putSync(block.cid, block.bytes);
  return block.cid;
}
|
1920
|
+
// Persist any files attached to the document: _files as private, _publicFiles
// as publicly readable.
async function processFiles(store, blocks, doc, logger) {
  if (doc._files) await processFileset(logger, store, blocks, doc._files);
  if (doc._publicFiles) await processFileset(logger, store, blocks, doc._publicFiles, true);
}
|
1928
|
+
// Encode fresh File objects into { cid, type, size } metadata in-place, staging
// their blocks in a CarTransaction; already-encoded entries keep only the known
// fields. When anything was encoded, commits the files and stamps the car CID
// onto each newly encoded entry. NOTE: mutates the caller's `files` map by design.
async function processFileset(logger, store, blocks, files, publicFiles = false) {
  const dbBlockstore = blocks.parent;
  if (!dbBlockstore.loader) throw logger.Error().Msg("Missing loader, database name is required").AsError();
  const t = new CarTransaction(dbBlockstore);
  const didPut = [];
  for (const filename in files) {
    const entry = files[filename];
    if (File === entry.constructor) {
      // a raw browser File: encode it into blocks now
      const { cid, blocks: fileBlocks } = await store.encodeFile(entry);
      didPut.push(filename);
      for (const block of fileBlocks) {
        t.putSync(block.cid, block.bytes);
      }
      files[filename] = { cid, type: entry.type, size: entry.size };
    } else {
      const { cid, type, size, car } = entry;
      if (cid && type && size && car) {
        files[filename] = { cid, type, size, car };
      }
    }
  }
  if (didPut.length) {
    const car = await dbBlockstore.loader.commitFiles(t, { files }, { public: publicFiles });
    if (car) {
      for (const name of didPut) {
        files[name] = { car, ...files[name] };
      }
    }
  }
}
|
1960
|
+
// Resolve a key against the CRDT head and load its document value.
// NOTE(review): the empty-database error is built via Debug() rather than
// Error() — confirm the log level is intentional.
async function getValueFromCrdt(blocks, head, key, logger) {
  if (!head.length) throw logger.Debug().Msg("Getting from an empty database").AsError();
  const link = await get(blocks, head, key);
  if (!link) throw logger.Error().Str("key", key).Msg(`Missing key`).AsError();
  return await getValueFromLink(blocks, link, logger);
}
|
1966
|
+
// Attach lazy file loaders to any _files/_publicFiles metadata on the document.
function readFiles(blocks, { doc }) {
  if (!doc) return;
  if (doc._files) readFileset(blocks, doc._files);
  if (doc._publicFiles) readFileset(blocks, doc._publicFiles, true);
}
|
1975
|
+
// For each file entry with a cid: public files get a w3s gateway URL, and
// entries with a car get a lazy `file()` accessor that decodes the file from
// its car on demand. Mutates the entries in place.
function readFileset(blocks, files, isPublic = false) {
  for (const filename in files) {
    const fileMeta = files[filename];
    if (fileMeta.cid) {
      if (isPublic) {
        fileMeta.url = `https://${fileMeta.cid.toString()}.ipfs.w3s.link/`;
      }
      if (fileMeta.car) {
        const carGetter = {
          get: async (cid) => {
            return await blocks.getFile(throwFalsy(fileMeta.car), cid, isPublic);
          }
        };
        fileMeta.file = async () => await blocks.ebOpts.storeRuntime.decodeFile(carGetter, fileMeta.cid, fileMeta);
      }
    }
    files[filename] = fileMeta;
  }
}
|
1997
|
+
// Load and decode a linked document block, attach its cid, and wire up
// lazy file readers before returning the value.
async function getValueFromLink(blocks, link, logger) {
  const block = await blocks.get(link);
  if (!block) throw logger.Error().Str("link", link.toString()).Msg(`Missing linked block`).AsError();
  const { value } = await decode3({ bytes: block.bytes, hasher: hasher2, codec: codec2 });
  const cvalue = { ...value, cid: link };
  readFiles(blocks, cvalue);
  return cvalue;
}
|
2008
|
+
// EventFetcher variant that tolerates missing events: a failed lookup is
// logged and yields { value: undefined } instead of propagating the error.
// Fix: `return super.get(link)` without `await` let async rejections escape
// the try/catch entirely; awaiting makes the catch actually handle them.
var DirtyEventFetcher = class extends EventFetcher {
  async get(link) {
    try {
      return await super.get(link);
    } catch (e) {
      console.error("missing event", link.toString(), e);
      return { value: void 0 };
    }
  }
};
|
2018
|
+
// Collect document updates between `since` and `head`. `opts.dirty` tolerates
// missing events; `opts.limit` caps results. Returned oldest-first.
async function clockChangesSince(blocks, head, since, opts, logger) {
  const eventsFetcher = opts.dirty ? new DirtyEventFetcher(blocks) : new EventFetcher(blocks);
  const seenKeys = /* @__PURE__ */ new Set();
  const updates = await gatherUpdates(
    blocks,
    eventsFetcher,
    head,
    since,
    [],
    seenKeys,
    /* @__PURE__ */ new Set(),
    opts.limit || Infinity,
    logger
  );
  return { result: updates.reverse(), head };
}
|
2034
|
+
// Walk the event DAG from `head` back toward `since`, collecting the newest
// value for each key (deduped via `keys`), visiting each event once
// (`didLinks`) and stopping at `limit` or when `since` is reached.
async function gatherUpdates(blocks, eventsFetcher, head, since, updates = [], keys, didLinks, limit, logger) {
  if (limit <= 0) return updates;
  const headStrings = head.map((l) => l.toString());
  for (const link of since) {
    // reached the lower bound of the traversal
    if (headStrings.includes(link.toString())) {
      return updates;
    }
  }
  for (const link of head) {
    const linkStr = link.toString();
    if (didLinks.has(linkStr)) continue;
    didLinks.add(linkStr);
    const { value: event } = await eventsFetcher.get(link);
    if (!event) continue;
    const { type } = event.data;
    let ops = [];
    if (type === "batch") {
      ops = event.data.ops;
    } else if (type === "put") {
      ops = [event.data];
    }
    // newest ops win: iterate in reverse and skip keys we already have
    for (let i = ops.length - 1; i >= 0; i--) {
      const { key, value } = ops[i];
      if (!keys.has(key)) {
        const docValue = await getValueFromLink(blocks, value, logger);
        updates.push({ id: key, value: docValue.doc, del: docValue.del, clock: link });
        limit--;
        keys.add(key);
      }
    }
    if (event.parents) {
      updates = await gatherUpdates(blocks, eventsFetcher, event.parents, since, updates, keys, didLinks, limit, logger);
    }
  }
  return updates;
}
|
2069
|
+
// Stream every { id, value, del } entry reachable from the CRDT head.
async function* getAllEntries(blocks, head, logger) {
  for await (const [key, link] of entries(blocks, head)) {
    const docValue = await getValueFromLink(blocks, link, logger);
    yield { id: key, value: docValue.doc, del: docValue.del };
  }
}
|
2075
|
+
// Re-yield the clock visualization lines produced by pail's vis().
async function* clockVis(blocks, head) {
  yield* vis(blocks, head);
}
|
2080
|
+
var isCompacting = false;
|
2081
|
+
// Warm every block reachable from `head` into blockLog (head blocks, all
// entries, clock vis, prolly root and its changed blocks) so a compacted car
// can be written. Guarded by a module-level re-entrancy flag.
// Fix: the flag is now reset in `finally` — previously any throw (e.g. a
// missing head block) left isCompacting stuck at true, permanently disabling
// compaction for the process lifetime.
async function doCompact(blockLog, head, logger) {
  if (isCompacting) {
    return;
  }
  isCompacting = true;
  try {
    time("compact head");
    for (const cid of head) {
      const bl = await blockLog.get(cid);
      if (!bl) throw logger.Error().Ref("cid", cid).Msg("Missing head block").AsError();
    }
    timeEnd("compact head");
    time("compact all entries");
    for await (const _entry of getAllEntries(blockLog, head, logger)) {
      continue; // load-only pass
    }
    timeEnd("compact all entries");
    time("compact clock vis");
    for await (const _line of vis(blockLog, head)) {
      // load-only pass
    }
    timeEnd("compact clock vis");
    time("compact root");
    const result = await root(blockLog, head);
    timeEnd("compact root");
    time("compact root blocks");
    for (const { cid, bytes } of [...result.additions, ...result.removals]) {
      blockLog.loggedBlocks.putSync(cid, bytes);
    }
    timeEnd("compact root blocks");
    time("compact changes");
    await clockChangesSince(blockLog, head, [], {}, logger);
    timeEnd("compact changes");
  } finally {
    isCompacting = false;
  }
}
|
2114
|
+
// Load a block by CID string and decode it into a multiformats Block.
async function getBlock(blocks, cidString) {
  const found = await blocks.get(parse2(cidString));
  if (!found) throw new Error(`Missing block ${cidString}`);
  const { cid, value } = await decode3({ bytes: found.bytes, codec: codec2, hasher: hasher2 });
  return new Block2({ cid, value, bytes: found.bytes });
}
|
2120
|
+
|
2121
|
+
// src/indexer-helpers.ts
|
2122
|
+
import { create as create3 } from "multiformats/block";
|
2123
|
+
import { sha256 as hasher3 } from "multiformats/hashes/sha2";
|
2124
|
+
import * as codec3 from "@ipld/dag-cbor";
|
2125
|
+
import charwise from "charwise";
|
2126
|
+
import * as DbIndex from "prolly-trees/db-index";
|
2127
|
+
import { bf as bf2, simpleCompare } from "prolly-trees/utils";
|
2128
|
+
import { nocache as cache2 } from "prolly-trees/cache";
|
2129
|
+
// Plain holder for an index tree's { cid, root } pair; fields are assigned by Index.
var IndexTree = class {
};
|
2131
|
+
// Compare doc-id refs where NaN sorts first and Infinity last — these are the
// sentinel bounds used for prefix range queries.
function refCompare(aRef, bRef) {
  if (Number.isNaN(aRef)) return -1;
  if (Number.isNaN(bRef)) throw new Error("ref may not be Infinity or NaN");
  if (aRef === Infinity) return 1;
  return simpleCompare(aRef, bRef);
}
|
2137
|
+
// Compare [key, ref] index entries: first by encoded key, then by doc ref.
function compare(a, b) {
  const [aKey, aRef] = a;
  const [bKey, bRef] = b;
  const byKey = simpleCompare(aKey, bKey);
  if (byKey !== 0) return byKey;
  return refCompare(aRef, bRef);
}
|
2144
|
+
var byKeyOpts = { cache: cache2, chunker: bf2(30), codec: codec3, hasher: hasher3, compare };
|
2145
|
+
var byIdOpts = { cache: cache2, chunker: bf2(30), codec: codec3, hasher: hasher3, compare: simpleCompare };
|
2146
|
+
// Run mapFn over each non-deleted change, collecting [encodedKey, docId] -> value
// entries. A mapFn that returns a value without ever calling emit indexes that
// return value with a null payload.
function indexEntriesForChanges(changes, mapFn) {
  const indexEntries = [];
  for (const { id: key, value, del } of changes) {
    if (del || !value) continue;
    let mapCalled = false;
    const emit = (k, v) => {
      mapCalled = true;
      if (typeof k === "undefined") return;
      indexEntries.push({
        key: [charwise.encode(k), key],
        value: v || null
      });
    };
    const mapReturn = mapFn({ ...value, _id: key }, emit);
    if (!mapCalled && mapReturn) {
      indexEntries.push({
        key: [charwise.encode(mapReturn), key],
        value: null
      });
    }
  }
  return indexEntries;
}
|
2168
|
+
// Adapt a blockstore into the `get(address)` loader shape prolly-trees expects.
function makeProllyGetBlock(blocks) {
  return async (address) => {
    const found = await blocks.get(address);
    if (!found) throw new Error(`Missing block ${address.toString()}`);
    const { cid, bytes } = found;
    return create3({ cid, bytes, hasher: hasher3, codec: codec3 });
  };
}
|
2176
|
+
// Apply indexEntries to a prolly db-index: create it on first use (no cid),
// load it when only a cid exists, then bulk-update. Every new block is
// persisted into tblocks. Returns the resulting { root, cid }.
async function bulkIndex(tblocks, inIndex, indexEntries, opts) {
  if (!indexEntries.length) return inIndex;
  if (!inIndex.root) {
    if (!inIndex.cid) {
      // brand-new index: stream creation nodes, persisting each block;
      // the last node/block seen is the root
      let rootBlock = void 0;
      let rootNode = void 0;
      const creation = await DbIndex.create({
        get: makeProllyGetBlock(tblocks),
        list: indexEntries,
        ...opts
      });
      for await (const node of creation) {
        const block = await node.block;
        await tblocks.put(block.cid, block.bytes);
        rootBlock = block;
        rootNode = node;
      }
      if (!rootNode || !rootBlock) throw new Error("failed to create index");
      return { root: rootNode, cid: rootBlock.cid };
    } else {
      inIndex.root = await DbIndex.load({ cid: inIndex.cid, get: makeProllyGetBlock(tblocks), ...opts });
    }
  }
  const { root: newRoot, blocks: newBlocks } = await inIndex.root.bulk(indexEntries);
  if (!newRoot) {
    return { root: void 0, cid: void 0 };
  }
  for await (const block of newBlocks) {
    await tblocks.put(block.cid, block.bytes);
  }
  return { root: newRoot, cid: (await newRoot.block).cid };
}
|
2208
|
+
// Load an existing prolly db-index by cid from the transaction blocks.
async function loadIndex(tblocks, cid, opts) {
  return DbIndex.load({ cid, get: makeProllyGetBlock(tblocks), ...opts });
}
|
2211
|
+
// Post-process raw index results: apply descending/limit/includeDocs options
// (mutating resp.result as the original does) and decode charwise keys into
// the final { rows } shape.
async function applyQuery(crdt, resp, query) {
  if (query.descending) {
    resp.result = resp.result.reverse();
  }
  if (query.limit) {
    resp.result = resp.result.slice(0, query.limit);
  }
  if (query.includeDocs) {
    resp.result = await Promise.all(
      resp.result.map(async (row) => {
        const val = await crdt.get(row.id);
        const doc = val ? { ...val.doc, _id: row.id } : void 0;
        return { ...row, doc };
      })
    );
  }
  const rows = resp.result.map(({ key, ...rest }) => ({
    key: charwise.decode(key),
    ...rest
  }));
  return { rows };
}
|
2236
|
+
// Charwise-encode both ends of a key range.
function encodeRange(range) {
  const [lo, hi] = range;
  return [charwise.encode(lo), charwise.encode(hi)];
}
|
2239
|
+
// Charwise-encode a single index key.
function encodeKey(key) {
  return charwise.encode(key);
}
|
2242
|
+
|
2243
|
+
// src/indexer.ts
|
2244
|
+
// Get or create the named index on the CRDT. mapFn and meta are mutually
// exclusive; re-using an existing name re-applies the definition to it.
function index({ _crdt }, name, mapFn, meta) {
  if (mapFn && meta) throw _crdt.logger.Error().Msg("cannot provide both mapFn and meta").AsError();
  if (mapFn && mapFn.constructor.name !== "Function") throw _crdt.logger.Error().Msg("mapFn must be a function").AsError();
  const existing = _crdt.indexers.get(name);
  if (existing) {
    existing.applyMapFn(name, mapFn, meta);
  } else {
    _crdt.indexers.set(name, new Index(_crdt, name, mapFn, meta));
  }
  return _crdt.indexers.get(name);
}
|
2256
|
+
var Index = class {
|
2257
|
+
constructor(crdt, name, mapFn, meta) {
|
2258
|
+
this.mapFnString = "";
|
2259
|
+
this.byKey = new IndexTree();
|
2260
|
+
this.byId = new IndexTree();
|
2261
|
+
this.includeDocsDefault = false;
|
2262
|
+
this.logger = ensureLogger(crdt.logger, "Index");
|
2263
|
+
this.blockstore = crdt.indexBlockstore;
|
2264
|
+
this.crdt = crdt;
|
2265
|
+
this.applyMapFn(name, mapFn, meta);
|
2266
|
+
this.name = name;
|
2267
|
+
if (!(this.mapFnString || this.initError)) throw this.logger.Error().Msg("missing mapFnString").AsError();
|
2268
|
+
}
|
2269
|
+
ready() {
|
2270
|
+
return Promise.all([this.blockstore.ready(), this.crdt.ready()]).then(() => {
|
2271
|
+
});
|
2272
|
+
}
|
2273
|
+
close() {
|
2274
|
+
return Promise.all([this.blockstore.close(), this.crdt.close()]).then(() => {
|
2275
|
+
});
|
2276
|
+
}
|
2277
|
+
destroy() {
|
2278
|
+
return Promise.all([this.blockstore.destroy(), this.crdt.destroy()]).then(() => {
|
2279
|
+
});
|
2280
|
+
}
|
2281
|
+
applyMapFn(name, mapFn, meta) {
|
2282
|
+
if (mapFn && meta) throw this.logger.Error().Msg("cannot provide both mapFn and meta").AsError();
|
2283
|
+
if (this.name && this.name !== name) throw this.logger.Error().Msg("cannot change name").AsError();
|
2284
|
+
this.name = name;
|
2285
|
+
try {
|
2286
|
+
if (meta) {
|
2287
|
+
if (this.indexHead && this.indexHead.map((c) => c.toString()).join() !== meta.head.map((c) => c.toString()).join()) {
|
2288
|
+
throw this.logger.Error().Msg("cannot apply different head meta").AsError();
|
2289
|
+
}
|
2290
|
+
if (this.mapFnString) {
|
2291
|
+
if (this.mapFnString !== meta.map) {
|
2292
|
+
this.logger.Warn().Msg(`cannot apply different mapFn meta: old mapFnString ${this.mapFnString} new mapFnString ${meta.map}`);
|
2293
|
+
} else {
|
2294
|
+
this.byId.cid = meta.byId;
|
2295
|
+
this.byKey.cid = meta.byKey;
|
2296
|
+
this.indexHead = meta.head;
|
2297
|
+
}
|
2298
|
+
} else {
|
2299
|
+
this.mapFnString = meta.map;
|
2300
|
+
this.byId.cid = meta.byId;
|
2301
|
+
this.byKey.cid = meta.byKey;
|
2302
|
+
this.indexHead = meta.head;
|
2303
|
+
}
|
2304
|
+
} else {
|
2305
|
+
if (this.mapFn) {
|
2306
|
+
if (mapFn) {
|
2307
|
+
if (this.mapFn.toString() !== mapFn.toString()) {
|
2308
|
+
throw this.logger.Error().Msg("cannot apply different mapFn app2").AsError();
|
2309
|
+
}
|
2310
|
+
}
|
2311
|
+
} else {
|
2312
|
+
if (!mapFn) {
|
2313
|
+
mapFn = (doc) => doc[name] ?? void 0;
|
2314
|
+
}
|
2315
|
+
if (this.mapFnString) {
|
2316
|
+
if (this.mapFnString !== mapFn.toString()) {
|
2317
|
+
throw this.logger.Error().Msg("cannot apply different mapFn app").AsError();
|
2318
|
+
}
|
2319
|
+
} else {
|
2320
|
+
this.mapFnString = mapFn.toString();
|
2321
|
+
}
|
2322
|
+
this.mapFn = mapFn;
|
2323
|
+
}
|
2324
|
+
}
|
2325
|
+
const matches = /=>\s*(.*)/.test(this.mapFnString);
|
2326
|
+
this.includeDocsDefault = matches;
|
2327
|
+
} catch (e) {
|
2328
|
+
this.initError = e;
|
2329
|
+
}
|
2330
|
+
}
|
2331
|
+
async query(opts = {}) {
|
2332
|
+
await this.ready();
|
2333
|
+
await this._updateIndex();
|
2334
|
+
await this._hydrateIndex();
|
2335
|
+
if (!this.byKey.root) {
|
2336
|
+
return await applyQuery(this.crdt, { result: [] }, opts);
|
2337
|
+
}
|
2338
|
+
if (this.includeDocsDefault && opts.includeDocs === void 0) opts.includeDocs = true;
|
2339
|
+
if (opts.range) {
|
2340
|
+
const eRange = encodeRange(opts.range);
|
2341
|
+
return await applyQuery(this.crdt, await throwFalsy(this.byKey.root).range(eRange[0], eRange[1]), opts);
|
2342
|
+
}
|
2343
|
+
if (opts.key) {
|
2344
|
+
const encodedKey = encodeKey(opts.key);
|
2345
|
+
return await applyQuery(this.crdt, await throwFalsy(this.byKey.root).get(encodedKey), opts);
|
2346
|
+
}
|
2347
|
+
if (Array.isArray(opts.keys)) {
|
2348
|
+
const results = await Promise.all(
|
2349
|
+
opts.keys.map(async (key) => {
|
2350
|
+
const encodedKey = encodeKey(key);
|
2351
|
+
return (await applyQuery(this.crdt, await throwFalsy(this.byKey.root).get(encodedKey), opts)).rows;
|
2352
|
+
})
|
2353
|
+
);
|
2354
|
+
return { rows: results.flat() };
|
2355
|
+
}
|
2356
|
+
if (opts.prefix) {
|
2357
|
+
if (!Array.isArray(opts.prefix)) opts.prefix = [opts.prefix];
|
2358
|
+
const start = [...opts.prefix, NaN];
|
2359
|
+
const end = [...opts.prefix, Infinity];
|
2360
|
+
const encodedR = encodeRange([start, end]);
|
2361
|
+
return await applyQuery(this.crdt, await this.byKey.root.range(...encodedR), opts);
|
2362
|
+
}
|
2363
|
+
const all = await this.byKey.root.getAllEntries();
|
2364
|
+
return await applyQuery(
|
2365
|
+
this.crdt,
|
2366
|
+
{
|
2367
|
+
// @ts-expect-error getAllEntries returns a different type than range
|
2368
|
+
result: all.result.map(({ key: [k, id], value }) => ({
|
2369
|
+
key: k,
|
2370
|
+
id,
|
2371
|
+
value
|
2372
|
+
}))
|
2373
|
+
},
|
2374
|
+
opts
|
2375
|
+
);
|
2376
|
+
}
|
2377
|
+
_resetIndex() {
|
2378
|
+
this.byId = new IndexTree();
|
2379
|
+
this.byKey = new IndexTree();
|
2380
|
+
this.indexHead = void 0;
|
2381
|
+
}
|
2382
|
+
async _hydrateIndex() {
|
2383
|
+
if (this.byId.root && this.byKey.root) return;
|
2384
|
+
if (!this.byId.cid || !this.byKey.cid) return;
|
2385
|
+
this.byId.root = await loadIndex(this.blockstore, this.byId.cid, byIdOpts);
|
2386
|
+
this.byKey.root = await loadIndex(this.blockstore, this.byKey.cid, byKeyOpts);
|
2387
|
+
}
|
2388
|
+
async _updateIndex() {
|
2389
|
+
await this.ready();
|
2390
|
+
if (this.initError) throw this.initError;
|
2391
|
+
if (!this.mapFn) throw this.logger.Error().Msg("No map function defined").AsError();
|
2392
|
+
let result, head;
|
2393
|
+
if (!this.indexHead || this.indexHead.length === 0) {
|
2394
|
+
({ result, head } = await this.crdt.allDocs());
|
2395
|
+
} else {
|
2396
|
+
({ result, head } = await this.crdt.changes(this.indexHead));
|
2397
|
+
}
|
2398
|
+
if (result.length === 0) {
|
2399
|
+
this.indexHead = head;
|
2400
|
+
}
|
2401
|
+
let staleKeyIndexEntries = [];
|
2402
|
+
let removeIdIndexEntries = [];
|
2403
|
+
if (this.byId.root) {
|
2404
|
+
const removeIds = result.map(({ id: key }) => key);
|
2405
|
+
const { result: oldChangeEntries } = await this.byId.root.getMany(removeIds);
|
2406
|
+
staleKeyIndexEntries = oldChangeEntries.map((key) => ({ key, del: true }));
|
2407
|
+
removeIdIndexEntries = oldChangeEntries.map((key) => ({ key: key[1], del: true }));
|
2408
|
+
}
|
2409
|
+
const indexEntries = indexEntriesForChanges(result, this.mapFn);
|
2410
|
+
const byIdIndexEntries = indexEntries.map(({ key }) => ({
|
2411
|
+
key: key[1],
|
2412
|
+
value: key
|
2413
|
+
}));
|
2414
|
+
const indexerMeta = { indexes: /* @__PURE__ */ new Map() };
|
2415
|
+
for (const [name, indexer] of this.crdt.indexers) {
|
2416
|
+
if (indexer.indexHead) {
|
2417
|
+
indexerMeta.indexes?.set(name, {
|
2418
|
+
byId: indexer.byId.cid,
|
2419
|
+
byKey: indexer.byKey.cid,
|
2420
|
+
head: indexer.indexHead,
|
2421
|
+
map: indexer.mapFnString,
|
2422
|
+
name: indexer.name
|
2423
|
+
});
|
2424
|
+
}
|
2425
|
+
}
|
2426
|
+
if (result.length === 0) {
|
2427
|
+
return indexerMeta;
|
2428
|
+
}
|
2429
|
+
const { meta } = await this.blockstore.transaction(async (tblocks) => {
|
2430
|
+
this.byId = await bulkIndex(tblocks, this.byId, removeIdIndexEntries.concat(byIdIndexEntries), byIdOpts);
|
2431
|
+
this.byKey = await bulkIndex(tblocks, this.byKey, staleKeyIndexEntries.concat(indexEntries), byKeyOpts);
|
2432
|
+
this.indexHead = head;
|
2433
|
+
if (this.byId.cid && this.byKey.cid) {
|
2434
|
+
const idxMeta = {
|
2435
|
+
byId: this.byId.cid,
|
2436
|
+
byKey: this.byKey.cid,
|
2437
|
+
head,
|
2438
|
+
map: this.mapFnString,
|
2439
|
+
name: this.name
|
2440
|
+
};
|
2441
|
+
indexerMeta.indexes?.set(this.name, idxMeta);
|
2442
|
+
}
|
2443
|
+
return indexerMeta;
|
2444
|
+
});
|
2445
|
+
return meta;
|
2446
|
+
}
|
2447
|
+
};
|
2448
|
+
|
2449
|
+
// src/crdt-clock.ts
|
2450
|
+
import { advance } from "@web3-storage/pail/clock";
|
2451
|
+
import { root as root2 } from "@web3-storage/pail/crdt";
|
2452
|
+
import { ResolveOnce as ResolveOnce3 } from "@adviser/cement";
|
2453
|
+
|
2454
|
+
// src/apply-head-queue.ts
|
2455
|
+
function applyHeadQueue(worker, logger) {
|
2456
|
+
const queue = [];
|
2457
|
+
let isProcessing = false;
|
2458
|
+
async function* process() {
|
2459
|
+
if (isProcessing || queue.length === 0) return;
|
2460
|
+
isProcessing = true;
|
2461
|
+
const allUpdates = [];
|
2462
|
+
try {
|
2463
|
+
while (queue.length > 0) {
|
2464
|
+
queue.sort((a, b) => b.updates ? 1 : -1);
|
2465
|
+
const task = queue.shift();
|
2466
|
+
if (!task) continue;
|
2467
|
+
await worker(task.newHead, task.prevHead, task.updates !== null).catch((e) => {
|
2468
|
+
throw logger.Error().Err(e).Msg("int_applyHead worker error").AsError();
|
2469
|
+
});
|
2470
|
+
if (task.updates) {
|
2471
|
+
allUpdates.push(...task.updates);
|
2472
|
+
}
|
2473
|
+
if (!queue.some((t) => t.updates) || task.updates) {
|
2474
|
+
const allTasksHaveUpdates = queue.every((task2) => task2.updates !== null);
|
2475
|
+
yield { updates: allUpdates, all: allTasksHaveUpdates };
|
2476
|
+
allUpdates.length = 0;
|
2477
|
+
}
|
2478
|
+
}
|
2479
|
+
} finally {
|
2480
|
+
isProcessing = false;
|
2481
|
+
const generator = process();
|
2482
|
+
let result = await generator.next();
|
2483
|
+
while (!result.done) {
|
2484
|
+
result = await generator.next();
|
2485
|
+
}
|
2486
|
+
}
|
2487
|
+
}
|
2488
|
+
return {
|
2489
|
+
push(task) {
|
2490
|
+
queue.push(task);
|
2491
|
+
return process();
|
2492
|
+
},
|
2493
|
+
size() {
|
2494
|
+
return queue.length;
|
2495
|
+
}
|
2496
|
+
};
|
2497
|
+
}
|
2498
|
+
|
2499
|
+
// src/crdt-clock.ts
|
2500
|
+
var CRDTClock = class {
|
2501
|
+
constructor(blockstore) {
|
2502
|
+
// todo: track local and remote clocks independently, merge on read
|
2503
|
+
// that way we can drop the whole remote if we need to
|
2504
|
+
// should go with making sure the local clock only references locally available blockstore on write
|
2505
|
+
this.head = [];
|
2506
|
+
this.zoomers = /* @__PURE__ */ new Set();
|
2507
|
+
this.watchers = /* @__PURE__ */ new Set();
|
2508
|
+
this.emptyWatchers = /* @__PURE__ */ new Set();
|
2509
|
+
this._ready = new ResolveOnce3();
|
2510
|
+
this.blockstore = blockstore;
|
2511
|
+
this.logger = ensureLogger(blockstore.logger, "CRDTClock");
|
2512
|
+
this.applyHeadQueue = applyHeadQueue(this.int_applyHead.bind(this), this.logger);
|
2513
|
+
}
|
2514
|
+
async ready() {
|
2515
|
+
return this._ready.once(async () => {
|
2516
|
+
await this.blockstore.ready();
|
2517
|
+
});
|
2518
|
+
}
|
2519
|
+
async close() {
|
2520
|
+
await this.blockstore.close();
|
2521
|
+
}
|
2522
|
+
setHead(head) {
|
2523
|
+
this.head = head;
|
2524
|
+
}
|
2525
|
+
async applyHead(newHead, prevHead, updates) {
|
2526
|
+
for await (const { updates: updatesAcc, all } of this.applyHeadQueue.push({
|
2527
|
+
newHead,
|
2528
|
+
prevHead,
|
2529
|
+
updates
|
2530
|
+
})) {
|
2531
|
+
return this.processUpdates(updatesAcc, all, prevHead);
|
2532
|
+
}
|
2533
|
+
}
|
2534
|
+
async processUpdates(updatesAcc, all, prevHead) {
|
2535
|
+
let internalUpdates = updatesAcc;
|
2536
|
+
if (this.watchers.size && !all) {
|
2537
|
+
const changes = await clockChangesSince(throwFalsy(this.blockstore), this.head, prevHead, {}, this.logger);
|
2538
|
+
internalUpdates = changes.result;
|
2539
|
+
}
|
2540
|
+
this.zoomers.forEach((fn) => fn());
|
2541
|
+
this.notifyWatchers(internalUpdates || []);
|
2542
|
+
}
|
2543
|
+
notifyWatchers(updates) {
|
2544
|
+
this.emptyWatchers.forEach((fn) => fn());
|
2545
|
+
this.watchers.forEach((fn) => fn(updates || []));
|
2546
|
+
}
|
2547
|
+
onTick(fn) {
|
2548
|
+
this.watchers.add(fn);
|
2549
|
+
}
|
2550
|
+
onTock(fn) {
|
2551
|
+
this.emptyWatchers.add(fn);
|
2552
|
+
}
|
2553
|
+
onZoom(fn) {
|
2554
|
+
this.zoomers.add(fn);
|
2555
|
+
}
|
2556
|
+
async int_applyHead(newHead, prevHead, localUpdates) {
|
2557
|
+
const ogHead = sortClockHead(this.head);
|
2558
|
+
newHead = sortClockHead(newHead);
|
2559
|
+
if (compareClockHeads(ogHead, newHead)) {
|
2560
|
+
return;
|
2561
|
+
}
|
2562
|
+
const ogPrev = sortClockHead(prevHead);
|
2563
|
+
if (compareClockHeads(ogHead, ogPrev)) {
|
2564
|
+
this.setHead(newHead);
|
2565
|
+
return;
|
2566
|
+
}
|
2567
|
+
const noLoader = !localUpdates;
|
2568
|
+
if (!this.blockstore) {
|
2569
|
+
throw this.logger.Error().Msg("missing blockstore").AsError();
|
2570
|
+
}
|
2571
|
+
await validateBlocks(this.logger, newHead, this.blockstore);
|
2572
|
+
const { meta } = await this.blockstore.transaction(
|
2573
|
+
async (tblocks) => {
|
2574
|
+
const advancedHead = await advanceBlocks(this.logger, newHead, tblocks, this.head);
|
2575
|
+
const result = await root2(tblocks, advancedHead);
|
2576
|
+
for (const { cid, bytes } of [
|
2577
|
+
...result.additions
|
2578
|
+
// ...result.removals
|
2579
|
+
]) {
|
2580
|
+
tblocks.putSync(cid, bytes);
|
2581
|
+
}
|
2582
|
+
return { head: advancedHead };
|
2583
|
+
},
|
2584
|
+
{ noLoader }
|
2585
|
+
);
|
2586
|
+
this.setHead(meta.head);
|
2587
|
+
}
|
2588
|
+
};
|
2589
|
+
function sortClockHead(clockHead) {
|
2590
|
+
return clockHead.sort((a, b) => a.toString().localeCompare(b.toString()));
|
2591
|
+
}
|
2592
|
+
async function validateBlocks(logger, newHead, blockstore) {
|
2593
|
+
if (!blockstore) throw logger.Error().Msg("missing blockstore");
|
2594
|
+
newHead.map(async (cid) => {
|
2595
|
+
const got = await blockstore.get(cid);
|
2596
|
+
if (!got) {
|
2597
|
+
throw logger.Error().Str("cid", cid.toString()).Msg("int_applyHead missing block").AsError();
|
2598
|
+
}
|
2599
|
+
});
|
2600
|
+
}
|
2601
|
+
function compareClockHeads(head1, head2) {
|
2602
|
+
return head1.toString() === head2.toString();
|
2603
|
+
}
|
2604
|
+
async function advanceBlocks(logger, newHead, tblocks, head) {
|
2605
|
+
for (const cid of newHead) {
|
2606
|
+
try {
|
2607
|
+
head = await advance(tblocks, head, cid);
|
2608
|
+
} catch (e) {
|
2609
|
+
logger.Debug().Err(e).Msg("failed to advance head");
|
2610
|
+
continue;
|
2611
|
+
}
|
2612
|
+
}
|
2613
|
+
return head;
|
2614
|
+
}
|
2615
|
+
|
2616
|
+
// src/crdt.ts
|
2617
|
+
var CRDT = class {
|
2618
|
+
constructor(name, opts = {}) {
|
2619
|
+
this.onceReady = new ResolveOnce4();
|
2620
|
+
this.indexers = /* @__PURE__ */ new Map();
|
2621
|
+
this.name = name;
|
2622
|
+
this.logger = ensureLogger(opts, "CRDT");
|
2623
|
+
this.opts = opts;
|
2624
|
+
this.blockstore = blockstoreFactory({
|
2625
|
+
name,
|
2626
|
+
applyMeta: async (meta) => {
|
2627
|
+
const crdtMeta = meta;
|
2628
|
+
if (!crdtMeta.head) throw this.logger.Error().Msg("missing head").AsError();
|
2629
|
+
await this.clock.applyHead(crdtMeta.head, []);
|
2630
|
+
},
|
2631
|
+
compact: async (blocks) => {
|
2632
|
+
await doCompact(blocks, this.clock.head, this.logger);
|
2633
|
+
return { head: this.clock.head };
|
2634
|
+
},
|
2635
|
+
autoCompact: this.opts.autoCompact || 100,
|
2636
|
+
crypto: this.opts.crypto,
|
2637
|
+
store: { ...this.opts.store, isIndex: void 0 },
|
2638
|
+
public: this.opts.public,
|
2639
|
+
meta: this.opts.meta,
|
2640
|
+
threshold: this.opts.threshold
|
2641
|
+
});
|
2642
|
+
this.indexBlockstore = blockstoreFactory({
|
2643
|
+
name,
|
2644
|
+
applyMeta: async (meta) => {
|
2645
|
+
const idxCarMeta = meta;
|
2646
|
+
if (!idxCarMeta.indexes) throw this.logger.Error().Msg("missing indexes").AsError();
|
2647
|
+
for (const [name2, idx] of Object.entries(idxCarMeta.indexes)) {
|
2648
|
+
index({ _crdt: this }, name2, void 0, idx);
|
2649
|
+
}
|
2650
|
+
},
|
2651
|
+
crypto: this.opts.crypto,
|
2652
|
+
store: { ...this.opts.store, isIndex: this.opts.store?.isIndex || "idx" },
|
2653
|
+
public: this.opts.public
|
2654
|
+
});
|
2655
|
+
this.clock = new CRDTClock(this.blockstore);
|
2656
|
+
this.clock.onZoom(() => {
|
2657
|
+
for (const idx of this.indexers.values()) {
|
2658
|
+
idx._resetIndex();
|
2659
|
+
}
|
2660
|
+
});
|
2661
|
+
}
|
2662
|
+
async ready() {
|
2663
|
+
return this.onceReady.once(async () => {
|
2664
|
+
await Promise.all([this.blockstore.ready(), this.indexBlockstore.ready(), this.clock.ready()]);
|
2665
|
+
});
|
2666
|
+
}
|
2667
|
+
async close() {
|
2668
|
+
await Promise.all([this.blockstore.close(), this.indexBlockstore.close(), this.clock.close()]);
|
2669
|
+
}
|
2670
|
+
async destroy() {
|
2671
|
+
await Promise.all([this.blockstore.destroy(), this.indexBlockstore.destroy()]);
|
2672
|
+
}
|
2673
|
+
async bulk(updates) {
|
2674
|
+
await this.ready();
|
2675
|
+
const prevHead = [...this.clock.head];
|
2676
|
+
const done = await this.blockstore.transaction(async (blocks) => {
|
2677
|
+
const { head } = await applyBulkUpdateToCrdt(
|
2678
|
+
this.blockstore.ebOpts.storeRuntime,
|
2679
|
+
blocks,
|
2680
|
+
this.clock.head,
|
2681
|
+
updates,
|
2682
|
+
this.logger
|
2683
|
+
);
|
2684
|
+
updates = updates.map((dupdate) => {
|
2685
|
+
readFiles(this.blockstore, { doc: dupdate.value });
|
2686
|
+
return dupdate;
|
2687
|
+
});
|
2688
|
+
return { head };
|
2689
|
+
});
|
2690
|
+
await this.clock.applyHead(done.meta.head, prevHead, updates);
|
2691
|
+
return done.meta;
|
2692
|
+
}
|
2693
|
+
// if (snap) await this.clock.applyHead(crdtMeta.head, this.clock.head)
|
2694
|
+
async allDocs() {
|
2695
|
+
await this.ready();
|
2696
|
+
const result = [];
|
2697
|
+
for await (const entry of getAllEntries(this.blockstore, this.clock.head, this.logger)) {
|
2698
|
+
result.push(entry);
|
2699
|
+
}
|
2700
|
+
return { result, head: this.clock.head };
|
2701
|
+
}
|
2702
|
+
async vis() {
|
2703
|
+
await this.ready();
|
2704
|
+
const txt = [];
|
2705
|
+
for await (const line of clockVis(this.blockstore, this.clock.head)) {
|
2706
|
+
txt.push(line);
|
2707
|
+
}
|
2708
|
+
return txt.join("\n");
|
2709
|
+
}
|
2710
|
+
async getBlock(cidString) {
|
2711
|
+
await this.ready();
|
2712
|
+
return await getBlock(this.blockstore, cidString);
|
2713
|
+
}
|
2714
|
+
async get(key) {
|
2715
|
+
await this.ready();
|
2716
|
+
const result = await getValueFromCrdt(this.blockstore, this.clock.head, key, this.logger);
|
2717
|
+
if (result.del) return void 0;
|
2718
|
+
return result;
|
2719
|
+
}
|
2720
|
+
async changes(since = [], opts = {}) {
|
2721
|
+
await this.ready();
|
2722
|
+
return await clockChangesSince(this.blockstore, this.clock.head, since, opts, this.logger);
|
2723
|
+
}
|
2724
|
+
async compact() {
|
2725
|
+
const blocks = this.blockstore;
|
2726
|
+
return await blocks.compact();
|
2727
|
+
}
|
2728
|
+
};
|
2729
|
+
|
2730
|
+
// src/database.ts
|
2731
|
+
var Database = class {
|
2732
|
+
constructor(name, opts) {
|
2733
|
+
this.opts = {};
|
2734
|
+
this._listening = false;
|
2735
|
+
this._listeners = /* @__PURE__ */ new Set();
|
2736
|
+
this._noupdate_listeners = /* @__PURE__ */ new Set();
|
2737
|
+
this._ready = new ResolveOnce5();
|
2738
|
+
this.name = name;
|
2739
|
+
this.opts = opts || this.opts;
|
2740
|
+
this.logger = ensureLogger(this.opts, "Database");
|
2741
|
+
this._crdt = new CRDT(name, this.opts);
|
2742
|
+
this.blockstore = this._crdt.blockstore;
|
2743
|
+
this._writeQueue = writeQueue(async (updates) => {
|
2744
|
+
return await this._crdt.bulk(updates);
|
2745
|
+
});
|
2746
|
+
this._crdt.clock.onTock(() => {
|
2747
|
+
this._no_update_notify();
|
2748
|
+
});
|
2749
|
+
}
|
2750
|
+
static {
|
2751
|
+
this.databases = /* @__PURE__ */ new Map();
|
2752
|
+
}
|
2753
|
+
async close() {
|
2754
|
+
await this.ready();
|
2755
|
+
await this._crdt.close();
|
2756
|
+
await this.blockstore.close();
|
2757
|
+
}
|
2758
|
+
async destroy() {
|
2759
|
+
await this.ready();
|
2760
|
+
await this._crdt.destroy();
|
2761
|
+
await this.blockstore.destroy();
|
2762
|
+
}
|
2763
|
+
async ready() {
|
2764
|
+
return this._ready.once(async () => {
|
2765
|
+
await SysContainer.start();
|
2766
|
+
await this._crdt.ready();
|
2767
|
+
await this.blockstore.ready();
|
2768
|
+
});
|
2769
|
+
}
|
2770
|
+
async get(id) {
|
2771
|
+
this.logger.Debug().Str("id", id).Msg("get-pre-ready");
|
2772
|
+
await this.ready();
|
2773
|
+
this.logger.Debug().Str("id", id).Msg("get-post-ready");
|
2774
|
+
const got = await this._crdt.get(id).catch((e) => {
|
2775
|
+
throw new NotFoundError(`Not found: ${id} - ${e.message}`);
|
2776
|
+
});
|
2777
|
+
if (!got) throw new NotFoundError(`Not found: ${id}`);
|
2778
|
+
const { doc } = got;
|
2779
|
+
return { ...doc, _id: id };
|
2780
|
+
}
|
2781
|
+
async put(doc) {
|
2782
|
+
this.logger.Debug().Str("id", doc._id).Msg("put-pre-ready");
|
2783
|
+
await this.ready();
|
2784
|
+
this.logger.Debug().Str("id", doc._id).Msg("put-post-ready");
|
2785
|
+
const { _id, ...value } = doc;
|
2786
|
+
const docId = _id || uuidv7();
|
2787
|
+
const result = await this._writeQueue.push({
|
2788
|
+
id: docId,
|
2789
|
+
value: {
|
2790
|
+
...value,
|
2791
|
+
_id: docId
|
2792
|
+
}
|
2793
|
+
});
|
2794
|
+
return { id: docId, clock: result?.head };
|
2795
|
+
}
|
2796
|
+
async del(id) {
|
2797
|
+
await this.ready();
|
2798
|
+
const result = await this._writeQueue.push({ id, del: true });
|
2799
|
+
return { id, clock: result?.head };
|
2800
|
+
}
|
2801
|
+
async changes(since = [], opts = {}) {
|
2802
|
+
await this.ready();
|
2803
|
+
const { result, head } = await this._crdt.changes(since, opts);
|
2804
|
+
const rows = result.map(({ id: key, value, del, clock }) => ({
|
2805
|
+
key,
|
2806
|
+
value: del ? { _id: key, _deleted: true } : { _id: key, ...value },
|
2807
|
+
clock
|
2808
|
+
}));
|
2809
|
+
return { rows, clock: head };
|
2810
|
+
}
|
2811
|
+
async allDocs() {
|
2812
|
+
await this.ready();
|
2813
|
+
const { result, head } = await this._crdt.allDocs();
|
2814
|
+
const rows = result.map(({ id: key, value, del }) => ({
|
2815
|
+
key,
|
2816
|
+
value: del ? { _id: key, _deleted: true } : { _id: key, ...value }
|
2817
|
+
}));
|
2818
|
+
return { rows, clock: head };
|
2819
|
+
}
|
2820
|
+
async allDocuments() {
|
2821
|
+
return this.allDocs();
|
2822
|
+
}
|
2823
|
+
subscribe(listener, updates) {
|
2824
|
+
if (updates) {
|
2825
|
+
if (!this._listening) {
|
2826
|
+
this._listening = true;
|
2827
|
+
this._crdt.clock.onTick((updates2) => {
|
2828
|
+
void this._notify(updates2);
|
2829
|
+
});
|
2830
|
+
}
|
2831
|
+
this._listeners.add(listener);
|
2832
|
+
return () => {
|
2833
|
+
this._listeners.delete(listener);
|
2834
|
+
};
|
2835
|
+
} else {
|
2836
|
+
this._noupdate_listeners.add(listener);
|
2837
|
+
return () => {
|
2838
|
+
this._noupdate_listeners.delete(listener);
|
2839
|
+
};
|
2840
|
+
}
|
2841
|
+
}
|
2842
|
+
// todo if we add this onto dbs in fireproof.ts then we can make index.ts a separate package
|
2843
|
+
async query(field, opts = {}) {
|
2844
|
+
await this.ready();
|
2845
|
+
const _crdt = this._crdt;
|
2846
|
+
const idx = typeof field === "string" ? index({ _crdt }, field) : index({ _crdt }, makeName(field.toString()), field);
|
2847
|
+
return await idx.query(opts);
|
2848
|
+
}
|
2849
|
+
async compact() {
|
2850
|
+
await this.ready();
|
2851
|
+
await this._crdt.compact();
|
2852
|
+
}
|
2853
|
+
async _notify(updates) {
|
2854
|
+
await this.ready();
|
2855
|
+
if (this._listeners.size) {
|
2856
|
+
const docs = updates.map(({ id, value }) => ({ ...value, _id: id }));
|
2857
|
+
for (const listener of this._listeners) {
|
2858
|
+
await (async () => await listener(docs))().catch((e) => {
|
2859
|
+
this.logger.Error().Err(e).Msg("subscriber error");
|
2860
|
+
});
|
2861
|
+
}
|
2862
|
+
}
|
2863
|
+
}
|
2864
|
+
async _no_update_notify() {
|
2865
|
+
await this.ready();
|
2866
|
+
if (this._noupdate_listeners.size) {
|
2867
|
+
for (const listener of this._noupdate_listeners) {
|
2868
|
+
await (async () => await listener([]))().catch((e) => {
|
2869
|
+
this.logger.Error().Err(e).Msg("subscriber error");
|
2870
|
+
});
|
2871
|
+
}
|
2872
|
+
}
|
2873
|
+
}
|
2874
|
+
};
|
2875
|
+
function toSortedArray(set) {
|
2876
|
+
if (!set) return [];
|
2877
|
+
return Object.entries(set).sort(([a], [b]) => a.localeCompare(b)).map(([k, v]) => ({ [k]: v }));
|
2878
|
+
}
|
2879
|
+
function fireproof(name, opts) {
|
2880
|
+
const key = JSON.stringify(
|
2881
|
+
toSortedArray({
|
2882
|
+
name,
|
2883
|
+
stores: toSortedArray(opts?.store?.stores),
|
2884
|
+
makeMetaStore: !!opts?.store?.makeMetaStore,
|
2885
|
+
makeDataStore: !!opts?.store?.makeDataStore,
|
2886
|
+
makeRemoteWAL: !!opts?.store?.makeRemoteWAL,
|
2887
|
+
encodeFile: !!opts?.store?.encodeFile,
|
2888
|
+
decodeFile: !!opts?.store?.decodeFile
|
2889
|
+
})
|
2890
|
+
);
|
2891
|
+
let db = Database.databases.get(key);
|
2892
|
+
if (!db) {
|
2893
|
+
db = new Database(name, opts);
|
2894
|
+
Database.databases.set(key, db);
|
2895
|
+
}
|
2896
|
+
return db;
|
2897
|
+
}
|
2898
|
+
function makeName(fnString) {
|
2899
|
+
const regex = /\(([^,()]+,\s*[^,()]+|\[[^\]]+\],\s*[^,()]+)\)/g;
|
2900
|
+
let found = null;
|
2901
|
+
const matches = Array.from(fnString.matchAll(regex), (match) => match[1].trim());
|
2902
|
+
if (matches.length === 0) {
|
2903
|
+
found = /=>\s*(.*)/.exec(fnString);
|
2904
|
+
}
|
2905
|
+
if (!found) {
|
2906
|
+
return fnString;
|
2907
|
+
} else {
|
2908
|
+
return found[1];
|
2909
|
+
}
|
2910
|
+
}
|
2911
|
+
|
2912
|
+
// src/version.ts
|
2913
|
+
var PACKAGE_VERSION = Object.keys({
|
2914
|
+
"0.0.0-dev": "xxxx"
|
2915
|
+
})[0];
|
2916
|
+
export {
|
2917
|
+
CRDT,
|
2918
|
+
Database,
|
2919
|
+
Index,
|
2920
|
+
PACKAGE_VERSION,
|
2921
|
+
blockstore_exports as blockstore,
|
2922
|
+
blockstore_exports as bs,
|
2923
|
+
ensureLogger,
|
2924
|
+
exception2Result,
|
2925
|
+
exceptionWrapper,
|
2926
|
+
falsyToUndef,
|
2927
|
+
fireproof,
|
2928
|
+
getKey,
|
2929
|
+
getName,
|
2930
|
+
getStore,
|
2931
|
+
index,
|
2932
|
+
isFalsy,
|
2933
|
+
runtime_exports as rt,
|
2934
|
+
runtime_exports as runtime,
|
2935
|
+
throwFalsy
|
2936
|
+
};
|
2937
|
+
//# sourceMappingURL=index.js.map
|