@fireproof/core-base 0.0.0-smoke-1b31059-1752074105

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/LICENSE.md +232 -0
  2. package/apply-head-queue.d.ts +17 -0
  3. package/apply-head-queue.js +47 -0
  4. package/apply-head-queue.js.map +1 -0
  5. package/apply-head-queue.ts +72 -0
  6. package/bundle-not-impl.d.ts +1 -0
  7. package/bundle-not-impl.js +4 -0
  8. package/bundle-not-impl.js.map +1 -0
  9. package/bundle-not-impl.ts +4 -0
  10. package/crdt-clock.d.ts +25 -0
  11. package/crdt-clock.js +138 -0
  12. package/crdt-clock.js.map +1 -0
  13. package/crdt-clock.ts +192 -0
  14. package/crdt-helpers.d.ts +18 -0
  15. package/crdt-helpers.js +331 -0
  16. package/crdt-helpers.js.map +1 -0
  17. package/crdt-helpers.ts +484 -0
  18. package/crdt.d.ts +40 -0
  19. package/crdt.js +172 -0
  20. package/crdt.js.map +1 -0
  21. package/crdt.ts +268 -0
  22. package/database.d.ts +32 -0
  23. package/database.js +136 -0
  24. package/database.js.map +1 -0
  25. package/database.ts +200 -0
  26. package/index.d.ts +6 -0
  27. package/index.js +7 -0
  28. package/index.js.map +1 -0
  29. package/index.ts +9 -0
  30. package/indexer-helpers.d.ts +25 -0
  31. package/indexer-helpers.js +155 -0
  32. package/indexer-helpers.js.map +1 -0
  33. package/indexer-helpers.ts +263 -0
  34. package/indexer.d.ts +22 -0
  35. package/indexer.js +246 -0
  36. package/indexer.js.map +1 -0
  37. package/indexer.ts +360 -0
  38. package/ledger.d.ts +55 -0
  39. package/ledger.js +245 -0
  40. package/ledger.js.map +1 -0
  41. package/ledger.ts +344 -0
  42. package/package.json +54 -0
  43. package/tsconfig.json +18 -0
  44. package/version.d.ts +1 -0
  45. package/version.js +4 -0
  46. package/version.js.map +1 -0
  47. package/version.ts +3 -0
  48. package/write-queue.d.ts +4 -0
  49. package/write-queue.js +69 -0
  50. package/write-queue.js.map +1 -0
  51. package/write-queue.ts +93 -0
@@ -0,0 +1,484 @@
1
+ import { asyncBlockDecode } from "@fireproof/core-runtime";
2
+ import { parse } from "multiformats/link";
3
+ import { Block } from "multiformats/block";
4
+ import { sha256 as hasher } from "multiformats/hashes/sha2";
5
+ import * as codec from "@ipld/dag-cbor";
6
+ import { put, get, entries, root } from "@web3-storage/pail/crdt";
7
+ import {
8
+ EventBlockView,
9
+ EventLink,
10
+ Operation,
11
+ PutOperation,
12
+ Result,
13
+ BlockFetcher as PailBlockFetcher,
14
+ } from "@web3-storage/pail/crdt/api";
15
+ import { EventFetcher, vis } from "@web3-storage/pail/clock";
16
+ import * as Batch from "@web3-storage/pail/crdt/batch";
17
+ import { BlockFetcher, TransactionMeta, AnyLink, StoreRuntime, CompactFetcher } from "@fireproof/core-types-blockstore";
18
+ import {
19
+ type EncryptedBlockstore,
20
+ CarTransactionImpl,
21
+ anyBlock2FPBlock,
22
+ doc2FPBlock,
23
+ fileBlock2FPBlock,
24
+ } from "@fireproof/core-blockstore";
25
+ import {
26
+ type IndexKeyType,
27
+ type DocUpdate,
28
+ type ClockHead,
29
+ type DocValue,
30
+ type CRDTMeta,
31
+ type ChangesOptions,
32
+ type DocFileMeta,
33
+ type DocFiles,
34
+ type DocSet,
35
+ type DocWithId,
36
+ type DocTypes,
37
+ throwFalsy,
38
+ CarTransaction,
39
+ BaseBlockstore,
40
+ PARAM,
41
+ NotFoundError,
42
+ } from "@fireproof/core-types-base";
43
+ import { Logger } from "@adviser/cement";
44
+ import { Link, Version } from "multiformats";
45
+
46
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
47
+ function time(tag: string) {
48
+ // console.time(tag)
49
+ }
50
+
51
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
52
+ function timeEnd(tag: string) {
53
+ // console.timeEnd(tag)
54
+ }
55
+
56
+ function toString<K extends IndexKeyType>(key: K, logger: Logger): string {
57
+ switch (typeof key) {
58
+ case "string":
59
+ case "number":
60
+ return key.toString();
61
+ default:
62
+ throw logger.Error().Msg("Invalid key type").AsError();
63
+ }
64
+ }
65
+
66
+ export function toPailFetcher(tblocks: BlockFetcher): PailBlockFetcher {
67
+ return {
68
+ get: async <T = unknown, C extends number = number, A extends number = number, V extends Version = 1>(
69
+ link: Link<T, C, A, V>,
70
+ ) => {
71
+ const block = await tblocks.get(link);
72
+ return block
73
+ ? ({
74
+ cid: block.cid,
75
+ bytes: block.bytes,
76
+ } as Block<T, C, A, V>)
77
+ : undefined;
78
+ },
79
+ };
80
+ }
81
+
82
+ export function sanitizeDocumentFields<T>(obj: T): T {
83
+ if (Array.isArray(obj)) {
84
+ return obj.map((item: unknown) => {
85
+ if (typeof item === "object" && item !== null) {
86
+ return sanitizeDocumentFields(item);
87
+ }
88
+ return item;
89
+ }) as T;
90
+ } else if (typeof obj === "object" && obj !== null) {
91
+ // Special case for Date objects - convert to ISO string
92
+ if (obj instanceof Date) {
93
+ return obj.toISOString() as unknown as T;
94
+ }
95
+
96
+ const typedObj = obj as Record<string, unknown>;
97
+ const result: Record<string, unknown> = {};
98
+ for (const key in typedObj) {
99
+ if (Object.hasOwnProperty.call(typedObj, key)) {
100
+ const value = typedObj[key];
101
+ if (value === null || (!Number.isNaN(value) && value !== undefined)) {
102
+ if (typeof value === "object" && !key.startsWith("_")) {
103
+ // Handle Date objects in properties
104
+ if (value instanceof Date) {
105
+ result[key] = (value as Date).toISOString();
106
+ } else {
107
+ const sanitized = sanitizeDocumentFields(value);
108
+ result[key] = sanitized;
109
+ }
110
+ } else {
111
+ result[key] = value;
112
+ }
113
+ }
114
+ }
115
+ }
116
+ return result as T;
117
+ }
118
+ return obj;
119
+ }
120
+
121
+ export async function applyBulkUpdateToCrdt<T extends DocTypes>(
122
+ store: StoreRuntime,
123
+ tblocks: CarTransaction,
124
+ head: ClockHead,
125
+ updates: DocUpdate<T>[],
126
+ logger: Logger,
127
+ ): Promise<CRDTMeta> {
128
+ let result: Result | null = null;
129
+ if (updates.length > 1) {
130
+ const batch = await Batch.create(toPailFetcher(tblocks), head);
131
+ for (const update of updates) {
132
+ const link = await writeDocContent(store, tblocks, update, logger);
133
+ await batch.put(toString(update.id, logger), link);
134
+ }
135
+ result = await batch.commit();
136
+ } else if (updates.length === 1) {
137
+ const link = await writeDocContent(store, tblocks, updates[0], logger);
138
+ result = await put(toPailFetcher(tblocks), head, toString(updates[0].id, logger), link);
139
+ }
140
+ if (!result) throw logger.Error().Uint64("updates.len", updates.length).Msg("Missing result").AsError();
141
+
142
+ if (result.event) {
143
+ for (const block of [
144
+ ...result.additions,
145
+ // ...result.removals,
146
+ result.event,
147
+ ]) {
148
+ tblocks.putSync(await anyBlock2FPBlock(block));
149
+ }
150
+ }
151
+ return { head: result.head }; // satisfies CRDTMeta;
152
+ }
153
+
154
+ // this whole thing can get pulled outside of the write queue
155
+ async function writeDocContent<T extends DocTypes>(
156
+ store: StoreRuntime,
157
+ blocks: CarTransaction,
158
+ update: DocUpdate<T>,
159
+ logger: Logger,
160
+ ): Promise<AnyLink> {
161
+ let value: Partial<DocValue<T>>;
162
+ if (update.del) {
163
+ value = { del: true };
164
+ } else {
165
+ if (!update.value) throw logger.Error().Msg("Missing value").AsError();
166
+ await processFiles(store, blocks, update.value, logger);
167
+ value = { doc: update.value as DocWithId<T> };
168
+ }
169
+ // const ref = await encode({ value, hasher, codec });
170
+ const block = await doc2FPBlock(value);
171
+ // if (ref.cid.toString() !== block.cid.toString()) {
172
+ // debugger
173
+ // }
174
+ blocks.putSync(block);
175
+ return block.cid;
176
+ }
177
+
178
+ async function processFiles<T extends DocTypes>(store: StoreRuntime, blocks: CarTransaction, doc: DocSet<T>, logger: Logger) {
179
+ if (doc._files) {
180
+ await processFileset(logger, store, blocks, doc._files);
181
+ }
182
+ if (doc._publicFiles) {
183
+ await processFileset(logger, store, blocks, doc._publicFiles /*, true*/);
184
+ }
185
+ }
186
+
187
+ async function processFileset(
188
+ logger: Logger,
189
+ store: StoreRuntime,
190
+ blocks: CarTransaction,
191
+ files: DocFiles /*, publicFiles = false */,
192
+ ) {
193
+ const dbBlockstore = blocks.parent as unknown as EncryptedBlockstore;
194
+ if (!dbBlockstore.loader) throw logger.Error().Msg("Missing loader, ledger name is required").AsError();
195
+ const t = new CarTransactionImpl(dbBlockstore); // maybe this should move to encrypted-blockstore
196
+ const didPut = [];
197
+ // let totalSize = 0
198
+ for (const filename in files) {
199
+ if (File === files[filename].constructor) {
200
+ const file = files[filename] as File;
201
+
202
+ // totalSize += file.size
203
+ const { cid, blocks: fileBlocks } = await store.encodeFile(file);
204
+ didPut.push(filename);
205
+ for (const block of fileBlocks) {
206
+ // console.log("processFileset", block.cid.toString())
207
+ t.putSync(await fileBlock2FPBlock(block));
208
+ }
209
+ files[filename] = { cid, type: file.type, size: file.size, lastModified: file.lastModified } as DocFileMeta;
210
+ } else {
211
+ const { cid, type, size, car, lastModified } = files[filename] as DocFileMeta;
212
+ if (cid && type && size && car) {
213
+ files[filename] = { cid, type, size, car, lastModified };
214
+ }
215
+ }
216
+ }
217
+
218
+ if (didPut.length) {
219
+ const car = await dbBlockstore.loader.commitFiles(
220
+ t,
221
+ { files } as unknown as TransactionMeta /* {
222
+ public: publicFiles,
223
+ } */,
224
+ );
225
+ if (car) {
226
+ for (const name of didPut) {
227
+ files[name] = { car, ...files[name] } as DocFileMeta;
228
+ }
229
+ }
230
+ }
231
+ }
232
+
233
+ export async function getValueFromCrdt<T extends DocTypes>(
234
+ blocks: BaseBlockstore,
235
+ head: ClockHead,
236
+ key: string,
237
+ logger: Logger,
238
+ ): Promise<DocValue<T>> {
239
+ if (!head.length) throw logger.Debug().Msg("Getting from an empty ledger").AsError();
240
+ // console.log("getValueFromCrdt-1", head, key)
241
+ const link = await get(toPailFetcher(blocks), head, key);
242
+ // console.log("getValueFromCrdt-2", key)
243
+ if (!link) {
244
+ // Use NotFoundError instead of logging an error
245
+ throw new NotFoundError(`Not found: ${key}`);
246
+ }
247
+ const ret = await getValueFromLink<T>(blocks, link, logger);
248
+ // console.log("getValueFromCrdt-3", key)
249
+ return ret;
250
+ }
251
+
252
+ export function readFiles<T extends DocTypes>(blocks: BaseBlockstore, { doc }: Partial<DocValue<T>>) {
253
+ if (!doc) return;
254
+ if (doc._files) {
255
+ readFileset(blocks as EncryptedBlockstore, doc._files);
256
+ }
257
+ if (doc._publicFiles) {
258
+ readFileset(blocks as EncryptedBlockstore, doc._publicFiles, true);
259
+ }
260
+ }
261
+
262
+ function readFileset(blocks: EncryptedBlockstore, files: DocFiles, isPublic = false) {
263
+ for (const filename in files) {
264
+ const fileMeta = files[filename] as DocFileMeta;
265
+ if (fileMeta.cid) {
266
+ if (isPublic) {
267
+ fileMeta.url = `https://${fileMeta.cid.toString()}.ipfs.w3s.link/`;
268
+ }
269
+ if (fileMeta.car) {
270
+ fileMeta.file = async () => {
271
+ const result = await blocks.ebOpts.storeRuntime.decodeFile(
272
+ {
273
+ get: async (cid: AnyLink) => {
274
+ return await blocks.getFile(throwFalsy(fileMeta.car), cid);
275
+ },
276
+ },
277
+ fileMeta.cid,
278
+ fileMeta,
279
+ );
280
+ if (result.isErr()) {
281
+ throw blocks.logger.Error().Any("error", result.Err()).Any("cid", fileMeta.cid).Msg("Error decoding file").AsError();
282
+ }
283
+
284
+ return result.unwrap();
285
+ };
286
+ }
287
+ }
288
+ files[filename] = fileMeta;
289
+ }
290
+ }
291
+
292
+ async function getValueFromLink<T extends DocTypes>(blocks: BlockFetcher, link: AnyLink, logger: Logger): Promise<DocValue<T>> {
293
+ const block = await blocks.get(link);
294
+ if (!block) throw logger.Error().Str("link", link.toString()).Msg(`Missing linked block`).AsError();
295
+ const { value } = (await asyncBlockDecode({ bytes: block.bytes, hasher, codec })) as { value: DocValue<T> };
296
+ const cvalue = {
297
+ ...value,
298
+ cid: link,
299
+ };
300
+ readFiles(blocks as EncryptedBlockstore, cvalue);
301
+ return cvalue;
302
+ }
303
+
304
/**
 * EventFetcher that tolerates missing events: instead of propagating the
 * fetch error, it logs it and yields an event with `value: undefined`, so a
 * "dirty" DAG traversal can continue past holes.
 */
class DirtyEventFetcher<T> extends EventFetcher<T> {
  readonly logger: Logger;
  constructor(logger: Logger, blocks: BlockFetcher) {
    super(toPailFetcher(blocks));
    this.logger = logger;
  }
  async get(link: EventLink<T>): Promise<EventBlockView<T>> {
    try {
      return await super.get(link);
    } catch (e) {
      // Log-and-continue: callers check `value` for undefined and skip.
      this.logger.Error().Ref("link", link.toString()).Err(e).Msg("Missing event");
      return { value: undefined } as unknown as EventBlockView<T>;
    }
  }
}
319
+
320
+ export async function clockChangesSince<T extends DocTypes>(
321
+ blocks: BlockFetcher,
322
+ head: ClockHead,
323
+ since: ClockHead,
324
+ opts: ChangesOptions,
325
+ logger: Logger,
326
+ ): Promise<{ result: DocUpdate<T>[]; head: ClockHead }> {
327
+ const eventsFetcher = (
328
+ opts.dirty ? new DirtyEventFetcher<Operation>(logger, blocks) : new EventFetcher<Operation>(toPailFetcher(blocks))
329
+ ) as EventFetcher<Operation>;
330
+ const keys = new Set<string>();
331
+ const updates = await gatherUpdates<T>(
332
+ blocks,
333
+ eventsFetcher,
334
+ head,
335
+ since,
336
+ [],
337
+ keys,
338
+ new Set<string>(),
339
+ opts.limit || Infinity,
340
+ logger,
341
+ );
342
+ return { result: updates.reverse(), head };
343
+ }
344
+
345
+ async function gatherUpdates<T extends DocTypes>(
346
+ blocks: BlockFetcher,
347
+ eventsFetcher: EventFetcher<Operation>,
348
+ head: ClockHead,
349
+ since: ClockHead,
350
+ updates: DocUpdate<T>[] = [],
351
+ keys: Set<string>,
352
+ didLinks: Set<string>,
353
+ limit: number,
354
+ logger: Logger,
355
+ ): Promise<DocUpdate<T>[]> {
356
+ if (limit <= 0) return updates;
357
+ // if (Math.random() < 0.001) console.log('gatherUpdates', head.length, since.length, updates.length)
358
+ const sHead = head.map((l) => l.toString());
359
+ for (const link of since) {
360
+ if (sHead.includes(link.toString())) {
361
+ return updates;
362
+ }
363
+ }
364
+ for (const link of head) {
365
+ if (didLinks.has(link.toString())) continue;
366
+ didLinks.add(link.toString());
367
+ const { value: event } = await eventsFetcher.get(link);
368
+ if (!event) continue;
369
+ const { type } = event.data;
370
+ let ops = [] as PutOperation[];
371
+ if (type === "batch") {
372
+ ops = event.data.ops as PutOperation[];
373
+ } else if (type === "put") {
374
+ ops = [event.data] as PutOperation[];
375
+ }
376
+ for (let i = ops.length - 1; i >= 0; i--) {
377
+ const { key, value } = ops[i];
378
+ if (!keys.has(key)) {
379
+ // todo option to see all updates
380
+ const docValue = await getValueFromLink<T>(blocks, value, logger);
381
+ if (key === PARAM.GENESIS_CID) {
382
+ continue;
383
+ }
384
+ updates.push({ id: key, value: docValue.doc, del: docValue.del, clock: link });
385
+ limit--;
386
+ keys.add(key);
387
+ }
388
+ }
389
+ if (event.parents) {
390
+ updates = await gatherUpdates(blocks, eventsFetcher, event.parents, since, updates, keys, didLinks, limit, logger);
391
+ }
392
+ }
393
+ return updates;
394
+ }
395
+
396
+ export async function* getAllEntries<T extends DocTypes>(blocks: BlockFetcher, head: ClockHead, logger: Logger) {
397
+ // return entries(blocks, head)
398
+ for await (const [key, link] of entries(toPailFetcher(blocks), head)) {
399
+ // console.log("getAllEntries", key, link);
400
+ if (key !== PARAM.GENESIS_CID) {
401
+ const docValue = await getValueFromLink(blocks, link, logger);
402
+ yield { id: key, value: docValue.doc, del: docValue.del } as DocUpdate<T>;
403
+ }
404
+ }
405
+ }
406
+
407
+ export async function* clockVis(blocks: BlockFetcher, head: ClockHead) {
408
+ for await (const line of vis(toPailFetcher(blocks), head)) {
409
+ yield line;
410
+ }
411
+ }
412
+
413
+ let isCompacting = false;
414
+ export async function doCompact(blockLog: CompactFetcher, head: ClockHead, logger: Logger) {
415
+ if (isCompacting) {
416
+ // console.log('already compacting')
417
+ return;
418
+ }
419
+ isCompacting = true;
420
+
421
+ time("compact head");
422
+ for (const cid of head) {
423
+ const bl = await blockLog.get(cid);
424
+ if (!bl) throw logger.Error().Ref("cid", cid).Msg("Missing head block").AsError();
425
+ }
426
+ timeEnd("compact head");
427
+
428
+ // for await (const blk of blocks.entries()) {
429
+ // const bl = await blockLog.get(blk.cid)
430
+ // if (!bl) throw new Error('Missing tblock: ' + blk.cid.toString())
431
+ // }
432
+
433
+ // todo maybe remove
434
+ // for await (const blk of blocks.loader!.entries()) {
435
+ // const bl = await blockLog.get(blk.cid)
436
+ // if (!bl) throw new Error('Missing db block: ' + blk.cid.toString())
437
+ // }
438
+
439
+ time("compact all entries");
440
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
441
+ for await (const _entry of getAllEntries(blockLog, head, logger)) {
442
+ // result.push(entry)
443
+ // void 1;
444
+ // continue;
445
+ }
446
+ timeEnd("compact all entries");
447
+
448
+ // time("compact crdt entries")
449
+ // for await (const [, link] of entries(blockLog, head)) {
450
+ // const bl = await blockLog.get(link)
451
+ // if (!bl) throw new Error('Missing entry block: ' + link.toString())
452
+ // }
453
+ // timeEnd("compact crdt entries")
454
+
455
+ time("compact clock vis");
456
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
457
+ for await (const _line of vis(toPailFetcher(blockLog), head)) {
458
+ void 1;
459
+ }
460
+ timeEnd("compact clock vis");
461
+
462
+ time("compact root");
463
+ const result = await root(toPailFetcher(blockLog), head);
464
+ timeEnd("compact root");
465
+
466
+ time("compact root blocks");
467
+ for (const block of [...result.additions, ...result.removals]) {
468
+ blockLog.loggedBlocks.putSync(await anyBlock2FPBlock(block));
469
+ }
470
+ timeEnd("compact root blocks");
471
+
472
+ time("compact changes");
473
+ await clockChangesSince(blockLog, head, [], {}, logger);
474
+ timeEnd("compact changes");
475
+
476
+ isCompacting = false;
477
+ }
478
+
479
+ export async function getBlock(blocks: BlockFetcher, cidString: string) {
480
+ const block = await blocks.get(parse(cidString));
481
+ if (!block) throw new Error(`Missing block ${cidString}`);
482
+ const { cid, value } = await asyncBlockDecode({ bytes: block.bytes, codec, hasher });
483
+ return new Block({ cid, value, bytes: block.bytes });
484
+ }
package/crdt.d.ts ADDED
@@ -0,0 +1,40 @@
1
import type { Block } from "multiformats";
import { Logger, ResolveOnce } from "@adviser/cement";
import { type DocUpdate, type CRDTMeta, type ClockHead, type ChangesOptions, type DocValue, type IndexKeyType, type Falsy, type SuperThis, type LedgerOpts, type BaseBlockstore, type CRDT, type CRDTClock, type DocTypes, Ledger } from "@fireproof/core-types-base";
import { type Index } from "./indexer.js";
/** LedgerOpts with the index store URL made optional (index blockstore is opt-in). */
export type CRDTOpts = Omit<LedgerOpts, "storeUrls"> & {
    readonly storeUrls: {
        readonly data: LedgerOpts["storeUrls"]["data"];
        readonly idx?: LedgerOpts["storeUrls"]["idx"];
    };
};
/** Public surface of the concrete CRDT implementation (see crdt.js). */
export declare class CRDTImpl implements CRDT {
    readonly opts: CRDTOpts;
    readonly blockstore: BaseBlockstore;
    readonly indexBlockstore?: BaseBlockstore;
    readonly indexers: Map<string, Index<{}, IndexKeyType, {}>>;
    readonly clock: CRDTClock;
    readonly logger: Logger;
    readonly sthis: SuperThis;
    readonly crdt: CRDT;
    readonly ledgerParent?: Ledger;
    constructor(sthis: SuperThis, opts: CRDTOpts, parent?: Ledger);
    bulk<T extends DocTypes>(updates: DocUpdate<T>[]): Promise<CRDTMeta>;
    _bulk<T extends DocTypes>(updates: DocUpdate<T>[]): Promise<CRDTMeta>;
    readonly onceReady: ResolveOnce<void>;
    ready(): Promise<void>;
    close(): Promise<void>;
    destroy(): Promise<void>;
    allDocs<T extends DocTypes>(): Promise<{
        result: DocUpdate<T>[];
        head: ClockHead;
    }>;
    vis(): Promise<string>;
    getBlock(cidString: string): Promise<Block>;
    get(key: string): Promise<DocValue<DocTypes> | Falsy>;
    changes<T extends DocTypes>(since?: ClockHead, opts?: ChangesOptions): Promise<{
        result: DocUpdate<T>[];
        head: ClockHead;
    }>;
    compact(): Promise<void>;
}
package/crdt.js ADDED
@@ -0,0 +1,172 @@
1
+ import { ResolveOnce } from "@adviser/cement";
2
+ import { EncryptedBlockstore, toStoreRuntime } from "@fireproof/core-blockstore";
3
+ import { clockChangesSince, applyBulkUpdateToCrdt, getValueFromCrdt, readFiles, getAllEntries, clockVis, getBlock, doCompact, sanitizeDocumentFields, } from "./crdt-helpers.js";
4
+ import { PARAM, } from "@fireproof/core-types-base";
5
+ import { index } from "./indexer.js";
6
+ import { ensureLogger } from "@fireproof/core-runtime";
7
+ import { CRDTClockImpl } from "./crdt-clock.js";
8
// Compiled output of crdt.ts — generated code; the .ts sibling is canonical.
export class CRDTImpl {
    opts;
    blockstore;
    indexBlockstore;
    indexers = new Map();
    clock;
    logger;
    sthis;
    crdt;
    ledgerParent;
    constructor(sthis, opts, parent) {
        this.sthis = sthis;
        this.ledgerParent = parent;
        this.crdt = this;
        this.logger = ensureLogger(sthis, "CRDTImpl");
        this.opts = opts;
        this.blockstore = new EncryptedBlockstore(sthis, {
            // Translate commit-queue idle/busy events into blockstore-scoped events.
            tracer: (event) => {
                switch (event.event) {
                    case "idleFromCommitQueue":
                        opts.tracer({
                            event: "idleFromBlockstore",
                            blockstore: "data",
                            ledger: parent,
                        });
                        break;
                    case "busyFromCommitQueue":
                        opts.tracer({
                            event: "busyFromBlockstore",
                            blockstore: "data",
                            ledger: parent,
                            queueLen: event.queueLen,
                        });
                        break;
                    default:
                        return opts.tracer(event);
                }
            },
            // Incoming meta carries a clock head; merge it into ours.
            applyMeta: async (meta) => {
                const crdtMeta = meta;
                if (!crdtMeta.head)
                    throw this.logger.Error().Msg("missing head").AsError();
                await this.clock.applyHead(crdtMeta.head, []);
            },
            compact: async (blocks) => {
                await doCompact(blocks, this.clock.head, this.logger);
                return { head: this.clock.head };
            },
            gatewayInterceptor: opts.gatewayInterceptor,
            storeRuntime: toStoreRuntime(this.sthis, this.opts.storeEnDe),
            storeUrls: this.opts.storeUrls.data,
            keyBag: this.opts.keyBag,
            meta: this.opts.meta,
        }, this);
        if (this.opts.storeUrls.idx) {
            // Optional second blockstore dedicated to index data.
            this.indexBlockstore = new EncryptedBlockstore(sthis, {
                tracer: opts.tracer,
                applyMeta: async (meta) => {
                    const idxCarMeta = meta;
                    if (!idxCarMeta.indexes)
                        throw this.logger.Error().Msg("missing indexes").AsError();
                    for (const [name, idx] of Object.entries(idxCarMeta.indexes)) {
                        index(this, name, undefined, idx);
                    }
                },
                gatewayInterceptor: opts.gatewayInterceptor,
                storeRuntime: toStoreRuntime(this.sthis, this.opts.storeEnDe),
                storeUrls: this.opts.storeUrls.idx,
                keyBag: this.opts.keyBag,
            }, this);
        }
        this.clock = new CRDTClockImpl(this.blockstore);
        // When the clock head is replaced wholesale, derived indexes are stale.
        this.clock.onZoom(() => {
            for (const idx of this.indexers.values()) {
                idx._resetIndex();
            }
        });
    }
    async bulk(updates) {
        await this.ready();
        // Sanitize values (drop undefined/NaN, stringify Dates) before writing.
        updates = updates.map((dupdate) => ({
            ...dupdate,
            value: sanitizeDocumentFields(dupdate.value),
        }));
        if (this.clock.head.length === 0) {
            // Seed an empty ledger with the synthetic genesis document first.
            const value = { id: PARAM.GENESIS_CID, value: { _id: PARAM.GENESIS_CID } };
            await this._bulk([value]);
        }
        return await this._bulk(updates);
    }
    async _bulk(updates) {
        const prevHead = [...this.clock.head];
        const done = await this.blockstore.transaction(async (blocks) => {
            const { head } = await applyBulkUpdateToCrdt(this.blockstore.ebOpts.storeRuntime, blocks, this.clock.head, updates, this.logger);
            updates = updates.map((dupdate) => {
                readFiles(this.blockstore, { doc: dupdate.value });
                return dupdate;
            });
            return { head };
        });
        await this.clock.applyHead(done.meta.head, prevHead, updates);
        return done.meta;
    }
    onceReady = new ResolveOnce();
    async ready() {
        return this.onceReady.once(async () => {
            try {
                await Promise.all([
                    this.blockstore.ready(),
                    this.indexBlockstore ? this.indexBlockstore.ready() : Promise.resolve(),
                    this.clock.ready(),
                ]);
            }
            catch (e) {
                throw this.logger.Error().Err(e).Msg(`CRDT is not ready`).AsError();
            }
        });
    }
    async close() {
        await Promise.all([
            this.blockstore.close(),
            this.indexBlockstore ? this.indexBlockstore.close() : Promise.resolve(),
            this.clock.close(),
        ]);
    }
    async destroy() {
        await Promise.all([this.blockstore.destroy(), this.indexBlockstore ? this.indexBlockstore.destroy() : Promise.resolve()]);
    }
    async allDocs() {
        await this.ready();
        const result = [];
        for await (const entry of getAllEntries(this.blockstore, this.clock.head, this.logger)) {
            result.push(entry);
        }
        return { result, head: this.clock.head };
    }
    async vis() {
        await this.ready();
        const txt = [];
        for await (const line of clockVis(this.blockstore, this.clock.head)) {
            txt.push(line);
        }
        return txt.join("\n");
    }
    async getBlock(cidString) {
        await this.ready();
        return await getBlock(this.blockstore, cidString);
    }
    async get(key) {
        await this.ready();
        const result = await getValueFromCrdt(this.blockstore, this.clock.head, key, this.logger);
        if (result.del)
            return undefined;
        return result;
    }
    async changes(since = [], opts = {}) {
        await this.ready();
        return await clockChangesSince(this.blockstore, this.clock.head, since, opts, this.logger);
    }
    async compact() {
        const blocks = this.blockstore;
        return await blocks.compact();
    }
}
172
+ //# sourceMappingURL=crdt.js.map