@peerbit/log 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92) hide show
  1. package/LICENSE +23 -0
  2. package/README.md +11 -0
  3. package/lib/esm/change.d.ts +5 -0
  4. package/lib/esm/change.js +2 -0
  5. package/lib/esm/change.js.map +1 -0
  6. package/lib/esm/clock.d.ts +87 -0
  7. package/lib/esm/clock.js +260 -0
  8. package/lib/esm/clock.js.map +1 -0
  9. package/lib/esm/difference.d.ts +1 -0
  10. package/lib/esm/difference.js +20 -0
  11. package/lib/esm/difference.js.map +1 -0
  12. package/lib/esm/encoding.d.ts +7 -0
  13. package/lib/esm/encoding.js +20 -0
  14. package/lib/esm/encoding.js.map +1 -0
  15. package/lib/esm/entry-index.d.ts +21 -0
  16. package/lib/esm/entry-index.js +63 -0
  17. package/lib/esm/entry-index.js.map +1 -0
  18. package/lib/esm/entry-with-refs.d.ts +5 -0
  19. package/lib/esm/entry-with-refs.js +2 -0
  20. package/lib/esm/entry-with-refs.js.map +1 -0
  21. package/lib/esm/entry.d.ts +179 -0
  22. package/lib/esm/entry.js +591 -0
  23. package/lib/esm/entry.js.map +1 -0
  24. package/lib/esm/find-uniques.d.ts +1 -0
  25. package/lib/esm/find-uniques.js +12 -0
  26. package/lib/esm/find-uniques.js.map +1 -0
  27. package/lib/esm/heads-cache.d.ts +64 -0
  28. package/lib/esm/heads-cache.js +317 -0
  29. package/lib/esm/heads-cache.js.map +1 -0
  30. package/lib/esm/heads.d.ts +63 -0
  31. package/lib/esm/heads.js +143 -0
  32. package/lib/esm/heads.js.map +1 -0
  33. package/lib/esm/hrtime.d.ts +5 -0
  34. package/lib/esm/hrtime.js +71 -0
  35. package/lib/esm/hrtime.js.map +1 -0
  36. package/lib/esm/index.d.ts +11 -0
  37. package/lib/esm/index.js +11 -0
  38. package/lib/esm/index.js.map +1 -0
  39. package/lib/esm/is-defined.d.ts +1 -0
  40. package/lib/esm/is-defined.js +2 -0
  41. package/lib/esm/is-defined.js.map +1 -0
  42. package/lib/esm/log-errors.d.ts +5 -0
  43. package/lib/esm/log-errors.js +6 -0
  44. package/lib/esm/log-errors.js.map +1 -0
  45. package/lib/esm/log-sorting.d.ts +44 -0
  46. package/lib/esm/log-sorting.js +86 -0
  47. package/lib/esm/log-sorting.js.map +1 -0
  48. package/lib/esm/log.d.ts +205 -0
  49. package/lib/esm/log.js +1004 -0
  50. package/lib/esm/log.js.map +1 -0
  51. package/lib/esm/logger.d.ts +2 -0
  52. package/lib/esm/logger.js +4 -0
  53. package/lib/esm/logger.js.map +1 -0
  54. package/lib/esm/package.json +3 -0
  55. package/lib/esm/snapshot.d.ts +22 -0
  56. package/lib/esm/snapshot.js +83 -0
  57. package/lib/esm/snapshot.js.map +1 -0
  58. package/lib/esm/trim.d.ts +49 -0
  59. package/lib/esm/trim.js +203 -0
  60. package/lib/esm/trim.js.map +1 -0
  61. package/lib/esm/types.d.ts +6 -0
  62. package/lib/esm/types.js +23 -0
  63. package/lib/esm/types.js.map +1 -0
  64. package/lib/esm/utils.d.ts +2 -0
  65. package/lib/esm/utils.js +3 -0
  66. package/lib/esm/utils.js.map +1 -0
  67. package/lib/esm/values.d.ts +33 -0
  68. package/lib/esm/values.js +141 -0
  69. package/lib/esm/values.js.map +1 -0
  70. package/package.json +79 -0
  71. package/src/change.ts +2 -0
  72. package/src/clock.ts +280 -0
  73. package/src/difference.ts +22 -0
  74. package/src/encoding.ts +27 -0
  75. package/src/entry-index.ts +78 -0
  76. package/src/entry-with-refs.ts +6 -0
  77. package/src/entry.ts +749 -0
  78. package/src/find-uniques.ts +14 -0
  79. package/src/heads-cache.ts +400 -0
  80. package/src/heads.ts +208 -0
  81. package/src/hrtime.ts +78 -0
  82. package/src/index.ts +11 -0
  83. package/src/is-defined.ts +1 -0
  84. package/src/log-errors.ts +9 -0
  85. package/src/log-sorting.ts +108 -0
  86. package/src/log.ts +1262 -0
  87. package/src/logger.ts +3 -0
  88. package/src/snapshot.ts +103 -0
  89. package/src/trim.ts +269 -0
  90. package/src/types.ts +12 -0
  91. package/src/utils.ts +2 -0
  92. package/src/values.ts +193 -0
package/src/log.ts ADDED
@@ -0,0 +1,1262 @@
1
+ import {
2
+ SignatureWithKey,
3
+ randomBytes,
4
+ sha256Base64Sync,
5
+ Identity,
6
+ Keychain,
7
+ X25519Keypair,
8
+ } from "@peerbit/crypto";
9
+ import { Cache } from "@peerbit/cache";
10
+ import { SimpleLevel } from "@peerbit/lazy-level";
11
+
12
+ import { EntryIndex } from "./entry-index.js";
13
+ import * as LogError from "./log-errors.js";
14
+ import * as Sorting from "./log-sorting.js";
15
+ import { isDefined } from "./is-defined.js";
16
+ import { findUniques } from "./find-uniques.js";
17
+ import {
18
+ EncryptionTemplateMaybeEncrypted,
19
+ Entry,
20
+ Payload,
21
+ CanAppend,
22
+ EntryType,
23
+ } from "./entry.js";
24
+ import {
25
+ HLC,
26
+ LamportClock as Clock,
27
+ LamportClock,
28
+ Timestamp,
29
+ } from "./clock.js";
30
+
31
+ import { field, fixedArray, variant } from "@dao-xyz/borsh";
32
+ import { Encoding, NO_ENCODING } from "./encoding.js";
33
+ import { CacheUpdateOptions, HeadsIndex } from "./heads.js";
34
+ import { EntryNode, Values } from "./values.js";
35
+ import { Trim, TrimOptions } from "./trim.js";
36
+ import { logger } from "./logger.js";
37
+ import { Change } from "./change.js";
38
+ import { EntryWithRefs } from "./entry-with-refs.js";
39
+ import { Blocks } from "@peerbit/blocks-interface";
40
+
41
// Convenience aliases: LastWriteWins is the default ordering; NoZeroes wraps a
// sort function so that ties (comparison result 0) between distinct entries are rejected.
const { LastWriteWins, NoZeroes } = Sorting;

/** Callbacks fired by the log. */
export type LogEvents<T> = {
	/** Invoked after each mutation with the entries that were added/removed. */
	onChange?: (change: Change<T>) => void;
};

/** Storage-related options. */
export type MemoryProperties = {
	/** Optional persistent store used to cache the current heads between sessions. */
	cache?: SimpleLevel;
};

/** Construction-time configuration for a Log. */
export type LogProperties<T> = {
	/** Used to resolve decryption keys for encrypted entries. */
	keychain?: Keychain;
	/** Payload (de)serializer; defaults to NO_ENCODING. */
	encoding?: Encoding<T>;
	// NOTE(review): accepted here but not read anywhere in this file — confirm usage elsewhere.
	clock?: LamportClock;
	/** Total order over entries; defaults to LastWriteWins. */
	sortFn?: Sorting.ISortFunction;
	/** Default trimming policy applied after writes. */
	trim?: TrimOptions;
	/** Access-control predicate evaluated before entries are appended/joined. */
	canAppend?: CanAppend<T>;
};

export type LogOptions<T> = LogProperties<T> & LogEvents<T> & MemoryProperties;

// Max number of entries kept in the in-memory entry cache. TODO as param
const ENTRY_CACHE_MAX = 1000;

/** Per-call options for Log.append. */
export type AppendOptions<T> = {
	// NOTE(review): processEntry() prunes history behind EntryType.CUT entries — confirm intended semantics.
	type?: EntryType;
	/** Seed used when deriving the entry gid. */
	gidSeed?: Uint8Array;
	/** Explicit parents; [] means "no parents". Defaults to the current heads. Each must already be saved (have a hash). */
	nexts?: Entry<any>[];
	/** Identity used to sign the entry; defaults to the log identity. */
	identity?: Identity;
	/** Additional signers applied to the entry. */
	signers?: ((
		data: Uint8Array
	) => Promise<SignatureWithKey> | SignatureWithKey)[];
	/** Called with gids that are no longer represented among the heads after this append. */
	onGidsShadowed?: (gids: string[]) => void;
	/** Trim policy override for this append. */
	trim?: TrimOptions;
	/** Clock timestamp override; defaults to the local HLC. */
	timestamp?: Timestamp;
	/** Encrypt the entry for the given receivers ("reciever" spelling is part of the public API). */
	encryption?: {
		keypair: X25519Keypair;
		reciever: EncryptionTemplateMaybeEncrypted;
	};
};
80
+
81
+ @variant(0)
82
+ export class Log<T> {
83
	// Serialized log id (32 random bytes unless supplied)
	@field({ type: fixedArray("u8", 32) })
	private _id: Uint8Array;

	private _sortFn: Sorting.ISortFunction; // total order over entries (wrapped with NoZeroes in open())
	private _storage: Blocks; // block store holding serialized entries
	private _hlc: HLC; // hybrid logical clock for new entries

	// Identity
	private _identity: Identity;

	// Keeping track of entries
	private _entryIndex: EntryIndex<T>; // hash -> entry lookup (with cache)
	private _headsIndex: HeadsIndex<T>; // current heads (entries without children)
	private _values: Values<T>; // linked list of entries in sort order

	// Index of all next pointers in this log (parent hash -> set of child hashes)
	private _nextsIndex: Map<string, Set<string>>;
	private _keychain?: Keychain; // decryption key resolution
	private _encoding: Encoding<T>; // payload (de)serializer
	private _trim: Trim<T>; // trimming policy executor
	private _entryCache: Cache<Entry<T>>; // bounded in-memory entry cache

	private _canAppend?: CanAppend<T>; // access-control predicate (wrapped in open())
	private _onChange?: (change: Change<T>) => void; // mutation callback
	private _closed = true; // true until open() completes
	private _memory?: SimpleLevel; // optional persistent heads cache
	private _joining: Map<string, Promise<any>>; // entry hashes that are currently joining into this log
110
+
111
	/**
	 * @param properties.id 32-byte log id; a random one is generated when omitted.
	 */
	constructor(properties?: { id?: Uint8Array }) {
		this._id = properties?.id || randomBytes(32);
	}
114
+
115
	/**
	 * Initialize the log against a block store and identity.
	 * Must be called before reads/writes; throws if the log is already open.
	 * @param store block storage for serialized entries
	 * @param identity default signer for appended entries
	 * @param options see LogOptions
	 */
	async open(store: Blocks, identity: Identity, options: LogOptions<T> = {}) {
		if (!isDefined(store)) {
			throw LogError.BlockStoreNotDefinedError();
		}

		if (!isDefined(identity)) {
			throw new Error("Identity is required");
		}

		if (this.closed === false) {
			throw new Error("Already open");
		}

		const { encoding, trim, keychain, cache } = options;
		let { sortFn } = options;

		if (!isDefined(sortFn)) {
			sortFn = LastWriteWins;
		}
		sortFn = sortFn as Sorting.ISortFunction;

		// NoZeroes rejects sort functions that report ties for distinct entries
		this._sortFn = NoZeroes(sortFn);
		this._storage = store;
		this._memory = cache;
		if (this._memory && this._memory.status !== "open") {
			await this._memory.open();
		}

		this._encoding = encoding || NO_ENCODING;
		this._joining = new Map();

		// Identity
		this._identity = identity;

		// encoder/decoder
		this._keychain = keychain;

		// Clock
		this._hlc = new HLC();

		this._nextsIndex = new Map();
		const id = this.id;
		if (!id) {
			throw new Error("Id not set");
		}
		this._headsIndex = new HeadsIndex(id);
		await this._headsIndex.init(this);
		this._entryCache = new Cache({ max: ENTRY_CACHE_MAX });
		this._entryIndex = new EntryIndex({
			store: this._storage,
			init: (e) => e.init(this),
			cache: this._entryCache,
		});
		this._values = new Values(this._entryIndex, this._sortFn);
		this._trim = new Trim(
			{
				// Removes a trimmed node from every index and from block storage,
				// returning the removed entry (or undefined if it was not present).
				deleteNode: async (node: EntryNode) => {
					// TODO check if we have before delete?
					const entry = await this.get(node.value.hash);
					//f (!!entry)
					const a = this.values.length;
					if (entry) {
						this.values.deleteNode(node);
						await this.entryIndex.delete(node.value.hash);
						await this.headsIndex.del(node.value);
						this.nextsIndex.delete(node.value.hash);
						await this.storage.rm(node.value.hash);
					}
					const b = this.values.length;
					// NOTE(review): this sanity check also throws when `entry` was not
					// found at all (nothing deleted, so a === b) — confirm that is intended.
					if (a === b) {
						throw new Error(
							"UNexpected: " +
								this.values.length +
								"_-- " +
								this.entryIndex._index.size
						);
					}
					return entry;
				},
				values: () => this.values,
			},
			trim
		);

		// Wrap the user predicate so that "no option" means "allow everything"
		this._canAppend = async (entry) => {
			if (options?.canAppend) {
				if (!(await options.canAppend(entry))) {
					return false;
				}
			}
			return true;
		};

		this._onChange = options?.onChange;
		this._closed = false;
	}
211
+
212
	// Memoized string form of the log id
	private _idString: string | undefined;

	/** Stable string form of the log id (sha256, base64-encoded), computed lazily. */
	get idString() {
		if (!this.id) {
			throw new Error("Id not set");
		}
		return this._idString || (this._idString = Log.createIdString(this.id));
	}
220
+
221
	/** Derive the canonical string id (sha256, base64) for a raw log id. */
	public static createIdString(id: Uint8Array) {
		return sha256Base64Sync(id);
	}
224
+
225
	/** Raw 32-byte log id. */
	get id() {
		return this._id;
	}

	/** Replace the log id. Only permitted while the log is closed. */
	set id(id: Uint8Array) {
		if (this.closed === false) {
			throw new Error("Can not change id after open");
		}
		this._idString = undefined; // invalidate the memoized string form
		this._id = id;
	}
235
+
236
	/**
	 * Returns the length of the log.
	 */
	get length() {
		return this._values.length;
	}

	/** Ordered collection of all entries currently in the log. */
	get values(): Values<T> {
		return this._values;
	}
246
+
247
	/**
	 * Checks if a entry is part of the log
	 * @param {string} hash The hash of the entry
	 * @returns {boolean}
	 */

	has(cid: string) {
		// In-memory index lookup only; never touches block storage
		return this._entryIndex._index.has(cid);
	}
	/**
	 * Get all entries sorted. Don't use this method anywhere where performance matters
	 */
	toArray(): Promise<Entry<T>[]> {
		// we call init, because the values might be uninitialized
		return this._values.toArray().then((arr) => arr.map((x) => x.init(this)));
	}
263
+
264
	/**
	 * Returns the head index
	 */
	get headsIndex(): HeadsIndex<T> {
		return this._headsIndex;
	}

	/** Optional persistent cache supplied at open(). */
	get memory(): SimpleLevel | undefined {
		return this._memory;
	}
274
+
275
+ /**
276
+ * Don't use this anywhere performance matters
277
+ */
278
+ async getHeads(): Promise<Entry<T>[]> {
279
+ const heads: Promise<Entry<T> | undefined>[] = new Array(
280
+ this.headsIndex.index.size
281
+ );
282
+ let i = 0;
283
+ for (const hash of this.headsIndex.index) {
284
+ heads[i++] = this._entryIndex.get(hash).then((x) => x?.init(this));
285
+ }
286
+ const resolved = await Promise.all(heads);
287
+ const defined = resolved.filter((x) => !!x);
288
+ if (defined.length !== resolved.length) {
289
+ logger.error("Failed to resolve all heads");
290
+ }
291
+ return defined as Entry<T>[];
292
+ }
293
+
294
	/**
	 * Returns an array of Entry objects that reference entries which
	 * are not in the log currently.
	 * @returns {Array<Entry<T>>}
	 */
	async getTails(): Promise<Entry<T>[]> {
		return Log.findTails(await this.toArray());
	}

	/**
	 * Returns an array of hashes that are referenced by entries which
	 * are not in the log currently.
	 * @returns {Array<string>} Array of hashes
	 */
	async getTailHashes(): Promise<string[]> {
		return Log.findTailHashes(await this.toArray());
	}
311
+
312
	/**
	 * Get local HLC
	 */
	get hlc(): HLC {
		return this._hlc;
	}

	/** Identity used to sign appended entries. */
	get identity(): Identity {
		return this._identity;
	}

	/** Block store holding serialized entries. */
	get storage(): Blocks {
		return this._storage;
	}

	/** Forward-reference index: parent hash -> set of child hashes. */
	get nextsIndex(): Map<string, Set<string>> {
		return this._nextsIndex;
	}

	/** Hash -> entry lookup. */
	get entryIndex(): EntryIndex<T> {
		return this._entryIndex;
	}

	/** Keychain for resolving decryption keys, if configured. */
	get keychain() {
		return this._keychain;
	}

	/** Payload (de)serializer. */
	get encoding() {
		return this._encoding;
	}

	/** Sort function (NoZeroes-wrapped) ordering the log. */
	get sortFn() {
		return this._sortFn;
	}

	/** True until open() has completed. */
	get closed() {
		return this._closed;
	}
350
+
351
	/**
	 * Set the identity for the log. Affects subsequently appended entries only;
	 * existing entries keep their original signatures.
	 * @param {Identity} [identity] The identity to be set
	 */
	setIdentity(identity: Identity) {
		this._identity = identity;
	}
358
+
359
	/**
	 * Find an entry by hash (from cache/index, falling back to block storage).
	 * @param {string} [hash] The hash of the entry
	 * @param options.timeout max time to wait when fetching from storage
	 */
	get(
		hash: string,
		options?: { timeout?: number }
	): Promise<Entry<T> | undefined> {
		return this._entryIndex.get(hash, options);
	}
369
+
370
+ async traverse(
371
+ rootEntries: Entry<T>[],
372
+ amount = -1,
373
+ endHash?: string
374
+ ): Promise<{ [key: string]: Entry<T> }> {
375
+ // Sort the given given root entries and use as the starting stack
376
+ let stack: Entry<T>[] = rootEntries.sort(this._sortFn).reverse();
377
+
378
+ // Cache for checking if we've processed an entry already
379
+ let traversed: { [key: string]: boolean } = {};
380
+ // End result
381
+ const result: { [key: string]: Entry<T> } = {};
382
+ let count = 0;
383
+ // Named function for getting an entry from the log
384
+ const getEntry = (e: string) => this.get(e);
385
+
386
+ // Add an entry to the stack and traversed nodes index
387
+ const addToStack = (entry: Entry<T>) => {
388
+ // If we've already processed the Entry<T>, don't add it to the stack
389
+ if (!entry || traversed[entry.hash]) {
390
+ return;
391
+ }
392
+
393
+ // Add the entry in front of the stack and sort
394
+ stack = [entry, ...stack].sort(this._sortFn).reverse();
395
+ // Add to the cache of processed entries
396
+ traversed[entry.hash] = true;
397
+ };
398
+
399
+ const addEntry = (rootEntry: Entry<T>) => {
400
+ result[rootEntry.hash] = rootEntry;
401
+ traversed[rootEntry.hash] = true;
402
+ count++;
403
+ };
404
+
405
+ // Start traversal
406
+ // Process stack until it's empty (traversed the full log)
407
+ // or when we have the requested amount of entries
408
+ // If requested entry amount is -1, traverse all
409
+ while (stack.length > 0 && (count < amount || amount < 0)) {
410
+ // eslint-disable-line no-unmodified-loop-condition
411
+ // Get the next element from the stack
412
+ const entry = stack.shift();
413
+ if (!entry) {
414
+ throw new Error("Unexpected");
415
+ }
416
+ // Add to the result
417
+ addEntry(entry);
418
+ // If it is the specified end hash, break out of the while loop
419
+ if (endHash && endHash === entry.hash) break;
420
+
421
+ // Add entry's next references to the stack
422
+ const entries = (await Promise.all(entry.next.map(getEntry))).filter(
423
+ (x) => !!x
424
+ ) as Entry<any>[];
425
+ entries.forEach(addToStack);
426
+ }
427
+
428
+ stack = [];
429
+ traversed = {};
430
+ // End result
431
+ return result;
432
+ }
433
+
434
	/**
	 * Sample up to `pointerCount` entries reachable from `from`, taken at
	 * exponentially growing ancestor distances (2, 4, 8, ...).
	 * @param options.pointerCount max number of entries to return (default 0 -> none)
	 * @param options.memoryLimit stop sampling once accumulated payload bytes exceed this
	 */
	async getReferenceSamples(
		from: Entry<T>,
		options?: { pointerCount?: number; memoryLimit?: number }
	): Promise<Entry<T>[]> {
		const hashes = new Set<string>();
		const pointerCount = options?.pointerCount || 0;
		const memoryLimit = options?.memoryLimit;
		const maxDistance = Math.min(pointerCount, this._values.length);
		if (maxDistance === 0) {
			return [];
		}
		hashes.add(from.hash);
		let memoryCounter = from._payload.byteLength;
		if (from.next?.length > 0 && pointerCount >= 2) {
			let next = new Set(from.next);
			let prev = 2;
			// i doubles each round so samples are taken at exponentially
			// increasing distances from `from`
			outer: for (let i = 2; i <= maxDistance - 1; i *= 2) {
				// Advance (i - prev) generations toward the roots
				for (let j = prev; j < i; j++) {
					if (next.size === 0) {
						break outer;
					}
					const nextNext = new Set<string>();
					for (const n of next) {
						const nentry = await this.get(n);
						nentry?.next.forEach((n2) => {
							nextNext.add(n2);
						});
					}
					next = nextNext;
				}

				prev = i;
				if (next) {
					for (const n of next) {
						if (!memoryLimit) {
							hashes.add(n);
						} else {
							// Enforce the byte budget: resolve the entry to count its payload
							const entry = await this.get(n);
							if (!entry) {
								break outer;
							}
							memoryCounter += entry._payload.byteLength;
							if (memoryCounter > memoryLimit) {
								break outer;
							}
							hashes.add(n);
						}
						if (hashes.size === pointerCount) {
							break outer;
						}
					}
				}
			}
		}

		// Resolve the sampled hashes; silently drop any that are missing
		const ret: Entry<any>[] = [];
		for (const hash of hashes) {
			const entry = await this.get(hash);
			if (entry) {
				ret.push(entry);
			}
		}
		return ret;
	}
498
+
499
	/**
	 * Append a new entry to the log.
	 * @param data payload value (serialized via the log encoding)
	 * @param options see AppendOptions
	 * @returns the created entry plus any entries removed by CUT processing/trimming
	 */
	async append(
		data: T,
		options: AppendOptions<T> = {}
	): Promise<{ entry: Entry<T>; removed: Entry<T>[] }> {
		// Explicit nexts must already be persisted (they are referenced by hash)
		if (options.nexts) {
			for (const n of options.nexts) {
				if (!n.hash)
					throw new Error(
						"Expecting nexts to already be saved. missing hash for one or more entries"
					);
			}
		}

		await this.load({ reload: false });

		const hasNext = !!options.nexts; // true for [], which means we have explicitly said that nexts are empty
		const nexts: Entry<any>[] = options.nexts || (await this.getHeads());

		// Calculate max time for log/graph
		const clock = new Clock({
			id: this._identity.publicKey.bytes,
			timestamp: options.timestamp || this._hlc.now(),
		});

		const entry = await Entry.create<T>({
			store: this._storage,
			identity: options.identity || this._identity,
			signers: options.signers,
			data,
			clock,
			type: options.type,
			encoding: this._encoding,
			next: nexts,
			gidSeed: options.gidSeed,
			encryption: options.encryption
				? {
						keypair: options.encryption.keypair,
						reciever: {
							...options.encryption.reciever,
						},
				  }
				: undefined,
			canAppend: this._canAppend,
		});

		if (!isDefined(entry.hash)) {
			throw new Error("Unexpected");
		}

		// Register the new entry as a child of each of its parents
		for (const e of nexts) {
			let nextIndexSet = this._nextsIndex.get(e.hash);
			if (!nextIndexSet) {
				nextIndexSet = new Set();
				nextIndexSet.add(entry.hash);
				this._nextsIndex.set(e.hash, nextIndexSet);
			} else {
				nextIndexSet.add(entry.hash);
			}
		}

		// Track gids that stop being represented among the heads
		const removedGids: Set<string> = new Set();
		if (hasNext) {
			for (const next of nexts) {
				const deletion = await this._headsIndex.del(next);
				if (deletion.lastWithGid && next.gid !== entry.gid) {
					removedGids.add(next.gid);
				}
			}
		} else {
			// next is all heads, which means we should just overwrite
			for (const key of this.headsIndex.gids.keys()) {
				if (key !== entry.gid) {
					removedGids.add(key);
				}
			}
			await this.headsIndex.reset([entry], { cache: { update: false } });
		}

		await this._entryIndex.set(entry, false); // save === false, because its already saved when Entry.create
		await this._headsIndex.put(entry, { cache: { update: false } }); // we will update the cache a few lines later *
		await this._values.put(entry);

		const removed = await this.processEntry(entry);

		// if next contains all gids
		if (options.onGidsShadowed && removedGids.size > 0) {
			options.onGidsShadowed([...removedGids]);
		}

		entry.init({ encoding: this._encoding, keychain: this._keychain });
		// console.log('put entry', entry.hash, (await this._entryIndex._index.size));

		const trimmed = await this.trim(options?.trim);

		for (const entry of trimmed) {
			removed.push(entry);
		}

		const changes: Change<T> = {
			added: [entry],
			removed: removed,
		};

		await this._headsIndex.updateHeadsCache(changes); // * here
		await this._onChange?.(changes);
		return { entry, removed };
	}
612
+
613
+ async reset(entries: Entry<T>[], heads?: (string | Entry<T>)[]) {
614
+ this._nextsIndex = new Map();
615
+ this._entryIndex = new EntryIndex({
616
+ store: this._storage,
617
+ init: (e) => e.init(this),
618
+ cache: this._entryCache,
619
+ });
620
+ const promises: Promise<any>[] = [];
621
+ const set = new Set<string>();
622
+ const uniqueEntries: Entry<T>[] = [];
623
+ for (const entry of entries) {
624
+ if (!entry.hash) {
625
+ throw new Error("Unexpected");
626
+ }
627
+
628
+ if (set.has(entry.hash)) {
629
+ continue;
630
+ }
631
+
632
+ set.add(entry.hash);
633
+ uniqueEntries.push(entry);
634
+ promises.push(this._entryIndex.set(entry));
635
+ }
636
+
637
+ await Promise.all(promises);
638
+
639
+ // Set heads if not passed as an argument
640
+ const foundHeads = heads
641
+ ? ((await Promise.all(
642
+ heads.map((x) => {
643
+ if (x instanceof Entry) return x;
644
+ const resolved = this._entryIndex.get(x);
645
+ if (!resolved) {
646
+ throw new Error("Missing head with cid: " + x);
647
+ }
648
+ return resolved;
649
+ })
650
+ )) as Entry<T>[])
651
+ : Log.findHeads(uniqueEntries);
652
+
653
+ await this._headsIndex.reset(foundHeads);
654
+
655
+ this._values = new Values(this._entryIndex, this._sortFn, uniqueEntries);
656
+
657
+ for (const e of entries) {
658
+ for (const a of e.next) {
659
+ let nextIndexSet = this._nextsIndex.get(a);
660
+ if (!nextIndexSet) {
661
+ nextIndexSet = new Set();
662
+ nextIndexSet.add(e.hash);
663
+ this._nextsIndex.set(a, nextIndexSet);
664
+ } else {
665
+ nextIndexSet.add(e.hash);
666
+ }
667
+ }
668
+ }
669
+ }
670
+
671
	/**
	 * Delete entries from the log and notify onChange.
	 * @param entry entry or entries to remove
	 * @param options.recursively also delete everything the entries transitively point to
	 * @returns the change (removed entries only; added is always empty)
	 */
	async remove(
		entry: Entry<T> | Entry<T>[],
		options?: { recursively?: boolean }
	): Promise<Change<T>> {
		await this.load({ reload: false });
		const entries = Array.isArray(entry) ? entry : [entry];

		if (entries.length === 0) {
			return {
				added: [],
				removed: [],
			};
		}

		if (options?.recursively) {
			await this.deleteRecursively(entry);
		} else {
			for (const entry of entries) {
				await this.delete(entry);
			}
		}

		const change: Change<T> = {
			added: [],
			removed: Array.isArray(entry) ? entry : [entry],
		};

		/* await Promise.all([
			this._logCache?.queue(change),
			this._onUpdate(change),
		]); */
		await this._onChange?.(change);
		return change;
	}
705
+
706
+ iterator(options?: {
707
+ from?: "tail" | "head";
708
+ amount?: number;
709
+ }): IterableIterator<string> {
710
+ const from = options?.from || "tail";
711
+ const amount = typeof options?.amount === "number" ? options?.amount : -1;
712
+ let next = from === "tail" ? this._values.tail : this._values.head;
713
+ const nextFn = from === "tail" ? (e) => e.prev : (e) => e.next;
714
+ return (function* () {
715
+ let counter = 0;
716
+ while (next) {
717
+ if (amount >= 0 && counter >= amount) {
718
+ return;
719
+ }
720
+
721
+ yield next.value.hash;
722
+ counter++;
723
+
724
+ next = nextFn(next);
725
+ }
726
+ })();
727
+ }
728
+
729
	/**
	 * Apply a trim policy (defaults to the one configured at open()) and
	 * return the entries that were pruned.
	 */
	async trim(option: TrimOptions | undefined = this._trim.options) {
		return this._trim.trim(option);
	}
732
+
733
+ /**
734
+ *
735
+ * @param entries
736
+ * @returns change
737
+ */
738
+ /* async sync(
739
+ entries: (EntryWithRefs<T> | Entry<T> | string)[],
740
+ options: {
741
+ canAppend?: CanAppend<T>;
742
+ onChange?: (change: Change<T>) => void | Promise<void>;
743
+ timeout?: number;
744
+ } = {}
745
+ ): Promise<void> {
746
+
747
+
748
+ logger.debug(`Sync request #${entries.length}`);
749
+ const entriesToJoin: (Entry<T> | string)[] = [];
750
+ for (const e of entries) {
751
+ if (e instanceof Entry || typeof e === "string") {
752
+ entriesToJoin.push(e);
753
+ } else {
754
+ for (const ref of e.references) {
755
+ entriesToJoin.push(ref);
756
+ }
757
+ entriesToJoin.push(e.entry);
758
+ }
759
+ }
760
+
761
+ await this.join(entriesToJoin, {
762
+ canAppend: (entry) => {
763
+ const canAppend = options?.canAppend || this.canAppend;
764
+ return !canAppend || canAppend(entry);
765
+ },
766
+ onChange: (change) => {
767
+ options?.onChange?.(change);
768
+ return this._onChange?.({
769
+ added: change.added,
770
+ removed: change.removed,
771
+ });
772
+ },
773
+ timeout: options.timeout,
774
+ });
775
+ } */
776
+
777
	/**
	 * Merge entries (or another log) into this log. Missing ancestors are
	 * fetched from block storage, then entries are applied bottom-up (oldest
	 * first) via joinEntry so heads and indices stay consistent.
	 * @param entriesOrLog entries, hashes, entries-with-references, or a whole Log
	 * @param options.verifySignatures verify each new entry's signatures before applying
	 * @param options.timeout max time to wait per block-store fetch
	 */
	async join(
		entriesOrLog: (string | Entry<T> | EntryWithRefs<T>)[] | Log<T>,
		options?: {
			verifySignatures?: boolean;
			trim?: TrimOptions;
			timeout?: number;
		} & CacheUpdateOptions
	): Promise<void> {
		await this.load({ reload: false });
		if (entriesOrLog.length === 0) {
			return;
		}
		/* const joinLength = options?.length ?? Number.MAX_SAFE_INTEGER; TODO */
		const visited = new Set<string>();
		const nextRefs: Map<string, Entry<T>[]> = new Map();
		const entriesBottomUp: Entry<T>[] = [];
		const stack: string[] = [];
		const resolvedEntries: Map<string, Entry<T>> = new Map();
		const entries = Array.isArray(entriesOrLog)
			? entriesOrLog
			: await entriesOrLog.values.toArray();

		// Build a list of already resolved entries, and filter out already joined entries
		for (const e of entries) {
			// TODO, do this less ugly
			let hash: string;
			if (e instanceof Entry) {
				hash = e.hash;
				resolvedEntries.set(e.hash, e);
				if (this.has(hash)) {
					continue;
				}
				stack.push(hash);
			} else if (typeof e === "string") {
				hash = e;

				if (this.has(hash)) {
					continue;
				}
				stack.push(hash);
			} else {
				// EntryWithRefs: the entry plus pre-fetched references
				hash = e.entry.hash;
				resolvedEntries.set(e.entry.hash, e.entry);
				if (this.has(hash)) {
					continue;
				}
				stack.push(hash);

				for (const e2 of e.references) {
					resolvedEntries.set(e2.hash, e2);
					if (this.has(e2.hash)) {
						continue;
					}
					stack.push(e2.hash);
				}
			}
		}

		// Resolve missing entries; find the "roots" (entries whose parents are
		// all already in this log) from which bottom-up application starts.
		// NOTE(review): removedHeads is collected but never read afterwards — confirm.
		const removedHeads: Entry<T>[] = [];
		for (const hash of stack) {
			if (visited.has(hash) || this.has(hash)) {
				continue;
			}
			visited.add(hash);

			const entry =
				resolvedEntries.get(hash) ||
				(await Entry.fromMultihash<T>(this._storage, hash, {
					timeout: options?.timeout,
				}));

			entry.init(this);
			resolvedEntries.set(entry.hash, entry);

			let nexts: string[];
			if (
				entry.metadata.type !== EntryType.CUT &&
				(nexts = await entry.getNext())
			) {
				let isRoot = true;
				for (const next of nexts) {
					if (!this.has(next)) {
						isRoot = false;
					} else {
						// Parent is present and currently a head: it stops being one
						if (this._headsIndex.has(next)) {
							const toRemove = (await this.get(next, options))!;
							await this._headsIndex.del(toRemove);
							removedHeads.push(toRemove);
						}
					}
					// Record entry as a pending child of `next`
					let nextIndexSet = nextRefs.get(next);
					if (!nextIndexSet) {
						nextIndexSet = [];
						nextIndexSet.push(entry);
						nextRefs.set(next, nextIndexSet);
					} else {
						nextIndexSet.push(entry);
					}
					if (!visited.has(next)) {
						stack.push(next);
					}
				}
				if (isRoot) {
					entriesBottomUp.push(entry);
				}
			} else {
				// CUT entries (or entries without nexts) start their own branch
				entriesBottomUp.push(entry);
			}
		}

		// Apply bottom-up; _joining serializes concurrent joins of the same hash
		while (entriesBottomUp.length > 0) {
			const e = entriesBottomUp.shift()!;
			await this._joining.get(e.hash);
			const p = this.joinEntry(e, nextRefs, entriesBottomUp, options).then(
				() => this._joining.delete(e.hash) // TODO, if head we run into problems with concurrency here!, we add heads at line 929 but resolve here
			);
			this._joining.set(e.hash, p);
			await p;
		}
	}
898
+
899
+ private async joinEntry(
900
+ e: Entry<T>,
901
+ nextRefs: Map<string, Entry<T>[]>,
902
+ stack: Entry<T>[],
903
+ options?: {
904
+ verifySignatures?: boolean;
905
+ trim?: TrimOptions;
906
+ length?: number;
907
+ } & CacheUpdateOptions
908
+ ): Promise<void> {
909
+ if (this.length > (options?.length ?? Number.MAX_SAFE_INTEGER)) {
910
+ return;
911
+ }
912
+
913
+ if (!isDefined(e.hash)) {
914
+ throw new Error("Unexpected");
915
+ }
916
+
917
+ if (!this.has(e.hash)) {
918
+ if (options?.verifySignatures) {
919
+ if (!(await e.verifySignatures())) {
920
+ throw new Error('Invalid signature entry with hash "' + e.hash + '"');
921
+ }
922
+ }
923
+
924
+ if (this?._canAppend && !(await this?._canAppend(e))) {
925
+ return;
926
+ }
927
+
928
+ // Update the internal entry index
929
+ await this._entryIndex.set(e);
930
+ await this._values.put(e);
931
+
932
+ if (e.metadata.type !== EntryType.CUT) {
933
+ for (const a of e.next) {
934
+ if (!this.has(a)) {
935
+ await this.join([a]);
936
+ }
937
+
938
+ let nextIndexSet = this._nextsIndex.get(a);
939
+ if (!nextIndexSet) {
940
+ nextIndexSet = new Set();
941
+ nextIndexSet.add(e.hash);
942
+ this._nextsIndex.set(a, nextIndexSet);
943
+ } else {
944
+ nextIndexSet.add(a);
945
+ }
946
+ }
947
+ }
948
+
949
+ const clock = await e.getClock();
950
+ this._hlc.update(clock.timestamp);
951
+
952
+ const removed = await this.processEntry(e);
953
+ const trimmed = await this.trim(options?.trim);
954
+
955
+ for (const entry of trimmed) {
956
+ removed.push(entry);
957
+ }
958
+
959
+ await this?._onChange?.({ added: [e], removed: removed });
960
+ }
961
+
962
+ const forward = nextRefs.get(e.hash);
963
+ if (forward) {
964
+ if (this._headsIndex.has(e.hash)) {
965
+ await this._headsIndex.del(e, options);
966
+ }
967
+ for (const en of forward) {
968
+ stack.push(en);
969
+ }
970
+ } else {
971
+ await this.headsIndex.put(e, options);
972
+ }
973
+ }
974
+
975
+ private async processEntry(entry: Entry<T>) {
976
+ if (entry.metadata.type === EntryType.CUT) {
977
+ return this.deleteRecursively(entry, true);
978
+ }
979
+ return [];
980
+ }
981
+
982
	/// TODO simplify methods below
	/**
	 * Delete `from` and everything transitively reachable through its `next`
	 * pointers that no other surviving entry still references.
	 * @param skipFirst keep the root entries themselves (used for CUT entries)
	 * @returns the entries that were deleted
	 */
	async deleteRecursively(from: Entry<any> | Entry<any>[], skipFirst = false) {
		const stack = Array.isArray(from) ? [...from] : [from];
		const promises: Promise<void>[] = [];
		let counter = 0;
		const deleted: Entry<T>[] = [];
		while (stack.length > 0) {
			const entry = stack.pop()!;
			// Only delete entries still present in the log; skip the roots when skipFirst
			if ((counter > 0 || !skipFirst) && this.has(entry.hash)) {
				// TODO test last argument: It is for when multiple heads point to the same entry, hence we might visit it multiple times? or a concurrent delete process is doing it before us.
				this._trim.deleteFromCache(entry);
				await this._values.delete(entry);
				await this._entryIndex.delete(entry.hash);
				await this._headsIndex.del(entry);
				this._nextsIndex.delete(entry.hash);
				deleted.push(entry);
				promises.push(entry.delete(this._storage));
			}

			for (const next of entry.next) {
				// Unregister this entry as a child of its parent
				const nextFromNext = this._nextsIndex.get(next);
				if (nextFromNext) {
					nextFromNext.delete(entry.hash);
				}

				// Descend only when no other entry still references the parent
				if (!nextFromNext || nextFromNext.size === 0) {
					const ne = await this.get(next);
					if (ne) {
						stack.push(ne);
					}
				}
			}
			counter++;
		}
		await Promise.all(promises);
		return deleted;
	}
1019
+
1020
+ async delete(entry: Entry<any>) {
1021
+ this._trim.deleteFromCache(entry);
1022
+ await this._values.delete(entry);
1023
+ await this._entryIndex.delete(entry.hash);
1024
+ await this._headsIndex.del(entry);
1025
+ this._nextsIndex.delete(entry.hash);
1026
+ const newHeads: string[] = [];
1027
+ for (const next of entry.next) {
1028
+ const ne = await this.get(next);
1029
+ if (ne) {
1030
+ const nexts = this._nextsIndex.get(next)!;
1031
+ nexts.delete(entry.hash);
1032
+ if (nexts.size === 0) {
1033
+ await this._headsIndex.put(ne);
1034
+ newHeads.push(ne.hash);
1035
+ }
1036
+ }
1037
+ }
1038
+ await this._headsIndex.updateHeadsCache({
1039
+ added: newHeads,
1040
+ removed: [entry.hash],
1041
+ });
1042
+ return entry.delete(this._storage);
1043
+ }
1044
+
1045
+ /**
1046
+ * Returns the log entries as a formatted string.
1047
+ * @returns {string}
1048
+ * @example
1049
+ * two
1050
+ * └─one
1051
+ * └─three
1052
+ */
1053
+ async toString(
1054
+ payloadMapper: (payload: Payload<T>) => string = (payload) =>
1055
+ (payload.getValue(this.encoding) as any).toString()
1056
+ ): Promise<string> {
1057
+ return (
1058
+ await Promise.all(
1059
+ (
1060
+ await this.toArray()
1061
+ )
1062
+ .slice()
1063
+ .reverse()
1064
+ .map(async (e, idx) => {
1065
+ const parents: Entry<any>[] = Entry.findDirectChildren(
1066
+ e,
1067
+ await this.toArray()
1068
+ );
1069
+ const len = parents.length;
1070
+ let padding = new Array(Math.max(len - 1, 0));
1071
+ padding = len > 1 ? padding.fill(" ") : padding;
1072
+ padding = len > 0 ? padding.concat(["└─"]) : padding;
1073
+ /* istanbul ignore next */
1074
+ return (
1075
+ padding.join("") +
1076
+ (payloadMapper ? payloadMapper(e.payload) : e.payload)
1077
+ );
1078
+ })
1079
+ )
1080
+ ).join("\n");
1081
+ }
1082
+ async idle() {
1083
+ await this._headsIndex.headsCache?.idle();
1084
+ }
1085
+
1086
+ async close() {
1087
+ // Don't return early here if closed = true, because "load" might create processes that needs to be closed
1088
+ this._closed = true; // closed = true before doing below, else we might try to open the headsIndex cache because it is closed as we assume log is still open
1089
+ await this._entryCache?.clear();
1090
+ await this._headsIndex?.close();
1091
+ await this._memory?.close();
1092
+ }
1093
+
1094
+ async drop() {
1095
+ // Don't return early here if closed = true, because "load" might create processes that needs to be closed
1096
+ this._closed = true; // closed = true before doing below, else we might try to open the headsIndex cache because it is closed as we assume log is still open
1097
+ await this._headsIndex?.drop();
1098
+ await this._entryCache?.clear();
1099
+ await this._memory?.clear();
1100
+ await this._memory?.close();
1101
+ }
1102
+ async load(
1103
+ opts: ({ fetchEntryTimeout?: number } & (
1104
+ | {
1105
+ /* amount?: number TODO */
1106
+ }
1107
+ | { heads?: true }
1108
+ )) & { reload: boolean } = { reload: true }
1109
+ ) {
1110
+ const heads = await this.headsIndex.load({
1111
+ replicate: true, // TODO this.replication.replicate(x) => true/false
1112
+ timeout: opts.fetchEntryTimeout,
1113
+ reload: opts.reload,
1114
+ cache: { update: true, reset: true },
1115
+ });
1116
+
1117
+ if (heads) {
1118
+ // Load the log
1119
+ if ((opts as { heads?: true }).heads) {
1120
+ await this.reset(heads);
1121
+ } else {
1122
+ const amount = (opts as { amount?: number }).amount;
1123
+ if (amount != null && amount >= 0 && amount < heads.length) {
1124
+ throw new Error(
1125
+ "You are not loading all heads, this will lead to unexpected behaviours on write. Please load at least load: " +
1126
+ amount +
1127
+ " entries"
1128
+ );
1129
+ }
1130
+
1131
+ await this.join(heads instanceof Entry ? [heads] : heads, {
1132
+ /* length: amount, */
1133
+ timeout: opts?.fetchEntryTimeout,
1134
+ cache: {
1135
+ update: false,
1136
+ },
1137
+ });
1138
+ }
1139
+ }
1140
+ }
1141
+
1142
+ static async fromEntry<T>(
1143
+ store: Blocks,
1144
+ identity: Identity,
1145
+ entryOrHash: string | string[] | Entry<T> | Entry<T>[],
1146
+ options: {
1147
+ id?: Uint8Array;
1148
+ /* length?: number; TODO */
1149
+ timeout?: number;
1150
+ } & LogOptions<T> = { id: randomBytes(32) }
1151
+ ): Promise<Log<T>> {
1152
+ const log = new Log<T>(options.id && { id: options.id });
1153
+ await log.open(store, identity, options);
1154
+ await log.join(!Array.isArray(entryOrHash) ? [entryOrHash] : entryOrHash, {
1155
+ timeout: options.timeout,
1156
+ trim: options.trim,
1157
+ verifySignatures: true,
1158
+ });
1159
+ return log;
1160
+ }
1161
+
1162
+ /**
1163
+ * Find heads from a collection of entries.
1164
+ *
1165
+ * Finds entries that are the heads of this collection,
1166
+ * ie. entries that are not referenced by other entries.
1167
+ *
1168
+ * @param {Array<Entry<T>>} entries Entries to search heads from
1169
+ * @returns {Array<Entry<T>>}
1170
+ */
1171
+ static findHeads<T>(entries: Entry<T>[]) {
1172
+ const indexReducer = (
1173
+ res: { [key: string]: string },
1174
+ entry: Entry<any>
1175
+ ) => {
1176
+ const addToResult = (e: string) => (res[e] = entry.hash);
1177
+ entry.next.forEach(addToResult);
1178
+ return res;
1179
+ };
1180
+
1181
+ const items = entries.reduce(indexReducer, {});
1182
+ const exists = (e: Entry<T>) => items[e.hash] === undefined;
1183
+ return entries.filter(exists);
1184
+ }
1185
+
1186
+ // Find entries that point to another entry that is not in the
1187
+ // input array
1188
+ static findTails<T>(entries: Entry<T>[]): Entry<T>[] {
1189
+ // Reverse index { next -> entry }
1190
+ const reverseIndex: { [key: string]: Entry<T>[] } = {};
1191
+ // Null index containing entries that have no parents (nexts)
1192
+ const nullIndex: Entry<T>[] = [];
1193
+ // Hashes for all entries for quick lookups
1194
+ const hashes: { [key: string]: boolean } = {};
1195
+ // Hashes of all next entries
1196
+ let nexts: string[] = [];
1197
+
1198
+ const addToIndex = (e: Entry<T>) => {
1199
+ if (e.next.length === 0) {
1200
+ nullIndex.push(e);
1201
+ }
1202
+ const addToReverseIndex = (a: any) => {
1203
+ /* istanbul ignore else */
1204
+ if (!reverseIndex[a]) reverseIndex[a] = [];
1205
+ reverseIndex[a].push(e);
1206
+ };
1207
+
1208
+ // Add all entries and their parents to the reverse index
1209
+ e.next.forEach(addToReverseIndex);
1210
+ // Get all next references
1211
+ nexts = nexts.concat(e.next);
1212
+ // Get the hashes of input entries
1213
+ hashes[e.hash] = true;
1214
+ };
1215
+
1216
+ // Create our indices
1217
+ entries.forEach(addToIndex);
1218
+
1219
+ const addUniques = (
1220
+ res: Entry<T>[],
1221
+ entries: Entry<T>[],
1222
+ _idx: any,
1223
+ _arr: any
1224
+ ) => res.concat(findUniques(entries, "hash"));
1225
+ const exists = (e: string) => hashes[e] === undefined;
1226
+ const findFromReverseIndex = (e: string) => reverseIndex[e];
1227
+
1228
+ // Drop hashes that are not in the input entries
1229
+ const tails = nexts // For every hash in nexts:
1230
+ .filter(exists) // Remove undefineds and nulls
1231
+ .map(findFromReverseIndex) // Get the Entry from the reverse index
1232
+ .reduce(addUniques, []) // Flatten the result and take only uniques
1233
+ .concat(nullIndex); // Combine with tails the have no next refs (ie. first-in-their-chain)
1234
+
1235
+ return findUniques(tails, "hash").sort(Entry.compare);
1236
+ }
1237
+
1238
+ // Find the hashes to entries that are not in a collection
1239
+ // but referenced by other entries
1240
+ static findTailHashes(entries: Entry<any>[]) {
1241
+ const hashes: { [key: string]: boolean } = {};
1242
+ const addToIndex = (e: Entry<any>) => (hashes[e.hash] = true);
1243
+ const reduceTailHashes = (
1244
+ res: string[],
1245
+ entry: Entry<any>,
1246
+ idx: number,
1247
+ arr: Entry<any>[]
1248
+ ) => {
1249
+ const addToResult = (e: string) => {
1250
+ /* istanbul ignore else */
1251
+ if (hashes[e] === undefined) {
1252
+ res.splice(0, 0, e);
1253
+ }
1254
+ };
1255
+ entry.next.reverse().forEach(addToResult);
1256
+ return res;
1257
+ };
1258
+
1259
+ entries.forEach(addToIndex);
1260
+ return entries.reduce(reduceTailHashes, []);
1261
+ }
1262
+ }