@peerbit/log 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92) hide show
  1. package/LICENSE +23 -0
  2. package/README.md +11 -0
  3. package/lib/esm/change.d.ts +5 -0
  4. package/lib/esm/change.js +2 -0
  5. package/lib/esm/change.js.map +1 -0
  6. package/lib/esm/clock.d.ts +87 -0
  7. package/lib/esm/clock.js +260 -0
  8. package/lib/esm/clock.js.map +1 -0
  9. package/lib/esm/difference.d.ts +1 -0
  10. package/lib/esm/difference.js +20 -0
  11. package/lib/esm/difference.js.map +1 -0
  12. package/lib/esm/encoding.d.ts +7 -0
  13. package/lib/esm/encoding.js +20 -0
  14. package/lib/esm/encoding.js.map +1 -0
  15. package/lib/esm/entry-index.d.ts +21 -0
  16. package/lib/esm/entry-index.js +63 -0
  17. package/lib/esm/entry-index.js.map +1 -0
  18. package/lib/esm/entry-with-refs.d.ts +5 -0
  19. package/lib/esm/entry-with-refs.js +2 -0
  20. package/lib/esm/entry-with-refs.js.map +1 -0
  21. package/lib/esm/entry.d.ts +179 -0
  22. package/lib/esm/entry.js +591 -0
  23. package/lib/esm/entry.js.map +1 -0
  24. package/lib/esm/find-uniques.d.ts +1 -0
  25. package/lib/esm/find-uniques.js +12 -0
  26. package/lib/esm/find-uniques.js.map +1 -0
  27. package/lib/esm/heads-cache.d.ts +64 -0
  28. package/lib/esm/heads-cache.js +317 -0
  29. package/lib/esm/heads-cache.js.map +1 -0
  30. package/lib/esm/heads.d.ts +63 -0
  31. package/lib/esm/heads.js +143 -0
  32. package/lib/esm/heads.js.map +1 -0
  33. package/lib/esm/hrtime.d.ts +5 -0
  34. package/lib/esm/hrtime.js +71 -0
  35. package/lib/esm/hrtime.js.map +1 -0
  36. package/lib/esm/index.d.ts +11 -0
  37. package/lib/esm/index.js +11 -0
  38. package/lib/esm/index.js.map +1 -0
  39. package/lib/esm/is-defined.d.ts +1 -0
  40. package/lib/esm/is-defined.js +2 -0
  41. package/lib/esm/is-defined.js.map +1 -0
  42. package/lib/esm/log-errors.d.ts +5 -0
  43. package/lib/esm/log-errors.js +6 -0
  44. package/lib/esm/log-errors.js.map +1 -0
  45. package/lib/esm/log-sorting.d.ts +44 -0
  46. package/lib/esm/log-sorting.js +86 -0
  47. package/lib/esm/log-sorting.js.map +1 -0
  48. package/lib/esm/log.d.ts +205 -0
  49. package/lib/esm/log.js +1004 -0
  50. package/lib/esm/log.js.map +1 -0
  51. package/lib/esm/logger.d.ts +2 -0
  52. package/lib/esm/logger.js +4 -0
  53. package/lib/esm/logger.js.map +1 -0
  54. package/lib/esm/package.json +3 -0
  55. package/lib/esm/snapshot.d.ts +22 -0
  56. package/lib/esm/snapshot.js +83 -0
  57. package/lib/esm/snapshot.js.map +1 -0
  58. package/lib/esm/trim.d.ts +49 -0
  59. package/lib/esm/trim.js +203 -0
  60. package/lib/esm/trim.js.map +1 -0
  61. package/lib/esm/types.d.ts +6 -0
  62. package/lib/esm/types.js +23 -0
  63. package/lib/esm/types.js.map +1 -0
  64. package/lib/esm/utils.d.ts +2 -0
  65. package/lib/esm/utils.js +3 -0
  66. package/lib/esm/utils.js.map +1 -0
  67. package/lib/esm/values.d.ts +33 -0
  68. package/lib/esm/values.js +141 -0
  69. package/lib/esm/values.js.map +1 -0
  70. package/package.json +79 -0
  71. package/src/change.ts +2 -0
  72. package/src/clock.ts +280 -0
  73. package/src/difference.ts +22 -0
  74. package/src/encoding.ts +27 -0
  75. package/src/entry-index.ts +78 -0
  76. package/src/entry-with-refs.ts +6 -0
  77. package/src/entry.ts +749 -0
  78. package/src/find-uniques.ts +14 -0
  79. package/src/heads-cache.ts +400 -0
  80. package/src/heads.ts +208 -0
  81. package/src/hrtime.ts +78 -0
  82. package/src/index.ts +11 -0
  83. package/src/is-defined.ts +1 -0
  84. package/src/log-errors.ts +9 -0
  85. package/src/log-sorting.ts +108 -0
  86. package/src/log.ts +1262 -0
  87. package/src/logger.ts +3 -0
  88. package/src/snapshot.ts +103 -0
  89. package/src/trim.ts +269 -0
  90. package/src/types.ts +12 -0
  91. package/src/utils.ts +2 -0
  92. package/src/values.ts +193 -0
@@ -0,0 +1,14 @@
1
+ export const findUniques = <T>(value: T[], key?: string): T[] => {
2
+ // Create an index of the collection
3
+ // TODO fix types. This method is quite ugly, maybe lets remove it altogether
4
+ const uniques: { [key: string | number | symbol]: T } = {};
5
+ const get = (key: string | number | symbol) => uniques[key];
6
+ const addToIndex = (e: T) =>
7
+ (uniques[
8
+ key
9
+ ? ((e as any)[key] as string | number | symbol)
10
+ : (e as string | number | symbol)
11
+ ] = e);
12
+ value.forEach(addToIndex);
13
+ return Object.keys(uniques).map(get);
14
+ };
@@ -0,0 +1,400 @@
1
+ import PQueue from "p-queue";
2
+ import { v4 as uuid } from "uuid";
3
+ import { Entry } from "./entry";
4
+ import { SimpleLevel } from "@peerbit/lazy-level";
5
+ import { variant, option, field, vec } from "@dao-xyz/borsh";
6
+ import { serialize, deserialize } from "@dao-xyz/borsh";
7
+ import { logger as loggerFn } from "@peerbit/logger";
8
+ import path from "path-browserify";
9
// Module-scoped logger tagged for the heads-cache subsystem.
export const logger = loggerFn({ module: "heads-cache" });
// Marker base class for cached values (no fields of its own).
export class CachedValue {}
/* export type AppendOptions<T> = {
	signers?: ((data: Uint8Array) => Promise<SignatureWithKey>)[];
	nexts?: Entry<T>[];
	reciever?: EncryptionTemplateMaybeEncrypted;
	type?: EntryType;
};
 */
18
+
19
/**
 * Borsh-serialized pointer stored under a fixed root key ("heads" /
 * "heads_removed"); its `path` names the most recently written cache batch.
 * NOTE: the field declaration order defines the serialized layout — do not
 * reorder fields.
 */
@variant(0)
export class CachePath {
	@field({ type: "string" })
	path: string;

	constructor(path: string) {
		this.path = path;
	}
}
28
+
29
/**
 * Serialized list of entry hashes whose replication has not finished.
 * NOTE(review): the class name carries a typo ("Unsfinished"), but it is
 * exported — renaming would break consumers, so it is left as-is.
 */
@variant(0)
export class UnsfinishedReplication {
	@field({ type: vec("string") })
	hashes: string[];

	// `opts` is optional because borsh deserialization constructs the instance
	// without arguments and assigns the fields afterwards.
	constructor(opts?: { hashes: string[] }) {
		if (opts) {
			this.hashes = opts.hashes;
		}
	}
}
40
+
41
/**
 * One batch in the persisted linked list of head hashes. Batches are chained
 * through `last`, which holds the cache key of the previous batch (see
 * `HeadsCache.getCachedHeads`, which walks this chain).
 */
@variant(0)
export class HeadsCacheToSerialize {
	// Head hashes written in this batch.
	@field({ type: vec("string") })
	heads: string[];

	// Cache key of the previous batch, if any (linked-list pointer).
	@field({ type: option("string") })
	last?: string;

	// Running total of hashes written up to and including this batch.
	@field({ type: "u64" })
	counter: bigint;

	constructor(heads: string[], counter: bigint, last?: string) {
		this.heads = heads;
		this.last = last;
		this.counter = counter;
	}
}
58
+
59
+ const updateHashes = async (
60
+ headCache: HeadsCache<any>,
61
+ headsPath: string,
62
+ lastCid: string | undefined,
63
+ lastCounter: bigint,
64
+ hashes: string[]
65
+ ): Promise<{ counter: bigint; newPath: string }> => {
66
+ const newHeadsPath = path.join(
67
+ headsPath,
68
+ String(headCache.headsPathCounter),
69
+ uuid()
70
+ );
71
+ const counter = lastCounter + BigInt(hashes.length);
72
+ await Promise.all([
73
+ headCache.cache?.put(
74
+ headsPath,
75
+ serialize(new CachePath(newHeadsPath.toString()))
76
+ ),
77
+ headCache.cache?.put(
78
+ newHeadsPath,
79
+ serialize(new HeadsCacheToSerialize(hashes, counter, lastCid))
80
+ ),
81
+ ]);
82
+ return { counter, newPath: newHeadsPath };
83
+ };
84
+
85
// Minimal view of the in-memory heads index that HeadsCache needs in order to
// decide whether a write covers every current head.
interface HeadsIndex {
	id: Uint8Array;
	// Number of current heads.
	size: number;
	has(cid: string): boolean;
}
90
/**
 * Persists the set of log heads (and removed heads) to a key-value cache so
 * they can be restored after a restart without replaying the whole log.
 * Writes are stored as linked lists of batches (HeadsCacheToSerialize) and
 * periodically compacted by rewriting the cache from the live head set.
 */
export class HeadsCache<T> /* implements Initiable<T> */ {
	// Root cache keys pointing (via CachePath) at the newest batch of each list.
	headsPath: string;
	removedHeadsPath: string;
	initialized: boolean;

	// Bumped on every full reset so new batches land under a fresh sub-path.
	private _headsPathCounter = 0;

	// Tail of the "added heads" batch list, plus its running hash count.
	private _lastHeadsPath?: string;
	private _lastHeadsCount = 0n;

	// Tail of the "removed heads" batch list, plus its running hash count.
	private _lastRemovedHeadsPath?: string;
	private _lastRemovedHeadsCount = 0n;

	private _cache?: SimpleLevel;
	// Serializes all cache writes (concurrency 1) to avoid race conditions.
	private _cacheWriteQueue?: PQueue<any, any>;

	private _loaded = false;
	// Live in-memory index; consulted to decide when a write covers all heads.
	private _index: HeadsIndex;

	constructor(index: HeadsIndex) {
		this._index = index;
	}

	get cache(): SimpleLevel | undefined {
		return this._cache;
	}

	get headsPathCounter(): number {
		return this._headsPathCounter;
	}

	/**
	 * Opens the underlying store (if provided), restores the last batch
	 * pointers/counters and prepares the write queue. May only be called once.
	 */
	async init(cache?: SimpleLevel): Promise<this> {
		if (this.initialized) {
			throw new Error("Already initialized");
		}

		this._cache = cache;

		// Open the backing store before reading any pointers from it.
		await this._cache?.open();

		this.headsPath = "heads";
		this.removedHeadsPath = "heads_removed";

		await this.loadLastHeadsPath();

		// append and log-joins queue. Adding ops and joins to the queue
		// makes sure they get processed sequentially to avoid race conditions
		this._cacheWriteQueue = new PQueue({ concurrency: 1 });
		/* if (this._options.onOpen) {
			await this._options.onOpen();
		} */
		this.initialized = true;

		return this;
	}

	get loaded(): boolean {
		return this._loaded;
	}

	/**
	 * Persists an added/removed change. When `reset` is undefined it is
	 * inferred: if the added set covers every current head (and we have loaded
	 * once), the cache is rewritten from scratch instead of appended to.
	 */
	private async _updateCachedHeads(
		change: {
			added?: (Entry<T> | string)[];
			removed?: (Entry<T> | string)[];
		},
		reset?: boolean
	) {
		if (typeof reset !== "boolean" && change.added) {
			// Only reset all heads if loaded once, since we don't want to lose
			// track of heads that have not been loaded yet
			if (this._loaded && this._index.size <= change.added.length) {
				let addedIsAllHeads = true;
				for (const entry of change.added) {
					const hash = typeof entry === "string" ? entry : entry.hash;
					if (!this._index.has(hash)) {
						addedIsAllHeads = false;
					}
				}
				reset = addedIsAllHeads;
			} else {
				// added size < head size, meaning we have not rewritten all heads
				reset = false;
			}
		}

		// If 'reset' then dont keep references to old heads caches, assume new cache will fully describe all heads

		// TODO dont delete old before saving new
		if (reset) {
			await this.cache?.clear();
			this._lastHeadsPath = undefined;
			this._lastRemovedHeadsPath = undefined;
			this._lastHeadsCount = 0n;
			this._lastRemovedHeadsCount = 0n;
			this._headsPathCounter += 1;
		}

		if (change.added && change.added.length > 0) {
			const update = await updateHashes(
				this,
				this.headsPath,
				this._lastHeadsPath,
				this._lastHeadsCount,
				change.added.map((x) => (typeof x === "string" ? x : x.hash))
			);
			this._lastHeadsPath = update.newPath;
			this._lastHeadsCount = update.counter;
		}

		if (this._lastHeadsPath) {
			// only add removed heads if we actually have added heads, else these are pointless
			if (change.removed && change.removed.length > 0) {
				const update = await updateHashes(
					this,
					this.removedHeadsPath,
					this._lastRemovedHeadsPath,
					this._lastRemovedHeadsCount,
					change.removed.map((x) => (typeof x === "string" ? x : x.hash))
				);
				this._lastRemovedHeadsPath = update.newPath;
				this._lastRemovedHeadsCount = update.counter;
				// Compaction: once removals reach half the added count, rewrite
				// the cache from the effective head set (added minus removed).
				if (
					update.counter > 0n &&
					2n * update.counter >= this._lastHeadsCount
				) {
					const resetToHeads = await this.getCachedHeads(
						this._lastHeadsPath,
						this._lastRemovedHeadsPath
					);
					await this._updateCachedHeads(
						{ added: resetToHeads, removed: [] },
						true
					);
				}
			}
		}
	}

	/** Waits until all queued cache writes have been flushed. */
	async idle(): Promise<void> {
		// Wait for the operations queue to finish processing
		// to make sure that all operations that have
		// been queued will be written to disk
		await this._cacheWriteQueue?.onIdle();
		await this._cache?.idle?.();
	}

	/**
	 * Reads the effective head hashes from the cache: walks the "added" batch
	 * list and filters out everything found on the "removed" batch list.
	 */
	async getCachedHeads(
		lastHeadsPath: string | undefined = this._lastHeadsPath,
		lastRemovedHeadsPath: string | undefined = this._lastRemovedHeadsPath
	): Promise<string[]> {
		if (!this._cache) {
			return [];
		}
		// Walk the linked list of batches starting at `start`, following each
		// batch's `last` pointer, collecting hashes not present in `filter`.
		const getHashes = async (
			start: string | undefined,
			filter?: Set<string>
		) => {
			const result: string[] = [];
			let next = start;
			while (next) {
				const cache = await this._cache
					?.get(next)
					.then((bytes) => bytes && deserialize(bytes, HeadsCacheToSerialize));
				next = cache?.last;
				cache?.heads.forEach((head) => {
					if (filter && filter.has(head)) {
						return;
					}

					result.push(head);
				});
			}
			return result;
		};

		const removedHeads = new Set(await getHashes(lastRemovedHeadsPath));
		const heads = await getHashes(lastHeadsPath, removedHeads);
		return heads; // Saved heads - removed heads
	}

	/* get logOptions(): LogOptions<T> {
		return {
			logId: this.id,
			trim: this._options.trim && {
				// I can trim if I am not a replicator of an entry

				...this._options.trim,
				filter: this.options.replicator && {
					canTrim: async (gid) => !(await this.options.replicator!(gid)),
					cacheId: this.options.replicatorsCacheId,
				},
			},
		};
	} */

	get closed() {
		return !this._cache || this._cache.status === "closed";
	}

	/** Flushes pending writes, closes the store and clears in-memory pointers. */
	async close() {
		await this.idle();
		await this._cache?.close();
		this._loaded = false;
		this._lastHeadsPath = undefined;
		this._lastRemovedHeadsPath = undefined;
		this._lastRemovedHeadsCount = 0n;
		this._lastHeadsCount = 0n;
	}

	/**
	 * Drops a database and removes local data
	 */
	async drop() {
		this.initialized = false;

		if (!this._cache) {
			return; // already dropped
		}
		if (this._cache.status !== "open") {
			await this._cache.open();
		}

		await this._cache.del(this.headsPath);
		await this._cache.del(this.removedHeadsPath);
		await this.close();

		delete this._cache;
	}

	// Re-reads the batch pointers and returns the effective cached head hashes.
	private async _loadHeads(): Promise<string[]> {
		if (!this.initialized) {
			throw new Error("Store needs to be initialized before loaded");
		}

		if (this._cache!.status !== "open") {
			await this._cache!.open();
		}

		await this.loadLastHeadsPath();
		return this.getCachedHeads();
	}

	/** Loads the cached heads and marks the cache as loaded. */
	async load() {
		if (!this.initialized) {
			throw new Error("Needs to be initialized before loaded");
		}

		if (this._cache!.status !== "open") {
			await this._cache!.open();
		}

		const heads = await this._loadHeads();
		this._loaded = true;
		return heads;
	}

	/**
	 * Restores the tail pointers of both batch lists from the root cache keys
	 * and recomputes their running counters.
	 */
	async loadLastHeadsPath() {
		this._lastHeadsPath = await this._cache
			?.get(this.headsPath)
			.then((bytes) => bytes && deserialize(bytes, CachePath).path);
		this._lastRemovedHeadsPath = await this._cache
			?.get(this.removedHeadsPath)
			.then((bytes) => bytes && deserialize(bytes, CachePath).path);
		this._lastHeadsCount = this._lastHeadsPath
			? await this.getCachedHeadsCount(this._lastHeadsPath)
			: 0n;
		this._lastRemovedHeadsCount = this._lastRemovedHeadsPath
			? await this.getCachedHeadsCount(this._lastRemovedHeadsPath)
			: 0n;
	}

	/** Reads the running counter stored in the batch at `headPath` (0n if none). */
	async getCachedHeadsCount(headPath?: string): Promise<bigint> {
		if (!headPath) {
			return 0n;
		}
		return (
			(
				await this._cache
					?.get(headPath)
					.then((bytes) => bytes && deserialize(bytes, HeadsCacheToSerialize))
			)?.counter || 0n
		);
	}

	/**
	 * Ensures the cache has been loaded, scheduling the load on the write
	 * queue so it is ordered after any pending writes.
	 */
	async waitForHeads() {
		if (this.closed) {
			throw new Error("Store is closed");
		}
		if (!this._loaded) {
			return this._cacheWriteQueue?.add(async () => {
				// Re-check inside the queue: a queued load may have run already.
				if (this._loaded) {
					return;
				}
				return this.load();
			});
		}
	}

	/** Enqueues a head change to be persisted sequentially. */
	public queue(
		changes: {
			added?: (Entry<T> | string)[];
			removed?: (Entry<T> | string)[];
		},
		reset?: boolean
	) {
		return this._cacheWriteQueue?.add(() =>
			this._updateCachedHeads(changes, reset)
		);
	}
}
package/src/heads.ts ADDED
@@ -0,0 +1,208 @@
1
+ import { Entry } from "./entry.js";
2
+ import { SimpleLevel } from "@peerbit/lazy-level";
3
+ import { HeadsCache } from "./heads-cache.js";
4
+ import { Blocks } from "@peerbit/blocks-interface";
5
+ import { Keychain } from "@peerbit/crypto";
6
+ import { Encoding } from "./encoding.js";
7
+
8
// Controls whether a mutation is mirrored to the persistent heads cache
// (`update`) and whether the cache should be rewritten from scratch (`reset`).
// `reset` is only meaningful when `update` is true.
export type CacheUpdateOptions = {
	cache?: { update?: false; reset?: false } | { update: true; reset?: boolean };
};

// Dependencies handed to HeadsIndex.init.
interface Config {
	storage: Blocks;
	keychain?: Keychain;
	// Optional persistent store; when present, a HeadsCache is created over it.
	memory?: SimpleLevel;
	encoding: Encoding<any>;
}
18
+ export class HeadsIndex<T> {
19
+ private _id: Uint8Array;
20
+ private _index: Set<string> = new Set();
21
+ private _gids: Map<string, number>;
22
+ private _headsCache: HeadsCache<T> | undefined;
23
+ private _config: Config;
24
+ constructor(id: Uint8Array) {
25
+ this._gids = new Map();
26
+ this._id = id;
27
+ }
28
+
29
+ async init(config: Config, options: { entries?: Entry<T>[] } = {}) {
30
+ this._config = config;
31
+ await this.reset(options?.entries || []);
32
+ if (config.memory) {
33
+ this._headsCache = new HeadsCache(this);
34
+ return this._headsCache.init(await config.memory.sublevel("heads"));
35
+ }
36
+ }
37
+
38
+ async load(
39
+ options?: {
40
+ timeout?: number;
41
+ replicate?: boolean;
42
+ reload?: boolean;
43
+ } & CacheUpdateOptions
44
+ ) {
45
+ if (!this._headsCache || (this._headsCache.loaded && !options?.reload)) {
46
+ return;
47
+ }
48
+
49
+ // TODO make below into a promise that concurrenct caklls can wait on?
50
+ const heads = await this._headsCache?.load();
51
+ if (!heads) {
52
+ return;
53
+ }
54
+ try {
55
+ const entries = await Promise.all(
56
+ heads.map(async (x) => {
57
+ const entry = await Entry.fromMultihash<T>(
58
+ this._config.storage,
59
+ x,
60
+ options
61
+ );
62
+ entry.init(this._config);
63
+ await entry.getGid(); // decrypt gid
64
+ return entry;
65
+ })
66
+ );
67
+ await this.reset(entries);
68
+ return entries;
69
+ } catch (error) {
70
+ const q = 123;
71
+ throw error;
72
+ }
73
+ }
74
+
75
+ get headsCache(): HeadsCache<T> | undefined {
76
+ return this._headsCache;
77
+ }
78
+
79
+ close() {
80
+ return this._headsCache?.close();
81
+ }
82
+
83
+ drop() {
84
+ return this._headsCache?.drop();
85
+ }
86
+
87
+ get id(): Uint8Array {
88
+ return this._id;
89
+ }
90
+
91
+ get index() {
92
+ return this._index;
93
+ }
94
+
95
+ get gids(): Map<string, number> {
96
+ return this._gids;
97
+ }
98
+
99
+ get size() {
100
+ return this._index.size;
101
+ }
102
+
103
+ async reset(
104
+ entries: Entry<T>[],
105
+ options: CacheUpdateOptions = { cache: { reset: true, update: true } }
106
+ ) {
107
+ this._index.clear();
108
+ this._gids = new Map();
109
+ if (entries) {
110
+ await this.putAll(entries, options); // reset cache = true
111
+ }
112
+ }
113
+
114
+ has(cid: string) {
115
+ return this._index.has(cid);
116
+ }
117
+
118
+ async put(entry: Entry<T>, options?: CacheUpdateOptions) {
119
+ this._putOne(entry);
120
+ if (options?.cache?.update) {
121
+ await this._headsCache?.queue({ added: [entry] }, options.cache.reset);
122
+ }
123
+ }
124
+
125
+ async putAll(entries: Entry<T>[], options?: CacheUpdateOptions) {
126
+ this._putAll(entries);
127
+ if (options?.cache?.update) {
128
+ await this._headsCache?.queue({ added: entries }, options.cache.reset);
129
+ }
130
+ }
131
+
132
+ async resetHeadsCache() {
133
+ await this._headsCache?.queue(
134
+ { added: [...this._index], removed: [] },
135
+ true
136
+ );
137
+ }
138
+ async updateHeadsCache(
139
+ change: {
140
+ added?: (Entry<T> | string)[];
141
+ removed?: (Entry<T> | string)[];
142
+ } = {},
143
+ reset?: boolean
144
+ ) {
145
+ await this._headsCache?.queue(change, reset);
146
+ }
147
+
148
+ private _putOne(entry: Entry<T>) {
149
+ if (!entry.hash) {
150
+ throw new Error("Missing hash");
151
+ }
152
+ if (this._index.has(entry.hash)) {
153
+ return;
154
+ }
155
+
156
+ this._index.add(entry.hash);
157
+ if (!this._gids.has(entry.gid)) {
158
+ this._gids.set(entry.gid, 1);
159
+ } else {
160
+ this._gids.set(entry.gid, this._gids.get(entry.gid)! + 1);
161
+ }
162
+ }
163
+
164
+ private _putAll(entries: Entry<T>[]) {
165
+ for (const entry of entries) {
166
+ this._putOne(entry);
167
+ }
168
+ }
169
+
170
+ async del(
171
+ entry: { hash: string; gid: string },
172
+ options?: CacheUpdateOptions
173
+ ): Promise<{
174
+ removed: boolean;
175
+ lastWithGid: boolean;
176
+ }> {
177
+ const wasHead = this._index.delete(entry.hash);
178
+ if (!wasHead) {
179
+ return {
180
+ lastWithGid: false,
181
+ removed: false,
182
+ };
183
+ }
184
+ const newValue = this._gids.get(entry.gid)! - 1;
185
+ const lastWithGid = newValue <= 0;
186
+ if (newValue <= 0) {
187
+ this._gids.delete(entry.gid);
188
+ } else {
189
+ this._gids.set(entry.gid, newValue);
190
+ }
191
+ if (!entry.hash) {
192
+ throw new Error("Missing hash");
193
+ }
194
+
195
+ if (wasHead && options?.cache?.update) {
196
+ await this._headsCache?.queue(
197
+ { removed: [entry.hash] },
198
+ options.cache.reset
199
+ );
200
+ }
201
+
202
+ return {
203
+ removed: wasHead,
204
+ lastWithGid: lastWithGid,
205
+ };
206
+ // this._headsCache = undefined; // TODO do smarter things here, only remove the element needed (?)
207
+ }
208
+ }
package/src/hrtime.ts ADDED
@@ -0,0 +1,78 @@
1
+ /*
2
+
3
+ MIT License
4
+
5
+ Copyright (c) 2020 Vlad Tansky
6
+ Copyright (c) 2022 dao.xyz
7
+
8
+ Permission is hereby granted, free of charge, to any person obtaining a copy
9
+ of this software and associated documentation files (the "Software"), to deal
10
+ in the Software without restriction, including without limitation the rights
11
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12
+ copies of the Software, and to permit persons to whom the Software is
13
+ furnished to do so, subject to the following conditions:
14
+
15
+ The above copyright notice and this permission notice shall be included in all
16
+ copies or substantial portions of the Software.
17
+
18
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
21
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
24
+ SOFTWARE.
25
+
26
+ */
27
+
28
// Ensures window.performance.now exists; a no-op in browsers that provide it
// natively. Browser-only: references `window` (the node build exports
// process.hrtime instead, see the default export below).
const _perfomancePolyfill = () => {
	// based on https://gist.github.com/paulirish/5438650 copyright Paul Irish 2015.
	if ("performance" in window === false) {
		(window.performance as any) = {};
	}

	Date.now =
		Date.now ||
		(() => {
			// thanks IE8
			return new Date().getTime();
		});

	if ("now" in window.performance === false) {
		let nowOffset = Date.now();

		// Prefer navigationStart so the polyfilled now() shares the epoch that
		// a native performance.now would have used.
		if (performance.timing && performance.timing.navigationStart) {
			nowOffset = performance.timing.navigationStart;
		}

		window.performance.now = () => Date.now() - nowOffset;
	}
};
51
+
52
+ const _hrtime = (previousTimestamp?: [number, number]): [number, number] => {
53
+ _perfomancePolyfill();
54
+ const baseNow = Math.floor((Date.now() - performance.now()) * 1e-3);
55
+ const clocktime = performance.now() * 1e-3;
56
+ let seconds = Math.floor(clocktime) + baseNow;
57
+ let nanoseconds = Math.floor((clocktime % 1) * 1e9);
58
+
59
+ if (previousTimestamp) {
60
+ seconds = seconds - previousTimestamp[0];
61
+ nanoseconds = nanoseconds - previousTimestamp[1];
62
+ if (nanoseconds < 0) {
63
+ seconds--;
64
+ nanoseconds += 1e9;
65
+ }
66
+ }
67
+ return [seconds, nanoseconds];
68
+ };
69
+ const NS_PER_SEC = 1e9;
70
+ _hrtime.bigint = (time?: [number, number]): bigint => {
71
+ const diff = _hrtime(time);
72
+ return BigInt(diff[0] * NS_PER_SEC + diff[1]);
73
+ };
74
+
75
+ export default typeof process === "undefined" ||
76
+ typeof process.hrtime === "undefined"
77
+ ? _hrtime
78
+ : process.hrtime;
package/src/index.ts ADDED
@@ -0,0 +1,11 @@
1
// Barrel file: the public API surface of @peerbit/log.
export * from "./log.js";
export * from "./log-sorting.js";
export * from "./log-errors.js";
export * from "./snapshot.js";
export * from "./entry.js";
export * from "./utils.js";
export * from "./clock.js";
export * from "./encoding.js";
export * from "./trim.js";
export * from "./change.js";
// Explicit type-only re-export (already covered by `export *` above) kept for
// consumers that import these names as types.
export type { TrimToByteLengthOption, TrimToLengthOption } from "./trim.js";
@@ -0,0 +1 @@
1
+ export const isDefined = (arg: any) => arg !== undefined && arg !== null;