sliftutils 0.56.0 → 0.57.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.d.ts CHANGED
@@ -950,7 +950,8 @@ declare module "sliftutils/storage/TransactionStorage" {
         private updatePendingAppends;
         getKeys(): Promise<string[]>;
         private loadAllTransactions;
-        private loadTransactionFile;
+        private parseTransactionFile;
+        private applyTransactionEntries;
         private readTransactionEntry;
         private serializeTransactionEntry;
         private getHeader;
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "sliftutils",
-  "version": "0.56.0",
+  "version": "0.57.0",
   "main": "index.js",
   "license": "MIT",
   "files": [
@@ -34,7 +34,8 @@ export declare class TransactionStorage implements IStorage<Buffer> {
     private updatePendingAppends;
     getKeys(): Promise<string[]>;
     private loadAllTransactions;
-    private loadTransactionFile;
+    private parseTransactionFile;
+    private applyTransactionEntries;
     private readTransactionEntry;
     private serializeTransactionEntry;
     private getHeader;
@@ -73,13 +73,6 @@ let seqNum = 0;
 function getNextChunkPath(): string {
     return `${Date.now()}_${seqNum++}_${ourId}.chunk`;
 }
-function sortChunks(chunks: string[]): string[] {
-    function getChunkParts(chunk: string): unknown[] {
-        const parts = chunk.split("_");
-        return parts.map(part => +part);
-    }
-    return chunks.sort((a, b) => compareArray(getChunkParts(a), getChunkParts(b)));
-}
 
 export class TransactionStorage implements IStorage<Buffer> {
     public cache: Map<string, TransactionEntry> = new Map();
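For context, the removed sortChunks helper ordered chunk files by their names: each name is `${Date.now()}_${seqNum}_${ourId}.chunk`, so splitting on "_" and comparing the numeric parts sorts files by creation time, then sequence number. A minimal standalone sketch of that ordering (compareParts is a local stand-in for the package's compareArray helper, which this diff does not show):

```ts
// Sketch of the removed filename ordering; `compareParts` is a stand-in for
// the package's own compareArray helper (not shown in this diff).
function compareParts(a: number[], b: number[]): number {
    for (let i = 0; i < Math.min(a.length, b.length); i++) {
        if (a[i] !== b[i]) return a[i] - b[i];
    }
    return a.length - b.length;
}

function sortChunksByName(chunks: string[]): string[] {
    // "1700000001000_3_42.chunk".split("_") => ["1700000001000", "3", "42.chunk"];
    // coercing with + yields [time, seq, NaN] — the trailing id part never decides
    // the order here, because time and sequence number already differ.
    const parts = (chunk: string) => chunk.split("_").map(p => +p);
    return chunks.sort((a, b) => compareParts(parts(a), parts(b)));
}

console.log(sortChunksByName([
    "1700000002000_0_42.chunk",
    "1700000001000_3_42.chunk",
    "1700000001000_1_42.chunk",
]));
// => ["1700000001000_1_42.chunk", "1700000001000_3_42.chunk", "1700000002000_0_42.chunk"]
```

After this release the helper is gone entirely: ordering is decided per entry, by timestamp, inside the new applyTransactionEntries method (see the hunk further down).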
@@ -108,7 +101,7 @@ export class TransactionStorage implements IStorage<Buffer> {
         });
     }
 
-    private init: Promise<void> | undefined = this.loadAllTransactions();
+    private init: Promise<unknown> | undefined = this.loadAllTransactions();
 
     private getCurrentChunk(): string {
         if (this.currentChunk && this.currentChunk.timeCreated < Date.now() - FILE_MAX_LIFETIME) {
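The init field is widened from Promise<void> to Promise<unknown> because loadAllTransactions now resolves with the list of chunk files; TypeScript rejects assigning a Promise<string[]> initializer to a Promise<void> field. A small illustrative sketch (the names here are hypothetical, only the assignability rule mirrors the diff):

```ts
// Hypothetical names; only the assignability rule is the point.
async function loadAll(): Promise<string[]> {
    return ["a.chunk", "b.chunk"];
}

// const init: Promise<void> = loadAll();   // error: Promise<string[]> is not assignable to Promise<void>
const init: Promise<unknown> = loadAll();    // unknown accepts any resolved value

// Callers that only care about completion can still simply await it.
async function whenReady(): Promise<void> {
    await init;
}

whenReady().then(() => console.log("loaded"));
```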
@@ -247,33 +240,37 @@ export class TransactionStorage implements IStorage<Buffer> {
     }
 
 
-    private async loadAllTransactions(): Promise<void> {
-        if (isInBuild()) return;
+    // NOTE: This is either called in init (which blocks all other calls), or inside of the global file lock, so it is safe to load.
+    private async loadAllTransactions(): Promise<string[]> {
+        if (isInBuild()) return [];
+
         let time = Date.now();
         const keys = await this.rawStorage.getKeys();
         const transactionFiles = keys.filter(key => key.endsWith(CHUNK_EXT));
 
-        sortChunks(transactionFiles);
-
-        let size = 0;
-        for (const file of transactionFiles) {
-            let curSize = await this.loadTransactionFile(file);
-            size += curSize;
+        let entryList: TransactionEntry[][] = [];
+        for (let file of transactionFiles) {
+            entryList.push(await this.parseTransactionFile(file));
         }
+        let entries = entryList.flat();
+        this.applyTransactionEntries(entries);
+
         time = Date.now() - time;
         if (time > 50) {
-            console.log(`Loaded ${this.debugName} in ${formatTime(time)}, ${formatNumber(this.cache.size)} keys, from ${formatNumber(transactionFiles.length)} files, ${formatNumber(size)}B`, transactionFiles);
+            console.log(`Loaded ${this.debugName} in ${formatTime(time)}, ${formatNumber(this.cache.size)} keys, from ${formatNumber(transactionFiles.length)} files, entries ${formatNumber(entries.length)}B`, transactionFiles);
         }
 
         this.init = undefined;
+        return transactionFiles;
     }
 
-    private async loadTransactionFile(filename: string): Promise<number> {
+    // ONLY call this inside of loadAllTransactions
+    private async parseTransactionFile(filename: string): Promise<TransactionEntry[]> {
         const fullFile = await this.rawStorage.get(filename);
-        if (!fullFile) return 0;
+        if (!fullFile) return [];
         if (fullFile.length < 4) {
             //console.error(`Transaction in ${this.debugName} file ${filename} is too small, skipping`);
-            return 0;
+            return [];
         }
         let headerSize = fullFile.readUInt32LE(0);
         let headerBuffer = fullFile.slice(4, 4 + headerSize);
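The rewritten loadAllTransactions no longer folds each chunk file into the cache as it is read; it first parses every file into an entry list, flattens the lists, applies them in one pass, and returns the file names it processed. A rough standalone sketch of that shape, with placeholder types and callbacks (ChunkEntry, listFiles, parseFile, applyEntries are illustrations, not the package's API):

```ts
// Placeholder types/functions; only the parse-then-apply shape mirrors the diff.
interface ChunkEntry { key: string; time: number; value: Uint8Array }

async function loadAll(
    listFiles: () => Promise<string[]>,
    parseFile: (file: string) => Promise<ChunkEntry[]>,
    applyEntries: (entries: ChunkEntry[]) => void,
): Promise<string[]> {
    const files = (await listFiles()).filter(f => f.endsWith(".chunk"));

    // 1) Parse every chunk file into memory first.
    const perFile: ChunkEntry[][] = [];
    for (const file of files) {
        perFile.push(await parseFile(file));
    }

    // 2) Apply everything in a single pass, so ordering can be decided globally
    //    (per entry, by timestamp) instead of per file.
    applyEntries(perFile.flat());

    // Return the processed file list, as the new loadAllTransactions does,
    // so callers can reuse it.
    return files;
}
```

Splitting parsing from applying is what makes the filename sort unnecessary: the apply step orders entries by their own timestamps.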
@@ -282,22 +279,13 @@ export class TransactionStorage implements IStorage<Buffer> {
             header = JSON.parse(headerBuffer.toString());
         } catch (e) {
             console.error(`Failed to parse header of transaction file in ${this.debugName}, ${filename}`);
-            return 0;
+            return [];
         }
         let content = fullFile.slice(4 + headerSize);
         if (header.zipped) {
             content = await Zip.gunzip(content);
         }
 
-        let pendingWriteTimes = new Map<string, number>();
-        for (const entry of this.pendingAppends) {
-            let prevTime = pendingWriteTimes.get(entry.key);
-            if (prevTime && prevTime > entry.time) {
-                continue;
-            }
-            pendingWriteTimes.set(entry.key, entry.time);
-        }
-
         let offset = 0;
         let entries: TransactionEntry[] = [];
         while (offset < content.length) {
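The parse path in this and the previous hunk implies a chunk layout of a 4-byte little-endian header length, a JSON header, and then the entry data (gzip-compressed when header.zipped is set). A hedged Node.js sketch of reading that prefix; the header shape beyond zipped and the per-entry encoding are not visible in the diff:

```ts
// Hedged sketch of the chunk layout visible in this hunk:
//   [4-byte LE header length][JSON header][entry data, gzipped if header.zipped]
// The header fields beyond `zipped` are assumptions based on the diff.
import { gunzipSync } from "zlib";

interface ChunkHeader { zipped?: boolean }

function readChunk(fullFile: Buffer): { header: ChunkHeader; content: Buffer } | undefined {
    if (fullFile.length < 4) return undefined;              // too small to hold the length prefix
    const headerSize = fullFile.readUInt32LE(0);             // length of the JSON header
    const headerBuffer = fullFile.slice(4, 4 + headerSize);

    let header: ChunkHeader;
    try {
        header = JSON.parse(headerBuffer.toString());
    } catch {
        return undefined;                                    // corrupt header: skip the file
    }

    let content = fullFile.slice(4 + headerSize);
    if (header.zipped) {
        content = gunzipSync(content);                       // entries were compressed on write
    }
    return { header, content };
}
```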
@@ -330,7 +318,21 @@ export class TransactionStorage implements IStorage<Buffer> {
             let { entry } = entryObj;
             offset = entryObj.offset;
             entries.push(entry);
+        }
+        return entries;
+    }
+    private applyTransactionEntries(entries: TransactionEntry[]): void {
+        let pendingWriteTimes = new Map<string, number>();
+        for (const entry of this.pendingAppends) {
+            let prevTime = pendingWriteTimes.get(entry.key);
+            if (prevTime && prevTime > entry.time) {
+                continue;
+            }
+            pendingWriteTimes.set(entry.key, entry.time);
+        }
 
+        sort(entries, x => x.time);
+        for (let entry of entries) {
             let time = entry.time;
             let prevTime = pendingWriteTimes.get(entry.key);
             if (prevTime && prevTime > time) {
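applyTransactionEntries merges on-disk entries with writes that are still pending in memory: it records the newest pending write time per key, sorts the disk entries by their own timestamps, and skips any disk entry older than a pending write for the same key. A simplified standalone sketch of that last-write-wins merge (Array.prototype.sort stands in for the package's sort(entries, x => x.time) helper, and the entry type is reduced to what the comparison needs):

```ts
// Simplified types; only the merge policy mirrors the diff.
interface Entry { key: string; time: number; value: string }

function applyEntries(
    cache: Map<string, Entry>,
    pendingAppends: Entry[],
    diskEntries: Entry[],
): void {
    // Newest pending (not yet flushed) write per key.
    const pendingWriteTimes = new Map<string, number>();
    for (const entry of pendingAppends) {
        const prev = pendingWriteTimes.get(entry.key);
        if (prev && prev > entry.time) continue;
        pendingWriteTimes.set(entry.key, entry.time);
    }

    // Oldest first, so a later disk entry for the same key overwrites an earlier one.
    const sorted = [...diskEntries].sort((a, b) => a.time - b.time);
    for (const entry of sorted) {
        const pendingTime = pendingWriteTimes.get(entry.key);
        if (pendingTime && pendingTime > entry.time) continue; // a newer local write wins
        cache.set(entry.key, entry);
    }
}
```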
@@ -347,7 +349,6 @@ export class TransactionStorage implements IStorage<Buffer> {
                 this.cache.set(entry.key, entry);
             }
         }
-        return fullFile.length;
     }
 
     private readTransactionEntry(buffer: Buffer, offset: number): {
@@ -469,11 +470,7 @@ export class TransactionStorage implements IStorage<Buffer> {
 
         // Load off disk, in case there are other writes. We still race with them, but at least
         // this reduces the race condition considerably
-
-        sortChunks(existingDiskEntries);
-        for (let entry of existingDiskEntries) {
-            await this.loadTransactionFile(entry);
-        }
+        existingDiskEntries = await this.loadAllTransactions();
 
         this.entryCount = this.cache.size;
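In this final hunk the manual sort-and-load loop is replaced by a single loadAllTransactions call, and its returned file list becomes existingDiskEntries, so the pre-write re-read and the startup load share one code path. The surrounding method is not shown in the diff; the sketch below only illustrates that call shape under assumed placeholder helpers (withFileLock, rewriteChunks):

```ts
// Hedged sketch of the "re-read before rewrite" pattern from this hunk.
// withFileLock, loadAllTransactions and rewriteChunks are placeholders; only the
// call shape (reuse the file list returned by the loader) mirrors the diff.
async function compact(
    withFileLock: <T>(fn: () => Promise<T>) => Promise<T>,
    loadAllTransactions: () => Promise<string[]>,
    rewriteChunks: (existingFiles: string[]) => Promise<void>,
): Promise<void> {
    await withFileLock(async () => {
        // Load off disk in case other writers added chunks since we last looked.
        // This narrows (but does not eliminate) the race with concurrent writers.
        const existingDiskEntries = await loadAllTransactions();

        // Act on exactly the file list the loader just saw.
        await rewriteChunks(existingDiskEntries);
    });
}
```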