sliftutils 0.34.0 → 0.35.0

package/index.d.ts CHANGED
@@ -803,13 +803,13 @@ declare module "sliftutils/storage/TransactionStorage" {
     private writeDelay;
     cache: Map<string, TransactionEntry>;
     private currentChunk;
-    private currentChunkSize;
     private entryCount;
     private static allStorage;
     constructor(rawStorage: IStorageRaw, debugName: string, writeDelay?: number);
     static compressAll(): Promise<void>;
     private init;
-    private getChunk;
+    private getCurrentChunk;
+    private onAddToChunk;
     get(key: string): Promise<Buffer | undefined>;
     set(key: string, value: Buffer): Promise<void>;
     remove(key: string): Promise<void>;
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "sliftutils",
-  "version": "0.34.0",
+  "version": "0.35.0",
   "main": "index.js",
   "license": "MIT",
   "files": [
@@ -13,13 +13,13 @@ export declare class TransactionStorage implements IStorage<Buffer> {
     private writeDelay;
     cache: Map<string, TransactionEntry>;
     private currentChunk;
-    private currentChunkSize;
     private entryCount;
     private static allStorage;
     constructor(rawStorage: IStorageRaw, debugName: string, writeDelay?: number);
     static compressAll(): Promise<void>;
     private init;
-    private getChunk;
+    private getCurrentChunk;
+    private onAddToChunk;
     get(key: string): Promise<Buffer | undefined>;
     set(key: string, value: Buffer): Promise<void>;
     remove(key: string): Promise<void>;
@@ -1,4 +1,4 @@
-import { PromiseObj, sort, throttleFunction } from "socket-function/src/misc";
+import { PromiseObj, compareArray, sort, throttleFunction, timeInMinute } from "socket-function/src/misc";
 import { IStorage, IStorageRaw } from "./IStorage";
 import { Zip } from "../misc/zip";
 import { runInSerial } from "socket-function/src/batching";
@@ -40,6 +40,7 @@ IMPORTANT! If there are multiple writers, we clobber writes from other writers w
 
 
 const FILE_CHUNK_SIZE = 1024 * 1024;
+const FILE_MAX_LIFETIME = timeInMinute * 30;
 
 const FILE_ZIP_THRESHOLD = 16 * 1024 * 1024;
 
@@ -66,11 +67,26 @@ const fileLockSection = runInSerial(async (fnc: () => Promise<void>) => {
 
 const CHUNK_EXT = ".chunk";
 const ourId = Date.now() + Math.random();
+let seqNum = 0;
+
+function getNextChunkPath(): string {
+    return `${Date.now()}_${seqNum++}_${ourId}.chunk`;
+}
+function sortChunks(chunks: string[]): string[] {
+    function getChunkParts(chunk: string): unknown[] {
+        const parts = chunk.split("_");
+        return parts.map(part => +part);
+    }
+    return chunks.sort((a, b) => compareArray(getChunkParts(a), getChunkParts(b)));
+}
 
 export class TransactionStorage implements IStorage<Buffer> {
     public cache: Map<string, TransactionEntry> = new Map();
-    private currentChunk = 0;
-    private currentChunkSize = 0;
+    private currentChunk: {
+        path: string;
+        size: number;
+        timeCreated: number;
+    } | undefined = undefined;
     private entryCount = 0;
 
     private static allStorage: TransactionStorage[] = [];
@@ -91,7 +107,26 @@ export class TransactionStorage implements IStorage<Buffer> {
 
     private init: Promise<void> | undefined = this.loadAllTransactions();
 
-    private getChunk(chunk: number) { return `${chunk}_${ourId}${CHUNK_EXT}`; }
+    private getCurrentChunk(): string {
+        if (this.currentChunk && this.currentChunk.timeCreated < Date.now() - FILE_MAX_LIFETIME) {
+            this.currentChunk = undefined;
+        }
+        if (!this.currentChunk) {
+            this.currentChunk = {
+                path: getNextChunkPath(),
+                size: 0,
+                timeCreated: Date.now()
+            };
+        }
+        return this.currentChunk.path;
+    }
+    private onAddToChunk(size: number): void {
+        if (!this.currentChunk) return;
+        this.currentChunk.size += size;
+        if (this.currentChunk.size >= FILE_CHUNK_SIZE) {
+            this.currentChunk = undefined;
+        }
+    }
 
     public async get(key: string): Promise<Buffer | undefined> {
         await this.init;
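Note on the hunk above: `getCurrentChunk`/`onAddToChunk` replace the old `currentChunk` counter and `currentChunkSize` field with a single `currentChunk` object that is dropped once it reaches `FILE_CHUNK_SIZE` bytes or exceeds `FILE_MAX_LIFETIME` (30 minutes) of age. Below is a minimal standalone sketch of that rotation policy; the names `ChunkState`, `makeChunkName`, `currentChunkPath`, and `recordAppend` are illustrative, not package exports.

```ts
// Standalone sketch of the size-or-age chunk rotation introduced in 0.35.0.
// ChunkState, makeChunkName, currentChunkPath, and recordAppend are
// illustrative names, not part of sliftutils.
const FILE_CHUNK_SIZE = 1024 * 1024;        // 1 MiB, as in the diff
const FILE_MAX_LIFETIME = 30 * 60 * 1000;   // 30 minutes, as in the diff

type ChunkState = { path: string; size: number; timeCreated: number };

let current: ChunkState | undefined;
let seq = 0;
const writerId = Date.now() + Math.random();

function makeChunkName(): string {
    return `${Date.now()}_${seq++}_${writerId}.chunk`;
}

// Returns the chunk file name to append to, allocating a fresh one when the
// current chunk has been retired or is older than FILE_MAX_LIFETIME.
function currentChunkPath(): string {
    if (current && current.timeCreated < Date.now() - FILE_MAX_LIFETIME) {
        current = undefined;
    }
    if (!current) {
        current = { path: makeChunkName(), size: 0, timeCreated: Date.now() };
    }
    return current.path;
}

// Records an append; once the chunk passes the size threshold, the next
// call to currentChunkPath() starts a new file.
function recordAppend(bytes: number): void {
    if (!current) return;
    current.size += bytes;
    if (current.size >= FILE_CHUNK_SIZE) {
        current = undefined;
    }
}
```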
@@ -173,18 +208,14 @@ export class TransactionStorage implements IStorage<Buffer> {
 
         let newChunks = this.chunkBuffers(buffers);
         for (let chunk of newChunks) {
-            let file = this.getChunk(this.currentChunk);
+            let file = this.getCurrentChunk();
             if (!await this.rawStorage.get(file)) {
                 let { header, headerBuffer } = this.getHeader(false);
                 await this.rawStorage.set(file, headerBuffer);
             }
             let content = chunk.buffer;
             await this.rawStorage.append(file, content);
-            this.currentChunkSize += content.length;
-            if (this.currentChunkSize >= FILE_CHUNK_SIZE) {
-                this.currentChunk++;
-                this.currentChunkSize = 0;
-            }
+            this.onAddToChunk(content.length);
         }
 
         await this.compressTransactionLog();
@@ -219,21 +250,16 @@ export class TransactionStorage implements IStorage<Buffer> {
         const keys = await this.rawStorage.getKeys();
         const transactionFiles = keys.filter(key => key.endsWith(CHUNK_EXT));
 
-        sort(transactionFiles, x => parseInt(x));
+        sortChunks(transactionFiles);
 
         let size = 0;
         for (const file of transactionFiles) {
-            let chunk = parseInt(file);
             let curSize = await this.loadTransactionFile(file);
-            if (chunk >= this.currentChunk) {
-                this.currentChunk = chunk;
-                this.currentChunkSize = curSize;
-            }
             size += curSize;
         }
         time = Date.now() - time;
         if (time > 50) {
-            console.log(`Loaded ${this.debugName} in ${formatTime(time)}, ${formatNumber(this.cache.size)} keys, ${formatNumber(size)}B`);
+            console.log(`Loaded ${this.debugName} in ${formatTime(time)}, ${formatNumber(this.cache.size)} keys, from ${formatNumber(transactionFiles.length)} files, ${formatNumber(size)}B`, transactionFiles);
         }
 
         this.init = undefined;
@@ -424,14 +450,13 @@ export class TransactionStorage implements IStorage<Buffer> {
         // Load off disk, in case there are other writes. We still race with them, but at least
         // this reduces the race condition considerably
 
-        sort(existingDiskEntries, x => parseInt(x));
+        sortChunks(existingDiskEntries);
         for (let entry of existingDiskEntries) {
            await this.loadTransactionFile(entry);
         }
 
         this.entryCount = this.cache.size;
 
-        let nextStart = Math.max(...existingDiskEntries.map(x => parseInt(x))) + 1;
 
         let buffers: Buffer[] = [];
         for (const entry of this.cache.values()) {
@@ -441,9 +466,9 @@ export class TransactionStorage implements IStorage<Buffer> {
 
         let newChunks = this.chunkBuffers(buffers);
 
-        let curChunk = nextStart;
         for (let chunk of newChunks) {
-            let file = this.getChunk(curChunk++);
+            let file = this.getCurrentChunk();
+            this.currentChunk = undefined;
             let content = chunk.buffer;
             let { header, headerBuffer } = this.getHeader(
                 // AND, never compress the last one, otherwise we can't append to it!
@@ -462,8 +487,6 @@ export class TransactionStorage implements IStorage<Buffer> {
         for (const file of existingDiskEntries) {
             await this.rawStorage.remove(file);
         }
-
-        this.currentChunk = curChunk;
     }
 
     public async reset() {
@@ -477,8 +500,7 @@ export class TransactionStorage implements IStorage<Buffer> {
 
             this.pendingAppends = [];
             this.cache.clear();
-            this.currentChunk = 0;
-            this.currentChunkSize = 0;
+            this.currentChunk = undefined;
             this.entryCount = 0;
         });
     }
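Chunk files are now named `${Date.now()}_${seqNum}_${ourId}.chunk` instead of using an incrementing counter, and they are ordered by comparing each underscore-separated numeric part via `compareArray` from `socket-function/src/misc`. The sketch below mimics that ordering with a local comparator; `chunkParts`, `compareNumericParts`, and `sortChunkNames` are illustrative stand-ins, not part of the package.

```ts
// Illustrative ordering for the new "<timestamp>_<seq>_<writerId>.chunk" names
// introduced in 0.35.0. compareNumericParts stands in for compareArray from
// "socket-function/src/misc".
function chunkParts(name: string): number[] {
    // "<timestamp>_<seq>_<writerId>.chunk" -> [timestamp, seq, NaN];
    // the trailing "<writerId>.chunk" segment parses to NaN and compares as equal here.
    return name.split("_").map(part => +part);
}

function compareNumericParts(a: number[], b: number[]): number {
    const len = Math.min(a.length, b.length);
    for (let i = 0; i < len; i++) {
        if (a[i] < b[i]) return -1;
        if (a[i] > b[i]) return 1;
    }
    return a.length - b.length;
}

function sortChunkNames(names: string[]): string[] {
    return names.sort((a, b) => compareNumericParts(chunkParts(a), chunkParts(b)));
}

// Example: older timestamps first, then lower sequence numbers.
console.log(sortChunkNames([
    "1700000000500_1_42.007.chunk",
    "1700000000500_0_42.007.chunk",
    "1700000000100_5_42.007.chunk",
]));
// -> ["1700000000100_5_42.007.chunk", "1700000000500_0_42.007.chunk", "1700000000500_1_42.007.chunk"]
```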