sliftutils 0.77.0 → 0.79.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.d.ts +6 -1
- package/misc/zip.d.ts +6 -1
- package/misc/zip.ts +73 -4
- package/package.json +1 -1
- package/storage/TransactionStorage.ts +19 -17
- package/web/Page.tsx +0 -1
package/index.d.ts
CHANGED
@@ -145,9 +145,14 @@ declare module "sliftutils/misc/yamlBase" {
 declare module "sliftutils/misc/zip" {
     /// <reference types="node" />
     /// <reference types="node" />
+    import { MaybePromise } from "socket-function/src/types";
     export declare class Zip {
         static gzip(buffer: Buffer, level?: number): Promise<Buffer>;
-        static gunzip(buffer: Buffer):
+        static gunzip(buffer: Buffer): MaybePromise<Buffer>;
+        static gunzipAsyncBase(buffer: Buffer): Promise<Buffer>;
+        static gunzipUntracked(buffer: Buffer): Promise<Buffer>;
+        static gunzipSync(buffer: Buffer): Buffer;
+        private static gunzipUntrackedSync;
         static gunzipBatch(buffers: Buffer[]): Promise<Buffer[]>;
     }

package/misc/zip.d.ts
CHANGED
@@ -1,7 +1,12 @@
 /// <reference types="node" />
 /// <reference types="node" />
+import { MaybePromise } from "socket-function/src/types";
 export declare class Zip {
     static gzip(buffer: Buffer, level?: number): Promise<Buffer>;
-    static gunzip(buffer: Buffer):
+    static gunzip(buffer: Buffer): MaybePromise<Buffer>;
+    static gunzipAsyncBase(buffer: Buffer): Promise<Buffer>;
+    static gunzipUntracked(buffer: Buffer): Promise<Buffer>;
+    static gunzipSync(buffer: Buffer): Buffer;
+    private static gunzipUntrackedSync;
     static gunzipBatch(buffers: Buffer[]): Promise<Buffer[]>;
 }
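The key API change in both declaration files is that Zip.gunzip now returns MaybePromise<Buffer> rather than Promise<Buffer>, with explicit sync and untracked variants exposed alongside it. A minimal consumer sketch of the new surface (the helper functions below are illustrative, not part of the package):

    import { Zip } from "sliftutils/misc/zip";

    // Awaiting a MaybePromise<Buffer> works whether a Buffer or a Promise<Buffer>
    // comes back, so existing async call sites keep working unchanged.
    async function readPayload(gzipped: Buffer): Promise<string> {
        const raw = await Zip.gunzip(gzipped);
        return raw.toString("utf8");
    }

    // Call sites that must stay synchronous (and can accept blocking) can use gunzipSync.
    function readPayloadSync(gzipped: Buffer): string {
        return Zip.gunzipSync(gzipped).toString("utf8");
    }

    // Many buffers at once go through gunzipBatch, which is instrumented once
    // per batch rather than once per buffer.
    async function readMany(gzipped: Buffer[]): Promise<Buffer[]> {
        return Zip.gunzipBatch(gzipped);
    }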
package/misc/zip.ts
CHANGED
@@ -1,18 +1,86 @@
+import { isNode } from "socket-function/src/misc";
+import { measureFnc } from "socket-function/src/profiling/measure";
+import zlib from "zlib";
+import * as pako from "pako";
 
+import { setFlag } from "socket-function/require/compileFlags";
+import { MaybePromise } from "socket-function/src/types";
+setFlag(require, "pako", "allowclient", true);
+
+const SYNC_THRESHOLD_BYTES = 100_000_000;
 
 export class Zip {
+    @measureFnc
     public static async gzip(buffer: Buffer, level?: number): Promise<Buffer> {
-
+        if (isNode()) {
+            return new Promise((resolve, reject) => {
+                zlib.gzip(buffer, { level }, (err: any, result: Buffer) => {
+                    if (err) reject(err);
+                    else resolve(result);
+                });
+            });
+        } else {
+            // @ts-ignore
+            return await doStream(new CompressionStream("gzip"), buffer);
+        }
+    }
+    public static gunzip(buffer: Buffer): MaybePromise<Buffer> {
+        // Switch to the synchronous version if the buffer is small. This is a lot faster in Node.js and clientside.
+        // - On tests of random small amounts of data, this seems to be up to 7X faster (on node). However, on non-random data, on the actual data we're using, it seems to be almost 50 times faster. So... definitely worth it...
+        if (buffer.length < SYNC_THRESHOLD_BYTES) {
+            let time = Date.now();
+            let result = Zip.gunzipSync(buffer);
+            let duration = Date.now() - time;
+            if (duration > 50) {
+                // Wait, so we don't lock up the main thread. And if we already wait it 50ms, then waiting for one frame is marginal, even client-side.
+                return ((async () => {
+                    await new Promise(resolve => setTimeout(resolve, 0));
+                    return result;
+                }))();
+            }
+            return result;
+        }
+        return Zip.gunzipAsyncBase(buffer);
+    }
+    @measureFnc
+    public static async gunzipAsyncBase(buffer: Buffer): Promise<Buffer> {
+        return Zip.gunzipUntracked(buffer);
     }
-
+    // A base function, so we can avoid instrumentation for batch calls
+    public static async gunzipUntracked(buffer: Buffer): Promise<Buffer> {
+        if (isNode()) {
+            return await new Promise<Buffer>((resolve, reject) => {
+                zlib.gunzip(buffer, (err: any, result: Buffer) => {
+                    if (err) reject(err);
+                    else resolve(result);
+                });
+            });
+        }
         return await doStream(new DecompressionStream("gzip"), buffer);
     }
 
+    @measureFnc
+    public static gunzipSync(buffer: Buffer): Buffer {
+        return this.gunzipUntrackedSync(buffer);
+    }
+    private static gunzipUntrackedSync(buffer: Buffer): Buffer {
+        if (isNode()) {
+            return Buffer.from(zlib.gunzipSync(buffer));
+        }
+        return Buffer.from(pako.inflate(buffer));
+    }
+
+    @measureFnc
     public static async gunzipBatch(buffers: Buffer[]): Promise<Buffer[]> {
         let time = Date.now();
-        buffers = await Promise.all(buffers.map(
+        buffers = await Promise.all(buffers.map(x => {
+            if (x.length < SYNC_THRESHOLD_BYTES) {
+                return this.gunzipUntrackedSync(x);
+            }
+            return this.gunzipUntracked(x);
+        }));
         time = Date.now() - time;
-
+        let totalSize = buffers.reduce((acc, buffer) => acc + buffer.length, 0);
         //console.log(`Gunzip ${formatNumber(totalSize)}B at ${formatNumber(totalSize / time * 1000)}B/s`);
         return buffers;
     }

@@ -34,4 +102,5 @@ async function doStream(stream: GenericTransformStream, buffer: Buffer): Promise
         }
         outputBuffers.push(Buffer.from(value));
     }
+
 }
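The new gunzip path dispatches on buffer size: anything under SYNC_THRESHOLD_BYTES is decompressed synchronously (zlib.gunzipSync on Node, pako.inflate client-side), and the result is only wrapped in a Promise when the synchronous call took more than 50 ms, so the caller yields a frame back to the event loop. A standalone, Node-only sketch of that dispatch pattern, independent of sliftutils internals (the threshold and the 50 ms yield heuristic are taken from the diff above; the function name is hypothetical):

    import zlib from "zlib";

    type MaybePromise<T> = T | Promise<T>;

    const SYNC_THRESHOLD_BYTES = 100_000_000;

    function gunzipAdaptive(buffer: Buffer): MaybePromise<Buffer> {
        if (buffer.length >= SYNC_THRESHOLD_BYTES) {
            // Very large buffers: use the callback-based async API so the event
            // loop is never blocked for the whole decompression.
            return new Promise<Buffer>((resolve, reject) => {
                zlib.gunzip(buffer, (err, result) => err ? reject(err) : resolve(result));
            });
        }
        const start = Date.now();
        const result = Buffer.from(zlib.gunzipSync(buffer));
        if (Date.now() - start > 50) {
            // The sync call was slow: hand the data back on the next macrotask so
            // the caller's thread gets a turn before continuing.
            return new Promise<Buffer>(resolve => setTimeout(() => resolve(result), 0));
        }
        return result;
    }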
package/package.json
CHANGED
package/storage/TransactionStorage.ts
CHANGED
@@ -47,7 +47,6 @@ const FILE_MAX_LIFETIME = timeInMinute * 30;
 
 const FILE_ZIP_THRESHOLD = 16 * 1024 * 1024;
 
-const ZIP_THRESHOLD = 4096;
 const START_BYTES = Buffer.from([236, 49, 112, 121, 27, 127, 227, 63]);
 const END_BYTES = Buffer.from([220, 111, 243, 202, 200, 79, 213, 63]);
 // Delay writes, so we batch better, and thrash the disk less

@@ -56,6 +55,7 @@ const WRITE_DELAY = 500;
 interface TransactionEntry {
     key: string;
     value: Buffer | undefined;
+    // NOTE: We disabled per-entry zipping. Either use a raw collection so you don't have to load all of them into memory or shard your collection. Zipping is only a band-aid solution for memory issues. ALSO, it makes loading slower, which makes all other applications that don't need zipping worse.
     isZipped: boolean;
     time: number;
 }

@@ -143,9 +143,8 @@ export class TransactionStorage implements IStorage<Buffer> {
     public async get(key: string): Promise<Buffer | undefined> {
         await this.init;
         const value = this.cache.get(key);
-        if (value && value.isZipped
-
-            value.isZipped = false;
+        if (value && value.isZipped) {
+            throw new Error(`Transactions should be unzipped on file ready, so this state should be impossible. Key: ${key}`);
         }
         return value?.value;
     }

@@ -157,11 +156,6 @@ export class TransactionStorage implements IStorage<Buffer> {
         let entry: TransactionEntry = { key, value, isZipped: false, time: 0 };
         this.cache.set(key, entry);
 
-        if (value.length >= ZIP_THRESHOLD) {
-            value = await Zip.gzip(value);
-            entry.value = value;
-            entry.isZipped = true;
-        }
         await this.pushAppend(entry);
     }
 

@@ -302,16 +296,17 @@ export class TransactionStorage implements IStorage<Buffer> {
             this.diskFiles.set(file, info?.lastModified ?? Date.now());
         }
 
+        let size = { value: 0 };
         let entryList: TransactionEntry[][] = [];
         for (let file of transactionFiles) {
-            entryList.push(await this.parseTransactionFile(file));
+            entryList.push(await this.parseTransactionFile(file, size));
         }
         let entries = entryList.flat();
-        this.applyTransactionEntries(entries);
+        this.applyTransactionEntries(entries, initialLoad);
 
         time = Date.now() - time;
         if (time > 50) {
-            console.log(`Loaded ${this.debugName} in ${formatTime(time)}, ${formatNumber(this.cache.size)} keys, from ${formatNumber(transactionFiles.length)} files, entries ${formatNumber(entries.length)}B`, transactionFiles);
+            console.log(`Loaded ${this.debugName} in ${formatTime(time)}, ${formatNumber(this.cache.size)} keys, from ${formatNumber(transactionFiles.length)} files, entries ${formatNumber(entries.length)}, total size ${formatNumber(size.value)}B`, transactionFiles);
         }
 
         this.init = undefined;

@@ -319,13 +314,14 @@ export class TransactionStorage implements IStorage<Buffer> {
     }
 
     // ONLY call this inside of loadAllTransactions
-    private async parseTransactionFile(filename: string): Promise<TransactionEntry[]> {
+    private async parseTransactionFile(filename: string, size: { value: number }): Promise<TransactionEntry[]> {
         const fullFile = await this.rawStorage.get(filename);
         if (!fullFile) return [];
         if (fullFile.length < 4) {
             //console.error(`Transaction in ${this.debugName} file ${filename} is too small, skipping`);
             return [];
         }
+        size.value += fullFile.length;
         let headerSize = fullFile.readUInt32LE(0);
         let headerBuffer = fullFile.slice(4, 4 + headerSize);
         let header: TransactionHeader;

@@ -375,7 +371,7 @@ export class TransactionStorage implements IStorage<Buffer> {
         }
         return entries;
     }
-    private applyTransactionEntries(entries: TransactionEntry[]): void {
+    private applyTransactionEntries(entries: TransactionEntry[], initialLoad?: boolean): void {
         let pendingWriteTimes = new Map<string, number>();
         for (const entry of this.pendingAppends) {
             let prevTime = pendingWriteTimes.get(entry.key);

@@ -404,9 +400,9 @@ export class TransactionStorage implements IStorage<Buffer> {
                 anyChanged = true;
                 this.cache.delete(entry.key);
             } else {
-                if (!anyChanged) {
+                if (!anyChanged && !initialLoad) {
                     let prev = this.cache.get(entry.key);
-                    if (!prev
+                    if (!prev) {
                         anyChanged = true;
                     } else {
                         if (!prev.value) {

@@ -429,7 +425,8 @@ export class TransactionStorage implements IStorage<Buffer> {
             }
         }
 
-        if (anyChanged) {
+        if (anyChanged && !initialLoad) {
+            console.warn(`Transaction storage ${this.debugName} changed on disk, triggering full download from disk`);
             for (const callback of this.resyncCallbacks) {
                 try {
                     callback();

@@ -467,6 +464,11 @@ export class TransactionStorage implements IStorage<Buffer> {
         let isZipped = (flags & 1) === 1;
         let isDelete = (flags & 2) === 2;
 
+        if (isZipped) {
+            value = Zip.gunzipSync(value);
+            isZipped = false;
+        }
+
         let entry: TransactionEntry = { key, value, isZipped, time };
         if (isDelete) {
             entry.value = undefined;