querysub 0.41.0 → 0.43.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
@@ -0,0 +1,64 @@
+import { SocketFunctionClientHook } from "socket-function/SocketFunctionTypes";
+import { cache, lazy } from "socket-function/src/caching";
+import { decodeCborx, encodeCborx } from "../../misc/cloneHelpers";
+import { sha256 } from "js-sha256";
+import { errorToUndefined } from "../../errors";
+import { isNode } from "typesafecss";
+
+let getRootDirectory = lazy(async () => {
+    await navigator.storage.persist();
+    return navigator.storage.getDirectory();
+});
+
+export const getBrowserLargeFileCache = cache((name: string) => new BrowserLargeFileCache(name));
+
+export class BrowserLargeFileCache {
+    constructor(private name: string) { }
+
+    private getDir = lazy(async () => {
+        let root = await getRootDirectory();
+        return await root.getDirectoryHandle(this.name, { create: true });
+    });
+
+    public async set(key: string, value: Buffer) {
+        let dir = await this.getDir();
+        let file = await dir.getFileHandle(key, { create: true });
+        let writable = await file.createWritable();
+        await writable.write(value);
+        await writable.close();
+    }
+    public async get(key: string): Promise<Buffer | undefined> {
+        let dir = await this.getDir();
+        try {
+            let file = await dir.getFileHandle(key, { create: false });
+            let readable = await file.getFile();
+            return Buffer.from(await readable.arrayBuffer());
+        } catch {
+            return undefined;
+        }
+    }
+}
+
+/** Cache key = [args, functionName, classGuid]
+ *    - Not nodeId, as that changes so frequently that caching based on server is not usually useful.
+ *    - If you want to cache based on nodeId, pass it as an unused arg.
+ *
+ *  Uses a file per key, and never cleans them up. So... basically, this sucks, but it should
+ *      work for a few specific functions with large and consistent values.
+ */
+export const cacheCalls: SocketFunctionClientHook = async config => {
+    if (isNode()) return;
+
+    let { args, functionName, classGuid } = config.call;
+    let bucket = sha256(JSON.stringify({ functionName, classGuid }));
+    let cache = getBrowserLargeFileCache(bucket);
+    let key = sha256(encodeCborx(args));
+    let cachedValue = await cache.get(key);
+    if (cachedValue) {
+        config.overrideResult = decodeCborx(cachedValue);
+        return;
+    }
+    config.onResult.push(async (result) => {
+        await errorToUndefined(cache.set(key, encodeCborx(result)));
+    });
+};
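To make the key scheme above concrete: each (functionName, classGuid) pair gets its own OPFS directory (the sha256 "bucket"), and each distinct args tuple gets one file inside it holding the cborx-encoded result. A sketch of the derivation, using the same helpers the new file imports; the call values are made up for illustration:

    import { sha256 } from "js-sha256";
    import { encodeCborx } from "../../misc/cloneHelpers";

    // Hypothetical call, for illustration only.
    let call = {
        classGuid: "example-class-guid",
        functionName: "getRemoteLogBuffer",
        args: ["someNodeId", "/logs/123-456.zip"],
    };

    // One OPFS directory per (functionName, classGuid) pair...
    let bucket = sha256(JSON.stringify({ functionName: call.functionName, classGuid: call.classGuid }));
    // ...and one file per args tuple inside that directory.
    let key = sha256(encodeCborx(call.args));
    // Resulting OPFS layout: <bucket>/<key> -> cborx-encoded result buffer.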
@@ -10,7 +10,7 @@ import { getSyncedController } from "../../library-components/SyncedController";
 import { getBrowserUrlNode } from "../../-f-node-discovery/NodeDiscovery";
 import { formatDateTime, formatNiceDateTime, formatNumber, formatTime } from "socket-function/src/formatting/format";
 import { Anchor } from "../../library-components/ATag";
-import { cache, cacheLimited, cacheShallowConfigArgEqual } from "socket-function/src/caching";
+import { cache, cacheLimited, cacheShallowConfigArgEqual, lazy } from "socket-function/src/caching";
 import { InputLabel, InputLabelURL } from "../../library-components/InputLabel";
 import { DropdownSelector } from "../../library-components/DropdownSelector";
 import { Table, TableType } from "../../5-diagnostics/Table";
@@ -30,6 +30,8 @@ import { getNodeIdLocation } from "socket-function/src/nodeCache";
 import { decodeNodeId, encodeNodeId, getMachineId } from "../../-a-auth/certs";
 import { Button } from "../../library-components/Button";
 import { TimeRangeSelector } from "../../library-components/TimeRangeSelector";
+import { BrowserLargeFileCache, cacheCalls } from "./BrowserLargeFileCache";
+import { Zip } from "../../zip";
 
 // TODO: Realtime log mode, by reading from the previous length forward, to add buffers
 // to what we already read.
@@ -140,6 +142,7 @@ export class DiskLoggerPage extends qreact.Component {
             if (file.endTime > lastTime) lastTime = file.endTime;
             let buffer = controller.getRemoteLogBuffer(file.nodeId, file.path);
             if (buffer) {
+                buffer = Zip.gunzipSync(buffer);
                 buffers.push(buffer);
             } else {
                 loadingCount++;
@@ -541,7 +544,14 @@ class DiskLoggerControllerBase {
         return await getLogFiles();
     }
     public async getLogBuffer(path: string): Promise<Buffer | undefined> {
-        return await getLogBuffer(path);
+        // Always compress it. If it is in progress it can't be compressed on disk, but
+        // the compression ratio is so high it's worth it to compress it dynamically now,
+        // to speed up the network transfer.
+        let buffer = await getLogBuffer(path);
+        if (buffer && path.endsWith(".log")) {
+            buffer = await Zip.gzip(buffer);
+        }
+        return buffer;
     }
 }
 
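Combined with the unconditional Zip.gunzipSync(buffer) added on the client above, every log buffer now crosses the network as gzip data: finished .zip files are already gzipped on disk and pass through untouched, while in-progress .log files are compressed on the fly. A simplified sketch of the resulting round trip; readLogFromDisk is a hypothetical stand-in for the module's real getLogBuffer helper:

    import { Zip } from "../../zip";

    // Hypothetical stand-in for the module's real disk read.
    declare function readLogFromDisk(path: string): Promise<Buffer | undefined>;

    // Server side (sketch): whatever leaves this function is gzip data.
    async function serveLogBuffer(path: string): Promise<Buffer | undefined> {
        let buffer = await readLogFromDisk(path);
        if (buffer && path.endsWith(".log")) {
            buffer = await Zip.gzip(buffer); // live logs compressed dynamically
        }
        return buffer; // .zip files were written as gzip data, so no extra work
    }

    // Client side (sketch): safe to gunzip unconditionally.
    function consumeLogBuffer(raw: Buffer): Buffer {
        return Zip.gunzipSync(raw);
    }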
@@ -550,7 +560,9 @@ export const DiskLoggerController = SocketFunction.register(
     new DiskLoggerControllerBase(),
     () => ({
         getRemoteLogFiles: {},
-        getRemoteLogBuffer: {},
+        getRemoteLogBuffer: {
+            clientHooks: [cacheCalls]
+        },
 
         getLogFiles: {},
         getLogBuffer: {},
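Only getRemoteLogBuffer gets the hook, which matches the caveat in the new file: the cache never evicts, so it only suits functions whose values are large and consistent, and compressed buffers for finished log windows fit that. For reference, a minimal hook sketch that touches only the fields this diff itself uses (config.call, config.overrideResult, config.onResult); any further shape of the hook API is an assumption:

    import { SocketFunctionClientHook } from "socket-function/SocketFunctionTypes";

    // Minimal client hook sketch: log each call and its completion.
    const logCalls: SocketFunctionClientHook = async config => {
        let { functionName, classGuid } = config.call;
        console.log(`calling ${classGuid}.${functionName}`);
        config.onResult.push(async result => {
            console.log(`${functionName} resolved`);
        });
    };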
@@ -1,5 +1,5 @@
 
-import { batchFunction, runInfinitePoll } from "socket-function/src/batching";
+import { batchFunction, runInfinitePoll, runInfinitePollCallAtStart } from "socket-function/src/batching";
 import { nextId, timeInDay, timeInHour } from "socket-function/src/misc";
 import { getStorageDir, getStorageFolder } from "../../fs";
 import fs from "fs";
@@ -8,6 +8,7 @@ import { SizeLimiter } from "../SizeLimiter";
 import { SocketFunction } from "socket-function/SocketFunction";
 import { isNode } from "typesafecss";
 import { logGitHashes } from "./logGitHashes";
+import { Zip } from "../../zip";
 
 if (isNode()) {
     // Delayed setup, as we depend on diskLogger early, and we don't want to force high level
@@ -105,6 +106,7 @@ export type LogFile = {
 export async function getLogFiles(): Promise<LogFile[]> {
     let files = await fs.promises.readdir(folder);
     let paths = files.map(file => folder + file);
+    paths = paths.filter(x => x.endsWith(".log") || x.endsWith(".zip"));
     let objs = paths.map(decodeLogFileName);
     for (let obj of objs) {
         try {
@@ -214,7 +216,8 @@ function packageLogObj(args: unknown[]): LogObj {
 
 function decodeLogFileName(path: string): LogFile {
     let name = path.split("/").pop()!;
-    let
+    let withoutExt = name.split(".").slice(0, -1).join(".");
+    let [start, end] = withoutExt.split("-").map(Number);
     return {
         startTime: start,
         endTime: end,
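The rewritten parser strips whatever extension is present before splitting on "-", which is what lets the .zip files produced by the compression pass below decode exactly like .log files. A worked example with illustrative timestamps:

    let name = "1699999200000-1700002800000.zip";
    let withoutExt = name.split(".").slice(0, -1).join("."); // "1699999200000-1700002800000"
    let [start, end] = withoutExt.split("-").map(Number);
    console.log(start, end); // 1699999200000 1700002800000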
@@ -231,7 +234,16 @@ const logBase = batchFunction({ delay: 0 }, async function logBase(logObjList: L
 
     let byLogPath = new Map<string, LogObj[]>();
     for (let log of logs) {
-        let logFile = getLogFileName(log);
+        function createLogFileName(logObj: LogObj): LogFile {
+            let start = Math.floor(logObj.time / LOG_FILE_DURATION) * LOG_FILE_DURATION;
+            let startTime = start;
+            let endTime = start + LOG_FILE_DURATION;
+            let name = startTime + "-" + endTime + ".log";
+            let path = folder + name;
+            let logFile: LogFile = { startTime, endTime, name, path, size: 0, };
+            return logFile;
+        }
+        let logFile = createLogFileName(log);
         let list = byLogPath.get(logFile.path);
         if (!list) {
             list = [];
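The inlined bucketing math floors each log's timestamp to a LOG_FILE_DURATION boundary, so every log inside the same window maps to the same file path and is appended to the same file. A worked example, assuming for illustration that LOG_FILE_DURATION is one hour in milliseconds (its real value is defined elsewhere in this module):

    const LOG_FILE_DURATION = 60 * 60 * 1000; // assumed value, for this example only

    let time = 1700001234567;
    let start = Math.floor(time / LOG_FILE_DURATION) * LOG_FILE_DURATION; // 1699999200000
    let end = start + LOG_FILE_DURATION; // 1700002800000
    let name = start + "-" + end + ".log"; // "1699999200000-1700002800000.log"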
@@ -268,18 +280,9 @@ const logBase = batchFunction({ delay: 0 }, async function logBase(logObjList: L
     }
 });
 
-function getLogFileName(logObj: LogObj): LogFile {
-    let start = Math.floor(logObj.time / LOG_FILE_DURATION) * LOG_FILE_DURATION;
-    let startTime = start;
-    let endTime = start + LOG_FILE_DURATION;
-    let name = startTime + "-" + endTime + ".log";
-    let path = folder + name;
-    let logFile: LogFile = { startTime, endTime, name, path, size: 0, };
-    return logFile;
-}
 
 if (isNode()) {
-    runInfinitePoll(timeInHour, async function
+    runInfinitePoll(timeInHour, async function compressLogs() {
         // Maintain our size restrictions
         let logFiles = await fs.promises.readdir(folder);
         let objs: { time: number; bytes: number; path: string; }[] = [];
@@ -293,6 +296,23 @@ if (isNode()) {
             await fs.promises.unlink(file.path);
         }
     });
+    // Wait a random time, so we hopefully don't synchronize with any other services on this machine
+    void runInfinitePollCallAtStart(timeInHour + (1 + Math.random()), async function compressOldLogs() {
+        let logFiles = await fs.promises.readdir(folder);
+        let compressTime = Date.now() - LOG_FILE_DURATION * 2;
+        for (let file of logFiles) {
+            if (!file.endsWith(".log")) continue;
+            let path = folder + file;
+            if (decodeLogFileName(path).endTime > compressTime) continue;
+            let basePath = path.split(".").slice(0, -1).join(".");
+            let buffer = await fs.promises.readFile(path);
+            buffer = await Zip.gzip(buffer);
+            let tempPath = basePath + Math.random() + ".temp";
+            await fs.promises.writeFile(tempPath, buffer);
+            await fs.promises.rename(tempPath, basePath + ".zip");
+            await fs.promises.unlink(path);
+        }
+    });
 }
 
 /*
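The write-to-temp-then-rename sequence means a reader never observes a half-written .zip, since rename() is atomic within a single filesystem; the transient .temp files are also why getLogFiles above now filters to .log/.zip before decoding names. The idiom in isolation, with atomicWrite as a hypothetical name:

    import fs from "fs";

    // Hypothetical helper showing the idiom by itself.
    async function atomicWrite(finalPath: string, data: Buffer): Promise<void> {
        let tempPath = finalPath + "." + Math.random() + ".temp";
        await fs.promises.writeFile(tempPath, data); // visible only under the temp name
        await fs.promises.rename(tempPath, finalPath); // atomically publish the finished file
    }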
package/src/zip.ts
CHANGED
@@ -61,6 +61,14 @@ export class Zip {
         }
     }
 
+    public static gunzipSync(buffer: Buffer): Buffer {
+        if (isNode()) {
+            return Buffer.from(zlib.gunzipSync(buffer));
+        } else {
+            return Buffer.from(pako.inflate(buffer));
+        }
+    }
+
     @measureFnc
     public static async gunzipBatch(buffers: Buffer[]): Promise<Buffer[]> {
         let time = Date.now();
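The new sync variant mirrors the Node/browser split presumably used by the rest of this class: zlib in Node, pako in the browser, relying on pako.inflate's default header auto-detection to accept gzip data. A round-trip sanity check, assuming Zip.gzip behaves as the other hunks in this diff use it:

    import { Zip } from "../../zip"; // path as imported elsewhere in this diff

    async function roundTrip() {
        let original = Buffer.from("log line\n".repeat(1000)); // highly compressible
        let zipped = await Zip.gzip(original);
        let restored = Zip.gunzipSync(zipped); // zlib in Node, pako in the browser
        console.log(zipped.length < original.length, restored.equals(original)); // true true
    }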