querysub 0.324.0 → 0.325.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +3 -2
- package/src/-a-auth/certs.ts +2 -1
- package/src/-d-trust/NetworkTrust2.ts +2 -4
- package/src/-e-certs/EdgeCertController.ts +13 -4
- package/src/0-path-value-core/archiveLocks/ArchiveLocks2.ts +9 -5
- package/src/4-querysub/Querysub.ts +3 -1
- package/src/diagnostics/logs/FastArchiveAppendable.ts +5 -3
- package/src/diagnostics/logs/FastArchiveViewer.tsx +43 -32
- package/src/diagnostics/logs/LogViewer2.tsx +35 -34
- package/src/diagnostics/logs/TimeRangeSelector.tsx +18 -2
- package/src/diagnostics/logs/errorNotifications/ErrorNotificationController.ts +171 -34
- package/src/diagnostics/logs/errorNotifications/ErrorSuppressionUI.tsx +13 -7
- package/src/diagnostics/logs/errorNotifications/ErrorWarning.tsx +16 -4
- package/src/diagnostics/logs/errorNotifications/errorWatchEntry.tsx +63 -0
- package/src/diagnostics/logs/lifeCycleAnalysis/spec.md +19 -28
- package/src/diagnostics/managementPages.tsx +2 -1
- package/src/functional/SocketChannel.ts +5 -1
- package/src/library-components/ATag.tsx +1 -0
- package/src/library-components/SyncedController.ts +6 -3
- package/src/library-components/SyncedControllerLoadingIndicator.tsx +3 -2
- package/src/library-components/URLParam.ts +35 -5
- package/src/library-components/niceStringify.ts +1 -1
- package/src/library-components/urlResetGroups.ts +14 -0
- package/src/misc/formatJSX.tsx +7 -1
- package/testEntry2.ts +16 -5
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "querysub",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.325.0",
|
|
4
4
|
"main": "index.js",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"note1": "note on node-forge fork, see https://github.com/digitalbazaar/forge/issues/744 for details",
|
|
@@ -37,7 +37,8 @@
|
|
|
37
37
|
"depend": "yarn --silent depcruise src --include-only \"^src\" --config --output-type dot | dot -T svg > dependency-graph.svg",
|
|
38
38
|
"test": "yarn typenode ./test.ts",
|
|
39
39
|
"test3": "yarn typenode ./src/test/test.tsx --local",
|
|
40
|
-
"test2": "yarn typenode ./src/4-dom/qreactTest.tsx --local"
|
|
40
|
+
"test2": "yarn typenode ./src/4-dom/qreactTest.tsx --local",
|
|
41
|
+
"error-watch": "yarn typenode ./src/diagnostics/logs/errorNotifications/errorWatchEntry.tsx"
|
|
41
42
|
},
|
|
42
43
|
"bin": {
|
|
43
44
|
"deploy": "./bin/deploy.js",
|
package/src/-a-auth/certs.ts
CHANGED
|
@@ -469,7 +469,8 @@ export function decodeNodeId(nodeId: string, allowMissingThreadId?: "allowMissin
|
|
|
469
469
|
return undefined;
|
|
470
470
|
}
|
|
471
471
|
let parts = locationObj.address.split(".");
|
|
472
|
-
if
|
|
472
|
+
// NOTE: We have to only allow localhost domains on our own domain, as the underlying domain gets stripped when we're looking at the machine ID. So if we allowed localhost domains on other domains, it would potentially allow a server to trick us into connecting to it. And then once the connection is established, it could talk back and we would think it has a localhost machine ID, which is implicitly trusted, which would then give it access to everything.
|
|
473
|
+
if (nodeId.startsWith(`127-0-0-1.${getDomain()}`) && parts.length === 3 && nodeId.includes(":")) {
|
|
473
474
|
return {
|
|
474
475
|
threadId: "",
|
|
475
476
|
machineId: parts.at(-3) || "",
|
|
package/src/-d-trust/NetworkTrust2.ts
CHANGED
|
@@ -92,10 +92,8 @@ const isTrustedBase = runInSerial(measureWrap(async function isTrustedBase(machi
|
|
|
92
92
|
// Always trust ourself
|
|
93
93
|
trustedCache.add(getOwnMachineId());
|
|
94
94
|
|
|
95
|
-
// NOTE:
|
|
96
|
-
|
|
97
|
-
trustedCache.add("127-0-0-1");
|
|
98
|
-
}
|
|
95
|
+
// NOTE: This only happens to servers that we connect to. Also we only allow the machine ID to be this special ID in the case it's on our domain. And because we use HTTPS when connecting to domains, it means that it must be implicitly trusted if it has a certificate for our domain.
|
|
96
|
+
trustedCache.add("127-0-0-1");
|
|
99
97
|
|
|
100
98
|
if (!trustedCache.has(machineId)) {
|
|
101
99
|
untrustedCache.set(machineId, Date.now() + UNTRUST_CACHE_TIME);
|
|
package/src/-e-certs/EdgeCertController.ts
CHANGED
|
@@ -129,12 +129,21 @@ export async function publishMachineARecords() {
|
|
|
129
129
|
let nodeObj = getNodeIdLocation(selfNodeId);
|
|
130
130
|
if (!nodeObj) throw new Error(`Invalid nodeId ${selfNodeId}`);
|
|
131
131
|
let machineAddress = nodeObj.address.split(".").slice(1).join(".");
|
|
132
|
-
await
|
|
133
|
-
await setRecord("A", "*." + machineAddress, ip);
|
|
132
|
+
let prevMachineIP = await getRecords("A", machineAddress);
|
|
134
133
|
let ipDomain = await getIPDomain();
|
|
135
|
-
|
|
134
|
+
let promises: Promise<void>[] = [];
|
|
135
|
+
promises.push(setRecord("A", ipDomain, ip));
|
|
136
136
|
|
|
137
|
-
|
|
137
|
+
|
|
138
|
+
if (ip === "127.0.0.1" && prevMachineIP.length > 0 && !prevMachineIP.includes("127.0.0.1")) {
|
|
139
|
+
console.log(yellow(`Not setting A record for ${machineAddress} to ${ip}, as we previously had a public IP. IF you want to switch back to 127.0.0.1, manually go in and delete the A records for ${machineAddress}. Port forwarding should allow this to work anyways, and the bootstrapper should be smart enough to try 127-0-0-1 style addresses to allow fast development (ex, if you want to download a large file from the local development server quickly).`));
|
|
140
|
+
} else {
|
|
141
|
+
promises.push(setRecord("A", machineAddress, ip));
|
|
142
|
+
promises.push(setRecord("A", "*." + machineAddress, ip));
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
promises.push(publishEdgeDomain());
|
|
146
|
+
await Promise.all(promises);
|
|
138
147
|
|
|
139
148
|
return {
|
|
140
149
|
ip,
|
|
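The `publishMachineARecords` change above adds a guard so a local run can't clobber a public DNS record with 127.0.0.1. A minimal sketch of just that guard; the `declare`d DNS helpers only assume the shapes visible at the call sites above, and `publishARecord` is a hypothetical name.

```ts
// Assumed shapes, taken from how the diff calls these helpers.
declare function getRecords(type: "A", name: string): Promise<string[]>;
declare function setRecord(type: "A", name: string, value: string): Promise<void>;

// Hypothetical helper isolating the guard: never downgrade an existing
// public A record to 127.0.0.1; otherwise publish the record and its
// wildcard in parallel.
async function publishARecord(machineAddress: string, ip: string): Promise<void> {
    const prev = await getRecords("A", machineAddress);
    if (ip === "127.0.0.1" && prev.length > 0 && !prev.includes("127.0.0.1")) {
        // Keep the public IP. Per the warning above, delete the A records
        // manually if you really want to switch back to 127.0.0.1.
        return;
    }
    await Promise.all([
        setRecord("A", machineAddress, ip),
        setRecord("A", "*." + machineAddress, ip),
    ]);
}
```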
package/src/0-path-value-core/archiveLocks/ArchiveLocks2.ts
CHANGED
|
@@ -699,11 +699,15 @@ class TransactionLocker {
|
|
|
699
699
|
// where backblaze returns no files? Which it might be doing, as multiple times this code has
|
|
700
700
|
// triggered (without this check), and deletes all of our files...
|
|
701
701
|
let unconfirmedOldFiles2 = unconfirmedOldFiles.filter(a => !doubleCheckLookup.has(a.file) && doubleCheckDataFiles.has(a.file));
|
|
702
|
-
|
|
703
|
-
|
|
704
|
-
|
|
705
|
-
|
|
706
|
-
|
|
702
|
+
if (unconfirmedOldFiles2.length > 0) {
|
|
703
|
+
console.warn(red(`Deleted ${unconfirmedOldFiles2.length} very old unconfirmed files`), { files: unconfirmedOldFiles2.map(x => x.file) });
|
|
704
|
+
logNodeStats(`archives|TΔ Delete Old Rejected File`, formatNumber, unconfirmedOldFiles2.length);
|
|
705
|
+
// At this point the file was very old when we started reading, and not part of the active transaction.
|
|
706
|
+
for (let file of unconfirmedOldFiles2) {
|
|
707
|
+
await this.deleteDataFile(file.file, `old unconfirmed file`);
|
|
708
|
+
}
|
|
709
|
+
} else {
|
|
710
|
+
console.warn(`Almost deleted ${unconfirmedOldFiles.length} very old unconfirmed files. This is bad, did we miss their confirmations that first time? If we missed them twice in a row, we might literally delete the database, and need to enter recovery mode to fix it...`, { files: unconfirmedOldFiles });
|
|
707
711
|
}
|
|
708
712
|
}
|
|
709
713
|
}
|
|
package/src/4-querysub/Querysub.ts
CHANGED
|
@@ -52,6 +52,7 @@ import yargs, { check } from "yargs";
|
|
|
52
52
|
import { parseArgsFactory } from "../misc/rawParams";
|
|
53
53
|
|
|
54
54
|
import * as typesafecss from "typesafecss";
|
|
55
|
+
import "../library-components/urlResetGroups";
|
|
55
56
|
|
|
56
57
|
|
|
57
58
|
typesafecss.setMeasureBlock(measureBlock);
|
|
@@ -72,6 +73,7 @@ let yargObj = parseArgsFactory()
|
|
|
72
73
|
.option("verbosenetwork", { type: "boolean", desc: "Log all network activity" })
|
|
73
74
|
.option("verboseframework", { type: "boolean", desc: "Log internal SocketFunction framework" })
|
|
74
75
|
.option("nodelay", { type: "boolean", desc: "Don't delay committing functions, even ones that are marked to be delayed." })
|
|
76
|
+
.option("hot", { type: "boolean", desc: "force hot reloading to turn on even if public is true." })
|
|
75
77
|
.argv
|
|
76
78
|
;
|
|
77
79
|
setImmediate(() => {
|
|
@@ -724,7 +726,7 @@ export class Querysub {
|
|
|
724
726
|
}
|
|
725
727
|
|
|
726
728
|
// Hot reloading on public servers breaks things when we update (as the git pull triggers a hot reload), so... don't do that.
|
|
727
|
-
if (config.hotReload && !isPublic()) {
|
|
729
|
+
if (config.hotReload && !isPublic() || yargObj.hot) {
|
|
728
730
|
watchFilesAndTriggerHotReloading();
|
|
729
731
|
}
|
|
730
732
|
|
|
package/src/diagnostics/logs/FastArchiveAppendable.ts
CHANGED
|
@@ -348,7 +348,7 @@ export class FastArchiveAppendable<Datum> {
|
|
|
348
348
|
endTime: number;
|
|
349
349
|
};
|
|
350
350
|
cacheBust: number;
|
|
351
|
-
|
|
351
|
+
getWantData?: (file: FileMetadata) => Promise<((posStart: number, posEnd: number, data: Buffer) => boolean) | undefined>;
|
|
352
352
|
onData: (datum: Datum[], file: FileMetadata) => void;
|
|
353
353
|
// Called after onData
|
|
354
354
|
onStats?: (stats: DatumStats, file: FileMetadata) => void;
|
|
@@ -365,7 +365,7 @@ export class FastArchiveAppendable<Datum> {
|
|
|
365
365
|
};
|
|
366
366
|
stopSynchronize: () => void;
|
|
367
367
|
} | "cancelled"> {
|
|
368
|
-
let {
|
|
368
|
+
let { onData, onStats } = config;
|
|
369
369
|
// Create unique client sync ID upfront
|
|
370
370
|
let syncId = nextId();
|
|
371
371
|
|
|
@@ -525,12 +525,14 @@ export class FastArchiveAppendable<Datum> {
|
|
|
525
525
|
|
|
526
526
|
let scanProgressCount = 0;
|
|
527
527
|
|
|
528
|
+
const wantData = await config.getWantData?.(file);
|
|
529
|
+
|
|
528
530
|
function onParsedData(posStart: number, posEnd: number, buffer: Buffer | "done"): MaybePromise<void> {
|
|
529
531
|
if (buffer !== "done") {
|
|
530
532
|
scanProgressCount++;
|
|
531
533
|
}
|
|
532
534
|
if (buffer !== "done") {
|
|
533
|
-
if (wantData && !wantData(posStart, posEnd, buffer
|
|
535
|
+
if (wantData && !wantData(posStart, posEnd, buffer)) {
|
|
534
536
|
notMatchedSize += (posEnd - posStart);
|
|
535
537
|
notMatchedCount++;
|
|
536
538
|
return;
|
|
package/src/diagnostics/logs/FastArchiveViewer.tsx
CHANGED
|
@@ -31,7 +31,7 @@ const caseInsensitiveParam = new URLParam("caseInsensitive", false);
|
|
|
31
31
|
export class FastArchiveViewer<T> extends qreact.Component<{
|
|
32
32
|
fastArchives: FastArchiveAppendable<T>[];
|
|
33
33
|
onStart: () => void;
|
|
34
|
-
getWantData?: () => Promise<((posStart: number, posEnd: number, data: Buffer
|
|
34
|
+
getWantData?: (file: FileMetadata) => Promise<((posStart: number, posEnd: number, data: Buffer) => boolean) | undefined>;
|
|
35
35
|
onDatums: (source: FastArchiveAppendable<T>, datums: T[], metadata: FileMetadata) => void;
|
|
36
36
|
// Called after onData
|
|
37
37
|
onStats?: (source: FastArchiveAppendable<T>, stats: DatumStats, metadata: FileMetadata) => void;
|
|
@@ -103,7 +103,6 @@ export class FastArchiveViewer<T> extends qreact.Component<{
|
|
|
103
103
|
let fastArchives = Querysub.fastRead(() => this.props.fastArchives);
|
|
104
104
|
let onFinish = Querysub.fastRead(() => this.props.onFinish);
|
|
105
105
|
let getWantData = Querysub.fastRead(() => this.props.getWantData);
|
|
106
|
-
let wantData = await getWantData?.();
|
|
107
106
|
// Increment sequence number for this new sync attempt
|
|
108
107
|
this.currentSequenceNumber++;
|
|
109
108
|
this.latestSequenceNumber = this.currentSequenceNumber;
|
|
@@ -296,35 +295,37 @@ export class FastArchiveViewer<T> extends qreact.Component<{
|
|
|
296
295
|
const result = await fastArchive.synchronizeData({
|
|
297
296
|
range: timeRange,
|
|
298
297
|
cacheBust: Querysub.fastRead(() => cacheBustParam.value),
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
298
|
+
getWantData: async (file) => {
|
|
299
|
+
let wantData = await getWantData?.(file);
|
|
300
|
+
return (posStart: number, posEnd: number, data: Buffer) => {
|
|
301
|
+
let isLimited = false;
|
|
302
|
+
for (let i = posStart; i < posEnd && !isLimited; i++) {
|
|
303
|
+
if (data[i] === limitedBuffer[0]) {
|
|
304
|
+
for (let j = 1; j < limitedBuffer.length; j++) {
|
|
305
|
+
if (data[i + j] !== limitedBuffer[j]) {
|
|
306
|
+
break;
|
|
307
|
+
}
|
|
308
|
+
if (j === limitedBuffer.length - 1) {
|
|
309
|
+
isLimited = true;
|
|
310
|
+
break;
|
|
311
|
+
}
|
|
311
312
|
}
|
|
312
313
|
}
|
|
313
314
|
}
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
}
|
|
315
|
+
if (isLimited) {
|
|
316
|
+
this.limitedScanCount++;
|
|
317
|
+
}
|
|
318
318
|
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
matched
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
319
|
+
// scanMatch is faster than wantData (generally), so run it first
|
|
320
|
+
let matched = scanMatch(posStart, posEnd, data);
|
|
321
|
+
if (matched && wantData) {
|
|
322
|
+
matched = wantData(posStart, posEnd, data);
|
|
323
|
+
}
|
|
324
|
+
if (isLimited && matched) {
|
|
325
|
+
this.limitedMatchCount++;
|
|
326
|
+
}
|
|
327
|
+
return matched;
|
|
328
|
+
};
|
|
328
329
|
},
|
|
329
330
|
onData: (datums, file) => {
|
|
330
331
|
if (!isLatestSync()) return;
|
|
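The inline scan above searches each record's byte range for `limitedBuffer` (the encoded `LOG_LIMIT_FLAG`). The intent is a plain substring search; a standalone sketch with a hypothetical helper name:

```ts
// Hypothetical helper expressing the inline scan above: does `marker` occur
// anywhere starting within data[posStart, posEnd)? Indexing past the end of
// a Buffer yields undefined, which simply fails the byte comparison.
function containsMarker(data: Buffer, posStart: number, posEnd: number, marker: Buffer): boolean {
    for (let i = posStart; i < posEnd; i++) {
        let j = 0;
        while (j < marker.length && data[i + j] === marker[j]) j++;
        if (j === marker.length) return true; // every marker byte matched at i
    }
    return false;
}
```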
@@ -610,13 +611,23 @@ export class FastArchiveViewer<T> extends qreact.Component<{
|
|
|
610
611
|
</div>}
|
|
611
612
|
{!this.state.finished && <LoaderAurora />}
|
|
612
613
|
{this.limitedScanCount > 0 && (
|
|
613
|
-
<div className={infoDisplay(60).boldStyle}
|
|
614
|
-
{
|
|
614
|
+
<div className={infoDisplay(60).boldStyle.button}
|
|
615
|
+
onClick={() => {
|
|
616
|
+
filterParam.value = LOG_LIMIT_FLAG;
|
|
617
|
+
void this.handleDownload();
|
|
618
|
+
}}
|
|
619
|
+
>
|
|
620
|
+
Click here to see: {formatNumber(this.limitedScanCount)} scanned logs were rate limited at a file level. We do not track how many were ignored; there might be infinite missing logs.
|
|
615
621
|
</div>
|
|
616
622
|
)}
|
|
617
|
-
{this.limitedMatchCount > 0 && (
|
|
618
|
-
<div className={infoDisplay(0).boldStyle}
|
|
619
|
-
{
|
|
623
|
+
{this.limitedMatchCount > 0 && filterParam.value && (
|
|
624
|
+
<div className={infoDisplay(0).boldStyle.button}
|
|
625
|
+
onClick={() => {
|
|
626
|
+
filterParam.value += " & " + LOG_LIMIT_FLAG;
|
|
627
|
+
void this.handleDownload();
|
|
628
|
+
}}
|
|
629
|
+
>
|
|
630
|
+
Click here to see: {formatNumber(this.limitedMatchCount)} matched logs were rate limited at a file level. We do not track how many were ignored; there might be infinite missing logs.
|
|
620
631
|
</div>
|
|
621
632
|
)}
|
|
622
633
|
{this.state.pendingSyncInitializations > 0 && (
|
|
package/src/diagnostics/logs/LogViewer2.tsx
CHANGED
|
@@ -13,7 +13,7 @@ import { logErrors } from "../../errors";
|
|
|
13
13
|
import { batchFunction, runInSerial } from "socket-function/src/batching";
|
|
14
14
|
import { Querysub } from "../../4-querysub/QuerysubController";
|
|
15
15
|
import { sort, timeInDay, timeInHour } from "socket-function/src/misc";
|
|
16
|
-
import { FastArchiveViewer } from "./FastArchiveViewer";
|
|
16
|
+
import { FastArchiveViewer, filterParam } from "./FastArchiveViewer";
|
|
17
17
|
import { LogDatum, getLoggers, LOG_LIMIT_FLAG } from "./diskLogger";
|
|
18
18
|
import { ColumnType, Table, TableType } from "../../5-diagnostics/Table";
|
|
19
19
|
import { formatDateJSX } from "../../misc/formatJSX";
|
|
@@ -24,7 +24,7 @@ import { ObjectDisplay } from "./ObjectDisplay";
|
|
|
24
24
|
import { endTime } from "../misc-pages/archiveViewerShared";
|
|
25
25
|
import { ErrorSuppressionUI } from "./errorNotifications/ErrorSuppressionUI";
|
|
26
26
|
import { FileMetadata } from "./FastArchiveController";
|
|
27
|
-
import { SuppressionListController, getSuppressEntryChecker } from "./errorNotifications/ErrorNotificationController";
|
|
27
|
+
import { SuppressionListController, getSuppressEntryChecker, getSuppressionFull } from "./errorNotifications/ErrorNotificationController";
|
|
28
28
|
import { SocketFunction } from "socket-function/SocketFunction";
|
|
29
29
|
|
|
30
30
|
const RENDER_INTERVAL = 1000;
|
|
@@ -56,6 +56,7 @@ export class LogViewer2 extends qreact.Component {
|
|
|
56
56
|
});
|
|
57
57
|
|
|
58
58
|
private example: string | undefined = undefined;
|
|
59
|
+
private operationSequenceNum = 0;
|
|
59
60
|
private datumCount = 0;
|
|
60
61
|
private matchedSize = 0;
|
|
61
62
|
private notMatchedSize = 0;
|
|
@@ -100,6 +101,10 @@ export class LogViewer2 extends qreact.Component {
|
|
|
100
101
|
}
|
|
101
102
|
}
|
|
102
103
|
|
|
104
|
+
let suppressionController = SuppressionListController(SocketFunction.browserNodeId());
|
|
105
|
+
let suppressionList = suppressionController.getSuppressionList();
|
|
106
|
+
let hasErrorNotifyToggle = errorNotifyToggleURL.value;
|
|
107
|
+
|
|
103
108
|
const timeRange = getTimeRange();
|
|
104
109
|
return (
|
|
105
110
|
<div className={css.vbox(20).pad2(20).fillBoth}>
|
|
@@ -114,6 +119,7 @@ export class LogViewer2 extends qreact.Component {
|
|
|
114
119
|
let now = Date.now();
|
|
115
120
|
startTimeParam.value = now - timeInDay * 7;
|
|
116
121
|
endTimeParam.value = now + timeInHour * 2;
|
|
122
|
+
filterParam.value = "";
|
|
117
123
|
}
|
|
118
124
|
this.rerun();
|
|
119
125
|
}}
|
|
@@ -148,40 +154,34 @@ export class LogViewer2 extends qreact.Component {
|
|
|
148
154
|
this.datums = [];
|
|
149
155
|
this.suppressionCounts = new Map();
|
|
150
156
|
this.expiredSuppressionCounts = new Map();
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
let checkers = suppressionList.map(x => getSuppressEntryChecker(x));
|
|
159
|
-
this.suppressionCounts = new Map(suppressionList.map(x => [x.key, 0]));
|
|
160
|
-
this.expiredSuppressionCounts = new Map(suppressionList.map(x => [x.key, 0]));
|
|
161
|
-
let updateCounts = batchFunction({ delay: 1000 }, () => {
|
|
162
|
-
Querysub.commit(() => {
|
|
163
|
-
this.state.datumsSeqNum++;
|
|
164
|
-
});
|
|
165
|
-
});
|
|
166
|
-
return (posStart, posEnd, data, file) => {
|
|
167
|
-
for (let checker of checkers) {
|
|
168
|
-
if (checker.fnc(data, posStart, posEnd)) {
|
|
169
|
-
if (checker.entry.expiresAt < now) {
|
|
170
|
-
let count = this.expiredSuppressionCounts.get(checker.entry.key) || 0;
|
|
171
|
-
count++;
|
|
172
|
-
this.expiredSuppressionCounts.set(checker.entry.key, count);
|
|
173
|
-
void updateCounts(undefined);
|
|
174
|
-
} else {
|
|
175
|
-
let count = this.suppressionCounts.get(checker.entry.key) || 0;
|
|
176
|
-
count++;
|
|
177
|
-
this.suppressionCounts.set(checker.entry.key, count);
|
|
178
|
-
void updateCounts(undefined);
|
|
179
|
-
return false;
|
|
180
|
-
}
|
|
157
|
+
this.operationSequenceNum++;
|
|
158
|
+
void (async () => {
|
|
159
|
+
const currentSequenceNum = this.operationSequenceNum;
|
|
160
|
+
while (true) {
|
|
161
|
+
await new Promise(resolve => setTimeout(resolve, 1000));
|
|
162
|
+
if (this.operationSequenceNum !== currentSequenceNum) {
|
|
163
|
+
break;
|
|
181
164
|
}
|
|
165
|
+
Querysub.commit(() => {
|
|
166
|
+
this.state.datumsSeqNum++;
|
|
167
|
+
});
|
|
182
168
|
}
|
|
183
|
-
|
|
184
|
-
|
|
169
|
+
})();
|
|
170
|
+
}}
|
|
171
|
+
getWantData={async (file) => {
|
|
172
|
+
if (!hasErrorNotifyToggle) return undefined;
|
|
173
|
+
// By defaulting to the synchronous one, the list will be updated if there are any changes. However, we will also just get it asynchronously if the list hasn't been updated by the time we call getWantData. And because we assign it back to the variable, it'll be cached.
|
|
174
|
+
suppressionList = suppressionList || await suppressionController.getSuppressionList.promise();
|
|
175
|
+
let suppressionFull = getSuppressionFull({
|
|
176
|
+
entries: suppressionList,
|
|
177
|
+
blockTimeRange: {
|
|
178
|
+
startTime: file.startTime,
|
|
179
|
+
endTime: file.endTime,
|
|
180
|
+
},
|
|
181
|
+
suppressionCounts: this.suppressionCounts,
|
|
182
|
+
expiredSuppressionCounts: this.expiredSuppressionCounts,
|
|
183
|
+
});
|
|
184
|
+
return suppressionFull;
|
|
185
185
|
}}
|
|
186
186
|
onDatums={(source, datums, file) => {
|
|
187
187
|
this.datumCount += datums.length;
|
|
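The new `operationSequenceNum` handling above is a general pattern for cancelling a stale async loop without an AbortController: every (re)start bumps a counter, and a loop exits as soon as the counter no longer matches the value it captured. A minimal self-contained sketch, names hypothetical:

```ts
// Sketch of the sequence-number cancellation used in LogViewer2. start()
// launches a ticking loop; any later start() or stop() bumps `seq`, which
// the old loop notices on its next wake-up and exits.
class Refresher {
    private seq = 0;
    start(tick: () => void, intervalMs = 1000): void {
        const mySeq = ++this.seq; // this loop is valid only while seq === mySeq
        void (async () => {
            while (true) {
                await new Promise(resolve => setTimeout(resolve, intervalMs));
                if (this.seq !== mySeq) break; // superseded or stopped
                tick();
            }
        })();
    }
    stop(): void {
        this.seq++; // invalidates any running loop
    }
}
```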
@@ -212,6 +212,7 @@ export class LogViewer2 extends qreact.Component {
|
|
|
212
212
|
}
|
|
213
213
|
}}
|
|
214
214
|
onFinish={() => {
|
|
215
|
+
this.operationSequenceNum++;
|
|
215
216
|
Querysub.commit(() => {
|
|
216
217
|
sort(this.datums, x => -(x.time || 0));
|
|
217
218
|
this.state.datumsSeqNum++;
|
|
package/src/diagnostics/logs/TimeRangeSelector.tsx
CHANGED
|
@@ -82,10 +82,26 @@ export class TimeRangeSelector extends qreact.Component {
|
|
|
82
82
|
>
|
|
83
83
|
Set to future data
|
|
84
84
|
</Button>
|
|
85
|
-
|
|
85
|
+
<Button
|
|
86
|
+
hue={110} onClick={() => {
|
|
87
|
+
startTimeParam.value = now - timeInDay;
|
|
88
|
+
endTimeParam.value = now + timeInHour * 2;
|
|
89
|
+
}}
|
|
90
|
+
>
|
|
91
|
+
Set to last day
|
|
92
|
+
</Button>
|
|
93
|
+
<Button
|
|
94
|
+
hue={110} onClick={() => {
|
|
95
|
+
startTimeParam.value = now - timeInDay * 7;
|
|
96
|
+
endTimeParam.value = now + timeInHour * 2;
|
|
97
|
+
}}
|
|
98
|
+
>
|
|
99
|
+
Set to last 7 days
|
|
100
|
+
</Button>
|
|
101
|
+
{!!(endTimeParam.value || startTimeParam.value) && <Button
|
|
86
102
|
hue={110} onClick={resetToLastDay}
|
|
87
103
|
>
|
|
88
|
-
Reset
|
|
104
|
+
Reset
|
|
89
105
|
</Button>}
|
|
90
106
|
{(!startTimeParam.value || !endTimeParam.value) && <Button
|
|
91
107
|
hue={110}
|
|
package/src/diagnostics/logs/errorNotifications/ErrorNotificationController.ts
CHANGED
|
@@ -109,74 +109,201 @@ export const getSuppressEntryChecker = cacheLimited(
|
|
|
109
109
|
}
|
|
110
110
|
);
|
|
111
111
|
|
|
112
|
+
export const getSuppressionFull = measureWrap(function getSuppressionFull(config: {
|
|
113
|
+
entries: SuppressionEntry[];
|
|
114
|
+
blockTimeRange: {
|
|
115
|
+
startTime: number;
|
|
116
|
+
endTime: number;
|
|
117
|
+
};
|
|
118
|
+
suppressionCounts?: Map<string, number>;
|
|
119
|
+
expiredSuppressionCounts?: Map<string, number>;
|
|
120
|
+
// => wants data
|
|
121
|
+
}): ((posStart: number, posEnd: number, data: Buffer, obj?: { outdatedSuppressionKey?: string }) => boolean) {
|
|
122
|
+
let { entries, blockTimeRange } = config;
|
|
123
|
+
const { suppressionCounts, expiredSuppressionCounts } = config;
|
|
124
|
+
// Add some buffer, just in case entries get added a bit later, or early.
|
|
125
|
+
let startTime = blockTimeRange.startTime - timeInHour;
|
|
126
|
+
let endTime = blockTimeRange.endTime + timeInHour;
|
|
127
|
+
|
|
128
|
+
sort(entries, x => -x.lastUpdateTime);
|
|
129
|
+
|
|
130
|
+
let checkers = entries.map(x => getSuppressEntryChecker(x));
|
|
131
|
+
|
|
132
|
+
let definitelyNotExpired = checkers.filter(x => x.entry.expiresAt > endTime);
|
|
133
|
+
let definitelyExpired = checkers.filter(x => x.entry.expiresAt < startTime);
|
|
134
|
+
let maybeExpired = checkers.filter(x => x.entry.expiresAt >= startTime && x.entry.expiresAt <= endTime);
|
|
135
|
+
|
|
136
|
+
return (posStart, posEnd, data, obj) => {
|
|
137
|
+
let suppressed = false;
|
|
138
|
+
for (let checker of definitelyNotExpired) {
|
|
139
|
+
if (checker.fnc(data, posStart, posEnd)) {
|
|
140
|
+
if (!suppressionCounts && !expiredSuppressionCounts && !obj) {
|
|
141
|
+
return false;
|
|
142
|
+
}
|
|
143
|
+
suppressed = true;
|
|
144
|
+
if (!suppressionCounts) break;
|
|
145
|
+
|
|
146
|
+
let count = suppressionCounts.get(checker.entry.key) || 0;
|
|
147
|
+
count++;
|
|
148
|
+
suppressionCounts.set(checker.entry.key, count);
|
|
149
|
+
}
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
// Handle definitelyExpired - these are outdated suppressions
|
|
153
|
+
let mostRecentOutdatedSuppressionKey: string | undefined = undefined;
|
|
154
|
+
|
|
155
|
+
// Handle maybeExpired - need to parse timestamp to check if suppression was active
|
|
156
|
+
if (maybeExpired.length > 0 && (suppressionCounts || expiredSuppressionCounts || obj)) {
|
|
157
|
+
const getLogTime = () => {
|
|
158
|
+
try {
|
|
159
|
+
let logEntry = JSON.parse(data.slice(posStart, posEnd).toString()) as LogDatum;
|
|
160
|
+
return typeof logEntry.time === "number" ? logEntry.time : 0;
|
|
161
|
+
} catch {
|
|
162
|
+
return 0;
|
|
163
|
+
}
|
|
164
|
+
};
|
|
165
|
+
let logTime = getLogTime();
|
|
166
|
+
|
|
167
|
+
for (let checker of maybeExpired) {
|
|
168
|
+
if (checker.fnc(data, posStart, posEnd)) {
|
|
169
|
+
if (checker.entry.expiresAt >= logTime) {
|
|
170
|
+
suppressed = true;
|
|
171
|
+
if (suppressionCounts) {
|
|
172
|
+
let count = suppressionCounts.get(checker.entry.key) || 0;
|
|
173
|
+
count++;
|
|
174
|
+
suppressionCounts.set(checker.entry.key, count);
|
|
175
|
+
}
|
|
176
|
+
} else {
|
|
177
|
+
if (!mostRecentOutdatedSuppressionKey) {
|
|
178
|
+
mostRecentOutdatedSuppressionKey = checker.entry.key;
|
|
179
|
+
}
|
|
180
|
+
// Even if we don't want the expired suppression counts, we might want the normal suppression counts, so we have to keep going.
|
|
181
|
+
if (expiredSuppressionCounts) {
|
|
182
|
+
let count = expiredSuppressionCounts.get(checker.entry.key) || 0;
|
|
183
|
+
count++;
|
|
184
|
+
expiredSuppressionCounts.set(checker.entry.key, count);
|
|
185
|
+
}
|
|
186
|
+
}
|
|
187
|
+
}
|
|
188
|
+
}
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
if (expiredSuppressionCounts || obj) {
|
|
192
|
+
for (let checker of definitelyExpired) {
|
|
193
|
+
if (checker.fnc(data, posStart, posEnd)) {
|
|
194
|
+
// First match is the most recent (entries are sorted by lastUpdateTime desc)
|
|
195
|
+
if (!mostRecentOutdatedSuppressionKey) {
|
|
196
|
+
mostRecentOutdatedSuppressionKey = checker.entry.key;
|
|
197
|
+
}
|
|
198
|
+
if (!expiredSuppressionCounts) break;
|
|
199
|
+
let count = expiredSuppressionCounts.get(checker.entry.key) || 0;
|
|
200
|
+
count++;
|
|
201
|
+
expiredSuppressionCounts.set(checker.entry.key, count);
|
|
202
|
+
}
|
|
203
|
+
}
|
|
204
|
+
}
|
|
205
|
+
|
|
206
|
+
// Set the most recent outdated suppression key if we found any and weren't suppressed
|
|
207
|
+
if (obj && mostRecentOutdatedSuppressionKey && !suppressed) {
|
|
208
|
+
obj.outdatedSuppressionKey = mostRecentOutdatedSuppressionKey;
|
|
209
|
+
}
|
|
210
|
+
|
|
211
|
+
return !suppressed;
|
|
212
|
+
};
|
|
213
|
+
});
|
|
214
|
+
|
|
112
215
|
|
|
113
216
|
const suppressionListKey = "suppression-list.json";
|
|
114
217
|
const suppressionListArchive = archiveJSONT<SuppressionListBase>(() =>
|
|
115
218
|
getArchives("suppression-list"),
|
|
116
219
|
);
|
|
220
|
+
const suppressionUpdatedChannel = new SocketChannel<boolean>("suppression-updated");
|
|
117
221
|
|
|
118
222
|
class SuppressionList {
|
|
223
|
+
private init = lazy(async () => {
|
|
224
|
+
suppressionUpdatedChannel.watch(() => {
|
|
225
|
+
void this.updateEntriesNow();
|
|
226
|
+
});
|
|
227
|
+
});
|
|
228
|
+
private updateEntriesNow = async () => {
|
|
229
|
+
let entries = await suppressionListArchive.get(suppressionListKey);
|
|
230
|
+
if (!entries) {
|
|
231
|
+
entries = { entries: {} };
|
|
232
|
+
}
|
|
233
|
+
this.getEntries.set(Promise.resolve(entries));
|
|
234
|
+
};
|
|
119
235
|
private getEntries = lazy(async (): Promise<SuppressionListBase> => {
|
|
236
|
+
await this.init();
|
|
120
237
|
await runInfinitePollCallAtStart(SUPPRESSION_POLL_INTERVAL, async () => {
|
|
121
|
-
|
|
122
|
-
if (!entries) {
|
|
123
|
-
entries = { entries: {} };
|
|
124
|
-
}
|
|
125
|
-
await suppressionListArchive.set(suppressionListKey, entries);
|
|
126
|
-
this.getEntries.set(Promise.resolve(entries));
|
|
238
|
+
await this.updateEntriesNow();
|
|
127
239
|
});
|
|
128
240
|
// Infinite poll will have set this, so we don't infinitely loop
|
|
129
241
|
return await this.getEntries();
|
|
130
242
|
});
|
|
131
243
|
|
|
132
244
|
public async filterObjsToNonSuppressed(objs: LogDatum[]): Promise<LogDatum[]> {
|
|
133
|
-
// NOTE: Streamed data should be rare enough
|
|
245
|
+
// NOTE: Streamed data should be rare enough, that handling this inefficiently is okay.
|
|
246
|
+
if (objs.length === 0) return [];
|
|
247
|
+
let startTime = objs[0].time;
|
|
248
|
+
let endTime = objs[objs.length - 1].time;
|
|
134
249
|
let parts: Buffer[] = [];
|
|
135
250
|
for (let obj of objs) {
|
|
136
251
|
parts.push(Buffer.from(JSON.stringify(obj)));
|
|
137
252
|
parts.push(objectDelimitterBuffer);
|
|
253
|
+
if (obj.time < startTime) {
|
|
254
|
+
startTime = obj.time;
|
|
255
|
+
}
|
|
256
|
+
if (obj.time > endTime) {
|
|
257
|
+
endTime = obj.time;
|
|
258
|
+
}
|
|
138
259
|
}
|
|
139
260
|
let buffer = Buffer.concat(parts);
|
|
140
|
-
let scanner = await this.scanForRecentErrors(
|
|
261
|
+
let scanner = await this.scanForRecentErrors({
|
|
262
|
+
startTime,
|
|
263
|
+
endTime,
|
|
264
|
+
});
|
|
141
265
|
await scanner.onData(buffer);
|
|
142
266
|
return await scanner.finish();
|
|
143
267
|
}
|
|
144
|
-
public async scanForRecentErrors(
|
|
268
|
+
public async scanForRecentErrors(config: {
|
|
269
|
+
startTime: number;
|
|
270
|
+
endTime: number;
|
|
271
|
+
}): Promise<{
|
|
145
272
|
onData: (data: Buffer) => void;
|
|
146
273
|
finish: () => Promise<LogDatum[]>;
|
|
147
274
|
}> {
|
|
148
275
|
let entries = await this.getEntries();
|
|
149
|
-
let
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
276
|
+
let suppressionFull = getSuppressionFull({
|
|
277
|
+
entries: Object.values(entries.entries),
|
|
278
|
+
blockTimeRange: {
|
|
279
|
+
startTime: config.startTime,
|
|
280
|
+
endTime: config.endTime,
|
|
281
|
+
},
|
|
282
|
+
});
|
|
153
283
|
let datums: LogDatum[] = [];
|
|
284
|
+
// Create an object which we'll reuse that will be the output object
|
|
285
|
+
// for the suppression key.
|
|
286
|
+
let obj: { outdatedSuppressionKey?: string } = {};
|
|
154
287
|
let callback = createLogScanner({
|
|
155
288
|
onParsedData: (posStart, posEnd, buffer) => {
|
|
156
289
|
if (buffer === "done") {
|
|
157
290
|
return;
|
|
158
291
|
}
|
|
159
|
-
let
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
continue;
|
|
165
|
-
}
|
|
166
|
-
return;
|
|
167
|
-
}
|
|
168
|
-
}
|
|
169
|
-
let obj: LogDatum;
|
|
292
|
+
let result = suppressionFull(posStart, posEnd, buffer, obj);
|
|
293
|
+
|
|
294
|
+
if (!result) return;
|
|
295
|
+
|
|
296
|
+
let datum: LogDatum;
|
|
170
297
|
try {
|
|
171
|
-
|
|
298
|
+
datum = JSON.parse(buffer.slice(posStart, posEnd).toString()) as LogDatum;
|
|
172
299
|
} catch (e: any) {
|
|
173
300
|
process.stderr.write(`Failed to parse log datum in around ${buffer.slice(posStart, posEnd).slice(0, 100).toString("hex")}, error is:\n${e.stack}`);
|
|
174
301
|
return;
|
|
175
302
|
}
|
|
176
|
-
if (outdatedSuppressionKey) {
|
|
177
|
-
|
|
303
|
+
if (obj.outdatedSuppressionKey) {
|
|
304
|
+
datum.__matchedOutdatedSuppressionKey = obj.outdatedSuppressionKey;
|
|
178
305
|
}
|
|
179
|
-
datums.push(
|
|
306
|
+
datums.push(datum);
|
|
180
307
|
},
|
|
181
308
|
});
|
|
182
309
|
let lastWaitTime = Date.now();
|
|
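`getSuppressionFull` above gets most of its speed from classifying suppression entries once per file: entries whose expiry falls clearly outside the file's (buffered) time range never require parsing a log's own timestamp. The bucketing in isolation, derived directly from the diff:

```ts
// The per-file expiry bucketing from getSuppressionFull. The buffer mirrors
// the "Add some buffer, just in case entries get added a bit later, or
// early" note above (timeInHour in the diff).
type SuppressionEntryLike = { key: string; expiresAt: number };

function bucketByExpiry<T extends SuppressionEntryLike>(
    entries: T[],
    blockTimeRange: { startTime: number; endTime: number },
    bufferMs: number,
) {
    const startTime = blockTimeRange.startTime - bufferMs;
    const endTime = blockTimeRange.endTime + bufferMs;
    return {
        // Active for every log in the file: suppress without parsing times.
        definitelyNotExpired: entries.filter(e => e.expiresAt > endTime),
        // Expired before the file began: only useful for "outdated" reporting.
        definitelyExpired: entries.filter(e => e.expiresAt < startTime),
        // Boundary cases: parse each log's own timestamp to decide.
        maybeExpired: entries.filter(e => e.expiresAt >= startTime && e.expiresAt <= endTime),
    };
}
```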
@@ -201,13 +328,15 @@ class SuppressionList {
|
|
|
201
328
|
let entries = await this.getEntries();
|
|
202
329
|
entry.lastUpdateTime = Date.now();
|
|
203
330
|
entries.entries[entry.key] = entry;
|
|
204
|
-
|
|
331
|
+
void suppressionListArchive.set(suppressionListKey, entries);
|
|
332
|
+
suppressionUpdatedChannel.broadcast(true);
|
|
205
333
|
await recentErrors.onSuppressionChanged();
|
|
206
334
|
}
|
|
207
335
|
public async removeSuppressionEntry(key: string) {
|
|
208
336
|
let entries = await this.getEntries();
|
|
209
337
|
delete entries.entries[key];
|
|
210
|
-
|
|
338
|
+
void suppressionListArchive.set(suppressionListKey, entries);
|
|
339
|
+
suppressionUpdatedChannel.broadcast(true);
|
|
211
340
|
await recentErrors.onSuppressionChanged();
|
|
212
341
|
}
|
|
213
342
|
|
|
@@ -378,7 +507,9 @@ const limitRecentErrors = measureWrap(function limitRecentErrors(objs: LogDatum[
|
|
|
378
507
|
class RecentErrors {
|
|
379
508
|
|
|
380
509
|
private initialize = lazy(async () => {
|
|
381
|
-
errorWatcherBase.watch(x =>
|
|
510
|
+
errorWatcherBase.watch(x => {
|
|
511
|
+
void this.addErrors(x);
|
|
512
|
+
});
|
|
382
513
|
await this.scanNow({});
|
|
383
514
|
runInfinitePoll(BACKBLAZE_POLL_INTERVAL, async () => {
|
|
384
515
|
await this.scanNow({ noLocalFiles: true });
|
|
@@ -415,6 +546,7 @@ class RecentErrors {
|
|
|
415
546
|
});
|
|
416
547
|
|
|
417
548
|
private async addErrors(objs: LogDatum[]) {
|
|
549
|
+
if (objs.length === 0) return;
|
|
418
550
|
for (let obj of objs) {
|
|
419
551
|
this._recentErrors.push(obj);
|
|
420
552
|
}
|
|
@@ -464,7 +596,10 @@ class RecentErrors {
|
|
|
464
596
|
let hash = getFileMetadataHash(file);
|
|
465
597
|
path = await urlCache.getURLLocalPath(file.url, hash);
|
|
466
598
|
if (!path) continue;
|
|
467
|
-
let scanner = await suppressionList.scanForRecentErrors(
|
|
599
|
+
let scanner = await suppressionList.scanForRecentErrors({
|
|
600
|
+
startTime: file.startTime,
|
|
601
|
+
endTime: file.endTime,
|
|
602
|
+
});
|
|
468
603
|
|
|
469
604
|
// Stream decompress the file while reading it
|
|
470
605
|
size = await fs.promises.stat(path).then(x => x.size);
|
|
@@ -554,7 +689,7 @@ export const RecentErrorsController = getSyncedController(SocketFunction.registe
|
|
|
554
689
|
},
|
|
555
690
|
});
|
|
556
691
|
|
|
557
|
-
const recentErrorsChannel = new SocketChannel<true>("recent-errors-eeceb0c8-4086-4ab3-b3ff-fa9fd5282e14");
|
|
692
|
+
export const recentErrorsChannel = new SocketChannel<true>("recent-errors-eeceb0c8-4086-4ab3-b3ff-fa9fd5282e14");
|
|
558
693
|
|
|
559
694
|
export const watchRecentErrors = lazy(function watchRecentErrors() {
|
|
560
695
|
recentErrorsChannel.watch(async () => {
|
|
@@ -571,7 +706,9 @@ export const notifyWatchersOfError = batchFunction({
|
|
|
571
706
|
},
|
|
572
707
|
async (objs: LogDatum[]) => {
|
|
573
708
|
objs = await suppressionList.filterObjsToNonSuppressed(objs);
|
|
574
|
-
|
|
709
|
+
if (objs.length > 0) {
|
|
710
|
+
errorWatcherBase.broadcast(objs);
|
|
711
|
+
}
|
|
575
712
|
}
|
|
576
713
|
);
|
|
577
714
|
|
|
package/src/diagnostics/logs/errorNotifications/ErrorSuppressionUI.tsx
CHANGED
|
@@ -60,7 +60,7 @@ export class ErrorSuppressionUI extends qreact.Component<{
|
|
|
60
60
|
const previewMatchCount = this.calculatePreviewMatchCount(this.state.matchedInput);
|
|
61
61
|
|
|
62
62
|
return <div className={css.vbox(16).pad2(16).fillWidth.bord2(0, 0, 50, 5).hsl(0, 0, 80)}>
|
|
63
|
-
<
|
|
63
|
+
<div className={css.fontSize(18)}>Error Suppression List ({formatNumber(entries.length)})</div>
|
|
64
64
|
|
|
65
65
|
<div className={css.hbox(8).fillWidth}>
|
|
66
66
|
<InputLabel
|
|
@@ -133,7 +133,7 @@ export class ErrorSuppressionUI extends qreact.Component<{
|
|
|
133
133
|
});
|
|
134
134
|
});
|
|
135
135
|
}}>
|
|
136
|
-
Not
|
|
136
|
+
Not a bug
|
|
137
137
|
</Button>
|
|
138
138
|
</div>
|
|
139
139
|
|
|
@@ -192,22 +192,28 @@ export class ErrorSuppressionUI extends qreact.Component<{
|
|
|
192
192
|
Ignore (for a week)
|
|
193
193
|
</Button>
|
|
194
194
|
<Button onClick={() => updateEntry({ expiresAt: NOT_AN_ERROR_EXPIRE_TIME })}>
|
|
195
|
-
Not
|
|
195
|
+
Not a bug
|
|
196
196
|
</Button>
|
|
197
197
|
<Button
|
|
198
198
|
onClick={() => updateEntry({ expiresAt: Date.now() - timeInDay * 7 })}
|
|
199
|
-
title="Unignore"
|
|
199
|
+
title="Unignore, so past errors show up again as errors"
|
|
200
200
|
>
|
|
201
201
|
Unignore
|
|
202
202
|
</Button>
|
|
203
|
+
<Button
|
|
204
|
+
onClick={() => updateEntry({ expiresAt: Date.now() })}
|
|
205
|
+
title="Set ignore time to now, so any future errors will receive notifications"
|
|
206
|
+
>
|
|
207
|
+
Ignore previous
|
|
208
|
+
</Button>
|
|
203
209
|
{entry.expiresAt === NOT_AN_ERROR_EXPIRE_TIME && <span>
|
|
204
|
-
Not
|
|
210
|
+
Not a bug
|
|
205
211
|
</span>}
|
|
206
212
|
{entry.expiresAt < Date.now() && <span>
|
|
207
|
-
Expired
|
|
213
|
+
Expired {formatDateJSX(entry.expiresAt)}
|
|
208
214
|
</span>}
|
|
209
215
|
{entry.expiresAt > Date.now() && entry.expiresAt !== NOT_AN_ERROR_EXPIRE_TIME && <span>
|
|
210
|
-
Expires
|
|
216
|
+
Expires {formatDateJSX(entry.expiresAt)}
|
|
211
217
|
</span>}
|
|
212
218
|
<span className={css.opacity(0.5)}>
|
|
213
219
|
(Last Updated: {formatDateJSX(entry.lastUpdateTime)})
|
|
package/src/diagnostics/logs/errorNotifications/ErrorWarning.tsx
CHANGED
|
@@ -10,9 +10,12 @@ import { ATag } from "../../../library-components/ATag";
|
|
|
10
10
|
import { managementPageURL, showingManagementURL } from "../../managementPages";
|
|
11
11
|
import { errorNotifyToggleURL } from "../LogViewer2";
|
|
12
12
|
import { Querysub } from "../../../4-querysub/QuerysubController";
|
|
13
|
-
import { nextId, timeInDay } from "socket-function/src/misc";
|
|
13
|
+
import { nextId, timeInDay, timeInHour } from "socket-function/src/misc";
|
|
14
14
|
import { formatNumber } from "socket-function/src/formatting/format";
|
|
15
15
|
import { Icon } from "../../../library-components/icons";
|
|
16
|
+
import { filterParam } from "../FastArchiveViewer";
|
|
17
|
+
import { endTimeParam, startTimeParam } from "../TimeRangeSelector";
|
|
18
|
+
import { formatDateJSX } from "../../../misc/formatJSX";
|
|
16
19
|
|
|
17
20
|
export class ErrorWarning extends qreact.Component {
|
|
18
21
|
state = t.state({
|
|
@@ -64,9 +67,18 @@ export class ErrorWarning extends qreact.Component {
|
|
|
64
67
|
</style>
|
|
65
68
|
</Button>;
|
|
66
69
|
|
|
70
|
+
const logLink = [
|
|
71
|
+
showingManagementURL.getOverride(true),
|
|
72
|
+
managementPageURL.getOverride("LogViewer2"),
|
|
73
|
+
errorNotifyToggleURL.getOverride(true),
|
|
74
|
+
filterParam.getOverride(""),
|
|
75
|
+
startTimeParam.getOverride(Date.now() - timeInDay * 7),
|
|
76
|
+
endTimeParam.getOverride(Date.now() + timeInHour * 2),
|
|
77
|
+
];
|
|
78
|
+
|
|
67
79
|
if (!recentErrors || recentErrors.length === 0) {
|
|
68
80
|
return <span className={css.hbox(8)}>
|
|
69
|
-
<ATag target="_blank" values={
|
|
81
|
+
<ATag target="_blank" values={logLink}>
|
|
70
82
|
No Errors
|
|
71
83
|
</ATag>
|
|
72
84
|
{refreshButton}
|
|
@@ -112,7 +124,7 @@ export class ErrorWarning extends qreact.Component {
|
|
|
112
124
|
⚠️ {fileCountText} files with errors
|
|
113
125
|
</div>
|
|
114
126
|
|
|
115
|
-
<ATag target="_blank" values={
|
|
127
|
+
<ATag target="_blank" values={logLink}>
|
|
116
128
|
View Logs
|
|
117
129
|
</ATag>
|
|
118
130
|
{refreshButton}
|
|
@@ -139,7 +151,7 @@ export class ErrorWarning extends qreact.Component {
|
|
|
139
151
|
</div>
|
|
140
152
|
}
|
|
141
153
|
<div className={css.hbox(8).hsl(0, 50, 50).pad2(4, 2).colorhsl(0, 50, 95)}>
|
|
142
|
-
{recentErrors[0].param0} ({recentErrors[0].__NAME__})
|
|
154
|
+
({formatDateJSX(recentErrors[0].time)}) {recentErrors[0].param0} ({recentErrors[0].__NAME__})
|
|
143
155
|
</div>
|
|
144
156
|
|
|
145
157
|
<div className={css.hbox(8).fillWidth}>
|
|
package/src/diagnostics/logs/errorNotifications/errorWatchEntry.tsx
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
import { batchFunction, runInfinitePollCallAtStart } from "socket-function/src/batching";
|
|
2
|
+
import { getControllerNodeId } from "../../../-g-core-values/NodeCapabilities";
|
|
3
|
+
import { RecentErrorsController, recentErrorsChannel, watchRecentErrors } from "./ErrorNotificationController";
|
|
4
|
+
import { timeInSecond } from "socket-function/src/misc";
|
|
5
|
+
import { formatDateTime } from "socket-function/src/formatting/format";
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
//todonext
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
// 5) Add a channel to watch suppression entries, as once we suppress it, we don't want notifications anymore anywhere, And we don't want to have to wait
|
|
12
|
+
// - test by logging when we get errors and then having our script which automatically keeps adding errors keep running, so we can verify that we keep logging new errors; then ignore the errors, and this suppression should propagate and cause us to no longer emit the errors.
|
|
13
|
+
// - This will also cause the errors to be suppressed across different HTTP servers.
|
|
14
|
+
|
|
15
|
+
// 5) Get IMs actually sending.
|
|
16
|
+
|
|
17
|
+
// 6) Set up all the code to properly rate limit IMs, batch them, link back to the log page, etc.
|
|
18
|
+
// - Just link to the error page for the last week. We don't need to link to anything specific.
|
|
19
|
+
// - properly getting the node ID that we're going to be watching, and if it goes down, getting a new one, and ignoring messages from the old node.
|
|
20
|
+
// - And if no node exists, we need to warn and then wait.
|
|
21
|
+
|
|
22
|
+
// 7) Write the digest script, which is very different, but will run in the same entry.
|
|
23
|
+
// - Separate warnings and errors and also bucket by time bucket
|
|
24
|
+
// 8) Write a page that shows the results of the digest in tabs, writing the digest probably just to backblaze
|
|
25
|
+
// - For now, just have two tabs, one for errors and one for warnings.
|
|
26
|
+
// - If we're going to do a full scan, we might as well show time series data as well. It's trivial.
|
|
27
|
+
// - Also track the number of suppressed errors as well. We won't have details on these such as a breakdown, but we can at least show the count (and the count by time)
|
|
28
|
+
// 9) send an email every time period, and also send an IM that has smaller information
|
|
29
|
+
// - Both will link to the actual web page that has the digest, deep linking to the specific tabs.
|
|
30
|
+
// - Show the chart in the email as well, but just format it like ASCII Because image dependencies are annoying and I don't want to implement them right now as it might take a few days to get working.
|
|
31
|
+
|
|
32
|
+
async function runIMNotifies() {
|
|
33
|
+
let controllerNodeId = await getControllerNodeId(RecentErrorsController.base);
|
|
34
|
+
if (!controllerNodeId) throw new Error("No controller node id found");
|
|
35
|
+
//todonext
|
|
36
|
+
// Temporary hardcode to use the local server
|
|
37
|
+
controllerNodeId = "127-0-0-1.querysubtest.com:7007";
|
|
38
|
+
|
|
39
|
+
let controller = RecentErrorsController.base.nodes[controllerNodeId];
|
|
40
|
+
recentErrorsChannel.watch(() => {
|
|
41
|
+
void updateRecentErrors(undefined);
|
|
42
|
+
});
|
|
43
|
+
const updateRecentErrors = batchFunction(
|
|
44
|
+
//todonext
|
|
45
|
+
// Increase this after we finish testing
|
|
46
|
+
{ delay: 1000 },
|
|
47
|
+
async function updateRecentErrors() {
|
|
48
|
+
let recentErrors = await controller.getRecentErrors();
|
|
49
|
+
console.log(`Received ${recentErrors.length} recent errors at ${formatDateTime(Date.now())}`);
|
|
50
|
+
for (let error of recentErrors) {
|
|
51
|
+
console.log(` ${error.param0}`);
|
|
52
|
+
}
|
|
53
|
+
console.log();
|
|
54
|
+
console.log();
|
|
55
|
+
}
|
|
56
|
+
);
|
|
57
|
+
await updateRecentErrors(undefined);
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
async function main() {
|
|
61
|
+
void runIMNotifies();
|
|
62
|
+
}
|
|
63
|
+
void main();
|
|
package/src/diagnostics/logs/lifeCycleAnalysis/spec.md
CHANGED
|
@@ -4,34 +4,6 @@ Very small amount of data
|
|
|
4
4
|
https://127-0-0-1.querysubtest.com:7007/?hot&enableLogs&page=login&filter=%22431%22&showingmanagement&endTime=1755140880000&startTime=1754950020000&managementpage=LogViewer2
|
|
5
5
|
|
|
6
6
|
|
|
7
|
-
3) Our time range is not being correctly applied. We have data from 4-5, and are filtering from 4:25 to 5:32, and not matching that data, even though it overlaps a lot.
|
|
8
|
-
4) API watchers are repeatedly dying. I feel like this is fixable, especially with our logs...
|
|
9
|
-
|
|
10
|
-
5) Update all services, and move them to that machine
|
|
11
|
-
5) Verify the hezner server can run the site well
|
|
12
|
-
6) Take down our digital ocean server
|
|
13
|
-
7) Destroy our digital ocean server
|
|
14
|
-
|
|
15
|
-
9) Add a filter to JUST see rate limited logs (by clicking on the button. which just searches for the special text)
|
|
16
|
-
- say "click here to view rate limited logs"
|
|
17
|
-
|
|
18
|
-
10) "Started Listening" isn't being logged?
|
|
19
|
-
- https://127-0-0-1.querysubtest.com:7007/?enableLogs&page=login&showingmanagement&endTime=1757835685102.667&managementpage=LogViewer2&machineview=service-detail&startTime=1757745685102.667&serviceId=service-1756340309836&filter=__machineId%20%3D%20a794fbcf7b104c68%20%26%20Edge
|
|
20
|
-
- Or... maybe logs are lost SOMETIMES, and ALWAYS when we kill the server? Although... that would mean we have multiple issues. Ugh...
|
|
21
|
-
|
|
22
|
-
11) API ranges overlapped? What? I think our code to pick an empty range is wrong?
|
|
23
|
-
|
|
24
|
-
6) Update URLParam to allow linking it to other parameters, resetting when they change.
|
|
25
|
-
- With a function, and have standard one beside URLParam (that uses page and tab)
|
|
26
|
-
- ALSO managementPageURL
|
|
27
|
-
- Reset filter in FastArchiveViewer
|
|
28
|
-
- First observe the overlap with it and the BookOverview
|
|
29
|
-
- If we are actually on the book overview page and we close the management page then that shouldn't reset it. We just want to reset it when you change pages. Because we want you to be able to hide and show the management page quickly if you want to double check something. Generally speaking though, you won't be on a page with filter and then going back and forth. And if you are, whatever. That's just the management page. We just want to avoid the overall confusion and annoyance of having lots of pre-filled values (And mostly the confusion of having filter prefilled all the time because it's always going to be set because everyone uses it and no one resets it at the moment.
|
|
30
|
-
- DON'T reset tab. It's useful to remember the tab? Hmm... sometimes at least...
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
7
|
5) IM error notifications - allow immediately knowing about production issues, for better testing
|
|
36
8
|
- Create a dedicated entry point which acts like a client of the HTTP server, using RecentErrorControllers.getRecentErrors
|
|
37
9
|
- Getting it working in a script will be interesting, but... in theory it should just work?
|
|
@@ -62,6 +34,25 @@ Very small amount of data
|
|
|
62
34
|
|
|
63
35
|
|
|
64
36
|
5) Life cycle analyzer
|
|
37
|
+
- Implement regular range lifecycles first (matching an === object field)
|
|
38
|
+
- THEN, Add lifecycle collections where we can define life cycles that have a certain type.
|
|
39
|
+
- A collection that has everything by default and we remove life cycles from it would also be useful.
|
|
40
|
+
- AFTER that, MAYBE, support global values, setting them, and referencing them in the life cycle (which makes them shown as a whole, AND on each line in the life cycle, so we can see the progression).
|
|
41
|
+
- We have to allow selecting the value we want as well based on a lifecycle state that it can set with its logs. And then we're basically building expression. The whole thing becomes very complicated. So maybe we won't even do it.
|
|
42
|
+
- We should definitely wait until we create life cycles and find use out of them before we start adding global values.
|
|
43
|
+
- It seems like life cycles have two types.
|
|
44
|
+
1) Set global values for user in other logs
|
|
45
|
+
- Allows for providing specific context, such as what are all the nodes that exist at this time, what are all the paths and authorities that exist, etc., so we can tell what the results should be by essentially knowing, instantly, what the other values are on other servers, in a way that the current node couldn't possibly know at that time (and because it can't know it, it couldn't log it, so this is the only way to get this information).
|
|
46
|
+
2) Range based, a bunch of values connected via an === object field, making one range
|
|
47
|
+
- allows reducing the complexity of an analysis, by taking lots of different logs and reducing it to just one.
|
|
48
|
+
- IMPORTANT! Show the time and the count of logs in the life cycle.
|
|
49
|
+
- also allows us to look at them as a whole, as in how long did it take. Which is minor because we could just measure it, However, sometimes it's annoying to pass the start time information around And if we don't pass the start time around, then we need to look at the start time from the first log and the end time for the last log, which again, would require looking at it as a lifecycle analysis.
|
|
50
|
+
- allows adding information based on the existence or the non-existence of a log in that group, such as A setup process missing the last step.
|
|
51
|
+
- IMPORTANT! We need to support this by allowing life cycles to be defined as requiring certain field/value combinations. In practice the field will always be "type" and the value will be the step type in that life cycle; we'll hard-code all the required steps, and then we can warn when there's a missing step.
|
|
52
|
+
- I guess also show the count of every single step because that's useful to see if some things were repeated.
|
|
53
|
+
- Allows drilling into a lifecycle to see just the specific information of the lifecycle cross server
|
|
54
|
+
- Otherwise, it's somewhat annoying to look at things cross-server, and while we can search for a specific key that we log, which is exactly what life cycles will do, it's easier to just click to drilldown, rather than having to make the query dynamically every time we want it.
|
|
55
|
+
2.1) Collections of life cycles so we can further reduce the complexity.
|
|
65
56
|
- Uses FastArchiveViewer, but instead of showing a table, shows lifecycles (a derived concept)
|
|
66
57
|
- We save them in backblaze, with a bit of cache for loading them
|
|
67
58
|
- List of life cycles
|
|
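Since the notes above are still a to-do list, here is a sketch of what the "range based" lifecycle they describe might look like as data; every name is hypothetical:

```ts
// Hypothetical types for the range-based lifecycle sketched above: logs
// joined on one === object field, hard-coded required steps so a missing
// step can be flagged, plus per-step counts and the overall time span.
type LifecycleDefinition = {
    name: string;
    joinField: string;        // logs with equal values here form one range
    requiredSteps: string[];  // warn when one of these is never observed
};

type LifecycleInstance = {
    joinValue: string;
    startTime: number;                  // time of the first matching log
    endTime: number;                    // time of the last matching log
    countsByStep: Map<string, number>;  // repeats show up as counts > 1
    missingSteps: string[];             // required steps with count 0
};
```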
package/src/diagnostics/managementPages.tsx
CHANGED
|
@@ -230,7 +230,8 @@ export async function isManagementUser() {
|
|
|
230
230
|
debugName: "isManagementUser",
|
|
231
231
|
call: testCall,
|
|
232
232
|
});
|
|
233
|
-
|
|
233
|
+
let isTrustedResult = !!writes.result;
|
|
234
|
+
return isTrustedResult;
|
|
234
235
|
}
|
|
235
236
|
|
|
236
237
|
export async function assertIsManagementUser() {
|
|
package/src/functional/SocketChannel.ts
CHANGED
|
@@ -4,6 +4,8 @@ import { getAllNodeIds, watchNodeIds } from "../-f-node-discovery/NodeDiscovery"
|
|
|
4
4
|
import { requiresNetworkTrustHook } from "../-d-trust/NetworkTrust2";
|
|
5
5
|
import { errorToUndefinedSilent } from "../errors";
|
|
6
6
|
import { assertIsManagementUser } from "../diagnostics/managementPages";
|
|
7
|
+
import { IdentityController_getCurrentReconnectNodeId, IdentityController_getMachineId } from "../-c-identity/IdentityController";
|
|
8
|
+
import { blue } from "socket-function/src/formatting/logColors";
|
|
7
9
|
|
|
8
10
|
export class SocketChannel<T> {
|
|
9
11
|
constructor(private globalUniqueChannelName: string) { }
|
|
@@ -34,7 +36,9 @@ export class SocketChannel<T> {
|
|
|
34
36
|
private remoteWatchers = new Set<string>();
|
|
35
37
|
public async _internal_watchMessages() {
|
|
36
38
|
let caller = SocketFunction.getCaller();
|
|
39
|
+
let callerNodeId = IdentityController_getCurrentReconnectNodeId();
|
|
37
40
|
this.remoteWatchers.add(caller.nodeId);
|
|
41
|
+
console.info(blue(`New watcher for channel ${this.globalUniqueChannelName} from ${caller.nodeId} (${callerNodeId})`));
|
|
38
42
|
SocketFunction.onNextDisconnect(caller.nodeId, () => {
|
|
39
43
|
this.remoteWatchers.delete(caller.nodeId);
|
|
40
44
|
});
|
|
@@ -44,7 +48,7 @@ export class SocketChannel<T> {
|
|
|
44
48
|
void Array.from(this.remoteWatchers).map(async (nodeId) => {
|
|
45
49
|
try {
|
|
46
50
|
await this.controller.nodes[nodeId]._internal_onMessage(message);
|
|
47
|
-
} catch {
|
|
51
|
+
} catch (e) {
|
|
48
52
|
this.remoteWatchers.delete(nodeId);
|
|
49
53
|
}
|
|
50
54
|
});
|
|
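The SocketChannel surface exercised elsewhere in this diff (`suppressionUpdatedChannel`, `recentErrorsChannel`, `errorWatcherBase`) is just construct, watch, broadcast. A usage sketch; the channel name and import path are hypothetical:

```ts
// Usage sketch matching the call sites in this diff: one globally unique
// channel name, watch() to receive messages, broadcast() to send them.
import { SocketChannel } from "./SocketChannel";

const settingsUpdatedChannel = new SocketChannel<boolean>("settings-updated");
settingsUpdatedChannel.watch(updated => {
    // Re-fetch whatever this channel invalidates.
    console.log("settings updated:", updated);
});
settingsUpdatedChannel.broadcast(true);
```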
package/src/library-components/ATag.tsx
CHANGED
|
@@ -2,6 +2,7 @@ import preact from "preact";
|
|
|
2
2
|
import { css, isNode } from "typesafecss";
|
|
3
3
|
import { URLParam, parseSearchString, encodeSearchString } from "./URLParam";
|
|
4
4
|
import { qreact } from "../4-dom/qreact";
|
|
5
|
+
import { niceStringify } from "../niceStringify";
|
|
5
6
|
|
|
6
7
|
export type URLOverride<T = unknown> = {
|
|
7
8
|
param: URLParam<T>;
|
|
package/src/library-components/SyncedController.ts
CHANGED
|
@@ -40,11 +40,11 @@ onHotReload(() => {
|
|
|
40
40
|
export function syncedIsAnyLoading() {
|
|
41
41
|
return Querysub.fastRead(() => {
|
|
42
42
|
for (let controllerId in syncedData()) {
|
|
43
|
-
for (let fncs of Object.
|
|
44
|
-
for (let fnc of Object.
|
|
43
|
+
for (let [nodeId, fncs] of Object.entries(syncedData()[controllerId])) {
|
|
44
|
+
for (let [fncName, fnc] of Object.entries(fncs)) {
|
|
45
45
|
for (let obj of Object.values(fnc)) {
|
|
46
46
|
if (atomic(obj.promise)) {
|
|
47
|
-
return
|
|
47
|
+
return `${fncName} (on ${nodeId})`;
|
|
48
48
|
}
|
|
49
49
|
}
|
|
50
50
|
}
|
|
@@ -109,6 +109,7 @@ export function getSyncedController<T extends SocketRegistered>(
|
|
|
109
109
|
resetAll(): void;
|
|
110
110
|
refreshAll(): void;
|
|
111
111
|
isAnyLoading(): boolean;
|
|
112
|
+
base: T;
|
|
112
113
|
} {
|
|
113
114
|
if (isNode()) {
|
|
114
115
|
let result = cache((nodeId: string) => {
|
|
@@ -160,6 +161,7 @@ export function getSyncedController<T extends SocketRegistered>(
|
|
|
160
161
|
result.isAnyLoading = () => {
|
|
161
162
|
notAllowedOnServer();
|
|
162
163
|
};
|
|
164
|
+
result.base = controller;
|
|
163
165
|
return result;
|
|
164
166
|
}
|
|
165
167
|
let id = nextId();
|
|
@@ -404,5 +406,6 @@ export function getSyncedController<T extends SocketRegistered>(
|
|
|
404
406
|
}
|
|
405
407
|
});
|
|
406
408
|
};
|
|
409
|
+
result.base = controller;
|
|
407
410
|
return result;
|
|
408
411
|
}
|
|
package/src/library-components/SyncedControllerLoadingIndicator.tsx
CHANGED
|
@@ -7,7 +7,8 @@ export class SyncedControllerLoadingIndicator extends qreact.Component {
|
|
|
7
7
|
timeLastLoaded = Date.now();
|
|
8
8
|
lastWasLoaded = false;
|
|
9
9
|
render() {
|
|
10
|
-
|
|
10
|
+
let loadingPath = syncedIsAnyLoading();
|
|
11
|
+
if (!loadingPath) {
|
|
11
12
|
if (!this.lastWasLoaded) {
|
|
12
13
|
console.log(`Loaded all SyncedController calls in ${formatTime(Date.now() - this.timeLastLoaded)}`);
|
|
13
14
|
}
|
|
@@ -33,7 +34,7 @@ export class SyncedControllerLoadingIndicator extends qreact.Component {
|
|
|
33
34
|
.borderRadius("50%")
|
|
34
35
|
+ " " + spinAnimationClass
|
|
35
36
|
}></div>
|
|
36
|
-
<span>Syncing data...</span>
|
|
37
|
+
<span title={loadingPath}>Syncing data...</span>
|
|
37
38
|
<style>
|
|
38
39
|
{`
|
|
39
40
|
@keyframes ${spinAnimationClass} {
|
|
package/src/library-components/URLParam.ts
CHANGED
|
@@ -34,16 +34,37 @@ if (!isNode()) {
|
|
|
34
34
|
loadSearchCache = parseSearchString(location.search);
|
|
35
35
|
}
|
|
36
36
|
|
|
37
|
+
declare global {
|
|
38
|
+
var urlResetLinks: Map<string, Set<string>>;
|
|
39
|
+
}
|
|
40
|
+
// changed => reset
|
|
41
|
+
let resetLinks: Map<string, Set<string>> = globalThis.urlResetLinks || (globalThis.urlResetLinks = new Map());
|
|
42
|
+
|
|
37
43
|
|
|
38
44
|
export class URLParam<T = unknown> {
|
|
39
|
-
constructor(urlKey: string, defaultValue: T) {
|
|
40
|
-
return createURLSync(urlKey, defaultValue);
|
|
45
|
+
constructor(urlKey: string, defaultValue: T, config?: URLParamConfig) {
|
|
46
|
+
return createURLSync(urlKey, defaultValue, config);
|
|
41
47
|
}
|
|
42
48
|
}
|
|
43
|
-
|
|
44
|
-
export function createURLSync<T>(urlKey: string, defaultValue: T, config?: {
|
|
49
|
+
export type URLParamConfig = {
|
|
45
50
|
storage?: "url" | "localStorage";
|
|
46
|
-
|
|
51
|
+
// Reset when any of these urlKeys change
|
|
52
|
+
reset?: { (): { urlKey: string }[] }[];
|
|
53
|
+
};
|
|
54
|
+
/** const myVariable = createURLSync("myvar", 0) */
|
|
55
|
+
export function createURLSync<T>(urlKey: string, defaultValue: T, config?: URLParamConfig): URLParam<T> {
|
|
56
|
+
setImmediate(() => {
|
|
57
|
+
for (let reset of config?.reset || []) {
|
|
58
|
+
for (let resetSource of reset()) {
|
|
59
|
+
let lookup = resetLinks.get(resetSource.urlKey);
|
|
60
|
+
if (!lookup) {
|
|
61
|
+
lookup = new Set();
|
|
62
|
+
resetLinks.set(resetSource.urlKey, lookup);
|
|
63
|
+
}
|
|
64
|
+
lookup.add(urlKey);
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
});
|
|
47
68
|
|
|
48
69
|
let prev = globalURLLookup[urlKey];
|
|
49
70
|
if (prev) {
|
|
@@ -104,6 +125,15 @@ export function createURLSync<T>(urlKey: string, defaultValue: T, config?: {
|
|
|
104
125
|
}
|
|
105
126
|
},
|
|
106
127
|
set value(value: T) {
|
|
128
|
+
let resetTargets = resetLinks.get(urlKey);
|
|
129
|
+
if (resetTargets) {
|
|
130
|
+
for (let resetTarget of resetTargets) {
|
|
131
|
+
let param = globalURLLookup[resetTarget];
|
|
132
|
+
if (param) {
|
|
133
|
+
param.reset();
|
|
134
|
+
}
|
|
135
|
+
}
|
|
136
|
+
}
|
|
107
137
|
if (!proxyWatcher.inWatcher()) {
|
|
108
138
|
Querysub.commit(() => {
|
|
109
139
|
param.value = value;
|
|
package/src/library-components/niceStringify.ts
CHANGED
|
@@ -10,7 +10,7 @@ let specialStringValuesEncode = new Map([...specialStringValuesDecode].map(([key
|
|
|
10
10
|
export function serializeURLParam(value: unknown): string | undefined {
|
|
11
11
|
if (value === true) return undefined;
|
|
12
12
|
// If it is a string and doesn't look JSON encoded, then just use it raw.
|
|
13
|
-
if (typeof value === "string" && !lookJSONEncoded(value)) {
|
|
13
|
+
if (typeof value === "string" && !lookJSONEncoded(value) && value !== "") {
|
|
14
14
|
return value;
|
|
15
15
|
}
|
|
16
16
|
let specialValue = specialStringValuesEncode.get(value);
|
|
package/src/library-components/urlResetGroups.ts
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { URLParam, URLParamConfig } from "./URLParam";
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
export const pageURL = new URLParam("page", "");
|
|
5
|
+
export const tabURL = new URLParam("tab", "");
|
|
6
|
+
export const managementPageURL = new URLParam("managementpage", "");
|
|
7
|
+
|
|
8
|
+
export const filterURL = new URLParam("filter", "", { reset: [mainResets] });
|
|
9
|
+
export const startTimeURL = new URLParam("startTime", 0, { reset: [mainResets] });
|
|
10
|
+
export const endTimeURL = new URLParam("endTime", 0, { reset: [mainResets] });
|
|
11
|
+
|
|
12
|
+
export function mainResets(): URLParam[] {
|
|
13
|
+
return [pageURL, tabURL, managementPageURL];
|
|
14
|
+
}
|
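Putting the URLParam.ts and urlResetGroups.ts changes together: a param constructed with `{ reset: [mainResets] }` registers itself (inside a `setImmediate`) under each source key, and any later write to `page`, `tab`, or `managementpage` calls `reset()` on it. A usage sketch; `myFilter` is a hypothetical param:

```ts
// Sketch of the reset-group behavior added above. The link is registered in
// a setImmediate inside createURLSync, so it is live well before any user
// interaction writes to pageURL.
import { URLParam } from "./URLParam";
import { mainResets, pageURL } from "./urlResetGroups";

const myFilter = new URLParam("myFilter", "", { reset: [mainResets] });

myFilter.value = "level = error";
pageURL.value = "login"; // writing to "page" resets myFilter back to ""
```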
package/src/misc/formatJSX.tsx
CHANGED
|
@@ -1,6 +1,12 @@
|
|
|
1
1
|
import { qreact } from "../4-dom/qreact";
|
|
2
2
|
import { formatTime, formatVeryNiceDateTime } from "socket-function/src/formatting/format";
|
|
3
|
+
import { Querysub } from "../4-querysub/QuerysubController";
|
|
3
4
|
|
|
4
5
|
export function formatDateJSX(time: number) {
|
|
5
|
-
|
|
6
|
+
let diff = time - Querysub.nowDelayed(10 * 1000);
|
|
7
|
+
let ago = diff < 0;
|
|
8
|
+
if (ago) {
|
|
9
|
+
diff = -diff;
|
|
10
|
+
}
|
|
11
|
+
return <span title={formatVeryNiceDateTime(time)}>{!ago && "IN "}{formatTime(diff)}{ago && " AGO"}</span>;
|
|
6
12
|
}
|
package/testEntry2.ts
CHANGED
|
@@ -3,13 +3,23 @@ import { getOwnMachineId } from "./src/-a-auth/certs";
|
|
|
3
3
|
import { getOwnThreadId } from "./src/-f-node-discovery/NodeDiscovery";
|
|
4
4
|
import { shutdown } from "./src/diagnostics/periodic";
|
|
5
5
|
import { testTCPIsListening } from "socket-function/src/networking";
|
|
6
|
+
import { Querysub } from "./src/4-querysub/QuerysubController";
|
|
7
|
+
import { timeInSecond } from "socket-function/src/misc";
|
|
6
8
|
|
|
7
9
|
export async function testMain() {
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
//
|
|
10
|
+
Querysub;
|
|
11
|
+
//let test = await testTCPIsListening("1.1.1.1", 443);
|
|
12
|
+
//console.log(test);
|
|
13
|
+
// Writing heartbeat 2025/09/14 08:37:46 PM for self (5ac8a2fa78fce4ea.971ed8b01743d123.querysubtest.com:13900)
|
|
14
|
+
await delay(timeInSecond);
|
|
15
|
+
await Querysub.hostService("test");
|
|
16
|
+
await delay(timeInSecond * 5);
|
|
11
17
|
// console.log(getOwnThreadId());
|
|
12
|
-
//
|
|
18
|
+
// Log an error every 30 seconds forever.
|
|
19
|
+
while (true) {
|
|
20
|
+
console.error(`Test warning for im testing ${Date.now()}`);
|
|
21
|
+
await delay(timeInSecond * 30);
|
|
22
|
+
}
|
|
13
23
|
// console.log(getOwnThreadId());
|
|
14
24
|
// await shutdown();
|
|
15
25
|
//await Querysub.hostService("test");
|
|
@@ -24,7 +34,8 @@ export async function testMain() {
|
|
|
24
34
|
// }
|
|
25
35
|
// await testLogs.flushNow();
|
|
26
36
|
// }
|
|
27
|
-
|
|
37
|
+
await delay(timeInSecond * 15);
|
|
38
|
+
await shutdown();
|
|
28
39
|
}
|
|
29
40
|
async function main() {
|
|
30
41
|
|