querysub 0.377.0 → 0.378.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/error-watch-public.js +7 -0
- package/bin/error-watch.js +6 -0
- package/package.json +7 -4
- package/src/-f-node-discovery/NodeDiscovery.ts +7 -0
- package/src/-g-core-values/NodeCapabilities.ts +28 -14
- package/src/3-path-functions/PathFunctionRunnerMain.ts +0 -4
- package/src/diagnostics/logs/IndexedLogs/IndexedLogs.ts +24 -1
- package/src/diagnostics/logs/IndexedLogs/LogViewer3.tsx +1 -1
- package/src/diagnostics/logs/diskLogger.ts +26 -27
- package/src/diagnostics/logs/diskShimConsoleLogs.ts +4 -0
- package/src/diagnostics/logs/errorNotifications2/ErrorNotificationPage.tsx +276 -0
- package/src/diagnostics/logs/errorNotifications2/errorNotifications.ts +269 -0
- package/src/diagnostics/logs/errorNotifications2/errorWatchEntry.ts +13 -0
- package/src/diagnostics/logs/errorNotifications2/logWatcher.ts +104 -0
- package/src/diagnostics/logs/lifeCycleAnalysis/lifeCycles.tsx +35 -8
- package/src/diagnostics/managementPages.tsx +6 -0
- package/src/server.ts +0 -8
- package/test.ts +16 -6
- package/test2.ts +20 -0
- package/src/diagnostics/logs/errorNotifications2/errorNotifications2.ts +0 -9
- package/src/diagnostics/logs/lifeCycleAnalysis/test.ts +0 -0
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "querysub",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.378.0",
|
|
4
4
|
"main": "index.js",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"note1": "note on node-forge fork, see https://github.com/digitalbazaar/forge/issues/744 for details",
|
|
@@ -12,9 +12,10 @@
|
|
|
12
12
|
"servershardedtest": "yarn server --authority ./pathremain.json & yarn server --authority ./patha.json & yarn server --authority ./pathb.json & yarn server --authority ./pathc.json & yarn server --authority ./pathd.json",
|
|
13
13
|
"type": "yarn tsc --noEmit",
|
|
14
14
|
"depend": "yarn --silent depcruise src --include-only \"^src\" --config --output-type dot | dot -T svg > dependency-graph.svg",
|
|
15
|
-
"t": "yarn typenode ./
|
|
15
|
+
"t": "yarn typenode ./test.ts",
|
|
16
|
+
"t2": "yarn typenode ./test2.ts",
|
|
16
17
|
"test-wat": "yarn typenode ./src/wat/watCompiler.ts",
|
|
17
|
-
"error-watch": "yarn typenode ./src/diagnostics/logs/
|
|
18
|
+
"error-watch": "yarn typenode ./src/diagnostics/logs/errorNotifications2/errorWatchEntry.ts",
|
|
18
19
|
"error-email": "yarn typenode ./src/diagnostics/logs/errorNotifications/errorDigestEntry.tsx",
|
|
19
20
|
"build-native": "cd src/diagnostics/logs/IndexedLogs && node-gyp rebuild"
|
|
20
21
|
},
|
|
@@ -33,7 +34,9 @@
|
|
|
33
34
|
"join": "./bin/join.js",
|
|
34
35
|
"join-public": "./bin/join-public.js",
|
|
35
36
|
"movelogs": "./bin/movelogs.js",
|
|
36
|
-
"addsuperuser": "./bin/addsuperuser.js"
|
|
37
|
+
"addsuperuser": "./bin/addsuperuser.js",
|
|
38
|
+
"error-watch": "./bin/error-watch.js",
|
|
39
|
+
"error-watch-public": "./bin/error-watch-public.js"
|
|
37
40
|
},
|
|
38
41
|
"dependencies": {
|
|
39
42
|
"@types/fs-ext": "^2.0.3",
|
|
@@ -113,6 +113,13 @@ export function isNodeIdOnOwnMachineId(nodeId: string): boolean {
|
|
|
113
113
|
return certs.getMachineId(nodeId) === getOwnMachineId() || nodeId.startsWith("127-0-0-1.");
|
|
114
114
|
}
|
|
115
115
|
|
|
116
|
+
/** Often, when getting all nodes, you're going to receive duplicates of all local nodes with the duplicate containing this prefix. Using the local ID is faster as it will actually use the local IP.
|
|
117
|
+
TODO: We need a robust way to give a deduplicated set that prefers the local ids. At the moment we just filter out the id locals if we can't have duplicates.
|
|
118
|
+
*/
|
|
119
|
+
export function isNodeIdLocal(nodeId: string): boolean {
|
|
120
|
+
return nodeId.startsWith("127-0-0-1.");
|
|
121
|
+
}
|
|
122
|
+
|
|
116
123
|
let nodeOverrides: string[] | undefined;
|
|
117
124
|
let beforeGetNodeAllId = async () => { };
|
|
118
125
|
export async function getAllNodeIds() {
|
|
@@ -16,15 +16,14 @@ import { requiresNetworkTrustHook } from "../-d-trust/NetworkTrust2";
|
|
|
16
16
|
import { isNoNetwork } from "../config";
|
|
17
17
|
import { getDebuggerUrl } from "../diagnostics/listenOnDebugger";
|
|
18
18
|
import { hackDevtoolsWebsocketForward } from "./oneTimeForward";
|
|
19
|
-
import { getOwnMachineId, decodeNodeId, decodeNodeIdAssert } from "../-a-auth/certs";
|
|
19
|
+
import { getOwnMachineId, decodeNodeId, decodeNodeIdAssert, getMachineId } from "../-a-auth/certs";
|
|
20
20
|
import { sort } from "socket-function/src/misc";
|
|
21
21
|
import { getPathStr2 } from "../path";
|
|
22
|
+
import { PromiseObj } from "../promise";
|
|
22
23
|
setImmediate(() => {
|
|
23
24
|
import("../diagnostics/MachineThreadInfo");
|
|
24
25
|
});
|
|
25
26
|
|
|
26
|
-
let loadTime = Date.now();
|
|
27
|
-
|
|
28
27
|
let controllerNodeIdCache = new Map<string, string | Promise<string | undefined>>();
|
|
29
28
|
|
|
30
29
|
// NOTE: If this becomes slow (because we are just trying all servers), we could start to store capabilities
|
|
@@ -39,32 +38,45 @@ export async function getControllerNodeId(
|
|
|
39
38
|
if (cached && typeof cached !== "string") {
|
|
40
39
|
cached = await cached;
|
|
41
40
|
}
|
|
41
|
+
// TODO: We are relying on is node connected to do a lot of work here. If the node's connected, but somehow doesn't work, we can get in an unrecoverable state, even if there are plenty of other working nodes.
|
|
42
42
|
if (cached && !SocketFunction.isNodeConnected(cached)) {
|
|
43
43
|
controllerNodeIdCache.delete(controller._classGuid);
|
|
44
44
|
cached = undefined;
|
|
45
45
|
}
|
|
46
46
|
if (cached) return cached;
|
|
47
|
-
let promise = getInternal();
|
|
47
|
+
let promise = getInternal(retryCount);
|
|
48
48
|
controllerNodeIdCache.set(controller._classGuid, promise);
|
|
49
49
|
return promise;
|
|
50
50
|
|
|
51
|
-
async function getInternal() {
|
|
52
|
-
|
|
51
|
+
async function getInternal(retryCount: number) {
|
|
53
52
|
let nodeIdsToTest = await getAllNodeIds();
|
|
54
53
|
// Shuffle, so we aren't always using the same node!
|
|
55
54
|
nodeIdsToTest = shuffle(nodeIdsToTest, Date.now());
|
|
56
|
-
|
|
55
|
+
let resolvedNode = new PromiseObj<string>();
|
|
56
|
+
|
|
57
|
+
if (!quiet) {
|
|
58
|
+
console.log(`Checking ${nodeIdsToTest.length} nodes for capability ${controller._classGuid}`);
|
|
59
|
+
}
|
|
60
|
+
let allFinished = Promise.all(nodeIdsToTest.map(async nodeId => {
|
|
57
61
|
if (await doesNodeExposeController(nodeId, controller)) {
|
|
58
62
|
if (!quiet) {
|
|
59
63
|
let duration = Date.now() - initialTime;
|
|
60
64
|
console.log(green(`Resolved capability ${controller._classGuid} with node ${nodeId}, in ${formatTime(duration)}`));
|
|
61
65
|
}
|
|
62
|
-
|
|
66
|
+
resolvedNode.resolve(nodeId);
|
|
63
67
|
}
|
|
68
|
+
}));
|
|
69
|
+
let result = await Promise.race([allFinished, resolvedNode.promise]);
|
|
70
|
+
if (typeof result === "string") {
|
|
71
|
+
return result;
|
|
64
72
|
}
|
|
73
|
+
|
|
65
74
|
if (retryCount > 0) {
|
|
75
|
+
if (!quiet) {
|
|
76
|
+
console.log(`Did not find capability in ${nodeIdsToTest.length} nodes, retrying... ${controller._classGuid}`);
|
|
77
|
+
}
|
|
66
78
|
await delay(2000);
|
|
67
|
-
return
|
|
79
|
+
return getInternal(retryCount - 1);
|
|
68
80
|
}
|
|
69
81
|
if (!quiet) {
|
|
70
82
|
console.warn(yellow(`Could not find a node that exposes controller ${controller._classGuid}, tried: ${JSON.stringify(nodeIdsToTest)}`));
|
|
@@ -81,7 +93,11 @@ export async function getControllerNodeIdList(
|
|
|
81
93
|
await Promise.all(nodeIdsToTest.map(async nodeId => {
|
|
82
94
|
let result = await doesNodeExposeController(nodeId, controller);
|
|
83
95
|
if (result) {
|
|
84
|
-
|
|
96
|
+
let entryPoint = await NodeCapabilitiesController.nodes[nodeId].getEntryPoint();
|
|
97
|
+
passedNodeIds.set(nodeId, {
|
|
98
|
+
machineId: getMachineId(nodeId),
|
|
99
|
+
entryPoint,
|
|
100
|
+
});
|
|
85
101
|
}
|
|
86
102
|
}));
|
|
87
103
|
|
|
@@ -99,13 +115,11 @@ export async function getControllerNodeIdList(
|
|
|
99
115
|
}
|
|
100
116
|
|
|
101
117
|
|
|
102
|
-
export async function doesNodeExposeController(reconnectNodeId: string, controller: SocketRegistered<{}>): Promise<
|
|
118
|
+
export async function doesNodeExposeController(reconnectNodeId: string, controller: SocketRegistered<{}>): Promise<boolean> {
|
|
103
119
|
let exposedControllers = await timeoutToUndefinedSilent(10_000, NodeCapabilitiesController.nodes[reconnectNodeId].getExposedControllers());
|
|
104
|
-
let machineId = await getOwnMachineId();
|
|
105
|
-
let entryPoint = await new NodeCapabilitiesControllerBase().getEntryPoint();
|
|
106
120
|
|
|
107
121
|
if (exposedControllers?.includes(controller._classGuid)) {
|
|
108
|
-
return
|
|
122
|
+
return true;
|
|
109
123
|
}
|
|
110
124
|
return false;
|
|
111
125
|
}
|
|
@@ -40,10 +40,6 @@ async function main() {
|
|
|
40
40
|
|
|
41
41
|
await Querysub.hostService("PathFunctionRunnerMain");
|
|
42
42
|
|
|
43
|
-
if (!isPublic()) {
|
|
44
|
-
void IndexedLogs.runLogMoveLoop();
|
|
45
|
-
}
|
|
46
|
-
|
|
47
43
|
// Use a fairly high stick time (the default is 10s), because having wait to sync data is very slow,
|
|
48
44
|
// and the function runner SHOULD have more memory than the clients, and much faster network speeds
|
|
49
45
|
// (it should be on the same local network as the path value authorities).
|
|
@@ -26,6 +26,9 @@ import { LimitGroup } from "../../../functional/limitProcessing";
|
|
|
26
26
|
import { getAllNodeIds } from "../../../-f-node-discovery/NodeDiscovery";
|
|
27
27
|
import { NodeCapabilitiesController } from "../../../-g-core-values/NodeCapabilities";
|
|
28
28
|
import { getLoggers2Async } from "../diskLogger";
|
|
29
|
+
import { watchAllValues } from "../errorNotifications2/logWatcher";
|
|
30
|
+
//
|
|
31
|
+
watchAllValues;
|
|
29
32
|
|
|
30
33
|
export type TimeFilePathWithSize = TimeFilePath & {
|
|
31
34
|
size: number;
|
|
@@ -33,7 +36,8 @@ export type TimeFilePathWithSize = TimeFilePath & {
|
|
|
33
36
|
sourceName: string;
|
|
34
37
|
};
|
|
35
38
|
|
|
36
|
-
let loggerByName = new Map<string, IndexedLogs<unknown>>();
|
|
39
|
+
export let loggerByName = new Map<string, IndexedLogs<unknown>>();
|
|
40
|
+
|
|
37
41
|
|
|
38
42
|
|
|
39
43
|
export class IndexedLogs<T> {
|
|
@@ -218,11 +222,30 @@ export class IndexedLogs<T> {
|
|
|
218
222
|
return groups;
|
|
219
223
|
}
|
|
220
224
|
|
|
225
|
+
private errorWatchers = new Set<(datum: T) => void>();
|
|
226
|
+
public watchErrors(callback: (datum: T) => void): () => void {
|
|
227
|
+
console.log(blue(`Watching errors: ${this.config.name}`));
|
|
228
|
+
this.errorWatchers.add(callback);
|
|
229
|
+
return () => {
|
|
230
|
+
this.errorWatchers.delete(callback);
|
|
231
|
+
};
|
|
232
|
+
}
|
|
233
|
+
|
|
221
234
|
public append(datum: T) {
|
|
222
235
|
this.getCurrentLogStream().append(datum);
|
|
223
236
|
if (IndexedLogs.shouldRunLoop) {
|
|
224
237
|
this.runLogMoverLoop();
|
|
225
238
|
}
|
|
239
|
+
|
|
240
|
+
for (let callback of this.errorWatchers.values()) {
|
|
241
|
+
console.log(blue(`Calling error callback: ${this.config.name}`));
|
|
242
|
+
try {
|
|
243
|
+
callback(datum);
|
|
244
|
+
} catch (e) {
|
|
245
|
+
this.errorWatchers.delete(callback);
|
|
246
|
+
console.warn("Error in watchErrors callback, removing callback (likely just a client disconnect)", e);
|
|
247
|
+
}
|
|
248
|
+
}
|
|
226
249
|
}
|
|
227
250
|
|
|
228
251
|
private machineNodeCache = new Map<string, string>();
|
|
@@ -4,7 +4,7 @@ import { css } from "../../../4-dom/css";
|
|
|
4
4
|
import { t } from "../../../2-proxy/schema2";
|
|
5
5
|
import { Button } from "../../../library-components/Button";
|
|
6
6
|
import { InputLabel, InputLabelURL } from "../../../library-components/InputLabel";
|
|
7
|
-
import {
|
|
7
|
+
import { getLoggers2Async, LogDatum } from "../diskLogger";
|
|
8
8
|
import { list, timeInDay, keyByArray, sort, throttleFunction } from "socket-function/src/misc";
|
|
9
9
|
import { formatDateTime, formatDateTimeDetailed, formatNumber, formatTime, formatPercent } from "socket-function/src/formatting/format";
|
|
10
10
|
import { IndexedLogs, TimeFilePathWithSize } from "./IndexedLogs";
|
|
@@ -7,6 +7,7 @@ import { formatTime } from "socket-function/src/formatting/format";
|
|
|
7
7
|
import { addEpsilons } from "../../bits";
|
|
8
8
|
import { getPathStr2 } from "../../path";
|
|
9
9
|
import { isPublic } from "../../config";
|
|
10
|
+
import type { IndexedLogs } from "./IndexedLogs/IndexedLogs";
|
|
10
11
|
// IMPORTANT! We can't have any real imports here, because we are depended on so early in startup!
|
|
11
12
|
|
|
12
13
|
if (isNode()) {
|
|
@@ -65,32 +66,35 @@ export const LOG_LINE_LIMIT_FLAG = String.fromCharCode(44534) + "LOGS_LINE_LIMIT
|
|
|
65
66
|
/** If this key exists in the logged object, as in a key in one of the objects logged, then we will use the value of it as the limit ID. This is useful as it allows us to either override a limit or limit something independently from other logs in the file. */
|
|
66
67
|
export const LOG_LINE_LIMIT_ID = "LIMIT_LINE_ID";
|
|
67
68
|
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
});
|
|
75
|
-
return undefined;
|
|
76
|
-
}
|
|
77
|
-
|
|
78
|
-
return {
|
|
79
|
-
logLogs: new IndexedLogs<LogDatum>({ name: "logs/log", getTime: x => x.time }),
|
|
80
|
-
warnLogs: new IndexedLogs<LogDatum>({ name: "logs/warn", getTime: x => x.time }),
|
|
81
|
-
infoLogs: new IndexedLogs<LogDatum>({ name: "logs/info", getTime: x => x.time }),
|
|
82
|
-
errorLogs: new IndexedLogs<LogDatum>({ name: "logs/error", getTime: x => x.time }),
|
|
83
|
-
};
|
|
84
|
-
});
|
|
69
|
+
let loggersObj: {
|
|
70
|
+
logLogs: IndexedLogs<LogDatum>;
|
|
71
|
+
warnLogs: IndexedLogs<LogDatum>;
|
|
72
|
+
infoLogs: IndexedLogs<LogDatum>;
|
|
73
|
+
errorLogs: IndexedLogs<LogDatum>;
|
|
74
|
+
} | undefined;
|
|
85
75
|
export const getLoggers2Async = lazy(async () => {
|
|
86
76
|
const { IndexedLogs } = await import("./IndexedLogs/IndexedLogs");
|
|
87
|
-
|
|
77
|
+
loggersObj = {
|
|
88
78
|
logLogs: new IndexedLogs<LogDatum>({ name: "logs/log", getTime: x => x.time }),
|
|
89
79
|
warnLogs: new IndexedLogs<LogDatum>({ name: "logs/warn", getTime: x => x.time }),
|
|
90
80
|
infoLogs: new IndexedLogs<LogDatum>({ name: "logs/info", getTime: x => x.time }),
|
|
91
81
|
errorLogs: new IndexedLogs<LogDatum>({ name: "logs/error", getTime: x => x.time }),
|
|
92
82
|
};
|
|
83
|
+
return loggersObj;
|
|
93
84
|
});
|
|
85
|
+
export function getLogLogs(): Promise<IndexedLogs<LogDatum>> {
|
|
86
|
+
return getLoggers2Async().then(x => x.logLogs);
|
|
87
|
+
}
|
|
88
|
+
export function getWarnLogs(): Promise<IndexedLogs<LogDatum>> {
|
|
89
|
+
return getLoggers2Async().then(x => x.warnLogs);
|
|
90
|
+
}
|
|
91
|
+
export function getInfoLogs(): Promise<IndexedLogs<LogDatum>> {
|
|
92
|
+
return getLoggers2Async().then(x => x.infoLogs);
|
|
93
|
+
}
|
|
94
|
+
export function getErrorLogs(): Promise<IndexedLogs<LogDatum>> {
|
|
95
|
+
return getLoggers2Async().then(x => x.errorLogs);
|
|
96
|
+
}
|
|
97
|
+
|
|
94
98
|
|
|
95
99
|
// NOTE: If any message (first param) starts with this, we don't log it to the disk. VERY useful for multi-line logging where it wouldn't make sense in the logs
|
|
96
100
|
// NOTE: This is visible, otherwise it's easy to accidentally copy it, and not know why the text is behaving strangely (not === other seemingly equal text, etc).
|
|
@@ -105,10 +109,6 @@ export function addGlobalContext(fnc: () => { [key: string]: unknown }) {
|
|
|
105
109
|
globalContextParts.push(fnc);
|
|
106
110
|
}
|
|
107
111
|
|
|
108
|
-
let startupDone = false;
|
|
109
|
-
void Promise.resolve().then(() => {
|
|
110
|
-
startupDone = true;
|
|
111
|
-
});
|
|
112
112
|
|
|
113
113
|
|
|
114
114
|
|
|
@@ -136,15 +136,14 @@ export function logDisk(type: "log" | "warn" | "info" | "error", ...args: unknow
|
|
|
136
136
|
// NOTE: Local logs now log to their local disk instead of backblaze, so we can log even if in development (they just won't show up on the remote server).
|
|
137
137
|
//if (isPublic())
|
|
138
138
|
{
|
|
139
|
-
let
|
|
140
|
-
if (!
|
|
141
|
-
|
|
142
|
-
setImmediate(() => {
|
|
139
|
+
let promise = getLoggers2Async();
|
|
140
|
+
if (!loggersObj) {
|
|
141
|
+
void promise.finally(() => {
|
|
143
142
|
logDiskDontShim(type, ...args);
|
|
144
143
|
});
|
|
145
144
|
return;
|
|
146
145
|
}
|
|
147
|
-
const { logLogs, warnLogs, infoLogs, errorLogs } =
|
|
146
|
+
const { logLogs, warnLogs, infoLogs, errorLogs } = loggersObj;
|
|
148
147
|
if (type === "log") {
|
|
149
148
|
logLogs.append(logObj);
|
|
150
149
|
} else if (type === "warn") {
|
|
@@ -20,6 +20,10 @@ export function shimConsoleLogs() {
|
|
|
20
20
|
let console = globalThis.console;
|
|
21
21
|
let originalFnc = console[fncName];
|
|
22
22
|
console[fncName] = (...args: any[]) => {
|
|
23
|
+
if (!(globalThis as any).shimmedConsoleLogs) {
|
|
24
|
+
if (fncName === "info") return;
|
|
25
|
+
return originalFnc(...args);
|
|
26
|
+
}
|
|
23
27
|
try {
|
|
24
28
|
if (
|
|
25
29
|
args.length > 0
|
|
@@ -0,0 +1,276 @@
|
|
|
1
|
+
import { qreact } from "../../../4-dom/qreact";
|
|
2
|
+
import { t } from "../../../2-proxy/schema2";
|
|
3
|
+
import { css } from "typesafecss";
|
|
4
|
+
import { ErrorNotificationsController, watchUnmatchedErrors } from "./errorNotifications";
|
|
5
|
+
import { LogDatum } from "../diskLogger";
|
|
6
|
+
import { sort, nextId } from "socket-function/src/misc";
|
|
7
|
+
import { formatVeryNiceDateTime } from "socket-function/src/formatting/format";
|
|
8
|
+
import { Querysub } from "../../../4-querysub/QuerysubController";
|
|
9
|
+
import { InputLabel } from "../../../library-components/InputLabel";
|
|
10
|
+
import { SocketFunction } from "socket-function/SocketFunction";
|
|
11
|
+
import { isPublic } from "../../../config";
|
|
12
|
+
|
|
13
|
+
export class ErrorNotificationPage extends qreact.Component {
|
|
14
|
+
state = t.state({
|
|
15
|
+
unmatchedErrors: t.atomic<LogDatum[]>([]),
|
|
16
|
+
suppressionMatches: t.atomic<Map<string, {
|
|
17
|
+
history: Map<number, { count: number }>;
|
|
18
|
+
exampleIndex: number;
|
|
19
|
+
examples: LogDatum[];
|
|
20
|
+
}>>(new Map()),
|
|
21
|
+
suppressionEntries: t.atomic<Array<{
|
|
22
|
+
id: string;
|
|
23
|
+
description: string;
|
|
24
|
+
pattern: string;
|
|
25
|
+
createdTime: number;
|
|
26
|
+
lastUpdatedTime: number;
|
|
27
|
+
}>>([]),
|
|
28
|
+
addingNewSuppression: t.atomic<boolean>(false),
|
|
29
|
+
newSuppressionDescription: t.atomic<string>(""),
|
|
30
|
+
newSuppressionPattern: t.atomic<string>(""),
|
|
31
|
+
initError: t.atomic<string | undefined>(undefined),
|
|
32
|
+
isLoading: t.atomic<boolean>(true),
|
|
33
|
+
testErrorMessage: t.atomic<string>("example error"),
|
|
34
|
+
});
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
unsubscribe?: () => void;
|
|
38
|
+
|
|
39
|
+
componentDidMount() {
|
|
40
|
+
Querysub.onCommitFinished(async () => {
|
|
41
|
+
try {
|
|
42
|
+
this.unsubscribe = await watchUnmatchedErrors({
|
|
43
|
+
callback: (datums: LogDatum[]) => {
|
|
44
|
+
Querysub.commit(() => {
|
|
45
|
+
this.state.unmatchedErrors = [...this.state.unmatchedErrors, ...datums];
|
|
46
|
+
});
|
|
47
|
+
},
|
|
48
|
+
});
|
|
49
|
+
|
|
50
|
+
let controller = ErrorNotificationsController(SocketFunction.browserNodeId());
|
|
51
|
+
let data = await controller.getData.promise();
|
|
52
|
+
let suppressionEntries = await controller.getSuppressionEntries.promise();
|
|
53
|
+
|
|
54
|
+
Querysub.commit(() => {
|
|
55
|
+
this.state.unmatchedErrors = data.unmatchedErrors;
|
|
56
|
+
this.state.suppressionMatches = data.suppressionMatches;
|
|
57
|
+
this.state.suppressionEntries = suppressionEntries;
|
|
58
|
+
this.state.initError = undefined;
|
|
59
|
+
this.state.isLoading = false;
|
|
60
|
+
});
|
|
61
|
+
} catch (error) {
|
|
62
|
+
Querysub.commit(() => {
|
|
63
|
+
this.state.initError = error instanceof Error ? error.message : String(error);
|
|
64
|
+
this.state.isLoading = false;
|
|
65
|
+
});
|
|
66
|
+
}
|
|
67
|
+
});
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
componentWillUnmount() {
|
|
71
|
+
this.unsubscribe?.();
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
render() {
|
|
75
|
+
if (this.state.isLoading) {
|
|
76
|
+
return <div className={css.vbox(16).pad2(16)}>
|
|
77
|
+
<h2>Error Notifications</h2>
|
|
78
|
+
<div>Loading error notifications...</div>
|
|
79
|
+
</div>;
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
if (this.state.initError) {
|
|
83
|
+
return <div className={css.vbox(16).pad2(16)}>
|
|
84
|
+
<h2>Error Notifications</h2>
|
|
85
|
+
<div className={css.pad2(12).bord2(0, 80, 50).hsl(0, 80, 95).colorhsl(0, 80, 30)}>
|
|
86
|
+
<strong>Error loading error notifications:</strong>
|
|
87
|
+
<pre className={css.pad2(8).hsl(0, 0, 98).bord2(0, 0, 85)}>
|
|
88
|
+
{this.state.initError}
|
|
89
|
+
</pre>
|
|
90
|
+
</div>
|
|
91
|
+
</div>;
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
let sortedErrors = [...this.state.unmatchedErrors];
|
|
95
|
+
sort(sortedErrors, x => -x.time);
|
|
96
|
+
let recentErrors = sortedErrors.slice(0, 10);
|
|
97
|
+
|
|
98
|
+
let sortedSuppressions = [...this.state.suppressionEntries];
|
|
99
|
+
sort(sortedSuppressions, x => -x.lastUpdatedTime);
|
|
100
|
+
|
|
101
|
+
return <div className={css.vbox(16).pad2(16)}>
|
|
102
|
+
<h2>Error Notifications</h2>
|
|
103
|
+
|
|
104
|
+
{!isPublic() && <h1>
|
|
105
|
+
IMPORTANT! You MUST run `yarn error-watch` to get errors notifications in the dev environment.
|
|
106
|
+
</h1>}
|
|
107
|
+
|
|
108
|
+
<div className={css.hbox(12).pad2(12).bord2(30, 50, 60).hsl(30, 50, 95)}>
|
|
109
|
+
<InputLabel
|
|
110
|
+
label="Test Error Message"
|
|
111
|
+
value={this.state.testErrorMessage}
|
|
112
|
+
onChangeValue={(value) => {
|
|
113
|
+
this.state.testErrorMessage = value;
|
|
114
|
+
}}
|
|
115
|
+
className={css.width(400)}
|
|
116
|
+
/>
|
|
117
|
+
<button
|
|
118
|
+
className={css.pad2(12, 8).button.bord2(30, 80, 50).hsl(30, 80, 85)}
|
|
119
|
+
onClick={() => {
|
|
120
|
+
let message = this.state.testErrorMessage;
|
|
121
|
+
Querysub.onCommitFinished(async () => {
|
|
122
|
+
let controller = ErrorNotificationsController(SocketFunction.browserNodeId());
|
|
123
|
+
await controller.logTestError.promise(message);
|
|
124
|
+
});
|
|
125
|
+
}}
|
|
126
|
+
>
|
|
127
|
+
Log Test Error
|
|
128
|
+
</button>
|
|
129
|
+
</div>
|
|
130
|
+
|
|
131
|
+
<div className={css.vbox(12)}>
|
|
132
|
+
<h3>Unmatched Errors ({this.state.unmatchedErrors.length})</h3>
|
|
133
|
+
{recentErrors.length === 0 && <div>No unmatched errors</div>}
|
|
134
|
+
<div className={css.vbox(8)}>
|
|
135
|
+
{recentErrors.map((error, idx) => (
|
|
136
|
+
<div key={idx} className={css.pad2(12).bord2(0, 30, 70).hsl(0, 50, 95).vbox(4)}>
|
|
137
|
+
<div className={css.hbox(8)}>
|
|
138
|
+
<strong>{formatVeryNiceDateTime(error.time)}</strong>
|
|
139
|
+
{error.machineId && <span>Machine: {error.machineId}</span>}
|
|
140
|
+
</div>
|
|
141
|
+
<pre className={css.overflowAuto.pad2(8).hsl(0, 0, 98).bord2(0, 0, 85)}>
|
|
142
|
+
{JSON.stringify(error)}
|
|
143
|
+
</pre>
|
|
144
|
+
</div>
|
|
145
|
+
))}
|
|
146
|
+
</div>
|
|
147
|
+
</div>
|
|
148
|
+
|
|
149
|
+
<div className={css.vbox(12)}>
|
|
150
|
+
<div className={css.hbox(12)}>
|
|
151
|
+
<h3>Suppressions ({sortedSuppressions.length})</h3>
|
|
152
|
+
<button
|
|
153
|
+
className={css.pad2(12, 8).button.bord2(120, 80, 50).hsl(120, 80, 90)}
|
|
154
|
+
onClick={() => {
|
|
155
|
+
this.state.addingNewSuppression = !this.state.addingNewSuppression;
|
|
156
|
+
}}
|
|
157
|
+
>
|
|
158
|
+
{this.state.addingNewSuppression ? "Cancel" : "Add Suppression"}
|
|
159
|
+
</button>
|
|
160
|
+
</div>
|
|
161
|
+
|
|
162
|
+
{this.state.addingNewSuppression && (
|
|
163
|
+
<div className={css.vbox(8).pad2(12).bord2(120, 50, 60).hsl(120, 50, 95)}>
|
|
164
|
+
<InputLabel
|
|
165
|
+
label="Description"
|
|
166
|
+
value={this.state.newSuppressionDescription}
|
|
167
|
+
onChangeValue={(value) => {
|
|
168
|
+
this.state.newSuppressionDescription = value;
|
|
169
|
+
}}
|
|
170
|
+
className={css.width(500)}
|
|
171
|
+
/>
|
|
172
|
+
<InputLabel
|
|
173
|
+
label="Pattern (regex)"
|
|
174
|
+
value={this.state.newSuppressionPattern}
|
|
175
|
+
onChangeValue={(value) => {
|
|
176
|
+
this.state.newSuppressionPattern = value;
|
|
177
|
+
}}
|
|
178
|
+
className={css.width(500)}
|
|
179
|
+
/>
|
|
180
|
+
<button
|
|
181
|
+
className={css.pad2(12, 8).button.bord2(120, 80, 50).hsl(120, 80, 85)}
|
|
182
|
+
onClick={() => {
|
|
183
|
+
let description = this.state.newSuppressionDescription;
|
|
184
|
+
let pattern = this.state.newSuppressionPattern;
|
|
185
|
+
if (!description || !pattern) {
|
|
186
|
+
alert("Description and pattern are required");
|
|
187
|
+
return;
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
Querysub.onCommitFinished(async () => {
|
|
191
|
+
let now = Date.now();
|
|
192
|
+
let newEntry = {
|
|
193
|
+
id: nextId(),
|
|
194
|
+
description,
|
|
195
|
+
pattern,
|
|
196
|
+
createdTime: now,
|
|
197
|
+
lastUpdatedTime: now,
|
|
198
|
+
};
|
|
199
|
+
let controller = ErrorNotificationsController(SocketFunction.browserNodeId());
|
|
200
|
+
await controller.setSuppressionEntry.promise(newEntry);
|
|
201
|
+
|
|
202
|
+
let suppressionEntries = await controller.getSuppressionEntries.promise();
|
|
203
|
+
Querysub.commit(() => {
|
|
204
|
+
this.state.suppressionEntries = suppressionEntries;
|
|
205
|
+
this.state.addingNewSuppression = false;
|
|
206
|
+
this.state.newSuppressionDescription = "";
|
|
207
|
+
this.state.newSuppressionPattern = "";
|
|
208
|
+
});
|
|
209
|
+
});
|
|
210
|
+
}}
|
|
211
|
+
>
|
|
212
|
+
Save Suppression
|
|
213
|
+
</button>
|
|
214
|
+
</div>
|
|
215
|
+
)}
|
|
216
|
+
|
|
217
|
+
<div className={css.vbox(12)}>
|
|
218
|
+
{sortedSuppressions.map((suppression) => {
|
|
219
|
+
let matchData = this.state.suppressionMatches.get(suppression.id);
|
|
220
|
+
let totalCount = 0;
|
|
221
|
+
if (matchData) {
|
|
222
|
+
for (let [chunk, data] of matchData.history) {
|
|
223
|
+
totalCount += data.count;
|
|
224
|
+
}
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
return <div key={suppression.id} className={css.pad2(12).bord2(200, 30, 70).hsl(200, 30, 95).vbox(8)}>
|
|
228
|
+
<div className={css.hbox(12)}>
|
|
229
|
+
<div className={css.flexGrow(1).vbox(4)}>
|
|
230
|
+
<strong>{suppression.description}</strong>
|
|
231
|
+
<div className={css.hbox(12)}>
|
|
232
|
+
<span>Pattern: <code className={css.pad2(4, 2).hsl(0, 0, 90)}>{suppression.pattern}</code></span>
|
|
233
|
+
<span>Total Matches: {totalCount}</span>
|
|
234
|
+
</div>
|
|
235
|
+
<div className={css.hbox(12)}>
|
|
236
|
+
<span>Created: {formatVeryNiceDateTime(suppression.createdTime)}</span>
|
|
237
|
+
<span>Updated: {formatVeryNiceDateTime(suppression.lastUpdatedTime)}</span>
|
|
238
|
+
</div>
|
|
239
|
+
</div>
|
|
240
|
+
<button
|
|
241
|
+
className={css.pad2(8, 6).button.bord2(0, 80, 50).hsl(0, 80, 90)}
|
|
242
|
+
onClick={() => {
|
|
243
|
+
if (!confirm(`Delete suppression "${suppression.description}"?`)) return;
|
|
244
|
+
|
|
245
|
+
Querysub.onCommitFinished(async () => {
|
|
246
|
+
let controller = ErrorNotificationsController(SocketFunction.browserNodeId());
|
|
247
|
+
await controller.deleteSuppressionEntry.promise(suppression.id);
|
|
248
|
+
|
|
249
|
+
let suppressionEntries = await controller.getSuppressionEntries.promise();
|
|
250
|
+
Querysub.commit(() => {
|
|
251
|
+
this.state.suppressionEntries = suppressionEntries;
|
|
252
|
+
});
|
|
253
|
+
});
|
|
254
|
+
}}
|
|
255
|
+
>
|
|
256
|
+
Delete
|
|
257
|
+
</button>
|
|
258
|
+
</div>
|
|
259
|
+
|
|
260
|
+
{matchData && matchData.examples.length > 0 && (
|
|
261
|
+
<div className={css.vbox(8)}>
|
|
262
|
+
<strong>Recent Examples ({matchData.examples.length}):</strong>
|
|
263
|
+
{matchData.examples.slice(0, 3).map((example, idx) => (
|
|
264
|
+
<pre key={idx} className={css.overflowAuto.pad2(8).hsl(0, 0, 98).bord2(0, 0, 85).fontSize(12)}>
|
|
265
|
+
{JSON.stringify(example, null, 2)}
|
|
266
|
+
</pre>
|
|
267
|
+
))}
|
|
268
|
+
</div>
|
|
269
|
+
)}
|
|
270
|
+
</div>;
|
|
271
|
+
})}
|
|
272
|
+
</div>
|
|
273
|
+
</div>
|
|
274
|
+
</div>;
|
|
275
|
+
}
|
|
276
|
+
}
|
|
@@ -0,0 +1,269 @@
|
|
|
1
|
+
import { cacheLimited, lazy } from "socket-function/src/caching";
|
|
2
|
+
import { nestArchives } from "../../../-a-archives/archives";
|
|
3
|
+
import { getArchivesBackblaze } from "../../../-a-archives/archivesBackBlaze";
|
|
4
|
+
import { archiveJSONT } from "../../../-a-archives/archivesJSONT";
|
|
5
|
+
import { Querysub } from "../../../4-querysub/QuerysubController";
|
|
6
|
+
import { getDomain } from "../../../config";
|
|
7
|
+
import { MachineInfo } from "../../../deployManager/machineSchema";
|
|
8
|
+
import { createMatchesPattern } from "../IndexedLogs/bufferSearchFindMatcher";
|
|
9
|
+
import { LogDatum, getErrorLogs } from "../diskLogger";
|
|
10
|
+
import { watchAllValues } from "./logWatcher";
|
|
11
|
+
import { batchFunction, runInfinitePollCallAtStart } from "socket-function/src/batching";
|
|
12
|
+
import { nextId, timeInMinute } from "socket-function/src/misc";
|
|
13
|
+
import { SocketFunction } from "socket-function/SocketFunction";
|
|
14
|
+
import { assertIsManagementUser } from "../../managementPages";
|
|
15
|
+
import { getSyncedController } from "../../../library-components/SyncedController";
|
|
16
|
+
import { getControllerNodeId } from "../../../-g-core-values/NodeCapabilities";
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
type SuppressionEntry = {
|
|
20
|
+
id: string;
|
|
21
|
+
description: string;
|
|
22
|
+
pattern: string;
|
|
23
|
+
|
|
24
|
+
createdTime: number;
|
|
25
|
+
lastUpdatedTime: number;
|
|
26
|
+
};
|
|
27
|
+
const suppression = archiveJSONT<SuppressionEntry>(() => nestArchives("logs/error-suppression/", getArchivesBackblaze(getDomain())));
|
|
28
|
+
let suppressionCache: SuppressionEntry[] = [];
|
|
29
|
+
let ensureWatching = lazy(async () => {
|
|
30
|
+
await runInfinitePollCallAtStart(timeInMinute * 5, updateNow);
|
|
31
|
+
});
|
|
32
|
+
async function updateNow() {
|
|
33
|
+
suppressionCache = await suppression.values();
|
|
34
|
+
}
|
|
35
|
+
async function getSuppressionEntries(): Promise<SuppressionEntry[]> {
|
|
36
|
+
await ensureWatching();
|
|
37
|
+
return suppressionCache;
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
let getMatcher = cacheLimited(10000, (text: string) => createMatchesPattern(Buffer.from(text), false));
|
|
41
|
+
|
|
42
|
+
const MAX_UNMATCHED = 10000;
|
|
43
|
+
const MAX_EXAMPLES = 100;
|
|
44
|
+
let unmatchedIndex = 0;
|
|
45
|
+
let unmatchedErrors: LogDatum[] = [];
|
|
46
|
+
let suppressionMatches = new Map<string, {
|
|
47
|
+
// getHistoryChunk =>
|
|
48
|
+
history: Map<number, {
|
|
49
|
+
count: number;
|
|
50
|
+
}>;
|
|
51
|
+
exampleIndex: number;
|
|
52
|
+
examples: LogDatum[];
|
|
53
|
+
}>();
|
|
54
|
+
const chunkUnit = timeInMinute * 5;
|
|
55
|
+
export function getHistoryChunk(time: number) {
|
|
56
|
+
return Math.floor(time / chunkUnit) * chunkUnit;
|
|
57
|
+
}
|
|
58
|
+
export function getChunkEndTime(chunk: number) {
|
|
59
|
+
chunk = getHistoryChunk(chunk);
|
|
60
|
+
return chunk + chunkUnit;
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
export async function exposeErrorWatchService() {
|
|
64
|
+
SocketFunction.expose(ErrorNotificationServiceBase);
|
|
65
|
+
|
|
66
|
+
let errorLogs = await getErrorLogs();
|
|
67
|
+
for await (let error of watchAllValues(errorLogs)) {
|
|
68
|
+
let errorBuffer = Buffer.from(JSON.stringify(error));
|
|
69
|
+
let suppressionEntries = await getSuppressionEntries();
|
|
70
|
+
let anyMatches = false;
|
|
71
|
+
for (let suppressionEntry of suppressionEntries) {
|
|
72
|
+
let matcher = getMatcher(suppressionEntry.pattern);
|
|
73
|
+
if (!matcher(errorBuffer)) continue;
|
|
74
|
+
anyMatches = true;
|
|
75
|
+
let history = suppressionMatches.get(suppressionEntry.id);
|
|
76
|
+
if (!history) {
|
|
77
|
+
history = {
|
|
78
|
+
history: new Map(),
|
|
79
|
+
exampleIndex: 0,
|
|
80
|
+
examples: [],
|
|
81
|
+
};
|
|
82
|
+
suppressionMatches.set(suppressionEntry.id, history);
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
if (history.examples.length > MAX_EXAMPLES) {
|
|
86
|
+
history.examples[history.exampleIndex] = error;
|
|
87
|
+
history.exampleIndex = (history.exampleIndex + 1) % MAX_EXAMPLES;
|
|
88
|
+
} else {
|
|
89
|
+
history.examples.push(error);
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
let historyChunk = getHistoryChunk(error.time);
|
|
93
|
+
let historyEntry = history.history.get(historyChunk);
|
|
94
|
+
if (!historyEntry) {
|
|
95
|
+
historyEntry = { count: 0 };
|
|
96
|
+
history.history.set(historyChunk, historyEntry);
|
|
97
|
+
}
|
|
98
|
+
historyEntry.count++;
|
|
99
|
+
}
|
|
100
|
+
if (!anyMatches) {
|
|
101
|
+
if (unmatchedErrors.length > unmatchedIndex) {
|
|
102
|
+
unmatchedErrors[unmatchedIndex] = error;
|
|
103
|
+
unmatchedIndex = (unmatchedIndex + 1) % MAX_UNMATCHED;
|
|
104
|
+
} else {
|
|
105
|
+
unmatchedErrors.push(error);
|
|
106
|
+
}
|
|
107
|
+
void ErrorNotificationService.triggerUnmatchedError(error);
|
|
108
|
+
}
|
|
109
|
+
}
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
class ErrorNotificationService {
|
|
113
|
+
public async getData() {
|
|
114
|
+
return {
|
|
115
|
+
unmatchedErrors,
|
|
116
|
+
suppressionMatches,
|
|
117
|
+
};
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
private static watchersSERVICE = new Set<string>();
|
|
121
|
+
public async watchUnmatchedErrorsSERVICE(): Promise<void> {
|
|
122
|
+
let caller = SocketFunction.getCaller();
|
|
123
|
+
ErrorNotificationService.watchersSERVICE.add(caller.nodeId);
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
public static triggerUnmatchedError = batchFunction({ delay: 100 }, (datums: LogDatum[]) => {
|
|
127
|
+
for (let nodeId of ErrorNotificationService.watchersSERVICE) {
|
|
128
|
+
void (async () => {
|
|
129
|
+
try {
|
|
130
|
+
await ErrorNotificationDataBase.nodes[nodeId].receiveErrorHTTP(datums);
|
|
131
|
+
} catch {
|
|
132
|
+
ErrorNotificationService.watchersSERVICE.delete(nodeId);
|
|
133
|
+
}
|
|
134
|
+
})();
|
|
135
|
+
}
|
|
136
|
+
});
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
const ErrorNotificationServiceBase = SocketFunction.register(
|
|
140
|
+
"ErrorNotificationService-019c9cae-8333-7708-a4e7-500f5fc23175",
|
|
141
|
+
new ErrorNotificationService(),
|
|
142
|
+
() => ({
|
|
143
|
+
getData: {},
|
|
144
|
+
watchUnmatchedErrorsSERVICE: {},
|
|
145
|
+
}),
|
|
146
|
+
() => ({
|
|
147
|
+
hooks: [assertIsManagementUser],
|
|
148
|
+
}),
|
|
149
|
+
{
|
|
150
|
+
noAutoExpose: true,
|
|
151
|
+
}
|
|
152
|
+
);
|
|
153
|
+
|
|
154
|
+
class ErrorNotificationData {
|
|
155
|
+
public async getData() {
|
|
156
|
+
let controllerNodeId = await getControllerNodeId(ErrorNotificationServiceBase);
|
|
157
|
+
if (!controllerNodeId) {
|
|
158
|
+
throw new Error(`Could not find node exposing controller ErrorNotificationServiceBase`);
|
|
159
|
+
}
|
|
160
|
+
return await ErrorNotificationServiceBase.nodes[controllerNodeId].getData();
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
static browserCallbacks = new Set<(datum: LogDatum[]) => void>();
|
|
164
|
+
public static async watchUnmatchedErrors(config: {
|
|
165
|
+
callback: (datum: LogDatum[]) => void;
|
|
166
|
+
}) {
|
|
167
|
+
ErrorNotificationData.browserCallbacks.add(config.callback);
|
|
168
|
+
await ErrorNotificationData.ensureWatchingErrorsBrowser();
|
|
169
|
+
return () => {
|
|
170
|
+
ErrorNotificationData.browserCallbacks.delete(config.callback);
|
|
171
|
+
};
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
private static ensureWatchingErrorsBrowser = lazy(async () => {
|
|
175
|
+
await ErrorNotificationDataBase.nodes[SocketFunction.getBrowserNodeId()].watchUnmatchedErrorsHTTP();
|
|
176
|
+
});
|
|
177
|
+
|
|
178
|
+
public async receiveErrorBrowser(datums: LogDatum[]) {
|
|
179
|
+
for (let callback of ErrorNotificationData.browserCallbacks) {
|
|
180
|
+
callback(datums);
|
|
181
|
+
}
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
private watchersHTTP = new Set<string>();
|
|
185
|
+
public async watchUnmatchedErrorsHTTP() {
|
|
186
|
+
let caller = SocketFunction.getCaller();
|
|
187
|
+
this.watchersHTTP.add(caller.nodeId);
|
|
188
|
+
await ErrorNotificationData.ensureWatchingErrorsHTTP();
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
private static ensureWatchingErrorsHTTP = lazy(async () => {
|
|
192
|
+
let controllerNodeId = await getControllerNodeId(ErrorNotificationServiceBase);
|
|
193
|
+
if (!controllerNodeId) {
|
|
194
|
+
ErrorNotificationData.ensureWatchingErrorsHTTP.reset();
|
|
195
|
+
throw new Error(`Could not find node exposing controller ErrorNotificationServiceBase`);
|
|
196
|
+
}
|
|
197
|
+
SocketFunction.onNextDisconnect(controllerNodeId, () => {
|
|
198
|
+
ErrorNotificationData.ensureWatchingErrorsHTTP.reset();
|
|
199
|
+
});
|
|
200
|
+
await ErrorNotificationServiceBase.nodes[controllerNodeId].watchUnmatchedErrorsSERVICE();
|
|
201
|
+
});
|
|
202
|
+
|
|
203
|
+
public async receiveErrorHTTP(datums: LogDatum[]) {
|
|
204
|
+
for (let nodeId of this.watchersHTTP) {
|
|
205
|
+
void (async () => {
|
|
206
|
+
try {
|
|
207
|
+
await ErrorNotificationDataBase.nodes[nodeId].receiveErrorBrowser(datums);
|
|
208
|
+
} catch {
|
|
209
|
+
this.watchersHTTP.delete(nodeId);
|
|
210
|
+
}
|
|
211
|
+
})();
|
|
212
|
+
}
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
public async getSuppressionEntries() {
|
|
216
|
+
return await getSuppressionEntries();
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
public async setSuppressionEntry(entry: SuppressionEntry) {
|
|
220
|
+
await suppression.set(entry.id, entry);
|
|
221
|
+
let prevEntry = suppressionCache.findIndex(e => e.id === entry.id);
|
|
222
|
+
if (prevEntry !== -1) {
|
|
223
|
+
suppressionCache[prevEntry] = entry;
|
|
224
|
+
} else {
|
|
225
|
+
suppressionCache.push(entry);
|
|
226
|
+
}
|
|
227
|
+
void updateNow();
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
public async deleteSuppressionEntry(id: string) {
|
|
231
|
+
await suppression.delete(id);
|
|
232
|
+
let prevEntry = suppressionCache.findIndex(e => e.id === id);
|
|
233
|
+
if (prevEntry !== -1) {
|
|
234
|
+
suppressionCache.splice(prevEntry, 1);
|
|
235
|
+
}
|
|
236
|
+
void updateNow();
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
public async logTestError(errorMessage: string) {
|
|
240
|
+
console.error(errorMessage);
|
|
241
|
+
}
|
|
242
|
+
}
|
|
243
|
+
|
|
244
|
+
const ErrorNotificationDataBase = SocketFunction.register(
|
|
245
|
+
"ErrorNotificationData-019c9cae-8333-7708-a4e7-500f5fc23174",
|
|
246
|
+
new ErrorNotificationData(),
|
|
247
|
+
() => ({
|
|
248
|
+
getData: {},
|
|
249
|
+
receiveErrorBrowser: {},
|
|
250
|
+
receiveErrorHTTP: {},
|
|
251
|
+
watchUnmatchedErrorsHTTP: {},
|
|
252
|
+
getSuppressionEntries: {},
|
|
253
|
+
setSuppressionEntry: {},
|
|
254
|
+
deleteSuppressionEntry: {},
|
|
255
|
+
logTestError: {},
|
|
256
|
+
}),
|
|
257
|
+
() => ({
|
|
258
|
+
hooks: [assertIsManagementUser],
|
|
259
|
+
})
|
|
260
|
+
);
|
|
261
|
+
export const ErrorNotificationsController = getSyncedController(ErrorNotificationDataBase);
|
|
262
|
+
|
|
263
|
+
export function watchUnmatchedErrors(config: {
|
|
264
|
+
callback: (datum: LogDatum[]) => void;
|
|
265
|
+
}) {
|
|
266
|
+
return ErrorNotificationData.watchUnmatchedErrors({
|
|
267
|
+
callback: config.callback,
|
|
268
|
+
});
|
|
269
|
+
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import "../../../forceProduction";
|
|
2
|
+
import "../../../inject";
|
|
3
|
+
|
|
4
|
+
import { logErrors } from "../../../errors";
|
|
5
|
+
import { Querysub } from "../../../4-querysub/QuerysubController";
|
|
6
|
+
import { exposeErrorWatchService } from "./errorNotifications";
|
|
7
|
+
|
|
8
|
+
logErrors(main());
|
|
9
|
+
|
|
10
|
+
async function main() {
|
|
11
|
+
await Querysub.hostService("ErrorWatch");
|
|
12
|
+
await exposeErrorWatchService();
|
|
13
|
+
}
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import { SocketFunction } from "socket-function/SocketFunction";
|
|
2
|
+
import { isNodeIdLocal, isOwnNodeId, watchDeltaNodeIds } from "../../../-f-node-discovery/NodeDiscovery";
|
|
3
|
+
import { IndexedLogs, loggerByName } from "../IndexedLogs/IndexedLogs";
|
|
4
|
+
import { nextId } from "socket-function/src/misc";
|
|
5
|
+
import { assertIsManagementUser, isManagementUser } from "../../managementPages";
|
|
6
|
+
import { ignoreErrors } from "../../../errors";
|
|
7
|
+
import { blue } from "socket-function/src/formatting/logColors";
|
|
8
|
+
import { SocketChannel } from "../../../functional/SocketChannel";
|
|
9
|
+
import { batchFunction } from "socket-function/src/batching";
|
|
10
|
+
import { isPublic } from "../../../config";
|
|
11
|
+
|
|
12
|
+
export async function* watchAllValues<T>(logs: IndexedLogs<T>): AsyncGenerator<T> {
|
|
13
|
+
let callbackId = nextId();
|
|
14
|
+
let buffer: T[] = [];
|
|
15
|
+
let resolveNext: ((value: T) => void) | undefined;
|
|
16
|
+
|
|
17
|
+
clientCallbacks.set(callbackId, (datums: unknown[]) => {
|
|
18
|
+
for (let i = 1; i < datums.length; i++) {
|
|
19
|
+
buffer.push(datums[i] as T);
|
|
20
|
+
}
|
|
21
|
+
let datum = datums[0] as T;
|
|
22
|
+
if (resolveNext) {
|
|
23
|
+
resolveNext(datum as T);
|
|
24
|
+
resolveNext = undefined;
|
|
25
|
+
} else {
|
|
26
|
+
buffer.push(datum as T);
|
|
27
|
+
}
|
|
28
|
+
});
|
|
29
|
+
|
|
30
|
+
watchDeltaNodeIds((delta) => {
|
|
31
|
+
for (let nodeId of delta.newNodeIds) {
|
|
32
|
+
if (isOwnNodeId(nodeId)) continue;
|
|
33
|
+
let isNodeLocal = isNodeIdLocal(nodeId);
|
|
34
|
+
if (isNodeLocal !== isPublic()) continue;
|
|
35
|
+
ignoreErrors(ErrorNotificationControllerRegistered.nodes[nodeId].watchErrors({
|
|
36
|
+
loggerName: logs.config.name,
|
|
37
|
+
callbackId,
|
|
38
|
+
}));
|
|
39
|
+
}
|
|
40
|
+
});
|
|
41
|
+
|
|
42
|
+
try {
|
|
43
|
+
while (true) {
|
|
44
|
+
if (buffer.length > 0) {
|
|
45
|
+
yield buffer.shift()!;
|
|
46
|
+
} else {
|
|
47
|
+
yield await new Promise<T>((resolve) => {
|
|
48
|
+
resolveNext = resolve;
|
|
49
|
+
});
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
} finally {
|
|
53
|
+
clientCallbacks.delete(callbackId);
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
// callbackId => handler invoked when a remote node delivers a batch of datums.
let clientCallbacks = new Map<string, (datums: unknown[]) => void>();
|
|
60
|
+
|
|
61
|
+
class ErrorNotificationController {
|
|
62
|
+
public async onError(config: {
|
|
63
|
+
callbackId: string;
|
|
64
|
+
datums: unknown[];
|
|
65
|
+
}) {
|
|
66
|
+
let callback = clientCallbacks.get(config.callbackId);
|
|
67
|
+
if (!callback) {
|
|
68
|
+
throw new Error(`Callback ${config.callbackId} not found`);
|
|
69
|
+
}
|
|
70
|
+
callback(config.datums);
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
public async watchErrors(config: {
|
|
74
|
+
loggerName: string;
|
|
75
|
+
callbackId: string;
|
|
76
|
+
}): Promise<void> {
|
|
77
|
+
let caller = SocketFunction.getCaller();
|
|
78
|
+
let logger = loggerByName.get(config.loggerName);
|
|
79
|
+
if (!logger) {
|
|
80
|
+
throw new Error(`Logger ${config.loggerName} not found`);
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
let unsubscribe = logger.watchErrors(batchFunction({ delay: 100 }, (datums: unknown[]) => {
|
|
84
|
+
ErrorNotificationControllerRegistered.nodes[caller.nodeId].onError({
|
|
85
|
+
callbackId: config.callbackId,
|
|
86
|
+
datums,
|
|
87
|
+
}).catch(() => {
|
|
88
|
+
unsubscribe();
|
|
89
|
+
});
|
|
90
|
+
}));
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
const ErrorNotificationControllerRegistered = SocketFunction.register(
|
|
95
|
+
"ErrorNotificationController-019c9cae-8333-7708-a4e7-500f5fc23173",
|
|
96
|
+
new ErrorNotificationController(),
|
|
97
|
+
() => ({
|
|
98
|
+
onError: {},
|
|
99
|
+
watchErrors: {}
|
|
100
|
+
}),
|
|
101
|
+
() => ({
|
|
102
|
+
hooks: [assertIsManagementUser],
|
|
103
|
+
})
|
|
104
|
+
);
|
|
@@ -20,16 +20,30 @@ IMPORTANT! Now I am properly calling shutdown, so none of the streamed logs shou
|
|
|
20
20
|
|
|
21
21
|
|
|
22
22
|
|
|
23
|
+
//todonext
|
|
23
24
|
|
|
24
|
-
2) Create lot of remote server logs
|
|
25
|
-
- Via our refresh loop
|
|
26
25
|
|
|
27
|
-
|
|
28
|
-
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
26
|
+
1) Add button that will log an error serverside, to ensure this system works
|
|
27
|
+
- Allow typing any error we want
|
|
28
|
+
2) Write smaller component just for watching
|
|
29
|
+
- Lazily load the component, and only if super user (just uncomment the code in Page.tsx)
|
|
30
|
+
|
|
31
|
+
3) Fix up the error watching UI
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
4) Publish
|
|
35
|
+
|
|
36
|
+
5) Deploy watcher service to remote (yarn error-watch-public)
|
|
37
|
+
|
|
38
|
+
6) Test on actual server, with actual logs
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
// single service, no history search, discord messaging, suppression and suppression page
|
|
42
|
+
// - Storing suppression history just in memory? (although suppression values, obviously, on disk)
|
|
43
|
+
// - receive all errors, and service will do suppression
|
|
44
|
+
|
|
45
|
+
// Start with page itself showing errors and suppressing them, for debugging/testing
|
|
46
|
+
// - Ai to automatically create suppression searches, and combine existing ones, etc
|
|
33
47
|
|
|
34
48
|
|
|
35
49
|
Rewrite error notification code
|
|
@@ -48,6 +62,9 @@ Rewrite error notification code
|
|
|
48
62
|
|
|
49
63
|
|
|
50
64
|
0) Add LZ4 compression to socket-function by default
|
|
65
|
+
- Setup local tester for it to start
|
|
66
|
+
- with a non-synced endpoint, purely socket-function
|
|
67
|
+
- purely nodejs, just talking to itself
|
|
51
68
|
- Allow setting "compress" to "none" or "lz4" or "zip" or "zip0" or "zip3", etc, for levels.
|
|
52
69
|
- default is "lz4"
|
|
53
70
|
- REQUIRES feature checking the remote, to make sure it is new enough to accept this.
|
|
@@ -76,6 +93,16 @@ Rewrite error notification code
|
|
|
76
93
|
|
|
77
94
|
|
|
78
95
|
|
|
96
|
+
//todonext
|
|
97
|
+
// Reorganize our lifecycles ideas
|
|
98
|
+
// - Scans over past logs, and only finished, never pending
|
|
99
|
+
// - Probably... realtime creating cache, BUT, if we add new lifecycles, we then only add them on demand. So when we are in the debugging mindset, and adding and changing them, it's fast (only as needed), but when we come back later when we aren't in the middle of debugging, it'll be super fast so we can fix the bug quickly and move on.
|
|
100
|
+
// - OR... we can do pending dynamically, but we don't cache it? Hmm... then what's the point? Hmm...
|
|
101
|
+
// - MAYBE, no caching, but we restrict the time by a lot? HMM...
|
|
102
|
+
// - I mean, we have to implement it without caching anyways to start, so we can just do that, and then cache it later if we see the need...
|
|
103
|
+
// - WELL, we need SOME kind of caching... Maybe... we DO use a strict time range, and then... if we've scanned a pending file in the past, we can cache that, because we know we've got all of it's values. Hmm...
|
|
104
|
+
// - Use lifecycles to debug rejections. To a point, until it is likely we are out of sync, then we should write the sync verification code.
|
|
105
|
+
|
|
79
106
|
|
|
80
107
|
// todonext;
|
|
81
108
|
// Hmm... so... should we index it, so we can search it? HMM... I think we might want to?
|
|
@@ -89,6 +89,12 @@ export async function registerManagementPages2(config: {
|
|
|
89
89
|
componentName: "LogViewer3",
|
|
90
90
|
getModule: () => import("./logs/IndexedLogs/LogViewer3"),
|
|
91
91
|
});
|
|
92
|
+
inputPages.push({
|
|
93
|
+
title: "Error Notifications",
|
|
94
|
+
componentName: "ErrorNotificationPage",
|
|
95
|
+
controllerName: "",
|
|
96
|
+
getModule: () => import("./logs/errorNotifications2/ErrorNotificationPage"),
|
|
97
|
+
});
|
|
92
98
|
inputPages.push({
|
|
93
99
|
title: "Security",
|
|
94
100
|
componentName: "SecurityPage",
|
package/src/server.ts
CHANGED
|
@@ -13,16 +13,8 @@ import { formatTime } from "socket-function/src/formatting/format";
|
|
|
13
13
|
// we DO want to register some static diagnostics from Querysub, so... import it explicitly.
|
|
14
14
|
import "./4-querysub/Querysub";
|
|
15
15
|
import { getOurAuthorities } from "./config2";
|
|
16
|
-
import { getDomain, isPublic } from "./config";
|
|
17
16
|
|
|
18
|
-
import yargs from "yargs";
|
|
19
17
|
import { Querysub } from "./4-querysub/Querysub";
|
|
20
|
-
import { createSourceCheck } from "./user-implementation/canSeeSource";
|
|
21
|
-
let yargObj = isNodeTrue() && yargs(process.argv)
|
|
22
|
-
.option("authority", { type: "string", desc: `Defines the base paths we are an authority on (the domain is prepended to them). Either a file path to a JSON(AuthorityPath[]), or a base64 representation of the JSON(AuthorityPath[]).` })
|
|
23
|
-
.option("verbose", { type: "boolean", desc: "Log all writes and reads" })
|
|
24
|
-
.argv || {}
|
|
25
|
-
;
|
|
26
18
|
|
|
27
19
|
logErrors(main());
|
|
28
20
|
|
package/test.ts
CHANGED
|
@@ -1,8 +1,18 @@
|
|
|
1
|
+
import { chdir } from "process";
|
|
2
|
+
chdir("D:/repos/qs-cyoa/");
|
|
3
|
+
|
|
4
|
+
import "./inject";
|
|
5
|
+
|
|
1
6
|
import { Querysub } from "./src/4-querysub/QuerysubController";
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
7
|
+
import { getErrorLogs, getLoggers2Async } from "./src/diagnostics/logs/diskLogger";
|
|
8
|
+
import { watchAllValues } from "./src/diagnostics/logs/errorNotifications2/logWatcher";
|
|
9
|
+
|
|
10
|
+
async function main() {
|
|
11
|
+
await Querysub.hostService("testwatcher");
|
|
12
|
+
let errorLogs = await getErrorLogs();
|
|
13
|
+
for await (let error of watchAllValues(errorLogs)) {
|
|
14
|
+
process.stdout.write(JSON.stringify(error) + "\n");
|
|
15
|
+
}
|
|
16
|
+
}
|
|
5
17
|
|
|
6
|
-
|
|
7
|
-
process.exit(0);
|
|
8
|
-
});
|
|
18
|
+
main().catch(console.error).finally(() => process.exit(0));
|
package/test2.ts
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import { chdir } from "process";
|
|
2
|
+
chdir("D:/repos/qs-cyoa/");
|
|
3
|
+
|
|
4
|
+
import "./inject";
|
|
5
|
+
|
|
6
|
+
import { timeInSecond } from "socket-function/src/misc";
|
|
7
|
+
import { getErrorLogs, getLoggers2Async } from "./src/diagnostics/logs/diskLogger";
|
|
8
|
+
import { watchAllValues } from "./src/diagnostics/logs/errorNotifications2/logWatcher";
|
|
9
|
+
import { delay } from "socket-function/src/batching";
|
|
10
|
+
import { Querysub } from "./src/4-querysub/QuerysubController";
|
|
11
|
+
|
|
12
|
+
async function main() {
|
|
13
|
+
await Querysub.hostService("test");
|
|
14
|
+
while (true) {
|
|
15
|
+
console.error("Test error");
|
|
16
|
+
await delay(timeInSecond * 5);
|
|
17
|
+
}
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
main().catch(console.error).finally(() => process.exit(0));
|
|
@@ -1,9 +0,0 @@
|
|
|
1
|
-
|
|
2
|
-
//todonext
|
|
3
|
-
|
|
4
|
-
// single service, no history search, discord messaging, suppression and suppression page
|
|
5
|
-
// - Storing suppression history just in memory? (although suppression values, obviously, on disk)
|
|
6
|
-
// - receive all errors, and service will do suppression
|
|
7
|
-
|
|
8
|
-
// Start with page itself showing errors and suppressing them, for debugging/testing
|
|
9
|
-
// - Ai to automatically create suppression searches, and combine existing ones, etc
|
|
File without changes
|