querysub 0.327.0 → 0.328.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/package.json +3 -4
  2. package/src/-a-archives/archivesBackBlaze.ts +20 -0
  3. package/src/-a-archives/archivesDisk.ts +5 -5
  4. package/src/-a-archives/archivesLimitedCache.ts +118 -7
  5. package/src/-a-archives/archivesPrivateFileSystem.ts +3 -0
  6. package/src/-g-core-values/NodeCapabilities.ts +26 -11
  7. package/src/0-path-value-core/auditLogs.ts +4 -2
  8. package/src/2-proxy/PathValueProxyWatcher.ts +3 -0
  9. package/src/3-path-functions/PathFunctionRunner.ts +2 -2
  10. package/src/4-querysub/Querysub.ts +1 -1
  11. package/src/5-diagnostics/GenericFormat.tsx +2 -2
  12. package/src/deployManager/machineApplyMainCode.ts +10 -8
  13. package/src/deployManager/machineSchema.ts +4 -3
  14. package/src/deployManager/setupMachineMain.ts +3 -2
  15. package/src/diagnostics/logs/FastArchiveAppendable.ts +75 -51
  16. package/src/diagnostics/logs/FastArchiveController.ts +5 -2
  17. package/src/diagnostics/logs/FastArchiveViewer.tsx +205 -48
  18. package/src/diagnostics/logs/LogViewer2.tsx +78 -34
  19. package/src/diagnostics/logs/TimeRangeSelector.tsx +8 -0
  20. package/src/diagnostics/logs/diskLogGlobalContext.ts +3 -3
  21. package/src/diagnostics/logs/diskLogger.ts +70 -23
  22. package/src/diagnostics/logs/errorNotifications/ErrorNotificationController.ts +82 -63
  23. package/src/diagnostics/logs/errorNotifications/ErrorSuppressionUI.tsx +37 -3
  24. package/src/diagnostics/logs/errorNotifications/ErrorWarning.tsx +45 -16
  25. package/src/diagnostics/logs/errorNotifications/errorDigests.tsx +8 -0
  26. package/src/diagnostics/logs/errorNotifications/errorWatchEntry.tsx +198 -56
  27. package/src/diagnostics/logs/lifeCycleAnalysis/spec.md +3 -2
  28. package/src/diagnostics/managementPages.tsx +5 -0
  29. package/src/email_ims_notifications/discord.tsx +203 -0
  30. package/src/fs.ts +9 -0
  31. package/src/functional/SocketChannel.ts +9 -0
  32. package/src/functional/throttleRender.ts +134 -0
  33. package/src/library-components/ATag.tsx +2 -2
  34. package/src/misc.ts +13 -0
  35. package/src/misc2.ts +54 -0
  36. package/src/user-implementation/SecurityPage.tsx +11 -5
  37. package/src/user-implementation/userData.ts +31 -16
  38. package/testEntry2.ts +14 -5
  39. package/src/user-implementation/setEmailKey.ts +0 -25
  40. /package/src/{email → email_ims_notifications}/postmark.tsx +0 -0
  41. /package/src/{email → email_ims_notifications}/sendgrid.tsx +0 -0
package/src/diagnostics/logs/diskLogger.ts

@@ -6,6 +6,8 @@ import { timeInMinute } from "socket-function/src/misc";
  import { formatTime } from "socket-function/src/formatting/format";
  import { addEpsilons } from "../../bits";
  import { FileMetadata } from "./FastArchiveController";
+ import { getPathStr2 } from "../../path";
+ import { isPublic } from "../../config";
  // IMPORTANT! We can't have any real imports here, because we are depended on so early in startup!

  if (isNode()) {
@@ -35,7 +37,25 @@ export type LogDatum = Record<string, unknown> & {
      /** Dynamically set when matching recent errors only. */
      __matchedOutdatedSuppressionKey?: string;
  };
+ export function getLogHash(obj: LogDatum) {
+     return getPathStr2(obj.__threadId || "", obj.time.toString());
+ }
+ export function getLogFile(obj: LogDatum) {
+     let logType = obj.param0 || "";
+     if (obj.__FILE__) {
+         logType = String(obj.__FILE__);
+     }
+     if (obj[LOG_LINE_LIMIT_ID]) {
+         logType += "::" + String(obj[LOG_LINE_LIMIT_ID]);
+     }
+     return logType;
+
+ }
  export const LOG_LIMIT_FLAG = String.fromCharCode(44533) + "LOGS_LIMITED_FLAG-9277640b-d709-4591-ab08-2bb29bbb94f4";
+ export const LOG_LINE_LIMIT_FLAG = String.fromCharCode(44534) + "LOGS_LINE_LIMIT_FLAG-dd50ab1f-3021-45e3-82fc-d2702c7a64c8";
+
+ /** If this key exists in the logged object, as in a key in one of the objects logged, then we will use the value of it as the limit ID. This is useful as it allows us to either override a limit or limit something independently from other logs in the file. */
+ export const LOG_LINE_LIMIT_ID = "LIMIT_LINE_ID";

  export const getLoggers = lazy(function () {
      const { FastArchiveAppendable } = require("./FastArchiveAppendable") as typeof import("./FastArchiveAppendable");
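The new LOG_LINE_LIMIT_ID key gives a log call its own rate-limit bucket: the value is appended to the logType the limiter counts against (see getLogFile above and the logDisk changes below). A minimal usage sketch, not from the package — it assumes object arguments passed to logDisk are merged into the log datum, which the obj[LOG_LINE_LIMIT_ID] checks imply, and the "s3-retry" label is made up:

import { logDisk, LOG_LINE_LIMIT_ID } from "./diskLogger";

// Hypothetical call: these warns are counted under "<file>::s3-retry" rather
// than the file-wide bucket, so they reach WARN_LIMIT independently of other
// warnings logged from the same file.
logDisk("warn", "upload retry failed", {
    [LOG_LINE_LIMIT_ID]: "s3-retry",
    attempt: 3,
});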
@@ -52,6 +72,10 @@ export const getLoggers = lazy(function () {
          errorLogs: new FastArchiveAppendable<LogDatum>("logs-error/"),
      };
  });
+ setImmediate(() => {
+     // If we don't import it at all, then it doesn't work client-side.
+     require("./FastArchiveAppendable") as typeof import("./FastArchiveAppendable");
+ });
  const getNotifyErrors = lazy(function () {
      const { notifyWatchersOfError: notifyErrors } = require("./errorNotifications/ErrorNotificationController") as typeof import("./errorNotifications/ErrorNotificationController");
      if (typeof notifyErrors !== "function") {
@@ -88,6 +112,8 @@ let logLimitLookup: {

  const LIMIT_PERIOD = timeInMinute * 15;
  const LIMIT_THRESHOLD = 1000;
+ const WARN_LIMIT = 100;
+ const ERROR_LIMIT = 100;

  const logDiskDontShim = logDisk;
  /** NOTE: Calling this directly means we lose __FILE__ tracking. But... that's probably fine... */
@@ -104,6 +130,11 @@ export function logDisk(type: "log" | "warn" | "info" | "error", ...args: unknow
      if (logObj.__FILE__) {
          logType = String(logObj.__FILE__);
      }
+     let hasLineLimit = false;
+     if (logObj[LOG_LINE_LIMIT_ID]) {
+         logType += "::" + String(logObj[LOG_LINE_LIMIT_ID]);
+         hasLineLimit = true;
+     }

      if (logLimitLookup) {
          if (logObj.time > logLimitLookup.resetTime) {
@@ -120,32 +151,47 @@ export function logDisk(type: "log" | "warn" | "info" | "error", ...args: unknow
      let count = logLimitLookup.counts.get(logType) || 0;
      count++;
      logLimitLookup.counts.set(logType, count);
-     if (count > LIMIT_THRESHOLD) {
+     let limit = LIMIT_THRESHOLD;
+     if (type === "warn") {
+         limit = WARN_LIMIT;
+     } else if (type === "error") {
+         limit = ERROR_LIMIT;
+     }
+     if (count > limit) {
          let timeUntilReset = logLimitLookup.resetTime - logObj.time;
-         process.stdout.write(`Log type hit limit, not writing log type to disk for ~${formatTime(timeUntilReset)}: ${logType}\n`);
+         if (hasLineLimit) {
+             process.stdout.write(`Log type hit limit, not writing log type to disk for ~${formatTime(timeUntilReset)}: ${logType}\n`);
+         }
          return;
      }
-     if (count === LIMIT_THRESHOLD) {
-         logObj[LOG_LIMIT_FLAG] = true;
+     if (count >= limit) {
+         if (hasLineLimit) {
+             logObj[LOG_LINE_LIMIT_FLAG] = true;
+         } else {
+             logObj[LOG_LIMIT_FLAG] = true;
+         }
      }

-     let loggers = startupDone ? getLoggers() : undefined;
-     if (!loggers) {
-         getLoggers.reset();
-         setImmediate(() => {
-             logDiskDontShim(type, ...args);
-         });
-         return;
-     }
-     const { logLogs, warnLogs, infoLogs, errorLogs } = loggers;
-     if (type === "log") {
-         logLogs.append(logObj);
-     } else if (type === "warn") {
-         warnLogs.append(logObj);
-     } else if (type === "info") {
-         infoLogs.append(logObj);
-     } else {
-         errorLogs.append(logObj);
+     // We don't want developer errors clogging up the error logs. However, they can still notify errors, Because this will only notify nodes that are able to access us (It uses a reverse connection scheme, so instead of talking to nodes that we can access, we only talk to nodes that can access us), Which will mean it will only notify for local services, so the developer still gets error notifications, But our errors won't be spread to all developers. BUT, we will still watch global errors, because we can contact the global server, so developers will still get errors about production issues, even while developing!
+     if (isPublic()) {
+         let loggers = startupDone ? getLoggers() : undefined;
+         if (!loggers) {
+             getLoggers.reset();
+             setImmediate(() => {
+                 logDiskDontShim(type, ...args);
+             });
+             return;
+         }
+         const { logLogs, warnLogs, infoLogs, errorLogs } = loggers;
+         if (type === "log") {
+             logLogs.append(logObj);
+         } else if (type === "warn") {
+             warnLogs.append(logObj);
+         } else if (type === "info") {
+             infoLogs.append(logObj);
+         } else {
+             errorLogs.append(logObj);
+         }
      }

      if (type === "warn" || type === "error") {
@@ -162,12 +208,12 @@ let lastLogTime = 0;

  function packageLogObj(type: string, args: unknown[]): LogDatum {
      let now = Date.now();
-     if (now < lastLogTime) {
+     if (now <= lastLogTime) {
          now = addEpsilons(lastLogTime, 1);
      }
      lastLogTime = now;
      let logObj: LogDatum = {
-         time: now,
+         time: 0,
          __LOG_TYPE: type,
      };
      for (let part of globalContextParts) {
@@ -184,5 +230,6 @@ function packageLogObj(type: string, args: unknown[]): LogDatum {
              stringCount++;
          }
      }
+     logObj.time = now;
      return logObj;
  }
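The now <= lastLogTime change (previously <) makes packageLogObj hand out strictly increasing timestamps even when several entries arrive in the same millisecond, which keeps getLogHash's (threadId, time) pair usable as a de-duplication key. A hedged sketch of the idea behind addEpsilons — the real helper lives in ../../bits and may differ:

// Sketch only: bump a positive double to the next representable value, so two
// log entries created in the same millisecond still get distinct, ordered times.
function nextUpSketch(value: number): number {
    const view = new DataView(new ArrayBuffer(8));
    view.setFloat64(0, value);
    view.setBigUint64(0, view.getBigUint64(0) + 1n);
    return view.getFloat64(0);
}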
package/src/diagnostics/logs/errorNotifications/ErrorNotificationController.ts

@@ -2,7 +2,7 @@ import { isNode } from "typesafecss";
  import { getArchives } from "../../../-a-archives/archives";
  import { SizeLimiter } from "../../SizeLimiter";
  import { FastArchiveAppendable, createLogScanner, objectDelimitterBuffer } from "../FastArchiveAppendable";
- import { LogDatum, getLoggers } from "../diskLogger";
+ import { LogDatum, getLogHash, getLoggers } from "../diskLogger";
  import os from "os";
  import { SocketFunction } from "socket-function/SocketFunction";
  import { cache, cacheLimited, lazy } from "socket-function/src/caching";
@@ -21,6 +21,8 @@ import { qreact } from "../../../4-dom/qreact";
  import { requiresNetworkTrustHook } from "../../../-d-trust/NetworkTrust2";
  import { assertIsManagementUser } from "../../managementPages";
  import { streamToIteratable } from "../../../misc";
+ import { fsExistsAsync } from "../../../fs";
+ import { getPathStr2 } from "../../../path";

  export const MAX_RECENT_ERRORS = 20;
  const MAX_RECENT_ERRORS_PER_FILE = 3;
@@ -151,18 +153,17 @@ export const getSuppressionFull = measureWrap(function getSuppressionFull(config

      // Handle definitelyExpired - these are outdated suppressions
      let mostRecentOutdatedSuppressionKey: string | undefined = undefined;
+     let mostRecentOutdatedSuppressionTime = 0;

      // Handle maybeExpired - need to parse timestamp to check if suppression was active
      if (maybeExpired.length > 0 && (suppressionCounts || expiredSuppressionCounts || obj)) {
-         const getLogTime = () => {
-             try {
-                 let logEntry = JSON.parse(data.slice(posStart, posEnd).toString()) as LogDatum;
-                 return typeof logEntry.time === "number" ? logEntry.time : 0;
-             } catch {
-                 return 0;
+         let logTime = 0;
+         try {
+             let logEntry = JSON.parse(data.slice(posStart, posEnd).toString()) as LogDatum;
+             if (typeof logEntry.time === "number") {
+                 logTime = logEntry.time;
              }
-         };
-         let logTime = getLogTime();
+         } catch { }

          for (let checker of maybeExpired) {
              if (checker.fnc(data, posStart, posEnd)) {
@@ -174,8 +175,10 @@ export const getSuppressionFull = measureWrap(function getSuppressionFull(config
                      suppressionCounts.set(checker.entry.key, count);
                  }
              } else {
-                 if (!mostRecentOutdatedSuppressionKey) {
+
+                 if (checker.entry.expiresAt > mostRecentOutdatedSuppressionTime) {
                      mostRecentOutdatedSuppressionKey = checker.entry.key;
+                     mostRecentOutdatedSuppressionTime = checker.entry.expiresAt;
                  }
                  // Even if we don't want the expired suppression counts, we might want the normal suppression counts, so we have to keep going.
                  if (expiredSuppressionCounts) {
@@ -192,7 +195,7 @@ export const getSuppressionFull = measureWrap(function getSuppressionFull(config
          for (let checker of definitelyExpired) {
              if (checker.fnc(data, posStart, posEnd)) {
                  // First match is the most recent (entries are sorted by lastUpdateTime desc)
-                 if (!mostRecentOutdatedSuppressionKey) {
+                 if (checker.entry.expiresAt > mostRecentOutdatedSuppressionTime) {
                      mostRecentOutdatedSuppressionKey = checker.entry.key;
                  }
                  if (!expiredSuppressionCounts) break;
@@ -204,7 +207,7 @@ export const getSuppressionFull = measureWrap(function getSuppressionFull(config
      }

      // Set the most recent outdated suppression key if we found any and weren't suppressed
-     if (obj && mostRecentOutdatedSuppressionKey && !suppressed) {
+     if (obj && mostRecentOutdatedSuppressionKey) {
          obj.outdatedSuppressionKey = mostRecentOutdatedSuppressionKey;
      }

@@ -221,8 +224,9 @@ const suppressionUpdatedChannel = new SocketChannel<boolean>("suppression-update

  class SuppressionList {
      private init = lazy(async () => {
-         suppressionUpdatedChannel.watch(() => {
-             void this.updateEntriesNow();
+         suppressionUpdatedChannel.watch(async () => {
+             await this.updateEntriesNow();
+             await recentErrors.onSuppressionChanged();
          });
          await runInfinitePollCallAtStart(SUPPRESSION_POLL_INTERVAL, async () => {
              await this.updateEntriesNow();
@@ -336,14 +340,14 @@ class SuppressionList {
          let entries = await this.getEntries();
          entry.lastUpdateTime = Date.now();
          entries.entries[entry.key] = entry;
-         void suppressionListArchive.set(suppressionListKey, entries);
+         await suppressionListArchive.set(suppressionListKey, entries);
          suppressionUpdatedChannel.broadcast(true);
          await recentErrors.onSuppressionChanged();
      }
      public async removeSuppressionEntry(key: string) {
          let entries = await this.getEntries();
          delete entries.entries[key];
-         void suppressionListArchive.set(suppressionListKey, entries);
+         await suppressionListArchive.set(suppressionListKey, entries);
          suppressionUpdatedChannel.broadcast(true);
          await recentErrors.onSuppressionChanged();
      }
@@ -353,7 +357,7 @@ class SuppressionList {
          return entries;
      }
  }
- const suppressionList = new SuppressionList();
+ export const suppressionList = new SuppressionList();
  export const SuppressionListController = getSyncedController(SocketFunction.register(
      "SuppressionListController-08f985d8-8d06-4041-ac4b-44566c54615d",
      suppressionList,
@@ -397,7 +401,7 @@ class URLCache {
          if (!isNode()) return undefined;

          // Create cache directory if it doesn't exist
-         if (!fs.existsSync(this.root)) {
+         if (!await fsExistsAsync(this.root)) {
              await fs.promises.mkdir(this.root, { recursive: true });
          }

@@ -498,13 +502,18 @@ const urlCache = new URLCache();
  const limitRecentErrors = measureWrap(function limitRecentErrors(objs: LogDatum[]) {
      sort(objs, x => x.time);
      let recent: LogDatum[] = [];
+     let foundHashes = new Set<string>();
      let countByFile = new Map<string, number>();
      // NOTE: We iterate backwards, because... usually new logs come in at the end, and are pushed, so we want to sort by time (that way we often don't have to resort by much). And if we sort by time, the newest at at the end!
      for (let i = objs.length - 1; i >= 0; i--) {
          let obj = objs[i];
          let file = String(obj.__FILE__) || "";
          let count = countByFile.get(file) || 0;
+         if (count > MAX_RECENT_ERRORS_PER_FILE) continue;
          count++;
+         let hash = getLogHash(obj);
+         if (foundHashes.has(hash)) continue;
+         foundHashes.add(hash);
          if (count > MAX_RECENT_ERRORS_PER_FILE) continue;
          countByFile.set(file, count);
          recent.push(obj);
@@ -513,8 +522,13 @@ const limitRecentErrors = measureWrap(function limitRecentErrors(objs: LogDatum[
      return recent;
  });

- class RecentErrors {
+ export class RecentErrors {

+     constructor(private addErrorsCallback?: (objs: LogDatum[]) => void | Promise<void>) {
+         this.addErrorsCallback = addErrorsCallback;
+     }
+
+     // TODO: Uninitialize (stopping the infinite polling), if all of our recent errors watchers go away.
      private initialize = lazy(async () => {
          errorWatcherBase.watch(x => {
              void this.addErrors(x);
@@ -526,7 +540,7 @@ class RecentErrors {
      });

      private _recentErrors: LogDatum[] = [];
-     private updateRecentErrors = async (objs: LogDatum[]) => {
+     private updateRecentErrors = runInSerial(async (objs: LogDatum[]) => {
          objs = await suppressionList.filterObjsToNonSuppressed(objs);
          let newRecentErrors = limitRecentErrors(objs);
          // If any changed
@@ -549,27 +563,59 @@ class RecentErrors {
              this._recentErrors = newRecentErrors;
              void this.broadcastUpdate(undefined);
          }
-     };
+     });
      private broadcastUpdate = batchFunction({ delay: NOTIFICATION_BROADCAST_BATCH }, () => {
          recentErrorsChannel.broadcast(true);
      });

-     private async addErrors(objs: LogDatum[]) {
+     private addErrors = runInSerial(async (objs: LogDatum[]) => {
          if (objs.length === 0) return;
+
+         if (this.addErrorsCallback) {
+             await this.addErrorsCallback(objs);
+             return;
+         }
          for (let obj of objs) {
              this._recentErrors.push(obj);
          }
          await this.updateRecentErrors(this._recentErrors);
-     }
+     });

-     public async onSuppressionChanged() {
+     private lastSuppressionList = new Map<string, SuppressionEntry>();
+     public onSuppressionChanged = runInSerial(async () => {
+         let newSuppressionList = new Map((await suppressionList.getSuppressionList()).map(x => [x.key, x]));
+         let prev = this.lastSuppressionList;
+         function anyReduced() {
+             for (let newEntry of newSuppressionList.values()) {
+                 let oldEntry = prev.get(newEntry.key);
+                 if (oldEntry && newEntry.expiresAt < oldEntry.expiresAt) {
+                     return true;
+                 }
+             }
+             for (let oldEntry of prev.values()) {
+                 if (!newSuppressionList.has(oldEntry.key)) {
+                     return true;
+                 }
+             }
+             return false;
+         }
+         if (anyReduced()) {
+             console.info("Suppression has been reduced (entries removed or expiry times decreased), performing full rescan to find any revealed values.");
+             this.scannedHashes.clear();
+             void this.scanNow({});
+         }
+         this.lastSuppressionList = newSuppressionList;
          await this.updateRecentErrors(this._recentErrors);
-     }
+     });

      private scannedHashes = new Set<string>();
      private scanNow = runInSerial(async (config: {
          noLocalFiles?: boolean;
      }) => {
+         // If we're scanning everything, we should update the suppression list, because it might have been changed remotely, and we might be scanning everything because the user clicked refresh.
+         if (!this.lastSuppressionList || !config.noLocalFiles) {
+             this.lastSuppressionList = new Map((await suppressionList.getSuppressionList()).map(x => [x.key, x]));
+         }
          for (let appendable of getAppendables()) {
              let startTime = Date.now() - VIEW_WINDOW;
              let endTime = Date.now() + timeInHour * 2;
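updateRecentErrors, addErrors, and onSuppressionChanged are now runInSerial wrappers (scanNow already was one), so overlapping triggers can't interleave their async bodies. A hedged sketch of what such a wrapper does — the real runInSerial comes from the package's utilities and its exact behavior may differ:

// Sketch only: chain every call onto the previous call's promise so the wrapped
// async function runs one invocation at a time, in call order.
function runInSerialSketch<A extends unknown[], R>(
    fn: (...args: A) => Promise<R>
): (...args: A) => Promise<R> {
    let tail: Promise<unknown> = Promise.resolve();
    return (...args: A) => {
        const result = tail.then(() => fn(...args));
        // Keep the chain alive even if one call rejects.
        tail = result.catch(() => undefined);
        return result;
    };
}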
@@ -619,43 +665,11 @@ class RecentErrors {
                  await fs.promises.unlink(path);
                  continue;
              }
-             let sizeT = size;
-             let fd = await fs.promises.open(path, "r");
-             try {
-                 await new Promise<void>(async (resolve, reject) => {
-                     const gunzip = zlib.createGunzip();
-
-                     gunzip.on("data", (chunk: Buffer) => {
-                         void scanner.onData(chunk);
-                     });
-
-                     gunzip.on("end", async () => {
-                         try {
-                             resolve();
-                         } catch (error) {
-                             reject(error);
-                         }
-                     });
-
-                     gunzip.on("error", reject);
-
-                     try {
-                         for (let i = 0; i < sizeT; i += READ_CHUNK_SIZE) {
-                             let chunkSize = Math.min(READ_CHUNK_SIZE, sizeT - i);
-                             let buffer = Buffer.alloc(chunkSize);
-                             await fd.read(buffer, 0, chunkSize, i);
-                             let result = gunzip.write(buffer);
-                             if (!result) {
-                                 await new Promise(resolve => gunzip.once("drain", resolve));
-                             }
-                         }
-                         gunzip.end();
-                     } catch (error) {
-                         reject(error);
-                     }
-                 });
-             } finally {
-                 await fd.close();
+             const fileStream = fs.createReadStream(path);
+             const gunzip = zlib.createGunzip();
+             const decompressedStream = fileStream.pipe(gunzip);
+             for await (const chunk of decompressedStream) {
+                 scanner.onData(chunk);
              }
              let newErrors = await scanner.finish();
              await this.addErrors(newErrors);
@@ -678,6 +692,10 @@ class RecentErrors {
          await this.scanNow({});
          return this._recentErrors;
      }
+
+     public async raiseTestError(...params: unknown[]) {
+         console.error(...params);
+     }
  }
  const recentErrors = new RecentErrors();
  export const RecentErrorsController = getSyncedController(SocketFunction.register(
@@ -686,6 +704,7 @@ export const RecentErrorsController = getSyncedController(SocketFunction.registe
      () => ({
          getRecentErrors: {},
          rescanAllErrorsNow: {},
+         raiseTestError: {},
      }),
      () => ({
          hooks: [assertIsManagementUser],
@@ -722,4 +741,4 @@ export const notifyWatchersOfError = batchFunction({
      }
  );

- const errorWatcherBase = new SocketChannel<LogDatum[]>("error-watcher-38de08cd-3247-4f75-9ac0-7919b240607d");
+ export const errorWatcherBase = new SocketChannel<LogDatum[]>("error-watcher-38de08cd-3247-4f75-9ac0-7919b240607d");
package/src/diagnostics/logs/errorNotifications/ErrorSuppressionUI.tsx

@@ -10,6 +10,8 @@ import { nextId, sort, timeInDay } from "socket-function/src/misc";
  import { formatNumber, formatVeryNiceDateTime } from "socket-function/src/formatting/format";
  import { formatDateJSX } from "../../../misc/formatJSX";
  import { LogDatum } from "../diskLogger";
+ import { measureFnc } from "socket-function/src/profiling/measure";
+ import { throttleRender } from "../../../functional/throttleRender";

  export class ErrorSuppressionUI extends qreact.Component<{
      dataSeqNum: number;
@@ -20,8 +22,10 @@ export class ErrorSuppressionUI extends qreact.Component<{
  }> {
      state = t.state({
          matchedInput: t.string(""),
+         renderLimit: t.number(10)
      });

+     @measureFnc
      private calculatePreviewMatchCount(pattern: string): number {
          if (!pattern.trim()) return 0;

@@ -50,6 +54,8 @@ export class ErrorSuppressionUI extends qreact.Component<{
      }

      public render() {
+         if (throttleRender({ key: "ErrorSuppressionUI", frameDelay: 30 })) return undefined;
+
          this.props.dataSeqNum;
          const controller = SuppressionListController(SocketFunction.browserNodeId());
          const entries = (controller.getSuppressionList() || []);
@@ -117,6 +123,27 @@ export class ErrorSuppressionUI extends qreact.Component<{
                  >
                      Fixed
                  </Button>
+                 <Button
+                     onClick={() => {
+                         let value = this.state.matchedInput;
+                         this.state.matchedInput = "";
+                         void Querysub.onCommitFinished(async () => {
+                             await controller.setSuppressionEntry.promise({
+                                 key: nextId(),
+                                 match: value,
+                                 comment: "",
+                                 lastUpdateTime: Date.now(),
+                                 expiresAt: Date.now(),
+                             });
+                             Querysub.commit(() => {
+                                 this.props.rerunFilters();
+                             });
+                         });
+                     }}
+                     title="Fixed immediately, any future errors even that happen right now will trigger again. "
+                 >
+                     Fixed Now
+                 </Button>
                  <Button onClick={() => {
                      let value = this.state.matchedInput;
                      this.state.matchedInput = "";
@@ -137,8 +164,12 @@ export class ErrorSuppressionUI extends qreact.Component<{
              </Button>
          </div>

-         <div className={css.vbox(8).fillWidth.overflowAuto.maxHeight("30vh")}>
-             {entries.map((entry) => {
+         <div className={css.pad2(12).bord2(200, 40, 85).hsl(200, 40, 95).fillWidth}>
+             <strong>Note:</strong> Suppression time updates don't automatically rerun the search. Click Run to rerun the search.
+         </div>
+
+         <div className={css.vbox(8).fillWidth.overflowAuto.maxHeight("20vh")}>
+             {entries.slice(0, this.state.renderLimit).map((entry) => {
                  const updateEntry = (changes: Partial<SuppressionEntry>) => {
                      let newEntry = { ...entry, ...changes };
                      void Querysub.onCommitFinished(async () => {
@@ -152,7 +183,7 @@ export class ErrorSuppressionUI extends qreact.Component<{
                      className={
                          css.hbox(8).pad2(12).bord2(0, 0, 10).fillWidth
                          //+ (entry.expiresAt < Date.now() && expiredCount > 0 && css.opacity(0.5))
-                         + ((count === 0 && expiredCount === 0) && css.opacity(0.6))
+                         + ((expiredCount === 0) && css.opacity(0.6))
                          + (
                              count > 0 && entry.expiresAt !== NOT_AN_ERROR_EXPIRE_TIME && css.hsla(0, 50, 50, 0.5)
                              || css.hsla(0, 0, 0, 0.1)
@@ -226,6 +257,9 @@ export class ErrorSuppressionUI extends qreact.Component<{
                      </Button>
                  </div>;
              })}
+             {entries.length > this.state.renderLimit && <Button onClick={() => this.state.renderLimit *= 2}>
+                 Load More
+             </Button>}
          </div>
      </div>;
  }
package/src/diagnostics/logs/errorNotifications/ErrorWarning.tsx

@@ -1,12 +1,12 @@
  import { SocketFunction } from "socket-function/SocketFunction";
  import { qreact } from "../../../4-dom/qreact";
  import { css } from "../../../4-dom/css";
- import { isCurrentUserSuperUser } from "../../../user-implementation/userData";
+ import { isCurrentUserSuperUser, user_data } from "../../../user-implementation/userData";
  import { RecentErrorsController, SuppressionListController, watchRecentErrors, MAX_RECENT_ERRORS, NOT_AN_ERROR_EXPIRE_TIME, SuppressionEntry } from "./ErrorNotificationController";
  import { t } from "../../../2-proxy/schema2";
  import { InputLabel } from "../../../library-components/InputLabel";
  import { Button } from "../../../library-components/Button";
- import { ATag } from "../../../library-components/ATag";
+ import { ATag, Anchor, URLOverride } from "../../../library-components/ATag";
  import { managementPageURL, showingManagementURL } from "../../managementPages";
  import { errorNotifyToggleURL } from "../LogViewer2";
  import { Querysub } from "../../../4-querysub/QuerysubController";
@@ -16,6 +16,20 @@ import { Icon } from "../../../library-components/icons";
  import { filterParam } from "../FastArchiveViewer";
  import { endTimeParam, startTimeParam } from "../TimeRangeSelector";
  import { formatDateJSX } from "../../../misc/formatJSX";
+ import { atomic } from "../../../2-proxy/PathValueProxyWatcher";
+
+ export function getLogsLinkParts(): URLOverride[] {
+     return [
+         showingManagementURL.getOverride(true),
+         managementPageURL.getOverride("LogViewer2"),
+         errorNotifyToggleURL.getOverride(true),
+         filterParam.getOverride(""),
+
+         // NOTE: While loading a weeks worth of logs clientside is a bit slow. Scanning serverside is not nearly as bad, as it can be done over hours, but... we want the page to be snappy, loading in seconds, so... just use a day, and we might reduce it even further if needed...
+         startTimeParam.getOverride(Date.now() - timeInDay * 1),
+         endTimeParam.getOverride(Date.now() + timeInHour * 2),
+     ];
+ }

  export class ErrorWarning extends qreact.Component {
      state = t.state({
@@ -67,22 +81,36 @@ export class ErrorWarning extends qreact.Component {
              </style>
          </Button>;

-         const logLink = [
-             showingManagementURL.getOverride(true),
-             managementPageURL.getOverride("LogViewer2"),
-             errorNotifyToggleURL.getOverride(true),
-             filterParam.getOverride(""),
-             startTimeParam.getOverride(Date.now() - timeInDay * 7),
-             endTimeParam.getOverride(Date.now() + timeInHour * 2),
-         ];
+         let discordURLWarning: qreact.ComponentChildren = undefined;
+         if (!atomic(user_data().secure.notifyDiscordWebhookURL)) {
+             discordURLWarning = (
+                 <Anchor
+                     target="_blank"
+                     title="Can't send application notifications to developers due to missing Discord hook URL. Click here and set it."
+                     values={[
+                         showingManagementURL.getOverride(true),
+                         managementPageURL.getOverride("SecurityPage"),
+                     ]}
+                 >
+                     <Button hue={0}>
+                         ⚠️ Missing Discord Hook URL <span className={css.filter("invert(1)")}>📞</span>
+                     </Button>
+                 </Anchor>
+             );
+         }
+
+         const logLink = getLogsLinkParts();

          if (!recentErrors || recentErrors.length === 0) {
-             return <span className={css.hbox(8)}>
-                 <ATag target="_blank" values={logLink}>
-                     No Errors
-                 </ATag>
-                 {refreshButton}
-             </span>;
+             return (
+                 <span className={css.hbox(8)}>
+                     <ATag target="_blank" values={logLink}>
+                         No Errors
+                     </ATag>
+                     {refreshButton}
+                     {discordURLWarning}
+                 </span>
+             );
          }

          // Count unique files
@@ -128,6 +156,7 @@ export class ErrorWarning extends qreact.Component {
                  View Logs
              </ATag>
              {refreshButton}
+             {discordURLWarning}
          </div>

          {topExpired &&
package/src/diagnostics/logs/errorNotifications/errorDigests.tsx (new file)

@@ -0,0 +1,8 @@
+ type DigestInfo = {
+     histogram: {
+         [timeGroup: number]: number;
+     };
+     topByFile: {
+         [file: string]: number;
+     };
+ };
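errorDigests.tsx currently ships only this shape. A hypothetical example value, not from the package (the epoch-millisecond buckets and file names are made up), showing how the two maps are keyed:

const exampleDigest: DigestInfo = {
    histogram: {
        // counts per time bucket (keys are made-up epoch-ms group starts)
        1735689600000: 12,
        1735693200000: 3,
    },
    topByFile: {
        // counts per originating file
        "userData.ts": 9,
        "diskLogger.ts": 6,
    },
};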