querysub 0.326.0 → 0.328.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/package.json +3 -4
  2. package/src/-a-archives/archivesBackBlaze.ts +20 -0
  3. package/src/-a-archives/archivesDisk.ts +5 -5
  4. package/src/-a-archives/archivesLimitedCache.ts +118 -7
  5. package/src/-a-archives/archivesPrivateFileSystem.ts +3 -0
  6. package/src/-g-core-values/NodeCapabilities.ts +26 -11
  7. package/src/0-path-value-core/auditLogs.ts +4 -2
  8. package/src/2-proxy/PathValueProxyWatcher.ts +3 -0
  9. package/src/3-path-functions/PathFunctionRunner.ts +2 -2
  10. package/src/4-querysub/Querysub.ts +1 -1
  11. package/src/5-diagnostics/GenericFormat.tsx +2 -2
  12. package/src/deployManager/machineApplyMainCode.ts +10 -8
  13. package/src/deployManager/machineSchema.ts +4 -3
  14. package/src/deployManager/setupMachineMain.ts +3 -2
  15. package/src/diagnostics/logs/FastArchiveAppendable.ts +85 -59
  16. package/src/diagnostics/logs/FastArchiveController.ts +5 -2
  17. package/src/diagnostics/logs/FastArchiveViewer.tsx +222 -51
  18. package/src/diagnostics/logs/LogViewer2.tsx +83 -35
  19. package/src/diagnostics/logs/TimeRangeSelector.tsx +8 -0
  20. package/src/diagnostics/logs/diskLogGlobalContext.ts +3 -3
  21. package/src/diagnostics/logs/diskLogger.ts +70 -23
  22. package/src/diagnostics/logs/errorNotifications/ErrorNotificationController.ts +111 -82
  23. package/src/diagnostics/logs/errorNotifications/ErrorSuppressionUI.tsx +37 -3
  24. package/src/diagnostics/logs/errorNotifications/ErrorWarning.tsx +52 -22
  25. package/src/diagnostics/logs/errorNotifications/errorDigests.tsx +8 -0
  26. package/src/diagnostics/logs/errorNotifications/errorWatchEntry.tsx +198 -52
  27. package/src/diagnostics/logs/lifeCycleAnalysis/spec.md +3 -2
  28. package/src/diagnostics/managementPages.tsx +5 -0
  29. package/src/email_ims_notifications/discord.tsx +203 -0
  30. package/src/fs.ts +9 -0
  31. package/src/functional/SocketChannel.ts +9 -0
  32. package/src/functional/throttleRender.ts +134 -0
  33. package/src/library-components/ATag.tsx +2 -2
  34. package/src/library-components/SyncedController.ts +5 -3
  35. package/src/misc.ts +13 -0
  36. package/src/misc2.ts +54 -0
  37. package/src/user-implementation/SecurityPage.tsx +11 -5
  38. package/src/user-implementation/userData.ts +31 -16
  39. package/testEntry2.ts +14 -5
  40. package/src/user-implementation/setEmailKey.ts +0 -25
  41. package/src/{email → email_ims_notifications}/postmark.tsx +0 -0
  42. package/src/{email → email_ims_notifications}/sendgrid.tsx +0 -0
package/src/diagnostics/logs/errorNotifications/ErrorNotificationController.ts

@@ -2,7 +2,7 @@ import { isNode } from "typesafecss";
  import { getArchives } from "../../../-a-archives/archives";
  import { SizeLimiter } from "../../SizeLimiter";
  import { FastArchiveAppendable, createLogScanner, objectDelimitterBuffer } from "../FastArchiveAppendable";
- import { LogDatum, getLoggers } from "../diskLogger";
+ import { LogDatum, getLogHash, getLoggers } from "../diskLogger";
  import os from "os";
  import { SocketFunction } from "socket-function/SocketFunction";
  import { cache, cacheLimited, lazy } from "socket-function/src/caching";
@@ -21,6 +21,8 @@ import { qreact } from "../../../4-dom/qreact";
  import { requiresNetworkTrustHook } from "../../../-d-trust/NetworkTrust2";
  import { assertIsManagementUser } from "../../managementPages";
  import { streamToIteratable } from "../../../misc";
+ import { fsExistsAsync } from "../../../fs";
+ import { getPathStr2 } from "../../../path";

  export const MAX_RECENT_ERRORS = 20;
  const MAX_RECENT_ERRORS_PER_FILE = 3;
@@ -151,18 +153,17 @@ export const getSuppressionFull = measureWrap(function getSuppressionFull(config

  // Handle definitelyExpired - these are outdated suppressions
  let mostRecentOutdatedSuppressionKey: string | undefined = undefined;
+ let mostRecentOutdatedSuppressionTime = 0;

  // Handle maybeExpired - need to parse timestamp to check if suppression was active
  if (maybeExpired.length > 0 && (suppressionCounts || expiredSuppressionCounts || obj)) {
- const getLogTime = () => {
- try {
- let logEntry = JSON.parse(data.slice(posStart, posEnd).toString()) as LogDatum;
- return typeof logEntry.time === "number" ? logEntry.time : 0;
- } catch {
- return 0;
+ let logTime = 0;
+ try {
+ let logEntry = JSON.parse(data.slice(posStart, posEnd).toString()) as LogDatum;
+ if (typeof logEntry.time === "number") {
+ logTime = logEntry.time;
  }
- };
- let logTime = getLogTime();
+ } catch { }

  for (let checker of maybeExpired) {
  if (checker.fnc(data, posStart, posEnd)) {
@@ -174,8 +175,10 @@ export const getSuppressionFull = measureWrap(function getSuppressionFull(config
  suppressionCounts.set(checker.entry.key, count);
  }
  } else {
- if (!mostRecentOutdatedSuppressionKey) {
+
+ if (checker.entry.expiresAt > mostRecentOutdatedSuppressionTime) {
  mostRecentOutdatedSuppressionKey = checker.entry.key;
+ mostRecentOutdatedSuppressionTime = checker.entry.expiresAt;
  }
  // Even if we don't want the expired suppression counts, we might want the normal suppression counts, so we have to keep going.
  if (expiredSuppressionCounts) {
@@ -192,7 +195,7 @@ export const getSuppressionFull = measureWrap(function getSuppressionFull(config
  for (let checker of definitelyExpired) {
  if (checker.fnc(data, posStart, posEnd)) {
  // First match is the most recent (entries are sorted by lastUpdateTime desc)
- if (!mostRecentOutdatedSuppressionKey) {
+ if (checker.entry.expiresAt > mostRecentOutdatedSuppressionTime) {
  mostRecentOutdatedSuppressionKey = checker.entry.key;
  }
  if (!expiredSuppressionCounts) break;
@@ -204,7 +207,7 @@ export const getSuppressionFull = measureWrap(function getSuppressionFull(config
  }

  // Set the most recent outdated suppression key if we found any and weren't suppressed
- if (obj && mostRecentOutdatedSuppressionKey && !suppressed) {
+ if (obj && mostRecentOutdatedSuppressionKey) {
  obj.outdatedSuppressionKey = mostRecentOutdatedSuppressionKey;
  }

@@ -221,25 +224,30 @@ const suppressionUpdatedChannel = new SocketChannel<boolean>("suppression-update

  class SuppressionList {
  private init = lazy(async () => {
- suppressionUpdatedChannel.watch(() => {
- void this.updateEntriesNow();
+ suppressionUpdatedChannel.watch(async () => {
+ await this.updateEntriesNow();
+ await recentErrors.onSuppressionChanged();
+ });
+ await runInfinitePollCallAtStart(SUPPRESSION_POLL_INTERVAL, async () => {
+ await this.updateEntriesNow();
  });
  });
+ private cacheEntries: SuppressionListBase | undefined = undefined;
  private updateEntriesNow = async () => {
  let entries = await suppressionListArchive.get(suppressionListKey);
  if (!entries) {
  entries = { entries: {} };
  }
- this.getEntries.set(Promise.resolve(entries));
+ this.cacheEntries = entries;
  };
- private getEntries = lazy(async (): Promise<SuppressionListBase> => {
+ private async getEntries(): Promise<SuppressionListBase> {
  await this.init();
- await runInfinitePollCallAtStart(SUPPRESSION_POLL_INTERVAL, async () => {
- await this.updateEntriesNow();
- });
+ if (!this.cacheEntries) {
+ throw new Error("Cache entries not set? Should be impossible.");
+ }
  // Infinite poll will have set this, so we don't infinitely loop
- return await this.getEntries();
- });
+ return this.cacheEntries;
+ }

  public async filterObjsToNonSuppressed(objs: LogDatum[]): Promise<LogDatum[]> {
  // NOTE: Streamed data should be rare enough, that handling this inefficiently is okay.
@@ -259,6 +267,7 @@ class SuppressionList {
  }
  let buffer = Buffer.concat(parts);
  let scanner = await this.scanForRecentErrors({
+ debugName: "filterObjsToNonSuppressed",
  startTime,
  endTime,
  });
@@ -266,6 +275,7 @@ class SuppressionList {
  return await scanner.finish();
  }
  public async scanForRecentErrors(config: {
+ debugName: string;
  startTime: number;
  endTime: number;
  }): Promise<{
@@ -285,6 +295,7 @@ class SuppressionList {
  // for the suppression key.
  let obj: { outdatedSuppressionKey?: string } = {};
  let callback = createLogScanner({
+ debugName: config.debugName,
  onParsedData: (posStart, posEnd, buffer) => {
  if (buffer === "done") {
  return;
@@ -297,7 +308,7 @@ class SuppressionList {
  try {
  datum = JSON.parse(buffer.slice(posStart, posEnd).toString()) as LogDatum;
  } catch (e: any) {
- process.stderr.write(`Failed to parse log datum in around ${buffer.slice(posStart, posEnd).slice(0, 100).toString("hex")}, error is:\n${e.stack}`);
+ process.stderr.write(`Failed to parse log datum in around ${buffer.slice(posStart, posEnd).slice(0, 100).toString("hex")}, in source ${config.debugName}, error is:\n${e.stack}`);
  return;
  }
  if (obj.outdatedSuppressionKey) {
@@ -307,18 +318,19 @@ class SuppressionList {
  },
  });
  let lastWaitTime = Date.now();
+ const stream = runInSerial(async (buffer: Buffer | "done") => {
+ // TODO: Maybe we should add this pattern to batching.ts? Basically, if we get called fast, we allow the calls through. BUT, if we called slowly OR we are doing a lot of processing (and so we are working for all of SELF_THROTTLE_INTERVAL), then we wait. This prevents this from taking over the machine. The back off is steep though, and if the machine is lagging we might reduce to a trickle, just getting 1 call in per SELF_THROTTLE_DELAY + synchronous lag from work in other parts of the program.
+ let now = Date.now();
+ if (now - lastWaitTime > SELF_THROTTLE_INTERVAL) {
+ await delay(SELF_THROTTLE_DELAY);
+ lastWaitTime = now;
+ }
+ await callback(buffer);
+ });
  return {
- onData: runInSerial(async (buffer) => {
- // TODO: Maybe we should add this pattern to batching.ts? Basically, if we get called fast, we allow the calls through. BUT, if we called slowly OR we are doing a lot of processing (and so we are working for all of SELF_THROTTLE_INTERVAL), then we wait. This prevents this from taking over the machine. The back off is steep though, and if the machine is lagging we might reduce to a trickle, just getting 1 call in per SELF_THROTTLE_DELAY + synchronous lag from work in other parts of the program.
- let now = Date.now();
- if (now - lastWaitTime > SELF_THROTTLE_INTERVAL) {
- await delay(SELF_THROTTLE_DELAY);
- lastWaitTime = now;
- }
- await callback(buffer);
- }),
+ onData: stream,
  finish: async () => {
- await callback("done");
+ await stream("done");
  // NOTE: We COULD limit as we run, however... how many errors are we really going to encounter that AREN'T suppressed? Suppression is supposed to prevent overload anyways. I guess worst case scenario, yes, we could get overloaded, but... if we are logging more NEW errors than we can store in memory, we have bigger problems...
  return limitRecentErrors(datums);
  },
@@ -328,23 +340,24 @@ class SuppressionList {
  let entries = await this.getEntries();
  entry.lastUpdateTime = Date.now();
  entries.entries[entry.key] = entry;
- void suppressionListArchive.set(suppressionListKey, entries);
+ await suppressionListArchive.set(suppressionListKey, entries);
  suppressionUpdatedChannel.broadcast(true);
  await recentErrors.onSuppressionChanged();
  }
  public async removeSuppressionEntry(key: string) {
  let entries = await this.getEntries();
  delete entries.entries[key];
- void suppressionListArchive.set(suppressionListKey, entries);
+ await suppressionListArchive.set(suppressionListKey, entries);
  suppressionUpdatedChannel.broadcast(true);
  await recentErrors.onSuppressionChanged();
  }

  public async getSuppressionList(): Promise<SuppressionEntry[]> {
- return Object.values((await this.getEntries()).entries);
+ let entries = Object.values((await this.getEntries()).entries);
+ return entries;
  }
  }
- const suppressionList = new SuppressionList();
+ export const suppressionList = new SuppressionList();
  export const SuppressionListController = getSyncedController(SocketFunction.register(
  "SuppressionListController-08f985d8-8d06-4041-ac4b-44566c54615d",
  suppressionList,
@@ -388,7 +401,7 @@ class URLCache {
  if (!isNode()) return undefined;

  // Create cache directory if it doesn't exist
- if (!fs.existsSync(this.root)) {
+ if (!await fsExistsAsync(this.root)) {
  await fs.promises.mkdir(this.root, { recursive: true });
  }

@@ -489,13 +502,18 @@ const urlCache = new URLCache();
  const limitRecentErrors = measureWrap(function limitRecentErrors(objs: LogDatum[]) {
  sort(objs, x => x.time);
  let recent: LogDatum[] = [];
+ let foundHashes = new Set<string>();
  let countByFile = new Map<string, number>();
  // NOTE: We iterate backwards, because... usually new logs come in at the end, and are pushed, so we want to sort by time (that way we often don't have to resort by much). And if we sort by time, the newest at at the end!
  for (let i = objs.length - 1; i >= 0; i--) {
  let obj = objs[i];
  let file = String(obj.__FILE__) || "";
  let count = countByFile.get(file) || 0;
+ if (count > MAX_RECENT_ERRORS_PER_FILE) continue;
  count++;
+ let hash = getLogHash(obj);
+ if (foundHashes.has(hash)) continue;
+ foundHashes.add(hash);
  if (count > MAX_RECENT_ERRORS_PER_FILE) continue;
  countByFile.set(file, count);
  recent.push(obj);
@@ -504,8 +522,13 @@ const limitRecentErrors = measureWrap(function limitRecentErrors(objs: LogDatum[
  return recent;
  });

- class RecentErrors {
+ export class RecentErrors {
+
+ constructor(private addErrorsCallback?: (objs: LogDatum[]) => void | Promise<void>) {
+ this.addErrorsCallback = addErrorsCallback;
+ }

+ // TODO: Uninitialize (stopping the infinite polling), if all of our recent errors watchers go away.
  private initialize = lazy(async () => {
  errorWatcherBase.watch(x => {
  void this.addErrors(x);
@@ -517,7 +540,7 @@ class RecentErrors {
  });

  private _recentErrors: LogDatum[] = [];
- private updateRecentErrors = async (objs: LogDatum[]) => {
+ private updateRecentErrors = runInSerial(async (objs: LogDatum[]) => {
  objs = await suppressionList.filterObjsToNonSuppressed(objs);
  let newRecentErrors = limitRecentErrors(objs);
  // If any changed
@@ -540,27 +563,59 @@ class RecentErrors {
  this._recentErrors = newRecentErrors;
  void this.broadcastUpdate(undefined);
  }
- };
+ });
  private broadcastUpdate = batchFunction({ delay: NOTIFICATION_BROADCAST_BATCH }, () => {
  recentErrorsChannel.broadcast(true);
  });

- private async addErrors(objs: LogDatum[]) {
+ private addErrors = runInSerial(async (objs: LogDatum[]) => {
  if (objs.length === 0) return;
+
+ if (this.addErrorsCallback) {
+ await this.addErrorsCallback(objs);
+ return;
+ }
  for (let obj of objs) {
  this._recentErrors.push(obj);
  }
  await this.updateRecentErrors(this._recentErrors);
- }
+ });

- public async onSuppressionChanged() {
+ private lastSuppressionList = new Map<string, SuppressionEntry>();
+ public onSuppressionChanged = runInSerial(async () => {
+ let newSuppressionList = new Map((await suppressionList.getSuppressionList()).map(x => [x.key, x]));
+ let prev = this.lastSuppressionList;
+ function anyReduced() {
+ for (let newEntry of newSuppressionList.values()) {
+ let oldEntry = prev.get(newEntry.key);
+ if (oldEntry && newEntry.expiresAt < oldEntry.expiresAt) {
+ return true;
+ }
+ }
+ for (let oldEntry of prev.values()) {
+ if (!newSuppressionList.has(oldEntry.key)) {
+ return true;
+ }
+ }
+ return false;
+ }
+ if (anyReduced()) {
+ console.info("Suppression has been reduced (entries removed or expiry times decreased), performing full rescan to find any revealed values.");
+ this.scannedHashes.clear();
+ void this.scanNow({});
+ }
+ this.lastSuppressionList = newSuppressionList;
  await this.updateRecentErrors(this._recentErrors);
- }
+ });

  private scannedHashes = new Set<string>();
  private scanNow = runInSerial(async (config: {
  noLocalFiles?: boolean;
  }) => {
+ // If we're scanning everything, we should update the suppression list, because it might have been changed remotely, and we might be scanning everything because the user clicked refresh.
+ if (!this.lastSuppressionList || !config.noLocalFiles) {
+ this.lastSuppressionList = new Map((await suppressionList.getSuppressionList()).map(x => [x.key, x]));
+ }
  for (let appendable of getAppendables()) {
  let startTime = Date.now() - VIEW_WINDOW;
  let endTime = Date.now() + timeInHour * 2;
@@ -597,6 +652,7 @@ class RecentErrors {
  path = await urlCache.getURLLocalPath(file.url, hash);
  if (!path) continue;
  let scanner = await suppressionList.scanForRecentErrors({
+ debugName: file.url,
  startTime: file.startTime,
  endTime: file.endTime,
  });
@@ -609,43 +665,11 @@ class RecentErrors {
  await fs.promises.unlink(path);
  continue;
  }
- let sizeT = size;
- let fd = await fs.promises.open(path, "r");
- try {
- await new Promise<void>(async (resolve, reject) => {
- const gunzip = zlib.createGunzip();
-
- gunzip.on("data", (chunk: Buffer) => {
- void scanner.onData(chunk);
- });
-
- gunzip.on("end", async () => {
- try {
- resolve();
- } catch (error) {
- reject(error);
- }
- });
-
- gunzip.on("error", reject);
-
- try {
- for (let i = 0; i < sizeT; i += READ_CHUNK_SIZE) {
- let chunkSize = Math.min(READ_CHUNK_SIZE, sizeT - i);
- let buffer = Buffer.alloc(chunkSize);
- await fd.read(buffer, 0, chunkSize, i);
- let result = gunzip.write(buffer);
- if (!result) {
- await new Promise(resolve => gunzip.once("drain", resolve));
- }
- }
- gunzip.end();
- } catch (error) {
- reject(error);
- }
- });
- } finally {
- await fd.close();
+ const fileStream = fs.createReadStream(path);
+ const gunzip = zlib.createGunzip();
+ const decompressedStream = fileStream.pipe(gunzip);
+ for await (const chunk of decompressedStream) {
+ scanner.onData(chunk);
  }
  let newErrors = await scanner.finish();
  await this.addErrors(newErrors);
@@ -668,6 +692,10 @@ class RecentErrors {
  await this.scanNow({});
  return this._recentErrors;
  }
+
+ public async raiseTestError(...params: unknown[]) {
+ console.error(...params);
+ }
  }
  const recentErrors = new RecentErrors();
  export const RecentErrorsController = getSyncedController(SocketFunction.register(
@@ -676,6 +704,7 @@ export const RecentErrorsController = getSyncedController(SocketFunction.registe
  () => ({
  getRecentErrors: {},
  rescanAllErrorsNow: {},
+ raiseTestError: {},
  }),
  () => ({
  hooks: [assertIsManagementUser],
@@ -712,4 +741,4 @@ export const notifyWatchersOfError = batchFunction({
  }
  );

- const errorWatcherBase = new SocketChannel<LogDatum[]>("error-watcher-38de08cd-3247-4f75-9ac0-7919b240607d");
+ export const errorWatcherBase = new SocketChannel<LogDatum[]>("error-watcher-38de08cd-3247-4f75-9ac0-7919b240607d");
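
Editor's note: the TODO carried over into the new `stream = runInSerial(...)` block in scanForRecentErrors describes the throttling idea in prose: calls that arrive quickly pass straight through, but once processing has hogged roughly a full SELF_THROTTLE_INTERVAL without yielding, the next call is delayed by SELF_THROTTLE_DELAY. A minimal standalone sketch of that pattern follows; the stand-in runInSerial/delay helpers and the constant values are assumptions for illustration, not the package's actual socket-function implementations.

// Minimal sketch of the self-throttling pattern described in the TODO above.
// runInSerial, delay, and the constants are simplified stand-ins, not the
// package's real helpers.
const SELF_THROTTLE_INTERVAL = 1000; // assumed: max uninterrupted work before yielding
const SELF_THROTTLE_DELAY = 100;     // assumed: pause length once we decide to yield

const delay = (ms: number) => new Promise<void>(resolve => setTimeout(resolve, ms));

// Serializes async calls: each call runs only after the previous one settles.
function runInSerial<T>(fn: (arg: T) => Promise<void>): (arg: T) => Promise<void> {
    let queue: Promise<void> = Promise.resolve();
    return arg => (queue = queue.catch(() => undefined).then(() => fn(arg)));
}

function makeThrottledProcessor(processChunk: (chunk: Buffer | "done") => Promise<void>) {
    let lastWaitTime = Date.now();
    return runInSerial(async (chunk: Buffer | "done") => {
        // Fast bursts pass straight through; once we have been working for more
        // than SELF_THROTTLE_INTERVAL without yielding, pause so the scan does
        // not take over the machine.
        const now = Date.now();
        if (now - lastWaitTime > SELF_THROTTLE_INTERVAL) {
            await delay(SELF_THROTTLE_DELAY);
            lastWaitTime = now;
        }
        await processChunk(chunk);
    });
}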
package/src/diagnostics/logs/errorNotifications/ErrorSuppressionUI.tsx

@@ -10,6 +10,8 @@ import { nextId, sort, timeInDay } from "socket-function/src/misc";
  import { formatNumber, formatVeryNiceDateTime } from "socket-function/src/formatting/format";
  import { formatDateJSX } from "../../../misc/formatJSX";
  import { LogDatum } from "../diskLogger";
+ import { measureFnc } from "socket-function/src/profiling/measure";
+ import { throttleRender } from "../../../functional/throttleRender";

  export class ErrorSuppressionUI extends qreact.Component<{
  dataSeqNum: number;
@@ -20,8 +22,10 @@ export class ErrorSuppressionUI extends qreact.Component<{
  }> {
  state = t.state({
  matchedInput: t.string(""),
+ renderLimit: t.number(10)
  });

+ @measureFnc
  private calculatePreviewMatchCount(pattern: string): number {
  if (!pattern.trim()) return 0;

@@ -50,6 +54,8 @@ export class ErrorSuppressionUI extends qreact.Component<{
  }

  public render() {
+ if (throttleRender({ key: "ErrorSuppressionUI", frameDelay: 30 })) return undefined;
+
  this.props.dataSeqNum;
  const controller = SuppressionListController(SocketFunction.browserNodeId());
  const entries = (controller.getSuppressionList() || []);
@@ -117,6 +123,27 @@ export class ErrorSuppressionUI extends qreact.Component<{
  >
  Fixed
  </Button>
+ <Button
+ onClick={() => {
+ let value = this.state.matchedInput;
+ this.state.matchedInput = "";
+ void Querysub.onCommitFinished(async () => {
+ await controller.setSuppressionEntry.promise({
+ key: nextId(),
+ match: value,
+ comment: "",
+ lastUpdateTime: Date.now(),
+ expiresAt: Date.now(),
+ });
+ Querysub.commit(() => {
+ this.props.rerunFilters();
+ });
+ });
+ }}
+ title="Fixed immediately, any future errors even that happen right now will trigger again. "
+ >
+ Fixed Now
+ </Button>
  <Button onClick={() => {
  let value = this.state.matchedInput;
  this.state.matchedInput = "";
@@ -137,8 +164,12 @@ export class ErrorSuppressionUI extends qreact.Component<{
  </Button>
  </div>

- <div className={css.vbox(8).fillWidth.overflowAuto.maxHeight("30vh")}>
- {entries.map((entry) => {
+ <div className={css.pad2(12).bord2(200, 40, 85).hsl(200, 40, 95).fillWidth}>
+ <strong>Note:</strong> Suppression time updates don't automatically rerun the search. Click Run to rerun the search.
+ </div>
+
+ <div className={css.vbox(8).fillWidth.overflowAuto.maxHeight("20vh")}>
+ {entries.slice(0, this.state.renderLimit).map((entry) => {
  const updateEntry = (changes: Partial<SuppressionEntry>) => {
  let newEntry = { ...entry, ...changes };
  void Querysub.onCommitFinished(async () => {
@@ -152,7 +183,7 @@ export class ErrorSuppressionUI extends qreact.Component<{
  className={
  css.hbox(8).pad2(12).bord2(0, 0, 10).fillWidth
  //+ (entry.expiresAt < Date.now() && expiredCount > 0 && css.opacity(0.5))
- + ((count === 0 && expiredCount === 0) && css.opacity(0.6))
+ + ((expiredCount === 0) && css.opacity(0.6))
  + (
  count > 0 && entry.expiresAt !== NOT_AN_ERROR_EXPIRE_TIME && css.hsla(0, 50, 50, 0.5)
  || css.hsla(0, 0, 0, 0.1)
@@ -226,6 +257,9 @@ export class ErrorSuppressionUI extends qreact.Component<{
  </Button>
  </div>;
  })}
+ {entries.length > this.state.renderLimit && <Button onClick={() => this.state.renderLimit *= 2}>
+ Load More
+ </Button>}
  </div>
  </div>;
  }
package/src/diagnostics/logs/errorNotifications/ErrorWarning.tsx

@@ -1,21 +1,35 @@
  import { SocketFunction } from "socket-function/SocketFunction";
  import { qreact } from "../../../4-dom/qreact";
  import { css } from "../../../4-dom/css";
- import { isCurrentUserSuperUser } from "../../../user-implementation/userData";
+ import { isCurrentUserSuperUser, user_data } from "../../../user-implementation/userData";
  import { RecentErrorsController, SuppressionListController, watchRecentErrors, MAX_RECENT_ERRORS, NOT_AN_ERROR_EXPIRE_TIME, SuppressionEntry } from "./ErrorNotificationController";
  import { t } from "../../../2-proxy/schema2";
  import { InputLabel } from "../../../library-components/InputLabel";
  import { Button } from "../../../library-components/Button";
- import { ATag } from "../../../library-components/ATag";
+ import { ATag, Anchor, URLOverride } from "../../../library-components/ATag";
  import { managementPageURL, showingManagementURL } from "../../managementPages";
  import { errorNotifyToggleURL } from "../LogViewer2";
  import { Querysub } from "../../../4-querysub/QuerysubController";
- import { nextId, timeInDay, timeInHour } from "socket-function/src/misc";
+ import { deepCloneJSON, nextId, timeInDay, timeInHour } from "socket-function/src/misc";
  import { formatNumber } from "socket-function/src/formatting/format";
  import { Icon } from "../../../library-components/icons";
  import { filterParam } from "../FastArchiveViewer";
  import { endTimeParam, startTimeParam } from "../TimeRangeSelector";
  import { formatDateJSX } from "../../../misc/formatJSX";
+ import { atomic } from "../../../2-proxy/PathValueProxyWatcher";
+
+ export function getLogsLinkParts(): URLOverride[] {
+ return [
+ showingManagementURL.getOverride(true),
+ managementPageURL.getOverride("LogViewer2"),
+ errorNotifyToggleURL.getOverride(true),
+ filterParam.getOverride(""),
+
+ // NOTE: While loading a weeks worth of logs clientside is a bit slow. Scanning serverside is not nearly as bad, as it can be done over hours, but... we want the page to be snappy, loading in seconds, so... just use a day, and we might reduce it even further if needed...
+ startTimeParam.getOverride(Date.now() - timeInDay * 1),
+ endTimeParam.getOverride(Date.now() + timeInHour * 2),
+ ];
+ }

  export class ErrorWarning extends qreact.Component {
  state = t.state({
@@ -67,22 +81,36 @@ export class ErrorWarning extends qreact.Component {
  </style>
  </Button>;

- const logLink = [
- showingManagementURL.getOverride(true),
- managementPageURL.getOverride("LogViewer2"),
- errorNotifyToggleURL.getOverride(true),
- filterParam.getOverride(""),
- startTimeParam.getOverride(Date.now() - timeInDay * 7),
- endTimeParam.getOverride(Date.now() + timeInHour * 2),
- ];
+ let discordURLWarning: qreact.ComponentChildren = undefined;
+ if (!atomic(user_data().secure.notifyDiscordWebhookURL)) {
+ discordURLWarning = (
+ <Anchor
+ target="_blank"
+ title="Can't send application notifications to developers due to missing Discord hook URL. Click here and set it."
+ values={[
+ showingManagementURL.getOverride(true),
+ managementPageURL.getOverride("SecurityPage"),
+ ]}
+ >
+ <Button hue={0}>
+ ⚠️ Missing Discord Hook URL <span className={css.filter("invert(1)")}>📞</span>
+ </Button>
+ </Anchor>
+ );
+ }
+
+ const logLink = getLogsLinkParts();

  if (!recentErrors || recentErrors.length === 0) {
- return <span className={css.hbox(8)}>
- <ATag target="_blank" values={logLink}>
- No Errors
- </ATag>
- {refreshButton}
- </span>;
+ return (
+ <span className={css.hbox(8)}>
+ <ATag target="_blank" values={logLink}>
+ No Errors
+ </ATag>
+ {refreshButton}
+ {discordURLWarning}
+ </span>
+ );
  }

  // Count unique files
@@ -128,19 +156,21 @@ export class ErrorWarning extends qreact.Component {
  View Logs
  </ATag>
  {refreshButton}
+ {discordURLWarning}
  </div>

  {topExpired &&
  <div className={css.hbox(8)}>
  <Button onClick={() => {
+ let newObj = deepCloneJSON({
+ ...topExpired!,
+ expiresAt: Date.now() + timeInDay,
+ });
  void Querysub.onCommitFinished(async () => {
- await suppressionController.setSuppressionEntry.promise({
- ...topExpired!,
- expiresAt: Date.now() + timeInDay,
- });
+ await suppressionController.setSuppressionEntry.promise(newObj);
  });
  }}>
- Ignore Again
+ Ignore Again ({formatDateJSX(topExpired.expiresAt)})
  </Button>
  <div>
  Match Pattern =
package/src/diagnostics/logs/errorNotifications/errorDigests.tsx

@@ -0,0 +1,8 @@
+ type DigestInfo = {
+ histogram: {
+ [timeGroup: number]: number;
+ };
+ topByFile: {
+ [file: string]: number;
+ };
+ };
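
Editor's note: errorDigests.tsx only declares the DigestInfo shape in this release. Purely as a hypothetical illustration of how such a digest might be assembled from LogDatum entries in the same module, a sketch could look like the following; the buildDigest helper, the one-hour bucket width, and the use of the time/__FILE__ fields are assumptions, not code from the package.

import { LogDatum } from "../diskLogger";

// Hypothetical helper: not part of the release, shown only to illustrate the shape.
const BUCKET_MS = 60 * 60 * 1000; // assumed bucket width of one hour

function buildDigest(entries: LogDatum[]): DigestInfo {
    const digest: DigestInfo = { histogram: {}, topByFile: {} };
    for (const entry of entries) {
        // Count entries per time bucket, keyed by the bucket's start timestamp.
        const timeGroup = Math.floor(Number(entry.time) / BUCKET_MS) * BUCKET_MS;
        digest.histogram[timeGroup] = (digest.histogram[timeGroup] || 0) + 1;

        // Count entries per originating file.
        const file = String(entry.__FILE__) || "";
        digest.topByFile[file] = (digest.topByFile[file] || 0) + 1;
    }
    return digest;
}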