querysub 0.326.0 → 0.328.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/package.json +3 -4
  2. package/src/-a-archives/archivesBackBlaze.ts +20 -0
  3. package/src/-a-archives/archivesDisk.ts +5 -5
  4. package/src/-a-archives/archivesLimitedCache.ts +118 -7
  5. package/src/-a-archives/archivesPrivateFileSystem.ts +3 -0
  6. package/src/-g-core-values/NodeCapabilities.ts +26 -11
  7. package/src/0-path-value-core/auditLogs.ts +4 -2
  8. package/src/2-proxy/PathValueProxyWatcher.ts +3 -0
  9. package/src/3-path-functions/PathFunctionRunner.ts +2 -2
  10. package/src/4-querysub/Querysub.ts +1 -1
  11. package/src/5-diagnostics/GenericFormat.tsx +2 -2
  12. package/src/deployManager/machineApplyMainCode.ts +10 -8
  13. package/src/deployManager/machineSchema.ts +4 -3
  14. package/src/deployManager/setupMachineMain.ts +3 -2
  15. package/src/diagnostics/logs/FastArchiveAppendable.ts +85 -59
  16. package/src/diagnostics/logs/FastArchiveController.ts +5 -2
  17. package/src/diagnostics/logs/FastArchiveViewer.tsx +222 -51
  18. package/src/diagnostics/logs/LogViewer2.tsx +83 -35
  19. package/src/diagnostics/logs/TimeRangeSelector.tsx +8 -0
  20. package/src/diagnostics/logs/diskLogGlobalContext.ts +3 -3
  21. package/src/diagnostics/logs/diskLogger.ts +70 -23
  22. package/src/diagnostics/logs/errorNotifications/ErrorNotificationController.ts +111 -82
  23. package/src/diagnostics/logs/errorNotifications/ErrorSuppressionUI.tsx +37 -3
  24. package/src/diagnostics/logs/errorNotifications/ErrorWarning.tsx +52 -22
  25. package/src/diagnostics/logs/errorNotifications/errorDigests.tsx +8 -0
  26. package/src/diagnostics/logs/errorNotifications/errorWatchEntry.tsx +198 -52
  27. package/src/diagnostics/logs/lifeCycleAnalysis/spec.md +3 -2
  28. package/src/diagnostics/managementPages.tsx +5 -0
  29. package/src/email_ims_notifications/discord.tsx +203 -0
  30. package/src/fs.ts +9 -0
  31. package/src/functional/SocketChannel.ts +9 -0
  32. package/src/functional/throttleRender.ts +134 -0
  33. package/src/library-components/ATag.tsx +2 -2
  34. package/src/library-components/SyncedController.ts +5 -3
  35. package/src/misc.ts +13 -0
  36. package/src/misc2.ts +54 -0
  37. package/src/user-implementation/SecurityPage.tsx +11 -5
  38. package/src/user-implementation/userData.ts +31 -16
  39. package/testEntry2.ts +14 -5
  40. package/src/user-implementation/setEmailKey.ts +0 -25
  41. package/src/{email → email_ims_notifications}/postmark.tsx +0 -0
  42. package/src/{email → email_ims_notifications}/sendgrid.tsx +0 -0
@@ -21,7 +21,7 @@ import { errorToUndefined, ignoreErrors, timeoutToUndefinedSilent } from "../../
21
21
  import { getCallObj } from "socket-function/src/nodeProxy";
22
22
  import { getSyncedController } from "../../library-components/SyncedController";
23
23
  import { getBrowserUrlNode, getOwnNodeId } from "../../-f-node-discovery/NodeDiscovery";
24
- import { secureRandom } from "../../misc/random";
24
+ import { secureRandom, shuffle } from "../../misc/random";
25
25
  import { getPathIndex, getPathStr2 } from "../../path";
26
26
  import { onNextPaint } from "../../functional/onNextPaint";
27
27
  import { getArchivesBackblazePrivateImmutable, getArchivesBackblazePublicImmutable } from "../../-a-archives/archivesBackBlaze";
@@ -34,6 +34,7 @@ import { sha256 } from "js-sha256";
34
34
  import { assertIsNetworkTrusted } from "../../-d-trust/NetworkTrust2";
35
35
  import { blue, magenta } from "socket-function/src/formatting/logColors";
36
36
  import { FileMetadata, FastArchiveAppendableControllerBase, FastArchiveAppendableController, getFileMetadataHash } from "./FastArchiveController";
37
+ import { fsExistsAsync } from "../../fs";
37
38
 
38
39
  // NOTE: In a single command line micro-test it looks like we can write about 40K writes of 500 per once, when using 10X parallel, on a fairly potato server. We should probably batch though, and only do 1X parallel.
39
40
  /*
@@ -140,7 +141,7 @@ export class FastArchiveAppendable<Datum> {
140
141
  await this.flushNow();
141
142
  });
142
143
  // Random, to try to prevent the dead file detection code from getting in sync. It's fine if it gets in sync, it's just inefficient as we'll have multiple services uploading the same file to the same location at the same time.
143
- void runInfinitePollCallAtStart(timeInMinute * 20 + Math.random() * timeInMinute * 5, async () => {
144
+ void runInfinitePoll(timeInMinute * 20 + Math.random() * timeInMinute * 5, async () => {
144
145
  await this.moveLogsToBackblaze();
145
146
  });
146
147
  }
@@ -151,13 +152,16 @@ export class FastArchiveAppendable<Datum> {
151
152
  return nestArchives("fast-logs/" + this.rootPath, archives);
152
153
  });
153
154
 
154
- public getLocalPathRoot = lazy(() => {
155
- let path = (
155
+ public baseGetLocalPathRoot = () => {
156
+ return (
156
157
  os.homedir()
157
158
  + "/fast-log-cache/"
158
159
  + getDomain() + "/"
159
160
  + this.rootPath
160
161
  );
162
+ };
163
+ public getLocalPathRoot = lazy(() => {
164
+ let path = this.baseGetLocalPathRoot();
161
165
  if (!fs.existsSync(path)) {
162
166
  fs.mkdirSync(path, { recursive: true });
163
167
  }
@@ -199,7 +203,10 @@ export class FastArchiveAppendable<Datum> {
199
203
  // NOTE: This is disk writing, which should be fast, but if it's slow we might be able to remove the measureWrap (as technically spending 50% of our time writing to the disk is fine, and won't lag anything).
200
204
  @measureFnc
201
205
  public async flushNow(now = Date.now()) {
206
+
202
207
  await appendableSerialLock(async () => {
208
+ if (this.pendingWriteQueue.length === 0) return;
209
+
203
210
  // 2025-09-06T07
204
211
  let hourFile = new Date(now).toISOString().slice(0, 13) + ".log";
205
212
  let localCacheFolder = this.getLocalPathRoot() + getOwnThreadId() + "/";
@@ -208,8 +215,6 @@ export class FastArchiveAppendable<Datum> {
208
215
  // Always heartbeat
209
216
  await fs.promises.writeFile(localCacheFolder + "heartbeat", Buffer.from(now + ""));
210
217
 
211
- if (this.pendingWriteQueue.length === 0) return;
212
-
213
218
  try {
214
219
  let beforeSize = await fs.promises.stat(localCachePath);
215
220
  if (beforeSize.size > UNCOMPRESSED_LOG_FILE_STOP_THRESHOLD) {
@@ -277,18 +282,22 @@ export class FastArchiveAppendable<Datum> {
277
282
 
278
283
  public async moveLogsToBackblaze() {
279
284
  await appendableSerialLock(async () => {
280
- let rootCacheFolder = this.getLocalPathRoot();
285
+ let rootCacheFolder = this.baseGetLocalPathRoot();
286
+ if (!await fsExistsAsync(rootCacheFolder)) return;
281
287
  console.log(magenta(`Moving old logs to Backblaze from ${rootCacheFolder}`));
282
288
 
283
289
  let archives = this.getArchives();
284
290
  async function moveLogsForFolder(threadId: string) {
285
291
  let threadDir = rootCacheFolder + threadId + "/";
286
- if (!fs.existsSync(threadDir)) return;
292
+ if (!await fsExistsAsync(threadDir)) return;
287
293
  let files = await fs.promises.readdir(threadDir);
294
+ // Shuffle, so if we do run multiple at the same time, we are less likely to use the same files at the same time.
295
+ files = shuffle(files, Math.random());
288
296
  for (let file of files) {
289
297
  if (file === "heartbeat") continue;
290
298
  let fullPath = threadDir + file;
291
299
  try {
300
+ if (!await fsExistsAsync(fullPath)) continue;
292
301
  // We could use modified time here? Although, this is nice if we move files around, and then manually have them moved, although even then... this could cause problem be tripping while we are copying the file, so... maybe this is just wrong?
293
302
  let timeStamp = getFileTimeStamp(fullPath);
294
303
  if (timeStamp.endTime > Date.now() - UPLOAD_THRESHOLD) continue;
@@ -419,6 +428,7 @@ export class FastArchiveAppendable<Datum> {
419
428
  });
420
429
 
421
430
  let createProgress = async (section: string, max: number) => {
431
+ section = `${this.rootPath}|${section}`;
422
432
  let cancelled: Error | undefined;
423
433
  let lastValue = 0;
424
434
  let baseBatch = batchFunction({ delay: 150, },
@@ -499,7 +509,7 @@ export class FastArchiveAppendable<Datum> {
499
509
  synchronizeObj.parametersResult.resolve(syncResult);
500
510
 
501
511
 
502
- let downloadProgress = await createProgress("Downloading (bytes)", 0);
512
+ let downloadProgress = await createProgress(`Downloading (bytes)`, 0);
503
513
  let decompressProgress = await createProgress("Decompressing (bytes)", 0);
504
514
  let scanProgress = await createProgress("Scanning (datums)", 0);
505
515
  let decodeProgress = await createProgress("Decoding (datums)", 0);
@@ -509,13 +519,14 @@ export class FastArchiveAppendable<Datum> {
509
519
  const self = this;
510
520
 
511
521
 
512
- async function downloadAndParseFile(file: FileMetadata) {
522
+ async function downloadAndParseFile(file: FileMetadata, runInner: (code: () => Promise<void>) => Promise<void>) {
513
523
  const onFetchedData = runInSerial(async (data: Buffer) => {
514
524
  await downloadProgress(data.length, data.length, true);
515
525
  await decompressWriter.write(data);
516
526
  });
517
527
 
518
528
  const onDecompressedData = createLogScanner({
529
+ debugName: file.path,
519
530
  onParsedData,
520
531
  });
521
532
  let batchedData: Buffer[] = [];
@@ -610,65 +621,79 @@ export class FastArchiveAppendable<Datum> {
610
621
 
611
622
  // TODO: Stream from the local cache instead? It should be possible, we can get the total size, and read chunks.
612
623
  let hash = getFileMetadataHash(file) + ".file";
613
- let info = await localCache.getInfo(hash);
614
- if (info?.size) {
615
- const CHUNK_SIZE = 1000 * 1000 * 10;
616
- for (let i = 0; i < info.size; i += CHUNK_SIZE) {
617
- let data = await localCache.get(hash, { range: { start: i, end: i + CHUNK_SIZE } });
618
- if (!data) {
619
- throw new Error(`Cached data disappeared (the info was there?) for ${hash} at ${i}`);
624
+ let contents = await localCache.get(hash);
625
+ if (stoppedPromise.resolveCalled) return;
626
+ await runInner(async () => {
627
+ if (contents?.length) {
628
+ const CHUNK_SIZE = 1000 * 1000 * 10;
629
+ for (let i = 0; i < contents.length; i += CHUNK_SIZE) {
630
+ let data = contents.slice(i, i + CHUNK_SIZE);
631
+ await onFetchedData(data);
632
+ }
633
+ } else {
634
+ const response = await fetch(url);
635
+ if (!response.ok) {
636
+ throw new Error(`Failed to fetch ${url}: ${response.status} ${response.statusText}`);
620
637
  }
621
- await onFetchedData(data);
622
- }
623
- } else {
624
- const response = await fetch(url);
625
- if (!response.ok) {
626
- throw new Error(`Failed to fetch ${url}: ${response.status} ${response.statusText}`);
627
- }
628
638
 
629
- if (!response.body) {
630
- throw new Error(`Response body is undefined for ${url}`);
631
- }
632
- let values: Buffer[] = [];
633
- const reader = response.body.getReader();
634
- void stoppedPromise.promise.finally(() => {
635
- void response.body?.cancel();
636
- });
637
- try {
638
- for await (let value of streamToIteratable(reader)) {
639
- // Cancel entirely
640
- if (stoppedPromise.resolveCalled) return;
641
- if (!value) continue;
642
- let buffer = Buffer.from(value);
643
- values.push(buffer);
644
- await onFetchedData(buffer);
639
+ if (!response.body) {
640
+ throw new Error(`Response body is undefined for ${url}`);
641
+ }
642
+ let values: Buffer[] = [];
643
+ const reader = response.body.getReader();
644
+ void stoppedPromise.promise.finally(() => {
645
+ void response.body?.cancel();
646
+ });
647
+ try {
648
+ for await (let value of streamToIteratable(reader)) {
649
+ // Cancel entirely
650
+ if (stoppedPromise.resolveCalled) return;
651
+ if (!value) continue;
652
+ let buffer = Buffer.from(value);
653
+ values.push(buffer);
654
+ await onFetchedData(buffer);
655
+ }
656
+ } finally {
657
+ reader.releaseLock();
645
658
  }
646
- } finally {
647
- reader.releaseLock();
648
- }
649
659
 
650
- await localCache.set(hash, Buffer.concat(values));
651
- }
652
- await decompressWriter.close();
653
- await decompressPromise;
660
+ await localCache.set(hash, Buffer.concat(values));
661
+ }
662
+ await decompressWriter.close();
663
+ await decompressPromise;
664
+ });
654
665
  }
655
666
 
656
667
  // Fork off the processing
657
668
  void (async () => {
658
669
  try {
659
670
  // Iterate over all files and process them
660
- let fileProgress = await createProgress("Files", 0);
671
+ let fileProgress = await createProgress("Files", syncResult.files.length);
672
+ let fileInnerProgress = await createProgress("Files Inner", syncResult.files.length);
661
673
  let failedFiles = await createProgress("Failed Files", 0);
662
- for (let file of syncResult.files) {
674
+ let runSerial = runInSerial(async (fnc: () => Promise<void>) => {
675
+ try {
676
+ await fnc();
677
+ } finally {
678
+ await fileInnerProgress(1, 0, true);
679
+ }
680
+ });
681
+ async function downloadFileWrapper(file: FileMetadata) {
663
682
  if (stoppedPromise.resolveCalled) return;
664
683
  try {
665
- await downloadAndParseFile(file);
684
+ await downloadAndParseFile(file, runSerial);
666
685
  } catch (e: any) {
667
686
  console.warn(`Failed to download and parse file ${file.path}:\n${e.stack}`);
668
687
  await failedFiles(1, 1, true);
669
688
  }
670
- await fileProgress(1, 1, true);
689
+ if (stoppedPromise.resolveCalled) return;
690
+ await fileProgress(1, 0, true);
691
+ }
692
+ for (let file of syncResult.files) {
693
+ await downloadFileWrapper(file);
671
694
  }
695
+ await runInSerial(async () => {
696
+ });
672
697
 
673
698
  await (await createProgress("Done", 0))(1, 1, true);
674
699
  } catch (e: any) {
@@ -697,14 +722,18 @@ export class FastArchiveAppendable<Datum> {
697
722
 
698
723
 
699
724
  export function createLogScanner(config: {
725
+ debugName: string;
700
726
  onParsedData: (posStart: number, posEnd: number, buffer: Buffer | "done") => MaybePromise<void>;
701
727
  }): (data: Buffer | "done") => Promise<void> {
702
728
  const { onParsedData } = config;
703
729
  let pendingData: Buffer[] = [];
704
730
 
731
+ let finished = false;
732
+
705
733
  let delimitterMatchIndex = 0;
706
- return runInSerial(async (data: Buffer | "done") => {
734
+ return (async (data: Buffer | "done") => {
707
735
  if (data === "done") {
736
+ finished = true;
708
737
  // Flush any pending data, even though we have no delimitter. It will probably fail to parse, but... maybe it will work?
709
738
  if (pendingData.length > 0) {
710
739
  let combinedBuffer = Buffer.concat(pendingData);
@@ -714,6 +743,9 @@ export function createLogScanner(config: {
714
743
  await onParsedData(0, 0, "done");
715
744
  return;
716
745
  }
746
+ if (finished) {
747
+ throw new Error(`Finished scan, but we received more data: ${data.length}, sample is: ${data.slice(0, 100).toString("hex")}, ${config.debugName}`);
748
+ }
717
749
 
718
750
  let lastStart = 0;
719
751
  await measureBlock(async () => {
@@ -734,6 +766,7 @@ export function createLogScanner(config: {
734
766
  ...pendingData,
735
767
  data.slice(lastStart, i + 1),
736
768
  ]).slice(0, -objectDelimitterBuffer.length);
769
+ pendingData = [];
737
770
  posStart = 0;
738
771
  posEnd = buffer.length;
739
772
  } else {
@@ -741,19 +774,12 @@ export function createLogScanner(config: {
741
774
  posStart = lastStart;
742
775
  posEnd = i + 1 - objectDelimitterBuffer.length;
743
776
  }
744
- // Delimitter was the start of the chunk, and it's the first chunk. Just skip it.
745
- if (posStart === posEnd && i === 0) {
746
- lastStart = i + 1;
747
- continue;
748
- }
749
-
750
777
  // Only sometimes awaiting here makes scanning almost 2X faster, in the normal case, somehow?
751
778
  let maybePromise = onParsedData(posStart, posEnd, buffer);
752
779
  if (maybePromise) {
753
780
  await maybePromise;
754
781
  }
755
782
 
756
- pendingData = [];
757
783
  lastStart = i + 1;
758
784
  }
759
785
  }
@@ -35,6 +35,7 @@ import { assertIsNetworkTrusted } from "../../-d-trust/NetworkTrust2";
35
35
  import { blue, magenta } from "socket-function/src/formatting/logColors";
36
36
  import { FastArchiveAppendable, getFileTimeStamp } from "./FastArchiveAppendable";
37
37
  import { IdentityController_getMachineId, IdentityController_getReconnectNodeId } from "../../-c-identity/IdentityController";
38
+ import { fsExistsAsync } from "../../fs";
38
39
 
39
40
  export type FileMetadata = {
40
41
  nodeId?: string;
@@ -84,7 +85,8 @@ export class FastArchiveAppendableControllerBase {
84
85
  /** Get all pending local files for this rootPath that haven't been uploaded to Backblaze yet */
85
86
  public async getPendingFiles(rootPath: string, timeRange: { startTime: number; endTime: number; }): Promise<InternalFileMetadata[]> {
86
87
 
87
- let rootCacheFolder = new FastArchiveAppendable(rootPath).getLocalPathRoot();
88
+ let rootCacheFolder = new FastArchiveAppendable(rootPath).baseGetLocalPathRoot();
89
+ if (!await fsExistsAsync(rootCacheFolder)) return [];
88
90
 
89
91
  //console.log(`Searching for pending files in ${rootCacheFolder} for time range ${new Date(timeRange.startTime).toISOString()} to ${new Date(timeRange.endTime).toISOString()}`);
90
92
 
@@ -377,6 +379,7 @@ export class FastArchiveAppendableControllerBase {
377
379
  let remoteProgress = this.updateProgress(syncId, "Discovering remote files", byMachineId.size);
378
380
  let remoteValue = 0;
379
381
 
382
+
380
383
  await Promise.all(Array.from(byMachineId).map(async ([machineId, nodeObjs]) => {
381
384
  let firstAliveNode = new PromiseObj<string>();
382
385
  let allFinished = Promise.all(nodeObjs.map(async ({ nodeId, entryPoint }) => {
@@ -396,7 +399,7 @@ export class FastArchiveAppendableControllerBase {
396
399
 
397
400
  let pendingFiles = await errorToUndefined(
398
401
  controller.getPendingFiles(config.rootPath, config.range));
399
- console.log(blue(`Found ${pendingFiles?.length} pending files on node ${aliveNodeId}`), nodeObjs);
402
+ console.log(blue(`Found ${pendingFiles?.length} pending files on node ${aliveNodeId}`));
400
403
 
401
404
  remoteValue++;
402
405
  remoteProgress(remoteValue);