querysub 0.326.0 → 0.328.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/package.json +3 -4
  2. package/src/-a-archives/archivesBackBlaze.ts +20 -0
  3. package/src/-a-archives/archivesDisk.ts +5 -5
  4. package/src/-a-archives/archivesLimitedCache.ts +118 -7
  5. package/src/-a-archives/archivesPrivateFileSystem.ts +3 -0
  6. package/src/-g-core-values/NodeCapabilities.ts +26 -11
  7. package/src/0-path-value-core/auditLogs.ts +4 -2
  8. package/src/2-proxy/PathValueProxyWatcher.ts +3 -0
  9. package/src/3-path-functions/PathFunctionRunner.ts +2 -2
  10. package/src/4-querysub/Querysub.ts +1 -1
  11. package/src/5-diagnostics/GenericFormat.tsx +2 -2
  12. package/src/deployManager/machineApplyMainCode.ts +10 -8
  13. package/src/deployManager/machineSchema.ts +4 -3
  14. package/src/deployManager/setupMachineMain.ts +3 -2
  15. package/src/diagnostics/logs/FastArchiveAppendable.ts +85 -59
  16. package/src/diagnostics/logs/FastArchiveController.ts +5 -2
  17. package/src/diagnostics/logs/FastArchiveViewer.tsx +222 -51
  18. package/src/diagnostics/logs/LogViewer2.tsx +83 -35
  19. package/src/diagnostics/logs/TimeRangeSelector.tsx +8 -0
  20. package/src/diagnostics/logs/diskLogGlobalContext.ts +3 -3
  21. package/src/diagnostics/logs/diskLogger.ts +70 -23
  22. package/src/diagnostics/logs/errorNotifications/ErrorNotificationController.ts +111 -82
  23. package/src/diagnostics/logs/errorNotifications/ErrorSuppressionUI.tsx +37 -3
  24. package/src/diagnostics/logs/errorNotifications/ErrorWarning.tsx +52 -22
  25. package/src/diagnostics/logs/errorNotifications/errorDigests.tsx +8 -0
  26. package/src/diagnostics/logs/errorNotifications/errorWatchEntry.tsx +198 -52
  27. package/src/diagnostics/logs/lifeCycleAnalysis/spec.md +3 -2
  28. package/src/diagnostics/managementPages.tsx +5 -0
  29. package/src/email_ims_notifications/discord.tsx +203 -0
  30. package/src/fs.ts +9 -0
  31. package/src/functional/SocketChannel.ts +9 -0
  32. package/src/functional/throttleRender.ts +134 -0
  33. package/src/library-components/ATag.tsx +2 -2
  34. package/src/library-components/SyncedController.ts +5 -3
  35. package/src/misc.ts +13 -0
  36. package/src/misc2.ts +54 -0
  37. package/src/user-implementation/SecurityPage.tsx +11 -5
  38. package/src/user-implementation/userData.ts +31 -16
  39. package/testEntry2.ts +14 -5
  40. package/src/user-implementation/setEmailKey.ts +0 -25
  41. package/src/{email → email_ims_notifications}/postmark.tsx +0 -0
  42. package/src/{email → email_ims_notifications}/sendgrid.tsx +0 -0
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "querysub",
3
- "version": "0.326.0",
3
+ "version": "0.328.0",
4
4
  "main": "index.js",
5
5
  "license": "MIT",
6
6
  "note1": "note on node-forge fork, see https://github.com/digitalbazaar/forge/issues/744 for details",
@@ -22,7 +22,7 @@
22
22
  "js-sha512": "^0.9.0",
23
23
  "node-forge": "https://github.com/sliftist/forge#e618181b469b07bdc70b968b0391beb8ef5fecd6",
24
24
  "pako": "^2.1.0",
25
- "socket-function": "^0.136.0",
25
+ "socket-function": "^0.138.0",
26
26
  "terser": "^5.31.0",
27
27
  "typesafecss": "^0.22.0",
28
28
  "yaml": "^2.5.0",
@@ -55,8 +55,7 @@
55
55
  "join": "./bin/join.js",
56
56
  "join-public": "./bin/join-public.js",
57
57
  "merge": "./bin/merge.js",
58
- "addsuperuser": "./bin/addsuperuser.js",
59
- "setemailkey": "./bin/setemailkey.js"
58
+ "addsuperuser": "./bin/addsuperuser.js"
60
59
  },
61
60
  "devDependencies": {
62
61
  "dependency-cruiser": "^12.11.0",
@@ -11,6 +11,7 @@ import { formatNumber, formatTime } from "socket-function/src/formatting/format"
11
11
  import { blue, green, magenta } from "socket-function/src/formatting/logColors";
12
12
  import debugbreak from "debugbreak";
13
13
  import { onTimeProfile } from "../-0-hooks/hooks";
14
+ import dns from "dns";
14
15
 
15
16
  export function hasBackblazePermissions() {
16
17
  return isNode() && fs.existsSync(getBackblazePath());
@@ -382,6 +383,7 @@ const getAPI = lazy(async () => {
382
383
  cancelLargeFile,
383
384
  getDownloadAuthorization,
384
385
  getDownloadURL,
386
+ apiUrl: auth.apiUrl,
385
387
  };
386
388
  });
387
389
 
@@ -520,12 +522,30 @@ export class ArchivesBackblaze {
520
522
  // Eh... this might be bad, but... I think we just get random 400 errors. If this spams errors,
521
523
  // we can remove this line.
522
524
  || err.stack.includes(`400 Bad Request`)
525
+ || err.stack.includes(`"no tomes available"`)
526
+ || err.stack.includes(`getaddrinfo ENOTFOUND`)
523
527
  ) {
524
528
  this.log(err.message + " retrying in 5s");
525
529
  await delay(5000);
526
530
  return this.apiRetryLogic(fnc, retries - 1);
527
531
  }
528
532
 
533
+ if (err.stack.includes(`getaddrinfo ENOTFOUND`)) {
534
+ let urlObj = new URL(api.apiUrl);
535
+ let hostname = urlObj.hostname;
536
+ let lookupAddresses = await new Promise(resolve => {
537
+ dns.lookup(hostname, (err, addresses) => {
538
+ resolve(addresses);
539
+ });
540
+ });
541
+ let resolveAddresses = await new Promise(resolve => {
542
+ dns.resolve4(hostname, (err, addresses) => {
543
+ resolve(addresses);
544
+ });
545
+ });
546
+ console.error(`getaddrinfo ENOTFOUND ${hostname}`, { lookupAddresses, resolveAddresses, apiUrl: api.apiUrl, fullError: err.stack });
547
+ }
548
+
529
549
  // If the error is that the authorization token is invalid, reset getBucketAPI and getAPI
530
550
  // If the error is that the bucket isn't found, reset getBucketAPI
531
551
  if (err.stack.includes(`"expired_auth_token"`)) {
@@ -1,5 +1,5 @@
1
1
  import { isNode } from "socket-function/src/misc";
2
- import { getSubFolder } from "../fs";
2
+ import { fsExistsAsync, getSubFolder } from "../fs";
3
3
  import fs from "fs";
4
4
  import { blue, red, yellow } from "socket-function/src/formatting/logColors";
5
5
  import { measureFnc } from "socket-function/src/profiling/measure";
@@ -65,8 +65,8 @@ class ArchivesDisk {
65
65
  // Don't create the drive (and also only add up to the last path, via slicing (0, i)
66
66
  for (let i = 1; i < fileNameParts.length; i++) {
67
67
  let dir = this.LOCAL_ARCHIVE_FOLDER + fileNameParts.slice(0, i).join("/");
68
- if (!fs.existsSync(dir)) {
69
- fs.mkdirSync(dir);
68
+ if (!await fsExistsAsync(dir)) {
69
+ await fs.promises.mkdir(dir);
70
70
  }
71
71
  }
72
72
  }
@@ -91,7 +91,7 @@ class ArchivesDisk {
91
91
  if (storageDisabled) return;
92
92
  fileName = escapeFileName(fileName);
93
93
  await this.simulateLag();
94
- if (!fs.existsSync(this.LOCAL_ARCHIVE_FOLDER + fileName)) return;
94
+ if (!await fsExistsAsync(this.LOCAL_ARCHIVE_FOLDER + fileName)) return;
95
95
  try {
96
96
  await fs.promises.unlink(this.LOCAL_ARCHIVE_FOLDER + fileName);
97
97
  } catch { }
@@ -222,7 +222,7 @@ class ArchivesDisk {
222
222
  let fileNames: string[] = [];
223
223
  let folderNames: string[] = [];
224
224
  async function readDir(dir: string) {
225
- if (!fs.existsSync(dir)) {
225
+ if (!await fsExistsAsync(dir)) {
226
226
  return;
227
227
  }
228
228
  try {
@@ -1,6 +1,9 @@
1
- import { formatNumber } from "socket-function/src/formatting/format";
1
+ import { formatNumber, formatTime } from "socket-function/src/formatting/format";
2
2
  import { Archives } from "./archives";
3
3
  import { cache } from "socket-function/src/caching";
4
+ import { measureFnc } from "socket-function/src/profiling/measure";
5
+ import { batchFunction, runInfinitePoll } from "socket-function/src/batching";
6
+ import { timeInHour } from "socket-function/src/misc";
4
7
 
5
8
  interface FileInfo {
6
9
  writeTime: number;
@@ -8,36 +11,68 @@ interface FileInfo {
8
11
  size: number;
9
12
  }
10
13
 
14
+ interface IndexData {
15
+ files: Record<string, FileInfo>;
16
+ }
17
+
11
18
  class ArchivesLimitedCache {
12
19
  private baseArchives: Archives;
13
20
  private maxFiles: number;
14
21
  private maxSize: number;
15
22
  private cache = new Map<string, FileInfo>();
16
23
  private initialized = false;
24
+ private readonly indexPath = ".cache-index.json";
17
25
 
18
26
  constructor(baseArchives: Archives, config: { maxFiles: number; maxSize: number }) {
19
27
  this.baseArchives = baseArchives;
20
28
  this.maxFiles = config.maxFiles;
21
29
  this.maxSize = config.maxSize;
22
30
  this.initOptionalMethods();
31
+ this.initPeriodicSync();
23
32
  }
24
33
 
25
34
  public getDebugName(): string {
26
35
  return `limitedCache(${this.maxFiles}files,${Math.round(this.maxSize / (1024 * 1024))}MB)/${this.baseArchives.getDebugName()}`;
27
36
  }
28
37
 
29
- private async ensureInitialized(): Promise<void> {
30
- if (this.initialized) return;
31
-
32
- // Load all files on first access
38
+ @measureFnc
39
+ private async rebuildCacheFromFiles(): Promise<void> {
33
40
  const allFiles = await this.baseArchives.findInfo("", { type: "files" });
41
+ const newCache = new Map<string, FileInfo>();
42
+
43
+ // Build map of actual files (excluding index file)
34
44
  for (const file of allFiles) {
35
- this.cache.set(file.path, {
45
+ if (file.path === this.indexPath) continue;
46
+
47
+ newCache.set(file.path, {
36
48
  writeTime: file.createTime,
37
- accessTime: file.createTime,
49
+ accessTime: this.cache.get(file.path)?.accessTime ?? file.createTime,
38
50
  size: file.size,
39
51
  });
40
52
  }
53
+
54
+ // Update cache with actual files
55
+ this.cache.clear();
56
+ for (const [path, info] of newCache.entries()) {
57
+ this.cache.set(path, info);
58
+ }
59
+ }
60
+
61
+ @measureFnc
62
+ private async ensureInitialized(): Promise<void> {
63
+ if (this.initialized) return;
64
+
65
+ // First try to load from index file
66
+ await this.loadIndex();
67
+
68
+ // If index is empty or missing, fall back to scanning all files
69
+ if (this.cache.size === 0) {
70
+ console.log("Index file missing or empty, scanning all files...");
71
+ await this.rebuildCacheFromFiles();
72
+ // Save the newly built index
73
+ await this.saveIndex();
74
+ }
75
+
41
76
  this.initialized = true;
42
77
 
43
78
  // Cleanup if we're already over limits
@@ -83,12 +118,18 @@ class ArchivesLimitedCache {
83
118
  currentFiles--;
84
119
  }
85
120
  }
121
+
122
+ // Trigger index flush if any files were deleted
123
+ if (currentFiles < totalFiles) {
124
+ this.triggerIndexFlush();
125
+ }
86
126
  }
87
127
 
88
128
  private updateAccessTime(path: string): void {
89
129
  const info = this.cache.get(path);
90
130
  if (info) {
91
131
  info.accessTime = Date.now();
132
+ this.triggerIndexFlush();
92
133
  }
93
134
  }
94
135
 
@@ -109,12 +150,14 @@ class ArchivesLimitedCache {
109
150
  accessTime: now,
110
151
  size: data.length,
111
152
  });
153
+ this.triggerIndexFlush();
112
154
  }
113
155
 
114
156
  public async del(path: string): Promise<void> {
115
157
  await this.ensureInitialized();
116
158
  await this.baseArchives.del(path);
117
159
  this.cache.delete(path);
160
+ this.triggerIndexFlush();
118
161
  }
119
162
 
120
163
  public async getInfo(path: string): Promise<{ writeTime: number; size: number; } | undefined> {
@@ -164,6 +207,74 @@ class ArchivesLimitedCache {
164
207
  public getURL?: (path: string) => Promise<string>;
165
208
  public getDownloadAuthorization?: (config: { validDurationInSeconds: number }) => Promise<{ bucketId: string; fileNamePrefix: string; authorizationToken: string; }>;
166
209
 
210
+ private batchedFlushIndex = batchFunction(
211
+ { delay: 2000 }, // Flush index changes every 2 seconds
212
+ async () => {
213
+ // We don't actually need the batched values, just flush the index
214
+ await this.saveIndex();
215
+ }
216
+ );
217
+
218
+ private async loadIndex(): Promise<void> {
219
+ try {
220
+ const indexBuffer = await this.baseArchives.get(this.indexPath);
221
+ if (!indexBuffer) {
222
+ // No existing index, will need to build it
223
+ return;
224
+ }
225
+
226
+ const indexData: IndexData = JSON.parse(indexBuffer.toString("utf8"));
227
+
228
+ // Load the cache from the index
229
+ this.cache.clear();
230
+ for (const [path, info] of Object.entries(indexData.files)) {
231
+ this.cache.set(path, info);
232
+ }
233
+
234
+ console.log(`Loaded index with ${this.cache.size} files`);
235
+ } catch (error) {
236
+ console.warn(`Failed to load index file:`, error);
237
+ // Continue without index, will rebuild
238
+ }
239
+ }
240
+
241
+ private async saveIndex(): Promise<void> {
242
+ try {
243
+ const indexData: IndexData = {
244
+ files: Object.fromEntries(this.cache.entries())
245
+ };
246
+
247
+ const indexBuffer = Buffer.from(JSON.stringify(indexData), "utf8");
248
+ await this.baseArchives.set(this.indexPath, indexBuffer);
249
+ } catch (error) {
250
+ console.warn(`Failed to save index file:`, error);
251
+ }
252
+ }
253
+
254
+ private triggerIndexFlush(): void {
255
+ void this.batchedFlushIndex(undefined);
256
+ }
257
+
258
+ private initPeriodicSync(): void {
259
+ // Sync index with actual files every hour to prevent drift
260
+ runInfinitePoll(timeInHour, async () => {
261
+ await this.syncIndexWithFiles();
262
+ });
263
+ }
264
+
265
+ private async syncIndexWithFiles(): Promise<void> {
266
+ try {
267
+ console.log("Syncing index with actual files...");
268
+ await this.rebuildCacheFromFiles();
269
+
270
+ // Save updated index
271
+ await this.saveIndex();
272
+ console.log(`Index synced with ${this.cache.size} actual files`);
273
+ } catch (error) {
274
+ console.warn("Failed to sync index with files:", error);
275
+ }
276
+ }
277
+
167
278
  private initOptionalMethods(): void {
168
279
  this.getURL = this.baseArchives.getURL;
169
280
  this.getDownloadAuthorization = this.baseArchives.getDownloadAuthorization;
@@ -4,6 +4,7 @@ import { Archives } from "./archives";
4
4
  import { cache, lazy } from "socket-function/src/caching";
5
5
  import { delay } from "socket-function/src/batching";
6
6
  import { isDefined } from "../misc";
7
+ import { formatTime } from "socket-function/src/formatting/format";
7
8
 
8
9
  let getRootDirectory = lazy(async () => {
9
10
  await navigator.storage.persist();
@@ -168,9 +169,11 @@ class ArchivesPrivateFileSystem {
168
169
  }
169
170
 
170
171
  try {
172
+ let readDirTime = Date.now();
171
173
  // Start from the root directory of our namespace
172
174
  const rootDirectory = await this.getOrCreateDirectory("");
173
175
  await readDir(rootDirectory, "");
176
+ console.log(`readDir took ${formatTime(Date.now() - readDirTime)}`);
174
177
 
175
178
  let results = config?.type === "folders" ? folderNames : fileNames;
176
179
  results = results.filter(name => name.startsWith(prefix));
@@ -16,6 +16,9 @@ import { requiresNetworkTrustHook } from "../-d-trust/NetworkTrust2";
16
16
  import { isNoNetwork } from "../config";
17
17
  import { getDebuggerUrl } from "../diagnostics/listenOnDebugger";
18
18
  import { hackDevtoolsWebsocketForward } from "./oneTimeForward";
19
+ import { getOwnMachineId, decodeNodeId, decodeNodeIdAssert } from "../-a-auth/certs";
20
+ import { sort } from "socket-function/src/misc";
21
+ import { getPathStr2 } from "../path";
19
22
 
20
23
  let loadTime = Date.now();
21
24
 
@@ -71,25 +74,37 @@ export async function getControllerNodeIdList(
71
74
  controller: SocketRegistered<{}>,
72
75
  ): Promise<{ nodeId: string; entryPoint: string }[]> {
73
76
  let nodeIdsToTest = await getAllNodeIds();
74
- let passedNodeIds = new Map<string, string>();
77
+ let passedNodeIds = new Map<string, { machineId: string; entryPoint: string }>();
75
78
  await Promise.all(nodeIdsToTest.map(async nodeId => {
76
- if (await doesNodeExposeController(nodeId, controller)) {
77
- let entryPoint = await timeoutToUndefinedSilent(10_000, NodeCapabilitiesController.nodes[nodeId].getEntryPoint()) || "Entry point timedout";
78
- passedNodeIds.set(nodeId, entryPoint);
79
+ let result = await doesNodeExposeController(nodeId, controller);
80
+ if (result) {
81
+ passedNodeIds.set(nodeId, result);
79
82
  }
80
83
  }));
81
84
 
82
- let results = Array.from(passedNodeIds.entries()).map(([nodeId, entryPoint]) => ({ nodeId, entryPoint }));
83
- // Ignore the special local IDs, otherwise we'll be returning duplicates. And our caller probably doesn't want the fastest path, they probably just want every path.
84
- // TODO: We should detect if the local ID and the remote ID is the same and then always pick the local ID instead. However, I have no idea how to do this as it doesn't include the machine ID, just the port. So how do we know if they're the same? I think we actually have to talk to them and identify their machine IDs, which we need to cache, which then takes longer, et cetera, et cetera.
85
- results = results.filter(x => !x.nodeId.startsWith("127-0-0-1."));
86
- return results;
85
+ let results = Array.from(passedNodeIds.entries());
86
+ // Prefer localhost connections as they're faster.
87
+ sort(results, (x) => x[0].startsWith("127-0-0-1.") ? 0 : 1);
88
+ let lookup = new Map<string, { nodeId: string; entryPoint: string }>();
89
+ for (let x of results) {
90
+ let key = getPathStr2(x[1].machineId, decodeNodeIdAssert(x[0]).port.toString());
91
+ if (key in lookup) continue;
92
+ lookup.set(key, { nodeId: x[0], entryPoint: x[1].entryPoint });
93
+ }
94
+
95
+ return Array.from(lookup.values());
87
96
  }
88
97
 
89
98
 
90
- export async function doesNodeExposeController(reconnectNodeId: string, controller: SocketRegistered<{}>): Promise<boolean> {
99
+ export async function doesNodeExposeController(reconnectNodeId: string, controller: SocketRegistered<{}>): Promise<{ machineId: string, entryPoint: string } | false> {
91
100
  let exposedControllers = await timeoutToUndefinedSilent(10_000, NodeCapabilitiesController.nodes[reconnectNodeId].getExposedControllers());
92
- return !!exposedControllers?.includes(controller._classGuid);
101
+ let machineId = await getOwnMachineId();
102
+ let entryPoint = await new NodeCapabilitiesControllerBase().getEntryPoint();
103
+
104
+ if (exposedControllers?.includes(controller._classGuid)) {
105
+ return { machineId, entryPoint };
106
+ }
107
+ return false;
93
108
  }
94
109
 
95
110
  const startupTime = Date.now();
@@ -3,6 +3,7 @@ import { requiresNetworkTrustHook } from "../-d-trust/NetworkTrust2";
3
3
  import { isDevDebugbreak } from "../config";
4
4
  import { measureWrap } from "socket-function/src/profiling/measure";
5
5
  import { QueueLimited } from "socket-function/src/misc";
6
+ import { isNode } from "typesafecss";
6
7
 
7
8
  export type DebugLog = {
8
9
  type: string;
@@ -12,7 +13,8 @@ export type DebugLog = {
12
13
 
13
14
  // NOTE: For now, this is actually fairly light, so we'll just turn it on by default. It is very useful
14
15
  // in debugging synchronization issues.
15
- let ENABLED_LOGGING = true;
16
+ // NOTE: Clientside it is not quite as light, so... don't turn it on clientside, as we don't really use it anyways.
17
+ let ENABLED_LOGGING = isNode();
16
18
  export function enableAuditLogging() {
17
19
  ENABLED_LOGGING = true;
18
20
  debugLogFnc = debugLogBase;
@@ -25,7 +27,7 @@ export function isDebugLogEnabled() {
25
27
  return ENABLED_LOGGING;
26
28
  }
27
29
 
28
- let logHistory = new QueueLimited<DebugLog>(1000 * 1000 * 10);
30
+ let logHistory = new QueueLimited<DebugLog>(1000 * 500);
29
31
  export function getFullLogHistory() {
30
32
  return logHistory;
31
33
  }
@@ -212,6 +212,9 @@ export interface WatcherOptions<Result> {
212
212
  logSyncTimings?: boolean;
213
213
 
214
214
  maxLocksOverride?: number;
215
+
216
+ // NOTE: The reason there isn't throttle support here is very frequently when you want to throttle one component rendering, it's because you have many components. So you actually want to throttle many components and have them throttle in conjunction with each other, which results in the logic becoming complicated.
217
+ // - But maybe we should support the single throttle case anyways?
215
218
  }
216
219
 
217
220
  let harvestableReadyLoopCount = 0;
@@ -228,7 +228,7 @@ export class PathFunctionRunner {
228
228
 
229
229
  // Rarely we might need to queue a function multiple times, when we are late to receive rejections.
230
230
  // HOWEVER, after enough times, we should stop, as we will probably just infinitely queue it.
231
- const MAX_QUEUE_COUNT = 25;
231
+ const MAX_QUEUE_COUNT = 100;
232
232
  let queueLimitCounts = new Map<string, number>();
233
233
  // Clear every hour, so we don't leave
234
234
  runInfinitePoll(timeInHour, () => queueLimitCounts.clear());
@@ -323,7 +323,7 @@ export class PathFunctionRunner {
323
323
  if (limitCount >= MAX_QUEUE_COUNT) {
324
324
  // Only error the first time, as we don't need need that many errors
325
325
  if (limitCount === MAX_QUEUE_COUNT) {
326
- console.error(`Queue limit reached (${limitCount}) for ${getDebugName(callData, functionSpec, true)}`);
326
+ console.error(`Tried to requeue a function to run too many times (${limitCount}) for ${getDebugName(callData, functionSpec, true)}. This is NOT due to cascading reads. It might be due to repeated rejections?`);
327
327
  }
328
328
  continue;
329
329
  }
@@ -770,7 +770,7 @@ export class Querysub {
770
770
  return `<script>${edgeBootstrapFile}</script>`;
771
771
  });
772
772
 
773
- if (!noSyncing()) {
773
+ if (!noSyncing() && !isBootstrapOnly()) {
774
774
  SocketFunction.expose(QuerysubController);
775
775
  }
776
776
 
@@ -1,4 +1,4 @@
1
- import { formatDateTime, formatNiceDateTime, formatNumber, formatPercent, formatTime, formatVeryNiceDateTime } from "socket-function/src/formatting/format";
1
+ import { formatDateTime, formatDateTimeDetailed, formatNiceDateTime, formatNumber, formatPercent, formatTime, formatVeryNiceDateTime } from "socket-function/src/formatting/format";
2
2
  import { css } from "typesafecss";
3
3
  import { canHaveChildren } from "socket-function/src/types";
4
4
  import { qreact } from "../4-dom/qreact";
@@ -44,7 +44,7 @@ let formatters: { [formatter in StringFormatters]: (value: unknown) => preact.Co
44
44
  number: (value) => d(value, formatNumber(Number(value))),
45
45
  percent: (value) => d(value, formatPercent(Number(value))),
46
46
  timeSpan: (value) => d(value, formatTime(Number(value))),
47
- date: (value) => d(value, formatVeryNiceDateTime(Number(value))),
47
+ date: (value) => d(value, <span title={formatDateTimeDetailed(Number(value))}>{formatVeryNiceDateTime(Number(value))}</span>),
48
48
  error: (value) => d(value, <span class={errorMessage}>{String(value)}</span>),
49
49
  toSpaceCase: (value) => d(value, toSpaceCase(String(value))),
50
50
  "<Selector>": (value) => d(value, <Selector {...JSON.parse(String(value).slice("<Selector>".length))} />),
@@ -24,9 +24,11 @@ import { shutdown } from "../diagnostics/periodic";
24
24
  import { onServiceConfigChange, triggerRollingUpdate } from "./machineController";
25
25
  import { PromiseObj } from "../promise";
26
26
  import path from "path";
27
+ import { fsExistsAsync } from "../fs";
27
28
 
28
29
  const PIPE_FILE_LINE_LIMIT = 10_000;
29
30
 
31
+
30
32
  const getMemoryInfo = measureWrap(async function getMemoryInfo(): Promise<{ value: number; max: number } | undefined> {
31
33
  if (os.platform() === "win32") {
32
34
  throw new Error("Windows is not supported for machine resource monitoring");
@@ -379,7 +381,7 @@ const getScreenState = measureWrap(async function getScreenState(populateIsProce
379
381
  });
380
382
  async function removeOldNodeId(screenName: string) {
381
383
  let nodeIdFile = os.homedir() + "/" + SERVICE_FOLDER + screenName + "/" + SERVICE_NODE_FILE_NAME;
382
- if (fs.existsSync(nodeIdFile)) {
384
+ if (await fsExistsAsync(nodeIdFile)) {
383
385
  let nodeId = await fs.promises.readFile(nodeIdFile, "utf8");
384
386
  console.log(green(`Removing node if for dead service on ${nodeIdFile}, node id ${nodeId}`));
385
387
  await fs.promises.unlink(nodeIdFile);
@@ -416,7 +418,7 @@ const runScreenCommand = measureWrap(async function runScreenCommand(config: {
416
418
  if (existingScreen && !rollingObj && await isScreenRunningProcess(existingScreen.pid)) {
417
419
  let nodeIdPath = os.homedir() + "/" + SERVICE_FOLDER + screenName + "/" + SERVICE_NODE_FILE_NAME;
418
420
  let rollingFinalTime = Date.now() + config.rollingWindow;
419
- if (fs.existsSync(nodeIdPath)) {
421
+ if (await fsExistsAsync(nodeIdPath)) {
420
422
  let nodeId = await fs.promises.readFile(nodeIdPath, "utf8");
421
423
  // REMOVE the nodeId file, so we the node isn't terminated!
422
424
  await fs.promises.unlink(nodeIdPath);
@@ -551,7 +553,7 @@ const ensureGitSynced = measureWrap(async function ensureGitSynced(config: {
551
553
  repoUrl: string;
552
554
  gitRef: string;
553
555
  }) {
554
- if (!await fs.existsSync(config.gitFolder + ".git")) {
556
+ if (!await fsExistsAsync(config.gitFolder + ".git")) {
555
557
  await runPromise(`git clone ${config.repoUrl} ${config.gitFolder}`);
556
558
  }
557
559
  try {
@@ -595,7 +597,7 @@ async function quickIsOutdated() {
595
597
  let folder = root + screenName + "/";
596
598
  await fs.promises.mkdir(folder, { recursive: true });
597
599
  let parameterPath = folder + "/parameters.json";
598
- if (!fs.existsSync(parameterPath)) return true;
600
+ if (!await fsExistsAsync(parameterPath)) return true;
599
601
  let prevParameters = await fs.promises.readFile(parameterPath, "utf8");
600
602
  if (prevParameters !== newParametersString) return true;
601
603
  }
@@ -654,7 +656,7 @@ const resyncServicesBase = runInSerial(measureWrap(async function resyncServices
654
656
  }
655
657
  let parameterPath = folder + "/parameters.json";
656
658
  let prevParameters = "";
657
- if (fs.existsSync(parameterPath)) {
659
+ if (await fsExistsAsync(parameterPath)) {
658
660
  prevParameters = await fs.promises.readFile(parameterPath, "utf8");
659
661
  }
660
662
  let newParametersString = JSON.stringify(config.parameters);
@@ -673,7 +675,7 @@ const resyncServicesBase = runInSerial(measureWrap(async function resyncServices
673
675
  await fs.promises.writeFile(parameterPath, newParametersString);
674
676
 
675
677
  let nodePathId = folder + SERVICE_NODE_FILE_NAME;
676
- if (fs.existsSync(nodePathId)) {
678
+ if (await fsExistsAsync(nodePathId)) {
677
679
  let nodeId = await fs.promises.readFile(nodePathId, "utf8");
678
680
  machineInfo.services[config.serviceId].nodeId = nodeId;
679
681
  }
@@ -788,7 +790,7 @@ export async function machineApplyMain() {
788
790
 
789
791
  // NOTE: Error's don't get logged unless we host, so... we can't do this earlier than this...
790
792
  let lastErrorPath = os.homedir() + "/lastAlwaysUpError.txt";
791
- if (fs.existsSync(lastErrorPath)) {
793
+ if (await fsExistsAsync(lastErrorPath)) {
792
794
  let lastError = await fs.promises.readFile(lastErrorPath, "utf8");
793
795
  await fs.promises.unlink(lastErrorPath);
794
796
  console.error(`Always up error: ${lastError}`);
@@ -796,7 +798,7 @@ export async function machineApplyMain() {
796
798
 
797
799
  // Kill the last running one
798
800
  let isRunningPath = os.homedir() + "/machineApplyPID.txt";
799
- if (fs.existsSync(isRunningPath)) {
801
+ if (await fsExistsAsync(isRunningPath)) {
800
802
  let pid = await fs.promises.readFile(isRunningPath, "utf8");
801
803
  try {
802
804
  process.kill(parseInt(pid), "SIGKILL");
@@ -23,6 +23,7 @@ import { DeployProgress, deployFunctions, deployGetFunctions } from "../4-deploy
23
23
  import { FunctionSpec, functionSchema } from "../3-path-functions/PathFunctionRunner";
24
24
  import { Querysub } from "../4-querysub/QuerysubController";
25
25
  import { green, red } from "socket-function/src/formatting/logColors";
26
+ import { fsExistsAsync } from "../fs";
26
27
 
27
28
  const SERVICE_FOLDER_NAME = "machine-services";
28
29
  export const SERVICE_FOLDER = `${SERVICE_FOLDER_NAME}/`;
@@ -240,7 +241,7 @@ export class MachineServiceControllerBase {
240
241
  let querysubFolder = path.resolve("../querysub");
241
242
  let querysubRef = "";
242
243
  let querysubUncommitted: string[] = [];
243
- if (fs.existsSync(querysubFolder)) {
244
+ if (await fsExistsAsync(querysubFolder)) {
244
245
  querysubRef = await getGitRefLive(querysubFolder);
245
246
  querysubUncommitted = await getGitUncommitted(querysubFolder);
246
247
  }
@@ -276,7 +277,7 @@ export class MachineServiceControllerBase {
276
277
  public async commitPushService(commitMessage: string) {
277
278
  if (commitMessage.toLowerCase().includes("querysub")) {
278
279
  let querysubFolder = path.resolve("../querysub");
279
- if (fs.existsSync(querysubFolder)) {
280
+ if (await fsExistsAsync(querysubFolder)) {
280
281
  let querysubLastCommit = await getGitRefInfo({
281
282
  gitDir: querysubFolder,
282
283
  ref: "origin/main",
@@ -293,7 +294,7 @@ export class MachineServiceControllerBase {
293
294
  }
294
295
  public async commitPushAndPublishQuerysub(commitMessage: string) {
295
296
  let querysubFolder = path.resolve("../querysub");
296
- if (!fs.existsSync(querysubFolder)) {
297
+ if (!await fsExistsAsync(querysubFolder)) {
297
298
  throw new Error(`Querysub folder does not exist at ${querysubFolder}`);
298
299
  }
299
300
 
@@ -6,6 +6,7 @@ import fs from "fs";
6
6
  import os from "os";
7
7
  import readline from "readline";
8
8
  import open from "open";
9
+ import { fsExistsAsync } from "../fs";
9
10
  // Import querysub, to fix missing dependencies
10
11
  Querysub;
11
12
 
@@ -32,7 +33,7 @@ async function getGitHubApiKey(repoUrl: string, sshRemote: string): Promise<stri
32
33
  const cacheFile = os.homedir() + `/githubkey_${repoOwner}_${repoName}.json`;
33
34
 
34
35
  // Check if we have a cached key
35
- if (fs.existsSync(cacheFile)) {
36
+ if (await fsExistsAsync(cacheFile)) {
36
37
  try {
37
38
  const cached = JSON.parse(fs.readFileSync(cacheFile, "utf8"));
38
39
  if (cached.apiKey) {
@@ -222,7 +223,7 @@ async function main() {
222
223
 
223
224
  // 1. Copy backblaze file to remote server (~/backblaze.json)
224
225
  console.log("Copying backblaze credentials...");
225
- if (fs.existsSync(backblazePath)) {
226
+ if (await fsExistsAsync(backblazePath)) {
226
227
  await runPromise(`scp "${backblazePath}" ${sshRemote}:~/backblaze.json`);
227
228
  console.log("✅ Backblaze credentials copied");
228
229
  } else {