querysub 0.328.0 → 0.330.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,8 +1,291 @@
- type DigestInfo = {
- histogram: {
- [timeGroup: number]: number;
+ import { insertIntoSortedList, list, timeInDay, timeInHour } from "socket-function/src/misc";
+ import { runScheduler } from "../../../misc2";
+ import { getErrorAppendables, getSuppressionFull, getSuppressionListRaw, suppressionList } from "./ErrorNotificationController";
+ import { createLogScanner } from "../FastArchiveAppendable";
+ import { LogDatum, getLogFile, getLoggers } from "../diskLogger";
+ import { FastArchiveAppendableControllerBase, FileMetadata } from "../FastArchiveController";
+ import { httpsRequest } from "socket-function/src/https";
+ import { Zip } from "../../../zip";
+ import { encodeCborx } from "../../../misc/cloneHelpers";
+ import { archiveJSONT } from "../../../-a-archives/archivesJSONT";
+ import { nestArchives } from "../../../-a-archives/archives";
+ import { getArchivesBackblaze } from "../../../-a-archives/archivesBackBlaze";
+ import { getDomain } from "../../../config";
+ import { getOwnMachineId, getOwnThreadId } from "../../../-a-auth/certs";
+ import { sendErrorDigestEmail } from "./errorDigestEmail";
+ import { archiveCborT } from "../../../-a-archives/archivesCborT";
+ import { Querysub } from "../../../4-querysub/Querysub";
+ import { sendEmail } from "../../../user-implementation/userData";
+ import { qreact } from "../../../4-dom/qreact";
+ import { getSyncedController } from "../../../library-components/SyncedController";
+ import { SocketFunction } from "socket-function/SocketFunction";
+ import { assertIsManagementUser } from "../../managementPages";
+
+ const LATEST_ERRORS_COUNT_PER_FILE = 20;
+
+ export type ErrorDigestInfo = {
+ key: string;
+ // timeGroupEnd =>
+ histogram: Map<number, {
+ suppressedErrors: number;
+ unsuppressedErrors: number;
+ suppressedWarnings: number;
+ unsuppressedWarnings: number;
+
+ corruptErrors: number;
+ corruptWarnings: number;
+
+ firstCorruptError?: string;
+ firstCorruptWarning?: string;
+ }>;
+ // file =>
+ byFile: Map<string, {
+ errors: number;
+ warnings: number;
+ // Sorted from oldest to newest
+ latestErrors: LogDatum[];
+ latestWarnings: LogDatum[];
+ }>;
+ totalCompressedBytes: number;
+ totalUncompressedBytes: number;
+ totalFiles: number;
+
+ scanDuration: number;
+ scanStartTime: number;
+ scanEndTime: number;
+ startTime: number;
+ endTime: number;
+ };
+ class ErrorDigest {
+ public async getDigestKeys() {
+ return errorDigestHistory.keys();
+ }
+ public async getDigest(key: string) {
+ return errorDigestHistory.get(key);
+ }
+ }
+
+ export const ErrorDigestController = getSyncedController(SocketFunction.register(
+ "ErrorDigestController-e5996b95-dcfc-412e-a104-80ed2c2d5933",
+ new ErrorDigest(),
+ () => ({
+ getDigestKeys: {},
+ getDigest: {},
+ }),
+ () => ({
+ hooks: [assertIsManagementUser],
+ })
+ ));
+
+ export const errorDigestHistory = archiveCborT<ErrorDigestInfo>(() => nestArchives("error-digests/", getArchivesBackblaze(getDomain())));
+
+ function getClosest(value: number, choices: number[]) {
+ let dist = Number.POSITIVE_INFINITY;
+ let closest: number = choices[0];
+ for (let choice of choices) {
+ let curDist = Math.abs(value - choice);
+ if (curDist < dist) {
+ dist = curDist;
+ closest = choice;
+ }
+ }
+ return closest;
+ }
+
+ async function runDigest() {
+ console.log("Running error digest gathering");
+ // Find the previous day
+ let endTime = getClosest(
+ Date.now(),
+ [
+ new Date().setHours(11),
+ new Date(Date.now() - timeInDay).setHours(11),
+ ]
+ );
+ let endDate = new Date(endTime);
+ endDate.setMinutes(0);
+ endDate.setSeconds(0);
+ endDate.setMilliseconds(0);
+ endTime = endDate.getTime();
+ let startTime = new Date(endTime - timeInDay).getTime();
+ let scanStartTime = Date.now();
+
+ let digestInfo: ErrorDigestInfo = {
+ key: `${Date.now()}_${getOwnMachineId()}_${getOwnThreadId()}`,
+ histogram: new Map(),
+ byFile: new Map(),
+ scanDuration: 0,
+ scanStartTime,
+ scanEndTime: Date.now(),
+ startTime,
+ endTime,
+ totalCompressedBytes: 0,
+ totalUncompressedBytes: 0,
+ totalFiles: 0,
  };
- topByFile: {
- [file: string]: number;
- };
- };
+
+ let entries = await getSuppressionListRaw();
+ let suppressionFull = getSuppressionFull({
+ entries: Object.values(entries.entries),
+ blockTimeRange: {
+ startTime,
+ endTime,
+ },
+ });
+
+ function getTimeGroup(time: number) {
+ return Math.floor((time - startTime) / timeInHour) * timeInHour + startTime;
+ }
+
+
+ let appendables = getErrorAppendables();
+ for (let appendable of appendables) {
+ let isError = true;
+ if (appendable.rootPath.includes("warn")) {
+ isError = false;
+ } else if (appendable.rootPath.includes("error")) {
+ isError = true;
+ } else {
+ throw new Error(`Unhandled appendable root path: ${appendable.rootPath}`);
+ }
+ function registerCount(time: number, isSuppressed: boolean, corruptError?: string) {
+ let timeGroup = getTimeGroup(time);
+ let obj = digestInfo.histogram.get(timeGroup);
+ if (!obj) {
+ obj = {
+ suppressedErrors: 0,
+ unsuppressedErrors: 0,
+ suppressedWarnings: 0,
+ unsuppressedWarnings: 0,
+ corruptErrors: 0,
+ corruptWarnings: 0,
+ };
+ digestInfo.histogram.set(timeGroup, obj);
+ }
+ if (isError) {
+ if (isSuppressed) {
+ obj.suppressedErrors++;
+ } else {
+ obj.unsuppressedErrors++;
+ }
+ if (corruptError) {
+ obj.corruptErrors++;
+ if (!obj.firstCorruptError) {
+ obj.firstCorruptError = corruptError;
+ }
+ }
+ } else {
+ if (isSuppressed) {
+ obj.suppressedWarnings++;
+ } else {
+ obj.unsuppressedWarnings++;
+ }
+ if (corruptError) {
+ obj.corruptWarnings++;
+ if (!obj.firstCorruptWarning) {
+ obj.firstCorruptWarning = corruptError;
+ }
+ }
+ }
+ }
+ console.log(`Gathering files for ${appendable.rootPath}`);
+ let result = await new FastArchiveAppendableControllerBase().startSynchronizeInternal({
+ range: {
+ startTime,
+ endTime,
+ },
+ rootPath: appendable.rootPath,
+ });
+ let filesLeft = result.files.slice();
+ await Promise.all(list(32).map(() => runThread()));
+ async function runThread() {
+ while (true) {
+ let file = filesLeft.shift();
+ if (!file) {
+ return;
+ }
+ await processFile(file);
+ }
+ }
+ async function processFile(file: FileMetadata) {
+ try {
+ console.log(`Processing file ${file.path}`);
+ let compressed = await httpsRequest(file.url);
+ let data = await Zip.gunzip(compressed);
+ digestInfo.totalCompressedBytes += compressed.length;
+ digestInfo.totalUncompressedBytes += data.length;
+ digestInfo.totalFiles++;
+
+ let callback = createLogScanner({
+ debugName: "digestScanner",
+ onParsedData: (posStart, posEnd, buffer) => {
+ if (buffer === "done") {
+ return;
+ }
+ let result = suppressionFull(posStart, posEnd, buffer);
+ if (!result) {
+ registerCount(file.endTime, true);
+ return;
+ }
+
+ let datum: LogDatum;
+ try {
+ datum = JSON.parse(buffer.slice(posStart, posEnd).toString()) as LogDatum;
+ } catch (e: any) {
+ let message = `Failed to parse log datum in around ${buffer.slice(posStart, posEnd).slice(0, 100).toString("hex")}, error is:\n${e.stack}`;
+ process.stderr.write(message);
+ registerCount(file.endTime, false, message);
+ return;
+ }
+ registerCount(datum.time, false);
+
+ let fileGroup = getLogFile(datum);
+ let obj = digestInfo.byFile.get(fileGroup);
+ if (!obj) {
+ obj = {
+ errors: 0,
+ warnings: 0,
+ latestErrors: [],
+ latestWarnings: [],
+ };
+ digestInfo.byFile.set(fileGroup, obj);
+ }
+ if (isError) {
+ obj.errors++;
+ } else {
+ obj.warnings++;
+ }
+ let list = isError ? obj.latestErrors : obj.latestWarnings;
+ if (list.length === 0 || datum.time >= list[0].time) {
+ // NOTE: This should almost never trigger, so the search, and even worse, the splice, should almost never happen
+ insertIntoSortedList(list, x => x.time, datum);
+ if (list.length > LATEST_ERRORS_COUNT_PER_FILE) {
+ list.splice(0, list.length - LATEST_ERRORS_COUNT_PER_FILE);
+ }
+ }
+ },
+ });
+ await callback(data);
+ await callback("done");
+ } catch (e: any) {
+ console.warn(`Failed to process file ${file.path}, error: ${e.stack}`);
+ }
+ let progress = result.files.length - filesLeft.length + 1;
+ console.log(`Processed file ${file.path} (${progress} / ${result.files.length}) in ${appendable.rootPath}`);
+ }
+ }
+
+
+ let scanEndTime = Date.now();
+ digestInfo.scanDuration = scanEndTime - scanStartTime;
+ digestInfo.scanEndTime = scanEndTime;
+ await errorDigestHistory.set(digestInfo.key, digestInfo);
+ await sendErrorDigestEmail(digestInfo);
+ }
+
+ export async function runDigestLoop() {
+ await Querysub.hostService("error-digests");
+
+ // TODO: We might want to change the scheduler to run only on some days, adding support for weekday filtering as well (e.g., just Monday, Wednesday, Friday)
+ await runScheduler([12], runDigest);
+ }
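For context on the histogram above: `runDigest` buckets every error and warning timestamp into hour-wide groups anchored at `startTime` via `getTimeGroup`. A minimal sketch of that bucketing, assuming `timeInHour` is one hour in milliseconds (the anchor date and sample values are illustrative, not from the package):

```ts
// Minimal sketch of the hour bucketing used by runDigest above.
// Assumes timeInHour is one hour in milliseconds; sample values are illustrative.
const timeInHour = 60 * 60 * 1000;
const startTime = Date.UTC(2024, 0, 1);

// Floors a timestamp to its hour group, relative to startTime.
function getTimeGroup(time: number) {
    return Math.floor((time - startTime) / timeInHour) * timeInHour + startTime;
}

const histogram = new Map<number, number>();
for (const t of [startTime + 1000, startTime + timeInHour / 2, startTime + timeInHour + 1]) {
    const group = getTimeGroup(t);
    histogram.set(group, (histogram.get(group) ?? 0) + 1);
}
console.log([...histogram.values()]); // [2, 1]: two samples in hour 0, one in hour 1
```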
@@ -10,7 +10,7 @@ import { LogDatum, getLogFile } from "../diskLogger";
  import { sendDiscordMessage } from "../../../email_ims_notifications/discord";
  import { user_data } from "../../../user-implementation/userData";
  import { createLink } from "../../../library-components/ATag";
- import { getLogsLinkParts } from "./ErrorWarning";
+ import { getErrorLogsLink } from "./ErrorWarning";

  const MAX_IMS_PER_DAY = 3;
  const MAX_IMS_PER_HOURS = 1;
@@ -20,32 +20,8 @@ const MAX_PER_IM = 10;

  // Wait a bit, because it's likely if there's one error, there are more errors.
  const BATCH_TIME = timeInSecond * 30;
- const DIGEST_INTERVAL = timeInDay * 3;
-
-
- // 4) Deploy
- // 4) Create local errors and make sure the remote server doesn't get them
- // 5) Create remote errors and make sure that the all servers get them (Local and remote servers)
-
-
- // Abstract out RecentErrors scanning, so we can reuse it
- // (appendables, timeRange, callback)
- // - Oh, we also need a way to tell it to not limit to recent errors.
- // - And also to disable throttling.
-
-
- // createDigest(appendables, timeRange) => Promise<DigestInfo>
-
- // 7) Write the digest script, which does a fresh scan on just a specific range of data using start synchronize internal and suppression list, etc. etc.
- // - The most recent ten unsuppressed errors. Counts of unsuppressed errors by file. Or by line id? If they have it. Counts of suppressed and unsuppressed errors by hour group.
- // - Oh, also everything we do for errors do for warnings as well, and split the values. We want warnings and errors.
- // - Time the entire thing, and put that, and the profile, in the digest too! That will give us a good gauge on if the errors/suppressions are getting slow (due to a lot of errors, or a lot of suppression checks!)
- // store the digest results in Back Blaze.
- // 8) Write a page that will show the results from Backblaze, allowing you to see a list of all of the digests and to view a specific one. Just showing all the data in a chart, whatever. Showed two charts, one with just the unsuppressed errors, and the other with both suppressed and unsuppressed, because you really only care about suppressed when you're looking at the fraction of suppressed to unsuppressed, and the sheer quantity of unsuppressed.
- // 9) send an email every time period, and also send an IM that has smaller information
- // - Both will link to the actual web page that has the digest, deep linking to the specific tabs.
- // - Show the chart in the email as well, but just format it like ASCII Because image dependencies are annoying and I don't want to implement them right now as it might take a few days to get working.

+ // 11) Deploy services to service


  // NOTE: Yes, this is stored in memory, so if the server reboots or if this script keeps crashing, we might send a lot of instant messages. However, one, Discord will probably rate limit us, and two, this means something is really wrong, especially if it happens a lot, and we really should fix it right away.
@@ -142,7 +118,7 @@ const sendIMs = batchFunction(({ delay: BATCH_TIME }), async (logsAll: LogDatum[
  console.error(`No Discord webhook URL set, cannot send warning instant messages`);
  return;
  }
- let url = createLink(getLogsLinkParts());
+ let url = createLink(getErrorLogsLink());
  let message = Object.values(info.perFile).flat().map(
  x => `[${formatDateTime(x.time)}](${url}) | ${x.param0} (${x.__NAME__})`
  ).join("\n");
@@ -184,7 +160,6 @@ async function runIMNotifies() {
  }
  threadIdMap.add(obj.time);
  return false;
-
  }
  function clearOldDays() {
  // Clear all the days that are more than 14 days older than our current day.
@@ -4,24 +4,7 @@ Very small amount of data
  https://127-0-0-1.querysubtest.com:7007/?hot&enableLogs&page=login&filter=%22431%22&showingmanagement&endTime=1755140880000&startTime=1754950020000&managementpage=LogViewer2


- 5) IM error notifications - allow immediately knowing about production issues, for better testing
- - Create a dedicated entry point which acts like a client of the HTTP server, using RecentErrorControllers.getRecentErrors
- - Getting it working in a script will be interesting, but... in theory it should just work?
- - Just for new errors
- - Using backblaze to track when we send it, so we can heavily limit IMs and email
- - IM api key tracked in secrets (like email api key)
- - Once we get it working, deploy to production
-
- 6) IM + email digests (daily / weekly?)
- - a very short digest for the instant message which then links to a page on the site with a larger digest
- - which has tabs, and each part in the instant message links to the correct tab
- - Augments the error notifications entry point, having it also queue stuff up for digests.
- - Some notifications will never be immediate and will always be only in digests.
- - For now this will just be for:
- - non-suppressed errors
- - suppressed errors
-
-
+ AFTER digests, go back to adding application code, as the framework is getting boring...

  5) Life cycle analyzer
  - Implement regular range lifecycles first (matching an === object field)
@@ -95,6 +78,13 @@ Check the startup lifecycle to make sure we can detect the nodes pretty fast and
  - Receiving values from different authorities and the ones we're watching is weird. Why does that keep happening?
  - And we keep running into audit mismatches? Why does that keep happening? Is it only because of our local development server?

+ DEBUG: Deploy hash updates.
+ - Forced refresh now, and then immediately refreshing is STILL not giving us the latest code. Even though we waited for everything to reload the UI, which took forever.
+ - It's probably an issue with the routing information being out of date, I think it's cached in Cloudflare. We could at least use life cycles to verify the values we have, and then if they're different than the values in the client, then I guess it must be in Cloudflare. We can also verify our timing, as I'm pretty sure we're supposed to be waiting for the Cloudflare values to update, and if we're not, then that's a problem.
+
+ DEBUG: Suppression creation propagation
+ - It didn't propagate to all the servers?
+
  SPECIAL UI links for certain errors in log view
  - Probably dynamically created, based on contents of log
  - LINKS to filters for all these special errors on a special page
@@ -90,6 +90,12 @@ export async function registerManagementPages2(config: {
  componentName: "LogViewer2",
  getModule: () => import("./logs/LogViewer2"),
  });
+ inputPages.push({
+ title: "Error Digests",
+ componentName: "ErrorDigestPage",
+ controllerName: "ErrorDigestController",
+ getModule: () => import("./logs/errorNotifications/ErrorDigestPage"),
+ });
  inputPages.push({
  title: "Security",
  componentName: "SecurityPage",
@@ -181,22 +187,29 @@ export async function registerManagementPages2(config: {
  // Wait, so the import system knows the modules are async imports
  await delay(0);
  for (let page of inputPages) {
- // NOTE: If we split this into a module for component/controller, we need to make sure we
- // import both serverside, so we can whitelist them for import clientside.
- let mod = await page.getModule();
- if (!page.controllerName) continue;
- if (!(page.controllerName in mod)) {
- console.error(`Controller ${page.controllerName} not found in module`, mod);
- throw new Error(`Controller ${page.controllerName} not found in module`);
- }
- let controller = mod[page.controllerName] as SocketRegistered;
- if (!controller) {
- throw new Error(`Controller ${page.controllerName} not found in module`);
- }
- if (!controller._classGuid) {
- throw new Error(`Controller ${page.controllerName} does not have a class guid`);
+ try {
+ // NOTE: If we split this into a module for component/controller, we need to make sure we
+ // import both serverside, so we can whitelist them for import clientside.
+ let mod = await page.getModule();
+ if (!page.controllerName) continue;
+ if (!(page.controllerName in mod)) {
+ console.error(`Controller ${page.controllerName} not found in module`, mod);
+ throw new Error(`Controller ${page.controllerName} not found in module`);
+ }
+ let controller = mod[page.controllerName] as SocketRegistered;
+ if ((controller as any)?.__baseController) {
+ controller = (controller as any).__baseController;
+ }
+ if (!controller) {
+ throw new Error(`Controller ${page.controllerName} not found in module`);
+ }
+ if (!controller._classGuid) {
+ throw new Error(`Controller ${page.controllerName} does not have a class guid`);
+ }
+ SocketFunction.expose(controller);
+ } catch (e: any) {
+ console.error(`Error when registering management page ${page.controllerName} in ${page.componentName}: ${e.stack}`);
  }
- SocketFunction.expose(controller);
  }
  } else {
  for (let page of inputPages) {
@@ -8,7 +8,7 @@ import { renderToString } from "../library-components/renderToString";

  export async function sendEmail_postmark(config: {
  apiKey: string;
- to: string;
+ to: string[];
  from: string;
  subject: string;
  contents: preact.VNode;
@@ -18,13 +18,13 @@ export async function sendEmail_postmark(config: {
  if (Querysub.isInSyncedCall()) {
  throw new Error("sendEmail_sendgrid should not be called in a synced call, as this might result in multiple sends. Instead, use Querysub.onCommitFinished to call after the synced call");
  }
- console.log(`${magenta("Sending email")} to ${green(config.to)} with subject ${config.subject}`);
+ console.log(`${magenta("Sending email")} to ${green(config.to.join(", "))} with subject ${config.subject}`);
  let htmlContent = renderToString(config.contents);
  await httpsRequest(
  "https://api.postmarkapp.com/email",
  Buffer.from(JSON.stringify({
  From: config.from,
- To: config.to,
+ To: config.to.join(","),
  Subject: config.subject,
  HtmlBody: htmlContent,
  })),
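With `to` now an array, the Postmark `To` field (which accepts a comma-separated list of addresses) is built via `config.to.join(",")`. A hypothetical call under the new signature; the addresses, subject, and environment-variable name are illustrative, and `h` is preact's createElement:

```ts
import { h } from "preact";

// Hypothetical usage of the new array-based `to` field; values are illustrative.
await sendEmail_postmark({
    apiKey: process.env.POSTMARK_API_KEY!,
    to: ["ops@example.com", "dev@example.com"], // sent as "ops@example.com,dev@example.com"
    from: "alerts@example.com",
    subject: "Nightly error digest",
    contents: h("p", null, "Digest attached."),
});
```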
@@ -109,7 +109,7 @@ export function getSyncedController<T extends SocketRegistered>(
  resetAll(): void;
  refreshAll(): void;
  isAnyLoading(): boolean;
- base: T;
+ __baseController: T;
  } {
  if (isNode()) {
  let result = cache((nodeId: string) => {
@@ -161,7 +161,7 @@ export function getSyncedController<T extends SocketRegistered>(
  result.isAnyLoading = () => {
  notAllowedOnServer();
  };
- result.base = controller;
+ result.__baseController = controller;
  return result;
  }
  let id = nextId();
@@ -416,6 +416,6 @@ export function getSyncedController<T extends SocketRegistered>(
  }
  });
  };
- result.base = controller;
+ result.__baseController = controller;
  return result;
  }
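The rename from `base` to `__baseController` gives the synced wrapper a distinctive property that callers can probe for, which is what the `registerManagementPages2` change earlier in this diff relies on: server-side, a module export may be a synced wrapper, and `SocketFunction.expose` needs the underlying registered controller. A condensed sketch of that unwrap pattern (`exported` is an assumed module export, not a name from the package):

```ts
// Sketch of the unwrap-and-expose pattern used in registerManagementPages2 above.
let controller = exported as SocketRegistered;
if ((controller as any)?.__baseController) {
    // Synced wrapper: expose the underlying SocketFunction controller instead.
    controller = (controller as any).__baseController;
}
SocketFunction.expose(controller);
```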
package/src/misc.ts CHANGED
@@ -185,4 +185,9 @@ export function streamToIteratable<T>(reader: {
  }
  }
  };
+ }
+
+ const AsyncFunction = (async () => { }).constructor;
+ export function isAsyncFunction(func: unknown): boolean {
+ return func instanceof AsyncFunction;
  }
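A quick usage note on the new `isAsyncFunction` helper: because it checks the constructor, it detects functions declared `async`, but not plain functions that merely return a promise. A small self-contained sketch:

```ts
// isAsyncFunction reports whether a value is an `async function`,
// by comparing against the AsyncFunction constructor.
const AsyncFunction = (async () => { }).constructor;
function isAsyncFunction(func: unknown): boolean {
    return func instanceof AsyncFunction;
}

console.log(isAsyncFunction(async () => 1));            // true
console.log(isAsyncFunction(() => Promise.resolve(1))); // false: declared sync
console.log(isAsyncFunction(function () { }));          // false
```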
package/src/misc2.ts CHANGED
@@ -1,3 +1,6 @@
+ import { delay } from "socket-function/src/batching";
+ import { formatTime } from "socket-function/src/formatting/format";
+ import { timeInHour } from "socket-function/src/misc";
  import { atomic } from "./2-proxy/PathValueProxyWatcher";

  export function isStrSimilar(a: string | undefined, b: string | undefined) {
@@ -56,4 +59,53 @@ export async function* streamToAsyncIterable(stream: StreamLike): AsyncIterable<
  while (pendingChunks.length > 0) {
  yield pendingChunks.shift()!;
  }
+ }
+
+
+ export async function runScheduler(hours: number[], func: () => Promise<void>) {
+ while (true) {
+ try {
+ const now = new Date();
+
+ // Find the next check time from our list
+ const sortedHours = [...hours].sort((a, b) => a - b);
+ let targetTime = new Date(now);
+
+ // Find the next check hour after the current time
+ let nextHour = sortedHours.find(hour => {
+ const checkTime = new Date(now);
+ checkTime.setHours(hour, 0, 0, 0);
+ return checkTime > now;
+ });
+
+ if (nextHour === undefined) {
+ // No more checks today, take the first one tomorrow
+ nextHour = sortedHours[0];
+ targetTime.setDate(targetTime.getDate() + 1);
+ }
+
+ targetTime.setHours(nextHour, 0, 0, 0);
+
+ console.log(`Next target time: ${targetTime.toLocaleString()}`);
+
+ // Wait and check every 15 minutes until it's time
+ while (true) {
+ const currentTime = new Date();
+ const timeUntilCheck = targetTime.getTime() - currentTime.getTime();
+
+ if (timeUntilCheck <= 0) {
+ console.log("Time to run the check!");
+ await func();
+ break; // Break out of inner loop to calculate next check time
+ }
+
+ console.log(`Time until next check: ${formatTime(timeUntilCheck)}`);
+ await delay(timeInHour / 4); // Wait 15 minutes (1/4 of an hour)
+ }
+ } catch (error) {
+ console.error(error);
+ // Even if there's an error, wait 15 minutes before trying again
+ await delay(timeInHour / 4);
+ }
+ }
  }
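`runScheduler` loops forever: it picks the next listed hour (local time, via `setHours`), polls every 15 minutes until that time passes, runs `func`, then recalculates; errors are logged and retried after 15 minutes. A hypothetical caller, mirroring how `runDigestLoop` above uses it (the hours and log message are illustrative):

```ts
// Hypothetical usage: run a job every day at 02:00 and 14:00 local time.
// runScheduler never returns, so it is typically the last call in an entry point.
await runScheduler([2, 14], async () => {
    console.log("Running scheduled job");
});
```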
@@ -11,7 +11,7 @@ import { isClient } from "../config2";
  import { getExternalIP } from "../misc/networking";
  import { MAX_ACCEPTED_CHANGE_AGE } from "../0-path-value-core/pathValueCore";
  import { createURLSync } from "../library-components/URLParam";
- import { devDebugbreak, getDomain, isDevDebugbreak, isRecovery } from "../config";
+ import { devDebugbreak, getDomain, getEmailDomain, isDevDebugbreak, isRecovery } from "../config";
  import { delay } from "socket-function/src/batching";
  import { enableErrorNotifications } from "../library-components/errorNotifications";
  import { clamp } from "../misc";
@@ -607,6 +607,7 @@ function sendLoginEmail(config: {
  const machineId = Querysub.getCallerMachineId();
  const now = Querysub.getCallTime();

+ // Check for the API key, even though we don't immediately use it, so that we can get good errors to tell the user (likely the developer) to set up the Postmark API key.
  const apiKey = atomic(data().secure.postmarkAPIKey);
  if (!apiKey) {
  let link = createLink([
@@ -688,18 +689,36 @@ function sendLoginEmail(config: {
  timeoutTime,
  });
  Querysub.onCommitFinished(async () => {
- await sendEmail_postmark({
- apiKey,
- to: email,
- // TODO: Allow configuring this (defaulting to getDomain() if unconfigured). For now we hardcode it, because it takes
- // a while to verify a new postmark email, and we don't even know what final domain we will be using.
- from: "login@querysub.com",
+ await sendEmail({
+ to: [email],
+ fromPrefix: "login",
  subject,
  contents,
  });
  });
  });
  }
+ export async function sendEmail(config: {
+ to: string[];
+ // The domain should be getDomain
+ // TODO: Actually use getDomain, for now it's hardcoded, because setting up a new
+ fromPrefix: string;
+ subject: string;
+ contents: preact.VNode;
+ }) {
+ let key = await Querysub.commitAsync(() => atomic(data().secure.postmarkAPIKey));
+ if (!key) {
+ console.warn(`No postmark API key setup, so we can't send email`);
+ return;
+ }
+ await sendEmail_postmark({
+ apiKey: key,
+ to: config.to,
+ from: `${config.fromPrefix}@${getEmailDomain()}`,
+ subject: config.subject,
+ contents: config.contents,
+ });
+ }


  function verifyMachineId(config: {
package/testEntry2.ts CHANGED
@@ -19,7 +19,7 @@ export async function testMain() {
  await Querysub.hostService("test");
  await delay(timeInSecond * 5);

- console.error(`This should show up locally, but not remotely.`);
+ console.error(`A completely new error that is not suppressed. .`);
  await delay(timeInSecond * 15);
  await shutdown();
  return;