querysub 0.326.0 → 0.327.0
This diff compares the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between versions exactly as they appear in their public registries.
- package/package.json +1 -1
- package/src/diagnostics/logs/FastArchiveAppendable.ts +10 -8
- package/src/diagnostics/logs/FastArchiveViewer.tsx +32 -18
- package/src/diagnostics/logs/LogViewer2.tsx +5 -1
- package/src/diagnostics/logs/errorNotifications/ErrorNotificationController.ts +29 -19
- package/src/diagnostics/logs/errorNotifications/ErrorWarning.tsx +7 -6
- package/src/diagnostics/logs/errorNotifications/errorWatchEntry.tsx +10 -6
- package/src/library-components/SyncedController.ts +5 -3
package/package.json
CHANGED

(Diff body not rendered; given the +1 -1 line counts, this is presumably just the version bump from 0.326.0 to 0.327.0.)

package/src/diagnostics/logs/FastArchiveAppendable.ts
CHANGED
@@ -516,6 +516,7 @@ export class FastArchiveAppendable<Datum> {
         });

         const onDecompressedData = createLogScanner({
+            debugName: file.path,
             onParsedData,
         });
         let batchedData: Buffer[] = [];
@@ -697,14 +698,18 @@ export class FastArchiveAppendable<Datum> {


 export function createLogScanner(config: {
+    debugName: string;
     onParsedData: (posStart: number, posEnd: number, buffer: Buffer | "done") => MaybePromise<void>;
 }): (data: Buffer | "done") => Promise<void> {
     const { onParsedData } = config;
     let pendingData: Buffer[] = [];

+    let finished = false;
+
     let delimitterMatchIndex = 0;
-    return
+    return (async (data: Buffer | "done") => {
         if (data === "done") {
+            finished = true;
             // Flush any pending data, even though we have no delimitter. It will probably fail to parse, but... maybe it will work?
             if (pendingData.length > 0) {
                 let combinedBuffer = Buffer.concat(pendingData);
@@ -714,6 +719,9 @@ export function createLogScanner(config: {
             await onParsedData(0, 0, "done");
             return;
         }
+        if (finished) {
+            throw new Error(`Finished scan, but we received more data: ${data.length}, sample is: ${data.slice(0, 100).toString("hex")}, ${config.debugName}`);
+        }

         let lastStart = 0;
         await measureBlock(async () => {
@@ -734,6 +742,7 @@ export function createLogScanner(config: {
                     ...pendingData,
                     data.slice(lastStart, i + 1),
                 ]).slice(0, -objectDelimitterBuffer.length);
+                pendingData = [];
                 posStart = 0;
                 posEnd = buffer.length;
             } else {
@@ -741,19 +750,12 @@ export function createLogScanner(config: {
                 posStart = lastStart;
                 posEnd = i + 1 - objectDelimitterBuffer.length;
             }
-            // Delimitter was the start of the chunk, and it's the first chunk. Just skip it.
-            if (posStart === posEnd && i === 0) {
-                lastStart = i + 1;
-                continue;
-            }
-
             // Only sometimes awaiting here makes scanning almost 2X faster, in the normal case, somehow?
             let maybePromise = onParsedData(posStart, posEnd, buffer);
             if (maybePromise) {
                 await maybePromise;
             }

-            pendingData = [];
             lastStart = i + 1;
         }
     }
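Two patterns in these createLogScanner changes are worth noting. The new `finished` flag is a stream-sentinel guard: once the "done" marker arrives, any further write indicates a caller bug and should fail loudly with enough context (the new `debugName`, a byte count, a hex sample) to identify the offending stream. A minimal standalone sketch of just the guard (illustrative names, not this package's API):

// Minimal sketch of a "done"-sentinel guard for a push-based byte sink.
// Names are illustrative; the real scanner also splits on a delimiter.
function createGuardedSink(onChunk: (chunk: Buffer) => void, debugName: string) {
    let finished = false;
    return (data: Buffer | "done") => {
        if (data === "done") {
            finished = true;
            return;
        }
        if (finished) {
            // Fail loudly: writes after "done" mean an upstream bug.
            throw new Error(`${debugName}: received ${data.length} bytes after "done"`);
        }
        onChunk(data);
    };
}

The other change, clearing `pendingData` at the point where the pending chunks are actually concatenated into a record rather than after every parsed record, ties the buffer's lifetime to its one consumer, which is the safer invariant.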
package/src/diagnostics/logs/FastArchiveViewer.tsx
CHANGED

@@ -15,7 +15,7 @@ import { ButtonSelector } from "../../library-components/ButtonSelector";
 import { Button } from "../../library-components/Button";
 import { lazy } from "socket-function/src/caching";
 import { LOG_LIMIT_FLAG } from "./diskLogger";
-import { canHaveChildren } from "socket-function/src/types";
+import { MaybePromise, canHaveChildren } from "socket-function/src/types";
 import { niceParse } from "../../niceStringify";
 import { FileMetadata } from "./FastArchiveController";

@@ -30,7 +30,8 @@ const caseInsensitiveParam = new URLParam("caseInsensitive", false);

 export class FastArchiveViewer<T> extends qreact.Component<{
     fastArchives: FastArchiveAppendable<T>[];
-
+    runOnLoad?: boolean;
+    onStart: () => MaybePromise<void>;
     getWantData?: (file: FileMetadata) => Promise<((posStart: number, posEnd: number, data: Buffer) => boolean) | undefined>;
     onDatums: (source: FastArchiveAppendable<T>, datums: T[], metadata: FileMetadata) => void;
     // Called after onData
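`MaybePromise`, newly imported from socket-function/src/types, lets `onStart` be either synchronous or async. It is presumably the conventional union alias:

// Presumed shape of the imported type (the standard definition; not
// verified against socket-function's source).
type MaybePromise<T> = T | Promise<T>;

Callers can then `await onStart()` uniformly, since awaiting a non-promise value is a no-op; that is exactly what the change at line 191 below does.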
@@ -38,12 +39,13 @@ export class FastArchiveViewer<T> extends qreact.Component<{
     onFinish?: () => void;
 }> {
     state = t.state({
+        runCount: t.atomic<number>(0),
         // rootPath =>
         fileMetadata: t.atomic<({
             files: FileMetadata[];
             createTime?: number;
         } | undefined)[]>([]),
-        finished: t.atomic(
+        finished: t.atomic(true),
         error: t.atomic<string | undefined>(undefined),
         pendingSyncInitializations: t.atomic<number>(0),

@@ -108,6 +110,11 @@ export class FastArchiveViewer<T> extends qreact.Component<{
         this.latestSequenceNumber = this.currentSequenceNumber;
         const mySequenceNumber = this.currentSequenceNumber;

+        // Increment run count for each new run
+        Querysub.commit(() => {
+            this.state.runCount++;
+        });
+
         // Helper function to check if this sequence number is still the latest
         const isLatestSync = () => mySequenceNumber === this.latestSequenceNumber;

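The run counter is incremented inside `Querysub.commit` so the state write is batched with the rest of the commit. It sits next to the existing sequence-number guard, a standard way to let a newer async run invalidate older in-flight ones. The shape, reduced to its essentials (hypothetical class, not the viewer's real fields):

// Sequence-number guard: each run records its own number and rechecks
// it after awaits; a mismatch means a newer run has superseded this one.
class LatestOnly {
    private latest = 0;
    async run(work: () => Promise<void>): Promise<void> {
        const mine = ++this.latest;
        await work();
        if (mine !== this.latest) return; // stale run: drop its results
        // ...apply results here...
    }
}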
@@ -150,7 +157,7 @@ export class FastArchiveViewer<T> extends qreact.Component<{

         this.histogramStartTime = timeRange.startTime - this.histogramBucketTime * 2;
         this.histogramEndTime = timeRange.endTime + this.histogramBucketTime * 2;
-        const bucketCount = Math.ceil((this.histogramEndTime - this.histogramStartTime) / this.histogramBucketTime);
+        const bucketCount = clamp(Math.ceil((this.histogramEndTime - this.histogramStartTime) / this.histogramBucketTime), 1, 10000);

         this.histogramAllDataCounts = new Float64Array(bucketCount);
         this.histogramSelectedDataCounts = new Float64Array(bucketCount);
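Clamping the bucket count bounds the two Float64Array allocations below it: with a very wide time range or a tiny bucket time, the unclamped `Math.ceil` could demand an arbitrarily large array (and a zero-width range could produce zero buckets). Assuming the `clamp` used here is the conventional helper:

// Conventional clamp helper (an assumption; not verified against the
// package's actual implementation).
function clamp(x: number, min: number, max: number): number {
    return Math.min(max, Math.max(min, x));
}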
@@ -181,9 +188,7 @@ export class FastArchiveViewer<T> extends qreact.Component<{
             }
         };
         try {
-
-            onStart();
-            });
+            await onStart();

             const caseInsensitive = Querysub.fastRead(() => caseInsensitiveParam.value);
             let caseInsensitiveMapping = new Uint8Array(256);
@@ -571,6 +576,11 @@ export class FastArchiveViewer<T> extends qreact.Component<{
                     flavor="large"
                     fillWidth
                     onKeyUp={this.handleDownload}
+                    ref2={() => {
+                        if (this.props.runOnLoad) {
+                            void this.handleDownload();
+                        }
+                    }}
                     noEnterKeyBlur
                     placeholder="Filter terms, ex x | y & z"
                 />
@@ -584,17 +594,21 @@ export class FastArchiveViewer<T> extends qreact.Component<{
                     flavor="small"
                 />
                 <div className={css.vbox(10)}>
-                    {this.state.
-
-
-
-
-
-
-
-
-
-
+                    {this.state.runCount > 0 && (
+                        <div
+                            className={infoDisplay(120)}
+                            title={this.state.fileMetadata.map(x => x?.files || []).flat().map(x =>
+                                `${x.path} (${formatNumber(x.size)})`
+                            ).join("\n")}
+                        >
+                            File count: {formatNumber(totalFileCount)}, Backblaze size: {formatNumber(totalBackblazeByteCount)}B (compressed), Disk size: {formatNumber(totalLocalByteCount)}B (uncompressed)
+                        </div>
+                    )}
+                    {this.state.runCount === 0 && (
+                        <div className={infoDisplay(200)}>
+                            No data downloaded yet. Click Run to download data.
+                        </div>
+                    )}
                     {this.state.finished && <div
                         className={infoDisplay(60).button}
                         onClick={() => {
package/src/diagnostics/logs/LogViewer2.tsx
CHANGED

@@ -144,7 +144,8 @@ export class LogViewer2 extends qreact.Component {
             <FastArchiveViewer
                 ref2={x => this.fastArchiveViewer = x}
                 fastArchives={logs}
-
+                runOnLoad={errorNotifyToggleURL.value}
+                onStart={async () => {
                     this.datumCount = 0;
                     this.notMatchedCount = 0;
                     this.errors = 0;
@@ -167,6 +168,9 @@ export class LogViewer2 extends qreact.Component {
                         });
                     }
                     })();
+                    // ALWAYS update it, as the synchronous one might be out of date, and if we use an outdated one extra errors show up.
+                    suppressionList = await suppressionController.getSuppressionList.promise();
+
                 }}
                 getWantData={async (file) => {
                     if (!hasErrorNotifyToggle) return undefined;
package/src/diagnostics/logs/errorNotifications/ErrorNotificationController.ts
CHANGED

@@ -224,22 +224,26 @@ class SuppressionList {
         suppressionUpdatedChannel.watch(() => {
             void this.updateEntriesNow();
         });
+        await runInfinitePollCallAtStart(SUPPRESSION_POLL_INTERVAL, async () => {
+            await this.updateEntriesNow();
+        });
     });
+    private cacheEntries: SuppressionListBase | undefined = undefined;
     private updateEntriesNow = async () => {
         let entries = await suppressionListArchive.get(suppressionListKey);
         if (!entries) {
             entries = { entries: {} };
         }
-        this.
+        this.cacheEntries = entries;
     };
-    private
+    private async getEntries(): Promise<SuppressionListBase> {
         await this.init();
-
-
-    }
+        if (!this.cacheEntries) {
+            throw new Error("Cache entries not set? Should be impossible.");
+        }
         // Infinite poll will have set this, so we don't infinitely loop
-        return
-    }
+        return this.cacheEntries;
+    }

     public async filterObjsToNonSuppressed(objs: LogDatum[]): Promise<LogDatum[]> {
         // NOTE: Streamed data should be rare enough, that handling this inefficiently is okay.
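The suppression list is now cached in `cacheEntries` and refreshed from two directions: a channel watch for push updates, and `runInfinitePollCallAtStart` for periodic pulls (which, per its name, also runs once at startup, so `getEntries` can rely on the cache being populated once `init` resolves). The overall shape, sketched with generic names rather than this class's API:

// Watch-plus-poll cache: push updates refresh it immediately, a poll
// catches anything missed, and a first fetch populates it on demand.
class CachedValue<T> {
    private cache: T | undefined;
    constructor(
        private fetch: () => Promise<T>,
        pollMs: number,
        watch: (onChange: () => void) => void,
    ) {
        watch(() => void this.refresh());
        setInterval(() => void this.refresh(), pollMs);
    }
    private async refresh(): Promise<void> {
        this.cache = await this.fetch();
    }
    async get(): Promise<T> {
        if (this.cache === undefined) await this.refresh();
        if (this.cache === undefined) throw new Error("fetch returned no value");
        return this.cache;
    }
}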
@@ -259,6 +263,7 @@ class SuppressionList {
         }
         let buffer = Buffer.concat(parts);
         let scanner = await this.scanForRecentErrors({
+            debugName: "filterObjsToNonSuppressed",
             startTime,
             endTime,
         });
@@ -266,6 +271,7 @@ class SuppressionList {
         return await scanner.finish();
     }
     public async scanForRecentErrors(config: {
+        debugName: string;
         startTime: number;
         endTime: number;
     }): Promise<{
@@ -285,6 +291,7 @@ class SuppressionList {
         // for the suppression key.
         let obj: { outdatedSuppressionKey?: string } = {};
         let callback = createLogScanner({
+            debugName: config.debugName,
             onParsedData: (posStart, posEnd, buffer) => {
                 if (buffer === "done") {
                     return;
@@ -297,7 +304,7 @@ class SuppressionList {
                 try {
                     datum = JSON.parse(buffer.slice(posStart, posEnd).toString()) as LogDatum;
                 } catch (e: any) {
-                    process.stderr.write(`Failed to parse log datum in around ${buffer.slice(posStart, posEnd).slice(0, 100).toString("hex")}, error is:\n${e.stack}`);
+                    process.stderr.write(`Failed to parse log datum in around ${buffer.slice(posStart, posEnd).slice(0, 100).toString("hex")}, in source ${config.debugName}, error is:\n${e.stack}`);
                     return;
                 }
                 if (obj.outdatedSuppressionKey) {
@@ -307,18 +314,19 @@ class SuppressionList {
             },
         });
         let lastWaitTime = Date.now();
+        const stream = runInSerial(async (buffer: Buffer | "done") => {
+            // TODO: Maybe we should add this pattern to batching.ts? Basically, if we get called fast, we allow the calls through. BUT, if we called slowly OR we are doing a lot of processing (and so we are working for all of SELF_THROTTLE_INTERVAL), then we wait. This prevents this from taking over the machine. The back off is steep though, and if the machine is lagging we might reduce to a trickle, just getting 1 call in per SELF_THROTTLE_DELAY + synchronous lag from work in other parts of the program.
+            let now = Date.now();
+            if (now - lastWaitTime > SELF_THROTTLE_INTERVAL) {
+                await delay(SELF_THROTTLE_DELAY);
+                lastWaitTime = now;
+            }
+            await callback(buffer);
+        });
         return {
-            onData:
-            // TODO: Maybe we should add this pattern to batching.ts? Basically, if we get called fast, we allow the calls through. BUT, if we called slowly OR we are doing a lot of processing (and so we are working for all of SELF_THROTTLE_INTERVAL), then we wait. This prevents this from taking over the machine. The back off is steep though, and if the machine is lagging we might reduce to a trickle, just getting 1 call in per SELF_THROTTLE_DELAY + synchronous lag from work in other parts of the program.
-            let now = Date.now();
-            if (now - lastWaitTime > SELF_THROTTLE_INTERVAL) {
-                await delay(SELF_THROTTLE_DELAY);
-                lastWaitTime = now;
-            }
-            await callback(buffer);
-            }),
+            onData: stream,
             finish: async () => {
-                await
+                await stream("done");
                 // NOTE: We COULD limit as we run, however... how many errors are we really going to encounter that AREN'T suppressed? Suppression is supposed to prevent overload anyways. I guess worst case scenario, yes, we could get overloaded, but... if we are logging more NEW errors than we can store in memory, we have bigger problems...
                 return limitRecentErrors(datums);
             },
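Hoisting the throttled handler into `stream` and routing `finish` through `stream("done")` means the "done" sentinel is serialized behind any in-flight `onData` calls (assuming `runInSerial` queues async invocations in order, as the name suggests). The self-throttle described in the TODO comment is a separable pattern; a standalone sketch with illustrative names and constants:

// Self-throttle: let fast bursts through untouched, but once we have
// been continuously busy for longer than intervalMs, pause for delayMs
// so this work cannot monopolize the event loop.
function selfThrottled<T>(
    fn: (arg: T) => Promise<void>,
    intervalMs: number,
    delayMs: number,
): (arg: T) => Promise<void> {
    let lastWaitTime = Date.now();
    return async (arg: T) => {
        const now = Date.now();
        if (now - lastWaitTime > intervalMs) {
            await new Promise(resolve => setTimeout(resolve, delayMs));
            lastWaitTime = now;
        }
        await fn(arg);
    };
}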
@@ -341,7 +349,8 @@ class SuppressionList {
     }

     public async getSuppressionList(): Promise<SuppressionEntry[]> {
-
+        let entries = Object.values((await this.getEntries()).entries);
+        return entries;
     }
 }
 const suppressionList = new SuppressionList();
@@ -597,6 +606,7 @@ class RecentErrors {
             path = await urlCache.getURLLocalPath(file.url, hash);
             if (!path) continue;
             let scanner = await suppressionList.scanForRecentErrors({
+                debugName: file.url,
                 startTime: file.startTime,
                 endTime: file.endTime,
             });
package/src/diagnostics/logs/errorNotifications/ErrorWarning.tsx
CHANGED

@@ -10,7 +10,7 @@ import { ATag } from "../../../library-components/ATag";
 import { managementPageURL, showingManagementURL } from "../../managementPages";
 import { errorNotifyToggleURL } from "../LogViewer2";
 import { Querysub } from "../../../4-querysub/QuerysubController";
-import { nextId, timeInDay, timeInHour } from "socket-function/src/misc";
+import { deepCloneJSON, nextId, timeInDay, timeInHour } from "socket-function/src/misc";
 import { formatNumber } from "socket-function/src/formatting/format";
 import { Icon } from "../../../library-components/icons";
 import { filterParam } from "../FastArchiveViewer";
@@ -133,14 +133,15 @@ export class ErrorWarning extends qreact.Component {
                 {topExpired &&
                     <div className={css.hbox(8)}>
                         <Button onClick={() => {
+                            let newObj = deepCloneJSON({
+                                ...topExpired!,
+                                expiresAt: Date.now() + timeInDay,
+                            });
                             void Querysub.onCommitFinished(async () => {
-                                await suppressionController.setSuppressionEntry.promise(
-                                    ...topExpired!,
-                                    expiresAt: Date.now() + timeInDay,
-                                });
+                                await suppressionController.setSuppressionEntry.promise(newObj);
                             });
                         }}>
-                            Ignore Again
+                            Ignore Again ({formatDateJSX(topExpired.expiresAt)})
                         </Button>
                         <div>
                             Match Pattern =
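Building `newObj` at click time, rather than spreading `topExpired` inside the deferred callback, snapshots the entry when the user actually clicks; deep-cloning additionally protects nested fields from being mutated before `onCommitFinished` fires. `deepCloneJSON` is presumably the usual JSON round-trip (an assumption; not verified against socket-function's source):

// Presumed shape of deepCloneJSON: a JSON round-trip, which deep copies
// plain data and drops functions/undefined, making the snapshot inert.
function deepCloneJSON<T>(obj: T): T {
    return JSON.parse(JSON.stringify(obj)) as T;
}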
package/src/diagnostics/logs/errorNotifications/errorWatchEntry.tsx
CHANGED

@@ -4,24 +4,28 @@ import { RecentErrorsController, recentErrorsChannel, watchRecentErrors } from "
 import { timeInSecond } from "socket-function/src/misc";
 import { formatDateTime } from "socket-function/src/formatting/format";

+//todonext

+// 1) Fix whatever is causing our memory usage to highly fluctuate. We seem to just go up and up and up from 100 megabytes all the way up to 300 megabytes and then we GC going back to 100. I turned on the socket function logging and I don't think we're sending network messages. And I ran a profile and it doesn't seem to be doing anything slow, but what are we even doing? We need to fix this.
+// - It could be the FPS display. It's pretty easy to check.
+// YEP! I think it's the FPS display. I disabled it. Now we could see if the air parsing lag goes away and if it does and then comes back when we turn back on the FPS display then you know we're gonna have to fix that. I could understand lagging but why would it be allocating so much? Is it the animation rendering? It definitely could be that because the animations are slow to render and can lag Chrome and can lag the GPU and can lag other windows.

-//
-// - I think our expiry date comparison code might be wrong. It seems like once they leave the maybe expired range they come back immediately. If we can reproduce this locally, it'll be trivial to debug because the suppression stuff is global, so we can just see if there are any errors, and if there are, we break in on them.
-// UGH... To debug this, we need to ignore the changes and then we need to work on the other stuff and then we need to come back later and see if those changes have shown up again. We basically need to debug it when it happens. We can't debug it now. It's too late, Now the errors should be showing up because they are expired.
+// 2) The error parsing page is clearly lagging. Even when loading just the error data for the last few days, which is less than a megabytes of total logs, the error loading animation is locking up. Stuttering that is. We need to fix this.


+// Deploy everything and then update the server runner itself.
+
 // The constant error notifications might be fixed now. We'll see tomorrow after all the rolling updates finish.
 // 4) fix whatever's causing constant error notifications. Something is broadcasting on the Recent Errors Change channel constantly.
 // - I guess usually there's no server that's going to be listening on it. So it's... Not that big of a deal, but it's still annoying.


-// 4.1) update channel watching so you can specify that you want to watch only on a specific node ID and then update our code so we only watch it on the controller node ID that we're interfacing with.
-

-// 5) Verify our suppression updates broadcast across the channel correctly, causing us to be able to suppress a notification and our watching script to then stop seeing the new updates. Realistically, it's the calling script that stops setting them, but same thing.

+// Back to getting a Node.js watcher that works smoothly without receiving a whole bunch of extra errors etc.
+// 4.1) update channel watching so you can specify that you want to watch only on a specific node ID and then update our code so we only watch it on the controller node ID that we're interfacing with.

+// 5) Verify our suppression updates broadcast across the channel correctly, causing us to be able to suppress a notification and our watching script to then stop seeing the new updates. Realistically, it's the calling script that stops setting them, but same thing.

 // 5) Set up the Instant Messaging Sending API.
 // - Discord. With beeper it won't really matter what we're messaging. We could also do WhatsApp. It's really all the same.
package/src/library-components/SyncedController.ts
CHANGED

@@ -318,9 +318,11 @@ export function getSyncedController<T extends SocketRegistered>(
         }
         // Don't cache promise calls
         void promise.finally(() => {
-
-            obj.promise
-
+            Querysub.fastRead(() => {
+                if (obj.promise === promise) {
+                    obj.promise = undefined;
+                }
+            });
         });
         return promise;
     });
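The identity check is the important part of this fix: `finally` fires for every call's promise, so an unconditional `obj.promise = undefined` could wipe out a newer in-flight promise that had already replaced this one in the cache slot. Reduced to its core (generic names; the real code additionally wraps the write in `Querysub.fastRead`):

// Promise de-duplication with a stale-clear guard: only the promise
// that still occupies the cache slot is allowed to clear it.
type Slot = { promise?: Promise<unknown> };

function dedupedCall(slot: Slot, make: () => Promise<unknown>): Promise<unknown> {
    if (slot.promise) return slot.promise;
    const promise = make();
    slot.promise = promise;
    void promise.finally(() => {
        if (slot.promise === promise) { // otherwise a newer call owns the slot
            slot.promise = undefined;
        }
    });
    return promise;
}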
|