querysub 0.326.0 → 0.328.0

Files changed (42)
  1. package/package.json +3 -4
  2. package/src/-a-archives/archivesBackBlaze.ts +20 -0
  3. package/src/-a-archives/archivesDisk.ts +5 -5
  4. package/src/-a-archives/archivesLimitedCache.ts +118 -7
  5. package/src/-a-archives/archivesPrivateFileSystem.ts +3 -0
  6. package/src/-g-core-values/NodeCapabilities.ts +26 -11
  7. package/src/0-path-value-core/auditLogs.ts +4 -2
  8. package/src/2-proxy/PathValueProxyWatcher.ts +3 -0
  9. package/src/3-path-functions/PathFunctionRunner.ts +2 -2
  10. package/src/4-querysub/Querysub.ts +1 -1
  11. package/src/5-diagnostics/GenericFormat.tsx +2 -2
  12. package/src/deployManager/machineApplyMainCode.ts +10 -8
  13. package/src/deployManager/machineSchema.ts +4 -3
  14. package/src/deployManager/setupMachineMain.ts +3 -2
  15. package/src/diagnostics/logs/FastArchiveAppendable.ts +85 -59
  16. package/src/diagnostics/logs/FastArchiveController.ts +5 -2
  17. package/src/diagnostics/logs/FastArchiveViewer.tsx +222 -51
  18. package/src/diagnostics/logs/LogViewer2.tsx +83 -35
  19. package/src/diagnostics/logs/TimeRangeSelector.tsx +8 -0
  20. package/src/diagnostics/logs/diskLogGlobalContext.ts +3 -3
  21. package/src/diagnostics/logs/diskLogger.ts +70 -23
  22. package/src/diagnostics/logs/errorNotifications/ErrorNotificationController.ts +111 -82
  23. package/src/diagnostics/logs/errorNotifications/ErrorSuppressionUI.tsx +37 -3
  24. package/src/diagnostics/logs/errorNotifications/ErrorWarning.tsx +52 -22
  25. package/src/diagnostics/logs/errorNotifications/errorDigests.tsx +8 -0
  26. package/src/diagnostics/logs/errorNotifications/errorWatchEntry.tsx +198 -52
  27. package/src/diagnostics/logs/lifeCycleAnalysis/spec.md +3 -2
  28. package/src/diagnostics/managementPages.tsx +5 -0
  29. package/src/email_ims_notifications/discord.tsx +203 -0
  30. package/src/fs.ts +9 -0
  31. package/src/functional/SocketChannel.ts +9 -0
  32. package/src/functional/throttleRender.ts +134 -0
  33. package/src/library-components/ATag.tsx +2 -2
  34. package/src/library-components/SyncedController.ts +5 -3
  35. package/src/misc.ts +13 -0
  36. package/src/misc2.ts +54 -0
  37. package/src/user-implementation/SecurityPage.tsx +11 -5
  38. package/src/user-implementation/userData.ts +31 -16
  39. package/testEntry2.ts +14 -5
  40. package/src/user-implementation/setEmailKey.ts +0 -25
  41. /package/src/{email → email_ims_notifications}/postmark.tsx +0 -0
  42. /package/src/{email → email_ims_notifications}/sendgrid.tsx +0 -0
package/src/diagnostics/logs/errorNotifications/errorWatchEntry.tsx CHANGED
@@ -1,75 +1,221 @@
  import { batchFunction, runInfinitePollCallAtStart } from "socket-function/src/batching";
  import { getControllerNodeId } from "../../../-g-core-values/NodeCapabilities";
- import { RecentErrorsController, recentErrorsChannel, watchRecentErrors } from "./ErrorNotificationController";
- import { timeInSecond } from "socket-function/src/misc";
- import { formatDateTime } from "socket-function/src/formatting/format";
+ import { RecentErrors, RecentErrorsController, errorWatcherBase, recentErrorsChannel, suppressionList, watchRecentErrors } from "./ErrorNotificationController";
+ import { sort, timeInDay, timeInHour, timeInMinute, timeInSecond } from "socket-function/src/misc";
+ import { formatDate, formatDateTime } from "socket-function/src/formatting/format";
+ import { getDomain } from "../../../config";
+ import { Querysub, QuerysubController } from "../../../4-querysub/QuerysubController";
+ import { formatPercent } from "socket-function/src/formatting/format";
+ import { LogDatum, getLogFile } from "../diskLogger";
+ import { sendDiscordMessage } from "../../../email_ims_notifications/discord";
+ import { user_data } from "../../../user-implementation/userData";
+ import { createLink } from "../../../library-components/ATag";
+ import { getLogsLinkParts } from "./ErrorWarning";
 
+ const MAX_IMS_PER_DAY = 3;
+ const MAX_IMS_PER_HOURS = 1;
+ const MAX_IMS_PER_FILE_PER_DAY = 1;
+ const MAX_PER_FILE_PER_IM = 2;
+ const MAX_PER_IM = 10;
 
+ // Wait a bit, because if there's one error, there are likely more errors.
+ const BATCH_TIME = timeInSecond * 30;
+ const DIGEST_INTERVAL = timeInDay * 3;
 
- // 3) Dismissing of certain errors is not working. They keep coming back.
- // - I think our expiry date comparison code might be wrong. It seems like once they leave the maybe-expired range they come back immediately. If we can reproduce this locally, it'll be trivial to debug, because the suppression stuff is global, so we can just see if there are any errors, and if there are, break in on them.
- // UGH... To debug this we need to ignore the changes, work on the other stuff, and come back later to see if those changes have shown up again. We basically need to debug it when it happens; we can't debug it now, it's too late. By now the errors should be showing up, because they are expired.
 
+ // 4) Deploy
+ // 4) Create local errors and make sure the remote server doesn't get them
+ // 5) Create remote errors and make sure that all servers get them (local and remote servers)
 
- // The constant error notifications might be fixed now. We'll see tomorrow after all the rolling updates finish.
- // 4) Fix whatever's causing constant error notifications. Something is broadcasting on the Recent Errors Change channel constantly.
- // - I guess usually there's no server that's going to be listening on it, so it's not that big of a deal, but it's still annoying.
 
+ // Abstract out RecentErrors scanning, so we can reuse it
+ // (appendables, timeRange, callback)
+ // - Oh, we also need a way to tell it to not limit to recent errors.
+ // - And also to disable throttling.
 
- // 4.1) Update channel watching so you can specify that you want to watch only on a specific node ID, and then update our code so we only watch it on the controller node ID that we're interfacing with.
 
+ // createDigest(appendables, timeRange) => Promise<DigestInfo>
 
- // 5) Verify our suppression updates broadcast across the channel correctly, causing us to be able to suppress a notification and our watching script to then stop seeing the new updates. Realistically, it's the calling script that stops setting them, but same thing.
-
+ // 7) Write the digest script, which does a fresh scan on just a specific range of data using start synchronize internal and suppression list, etc.
+ // - The most recent ten unsuppressed errors. Counts of unsuppressed errors by file, or by line id if they have it. Counts of suppressed and unsuppressed errors by hour group.
+ // - Oh, also everything we do for errors, do for warnings as well, and split the values. We want warnings and errors.
+ // - Time the entire thing, and put that, and the profile, in the digest too! That will give us a good gauge on whether the errors/suppressions are getting slow (due to a lot of errors, or a lot of suppression checks!)
+ // Store the digest results in Backblaze.
+ // 8) Write a page that will show the results from Backblaze, allowing you to see a list of all of the digests and to view a specific one. Just show all the data in a chart, whatever. Show two charts, one with just the unsuppressed errors, and the other with both suppressed and unsuppressed, because you really only care about suppressed when you're looking at the fraction of suppressed to unsuppressed, and the sheer quantity of unsuppressed.
+ // 9) Send an email every time period, and also send an IM that has smaller information.
+ // - Both will link to the actual web page that has the digest, deep linking to the specific tabs.
+ // - Show the chart in the email as well, but just format it as ASCII, because image dependencies are annoying and I don't want to implement them right now, as it might take a few days to get working.
 
 
- // 5) Set up the Instant Messaging Sending API.
- // - Discord. With Beeper it won't really matter what we're messaging. We could also do WhatsApp. It's really all the same.
 
- // 6) Set up all the code to properly rate limit IMs, batch them, link back to the log page, etc.
- // - Just link to the error page for the last week. We don't need to link to anything specific.
- // - Properly get the node ID that we're going to be watching, and if it goes down, get a new one, and ignore messages from the old node.
- // - And if no node exists, we need to warn and then wait.
+ // NOTE: Yes, this is stored in memory, so if the server reboots or if this script keeps crashing, we might send a lot of instant messages. However, one, Discord will probably rate limit us, and two, this means something is really wrong, especially if it happens a lot, and we really should fix it right away.
+ // NOTE: If we decide not to send IMs, we don't queue them up, because that's just extremely annoying. The point of the limit isn't to send the maximum we can that stays just under the limit! The point of the limit is: if a lot happens at once, ignore most of it.
+ type IMInfo = {
+ time: number;
+ perFile: {
+ [file: string]: LogDatum[];
+ };
+ };
+
+ let imHistory: IMInfo[] = [];
+ function filterIMInfo(info: IMInfo): {
+ info: IMInfo;
+ countFiltered: number;
+ } {
+ let countFiltered = 0;
+ // Don't prefer the most warnings, prefer the oldest warnings, which are generally the ones that are first in the object.
+ // Filter based on files per IM, and max per IM.
+ for (let [key, value] of Object.entries(info.perFile)) {
+ if (value.length > MAX_PER_FILE_PER_IM) {
+ countFiltered += value.length - MAX_PER_FILE_PER_IM;
+ value = value.slice(0, MAX_PER_FILE_PER_IM);
+ }
+ info.perFile[key] = value;
+ }
+ let entries = Object.entries(info.perFile);
+ if (entries.length > MAX_PER_IM) {
+ let removed = entries.slice(MAX_PER_IM);
+ for (let [key, value] of removed) {
+ countFiltered += value.length;
+ }
+ entries = entries.slice(0, MAX_PER_IM);
+ }
+ info.perFile = Object.fromEntries(entries);
+ // Also ignore files if they've been mentioned too many times today.
+ let dayThreshold = Date.now() - timeInDay;
+ let historyInDay = imHistory.filter(x => x.time > dayThreshold);
+ let countByFile = new Map<string, number>();
+ for (let obj of historyInDay) {
+ for (let [key, value] of Object.entries(obj.perFile)) {
+ countByFile.set(key, (countByFile.get(key) || 0) + value.length);
+ }
+ }
+ for (let key of Object.keys(info.perFile)) {
+ let count = countByFile.get(key) || 0;
+ if (count >= MAX_IMS_PER_FILE_PER_DAY) {
+ countFiltered += count;
+ delete info.perFile[key];
+ }
+ }
+ return {
+ info,
+ countFiltered,
+ };
+ }
+ function canSendNow(info: IMInfo) {
+ let dayThreshold = Date.now() - timeInDay;
+ let historyInDay = imHistory.filter(x => x.time > dayThreshold);
+ let hourThreshold = Date.now() - timeInHour;
+ let historyInHour = historyInDay.filter(x => x.time > hourThreshold);
+
+ if (historyInDay.length >= MAX_IMS_PER_DAY) {
+ return false;
+ }
+ if (historyInHour.length >= MAX_IMS_PER_HOURS) {
+ return false;
+ }
+ return true;
+ }
 
+ const sendIMs = batchFunction(({ delay: BATCH_TIME }), async (logsAll: LogDatum[][]) => {
+ let logs = logsAll.flat();
+ let infoBase: IMInfo = {
+ time: Date.now(),
+ perFile: {},
+ };
+ for (let log of logs) {
+ let file = getLogFile(log);
+ let array = infoBase.perFile[file];
+ if (!array) {
+ array = [];
+ infoBase.perFile[file] = array;
+ }
+ array.push(log);
+ }
+ let { info, countFiltered } = filterIMInfo(infoBase);
+ if (canSendNow(info)) {
+ imHistory.push(info);
+ let webhookURL = await Querysub.commitAsync(() => {
+ return user_data().secure.notifyDiscordWebhookURL;
+ });
+ if (!webhookURL) {
+ console.error(`No Discord webhook URL set, cannot send warning instant messages`);
+ return;
+ }
+ let url = createLink(getLogsLinkParts());
+ let message = Object.values(info.perFile).flat().map(
+ x => `[${formatDateTime(x.time)}](${url}) | ${x.param0} (${x.__NAME__})`
+ ).join("\n");
+ if (countFiltered > 0) {
+ message += `\n+${countFiltered} more errors`;
+ }
+ void sendDiscordMessage({
+ webhookURL,
+ message,
+ });
+ }
+ });
 
- // 7) Write the digest script, which is very different, but will run in the same entry.
- // - Separate warnings and errors, and also bucket by time bucket
- // - Suppressed errors by time bucket (but no type, as we definitely don't want to parse all suppressed errors...)
- // - Time the entire thing, and put that, and the profile, in the digest too! That will give us a good gauge on whether the errors/suppressions are getting slow (due to a lot of errors, or a lot of suppression checks!)
- // 8) Write a page that shows the results of the digest in tabs, writing the digest probably just to Backblaze
- // - For now, just have two tabs, one for errors and one for warnings.
- // - If we're going to do a full scan, we might as well show time series data as well. It's trivial.
- // - Also track the number of suppressed errors as well. We won't have details on these, such as a breakdown, but we can at least show the count (and the count by time)
- // 9) Send an email every time period, and also send an IM that has smaller information.
- // - Both will link to the actual web page that has the digest, deep linking to the specific tabs.
- // - Show the chart in the email as well, but just format it as ASCII, because image dependencies are annoying and I don't want to implement them right now, as it might take a few days to get working.
 
  async function runIMNotifies() {
- let controllerNodeId = await getControllerNodeId(RecentErrorsController.base);
- if (!controllerNodeId) throw new Error("No controller node id found");
- //todonext
- // Temporary hardcode to use the local server
- controllerNodeId = "127-0-0-1.querysubtest.com:7007";
-
- let controller = RecentErrorsController.base.nodes[controllerNodeId];
- recentErrorsChannel.watch(() => {
- void updateRecentErrors(undefined);
- });
- const updateRecentErrors = batchFunction(
- //todonext
- // Increase this after we finish testing
- { delay: 1000 },
- async function updateRecentErrors() {
- let recentErrors = await controller.getRecentErrors();
- console.log(`Received ${recentErrors.length} recent errors at ${formatDateTime(Date.now())}`);
- for (let error of recentErrors) {
- console.log(` ${error.param0}`);
+ await Querysub.hostService("error-notifications");
+
+ // NOTE: This should be fine, as realistically, how many errors are we going to see in the last two weeks? At 10 million, we're still probably only going to allocate 160 megs of memory, assuming we allocate 16 bytes per number. If we have more than 16 million for a single thread, it'll fail because of the max Set size... However, they will likely be somewhat distributed between threads.
+ let errorByThreadIdByDay = new Map<number, Map<string, Set<number>>>();
+ function getDay(time: number) {
+ return Math.floor(time / timeInDay) * timeInDay;
+ }
+ function isDuplicate(obj: LogDatum): boolean {
+ // Checks if it's a duplicate, and if it's not, adds it.
+ let day = getDay(obj.time);
+ let threadId = obj.__threadId || "";
+ let dayMap = errorByThreadIdByDay.get(day);
+ if (!dayMap) {
+ dayMap = new Map<string, Set<number>>();
+ errorByThreadIdByDay.set(day, dayMap);
+ }
+ let threadIdMap = dayMap.get(threadId);
+ if (!threadIdMap) {
+ threadIdMap = new Set<number>();
+ dayMap.set(threadId, threadIdMap);
+ }
+ if (threadIdMap.has(obj.time)) {
+ return true;
+ }
+ threadIdMap.add(obj.time);
+ return false;
+
+ }
+ function clearOldDays() {
+ // Clear all the days that are more than 14 days older than our current day.
+ let now = Date.now();
+ let currentDay = getDay(now);
+ let cutOffDay = currentDay - 14;
+ for (let day of errorByThreadIdByDay.keys()) {
+ if (day < cutOffDay) {
+ errorByThreadIdByDay.delete(day);
+ console.log(`Cleared old day ${formatDateTime(day)}`);
  }
- console.log();
- console.log();
  }
- );
- await updateRecentErrors(undefined);
+ }
+ errorWatcherBase.watch(async (objs) => {
+ clearOldDays();
+ objs = await suppressionList.filterObjsToNonSuppressed(objs);
+ objs = objs.filter(x => !isDuplicate(x));
+ if (objs.length === 0) return;
+ // The oldest first as they are most likely the cause.
+ sort(objs, x => -x.time);
+ void sendIMs(objs);
+ console.log();
+ console.log();
+ console.log(`Received ${objs.length} recent errors at ${formatDateTime(Date.now())}`);
+ for (let obj of objs) {
+ console.log(` ${obj.param0}`);
+ }
+ console.log();
+ console.log();
+ });
+
  }
 
  async function main() {
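
The IM rate limiting above (per-day and per-hour caps, with anything over the cap dropped rather than queued) can be illustrated with a minimal standalone sketch. The names and constants below are illustrative only and are not the package's exports:

```ts
// Sketch of "drop, don't queue" rate limiting: keep a history of send times,
// refuse to send when the per-hour or per-day caps are hit, and never queue
// what was refused.
const MAX_PER_DAY = 3;
const MAX_PER_HOUR = 1;
const HOUR = 60 * 60 * 1000;
const DAY = 24 * HOUR;

let sendHistory: number[] = [];

function trySend(now: number, send: () => void): boolean {
    // Only the last day of history matters for either cap.
    sendHistory = sendHistory.filter(t => t > now - DAY);
    const lastHour = sendHistory.filter(t => t > now - HOUR);
    if (sendHistory.length >= MAX_PER_DAY) return false;
    if (lastHour.length >= MAX_PER_HOUR) return false;
    sendHistory.push(now);
    send();
    return true;
}

// Example: a burst of 5 alerts within one minute results in exactly one send.
let sent = 0;
for (let i = 0; i < 5; i++) {
    trySend(Date.now() + i * 1000, () => { sent++; });
}
console.log(sent); // 1
```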
package/src/diagnostics/logs/lifeCycleAnalysis/spec.md CHANGED
@@ -91,8 +91,9 @@ Make sure we check our life cycles for nodes being added and removed to make sur
 
  Check the startup lifecycle to make sure we can detect the nodes pretty fast and in parallel, instead of serially
 
- 10) Verify old user/fast-log-cache machine folders are deleted
-
+ 11) Take all of the errors that we are ignoring and use the life cycles to detect why they're happening. A lot of them really shouldn't be happening.
+ - Receiving values from authorities different from the ones we're watching is weird. Why does that keep happening?
+ - And we keep running into audit mismatches. Why does that keep happening? Is it only because of our local development server?
 
  SPECIAL UI links for certain errors in log view
  - Probably dynamically created, based on contents of log
package/src/diagnostics/managementPages.tsx CHANGED
@@ -90,6 +90,11 @@ export async function registerManagementPages2(config: {
  componentName: "LogViewer2",
  getModule: () => import("./logs/LogViewer2"),
  });
+ inputPages.push({
+ title: "Security",
+ componentName: "SecurityPage",
+ getModule: () => import("../user-implementation/SecurityPage"),
+ });
  inputPages.push({
  title: "Audit Paths",
  componentName: "AuditLogPage",
package/src/email_ims_notifications/discord.tsx ADDED
@@ -0,0 +1,203 @@
+ import { httpsRequest } from "../https";
+
+ export interface DiscordEmbedFooter {
+ text: string;
+ icon_url?: string;
+ }
+
+ export interface DiscordEmbedImage {
+ url: string;
+ }
+
+ export interface DiscordEmbedThumbnail {
+ url: string;
+ }
+
+ export interface DiscordEmbedVideo {
+ url: string;
+ }
+
+ export interface DiscordEmbedProvider {
+ name?: string;
+ url?: string;
+ }
+
+ export interface DiscordEmbedAuthor {
+ name: string;
+ url?: string;
+ icon_url?: string;
+ }
+
+ export interface DiscordEmbedField {
+ name: string;
+ value: string;
+ inline?: boolean;
+ }
+
+ export interface DiscordEmbed {
+ title?: string;
+ type?: "rich" | "image" | "video" | "gifv" | "article" | "link";
+ description?: string;
+ url?: string;
+ timestamp?: string; // ISO8601 timestamp
+ color?: number; // integer color value
+ footer?: DiscordEmbedFooter;
+ image?: DiscordEmbedImage;
+ thumbnail?: DiscordEmbedThumbnail;
+ video?: DiscordEmbedVideo;
+ provider?: DiscordEmbedProvider;
+ author?: DiscordEmbedAuthor;
+ fields?: DiscordEmbedField[];
+ }
+
+ export interface DiscordAllowedMentions {
+ parse?: ("roles" | "users" | "everyone")[];
+ roles?: string[];
+ users?: string[];
+ replied_user?: boolean;
+ }
+
+ export interface DiscordAttachment {
+ id: string;
+ filename: string;
+ description?: string;
+ content_type?: string;
+ size: number;
+ url: string;
+ proxy_url: string;
+ }
+
+ export interface DiscordEmoji {
+ id?: string;
+ name?: string;
+ animated?: boolean;
+ }
+
+ export interface DiscordSelectOption {
+ label: string;
+ value: string;
+ description?: string;
+ emoji?: DiscordEmoji;
+ default?: boolean;
+ }
+
+ export interface DiscordButtonComponent {
+ type: 2; // Button component type
+ style: 1 | 2 | 3 | 4 | 5; // Primary, Secondary, Success, Danger, Link
+ label?: string;
+ emoji?: DiscordEmoji;
+ custom_id?: string; // Required for non-link buttons
+ url?: string; // Required for link buttons
+ disabled?: boolean;
+ }
+
+ export interface DiscordSelectMenuComponent {
+ type: 3; // String select menu component type
+ custom_id: string;
+ options: DiscordSelectOption[];
+ placeholder?: string;
+ min_values?: number;
+ max_values?: number;
+ disabled?: boolean;
+ }
+
+ export interface DiscordUserSelectMenuComponent {
+ type: 5; // User select menu component type
+ custom_id: string;
+ placeholder?: string;
+ min_values?: number;
+ max_values?: number;
+ disabled?: boolean;
+ }
+
+ export interface DiscordRoleSelectMenuComponent {
+ type: 6; // Role select menu component type
+ custom_id: string;
+ placeholder?: string;
+ min_values?: number;
+ max_values?: number;
+ disabled?: boolean;
+ }
+
+ export interface DiscordMentionableSelectMenuComponent {
+ type: 7; // Mentionable (users + roles) select menu component type
+ custom_id: string;
+ placeholder?: string;
+ min_values?: number;
+ max_values?: number;
+ disabled?: boolean;
+ }
+
+ export interface DiscordChannelSelectMenuComponent {
+ type: 8; // Channel select menu component type
+ custom_id: string;
+ placeholder?: string;
+ min_values?: number;
+ max_values?: number;
+ disabled?: boolean;
+ channel_types?: number[]; // Array of channel type integers to filter by
+ }
+
+ export interface DiscordTextInputComponent {
+ type: 4; // Text input component type
+ custom_id: string;
+ style: 1 | 2; // Short (1) or Paragraph (2)
+ label: string;
+ min_length?: number;
+ max_length?: number;
+ required?: boolean;
+ value?: string;
+ placeholder?: string;
+ }
+
+ export type DiscordComponent =
+ | DiscordButtonComponent
+ | DiscordSelectMenuComponent
+ | DiscordUserSelectMenuComponent
+ | DiscordRoleSelectMenuComponent
+ | DiscordMentionableSelectMenuComponent
+ | DiscordChannelSelectMenuComponent
+ | DiscordTextInputComponent;
+
+ export interface DiscordActionRow {
+ type: 1; // Action row component type
+ components: DiscordComponent[];
+ }
+
+ export interface DiscordWebhookMessage {
+ content?: string;
+ username?: string;
+ avatar_url?: string;
+ tts?: boolean;
+ embeds?: DiscordEmbed[];
+ allowed_mentions?: DiscordAllowedMentions;
+ components?: DiscordActionRow[];
+ files?: any[]; // File attachments - these are handled differently in multipart requests
+ payload_json?: string;
+ attachments?: DiscordAttachment[];
+ flags?: number;
+ thread_name?: string;
+ }
+
+ export async function sendDiscordMessage(config: {
+ webhookURL: string;
+ message: DiscordWebhookMessage | string;
+ }) {
+ const { webhookURL, message } = config;
+
+ const payload: DiscordWebhookMessage = typeof message === "string"
+ ? { content: message }
+ : message;
+
+ await httpsRequest(
+ webhookURL,
+ Buffer.from(JSON.stringify(payload)),
+ "POST",
+ false,
+ {
+ headers: {
+ "Content-Type": "application/json",
+ },
+ }
+ );
+ }
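
A hedged usage sketch of the new sendDiscordMessage helper, based only on the signature and interfaces defined above; the import path and webhook URL are placeholders:

```ts
import { sendDiscordMessage } from "./discord"; // assumed relative path

async function notifyDeployFinished() {
    await sendDiscordMessage({
        webhookURL: "https://discord.com/api/webhooks/<id>/<token>", // placeholder
        message: {
            content: "Deploy finished",
            embeds: [{
                title: "querysub 0.328.0",
                description: "Rolled out to all machines.",
                color: 0x2ecc71, // green
            }],
        },
    });
}

// A bare string is also accepted and is wrapped as { content: message }:
void sendDiscordMessage({
    webhookURL: "https://discord.com/api/webhooks/<id>/<token>", // placeholder
    message: "Error digest ready",
});
```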
package/src/fs.ts CHANGED
@@ -69,4 +69,13 @@ export async function* readDirRecursive(dir: string): AsyncGenerator<string> {
  } catch { }
  }
  } catch { }
+ }
+
+ export async function fsExistsAsync(path: string) {
+ try {
+ await fs.promises.stat(path);
+ return true;
+ } catch {
+ return false;
+ }
  }
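
A small hedged usage sketch of the new fsExistsAsync helper; the import path for the helper is assumed:

```ts
import * as fs from "fs";
import { fsExistsAsync } from "./fs"; // assumed relative path

// Check for a file before reading it, instead of letting fs.promises.stat throw.
async function readConfigIfPresent(path: string): Promise<string | undefined> {
    if (!(await fsExistsAsync(path))) return undefined;
    return fs.promises.readFile(path, "utf8");
}
```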
package/src/functional/SocketChannel.ts CHANGED
@@ -55,6 +55,7 @@ export class SocketChannel<T> {
  }
 
  private watchAllNodes = lazy(async () => {
+ // NOTE: By watching instead of having nodes broadcast, it naturally prevents non-public servers from sending messages to public servers. This is really nice, and helps keep non-public servers (development servers) actually non-public...
  watchNodeIds((nodeIds) => {
  for (let nodeId of nodeIds) {
  void errorToUndefinedSilent(this.controller.nodes[nodeId]._internal_watchMessages());
@@ -68,4 +69,12 @@ export class SocketChannel<T> {
  this.localWatchers.delete(callback);
  };
  }
+ // NOTE: We also get notifications for watching on all nodes, which should be fine...
+ public async watchSingleNode(nodeId: string, callback: (message: T) => void) {
+ await this.controller.nodes[nodeId]._internal_watchMessages();
+ this.localWatchers.add(callback);
+ return () => {
+ this.localWatchers.delete(callback);
+ };
+ }
  }
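
A hedged sketch of how the new watchSingleNode API might be used, assuming the visible signature (it resolves to an unwatch function); the channel instance, import path, and node id are placeholders:

```ts
import { SocketChannel } from "./SocketChannel"; // assumed relative path

// Watch messages from one known node (e.g. the controller) and later stop.
async function watchController<T>(channel: SocketChannel<T>, controllerNodeId: string) {
    const unwatch = await channel.watchSingleNode(controllerNodeId, (message) => {
        console.log("message from controller", message);
    });
    // ... later, stop receiving messages:
    unwatch();
}
```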
package/src/functional/throttleRender.ts ADDED
@@ -0,0 +1,134 @@
+
+
+ // NOTE: In many cases we don't know if we want to throttle, or how long we want to throttle for, until we start the watcher, so this has to be done inside our render, instead of in ProxyWatcher.
+
+ import { cache } from "socket-function/src/caching";
+ import { proxyWatcher, SyncWatcher } from "../2-proxy/PathValueProxyWatcher";
+ import { qreact } from "../4-dom/qreact";
+ import { onNextPaint } from "./onNextPaint";
+
+ // Throttles calls that have the same throttleKey
+ /** Used near the start of your render, like so:
+ if (throttleRender({ key: "NodeControls", frameDelay: 0, frameDelaySmear: 60 })) return undefined;
+ - Doesn't break watches, so debug tools will still show you are watching them
+ - Doesn't break the rendered output (uses the last fully rendered output)
+ - Pauses rendering until the throttle finishes, then forcefully renders once.
+ - Always delays by at least 1 frame.
+ */
+ export function throttleRender(config: {
+ // Throttles are smeared per key
+ key: string;
+ // We pick a larger value in this range as calls with the same throttleKey overlap, allowing you to smear a large number of changes over a larger period of time, BUT also waiting less time if there are few changes!
+ // - Throttles won't necessarily finish in the same order you call them (we have to wrap around in the range after we hit the end, so later calls might re-trigger earlier).
+ frameDelay: number;
+ // Range is [frameDelay, frameDelay + smear]
+ frameDelaySmear?: number;
+ // Delays for additional frames. E.g., set frameDelay to 0, smear to 60, and count to 30: the first delay is 0, then 30, then 60.
+ frameDelayCount?: number;
+ }): boolean {
+ let watcher = proxyWatcher.getTriggeredWatcher();
+ // Never throttle the first render, as that would be noticeable and always unintended
+ if (watcher.syncRunCount === 0) return false;
+ let { schedule, inSchedule, isTriggered, runScheduler } = throttleManager(config.key);
+ if (isTriggered(watcher)) {
+ //console.log(`Triggering throttle render for ${watcher.debugName} on frame ${getFrameNumber()}`);
+ return false;
+ }
+ runScheduler();
+ if (inSchedule.has(watcher)) {
+ //console.log(`Skipping throttle render for ${watcher.debugName} because it is already in the schedule`);
+ proxyWatcher.reuseLastWatches();
+ qreact.cancelRender();
+ return true;
+ }
+ inSchedule.add(watcher);
+ //console.log(`Adding throttle render for ${watcher.debugName}`);
+
+
+ let count = config.frameDelayCount || 1;
+ let smear = config.frameDelaySmear ?? 0;
+ let curIndex = config.frameDelay;
+ let endIndex = config.frameDelay + smear;
+ let targetFillCount = 0;
+ // Annoying algorithm to find the lowest slot available...
+ while (true) {
+ let cur = schedule[curIndex];
+ if ((cur?.length || 0) === targetFillCount) {
+ schedule[curIndex] = schedule[curIndex] || [];
+ schedule[curIndex]!.push(watcher);
+ for (let i = 1; i < count; i++) {
+ schedule[curIndex + i] = schedule[curIndex + i] || [];
+ schedule[curIndex + i]!.push("delay");
+ }
+ break;
+ }
+ curIndex++;
+ if (curIndex > endIndex) {
+ curIndex = config.frameDelay;
+ targetFillCount++;
+ }
+ }
+
+ proxyWatcher.reuseLastWatches();
+ qreact.cancelRender();
+ return true;
+ }
+
+ let throttleManager = cache((key: string) => {
+ let schedule: ((SyncWatcher | "delay")[] | undefined)[] = [];
+ let inSchedule = new Set<SyncWatcher>();
+ let triggered = new Set<SyncWatcher>();
+ let runningScheduler = false;
+ function isTriggered(watcher: SyncWatcher) {
+ return triggered.has(watcher);
+ }
+
+ return {
+ schedule,
+ inSchedule,
+ isTriggered,
+ runScheduler: () => {
+ if (runningScheduler) return;
+ runningScheduler = true;
+ void onNextPaint().finally(runNextTick);
+ function runNextTick() {
+ let next = schedule.shift();
+ void onNextPaint().finally(() => {
+ triggered.clear();
+ });
+ if (next) {
+ for (let watcher of next) {
+ if (watcher === "delay") continue;
+ inSchedule.delete(watcher);
+ triggered.add(watcher);
+ //console.log(`Triggering throttle render for ${watcher.debugName}`);
+ watcher.explicitlyTrigger({
+ paths: new Set(),
+ pathSources: new Set(),
+ newParentsSynced: new Set(),
+ extraReasons: ["throttleRender trigger"],
+ });
+ }
+ }
+ if (schedule.length > 0) {
+ void onNextPaint().finally(runNextTick);
+ } else {
+ runningScheduler = false;
+ }
+ }
+ },
+ };
+ });
+
+
+
+ let rendersInFrame = 0;
+ export function countRendersInFrame() {
+ if (rendersInFrame === 0) {
+ void onNextPaint().finally(() => {
+ //console.log(`Render in frame ${rendersInFrame}`);
+ rendersInFrame = 0;
+ });
+ }
+ rendersInFrame++;
+ }
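
A hedged usage sketch of throttleRender, following the call shown in its doc comment; the component and render body here are placeholders and only the throttleRender call itself comes from this file:

```ts
import { throttleRender } from "./throttleRender"; // assumed relative path

// Placeholder render body; in real code this would build the component's output.
function renderNodeControls(): unknown {
    return { type: "div", children: "node controls" };
}

function NodeControlsList(): unknown {
    // After the first render, re-renders for this key are smeared across up to 60 frames.
    // throttleRender reuses the last watches and cancels this render, then forces one
    // render when the scheduled frame arrives.
    if (throttleRender({ key: "NodeControlsList", frameDelay: 0, frameDelaySmear: 60 })) {
        return undefined;
    }
    return renderNodeControls();
}
```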