querysub 0.327.0 → 0.329.0
This diff shows the content of publicly available package versions as published to their respective public registries, and is provided for informational purposes only.
- package/bin/error-email.js +8 -0
- package/bin/error-im.js +8 -0
- package/package.json +4 -3
- package/src/-a-archives/archivesBackBlaze.ts +20 -0
- package/src/-a-archives/archivesCborT.ts +52 -0
- package/src/-a-archives/archivesDisk.ts +5 -5
- package/src/-a-archives/archivesJSONT.ts +19 -5
- package/src/-a-archives/archivesLimitedCache.ts +118 -7
- package/src/-a-archives/archivesPrivateFileSystem.ts +3 -0
- package/src/-g-core-values/NodeCapabilities.ts +26 -11
- package/src/0-path-value-core/auditLogs.ts +4 -2
- package/src/2-proxy/PathValueProxyWatcher.ts +7 -0
- package/src/3-path-functions/PathFunctionRunner.ts +2 -2
- package/src/4-querysub/Querysub.ts +1 -1
- package/src/5-diagnostics/GenericFormat.tsx +2 -2
- package/src/config.ts +15 -3
- package/src/deployManager/machineApplyMainCode.ts +10 -8
- package/src/deployManager/machineSchema.ts +4 -3
- package/src/deployManager/setupMachineMain.ts +3 -2
- package/src/diagnostics/logs/FastArchiveAppendable.ts +86 -53
- package/src/diagnostics/logs/FastArchiveController.ts +11 -2
- package/src/diagnostics/logs/FastArchiveViewer.tsx +205 -48
- package/src/diagnostics/logs/LogViewer2.tsx +78 -34
- package/src/diagnostics/logs/TimeRangeSelector.tsx +8 -0
- package/src/diagnostics/logs/diskLogGlobalContext.ts +5 -4
- package/src/diagnostics/logs/diskLogger.ts +70 -23
- package/src/diagnostics/logs/errorNotifications/ErrorDigestPage.tsx +409 -0
- package/src/diagnostics/logs/errorNotifications/ErrorNotificationController.ts +94 -67
- package/src/diagnostics/logs/errorNotifications/ErrorSuppressionUI.tsx +37 -3
- package/src/diagnostics/logs/errorNotifications/ErrorWarning.tsx +50 -16
- package/src/diagnostics/logs/errorNotifications/errorDigestEmail.tsx +174 -0
- package/src/diagnostics/logs/errorNotifications/errorDigests.tsx +291 -0
- package/src/diagnostics/logs/errorNotifications/errorLoopEntry.tsx +7 -0
- package/src/diagnostics/logs/errorNotifications/errorWatchEntry.tsx +185 -68
- package/src/diagnostics/logs/lifeCycleAnalysis/spec.md +10 -19
- package/src/diagnostics/managementPages.tsx +33 -15
- package/src/email_ims_notifications/discord.tsx +203 -0
- package/src/{email → email_ims_notifications}/postmark.tsx +3 -3
- package/src/fs.ts +9 -0
- package/src/functional/SocketChannel.ts +9 -0
- package/src/functional/throttleRender.ts +134 -0
- package/src/library-components/ATag.tsx +2 -2
- package/src/library-components/SyncedController.ts +3 -3
- package/src/misc.ts +18 -0
- package/src/misc2.ts +106 -0
- package/src/user-implementation/SecurityPage.tsx +11 -5
- package/src/user-implementation/userData.ts +57 -23
- package/testEntry2.ts +14 -5
- package/src/user-implementation/setEmailKey.ts +0 -25
- package/src/{email → email_ims_notifications}/sendgrid.tsx +0 -0

package/bin/error-email.js
ADDED

package/bin/error-im.js
ADDED

package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "querysub",
-  "version": "0.327.0",
+  "version": "0.329.0",
   "main": "index.js",
   "license": "MIT",
   "note1": "note on node-forge fork, see https://github.com/digitalbazaar/forge/issues/744 for details",
@@ -22,7 +22,7 @@
     "js-sha512": "^0.9.0",
     "node-forge": "https://github.com/sliftist/forge#e618181b469b07bdc70b968b0391beb8ef5fecd6",
     "pako": "^2.1.0",
-    "socket-function": "^0.
+    "socket-function": "^0.140.0",
     "terser": "^5.31.0",
     "typesafecss": "^0.22.0",
     "yaml": "^2.5.0",
@@ -56,7 +56,8 @@
     "join-public": "./bin/join-public.js",
     "merge": "./bin/merge.js",
     "addsuperuser": "./bin/addsuperuser.js",
-    "
+    "error-email": "./bin/error-email.js",
+    "error-im": "./bin/error-im.js"
   },
   "devDependencies": {
     "dependency-cruiser": "^12.11.0",

package/src/-a-archives/archivesBackBlaze.ts
CHANGED

@@ -11,6 +11,7 @@ import { formatNumber, formatTime } from "socket-function/src/formatting/format"
 import { blue, green, magenta } from "socket-function/src/formatting/logColors";
 import debugbreak from "debugbreak";
 import { onTimeProfile } from "../-0-hooks/hooks";
+import dns from "dns";

 export function hasBackblazePermissions() {
     return isNode() && fs.existsSync(getBackblazePath());
@@ -382,6 +383,7 @@ const getAPI = lazy(async () => {
         cancelLargeFile,
         getDownloadAuthorization,
         getDownloadURL,
+        apiUrl: auth.apiUrl,
     };
 });

@@ -520,12 +522,30 @@ export class ArchivesBackblaze {
             // Eh... this might be bad, but... I think we just get random 400 errors. If this spams errors,
             // we can remove this line.
             || err.stack.includes(`400 Bad Request`)
+            || err.stack.includes(`"no tomes available"`)
+            || err.stack.includes(`getaddrinfo ENOTFOUND`)
         ) {
             this.log(err.message + " retrying in 5s");
             await delay(5000);
             return this.apiRetryLogic(fnc, retries - 1);
         }

+        if (err.stack.includes(`getaddrinfo ENOTFOUND`)) {
+            let urlObj = new URL(api.apiUrl);
+            let hostname = urlObj.hostname;
+            let lookupAddresses = await new Promise(resolve => {
+                dns.lookup(hostname, (err, addresses) => {
+                    resolve(addresses);
+                });
+            });
+            let resolveAddresses = await new Promise(resolve => {
+                dns.resolve4(hostname, (err, addresses) => {
+                    resolve(addresses);
+                });
+            });
+            console.error(`getaddrinfo ENOTFOUND ${hostname}`, { lookupAddresses, resolveAddresses, apiUrl: api.apiUrl, fullError: err.stack });
+        }
+
         // If the error is that the authorization token is invalid, reset getBucketAPI and getAPI
         // If the error is that the bucket isn't found, reset getBucketAPI
         if (err.stack.includes(`"expired_auth_token"`)) {

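The ENOTFOUND branch above wraps Node's callback-style dns functions in hand-rolled Promises. For reference, a minimal standalone sketch of the same diagnostic using Node's built-in promise-based dns API; the function name and error handling are illustrative, not part of the package:

import dns from "dns";

// Sketch: lookup() goes through getaddrinfo (the code path that failed), while
// resolve4() queries DNS servers directly, so comparing the two hints at
// whether the OS resolver or the upstream DNS is at fault.
async function logDnsDiagnostics(apiUrl: string, err: Error): Promise<void> {
    const hostname = new URL(apiUrl).hostname;
    const lookupAddresses = await dns.promises.lookup(hostname).catch(() => undefined);
    const resolveAddresses = await dns.promises.resolve4(hostname).catch(() => undefined);
    console.error(`getaddrinfo ENOTFOUND ${hostname}`, { lookupAddresses, resolveAddresses, apiUrl, fullError: err.stack });
}
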
package/src/-a-archives/archivesCborT.ts
ADDED

@@ -0,0 +1,52 @@
+import { lazy } from "socket-function/src/caching";
+import { Archives } from "./archives";
+import { decodeCborx, encodeCborx } from "../misc/cloneHelpers";
+import { ArchiveT } from "./archivesJSONT";
+
+export function archiveCborT<T>(archives: () => Archives): ArchiveT<T> {
+    archives = lazy(archives);
+    async function get(key: string) {
+        let buffer = await archives().get(key);
+        if (!buffer) return undefined;
+        return decodeCborx(buffer) as T;
+    }
+    async function set(key: string, value: T) {
+        await archives().set(key, encodeCborx(value));
+    }
+    async function deleteFnc(key: string) {
+        await archives().del(key);
+    }
+    async function keys() {
+        return (await archives().find("")).map(value => value.toString());
+    }
+    async function values() {
+        let keysArray = await keys();
+        let results: T[] = [];
+        await Promise.all(keysArray.map(async key => {
+            let value = await get(key);
+            if (value) {
+                results.push(value);
+            }
+        }));
+        return results;
+    }
+    async function entries(): Promise<[string, T][]> {
+        let keysArray = await keys();
+        let results: [string, T][] = [];
+        await Promise.all(keysArray.map(async key => {
+            let value = await get(key);
+            if (value) {
+                results.push([key, value]);
+            }
+        }));
+        return results;
+    }
+    async function* asyncIterator(): AsyncIterator<[string, T]> {
+        for (let [key, value] of await entries()) {
+            yield [key, value];
+        }
+    }
+    return {
+        get, set, delete: deleteFnc, keys, values, entries, [Symbol.asyncIterator]: asyncIterator
+    };
+}

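archiveCborT mirrors archiveJSONT below but encodes values as CBOR instead of JSON. A rough usage sketch based only on the ArchiveT<T> surface shown above; makeArchives is a hypothetical factory standing in for however callers obtain an Archives instance:

import { Archives } from "./archives";
import { archiveCborT } from "./archivesCborT";

declare function makeArchives(name: string): Archives; // hypothetical

type UserPrefs = { theme: string; fontSize: number };

async function demo() {
    const prefs = archiveCborT<UserPrefs>(() => makeArchives("prefs"));
    await prefs.set("alice", { theme: "dark", fontSize: 14 });
    const alice = await prefs.get("alice"); // UserPrefs | undefined
    console.log(alice?.theme);
    // The [Symbol.asyncIterator] member also makes the archive for-await iterable:
    for await (const [key, value] of prefs) {
        console.log(key, value.theme);
    }
}
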
package/src/-a-archives/archivesDisk.ts
CHANGED

@@ -1,5 +1,5 @@
 import { isNode } from "socket-function/src/misc";
-import { getSubFolder } from "../fs";
+import { fsExistsAsync, getSubFolder } from "../fs";
 import fs from "fs";
 import { blue, red, yellow } from "socket-function/src/formatting/logColors";
 import { measureFnc } from "socket-function/src/profiling/measure";
@@ -65,8 +65,8 @@ class ArchivesDisk {
         // Don't create the drive (and also only add up to the last path, via slicing (0, i)
         for (let i = 1; i < fileNameParts.length; i++) {
             let dir = this.LOCAL_ARCHIVE_FOLDER + fileNameParts.slice(0, i).join("/");
-            if (!
-            fs.
+            if (!await fsExistsAsync(dir)) {
+                await fs.promises.mkdir(dir);
             }
         }
     }
@@ -91,7 +91,7 @@ class ArchivesDisk {
         if (storageDisabled) return;
         fileName = escapeFileName(fileName);
         await this.simulateLag();
-        if (!
+        if (!await fsExistsAsync(this.LOCAL_ARCHIVE_FOLDER + fileName)) return;
         try {
             await fs.promises.unlink(this.LOCAL_ARCHIVE_FOLDER + fileName);
         } catch { }
@@ -222,7 +222,7 @@ class ArchivesDisk {
         let fileNames: string[] = [];
         let folderNames: string[] = [];
         async function readDir(dir: string) {
-            if (!
+            if (!await fsExistsAsync(dir)) {
                 return;
             }
             try {

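fsExistsAsync is imported from package/src/fs.ts, whose +9 lines are not shown in this section. A plausible sketch of such a helper, assuming it is simply a non-throwing async existence check replacing the removed synchronous calls:

import fs from "fs";

// Assumed shape: like fs.existsSync, but async so it does not block the event loop.
export async function fsExistsAsync(path: string): Promise<boolean> {
    try {
        await fs.promises.stat(path);
        return true;
    } catch {
        return false;
    }
}
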
package/src/-a-archives/archivesJSONT.ts
CHANGED

@@ -1,7 +1,7 @@
 import { lazy } from "socket-function/src/caching";
 import { Archives } from "./archives";

-export type ArchiveJSONT<T> = {
+export type ArchiveT<T> = {
     get(key: string): Promise<T | undefined>;
     set(key: string, value: T): Promise<void>;
     delete(key: string): Promise<void>;
@@ -11,12 +11,12 @@ export type ArchiveJSONT<T> = {
     [Symbol.asyncIterator](): AsyncIterator<[string, T]>;
 };

-export function archiveJSONT<T>(archives: () => Archives): ArchiveJSONT<T> {
+export function archiveJSONT<T>(archives: () => Archives): ArchiveT<T> {
     archives = lazy(archives);
     async function get(key: string) {
         let buffer = await archives().get(key);
         if (!buffer) return undefined;
-        return JSON.parse(buffer.toString());
+        return JSON.parse(buffer.toString()) as T;
     }
     async function set(key: string, value: T) {
         await archives().set(key, Buffer.from(JSON.stringify(value)));
@@ -29,11 +29,25 @@ export function archiveJSONT<T>(archives: () => Archives): ArchiveJSONT<T> {
     }
     async function values() {
         let keysArray = await keys();
-
+        let results: T[] = [];
+        await Promise.all(keysArray.map(async key => {
+            let value = await get(key);
+            if (value) {
+                results.push(value);
+            }
+        }));
+        return results;
     }
     async function entries(): Promise<[string, T][]> {
         let keysArray = await keys();
-
+        let results: [string, T][] = [];
+        await Promise.all(keysArray.map(async key => {
+            let value = await get(key);
+            if (value) {
+                results.push([key, value]);
+            }
+        }));
+        return results;
     }
     async function* asyncIterator(): AsyncIterator<[string, T]> {
         for (let [key, value] of await entries()) {

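values() and entries() now share one fan-out read pattern. A distilled sketch of it (names illustrative): every key is fetched concurrently, results land in completion order rather than key order, and the `if (value)` guard also drops legitimately falsy values such as 0 or "".

async function readAll<T>(
    keys: string[],
    get: (key: string) => Promise<T | undefined>,
): Promise<T[]> {
    const results: T[] = [];
    // One get() per key, all in flight at once.
    await Promise.all(keys.map(async key => {
        const value = await get(key);
        if (value) results.push(value); // skips undefined, but also 0, "", false
    }));
    return results;
}
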
package/src/-a-archives/archivesLimitedCache.ts
CHANGED

@@ -1,6 +1,9 @@
-import { formatNumber } from "socket-function/src/formatting/format";
+import { formatNumber, formatTime } from "socket-function/src/formatting/format";
 import { Archives } from "./archives";
 import { cache } from "socket-function/src/caching";
+import { measureFnc } from "socket-function/src/profiling/measure";
+import { batchFunction, runInfinitePoll } from "socket-function/src/batching";
+import { timeInHour } from "socket-function/src/misc";

 interface FileInfo {
     writeTime: number;
@@ -8,36 +11,68 @@ interface FileInfo {
     size: number;
 }

+interface IndexData {
+    files: Record<string, FileInfo>;
+}
+
 class ArchivesLimitedCache {
     private baseArchives: Archives;
     private maxFiles: number;
     private maxSize: number;
     private cache = new Map<string, FileInfo>();
     private initialized = false;
+    private readonly indexPath = ".cache-index.json";

     constructor(baseArchives: Archives, config: { maxFiles: number; maxSize: number }) {
         this.baseArchives = baseArchives;
         this.maxFiles = config.maxFiles;
         this.maxSize = config.maxSize;
         this.initOptionalMethods();
+        this.initPeriodicSync();
     }

     public getDebugName(): string {
         return `limitedCache(${this.maxFiles}files,${Math.round(this.maxSize / (1024 * 1024))}MB)/${this.baseArchives.getDebugName()}`;
     }

-
-
-
-    // Load all files on first access
+    @measureFnc
+    private async rebuildCacheFromFiles(): Promise<void> {
         const allFiles = await this.baseArchives.findInfo("", { type: "files" });
+        const newCache = new Map<string, FileInfo>();
+
+        // Build map of actual files (excluding index file)
         for (const file of allFiles) {
-
+            if (file.path === this.indexPath) continue;
+
+            newCache.set(file.path, {
                 writeTime: file.createTime,
-                accessTime: file.createTime,
+                accessTime: this.cache.get(file.path)?.accessTime ?? file.createTime,
                 size: file.size,
             });
         }
+
+        // Update cache with actual files
+        this.cache.clear();
+        for (const [path, info] of newCache.entries()) {
+            this.cache.set(path, info);
+        }
+    }
+
+    @measureFnc
+    private async ensureInitialized(): Promise<void> {
+        if (this.initialized) return;
+
+        // First try to load from index file
+        await this.loadIndex();
+
+        // If index is empty or missing, fall back to scanning all files
+        if (this.cache.size === 0) {
+            console.log("Index file missing or empty, scanning all files...");
+            await this.rebuildCacheFromFiles();
+            // Save the newly built index
+            await this.saveIndex();
+        }
+
         this.initialized = true;

         // Cleanup if we're already over limits
@@ -83,12 +118,18 @@ class ArchivesLimitedCache {
                 currentFiles--;
             }
         }
+
+        // Trigger index flush if any files were deleted
+        if (currentFiles < totalFiles) {
+            this.triggerIndexFlush();
+        }
     }

     private updateAccessTime(path: string): void {
         const info = this.cache.get(path);
         if (info) {
             info.accessTime = Date.now();
+            this.triggerIndexFlush();
         }
     }

@@ -109,12 +150,14 @@ class ArchivesLimitedCache {
             accessTime: now,
             size: data.length,
         });
+        this.triggerIndexFlush();
     }

     public async del(path: string): Promise<void> {
         await this.ensureInitialized();
         await this.baseArchives.del(path);
         this.cache.delete(path);
+        this.triggerIndexFlush();
     }

     public async getInfo(path: string): Promise<{ writeTime: number; size: number; } | undefined> {
@@ -164,6 +207,74 @@ class ArchivesLimitedCache {
     public getURL?: (path: string) => Promise<string>;
     public getDownloadAuthorization?: (config: { validDurationInSeconds: number }) => Promise<{ bucketId: string; fileNamePrefix: string; authorizationToken: string; }>;

+    private batchedFlushIndex = batchFunction(
+        { delay: 2000 }, // Flush index changes every 2 seconds
+        async () => {
+            // We don't actually need the batched values, just flush the index
+            await this.saveIndex();
+        }
+    );
+
+    private async loadIndex(): Promise<void> {
+        try {
+            const indexBuffer = await this.baseArchives.get(this.indexPath);
+            if (!indexBuffer) {
+                // No existing index, will need to build it
+                return;
+            }
+
+            const indexData: IndexData = JSON.parse(indexBuffer.toString("utf8"));
+
+            // Load the cache from the index
+            this.cache.clear();
+            for (const [path, info] of Object.entries(indexData.files)) {
+                this.cache.set(path, info);
+            }
+
+            console.log(`Loaded index with ${this.cache.size} files`);
+        } catch (error) {
+            console.warn(`Failed to load index file:`, error);
+            // Continue without index, will rebuild
+        }
+    }
+
+    private async saveIndex(): Promise<void> {
+        try {
+            const indexData: IndexData = {
+                files: Object.fromEntries(this.cache.entries())
+            };
+
+            const indexBuffer = Buffer.from(JSON.stringify(indexData), "utf8");
+            await this.baseArchives.set(this.indexPath, indexBuffer);
+        } catch (error) {
+            console.warn(`Failed to save index file:`, error);
+        }
+    }
+
+    private triggerIndexFlush(): void {
+        void this.batchedFlushIndex(undefined);
+    }
+
+    private initPeriodicSync(): void {
+        // Sync index with actual files every hour to prevent drift
+        runInfinitePoll(timeInHour, async () => {
+            await this.syncIndexWithFiles();
+        });
+    }
+
+    private async syncIndexWithFiles(): Promise<void> {
+        try {
+            console.log("Syncing index with actual files...");
+            await this.rebuildCacheFromFiles();
+
+            // Save updated index
+            await this.saveIndex();
+            console.log(`Index synced with ${this.cache.size} actual files`);
+        } catch (error) {
+            console.warn("Failed to sync index with files:", error);
+        }
+    }
+
     private initOptionalMethods(): void {
         this.getURL = this.baseArchives.getURL;
         this.getDownloadAuthorization = this.baseArchives.getDownloadAuthorization;

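The new index is persisted through batchFunction, so bursts of cache mutations collapse into a single saveIndex() call every two seconds. A minimal sketch of that flush pattern using a plain timer in place of socket-function's batchFunction, whose exact semantics are assumed here:

function makeDebouncedFlush(saveIndex: () => Promise<void>, delayMs = 2000): () => void {
    let pending: ReturnType<typeof setTimeout> | undefined;
    return () => {
        if (pending) return; // a flush is already scheduled; coalesce into it
        pending = setTimeout(() => {
            pending = undefined;
            void saveIndex();
        }, delayMs);
    };
}
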
package/src/-a-archives/archivesPrivateFileSystem.ts
CHANGED

@@ -4,6 +4,7 @@ import { Archives } from "./archives";
 import { cache, lazy } from "socket-function/src/caching";
 import { delay } from "socket-function/src/batching";
 import { isDefined } from "../misc";
+import { formatTime } from "socket-function/src/formatting/format";

 let getRootDirectory = lazy(async () => {
     await navigator.storage.persist();
@@ -168,9 +169,11 @@ class ArchivesPrivateFileSystem {
         }

         try {
+            let readDirTime = Date.now();
             // Start from the root directory of our namespace
             const rootDirectory = await this.getOrCreateDirectory("");
             await readDir(rootDirectory, "");
+            console.log(`readDir took ${formatTime(Date.now() - readDirTime)}`);

             let results = config?.type === "folders" ? folderNames : fileNames;
             results = results.filter(name => name.startsWith(prefix));

package/src/-g-core-values/NodeCapabilities.ts
CHANGED

@@ -16,6 +16,9 @@ import { requiresNetworkTrustHook } from "../-d-trust/NetworkTrust2";
 import { isNoNetwork } from "../config";
 import { getDebuggerUrl } from "../diagnostics/listenOnDebugger";
 import { hackDevtoolsWebsocketForward } from "./oneTimeForward";
+import { getOwnMachineId, decodeNodeId, decodeNodeIdAssert } from "../-a-auth/certs";
+import { sort } from "socket-function/src/misc";
+import { getPathStr2 } from "../path";

 let loadTime = Date.now();

@@ -71,25 +74,37 @@ export async function getControllerNodeIdList(
     controller: SocketRegistered<{}>,
 ): Promise<{ nodeId: string; entryPoint: string }[]> {
     let nodeIdsToTest = await getAllNodeIds();
-    let passedNodeIds = new Map<string, string>();
+    let passedNodeIds = new Map<string, { machineId: string; entryPoint: string }>();
     await Promise.all(nodeIdsToTest.map(async nodeId => {
-
-
-        passedNodeIds.set(nodeId,
+        let result = await doesNodeExposeController(nodeId, controller);
+        if (result) {
+            passedNodeIds.set(nodeId, result);
         }
     }));

-    let results = Array.from(passedNodeIds.entries())
-    //
-
-
-
+    let results = Array.from(passedNodeIds.entries());
+    // Prefer localhost connections as they're faster.
+    sort(results, (x) => x[0].startsWith("127-0-0-1.") ? 0 : 1);
+    let lookup = new Map<string, { nodeId: string; entryPoint: string }>();
+    for (let x of results) {
+        let key = getPathStr2(x[1].machineId, decodeNodeIdAssert(x[0]).port.toString());
+        if (key in lookup) continue;
+        lookup.set(key, { nodeId: x[0], entryPoint: x[1].entryPoint });
+    }
+
+    return Array.from(lookup.values());
 }


-export async function doesNodeExposeController(reconnectNodeId: string, controller: SocketRegistered<{}>): Promise<
+export async function doesNodeExposeController(reconnectNodeId: string, controller: SocketRegistered<{}>): Promise<{ machineId: string, entryPoint: string } | false> {
     let exposedControllers = await timeoutToUndefinedSilent(10_000, NodeCapabilitiesController.nodes[reconnectNodeId].getExposedControllers());
-
+    let machineId = await getOwnMachineId();
+    let entryPoint = await new NodeCapabilitiesControllerBase().getEntryPoint();
+
+    if (exposedControllers?.includes(controller._classGuid)) {
+        return { machineId, entryPoint };
+    }
+    return false;
 }

 const startupTime = Date.now();

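getControllerNodeIdList now dedupes nodes per (machineId, port) key, preferring localhost entries by sorting them first and keeping the first entry per key. A generic sketch of that idiom; note the sketch checks membership with Map.has(key), since `key in lookup` on a Map inspects object properties rather than entries:

function dedupePreferring<T>(
    items: T[],
    prefer: (item: T) => number, // lower sorts first, i.e. more preferred
    keyOf: (item: T) => string,
): T[] {
    const sorted = [...items].sort((a, b) => prefer(a) - prefer(b));
    const seen = new Map<string, T>();
    for (const item of sorted) {
        const key = keyOf(item);
        if (!seen.has(key)) seen.set(key, item); // first (most preferred) wins
    }
    return [...seen.values()];
}
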
package/src/0-path-value-core/auditLogs.ts
CHANGED

@@ -3,6 +3,7 @@ import { requiresNetworkTrustHook } from "../-d-trust/NetworkTrust2";
 import { isDevDebugbreak } from "../config";
 import { measureWrap } from "socket-function/src/profiling/measure";
 import { QueueLimited } from "socket-function/src/misc";
+import { isNode } from "typesafecss";

 export type DebugLog = {
     type: string;
@@ -12,7 +13,8 @@ export type DebugLog = {

 // NOTE: For now, this is actually fairly light, so we'll just turn it on by default. It is very useful
 // in debugging synchronization issues.
-
+// NOTE: Clientside it is not quite as light, so... don't turn it on clientside, as we don't really use it anyways.
+let ENABLED_LOGGING = isNode();
 export function enableAuditLogging() {
     ENABLED_LOGGING = true;
     debugLogFnc = debugLogBase;
@@ -25,7 +27,7 @@ export function isDebugLogEnabled() {
     return ENABLED_LOGGING;
 }

-let logHistory = new QueueLimited<DebugLog>(1000 *
+let logHistory = new QueueLimited<DebugLog>(1000 * 500);
 export function getFullLogHistory() {
     return logHistory;
 }

package/src/2-proxy/PathValueProxyWatcher.ts
CHANGED

@@ -35,6 +35,7 @@ import { deepCloneCborx } from "../misc/cloneHelpers";
 import { formatPercent, formatTime } from "socket-function/src/formatting/format";
 import { addStatPeriodic, interceptCalls, onAllPredictionsFinished, onTimeProfile } from "../-0-hooks/hooks";
 import { onNextPaint } from "../functional/onNextPaint";
+import { isAsyncFunction } from "../misc";

 // TODO: Break this into two parts:
 // 1) Run and get accesses
@@ -212,6 +213,9 @@ export interface WatcherOptions<Result> {
     logSyncTimings?: boolean;

     maxLocksOverride?: number;
+
+    // NOTE: The reason there isn't throttle support here is very frequently when you want to throttle one component rendering, it's because you have many components. So you actually want to throttle many components and have them throttle in conjunction with each other, which results in the logic becoming complicated.
+    // - But maybe we should support the single throttle case anyways?
 }

 let harvestableReadyLoopCount = 0;
@@ -972,6 +976,9 @@ export class PathValueProxyWatcher {
     public createWatcher<Result = void>(
         options: WatcherOptions<Result>
     ): SyncWatcher {
+        if (isAsyncFunction(options.watchFunction)) {
+            throw new Error(`Async functions are not supported in watchers. They must run the caller synchronously. You are likely not using Await anyway, so just remove the async and make it a synchronous function. The caller will be called again whenever the data you access changes, And if you are running this to return a result, it will be rerun until all the data you want is synchronized. Watch function: ${options.watchFunction.toString()}`);
+        }
         // NOTE: Setting an order is needed for rendering, so parents render before children. I believe
         // it is generally what we want, so event triggering is consistent, and fits with any tree based
         // watching system. If this causes problems we COULD remove it from here and have just qreact.tsx set it.

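isAsyncFunction comes from ../misc (misc.ts gains +18 lines in this release, not shown here), so its real implementation is unknown. A common way to write such a check, offered purely as an assumption; it detects the async keyword via the AsyncFunction constructor and cannot catch plain functions that merely return a Promise:

function isAsyncFunction(fn: unknown): boolean {
    return typeof fn === "function" && fn.constructor?.name === "AsyncFunction";
}
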
package/src/3-path-functions/PathFunctionRunner.ts
CHANGED

@@ -228,7 +228,7 @@ export class PathFunctionRunner {

 // Rarely we might need to queue a function multiple times, when we are late to receive rejections.
 // HOWEVER, after enough times, we should stop, as we will probably just infinitely queue it.
-const MAX_QUEUE_COUNT =
+const MAX_QUEUE_COUNT = 100;
 let queueLimitCounts = new Map<string, number>();
 // Clear every hour, so we don't leave
 runInfinitePoll(timeInHour, () => queueLimitCounts.clear());
@@ -323,7 +323,7 @@ export class PathFunctionRunner {
         if (limitCount >= MAX_QUEUE_COUNT) {
             // Only error the first time, as we don't need need that many errors
             if (limitCount === MAX_QUEUE_COUNT) {
-                console.error(`
+                console.error(`Tried to requeue a function to run too many times (${limitCount}) for ${getDebugName(callData, functionSpec, true)}. This is NOT due to cascading reads. It might be due to repeated rejections?`);
             }
             continue;
         }

package/src/5-diagnostics/GenericFormat.tsx
CHANGED

@@ -1,4 +1,4 @@
-import { formatDateTime, formatNiceDateTime, formatNumber, formatPercent, formatTime, formatVeryNiceDateTime } from "socket-function/src/formatting/format";
+import { formatDateTime, formatDateTimeDetailed, formatNiceDateTime, formatNumber, formatPercent, formatTime, formatVeryNiceDateTime } from "socket-function/src/formatting/format";
 import { css } from "typesafecss";
 import { canHaveChildren } from "socket-function/src/types";
 import { qreact } from "../4-dom/qreact";
@@ -44,7 +44,7 @@ let formatters: { [formatter in StringFormatters]: (value: unknown) => preact.Co
     number: (value) => d(value, formatNumber(Number(value))),
     percent: (value) => d(value, formatPercent(Number(value))),
     timeSpan: (value) => d(value, formatTime(Number(value))),
-    date: (value) => d(value, formatVeryNiceDateTime(Number(value))),
+    date: (value) => d(value, <span title={formatDateTimeDetailed(Number(value))}>{formatVeryNiceDateTime(Number(value))}</span>),
     error: (value) => d(value, <span class={errorMessage}>{String(value)}</span>),
     toSpaceCase: (value) => d(value, toSpaceCase(String(value))),
     "<Selector>": (value) => d(value, <Selector {...JSON.parse(String(value).slice("<Selector>".length))} />),

package/src/config.ts
CHANGED

@@ -11,6 +11,7 @@ export const serverPort = 11748;
 let yargObj = parseArgsFactory()
     .option("nonetwork", { type: "boolean", desc: `Disables all network requirements. Reduces security, as this means we cannot use real certificates.` })
     .option("domain", { type: "string", desc: `Sets the domain` })
+    .option("emaildomain", { type: "string", desc: `Sets the domain to use for email` })
     .option("client", { type: "boolean", desc: `Drops permissions, acting as an unauthenticated node` })
     .option("authority", { type: "string", desc: `Defines the base paths we are an authority on (the domain is prepended to them). Either a file path to a JSON(AuthorityPath[]), or a base64 representation of the JSON(AuthorityPath[]).` })
     .option("nobreak", { type: "boolean", desc: "Do not break on errors. Safer to set this than to just not set debugbreak, as some places might break without checking debugbreak, but nobreak works at a level where it is always used." })
@@ -20,12 +21,15 @@ let yargObj = parseArgsFactory()
     .option("recovery", { type: "boolean", desc: `Allows any localhost connections to act like a superuser (and a trusted node), to help recover the database (as you need permission to access the snapshot page).` })
     // TODO: The bootstrapper is a single file. Maybe we shouldn't run the entire service just for that. Although... maybe it's fine, as services are light?
     .option("bootstraponly", { type: "boolean", desc: "Don't register as an edge node, so we serve the bootstrap files, but we don't need up to date code because we are not used for endpoints or the UI." })
+    .option("notifyemails", { type: "array", desc: "The emails to notify when errors occur." })
     .argv
     ;
-
-let querysubConfig = lazy((): {
+type QuerysubConfig = {
     domain?: string;
-
+    emaildomain?: string;
+    notifyemails?: string[];
+};
+let querysubConfig = lazy((): QuerysubConfig => {
     if (!isNode()) throw new Error("querysubConfig is only available on the server");
     const path = "./querysub.json";
     if (!fs.existsSync(path)) {
@@ -54,6 +58,14 @@ export function getDomain() {
     return yargObj.domain || querysubConfig().domain || "querysub.com";
 }

+export function getEmailDomain() {
+    return yargObj.emaildomain || querysubConfig().emaildomain || getDomain();
+}
+
+export function getNotifyEmails() {
+    return Array.from(new Set([...(yargObj.notifyemails || []), ...(querysubConfig().notifyemails || [])]));
+}
+
 export function baseIsClient() {
     return !isNode() || yargObj.client;
 }
