querysub 0.7.0 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -2
- package/src/-a-auth/certs.ts +8 -0
- package/src/-g-core-values/NodeCapabilities.ts +2 -2
- package/src/0-path-value-core/PathValueCommitter.ts +1 -1
- package/src/2-proxy/PathValueProxyWatcher.ts +32 -45
- package/src/3-path-functions/PathFunctionHelpers.ts +73 -16
- package/src/3-path-functions/PathFunctionRunner.ts +22 -6
- package/src/3-path-functions/deployMain.ts +3 -45
- package/src/3-path-functions/pathFunctionLoader.ts +44 -7
- package/src/3-path-functions/syncSchema.ts +7 -1
- package/src/4-dom/qreact.tsx +5 -2
- package/src/4-querysub/QuerysubController.ts +2 -0
- package/src/4-querysub/permissions.ts +26 -10
- package/src/4-querysub/querysubPrediction.ts +2 -2
- package/src/5-diagnostics/qreactDebug.tsx +2 -2
- package/src/diagnostics/logs/diskLogger.ts +37 -33
- package/src/diagnostics/logs/diskShimConsoleLogs.ts +16 -11
- package/src/3-path-functions/tests/functionsTest.ts +0 -135
package/package.json CHANGED

```diff
@@ -1,6 +1,6 @@
 {
   "name": "querysub",
-  "version": "0.
+  "version": "0.9.0",
   "main": "index.js",
   "license": "MIT",
   "note1": "note on node-forge fork, see https://github.com/digitalbazaar/forge/issues/744 for details",
@@ -24,7 +24,7 @@
   "node-forge": "https://github.com/sliftist/forge#e618181b469b07bdc70b968b0391beb8ef5fecd6",
   "pako": "^2.1.0",
   "preact": "^10.11.3",
-  "socket-function": "^0.
+  "socket-function": "^0.34.0",
   "terser": "^5.31.0",
   "typesafecss": "^0.6.3",
   "yaml": "^2.5.0",
```
package/src/-a-auth/certs.ts CHANGED

```diff
@@ -447,6 +447,14 @@ export function decodeNodeId(nodeId: string): NodeIdParts | undefined {
         return undefined;
     }
     let parts = locationObj.address.split(".");
+    if (nodeId.startsWith("127-0-0-1.") && parts.length === 3) {
+        return {
+            threadId: "",
+            machineId: parts.at(-3) || "",
+            domain: parts.slice(-2).join("."),
+            port: locationObj.port,
+        };
+    }
     if (parts.length < 4) {
         return undefined;
     }
```
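The new branch above lets `decodeNodeId` accept loopback node ids whose address has only three parts instead of rejecting them. A minimal standalone sketch of that address-splitting logic, with hypothetical id and port values (the real function also validates longer addresses and other certificate fields):

```ts
// Simplified stand-in for the new loopback branch; not the package's full decodeNodeId.
type NodeIdParts = { threadId: string; machineId: string; domain: string; port: number };

function decodeLoopbackAddress(nodeId: string, address: string, port: number): NodeIdParts | undefined {
    const parts = address.split(".");
    // New in 0.9.0: a three-part "machineId.domain.tld" address on a 127-0-0-1 node id is accepted.
    if (nodeId.startsWith("127-0-0-1.") && parts.length === 3) {
        return {
            threadId: "",
            machineId: parts.at(-3) || "",
            domain: parts.slice(-2).join("."),
            port,
        };
    }
    return undefined; // longer addresses keep the original >= 4 part handling
}

// Example with hypothetical values:
console.log(decodeLoopbackAddress("127-0-0-1.machine-a.example.com", "machine-a.example.com", 4443));
// => { threadId: "", machineId: "machine-a", domain: "example.com", port: 4443 }
```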
package/src/-g-core-values/NodeCapabilities.ts CHANGED

```diff
@@ -54,11 +54,11 @@ export async function getControllerNodeIdList(
 ): Promise<string[]> {
     let nodeIdsToTest = await getAllNodeIds();
     let passedNodeIds = new Set<string>();
-
+    await Promise.all(nodeIdsToTest.map(async nodeId => {
         if (await doesNodeExposeController(nodeId, controller)) {
             passedNodeIds.add(nodeId);
         }
-    }
+    }));
    return Array.from(passedNodeIds);
 }
 
```
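This hunk swaps a sequential per-node `await` loop for `Promise.all`, so all controller checks run concurrently. A self-contained sketch of the pattern, with a stand-in `check` callback in place of `doesNodeExposeController`:

```ts
// Sketch of the parallelization change; "check" is a hypothetical stand-in.
async function filterNodesParallel(
    nodeIds: string[],
    check: (nodeId: string) => Promise<boolean>,
): Promise<string[]> {
    const passed = new Set<string>();
    // Every check starts immediately and runs concurrently; the Set is only
    // mutated after each check resolves, so there is no interleaving hazard.
    await Promise.all(nodeIds.map(async nodeId => {
        if (await check(nodeId)) {
            passed.add(nodeId);
        }
    }));
    return Array.from(passed);
}
```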
package/src/0-path-value-core/PathValueCommitter.ts CHANGED

```diff
@@ -71,7 +71,7 @@ class PathValueCommitter {
     // - Check the caller proxyWatcher startTime vs lastSyncTime, to see how long it took from start to sync
     //      (assuming the caller is a commitFunction, and not just a general watcher. If it is a general
     //      watcher it might have taken a really long time to run, in which case check the watchFunction).
-    let message = `
+    let message = `MAX_CHANGE_AGE EXCEEDED! Cannot commit write, that is before the max age ${pathValue.time.time} < ${maxAge}. Acceping this write would result in changes in the past that wouldn't propagate correctly`;
     console.error(red(message));
     debugbreak(2);
     debugger;
```
package/src/2-proxy/PathValueProxyWatcher.ts CHANGED

```diff
@@ -15,22 +15,22 @@ import { ClientWatcher, WatchSpecData, clientWatcher } from "../1-path-client/pa
 import { createPathValueProxy, getProxyPath, isValueProxy, isValueProxy2 } from "./pathValueProxy";
 import { authorityStorage, compareTime, ReadLock, epochTime, getNextTime, MAX_ACCEPTED_CHANGE_AGE, PathValue, Time, getCreatorId } from "../0-path-value-core/pathValueCore";
 import { runCodeWithDatabase, rawSchema } from "./pathDatabaseProxyBase";
-import { CallSpec, DEPTH_TO_DATA, MODULE_INDEX, getCurrentCall } from "../3-path-functions/PathFunctionRunner";
-import { interceptCalls, runCall } from "../3-path-functions/PathFunctionHelpers";
 import { LOCAL_DOMAIN } from "../0-path-value-core/PathController";
 import debugbreak from "debugbreak";
-import { FunctionMetadata, getSchemaObject, inlineNestedCalls } from "../3-path-functions/syncSchema";
 import { pathValueCommitter } from "../0-path-value-core/PathValueController";
 import { pathValueSerializer } from "../-h-path-value-serialize/PathValueSerializer";
-import { isClient } from "../config2";
-import { isDeploy } from "../3-path-functions/deployCheck";
 import { LOCAL_DOMAIN_PATH } from "../0-path-value-core/NodePathAuthorities";
-import { PermissionsCheck } from "../4-querysub/permissions";
 import { registerPeriodic } from "../diagnostics/periodic";
 import { remoteWatcher } from "../1-path-client/RemoteWatcher";
 import { Schema2, Schema2Fncs } from "./schema2";
 import { getDomain } from "../config";
-
+
+import type { CallSpec } from "../3-path-functions/PathFunctionRunner";
+import type { FunctionMetadata } from "../3-path-functions/syncSchema";
+
+import { DEPTH_TO_DATA, MODULE_INDEX, getCurrentCall, getCurrentCallObj } from "../3-path-functions/PathFunctionRunner";
+import { inlineNestedCalls } from "../3-path-functions/syncSchema";
+import { interceptCalls, runCall } from "../3-path-functions/PathFunctionHelpers";
 
 // TODO: Break this into two parts:
 // 1) Run and get accesses
@@ -334,6 +334,7 @@ export function doProxyOptions<T>(options: Partial<WatcherOptions<any>>, callbac
 // the dependent call, and find the key exists again, and the only difference will
 // be a bit of lag, and value flicker.
 export const specialObjectWriteValue = "_specialObjectWriteValue_16c4c3bb43f24111976a2681c972f6f4";
+export const specialObjectWriteSymbol = Symbol("specialObjectWriteSymbol");
 // Values that don't add another proxy layer
 export function isTransparentValue(value: unknown) {
     return (
@@ -360,11 +361,6 @@ export type PermissionsChecker = {
 };
 
 
-const specialObjectWriteDepth = lazy(() => {
-    if (isDeploy()) return MODULE_INDEX;
-    return DEPTH_TO_DATA;
-});
-
 class SyncWatcherTag { }
 
 export class PathValueProxyWatcher {
@@ -406,7 +402,7 @@ export class PathValueProxyWatcher {
             throw new Error(`Tried to read a non-local path in a "noLocks" watcher, ${watcher.debugName}, path ${pathStr}`);
         }
 
-        if (watcher.permissionsChecker
+        if (watcher.permissionsChecker) {
             if (!watcher.permissionsChecker.checkPermissions(pathStr).allowed) {
                 if (
                     !watcher.hasAnyUnsyncedAccesses()
@@ -571,6 +567,10 @@ export class PathValueProxyWatcher {
                 }
            }
        }
+        if (value === specialObjectWriteSymbol) {
+            value = specialObjectWriteValue;
+            allowSpecial = true;
+        }
 
        // Escape specialObjectWriteValue-like strings
        // - Check for .startsWith, so we change all values. If we just added when it was ===, then
@@ -644,7 +644,7 @@ export class PathValueProxyWatcher {
        let parentPathStr = pathStr;
        while (true) {
            parentPathStr = getParentPathStr(parentPathStr);
-            if (getPathDepth(parentPathStr) <
+            if (getPathDepth(parentPathStr) < DEPTH_TO_DATA) break;
            // We don't need to check all parents, as if any value is set, all parents will
            // likely have their specialObjectWriteValue set, due to a previous run of this function!
            if (watcher.pendingWrites.has(parentPathStr)) break;
@@ -871,13 +871,6 @@ export class PathValueProxyWatcher {
        let runLeeway = MAX_ACCEPTED_CHANGE_AGE * 0.8;
        let runCutoffTime = Date.now() - runLeeway;
        if (options.runAtTime && options.runAtTime.time < runCutoffTime) {
-            // let timeSinceStart = Date.now() - startTime;
-            // // If we JUST started, we are just clearing out old function calls, so don't break on the error
-            // // (Run leeway time + 60s)
-            // if (timeSinceStart > 1000 * 60 + runLeeway) {
-            //     debugbreak(2);
-            //     debugger;
-            // }
            let message = `MAX_CHANGE_AGE_EXCEEDED! Cannot run watcher at time ${options.runAtTime.time} because it is older than the cutOff time of ${runCutoffTime}. Writing this far in the past would break things, and might be rejected by other authorities due to being too old.`;
            console.error(red(message));
            // NOTE: We could also adjust the to be more recent, to allow it to be commited anyway,
@@ -1704,8 +1697,8 @@ export class PathValueProxyWatcher {
    }
 }
 
-// domainName => moduleId => schema
-let schemas = new Map<string, Map<string, Schema2
+// gitHash => domainName => moduleId => schema
+let schemas = new Map<string, Map<string, Map<string, Schema2>>>();
 
 // NOTE: We hardcode knowledge of how module data is nested to handle schemas. This... isn't great,
 // but it makes the code faster, and if we change how module data is nested it will break
@@ -1727,28 +1720,13 @@ function getBaseMatchingSchema(pathStr: string): {
    schema: Schema2;
    nestedPath: string[];
 } | undefined {
-
-    // So... the current call, need to register the hash used for the call. This needs to be
-    // used to prefer that schema from that same hash.
-    // - And if we ever do cross repo calls... we need to store multiple hashes in the call
-    // (or cascade hashes, etc, but we need to know all the hashes, and then use them here!)
-    //todonext;
-    // Ah, it's not quite so easy. So... pathFunctionLoader knows the hash on load (or that it's local),
-    // but this information is not maintained when we are making the call (writeFunctionCall). BUT,
-    // when we are in the call (unless it's a test call), we just called getModuleFromConfig
-    // so we aren't too far away from knowing the hash, and passing it on.
-    // - I think... we should return the hash, and then request it in overrideCurrentCall?
-    //todonext;
-    // Permissions checks are annoying, but... they DO call getModuleFromConfig at some time.
-    // ONLY managementPages.tsx does not actually have a module.
-    //todonext;
-    // Oh, it looks like managementPages.tsx can't operate the way it does. Hmm...
+    let gitHash = getCurrentCallObj()?.fnc.gitRef || "ambient";
 
    if (_noAtomicSchema) return undefined;
    if (getPathIndex(pathStr, 3) !== "Data") return undefined;
    let domain = getPathIndex(pathStr, 0)!;
    let moduleId = getPathIndex(pathStr, 2)!;
-    let schema = schemas.get(domain)?.get(moduleId);
+    let schema = schemas.get(gitHash)?.get(domain)?.get(moduleId);
    if (!schema) return undefined;
    let nestedPathStr = getPathSuffix(pathStr, 4);
    let nestedPath = getPathFromStr(nestedPathStr);
@@ -1759,15 +1737,19 @@ export function registerSchema(config: {
    schema: Schema2;
    domainName: string;
    moduleId: string;
+    gitHash: string;
 }) {
-
-
+    const { schema, domainName, moduleId, gitHash } = config;
+    let gitSchemas = schemas.get(gitHash);
+    if (!gitSchemas) {
+        schemas.set(gitHash, gitSchemas = new Map());
+    }
 
-    let domainSchemas =
+    let domainSchemas = gitSchemas.get(domainName);
    if (!domainSchemas) {
-
+        gitSchemas.set(domainName, domainSchemas = new Map());
    }
-    domainSchemas.set(
+    domainSchemas.set(moduleId, schema);
 }
 
 export let schemaPrefixes: { len: number; prefixes: Map<string, Schema2> }[] = [];
@@ -1780,6 +1762,11 @@ export function registerSchemaPrefix(config: {
    if (!list) {
        schemaPrefixes.push(list = { len, prefixes: new Map() });
    }
+    if (isNode()) {
+        if (list.prefixes.has(config.prefixPathStr)) {
+            throw new Error(`Prefix matches only work due to only having one version of the code ever load (except for during hot reloading development clientside). If we try to render multiple versions of the same code serverside (ex, because we pre-loaded new code during a deploy), it will break, as we can't distinguish them. ADD GIT HASH TO PREFIX SCHEMAS TO FIX THIS!`);
+        }
+    }
    list.prefixes.set(config.prefixPathStr, config.schema);
 }
 export function unregisterSchemaPrefix(config: {
```
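The schema registry above gains a git-hash dimension: lookups now go gitHash → domainName → moduleId instead of domainName → moduleId. A simplified sketch of how that nested-`Map` registry behaves (schema type reduced to a plain object, hash and domain values hypothetical):

```ts
// Simplified model of the registry; "Schema" stands in for the package's Schema2.
type Schema = object;

const schemas = new Map<string, Map<string, Map<string, Schema>>>();

function registerSchema(gitHash: string, domainName: string, moduleId: string, schema: Schema) {
    let gitSchemas = schemas.get(gitHash);
    if (!gitSchemas) schemas.set(gitHash, gitSchemas = new Map());
    let domainSchemas = gitSchemas.get(domainName);
    if (!domainSchemas) gitSchemas.set(domainName, domainSchemas = new Map());
    domainSchemas.set(moduleId, schema);
}

function lookupSchema(gitHash: string, domainName: string, moduleId: string): Schema | undefined {
    // Falls through to undefined if any level is missing, just like
    // schemas.get(gitHash)?.get(domain)?.get(moduleId) in the diff.
    return schemas.get(gitHash)?.get(domainName)?.get(moduleId);
}

// Two git hashes can now register the same domain/module pair without clobbering each other:
registerSchema("abc123", "example.com", "module-1", { v: 1 });
registerSchema("def456", "example.com", "module-1", { v: 2 });
console.log(lookupSchema("abc123", "example.com", "module-1")); // { v: 1 }
```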
package/src/3-path-functions/PathFunctionHelpers.ts CHANGED

```diff
@@ -1,5 +1,5 @@
 import { getOwnMachineId } from "../-a-auth/certs";
-import { atomicObjectWrite, proxyWatcher } from "../2-proxy/PathValueProxyWatcher";
+import { atomicObjectWrite, proxyWatcher, specialObjectWriteSymbol } from "../2-proxy/PathValueProxyWatcher";
 import { pathValueCommitter } from "../0-path-value-core/PathValueController";
 import { getNextTime } from "../0-path-value-core/pathValueCore";
 import { FunctionSpec, functionSchema, CallSpec } from "./PathFunctionRunner";
@@ -10,25 +10,82 @@ import cborx from "cbor-x";
 import { lazy } from "socket-function/src/caching";
 import { secureRandom } from "../misc/random";
 import { onCallPredict } from "../4-querysub/QuerysubController";
+import { isDefined } from "../misc";
+import { blue, green, red } from "socket-function/src/formatting/logColors";
+import { getPathStr2 } from "../path";
+import { sort } from "socket-function/src/misc";
 const cborxInstance = lazy(() => new cborx.Encoder({ structuredClone: true }));
 
-
-
-
-
-
-
-
-    });
-}
-export async function undeployFunction(spec: FunctionSpec) {
-    const { DomainName, ModuleId, FunctionId } = spec;
+
+// NOTE: We could deploy single functions, but... we will almost always be updating all functions at
+// once, because keeping everything on the same git hash reduces a lot of potential bugs.
+export async function replaceFunctions(config: {
+    domainName: string;
+    functions: FunctionSpec[];
+}) {
     await proxyWatcher.commitFunction({
-
-        watchFunction
-
-
+        debugName: "replaceFunctions",
+        watchFunction() {
+            function debugFunction(func: FunctionSpec) {
+                return `${func.DomainName}:${func.FilePath}:${func.FunctionId}`;
+            }
+
+            let { domainName, functions } = config;
+            for (let fnc of functions) {
+                if (fnc.DomainName !== domainName) {
+                    throw new Error(`Tried to deploy function ${fnc.FunctionId} is not in domain ${domainName}, was in ${fnc.DomainName}`);
+                }
+            }
+
+            let base = functionSchema()[domainName].PathFunctionRunner;
+            let previousFunctions = Object.values(base).flatMap(x => Object.values(x.Sources)).filter(isDefined);
+
+            function getFunctionName(func: FunctionSpec) {
+                return getPathStr2(func.ModuleId, func.FunctionId);
+            }
+
+            let currentFunctions = functions;
+
+            let previousHashes = new Set(previousFunctions.map(x => JSON.stringify(x)));
+            let currentHashes = new Set(currentFunctions.map(x => JSON.stringify(x)));
+
+            let prevFunctionNames = new Set(previousFunctions.map(getFunctionName));
+
+            let messages: {
+                text: string;
+                order: number;
+            }[] = [];
+
+            for (let previous of previousFunctions) {
+                let hash = JSON.stringify(previous);
+                if (currentHashes.has(hash)) continue;
+                messages.push({ text: `Removing ${red(debugFunction(previous))}`, order: 3 });
+                base[previous.ModuleId].Sources[previous.FunctionId] = undefined;
+            }
+
+            console.log();
+            for (let func of currentFunctions) {
+                if (previousHashes.has(JSON.stringify(func))) {
+                    messages.push({ text: `Unchanged\t ${debugFunction(func)}`, order: 0 });
+                    continue;
+                }
+                if (!prevFunctionNames.has(getFunctionName(func))) {
+                    messages.push({ text: `Deploying new\t ${green(debugFunction(func))}`, order: 2 });
+                } else {
+                    messages.push({ text: `Deploying\t ${blue(debugFunction(func))}`, order: 1 });
+                }
+                base[func.ModuleId] = specialObjectWriteSymbol as any;
+                base[func.ModuleId].Sources[func.FunctionId] = atomicObjectWrite(func);
+            }
+
+            sort(messages, x => -x.order);
+            for (let message of messages) {
+                console.log(message.text);
+            }
+        }
     });
+
+    await pathValueCommitter.waitForValuesToCommit();
 }
 
 export const writeCall = {
```
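`replaceFunctions` classifies each function by comparing `JSON.stringify` hashes and names between the previous and next deployments. A stripped-down sketch of that classification (simplified `Spec` type; in the real code an "updated" function is first cleared and then rewritten, so it also shows up in the removal pass):

```ts
// Hypothetical, minimal Spec type just for the sketch.
type Spec = { ModuleId: string; FunctionId: string; source: string };

function classify(previous: Spec[], next: Spec[]) {
    const name = (s: Spec) => `${s.ModuleId}/${s.FunctionId}`;
    const prevHashes = new Set(previous.map(s => JSON.stringify(s)));
    const nextHashes = new Set(next.map(s => JSON.stringify(s)));
    const prevNames = new Set(previous.map(name));

    // Previous entries whose exact content is gone (cleared before re-deploying).
    const removed = previous.filter(s => !nextHashes.has(JSON.stringify(s)));
    // Next entries bucketed by whether their content, or only their name, survived.
    const unchanged = next.filter(s => prevHashes.has(JSON.stringify(s)));
    const deployedNew = next.filter(s => !prevHashes.has(JSON.stringify(s)) && !prevNames.has(name(s)));
    const deployed = next.filter(s => !prevHashes.has(JSON.stringify(s)) && prevNames.has(name(s)));
    return { unchanged, deployed, deployedNew, removed };
}

// Example: one unchanged, one updated, one brand new, one removed.
const prev: Spec[] = [
    { ModuleId: "m1", FunctionId: "a", source: "v1" },
    { ModuleId: "m1", FunctionId: "b", source: "v1" },
    { ModuleId: "m2", FunctionId: "c", source: "v1" },
];
const curr: Spec[] = [
    { ModuleId: "m1", FunctionId: "a", source: "v1" },
    { ModuleId: "m1", FunctionId: "b", source: "v2" },
    { ModuleId: "m3", FunctionId: "d", source: "v1" },
];
console.log(classify(prev, curr));
```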
package/src/3-path-functions/PathFunctionRunner.ts CHANGED

```diff
@@ -14,7 +14,6 @@ import { atomic, atomicObjectRead, atomicObjectWrite, doAtomicWrites, doProxyOpt
 import { __getRoutingHash, authorityStorage, compareTime, debugTime, epochTime, getNextTime, isCoreQuiet, MAX_ACCEPTED_CHANGE_AGE, MAX_CHANGE_AGE, PathValue, Time } from "../0-path-value-core/pathValueCore";
 import { getModuleFromSpec, watchModuleHotreloads } from "./pathFunctionLoader";
 import debugbreak from "debugbreak";
-import { Querysub } from "../4-querysub/Querysub";
 import { parseArgs } from "./PathFunctionHelpers";
 import { PERMISSIONS_FUNCTION_ID, getExportPath } from "./syncSchema";
 import { formatTime } from "socket-function/src/formatting/format";
@@ -52,6 +51,7 @@ export type FunctionSpec = {
     exportPathStr: string;
     FilePath: string;
     gitURL: string;
+    /** Ex, the hash */
     gitRef: string;
 };
 
@@ -79,21 +79,31 @@ export type FunctionResult = ({
     runCount: number;
 };
 
-let currentCallSpec: CallSpec | undefined;
+let currentCallSpec: { spec: CallSpec; fnc: FunctionSpec; } | undefined;
 export function getCurrentCall() {
     if (!currentCallSpec) {
         debugbreak(2);
         debugger;
         throw new Error("Not presently in a call, and so cannot get call");
     }
-    return currentCallSpec;
+    return currentCallSpec.spec;
 }
 export function getCurrentCallAllowUndefined() {
+    return currentCallSpec?.spec;
+}
+export function getCurrentCallObj() {
     return currentCallSpec;
 }
-
+/** NOTE: We require the FunctionSpec so we know what has is loaded, to identify the
+ * correct schema. This should be available, but if it isn't... we MIGHT be able
+ * to work around it, if we can be assured the schema won't be used?
+ *   - This is so pre-loading of functions (which might happen without a deploy, if the deploy script
+ *      is killed part of the way through), doesn't change schemas until the deploy actually
+ *      updates the underlying functions.
+ */
+export function overrideCurrentCall<T>(config: { spec: CallSpec; fnc: FunctionSpec; }, code: () => T) {
     let prev = currentCallSpec;
-    currentCallSpec =
+    currentCallSpec = config;
     try {
         return code();
     } finally {
@@ -155,6 +165,7 @@ export class PathFunctionRunner {
 
     // Calls runCall on any calls that match our domain + shard range
     private async startWatching() {
+        const { Querysub } = await import("../4-querysub/Querysub");
         let { shardRange, secondaryShardRange } = this.config;
 
         // We will use PathValueProxyWatcher to watch the paths
@@ -398,6 +409,8 @@ export class PathFunctionRunner {
     }
 
     private async runCall(callPath: CallSpec, functionSpec: FunctionSpec): Promise<void> {
+        const { Querysub } = await import("../4-querysub/Querysub");
+
         const PermissionsChecker = this.config.PermissionsChecker;
         let skipPermissions = <T>(code: () => T) => code();
         if (PermissionsChecker) {
@@ -497,6 +510,9 @@ export class PathFunctionRunner {
         let syncedSpec = skipPermissions(() =>
             atomicObjectRead(syncedModule.Sources[callPath.FunctionId])
         );
+        if (!syncedSpec) {
+            throw new Error(`Function spec not found for ${getDebugName(callPath, functionSpec, true)}`);
+        }
 
         // (We also need to depend on the RIGHT function spec).
         if (
@@ -526,7 +542,7 @@ export class PathFunctionRunner {
 
         let evalTimeStart = Date.now();
         let args = parseArgs(callPath);
-        overrideCurrentCall(callPath, () => {
+        overrideCurrentCall({ spec: callPath, fnc: syncedSpec, }, () => {
             baseFunction(...args);
         });
         evalTime += Date.now() - evalTimeStart;
```
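The `currentCallSpec` rework above stores a `{ spec, fnc }` pair and restores the previous value once the wrapped callback returns. A minimal sketch of that save/restore pattern (types are simplified stand-ins; the approach only works for synchronous callbacks, which is why the real `overrideCurrentCall` takes a plain `() => T`):

```ts
// Simplified stand-ins for CallSpec / FunctionSpec in the package.
type CallContext = { spec: { FunctionId: string }; fnc: { gitRef: string } };

let currentCall: CallContext | undefined;

function withCurrentCall<T>(ctx: CallContext, code: () => T): T {
    const prev = currentCall;
    currentCall = ctx;
    try {
        return code();
    } finally {
        currentCall = prev; // restore even if code() throws
    }
}

function getCurrentCallObj(): CallContext | undefined {
    return currentCall;
}

// Example: the ambient context is only visible inside the callback.
withCurrentCall({ spec: { FunctionId: "f1" }, fnc: { gitRef: "abc123" } }, () => {
    console.log(getCurrentCallObj()?.fnc.gitRef); // "abc123"
});
console.log(getCurrentCallObj()); // undefined again
```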
package/src/3-path-functions/deployMain.ts CHANGED

```diff
@@ -1,24 +1,16 @@
 import "../inject";
 
-import {
-import { functionSchema, FunctionSpec } from "./PathFunctionRunner";
+import { FunctionSpec } from "./PathFunctionRunner";
 import path from "path";
 import { getExportPath, getSchemaObject, PERMISSIONS_FUNCTION_ID } from "./syncSchema";
 import { getModuleRelativePath } from "./syncSchema";
 import * as child_process from "child_process";
-import { proxyWatcher } from "../2-proxy/PathValueProxyWatcher";
-import { green, red } from "socket-function/src/formatting/logColors";
-import { isDefined, isEmpty } from "../misc";
 import fs from "fs";
 import { SocketFunction } from "socket-function/SocketFunction";
 import { getThreadKeyCert } from "../-a-auth/certs";
-import {
-import { pathValueCommitter } from "../0-path-value-core/PathValueController";
+import { replaceFunctions } from "./PathFunctionHelpers";
 import { getPathStr2 } from "../path";
-import { cacheJSONArgsEqual } from "socket-function/src/caching";
 import { setIsDeploy } from "./deployCheck";
-import { ClientWatcher } from "../1-path-client/pathValueClientWatcher";
-import debugbreak from "debugbreak";
 
 import yargs from "yargs";
 import { isPublic } from "../config";
@@ -51,9 +43,6 @@ export async function deployMain() {
     const srcRoot = path.resolve(__dirname + "/../").replaceAll("\\", "/");
 
     let currentFunctions: FunctionSpec[] = [];
-    function debugFunction(func: FunctionSpec) {
-        return `${func.DomainName}:${func.FilePath}:${func.FunctionId}`;
-    }
 
     let gitDir = folderRoot;
     if (!fs.existsSync(gitDir + "/.git")) {
@@ -118,38 +107,7 @@ export async function deployMain() {
         }
     }
 
-
-        debugName: "readPreviousFunctions",
-        watchFunction(): FunctionSpec[] {
-            let base = functionSchema()[domainName].PathFunctionRunner;
-            return Object.values(base).flatMap(x => Object.values(x.Sources)).filter(isDefined);
-        },
-    });
-
-    let previousHashes = new Set(previousFunctions.map(x => JSON.stringify(x)));
-    let currentHashes = new Set(currentFunctions.map(x => JSON.stringify(x)));
-    // BUG: This is incorrect, and breaks when you accidentally reuse a modelId
-    for (let func of currentFunctions) {
-        if (previousHashes.has(JSON.stringify(func))) {
-            console.log(`Unchanged ${debugFunction(func)}`);
-        }
-    }
-
-    for (let previous of previousFunctions) {
-        let hash = JSON.stringify(previous);
-        if (currentHashes.has(hash)) continue;
-        console.log(`Removing ${red(debugFunction(previous))}`);
-        await undeployFunction(previous);
-    }
-
-    console.log();
-    for (let func of currentFunctions) {
-        if (previousHashes.has(JSON.stringify(func))) continue;
-        console.log(`Deploying ${green(debugFunction(func))}`);
-        await deployFunction(func);
-    }
-
-    await pathValueCommitter.waitForValuesToCommit();
+    await replaceFunctions({ domainName, functions: currentFunctions, });
 }
 function getGitURL(gitDir: string) {
     if (yargObj.nogit) return "git@github.com:nogit/nogit.git";
```
package/src/3-path-functions/pathFunctionLoader.ts CHANGED

```diff
@@ -158,10 +158,29 @@ let moduleResolver = async (spec: FunctionSpec) => {
 };
 
 export function isDynamicModule(module: NodeJS.Module): boolean {
-
-
+    return isDynamicModulePath(module.filename);
+}
+export function isDynamicModulePath(path: string): boolean {
+    let parts = path.replaceAll("\\", "/").split("/");
     return parts.includes("node_modules") || parts.includes("synced_repos");
 }
+/** Annoying, and slower than isDynamicModule, but... necessary. We can't expect the user to call isDynamicModule,
+ * and our functions will all resolve to the root on purpose, so... we need to check the callstack!
+ */
+export function isCallerDynamicModule(): boolean {
+    return getCallstackFiles().some(isDynamicModulePath);
+}
+function getCallstackFiles(): string[] {
+    let stack = new Error().stack;
+    if (!stack) return [];
+    let lines = stack.split("\n");
+    let files = lines.map(line => {
+        let match = line.match(/\(([^)]+)\)/);
+        if (!match) return "";
+        return match[1];
+    });
+    return files;
+}
 
 async function getModuleFromSpecBase(
     spec: FunctionSpec
@@ -188,7 +207,18 @@ async function getModuleFromSpecBase(
         path = packagePath + specFilePath;
         deployPath = packagePath + "deploy.ts";
     }
+
     console.log(blue(`require(${JSON.stringify(path)})`));
+
+    // Set functionSpec for the next synchronous evaluation
+    // - This breaks with async require, but... that's mostly just
+    // clientside, so this SHOULD be fine? Maybe... Hopefully...
+    evaluatingFunctionSpec = spec;
+    void Promise.resolve().finally(() => {
+        if (evaluatingFunctionSpec === spec) {
+            evaluatingFunctionSpec = undefined;
+        }
+    });
     try {
         await SocketFunction.ignoreExposeCalls(async () => {
             // Import deploy, which should always exist, and provides a consistent
@@ -201,6 +231,10 @@ async function getModuleFromSpecBase(
         });
     } catch (e: any) {
         throw new Error(`Error when loading function for ${JSON.stringify(path)}:${spec.FunctionId}\n${e.stack}`);
+    } finally {
+        if (evaluatingFunctionSpec === spec) {
+            evaluatingFunctionSpec = undefined;
+        }
     }
     let moduleId = require.resolve(path) || path;
     let module = require.cache[moduleId];
@@ -210,11 +244,7 @@ async function getModuleFromSpecBase(
         throw new Error(`Module not found: ${moduleId} (for ${spec.FunctionId})`);
     }
 
-
-    // Expose a function that goes from NodeJS.Module to FunctionSpec,
-    // via a lookup we control?
-    // - Hmm... we COULD change the module. This is nice, as it is more discoverable.
-    // But... eh... I don't know. Just a lookup is safer...
+    moduleToSpec.set(module, spec);
 
     if (hotReloadPackagePath) {
         hotReloadUnderPath(hotReloadPackagePath);
@@ -223,6 +253,13 @@ async function getModuleFromSpecBase(
     return module;
 }
 
+let evaluatingFunctionSpec: FunctionSpec | undefined;
+
+let moduleToSpec = new Map<NodeJS.Module, FunctionSpec>();
+export function getSpecFromModule(module: NodeJS.Module): FunctionSpec | undefined {
+    return evaluatingFunctionSpec ?? moduleToSpec.get(module);
+}
+
 // Hot reload at or under the path
 const hotReloadUnderPath = cache((path: string) => {
     console.log(magenta(`Hot reloading under path: ${path}`));
```
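The new `isCallerDynamicModule` decides by inspecting the call stack rather than a module object. A standalone sketch of the same stack-parsing approach (V8-style stack frames assumed; frames without a parenthesized location simply yield an empty string and never match):

```ts
// Standalone sketch of the callstack check added above.
function isDynamicModulePath(path: string): boolean {
    const parts = path.replaceAll("\\", "/").split("/");
    return parts.includes("node_modules") || parts.includes("synced_repos");
}

function getCallstackFiles(): string[] {
    const stack = new Error().stack;
    if (!stack) return [];
    // V8 frames look like "    at fn (file:line:col)"; grab the parenthesized part.
    return stack.split("\n").map(line => {
        const match = line.match(/\(([^)]+)\)/);
        return match ? match[1] : "";
    });
}

function isCallerDynamicModule(): boolean {
    return getCallstackFiles().some(isDynamicModulePath);
}

// Example: true when any caller on the stack lives under node_modules or synced_repos.
console.log(isCallerDynamicModule());
```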
package/src/3-path-functions/syncSchema.ts CHANGED

```diff
@@ -18,6 +18,8 @@ import { CALL_PERMISSIONS_KEY } from "../4-querysub/permissionsShared";
 import { LOCAL_DOMAIN } from "../0-path-value-core/PathController";
 import path from "path";
 import { isEmpty } from "../misc";
+import { getSpecFromModule } from "./pathFunctionLoader";
+import { isNode } from "typesafecss";
 
 // This is the the function id which should be used when creating the FunctionSpec (in order to load the module),
 // to access the permissions in the schema.
@@ -265,7 +267,11 @@ export function syncSchema<Schema>(schema?: Schema2): SyncSchemaResult<Schema> {
     }[] = [];
     if (schema) {
         let dataPrefixPath = getPathFromStr(getProxyPath(data));
-
+        let fnc = getSpecFromModule(module);
+        // NOTE: We call syncSchema a lot, so we default the git hash. However, for modules that provide
+        // functions we call as synced functions, we SHOULD have a real hash here.
+        let gitHash = fnc?.gitRef || "ambient";
+        registerSchema({ domainName, moduleId, schema, gitHash });
 
         let gcDefs = Schema2Fncs.getGCObjects(schema);
         for (let { path, gcDelay } of gcDefs) {
```
package/src/4-dom/qreact.tsx CHANGED

```diff
@@ -1,7 +1,6 @@
 import type preact from "preact";
 import { isNode, sort } from "socket-function/src/misc";
 import { isDeploy } from "../3-path-functions/deployCheck";
-import { Querysub } from "../4-querysub/QuerysubController";
 import { PermissionsCheck } from "../4-querysub/permissions";
 import { ClientWatcher, clientWatcher } from "../1-path-client/pathValueClientWatcher";
 import { LOCAL_DOMAIN } from "../0-path-value-core/PathController";
@@ -2319,4 +2318,8 @@ if (isClient()) {
     });
 }
 
-// #endregion
+// #endregion
+
+// NOTE: Import Querysub at the end, so we can export qreact before we require it. That way Querysub
+// can statically access qreact.
+import { Querysub } from "../4-querysub/Querysub";
```
package/src/4-querysub/QuerysubController.ts CHANGED

```diff
@@ -36,6 +36,7 @@ import * as prediction from "./querysubPrediction";
 import { getCallResultPath } from "./querysubPrediction";
 import { pathValueAuthority2 } from "../0-path-value-core/NodePathAuthorities";
 import { diskLog } from "../diagnostics/logs/diskLogger";
+import { assertIsManagementUser } from "../diagnostics/managementPages";
 setFlag(require, "preact", "allowclient", true);
 
 export { Querysub, id };
@@ -489,6 +490,7 @@ class QuerysubControllerBase {
         }
         pathWatcher.unwatchPath({ callback: callerId, ...config });
     }
+
     public async addCall(call: CallSpec) {
         if (Querysub.DEBUG_CALLS) {
             console.log(`[Querysub] addCall @${debugTime(call.runAtTime)}: ${call.DomainName}.${call.ModuleId}.${call.FunctionId}`);
```
package/src/4-querysub/permissions.ts CHANGED

```diff
@@ -1,4 +1,4 @@
-import { cache, cacheLimited } from "socket-function/src/caching";
+import { cache, cacheArgsEqual, cacheLimited } from "socket-function/src/caching";
 import { measureFnc, measureWrap, nameFunction } from "socket-function/src/profiling/measure";
 import { getPathSuffix, getPathDepth, trimPathStrToDepth, getPathFromStr, rootPathStr, getPathIndex, getPathStr1, joinPathStres, appendToPathStr, getPathStr } from "../path";
 import { atomic, atomicObjectRead, isSynced, proxyWatcher } from "../2-proxy/PathValueProxyWatcher";
@@ -34,7 +34,7 @@ const callPermissionsPath = getPathStr1(CALL_PERMISSIONS_KEY);
 */
 export class PermissionsCheck {
     private dead = false;
-
+    private static skippingChecks = false;
 
     public static DEBUG = false;
 
@@ -42,8 +42,12 @@ export class PermissionsCheck {
         setImmediate(() => this.dead = true);
     }
 
-    private perSchema =
-        return new PermissionsCheckSchema(
+    private perSchema = cacheArgsEqual((schema: SchemaObject, fnc: FunctionSpec) => {
+        return new PermissionsCheckSchema(
+            { machineID: this.callerConfig.callerMachineId, IP: this.callerConfig.callerIP },
+            schema,
+            fnc
+        );
     });
 
     private getModulePermissions = cache((domainName: string) => {
@@ -67,7 +71,12 @@ export class PermissionsCheck {
             }
 
             if (!module) return undefined;
-
+            let schema = getSchemaObject(module);
+            if (!schema) return undefined;
+            return {
+                schema,
+                fnc: functionConfig,
+            };
         });
     });
 
@@ -106,10 +115,10 @@ export class PermissionsCheck {
         if (!pathParts.moduleId) return { permissionsPath: trimPathStrToDepth(path, MODULE_INDEX + 1), allowed: false };
         if (!pathParts.rootKey) return { permissionsPath: trimPathStrToDepth(path, DEPTH_TO_DATA), allowed: false };
 
-        const
-        if (!
+        const schemaObj = this.getModulePermissions(pathParts.domainName)(pathParts.moduleId);
+        if (!schemaObj) return { permissionsPath: trimPathStrToDepth(path, MODULE_INDEX + 1), allowed: false };
 
-        let instance = this.perSchema(schema);
+        let instance = this.perSchema(schemaObj.schema, schemaObj.fnc);
         instance.pathPrefix = pathPrefix;
 
         const rootKey = pathParts.rootKey;
@@ -151,7 +160,11 @@ export class PermissionsCheck {
 class PermissionsCheckSchema {
     public pathPrefix = rootPathStr;
 
-    constructor(
+    constructor(
+        private callerConfig: { machineID: string; IP: string },
+        private schema: SchemaObject,
+        private fnc: FunctionSpec,
+    ) { }
     private exampleCall: CallSpec = {
         callerMachineId: this.callerConfig.machineID,
         callerIP: this.callerConfig.IP,
@@ -195,7 +208,10 @@ class PermissionsCheckSchema {
     // so it is important to cache it based on the permissionsPath.
     private checkCache = cacheLimited(1000 * 1000, measureWrap((permissionsPath: string) => {
         let checks = this.getChecks(permissionsPath);
-        let allowed = overrideCurrentCall(
+        let allowed = overrideCurrentCall({
+            spec: this.exampleCall,
+            fnc: this.fnc,
+        }, () => {
             // NOTE: If we read inside of a permissions check we don't want to ALSO check
             // permissions checks on those because:
             // 1) It might infinitely loop
```
package/src/4-querysub/querysubPrediction.ts CHANGED

```diff
@@ -475,7 +475,7 @@ export async function getCallWrites(config: {
     overrides?: PathValue[];
 }) {
     let { call, debugName } = config;
-
+    const { functionSpec } = await proxyWatcher.commitFunction({
         watchFunction: function getModuleConfig() {
             //let moduleConfig = atomicObjectRead(functionSchema()[call.DomainName].PathFunctionRunner[call.ModuleId].Module);
             // if (!(call.DomainName in functionSchema())) {
@@ -516,7 +516,7 @@ export async function getCallWrites(config: {
         overrides: config.overrides,
         nestedCalls: "inline",
         watchFunction() {
-            return overrideCurrentCall(call, () => {
+            return overrideCurrentCall({ spec: call, fnc: functionSpec }, () => {
                 let args = parseArgs(call);
                 return baseFunction(...args);
             });
```
package/src/5-diagnostics/qreactDebug.tsx CHANGED

```diff
@@ -16,7 +16,7 @@ import { PathValueProxyWatcher } from "../2-proxy/PathValueProxyWatcher";
 import { InputLabel, InputLabelURL } from "../library-components/InputLabel";
 import { URLParam } from "../library-components/URLParam";
 import { hotReloadingGuard, isHotReloading, onHotReload } from "socket-function/hot/HotReloadController";
-import { isDynamicModule } from "../3-path-functions/pathFunctionLoader";
+import { isCallerDynamicModule, isDynamicModule } from "../3-path-functions/pathFunctionLoader";
 
 // Map, so hot reloading doesn't break things
 let componentButtons = new Map<string, { title: string, callback: (component: ExternalRenderClass) => void }>();
@@ -25,7 +25,7 @@ export function addComponentButton(config: {
     title: string;
     callback: (component: ExternalRenderClass) => void;
 }) {
-    if (
+    if (isCallerDynamicModule()) return;
     if (!isHotReloading() && componentButtons.has(config.title)) {
         throw new Error(`Component button with title ${config.title} already exists`);
     }
```
package/src/diagnostics/logs/diskLogger.ts CHANGED

```diff
@@ -139,50 +139,54 @@ export function parseLogBuffer(buffer: Buffer): LogObj[] {
 }
 
 function safeCopyObject<T>(obj: T): T {
-
-
-
-
-
-
-
-
-
+    try {
+        const maxFields = 20;
+        let fieldCount = 0;
+        const seen = new WeakSet();
+
+        function copy(value: unknown): unknown {
+            // Handle primitives
+            if (!canHaveChildren(value)) {
+                return value;
+            }
 
-
-
-
-
-
+            // Check for circular references
+            if (seen.has(value as object)) {
+                return null;
+            }
+            seen.add(value as object);
 
-
-
-
-
+            // Check if we've hit the field limit
+            if (fieldCount >= maxFields) {
+                return null;
+            }
+
+            if (Array.isArray(value)) {
+                const result: unknown[] = [];
+                for (const item of value) {
+                    fieldCount++;
+                    if (fieldCount >= maxFields) break;
+                    result.push(copy(item));
+                }
+                seen.delete(value);
+                return result;
+            }
 
-
-            const result: unknown
-            for (const
+            // Handle plain objects
+            const result: Record<string, unknown> = {};
+            for (const key of Object.keys(value as object)) {
                fieldCount++;
                if (fieldCount >= maxFields) break;
-                result
+                result[key] = copy((value as Record<string, unknown>)[key]);
            }
            seen.delete(value);
            return result;
        }
 
-
-
-
-            fieldCount++;
-            if (fieldCount >= maxFields) break;
-            result[key] = copy((value as Record<string, unknown>)[key]);
-        }
-        seen.delete(value);
-        return result;
+        return copy(obj) as any;
+    } catch (e: any) {
+        return { errorCopying: e.messsage } as any;
    }
-
-    return copy(obj) as any;
 }
 function packageLogObj(args: unknown[]): LogObj {
     let logObj: LogObj = {
```
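The rewritten `safeCopyObject` bounds every log payload with a field budget and a `WeakSet` cycle guard. A simplified stand-in showing the same strategy and what a circular reference turns into (this is only the technique, not the package's implementation):

```ts
// Minimal bounded-copy sketch: a WeakSet guards against cycles and a field
// budget caps the output size, keeping log payloads small and serializable.
function boundedCopy(obj: unknown, maxFields = 20): unknown {
    let fieldCount = 0;
    const seen = new WeakSet<object>();
    function copy(value: unknown): unknown {
        if (value === null || typeof value !== "object") return value;
        if (seen.has(value)) return null;           // circular reference
        if (fieldCount >= maxFields) return null;   // over budget
        seen.add(value);
        const out: Record<string, unknown> = Array.isArray(value) ? [] as any : {};
        for (const key of Object.keys(value)) {
            fieldCount++;
            if (fieldCount >= maxFields) break;
            out[key] = copy((value as Record<string, unknown>)[key]);
        }
        seen.delete(value);
        return out;
    }
    return copy(obj);
}

const a: Record<string, unknown> = { name: "a" };
a.self = a; // circular reference
console.log(boundedCopy(a)); // { name: "a", self: null }
```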
package/src/diagnostics/logs/diskShimConsoleLogs.ts CHANGED

```diff
@@ -9,19 +9,24 @@ export function shimConsoleLogs() {
         let console = globalThis.console;
         let originalFnc = console[fncName];
         console[fncName] = (...args: any[]) => {
-
-
-
-
-
-
-
-
-
+            try {
+                if (
+                    args.length > 0
+                    && String(args[0]).trim().length > 0
+                ) {
+                    if (typeof logDisk === "function") {
+                        // Don't call it directly, so we don't get extra line debug context added to this call
+                        // (as it wouldn't be useful, as we really want the caller)
+                        let stopDoubleShim = logDisk;
+                        stopDoubleShim(...args, { type: fncName });
+                    }
                }
+                // Filter out objects added by injectFileLocationToConsole
+                args = args.filter(x => !(canHaveChildren(x) && x["__FILE__"]));
+            } catch {
+                // Some arguments might throw if accessed (as they might be proxies), so
+                // catch and ignore errors
            }
-            // Filter out objects added by injectFileLocationToConsole
-            args = args.filter(x => !(canHaveChildren(x) && x["__FILE__"]));
            return originalFnc(...args);
        };
    }
```
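The shim above wraps each console method, mirrors every call into the disk logger, and swallows any error raised while inspecting arguments so logging can never break the original call. A simplified version of that wrapping pattern, with a generic `sink` standing in for the package's `logDisk`:

```ts
// Simplified console-shim sketch; "sink" is a hypothetical stand-in for logDisk.
function shimConsoleMethod(fncName: "log" | "warn" | "error", sink: (...args: unknown[]) => void) {
    const original = console[fncName].bind(console);
    console[fncName] = (...args: unknown[]) => {
        try {
            if (args.length > 0 && String(args[0]).trim().length > 0) {
                sink(...args, { type: fncName });
            }
        } catch {
            // Arguments might be proxies that throw on access; never let the shim break the call.
        }
        return original(...args);
    };
}

// Example: mirror warnings into an in-memory buffer.
const captured: unknown[][] = [];
shimConsoleMethod("warn", (...args) => captured.push(args));
console.warn("disk almost full");
```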
package/src/3-path-functions/tests/functionsTest.ts

```diff
@@ -1,135 +0,0 @@
-import { SocketFunction } from "socket-function/SocketFunction";
-import { delay } from "socket-function/src/batching";
-import { magenta, red } from "socket-function/src/formatting/logColors";
-import { measureCode } from "socket-function/src/profiling/measure";
-import { getThreadKeyCert } from "../../-a-auth/certs";
-import { logErrors } from "../../errors";
-import { getPathStr1 } from "../../path";
-import { proxyWatcher } from "../../2-proxy/PathValueProxyWatcher";
-import { pathValueCommitter } from "../../0-path-value-core/PathValueController";
-import { quietCoreMode } from "../../0-path-value-core/pathValueCore";
-import { PathFunctionRunner } from "../PathFunctionRunner";
-import * as FunctionRunner from "../PathFunctionRunner";
-import * as test from "../../../../shard-app/src/data";
-import { deployFunction } from "../PathFunctionHelpers";
-
-// async function testCallFunctions() {
-// //ActionsHistory.LOG_ACTION_HISTORY = "caller";
-
-// quietCoreMode();
-// // ClientWatcher.DEBUG_READS = true;
-// // ClientWatcher.DEBUG_WRITES = true;
-// //PathValueProxyWatcher.DEBUG = true;
-// //PathFunctionRunner.DEBUG_CALLS = true;
-
-// await SocketFunction.mount({ port: 0, ...await getThreadKeyCert() });
-
-// let count = 1000 * 1000;
-// let writesBetweenDelays = 500;
-// let delayTime = 1000;
-// // writesBetweenDelays = 10;
-// //count = 1;
-// //count = 0;
-// // 0 + 30 * 4 = 120
-// //count = 1000 * 100;
-// //count = 1000 * 10;
-// //count = 1000;
-// //count = 100;
-// count = 1;
-
-// await delay(1000);
-
-// await measureCode(async function testCallFunctions() {
-
-// const moduleId = "880d9f63-232a-4df7-86b8-eeb711ac3e48";
-// const functionId = "80d87119-68fc-4440-87d9-fcbab00bec22";
-// // Deploy the function
-// await deployFunction({
-// domainName: "querysub.com",
-// moduleId,
-// functionId,
-// filePath: "/test.ts",
-// exportPathStr: getPathStr1("testWrite"),
-// gitRef: "main",
-// gitURL: "https://github.com/sliftist/shard-app.git",
-// });
-
-
-// let lastValue = await proxyWatcher.commitFunction({
-// debugName: "readValue",
-// watchFunction() {
-// return +test.schema()["querysub.com"].testData.test;
-// },
-// });
-// let endValue = lastValue + count;
-
-// console.log(magenta(`Start at ${lastValue}`));
-
-// let triggerReaderCount = 0;
-// // Watch for updates
-// proxyWatcher.createWatcher({
-// watchFunction: function reader() {
-// const testData = test.schema()["querysub.com"].testData;
-// let value = testData.test;
-// lastValue = value as number;
-// triggerReaderCount++;
-// }
-// });
-
-// // Write function calls
-// for (let i = 0; i < count; i++) {
-// FunctionRunner.writeFunctionCall({
-// domainName: "querysub.com",
-// moduleId,
-// functionId,
-// args: ["test"],
-// });
-// if (i % (writesBetweenDelays + 1) === 0) {
-// // Wait a while, so FunctionRunner has a change to actually run the calls.
-// console.log(`Delay to give functions time to run (at ${lastValue})`);
-// await delay(delayTime);
-// console.log(`(at ${lastValue})`);
-// }
-// }
-
-// await pathValueCommitter.waitForValuesToCommit();
-// await delay(1000 * 2);
-
-// while (!(lastValue >= endValue)) {
-// console.log(`Waiting for ${endValue}, value is ${lastValue} (${endValue - lastValue} to go)`);
-// await delay(1000);
-// }
-
-// console.log(magenta(`Finished at ${endValue}`));
-// });
-// console.log(`PathFunctionRunner.RUN_FINISH_COUNT = ${PathFunctionRunner.RUN_FINISH_COUNT}`);
-
-// /*
-// // NOTE: Temporary, to ensure we aren't leaking memory
-// console.log(`Wait for memory to clear`);
-// await delay(MAX_CHANGE_AGE);
-// await delay(MAX_CHANGE_AGE);
-// global.gc?.();
-// console.log(`Done waiting memory to clear`);
-// await writeHeapSnapshot(getSubFolder("heapdumps"));
-// console.log(`Done post heap dump`);
-// */
-
-// process.exit();
-// }
-// if (process.argv.includes("--caller")) {
-// logErrors(testCallFunctions());
-// }
-
-// async function watch() {
-// await SocketFunction.mount({ port: 0, ...await getThreadKeyCert() });
-// proxyWatcher.createWatcher({
-// watchFunction: function reader() {
-// const testData = test.schema()["querysub.com"].testData;
-// console.log(red(`testData.test = ${testData.test}`));
-// }
-// });
-// }
-// if (process.argv.includes("--watch")) {
-// logErrors(watch());
-// }
```