querysub 0.433.0 → 0.437.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73) hide show
  1. package/.eslintrc.js +50 -50
  2. package/bin/deploy.js +0 -0
  3. package/bin/function.js +0 -0
  4. package/bin/server.js +0 -0
  5. package/costsBenefits.txt +115 -115
  6. package/deploy.ts +2 -2
  7. package/package.json +1 -1
  8. package/spec.txt +1192 -1192
  9. package/src/-a-archives/archives.ts +202 -202
  10. package/src/-a-archives/archivesDisk.ts +454 -454
  11. package/src/-a-auth/certs.ts +540 -540
  12. package/src/-a-auth/node-forge-ed25519.d.ts +16 -16
  13. package/src/-b-authorities/dnsAuthority.ts +138 -138
  14. package/src/-c-identity/IdentityController.ts +258 -258
  15. package/src/-d-trust/NetworkTrust2.ts +180 -180
  16. package/src/-e-certs/EdgeCertController.ts +252 -252
  17. package/src/-e-certs/certAuthority.ts +201 -201
  18. package/src/-f-node-discovery/NodeDiscovery.ts +640 -640
  19. package/src/-g-core-values/NodeCapabilities.ts +200 -200
  20. package/src/-h-path-value-serialize/stringSerializer.ts +175 -175
  21. package/src/0-path-value-core/PathValueCommitter.ts +468 -468
  22. package/src/0-path-value-core/pathValueCore.ts +2 -2
  23. package/src/2-proxy/PathValueProxyWatcher.ts +2542 -2542
  24. package/src/2-proxy/TransactionDelayer.ts +94 -94
  25. package/src/2-proxy/pathDatabaseProxyBase.ts +36 -36
  26. package/src/2-proxy/pathValueProxy.ts +159 -159
  27. package/src/3-path-functions/PathFunctionRunnerMain.ts +87 -87
  28. package/src/3-path-functions/pathFunctionLoader.ts +516 -516
  29. package/src/3-path-functions/tests/rejectTest.ts +76 -76
  30. package/src/4-deploy/deployCheck.ts +6 -6
  31. package/src/4-dom/css.tsx +29 -29
  32. package/src/4-dom/cssTypes.d.ts +211 -211
  33. package/src/4-dom/qreact.tsx +2799 -2799
  34. package/src/4-dom/qreactTest.tsx +410 -410
  35. package/src/4-querysub/permissions.ts +335 -335
  36. package/src/4-querysub/querysubPrediction.ts +483 -483
  37. package/src/5-diagnostics/qreactDebug.tsx +346 -346
  38. package/src/TestController.ts +34 -34
  39. package/src/bits.ts +104 -104
  40. package/src/buffers.ts +69 -69
  41. package/src/diagnostics/ActionsHistory.ts +57 -57
  42. package/src/diagnostics/listenOnDebugger.ts +71 -71
  43. package/src/diagnostics/periodic.ts +111 -111
  44. package/src/diagnostics/trackResources.ts +91 -91
  45. package/src/diagnostics/watchdog.ts +120 -120
  46. package/src/errors.ts +133 -133
  47. package/src/forceProduction.ts +2 -2
  48. package/src/fs.ts +80 -80
  49. package/src/functional/diff.ts +857 -857
  50. package/src/functional/promiseCache.ts +78 -78
  51. package/src/functional/random.ts +8 -8
  52. package/src/functional/stats.ts +60 -60
  53. package/src/heapDumps.ts +665 -665
  54. package/src/https.ts +1 -1
  55. package/src/library-components/AspectSizedComponent.tsx +87 -87
  56. package/src/library-components/ButtonSelector.tsx +64 -64
  57. package/src/library-components/DropdownCustom.tsx +150 -150
  58. package/src/library-components/DropdownSelector.tsx +31 -31
  59. package/src/library-components/InlinePopup.tsx +66 -66
  60. package/src/misc/color.ts +29 -29
  61. package/src/misc/hash.ts +83 -83
  62. package/src/misc/ipPong.js +13 -13
  63. package/src/misc/networking.ts +1 -1
  64. package/src/misc/random.ts +44 -44
  65. package/src/misc.ts +196 -196
  66. package/src/path.ts +255 -255
  67. package/src/persistentLocalStore.ts +41 -41
  68. package/src/promise.ts +14 -14
  69. package/src/storage/fileSystemPointer.ts +71 -71
  70. package/src/test/heapProcess.ts +35 -35
  71. package/src/zip.ts +15 -15
  72. package/tsconfig.json +26 -26
  73. package/yarnSpec.txt +56 -56
@@ -1,484 +1,484 @@
1
- import { cacheJSONArgsEqual, lazy } from "socket-function/src/caching";
2
- import { MAX_CHANGE_AGE, PathValue, ReadLock, Time, authorityStorage, predictionLockVersion } from "../0-path-value-core/pathValueCore";
3
- import { validStateComputer } from "../0-path-value-core/ValidStateComputer";
4
- import { proxyWatcher, atomicObjectRead, DryRunResult, getCurrentCallCreationProxy } from "../2-proxy/PathValueProxyWatcher";
5
- import { getProxyPath } from "../2-proxy/pathValueProxy";
6
- import { CallSpec, FunctionResult, FunctionSpec, functionSchema, overrideCurrentCall } from "../3-path-functions/PathFunctionRunner";
7
- import { getModuleFromConfig, setGitURLMapping } from "../3-path-functions/pathFunctionLoader";
8
- import { logErrors } from "../errors";
9
- import { getPathFromStr, getPathStr1 } from "../path";
10
- import { Querysub, QuerysubController, QuerysubControllerBase, registerPredictionBlocker, querysubNodeId } from "./QuerysubController";
11
- import { parseArgs } from "../3-path-functions/PathFunctionHelpers";
12
- import { magenta, red } from "socket-function/src/formatting/logColors";
13
- import { pathValueSerializer } from "../-h-path-value-serialize/PathValueSerializer";
14
-
15
- import { setFlag } from "socket-function/require/compileFlags";
16
- import cbor from "cbor-x";
17
- import { FunctionMetadata } from "../3-path-functions/syncSchema";
18
- import { PromiseObj, isNode } from "socket-function/src/misc";
19
- import { isPublic } from "../config";
20
- import { clientWatcher } from "../1-path-client/pathValueClientWatcher";
21
- import { cacheAsyncLimitedJSON } from "../functional/promiseCache";
22
- import { addEpsilons } from "../bits";
23
- import { delay } from "socket-function/src/batching";
24
- setFlag(require, "cbor-x", "allowclient", true);
25
- const cborEncoder = lazy(() => new cbor.Encoder({ structuredClone: true }));
26
-
27
- let onPredictionFinishedCallbacks: Array<(data: { callId: string; result: FunctionResult; functionId: string }) => void> = [];
28
-
29
- export function onPredictionFinished(callback: (data: {
30
- callId: string;
31
- result: FunctionResult;
32
- functionId: string;
33
- }) => void) {
34
- onPredictionFinishedCallbacks.push(callback);
35
- }
36
-
37
- // NOTE: Most functions won't use this, as they should use regular api calls. However,
38
- // as we are using paths for RequireJS, we explicitly need our local (when serverside).
39
- async function getPredictController() {
40
- let controller: QuerysubControllerBase;
41
- if (isNode()) {
42
- // NOTE: If we are on node, then the require WON'T run over the network, so we need to use
43
- // our local module path, not the remote one.
44
- controller = new QuerysubControllerBase();
45
- } else {
46
- let nodeId = await querysubNodeId();
47
- if (!nodeId) throw new Error("No querysub node found");
48
- controller = QuerysubController.nodes[nodeId] as any;
49
- }
50
- return controller;
51
- }
52
- // TODO: I think our use of filePath, moduleId, etc are wrong here? We give pathFunctionLoader the filePath,
53
- // but we really should just give it the moduleId, or... even just avoid calling it altogether, as it doesn't
54
- // do too much for us if we already have the fully resolved path...
55
- // - Although using it DOES allow permissions checks to work nicely, so, eh... maybe it is fine to use pathFunctionLoader?
56
- const addModuleToLoader = cacheJSONArgsEqual(async (spec: FunctionSpec): Promise<true> => {
57
- let controller = await getPredictController();
58
-
59
- let path = await controller.getModulePath({ functionSpec: spec });
60
- setGitURLMapping({ spec, resolvedPath: path });
61
- return true;
62
- });
63
-
64
- const getDevFunctionSpecFromCall = cacheJSONArgsEqual(async (call: {
65
- DomainName: string;
66
- ModuleId: string;
67
- FunctionId: string;
68
- }) => {
69
- let controller = await getPredictController();
70
- return controller.getDevFunctionSpecFromCall(call);
71
- });
72
-
73
-
74
- export function getCallResultPath(call: CallSpec) {
75
- return getProxyPath(() => functionSchema()[call.DomainName].PathFunctionRunner[call.ModuleId].Results[call.CallId]);
76
- }
77
- export function getCallResult(call: CallSpec) {
78
- return functionSchema()[call.DomainName].PathFunctionRunner[call.ModuleId].Results[call.CallId];
79
- }
80
-
81
- /** Force predictions to run in the trigger order, so they can resolve an be added before
82
- * the next predictions. Also, to preserve call order.
83
- */
84
- //const predictRunCommitLoop = runInSerial((run: () => Promise<PredictResult | undefined>) => run());
85
- // UPDATE: We no longer run them in serial, as this caused a lot of lag if we ran a lot of functions which access unique values they needed to sync. Generally speaking, most predictions should run in serial anyway, As most functions should be accessing already synchronized values.
86
- const predictRunCommitLoop = (run: () => Promise<PredictResult | undefined>) => run();
87
-
88
-
89
-
90
-
91
- // IMPORTANT! This has to be synchronous, and we need to synchronously get into the proxy watcher call. That way other calls after this know to wait for our proxy watcher to finish. Otherwise, it's very, very, very easy to write code, where you call a function, you expect it to write to a value, and then you run some other code, maybe which just uses a commit async, and tries to read from that value. But the value won't exist, because we won't even have started the prediction yet, and the proxy watcher can't possibly know to wait, because we haven't started the prediction. So... this has to be synchronous!
92
- export function predictCall(call: CallSpec, metadata: FunctionMetadata): {
93
- onApplied: () => void;
94
- cancel: () => void;
95
- predictPromise: Promise<PredictResult | undefined>;
96
- } {
97
- let predictObj = predictCallBase({ call, metadata });
98
- let cancel = predictObj.cancel;
99
- let onApplied = predictObj.onApplied;
100
- registerPredictionBlocker(call.CallId, predictObj.predictPromise);
101
- return { cancel, onApplied, predictPromise: predictObj.predictPromise };
102
- }
103
-
104
- function getPredictTime(time: Time) {
105
- return { time: time.time, version: predictionLockVersion, creatorId: time.creatorId };
106
- }
107
-
108
- export interface PredictResult {
109
- readPaths: Set<string>;
110
- readParentPaths: Set<string>;
111
- writes: PathValue[];
112
- /** Writes that have been replaced AT the write time of the prediction */
113
- replacedWriteValues: PathValue[];
114
- }
115
- function predictCallBase(config: {
116
- call: CallSpec;
117
- metadata: FunctionMetadata;
118
- overrides?: PathValue[];
119
- }): {
120
- onApplied: () => void;
121
- cancel: () => void;
122
- predictPromise: Promise<PredictResult | undefined>;
123
- } {
124
- let call = config.call;
125
- let pathResultWrite = getCallResultPath(call);
126
-
127
- let actualValueFinished = new PromiseObj();
128
- function onActualFinished() {
129
- if (actualValueFinished.resolveCalled) return;
130
- let resultObj = authorityStorage.getValueAtTime(pathResultWrite, undefined);
131
- let result = pathValueSerializer.getPathValue(resultObj) as FunctionResult | undefined;
132
- if (!result) return;
133
- if (result.lastInternalLoopCount === -1) return;
134
- cleanupPrediction();
135
- actualValueFinished.resolve();
136
- for (let callback of onPredictionFinishedCallbacks) {
137
- callback({ callId: call.CallId, result, functionId: call.FunctionId });
138
- }
139
- }
140
- function onApplied() {
141
- clientWatcher.setWatches({
142
- callback: onActualFinished,
143
- paths: new Set([pathResultWrite]),
144
- parentPaths: new Set(),
145
- });
146
- }
147
-
148
- let functionResult: FunctionResult = {
149
- // Special prediction values (we don't even really need to put anything here, but it
150
- // might as well have the right format, in case a bug causes it to be used somewhere)
151
- lastInternalLoopCount: -1,
152
- outerLoopCount: -1,
153
- totalInternalLoopCount: -1,
154
- timeTaken: -1,
155
- totalTime: -1,
156
- evalTime: 0,
157
- };
158
-
159
- // NOTE: We don't cascade predictions, instead only depending on the call succeeding. This is because if we depend on writes,
160
- // we will be invalidated before we get the actual call result, which results in going back in time, a delay, then
161
- // having the actual call results, and so jumping forward again. It is better to just wait, so our latest value
162
- // becomes the latest call value, when we receive it.
163
- let predictResultWrite: PathValue = {
164
- path: pathResultWrite,
165
- value: functionResult,
166
- locks: [
167
- {
168
- // We use a very low negative version, so we don't have to worry about this actually existing. We don't really want
169
- // a lock on startTime, we just want the range lock.
170
- startTime: { time: call.runAtTime.time, version: -2344282313, creatorId: call.runAtTime.creatorId },
171
- // NOTE: The version is special, and used to detect prediction rejections in some logging.
172
- // NOTE: We need to set our end time enough in the future so errors cause us to be rejected
173
- endTime: { time: call.runAtTime.time + MAX_CHANGE_AGE, version: Number.MAX_SAFE_INTEGER, creatorId: call.runAtTime.creatorId },
174
- path: pathResultWrite,
175
- // Technically we are depending on a future time, so... we are depending on ourself? And our value isn't undefined.
176
- readIsTransparent: false,
177
- }
178
- ],
179
- lockCount: 1,
180
- // We right our result BEFORE the actual result, so it will be clobbered (and before our read lock!)
181
- // when the result arrives (although... it will ALSO be rejected, so... this might be less important...)
182
- // (Also, more importantly, this ha to be UNIQUE, otherwise time locks break!)
183
- time: getPredictTime(call.runAtTime),
184
- valid: true,
185
- event: true,
186
- };
187
- let predictLocks: ReadLock[] = [
188
- {
189
- startTime: predictResultWrite.time,
190
- endTime: predictResultWrite.time,
191
- path: pathResultWrite,
192
- readIsTransparent: false,
193
- }
194
- ];
195
- let predictions: PredictResult | undefined;
196
- let predictPromise = predictRunCommitLoop(async () => {
197
- // IMPORTANT! See the addCall note for why we have to NOT have any waiting before getCallWrites.
198
- if (Querysub.DEBUG_PREDICTIONS) {
199
- console.log(magenta(`Start predict call`), `${call.DomainName}.${call.FunctionId}`);
200
- }
201
-
202
- let debugName = `[predict]|${call.DomainName}.${call.FunctionId}`;
203
-
204
- let dryRunResult: {
205
- writes: PathValue[];
206
- readPaths: Set<string>;
207
- readParentPaths: Set<string>;
208
- };
209
-
210
- setTimeout(() => {
211
- clientWatcher.unwatch(onActualFinished);
212
- }, 30 * 1000);
213
-
214
- try {
215
- let tempDryRunResult = await Promise.race([
216
- await getCallWrites({ call, debugName, overrides: config.overrides, useFinishReordering: true, metadata: config.metadata }),
217
- actualValueFinished.promise,
218
- ]);
219
- if (!tempDryRunResult) {
220
- if (Querysub.DEBUG_PREDICTIONS || Querysub.DEBUG_CALLS) {
221
- console.log(magenta(`Abort predict call before prediction finished, already received call result`), `${call.DomainName}.${call.FunctionId}`);
222
- }
223
- return undefined;
224
- }
225
- dryRunResult = tempDryRunResult;
226
- } catch (e: any) {
227
- if (!pathValueSerializer.getPathValue(authorityStorage.getValueAtTime(pathResultWrite, undefined))) {
228
- console.log(`Skipping prediction for ${debugName} due to error running predictive call. Likely just an out of order error.`, e.stack);
229
- } else {
230
- // NOTE: This case happens a lot, because of how we handle locks. We don't receive locked values, and so
231
- // we assume all values have no locks, and only keep the latest. This is usually fine, but... if we lose
232
- // the race to predict the function against the server updating it, it is likely our prediction will now
233
- // be running before the latest write. In which case (as we don't really store write history), we will read undefined.
234
- // This isn't accurate, but... our write WILL almost certainly be wrong (as the value changed), so we don't log here.
235
- }
236
- return {
237
- writes: [],
238
- readPaths: new Set(),
239
- readParentPaths: new Set(),
240
- replacedWriteValues: [],
241
- };
242
- }
243
- predictions = {
244
- writes: dryRunResult.writes,
245
- readPaths: dryRunResult.readPaths,
246
- readParentPaths: dryRunResult.readParentPaths,
247
- replacedWriteValues: dryRunResult.writes.map(write => {
248
- let path = write.path;
249
- let newWrite: PathValue = authorityStorage.getValueAtTime(write.path, write.time) || {
250
- path, valid: true, time: write.time, locks: [], lockCount: 0, value: undefined, canGCValue: true, isTransparent: true,
251
- };
252
- newWrite = { ...newWrite };
253
- // Use a time very slightly after, so it clobbers the write, and any writes before it, but none after.
254
- newWrite.time = write.time;
255
- write.time = {
256
- time: write.time.time,
257
- version: write.time.version,
258
- creatorId: write.time.creatorId + 1,
259
- };
260
- return newWrite;
261
- }),
262
- };
263
-
264
- // Abort, and don't predict
265
- if (didCancel) return predictions;
266
-
267
- for (let value of predictions.writes) {
268
- value.source = debugName;
269
- }
270
-
271
- // If we already received the actual call, we can't add the prediction, as... the invalidation
272
- // code won't properly immediately reject our prediction, as we are not the authority on the
273
- // path, so it treats it as source of truth.
274
- if (authorityStorage.getValueAtTime(pathResultWrite, undefined)?.value) {
275
- if (Querysub.DEBUG_PREDICTIONS || Querysub.DEBUG_CALLS) {
276
- console.log(magenta(`Abort predict call, already received call result`), `${call.DomainName}.${call.FunctionId}`);
277
- }
278
- return predictions;
279
- }
280
-
281
- // We commit slightly different writes, for lock invalidation. Our caller doesn't need / doesn't want this
282
- // extra write, so we just don't return it.
283
- // NOTE: I guess we technically don't need to actually write to the result, but... might as well?
284
- predictions.writes.push(predictResultWrite);
285
- // Update all locks to use the same locks as predictResultWrite
286
- // Ensure we use the predict time, otherwise these times will be reused when the call actually happens,
287
- // potentially with different values, which breaks all of our ReadLocks!
288
- for (let predict of predictions.writes) {
289
- predict.time = predictResultWrite.time;
290
- predict.locks = predictLocks;
291
- predict.lockCount = predictLocks.length;
292
- }
293
-
294
- validStateComputer.ingestValuesAndValidStates({
295
- pathValues: predictions.writes,
296
- parentSyncs: [],
297
- initialTriggers: { values: new Set(), parentPaths: new Set() },
298
- doNotArchive: true,
299
- });
300
-
301
- if (Querysub.DEBUG_PREDICTIONS) {
302
- console.log(magenta(`Finished and applied predict call`), `${call.DomainName}.${call.FunctionId}`);
303
- }
304
- return predictions;
305
- });
306
- logErrors(predictPromise);
307
-
308
- let didCancel = false;
309
- function rejectPrediction() {
310
- if (didCancel) return;
311
- didCancel = true;
312
- if (!predictions) return;
313
- // Reject our predictions, as the call likely never got committed, so it will never be written
314
- validStateComputer.ingestValuesAndValidStates({
315
- pathValues: predictions.writes.map(write => ({
316
- path: write.path,
317
- value: write.value,
318
- locks: [],
319
- lockCount: 0,
320
- valid: false,
321
- time: predictResultWrite.time,
322
- isTransparent: false,
323
- })),
324
- parentSyncs: [],
325
- initialTriggers: { values: new Set(), parentPaths: new Set() },
326
- doNotArchive: true,
327
- });
328
- }
329
-
330
- setTimeout(cleanupPrediction, Querysub.PREDICTION_MAX_LIFESPAN);
331
- function cleanupPrediction() {
332
- if (didCancel) return;
333
-
334
- // ALWAYS reject the prediction, in case the function runner server is down.
335
- // - ALSO, once it goes back up, the write likely will be too far in the future, so our lock won't be rejected,
336
- // will will cause the bad value to stick around.
337
- // We could fix this with an endTime infinitely in the future, but then the lock can never be GCed,
338
- // which create a memory leak.
339
- rejectPrediction();
340
-
341
- if (Querysub.AUDIT_PREDICTIONS && predictions) {
342
- const afterTime = { time: call.runAtTime.time, version: Number.MAX_SAFE_INTEGER, creatorId: 0 };
343
- // Clone predictions, to strip symbols
344
- let predictionsCopy = cborEncoder().decode(cborEncoder().encode(predictions)) as typeof predictions;
345
- for (let predict of predictionsCopy.writes ?? []) {
346
- if (predict.path === pathResultWrite) continue;
347
- let finalValueObj = authorityStorage.getValueAtTime(predict.path, afterTime);
348
- if (!authorityStorage.isSynced(predict.path)) continue;
349
-
350
- let finalValue = pathValueSerializer.getPathValue(finalValueObj);
351
- let predictValue = pathValueSerializer.getPathValue(predict);
352
-
353
- let finalBuffer = cborEncoder().encode(finalValue);
354
- let predictBuffer = cborEncoder().encode(predictValue);
355
- // If they are different, warn
356
- if (!finalBuffer.equals(predictBuffer)) {
357
- // NOTE: This MIGHT be due to delayCommit on another function. We can't know when to force flush
358
- // delayCommit values, and so we only force flush if other (non-delayCommit) functions are committed
359
- // in the same schema, which will be wrong if data is accessed cross schema.
360
- // - If this happens, and is a problem, we COULD handle it, but... it is very difficult, as we would need
361
- // to evaluate the other function, detect it conflicts with a delayCommit functions, then run the delayCommit
362
- // function, then re-evaluated the conflicted prediction (which is a lot of communication and reruns
363
- // between a lot of systems... especially because in a second this will resolve by the server
364
- // clobbering our prediction).
365
- // - AND, we can't commit the pending calls until we sort out the order, so this necessarily requires
366
- // slowing down commits if we are delaying other calls.
367
- console.warn(`${red("Prediction was wrong")}: for ${call.DomainName}.${call.FunctionId} value path ${getPathFromStr(predict.path).join(".")} predict != finalValue. ${config.metadata.delayCommit && "This function is using delay commit. It's likely that you are using the time from Querysub.now() (or accessing the function call time in some other way). This doesn't work with delay commit because the server will rewrite the commit time, instead you should pass the timestamp as a parameter (And then make sure it's not some kind of privilege value that the user could cheat)" || ""} It might be the case that you shouldn't even predict the call client side (you can use the functionMetadata to set it as nopredict). It might also be the case that you do want to predict it, but you should batch the calls, so you only make one call, instead of many calls at once.`, predictValue, finalValue);
368
- }
369
- }
370
- }
371
- }
372
-
373
- return {
374
- cancel: rejectPrediction,
375
- predictPromise,
376
- onApplied,
377
- };
378
- }
379
-
380
- const getDevFunctionCache = cacheAsyncLimitedJSON(100_000, getDevFunctionSpecFromCall);
381
- const addToModuleLoaderCache = cacheAsyncLimitedJSON(100_000, addModuleToLoader);
382
-
383
- function getFunctionSpec(call: CallSpec): FunctionSpec | undefined {
384
- if (!isPublic()) {
385
- let obj = getDevFunctionCache({
386
- DomainName: call.DomainName,
387
- ModuleId: call.ModuleId,
388
- FunctionId: call.FunctionId,
389
- });
390
- if (!obj) throw new Error(`Function not referenced in deploy.ts ${call.DomainName}.${call.ModuleId}.${call.FunctionId}`);
391
- setGitURLMapping({ spec: obj.functionSpec, resolvedPath: obj.modulePath });
392
- return obj.functionSpec;
393
- }
394
-
395
- let domainObject = functionSchema()[call.DomainName];
396
- let moduleObject = domainObject.PathFunctionRunner[call.ModuleId];
397
- let functionSpec = atomicObjectRead(moduleObject.Sources[call.FunctionId]);
398
- if (!functionSpec) {
399
- if (!Querysub.isAllSynced()) return undefined;
400
- throw new Error(`Function not found in database ${call.DomainName}.${call.ModuleId}.${call.FunctionId}`);
401
- }
402
- // Add the module to the loader via asking the server the exact url for this call. The loader will
403
- // then load the code from that url when it ends up running it.
404
- if (!addToModuleLoaderCache(functionSpec)) return undefined;
405
- return functionSpec;
406
- }
407
-
408
- function getFunctionInfoBase(call: CallSpec): {
409
- functionSpec: FunctionSpec;
410
- baseFunction: Function;
411
- } | undefined {
412
- let functionSpec = getFunctionSpec(call);
413
- if (!functionSpec) return undefined;
414
-
415
- let module = getModuleFromConfig(functionSpec);
416
- if (module instanceof Promise) {
417
- proxyWatcher.triggerOnPromiseFinish(module, { waitReason: `Loading function ${call.DomainName}.${call.ModuleId}.${call.FunctionId}` });
418
- return undefined;
419
- }
420
- let exportPath = getPathFromStr(functionSpec.exportPathStr);
421
- let exportObj = module.exports;
422
- for (let path of exportPath) {
423
- if (!(path in exportObj)) {
424
- throw new Error(`Export not found for call prediction: ${path}, in ${call.DomainName}.${call.ModuleId}.${call.FunctionId}. Have ${Object.keys(exportObj)}`);
425
- }
426
- exportObj = exportObj[path];
427
- }
428
- let baseFunction = exportObj as Function;
429
-
430
- return { functionSpec, baseFunction };
431
- }
432
-
433
- // IMPORTANT! See the addCall note for why we have to NOT have any waiting before getCallWrites.
434
- export function getCallWrites(config: {
435
- debugName: string;
436
- call: CallSpec;
437
- overrides?: PathValue[];
438
- useFinishReordering?: boolean;
439
- metadata?: FunctionMetadata;
440
- }): Promise<DryRunResult> {
441
- let { call, debugName } = config;
442
-
443
- let finishInStartOrder: number | boolean | undefined;
444
-
445
- if (config.useFinishReordering && !config.metadata?.noFinishInStartOrder && call.fromProxy) {
446
- let triggerCaller = getCurrentCallCreationProxy();
447
- if (!triggerCaller) {
448
- require("debugbreak")(2);
449
- debugger;
450
- throw new Error(`getCallWrites did not happen synchronously when triggering a call. It's very important that we synchronously start the proxy watcher, so we can make sure that predictions happen in a consistent order. FIX THIS! Find the async weighting that was added and move it inside the watch function using cacheAsync to make it watched. For: ${debugName}`);
451
- }
452
- finishInStartOrder = addEpsilons(triggerCaller.createTime, 1);
453
- }
454
- let first = true;
455
- return proxyWatcher.dryRunFull({
456
- debugName,
457
- runAtTime: call.runAtTime,
458
- overrideAllowLockDomainsPrefixes: [getPathStr1(call.DomainName)],
459
- // We are going to clobber locks anyways, so there is no reason to create them
460
- unsafeNoLocks: true,
461
- overrides: config.overrides,
462
- nestedCalls: "inline",
463
- // Run after our trigger function. This will insert us before functions which may have been run before us, but after our trigger function.
464
- finishInStartOrder,
465
- predictMetadata: config.metadata,
466
- watchFunction() {
467
- let fncObj = getFunctionInfoBase(call);
468
- if (!fncObj) return undefined;
469
- let { functionSpec, baseFunction } = fncObj;
470
- return overrideCurrentCall({ spec: call, fnc: functionSpec }, () => {
471
- if (first) {
472
- if (Querysub.DEBUG_PREDICTIONS) {
473
- console.log(magenta(`Loaded predict function and first run`), `${call.DomainName}.${call.FunctionId}`);
474
- }
475
- }
476
- first = false;
477
-
478
- let args = parseArgs(call);
479
- let result = baseFunction(...args);
480
- return result;
481
- });
482
- },
483
- });
1
+ import { cacheJSONArgsEqual, lazy } from "socket-function/src/caching";
2
+ import { MAX_CHANGE_AGE, PathValue, ReadLock, Time, authorityStorage, predictionLockVersion } from "../0-path-value-core/pathValueCore";
3
+ import { validStateComputer } from "../0-path-value-core/ValidStateComputer";
4
+ import { proxyWatcher, atomicObjectRead, DryRunResult, getCurrentCallCreationProxy } from "../2-proxy/PathValueProxyWatcher";
5
+ import { getProxyPath } from "../2-proxy/pathValueProxy";
6
+ import { CallSpec, FunctionResult, FunctionSpec, functionSchema, overrideCurrentCall } from "../3-path-functions/PathFunctionRunner";
7
+ import { getModuleFromConfig, setGitURLMapping } from "../3-path-functions/pathFunctionLoader";
8
+ import { logErrors } from "../errors";
9
+ import { getPathFromStr, getPathStr1 } from "../path";
10
+ import { Querysub, QuerysubController, QuerysubControllerBase, registerPredictionBlocker, querysubNodeId } from "./QuerysubController";
11
+ import { parseArgs } from "../3-path-functions/PathFunctionHelpers";
12
+ import { magenta, red } from "socket-function/src/formatting/logColors";
13
+ import { pathValueSerializer } from "../-h-path-value-serialize/PathValueSerializer";
14
+
15
+ import { setFlag } from "socket-function/require/compileFlags";
16
+ import cbor from "cbor-x";
17
+ import { FunctionMetadata } from "../3-path-functions/syncSchema";
18
+ import { PromiseObj, isNode } from "socket-function/src/misc";
19
+ import { isPublic } from "../config";
20
+ import { clientWatcher } from "../1-path-client/pathValueClientWatcher";
21
+ import { cacheAsyncLimitedJSON } from "../functional/promiseCache";
22
+ import { addEpsilons } from "../bits";
23
+ import { delay } from "socket-function/src/batching";
24
+ setFlag(require, "cbor-x", "allowclient", true);
25
+ const cborEncoder = lazy(() => new cbor.Encoder({ structuredClone: true }));
26
+
27
+ let onPredictionFinishedCallbacks: Array<(data: { callId: string; result: FunctionResult; functionId: string }) => void> = [];
28
+
29
+ export function onPredictionFinished(callback: (data: {
30
+ callId: string;
31
+ result: FunctionResult;
32
+ functionId: string;
33
+ }) => void) {
34
+ onPredictionFinishedCallbacks.push(callback);
35
+ }
36
+
37
+ // NOTE: Most functions won't use this, as they should use regular api calls. However,
38
+ // as we are using paths for RequireJS, we explicitly need our local (when serverside).
39
+ async function getPredictController() {
40
+ let controller: QuerysubControllerBase;
41
+ if (isNode()) {
42
+ // NOTE: If we are on node, then the require WON'T run over the network, so we need to use
43
+ // our local module path, not the remote one.
44
+ controller = new QuerysubControllerBase();
45
+ } else {
46
+ let nodeId = await querysubNodeId();
47
+ if (!nodeId) throw new Error("No querysub node found");
48
+ controller = QuerysubController.nodes[nodeId] as any;
49
+ }
50
+ return controller;
51
+ }
52
+ // TODO: I think our use of filePath, moduleId, etc are wrong here? We give pathFunctionLoader the filePath,
53
+ // but we really should just give it the moduleId, or... even just avoid calling it altogether, as it doesn't
54
+ // do too much for us if we already have the fully resolved path...
55
+ // - Although using it DOES allow permissions checks to work nicely, so, eh... maybe it is fine to use pathFunctionLoader?
56
+ const addModuleToLoader = cacheJSONArgsEqual(async (spec: FunctionSpec): Promise<true> => {
57
+ let controller = await getPredictController();
58
+
59
+ let path = await controller.getModulePath({ functionSpec: spec });
60
+ setGitURLMapping({ spec, resolvedPath: path });
61
+ return true;
62
+ });
63
+
64
+ const getDevFunctionSpecFromCall = cacheJSONArgsEqual(async (call: {
65
+ DomainName: string;
66
+ ModuleId: string;
67
+ FunctionId: string;
68
+ }) => {
69
+ let controller = await getPredictController();
70
+ return controller.getDevFunctionSpecFromCall(call);
71
+ });
72
+
73
+
74
+ export function getCallResultPath(call: CallSpec) {
75
+ return getProxyPath(() => functionSchema()[call.DomainName].PathFunctionRunner[call.ModuleId].Results[call.CallId]);
76
+ }
77
+ export function getCallResult(call: CallSpec) {
78
+ return functionSchema()[call.DomainName].PathFunctionRunner[call.ModuleId].Results[call.CallId];
79
+ }
80
+
81
+ /** Force predictions to run in the trigger order, so they can resolve an be added before
82
+ * the next predictions. Also, to preserve call order.
83
+ */
84
+ //const predictRunCommitLoop = runInSerial((run: () => Promise<PredictResult | undefined>) => run());
85
+ // UPDATE: We no longer run them in serial, as this caused a lot of lag if we ran a lot of functions which access unique values they needed to sync. Generally speaking, most predictions should run in serial anyway, As most functions should be accessing already synchronized values.
86
+ const predictRunCommitLoop = (run: () => Promise<PredictResult | undefined>) => run();
87
+
88
+
89
+
90
+
91
+ // IMPORTANT! This has to be synchronous, and we need to synchronously get into the proxy watcher call. That way other calls after this know to wait for our proxy watcher to finish. Otherwise, it's very, very, very easy to write code, where you call a function, you expect it to write to a value, and then you run some other code, maybe which just uses a commit async, and tries to read from that value. But the value won't exist, because we won't even have started the prediction yet, and the proxy watcher can't possibly know to wait, because we haven't started the prediction. So... this has to be synchronous!
92
+ export function predictCall(call: CallSpec, metadata: FunctionMetadata): {
93
+ onApplied: () => void;
94
+ cancel: () => void;
95
+ predictPromise: Promise<PredictResult | undefined>;
96
+ } {
97
+ let predictObj = predictCallBase({ call, metadata });
98
+ let cancel = predictObj.cancel;
99
+ let onApplied = predictObj.onApplied;
100
+ registerPredictionBlocker(call.CallId, predictObj.predictPromise);
101
+ return { cancel, onApplied, predictPromise: predictObj.predictPromise };
102
+ }
103
+
104
+ function getPredictTime(time: Time) {
105
+ return { time: time.time, version: predictionLockVersion, creatorId: time.creatorId };
106
+ }
107
+
108
/** Outcome of a prediction dry run: what was read, what would be written, and what
 *  those writes replaced. */
export interface PredictResult {
    /** Exact paths read during the dry run. */
    readPaths: Set<string>;
    /** Parent paths read during the dry run (child-enumeration reads). */
    readParentPaths: Set<string>;
    /** Predicted writes; predictCallBase also pushes its synthetic result write in here before committing. */
    writes: PathValue[];
    /** Writes that have been replaced AT the write time of the prediction */
    replacedWriteValues: PathValue[];
}
115
+ function predictCallBase(config: {
116
+ call: CallSpec;
117
+ metadata: FunctionMetadata;
118
+ overrides?: PathValue[];
119
+ }): {
120
+ onApplied: () => void;
121
+ cancel: () => void;
122
+ predictPromise: Promise<PredictResult | undefined>;
123
+ } {
124
+ let call = config.call;
125
+ let pathResultWrite = getCallResultPath(call);
126
+
127
+ let actualValueFinished = new PromiseObj();
128
+ function onActualFinished() {
129
+ if (actualValueFinished.resolveCalled) return;
130
+ let resultObj = authorityStorage.getValueAtTime(pathResultWrite, undefined);
131
+ let result = pathValueSerializer.getPathValue(resultObj) as FunctionResult | undefined;
132
+ if (!result) return;
133
+ if (result.lastInternalLoopCount === -1) return;
134
+ cleanupPrediction();
135
+ actualValueFinished.resolve();
136
+ for (let callback of onPredictionFinishedCallbacks) {
137
+ callback({ callId: call.CallId, result, functionId: call.FunctionId });
138
+ }
139
+ }
140
+ function onApplied() {
141
+ clientWatcher.setWatches({
142
+ callback: onActualFinished,
143
+ paths: new Set([pathResultWrite]),
144
+ parentPaths: new Set(),
145
+ });
146
+ }
147
+
148
+ let functionResult: FunctionResult = {
149
+ // Special prediction values (we don't even really need to put anything here, but it
150
+ // might as well have the right format, in case a bug causes it to be used somewhere)
151
+ lastInternalLoopCount: -1,
152
+ outerLoopCount: -1,
153
+ totalInternalLoopCount: -1,
154
+ timeTaken: -1,
155
+ totalTime: -1,
156
+ evalTime: 0,
157
+ };
158
+
159
+ // NOTE: We don't cascade predictions, instead only depending on the call succeeding. This is because if we depend on writes,
160
+ // we will be invalidated before we get the actual call result, which results in going back in time, a delay, then
161
+ // having the actual call results, and so jumping forward again. It is better to just wait, so our latest value
162
+ // becomes the latest call value, when we receive it.
163
+ let predictResultWrite: PathValue = {
164
+ path: pathResultWrite,
165
+ value: functionResult,
166
+ locks: [
167
+ {
168
+ // We use a very low negative version, so we don't have to worry about this actually existing. We don't really want
169
+ // a lock on startTime, we just want the range lock.
170
+ startTime: { time: call.runAtTime.time, version: -2344282313, creatorId: call.runAtTime.creatorId },
171
+ // NOTE: The version is special, and used to detect prediction rejections in some logging.
172
+ // NOTE: We need to set our end time enough in the future so errors cause us to be rejected
173
+ endTime: { time: call.runAtTime.time + MAX_CHANGE_AGE, version: Number.MAX_SAFE_INTEGER, creatorId: call.runAtTime.creatorId },
174
+ path: pathResultWrite,
175
+ // Technically we are depending on a future time, so... we are depending on ourself? And our value isn't undefined.
176
+ readIsTransparent: false,
177
+ }
178
+ ],
179
+ lockCount: 1,
180
+ // We right our result BEFORE the actual result, so it will be clobbered (and before our read lock!)
181
+ // when the result arrives (although... it will ALSO be rejected, so... this might be less important...)
182
+ // (Also, more importantly, this ha to be UNIQUE, otherwise time locks break!)
183
+ time: getPredictTime(call.runAtTime),
184
+ valid: true,
185
+ event: true,
186
+ };
187
+ let predictLocks: ReadLock[] = [
188
+ {
189
+ startTime: predictResultWrite.time,
190
+ endTime: predictResultWrite.time,
191
+ path: pathResultWrite,
192
+ readIsTransparent: false,
193
+ }
194
+ ];
195
+ let predictions: PredictResult | undefined;
196
+ let predictPromise = predictRunCommitLoop(async () => {
197
+ // IMPORTANT! See the addCall note for why we have to NOT have any waiting before getCallWrites.
198
+ if (Querysub.DEBUG_PREDICTIONS) {
199
+ console.log(magenta(`Start predict call`), `${call.DomainName}.${call.FunctionId}`);
200
+ }
201
+
202
+ let debugName = `[predict]|${call.DomainName}.${call.FunctionId}`;
203
+
204
+ let dryRunResult: {
205
+ writes: PathValue[];
206
+ readPaths: Set<string>;
207
+ readParentPaths: Set<string>;
208
+ };
209
+
210
+ setTimeout(() => {
211
+ clientWatcher.unwatch(onActualFinished);
212
+ }, 30 * 1000);
213
+
214
+ try {
215
+ let tempDryRunResult = await Promise.race([
216
+ await getCallWrites({ call, debugName, overrides: config.overrides, useFinishReordering: true, metadata: config.metadata }),
217
+ actualValueFinished.promise,
218
+ ]);
219
+ if (!tempDryRunResult) {
220
+ if (Querysub.DEBUG_PREDICTIONS || Querysub.DEBUG_CALLS) {
221
+ console.log(magenta(`Abort predict call before prediction finished, already received call result`), `${call.DomainName}.${call.FunctionId}`);
222
+ }
223
+ return undefined;
224
+ }
225
+ dryRunResult = tempDryRunResult;
226
+ } catch (e: any) {
227
+ if (!pathValueSerializer.getPathValue(authorityStorage.getValueAtTime(pathResultWrite, undefined))) {
228
+ console.log(`Skipping prediction for ${debugName} due to error running predictive call. Likely just an out of order error.`, e.stack);
229
+ } else {
230
+ // NOTE: This case happens a lot, because of how we handle locks. We don't receive locked values, and so
231
+ // we assume all values have no locks, and only keep the latest. This is usually fine, but... if we lose
232
+ // the race to predict the function against the server updating it, it is likely our prediction will now
233
+ // be running before the latest write. In which case (as we don't really store write history), we will read undefined.
234
+ // This isn't accurate, but... our write WILL almost certainly be wrong (as the value changed), so we don't log here.
235
+ }
236
+ return {
237
+ writes: [],
238
+ readPaths: new Set(),
239
+ readParentPaths: new Set(),
240
+ replacedWriteValues: [],
241
+ };
242
+ }
243
+ predictions = {
244
+ writes: dryRunResult.writes,
245
+ readPaths: dryRunResult.readPaths,
246
+ readParentPaths: dryRunResult.readParentPaths,
247
+ replacedWriteValues: dryRunResult.writes.map(write => {
248
+ let path = write.path;
249
+ let newWrite: PathValue = authorityStorage.getValueAtTime(write.path, write.time) || {
250
+ path, valid: true, time: write.time, locks: [], lockCount: 0, value: undefined, canGCValue: true, isTransparent: true,
251
+ };
252
+ newWrite = { ...newWrite };
253
+ // Use a time very slightly after, so it clobbers the write, and any writes before it, but none after.
254
+ newWrite.time = write.time;
255
+ write.time = {
256
+ time: write.time.time,
257
+ version: write.time.version,
258
+ creatorId: write.time.creatorId + 1,
259
+ };
260
+ return newWrite;
261
+ }),
262
+ };
263
+
264
+ // Abort, and don't predict
265
+ if (didCancel) return predictions;
266
+
267
+ for (let value of predictions.writes) {
268
+ value.source = debugName;
269
+ }
270
+
271
+ // If we already received the actual call, we can't add the prediction, as... the invalidation
272
+ // code won't properly immediately reject our prediction, as we are not the authority on the
273
+ // path, so it treats it as source of truth.
274
+ if (authorityStorage.getValueAtTime(pathResultWrite, undefined)?.value) {
275
+ if (Querysub.DEBUG_PREDICTIONS || Querysub.DEBUG_CALLS) {
276
+ console.log(magenta(`Abort predict call, already received call result`), `${call.DomainName}.${call.FunctionId}`);
277
+ }
278
+ return predictions;
279
+ }
280
+
281
+ // We commit slightly different writes, for lock invalidation. Our caller doesn't need / doesn't want this
282
+ // extra write, so we just don't return it.
283
+ // NOTE: I guess we technically don't need to actually write to the result, but... might as well?
284
+ predictions.writes.push(predictResultWrite);
285
+ // Update all locks to use the same locks as predictResultWrite
286
+ // Ensure we use the predict time, otherwise these times will be reused when the call actually happens,
287
+ // potentially with different values, which breaks all of our ReadLocks!
288
+ for (let predict of predictions.writes) {
289
+ predict.time = predictResultWrite.time;
290
+ predict.locks = predictLocks;
291
+ predict.lockCount = predictLocks.length;
292
+ }
293
+
294
+ validStateComputer.ingestValuesAndValidStates({
295
+ pathValues: predictions.writes,
296
+ parentSyncs: [],
297
+ initialTriggers: { values: new Set(), parentPaths: new Set() },
298
+ doNotArchive: true,
299
+ });
300
+
301
+ if (Querysub.DEBUG_PREDICTIONS) {
302
+ console.log(magenta(`Finished and applied predict call`), `${call.DomainName}.${call.FunctionId}`);
303
+ }
304
+ return predictions;
305
+ });
306
+ logErrors(predictPromise);
307
+
308
+ let didCancel = false;
309
+ function rejectPrediction() {
310
+ if (didCancel) return;
311
+ didCancel = true;
312
+ if (!predictions) return;
313
+ // Reject our predictions, as the call likely never got committed, so it will never be written
314
+ validStateComputer.ingestValuesAndValidStates({
315
+ pathValues: predictions.writes.map(write => ({
316
+ path: write.path,
317
+ value: write.value,
318
+ locks: [],
319
+ lockCount: 0,
320
+ valid: false,
321
+ time: predictResultWrite.time,
322
+ isTransparent: false,
323
+ })),
324
+ parentSyncs: [],
325
+ initialTriggers: { values: new Set(), parentPaths: new Set() },
326
+ doNotArchive: true,
327
+ });
328
+ }
329
+
330
+ setTimeout(cleanupPrediction, Querysub.PREDICTION_MAX_LIFESPAN);
331
+ function cleanupPrediction() {
332
+ if (didCancel) return;
333
+
334
+ // ALWAYS reject the prediction, in case the function runner server is down.
335
+ // - ALSO, once it goes back up, the write likely will be too far in the future, so our lock won't be rejected,
336
+ // will will cause the bad value to stick around.
337
+ // We could fix this with an endTime infinitely in the future, but then the lock can never be GCed,
338
+ // which create a memory leak.
339
+ rejectPrediction();
340
+
341
+ if (Querysub.AUDIT_PREDICTIONS && predictions) {
342
+ const afterTime = { time: call.runAtTime.time, version: Number.MAX_SAFE_INTEGER, creatorId: 0 };
343
+ // Clone predictions, to strip symbols
344
+ let predictionsCopy = cborEncoder().decode(cborEncoder().encode(predictions)) as typeof predictions;
345
+ for (let predict of predictionsCopy.writes ?? []) {
346
+ if (predict.path === pathResultWrite) continue;
347
+ let finalValueObj = authorityStorage.getValueAtTime(predict.path, afterTime);
348
+ if (!authorityStorage.isSynced(predict.path)) continue;
349
+
350
+ let finalValue = pathValueSerializer.getPathValue(finalValueObj);
351
+ let predictValue = pathValueSerializer.getPathValue(predict);
352
+
353
+ let finalBuffer = cborEncoder().encode(finalValue);
354
+ let predictBuffer = cborEncoder().encode(predictValue);
355
+ // If they are different, warn
356
+ if (!finalBuffer.equals(predictBuffer)) {
357
+ // NOTE: This MIGHT be due to delayCommit on another function. We can't know when to force flush
358
+ // delayCommit values, and so we only force flush if other (non-delayCommit) functions are committed
359
+ // in the same schema, which will be wrong if data is accessed cross schema.
360
+ // - If this happens, and is a problem, we COULD handle it, but... it is very difficult, as we would need
361
+ // to evaluate the other function, detect it conflicts with a delayCommit functions, then run the delayCommit
362
+ // function, then re-evaluated the conflicted prediction (which is a lot of communication and reruns
363
+ // between a lot of systems... especially because in a second this will resolve by the server
364
+ // clobbering our prediction).
365
+ // - AND, we can't commit the pending calls until we sort out the order, so this necessarily requires
366
+ // slowing down commits if we are delaying other calls.
367
+ console.warn(`${red("Prediction was wrong")}: for ${call.DomainName}.${call.FunctionId} value path ${getPathFromStr(predict.path).join(".")} predict != finalValue. ${config.metadata.delayCommit && "This function is using delay commit. It's likely that you are using the time from Querysub.now() (or accessing the function call time in some other way). This doesn't work with delay commit because the server will rewrite the commit time, instead you should pass the timestamp as a parameter (And then make sure it's not some kind of privilege value that the user could cheat)" || ""} It might be the case that you shouldn't even predict the call client side (you can use the functionMetadata to set it as nopredict). It might also be the case that you do want to predict it, but you should batch the calls, so you only make one call, instead of many calls at once.`, predictValue, finalValue);
368
+ }
369
+ }
370
+ }
371
+ }
372
+
373
+ return {
374
+ cancel: rejectPrediction,
375
+ predictPromise,
376
+ onApplied,
377
+ };
378
+ }
379
+
380
+ const getDevFunctionCache = cacheAsyncLimitedJSON(100_000, getDevFunctionSpecFromCall);
381
+ const addToModuleLoaderCache = cacheAsyncLimitedJSON(100_000, addModuleToLoader);
382
+
383
+ function getFunctionSpec(call: CallSpec): FunctionSpec | undefined {
384
+ if (!isPublic()) {
385
+ let obj = getDevFunctionCache({
386
+ DomainName: call.DomainName,
387
+ ModuleId: call.ModuleId,
388
+ FunctionId: call.FunctionId,
389
+ });
390
+ if (!obj) throw new Error(`Function not referenced in deploy.ts ${call.DomainName}.${call.ModuleId}.${call.FunctionId}`);
391
+ setGitURLMapping({ spec: obj.functionSpec, resolvedPath: obj.modulePath });
392
+ return obj.functionSpec;
393
+ }
394
+
395
+ let domainObject = functionSchema()[call.DomainName];
396
+ let moduleObject = domainObject.PathFunctionRunner[call.ModuleId];
397
+ let functionSpec = atomicObjectRead(moduleObject.Sources[call.FunctionId]);
398
+ if (!functionSpec) {
399
+ if (!Querysub.isAllSynced()) return undefined;
400
+ throw new Error(`Function not found in database ${call.DomainName}.${call.ModuleId}.${call.FunctionId}`);
401
+ }
402
+ // Add the module to the loader via asking the server the exact url for this call. The loader will
403
+ // then load the code from that url when it ends up running it.
404
+ if (!addToModuleLoaderCache(functionSpec)) return undefined;
405
+ return functionSpec;
406
+ }
407
+
408
+ function getFunctionInfoBase(call: CallSpec): {
409
+ functionSpec: FunctionSpec;
410
+ baseFunction: Function;
411
+ } | undefined {
412
+ let functionSpec = getFunctionSpec(call);
413
+ if (!functionSpec) return undefined;
414
+
415
+ let module = getModuleFromConfig(functionSpec);
416
+ if (module instanceof Promise) {
417
+ proxyWatcher.triggerOnPromiseFinish(module, { waitReason: `Loading function ${call.DomainName}.${call.ModuleId}.${call.FunctionId}` });
418
+ return undefined;
419
+ }
420
+ let exportPath = getPathFromStr(functionSpec.exportPathStr);
421
+ let exportObj = module.exports;
422
+ for (let path of exportPath) {
423
+ if (!(path in exportObj)) {
424
+ throw new Error(`Export not found for call prediction: ${path}, in ${call.DomainName}.${call.ModuleId}.${call.FunctionId}. Have ${Object.keys(exportObj)}`);
425
+ }
426
+ exportObj = exportObj[path];
427
+ }
428
+ let baseFunction = exportObj as Function;
429
+
430
+ return { functionSpec, baseFunction };
431
+ }
432
+
433
// IMPORTANT! See the addCall note for why we have to NOT have any waiting before getCallWrites.
/**
 * Dry-runs a call through the proxy watcher and returns the writes/reads it would produce,
 * without committing anything (locks are suppressed; predictCallBase clobbers them anyway).
 *
 * Must be invoked synchronously from the triggering context: when useFinishReordering is
 * requested for a proxy-triggered call, we read getCurrentCallCreationProxy() to order this
 * run just after the trigger function — that context is only available synchronously.
 */
export function getCallWrites(config: {
    debugName: string;
    call: CallSpec;
    overrides?: PathValue[];
    useFinishReordering?: boolean;
    metadata?: FunctionMetadata;
}): Promise<DryRunResult> {
    let { call, debugName } = config;

    let finishInStartOrder: number | boolean | undefined;

    if (config.useFinishReordering && !config.metadata?.noFinishInStartOrder && call.fromProxy) {
        let triggerCaller = getCurrentCallCreationProxy();
        if (!triggerCaller) {
            // NOTE(review): leftover developer debug hooks on this failure path — intentional
            // given the "FIX THIS!" message, but confirm they should ship.
            require("debugbreak")(2);
            debugger;
            throw new Error(`getCallWrites did not happen synchronously when triggering a call. It's very important that we synchronously start the proxy watcher, so we can make sure that predictions happen in a consistent order. FIX THIS! Find the async weighting that was added and move it inside the watch function using cacheAsync to make it watched. For: ${debugName}`);
        }
        // Order just after (one epsilon past) the trigger's create time.
        finishInStartOrder = addEpsilons(triggerCaller.createTime, 1);
    }
    // Tracks the first execution of watchFunction, for the one-time debug log below.
    let first = true;
    return proxyWatcher.dryRunFull({
        debugName,
        runAtTime: call.runAtTime,
        overrideAllowLockDomainsPrefixes: [getPathStr1(call.DomainName)],
        // We are going to clobber locks anyways, so there is no reason to create them
        unsafeNoLocks: true,
        overrides: config.overrides,
        nestedCalls: "inline",
        // Run after our trigger function. This will insert us before functions which may have been run before us, but after our trigger function.
        finishInStartOrder,
        predictMetadata: config.metadata,
        // Re-run by the watcher until the module is loaded; returns undefined while loading.
        watchFunction() {
            let fncObj = getFunctionInfoBase(call);
            if (!fncObj) return undefined;
            let { functionSpec, baseFunction } = fncObj;
            // Run the user function with the current-call context set, so nested reads/calls attribute correctly.
            return overrideCurrentCall({ spec: call, fnc: functionSpec }, () => {
                if (first) {
                    if (Querysub.DEBUG_PREDICTIONS) {
                        console.log(magenta(`Loaded predict function and first run`), `${call.DomainName}.${call.FunctionId}`);
                    }
                }
                first = false;

                let args = parseArgs(call);
                let result = baseFunction(...args);
                return result;
            });
        },
    });
}