querysub 0.437.0 → 0.439.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.js +50 -50
- package/bin/deploy.js +0 -0
- package/bin/function.js +0 -0
- package/bin/server.js +0 -0
- package/costsBenefits.txt +115 -115
- package/deploy.ts +2 -2
- package/package.json +2 -2
- package/spec.txt +1192 -1192
- package/src/-a-archives/archives.ts +202 -202
- package/src/-a-archives/archivesDisk.ts +454 -454
- package/src/-a-auth/certs.ts +540 -540
- package/src/-a-auth/node-forge-ed25519.d.ts +16 -16
- package/src/-b-authorities/dnsAuthority.ts +138 -138
- package/src/-c-identity/IdentityController.ts +258 -258
- package/src/-d-trust/NetworkTrust2.ts +180 -180
- package/src/-e-certs/EdgeCertController.ts +252 -252
- package/src/-e-certs/certAuthority.ts +201 -201
- package/src/-f-node-discovery/NodeDiscovery.ts +640 -640
- package/src/-g-core-values/NodeCapabilities.ts +200 -200
- package/src/-h-path-value-serialize/stringSerializer.ts +175 -175
- package/src/0-path-value-core/PathValueCommitter.ts +468 -468
- package/src/0-path-value-core/PathValueController.ts +0 -2
- package/src/0-path-value-core/archiveLocks/archiveSnapshots.ts +37 -1
- package/src/0-path-value-core/pathValueCore.ts +12 -0
- package/src/2-proxy/PathValueProxyWatcher.ts +2542 -2542
- package/src/2-proxy/TransactionDelayer.ts +94 -94
- package/src/2-proxy/pathDatabaseProxyBase.ts +36 -36
- package/src/2-proxy/pathValueProxy.ts +159 -159
- package/src/3-path-functions/PathFunctionRunner.ts +24 -13
- package/src/3-path-functions/PathFunctionRunnerMain.ts +87 -87
- package/src/3-path-functions/pathFunctionLoader.ts +516 -516
- package/src/3-path-functions/tests/rejectTest.ts +76 -76
- package/src/4-deploy/deployCheck.ts +6 -6
- package/src/4-dom/css.tsx +29 -29
- package/src/4-dom/cssTypes.d.ts +211 -211
- package/src/4-dom/qreact.tsx +2799 -2799
- package/src/4-dom/qreactTest.tsx +410 -410
- package/src/4-querysub/permissions.ts +335 -335
- package/src/4-querysub/querysubPrediction.ts +483 -483
- package/src/5-diagnostics/qreactDebug.tsx +400 -346
- package/src/TestController.ts +34 -34
- package/src/bits.ts +104 -104
- package/src/buffers.ts +69 -69
- package/src/diagnostics/ActionsHistory.ts +57 -57
- package/src/diagnostics/PathDistributionInfo.tsx +9 -1
- package/src/diagnostics/listenOnDebugger.ts +71 -71
- package/src/diagnostics/logs/IndexedLogs/BufferUnitIndex.ts +1 -1
- package/src/diagnostics/logs/diskLogger.ts +6 -0
- package/src/diagnostics/misc-pages/SnapshotViewer.tsx +78 -1
- package/src/diagnostics/periodic.ts +111 -111
- package/src/diagnostics/trackResources.ts +91 -91
- package/src/diagnostics/watchdog.ts +120 -120
- package/src/errors.ts +133 -133
- package/src/forceProduction.ts +2 -2
- package/src/fs.ts +80 -80
- package/src/functional/diff.ts +857 -857
- package/src/functional/promiseCache.ts +78 -78
- package/src/functional/random.ts +8 -8
- package/src/functional/stats.ts +60 -60
- package/src/heapDumps.ts +665 -665
- package/src/https.ts +1 -1
- package/src/library-components/AspectSizedComponent.tsx +87 -87
- package/src/library-components/ButtonSelector.tsx +64 -64
- package/src/library-components/DropdownCustom.tsx +150 -150
- package/src/library-components/DropdownSelector.tsx +31 -31
- package/src/library-components/InlinePopup.tsx +66 -66
- package/src/library-components/uncaughtToast.tsx +2 -0
- package/src/misc/color.ts +29 -29
- package/src/misc/hash.ts +83 -83
- package/src/misc/ipPong.js +13 -13
- package/src/misc/networking.ts +1 -1
- package/src/misc/random.ts +44 -44
- package/src/misc.ts +196 -196
- package/src/path.ts +255 -255
- package/src/persistentLocalStore.ts +41 -41
- package/src/promise.ts +14 -14
- package/src/storage/fileSystemPointer.ts +71 -71
- package/src/test/heapProcess.ts +35 -35
- package/src/zip.ts +15 -15
- package/tsconfig.json +26 -26
- package/yarnSpec.txt +56 -56
|
@@ -1,516 +1,516 @@
|
|
|
1
|
-
import { getSubFolder } from "../fs";
|
|
2
|
-
import { FunctionSpec } from "./PathFunctionRunner";
|
|
3
|
-
import child_process from "child_process";
|
|
4
|
-
import fs from "fs";
|
|
5
|
-
import { blue, magenta, red } from "socket-function/src/formatting/logColors";
|
|
6
|
-
import debugbreak from "debugbreak";
|
|
7
|
-
import { cache, lazy } from "socket-function/src/caching";
|
|
8
|
-
import { batchFunction, delay, runInSerial, runInfinitePoll } from "socket-function/src/batching";
|
|
9
|
-
import { errorToUndefined, logErrors } from "../errors";
|
|
10
|
-
import crypto from "crypto";
|
|
11
|
-
import { MaybePromise } from "socket-function/src/types";
|
|
12
|
-
import yargs from "yargs";
|
|
13
|
-
import { SyncWatcher } from "../2-proxy/PathValueProxyWatcher";
|
|
14
|
-
import { MAX_CHANGE_AGE } from "../0-path-value-core/pathValueCore";
|
|
15
|
-
import { isNode, isNodeTrue, nextId, timeInSecond } from "socket-function/src/misc";
|
|
16
|
-
import { getPathStr2, getPathStr3 } from "../path";
|
|
17
|
-
import { consistentHash } from "../misc/hash";
|
|
18
|
-
import { setExternalHotReloading } from "socket-function/hot/HotReloadController";
|
|
19
|
-
import { devDebugbreak, isLocal, isPublic, setIsDynamicallyLoading } from "../config";
|
|
20
|
-
import { SocketFunction } from "socket-function/SocketFunction";
|
|
21
|
-
import { requiresNetworkTrustHook } from "../-d-trust/NetworkTrust2";
|
|
22
|
-
import { getControllerNodeId, getControllerNodeIdList } from "../-g-core-values/NodeCapabilities";
|
|
23
|
-
import { sha256 } from "js-sha256";
|
|
24
|
-
import os from "os";
|
|
25
|
-
import { formatTime } from "socket-function/src/formatting/format";
|
|
26
|
-
import path from "path";
|
|
27
|
-
|
|
28
|
-
export type LoadFunctionSpec = {
|
|
29
|
-
gitURL: string;
|
|
30
|
-
gitRef: string;
|
|
31
|
-
FilePath: string;
|
|
32
|
-
FunctionId: string;
|
|
33
|
-
// NOTE: Ignores the local flag temporarily. For testing.
|
|
34
|
-
noLocal?: boolean;
|
|
35
|
-
};
|
|
36
|
-
|
|
37
|
-
let watchers = new Set<SyncWatcher>();
|
|
38
|
-
let ensureDisposingDeadWatchers = lazy(() => {
|
|
39
|
-
runInfinitePoll(MAX_CHANGE_AGE, function disposeDeadWatchers() {
|
|
40
|
-
for (let watcher of watchers) {
|
|
41
|
-
if (watcher.disposed) {
|
|
42
|
-
watchers.delete(watcher);
|
|
43
|
-
}
|
|
44
|
-
}
|
|
45
|
-
});
|
|
46
|
-
});
|
|
47
|
-
export function watchModuleHotreloads(watcher: SyncWatcher) {
|
|
48
|
-
watchers.add(watcher);
|
|
49
|
-
ensureDisposingDeadWatchers();
|
|
50
|
-
}
|
|
51
|
-
|
|
52
|
-
const getLocalPathRemapping = lazy((): { [gitUrl: string]: string } => {
|
|
53
|
-
let localPaths: string[] = [];
|
|
54
|
-
if (!isPublic()) {
|
|
55
|
-
localPaths.push(process.cwd().replaceAll(/\\/g, "/") + "/");
|
|
56
|
-
}
|
|
57
|
-
if (!localPaths.length) return {};
|
|
58
|
-
let mapping: { [gitUrl: string]: string } = Object.create(null);
|
|
59
|
-
for (let path of localPaths) {
|
|
60
|
-
if (!path.endsWith("/")) {
|
|
61
|
-
path += "/";
|
|
62
|
-
}
|
|
63
|
-
let packageJSON = fs.readFileSync(path + "package.json", "utf8");
|
|
64
|
-
let packageJSONObj = JSON.parse(packageJSON);
|
|
65
|
-
let repo = packageJSONObj.repository as { type: string, url: string } | undefined;
|
|
66
|
-
if (!repo) throw new Error(`No "repository" property in package.json at ${path}`);
|
|
67
|
-
if (repo.type !== "git" && repo.type !== "git+https" && repo.type !== "https") {
|
|
68
|
-
throw new Error(`Repository type ${JSON.stringify(repo.type)} is not supported yet. Only "git", "git+https", and "https" are presently supported.`);
|
|
69
|
-
}
|
|
70
|
-
mapping[repo.url] = path;
|
|
71
|
-
// https://github.com/sliftist/qs-cyoa.git
|
|
72
|
-
// git@github.com:sliftist/qs-cyoa.git
|
|
73
|
-
mapping[repo.url.replace(/^https:\/\/([^\/]+)\//, "git@$1:")] = path;
|
|
74
|
-
// Go from git to https as well, as it might be https
|
|
75
|
-
mapping[repo.url.replace(/^git@([^:]+):(.+)$/, "https://$1/$2")] = path;
|
|
76
|
-
}
|
|
77
|
-
return mapping;
|
|
78
|
-
});
|
|
79
|
-
|
|
80
|
-
// If we have so many different functions that we can't cache them all... then we are going
|
|
81
|
-
// to have a problem! Also... this cache is only the function, not the require.cache, which
|
|
82
|
-
// isn't being cleared anyways, so THIS isn't really where we would leak.
|
|
83
|
-
export function getModuleFromSpec(spec: FunctionSpec): MaybePromise<NodeJS.Module> {
|
|
84
|
-
return getModuleFromConfig(spec);
|
|
85
|
-
}
|
|
86
|
-
let moduleCache = new Map<string, {
|
|
87
|
-
result: MaybePromise<NodeJS.Module>;
|
|
88
|
-
error: Error | undefined;
|
|
89
|
-
}>();
|
|
90
|
-
function getSpecKey(spec: LoadFunctionSpec) {
|
|
91
|
-
// Only include these specific keys AND order them, so the hash hits no matter how this function is called
|
|
92
|
-
spec = {
|
|
93
|
-
FilePath: spec.FilePath,
|
|
94
|
-
gitURL: spec.gitURL,
|
|
95
|
-
gitRef: spec.gitRef,
|
|
96
|
-
FunctionId: spec.FunctionId,
|
|
97
|
-
noLocal: spec.noLocal,
|
|
98
|
-
};
|
|
99
|
-
return JSON.stringify(spec);
|
|
100
|
-
}
|
|
101
|
-
export function getModuleFromConfig(spec: LoadFunctionSpec): MaybePromise<NodeJS.Module> {
|
|
102
|
-
let key = getSpecKey(spec);
|
|
103
|
-
let value = moduleCache.get(key);
|
|
104
|
-
if (!value) {
|
|
105
|
-
let promise = getModuleFromSpecBase(spec);
|
|
106
|
-
value = { result: promise, error: undefined };
|
|
107
|
-
moduleCache.set(key, value);
|
|
108
|
-
promise.then(
|
|
109
|
-
result => {
|
|
110
|
-
moduleCache.set(key, { result, error: undefined });
|
|
111
|
-
},
|
|
112
|
-
error => {
|
|
113
|
-
moduleCache.set(key, { result: null as any, error });
|
|
114
|
-
}
|
|
115
|
-
);
|
|
116
|
-
}
|
|
117
|
-
if (value.error) throw value.error;
|
|
118
|
-
return value.result;
|
|
119
|
-
}
|
|
120
|
-
|
|
121
|
-
let gitURLRefMappings = new Map<string, string>();
|
|
122
|
-
export function setGitURLMapping(config: {
|
|
123
|
-
spec: FunctionSpec;
|
|
124
|
-
resolvedPath: string;
|
|
125
|
-
}) {
|
|
126
|
-
gitURLRefMappings.set(getSpecKey(config.spec), config.resolvedPath);
|
|
127
|
-
}
|
|
128
|
-
|
|
129
|
-
const loadTimeIndicatorFileName = "loadTimeIndicator-71cd93ba-1667-49ac-9206-b27930bbd983";
|
|
130
|
-
|
|
131
|
-
/** spec => path that we can use with require */
|
|
132
|
-
let moduleResolver = async (spec: {
|
|
133
|
-
gitURL: string;
|
|
134
|
-
gitRef: string;
|
|
135
|
-
}) => {
|
|
136
|
-
if (!isPublic()) {
|
|
137
|
-
// Probably a bug. The local path should have matched.
|
|
138
|
-
devDebugbreak();
|
|
139
|
-
}
|
|
140
|
-
let gitURL = spec.gitURL;
|
|
141
|
-
let urlForPath = gitURL;
|
|
142
|
-
|
|
143
|
-
if (urlForPath.startsWith("http")) {
|
|
144
|
-
// Switch to a git url... incase the repo is private? Or something?
|
|
145
|
-
// TODO: We should only do this selectively, as I think we can't sync this
|
|
146
|
-
// way without owning the repo, or... something?
|
|
147
|
-
|
|
148
|
-
let url = new URL(urlForPath);
|
|
149
|
-
gitURL = "git@" + url.host + ":" + url.pathname.slice(1);
|
|
150
|
-
urlForPath = gitURL;
|
|
151
|
-
}
|
|
152
|
-
urlForPath = urlForPath.replaceAll("@", "/");
|
|
153
|
-
urlForPath = urlForPath.replaceAll(":", "/");
|
|
154
|
-
if (urlForPath.endsWith(".git")) {
|
|
155
|
-
urlForPath = urlForPath.slice(0, -".git".length);
|
|
156
|
-
}
|
|
157
|
-
let repoPath = getSubFolder("synced_repos") + urlForPath + "/" + spec.gitRef + "/";
|
|
158
|
-
|
|
159
|
-
const lockFolder = getSubFolder("synced_repos_locks");
|
|
160
|
-
let lockPath = lockFolder + sha256(repoPath).slice(0, 16) + ".loadinglock";
|
|
161
|
-
let exists = fs.existsSync(repoPath);
|
|
162
|
-
if (exists && !fs.existsSync(repoPath + loadTimeIndicatorFileName)) {
|
|
163
|
-
exists = false;
|
|
164
|
-
}
|
|
165
|
-
if (!exists) {
|
|
166
|
-
await getFileLock(lockPath, async () => {
|
|
167
|
-
if (fs.existsSync(repoPath + loadTimeIndicatorFileName)) return;
|
|
168
|
-
|
|
169
|
-
let time = Date.now();
|
|
170
|
-
|
|
171
|
-
// Remove any previous attempt to sync it
|
|
172
|
-
if (fs.existsSync(repoPath)) {
|
|
173
|
-
await fs.promises.rm(repoPath, { recursive: true });
|
|
174
|
-
}
|
|
175
|
-
// Clone it
|
|
176
|
-
await executeCommand("git", ["clone", gitURL, repoPath]);
|
|
177
|
-
await executeCommand("git", ["reset", "--hard", spec.gitRef], { cwd: repoPath });
|
|
178
|
-
|
|
179
|
-
// Yarn install
|
|
180
|
-
await executeCommand("yarn", ["install"], { cwd: repoPath });
|
|
181
|
-
|
|
182
|
-
// Delete querysub, and replace it with a symlink. Otherwise the synchronization code
|
|
183
|
-
// will run again, and a lot of setup code will run again, etc, and nothing will work correctly.
|
|
184
|
-
let querysubPath = repoPath + "node_modules/querysub";
|
|
185
|
-
await fs.promises.rm(querysubPath, { recursive: true });
|
|
186
|
-
|
|
187
|
-
let actualQuerysubPath = path.resolve("./node_modules/querysub");
|
|
188
|
-
await fs.promises.symlink(actualQuerysubPath, querysubPath, "junction");
|
|
189
|
-
|
|
190
|
-
// Mark it as loaded. If we don't reach this point we will move the folder and try again next time
|
|
191
|
-
await fs.promises.writeFile(repoPath + loadTimeIndicatorFileName, Date.now() + "");
|
|
192
|
-
|
|
193
|
-
time = Date.now() - time;
|
|
194
|
-
console.log(blue(`Cloned and yarn installed repo`), { gitURL, time });
|
|
195
|
-
});
|
|
196
|
-
}
|
|
197
|
-
|
|
198
|
-
return repoPath;
|
|
199
|
-
};
|
|
200
|
-
|
|
201
|
-
const lockHeartbeatInterval = timeInSecond * 15;
|
|
202
|
-
const lockExpiryTime = timeInSecond * 60;
|
|
203
|
-
const lockCheckTime = 500;
|
|
204
|
-
async function tryGetFileLock(file: string) {
|
|
205
|
-
let expiryTime = Date.now() - lockExpiryTime;
|
|
206
|
-
try {
|
|
207
|
-
let lastHeartbeat = Number((await fs.promises.readFile(file, "utf8")).split("-")[0]) || 0;
|
|
208
|
-
if (lastHeartbeat > expiryTime) return false;
|
|
209
|
-
} catch { }
|
|
210
|
-
let heartbeatWrite = Date.now() + "-" + nextId();
|
|
211
|
-
let dir = path.dirname(file);
|
|
212
|
-
if (!fs.existsSync(dir)) {
|
|
213
|
-
await fs.promises.mkdir(dir, { recursive: true });
|
|
214
|
-
}
|
|
215
|
-
await fs.promises.writeFile(file, heartbeatWrite);
|
|
216
|
-
await delay(lockCheckTime);
|
|
217
|
-
let readBack = await fs.promises.readFile(file, "utf8");
|
|
218
|
-
if (readBack !== heartbeatWrite) return false;
|
|
219
|
-
return true;
|
|
220
|
-
}
|
|
221
|
-
async function getFileLock(file: string, callback: () => Promise<void>) {
|
|
222
|
-
console.log(magenta(`Getting file lock: ${file}`));
|
|
223
|
-
while (true) {
|
|
224
|
-
if (await tryGetFileLock(file)) {
|
|
225
|
-
console.log(magenta(`Got file lock: ${file}`));
|
|
226
|
-
let done = false;
|
|
227
|
-
logErrors((async () => {
|
|
228
|
-
while (!done) {
|
|
229
|
-
await delay(lockHeartbeatInterval);
|
|
230
|
-
console.log(magenta(`Not done with file lock, writing heartbeat: ${file}`));
|
|
231
|
-
await fs.promises.writeFile(file, Date.now() + "-" + nextId());
|
|
232
|
-
}
|
|
233
|
-
})());
|
|
234
|
-
try {
|
|
235
|
-
await callback();
|
|
236
|
-
} finally {
|
|
237
|
-
await fs.promises.unlink(file);
|
|
238
|
-
done = true;
|
|
239
|
-
console.log(magenta(`Releasing file lock: ${file}`));
|
|
240
|
-
}
|
|
241
|
-
return;
|
|
242
|
-
}
|
|
243
|
-
console.log(magenta(`Waiting for lock file lock: ${file}`));
|
|
244
|
-
await delay(lockHeartbeatInterval);
|
|
245
|
-
}
|
|
246
|
-
}
|
|
247
|
-
|
|
248
|
-
export function isDynamicModule(module: NodeJS.Module): boolean {
|
|
249
|
-
return isDynamicModulePath(module.filename);
|
|
250
|
-
}
|
|
251
|
-
export function isDynamicModulePath(path: string): boolean {
|
|
252
|
-
let parts = path.replaceAll("\\", "/").split("/");
|
|
253
|
-
return parts.includes("node_modules") || parts.includes("synced_repos");
|
|
254
|
-
}
|
|
255
|
-
/** Annoying, and slower than isDynamicModule, but... necessary. We can't expect the user to call isDynamicModule,
|
|
256
|
-
* and our functions will all resolve to the root on purpose, so... we need to check the callstack!
|
|
257
|
-
*/
|
|
258
|
-
export function isCallerDynamicModule(): boolean {
|
|
259
|
-
return getCallstackFiles().some(isDynamicModulePath);
|
|
260
|
-
}
|
|
261
|
-
function getCallstackFiles(): string[] {
|
|
262
|
-
let stack = new Error().stack;
|
|
263
|
-
if (!stack) return [];
|
|
264
|
-
let lines = stack.split("\n");
|
|
265
|
-
let files = lines.map(line => {
|
|
266
|
-
let match = line.match(/\(([^)]+)\)/);
|
|
267
|
-
if (!match) return "";
|
|
268
|
-
return match[1];
|
|
269
|
-
});
|
|
270
|
-
return files;
|
|
271
|
-
}
|
|
272
|
-
|
|
273
|
-
let importBlockers = new Set<Promise<unknown>>();
|
|
274
|
-
export function registerImportBlockers(blocker: Promise<unknown>) {
|
|
275
|
-
importBlockers.add(blocker);
|
|
276
|
-
void blocker.finally(() => {
|
|
277
|
-
importBlockers.delete(blocker);
|
|
278
|
-
});
|
|
279
|
-
}
|
|
280
|
-
export async function waitForImportBlockers() {
|
|
281
|
-
while (importBlockers.size > 0) {
|
|
282
|
-
await Promise.all(Array.from(importBlockers));
|
|
283
|
-
}
|
|
284
|
-
}
|
|
285
|
-
|
|
286
|
-
function getDirname(fileName: string) {
|
|
287
|
-
fileName = fileName.replaceAll("\\", "/");
|
|
288
|
-
let lastSlash = fileName.lastIndexOf("/");
|
|
289
|
-
if (lastSlash === -1) return "";
|
|
290
|
-
return fileName.slice(0, lastSlash);
|
|
291
|
-
}
|
|
292
|
-
|
|
293
|
-
// On import every parent folder is set to the spec. For root folders this makes
|
|
294
|
-
// the spec useless, but for unique folders it will only be set once, making it very useful!
|
|
295
|
-
// "overlapping" means multiple folders mapped to this spec
|
|
296
|
-
let folderToSpec = new Map<string, LoadFunctionSpec | "overlapping">();
|
|
297
|
-
function registerSpec(fileName: string, spec: LoadFunctionSpec | "overlapping") {
|
|
298
|
-
let folder = getDirname(fileName);
|
|
299
|
-
while (true) {
|
|
300
|
-
if (folderToSpec.has(folder)) {
|
|
301
|
-
folderToSpec.set(folder, "overlapping");
|
|
302
|
-
} else {
|
|
303
|
-
folderToSpec.set(folder, spec);
|
|
304
|
-
}
|
|
305
|
-
let parentFolder = getDirname(folder);
|
|
306
|
-
if (parentFolder === folder) {
|
|
307
|
-
break;
|
|
308
|
-
}
|
|
309
|
-
folder = parentFolder;
|
|
310
|
-
}
|
|
311
|
-
}
|
|
312
|
-
|
|
313
|
-
export function getSpecFromModule(module: NodeJS.Module): LoadFunctionSpec | undefined {
|
|
314
|
-
let folder = getDirname(module.filename);
|
|
315
|
-
while (true) {
|
|
316
|
-
let spec = folderToSpec.get(folder);
|
|
317
|
-
if (spec && typeof spec !== "string") return spec;
|
|
318
|
-
let parentFolder = getDirname(folder);
|
|
319
|
-
if (parentFolder === folder) break;
|
|
320
|
-
folder = parentFolder;
|
|
321
|
-
}
|
|
322
|
-
if (!module.parent) return undefined;
|
|
323
|
-
// Keep going up to our parent? This shouldn't really happen, but... maybe it will?
|
|
324
|
-
return getSpecFromModule(module.parent);
|
|
325
|
-
}
|
|
326
|
-
|
|
327
|
-
async function getModuleFromSpecBase(
|
|
328
|
-
spec: LoadFunctionSpec
|
|
329
|
-
): Promise<NodeJS.Module> {
|
|
330
|
-
console.log(blue(`Loading module for ${JSON.stringify(spec)}`));
|
|
331
|
-
// Register ourself as overlapping, to prevent ambient modules from being set in this way.
|
|
332
|
-
registerSpec(module.filename, "overlapping");
|
|
333
|
-
|
|
334
|
-
let hotReloadPackagePath = "";
|
|
335
|
-
let path = gitURLRefMappings.get(getSpecKey(spec));
|
|
336
|
-
let deployPath = "";
|
|
337
|
-
if (!path) {
|
|
338
|
-
// Sync the git repo, `yarn install --ignore-scripts`, require the path, get the export, and then return that function.
|
|
339
|
-
let packagePath = !spec.noLocal && getLocalPathRemapping()[spec.gitURL] || "";
|
|
340
|
-
hotReloadPackagePath = packagePath;
|
|
341
|
-
if (!packagePath) {
|
|
342
|
-
packagePath = await moduleResolver(spec);
|
|
343
|
-
}
|
|
344
|
-
|
|
345
|
-
if (!packagePath.endsWith("/")) {
|
|
346
|
-
packagePath += "/";
|
|
347
|
-
}
|
|
348
|
-
|
|
349
|
-
let specFilePath = spec.FilePath;
|
|
350
|
-
if (specFilePath.startsWith("/")) {
|
|
351
|
-
specFilePath = specFilePath.slice(1);
|
|
352
|
-
}
|
|
353
|
-
path = packagePath + specFilePath;
|
|
354
|
-
deployPath = packagePath + "deploy.ts";
|
|
355
|
-
}
|
|
356
|
-
|
|
357
|
-
registerSpec(path, spec);
|
|
358
|
-
|
|
359
|
-
console.log(blue(`require(${JSON.stringify(path)})`));
|
|
360
|
-
try {
|
|
361
|
-
await SocketFunction.ignoreExposeCalls(async () => {
|
|
362
|
-
await setIsDynamicallyLoading(async () => {
|
|
363
|
-
// Import deploy, which should always exist, and provides a consistent
|
|
364
|
-
// import order, fixing a lot of cyclic / module level code logic issues.
|
|
365
|
-
if (deployPath) {
|
|
366
|
-
await (require as any)(deployPath, true);
|
|
367
|
-
}
|
|
368
|
-
// NOTE: The true tells require to not warn about the async loading
|
|
369
|
-
await (require as any)(path, true);
|
|
370
|
-
|
|
371
|
-
await waitForImportBlockers();
|
|
372
|
-
});
|
|
373
|
-
});
|
|
374
|
-
} catch (e: any) {
|
|
375
|
-
throw new Error(`Error when loading function for ${JSON.stringify(path)}:${spec.FunctionId}\n${e.stack}`);
|
|
376
|
-
}
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
let moduleId = require.resolve(path) || path;
|
|
380
|
-
let moduleImported = require.cache[moduleId];
|
|
381
|
-
if (!moduleImported) {
|
|
382
|
-
debugbreak(2);
|
|
383
|
-
debugger;
|
|
384
|
-
throw new Error(`Module not found: ${moduleId} (for ${spec.FunctionId})`);
|
|
385
|
-
}
|
|
386
|
-
|
|
387
|
-
if (!isPublic()) {
|
|
388
|
-
if (hotReloadPackagePath) {
|
|
389
|
-
hotReloadUnderPath(hotReloadPackagePath);
|
|
390
|
-
}
|
|
391
|
-
}
|
|
392
|
-
|
|
393
|
-
return moduleImported;
|
|
394
|
-
}
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
// Hot reload at or under the path
|
|
400
|
-
const hotReloadUnderPath = cache((path: string) => {
|
|
401
|
-
console.log(magenta(`Hot reloading under path: ${path}`));
|
|
402
|
-
path = path.replace(/\\/g, "/");
|
|
403
|
-
for (let module of Object.values(require.cache)) {
|
|
404
|
-
if (!module) continue;
|
|
405
|
-
let modulePath = module.filename.replace(/\\/g, "/");
|
|
406
|
-
if (modulePath.startsWith(path)) {
|
|
407
|
-
hotReloadModule(module);
|
|
408
|
-
}
|
|
409
|
-
}
|
|
410
|
-
});
|
|
411
|
-
const hotReloadModule = cache((module: NodeJS.Module) => {
|
|
412
|
-
fs.watch(module.filename, () => {
|
|
413
|
-
logErrors(hotreloadIfChanged(module));
|
|
414
|
-
});
|
|
415
|
-
});
|
|
416
|
-
const hotreloadIfChanged = batchFunction({ delay: 100, name: "hotreloadIfChanged" }, async (modules: NodeJS.Module[]) => {
|
|
417
|
-
let changedModules = new Set<NodeJS.Module>();
|
|
418
|
-
for (let module of modules) {
|
|
419
|
-
let newContents = await fs.promises.readFile(module.filename, "utf8");
|
|
420
|
-
let newSHA256 = crypto.createHash("sha256").update(newContents).digest("hex");
|
|
421
|
-
if (newSHA256 !== module.sourceSHA256) {
|
|
422
|
-
changedModules.add(module);
|
|
423
|
-
}
|
|
424
|
-
}
|
|
425
|
-
console.log(magenta(`Hot reloading changed modules:`));
|
|
426
|
-
for (let module of changedModules) {
|
|
427
|
-
if (module.updateContents) {
|
|
428
|
-
let justContents = module.hotreload === false || isNode() && module.noserverhotreload;
|
|
429
|
-
console.log(magenta(` ${module.filename} ${justContents && "(no re-evaluate)" || ""}`));
|
|
430
|
-
setExternalHotReloading(true);
|
|
431
|
-
try {
|
|
432
|
-
try {
|
|
433
|
-
module.updateContents();
|
|
434
|
-
} catch (e) {
|
|
435
|
-
console.error(red(`Error when loading module contents ${module.filename}`));
|
|
436
|
-
console.error(e);
|
|
437
|
-
}
|
|
438
|
-
if (!justContents) {
|
|
439
|
-
module.loaded = false;
|
|
440
|
-
try {
|
|
441
|
-
module.load(module.id);
|
|
442
|
-
} catch (e) {
|
|
443
|
-
module.load(module.id);
|
|
444
|
-
console.error(red(`Error when hot reloading ${module.filename}`));
|
|
445
|
-
console.error(e);
|
|
446
|
-
}
|
|
447
|
-
}
|
|
448
|
-
} finally {
|
|
449
|
-
setExternalHotReloading(false);
|
|
450
|
-
}
|
|
451
|
-
} else {
|
|
452
|
-
console.log(red(` (skipping due to missing updateContents functions) ${module.filename}`));
|
|
453
|
-
}
|
|
454
|
-
}
|
|
455
|
-
|
|
456
|
-
for (let watcher of watchers) {
|
|
457
|
-
if (watcher.disposed) {
|
|
458
|
-
watcher.dispose();
|
|
459
|
-
} else {
|
|
460
|
-
watcher.explicitlyTrigger();
|
|
461
|
-
}
|
|
462
|
-
}
|
|
463
|
-
});
|
|
464
|
-
|
|
465
|
-
async function which(command: string): Promise<string> {
|
|
466
|
-
let whichOrWhere = os.platform() === "win32" ? "where" : "which";
|
|
467
|
-
let path = child_process.execSync(`${whichOrWhere} ${command}`).toString().trim().replaceAll("\r", "").split("\n")[0].trim().replaceAll("\\", "/");
|
|
468
|
-
if (!path) {
|
|
469
|
-
throw new Error(`Command ${command} not found`);
|
|
470
|
-
}
|
|
471
|
-
path = `"${path}"`;
|
|
472
|
-
return path;
|
|
473
|
-
}
|
|
474
|
-
|
|
475
|
-
async function executeCommand(command: string, args: string[], options?: {
|
|
476
|
-
cwd?: string;
|
|
477
|
-
}): Promise<string> {
|
|
478
|
-
let resolvedCommandPath = await which(command);
|
|
479
|
-
let debug = `${resolvedCommandPath} ${args.join(" ")}`;
|
|
480
|
-
if (options?.cwd) {
|
|
481
|
-
debug += ` (in ${options.cwd})`;
|
|
482
|
-
}
|
|
483
|
-
console.log(`Running command: ${debug}`);
|
|
484
|
-
return new Promise((resolve, reject) => {
|
|
485
|
-
const child = child_process.spawn(resolvedCommandPath, args, {
|
|
486
|
-
cwd: options?.cwd,
|
|
487
|
-
stdio: ["ignore", "pipe", "pipe"],
|
|
488
|
-
shell: true,
|
|
489
|
-
});
|
|
490
|
-
|
|
491
|
-
let stdout = "";
|
|
492
|
-
let stderr = "";
|
|
493
|
-
|
|
494
|
-
child.stdout.on("data", (data) => {
|
|
495
|
-
console.log(data.toString());
|
|
496
|
-
stdout += data.toString();
|
|
497
|
-
});
|
|
498
|
-
|
|
499
|
-
child.stderr.on("data", (data) => {
|
|
500
|
-
console.log(data.toString());
|
|
501
|
-
stderr += data.toString();
|
|
502
|
-
});
|
|
503
|
-
|
|
504
|
-
child.on("error", (error) => {
|
|
505
|
-
reject(new Error(`Failed to execute command ${debug}: ${error.message}`));
|
|
506
|
-
});
|
|
507
|
-
|
|
508
|
-
child.on("close", (code) => {
|
|
509
|
-
if (code !== 0) {
|
|
510
|
-
reject(new Error(`Command ${debug} failed with code ${code}.\nstdout: ${JSON.stringify(stdout)}\nstderr: ${JSON.stringify(stderr)}`));
|
|
511
|
-
return;
|
|
512
|
-
}
|
|
513
|
-
resolve(stdout.trim());
|
|
514
|
-
});
|
|
515
|
-
});
|
|
516
|
-
}
|
|
1
|
+
import { getSubFolder } from "../fs";
|
|
2
|
+
import { FunctionSpec } from "./PathFunctionRunner";
|
|
3
|
+
import child_process from "child_process";
|
|
4
|
+
import fs from "fs";
|
|
5
|
+
import { blue, magenta, red } from "socket-function/src/formatting/logColors";
|
|
6
|
+
import debugbreak from "debugbreak";
|
|
7
|
+
import { cache, lazy } from "socket-function/src/caching";
|
|
8
|
+
import { batchFunction, delay, runInSerial, runInfinitePoll } from "socket-function/src/batching";
|
|
9
|
+
import { errorToUndefined, logErrors } from "../errors";
|
|
10
|
+
import crypto from "crypto";
|
|
11
|
+
import { MaybePromise } from "socket-function/src/types";
|
|
12
|
+
import yargs from "yargs";
|
|
13
|
+
import { SyncWatcher } from "../2-proxy/PathValueProxyWatcher";
|
|
14
|
+
import { MAX_CHANGE_AGE } from "../0-path-value-core/pathValueCore";
|
|
15
|
+
import { isNode, isNodeTrue, nextId, timeInSecond } from "socket-function/src/misc";
|
|
16
|
+
import { getPathStr2, getPathStr3 } from "../path";
|
|
17
|
+
import { consistentHash } from "../misc/hash";
|
|
18
|
+
import { setExternalHotReloading } from "socket-function/hot/HotReloadController";
|
|
19
|
+
import { devDebugbreak, isLocal, isPublic, setIsDynamicallyLoading } from "../config";
|
|
20
|
+
import { SocketFunction } from "socket-function/SocketFunction";
|
|
21
|
+
import { requiresNetworkTrustHook } from "../-d-trust/NetworkTrust2";
|
|
22
|
+
import { getControllerNodeId, getControllerNodeIdList } from "../-g-core-values/NodeCapabilities";
|
|
23
|
+
import { sha256 } from "js-sha256";
|
|
24
|
+
import os from "os";
|
|
25
|
+
import { formatTime } from "socket-function/src/formatting/format";
|
|
26
|
+
import path from "path";
|
|
27
|
+
|
|
28
|
+
export type LoadFunctionSpec = {
|
|
29
|
+
gitURL: string;
|
|
30
|
+
gitRef: string;
|
|
31
|
+
FilePath: string;
|
|
32
|
+
FunctionId: string;
|
|
33
|
+
// NOTE: Ignores the local flag temporarily. For testing.
|
|
34
|
+
noLocal?: boolean;
|
|
35
|
+
};
|
|
36
|
+
|
|
37
|
+
let watchers = new Set<SyncWatcher>();
|
|
38
|
+
let ensureDisposingDeadWatchers = lazy(() => {
|
|
39
|
+
runInfinitePoll(MAX_CHANGE_AGE, function disposeDeadWatchers() {
|
|
40
|
+
for (let watcher of watchers) {
|
|
41
|
+
if (watcher.disposed) {
|
|
42
|
+
watchers.delete(watcher);
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
});
|
|
46
|
+
});
|
|
47
|
+
export function watchModuleHotreloads(watcher: SyncWatcher) {
|
|
48
|
+
watchers.add(watcher);
|
|
49
|
+
ensureDisposingDeadWatchers();
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
const getLocalPathRemapping = lazy((): { [gitUrl: string]: string } => {
|
|
53
|
+
let localPaths: string[] = [];
|
|
54
|
+
if (!isPublic()) {
|
|
55
|
+
localPaths.push(process.cwd().replaceAll(/\\/g, "/") + "/");
|
|
56
|
+
}
|
|
57
|
+
if (!localPaths.length) return {};
|
|
58
|
+
let mapping: { [gitUrl: string]: string } = Object.create(null);
|
|
59
|
+
for (let path of localPaths) {
|
|
60
|
+
if (!path.endsWith("/")) {
|
|
61
|
+
path += "/";
|
|
62
|
+
}
|
|
63
|
+
let packageJSON = fs.readFileSync(path + "package.json", "utf8");
|
|
64
|
+
let packageJSONObj = JSON.parse(packageJSON);
|
|
65
|
+
let repo = packageJSONObj.repository as { type: string, url: string } | undefined;
|
|
66
|
+
if (!repo) throw new Error(`No "repository" property in package.json at ${path}`);
|
|
67
|
+
if (repo.type !== "git" && repo.type !== "git+https" && repo.type !== "https") {
|
|
68
|
+
throw new Error(`Repository type ${JSON.stringify(repo.type)} is not supported yet. Only "git", "git+https", and "https" are presently supported.`);
|
|
69
|
+
}
|
|
70
|
+
mapping[repo.url] = path;
|
|
71
|
+
// https://github.com/sliftist/qs-cyoa.git
|
|
72
|
+
// git@github.com:sliftist/qs-cyoa.git
|
|
73
|
+
mapping[repo.url.replace(/^https:\/\/([^\/]+)\//, "git@$1:")] = path;
|
|
74
|
+
// Go from git to https as well, as it might be https
|
|
75
|
+
mapping[repo.url.replace(/^git@([^:]+):(.+)$/, "https://$1/$2")] = path;
|
|
76
|
+
}
|
|
77
|
+
return mapping;
|
|
78
|
+
});
|
|
79
|
+
|
|
80
|
+
// If we have so many different functions that we can't cache them all... then we are going
|
|
81
|
+
// to have a problem! Also... this cache is only the function, not the require.cache, which
|
|
82
|
+
// isn't being cleared anyways, so THIS isn't really where we would leak.
|
|
83
|
+
export function getModuleFromSpec(spec: FunctionSpec): MaybePromise<NodeJS.Module> {
|
|
84
|
+
return getModuleFromConfig(spec);
|
|
85
|
+
}
|
|
86
|
+
let moduleCache = new Map<string, {
|
|
87
|
+
result: MaybePromise<NodeJS.Module>;
|
|
88
|
+
error: Error | undefined;
|
|
89
|
+
}>();
|
|
90
|
+
function getSpecKey(spec: LoadFunctionSpec) {
|
|
91
|
+
// Only include these specific keys AND order them, so the hash hits no matter how this function is called
|
|
92
|
+
spec = {
|
|
93
|
+
FilePath: spec.FilePath,
|
|
94
|
+
gitURL: spec.gitURL,
|
|
95
|
+
gitRef: spec.gitRef,
|
|
96
|
+
FunctionId: spec.FunctionId,
|
|
97
|
+
noLocal: spec.noLocal,
|
|
98
|
+
};
|
|
99
|
+
return JSON.stringify(spec);
|
|
100
|
+
}
|
|
101
|
+
export function getModuleFromConfig(spec: LoadFunctionSpec): MaybePromise<NodeJS.Module> {
|
|
102
|
+
let key = getSpecKey(spec);
|
|
103
|
+
let value = moduleCache.get(key);
|
|
104
|
+
if (!value) {
|
|
105
|
+
let promise = getModuleFromSpecBase(spec);
|
|
106
|
+
value = { result: promise, error: undefined };
|
|
107
|
+
moduleCache.set(key, value);
|
|
108
|
+
promise.then(
|
|
109
|
+
result => {
|
|
110
|
+
moduleCache.set(key, { result, error: undefined });
|
|
111
|
+
},
|
|
112
|
+
error => {
|
|
113
|
+
moduleCache.set(key, { result: null as any, error });
|
|
114
|
+
}
|
|
115
|
+
);
|
|
116
|
+
}
|
|
117
|
+
if (value.error) throw value.error;
|
|
118
|
+
return value.result;
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
let gitURLRefMappings = new Map<string, string>();
|
|
122
|
+
export function setGitURLMapping(config: {
|
|
123
|
+
spec: FunctionSpec;
|
|
124
|
+
resolvedPath: string;
|
|
125
|
+
}) {
|
|
126
|
+
gitURLRefMappings.set(getSpecKey(config.spec), config.resolvedPath);
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
// Marker file written as the LAST step of a repo sync; its presence means the folder is complete.
// A synced folder without it is treated as a failed partial sync and rebuilt.
const loadTimeIndicatorFileName = "loadTimeIndicator-71cd93ba-1667-49ac-9206-b27930bbd983";
|
|
130
|
+
|
|
131
|
+
/** spec => path that we can use with require.
 *
 * Resolves { gitURL, gitRef } to a local folder under synced_repos/: clones the repo,
 * hard-resets to the ref, and runs `yarn install` on first use. Completion is recorded
 * by writing loadTimeIndicatorFileName as the final step; cross-process coordination
 * is done via getFileLock on a path-derived lock file.
 */
let moduleResolver = async (spec: {
    gitURL: string;
    gitRef: string;
}) => {
    if (!isPublic()) {
        // Probably a bug. The local path should have matched.
        devDebugbreak();
    }
    let gitURL = spec.gitURL;
    let urlForPath = gitURL;

    if (urlForPath.startsWith("http")) {
        // Switch to a git url... incase the repo is private? Or something?
        // TODO: We should only do this selectively, as I think we can't sync this
        // way without owning the repo, or... something?

        let url = new URL(urlForPath);
        gitURL = "git@" + url.host + ":" + url.pathname.slice(1);
        urlForPath = gitURL;
    }
    // Flatten the URL into a filesystem-safe relative folder path
    // (e.g. git@github.com:user/repo.git => git/github.com/user/repo).
    urlForPath = urlForPath.replaceAll("@", "/");
    urlForPath = urlForPath.replaceAll(":", "/");
    if (urlForPath.endsWith(".git")) {
        urlForPath = urlForPath.slice(0, -".git".length);
    }
    let repoPath = getSubFolder("synced_repos") + urlForPath + "/" + spec.gitRef + "/";

    // Lock name derives from the repo path, so syncs of the same repo+ref serialize
    // while different repos can proceed in parallel.
    const lockFolder = getSubFolder("synced_repos_locks");
    let lockPath = lockFolder + sha256(repoPath).slice(0, 16) + ".loadinglock";
    let exists = fs.existsSync(repoPath);
    if (exists && !fs.existsSync(repoPath + loadTimeIndicatorFileName)) {
        // Folder exists but the completion marker is missing => previous sync died partway.
        exists = false;
    }
    if (!exists) {
        await getFileLock(lockPath, async () => {
            // Another process may have finished the sync while we waited for the lock.
            if (fs.existsSync(repoPath + loadTimeIndicatorFileName)) return;

            let time = Date.now();

            // Remove any previous attempt to sync it
            if (fs.existsSync(repoPath)) {
                await fs.promises.rm(repoPath, { recursive: true });
            }
            // Clone it
            await executeCommand("git", ["clone", gitURL, repoPath]);
            await executeCommand("git", ["reset", "--hard", spec.gitRef], { cwd: repoPath });

            // Yarn install
            await executeCommand("yarn", ["install"], { cwd: repoPath });

            // Delete querysub, and replace it with a symlink. Otherwise the synchronization code
            // will run again, and a lot of setup code will run again, etc, and nothing will work correctly.
            // NOTE(review): this rm throws if node_modules/querysub doesn't exist after install —
            // confirm synced repos always depend on querysub.
            let querysubPath = repoPath + "node_modules/querysub";
            await fs.promises.rm(querysubPath, { recursive: true });

            let actualQuerysubPath = path.resolve("./node_modules/querysub");
            await fs.promises.symlink(actualQuerysubPath, querysubPath, "junction");

            // Mark it as loaded. If we don't reach this point we will move the folder and try again next time
            await fs.promises.writeFile(repoPath + loadTimeIndicatorFileName, Date.now() + "");

            time = Date.now() - time;
            console.log(blue(`Cloned and yarn installed repo`), { gitURL, time });
        });
    }

    return repoPath;
};
|
|
200
|
+
|
|
201
|
+
// How often a live lock holder rewrites its heartbeat into the lock file.
const lockHeartbeatInterval = timeInSecond * 15;
// A lock whose last heartbeat is older than this is considered abandoned and may be stolen.
const lockExpiryTime = timeInSecond * 60;
// How long (ms) we wait after writing our claim before reading it back to detect a racing writer.
const lockCheckTime = 500;
|
|
204
|
+
async function tryGetFileLock(file: string) {
|
|
205
|
+
let expiryTime = Date.now() - lockExpiryTime;
|
|
206
|
+
try {
|
|
207
|
+
let lastHeartbeat = Number((await fs.promises.readFile(file, "utf8")).split("-")[0]) || 0;
|
|
208
|
+
if (lastHeartbeat > expiryTime) return false;
|
|
209
|
+
} catch { }
|
|
210
|
+
let heartbeatWrite = Date.now() + "-" + nextId();
|
|
211
|
+
let dir = path.dirname(file);
|
|
212
|
+
if (!fs.existsSync(dir)) {
|
|
213
|
+
await fs.promises.mkdir(dir, { recursive: true });
|
|
214
|
+
}
|
|
215
|
+
await fs.promises.writeFile(file, heartbeatWrite);
|
|
216
|
+
await delay(lockCheckTime);
|
|
217
|
+
let readBack = await fs.promises.readFile(file, "utf8");
|
|
218
|
+
if (readBack !== heartbeatWrite) return false;
|
|
219
|
+
return true;
|
|
220
|
+
}
|
|
221
|
+
async function getFileLock(file: string, callback: () => Promise<void>) {
|
|
222
|
+
console.log(magenta(`Getting file lock: ${file}`));
|
|
223
|
+
while (true) {
|
|
224
|
+
if (await tryGetFileLock(file)) {
|
|
225
|
+
console.log(magenta(`Got file lock: ${file}`));
|
|
226
|
+
let done = false;
|
|
227
|
+
logErrors((async () => {
|
|
228
|
+
while (!done) {
|
|
229
|
+
await delay(lockHeartbeatInterval);
|
|
230
|
+
console.log(magenta(`Not done with file lock, writing heartbeat: ${file}`));
|
|
231
|
+
await fs.promises.writeFile(file, Date.now() + "-" + nextId());
|
|
232
|
+
}
|
|
233
|
+
})());
|
|
234
|
+
try {
|
|
235
|
+
await callback();
|
|
236
|
+
} finally {
|
|
237
|
+
await fs.promises.unlink(file);
|
|
238
|
+
done = true;
|
|
239
|
+
console.log(magenta(`Releasing file lock: ${file}`));
|
|
240
|
+
}
|
|
241
|
+
return;
|
|
242
|
+
}
|
|
243
|
+
console.log(magenta(`Waiting for lock file lock: ${file}`));
|
|
244
|
+
await delay(lockHeartbeatInterval);
|
|
245
|
+
}
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
export function isDynamicModule(module: NodeJS.Module): boolean {
|
|
249
|
+
return isDynamicModulePath(module.filename);
|
|
250
|
+
}
|
|
251
|
+
export function isDynamicModulePath(path: string): boolean {
|
|
252
|
+
let parts = path.replaceAll("\\", "/").split("/");
|
|
253
|
+
return parts.includes("node_modules") || parts.includes("synced_repos");
|
|
254
|
+
}
|
|
255
|
+
/** Annoying, and slower than isDynamicModule, but... necessary. We can't expect the user to call isDynamicModule,
|
|
256
|
+
* and our functions will all resolve to the root on purpose, so... we need to check the callstack!
|
|
257
|
+
*/
|
|
258
|
+
export function isCallerDynamicModule(): boolean {
|
|
259
|
+
return getCallstackFiles().some(isDynamicModulePath);
|
|
260
|
+
}
|
|
261
|
+
function getCallstackFiles(): string[] {
|
|
262
|
+
let stack = new Error().stack;
|
|
263
|
+
if (!stack) return [];
|
|
264
|
+
let lines = stack.split("\n");
|
|
265
|
+
let files = lines.map(line => {
|
|
266
|
+
let match = line.match(/\(([^)]+)\)/);
|
|
267
|
+
if (!match) return "";
|
|
268
|
+
return match[1];
|
|
269
|
+
});
|
|
270
|
+
return files;
|
|
271
|
+
}
|
|
272
|
+
|
|
273
|
+
let importBlockers = new Set<Promise<unknown>>();
|
|
274
|
+
export function registerImportBlockers(blocker: Promise<unknown>) {
|
|
275
|
+
importBlockers.add(blocker);
|
|
276
|
+
void blocker.finally(() => {
|
|
277
|
+
importBlockers.delete(blocker);
|
|
278
|
+
});
|
|
279
|
+
}
|
|
280
|
+
export async function waitForImportBlockers() {
|
|
281
|
+
while (importBlockers.size > 0) {
|
|
282
|
+
await Promise.all(Array.from(importBlockers));
|
|
283
|
+
}
|
|
284
|
+
}
|
|
285
|
+
|
|
286
|
+
function getDirname(fileName: string) {
|
|
287
|
+
fileName = fileName.replaceAll("\\", "/");
|
|
288
|
+
let lastSlash = fileName.lastIndexOf("/");
|
|
289
|
+
if (lastSlash === -1) return "";
|
|
290
|
+
return fileName.slice(0, lastSlash);
|
|
291
|
+
}
|
|
292
|
+
|
|
293
|
+
// On import every parent folder is set to the spec. For root folders this makes
// the spec useless, but for unique folders it will only be set once, making it very useful!
// "overlapping" means multiple folders mapped to this spec
// (the folder is claimed by more than one spec, so it cannot identify a single one).
let folderToSpec = new Map<string, LoadFunctionSpec | "overlapping">();
|
|
297
|
+
function registerSpec(fileName: string, spec: LoadFunctionSpec | "overlapping") {
|
|
298
|
+
let folder = getDirname(fileName);
|
|
299
|
+
while (true) {
|
|
300
|
+
if (folderToSpec.has(folder)) {
|
|
301
|
+
folderToSpec.set(folder, "overlapping");
|
|
302
|
+
} else {
|
|
303
|
+
folderToSpec.set(folder, spec);
|
|
304
|
+
}
|
|
305
|
+
let parentFolder = getDirname(folder);
|
|
306
|
+
if (parentFolder === folder) {
|
|
307
|
+
break;
|
|
308
|
+
}
|
|
309
|
+
folder = parentFolder;
|
|
310
|
+
}
|
|
311
|
+
}
|
|
312
|
+
|
|
313
|
+
export function getSpecFromModule(module: NodeJS.Module): LoadFunctionSpec | undefined {
|
|
314
|
+
let folder = getDirname(module.filename);
|
|
315
|
+
while (true) {
|
|
316
|
+
let spec = folderToSpec.get(folder);
|
|
317
|
+
if (spec && typeof spec !== "string") return spec;
|
|
318
|
+
let parentFolder = getDirname(folder);
|
|
319
|
+
if (parentFolder === folder) break;
|
|
320
|
+
folder = parentFolder;
|
|
321
|
+
}
|
|
322
|
+
if (!module.parent) return undefined;
|
|
323
|
+
// Keep going up to our parent? This shouldn't really happen, but... maybe it will?
|
|
324
|
+
return getSpecFromModule(module.parent);
|
|
325
|
+
}
|
|
326
|
+
|
|
327
|
+
async function getModuleFromSpecBase(
|
|
328
|
+
spec: LoadFunctionSpec
|
|
329
|
+
): Promise<NodeJS.Module> {
|
|
330
|
+
console.log(blue(`Loading module for ${JSON.stringify(spec)}`));
|
|
331
|
+
// Register ourself as overlapping, to prevent ambient modules from being set in this way.
|
|
332
|
+
registerSpec(module.filename, "overlapping");
|
|
333
|
+
|
|
334
|
+
let hotReloadPackagePath = "";
|
|
335
|
+
let path = gitURLRefMappings.get(getSpecKey(spec));
|
|
336
|
+
let deployPath = "";
|
|
337
|
+
if (!path) {
|
|
338
|
+
// Sync the git repo, `yarn install --ignore-scripts`, require the path, get the export, and then return that function.
|
|
339
|
+
let packagePath = !spec.noLocal && getLocalPathRemapping()[spec.gitURL] || "";
|
|
340
|
+
hotReloadPackagePath = packagePath;
|
|
341
|
+
if (!packagePath) {
|
|
342
|
+
packagePath = await moduleResolver(spec);
|
|
343
|
+
}
|
|
344
|
+
|
|
345
|
+
if (!packagePath.endsWith("/")) {
|
|
346
|
+
packagePath += "/";
|
|
347
|
+
}
|
|
348
|
+
|
|
349
|
+
let specFilePath = spec.FilePath;
|
|
350
|
+
if (specFilePath.startsWith("/")) {
|
|
351
|
+
specFilePath = specFilePath.slice(1);
|
|
352
|
+
}
|
|
353
|
+
path = packagePath + specFilePath;
|
|
354
|
+
deployPath = packagePath + "deploy.ts";
|
|
355
|
+
}
|
|
356
|
+
|
|
357
|
+
registerSpec(path, spec);
|
|
358
|
+
|
|
359
|
+
console.log(blue(`require(${JSON.stringify(path)})`));
|
|
360
|
+
try {
|
|
361
|
+
await SocketFunction.ignoreExposeCalls(async () => {
|
|
362
|
+
await setIsDynamicallyLoading(async () => {
|
|
363
|
+
// Import deploy, which should always exist, and provides a consistent
|
|
364
|
+
// import order, fixing a lot of cyclic / module level code logic issues.
|
|
365
|
+
if (deployPath) {
|
|
366
|
+
await (require as any)(deployPath, true);
|
|
367
|
+
}
|
|
368
|
+
// NOTE: The true tells require to not warn about the async loading
|
|
369
|
+
await (require as any)(path, true);
|
|
370
|
+
|
|
371
|
+
await waitForImportBlockers();
|
|
372
|
+
});
|
|
373
|
+
});
|
|
374
|
+
} catch (e: any) {
|
|
375
|
+
throw new Error(`Error when loading function for ${JSON.stringify(path)}:${spec.FunctionId}\n${e.stack}`);
|
|
376
|
+
}
|
|
377
|
+
|
|
378
|
+
|
|
379
|
+
let moduleId = require.resolve(path) || path;
|
|
380
|
+
let moduleImported = require.cache[moduleId];
|
|
381
|
+
if (!moduleImported) {
|
|
382
|
+
debugbreak(2);
|
|
383
|
+
debugger;
|
|
384
|
+
throw new Error(`Module not found: ${moduleId} (for ${spec.FunctionId})`);
|
|
385
|
+
}
|
|
386
|
+
|
|
387
|
+
if (!isPublic()) {
|
|
388
|
+
if (hotReloadPackagePath) {
|
|
389
|
+
hotReloadUnderPath(hotReloadPackagePath);
|
|
390
|
+
}
|
|
391
|
+
}
|
|
392
|
+
|
|
393
|
+
return moduleImported;
|
|
394
|
+
}
|
|
395
|
+
|
|
396
|
+
|
|
397
|
+
|
|
398
|
+
|
|
399
|
+
// Hot reload at or under the path
|
|
400
|
+
const hotReloadUnderPath = cache((path: string) => {
|
|
401
|
+
console.log(magenta(`Hot reloading under path: ${path}`));
|
|
402
|
+
path = path.replace(/\\/g, "/");
|
|
403
|
+
for (let module of Object.values(require.cache)) {
|
|
404
|
+
if (!module) continue;
|
|
405
|
+
let modulePath = module.filename.replace(/\\/g, "/");
|
|
406
|
+
if (modulePath.startsWith(path)) {
|
|
407
|
+
hotReloadModule(module);
|
|
408
|
+
}
|
|
409
|
+
}
|
|
410
|
+
});
|
|
411
|
+
// Watch a module's source file; on any change, schedule the batched reload check.
// cache() ensures at most one fs.watch per module.
const hotReloadModule = cache((module: NodeJS.Module) => {
    fs.watch(module.filename, () => {
        logErrors(hotreloadIfChanged(module));
    });
});
|
|
416
|
+
const hotreloadIfChanged = batchFunction({ delay: 100, name: "hotreloadIfChanged" }, async (modules: NodeJS.Module[]) => {
|
|
417
|
+
let changedModules = new Set<NodeJS.Module>();
|
|
418
|
+
for (let module of modules) {
|
|
419
|
+
let newContents = await fs.promises.readFile(module.filename, "utf8");
|
|
420
|
+
let newSHA256 = crypto.createHash("sha256").update(newContents).digest("hex");
|
|
421
|
+
if (newSHA256 !== module.sourceSHA256) {
|
|
422
|
+
changedModules.add(module);
|
|
423
|
+
}
|
|
424
|
+
}
|
|
425
|
+
console.log(magenta(`Hot reloading changed modules:`));
|
|
426
|
+
for (let module of changedModules) {
|
|
427
|
+
if (module.updateContents) {
|
|
428
|
+
let justContents = module.hotreload === false || isNode() && module.noserverhotreload;
|
|
429
|
+
console.log(magenta(` ${module.filename} ${justContents && "(no re-evaluate)" || ""}`));
|
|
430
|
+
setExternalHotReloading(true);
|
|
431
|
+
try {
|
|
432
|
+
try {
|
|
433
|
+
module.updateContents();
|
|
434
|
+
} catch (e) {
|
|
435
|
+
console.error(red(`Error when loading module contents ${module.filename}`));
|
|
436
|
+
console.error(e);
|
|
437
|
+
}
|
|
438
|
+
if (!justContents) {
|
|
439
|
+
module.loaded = false;
|
|
440
|
+
try {
|
|
441
|
+
module.load(module.id);
|
|
442
|
+
} catch (e) {
|
|
443
|
+
module.load(module.id);
|
|
444
|
+
console.error(red(`Error when hot reloading ${module.filename}`));
|
|
445
|
+
console.error(e);
|
|
446
|
+
}
|
|
447
|
+
}
|
|
448
|
+
} finally {
|
|
449
|
+
setExternalHotReloading(false);
|
|
450
|
+
}
|
|
451
|
+
} else {
|
|
452
|
+
console.log(red(` (skipping due to missing updateContents functions) ${module.filename}`));
|
|
453
|
+
}
|
|
454
|
+
}
|
|
455
|
+
|
|
456
|
+
for (let watcher of watchers) {
|
|
457
|
+
if (watcher.disposed) {
|
|
458
|
+
watcher.dispose();
|
|
459
|
+
} else {
|
|
460
|
+
watcher.explicitlyTrigger();
|
|
461
|
+
}
|
|
462
|
+
}
|
|
463
|
+
});
|
|
464
|
+
|
|
465
|
+
async function which(command: string): Promise<string> {
|
|
466
|
+
let whichOrWhere = os.platform() === "win32" ? "where" : "which";
|
|
467
|
+
let path = child_process.execSync(`${whichOrWhere} ${command}`).toString().trim().replaceAll("\r", "").split("\n")[0].trim().replaceAll("\\", "/");
|
|
468
|
+
if (!path) {
|
|
469
|
+
throw new Error(`Command ${command} not found`);
|
|
470
|
+
}
|
|
471
|
+
path = `"${path}"`;
|
|
472
|
+
return path;
|
|
473
|
+
}
|
|
474
|
+
|
|
475
|
+
/**
 * Run an external command (resolved to an absolute path via which()) and return its
 * trimmed stdout. Both streams are echoed to the console as they arrive.
 * Rejects when the process fails to spawn or exits non-zero (the error message
 * includes the captured stdout and stderr).
 */
async function executeCommand(command: string, args: string[], options?: {
    cwd?: string;
}): Promise<string> {
    let resolvedCommandPath = await which(command);
    let debug = `${resolvedCommandPath} ${args.join(" ")}`;
    if (options?.cwd) {
        debug += ` (in ${options.cwd})`;
    }
    console.log(`Running command: ${debug}`);
    return new Promise((resolve, reject) => {
        // shell: true lets the quoted path from which() be interpreted by the shell.
        const child = child_process.spawn(resolvedCommandPath, args, {
            cwd: options?.cwd,
            stdio: ["ignore", "pipe", "pipe"],
            shell: true,
        });

        let stdout = "";
        let stderr = "";

        child.stdout.on("data", (data) => {
            console.log(data.toString());
            stdout += data.toString();
        });

        child.stderr.on("data", (data) => {
            console.log(data.toString());
            stderr += data.toString();
        });

        // "error" fires when the process itself cannot be spawned.
        child.on("error", (error) => {
            reject(new Error(`Failed to execute command ${debug}: ${error.message}`));
        });

        child.on("close", (code) => {
            if (code !== 0) {
                reject(new Error(`Command ${debug} failed with code ${code}.\nstdout: ${JSON.stringify(stdout)}\nstderr: ${JSON.stringify(stderr)}`));
                return;
            }
            resolve(stdout.trim());
        });
    });
}
|