@ondc/automation-mock-runner 1.3.43 → 1.3.45
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/dist/lib/MockRunner.d.ts +11 -2
- package/dist/lib/MockRunner.js +29 -2
- package/dist/lib/configHelper.js +21 -64
- package/dist/lib/constants/function-registry.js +1 -1
- package/dist/lib/helpers/default-helpers-source.d.ts +1 -0
- package/dist/lib/helpers/default-helpers-source.js +221 -0
- package/dist/lib/helpers/default-helpers.d.ts +83 -0
- package/dist/lib/helpers/default-helpers.js +200 -0
- package/dist/lib/helpers/default-helpers.test.d.ts +9 -0
- package/dist/lib/helpers/default-helpers.test.js +265 -0
- package/dist/lib/helpers/index.d.ts +1 -0
- package/dist/lib/helpers/index.js +10 -0
- package/dist/lib/runners/node-runner.d.ts +15 -6
- package/dist/lib/runners/node-runner.js +4 -0
- package/dist/lib/runners/runner-factory.d.ts +3 -2
- package/dist/lib/validators/code-validator.js +3 -1
- package/dist/test/GenerateFetchAndTimeout.test.d.ts +4 -0
- package/dist/test/GenerateFetchAndTimeout.test.js +240 -0
- package/package.json +5 -2
- package/public/node-worker.js +98 -6
|
@@ -0,0 +1,240 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Tests for generate-only fetch allowlist + 45s sandbox setTimeout cap.
|
|
4
|
+
*/
|
|
5
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
6
|
+
if (k2 === undefined) k2 = k;
|
|
7
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
8
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
9
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
10
|
+
}
|
|
11
|
+
Object.defineProperty(o, k2, desc);
|
|
12
|
+
}) : (function(o, m, k, k2) {
|
|
13
|
+
if (k2 === undefined) k2 = k;
|
|
14
|
+
o[k2] = m[k];
|
|
15
|
+
}));
|
|
16
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
17
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
18
|
+
}) : function(o, v) {
|
|
19
|
+
o["default"] = v;
|
|
20
|
+
});
|
|
21
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
22
|
+
var ownKeys = function(o) {
|
|
23
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
24
|
+
var ar = [];
|
|
25
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
26
|
+
return ar;
|
|
27
|
+
};
|
|
28
|
+
return ownKeys(o);
|
|
29
|
+
};
|
|
30
|
+
return function (mod) {
|
|
31
|
+
if (mod && mod.__esModule) return mod;
|
|
32
|
+
var result = {};
|
|
33
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
34
|
+
__setModuleDefault(result, mod);
|
|
35
|
+
return result;
|
|
36
|
+
};
|
|
37
|
+
})();
|
|
38
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
|
+
const http = __importStar(require("http"));
|
|
40
|
+
const MockRunner_1 = require("../lib/MockRunner");
|
|
41
|
+
const function_registry_1 = require("../lib/constants/function-registry");
|
|
42
|
+
/**
 * Builds a minimal, deterministic MockRunner config fixture: fixed meta and
 * transaction identifiers, no steps, no history, no helper/validation libs.
 * Returns a fresh object on every call so tests cannot leak state into each
 * other through shared references.
 */
function baseConfig() {
    const meta = { domain: "ONDC:TRV14", version: "2.0.0", flowId: "fetch-test" };
    const transaction_data = {
        transaction_id: "e9e0b5cb-3f15-48a1-9d86-d4d643f0909d",
        latest_timestamp: "1970-01-01T00:00:00.000Z",
    };
    return {
        meta,
        transaction_data,
        steps: [],
        transaction_history: [],
        validationLib: "",
        helperLib: "",
    };
}
|
|
55
|
+
/**
 * Builds a default "search" step for `actionId` whose mock.generate is the
 * base64-encoded `genSource`, with empty inputs, and returns it so callers
 * can push it onto the runner's steps.
 */
function stepWithGenerate(runner, actionId, genSource) {
    const encoded = MockRunner_1.MockRunner.encodeBase64(genSource);
    const step = runner.getDefaultStep("search", actionId);
    step.mock.inputs = {};
    step.mock.generate = encoded;
    return step;
}
|
|
61
|
+
/**
 * Terminates and clears the process-wide shared runner between tests so each
 * test starts with a fresh (or absent) worker instead of inheriting the
 * previous test's allowlist configuration.
 */
async function resetSharedRunner() {
    const shared = MockRunner_1.MockRunner.sharedRunner;
    if (shared?.terminate) {
        await shared.terminate();
    }
    MockRunner_1.MockRunner.sharedRunner = undefined;
}
|
|
68
|
+
describe("generate timeout = 45s", () => {
|
|
69
|
+
afterEach(resetSharedRunner);
|
|
70
|
+
it("schema advertises a 45s timeout for generate", () => {
|
|
71
|
+
expect(function_registry_1.FUNCTION_REGISTRY.generate.timeout).toBe(45 * 1000);
|
|
72
|
+
expect((0, function_registry_1.getFunctionSchema)("generate").timeout).toBe(45 * 1000);
|
|
73
|
+
});
|
|
74
|
+
it("other function kinds keep their tighter timeouts", () => {
|
|
75
|
+
expect(function_registry_1.FUNCTION_REGISTRY.validate.timeout).toBe(5000);
|
|
76
|
+
expect(function_registry_1.FUNCTION_REGISTRY.meetsRequirements.timeout).toBe(3000);
|
|
77
|
+
});
|
|
78
|
+
it("sandbox setTimeout accepts delays up to 45000ms (was capped at 35000)", async () => {
|
|
79
|
+
const cfg = baseConfig();
|
|
80
|
+
const r = new MockRunner_1.MockRunner(cfg);
|
|
81
|
+
r.getConfig().steps.push(stepWithGenerate(r, "delayed", `async function generate(defaultPayload, sessionData) {
|
|
82
|
+
// Just exercise the clamp — schedule but don't await.
|
|
83
|
+
setTimeout(() => {}, 40000);
|
|
84
|
+
return { scheduled: true };
|
|
85
|
+
}`));
|
|
86
|
+
const res = await r.runGeneratePayload("delayed", {});
|
|
87
|
+
expect(res.success).toBe(true);
|
|
88
|
+
expect(res.result).toEqual({ scheduled: true });
|
|
89
|
+
});
|
|
90
|
+
it("sandbox setTimeout still rejects delays above 45000ms", async () => {
|
|
91
|
+
const cfg = baseConfig();
|
|
92
|
+
const r = new MockRunner_1.MockRunner(cfg);
|
|
93
|
+
r.getConfig().steps.push(stepWithGenerate(r, "too-long", `async function generate(defaultPayload, sessionData) {
|
|
94
|
+
setTimeout(() => {}, 60000);
|
|
95
|
+
return {};
|
|
96
|
+
}`));
|
|
97
|
+
const res = await r.runGeneratePayload("too-long", {});
|
|
98
|
+
expect(res.success).toBe(false);
|
|
99
|
+
expect(res.error?.message || "").toMatch(/1-45000ms/);
|
|
100
|
+
});
|
|
101
|
+
});
|
|
102
|
+
describe("fetch allowlist (generate-only)", () => {
|
|
103
|
+
let server;
|
|
104
|
+
let baseUrl;
|
|
105
|
+
beforeAll(async () => {
|
|
106
|
+
server = http.createServer((req, res) => {
|
|
107
|
+
switch (req.url) {
|
|
108
|
+
case "/v1/ping":
|
|
109
|
+
res.writeHead(200, { "content-type": "text/plain" });
|
|
110
|
+
res.end("pong");
|
|
111
|
+
return;
|
|
112
|
+
case "/v1/redir":
|
|
113
|
+
res.writeHead(302, { location: "http://127.0.0.1:1/nope" });
|
|
114
|
+
res.end();
|
|
115
|
+
return;
|
|
116
|
+
case "/v10/foo":
|
|
117
|
+
res.writeHead(200);
|
|
118
|
+
res.end("v10");
|
|
119
|
+
return;
|
|
120
|
+
case "/other":
|
|
121
|
+
res.writeHead(200);
|
|
122
|
+
res.end("other");
|
|
123
|
+
return;
|
|
124
|
+
default:
|
|
125
|
+
res.writeHead(404);
|
|
126
|
+
res.end();
|
|
127
|
+
}
|
|
128
|
+
});
|
|
129
|
+
await new Promise((r) => server.listen(0, "127.0.0.1", () => r()));
|
|
130
|
+
const addr = server.address();
|
|
131
|
+
baseUrl = `http://127.0.0.1:${addr.port}`;
|
|
132
|
+
});
|
|
133
|
+
afterAll(async () => {
|
|
134
|
+
await new Promise((r) => server.close(() => r()));
|
|
135
|
+
});
|
|
136
|
+
afterEach(resetSharedRunner);
|
|
137
|
+
it("fetch is undefined when allowlist is empty/unset", async () => {
|
|
138
|
+
// No initSharedRunner → default runner has no allowlist.
|
|
139
|
+
const cfg = baseConfig();
|
|
140
|
+
const r = new MockRunner_1.MockRunner(cfg);
|
|
141
|
+
r.getConfig().steps.push(stepWithGenerate(r, "fetch-undef", `async function generate(defaultPayload, sessionData) {
|
|
142
|
+
return { t: typeof fetch };
|
|
143
|
+
}`));
|
|
144
|
+
const res = await r.runGeneratePayload("fetch-undef", {});
|
|
145
|
+
expect(res.success).toBe(true);
|
|
146
|
+
expect(res.result).toEqual({ t: "undefined" });
|
|
147
|
+
});
|
|
148
|
+
it("allows fetch matching origin + path prefix", async () => {
|
|
149
|
+
MockRunner_1.MockRunner.initSharedRunner({
|
|
150
|
+
allowedFetchBaseUrls: [`${baseUrl}/v1`],
|
|
151
|
+
});
|
|
152
|
+
const cfg = baseConfig();
|
|
153
|
+
const r = new MockRunner_1.MockRunner(cfg);
|
|
154
|
+
r.getConfig().steps.push(stepWithGenerate(r, "ok", `async function generate(defaultPayload, sessionData) {
|
|
155
|
+
const res = await fetch("${baseUrl}/v1/ping");
|
|
156
|
+
const body = await res.text();
|
|
157
|
+
return { body };
|
|
158
|
+
}`));
|
|
159
|
+
const res = await r.runGeneratePayload("ok", {});
|
|
160
|
+
expect(res.success).toBe(true);
|
|
161
|
+
expect(res.result).toEqual({ body: "pong" });
|
|
162
|
+
});
|
|
163
|
+
it("rejects URLs with non-allowlisted path prefix", async () => {
|
|
164
|
+
MockRunner_1.MockRunner.initSharedRunner({
|
|
165
|
+
allowedFetchBaseUrls: [`${baseUrl}/v1`],
|
|
166
|
+
});
|
|
167
|
+
const cfg = baseConfig();
|
|
168
|
+
const r = new MockRunner_1.MockRunner(cfg);
|
|
169
|
+
r.getConfig().steps.push(stepWithGenerate(r, "bad-path", `async function generate(defaultPayload, sessionData) {
|
|
170
|
+
const res = await fetch("${baseUrl}/other");
|
|
171
|
+
return { body: await res.text() };
|
|
172
|
+
}`));
|
|
173
|
+
const res = await r.runGeneratePayload("bad-path", {});
|
|
174
|
+
expect(res.success).toBe(false);
|
|
175
|
+
expect(res.error?.message || "").toMatch(/fetch blocked/);
|
|
176
|
+
});
|
|
177
|
+
it("treats /v1 as a strict segment prefix (no /v10/* match)", async () => {
|
|
178
|
+
MockRunner_1.MockRunner.initSharedRunner({
|
|
179
|
+
allowedFetchBaseUrls: [`${baseUrl}/v1`],
|
|
180
|
+
});
|
|
181
|
+
const cfg = baseConfig();
|
|
182
|
+
const r = new MockRunner_1.MockRunner(cfg);
|
|
183
|
+
r.getConfig().steps.push(stepWithGenerate(r, "prefix-strict", `async function generate(defaultPayload, sessionData) {
|
|
184
|
+
const res = await fetch("${baseUrl}/v10/foo");
|
|
185
|
+
return { body: await res.text() };
|
|
186
|
+
}`));
|
|
187
|
+
const res = await r.runGeneratePayload("prefix-strict", {});
|
|
188
|
+
expect(res.success).toBe(false);
|
|
189
|
+
expect(res.error?.message || "").toMatch(/fetch blocked/);
|
|
190
|
+
});
|
|
191
|
+
it("rejects different origin even with matching path", async () => {
|
|
192
|
+
MockRunner_1.MockRunner.initSharedRunner({
|
|
193
|
+
allowedFetchBaseUrls: [`${baseUrl}/v1`],
|
|
194
|
+
});
|
|
195
|
+
const cfg = baseConfig();
|
|
196
|
+
const r = new MockRunner_1.MockRunner(cfg);
|
|
197
|
+
r.getConfig().steps.push(stepWithGenerate(r, "bad-origin", `async function generate(defaultPayload, sessionData) {
|
|
198
|
+
const res = await fetch("http://example.invalid/v1/ping");
|
|
199
|
+
return { body: await res.text() };
|
|
200
|
+
}`));
|
|
201
|
+
const res = await r.runGeneratePayload("bad-origin", {});
|
|
202
|
+
expect(res.success).toBe(false);
|
|
203
|
+
expect(res.error?.message || "").toMatch(/fetch blocked/);
|
|
204
|
+
});
|
|
205
|
+
it("blocks 3xx redirects (no allowlist bypass via Location header)", async () => {
|
|
206
|
+
MockRunner_1.MockRunner.initSharedRunner({
|
|
207
|
+
allowedFetchBaseUrls: [`${baseUrl}/v1`],
|
|
208
|
+
});
|
|
209
|
+
const cfg = baseConfig();
|
|
210
|
+
const r = new MockRunner_1.MockRunner(cfg);
|
|
211
|
+
r.getConfig().steps.push(stepWithGenerate(r, "redir", `async function generate(defaultPayload, sessionData) {
|
|
212
|
+
await fetch("${baseUrl}/v1/redir");
|
|
213
|
+
return { ok: true };
|
|
214
|
+
}`));
|
|
215
|
+
const res = await r.runGeneratePayload("redir", {});
|
|
216
|
+
expect(res.success).toBe(false);
|
|
217
|
+
// undici surfaces a TypeError when redirect:'error' encounters a 3xx
|
|
218
|
+
expect((res.error?.message || "").toLowerCase()).toMatch(/redirect|fetch failed/);
|
|
219
|
+
});
|
|
220
|
+
it("fetch is not injected into validate even when allowlist is configured", async () => {
|
|
221
|
+
MockRunner_1.MockRunner.initSharedRunner({
|
|
222
|
+
allowedFetchBaseUrls: [`${baseUrl}/v1`],
|
|
223
|
+
});
|
|
224
|
+
const cfg = baseConfig();
|
|
225
|
+
const r = new MockRunner_1.MockRunner(cfg);
|
|
226
|
+
const step = r.getDefaultStep("search", "validate-pure");
|
|
227
|
+
step.mock.inputs = {};
|
|
228
|
+
step.mock.validate = MockRunner_1.MockRunner.encodeBase64(`function validate(targetPayload, sessionData) {
|
|
229
|
+
return {
|
|
230
|
+
valid: typeof fetch === "undefined",
|
|
231
|
+
code: 200,
|
|
232
|
+
description: "fetch-absence probe",
|
|
233
|
+
};
|
|
234
|
+
}`);
|
|
235
|
+
r.getConfig().steps.push(step);
|
|
236
|
+
const res = await r.runValidatePayload("validate-pure", {});
|
|
237
|
+
expect(res.success).toBe(true);
|
|
238
|
+
expect(res.result?.valid).toBe(true);
|
|
239
|
+
});
|
|
240
|
+
});
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@ondc/automation-mock-runner",
|
|
3
|
-
"version": "1.3.
|
|
3
|
+
"version": "1.3.45",
|
|
4
4
|
"description": "A TypeScript library for ONDC automation mock runner",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"types": "dist/index.d.ts",
|
|
@@ -11,6 +11,8 @@
|
|
|
11
11
|
"public"
|
|
12
12
|
],
|
|
13
13
|
"scripts": {
|
|
14
|
+
"helpers:gen": "node scripts/generate-helpers-source.js",
|
|
15
|
+
"prebuild": "npm run helpers:gen",
|
|
14
16
|
"build": "npm run clean && tsc",
|
|
15
17
|
"build:watch": "tsc --watch",
|
|
16
18
|
"start": "node dist/index.js",
|
|
@@ -18,6 +20,7 @@
|
|
|
18
20
|
"clean": "rm -rf dist coverage browser-dist",
|
|
19
21
|
"prepare": "npm run clean && npm run build",
|
|
20
22
|
"prepublishOnly": "npm run test && npm run build",
|
|
23
|
+
"pretest": "npm run helpers:gen",
|
|
21
24
|
"test": "jest",
|
|
22
25
|
"test:watch": "jest --watch",
|
|
23
26
|
"test:coverage": "jest --coverage",
|
|
@@ -45,7 +48,7 @@
|
|
|
45
48
|
"author": "ONDC Development Team <dev@ondc.org>",
|
|
46
49
|
"license": "ISC",
|
|
47
50
|
"engines": {
|
|
48
|
-
"node": ">=
|
|
51
|
+
"node": ">=18.0.0"
|
|
49
52
|
},
|
|
50
53
|
"repository": {
|
|
51
54
|
"type": "git",
|
package/public/node-worker.js
CHANGED
|
@@ -1,8 +1,73 @@
|
|
|
1
|
-
const { parentPort } = require("worker_threads");
|
|
1
|
+
const { parentPort, workerData } = require("worker_threads");
|
|
2
2
|
const vm = require("vm");
|
|
3
3
|
|
|
4
|
+
const ALLOWED_FETCH_BASE_URLS = Array.isArray(workerData?.allowedFetchBaseUrls)
  ? workerData.allowedFetchBaseUrls
  : [];

// Remove a single trailing slash ("/v1/" -> "/v1") so prefix comparisons are
// uniform; the bare root path "/" is left untouched.
function stripTrailingSlash(pathname) {
  return pathname.endsWith("/") && pathname !== "/"
    ? pathname.slice(0, -1)
    : pathname;
}

// Parse + normalize allowlist entries once per worker.
// Each entry contributes { origin, pathname } where pathname has no trailing
// slash; matching requires request.origin === entry.origin AND the entry's
// pathname to be a strict segment-prefix of the request pathname (so an entry
// of `/v1` matches `/v1` and `/v1/foo` but NOT `/v10/foo`).
const PARSED_ALLOWLIST = ALLOWED_FETCH_BASE_URLS.map((raw) => {
  try {
    const u = new URL(raw);
    return { origin: u.origin, pathname: stripTrailingSlash(u.pathname) };
  } catch {
    // A malformed allowlist entry is dropped rather than crashing the worker.
    return null;
  }
}).filter(Boolean);

/**
 * Returns true when `requestUrl` matches some allowlist entry: identical
 * origin and the entry's pathname is a segment-prefix of the request's
 * pathname. An entry whose pathname is "/" (or "") allows the whole origin.
 * Unparseable URLs are rejected outright.
 */
function isFetchAllowed(requestUrl) {
  let parsed;
  try {
    parsed = new URL(requestUrl);
  } catch {
    return false;
  }
  const reqPath = stripTrailingSlash(parsed.pathname);
  for (const entry of PARSED_ALLOWLIST) {
    if (parsed.origin !== entry.origin) continue;
    if (entry.pathname === "" || entry.pathname === "/") return true;
    if (reqPath === entry.pathname) return true;
    // Segment-boundary check: "/v1" + "/" prevents "/v10/foo" from matching.
    if (reqPath.startsWith(entry.pathname + "/")) return true;
  }
  return false;
}
|
|
45
|
+
|
|
46
|
+
/**
 * Builds the fetch wrapper handed to `generate` sandboxes. Returns undefined
 * when the host has no native fetch or the allowlist is empty — in that case
 * the sandbox simply gets no fetch at all. The wrapper rejects any URL that
 * fails isFetchAllowed and forces redirect:"error" so a Location header
 * cannot bounce a request outside the allowlist.
 */
function makeScopedFetch() {
  const fetchUnavailable = typeof globalThis.fetch !== "function";
  if (fetchUnavailable || PARSED_ALLOWLIST.length === 0) {
    return undefined;
  }
  const toUrlString = (input) => {
    if (typeof input === "string") return input;
    if (input && typeof input.url === "string") return input.url;
    return String(input);
  };
  return async function scopedFetch(input, init) {
    const requestUrl = toUrlString(input);
    if (!isFetchAllowed(requestUrl)) {
      throw new Error(
        `fetch blocked: ${requestUrl} is not in the configured allowlist`,
      );
    }
    return globalThis.fetch(input, { ...(init || {}), redirect: "error" });
  };
}
|
|
68
|
+
|
|
4
69
|
// Create a secure sandbox context
|
|
5
|
-
function createSandbox() {
|
|
70
|
+
function createSandbox(functionName) {
|
|
6
71
|
const logs = [];
|
|
7
72
|
|
|
8
73
|
// Safe console implementation that captures logs
|
|
@@ -108,12 +173,17 @@ function createSandbox() {
|
|
|
108
173
|
decodeURIComponent,
|
|
109
174
|
// Utility functions for ONDC operations
|
|
110
175
|
setTimeout: (fn, delay) => {
|
|
111
|
-
if (delay < 1 || delay >
|
|
112
|
-
throw new Error("Timeout must be between 1-
|
|
176
|
+
if (delay < 1 || delay > 45 * 1000) {
|
|
177
|
+
throw new Error("Timeout must be between 1-45000ms");
|
|
113
178
|
}
|
|
114
179
|
return setTimeout(fn, delay);
|
|
115
180
|
},
|
|
116
181
|
clearTimeout,
|
|
182
|
+
// AbortController is a pure control-flow primitive with no I/O of its
|
|
183
|
+
// own — safe to expose unconditionally. Needed by helpers that pair
|
|
184
|
+
// `fetch` with a timeout (see generateConsentHandler).
|
|
185
|
+
AbortController,
|
|
186
|
+
AbortSignal,
|
|
117
187
|
// Blocked globals
|
|
118
188
|
require: undefined,
|
|
119
189
|
process: undefined,
|
|
@@ -128,6 +198,28 @@ function createSandbox() {
|
|
|
128
198
|
Function: undefined,
|
|
129
199
|
};
|
|
130
200
|
|
|
201
|
+
// Only `generate` gets outbound HTTP — validate/meetsRequirements/getSave
|
|
202
|
+
// stay pure. Fetch itself is still gated by the allowlist inside the wrapper.
|
|
203
|
+
if (functionName === "generate") {
|
|
204
|
+
const scopedFetch = makeScopedFetch();
|
|
205
|
+
if (scopedFetch) {
|
|
206
|
+
sandbox.fetch = scopedFetch;
|
|
207
|
+
if (typeof globalThis.URL === "function") sandbox.URL = globalThis.URL;
|
|
208
|
+
if (typeof globalThis.URLSearchParams === "function") {
|
|
209
|
+
sandbox.URLSearchParams = globalThis.URLSearchParams;
|
|
210
|
+
}
|
|
211
|
+
if (typeof globalThis.Headers === "function") {
|
|
212
|
+
sandbox.Headers = globalThis.Headers;
|
|
213
|
+
}
|
|
214
|
+
if (typeof globalThis.Request === "function") {
|
|
215
|
+
sandbox.Request = globalThis.Request;
|
|
216
|
+
}
|
|
217
|
+
if (typeof globalThis.Response === "function") {
|
|
218
|
+
sandbox.Response = globalThis.Response;
|
|
219
|
+
}
|
|
220
|
+
}
|
|
221
|
+
}
|
|
222
|
+
|
|
131
223
|
return { sandbox, logs };
|
|
132
224
|
}
|
|
133
225
|
|
|
@@ -138,7 +230,7 @@ parentPort?.on("message", async (message) => {
|
|
|
138
230
|
|
|
139
231
|
try {
|
|
140
232
|
// Create fresh sandbox for each execution
|
|
141
|
-
const { sandbox, logs } = createSandbox();
|
|
233
|
+
const { sandbox, logs } = createSandbox(functionName);
|
|
142
234
|
|
|
143
235
|
// Create VM context with timeout
|
|
144
236
|
const context = vm.createContext(sandbox);
|
|
@@ -151,7 +243,7 @@ parentPort?.on("message", async (message) => {
|
|
|
151
243
|
|
|
152
244
|
// Execute the script
|
|
153
245
|
script.runInContext(context, {
|
|
154
|
-
timeout: timeout ||
|
|
246
|
+
timeout: timeout || 45000,
|
|
155
247
|
breakOnSigint: true,
|
|
156
248
|
});
|
|
157
249
|
|