@vercel/backends 0.0.26 → 0.0.28

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1,22 +1,22 @@
1
1
  import { builtinModules, createRequire } from "node:module";
2
2
  import { delimiter, dirname, extname, join } from "path";
3
- import { FileFsRef, NodejsLambda, Span, debug, defaultCachePathGlob, download, execCommand, getEnvForPackageManager, getNodeBinPaths, getNodeVersion, getPackageJson, getScriptName, glob, isExperimentalBackendsWithoutIntrospectionEnabled, runNpmInstall, runPackageJsonScript, scanParentDirs } from "@vercel/build-utils";
3
+ import { FileBlob, FileFsRef, NodejsLambda, Span, debug, defaultCachePathGlob, download, execCommand, getEnvForPackageManager, getNodeBinPaths, getNodeVersion, glob, isBackendFramework, isExperimentalBackendsWithoutIntrospectionEnabled, runNpmInstall, runPackageJsonScript, scanParentDirs } from "@vercel/build-utils";
4
+ import { createWriteStream, existsSync, mkdirSync, mkdtempSync, readFileSync, rmSync, unlinkSync, writeFileSync } from "node:fs";
5
+ import { lstat, readFile, rm } from "node:fs/promises";
4
6
  import { dirname as dirname$1, extname as extname$1, isAbsolute, join as join$1, relative } from "node:path";
5
- import { createWriteStream, existsSync, mkdtempSync, readFileSync, rmSync, unlinkSync } from "node:fs";
6
- import { spawn } from "node:child_process";
7
- import { tmpdir } from "node:os";
8
- import { z } from "zod";
9
- import { lstat, readFile, rm, writeFile } from "node:fs/promises";
10
7
  import { build as build$2 } from "rolldown";
11
8
  import { exports } from "resolve.exports";
12
9
  import { isNativeError } from "node:util/types";
13
10
  import { nodeFileTrace as nodeFileTrace$1, resolve } from "@vercel/nft";
14
11
  import { transform } from "oxc-transform";
15
12
  import { createRequire as createRequire$1 } from "module";
16
- import { spawn as spawn$1 } from "child_process";
13
+ import { spawn } from "child_process";
17
14
  import { existsSync as existsSync$1 } from "fs";
18
15
  import execa from "execa";
19
- import { readFile as readFile$1, writeFile as writeFile$1 } from "fs/promises";
16
+ import { readFile as readFile$1, writeFile } from "fs/promises";
17
+ import { spawn as spawn$1 } from "node:child_process";
18
+ import { tmpdir } from "node:os";
19
+ import { z } from "zod";
20
20
 
21
21
  //#region src/utils.ts
22
22
  async function downloadInstallAndBundle(args) {
@@ -64,200 +64,9 @@ async function maybeExecBuildCommand(args, { spawnEnv, entrypointFsDirname }) {
64
64
  return runPackageJsonScript(entrypointFsDirname, ["build"], { env: spawnEnv }, args.config.projectSettings?.createdAt);
65
65
  }
66
66
 
67
- //#endregion
68
- //#region src/introspection/util.ts
69
- const BEGIN_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_BEGIN__\n";
70
- const END_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_END__\n";
71
-
72
- //#endregion
73
- //#region src/introspection/index.ts
74
- const require$1 = createRequire(import.meta.url);
75
- const introspectApp = async (args) => {
76
- const { span } = args;
77
- const introspectionSpan = span.child("vc.builder.backends.introspection");
78
- if (isExperimentalBackendsWithoutIntrospectionEnabled()) return defaultResult(args);
79
- const cjsLoaderPath = require$1.resolve("@vercel/backends/introspection/loaders/cjs");
80
- const rolldownEsmLoaderPath = `file://${require$1.resolve("@vercel/backends/introspection/loaders/rolldown-esm")}`;
81
- const handlerPath = join$1(args.dir, args.handler);
82
- const introspectionSchema = z.object({
83
- frameworkSlug: z.string().optional(),
84
- routes: z.array(z.object({
85
- src: z.string(),
86
- dest: z.string(),
87
- methods: z.array(z.string())
88
- })),
89
- additionalFolders: z.array(z.string()).optional().transform((values) => {
90
- return values?.map((val) => {
91
- if (isAbsolute(val)) return relative(args.dir, val);
92
- return val;
93
- });
94
- }),
95
- additionalDeps: z.array(z.string()).optional()
96
- });
97
- let introspectionData;
98
- await new Promise((resolvePromise) => {
99
- try {
100
- debug("Spawning introspection process");
101
- const child = spawn("node", [
102
- "-r",
103
- cjsLoaderPath,
104
- "--import",
105
- rolldownEsmLoaderPath,
106
- handlerPath
107
- ], {
108
- stdio: [
109
- "pipe",
110
- "pipe",
111
- "pipe"
112
- ],
113
- cwd: args.dir,
114
- env: {
115
- ...process.env,
116
- ...args.env
117
- }
118
- });
119
- const tempDir = mkdtempSync(join$1(tmpdir(), "introspection-"));
120
- const tempFilePath = join$1(tempDir, "output.txt");
121
- const writeStream = createWriteStream(tempFilePath);
122
- let streamClosed = false;
123
- child.stdout?.pipe(writeStream);
124
- let stderrBuffer = "";
125
- child.stderr?.on("data", (data) => {
126
- stderrBuffer += data.toString();
127
- });
128
- writeStream.on("error", (err) => {
129
- debug(`Write stream error: ${err.message}`);
130
- });
131
- const timeout = setTimeout(() => {
132
- debug("Introspection timeout, killing process with SIGTERM");
133
- child.kill("SIGTERM");
134
- }, 8e3);
135
- const timeout2 = setTimeout(() => {
136
- debug("Introspection timeout, killing process with SIGKILL");
137
- child.kill("SIGKILL");
138
- }, 9e3);
139
- child.on("error", (err) => {
140
- clearTimeout(timeout);
141
- clearTimeout(timeout2);
142
- debug(`Loader error: ${err.message}`);
143
- if (!streamClosed) writeStream.end(() => {
144
- streamClosed = true;
145
- try {
146
- unlinkSync(tempFilePath);
147
- } catch (cleanupErr) {
148
- debug(`Error deleting temp file on error: ${cleanupErr}`);
149
- }
150
- resolvePromise(void 0);
151
- });
152
- else resolvePromise(void 0);
153
- });
154
- child.on("close", () => {
155
- clearTimeout(timeout);
156
- clearTimeout(timeout2);
157
- debug("Introspection process closed");
158
- if (!streamClosed) writeStream.end(() => {
159
- streamClosed = true;
160
- let stdoutBuffer;
161
- try {
162
- stdoutBuffer = readFileSync(tempFilePath, "utf8");
163
- const beginIndex = stdoutBuffer.indexOf(BEGIN_INTROSPECTION_RESULT);
164
- const endIndex = stdoutBuffer.indexOf(END_INTROSPECTION_RESULT);
165
- if (beginIndex !== -1 && endIndex !== -1) {
166
- const introspectionString = stdoutBuffer.substring(beginIndex + BEGIN_INTROSPECTION_RESULT.length, endIndex);
167
- if (introspectionString) {
168
- introspectionData = introspectionSchema.parse(JSON.parse(introspectionString));
169
- debug("Introspection data parsed successfully");
170
- }
171
- } else debug(`Introspection markers not found.\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
172
- } catch (error) {
173
- debug(`Error parsing introspection data: ${error}\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
174
- } finally {
175
- try {
176
- rmSync(tempDir, {
177
- recursive: true,
178
- force: true
179
- });
180
- } catch (err) {
181
- debug(`Error deleting temp directory: ${err}`);
182
- }
183
- resolvePromise(void 0);
184
- }
185
- });
186
- else resolvePromise(void 0);
187
- });
188
- } catch (error) {
189
- debug("Introspection error", error);
190
- resolvePromise(void 0);
191
- }
192
- });
193
- const framework = getFramework(args);
194
- if (!introspectionData) {
195
- introspectionSpan.setAttributes({
196
- "introspection.success": "false",
197
- "introspection.routes": "0"
198
- });
199
- return defaultResult(args);
200
- }
201
- const routes = [
202
- { handle: "filesystem" },
203
- ...introspectionData.routes,
204
- {
205
- src: "/(.*)",
206
- dest: "/"
207
- }
208
- ];
209
- introspectionSpan.setAttributes({
210
- "introspection.success": "true",
211
- "introspection.routes": String(introspectionData.routes.length),
212
- "introspection.framework": introspectionData.frameworkSlug ?? ""
213
- });
214
- return {
215
- routes,
216
- framework,
217
- additionalFolders: introspectionData.additionalFolders ?? [],
218
- additionalDeps: introspectionData.additionalDeps ?? []
219
- };
220
- };
221
- const defaultResult = (args) => {
222
- return {
223
- routes: [{ handle: "filesystem" }, {
224
- src: "/(.*)",
225
- dest: "/"
226
- }],
227
- framework: getFramework(args)
228
- };
229
- };
230
- const getFramework = (args) => {
231
- try {
232
- let version$1;
233
- if (args.framework) {
234
- const frameworkLibPath = require$1.resolve(`${args.framework}`, { paths: [args.dir] });
235
- const findNearestPackageJson = (dir) => {
236
- const packageJsonPath = join$1(dir, "package.json");
237
- if (existsSync(packageJsonPath)) return packageJsonPath;
238
- const parentDir = dirname$1(dir);
239
- if (parentDir === dir) return;
240
- return findNearestPackageJson(parentDir);
241
- };
242
- const nearestPackageJsonPath = findNearestPackageJson(frameworkLibPath);
243
- if (nearestPackageJsonPath) version$1 = require$1(nearestPackageJsonPath).version;
244
- }
245
- return {
246
- slug: args.framework ?? "",
247
- version: version$1 ?? ""
248
- };
249
- } catch (error) {
250
- debug(`Error getting framework for ${args.framework}. Setting framework version to empty string.`, error);
251
- return {
252
- slug: args.framework ?? "",
253
- version: ""
254
- };
255
- }
256
- };
257
-
258
67
  //#endregion
259
68
  //#region src/cervel/plugin.ts
260
- const CJS_SHIM_PREFIX = "\0cjs-shim:";
69
+ const CJS_SHIM_PREFIX$1 = "\0cjs-shim:";
261
70
  const plugin = (args) => {
262
71
  const packageJsonCache = /* @__PURE__ */ new Map();
263
72
  const shimMeta = /* @__PURE__ */ new Map();
@@ -268,7 +77,7 @@ const plugin = (args) => {
268
77
  /**
269
78
  * Read and cache package.json contents
270
79
  */
271
- const getPackageJson$1 = async (pkgPath) => {
80
+ const getPackageJson = async (pkgPath) => {
272
81
  if (packageJsonCache.has(pkgPath)) return packageJsonCache.get(pkgPath);
273
82
  try {
274
83
  const contents = await readFile(pkgPath, "utf-8");
@@ -290,7 +99,7 @@ const plugin = (args) => {
290
99
  if (ext === ".js" || ext === ".ts") {
291
100
  const pkgJsonPath = resolvedInfo.packageJsonPath;
292
101
  if (!pkgJsonPath) return true;
293
- const pkgJson = await getPackageJson$1(pkgJsonPath);
102
+ const pkgJson = await getPackageJson(pkgJsonPath);
294
103
  if (!pkgJson) return true;
295
104
  const pkgDir = dirname$1(pkgJsonPath);
296
105
  const relativePath = resolvedPath.startsWith(pkgDir) ? resolvedPath.slice(pkgDir.length + 1).replace(/\\/g, "/") : null;
@@ -324,7 +133,7 @@ const plugin = (args) => {
324
133
  resolveId: {
325
134
  order: "pre",
326
135
  async handler(id, importer, rOpts) {
327
- if (id.startsWith(CJS_SHIM_PREFIX)) return {
136
+ if (id.startsWith(CJS_SHIM_PREFIX$1)) return {
328
137
  id,
329
138
  external: false
330
139
  };
@@ -334,7 +143,7 @@ const plugin = (args) => {
334
143
  external: true
335
144
  };
336
145
  if (resolved?.id && isLocalImport(resolved.id)) tracedPaths.add(resolved.id);
337
- if (importer?.startsWith(CJS_SHIM_PREFIX) && isBareImport(id)) return {
146
+ if (importer?.startsWith(CJS_SHIM_PREFIX$1) && isBareImport(id)) return {
338
147
  id,
339
148
  external: true
340
149
  };
@@ -344,7 +153,7 @@ const plugin = (args) => {
344
153
  const importerPkgJsonPath = (await this.resolve(importer))?.packageJsonPath;
345
154
  if (importerPkgJsonPath) {
346
155
  const importerPkgDir = relative(args.repoRootPath, dirname$1(importerPkgJsonPath));
347
- const shimId$1 = `${CJS_SHIM_PREFIX}${importerPkgDir.replace(/\//g, "_")}_${id.replace(/\//g, "_")}`;
156
+ const shimId$1 = `${CJS_SHIM_PREFIX$1}${importerPkgDir.replace(/\//g, "_")}_${id.replace(/\//g, "_")}`;
348
157
  shimMeta.set(shimId$1, {
349
158
  pkgDir: importerPkgDir,
350
159
  pkgName: id
@@ -354,7 +163,7 @@ const plugin = (args) => {
354
163
  external: false
355
164
  };
356
165
  }
357
- const shimId = `${CJS_SHIM_PREFIX}${id.replace(/\//g, "_")}`;
166
+ const shimId = `${CJS_SHIM_PREFIX$1}${id.replace(/\//g, "_")}`;
358
167
  shimMeta.set(shimId, {
359
168
  pkgDir: "",
360
169
  pkgName: id
@@ -379,7 +188,7 @@ const plugin = (args) => {
379
188
  }
380
189
  },
381
190
  load: { async handler(id) {
382
- if (id.startsWith(CJS_SHIM_PREFIX)) {
191
+ if (id.startsWith(CJS_SHIM_PREFIX$1)) {
383
192
  const meta = shimMeta.get(id);
384
193
  if (!meta) return { code: `module.exports = require('${id.slice(10)}');` };
385
194
  const { pkgDir, pkgName } = meta;
@@ -464,7 +273,7 @@ var require = typeof require !== 'undefined' ? require : __createRequire(import.
464
273
  var __filename = typeof __filename !== 'undefined' ? __filename : __fileURLToPath(import.meta.url);
465
274
  var __dirname = typeof __dirname !== 'undefined' ? __dirname : __dirname_(__filename);
466
275
  `.trim();
467
- const rolldown = async (args) => {
276
+ const rolldown$1 = async (args) => {
468
277
  const entrypointPath = join$1(args.workPath, args.entrypoint);
469
278
  const outputDir = join$1(args.workPath, args.out);
470
279
  const extension = extname$1(args.entrypoint);
@@ -599,8 +408,8 @@ const Colors = {
599
408
 
600
409
  //#endregion
601
410
  //#region src/cervel/typescript.ts
602
- const require_ = createRequire$1(import.meta.url);
603
- const typescript = (args) => {
411
+ const require_$1 = createRequire$1(import.meta.url);
412
+ const typescript$1 = (args) => {
604
413
  const { span } = args;
605
414
  return span.child("vc.builder.backends.tsCompile").trace(async () => {
606
415
  const extension = extname(args.entrypoint);
@@ -609,15 +418,15 @@ const typescript = (args) => {
609
418
  ".mts",
610
419
  ".cts"
611
420
  ].includes(extension)) return;
612
- const tscPath = resolveTscPath(args);
421
+ const tscPath = resolveTscPath$1(args);
613
422
  if (!tscPath) {
614
423
  console.log(Colors.gray(`${Colors.bold(Colors.cyan("✓"))} Typecheck skipped ${Colors.gray("(TypeScript not found)")}`));
615
424
  return null;
616
425
  }
617
- return doTypeCheck(args, tscPath);
426
+ return doTypeCheck$1(args, tscPath);
618
427
  });
619
428
  };
620
- async function doTypeCheck(args, tscPath) {
429
+ async function doTypeCheck$1(args, tscPath) {
621
430
  let stdout = "";
622
431
  let stderr = "";
623
432
  /**
@@ -634,10 +443,10 @@ async function doTypeCheck(args, tscPath) {
634
443
  "--esModuleInterop",
635
444
  "--skipLibCheck"
636
445
  ];
637
- const tsconfig = await findNearestTsconfig(args.workPath);
446
+ const tsconfig = await findNearestTsconfig$1(args.workPath);
638
447
  if (tsconfig) tscArgs.push("--project", tsconfig);
639
448
  else tscArgs.push(args.entrypoint);
640
- const child = spawn$1(process.execPath, tscArgs, {
449
+ const child = spawn(process.execPath, tscArgs, {
641
450
  cwd: args.workPath,
642
451
  stdio: [
643
452
  "ignore",
@@ -670,22 +479,22 @@ async function doTypeCheck(args, tscPath) {
670
479
  });
671
480
  });
672
481
  }
673
- const resolveTscPath = (args) => {
482
+ const resolveTscPath$1 = (args) => {
674
483
  try {
675
- return require_.resolve("typescript/bin/tsc", { paths: [args.workPath] });
484
+ return require_$1.resolve("typescript/bin/tsc", { paths: [args.workPath] });
676
485
  } catch (e) {
677
486
  return null;
678
487
  }
679
488
  };
680
- const findNearestTsconfig = async (workPath) => {
489
+ const findNearestTsconfig$1 = async (workPath) => {
681
490
  const tsconfigPath = join(workPath, "tsconfig.json");
682
491
  if (existsSync$1(tsconfigPath)) return tsconfigPath;
683
492
  if (workPath === "/") return;
684
- return findNearestTsconfig(join(workPath, ".."));
493
+ return findNearestTsconfig$1(join(workPath, ".."));
685
494
  };
686
495
 
687
496
  //#endregion
688
- //#region src/cervel/find-entrypoint.ts
497
+ //#region src/find-entrypoint.ts
689
498
  const frameworks = [
690
499
  "express",
691
500
  "hono",
@@ -698,7 +507,11 @@ const entrypointFilenames = [
698
507
  "app",
699
508
  "index",
700
509
  "server",
701
- "main"
510
+ "main",
511
+ "src/app",
512
+ "src/index",
513
+ "src/server",
514
+ "src/main"
702
515
  ];
703
516
  const entrypointExtensions = [
704
517
  "js",
@@ -710,78 +523,66 @@ const entrypointExtensions = [
710
523
  ];
711
524
  const entrypoints = entrypointFilenames.flatMap((filename) => entrypointExtensions.map((extension) => `${filename}.${extension}`));
712
525
  const createFrameworkRegex = (framework) => new RegExp(`(?:from|require|import)\\s*(?:\\(\\s*)?["']${framework}["']\\s*(?:\\))?`, "g");
713
- const findEntrypoint = async (cwd, options) => {
714
- if (options?.ignoreRegex ?? false) {
715
- for (const entrypoint of entrypoints) if (existsSync(join$1(cwd, entrypoint))) return entrypoint;
716
- for (const entrypoint of entrypoints) if (existsSync(join$1(cwd, "src", entrypoint))) return join$1("src", entrypoint);
717
- throw new Error("No entrypoint file found");
718
- }
719
- const packageJson = await readFile(join$1(cwd, "package.json"), "utf-8");
720
- const packageJsonObject = JSON.parse(packageJson);
721
- const framework = frameworks.find((framework$1) => packageJsonObject.dependencies?.[framework$1]);
722
- if (!framework) {
723
- for (const entrypoint of entrypoints) {
724
- const entrypointPath = join$1(cwd, entrypoint);
725
- try {
726
- await readFile(entrypointPath, "utf-8");
727
- return entrypoint;
728
- } catch (e) {
729
- continue;
730
- }
731
- }
732
- throw new Error("No entrypoint or framework found");
733
- }
734
- const regex = createFrameworkRegex(framework);
735
- for (const entrypoint of entrypoints) {
526
+ const findEntrypoint = async (cwd) => {
527
+ let framework;
528
+ try {
529
+ const packageJson = await readFile(join$1(cwd, "package.json"), "utf-8");
530
+ const packageJsonObject = JSON.parse(packageJson);
531
+ framework = frameworks.find((framework$1) => packageJsonObject.dependencies?.[framework$1]);
532
+ } catch (_) {}
533
+ if (!framework) for (const entrypoint of entrypoints) {
736
534
  const entrypointPath = join$1(cwd, entrypoint);
737
535
  try {
738
- const content = await readFile(entrypointPath, "utf-8");
739
- if (regex.test(content)) return entrypoint;
740
- } catch (e) {
741
- continue;
742
- }
536
+ await readFile(entrypointPath, "utf-8");
537
+ return entrypoint;
538
+ } catch (_) {}
743
539
  }
540
+ const regex = framework ? createFrameworkRegex(framework) : void 0;
744
541
  for (const entrypoint of entrypoints) {
745
- const entrypointPath = join$1(cwd, "src", entrypoint);
542
+ const entrypointPath = join$1(cwd, entrypoint);
746
543
  try {
747
544
  const content = await readFile(entrypointPath, "utf-8");
748
- if (regex.test(content)) return join$1("src", entrypoint);
749
- } catch (e) {
750
- continue;
751
- }
545
+ if (regex) {
546
+ if (regex.test(content)) return entrypoint;
547
+ }
548
+ } catch (_) {}
752
549
  }
753
- throw new Error("No entrypoint found");
550
+ };
551
+ const findEntrypointOrThrow = async (cwd) => {
552
+ const entrypoint = await findEntrypoint(cwd);
553
+ if (!entrypoint) throw new Error(`No entrypoint found in "${cwd}". Expected one of: ${entrypoints.join(", ")}`);
554
+ return entrypoint;
754
555
  };
755
556
 
756
557
  //#endregion
757
558
  //#region src/cervel/index.ts
758
- const require = createRequire(import.meta.url);
559
+ const require$2 = createRequire(import.meta.url);
759
560
  const getBuildSummary = async (outputDir) => {
760
561
  const buildSummary = await readFile$1(join$1(outputDir, ".cervel.json"), "utf-8");
761
562
  return JSON.parse(buildSummary);
762
563
  };
763
564
  const build$1 = async (args) => {
764
- const entrypoint = args.entrypoint || await findEntrypoint(args.workPath);
565
+ const entrypoint = args.entrypoint || await findEntrypointOrThrow(args.workPath);
765
566
  const span = args.span ?? new Span({ name: "cervel-build" });
766
- const [, rolldownResult] = await Promise.all([typescript({
567
+ const [, rolldownResult] = await Promise.all([typescript$1({
767
568
  entrypoint,
768
569
  workPath: args.workPath,
769
570
  span
770
- }), rolldown({
571
+ }), rolldown$1({
771
572
  entrypoint,
772
573
  workPath: args.workPath,
773
574
  repoRootPath: args.repoRootPath,
774
575
  out: args.out,
775
576
  span
776
577
  })]);
777
- await writeFile$1(join$1(args.workPath, args.out, ".cervel.json"), JSON.stringify({ handler: rolldownResult.result.handler }, null, 2));
578
+ await writeFile(join$1(args.workPath, args.out, ".cervel.json"), JSON.stringify({ handler: rolldownResult.result.handler }, null, 2));
778
579
  console.log(Colors.gray(`${Colors.bold(Colors.cyan("✓"))} Build complete`));
779
580
  return { rolldownResult: rolldownResult.result };
780
581
  };
781
582
  const serve = async (args) => {
782
- const entrypoint = await findEntrypoint(args.workPath);
783
- const srvxBin = join$1(require.resolve("srvx"), "..", "..", "..", "bin", "srvx.mjs");
784
- const tsxBin = require.resolve("tsx");
583
+ const entrypoint = await findEntrypointOrThrow(args.workPath);
584
+ const srvxBin = join$1(require$2.resolve("srvx"), "..", "..", "..", "bin", "srvx.mjs");
585
+ const tsxBin = require$2.resolve("tsx");
785
586
  const restArgs = Object.entries(args.rest).filter(([, value]) => value !== void 0 && value !== false).map(([key, value]) => typeof value === "boolean" ? `--${key}` : `--${key}=${value}`);
786
587
  if (!args.rest.import) restArgs.push("--import", tsxBin);
787
588
  await execa("npx", [
@@ -822,130 +623,785 @@ const srvxOptions = {
822
623
  };
823
624
 
824
625
  //#endregion
825
- //#region src/build.ts
826
- const defaultOutputDirectory = join$1(".vercel", "node");
827
- const doBuild = async (args, downloadResult, span) => {
828
- const buildCommandResult = await maybeExecBuildCommand(args, downloadResult);
829
- const outputSetting = args.config.outputDirectory;
830
- const buildCommand = args.config.projectSettings?.buildCommand;
831
- const isCervelCommand = buildCommand?.trim().startsWith("cervel");
832
- if (!outputSetting) {
833
- debug("No output directory configured, using default output directory");
834
- if (isCervelCommand) {
835
- debug("Cervel command ran, using its default output location");
836
- const cervelOutputDir = join$1(args.workPath, "dist");
837
- if (existsSync(join$1(cervelOutputDir, ".cervel.json"))) {
838
- debug("Cervel JSON file found, using its handler");
839
- const { handler: handler$2 } = await getBuildSummary(cervelOutputDir);
840
- return {
841
- dir: cervelOutputDir,
842
- handler: handler$2
843
- };
844
- }
845
- throw new Error(`Build command "${buildCommand}" completed, but no output was found at ${cervelOutputDir}. Make sure your cervel command is configured correctly.`);
626
+ //#region src/rolldown/resolve-format.ts
627
+ const resolveEntrypointAndFormat = async (args) => {
628
+ const extension = extname$1(args.entrypoint);
629
+ const extensionMap = {
630
+ ".ts": {
631
+ format: "auto",
632
+ extension: "js"
633
+ },
634
+ ".mts": {
635
+ format: "esm",
636
+ extension: "mjs"
637
+ },
638
+ ".cts": {
639
+ format: "cjs",
640
+ extension: "cjs"
641
+ },
642
+ ".cjs": {
643
+ format: "cjs",
644
+ extension: "cjs"
645
+ },
646
+ ".js": {
647
+ format: "auto",
648
+ extension: "js"
649
+ },
650
+ ".mjs": {
651
+ format: "esm",
652
+ extension: "mjs"
846
653
  }
847
- const distDir = join$1(args.workPath, "dist");
848
- if (existsSync(distDir)) {
849
- debug("Dist directory found, checking for .cervel.json");
850
- const cervelJsonPath$1 = join$1(distDir, ".cervel.json");
851
- if (existsSync(cervelJsonPath$1)) {
852
- const { handler: handler$3 } = await getBuildSummary(distDir);
853
- return {
854
- dir: distDir,
855
- handler: handler$3
856
- };
857
- }
858
- let handler$2;
859
- try {
860
- debug("Finding entrypoint in dist directory");
861
- handler$2 = await findEntrypoint(distDir);
862
- } catch (error) {
863
- try {
864
- debug("Finding entrypoint in dist directory with ignoreRegex");
865
- handler$2 = await findEntrypoint(distDir, { ignoreRegex: true });
866
- debug("Found entrypoint in dist directory with ignoreRegex", handler$2);
867
- } catch (error$1) {
868
- debug("Unable to detect entrypoint, building ourselves");
869
- const buildResult$1 = await build$1({
870
- workPath: args.workPath,
871
- repoRootPath: args.repoRootPath,
872
- out: defaultOutputDirectory,
873
- span
874
- });
875
- const { handler: handler$3 } = await getBuildSummary(buildResult$1.rolldownResult.outputDir);
876
- return {
877
- dir: buildResult$1.rolldownResult.outputDir,
878
- handler: handler$3,
879
- files: buildResult$1.rolldownResult.outputFiles
880
- };
881
- }
882
- }
883
- await writeFile(cervelJsonPath$1, JSON.stringify({ handler: handler$2 }, null, 2));
884
- const files = await nodeFileTrace({
885
- keepTracedPaths: true,
886
- tracedPaths: [join$1(distDir, handler$2)],
887
- repoRootPath: args.repoRootPath,
888
- workPath: args.workPath,
889
- outDir: distDir,
890
- span
891
- });
892
- return {
893
- dir: distDir,
894
- handler: handler$2,
895
- files
896
- };
654
+ };
655
+ const extensionInfo = extensionMap[extension] || extensionMap[".js"];
656
+ let resolvedFormat = extensionInfo.format === "auto" ? void 0 : extensionInfo.format;
657
+ const packageJsonPath = join$1(args.workPath, "package.json");
658
+ let pkg = {};
659
+ if (existsSync(packageJsonPath)) {
660
+ const source = await readFile(packageJsonPath, "utf8");
661
+ try {
662
+ pkg = JSON.parse(source.toString());
663
+ } catch (_e) {
664
+ pkg = {};
897
665
  }
898
- debug("No dist directory found, or unable to detect entrypoint, building ourselves");
899
- const buildResult = await build$1({
900
- workPath: args.workPath,
901
- repoRootPath: args.repoRootPath,
902
- out: defaultOutputDirectory,
903
- span
666
+ if (extensionInfo.format === "auto") if (pkg.type === "module") resolvedFormat = "esm";
667
+ else resolvedFormat = "cjs";
668
+ }
669
+ if (!resolvedFormat) throw new Error(`Unable to resolve format for ${args.entrypoint}`);
670
+ return {
671
+ format: resolvedFormat,
672
+ extension: resolvedFormat === "esm" ? "mjs" : "cjs"
673
+ };
674
+ };
675
+
676
+ //#endregion
677
+ //#region src/rolldown/nft.ts
678
+ const nft = async (args) => {
679
+ const nftSpan = args.span.child("vc.builder.backends.nft");
680
+ const runNft = async () => {
681
+ const nftResult = await nodeFileTrace$1(Array.from(args.localBuildFiles), {
682
+ base: args.repoRootPath,
683
+ processCwd: args.workPath,
684
+ ts: true,
685
+ mixedModules: true,
686
+ ignore: args.ignoreNodeModules ? (path) => path.includes("node_modules") : void 0,
687
+ async readFile(fsPath) {
688
+ try {
689
+ let source = await readFile(fsPath);
690
+ if (isTypeScriptFile(fsPath)) source = (await transform(fsPath, source.toString())).code;
691
+ return source;
692
+ } catch (error) {
693
+ if (isNativeError(error) && "code" in error && (error.code === "ENOENT" || error.code === "EISDIR")) return null;
694
+ throw error;
695
+ }
696
+ }
904
697
  });
905
- const { handler: handler$1 } = await getBuildSummary(buildResult.rolldownResult.outputDir);
698
+ for (const file of nftResult.fileList) {
699
+ const absolutePath = join$1(args.repoRootPath, file);
700
+ const stats = await lstat(absolutePath);
701
+ const outputPath = file;
702
+ if (args.localBuildFiles.has(join$1(args.repoRootPath, outputPath))) continue;
703
+ if (stats.isSymbolicLink() || stats.isFile()) if (args.ignoreNodeModules) {
704
+ const content = await readFile(absolutePath, "utf-8");
705
+ args.files[outputPath] = new FileBlob({
706
+ data: content,
707
+ mode: stats.mode
708
+ });
709
+ } else args.files[outputPath] = new FileFsRef({
710
+ fsPath: absolutePath,
711
+ mode: stats.mode
712
+ });
713
+ }
714
+ };
715
+ await nftSpan.trace(runNft);
716
+ };
717
+ const isTypeScriptFile = (fsPath) => {
718
+ return fsPath.endsWith(".ts") || fsPath.endsWith(".tsx") || fsPath.endsWith(".mts") || fsPath.endsWith(".cts");
719
+ };
720
+
721
+ //#endregion
722
+ //#region src/rolldown/index.ts
723
+ const PLUGIN_NAME = "vercel:backends";
724
+ const CJS_SHIM_PREFIX = "\0cjs-shim:";
725
+ const rolldown = async (args) => {
726
+ const files = {};
727
+ const { format, extension } = await resolveEntrypointAndFormat(args);
728
+ const localBuildFiles = /* @__PURE__ */ new Set();
729
+ let handler = null;
730
+ const packageJsonCache = /* @__PURE__ */ new Map();
731
+ const shimMeta = /* @__PURE__ */ new Map();
732
+ const framework = {
733
+ slug: "",
734
+ version: ""
735
+ };
736
+ const getPackageJson = async (pkgPath) => {
737
+ if (packageJsonCache.has(pkgPath)) return packageJsonCache.get(pkgPath);
738
+ try {
739
+ const contents = await readFile(pkgPath, "utf-8");
740
+ const parsed = JSON.parse(contents);
741
+ packageJsonCache.set(pkgPath, parsed);
742
+ return parsed;
743
+ } catch {
744
+ packageJsonCache.set(pkgPath, null);
745
+ return null;
746
+ }
747
+ };
748
+ const isCommonJS = async (bareImport, resolvedPath, resolvedInfo) => {
749
+ const ext = extname$1(resolvedPath);
750
+ if (ext === ".cjs") return true;
751
+ if (ext === ".mjs") return false;
752
+ if (ext === ".js" || ext === ".ts") {
753
+ const pkgJsonPath = resolvedInfo.packageJsonPath;
754
+ if (!pkgJsonPath) return true;
755
+ const pkgJson = await getPackageJson(pkgJsonPath);
756
+ if (!pkgJson) return true;
757
+ const pkgDir = dirname$1(pkgJsonPath);
758
+ const relativePath = resolvedPath.slice(pkgDir.length + 1).replace(/\\/g, "/");
759
+ const pkgName = pkgJson.name || "";
760
+ const subpath = bareImport.startsWith(pkgName) ? `.${bareImport.slice(pkgName.length)}` || "." : ".";
761
+ try {
762
+ if (exports(pkgJson, subpath, {
763
+ require: false,
764
+ conditions: ["node", "import"]
765
+ })?.some((p) => p === relativePath || p === `./${relativePath}`)) return false;
766
+ if (exports(pkgJson, subpath, {
767
+ require: true,
768
+ conditions: ["node", "require"]
769
+ })?.some((p) => p === relativePath || p === `./${relativePath}`)) return true;
770
+ } catch {}
771
+ if (pkgJson.module) return false;
772
+ return pkgJson.type !== "module";
773
+ }
774
+ return true;
775
+ };
776
+ const isBareImport = (id) => {
777
+ return !id.startsWith(".") && !id.startsWith("/") && !/^[a-z][a-z0-9+.-]*:/i.test(id);
778
+ };
779
+ const isNodeModule = (resolved) => {
780
+ return resolved?.id?.includes("node_modules") ?? false;
781
+ };
782
+ const isNodeBuiltin = (id) => {
783
+ const normalizedId = id.includes(":") ? id.split(":")[1] : id;
784
+ return builtinModules.includes(normalizedId);
785
+ };
786
+ const isLocalImport = (id) => {
787
+ return !id.startsWith("node:") && !id.includes("node_modules");
788
+ };
789
+ const plugin$1 = () => {
906
790
  return {
907
- dir: buildResult.rolldownResult.outputDir,
908
- handler: handler$1,
909
- files: buildResult.rolldownResult.outputFiles
791
+ name: PLUGIN_NAME,
792
+ resolveId: {
793
+ order: "pre",
794
+ async handler(id, importer, rOpts) {
795
+ if (id.startsWith(CJS_SHIM_PREFIX)) return {
796
+ id,
797
+ external: false
798
+ };
799
+ const resolved = await this.resolve(id, importer, rOpts);
800
+ if (isNodeBuiltin(id)) return {
801
+ id: id.startsWith("node:") ? id : `node:${id}`,
802
+ external: true
803
+ };
804
+ if (resolved?.id && isLocalImport(resolved.id)) localBuildFiles.add(resolved.id);
805
+ else if (!resolved) localBuildFiles.add(join$1(args.workPath, id));
806
+ if (importer?.startsWith(CJS_SHIM_PREFIX) && isBareImport(id)) return {
807
+ id,
808
+ external: true
809
+ };
810
+ if (importer && isBareImport(id) && isNodeModule(resolved)) {
811
+ if (isBackendFramework(id) && resolved?.packageJsonPath) try {
812
+ const pkg = await readFile(resolved.packageJsonPath, "utf8");
813
+ const pkgJson = JSON.parse(pkg);
814
+ framework.slug = pkgJson.name;
815
+ framework.version = pkgJson.version;
816
+ } catch {}
817
+ if (resolved ? await isCommonJS(id, resolved.id, resolved) : false) {
818
+ const importerPkgJsonPath = (await this.resolve(importer))?.packageJsonPath;
819
+ if (importerPkgJsonPath) {
820
+ const importerPkgDir = relative(args.repoRootPath, dirname$1(importerPkgJsonPath));
821
+ const shimId$1 = `${CJS_SHIM_PREFIX}${importerPkgDir.replace(/\//g, "_")}_${id.replace(/\//g, "_")}`;
822
+ shimMeta.set(shimId$1, {
823
+ pkgDir: importerPkgDir,
824
+ pkgName: id
825
+ });
826
+ return {
827
+ id: shimId$1,
828
+ external: false
829
+ };
830
+ }
831
+ const shimId = `${CJS_SHIM_PREFIX}${id.replace(/\//g, "_")}`;
832
+ shimMeta.set(shimId, {
833
+ pkgDir: "",
834
+ pkgName: id
835
+ });
836
+ return {
837
+ id: shimId,
838
+ external: false
839
+ };
840
+ }
841
+ return {
842
+ id,
843
+ external: true
844
+ };
845
+ }
846
+ if (importer && isBareImport(id)) return resolved;
847
+ if (resolved && !isNodeModule(resolved)) return resolved;
848
+ return resolved;
849
+ }
850
+ },
851
+ load: { async handler(id) {
852
+ if (id.startsWith(CJS_SHIM_PREFIX)) {
853
+ const meta = shimMeta.get(id);
854
+ if (!meta) return { code: `module.exports = require('${id.slice(10)}');` };
855
+ const { pkgDir, pkgName } = meta;
856
+ return { code: `
857
+ import { createRequire } from 'node:module';
858
+ import { fileURLToPath } from 'node:url';
859
+ import { dirname, join } from 'node:path';
860
+
861
+ const requireFromContext = createRequire(join(dirname(fileURLToPath(import.meta.url)), '${pkgDir ? join$1("..", pkgDir, "package.json") : "../package.json"}'));
862
+ module.exports = requireFromContext('${pkgName}');
863
+ `.trim() };
864
+ }
865
+ return null;
866
+ } }
910
867
  };
868
+ };
869
+ const runRolldown = () => build$2({
870
+ input: args.entrypoint,
871
+ write: false,
872
+ cwd: args.workPath,
873
+ platform: "node",
874
+ transform: { define: format === "esm" ? {
875
+ __dirname: "import.meta.dirname",
876
+ __filename: "import.meta.filename"
877
+ } : void 0 },
878
+ tsconfig: true,
879
+ plugins: [plugin$1()],
880
+ output: {
881
+ cleanDir: true,
882
+ format,
883
+ entryFileNames: `[name].${extension}`,
884
+ preserveModules: true,
885
+ preserveModulesRoot: args.repoRootPath,
886
+ sourcemap: false
887
+ }
888
+ });
889
+ const rolldownSpan = args.span?.child("vc.builder.backends.rolldown");
890
+ const out = await rolldownSpan?.trace(runRolldown) || await runRolldown();
891
+ for (const file of out.output) if (file.type === "chunk") {
892
+ if (file.isEntry) handler = file.fileName;
893
+ files[file.fileName] = new FileBlob({
894
+ data: file.code,
895
+ mode: 420
896
+ });
911
897
  }
912
- const outputDir = join$1(args.workPath, outputSetting);
913
- const packageJson = await getPackageJson(args.workPath);
914
- const monorepoWithoutBuildScript = args.config.projectSettings?.monorepoManager && !getScriptName(packageJson, ["build"]);
915
- if (!buildCommandResult || monorepoWithoutBuildScript) {
916
- const buildResult = await build$1({
917
- workPath: args.workPath,
918
- repoRootPath: args.repoRootPath,
919
- out: outputDir,
920
- span
898
+ await nft({
899
+ ...args,
900
+ localBuildFiles,
901
+ files,
902
+ span: rolldownSpan ?? new Span({ name: "vc.builder.backends.nft" }),
903
+ ignoreNodeModules: true
904
+ });
905
+ if (!handler) throw new Error(`Unable to resolve build handler for entrypoint: ${args.entrypoint}`);
906
+ return {
907
+ files,
908
+ handler,
909
+ framework,
910
+ localBuildFiles
911
+ };
912
+ };
913
+
914
+ //#endregion
915
+ //#region src/rolldown/util.ts
916
+ const BEGIN_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_BEGIN__\n";
917
+ const END_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_END__\n";
918
+
919
+ //#endregion
920
+ //#region src/rolldown/introspection.ts
921
+ const require$1 = createRequire(import.meta.url);
922
+ const introspectionSchema = z.object({
923
+ routes: z.array(z.object({
924
+ src: z.string(),
925
+ dest: z.string(),
926
+ methods: z.array(z.string())
927
+ })),
928
+ additionalFolders: z.array(z.string()).optional(),
929
+ additionalDeps: z.array(z.string()).optional()
930
+ });
931
+ const introspection = async (args) => {
932
+ const defaultResult$1 = {
933
+ routes: [],
934
+ additionalFolders: [],
935
+ additionalDeps: []
936
+ };
937
+ if (isExperimentalBackendsWithoutIntrospectionEnabled()) return defaultResult$1;
938
+ const introspectionSpan = args.span.child("vc.builder.backends.introspection");
939
+ const runIntrospection = async () => {
940
+ const rolldownEsmLoaderPath = `file://${require$1.resolve("@vercel/backends/rolldown/esm")}`;
941
+ const rolldownCjsLoaderPath = require$1.resolve("@vercel/backends/rolldown/cjs-hooks");
942
+ const handlerPath = join$1(args.workPath, args.entrypoint);
943
+ const files = args.files;
944
+ const tmpDir = mkdtempSync(join$1(tmpdir(), "vercel-introspection-"));
945
+ for (const [key, value] of Object.entries(files)) {
946
+ if (!(value instanceof FileBlob) || typeof value.data !== "string") continue;
947
+ const filePath = join$1(tmpDir, key);
948
+ mkdirSync(dirname$1(filePath), { recursive: true });
949
+ writeFileSync(filePath, value.data);
950
+ }
951
+ let introspectionData;
952
+ await new Promise((resolvePromise) => {
953
+ try {
954
+ debug("Spawning introspection process");
955
+ const outputTempDir = mkdtempSync(join$1(tmpdir(), "introspection-output-"));
956
+ const tempFilePath = join$1(outputTempDir, "output.txt");
957
+ const writeStream = createWriteStream(tempFilePath);
958
+ let streamClosed = false;
959
+ const child = spawn$1("node", [
960
+ "-r",
961
+ rolldownCjsLoaderPath,
962
+ "--import",
963
+ rolldownEsmLoaderPath,
964
+ handlerPath
965
+ ], {
966
+ stdio: [
967
+ "pipe",
968
+ "pipe",
969
+ "pipe"
970
+ ],
971
+ cwd: args.workPath,
972
+ env: {
973
+ ...process.env,
974
+ ...args.meta?.buildEnv,
975
+ ...args.meta?.env,
976
+ VERCEL_INTROSPECTION_HANDLER: handlerPath,
977
+ VERCEL_INTROSPECTION_HANDLER_BUILT: args.handler,
978
+ VERCEL_INTROSPECTION_WORK_PATH: args.workPath,
979
+ VERCEL_INTROSPECTION_REPO_ROOT_PATH: args.repoRootPath,
980
+ VERCEL_INTROSPECTION_TMP_DIR: tmpDir
981
+ }
982
+ });
983
+ child.stdout?.pipe(writeStream);
984
+ let stderrBuffer = "";
985
+ child.stderr?.on("data", (data) => {
986
+ stderrBuffer += data.toString();
987
+ });
988
+ writeStream.on("error", (err) => {
989
+ debug(`Write stream error: ${err.message}`);
990
+ });
991
+ const timeout = setTimeout(() => {
992
+ debug("Introspection timeout, killing process with SIGTERM");
993
+ child.kill("SIGTERM");
994
+ }, 8e3);
995
+ const timeout2 = setTimeout(() => {
996
+ debug("Introspection timeout, killing process with SIGKILL");
997
+ child.kill("SIGKILL");
998
+ }, 9e3);
999
+ const cleanup = () => {
1000
+ clearTimeout(timeout);
1001
+ clearTimeout(timeout2);
1002
+ try {
1003
+ rmSync(tmpDir, {
1004
+ recursive: true,
1005
+ force: true
1006
+ });
1007
+ } catch (err) {
1008
+ debug(`Error deleting tmpDir: ${err}`);
1009
+ }
1010
+ };
1011
+ child.on("error", (err) => {
1012
+ cleanup();
1013
+ debug(`Loader error: ${err.message}`);
1014
+ if (!streamClosed) writeStream.end(() => {
1015
+ streamClosed = true;
1016
+ try {
1017
+ rmSync(outputTempDir, {
1018
+ recursive: true,
1019
+ force: true
1020
+ });
1021
+ } catch (cleanupErr) {
1022
+ debug(`Error deleting output temp dir: ${cleanupErr}`);
1023
+ }
1024
+ resolvePromise();
1025
+ });
1026
+ else resolvePromise();
1027
+ });
1028
+ child.on("close", () => {
1029
+ cleanup();
1030
+ debug("Introspection process closed");
1031
+ if (!streamClosed) writeStream.end(() => {
1032
+ streamClosed = true;
1033
+ let stdoutBuffer;
1034
+ try {
1035
+ stdoutBuffer = readFileSync(tempFilePath, "utf8");
1036
+ const beginIndex = stdoutBuffer.indexOf(BEGIN_INTROSPECTION_RESULT);
1037
+ const endIndex = stdoutBuffer.indexOf(END_INTROSPECTION_RESULT);
1038
+ if (beginIndex !== -1 && endIndex !== -1) {
1039
+ const introspectionString = stdoutBuffer.substring(beginIndex + BEGIN_INTROSPECTION_RESULT.length, endIndex);
1040
+ if (introspectionString) {
1041
+ introspectionData = introspectionSchema.parse(JSON.parse(introspectionString));
1042
+ debug("Introspection data parsed successfully");
1043
+ }
1044
+ } else debug(`Introspection markers not found.\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
1045
+ } catch (error) {
1046
+ debug(`Error parsing introspection data: ${error}\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
1047
+ } finally {
1048
+ try {
1049
+ rmSync(outputTempDir, {
1050
+ recursive: true,
1051
+ force: true
1052
+ });
1053
+ } catch (err) {
1054
+ debug(`Error deleting output temp directory: ${err}`);
1055
+ }
1056
+ resolvePromise();
1057
+ }
1058
+ });
1059
+ else resolvePromise();
1060
+ });
1061
+ } catch (error) {
1062
+ debug("Introspection error", error);
1063
+ resolvePromise();
1064
+ }
1065
+ });
1066
+ if (!introspectionData) {
1067
+ introspectionSpan.setAttributes({
1068
+ "introspection.success": "false",
1069
+ "introspection.routes": "0"
1070
+ });
1071
+ return defaultResult$1;
1072
+ }
1073
+ const additionalFolders = (introspectionData.additionalFolders ?? []).map((val) => {
1074
+ if (isAbsolute(val)) return relative(args.workPath, val);
1075
+ return val;
1076
+ });
1077
+ introspectionSpan.setAttributes({
1078
+ "introspection.success": "true",
1079
+ "introspection.routes": String(introspectionData.routes.length)
921
1080
  });
922
- const { handler: handler$1 } = await getBuildSummary(buildResult.rolldownResult.outputDir);
923
1081
  return {
924
- dir: buildResult.rolldownResult.outputDir,
925
- handler: handler$1,
926
- files: buildResult.rolldownResult.outputFiles
1082
+ routes: introspectionData.routes,
1083
+ additionalFolders,
1084
+ additionalDeps: introspectionData.additionalDeps ?? []
927
1085
  };
1086
+ };
1087
+ return introspectionSpan.trace(runIntrospection);
1088
+ };
1089
+
1090
+ //#endregion
1091
+ //#region src/build.ts
1092
+ const maybeDoBuildCommand = async (args, downloadResult) => {
1093
+ const buildCommandResult = await maybeExecBuildCommand(args, downloadResult);
1094
+ const outputSetting = args.config.outputDirectory;
1095
+ let outputDir;
1096
+ let entrypoint;
1097
+ if (buildCommandResult && outputSetting) if (outputSetting) {
1098
+ const _outputDir = join$1(args.workPath, outputSetting);
1099
+ const _entrypoint = await findEntrypoint(_outputDir);
1100
+ if (_entrypoint) {
1101
+ outputDir = _outputDir;
1102
+ entrypoint = _entrypoint;
1103
+ }
1104
+ } else for (const outputDirectory of [
1105
+ "dist",
1106
+ "build",
1107
+ "output"
1108
+ ]) {
1109
+ const _outputDir = join$1(args.workPath, outputDirectory);
1110
+ if (existsSync(_outputDir)) {
1111
+ const _entrypoint = await findEntrypoint(_outputDir);
1112
+ if (_entrypoint) {
1113
+ outputDir = _outputDir;
1114
+ entrypoint = _entrypoint;
1115
+ break;
1116
+ }
1117
+ }
928
1118
  }
929
- const cervelJsonPath = join$1(outputDir, ".cervel.json");
930
- if (existsSync(cervelJsonPath)) {
931
- const { handler: handler$1 } = await getBuildSummary(outputDir);
932
- return {
933
- dir: outputDir,
934
- handler: handler$1
935
- };
1119
+ const localBuildFiles = /* @__PURE__ */ new Set();
1120
+ let files;
1121
+ if (outputDir && entrypoint) {
1122
+ files = await glob("**", outputDir);
1123
+ for (const file of Object.keys(files)) localBuildFiles.add(join$1(outputDir, file));
936
1124
  }
937
- let handler;
1125
+ return {
1126
+ localBuildFiles,
1127
+ files,
1128
+ handler: entrypoint,
1129
+ outputDir
1130
+ };
1131
+ };
1132
+
1133
+ //#endregion
1134
+ //#region src/typescript.ts
1135
+ const require_ = createRequire(import.meta.url);
1136
+ const typescript = (args) => {
1137
+ const { span } = args;
1138
+ return span.child("vc.builder.backends.tsCompile").trace(async () => {
1139
+ const extension = extname$1(args.entrypoint);
1140
+ if (![
1141
+ ".ts",
1142
+ ".mts",
1143
+ ".cts"
1144
+ ].includes(extension)) return;
1145
+ const tscPath = resolveTscPath(args);
1146
+ if (!tscPath) {
1147
+ console.log(Colors.gray(`${Colors.bold(Colors.cyan("✓"))} Typecheck skipped ${Colors.gray("(TypeScript not found)")}`));
1148
+ return null;
1149
+ }
1150
+ return doTypeCheck(args, tscPath);
1151
+ });
1152
+ };
1153
+ async function doTypeCheck(args, tscPath) {
1154
+ let stdout = "";
1155
+ let stderr = "";
1156
+ /**
1157
+ * This might be subject to change.
1158
+ * - if no tscPath, skip typecheck
1159
+ * - if tsconfig, provide the tsconfig path
1160
+ * - else provide the entrypoint path
1161
+ */
1162
+ const tscArgs = [
1163
+ tscPath,
1164
+ "--noEmit",
1165
+ "--pretty",
1166
+ "--allowJs",
1167
+ "--esModuleInterop",
1168
+ "--skipLibCheck"
1169
+ ];
1170
+ const tsconfig = await findNearestTsconfig(args.workPath);
1171
+ if (tsconfig) tscArgs.push("--project", tsconfig);
1172
+ else tscArgs.push(args.entrypoint);
1173
+ const child = spawn$1(process.execPath, tscArgs, {
1174
+ cwd: args.workPath,
1175
+ stdio: [
1176
+ "ignore",
1177
+ "pipe",
1178
+ "pipe"
1179
+ ]
1180
+ });
1181
+ child.stdout?.on("data", (data) => {
1182
+ stdout += data.toString();
1183
+ });
1184
+ child.stderr?.on("data", (data) => {
1185
+ stderr += data.toString();
1186
+ });
1187
+ await new Promise((resolve$1, reject) => {
1188
+ child.on("close", (code) => {
1189
+ if (code === 0) {
1190
+ console.log(Colors.gray(`${Colors.bold(Colors.cyan("✓"))} Typecheck complete`));
1191
+ resolve$1();
1192
+ } else {
1193
+ const output = stdout || stderr;
1194
+ if (output) {
1195
+ console.error("\nTypeScript type check failed:\n");
1196
+ console.error(output);
1197
+ }
1198
+ reject(/* @__PURE__ */ new Error("TypeScript type check failed"));
1199
+ }
1200
+ });
1201
+ child.on("error", (err) => {
1202
+ reject(err);
1203
+ });
1204
+ });
1205
+ }
1206
+ const resolveTscPath = (args) => {
938
1207
  try {
939
- handler = await findEntrypoint(outputDir);
940
- } catch (error) {
941
- handler = await findEntrypoint(outputDir, { ignoreRegex: true });
1208
+ return require_.resolve("typescript/bin/tsc", { paths: [args.workPath] });
1209
+ } catch (e) {
1210
+ return null;
1211
+ }
1212
+ };
1213
+ const findNearestTsconfig = async (workPath) => {
1214
+ const tsconfigPath = join$1(workPath, "tsconfig.json");
1215
+ if (existsSync(tsconfigPath)) return tsconfigPath;
1216
+ if (workPath === "/") return;
1217
+ return findNearestTsconfig(join$1(workPath, ".."));
1218
+ };
1219
+
1220
+ //#endregion
1221
+ //#region src/introspection/index.ts
1222
+ const require = createRequire(import.meta.url);
1223
+ const introspectApp = async (args) => {
1224
+ const { span } = args;
1225
+ const introspectionSpan = span.child("vc.builder.backends.introspection");
1226
+ if (isExperimentalBackendsWithoutIntrospectionEnabled()) return defaultResult(args);
1227
+ const cjsLoaderPath = require.resolve("@vercel/backends/introspection/loaders/cjs");
1228
+ const rolldownEsmLoaderPath = `file://${require.resolve("@vercel/backends/introspection/loaders/rolldown-esm")}`;
1229
+ const handlerPath = join$1(args.dir, args.handler);
1230
+ const introspectionSchema$1 = z.object({
1231
+ frameworkSlug: z.string().optional(),
1232
+ routes: z.array(z.object({
1233
+ src: z.string(),
1234
+ dest: z.string(),
1235
+ methods: z.array(z.string())
1236
+ })),
1237
+ additionalFolders: z.array(z.string()).optional().transform((values) => {
1238
+ return values?.map((val) => {
1239
+ if (isAbsolute(val)) return relative(args.dir, val);
1240
+ return val;
1241
+ });
1242
+ }),
1243
+ additionalDeps: z.array(z.string()).optional()
1244
+ });
1245
+ let introspectionData;
1246
+ await new Promise((resolvePromise) => {
1247
+ try {
1248
+ debug("Spawning introspection process");
1249
+ const child = spawn$1("node", [
1250
+ "-r",
1251
+ cjsLoaderPath,
1252
+ "--import",
1253
+ rolldownEsmLoaderPath,
1254
+ handlerPath
1255
+ ], {
1256
+ stdio: [
1257
+ "pipe",
1258
+ "pipe",
1259
+ "pipe"
1260
+ ],
1261
+ cwd: args.dir,
1262
+ env: {
1263
+ ...process.env,
1264
+ ...args.env
1265
+ }
1266
+ });
1267
+ const tempDir = mkdtempSync(join$1(tmpdir(), "introspection-"));
1268
+ const tempFilePath = join$1(tempDir, "output.txt");
1269
+ const writeStream = createWriteStream(tempFilePath);
1270
+ let streamClosed = false;
1271
+ child.stdout?.pipe(writeStream);
1272
+ let stderrBuffer = "";
1273
+ child.stderr?.on("data", (data) => {
1274
+ stderrBuffer += data.toString();
1275
+ });
1276
+ writeStream.on("error", (err) => {
1277
+ debug(`Write stream error: ${err.message}`);
1278
+ });
1279
+ const timeout = setTimeout(() => {
1280
+ debug("Introspection timeout, killing process with SIGTERM");
1281
+ child.kill("SIGTERM");
1282
+ }, 8e3);
1283
+ const timeout2 = setTimeout(() => {
1284
+ debug("Introspection timeout, killing process with SIGKILL");
1285
+ child.kill("SIGKILL");
1286
+ }, 9e3);
1287
+ child.on("error", (err) => {
1288
+ clearTimeout(timeout);
1289
+ clearTimeout(timeout2);
1290
+ debug(`Loader error: ${err.message}`);
1291
+ if (!streamClosed) writeStream.end(() => {
1292
+ streamClosed = true;
1293
+ try {
1294
+ unlinkSync(tempFilePath);
1295
+ } catch (cleanupErr) {
1296
+ debug(`Error deleting temp file on error: ${cleanupErr}`);
1297
+ }
1298
+ resolvePromise(void 0);
1299
+ });
1300
+ else resolvePromise(void 0);
1301
+ });
1302
+ child.on("close", () => {
1303
+ clearTimeout(timeout);
1304
+ clearTimeout(timeout2);
1305
+ debug("Introspection process closed");
1306
+ if (!streamClosed) writeStream.end(() => {
1307
+ streamClosed = true;
1308
+ let stdoutBuffer;
1309
+ try {
1310
+ stdoutBuffer = readFileSync(tempFilePath, "utf8");
1311
+ const beginIndex = stdoutBuffer.indexOf(BEGIN_INTROSPECTION_RESULT);
1312
+ const endIndex = stdoutBuffer.indexOf(END_INTROSPECTION_RESULT);
1313
+ if (beginIndex !== -1 && endIndex !== -1) {
1314
+ const introspectionString = stdoutBuffer.substring(beginIndex + BEGIN_INTROSPECTION_RESULT.length, endIndex);
1315
+ if (introspectionString) {
1316
+ introspectionData = introspectionSchema$1.parse(JSON.parse(introspectionString));
1317
+ debug("Introspection data parsed successfully");
1318
+ }
1319
+ } else debug(`Introspection markers not found.\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
1320
+ } catch (error) {
1321
+ debug(`Error parsing introspection data: ${error}\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
1322
+ } finally {
1323
+ try {
1324
+ rmSync(tempDir, {
1325
+ recursive: true,
1326
+ force: true
1327
+ });
1328
+ } catch (err) {
1329
+ debug(`Error deleting temp directory: ${err}`);
1330
+ }
1331
+ resolvePromise(void 0);
1332
+ }
1333
+ });
1334
+ else resolvePromise(void 0);
1335
+ });
1336
+ } catch (error) {
1337
+ debug("Introspection error", error);
1338
+ resolvePromise(void 0);
1339
+ }
1340
+ });
1341
+ const framework = getFramework(args);
1342
+ if (!introspectionData) {
1343
+ introspectionSpan.setAttributes({
1344
+ "introspection.success": "false",
1345
+ "introspection.routes": "0"
1346
+ });
1347
+ return defaultResult(args);
942
1348
  }
943
- await writeFile(cervelJsonPath, JSON.stringify({ handler }, null, 2));
1349
+ const routes = [
1350
+ { handle: "filesystem" },
1351
+ ...introspectionData.routes,
1352
+ {
1353
+ src: "/(.*)",
1354
+ dest: "/"
1355
+ }
1356
+ ];
1357
+ introspectionSpan.setAttributes({
1358
+ "introspection.success": "true",
1359
+ "introspection.routes": String(introspectionData.routes.length),
1360
+ "introspection.framework": introspectionData.frameworkSlug ?? ""
1361
+ });
944
1362
  return {
945
- dir: outputDir,
946
- handler
1363
+ routes,
1364
+ framework,
1365
+ additionalFolders: introspectionData.additionalFolders ?? [],
1366
+ additionalDeps: introspectionData.additionalDeps ?? []
947
1367
  };
948
1368
  };
1369
+ const defaultResult = (args) => {
1370
+ return {
1371
+ routes: [{ handle: "filesystem" }, {
1372
+ src: "/(.*)",
1373
+ dest: "/"
1374
+ }],
1375
+ framework: getFramework(args)
1376
+ };
1377
+ };
1378
+ const getFramework = (args) => {
1379
+ try {
1380
+ let version$1;
1381
+ if (args.framework) {
1382
+ const frameworkLibPath = require.resolve(`${args.framework}`, { paths: [args.dir] });
1383
+ const findNearestPackageJson = (dir) => {
1384
+ const packageJsonPath = join$1(dir, "package.json");
1385
+ if (existsSync(packageJsonPath)) return packageJsonPath;
1386
+ const parentDir = dirname$1(dir);
1387
+ if (parentDir === dir) return;
1388
+ return findNearestPackageJson(parentDir);
1389
+ };
1390
+ const nearestPackageJsonPath = findNearestPackageJson(frameworkLibPath);
1391
+ if (nearestPackageJsonPath) version$1 = require(nearestPackageJsonPath).version;
1392
+ }
1393
+ return {
1394
+ slug: args.framework ?? "",
1395
+ version: version$1 ?? ""
1396
+ };
1397
+ } catch (error) {
1398
+ debug(`Error getting framework for ${args.framework}. Setting framework version to empty string.`, error);
1399
+ return {
1400
+ slug: args.framework ?? "",
1401
+ version: ""
1402
+ };
1403
+ }
1404
+ };
949
1405
 
950
1406
  //#endregion
951
1407
  //#region src/index.ts
@@ -956,52 +1412,73 @@ const build = async (args) => {
956
1412
  const builderName = "@vercel/backends";
957
1413
  const span = args.span ?? new Span({ name: builderName });
958
1414
  span.setAttributes({ "builder.name": builderName });
959
- const entrypoint = await findEntrypoint(args.workPath);
960
- debug("Entrypoint", entrypoint);
961
1415
  const buildSpan = span.child("vc.builder.backends.build");
962
- const introspectionSpan = span.child("vc.builder.backends.introspectApp");
963
- const [buildResult, introspectionResult] = await Promise.all([buildSpan.trace(() => doBuild(args, downloadResult, buildSpan)), introspectionSpan.trace(() => introspectApp({
964
- handler: entrypoint,
965
- dir: args.workPath,
966
- framework: args.config.framework,
967
- env: {
968
- ...args.meta?.env ?? {},
969
- ...args.meta?.buildEnv ?? {}
970
- },
971
- span: introspectionSpan
972
- }))]);
973
- const files = buildResult.files;
974
- const { routes, framework } = introspectionResult;
975
- if (routes.length > 2) debug(`Introspection completed successfully with ${routes.length} routes`);
976
- else debug(`Introspection failed to detect routes`);
977
- const handler = buildResult.handler;
978
- if (!files) throw new Error("Unable to trace files for build");
979
- const lambda = new NodejsLambda({
980
- runtime: nodeVersion.runtime,
981
- handler,
982
- files,
983
- shouldAddHelpers: false,
984
- shouldAddSourcemapSupport: true,
985
- framework: {
986
- slug: framework?.slug ?? "",
987
- version: framework?.version ?? ""
988
- },
989
- awsLambdaHandler: "",
990
- shouldDisableAutomaticFetchInstrumentation: process.env.VERCEL_TRACING_DISABLE_AUTOMATIC_FETCH_INSTRUMENTATION === "1"
1416
+ return buildSpan.trace(async () => {
1417
+ const entrypoint = await findEntrypointOrThrow(args.workPath);
1418
+ debug("Entrypoint", entrypoint);
1419
+ args.entrypoint = entrypoint;
1420
+ const userBuildResult = await maybeDoBuildCommand(args, downloadResult);
1421
+ const rolldownResult = await rolldown({
1422
+ ...args,
1423
+ span: buildSpan
1424
+ });
1425
+ const introspectionPromise = introspection({
1426
+ ...args,
1427
+ span: buildSpan,
1428
+ files: rolldownResult.files,
1429
+ handler: rolldownResult.handler
1430
+ });
1431
+ const typescriptPromise = typescript({
1432
+ entrypoint,
1433
+ workPath: args.workPath,
1434
+ span: buildSpan
1435
+ });
1436
+ const localBuildFiles = userBuildResult?.localBuildFiles.size > 0 ? userBuildResult?.localBuildFiles : rolldownResult.localBuildFiles;
1437
+ const files = userBuildResult?.files || rolldownResult.files;
1438
+ const handler = userBuildResult?.handler || rolldownResult.handler;
1439
+ const nftWorkPath = userBuildResult?.outputDir || args.workPath;
1440
+ await nft({
1441
+ ...args,
1442
+ workPath: nftWorkPath,
1443
+ localBuildFiles,
1444
+ files,
1445
+ ignoreNodeModules: false,
1446
+ span: buildSpan
1447
+ });
1448
+ const introspectionResult = await introspectionPromise;
1449
+ await typescriptPromise;
1450
+ const lambda = new NodejsLambda({
1451
+ runtime: nodeVersion.runtime,
1452
+ handler,
1453
+ files,
1454
+ framework: rolldownResult.framework,
1455
+ shouldAddHelpers: false,
1456
+ shouldAddSourcemapSupport: true,
1457
+ awsLambdaHandler: "",
1458
+ shouldDisableAutomaticFetchInstrumentation: process.env.VERCEL_TRACING_DISABLE_AUTOMATIC_FETCH_INSTRUMENTATION === "1"
1459
+ });
1460
+ const routes = [
1461
+ { handle: "filesystem" },
1462
+ ...introspectionResult.routes,
1463
+ {
1464
+ src: "/(.*)",
1465
+ dest: "/"
1466
+ }
1467
+ ];
1468
+ const output = { index: lambda };
1469
+ for (const route of routes) if (route.dest) {
1470
+ if (route.dest === "/") continue;
1471
+ output[route.dest] = lambda;
1472
+ }
1473
+ return {
1474
+ routes,
1475
+ output
1476
+ };
991
1477
  });
992
- const output = { index: lambda };
993
- for (const route of routes) if (route.dest) {
994
- if (route.dest === "/") continue;
995
- output[route.dest] = lambda;
996
- }
997
- return {
998
- routes,
999
- output
1000
- };
1001
1478
  };
1002
1479
  const prepareCache = ({ repoRootPath, workPath }) => {
1003
1480
  return glob(defaultCachePathGlob, repoRootPath || workPath);
1004
1481
  };
1005
1482
 
1006
1483
  //#endregion
1007
- export { build, build$1 as cervelBuild, serve as cervelServe, findEntrypoint, getBuildSummary, introspectApp, nodeFileTrace, prepareCache, srvxOptions, version };
1484
+ export { build, build$1 as cervelBuild, serve as cervelServe, findEntrypoint, findEntrypointOrThrow, getBuildSummary, introspectApp, nodeFileTrace, prepareCache, srvxOptions, version };