@depup/vercel 50.32.5-depup.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +202 -0
- package/README.md +38 -0
- package/changes.json +38 -0
- package/dist/chunks/chunk-2DLBVZWU.js +197 -0
- package/dist/chunks/chunk-2HSQ7YUK.js +93 -0
- package/dist/chunks/chunk-2IQTNMUG.js +86 -0
- package/dist/chunks/chunk-3FRG2XGZ.js +466 -0
- package/dist/chunks/chunk-3KMKI2FP.js +34 -0
- package/dist/chunks/chunk-3XFFP2BA.js +110 -0
- package/dist/chunks/chunk-4S3Y3ATR.js +5383 -0
- package/dist/chunks/chunk-7EHTK7LP.js +359 -0
- package/dist/chunks/chunk-7YHZDJ4G.js +116 -0
- package/dist/chunks/chunk-A3NYPUKZ.js +17 -0
- package/dist/chunks/chunk-AA7QEJFB.js +5204 -0
- package/dist/chunks/chunk-AHU7WNL2.js +24 -0
- package/dist/chunks/chunk-AKQZ7KG3.js +4172 -0
- package/dist/chunks/chunk-AQLVWVEN.js +39155 -0
- package/dist/chunks/chunk-BQ3DXZNT.js +968 -0
- package/dist/chunks/chunk-E65JE2CC.js +102 -0
- package/dist/chunks/chunk-EKPSCRJZ.js +26 -0
- package/dist/chunks/chunk-EOZFDJSY.js +18 -0
- package/dist/chunks/chunk-FDJURQMQ.js +4676 -0
- package/dist/chunks/chunk-FLKHKWZV.js +1854 -0
- package/dist/chunks/chunk-G6BUEBF5.js +192 -0
- package/dist/chunks/chunk-GBNIO3KP.js +771 -0
- package/dist/chunks/chunk-GGP5R3FU.js +129 -0
- package/dist/chunks/chunk-H5XJSH37.js +91 -0
- package/dist/chunks/chunk-IB5L4LKZ.js +1082 -0
- package/dist/chunks/chunk-IE7MNZ56.js +149 -0
- package/dist/chunks/chunk-IK7DLK2T.js +16112 -0
- package/dist/chunks/chunk-IUGPWINM.js +104 -0
- package/dist/chunks/chunk-J7HDA5GH.js +54 -0
- package/dist/chunks/chunk-JLYZNGYY.js +293 -0
- package/dist/chunks/chunk-JQ4NA5MX.js +250 -0
- package/dist/chunks/chunk-LL26LVRR.js +81 -0
- package/dist/chunks/chunk-LW5ZNGW7.js +127 -0
- package/dist/chunks/chunk-LWBSOTJP.js +1772 -0
- package/dist/chunks/chunk-MBGJBHYD.js +388 -0
- package/dist/chunks/chunk-NUKAG3YM.js +168 -0
- package/dist/chunks/chunk-O7I4ZOCC.js +58 -0
- package/dist/chunks/chunk-OWR3XNE3.js +48 -0
- package/dist/chunks/chunk-P3SKP5WM.js +27 -0
- package/dist/chunks/chunk-P4I4DMEU.js +342 -0
- package/dist/chunks/chunk-P5Q6F5IA.js +107 -0
- package/dist/chunks/chunk-PMSMUMUO.js +30 -0
- package/dist/chunks/chunk-QXRJ52T4.js +2977 -0
- package/dist/chunks/chunk-RQXPRFRM.js +90 -0
- package/dist/chunks/chunk-S7KYDPEM.js +1564 -0
- package/dist/chunks/chunk-SGGLJFUZ.js +68 -0
- package/dist/chunks/chunk-SOTR4CXR.js +34 -0
- package/dist/chunks/chunk-TEVP63TU.js +1717 -0
- package/dist/chunks/chunk-TNBMKNET.js +323 -0
- package/dist/chunks/chunk-TZ2YI2VH.js +87 -0
- package/dist/chunks/chunk-U6XOC6E4.js +903 -0
- package/dist/chunks/chunk-V5P25P7F.js +22 -0
- package/dist/chunks/chunk-WQ5CUZWR.js +333 -0
- package/dist/chunks/chunk-WU2BPWRP.js +12237 -0
- package/dist/chunks/chunk-XPKWKPWA.js +44 -0
- package/dist/chunks/chunk-XR53KVJD.js +33 -0
- package/dist/chunks/chunk-Y4JJYHUG.js +16 -0
- package/dist/chunks/chunk-YPQSDAEW.js +29 -0
- package/dist/chunks/chunk-ZB2UO4V2.js +135 -0
- package/dist/chunks/chunk-ZLCMHY2G.js +1528 -0
- package/dist/chunks/compile-vercel-config-XU3YY2CZ.js +32 -0
- package/dist/chunks/delete-EJ2V7KQO.js +144 -0
- package/dist/chunks/disable-BKRFMX4U.js +122 -0
- package/dist/chunks/discard-4WF34DXK.js +118 -0
- package/dist/chunks/edit-FQE7JGU3.js +509 -0
- package/dist/chunks/emit-flags-datafiles-QYKPNWPX.js +17 -0
- package/dist/chunks/enable-VCNMX63U.js +122 -0
- package/dist/chunks/export-3KNVJCQR.js +133 -0
- package/dist/chunks/list-43XQCGKH.js +382 -0
- package/dist/chunks/list-DUL6PHUR.js +394 -0
- package/dist/chunks/publish-CF7GVZK3.js +128 -0
- package/dist/chunks/query-KWKO7VWO.js +954 -0
- package/dist/chunks/reorder-GU65YMIN.js +259 -0
- package/dist/chunks/restore-Q7ENGWVJ.js +158 -0
- package/dist/chunks/routes-Q5CWG44T.js +20 -0
- package/dist/chunks/schema-PJKLO2K2.js +176 -0
- package/dist/chunks/stamp-RTPE2EBB.js +15 -0
- package/dist/chunks/types-563KUQRV.js +108 -0
- package/dist/chunks/update-route-version-E3V47KNI.js +13 -0
- package/dist/commands/build/index.js +1597 -0
- package/dist/commands/deploy/index.js +1711 -0
- package/dist/commands/dev/builder-worker.cjs +95 -0
- package/dist/commands/dev/index.js +20810 -0
- package/dist/commands/env/index.js +2154 -0
- package/dist/commands/link/index.js +225 -0
- package/dist/commands/list/index.js +528 -0
- package/dist/commands-bulk.js +29627 -0
- package/dist/get-latest-worker.cjs +272 -0
- package/dist/help.js +14 -0
- package/dist/index.js +24274 -0
- package/dist/vc.js +36 -0
- package/dist/version.mjs +1 -0
- package/package.json +254 -0
|
@@ -0,0 +1,1597 @@
|
|
|
1
|
+
import { createRequire as __createRequire } from 'node:module';
|
|
2
|
+
import { fileURLToPath as __fileURLToPath } from 'node:url';
|
|
3
|
+
import { dirname as __dirname_ } from 'node:path';
|
|
4
|
+
const require = __createRequire(import.meta.url);
|
|
5
|
+
const __filename = __fileURLToPath(import.meta.url);
|
|
6
|
+
const __dirname = __dirname_(__filename);
|
|
7
|
+
import {
|
|
8
|
+
OUTPUT_DIR,
|
|
9
|
+
importBuilders,
|
|
10
|
+
isLambda,
|
|
11
|
+
staticFiles,
|
|
12
|
+
validateConfig,
|
|
13
|
+
writeBuildResult
|
|
14
|
+
} from "../../chunks/chunk-WU2BPWRP.js";
|
|
15
|
+
import {
|
|
16
|
+
require_semver
|
|
17
|
+
} from "../../chunks/chunk-IB5L4LKZ.js";
|
|
18
|
+
import {
|
|
19
|
+
pullCommandLogic
|
|
20
|
+
} from "../../chunks/chunk-G6BUEBF5.js";
|
|
21
|
+
import {
|
|
22
|
+
pickOverrides,
|
|
23
|
+
readProjectSettings
|
|
24
|
+
} from "../../chunks/chunk-RQXPRFRM.js";
|
|
25
|
+
import {
|
|
26
|
+
require_dist
|
|
27
|
+
} from "../../chunks/chunk-IK7DLK2T.js";
|
|
28
|
+
import "../../chunks/chunk-QXRJ52T4.js";
|
|
29
|
+
import "../../chunks/chunk-FLKHKWZV.js";
|
|
30
|
+
import "../../chunks/chunk-IUGPWINM.js";
|
|
31
|
+
import "../../chunks/chunk-LL26LVRR.js";
|
|
32
|
+
import "../../chunks/chunk-GBNIO3KP.js";
|
|
33
|
+
import "../../chunks/chunk-OWR3XNE3.js";
|
|
34
|
+
import {
|
|
35
|
+
DEFAULT_VERCEL_CONFIG_FILENAME,
|
|
36
|
+
compileVercelConfig,
|
|
37
|
+
findSourceVercelConfigFile,
|
|
38
|
+
require_main
|
|
39
|
+
} from "../../chunks/chunk-MBGJBHYD.js";
|
|
40
|
+
import {
|
|
41
|
+
buildCommand
|
|
42
|
+
} from "../../chunks/chunk-ZB2UO4V2.js";
|
|
43
|
+
import {
|
|
44
|
+
help
|
|
45
|
+
} from "../../chunks/chunk-JQ4NA5MX.js";
|
|
46
|
+
import {
|
|
47
|
+
VERCEL_DIR,
|
|
48
|
+
getProjectLink,
|
|
49
|
+
parseTarget,
|
|
50
|
+
readJSONFile,
|
|
51
|
+
require_dist as require_dist2,
|
|
52
|
+
require_dist2 as require_dist3,
|
|
53
|
+
require_frameworks,
|
|
54
|
+
require_lib,
|
|
55
|
+
require_minimatch2 as require_minimatch,
|
|
56
|
+
resolveProjectCwd
|
|
57
|
+
} from "../../chunks/chunk-AQLVWVEN.js";
|
|
58
|
+
import {
|
|
59
|
+
TelemetryClient
|
|
60
|
+
} from "../../chunks/chunk-P4I4DMEU.js";
|
|
61
|
+
import {
|
|
62
|
+
stamp_default
|
|
63
|
+
} from "../../chunks/chunk-SOTR4CXR.js";
|
|
64
|
+
import "../../chunks/chunk-LWBSOTJP.js";
|
|
65
|
+
import "../../chunks/chunk-7EHTK7LP.js";
|
|
66
|
+
import "../../chunks/chunk-GGP5R3FU.js";
|
|
67
|
+
import {
|
|
68
|
+
CantParseJSONFile,
|
|
69
|
+
cmd,
|
|
70
|
+
getCommandName,
|
|
71
|
+
getFlagsSpecification,
|
|
72
|
+
packageName,
|
|
73
|
+
parseArguments,
|
|
74
|
+
printError,
|
|
75
|
+
require_lib as require_lib2,
|
|
76
|
+
toEnumerableError
|
|
77
|
+
} from "../../chunks/chunk-ZLCMHY2G.js";
|
|
78
|
+
import {
|
|
79
|
+
init_pkg,
|
|
80
|
+
pkg_default
|
|
81
|
+
} from "../../chunks/chunk-3XFFP2BA.js";
|
|
82
|
+
import {
|
|
83
|
+
emoji,
|
|
84
|
+
output_manager_default,
|
|
85
|
+
prependEmoji
|
|
86
|
+
} from "../../chunks/chunk-FDJURQMQ.js";
|
|
87
|
+
import {
|
|
88
|
+
require_source
|
|
89
|
+
} from "../../chunks/chunk-S7KYDPEM.js";
|
|
90
|
+
import {
|
|
91
|
+
__toESM
|
|
92
|
+
} from "../../chunks/chunk-TZ2YI2VH.js";
|
|
93
|
+
|
|
94
|
+
// src/commands/build/index.ts
|
|
95
|
+
var import_chalk = __toESM(require_source(), 1);
|
|
96
|
+
var import_dotenv = __toESM(require_main(), 1);
|
|
97
|
+
var import_fs_extra2 = __toESM(require_lib(), 1);
|
|
98
|
+
var import_minimatch = __toESM(require_minimatch(), 1);
|
|
99
|
+
var import_semver = __toESM(require_semver(), 1);
|
|
100
|
+
var import_client = __toESM(require_dist(), 1);
|
|
101
|
+
var import_frameworks2 = __toESM(require_frameworks(), 1);
|
|
102
|
+
var import_fs_detectors2 = __toESM(require_dist3(), 1);
|
|
103
|
+
var import_routing_utils2 = __toESM(require_dist2(), 1);
|
|
104
|
+
import { dirname, join as join2, normalize, relative as relative2, resolve, sep } from "path";
|
|
105
|
+
import { readdirSync, statSync } from "fs";
|
|
106
|
+
import {
|
|
107
|
+
download,
|
|
108
|
+
FileFsRef,
|
|
109
|
+
getDiscontinuedNodeVersions,
|
|
110
|
+
getInstalledPackageVersion,
|
|
111
|
+
getServiceUrlEnvVars,
|
|
112
|
+
normalizePath,
|
|
113
|
+
NowBuildError as NowBuildError2,
|
|
114
|
+
runNpmInstall,
|
|
115
|
+
runCustomInstallCommand,
|
|
116
|
+
resetCustomInstallCommandSet,
|
|
117
|
+
Span,
|
|
118
|
+
validateNpmrc,
|
|
119
|
+
glob,
|
|
120
|
+
isBackendBuilder
|
|
121
|
+
} from "@vercel/build-utils";
|
|
122
|
+
|
|
123
|
+
// src/util/build/corepack.ts
|
|
124
|
+
var import_fs_extra = __toESM(require_lib(), 1);
|
|
125
|
+
import { delimiter, join } from "path";
|
|
126
|
+
import { spawnAsync } from "@vercel/build-utils";
|
|
127
|
+
/**
 * Optionally bootstrap Corepack for the repository before installing
 * dependencies. Only runs when the user opts in via
 * ENABLE_EXPERIMENTAL_COREPACK=1 and package.json declares a
 * "packageManager" field.
 *
 * Side effects on success: creates `.vercel/cache/corepack/{home,shim}`,
 * sets COREPACK_HOME, and prepends the shim dir to PATH (undone later by
 * `cleanupCorepack`).
 *
 * @param {{ repoRootPath: string }} opts - absolute path to the repo root
 * @returns {Promise<string|null>} the corepack shim directory when enabled,
 *   otherwise null (feature off, unreadable package.json, or no
 *   "packageManager" field)
 */
async function initCorepack({
  repoRootPath
}) {
  // Feature is strictly opt-in.
  if (process.env.ENABLE_EXPERIMENTAL_COREPACK !== "1") {
    return null;
  }
  const pkg = await readJSONFile(
    join(repoRootPath, "package.json")
  );
  if (pkg instanceof CantParseJSONFile) {
    // Invalid JSON: warn (with parse location) and fall through to `return null`.
    output_manager_default.warn(
      "Warning: Could not enable corepack because package.json is invalid JSON",
      pkg.meta.parseErrorLocation
    );
  } else if (!pkg?.packageManager) {
    output_manager_default.warn(
      'Warning: Could not enable corepack because package.json is missing "packageManager" property'
    );
  } else {
    output_manager_default.log(
      `Detected ENABLE_EXPERIMENTAL_COREPACK=1 and "${pkg.packageManager}" in package.json`
    );
    // Cache corepack state under .vercel so it survives between builds.
    const corepackRootDir = join(repoRootPath, VERCEL_DIR, "cache", "corepack");
    const corepackHomeDir = join(corepackRootDir, "home");
    const corepackShimDir = join(corepackRootDir, "shim");
    await import_fs_extra.default.mkdirp(corepackHomeDir);
    await import_fs_extra.default.mkdirp(corepackShimDir);
    process.env.COREPACK_HOME = corepackHomeDir;
    // Prepend the shim dir so the corepack-managed package manager wins PATH lookup.
    process.env.PATH = `${corepackShimDir}${delimiter}${process.env.PATH}`;
    // "pnpm@8.6.0" -> "pnpm" (assumes the field is name@version — TODO confirm
    // scoped package-manager specifiers are not expected here).
    const pkgManagerName = pkg.packageManager.split("@")[0];
    await spawnAsync(
      "corepack",
      ["enable", pkgManagerName, "--install-directory", corepackShimDir],
      {
        prettyCommand: `corepack enable ${pkgManagerName}`
      }
    );
    return corepackShimDir;
  }
  return null;
}
|
|
168
|
+
/**
 * Undo the environment mutations performed by `initCorepack`: drop
 * COREPACK_HOME and strip the corepack shim directory from PATH.
 *
 * @param {string} corepackShimDir - shim directory previously prepended to PATH
 */
function cleanupCorepack(corepackShimDir) {
  const env = process.env;
  if (env.COREPACK_HOME) {
    delete env.COREPACK_HOME;
  }
  const currentPath = env.PATH;
  if (currentPath) {
    // Remove the first occurrence of "<shimDir><delimiter>" that initCorepack added.
    const shimPrefix = `${corepackShimDir}${delimiter}`;
    env.PATH = currentPath.replace(shimPrefix, "");
  }
}
|
|
179
|
+
|
|
180
|
+
// src/util/build/monorepo.ts
|
|
181
|
+
var import_fs_detectors = __toESM(require_dist3(), 1);
|
|
182
|
+
var import_title = __toESM(require_lib2(), 1);
|
|
183
|
+
import { relative, basename } from "path";
|
|
184
|
+
import { debug } from "@vercel/build-utils";
|
|
185
|
+
/**
 * Detect a monorepo manager (via fs-detectors) for the linked project and
 * fill in default build/install/ignore commands on `projectSettings` —
 * but only for commands the user has not already set (mutates
 * `projectSettings` in place).
 *
 * @param {string} cwd - repository root
 * @param {string} workPath - project working directory inside the repo
 * @param {object} projectSettings - settings object to augment in place
 * @throws re-throws any detector error other than the two "missing
 *   pipeline/target" cases, which are downgraded to warnings
 */
async function setMonorepoDefaultSettings(cwd, workPath, projectSettings) {
  const localFileSystem = new import_fs_detectors.LocalFileSystemDetector(cwd);
  const projectName = basename(workPath);
  const relativeToRoot = relative(workPath, cwd);
  // Assign a detected command only when the user hasn't set it already.
  const setCommand = (command, value) => {
    if (projectSettings[command]) {
      debug(
        `Skipping auto-assignment of ${command} as it is already set via project settings or configuration overrides.`
      );
    } else {
      projectSettings[command] = value;
    }
  };
  try {
    const result = await (0, import_fs_detectors.getMonorepoDefaultSettings)(
      projectName,
      relative(cwd, workPath),
      relativeToRoot,
      localFileSystem
    );
    // null result: no monorepo manager detected; nothing to do.
    if (result === null) {
      return;
    }
    projectSettings.monorepoManager = result.monorepoManager;
    const { monorepoManager, ...commands } = result;
    output_manager_default.log(
      `Detected ${(0, import_title.default)(monorepoManager)}. Adjusting default settings...`
    );
    if (commands.buildCommand) {
      setCommand("buildCommand", commands.buildCommand);
    }
    if (commands.installCommand) {
      setCommand("installCommand", commands.installCommand);
    }
    if (commands.commandForIgnoringBuildStep) {
      setCommand(
        "commandForIgnoringBuildStep",
        commands.commandForIgnoringBuildStep
      );
    }
  } catch (error) {
    // Expected detector failures (e.g. Turborepo/Nx config missing the project)
    // are non-fatal: warn and keep the user's existing settings.
    if (error instanceof import_fs_detectors.MissingBuildPipeline || error instanceof import_fs_detectors.MissingBuildTarget) {
      output_manager_default.warn(`${error.message} Skipping automatic setting assignment.`);
      return;
    }
    throw error;
  }
}
|
|
233
|
+
|
|
234
|
+
// src/util/build/scrub-argv.ts
|
|
235
|
+
/**
 * Return a copy of `argv` with secret-bearing values redacted so the
 * argument list can be persisted safely (e.g. into builds.json).
 *
 * Matches `--token`, `--env`, `--build-env`, and any short-flag cluster
 * ending in `b`, `e`, or `t`. Handles both `--flag=value` (value replaced
 * inline) and `--flag value` (following argument replaced).
 *
 * @param {string[]} argv
 * @returns {string[]} redacted copy; the input array is not mutated
 */
function scrubArgv(argv) {
  const SENSITIVE_RE = /^(-[A-Za-z]*[bet]|--(?:build-env|env|token))(=.*)?$/;
  const sanitized = argv.slice();
  for (let idx = 0; idx < sanitized.length; idx++) {
    const match = sanitized[idx].match(SENSITIVE_RE);
    if (!match) {
      continue;
    }
    if (match[2]) {
      // `--flag=value` form: keep the flag, redact the inline value.
      sanitized[idx] = `${match[1]}=REDACTED`;
    } else if (idx + 1 < sanitized.length) {
      // `--flag value` form: redact the next argument and skip past it.
      sanitized[++idx] = "REDACTED";
    }
  }
  return sanitized;
}
|
|
248
|
+
|
|
249
|
+
// src/util/build/service-route-ownership.ts
|
|
250
|
+
var import_routing_utils = __toESM(require_dist2(), 1);
|
|
251
|
+
/**
 * Predicate: is `service` a web service that declares a string route prefix?
 *
 * @param {{ type?: string, routePrefix?: unknown }} service
 * @returns {boolean}
 */
function isWebServiceWithPrefix(service) {
  const isWeb = service.type === "web";
  const hasStringPrefix = typeof service.routePrefix === "string";
  return isWeb && hasStringPrefix;
}
|
|
254
|
+
/**
 * Collect the distinct, normalized route prefixes of all web services.
 *
 * @param {Array<object>} services
 * @returns {string[]} de-duplicated normalized prefixes, in first-seen order
 */
function getWebRoutePrefixes(services) {
  const prefixes = /* @__PURE__ */ new Set();
  for (const svc of services) {
    if (isWebServiceWithPrefix(svc)) {
      prefixes.add((0, import_routing_utils.normalizeRoutePrefix)(svc.routePrefix));
    }
  }
  return [...prefixes];
}
|
|
263
|
+
/**
 * Rewrite a service's route sources so they only match paths that the
 * owning web service is responsible for (relative to every other web
 * service's prefix). Routes are returned unchanged when the owner is not a
 * prefixed web service, when no ownership guard applies, or for `handle`
 * phase entries / routes without a string `src`.
 *
 * @param {{ routes: Array<object>, owner: object, allServices: Array<object> }} args
 * @returns {Array<object>} possibly-rewritten routes (shallow copies where changed)
 */
function scopeRoutesToServiceOwnership({
  routes,
  owner,
  allServices
}) {
  if (!isWebServiceWithPrefix(owner)) {
    return routes;
  }
  const allWebPrefixes = getWebRoutePrefixes(allServices);
  const guard = (0, import_routing_utils.getOwnershipGuard)(owner.routePrefix, allWebPrefixes);
  if (!guard) {
    // No guard needed (e.g. the owner's prefix is unambiguous).
    return routes;
  }
  return routes.map((route) => {
    const isPhaseMarker = "handle" in route;
    if (isPhaseMarker || typeof route.src !== "string") {
      return route;
    }
    const scopedSrc = (0, import_routing_utils.scopeRouteSourceToOwnership)(route.src, guard);
    return { ...route, src: scopedSrc };
  });
}
|
|
286
|
+
|
|
287
|
+
// src/util/build/sort-builders.ts
|
|
288
|
+
var import_frameworks = __toESM(require_frameworks(), 1);
|
|
289
|
+
/**
 * Sort builds so frontend-framework runtimes run first, the dedicated
 * backend runtimes (python/ruby/rust) second, and everything else last.
 * Sorts `builds` in place (Array#sort) and returns it.
 *
 * @param {Array<{ use: string }>} builds
 * @returns {Array<{ use: string }>} the same array, reordered
 */
function sortBuilders(builds) {
  const BACKEND_RUNTIMES = ["@vercel/python", "@vercel/ruby", "@vercel/rust"];
  // Runtimes used by known frontend frameworks (static-build as the fallback).
  const frontendRuntimes = new Set(
    import_frameworks.frameworkList.map(
      (framework) => framework.useRuntime?.use || "@vercel/static-build"
    )
  );
  // The backend runtimes must never be classified as frontend, even if a
  // framework entry references them.
  for (const runtime of BACKEND_RUNTIMES) {
    frontendRuntimes.delete(runtime);
  }
  // 0 = frontend, 1 = backend, 2 = everything else.
  const rank = (build) => {
    if (BACKEND_RUNTIMES.includes(build.use)) {
      return 1;
    }
    return frontendRuntimes.has(build.use) ? 0 : 2;
  };
  return builds.sort((a, b) => rank(a) - rank(b));
}
|
|
301
|
+
|
|
302
|
+
// src/commands/build/index.ts
|
|
303
|
+
init_pkg();
|
|
304
|
+
|
|
305
|
+
// src/util/telemetry/commands/build/index.ts
|
|
306
|
+
// Telemetry client for `vercel build`: records which CLI options/flags were
// used. Option *values* are redacted before being tracked; each tracker is a
// no-op when the option/flag was not supplied.
var BuildTelemetryClient = class extends TelemetryClient {
  // Track use of `--output <path>`; the path itself is replaced with the
  // redacted placeholder, never sent verbatim.
  trackCliOptionOutput(path) {
    if (path) {
      this.trackCliOption({
        option: "output",
        value: this.redactedValue
      });
    }
  }
  // Track use of `--target <name>`; only known target names survive redaction.
  trackCliOptionTarget(option) {
    if (option) {
      this.trackCliOption({
        option: "target",
        value: this.redactedTargetName(option)
      });
    }
  }
  // Track presence of the `--prod` flag.
  trackCliFlagProd(flag) {
    if (flag) {
      this.trackCliFlag("prod");
    }
  }
  // Track presence of the `--yes` flag.
  trackCliFlagYes(flag) {
    if (flag) {
      this.trackCliFlag("yes");
    }
  }
  // Track presence of the `--standalone` flag.
  trackCliFlagStandalone(flag) {
    if (flag) {
      this.trackCliFlag("standalone");
    }
  }
};
|
|
339
|
+
|
|
340
|
+
// src/util/validate-cron-secret.ts
|
|
341
|
+
import { NowBuildError } from "@vercel/build-utils";
|
|
342
|
+
/**
 * Validate that a CRON_SECRET value is usable as an HTTP header value:
 * no leading/trailing whitespace, and only HTAB (0x09) or visible ASCII
 * plus space (0x20-0x7E).
 *
 * @param {string|undefined} cronSecret
 * @returns {NowBuildError|null} a descriptive build error, or null when the
 *   secret is absent or valid
 */
function validateCronSecret(cronSecret) {
  // Absent/empty secret: nothing to validate.
  if (!cronSecret) {
    return null;
  }
  if (cronSecret !== cronSecret.trim()) {
    return new NowBuildError({
      code: "INVALID_CRON_SECRET",
      message: "The `CRON_SECRET` environment variable contains leading or trailing whitespace, which is not allowed in HTTP header values.",
      link: "https://vercel.link/securing-cron-jobs",
      action: "Learn More"
    });
  }
  // Scan for characters outside the HTTP header-value charset.
  const invalidChars = [];
  for (let index = 0; index < cronSecret.length; index++) {
    const code = cronSecret.charCodeAt(index);
    const allowed = code === 9 || // HTAB
    code >= 32 && code <= 126;
    if (!allowed) {
      invalidChars.push({
        char: cronSecret[index],
        index,
        code
      });
    }
  }
  if (invalidChars.length > 0) {
    // Describe at most the first three offenders to keep the message short.
    const describe = ({ code, index }) => {
      const hex = code.toString(16).padStart(2, "0");
      if (code < 32) {
        return `control character (0x${hex}) at position ${index}`;
      }
      if (code === 127) {
        return `DEL character at position ${index}`;
      }
      return `non-ASCII character (0x${hex}) at position ${index}`;
    };
    const descriptions = invalidChars.slice(0, 3).map(describe);
    const hiddenCount = invalidChars.length - 3;
    const moreText = hiddenCount > 0 ? `, and ${hiddenCount} more` : "";
    return new NowBuildError({
      code: "INVALID_CRON_SECRET",
      message: `The \`CRON_SECRET\` environment variable contains characters that are not valid in HTTP headers: ${descriptions.join(", ")}${moreText}. Only visible ASCII characters (letters, digits, symbols), spaces, and tabs are allowed.`,
      link: "https://vercel.link/securing-cron-jobs",
      action: "Learn More"
    });
  }
  return null;
}
|
|
388
|
+
|
|
389
|
+
// src/commands/build/index.ts
|
|
390
|
+
import { mkdir, writeFile } from "fs/promises";
|
|
391
|
+
// Minimal Span reporter that buffers every reported trace event in memory;
// `main` serializes `events` to diagnostics/cli_traces.json at the end of
// the build.
var InMemoryReporter = class {
  constructor() {
    // Events accumulate in arrival order; the array identity is stable so
    // callers may hold a reference to it.
    this.events = [];
  }
  // Append a single trace event.
  report(event) {
    this.events.push(event);
  }
};
|
|
399
|
+
/**
 * Entry point for `vercel build`.
 *
 * Parses CLI arguments, ensures Project Settings exist locally (pulling
 * them if needed), prepares the output directory and environment, then
 * delegates to `doBuild` inside a trace span. Trace events and, on
 * failure, builds.json/config.json are written to the output directory.
 *
 * @param {object} client - CLI client (argv, cwd, stdio, telemetry store, ...)
 * @returns {Promise<number>} process exit code: 0 success/cancel, 1 error,
 *   2 help shown
 */
async function main(client) {
  const telemetryClient = new BuildTelemetryClient({
    opts: {
      store: client.telemetryEventStore
    }
  });
  // Collects trace events; flushed to diagnostics/cli_traces.json in `finally`.
  const reporter = new InMemoryReporter();
  const rootSpan = new Span({ name: "vc", reporter });
  let { cwd } = client;
  cwd = await resolveProjectCwd(cwd);
  // Guard against `vercel build` being invoked from within its own Build
  // Command (infinite recursion).
  if (process.env.__VERCEL_BUILD_RUNNING) {
    output_manager_default.error(
      `${cmd(
        `${packageName} build`
      )} must not recursively invoke itself. Check the Build Command in the Project Settings or the ${cmd(
        "build"
      )} script in ${cmd("package.json")}`
    );
    output_manager_default.error(
      `Learn More: https://vercel.link/recursive-invocation-of-commands`
    );
    return 1;
  } else {
    process.env.__VERCEL_BUILD_RUNNING = "1";
  }
  let parsedArgs = null;
  const flagsSpecification = getFlagsSpecification(buildCommand.options);
  // Parse arguments and record telemetry for each recognized option/flag.
  try {
    parsedArgs = parseArguments(client.argv.slice(2), flagsSpecification);
    telemetryClient.trackCliOptionOutput(parsedArgs.flags["--output"]);
    telemetryClient.trackCliOptionTarget(parsedArgs.flags["--target"]);
    telemetryClient.trackCliFlagProd(parsedArgs.flags["--prod"]);
    telemetryClient.trackCliFlagYes(parsedArgs.flags["--yes"]);
    telemetryClient.trackCliFlagStandalone(parsedArgs.flags["--standalone"]);
  } catch (error) {
    printError(error);
    return 1;
  }
  if (parsedArgs.flags["--help"]) {
    telemetryClient.trackCliFlagHelp("build");
    output_manager_default.print(help(buildCommand, { columns: client.stderr.columns }));
    return 2;
  }
  // Deployment target defaults to "preview" when neither --target nor --prod
  // resolves to one.
  const target = parseTarget({
    flagName: "target",
    flags: parsedArgs.flags
  }) || "preview";
  const yes = Boolean(parsedArgs.flags["--yes"]);
  // Legacy env-var spelling of --standalone still works, with a deprecation warning.
  const hasDeprecatedEnvVar = process.env.VERCEL_EXPERIMENTAL_STANDALONE_BUILD === "1";
  if (hasDeprecatedEnvVar) {
    output_manager_default.warn(
      "The VERCEL_EXPERIMENTAL_STANDALONE_BUILD environment variable is deprecated. Please use the --standalone flag instead."
    );
  }
  const standalone = Boolean(
    parsedArgs.flags["--standalone"] || hasDeprecatedEnvVar
  );
  try {
    await validateNpmrc(cwd);
  } catch (err) {
    output_manager_default.prettyError(err);
    return 1;
  }
  // When the project is linked inside a monorepo, operate from the repo root.
  const link = await getProjectLink(client, cwd);
  const projectRootDirectory = link?.projectRootDirectory ?? "";
  if (link?.repoRoot) {
    cwd = client.cwd = link.repoRoot;
  }
  const vercelDir = join2(cwd, projectRootDirectory, VERCEL_DIR);
  let project = await readProjectSettings(vercelDir);
  const isTTY = process.stdin.isTTY;
  // No local Project Settings: offer (or auto-run with --yes) `vercel pull`
  // until settings exist or the user declines.
  while (!project?.settings) {
    let confirmed = yes;
    if (!confirmed) {
      if (!isTTY) {
        // Cannot prompt without a TTY; instruct and bail.
        output_manager_default.print(
          `No Project Settings found locally. Run ${getCommandName(
            "pull --yes"
          )} to retrieve them. In non-interactive mode, set VERCEL_TOKEN for authentication.`
        );
        return 1;
      }
      confirmed = await client.input.confirm(
        `No Project Settings found locally. Run ${getCommandName(
          "pull"
        )} for retrieving them?`,
        true
      );
    }
    if (!confirmed) {
      output_manager_default.print(`Canceled. No Project Settings retrieved.
`);
      return 0;
    }
    // Temporarily rewrite client.cwd/argv to run the pull logic, then restore.
    const { argv: originalArgv } = client;
    client.cwd = join2(cwd, projectRootDirectory);
    client.argv = [
      ...originalArgv.slice(0, 2),
      "pull",
      `--environment`,
      target
    ];
    const result = await pullCommandLogic(
      client,
      client.cwd,
      Boolean(parsedArgs.flags["--yes"]),
      target,
      parsedArgs.flags
    );
    if (result !== 0) {
      return result;
    }
    client.cwd = cwd;
    client.argv = originalArgv;
    project = await readProjectSettings(vercelDir);
  }
  const defaultOutputDir = join2(cwd, projectRootDirectory, OUTPUT_DIR);
  const outputDir = parsedArgs.flags["--output"] ? resolve(parsedArgs.flags["--output"]) : defaultOutputDir;
  // Start from a clean output directory.
  await Promise.all([
    import_fs_extra2.default.remove(outputDir),
    // Also delete `.vercel/output`, in case the script is targeting Build Output API directly
    outputDir !== defaultOutputDir ? import_fs_extra2.default.remove(defaultOutputDir) : void 0
  ]);
  // Metadata record written alongside the build output (and on failure, with
  // the error attached).
  const buildsJson = {
    "//": "This file was generated by the `vercel build` command. It is not part of the Build Output API.",
    target,
    argv: scrubArgv(process.argv),
    cliVersion: pkg_default.version
  };
  if (!process.env.VERCEL_BUILD_IMAGE) {
    output_manager_default.warn(
      "Build not running on Vercel. System environment variables will not be available."
    );
  }
  // Every env var we set for the build gets removed again in `finally`.
  const envToUnset = /* @__PURE__ */ new Set(["VERCEL", "NOW_BUILDER"]);
  try {
    // Load the target-specific env file pulled by `vercel pull`.
    const envPath = join2(
      cwd,
      projectRootDirectory,
      VERCEL_DIR,
      `.env.${target}.local`
    );
    const dotenvResult = import_dotenv.default.config({
      path: envPath,
      debug: output_manager_default.isDebugEnabled()
    });
    if (dotenvResult.error) {
      // Missing/unreadable env file is not fatal; debug-log and continue.
      output_manager_default.debug(
        `Failed loading environment variables: ${dotenvResult.error}`
      );
    } else if (dotenvResult.parsed) {
      for (const key of Object.keys(dotenvResult.parsed)) {
        envToUnset.add(key);
      }
      output_manager_default.debug(`Loaded environment variables from "${envPath}"`);
    }
    if (project.settings.analyticsId) {
      envToUnset.add("VERCEL_ANALYTICS_ID");
      process.env.VERCEL_ANALYTICS_ID = project.settings.analyticsId;
    }
    process.env.VERCEL = "1";
    process.env.NOW_BUILDER = "1";
    // Run the actual build inside a child span; always stop the root span.
    try {
      await rootSpan.child("vc.doBuild").trace(
        (span) => doBuild(client, project, buildsJson, cwd, outputDir, span, standalone)
      );
    } finally {
      await rootSpan.stop();
    }
    return 0;
  } catch (err) {
    output_manager_default.prettyError(err);
    // Persist the failure so tooling can inspect builds.json/config.json.
    buildsJson.error = toEnumerableError(err);
    const buildsJsonPath = join2(outputDir, "builds.json");
    const configJsonPath = join2(outputDir, "config.json");
    await import_fs_extra2.default.outputJSON(buildsJsonPath, buildsJson, {
      spaces: 2
    });
    await import_fs_extra2.default.writeJSON(configJsonPath, { version: 3 }, { spaces: 2 });
    return 1;
  } finally {
    // Best-effort: dump collected trace events for diagnostics.
    try {
      const diagnosticsOutputPath = join2(outputDir, "diagnostics");
      await mkdir(diagnosticsOutputPath, { recursive: true });
      await writeFile(
        join2(diagnosticsOutputPath, "cli_traces.json"),
        JSON.stringify(reporter.events)
      );
    } catch (err) {
      output_manager_default.error("Failed to write diagnostics trace file");
      output_manager_default.prettyError(err);
    }
    // Restore the process environment to its pre-build state.
    for (const key of envToUnset) {
      delete process.env[key];
    }
    delete process.env.VERCEL_INSTALL_COMPLETED;
    resetCustomInstallCommandSet();
  }
}
|
|
598
|
+
async function doBuild(client, project, buildsJson, cwd, outputDir, span, standalone = false) {
|
|
599
|
+
const { localConfigPath } = client;
|
|
600
|
+
const VALID_DEPLOYMENT_ID_PATTERN = /^[a-zA-Z0-9_-]+$/;
|
|
601
|
+
const workPath = join2(cwd, project.settings.rootDirectory || ".");
|
|
602
|
+
const sourceConfigFile = await findSourceVercelConfigFile(workPath);
|
|
603
|
+
let corepackShimDir;
|
|
604
|
+
if (sourceConfigFile) {
|
|
605
|
+
corepackShimDir = await initCorepack({ repoRootPath: cwd });
|
|
606
|
+
const installCommand = project.settings.installCommand;
|
|
607
|
+
if (typeof installCommand === "string") {
|
|
608
|
+
if (installCommand.trim()) {
|
|
609
|
+
output_manager_default.log(`Running install command before config compilation...`);
|
|
610
|
+
await runCustomInstallCommand({
|
|
611
|
+
destPath: workPath,
|
|
612
|
+
installCommand,
|
|
613
|
+
spawnOpts: { env: process.env },
|
|
614
|
+
projectCreatedAt: project.settings.createdAt
|
|
615
|
+
});
|
|
616
|
+
} else {
|
|
617
|
+
output_manager_default.debug("Skipping empty install command");
|
|
618
|
+
}
|
|
619
|
+
} else {
|
|
620
|
+
output_manager_default.log(`Installing dependencies before config compilation...`);
|
|
621
|
+
await runNpmInstall(
|
|
622
|
+
workPath,
|
|
623
|
+
[],
|
|
624
|
+
{ env: process.env },
|
|
625
|
+
void 0,
|
|
626
|
+
project.settings.createdAt
|
|
627
|
+
);
|
|
628
|
+
}
|
|
629
|
+
process.env.VERCEL_INSTALL_COMPLETED = "1";
|
|
630
|
+
}
|
|
631
|
+
const compileResult = await compileVercelConfig(workPath);
|
|
632
|
+
const vercelConfigPath = localConfigPath || compileResult.configPath || join2(workPath, "vercel.json");
|
|
633
|
+
const [pkg, vercelConfig, nowConfig, hasInstrumentation] = await Promise.all([
|
|
634
|
+
readJSONFile(join2(workPath, "package.json")),
|
|
635
|
+
readJSONFile(vercelConfigPath),
|
|
636
|
+
readJSONFile(join2(workPath, "now.json")),
|
|
637
|
+
(0, import_fs_detectors2.detectInstrumentation)(new import_fs_detectors2.LocalFileSystemDetector(workPath))
|
|
638
|
+
]);
|
|
639
|
+
if (pkg instanceof CantParseJSONFile)
|
|
640
|
+
throw pkg;
|
|
641
|
+
if (vercelConfig instanceof CantParseJSONFile)
|
|
642
|
+
throw vercelConfig;
|
|
643
|
+
if (nowConfig instanceof CantParseJSONFile)
|
|
644
|
+
throw nowConfig;
|
|
645
|
+
if (hasInstrumentation) {
|
|
646
|
+
output_manager_default.debug(
|
|
647
|
+
"OpenTelemetry instrumentation detected. Automatic fetch instrumentation will be disabled."
|
|
648
|
+
);
|
|
649
|
+
process.env.VERCEL_TRACING_DISABLE_AUTOMATIC_FETCH_INSTRUMENTATION = "1";
|
|
650
|
+
}
|
|
651
|
+
if (vercelConfig) {
|
|
652
|
+
vercelConfig[import_client.fileNameSymbol] = compileResult.wasCompiled ? compileResult.sourceFile || DEFAULT_VERCEL_CONFIG_FILENAME : "vercel.json";
|
|
653
|
+
} else if (nowConfig) {
|
|
654
|
+
nowConfig[import_client.fileNameSymbol] = "now.json";
|
|
655
|
+
}
|
|
656
|
+
const localConfig = vercelConfig || nowConfig || {};
|
|
657
|
+
const validateError = validateConfig(localConfig);
|
|
658
|
+
if (validateError) {
|
|
659
|
+
throw validateError;
|
|
660
|
+
}
|
|
661
|
+
if (localConfig.crons && localConfig.crons.length > 0) {
|
|
662
|
+
const cronSecretError = validateCronSecret(process.env.CRON_SECRET);
|
|
663
|
+
if (cronSecretError) {
|
|
664
|
+
throw cronSecretError;
|
|
665
|
+
}
|
|
666
|
+
}
|
|
667
|
+
const projectSettings = {
|
|
668
|
+
...project.settings,
|
|
669
|
+
...pickOverrides(localConfig)
|
|
670
|
+
};
|
|
671
|
+
if (process.env.VERCEL_BUILD_MONOREPO_SUPPORT === "1" && pkg?.scripts?.["vercel-build"] === void 0 && projectSettings.rootDirectory !== null && projectSettings.rootDirectory !== ".") {
|
|
672
|
+
await setMonorepoDefaultSettings(cwd, workPath, projectSettings);
|
|
673
|
+
}
|
|
674
|
+
if (process.env.VERCEL_EXPERIMENTAL_EMBED_FLAG_DEFINITIONS === "1") {
|
|
675
|
+
const { emitFlagsDatafiles } = await import("../../chunks/emit-flags-datafiles-QYKPNWPX.js");
|
|
676
|
+
await emitFlagsDatafiles(cwd, process.env);
|
|
677
|
+
}
|
|
678
|
+
const files = (await staticFiles(workPath, {})).map(
|
|
679
|
+
(f) => normalizePath(relative2(workPath, f))
|
|
680
|
+
);
|
|
681
|
+
const routesResult = (0, import_routing_utils2.getTransformedRoutes)(localConfig);
|
|
682
|
+
if (routesResult.error) {
|
|
683
|
+
throw routesResult.error;
|
|
684
|
+
}
|
|
685
|
+
if (localConfig.builds && localConfig.functions) {
|
|
686
|
+
throw new NowBuildError2({
|
|
687
|
+
code: "bad_request",
|
|
688
|
+
message: "The `functions` property cannot be used in conjunction with the `builds` property. Please remove one of them.",
|
|
689
|
+
link: "https://vercel.link/functions-and-builds"
|
|
690
|
+
});
|
|
691
|
+
}
|
|
692
|
+
let builds = localConfig.builds || [];
|
|
693
|
+
let zeroConfigRoutes = [];
|
|
694
|
+
let detectedServices;
|
|
695
|
+
let isZeroConfig = false;
|
|
696
|
+
if (builds.length > 0) {
|
|
697
|
+
output_manager_default.warn(
|
|
698
|
+
"Due to `builds` existing in your configuration file, the Build and Development Settings defined in your Project Settings will not apply. Learn More: https://vercel.link/unused-build-settings"
|
|
699
|
+
);
|
|
700
|
+
builds = builds.flatMap((b) => expandBuild(files, b));
|
|
701
|
+
} else {
|
|
702
|
+
isZeroConfig = true;
|
|
703
|
+
const detectedBuilders = await (0, import_fs_detectors2.detectBuilders)(files, pkg, {
|
|
704
|
+
...localConfig,
|
|
705
|
+
projectSettings,
|
|
706
|
+
ignoreBuildScript: true,
|
|
707
|
+
featHandleMiss: true,
|
|
708
|
+
workPath
|
|
709
|
+
});
|
|
710
|
+
if (detectedBuilders.errors && detectedBuilders.errors.length > 0) {
|
|
711
|
+
throw detectedBuilders.errors[0];
|
|
712
|
+
}
|
|
713
|
+
for (const w of detectedBuilders.warnings) {
|
|
714
|
+
output_manager_default.warn(w.message, null, w.link, w.action || "Learn More");
|
|
715
|
+
}
|
|
716
|
+
if (detectedBuilders.builders) {
|
|
717
|
+
builds = detectedBuilders.builders;
|
|
718
|
+
} else {
|
|
719
|
+
builds = [{ src: "**", use: "@vercel/static" }];
|
|
720
|
+
}
|
|
721
|
+
detectedServices = detectedBuilders.services;
|
|
722
|
+
if (detectedServices && detectedServices.length > 0) {
|
|
723
|
+
const serviceUrlEnvVars = getServiceUrlEnvVars({
|
|
724
|
+
services: detectedServices,
|
|
725
|
+
frameworkList: import_frameworks2.frameworkList,
|
|
726
|
+
currentEnv: process.env,
|
|
727
|
+
deploymentUrl: process.env.VERCEL_URL
|
|
728
|
+
});
|
|
729
|
+
for (const [key, value] of Object.entries(serviceUrlEnvVars)) {
|
|
730
|
+
process.env[key] = value;
|
|
731
|
+
output_manager_default.debug(`Injected service URL env var: ${key}=${value}`);
|
|
732
|
+
}
|
|
733
|
+
}
|
|
734
|
+
zeroConfigRoutes.push(...detectedBuilders.redirectRoutes || []);
|
|
735
|
+
const detectedHostRewriteRoutes = detectedBuilders.hostRewriteRoutes;
|
|
736
|
+
zeroConfigRoutes = (0, import_routing_utils2.appendRoutesToPhase)({
|
|
737
|
+
routes: zeroConfigRoutes,
|
|
738
|
+
newRoutes: detectedHostRewriteRoutes ?? null,
|
|
739
|
+
phase: null
|
|
740
|
+
});
|
|
741
|
+
zeroConfigRoutes.push(
|
|
742
|
+
...(0, import_routing_utils2.appendRoutesToPhase)({
|
|
743
|
+
routes: [],
|
|
744
|
+
newRoutes: detectedBuilders.rewriteRoutes,
|
|
745
|
+
phase: "filesystem"
|
|
746
|
+
})
|
|
747
|
+
);
|
|
748
|
+
zeroConfigRoutes = (0, import_routing_utils2.appendRoutesToPhase)({
|
|
749
|
+
routes: zeroConfigRoutes,
|
|
750
|
+
newRoutes: detectedBuilders.errorRoutes,
|
|
751
|
+
phase: "error"
|
|
752
|
+
});
|
|
753
|
+
zeroConfigRoutes.push(...detectedBuilders.defaultRoutes || []);
|
|
754
|
+
}
|
|
755
|
+
const builderSpecs = new Set(builds.map((b) => b.use));
|
|
756
|
+
const buildersWithPkgs = await importBuilders(builderSpecs, cwd, span);
|
|
757
|
+
const filesMap = {};
|
|
758
|
+
for (const path of files) {
|
|
759
|
+
const fsPath = join2(workPath, path);
|
|
760
|
+
const { mode } = await import_fs_extra2.default.stat(fsPath);
|
|
761
|
+
filesMap[path] = new FileFsRef({ mode, fsPath });
|
|
762
|
+
}
|
|
763
|
+
const buildStamp = stamp_default();
|
|
764
|
+
await import_fs_extra2.default.mkdirp(outputDir);
|
|
765
|
+
const ops = [];
|
|
766
|
+
const buildsJsonBuilds = new Map(
|
|
767
|
+
builds.map((build) => {
|
|
768
|
+
const builderWithPkg = buildersWithPkgs.get(build.use);
|
|
769
|
+
if (!builderWithPkg) {
|
|
770
|
+
throw new Error(`Failed to load Builder "${build.use}"`);
|
|
771
|
+
}
|
|
772
|
+
const { builder, pkg: builderPkg } = builderWithPkg;
|
|
773
|
+
return [
|
|
774
|
+
build,
|
|
775
|
+
{
|
|
776
|
+
require: builderPkg.name,
|
|
777
|
+
requirePath: builderWithPkg.path,
|
|
778
|
+
apiVersion: builder.version,
|
|
779
|
+
...build
|
|
780
|
+
}
|
|
781
|
+
];
|
|
782
|
+
})
|
|
783
|
+
);
|
|
784
|
+
buildsJson.builds = Array.from(buildsJsonBuilds.values());
|
|
785
|
+
await writeBuildJson(buildsJson, outputDir);
|
|
786
|
+
const meta = {
|
|
787
|
+
skipDownload: true,
|
|
788
|
+
cliVersion: pkg_default.version
|
|
789
|
+
};
|
|
790
|
+
const sortedBuilders = sortBuilders(builds);
|
|
791
|
+
const buildResults = /* @__PURE__ */ new Map();
|
|
792
|
+
const overrides = [];
|
|
793
|
+
const repoRootPath = cwd;
|
|
794
|
+
if (!corepackShimDir) {
|
|
795
|
+
corepackShimDir = await initCorepack({ repoRootPath });
|
|
796
|
+
}
|
|
797
|
+
const diagnostics = {};
|
|
798
|
+
const hasDetectedServices = detectedServices !== void 0 && detectedServices.length > 0;
|
|
799
|
+
const hasWorkerServices = hasDetectedServices && detectedServices.some((s) => s.type === "worker");
|
|
800
|
+
const servicesByBuilderSrc = /* @__PURE__ */ new Map();
|
|
801
|
+
if (hasDetectedServices) {
|
|
802
|
+
for (const service of detectedServices) {
|
|
803
|
+
if (service.builder.src) {
|
|
804
|
+
const existing = servicesByBuilderSrc.get(service.builder.src);
|
|
805
|
+
if (existing) {
|
|
806
|
+
throw new NowBuildError2({
|
|
807
|
+
code: "DUPLICATE_SERVICE_BUILDER_SRC",
|
|
808
|
+
message: `Services "${existing.name}" and "${service.name}" both have the same builder source "${service.builder.src}". Each service must have a unique builder source.`
|
|
809
|
+
});
|
|
810
|
+
}
|
|
811
|
+
servicesByBuilderSrc.set(service.builder.src, service);
|
|
812
|
+
}
|
|
813
|
+
}
|
|
814
|
+
}
|
|
815
|
+
for (const build of sortedBuilders) {
|
|
816
|
+
if (typeof build.src !== "string")
|
|
817
|
+
continue;
|
|
818
|
+
const builderWithPkg = buildersWithPkgs.get(build.use);
|
|
819
|
+
if (!builderWithPkg) {
|
|
820
|
+
throw new Error(`Failed to load Builder "${build.use}"`);
|
|
821
|
+
}
|
|
822
|
+
try {
|
|
823
|
+
const { builder, pkg: builderPkg } = builderWithPkg;
|
|
824
|
+
const service = hasDetectedServices ? servicesByBuilderSrc.get(build.src) : void 0;
|
|
825
|
+
const stripServiceRoutePrefix = !!service?.routePrefix && service.routePrefix !== "/";
|
|
826
|
+
let buildWorkPath = workPath;
|
|
827
|
+
let buildEntrypoint = build.src;
|
|
828
|
+
let buildFiles = filesMap;
|
|
829
|
+
if (service && service.workspace !== ".") {
|
|
830
|
+
const wsPrefix = service.workspace + "/";
|
|
831
|
+
buildWorkPath = join2(workPath, service.workspace);
|
|
832
|
+
buildEntrypoint = build.src.startsWith(wsPrefix) ? build.src.slice(wsPrefix.length) : build.src;
|
|
833
|
+
buildFiles = {};
|
|
834
|
+
for (const [filePath, file] of Object.entries(filesMap)) {
|
|
835
|
+
if (filePath.startsWith(wsPrefix)) {
|
|
836
|
+
buildFiles[filePath.slice(wsPrefix.length)] = file;
|
|
837
|
+
}
|
|
838
|
+
}
|
|
839
|
+
output_manager_default.debug(
|
|
840
|
+
`Service "${service.name}": workspace-rooted build at "${buildWorkPath}", entrypoint "${buildEntrypoint}" (original: "${build.src}")`
|
|
841
|
+
);
|
|
842
|
+
}
|
|
843
|
+
const settingsForEnv = service ? {
|
|
844
|
+
buildCommand: service.buildCommand ?? void 0,
|
|
845
|
+
installCommand: service.installCommand ?? void 0,
|
|
846
|
+
outputDirectory: projectSettings.outputDirectory ?? void 0,
|
|
847
|
+
nodeVersion: projectSettings.nodeVersion ?? void 0
|
|
848
|
+
} : projectSettings;
|
|
849
|
+
for (const key of [
|
|
850
|
+
"buildCommand",
|
|
851
|
+
"installCommand",
|
|
852
|
+
"outputDirectory",
|
|
853
|
+
"nodeVersion"
|
|
854
|
+
]) {
|
|
855
|
+
const value = settingsForEnv[key];
|
|
856
|
+
const envKey = `VERCEL_PROJECT_SETTINGS_` + key.replace(/[A-Z]/g, (letter) => `_${letter}`).toUpperCase();
|
|
857
|
+
if (typeof value === "string") {
|
|
858
|
+
process.env[envKey] = value;
|
|
859
|
+
output_manager_default.debug(`Setting env ${envKey} to "${value}"`);
|
|
860
|
+
} else {
|
|
861
|
+
delete process.env[envKey];
|
|
862
|
+
}
|
|
863
|
+
}
|
|
864
|
+
const isFrontendBuilder = build.config && "framework" in build.config;
|
|
865
|
+
const builderFramework = build.config?.framework ?? projectSettings.framework;
|
|
866
|
+
let buildConfig;
|
|
867
|
+
if (isZeroConfig) {
|
|
868
|
+
if (service) {
|
|
869
|
+
buildConfig = {
|
|
870
|
+
...build.config,
|
|
871
|
+
...hasWorkerServices ? { hasWorkerServices: true } : void 0,
|
|
872
|
+
// Override project-level settings with service-specific ones.
|
|
873
|
+
// The project-level framework is "services" which must NOT be
|
|
874
|
+
// propagated to individual builders.
|
|
875
|
+
projectSettings: {
|
|
876
|
+
...projectSettings,
|
|
877
|
+
framework: service.framework ?? null,
|
|
878
|
+
buildCommand: service.buildCommand ?? null,
|
|
879
|
+
installCommand: service.installCommand ?? null
|
|
880
|
+
},
|
|
881
|
+
installCommand: service.installCommand ?? void 0,
|
|
882
|
+
buildCommand: service.buildCommand ?? void 0,
|
|
883
|
+
framework: builderFramework,
|
|
884
|
+
nodeVersion: projectSettings.nodeVersion,
|
|
885
|
+
bunVersion: localConfig.bunVersion ?? void 0
|
|
886
|
+
};
|
|
887
|
+
} else {
|
|
888
|
+
buildConfig = {
|
|
889
|
+
outputDirectory: projectSettings.outputDirectory ?? void 0,
|
|
890
|
+
...build.config,
|
|
891
|
+
projectSettings,
|
|
892
|
+
installCommand: projectSettings.installCommand ?? void 0,
|
|
893
|
+
devCommand: projectSettings.devCommand ?? void 0,
|
|
894
|
+
buildCommand: projectSettings.buildCommand ?? void 0,
|
|
895
|
+
framework: projectSettings.framework,
|
|
896
|
+
nodeVersion: projectSettings.nodeVersion,
|
|
897
|
+
bunVersion: localConfig.bunVersion ?? void 0
|
|
898
|
+
};
|
|
899
|
+
}
|
|
900
|
+
} else {
|
|
901
|
+
buildConfig = {
|
|
902
|
+
...build.config || {},
|
|
903
|
+
bunVersion: localConfig.bunVersion ?? void 0
|
|
904
|
+
};
|
|
905
|
+
}
|
|
906
|
+
const builderSpan = span.child("vc.builder", {
|
|
907
|
+
"builder.name": builderPkg.name,
|
|
908
|
+
"builder.version": builderPkg.version,
|
|
909
|
+
"builder.dynamicallyInstalled": String(
|
|
910
|
+
builderWithPkg.dynamicallyInstalled
|
|
911
|
+
)
|
|
912
|
+
});
|
|
913
|
+
const serviceRoutePrefix = build.config?.routePrefix;
|
|
914
|
+
const serviceWorkspace = build.config?.workspace;
|
|
915
|
+
const buildOptions = {
|
|
916
|
+
files: buildFiles,
|
|
917
|
+
entrypoint: buildEntrypoint,
|
|
918
|
+
workPath: buildWorkPath,
|
|
919
|
+
repoRootPath,
|
|
920
|
+
config: buildConfig,
|
|
921
|
+
meta,
|
|
922
|
+
span: builderSpan,
|
|
923
|
+
...typeof serviceRoutePrefix === "string" || typeof serviceWorkspace === "string" ? {
|
|
924
|
+
service: {
|
|
925
|
+
routePrefix: typeof serviceRoutePrefix === "string" ? serviceRoutePrefix : void 0,
|
|
926
|
+
workspace: typeof serviceWorkspace === "string" ? serviceWorkspace : void 0
|
|
927
|
+
}
|
|
928
|
+
} : void 0
|
|
929
|
+
};
|
|
930
|
+
output_manager_default.debug(
|
|
931
|
+
`Building entrypoint "${build.src}" with "${builderPkg.name}"`
|
|
932
|
+
);
|
|
933
|
+
let buildResult;
|
|
934
|
+
try {
|
|
935
|
+
buildResult = await builderSpan.trace(
|
|
936
|
+
async () => builder.build(buildOptions)
|
|
937
|
+
);
|
|
938
|
+
if (!hasDetectedServices && buildConfig.zeroConfig && isFrontendBuilder && "output" in buildResult && !buildResult.routes) {
|
|
939
|
+
const framework2 = import_frameworks2.frameworkList.find(
|
|
940
|
+
(f) => f.slug === buildConfig.framework
|
|
941
|
+
);
|
|
942
|
+
if (framework2) {
|
|
943
|
+
const defaultRoutes = await getFrameworkRoutes(
|
|
944
|
+
framework2,
|
|
945
|
+
buildWorkPath
|
|
946
|
+
);
|
|
947
|
+
buildResult.routes = defaultRoutes;
|
|
948
|
+
}
|
|
949
|
+
}
|
|
950
|
+
} finally {
|
|
951
|
+
try {
|
|
952
|
+
const builderDiagnostics = await builderSpan.child("vc.builder.diagnostics").trace(async () => {
|
|
953
|
+
return await builder.diagnostics?.(buildOptions);
|
|
954
|
+
});
|
|
955
|
+
Object.assign(diagnostics, builderDiagnostics);
|
|
956
|
+
} catch (error) {
|
|
957
|
+
output_manager_default.error("Collecting diagnostics failed");
|
|
958
|
+
output_manager_default.debug(error);
|
|
959
|
+
}
|
|
960
|
+
}
|
|
961
|
+
if (buildResult && "output" in buildResult && "runtime" in buildResult.output && "type" in buildResult.output && buildResult.output.type === "Lambda") {
|
|
962
|
+
const lambdaRuntime = buildResult.output.runtime;
|
|
963
|
+
if (getDiscontinuedNodeVersions().some((o) => o.runtime === lambdaRuntime)) {
|
|
964
|
+
throw new NowBuildError2({
|
|
965
|
+
code: "NODEJS_DISCONTINUED_VERSION",
|
|
966
|
+
message: `The Runtime "${build.use}" is using "${lambdaRuntime}", which is discontinued. Please upgrade your Runtime to a more recent version or consult the author for more details.`,
|
|
967
|
+
link: "https://vercel.link/function-runtimes"
|
|
968
|
+
});
|
|
969
|
+
}
|
|
970
|
+
}
|
|
971
|
+
if ("output" in buildResult && buildResult.output && (isBackendBuilder(build) || build.use === "@vercel/python")) {
|
|
972
|
+
const routesJsonPath = join2(buildWorkPath, ".vercel", "routes.json");
|
|
973
|
+
if ((0, import_fs_extra2.existsSync)(routesJsonPath)) {
|
|
974
|
+
try {
|
|
975
|
+
const routesJson = await readJSONFile(routesJsonPath);
|
|
976
|
+
if (routesJson && typeof routesJson === "object" && "routes" in routesJson && Array.isArray(routesJson.routes)) {
|
|
977
|
+
const indexLambda = "index" in buildResult.output ? buildResult.output["index"] : void 0;
|
|
978
|
+
const convertedRoutes = [];
|
|
979
|
+
const convertedOutputs = indexLambda ? { index: indexLambda } : {};
|
|
980
|
+
for (const route of routesJson.routes) {
|
|
981
|
+
if (typeof route.source !== "string") {
|
|
982
|
+
continue;
|
|
983
|
+
}
|
|
984
|
+
const { src } = (0, import_routing_utils2.sourceToRegex)(route.source);
|
|
985
|
+
const newRoute = {
|
|
986
|
+
src,
|
|
987
|
+
dest: route.source
|
|
988
|
+
};
|
|
989
|
+
if (route.methods) {
|
|
990
|
+
newRoute.methods = route.methods;
|
|
991
|
+
}
|
|
992
|
+
if (route.source === "/") {
|
|
993
|
+
continue;
|
|
994
|
+
}
|
|
995
|
+
if (indexLambda) {
|
|
996
|
+
convertedOutputs[route.source] = indexLambda;
|
|
997
|
+
}
|
|
998
|
+
convertedRoutes.push(newRoute);
|
|
999
|
+
}
|
|
1000
|
+
buildResult.routes = [
|
|
1001
|
+
{ handle: "filesystem" },
|
|
1002
|
+
...convertedRoutes,
|
|
1003
|
+
{ src: "/(.*)", dest: "/" }
|
|
1004
|
+
];
|
|
1005
|
+
if (indexLambda) {
|
|
1006
|
+
buildResult.output = convertedOutputs;
|
|
1007
|
+
}
|
|
1008
|
+
}
|
|
1009
|
+
} catch (error) {
|
|
1010
|
+
output_manager_default.error(`Failed to read routes.json: ${error}`);
|
|
1011
|
+
}
|
|
1012
|
+
}
|
|
1013
|
+
}
|
|
1014
|
+
if (hasDetectedServices && service && "routes" in buildResult && Array.isArray(buildResult.routes) && detectedServices) {
|
|
1015
|
+
buildResult.routes = scopeRoutesToServiceOwnership({
|
|
1016
|
+
routes: buildResult.routes,
|
|
1017
|
+
owner: service,
|
|
1018
|
+
allServices: detectedServices
|
|
1019
|
+
});
|
|
1020
|
+
}
|
|
1021
|
+
if (service?.type === "worker" && "output" in buildResult) {
|
|
1022
|
+
attachWorkerServiceTrigger(buildResult.output, service);
|
|
1023
|
+
}
|
|
1024
|
+
let mergedBuildResult = buildResult;
|
|
1025
|
+
if ("buildOutputPath" in buildResult) {
|
|
1026
|
+
const buildOutputConfigPath = join2(
|
|
1027
|
+
buildResult.buildOutputPath,
|
|
1028
|
+
"config.json"
|
|
1029
|
+
);
|
|
1030
|
+
const buildOutputConfig = await readJSONFile(
|
|
1031
|
+
buildOutputConfigPath
|
|
1032
|
+
);
|
|
1033
|
+
if (buildOutputConfig instanceof CantParseJSONFile) {
|
|
1034
|
+
throw buildOutputConfig;
|
|
1035
|
+
}
|
|
1036
|
+
if (buildOutputConfig) {
|
|
1037
|
+
if (buildOutputConfig.overrides) {
|
|
1038
|
+
overrides.push(buildOutputConfig.overrides);
|
|
1039
|
+
}
|
|
1040
|
+
if (hasDetectedServices && service && Array.isArray(buildOutputConfig.routes) && detectedServices) {
|
|
1041
|
+
buildOutputConfig.routes = scopeRoutesToServiceOwnership({
|
|
1042
|
+
routes: buildOutputConfig.routes,
|
|
1043
|
+
owner: service,
|
|
1044
|
+
allServices: detectedServices
|
|
1045
|
+
});
|
|
1046
|
+
}
|
|
1047
|
+
mergedBuildResult = buildOutputConfig;
|
|
1048
|
+
}
|
|
1049
|
+
}
|
|
1050
|
+
buildResults.set(build, mergedBuildResult);
|
|
1051
|
+
let buildOutputLength = 0;
|
|
1052
|
+
if ("output" in buildResult) {
|
|
1053
|
+
buildOutputLength = Array.isArray(buildResult.output) ? buildResult.output.length : 1;
|
|
1054
|
+
}
|
|
1055
|
+
ops.push(
|
|
1056
|
+
builderSpan.child("vc.builder.writeBuildResult", {
|
|
1057
|
+
buildOutputLength: String(buildOutputLength)
|
|
1058
|
+
}).trace(
|
|
1059
|
+
() => writeBuildResult({
|
|
1060
|
+
repoRootPath,
|
|
1061
|
+
outputDir,
|
|
1062
|
+
buildResult,
|
|
1063
|
+
build,
|
|
1064
|
+
builder,
|
|
1065
|
+
builderPkg,
|
|
1066
|
+
vercelConfig: localConfig,
|
|
1067
|
+
standalone,
|
|
1068
|
+
workPath: buildWorkPath,
|
|
1069
|
+
service,
|
|
1070
|
+
stripServiceRoutePrefix
|
|
1071
|
+
})
|
|
1072
|
+
).then(
|
|
1073
|
+
(override) => {
|
|
1074
|
+
if (override)
|
|
1075
|
+
overrides.push(override);
|
|
1076
|
+
},
|
|
1077
|
+
(err) => err
|
|
1078
|
+
)
|
|
1079
|
+
);
|
|
1080
|
+
} catch (err) {
|
|
1081
|
+
const buildJsonBuild = buildsJsonBuilds.get(build);
|
|
1082
|
+
if (buildJsonBuild) {
|
|
1083
|
+
buildJsonBuild.error = toEnumerableError(err);
|
|
1084
|
+
}
|
|
1085
|
+
throw err;
|
|
1086
|
+
} finally {
|
|
1087
|
+
ops.push(
|
|
1088
|
+
download(diagnostics, join2(outputDir, "diagnostics")).then(
|
|
1089
|
+
() => void 0,
|
|
1090
|
+
(err) => err
|
|
1091
|
+
)
|
|
1092
|
+
);
|
|
1093
|
+
}
|
|
1094
|
+
}
|
|
1095
|
+
if (corepackShimDir) {
|
|
1096
|
+
cleanupCorepack(corepackShimDir);
|
|
1097
|
+
}
|
|
1098
|
+
const errors = await Promise.all(ops);
|
|
1099
|
+
for (const error of errors) {
|
|
1100
|
+
if (error) {
|
|
1101
|
+
throw error;
|
|
1102
|
+
}
|
|
1103
|
+
}
|
|
1104
|
+
let needBuildsJsonOverride = false;
|
|
1105
|
+
const speedInsightsVersion = await getInstalledPackageVersion(
|
|
1106
|
+
"@vercel/speed-insights"
|
|
1107
|
+
);
|
|
1108
|
+
if (speedInsightsVersion) {
|
|
1109
|
+
buildsJson.features = {
|
|
1110
|
+
...buildsJson.features ?? {},
|
|
1111
|
+
speedInsightsVersion
|
|
1112
|
+
};
|
|
1113
|
+
needBuildsJsonOverride = true;
|
|
1114
|
+
}
|
|
1115
|
+
const webAnalyticsVersion = await getInstalledPackageVersion("@vercel/analytics");
|
|
1116
|
+
if (webAnalyticsVersion) {
|
|
1117
|
+
buildsJson.features = {
|
|
1118
|
+
...buildsJson.features ?? {},
|
|
1119
|
+
webAnalyticsVersion
|
|
1120
|
+
};
|
|
1121
|
+
needBuildsJsonOverride = true;
|
|
1122
|
+
}
|
|
1123
|
+
if (needBuildsJsonOverride) {
|
|
1124
|
+
await writeBuildJson(buildsJson, outputDir);
|
|
1125
|
+
}
|
|
1126
|
+
const configPath = join2(outputDir, "config.json");
|
|
1127
|
+
const existingConfig = await readJSONFile(configPath);
|
|
1128
|
+
if (existingConfig instanceof CantParseJSONFile) {
|
|
1129
|
+
throw existingConfig;
|
|
1130
|
+
}
|
|
1131
|
+
if (existingConfig) {
|
|
1132
|
+
if ("deploymentId" in existingConfig && typeof existingConfig.deploymentId === "string") {
|
|
1133
|
+
const deploymentId = existingConfig.deploymentId;
|
|
1134
|
+
if (deploymentId.length > 32) {
|
|
1135
|
+
throw new NowBuildError2({
|
|
1136
|
+
code: "INVALID_DEPLOYMENT_ID",
|
|
1137
|
+
message: `The deploymentId "${deploymentId}" must be 32 characters or less. Please choose a shorter deploymentId in your config.`,
|
|
1138
|
+
link: "https://vercel.com/docs/skew-protection#custom-skew-protection-deployment-id"
|
|
1139
|
+
});
|
|
1140
|
+
}
|
|
1141
|
+
if (!VALID_DEPLOYMENT_ID_PATTERN.test(deploymentId)) {
|
|
1142
|
+
throw new NowBuildError2({
|
|
1143
|
+
code: "INVALID_DEPLOYMENT_ID",
|
|
1144
|
+
message: `The deploymentId "${deploymentId}" contains invalid characters. Only alphanumeric characters (a-z, A-Z, 0-9), hyphens (-), and underscores (_) are allowed.`,
|
|
1145
|
+
link: "https://vercel.com/docs/skew-protection#custom-skew-protection-deployment-id"
|
|
1146
|
+
});
|
|
1147
|
+
}
|
|
1148
|
+
}
|
|
1149
|
+
if (existingConfig.overrides) {
|
|
1150
|
+
overrides.push(existingConfig.overrides);
|
|
1151
|
+
}
|
|
1152
|
+
}
|
|
1153
|
+
const builderRoutes = Array.from(
|
|
1154
|
+
buildResults.entries()
|
|
1155
|
+
).filter((b) => "routes" in b[1] && Array.isArray(b[1].routes)).map((b) => {
|
|
1156
|
+
const build = b[0];
|
|
1157
|
+
const buildResult = b[1];
|
|
1158
|
+
let entrypoint = build.src;
|
|
1159
|
+
if (hasDetectedServices && typeof build.src === "string") {
|
|
1160
|
+
const service = servicesByBuilderSrc.get(build.src);
|
|
1161
|
+
if (service && service.type === "web" && typeof service.routePrefix === "string") {
|
|
1162
|
+
entrypoint = getServicesMergeEntrypoint(service, build.src);
|
|
1163
|
+
}
|
|
1164
|
+
}
|
|
1165
|
+
return {
|
|
1166
|
+
use: build.use,
|
|
1167
|
+
entrypoint,
|
|
1168
|
+
routes: buildResult.routes
|
|
1169
|
+
};
|
|
1170
|
+
});
|
|
1171
|
+
if (zeroConfigRoutes.length) {
|
|
1172
|
+
builderRoutes.unshift({
|
|
1173
|
+
use: "@vercel/zero-config-routes",
|
|
1174
|
+
entrypoint: "/",
|
|
1175
|
+
routes: zeroConfigRoutes
|
|
1176
|
+
});
|
|
1177
|
+
}
|
|
1178
|
+
const mergedRoutes = (0, import_routing_utils2.mergeRoutes)({
|
|
1179
|
+
userRoutes: routesResult.routes,
|
|
1180
|
+
builds: builderRoutes
|
|
1181
|
+
});
|
|
1182
|
+
const mergedImages = mergeImages(localConfig.images, buildResults.values());
|
|
1183
|
+
const serviceCrons = getServiceCrons(detectedServices);
|
|
1184
|
+
const mergedCrons = mergeCrons(
|
|
1185
|
+
[...localConfig.crons || [], ...serviceCrons],
|
|
1186
|
+
buildResults.values()
|
|
1187
|
+
);
|
|
1188
|
+
const mergedWildcard = mergeWildcard(buildResults.values());
|
|
1189
|
+
const mergedDeploymentId = await mergeDeploymentId(
|
|
1190
|
+
existingConfig?.deploymentId,
|
|
1191
|
+
buildResults.values(),
|
|
1192
|
+
workPath
|
|
1193
|
+
);
|
|
1194
|
+
if (mergedDeploymentId) {
|
|
1195
|
+
if (mergedDeploymentId.length > 32) {
|
|
1196
|
+
throw new NowBuildError2({
|
|
1197
|
+
code: "INVALID_DEPLOYMENT_ID",
|
|
1198
|
+
message: `The deploymentId "${mergedDeploymentId}" must be 32 characters or less. Please choose a shorter deploymentId in your config.`,
|
|
1199
|
+
link: "https://vercel.com/docs/skew-protection#custom-skew-protection-deployment-id"
|
|
1200
|
+
});
|
|
1201
|
+
}
|
|
1202
|
+
if (!VALID_DEPLOYMENT_ID_PATTERN.test(mergedDeploymentId)) {
|
|
1203
|
+
throw new NowBuildError2({
|
|
1204
|
+
code: "INVALID_DEPLOYMENT_ID",
|
|
1205
|
+
message: `The deploymentId "${mergedDeploymentId}" contains invalid characters. Only alphanumeric characters (a-z, A-Z, 0-9), hyphens (-), and underscores (_) are allowed.`,
|
|
1206
|
+
link: "https://vercel.com/docs/skew-protection#custom-skew-protection-deployment-id"
|
|
1207
|
+
});
|
|
1208
|
+
}
|
|
1209
|
+
}
|
|
1210
|
+
const mergedOverrides = overrides.length > 0 ? Object.assign({}, ...overrides) : void 0;
|
|
1211
|
+
const framework = await getFramework(workPath, buildResults);
|
|
1212
|
+
const config = {
|
|
1213
|
+
version: 3,
|
|
1214
|
+
routes: mergedRoutes,
|
|
1215
|
+
images: mergedImages,
|
|
1216
|
+
wildcard: mergedWildcard,
|
|
1217
|
+
overrides: mergedOverrides,
|
|
1218
|
+
framework,
|
|
1219
|
+
crons: mergedCrons,
|
|
1220
|
+
...detectedServices && detectedServices.length > 0 && { services: detectedServices },
|
|
1221
|
+
...mergedDeploymentId && { deploymentId: mergedDeploymentId }
|
|
1222
|
+
};
|
|
1223
|
+
await import_fs_extra2.default.writeJSON(join2(outputDir, "config.json"), config, { spaces: 2 });
|
|
1224
|
+
await writeFlagsJSON(buildResults.values(), outputDir);
|
|
1225
|
+
const relOutputDir = relative2(cwd, outputDir);
|
|
1226
|
+
output_manager_default.print(
|
|
1227
|
+
`${prependEmoji(
|
|
1228
|
+
`Build Completed in ${import_chalk.default.bold(
|
|
1229
|
+
relOutputDir.startsWith("..") ? outputDir : relOutputDir
|
|
1230
|
+
)} ${import_chalk.default.gray(buildStamp())}`,
|
|
1231
|
+
emoji("success")
|
|
1232
|
+
)}
|
|
1233
|
+
`
|
|
1234
|
+
);
|
|
1235
|
+
if (process.env.VERCEL_ANALYZE_BUILD_OUTPUT === "1") {
|
|
1236
|
+
await analyzeVcConfigFiles(cwd, outputDir);
|
|
1237
|
+
}
|
|
1238
|
+
}
|
|
1239
|
+
// Derives a function's public URL path from the location of its
// `.vc-config.json` inside the Build Output directory: strips the leading
// "functions/" prefix, the trailing config filename, and the ".func"
// suffix, then drops empty and "index" segments.
function getFunctionUrlPath(vcConfigPath, outputDir) {
  const relativeConfigPath = normalizePath(relative2(outputDir, vcConfigPath));
  const funcId = relativeConfigPath
    .replace(/^functions\//, "")
    .replace(/\/\.vc-config\.json$/, "")
    .replace(/\.func$/, "");
  const segments = funcId.split("/").filter((segment) => segment && segment !== "index");
  return `/${segments.join("/")}`;
}
|
|
1243
|
+
// Maximum uncompressed size (in MB) allowed for a single serverless
// function bundle; functions above this threshold fail the analysis step.
var LAMBDA_SIZE_LIMIT_MB = 250;
+
// Prints the largest dependency groups found in a function bundle.
// Bundle paths are grouped by their first three path segments (which
// approximates a package name under node_modules), the ten biggest groups
// are selected, and entries smaller than 0.5 MB are hidden from the report.
function printFileSizeBreakdown(files) {
  const sizeByGroup = new Map();
  for (const [bundlePath, sizeMB] of files.entries()) {
    // Group by the first three path segments to approximate a dependency.
    const groupKey = bundlePath.split("/").slice(0, 3).join("/");
    const previousTotal = sizeByGroup.get(groupKey) || 0;
    sizeByGroup.set(groupKey, previousTotal + sizeMB);
  }
  const topGroups = [...sizeByGroup.entries()]
    .sort((left, right) => right[1] - left[1])
    .slice(0, 10);
  if (topGroups.length > 0) {
    output_manager_default.print(import_chalk.default.yellow(" Large dependencies:\n"));
    for (const [groupName, groupSize] of topGroups) {
      // Hide entries below half a megabyte to keep the report focused.
      if (groupSize >= 0.5) {
        output_manager_default.print(
          ` ${import_chalk.default.gray("\u2022")} ${groupName}: ${import_chalk.default.bold(groupSize.toFixed(2))} MB
`
        );
      }
    }
    output_manager_default.print("\n");
  }
}
|
|
1264
|
+
// Analyzes every serverless function emitted into the Build Output
// directory and prints a size report. Functions whose uncompressed size
// exceeds LAMBDA_SIZE_LIMIT_MB are listed with a per-dependency breakdown,
// and a NowBuildError is thrown when at least one function is over the
// limit. Invoked when VERCEL_ANALYZE_BUILD_OUTPUT=1 (see caller above).
async function analyzeVcConfigFiles(cwd, outputDir) {
  const filesObject = await glob("**/.vc-config.json", {
    cwd: outputDir
  });
  // Skip ".rsc.func" configs — presumably RSC variants sharing the same
  // underlying function, which would otherwise be double-counted.
  // TODO(review): confirm against the builder that emits them.
  const vcConfigFiles = Object.keys(filesObject).filter((relativePath) => !relativePath.includes(".rsc.func")).map((relativePath) => join2(outputDir, relativePath));
  if (vcConfigFiles.length === 0) {
    output_manager_default.print("No functions to analyze.\n");
    return;
  }
  output_manager_default.print(
    `
Analyzing ${vcConfigFiles.length} function${vcConfigFiles.length === 1 ? "" : "s"}...
`
  );
  // Measure all functions concurrently; analyzeSingleFunction returns
  // null for any function it fails to analyze.
  const results = await Promise.all(
    vcConfigFiles.map((file) => analyzeSingleFunction(file, cwd, outputDir))
  );
  const validResults = results.filter(
    (r) => r !== null
  );
  // Largest functions first.
  const sortedResults = validResults.sort((a, b) => b.size - a.size);
  const exceededFunctions = sortedResults.filter(
    (r) => r.size > LAMBDA_SIZE_LIMIT_MB
  );
  const normalFunctions = sortedResults.filter(
    (r) => r.size <= LAMBDA_SIZE_LIMIT_MB
  );
  // NOTE: the detailed report (and the terminating build error) is only
  // produced when at least one function exceeds the limit; when everything
  // fits, nothing further is printed.
  if (exceededFunctions.length > 0) {
    output_manager_default.print(
      `${import_chalk.default.red.bold(`\u26A0\uFE0F Max serverless function size of ${LAMBDA_SIZE_LIMIT_MB} MB uncompressed reached`)}

`
    );
    for (const result of exceededFunctions) {
      output_manager_default.print(
        `${import_chalk.default.red("Function :")} ${import_chalk.default.red.bold(result.path)}
${import_chalk.default.red("Size :")} ${import_chalk.default.red.bold(result.size.toFixed(2))} MB
`
      );
      printFileSizeBreakdown(result.files);
      output_manager_default.print("\n");
    }
    if (normalFunctions.length > 0) {
      output_manager_default.print(import_chalk.default.cyan(`Other functions:
`));
      for (const result of normalFunctions) {
        output_manager_default.print(
          `${import_chalk.default.cyan(result.path)}: ${import_chalk.default.bold(result.size.toFixed(2))} MB
`
        );
      }
    }
    // Fail the build: at least one function cannot be deployed at this size.
    throw new NowBuildError2({
      code: "NOW_SANDBOX_WORKER_MAX_LAMBDA_SIZE",
      message: `${exceededFunctions.length} function${exceededFunctions.length === 1 ? "" : "s"} exceeded the uncompressed maximum size of ${LAMBDA_SIZE_LIMIT_MB} MB.`,
      link: "https://vercel.link/serverless-function-size",
      action: "Learn More"
    });
  }
}
|
|
1324
|
+
// Measures the uncompressed size of one serverless function.
//
// Reads the function's `.vc-config.json`, sums the on-disk size of the
// function directory plus any files referenced through `filePathMap`
// (whose values are resolved relative to `cwd`), and derives the
// function's URL path. Returns `{ path, size, files }` where `files`
// maps bundle paths to sizes in MB, or `null` when analysis fails.
async function analyzeSingleFunction(file, cwd, outputDir) {
  try {
    const rawConfig = await import_fs_extra2.default.readFile(file, "utf8");
    const vcConfig = JSON.parse(rawConfig);
    const functionDir = dirname(file);
    const directoryStats = getDirectorySizeInMB(functionDir);
    // Collect external file references (bundle path -> absolute source
    // path), ignoring entries whose mapped value is not a string.
    const referencedFiles = [];
    if (vcConfig.filePathMap && typeof vcConfig.filePathMap === "object") {
      for (const [bundlePath, sourcePath] of Object.entries(vcConfig.filePathMap)) {
        if (typeof sourcePath === "string") {
          referencedFiles.push({ bundlePath, sourcePath: join2(cwd, sourcePath) });
        }
      }
    }
    const referencedStats = getTotalFileSizeInMB(referencedFiles);
    return {
      path: getFunctionUrlPath(file, outputDir),
      size: directoryStats.size + referencedStats.size,
      files: new Map([...directoryStats.files, ...referencedStats.files])
    };
  } catch (error) {
    // Best-effort analysis: warn and let the caller skip this function.
    output_manager_default.warn(`Failed to analyze ${file}: ${error}`);
    return null;
  }
}
|
|
1350
|
+
// Sums the sizes of externally referenced files (entries from a function's
// `filePathMap`). Returns the total size in MB together with a map of
// bundle path -> size (MB). Sources that are missing, unreadable, or not
// regular files are silently skipped.
function getTotalFileSizeInMB(files) {
  const sizeByBundlePath = new Map();
  let totalMB = 0;
  for (const { bundlePath, sourcePath } of files) {
    try {
      const stats = statSync(sourcePath);
      if (stats.isFile()) {
        const sizeMB = stats.size / (1024 * 1024);
        totalMB += sizeMB;
        sizeByBundlePath.set(bundlePath, sizeMB);
      }
    } catch {
      // Source file does not exist or cannot be stat'ed; skip it.
    }
  }
  return { size: totalMB, files: sizeByBundlePath };
}
|
|
1366
|
+
/**
 * Recursively sum the sizes (in MB) of all regular files beneath `dir`.
 *
 * @param {string} dir - directory to measure
 * @returns {{ size: number, files: Map<string, number> }} total size plus a
 *   map of normalized relative paths to per-file sizes in MB.
 * Unreadable entries are skipped; an unreadable root yields empty results.
 */
function getDirectorySizeInMB(dir) {
  let totalMB = 0;
  const sizeByPath = new Map();
  try {
    for (const entry of readdirSync(dir, { recursive: true })) {
      // recursive readdir may yield strings or Buffers depending on options
      const relPath = typeof entry === "string" ? entry : entry.toString();
      try {
        const stats = statSync(join2(dir, relPath));
        if (!stats.isFile()) {
          continue; // directories and other entry kinds don't count
        }
        const fileSizeMB = stats.size / (1024 * 1024);
        totalMB += fileSizeMB;
        sizeByPath.set(normalizePath(relPath), fileSizeMB);
      } catch {
        // entry disappeared or is unreadable — skip it
      }
    }
  } catch {
    // directory itself unreadable — return whatever was accumulated
  }
  return { size: totalMB, files: sizeByPath };
}
|
|
1388
|
+
/**
 * Resolve framework metadata for the project at `cwd`.
 *
 * Resolution order:
 *  1. For runtime-backed frameworks, the `framework` field reported by the
 *     build result whose builder matches the detected runtime.
 *  2. The version detected directly from the project, when it is valid semver.
 *  3. The version derived via detectFrameworkVersion, when present.
 * Resolves undefined when no framework is detected or no source applies.
 *
 * @param {string} cwd - project root to inspect
 * @param {Map} buildResults - map of build spec -> build result
 */
async function getFramework(cwd, buildResults) {
  const record = await (0, import_fs_detectors2.detectFrameworkRecord)({
    fs: new import_fs_detectors2.LocalFileSystemDetector(cwd),
    frameworkList: import_frameworks2.frameworkList
  });
  if (!record) {
    return;
  }
  if (record.useRuntime) {
    // A runtime-backed framework reports its own metadata on the build
    // result produced by the matching builder.
    for (const [build, result] of buildResults.entries()) {
      if ("framework" in result && build.use === record.useRuntime.use) {
        return result.framework;
      }
    }
  }
  // Prefer the version detected from the project itself, when valid semver.
  const detected = record.detectedVersion;
  if (detected && import_semver.default.valid(detected)) {
    return { version: detected };
  }
  const fallbackVersion = (0, import_fs_detectors2.detectFrameworkVersion)(record);
  if (fallbackVersion) {
    return { version: fallbackVersion };
  }
}
|
|
1417
|
+
/**
 * Expand a single build specification into one concrete build per matching
 * file. `src` is normalized to forward slashes and matched against each file
 * name either literally or as a minimatch glob (dot files included).
 *
 * @param {string[]} files - candidate file names (posix-style, relative)
 * @param {object} build - build spec; `use` is required, `src` defaults to "**"
 * @returns {object[]} one build per matched file, with `src` replaced
 * @throws {NowBuildError2} when `use` is missing or `src` resolves empty
 */
function expandBuild(files, build) {
  if (!build.use) {
    throw new NowBuildError2({
      code: `invalid_build_specification`,
      message: "Field `use` is missing in build specification",
      link: "https://vercel.com/docs/concepts/projects/project-configuration#builds",
      action: "View Documentation"
    });
  }
  // Normalize the pattern to forward slashes regardless of host OS.
  let src = normalize(build.src || "**").split(sep).join("/");
  if (src === "." || src === "./") {
    throw new NowBuildError2({
      code: `invalid_build_specification`,
      message: "A build `src` path resolves to an empty string",
      link: "https://vercel.com/docs/concepts/projects/project-configuration#builds",
      action: "View Documentation"
    });
  }
  if (src.startsWith("/")) {
    src = src.slice(1); // patterns are matched relative, strip leading slash
  }
  return files
    .filter((name) => name === src || (0, import_minimatch.default)(name, src, { dot: true }))
    .map((name) => ({ ...build, src: name }));
}
|
|
1448
|
+
/**
 * Overlay `images` configuration contributed by build results onto the base
 * config. Later results win on key conflicts; inputs are never mutated.
 *
 * @param {object|undefined} images - base images config
 * @param {Iterable<object>} buildResults - build results to scan
 * @returns {object|undefined} merged config (or the base unchanged when no
 *   result carries a truthy `images`)
 */
function mergeImages(images, buildResults) {
  let merged = images;
  for (const result of buildResults) {
    if (!("images" in result) || !result.images) {
      continue;
    }
    merged = { ...merged, ...result.images };
  }
  return merged;
}
|
|
1456
|
+
/**
 * Collect cron route entries for every cron-type service.
 * Services whose type is not "cron" or whose `schedule` is not a string
 * are ignored.
 *
 * NOTE(review): assumes `service.builder` exists on cron services — a cron
 * service without `builder` would throw here; confirm upstream guarantees.
 *
 * @param {Array<object>|undefined} services
 * @returns {Array<{path: string, schedule: string}>}
 */
function getServiceCrons(services) {
  if (!services || services.length === 0) {
    return [];
  }
  return services
    .filter((svc) => svc.type === "cron" && typeof svc.schedule === "string")
    .map((svc) => ({
      path: (0, import_fs_detectors2.getInternalServiceCronPath)(
        svc.name,
        // entrypoint precedence: explicit entrypoint, builder src, "index"
        svc.entrypoint || svc.builder.src || "index"
      ),
      schedule: svc.schedule
    }));
}
|
|
1473
|
+
/**
 * Append cron definitions contributed by build results to the base list.
 * The input array is never mutated.
 *
 * @param {Array} [crons] - base cron definitions (defaults to empty)
 * @param {Iterable<object>} buildResults - build results to scan
 * @returns {Array} combined cron definitions in encounter order
 */
function mergeCrons(crons = [], buildResults) {
  let combined = crons;
  for (const result of buildResults) {
    if (!("crons" in result) || !result.crons) {
      continue;
    }
    combined = combined.concat(result.crons);
  }
  return combined;
}
|
|
1481
|
+
/**
 * Merge `wildcard` entries across build results.
 *
 * @param {Iterable<object>} buildResults
 * @returns {Array|undefined} a flat array of all entries in encounter order,
 *   or undefined when no result carries a truthy `wildcard` (callers rely on
 *   the undefined-vs-empty distinction).
 */
function mergeWildcard(buildResults) {
  let merged;
  for (const result of buildResults) {
    if (!("wildcard" in result) || !result.wildcard) {
      continue;
    }
    merged = [...(merged ?? []), ...result.wildcard];
  }
  return merged;
}
|
|
1492
|
+
/**
 * Determine the deployment id, preferring (in order):
 *  1. the id passed in by the caller,
 *  2. the first truthy `deploymentId` on a build result,
 *  3. the `deploymentId` recorded in Next.js' .next/routes-manifest.json.
 * Resolves undefined when none is found; manifest read failures are treated
 * as "not found" (best-effort).
 *
 * @param {string|undefined} existingDeploymentId
 * @param {Iterable<object>} buildResults
 * @param {string} workPath - directory that may contain a .next build
 */
async function mergeDeploymentId(existingDeploymentId, buildResults, workPath) {
  if (existingDeploymentId) {
    return existingDeploymentId;
  }
  for (const result of buildResults) {
    if ("deploymentId" in result && result.deploymentId) {
      return result.deploymentId;
    }
  }
  // Fall back to the id Next.js records in its routes manifest.
  try {
    const manifestPath = join2(workPath, ".next", "routes-manifest.json");
    const exists = await import_fs_extra2.default.pathExists(manifestPath);
    if (!exists) {
      return void 0;
    }
    const manifest = await readJSONFile(manifestPath);
    if (manifest && !(manifest instanceof CantParseJSONFile) && manifest.deploymentId) {
      return manifest.deploymentId;
    }
  } catch {
    // best-effort: ignore manifest read failures
  }
  return void 0;
}
|
|
1517
|
+
/**
 * Merge feature-flag definitions from all build results into
 * `<outputDir>/flags.json`. An existing file is read and extended; a missing
 * file starts from empty definitions and is only written when at least one
 * definition is collected. First occurrence of a flag key wins; later
 * duplicates are skipped with a warning.
 *
 * @param {Iterable<object>} buildResults - build results to scan for `flags`
 * @param {string} outputDir - directory containing flags.json
 * @throws re-throws any read error other than ENOENT
 */
async function writeFlagsJSON(buildResults, outputDir) {
  const flagsFilePath = join2(outputDir, "flags.json");
  let hasFlags = true;
  const flags = await import_fs_extra2.default.readJSON(flagsFilePath).catch((error) => {
    if (error.code === "ENOENT") {
      hasFlags = false; // nothing on disk yet; only write if we collect flags
      return { definitions: {} };
    }
    throw error;
  });
  for (const result of buildResults) {
    if (!("flags" in result) || !result.flags || !result.flags.definitions)
      continue;
    for (const [key, definition] of Object.entries(result.flags.definitions)) {
      // BUG FIX: the duplicate check must consult the ACCUMULATED map
      // (`flags.definitions`), not `result.flags.definitions` — the latter
      // trivially contains every key being iterated, so every definition was
      // warned about and skipped, and nothing was ever merged.
      if (flags.definitions[key]) {
        output_manager_default.warn(
          `The flag "${key}" was found multiple times. Only its first occurrence will be considered.`
        );
        continue;
      }
      hasFlags = true;
      flags.definitions[key] = definition;
    }
  }
  if (hasFlags) {
    await import_fs_extra2.default.writeJSON(flagsFilePath, flags, { spaces: 2 });
  }
}
|
|
1545
|
+
/**
 * Persist the builds manifest to `<outputDir>/builds.json`,
 * pretty-printed with two-space indentation.
 *
 * @param {object} buildsJson - manifest contents to serialize
 * @param {string} outputDir - target directory
 */
async function writeBuildJson(buildsJson, outputDir) {
  const target = join2(outputDir, "builds.json");
  await import_fs_extra2.default.writeJSON(target, buildsJson, { spaces: 2 });
}
|
|
1548
|
+
/**
 * Resolve a framework's default routes. `defaultRoutes` may be a (possibly
 * async) factory taking the directory prefix, a plain array, or absent —
 * in which case an empty list is returned.
 *
 * @param {object} framework - framework record with optional `defaultRoutes`
 * @param {string} dirPrefix - passed through to a factory-style defaultRoutes
 * @returns {Promise<Array>} resolved routes (never undefined)
 */
async function getFrameworkRoutes(framework, dirPrefix) {
  const { defaultRoutes } = framework;
  if (typeof defaultRoutes === "function") {
    return await defaultRoutes(dirPrefix);
  }
  if (Array.isArray(defaultRoutes)) {
    return defaultRoutes;
  }
  return [];
}
|
|
1557
|
+
/**
 * Canonicalize a service route prefix: ensure exactly one leading slash and
 * no trailing slash — except for the bare root, which stays "/".
 *
 * @param {string} routePrefix - raw prefix, with or without slashes
 * @returns {string} normalized prefix
 */
function normalizeServiceRoutePrefix(routePrefix) {
  let prefix = routePrefix;
  if (!prefix.startsWith("/")) {
    prefix = `/${prefix}`;
  }
  const hasTrailingSlash = prefix.endsWith("/") && prefix !== "/";
  return hasTrailingSlash ? prefix.slice(0, -1) : prefix;
}
|
|
1564
|
+
/**
 * Build the synthetic entrypoint key used when merging service builds.
 * The zero-padded sort key encodes 10000 minus the normalized prefix length,
 * so longer (more specific) route prefixes sort first lexicographically.
 *
 * @param {object} service - service record (`routePrefix` optional, `name` used)
 * @param {string} buildSrc - source path of the service's build
 * @returns {string} "svc:<sortKey>:<prefix>:<name>:<buildSrc>"
 */
function getServicesMergeEntrypoint(service, buildSrc) {
  const rawPrefix = typeof service.routePrefix === "string" ? service.routePrefix : "/";
  const prefix = normalizeServiceRoutePrefix(rawPrefix);
  const sortKey = String(1e4 - prefix.length).padStart(5, "0");
  return `svc:${sortKey}:${prefix}:${service.name}:${buildSrc}`;
}
|
|
1570
|
+
/**
 * Attach a queue trigger for a worker service to the relevant lambda
 * output(s). `buildOutput` may be a single Lambda or a map of named outputs,
 * in which case every Lambda value receives the trigger.
 *
 * @param {object} buildOutput - Lambda or { name: output } record
 * @param {object} service - worker service; `topic`/`consumer` default to "default"
 */
function attachWorkerServiceTrigger(buildOutput, service) {
  const trigger = {
    type: "queue/v1beta",
    topic: service.topic || "default",
    consumer: service.consumer || "default"
  };
  if (isLambda(buildOutput)) {
    appendWorkerTrigger(buildOutput, trigger);
    return;
  }
  // Map-of-outputs case: only Lambda values get the trigger.
  Object.values(buildOutput).forEach((output) => {
    if (isLambda(output)) {
      appendWorkerTrigger(output, trigger);
    }
  });
}
|
|
1586
|
+
/**
 * Idempotently append `trigger` to a lambda's `experimentalTriggers`.
 * A trigger is a duplicate when type, topic, and consumer all match.
 * The triggers array is replaced (not mutated in place).
 *
 * @param {object} lambda - lambda output to modify
 * @param {{type: string, topic: string, consumer: string}} trigger
 */
function appendWorkerTrigger(lambda, trigger) {
  const current = Array.isArray(lambda.experimentalTriggers)
    ? lambda.experimentalTriggers
    : [];
  const isDuplicate = (existing) =>
    existing.type === trigger.type &&
    existing.topic === trigger.topic &&
    existing.consumer === trigger.consumer;
  if (current.some(isDuplicate)) {
    return;
  }
  lambda.experimentalTriggers = [...current, trigger];
}
|
|
1595
|
+
export {
|
|
1596
|
+
main as default
|
|
1597
|
+
};
|