@vercel/build-utils 2.12.3-canary.37 → 2.12.3-canary.40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/convert-runtime-to-plugin.js +68 -61
- package/dist/index.js +74 -132
- package/package.json +3 -3
package/dist/convert-runtime-to-plugin.js
CHANGED
@@ -70,7 +70,8 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 }
 const pages = {};
 const pluginName = packageName.replace('vercel-plugin-', '');
-const
+const outputPath = path_1.join(workPath, '.output');
+const traceDir = path_1.join(outputPath, `inputs`,
 // Legacy Runtimes can only provide API Routes, so that's
 // why we can use this prefix for all of them. Here, we have to
 // make sure to not use a cryptic hash name, because people
@@ -78,9 +79,7 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 `api-routes-${pluginName}`);
 await fs_extra_1.default.ensureDir(traceDir);
 let newPathsRuntime = new Set();
-
-const entryDir = path_1.join('.output', 'server', 'pages');
-const entryRoot = path_1.join(workPath, entryDir);
+const entryRoot = path_1.join(outputPath, 'server', 'pages');
 for (const entrypoint of Object.keys(entrypoints)) {
 const { output } = await buildRuntime({
 files: sourceFilesPreBuild,
@@ -91,6 +90,7 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 },
 meta: {
 avoidTopLevelInstall: true,
+skipDownload: true,
 },
 });
 // Legacy Runtimes tend to pollute the `workPath` with compiled results,
@@ -113,6 +113,7 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 }
 let handlerFileBase = output.handler;
 let handlerFile = lambdaFiles[handlerFileBase];
+let handlerHasImport = false;
 const { handler } = output;
 const handlerMethod = handler.split('.').pop();
 const handlerFileName = handler.replace(`.${handlerMethod}`, '');
@@ -124,6 +125,7 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 if (!handlerFile) {
 handlerFileBase = handlerFileName + ext;
 handlerFile = lambdaFiles[handlerFileBase];
+handlerHasImport = true;
 }
 if (!handlerFile || !handlerFile.fsPath) {
 throw new Error(`Could not find a handler file. Please ensure that \`files\` for the returned \`Lambda\` contains an \`FileFsRef\` named "${handlerFileBase}" with a valid \`fsPath\`.`);
@@ -138,6 +140,53 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 // one, so linking would end with a broken reference.
 await fs_extra_1.default.ensureDir(path_1.dirname(entry));
 await fs_extra_1.default.copy(handlerFile.fsPath, entry);
+// For compiled languages, the launcher file will be binary and therefore
+// won't try to import a user-provided request handler (instead, it will
+// contain it). But for interpreted languages, the launcher might try to
+// load a user-provided request handler from the source file instead of bundling
+// it, so we have to adjust the import statement inside the launcher to point
+// to the respective source file. Previously, Legacy Runtimes simply expected
+// the user-provided request-handler to be copied right next to the launcher,
+// but with the new File System API, files won't be moved around unnecessarily.
+if (handlerHasImport) {
+const { fsPath } = handlerFile;
+const encoding = 'utf-8';
+// This is the true directory of the user-provided request handler in the
+// source files, so that's what we will use as an import path in the launcher.
+const locationPrefix = path_1.relative(entry, outputPath);
+let handlerContent = await fs_extra_1.default.readFile(fsPath, encoding);
+const importPaths = [
+// This is the full entrypoint path, like `./api/test.py`
+`./${entrypoint}`,
+// This is the entrypoint path without extension, like `api/test`
+entrypoint.slice(0, -ext.length),
+];
+// Generate a list of regular expressions that we can use for
+// finding matches, but only allow matches if the import path is
+// wrapped inside single (') or double quotes (").
+const patterns = importPaths.map(path => {
+// eslint-disable-next-line no-useless-escape
+return new RegExp(`('|")(${path.replace(/\./g, '\\.')})('|")`, 'g');
+});
+let replacedMatch = null;
+for (const pattern of patterns) {
+const newContent = handlerContent.replace(pattern, (_, p1, p2, p3) => {
+return `${p1}${path_1.join(locationPrefix, p2)}${p3}`;
+});
+if (newContent !== handlerContent) {
+_1.debug(`Replaced "${pattern}" inside "${entry}" to ensure correct import of user-provided request handler`);
+handlerContent = newContent;
+replacedMatch = true;
+}
+}
+if (!replacedMatch) {
+new Error(`No replacable matches for "${importPaths[0]}" or "${importPaths[1]}" found in "${fsPath}"`);
+}
+await fs_extra_1.default.writeFile(entry, handlerContent, encoding);
+}
+else {
+await fs_extra_1.default.copy(handlerFile.fsPath, entry);
+}
 const newFilesEntrypoint = [];
 const newDirectoriesEntrypoint = [];
 const preBuildFiles = Object.values(sourceFilesPreBuild).map(file => {
@@ -184,56 +233,22 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 }
 }
 }
-const tracedFiles = [];
-const linkers = Object.entries(lambdaFiles).map(async ([relPath, file]) => {
-const newPath = path_1.join(traceDir, relPath);
-// The handler was already moved into position above.
-if (relPath === handlerFileBase) {
-return;
-}
-tracedFiles.push({ absolutePath: newPath, relativePath: relPath });
-const { fsPath, type } = file;
-if (fsPath) {
-await fs_extra_1.default.ensureDir(path_1.dirname(newPath));
-const isNewFile = newFilesEntrypoint.includes(fsPath);
-const isInsideNewDirectory = newDirectoriesEntrypoint.some(dirPath => {
-return fsPath.startsWith(dirPath);
-});
-// With this, we're making sure that files in the `workPath` that existed
-// before the Legacy Runtime was invoked (source files) are linked from
-// `.output` instead of copying there (the latter only happens if linking fails),
-// which is the fastest solution. However, files that are created fresh
-// by the Legacy Runtimes are always copied, because their link destinations
-// are likely to be overwritten every time an entrypoint is processed by
-// the Legacy Runtime. This is likely to overwrite the destination on subsequent
-// runs, but that's also how `workPath` used to work originally, without
-// the File System API (meaning that there was one `workPath` for all entrypoints).
-if (isNewFile || isInsideNewDirectory) {
-_1.debug(`Copying from ${fsPath} to ${newPath}`);
-await fs_extra_1.default.copy(fsPath, newPath);
-}
-else {
-await linkOrCopy(fsPath, newPath);
-}
-}
-else if (type === 'FileBlob') {
-const { data, mode } = file;
-await fs_extra_1.default.writeFile(newPath, data, { mode });
-}
-else {
-throw new Error(`Unknown file type: ${type}`);
-}
-});
-linkersRuntime = linkersRuntime.concat(linkers);
 const nft = `${entry}.nft.json`;
 const json = JSON.stringify({
-version:
-files:
-
-
-
-
-
+version: 2,
+files: Object.keys(lambdaFiles)
+.map(file => {
+const { fsPath } = lambdaFiles[file];
+if (!fsPath) {
+throw new Error(`File "${file}" is missing valid \`fsPath\` property`);
+}
+// The handler was already moved into position above.
+if (file === handlerFileBase) {
+return;
+}
+return normalize_path_1.normalizePath(path_1.relative(path_1.dirname(nft), fsPath));
+})
+.filter(Boolean),
 });
 await fs_extra_1.default.ensureDir(path_1.dirname(nft));
 await fs_extra_1.default.writeFile(nft, json);
@@ -260,11 +275,6 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 allowQuery: output.allowQuery,
 };
 }
-// Instead of of waiting for all of the linking to be done for every
-// entrypoint before processing the next one, we immediately handle all
-// of them one after the other, while then waiting for the linking
-// to finish right here, before we clean up newly created files below.
-await Promise.all(linkersRuntime);
 // A list of all the files that were created by the Legacy Runtime,
 // which we'd like to remove from the File System.
 const toRemove = Array.from(newPathsRuntime).map(path => {
@@ -283,9 +293,6 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 };
 }
 exports.convertRuntimeToPlugin = convertRuntimeToPlugin;
-async function linkOrCopy(existingPath, newPath) {
-await fs_extra_1.default.copyFile(existingPath, newPath);
-}
 async function readJson(filePath) {
 try {
 const str = await fs_extra_1.default.readFile(filePath, 'utf8');
@@ -306,7 +313,7 @@ async function updateFunctionsManifest({ workPath, pages, }) {
 const functionsManifestPath = path_1.join(workPath, '.output', 'functions-manifest.json');
 const functionsManifest = await readJson(functionsManifestPath);
 if (!functionsManifest.version)
-functionsManifest.version =
+functionsManifest.version = 2;
 if (!functionsManifest.pages)
 functionsManifest.pages = {};
 for (const [pageKey, pageConfig] of Object.entries(pages)) {
package/dist/index.js
CHANGED
@@ -26693,7 +26693,6 @@ exports.frameworks = [
 },
 dependency: 'gatsby',
 getOutputDirName: async () => 'public',
-getFsOutputDir: async () => 'public',
 defaultRoutes: async (dirPrefix) => {
 // This file could be generated by gatsby-plugin-now or gatsby-plugin-zeit-now
 try {
@@ -26774,7 +26773,6 @@ exports.frameworks = [
 },
 },
 dependency: 'remix',
-getFsOutputDir: async () => 'public',
 getOutputDirName: async () => 'public',
 defaultRoutes: [
 {
@@ -26802,10 +26800,13 @@ exports.frameworks = [
 source: '/build/(.*)',
 regex: '/build/(.*)',
 headers: [
-{
+{
+key: 'cache-control',
+value: 'public, max-age=31536000, immutable',
+},
 ],
 },
-]
+],
 },
 {
 name: 'Hexo',
@@ -26840,7 +26841,6 @@ exports.frameworks = [
 },
 },
 dependency: 'hexo',
-getFsOutputDir: async () => 'public',
 getOutputDirName: async () => 'public',
 },
 {
@@ -26876,7 +26876,6 @@ exports.frameworks = [
 },
 },
 dependency: '@11ty/eleventy',
-getFsOutputDir: async () => '_site',
 getOutputDirName: async () => '_site',
 cachePattern: '.cache/**',
 },
@@ -26913,21 +26912,6 @@ exports.frameworks = [
 },
 },
 dependency: '@docusaurus/core',
-getFsOutputDir: async (dirPrefix) => {
-const base = 'build';
-try {
-const location = path_1.join(dirPrefix, base);
-const content = await readdir(location, { withFileTypes: true });
-// If there is only one file in it that is a dir we'll use it as dist dir
-if (content.length === 1 && content[0].isDirectory()) {
-return path_1.join(base, content[0].name);
-}
-}
-catch (error) {
-console.error(`Error detecting output directory: `, error);
-}
-return base;
-},
 getOutputDirName: async (dirPrefix) => {
 const base = 'build';
 try {
@@ -27057,21 +27041,6 @@ exports.frameworks = [
 },
 },
 dependency: 'docusaurus',
-getFsOutputDir: async (dirPrefix) => {
-const base = 'build';
-try {
-const location = path_1.join(dirPrefix, base);
-const content = await readdir(location, { withFileTypes: true });
-// If there is only one file in it that is a dir we'll use it as dist dir
-if (content.length === 1 && content[0].isDirectory()) {
-return path_1.join(base, content[0].name);
-}
-}
-catch (error) {
-console.error(`Error detecting output directory: `, error);
-}
-return base;
-},
 getOutputDirName: async (dirPrefix) => {
 const base = 'build';
 try {
@@ -27121,7 +27090,6 @@ exports.frameworks = [
 },
 },
 dependency: 'preact-cli',
-getFsOutputDir: async () => 'build',
 getOutputDirName: async () => 'build',
 defaultRoutes: [
 {
@@ -27176,7 +27144,6 @@ exports.frameworks = [
 },
 },
 dependency: '@dojo/cli',
-getFsOutputDir: async () => 'output/dist',
 getOutputDirName: async () => path_1.join('output', 'dist'),
 defaultRoutes: [
 {
@@ -27241,7 +27208,6 @@ exports.frameworks = [
 },
 },
 dependency: 'ember-cli',
-getFsOutputDir: async () => 'dist',
 getOutputDirName: async () => 'dist',
 defaultRoutes: [
 {
@@ -27294,7 +27260,6 @@ exports.frameworks = [
 },
 },
 dependency: '@vue/cli-service',
-getFsOutputDir: async () => 'dist',
 getOutputDirName: async () => 'dist',
 defaultRoutes: [
 {
@@ -27370,7 +27335,6 @@ exports.frameworks = [
 },
 },
 dependency: '@scullyio/init',
-getFsOutputDir: async () => 'dist',
 getOutputDirName: async () => 'dist/static',
 },
 {
@@ -27405,7 +27369,6 @@ exports.frameworks = [
 },
 },
 dependency: '@ionic/angular',
-getFsOutputDir: async () => 'www',
 getOutputDirName: async () => 'www',
 defaultRoutes: [
 {
@@ -27457,7 +27420,6 @@ exports.frameworks = [
 },
 },
 dependency: '@angular/cli',
-getFsOutputDir: async () => 'dist',
 getOutputDirName: async (dirPrefix) => {
 const base = 'dist';
 try {
@@ -27523,7 +27485,6 @@ exports.frameworks = [
 },
 },
 dependency: 'polymer-cli',
-getFsOutputDir: async () => 'build',
 getOutputDirName: async (dirPrefix) => {
 const base = 'build';
 try {
@@ -27591,7 +27552,6 @@ exports.frameworks = [
 },
 },
 dependency: 'sirv-cli',
-getFsOutputDir: async () => 'public',
 getOutputDirName: async () => 'public',
 defaultRoutes: [
 {
@@ -27639,10 +27599,9 @@ exports.frameworks = [
 placeholder: 'svelte-kit dev',
 },
 outputDirectory: {
-
+value: 'public',
 },
 },
-getFsOutputDir: async () => '.output',
 getOutputDirName: async () => 'public',
 },
 {
@@ -27677,7 +27636,6 @@ exports.frameworks = [
 },
 },
 dependency: '@ionic/react',
-getFsOutputDir: async () => 'build',
 getOutputDirName: async () => 'build',
 defaultRoutes: [
 {
@@ -27782,7 +27740,6 @@ exports.frameworks = [
 },
 },
 dependency: 'react-scripts',
-getFsOutputDir: async () => 'build',
 getOutputDirName: async () => 'build',
 defaultRoutes: [
 {
@@ -27882,7 +27839,6 @@ exports.frameworks = [
 },
 },
 dependency: 'gridsome',
-getFsOutputDir: async () => 'dist',
 getOutputDirName: async () => 'dist',
 },
 {
@@ -27918,7 +27874,6 @@ exports.frameworks = [
 },
 },
 dependency: 'umi',
-getFsOutputDir: async () => 'dist',
 getOutputDirName: async () => 'dist',
 defaultRoutes: [
 {
@@ -27970,7 +27925,6 @@ exports.frameworks = [
 },
 },
 dependency: 'sapper',
-getFsOutputDir: async () => '__sapper__/export',
 getOutputDirName: async () => '__sapper__/export',
 },
 {
@@ -28006,7 +27960,6 @@ exports.frameworks = [
 },
 },
 dependency: 'saber',
-getFsOutputDir: async () => 'public',
 getOutputDirName: async () => 'public',
 defaultRoutes: [
 {
@@ -28073,7 +28026,6 @@ exports.frameworks = [
 },
 },
 dependency: '@stencil/core',
-getFsOutputDir: async () => 'www',
 getOutputDirName: async () => 'www',
 defaultRoutes: [
 {
@@ -28160,7 +28112,6 @@ exports.frameworks = [
 },
 },
 dependency: 'nuxt',
-getFsOutputDir: async () => '.output',
 getOutputDirName: async () => 'dist',
 cachePattern: '.nuxt/**',
 defaultRoutes: [
@@ -28217,7 +28168,6 @@ exports.frameworks = [
 placeholder: 'RedwoodJS default',
 },
 },
-getFsOutputDir: async () => 'public',
 getOutputDirName: async () => 'public',
 },
 {
@@ -28260,12 +28210,6 @@ exports.frameworks = [
 placeholder: '`public` or `publishDir` from the `config` file',
 },
 },
-getFsOutputDir: async (dirPrefix) => {
-const config = await read_config_file_1.readConfigFile(['config.json', 'config.yaml', 'config.toml'].map(fileName => {
-return path_1.join(dirPrefix, fileName);
-}));
-return (config && config.publishDir) || 'public';
-},
 getOutputDirName: async (dirPrefix) => {
 const config = await read_config_file_1.readConfigFile(['config.json', 'config.yaml', 'config.toml'].map(fileName => {
 return path_1.join(dirPrefix, fileName);
@@ -28305,10 +28249,6 @@ exports.frameworks = [
 placeholder: '`_site` or `destination` from `_config.yml`',
 },
 },
-getFsOutputDir: async (dirPrefix) => {
-const config = await read_config_file_1.readConfigFile(path_1.join(dirPrefix, '_config.yml'));
-return (config && config.destination) || '_site';
-},
 getOutputDirName: async (dirPrefix) => {
 const config = await read_config_file_1.readConfigFile(path_1.join(dirPrefix, '_config.yml'));
 return (config && config.destination) || '_site';
@@ -28346,7 +28286,6 @@ exports.frameworks = [
 value: 'public',
 },
 },
-getFsOutputDir: async () => 'public',
 getOutputDirName: async () => 'public',
 },
 {
@@ -28380,7 +28319,6 @@ exports.frameworks = [
 value: 'build',
 },
 },
-getFsOutputDir: async () => 'build',
 getOutputDirName: async () => 'build',
 cachePattern: '{vendor/bin,vendor/cache,vendor/bundle}/**',
 },
@@ -28415,7 +28353,6 @@ exports.frameworks = [
 value: 'public',
 },
 },
-getFsOutputDir: async () => 'public',
 getOutputDirName: async () => 'public',
 defaultVersion: '0.13.0',
 },
@@ -28453,7 +28390,6 @@ exports.frameworks = [
 },
 },
 dependency: 'vite',
-getFsOutputDir: async () => 'dist',
 getOutputDirName: async () => 'dist',
 },
 {
@@ -28489,7 +28425,6 @@ exports.frameworks = [
 },
 },
 dependency: 'parcel',
-getFsOutputDir: async () => 'dist',
 getOutputDirName: async () => 'dist',
 defaultRoutes: [
 {
@@ -32817,7 +32752,8 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 }
 const pages = {};
 const pluginName = packageName.replace('vercel-plugin-', '');
-const
+const outputPath = path_1.join(workPath, '.output');
+const traceDir = path_1.join(outputPath, `inputs`,
 // Legacy Runtimes can only provide API Routes, so that's
 // why we can use this prefix for all of them. Here, we have to
 // make sure to not use a cryptic hash name, because people
@@ -32825,9 +32761,7 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 `api-routes-${pluginName}`);
 await fs_extra_1.default.ensureDir(traceDir);
 let newPathsRuntime = new Set();
-
-const entryDir = path_1.join('.output', 'server', 'pages');
-const entryRoot = path_1.join(workPath, entryDir);
+const entryRoot = path_1.join(outputPath, 'server', 'pages');
 for (const entrypoint of Object.keys(entrypoints)) {
 const { output } = await buildRuntime({
 files: sourceFilesPreBuild,
@@ -32838,6 +32772,7 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 },
 meta: {
 avoidTopLevelInstall: true,
+skipDownload: true,
 },
 });
 // Legacy Runtimes tend to pollute the `workPath` with compiled results,
@@ -32860,6 +32795,7 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 }
 let handlerFileBase = output.handler;
 let handlerFile = lambdaFiles[handlerFileBase];
+let handlerHasImport = false;
 const { handler } = output;
 const handlerMethod = handler.split('.').pop();
 const handlerFileName = handler.replace(`.${handlerMethod}`, '');
@@ -32871,6 +32807,7 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 if (!handlerFile) {
 handlerFileBase = handlerFileName + ext;
 handlerFile = lambdaFiles[handlerFileBase];
+handlerHasImport = true;
 }
 if (!handlerFile || !handlerFile.fsPath) {
 throw new Error(`Could not find a handler file. Please ensure that \`files\` for the returned \`Lambda\` contains an \`FileFsRef\` named "${handlerFileBase}" with a valid \`fsPath\`.`);
@@ -32885,6 +32822,53 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 // one, so linking would end with a broken reference.
 await fs_extra_1.default.ensureDir(path_1.dirname(entry));
 await fs_extra_1.default.copy(handlerFile.fsPath, entry);
+// For compiled languages, the launcher file will be binary and therefore
+// won't try to import a user-provided request handler (instead, it will
+// contain it). But for interpreted languages, the launcher might try to
+// load a user-provided request handler from the source file instead of bundling
+// it, so we have to adjust the import statement inside the launcher to point
+// to the respective source file. Previously, Legacy Runtimes simply expected
+// the user-provided request-handler to be copied right next to the launcher,
+// but with the new File System API, files won't be moved around unnecessarily.
+if (handlerHasImport) {
+const { fsPath } = handlerFile;
+const encoding = 'utf-8';
+// This is the true directory of the user-provided request handler in the
+// source files, so that's what we will use as an import path in the launcher.
+const locationPrefix = path_1.relative(entry, outputPath);
+let handlerContent = await fs_extra_1.default.readFile(fsPath, encoding);
+const importPaths = [
+// This is the full entrypoint path, like `./api/test.py`
+`./${entrypoint}`,
+// This is the entrypoint path without extension, like `api/test`
+entrypoint.slice(0, -ext.length),
+];
+// Generate a list of regular expressions that we can use for
+// finding matches, but only allow matches if the import path is
+// wrapped inside single (') or double quotes (").
+const patterns = importPaths.map(path => {
+// eslint-disable-next-line no-useless-escape
+return new RegExp(`('|")(${path.replace(/\./g, '\\.')})('|")`, 'g');
+});
+let replacedMatch = null;
+for (const pattern of patterns) {
+const newContent = handlerContent.replace(pattern, (_, p1, p2, p3) => {
+return `${p1}${path_1.join(locationPrefix, p2)}${p3}`;
+});
+if (newContent !== handlerContent) {
+_1.debug(`Replaced "${pattern}" inside "${entry}" to ensure correct import of user-provided request handler`);
+handlerContent = newContent;
+replacedMatch = true;
+}
+}
+if (!replacedMatch) {
+new Error(`No replacable matches for "${importPaths[0]}" or "${importPaths[1]}" found in "${fsPath}"`);
+}
+await fs_extra_1.default.writeFile(entry, handlerContent, encoding);
+}
+else {
+await fs_extra_1.default.copy(handlerFile.fsPath, entry);
+}
 const newFilesEntrypoint = [];
 const newDirectoriesEntrypoint = [];
 const preBuildFiles = Object.values(sourceFilesPreBuild).map(file => {
@@ -32931,56 +32915,22 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 }
 }
 }
-const tracedFiles = [];
-const linkers = Object.entries(lambdaFiles).map(async ([relPath, file]) => {
-const newPath = path_1.join(traceDir, relPath);
-// The handler was already moved into position above.
-if (relPath === handlerFileBase) {
-return;
-}
-tracedFiles.push({ absolutePath: newPath, relativePath: relPath });
-const { fsPath, type } = file;
-if (fsPath) {
-await fs_extra_1.default.ensureDir(path_1.dirname(newPath));
-const isNewFile = newFilesEntrypoint.includes(fsPath);
-const isInsideNewDirectory = newDirectoriesEntrypoint.some(dirPath => {
-return fsPath.startsWith(dirPath);
-});
-// With this, we're making sure that files in the `workPath` that existed
-// before the Legacy Runtime was invoked (source files) are linked from
-// `.output` instead of copying there (the latter only happens if linking fails),
-// which is the fastest solution. However, files that are created fresh
-// by the Legacy Runtimes are always copied, because their link destinations
-// are likely to be overwritten every time an entrypoint is processed by
-// the Legacy Runtime. This is likely to overwrite the destination on subsequent
-// runs, but that's also how `workPath` used to work originally, without
-// the File System API (meaning that there was one `workPath` for all entrypoints).
-if (isNewFile || isInsideNewDirectory) {
-_1.debug(`Copying from ${fsPath} to ${newPath}`);
-await fs_extra_1.default.copy(fsPath, newPath);
-}
-else {
-await linkOrCopy(fsPath, newPath);
-}
-}
-else if (type === 'FileBlob') {
-const { data, mode } = file;
-await fs_extra_1.default.writeFile(newPath, data, { mode });
-}
-else {
-throw new Error(`Unknown file type: ${type}`);
-}
-});
-linkersRuntime = linkersRuntime.concat(linkers);
 const nft = `${entry}.nft.json`;
 const json = JSON.stringify({
-version:
-files:
-
-
-
-
-
+version: 2,
+files: Object.keys(lambdaFiles)
+.map(file => {
+const { fsPath } = lambdaFiles[file];
+if (!fsPath) {
+throw new Error(`File "${file}" is missing valid \`fsPath\` property`);
+}
+// The handler was already moved into position above.
+if (file === handlerFileBase) {
+return;
+}
+return normalize_path_1.normalizePath(path_1.relative(path_1.dirname(nft), fsPath));
+})
+.filter(Boolean),
 });
 await fs_extra_1.default.ensureDir(path_1.dirname(nft));
 await fs_extra_1.default.writeFile(nft, json);
@@ -33007,11 +32957,6 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 allowQuery: output.allowQuery,
 };
 }
-// Instead of of waiting for all of the linking to be done for every
-// entrypoint before processing the next one, we immediately handle all
-// of them one after the other, while then waiting for the linking
-// to finish right here, before we clean up newly created files below.
-await Promise.all(linkersRuntime);
 // A list of all the files that were created by the Legacy Runtime,
 // which we'd like to remove from the File System.
 const toRemove = Array.from(newPathsRuntime).map(path => {
@@ -33030,9 +32975,6 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
 };
 }
 exports.convertRuntimeToPlugin = convertRuntimeToPlugin;
-async function linkOrCopy(existingPath, newPath) {
-await fs_extra_1.default.copyFile(existingPath, newPath);
-}
 async function readJson(filePath) {
 try {
 const str = await fs_extra_1.default.readFile(filePath, 'utf8');
@@ -33053,7 +32995,7 @@ async function updateFunctionsManifest({ workPath, pages, }) {
 const functionsManifestPath = path_1.join(workPath, '.output', 'functions-manifest.json');
 const functionsManifest = await readJson(functionsManifestPath);
 if (!functionsManifest.version)
-functionsManifest.version =
+functionsManifest.version = 2;
 if (!functionsManifest.pages)
 functionsManifest.pages = {};
 for (const [pageKey, pageConfig] of Object.entries(pages)) {
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@vercel/build-utils",
-"version": "2.12.3-canary.37",
+"version": "2.12.3-canary.40",
 "license": "MIT",
 "main": "./dist/index.js",
 "types": "./dist/index.d.js",
@@ -30,7 +30,7 @@
 "@types/node-fetch": "^2.1.6",
 "@types/semver": "6.0.0",
 "@types/yazl": "^2.4.1",
-"@vercel/frameworks": "0.5.1-canary.
+"@vercel/frameworks": "0.5.1-canary.17",
 "@vercel/ncc": "0.24.0",
 "aggregate-error": "3.0.1",
 "async-retry": "1.2.3",
@@ -49,5 +49,5 @@
 "typescript": "4.3.4",
 "yazl": "2.4.3"
 },
-"gitHead": "
+"gitHead": "d31ebbabe4d9533d0e98137d76eb319b01ac8b13"
 }