@vercel/build-utils 2.12.3-canary.38 → 2.12.3-canary.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/convert-runtime-to-plugin.js +74 -144
- package/dist/index.js +74 -144
- package/package.json +2 -2
package/dist/convert-runtime-to-plugin.js CHANGED
@@ -70,17 +70,15 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  }
  const pages = {};
  const pluginName = packageName.replace('vercel-plugin-', '');
- const
+ const outputPath = path_1.join(workPath, '.output');
+ const traceDir = path_1.join(outputPath, `inputs`,
  // Legacy Runtimes can only provide API Routes, so that's
  // why we can use this prefix for all of them. Here, we have to
  // make sure to not use a cryptic hash name, because people
  // need to be able to easily inspect the output.
  `api-routes-${pluginName}`);
  await fs_extra_1.default.ensureDir(traceDir);
-
- let linkersRuntime = [];
- const entryDir = path_1.join('.output', 'server', 'pages');
- const entryRoot = path_1.join(workPath, entryDir);
+ const entryRoot = path_1.join(outputPath, 'server', 'pages');
  for (const entrypoint of Object.keys(entrypoints)) {
  const { output } = await buildRuntime({
  files: sourceFilesPreBuild,
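For orientation, here is a small standalone sketch (not taken from the package) of the directory layout this hunk derives from `workPath`; the `workPath` value and the plugin name are hypothetical:

```js
// Sketch only: mirrors the path construction added above, assuming a
// hypothetical workPath of "/repo" and a hypothetical plugin "vercel-plugin-python".
const path = require('path');

const workPath = '/repo'; // hypothetical
const pluginName = 'vercel-plugin-python'.replace('vercel-plugin-', ''); // "python"

const outputPath = path.join(workPath, '.output');
const traceDir = path.join(outputPath, 'inputs', `api-routes-${pluginName}`);
const entryRoot = path.join(outputPath, 'server', 'pages');

console.log(outputPath); // "/repo/.output"
console.log(traceDir);   // "/repo/.output/inputs/api-routes-python"
console.log(entryRoot);  // "/repo/.output/server/pages"
```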
@@ -94,13 +92,6 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  skipDownload: true,
  },
  });
- // Legacy Runtimes tend to pollute the `workPath` with compiled results,
- // because the `workPath` used to be a place that was a place where they could
- // just put anything, but nowadays it's the working directory of the `vercel build`
- // command, which is the place where the developer keeps their source files,
- // so we don't want to pollute this space unnecessarily. That means we have to clean
- // up files that were created by the build, which is done further below.
- const sourceFilesAfterBuild = await getSourceFiles(workPath, ignoreFilter);
  // @ts-ignore This symbol is a private API
  const lambdaFiles = output[lambda_1.FILES_SYMBOL];
  // When deploying, the `files` that are passed to the Legacy Runtimes already
@@ -114,6 +105,7 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  }
  let handlerFileBase = output.handler;
  let handlerFile = lambdaFiles[handlerFileBase];
+ let handlerHasImport = false;
  const { handler } = output;
  const handlerMethod = handler.split('.').pop();
  const handlerFileName = handler.replace(`.${handlerMethod}`, '');
@@ -125,6 +117,7 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  if (!handlerFile) {
  handlerFileBase = handlerFileName + ext;
  handlerFile = lambdaFiles[handlerFileBase];
+ handlerHasImport = true;
  }
  if (!handlerFile || !handlerFile.fsPath) {
  throw new Error(`Could not find a handler file. Please ensure that \`files\` for the returned \`Lambda\` contains an \`FileFsRef\` named "${handlerFileBase}" with a valid \`fsPath\`.`);
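To make the new `handlerHasImport` flag concrete, here is a hedged sketch of the handler lookup; the handler string, extension, and file map are hypothetical and only illustrate when the fallback branch is taken:

```js
// Sketch only: all values are hypothetical; this shows the two lookup paths
// and when handlerHasImport becomes true.
const output = { handler: 'vc__handler__python.vc_handler' }; // hypothetical
const ext = '.py';                                            // hypothetical
const lambdaFiles = {                                         // hypothetical
  'vc__handler__python.py': { fsPath: '/tmp/launcher.py' },
};

const handlerMethod = output.handler.split('.').pop();                   // "vc_handler"
const handlerFileName = output.handler.replace(`.${handlerMethod}`, ''); // "vc__handler__python"

let handlerFileBase = output.handler;
let handlerFile = lambdaFiles[handlerFileBase];
let handlerHasImport = false;

// No file is registered under the full handler string, so fall back to
// "<name><ext>" and remember that this launcher imports the user handler.
if (!handlerFile) {
  handlerFileBase = handlerFileName + ext; // "vc__handler__python.py"
  handlerFile = lambdaFiles[handlerFileBase];
  handlerHasImport = true;
}

console.log(handlerFileBase, handlerHasImport); // "vc__handler__python.py" true
```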
@@ -133,119 +126,83 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  const entryBase = path_1.basename(entrypoint).replace(ext, handlerExtName);
  const entryPath = path_1.join(path_1.dirname(entrypoint), entryBase);
  const entry = path_1.join(entryRoot, entryPath);
- //
- //
- // every build for every entrypoint overwrites the launcher of the previous
- // one, so linking would end with a broken reference.
+ // Create the parent directory of the API Route that will be created
+ // for the current entrypoint inside of `.output/server/pages/api`.
  await fs_extra_1.default.ensureDir(path_1.dirname(entry));
-
-
-
-
-
-
- //
- //
-
-
-
-
-
-
-
-
-
-
- //
-
-
-
-
-
-
- //
-
-
-
+ // For compiled languages, the launcher file will be binary and therefore
+ // won't try to import a user-provided request handler (instead, it will
+ // contain it). But for interpreted languages, the launcher might try to
+ // load a user-provided request handler from the source file instead of bundling
+ // it, so we have to adjust the import statement inside the launcher to point
+ // to the respective source file. Previously, Legacy Runtimes simply expected
+ // the user-provided request-handler to be copied right next to the launcher,
+ // but with the new File System API, files won't be moved around unnecessarily.
+ if (handlerHasImport) {
+ const { fsPath } = handlerFile;
+ const encoding = 'utf-8';
+ // This is the true directory of the user-provided request handler in the
+ // source files, so that's what we will use as an import path in the launcher.
+ const locationPrefix = path_1.relative(entry, outputPath);
+ let handlerContent = await fs_extra_1.default.readFile(fsPath, encoding);
+ const importPaths = [
+ // This is the full entrypoint path, like `./api/test.py`
+ `./${entrypoint}`,
+ // This is the entrypoint path without extension, like `api/test`
+ entrypoint.slice(0, -ext.length),
+ ];
+ // Generate a list of regular expressions that we can use for
+ // finding matches, but only allow matches if the import path is
+ // wrapped inside single (') or double quotes (").
+ const patterns = importPaths.map(path => {
+ // eslint-disable-next-line no-useless-escape
+ return new RegExp(`('|")(${path.replace(/\./g, '\\.')})('|")`, 'g');
+ });
+ let replacedMatch = null;
+ for (const pattern of patterns) {
+ const newContent = handlerContent.replace(pattern, (_, p1, p2, p3) => {
+ return `${p1}${path_1.join(locationPrefix, p2)}${p3}`;
  });
-
-
- // looking at, we don't need to add more entries to the list
- // because when the parent will get removed in the future,
- // all of its children (and therefore the path we're looking at)
- // will automatically get removed anyways.
- if (hasParentDir) {
- continue;
- }
- if (isNewDir) {
- newDirectoriesEntrypoint.push(dirPath);
+ if (newContent !== handlerContent) {
+ _1.debug(`Replaced "${pattern}" inside "${entry}" to ensure correct import of user-provided request handler`);
+ handlerContent = newContent;
+ replacedMatch = true;
  }
- else {
- newFilesEntrypoint.push(path);
- }
- }
- }
- const tracedFiles = [];
- const linkers = Object.entries(lambdaFiles).map(async ([relPath, file]) => {
- const newPath = path_1.join(traceDir, relPath);
- // The handler was already moved into position above.
- if (relPath === handlerFileBase) {
- return;
- }
- tracedFiles.push({ absolutePath: newPath, relativePath: relPath });
- const { fsPath, type } = file;
- if (fsPath) {
- await fs_extra_1.default.ensureDir(path_1.dirname(newPath));
- const isNewFile = newFilesEntrypoint.includes(fsPath);
- const isInsideNewDirectory = newDirectoriesEntrypoint.some(dirPath => {
- return fsPath.startsWith(dirPath);
- });
- // With this, we're making sure that files in the `workPath` that existed
- // before the Legacy Runtime was invoked (source files) are linked from
- // `.output` instead of copying there (the latter only happens if linking fails),
- // which is the fastest solution. However, files that are created fresh
- // by the Legacy Runtimes are always copied, because their link destinations
- // are likely to be overwritten every time an entrypoint is processed by
- // the Legacy Runtime. This is likely to overwrite the destination on subsequent
- // runs, but that's also how `workPath` used to work originally, without
- // the File System API (meaning that there was one `workPath` for all entrypoints).
- if (isNewFile || isInsideNewDirectory) {
- _1.debug(`Copying from ${fsPath} to ${newPath}`);
- await fs_extra_1.default.copy(fsPath, newPath);
- }
- else {
- await linkOrCopy(fsPath, newPath);
- }
- }
- else if (type === 'FileBlob') {
- const { data, mode } = file;
- await fs_extra_1.default.writeFile(newPath, data, { mode });
  }
-
-
+ if (!replacedMatch) {
+ new Error(`No replacable matches for "${importPaths[0]}" or "${importPaths[1]}" found in "${fsPath}"`);
  }
-
-
+ await fs_extra_1.default.writeFile(entry, handlerContent, encoding);
+ }
+ else {
+ await fs_extra_1.default.copy(handlerFile.fsPath, entry);
+ }
+ // Legacy Runtimes based on interpreted languages will create a new launcher file
+ // for every entrypoint, but they will create each one inside `workPath`, which means that
+ // the launcher for one entrypoint will overwrite the launcher provided for the previous
+ // entrypoint. That's why, above, we copy the file contents into the new destination (and
+ // optionally transform them along the way), instead of linking. We then also want to remove
+ // the copy origin right here, so that the `workPath` doesn't contain a useless launcher file
+ // once the build has finished running.
+ await fs_extra_1.default.remove(handlerFile.fsPath);
+ _1.debug(`Removed temporary file "${handlerFile.fsPath}"`);
  const nft = `${entry}.nft.json`;
  const json = JSON.stringify({
- version:
- files:
-
-
-
-
-
+ version: 2,
+ files: Object.keys(lambdaFiles)
+ .map(file => {
+ const { fsPath } = lambdaFiles[file];
+ if (!fsPath) {
+ throw new Error(`File "${file}" is missing valid \`fsPath\` property`);
+ }
+ // The handler was already moved into position above.
+ if (file === handlerFileBase) {
+ return;
+ }
+ return normalize_path_1.normalizePath(path_1.relative(path_1.dirname(nft), fsPath));
+ })
+ .filter(Boolean),
  });
- await fs_extra_1.default.ensureDir(path_1.dirname(nft));
  await fs_extra_1.default.writeFile(nft, json);
- // Extend the list of directories and files that were created by the
- // Legacy Runtime with the list of directories and files that were
- // created for the entrypoint that was just processed above.
- newPathsRuntime = new Set([
- ...newPathsRuntime,
- ...newFilesEntrypoint,
- ...newDirectoriesEntrypoint,
- ]);
  // Add an entry that will later on be added to the `functions-manifest.json`
  // file that is placed inside of the `.output` directory.
  pages[normalize_path_1.normalizePath(entryPath)] = {
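To show what the regex replacement in this hunk does in isolation, here is a runnable sketch with hypothetical values for `entrypoint`, the launcher content, and the location prefix (the real code derives the prefix via `path_1.relative(entry, outputPath)` and writes the result to `entry`):

```js
// Sketch only: mirrors the quoted-import rewrite shown above; all values are
// hypothetical and stand in for what a Legacy Runtime launcher might contain.
const path = require('path');

const entrypoint = 'api/test.py';  // hypothetical
const ext = '.py';
const locationPrefix = '../../..'; // hypothetical stand-in for path.relative(entry, outputPath)

let handlerContent = "handler_path = './api/test.py'\nother = 'api/unrelated.py'\n";

const importPaths = [
  `./${entrypoint}`,                // "./api/test.py"
  entrypoint.slice(0, -ext.length), // "api/test"
];

// Only matches wrapped in single or double quotes are rewritten.
const patterns = importPaths.map(p =>
  new RegExp(`('|")(${p.replace(/\./g, '\\.')})('|")`, 'g')
);

for (const pattern of patterns) {
  handlerContent = handlerContent.replace(
    pattern,
    (_, p1, p2, p3) => `${p1}${path.join(locationPrefix, p2)}${p3}`
  );
}

console.log(handlerContent);
// handler_path = '../../../api/test.py'
// other = 'api/unrelated.py'
```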
@@ -261,39 +218,12 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  allowQuery: output.allowQuery,
  };
  }
- // Instead of of waiting for all of the linking to be done for every
- // entrypoint before processing the next one, we immediately handle all
- // of them one after the other, while then waiting for the linking
- // to finish right here, before we clean up newly created files below.
- await Promise.all(linkersRuntime);
- // A list of all the files that were created by the Legacy Runtime,
- // which we'd like to remove from the File System.
- const toRemove = Array.from(newPathsRuntime).map(path => {
- _1.debug(`Removing ${path} as part of cleanup`);
- return fs_extra_1.default.remove(path);
- });
- // Once all the entrypoints have been processed, we'd like to
- // remove all the files from `workPath` that originally weren't present
- // before the Legacy Runtime began running, because the `workPath`
- // is nowadays the directory in which the user keeps their source code, since
- // we're no longer running separate parallel builds for every Legacy Runtime.
- await Promise.all(toRemove);
  // Add any Serverless Functions that were exposed by the Legacy Runtime
  // to the `functions-manifest.json` file provided in `.output`.
  await updateFunctionsManifest({ workPath, pages });
  };
  }
  exports.convertRuntimeToPlugin = convertRuntimeToPlugin;
- async function linkOrCopy(existingPath, newPath) {
- try {
- await fs_extra_1.default.createLink(existingPath, newPath);
- }
- catch (err) {
- if (err.code !== 'EEXIST') {
- await fs_extra_1.default.copyFile(existingPath, newPath);
- }
- }
- }
  async function readJson(filePath) {
  try {
  const str = await fs_extra_1.default.readFile(filePath, 'utf8');
@@ -314,7 +244,7 @@ async function updateFunctionsManifest({ workPath, pages, }) {
  const functionsManifestPath = path_1.join(workPath, '.output', 'functions-manifest.json');
  const functionsManifest = await readJson(functionsManifestPath);
  if (!functionsManifest.version)
- functionsManifest.version =
+ functionsManifest.version = 2;
  if (!functionsManifest.pages)
  functionsManifest.pages = {};
  for (const [pageKey, pageConfig] of Object.entries(pages)) {
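Taken together, the new code in this file writes a `.nft.json` trace next to each API Route entry. As a hedged illustration of that file's shape, here is a sketch built the same way as the hunk above; the file names and paths are hypothetical, and the real code additionally runs the result through `normalizePath` to normalize Windows separators:

```js
// Sketch only: builds the { version: 2, files: [...] } payload the way the
// diff above does, using a hypothetical lambdaFiles map and entry path.
const path = require('path');

const entry = '/repo/.output/server/pages/api/test.py'; // hypothetical
const handlerFileBase = 'vc__handler__python.py';       // hypothetical
const lambdaFiles = {                                    // hypothetical
  'vc__handler__python.py': { fsPath: '/repo/.output/server/pages/api/test.py' },
  'api/test.py': { fsPath: '/repo/api/test.py' },
  'requirements.txt': { fsPath: '/repo/requirements.txt' },
};

const nft = `${entry}.nft.json`;
const json = JSON.stringify({
  version: 2,
  files: Object.keys(lambdaFiles)
    .map(file => {
      const { fsPath } = lambdaFiles[file];
      if (!fsPath) throw new Error(`File "${file}" is missing valid \`fsPath\` property`);
      // The handler itself already lives next to the .nft.json file.
      if (file === handlerFileBase) return;
      return path.relative(path.dirname(nft), fsPath);
    })
    .filter(Boolean),
});

console.log(nft);
console.log(json);
// roughly: {"version":2,"files":["../../../../api/test.py","../../../../requirements.txt"]}
```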
package/dist/index.js CHANGED
@@ -32752,17 +32752,15 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  }
  const pages = {};
  const pluginName = packageName.replace('vercel-plugin-', '');
- const
+ const outputPath = path_1.join(workPath, '.output');
+ const traceDir = path_1.join(outputPath, `inputs`,
  // Legacy Runtimes can only provide API Routes, so that's
  // why we can use this prefix for all of them. Here, we have to
  // make sure to not use a cryptic hash name, because people
  // need to be able to easily inspect the output.
  `api-routes-${pluginName}`);
  await fs_extra_1.default.ensureDir(traceDir);
-
- let linkersRuntime = [];
- const entryDir = path_1.join('.output', 'server', 'pages');
- const entryRoot = path_1.join(workPath, entryDir);
+ const entryRoot = path_1.join(outputPath, 'server', 'pages');
  for (const entrypoint of Object.keys(entrypoints)) {
  const { output } = await buildRuntime({
  files: sourceFilesPreBuild,
@@ -32776,13 +32774,6 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  skipDownload: true,
  },
  });
- // Legacy Runtimes tend to pollute the `workPath` with compiled results,
- // because the `workPath` used to be a place that was a place where they could
- // just put anything, but nowadays it's the working directory of the `vercel build`
- // command, which is the place where the developer keeps their source files,
- // so we don't want to pollute this space unnecessarily. That means we have to clean
- // up files that were created by the build, which is done further below.
- const sourceFilesAfterBuild = await getSourceFiles(workPath, ignoreFilter);
  // @ts-ignore This symbol is a private API
  const lambdaFiles = output[lambda_1.FILES_SYMBOL];
  // When deploying, the `files` that are passed to the Legacy Runtimes already
@@ -32796,6 +32787,7 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  }
  let handlerFileBase = output.handler;
  let handlerFile = lambdaFiles[handlerFileBase];
+ let handlerHasImport = false;
  const { handler } = output;
  const handlerMethod = handler.split('.').pop();
  const handlerFileName = handler.replace(`.${handlerMethod}`, '');
@@ -32807,6 +32799,7 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  if (!handlerFile) {
  handlerFileBase = handlerFileName + ext;
  handlerFile = lambdaFiles[handlerFileBase];
+ handlerHasImport = true;
  }
  if (!handlerFile || !handlerFile.fsPath) {
  throw new Error(`Could not find a handler file. Please ensure that \`files\` for the returned \`Lambda\` contains an \`FileFsRef\` named "${handlerFileBase}" with a valid \`fsPath\`.`);
@@ -32815,119 +32808,83 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  const entryBase = path_1.basename(entrypoint).replace(ext, handlerExtName);
  const entryPath = path_1.join(path_1.dirname(entrypoint), entryBase);
  const entry = path_1.join(entryRoot, entryPath);
- //
- //
- // every build for every entrypoint overwrites the launcher of the previous
- // one, so linking would end with a broken reference.
+ // Create the parent directory of the API Route that will be created
+ // for the current entrypoint inside of `.output/server/pages/api`.
  await fs_extra_1.default.ensureDir(path_1.dirname(entry));
-
-
-
-
-
-
- //
- //
-
-
-
-
-
-
-
-
-
-
- //
-
-
-
-
-
-
- //
-
-
-
-
-
-
- // looking at, we don't need to add more entries to the list
- // because when the parent will get removed in the future,
- // all of its children (and therefore the path we're looking at)
- // will automatically get removed anyways.
- if (hasParentDir) {
- continue;
- }
- if (isNewDir) {
- newDirectoriesEntrypoint.push(dirPath);
- }
- else {
- newFilesEntrypoint.push(path);
- }
- }
- }
- const tracedFiles = [];
- const linkers = Object.entries(lambdaFiles).map(async ([relPath, file]) => {
- const newPath = path_1.join(traceDir, relPath);
- // The handler was already moved into position above.
- if (relPath === handlerFileBase) {
- return;
- }
- tracedFiles.push({ absolutePath: newPath, relativePath: relPath });
- const { fsPath, type } = file;
- if (fsPath) {
- await fs_extra_1.default.ensureDir(path_1.dirname(newPath));
- const isNewFile = newFilesEntrypoint.includes(fsPath);
- const isInsideNewDirectory = newDirectoriesEntrypoint.some(dirPath => {
- return fsPath.startsWith(dirPath);
+ // For compiled languages, the launcher file will be binary and therefore
+ // won't try to import a user-provided request handler (instead, it will
+ // contain it). But for interpreted languages, the launcher might try to
+ // load a user-provided request handler from the source file instead of bundling
+ // it, so we have to adjust the import statement inside the launcher to point
+ // to the respective source file. Previously, Legacy Runtimes simply expected
+ // the user-provided request-handler to be copied right next to the launcher,
+ // but with the new File System API, files won't be moved around unnecessarily.
+ if (handlerHasImport) {
+ const { fsPath } = handlerFile;
+ const encoding = 'utf-8';
+ // This is the true directory of the user-provided request handler in the
+ // source files, so that's what we will use as an import path in the launcher.
+ const locationPrefix = path_1.relative(entry, outputPath);
+ let handlerContent = await fs_extra_1.default.readFile(fsPath, encoding);
+ const importPaths = [
+ // This is the full entrypoint path, like `./api/test.py`
+ `./${entrypoint}`,
+ // This is the entrypoint path without extension, like `api/test`
+ entrypoint.slice(0, -ext.length),
+ ];
+ // Generate a list of regular expressions that we can use for
+ // finding matches, but only allow matches if the import path is
+ // wrapped inside single (') or double quotes (").
+ const patterns = importPaths.map(path => {
+ // eslint-disable-next-line no-useless-escape
+ return new RegExp(`('|")(${path.replace(/\./g, '\\.')})('|")`, 'g');
+ });
+ let replacedMatch = null;
+ for (const pattern of patterns) {
+ const newContent = handlerContent.replace(pattern, (_, p1, p2, p3) => {
+ return `${p1}${path_1.join(locationPrefix, p2)}${p3}`;
  });
- // With this, we're making sure that files in the `workPath` that existed
- // before the Legacy Runtime was invoked (source files) are linked from
- // `.output` instead of copying there (the latter only happens if linking fails),
- // which is the fastest solution. However, files that are created fresh
- // by the Legacy Runtimes are always copied, because their link destinations
- // are likely to be overwritten every time an entrypoint is processed by
- // the Legacy Runtime. This is likely to overwrite the destination on subsequent
- // runs, but that's also how `workPath` used to work originally, without
- // the File System API (meaning that there was one `workPath` for all entrypoints).
- if (isNewFile || isInsideNewDirectory) {
- _1.debug(`Copying from ${fsPath} to ${newPath}`);
- await fs_extra_1.default.copy(fsPath, newPath);
+ if (newContent !== handlerContent) {
+ _1.debug(`Replaced "${pattern}" inside "${entry}" to ensure correct import of user-provided request handler`);
+ handlerContent = newContent;
+ replacedMatch = true;
  }
- else {
- await linkOrCopy(fsPath, newPath);
- }
- }
- else if (type === 'FileBlob') {
- const { data, mode } = file;
- await fs_extra_1.default.writeFile(newPath, data, { mode });
  }
-
-
+ if (!replacedMatch) {
+ new Error(`No replacable matches for "${importPaths[0]}" or "${importPaths[1]}" found in "${fsPath}"`);
  }
-
-
+ await fs_extra_1.default.writeFile(entry, handlerContent, encoding);
+ }
+ else {
+ await fs_extra_1.default.copy(handlerFile.fsPath, entry);
+ }
+ // Legacy Runtimes based on interpreted languages will create a new launcher file
+ // for every entrypoint, but they will create each one inside `workPath`, which means that
+ // the launcher for one entrypoint will overwrite the launcher provided for the previous
+ // entrypoint. That's why, above, we copy the file contents into the new destination (and
+ // optionally transform them along the way), instead of linking. We then also want to remove
+ // the copy origin right here, so that the `workPath` doesn't contain a useless launcher file
+ // once the build has finished running.
+ await fs_extra_1.default.remove(handlerFile.fsPath);
+ _1.debug(`Removed temporary file "${handlerFile.fsPath}"`);
  const nft = `${entry}.nft.json`;
  const json = JSON.stringify({
- version:
- files:
-
-
-
-
-
+ version: 2,
+ files: Object.keys(lambdaFiles)
+ .map(file => {
+ const { fsPath } = lambdaFiles[file];
+ if (!fsPath) {
+ throw new Error(`File "${file}" is missing valid \`fsPath\` property`);
+ }
+ // The handler was already moved into position above.
+ if (file === handlerFileBase) {
+ return;
+ }
+ return normalize_path_1.normalizePath(path_1.relative(path_1.dirname(nft), fsPath));
+ })
+ .filter(Boolean),
  });
- await fs_extra_1.default.ensureDir(path_1.dirname(nft));
  await fs_extra_1.default.writeFile(nft, json);
- // Extend the list of directories and files that were created by the
- // Legacy Runtime with the list of directories and files that were
- // created for the entrypoint that was just processed above.
- newPathsRuntime = new Set([
- ...newPathsRuntime,
- ...newFilesEntrypoint,
- ...newDirectoriesEntrypoint,
- ]);
  // Add an entry that will later on be added to the `functions-manifest.json`
  // file that is placed inside of the `.output` directory.
  pages[normalize_path_1.normalizePath(entryPath)] = {
@@ -32943,39 +32900,12 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  allowQuery: output.allowQuery,
  };
  }
- // Instead of of waiting for all of the linking to be done for every
- // entrypoint before processing the next one, we immediately handle all
- // of them one after the other, while then waiting for the linking
- // to finish right here, before we clean up newly created files below.
- await Promise.all(linkersRuntime);
- // A list of all the files that were created by the Legacy Runtime,
- // which we'd like to remove from the File System.
- const toRemove = Array.from(newPathsRuntime).map(path => {
- _1.debug(`Removing ${path} as part of cleanup`);
- return fs_extra_1.default.remove(path);
- });
- // Once all the entrypoints have been processed, we'd like to
- // remove all the files from `workPath` that originally weren't present
- // before the Legacy Runtime began running, because the `workPath`
- // is nowadays the directory in which the user keeps their source code, since
- // we're no longer running separate parallel builds for every Legacy Runtime.
- await Promise.all(toRemove);
  // Add any Serverless Functions that were exposed by the Legacy Runtime
  // to the `functions-manifest.json` file provided in `.output`.
  await updateFunctionsManifest({ workPath, pages });
  };
  }
  exports.convertRuntimeToPlugin = convertRuntimeToPlugin;
- async function linkOrCopy(existingPath, newPath) {
- try {
- await fs_extra_1.default.createLink(existingPath, newPath);
- }
- catch (err) {
- if (err.code !== 'EEXIST') {
- await fs_extra_1.default.copyFile(existingPath, newPath);
- }
- }
- }
  async function readJson(filePath) {
  try {
  const str = await fs_extra_1.default.readFile(filePath, 'utf8');
@@ -32996,7 +32926,7 @@ async function updateFunctionsManifest({ workPath, pages, }) {
  const functionsManifestPath = path_1.join(workPath, '.output', 'functions-manifest.json');
  const functionsManifest = await readJson(functionsManifestPath);
  if (!functionsManifest.version)
- functionsManifest.version =
+ functionsManifest.version = 2;
  if (!functionsManifest.pages)
  functionsManifest.pages = {};
  for (const [pageKey, pageConfig] of Object.entries(pages)) {
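The last hunk in both compiled files defaults `functions-manifest.json` to version 2 before merging in the new pages. A minimal sketch of that merge follows; the existing manifest, the page entry, and the per-page merge step are hypothetical, since the loop body is not shown in this diff:

```js
// Sketch only: mimics how updateFunctionsManifest defaults the manifest before
// merging pages. The manifest contents and page config below are hypothetical.
const functionsManifest = {}; // pretend readJson() found an empty manifest

if (!functionsManifest.version) functionsManifest.version = 2;
if (!functionsManifest.pages) functionsManifest.pages = {};

const pages = { // hypothetical entry produced for one API Route
  'api/test.py': { handler: 'test.vc_handler', runtime: 'python3.9' },
};

for (const [pageKey, pageConfig] of Object.entries(pages)) {
  // Assumption: each page entry is written onto the manifest under its key.
  functionsManifest.pages[pageKey] = { ...pageConfig };
}

console.log(JSON.stringify(functionsManifest, null, 2));
```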
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@vercel/build-utils",
- "version": "2.12.3-canary.38",
+ "version": "2.12.3-canary.41",
  "license": "MIT",
  "main": "./dist/index.js",
  "types": "./dist/index.d.js",
@@ -49,5 +49,5 @@
  "typescript": "4.3.4",
  "yazl": "2.4.3"
  },
- "gitHead": "
+ "gitHead": "35c8fc272905524eeb83268bdc09edb165d3382a"
  }