@vercel/build-utils 2.12.3-canary.32 → 2.12.3-canary.36
This diff shows the content of publicly available package versions that were released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these package versions as they appear in the public registry.
- package/dist/convert-runtime-to-plugin.js +143 -37
- package/dist/index.js +143 -37
- package/package.json +2 -2
package/dist/convert-runtime-to-plugin.js
CHANGED
@@ -77,6 +77,10 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
         // need to be able to easily inspect the output.
         `api-routes-${pluginName}`);
         await fs_extra_1.default.ensureDir(traceDir);
+        let newPathsRuntime = new Set();
+        let linkersRuntime = [];
+        const entryDir = path_1.join('.output', 'server', 'pages');
+        const entryRoot = path_1.join(workPath, entryDir);
         for (const entrypoint of Object.keys(entrypoints)) {
             const { output } = await buildRuntime({
                 files: sourceFilesPreBuild,
@@ -96,22 +100,6 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
             // so we don't want to pollute this space unnecessarily. That means we have to clean
             // up files that were created by the build, which is done further below.
             const sourceFilesAfterBuild = await getSourceFiles(workPath, ignoreFilter);
-            // Further down, we will need the filename of the Lambda handler
-            // for placing it inside `server/pages/api`, but because Legacy Runtimes
-            // don't expose the filename directly, we have to construct it
-            // from the handler name, and then find the matching file further below,
-            // because we don't yet know its extension here.
-            const handler = output.handler;
-            const handlerMethod = handler.split('.').reverse()[0];
-            const handlerFileName = handler.replace(`.${handlerMethod}`, '');
-            pages[entrypoint] = {
-                handler: handler,
-                runtime: output.runtime,
-                memory: output.memory,
-                maxDuration: output.maxDuration,
-                environment: output.environment,
-                allowQuery: output.allowQuery,
-            };
             // @ts-ignore This symbol is a private API
             const lambdaFiles = output[lambda_1.FILES_SYMBOL];
             // When deploying, the `files` that are passed to the Legacy Runtimes already
@@ -123,56 +111,174 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
                     delete lambdaFiles[file];
                 }
             }
-
-
-            }
-            const
-
-
+            let handlerFileBase = output.handler;
+            let handlerFile = lambdaFiles[handlerFileBase];
+            const { handler } = output;
+            const handlerMethod = handler.split('.').pop();
+            const handlerFileName = handler.replace(`.${handlerMethod}`, '');
+            // For compiled languages, the launcher file for the Lambda generated
+            // by the Legacy Runtime matches the `handler` defined for it, but for
+            // interpreted languages, the `handler` consists of the launcher file name
+            // without an extension, plus the name of the method inside of that file
+            // that should be invoked, so we have to construct the file path explicitly.
+            if (!handlerFile) {
+                handlerFileBase = handlerFileName + ext;
+                handlerFile = lambdaFiles[handlerFileBase];
             }
-
+            if (!handlerFile || !handlerFile.fsPath) {
+                throw new Error(`Could not find a handler file. Please ensure that \`files\` for the returned \`Lambda\` contains an \`FileFsRef\` named "${handlerFileBase}" with a valid \`fsPath\`.`);
+            }
+            const handlerExtName = path_1.extname(handlerFile.fsPath);
+            const entryBase = path_1.basename(entrypoint).replace(ext, handlerExtName);
+            const entryPath = path_1.join(path_1.dirname(entrypoint), entryBase);
+            const entry = path_1.join(entryRoot, entryPath);
+            // We never want to link here, only copy, because the launcher
+            // file often has the same name for every entrypoint, which means that
+            // every build for every entrypoint overwrites the launcher of the previous
+            // one, so linking would end with a broken reference.
             await fs_extra_1.default.ensureDir(path_1.dirname(entry));
-            await
-            const
-
-
+            await fs_extra_1.default.copy(handlerFile.fsPath, entry);
+            const newFilesEntrypoint = [];
+            const newDirectoriesEntrypoint = [];
+            const preBuildFiles = Object.values(sourceFilesPreBuild).map(file => {
+                return file.fsPath;
+            });
+            // Generate a list of directories and files that weren't present
+            // before the entrypoint was processed by the Legacy Runtime, so
+            // that we can perform a cleanup later. We need to divide into files
+            // and directories because only cleaning up files might leave empty
+            // directories, and listing directories separately also speeds up the
+            // build because we can just delete them, which wipes all of their nested
+            // paths, instead of iterating through all files that should be deleted.
             for (const file in sourceFilesAfterBuild) {
                 if (!sourceFilesPreBuild[file]) {
                     const path = sourceFilesAfterBuild[file].fsPath;
-
+                    const dirPath = path_1.dirname(path);
+                    // If none of the files that were present before the entrypoint
+                    // was processed are contained within the directory we're looking
+                    // at right now, then we know it's a newly added directory
+                    // and it can therefore be removed later on.
+                    const isNewDir = !preBuildFiles.some(filePath => {
+                        return path_1.dirname(filePath).startsWith(dirPath);
+                    });
+                    // Check out the list of tracked directories that were
+                    // newly added and see if one of them contains the path
+                    // we're looking at.
+                    const hasParentDir = newDirectoriesEntrypoint.some(dir => {
+                        return path.startsWith(dir);
+                    });
+                    // If we have already tracked a directory that was newly
+                    // added that sits above the file or directory that we're
+                    // looking at, we don't need to add more entries to the list
+                    // because when the parent will get removed in the future,
+                    // all of its children (and therefore the path we're looking at)
+                    // will automatically get removed anyways.
+                    if (hasParentDir) {
+                        continue;
+                    }
+                    if (isNewDir) {
+                        newDirectoriesEntrypoint.push(dirPath);
+                    }
+                    else {
+                        newFilesEntrypoint.push(path);
+                    }
                 }
             }
-            await Promise.all(toRemove);
             const tracedFiles = [];
-            Object.entries(lambdaFiles).
+            const linkers = Object.entries(lambdaFiles).map(async ([relPath, file]) => {
                 const newPath = path_1.join(traceDir, relPath);
                 // The handler was already moved into position above.
-                if (relPath ===
+                if (relPath === handlerFileBase) {
                     return;
                 }
                 tracedFiles.push({ absolutePath: newPath, relativePath: relPath });
-
-
+                const { fsPath, type } = file;
+                if (fsPath) {
+                    await fs_extra_1.default.ensureDir(path_1.dirname(newPath));
+                    const isNewFile = newFilesEntrypoint.includes(fsPath);
+                    const isInsideNewDirectory = newDirectoriesEntrypoint.some(dirPath => {
+                        return fsPath.startsWith(dirPath);
+                    });
+                    // With this, we're making sure that files in the `workPath` that existed
+                    // before the Legacy Runtime was invoked (source files) are linked from
+                    // `.output` instead of copying there (the latter only happens if linking fails),
+                    // which is the fastest solution. However, files that are created fresh
+                    // by the Legacy Runtimes are always copied, because their link destinations
+                    // are likely to be overwritten every time an entrypoint is processed by
+                    // the Legacy Runtime. This is likely to overwrite the destination on subsequent
+                    // runs, but that's also how `workPath` used to work originally, without
+                    // the File System API (meaning that there was one `workPath` for all entrypoints).
+                    if (isNewFile || isInsideNewDirectory) {
+                        _1.debug(`Copying from ${fsPath} to ${newPath}`);
+                        await fs_extra_1.default.copy(fsPath, newPath);
+                    }
+                    else {
+                        await linkOrCopy(fsPath, newPath);
+                    }
                 }
-                else if (
+                else if (type === 'FileBlob') {
                     const { data, mode } = file;
                     await fs_extra_1.default.writeFile(newPath, data, { mode });
                 }
                 else {
-                    throw new Error(`Unknown file type: ${
+                    throw new Error(`Unknown file type: ${type}`);
                 }
             });
-
+            linkersRuntime = linkersRuntime.concat(linkers);
+            const nft = `${entry}.nft.json`;
             const json = JSON.stringify({
                 version: 1,
                 files: tracedFiles.map(file => ({
                     input: normalize_path_1.normalizePath(path_1.relative(path_1.dirname(nft), file.absolutePath)),
-
+                    // We'd like to place all the dependency files right next
+                    // to the final launcher file inside of the Lambda.
+                    output: normalize_path_1.normalizePath(path_1.join(entryDir, 'api', file.relativePath)),
                 })),
             });
             await fs_extra_1.default.ensureDir(path_1.dirname(nft));
             await fs_extra_1.default.writeFile(nft, json);
+            // Extend the list of directories and files that were created by the
+            // Legacy Runtime with the list of directories and files that were
+            // created for the entrypoint that was just processed above.
+            newPathsRuntime = new Set([
+                ...newPathsRuntime,
+                ...newFilesEntrypoint,
+                ...newDirectoriesEntrypoint,
+            ]);
+            // Add an entry that will later on be added to the `functions-manifest.json`
+            // file that is placed inside of the `.output` directory.
+            pages[normalize_path_1.normalizePath(entryPath)] = {
+                // Because the underlying file used as a handler was placed
+                // inside `.output/server/pages/api`, it no longer has the name it originally
+                // had and is now named after the API Route that it's responsible for,
+                // so we have to adjust the name of the Lambda handler accordingly.
+                handler: handler.replace(handlerFileName, path_1.parse(entry).name),
+                runtime: output.runtime,
+                memory: output.memory,
+                maxDuration: output.maxDuration,
+                environment: output.environment,
+                allowQuery: output.allowQuery,
+            };
         }
+        // Instead of of waiting for all of the linking to be done for every
+        // entrypoint before processing the next one, we immediately handle all
+        // of them one after the other, while then waiting for the linking
+        // to finish right here, before we clean up newly created files below.
+        await Promise.all(linkersRuntime);
+        // A list of all the files that were created by the Legacy Runtime,
+        // which we'd like to remove from the File System.
+        const toRemove = Array.from(newPathsRuntime).map(path => {
+            _1.debug(`Removing ${path} as part of cleanup`);
+            return fs_extra_1.default.remove(path);
+        });
+        // Once all the entrypoints have been processed, we'd like to
+        // remove all the files from `workPath` that originally weren't present
+        // before the Legacy Runtime began running, because the `workPath`
+        // is nowadays the directory in which the user keeps their source code, since
+        // we're no longer running separate parallel builds for every Legacy Runtime.
+        await Promise.all(toRemove);
+        // Add any Serverless Functions that were exposed by the Legacy Runtime
+        // to the `functions-manifest.json` file provided in `.output`.
         await updateFunctionsManifest({ workPath, pages });
     };
 }
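For reference, the `.nft.json` trace file that the new code writes next to each entry (the `JSON.stringify({ version: 1, files: ... })` call above) takes roughly the following shape. The paths are purely illustrative, assuming a hypothetical entry placed at `.output/server/pages/api/date.go` with a single traced file `go.mod` in an `api-routes-go` trace directory; they are not taken from the diff:

    {
      "version": 1,
      "files": [
        {
          "input": "../../../inputs/api-routes-go/go.mod",
          "output": ".output/server/pages/api/go.mod"
        }
      ]
    }

Each `input` is resolved relative to the directory containing the `.nft.json` file, while `output` points at the location next to the final launcher file inside the Lambda, as the inline comments above describe.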
package/dist/index.js
CHANGED
@@ -32824,6 +32824,10 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
         // need to be able to easily inspect the output.
         `api-routes-${pluginName}`);
         await fs_extra_1.default.ensureDir(traceDir);
+        let newPathsRuntime = new Set();
+        let linkersRuntime = [];
+        const entryDir = path_1.join('.output', 'server', 'pages');
+        const entryRoot = path_1.join(workPath, entryDir);
         for (const entrypoint of Object.keys(entrypoints)) {
             const { output } = await buildRuntime({
                 files: sourceFilesPreBuild,
@@ -32843,22 +32847,6 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
             // so we don't want to pollute this space unnecessarily. That means we have to clean
             // up files that were created by the build, which is done further below.
             const sourceFilesAfterBuild = await getSourceFiles(workPath, ignoreFilter);
-            // Further down, we will need the filename of the Lambda handler
-            // for placing it inside `server/pages/api`, but because Legacy Runtimes
-            // don't expose the filename directly, we have to construct it
-            // from the handler name, and then find the matching file further below,
-            // because we don't yet know its extension here.
-            const handler = output.handler;
-            const handlerMethod = handler.split('.').reverse()[0];
-            const handlerFileName = handler.replace(`.${handlerMethod}`, '');
-            pages[entrypoint] = {
-                handler: handler,
-                runtime: output.runtime,
-                memory: output.memory,
-                maxDuration: output.maxDuration,
-                environment: output.environment,
-                allowQuery: output.allowQuery,
-            };
             // @ts-ignore This symbol is a private API
             const lambdaFiles = output[lambda_1.FILES_SYMBOL];
             // When deploying, the `files` that are passed to the Legacy Runtimes already
@@ -32870,56 +32858,174 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
                     delete lambdaFiles[file];
                 }
             }
-
-
-            }
-            const
-
-
+            let handlerFileBase = output.handler;
+            let handlerFile = lambdaFiles[handlerFileBase];
+            const { handler } = output;
+            const handlerMethod = handler.split('.').pop();
+            const handlerFileName = handler.replace(`.${handlerMethod}`, '');
+            // For compiled languages, the launcher file for the Lambda generated
+            // by the Legacy Runtime matches the `handler` defined for it, but for
+            // interpreted languages, the `handler` consists of the launcher file name
+            // without an extension, plus the name of the method inside of that file
+            // that should be invoked, so we have to construct the file path explicitly.
+            if (!handlerFile) {
+                handlerFileBase = handlerFileName + ext;
+                handlerFile = lambdaFiles[handlerFileBase];
             }
-
+            if (!handlerFile || !handlerFile.fsPath) {
+                throw new Error(`Could not find a handler file. Please ensure that \`files\` for the returned \`Lambda\` contains an \`FileFsRef\` named "${handlerFileBase}" with a valid \`fsPath\`.`);
+            }
+            const handlerExtName = path_1.extname(handlerFile.fsPath);
+            const entryBase = path_1.basename(entrypoint).replace(ext, handlerExtName);
+            const entryPath = path_1.join(path_1.dirname(entrypoint), entryBase);
+            const entry = path_1.join(entryRoot, entryPath);
+            // We never want to link here, only copy, because the launcher
+            // file often has the same name for every entrypoint, which means that
+            // every build for every entrypoint overwrites the launcher of the previous
+            // one, so linking would end with a broken reference.
             await fs_extra_1.default.ensureDir(path_1.dirname(entry));
-            await
-            const
-
-
+            await fs_extra_1.default.copy(handlerFile.fsPath, entry);
+            const newFilesEntrypoint = [];
+            const newDirectoriesEntrypoint = [];
+            const preBuildFiles = Object.values(sourceFilesPreBuild).map(file => {
+                return file.fsPath;
+            });
+            // Generate a list of directories and files that weren't present
+            // before the entrypoint was processed by the Legacy Runtime, so
+            // that we can perform a cleanup later. We need to divide into files
+            // and directories because only cleaning up files might leave empty
+            // directories, and listing directories separately also speeds up the
+            // build because we can just delete them, which wipes all of their nested
+            // paths, instead of iterating through all files that should be deleted.
             for (const file in sourceFilesAfterBuild) {
                 if (!sourceFilesPreBuild[file]) {
                     const path = sourceFilesAfterBuild[file].fsPath;
-
+                    const dirPath = path_1.dirname(path);
+                    // If none of the files that were present before the entrypoint
+                    // was processed are contained within the directory we're looking
+                    // at right now, then we know it's a newly added directory
+                    // and it can therefore be removed later on.
+                    const isNewDir = !preBuildFiles.some(filePath => {
+                        return path_1.dirname(filePath).startsWith(dirPath);
+                    });
+                    // Check out the list of tracked directories that were
+                    // newly added and see if one of them contains the path
+                    // we're looking at.
+                    const hasParentDir = newDirectoriesEntrypoint.some(dir => {
+                        return path.startsWith(dir);
+                    });
+                    // If we have already tracked a directory that was newly
+                    // added that sits above the file or directory that we're
+                    // looking at, we don't need to add more entries to the list
+                    // because when the parent will get removed in the future,
+                    // all of its children (and therefore the path we're looking at)
+                    // will automatically get removed anyways.
+                    if (hasParentDir) {
+                        continue;
+                    }
+                    if (isNewDir) {
+                        newDirectoriesEntrypoint.push(dirPath);
+                    }
+                    else {
+                        newFilesEntrypoint.push(path);
+                    }
                 }
             }
-            await Promise.all(toRemove);
             const tracedFiles = [];
-            Object.entries(lambdaFiles).
+            const linkers = Object.entries(lambdaFiles).map(async ([relPath, file]) => {
                 const newPath = path_1.join(traceDir, relPath);
                 // The handler was already moved into position above.
-                if (relPath ===
+                if (relPath === handlerFileBase) {
                     return;
                 }
                 tracedFiles.push({ absolutePath: newPath, relativePath: relPath });
-
-
+                const { fsPath, type } = file;
+                if (fsPath) {
+                    await fs_extra_1.default.ensureDir(path_1.dirname(newPath));
+                    const isNewFile = newFilesEntrypoint.includes(fsPath);
+                    const isInsideNewDirectory = newDirectoriesEntrypoint.some(dirPath => {
+                        return fsPath.startsWith(dirPath);
+                    });
+                    // With this, we're making sure that files in the `workPath` that existed
+                    // before the Legacy Runtime was invoked (source files) are linked from
+                    // `.output` instead of copying there (the latter only happens if linking fails),
+                    // which is the fastest solution. However, files that are created fresh
+                    // by the Legacy Runtimes are always copied, because their link destinations
+                    // are likely to be overwritten every time an entrypoint is processed by
+                    // the Legacy Runtime. This is likely to overwrite the destination on subsequent
+                    // runs, but that's also how `workPath` used to work originally, without
+                    // the File System API (meaning that there was one `workPath` for all entrypoints).
+                    if (isNewFile || isInsideNewDirectory) {
+                        _1.debug(`Copying from ${fsPath} to ${newPath}`);
+                        await fs_extra_1.default.copy(fsPath, newPath);
+                    }
+                    else {
+                        await linkOrCopy(fsPath, newPath);
+                    }
                 }
-                else if (
+                else if (type === 'FileBlob') {
                     const { data, mode } = file;
                     await fs_extra_1.default.writeFile(newPath, data, { mode });
                 }
                 else {
-                    throw new Error(`Unknown file type: ${
+                    throw new Error(`Unknown file type: ${type}`);
                 }
             });
-
+            linkersRuntime = linkersRuntime.concat(linkers);
+            const nft = `${entry}.nft.json`;
             const json = JSON.stringify({
                 version: 1,
                 files: tracedFiles.map(file => ({
                     input: normalize_path_1.normalizePath(path_1.relative(path_1.dirname(nft), file.absolutePath)),
-
+                    // We'd like to place all the dependency files right next
+                    // to the final launcher file inside of the Lambda.
+                    output: normalize_path_1.normalizePath(path_1.join(entryDir, 'api', file.relativePath)),
                 })),
             });
             await fs_extra_1.default.ensureDir(path_1.dirname(nft));
             await fs_extra_1.default.writeFile(nft, json);
+            // Extend the list of directories and files that were created by the
+            // Legacy Runtime with the list of directories and files that were
+            // created for the entrypoint that was just processed above.
+            newPathsRuntime = new Set([
+                ...newPathsRuntime,
+                ...newFilesEntrypoint,
+                ...newDirectoriesEntrypoint,
+            ]);
+            // Add an entry that will later on be added to the `functions-manifest.json`
+            // file that is placed inside of the `.output` directory.
+            pages[normalize_path_1.normalizePath(entryPath)] = {
+                // Because the underlying file used as a handler was placed
+                // inside `.output/server/pages/api`, it no longer has the name it originally
+                // had and is now named after the API Route that it's responsible for,
+                // so we have to adjust the name of the Lambda handler accordingly.
+                handler: handler.replace(handlerFileName, path_1.parse(entry).name),
+                runtime: output.runtime,
+                memory: output.memory,
+                maxDuration: output.maxDuration,
+                environment: output.environment,
+                allowQuery: output.allowQuery,
+            };
         }
+        // Instead of of waiting for all of the linking to be done for every
+        // entrypoint before processing the next one, we immediately handle all
+        // of them one after the other, while then waiting for the linking
+        // to finish right here, before we clean up newly created files below.
+        await Promise.all(linkersRuntime);
+        // A list of all the files that were created by the Legacy Runtime,
+        // which we'd like to remove from the File System.
+        const toRemove = Array.from(newPathsRuntime).map(path => {
+            _1.debug(`Removing ${path} as part of cleanup`);
+            return fs_extra_1.default.remove(path);
+        });
+        // Once all the entrypoints have been processed, we'd like to
+        // remove all the files from `workPath` that originally weren't present
+        // before the Legacy Runtime began running, because the `workPath`
+        // is nowadays the directory in which the user keeps their source code, since
+        // we're no longer running separate parallel builds for every Legacy Runtime.
+        await Promise.all(toRemove);
+        // Add any Serverless Functions that were exposed by the Legacy Runtime
+        // to the `functions-manifest.json` file provided in `.output`.
         await updateFunctionsManifest({ workPath, pages });
     };
 }
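Both compiled files above call a `linkOrCopy` helper for files that already existed in `workPath` before the Legacy Runtime ran; its definition sits outside these hunks. Going by the inline comment ("are linked from `.output` instead of copying there (the latter only happens if linking fails)"), a minimal sketch of such a helper could look like the following; this is an assumption for illustration, and the actual implementation in the package may differ:

    async function linkOrCopy(existingPath, newPath) {
        try {
            // Hard-linking is much cheaper than copying the file contents.
            await fs_extra_1.default.ensureLink(existingPath, newPath);
        }
        catch (err) {
            // Fall back to a plain copy whenever linking is not possible.
            await fs_extra_1.default.copy(existingPath, newPath);
        }
    }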
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@vercel/build-utils",
-  "version": "2.12.3-canary.32",
+  "version": "2.12.3-canary.36",
   "license": "MIT",
   "main": "./dist/index.js",
   "types": "./dist/index.d.js",
@@ -49,5 +49,5 @@
     "typescript": "4.3.4",
     "yazl": "2.4.3"
   },
-  "gitHead": "
+  "gitHead": "1c3701628d88e82f6e0535ecdba2c5e91ce61976"
 }