@vercel/build-utils 2.12.3-canary.30 → 2.12.3-canary.34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/convert-runtime-to-plugin.js +178 -29
- package/dist/index.js +178 -29
- package/package.json +2 -2
package/dist/convert-runtime-to-plugin.js
CHANGED
@@ -24,6 +24,22 @@ const shouldIgnorePath = (file, ignoreFilter, ignoreFile) => {
     }
     return isNative || ignoreFilter(file);
 };
+const getSourceFiles = async (workPath, ignoreFilter) => {
+    const list = await glob_1.default('**', {
+        cwd: workPath,
+    });
+    // We're not passing this as an `ignore` filter to the `glob` function above,
+    // so that we can re-use exactly the same `getIgnoreFilter` method that the
+    // Build Step uses (literally the same code). Note that this exclusion only applies
+    // when deploying. Locally, another exclusion is needed, which is handled
+    // further below in the `convertRuntimeToPlugin` function.
+    for (const file in list) {
+        if (shouldIgnorePath(file, ignoreFilter, true)) {
+            delete list[file];
+        }
+    }
+    return list;
+};
 /**
  * Convert legacy Runtime to a Plugin.
  * @param buildRuntime - a legacy build() function from a Runtime
@@ -33,22 +49,25 @@ const shouldIgnorePath = (file, ignoreFilter, ignoreFile) => {
 function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
     // This `build()` signature should match `plugin.build()` signature in `vercel build`.
     return async function build({ workPath }) {
-        const opts = { cwd: workPath };
-        const files = await glob_1.default('**', opts);
         // We also don't want to provide any files to Runtimes that were ignored
         // through `.vercelignore` or `.nowignore`, because the Build Step does the same.
         const ignoreFilter = await _1.getIgnoreFilter(workPath);
-        //
-        //
-
-        //
-
-
-
+        // Retrieve the files that are currently available on the File System,
+        // before the Legacy Runtime has even started to build.
+        const sourceFilesPreBuild = await getSourceFiles(workPath, ignoreFilter);
+        // Instead of doing another `glob` to get all the matching source files,
+        // we'll filter the list of existing files down to only the ones
+        // that are matching the entrypoint pattern, so we're first creating
+        // a clean new list to begin.
+        const entrypoints = Object.assign({}, sourceFilesPreBuild);
+        const entrypointMatch = new RegExp(`^api/.*${ext}$`);
+        // Up next, we'll strip out the files from the list of entrypoints
+        // that aren't actually considered entrypoints.
+        for (const file in entrypoints) {
+            if (!entrypointMatch.test(file)) {
+                delete entrypoints[file];
             }
         }
-        const entrypointPattern = `api/**/*${ext}`;
-        const entrypoints = await glob_1.default(entrypointPattern, opts);
         const pages = {};
         const pluginName = packageName.replace('vercel-plugin-', '');
         const traceDir = path_1.join(workPath, `.output`, `inputs`,
@@ -58,9 +77,11 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
         // need to be able to easily inspect the output.
         `api-routes-${pluginName}`);
         await fs_extra_1.default.ensureDir(traceDir);
+        let newPathsRuntime = new Set();
+        let linkersRuntime = [];
         for (const entrypoint of Object.keys(entrypoints)) {
             const { output } = await buildRuntime({
-                files,
+                files: sourceFilesPreBuild,
                 entrypoint,
                 workPath,
                 config: {
@@ -70,14 +91,21 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
                     avoidTopLevelInstall: true,
                 },
             });
-
-
-
-
-
-
-
-
+            // Legacy Runtimes tend to pollute the `workPath` with compiled results,
+            // because the `workPath` used to be a place that was a place where they could
+            // just put anything, but nowadays it's the working directory of the `vercel build`
+            // command, which is the place where the developer keeps their source files,
+            // so we don't want to pollute this space unnecessarily. That means we have to clean
+            // up files that were created by the build, which is done further below.
+            const sourceFilesAfterBuild = await getSourceFiles(workPath, ignoreFilter);
+            // Further down, we will need the filename of the Lambda handler
+            // for placing it inside `server/pages/api`, but because Legacy Runtimes
+            // don't expose the filename directly, we have to construct it
+            // from the handler name, and then find the matching file further below,
+            // because we don't yet know its extension here.
+            const handler = output.handler;
+            const handlerMethod = handler.split('.').reverse()[0];
+            const handlerFileName = handler.replace(`.${handlerMethod}`, '');
             // @ts-ignore This symbol is a private API
             const lambdaFiles = output[lambda_1.FILES_SYMBOL];
             // When deploying, the `files` that are passed to the Legacy Runtimes already
@@ -89,35 +117,156 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
                     delete lambdaFiles[file];
                 }
             }
+            const handlerFilePath = Object.keys(lambdaFiles).find(item => {
+                return path_1.parse(item).name === handlerFileName;
+            });
+            const handlerFileOrigin = lambdaFiles[handlerFilePath || ''].fsPath;
+            if (!handlerFileOrigin) {
+                throw new Error(`Could not find a handler file. Please ensure that the list of \`files\` defined for the returned \`Lambda\` contains a file with the name ${handlerFileName} (+ any extension).`);
+            }
             const entry = path_1.join(workPath, '.output', 'server', 'pages', entrypoint);
+            // We never want to link here, only copy, because the launcher
+            // file often has the same name for every entrypoint, which means that
+            // every build for every entrypoint overwrites the launcher of the previous
+            // one, so linking would end with a broken reference.
             await fs_extra_1.default.ensureDir(path_1.dirname(entry));
-            await
+            await fs_extra_1.default.copy(handlerFileOrigin, entry);
+            const newFilesEntrypoint = [];
+            const newDirectoriesEntrypoint = [];
+            const preBuildFiles = Object.values(sourceFilesPreBuild).map(file => {
+                return file.fsPath;
+            });
+            // Generate a list of directories and files that weren't present
+            // before the entrypoint was processed by the Legacy Runtime, so
+            // that we can perform a cleanup later. We need to divide into files
+            // and directories because only cleaning up files might leave empty
+            // directories, and listing directories separately also speeds up the
+            // build because we can just delete them, which wipes all of their nested
+            // paths, instead of iterating through all files that should be deleted.
+            for (const file in sourceFilesAfterBuild) {
+                if (!sourceFilesPreBuild[file]) {
+                    const path = sourceFilesAfterBuild[file].fsPath;
+                    const dirPath = path_1.dirname(path);
+                    // If none of the files that were present before the entrypoint
+                    // was processed are contained within the directory we're looking
+                    // at right now, then we know it's a newly added directory
+                    // and it can therefore be removed later on.
+                    const isNewDir = !preBuildFiles.some(filePath => {
+                        return path_1.dirname(filePath).startsWith(dirPath);
+                    });
+                    // Check out the list of tracked directories that were
+                    // newly added and see if one of them contains the path
+                    // we're looking at.
+                    const hasParentDir = newDirectoriesEntrypoint.some(dir => {
+                        return path.startsWith(dir);
+                    });
+                    // If we have already tracked a directory that was newly
+                    // added that sits above the file or directory that we're
+                    // looking at, we don't need to add more entries to the list
+                    // because when the parent will get removed in the future,
+                    // all of its children (and therefore the path we're looking at)
+                    // will automatically get removed anyways.
+                    if (hasParentDir) {
+                        continue;
+                    }
+                    if (isNewDir) {
+                        newDirectoriesEntrypoint.push(dirPath);
+                    }
+                    else {
+                        newFilesEntrypoint.push(path);
+                    }
+                }
+            }
             const tracedFiles = [];
-            Object.entries(lambdaFiles).
+            const linkers = Object.entries(lambdaFiles).map(async ([relPath, file]) => {
                 const newPath = path_1.join(traceDir, relPath);
+                // The handler was already moved into position above.
+                if (relPath === handlerFilePath) {
+                    return;
+                }
                 tracedFiles.push({ absolutePath: newPath, relativePath: relPath });
-
-
+                const { fsPath, type } = file;
+                if (fsPath) {
+                    await fs_extra_1.default.ensureDir(path_1.dirname(newPath));
+                    const isNewFile = newFilesEntrypoint.includes(fsPath);
+                    const isInsideNewDirectory = newDirectoriesEntrypoint.some(dirPath => {
+                        return fsPath.startsWith(dirPath);
+                    });
+                    // With this, we're making sure that files in the `workPath` that existed
+                    // before the Legacy Runtime was invoked (source files) are linked from
+                    // `.output` instead of copying there (the latter only happens if linking fails),
+                    // which is the fastest solution. However, files that are created fresh
+                    // by the Legacy Runtimes are always copied, because their link destinations
+                    // are likely to be overwritten every time an entrypoint is processed by
+                    // the Legacy Runtime. This is likely to overwrite the destination on subsequent
+                    // runs, but that's also how `workPath` used to work originally, without
+                    // the File System API (meaning that there was one `workPath` for all entrypoints).
+                    if (isNewFile || isInsideNewDirectory) {
+                        _1.debug(`Copying from ${fsPath} to ${newPath}`);
+                        await fs_extra_1.default.copy(fsPath, newPath);
+                    }
+                    else {
+                        await linkOrCopy(fsPath, newPath);
+                    }
                 }
-                else if (
+                else if (type === 'FileBlob') {
                     const { data, mode } = file;
                     await fs_extra_1.default.writeFile(newPath, data, { mode });
                 }
                 else {
-                    throw new Error(`Unknown file type: ${
+                    throw new Error(`Unknown file type: ${type}`);
                 }
             });
+            linkersRuntime = linkersRuntime.concat(linkers);
             const nft = path_1.join(workPath, '.output', 'server', 'pages', `${entrypoint}.nft.json`);
             const json = JSON.stringify({
                 version: 1,
-                files: tracedFiles.map(
-                    input: normalize_path_1.normalizePath(path_1.relative(nft,
-                    output: normalize_path_1.normalizePath(
+                files: tracedFiles.map(file => ({
+                    input: normalize_path_1.normalizePath(path_1.relative(path_1.dirname(nft), file.absolutePath)),
+                    output: normalize_path_1.normalizePath(file.relativePath),
                 })),
             });
             await fs_extra_1.default.ensureDir(path_1.dirname(nft));
             await fs_extra_1.default.writeFile(nft, json);
+            // Extend the list of directories and files that were created by the
+            // Legacy Runtime with the list of directories and files that were
+            // created for the entrypoint that was just processed above.
+            newPathsRuntime = new Set([
+                ...newPathsRuntime,
+                ...newFilesEntrypoint,
+                ...newDirectoriesEntrypoint,
+            ]);
+            const apiRouteHandler = `${path_1.parse(entry).name}.${handlerMethod}`;
+            // Add an entry that will later on be added to the `functions-manifest.json`
+            // file that is placed inside of the `.output` directory.
+            pages[entrypoint] = {
+                handler: apiRouteHandler,
+                runtime: output.runtime,
+                memory: output.memory,
+                maxDuration: output.maxDuration,
+                environment: output.environment,
+                allowQuery: output.allowQuery,
+            };
         }
+        // Instead of of waiting for all of the linking to be done for every
+        // entrypoint before processing the next one, we immediately handle all
+        // of them one after the other, while then waiting for the linking
+        // to finish right here, before we clean up newly created files below.
+        await Promise.all(linkersRuntime);
+        // A list of all the files that were created by the Legacy Runtime,
+        // which we'd like to remove from the File System.
+        const toRemove = Array.from(newPathsRuntime).map(path => {
+            _1.debug(`Removing ${path} as part of cleanup`);
+            return fs_extra_1.default.remove(path);
+        });
+        // Once all the entrypoints have been processed, we'd like to
+        // remove all the files from `workPath` that originally weren't present
+        // before the Legacy Runtime began running, because the `workPath`
+        // is nowadays the directory in which the user keeps their source code, since
+        // we're no longer running separate parallel builds for every Legacy Runtime.
+        await Promise.all(toRemove);
+        // Add any Serverless Functions that were exposed by the Legacy Runtime
+        // to the `functions-manifest.json` file provided in `.output`.
         await updateFunctionsManifest({ workPath, pages });
     };
 }
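The last hunk above also changes how `input` paths are computed for the `.nft.json` trace files: the removed line passed the `.nft.json` file path itself as the base of `path_1.relative`, while the new code passes its containing directory via `path_1.dirname(nft)`, which drops one superfluous `../` from every entry. A minimal Node sketch of the difference, using hypothetical paths (the `/repo` work path, Python plugin name, and `util/db.py` dependency are illustrative assumptions, not taken from the package):

const path = require('path');

// Hypothetical locations mirroring the layout built above:
// traceDir = <workPath>/.output/inputs/api-routes-<plugin>
// nft      = <workPath>/.output/server/pages/<entrypoint>.nft.json
const nft = '/repo/.output/server/pages/api/hello.py.nft.json';
const traced = '/repo/.output/inputs/api-routes-python/util/db.py';

// Old behavior: relative to the .nft.json file path itself, one extra "../":
console.log(path.relative(nft, traced));
//=> ../../../../inputs/api-routes-python/util/db.py

// New behavior: relative to the file's directory:
console.log(path.relative(path.dirname(nft), traced));
//=> ../../../inputs/api-routes-python/util/db.py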
package/dist/index.js
CHANGED
The changes to dist/index.js repeat, verbatim, the additions and removals shown above for package/dist/convert-runtime-to-plugin.js (that module is bundled into index.js); only the hunk line numbers differ:
@@ -32771,6 +32771,22 @@ const shouldIgnorePath = (file, ignoreFilter, ignoreFile) => {
@@ -32780,22 +32796,25 @@ const shouldIgnorePath = (file, ignoreFilter, ignoreFile) => {
@@ -32805,9 +32824,11 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
@@ -32817,14 +32838,21 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
@@ -32836,35 +32864,156 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
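Both copies of the compiled module call a `linkOrCopy` helper that sits outside the changed hunks, so it never appears in this diff. For orientation only, a helper with that contract typically attempts a hard link and falls back to copying; the sketch below is an assumption inferred from the call sites above (reusing the package's fs-extra dependency), not the package's actual implementation:

const fs = require('fs-extra');

// Assumed contract: hard-link `existingPath` to `newPath` when possible (cheap,
// no duplicated bytes); if the link already exists, do nothing; on any other
// failure (e.g. a cross-device link), fall back to a plain copy.
async function linkOrCopy(existingPath, newPath) {
    try {
        await fs.createLink(existingPath, newPath);
    }
    catch (err) {
        if (err.code !== 'EEXIST') {
            await fs.copyFile(existingPath, newPath);
        }
    }
}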
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@vercel/build-utils",
-  "version": "2.12.3-canary.30",
+  "version": "2.12.3-canary.34",
   "license": "MIT",
   "main": "./dist/index.js",
   "types": "./dist/index.d.js",
@@ -49,5 +49,5 @@
     "typescript": "4.3.4",
     "yazl": "2.4.3"
   },
-  "gitHead": "
+  "gitHead": "34f4222ca2d3cc5134469daa355a7f67c2054c9b"
 }