@vercel/build-utils 2.12.3-canary.32 → 2.12.3-canary.33

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -77,6 +77,8 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  // need to be able to easily inspect the output.
  `api-routes-${pluginName}`);
  await fs_extra_1.default.ensureDir(traceDir);
+ let newPathsRuntime = new Set();
+ let linkersRuntime = [];
  for (const entrypoint of Object.keys(entrypoints)) {
  const { output } = await buildRuntime({
  files: sourceFilesPreBuild,
@@ -133,35 +135,93 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  const entry = path_1.join(workPath, '.output', 'server', 'pages', entrypoint);
  await fs_extra_1.default.ensureDir(path_1.dirname(entry));
  await linkOrCopy(handlerFileOrigin, entry);
- const toRemove = [];
- // You can find more details about this at the point where the
- // `sourceFilesAfterBuild` is created originally.
+ const newFilesEntrypoint = [];
+ const newDirectoriesEntrypoint = [];
+ const preBuildFiles = Object.values(sourceFilesPreBuild).map(file => {
+ return file.fsPath;
+ });
+ // Generate a list of directories and files that weren't present
+ // before the entrypoint was processed by the Legacy Runtime, so
+ // that we can perform a cleanup later. We need to divide into files
+ // and directories because only cleaning up files might leave empty
+ // directories, and listing directories separately also speeds up the
+ // build because we can just delete them, which wipes all of their nested
+ // paths, instead of iterating through all files that should be deleted.
  for (const file in sourceFilesAfterBuild) {
  if (!sourceFilesPreBuild[file]) {
  const path = sourceFilesAfterBuild[file].fsPath;
- toRemove.push(fs_extra_1.default.remove(path));
+ const dirPath = path_1.dirname(path);
+ // If none of the files that were present before the entrypoint
+ // was processed are contained within the directory we're looking
+ // at right now, then we know it's a newly added directory
+ // and it can therefore be removed later on.
+ const isNewDir = !preBuildFiles.some(filePath => {
+ return path_1.dirname(filePath).startsWith(dirPath);
+ });
+ // Check out the list of tracked directories that were
+ // newly added and see if one of them contains the path
+ // we're looking at.
+ const hasParentDir = newDirectoriesEntrypoint.some(dir => {
+ return path.startsWith(dir);
+ });
+ // If we have already tracked a directory that was newly
+ // added that sits above the file or directory that we're
+ // looking at, we don't need to add more entries to the list
+ // because when the parent will get removed in the future,
+ // all of its children (and therefore the path we're looking at)
+ // will automatically get removed anyways.
+ if (hasParentDir) {
+ continue;
+ }
+ if (isNewDir) {
+ newDirectoriesEntrypoint.push(dirPath);
+ }
+ else {
+ newFilesEntrypoint.push(path);
+ }
  }
  }
- await Promise.all(toRemove);
  const tracedFiles = [];
- Object.entries(lambdaFiles).forEach(async ([relPath, file]) => {
+ const linkers = Object.entries(lambdaFiles).map(async ([relPath, file]) => {
  const newPath = path_1.join(traceDir, relPath);
  // The handler was already moved into position above.
  if (relPath === handlerFilePath) {
  return;
  }
  tracedFiles.push({ absolutePath: newPath, relativePath: relPath });
- if (file.fsPath) {
- await linkOrCopy(file.fsPath, newPath);
+ const { fsPath, type } = file;
+ if (fsPath) {
+ await fs_extra_1.default.ensureDir(path_1.dirname(newPath));
+ const isNewFile = newFilesEntrypoint.includes(fsPath);
+ const isInsideNewDirectory = newDirectoriesEntrypoint.some(dirPath => {
+ return fsPath.startsWith(dirPath);
+ });
+ // With this, we're making sure that files in the `workPath` that existed
+ // before the Legacy Runtime was invoked (source files) are linked from
+ // `.output` instead of copying there (the latter only happens if linking fails),
+ // which is the fastest solution. However, files that are created fresh
+ // by the Legacy Runtimes are always copied, because their link destinations
+ // are likely to be overwritten every time an entrypoint is processed by
+ // the Legacy Runtime. This is likely to overwrite the destination on subsequent
+ // runs, but that's also how `workPath` used to work originally, without
+ // the File System API (meaning that there was one `workPath` for all entrypoints).
+ if (isNewFile || isInsideNewDirectory) {
+ _1.debug(`Copying from ${fsPath} to ${newPath}`);
+ await fs_extra_1.default.copy(fsPath, newPath);
+ }
+ else {
+ await linkOrCopy(fsPath, newPath);
+ }
  }
- else if (file.type === 'FileBlob') {
+ else if (type === 'FileBlob') {
  const { data, mode } = file;
  await fs_extra_1.default.writeFile(newPath, data, { mode });
  }
  else {
- throw new Error(`Unknown file type: ${file.type}`);
+ throw new Error(`Unknown file type: ${type}`);
  }
  });
+ linkersRuntime = linkersRuntime.concat(linkers);
  const nft = path_1.join(workPath, '.output', 'server', 'pages', `${entrypoint}.nft.json`);
  const json = JSON.stringify({
  version: 1,
@@ -172,7 +232,34 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  });
  await fs_extra_1.default.ensureDir(path_1.dirname(nft));
  await fs_extra_1.default.writeFile(nft, json);
+ // Extend the list of directories and files that were created by the
+ // Legacy Runtime with the list of directories and files that were
+ // created for the entrypoint that was just processed above.
+ newPathsRuntime = new Set([
+ ...newPathsRuntime,
+ ...newFilesEntrypoint,
+ ...newDirectoriesEntrypoint,
+ ]);
  }
+ // Instead of of waiting for all of the linking to be done for every
+ // entrypoint before processing the next one, we immediately handle all
+ // of them one after the other, while then waiting for the linking
+ // to finish right here, before we clean up newly created files below.
+ await Promise.all(linkersRuntime);
+ // A list of all the files that were created by the Legacy Runtime,
+ // which we'd like to remove from the File System.
+ const toRemove = Array.from(newPathsRuntime).map(path => {
+ _1.debug(`Removing ${path} as part of cleanup`);
+ return fs_extra_1.default.remove(path);
+ });
+ // Once all the entrypoints have been processed, we'd like to
+ // remove all the files from `workPath` that originally weren't present
+ // before the Legacy Runtime began running, because the `workPath`
+ // is nowadays the directory in which the user keeps their source code, since
+ // we're no longer running separate parallel builds for every Legacy Runtime.
+ await Promise.all(toRemove);
+ // Add any Serverless Functions that were exposed by the Legacy Runtime
+ // to the `functions-manifest.json` file provided in `.output`.
  await updateFunctionsManifest({ workPath, pages });
  };
  }
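The hunks above replace the old per-entrypoint `toRemove` cleanup with bookkeeping that separates newly created files from newly created directories, so whole directories can be removed in one call and children of an already-tracked new directory are never listed twice. A minimal sketch of that classification step, for illustration only (`collectNewPaths` and its arguments are invented names, not exports of @vercel/build-utils):

// Sketch of the tracking idea introduced above; not part of the package.
const { dirname } = require('path');

function collectNewPaths(preBuildPaths, afterBuildPaths) {
  const newFiles = [];
  const newDirectories = [];
  for (const path of afterBuildPaths) {
    if (preBuildPaths.includes(path)) continue; // existed before the build
    const dirPath = dirname(path);
    // A directory is "new" when no pre-build file lived anywhere inside it.
    const isNewDir = !preBuildPaths.some(p => dirname(p).startsWith(dirPath));
    // Skip paths already covered by a tracked new directory: removing the
    // parent later removes them anyway.
    const hasParentDir = newDirectories.some(dir => path.startsWith(dir));
    if (hasParentDir) continue;
    if (isNewDir) newDirectories.push(dirPath);
    else newFiles.push(path);
  }
  return { newFiles, newDirectories };
}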
package/dist/index.js CHANGED
@@ -32824,6 +32824,8 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  // need to be able to easily inspect the output.
  `api-routes-${pluginName}`);
  await fs_extra_1.default.ensureDir(traceDir);
+ let newPathsRuntime = new Set();
+ let linkersRuntime = [];
  for (const entrypoint of Object.keys(entrypoints)) {
  const { output } = await buildRuntime({
  files: sourceFilesPreBuild,
@@ -32880,35 +32882,93 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  const entry = path_1.join(workPath, '.output', 'server', 'pages', entrypoint);
  await fs_extra_1.default.ensureDir(path_1.dirname(entry));
  await linkOrCopy(handlerFileOrigin, entry);
- const toRemove = [];
- // You can find more details about this at the point where the
- // `sourceFilesAfterBuild` is created originally.
+ const newFilesEntrypoint = [];
+ const newDirectoriesEntrypoint = [];
+ const preBuildFiles = Object.values(sourceFilesPreBuild).map(file => {
+ return file.fsPath;
+ });
+ // Generate a list of directories and files that weren't present
+ // before the entrypoint was processed by the Legacy Runtime, so
+ // that we can perform a cleanup later. We need to divide into files
+ // and directories because only cleaning up files might leave empty
+ // directories, and listing directories separately also speeds up the
+ // build because we can just delete them, which wipes all of their nested
+ // paths, instead of iterating through all files that should be deleted.
  for (const file in sourceFilesAfterBuild) {
  if (!sourceFilesPreBuild[file]) {
  const path = sourceFilesAfterBuild[file].fsPath;
- toRemove.push(fs_extra_1.default.remove(path));
+ const dirPath = path_1.dirname(path);
+ // If none of the files that were present before the entrypoint
+ // was processed are contained within the directory we're looking
+ // at right now, then we know it's a newly added directory
+ // and it can therefore be removed later on.
+ const isNewDir = !preBuildFiles.some(filePath => {
+ return path_1.dirname(filePath).startsWith(dirPath);
+ });
+ // Check out the list of tracked directories that were
+ // newly added and see if one of them contains the path
+ // we're looking at.
+ const hasParentDir = newDirectoriesEntrypoint.some(dir => {
+ return path.startsWith(dir);
+ });
+ // If we have already tracked a directory that was newly
+ // added that sits above the file or directory that we're
+ // looking at, we don't need to add more entries to the list
+ // because when the parent will get removed in the future,
+ // all of its children (and therefore the path we're looking at)
+ // will automatically get removed anyways.
+ if (hasParentDir) {
+ continue;
+ }
+ if (isNewDir) {
+ newDirectoriesEntrypoint.push(dirPath);
+ }
+ else {
+ newFilesEntrypoint.push(path);
+ }
  }
  }
- await Promise.all(toRemove);
  const tracedFiles = [];
- Object.entries(lambdaFiles).forEach(async ([relPath, file]) => {
+ const linkers = Object.entries(lambdaFiles).map(async ([relPath, file]) => {
  const newPath = path_1.join(traceDir, relPath);
  // The handler was already moved into position above.
  if (relPath === handlerFilePath) {
  return;
  }
  tracedFiles.push({ absolutePath: newPath, relativePath: relPath });
- if (file.fsPath) {
- await linkOrCopy(file.fsPath, newPath);
+ const { fsPath, type } = file;
+ if (fsPath) {
+ await fs_extra_1.default.ensureDir(path_1.dirname(newPath));
+ const isNewFile = newFilesEntrypoint.includes(fsPath);
+ const isInsideNewDirectory = newDirectoriesEntrypoint.some(dirPath => {
+ return fsPath.startsWith(dirPath);
+ });
+ // With this, we're making sure that files in the `workPath` that existed
+ // before the Legacy Runtime was invoked (source files) are linked from
+ // `.output` instead of copying there (the latter only happens if linking fails),
+ // which is the fastest solution. However, files that are created fresh
+ // by the Legacy Runtimes are always copied, because their link destinations
+ // are likely to be overwritten every time an entrypoint is processed by
+ // the Legacy Runtime. This is likely to overwrite the destination on subsequent
+ // runs, but that's also how `workPath` used to work originally, without
+ // the File System API (meaning that there was one `workPath` for all entrypoints).
+ if (isNewFile || isInsideNewDirectory) {
+ _1.debug(`Copying from ${fsPath} to ${newPath}`);
+ await fs_extra_1.default.copy(fsPath, newPath);
+ }
+ else {
+ await linkOrCopy(fsPath, newPath);
+ }
  }
- else if (file.type === 'FileBlob') {
+ else if (type === 'FileBlob') {
  const { data, mode } = file;
  await fs_extra_1.default.writeFile(newPath, data, { mode });
  }
  else {
- throw new Error(`Unknown file type: ${file.type}`);
+ throw new Error(`Unknown file type: ${type}`);
  }
  });
+ linkersRuntime = linkersRuntime.concat(linkers);
  const nft = path_1.join(workPath, '.output', 'server', 'pages', `${entrypoint}.nft.json`);
  const json = JSON.stringify({
  version: 1,
@@ -32919,7 +32979,34 @@ function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
  });
  await fs_extra_1.default.ensureDir(path_1.dirname(nft));
  await fs_extra_1.default.writeFile(nft, json);
+ // Extend the list of directories and files that were created by the
+ // Legacy Runtime with the list of directories and files that were
+ // created for the entrypoint that was just processed above.
+ newPathsRuntime = new Set([
+ ...newPathsRuntime,
+ ...newFilesEntrypoint,
+ ...newDirectoriesEntrypoint,
+ ]);
  }
+ // Instead of of waiting for all of the linking to be done for every
+ // entrypoint before processing the next one, we immediately handle all
+ // of them one after the other, while then waiting for the linking
+ // to finish right here, before we clean up newly created files below.
+ await Promise.all(linkersRuntime);
+ // A list of all the files that were created by the Legacy Runtime,
+ // which we'd like to remove from the File System.
+ const toRemove = Array.from(newPathsRuntime).map(path => {
+ _1.debug(`Removing ${path} as part of cleanup`);
+ return fs_extra_1.default.remove(path);
+ });
+ // Once all the entrypoints have been processed, we'd like to
+ // remove all the files from `workPath` that originally weren't present
+ // before the Legacy Runtime began running, because the `workPath`
+ // is nowadays the directory in which the user keeps their source code, since
+ // we're no longer running separate parallel builds for every Legacy Runtime.
+ await Promise.all(toRemove);
+ // Add any Serverless Functions that were exposed by the Legacy Runtime
+ // to the `functions-manifest.json` file provided in `.output`.
  await updateFunctionsManifest({ workPath, pages });
  };
  }
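The same change (bundled here into dist/index.js) also decides per traced file whether to link or copy into `.output`: paths that already existed in `workPath` are hard-linked when possible, while paths the Legacy Runtime freshly created are always copied so later entrypoints can overwrite their sources safely. A rough sketch of that rule, assuming fs-extra as used by the compiled code above (`placeTracedFile` is a hypothetical helper, not the package's API):

// Illustrative only; mirrors the copy-versus-link branch in the diff above.
const fs = require('fs-extra');
const { dirname } = require('path');

async function placeTracedFile(fsPath, newPath, newFiles, newDirectories) {
  await fs.ensureDir(dirname(newPath));
  const isNewFile = newFiles.includes(fsPath);
  const isInsideNewDirectory = newDirectories.some(dir => fsPath.startsWith(dir));
  if (isNewFile || isInsideNewDirectory) {
    // Freshly created by the runtime: copy, since the source may be removed
    // or overwritten when the next entrypoint is processed.
    await fs.copy(fsPath, newPath);
  } else {
    // Pre-existing source file: prefer a hard link, fall back to copying.
    try {
      await fs.link(fsPath, newPath);
    } catch (_err) {
      await fs.copy(fsPath, newPath);
    }
  }
}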
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@vercel/build-utils",
- "version": "2.12.3-canary.32",
+ "version": "2.12.3-canary.33",
  "license": "MIT",
  "main": "./dist/index.js",
  "types": "./dist/index.d.js",
@@ -49,5 +49,5 @@
  "typescript": "4.3.4",
  "yazl": "2.4.3"
  },
- "gitHead": "ba7bf2e4a60bc4c9fd35ad33a11d94f1dae2ac81"
+ "gitHead": "5efd3b98deb33029180ac40b0e30df1155c1ea5d"
  }