@vercel/build-utils 2.12.3-canary.3 → 2.12.3-canary.33
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/convert-runtime-to-plugin.d.ts +65 -0
- package/dist/convert-runtime-to-plugin.js +344 -0
- package/dist/detect-builders.d.ts +6 -0
- package/dist/detect-builders.js +58 -5
- package/dist/fs/glob.js +2 -1
- package/dist/fs/normalize-path.d.ts +4 -0
- package/dist/fs/normalize-path.js +11 -0
- package/dist/get-ignore-filter.d.ts +1 -0
- package/dist/get-ignore-filter.js +59 -0
- package/dist/index.d.ts +10 -1
- package/dist/index.js +5030 -3526
- package/dist/lambda.d.ts +7 -3
- package/dist/lambda.js +14 -4
- package/dist/prerender.d.ts +3 -1
- package/dist/prerender.js +10 -1
- package/dist/types.d.ts +2 -0
- package/package.json +6 -6
package/dist/convert-runtime-to-plugin.d.ts
ADDED
@@ -0,0 +1,65 @@
+import { Lambda } from './lambda';
+import type { BuildOptions } from './types';
+/**
+ * Convert legacy Runtime to a Plugin.
+ * @param buildRuntime - a legacy build() function from a Runtime
+ * @param packageName - the name of the package, for example `vercel-plugin-python`
+ * @param ext - the file extension, for example `.py`
+ */
+export declare function convertRuntimeToPlugin(buildRuntime: (options: BuildOptions) => Promise<{
+    output: Lambda;
+}>, packageName: string, ext: string): ({ workPath }: {
+    workPath: string;
+}) => Promise<void>;
+/**
+ * If `.output/functions-manifest.json` exists, append to the pages
+ * property. Otherwise write a new file.
+ */
+export declare function updateFunctionsManifest({ workPath, pages, }: {
+    workPath: string;
+    pages: {
+        [key: string]: any;
+    };
+}): Promise<void>;
+/**
+ * Append routes to the `routes-manifest.json` file.
+ * If the file does not exist, it will be created.
+ */
+export declare function updateRoutesManifest({ workPath, redirects, rewrites, headers, dynamicRoutes, staticRoutes, }: {
+    workPath: string;
+    redirects?: {
+        source: string;
+        destination: string;
+        statusCode: number;
+        regex: string;
+    }[];
+    rewrites?: {
+        source: string;
+        destination: string;
+        regex: string;
+    }[];
+    headers?: {
+        source: string;
+        headers: {
+            key: string;
+            value: string;
+        }[];
+        regex: string;
+    }[];
+    dynamicRoutes?: {
+        page: string;
+        regex: string;
+        namedRegex?: string;
+        routeKeys?: {
+            [named: string]: string;
+        };
+    }[];
+    staticRoutes?: {
+        page: string;
+        regex: string;
+        namedRegex?: string;
+        routeKeys?: {
+            [named: string]: string;
+        };
+    }[];
+}): Promise<void>;
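Based on this declaration, a wrapper package for a legacy Runtime would presumably boil down to a single call. The sketch below is illustrative only; the `@vercel/python` import and the exact shape of its `build` export are assumptions, not shown in this diff:

import { convertRuntimeToPlugin } from '@vercel/build-utils';
// Assumption: the legacy Runtime exports a build() resolving to { output: Lambda }.
import { build as buildRuntime } from '@vercel/python';

// The returned function has the plugin shape ({ workPath }) => Promise<void>,
// so it can be re-exported directly as the plugin's build step.
export const build = convertRuntimeToPlugin(buildRuntime, 'vercel-plugin-python', '.py');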
package/dist/convert-runtime-to-plugin.js
ADDED
@@ -0,0 +1,344 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.updateRoutesManifest = exports.updateFunctionsManifest = exports.convertRuntimeToPlugin = void 0;
+const fs_extra_1 = __importDefault(require("fs-extra"));
+const path_1 = require("path");
+const glob_1 = __importDefault(require("./fs/glob"));
+const normalize_path_1 = require("./fs/normalize-path");
+const lambda_1 = require("./lambda");
+const _1 = require(".");
+// `.output` was already created by the Build Command, so we have
+// to ensure its contents don't get bundled into the Lambda. Similarly,
+// we don't want to bundle anything from `.vercel` either. Lastly,
+// Builders/Runtimes didn't have `vercel.json` or `now.json`.
+const ignoredPaths = ['.output', '.vercel', 'vercel.json', 'now.json'];
+const shouldIgnorePath = (file, ignoreFilter, ignoreFile) => {
+    const isNative = ignoredPaths.some(item => {
+        return file.startsWith(item);
+    });
+    if (!ignoreFile) {
+        return isNative;
+    }
+    return isNative || ignoreFilter(file);
+};
+const getSourceFiles = async (workPath, ignoreFilter) => {
+    const list = await glob_1.default('**', {
+        cwd: workPath,
+    });
+    // We're not passing this as an `ignore` filter to the `glob` function above,
+    // so that we can re-use exactly the same `getIgnoreFilter` method that the
+    // Build Step uses (literally the same code). Note that this exclusion only applies
+    // when deploying. Locally, another exclusion is needed, which is handled
+    // further below in the `convertRuntimeToPlugin` function.
+    for (const file in list) {
+        if (shouldIgnorePath(file, ignoreFilter, true)) {
+            delete list[file];
+        }
+    }
+    return list;
+};
+/**
+ * Convert legacy Runtime to a Plugin.
+ * @param buildRuntime - a legacy build() function from a Runtime
+ * @param packageName - the name of the package, for example `vercel-plugin-python`
+ * @param ext - the file extension, for example `.py`
+ */
+function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
+    // This `build()` signature should match `plugin.build()` signature in `vercel build`.
+    return async function build({ workPath }) {
+        // We also don't want to provide any files to Runtimes that were ignored
+        // through `.vercelignore` or `.nowignore`, because the Build Step does the same.
+        const ignoreFilter = await _1.getIgnoreFilter(workPath);
+        // Retrieve the files that are currently available on the File System,
+        // before the Legacy Runtime has even started to build.
+        const sourceFilesPreBuild = await getSourceFiles(workPath, ignoreFilter);
+        // Instead of doing another `glob` to get all the matching source files,
+        // we'll filter the list of existing files down to only the ones
+        // that are matching the entrypoint pattern, so we're first creating
+        // a clean new list to begin.
+        const entrypoints = Object.assign({}, sourceFilesPreBuild);
+        const entrypointMatch = new RegExp(`^api/.*${ext}$`);
+        // Up next, we'll strip out the files from the list of entrypoints
+        // that aren't actually considered entrypoints.
+        for (const file in entrypoints) {
+            if (!entrypointMatch.test(file)) {
+                delete entrypoints[file];
+            }
+        }
+        const pages = {};
+        const pluginName = packageName.replace('vercel-plugin-', '');
+        const traceDir = path_1.join(workPath, `.output`, `inputs`,
+        // Legacy Runtimes can only provide API Routes, so that's
+        // why we can use this prefix for all of them. Here, we have to
+        // make sure to not use a cryptic hash name, because people
+        // need to be able to easily inspect the output.
+        `api-routes-${pluginName}`);
+        await fs_extra_1.default.ensureDir(traceDir);
+        let newPathsRuntime = new Set();
+        let linkersRuntime = [];
+        for (const entrypoint of Object.keys(entrypoints)) {
+            const { output } = await buildRuntime({
+                files: sourceFilesPreBuild,
+                entrypoint,
+                workPath,
+                config: {
+                    zeroConfig: true,
+                },
+                meta: {
+                    avoidTopLevelInstall: true,
+                },
+            });
+            // Legacy Runtimes tend to pollute the `workPath` with compiled results,
+            // because the `workPath` used to be a place where they could
+            // just put anything, but nowadays it's the working directory of the `vercel build`
+            // command, which is the place where the developer keeps their source files,
+            // so we don't want to pollute this space unnecessarily. That means we have to clean
+            // up files that were created by the build, which is done further below.
+            const sourceFilesAfterBuild = await getSourceFiles(workPath, ignoreFilter);
+            // Further down, we will need the filename of the Lambda handler
+            // for placing it inside `server/pages/api`, but because Legacy Runtimes
+            // don't expose the filename directly, we have to construct it
+            // from the handler name, and then find the matching file further below,
+            // because we don't yet know its extension here.
+            const handler = output.handler;
+            const handlerMethod = handler.split('.').reverse()[0];
+            const handlerFileName = handler.replace(`.${handlerMethod}`, '');
+            pages[entrypoint] = {
+                handler: handler,
+                runtime: output.runtime,
+                memory: output.memory,
+                maxDuration: output.maxDuration,
+                environment: output.environment,
+                allowQuery: output.allowQuery,
+            };
+            // @ts-ignore This symbol is a private API
+            const lambdaFiles = output[lambda_1.FILES_SYMBOL];
+            // When deploying, the `files` that are passed to the Legacy Runtimes already
+            // have certain files that are ignored stripped, but locally, that list of
+            // files isn't used by the Legacy Runtimes, so we need to apply the filters
+            // to the outputs that they are returning instead.
+            for (const file in lambdaFiles) {
+                if (shouldIgnorePath(file, ignoreFilter, false)) {
+                    delete lambdaFiles[file];
+                }
+            }
+            const handlerFilePath = Object.keys(lambdaFiles).find(item => {
+                return path_1.parse(item).name === handlerFileName;
+            });
+            const handlerFileOrigin = lambdaFiles[handlerFilePath || ''].fsPath;
+            if (!handlerFileOrigin) {
+                throw new Error(`Could not find a handler file. Please ensure that the list of \`files\` defined for the returned \`Lambda\` contains a file with the name ${handlerFileName} (+ any extension).`);
+            }
+            const entry = path_1.join(workPath, '.output', 'server', 'pages', entrypoint);
+            await fs_extra_1.default.ensureDir(path_1.dirname(entry));
+            await linkOrCopy(handlerFileOrigin, entry);
+            const newFilesEntrypoint = [];
+            const newDirectoriesEntrypoint = [];
+            const preBuildFiles = Object.values(sourceFilesPreBuild).map(file => {
+                return file.fsPath;
+            });
+            // Generate a list of directories and files that weren't present
+            // before the entrypoint was processed by the Legacy Runtime, so
+            // that we can perform a cleanup later. We need to divide into files
+            // and directories because only cleaning up files might leave empty
+            // directories, and listing directories separately also speeds up the
+            // build because we can just delete them, which wipes all of their nested
+            // paths, instead of iterating through all files that should be deleted.
+            for (const file in sourceFilesAfterBuild) {
+                if (!sourceFilesPreBuild[file]) {
+                    const path = sourceFilesAfterBuild[file].fsPath;
+                    const dirPath = path_1.dirname(path);
+                    // If none of the files that were present before the entrypoint
+                    // was processed are contained within the directory we're looking
+                    // at right now, then we know it's a newly added directory
+                    // and it can therefore be removed later on.
+                    const isNewDir = !preBuildFiles.some(filePath => {
+                        return path_1.dirname(filePath).startsWith(dirPath);
+                    });
+                    // Check out the list of tracked directories that were
+                    // newly added and see if one of them contains the path
+                    // we're looking at.
+                    const hasParentDir = newDirectoriesEntrypoint.some(dir => {
+                        return path.startsWith(dir);
+                    });
+                    // If we have already tracked a directory that was newly
+                    // added that sits above the file or directory that we're
+                    // looking at, we don't need to add more entries to the list
+                    // because when the parent will get removed in the future,
+                    // all of its children (and therefore the path we're looking at)
+                    // will automatically get removed anyways.
+                    if (hasParentDir) {
+                        continue;
+                    }
+                    if (isNewDir) {
+                        newDirectoriesEntrypoint.push(dirPath);
+                    }
+                    else {
+                        newFilesEntrypoint.push(path);
+                    }
+                }
+            }
+            const tracedFiles = [];
+            const linkers = Object.entries(lambdaFiles).map(async ([relPath, file]) => {
+                const newPath = path_1.join(traceDir, relPath);
+                // The handler was already moved into position above.
+                if (relPath === handlerFilePath) {
+                    return;
+                }
+                tracedFiles.push({ absolutePath: newPath, relativePath: relPath });
+                const { fsPath, type } = file;
+                if (fsPath) {
+                    await fs_extra_1.default.ensureDir(path_1.dirname(newPath));
+                    const isNewFile = newFilesEntrypoint.includes(fsPath);
+                    const isInsideNewDirectory = newDirectoriesEntrypoint.some(dirPath => {
+                        return fsPath.startsWith(dirPath);
+                    });
+                    // With this, we're making sure that files in the `workPath` that existed
+                    // before the Legacy Runtime was invoked (source files) are linked from
+                    // `.output` instead of copying there (the latter only happens if linking fails),
+                    // which is the fastest solution. However, files that are created fresh
+                    // by the Legacy Runtimes are always copied, because their link destinations
+                    // are likely to be overwritten every time an entrypoint is processed by
+                    // the Legacy Runtime. This is likely to overwrite the destination on subsequent
+                    // runs, but that's also how `workPath` used to work originally, without
+                    // the File System API (meaning that there was one `workPath` for all entrypoints).
+                    if (isNewFile || isInsideNewDirectory) {
+                        _1.debug(`Copying from ${fsPath} to ${newPath}`);
+                        await fs_extra_1.default.copy(fsPath, newPath);
+                    }
+                    else {
+                        await linkOrCopy(fsPath, newPath);
+                    }
+                }
+                else if (type === 'FileBlob') {
+                    const { data, mode } = file;
+                    await fs_extra_1.default.writeFile(newPath, data, { mode });
+                }
+                else {
+                    throw new Error(`Unknown file type: ${type}`);
+                }
+            });
+            linkersRuntime = linkersRuntime.concat(linkers);
+            const nft = path_1.join(workPath, '.output', 'server', 'pages', `${entrypoint}.nft.json`);
+            const json = JSON.stringify({
+                version: 1,
+                files: tracedFiles.map(file => ({
+                    input: normalize_path_1.normalizePath(path_1.relative(path_1.dirname(nft), file.absolutePath)),
+                    output: normalize_path_1.normalizePath(file.relativePath),
+                })),
+            });
+            await fs_extra_1.default.ensureDir(path_1.dirname(nft));
+            await fs_extra_1.default.writeFile(nft, json);
+            // Extend the list of directories and files that were created by the
+            // Legacy Runtime with the list of directories and files that were
+            // created for the entrypoint that was just processed above.
+            newPathsRuntime = new Set([
+                ...newPathsRuntime,
+                ...newFilesEntrypoint,
+                ...newDirectoriesEntrypoint,
+            ]);
+        }
+        // Instead of waiting for all of the linking to be done for every
+        // entrypoint before processing the next one, we immediately handle all
+        // of them one after the other, while then waiting for the linking
+        // to finish right here, before we clean up newly created files below.
+        await Promise.all(linkersRuntime);
+        // A list of all the files that were created by the Legacy Runtime,
+        // which we'd like to remove from the File System.
+        const toRemove = Array.from(newPathsRuntime).map(path => {
+            _1.debug(`Removing ${path} as part of cleanup`);
+            return fs_extra_1.default.remove(path);
+        });
+        // Once all the entrypoints have been processed, we'd like to
+        // remove all the files from `workPath` that originally weren't present
+        // before the Legacy Runtime began running, because the `workPath`
+        // is nowadays the directory in which the user keeps their source code, since
+        // we're no longer running separate parallel builds for every Legacy Runtime.
+        await Promise.all(toRemove);
+        // Add any Serverless Functions that were exposed by the Legacy Runtime
+        // to the `functions-manifest.json` file provided in `.output`.
+        await updateFunctionsManifest({ workPath, pages });
+    };
+}
+exports.convertRuntimeToPlugin = convertRuntimeToPlugin;
+async function linkOrCopy(existingPath, newPath) {
+    try {
+        await fs_extra_1.default.createLink(existingPath, newPath);
+    }
+    catch (err) {
+        if (err.code !== 'EEXIST') {
+            await fs_extra_1.default.copyFile(existingPath, newPath);
+        }
+    }
+}
+async function readJson(filePath) {
+    try {
+        const str = await fs_extra_1.default.readFile(filePath, 'utf8');
+        return JSON.parse(str);
+    }
+    catch (err) {
+        if (err.code === 'ENOENT') {
+            return {};
+        }
+        throw err;
+    }
+}
+/**
+ * If `.output/functions-manifest.json` exists, append to the pages
+ * property. Otherwise write a new file.
+ */
+async function updateFunctionsManifest({ workPath, pages, }) {
+    const functionsManifestPath = path_1.join(workPath, '.output', 'functions-manifest.json');
+    const functionsManifest = await readJson(functionsManifestPath);
+    if (!functionsManifest.version)
+        functionsManifest.version = 1;
+    if (!functionsManifest.pages)
+        functionsManifest.pages = {};
+    for (const [pageKey, pageConfig] of Object.entries(pages)) {
+        functionsManifest.pages[pageKey] = { ...pageConfig };
+    }
+    await fs_extra_1.default.writeFile(functionsManifestPath, JSON.stringify(functionsManifest));
+}
+exports.updateFunctionsManifest = updateFunctionsManifest;
+/**
+ * Append routes to the `routes-manifest.json` file.
+ * If the file does not exist, it will be created.
+ */
+async function updateRoutesManifest({ workPath, redirects, rewrites, headers, dynamicRoutes, staticRoutes, }) {
+    const routesManifestPath = path_1.join(workPath, '.output', 'routes-manifest.json');
+    const routesManifest = await readJson(routesManifestPath);
+    if (!routesManifest.version)
+        routesManifest.version = 3;
+    if (routesManifest.pages404 === undefined)
+        routesManifest.pages404 = true;
+    if (redirects) {
+        if (!routesManifest.redirects)
+            routesManifest.redirects = [];
+        routesManifest.redirects.push(...redirects);
+    }
+    if (rewrites) {
+        if (!routesManifest.rewrites)
+            routesManifest.rewrites = [];
+        routesManifest.rewrites.push(...rewrites);
+    }
+    if (headers) {
+        if (!routesManifest.headers)
+            routesManifest.headers = [];
+        routesManifest.headers.push(...headers);
+    }
+    if (dynamicRoutes) {
+        if (!routesManifest.dynamicRoutes)
+            routesManifest.dynamicRoutes = [];
+        routesManifest.dynamicRoutes.push(...dynamicRoutes);
+    }
+    if (staticRoutes) {
+        if (!routesManifest.staticRoutes)
+            routesManifest.staticRoutes = [];
+        routesManifest.staticRoutes.push(...staticRoutes);
+    }
+    await fs_extra_1.default.writeFile(routesManifestPath, JSON.stringify(routesManifest));
+}
+exports.updateRoutesManifest = updateRoutesManifest;
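Both manifest helpers read any existing manifest first and append to it, so several plugins can contribute to the same `.output` directory. A minimal sketch of calling them from a plugin's build step (the page entry, runtime identifier, and routes are made-up values):

import { updateFunctionsManifest, updateRoutesManifest } from '@vercel/build-utils';

export async function build({ workPath }: { workPath: string }) {
  // Appends to .output/functions-manifest.json, creating it (version 1) if missing.
  await updateFunctionsManifest({
    workPath,
    pages: { 'api/hello.py': { handler: 'hello.handler', runtime: 'python3.9' } },
  });
  // Appends to .output/routes-manifest.json, creating it (version 3) if missing.
  await updateRoutesManifest({
    workPath,
    rewrites: [{ source: '/hello', destination: '/api/hello', regex: '^/hello$' }],
  });
}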
package/dist/detect-builders.d.ts
CHANGED
@@ -34,5 +34,11 @@ export declare function detectBuilders(files: string[], pkg?: PackageJson | unde
     redirectRoutes: Route[] | null;
     rewriteRoutes: Route[] | null;
     errorRoutes: Route[] | null;
+    limitedRoutes: LimitedRoutes | null;
 }>;
+interface LimitedRoutes {
+    defaultRoutes: Route[];
+    redirectRoutes: Route[];
+    rewriteRoutes: Route[];
+}
 export {};
package/dist/detect-builders.js
CHANGED
@@ -66,6 +66,7 @@ async function detectBuilders(files, pkg, options = {}) {
             redirectRoutes: null,
             rewriteRoutes: null,
             errorRoutes: null,
+            limitedRoutes: null,
         };
     }
     const sortedFiles = files.sort(sortFiles);
@@ -113,6 +114,7 @@ async function detectBuilders(files, pkg, options = {}) {
             redirectRoutes: null,
             rewriteRoutes: null,
             errorRoutes: null,
+            limitedRoutes: null,
         };
     }
     if (apiRoute) {
@@ -167,6 +169,7 @@ async function detectBuilders(files, pkg, options = {}) {
             defaultRoutes: null,
             rewriteRoutes: null,
             errorRoutes: null,
+            limitedRoutes: null,
         };
     }
     // If `outputDirectory` is an empty string,
@@ -203,6 +206,7 @@ async function detectBuilders(files, pkg, options = {}) {
             defaultRoutes: null,
             rewriteRoutes: null,
             errorRoutes: null,
+            limitedRoutes: null,
         };
     }
     const builders = [];
@@ -221,7 +225,7 @@ async function detectBuilders(files, pkg, options = {}) {
         });
     }
-    const routesResult = getRouteResult(apiRoutes, dynamicRoutes, usedOutputDirectory, apiBuilders, frontendBuilder, options);
+    const routesResult = getRouteResult(pkg, apiRoutes, dynamicRoutes, usedOutputDirectory, apiBuilders, frontendBuilder, options);
     return {
         warnings,
         builders: builders.length ? builders : null,
@@ -230,6 +234,7 @@ async function detectBuilders(files, pkg, options = {}) {
         defaultRoutes: routesResult.defaultRoutes,
         rewriteRoutes: routesResult.rewriteRoutes,
         errorRoutes: routesResult.errorRoutes,
+        limitedRoutes: routesResult.limitedRoutes,
     };
 }
 exports.detectBuilders = detectBuilders;
@@ -670,23 +675,51 @@ function createRouteFromPath(filePath, featHandleMiss, cleanUrls) {
     }
     return { route, isDynamic };
 }
-function getRouteResult(apiRoutes, dynamicRoutes, outputDirectory, apiBuilders, frontendBuilder, options) {
+function getRouteResult(pkg, apiRoutes, dynamicRoutes, outputDirectory, apiBuilders, frontendBuilder, options) {
     var _a, _b;
+    const deps = Object.assign({}, pkg === null || pkg === void 0 ? void 0 : pkg.dependencies, pkg === null || pkg === void 0 ? void 0 : pkg.devDependencies);
     const defaultRoutes = [];
    const redirectRoutes = [];
     const rewriteRoutes = [];
     const errorRoutes = [];
+    const limitedRoutes = {
+        defaultRoutes: [],
+        redirectRoutes: [],
+        rewriteRoutes: [],
+    };
     const framework = ((_a = frontendBuilder === null || frontendBuilder === void 0 ? void 0 : frontendBuilder.config) === null || _a === void 0 ? void 0 : _a.framework) || '';
     const isNextjs = framework === 'nextjs' || _1.isOfficialRuntime('next', frontendBuilder === null || frontendBuilder === void 0 ? void 0 : frontendBuilder.use);
     const ignoreRuntimes = (_b = slugToFramework.get(framework)) === null || _b === void 0 ? void 0 : _b.ignoreRuntimes;
     if (apiRoutes && apiRoutes.length > 0) {
         if (options.featHandleMiss) {
+            // Exclude extension names if the corresponding plugin is not found in package.json
+            // detectBuilders({ignoreRoutesForBuilders: ['@vercel/python']})
+            // return a copy of routes.
+            // We should exclude errorRoutes and
             const extSet = detectApiExtensions(apiBuilders);
+            const withTag = options.tag ? `@${options.tag}` : '';
+            const extSetLimited = detectApiExtensions(apiBuilders.filter(b => {
+                if (b.use === `@vercel/python${withTag}` &&
+                    !('vercel-plugin-python' in deps)) {
+                    return false;
+                }
+                if (b.use === `@vercel/go${withTag}` &&
+                    !('vercel-plugin-go' in deps)) {
+                    return false;
+                }
+                if (b.use === `@vercel/ruby${withTag}` &&
+                    !('vercel-plugin-ruby' in deps)) {
+                    return false;
+                }
+                return true;
+            }));
             if (extSet.size > 0) {
-                const exts = Array.from(extSet)
+                const extGroup = `(?:\\.(?:${Array.from(extSet)
                     .map(ext => ext.slice(1))
-                    .join('|');
-                const extGroup = `(?:\\.(?:${exts}))`;
+                    .join('|')}))`;
+                const extGroupLimited = `(?:\\.(?:${Array.from(extSetLimited)
+                    .map(ext => ext.slice(1))
+                    .join('|')}))`;
                 if (options.cleanUrls) {
                     redirectRoutes.push({
                         src: `^/(api(?:.+)?)/index${extGroup}?/?$`,
@@ -700,6 +733,18 @@ function getRouteResult(apiRoutes, dynamicRoutes, outputDirectory, apiBuilders,
                         },
                         status: 308,
                     });
+                    limitedRoutes.redirectRoutes.push({
+                        src: `^/(api(?:.+)?)/index${extGroupLimited}?/?$`,
+                        headers: { Location: options.trailingSlash ? '/$1/' : '/$1' },
+                        status: 308,
+                    });
+                    limitedRoutes.redirectRoutes.push({
+                        src: `^/api/(.+)${extGroupLimited}/?$`,
+                        headers: {
+                            Location: options.trailingSlash ? '/api/$1/' : '/api/$1',
+                        },
+                        status: 308,
+                    });
                 }
                 else {
                     defaultRoutes.push({ handle: 'miss' });
@@ -708,9 +753,16 @@ function getRouteResult(apiRoutes, dynamicRoutes, outputDirectory, apiBuilders,
                         dest: '/api/$1',
                         check: true,
                     });
+                    limitedRoutes.defaultRoutes.push({ handle: 'miss' });
+                    limitedRoutes.defaultRoutes.push({
+                        src: `^/api/(.+)${extGroupLimited}$`,
+                        dest: '/api/$1',
+                        check: true,
+                    });
                 }
             }
             rewriteRoutes.push(...dynamicRoutes);
+            limitedRoutes.rewriteRoutes.push(...dynamicRoutes);
             if (typeof ignoreRuntimes === 'undefined') {
                 // This route is only necessary to hide the directory listing
                 // to avoid enumerating serverless function names.
@@ -755,6 +807,7 @@ function getRouteResult(apiRoutes, dynamicRoutes, outputDirectory, apiBuilders,
         redirectRoutes,
         rewriteRoutes,
         errorRoutes,
+        limitedRoutes,
     };
 }
 function sortFilesBySegmentCount(fileA, fileB) {
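The new `limitedRoutes` result mirrors the default/redirect/rewrite route sets, but its extension groups only include Runtimes whose companion `vercel-plugin-*` package appears in the project's dependencies or devDependencies. A hedged sketch of what a caller would observe (the file list and dependency map are invented):

import { detectBuilders } from '@vercel/build-utils';

const files = ['package.json', 'public/index.html', 'api/user.py', 'api/date.go'];
const pkg = { dependencies: { 'vercel-plugin-python': 'latest' } };

const { defaultRoutes, limitedRoutes } = await detectBuilders(files, pkg, {
  featHandleMiss: true,
});

// defaultRoutes covers both the .py and .go entrypoints; limitedRoutes should
// only cover .py, since vercel-plugin-go is absent from the dependencies above.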
package/dist/fs/glob.js
CHANGED
@@ -8,6 +8,7 @@ const assert_1 = __importDefault(require("assert"));
 const glob_1 = __importDefault(require("glob"));
 const util_1 = require("util");
 const fs_extra_1 = require("fs-extra");
+const normalize_path_1 = require("./normalize-path");
 const file_fs_ref_1 = __importDefault(require("../file-fs-ref"));
 const vanillaGlob = util_1.promisify(glob_1.default);
 async function glob(pattern, opts, mountpoint) {
@@ -31,7 +32,7 @@ async function glob(pattern, opts, mountpoint) {
     options.dot = true;
     const files = await vanillaGlob(pattern, options);
     for (const relativePath of files) {
-        const fsPath = path_1.default.join(options.cwd, relativePath);
+        const fsPath = normalize_path_1.normalizePath(path_1.default.join(options.cwd, relativePath));
         let stat = options.statCache[fsPath];
         assert_1.default(stat, `statCache does not contain value for ${relativePath} (resolved to ${fsPath})`);
         const isSymlink = options.symlinks[fsPath];
package/dist/fs/normalize-path.js
ADDED
@@ -0,0 +1,11 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.normalizePath = void 0;
+const isWin = process.platform === 'win32';
+/**
+ * Convert Windows separators to Unix separators.
+ */
+function normalizePath(p) {
+    return isWin ? p.replace(/\\/g, '/') : p;
+}
+exports.normalizePath = normalizePath;
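`glob` (above) now runs every resolved `fsPath` through this helper, so file maps stay keyed with forward slashes even on Windows. A trivial usage sketch (the deep `dist` import path is an assumption; this section doesn't show whether `index.js` re-exports the helper):

import { normalizePath } from '@vercel/build-utils/dist/fs/normalize-path';

// On win32: 'api\\user.py' -> 'api/user.py'; on POSIX the input is returned as-is.
console.log(normalizePath('api\\user.py'));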
package/dist/get-ignore-filter.d.ts
ADDED
@@ -0,0 +1 @@
+export default function (downloadPath: string, rootDirectory?: string | undefined): Promise<(p: string) => any>;
package/dist/get-ignore-filter.js
ADDED
@@ -0,0 +1,59 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const path_1 = __importDefault(require("path"));
+const fs_extra_1 = __importDefault(require("fs-extra"));
+const ignore_1 = __importDefault(require("ignore"));
+function isCodedError(error) {
+    return (error !== null &&
+        error !== undefined &&
+        error.code !== undefined);
+}
+function clearRelative(s) {
+    return s.replace(/(\n|^)\.\//g, '$1');
+}
+async function default_1(downloadPath, rootDirectory) {
+    const readFile = async (p) => {
+        try {
+            return await fs_extra_1.default.readFile(p, 'utf8');
+        }
+        catch (error) {
+            if (error.code === 'ENOENT' ||
+                (error instanceof Error && error.message.includes('ENOENT'))) {
+                return undefined;
+            }
+            throw error;
+        }
+    };
+    const vercelIgnorePath = path_1.default.join(downloadPath, rootDirectory || '', '.vercelignore');
+    const nowIgnorePath = path_1.default.join(downloadPath, rootDirectory || '', '.nowignore');
+    const ignoreContents = [];
+    try {
+        ignoreContents.push(...(await Promise.all([readFile(vercelIgnorePath), readFile(nowIgnorePath)])).filter(Boolean));
+    }
+    catch (error) {
+        if (isCodedError(error) && error.code === 'ENOTDIR') {
+            console.log(`Warning: Cannot read ignore file from ${vercelIgnorePath}`);
+        }
+        else {
+            throw error;
+        }
+    }
+    if (ignoreContents.length === 2) {
+        throw new Error('Cannot use both a `.vercelignore` and `.nowignore` file. Please delete the `.nowignore` file.');
+    }
+    if (ignoreContents.length === 0) {
+        return () => false;
+    }
+    const ignoreFilter = ignore_1.default().add(clearRelative(ignoreContents[0]));
+    return function (p) {
+        // We should not ignore now.json and vercel.json even if asked to;
+        // we depend on these files for sourceless builds of the app.
+        if (p === 'now.json' || p === 'vercel.json')
+            return false;
+        return ignoreFilter.test(p).ignored;
+    };
+}
+exports.default = default_1;
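This default export is the `getIgnoreFilter` that `convertRuntimeToPlugin` awaits before globbing. A small sketch of the call pattern (the deep `dist` import and the surrounding helper are illustrative):

import getIgnoreFilter from '@vercel/build-utils/dist/get-ignore-filter';

async function listDeployableFiles(workPath: string, files: string[]) {
  // Resolves to a predicate: truthy means the path is matched by
  // .vercelignore / .nowignore (now.json and vercel.json are never ignored).
  const isIgnored = await getIgnoreFilter(workPath);
  return files.filter(p => !isIgnored(p));
}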