@vercel/redwood 1.1.14 → 2.0.0
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- package/dist/index.js +45 -45
- package/package.json +11 -12
package/dist/index.js
CHANGED
@@ -11,8 +11,8 @@ const routing_utils_1 = require("@vercel/routing-utils");
 // it refers to Vercels builder version
 exports.version = 2;
 const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) => {
-    await build_utils_1.download(files, workPath, meta);
-    const prefixedEnvs = build_utils_1.getPrefixedEnvVars({
+    await (0, build_utils_1.download)(files, workPath, meta);
+    const prefixedEnvs = (0, build_utils_1.getPrefixedEnvVars)({
         envPrefix: 'REDWOOD_ENV_',
         envs: process.env,
     });
@@ -20,15 +20,15 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
         process.env[key] = value;
     }
     const { installCommand, buildCommand } = config;
-    const mountpoint = path_1.dirname(entrypoint);
-    const entrypointFsDirname = path_1.join(workPath, mountpoint);
-    const nodeVersion = await build_utils_1.getNodeVersion(entrypointFsDirname, undefined, config, meta);
-    const spawnOpts = build_utils_1.getSpawnOptions(meta, nodeVersion);
+    const mountpoint = (0, path_1.dirname)(entrypoint);
+    const entrypointFsDirname = (0, path_1.join)(workPath, mountpoint);
+    const nodeVersion = await (0, build_utils_1.getNodeVersion)(entrypointFsDirname, undefined, config, meta);
+    const spawnOpts = (0, build_utils_1.getSpawnOptions)(meta, nodeVersion);
     if (!spawnOpts.env) {
         spawnOpts.env = {};
     }
-    const { cliType, lockfileVersion } = await build_utils_1.scanParentDirs(entrypointFsDirname);
-    spawnOpts.env = build_utils_1.getEnvForPackageManager({
+    const { cliType, lockfileVersion } = await (0, build_utils_1.scanParentDirs)(entrypointFsDirname);
+    spawnOpts.env = (0, build_utils_1.getEnvForPackageManager)({
         cliType,
         lockfileVersion,
         nodeVersion,
@@ -37,7 +37,7 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
     if (typeof installCommand === 'string') {
         if (installCommand.trim()) {
             console.log(`Running "install" command: \`${installCommand}\`...`);
-            await build_utils_1.execCommand(installCommand, {
+            await (0, build_utils_1.execCommand)(installCommand, {
                 ...spawnOpts,
                 cwd: entrypointFsDirname,
             });
@@ -47,37 +47,37 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
         }
     }
     else {
-        await build_utils_1.runNpmInstall(entrypointFsDirname, [], spawnOpts, meta, nodeVersion);
+        await (0, build_utils_1.runNpmInstall)(entrypointFsDirname, [], spawnOpts, meta, nodeVersion);
     }
     if (meta.isDev) {
         throw new Error('Detected `@vercel/redwood` dev but this is not supported');
     }
-    const pkg = await build_utils_1.readConfigFile(path_1.join(workPath, 'package.json'));
-    const toml = await build_utils_1.readConfigFile(path_1.join(workPath, 'redwood.toml'));
+    const pkg = await (0, build_utils_1.readConfigFile)((0, path_1.join)(workPath, 'package.json'));
+    const toml = await (0, build_utils_1.readConfigFile)((0, path_1.join)(workPath, 'redwood.toml'));
     if (buildCommand) {
-        build_utils_1.debug(`Executing build command "${buildCommand}"`);
-        await build_utils_1.execCommand(buildCommand, {
+        (0, build_utils_1.debug)(`Executing build command "${buildCommand}"`);
+        await (0, build_utils_1.execCommand)(buildCommand, {
            ...spawnOpts,
            cwd: workPath,
        });
    }
    else if (hasScript('vercel-build', pkg)) {
-        build_utils_1.debug(`Executing "yarn vercel-build"`);
-        await build_utils_1.runPackageJsonScript(workPath, 'vercel-build', spawnOpts);
+        (0, build_utils_1.debug)(`Executing "yarn vercel-build"`);
+        await (0, build_utils_1.runPackageJsonScript)(workPath, 'vercel-build', spawnOpts);
    }
    else if (hasScript('build', pkg)) {
-        build_utils_1.debug(`Executing "yarn build"`);
-        await build_utils_1.runPackageJsonScript(workPath, 'build', spawnOpts);
+        (0, build_utils_1.debug)(`Executing "yarn build"`);
+        await (0, build_utils_1.runPackageJsonScript)(workPath, 'build', spawnOpts);
    }
    else {
        const { devDependencies = {} } = pkg || {};
        const versionRange = devDependencies['@redwoodjs/core'];
        let cmd;
-        if (!versionRange || !semver_1.validRange(versionRange)) {
+        if (!versionRange || !(0, semver_1.validRange)(versionRange)) {
            console.log('WARNING: Unable to detect RedwoodJS version in package.json devDependencies');
            cmd = 'yarn rw deploy vercel'; // Assume 0.25.0 and newer
        }
-        else if (semver_1.intersects(versionRange, '<0.25.0')) {
+        else if ((0, semver_1.intersects)(versionRange, '<0.25.0')) {
            // older than 0.25.0
            cmd =
                'yarn rw build && yarn rw db up --no-db-client --auto-approve && yarn rw dataMigrate up';
@@ -86,32 +86,32 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
            // 0.25.0 and newer
            cmd = 'yarn rw deploy vercel';
        }
-        await build_utils_1.execCommand(cmd, {
+        await (0, build_utils_1.execCommand)(cmd, {
            ...spawnOpts,
            cwd: workPath,
        });
    }
    const apiDir = toml?.web?.apiProxyPath?.replace(/^\//, '') ?? 'api';
-    const apiDistPath = path_1.join(workPath, 'api', 'dist', 'functions');
-    const webDistPath = path_1.join(workPath, 'web', 'dist');
+    const apiDistPath = (0, path_1.join)(workPath, 'api', 'dist', 'functions');
+    const webDistPath = (0, path_1.join)(workPath, 'web', 'dist');
    const lambdaOutputs = {};
    // Strip out the .html extensions
    // And populate staticOutputs map with updated paths and contentType
-    const webDistFiles = await build_utils_1.glob('**', webDistPath);
+    const webDistFiles = await (0, build_utils_1.glob)('**', webDistPath);
    const staticOutputs = {};
    for (const [fileName, fileFsRef] of Object.entries(webDistFiles)) {
-        const parsedPath = path_1.parse(fileFsRef.fsPath);
+        const parsedPath = (0, path_1.parse)(fileFsRef.fsPath);
        if (parsedPath.ext !== '.html') {
            // No need to transform non-html files
            staticOutputs[fileName] = fileFsRef;
        }
        else {
-            const fileNameWithoutExtension = path_1.basename(fileName, '.html');
-            const pathWithoutHtmlExtension = path_1.join(parsedPath.dir, fileNameWithoutExtension);
+            const fileNameWithoutExtension = (0, path_1.basename)(fileName, '.html');
+            const pathWithoutHtmlExtension = (0, path_1.join)(parsedPath.dir, fileNameWithoutExtension);
            fileFsRef.contentType = 'text/html; charset=utf-8';
            // @NOTE: Filename is relative to webDistPath
            // e.g. {'./200': fsRef}
-            staticOutputs[path_1.relative(webDistPath, pathWithoutHtmlExtension)] =
+            staticOutputs[(0, path_1.relative)(webDistPath, pathWithoutHtmlExtension)] =
                fileFsRef;
        }
    }
@@ -122,25 +122,25 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
    // │ │ ├── bazinga.js
    // │ ├── graphql.js
    const functionFiles = {
-        ...(await build_utils_1.glob('*.js', apiDistPath)),
-        ...(await build_utils_1.glob('*/*.js', apiDistPath)), // one-level deep
+        ...(await (0, build_utils_1.glob)('*.js', apiDistPath)),
+        ...(await (0, build_utils_1.glob)('*/*.js', apiDistPath)), // one-level deep
    };
    const sourceCache = new Map();
    const fsCache = new Map();
    for (const [funcName, fileFsRef] of Object.entries(functionFiles)) {
-        const outputName = path_1.join(apiDir, path_1.parse(funcName).name); // remove `.js` extension
+        const outputName = (0, path_1.join)(apiDir, (0, path_1.parse)(funcName).name); // remove `.js` extension
        const absEntrypoint = fileFsRef.fsPath;
-        const relativeEntrypoint = path_1.relative(workPath, absEntrypoint);
+        const relativeEntrypoint = (0, path_1.relative)(workPath, absEntrypoint);
        const awsLambdaHandler = getAWSLambdaHandler(relativeEntrypoint, 'handler');
        const sourceFile = relativeEntrypoint.replace('/dist/', '/src/');
-        const { fileList, esmFileList, warnings } = await nft_1.nodeFileTrace([absEntrypoint], {
+        const { fileList, esmFileList, warnings } = await (0, nft_1.nodeFileTrace)([absEntrypoint], {
            base: workPath,
            processCwd: workPath,
            ts: true,
            mixedModules: true,
            ignore: config.excludeFiles,
            async readFile(fsPath) {
-                const relPath = path_1.relative(workPath, fsPath);
+                const relPath = (0, path_1.relative)(workPath, fsPath);
                const cached = sourceCache.get(relPath);
                if (cached)
                    return cached.toString();
@@ -148,10 +148,10 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
                if (cached === null)
                    return null;
                try {
-                    const source = fs_1.readFileSync(fsPath);
-                    const { mode } = fs_1.lstatSync(fsPath);
+                    const source = (0, fs_1.readFileSync)(fsPath);
+                    const { mode } = (0, fs_1.lstatSync)(fsPath);
                    let entry;
-                    if (build_utils_1.isSymbolicLink(mode)) {
+                    if ((0, build_utils_1.isSymbolicLink)(mode)) {
                        entry = new build_utils_1.FileFsRef({ fsPath, mode });
                    }
                    else {
@@ -171,17 +171,17 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
            },
        });
        for (const warning of warnings) {
-            build_utils_1.debug(`Warning from trace: ${warning.message}`);
+            (0, build_utils_1.debug)(`Warning from trace: ${warning.message}`);
        }
        const lambdaFiles = {};
        const allFiles = [...fileList, ...esmFileList];
        for (const filePath of allFiles) {
            lambdaFiles[filePath] = await build_utils_1.FileFsRef.fromFsPath({
-                fsPath: path_1.join(workPath, filePath),
+                fsPath: (0, path_1.join)(workPath, filePath),
            });
        }
-        lambdaFiles[path_1.relative(workPath, fileFsRef.fsPath)] = fileFsRef;
-        const lambdaOptions = await build_utils_1.getLambdaOptionsFromFunction({
+        lambdaFiles[(0, path_1.relative)(workPath, fileFsRef.fsPath)] = fileFsRef;
+        const lambdaOptions = await (0, build_utils_1.getLambdaOptionsFromFunction)({
            sourceFile,
            config,
        });
@@ -199,10 +199,10 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
    // Older versions of redwood did not create 200.html automatically
    // From v0.50.0+ 200.html is always generated as part of web build
    // Note that in builder post-processing, we remove the .html extension
-    const fallbackHtmlPage = fs_1.existsSync(path_1.join(webDistPath, '200.html'))
+    const fallbackHtmlPage = (0, fs_1.existsSync)((0, path_1.join)(webDistPath, '200.html'))
        ? '/200'
        : '/index';
-    const defaultRoutesConfig = routing_utils_1.getTransformedRoutes({
+    const defaultRoutesConfig = (0, routing_utils_1.getTransformedRoutes)({
        // this makes sure we send back 200.html for unprerendered pages
        rewrites: [{ source: '/(.*)', destination: fallbackHtmlPage }],
        cleanUrls: true,
@@ -218,7 +218,7 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
 };
 exports.build = build;
 function getAWSLambdaHandler(filePath, handlerName) {
-    const { dir, name } = path_1.parse(filePath);
+    const { dir, name } = (0, path_1.parse)(filePath);
     return `${dir}${dir ? path_1.sep : ''}${name}.${handlerName}`;
 }
 function hasScript(scriptName, pkg) {
@@ -226,6 +226,6 @@ function hasScript(scriptName, pkg) {
     return typeof scripts[scriptName] === 'string';
 }
 const prepareCache = ({ repoRootPath, workPath }) => {
-    return build_utils_1.glob('**/node_modules/**', repoRootPath || workPath);
+    return (0, build_utils_1.glob)('**/node_modules/**', repoRootPath || workPath);
 };
 exports.prepareCache = prepareCache;

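Every hunk in dist/index.js above is the same mechanical change: call sites like build_utils_1.download(...) become (0, build_utils_1.download)(...). This is the indirect-call pattern newer TypeScript compilers emit for CommonJS output: the comma operator evaluates to the bare function reference before the call, so the function is invoked without the module namespace object as its receiver (this is undefined in strict mode, matching ES module semantics). A minimal standalone sketch of the difference, illustrative only and not code from this package:

    // method-style call: inside download(), `this` is bound to the
    // build_utils_1 namespace object
    build_utils_1.download(files, workPath, meta);

    // indirect call: (0, fn) yields the plain function reference first,
    // so the call carries no receiver and `this` is undefined
    (0, build_utils_1.download)(files, workPath, meta);

The builder's runtime behavior is unchanged; only the compiler's emit differs.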
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@vercel/redwood",
-  "version": "1.1.14",
+  "version": "2.0.0",
   "main": "./dist/index.js",
   "license": "Apache-2.0",
   "homepage": "https://vercel.com/docs",
@@ -12,25 +12,24 @@
     "url": "https://github.com/vercel/vercel.git",
     "directory": "packages/redwood"
   },
-  "scripts": {
-    "build": "node build.js",
-    "test-e2e": "pnpm test test/test.js",
-    "test": "jest --env node --verbose --bail --runInBand",
-    "test-unit": "pnpm test test/prepare-cache.test.js"
-  },
   "dependencies": {
     "@vercel/nft": "0.22.5",
-    "@vercel/routing-utils": "
+    "@vercel/routing-utils": "3.0.0",
     "semver": "6.1.1"
   },
   "devDependencies": {
     "@types/aws-lambda": "8.10.19",
     "@types/node": "14.18.33",
     "@types/semver": "6.0.0",
-    "@vercel/build-utils": "
+    "@vercel/build-utils": "7.0.0",
     "execa": "3.2.0",
     "fs-extra": "11.1.0",
-    "
+    "jest-junit": "16.0.0"
   },
-  "
-
+  "scripts": {
+    "build": "node build.js",
+    "test-e2e": "pnpm test test/test.js",
+    "test": "jest --reporters=default --reporters=jest-junit --env node --verbose --bail --runInBand",
+    "test-unit": "pnpm test test/prepare-cache.test.js"
+  }
+}
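Besides the pinned-version bumps (@vercel/routing-utils to 3.0.0, @vercel/build-utils to 7.0.0) and the scripts block moving below devDependencies, the notable functional change is the new jest-junit devDependency, wired into the test script via --reporters=default --reporters=jest-junit so Jest keeps its normal console output while also writing a JUnit XML report for CI. A hedged sketch of the equivalent jest.config.js form; the outputDirectory/outputName options are jest-junit's documented settings, and this file is illustrative rather than part of the package:

    // jest.config.js -- same reporters as the CLI flags above,
    // with optional jest-junit output settings
    module.exports = {
      reporters: [
        'default',
        ['jest-junit', { outputDirectory: 'test-results', outputName: 'junit.xml' }],
      ],
    };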