@vercel/redwood 0.8.2 → 0.8.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +43 -43
  2. package/package.json +8 -7
package/dist/index.js CHANGED
@@ -12,7 +12,7 @@ const routing_utils_1 = require("@vercel/routing-utils");
  exports.version = 2;
  const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) => {
  var _a, _b, _c;
- await build_utils_1.download(files, workPath, meta);
+ await (0, build_utils_1.download)(files, workPath, meta);
  Object.keys(process.env)
  .filter(key => key.startsWith('VERCEL_'))
  .forEach(key => {
@@ -22,14 +22,14 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
  }
  });
  const { installCommand, buildCommand } = config;
- const mountpoint = path_1.dirname(entrypoint);
- const entrypointFsDirname = path_1.join(workPath, mountpoint);
- const nodeVersion = await build_utils_1.getNodeVersion(entrypointFsDirname, undefined, config, meta);
- const spawnOpts = build_utils_1.getSpawnOptions(meta, nodeVersion);
+ const mountpoint = (0, path_1.dirname)(entrypoint);
+ const entrypointFsDirname = (0, path_1.join)(workPath, mountpoint);
+ const nodeVersion = await (0, build_utils_1.getNodeVersion)(entrypointFsDirname, undefined, config, meta);
+ const spawnOpts = (0, build_utils_1.getSpawnOptions)(meta, nodeVersion);
  if (!spawnOpts.env) {
  spawnOpts.env = {};
  }
- const { cliType, lockfileVersion } = await build_utils_1.scanParentDirs(entrypointFsDirname);
+ const { cliType, lockfileVersion } = await (0, build_utils_1.scanParentDirs)(entrypointFsDirname);
  if (cliType === 'npm') {
  if (typeof lockfileVersion === 'number' &&
  lockfileVersion >= 2 &&
@@ -53,7 +53,7 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
  YARN_NODE_LINKER: 'node-modules',
  ...spawnOpts.env,
  };
- await build_utils_1.execCommand(installCommand, {
+ await (0, build_utils_1.execCommand)(installCommand, {
  ...spawnOpts,
  env,
  cwd: entrypointFsDirname,
@@ -64,37 +64,37 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
  }
  }
  else {
- await build_utils_1.runNpmInstall(entrypointFsDirname, [], spawnOpts, meta, nodeVersion);
+ await (0, build_utils_1.runNpmInstall)(entrypointFsDirname, [], spawnOpts, meta, nodeVersion);
  }
  if (meta.isDev) {
  throw new Error('Detected `@vercel/redwood` dev but this is not supported');
  }
- const pkg = await build_utils_1.readConfigFile(path_1.join(workPath, 'package.json'));
- const toml = await build_utils_1.readConfigFile(path_1.join(workPath, 'redwood.toml'));
+ const pkg = await (0, build_utils_1.readConfigFile)((0, path_1.join)(workPath, 'package.json'));
+ const toml = await (0, build_utils_1.readConfigFile)((0, path_1.join)(workPath, 'redwood.toml'));
  if (buildCommand) {
- build_utils_1.debug(`Executing build command "${buildCommand}"`);
- await build_utils_1.execCommand(buildCommand, {
+ (0, build_utils_1.debug)(`Executing build command "${buildCommand}"`);
+ await (0, build_utils_1.execCommand)(buildCommand, {
  ...spawnOpts,
  cwd: workPath,
  });
  }
  else if (hasScript('vercel-build', pkg)) {
- build_utils_1.debug(`Executing "yarn vercel-build"`);
- await build_utils_1.runPackageJsonScript(workPath, 'vercel-build', spawnOpts);
+ (0, build_utils_1.debug)(`Executing "yarn vercel-build"`);
+ await (0, build_utils_1.runPackageJsonScript)(workPath, 'vercel-build', spawnOpts);
  }
  else if (hasScript('build', pkg)) {
- build_utils_1.debug(`Executing "yarn build"`);
- await build_utils_1.runPackageJsonScript(workPath, 'build', spawnOpts);
+ (0, build_utils_1.debug)(`Executing "yarn build"`);
+ await (0, build_utils_1.runPackageJsonScript)(workPath, 'build', spawnOpts);
  }
  else {
  const { devDependencies = {} } = pkg || {};
  const versionRange = devDependencies['@redwoodjs/core'];
  let cmd;
- if (!versionRange || !semver_1.validRange(versionRange)) {
+ if (!versionRange || !(0, semver_1.validRange)(versionRange)) {
  console.log('WARNING: Unable to detect RedwoodJS version in package.json devDependencies');
  cmd = 'yarn rw deploy vercel'; // Assume 0.25.0 and newer
  }
- else if (semver_1.intersects(versionRange, '<0.25.0')) {
+ else if ((0, semver_1.intersects)(versionRange, '<0.25.0')) {
  // older than 0.25.0
  cmd =
  'yarn rw build && yarn rw db up --no-db-client --auto-approve && yarn rw dataMigrate up';
@@ -103,32 +103,32 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
  // 0.25.0 and newer
  cmd = 'yarn rw deploy vercel';
  }
- await build_utils_1.execCommand(cmd, {
+ await (0, build_utils_1.execCommand)(cmd, {
  ...spawnOpts,
  cwd: workPath,
  });
  }
  const apiDir = (_c = (_b = (_a = toml === null || toml === void 0 ? void 0 : toml.web) === null || _a === void 0 ? void 0 : _a.apiProxyPath) === null || _b === void 0 ? void 0 : _b.replace(/^\//, '')) !== null && _c !== void 0 ? _c : 'api';
- const apiDistPath = path_1.join(workPath, 'api', 'dist', 'functions');
- const webDistPath = path_1.join(workPath, 'web', 'dist');
+ const apiDistPath = (0, path_1.join)(workPath, 'api', 'dist', 'functions');
+ const webDistPath = (0, path_1.join)(workPath, 'web', 'dist');
  const lambdaOutputs = {};
  // Strip out the .html extensions
  // And populate staticOutputs map with updated paths and contentType
- const webDistFiles = await build_utils_1.glob('**', webDistPath);
+ const webDistFiles = await (0, build_utils_1.glob)('**', webDistPath);
  const staticOutputs = {};
  for (const [fileName, fileFsRef] of Object.entries(webDistFiles)) {
- const parsedPath = path_1.parse(fileFsRef.fsPath);
+ const parsedPath = (0, path_1.parse)(fileFsRef.fsPath);
  if (parsedPath.ext !== '.html') {
  // No need to transform non-html files
  staticOutputs[fileName] = fileFsRef;
  }
  else {
- const fileNameWithoutExtension = path_1.basename(fileName, '.html');
- const pathWithoutHtmlExtension = path_1.join(parsedPath.dir, fileNameWithoutExtension);
+ const fileNameWithoutExtension = (0, path_1.basename)(fileName, '.html');
+ const pathWithoutHtmlExtension = (0, path_1.join)(parsedPath.dir, fileNameWithoutExtension);
  fileFsRef.contentType = 'text/html; charset=utf-8';
  // @NOTE: Filename is relative to webDistPath
  // e.g. {'./200': fsRef}
- staticOutputs[path_1.relative(webDistPath, pathWithoutHtmlExtension)] =
+ staticOutputs[(0, path_1.relative)(webDistPath, pathWithoutHtmlExtension)] =
  fileFsRef;
  }
  }
@@ -139,25 +139,25 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
  // │ │ ├── bazinga.js
  // │ ├── graphql.js
  const functionFiles = {
- ...(await build_utils_1.glob('*.js', apiDistPath)),
- ...(await build_utils_1.glob('*/*.js', apiDistPath)), // one-level deep
+ ...(await (0, build_utils_1.glob)('*.js', apiDistPath)),
+ ...(await (0, build_utils_1.glob)('*/*.js', apiDistPath)), // one-level deep
  };
  const sourceCache = new Map();
  const fsCache = new Map();
  for (const [funcName, fileFsRef] of Object.entries(functionFiles)) {
- const outputName = path_1.join(apiDir, path_1.parse(funcName).name); // remove `.js` extension
+ const outputName = (0, path_1.join)(apiDir, (0, path_1.parse)(funcName).name); // remove `.js` extension
  const absEntrypoint = fileFsRef.fsPath;
- const relativeEntrypoint = path_1.relative(workPath, absEntrypoint);
+ const relativeEntrypoint = (0, path_1.relative)(workPath, absEntrypoint);
  const awsLambdaHandler = getAWSLambdaHandler(relativeEntrypoint, 'handler');
  const sourceFile = relativeEntrypoint.replace('/dist/', '/src/');
- const { fileList, esmFileList, warnings } = await nft_1.nodeFileTrace([absEntrypoint], {
+ const { fileList, esmFileList, warnings } = await (0, nft_1.nodeFileTrace)([absEntrypoint], {
  base: workPath,
  processCwd: workPath,
  ts: true,
  mixedModules: true,
  ignore: config.excludeFiles,
  async readFile(fsPath) {
- const relPath = path_1.relative(workPath, fsPath);
+ const relPath = (0, path_1.relative)(workPath, fsPath);
  const cached = sourceCache.get(relPath);
  if (cached)
  return cached.toString();
@@ -165,10 +165,10 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
  if (cached === null)
  return null;
  try {
- const source = fs_1.readFileSync(fsPath);
- const { mode } = fs_1.lstatSync(fsPath);
+ const source = (0, fs_1.readFileSync)(fsPath);
+ const { mode } = (0, fs_1.lstatSync)(fsPath);
  let entry;
- if (build_utils_1.isSymbolicLink(mode)) {
+ if ((0, build_utils_1.isSymbolicLink)(mode)) {
  entry = new build_utils_1.FileFsRef({ fsPath, mode });
  }
  else {
@@ -189,18 +189,18 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
  });
  for (const warning of warnings) {
  if (warning === null || warning === void 0 ? void 0 : warning.stack) {
- build_utils_1.debug(warning.stack.replace('Error: ', 'Warning: '));
+ (0, build_utils_1.debug)(warning.stack.replace('Error: ', 'Warning: '));
  }
  }
  const lambdaFiles = {};
  const allFiles = [...fileList, ...esmFileList];
  for (const filePath of allFiles) {
  lambdaFiles[filePath] = await build_utils_1.FileFsRef.fromFsPath({
- fsPath: path_1.join(workPath, filePath),
+ fsPath: (0, path_1.join)(workPath, filePath),
  });
  }
- lambdaFiles[path_1.relative(workPath, fileFsRef.fsPath)] = fileFsRef;
- const { memory, maxDuration } = await build_utils_1.getLambdaOptionsFromFunction({
+ lambdaFiles[(0, path_1.relative)(workPath, fileFsRef.fsPath)] = fileFsRef;
+ const { memory, maxDuration } = await (0, build_utils_1.getLambdaOptionsFromFunction)({
  sourceFile,
  config,
  });
@@ -219,10 +219,10 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
  // Older versions of redwood did not create 200.html automatically
  // From v0.50.0+ 200.html is always generated as part of web build
  // Note that in builder post-processing, we remove the .html extension
- const fallbackHtmlPage = fs_1.existsSync(path_1.join(webDistPath, '200.html'))
+ const fallbackHtmlPage = (0, fs_1.existsSync)((0, path_1.join)(webDistPath, '200.html'))
  ? '/200'
  : '/index';
- const defaultRoutesConfig = routing_utils_1.getTransformedRoutes({
+ const defaultRoutesConfig = (0, routing_utils_1.getTransformedRoutes)({
  nowConfig: {
  // this makes sure we send back 200.html for unprerendered pages
  rewrites: [{ source: '/(.*)', destination: fallbackHtmlPage }],
@@ -240,7 +240,7 @@ const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) =
  };
  exports.build = build;
  function getAWSLambdaHandler(filePath, handlerName) {
- const { dir, name } = path_1.parse(filePath);
+ const { dir, name } = (0, path_1.parse)(filePath);
  return `${dir}${dir ? path_1.sep : ''}${name}.${handlerName}`;
  }
  function hasScript(scriptName, pkg) {
@@ -248,6 +248,6 @@ function hasScript(scriptName, pkg) {
  return typeof scripts[scriptName] === 'string';
  }
  const prepareCache = ({ repoRootPath, workPath }) => {
- return build_utils_1.glob('**/node_modules/**', repoRootPath || workPath);
+ return (0, build_utils_1.glob)('**/node_modules/**', repoRootPath || workPath);
  };
  exports.prepareCache = prepareCache;
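
Note: every change to dist/index.js above is mechanical. Calls through imported CommonJS namespaces, such as build_utils_1.download(...), are now emitted as (0, build_utils_1.download)(...), the indirect-call pattern that newer TypeScript compilers produce for CommonJS output (the rebuild with an updated toolchain is the likely cause; the diff itself does not say). The comma expression evaluates to the bare function before invoking it, so the imported function is not called with the namespace object as its `this` receiver, matching ES module call semantics; the builder's runtime behavior is otherwise unchanged. A minimal standalone sketch of the difference, not taken from the package:

    // ns.fn() passes ns as the receiver; (0, ns.fn)() calls the bare function,
    // so `this` is undefined in strict mode (or the global object otherwise).
    const ns = {
      whoAmI() {
        return this === ns ? 'called as a method of ns' : 'called as a detached function';
      },
    };
    console.log(ns.whoAmI());      // "called as a method of ns"
    console.log((0, ns.whoAmI)()); // "called as a detached function"
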
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@vercel/redwood",
- "version": "0.8.2",
+ "version": "0.8.3",
  "main": "./dist/index.js",
  "license": "MIT",
  "homepage": "https://vercel.com/docs",
@@ -14,20 +14,21 @@
  },
  "scripts": {
  "build": "node build.js",
- "test-integration-once": "jest --env node --verbose --runInBand --bail test/test.js",
- "test-unit": "jest --env node --verbose --bail test/prepare-cache.test.js",
+ "test-integration-once": "yarn test test/test.js",
+ "test": "jest --env node --verbose --bail --runInBand",
+ "test-unit": "yarn test test/prepare-cache.test.js",
  "prepublishOnly": "node build.js"
  },
  "dependencies": {
- "@vercel/nft": "0.18.1",
- "@vercel/routing-utils": "1.13.2",
+ "@vercel/nft": "0.19.0",
+ "@vercel/routing-utils": "1.13.3",
  "semver": "6.1.1"
  },
  "devDependencies": {
  "@types/aws-lambda": "8.10.19",
  "@types/node": "*",
  "@types/semver": "6.0.0",
- "@vercel/build-utils": "3.0.1"
+ "@vercel/build-utils": "3.1.0"
  },
- "gitHead": "5e66d4b2ccd80a7e6a21d53cf4be3b11f2861513"
+ "gitHead": "6e8935883b874d68499283e7a3081a1e2824cbee"
  }
  }