@netlify/plugin-nextjs 4.1.1 → 4.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -1
- package/lib/helpers/cache.js +3 -3
- package/lib/helpers/config.js +4 -4
- package/lib/helpers/files.js +53 -53
- package/lib/helpers/functions.js +18 -15
- package/lib/helpers/redirects.js +55 -65
- package/lib/helpers/types.js +2 -0
- package/lib/helpers/utils.js +94 -3
- package/lib/helpers/verification.js +27 -21
- package/lib/index.js +28 -25
- package/lib/templates/getHandler.js +62 -49
- package/lib/templates/getPageResolver.js +4 -4
- package/lib/templates/handlerUtils.js +21 -8
- package/lib/templates/ipx.js +1 -1
- package/package.json +9 -14
package/lib/helpers/utils.js
CHANGED
@@ -1,8 +1,13 @@
 "use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.shouldSkip = exports.
+exports.shouldSkip = exports.getPreviewRewrites = exports.getApiRewrites = exports.redirectsForNextRouteWithData = exports.redirectsForNextRoute = exports.targetForFallback = exports.isApiRoute = exports.routeToDataRoute = exports.netlifyRoutesForNextRouteWithData = exports.toNetlifyRoute = void 0;
+const globby_1 = __importDefault(require("globby"));
+const pathe_1 = require("pathe");
 const constants_1 = require("../constants");
-const
+const toNetlifyRoute = (nextRoute) => {
     const netlifyRoutes = [nextRoute];
     // If the route is an optional catch-all route, we need to add a second
     // Netlify route for the base path (when no parameters are present).
@@ -29,7 +34,93 @@ const netlifyRoutesForNextRoute = (nextRoute) => {
         // Replace dynamic parameters, e.g., [id]
         .replace(constants_1.DYNAMIC_PARAMETER_REGEX, '/:$1'));
 };
-exports.
+exports.toNetlifyRoute = toNetlifyRoute;
+const netlifyRoutesForNextRouteWithData = ({ route, dataRoute }) => [
+    ...(0, exports.toNetlifyRoute)(dataRoute),
+    ...(0, exports.toNetlifyRoute)(route),
+];
+exports.netlifyRoutesForNextRouteWithData = netlifyRoutesForNextRouteWithData;
+const routeToDataRoute = (route, buildId, locale) => `/_next/data/${buildId}${locale ? `/${locale}` : ''}${route === '/' ? '/index' : route}.json`;
+exports.routeToDataRoute = routeToDataRoute;
+const netlifyRoutesForNextRoute = (route, buildId, i18n) => {
+    var _a;
+    if (!((_a = i18n === null || i18n === void 0 ? void 0 : i18n.locales) === null || _a === void 0 ? void 0 : _a.length)) {
+        return (0, exports.netlifyRoutesForNextRouteWithData)({ route, dataRoute: (0, exports.routeToDataRoute)(route, buildId) });
+    }
+    const { locales, defaultLocale } = i18n;
+    const routes = [];
+    locales.forEach((locale) => {
+        // Data route is always localized
+        const dataRoute = (0, exports.routeToDataRoute)(route, buildId, locale);
+        routes.push(
+        // Default locale is served from root, not localized
+        ...(0, exports.netlifyRoutesForNextRouteWithData)({
+            route: locale === defaultLocale ? route : `/${locale}${route}`,
+            dataRoute,
+        }));
+    });
+    return routes;
+};
+const isApiRoute = (route) => route.startsWith('/api/') || route === '/api';
+exports.isApiRoute = isApiRoute;
+const targetForFallback = (fallback) => {
+    if (fallback === null || fallback === false) {
+        // fallback = null mean "blocking", which uses ODB. For fallback=false then anything prerendered should 404.
+        // However i18n pages may not have been prerendered, so we still need to hit the origin
+        return { to: constants_1.ODB_FUNCTION_PATH, status: 200 };
+    }
+    // fallback = true is also ODB
+    return { to: constants_1.ODB_FUNCTION_PATH, status: 200 };
+};
+exports.targetForFallback = targetForFallback;
+const redirectsForNextRoute = ({ route, buildId, basePath, to, i18n, status = 200, force = false, }) => netlifyRoutesForNextRoute(route, buildId, i18n).map((redirect) => ({
+    from: `${basePath}${redirect}`,
+    to,
+    status,
+    force,
+}));
+exports.redirectsForNextRoute = redirectsForNextRoute;
+const redirectsForNextRouteWithData = ({ route, dataRoute, basePath, to, status = 200, force = false, }) => (0, exports.netlifyRoutesForNextRouteWithData)({ route, dataRoute }).map((redirect) => ({
+    from: `${basePath}${redirect}`,
+    to,
+    status,
+    force,
+}));
+exports.redirectsForNextRouteWithData = redirectsForNextRouteWithData;
+const getApiRewrites = (basePath) => [
+    {
+        from: `${basePath}/api`,
+        to: constants_1.HANDLER_FUNCTION_PATH,
+        status: 200,
+    },
+    {
+        from: `${basePath}/api/*`,
+        to: constants_1.HANDLER_FUNCTION_PATH,
+        status: 200,
+    },
+];
+exports.getApiRewrites = getApiRewrites;
+const getPreviewRewrites = async ({ basePath, appDir }) => {
+    const publicFiles = await (0, globby_1.default)('**/*', { cwd: (0, pathe_1.join)(appDir, 'public') });
+    // Preview mode gets forced to the function, to bypass pre-rendered pages, but static files need to be skipped
+    return [
+        ...publicFiles.map((file) => ({
+            from: `${basePath}/${file}`,
+            // This is a no-op, but we do it to stop it matching the following rule
+            to: `${basePath}/${file}`,
+            conditions: { Cookie: ['__prerender_bypass', '__next_preview_data'] },
+            status: 200,
+        })),
+        {
+            from: `${basePath}/*`,
+            to: constants_1.HANDLER_FUNCTION_PATH,
+            status: 200,
+            conditions: { Cookie: ['__prerender_bypass', '__next_preview_data'] },
+            force: true,
+        },
+    ];
+};
+exports.getPreviewRewrites = getPreviewRewrites;
 const shouldSkip = () => process.env.NEXT_PLUGIN_FORCE_RUN === 'false' ||
     process.env.NEXT_PLUGIN_FORCE_RUN === '0' ||
     process.env.NETLIFY_NEXT_PLUGIN_SKIP === 'true' ||
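
Note: the toNetlifyRoute and routeToDataRoute helpers added above drive the new data-route redirects. A minimal sketch of the shapes they produce, assuming a page '/posts/[id]', a build ID of "BUILD_ID" and locale 'fr'. routeToDataRoute is copied from the diff; this toNetlifyRoute stand-in only approximates the real DYNAMIC_PARAMETER_REGEX and skips the optional catch-all handling.

const routeToDataRoute = (route, buildId, locale) => `/_next/data/${buildId}${locale ? `/${locale}` : ''}${route === '/' ? '/index' : route}.json`;
// Simplified stand-in for the real helper
const toNetlifyRoute = (nextRoute) => [nextRoute.replace(/\[(\w+)\]/g, ':$1')];

console.log(toNetlifyRoute('/posts/[id]'));
// [ '/posts/:id' ]
console.log(routeToDataRoute('/posts/[id]', 'BUILD_ID', 'fr'));
// /_next/data/BUILD_ID/fr/posts/[id].json
console.log(toNetlifyRoute(routeToDataRoute('/posts/[id]', 'BUILD_ID', 'fr')));
// [ '/_next/data/BUILD_ID/fr/posts/:id.json' ]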

package/lib/helpers/verification.js
CHANGED
@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.warnForProblematicUserRewrites = exports.getProblematicUserRewrites = exports.checkZipSize = exports.checkForRootPublish = exports.checkNextSiteHasBuilt = exports.checkForOldFunctions = exports.verifyNetlifyBuildVersion = void 0;
+exports.warnForRootRedirects = exports.warnForProblematicUserRewrites = exports.getProblematicUserRewrites = exports.checkZipSize = exports.checkForRootPublish = exports.checkNextSiteHasBuilt = exports.checkForOldFunctions = exports.verifyNetlifyBuildVersion = void 0;
 /* eslint-disable max-lines */
 const fs_1 = require("fs");
 const path_1 = __importStar(require("path"));
@@ -36,8 +36,8 @@ const constants_1 = require("../constants");
 const REQUIRED_BUILD_VERSION = '>=18.16.0';
 const verifyNetlifyBuildVersion = ({ IS_LOCAL, NETLIFY_BUILD_VERSION, failBuild, }) => {
     // We check for build version because that's what's available to us, but prompt about the cli because that's what they can upgrade
-    if (IS_LOCAL && !semver_1.satisfies(NETLIFY_BUILD_VERSION, REQUIRED_BUILD_VERSION, { includePrerelease: true })) {
-        return failBuild(outdent_1.outdent `
+    if (IS_LOCAL && !(0, semver_1.satisfies)(NETLIFY_BUILD_VERSION, REQUIRED_BUILD_VERSION, { includePrerelease: true })) {
+        return failBuild((0, outdent_1.outdent) `
     This version of the Essential Next.js plugin requires netlify-cli@6.12.4 or higher. Please upgrade and try again.
     You can do this by running: "npm install -g netlify-cli@latest" or "yarn global add netlify-cli@latest"
     `);
@@ -48,31 +48,31 @@ const checkForOldFunctions = async ({ functions }) => {
     const allOldFunctions = await functions.list();
     const oldFunctions = allOldFunctions.filter(({ name }) => name.startsWith('next_'));
     if (oldFunctions.length !== 0) {
-        console.log(chalk_1.yellowBright(outdent_1.outdent `
+        console.log((0, chalk_1.yellowBright)((0, outdent_1.outdent) `
     We have found the following functions in your site that seem to be left over from the old Next.js plugin (v3). We have guessed this because the name starts with "next_".

-    ${chalk_1.reset(oldFunctions.map(({ name }) => `- ${name}`).join('\n'))}
+    ${(0, chalk_1.reset)(oldFunctions.map(({ name }) => `- ${name}`).join('\n'))}

     If they were created by the old plugin, these functions are likely to cause errors so should be removed. You can do this by deleting the following directories:

-    ${chalk_1.reset(oldFunctions.map(({ mainFile }) => `- ${path_1.default.relative(process.cwd(), path_1.default.dirname(mainFile))}`).join('\n'))}
+    ${(0, chalk_1.reset)(oldFunctions.map(({ mainFile }) => `- ${path_1.default.relative(process.cwd(), path_1.default.dirname(mainFile))}`).join('\n'))}
     `));
     }
 };
 exports.checkForOldFunctions = checkForOldFunctions;
 const checkNextSiteHasBuilt = ({ publish, failBuild, }) => {
-    if (!fs_1.existsSync(path_1.default.join(publish, 'BUILD_ID'))) {
+    if (!(0, fs_1.existsSync)(path_1.default.join(publish, 'BUILD_ID'))) {
         const outWarning = path_1.default.basename(publish) === 'out'
             ? `Your publish directory is set to "out", but in most cases it should be ".next".`
             : `In most cases it should be set to ".next", unless you have chosen a custom "distDir" in your Next config.`;
-        return failBuild(outdent_1.outdent `
+        return failBuild((0, outdent_1.outdent) `
     The directory "${path_1.default.relative(process.cwd(), publish)}" does not contain a Next.js production build. Perhaps the build command was not run, or you specified the wrong publish directory.
     ${outWarning}
     If you are using "next export" then you should set the environment variable NETLIFY_NEXT_PLUGIN_SKIP to "true".
     `);
     }
-    if (fs_1.existsSync(path_1.default.join(publish, 'export-detail.json'))) {
-        failBuild(outdent_1.outdent `
+    if ((0, fs_1.existsSync)(path_1.default.join(publish, 'export-detail.json'))) {
+        failBuild((0, outdent_1.outdent) `
     Detected that "next export" was run, but site is incorrectly publishing the ".next" directory.
     The publish directory should be set to "out", and you should set the environment variable NETLIFY_NEXT_PLUGIN_SKIP to "true".
     `);
@@ -81,7 +81,7 @@ const checkNextSiteHasBuilt = ({ publish, failBuild, }) => {
 exports.checkNextSiteHasBuilt = checkNextSiteHasBuilt;
 const checkForRootPublish = ({ publish, failBuild, }) => {
     if (path_1.default.resolve(publish) === path_1.default.resolve('.')) {
-        failBuild(outdent_1.outdent `
+        failBuild((0, outdent_1.outdent) `
     Your publish directory is pointing to the base directory of your site. This is not supported for Next.js sites, and is probably a mistake.
     In most cases it should be set to ".next", unless you have chosen a custom "distDir" in your Next config, or the Next site is in a subdirectory.
     `);
@@ -89,7 +89,7 @@ const checkForRootPublish = ({ publish, failBuild, }) => {
 };
 exports.checkForRootPublish = checkForRootPublish;
 const checkZipSize = async (file, maxSize = constants_1.LAMBDA_MAX_SIZE) => {
-    if (!fs_1.existsSync(file)) {
+    if (!(0, fs_1.existsSync)(file)) {
         console.warn(`Could not check zip size because ${file} does not exist`);
         return;
     }
@@ -98,8 +98,8 @@ const checkZipSize = async (file, maxSize = constants_1.LAMBDA_MAX_SIZE) => {
         return;
     }
     // We don't fail the build, because the actual hard max size is larger so it might still succeed
-    console.log(chalk_1.redBright(outdent_1.outdent `
-    The function zip ${chalk_1.yellowBright(path_1.relative(process.cwd(), file))} size is ${pretty_bytes_1.default(fileSize)}, which is larger than the maximum supported size of ${pretty_bytes_1.default(maxSize)}.
+    console.log((0, chalk_1.redBright)((0, outdent_1.outdent) `
+    The function zip ${(0, chalk_1.yellowBright)((0, path_1.relative)(process.cwd(), file))} size is ${(0, pretty_bytes_1.default)(fileSize)}, which is larger than the maximum supported size of ${(0, pretty_bytes_1.default)(maxSize)}.
     There are a few reasons this could happen. You may have accidentally bundled a large dependency, or you might have a
     large number of pre-rendered pages included.
     `));
@@ -110,13 +110,13 @@ const checkZipSize = async (file, maxSize = constants_1.LAMBDA_MAX_SIZE) => {
     for (let i = 0; i < 10 && i < sortedFiles.length; i++) {
         largest[`${i + 1}`] = {
             File: sortedFiles[i].name,
-            'Compressed Size': pretty_bytes_1.default(sortedFiles[i].compressedSize),
-            'Uncompressed Size': pretty_bytes_1.default(sortedFiles[i].size),
+            'Compressed Size': (0, pretty_bytes_1.default)(sortedFiles[i].compressedSize),
+            'Uncompressed Size': (0, pretty_bytes_1.default)(sortedFiles[i].size),
         };
     }
-    console.log(chalk_1.yellowBright `\n\nThese are the largest files in the zip:`);
+    console.log((0, chalk_1.yellowBright) `\n\nThese are the largest files in the zip:`);
     console.table(largest);
-    console.log(chalk_1.greenBright `\n\nFor more information on fixing this, see ${chalk_1.blueBright `https://ntl.fyi/large-next-functions`}`);
+    console.log((0, chalk_1.greenBright) `\n\nFor more information on fixing this, see ${(0, chalk_1.blueBright) `https://ntl.fyi/large-next-functions`}`);
 };
 exports.checkZipSize = checkZipSize;
 const getProblematicUserRewrites = ({ redirects, basePath, }) => {
@@ -140,17 +140,23 @@ const getProblematicUserRewrites = ({ redirects, basePath, }) => {
 };
 exports.getProblematicUserRewrites = getProblematicUserRewrites;
 const warnForProblematicUserRewrites = ({ redirects, basePath, }) => {
-    const userRewrites = exports.getProblematicUserRewrites({ redirects, basePath });
+    const userRewrites = (0, exports.getProblematicUserRewrites)({ redirects, basePath });
     if (userRewrites.length === 0) {
         return;
     }
-    console.log(chalk_1.yellowBright(outdent_1.outdent `
+    console.log((0, chalk_1.yellowBright)((0, outdent_1.outdent) `
     You have the following Netlify rewrite${userRewrites.length === 1 ? '' : 's'} that might cause conflicts with the Next.js plugin:

-    ${chalk_1.reset(userRewrites.map(({ from, to, status }) => `- ${from} ${to} ${status}`).join('\n'))}
+    ${(0, chalk_1.reset)(userRewrites.map(({ from, to, status }) => `- ${from} ${to} ${status}`).join('\n'))}

     For more information, see https://ntl.fyi/next-rewrites
     `));
 };
 exports.warnForProblematicUserRewrites = warnForProblematicUserRewrites;
+const warnForRootRedirects = ({ appDir }) => {
+    if ((0, fs_1.existsSync)((0, path_1.join)(appDir, '_redirects'))) {
+        console.log((0, chalk_1.yellowBright)(`You have a "_redirects" file in your root directory, which is not deployed and will be ignored. If you want it to be used, please move it into "public".`));
+    }
+};
+exports.warnForRootRedirects = warnForRootRedirects;
 /* eslint-enable max-lines */
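
Most of the churn in this file and the ones below is the (0, module.fn)(...) call style that newer TypeScript compilers emit for calls through an imported CommonJS namespace. A standalone illustration, not taken from the package, of what the comma operator changes:

"use strict";
const mod = {
    whoAmI() {
        return this;
    },
};
console.log(mod.whoAmI() === mod);            // true: method call, `this` is the namespace object
console.log((0, mod.whoAmI)() === undefined); // true: the comma operator detaches the receiver,
                                              // matching how a genuine ES module import is called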
package/lib/index.js
CHANGED
@@ -14,55 +14,57 @@ const plugin = {
     async onPreBuild({ constants, netlifyConfig, utils: { build: { failBuild }, cache, }, }) {
         var _a;
         const { publish } = netlifyConfig.build;
-        if (utils_1.shouldSkip()) {
-            await cache_1.restoreCache({ cache, publish });
+        if ((0, utils_1.shouldSkip)()) {
+            await (0, cache_1.restoreCache)({ cache, publish });
             console.log('Not running Essential Next.js plugin');
-            if (fs_extra_1.existsSync(path_1.join(constants.INTERNAL_FUNCTIONS_SRC, constants_1.HANDLER_FUNCTION_NAME))) {
+            if ((0, fs_extra_1.existsSync)((0, path_1.join)(constants.INTERNAL_FUNCTIONS_SRC, constants_1.HANDLER_FUNCTION_NAME))) {
                 console.log(`Please ensure you remove any generated functions from ${constants.INTERNAL_FUNCTIONS_SRC}`);
             }
             return;
         }
-        verification_1.checkForRootPublish({ publish, failBuild });
-        verification_1.verifyNetlifyBuildVersion({ failBuild, ...constants });
-        await cache_1.restoreCache({ cache, publish });
+        (0, verification_1.checkForRootPublish)({ publish, failBuild });
+        (0, verification_1.verifyNetlifyBuildVersion)({ failBuild, ...constants });
+        await (0, cache_1.restoreCache)({ cache, publish });
         (_a = netlifyConfig.build).environment || (_a.environment = {});
         // eslint-disable-next-line unicorn/consistent-destructuring
         netlifyConfig.build.environment.NEXT_PRIVATE_TARGET = 'server';
     },
     async onBuild({ constants, netlifyConfig, utils: { build: { failBuild }, }, }) {
-        if (utils_1.shouldSkip()) {
+        if ((0, utils_1.shouldSkip)()) {
             return;
         }
         const { publish } = netlifyConfig.build;
-        verification_1.checkNextSiteHasBuilt({ publish, failBuild });
-        const { appDir, basePath, i18n, images, target, ignore, trailingSlash, outdir } = await config_1.getNextConfig({
+        (0, verification_1.checkNextSiteHasBuilt)({ publish, failBuild });
+        const { appDir, basePath, i18n, images, target, ignore, trailingSlash, outdir } = await (0, config_1.getNextConfig)({
             publish,
             failBuild,
         });
-
-
-        await functions_1.
-        await
-        await files_1.
+        const buildId = (0, fs_extra_1.readFileSync)((0, path_1.join)(publish, 'BUILD_ID'), 'utf8').trim();
+        (0, config_1.configureHandlerFunctions)({ netlifyConfig, ignore, publish: (0, path_1.relative)(process.cwd(), publish) });
+        await (0, functions_1.generateFunctions)(constants, appDir);
+        await (0, functions_1.generatePagesResolver)({ target, constants });
+        await (0, files_1.movePublicFiles)({ appDir, outdir, publish });
+        await (0, files_1.patchNextFiles)(basePath);
         if (process.env.EXPERIMENTAL_MOVE_STATIC_PAGES) {
             console.log("The flag 'EXPERIMENTAL_MOVE_STATIC_PAGES' is no longer required, as it is now the default. To disable this behavior, set the env var 'SERVE_STATIC_FILES_FROM_ORIGIN' to 'true'");
         }
         if (!process.env.SERVE_STATIC_FILES_FROM_ORIGIN) {
-            await files_1.moveStaticPages({ target, netlifyConfig, i18n });
+            await (0, files_1.moveStaticPages)({ target, netlifyConfig, i18n });
         }
-        await redirects_1.generateStaticRedirects({
+        await (0, redirects_1.generateStaticRedirects)({
            netlifyConfig,
            nextConfig: { basePath, i18n },
        });
-        await functions_1.setupImageFunction({ constants, imageconfig: images, netlifyConfig, basePath });
-        await redirects_1.generateRedirects({
+        await (0, functions_1.setupImageFunction)({ constants, imageconfig: images, netlifyConfig, basePath });
+        await (0, redirects_1.generateRedirects)({
            netlifyConfig,
            nextConfig: { basePath, i18n, trailingSlash, appDir },
+           buildId,
        });
     },
     async onPostBuild({ netlifyConfig: { build: { publish }, redirects, }, utils: { status, cache, functions, build: { failBuild }, }, constants: { FUNCTIONS_DIST }, }) {
-        await cache_1.saveCache({ cache, publish });
-        if (utils_1.shouldSkip()) {
+        await (0, cache_1.saveCache)({ cache, publish });
+        if ((0, utils_1.shouldSkip)()) {
             status.show({
                 title: 'Essential Next.js plugin did not run',
                 summary: `Next cache was stored, but all other functions were skipped because ${process.env.NETLIFY_NEXT_PLUGIN_SKIP
@@ -71,11 +73,12 @@ const plugin = {
             });
             return;
         }
-        await verification_1.checkForOldFunctions({ functions });
-        await verification_1.checkZipSize(path_1.join(FUNCTIONS_DIST, `${constants_1.ODB_FUNCTION_NAME}.zip`));
-        const { basePath } = await config_1.getNextConfig({ publish, failBuild });
-        verification_1.warnForProblematicUserRewrites({ basePath, redirects });
-
+        await (0, verification_1.checkForOldFunctions)({ functions });
+        await (0, verification_1.checkZipSize)((0, path_1.join)(FUNCTIONS_DIST, `${constants_1.ODB_FUNCTION_NAME}.zip`));
+        const { basePath, appDir } = await (0, config_1.getNextConfig)({ publish, failBuild });
+        (0, verification_1.warnForProblematicUserRewrites)({ basePath, redirects });
+        (0, verification_1.warnForRootRedirects)({ appDir });
+        await (0, files_1.unpatchNextFiles)(basePath);
     },
 };
 module.exports = plugin;
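
onBuild now reads the build ID that "next build" writes and threads it into generateRedirects, so per-deploy data routes can be generated. A small sketch of that wiring, with the ".next" publish directory assumed for the example:

const { readFileSync } = require('fs');
const { join } = require('path');

const publish = '.next'; // assumed publish directory
const buildId = readFileSync(join(publish, 'BUILD_ID'), 'utf8').trim();
// Combined with routeToDataRoute from lib/helpers/utils.js, a page such as /about
// now gets a redirect for its per-deploy data route:
console.log(`/_next/data/${buildId}/about.json`);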

package/lib/templates/getHandler.js
CHANGED
@@ -1,17 +1,18 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.getHandler = void 0;
+const outdent_1 = require("outdent");
+/* eslint-disable @typescript-eslint/no-var-requires */
 const { promises } = require('fs');
 const { Server } = require('http');
 const path = require('path');
 // eslint-disable-next-line node/prefer-global/url, node/prefer-global/url-search-params
 const { URLSearchParams, URL } = require('url');
-const { Bridge } = require('@vercel/node/
+const { Bridge } = require('@vercel/node-bridge/bridge');
 const { augmentFsModule, getMaxAge, getMultiValueHeaders, getNextServer } = require('./handlerUtils');
-const makeHandler = () =>
 // We return a function and then call `toString()` on it to serialise it as the launcher function
 // eslint-disable-next-line max-params
-(conf, app, pageRoot, staticManifest = [], mode = 'ssr') => {
+const makeHandler = (conf, app, pageRoot, staticManifest = [], mode = 'ssr') => {
     var _a;
     // This is just so nft knows about the page entrypoints. It's not actually used
     try {
@@ -23,30 +24,47 @@ const makeHandler = () =>
     (_a = process.env).NODE_ENV || (_a.NODE_ENV = 'production');
     // We don't want to write ISR files to disk in the lambda environment
     conf.experimental.isrFlushToDisk = false;
+    // This is our flag that we use when patching the source
     // eslint-disable-next-line no-underscore-dangle
     process.env._BYPASS_SSG = 'true';
     // Set during the request as it needs the host header. Hoisted so we can define the function once
     let base;
     augmentFsModule({ promises, staticManifest, pageRoot, getBase: () => base });
-
-
-
-
-
-
-    const requestHandler = nextServer.getRequestHandler();
-    const server = new Server(async (req, res) => {
-        try {
-            await requestHandler(req, res);
+    // We memoize this because it can be shared between requests, but don't instantiate it until
+    // the first request because we need the host and port.
+    let bridge;
+    const getBridge = (event) => {
+        if (bridge) {
+            return bridge;
         }
-
-
-
-
-
-
-
-
+        const url = new URL(event.rawUrl);
+        const port = Number.parseInt(url.port) || 80;
+        const { host } = event.headers;
+        const protocol = event.headers['x-forwarded-proto'] || 'http';
+        base = `${protocol}://${host}`;
+        const NextServer = getNextServer();
+        const nextServer = new NextServer({
+            conf,
+            dir: path.resolve(__dirname, app),
+            customServer: false,
+            hostname: url.hostname,
+            port,
+        });
+        const requestHandler = nextServer.getRequestHandler();
+        const server = new Server(async (req, res) => {
+            try {
+                await requestHandler(req, res);
+            }
+            catch (error) {
+                console.error(error);
+                throw new Error('Error handling request. See function logs for details.');
+            }
+        });
+        bridge = new Bridge(server);
+        bridge.listen();
+        return bridge;
+    };
+    return async function handler(event, context) {
         var _a, _b, _c;
         let requestMode = mode;
         // Ensure that paths are encoded - but don't double-encode them
@@ -54,13 +72,7 @@ const makeHandler = () =>
         // Next expects to be able to parse the query from the URL
         const query = new URLSearchParams(event.queryStringParameters).toString();
         event.path = query ? `${event.path}?${query}` : event.path;
-
-        if (staticManifest.length !== 0) {
-            const { host } = event.headers;
-            const protocol = event.headers['x-forwarded-proto'] || 'http';
-            base = `${protocol}://${host}`;
-        }
-        const { headers, ...result } = await bridge.launcher(event, context);
+        const { headers, ...result } = await getBridge(event).launcher(event, context);
         // Convert all headers to multiValueHeaders
         const multiValueHeaders = getMultiValueHeaders(headers);
         if ((_b = (_a = multiValueHeaders['set-cookie']) === null || _a === void 0 ? void 0 : _a[0]) === null || _b === void 0 ? void 0 : _b.includes('__prerender_bypass')) {
@@ -71,17 +83,17 @@ const makeHandler = () =>
         const cacheHeader = (_c = multiValueHeaders['cache-control']) === null || _c === void 0 ? void 0 : _c[0];
         if (cacheHeader === null || cacheHeader === void 0 ? void 0 : cacheHeader.includes('stale-while-revalidate')) {
             if (requestMode === 'odb') {
-                requestMode = 'isr';
                 const ttl = getMaxAge(cacheHeader);
-                // Long-expiry TTL is basically no TTL
+                // Long-expiry TTL is basically no TTL, so we'll skip it
                 if (ttl > 0 && ttl < ONE_YEAR_IN_SECONDS) {
                     result.ttl = ttl;
+                    requestMode = 'isr';
                 }
-                multiValueHeaders['x-rendered-at'] = [new Date().toISOString()];
             }
             multiValueHeaders['cache-control'] = ['public, max-age=0, must-revalidate'];
         }
         multiValueHeaders['x-render-mode'] = [requestMode];
+        console.log(`[${event.httpMethod}] ${event.path} (${requestMode === null || requestMode === void 0 ? void 0 : requestMode.toUpperCase()})`);
         return {
             ...result,
             multiValueHeaders,
@@ -89,24 +101,25 @@ const makeHandler = () =>
         };
     };
 };
-const getHandler = ({ isODB = false, publishDir = '../../../.next', appDir = '../../..' }) =>
-
-
-
-const {
-
+const getHandler = ({ isODB = false, publishDir = '../../../.next', appDir = '../../..' }) =>
+// This is a string, but if you have the right editor plugin it should format as js
+(0, outdent_1.outdent) `
+const { Server } = require("http");
+const { promises } = require("fs");
+// We copy the file here rather than requiring from the node module
+const { Bridge } = require("./bridge");
+const { augmentFsModule, getMaxAge, getMultiValueHeaders, getNextServer } = require('./handlerUtils')

-const { builder } = require("@netlify/functions");
-const { config } = require("${publishDir}/required-server-files.json")
-let staticManifest
-try {
-
-} catch {}
-const path = require("path");
-const pageRoot = path.resolve(path.join(__dirname, "${publishDir}", config.target === "server" ? "server" : "serverless", "pages"));
-exports.handler = ${isODB
-? `builder((${makeHandler
-: `(${makeHandler
+const { builder } = require("@netlify/functions");
+const { config } = require("${publishDir}/required-server-files.json")
+let staticManifest
+try {
+staticManifest = require("${publishDir}/static-manifest.json")
+} catch {}
+const path = require("path");
+const pageRoot = path.resolve(path.join(__dirname, "${publishDir}", config.target === "server" ? "server" : "serverless", "pages"));
+exports.handler = ${isODB
+? `builder((${makeHandler.toString()})(config, "${appDir}", pageRoot, staticManifest, 'odb'));`
+: `(${makeHandler.toString()})(config, "${appDir}", pageRoot, staticManifest, 'ssr');`}
 `;
 exports.getHandler = getHandler;
-/* eslint-enable @typescript-eslint/no-var-requires */
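
getHandler still emits the function file by serialising the handler factory with toString(), now wrapped in an outdent template. A minimal sketch of that serialisation technique, using made-up names rather than the plugin's:

const makeGreeter = (name) => () => `Hello ${name}`;
// Interpolating `(${fn.toString()})(args)` into generated source inlines the whole
// function body, so the emitted file has no runtime dependency on this module.
const generated = `exports.handler = (${makeGreeter.toString()})("Netlify");`;
console.log(generated);
// exports.handler = ((name) => () => `Hello ${name}`)("Netlify");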

package/lib/templates/getPageResolver.js
CHANGED
@@ -13,15 +13,15 @@ const constants_1 = require("../constants");
 // build. This is used by the nft bundler to find all the pages.
 const getPageResolver = async ({ publish, target }) => {
     const functionDir = path_1.posix.resolve(path_1.posix.join('.netlify', 'functions', constants_1.HANDLER_FUNCTION_NAME));
-    const root = path_1.posix.resolve(slash_1.default(publish), target === 'server' ? 'server' : 'serverless', 'pages');
-    const pages = await tiny_glob_1.default('**/*.js', {
+    const root = path_1.posix.resolve((0, slash_1.default)(publish), target === 'server' ? 'server' : 'serverless', 'pages');
+    const pages = await (0, tiny_glob_1.default)('**/*.js', {
         cwd: root,
         dot: true,
     });
     const pageFiles = pages
-        .map((page) => `require.resolve('${path_1.posix.relative(functionDir, path_1.posix.join(root, slash_1.default(page)))}')`)
+        .map((page) => `require.resolve('${path_1.posix.relative(functionDir, path_1.posix.join(root, (0, slash_1.default)(page)))}')`)
         .sort();
-    return outdent_1.outdent `
+    return (0, outdent_1.outdent) `
     // This file is purely to allow nft to know about these pages. It should be temporary.
     exports.resolvePages = () => {
         try {
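
For context, the resolver generated here exists only so the nft bundler traces the page entrypoints into the function bundle. A simplified sketch of the kind of file it emits; the paths are hypothetical:

// Shape only; the real file lists every compiled page with require.resolve().
exports.resolvePages = () => {
    try {
        require.resolve('../../../.next/server/pages/index.js');
        require.resolve('../../../.next/server/pages/about.js');
    }
    catch { }
};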

package/lib/templates/handlerUtils.js
CHANGED
@@ -11,7 +11,11 @@ const os_1 = require("os");
 const path_1 = __importDefault(require("path"));
 const stream_1 = require("stream");
 const util_1 = require("util");
-const streamPipeline = util_1.promisify(stream_1.pipeline);
+const streamPipeline = (0, util_1.promisify)(stream_1.pipeline);
+/**
+ * Downloads a file from the CDN to the local aliased filesystem. This is a fallback, because in most cases we'd expect
+ * files required at runtime to not be sent to the CDN.
+ */
 const downloadFile = async (url, destination) => {
     console.log(`Downloading ${url} to ${destination}`);
     const httpx = url.startsWith('https') ? https_1.default : http_1.default;
@@ -21,7 +25,7 @@ const downloadFile = async (url, destination) => {
             reject(new Error(`Failed to download ${url}: ${response.statusCode} ${response.statusMessage || ''}`));
             return;
         }
-        const fileStream = fs_1.createWriteStream(destination);
+        const fileStream = (0, fs_1.createWriteStream)(destination);
         streamPipeline(response, fileStream)
             .then(resolve)
             .catch((error) => {
@@ -36,6 +40,9 @@ const downloadFile = async (url, destination) => {
     });
 };
 exports.downloadFile = downloadFile;
+/**
+ * Parse maxage from a cache-control header
+ */
 const getMaxAge = (header) => {
     const parts = header.split(',');
     let maxAge;
@@ -66,6 +73,9 @@ const getMultiValueHeaders = (headers) => {
     return multiValueHeaders;
 };
 exports.getMultiValueHeaders = getMultiValueHeaders;
+/**
+ * Monkey-patch the fs module to download missing files from the CDN
+ */
 const augmentFsModule = ({ promises, staticManifest, pageRoot, getBase, }) => {
     // Only do this if we have some static files moved to the CDN
     if (staticManifest.length === 0) {
@@ -78,7 +88,7 @@ const augmentFsModule = ({ promises, staticManifest, pageRoot, getBase, }) => {
     const staticFiles = new Map(staticManifest);
     const downloadPromises = new Map();
     // Yes, you can cache stuff locally in a Lambda
-    const cacheDir = path_1.default.join(os_1.tmpdir(), 'next-static-cache');
+    const cacheDir = path_1.default.join((0, os_1.tmpdir)(), 'next-static-cache');
     // Grab the real fs.promises.readFile...
     const readfileOrig = promises.readFile;
     const statsOrig = promises.stat;
@@ -90,7 +100,7 @@ const augmentFsModule = ({ promises, staticManifest, pageRoot, getBase, }) => {
         // We only want the part after `pages/`
         const filePath = file.slice(pageRoot.length + 1);
         // Is it in the CDN and not local?
-        if (staticFiles.has(filePath) && !fs_1.existsSync(file)) {
+        if (staticFiles.has(filePath) && !(0, fs_1.existsSync)(file)) {
            // This name is safe to use, because it's one that was already created by Next
            const cacheFile = path_1.default.join(cacheDir, filePath);
            const url = `${base}/${staticFiles.get(filePath)}`;
@@ -99,11 +109,11 @@ const augmentFsModule = ({ promises, staticManifest, pageRoot, getBase, }) => {
                 await downloadPromises.get(url);
             }
             // Have we already cached it? We download every time if running locally to avoid staleness
-            if ((!fs_1.existsSync(cacheFile) || process.env.NETLIFY_DEV) && base) {
+            if ((!(0, fs_1.existsSync)(cacheFile) || process.env.NETLIFY_DEV) && base) {
                 await promises.mkdir(path_1.default.dirname(cacheFile), { recursive: true });
                 try {
                     // Append the path to our host and we can load it like a regular page
-                    const downloadPromise = exports.downloadFile(url, cacheFile);
+                    const downloadPromise = (0, exports.downloadFile)(url, cacheFile);
                     downloadPromises.set(url, downloadPromise);
                     await downloadPromise;
                 }
@@ -122,7 +132,7 @@ const augmentFsModule = ({ promises, staticManifest, pageRoot, getBase, }) => {
         if (file.startsWith(pageRoot)) {
             // We only want the part after `pages/`
             const cacheFile = path_1.default.join(cacheDir, file.slice(pageRoot.length + 1));
-            if (fs_1.existsSync(cacheFile)) {
+            if ((0, fs_1.existsSync)(cacheFile)) {
                 return statsOrig(cacheFile, options);
             }
         }
@@ -130,6 +140,9 @@ const augmentFsModule = ({ promises, staticManifest, pageRoot, getBase, }) => {
     });
 };
 exports.augmentFsModule = augmentFsModule;
+/**
+ * Next.js has an annoying habit of needing deep imports, but then moving those in patch releases. This is our abstraction.
+ */
 const getNextServer = () => {
     let NextServer;
     try {
@@ -142,7 +155,7 @@ const getNextServer = () => {
             // A different error, so rethrow it
             throw error;
         }
-        // Probably an old version of next
+        // Probably an old version of next, so fall through and find it elsewhere.
     }
     if (!NextServer) {
         try {
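
The new JSDoc describes getMaxAge as parsing the max-age from a cache-control header, which the handler above uses to set an ISR TTL. A rough stand-in, not the package's implementation:

const getMaxAgeSketch = (header) => {
    let maxAge = 0;
    for (const part of header.split(',')) {
        const [key, value] = part.trim().split('=');
        if (key === 's-maxage' || key === 'max-age') {
            maxAge = Number.parseInt(value, 10) || maxAge;
        }
    }
    return maxAge;
};
console.log(getMaxAgeSketch('public, max-age=0, s-maxage=31536000, stale-while-revalidate'));
// 31536000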
package/lib/templates/ipx.js
CHANGED
@@ -5,7 +5,7 @@ exports.handler = void 0;
 const ipx_1 = require("@netlify/ipx");
 // @ts-ignore Injected at build time
 const imageconfig_json_1 = require("./imageconfig.json");
-exports.handler = ipx_1.createIPXHandler({
+exports.handler = (0, ipx_1.createIPXHandler)({
     basePath: imageconfig_json_1.basePath,
     domains: imageconfig_json_1.domains,
 });