@opennextjs/cloudflare 1.11.1 → 1.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +0 -27
- package/dist/api/cloudflare-context.d.ts +0 -2
- package/dist/cli/build/bundle-server.js +6 -6
- package/dist/cli/build/open-next/createServerBundle.js +2 -0
- package/dist/cli/build/patches/ast/patch-vercel-og-library.js +9 -2
- package/dist/cli/build/patches/ast/vercel-og.d.ts +2 -2
- package/dist/cli/build/patches/plugins/turbopack.d.ts +2 -0
- package/dist/cli/build/patches/plugins/turbopack.js +78 -0
- package/dist/cli/commands/populate-cache.d.ts +1 -1
- package/dist/cli/commands/populate-cache.js +34 -152
- package/dist/cli/commands/utils.d.ts +1 -1
- package/package.json +7 -8
package/README.md
CHANGED
@@ -55,30 +55,3 @@ Deploy your application to production with the following:
 # or
 bun opennextjs-cloudflare build && bun opennextjs-cloudflare deploy
 ```
-
-### Batch Cache Population (Optional, Recommended)
-
-For improved performance with large caches, you can enable batch upload by providing R2 credentials via .env or environment variables.
-
-Create a `.env` file in your project root (automatically loaded by the CLI):
-
-```bash
-R2_ACCESS_KEY_ID=your_access_key_id
-R2_SECRET_ACCESS_KEY=your_secret_access_key
-CF_ACCOUNT_ID=your_account_id
-```
-
-You can also set the environment variables for CI builds.
-
-**Note:**
-
-You can follow documentation https://developers.cloudflare.com/r2/api/tokens/ for creating API tokens with appropriate permissions for R2 access.
-
-**Benefits:**
-
-- Significantly faster uploads for large caches using parallel transfers
-- Reduced API calls to Cloudflare
-- Automatically enabled when credentials are provided
-
-**Fallback:**
-If these environment variables are not set, the CLI will use standard Wrangler uploads. Both methods work correctly - batch upload is simply faster for large caches.

package/dist/api/cloudflare-context.d.ts
CHANGED
@@ -31,8 +31,6 @@ declare global {
             CF_PREVIEW_DOMAIN?: string;
             CF_WORKERS_SCRIPTS_API_TOKEN?: string;
             CF_ACCOUNT_ID?: string;
-            R2_ACCESS_KEY_ID?: string;
-            R2_SECRET_ACCESS_KEY?: string;
         }
 }
 export type CloudflareContext<CfProperties extends Record<string, unknown> = IncomingRequestCfProperties, Context = ExecutionContext> = {

package/dist/cli/build/bundle-server.js
CHANGED
@@ -43,9 +43,10 @@ const optionalDependencies = [
 export async function bundleServer(buildOpts, projectOpts) {
     copyPackageCliFiles(packageDistDir, buildOpts);
     const { appPath, outputDir, monorepoRoot, debug } = buildOpts;
-    const …
-    const serverFiles = path.join(…
+    const dotNextPath = path.join(outputDir, "server-functions/default", getPackagePath(buildOpts), ".next");
+    const serverFiles = path.join(dotNextPath, "required-server-files.json");
     const nextConfig = JSON.parse(fs.readFileSync(serverFiles, "utf-8")).config;
+    const useTurbopack = fs.existsSync(path.join(dotNextPath, "server/chunks/[turbopack]_runtime.js"));
     console.log(`\x1b[35m⚙️ Bundling the OpenNext server...\n\x1b[0m`);
     await patchWebpackRuntime(buildOpts);
     patchVercelOgLibrary(buildOpts);
@@ -118,13 +119,12 @@ export async function bundleServer(buildOpts, projectOpts) {
             // Note: we need the __non_webpack_require__ variable declared as it is used by next-server:
             // https://github.com/vercel/next.js/blob/be0c3283/packages/next/src/server/next-server.ts#L116-L119
             __non_webpack_require__: "require",
+            // The 2 following defines are used to reduce the bundle size by removing unnecessary code
+            // Next uses different precompiled renderers (i.e. `app-page.runtime.prod.js`) based on if you use `TURBOPACK` or some experimental React features
+            ...(useTurbopack ? {} : { "process.env.TURBOPACK": "false" }),
             // We make sure that environment variables that Next.js expects are properly defined
             "process.env.NEXT_RUNTIME": '"nodejs"',
             "process.env.NODE_ENV": '"production"',
-            // The 2 following defines are used to reduce the bundle size by removing unnecessary code
-            // Next uses different precompiled renderers (i.e. `app-page.runtime.prod.js`) based on if you use `TURBOPACK` or some experimental React features
-            // Turbopack is not supported for build at the moment, so we disable it
-            "process.env.TURBOPACK": "false",
             // This define should be safe to use for Next 14.2+, earlier versions (13.5 and less) will cause trouble
             "process.env.__NEXT_EXPERIMENTAL_REACT": `${needsExperimentalReact(nextConfig)}`,
             // Fix `res.validate` in Next 15.4 (together with the `route-module` patch)
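
For background on why this define removes code: esbuild substitutes each `define` entry as a compile-time constant and then drops branches that become unreachable. A minimal sketch of the mechanism, with a hypothetical entry point and guard (this is not the package's actual build invocation):

```js
import { build } from "esbuild";

// Defining `process.env.TURBOPACK` as the constant "false" lets esbuild
// evaluate guards like `if (process.env.TURBOPACK) { ... }` at build time
// and eliminate the dead branch, so the Turbopack-specific precompiled
// renderer is never pulled into the bundle.
await build({
  entryPoints: ["server.js"], // hypothetical entry
  bundle: true,
  outfile: "out.js",
  define: { "process.env.TURBOPACK": "false" },
});
```

With this release the define is only applied when the traced build is not a Turbopack build (the `useTurbopack` check above), since a Turbopack build needs the Turbopack renderer at runtime.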

package/dist/cli/build/open-next/createServerBundle.js
CHANGED
@@ -20,6 +20,7 @@ import { openNextResolvePlugin } from "@opennextjs/aws/plugins/resolve.js";
 import { getCrossPlatformPathRegex } from "@opennextjs/aws/utils/regex.js";
 import { getOpenNextConfig } from "../../../api/config.js";
 import { patchResRevalidate } from "../patches/plugins/res-revalidate.js";
+import { patchTurbopackRuntime } from "../patches/plugins/turbopack.js";
 import { patchUseCacheIO } from "../patches/plugins/use-cache.js";
 import { normalizePath } from "../utils/index.js";
 import { copyWorkerdPackages } from "../utils/workerd.js";
@@ -142,6 +143,7 @@ async function generateBundle(name, options, fnOptions, codeCustomization) {
         // Cloudflare specific patches
         patchResRevalidate,
         patchUseCacheIO,
+        patchTurbopackRuntime,
         ...additionalCodePatches,
     ]);
     // Build Lambda code

package/dist/cli/build/patches/ast/patch-vercel-og-library.js
CHANGED
@@ -16,6 +16,7 @@ export function patchVercelOgLibrary(buildOpts) {
     for (const traceInfoPath of globSync(path.join(appBuildOutputPath, ".next/server/**/*.nft.json"), {
         windowsPathsNoEscape: true,
     })) {
+        let edgeFilePatched = false;
         const traceInfo = JSON.parse(readFileSync(traceInfoPath, { encoding: "utf8" }));
         const tracedNodePath = traceInfo.files.find((p) => p.endsWith("@vercel/og/index.node.js"));
         if (!tracedNodePath)
@@ -26,14 +27,20 @@ export function patchVercelOgLibrary(buildOpts) {
         if (!existsSync(outputEdgePath)) {
             const tracedEdgePath = path.join(path.dirname(traceInfoPath), tracedNodePath.replace("index.node.js", "index.edge.js"));
             copyFileSync(tracedEdgePath, outputEdgePath);
+        }
+        if (!edgeFilePatched) {
+            edgeFilePatched = true;
             // Change font fetches in the library to use imports.
             const node = parseFile(outputEdgePath);
             const { edits, matches } = patchVercelOgFallbackFont(node);
             writeFileSync(outputEdgePath, node.commitEdits(edits));
-
-
+            if (matches.length > 0) {
+                const fontFileName = matches[0].getMatch("PATH").text();
+                renameSync(path.join(outputDir, fontFileName), path.join(outputDir, `${fontFileName}.bin`));
+            }
         }
         // Change node imports for the library to edge imports.
+        // This is only useful when turbopack is not used to bundle the function.
         const routeFilePath = traceInfoPath.replace(appBuildOutputPath, packagePath).replace(".nft.json", "");
         const node = parseFile(routeFilePath);
         const { edits } = patchVercelOgImport(node);

package/dist/cli/build/patches/ast/vercel-og.d.ts
CHANGED
@@ -7,7 +7,7 @@ export declare const vercelOgImportRule = "\nrule:\n pattern: $NODE\n kind: st…
  * @returns Results of applying the rule.
  */
 export declare function patchVercelOgImport(root: SgNode): {
-    edits: import("@…
+    edits: import("@ast-grep/napi/types/sgnode").Edit[];
     matches: SgNode[];
 };
 export declare const vercelOgFallbackFontRule = "\nrule:\n kind: variable_declaration\n all:\n - has:\n kind: variable_declarator\n has:\n kind: identifier\n regex: ^fallbackFont$\n - has:\n kind: call_expression\n pattern: fetch(new URL(\"$PATH\", $$$REST))\n stopBy: end\n\nfix: |-\n  async function getFallbackFont() {\n    // .bin is used so that a loader does not need to be configured for .ttf files\n    return (await import(\"$PATH.bin\")).default;\n  }\n\n  var fallbackFont = getFallbackFont();\n";
@@ -18,6 +18,6 @@ export declare const vercelOgFallbackFontRule = "\nrule:\n kind: variable_decla…
  * @returns Results of applying the rule.
  */
 export declare function patchVercelOgFallbackFont(root: SgNode): {
-    edits: import("@…
+    edits: import("@ast-grep/napi/types/sgnode").Edit[];
     matches: SgNode[];
 };
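
To see what `vercelOgFallbackFontRule` does in practice: it rewrites the runtime font fetch in `@vercel/og/index.edge.js` into a static import, and `patchVercelOgLibrary` (above) renames the font asset with a `.bin` suffix to match. A sketch of the before/after, with an illustrative font file name:

```js
// Before (inside @vercel/og/index.edge.js): the fallback font is fetched
// at runtime from a URL relative to the module (sketch, not verbatim):
// var fallbackFont = fetch(new URL("./noto-sans.ttf", import.meta.url))
//   .then((res) => res.arrayBuffer());

// After the rule's fix is applied, the font becomes a static import that
// the bundler can resolve and inline:
async function getFallbackFont() {
  // .bin is used so that a loader does not need to be configured for .ttf files
  return (await import("./noto-sans.ttf.bin")).default;
}

var fallbackFont = getFallbackFont();
```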

package/dist/cli/build/patches/plugins/turbopack.js
ADDED
@@ -0,0 +1,78 @@
+import { patchCode } from "@opennextjs/aws/build/patch/astCodePatcher.js";
+import { getCrossPlatformPathRegex } from "@opennextjs/aws/utils/regex.js";
+const inlineChunksRule = `
+rule:
+  kind: call_expression
+  pattern: require(resolved)
+fix:
+  requireChunk(chunkPath)
+`;
+export const patchTurbopackRuntime = {
+    name: "inline-turbopack-chunks",
+    patches: [
+        {
+            versions: ">=15.0.0",
+            pathFilter: getCrossPlatformPathRegex(String.raw`\[turbopack\]_runtime\.js$`, {
+                escape: false,
+            }),
+            contentFilter: /loadRuntimeChunkPath/,
+            patchCode: async ({ code, tracedFiles }) => {
+                let patched = patchCode(code, inlineExternalImportRule);
+                patched = patchCode(patched, inlineChunksRule);
+                return `${patched}\n${inlineChunksFn(tracedFiles)}`;
+            },
+        },
+    ],
+};
+function getInlinableChunks(tracedFiles) {
+    const chunks = new Set();
+    for (const file of tracedFiles) {
+        if (file === "[turbopack]_runtime.js") {
+            continue;
+        }
+        if (file.includes(".next/server/chunks/")) {
+            chunks.add(file);
+        }
+    }
+    return Array.from(chunks);
+}
+function inlineChunksFn(tracedFiles) {
+    // From the outputs, we extract every chunk
+    const chunks = getInlinableChunks(tracedFiles);
+    return `
+function requireChunk(chunkPath) {
+  switch(chunkPath) {
+${chunks
+        .map((chunk) => `    case "${
+        // we only want the path after /path/to/.next/
+        chunk.replace(/.*\/\.next\//, "")}": return require("${chunk}");`)
+        .join("\n")}
+    default:
+      throw new Error(\`Not found \${chunkPath}\`);
+  }
+}
+`;
+}
+// Turbopack imports `og` via `externalImport`.
+// We patch it to:
+// - add the explicit path so that the file is inlined by wrangler
+// - use the edge version of the module instead of the node version.
+//
+// Modules that are not inlined (not added to the switch) would generate an error similar to:
+// Failed to load external module path/to/module: Error: No such module "path/to/module"
+const inlineExternalImportRule = `
+rule:
+  pattern: "$RAW = await import($ID)"
+  inside:
+    regex: "externalImport"
+    kind: function_declaration
+    stopBy: end
+fix: |-
+  switch ($ID) {
+    case "next/dist/compiled/@vercel/og/index.node.js":
+      $RAW = await import("next/dist/compiled/@vercel/og/index.edge.js");
+      break;
+    default:
+      $RAW = await import($ID);
+  }
+`;
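
To make the new patch concrete: `inlineChunksRule` swaps the runtime's dynamic `require(resolved)` for a call to `requireChunk(chunkPath)`, and `inlineChunksFn` appends a generated switch over every traced chunk, so each `require` gets a static string argument that wrangler/esbuild can follow and inline. For a hypothetical build with two traced chunks, the appended helper would look roughly like:

```js
// Appended to [turbopack]_runtime.js by inlineChunksFn (chunk paths are hypothetical):
function requireChunk(chunkPath) {
  switch (chunkPath) {
    case "server/chunks/ssr/node_modules_react.js":
      return require("/app/.next/server/chunks/ssr/node_modules_react.js");
    case "server/chunks/ssr/app_page_js.js":
      return require("/app/.next/server/chunks/ssr/app_page_js.js");
    default:
      throw new Error(`Not found ${chunkPath}`);
  }
}
```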

package/dist/cli/commands/populate-cache.js
CHANGED
@@ -1,9 +1,9 @@
-import …
-import …
+import fs from "node:fs";
+import fsp from "node:fs/promises";
+import os from "node:os";
 import path from "node:path";
 import logger from "@opennextjs/aws/logger.js";
 import { globSync } from "glob";
-import rclone from "rclone.js";
 import { tqdm } from "ts-tqdm";
 import { BINDING_NAME as KV_CACHE_BINDING_NAME, NAME as KV_CACHE_NAME, PREFIX_ENV_NAME as KV_CACHE_PREFIX_ENV_NAME, } from "../../api/overrides/incremental-cache/kv-incremental-cache.js";
 import { BINDING_NAME as R2_CACHE_BINDING_NAME, NAME as R2_CACHE_NAME, PREFIX_ENV_NAME as R2_CACHE_PREFIX_ENV_NAME, } from "../../api/overrides/incremental-cache/r2-incremental-cache.js";
@@ -35,7 +35,7 @@ async function populateCacheCommand(target, args) {
 }
 export async function populateCache(buildOpts, config, wranglerConfig, populateCacheOptions, envVars) {
     const { incrementalCache, tagCache } = config.default.override ?? {};
-    if (!existsSync(buildOpts.outputDir)) {
+    if (!fs.existsSync(buildOpts.outputDir)) {
         logger.error("Unable to populate cache: Open Next build not found");
         process.exit(1);
     }
@@ -74,10 +74,11 @@ export function getCacheAssets(opts) {
         withFileTypes: true,
         windowsPathsNoEscape: true,
     }).filter((f) => f.isFile());
+    const baseCacheDir = path.join(opts.outputDir, "cache");
     const assets = [];
     for (const file of allFiles) {
         const fullPath = file.fullpath();
-        const relativePath = normalizePath(path.relative(…
+        const relativePath = normalizePath(path.relative(baseCacheDir, fullPath));
         if (relativePath.startsWith("__fetch")) {
             const [__fetch, buildId, ...keyParts] = relativePath.split("/");
             if (__fetch !== "__fetch" || buildId === undefined || keyParts.length === 0) {
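
A runnable sketch (with a hypothetical output directory) of what the new `baseCacheDir` relative-path computation produces for a fetch-cache entry:

```js
import path from "node:path";

// Hypothetical build output; getCacheAssets walks <outputDir>/cache.
const outputDir = "/tmp/my-app/.open-next";
const baseCacheDir = path.join(outputDir, "cache");
const fullPath = path.join(baseCacheDir, "__fetch", "BUILD_ID", "abc123");

// Mirrors `normalizePath(path.relative(baseCacheDir, fullPath))` above.
const relativePath = path.relative(baseCacheDir, fullPath).split(path.sep).join("/");
console.log(relativePath);                       // "__fetch/BUILD_ID/abc123"
console.log(relativePath.startsWith("__fetch")); // true -> parsed as a fetch-cache asset
```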
@@ -105,133 +106,6 @@ export function getCacheAssets(opts) {
     }
     return assets;
 }
-/**
- * Create a temporary configuration file for batch upload from environment variables
- * @returns Path to the temporary config file or null if env vars not available
- */
-function createTempRcloneConfig(accessKey, secretKey, accountId) {
-    const tempDir = tmpdir();
-    const tempConfigPath = path.join(tempDir, `rclone-config-${Date.now()}.conf`);
-    const configContent = `[r2]
-type = s3
-provider = Cloudflare
-access_key_id = ${accessKey}
-secret_access_key = ${secretKey}
-endpoint = https://${accountId}.r2.cloudflarestorage.com
-acl = private
-`;
-    /**
-     * 0o600 is an octal number (the 0o prefix indicates octal in JavaScript)
-     * that represents Unix file permissions:
-     *
-     * - 6 (owner): read (4) + write (2) = readable and writable by the file owner
-     * - 0 (group): no permissions for the group
-     * - 0 (others): no permissions for anyone else
-     *
-     * In symbolic notation, this is: rw-------
-     */
-    writeFileSync(tempConfigPath, configContent, { mode: 0o600 });
-    return tempConfigPath;
-}
-/**
- * Populate R2 incremental cache using batch upload for better performance
- * Uses parallel transfers to significantly speed up cache population
- */
-async function populateR2IncrementalCacheWithBatchUpload(bucket, prefix, assets, envVars) {
-    const accessKey = envVars.R2_ACCESS_KEY_ID || null;
-    const secretKey = envVars.R2_SECRET_ACCESS_KEY || null;
-    const accountId = envVars.CF_ACCOUNT_ID || null;
-    // Ensure all required env vars are set correctly
-    if (!accessKey || !secretKey || !accountId) {
-        throw new Error("Please set R2_ACCESS_KEY_ID, R2_SECRET_ACCESS_KEY, and CF_ACCOUNT_ID environment variables to enable faster batch upload for remote R2.");
-    }
-    logger.info("\nPopulating remote R2 incremental cache using batch upload...");
-    // Create temporary config from env vars - required for batch upload
-    const tempConfigPath = createTempRcloneConfig(accessKey, secretKey, accountId);
-    if (!tempConfigPath) {
-        throw new Error("Failed to create temporary rclone config for R2 batch upload.");
-    }
-    const env = {
-        ...process.env,
-        RCLONE_CONFIG: tempConfigPath,
-    };
-    logger.info("Using batch upload with R2 credentials from environment variables");
-    // Create a staging dir in temp directory with proper key paths
-    const tempDir = tmpdir();
-    const stagingDir = path.join(tempDir, `.r2-staging-${Date.now()}`);
-    // Track success to ensure cleanup happens correctly
-    let success = null;
-    try {
-        mkdirSync(stagingDir, { recursive: true });
-        for (const { fullPath, key, buildId, isFetch } of assets) {
-            const cacheKey = computeCacheKey(key, {
-                prefix,
-                buildId,
-                cacheType: isFetch ? "fetch" : "cache",
-            });
-            const destPath = path.join(stagingDir, cacheKey);
-            mkdirSync(path.dirname(destPath), { recursive: true });
-            copyFileSync(fullPath, destPath);
-        }
-        // Use rclone.js to sync the R2
-        const remote = `r2:${bucket}`;
-        // Using rclone.js Promise-based API for the copy operation
-        await rclone.promises.copy(stagingDir, remote, {
-            progress: true,
-            transfers: 16,
-            checkers: 8,
-            env,
-        });
-        logger.info(`Successfully uploaded ${assets.length} assets to R2 using batch upload`);
-        success = true;
-    }
-    finally {
-        try {
-            // Cleanup temporary staging directory
-            rmSync(stagingDir, { recursive: true, force: true });
-        }
-        catch {
-            console.warn(`Failed to remove temporary staging directory at ${stagingDir}`);
-        }
-        try {
-            // Cleanup temporary config file
-            rmSync(tempConfigPath);
-        }
-        catch {
-            console.warn(`Failed to remove temporary config at ${tempConfigPath}`);
-        }
-    }
-    if (!success) {
-        throw new Error("R2 batch upload failed, falling back to sequential uploads...");
-    }
-}
-/**
- * Populate R2 incremental cache using sequential Wrangler uploads
- * Falls back to this method when batch upload is not available or fails
- */
-async function populateR2IncrementalCacheWithSequentialUpload(buildOpts, bucket, prefix, assets, populateCacheOptions) {
-    logger.info("Using sequential cache uploads.");
-    for (const { fullPath, key, buildId, isFetch } of tqdm(assets)) {
-        const cacheKey = computeCacheKey(key, {
-            prefix,
-            buildId,
-            cacheType: isFetch ? "fetch" : "cache",
-        });
-        runWrangler(buildOpts, [
-            "r2 object put",
-            quoteShellMeta(normalizePath(path.join(bucket, cacheKey))),
-            `--file ${quoteShellMeta(fullPath)}`,
-        ], {
-            target: populateCacheOptions.target,
-            configPath: populateCacheOptions.wranglerConfigPath,
-            // R2 does not support the environment flag and results in the following error:
-            // Incorrect type for the 'cacheExpiry' field on 'HttpMetadata': the provided value is not of type 'date'.
-            environment: undefined,
-            logging: "error",
-        });
-    }
-    logger.info(`Successfully populated cache with ${assets.length} assets`);
-}
 async function populateR2IncrementalCache(buildOpts, config, populateCacheOptions, envVars) {
     logger.info("\nPopulating R2 incremental cache...");
     const binding = config.r2_buckets.find(({ binding }) => binding === R2_CACHE_BINDING_NAME);
@@ -244,21 +118,28 @@ async function populateR2IncrementalCache(buildOpts, config, populateCacheOptions, envVars) {
     }
     const prefix = envVars[R2_CACHE_PREFIX_ENV_NAME];
     const assets = getCacheAssets(buildOpts);
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    const objectList = assets.map(({ fullPath, key, buildId, isFetch }) => ({
+        key: computeCacheKey(key, {
+            prefix,
+            buildId,
+            cacheType: isFetch ? "fetch" : "cache",
+        }),
+        file: fullPath,
+    }));
+    const tempDir = await fsp.mkdtemp(path.join(os.tmpdir(), "open-next-"));
+    const listFile = path.join(tempDir, `r2-bulk-list.json`);
+    fs.writeFileSync(listFile, JSON.stringify(objectList));
+    const concurrency = Math.max(1, populateCacheOptions.cacheChunkSize ?? 50);
+    runWrangler(buildOpts, ["r2 bulk put", bucket, `--filename ${quoteShellMeta(listFile)}`, `--concurrency ${concurrency}`], {
+        target: populateCacheOptions.target,
+        configPath: populateCacheOptions.wranglerConfigPath,
+        // R2 does not support the environment flag and results in the following error:
+        // Incorrect type for the 'cacheExpiry' field on 'HttpMetadata': the provided value is not of type 'date'.
+        environment: undefined,
+        logging: "error",
+    });
+    fs.rmSync(listFile, { force: true });
+    logger.info(`Successfully populated cache with ${assets.length} assets`);
 }
 async function populateKVIncrementalCache(buildOpts, config, populateCacheOptions, envVars) {
     logger.info("\nPopulating KV incremental cache...");
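
The rclone-based batch upload and the per-object `wrangler r2 object put` fallback removed above are both replaced by a single `wrangler r2 bulk put` call driven by a JSON manifest. Schematically, the manifest written to `listFile` looks like this (keys come from `computeCacheKey`; the values shown are hypothetical):

```js
// Shape of the JSON manifest consumed by `wrangler r2 bulk put`:
const objectList = [
  { key: "cache/BUILD_ID/abc123", file: "/tmp/my-app/.open-next/cache/BUILD_ID/abc123" },
  { key: "fetch/BUILD_ID/def456", file: "/tmp/my-app/.open-next/cache/__fetch/BUILD_ID/def456" },
];
// fs.writeFileSync(listFile, JSON.stringify(objectList)); then roughly:
//   wrangler r2 bulk put <bucket> --filename <listFile> --concurrency 50
```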
@@ -271,8 +152,9 @@ async function populateKVIncrementalCache(buildOpts, config, populateCacheOptions, envVars) {
     const chunkSize = Math.max(1, populateCacheOptions.cacheChunkSize ?? 25);
     const totalChunks = Math.ceil(assets.length / chunkSize);
     logger.info(`Inserting ${assets.length} assets to KV in chunks of ${chunkSize}`);
+    const tempDir = await fsp.mkdtemp(path.join(os.tmpdir(), "open-next-"));
     for (const i of tqdm(Array.from({ length: totalChunks }, (_, i) => i))) {
-        const chunkPath = path.join(…
+        const chunkPath = path.join(tempDir, `cache-chunk-${i}.json`);
         const kvMapping = assets
             .slice(i * chunkSize, (i + 1) * chunkSize)
             .map(({ fullPath, key, buildId, isFetch }) => ({
@@ -281,9 +163,9 @@ async function populateKVIncrementalCache(buildOpts, config, populateCacheOptions, envVars) {
                 buildId,
                 cacheType: isFetch ? "fetch" : "cache",
             }),
-            value: readFileSync(fullPath, "utf8"),
+            value: fs.readFileSync(fullPath, "utf8"),
         }));
-        writeFileSync(chunkPath, JSON.stringify(kvMapping));
+        fs.writeFileSync(chunkPath, JSON.stringify(kvMapping));
         runWrangler(buildOpts, [
             "kv bulk put",
             quoteShellMeta(chunkPath),
@@ -295,7 +177,7 @@ async function populateKVIncrementalCache(buildOpts, config, populateCacheOptions, envVars) {
             configPath: populateCacheOptions.wranglerConfigPath,
             logging: "error",
         });
-        rmSync(chunkPath);
+        fs.rmSync(chunkPath, { force: true });
     }
     logger.info(`Successfully populated cache with ${assets.length} assets`);
 }
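
The KV path keeps its chunked upload, but each chunk file is now written to a proper temporary directory. Each `cache-chunk-<i>.json` is a JSON array in the key/value format `wrangler kv bulk put` expects; schematically (hypothetical values):

```js
// Shape of one chunk file consumed by `wrangler kv bulk put`; `key` comes
// from computeCacheKey and `value` is the raw cached file contents.
const kvMapping = [
  { key: "cache/BUILD_ID/abc123", value: "<cached page payload>" },
  { key: "fetch/BUILD_ID/def456", value: "<cached fetch response>" },
];
```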
@@ -320,7 +202,7 @@ function populateD1TagCache(buildOpts, config, populateCacheOptions) {
 }
 function populateStaticAssetsIncrementalCache(options) {
     logger.info("\nPopulating Workers static assets...");
-    cpSync(path.join(options.outputDir, "cache"), path.join(options.outputDir, "assets", STATIC_ASSETS_CACHE_DIR), { recursive: true });
+    fs.cpSync(path.join(options.outputDir, "cache"), path.join(options.outputDir, "assets", STATIC_ASSETS_CACHE_DIR), { recursive: true });
     logger.info(`Successfully populated static assets cache`);
 }
 /**

package/dist/cli/commands/utils.d.ts
CHANGED
@@ -65,7 +65,7 @@ export declare function getNormalizedOptions(config: OpenNextConfig, buildDir?: …
  * @param args Wrangler environment and config path.
  * @returns Wrangler config.
  */
-export declare function readWranglerConfig(args: WithWranglerArgs): …
+export declare function readWranglerConfig(args: WithWranglerArgs): Config;
 /**
  * Adds flags for the wrangler config path and environment to the yargs configuration.
  */
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@opennextjs/cloudflare",
   "description": "Cloudflare builder for next apps",
-  "version": "1.…
+  "version": "1.13.0",
   "type": "module",
   "bin": {
     "opennextjs-cloudflare": "dist/cli/index.js"
@@ -42,13 +42,12 @@
   },
   "homepage": "https://github.com/opennextjs/opennextjs-cloudflare",
   "dependencies": {
+    "@ast-grep/napi": "0.40.0",
     "@dotenvx/dotenvx": "1.31.0",
-    "@opennextjs/aws": "3.…
-    "@types/rclone.js": "^0.6.3",
+    "@opennextjs/aws": "3.9.0",
     "cloudflare": "^4.4.1",
     "enquirer": "^2.4.1",
-    "glob": "^…
-    "rclone.js": "^0.6.6",
+    "glob": "^12.0.0",
     "ts-tqdm": "^0.8.6",
     "yargs": "^18.0.0"
   },
@@ -61,7 +60,7 @@
     "@types/picomatch": "^4.0.0",
     "@types/yargs": "^17.0.33",
     "diff": "^8.0.2",
-    "esbuild": "^0.…
+    "esbuild": "^0.27.0",
     "eslint": "^9.31.0",
     "eslint-plugin-import": "^2.31.0",
     "eslint-plugin-simple-import-sort": "^12.1.1",
@@ -71,12 +70,12 @@
     "next": "~14.2.24",
     "picomatch": "^4.0.2",
     "rimraf": "^6.0.1",
-    "typescript": "^5.…
+    "typescript": "^5.9.3",
     "typescript-eslint": "^8.37.0",
     "vitest": "^2.1.1"
   },
   "peerDependencies": {
-    "wrangler": "^4.…
+    "wrangler": "^4.49.0"
   },
   "scripts": {
     "clean": "rimraf dist",