@opennextjs/cloudflare 1.17.2 → 1.18.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/build/patches/plugins/load-manifest.js +1 -1
- package/dist/cli/commands/populate-cache.d.ts +5 -4
- package/dist/cli/commands/populate-cache.js +209 -42
- package/dist/cli/utils/create-wrangler-config.js +2 -133
- package/dist/cli/utils/ensure-r2-bucket.d.ts +16 -0
- package/dist/cli/utils/ensure-r2-bucket.js +144 -0
- package/dist/cli/workers/r2-cache-types.d.ts +25 -0
- package/dist/cli/workers/r2-cache-types.js +9 -0
- package/dist/cli/workers/r2-cache.d.ts +28 -0
- package/dist/cli/workers/r2-cache.js +61 -0
- package/package.json +1 -1
|
@@ -29,7 +29,7 @@ async function getLoadManifestRule(buildOpts) {
|
|
|
29
29
|
const { outputDir } = buildOpts;
|
|
30
30
|
const baseDir = join(outputDir, "server-functions/default", getPackagePath(buildOpts));
|
|
31
31
|
const dotNextDir = join(baseDir, ".next");
|
|
32
|
-
const manifests = await glob(join(dotNextDir, "**/{*-manifest,required-server-files}.json"), {
|
|
32
|
+
const manifests = await glob(join(dotNextDir, "**/{*-manifest,required-server-files,prefetch-hints}.json"), {
|
|
33
33
|
windowsPathsNoEscape: true,
|
|
34
34
|
});
|
|
35
35
|
const returnManifests = (await Promise.all(manifests.map(async (manifest) => `
|
|
@@ -12,7 +12,7 @@ export type CacheAsset = {
|
|
|
12
12
|
buildId: string;
|
|
13
13
|
};
|
|
14
14
|
export declare function getCacheAssets(opts: BuildOptions): CacheAsset[];
|
|
15
|
-
type PopulateCacheOptions = {
|
|
15
|
+
export type PopulateCacheOptions = {
|
|
16
16
|
/**
|
|
17
17
|
* Whether to populate the local or remote cache.
|
|
18
18
|
*/
|
|
@@ -26,9 +26,11 @@ type PopulateCacheOptions = {
|
|
|
26
26
|
*/
|
|
27
27
|
wranglerConfigPath?: string;
|
|
28
28
|
/**
|
|
29
|
-
*
|
|
29
|
+
* Number of concurrent requests when populating the cache.
|
|
30
|
+
* For KV this is the chunk size passed to `wrangler kv bulk put`.
|
|
31
|
+
* For R2 this is the number of concurrent requests to the local worker.
|
|
30
32
|
*
|
|
31
|
-
* @default 25
|
|
33
|
+
* @default 25
|
|
32
34
|
*/
|
|
33
35
|
cacheChunkSize?: number;
|
|
34
36
|
/**
|
|
@@ -51,4 +53,3 @@ export declare function withPopulateCacheOptions<T extends yargs.Argv>(args: T):
|
|
|
51
53
|
} & {
|
|
52
54
|
cacheChunkSize: number | undefined;
|
|
53
55
|
}>;
|
|
54
|
-
export {};
|
|
@@ -2,14 +2,19 @@ import fs from "node:fs";
|
|
|
2
2
|
import fsp from "node:fs/promises";
|
|
3
3
|
import os from "node:os";
|
|
4
4
|
import path from "node:path";
|
|
5
|
+
import { Readable } from "node:stream";
|
|
6
|
+
import { setTimeout } from "node:timers/promises";
|
|
7
|
+
import { fileURLToPath } from "node:url";
|
|
5
8
|
import logger from "@opennextjs/aws/logger.js";
|
|
6
9
|
import { globSync } from "glob";
|
|
7
10
|
import { tqdm } from "ts-tqdm";
|
|
11
|
+
import { unstable_startWorker } from "wrangler";
|
|
8
12
|
import { BINDING_NAME as KV_CACHE_BINDING_NAME, NAME as KV_CACHE_NAME, PREFIX_ENV_NAME as KV_CACHE_PREFIX_ENV_NAME, } from "../../api/overrides/incremental-cache/kv-incremental-cache.js";
|
|
9
13
|
import { BINDING_NAME as R2_CACHE_BINDING_NAME, NAME as R2_CACHE_NAME, PREFIX_ENV_NAME as R2_CACHE_PREFIX_ENV_NAME, } from "../../api/overrides/incremental-cache/r2-incremental-cache.js";
|
|
10
14
|
import { CACHE_DIR as STATIC_ASSETS_CACHE_DIR, NAME as STATIC_ASSETS_CACHE_NAME, } from "../../api/overrides/incremental-cache/static-assets-incremental-cache.js";
|
|
11
15
|
import { computeCacheKey } from "../../api/overrides/internal.js";
|
|
12
16
|
import { BINDING_NAME as D1_TAG_BINDING_NAME, NAME as D1_TAG_NAME, } from "../../api/overrides/tag-cache/d1-next-tag-cache.js";
|
|
17
|
+
import { ensureR2Bucket } from "../utils/ensure-r2-bucket.js";
|
|
13
18
|
import { normalizePath } from "../utils/normalize-path.js";
|
|
14
19
|
import { getEnvFromPlatformProxy, quoteShellMeta } from "./utils/helpers.js";
|
|
15
20
|
import { runWrangler } from "./utils/run-wrangler.js";
|
|
@@ -36,8 +41,7 @@ async function populateCacheCommand(target, args) {
|
|
|
36
41
|
export async function populateCache(buildOpts, config, wranglerConfig, populateCacheOptions, envVars) {
|
|
37
42
|
const { incrementalCache, tagCache } = config.default.override ?? {};
|
|
38
43
|
if (!fs.existsSync(buildOpts.outputDir)) {
|
|
39
|
-
|
|
40
|
-
process.exit(1);
|
|
44
|
+
throw new Error("Unable to populate cache: Open Next build not found");
|
|
41
45
|
}
|
|
42
46
|
if (!config.dangerous?.disableIncrementalCache && incrementalCache) {
|
|
43
47
|
const name = await resolveCacheName(incrementalCache);
|
|
@@ -106,63 +110,228 @@ export function getCacheAssets(opts) {
|
|
|
106
110
|
}
|
|
107
111
|
return assets;
|
|
108
112
|
}
|
|
113
|
+
/**
|
|
114
|
+
* Populates the R2 incremental cache by starting a worker with an R2 binding.
|
|
115
|
+
*
|
|
116
|
+
* Flow:
|
|
117
|
+
* 1. Reads the R2 binding configuration from the wrangler config.
|
|
118
|
+
* 2. Collects cache assets from the build output.
|
|
119
|
+
* 3. Starts a worker (via `unstable_startWorker`) with the R2 binding, set to run remotely or locally depending upon the cache target.
|
|
120
|
+
* 4. Sends individual POST requests to the worker.
|
|
121
|
+
*
|
|
122
|
+
* Using a binding bypasses the Cloudflare REST API rate limit that affects `wrangler r2 bulk put`.
|
|
123
|
+
*/
|
|
109
124
|
async function populateR2IncrementalCache(buildOpts, config, populateCacheOptions, envVars) {
|
|
110
|
-
logger.info(
|
|
125
|
+
logger.info(`\nPopulating ${populateCacheOptions.target} R2 incremental cache...`);
|
|
111
126
|
const binding = config.r2_buckets.find(({ binding }) => binding === R2_CACHE_BINDING_NAME);
|
|
112
127
|
if (!binding) {
|
|
113
|
-
throw new Error(`No R2 binding ${
|
|
128
|
+
throw new Error(`No R2 binding "${R2_CACHE_BINDING_NAME}" found!`);
|
|
114
129
|
}
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
throw new Error(`R2 binding ${JSON.stringify(R2_CACHE_BINDING_NAME)} should have a 'bucket_name'`);
|
|
130
|
+
if (typeof binding.bucket_name !== "string") {
|
|
131
|
+
throw new Error(`R2 binding "${R2_CACHE_BINDING_NAME}" is missing a bucket_name.`);
|
|
118
132
|
}
|
|
133
|
+
const bucketName = populateCacheOptions.shouldUsePreviewId && typeof binding.preview_bucket_name === "string"
|
|
134
|
+
? binding.preview_bucket_name
|
|
135
|
+
: binding.bucket_name;
|
|
119
136
|
const prefix = envVars[R2_CACHE_PREFIX_ENV_NAME];
|
|
120
137
|
const assets = getCacheAssets(buildOpts);
|
|
121
|
-
|
|
138
|
+
if (assets.length === 0) {
|
|
139
|
+
logger.info("No cache assets to populate");
|
|
140
|
+
return;
|
|
141
|
+
}
|
|
142
|
+
const currentDir = path.dirname(fileURLToPath(import.meta.url));
|
|
143
|
+
const handlerPath = path.join(currentDir, "../workers/r2-cache.js");
|
|
144
|
+
const isRemote = populateCacheOptions.target === "remote";
|
|
145
|
+
if (isRemote) {
|
|
146
|
+
const result = await ensureR2Bucket(buildOpts.appPath, bucketName, binding.jurisdiction);
|
|
147
|
+
if (!result.success) {
|
|
148
|
+
throw new Error(`Failed to provision remote R2 bucket "${bucketName}" for binding "${R2_CACHE_BINDING_NAME}": ${result.error}`);
|
|
149
|
+
}
|
|
150
|
+
}
|
|
151
|
+
// Start a local worker and proxy it to the Cloudflare network when remote mode is enabled.
|
|
152
|
+
const worker = await unstable_startWorker({
|
|
153
|
+
name: "open-next-cache-populate",
|
|
154
|
+
// Prevent it from discovering the project's wrangler config and leaking unrelated bindings.
|
|
155
|
+
config: "",
|
|
156
|
+
entrypoint: handlerPath,
|
|
157
|
+
compatibilityDate: "2026-01-01",
|
|
158
|
+
bindings: {
|
|
159
|
+
R2: {
|
|
160
|
+
type: "r2_bucket",
|
|
161
|
+
bucket_name: bucketName,
|
|
162
|
+
jurisdiction: binding.jurisdiction,
|
|
163
|
+
},
|
|
164
|
+
},
|
|
165
|
+
dev: {
|
|
166
|
+
remote: isRemote,
|
|
167
|
+
server: { port: 0 },
|
|
168
|
+
inspector: false,
|
|
169
|
+
watch: false,
|
|
170
|
+
liveReload: false,
|
|
171
|
+
logLevel: "none",
|
|
172
|
+
},
|
|
173
|
+
});
|
|
174
|
+
try {
|
|
175
|
+
const baseUrl = await worker.url;
|
|
176
|
+
await sendEntriesToR2Worker({
|
|
177
|
+
workerUrl: new URL("/populate", baseUrl).href,
|
|
178
|
+
assets,
|
|
179
|
+
prefix,
|
|
180
|
+
maxConcurrency: Math.max(1, populateCacheOptions.cacheChunkSize ?? 25),
|
|
181
|
+
});
|
|
182
|
+
}
|
|
183
|
+
catch (e) {
|
|
184
|
+
if (isRemote) {
|
|
185
|
+
throw new Error(`Failed to populate remote R2 bucket "${bucketName}" for binding "${R2_CACHE_BINDING_NAME}": ${e instanceof Error ? e.message : String(e)}`);
|
|
186
|
+
}
|
|
187
|
+
else {
|
|
188
|
+
throw new Error(`Failed to populate the local R2 cache: ${e instanceof Error ? e.message : String(e)}`);
|
|
189
|
+
}
|
|
190
|
+
}
|
|
191
|
+
finally {
|
|
192
|
+
await worker.dispose();
|
|
193
|
+
}
|
|
194
|
+
logger.info(`Successfully populated cache with ${assets.length} entries`);
|
|
195
|
+
}
|
|
196
|
+
/**
|
|
197
|
+
* Sends cache entries to the R2 worker, one entry per request.
|
|
198
|
+
*
|
|
199
|
+
* Up to `concurrency` requests are in-flight at any given time.
|
|
200
|
+
* Retry logic for transient R2 write failures is handled by the worker.
|
|
201
|
+
*
|
|
202
|
+
* @param options
|
|
203
|
+
* @param options.workerUrl - The URL of the local R2 worker's `/populate` endpoint.
|
|
204
|
+
* @param options.assets - The cache assets to write, as collected by {@link getCacheAssets}.
|
|
205
|
+
* @param options.prefix - Optional prefix prepended to each R2 key.
|
|
206
|
+
* @param options.concurrency - Maximum number of concurrent in-flight requests.
|
|
207
|
+
* @returns Resolves when all entries have been written successfully.
|
|
208
|
+
* @throws {Error} If any entry fails after all retries or encounters a non-retryable error.
|
|
209
|
+
*/
|
|
210
|
+
async function sendEntriesToR2Worker(options) {
|
|
211
|
+
const { workerUrl, assets, prefix, maxConcurrency } = options;
|
|
212
|
+
// Build the list of entries to send (key + filename).
|
|
213
|
+
// File contents are read lazily in sendEntryToR2Worker to avoid
|
|
214
|
+
// loading all cache values into memory at once.
|
|
215
|
+
const entries = assets.map(({ fullPath, key, buildId, isFetch }) => ({
|
|
122
216
|
key: computeCacheKey(key, {
|
|
123
217
|
prefix,
|
|
124
218
|
buildId,
|
|
125
219
|
cacheType: isFetch ? "fetch" : "cache",
|
|
126
220
|
}),
|
|
127
|
-
|
|
221
|
+
filename: fullPath,
|
|
128
222
|
}));
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
const
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
223
|
+
// Use a concurrency-limited loop with a progress bar.
|
|
224
|
+
// `pending` tracks in-flight promises so we can cap concurrency.
|
|
225
|
+
const pending = new Set();
|
|
226
|
+
let concurrency = 1;
|
|
227
|
+
for (const entry of tqdm(entries)) {
|
|
228
|
+
const task = sendEntryToR2Worker({
|
|
229
|
+
workerUrl,
|
|
230
|
+
key: entry.key,
|
|
231
|
+
filename: entry.filename,
|
|
232
|
+
}).finally(() => pending.delete(task));
|
|
233
|
+
pending.add(task);
|
|
234
|
+
// If we've reached the concurrency limit, wait for one to finish.
|
|
235
|
+
if (pending.size >= concurrency) {
|
|
236
|
+
await Promise.race(pending);
|
|
237
|
+
// Increase concurrency gradually to avoid overwhelming the worker
|
|
238
|
+
// with too many requests at once.
|
|
239
|
+
if (concurrency < maxConcurrency) {
|
|
240
|
+
concurrency++;
|
|
241
|
+
}
|
|
242
|
+
}
|
|
243
|
+
}
|
|
244
|
+
await Promise.all(pending);
|
|
245
|
+
}
|
|
246
|
+
class RetryableWorkerError extends Error {
|
|
247
|
+
}
|
|
248
|
+
/**
|
|
249
|
+
* Sends a single cache entry to the R2 worker.
|
|
250
|
+
*
|
|
251
|
+
* The file is streamed from disk and sent as the raw request body.
|
|
252
|
+
*
|
|
253
|
+
* @param options
|
|
254
|
+
* @param options.workerUrl - The URL of the local R2 worker's `/populate` endpoint.
|
|
255
|
+
* @param options.key - The R2 object key.
|
|
256
|
+
* @param options.filename - Path to the cache file on disk. Read at send time to avoid holding all values in memory.
|
|
257
|
+
* @throws {Error} If the worker reports a failure.
|
|
258
|
+
*/
|
|
259
|
+
async function sendEntryToR2Worker(options) {
|
|
260
|
+
const { workerUrl, key, filename } = options;
|
|
261
|
+
const CLIENT_RETRY_ATTEMPTS = 5;
|
|
262
|
+
const CLIENT_RETRY_BASE_DELAY_MS = 250;
|
|
263
|
+
for (let attempt = 0; attempt < CLIENT_RETRY_ATTEMPTS; attempt++) {
|
|
264
|
+
try {
|
|
265
|
+
let response;
|
|
266
|
+
try {
|
|
267
|
+
response = await fetch(workerUrl, {
|
|
268
|
+
method: "POST",
|
|
269
|
+
headers: {
|
|
270
|
+
"x-opennext-cache-key": key,
|
|
271
|
+
"content-length": fs.statSync(filename).size.toString(),
|
|
272
|
+
},
|
|
273
|
+
body: Readable.toWeb(fs.createReadStream(filename)),
|
|
274
|
+
signal: AbortSignal.timeout(60_000),
|
|
275
|
+
// @ts-expect-error - `duplex` is required for streaming request bodies in Node.js
|
|
276
|
+
duplex: "half",
|
|
277
|
+
});
|
|
278
|
+
}
|
|
279
|
+
catch (e) {
|
|
280
|
+
throw new RetryableWorkerError(`Failed to send request to R2 worker: ${e instanceof Error ? e.message : String(e)}`, {
|
|
281
|
+
cause: e,
|
|
282
|
+
});
|
|
283
|
+
}
|
|
284
|
+
const body = await response.text();
|
|
285
|
+
let result;
|
|
286
|
+
try {
|
|
287
|
+
result = JSON.parse(body);
|
|
288
|
+
}
|
|
289
|
+
catch (e) {
|
|
290
|
+
if (body.includes("Worker exceeded resource limits")) {
|
|
291
|
+
throw new RetryableWorkerError("Worker exceeded resource limits", { cause: e });
|
|
292
|
+
}
|
|
293
|
+
if (response.status >= 500) {
|
|
294
|
+
throw new RetryableWorkerError(`Worker returned a ${response.status} ${response.statusText} response`, { cause: e });
|
|
295
|
+
}
|
|
296
|
+
throw new Error(`Unexpected ${response.status} response from R2 worker: ${body}`, {
|
|
297
|
+
cause: e,
|
|
298
|
+
});
|
|
299
|
+
}
|
|
300
|
+
if (!result.success && response.status >= 500) {
|
|
301
|
+
throw new RetryableWorkerError(result.error);
|
|
302
|
+
}
|
|
303
|
+
if (!result.success) {
|
|
304
|
+
throw new Error(`Failed to write "${key}" to R2: ${result.error}`);
|
|
305
|
+
}
|
|
306
|
+
return;
|
|
307
|
+
}
|
|
308
|
+
catch (e) {
|
|
309
|
+
if (e instanceof RetryableWorkerError && attempt < CLIENT_RETRY_ATTEMPTS - 1) {
|
|
310
|
+
logger.error(`Attempt ${attempt + 1} to write "${key}" failed with a retryable error: ${e.message}. Retrying...`);
|
|
311
|
+
await setTimeout(CLIENT_RETRY_BASE_DELAY_MS * Math.pow(2, attempt));
|
|
312
|
+
continue;
|
|
313
|
+
}
|
|
314
|
+
throw new Error(`Failed to write "${key}" to R2 after ${CLIENT_RETRY_ATTEMPTS} attempts`, {
|
|
315
|
+
cause: e,
|
|
316
|
+
});
|
|
317
|
+
}
|
|
152
318
|
}
|
|
153
|
-
logger.info(`Successfully populated cache with ${assets.length} assets`);
|
|
154
319
|
}
|
|
155
320
|
async function populateKVIncrementalCache(buildOpts, config, populateCacheOptions, envVars) {
|
|
156
|
-
logger.info(
|
|
321
|
+
logger.info(`\nPopulating ${populateCacheOptions.target} KV incremental cache...`);
|
|
157
322
|
const binding = config.kv_namespaces.find(({ binding }) => binding === KV_CACHE_BINDING_NAME);
|
|
158
323
|
if (!binding) {
|
|
159
|
-
throw new Error(`No KV binding ${
|
|
324
|
+
throw new Error(`No KV binding "${KV_CACHE_BINDING_NAME}" found!`);
|
|
160
325
|
}
|
|
161
326
|
const prefix = envVars[KV_CACHE_PREFIX_ENV_NAME];
|
|
162
327
|
const assets = getCacheAssets(buildOpts);
|
|
328
|
+
if (assets.length === 0) {
|
|
329
|
+
logger.info("No cache assets to populate");
|
|
330
|
+
return;
|
|
331
|
+
}
|
|
163
332
|
const chunkSize = Math.max(1, populateCacheOptions.cacheChunkSize ?? 25);
|
|
164
333
|
const totalChunks = Math.ceil(assets.length / chunkSize);
|
|
165
|
-
logger.info(`Inserting ${assets.length} assets to KV in chunks of ${chunkSize}`);
|
|
334
|
+
logger.info(`Inserting ${assets.length} assets to ${populateCacheOptions.target} KV in chunks of ${chunkSize}`);
|
|
166
335
|
const tempDir = await fsp.mkdtemp(path.join(os.tmpdir(), "open-next-"));
|
|
167
336
|
for (const i of tqdm(Array.from({ length: totalChunks }, (_, i) => i))) {
|
|
168
337
|
const chunkPath = path.join(tempDir, `cache-chunk-${i}.json`);
|
|
@@ -190,17 +359,16 @@ async function populateKVIncrementalCache(buildOpts, config, populateCacheOption
|
|
|
190
359
|
});
|
|
191
360
|
fs.rmSync(chunkPath, { force: true });
|
|
192
361
|
if (!result.success) {
|
|
193
|
-
|
|
194
|
-
process.exit(1);
|
|
362
|
+
throw new Error(`Wrangler kv bulk put command failed${result.stderr ? `:\n${result.stderr}` : ""}`);
|
|
195
363
|
}
|
|
196
364
|
}
|
|
197
|
-
logger.info(`Successfully populated cache with ${assets.length}
|
|
365
|
+
logger.info(`Successfully populated cache with ${assets.length} entries`);
|
|
198
366
|
}
|
|
199
367
|
function populateD1TagCache(buildOpts, config, populateCacheOptions) {
|
|
200
368
|
logger.info("\nCreating D1 table if necessary...");
|
|
201
369
|
const binding = config.d1_databases.find(({ binding }) => binding === D1_TAG_BINDING_NAME);
|
|
202
370
|
if (!binding) {
|
|
203
|
-
throw new Error(`No D1 binding ${
|
|
371
|
+
throw new Error(`No D1 binding "${D1_TAG_BINDING_NAME}" found!`);
|
|
204
372
|
}
|
|
205
373
|
const result = runWrangler(buildOpts, [
|
|
206
374
|
"d1 execute",
|
|
@@ -214,8 +382,7 @@ function populateD1TagCache(buildOpts, config, populateCacheOptions) {
|
|
|
214
382
|
logging: "error",
|
|
215
383
|
});
|
|
216
384
|
if (!result.success) {
|
|
217
|
-
|
|
218
|
-
process.exit(1);
|
|
385
|
+
throw new Error(`Wrangler d1 execute command failed${result.stderr ? `:\n${result.stderr}` : ""}`);
|
|
219
386
|
}
|
|
220
387
|
logger.info("\nSuccessfully created D1 table");
|
|
221
388
|
}
|
|
@@ -1,12 +1,9 @@
|
|
|
1
1
|
import assert from "node:assert";
|
|
2
2
|
import { existsSync, readFileSync, writeFileSync } from "node:fs";
|
|
3
3
|
import { join } from "node:path";
|
|
4
|
-
import { findPackagerAndRoot } from "@opennextjs/aws/build/helper.js";
|
|
5
|
-
import Cloudflare from "cloudflare";
|
|
6
4
|
import { parse, stringify } from "comment-json";
|
|
7
5
|
import { getPackageTemplatesDirPath } from "../../utils/get-package-templates-dir-path.js";
|
|
8
|
-
import {
|
|
9
|
-
import { askAccountSelection } from "./ask-account-selection.js";
|
|
6
|
+
import { ensureR2Bucket } from "./ensure-r2-bucket.js";
|
|
10
7
|
/**
|
|
11
8
|
* Gets the path to the Wrangler configuration file if it exists.
|
|
12
9
|
*
|
|
@@ -49,7 +46,7 @@ export async function createWranglerConfigFile(projectDir, defaultCompatDate = "
|
|
|
49
46
|
assert("bucket_name" in wranglerConfig.r2_buckets[0] &&
|
|
50
47
|
typeof wranglerConfig.r2_buckets[0].bucket_name === "string");
|
|
51
48
|
const bucketName = wranglerConfig.r2_buckets[0].bucket_name;
|
|
52
|
-
const { success: cachingEnabled } = await
|
|
49
|
+
const { success: cachingEnabled } = await ensureR2Bucket(projectDir, bucketName);
|
|
53
50
|
if (!cachingEnabled) {
|
|
54
51
|
delete wranglerConfig.r2_buckets;
|
|
55
52
|
}
|
|
@@ -114,131 +111,3 @@ async function getLatestCompatDate() {
|
|
|
114
111
|
/* empty */
|
|
115
112
|
}
|
|
116
113
|
}
|
|
117
|
-
/**
|
|
118
|
-
* Gets the authentication credentials for Cloudflare API calls.
|
|
119
|
-
*
|
|
120
|
-
* Uses `wrangler auth token --json` which checks the following sources in order:
|
|
121
|
-
* 1. CLOUDFLARE_API_TOKEN environment variable
|
|
122
|
-
* 2. CLOUDFLARE_API_KEY + CLOUDFLARE_EMAIL environment variables
|
|
123
|
-
* 3. OAuth token from `wrangler login`
|
|
124
|
-
*
|
|
125
|
-
* @param options The build options containing packager and monorepo root
|
|
126
|
-
* @returns The auth credentials if available, undefined otherwise
|
|
127
|
-
*/
|
|
128
|
-
function getAuthCredentials(options) {
|
|
129
|
-
const result = runWrangler(options, ["auth", "token", "--json"], { logging: "none" });
|
|
130
|
-
if (!result.success) {
|
|
131
|
-
return undefined;
|
|
132
|
-
}
|
|
133
|
-
try {
|
|
134
|
-
const json = JSON.parse(result.stdout);
|
|
135
|
-
if (json.type === "api_key") {
|
|
136
|
-
return { type: "api_key", apiKey: json.key, apiEmail: json.email };
|
|
137
|
-
}
|
|
138
|
-
// Both "oauth" and "api_token" types have a token field
|
|
139
|
-
if (json.token) {
|
|
140
|
-
return { type: "token", token: json.token };
|
|
141
|
-
}
|
|
142
|
-
}
|
|
143
|
-
catch {
|
|
144
|
-
/* empty */
|
|
145
|
-
}
|
|
146
|
-
return undefined;
|
|
147
|
-
}
|
|
148
|
-
/**
|
|
149
|
-
* Gets the account ID for Cloudflare API calls.
|
|
150
|
-
*
|
|
151
|
-
* Tries the following sources in order:
|
|
152
|
-
* 1. CLOUDFLARE_ACCOUNT_ID or CF_ACCOUNT_ID environment variable
|
|
153
|
-
* 2. List accounts using the SDK and return the first one
|
|
154
|
-
*
|
|
155
|
-
* @param client The Cloudflare SDK client
|
|
156
|
-
* @returns The account ID if available, undefined otherwise
|
|
157
|
-
*/
|
|
158
|
-
async function getAccountId(client) {
|
|
159
|
-
if (process.env.CLOUDFLARE_ACCOUNT_ID || process.env.CF_ACCOUNT_ID) {
|
|
160
|
-
return process.env.CLOUDFLARE_ACCOUNT_ID || process.env.CF_ACCOUNT_ID;
|
|
161
|
-
}
|
|
162
|
-
try {
|
|
163
|
-
const accountsList = await client.accounts.list();
|
|
164
|
-
const accounts = [];
|
|
165
|
-
for await (const account of accountsList) {
|
|
166
|
-
accounts.push({ id: account.id, name: account.name });
|
|
167
|
-
}
|
|
168
|
-
if (accounts.length === 0) {
|
|
169
|
-
return undefined;
|
|
170
|
-
}
|
|
171
|
-
if (accounts.length === 1 && accounts[0]) {
|
|
172
|
-
return accounts[0].id;
|
|
173
|
-
}
|
|
174
|
-
return await askAccountSelection(accounts);
|
|
175
|
-
}
|
|
176
|
-
catch {
|
|
177
|
-
/* empty */
|
|
178
|
-
}
|
|
179
|
-
return undefined;
|
|
180
|
-
}
|
|
181
|
-
/**
|
|
182
|
-
* Attempts to log in to Cloudflare via wrangler.
|
|
183
|
-
*
|
|
184
|
-
* @param options The build options containing packager and monorepo root
|
|
185
|
-
* @returns true if login was successful, false otherwise
|
|
186
|
-
*/
|
|
187
|
-
function wranglerLogin(options) {
|
|
188
|
-
const result = runWrangler(options, ["login"], { logging: "all" });
|
|
189
|
-
return result.success;
|
|
190
|
-
}
|
|
191
|
-
/**
|
|
192
|
-
* Creates an R2 bucket.
|
|
193
|
-
*
|
|
194
|
-
* If no auth credentials are available, falls back to wrangler login for OAuth authentication.
|
|
195
|
-
*
|
|
196
|
-
* @param projectDir The project directory to detect the package manager
|
|
197
|
-
* @param bucketName The name of the R2 bucket to create
|
|
198
|
-
* @returns An object indicating success with the bucket name, or failure
|
|
199
|
-
*/
|
|
200
|
-
async function maybeCreateR2Bucket(projectDir, bucketName) {
|
|
201
|
-
try {
|
|
202
|
-
const { packager, root: monorepoRoot } = findPackagerAndRoot(projectDir);
|
|
203
|
-
const options = { packager, monorepoRoot };
|
|
204
|
-
let authCredentials = getAuthCredentials(options);
|
|
205
|
-
// If no credentials available, fall back to wrangler login
|
|
206
|
-
if (!authCredentials) {
|
|
207
|
-
const loginSuccess = wranglerLogin(options);
|
|
208
|
-
if (!loginSuccess) {
|
|
209
|
-
return { success: false };
|
|
210
|
-
}
|
|
211
|
-
// Get credentials after login
|
|
212
|
-
authCredentials = getAuthCredentials(options);
|
|
213
|
-
if (!authCredentials) {
|
|
214
|
-
return { success: false };
|
|
215
|
-
}
|
|
216
|
-
}
|
|
217
|
-
const client = authCredentials.type === "api_key"
|
|
218
|
-
? new Cloudflare({ apiKey: authCredentials.apiKey, apiEmail: authCredentials.apiEmail })
|
|
219
|
-
: new Cloudflare({ apiToken: authCredentials.token });
|
|
220
|
-
const accountId = await getAccountId(client);
|
|
221
|
-
if (!accountId) {
|
|
222
|
-
return { success: false };
|
|
223
|
-
}
|
|
224
|
-
// Check if bucket already exists
|
|
225
|
-
try {
|
|
226
|
-
await client.r2.buckets.get(bucketName, { account_id: accountId });
|
|
227
|
-
// Bucket exists
|
|
228
|
-
return { success: true, bucketName };
|
|
229
|
-
}
|
|
230
|
-
catch (error) {
|
|
231
|
-
if (!(error instanceof Cloudflare.NotFoundError)) {
|
|
232
|
-
return { success: false };
|
|
233
|
-
}
|
|
234
|
-
}
|
|
235
|
-
await client.r2.buckets.create({
|
|
236
|
-
account_id: accountId,
|
|
237
|
-
name: bucketName,
|
|
238
|
-
});
|
|
239
|
-
return { success: true, bucketName };
|
|
240
|
-
}
|
|
241
|
-
catch {
|
|
242
|
-
return { success: false };
|
|
243
|
-
}
|
|
244
|
-
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Creates an R2 bucket if it doesn't already exist
|
|
3
|
+
*
|
|
4
|
+
* If no auth credentials are available, falls back to wrangler login for OAuth authentication.
|
|
5
|
+
*
|
|
6
|
+
* @param projectDir The project directory to detect the package manager
|
|
7
|
+
* @param bucketName The name of the R2 bucket to create
|
|
8
|
+
* @returns An object indicating success with the bucket name, or failure
|
|
9
|
+
*/
|
|
10
|
+
export declare function ensureR2Bucket(projectDir: string, bucketName: string, jurisdiction?: string): Promise<{
|
|
11
|
+
success: true;
|
|
12
|
+
bucketName: string;
|
|
13
|
+
} | {
|
|
14
|
+
success: false;
|
|
15
|
+
error: string;
|
|
16
|
+
}>;
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
import { findPackagerAndRoot } from "@opennextjs/aws/build/helper.js";
|
|
2
|
+
import Cloudflare from "cloudflare";
|
|
3
|
+
import { runWrangler } from "../commands/utils/run-wrangler.js";
|
|
4
|
+
import { askAccountSelection } from "./ask-account-selection.js";
|
|
5
|
+
function getErrorMessage(error) {
|
|
6
|
+
return error instanceof Error ? error.message : String(error);
|
|
7
|
+
}
|
|
8
|
+
/**
|
|
9
|
+
* Gets the authentication credentials for Cloudflare API calls.
|
|
10
|
+
*
|
|
11
|
+
* Uses `wrangler auth token --json` which checks the following sources in order:
|
|
12
|
+
* 1. CLOUDFLARE_API_TOKEN environment variable
|
|
13
|
+
* 2. CLOUDFLARE_API_KEY + CLOUDFLARE_EMAIL environment variables
|
|
14
|
+
* 3. OAuth token from `wrangler login`
|
|
15
|
+
*
|
|
16
|
+
* @param options The build options containing packager and monorepo root
|
|
17
|
+
* @returns The auth credentials if available, undefined otherwise
|
|
18
|
+
*/
|
|
19
|
+
function getAuthCredentials(options) {
|
|
20
|
+
const result = runWrangler(options, ["auth", "token", "--json"], { logging: "none" });
|
|
21
|
+
if (!result.success) {
|
|
22
|
+
return undefined;
|
|
23
|
+
}
|
|
24
|
+
try {
|
|
25
|
+
const json = JSON.parse(result.stdout);
|
|
26
|
+
if (json.type === "api_key") {
|
|
27
|
+
return { type: "api_key", apiKey: json.key, apiEmail: json.email };
|
|
28
|
+
}
|
|
29
|
+
// Both "oauth" and "api_token" types have a token field
|
|
30
|
+
if (json.token) {
|
|
31
|
+
return { type: "token", token: json.token };
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
catch {
|
|
35
|
+
/* empty */
|
|
36
|
+
}
|
|
37
|
+
return undefined;
|
|
38
|
+
}
|
|
39
|
+
/**
|
|
40
|
+
* Gets the account ID for Cloudflare API calls.
|
|
41
|
+
*
|
|
42
|
+
* Tries the following sources in order:
|
|
43
|
+
* 1. CLOUDFLARE_ACCOUNT_ID or CF_ACCOUNT_ID environment variable
|
|
44
|
+
* 2. List accounts using the SDK and return the first one
|
|
45
|
+
*
|
|
46
|
+
* @param client The Cloudflare SDK client
|
|
47
|
+
* @returns The account ID if available, undefined otherwise
|
|
48
|
+
*/
|
|
49
|
+
async function getAccountId(client) {
|
|
50
|
+
if (process.env.CLOUDFLARE_ACCOUNT_ID || process.env.CF_ACCOUNT_ID) {
|
|
51
|
+
return process.env.CLOUDFLARE_ACCOUNT_ID || process.env.CF_ACCOUNT_ID;
|
|
52
|
+
}
|
|
53
|
+
try {
|
|
54
|
+
const accountsList = await client.accounts.list();
|
|
55
|
+
const accounts = [];
|
|
56
|
+
for await (const account of accountsList) {
|
|
57
|
+
accounts.push({ id: account.id, name: account.name });
|
|
58
|
+
}
|
|
59
|
+
if (accounts.length === 0) {
|
|
60
|
+
return undefined;
|
|
61
|
+
}
|
|
62
|
+
if (accounts.length === 1 && accounts[0]) {
|
|
63
|
+
return accounts[0].id;
|
|
64
|
+
}
|
|
65
|
+
return await askAccountSelection(accounts);
|
|
66
|
+
}
|
|
67
|
+
catch {
|
|
68
|
+
/* empty */
|
|
69
|
+
}
|
|
70
|
+
return undefined;
|
|
71
|
+
}
|
|
72
|
+
/**
|
|
73
|
+
* Attempts to log in to Cloudflare via wrangler.
|
|
74
|
+
*
|
|
75
|
+
* @param options The build options containing packager and monorepo root
|
|
76
|
+
* @returns true if login was successful, false otherwise
|
|
77
|
+
*/
|
|
78
|
+
function wranglerLogin(options) {
|
|
79
|
+
const result = runWrangler(options, ["login"], { logging: "all" });
|
|
80
|
+
return result.success;
|
|
81
|
+
}
|
|
82
|
+
/**
|
|
83
|
+
* Creates an R2 bucket if it doesn't already exist
|
|
84
|
+
*
|
|
85
|
+
* If no auth credentials are available, falls back to wrangler login for OAuth authentication.
|
|
86
|
+
*
|
|
87
|
+
* @param projectDir The project directory to detect the package manager
|
|
88
|
+
* @param bucketName The name of the R2 bucket to create
|
|
89
|
+
* @returns An object indicating success with the bucket name, or failure
|
|
90
|
+
*/
|
|
91
|
+
export async function ensureR2Bucket(projectDir, bucketName, jurisdiction) {
|
|
92
|
+
try {
|
|
93
|
+
const { packager, root: monorepoRoot } = findPackagerAndRoot(projectDir);
|
|
94
|
+
const options = { packager, monorepoRoot };
|
|
95
|
+
let authCredentials = getAuthCredentials(options);
|
|
96
|
+
// If no credentials available, fall back to wrangler login
|
|
97
|
+
if (!authCredentials) {
|
|
98
|
+
const loginSuccess = wranglerLogin(options);
|
|
99
|
+
if (!loginSuccess) {
|
|
100
|
+
return { success: false, error: "wrangler login failed" };
|
|
101
|
+
}
|
|
102
|
+
// Get credentials after login
|
|
103
|
+
authCredentials = getAuthCredentials(options);
|
|
104
|
+
if (!authCredentials) {
|
|
105
|
+
return { success: false, error: "Could not determine Cloudflare auth credentials after login" };
|
|
106
|
+
}
|
|
107
|
+
}
|
|
108
|
+
const client = authCredentials.type === "api_key"
|
|
109
|
+
? new Cloudflare({ apiKey: authCredentials.apiKey, apiEmail: authCredentials.apiEmail })
|
|
110
|
+
: new Cloudflare({ apiToken: authCredentials.token });
|
|
111
|
+
const accountId = await getAccountId(client);
|
|
112
|
+
if (!accountId) {
|
|
113
|
+
return { success: false, error: "Could not determine Cloudflare account ID" };
|
|
114
|
+
}
|
|
115
|
+
// Check if bucket already exists
|
|
116
|
+
try {
|
|
117
|
+
await client.r2.buckets.get(bucketName, {
|
|
118
|
+
account_id: accountId,
|
|
119
|
+
// @ts-expect-error Let the API handle validation and potential errors for unsupported jurisdictions
|
|
120
|
+
jurisdiction,
|
|
121
|
+
});
|
|
122
|
+
// Bucket exists
|
|
123
|
+
return { success: true, bucketName };
|
|
124
|
+
}
|
|
125
|
+
catch (error) {
|
|
126
|
+
if (!(error instanceof Cloudflare.NotFoundError)) {
|
|
127
|
+
return {
|
|
128
|
+
success: false,
|
|
129
|
+
error: `Failed to check whether bucket exists: ${getErrorMessage(error)}`,
|
|
130
|
+
};
|
|
131
|
+
}
|
|
132
|
+
}
|
|
133
|
+
await client.r2.buckets.create({
|
|
134
|
+
account_id: accountId,
|
|
135
|
+
name: bucketName,
|
|
136
|
+
// @ts-expect-error Let the API handle validation and potential errors for unsupported jurisdictions
|
|
137
|
+
jurisdiction,
|
|
138
|
+
});
|
|
139
|
+
return { success: true, bucketName };
|
|
140
|
+
}
|
|
141
|
+
catch (error) {
|
|
142
|
+
return { success: false, error: getErrorMessage(error) };
|
|
143
|
+
}
|
|
144
|
+
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
 * Shared types and error codes for the R2 cache worker and its caller.
 */
/** The R2 bucket binding is not configured in the worker environment. */
export declare const ERR_BINDING_NOT_FOUND = "ERR_BINDING_NOT_FOUND";
/** The request is missing required cache metadata or body. */
export declare const ERR_INVALID_REQUEST = "ERR_INVALID_REQUEST";
/** The R2 put operation failed. */
export declare const ERR_WRITE_FAILED = "ERR_WRITE_FAILED";
/** Union of every error code the worker can return in `R2ErrorResponse.code`. */
export type ErrorCode = typeof ERR_BINDING_NOT_FOUND | typeof ERR_INVALID_REQUEST | typeof ERR_WRITE_FAILED;
/** Successful response from the worker. */
export interface R2SuccessResponse {
    success: true;
}
/** Error response from the worker, includes an error message and a typed code. */
export interface R2ErrorResponse {
    success: false;
    /** Human-readable description of what went wrong. */
    error: string;
    /** Machine-readable error code so callers can branch without parsing `error`. */
    code: ErrorCode;
}
/** Union of all possible responses from the worker. */
export type R2Response = R2SuccessResponse | R2ErrorResponse;
/** Environment bindings expected by the cache-populate worker. */
export interface CachePopulateEnv {
    /** Optional because the binding may be missing from the deployed config; the worker checks at runtime. */
    R2?: R2Bucket;
}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
/**
 * Shared types and error codes for the R2 cache worker and its caller.
 *
 * These string values travel over the wire as the `code` field of the
 * worker's JSON error responses, so they must stay stable across versions.
 */
/** The R2 bucket binding is not configured in the worker environment. */
export const ERR_BINDING_NOT_FOUND = "ERR_BINDING_NOT_FOUND";
/** The request is missing required cache metadata or body. */
export const ERR_INVALID_REQUEST = "ERR_INVALID_REQUEST";
/** The R2 put operation failed. */
export const ERR_WRITE_FAILED = "ERR_WRITE_FAILED";
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
/**
 * This worker writes a cache entry to R2.
 *
 * It handles POST requests to /populate with:
 * - `x-opennext-cache-key`: the R2 object key (header, required).
 * - request body: the cache value to store (required).
 *
 * The worker validates the R2 binding and request body, then writes the entry
 * to R2.
 *
 * This is used by the `populate-cache` command to bypass REST API rate limits when populating large caches.
 */
import { type CachePopulateEnv } from "./r2-cache-types.js";
/**
 * Worker fetch handler.
 *
 * Routes `POST /populate` to the cache population logic.
 * Validates the R2 binding, request metadata, and request body, then writes the entry to R2.
 *
 * Response format:
 * - 200 with `{ success: true }` on success.
 * - 4xx/5xx with `{ success: false, error, code }` on failure.
 * - 404 for unmatched routes.
 */
declare const _default: {
    /** Standard Workers fetch entry point; `env` must carry the `R2` bucket binding. */
    fetch(request: Request, env: CachePopulateEnv): Promise<Response>;
};
export default _default;
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
/**
 * This worker writes a cache entry to R2.
 *
 * It handles POST requests to /populate with:
 * - `x-opennext-cache-key`: the R2 object key (header, required).
 * - request body: the cache value to store (required).
 *
 * The worker validates the R2 binding and request body, then writes the entry
 * to R2.
 *
 * This is used by the `populate-cache` command to bypass REST API rate limits when populating large caches.
 */
import { ERR_BINDING_NOT_FOUND, ERR_INVALID_REQUEST, ERR_WRITE_FAILED, } from "./r2-cache-types.js";
/**
 * Builds a JSON error response in the shared `{ success: false, error, code }`
 * shape used by all failure paths of this worker.
 */
function errorResponse(status, code, error) {
    return Response.json({ success: false, error, code }, { status });
}
/**
 * Worker fetch handler.
 *
 * Routes `POST /populate` to the cache population logic.
 * Validates the R2 binding, request metadata, and request body, then writes the entry to R2.
 *
 * Response format:
 * - 200 with `{ success: true }` on success.
 * - 4xx/5xx with `{ success: false, error, code }` on failure.
 * - 404 for unmatched routes.
 */
const handler = {
    async fetch(request, env) {
        const { pathname } = new URL(request.url);
        // Only one route is served; everything else is a plain 404.
        if (request.method !== "POST" || pathname !== "/populate") {
            return new Response("Not found", { status: 404 });
        }
        // Verify the R2 binding exists before processing the request.
        const bucket = env.R2;
        if (!bucket) {
            return errorResponse(500, ERR_BINDING_NOT_FOUND, 'R2 binding "R2" is not configured');
        }
        // Both the object key (header) and the value (body) are required.
        const cacheKey = request.headers.get("x-opennext-cache-key");
        if (!cacheKey || request.body === null) {
            return errorResponse(400, ERR_INVALID_REQUEST, "Request must include x-opennext-cache-key header and a body");
        }
        try {
            // Stream the request body straight into R2 under the caller-supplied key.
            await bucket.put(cacheKey, request.body);
            return Response.json({ success: true }, { status: 200 });
        }
        catch (e) {
            const detail = e instanceof Error ? e.message : String(e);
            return errorResponse(500, ERR_WRITE_FAILED, `Failed to write key "${cacheKey}": ${detail}`);
        }
    },
};
export default handler;
|