@opennextjs/cloudflare 1.0.0-beta.2 → 1.0.0-beta.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api/config.js +2 -0
- package/dist/api/overrides/incremental-cache/kv-incremental-cache.d.ts +5 -0
- package/dist/api/overrides/incremental-cache/kv-incremental-cache.js +10 -3
- package/dist/api/overrides/incremental-cache/r2-incremental-cache.d.ts +6 -0
- package/dist/api/overrides/incremental-cache/r2-incremental-cache.js +11 -2
- package/dist/api/overrides/incremental-cache/regional-cache.d.ts +17 -2
- package/dist/api/overrides/incremental-cache/regional-cache.js +52 -16
- package/dist/api/overrides/incremental-cache/static-assets-incremental-cache.js +0 -1
- package/dist/api/overrides/tag-cache/d1-next-tag-cache.d.ts +0 -1
- package/dist/api/overrides/tag-cache/d1-next-tag-cache.js +3 -8
- package/dist/cli/args.d.ts +1 -1
- package/dist/cli/args.js +2 -1
- package/dist/cli/build/build.js +4 -1
- package/dist/cli/build/bundle-server.js +1 -42
- package/dist/cli/build/open-next/compile-env-files.js +1 -1
- package/dist/cli/build/open-next/compile-init.d.ts +5 -0
- package/dist/cli/build/open-next/compile-init.js +27 -0
- package/dist/cli/build/open-next/createServerBundle.js +7 -2
- package/dist/cli/build/utils/ensure-cf-config.js +2 -0
- package/dist/cli/commands/populate-cache.d.ts +7 -0
- package/dist/cli/commands/populate-cache.js +51 -23
- package/dist/cli/commands/populate-cache.spec.js +61 -0
- package/dist/cli/commands/upload.d.ts +5 -0
- package/dist/cli/commands/upload.js +9 -0
- package/dist/cli/index.js +3 -0
- package/dist/cli/templates/init.d.ts +13 -0
- package/dist/cli/templates/init.js +105 -0
- package/dist/cli/templates/worker.js +5 -53
- package/package.json +2 -2
- package/templates/open-next.config.ts +2 -2
- package/templates/wrangler.jsonc +9 -7
- package/dist/cli/build/patches/plugins/next-minimal.d.ts +0 -4
- package/dist/cli/build/patches/plugins/next-minimal.js +0 -86
- package/dist/cli/build/patches/plugins/next-minimal.spec.js +0 -71
- /package/dist/cli/{build/patches/plugins/next-minimal.spec.d.ts → commands/populate-cache.spec.d.ts} +0 -0
package/dist/api/config.js
CHANGED

package/dist/api/overrides/incremental-cache/kv-incremental-cache.d.ts
CHANGED

@@ -1,6 +1,11 @@
  import type { CacheValue, IncrementalCache, WithLastModified } from "@opennextjs/aws/types/overrides.js";
  export declare const NAME = "cf-kv-incremental-cache";
  export declare const BINDING_NAME = "NEXT_INC_CACHE_KV";
+ export type KeyOptions = {
+     isFetch?: boolean;
+     buildId?: string;
+ };
+ export declare function computeCacheKey(key: string, options: KeyOptions): string;
  /**
   * Open Next cache based on Cloudflare KV.
   *

package/dist/api/overrides/incremental-cache/kv-incremental-cache.js
CHANGED

@@ -1,9 +1,15 @@
+ import { createHash } from "node:crypto";
  import { error } from "@opennextjs/aws/adapters/logger.js";
  import { IgnorableError } from "@opennextjs/aws/utils/error.js";
  import { getCloudflareContext } from "../../cloudflare-context.js";
  import { debugCache, FALLBACK_BUILD_ID } from "../internal.js";
  export const NAME = "cf-kv-incremental-cache";
  export const BINDING_NAME = "NEXT_INC_CACHE_KV";
+ export function computeCacheKey(key, options) {
+     const { isFetch = false, buildId = FALLBACK_BUILD_ID } = options;
+     const hash = createHash("sha256").update(key).digest("hex");
+     return `${buildId}/${hash}.${isFetch ? "fetch" : "cache"}`.replace(/\/+/g, "/");
+ }
  /**
   * Open Next cache based on Cloudflare KV.
   *

@@ -28,7 +34,6 @@ class KVIncrementalCache {
      // if there is no lastModified property, the file was stored during build-time cache population.
      return {
          value: entry,
-         // __BUILD_TIMESTAMP_MS__ is injected by ESBuild.
          lastModified: globalThis.__BUILD_TIMESTAMP_MS__,
      };
  }

@@ -70,8 +75,10 @@ class KVIncrementalCache {
      }
  }
  getKVKey(key, isFetch) {
-
-
+     return computeCacheKey(key, {
+         buildId: process.env.NEXT_BUILD_ID,
+         isFetch,
+     });
  }
  }
  export default new KVIncrementalCache();

package/dist/api/overrides/incremental-cache/r2-incremental-cache.d.ts
CHANGED

@@ -3,6 +3,12 @@ export declare const NAME = "cf-r2-incremental-cache";
  export declare const BINDING_NAME = "NEXT_INC_CACHE_R2_BUCKET";
  export declare const PREFIX_ENV_NAME = "NEXT_INC_CACHE_R2_PREFIX";
  export declare const DEFAULT_PREFIX = "incremental-cache";
+ export type KeyOptions = {
+     isFetch?: boolean;
+     directory?: string;
+     buildId?: string;
+ };
+ export declare function computeCacheKey(key: string, options: KeyOptions): string;
  /**
   * An instance of the Incremental Cache that uses an R2 bucket (`NEXT_INC_CACHE_R2_BUCKET`) as it's
   * underlying data store.

package/dist/api/overrides/incremental-cache/r2-incremental-cache.js
CHANGED

@@ -1,3 +1,4 @@
+ import { createHash } from "node:crypto";
  import { error } from "@opennextjs/aws/adapters/logger.js";
  import { IgnorableError } from "@opennextjs/aws/utils/error.js";
  import { getCloudflareContext } from "../../cloudflare-context.js";

@@ -6,6 +7,11 @@ export const NAME = "cf-r2-incremental-cache";
  export const BINDING_NAME = "NEXT_INC_CACHE_R2_BUCKET";
  export const PREFIX_ENV_NAME = "NEXT_INC_CACHE_R2_PREFIX";
  export const DEFAULT_PREFIX = "incremental-cache";
+ export function computeCacheKey(key, options) {
+     const { isFetch = false, directory = DEFAULT_PREFIX, buildId = FALLBACK_BUILD_ID } = options;
+     const hash = createHash("sha256").update(key).digest("hex");
+     return `${directory}/${buildId}/${hash}.${isFetch ? "fetch" : "cache"}`.replace(/\/+/g, "/");
+ }
  /**
   * An instance of the Incremental Cache that uses an R2 bucket (`NEXT_INC_CACHE_R2_BUCKET`) as it's
   * underlying data store.

@@ -59,8 +65,11 @@ class R2IncrementalCache {
      }
  }
  getR2Key(key, isFetch) {
-
-
+     return computeCacheKey(key, {
+         directory: getCloudflareContext().env[PREFIX_ENV_NAME],
+         buildId: process.env.NEXT_BUILD_ID,
+         isFetch,
+     });
  }
  }
  export default new R2IncrementalCache();
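
For reference, both incremental caches now derive their storage keys the same way: the OpenNext key is SHA-256 hashed and prefixed with the build id (and, for R2, with the `NEXT_INC_CACHE_R2_PREFIX` directory, which defaults to `incremental-cache`). A minimal sketch of the resulting key shapes, using hypothetical values:

    import { createHash } from "node:crypto";

    // Hypothetical inputs, for illustration only.
    const key = "/blog/hello-world";
    const buildId = "my-build-id";
    const hash = createHash("sha256").update(key).digest("hex");

    // KV variant: `${buildId}/${hash}.cache` (or `.fetch` for fetch-cache entries).
    const kvKey = `${buildId}/${hash}.cache`.replace(/\/+/g, "/");

    // R2 variant additionally prefixes a directory.
    const r2Key = `incremental-cache/${buildId}/${hash}.cache`.replace(/\/+/g, "/");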

package/dist/api/overrides/incremental-cache/regional-cache.d.ts
CHANGED

@@ -9,6 +9,13 @@ type Options = {
       * or an ISR/SSG entry for up to 30 minutes.
       */
      mode: "short-lived" | "long-lived";
+     /**
+      * The default TTL of long-lived cache entries.
+      * When no revalidate is provided, the default age will be used.
+      *
+      * @default `THIRTY_MINUTES_IN_SECONDS`
+      */
+     defaultLongLivedTtlSec?: number;
      /**
       * Whether the regional cache entry should be updated in the background or not when it experiences
       * a cache hit.

@@ -17,6 +24,11 @@ type Options = {
       */
      shouldLazilyUpdateOnCacheHit?: boolean;
  };
+ interface PutToCacheInput {
+     key: string;
+     isFetch: boolean | undefined;
+     entry: IncrementalCacheEntry<boolean>;
+ }
  /**
   * Wrapper adding a regional cache on an `IncrementalCache` implementation
   */

@@ -30,8 +42,8 @@ declare class RegionalCache implements IncrementalCache {
      set<IsFetch extends boolean = false>(key: string, value: CacheValue<IsFetch>, isFetch?: IsFetch): Promise<void>;
      delete(key: string): Promise<void>;
      protected getCacheInstance(): Promise<Cache>;
-     protected
-     protected putToCache(key
+     protected getCacheUrlKey(key: string, isFetch?: boolean): string;
+     protected putToCache({ key, isFetch, entry }: PutToCacheInput): Promise<void>;
  }
  /**
   * A regional cache will wrap an incremental cache and provide faster cache lookups for an entry

@@ -50,6 +62,9 @@ declare class RegionalCache implements IncrementalCache {
   * or an ISR/SSG entry for up to 30 minutes.
   * @param opts.shouldLazilyUpdateOnCacheHit Whether the regional cache entry should be updated in
   * the background or not when it experiences a cache hit.
+  * @param opts.defaultLongLivedTtlSec The default age to use for long-lived cache entries.
+  * When no revalidate is provided, the default age will be used.
+  * @default `THIRTY_MINUTES_IN_SECONDS`
   *
   * @default `false` for the `short-lived` mode, and `true` for the `long-lived` mode.
   */

package/dist/api/overrides/incremental-cache/regional-cache.js
CHANGED

@@ -24,9 +24,9 @@ class RegionalCache {
  async get(key, isFetch) {
      try {
          const cache = await this.getCacheInstance();
-         const
+         const urlKey = this.getCacheUrlKey(key, isFetch);
          // Check for a cached entry as this will be faster than the store response.
-         const cachedResponse = await cache.match(
+         const cachedResponse = await cache.match(urlKey);
          if (cachedResponse) {
              debugCache("Get - cached response");
              // Re-fetch from the store and update the regional cache in the background

@@ -34,7 +34,7 @@ class RegionalCache {
      getCloudflareContext().ctx.waitUntil(this.store.get(key, isFetch).then(async (rawEntry) => {
          const { value, lastModified } = rawEntry ?? {};
          if (value && typeof lastModified === "number") {
-             await this.putToCache(
+             await this.putToCache({ key, isFetch, entry: { value, lastModified } });
          }
      }));
  }

@@ -45,7 +45,7 @@ class RegionalCache {
      if (!value || typeof lastModified !== "number")
          return null;
      // Update the locale cache after retrieving from the store.
-     getCloudflareContext().ctx.waitUntil(this.putToCache(
+     getCloudflareContext().ctx.waitUntil(this.putToCache({ key, isFetch, entry: { value, lastModified } }));
      return { value, lastModified };
  }
  catch (e) {

@@ -56,11 +56,15 @@ class RegionalCache {
  async set(key, value, isFetch) {
      try {
          await this.store.set(key, value, isFetch);
-         await this.putToCache(
-
-
-
-
+         await this.putToCache({
+             key,
+             isFetch,
+             entry: {
+                 value,
+                 // Note: `Date.now()` returns the time of the last IO rather than the actual time.
+                 // See https://developers.cloudflare.com/workers/reference/security-model/
+                 lastModified: Date.now(),
+             },
          });
      }
      catch (e) {

@@ -71,7 +75,7 @@ class RegionalCache {
      try {
          await this.store.delete(key);
          const cache = await this.getCacheInstance();
-         await cache.delete(this.
+         await cache.delete(this.getCacheUrlKey(key));
      }
      catch (e) {
          error("Failed to delete from regional cache", e);

@@ -83,16 +87,28 @@ class RegionalCache {
      this.localCache = await caches.open("incremental-cache");
      return this.localCache;
  }
-
-
+ getCacheUrlKey(key, isFetch) {
+     const buildId = process.env.NEXT_BUILD_ID ?? FALLBACK_BUILD_ID;
+     return ("http://cache.local" + `/${buildId}/${key}`.replace(/\/+/g, "/") + `.${isFetch ? "fetch" : "cache"}`);
  }
- async putToCache(key, entry) {
+ async putToCache({ key, isFetch, entry }) {
+     const urlKey = this.getCacheUrlKey(key, isFetch);
      const cache = await this.getCacheInstance();
      const age = this.opts.mode === "short-lived"
          ? ONE_MINUTE_IN_SECONDS
-         : entry.value.revalidate || THIRTY_MINUTES_IN_SECONDS;
-
-
+         : entry.value.revalidate || this.opts.defaultLongLivedTtlSec || THIRTY_MINUTES_IN_SECONDS;
+     // We default to the entry key if no tags are found.
+     // so that we can also revalidate page router based entry this way.
+     const tags = getTagsFromCacheEntry(entry) ?? [key];
+     await cache.put(urlKey, new Response(JSON.stringify(entry), {
+         headers: new Headers({
+             "cache-control": `max-age=${age}`,
+             ...(tags.length > 0
+                 ? {
+                     "cache-tag": tags.join(","),
+                 }
+                 : {}),
+         }),
      }));
  }
  }

@@ -113,9 +129,29 @@ class RegionalCache {
   * or an ISR/SSG entry for up to 30 minutes.
   * @param opts.shouldLazilyUpdateOnCacheHit Whether the regional cache entry should be updated in
   * the background or not when it experiences a cache hit.
+  * @param opts.defaultLongLivedTtlSec The default age to use for long-lived cache entries.
+  * When no revalidate is provided, the default age will be used.
+  * @default `THIRTY_MINUTES_IN_SECONDS`
   *
   * @default `false` for the `short-lived` mode, and `true` for the `long-lived` mode.
   */
  export function withRegionalCache(cache, opts) {
      return new RegionalCache(cache, opts);
  }
+ /**
+  * Extract the list of tags from a cache entry.
+  */
+ function getTagsFromCacheEntry(entry) {
+     if ("tags" in entry.value && entry.value.tags) {
+         return entry.value.tags;
+     }
+     if ("meta" in entry.value &&
+         entry.value.meta &&
+         "headers" in entry.value.meta &&
+         entry.value.meta.headers) {
+         const rawTags = entry.value.meta.headers["x-next-cache-tags"];
+         if (typeof rawTags === "string") {
+             return rawTags.split(",");
+         }
+     }
+ }
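
The new `defaultLongLivedTtlSec` option caps how long a long-lived regional cache entry is kept when the stored value carries no `revalidate`. A minimal usage sketch (the `regional-cache` import path is an assumption based on the overrides layout shown in this diff; check the package exports):

    import { defineCloudflareConfig } from "@opennextjs/cloudflare/config";
    import r2IncrementalCache from "@opennextjs/cloudflare/overrides/incremental-cache/r2-incremental-cache";
    // Assumed path, mirroring the dist layout above.
    import { withRegionalCache } from "@opennextjs/cloudflare/overrides/incremental-cache/regional-cache";

    export default defineCloudflareConfig({
      incrementalCache: withRegionalCache(r2IncrementalCache, {
        mode: "long-lived",
        // Entries without a revalidate value now expire after 15 minutes
        // instead of the built-in 30-minute default.
        defaultLongLivedTtlSec: 15 * 60,
      }),
    });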

package/dist/api/overrides/tag-cache/d1-next-tag-cache.d.ts
CHANGED

@@ -7,7 +7,6 @@ export declare class D1NextModeTagCache implements NextModeTagCache {
      hasBeenRevalidated(tags: string[], lastModified?: number): Promise<boolean>;
      writeTags(tags: string[]): Promise<void>;
      private getConfig;
-     protected removeBuildId(key: string): string;
      protected getCacheKey(key: string): string;
      protected getBuildId(): string;
  }

package/dist/api/overrides/tag-cache/d1-next-tag-cache.js
CHANGED

@@ -1,5 +1,4 @@
  import { error } from "@opennextjs/aws/adapters/logger.js";
- import { RecoverableError } from "@opennextjs/aws/utils/error.js";
  import { getCloudflareContext } from "../../cloudflare-context.js";
  import { debugCache, FALLBACK_BUILD_ID } from "../internal.js";
  export const NAME = "d1-next-mode-tag-cache";

@@ -27,13 +26,12 @@ export class D1NextModeTagCache {
  }
  async writeTags(tags) {
      const { isDisabled, db } = this.getConfig();
-
+     // TODO: Remove `tags.length === 0` when https://github.com/opennextjs/opennextjs-aws/pull/828 is used
+     if (isDisabled || tags.length === 0)
          return Promise.resolve();
-
+     await db.batch(tags.map((tag) => db
          .prepare(`INSERT INTO revalidations (tag, revalidatedAt) VALUES (?, ?)`)
          .bind(this.getCacheKey(tag), Date.now())));
-     if (!result)
-         throw new RecoverableError(`D1 insert failed for ${tags}`);
  }
  getConfig() {
      const db = getCloudflareContext().env[BINDING_NAME];

@@ -48,9 +46,6 @@ export class D1NextModeTagCache {
      db,
  };
  }
- removeBuildId(key) {
-     return key.replace(`${this.getBuildId()}/`, "");
- }
  getCacheKey(key) {
      return `${this.getBuildId()}/${key}`.replaceAll("//", "/");
  }
package/dist/cli/args.d.ts
CHANGED
package/dist/cli/args.js
CHANGED

@@ -28,6 +28,7 @@ export function getArgs() {
      };
  case "preview":
  case "deploy":
+ case "upload":
      return {
          command: positionals[0],
          outputDir,

@@ -44,7 +45,7 @@ export function getArgs() {
      environment: getWranglerEnvironmentFlag(passthroughArgs),
  };
  default:
-     throw new Error("Error: invalid command, expected 'build' | 'preview' | 'deploy' | 'populateCache'");
+     throw new Error("Error: invalid command, expected 'build' | 'preview' | 'deploy' | 'upload' | 'populateCache'");
  }
  }
  function getPassthroughArgs() {
package/dist/cli/build/build.js
CHANGED

@@ -8,6 +8,7 @@ import logger from "@opennextjs/aws/logger.js";
  import { bundleServer } from "./bundle-server.js";
  import { compileCacheAssetsManifestSqlFile } from "./open-next/compile-cache-assets-manifest.js";
  import { compileEnvFiles } from "./open-next/compile-env-files.js";
+ import { compileInit } from "./open-next/compile-init.js";
  import { compileDurableObjects } from "./open-next/compileDurableObjects.js";
  import { createServerBundle } from "./open-next/createServerBundle.js";
  import { createWranglerConfigIfNotExistent } from "./utils/index.js";

@@ -49,9 +50,11 @@ export async function build(options, config, projectOpts) {
      compileCache(options);
      // Compile .env files
      compileEnvFiles(options);
+     // Compile workerd init
+     compileInit(options);
      // Compile middleware
      await createMiddleware(options, { forceOnlyBuildOnce: true });
-     createStaticAssets(options);
+     createStaticAssets(options, { useBasePath: true });
      if (config.dangerous?.disableIncrementalCache !== true) {
          const { useTagCache, metaFiles } = createCacheAssets(options);
          if (useTagCache) {

package/dist/cli/build/bundle-server.js
CHANGED

@@ -14,7 +14,6 @@ import { inlineEvalManifest } from "./patches/plugins/eval-manifest.js";
  import { inlineFindDir } from "./patches/plugins/find-dir.js";
  import { patchInstrumentation } from "./patches/plugins/instrumentation.js";
  import { inlineLoadManifest } from "./patches/plugins/load-manifest.js";
- import { patchNextMinimal } from "./patches/plugins/next-minimal.js";
  import { handleOptionalDependencies } from "./patches/plugins/optional-deps.js";
  import { patchDepdDeprecations } from "./patches/plugins/patch-depd-deprecations.js";
  import { fixRequire } from "./patches/plugins/require.js";

@@ -83,7 +82,6 @@ export async function bundleServer(buildOpts) {
      inlineLoadManifest(updater, buildOpts),
      inlineBuildId(updater),
      patchDepdDeprecations(updater),
-     patchNextMinimal(updater),
      // Apply updater updates, must be the last plugin
      updater.plugin,
  ],

@@ -125,46 +123,6 @@ export async function bundleServer(buildOpts) {
      "process.env.__NEXT_EXPERIMENTAL_REACT": `${needsExperimentalReact(nextConfig)}`,
  },
  platform: "node",
- banner: {
-     js: `
- // Used by unbundled js files (which don't inherit the __dirname present in the define field)
- // so we also need to set it on the global scope
- // Note: this was hit in the next/dist/compiled/@opentelemetry/api module
- globalThis.__dirname ??= "";
- globalThis.__filename ??= "";
-
- // Do not crash on cache not supported
- // https://github.com/cloudflare/workerd/pull/2434
- // compatibility flag "cache_option_enabled" -> does not support "force-cache"
- const curFetch = globalThis.fetch;
- globalThis.fetch = (input, init) => {
-     if (init) {
-         delete init.cache;
-     }
-     return curFetch(input, init);
- };
- import __cf_stream from 'node:stream';
- fetch = globalThis.fetch;
- const CustomRequest = class extends globalThis.Request {
-     constructor(input, init) {
-         if (init) {
-             delete init.cache;
-             // https://github.com/cloudflare/workerd/issues/2746
-             // https://github.com/cloudflare/workerd/issues/3245
-             Object.defineProperty(init, "body", {
-                 value: init.body instanceof __cf_stream.Readable ? ReadableStream.from(init.body) : init.body
-             });
-         }
-         super(input, init);
-     }
- };
- globalThis.Request = CustomRequest;
- Request = globalThis.Request;
- // Makes the edge converter returns either a Response or a Request.
- globalThis.__dangerous_ON_edge_converter_returns_request = true;
- globalThis.__BUILD_TIMESTAMP_MS__ = ${Date.now()};
- `,
- },
  });
  fs.writeFileSync(openNextServerBundle + ".meta.json", JSON.stringify(result.metafile, null, 2));
  await updateWorkerBundledCode(openNextServerBundle, buildOpts);

@@ -185,6 +143,7 @@ export async function updateWorkerBundledCode(workerOutputFile, buildOpts) {
  [
      "'require(this.middlewareManifestPath)'",
      (code) => patches.inlineMiddlewareManifestRequire(code, buildOpts),
+     { isOptional: true },
  ],
  [
      "`require.resolve` call",

package/dist/cli/build/open-next/compile-env-files.js
CHANGED

@@ -5,7 +5,7 @@ import { extractProjectEnvVars } from "../utils/index.js";
   * Compiles the values extracted from the project's env files to the output directory for use in the worker.
   */
  export function compileEnvFiles(buildOpts) {
-     const envDir = path.join(buildOpts.outputDir, "
+     const envDir = path.join(buildOpts.outputDir, "cloudflare");
      fs.mkdirSync(envDir, { recursive: true });
      ["production", "development", "test"].forEach((mode) => fs.appendFileSync(path.join(envDir, `next-env.mjs`), `export const ${mode} = ${JSON.stringify(extractProjectEnvVars(mode, buildOpts))};\n`));
  }

package/dist/cli/build/open-next/compile-init.js
ADDED

@@ -0,0 +1,27 @@
+ import path from "node:path";
+ import { fileURLToPath } from "node:url";
+ import { loadConfig } from "@opennextjs/aws/adapters/config/util.js";
+ import { build } from "esbuild";
+ /**
+  * Compiles the initialization code for the workerd runtime
+  */
+ export async function compileInit(options) {
+     const currentDir = path.join(path.dirname(fileURLToPath(import.meta.url)));
+     const templatesDir = path.join(currentDir, "../../templates");
+     const initPath = path.join(templatesDir, "init.js");
+     const nextConfig = loadConfig(path.join(options.appBuildOutputPath, ".next"));
+     const basePath = nextConfig.basePath ?? "";
+     await build({
+         entryPoints: [initPath],
+         outdir: path.join(options.outputDir, "cloudflare"),
+         bundle: false,
+         minify: false,
+         format: "esm",
+         target: "esnext",
+         platform: "node",
+         define: {
+             __BUILD_TIMESTAMP_MS__: JSON.stringify(Date.now()),
+             __NEXT_BASE_PATH__: JSON.stringify(basePath),
+         },
+     });
+ }
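
`compileInit` relies on esbuild's `define` option to replace the `__BUILD_TIMESTAMP_MS__` and `__NEXT_BASE_PATH__` identifiers in the init template with literals at build time. A standalone sketch of that mechanism (not the package's own build script; the basePath value is hypothetical):

    import { build } from "esbuild";

    const result = await build({
      stdin: {
        contents: `console.log(__BUILD_TIMESTAMP_MS__, __NEXT_BASE_PATH__);`,
        loader: "js",
      },
      write: false,
      define: {
        // Each identifier is swapped for a literal, so the emitted worker
        // code contains no unresolved globals.
        __BUILD_TIMESTAMP_MS__: JSON.stringify(Date.now()),
        __NEXT_BASE_PATH__: JSON.stringify("/docs"),
      },
    });
    console.log(result.outputFiles[0].text);
    // -> console.log(1735689600000, "/docs");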

package/dist/cli/build/open-next/createServerBundle.js
CHANGED

@@ -10,8 +10,9 @@ import { copyMiddlewareResources, generateEdgeBundle } from "@opennextjs/aws/bui
  import * as buildHelper from "@opennextjs/aws/build/helper.js";
  import { installDependencies } from "@opennextjs/aws/build/installDeps.js";
  import { applyCodePatches } from "@opennextjs/aws/build/patch/codePatcher.js";
- import { patchFetchCacheForISR, patchUnstableCacheForISR, } from "@opennextjs/aws/build/patch/
- import
+ import { patchEnvVars, patchFetchCacheForISR, patchFetchCacheSetMissingWaitUntil, patchNextServer, patchUnstableCacheForISR, } from "@opennextjs/aws/build/patch/patches/index.js";
+ // TODO: import from patches/index.js when https://github.com/opennextjs/opennextjs-aws/pull/827 is released
+ import { patchBackgroundRevalidation } from "@opennextjs/aws/build/patch/patches/patchBackgroundRevalidation.js";
  import logger from "@opennextjs/aws/logger.js";
  import { minifyAll } from "@opennextjs/aws/minimize-js.js";
  import { openNextEdgePlugins } from "@opennextjs/aws/plugins/edge.js";

@@ -124,6 +125,9 @@ async function generateBundle(name, options, fnOptions, codeCustomization) {
      patchFetchCacheSetMissingWaitUntil,
      patchFetchCacheForISR,
      patchUnstableCacheForISR,
+     patchNextServer,
+     patchEnvVars,
+     patchBackgroundRevalidation,
      // Cloudflare specific patches
      patchResRevalidate,
      ...additionalCodePatches,

@@ -147,6 +151,7 @@ async function generateBundle(name, options, fnOptions, codeCustomization) {
      ...(disableNextPrebundledReact ? ["applyNextjsPrebundledReact"] : []),
      ...(disableRouting ? ["withRouting"] : []),
      ...(isAfter142 ? ["patchAsyncStorage"] : []),
+     ...(isAfter141 ? ["appendPrefetch"] : []),
  ],
  }),
  openNextReplacementPlugin({

package/dist/cli/build/utils/ensure-cf-config.js
CHANGED

@@ -17,6 +17,7 @@ export function ensureCloudflareConfig(config) {
      config.default?.override?.queue === "direct" ||
          typeof config.default?.override?.queue === "function",
      mwIsMiddlewareIntegrated: config.middleware === undefined,
+     hasCryptoExternal: config.edgeExternals?.includes("node:crypto"),
  };
  if (config.default?.override?.queue === "direct") {
      logger.warn("The direct mode queue is not recommended for use in production.");

@@ -34,6 +35,7 @@ export function ensureCloudflareConfig(config) {
      queue: "dummy" | "direct" | function,
  },
  },
+ edgeExternals: ["node:crypto"],
  }\n\n`.replace(/^ {8}/gm, ""));
  }
  }

package/dist/cli/commands/populate-cache.d.ts
CHANGED

@@ -1,6 +1,13 @@
  import type { BuildOptions } from "@opennextjs/aws/build/helper.js";
  import type { OpenNextConfig } from "@opennextjs/aws/types/open-next.js";
  import type { WranglerTarget } from "../utils/run-wrangler.js";
+ export type CacheAsset = {
+     isFetch: boolean;
+     fullPath: string;
+     key: string;
+     buildId: string;
+ };
+ export declare function getCacheAssets(opts: BuildOptions): CacheAsset[];
  export declare function populateCache(options: BuildOptions, config: OpenNextConfig, populateCacheOptions: {
      target: WranglerTarget;
      environment?: string;

package/dist/cli/commands/populate-cache.js
CHANGED

@@ -4,29 +4,49 @@ import logger from "@opennextjs/aws/logger.js";
  import { globSync } from "glob";
  import { tqdm } from "ts-tqdm";
  import { unstable_readConfig } from "wrangler";
- import { BINDING_NAME as KV_CACHE_BINDING_NAME, NAME as KV_CACHE_NAME, } from "../../api/overrides/incremental-cache/kv-incremental-cache.js";
- import { BINDING_NAME as R2_CACHE_BINDING_NAME,
+ import { BINDING_NAME as KV_CACHE_BINDING_NAME, computeCacheKey as computeKVCacheKey, NAME as KV_CACHE_NAME, } from "../../api/overrides/incremental-cache/kv-incremental-cache.js";
+ import { BINDING_NAME as R2_CACHE_BINDING_NAME, computeCacheKey as computeR2CacheKey, NAME as R2_CACHE_NAME, PREFIX_ENV_NAME as R2_CACHE_PREFIX_ENV_NAME, } from "../../api/overrides/incremental-cache/r2-incremental-cache.js";
  import { CACHE_DIR as STATIC_ASSETS_CACHE_DIR, NAME as STATIC_ASSETS_CACHE_NAME, } from "../../api/overrides/incremental-cache/static-assets-incremental-cache.js";
  import { BINDING_NAME as D1_TAG_BINDING_NAME, NAME as D1_TAG_NAME, } from "../../api/overrides/tag-cache/d1-next-tag-cache.js";
  import { runWrangler } from "../utils/run-wrangler.js";
  async function resolveCacheName(value) {
      return typeof value === "function" ? (await value()).name : value;
  }
- function
-
+ export function getCacheAssets(opts) {
+     const allFiles = globSync(path.join(opts.outputDir, "cache/**/*"), {
          withFileTypes: true,
          windowsPathsNoEscape: true,
-     })
-
-
-     const
-
-
-
-         : relativePath
-
-
+     }).filter((f) => f.isFile());
+     const assets = [];
+     for (const file of allFiles) {
+         const fullPath = file.fullpathPosix();
+         const relativePath = path.relative(path.join(opts.outputDir, "cache"), fullPath);
+         if (relativePath.startsWith("__fetch")) {
+             const [__fetch, buildId, ...keyParts] = relativePath.split("/");
+             if (__fetch !== "__fetch" || buildId === undefined || keyParts.length === 0) {
+                 throw new Error(`Invalid path for a Cache Asset file: ${relativePath}`);
+             }
+             assets.push({
+                 isFetch: true,
+                 fullPath,
+                 key: `/${keyParts.join("/")}`,
+                 buildId,
+             });
+         }
+         else {
+             const [buildId, ...keyParts] = relativePath.slice(0, -".cache".length).split("/");
+             if (!relativePath.endsWith(".cache") || buildId === undefined || keyParts.length === 0) {
+                 throw new Error(`Invalid path for a Cache Asset file: ${relativePath}`);
+             }
+             assets.push({
+                 isFetch: false,
+                 fullPath,
+                 key: `/${keyParts.join("/")}`,
+                 buildId,
+             });
+         }
+     }
+     return assets;
  }
  function populateR2IncrementalCache(options, populateCacheOptions) {
      logger.info("\nPopulating R2 incremental cache...");

@@ -39,10 +59,14 @@ function populateR2IncrementalCache(options, populateCacheOptions) {
      if (!bucket) {
          throw new Error(`R2 binding ${JSON.stringify(R2_CACHE_BINDING_NAME)} should have a 'bucket_name'`);
      }
-     const assets =
-     for (const {
-         const
-
+     const assets = getCacheAssets(options);
+     for (const { fullPath, key, buildId, isFetch } of tqdm(assets)) {
+         const cacheKey = computeR2CacheKey(key, {
+             directory: process.env[R2_CACHE_PREFIX_ENV_NAME],
+             buildId,
+             isFetch,
+         });
+         runWrangler(options, ["r2 object put", JSON.stringify(path.join(bucket, cacheKey)), `--file ${JSON.stringify(fullPath)}`],
      // NOTE: R2 does not support the environment flag and results in the following error:
      // Incorrect type for the 'cacheExpiry' field on 'HttpMetadata': the provided value is not of type 'date'.
      { target: populateCacheOptions.target, excludeRemoteFlag: true, logging: "error" });

@@ -56,13 +80,17 @@ function populateKVIncrementalCache(options, populateCacheOptions) {
      if (!binding) {
          throw new Error(`No KV binding ${JSON.stringify(KV_CACHE_BINDING_NAME)} found!`);
      }
-     const assets =
-     for (const {
+     const assets = getCacheAssets(options);
+     for (const { fullPath, key, buildId, isFetch } of tqdm(assets)) {
+         const cacheKey = computeKVCacheKey(key, {
+             buildId,
+             isFetch,
+         });
      runWrangler(options, [
          "kv key put",
-         JSON.stringify(
+         JSON.stringify(cacheKey),
          `--binding ${JSON.stringify(KV_CACHE_BINDING_NAME)}`,
-         `--path ${JSON.stringify(
+         `--path ${JSON.stringify(fullPath)}`,
      ], { ...populateCacheOptions, logging: "error" });
  }
  logger.info(`Successfully populated cache with ${assets.length} assets`);
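
`getCacheAssets` walks the build-time cache directory and turns each file into a `CacheAsset`: fetch-cache entries live under `cache/__fetch/<buildId>/<key>` and ISR/SSG entries under `cache/<buildId>/<key>.cache` (see the test below). A small sketch of the mapping, using hypothetical paths:

    // Hypothetical files under `.open-next/cache` and the assets derived from them.
    type CacheAsset = { isFetch: boolean; fullPath: string; key: string; buildId: string };

    const assets: CacheAsset[] = [
      {
        // cache/<buildId>/<key>.cache  ->  ISR/SSG entry
        isFetch: false,
        fullPath: "/app/.open-next/cache/abc123/blog/hello.cache",
        key: "/blog/hello",
        buildId: "abc123",
      },
      {
        // cache/__fetch/<buildId>/<key>  ->  fetch-cache entry
        isFetch: true,
        fullPath: "/app/.open-next/cache/__fetch/abc123/0123456789abcdef",
        key: "/0123456789abcdef",
        buildId: "abc123",
      },
    ];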

package/dist/cli/commands/populate-cache.spec.js
ADDED

@@ -0,0 +1,61 @@
+ import { mkdirSync, writeFileSync } from "node:fs";
+ import path from "node:path";
+ import mockFs from "mock-fs";
+ import { afterAll, beforeAll, describe, expect, test } from "vitest";
+ import { getCacheAssets } from "./populate-cache";
+ describe("getCacheAssets", () => {
+     beforeAll(() => {
+         mockFs();
+         const fetchBaseDir = "/base/path/cache/__fetch/buildID";
+         const cacheDir = "/base/path/cache/buildID/path/to";
+         mkdirSync(fetchBaseDir, { recursive: true });
+         mkdirSync(cacheDir, { recursive: true });
+         for (let i = 0; i < 3; i++) {
+             writeFileSync(path.join(fetchBaseDir, `${i}`), "", { encoding: "utf-8" });
+             writeFileSync(path.join(cacheDir, `${i}.cache`), "", { encoding: "utf-8" });
+         }
+     });
+     afterAll(() => mockFs.restore());
+     test("list cache assets", () => {
+         expect(getCacheAssets({ outputDir: "/base/path" })).toMatchInlineSnapshot(`
+           [
+             {
+               "buildId": "buildID",
+               "fullPath": "/base/path/cache/buildID/path/to/2.cache",
+               "isFetch": false,
+               "key": "/path/to/2",
+             },
+             {
+               "buildId": "buildID",
+               "fullPath": "/base/path/cache/buildID/path/to/1.cache",
+               "isFetch": false,
+               "key": "/path/to/1",
+             },
+             {
+               "buildId": "buildID",
+               "fullPath": "/base/path/cache/buildID/path/to/0.cache",
+               "isFetch": false,
+               "key": "/path/to/0",
+             },
+             {
+               "buildId": "buildID",
+               "fullPath": "/base/path/cache/__fetch/buildID/2",
+               "isFetch": true,
+               "key": "/2",
+             },
+             {
+               "buildId": "buildID",
+               "fullPath": "/base/path/cache/__fetch/buildID/1",
+               "isFetch": true,
+               "key": "/1",
+             },
+             {
+               "buildId": "buildID",
+               "fullPath": "/base/path/cache/__fetch/buildID/0",
+               "isFetch": true,
+               "key": "/0",
+             },
+           ]
+         `);
+     });
+ });

package/dist/cli/commands/upload.d.ts
ADDED

@@ -0,0 +1,5 @@
+ import { BuildOptions } from "@opennextjs/aws/build/helper.js";
+ import { OpenNextConfig } from "@opennextjs/aws/types/open-next.js";
+ export declare function upload(options: BuildOptions, config: OpenNextConfig, uploadOptions: {
+     passthroughArgs: string[];
+ }): Promise<void>;

package/dist/cli/commands/upload.js
ADDED

@@ -0,0 +1,9 @@
+ import { getWranglerEnvironmentFlag, runWrangler } from "../utils/run-wrangler.js";
+ import { populateCache } from "./populate-cache.js";
+ export async function upload(options, config, uploadOptions) {
+     await populateCache(options, config, {
+         target: "remote",
+         environment: getWranglerEnvironmentFlag(uploadOptions.passthroughArgs),
+     });
+     runWrangler(options, ["versions upload", ...uploadOptions.passthroughArgs], { logging: "all" });
+ }
package/dist/cli/index.js
CHANGED

@@ -11,6 +11,7 @@ import { createOpenNextConfigIfNotExistent, ensureCloudflareConfig } from "./bui
  import { deploy } from "./commands/deploy.js";
  import { populateCache } from "./commands/populate-cache.js";
  import { preview } from "./commands/preview.js";
+ import { upload } from "./commands/upload.js";
  const nextAppDir = process.cwd();
  async function runCommand(args) {
      printHeader(`Cloudflare ${args.command}`);

@@ -33,6 +34,8 @@ async function runCommand(args) {
      return preview(options, config, args);
  case "deploy":
      return deploy(options, config, args);
+ case "upload":
+     return upload(options, config, args);
  case "populateCache":
      return populateCache(options, config, args);
  }

package/dist/cli/templates/init.d.ts
ADDED

@@ -0,0 +1,13 @@
+ /**
+  * Initialization for the workerd runtime.
+  *
+  * The file must be imported at the top level the worker.
+  */
+ /**
+  * Executes the handler with the Cloudflare context.
+  */
+ export declare function runWithCloudflareRequestContext(request: Request, env: CloudflareEnv, ctx: ExecutionContext, handler: () => Promise<Response>): Promise<Response>;
+ declare global {
+     var __BUILD_TIMESTAMP_MS__: number;
+     var __NEXT_BASE_PATH__: string;
+ }

package/dist/cli/templates/init.js
ADDED

@@ -0,0 +1,105 @@
+ /**
+  * Initialization for the workerd runtime.
+  *
+  * The file must be imported at the top level the worker.
+  */
+ import { AsyncLocalStorage } from "node:async_hooks";
+ import process from "node:process";
+ import stream from "node:stream";
+ // @ts-expect-error: resolved by wrangler build
+ import * as nextEnvVars from "./next-env.mjs";
+ const cloudflareContextALS = new AsyncLocalStorage();
+ // Note: this symbol needs to be kept in sync with `src/api/get-cloudflare-context.ts`
+ Object.defineProperty(globalThis, Symbol.for("__cloudflare-context__"), {
+     get() {
+         return cloudflareContextALS.getStore();
+     },
+ });
+ /**
+  * Executes the handler with the Cloudflare context.
+  */
+ export async function runWithCloudflareRequestContext(request, env, ctx, handler) {
+     init(request, env);
+     return cloudflareContextALS.run({ env, ctx, cf: request.cf }, handler);
+ }
+ let initialized = false;
+ /**
+  * Initializes the runtime on the first call,
+  * no-op on subsequent invocations.
+  */
+ function init(request, env) {
+     if (initialized) {
+         return;
+     }
+     initialized = true;
+     const url = new URL(request.url);
+     initRuntime();
+     populateProcessEnv(url, env);
+ }
+ function initRuntime() {
+     // Some packages rely on `process.version` and `process.versions.node` (i.e. Jose@4)
+     // TODO: Remove when https://github.com/unjs/unenv/pull/493 is merged
+     Object.assign(process, { version: process.version || "v22.14.0" });
+     // @ts-expect-error Node type does not match workerd
+     Object.assign(process.versions, { node: "22.14.0", ...process.versions });
+     globalThis.__dirname ??= "";
+     globalThis.__filename ??= "";
+     // Do not crash on cache not supported
+     // https://github.com/cloudflare/workerd/pull/2434
+     // compatibility flag "cache_option_enabled" -> does not support "force-cache"
+     const __original_fetch = globalThis.fetch;
+     globalThis.fetch = (input, init) => {
+         if (init) {
+             delete init.cache;
+         }
+         return __original_fetch(input, init);
+     };
+     const CustomRequest = class extends globalThis.Request {
+         constructor(input, init) {
+             if (init) {
+                 delete init.cache;
+                 // https://github.com/cloudflare/workerd/issues/2746
+                 // https://github.com/cloudflare/workerd/issues/3245
+                 Object.defineProperty(init, "body", {
+                     // @ts-ignore
+                     value: init.body instanceof stream.Readable ? ReadableStream.from(init.body) : init.body,
+                 });
+             }
+             super(input, init);
+         }
+     };
+     Object.assign(globalThis, {
+         Request: CustomRequest,
+         __BUILD_TIMESTAMP_MS__: __BUILD_TIMESTAMP_MS__,
+         __NEXT_BASE_PATH__: __NEXT_BASE_PATH__,
+     });
+ }
+ /**
+  * Populate process.env with:
+  * - the environment variables and secrets from the cloudflare platform
+  * - the variables from Next .env* files
+  * - the origin resolver information
+  */
+ function populateProcessEnv(url, env) {
+     for (const [key, value] of Object.entries(env)) {
+         if (typeof value === "string") {
+             process.env[key] = value;
+         }
+     }
+     const mode = env.NEXTJS_ENV ?? "production";
+     if (nextEnvVars[mode]) {
+         for (const key in nextEnvVars[mode]) {
+             process.env[key] ??= nextEnvVars[mode][key];
+         }
+     }
+     // Set the default Origin for the origin resolver.
+     // This is only needed for an external middleware bundle
+     process.env.OPEN_NEXT_ORIGIN = JSON.stringify({
+         default: {
+             host: url.hostname,
+             protocol: url.protocol.slice(0, -1),
+             port: url.port,
+         },
+     });
+ }
+ /* eslint-enable no-var */

package/dist/cli/templates/worker.js
CHANGED

@@ -1,25 +1,13 @@
-
- import
- // @ts-expect-error: resolved by wrangler build
- import * as nextEnvVars from "./env/next-env.mjs";
- const cloudflareContextALS = new AsyncLocalStorage();
- // Note: this symbol needs to be kept in sync with `src/api/get-cloudflare-context.ts`
- Object.defineProperty(globalThis, Symbol.for("__cloudflare-context__"), {
-     get() {
-         return cloudflareContextALS.getStore();
-     },
- });
+ //@ts-expect-error: Will be resolved by wrangler build
+ import { runWithCloudflareRequestContext } from "./cloudflare/init.js";
  //@ts-expect-error: Will be resolved by wrangler build
  export { DOQueueHandler } from "./.build/durable-objects/queue.js";
  //@ts-expect-error: Will be resolved by wrangler build
  export { DOShardedTagCache } from "./.build/durable-objects/sharded-tag-cache.js";
- // Populate process.env on the first request
- let processEnvPopulated = false;
  export default {
      async fetch(request, env, ctx) {
-         return
+         return runWithCloudflareRequestContext(request, env, ctx, async () => {
          const url = new URL(request.url);
-         populateProcessEnv(url, env);
          // Serve images in development.
          // Note: "/cdn-cgi/image/..." requests do not reach production workers.
          if (url.pathname.startsWith("/cdn-cgi/image/")) {

@@ -33,10 +21,10 @@ export default {
          : env.ASSETS?.fetch(new URL(`/${imageUrl}`, url));
  }
  // Fallback for the Next default image loader.
- if (url.pathname ===
+ if (url.pathname === `${globalThis.__NEXT_BASE_PATH__}/_next/image`) {
      const imageUrl = url.searchParams.get("url") ?? "";
      return imageUrl.startsWith("/")
-         ? env.ASSETS?.fetch(
+         ? env.ASSETS?.fetch(`http://assets.local${imageUrl}`)
          : fetch(imageUrl, { cf: { cacheEverything: true } });
  }
  // @ts-expect-error: resolved by wrangler build

@@ -45,39 +33,3 @@ export default {
      });
  },
  };
- /**
-  * Populate process.env with:
-  * - the environment variables and secrets from the cloudflare platform
-  * - the variables from Next .env* files
-  * - the origin resolver information
-  */
- function populateProcessEnv(url, env) {
-     if (processEnvPopulated) {
-         return;
-     }
-     // Some packages rely on `process.version` and `process.versions.node` (i.e. Jose@4)
-     // TODO: Remove when https://github.com/unjs/unenv/pull/493 is merged
-     Object.assign(process, { version: process.version || "v22.14.0" });
-     // @ts-expect-error Node type does not match workerd
-     Object.assign(process.versions, { node: "22.14.0", ...process.versions });
-     processEnvPopulated = true;
-     for (const [key, value] of Object.entries(env)) {
-         if (typeof value === "string") {
-             process.env[key] = value;
-         }
-     }
-     const mode = env.NEXTJS_ENV ?? "production";
-     if (nextEnvVars[mode]) {
-         for (const key in nextEnvVars[mode]) {
-             process.env[key] ??= nextEnvVars[mode][key];
-         }
-     }
-     // Set the default Origin for the origin resolver.
-     process.env.OPEN_NEXT_ORIGIN = JSON.stringify({
-         default: {
-             host: url.hostname,
-             protocol: url.protocol.slice(0, -1),
-             port: url.port,
-         },
-     });
- }
package/package.json
CHANGED

@@ -1,7 +1,7 @@
  {
      "name": "@opennextjs/cloudflare",
      "description": "Cloudflare builder for next apps",
-     "version": "1.0.0-beta.2",
+     "version": "1.0.0-beta.3",
      "type": "module",
      "bin": {
          "opennextjs-cloudflare": "dist/cli/index.js"

@@ -43,7 +43,7 @@
      "homepage": "https://github.com/opennextjs/opennextjs-cloudflare",
      "dependencies": {
          "@dotenvx/dotenvx": "1.31.0",
-         "@opennextjs/aws": "3.5.
+         "@opennextjs/aws": "3.5.7",
          "enquirer": "^2.4.1",
          "glob": "^11.0.0",
          "ts-tqdm": "^0.8.6"

package/templates/open-next.config.ts
CHANGED

@@ -1,7 +1,7 @@
  // default open-next.config.ts file created by @opennextjs/cloudflare
  import { defineCloudflareConfig } from "@opennextjs/cloudflare/config";
- import
+ import r2IncrementalCache from "@opennextjs/cloudflare/overrides/incremental-cache/r2-incremental-cache";

  export default defineCloudflareConfig({
-     incrementalCache:
+     incrementalCache: r2IncrementalCache,
  });
package/templates/wrangler.jsonc
CHANGED

@@ -8,12 +8,14 @@
      "directory": ".open-next/assets",
      "binding": "ASSETS"
  },
- "
- //
- //
-
-
-
-
+ "r2_buckets": [
+     // Use R2 incremental cache
+     // See https://opennext.js.org/cloudflare/caching
+     {
+         "binding": "NEXT_INC_CACHE_R2_BUCKET",
+         // Create a bucket before deploying
+         // See https://developers.cloudflare.com/workers/wrangler/commands/#r2-bucket-create
+         "bucket_name": "<BUCKET_NAME>"
+     }
  ]
  }

package/dist/cli/build/patches/plugins/next-minimal.d.ts
DELETED

@@ -1,4 +0,0 @@
- import { ContentUpdater, type Plugin } from "@opennextjs/aws/plugins/content-updater.js";
- export declare const abortControllerRule = "\nrule:\n all:\n - kind: lexical_declaration\n pattern: let $VAR = new AbortController\n - precedes:\n kind: function_declaration\n stopBy: end\n has:\n kind: statement_block\n has:\n kind: try_statement\n has:\n kind: catch_clause\n has:\n kind: statement_block\n has:\n kind: return_statement\n all:\n - has:\n stopBy: end\n kind: member_expression\n pattern: $VAR.signal.aborted\n - has:\n stopBy: end\n kind: call_expression\n regex: console.error\\(\"Failed to fetch RSC payload for\n\nfix:\n 'let $VAR = {signal:{aborted: false}};'\n";
- export declare const nextMinimalRule = "\nrule:\n kind: member_expression\n pattern: process.env.NEXT_MINIMAL\n any:\n - inside:\n kind: parenthesized_expression\n stopBy: end\n inside:\n kind: if_statement\n any:\n - inside:\n kind: statement_block\n inside:\n kind: method_definition\n any:\n - has: {kind: property_identifier, field: name, regex: runEdgeFunction}\n - has: {kind: property_identifier, field: name, regex: runMiddleware}\n - has: {kind: property_identifier, field: name, regex: imageOptimizer}\n - has:\n kind: statement_block\n has:\n kind: expression_statement\n pattern: res.statusCode = 400;\nfix:\n 'true'\n";
- export declare function patchNextMinimal(updater: ContentUpdater): Plugin;

package/dist/cli/build/patches/plugins/next-minimal.js
DELETED

@@ -1,86 +0,0 @@
- import { patchCode } from "@opennextjs/aws/build/patch/astCodePatcher.js";
- // Remove an instantiation of `AbortController` from the runtime.
- //
- // Solves https://github.com/cloudflare/workerd/issues/3657:
- // - The `AbortController` is meant for the client side, but ends in the server code somehow.
- //   That's why we can get ride of it. See https://github.com/vercel/next.js/pull/73975/files.
- // - Top level instantiation of `AbortController` are not supported by workerd as of March, 2025.
- //   See https://github.com/cloudflare/workerd/issues/3657
- // - As Next code is not more executed at top level, we do not need to apply this patch
- //   See https://github.com/opennextjs/opennextjs-cloudflare/pull/497
- //
- // We try to be as specific as possible to avoid patching the wrong thing here
- export const abortControllerRule = `
- rule:
-   all:
-     - kind: lexical_declaration
-       pattern: let $VAR = new AbortController
-     - precedes:
-         kind: function_declaration
-         stopBy: end
-         has:
-           kind: statement_block
-           has:
-             kind: try_statement
-             has:
-               kind: catch_clause
-               has:
-                 kind: statement_block
-                 has:
-                   kind: return_statement
-                   all:
-                     - has:
-                         stopBy: end
-                         kind: member_expression
-                         pattern: $VAR.signal.aborted
-                     - has:
-                         stopBy: end
-                         kind: call_expression
-                         regex: console.error\\("Failed to fetch RSC payload for
-
- fix:
-   'let $VAR = {signal:{aborted: false}};'
- `;
- // This rule is used instead of defining `process.env.NEXT_MINIMAL` in the `esbuild config.
- // Do we want to entirely replace these functions to reduce the bundle size?
- // In next `renderHTML` is used as a fallback in case of errors, but in minimal mode it just throws the error and the responsibility of handling it is on the infra.
- export const nextMinimalRule = `
- rule:
-   kind: member_expression
-   pattern: process.env.NEXT_MINIMAL
-   any:
-     - inside:
-         kind: parenthesized_expression
-         stopBy: end
-         inside:
-           kind: if_statement
-           any:
-             - inside:
-                 kind: statement_block
-                 inside:
-                   kind: method_definition
-                   any:
-                     - has: {kind: property_identifier, field: name, regex: runEdgeFunction}
-                     - has: {kind: property_identifier, field: name, regex: runMiddleware}
-                     - has: {kind: property_identifier, field: name, regex: imageOptimizer}
-             - has:
-                 kind: statement_block
-                 has:
-                   kind: expression_statement
-                   pattern: res.statusCode = 400;
- fix:
-   'true'
- `;
- export function patchNextMinimal(updater) {
-     return updater.updateContent("patch-next-minimal", [
-         {
-             field: {
-                 filter: /next-server\.(js)$/,
-                 contentFilter: /.*/,
-                 callback: ({ contents }) => {
-                     return patchCode(contents, nextMinimalRule);
-                 },
-             },
-         },
-     ]);
- }

package/dist/cli/build/patches/plugins/next-minimal.spec.js
DELETED

@@ -1,71 +0,0 @@
- import { patchCode } from "@opennextjs/aws/build/patch/astCodePatcher.js";
- import { describe, expect, test } from "vitest";
- import { abortControllerRule } from "./next-minimal";
- const appPageRuntimeProdJs = `let p = new AbortController;
- async function h(e3, t3) {
-   let { flightRouterState: r3, nextUrl: a2, prefetchKind: i2 } = t3, u2 = { [n2.hY]: "1", [n2.B]: encodeURIComponent(JSON.stringify(r3)) };
-   i2 === o.ob.AUTO && (u2[n2._V] = "1"), a2 && (u2[n2.kO] = a2);
-   try {
-     var c2;
-     let t4 = i2 ? i2 === o.ob.TEMPORARY ? "high" : "low" : "auto";
-     "export" === process.env.__NEXT_CONFIG_OUTPUT && ((e3 = new URL(e3)).pathname.endsWith("/") ? e3.pathname += "index.txt" : e3.pathname += ".txt");
-     let r4 = await m(e3, u2, t4, p.signal), a3 = d(r4.url), h2 = r4.redirected ? a3 : void 0, g = r4.headers.get("content-type") || "", v = !!(null == (c2 = r4.headers.get("vary")) ? void 0 : c2.includes(n2.kO)), b = !!r4.headers.get(n2.jc), S = r4.headers.get(n2.UK), _ = null !== S ? parseInt(S, 10) : -1, w = g.startsWith(n2.al);
-     if ("export" !== process.env.__NEXT_CONFIG_OUTPUT || w || (w = g.startsWith("text/plain")), !w || !r4.ok || !r4.body)
-       return e3.hash && (a3.hash = e3.hash), f(a3.toString());
-     let k = b ? function(e4) {
-       let t5 = e4.getReader();
-       return new ReadableStream({ async pull(e5) {
-         for (; ; ) {
-           let { done: r5, value: n3 } = await t5.read();
-           if (!r5) {
-             e5.enqueue(n3);
-             continue;
-           }
-           return;
-         }
-       } });
-     }(r4.body) : r4.body, E = await y(k);
-     if ((0, l.X)() !== E.b)
-       return f(r4.url);
-     return { flightData: (0, s.aj)(E.f), canonicalUrl: h2, couldBeIntercepted: v, prerendered: E.S, postponed: b, staleTime: _ };
-   } catch (t4) {
-     return p.signal.aborted || console.error("Failed to fetch RSC payload for " + e3 + ". Falling back to browser navigation.", t4), { flightData: e3.toString(), canonicalUrl: void 0, couldBeIntercepted: false, prerendered: false, postponed: false, staleTime: -1 };
-   }
- }
- `;
- describe("Abort controller", () => {
-   test("minimal", () => {
-     expect(patchCode(appPageRuntimeProdJs, abortControllerRule)).toBe(`let p = {signal:{aborted: false}};
- async function h(e3, t3) {
-   let { flightRouterState: r3, nextUrl: a2, prefetchKind: i2 } = t3, u2 = { [n2.hY]: "1", [n2.B]: encodeURIComponent(JSON.stringify(r3)) };
-   i2 === o.ob.AUTO && (u2[n2._V] = "1"), a2 && (u2[n2.kO] = a2);
-   try {
-     var c2;
-     let t4 = i2 ? i2 === o.ob.TEMPORARY ? "high" : "low" : "auto";
-     "export" === process.env.__NEXT_CONFIG_OUTPUT && ((e3 = new URL(e3)).pathname.endsWith("/") ? e3.pathname += "index.txt" : e3.pathname += ".txt");
-     let r4 = await m(e3, u2, t4, p.signal), a3 = d(r4.url), h2 = r4.redirected ? a3 : void 0, g = r4.headers.get("content-type") || "", v = !!(null == (c2 = r4.headers.get("vary")) ? void 0 : c2.includes(n2.kO)), b = !!r4.headers.get(n2.jc), S = r4.headers.get(n2.UK), _ = null !== S ? parseInt(S, 10) : -1, w = g.startsWith(n2.al);
-     if ("export" !== process.env.__NEXT_CONFIG_OUTPUT || w || (w = g.startsWith("text/plain")), !w || !r4.ok || !r4.body)
-       return e3.hash && (a3.hash = e3.hash), f(a3.toString());
-     let k = b ? function(e4) {
-       let t5 = e4.getReader();
-       return new ReadableStream({ async pull(e5) {
-         for (; ; ) {
-           let { done: r5, value: n3 } = await t5.read();
-           if (!r5) {
-             e5.enqueue(n3);
-             continue;
-           }
-           return;
-         }
-       } });
-     }(r4.body) : r4.body, E = await y(k);
-     if ((0, l.X)() !== E.b)
-       return f(r4.url);
-     return { flightData: (0, s.aj)(E.f), canonicalUrl: h2, couldBeIntercepted: v, prerendered: E.S, postponed: b, staleTime: _ };
-   } catch (t4) {
-     return p.signal.aborted || console.error("Failed to fetch RSC payload for " + e3 + ". Falling back to browser navigation.", t4), { flightData: e3.toString(), canonicalUrl: void 0, couldBeIntercepted: false, prerendered: false, postponed: false, staleTime: -1 };
-   }
- }
- `);
-   });
- });

package/dist/cli/{build/patches/plugins/next-minimal.spec.d.ts → commands/populate-cache.spec.d.ts}
RENAMED

File without changes