@opennextjs/cloudflare 0.4.8 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,18 @@
+ import type { Queue, QueueMessage } from "@opennextjs/aws/types/overrides.js";
+ export declare const DEFAULT_REVALIDATION_TIMEOUT_MS = 10000;
+ /**
+ * The Memory Queue offers basic ISR revalidation by directly requesting a revalidation of a route.
+ *
+ * It offers basic support for in-memory de-duping per isolate.
+ */
+ export declare class MemoryQueue implements Queue {
+ private opts;
+ readonly name = "memory-queue";
+ revalidatedPaths: Map<string, NodeJS.Timeout>;
+ constructor(opts?: {
+ revalidationTimeoutMs: number;
+ });
+ send({ MessageBody: { host, url }, MessageGroupId }: QueueMessage): Promise<void>;
+ }
+ declare const _default: MemoryQueue;
+ export default _default;
@@ -0,0 +1,43 @@
+ import logger from "@opennextjs/aws/logger.js";
+ export const DEFAULT_REVALIDATION_TIMEOUT_MS = 10_000;
+ /**
+ * The Memory Queue offers basic ISR revalidation by directly requesting a revalidation of a route.
+ *
+ * It offers basic support for in-memory de-duping per isolate.
+ */
+ export class MemoryQueue {
+ opts;
+ name = "memory-queue";
+ revalidatedPaths = new Map();
+ constructor(opts = { revalidationTimeoutMs: DEFAULT_REVALIDATION_TIMEOUT_MS }) {
+ this.opts = opts;
+ }
+ async send({ MessageBody: { host, url }, MessageGroupId }) {
+ if (this.revalidatedPaths.has(MessageGroupId))
+ return;
+ this.revalidatedPaths.set(MessageGroupId,
+ // force remove to allow new revalidations incase something went wrong
+ setTimeout(() => this.revalidatedPaths.delete(MessageGroupId), this.opts.revalidationTimeoutMs));
+ try {
+ const protocol = host.includes("localhost") ? "http" : "https";
+ // TODO: Drop the import - https://github.com/opennextjs/opennextjs-cloudflare/issues/361
+ // @ts-ignore
+ const manifest = await import("./.next/prerender-manifest.json");
+ await globalThis.internalFetch(`${protocol}://${host}${url}`, {
+ method: "HEAD",
+ headers: {
+ "x-prerender-revalidate": manifest.preview.previewModeId,
+ "x-isr": "1",
+ },
+ });
+ }
+ catch (e) {
+ logger.error(e);
+ }
+ finally {
+ clearTimeout(this.revalidatedPaths.get(MessageGroupId));
+ this.revalidatedPaths.delete(MessageGroupId);
+ }
+ }
+ }
+ export default new MemoryQueue();
@@ -0,0 +1 @@
+ export {};
@@ -0,0 +1,64 @@
+ import { generateMessageGroupId } from "@opennextjs/aws/core/routing/queue.js";
+ import { afterEach, beforeAll, describe, expect, it, vi } from "vitest";
+ import cache, { DEFAULT_REVALIDATION_TIMEOUT_MS } from "./memory-queue";
+ vi.mock("./.next/prerender-manifest.json", () => Promise.resolve({ preview: { previewModeId: "id" } }));
+ describe("MemoryQueue", () => {
+ beforeAll(() => {
+ vi.useFakeTimers();
+ globalThis.internalFetch = vi.fn().mockReturnValue(new Promise((res) => setTimeout(() => res(true), 1)));
+ });
+ afterEach(() => vi.clearAllMocks());
+ it("should process revalidations for a path", async () => {
+ const firstRequest = cache.send({
+ MessageBody: { host: "test.local", url: "/test" },
+ MessageGroupId: generateMessageGroupId("/test"),
+ MessageDeduplicationId: "",
+ });
+ vi.advanceTimersByTime(DEFAULT_REVALIDATION_TIMEOUT_MS);
+ await firstRequest;
+ expect(globalThis.internalFetch).toHaveBeenCalledTimes(1);
+ const secondRequest = cache.send({
+ MessageBody: { host: "test.local", url: "/test" },
+ MessageGroupId: generateMessageGroupId("/test"),
+ MessageDeduplicationId: "",
+ });
+ vi.advanceTimersByTime(1);
+ await secondRequest;
+ expect(globalThis.internalFetch).toHaveBeenCalledTimes(2);
+ });
+ it("should process revalidations for multiple paths", async () => {
+ const firstRequest = cache.send({
+ MessageBody: { host: "test.local", url: "/test" },
+ MessageGroupId: generateMessageGroupId("/test"),
+ MessageDeduplicationId: "",
+ });
+ vi.advanceTimersByTime(1);
+ await firstRequest;
+ expect(globalThis.internalFetch).toHaveBeenCalledTimes(1);
+ const secondRequest = cache.send({
+ MessageBody: { host: "test.local", url: "/test" },
+ MessageGroupId: generateMessageGroupId("/other"),
+ MessageDeduplicationId: "",
+ });
+ vi.advanceTimersByTime(1);
+ await secondRequest;
+ expect(globalThis.internalFetch).toHaveBeenCalledTimes(2);
+ });
+ it("should de-dupe revalidations", async () => {
+ const requests = [
+ cache.send({
+ MessageBody: { host: "test.local", url: "/test" },
+ MessageGroupId: generateMessageGroupId("/test"),
+ MessageDeduplicationId: "",
+ }),
+ cache.send({
+ MessageBody: { host: "test.local", url: "/test" },
+ MessageGroupId: generateMessageGroupId("/test"),
+ MessageDeduplicationId: "",
+ }),
+ ];
+ vi.advanceTimersByTime(1);
+ await Promise.all(requests);
+ expect(globalThis.internalFetch).toHaveBeenCalledTimes(1);
+ });
+ });
@@ -8,6 +8,7 @@ import { patchVercelOgLibrary } from "./patches/ast/patch-vercel-og-library.js";
  import { patchWebpackRuntime } from "./patches/ast/webpack-runtime.js";
  import * as patches from "./patches/index.js";
  import { ContentUpdater } from "./patches/plugins/content-updater.js";
+ import { patchFetchCacheSetMissingWaitUntil } from "./patches/plugins/fetch-cache-wait-until.js";
  import { patchLoadInstrumentation } from "./patches/plugins/load-instrumentation.js";
  import { handleOptionalDependencies } from "./patches/plugins/optional-deps.js";
  import { fixRequire } from "./patches/plugins/require.js";
@@ -71,6 +72,7 @@ export async function bundleServer(buildOpts) {
  fixRequire(updater),
  handleOptionalDependencies(optionalDependencies),
  patchLoadInstrumentation(updater),
+ patchFetchCacheSetMissingWaitUntil(updater),
  // Apply updater updaters, must be the last plugin
  updater.plugin,
  ],
@@ -4,11 +4,21 @@
  * The updater allows multiple plugins to update the content.
  */
  import { type OnLoadOptions, type Plugin, type PluginBuild } from "esbuild";
+ /**
+ * The callbacks returns either an updated content or undefined if the content is unchanged.
+ */
  export type Callback = (args: {
  contents: string;
  path: string;
  }) => string | undefined | Promise<string | undefined>;
- export type Updater = OnLoadOptions & {
+ /**
+ * The callback is called only when `contentFilter` matches the content.
+ * It can be used as a fast heuristic to prevent an expensive update.
+ */
+ export type OnUpdateOptions = OnLoadOptions & {
+ contentFilter: RegExp;
+ };
+ export type Updater = OnUpdateOptions & {
  callback: Callback;
  };
  export declare class ContentUpdater {
@@ -19,11 +29,11 @@ export declare class ContentUpdater {
  * The callbacks are called in order of registration.
  *
  * @param name The name of the plugin (must be unique).
- * @param options Same options as the `onLoad` hook to restrict updates.
+ * @param options Options.
  * @param callback The callback updating the content.
  * @returns A noop ESBuild plugin.
  */
- updateContent(name: string, options: OnLoadOptions, callback: Callback): Plugin;
+ updateContent(name: string, options: OnUpdateOptions, callback: Callback): Plugin;
  /**
  * Returns an ESBuild plugin applying the registered updates.
  */
@@ -12,7 +12,7 @@ export class ContentUpdater {
  * The callbacks are called in order of registration.
  *
  * @param name The name of the plugin (must be unique).
- * @param options Same options as the `onLoad` hook to restrict updates.
+ * @param options Options.
  * @param callback The callback updating the content.
  * @returns A noop ESBuild plugin.
  */
@@ -35,11 +35,14 @@ export class ContentUpdater {
  setup: async (build) => {
  build.onLoad({ filter: /\.(js|mjs|cjs|jsx|ts|tsx)$/ }, async (args) => {
  let contents = await readFile(args.path, "utf-8");
- for (const { filter, namespace, callback } of this.updaters.values()) {
+ for (const { filter, namespace, contentFilter, callback } of this.updaters.values()) {
  if (namespace !== undefined && args.namespace !== namespace) {
  continue;
  }
- if (!filter.test(args.path)) {
+ if (!args.path.match(filter)) {
+ continue;
+ }
+ if (!contents.match(contentFilter)) {
  continue;
  }
  contents = (await callback({ contents, path: args.path })) ?? contents;
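For reference, a registration using the new `contentFilter` option looks like the sketch below. This is a minimal, illustrative plugin: the name "patch-example" and the replacement it performs are hypothetical, while the `updateContent(name, options, callback)` signature and the `contentFilter` fast-path behaviour come from the code above.

import type { ContentUpdater } from "./content-updater.js";

export function patchExample(updater: ContentUpdater) {
  return updater.updateContent(
    "patch-example",
    {
      // esbuild onLoad-style path filter
      filter: /\.(js|mjs|cjs)$/,
      // cheap pre-check: the callback only runs when the file content matches
      contentFilter: /loadComponents\(/,
    },
    // return the updated content, or undefined to leave the file unchanged
    ({ contents }) => contents.replace("loadComponents(", "patchedLoadComponents(")
  );
}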
@@ -0,0 +1,14 @@
+ import type { ContentUpdater } from "./content-updater.js";
+ /**
+ * The following Next.js code sets values in the incremental cache for fetch calls:
+ * https://github.com/vercel/next.js/blob/e5fc495e3d4/packages/next/src/server/lib/patch-fetch.ts#L690-L728
+ *
+ * The issue here is that this promise is never awaited in the Next.js code (since in a standard node.js server
+ * the promise will eventually simply just run) but we do need to run it inside `waitUntil` (so that the worker
+ * is not killed before the promise is fully executed), without that this promise gets discarded and values
+ * don't get saved in the incremental cache.
+ *
+ * This function wraps the promise in a `waitUntil` call (retrieved from `globalThis.__openNextAls.getStore()`).
+ */
+ export declare function patchFetchCacheSetMissingWaitUntil(updater: ContentUpdater): import("esbuild").Plugin;
+ export declare const rule = "\nrule:\n kind: call_expression\n pattern: $PROMISE\n all:\n - has: { pattern: $_.arrayBuffer().then, stopBy: end }\n - has: { pattern: \"Buffer.from\", stopBy: end }\n - any:\n - inside:\n kind: sequence_expression\n inside:\n kind: return_statement\n - inside:\n kind: expression_statement\n precedes:\n kind: return_statement\n - has: { pattern: $_.FETCH, stopBy: end }\n\nfix: |\n globalThis.__openNextAls?.getStore()?.waitUntil?.($PROMISE)\n";
@@ -0,0 +1,40 @@
+ import { getCrossPlatformPathRegex } from "@opennextjs/aws/utils/regex.js";
+ import { patchCode } from "../ast/util.js";
+ /**
+ * The following Next.js code sets values in the incremental cache for fetch calls:
+ * https://github.com/vercel/next.js/blob/e5fc495e3d4/packages/next/src/server/lib/patch-fetch.ts#L690-L728
+ *
+ * The issue here is that this promise is never awaited in the Next.js code (since in a standard node.js server
+ * the promise will eventually simply just run) but we do need to run it inside `waitUntil` (so that the worker
+ * is not killed before the promise is fully executed), without that this promise gets discarded and values
+ * don't get saved in the incremental cache.
+ *
+ * This function wraps the promise in a `waitUntil` call (retrieved from `globalThis.__openNextAls.getStore()`).
+ */
+ export function patchFetchCacheSetMissingWaitUntil(updater) {
+ return updater.updateContent("patch-fetch-cache-set-missing-wait-until", {
+ filter: getCrossPlatformPathRegex(String.raw `(server/chunks/.*\.js|.*\.runtime\..*\.js|patch-fetch\.js)$`, { escape: false }),
+ contentFilter: /arrayBuffer\(\)\s*\.then/,
+ }, ({ contents }) => patchCode(contents, rule));
+ }
+ export const rule = `
+ rule:
+ kind: call_expression
+ pattern: $PROMISE
+ all:
+ - has: { pattern: $_.arrayBuffer().then, stopBy: end }
+ - has: { pattern: "Buffer.from", stopBy: end }
+ - any:
+ - inside:
+ kind: sequence_expression
+ inside:
+ kind: return_statement
+ - inside:
+ kind: expression_statement
+ precedes:
+ kind: return_statement
+ - has: { pattern: $_.FETCH, stopBy: end }
+
+ fix: |
+ globalThis.__openNextAls?.getStore()?.waitUntil?.($PROMISE)
+ `;
@@ -0,0 +1,453 @@
+ import { describe, expect, test } from "vitest";
+ import { patchCode } from "../ast/util.js";
+ import { rule } from "./fetch-cache-wait-until.js";
+ describe("patchFetchCacheSetMissingWaitUntil", () => {
+ test("on minified code", () => {
+ const code = `
+ {
+ let [o4, a2] = (0, d2.cloneResponse)(e3);
+ return o4.arrayBuffer().then(async (e4) => {
+ var a3;
+ let i4 = Buffer.from(e4), s3 = { headers: Object.fromEntries(o4.headers.entries()), body: i4.toString("base64"), status: o4.status, url: o4.url };
+ null == $ || null == (a3 = $.serverComponentsHmrCache) || a3.set(n2, s3), F && await H.set(n2, { kind: c2.CachedRouteKind.FETCH, data: s3, revalidate: t5 }, { fetchCache: true, revalidate: r4, fetchUrl: _, fetchIdx: q, tags: A2 });
+ }).catch((e4) => console.warn("Failed to set fetch cache", u4, e4)).finally(X), a2;
+ }`;
+ expect(patchCode(code, rule)).toMatchInlineSnapshot(`
+ "{
+ let [o4, a2] = (0, d2.cloneResponse)(e3);
+ return globalThis.__openNextAls?.getStore()?.waitUntil?.(o4.arrayBuffer().then(async (e4) => {
+ var a3;
+ let i4 = Buffer.from(e4), s3 = { headers: Object.fromEntries(o4.headers.entries()), body: i4.toString("base64"), status: o4.status, url: o4.url };
+ null == $ || null == (a3 = $.serverComponentsHmrCache) || a3.set(n2, s3), F && await H.set(n2, { kind: c2.CachedRouteKind.FETCH, data: s3, revalidate: t5 }, { fetchCache: true, revalidate: r4, fetchUrl: _, fetchIdx: q, tags: A2 });
+ }).catch((e4) => console.warn("Failed to set fetch cache", u4, e4)).finally(X))
+ , a2;
+ }"
+ `);
+ });
+ describe("on non-minified code", () => {
+ test("15.1.0", () => {
+ // source: https://github.com/vercel/next.js/blob/fe45b74fdac83d3/packages/next/src/server/lib/patch-fetch.ts#L627-L732
+ const code = `if (
+ res.status === 200 &&
+ incrementalCache &&
+ cacheKey &&
+ (isCacheableRevalidate ||
+ useCacheOrRequestStore?.serverComponentsHmrCache)
+ ) {
+ const normalizedRevalidate =
+ finalRevalidate >= INFINITE_CACHE
+ ? CACHE_ONE_YEAR
+ : finalRevalidate
+ const externalRevalidate =
+ finalRevalidate >= INFINITE_CACHE ? false : finalRevalidate
+
+ if (workUnitStore && workUnitStore.type === 'prerender') {
+ // We are prerendering at build time or revalidate time with dynamicIO so we need to
+ // buffer the response so we can guarantee it can be read in a microtask
+ const bodyBuffer = await res.arrayBuffer()
+
+ const fetchedData = {
+ headers: Object.fromEntries(res.headers.entries()),
+ body: Buffer.from(bodyBuffer).toString('base64'),
+ status: res.status,
+ url: res.url,
+ }
+
+ // We can skip checking the serverComponentsHmrCache because we aren't in
+ // dev mode.
+
+ await incrementalCache.set(
+ cacheKey,
+ {
+ kind: CachedRouteKind.FETCH,
+ data: fetchedData,
+ revalidate: normalizedRevalidate,
+ },
+ {
+ fetchCache: true,
+ revalidate: externalRevalidate,
+ fetchUrl,
+ fetchIdx,
+ tags,
+ }
+ )
+ await handleUnlock()
+
+ // We return a new Response to the caller.
+ return new Response(bodyBuffer, {
+ headers: res.headers,
+ status: res.status,
+ statusText: res.statusText,
+ })
+ } else {
+ // We're cloning the response using this utility because there
+ // exists a bug in the undici library around response cloning.
+ // See the following pull request for more details:
+ // https://github.com/vercel/next.js/pull/73274
+
+ const [cloned1, cloned2] = cloneResponse(res)
+
+ // We are dynamically rendering including dev mode. We want to return
+ // the response to the caller as soon as possible because it might stream
+ // over a very long time.
+ cloned1
+ .arrayBuffer()
+ .then(async (arrayBuffer) => {
+ const bodyBuffer = Buffer.from(arrayBuffer)
+
+ const fetchedData = {
+ headers: Object.fromEntries(cloned1.headers.entries()),
+ body: bodyBuffer.toString('base64'),
+ status: cloned1.status,
+ url: cloned1.url,
+ }
+
+ useCacheOrRequestStore?.serverComponentsHmrCache?.set(
+ cacheKey,
+ fetchedData
+ )
+
+ if (isCacheableRevalidate) {
+ await incrementalCache.set(
+ cacheKey,
+ {
+ kind: CachedRouteKind.FETCH,
+ data: fetchedData,
+ revalidate: normalizedRevalidate,
+ },
+ {
+ fetchCache: true,
+ revalidate: externalRevalidate,
+ fetchUrl,
+ fetchIdx,
+ tags,
+ }
+ )
+ }
+ })
+ .catch((error) =>
+ console.warn(\`Failed to set fetch cache\`, input, error)
+ )
+ .finally(handleUnlock)
+
+ return cloned2
+ }
+ }
+ `;
+ expect(patchCode(code, rule)).toMatchInlineSnapshot(`
+ "if (
+ res.status === 200 &&
+ incrementalCache &&
+ cacheKey &&
+ (isCacheableRevalidate ||
+ useCacheOrRequestStore?.serverComponentsHmrCache)
+ ) {
+ const normalizedRevalidate =
+ finalRevalidate >= INFINITE_CACHE
+ ? CACHE_ONE_YEAR
+ : finalRevalidate
+ const externalRevalidate =
+ finalRevalidate >= INFINITE_CACHE ? false : finalRevalidate
+
+ if (workUnitStore && workUnitStore.type === 'prerender') {
+ // We are prerendering at build time or revalidate time with dynamicIO so we need to
+ // buffer the response so we can guarantee it can be read in a microtask
+ const bodyBuffer = await res.arrayBuffer()
+
+ const fetchedData = {
+ headers: Object.fromEntries(res.headers.entries()),
+ body: Buffer.from(bodyBuffer).toString('base64'),
+ status: res.status,
+ url: res.url,
+ }
+
+ // We can skip checking the serverComponentsHmrCache because we aren't in
+ // dev mode.
+
+ await incrementalCache.set(
+ cacheKey,
+ {
+ kind: CachedRouteKind.FETCH,
+ data: fetchedData,
+ revalidate: normalizedRevalidate,
+ },
+ {
+ fetchCache: true,
+ revalidate: externalRevalidate,
+ fetchUrl,
+ fetchIdx,
+ tags,
+ }
+ )
+ await handleUnlock()
+
+ // We return a new Response to the caller.
+ return new Response(bodyBuffer, {
+ headers: res.headers,
+ status: res.status,
+ statusText: res.statusText,
+ })
+ } else {
+ // We're cloning the response using this utility because there
+ // exists a bug in the undici library around response cloning.
+ // See the following pull request for more details:
+ // https://github.com/vercel/next.js/pull/73274
+
+ const [cloned1, cloned2] = cloneResponse(res)
+
+ // We are dynamically rendering including dev mode. We want to return
+ // the response to the caller as soon as possible because it might stream
+ // over a very long time.
+ globalThis.__openNextAls?.getStore()?.waitUntil?.(cloned1
+ .arrayBuffer()
+ .then(async (arrayBuffer) => {
+ const bodyBuffer = Buffer.from(arrayBuffer)
+
+ const fetchedData = {
+ headers: Object.fromEntries(cloned1.headers.entries()),
+ body: bodyBuffer.toString('base64'),
+ status: cloned1.status,
+ url: cloned1.url,
+ }
+
+ useCacheOrRequestStore?.serverComponentsHmrCache?.set(
+ cacheKey,
+ fetchedData
+ )
+
+ if (isCacheableRevalidate) {
+ await incrementalCache.set(
+ cacheKey,
+ {
+ kind: CachedRouteKind.FETCH,
+ data: fetchedData,
+ revalidate: normalizedRevalidate,
+ },
+ {
+ fetchCache: true,
+ revalidate: externalRevalidate,
+ fetchUrl,
+ fetchIdx,
+ tags,
+ }
+ )
+ }
+ })
+ .catch((error) =>
+ console.warn(\`Failed to set fetch cache\`, input, error)
+ )
+ .finally(handleUnlock))
+
+
+ return cloned2
+ }
+ }
+ "
+ `);
+ });
+ test("Next.js 15.0.4", () => {
+ // source: https://github.com/vercel/next.js/blob/d6a6aa14069/packages/next/src/server/lib/patch-fetch.ts#L627-L725
+ const code = `if (
+ res.status === 200 &&
+ incrementalCache &&
+ cacheKey &&
+ (isCacheableRevalidate || requestStore?.serverComponentsHmrCache)
+ ) {
+ const normalizedRevalidate =
+ finalRevalidate >= INFINITE_CACHE
+ ? CACHE_ONE_YEAR
+ : finalRevalidate
+ const externalRevalidate =
+ finalRevalidate >= INFINITE_CACHE ? false : finalRevalidate
+
+ if (workUnitStore && workUnitStore.type === 'prerender') {
+ // We are prerendering at build time or revalidate time with dynamicIO so we need to
+ // buffer the response so we can guarantee it can be read in a microtask
+ const bodyBuffer = await res.arrayBuffer()
+
+ const fetchedData = {
+ headers: Object.fromEntries(res.headers.entries()),
+ body: Buffer.from(bodyBuffer).toString('base64'),
+ status: res.status,
+ url: res.url,
+ }
+
+ // We can skip checking the serverComponentsHmrCache because we aren't in
+ // dev mode.
+
+ await incrementalCache.set(
+ cacheKey,
+ {
+ kind: CachedRouteKind.FETCH,
+ data: fetchedData,
+ revalidate: normalizedRevalidate,
+ },
+ {
+ fetchCache: true,
+ revalidate: externalRevalidate,
+ fetchUrl,
+ fetchIdx,
+ tags,
+ }
+ )
+ await handleUnlock()
+
+ // We we return a new Response to the caller.
+ return new Response(bodyBuffer, {
+ headers: res.headers,
+ status: res.status,
+ statusText: res.statusText,
+ })
+ } else {
+ // We are dynamically rendering including dev mode. We want to return
+ // the response to the caller as soon as possible because it might stream
+ // over a very long time.
+ res
+ .clone()
+ .arrayBuffer()
+ .then(async (arrayBuffer) => {
+ const bodyBuffer = Buffer.from(arrayBuffer)
+
+ const fetchedData = {
+ headers: Object.fromEntries(res.headers.entries()),
+ body: bodyBuffer.toString('base64'),
+ status: res.status,
+ url: res.url,
+ }
+
+ requestStore?.serverComponentsHmrCache?.set(
+ cacheKey,
+ fetchedData
+ )
+
+ if (isCacheableRevalidate) {
+ await incrementalCache.set(
+ cacheKey,
+ {
+ kind: CachedRouteKind.FETCH,
+ data: fetchedData,
+ revalidate: normalizedRevalidate,
+ },
+ {
+ fetchCache: true,
+ revalidate: externalRevalidate,
+ fetchUrl,
+ fetchIdx,
+ tags,
+ }
+ )
+ }
+ })
+ .catch((error) =>
+ console.warn(\`Failed to set fetch cache\`, input, error)
+ )
+ .finally(handleUnlock)
+
+ return res
+ }
+ }`;
+ expect(patchCode(code, rule)).toMatchInlineSnapshot(`
+ "if (
+ res.status === 200 &&
+ incrementalCache &&
+ cacheKey &&
+ (isCacheableRevalidate || requestStore?.serverComponentsHmrCache)
+ ) {
+ const normalizedRevalidate =
+ finalRevalidate >= INFINITE_CACHE
+ ? CACHE_ONE_YEAR
+ : finalRevalidate
+ const externalRevalidate =
+ finalRevalidate >= INFINITE_CACHE ? false : finalRevalidate
+
+ if (workUnitStore && workUnitStore.type === 'prerender') {
+ // We are prerendering at build time or revalidate time with dynamicIO so we need to
+ // buffer the response so we can guarantee it can be read in a microtask
+ const bodyBuffer = await res.arrayBuffer()
+
+ const fetchedData = {
+ headers: Object.fromEntries(res.headers.entries()),
+ body: Buffer.from(bodyBuffer).toString('base64'),
+ status: res.status,
+ url: res.url,
+ }
+
+ // We can skip checking the serverComponentsHmrCache because we aren't in
+ // dev mode.
+
+ await incrementalCache.set(
+ cacheKey,
+ {
+ kind: CachedRouteKind.FETCH,
+ data: fetchedData,
+ revalidate: normalizedRevalidate,
+ },
+ {
+ fetchCache: true,
+ revalidate: externalRevalidate,
+ fetchUrl,
+ fetchIdx,
+ tags,
+ }
+ )
+ await handleUnlock()
+
+ // We we return a new Response to the caller.
+ return new Response(bodyBuffer, {
+ headers: res.headers,
+ status: res.status,
+ statusText: res.statusText,
+ })
+ } else {
+ // We are dynamically rendering including dev mode. We want to return
+ // the response to the caller as soon as possible because it might stream
+ // over a very long time.
+ globalThis.__openNextAls?.getStore()?.waitUntil?.(res
+ .clone()
+ .arrayBuffer()
+ .then(async (arrayBuffer) => {
+ const bodyBuffer = Buffer.from(arrayBuffer)
+
+ const fetchedData = {
+ headers: Object.fromEntries(res.headers.entries()),
+ body: bodyBuffer.toString('base64'),
+ status: res.status,
+ url: res.url,
+ }
+
+ requestStore?.serverComponentsHmrCache?.set(
+ cacheKey,
+ fetchedData
+ )
+
+ if (isCacheableRevalidate) {
+ await incrementalCache.set(
+ cacheKey,
+ {
+ kind: CachedRouteKind.FETCH,
+ data: fetchedData,
+ revalidate: normalizedRevalidate,
+ },
+ {
+ fetchCache: true,
+ revalidate: externalRevalidate,
+ fetchUrl,
+ fetchIdx,
+ tags,
+ }
+ )
+ }
+ })
+ .catch((error) =>
+ console.warn(\`Failed to set fetch cache\`, input, error)
+ )
+ .finally(handleUnlock))
+
+
+ return res
+ }
+ }"
+ `);
+ });
+ });
+ });
@@ -12,9 +12,5 @@ rule:
  fix: async loadInstrumentationModule() { }
  `;
  export function patchLoadInstrumentation(updater) {
- return updater.updateContent("patch-load-instrumentation", { filter: /\.(js|mjs|cjs|jsx|ts|tsx)$/ }, ({ contents }) => {
- if (/async loadInstrumentationModule\(/.test(contents)) {
- return patchCode(contents, instrumentationRule);
- }
- });
+ return updater.updateContent("patch-load-instrumentation", { filter: /\.(js|mjs|cjs|jsx|ts|tsx)$/, contentFilter: /async loadInstrumentationModule\(/ }, ({ contents }) => patchCode(contents, instrumentationRule));
  }
@@ -6,11 +6,8 @@ import { patchCode } from "../ast/util.js";
  export function inlineRequirePagePlugin(updater, buildOpts) {
  return updater.updateContent("inline-require-page", {
  filter: getCrossPlatformPathRegex(String.raw `/next/dist/server/require\.js$`, { escape: false }),
- }, async ({ contents }) => {
- if (/function requirePage\(/.test(contents)) {
- return patchCode(contents, await getRule(buildOpts));
- }
- });
+ contentFilter: /function requirePage\(/,
+ }, async ({ contents }) => patchCode(contents, await getRule(buildOpts)));
  }
  async function getRule(buildOpts) {
  const { outputDir } = buildOpts;
@@ -1,5 +1,5 @@
  export function fixRequire(updater) {
- return updater.updateContent("fix-require", { filter: /\.(js|mjs|cjs|jsx|ts|tsx)$/ }, ({ contents }) => {
+ return updater.updateContent("fix-require", { filter: /\.(js|mjs|cjs|jsx|ts|tsx)$/, contentFilter: /.*/ }, ({ contents }) => {
  // `eval(...)` is not supported by workerd.
  contents = contents.replaceAll(`eval("require")`, "require");
  // `@opentelemetry` has a few issues.
@@ -14,7 +14,7 @@ export function fixRequire(updater) {
  //
  // The intent is to allow users to install their own version of `@opentelemetry/api`.
  //
- // The problem is that even when users do not explicitely install `@opentelemetry/api`,
+ // The problem is that even when users do not explicitly install `@opentelemetry/api`,
  // `require('@opentelemetry/api')` resolves to the package which is a dependency
  // of Next.
  //
@@ -11,8 +11,9 @@ export function ensureCloudflareConfig(config) {
  dftMaybeUseCache: config.default?.override?.incrementalCache === "dummy" ||
  typeof config.default?.override?.incrementalCache === "function",
  dftUseDummyTagCache: config.default?.override?.tagCache === "dummy",
- dftMaybeUseQueue: config.default?.override?.queue === "dummy" || config.default?.override?.queue === "direct",
- disableCacheInterception: config.dangerous?.enableCacheInterception !== true,
+ dftMaybeUseQueue: config.default?.override?.queue === "dummy" ||
+ config.default?.override?.queue === "direct" ||
+ typeof config.default?.override?.queue === "function",
  mwIsMiddlewareExternal: config.middleware?.external == true,
  mwUseCloudflareWrapper: config.middleware?.override?.wrapper === "cloudflare-edge",
  mwUseEdgeConverter: config.middleware?.override?.converter === "edge",
@@ -30,7 +31,7 @@ export function ensureCloudflareConfig(config) {
  converter: "edge",
  incrementalCache: "dummy" | function,
  tagCache: "dummy",
- queue: "dummy" | "direct",
+ queue: "dummy" | "direct" | function,
  },
  },

@@ -42,10 +43,6 @@ export function ensureCloudflareConfig(config) {
  proxyExternalRequest: "fetch",
  },
  },
-
- "dangerous": {
- "enableCacheInterception": false
- },
  }\n\n`.replace(/^ {8}/gm, ""));
  }
  }
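With this change, `ensureCloudflareConfig` also accepts a function for the queue override, alongside "dummy" and "direct". A minimal sketch of an open-next.config.ts using the function form is shown below; the lazy-loader shape and the relative import path are assumptions for illustration, not something this diff confirms.

// open-next.config.ts (sketch)
const config = {
  default: {
    override: {
      // ...the other overrides required by ensureCloudflareConfig ("edge" converter, "dummy" caches, ...)
      // function form: a lazy loader resolving to a Queue implementation,
      // for example the MemoryQueue added in this release (import path illustrative only)
      queue: () => import("./memory-queue.js").then((m) => m.default),
    },
  },
};

export default config;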
@@ -1,5 +1,8 @@
- import * as path from "node:path";
- const templatesDirPath = path.resolve(`${import.meta.dirname}/../../templates`);
+ import path from "node:path";
+ import url from "node:url";
+ const __filename = url.fileURLToPath(import.meta.url);
+ const __dirname = path.dirname(__filename);
+ const templatesDirPath = path.join(__dirname, "/../../templates");
  /**
  * Utility for getting the resolved path to the package's templates directory
  *
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@opennextjs/cloudflare",
  "description": "Cloudflare builder for next apps",
- "version": "0.4.8",
+ "version": "0.5.0",
  "type": "module",
  "bin": {
  "opennextjs-cloudflare": "dist/cli/index.js"
@@ -21,10 +21,6 @@ const config = {
  proxyExternalRequest: "fetch",
  },
  },
-
- dangerous: {
- enableCacheInterception: false,
- },
  };

  export default config;