@opennextjs/cloudflare 1.3.0 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/args.d.ts +2 -0
- package/dist/cli/args.js +37 -19
- package/dist/cli/build/build.d.ts +2 -2
- package/dist/cli/build/build.js +3 -0
- package/dist/cli/build/open-next/compile-images.d.ts +5 -0
- package/dist/cli/build/open-next/compile-images.js +29 -0
- package/dist/cli/build/open-next/compile-init.js +0 -21
- package/dist/cli/build/open-next/createServerBundle.js +0 -1
- package/dist/cli/build/patches/plugins/next-server.d.ts +0 -2
- package/dist/cli/build/patches/plugins/next-server.js +0 -19
- package/dist/cli/build/utils/workerd.js +3 -1
- package/dist/cli/templates/images.d.ts +24 -0
- package/dist/cli/templates/images.js +82 -0
- package/dist/cli/templates/init.d.ts +0 -17
- package/dist/cli/templates/init.js +0 -76
- package/dist/cli/templates/worker.js +3 -1
- package/package.json +2 -2
- package/templates/open-next.config.ts +1 -1
- package/templates/wrangler.jsonc +20 -20
- package/dist/api/durable-objects/bucket-cache-purge.spec.d.ts +0 -1
- package/dist/api/durable-objects/bucket-cache-purge.spec.js +0 -121
- package/dist/api/durable-objects/queue.spec.d.ts +0 -1
- package/dist/api/durable-objects/queue.spec.js +0 -287
- package/dist/api/durable-objects/sharded-tag-cache.spec.d.ts +0 -1
- package/dist/api/durable-objects/sharded-tag-cache.spec.js +0 -37
- package/dist/api/overrides/queue/memory-queue.spec.d.ts +0 -1
- package/dist/api/overrides/queue/memory-queue.spec.js +0 -76
- package/dist/api/overrides/queue/queue-cache.spec.d.ts +0 -1
- package/dist/api/overrides/queue/queue-cache.spec.js +0 -92
- package/dist/api/overrides/tag-cache/do-sharded-tag-cache.spec.d.ts +0 -1
- package/dist/api/overrides/tag-cache/do-sharded-tag-cache.spec.js +0 -413
- package/dist/api/overrides/tag-cache/tag-cache-filter.spec.d.ts +0 -1
- package/dist/api/overrides/tag-cache/tag-cache-filter.spec.js +0 -97
- package/dist/cli/build/patches/ast/patch-vercel-og-library.spec.d.ts +0 -1
- package/dist/cli/build/patches/ast/patch-vercel-og-library.spec.js +0 -50
- package/dist/cli/build/patches/ast/vercel-og.spec.d.ts +0 -1
- package/dist/cli/build/patches/ast/vercel-og.spec.js +0 -22
- package/dist/cli/build/patches/ast/webpack-runtime.spec.d.ts +0 -1
- package/dist/cli/build/patches/ast/webpack-runtime.spec.js +0 -102
- package/dist/cli/build/patches/plugins/instrumentation.spec.d.ts +0 -1
- package/dist/cli/build/patches/plugins/instrumentation.spec.js +0 -91
- package/dist/cli/build/patches/plugins/next-server.spec.d.ts +0 -1
- package/dist/cli/build/patches/plugins/next-server.spec.js +0 -216
- package/dist/cli/build/patches/plugins/patch-depd-deprecations.spec.d.ts +0 -1
- package/dist/cli/build/patches/plugins/patch-depd-deprecations.spec.js +0 -29
- package/dist/cli/build/patches/plugins/res-revalidate.spec.d.ts +0 -1
- package/dist/cli/build/patches/plugins/res-revalidate.spec.js +0 -99
- package/dist/cli/build/patches/plugins/use-cache.spec.d.ts +0 -1
- package/dist/cli/build/patches/plugins/use-cache.spec.js +0 -101
- package/dist/cli/build/utils/extract-project-env-vars.spec.d.ts +0 -1
- package/dist/cli/build/utils/extract-project-env-vars.spec.js +0 -67
- package/dist/cli/build/utils/workerd.spec.d.ts +0 -1
- package/dist/cli/build/utils/workerd.spec.js +0 -188
- package/dist/cli/commands/populate-cache.spec.d.ts +0 -1
- package/dist/cli/commands/populate-cache.spec.js +0 -61

package/dist/api/durable-objects/bucket-cache-purge.spec.js
@@ -1,121 +0,0 @@
-import { describe, expect, it, vi } from "vitest";
-import * as internal from "../overrides/internal";
-import { BucketCachePurge } from "./bucket-cache-purge";
-vi.mock("cloudflare:workers", () => ({
-    DurableObject: class {
-        ctx;
-        env;
-        constructor(ctx, env) {
-            this.ctx = ctx;
-            this.env = env;
-        }
-    },
-}));
-const createBucketCachePurge = () => {
-    const mockState = {
-        waitUntil: vi.fn(),
-        blockConcurrencyWhile: vi.fn().mockImplementation(async (fn) => fn()),
-        storage: {
-            setAlarm: vi.fn(),
-            getAlarm: vi.fn(),
-            sql: {
-                exec: vi.fn().mockImplementation(() => ({
-                    one: vi.fn(),
-                    toArray: vi.fn().mockReturnValue([]),
-                })),
-            },
-        },
-    };
-    // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    return new BucketCachePurge(mockState, {});
-};
-describe("BucketCachePurge", () => {
-    it("should block concurrency while creating the table", async () => {
-        const cache = createBucketCachePurge();
-        // @ts-expect-error - testing private method
-        expect(cache.ctx.blockConcurrencyWhile).toHaveBeenCalled();
-        // @ts-expect-error - testing private method
-        expect(cache.ctx.storage.sql.exec).toHaveBeenCalledWith(expect.stringContaining("CREATE TABLE IF NOT EXISTS cache_purge"));
-    });
-    describe("purgeCacheByTags", () => {
-        it("should insert tags into the sql table", async () => {
-            const cache = createBucketCachePurge();
-            const tags = ["tag1", "tag2"];
-            await cache.purgeCacheByTags(tags);
-            // @ts-expect-error - testing private method
-            expect(cache.ctx.storage.sql.exec).toHaveBeenCalledWith(expect.stringContaining("INSERT OR REPLACE INTO cache_purge"), [tags[0]]);
-            // @ts-expect-error - testing private method
-            expect(cache.ctx.storage.sql.exec).toHaveBeenCalledWith(expect.stringContaining("INSERT OR REPLACE INTO cache_purge"), [tags[1]]);
-        });
-        it("should set an alarm if no alarm is set", async () => {
-            const cache = createBucketCachePurge();
-            // @ts-expect-error - testing private method
-            cache.ctx.storage.getAlarm.mockResolvedValueOnce(null);
-            await cache.purgeCacheByTags(["tag"]);
-            // @ts-expect-error - testing private method
-            expect(cache.ctx.storage.setAlarm).toHaveBeenCalled();
-        });
-        it("should not set an alarm if one is already set", async () => {
-            const cache = createBucketCachePurge();
-            // @ts-expect-error - testing private method
-            cache.ctx.storage.getAlarm.mockResolvedValueOnce(true);
-            await cache.purgeCacheByTags(["tag"]);
-            // @ts-expect-error - testing private method
-            expect(cache.ctx.storage.setAlarm).not.toHaveBeenCalled();
-        });
-    });
-    describe("alarm", () => {
-        it("should purge cache by tags and delete them from the sql table", async () => {
-            const cache = createBucketCachePurge();
-            // @ts-expect-error - testing private method
-            cache.ctx.storage.sql.exec.mockReturnValueOnce({
-                toArray: () => [{ tag: "tag1" }, { tag: "tag2" }],
-            });
-            await cache.alarm();
-            // @ts-expect-error - testing private method
-            expect(cache.ctx.storage.sql.exec).toHaveBeenCalledWith(expect.stringContaining("DELETE FROM cache_purge"), ["tag1", "tag2"]);
-        });
-        it("should not purge cache if no tags are found", async () => {
-            const cache = createBucketCachePurge();
-            // @ts-expect-error - testing private method
-            cache.ctx.storage.sql.exec.mockReturnValueOnce({
-                toArray: () => [],
-            });
-            await cache.alarm();
-            // @ts-expect-error - testing private method
-            expect(cache.ctx.storage.sql.exec).not.toHaveBeenCalledWith(expect.stringContaining("DELETE FROM cache_purge"), []);
-        });
-        it("should call internalPurgeCacheByTags with the correct tags", async () => {
-            const cache = createBucketCachePurge();
-            const tags = ["tag1", "tag2"];
-            // @ts-expect-error - testing private method
-            cache.ctx.storage.sql.exec.mockReturnValueOnce({
-                toArray: () => tags.map((tag) => ({ tag })),
-            });
-            const internalPurgeCacheByTagsSpy = vi.spyOn(internal, "internalPurgeCacheByTags");
-            await cache.alarm();
-            expect(internalPurgeCacheByTagsSpy).toHaveBeenCalledWith(
-            // @ts-expect-error - testing private method
-            cache.env, tags);
-            // @ts-expect-error - testing private method 1st is constructor, 2nd is to get the tags and 3rd is to delete them
-            expect(cache.ctx.storage.sql.exec).toHaveBeenCalledTimes(3);
-        });
-        it("should continue until all tags are purged", async () => {
-            const cache = createBucketCachePurge();
-            const tags = Array.from({ length: 100 }, (_, i) => `tag${i}`);
-            // @ts-expect-error - testing private method
-            cache.ctx.storage.sql.exec.mockReturnValueOnce({
-                toArray: () => tags.map((tag) => ({ tag })),
-            });
-            const internalPurgeCacheByTagsSpy = vi.spyOn(internal, "internalPurgeCacheByTags");
-            await cache.alarm();
-            expect(internalPurgeCacheByTagsSpy).toHaveBeenCalledWith(
-            // @ts-expect-error - testing private method
-            cache.env, tags);
-            // @ts-expect-error - testing private method 1st is constructor, 2nd is to get the tags and 3rd is to delete them, 4th is to get the next 100 tags
-            expect(cache.ctx.storage.sql.exec).toHaveBeenCalledTimes(4);
-            // @ts-expect-error - testing private method
-            expect(cache.ctx.storage.sql.exec).toHaveBeenLastCalledWith(expect.stringContaining("SELECT * FROM cache_purge LIMIT 100"));
-        });
-    });
-});

@@ -1 +0,0 @@
-export {};

package/dist/api/durable-objects/queue.spec.js
@@ -1,287 +0,0 @@
-import { describe, expect, it, vi } from "vitest";
-import { DOQueueHandler } from "./queue";
-vi.mock("cloudflare:workers", () => ({
-    DurableObject: class {
-        ctx;
-        env;
-        constructor(ctx, env) {
-            this.ctx = ctx;
-            this.env = env;
-        }
-    },
-}));
-const createDurableObjectQueue = ({ fetchDuration, statusCode, headers, disableSQLite, }) => {
-    const mockState = {
-        waitUntil: vi.fn(),
-        blockConcurrencyWhile: vi.fn().mockImplementation(async (fn) => fn()),
-        storage: {
-            setAlarm: vi.fn(),
-            getAlarm: vi.fn(),
-            sql: {
-                exec: vi.fn().mockImplementation(() => ({
-                    one: vi.fn(),
-                })),
-            },
-        },
-    };
-    // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    return new DOQueueHandler(mockState, {
-        WORKER_SELF_REFERENCE: {
-            fetch: vi.fn().mockReturnValue(new Promise((res) => setTimeout(() => res(new Response(null, {
-                status: statusCode,
-                headers: headers ?? new Headers([["x-nextjs-cache", "REVALIDATED"]]),
-            })), fetchDuration))),
-            connect: vi.fn(),
-        },
-        NEXT_CACHE_DO_QUEUE_DISABLE_SQLITE: disableSQLite ? "true" : undefined,
-    });
-};
-const createMessage = (dedupId, lastModified = Date.now()) => ({
-    MessageBody: { host: "test.local", url: "/test", eTag: "test", lastModified },
-    MessageGroupId: "test.local/test",
-    MessageDeduplicationId: dedupId,
-    previewModeId: "test",
-});
-describe("DurableObjectQueue", () => {
-    describe("successful revalidation", () => {
-        it("should process a single revalidation", async () => {
-            process.env.__NEXT_PREVIEW_MODE_ID = "test";
-            const queue = createDurableObjectQueue({ fetchDuration: 10 });
-            const firstRequest = await queue.revalidate(createMessage("id"));
-            expect(firstRequest).toBeUndefined();
-            expect(queue.ongoingRevalidations.size).toBe(1);
-            expect(queue.ongoingRevalidations.has("id")).toBe(true);
-            await queue.ongoingRevalidations.get("id");
-            expect(queue.ongoingRevalidations.size).toBe(0);
-            expect(queue.ongoingRevalidations.has("id")).toBe(false);
-            expect(queue.service.fetch).toHaveBeenCalledWith("https://test.local/test", {
-                method: "HEAD",
-                headers: {
-                    "x-prerender-revalidate": "test",
-                    "x-isr": "1",
-                },
-                signal: expect.any(AbortSignal),
-            });
-        });
-        it("should dedupe revalidations", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10 });
-            await queue.revalidate(createMessage("id"));
-            await queue.revalidate(createMessage("id"));
-            expect(queue.ongoingRevalidations.size).toBe(1);
-            expect(queue.ongoingRevalidations.has("id")).toBe(true);
-        });
-        it("should block concurrency", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10 });
-            await queue.revalidate(createMessage("id"));
-            await queue.revalidate(createMessage("id2"));
-            await queue.revalidate(createMessage("id3"));
-            await queue.revalidate(createMessage("id4"));
-            await queue.revalidate(createMessage("id5"));
-            // the next one should block until one of the previous ones finishes
-            const blockedReq = queue.revalidate(createMessage("id6"));
-            expect(queue.ongoingRevalidations.size).toBe(queue.maxRevalidations);
-            expect(queue.ongoingRevalidations.has("id6")).toBe(false);
-            expect(Array.from(queue.ongoingRevalidations.keys())).toEqual(["id", "id2", "id3", "id4", "id5"]);
-            // Here we await the blocked request to ensure it's resolved
-            await blockedReq;
-            // We then need to await for the actual revalidation to finish
-            await Promise.all(Array.from(queue.ongoingRevalidations.values()));
-            expect(queue.ongoingRevalidations.size).toBe(0);
-            expect(queue.service.fetch).toHaveBeenCalledTimes(6);
-        });
-    });
-    describe("failed revalidation", () => {
-        it("should not put it in failed state for an incorrect 200", async () => {
-            const queue = createDurableObjectQueue({
-                fetchDuration: 10,
-                statusCode: 200,
-                headers: new Headers([["x-nextjs-cache", "MISS"]]),
-            });
-            await queue.revalidate(createMessage("id"));
-            await queue.ongoingRevalidations.get("id");
-            expect(queue.routeInFailedState.size).toBe(0);
-        });
-        it("should not put it in failed state for a failed revalidation with 404", async () => {
-            const queue = createDurableObjectQueue({
-                fetchDuration: 10,
-                statusCode: 404,
-            });
-            await queue.revalidate(createMessage("id"));
-            await queue.ongoingRevalidations.get("id");
-            expect(queue.routeInFailedState.size).toBe(0);
-            expect(queue.service.fetch).toHaveBeenCalledTimes(1);
-            await queue.revalidate(createMessage("id"));
-            expect(queue.routeInFailedState.size).toBe(0);
-            expect(queue.service.fetch).toHaveBeenCalledTimes(2);
-        });
-        it("should put it in failed state if revalidation fails with 500", async () => {
-            const queue = createDurableObjectQueue({
-                fetchDuration: 10,
-                statusCode: 500,
-            });
-            await queue.revalidate(createMessage("id"));
-            await queue.ongoingRevalidations.get("id");
-            expect(queue.routeInFailedState.size).toBe(1);
-            expect(queue.routeInFailedState.has("id")).toBe(true);
-            expect(queue.service.fetch).toHaveBeenCalledTimes(1);
-            await queue.revalidate(createMessage("id"));
-            expect(queue.routeInFailedState.size).toBe(1);
-            expect(queue.service.fetch).toHaveBeenCalledTimes(1);
-        });
-        it("should put it in failed state if revalidation fetch throw", async () => {
-            const queue = createDurableObjectQueue({
-                fetchDuration: 10,
-            });
-            // @ts-expect-error - This is mocked above
-            queue.service.fetch.mockImplementationOnce(() => Promise.reject(new Error("fetch error")));
-            await queue.revalidate(createMessage("id"));
-            await queue.ongoingRevalidations.get("id");
-            expect(queue.routeInFailedState.size).toBe(1);
-            expect(queue.routeInFailedState.has("id")).toBe(true);
-            expect(queue.ongoingRevalidations.size).toBe(0);
-            expect(queue.service.fetch).toHaveBeenCalledTimes(1);
-            await queue.revalidate(createMessage("id"));
-            expect(queue.routeInFailedState.size).toBe(1);
-            expect(queue.service.fetch).toHaveBeenCalledTimes(1);
-        });
-    });
-    describe("addAlarm", () => {
-        const getStorage = (queue) => {
-            // @ts-expect-error - ctx is a protected field
-            return queue.ctx.storage;
-        };
-        it("should not add an alarm if there are no failed states", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10 });
-            await queue.addAlarm();
-            expect(getStorage(queue).setAlarm).not.toHaveBeenCalled();
-        });
-        it("should add an alarm if there are failed states", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10 });
-            const nextAlarmMs = Date.now() + 1000;
-            queue.routeInFailedState.set("id", { msg: createMessage("id"), retryCount: 0, nextAlarmMs });
-            await queue.addAlarm();
-            expect(getStorage(queue).setAlarm).toHaveBeenCalledWith(nextAlarmMs);
-        });
-        it("should not add an alarm if there is already an alarm set", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10 });
-            queue.routeInFailedState.set("id", { msg: createMessage("id"), retryCount: 0, nextAlarmMs: 1000 });
-            // @ts-expect-error
-            queue.ctx.storage.getAlarm.mockResolvedValueOnce(1000);
-            await queue.addAlarm();
-            expect(getStorage(queue).setAlarm).not.toHaveBeenCalled();
-        });
-        it("should set the alarm to the lowest nextAlarm", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10 });
-            const nextAlarmMs = Date.now() + 1000;
-            const firstAlarm = Date.now() + 500;
-            queue.routeInFailedState.set("id", { msg: createMessage("id"), retryCount: 0, nextAlarmMs });
-            queue.routeInFailedState.set("id2", {
-                msg: createMessage("id2"),
-                retryCount: 0,
-                nextAlarmMs: firstAlarm,
-            });
-            await queue.addAlarm();
-            expect(getStorage(queue).setAlarm).toHaveBeenCalledWith(firstAlarm);
-        });
-    });
-    describe("addToFailedState", () => {
-        it("should add a failed state", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10 });
-            await queue.addToFailedState(createMessage("id"));
-            expect(queue.routeInFailedState.size).toBe(1);
-            expect(queue.routeInFailedState.has("id")).toBe(true);
-            expect(queue.routeInFailedState.get("id")?.retryCount).toBe(1);
-        });
-        it("should add a failed state with the correct nextAlarm", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10 });
-            await queue.addToFailedState(createMessage("id"));
-            expect(queue.routeInFailedState.get("id")?.nextAlarmMs).toBeGreaterThan(Date.now());
-            expect(queue.routeInFailedState.get("id")?.retryCount).toBe(1);
-        });
-        it("should add a failed state with the correct nextAlarm for a retry", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10 });
-            await queue.addToFailedState(createMessage("id"));
-            await queue.addToFailedState(createMessage("id"));
-            expect(queue.routeInFailedState.get("id")?.nextAlarmMs).toBeGreaterThan(Date.now());
-            expect(queue.routeInFailedState.get("id")?.retryCount).toBe(2);
-        });
-        it("should not add a failed state if it has been retried 6 times", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10 });
-            queue.routeInFailedState.set("id", { msg: createMessage("id"), retryCount: 6, nextAlarmMs: 1000 });
-            await queue.addToFailedState(createMessage("id"));
-            expect(queue.routeInFailedState.size).toBe(0);
-        });
-    });
-    describe("alarm", () => {
-        it("should execute revalidations for expired events", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10 });
-            queue.routeInFailedState.set("id", {
-                msg: createMessage("id"),
-                retryCount: 0,
-                nextAlarmMs: Date.now() - 1000,
-            });
-            queue.routeInFailedState.set("id2", {
-                msg: createMessage("id2"),
-                retryCount: 0,
-                nextAlarmMs: Date.now() - 1000,
-            });
-            await queue.alarm();
-            expect(queue.routeInFailedState.size).toBe(0);
-            expect(queue.service.fetch).toHaveBeenCalledTimes(2);
-        });
-        it("should execute revalidations for the next event to retry", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10 });
-            queue.routeInFailedState.set("id", {
-                msg: createMessage("id"),
-                retryCount: 0,
-                nextAlarmMs: Date.now() + 1000,
-            });
-            queue.routeInFailedState.set("id2", {
-                msg: createMessage("id2"),
-                retryCount: 0,
-                nextAlarmMs: Date.now() + 500,
-            });
-            await queue.alarm();
-            expect(queue.routeInFailedState.size).toBe(1);
-            expect(queue.service.fetch).toHaveBeenCalledTimes(1);
-            expect(queue.routeInFailedState.has("id2")).toBe(false);
-        });
-        it("should execute revalidations for the next event to retry and expired events", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10 });
-            queue.routeInFailedState.set("id", {
-                msg: createMessage("id"),
-                retryCount: 0,
-                nextAlarmMs: Date.now() + 1000,
-            });
-            queue.routeInFailedState.set("id2", {
-                msg: createMessage("id2"),
-                retryCount: 0,
-                nextAlarmMs: Date.now() - 1000,
-            });
-            await queue.alarm();
-            expect(queue.routeInFailedState.size).toBe(0);
-            expect(queue.service.fetch).toHaveBeenCalledTimes(2);
-        });
-    });
-    describe("disableSQLite", () => {
-        it("should not initialize the sqlite storage", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10, disableSQLite: true });
-            expect(queue.sql.exec).not.toHaveBeenCalled();
-        });
-        it("should not write to the sqlite storage on failed state", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10, disableSQLite: true });
-            await queue.addToFailedState(createMessage("id"));
-            expect(queue.sql.exec).not.toHaveBeenCalled();
-        });
-        it("should not read from the sqlite storage on checkSyncTable", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10, disableSQLite: true });
-            queue.checkSyncTable(createMessage("id"));
-            expect(queue.sql.exec).not.toHaveBeenCalled();
-        });
-        it("should not write to sql on successful revalidation", async () => {
-            const queue = createDurableObjectQueue({ fetchDuration: 10, disableSQLite: true });
-            await queue.revalidate(createMessage("id"));
-            expect(queue.sql.exec).not.toHaveBeenCalled();
-        });
-    });
-});

@@ -1 +0,0 @@
-export {};

package/dist/api/durable-objects/sharded-tag-cache.spec.js
@@ -1,37 +0,0 @@
-import { describe, expect, it, vi } from "vitest";
-import { DOShardedTagCache } from "./sharded-tag-cache";
-vi.mock("cloudflare:workers", () => ({
-    DurableObject: class {
-        ctx;
-        env;
-        constructor(ctx, env) {
-            this.ctx = ctx;
-            this.env = env;
-        }
-    },
-}));
-const createDOShardedTagCache = () => {
-    const mockState = {
-        waitUntil: vi.fn(),
-        blockConcurrencyWhile: vi.fn().mockImplementation(async (fn) => fn()),
-        storage: {
-            setAlarm: vi.fn(),
-            getAlarm: vi.fn(),
-            sql: {
-                exec: vi.fn().mockImplementation(() => ({
-                    one: vi.fn(),
-                })),
-            },
-        },
-    };
-    // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    return new DOShardedTagCache(mockState, {});
-};
-describe("DOShardedTagCache class", () => {
-    it("should block concurrency while creating the table", async () => {
-        const cache = createDOShardedTagCache();
-        // @ts-expect-error - testing private method
-        expect(cache.ctx.blockConcurrencyWhile).toHaveBeenCalled();
-        expect(cache.sql.exec).toHaveBeenCalledWith(`CREATE TABLE IF NOT EXISTS revalidations (tag TEXT PRIMARY KEY, revalidatedAt INTEGER)`);
-    });
-});

@@ -1 +0,0 @@
-export {};

package/dist/api/overrides/queue/memory-queue.spec.js
@@ -1,76 +0,0 @@
-import { generateMessageGroupId } from "@opennextjs/aws/core/routing/queue.js";
-import { afterEach, beforeAll, describe, expect, it, vi } from "vitest";
-import cache, { DEFAULT_REVALIDATION_TIMEOUT_MS } from "./memory-queue.js";
-vi.mock("./.next/prerender-manifest.json", () => Promise.resolve({ preview: { previewModeId: "id" } }));
-const mockServiceWorkerFetch = vi.fn();
-vi.mock("../../cloudflare-context", () => ({
-    getCloudflareContext: () => ({
-        env: { WORKER_SELF_REFERENCE: { fetch: mockServiceWorkerFetch } },
-    }),
-}));
-const generateMessageBody = ({ host, url }) => ({
-    host,
-    url,
-    eTag: "etag",
-    lastModified: Date.now(),
-});
-describe("MemoryQueue", () => {
-    beforeAll(() => {
-        vi.useFakeTimers();
-        globalThis.internalFetch = vi.fn().mockReturnValue(new Promise((res) => setTimeout(() => res(true), 1)));
-    });
-    afterEach(() => vi.clearAllMocks());
-    it("should process revalidations for a path", async () => {
-        const firstRequest = cache.send({
-            MessageBody: generateMessageBody({ host: "test.local", url: "/test" }),
-            MessageGroupId: generateMessageGroupId("/test"),
-            MessageDeduplicationId: "/test",
-        });
-        vi.advanceTimersByTime(DEFAULT_REVALIDATION_TIMEOUT_MS);
-        await firstRequest;
-        expect(mockServiceWorkerFetch).toHaveBeenCalledTimes(1);
-        const secondRequest = cache.send({
-            MessageBody: generateMessageBody({ host: "test.local", url: "/test" }),
-            MessageGroupId: generateMessageGroupId("/test"),
-            MessageDeduplicationId: "/test",
-        });
-        vi.advanceTimersByTime(1);
-        await secondRequest;
-        expect(mockServiceWorkerFetch).toHaveBeenCalledTimes(2);
-    });
-    it("should process revalidations for multiple paths", async () => {
-        const firstRequest = cache.send({
-            MessageBody: generateMessageBody({ host: "test.local", url: "/test" }),
-            MessageGroupId: generateMessageGroupId("/test"),
-            MessageDeduplicationId: "/test",
-        });
-        vi.advanceTimersByTime(1);
-        await firstRequest;
-        expect(mockServiceWorkerFetch).toHaveBeenCalledTimes(1);
-        const secondRequest = cache.send({
-            MessageBody: generateMessageBody({ host: "test.local", url: "/test" }),
-            MessageGroupId: generateMessageGroupId("/other"),
-            MessageDeduplicationId: "/other",
-        });
-        vi.advanceTimersByTime(1);
-        await secondRequest;
-        expect(mockServiceWorkerFetch).toHaveBeenCalledTimes(2);
-    });
-    it("should de-dupe revalidations", async () => {
-        const requests = [
-            cache.send({
-                MessageBody: generateMessageBody({ host: "test.local", url: "/test" }),
-                MessageGroupId: generateMessageGroupId("/test"),
-                MessageDeduplicationId: "/test",
-            }),
-            cache.send({
-                MessageBody: generateMessageBody({ host: "test.local", url: "/test" }),
-                MessageGroupId: generateMessageGroupId("/test"),
-                MessageDeduplicationId: "/test",
-            }),
-        ];
-        vi.advanceTimersByTime(1);
-        await Promise.all(requests);
-        expect(mockServiceWorkerFetch).toHaveBeenCalledTimes(1);
-    });
-});

@@ -1 +0,0 @@
-export {};

package/dist/api/overrides/queue/queue-cache.spec.js
@@ -1,92 +0,0 @@
-import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
-import queueCache from "./queue-cache";
-const mockedQueue = {
-    name: "mocked-queue",
-    send: vi.fn(),
-};
-const generateMessage = () => ({
-    MessageGroupId: "test",
-    MessageBody: {
-        eTag: "test",
-        url: "test",
-        host: "test",
-        lastModified: Date.now(),
-    },
-    MessageDeduplicationId: "test",
-});
-const mockedPut = vi.fn();
-const mockedMatch = vi.fn().mockReturnValue(null);
-describe("queue-cache", () => {
-    beforeEach(() => {
-        // @ts-ignore
-        globalThis.caches = {
-            open: vi.fn().mockReturnValue({
-                put: mockedPut,
-                match: mockedMatch,
-            }),
-        };
-    });
-    afterEach(() => {
-        vi.resetAllMocks();
-    });
-    test("should send the message to the original queue", async () => {
-        const msg = generateMessage();
-        const queue = queueCache(mockedQueue, {});
-        expect(queue.name).toBe("cached-mocked-queue");
-        await queue.send(msg);
-        expect(mockedQueue.send).toHaveBeenCalledWith(msg);
-    });
-    test("should use the local cache", async () => {
-        const msg = generateMessage();
-        const queue = queueCache(mockedQueue, {});
-        await queue.send(msg);
-        expect(queue.localCache.size).toBe(1);
-        expect(queue.localCache.has(`queue/test/test`)).toBe(true);
-        expect(mockedPut).toHaveBeenCalled();
-        const spiedHas = vi.spyOn(queue.localCache, "has");
-        await queue.send(msg);
-        expect(spiedHas).toHaveBeenCalled();
-        expect(mockedQueue.send).toHaveBeenCalledTimes(1);
-        expect(mockedMatch).toHaveBeenCalledTimes(1);
-    });
-    test("should clear the local cache after 5s", async () => {
-        vi.useFakeTimers();
-        const msg = generateMessage();
-        const queue = queueCache(mockedQueue, {});
-        await queue.send(msg);
-        expect(queue.localCache.size).toBe(1);
-        expect(queue.localCache.has(`queue/test/test`)).toBe(true);
-        vi.advanceTimersByTime(5001);
-        const alteredMsg = generateMessage();
-        alteredMsg.MessageGroupId = "test2";
-        await queue.send(alteredMsg);
-        expect(queue.localCache.size).toBe(1);
-        console.log(queue.localCache);
-        expect(queue.localCache.has(`queue/test2/test`)).toBe(true);
-        expect(queue.localCache.has(`queue/test/test`)).toBe(false);
-        vi.useRealTimers();
-    });
-    test("should use the regional cache if not in local cache", async () => {
-        const msg = generateMessage();
-        const queue = queueCache(mockedQueue, {});
-        await queue.send(msg);
-        expect(mockedMatch).toHaveBeenCalledTimes(1);
-        expect(mockedPut).toHaveBeenCalledTimes(1);
-        expect(queue.localCache.size).toBe(1);
-        expect(queue.localCache.has(`queue/test/test`)).toBe(true);
-        // We need to delete the local cache to test the regional cache
-        queue.localCache.delete(`queue/test/test`);
-        const spiedHas = vi.spyOn(queue.localCache, "has");
-        await queue.send(msg);
-        expect(spiedHas).toHaveBeenCalled();
-        expect(mockedMatch).toHaveBeenCalledTimes(2);
-    });
-    test("should return early if the message is in the regional cache", async () => {
-        const msg = generateMessage();
-        const queue = queueCache(mockedQueue, {});
-        mockedMatch.mockReturnValueOnce(new Response(null, { status: 200 }));
-        const spiedSend = mockedQueue.send;
-        await queue.send(msg);
-        expect(spiedSend).not.toHaveBeenCalled();
-    });
-});

@@ -1 +0,0 @@
-export {};