@opennextjs/cloudflare 0.5.11 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -8
- package/dist/api/cloudflare-context.d.ts +16 -5
- package/dist/api/config.d.ts +16 -43
- package/dist/api/config.js +21 -19
- package/dist/api/durable-objects/queue.d.ts +32 -0
- package/dist/api/durable-objects/queue.js +234 -0
- package/dist/api/durable-objects/queue.spec.js +290 -0
- package/dist/api/durable-objects/sharded-tag-cache.d.ts +7 -0
- package/dist/api/durable-objects/sharded-tag-cache.js +22 -0
- package/dist/api/durable-objects/sharded-tag-cache.spec.js +37 -0
- package/dist/api/overrides/incremental-cache/internal.d.ts +5 -0
- package/dist/api/{kv-cache.d.ts → overrides/incremental-cache/kv-incremental-cache.d.ts} +1 -1
- package/dist/api/{kv-cache.js → overrides/incremental-cache/kv-incremental-cache.js} +5 -5
- package/dist/api/overrides/incremental-cache/r2-incremental-cache.d.ts +17 -0
- package/dist/api/overrides/incremental-cache/r2-incremental-cache.js +61 -0
- package/dist/api/overrides/incremental-cache/regional-cache.d.ts +51 -0
- package/dist/api/overrides/incremental-cache/regional-cache.js +111 -0
- package/dist/api/overrides/queue/do-queue.d.ts +6 -0
- package/dist/api/overrides/queue/do-queue.js +15 -0
- package/dist/api/{memory-queue.d.ts → overrides/queue/memory-queue.d.ts} +3 -3
- package/dist/api/{memory-queue.js → overrides/queue/memory-queue.js} +18 -14
- package/dist/api/overrides/queue/memory-queue.spec.d.ts +1 -0
- package/dist/api/{memory-queue.spec.js → overrides/queue/memory-queue.spec.js} +20 -14
- package/dist/api/overrides/tag-cache/d1-next-tag-cache.d.ts +13 -0
- package/dist/api/overrides/tag-cache/d1-next-tag-cache.js +61 -0
- package/dist/api/{d1-tag-cache.d.ts → overrides/tag-cache/d1-tag-cache.d.ts} +3 -5
- package/dist/api/{d1-tag-cache.js → overrides/tag-cache/d1-tag-cache.js} +22 -29
- package/dist/api/overrides/tag-cache/do-sharded-tag-cache.d.ts +122 -0
- package/dist/api/overrides/tag-cache/do-sharded-tag-cache.js +247 -0
- package/dist/api/overrides/tag-cache/do-sharded-tag-cache.spec.d.ts +1 -0
- package/dist/api/overrides/tag-cache/do-sharded-tag-cache.spec.js +322 -0
- package/dist/cli/args.d.ts +13 -2
- package/dist/cli/args.js +44 -29
- package/dist/cli/build/build.d.ts +5 -1
- package/dist/cli/build/build.js +9 -19
- package/dist/cli/build/bundle-server.js +5 -13
- package/dist/cli/build/open-next/compile-cache-assets-manifest.d.ts +1 -1
- package/dist/cli/build/open-next/compile-cache-assets-manifest.js +4 -6
- package/dist/cli/build/open-next/compileDurableObjects.d.ts +2 -0
- package/dist/cli/build/open-next/compileDurableObjects.js +30 -0
- package/dist/cli/build/open-next/copyCacheAssets.js +1 -1
- package/dist/cli/build/open-next/createServerBundle.d.ts +9 -1
- package/dist/cli/build/open-next/createServerBundle.js +28 -9
- package/dist/cli/build/patches/ast/patch-vercel-og-library.js +1 -1
- package/dist/cli/build/patches/ast/vercel-og.d.ts +5 -5
- package/dist/cli/build/patches/ast/vercel-og.js +1 -1
- package/dist/cli/build/patches/ast/vercel-og.spec.js +1 -1
- package/dist/cli/build/patches/ast/webpack-runtime.js +1 -1
- package/dist/cli/build/patches/ast/webpack-runtime.spec.js +1 -1
- package/dist/cli/build/patches/plugins/build-id.d.ts +2 -2
- package/dist/cli/build/patches/plugins/build-id.js +12 -5
- package/dist/cli/build/patches/plugins/build-id.spec.js +1 -1
- package/dist/cli/build/patches/plugins/dynamic-requires.d.ts +1 -2
- package/dist/cli/build/patches/plugins/dynamic-requires.js +21 -11
- package/dist/cli/build/patches/plugins/eval-manifest.d.ts +2 -2
- package/dist/cli/build/patches/plugins/eval-manifest.js +12 -5
- package/dist/cli/build/patches/plugins/find-dir.d.ts +2 -2
- package/dist/cli/build/patches/plugins/find-dir.js +10 -5
- package/dist/cli/build/patches/plugins/instrumentation.d.ts +2 -5
- package/dist/cli/build/patches/plugins/instrumentation.js +19 -3
- package/dist/cli/build/patches/plugins/instrumentation.spec.js +1 -1
- package/dist/cli/build/patches/plugins/load-manifest.d.ts +2 -2
- package/dist/cli/build/patches/plugins/load-manifest.js +12 -5
- package/dist/cli/build/patches/plugins/next-minimal.d.ts +4 -7
- package/dist/cli/build/patches/plugins/next-minimal.js +31 -15
- package/dist/cli/build/patches/plugins/next-minimal.spec.js +1 -1
- package/dist/cli/build/patches/plugins/patch-depd-deprecations.d.ts +2 -2
- package/dist/cli/build/patches/plugins/patch-depd-deprecations.js +10 -2
- package/dist/cli/build/patches/plugins/patch-depd-deprecations.spec.js +1 -1
- package/dist/cli/build/patches/plugins/require.d.ts +2 -2
- package/dist/cli/build/patches/plugins/require.js +43 -35
- package/dist/cli/build/patches/plugins/res-revalidate.d.ts +3 -0
- package/dist/cli/build/patches/plugins/res-revalidate.js +77 -0
- package/dist/cli/build/patches/plugins/res-revalidate.spec.d.ts +1 -0
- package/dist/cli/build/patches/plugins/res-revalidate.spec.js +141 -0
- package/dist/cli/build/utils/create-config-files.d.ts +2 -2
- package/dist/cli/build/utils/create-config-files.js +3 -3
- package/dist/cli/build/utils/ensure-cf-config.js +3 -13
- package/dist/cli/commands/deploy.d.ts +5 -0
- package/dist/cli/commands/deploy.js +9 -0
- package/dist/cli/commands/populate-cache.d.ts +7 -0
- package/dist/cli/commands/populate-cache.js +78 -0
- package/dist/cli/commands/preview.d.ts +5 -0
- package/dist/cli/commands/preview.js +9 -0
- package/dist/cli/index.js +36 -9
- package/dist/cli/project-options.d.ts +5 -1
- package/dist/cli/templates/worker.d.ts +3 -4
- package/dist/cli/templates/worker.js +30 -18
- package/dist/cli/utils/run-wrangler.d.ts +18 -0
- package/dist/cli/utils/run-wrangler.js +41 -0
- package/package.json +8 -10
- package/templates/open-next.config.ts +1 -1
- package/templates/wrangler.jsonc +2 -2
- package/dist/api/kvCache.d.ts +0 -5
- package/dist/api/kvCache.js +0 -5
- package/dist/cli/build/patches/ast/util.d.ts +0 -50
- package/dist/cli/build/patches/ast/util.js +0 -65
- package/dist/cli/build/patches/ast/util.spec.js +0 -43
- package/dist/cli/build/patches/plugins/content-updater.d.ts +0 -44
- package/dist/cli/build/patches/plugins/content-updater.js +0 -55
- package/dist/cli/build/patches/plugins/fetch-cache-wait-until.d.ts +0 -14
- package/dist/cli/build/patches/plugins/fetch-cache-wait-until.js +0 -40
- package/dist/cli/build/patches/plugins/fetch-cache-wait-until.spec.js +0 -453
- package/dist/cli/templates/shims/node-fs.d.ts +0 -17
- package/dist/cli/templates/shims/node-fs.js +0 -51
- package/dist/cli/templates/shims/throw.d.ts +0 -0
- package/dist/cli/templates/shims/throw.js +0 -2
- /package/dist/api/{memory-queue.spec.d.ts → durable-objects/queue.spec.d.ts} +0 -0
- /package/dist/{cli/build/patches/ast/util.spec.d.ts → api/durable-objects/sharded-tag-cache.spec.d.ts} +0 -0
- /package/dist/{cli/build/patches/plugins/fetch-cache-wait-until.spec.d.ts → api/overrides/incremental-cache/internal.js} +0 -0
|
@@ -0,0 +1,247 @@
|
|
|
1
|
+
import { debug, error } from "@opennextjs/aws/adapters/logger.js";
|
|
2
|
+
import { generateShardId } from "@opennextjs/aws/core/routing/queue.js";
|
|
3
|
+
import { IgnorableError } from "@opennextjs/aws/utils/error.js";
|
|
4
|
+
import { getCloudflareContext } from "../../cloudflare-context";
|
|
5
|
+
// Prefix Next.js uses for "soft" (path-based) tags, e.g. `_N_T_/layout`.
const SOFT_TAG_PREFIX = "_N_T_/";
// Default replica count for soft-tag shards (higher than hard tags — soft tags
// are split out to increase cache hit rate; see `groupTagsByDO`).
export const DEFAULT_SOFT_REPLICAS = 4;
// Default replica count for hard-tag shards.
export const DEFAULT_HARD_REPLICAS = 2;
// Default number of retries for a failing Durable Object `writeTags` call.
export const DEFAULT_WRITE_RETRIES = 3;
// Default number of shards tags are distributed across (`baseShardSize`).
export const DEFAULT_NUM_SHARDS = 4;
|
|
10
|
+
/**
 * Identifier of a single tag-cache Durable Object instance.
 *
 * The id is made of a shard part (`tag-<shardType>;<baseShardId>`) and a
 * replica part. When no explicit `replicaId` is supplied, one is drawn
 * uniformly at random in `[1, numberOfReplicas]` so that reads spread over
 * the available replicas.
 */
export class TagCacheDOId {
  options;
  shardId;
  replicaId;

  constructor(options) {
    this.options = options;
    this.shardId = `tag-${options.shardType};${options.baseShardId}`;
    this.replicaId =
      options.replicaId ?? this.generateRandomNumberBetween(1, options.numberOfReplicas);
  }

  /**
   * Uniform random integer in the inclusive range `[min, max]`.
   */
  generateRandomNumberBetween(min, max) {
    const span = max - min + 1;
    return min + Math.floor(Math.random() * span);
  }

  /**
   * Full Durable Object name: `tag-<type>;<shard>;replica-<n>`.
   */
  get key() {
    return `${this.shardId};replica-${this.replicaId}`;
  }
}
|
|
27
|
+
/**
 * Sharded tag cache backed by Durable Objects.
 *
 * Tags are distributed over `opts.baseShardSize` shards; soft tags
 * (`_N_T_/…`) and hard tags are kept in separate shards. Each shard can
 * optionally be replicated (`opts.enableShardReplication`) to spread read
 * load, and an optional regional cache (Cache API) fronts the Durable
 * Objects to reduce stub calls for `hasBeenRevalidated`.
 */
class ShardedDOTagCache {
    opts;
    // Tag cache mode consumed by the OpenNext adapter.
    mode = "nextMode";
    name = "do-sharded-tag-cache";
    // Replica counts used when shard replication is enabled.
    numSoftReplicas;
    numHardReplicas;
    // Max retries for a failing `writeTags` before falling back to the DLQ.
    maxWriteRetries;
    // Lazily-opened Cache API instance (only when `opts.regionalCache` is set).
    localCache;
    constructor(opts = { baseShardSize: DEFAULT_NUM_SHARDS }) {
        this.opts = opts;
        this.numSoftReplicas = opts.shardReplicationOptions?.numberOfSoftReplicas ?? DEFAULT_SOFT_REPLICAS;
        this.numHardReplicas = opts.shardReplicationOptions?.numberOfHardReplicas ?? DEFAULT_HARD_REPLICAS;
        this.maxWriteRetries = opts.maxWriteRetries ?? DEFAULT_WRITE_RETRIES;
    }
    /**
     * Returns the Durable Object stub addressed by `doId.key`.
     * @throws {IgnorableError} When the `NEXT_TAG_CACHE_DO_SHARDED` binding is missing.
     */
    getDurableObjectStub(doId) {
        const durableObject = getCloudflareContext().env.NEXT_TAG_CACHE_DO_SHARDED;
        if (!durableObject)
            throw new IgnorableError("No durable object binding for cache revalidation");
        const id = durableObject.idFromName(doId.key);
        return durableObject.get(id);
    }
    /**
     * Generates a list of DO ids for the shards and replicas
     * @param tags The tags to generate shards for
     * @param shardType Whether to generate shards for soft or hard tags
     * @param generateAllReplicas Whether to generate all shards or only one
     * @returns An array of TagCacheDOId and tag
     */
    generateDOIdArray({ tags, shardType, generateAllReplicas = false, }) {
        let replicaIndexes = [1];
        const isSoft = shardType === "soft";
        let numReplicas = 1;
        if (this.opts.enableShardReplication) {
            numReplicas = isSoft ? this.numSoftReplicas : this.numHardReplicas;
            // `undefined` lets TagCacheDOId pick a random replica for reads.
            replicaIndexes = generateAllReplicas
                ? Array.from({ length: numReplicas }, (_, i) => i + 1)
                : [undefined];
        }
        return replicaIndexes.flatMap((replicaId) => {
            return tags
                .filter((tag) => (isSoft ? tag.startsWith(SOFT_TAG_PREFIX) : !tag.startsWith(SOFT_TAG_PREFIX)))
                .map((tag) => {
                return {
                    doId: new TagCacheDOId({
                        baseShardId: generateShardId(tag, this.opts.baseShardSize, "shard"),
                        numberOfReplicas: numReplicas,
                        shardType,
                        replicaId,
                    }),
                    tag,
                };
            });
        });
    }
    /**
     * Same tags are guaranteed to be in the same shard
     * @param tags
     * @returns An array of DO ids and tags
     */
    groupTagsByDO({ tags, generateAllReplicas = false }) {
        // Here we'll start by splitting soft tags from hard tags
        // This will greatly increase the cache hit rate for the soft tags (which are the most likely to cause issues because of load)
        const softTags = this.generateDOIdArray({ tags, shardType: "soft", generateAllReplicas });
        const hardTags = this.generateDOIdArray({ tags, shardType: "hard", generateAllReplicas });
        const tagIdCollection = [...softTags, ...hardTags];
        // We then group the tags by DO id
        const tagsByDOId = new Map();
        for (const { doId, tag } of tagIdCollection) {
            const doIdString = doId.key;
            const tagsArray = tagsByDOId.get(doIdString)?.tags ?? [];
            tagsArray.push(tag);
            tagsByDOId.set(doIdString, {
                // We override the doId here, but it should be the same for all tags
                doId,
                tags: tagsArray,
            });
        }
        const result = Array.from(tagsByDOId.values());
        return result;
    }
    /**
     * Resolves the DO binding and the `disableTagCache` dangerous flag.
     * @returns `{ isDisabled: true }` when the binding is missing or the cache is disabled.
     */
    async getConfig() {
        const cfEnv = getCloudflareContext().env;
        const db = cfEnv.NEXT_TAG_CACHE_DO_SHARDED;
        if (!db)
            debug("No Durable object found");
        const isDisabled = !!globalThis.openNextConfig
            .dangerous?.disableTagCache;
        return !db || isDisabled
            ? { isDisabled: true }
            : {
                isDisabled: false,
                db,
            };
    }
    /**
     * This function checks if the tags have been revalidated
     * It is never supposed to throw and in case of error, it will return false
     * @param tags
     * @param lastModified default to `Date.now()`
     * @returns
     */
    async hasBeenRevalidated(tags, lastModified) {
        const { isDisabled } = await this.getConfig();
        if (isDisabled)
            return false;
        try {
            const shardedTagGroups = this.groupTagsByDO({ tags });
            const shardedTagRevalidationOutcomes = await Promise.all(shardedTagGroups.map(async ({ doId, tags }) => {
                const cachedValue = await this.getFromRegionalCache(doId, tags);
                if (cachedValue) {
                    return (await cachedValue.text()) === "true";
                }
                const stub = this.getDurableObjectStub(doId);
                const _hasBeenRevalidated = await stub.hasBeenRevalidated(tags, lastModified);
                //TODO: Do we want to cache the result if it has been revalidated ?
                // If we do so, we risk causing cache MISS even though it has been revalidated elsewhere
                // On the other hand revalidating a tag that is used in a lot of places will cause a lot of requests
                if (!_hasBeenRevalidated) {
                    getCloudflareContext().ctx.waitUntil(this.putToRegionalCache(doId, tags, _hasBeenRevalidated));
                }
                return _hasBeenRevalidated;
            }));
            return shardedTagRevalidationOutcomes.some((result) => result);
        }
        catch (e) {
            error("Error while checking revalidation", e);
            return false;
        }
    }
    /**
     * This function writes the tags to the cache
     * Due to the way shards and regional cache are implemented, the regional cache may not be properly invalidated
     * @param tags
     * @returns
     */
    async writeTags(tags) {
        const { isDisabled } = await this.getConfig();
        if (isDisabled)
            return;
        const shardedTagGroups = this.groupTagsByDO({ tags, generateAllReplicas: true });
        // We want to use the same revalidation time for all tags
        const currentTime = Date.now();
        await Promise.all(shardedTagGroups.map(async ({ doId, tags }) => {
            await this.performWriteTagsWithRetry(doId, tags, currentTime);
        }));
    }
    /**
     * Writes tags to one Durable Object, retrying up to `maxWriteRetries` times.
     * When the retries are exhausted the failure is forwarded to the
     * `NEXT_TAG_CACHE_DO_SHARDED_DLQ` queue (if bound) instead of throwing.
     */
    async performWriteTagsWithRetry(doId, tags, lastModified, retryNumber = 0) {
        try {
            const stub = this.getDurableObjectStub(doId);
            await stub.writeTags(tags, lastModified);
            // Depending on the shards and the tags, deleting from the regional cache will not work for every tag
            await this.deleteRegionalCache(doId, tags);
        }
        catch (e) {
            error("Error while writing tags", e);
            if (retryNumber >= this.maxWriteRetries) {
                error("Error while writing tags, too many retries");
                // Do we want to throw an error here ?
                await getCloudflareContext().env.NEXT_TAG_CACHE_DO_SHARDED_DLQ?.send({
                    failingShardId: doId.key,
                    failingTags: tags,
                    lastModified,
                });
                return;
            }
            await this.performWriteTagsWithRetry(doId, tags, lastModified, retryNumber + 1);
        }
    }
    // Cache API
    /**
     * Lazily opens (and memoizes) the regional Cache API instance.
     * Returns `undefined` when the regional cache is not enabled.
     */
    async getCacheInstance() {
        if (!this.localCache && this.opts.regionalCache) {
            this.localCache = await caches.open("sharded-do-tag-cache");
        }
        return this.localCache;
    }
    /**
     * Builds the regional cache key for a shard — intentionally excludes the
     * replica part so all replicas of a shard share one cache entry.
     */
    async getCacheKey(doId, tags) {
        return new Request(new URL(`shard/${doId.shardId}?tags=${encodeURIComponent(tags.join(";"))}`, "http://local.cache"));
    }
    async getFromRegionalCache(doId, tags) {
        try {
            if (!this.opts.regionalCache)
                return;
            const cache = await this.getCacheInstance();
            if (!cache)
                return;
            const key = await this.getCacheKey(doId, tags);
            return cache.match(key);
        }
        catch (e) {
            error("Error while fetching from regional cache", e);
            return;
        }
    }
    async putToRegionalCache(doId, tags, hasBeenRevalidated) {
        // We never want to crash because of the cache. This method is scheduled
        // via `ctx.waitUntil`, so an uncaught error here would surface as an
        // unhandled rejection — guard it like the other Cache API helpers.
        try {
            if (!this.opts.regionalCache)
                return;
            const cache = await this.getCacheInstance();
            if (!cache)
                return;
            const key = await this.getCacheKey(doId, tags);
            await cache.put(key, new Response(`${hasBeenRevalidated}`, {
                headers: { "cache-control": `max-age=${this.opts.regionalCacheTtlSec ?? 5}` },
            }));
        }
        catch (e) {
            error("Error while putting to regional cache", e);
        }
    }
    async deleteRegionalCache(doId, tags) {
        // We never want to crash because of the cache
        try {
            if (!this.opts.regionalCache)
                return;
            const cache = await this.getCacheInstance();
            if (!cache)
                return;
            const key = await this.getCacheKey(doId, tags);
            await cache.delete(key);
        }
        catch (e) {
            debug("Error while deleting from regional cache", e);
        }
    }
}
export default (opts) => new ShardedDOTagCache(opts);
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,322 @@
|
|
|
1
|
+
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
|
2
|
+
import doShardedTagCache, { DEFAULT_HARD_REPLICAS, DEFAULT_SOFT_REPLICAS, TagCacheDOId, } from "./do-sharded-tag-cache";
|
|
3
|
+
// Mocks for the two RPC methods exposed by the tag-cache Durable Object stub.
const hasBeenRevalidatedMock = vi.fn();
const writeTagsMock = vi.fn();
// Mocks for the Durable Object namespace binding: `idFromName` resolves a name
// to an id, `get` returns a stub exposing the two method mocks above.
const idFromNameMock = vi.fn();
const getMock = vi
    .fn()
    .mockReturnValue({ hasBeenRevalidated: hasBeenRevalidatedMock, writeTags: writeTagsMock });
// `waitUntil` mock that invokes its argument immediately so scheduled work runs
// inside the test. NOTE(review): the production `waitUntil` receives a promise,
// not a callable — confirm this shape is intended.
const waitUntilMock = vi.fn().mockImplementation(async (fn) => fn());
// Mock for the dead-letter queue used when tag writes exhaust their retries.
const sendDLQMock = vi.fn();
// Replace the real Cloudflare context module with one backed by the mocks above.
vi.mock("../../cloudflare-context", () => ({
    getCloudflareContext: () => ({
        env: {
            NEXT_TAG_CACHE_DO_SHARDED: { idFromName: idFromNameMock, get: getMock },
            NEXT_TAG_CACHE_DO_SHARDED_DLQ: {
                send: sendDLQMock,
            },
        },
        ctx: { waitUntil: waitUntilMock },
    }),
}));
|
|
22
|
+
describe("DOShardedTagCache", () => {
|
|
23
|
+
afterEach(() => vi.clearAllMocks());
|
|
24
|
+
describe("generateShardId", () => {
|
|
25
|
+
it("should generate a shardId", () => {
|
|
26
|
+
const cache = doShardedTagCache();
|
|
27
|
+
const expectedResult = [
|
|
28
|
+
{ doId: expect.objectContaining({ shardId: "tag-hard;shard-1" }), tags: ["tag1"] },
|
|
29
|
+
{ doId: expect.objectContaining({ shardId: "tag-hard;shard-2" }), tags: ["tag2"] },
|
|
30
|
+
];
|
|
31
|
+
const result = cache.groupTagsByDO({ tags: ["tag1", "tag2"] });
|
|
32
|
+
expect(result).toEqual(expectedResult);
|
|
33
|
+
expect(result[0]?.doId.key).toBe("tag-hard;shard-1;replica-1");
|
|
34
|
+
expect(result[1]?.doId.key).toBe("tag-hard;shard-2;replica-1");
|
|
35
|
+
});
|
|
36
|
+
it("should group tags by shard", () => {
|
|
37
|
+
const cache = doShardedTagCache();
|
|
38
|
+
const expectedResult = [
|
|
39
|
+
{ doId: expect.objectContaining({ shardId: "tag-hard;shard-1" }), tags: ["tag1", "tag6"] },
|
|
40
|
+
];
|
|
41
|
+
const result = cache.groupTagsByDO({ tags: ["tag1", "tag6"] });
|
|
42
|
+
expect(result).toEqual(expectedResult);
|
|
43
|
+
expect(result[0]?.doId.key).toBe("tag-hard;shard-1;replica-1");
|
|
44
|
+
});
|
|
45
|
+
it("should generate the same shardId for the same tag", () => {
|
|
46
|
+
const cache = doShardedTagCache();
|
|
47
|
+
const firstResult = cache.groupTagsByDO({ tags: ["tag1"] });
|
|
48
|
+
const secondResult = cache.groupTagsByDO({ tags: ["tag1", "tag3", "tag4"] });
|
|
49
|
+
expect(firstResult[0]).toEqual(secondResult[0]);
|
|
50
|
+
});
|
|
51
|
+
it("should split hard and soft tags", () => {
|
|
52
|
+
const cache = doShardedTagCache();
|
|
53
|
+
const expectedResult = [
|
|
54
|
+
{ doId: expect.objectContaining({ shardId: "tag-soft;shard-3" }), tags: ["_N_T_/tag1"] },
|
|
55
|
+
{ doId: expect.objectContaining({ shardId: "tag-hard;shard-1", replicaId: 1 }), tags: ["tag1"] },
|
|
56
|
+
];
|
|
57
|
+
const result = cache.groupTagsByDO({ tags: ["tag1", "_N_T_/tag1"] });
|
|
58
|
+
expect(result).toEqual(expectedResult);
|
|
59
|
+
expect(result[1]?.doId.key).toBe("tag-hard;shard-1;replica-1");
|
|
60
|
+
expect(result[0]?.doId.key).toBe("tag-soft;shard-3;replica-1");
|
|
61
|
+
});
|
|
62
|
+
describe("with shard replication", () => {
|
|
63
|
+
it("should generate all doIds if generateAllReplicas is true", () => {
|
|
64
|
+
const cache = doShardedTagCache({ baseShardSize: 4, enableShardReplication: true });
|
|
65
|
+
const expectedResult = [
|
|
66
|
+
{ doId: expect.objectContaining({ shardId: "tag-soft;shard-3" }), tags: ["_N_T_/tag1"] },
|
|
67
|
+
{ doId: expect.objectContaining({ shardId: "tag-soft;shard-3" }), tags: ["_N_T_/tag1"] },
|
|
68
|
+
{ doId: expect.objectContaining({ shardId: "tag-soft;shard-3" }), tags: ["_N_T_/tag1"] },
|
|
69
|
+
{ doId: expect.objectContaining({ shardId: "tag-soft;shard-3" }), tags: ["_N_T_/tag1"] },
|
|
70
|
+
{ doId: expect.objectContaining({ shardId: "tag-hard;shard-1" }), tags: ["tag1"] },
|
|
71
|
+
{ doId: expect.objectContaining({ shardId: "tag-hard;shard-1" }), tags: ["tag1"] },
|
|
72
|
+
];
|
|
73
|
+
const result = cache.groupTagsByDO({ tags: ["tag1", "_N_T_/tag1"], generateAllReplicas: true });
|
|
74
|
+
expect(result).toEqual(expectedResult);
|
|
75
|
+
});
|
|
76
|
+
it("should generate only one doIds by tag type if generateAllReplicas is false", () => {
|
|
77
|
+
const cache = doShardedTagCache({ baseShardSize: 4, enableShardReplication: true });
|
|
78
|
+
const shardedTagCollection = cache.groupTagsByDO({
|
|
79
|
+
tags: ["tag1", "_N_T_/tag1"],
|
|
80
|
+
generateAllReplicas: false,
|
|
81
|
+
});
|
|
82
|
+
expect(shardedTagCollection.length).toBe(2);
|
|
83
|
+
const firstDOId = shardedTagCollection[0]?.doId;
|
|
84
|
+
const secondDOId = shardedTagCollection[1]?.doId;
|
|
85
|
+
expect(firstDOId?.shardId).toBe("tag-soft;shard-3");
|
|
86
|
+
expect(secondDOId?.shardId).toBe("tag-hard;shard-1");
|
|
87
|
+
// We still need to check if the last part is between the correct boundaries
|
|
88
|
+
expect(firstDOId?.replicaId).toBeGreaterThanOrEqual(1);
|
|
89
|
+
expect(firstDOId?.replicaId).toBeLessThanOrEqual(DEFAULT_SOFT_REPLICAS);
|
|
90
|
+
expect(secondDOId?.replicaId).toBeGreaterThanOrEqual(1);
|
|
91
|
+
expect(secondDOId?.replicaId).toBeLessThanOrEqual(DEFAULT_HARD_REPLICAS);
|
|
92
|
+
});
|
|
93
|
+
});
|
|
94
|
+
});
|
|
95
|
+
describe("hasBeenRevalidated", () => {
|
|
96
|
+
beforeEach(() => {
|
|
97
|
+
globalThis.openNextConfig = {
|
|
98
|
+
dangerous: { disableTagCache: false },
|
|
99
|
+
};
|
|
100
|
+
});
|
|
101
|
+
it("should return false if the cache is disabled", async () => {
|
|
102
|
+
globalThis.openNextConfig = {
|
|
103
|
+
dangerous: { disableTagCache: true },
|
|
104
|
+
};
|
|
105
|
+
const cache = doShardedTagCache();
|
|
106
|
+
const result = await cache.hasBeenRevalidated(["tag1"]);
|
|
107
|
+
expect(result).toBe(false);
|
|
108
|
+
expect(idFromNameMock).not.toHaveBeenCalled();
|
|
109
|
+
});
|
|
110
|
+
it("should return false if stub return false", async () => {
|
|
111
|
+
const cache = doShardedTagCache();
|
|
112
|
+
cache.getFromRegionalCache = vi.fn();
|
|
113
|
+
hasBeenRevalidatedMock.mockImplementationOnce(() => false);
|
|
114
|
+
const result = await cache.hasBeenRevalidated(["tag1"], 123456);
|
|
115
|
+
expect(cache.getFromRegionalCache).toHaveBeenCalled();
|
|
116
|
+
expect(idFromNameMock).toHaveBeenCalled();
|
|
117
|
+
expect(hasBeenRevalidatedMock).toHaveBeenCalled();
|
|
118
|
+
expect(result).toBe(false);
|
|
119
|
+
});
|
|
120
|
+
it("should return true if stub return true", async () => {
|
|
121
|
+
const cache = doShardedTagCache();
|
|
122
|
+
cache.getFromRegionalCache = vi.fn();
|
|
123
|
+
hasBeenRevalidatedMock.mockImplementationOnce(() => true);
|
|
124
|
+
const result = await cache.hasBeenRevalidated(["tag1"], 123456);
|
|
125
|
+
expect(cache.getFromRegionalCache).toHaveBeenCalled();
|
|
126
|
+
expect(idFromNameMock).toHaveBeenCalled();
|
|
127
|
+
expect(hasBeenRevalidatedMock).toHaveBeenCalledWith(["tag1"], 123456);
|
|
128
|
+
expect(result).toBe(true);
|
|
129
|
+
});
|
|
130
|
+
it("should return false if it throws", async () => {
|
|
131
|
+
const cache = doShardedTagCache();
|
|
132
|
+
cache.getFromRegionalCache = vi.fn();
|
|
133
|
+
hasBeenRevalidatedMock.mockImplementationOnce(() => {
|
|
134
|
+
throw new Error("error");
|
|
135
|
+
});
|
|
136
|
+
const result = await cache.hasBeenRevalidated(["tag1"], 123456);
|
|
137
|
+
expect(cache.getFromRegionalCache).toHaveBeenCalled();
|
|
138
|
+
expect(idFromNameMock).toHaveBeenCalled();
|
|
139
|
+
expect(hasBeenRevalidatedMock).toHaveBeenCalled();
|
|
140
|
+
expect(result).toBe(false);
|
|
141
|
+
});
|
|
142
|
+
it("Should return from the cache if it was found there", async () => {
|
|
143
|
+
const cache = doShardedTagCache();
|
|
144
|
+
cache.getFromRegionalCache = vi.fn().mockReturnValueOnce(new Response("true"));
|
|
145
|
+
const result = await cache.hasBeenRevalidated(["tag1"], 123456);
|
|
146
|
+
expect(result).toBe(true);
|
|
147
|
+
expect(idFromNameMock).not.toHaveBeenCalled();
|
|
148
|
+
expect(hasBeenRevalidatedMock).not.toHaveBeenCalled();
|
|
149
|
+
});
|
|
150
|
+
it("should try to put the result in the cache if it was not revalidated", async () => {
|
|
151
|
+
const cache = doShardedTagCache();
|
|
152
|
+
cache.getFromRegionalCache = vi.fn();
|
|
153
|
+
cache.putToRegionalCache = vi.fn();
|
|
154
|
+
hasBeenRevalidatedMock.mockImplementationOnce(() => false);
|
|
155
|
+
const result = await cache.hasBeenRevalidated(["tag1"], 123456);
|
|
156
|
+
expect(result).toBe(false);
|
|
157
|
+
expect(waitUntilMock).toHaveBeenCalled();
|
|
158
|
+
expect(cache.putToRegionalCache).toHaveBeenCalled();
|
|
159
|
+
});
|
|
160
|
+
it("should call all the durable object instance", async () => {
|
|
161
|
+
const cache = doShardedTagCache();
|
|
162
|
+
cache.getFromRegionalCache = vi.fn();
|
|
163
|
+
const result = await cache.hasBeenRevalidated(["tag1", "tag2"], 123456);
|
|
164
|
+
expect(result).toBe(false);
|
|
165
|
+
expect(idFromNameMock).toHaveBeenCalledTimes(2);
|
|
166
|
+
expect(hasBeenRevalidatedMock).toHaveBeenCalledTimes(2);
|
|
167
|
+
});
|
|
168
|
+
});
|
|
169
|
+
describe("writeTags", () => {
|
|
170
|
+
beforeEach(() => {
|
|
171
|
+
globalThis.openNextConfig = {
|
|
172
|
+
dangerous: { disableTagCache: false },
|
|
173
|
+
};
|
|
174
|
+
vi.useFakeTimers();
|
|
175
|
+
vi.setSystemTime(1000);
|
|
176
|
+
});
|
|
177
|
+
afterEach(() => {
|
|
178
|
+
vi.useRealTimers();
|
|
179
|
+
});
|
|
180
|
+
it("should return early if the cache is disabled", async () => {
|
|
181
|
+
globalThis.openNextConfig = {
|
|
182
|
+
dangerous: { disableTagCache: true },
|
|
183
|
+
};
|
|
184
|
+
const cache = doShardedTagCache();
|
|
185
|
+
await cache.writeTags(["tag1"]);
|
|
186
|
+
expect(idFromNameMock).not.toHaveBeenCalled();
|
|
187
|
+
expect(writeTagsMock).not.toHaveBeenCalled();
|
|
188
|
+
});
|
|
189
|
+
it("should write the tags to the cache", async () => {
|
|
190
|
+
const cache = doShardedTagCache();
|
|
191
|
+
await cache.writeTags(["tag1"]);
|
|
192
|
+
expect(idFromNameMock).toHaveBeenCalled();
|
|
193
|
+
expect(writeTagsMock).toHaveBeenCalled();
|
|
194
|
+
expect(writeTagsMock).toHaveBeenCalledWith(["tag1"], 1000);
|
|
195
|
+
});
|
|
196
|
+
it("should write the tags to the cache for multiple shards", async () => {
|
|
197
|
+
const cache = doShardedTagCache();
|
|
198
|
+
await cache.writeTags(["tag1", "tag2"]);
|
|
199
|
+
expect(idFromNameMock).toHaveBeenCalledTimes(2);
|
|
200
|
+
expect(writeTagsMock).toHaveBeenCalledTimes(2);
|
|
201
|
+
expect(writeTagsMock).toHaveBeenCalledWith(["tag1"], 1000);
|
|
202
|
+
expect(writeTagsMock).toHaveBeenCalledWith(["tag2"], 1000);
|
|
203
|
+
});
|
|
204
|
+
it('should write to all the replicated shards if "generateAllReplicas" is true', async () => {
|
|
205
|
+
const cache = doShardedTagCache({ baseShardSize: 4, enableShardReplication: true });
|
|
206
|
+
await cache.writeTags(["tag1", "_N_T_/tag1"]);
|
|
207
|
+
expect(idFromNameMock).toHaveBeenCalledTimes(6);
|
|
208
|
+
expect(writeTagsMock).toHaveBeenCalledTimes(6);
|
|
209
|
+
expect(writeTagsMock).toHaveBeenCalledWith(["tag1"], 1000);
|
|
210
|
+
expect(writeTagsMock).toHaveBeenCalledWith(["_N_T_/tag1"], 1000);
|
|
211
|
+
});
|
|
212
|
+
it("should call deleteRegionalCache", async () => {
|
|
213
|
+
const cache = doShardedTagCache();
|
|
214
|
+
cache.deleteRegionalCache = vi.fn();
|
|
215
|
+
await cache.writeTags(["tag1"]);
|
|
216
|
+
expect(cache.deleteRegionalCache).toHaveBeenCalled();
|
|
217
|
+
expect(cache.deleteRegionalCache).toHaveBeenCalledWith(expect.objectContaining({ key: "tag-hard;shard-1;replica-1" }), ["tag1"]);
|
|
218
|
+
// expect(cache.deleteRegionalCache).toHaveBeenCalledWith("tag-hard;shard-1;replica-1", ["tag1"]);
|
|
219
|
+
});
|
|
220
|
+
});
|
|
221
|
+
describe("getCacheInstance", () => {
|
|
222
|
+
it("should return undefined by default", async () => {
|
|
223
|
+
const cache = doShardedTagCache();
|
|
224
|
+
expect(await cache.getCacheInstance()).toBeUndefined();
|
|
225
|
+
});
|
|
226
|
+
it("should try to return the cache instance if regional cache is enabled", async () => {
|
|
227
|
+
// @ts-expect-error - Defined on cloudflare context
|
|
228
|
+
globalThis.caches = {
|
|
229
|
+
open: vi.fn().mockResolvedValue("cache"),
|
|
230
|
+
};
|
|
231
|
+
const cache = doShardedTagCache({ baseShardSize: 4, regionalCache: true });
|
|
232
|
+
expect(cache.localCache).toBeUndefined();
|
|
233
|
+
expect(await cache.getCacheInstance()).toBe("cache");
|
|
234
|
+
expect(cache.localCache).toBe("cache");
|
|
235
|
+
// @ts-expect-error - Defined on cloudflare context
|
|
236
|
+
globalThis.caches = undefined;
|
|
237
|
+
});
|
|
238
|
+
});
|
|
239
|
+
describe("getFromRegionalCache", () => {
|
|
240
|
+
it("should return undefined if regional cache is disabled", async () => {
|
|
241
|
+
const cache = doShardedTagCache();
|
|
242
|
+
const doId = new TagCacheDOId({
|
|
243
|
+
baseShardId: "shard-1",
|
|
244
|
+
numberOfReplicas: 1,
|
|
245
|
+
shardType: "hard",
|
|
246
|
+
});
|
|
247
|
+
expect(await cache.getFromRegionalCache(doId, ["tag1"])).toBeUndefined();
|
|
248
|
+
});
|
|
249
|
+
it("should call .match on the cache", async () => {
|
|
250
|
+
// @ts-expect-error - Defined on cloudflare context
|
|
251
|
+
globalThis.caches = {
|
|
252
|
+
open: vi.fn().mockResolvedValue({
|
|
253
|
+
match: vi.fn().mockResolvedValue("response"),
|
|
254
|
+
}),
|
|
255
|
+
};
|
|
256
|
+
const cache = doShardedTagCache({ baseShardSize: 4, regionalCache: true });
|
|
257
|
+
const doId = new TagCacheDOId({
|
|
258
|
+
baseShardId: "shard-1",
|
|
259
|
+
numberOfReplicas: 1,
|
|
260
|
+
shardType: "hard",
|
|
261
|
+
});
|
|
262
|
+
expect(await cache.getFromRegionalCache(doId, ["tag1"])).toBe("response");
|
|
263
|
+
// @ts-expect-error - Defined on cloudfare context
|
|
264
|
+
globalThis.caches = undefined;
|
|
265
|
+
});
|
|
266
|
+
});
|
|
267
|
+
describe("getCacheKey", () => {
|
|
268
|
+
it("should return the cache key without the random part", async () => {
|
|
269
|
+
const cache = doShardedTagCache();
|
|
270
|
+
const doId1 = new TagCacheDOId({ baseShardId: "shard-0", numberOfReplicas: 1, shardType: "hard" });
|
|
271
|
+
const reqKey = await cache.getCacheKey(doId1, ["_N_T_/tag1"]);
|
|
272
|
+
expect(reqKey.url).toBe("http://local.cache/shard/tag-hard;shard-0?tags=_N_T_%2Ftag1");
|
|
273
|
+
const doId2 = new TagCacheDOId({
|
|
274
|
+
baseShardId: "shard-1",
|
|
275
|
+
numberOfReplicas: 1,
|
|
276
|
+
shardType: "hard",
|
|
277
|
+
});
|
|
278
|
+
const reqKey2 = await cache.getCacheKey(doId2, ["tag1"]);
|
|
279
|
+
expect(reqKey2.url).toBe("http://local.cache/shard/tag-hard;shard-1?tags=tag1");
|
|
280
|
+
});
|
|
281
|
+
});
|
|
282
|
+
describe("performWriteTagsWithRetry", () => {
|
|
283
|
+
it("should retry if it fails", async () => {
|
|
284
|
+
vi.useFakeTimers();
|
|
285
|
+
vi.setSystemTime(1000);
|
|
286
|
+
const cache = doShardedTagCache();
|
|
287
|
+
writeTagsMock.mockImplementationOnce(() => {
|
|
288
|
+
throw new Error("error");
|
|
289
|
+
});
|
|
290
|
+
const spiedFn = vi.spyOn(cache, "performWriteTagsWithRetry");
|
|
291
|
+
const doId = new TagCacheDOId({
|
|
292
|
+
baseShardId: "shard-1",
|
|
293
|
+
numberOfReplicas: 1,
|
|
294
|
+
shardType: "hard",
|
|
295
|
+
});
|
|
296
|
+
await cache.performWriteTagsWithRetry(doId, ["tag1"], Date.now());
|
|
297
|
+
expect(writeTagsMock).toHaveBeenCalledTimes(2);
|
|
298
|
+
expect(spiedFn).toHaveBeenCalledTimes(2);
|
|
299
|
+
expect(spiedFn).toHaveBeenCalledWith(doId, ["tag1"], 1000, 1);
|
|
300
|
+
expect(sendDLQMock).not.toHaveBeenCalled();
|
|
301
|
+
vi.useRealTimers();
|
|
302
|
+
});
|
|
303
|
+
it("should stop retrying after 3 times", async () => {
|
|
304
|
+
vi.useFakeTimers();
|
|
305
|
+
vi.setSystemTime(1000);
|
|
306
|
+
const cache = doShardedTagCache();
|
|
307
|
+
writeTagsMock.mockImplementationOnce(() => {
|
|
308
|
+
throw new Error("error");
|
|
309
|
+
});
|
|
310
|
+
const spiedFn = vi.spyOn(cache, "performWriteTagsWithRetry");
|
|
311
|
+
await cache.performWriteTagsWithRetry(new TagCacheDOId({ baseShardId: "shard-1", numberOfReplicas: 1, shardType: "hard" }), ["tag1"], Date.now(), 3);
|
|
312
|
+
expect(writeTagsMock).toHaveBeenCalledTimes(1);
|
|
313
|
+
expect(spiedFn).toHaveBeenCalledTimes(1);
|
|
314
|
+
expect(sendDLQMock).toHaveBeenCalledWith({
|
|
315
|
+
failingShardId: "tag-hard;shard-1;replica-1",
|
|
316
|
+
failingTags: ["tag1"],
|
|
317
|
+
lastModified: 1000,
|
|
318
|
+
});
|
|
319
|
+
vi.useRealTimers();
|
|
320
|
+
});
|
|
321
|
+
});
|
|
322
|
+
});
|
package/dist/cli/args.d.ts
CHANGED
|
@@ -1,6 +1,17 @@
|
|
|
1
|
-
|
|
1
|
+
import type { WranglerTarget } from "./utils/run-wrangler.js";
|
|
2
|
+
export type Arguments = ({
|
|
3
|
+
command: "build";
|
|
2
4
|
skipNextBuild: boolean;
|
|
3
5
|
skipWranglerConfigCheck: boolean;
|
|
4
|
-
outputDir?: string;
|
|
5
6
|
minify: boolean;
|
|
7
|
+
} | {
|
|
8
|
+
command: "preview" | "deploy";
|
|
9
|
+
passthroughArgs: string[];
|
|
10
|
+
} | {
|
|
11
|
+
command: "populateCache";
|
|
12
|
+
target: WranglerTarget;
|
|
13
|
+
environment?: string;
|
|
14
|
+
}) & {
|
|
15
|
+
outputDir?: string;
|
|
6
16
|
};
|
|
17
|
+
export declare function getArgs(): Arguments;
|
package/dist/cli/args.js
CHANGED
|
@@ -1,40 +1,55 @@
|
|
|
1
1
|
import { mkdirSync, statSync } from "node:fs";
|
|
2
2
|
import { resolve } from "node:path";
|
|
3
3
|
import { parseArgs } from "node:util";
|
|
4
|
+
import { getWranglerEnvironmentFlag, isWranglerTarget } from "./utils/run-wrangler.js";
|
|
4
5
|
export function getArgs() {
|
|
5
|
-
const {
|
|
6
|
+
const { positionals, values } = parseArgs({
|
|
6
7
|
options: {
|
|
7
|
-
skipBuild: {
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
},
|
|
12
|
-
output: {
|
|
13
|
-
type: "string",
|
|
14
|
-
short: "o",
|
|
15
|
-
},
|
|
16
|
-
noMinify: {
|
|
17
|
-
type: "boolean",
|
|
18
|
-
default: false,
|
|
19
|
-
},
|
|
20
|
-
skipWranglerConfigCheck: {
|
|
21
|
-
type: "boolean",
|
|
22
|
-
default: false,
|
|
23
|
-
},
|
|
8
|
+
skipBuild: { type: "boolean", short: "s", default: false },
|
|
9
|
+
output: { type: "string", short: "o" },
|
|
10
|
+
noMinify: { type: "boolean", default: false },
|
|
11
|
+
skipWranglerConfigCheck: { type: "boolean", default: false },
|
|
24
12
|
},
|
|
25
|
-
allowPositionals:
|
|
26
|
-
})
|
|
27
|
-
const outputDir = output ? resolve(output) : undefined;
|
|
28
|
-
if (outputDir)
|
|
13
|
+
allowPositionals: true,
|
|
14
|
+
});
|
|
15
|
+
const outputDir = values.output ? resolve(values.output) : undefined;
|
|
16
|
+
if (outputDir)
|
|
29
17
|
assertDirArg(outputDir, "output", true);
|
|
18
|
+
const passthroughArgs = getPassthroughArgs();
|
|
19
|
+
switch (positionals[0]) {
|
|
20
|
+
case "build":
|
|
21
|
+
return {
|
|
22
|
+
command: "build",
|
|
23
|
+
outputDir,
|
|
24
|
+
skipNextBuild: values.skipBuild || ["1", "true", "yes"].includes(String(process.env.SKIP_NEXT_APP_BUILD)),
|
|
25
|
+
skipWranglerConfigCheck: values.skipWranglerConfigCheck ||
|
|
26
|
+
["1", "true", "yes"].includes(String(process.env.SKIP_WRANGLER_CONFIG_CHECK)),
|
|
27
|
+
minify: !values.noMinify,
|
|
28
|
+
};
|
|
29
|
+
case "preview":
|
|
30
|
+
case "deploy":
|
|
31
|
+
return {
|
|
32
|
+
command: positionals[0],
|
|
33
|
+
outputDir,
|
|
34
|
+
passthroughArgs,
|
|
35
|
+
};
|
|
36
|
+
case "populateCache":
|
|
37
|
+
if (!isWranglerTarget(positionals[1])) {
|
|
38
|
+
throw new Error(`Error: invalid target for populating the cache, expected 'local' | 'remote'`);
|
|
39
|
+
}
|
|
40
|
+
return {
|
|
41
|
+
command: "populateCache",
|
|
42
|
+
outputDir,
|
|
43
|
+
target: positionals[1],
|
|
44
|
+
environment: getWranglerEnvironmentFlag(passthroughArgs),
|
|
45
|
+
};
|
|
46
|
+
default:
|
|
47
|
+
throw new Error("Error: invalid command, expected 'build' | 'preview' | 'deploy' | 'populateCache'");
|
|
30
48
|
}
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
["1", "true", "yes"].includes(String(process.env.SKIP_WRANGLER_CONFIG_CHECK)),
|
|
36
|
-
minify: !noMinify,
|
|
37
|
-
};
|
|
49
|
+
}
|
|
50
|
+
function getPassthroughArgs() {
|
|
51
|
+
const passthroughPos = process.argv.indexOf("--");
|
|
52
|
+
return passthroughPos === -1 ? [] : process.argv.slice(passthroughPos + 1);
|
|
38
53
|
}
|
|
39
54
|
function assertDirArg(path, argName, make) {
|
|
40
55
|
let dirStats;
|