wrangler 2.6.0 → 2.6.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -130,41 +130,43 @@ var init_environment_polyfills = __esm({
 
  // ../pages-shared/environment-polyfills/miniflare-tre.ts
  var miniflare_tre_exports = {};
- import {
- fetch as miniflareFetch,
- Headers as MiniflareHeaders,
- Request as MiniflareRequest,
- Response as MiniflareResponse
- } from "@miniflare/tre";
+ __export(miniflare_tre_exports, {
+ default: () => miniflare_tre_default
+ });
+ var miniflare_tre_default;
  var init_miniflare_tre = __esm({
  "../pages-shared/environment-polyfills/miniflare-tre.ts"() {
  init_environment_polyfills();
- polyfill({
- fetch: miniflareFetch,
- Headers: MiniflareHeaders,
- Request: MiniflareRequest,
- Response: MiniflareResponse
- });
+ miniflare_tre_default = async () => {
+ const mf = await import("@miniflare/tre");
+ polyfill({
+ fetch: mf.fetch,
+ Headers: mf.Headers,
+ Request: mf.Request,
+ Response: mf.Response
+ });
+ };
  }
  });
 
  // ../pages-shared/environment-polyfills/miniflare.ts
  var miniflare_exports = {};
- import {
- fetch as miniflareFetch2,
- Headers as MiniflareHeaders2,
- Request as MiniflareRequest2,
- Response as MiniflareResponse2
- } from "@miniflare/core";
+ __export(miniflare_exports, {
+ default: () => miniflare_default
+ });
+ var miniflare_default;
  var init_miniflare = __esm({
  "../pages-shared/environment-polyfills/miniflare.ts"() {
  init_environment_polyfills();
- polyfill({
- fetch: miniflareFetch2,
- Headers: MiniflareHeaders2,
- Request: MiniflareRequest2,
- Response: MiniflareResponse2
- });
+ miniflare_default = async () => {
+ const mf = await import("@miniflare/core");
+ polyfill({
+ fetch: mf.fetch,
+ Headers: mf.Headers,
+ Request: mf.Request,
+ Response: mf.Response
+ });
+ };
  }
  });
 
@@ -786,8 +788,8 @@ import {
  Log as MiniflareLog,
  LogLevel as MiniflareLogLevel,
  Miniflare,
- Request as MiniflareRequest3,
- Response as MiniflareResponse3
+ Request as MiniflareRequest,
+ Response as MiniflareResponse
  } from "miniflare";
 
  // ../../node_modules/yargs/lib/platform-shims/esm.mjs
@@ -6066,11 +6068,8 @@ async function generateASSETSBinding(options) {
  };
  }
  async function generateAssetsFetch(directory, log, tre) {
- if (tre) {
- await Promise.resolve().then(() => (init_miniflare_tre(), miniflare_tre_exports));
- } else {
- await Promise.resolve().then(() => (init_miniflare(), miniflare_exports));
- }
+ const polyfill2 = tre ? (await Promise.resolve().then(() => (init_miniflare_tre(), miniflare_tre_exports))).default : (await Promise.resolve().then(() => (init_miniflare(), miniflare_exports))).default;
+ await polyfill2();
  const miniflare = tre ? await import("@miniflare/tre") : await import("@miniflare/core");
  const Request = miniflare.Request;
  const { generateHandler: generateHandler2, parseQualityWeightedList: parseQualityWeightedList2 } = await Promise.resolve().then(() => (init_handler(), handler_exports));
@@ -6234,12 +6233,12 @@ async function main() {
  namespace.get = (id) => {
  const stub = new DurableObjectStub(factory, id);
  stub.fetch = (...reqArgs) => {
- const requestFromArgs = new MiniflareRequest3(...reqArgs);
+ const requestFromArgs = new MiniflareRequest(...reqArgs);
  const url = new URL(requestFromArgs.url);
  url.host = host;
  if (port !== void 0)
  url.port = port.toString();
- const request = new MiniflareRequest3(
+ const request = new MiniflareRequest(
  url.toString(),
  requestFromArgs
  );
@@ -6299,7 +6298,7 @@ async function main() {
  }`,
  serviceBindings: {
  DO: async (request) => {
- request = new MiniflareRequest3(request);
+ request = new MiniflareRequest(request);
  const name = request.headers.get("x-miniflare-durable-object-name");
  const idString = request.headers.get(
  "x-miniflare-durable-object-id"
@@ -6307,7 +6306,7 @@ async function main() {
  request.headers.delete("x-miniflare-durable-object-name");
  request.headers.delete("x-miniflare-durable-object-id");
  if (!name || !idString) {
- return new MiniflareResponse3(
+ return new MiniflareResponse(
  "[durable-object-proxy-err] Missing `x-miniflare-durable-object-name` or `x-miniflare-durable-object-id` headers.",
  { status: 400 }
  );
@@ -6315,14 +6314,14 @@ async function main() {
  const namespace = await mf?.getDurableObjectNamespace(name);
  const id = namespace?.idFromString(idString);
  if (!id) {
- return new MiniflareResponse3(
+ return new MiniflareResponse(
  "[durable-object-proxy-err] Could not generate an ID. Possibly due to a mismatched DO name and ID?",
  { status: 500 }
  );
  }
  const stub = namespace?.get(id);
  if (!stub) {
- return new MiniflareResponse3(
+ return new MiniflareResponse(
  "[durable-object-proxy-err] Could not generate a stub. Possibly due to a mismatched DO name and ID?",
  { status: 500 }
  );
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "wrangler",
- "version": "2.6.0",
+ "version": "2.6.2",
  "description": "Command-line interface for all things Cloudflare Workers",
  "keywords": [
  "wrangler",
@@ -31,6 +31,7 @@ describe("wrangler", () => {
  "wrangler
 
  Commands:
+ wrangler docs [command] 📚 Open wrangler's docs in your browser
  wrangler init [name] 📥 Create a wrangler.toml configuration file
  wrangler dev [script] 👂 Start a local server for developing your worker
  wrangler publish [script] 🆙 Publish your Worker to Cloudflare.
@@ -77,6 +78,7 @@ describe("wrangler", () => {
  wrangler
 
  Commands:
+ wrangler docs [command] 📚 Open wrangler's docs in your browser
  wrangler init [name] 📥 Create a wrangler.toml configuration file
  wrangler dev [script] 👂 Start a local server for developing your worker
  wrangler publish [script] 🆙 Publish your Worker to Cloudflare.
@@ -2400,6 +2400,42 @@ describe("init", () => {
  });
  });
 
+ it("should fail on init --from-dash on non-existent worker name", async () => {
+ setMockResponse(
+ `/accounts/:accountId/workers/services/:scriptName`,
+ "GET",
+ () => mockServiceMetadata
+ );
+ setMockFetchDashScript({
+ accountId: "LCARS",
+ fromDashScriptName: "memory-crystal",
+ environment: mockServiceMetadata.default_environment.environment,
+ mockResponse: mockDashboardScript,
+ });
+ mockConfirm(
+ {
+ text: "Would you like to use git to manage this Worker?",
+ result: false,
+ },
+ {
+ text: "Would you like to use TypeScript?",
+ result: true,
+ },
+ {
+ text: "No package.json found. Would you like to create one?",
+ result: true,
+ },
+ {
+ text: "Would you like to install the type definitions for Workers into your package.json?",
+ result: true,
+ }
+ );
+
+ await expect(
+ runWrangler("init isolinear-optical-chip --from-dash i-dont-exist")
+ ).rejects.toThrowError();
+ });
+
  it("should download source script from dashboard w/ out positional <name>", async () => {
  mockSupportingDashRequests({
  expectedAccountId: "LCARS",
@@ -335,6 +335,7 @@ describe("pages", () => {
  --commit-hash The SHA to attach to this deployment [string]
  --commit-message The commit message to attach to this deployment [string]
  --commit-dirty Whether or not the workspace should be considered dirty for this deployment [boolean]
+ --skip-caching Skip asset caching which speeds up builds [boolean]
 
  🚧 'wrangler pages <command>' is a beta command. Please report any issues to https://github.com/cloudflare/wrangler2/issues/new/choose"
  `);
@@ -44,8 +44,13 @@ export async function performApiFetch(
  logger.debug(
  `-- START CF API REQUEST: ${method} ${getCloudflareAPIBaseURL()}${resource}${queryString}`
  );
- logger.debug("HEADERS:", JSON.stringify(headers, null, 2));
- logger.debug("INIT:", JSON.stringify(init, null, 2));
+ const logHeaders = cloneHeaders(headers);
+ delete logHeaders["Authorization"];
+ logger.debug("HEADERS:", JSON.stringify(logHeaders, null, 2));
+ logger.debug(
+ "INIT:",
+ JSON.stringify({ ...init, headers: logHeaders }, null, 2)
+ );
  logger.debug("-- END CF API REQUEST");
  return await fetch(`${getCloudflareAPIBaseURL()}${resource}${queryString}`, {
  method,
@@ -83,7 +88,9 @@ export async function fetchInternal<ResponseType>(
  response.statusText,
  response.status
  );
- logger.debug("HEADERS:", JSON.stringify(response.headers, null, 2));
+ const logHeaders = cloneHeaders(response.headers);
+ delete logHeaders["Authorization"];
+ logger.debug("HEADERS:", JSON.stringify(logHeaders, null, 2));
  logger.debug("RESPONSE:", jsonText);
  logger.debug("-- END CF API RESPONSE");
 
@@ -166,7 +166,8 @@ export function printBindings(bindings: CfWorkerInit["bindings"]) {
  if (database_name) {
  databaseValue = `${database_name} (${database_id})`;
  }
- if (preview_database_id) {
+ //database_id is local when running `wrangler dev --local`
+ if (preview_database_id && database_id !== "local") {
  databaseValue += `, Preview: (${preview_database_id})`;
  }
  return {
@@ -102,8 +102,11 @@ export const ApplyHandler = withConfig<BaseSqlExecuteArgs>(
  if (!ok) return;
  }
 
- render(<Text>🕒 Creating backup...</Text>);
- await createBackup(accountId, databaseInfo.uuid);
+ // don't backup prod db when applying migrations locally
+ if (!local) {
+ render(<Text>🕒 Creating backup...</Text>);
+ await createBackup(accountId, databaseInfo.uuid);
+ }
 
  for (const migration of unappliedMigrations) {
  let query = fs.readFileSync(
package/src/dev.tsx CHANGED
@@ -442,7 +442,7 @@ export async function startDev(args: StartDevOptions) {
 
  // eslint-disable-next-line no-inner-declarations
  async function getDevReactElement(configParam: Config) {
- const { assetPaths, bindings } = await getBindingsAndAssetPaths(
+ const { assetPaths, bindings } = getBindingsAndAssetPaths(
  args,
  configParam
  );
@@ -559,7 +559,7 @@ export async function startApiDev(args: StartDevOptions) {
 
  // eslint-disable-next-line no-inner-declarations
  async function getDevServer(configParam: Config) {
- const { assetPaths, bindings } = await getBindingsAndAssetPaths(
+ const { assetPaths, bindings } = getBindingsAndAssetPaths(
  args,
  configParam
  );
@@ -798,14 +798,11 @@ async function validateDevServerSettings(
  };
  }
 
- async function getBindingsAndAssetPaths(
- args: StartDevOptions,
- configParam: Config
- ) {
+ function getBindingsAndAssetPaths(args: StartDevOptions, configParam: Config) {
  const cliVars = collectKeyValues(args.var);
 
  // now log all available bindings into the terminal
- const bindings = await getBindings(configParam, args.env, {
+ const bindings = getBindings(configParam, args.env, args.local ?? false, {
  kv: args.kv,
  vars: { ...args.vars, ...cliVars },
  durableObjects: args.durableObjects,
@@ -832,11 +829,12 @@ async function getBindingsAndAssetPaths(
  return { assetPaths, bindings };
  }
 
- async function getBindings(
+ function getBindings(
  configParam: Config,
  env: string | undefined,
+ local: boolean,
  args: AdditionalDevProps
- ): Promise<CfWorkerInit["bindings"]> {
+ ): CfWorkerInit["bindings"] {
  const bindings = {
  kv_namespaces: [
  ...(configParam.kv_namespaces || []).map(
@@ -906,6 +904,13 @@ async function getBindings(
  logfwdr: configParam.logfwdr,
  d1_databases: identifyD1BindingsAsBeta([
  ...(configParam.d1_databases ?? []).map((d1Db) => {
+ //in local dev, bindings don't matter
+ if (local) {
+ return {
+ ...d1Db,
+ database_id: "local",
+ };
+ }
  if (!d1Db.preview_database_id) {
  throw new Error(
  `In development, you should use a separate D1 database than the one you'd use in production. Please create a new D1 database with "wrangler d1 create <name>" and add its id as preview_database_id to the d1_database "${d1Db.binding}" in your wrangler.toml`
@@ -0,0 +1,94 @@
+ import { printWranglerBanner } from "..";
+ import { readConfig } from "../config";
+ import { logger } from "../logger";
+ import * as metrics from "../metrics";
+ import openInBrowser from "../open-in-browser";
+
+ import type {
+ CommonYargsOptions,
+ YargsOptionsToInterface,
+ } from "../yargs-types";
+ import type { ArgumentsCamelCase, Argv } from "yargs";
+
+ const argToUrlHash = {
+ init: "init",
+ generate: "generate",
+ dev: "dev",
+ publish: "publish",
+ delete: "delete",
+ "kv:namespace": "kvnamespace",
+ "kv:key": "kvkey",
+ "kv:bulk": "kvbulk",
+ "r2 bucket": "r2-bucket",
+ "r2 object": "r2-object",
+ secret: "secret",
+ "secret:bulk": "secretbulk",
+ tail: "tail",
+ pages: "pages",
+ login: "login",
+ logout: "logout",
+ whoami: "whoami",
+ types: "types",
+ deployments: "deployments",
+ };
+
+ export function docsOptions(yargs: Argv<CommonYargsOptions>) {
+ return yargs.positional("command", {
+ describe: "Enter the wrangler command you want to know more about",
+ type: "string",
+ // requiresArg: true,
+ choices: [
+ "init",
+ "dev",
+ "publish",
+ "delete",
+ "tail",
+ "secret",
+ "secret:bulk",
+ "kv:namespace",
+ "kv:key",
+ "kv:bulk",
+ "pages",
+ // "queues", //TODO: Undocumented
+ "r2 object",
+ "r2 bucket",
+ // "dispatch-namespace", // TODO: Undocumented - Workers for Platforms
+ // "d1", //TODO: Undocumented
+ // "pubsub", //TODO: Undocumented
+ "login",
+ "logout",
+ "whoami",
+ "types",
+ "deployments",
+ "api",
+ ],
+ });
+ }
+
+ type DocsArgs = YargsOptionsToInterface<typeof docsOptions>;
+
+ function isValidParam(k: string): k is keyof typeof argToUrlHash {
+ return k in argToUrlHash;
+ }
+
+ export async function docsHandler(args: ArgumentsCamelCase<DocsArgs>) {
+ let urlToOpen =
+ "https://developers.cloudflare.com/workers/wrangler/commands/";
+
+ if (args.command === "api") {
+ //if api, take them to the API docs
+ urlToOpen = "https://developers.cloudflare.com/workers/wrangler/api/";
+ } else if (args.command && isValidParam(args.command)) {
+ //otherwise, they get the wrangler commands page
+ urlToOpen += `#${argToUrlHash[args.command]}`;
+ }
+
+ await printWranglerBanner();
+
+ logger.log(`Opening a link in your default browser: ${urlToOpen}`);
+ await openInBrowser(urlToOpen);
+ const config = readConfig(undefined, {});
+ await metrics.sendMetricsEvent("view docs", {
+ sendMetrics: config.send_metrics,
+ });
+ }
package/src/index.tsx CHANGED
@@ -26,6 +26,7 @@ import {
  } from "./deprecated";
  import { devHandler, devOptions } from "./dev";
  import { workerNamespaceCommands } from "./dispatch-namespace";
+ import { docsHandler, docsOptions } from "./docs";
  import { initHandler, initOptions } from "./init";
  import { kvNamespace, kvKey, kvBulk } from "./kv";
  import { logBuildFailure, logger } from "./logger";
@@ -274,6 +275,14 @@ export function createCLIParser(argv: string[]) {
  generateHandler
  );
 
+ // docs
+ wrangler.command(
+ "docs [command]",
+ "📚 Open wrangler's docs in your browser",
+ docsOptions,
+ docsHandler
+ );
+
  // init
  wrangler.command(
  "init [name]",
package/src/init.ts CHANGED
@@ -165,6 +165,18 @@ export async function initHandler(args: ArgumentsCamelCase<InitArgs>) {
  );
  let justCreatedWranglerToml = false;
 
+ let accountId = "";
+ let serviceMetaData: undefined | ServiceMetadataRes;
+
+ // If --from-dash, check that script actually exists
+ if (fromDashScriptName) {
+ const config = readConfig(args.config as ConfigPath, args);
+ accountId = await requireAuth(config);
+ serviceMetaData = await fetchResult<ServiceMetadataRes>(
+ `/accounts/${accountId}/workers/services/${fromDashScriptName}`
+ );
+ }
+
  if (fs.existsSync(wranglerTomlDestination)) {
  let shouldContinue = false;
  logger.warn(
@@ -452,16 +464,13 @@ export async function initHandler(args: ArgumentsCamelCase<InitArgs>) {
  `After running "wrangler init --from-dash", modifying your worker via the Cloudflare dashboard is discouraged.
  Edits made via the Dashboard will not be synchronized locally and will be overridden by your local code and config when you publish.`
  );
- const config = readConfig(args.config as ConfigPath, args);
- const accountId = await requireAuth(config);
+
  await mkdir(path.join(creationDirectory, "./src"), {
  recursive: true,
  });
- const serviceMetaData = await fetchResult<ServiceMetadataRes>(
- `/accounts/${accountId}/workers/services/${fromDashScriptName}`
- );
+
  const defaultEnvironment =
- serviceMetaData.default_environment.environment;
+ serviceMetaData?.default_environment.environment;
  // I want the default environment, assuming it's the most up to date code.
  const dashScript = await fetchDashboardScript(
  `/accounts/${accountId}/workers/services/${fromDashScriptName}/environments/${defaultEnvironment}/content`
@@ -479,7 +488,7 @@ export async function initHandler(args: ArgumentsCamelCase<InitArgs>) {
  scriptPath: "src/index.ts",
  extraToml: (await getWorkerConfig(accountId, fromDashScriptName, {
  defaultEnvironment,
- environments: serviceMetaData.environments,
+ environments: serviceMetaData?.environments,
  })) as TOML.JsonMap,
  });
  } else {
@@ -493,6 +502,7 @@ export async function initHandler(args: ArgumentsCamelCase<InitArgs>) {
  await mkdir(path.join(creationDirectory, "./src"), {
  recursive: true,
  });
+
  await writeFile(
  path.join(creationDirectory, "./src/index.ts"),
  readFileSync(path.join(getBasePath(), `templates/${template}`))
@@ -527,17 +537,13 @@ export async function initHandler(args: ArgumentsCamelCase<InitArgs>) {
  `After running "wrangler init --from-dash", modifying your worker via the Cloudflare dashboard is discouraged.
  Edits made via the Dashboard will not be synchronized locally and will be overridden by your local code and config when you publish.`
  );
- const config = readConfig(args.config as ConfigPath, args);
- const accountId = await requireAuth(config);
+
  await mkdir(path.join(creationDirectory, "./src"), {
  recursive: true,
  });
 
- const serviceMetaData = await fetchResult<ServiceMetadataRes>(
- `/accounts/${accountId}/workers/services/${fromDashScriptName}`
- );
  const defaultEnvironment =
- serviceMetaData.default_environment.environment;
+ serviceMetaData?.default_environment.environment;
 
  // I want the default environment, assuming it's the most up to date code.
  const dashScript = await fetchDashboardScript(
@@ -557,7 +563,7 @@ export async function initHandler(args: ArgumentsCamelCase<InitArgs>) {
  //? Should we have Environment argument for `wrangler init --from-dash` - Jacob
  extraToml: (await getWorkerConfig(accountId, fromDashScriptName, {
  defaultEnvironment,
- environments: serviceMetaData.environments,
+ environments: serviceMetaData?.environments,
  })) as TOML.JsonMap,
  });
  } else {
@@ -770,8 +776,8 @@ async function getWorkerConfig(
  defaultEnvironment,
  environments,
  }: {
- defaultEnvironment: string;
- environments: ServiceMetadataRes["environments"];
+ defaultEnvironment: string | undefined;
+ environments: ServiceMetadataRes["environments"] | undefined;
  }
  ): Promise<RawConfig> {
  const [bindings, routes, serviceEnvMetadata, cronTriggers] =
@@ -967,7 +973,7 @@ async function getWorkerConfig(
  crons: cronTriggers.schedules.map((scheduled) => scheduled.cron),
  },
  env: environments
- .filter((env) => env.environment !== "production")
+ ?.filter((env) => env.environment !== "production")
  // `env` can have multiple Environments, with different configs.
  .reduce((envObj, { environment }) => {
  return { ...envObj, [environment]: {} };
@@ -55,7 +55,8 @@ export type EventNames =
  | "build pages functions"
  | "run dev"
  | "run dev (api)"
- | "run pages dev";
+ | "run pages dev"
+ | "view docs";
 
  /**
  * Send a metrics event, with no extra properties, to Cloudflare, if usage tracking is enabled.
@@ -89,13 +89,17 @@ async function generateAssetsFetch(
  tre: boolean
  ): Promise<typeof fetch> {
  // Defer importing miniflare until we really need it
- if (tre) {
- await import(
- "@cloudflare/pages-shared/environment-polyfills/miniflare-tre"
- );
- } else {
- await import("@cloudflare/pages-shared/environment-polyfills/miniflare");
- }
+
+ const polyfill = tre
+ ? (
+ await import(
+ "@cloudflare/pages-shared/environment-polyfills/miniflare-tre"
+ )
+ ).default
+ : (await import("@cloudflare/pages-shared/environment-polyfills/miniflare"))
+ .default;
+
+ await polyfill();
 
  const miniflare = tre
  ? await import("@miniflare/tre")
@@ -58,6 +58,10 @@ export function Options(yargs: Argv) {
  description:
  "Whether or not the workspace should be considered dirty for this deployment",
  },
+ "skip-caching": {
+ type: "boolean",
+ description: "Skip asset caching which speeds up builds",
+ },
  config: {
  describe: "Pages does not support wrangler.toml",
  type: "string",
@@ -74,6 +78,7 @@ export const Handler = async ({
  commitHash,
  commitMessage,
  commitDirty,
+ skipCaching,
  config: wranglerConfig,
  }: PublishArgs) => {
  if (wranglerConfig) {
@@ -313,7 +318,12 @@ export const Handler = async ({
  }
  }
 
- const manifest = await upload({ directory, accountId, projectName });
+ const manifest = await upload({
+ directory,
+ accountId,
+ projectName,
+ skipCaching: skipCaching ?? false,
+ });
 
  const formData = new FormData();
 
@@ -40,6 +40,10 @@ export function Options(yargs: Argv) {
  type: "string",
  description: "The name of the project you want to deploy to",
  },
+ "skip-caching": {
+ type: "boolean",
+ description: "Skip asset caching which speeds up builds",
+ },
  })
  .epilogue(pagesBetaWarning);
  }
@@ -47,6 +51,7 @@ export function Options(yargs: Argv) {
  export const Handler = async ({
  directory,
  outputManifestPath,
+ skipCaching,
  }: UploadArgs) => {
  if (!directory) {
  throw new FatalError("Must specify a directory.", 1);
@@ -59,6 +64,7 @@ export const Handler = async ({
  const manifest = await upload({
  directory,
  jwt: process.env.CF_PAGES_UPLOAD_JWT,
+ skipCaching: skipCaching ?? false,
  });
 
  if (outputManifestPath) {
@@ -74,8 +80,14 @@ export const upload = async (
  | {
  directory: string;
  jwt: string;
+ skipCaching: boolean;
+ }
+ | {
+ directory: string;
+ accountId: string;
+ projectName: string;
+ skipCaching: boolean;
  }
- | { directory: string; accountId: string; projectName: string }
  ) => {
  async function fetchJwt(): Promise<string> {
  if ("jwt" in args) {
@@ -184,7 +196,12 @@ export const upload = async (
  const start = Date.now();
 
  let attempts = 0;
- const getMissingHashes = async (): Promise<string[]> => {
+ const getMissingHashes = async (skipCaching: boolean): Promise<string[]> => {
+ if (skipCaching) {
+ console.debug("Force skipping cache");
+ return files.map(({ hash }) => hash);
+ }
+
  try {
  return await fetchResult<string[]>(`/pages/assets/check-missing`, {
  method: "POST",
@@ -207,13 +224,13 @@ export const upload = async (
  // Looks like the JWT expired, fetch another one
  jwt = await fetchJwt();
  }
- return getMissingHashes();
+ return getMissingHashes(skipCaching);
  } else {
  throw e;
  }
  }
  };
- const missingHashes = await getMissingHashes();
+ const missingHashes = await getMissingHashes(args.skipCaching);
 
  const sortedFiles = files
  .filter((file) => missingHashes.includes(file.hash))
@@ -283,7 +300,8 @@ export const upload = async (
  );
 
  try {
- return await fetchResult(`/pages/assets/upload`, {
+ console.debug("POST /pages/assets/upload");
+ const res = await fetchResult(`/pages/assets/upload`, {
  method: "POST",
  headers: {
  "Content-Type": "application/json",
@@ -291,8 +309,10 @@ export const upload = async (
  },
  body: JSON.stringify(payload),
  });
+ console.debug("result:", res);
  } catch (e) {
  if (attempts < MAX_UPLOAD_ATTEMPTS) {
+ console.debug("failed:", e, "retrying...");
  // Exponential backoff, 1 second first time, then 2 second, then 4 second etc.
  await new Promise((resolvePromise) =>
  setTimeout(resolvePromise, Math.pow(2, attempts++) * 1000)
@@ -304,6 +324,7 @@ export const upload = async (
  }
  return doUpload();
  } else {
+ console.debug("failed:", e);
  throw e;
  }
  }
@@ -129453,35 +129453,45 @@ var init_environment_polyfills = __esm({
 
  // ../pages-shared/environment-polyfills/miniflare-tre.ts
  var miniflare_tre_exports = {};
- var import_tre;
+ __export(miniflare_tre_exports, {
+ default: () => miniflare_tre_default
+ });
+ var miniflare_tre_default;
  var init_miniflare_tre = __esm({
  "../pages-shared/environment-polyfills/miniflare-tre.ts"() {
  init_import_meta_url();
- import_tre = require("@miniflare/tre");
  init_environment_polyfills();
- polyfill({
- fetch: import_tre.fetch,
- Headers: import_tre.Headers,
- Request: import_tre.Request,
- Response: import_tre.Response
- });
+ miniflare_tre_default = async () => {
+ const mf = await import("@miniflare/tre");
+ polyfill({
+ fetch: mf.fetch,
+ Headers: mf.Headers,
+ Request: mf.Request,
+ Response: mf.Response
+ });
+ };
  }
  });
 
  // ../pages-shared/environment-polyfills/miniflare.ts
  var miniflare_exports = {};
- var import_core2;
+ __export(miniflare_exports, {
+ default: () => miniflare_default
+ });
+ var miniflare_default;
  var init_miniflare = __esm({
  "../pages-shared/environment-polyfills/miniflare.ts"() {
  init_import_meta_url();
- import_core2 = require("@miniflare/core");
  init_environment_polyfills();
- polyfill({
- fetch: import_core2.fetch,
- Headers: import_core2.Headers,
- Request: import_core2.Request,
- Response: import_core2.Response
- });
+ miniflare_default = async () => {
+ const mf = await import("@miniflare/core");
+ polyfill({
+ fetch: mf.fetch,
+ Headers: mf.Headers,
+ Request: mf.Request,
+ Response: mf.Response
+ });
+ };
  }
  });
 
@@ -140154,7 +140164,7 @@ function printBindings(bindings) {
  if (database_name) {
  databaseValue = `${database_name} (${database_id})`;
  }
- if (preview_database_id) {
+ if (preview_database_id && database_id !== "local") {
  databaseValue += `, Preview: (${preview_database_id})`;
  }
  return {
@@ -141885,7 +141895,7 @@ var import_websocket_server = __toESM(require_websocket_server2(), 1);
  var wrapper_default = import_websocket.default;
 
  // package.json
- var version = "2.6.0";
+ var version = "2.6.2";
  var package_default = {
  name: "wrangler",
  version,
@@ -144375,8 +144385,13 @@ async function performApiFetch(resource, init = {}, queryParams, abortSignal) {
  logger.debug(
  `-- START CF API REQUEST: ${method} ${getCloudflareAPIBaseURL()}${resource}${queryString}`
  );
- logger.debug("HEADERS:", JSON.stringify(headers, null, 2));
- logger.debug("INIT:", JSON.stringify(init, null, 2));
+ const logHeaders = cloneHeaders(headers);
+ delete logHeaders["Authorization"];
+ logger.debug("HEADERS:", JSON.stringify(logHeaders, null, 2));
+ logger.debug(
+ "INIT:",
+ JSON.stringify({ ...init, headers: logHeaders }, null, 2)
+ );
  logger.debug("-- END CF API REQUEST");
  return await (0, import_undici4.fetch)(`${getCloudflareAPIBaseURL()}${resource}${queryString}`, {
  method,
@@ -144399,7 +144414,9 @@ async function fetchInternal(resource, init = {}, queryParams, abortSignal) {
  response.statusText,
  response.status
  );
- logger.debug("HEADERS:", JSON.stringify(response.headers, null, 2));
+ const logHeaders = cloneHeaders(response.headers);
+ delete logHeaders["Authorization"];
+ logger.debug("HEADERS:", JSON.stringify(logHeaders, null, 2));
  logger.debug("RESPONSE:", jsonText);
  logger.debug("-- END CF API RESPONSE");
  try {
@@ -144986,11 +145003,8 @@ async function generateASSETSBinding(options6) {
  };
  }
  async function generateAssetsFetch(directory, log, tre) {
- if (tre) {
- await Promise.resolve().then(() => (init_miniflare_tre(), miniflare_tre_exports));
- } else {
- await Promise.resolve().then(() => (init_miniflare(), miniflare_exports));
- }
+ const polyfill2 = tre ? (await Promise.resolve().then(() => (init_miniflare_tre(), miniflare_tre_exports))).default : (await Promise.resolve().then(() => (init_miniflare(), miniflare_exports))).default;
+ await polyfill2();
  const miniflare = tre ? await import("@miniflare/tre") : await import("@miniflare/core");
  const Request2 = miniflare.Request;
  const { generateHandler: generateHandler3, parseQualityWeightedList: parseQualityWeightedList2 } = await Promise.resolve().then(() => (init_handler(), handler_exports));
@@ -155748,8 +155762,10 @@ Your database may not be available to serve requests during the migration, conti
  if (!ok)
  return;
  }
- (0, import_ink10.render)(/* @__PURE__ */ import_react14.default.createElement(import_ink10.Text, null, "\u{1F552} Creating backup..."));
- await createBackup(accountId, databaseInfo.uuid);
+ if (!local) {
+ (0, import_ink10.render)(/* @__PURE__ */ import_react14.default.createElement(import_ink10.Text, null, "\u{1F552} Creating backup..."));
+ await createBackup(accountId, databaseInfo.uuid);
+ }
  for (const migration of unappliedMigrations) {
  let query = import_node_fs14.default.readFileSync(
  `${migrationsPath}/${migration.Name}`,
@@ -156432,6 +156448,15 @@ Have you considered using Cloudflare Pages instead? See https://pages.cloudflare
  "./wrangler.toml"
  );
  let justCreatedWranglerToml = false;
+ let accountId = "";
+ let serviceMetaData;
+ if (fromDashScriptName) {
+ const config = readConfig(args.config, args);
+ accountId = await requireAuth(config);
+ serviceMetaData = await fetchResult(
+ `/accounts/${accountId}/workers/services/${fromDashScriptName}`
+ );
+ }
  if (fs12.existsSync(wranglerTomlDestination)) {
  let shouldContinue = false;
  logger.warn(
@@ -156657,15 +156682,10 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
  `After running "wrangler init --from-dash", modifying your worker via the Cloudflare dashboard is discouraged.
  Edits made via the Dashboard will not be synchronized locally and will be overridden by your local code and config when you publish.`
  );
- const config = readConfig(args.config, args);
- const accountId = await requireAuth(config);
  await (0, import_promises9.mkdir)(import_node_path24.default.join(creationDirectory, "./src"), {
  recursive: true
  });
- const serviceMetaData = await fetchResult(
- `/accounts/${accountId}/workers/services/${fromDashScriptName}`
- );
- const defaultEnvironment = serviceMetaData.default_environment.environment;
+ const defaultEnvironment = serviceMetaData?.default_environment.environment;
  const dashScript = await fetchDashboardScript(
  `/accounts/${accountId}/workers/services/${fromDashScriptName}/environments/${defaultEnvironment}/content`
  );
@@ -156680,7 +156700,7 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
  scriptPath: "src/index.ts",
  extraToml: await getWorkerConfig(accountId, fromDashScriptName, {
  defaultEnvironment,
- environments: serviceMetaData.environments
+ environments: serviceMetaData?.environments
  })
  });
  } else {
@@ -156721,15 +156741,10 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
  `After running "wrangler init --from-dash", modifying your worker via the Cloudflare dashboard is discouraged.
  Edits made via the Dashboard will not be synchronized locally and will be overridden by your local code and config when you publish.`
  );
- const config = readConfig(args.config, args);
- const accountId = await requireAuth(config);
  await (0, import_promises9.mkdir)(import_node_path24.default.join(creationDirectory, "./src"), {
  recursive: true
  });
- const serviceMetaData = await fetchResult(
- `/accounts/${accountId}/workers/services/${fromDashScriptName}`
- );
- const defaultEnvironment = serviceMetaData.default_environment.environment;
+ const defaultEnvironment = serviceMetaData?.default_environment.environment;
  const dashScript = await fetchDashboardScript(
  `/accounts/${accountId}/workers/services/${fromDashScriptName}/environments/${defaultEnvironment}/content`
  );
@@ -156744,7 +156759,7 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
  scriptPath: "src/index.ts",
  extraToml: await getWorkerConfig(accountId, fromDashScriptName, {
  defaultEnvironment,
- environments: serviceMetaData.environments
+ environments: serviceMetaData?.environments
  })
  });
  } else {
@@ -157068,7 +157083,7 @@ async function getWorkerConfig(accountId, fromDashScriptName, {
  triggers: {
  crons: cronTriggers.schedules.map((scheduled) => scheduled.cron)
  },
- env: environments.filter((env5) => env5.environment !== "production").reduce((envObj, { environment }) => {
+ env: environments?.filter((env5) => env5.environment !== "production").reduce((envObj, { environment }) => {
  return { ...envObj, [environment]: {} };
  }, {}),
  ...mappedBindings
@@ -157459,6 +157474,75 @@ function workerNamespaceCommands(workerNamespaceYargs, subHelp) {
  );
  }
 
+ // src/docs/index.ts
+ init_import_meta_url();
+ var argToUrlHash = {
+ init: "init",
+ generate: "generate",
+ dev: "dev",
+ publish: "publish",
+ delete: "delete",
+ "kv:namespace": "kvnamespace",
+ "kv:key": "kvkey",
+ "kv:bulk": "kvbulk",
+ "r2 bucket": "r2-bucket",
+ "r2 object": "r2-object",
+ secret: "secret",
+ "secret:bulk": "secretbulk",
+ tail: "tail",
+ pages: "pages",
+ login: "login",
+ logout: "logout",
+ whoami: "whoami",
+ types: "types",
+ deployments: "deployments"
+ };
+ function docsOptions(yargs) {
+ return yargs.positional("command", {
+ describe: "Enter the wrangler command you want to know more about",
+ type: "string",
+ choices: [
+ "init",
+ "dev",
+ "publish",
+ "delete",
+ "tail",
+ "secret",
+ "secret:bulk",
+ "kv:namespace",
+ "kv:key",
+ "kv:bulk",
+ "pages",
+ "r2 object",
+ "r2 bucket",
+ "login",
+ "logout",
+ "whoami",
+ "types",
+ "deployments",
+ "api"
+ ]
+ });
+ }
+ function isValidParam(k) {
+ return k in argToUrlHash;
+ }
+ async function docsHandler(args) {
+ let urlToOpen = "https://developers.cloudflare.com/workers/wrangler/commands/";
+ if (args.command === "api") {
+ urlToOpen = "https://developers.cloudflare.com/workers/wrangler/api/";
+ } else if (args.command && isValidParam(args.command)) {
+ urlToOpen += `#${argToUrlHash[args.command]}`;
+ }
+ await printWranglerBanner();
+ logger.log(`Opening a link in your default browser: ${urlToOpen}`);
+ await openInBrowser(urlToOpen);
+ const config = readConfig(void 0, {});
+ await sendMetricsEvent("view docs", {
+ sendMetrics: config.send_metrics
+ });
+ }
+
  // src/kv/index.ts
  init_import_meta_url();
  var import_node_string_decoder = require("node:string_decoder");
@@ -160690,12 +160774,17 @@ function Options8(yargs) {
  "output-manifest-path": {
  type: "string",
  description: "The name of the project you want to deploy to"
+ },
+ "skip-caching": {
+ type: "boolean",
+ description: "Skip asset caching which speeds up builds"
  }
  }).epilogue(pagesBetaWarning);
  }
  var Handler8 = async ({
  directory,
- outputManifestPath
+ outputManifestPath,
+ skipCaching
  }) => {
  if (!directory) {
  throw new FatalError("Must specify a directory.", 1);
@@ -160705,7 +160794,8 @@ var Handler8 = async ({
  }
  const manifest = await upload({
  directory,
- jwt: process.env.CF_PAGES_UPLOAD_JWT
+ jwt: process.env.CF_PAGES_UPLOAD_JWT,
+ skipCaching: skipCaching ?? false
  });
  if (outputManifestPath) {
  await (0, import_promises15.mkdir)((0, import_node_path34.dirname)(outputManifestPath), { recursive: true });
@@ -160784,7 +160874,11 @@ ${name} is ${prettyBytes(filestat.size)} in size`,
  let jwt = await fetchJwt();
  const start = Date.now();
  let attempts = 0;
- const getMissingHashes = async () => {
+ const getMissingHashes = async (skipCaching) => {
+ if (skipCaching) {
+ console.debug("Force skipping cache");
+ return files.map(({ hash }) => hash);
+ }
  try {
  return await fetchResult(`/pages/assets/check-missing`, {
  method: "POST",
@@ -160804,13 +160898,13 @@ ${name} is ${prettyBytes(filestat.size)} in size`,
  if (e2.code === 8000013) {
  jwt = await fetchJwt();
  }
- return getMissingHashes();
+ return getMissingHashes(skipCaching);
  } else {
  throw e2;
  }
  }
  };
- const missingHashes = await getMissingHashes();
+ const missingHashes = await getMissingHashes(args.skipCaching);
  const sortedFiles = files.filter((file) => missingHashes.includes(file.hash)).sort((a, b) => b.sizeInBytes - a.sizeInBytes);
  const buckets = new Array(BULK_UPLOAD_CONCURRENCY).fill(null).map(() => ({
  files: [],
@@ -160860,7 +160954,8 @@ ${name} is ${prettyBytes(filestat.size)} in size`,
  }))
  );
  try {
- return await fetchResult(`/pages/assets/upload`, {
+ console.debug("POST /pages/assets/upload");
+ const res = await fetchResult(`/pages/assets/upload`, {
  method: "POST",
  headers: {
  "Content-Type": "application/json",
@@ -160868,8 +160963,10 @@ ${name} is ${prettyBytes(filestat.size)} in size`,
  },
  body: JSON.stringify(payload)
  });
+ console.debug("result:", res);
  } catch (e2) {
  if (attempts < MAX_UPLOAD_ATTEMPTS) {
+ console.debug("failed:", e2, "retrying...");
  await new Promise(
  (resolvePromise) => setTimeout(resolvePromise, Math.pow(2, attempts++) * 1e3)
  );
@@ -160878,6 +160975,7 @@ ${name} is ${prettyBytes(filestat.size)} in size`,
  }
  return doUpload();
  } else {
+ console.debug("failed:", e2);
  throw e2;
  }
  }
@@ -161006,6 +161104,10 @@ function Options9(yargs) {
  type: "boolean",
  description: "Whether or not the workspace should be considered dirty for this deployment"
  },
+ "skip-caching": {
+ type: "boolean",
+ description: "Skip asset caching which speeds up builds"
+ },
  config: {
  describe: "Pages does not support wrangler.toml",
  type: "string",
@@ -161020,6 +161122,7 @@ var Handler9 = async ({
  commitHash,
  commitMessage,
  commitDirty,
+ skipCaching,
  config: wranglerConfig
  }) => {
  if (wranglerConfig) {
@@ -161202,7 +161305,12 @@ To silence this warning, pass in --commit-dirty=true`
  }
  }
  }
- const manifest = await upload({ directory, accountId, projectName });
+ const manifest = await upload({
+ directory,
+ accountId,
+ projectName,
+ skipCaching: skipCaching ?? false
+ });
  const formData = new import_undici10.FormData();
  formData.append("manifest", JSON.stringify(manifest));
  if (branch) {
@@ -164387,6 +164495,12 @@ function createCLIParser(argv) {
  generateOptions,
  generateHandler2
  );
+ wrangler.command(
+ "docs [command]",
+ "\u{1F4DA} Open wrangler's docs in your browser",
+ docsOptions,
+ docsHandler
+ );
  wrangler.command(
  "init [name]",
  "\u{1F4E5} Create a wrangler.toml configuration file",
@@ -164954,7 +165068,7 @@ Switch out --local for ${import_chalk8.default.bold(
  { sendMetrics: config.send_metrics, offline: args.local }
  );
  async function getDevReactElement(configParam) {
- const { assetPaths, bindings } = await getBindingsAndAssetPaths(
+ const { assetPaths, bindings } = getBindingsAndAssetPaths(
  args,
  configParam
  );
@@ -165049,7 +165163,7 @@ async function startApiDev(args) {
  { sendMetrics: config.send_metrics, offline: args.local }
  );
  async function getDevServer(configParam) {
- const { assetPaths, bindings } = await getBindingsAndAssetPaths(
+ const { assetPaths, bindings } = getBindingsAndAssetPaths(
  args,
  configParam
  );
@@ -165234,9 +165348,9 @@ use --persist-to=./wrangler-local-state to keep using the old path.`
  localPersistencePath
  };
  }
- async function getBindingsAndAssetPaths(args, configParam) {
+ function getBindingsAndAssetPaths(args, configParam) {
  const cliVars = collectKeyValues(args.var);
- const bindings = await getBindings(configParam, args.env, {
+ const bindings = getBindings(configParam, args.env, args.local ?? false, {
  kv: args.kv,
  vars: { ...args.vars, ...cliVars },
  durableObjects: args.durableObjects,
@@ -165256,7 +165370,7 @@ async function getBindingsAndAssetPaths(args, configParam) {
  );
  return { assetPaths, bindings };
  }
- async function getBindings(configParam, env5, args) {
+ function getBindings(configParam, env5, local, args) {
  const bindings = {
  kv_namespaces: [
  ...(configParam.kv_namespaces || []).map(
@@ -165315,6 +165429,12 @@ async function getBindings(configParam, env5, args) {
  logfwdr: configParam.logfwdr,
  d1_databases: identifyD1BindingsAsBeta([
  ...(configParam.d1_databases ?? []).map((d1Db) => {
+ if (local) {
+ return {
+ ...d1Db,
+ database_id: "local"
+ };
+ }
  if (!d1Db.preview_database_id) {
  throw new Error(
  `In development, you should use a separate D1 database than the one you'd use in production. Please create a new D1 database with "wrangler d1 create <name>" and add its id as preview_database_id to the d1_database "${d1Db.binding}" in your wrangler.toml`