wrangler 4.7.1 → 4.8.0

This diff represents the changes between publicly available package versions as they appear in their public registry, and is provided for informational purposes only.
@@ -77785,12 +77785,12 @@ var html_rewriter_exports = {};
77785
77785
  __export(html_rewriter_exports, {
77786
77786
  HTMLRewriter: () => HTMLRewriter2
77787
77787
  });
77788
- var import_web2, import_miniflare24, HTMLRewriter2;
77788
+ var import_web2, import_miniflare26, HTMLRewriter2;
77789
77789
  var init_html_rewriter = __esm({
77790
77790
  "../pages-shared/environment-polyfills/html-rewriter.ts"() {
77791
77791
  init_import_meta_url();
77792
77792
  import_web2 = require("stream/web");
77793
- import_miniflare24 = require("miniflare");
77793
+ import_miniflare26 = require("miniflare");
77794
77794
  HTMLRewriter2 = class {
77795
77795
  static {
77796
77796
  __name(this, "HTMLRewriter");
@@ -77808,9 +77808,9 @@ var init_html_rewriter = __esm({
77808
77808
  transform(response) {
77809
77809
  const body = response.body;
77810
77810
  if (body === null) {
77811
- return new import_miniflare24.Response(body, response);
77811
+ return new import_miniflare26.Response(body, response);
77812
77812
  }
77813
- response = new import_miniflare24.Response(response.body, response);
77813
+ response = new import_miniflare26.Response(response.body, response);
77814
77814
  let rewriter;
77815
77815
  const transformStream = new import_web2.TransformStream({
77816
77816
  start: /* @__PURE__ */ __name(async (controller) => {
@@ -77839,7 +77839,7 @@ var init_html_rewriter = __esm({
77839
77839
  const promise = body.pipeTo(transformStream.writable);
77840
77840
  promise.catch(() => {
77841
77841
  }).finally(() => rewriter.free());
77842
- const res = new import_miniflare24.Response(transformStream.readable, response);
77842
+ const res = new import_miniflare26.Response(transformStream.readable, response);
77843
77843
  res.headers.delete("Content-Length");
77844
77844
  return res;
77845
77845
  }
@@ -78698,17 +78698,17 @@ async function generateASSETSBinding(options33) {
78698
78698
  try {
78699
78699
  const url4 = new URL(miniflareRequest.url);
78700
78700
  url4.host = `localhost:${options33.proxyPort}`;
78701
- const proxyRequest = new import_miniflare25.Request(url4, miniflareRequest);
78701
+ const proxyRequest = new import_miniflare27.Request(url4, miniflareRequest);
78702
78702
  if (proxyRequest.headers.get("Upgrade") === "websocket") {
78703
78703
  proxyRequest.headers.delete("Sec-WebSocket-Accept");
78704
78704
  proxyRequest.headers.delete("Sec-WebSocket-Key");
78705
78705
  }
78706
- return await (0, import_miniflare25.fetch)(proxyRequest, {
78706
+ return await (0, import_miniflare27.fetch)(proxyRequest, {
78707
78707
  dispatcher: new ProxyDispatcher(miniflareRequest.headers.get("Host"))
78708
78708
  });
78709
78709
  } catch (thrown) {
78710
78710
  options33.log.error(new Error(`Could not proxy request: ${thrown}`));
78711
- return new import_miniflare25.Response(`[wrangler] Could not proxy request: ${thrown}`, {
78711
+ return new import_miniflare27.Response(`[wrangler] Could not proxy request: ${thrown}`, {
78712
78712
  status: 502
78713
78713
  });
78714
78714
  }
@@ -78717,7 +78717,7 @@ async function generateASSETSBinding(options33) {
78717
78717
  return await assetsFetch(miniflareRequest);
78718
78718
  } catch (thrown) {
78719
78719
  options33.log.error(new Error(`Could not serve static asset: ${thrown}`));
78720
- return new import_miniflare25.Response(
78720
+ return new import_miniflare27.Response(
78721
78721
  `[wrangler] Could not serve static asset: ${thrown}`,
78722
78722
  { status: 502 }
78723
78723
  );
@@ -78829,11 +78829,11 @@ async function generateAssetsFetch(directory, log2) {
78829
78829
  });
78830
78830
  }, "generateResponse");
78831
78831
  return async (input, init2) => {
78832
- const request4 = new import_miniflare25.Request(input, init2);
78832
+ const request4 = new import_miniflare27.Request(input, init2);
78833
78833
  return await generateResponse(request4);
78834
78834
  };
78835
78835
  }
78836
- var import_node_assert25, import_node_fs33, import_node_path62, import_mime3, import_miniflare25, import_undici23, ProxyDispatcher, invalidAssetsFetch;
78836
+ var import_node_assert25, import_node_fs33, import_node_path62, import_mime3, import_miniflare27, import_undici23, ProxyDispatcher, invalidAssetsFetch;
78837
78837
  var init_assets = __esm({
78838
78838
  "src/miniflare-cli/assets.ts"() {
78839
78839
  init_import_meta_url();
@@ -78845,7 +78845,7 @@ var init_assets = __esm({
78845
78845
  init_parseRedirects();
78846
78846
  init_esm2();
78847
78847
  import_mime3 = __toESM(require_mime());
78848
- import_miniflare25 = require("miniflare");
78848
+ import_miniflare27 = require("miniflare");
78849
78849
  import_undici23 = __toESM(require_undici());
78850
78850
  init_hash();
78851
78851
  __name(generateASSETSBinding, "generateASSETSBinding");
@@ -81185,7 +81185,7 @@ var import_undici3 = __toESM(require_undici());
81185
81185
 
81186
81186
  // package.json
81187
81187
  var name = "wrangler";
81188
- var version = "4.7.1";
81188
+ var version = "4.8.0";
81189
81189
 
81190
81190
  // src/environment-variables/misc-variables.ts
81191
81191
  init_import_meta_url();
@@ -83843,6 +83843,7 @@ var friendlyBindingNames = {
83843
83843
  mtls_certificates: "mTLS Certificates",
83844
83844
  workflows: "Workflows",
83845
83845
  pipelines: "Pipelines",
83846
+ secrets_store_secrets: "Secrets Store Secrets",
83846
83847
  assets: "Assets"
83847
83848
  };
83848
83849
  function printBindings(bindings, context2 = {}) {
@@ -83872,6 +83873,7 @@ function printBindings(bindings, context2 = {}) {
83872
83873
  hyperdrive: hyperdrive2,
83873
83874
  r2_buckets,
83874
83875
  logfwdr,
83876
+ secrets_store_secrets,
83875
83877
  services,
83876
83878
  analytics_engine_datasets,
83877
83879
  text_blobs,
@@ -84053,6 +84055,21 @@ function printBindings(bindings, context2 = {}) {
84053
84055
  })
84054
84056
  });
84055
84057
  }
84058
+ if (secrets_store_secrets !== void 0 && secrets_store_secrets.length > 0) {
84059
+ output.push({
84060
+ name: friendlyBindingNames.secrets_store_secrets,
84061
+ entries: secrets_store_secrets.map(
84062
+ ({ binding, store_id, secret_name }) => {
84063
+ return {
84064
+ key: binding,
84065
+ value: addSuffix(`${store_id}/${secret_name}`, {
84066
+ isSimulatedLocally: true
84067
+ })
84068
+ };
84069
+ }
84070
+ )
84071
+ });
84072
+ }
84056
84073
  if (services !== void 0 && services.length > 0) {
84057
84074
  output.push({
84058
84075
  name: friendlyBindingNames.services,
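
The hunk above adds the new secrets_store_secrets bindings to the printBindings summary. A minimal sketch of the shape it consumes and the value it renders, with invented names (the exact suffix text comes from addSuffix and is not reproduced here):

// Invented example entries; the field names match the validator added later in this diff.
const secrets_store_secrets = [
  { binding: "API_KEY", store_id: "store-abc123", secret_name: "prod-api-key" },
];

// printBindings lists each entry keyed by its binding name, with the value
// `${store_id}/${secret_name}` plus a "simulated locally" style suffix.
for (const { binding, store_id, secret_name } of secrets_store_secrets) {
  console.log(`${binding}: ${store_id}/${secret_name}`);
}
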
@@ -84751,52 +84768,67 @@ function normalizeAndValidateConfig(rawConfig, configPath, userConfigPath, args)
84751
84768
  rawConfig,
84752
84769
  isDispatchNamespace
84753
84770
  );
84771
+ const isRedirectedConfig = configPath && configPath !== userConfigPath;
84772
+ const definedEnvironments = Object.keys(rawConfig.env ?? {});
84773
+ if (isRedirectedConfig && definedEnvironments.length > 0) {
84774
+ diagnostics.errors.push(
84775
+ `Redirected configurations cannot include environments but the following have been found: ${definedEnvironments.map((env6) => JSON.stringify(env6)).join(", ")}`
84776
+ );
84777
+ }
84754
84778
  const envName = args.env;
84755
84779
  (0, import_node_assert.default)(envName === void 0 || typeof envName === "string");
84756
84780
  let activeEnv = topLevelEnv;
84757
84781
  if (envName !== void 0) {
84758
- const envDiagnostics = new Diagnostics(
84759
- `"env.${envName}" environment configuration`
84760
- );
84761
- const rawEnv = rawConfig.env?.[envName];
84762
- if (rawEnv !== void 0) {
84763
- activeEnv = normalizeAndValidateEnvironment(
84764
- envDiagnostics,
84765
- configPath,
84766
- rawEnv,
84767
- isDispatchNamespace,
84768
- envName,
84769
- topLevelEnv,
84770
- isLegacyEnv2,
84771
- rawConfig
84772
- );
84773
- diagnostics.addChild(envDiagnostics);
84774
- } else if (!isPagesConfig(rawConfig)) {
84775
- activeEnv = normalizeAndValidateEnvironment(
84776
- envDiagnostics,
84777
- configPath,
84778
- topLevelEnv,
84779
- // in this case reuse the topLevelEnv to ensure that nonInherited fields are not removed
84780
- isDispatchNamespace,
84781
- envName,
84782
- topLevelEnv,
84783
- isLegacyEnv2,
84784
- rawConfig
84782
+ if (isRedirectedConfig) {
84783
+ diagnostics.errors.push(dedent`
84784
+ You have specified the environment "${envName}", but are using a redirected configuration, produced by a build tool such as Vite.
84785
+ You need to set the environment in your build tool, rather than via Wrangler.
84786
+ For example, if you are using Vite, refer to these docs: https://developers.cloudflare.com/workers/vite-plugin/reference/cloudflare-environments/
84787
+ `);
84788
+ } else {
84789
+ const envDiagnostics = new Diagnostics(
84790
+ `"env.${envName}" environment configuration`
84785
84791
  );
84786
- const envNames = rawConfig.env ? `The available configured environment names are: ${JSON.stringify(
84787
- Object.keys(rawConfig.env)
84788
- )}
84792
+ const rawEnv = rawConfig.env?.[envName];
84793
+ if (rawEnv !== void 0) {
84794
+ activeEnv = normalizeAndValidateEnvironment(
84795
+ envDiagnostics,
84796
+ configPath,
84797
+ rawEnv,
84798
+ isDispatchNamespace,
84799
+ envName,
84800
+ topLevelEnv,
84801
+ isLegacyEnv2,
84802
+ rawConfig
84803
+ );
84804
+ diagnostics.addChild(envDiagnostics);
84805
+ } else if (!isPagesConfig(rawConfig)) {
84806
+ activeEnv = normalizeAndValidateEnvironment(
84807
+ envDiagnostics,
84808
+ configPath,
84809
+ topLevelEnv,
84810
+ // in this case reuse the topLevelEnv to ensure that nonInherited fields are not removed
84811
+ isDispatchNamespace,
84812
+ envName,
84813
+ topLevelEnv,
84814
+ isLegacyEnv2,
84815
+ rawConfig
84816
+ );
84817
+ const envNames = rawConfig.env ? `The available configured environment names are: ${JSON.stringify(
84818
+ Object.keys(rawConfig.env)
84819
+ )}
84789
84820
  ` : "";
84790
- const message = `No environment found in configuration with name "${envName}".
84821
+ const message = `No environment found in configuration with name "${envName}".
84791
84822
  Before using \`--env=${envName}\` there should be an equivalent environment section in the configuration.
84792
84823
  ${envNames}
84793
84824
  Consider adding an environment configuration section to the ${configFileName(configPath)} file:
84794
84825
  \`\`\`
84795
84826
  [env.` + envName + "]\n```\n";
84796
- if (envNames.length > 0) {
84797
- diagnostics.errors.push(message);
84798
- } else {
84799
- diagnostics.warnings.push(message);
84827
+ if (envNames.length > 0) {
84828
+ diagnostics.errors.push(message);
84829
+ } else {
84830
+ diagnostics.warnings.push(message);
84831
+ }
84800
84832
  }
84801
84833
  }
84802
84834
  }
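
The environment-selection changes above hinge on detecting a "redirected" configuration. A short sketch of the detection with invented paths; userConfigPath is the file the user authored, while configPath is the config Wrangler was actually pointed at (for example one emitted by a build tool such as Vite):

// Invented example paths.
const userConfigPath = "wrangler.jsonc";   // config written by the user
const configPath = "dist/wrangler.json";   // config a build tool redirected Wrangler to
const isRedirectedConfig = configPath && configPath !== userConfigPath;

// When isRedirectedConfig is true, the new checks reject both `env` blocks in the
// redirected file and the --env flag, pointing users at their build tool instead.
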
@@ -85690,6 +85722,16 @@ function normalizeAndValidateEnvironment(diagnostics, configPath, rawEnv, isDisp
85690
85722
  validateBindingArray(envName, validatePipelineBinding),
85691
85723
  []
85692
85724
  ),
85725
+ secrets_store_secrets: notInheritable(
85726
+ diagnostics,
85727
+ topLevelEnv,
85728
+ rawConfig,
85729
+ rawEnv,
85730
+ envName,
85731
+ "secrets_store_secrets",
85732
+ validateBindingArray(envName, validateSecretsStoreSecretBinding),
85733
+ []
85734
+ ),
85693
85735
  version_metadata: notInheritable(
85694
85736
  diagnostics,
85695
85737
  topLevelEnv,
@@ -87097,6 +87139,45 @@ var validatePipelineBinding = /* @__PURE__ */ __name((diagnostics, field, value)
87097
87139
  ]);
87098
87140
  return isValid2;
87099
87141
  }, "validatePipelineBinding");
87142
+ var validateSecretsStoreSecretBinding = /* @__PURE__ */ __name((diagnostics, field, value) => {
87143
+ if (typeof value !== "object" || value === null) {
87144
+ diagnostics.errors.push(
87145
+ `"secrets_store_secrets" bindings should be objects, but got ${JSON.stringify(value)}`
87146
+ );
87147
+ return false;
87148
+ }
87149
+ let isValid2 = true;
87150
+ if (!isRequiredProperty(value, "binding", "string")) {
87151
+ diagnostics.errors.push(
87152
+ `"${field}" bindings must have a string "binding" field but got ${JSON.stringify(
87153
+ value
87154
+ )}.`
87155
+ );
87156
+ isValid2 = false;
87157
+ }
87158
+ if (!isRequiredProperty(value, "store_id", "string")) {
87159
+ diagnostics.errors.push(
87160
+ `"${field}" bindings must have a string "store_id" field but got ${JSON.stringify(
87161
+ value
87162
+ )}.`
87163
+ );
87164
+ isValid2 = false;
87165
+ }
87166
+ if (!isRequiredProperty(value, "secret_name", "string")) {
87167
+ diagnostics.errors.push(
87168
+ `"${field}" bindings must have a string "secret_name" field but got ${JSON.stringify(
87169
+ value
87170
+ )}.`
87171
+ );
87172
+ isValid2 = false;
87173
+ }
87174
+ validateAdditionalProperties(diagnostics, field, Object.keys(value), [
87175
+ "binding",
87176
+ "store_id",
87177
+ "secret_name"
87178
+ ]);
87179
+ return isValid2;
87180
+ }, "validateSecretsStoreSecretBinding");
87100
87181
  function normalizeAndValidateLimits(diagnostics, topLevelEnv, rawEnv) {
87101
87182
  if (rawEnv.limits) {
87102
87183
  validateRequiredProperty(
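
validateSecretsStoreSecretBinding above requires three string fields and rejects unknown keys. An illustrative pair of config entries, with invented values:

// Passes validation: all three required string fields, nothing extra.
const valid = { binding: "API_KEY", store_id: "store-abc123", secret_name: "prod-api-key" };

// Fails validation: "secret_name" is missing and "store_id" is not a string;
// an extra property such as "comment" would also be flagged by validateAdditionalProperties.
const invalid = { binding: "API_KEY", store_id: 42, comment: "oops" };
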
@@ -87360,6 +87441,7 @@ var defaultWranglerConfig = {
87360
87441
  vectorize: [],
87361
87442
  hyperdrive: [],
87362
87443
  workflows: [],
87444
+ secrets_store_secrets: [],
87363
87445
  services: [],
87364
87446
  analytics_engine_datasets: [],
87365
87447
  ai: void 0,
@@ -92850,6 +92932,12 @@ function buildMiniflareBindingOptions(config) {
92850
92932
  bindings.hyperdrive?.map(hyperdriveEntry) ?? []
92851
92933
  ),
92852
92934
  workflows: Object.fromEntries(bindings.workflows?.map(workflowEntry) ?? []),
92935
+ secretsStoreSecrets: Object.fromEntries(
92936
+ bindings.secrets_store_secrets?.map((binding) => [
92937
+ binding.binding,
92938
+ binding
92939
+ ]) ?? []
92940
+ ),
92853
92941
  durableObjects: Object.fromEntries([
92854
92942
  ...internalObjects.map(({ name: name2, class_name }) => {
92855
92943
  const useSQLite = classNameToUseSQLite.get(class_name);
@@ -92908,7 +92996,8 @@ function buildPersistOptions(localPersistencePath) {
92908
92996
  kvPersist: import_node_path15.default.join(v3Path, "kv"),
92909
92997
  r2Persist: import_node_path15.default.join(v3Path, "r2"),
92910
92998
  d1Persist: import_node_path15.default.join(v3Path, "d1"),
92911
- workflowsPersist: import_node_path15.default.join(v3Path, "workflows")
92999
+ workflowsPersist: import_node_path15.default.join(v3Path, "workflows"),
93000
+ secretsStorePersist: import_node_path15.default.join(v3Path, "secrets-store")
92912
93001
  };
92913
93002
  }
92914
93003
  }
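
Together, the two hunks above teach local dev about the new binding: buildMiniflareBindingOptions forwards each config entry keyed by its binding name, and buildPersistOptions adds a dedicated persistence directory. A sketch of the resulting Miniflare options, with invented values:

const v3Path = ".wrangler/state/v3"; // assumed default local persistence root
const miniflareOptions = {
  secretsStoreSecrets: {
    // keyed by binding name; the whole config entry is passed through unchanged
    API_KEY: { binding: "API_KEY", store_id: "store-abc123", secret_name: "prod-api-key" },
  },
  // local secrets-store data now persists alongside kv/r2/d1/workflows state
  secretsStorePersist: `${v3Path}/secrets-store`,
};
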
@@ -93338,6 +93427,12 @@ function convertCfWorkerInitBindingstoBindings(inputBindings) {
93338
93427
  }
93339
93428
  break;
93340
93429
  }
93430
+ case "secrets_store_secrets": {
93431
+ for (const { binding, ...x6 } of info) {
93432
+ output[binding] = { type: "secrets_store_secret", ...x6 };
93433
+ }
93434
+ break;
93435
+ }
93341
93436
  default: {
93342
93437
  assertNever(type);
93343
93438
  }
@@ -93365,6 +93460,7 @@ async function convertBindingsToCfWorkerInitBindings(inputBindings) {
93365
93460
  d1_databases: void 0,
93366
93461
  vectorize: void 0,
93367
93462
  hyperdrive: void 0,
93463
+ secrets_store_secrets: void 0,
93368
93464
  services: void 0,
93369
93465
  analytics_engine_datasets: void 0,
93370
93466
  dispatch_namespaces: void 0,
@@ -93452,6 +93548,9 @@ async function convertBindingsToCfWorkerInitBindings(inputBindings) {
93452
93548
  } else if (binding.type === "workflow") {
93453
93549
  bindings.workflows ??= [];
93454
93550
  bindings.workflows.push({ ...binding, binding: name2 });
93551
+ } else if (binding.type === "secrets_store_secret") {
93552
+ bindings.secrets_store_secrets ??= [];
93553
+ bindings.secrets_store_secrets.push({ ...binding, binding: name2 });
93455
93554
  } else if (isUnsafeBindingType(binding.type)) {
93456
93555
  bindings.unsafe ??= {
93457
93556
  bindings: [],
@@ -100994,7 +101093,7 @@ __name(getDeviceId, "getDeviceId");
100994
101093
  // src/metrics/metrics-dispatcher.ts
100995
101094
  var SPARROW_URL = "https://sparrow.cloudflare.com";
100996
101095
  function getMetricsDispatcher(options33) {
100997
- const SPARROW_SOURCE_KEY = "50598e014ed44c739ec8074fdc16057c";
101096
+ const SPARROW_SOURCE_KEY = "";
100998
101097
  const requests = [];
100999
101098
  const wranglerVersion = getWranglerVersion();
101000
101099
  const amplitude_session_id = Date.now();
@@ -102343,6 +102442,7 @@ function getBindings(config, options33) {
102343
102442
  d1_databases: config?.d1_databases,
102344
102443
  vectorize: config?.vectorize,
102345
102444
  hyperdrive: config?.hyperdrive,
102445
+ secrets_store_secrets: config?.secrets_store_secrets,
102346
102446
  services: config?.services,
102347
102447
  analytics_engine_datasets: config?.analytics_engine_datasets,
102348
102448
  dispatch_namespaces: options33?.pages ? void 0 : config?.dispatch_namespaces,
@@ -103923,6 +104023,16 @@ function createWorkerUploadForm(worker) {
103923
104023
  id
103924
104024
  });
103925
104025
  });
104026
+ bindings.secrets_store_secrets?.forEach(
104027
+ ({ binding, store_id, secret_name }) => {
104028
+ metadataBindings.push({
104029
+ name: binding,
104030
+ type: "secrets_store_secret",
104031
+ store_id,
104032
+ secret_name
104033
+ });
104034
+ }
104035
+ );
103926
104036
  bindings.services?.forEach(
103927
104037
  ({ binding, service, environment, entrypoint }) => {
103928
104038
  metadataBindings.push({
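
On deploy, createWorkerUploadForm now serialises each secrets_store_secrets entry into the worker metadata. A sketch of one resulting metadata binding, with invented values; note that the config entry's "binding" field becomes "name" here:

const metadataBinding = {
  name: "API_KEY",                 // taken from the config entry's "binding" field
  type: "secrets_store_secret",
  store_id: "store-abc123",
  secret_name: "prod-api-key",
};
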
@@ -105232,7 +105342,7 @@ var import_promises34 = require("fs/promises");
105232
105342
  var import_node_events4 = __toESM(require("node:events"));
105233
105343
  var import_promises35 = require("node:fs/promises");
105234
105344
  var import_path23 = __toESM(require("path"));
105235
- var import_miniflare22 = require("miniflare");
105345
+ var import_miniflare24 = require("miniflare");
105236
105346
 
105237
105347
  // src/index.ts
105238
105348
  init_import_meta_url();
@@ -117371,9 +117481,9 @@ init_import_meta_url();
117371
117481
  var import_node_assert20 = __toESM(require("node:assert"));
117372
117482
  var import_undici11 = __toESM(require_undici());
117373
117483
  async function runSearch(searchTerm) {
117374
- const id = "8MU1G3QO9P";
117484
+ const id = ALGOLIA_APP_ID;
117375
117485
  const index = "developers-cloudflare2";
117376
- const key = "045e8dbec8c137a52f0f56e196d7abe0";
117486
+ const key = ALGOLIA_PUBLIC_KEY;
117377
117487
  const params = new URLSearchParams({
117378
117488
  query: searchTerm,
117379
117489
  hitsPerPage: "1",
@@ -118250,6 +118360,18 @@ async function mapBindings(accountId, bindings) {
118250
118360
  ];
118251
118361
  }
118252
118362
  break;
118363
+ case "secrets_store_secret":
118364
+ {
118365
+ configObj.secrets_store_secrets = [
118366
+ ...configObj.secrets_store_secrets ?? [],
118367
+ {
118368
+ binding: binding.name,
118369
+ store_id: binding.store_id,
118370
+ secret_name: binding.secret_name
118371
+ }
118372
+ ];
118373
+ }
118374
+ break;
118253
118375
  case "service":
118254
118376
  {
118255
118377
  configObj.services = [
@@ -120636,7 +120758,8 @@ function Options8(yargs) {
120636
120758
  }).options({
120637
120759
  outfile: {
120638
120760
  type: "string",
120639
- description: "The location of the output Worker script"
120761
+ description: "The location of the output Worker script",
120762
+ deprecated: true
120640
120763
  },
120641
120764
  outdir: {
120642
120765
  type: "string",
@@ -124302,6 +124425,7 @@ async function createDraftWorker({
124302
124425
  d1_databases: [],
124303
124426
  vectorize: [],
124304
124427
  hyperdrive: [],
124428
+ secrets_store_secrets: [],
124305
124429
  services: [],
124306
124430
  analytics_engine_datasets: [],
124307
124431
  wasm_modules: {},
@@ -124981,20 +125105,20 @@ function pages(yargs, subHelp) {
124981
125105
  Handler14
124982
125106
  ).command(
124983
125107
  "functions",
124984
- false,
125108
+ "Helpers related to Pages Functions",
124985
125109
  (args) => args.command(subHelp).command(
124986
125110
  "build [directory]",
124987
- "Compile a folder of Cloudflare Pages Functions into a single Worker",
125111
+ "Compile a folder of Pages Functions into a single Worker",
124988
125112
  Options8,
124989
125113
  Handler8
124990
125114
  ).command(
124991
125115
  "build-env [projectDir]",
124992
- "Render a list of environment variables from the config file",
125116
+ false,
124993
125117
  Options9,
124994
125118
  Handler9
124995
125119
  ).command(
124996
125120
  "optimize-routes [routesPath] [outputRoutesPath]",
124997
- "Consolidate and optimize the route paths declared in _routes.json",
125121
+ false,
124998
125122
  OptimizeRoutesOptions,
124999
125123
  OptimizeRoutesHandler
125000
125124
  )
@@ -128954,22 +129078,12 @@ function addCreateOptions(yargs) {
128954
129078
  type: "string",
128955
129079
  demandOption: true
128956
129080
  }).group(
128957
- [
128958
- "enable-worker-binding",
128959
- "enable-http",
128960
- "require-http-auth",
128961
- "cors-origins"
128962
- ],
129081
+ ["source", "require-http-auth", "cors-origins"],
128963
129082
  `${source_default.bold("Source settings")}`
128964
- ).option("enable-worker-binding", {
128965
- type: "boolean",
128966
- describe: "Send data from a Worker to a Pipeline using a Binding",
128967
- default: true,
128968
- demandOption: false
128969
- }).option("enable-http", {
128970
- type: "boolean",
128971
- describe: "Generate an endpoint to ingest data via HTTP",
128972
- default: true,
129083
+ ).option("source", {
129084
+ type: "array",
129085
+ describe: "Space separated list of allowed sources. Options are 'http' or 'worker'",
129086
+ default: ["http", "worker"],
128973
129087
  demandOption: false
128974
129088
  }).option("require-http-auth", {
128975
129089
  type: "boolean",
@@ -128978,7 +129092,7 @@ function addCreateOptions(yargs) {
128978
129092
  demandOption: false
128979
129093
  }).option("cors-origins", {
128980
129094
  type: "array",
128981
- describe: "CORS origin allowlist for HTTP endpoint (use * for any origin)",
129095
+ describe: "CORS origin allowlist for HTTP endpoint (use * for any origin). Defaults to an empty array",
128982
129096
  demandOption: false,
128983
129097
  coerce: validateCorsOrigins
128984
129098
  }).group(
@@ -128986,17 +129100,17 @@ function addCreateOptions(yargs) {
128986
129100
  `${source_default.bold("Batch hints")}`
128987
129101
  ).option("batch-max-mb", {
128988
129102
  type: "number",
128989
- describe: "Maximum batch size in megabytes before flushing",
129103
+ describe: "Maximum batch size in megabytes before flushing. Defaults to 100 MB if unset. Minimum: 1, Maximum: 100",
128990
129104
  demandOption: false,
128991
129105
  coerce: validateInRange("batch-max-mb", 1, 100)
128992
129106
  }).option("batch-max-rows", {
128993
129107
  type: "number",
128994
- describe: "Maximum number of rows per batch before flushing",
129108
+ describe: "Maximum number of rows per batch before flushing. Defaults to 10,000,000 if unset. Minimum: 100, Maximum: 10,000,000",
128995
129109
  demandOption: false,
128996
- coerce: validateInRange("batch-max-rows", 100, 1e6)
129110
+ coerce: validateInRange("batch-max-rows", 100, 1e7)
128997
129111
  }).option("batch-max-seconds", {
128998
129112
  type: "number",
128999
- describe: "Maximum age of batch in seconds before flushing",
129113
+ describe: "Maximum age of batch in seconds before flushing. Defaults to 300 if unset. Minimum: 1, Maximum: 300",
129000
129114
  demandOption: false,
129001
129115
  coerce: validateInRange("batch-max-seconds", 1, 300)
129002
129116
  }).group(["transform-worker"], `${source_default.bold("Transformations")}`).option("transform-worker", {
@@ -129011,9 +129125,7 @@ function addCreateOptions(yargs) {
129011
129125
  "r2-access-key-id",
129012
129126
  "r2-secret-access-key",
129013
129127
  "r2-prefix",
129014
- "compression",
129015
- "file-template",
129016
- "partition-template"
129128
+ "compression"
129017
129129
  ],
129018
129130
  `${source_default.bold("Destination settings")}`
129019
129131
  ).option("r2-bucket", {
@@ -129037,7 +129149,7 @@ function addCreateOptions(yargs) {
129037
129149
  return true;
129038
129150
  }).option("r2-prefix", {
129039
129151
  type: "string",
129040
- describe: "Prefix for storing files in the destination bucket",
129152
+ describe: "Prefix for storing files in the destination bucket. Default is no prefix",
129041
129153
  default: "",
129042
129154
  demandOption: false
129043
129155
  }).option("compression", {
@@ -129046,23 +129158,9 @@ function addCreateOptions(yargs) {
129046
129158
  choices: ["none", "gzip", "deflate"],
129047
129159
  default: "gzip",
129048
129160
  demandOption: false
129049
- }).option("partition-template", {
129050
- type: "string",
129051
- describe: "Path template for partitioned files in the bucket. If not specified, the default will be used",
129052
- demandOption: false
129053
- }).option("file-template", {
129054
- type: "string",
129055
- describe: `Template for individual file names (must include \${slug}). For example: "\${slug}.log.gz"`,
129056
- demandOption: false,
129057
- coerce: /* @__PURE__ */ __name((val2) => {
129058
- if (!val2.includes("${slug}")) {
129059
- throw new UserError("filename must contain ${slug}");
129060
- }
129061
- return val2;
129062
- }, "coerce")
129063
129161
  }).group(["shard-count"], `${source_default.bold("Pipeline settings")}`).option("shard-count", {
129064
129162
  type: "number",
129065
- describe: "Number of pipeline shards. More shards handle higher request volume; fewer shards produce larger output files",
129163
+ describe: "Number of pipeline shards. More shards handle higher request volume; fewer shards produce larger output files. Defaults to 2 if unset. Minimum: 1, Maximum: 15",
129066
129164
  demandOption: false
129067
129165
  });
129068
129166
  }
@@ -129117,22 +129215,30 @@ async function createPipelineHandler(args) {
129117
129215
  if (!destination.credentials.secret_access_key) {
129118
129216
  throw new FatalError("Requires a r2 secret access key");
129119
129217
  }
129120
- if (args.enableWorkerBinding) {
129121
- pipelineConfig.source.push({
129122
- type: "binding",
129123
- format: "json"
129124
- });
129125
- }
129126
- if (args.enableHttp) {
129127
- const source = {
129128
- type: "http",
129129
- format: "json",
129130
- authentication: args.requireHttpAuth
129218
+ if (args.source.length > 0) {
129219
+ const sourceHandlers = {
129220
+ http: /* @__PURE__ */ __name(() => {
129221
+ const http5 = {
129222
+ type: "http",
129223
+ format: "json",
129224
+ authentication: args.requireHttpAuth
129225
+ };
129226
+ if (args.corsOrigins && args.corsOrigins.length > 0) {
129227
+ http5.cors = { origins: args.corsOrigins };
129228
+ }
129229
+ return http5;
129230
+ }, "http"),
129231
+ worker: /* @__PURE__ */ __name(() => ({
129232
+ type: "binding",
129233
+ format: "json"
129234
+ }), "worker")
129131
129235
  };
129132
- if (args.corsOrigins && args.corsOrigins.length > 0) {
129133
- source.cors = { origins: args.corsOrigins };
129236
+ for (const source of args.source) {
129237
+ const handler32 = sourceHandlers[source];
129238
+ if (handler32) {
129239
+ pipelineConfig.source.push(handler32());
129240
+ }
129134
129241
  }
129135
- pipelineConfig.source.push(source);
129136
129242
  }
129137
129243
  if (pipelineConfig.source.length === 0) {
129138
129244
  throw new UserError(
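
The create handler above replaces the old --enable-http/--enable-worker-binding booleans with a single --source list. A condensed sketch of the dispatch it performs, with invented argument values:

// Invented args; mirrors the sourceHandlers dispatch in the hunk above.
const args = { source: ["http", "worker"], requireHttpAuth: false, corsOrigins: ["*"] };

const sourceHandlers = {
  http: () => {
    const src = { type: "http", format: "json", authentication: args.requireHttpAuth };
    if (args.corsOrigins && args.corsOrigins.length > 0) {
      src.cors = { origins: args.corsOrigins };
    }
    return src;
  },
  worker: () => ({ type: "binding", format: "json" }),
};

// Unknown source names are silently skipped; an empty result still triggers the
// "No sources have been enabled" error shown below.
const sources = args.source.map((s) => sourceHandlers[s]).filter(Boolean).map((h) => h());
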
@@ -129145,25 +129251,21 @@ async function createPipelineHandler(args) {
129145
129251
  if (args.r2Prefix) {
129146
129252
  pipelineConfig.destination.path.prefix = args.r2Prefix;
129147
129253
  }
129148
- if (args.partitionTemplate) {
129149
- pipelineConfig.destination.path.filepath = args.partitionTemplate;
129150
- }
129151
- if (args.fileTemplate) {
129152
- pipelineConfig.destination.path.filename = args.fileTemplate;
129153
- }
129154
129254
  if (args.shardCount) {
129155
129255
  pipelineConfig.metadata.shards = args.shardCount;
129156
129256
  }
129157
129257
  logger.log(`\u{1F300} Creating Pipeline named "${name2}"`);
129158
129258
  const pipeline = await createPipeline(accountId, pipelineConfig);
129159
129259
  logger.log(
129160
- `\u2705 Successfully created Pipeline "${pipeline.name}" with id ${pipeline.id}`
129260
+ `\u2705 Successfully created Pipeline "${pipeline.name}" with ID ${pipeline.id}
129261
+ `
129161
129262
  );
129263
+ logger.log(formatPipelinePretty(pipeline));
129162
129264
  logger.log("\u{1F389} You can now send data to your Pipeline!");
129163
- if (args.enableWorkerBinding) {
129265
+ if (args.source.includes("worker")) {
129164
129266
  logger.log(
129165
129267
  `
129166
- To start interacting with this Pipeline from a Worker, open your Worker\u2019s config file and add the following binding configuration:
129268
+ To send data to your pipeline from a Worker, add the following to your wrangler config file:
129167
129269
  `
129168
129270
  );
129169
129271
  logger.log(
@@ -129180,11 +129282,11 @@ To start interacting with this Pipeline from a Worker, open your Worker\u2019s c
129180
129282
  )
129181
129283
  );
129182
129284
  }
129183
- if (args.enableHttp) {
129285
+ if (args.source.includes("http")) {
129184
129286
  logger.log(`
129185
129287
  Send data to your Pipeline's HTTP endpoint:
129186
129288
  `);
129187
- logger.log(` curl "${pipeline.endpoint}" -d '[{"foo": "bar"}]'
129289
+ logger.log(`curl "${pipeline.endpoint}" -d '[{"foo": "bar"}]'
129188
129290
  `);
129189
129291
  }
129190
129292
  }
@@ -129212,6 +129314,46 @@ async function deletePipelineHandler(args) {
129212
129314
  }
129213
129315
  __name(deletePipelineHandler, "deletePipelineHandler");
129214
129316
 
129317
+ // src/pipelines/cli/get.ts
129318
+ init_import_meta_url();
129319
+ function addGetOptions(yargs) {
129320
+ return yargs.positional("pipeline", {
129321
+ type: "string",
129322
+ describe: "The name of the Pipeline to show",
129323
+ demandOption: true
129324
+ }).option("format", {
129325
+ type: "string",
129326
+ describe: "The output format for pipeline",
129327
+ default: "pretty",
129328
+ demandOption: false,
129329
+ coerce: /* @__PURE__ */ __name((value) => {
129330
+ const formats = ["pretty", "json"];
129331
+ if (!formats.includes(value)) {
129332
+ throw new UserError(`Unknown format value: ${value}`);
129333
+ }
129334
+ return value;
129335
+ }, "coerce")
129336
+ });
129337
+ }
129338
+ __name(addGetOptions, "addGetOptions");
129339
+ async function getPipelineHandler(args) {
129340
+ await printWranglerBanner();
129341
+ const config = readConfig(args);
129342
+ const accountId = await requireAuth(config);
129343
+ const name2 = args.pipeline;
129344
+ validateName("pipeline name", name2);
129345
+ const pipeline = await getPipeline(accountId, name2);
129346
+ switch (args.format) {
129347
+ case "json":
129348
+ logger.log(JSON.stringify(pipeline, null, 2));
129349
+ break;
129350
+ case "pretty":
129351
+ logger.log(formatPipelinePretty(pipeline));
129352
+ break;
129353
+ }
129354
+ }
129355
+ __name(getPipelineHandler, "getPipelineHandler");
129356
+
129215
129357
  // src/pipelines/cli/list.ts
129216
129358
  init_import_meta_url();
129217
129359
  async function listPipelinesHandler(args) {
@@ -129229,28 +129371,6 @@ async function listPipelinesHandler(args) {
129229
129371
  }
129230
129372
  __name(listPipelinesHandler, "listPipelinesHandler");
129231
129373
 
129232
- // src/pipelines/cli/show.ts
129233
- init_import_meta_url();
129234
- function addShowOptions(yargs) {
129235
- return yargs.positional("pipeline", {
129236
- type: "string",
129237
- describe: "The name of the Pipeline to show",
129238
- demandOption: true
129239
- });
129240
- }
129241
- __name(addShowOptions, "addShowOptions");
129242
- async function showPipelineHandler(args) {
129243
- await printWranglerBanner();
129244
- const config = readConfig(args);
129245
- const accountId = await requireAuth(config);
129246
- const name2 = args.pipeline;
129247
- validateName("pipeline name", name2);
129248
- logger.log(`Retrieving config for Pipeline "${name2}".`);
129249
- const pipeline = await getPipeline(accountId, name2);
129250
- logger.log(JSON.stringify(pipeline, null, 2));
129251
- }
129252
- __name(showPipelineHandler, "showPipelineHandler");
129253
-
129254
129374
  // src/pipelines/cli/update.ts
129255
129375
  init_import_meta_url();
129256
129376
  function addUpdateOptions(yargs) {
@@ -129264,20 +129384,11 @@ function addUpdateOptions(yargs) {
129264
129384
  demandOption: false
129265
129385
  // Not required for updates.
129266
129386
  }).group(
129267
- [
129268
- "enable-worker-binding",
129269
- "enable-http",
129270
- "require-http-auth",
129271
- "cors-origins"
129272
- ],
129387
+ ["source", "require-http-auth", "cors-origins"],
129273
129388
  `${source_default.bold("Source settings")}`
129274
- ).option("enable-worker-binding", {
129275
- type: "boolean",
129276
- describe: "Send data from a Worker to a Pipeline using a Binding",
129277
- demandOption: false
129278
- }).option("enable-http", {
129279
- type: "boolean",
129280
- describe: "Generate an endpoint to ingest data via HTTP",
129389
+ ).option("source", {
129390
+ type: "array",
129391
+ describe: "Space separated list of allowed sources. Options are 'http' or 'worker'. Setting this will remove all other existing sources.",
129281
129392
  demandOption: false
129282
129393
  }).option("require-http-auth", {
129283
129394
  type: "boolean",
@@ -129293,17 +129404,17 @@ function addUpdateOptions(yargs) {
129293
129404
  `${source_default.bold("Batch hints")}`
129294
129405
  ).option("batch-max-mb", {
129295
129406
  type: "number",
129296
- describe: "Maximum batch size in megabytes before flushing",
129407
+ describe: "Maximum batch size in megabytes before flushing. Minimum: 1, Maximum: 100",
129297
129408
  demandOption: false,
129298
129409
  coerce: validateInRange("batch-max-mb", 1, 100)
129299
129410
  }).option("batch-max-rows", {
129300
129411
  type: "number",
129301
- describe: "Maximum number of rows per batch before flushing",
129412
+ describe: "Maximum number of rows per batch before flushing. Minimum: 100, Maximum: 10,000,000",
129302
129413
  demandOption: false,
129303
- coerce: validateInRange("batch-max-rows", 100, 1e6)
129414
+ coerce: validateInRange("batch-max-rows", 100, 1e7)
129304
129415
  }).option("batch-max-seconds", {
129305
129416
  type: "number",
129306
- describe: "Maximum age of batch in seconds before flushing",
129417
+ describe: "Maximum age of batch in seconds before flushing. Minimum: 1, Maximum: 300",
129307
129418
  demandOption: false,
129308
129419
  coerce: validateInRange("batch-max-seconds", 1, 300)
129309
129420
  }).group(["transform-worker"], `${source_default.bold("Transformations")}`).option("transform-worker", {
@@ -129318,9 +129429,7 @@ function addUpdateOptions(yargs) {
129318
129429
  "r2-access-key-id",
129319
129430
  "r2-secret-access-key",
129320
129431
  "r2-prefix",
129321
- "compression",
129322
- "file-template",
129323
- "partition-template"
129432
+ "compression"
129324
129433
  ],
129325
129434
  `${source_default.bold("Destination settings")}`
129326
129435
  ).option("r2-access-key-id", {
@@ -129347,20 +129456,6 @@ function addUpdateOptions(yargs) {
129347
129456
  describe: "Compression format for output files",
129348
129457
  choices: ["none", "gzip", "deflate"],
129349
129458
  demandOption: false
129350
- }).option("partition-template", {
129351
- type: "string",
129352
- describe: "Path template for partitioned files in the bucket",
129353
- demandOption: false
129354
- }).option("file-template", {
129355
- type: "string",
129356
- describe: "Template for individual file names (must include ${slug})",
129357
- demandOption: false,
129358
- coerce: /* @__PURE__ */ __name((val2) => {
129359
- if (!val2.includes("${slug}")) {
129360
- throw new Error("filename must contain ${slug}");
129361
- }
129362
- return val2;
129363
- }, "coerce")
129364
129459
  }).group(["shard-count"], `${source_default.bold("Pipeline settings")}`).option("shard-count", {
129365
129460
  type: "number",
129366
129461
  describe: "Number of pipeline shards. More shards handle higher request volume; fewer shards produce larger output files",
@@ -129415,45 +129510,48 @@ async function updatePipelineHandler(args) {
129415
129510
  throw new FatalError("Requires a r2 secret access key");
129416
129511
  }
129417
129512
  }
129418
- if (args.enableWorkerBinding !== void 0) {
129419
- const source = pipelineConfig.source.find(
129420
- (s5) => s5.type === "binding"
129421
- );
129422
- pipelineConfig.source = pipelineConfig.source.filter(
129423
- (s5) => s5.type !== "binding"
129424
- );
129425
- if (args.enableWorkerBinding) {
129426
- pipelineConfig.source.push({
129427
- ...source,
129428
- type: "binding",
129429
- format: "json"
129430
- });
129513
+ if (args.source && args.source.length > 0) {
129514
+ const existingSources = pipelineConfig.source;
129515
+ pipelineConfig.source = [];
129516
+ const sourceHandlers = {
129517
+ http: /* @__PURE__ */ __name(() => {
129518
+ const existing = existingSources.find((s5) => s5.type === "http");
129519
+ const http5 = {
129520
+ ...existing,
129521
+ // Copy over existing properties for forwards compatibility
129522
+ type: "http",
129523
+ format: "json",
129524
+ ...args.requireHttpAuth && { authentication: args.requireHttpAuth }
129525
+ // Include only if defined
129526
+ };
129527
+ if (args.corsOrigins && args.corsOrigins.length > 0) {
129528
+ http5.cors = { origins: args.corsOrigins };
129529
+ }
129530
+ return http5;
129531
+ }, "http"),
129532
+ worker: /* @__PURE__ */ __name(() => {
129533
+ const existing = existingSources.find(
129534
+ (s5) => s5.type === "binding"
129535
+ );
129536
+ return {
129537
+ ...existing,
129538
+ // Copy over existing properties for forwards compatibility
129539
+ type: "binding",
129540
+ format: "json"
129541
+ };
129542
+ }, "worker")
129543
+ };
129544
+ for (const source of args.source) {
129545
+ const handler32 = sourceHandlers[source];
129546
+ if (handler32) {
129547
+ pipelineConfig.source.push(handler32());
129548
+ }
129431
129549
  }
129432
129550
  }
129433
- if (args.enableHttp !== void 0) {
129434
- const source = pipelineConfig.source.find((s5) => s5.type === "http");
129435
- pipelineConfig.source = pipelineConfig.source.filter(
129436
- (s5) => s5.type !== "http"
129551
+ if (pipelineConfig.source.length === 0) {
129552
+ throw new UserError(
129553
+ "No sources have been enabled. At least one source (HTTP or Worker Binding) should be enabled"
129437
129554
  );
129438
- if (args.enableHttp) {
129439
- const update = {
129440
- type: "http",
129441
- format: "json",
129442
- ...source
129443
- };
129444
- pipelineConfig.source.push(update);
129445
- }
129446
- }
129447
- const httpSource = pipelineConfig.source.find(
129448
- (s5) => s5.type === "http"
129449
- );
129450
- if (httpSource) {
129451
- if (args.requireHttpAuth) {
129452
- httpSource.authentication = args.requireHttpAuth;
129453
- }
129454
- if (args.corsOrigins && args.corsOrigins.length > 0) {
129455
- httpSource.cors = { origins: args.corsOrigins };
129456
- }
129457
129555
  }
129458
129556
  if (args.transformWorker) {
129459
129557
  if (args.transformWorker === "none") {
@@ -129465,15 +129563,20 @@ async function updatePipelineHandler(args) {
129465
129563
  if (args.r2Prefix) {
129466
129564
  pipelineConfig.destination.path.prefix = args.r2Prefix;
129467
129565
  }
129468
- if (args.partitionTemplate) {
129469
- pipelineConfig.destination.path.filepath = args.partitionTemplate;
129470
- }
129471
- if (args.fileTemplate) {
129472
- pipelineConfig.destination.path.filename = args.fileTemplate;
129473
- }
129474
129566
  if (args.shardCount) {
129475
129567
  pipelineConfig.metadata.shards = args.shardCount;
129476
129568
  }
129569
+ const httpSource = pipelineConfig.source.find(
129570
+ (s5) => s5.type === "http"
129571
+ );
129572
+ if (httpSource) {
129573
+ if (args.requireHttpAuth) {
129574
+ httpSource.authentication = args.requireHttpAuth;
129575
+ }
129576
+ if (args.corsOrigins && args.corsOrigins.length > 0) {
129577
+ httpSource.cors = { origins: args.corsOrigins };
129578
+ }
129579
+ }
129477
129580
  logger.log(`\u{1F300} Updating Pipeline "${name2}"`);
129478
129581
  const pipeline = await updatePipeline(accountId, name2, pipelineConfig);
129479
129582
  logger.log(
@@ -129576,10 +129679,10 @@ function pipelines(pipelineYargs) {
129576
129679
  (yargs) => yargs,
129577
129680
  listPipelinesHandler
129578
129681
  ).command(
129579
- "show <pipeline>",
129580
- "Show a Pipeline configuration",
129581
- addShowOptions,
129582
- showPipelineHandler
129682
+ "get <pipeline>",
129683
+ "Get a Pipeline configuration",
129684
+ addGetOptions,
129685
+ getPipelineHandler
129583
129686
  ).command(
129584
129687
  "update <pipeline>",
129585
129688
  "Update a Pipeline",
@@ -129593,6 +129696,74 @@ function pipelines(pipelineYargs) {
129593
129696
  );
129594
129697
  }
129595
129698
  __name(pipelines, "pipelines");
129699
+ function formatPipelinePretty(pipeline) {
129700
+ let buffer = "";
129701
+ const formatTypeLabels = {
129702
+ json: "JSON"
129703
+ };
129704
+ buffer += `${formatLabelledValues({
129705
+ Id: pipeline.id,
129706
+ Name: pipeline.name
129707
+ })}
129708
+ `;
129709
+ buffer += "Sources:\n";
129710
+ const httpSource = pipeline.source.find((s5) => s5.type === "http");
129711
+ if (httpSource) {
129712
+ const httpInfo = {
129713
+ Endpoint: pipeline.endpoint,
129714
+ Authentication: httpSource.authentication === true ? "on" : "off",
129715
+ ...httpSource?.cors?.origins && {
129716
+ "CORS Origins": httpSource.cors.origins.join(", ")
129717
+ },
129718
+ Format: formatTypeLabels[httpSource.format]
129719
+ };
129720
+ buffer += " HTTP:\n";
129721
+ buffer += `${formatLabelledValues(httpInfo, { indentationCount: 4 })}
129722
+ `;
129723
+ }
129724
+ const bindingSource = pipeline.source.find((s5) => s5.type === "binding");
129725
+ if (bindingSource) {
129726
+ const bindingInfo = {
129727
+ Format: formatTypeLabels[bindingSource.format]
129728
+ };
129729
+ buffer += " Worker:\n";
129730
+ buffer += `${formatLabelledValues(bindingInfo, { indentationCount: 4 })}
129731
+ `;
129732
+ }
129733
+ const destinationInfo = {
129734
+ Type: pipeline.destination.type.toUpperCase(),
129735
+ Bucket: pipeline.destination.path.bucket,
129736
+ Format: "newline-delimited JSON",
129737
+ // TODO: Make dynamic once we support more output formats
129738
+ ...pipeline.destination.path.prefix && {
129739
+ Prefix: pipeline.destination.path.prefix
129740
+ },
129741
+ ...pipeline.destination.compression.type && {
129742
+ Compression: pipeline.destination.compression.type.toUpperCase()
129743
+ }
129744
+ };
129745
+ buffer += "Destination:\n";
129746
+ buffer += `${formatLabelledValues(destinationInfo, { indentationCount: 2 })}
129747
+ `;
129748
+ const batchHints = {
129749
+ ...pipeline.destination.batch.max_bytes && {
129750
+ "Max bytes": prettyBytes(pipeline.destination.batch.max_bytes)
129751
+ },
129752
+ ...pipeline.destination.batch.max_duration_s && {
129753
+ "Max duration": `${pipeline.destination.batch.max_duration_s?.toLocaleString()} seconds`
129754
+ },
129755
+ ...pipeline.destination.batch.max_rows && {
129756
+ "Max records": pipeline.destination.batch.max_rows?.toLocaleString()
129757
+ }
129758
+ };
129759
+ if (Object.keys(batchHints).length > 0) {
129760
+ buffer += " Batch hints:\n";
129761
+ buffer += `${formatLabelledValues(batchHints, { indentationCount: 4 })}
129762
+ `;
129763
+ }
129764
+ return buffer;
129765
+ }
129766
+ __name(formatPipelinePretty, "formatPipelinePretty");
129596
129767
 
129597
129768
  // src/pubsub/pubsub-commands.ts
129598
129769
  init_import_meta_url();
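
formatPipelinePretty above leans on the conditional-spread idiom to omit fields that are absent. A two-line reminder of how that works, with an invented bucket name:

const prefix = "";
const destinationInfo = { Bucket: "my-bucket", ...(prefix && { Prefix: prefix }) };
// Spreading a falsy value adds nothing, so destinationInfo is { Bucket: "my-bucket" }.
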
@@ -131159,10 +131330,11 @@ var r2BucketCatalogEnableCommand = createCommand({
131159
131330
  const response = await enableR2Catalog(accountId, args.bucket);
131160
131331
  let catalogHost;
131161
131332
  const env6 = getCloudflareApiEnvironmentFromEnv();
131333
+ const path69 = response.name.replace("_", "/");
131162
131334
  if (env6 === "staging") {
131163
- catalogHost = `https://catalog-staging.cloudflarestorage.com/${response.name}`;
131335
+ catalogHost = `https://catalog-staging.cloudflarestorage.com/${path69}`;
131164
131336
  } else {
131165
- catalogHost = `https://catalog.cloudflarestorage.com/${response.name}`;
131337
+ catalogHost = `https://catalog.cloudflarestorage.com/${path69}`;
131166
131338
  }
131167
131339
  logger.log(
131168
131340
  `\u2728 Successfully enabled data catalog on bucket '${args.bucket}'.
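
The catalog host construction in this hunk and the next now derives the URL path via name.replace("_", "/"). With a string pattern, String.prototype.replace only substitutes the first match, so only the first underscore (presumably the separator between the two name components) becomes a slash:

// Invented warehouse name: only the first underscore is replaced.
"abc123_my_data_bucket".replace("_", "/"); // => "abc123/my_data_bucket"
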
@@ -131237,10 +131409,11 @@ var r2BucketCatalogGetCommand = createCommand({
131237
131409
  const catalog = await getR2Catalog(accountId, args.bucket);
131238
131410
  const env6 = getCloudflareApiEnvironmentFromEnv();
131239
131411
  let catalogHost;
131412
+ const path69 = catalog.name.replace("_", "/");
131240
131413
  if (env6 === "staging") {
131241
- catalogHost = `https://catalog-staging.cloudflarestorage.com/${catalog.name}`;
131414
+ catalogHost = `https://catalog-staging.cloudflarestorage.com/${path69}`;
131242
131415
  } else {
131243
- catalogHost = `https://catalog.cloudflarestorage.com/${catalog.name}`;
131416
+ catalogHost = `https://catalog.cloudflarestorage.com/${path69}`;
131244
131417
  }
131245
131418
  const output = {
131246
131419
  "Catalog URI": catalogHost,
@@ -133373,6 +133546,7 @@ var secretsStoreSecretNamespace = createNamespace({
133373
133546
 
133374
133547
  // src/secrets-store/commands.ts
133375
133548
  init_import_meta_url();
133549
+ var import_miniflare20 = require("miniflare");
133376
133550
 
133377
133551
  // src/secrets-store/client.ts
133378
133552
  init_import_meta_url();
@@ -133462,6 +133636,27 @@ async function duplicateSecret(accountId, storeId, secretId, body) {
133462
133636
  __name(duplicateSecret, "duplicateSecret");
133463
133637
 
133464
133638
  // src/secrets-store/commands.ts
133639
+ async function usingLocalSecretsStoreSecretAPI(persistTo, config, storeId, secretName, closure) {
133640
+ const persist = getLocalPersistencePath(persistTo, config);
133641
+ const persistOptions = buildPersistOptions(persist);
133642
+ const mf = new import_miniflare20.Miniflare({
133643
+ script: 'addEventListener("fetch", (e) => e.respondWith(new Response(null, { status: 404 })))',
133644
+ ...persistOptions,
133645
+ secretsStoreSecrets: {
133646
+ SECRET: {
133647
+ store_id: storeId,
133648
+ secret_name: secretName
133649
+ }
133650
+ }
133651
+ });
133652
+ const namespace = await mf.getSecretsStoreSecretAPI("SECRET");
133653
+ try {
133654
+ return await closure(namespace());
133655
+ } finally {
133656
+ await mf.dispose();
133657
+ }
133658
+ }
133659
+ __name(usingLocalSecretsStoreSecretAPI, "usingLocalSecretsStoreSecretAPI");
133465
133660
  var secretsStoreStoreCreateCommand = createCommand({
133466
133661
  metadata: {
133467
133662
  description: "Create a store within an account",
@@ -133489,8 +133684,10 @@ var secretsStoreStoreCreateCommand = createCommand({
133489
133684
  const accountId = config.account_id || await getAccountId();
133490
133685
  store = await createStore(accountId, { name: args.name });
133491
133686
  } else {
133492
- logger.log(`Local mode enabled, this command is a no-op.`);
133493
- return;
133687
+ throw new UserError(
133688
+ "Local secrets stores are automatically created for you on use. To create a Secrets Store on your account, use the --remote flag.",
133689
+ { telemetryMessage: true }
133690
+ );
133494
133691
  }
133495
133692
  logger.log(`\u2705 Created store! (Name: ${args.name}, ID: ${store.id})`);
133496
133693
  }
@@ -133521,8 +133718,10 @@ var secretsStoreStoreDeleteCommand = createCommand({
133521
133718
  const accountId = config.account_id || await getAccountId();
133522
133719
  await deleteStore(accountId, args.storeId);
133523
133720
  } else {
133524
- logger.log(`Local mode enabled, this command is a no-op.`);
133525
- return;
133721
+ throw new UserError(
133722
+ "This command is not supported in local mode. Use `wrangler <cmd> --remote` to delete a Secrets Store from your account.",
133723
+ { telemetryMessage: true }
133724
+ );
133526
133725
  }
133527
133726
  logger.log(`\u2705 Deleted store! (ID: ${args.storeId})`);
133528
133727
  }
@@ -133561,7 +133760,7 @@ var secretsStoreStoreListCommand = createCommand({
133561
133760
  stores = await listStores(accountId, urlParams);
133562
133761
  } else {
133563
133762
  throw new UserError(
133564
- "No local dev version of this command available, need to include --remote in command",
133763
+ "This command is not supported in local mode. Use `wrangler <cmd> --remote` to list Secrets Stores on your account.",
133565
133764
  { telemetryMessage: true }
133566
133765
  );
133567
133766
  }
@@ -133609,6 +133808,10 @@ var secretsStoreSecretListCommand = createCommand({
133609
133808
  type: "boolean",
133610
133809
  description: "Execute command against remote Secrets Store",
133611
133810
  default: false
133811
+ },
133812
+ "persist-to": {
133813
+ type: "string",
133814
+ describe: "Directory for local persistence"
133612
133815
  }
133613
133816
  },
133614
133817
  async handler(args, { config }) {
@@ -133623,10 +133826,23 @@ var secretsStoreSecretListCommand = createCommand({
133623
133826
  const accountId = config.account_id || await getAccountId();
133624
133827
  secrets = await listSecrets(accountId, args.storeId, urlParams);
133625
133828
  } else {
133626
- throw new UserError(
133627
- "No local dev version of this command available, need to include --remote in command",
133628
- { telemetryMessage: true }
133629
- );
133829
+ secrets = (await usingLocalSecretsStoreSecretAPI(
133830
+ args.persistTo,
133831
+ config,
133832
+ args.storeId,
133833
+ "",
133834
+ (api) => api.list()
133835
+ )).map((key) => ({
133836
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
133837
+ id: key.metadata.uuid,
133838
+ store_id: args.storeId,
133839
+ name: key.name,
133840
+ comment: "",
133841
+ scopes: [],
133842
+ created: (/* @__PURE__ */ new Date()).toISOString(),
133843
+ modified: (/* @__PURE__ */ new Date()).toISOString(),
133844
+ status: "active"
133845
+ }));
133630
133846
  }
133631
133847
  if (secrets.length === 0) {
133632
133848
  throw new FatalError("List request returned no secrets.", 1, {
@@ -133670,6 +133886,10 @@ var secretsStoreSecretGetCommand = createCommand({
133670
133886
  type: "boolean",
133671
133887
  description: "Execute command against remote Secrets Store",
133672
133888
  default: false
133889
+ },
133890
+ "persist-to": {
133891
+ type: "string",
133892
+ describe: "Directory for local persistence"
133673
133893
  }
133674
133894
  },
133675
133895
  async handler(args, { config }) {
@@ -133679,10 +133899,23 @@ var secretsStoreSecretGetCommand = createCommand({
133679
133899
  const accountId = config.account_id || await getAccountId();
133680
133900
  secret2 = await getSecret(accountId, args.storeId, args.secretId);
133681
133901
  } else {
133682
- throw new UserError(
133683
- "No local dev version of this command available, need to include --remote in command",
133684
- { telemetryMessage: true }
133902
+ const name2 = await usingLocalSecretsStoreSecretAPI(
133903
+ args.persistTo,
133904
+ config,
133905
+ args.storeId,
133906
+ "",
133907
+ (api) => api.get(args.secretId)
133685
133908
  );
133909
+ secret2 = {
133910
+ id: args.secretId,
133911
+ store_id: args.storeId,
133912
+ name: name2,
133913
+ comment: "",
133914
+ scopes: [],
133915
+ created: (/* @__PURE__ */ new Date()).toISOString(),
133916
+ modified: (/* @__PURE__ */ new Date()).toISOString(),
133917
+ status: "active"
133918
+ };
133686
133919
  }
133687
133920
  const prettierSecret = [
133688
133921
  {
@@ -133737,6 +133970,10 @@ var secretsStoreSecretCreateCommand = createCommand({
133737
133970
  type: "boolean",
133738
133971
  description: "Execute command against remote Secrets Store",
133739
133972
  default: false
133973
+ },
133974
+ "persist-to": {
133975
+ type: "string",
133976
+ describe: "Directory for local persistence"
133740
133977
  }
133741
133978
  },
133742
133979
  async handler(args, { config }) {
@@ -133766,10 +134003,24 @@ var secretsStoreSecretCreateCommand = createCommand({
133766
134003
  comment: args.comment
133767
134004
  });
133768
134005
  } else {
133769
- throw new UserError(
133770
- "No local dev version of this command available, need to include --remote in command",
133771
- { telemetryMessage: true }
133772
- );
134006
+ secrets = [
134007
+ await usingLocalSecretsStoreSecretAPI(
134008
+ args.persistTo,
134009
+ config,
134010
+ args.storeId,
134011
+ args.name,
134012
+ (api) => api.create(secretValue)
134013
+ )
134014
+ ].map((id) => ({
134015
+ id,
134016
+ store_id: args.storeId,
134017
+ name: args.name,
134018
+ comment: args.comment ?? "",
134019
+ scopes: args.scopes.split(","),
134020
+ created: (/* @__PURE__ */ new Date()).toISOString(),
134021
+ modified: (/* @__PURE__ */ new Date()).toISOString(),
134022
+ status: "pending"
134023
+ }));
133773
134024
  }
133774
134025
  if (secrets.length === 0) {
133775
134026
  throw new FatalError("Failed to create a secret.", 1, {
@@ -133829,6 +134080,10 @@ var secretsStoreSecretUpdateCommand = createCommand({
133829
134080
  type: "boolean",
133830
134081
  description: "Execute command against remote Secrets Store",
133831
134082
  default: false
134083
+ },
134084
+ "persist-to": {
134085
+ type: "string",
134086
+ describe: "Directory for local persistence"
133832
134087
  }
133833
134088
  },
133834
134089
  async handler(args, { config }) {
@@ -133862,10 +134117,23 @@ var secretsStoreSecretUpdateCommand = createCommand({
133862
134117
  ...args.comment && { comment: args.comment }
133863
134118
  });
133864
134119
  } else {
133865
- throw new UserError(
133866
- "No local dev version of this command available, need to include --remote in command",
133867
- { telemetryMessage: true }
134120
+ const name2 = await usingLocalSecretsStoreSecretAPI(
134121
+ args.persistTo,
134122
+ config,
134123
+ args.storeId,
134124
+ "",
134125
+ (api) => api.update(secretValue, args.secretId)
133868
134126
  );
134127
+ secret2 = {
134128
+ id: args.secretId,
134129
+ store_id: args.storeId,
134130
+ name: name2,
134131
+ comment: "",
134132
+ scopes: [],
134133
+ created: (/* @__PURE__ */ new Date()).toISOString(),
134134
+ modified: (/* @__PURE__ */ new Date()).toISOString(),
134135
+ status: "active"
134136
+ };
133869
134137
  }
133870
134138
  logger.log(`\u2705 Updated secret! (ID: ${secret2.id})`);
133871
134139
  const prettierSecret = [
@@ -133907,6 +134175,10 @@ var secretsStoreSecretDeleteCommand = createCommand({
133907
134175
  type: "boolean",
133908
134176
  description: "Execute command against remote Secrets Store",
133909
134177
  default: false
134178
+ },
134179
+ "persist-to": {
134180
+ type: "string",
134181
+ describe: "Directory for local persistence"
133910
134182
  }
133911
134183
  },
133912
134184
  async handler(args, { config }) {
@@ -133915,9 +134187,12 @@ var secretsStoreSecretDeleteCommand = createCommand({
133915
134187
  const accountId = config.account_id || await getAccountId();
133916
134188
  await deleteSecret(accountId, args.storeId, args.secretId);
133917
134189
  } else {
133918
- throw new UserError(
133919
- "No local dev version of this command available, need to include --remote in command",
133920
- { telemetryMessage: true }
134190
+ await usingLocalSecretsStoreSecretAPI(
134191
+ args.persistTo,
134192
+ config,
134193
+ args.storeId,
134194
+ "",
134195
+ (api) => api.delete(args.secretId)
133921
134196
  );
133922
134197
  }
133923
134198
  logger.log(`\u2705 Deleted secret! (ID: ${args.secretId})`);
@@ -133963,6 +134238,10 @@ var secretsStoreSecretDuplicateCommand = createCommand({
133963
134238
  type: "boolean",
133964
134239
  description: "Execute command against remote Secrets Store",
133965
134240
  default: false
134241
+ },
134242
+ "persist-to": {
134243
+ type: "string",
134244
+ describe: "Directory for local persistence"
133966
134245
  }
133967
134246
  },
133968
134247
  async handler(args, { config }) {
@@ -133981,10 +134260,23 @@ var secretsStoreSecretDuplicateCommand = createCommand({
133981
134260
  }
133982
134261
  );
133983
134262
  } else {
133984
- throw new UserError(
133985
- "No local dev version of this command available, need to include --remote in command",
133986
- { telemetryMessage: true }
134263
+ const duplicatedSecretId = await usingLocalSecretsStoreSecretAPI(
134264
+ args.persistTo,
134265
+ config,
134266
+ args.storeId,
134267
+ "",
134268
+ (api) => api.duplicate(args.secretId, args.name)
133987
134269
  );
134270
+ duplicatedSecret = {
134271
+ id: duplicatedSecretId,
134272
+ store_id: args.storeId,
134273
+ name: args.name,
134274
+ comment: "",
134275
+ scopes: [],
134276
+ created: (/* @__PURE__ */ new Date()).toISOString(),
134277
+ modified: (/* @__PURE__ */ new Date()).toISOString(),
134278
+ status: "active"
134279
+ };
133988
134280
  }
133989
134281
  logger.log(`\u2705 Duplicated secret! (ID: ${duplicatedSecret.id})`);
133990
134282
  const prettierSecret = [
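
Note: the secrets-store hunks above replace the old "No local dev version of this command available" UserError in the update, delete, and duplicate handlers with a local code path. Each command gains a "persist-to" string option and delegates to usingLocalSecretsStoreSecretAPI(args.persistTo, config, args.storeId, "", callback), then synthesizes the metadata the remote API would return (empty comment and scopes, fresh timestamps, status "active"). The TypeScript sketch below mirrors that call shape with an in-memory stand-in for the persisted store; the helper's real implementation lives elsewhere in this bundle, so treat the body (the Map, crypto.randomUUID, the error messages) as illustrative only.

    // Sketch of the local fallback pattern; an in-memory Map stands in for the
    // Miniflare-backed persistence wrangler keeps under --persist-to.
    type LocalSecretsAPI = {
      update(value: string, secretId: string): Promise<string>;      // resolves to the secret's name (see the update hunk)
      delete(secretId: string): Promise<void>;
      duplicate(secretId: string, newName: string): Promise<string>; // resolves to the new secret's ID (see the duplicate hunk)
    };

    const memoryStore = new Map<string, { name: string; value: string }>();

    async function usingLocalSecretsStoreSecretAPI<T>(
      persistTo: string | undefined, // --persist-to; ignored by this in-memory stand-in
      config: unknown,
      storeId: string,
      storeName: string,
      closure: (api: LocalSecretsAPI) => Promise<T>
    ): Promise<T> {
      const api: LocalSecretsAPI = {
        async update(value, secretId) {
          const existing = memoryStore.get(secretId);
          if (!existing) throw new Error(`No secret ${secretId} in store ${storeId}`);
          memoryStore.set(secretId, { ...existing, value });
          return existing.name;
        },
        async delete(secretId) {
          memoryStore.delete(secretId);
        },
        async duplicate(secretId, newName) {
          const existing = memoryStore.get(secretId);
          if (!existing) throw new Error(`No secret ${secretId} in store ${storeId}`);
          const newId = crypto.randomUUID();
          memoryStore.set(newId, { name: newName, value: existing.value });
          return newId;
        },
      };
      return closure(api);
    }
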
@@ -143122,10 +143414,10 @@ var disabledDefaultIntegrations = [
143122
143414
  // Request data to Wrangler's HTTP servers may contain PII
143123
143415
  ];
143124
143416
  function setupSentry() {
143125
- if (true) {
143417
+ if (typeof SENTRY_DSN !== "undefined") {
143126
143418
  init({
143127
143419
  release: `wrangler@${version}`,
143128
- dsn: "https://9edbb8417b284aa2bbead9b4c318918b@sentry10.cfdata.org/583",
143420
+ dsn: SENTRY_DSN,
143129
143421
  transport: makeSentry10Transport,
143130
143422
  integrations(defaultIntegrations2) {
143131
143423
  return defaultIntegrations2.filter(
@@ -143157,7 +143449,7 @@ function setupSentry() {
143157
143449
  }
143158
143450
  __name(setupSentry, "setupSentry");
143159
143451
  function addBreadcrumb2(message, level = "log") {
143160
- if (true) {
143452
+ if (typeof SENTRY_DSN !== "undefined") {
143161
143453
  addBreadcrumb({
143162
143454
  message,
143163
143455
  level
@@ -143166,7 +143458,7 @@ function addBreadcrumb2(message, level = "log") {
143166
143458
  }
143167
143459
  __name(addBreadcrumb2, "addBreadcrumb");
143168
143460
  async function captureGlobalException(e7) {
143169
- if (true) {
143461
+ if (typeof SENTRY_DSN !== "undefined") {
143170
143462
  sentryReportingAllowed = await confirm(
143171
143463
  "Would you like to report this error to Cloudflare? Wrangler's output and the error details will be shared with the Wrangler team to help us diagnose and fix the issue.",
143172
143464
  { fallbackValue: false }
@@ -143181,7 +143473,7 @@ async function captureGlobalException(e7) {
143181
143473
  }
143182
143474
  __name(captureGlobalException, "captureGlobalException");
143183
143475
  async function closeSentry() {
143184
- if (true) {
143476
+ if (typeof SENTRY_DSN !== "undefined") {
143185
143477
  await close();
143186
143478
  }
143187
143479
  }
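
Note: the four Sentry hunks above replace unconditional "if (true)" blocks and a hard-coded DSN with guards on a free SENTRY_DSN identifier, so error reporting only activates when that constant is defined at build time. The sketch below shows the guard pattern; the assumption that the constant is injected by the bundler (a define/replace step) is mine and is not shown in this diff.

    // Sketch of the build-time-constant guard. "declare const" tells TypeScript the
    // identifier may exist; the typeof check keeps the code safe when it does not.
    import { init, addBreadcrumb, close } from "@sentry/node";

    declare const SENTRY_DSN: string | undefined;

    export function setupSentrySketch(): void {
      if (typeof SENTRY_DSN !== "undefined") {
        init({ dsn: SENTRY_DSN }); // wrangler also sets release, transport, and integrations
      }
    }

    export function recordBreadcrumb(message: string): void {
      if (typeof SENTRY_DSN !== "undefined") {
        addBreadcrumb({ message, level: "log" });
      }
    }

    export async function closeSentrySketch(): Promise<void> {
      if (typeof SENTRY_DSN !== "undefined") {
        await close();
      }
    }
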
@@ -143448,7 +143740,7 @@ init_import_meta_url();
143448
143740
  var import_node_crypto13 = require("node:crypto");
143449
143741
  var fs22 = __toESM(require("node:fs"));
143450
143742
  var import_node_path56 = require("node:path");
143451
- var import_miniflare21 = require("miniflare");
143743
+ var import_miniflare23 = require("miniflare");
143452
143744
 
143453
143745
  // src/dev/dev-vars.ts
143454
143746
  init_import_meta_url();
@@ -143494,7 +143786,7 @@ __name(isProcessEnvPopulated, "isProcessEnvPopulated");
143494
143786
  init_import_meta_url();
143495
143787
  var import_fs22 = require("fs");
143496
143788
  var import_promises32 = require("fs/promises");
143497
- var import_miniflare20 = require("miniflare");
143789
+ var import_miniflare22 = require("miniflare");
143498
143790
  var import_workerd = require("workerd");
143499
143791
  var DEFAULT_OUTFILE_RELATIVE_PATH = "worker-configuration.d.ts";
143500
143792
  async function generateRuntimeTypes({
@@ -143540,7 +143832,7 @@ async function generate({
143540
143832
  compatibilityFlags = []
143541
143833
  }) {
143542
143834
  const worker = (0, import_fs22.readFileSync)(require.resolve("workerd/worker.mjs")).toString();
143543
- const mf = new import_miniflare20.Miniflare({
143835
+ const mf = new import_miniflare22.Miniflare({
143544
143836
  compatibilityDate: "2024-01-01",
143545
143837
  compatibilityFlags: ["nodejs_compat", "rtti_api"],
143546
143838
  modules: true,
@@ -143762,7 +144054,7 @@ ${content.join("\n")}`,
143762
144054
  }
143763
144055
  const tsconfigPath = config.tsconfig ?? (0, import_node_path56.join)((0, import_node_path56.dirname)(config.configPath), "tsconfig.json");
143764
144056
  const tsconfigTypes = readTsconfigTypes(tsconfigPath);
143765
- const { mode } = (0, import_miniflare21.getNodeCompat)(
144057
+ const { mode } = (0, import_miniflare23.getNodeCompat)(
143766
144058
  config.compatibility_date,
143767
144059
  config.compatibility_flags
143768
144060
  );
@@ -143847,7 +144139,8 @@ async function generateEnvTypes(config, args, envInterface, outputPath, entrypoi
143847
144139
  secrets,
143848
144140
  assets: config.assets,
143849
144141
  workflows: config.workflows,
143850
- pipelines: config.pipelines
144142
+ pipelines: config.pipelines,
144143
+ secrets_store_secrets: config.secrets_store_secrets
143851
144144
  };
143852
144145
  const entrypointFormat = entrypoint?.format ?? "modules";
143853
144146
  const fullOutputPath = (0, import_node_path56.resolve)(outputPath);
@@ -143909,6 +144202,14 @@ async function generateEnvTypes(config, args, envInterface, outputPath, entrypoi
143909
144202
  envTypeStructure.push([constructTypeKey(d12.binding), "D1Database"]);
143910
144203
  }
143911
144204
  }
144205
+ if (configToDTS.secrets_store_secrets) {
144206
+ for (const secretsStoreSecret of configToDTS.secrets_store_secrets) {
144207
+ envTypeStructure.push([
144208
+ constructTypeKey(secretsStoreSecret.binding),
144209
+ "SecretsStoreSecret"
144210
+ ]);
144211
+ }
144212
+ }
143912
144213
  if (configToDTS.services) {
143913
144214
  for (const service of configToDTS.services) {
143914
144215
  envTypeStructure.push([constructTypeKey(service.binding), "Fetcher"]);
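
Note: the two hunks above thread secrets_store_secrets through "wrangler types": the config array is copied into the structure handed to the generator, and each entry contributes a SecretsStoreSecret member to the generated Env interface. With a hypothetical binding named APP_API_KEY, the relevant fragment of the generated worker-configuration.d.ts would presumably look like the sketch below (illustrative output, not copied from the package).

    // Produced from a secrets_store_secrets entry whose binding is "APP_API_KEY" (hypothetical name).
    interface Env {
      APP_API_KEY: SecretsStoreSecret;
    }
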
@@ -144289,7 +144590,7 @@ async function printMembershipInfo(user, accountFilter) {
144289
144590
  } catch (e7) {
144290
144591
  if (isAuthenticationError(e7)) {
144291
144592
  logger.log(
144292
- `\u{1F3A2} Unable to get membership roles. Make sure you have permissions to read the account.`
144593
+ `\u{1F3A2} Unable to get membership roles. Make sure you have permissions to read the account. Are you missing the \`User->Memberships->Read\` permission?`
144293
144594
  );
144294
144595
  return;
144295
144596
  } else {
@@ -150042,9 +150343,13 @@ function createCLIParser(argv) {
150042
150343
  }
150043
150344
  ]);
150044
150345
  registry.registerNamespace("workflows");
150045
- wrangler.command("pipelines", false, (pipelinesYargs) => {
150046
- return pipelines(pipelinesYargs.command(subHelp));
150047
- });
150346
+ wrangler.command(
150347
+ "pipelines",
150348
+ `\u{1F6B0} Manage Worker Pipelines ${source_default.hex(betaCmdColor)("[open beta]")}`,
150349
+ (pipelinesYargs) => {
150350
+ return pipelines(pipelinesYargs.command(subHelp));
150351
+ }
150352
+ );
150048
150353
  registry.define([
150049
150354
  {
150050
150355
  command: "wrangler login",
@@ -150423,7 +150728,7 @@ async function analyseBundle(workerBundle) {
150423
150728
  "`wrangler check startup` does not support service-worker format Workers. Refer to https://developers.cloudflare.com/workers/reference/migrate-to-module-workers/ for migration guidance."
150424
150729
  );
150425
150730
  }
150426
- const mf = new import_miniflare22.Miniflare({
150731
+ const mf = new import_miniflare24.Miniflare({
150427
150732
  name: "profiler",
150428
150733
  compatibilityDate: metadata.compatibility_date,
150429
150734
  compatibilityFlags: metadata.compatibility_flags,
@@ -151903,7 +152208,7 @@ init_import_meta_url();
151903
152208
  // src/cli-hotkeys.ts
151904
152209
  init_import_meta_url();
151905
152210
  var import_readline = __toESM(require("readline"));
151906
- var import_miniflare23 = require("miniflare");
152211
+ var import_miniflare25 = require("miniflare");
151907
152212
 
151908
152213
  // src/utils/onKeyPress.ts
151909
152214
  init_import_meta_url();
@@ -151997,16 +152302,16 @@ function cli_hotkeys_default(options33) {
151997
152302
  __name(printInstructions, "printInstructions");
151998
152303
  Logger.registerBeforeLogHook(clearPreviousInstructions);
151999
152304
  Logger.registerAfterLogHook(printInstructions);
152000
- import_miniflare23.Log.unstable_registerBeforeLogHook(clearPreviousInstructions);
152001
- import_miniflare23.Log.unstable_registerAfterLogHook(printInstructions);
152305
+ import_miniflare25.Log.unstable_registerBeforeLogHook(clearPreviousInstructions);
152306
+ import_miniflare25.Log.unstable_registerAfterLogHook(printInstructions);
152002
152307
  printInstructions();
152003
152308
  return () => {
152004
152309
  unregisterKeyPress();
152005
152310
  clearPreviousInstructions();
152006
152311
  Logger.registerBeforeLogHook(void 0);
152007
152312
  Logger.registerAfterLogHook(void 0);
152008
- import_miniflare23.Log.unstable_registerBeforeLogHook(void 0);
152009
- import_miniflare23.Log.unstable_registerAfterLogHook(void 0);
152313
+ import_miniflare25.Log.unstable_registerBeforeLogHook(void 0);
152314
+ import_miniflare25.Log.unstable_registerAfterLogHook(void 0);
152010
152315
  };
152011
152316
  }
152012
152317
  __name(cli_hotkeys_default, "default");
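
Note: the cli-hotkeys hunk above only renumbers the miniflare import, but it shows the pattern the hotkey UI relies on: the instruction line is cleared before anything is logged (via hooks on both wrangler's Logger and miniflare's Log) and reprinted afterwards, and both hooks are unregistered on teardown. A generic sketch of that "pin a status line under streaming logs" idea follows, independent of the miniflare API; the helper and its names are illustrative.

    // Keep a one-line status pinned beneath interleaved log output.
    function createPinnedStatus(render: () => string) {
      let visible = false;
      const clear = () => {
        if (visible) {
          process.stdout.write("\x1b[1A\x1b[2K"); // cursor up one line, erase it
          visible = false;
        }
      };
      const print = () => {
        process.stdout.write(render() + "\n");
        visible = true;
      };
      return { clear, print };
    }

    // Usage: clear before each log line, reprint after, so the status stays at the bottom.
    const status = createPinnedStatus(() => "[b] open a browser, [x] to exit");
    status.print();
    status.clear();
    console.log("regular log output");
    status.print();
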
@@ -152472,6 +152777,7 @@ async function setupDevEnv(devEnv, configPath, auth, args) {
152472
152777
  d1_databases: args.d1Databases,
152473
152778
  vectorize: void 0,
152474
152779
  hyperdrive: void 0,
152780
+ secrets_store_secrets: void 0,
152475
152781
  services: args.services,
152476
152782
  analytics_engine_datasets: void 0,
152477
152783
  dispatch_namespaces: void 0,
@@ -152831,6 +153137,7 @@ function getBindings2(configParam, env6, local, args) {
152831
153137
  d1_databases: mergedD1Bindings,
152832
153138
  vectorize: configParam.vectorize,
152833
153139
  hyperdrive: hyperdriveBindings,
153140
+ secrets_store_secrets: configParam.secrets_store_secrets,
152834
153141
  services: mergedServiceBindings,
152835
153142
  analytics_engine_datasets: configParam.analytics_engine_datasets,
152836
153143
  browser: configParam.browser,
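
Note: with secrets_store_secrets now defaulted in setupDevEnv and passed through getBindings above, a config-declared Secrets Store binding should reach the Worker during "wrangler dev". The snippet below sketches the Worker-side read; the binding name is hypothetical, and the get() accessor reflects Cloudflare's documented Secrets Store binding surface rather than anything in this diff.

    // Worker-side sketch; APP_API_KEY is the same hypothetical binding as above.
    interface Env {
      APP_API_KEY: { get(): Promise<string> };
    }

    export default {
      async fetch(_request: Request, env: Env): Promise<Response> {
        const apiKey = await env.APP_API_KEY.get(); // resolves to the secret value
        return new Response(apiKey ? "secret loaded" : "secret empty");
      },
    };
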
@@ -154040,7 +154347,7 @@ var ConfigController = class extends Controller {
154040
154347
 
154041
154348
  // src/api/startDevWorker/RemoteRuntimeController.ts
154042
154349
  init_import_meta_url();
154043
- var import_miniflare26 = require("miniflare");
154350
+ var import_miniflare28 = require("miniflare");
154044
154351
 
154045
154352
  // src/dev/create-worker-preview.ts
154046
154353
  init_import_meta_url();
@@ -154397,7 +154704,7 @@ var RemoteRuntimeController = class extends RuntimeController {
154397
154704
  }
154398
154705
  #abortController = new AbortController();
154399
154706
  #currentBundleId = 0;
154400
- #mutex = new import_miniflare26.Mutex();
154707
+ #mutex = new import_miniflare28.Mutex();
154401
154708
  #session;
154402
154709
  async #previewSession(props) {
154403
154710
  try {
@@ -154792,7 +155099,7 @@ init_import_meta_url();
154792
155099
 
154793
155100
  // src/api/integrations/platform/index.ts
154794
155101
  init_import_meta_url();
154795
- var import_miniflare28 = require("miniflare");
155102
+ var import_miniflare30 = require("miniflare");
154796
155103
 
154797
155104
  // src/api/integrations/platform/caches.ts
154798
155105
  init_import_meta_url();
@@ -154859,7 +155166,7 @@ var ExecutionContext = class _ExecutionContext {
154859
155166
 
154860
155167
  // src/api/integrations/platform/services.ts
154861
155168
  init_import_meta_url();
154862
- var import_miniflare27 = require("miniflare");
155169
+ var import_miniflare29 = require("miniflare");
154863
155170
  var import_undici26 = __toESM(require_undici());
154864
155171
  async function getServiceBindings(services = []) {
154865
155172
  if (services.length === 0) {
@@ -154911,9 +155218,9 @@ function getServiceBindingProxyFetch({
154911
155218
  try {
154912
155219
  const resp = await (0, import_undici26.fetch)(newUrl, request4);
154913
155220
  const respBody = await resp.arrayBuffer();
154914
- return new import_miniflare27.Response(respBody, resp);
155221
+ return new import_miniflare29.Response(respBody, resp);
154915
155222
  } catch {
154916
- return new import_miniflare27.Response(
155223
+ return new import_miniflare29.Response(
154917
155224
  `Error: Unable to fetch from external service (${serviceName} bound with ${bindingName} binding), please make sure that the service is still running with \`wrangler dev\``,
154918
155225
  { status: 500 }
154919
155226
  );
@@ -154944,7 +155251,7 @@ async function getPlatformProxy(options33 = {}) {
154944
155251
  },
154945
155252
  () => getMiniflareOptionsFromConfig(rawConfig, env6, options33)
154946
155253
  );
154947
- const mf = new import_miniflare28.Miniflare({
155254
+ const mf = new import_miniflare30.Miniflare({
154948
155255
  script: "",
154949
155256
  modules: true,
154950
155257
  ...miniflareOptions
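
Note: the renumbered import above belongs to getPlatformProxy, which builds a Miniflare instance from the wrangler config; since the binding plumbing now includes secrets_store_secrets, the same bindings should surface on the proxied env object. A usage sketch follows (run as an ES module); the Env shape is the hypothetical one from the earlier annotations, and the configPath/persist options are my reading of wrangler's documented getPlatformProxy API, so double-check them against the docs.

    import { getPlatformProxy } from "wrangler";

    interface Env {
      APP_API_KEY: { get(): Promise<string> };
    }

    const { env, dispose } = await getPlatformProxy<Env>({
      configPath: "./wrangler.jsonc", // adjust to your project's config file
      persist: true,                  // reuse wrangler's local persistence directory
    });

    console.log("secret length:", (await env.APP_API_KEY.get()).length);
    await dispose();
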
@@ -155063,7 +155370,7 @@ function unstable_getMiniflareWorkerOptions(configOrConfigPath, env6, options33)
155063
155370
  if (bindings.services !== void 0) {
155064
155371
  bindingOptions.serviceBindings = Object.fromEntries(
155065
155372
  bindings.services.map((binding) => {
155066
- const name2 = binding.service === config.name ? import_miniflare28.kCurrentWorker : binding.service;
155373
+ const name2 = binding.service === config.name ? import_miniflare30.kCurrentWorker : binding.service;
155067
155374
  return [binding.binding, { name: name2, entrypoint: binding.entrypoint }];
155068
155375
  })
155069
155376
  );