wrangler 3.114.12 → 3.114.14

This diff shows the changes between publicly released versions of this package, as published to its public registry. It is provided for informational purposes only.
@@ -17658,7 +17658,7 @@ var require_OSPaths = __commonJS({
17658
17658
  }
17659
17659
  __name(isEmpty, "isEmpty");
17660
17660
  function Adapt(adapter_) {
17661
- var env6 = adapter_.env, os12 = adapter_.os, path72 = adapter_.path;
17661
+ var env6 = adapter_.env, os13 = adapter_.os, path72 = adapter_.path;
17662
17662
  var isWinOS = /^win/i.test(adapter_.process.platform);
17663
17663
  function normalizePath2(path_) {
17664
17664
  return path_ ? adapter_.path.normalize(adapter_.path.join(path_, ".")) : void 0;
@@ -17666,11 +17666,11 @@ var require_OSPaths = __commonJS({
17666
17666
  __name(normalizePath2, "normalizePath");
17667
17667
  function home() {
17668
17668
  var posix2 = /* @__PURE__ */ __name(function() {
17669
- return normalizePath2((typeof os12.homedir === "function" ? os12.homedir() : void 0) || env6.get("HOME"));
17669
+ return normalizePath2((typeof os13.homedir === "function" ? os13.homedir() : void 0) || env6.get("HOME"));
17670
17670
  }, "posix");
17671
17671
  var windows = /* @__PURE__ */ __name(function() {
17672
17672
  var priorityList = [
17673
- typeof os12.homedir === "function" ? os12.homedir() : void 0,
17673
+ typeof os13.homedir === "function" ? os13.homedir() : void 0,
17674
17674
  env6.get("USERPROFILE"),
17675
17675
  env6.get("HOME"),
17676
17676
  env6.get("HOMEDRIVE") || env6.get("HOMEPATH") ? path72.join(env6.get("HOMEDRIVE") || "", env6.get("HOMEPATH") || "") : void 0
@@ -17690,7 +17690,7 @@ var require_OSPaths = __commonJS({
17690
17690
  function posix2() {
17691
17691
  var fallback = "/tmp";
17692
17692
  var priorityList = [
17693
- typeof os12.tmpdir === "function" ? os12.tmpdir() : void 0,
17693
+ typeof os13.tmpdir === "function" ? os13.tmpdir() : void 0,
17694
17694
  env6.get("TMPDIR"),
17695
17695
  env6.get("TEMP"),
17696
17696
  env6.get("TMP")
@@ -17703,7 +17703,7 @@ var require_OSPaths = __commonJS({
17703
17703
  function windows() {
17704
17704
  var fallback = "C:\\Temp";
17705
17705
  var priorityListLazy = [
17706
- typeof os12.tmpdir === "function" ? os12.tmpdir : function() {
17706
+ typeof os13.tmpdir === "function" ? os13.tmpdir : function() {
17707
17707
  return void 0;
17708
17708
  },
17709
17709
  function() {
@@ -17795,7 +17795,7 @@ var require_node = __commonJS({
17795
17795
  };
17796
17796
  exports2.__esModule = true;
17797
17797
  exports2.adapter = void 0;
17798
- var os12 = __importStar(require("os"));
17798
+ var os13 = __importStar(require("os"));
17799
17799
  var path72 = __importStar(require("path"));
17800
17800
  exports2.adapter = {
17801
17801
  atImportPermissions: { env: true },
@@ -17804,7 +17804,7 @@ var require_node = __commonJS({
17804
17804
  return process.env[s5];
17805
17805
  }
17806
17806
  },
17807
- os: os12,
17807
+ os: os13,
17808
17808
  path: path72,
17809
17809
  process
17810
17810
  };
@@ -18502,7 +18502,7 @@ var require_supports_colors = __commonJS({
18502
18502
  "../../node_modules/.pnpm/@colors+colors@1.5.0/node_modules/@colors/colors/lib/system/supports-colors.js"(exports2, module3) {
18503
18503
  "use strict";
18504
18504
  init_import_meta_url();
18505
- var os12 = require("os");
18505
+ var os13 = require("os");
18506
18506
  var hasFlag3 = require_has_flag();
18507
18507
  var env6 = process.env;
18508
18508
  var forceColor = void 0;
@@ -18541,7 +18541,7 @@ var require_supports_colors = __commonJS({
18541
18541
  }
18542
18542
  var min = forceColor ? 1 : 0;
18543
18543
  if (process.platform === "win32") {
18544
- var osRelease = os12.release().split(".");
18544
+ var osRelease = os13.release().split(".");
18545
18545
  if (Number(process.versions.node.split(".")[0]) >= 8 && Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10586) {
18546
18546
  return Number(osRelease[2]) >= 14931 ? 3 : 2;
18547
18547
  }
@@ -22715,7 +22715,7 @@ var require_main2 = __commonJS({
22715
22715
  init_import_meta_url();
22716
22716
  var fs27 = require("fs");
22717
22717
  var path72 = require("path");
22718
- var os12 = require("os");
22718
+ var os13 = require("os");
22719
22719
  var crypto8 = require("crypto");
22720
22720
  var packageJson = require_package();
22721
22721
  var version4 = packageJson.version;
@@ -22821,7 +22821,7 @@ var require_main2 = __commonJS({
22821
22821
  }
22822
22822
  __name(_vaultPath, "_vaultPath");
22823
22823
  function _resolveHome(envPath) {
22824
- return envPath[0] === "~" ? path72.join(os12.homedir(), envPath.slice(1)) : envPath;
22824
+ return envPath[0] === "~" ? path72.join(os13.homedir(), envPath.slice(1)) : envPath;
22825
22825
  }
22826
22826
  __name(_resolveHome, "_resolveHome");
22827
22827
  function _configVault(options32) {
@@ -28833,14 +28833,14 @@ var require_is_wsl = __commonJS({
28833
28833
  "../../node_modules/.pnpm/is-wsl@2.2.0/node_modules/is-wsl/index.js"(exports2, module3) {
28834
28834
  "use strict";
28835
28835
  init_import_meta_url();
28836
- var os12 = require("os");
28836
+ var os13 = require("os");
28837
28837
  var fs27 = require("fs");
28838
28838
  var isDocker = require_is_docker();
28839
28839
  var isWsl = /* @__PURE__ */ __name(() => {
28840
28840
  if (process.platform !== "linux") {
28841
28841
  return false;
28842
28842
  }
28843
- if (os12.release().toLowerCase().includes("microsoft")) {
28843
+ if (os13.release().toLowerCase().includes("microsoft")) {
28844
28844
  if (isDocker()) {
28845
28845
  return false;
28846
28846
  }
@@ -29224,8 +29224,8 @@ var require_homedir = __commonJS({
29224
29224
  "../../node_modules/.pnpm/resolve@1.22.8/node_modules/resolve/lib/homedir.js"(exports2, module3) {
29225
29225
  "use strict";
29226
29226
  init_import_meta_url();
29227
- var os12 = require("os");
29228
- module3.exports = os12.homedir || /* @__PURE__ */ __name(function homedir3() {
29227
+ var os13 = require("os");
29228
+ module3.exports = os13.homedir || /* @__PURE__ */ __name(function homedir3() {
29229
29229
  var home = process.env.HOME;
29230
29230
  var user = process.env.LOGNAME || process.env.USER || process.env.LNAME || process.env.USERNAME;
29231
29231
  if (process.platform === "win32") {
@@ -53853,7 +53853,7 @@ function _supportsColor2(haveStream, { streamIsTTY, sniffFlags = true } = {}) {
53853
53853
  return min;
53854
53854
  }
53855
53855
  if (import_node_process7.default.platform === "win32") {
53856
- const osRelease = import_node_os4.default.release().split(".");
53856
+ const osRelease = import_node_os5.default.release().split(".");
53857
53857
  if (Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10586) {
53858
53858
  return Number(osRelease[2]) >= 14931 ? 3 : 2;
53859
53859
  }
@@ -53901,12 +53901,12 @@ function createSupportsColor2(stream2, options32 = {}) {
53901
53901
  });
53902
53902
  return translateLevel2(level);
53903
53903
  }
53904
- var import_node_process7, import_node_os4, import_node_tty3, env3, flagForceColor2, supportsColor2, supports_color_default2;
53904
+ var import_node_process7, import_node_os5, import_node_tty3, env3, flagForceColor2, supportsColor2, supports_color_default2;
53905
53905
  var init_supports_color = __esm({
53906
53906
  "../../node_modules/.pnpm/supports-color@9.2.2/node_modules/supports-color/index.js"() {
53907
53907
  init_import_meta_url();
53908
53908
  import_node_process7 = __toESM(require("node:process"), 1);
53909
- import_node_os4 = __toESM(require("node:os"), 1);
53909
+ import_node_os5 = __toESM(require("node:os"), 1);
53910
53910
  import_node_tty3 = __toESM(require("node:tty"), 1);
53911
53911
  __name(hasFlag2, "hasFlag");
53912
53912
  ({ env: env3 } = import_node_process7.default);
@@ -81432,7 +81432,7 @@ var source_default = chalk;
81432
81432
  // src/api/startDevWorker/MultiworkerRuntimeController.ts
81433
81433
  var import_miniflare10 = require("miniflare");
81434
81434
 
81435
- // src/dev/miniflare.ts
81435
+ // src/dev/miniflare/index.ts
81436
81436
  init_import_meta_url();
81437
81437
  var import_node_assert5 = __toESM(require("node:assert"));
81438
81438
  var import_node_crypto4 = require("node:crypto");
@@ -81450,7 +81450,7 @@ var import_undici3 = __toESM(require_undici());
81450
81450
 
81451
81451
  // package.json
81452
81452
  var name = "wrangler";
81453
- var version = "3.114.12";
81453
+ var version = "3.114.14";
81454
81454
 
81455
81455
  // src/environment-variables/misc-variables.ts
81456
81456
  init_import_meta_url();
@@ -81637,6 +81637,30 @@ var import_node_path4 = __toESM(require("node:path"));
81637
81637
  var import_miniflare = require("miniflare");
81638
81638
  var import_signal_exit = __toESM(require_signal_exit());
81639
81639
 
81640
+ // ../../node_modules/.pnpm/strip-ansi@7.1.0/node_modules/strip-ansi/index.js
81641
+ init_import_meta_url();
81642
+
81643
+ // ../../node_modules/.pnpm/ansi-regex@6.0.1/node_modules/ansi-regex/index.js
81644
+ init_import_meta_url();
81645
+ function ansiRegex({ onlyFirst = false } = {}) {
81646
+ const pattern = [
81647
+ "[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)",
81648
+ "(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))"
81649
+ ].join("|");
81650
+ return new RegExp(pattern, onlyFirst ? void 0 : "g");
81651
+ }
81652
+ __name(ansiRegex, "ansiRegex");
81653
+
81654
+ // ../../node_modules/.pnpm/strip-ansi@7.1.0/node_modules/strip-ansi/index.js
81655
+ var regex = ansiRegex();
81656
+ function stripAnsi2(string) {
81657
+ if (typeof string !== "string") {
81658
+ throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``);
81659
+ }
81660
+ return string.replace(regex, "");
81661
+ }
81662
+ __name(stripAnsi2, "stripAnsi");
81663
+
81640
81664
  // src/utils/filesystem.ts
81641
81665
  init_import_meta_url();
81642
81666
  var import_fs5 = require("fs");
@@ -81756,7 +81780,7 @@ var hasSeenErrorMessage = false;
81756
81780
  async function appendToDebugLogFile(messageLevel, message) {
81757
81781
  const entry = `
81758
81782
  --- ${(/* @__PURE__ */ new Date()).toISOString()} ${messageLevel}
81759
- ${message}
81783
+ ${stripAnsi2(message)}
81760
81784
  ---
81761
81785
  `;
81762
81786
  if (!hasLoggedLocation) {
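
The change above passes each debug-log entry through stripAnsi2 (the strip-ansi helper added earlier in this bundle) so that ANSI colour sequences from styled terminal output are not written to the on-disk debug log file. A minimal sketch of the behaviour; the sample string is illustrative only:

// stripAnsi2 removes terminal colour codes before the entry is appended to the log file.
stripAnsi2("\u001B[31mworkerd exited\u001B[39m"); // => "workerd exited"
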
@@ -83496,8 +83520,12 @@ function readFileSyncToBuffer(file) {
83496
83520
  __name(readFileSyncToBuffer, "readFileSyncToBuffer");
83497
83521
  function readFileSync6(file) {
83498
83522
  try {
83499
- return fs2.readFileSync(file, { encoding: "utf-8" });
83523
+ const buffer = fs2.readFileSync(file);
83524
+ return removeBOMAndValidate(buffer, file);
83500
83525
  } catch (err) {
83526
+ if (err instanceof ParseError) {
83527
+ throw err;
83528
+ }
83501
83529
  const { message } = err;
83502
83530
  throw new ParseError({
83503
83531
  text: `Could not read file: ${file}`,
@@ -83599,6 +83627,46 @@ function parseNonHyphenedUuid(uuid) {
83599
83627
  return hyphenated.slice(0, 36);
83600
83628
  }
83601
83629
  __name(parseNonHyphenedUuid, "parseNonHyphenedUuid");
83630
+ var UNSUPPORTED_BOMS = [
83631
+ {
83632
+ buffer: Buffer.from([0, 0, 254, 255]),
83633
+ encoding: "UTF-32 BE"
83634
+ },
83635
+ {
83636
+ buffer: Buffer.from([255, 254, 0, 0]),
83637
+ encoding: "UTF-32 LE"
83638
+ },
83639
+ {
83640
+ buffer: Buffer.from([254, 255]),
83641
+ encoding: "UTF-16 BE"
83642
+ },
83643
+ {
83644
+ buffer: Buffer.from([255, 254]),
83645
+ encoding: "UTF-16 LE"
83646
+ }
83647
+ ];
83648
+ function removeBOMAndValidate(buffer, file) {
83649
+ for (const bom of UNSUPPORTED_BOMS) {
83650
+ if (buffer.length >= bom.buffer.length && buffer.subarray(0, bom.buffer.length).equals(bom.buffer)) {
83651
+ throw new ParseError({
83652
+ text: `Configuration file contains ${bom.encoding} byte order marker`,
83653
+ notes: [
83654
+ {
83655
+ text: `The file "${file}" appears to be encoded as ${bom.encoding}. Please save the file as UTF-8 without BOM.`
83656
+ }
83657
+ ],
83658
+ location: { file, line: 1, column: 0 },
83659
+ telemetryMessage: `${bom.encoding} BOM detected`
83660
+ });
83661
+ }
83662
+ }
83663
+ const content = buffer.toString("utf-8");
83664
+ if (content.charCodeAt(0) === 65279) {
83665
+ return content.slice(1);
83666
+ }
83667
+ return content;
83668
+ }
83669
+ __name(removeBOMAndValidate, "removeBOMAndValidate");
83602
83670
 
83603
83671
  // src/user/index.ts
83604
83672
  init_import_meta_url();
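
The hunk above routes config file reading through removeBOMAndValidate: readFileSync6 now reads the raw Buffer, rejects files that begin with a UTF-16 or UTF-32 byte order marker, and strips a leading UTF-8 BOM before returning the text. A minimal sketch of the two paths; the buffers and file name are illustrative only:

// A UTF-8 BOM (EF BB BF) is tolerated: the leading U+FEFF is stripped from the decoded text.
removeBOMAndValidate(Buffer.from("\uFEFF{}", "utf-8"), "wrangler.json"); // => "{}"
// A UTF-16 LE BOM (FF FE) is rejected with a ParseError pointing at the file.
removeBOMAndValidate(Buffer.from([0xff, 0xfe, 0x7b, 0x00]), "wrangler.json");
// => throws ParseError: "Configuration file contains UTF-16 LE byte order marker"
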
@@ -84651,19 +84719,6 @@ async function getQueueById(accountId, queueId) {
84651
84719
  return fetchResult(queuesUrl(accountId, queueId), {});
84652
84720
  }
84653
84721
  __name(getQueueById, "getQueueById");
84654
- async function putQueue(config, queueName, body) {
84655
- const queue = await getQueue(config, queueName);
84656
- return putQueueById(config, queue.queue_id, body);
84657
- }
84658
- __name(putQueue, "putQueue");
84659
- async function putQueueById(config, queueId, body) {
84660
- const accountId = await requireAuth(config);
84661
- return fetchResult(queuesUrl(accountId, queueId), {
84662
- method: "PUT",
84663
- body: JSON.stringify(body)
84664
- });
84665
- }
84666
- __name(putQueueById, "putQueueById");
84667
84722
  async function postConsumer(config, queueName, body) {
84668
84723
  const queue = await getQueue(config, queueName);
84669
84724
  return postConsumerById(config, queue.queue_id, body);
@@ -88098,8 +88153,67 @@ var validateDurableObjectBinding = /* @__PURE__ */ __name((diagnostics, field, v
88098
88153
  ]);
88099
88154
  return isValid2;
88100
88155
  }, "validateDurableObjectBinding");
88101
- var validateWorkflowBinding = /* @__PURE__ */ __name((_diagnostics, _field, _value) => {
88102
- return true;
88156
+ var validateWorkflowBinding = /* @__PURE__ */ __name((diagnostics, field, value) => {
88157
+ if (typeof value !== "object" || value === null) {
88158
+ diagnostics.errors.push(
88159
+ `"workflows" bindings should be objects, but got ${JSON.stringify(value)}`
88160
+ );
88161
+ return false;
88162
+ }
88163
+ let isValid2 = true;
88164
+ if (!isRequiredProperty(value, "binding", "string")) {
88165
+ diagnostics.errors.push(
88166
+ `"${field}" bindings should have a string "binding" field but got ${JSON.stringify(
88167
+ value
88168
+ )}.`
88169
+ );
88170
+ isValid2 = false;
88171
+ }
88172
+ if (!isRequiredProperty(value, "name", "string")) {
88173
+ diagnostics.errors.push(
88174
+ `"${field}" bindings should have a string "name" field but got ${JSON.stringify(
88175
+ value
88176
+ )}.`
88177
+ );
88178
+ isValid2 = false;
88179
+ } else if (value.name.length > 64) {
88180
+ diagnostics.errors.push(
88181
+ `"${field}" binding "name" field must be 64 characters or less, but got ${value.name.length} characters.`
88182
+ );
88183
+ isValid2 = false;
88184
+ }
88185
+ if (!isRequiredProperty(value, "class_name", "string")) {
88186
+ diagnostics.errors.push(
88187
+ `"${field}" bindings should have a string "class_name" field but got ${JSON.stringify(
88188
+ value
88189
+ )}.`
88190
+ );
88191
+ isValid2 = false;
88192
+ }
88193
+ if (!isOptionalProperty(value, "script_name", "string")) {
88194
+ diagnostics.errors.push(
88195
+ `"${field}" bindings should, optionally, have a string "script_name" field but got ${JSON.stringify(
88196
+ value
88197
+ )}.`
88198
+ );
88199
+ isValid2 = false;
88200
+ }
88201
+ if (!isOptionalProperty(value, "experimental_remote", "boolean")) {
88202
+ diagnostics.errors.push(
88203
+ `"${field}" bindings should, optionally, have a boolean "experimental_remote" field but got ${JSON.stringify(
88204
+ value
88205
+ )}.`
88206
+ );
88207
+ isValid2 = false;
88208
+ }
88209
+ validateAdditionalProperties(diagnostics, field, Object.keys(value), [
88210
+ "binding",
88211
+ "name",
88212
+ "class_name",
88213
+ "script_name",
88214
+ "experimental_remote"
88215
+ ]);
88216
+ return isValid2;
88103
88217
  }, "validateWorkflowBinding");
88104
88218
  var validateCflogfwdrObject = /* @__PURE__ */ __name((envName) => (diagnostics, field, value, topLevelEnv) => {
88105
88219
  const bindingsValidation = validateBindingsProperty(
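
The hunk above replaces the previous no-op validateWorkflowBinding with real validation: binding, name (64 characters or fewer) and class_name must be strings, script_name and experimental_remote are optional, and any other keys are reported via validateAdditionalProperties. A binding object that satisfies these checks; the values are placeholders:

// Placeholder values; only the shape matters to the validator.
const exampleWorkflowBinding = {
  binding: "MY_WORKFLOW",      // required string
  name: "my-workflow",         // required string, at most 64 characters
  class_name: "MyWorkflow",    // required string
  script_name: "other-worker", // optional string
  experimental_remote: false   // optional boolean
};
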
@@ -90321,32 +90435,6 @@ init_import_meta_url();
90321
90435
 
90322
90436
  // ../../node_modules/.pnpm/string-width@5.1.2/node_modules/string-width/index.js
90323
90437
  init_import_meta_url();
90324
-
90325
- // ../../node_modules/.pnpm/strip-ansi@7.1.0/node_modules/strip-ansi/index.js
90326
- init_import_meta_url();
90327
-
90328
- // ../../node_modules/.pnpm/ansi-regex@6.0.1/node_modules/ansi-regex/index.js
90329
- init_import_meta_url();
90330
- function ansiRegex({ onlyFirst = false } = {}) {
90331
- const pattern = [
90332
- "[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)",
90333
- "(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))"
90334
- ].join("|");
90335
- return new RegExp(pattern, onlyFirst ? void 0 : "g");
90336
- }
90337
- __name(ansiRegex, "ansiRegex");
90338
-
90339
- // ../../node_modules/.pnpm/strip-ansi@7.1.0/node_modules/strip-ansi/index.js
90340
- var regex = ansiRegex();
90341
- function stripAnsi2(string) {
90342
- if (typeof string !== "string") {
90343
- throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``);
90344
- }
90345
- return string.replace(regex, "");
90346
- }
90347
- __name(stripAnsi2, "stripAnsi");
90348
-
90349
- // ../../node_modules/.pnpm/string-width@5.1.2/node_modules/string-width/index.js
90350
90438
  var import_eastasianwidth = __toESM(require_eastasianwidth(), 1);
90351
90439
  var import_emoji_regex = __toESM(require_emoji_regex2(), 1);
90352
90440
  function stringWidth(string, options32 = {}) {
@@ -90991,6 +91079,78 @@ var stderr = process.stderr;
90991
91079
  // ../cli/index.ts
90992
91080
  init_import_meta_url();
90993
91081
  var import_process = require("process");
91082
+
91083
+ // ../cli/check-macos-version.ts
91084
+ init_import_meta_url();
91085
+ var import_node_os3 = __toESM(require("node:os"));
91086
+ var MINIMUM_MACOS_VERSION = "13.5.0";
91087
+ function checkMacOSVersion(options32) {
91088
+ if (process.platform !== "darwin") {
91089
+ return;
91090
+ }
91091
+ if (process.env.CI) {
91092
+ return;
91093
+ }
91094
+ const release3 = import_node_os3.default.release();
91095
+ const macOSVersion = darwinVersionToMacOSVersion(release3);
91096
+ if (macOSVersion && isVersionLessThan(macOSVersion, MINIMUM_MACOS_VERSION)) {
91097
+ if (options32.shouldThrow) {
91098
+ throw new Error(
91099
+ `Unsupported macOS version: The Cloudflare Workers runtime cannot run on the current version of macOS (${macOSVersion}). The minimum requirement is macOS ${MINIMUM_MACOS_VERSION}+. See https://github.com/cloudflare/workerd?tab=readme-ov-file#running-workerd If you cannot upgrade your version of macOS, you could try running in a DevContainer setup with a supported version of Linux (glibc 2.35+ required).`
91100
+ );
91101
+ } else {
91102
+ console.warn(
91103
+ `\u26A0\uFE0F Warning: Unsupported macOS version detected (${macOSVersion}). The Cloudflare Workers runtime may not work correctly on macOS versions below ${MINIMUM_MACOS_VERSION}. Consider upgrading to macOS ${MINIMUM_MACOS_VERSION}+ or using a DevContainer setup with a supported version of Linux (glibc 2.35+ required).`
91104
+ );
91105
+ }
91106
+ }
91107
+ }
91108
+ __name(checkMacOSVersion, "checkMacOSVersion");
91109
+ function darwinVersionToMacOSVersion(darwinVersion) {
91110
+ const match2 = darwinVersion.match(/^(\d+)\.(\d+)\.(\d+)/);
91111
+ if (!match2) {
91112
+ return null;
91113
+ }
91114
+ const major = parseInt(match2[1], 10);
91115
+ if (major >= 20) {
91116
+ const macOSMajor = major - 9;
91117
+ const minor = parseInt(match2[2], 10);
91118
+ const patch = parseInt(match2[3], 10);
91119
+ return `${macOSMajor}.${minor}.${patch}`;
91120
+ }
91121
+ return null;
91122
+ }
91123
+ __name(darwinVersionToMacOSVersion, "darwinVersionToMacOSVersion");
91124
+ function isVersionLessThan(version1, version22) {
91125
+ const versionRegex = /^(\d+)\.(\d+)\.(\d+)$/;
91126
+ const match1 = version1.match(versionRegex);
91127
+ const match2 = version22.match(versionRegex);
91128
+ if (!match1 || !match2) {
91129
+ throw new Error(
91130
+ `Invalid version format. Expected M.m.p format, got: ${version1}, ${version22}`
91131
+ );
91132
+ }
91133
+ const [major1, minor1, patch1] = [
91134
+ parseInt(match1[1], 10),
91135
+ parseInt(match1[2], 10),
91136
+ parseInt(match1[3], 10)
91137
+ ];
91138
+ const [major2, minor2, patch2] = [
91139
+ parseInt(match2[1], 10),
91140
+ parseInt(match2[2], 10),
91141
+ parseInt(match2[3], 10)
91142
+ ];
91143
+ if (major1 !== major2) {
91144
+ return major1 < major2;
91145
+ }
91146
+ if (minor1 !== minor2) {
91147
+ return minor1 < minor2;
91148
+ }
91149
+ return patch1 < patch2;
91150
+ }
91151
+ __name(isVersionLessThan, "isVersionLessThan");
91152
+
91153
+ // ../cli/index.ts
90994
91154
  var shapes = {
90995
91155
  diamond: "\u25C7",
90996
91156
  dash: "\u2500",
@@ -93925,11 +94085,132 @@ async function imagesRemoteFetcher(request4) {
93925
94085
  }
93926
94086
  __name(imagesRemoteFetcher, "imagesRemoteFetcher");
93927
94087
 
94088
+ // src/update-check.ts
94089
+ init_import_meta_url();
94090
+ var import_update_check = __toESM(require_update_check());
94091
+ async function doUpdateCheck() {
94092
+ let update = null;
94093
+ const pkg = { name, version };
94094
+ try {
94095
+ update = await (0, import_update_check.default)(pkg, {
94096
+ distTag: pkg.version.startsWith("0.0.0") ? "beta" : "latest"
94097
+ });
94098
+ } catch (err) {
94099
+ }
94100
+ return update?.latest;
94101
+ }
94102
+ __name(doUpdateCheck, "doUpdateCheck");
94103
+ var updateCheckPromise;
94104
+ function updateCheck() {
94105
+ return updateCheckPromise ??= doUpdateCheck();
94106
+ }
94107
+ __name(updateCheck, "updateCheck");
94108
+
94109
+ // src/vectorize/fetcher.ts
94110
+ init_import_meta_url();
94111
+ var import_miniflare6 = require("miniflare");
94112
+ var EXTERNAL_VECTORIZE_WORKER_NAME = "__WRANGLER_EXTERNAL_VECTORIZE_WORKER";
94113
+ var EXTERNAL_VECTORIZE_WORKER_SCRIPT = `
94114
+ import makeBinding from 'cloudflare-internal:vectorize-api'
94115
+
94116
+ export default function (env) {
94117
+ return makeBinding({
94118
+ fetcher: env.FETCHER,
94119
+ indexId: env.INDEX_ID,
94120
+ indexVersion: env.INDEX_VERSION,
94121
+ useNdJson: true,
94122
+ });
94123
+ }
94124
+ `;
94125
+ var URL_SUBSTITUTIONS = /* @__PURE__ */ new Map([
94126
+ ["getByIds", "get_by_ids"],
94127
+ ["deleteByIds", "delete_by_ids"]
94128
+ ]);
94129
+ function MakeVectorizeFetcher(indexId) {
94130
+ return async function(request4) {
94131
+ const accountId = await getAccountId();
94132
+ request4.headers.delete("Host");
94133
+ request4.headers.delete("Content-Length");
94134
+ let op = request4.url.split("/").pop() || "";
94135
+ op = URL_SUBSTITUTIONS.get(op) || op;
94136
+ const base = `/accounts/${accountId}/vectorize/v2/indexes/${indexId}/`;
94137
+ const url4 = base + op;
94138
+ const res = await performApiFetch(url4, {
94139
+ method: request4.method,
94140
+ headers: Object.fromEntries(request4.headers.entries()),
94141
+ body: request4.body,
94142
+ duplex: "half"
94143
+ });
94144
+ const respHeaders = new import_miniflare6.Headers(res.headers);
94145
+ respHeaders.delete("Host");
94146
+ respHeaders.delete("Content-Length");
94147
+ const apiResponse = await res.json();
94148
+ const newResponse = apiResponse.success ? apiResponse.result : {
94149
+ error: apiResponse.errors[0].message,
94150
+ code: apiResponse.errors[0].code
94151
+ };
94152
+ return new import_miniflare6.Response(JSON.stringify(newResponse), {
94153
+ status: res.status,
94154
+ headers: respHeaders
94155
+ });
94156
+ };
94157
+ }
94158
+ __name(MakeVectorizeFetcher, "MakeVectorizeFetcher");
94159
+
94160
+ // src/dev/class-names-sqlite.ts
94161
+ init_import_meta_url();
94162
+ function getClassNamesWhichUseSQLite(migrations) {
94163
+ const classNamesWhichUseSQLite = /* @__PURE__ */ new Map();
94164
+ (migrations || []).forEach((migration) => {
94165
+ migration.deleted_classes?.forEach((deleted_class) => {
94166
+ if (!classNamesWhichUseSQLite.delete(deleted_class)) {
94167
+ throw new UserError(
94168
+ `Cannot apply deleted_classes migration to non-existent class ${deleted_class}`
94169
+ );
94170
+ }
94171
+ });
94172
+ migration.renamed_classes?.forEach(({ from, to }) => {
94173
+ const useSQLite = classNamesWhichUseSQLite.get(from);
94174
+ if (useSQLite === void 0) {
94175
+ throw new UserError(
94176
+ `Cannot apply renamed_classes migration to non-existent class ${from}`
94177
+ );
94178
+ } else {
94179
+ classNamesWhichUseSQLite.delete(from);
94180
+ classNamesWhichUseSQLite.set(to, useSQLite);
94181
+ }
94182
+ });
94183
+ migration.new_classes?.forEach((new_class) => {
94184
+ if (classNamesWhichUseSQLite.has(new_class)) {
94185
+ throw new UserError(
94186
+ `Cannot apply new_classes migration to existing class ${new_class}`
94187
+ );
94188
+ } else {
94189
+ classNamesWhichUseSQLite.set(new_class, false);
94190
+ }
94191
+ });
94192
+ migration.new_sqlite_classes?.forEach((new_class) => {
94193
+ if (classNamesWhichUseSQLite.has(new_class)) {
94194
+ throw new UserError(
94195
+ `Cannot apply new_sqlite_classes migration to existing class ${new_class}`
94196
+ );
94197
+ } else {
94198
+ classNamesWhichUseSQLite.set(new_class, true);
94199
+ }
94200
+ });
94201
+ });
94202
+ return classNamesWhichUseSQLite;
94203
+ }
94204
+ __name(getClassNamesWhichUseSQLite, "getClassNamesWhichUseSQLite");
94205
+
94206
+ // src/dev/miniflare/stdio.ts
94207
+ init_import_meta_url();
94208
+
93928
94209
  // src/sourcemap.ts
93929
94210
  init_import_meta_url();
93930
94211
  var import_node_assert4 = __toESM(require("node:assert"));
93931
94212
  var import_node_url6 = __toESM(require("node:url"));
93932
- var import_miniflare6 = require("miniflare");
94213
+ var import_miniflare7 = require("miniflare");
93933
94214
  function maybeRetrieveFileSourceMap(filePath) {
93934
94215
  if (filePath === void 0) {
93935
94216
  return null;
@@ -93966,7 +94247,7 @@ function getSourceMappingPrepareStackTrace(retrieveSourceMap) {
93966
94247
  if (sourceMappingPrepareStackTrace !== void 0) {
93967
94248
  return sourceMappingPrepareStackTrace;
93968
94249
  }
93969
- const support = (0, import_miniflare6.getFreshSourceMapSupport)();
94250
+ const support = (0, import_miniflare7.getFreshSourceMapSupport)();
93970
94251
  const originalPrepareStackTrace = Error.prepareStackTrace;
93971
94252
  support.install({
93972
94253
  environment: "node",
@@ -94150,125 +94431,125 @@ var CallSite = class {
94150
94431
  };
94151
94432
  __name(CallSite, "CallSite");
94152
94433
 
94153
- // src/update-check.ts
94154
- init_import_meta_url();
94155
- var import_update_check = __toESM(require_update_check());
94156
- async function doUpdateCheck() {
94157
- let update = null;
94158
- const pkg = { name, version };
94159
- try {
94160
- update = await (0, import_update_check.default)(pkg, {
94161
- distTag: pkg.version.startsWith("0.0.0") ? "beta" : "latest"
94162
- });
94163
- } catch (err) {
94164
- }
94165
- return update?.latest;
94166
- }
94167
- __name(doUpdateCheck, "doUpdateCheck");
94168
- var updateCheckPromise;
94169
- function updateCheck() {
94170
- return updateCheckPromise ??= doUpdateCheck();
94171
- }
94172
- __name(updateCheck, "updateCheck");
94173
-
94174
- // src/vectorize/fetcher.ts
94175
- init_import_meta_url();
94176
- var import_miniflare7 = require("miniflare");
94177
- var EXTERNAL_VECTORIZE_WORKER_NAME = "__WRANGLER_EXTERNAL_VECTORIZE_WORKER";
94178
- var EXTERNAL_VECTORIZE_WORKER_SCRIPT = `
94179
- import makeBinding from 'cloudflare-internal:vectorize-api'
94180
-
94181
- export default function (env) {
94182
- return makeBinding({
94183
- fetcher: env.FETCHER,
94184
- indexId: env.INDEX_ID,
94185
- indexVersion: env.INDEX_VERSION,
94186
- useNdJson: true,
94187
- });
94434
+ // src/dev/miniflare/stdio.ts
94435
+ function handleRuntimeStdioWithStructuredLogs(stdout2, stderr2) {
94436
+ stdout2.on("data", getProcessStreamDataListener("stdout"));
94437
+ stderr2.on("data", getProcessStreamDataListener("stderr"));
94438
+ }
94439
+ __name(handleRuntimeStdioWithStructuredLogs, "handleRuntimeStdioWithStructuredLogs");
94440
+ function getProcessStreamDataListener(processStream) {
94441
+ let streamAccumulator = "";
94442
+ return (chunk) => {
94443
+ const fullStreamOutput = `${streamAccumulator}${chunk}`;
94444
+ let currentLogsStr = "";
94445
+ const lastNewlineIdx = fullStreamOutput.lastIndexOf("\n");
94446
+ if (lastNewlineIdx > 0) {
94447
+ currentLogsStr = fullStreamOutput.slice(0, lastNewlineIdx);
94448
+ streamAccumulator = fullStreamOutput.slice(lastNewlineIdx + 1);
94449
+ } else {
94450
+ streamAccumulator = fullStreamOutput;
94451
+ }
94452
+ const lines = currentLogsStr.split("\n");
94453
+ for (const line of lines) {
94454
+ const structuredLog = parseStructuredLog(line);
94455
+ if (structuredLog) {
94456
+ logStructuredLog(structuredLog, processStream);
94457
+ } else {
94458
+ const level = processStream === "stdout" ? "log" : "error";
94459
+ logger[level](line);
94460
+ }
94461
+ }
94462
+ };
94188
94463
  }
94189
- `;
94190
- var URL_SUBSTITUTIONS = /* @__PURE__ */ new Map([
94191
- ["getByIds", "get_by_ids"],
94192
- ["deleteByIds", "delete_by_ids"]
94193
- ]);
94194
- function MakeVectorizeFetcher(indexId) {
94195
- return async function(request4) {
94196
- const accountId = await getAccountId();
94197
- request4.headers.delete("Host");
94198
- request4.headers.delete("Content-Length");
94199
- let op = request4.url.split("/").pop() || "";
94200
- op = URL_SUBSTITUTIONS.get(op) || op;
94201
- const base = `/accounts/${accountId}/vectorize/v2/indexes/${indexId}/`;
94202
- const url4 = base + op;
94203
- const res = await performApiFetch(url4, {
94204
- method: request4.method,
94205
- headers: Object.fromEntries(request4.headers.entries()),
94206
- body: request4.body,
94207
- duplex: "half"
94208
- });
94209
- const respHeaders = new import_miniflare7.Headers(res.headers);
94210
- respHeaders.delete("Host");
94211
- respHeaders.delete("Content-Length");
94212
- const apiResponse = await res.json();
94213
- const newResponse = apiResponse.success ? apiResponse.result : {
94214
- error: apiResponse.errors[0].message,
94215
- code: apiResponse.errors[0].code
94464
+ __name(getProcessStreamDataListener, "getProcessStreamDataListener");
94465
+ function parseStructuredLog(str) {
94466
+ try {
94467
+ const maybeStructuredLog = JSON.parse(str);
94468
+ if (typeof maybeStructuredLog !== "object" || maybeStructuredLog === null) {
94469
+ return null;
94470
+ }
94471
+ const timestamp = parseInt(maybeStructuredLog.timestamp);
94472
+ if (isNaN(timestamp) || typeof maybeStructuredLog.level !== "string" || typeof maybeStructuredLog.message !== "string") {
94473
+ return null;
94474
+ }
94475
+ return {
94476
+ timestamp,
94477
+ level: maybeStructuredLog.level,
94478
+ message: maybeStructuredLog.message
94216
94479
  };
94217
- return new import_miniflare7.Response(JSON.stringify(newResponse), {
94218
- status: res.status,
94219
- headers: respHeaders
94220
- });
94221
- };
94480
+ } catch {
94481
+ return null;
94482
+ }
94222
94483
  }
94223
- __name(MakeVectorizeFetcher, "MakeVectorizeFetcher");
94224
-
94225
- // src/dev/class-names-sqlite.ts
94226
- init_import_meta_url();
94227
- function getClassNamesWhichUseSQLite(migrations) {
94228
- const classNamesWhichUseSQLite = /* @__PURE__ */ new Map();
94229
- (migrations || []).forEach((migration) => {
94230
- migration.deleted_classes?.forEach((deleted_class) => {
94231
- if (!classNamesWhichUseSQLite.delete(deleted_class)) {
94232
- throw new UserError(
94233
- `Cannot apply deleted_classes migration to non-existent class ${deleted_class}`
94234
- );
94235
- }
94236
- });
94237
- migration.renamed_classes?.forEach(({ from, to }) => {
94238
- const useSQLite = classNamesWhichUseSQLite.get(from);
94239
- if (useSQLite === void 0) {
94240
- throw new UserError(
94241
- `Cannot apply renamed_classes migration to non-existent class ${from}`
94242
- );
94243
- } else {
94244
- classNamesWhichUseSQLite.delete(from);
94245
- classNamesWhichUseSQLite.set(to, useSQLite);
94246
- }
94247
- });
94248
- migration.new_classes?.forEach((new_class) => {
94249
- if (classNamesWhichUseSQLite.has(new_class)) {
94250
- throw new UserError(
94251
- `Cannot apply new_classes migration to existing class ${new_class}`
94252
- );
94253
- } else {
94254
- classNamesWhichUseSQLite.set(new_class, false);
94255
- }
94256
- });
94257
- migration.new_sqlite_classes?.forEach((new_class) => {
94258
- if (classNamesWhichUseSQLite.has(new_class)) {
94259
- throw new UserError(
94260
- `Cannot apply new_sqlite_classes migration to existing class ${new_class}`
94261
- );
94262
- } else {
94263
- classNamesWhichUseSQLite.set(new_class, true);
94484
+ __name(parseStructuredLog, "parseStructuredLog");
94485
+ function logStructuredLog({ level, message }, processStream) {
94486
+ if (messageClassifiers.isBarf(message)) {
94487
+ if (messageClassifiers.isAddressInUse(message)) {
94488
+ const address = message.match(
94489
+ /Address already in use; toString\(\) = (.+)\n/
94490
+ )?.[1];
94491
+ logger.error(
94492
+ `Address already in use (${address}). Please check that you are not already running a server on this address or specify a different port with --port.`
94493
+ );
94494
+ return logger.debug(message);
94495
+ }
94496
+ if (messageClassifiers.isAccessViolation(message)) {
94497
+ let error2 = "There was an access violation in the runtime.";
94498
+ if (process.platform === "win32") {
94499
+ error2 += "\nOn Windows, this may be caused by an outdated Microsoft Visual C++ Redistributable library.\nCheck that you have the latest version installed.\nSee https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist.";
94264
94500
  }
94265
- });
94266
- });
94267
- return classNamesWhichUseSQLite;
94501
+ logger.error(error2);
94502
+ return logger.debug(message);
94503
+ }
94504
+ return logger.debug(message);
94505
+ }
94506
+ if ((level === "info" || level === "error") && messageClassifiers.isCodeMovedWarning(message)) {
94507
+ return;
94508
+ }
94509
+ if (level === "warn") {
94510
+ return logger.warn(message);
94511
+ }
94512
+ if (level === "info") {
94513
+ return logger.info(message);
94514
+ }
94515
+ if (level === "debug") {
94516
+ return logger.debug(message);
94517
+ }
94518
+ if (level === "error") {
94519
+ return logger.error(getSourceMappedString(message));
94520
+ }
94521
+ if (processStream === "stderr") {
94522
+ return logger.error(getSourceMappedString(message));
94523
+ } else {
94524
+ return logger.log(getSourceMappedString(message));
94525
+ }
94268
94526
  }
94269
- __name(getClassNamesWhichUseSQLite, "getClassNamesWhichUseSQLite");
94527
+ __name(logStructuredLog, "logStructuredLog");
94528
+ var messageClassifiers = {
94529
+ // Is this chunk a big chonky barf from workerd that we want to hijack to cleanup/ignore?
94530
+ isBarf(chunk) {
94531
+ const containsLlvmSymbolizerWarning = chunk.includes(
94532
+ "Not symbolizing stack traces because $LLVM_SYMBOLIZER is not set"
94533
+ );
94534
+ const containsRecursiveIsolateLockWarning = chunk.includes(
94535
+ "took recursive isolate lock"
94536
+ );
94537
+ const containsHexStack = /stack:( (0|[a-f\d]{4,})){3,}/.test(chunk);
94538
+ return containsLlvmSymbolizerWarning || containsRecursiveIsolateLockWarning || containsHexStack;
94539
+ },
94540
+ // Is this chunk an Address In Use error?
94541
+ isAddressInUse(chunk) {
94542
+ return chunk.includes("Address already in use; toString() = ");
94543
+ },
94544
+ isCodeMovedWarning(chunk) {
94545
+ return /CODE_MOVED for unknown code block/.test(chunk);
94546
+ },
94547
+ isAccessViolation(chunk) {
94548
+ return chunk.includes("access violation;");
94549
+ }
94550
+ };
94270
94551
 
94271
- // src/dev/miniflare.ts
94552
+ // src/dev/miniflare/index.ts
94272
94553
  var EXTERNAL_SERVICE_WORKER_NAME = "__WRANGLER_EXTERNAL_DURABLE_OBJECTS_WORKER";
94273
94554
  var EXTERNAL_SERVICE_WORKER_SCRIPT = `
94274
94555
  import { DurableObject, WorkerEntrypoint } from "cloudflare:workers";
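
The hunk above moves runtime stdio handling into src/dev/miniflare/stdio.ts: with structuredWorkerdLogs enabled (see the buildMiniflareOptions and ProxyController hunks later in this diff), each newline-delimited line from workerd is parsed as JSON with timestamp, level and message fields and routed to the matching logger level, falling back to plain-text handling when parsing fails. A sketch of what parseStructuredLog accepts; the field values are illustrative only:

// A structured workerd log line becomes { timestamp, level, message }.
parseStructuredLog('{"timestamp":"1700000000000","level":"warn","message":"example warning"}');
// => { timestamp: 1700000000000, level: "warn", message: "example warning" }
// Anything that is not JSON with those fields returns null and is logged as raw text.
parseStructuredLog("plain stderr output"); // => null
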
@@ -94846,73 +95127,6 @@ function buildSitesOptions({
94846
95127
  }
94847
95128
  }
94848
95129
  __name(buildSitesOptions, "buildSitesOptions");
94849
- function handleRuntimeStdio(stdout2, stderr2) {
94850
- const classifiers = {
94851
- // Is this chunk a big chonky barf from workerd that we want to hijack to cleanup/ignore?
94852
- isBarf(chunk) {
94853
- const containsLlvmSymbolizerWarning = chunk.includes(
94854
- "Not symbolizing stack traces because $LLVM_SYMBOLIZER is not set"
94855
- );
94856
- const containsRecursiveIsolateLockWarning = chunk.includes(
94857
- "took recursive isolate lock"
94858
- );
94859
- const containsHexStack = /stack:( (0|[a-f\d]{4,})){3,}/.test(chunk);
94860
- return containsLlvmSymbolizerWarning || containsRecursiveIsolateLockWarning || containsHexStack;
94861
- },
94862
- // Is this chunk an Address In Use error?
94863
- isAddressInUse(chunk) {
94864
- return chunk.includes("Address already in use; toString() = ");
94865
- },
94866
- isWarning(chunk) {
94867
- return /\.c\+\+:\d+: warning:/.test(chunk);
94868
- },
94869
- isCodeMovedWarning(chunk) {
94870
- return /CODE_MOVED for unknown code block/.test(chunk);
94871
- },
94872
- isAccessViolation(chunk) {
94873
- return chunk.includes("access violation;");
94874
- }
94875
- };
94876
- stdout2.on("data", (chunk) => {
94877
- chunk = chunk.toString().trim();
94878
- if (classifiers.isBarf(chunk)) {
94879
- logger.debug(chunk);
94880
- } else if (classifiers.isWarning(chunk)) {
94881
- logger.warn(chunk);
94882
- } else {
94883
- logger.info(getSourceMappedString(chunk));
94884
- }
94885
- });
94886
- stderr2.on("data", (chunk) => {
94887
- chunk = chunk.toString().trim();
94888
- if (classifiers.isBarf(chunk)) {
94889
- if (classifiers.isAddressInUse(chunk)) {
94890
- const address = chunk.match(
94891
- /Address already in use; toString\(\) = (.+)\n/
94892
- )?.[1];
94893
- logger.error(
94894
- `Address already in use (${address}). Please check that you are not already running a server on this address or specify a different port with --port.`
94895
- );
94896
- logger.debug(chunk);
94897
- } else if (classifiers.isAccessViolation(chunk)) {
94898
- let error2 = "There was an access violation in the runtime.";
94899
- if (process.platform === "win32") {
94900
- error2 += "\nOn Windows, this may be caused by an outdated Microsoft Visual C++ Redistributable library.\nCheck that you have the latest version installed.\nSee https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist.";
94901
- }
94902
- logger.error(error2);
94903
- logger.debug(chunk);
94904
- } else {
94905
- logger.debug(chunk);
94906
- }
94907
- } else if (classifiers.isWarning(chunk)) {
94908
- logger.warn(chunk);
94909
- } else if (classifiers.isCodeMovedWarning(chunk)) {
94910
- } else {
94911
- logger.error(getSourceMappedString(chunk));
94912
- }
94913
- });
94914
- }
94915
- __name(handleRuntimeStdio, "handleRuntimeStdio");
94916
95130
  var didWarnMiniflareCronSupport = false;
94917
95131
  var didWarnMiniflareVectorizeSupport = false;
94918
95132
  var didWarnAiAccountUsage = false;
@@ -94978,7 +95192,8 @@ async function buildMiniflareOptions(log2, config, proxyToUserWorkerAuthenticati
94978
95192
  unsafeEnableAssetsRpc: getFlag("ASSETS_RPC"),
94979
95193
  log: log2,
94980
95194
  verbose: logger.loggerLevel === "debug",
94981
- handleRuntimeStdio,
95195
+ handleRuntimeStdio: handleRuntimeStdioWithStructuredLogs,
95196
+ structuredWorkerdLogs: true,
94982
95197
  ...persistOptions,
94983
95198
  workers: [
94984
95199
  {
@@ -95815,7 +96030,7 @@ var wrapper_default = import_websocket.default;
95815
96030
  // src/dev/inspect.ts
95816
96031
  init_import_meta_url();
95817
96032
  var import_fs7 = require("fs");
95818
- var import_node_os3 = __toESM(require("node:os"));
96033
+ var import_node_os4 = __toESM(require("node:os"));
95819
96034
  var import_node_url8 = require("node:url");
95820
96035
  var import_path6 = __toESM(require("path"));
95821
96036
  var import_open2 = __toESM(require_open());
@@ -96067,7 +96282,7 @@ var openInspector = /* @__PURE__ */ __name(async (inspectorPort, worker) => {
96067
96282
  const url4 = `https://devtools.devprod.cloudflare.dev/js_app?${query.toString()}`;
96068
96283
  const errorMessage = "Failed to open inspector.\nInspector depends on having a Chromium-based browser installed, maybe you need to install one?";
96069
96284
  let braveBrowser;
96070
- switch (import_node_os3.default.platform()) {
96285
+ switch (import_node_os4.default.platform()) {
96071
96286
  case "darwin":
96072
96287
  case "win32":
96073
96288
  braveBrowser = "Brave";
@@ -96307,7 +96522,8 @@ var ProxyController = class extends Controller {
96307
96522
  logger.loggerLevel === "debug" ? "wrangler-ProxyWorker" : "wrangler"
96308
96523
  )
96309
96524
  }),
96310
- handleRuntimeStdio,
96525
+ handleRuntimeStdio: handleRuntimeStdioWithStructuredLogs,
96526
+ structuredWorkerdLogs: true,
96311
96527
  liveReload: false
96312
96528
  };
96313
96529
  const proxyWorkerOptionsChanged = didMiniflareOptionsChange(
@@ -102080,7 +102296,7 @@ var normalizeStdio = /* @__PURE__ */ __name((options32) => {
102080
102296
 
102081
102297
  // ../../node_modules/.pnpm/execa@6.1.0/node_modules/execa/lib/kill.js
102082
102298
  init_import_meta_url();
102083
- var import_node_os5 = __toESM(require("node:os"), 1);
102299
+ var import_node_os6 = __toESM(require("node:os"), 1);
102084
102300
  var import_signal_exit4 = __toESM(require_signal_exit(), 1);
102085
102301
  var DEFAULT_FORCE_KILL_TIMEOUT = 1e3 * 5;
102086
102302
  var spawnedKill = /* @__PURE__ */ __name((kill, signal = "SIGTERM", options32 = {}) => {
@@ -102101,7 +102317,7 @@ var setKillTimeout = /* @__PURE__ */ __name((kill, signal, options32, killResult
102101
102317
  }
102102
102318
  }, "setKillTimeout");
102103
102319
  var shouldForceKill = /* @__PURE__ */ __name((signal, { forceKillAfterTimeout }, killResult) => isSigterm(signal) && forceKillAfterTimeout !== false && killResult, "shouldForceKill");
102104
- var isSigterm = /* @__PURE__ */ __name((signal) => signal === import_node_os5.default.constants.signals.SIGTERM || typeof signal === "string" && signal.toUpperCase() === "SIGTERM", "isSigterm");
102320
+ var isSigterm = /* @__PURE__ */ __name((signal) => signal === import_node_os6.default.constants.signals.SIGTERM || typeof signal === "string" && signal.toUpperCase() === "SIGTERM", "isSigterm");
102105
102321
  var getForceKillAfterTimeout = /* @__PURE__ */ __name(({ forceKillAfterTimeout = true }) => {
102106
102322
  if (forceKillAfterTimeout === true) {
102107
102323
  return DEFAULT_FORCE_KILL_TIMEOUT;
@@ -102649,13 +102865,13 @@ __name(sniffUserAgent, "sniffUserAgent");
102649
102865
 
102650
102866
  // src/metrics/helpers.ts
102651
102867
  init_import_meta_url();
102652
- var import_node_os6 = __toESM(require("node:os"));
102868
+ var import_node_os7 = __toESM(require("node:os"));
102653
102869
  function getWranglerVersion() {
102654
102870
  return version;
102655
102871
  }
102656
102872
  __name(getWranglerVersion, "getWranglerVersion");
102657
102873
  function getPlatform() {
102658
- const platform3 = import_node_os6.default.platform();
102874
+ const platform3 = import_node_os7.default.platform();
102659
102875
  switch (platform3) {
102660
102876
  case "win32":
102661
102877
  return "Windows";
@@ -102673,7 +102889,7 @@ function getOS() {
102673
102889
  }
102674
102890
  __name(getOS, "getOS");
102675
102891
  function getOSVersion() {
102676
- return import_node_os6.default.version();
102892
+ return import_node_os7.default.version();
102677
102893
  }
102678
102894
  __name(getOSVersion, "getOSVersion");
102679
102895
  function getNodeVersion() {
@@ -104394,27 +104610,6 @@ async function printBundleSize(main2, modules) {
104394
104610
  logger.log(`Total Upload: ${colorizedReport}`);
104395
104611
  }
104396
104612
  __name(printBundleSize, "printBundleSize");
104397
- function printOffendingDependencies(dependencies) {
104398
- const warning = [];
104399
- const dependenciesSorted = Object.entries(dependencies);
104400
- dependenciesSorted.sort(
104401
- ([_adep, aData], [_bdep, bData]) => bData.bytesInOutput - aData.bytesInOutput
104402
- );
104403
- const topLargest = dependenciesSorted.slice(0, 5);
104404
- if (topLargest.length > 0) {
104405
- warning.push(
104406
- `Here are the ${topLargest.length} largest dependencies included in your script:`
104407
- );
104408
- for (const [dep, data] of topLargest) {
104409
- warning.push(
104410
- `- ${dep} - ${(data.bytesInOutput / ONE_KIB_BYTES).toFixed(2)} KiB`
104411
- );
104412
- }
104413
- warning.push("If these are unnecessary, consider removing them");
104414
- logger.warn(warning.join("\n"));
104415
- }
104416
- }
104417
- __name(printOffendingDependencies, "printOffendingDependencies");
104418
104613
 
104419
104614
  // src/deployment-bundle/create-worker-upload-form.ts
104420
104615
  init_import_meta_url();
@@ -104443,12 +104638,14 @@ function handleUnsafeCapnp(capnp) {
104443
104638
  );
104444
104639
  }
104445
104640
  const srcPrefix = (0, import_node_path27.resolve)(base_path ?? ".");
104446
- const capnpProcess = (0, import_node_child_process2.spawnSync)("capnp", [
104447
- "compile",
104448
- "-o-",
104449
- `--src-prefix=${srcPrefix}`,
104450
- ...capnpSchemas
104451
- ]);
104641
+ const capnpProcess = (0, import_node_child_process2.spawnSync)(
104642
+ "capnp",
104643
+ ["compile", "-o-", `--src-prefix=${srcPrefix}`, ...capnpSchemas],
104644
+ // This number was chosen arbitrarily. If you get ENOBUFS because your compiled schema is still
104645
+ // too large, then we may need to bump this again or figure out another approach.
104646
+ // https://github.com/cloudflare/workers-sdk/pull/10217
104647
+ { maxBuffer: 3 * 1024 * 1024 }
104648
+ );
104452
104649
  if (capnpProcess.error) {
104453
104650
  throw capnpProcess.error;
104454
104651
  }
@@ -105954,8 +106151,11 @@ ${dashLink}`);
105954
106151
  );
105955
106152
  }
105956
106153
  if (config.queues.producers && config.queues.producers.length) {
105957
- const updateProducers = await updateQueueProducers(config);
105958
- deployments.push(...updateProducers);
106154
+ deployments.push(
106155
+ ...config.queues.producers.map(
106156
+ (producer) => Promise.resolve([`Producer for ${producer.queue}`])
106157
+ )
106158
+ );
105959
106159
  }
105960
106160
  if (config.queues.consumers && config.queues.consumers.length) {
105961
106161
  const updateConsumers = await updateQueueConsumers(scriptName, config);
@@ -106015,7 +106215,7 @@ var import_miniflare22 = require("miniflare");
106015
106215
 
106016
106216
  // src/index.ts
106017
106217
  init_import_meta_url();
106018
- var import_node_os8 = __toESM(require("node:os"));
106218
+ var import_node_os9 = __toESM(require("node:os"));
106019
106219
  var import_promises34 = require("node:timers/promises");
106020
106220
  var import_undici20 = __toESM(require_undici());
106021
106221
 
@@ -119870,6 +120070,10 @@ function isUsedAsServiceBinding(references) {
119870
120070
  return (references.services?.incoming.length || 0) > 0;
119871
120071
  }
119872
120072
  __name(isUsedAsServiceBinding, "isUsedAsServiceBinding");
120073
+ function isUsedByPagesFunction(references) {
120074
+ return references.services?.pages_function === true;
120075
+ }
120076
+ __name(isUsedByPagesFunction, "isUsedByPagesFunction");
119873
120077
  function isUsedAsDurableObjectNamespace(references, scriptName) {
119874
120078
  return (references.durable_objects?.filter((ref) => ref.service !== scriptName)?.length || 0) > 0;
119875
120079
  }
@@ -119889,7 +120093,7 @@ async function checkAndConfirmForceDeleteIfNecessary(scriptName, accountId) {
119889
120093
  const tailProducers = await fetchResult(
119890
120094
  `/accounts/${accountId}/workers/tails/by-consumer/${scriptName}`
119891
120095
  );
119892
- const isDependentService = isUsedAsServiceBinding(references) || isUsedAsDurableObjectNamespace(references, scriptName) || isUsedAsDispatchOutbound(references) || isUsedAsTailConsumer(tailProducers);
120096
+ const isDependentService = isUsedAsServiceBinding(references) || isUsedByPagesFunction(references) || isUsedAsDurableObjectNamespace(references, scriptName) || isUsedAsDispatchOutbound(references) || isUsedAsTailConsumer(tailProducers);
119893
120097
  if (!isDependentService) {
119894
120098
  return false;
119895
120099
  }
@@ -119900,6 +120104,11 @@ async function checkAndConfirmForceDeleteIfNecessary(scriptName, accountId) {
119900
120104
  `- Worker ${dependentScript} uses this Worker as a Service Binding`
119901
120105
  );
119902
120106
  }
120107
+ if (isUsedByPagesFunction(references)) {
120108
+ dependentMessages.push(
120109
+ `- A Pages project has a Service Binding to this Worker`
120110
+ );
120111
+ }
119903
120112
  for (const implementedDOBindingReference of references.durable_objects || []) {
119904
120113
  if (implementedDOBindingReference.service === scriptName) {
119905
120114
  continue;
@@ -121157,7 +121366,7 @@ var import_node_path38 = __toESM(require("node:path"));
121157
121366
  // src/git-client.ts
121158
121367
  init_import_meta_url();
121159
121368
  var import_node_fs19 = __toESM(require("node:fs"));
121160
- var import_node_os7 = __toESM(require("node:os"));
121369
+ var import_node_os8 = __toESM(require("node:os"));
121161
121370
  var import_node_path36 = __toESM(require("node:path"));
121162
121371
 
121163
121372
  // ../../node_modules/.pnpm/semiver@1.1.0/node_modules/semiver/dist/semiver.mjs
@@ -121228,7 +121437,7 @@ async function cloneIntoDirectory(remote, targetDirectory, subdirectory) {
121228
121437
  args.push(remote.substring(0, tagIndex));
121229
121438
  }
121230
121439
  const tempDir = import_node_fs19.default.mkdtempSync(
121231
- import_node_path36.default.join(import_node_os7.default.tmpdir(), `wrangler-generate-repo-`)
121440
+ import_node_path36.default.join(import_node_os8.default.tmpdir(), `wrangler-generate-repo-`)
121232
121441
  );
121233
121442
  args.push(tempDir);
121234
121443
  await execa("git", args);
@@ -126708,10 +126917,18 @@ To silence this warning, pass in --commit-dirty=true`
126708
126917
  );
126709
126918
  const failureMessage = logs.data[logs.total - 1].line.replace("Error:", "").trim();
126710
126919
  if (failureMessage.includes("Script startup exceeded CPU time limit")) {
126920
+ const startupError = new ParseError({ text: failureMessage });
126921
+ Object.assign(startupError, { code: 10021 });
126711
126922
  const workerBundle = formData.get("_worker.bundle");
126712
126923
  const filePath = import_node_path53.default.join(getPagesTmpDir(), "_worker.bundle");
126713
126924
  await (0, import_promises22.writeFile)(filePath, workerBundle.stream());
126714
- await handleStartupError(filePath, getPagesProjectRoot());
126925
+ throw new UserError(
126926
+ await diagnoseStartupError(
126927
+ startupError,
126928
+ filePath,
126929
+ getPagesProjectRoot()
126930
+ )
126931
+ );
126715
126932
  }
126716
126933
  throw new FatalError(
126717
126934
  `Deployment failed!
@@ -144263,7 +144480,7 @@ __name(wrapResolver, "wrapResolver");
144263
144480
 
144264
144481
  // ../../node_modules/.pnpm/@sentry+node@7.87.0_supports-color@9.2.2/node_modules/@sentry/node/esm/client.js
144265
144482
  init_import_meta_url();
144266
- var os8 = __toESM(require("os"));
144483
+ var os9 = __toESM(require("os"));
144267
144484
  var import_util15 = require("util");
144268
144485
  var NodeClient = class extends ServerRuntimeClient {
144269
144486
  /**
@@ -144290,7 +144507,7 @@ var NodeClient = class extends ServerRuntimeClient {
144290
144507
  ...options32,
144291
144508
  platform: "node",
144292
144509
  runtime: { name: "node", version: global.process.version },
144293
- serverName: options32.serverName || global.process.env.SENTRY_NAME || os8.hostname()
144510
+ serverName: options32.serverName || global.process.env.SENTRY_NAME || os9.hostname()
144294
144511
  };
144295
144512
  super(clientOptions);
144296
144513
  }
@@ -145335,7 +145552,7 @@ __name(_readSourceFile, "_readSourceFile");
145335
145552
  init_import_meta_url();
145336
145553
  var import_child_process4 = require("child_process");
145337
145554
  var import_fs21 = require("fs");
145338
- var os9 = __toESM(require("os"));
145555
+ var os10 = __toESM(require("os"));
145339
145556
  var import_path22 = require("path");
145340
145557
  var import_util16 = require("util");
145341
145558
  var readFileAsync = (0, import_util16.promisify)(import_fs21.readFile);
@@ -145396,7 +145613,7 @@ var Context = class {
145396
145613
  contexts.app.app_memory = process.memoryUsage().rss;
145397
145614
  }
145398
145615
  if (_optionalChain([contexts, "optionalAccess", (_13) => _13.device, "optionalAccess", (_14) => _14.free_memory])) {
145399
- contexts.device.free_memory = os9.freemem();
145616
+ contexts.device.free_memory = os10.freemem();
145400
145617
  }
145401
145618
  return contexts;
145402
145619
  }
@@ -145429,7 +145646,7 @@ var Context = class {
145429
145646
  __name(Context, "Context");
145430
145647
  Context.__initStatic();
145431
145648
  async function getOsContext() {
145432
- const platformId = os9.platform();
145649
+ const platformId = os10.platform();
145433
145650
  switch (platformId) {
145434
145651
  case "darwin":
145435
145652
  return getDarwinInfo();
@@ -145438,7 +145655,7 @@ async function getOsContext() {
145438
145655
  default:
145439
145656
  return {
145440
145657
  name: PLATFORM_NAMES[platformId] || platformId,
145441
- version: os9.release()
145658
+ version: os10.release()
145442
145659
  };
145443
145660
  }
145444
145661
  }
@@ -145472,19 +145689,19 @@ function getDeviceContext(deviceOpt) {
145472
145689
  const device = {};
145473
145690
  let uptime3;
145474
145691
  try {
145475
- uptime3 = os9.uptime && os9.uptime();
145692
+ uptime3 = os10.uptime && os10.uptime();
145476
145693
  } catch (e7) {
145477
145694
  }
145478
145695
  if (typeof uptime3 === "number") {
145479
145696
  device.boot_time = new Date(Date.now() - uptime3 * 1e3).toISOString();
145480
145697
  }
145481
- device.arch = os9.arch();
145698
+ device.arch = os10.arch();
145482
145699
  if (deviceOpt === true || deviceOpt.memory) {
145483
- device.memory_size = os9.totalmem();
145484
- device.free_memory = os9.freemem();
145700
+ device.memory_size = os10.totalmem();
145701
+ device.free_memory = os10.freemem();
145485
145702
  }
145486
145703
  if (deviceOpt === true || deviceOpt.cpu) {
145487
- const cpuInfo = os9.cpus();
145704
+ const cpuInfo = os10.cpus();
145488
145705
  if (cpuInfo && cpuInfo.length) {
145489
145706
  const firstCpu = cpuInfo[0];
145490
145707
  device.processor_count = cpuInfo.length;
@@ -145533,9 +145750,9 @@ function matchFirst(regex2, text) {
145533
145750
  __name(matchFirst, "matchFirst");
145534
145751
  async function getDarwinInfo() {
145535
145752
  const darwinInfo = {
145536
- kernel_version: os9.release(),
145753
+ kernel_version: os10.release(),
145537
145754
  name: "Mac OS X",
145538
- version: `10.${Number(os9.release().split(".")[0]) - 4}`
145755
+ version: `10.${Number(os10.release().split(".")[0]) - 4}`
145539
145756
  };
145540
145757
  try {
145541
145758
  const output = await new Promise((resolve25, reject) => {
@@ -145561,7 +145778,7 @@ function getLinuxDistroId(name2) {
145561
145778
  __name(getLinuxDistroId, "getLinuxDistroId");
145562
145779
  async function getLinuxInfo() {
145563
145780
  const linuxInfo = {
145564
- kernel_version: os9.release(),
145781
+ kernel_version: os10.release(),
145565
145782
  name: "Linux"
145566
145783
  };
145567
145784
  try {
@@ -151012,12 +151229,15 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
151012
151229
  if (!bindingsPrinted) {
151013
151230
  printBindings({ ...bindings, vars: maskedVars });
151014
151231
  }
151015
- await helpIfErrorIsSizeOrScriptStartup(
151232
+ const message = await helpIfErrorIsSizeOrScriptStartup(
151016
151233
  err,
151017
151234
  dependencies,
151018
151235
  workerBundle,
151019
151236
  props.projectRoot
151020
151237
  );
151238
+ if (message) {
151239
+ logger.error(message);
151240
+ }
151021
151241
  if (err instanceof ParseError && "code" in err && err.code === 10021 && err.notes.length > 0) {
151022
151242
  const maybeNameToFilePath = /* @__PURE__ */ __name((moduleName) => {
151023
151243
  if (bundleType === "commonjs") {
@@ -153376,6 +153596,7 @@ function createCLIParser(argv) {
153376
153596
  __name(createCLIParser, "createCLIParser");
153377
153597
  async function main(argv) {
153378
153598
  setupSentry();
153599
+ checkMacOSVersion({ shouldThrow: false });
153379
153600
  const startTime = Date.now();
153380
153601
  const wrangler = createCLIParser(argv);
153381
153602
  let command2;
@@ -153460,7 +153681,7 @@ async function main(argv) {
153460
153681
  logger.log(e7.message);
153461
153682
  } else if (e7 instanceof Error && e7.message.includes("Raw mode is not supported on")) {
153462
153683
  mayReport = false;
153463
- const currentPlatform = import_node_os8.default.platform();
153684
+ const currentPlatform = import_node_os9.default.platform();
153464
153685
  const thisTerminalIsUnsupported = "This terminal doesn't support raw mode.";
153465
153686
  const soWranglerWontWork = "Wrangler uses raw mode to read user input and write output to the terminal, and won't function correctly without it.";
153466
153687
  const tryRunningItIn = "Try running your previous command in a terminal that supports raw mode";
@@ -153661,11 +153882,13 @@ function getModuleType(entry) {
153661
153882
  __name(getModuleType, "getModuleType");
153662
153883
  async function convertWorkerBundleToModules(workerBundle) {
153663
153884
  return await Promise.all(
153664
- [...workerBundle.entries()].map(async (m6) => ({
153665
- type: getModuleType(m6[1]),
153666
- path: m6[0],
153667
- contents: await getEntryValue(m6[1])
153668
- }))
153885
+ [...workerBundle.entries()].map(
153886
+ async (m6) => ({
153887
+ type: getModuleType(m6[1]),
153888
+ path: m6[0],
153889
+ contents: await getEntryValue(m6[1])
153890
+ })
153891
+ )
153669
153892
  );
153670
153893
  }
153671
153894
  __name(convertWorkerBundleToModules, "convertWorkerBundleToModules");
@@ -153740,52 +153963,106 @@ async function analyseBundle(workerBundle) {
153740
153963
  __name(analyseBundle, "analyseBundle");
153741
153964
 
153742
153965
  // src/utils/friendly-validator-errors.ts
153966
+ async function helpIfErrorIsSizeOrScriptStartup(err, dependencies, workerBundle, projectRoot) {
153967
+ if (errIsScriptSize(err)) {
153968
+ return await diagnoseScriptSizeError(err, dependencies);
153969
+ }
153970
+ if (errIsStartupErr(err)) {
153971
+ return await diagnoseStartupError(err, workerBundle, projectRoot);
153972
+ }
153973
+ return null;
153974
+ }
153975
+ __name(helpIfErrorIsSizeOrScriptStartup, "helpIfErrorIsSizeOrScriptStartup");
153976
+ function diagnoseScriptSizeError(err, dependencies) {
153977
+ let message = esm_default2`
153978
+ Your Worker failed validation because it exceeded size limits.
153979
+
153980
+ ${err.text}
153981
+ ${err.notes.map((note) => ` - ${note.text}`).join("\n")}
153982
+ `;
153983
+ const dependenciesMessage = getOffendingDependenciesMessage(dependencies);
153984
+ if (dependenciesMessage) {
153985
+ message += dependenciesMessage;
153986
+ }
153987
+ return message;
153988
+ }
153989
+ __name(diagnoseScriptSizeError, "diagnoseScriptSizeError");
153990
+ async function diagnoseStartupError(err, workerBundle, projectRoot) {
153991
+ let errorMessage = esm_default2`
153992
+ Your Worker failed validation because it exceeded startup limits.
153993
+
153994
+ ${err.text}
153995
+ ${err.notes.map((note) => ` - ${note.text}`).join("\n")}
153996
+
153997
+ To ensure fast responses, there are constraints on Worker startup, such as how much CPU it can use, or how long it can take. Your Worker has hit one of these startup limits. Try reducing the amount of work done during startup (outside the event handler), either by removing code or relocating it inside the event handler.
153998
+
153999
+ Refer to https://developers.cloudflare.com/workers/platform/limits/#worker-startup-time for more details`;
154000
+ try {
154001
+ const cpuProfile = await analyseBundle(workerBundle);
154002
+ const tmpDir = await getWranglerTmpDir(
154003
+ projectRoot,
154004
+ "startup-profile",
154005
+ false
154006
+ );
154007
+ const profile = import_node_path60.default.relative(
154008
+ projectRoot ?? process.cwd(),
154009
+ import_node_path60.default.join(tmpDir.path, `worker.cpuprofile`)
154010
+ );
154011
+ await (0, import_promises37.writeFile)(profile, JSON.stringify(cpuProfile));
154012
+ errorMessage += esm_default2`
154013
+
154014
+ A CPU Profile of your Worker's startup phase has been written to ${profile} - load it into the Chrome DevTools profiler (or directly in VSCode) to view a flamegraph.`;
154015
+ } catch (profilingError) {
154016
+ logger.debug(
154017
+ `An error occurred while trying to locally profile the Worker: ${profilingError}`
154018
+ );
154019
+ }
154020
+ return errorMessage;
154021
+ }
154022
+ __name(diagnoseStartupError, "diagnoseStartupError");
154023
+ function getOffendingDependenciesMessage(dependencies) {
154024
+ const dependenciesSorted = Object.entries(dependencies);
154025
+ if (dependenciesSorted.length === 0) {
154026
+ return null;
154027
+ }
154028
+ dependenciesSorted.sort(
154029
+ ([, aData], [, bData]) => bData.bytesInOutput - aData.bytesInOutput
154030
+ );
154031
+ const topLargest = dependenciesSorted.slice(0, 5);
154032
+ const ONE_KIB_BYTES2 = 1024;
154033
+ return [
154034
+ "",
154035
+ `Here are the ${topLargest.length} largest dependencies included in your script:`,
154036
+ "",
154037
+ ...topLargest.map(
154038
+ ([dep, data]) => `- ${dep} - ${(data.bytesInOutput / ONE_KIB_BYTES2).toFixed(2)} KiB`
154039
+ ),
154040
+ "",
154041
+ "If these are unnecessary, consider removing them",
154042
+ ""
154043
+ ].join("\n");
154044
+ }
154045
+ __name(getOffendingDependenciesMessage, "getOffendingDependenciesMessage");
153743
154046
  function errIsScriptSize(err) {
153744
- if (!err) {
154047
+ if (!(err instanceof ParseError)) {
153745
154048
  return false;
153746
154049
  }
153747
- if (err.code === 10027) {
154050
+ if ("code" in err && err.code === 10027) {
153748
154051
  return true;
153749
154052
  }
153750
154053
  return false;
153751
154054
  }
153752
154055
  __name(errIsScriptSize, "errIsScriptSize");
153753
- var scriptStartupErrorRegex = /startup/i;
153754
154056
  function errIsStartupErr(err) {
153755
- if (!err) {
154057
+ if (!(err instanceof ParseError)) {
153756
154058
  return false;
153757
154059
  }
153758
- if (err.code === 10021 && err instanceof ParseError && scriptStartupErrorRegex.test(err.notes[0]?.text)) {
154060
+ if ("code" in err && err.code === 10021 && /startup/i.test(err.notes[0]?.text)) {
153759
154061
  return true;
153760
154062
  }
153761
154063
  return false;
153762
154064
  }
153763
154065
  __name(errIsStartupErr, "errIsStartupErr");
153764
- async function handleStartupError(workerBundle, projectRoot) {
153765
- const cpuProfile = await analyseBundle(workerBundle);
153766
- const tmpDir = await getWranglerTmpDir(projectRoot, "startup-profile", false);
153767
- const profile = import_node_path60.default.relative(
153768
- projectRoot ?? process.cwd(),
153769
- import_node_path60.default.join(tmpDir.path, `worker.cpuprofile`)
153770
- );
153771
- await (0, import_promises37.writeFile)(profile, JSON.stringify(cpuProfile));
153772
- throw new UserError(esm_default2`
153773
- Your Worker failed validation because it exceeded startup limits.
153774
- To ensure fast responses, there are constraints on Worker startup, such as how much CPU it can use, or how long it can take. Your Worker has hit one of these startup limits. Try reducing the amount of work done during startup (outside the event handler), either by removing code or relocating it inside the event handler.
153775
-
153776
- A CPU Profile of your Worker's startup phase has been written to ${profile} - load it into the Chrome DevTools profiler (or directly in VSCode) to view a flamegraph.
153777
-
153778
- Refer to https://developers.cloudflare.com/workers/platform/limits/#worker-startup-time for more details`);
153779
- }
153780
- __name(handleStartupError, "handleStartupError");
153781
- async function helpIfErrorIsSizeOrScriptStartup(err, dependencies, workerBundle, projectRoot) {
153782
- if (errIsScriptSize(err)) {
153783
- printOffendingDependencies(dependencies);
153784
- } else if (errIsStartupErr(err)) {
153785
- await handleStartupError(workerBundle, projectRoot);
153786
- }
153787
- }
153788
- __name(helpIfErrorIsSizeOrScriptStartup, "helpIfErrorIsSizeOrScriptStartup");
153789
154066
 
153790
154067
  // src/deploy/deploy.ts
153791
154068
  var validateRoutes3 = /* @__PURE__ */ __name((routes, assets) => {
@@ -154340,12 +154617,15 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
154340
154617
  if (!bindingsPrinted) {
154341
154618
  printBindings({ ...withoutStaticAssets, vars: maskedVars });
154342
154619
  }
154343
- await helpIfErrorIsSizeOrScriptStartup(
154620
+ const message = await helpIfErrorIsSizeOrScriptStartup(
154344
154621
  err,
154345
154622
  dependencies,
154346
154623
  workerBundle,
154347
154624
  props.projectRoot
154348
154625
  );
154626
+ if (message !== null) {
154627
+ logger.error(message);
154628
+ }
154349
154629
  if (err instanceof APIError && "code" in err && err.code === 10021 && err.notes.length > 0) {
154350
154630
  err.preventReport();
154351
154631
  if (err.notes[0].text === "binding DB of type d1 must have a valid `id` specified [code: 10021]") {
@@ -154547,25 +154827,6 @@ function isAuthenticationError(e7) {
154547
154827
  return e7 instanceof ParseError && e7.code === 1e4;
154548
154828
  }
154549
154829
  __name(isAuthenticationError, "isAuthenticationError");
154550
- async function updateQueueProducers(config) {
154551
- const producers = config.queues.producers || [];
154552
- const updateProducers = [];
154553
- for (const producer of producers) {
154554
- const body = {
154555
- queue_name: producer.queue,
154556
- settings: {
154557
- delivery_delay: producer.delivery_delay
154558
- }
154559
- };
154560
- updateProducers.push(
154561
- putQueue(config, producer.queue, body).then(() => [
154562
- `Producer for ${producer.queue}`
154563
- ])
154564
- );
154565
- }
154566
- return updateProducers;
154567
- }
154568
- __name(updateQueueProducers, "updateQueueProducers");
154569
154830
  async function updateQueueConsumers(scriptName, config) {
154570
154831
  const consumers2 = config.queues.consumers || [];
154571
154832
  const updateConsumers = [];
@@ -156918,7 +157179,7 @@ init_import_meta_url();
156918
157179
  // ../../node_modules/.pnpm/get-port@7.0.0/node_modules/get-port/index.js
156919
157180
  init_import_meta_url();
156920
157181
  var import_node_net = __toESM(require("node:net"), 1);
156921
- var import_node_os9 = __toESM(require("node:os"), 1);
157182
+ var import_node_os10 = __toESM(require("node:os"), 1);
156922
157183
  var Locked = class extends Error {
156923
157184
  constructor(port) {
156924
157185
  super(`${port} is locked`);
@@ -156932,7 +157193,7 @@ var lockedPorts = {
156932
157193
  var releaseOldLockedPortsIntervalMs = 1e3 * 15;
156933
157194
  var timeout;
156934
157195
  var getLocalHosts = /* @__PURE__ */ __name(() => {
156935
- const interfaces = import_node_os9.default.networkInterfaces();
157196
+ const interfaces = import_node_os10.default.networkInterfaces();
156936
157197
  const results = /* @__PURE__ */ new Set([void 0, "0.0.0.0"]);
156937
157198
  for (const _interface of Object.values(interfaces)) {
156938
157199
  for (const config of _interface) {