wrangler 3.74.0 → 3.76.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -35206,7 +35206,7 @@ var require_websocket2 = __commonJS({
  var http4 = require("http");
  var net3 = require("net");
  var tls = require("tls");
- var { randomBytes: randomBytes2, createHash: createHash3 } = require("crypto");
+ var { randomBytes: randomBytes2, createHash: createHash4 } = require("crypto");
  var { Readable: Readable2 } = require("stream");
  var { URL: URL7 } = require("url");
  var PerMessageDeflate = require_permessage_deflate();
@@ -35767,7 +35767,7 @@ var require_websocket2 = __commonJS({
  if (websocket.readyState !== WebSocket2.CONNECTING)
  return;
  req = websocket._req = null;
- const digest = createHash3("sha1").update(key + GUID).digest("base64");
+ const digest = createHash4("sha1").update(key + GUID).digest("base64");
  if (res.headers["sec-websocket-accept"] !== digest) {
  abortHandshake(websocket, socket, "Invalid Sec-WebSocket-Accept header");
  return;
@@ -36114,7 +36114,7 @@ var require_websocket_server = __commonJS({
  var https3 = require("https");
  var net3 = require("net");
  var tls = require("tls");
- var { createHash: createHash3 } = require("crypto");
+ var { createHash: createHash4 } = require("crypto");
  var PerMessageDeflate = require_permessage_deflate();
  var WebSocket2 = require_websocket2();
  var { format: format10, parse: parse7 } = require_extension();
@@ -36344,7 +36344,7 @@ var require_websocket_server = __commonJS({
  }
  if (this._state > RUNNING)
  return abortHandshake(socket, 503);
- const digest = createHash3("sha1").update(key + GUID).digest("base64");
+ const digest = createHash4("sha1").update(key + GUID).digest("base64");
  const headers = [
  "HTTP/1.1 101 Switching Protocols",
  "Upgrade: websocket",
@@ -58615,7 +58615,7 @@ var require_parse_stream = __commonJS({
  if (stm) {
  return parseReadable(stm);
  } else {
- return parseTransform(stm);
+ return parseTransform2(stm);
  }
  }
  __name(parseStream, "parseStream");
@@ -58666,7 +58666,7 @@ var require_parse_stream = __commonJS({
  });
  }
  __name(parseReadable, "parseReadable");
- function parseTransform() {
+ function parseTransform2() {
  const parser2 = new TOMLParser();
  return new stream2.Transform({
  objectMode: true,
@@ -58688,7 +58688,7 @@ var require_parse_stream = __commonJS({
  }
  });
  }
- __name(parseTransform, "parseTransform");
+ __name(parseTransform2, "parseTransform");
  }
  });

@@ -67521,8 +67521,8 @@ var require_read = __commonJS({
  var onFinished = require_on_finished();
  var unpipe = require_unpipe();
  var zlib = require("zlib");
- module3.exports = read;
- function read(req, res, next, parse7, debug, options29) {
+ module3.exports = read2;
+ function read2(req, res, next, parse7, debug, options29) {
  var length;
  var opts = options29;
  var stream2;
@@ -67592,7 +67592,7 @@ var require_read = __commonJS({
  next();
  });
  }
- __name(read, "read");
+ __name(read2, "read");
  function contentstream(req, debug, inflate) {
  var encoding = (req.headers["content-encoding"] || "identity").toLowerCase();
  var length = req.headers["content-length"];
@@ -76522,7 +76522,7 @@ var require_json = __commonJS({
  var contentType = require_content_type();
  var createError = require_http_errors();
  var debug = require_src2()("body-parser:json");
- var read = require_read();
+ var read2 = require_read();
  var typeis = require_type_is();
  module3.exports = json;
  var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*([^\x20\x09\x0a\x0d])/;
@@ -76587,7 +76587,7 @@ var require_json = __commonJS({
  }));
  return;
  }
- read(req, res, next, parse7, debug, {
+ read2(req, res, next, parse7, debug, {
  encoding: charset,
  inflate,
  limit,
@@ -76652,7 +76652,7 @@ var require_raw = __commonJS({
  init_import_meta_url();
  var bytes = require_bytes();
  var debug = require_src2()("body-parser:raw");
- var read = require_read();
+ var read2 = require_read();
  var typeis = require_type_is();
  module3.exports = raw;
  function raw(options29) {
@@ -76687,7 +76687,7 @@ var require_raw = __commonJS({
  next();
  return;
  }
- read(req, res, next, parse7, debug, {
+ read2(req, res, next, parse7, debug, {
  encoding: null,
  inflate,
  limit,
@@ -76713,7 +76713,7 @@ var require_text = __commonJS({
  var bytes = require_bytes();
  var contentType = require_content_type();
  var debug = require_src2()("body-parser:text");
- var read = require_read();
+ var read2 = require_read();
  var typeis = require_type_is();
  module3.exports = text;
  function text(options29) {
@@ -76750,7 +76750,7 @@ var require_text = __commonJS({
  return;
  }
  var charset = getCharset(req) || defaultCharset;
- read(req, res, next, parse7, debug, {
+ read2(req, res, next, parse7, debug, {
  encoding: charset,
  inflate,
  limit,
@@ -78615,7 +78615,7 @@ var require_urlencoded2 = __commonJS({
  var createError = require_http_errors();
  var debug = require_src2()("body-parser:urlencoded");
  var deprecate = require_depd()("body-parser");
- var read = require_read();
+ var read2 = require_read();
  var typeis = require_type_is();
  module3.exports = urlencoded;
  var parsers = /* @__PURE__ */ Object.create(null);
@@ -78665,7 +78665,7 @@ var require_urlencoded2 = __commonJS({
  }));
  return;
  }
- read(req, res, next, parse7, debug, {
+ read2(req, res, next, parse7, debug, {
  debug,
  encoding: charset,
  inflate,
@@ -109947,7 +109947,7 @@ A subrequest is a call to fetch(), a redirect, or a call to any Cache API method
  }
  }, "ThrowingEventTarget");
  var import_path22 = __toModule(require("path"));
- var import_colors15 = __toModule(require_colors2());
+ var import_colors16 = __toModule(require_colors2());
  var cwd2 = process.cwd();
  var cwdNodeModules = import_path22.default.join(cwd2, "node_modules");
  var LogLevel3;
@@ -109968,16 +109968,16 @@ A subrequest is a call to fetch(), a redirect, or a call to any Cache API method
  [5]: "vrb"
  };
  var LEVEL_COLOUR = {
- [0]: import_colors15.reset,
- [1]: import_colors15.red,
- [2]: import_colors15.yellow,
- [3]: import_colors15.green,
- [4]: import_colors15.grey,
- [5]: (input) => (0, import_colors15.dim)((0, import_colors15.grey)(input))
+ [0]: import_colors16.reset,
+ [1]: import_colors16.red,
+ [2]: import_colors16.yellow,
+ [3]: import_colors16.green,
+ [4]: import_colors16.grey,
+ [5]: (input) => (0, import_colors16.dim)((0, import_colors16.grey)(input))
  };
  function dimInternalStackLine(line) {
  if (line.startsWith(" at") && (!line.includes(cwd2) || line.includes(cwdNodeModules))) {
- return (0, import_colors15.dim)(line);
+ return (0, import_colors16.dim)(line);
  }
  return line;
  }
@@ -132374,7 +132374,7 @@ var require_parse_stream2 = __commonJS({
  if (stm) {
  return parseReadable(stm);
  } else {
- return parseTransform(stm);
+ return parseTransform2(stm);
  }
  }
  __name(parseStream, "parseStream");
@@ -132425,7 +132425,7 @@ var require_parse_stream2 = __commonJS({
  });
  }
  __name(parseReadable, "parseReadable");
- function parseTransform() {
+ function parseTransform2() {
  const parser2 = new TOMLParser();
  return new stream2.Transform({
  objectMode: true,
@@ -132447,7 +132447,7 @@ var require_parse_stream2 = __commonJS({
  }
  });
  }
- __name(parseTransform, "parseTransform");
+ __name(parseTransform2, "parseTransform");
  }
  });

@@ -133220,7 +133220,7 @@ var require_src8 = __commonJS({
  var import_path22 = __toModule(require("path"));
  var import_shared8 = __toModule(require_src6());
  var import_dotenv2 = __toModule(require_main4());
- var import_crypto2 = __toModule(require("crypto"));
+ var import_crypto3 = __toModule(require("crypto"));
  var import_web2 = __toModule(require("stream/web"));
  var import_shared3 = __toModule(require_src6());
  var DOM_EXCEPTION_NAMES = {
@@ -133311,7 +133311,7 @@ var require_src8 = __commonJS({
  } : func;
  }
  __name(assertsInRequest, "assertsInRequest");
- var CryptoKey = globalThis.CryptoKey ?? import_crypto2.webcrypto.CryptoKey;
+ var CryptoKey = globalThis.CryptoKey ?? import_crypto3.webcrypto.CryptoKey;
  var supportedDigests = ["sha-1", "sha-256", "sha-384", "sha-512", "md5"];
  var DigestStream = /* @__PURE__ */ __name(class extends import_web2.WritableStream {
  digest;
@@ -133323,7 +133323,7 @@ var require_src8 = __commonJS({
  name = name.replace("-", "");
  let digestResolve;
  const digest2 = new Promise((r3) => digestResolve = r3);
- const hash = (0, import_crypto2.createHash)(name);
+ const hash = (0, import_crypto3.createHash)(name);
  super({
  write(chunk) {
  if (isBufferSource(chunk)) {
@@ -133341,7 +133341,7 @@ var require_src8 = __commonJS({
  }, "DigestStream");
  var usesModernEd25519 = (async () => {
  try {
- await import_crypto2.webcrypto.subtle.generateKey({ name: "Ed25519", namedCurve: "Ed25519" }, false, ["sign", "verify"]);
+ await import_crypto3.webcrypto.subtle.generateKey({ name: "Ed25519", namedCurve: "Ed25519" }, false, ["sign", "verify"]);
  return true;
  } catch {
  return false;
@@ -133384,14 +133384,14 @@ var require_src8 = __commonJS({
  if (name?.toLowerCase() == "md5") {
  if (data instanceof ArrayBuffer)
  data = new Uint8Array(data);
- const hash = (0, import_crypto2.createHash)("md5").update(data);
+ const hash = (0, import_crypto3.createHash)("md5").update(data);
  return Promise.resolve((0, import_shared3.viewToBuffer)(hash.digest()));
  }
- return import_crypto2.webcrypto.subtle.digest(algorithm, data);
+ return import_crypto3.webcrypto.subtle.digest(algorithm, data);
  }, "digest");
  var generateKey = /* @__PURE__ */ __name(async function(algorithm, extractable, keyUsages) {
  algorithm = await ensureValidNodeAlgorithm(algorithm);
- const key = await import_crypto2.webcrypto.subtle.generateKey(algorithm, extractable, keyUsages);
+ const key = await import_crypto3.webcrypto.subtle.generateKey(algorithm, extractable, keyUsages);
  if (key instanceof CryptoKey) {
  return ensureValidWorkerKey(key);
  } else {
@@ -133405,27 +133405,27 @@ var require_src8 = __commonJS({
  algorithm = await ensureValidNodeAlgorithm(algorithm);
  if (forcePublic)
  algorithm.public = true;
- const key = await import_crypto2.webcrypto.subtle.importKey(format22, keyData, algorithm, extractable, keyUsages);
+ const key = await import_crypto3.webcrypto.subtle.importKey(format22, keyData, algorithm, extractable, keyUsages);
  return ensureValidWorkerKey(key);
  }, "importKey");
  var exportKey = /* @__PURE__ */ __name(async function(format22, key) {
  key = await ensureValidNodeKey(key);
- return import_crypto2.webcrypto.subtle.exportKey(format22, key);
+ return import_crypto3.webcrypto.subtle.exportKey(format22, key);
  }, "exportKey");
  var sign = /* @__PURE__ */ __name(async function(algorithm, key, data) {
  algorithm = await ensureValidNodeAlgorithm(algorithm);
  key = await ensureValidNodeKey(key);
- return import_crypto2.webcrypto.subtle.sign(algorithm, key, data);
+ return import_crypto3.webcrypto.subtle.sign(algorithm, key, data);
  }, "sign");
  var verify = /* @__PURE__ */ __name(async function(algorithm, key, signature, data) {
  algorithm = await ensureValidNodeAlgorithm(algorithm);
  key = await ensureValidNodeKey(key);
- return import_crypto2.webcrypto.subtle.verify(algorithm, key, signature, data);
+ return import_crypto3.webcrypto.subtle.verify(algorithm, key, signature, data);
  }, "verify");
  function createCrypto(blockGlobalRandom = false) {
- const assertingGetRandomValues = assertsInRequest(import_crypto2.webcrypto.getRandomValues.bind(import_crypto2.webcrypto), blockGlobalRandom);
+ const assertingGetRandomValues = assertsInRequest(import_crypto3.webcrypto.getRandomValues.bind(import_crypto3.webcrypto), blockGlobalRandom);
  const assertingGenerateKey = assertsInRequest(generateKey, blockGlobalRandom);
- const subtle = new Proxy(import_crypto2.webcrypto.subtle, {
+ const subtle = new Proxy(import_crypto3.webcrypto.subtle, {
  get(target, propertyKey, receiver) {
  if (propertyKey === "digest")
  return digest;
@@ -133438,16 +133438,16 @@ var require_src8 = __commonJS({
  if (propertyKey === "sign")
  return sign;
  if (propertyKey === "timingSafeEqual")
- return import_crypto2.timingSafeEqual;
+ return import_crypto3.timingSafeEqual;
  if (propertyKey === "verify")
  return verify;
  let result = Reflect.get(target, propertyKey, receiver);
  if (typeof result === "function")
- result = result.bind(import_crypto2.webcrypto.subtle);
+ result = result.bind(import_crypto3.webcrypto.subtle);
  return result;
  }
  });
- return new Proxy(import_crypto2.webcrypto, {
+ return new Proxy(import_crypto3.webcrypto, {
  get(target, propertyKey, receiver) {
  if (propertyKey === "getRandomValues")
  return assertingGetRandomValues;
@@ -133457,7 +133457,7 @@ var require_src8 = __commonJS({
  return DigestStream;
  let result = Reflect.get(target, propertyKey, receiver);
  if (typeof result === "function")
- result = result.bind(import_crypto2.webcrypto);
+ result = result.bind(import_crypto3.webcrypto);
  return result;
  }
  });
@@ -133512,7 +133512,7 @@ var require_src8 = __commonJS({
  var import_web3 = __toModule(require("stream/web"));
  var import_url9 = __toModule(require("url"));
  var import_shared5 = __toModule(require_src6());
- var import_colors15 = __toModule(require_colors2());
+ var import_colors16 = __toModule(require_colors2());
  var import_set_cookie_parser = __toModule(require_set_cookie());
  var import_undici28 = __toModule(require_undici2());
  var import_web22 = __toModule(require("stream/web"));
@@ -133600,20 +133600,20 @@ var require_src8 = __commonJS({
  const bytesPerElement = proto2.BYTES_PER_ELEMENT ?? 1;
  const ctor = proto2.constructor;
  let buffer = view.buffer;
- let read = 0;
+ let read2 = 0;
  let done = false;
- while (read < byteLength && read < bytes) {
- const result = await this.read(new ctor(buffer, byteOffset + read, (byteLength - read) / bytesPerElement));
+ while (read2 < byteLength && read2 < bytes) {
+ const result = await this.read(new ctor(buffer, byteOffset + read2, (byteLength - read2) / bytesPerElement));
  if (result.value) {
  buffer = result.value.buffer;
- read += result.value.byteLength;
+ read2 += result.value.byteLength;
  }
  if (result.done) {
- done = read === 0;
+ done = read2 === 0;
  break;
  }
  }
- const value = new ctor(buffer, byteOffset, read / bytesPerElement);
+ const value = new ctor(buffer, byteOffset, read2 / bytesPerElement);
  return { value, done };
  };
  var originalTee = import_web22.ReadableStream.prototype.tee;
@@ -134333,12 +134333,12 @@ var require_src8 = __commonJS({
  __name(millisFromCPUTime, "millisFromCPUTime");
  function colourFromHTTPStatus(status2) {
  if (200 <= status2 && status2 < 300)
- return import_colors15.green;
+ return import_colors16.green;
  if (400 <= status2 && status2 < 500)
- return import_colors15.yellow;
+ return import_colors16.yellow;
  if (500 <= status2)
- return import_colors15.red;
- return import_colors15.blue;
+ return import_colors16.red;
+ return import_colors16.blue;
  }
  __name(colourFromHTTPStatus, "colourFromHTTPStatus");
  async function logResponse(log2, {
@@ -134353,7 +134353,7 @@ var require_src8 = __commonJS({
  const responseTime = millisFromHRTime(process.hrtime(start));
  if (startCpu !== void 0) {
  const responseTimeCpu = millisFromCPUTime(process.cpuUsage(startCpu).user);
- cpuParts.push((0, import_colors15.dim)((0, import_colors15.grey)(` (CPU: ~${responseTimeCpu}`)));
+ cpuParts.push((0, import_colors16.dim)((0, import_colors16.grey)(` (CPU: ~${responseTimeCpu}`)));
  }
  let waitUntilResponse;
  try {
@@ -134366,16 +134366,16 @@ var require_src8 = __commonJS({
  if (startCpu !== void 0) {
  if (waitUntilResponse?.length) {
  const waitUntilTimeCpu = millisFromCPUTime(process.cpuUsage(startCpu).user);
- cpuParts.push((0, import_colors15.dim)((0, import_colors15.grey)(`, waitUntil: ~${waitUntilTimeCpu}`)));
+ cpuParts.push((0, import_colors16.dim)((0, import_colors16.grey)(`, waitUntil: ~${waitUntilTimeCpu}`)));
  }
- cpuParts.push((0, import_colors15.dim)((0, import_colors15.grey)(")")));
+ cpuParts.push((0, import_colors16.dim)((0, import_colors16.grey)(")")));
  }
  log2.log([
- `${(0, import_colors15.bold)(method)} ${url4} `,
- status2 ? colourFromHTTPStatus(status2)(`${(0, import_colors15.bold)(status2)} ${import_http5.default.STATUS_CODES[status2]} `) : "",
- (0, import_colors15.grey)(`(${responseTime}`),
- waitUntilResponse?.length ? (0, import_colors15.grey)(`, waitUntil: ${waitUntilTime}`) : "",
- (0, import_colors15.grey)(")"),
+ `${(0, import_colors16.bold)(method)} ${url4} `,
+ status2 ? colourFromHTTPStatus(status2)(`${(0, import_colors16.bold)(status2)} ${import_http5.default.STATUS_CODES[status2]} `) : "",
+ (0, import_colors16.grey)(`(${responseTime}`),
+ waitUntilResponse?.length ? (0, import_colors16.grey)(`, waitUntil: ${waitUntilTime}`) : "",
+ (0, import_colors16.grey)(")"),
  ...cpuParts
  ].join(""));
  }
@@ -144510,18 +144510,18 @@ var require_parse_proxy_response = __commonJS({
  return new Promise((resolve22, reject) => {
  let buffersLength = 0;
  const buffers = [];
- function read() {
+ function read2() {
  const b2 = socket.read();
  if (b2)
  ondata(b2);
  else
- socket.once("readable", read);
+ socket.once("readable", read2);
  }
- __name(read, "read");
+ __name(read2, "read");
  function cleanup() {
  socket.removeListener("end", onend);
  socket.removeListener("error", onerror);
- socket.removeListener("readable", read);
+ socket.removeListener("readable", read2);
  }
  __name(cleanup, "cleanup");
  function onend() {
@@ -144543,7 +144543,7 @@ var require_parse_proxy_response = __commonJS({
  const endOfHeaders = buffered.indexOf("\r\n\r\n");
  if (endOfHeaders === -1) {
  debug("have not received end of HTTP headers yet...");
- read();
+ read2();
  return;
  }
  const headerParts = buffered.slice(0, endOfHeaders).toString("ascii").split("\r\n");
@@ -144589,7 +144589,7 @@ var require_parse_proxy_response = __commonJS({
  __name(ondata, "ondata");
  socket.on("error", onerror);
  socket.on("end", onend);
- read();
+ read2();
  });
  }
  __name(parseProxyResponse, "parseProxyResponse");
@@ -147054,7 +147054,7 @@ var require_websocket4 = __commonJS({
  var http4 = require("http");
  var net3 = require("net");
  var tls = require("tls");
- var { randomBytes: randomBytes2, createHash: createHash3 } = require("crypto");
+ var { randomBytes: randomBytes2, createHash: createHash4 } = require("crypto");
  var { Duplex, Readable: Readable2 } = require("stream");
  var { URL: URL7 } = require("url");
  var PerMessageDeflate = require_permessage_deflate2();
@@ -147731,7 +147731,7 @@ var require_websocket4 = __commonJS({
  abortHandshake(websocket, socket, "Invalid Upgrade header");
  return;
  }
- const digest = createHash3("sha1").update(key + GUID).digest("base64");
+ const digest = createHash4("sha1").update(key + GUID).digest("base64");
  if (res.headers["sec-websocket-accept"] !== digest) {
  abortHandshake(websocket, socket, "Invalid Sec-WebSocket-Accept header");
  return;
@@ -148009,7 +148009,7 @@ var require_websocket_server2 = __commonJS({
  var EventEmitter5 = require("events");
  var http4 = require("http");
  var { Duplex } = require("stream");
- var { createHash: createHash3 } = require("crypto");
+ var { createHash: createHash4 } = require("crypto");
  var extension = require_extension2();
  var PerMessageDeflate = require_permessage_deflate2();
  var subprotocol = require_subprotocol();
@@ -148312,7 +148312,7 @@ var require_websocket_server2 = __commonJS({
  }
  if (this._state > RUNNING)
  return abortHandshake(socket, 503);
- const digest = createHash3("sha1").update(key + GUID).digest("base64");
+ const digest = createHash4("sha1").update(key + GUID).digest("base64");
  const headers = [
  "HTTP/1.1 101 Switching Protocols",
  "Upgrade: websocket",
@@ -149176,19 +149176,19 @@ var require_parse_proxy_response2 = __commonJS({
  return new Promise((resolve22, reject) => {
  let buffersLength = 0;
  const buffers = [];
- function read() {
+ function read2() {
  const b2 = socket.read();
  if (b2)
  ondata(b2);
  else
- socket.once("readable", read);
+ socket.once("readable", read2);
  }
- __name(read, "read");
+ __name(read2, "read");
  function cleanup() {
  socket.removeListener("end", onend);
  socket.removeListener("error", onerror);
  socket.removeListener("close", onclose);
- socket.removeListener("readable", read);
+ socket.removeListener("readable", read2);
  }
  __name(cleanup, "cleanup");
  function onclose(err) {
@@ -149212,7 +149212,7 @@ var require_parse_proxy_response2 = __commonJS({
  const endOfHeaders = buffered.indexOf("\r\n\r\n");
  if (endOfHeaders === -1) {
  debug("have not received end of HTTP headers yet...");
- read();
+ read2();
  return;
  }
  const firstLine = buffered.toString("ascii", 0, buffered.indexOf("\r\n"));
@@ -149227,7 +149227,7 @@ var require_parse_proxy_response2 = __commonJS({
  socket.on("error", onerror);
  socket.on("close", onclose);
  socket.on("end", onend);
- read();
+ read2();
  });
  }
  __name(parseProxyResponse, "parseProxyResponse");
@@ -152394,7 +152394,7 @@ __name(getEnvironmentVariableFactory, "getEnvironmentVariableFactory");
  init_import_meta_url();
  var getC3CommandFromEnv = getEnvironmentVariableFactory({
  variableName: "WRANGLER_C3_COMMAND",
- defaultValue: () => "create cloudflare@2.5.0"
+ defaultValue: () => "create cloudflare@^2.5.0"
  });
  var getWranglerSendMetricsFromEnv = getEnvironmentVariableFactory({
  variableName: "WRANGLER_SEND_METRICS"
@@ -152693,7 +152693,7 @@ init_import_meta_url();
  init_import_meta_url();

  // package.json
- var version = "3.74.0";
+ var version = "3.76.0";
  var package_default = {
  name: "wrangler",
  version,
@@ -157058,7 +157058,14 @@ var validateMigrations = /* @__PURE__ */ __name((diagnostics, field, value) => {
  }
  let valid = true;
  for (let i = 0; i < rawMigrations.length; i++) {
- const { tag, new_classes, renamed_classes, deleted_classes, ...rest } = rawMigrations[i];
+ const {
+ tag,
+ new_classes,
+ new_sqlite_classes,
+ renamed_classes,
+ deleted_classes,
+ ...rest
+ } = rawMigrations[i];
  valid = validateAdditionalProperties(
  diagnostics,
  "migrations",
@@ -157078,6 +157085,12 @@ var validateMigrations = /* @__PURE__ */ __name((diagnostics, field, value) => {
  new_classes,
  "string"
  ) && valid;
+ valid = validateOptionalTypedArray(
+ diagnostics,
+ `migrations[${i}].new_sqlite_classes`,
+ new_sqlite_classes,
+ "string"
+ ) && valid;
  if (renamed_classes !== void 0) {
  if (!Array.isArray(renamed_classes)) {
  diagnostics.errors.push(
@@ -158029,6 +158042,17 @@ ${entryPointList}`
  }
  __name(getEntryPointFromMetafile, "getEntryPointFromMetafile");

+ // src/deployment-bundle/esbuild-plugins/als-external.ts
+ init_import_meta_url();
+ var asyncLocalStoragePlugin = {
+ name: "Mark async local storage imports as external plugin",
+ setup(pluginBuild) {
+ pluginBuild.onResolve({ filter: /^node:async_hooks(\/|$)/ }, () => {
+ return { external: true };
+ });
+ }
+ };
+
  // src/deployment-bundle/esbuild-plugins/cloudflare-internal.ts
  init_import_meta_url();
  var cloudflareInternalPlugin = {
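A sketch of what the new plugin enables, assuming a Worker built with the `nodejs_als` compatibility flag (wired to this plugin in the `bundleWorker` hunk below): `node:async_hooks` imports are left external rather than bundled, so the runtime supplies `AsyncLocalStorage`.

```ts
// Hypothetical Worker relying on the externalized import above.
import { AsyncLocalStorage } from "node:async_hooks";

const requestId = new AsyncLocalStorage<string>();

export default {
  async fetch(_request: Request): Promise<Response> {
    // Run the handler inside a per-request context...
    return requestId.run(crypto.randomUUID(), async () => {
      // ...and read the context id back anywhere downstream.
      return new Response(`id: ${requestId.getStore()}`);
    });
  },
};
```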
@@ -159173,6 +159197,7 @@ async function bundleWorker(entry, destination, {
  plugins: [
  aliasPlugin,
  moduleCollector.plugin,
+ ...nodejsCompatMode === "als" ? [asyncLocalStoragePlugin] : [],
  ...nodejsCompatMode === "legacy" ? [
  (0, import_node_globals_polyfill.default)({ buffer: true }),
  standardURLPlugin(),
@@ -159513,6 +159538,7 @@ function getNodeCompatMode(compatibilityFlags, {
  noBundle = void 0
  }) {
  const {
+ hasNodejsAlsFlag,
  hasNodejsCompatFlag,
  hasNodejsCompatV2Flag,
  hasExperimentalNodejsCompatV2Flag
@@ -159523,6 +159549,8 @@ function getNodeCompatMode(compatibilityFlags, {
  mode = "v2";
  } else if (hasNodejsCompatFlag) {
  mode = "v1";
+ } else if (hasNodejsAlsFlag) {
+ mode = "als";
  } else if (legacy) {
  mode = "legacy";
  }
@@ -159539,9 +159567,10 @@ function getNodeCompatMode(compatibilityFlags, {
  "The `nodejs_compat` and `nodejs_compat_v2` compatibility flags cannot be used in together. Please select just one."
  );
  }
- if (legacy && (hasNodejsCompatFlag || hasNodejsCompatV2Flag)) {
+ if (legacy && (hasNodejsCompatFlag || hasNodejsCompatV2Flag || hasNodejsAlsFlag)) {
+ const nodejsFlag = hasNodejsCompatFlag ? "`nodejs_compat`" : hasNodejsCompatV2Flag ? "`nodejs_compat_v2`" : "`nodejs_als`";
  throw new UserError(
- `The ${hasNodejsCompatFlag ? "`nodejs_compat`" : "`nodejs_compat_v2`"} compatibility flag cannot be used in conjunction with the legacy \`--node-compat\` flag. If you want to use the Workers ${hasNodejsCompatFlag ? "`nodejs_compat`" : "`nodejs_compat_v2`"} compatibility flag, please remove the \`--node-compat\` argument from your CLI command or \`node_compat = true\` from your config file.`
+ `The ${nodejsFlag} compatibility flag cannot be used in conjunction with the legacy \`--node-compat\` flag. If you want to use the Workers ${nodejsFlag} compatibility flag, please remove the \`--node-compat\` argument from your CLI command or \`node_compat = true\` from your config file.`
  );
  }
  if (noBundle && legacy) {
@@ -159564,6 +159593,7 @@ function getNodeCompatMode(compatibilityFlags, {
  __name(getNodeCompatMode, "getNodeCompatMode");
  function parseNodeCompatibilityFlags(compatibilityFlags) {
  return {
+ hasNodejsAlsFlag: compatibilityFlags.includes("nodejs_als"),
  hasNodejsCompatFlag: compatibilityFlags.includes("nodejs_compat"),
  hasNodejsCompatV2Flag: compatibilityFlags.includes("nodejs_compat_v2"),
  hasExperimentalNodejsCompatV2Flag: compatibilityFlags.includes(
@@ -160975,7 +161005,8 @@ var DefaultScopes = {
  "zone:read": "Grants read level access to account zone.",
  "ssl_certs:write": "See and manage mTLS certificates for your account",
  "ai:write": "See and change Workers AI catalog and assets",
- "queues:write": "See and change Cloudflare Queues settings and data"
+ "queues:write": "See and change Cloudflare Queues settings and data",
+ "pipelines:write": "See and change Cloudflare Pipelines configurations and data"
  };
  var OptionalScopes = {
  "cloudchamber:write": "Manage Cloudchamber"
@@ -165037,7 +165068,10 @@ var syncExperimentalAssets = /* @__PURE__ */ __name(async (accountId, scriptName
  1
  );
  }
- assetLogCount = logAssetUpload(`+ ${manifestEntry[0]}`, assetLogCount);
+ assetLogCount = logAssetUpload(
+ `+ ${decodeFilepath(manifestEntry[0])}`,
+ assetLogCount
+ );
  return manifestEntry;
  });
  });
@@ -165051,7 +165085,8 @@ var syncExperimentalAssets = /* @__PURE__ */ __name(async (accountId, scriptName
  const doUpload = /* @__PURE__ */ __name(async () => {
  const payload = await Promise.all(
  bucket.map(async (manifestEntry) => {
- const absFilePath = path28.join(assetDirectory, manifestEntry[0]);
+ const decodedFilePath = decodeFilepath(manifestEntry[0]);
+ const absFilePath = path28.join(assetDirectory, decodedFilePath);
  return {
  base64: true,
  key: manifestEntry[1].hash,
@@ -165239,6 +165274,9 @@ var encodeFilePath = /* @__PURE__ */ __name((filePath) => {
  const encodedPath = filePath.split(path28.sep).map((segment) => encodeURIComponent(segment)).join("/");
  return "/" + encodedPath;
  }, "encodeFilePath");
+ var decodeFilepath = /* @__PURE__ */ __name((filePath) => {
+ return filePath.split("/").map((segment) => decodeURIComponent(segment)).join(path28.sep);
+ }, "decodeFilepath");

  // src/metrics/index.ts
  init_import_meta_url();
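A small sketch of the round-trip the two helpers above form (`encodeFilePath` already existed; `decodeFilepath` is the addition), assuming POSIX-style manifest keys with a leading slash; the file name is illustrative.

```ts
import * as path from "node:path";

// Mirrors the bundled helpers: manifest keys are "/"-joined, URL-encoded
// segments; decodeFilepath restores the platform-specific relative path.
const encodeFilePath = (p: string): string =>
  "/" + p.split(path.sep).map(encodeURIComponent).join("/");
const decodeFilepath = (p: string): string =>
  p.split("/").map(decodeURIComponent).join(path.sep);

const original = ["images", "héllo world.png"].join(path.sep);
const key = encodeFilePath(original); // "/images/h%C3%A9llo%20world.png"
// Dropping the leading "/" recovers the original relative path:
console.log(decodeFilepath(key.slice(1)) === original); // true
```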
@@ -169584,22 +169622,9 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
  headers: await getMetricsUsageHeaders(config.send_metrics)
  }
  );
- await fetchResult(
- `/accounts/${accountId}/workers/scripts/${scriptName}/deployments`,
- {
- method: "POST",
- body: JSON.stringify({
- stratergy: "percentage",
- versions: [
- {
- percentage: 100,
- version_id: versionResult.id
- }
- ]
- }),
- headers: await getMetricsUsageHeaders(config.send_metrics)
- }
- );
+ const versionMap = /* @__PURE__ */ new Map();
+ versionMap.set(versionResult.id, 100);
+ await createDeployment(accountId, scriptName, versionMap, void 0);
  await patchNonVersionedScriptSettings(accountId, scriptName, {
  tail_consumers: worker.tail_consumers,
  logpush: worker.logpush
@@ -178817,6 +178842,124 @@ IP: ${deployment.network.ipv4}`);
  __name(handleCreateCommand, "handleCreateCommand");
  var whichImageQuestion = "Which image url should we use for your container?";

+ // src/cloudchamber/curl.ts
+ init_import_meta_url();
+ var import_crypto = require("crypto");
+ function yargsCurl(args) {
+ return args.positional("path", { type: "string", default: "/" }).option("header", {
+ type: "array",
+ alias: "H",
+ describe: "Add headers in the form of --header <name>:<value>"
+ }).option("data", {
+ type: "string",
+ describe: "Add a JSON body to the request",
+ alias: "D"
+ }).option("method", {
+ type: "string",
+ alias: "X",
+ default: "GET"
+ }).option("silent", {
+ describe: "Only output response",
+ type: "boolean",
+ alias: "s"
+ }).option("verbose", {
+ describe: "Print everything, like request id, or headers",
+ type: "boolean",
+ alias: "v"
+ }).option("use-stdin", {
+ describe: "Equivalent of using --data-binary @- in curl",
+ type: "boolean",
+ alias: "stdin"
+ }).option("json", {
+ describe: "Output json. Use for consistent, machine readable output.",
+ type: "boolean",
+ default: false
+ });
+ }
+ __name(yargsCurl, "yargsCurl");
+ async function curlCommand(args, config) {
+ await requestFromCmd(args, config);
+ }
+ __name(curlCommand, "curlCommand");
+ async function read(stream2) {
+ const chunks = [];
+ for await (const chunk of stream2) {
+ chunks.push(chunk);
+ }
+ return Buffer.concat(chunks).toString("utf8");
+ }
+ __name(read, "read");
+ async function requestFromCmd(args, _config) {
+ const requestId = `wrangler-${(0, import_crypto.randomUUID)()}`;
+ if (!args.json && args.verbose) {
+ logRaw(bold(brandColor("Request id: " + requestId)));
+ }
+ if (args.useStdin) {
+ args.data = await read(process.stdin);
+ }
+ try {
+ const headers = (args.header ?? []).reduce(
+ (prev, now) => ({
+ ...prev,
+ [now.toString().split(":")[0].trim()]: now.toString().split(":")[1].trim()
+ }),
+ { "coordinator-request-id": requestId }
+ );
+ const res = await request(OpenAPI, {
+ url: args.path,
+ method: args.method,
+ body: args.data ? JSON.parse(args.data) : void 0,
+ mediaType: "application/json",
+ headers
+ });
+ if (args.json || args.silent) {
+ logRaw(
+ JSON.stringify(
+ !args.verbose ? res : {
+ res,
+ headers,
+ request_id: requestId
+ },
+ null,
+ 4
+ )
+ );
+ } else {
+ if (args.verbose) {
+ logRaw(cyanBright(">> Headers"));
+ logRaw(
+ formatLabelledValues(headers, {
+ indentationCount: 4,
+ formatLabel: function(label) {
+ return yellow(label + ":");
+ },
+ formatValue: yellow
+ })
+ );
+ }
+ logRaw(cyanBright(">> Body"));
+ const text = JSON.stringify(res, null, 4);
+ logRaw(
+ text.split("\n").map((line) => `${brandColor(line)}`).join("\n")
+ );
+ }
+ return;
+ } catch (error2) {
+ if (error2 instanceof ApiError) {
+ logRaw(
+ JSON.stringify({
+ request: error2.request,
+ status: error2.status,
+ statusText: error2.statusText
+ })
+ );
+ } else {
+ logRaw(String(error2));
+ }
+ }
+ }
+ __name(requestFromCmd, "requestFromCmd");
+
  // src/cloudchamber/delete.ts
  init_import_meta_url();

@@ -179627,6 +179770,11 @@ var cloudchamber = /* @__PURE__ */ __name((yargs, subHelp) => {
  "registries",
  "Configure registries via Cloudchamber",
  (args) => registriesCommand(args).command(subHelp)
+ ).command(
+ "curl <path>",
+ "send a request to an arbitrary cloudchamber endpoint",
+ (args) => yargsCurl(args),
+ (args) => handleFailure(curlCommand)(args)
  );
  }, "cloudchamber");

@@ -188504,7 +188652,7 @@ var import_esbuild3 = require("esbuild");

  // ../../node_modules/.pnpm/nanoid@3.3.6/node_modules/nanoid/index.js
  init_import_meta_url();
- var import_crypto = __toESM(require("crypto"), 1);
+ var import_crypto2 = __toESM(require("crypto"), 1);

  // ../../node_modules/.pnpm/nanoid@3.3.6/node_modules/nanoid/url-alphabet/index.js
  init_import_meta_url();
@@ -188517,10 +188665,10 @@ var poolOffset;
  var fillPool = /* @__PURE__ */ __name((bytes) => {
  if (!pool || pool.length < bytes) {
  pool = Buffer.allocUnsafe(bytes * POOL_SIZE_MULTIPLIER);
- import_crypto.default.randomFillSync(pool);
+ import_crypto2.default.randomFillSync(pool);
  poolOffset = 0;
  } else if (poolOffset + bytes > pool.length) {
- import_crypto.default.randomFillSync(pool);
+ import_crypto2.default.randomFillSync(pool);
  poolOffset = 0;
  }
  poolOffset += bytes;
@@ -193010,6 +193158,465 @@ function pages(yargs, subHelp) {
193010
193158
  }
193011
193159
  __name(pages, "pages");
193012
193160
 
193161
+ // src/pipelines/index.ts
193162
+ init_import_meta_url();
193163
+
193164
+ // src/pipelines/client.ts
193165
+ init_import_meta_url();
193166
+ var import_node_crypto9 = require("node:crypto");
193167
+ var API_HEADERS = {
193168
+ "Content-Type": "application/json"
193169
+ };
193170
+ function sha256(s) {
193171
+ return (0, import_node_crypto9.createHash)("sha256").update(s).digest("hex");
193172
+ }
193173
+ __name(sha256, "sha256");
193174
+ async function generateR2ServiceToken(label, accountId, bucket) {
193175
+ const res = await fetchResult(
193176
+ `/user/tokens/permission_groups`,
193177
+ {
193178
+ method: "GET"
193179
+ }
193180
+ );
193181
+ const perm = res.find(
193182
+ (g2) => g2.name == "Workers R2 Storage Bucket Item Write"
193183
+ );
193184
+ if (!perm) {
193185
+ throw new Error("Missing R2 Permissions");
193186
+ }
193187
+ const body = JSON.stringify({
193188
+ policies: [
193189
+ {
193190
+ effect: "allow",
193191
+ permission_groups: [{ id: perm.id }],
193192
+ resources: {
193193
+ [`com.cloudflare.edge.r2.bucket.${accountId}_default_${bucket}`]: "*"
193194
+ }
193195
+ }
193196
+ ],
193197
+ name: label
193198
+ });
193199
+ return await fetchResult(`/user/tokens`, {
193200
+ method: "POST",
193201
+ headers: API_HEADERS,
193202
+ body
193203
+ });
193204
+ }
193205
+ __name(generateR2ServiceToken, "generateR2ServiceToken");
193206
+ async function getR2Bucket(accountId, name) {
193207
+ return await fetchResult(
193208
+ `/accounts/${accountId}/r2/buckets/${name}`
193209
+ );
193210
+ }
193211
+ __name(getR2Bucket, "getR2Bucket");
193212
+ async function createPipeline(accountId, config) {
193213
+ return await fetchResult(`/accounts/${accountId}/pipelines`, {
193214
+ method: "POST",
193215
+ headers: API_HEADERS,
193216
+ body: JSON.stringify(config)
193217
+ });
193218
+ }
193219
+ __name(createPipeline, "createPipeline");
193220
+ async function getPipeline(accountId, name) {
193221
+ return await fetchResult(
193222
+ `/accounts/${accountId}/pipelines/${name}`,
193223
+ {
193224
+ method: "GET"
193225
+ }
193226
+ );
193227
+ }
193228
+ __name(getPipeline, "getPipeline");
193229
+ async function updatePipeline(accountId, name, config) {
193230
+ return await fetchResult(
193231
+ `/accounts/${accountId}/pipelines/${name}`,
193232
+ {
193233
+ method: "PUT",
193234
+ headers: API_HEADERS,
193235
+ body: JSON.stringify(config)
193236
+ }
193237
+ );
193238
+ }
193239
+ __name(updatePipeline, "updatePipeline");
193240
+ async function listPipelines(accountId) {
193241
+ return await fetchResult(
193242
+ `/accounts/${accountId}/pipelines`,
193243
+ {
193244
+ method: "GET"
193245
+ }
193246
+ );
193247
+ }
193248
+ __name(listPipelines, "listPipelines");
193249
+ async function deletePipeline(accountId, name) {
193250
+ return await fetchResult(`/accounts/${accountId}/pipelines/${name}`, {
193251
+ method: "DELETE",
193252
+ headers: API_HEADERS
193253
+ });
193254
+ }
193255
+ __name(deletePipeline, "deletePipeline");
193256
+
193257
+ // src/pipelines/index.ts
193258
+ var __testSkipDelaysFlag = false;
193259
+ async function authorizeR2Bucket(name, accountId, bucket) {
193260
+ try {
193261
+ await getR2Bucket(accountId, bucket);
193262
+ } catch (err) {
193263
+ if (err instanceof APIError) {
193264
+ if (err.code == 10006) {
193265
+ throw new FatalError(`The R2 bucket [${bucket}] doesn't exist`);
193266
+ }
193267
+ }
193268
+ throw err;
193269
+ }
193270
+ logger.log(`\u{1F300} Authorizing R2 bucket "${bucket}"`);
193271
+ const serviceToken = await generateR2ServiceToken(
193272
+ `Service token for Pipeline ${name}`,
193273
+ accountId,
193274
+ bucket
193275
+ );
193276
+ const access_key_id = serviceToken.id;
193277
+ const secret_access_key = sha256(serviceToken.value);
193278
+ !__testSkipDelaysFlag && await sleep(3e3);
193279
+ return {
193280
+ secret_access_key,
193281
+ access_key_id
193282
+ };
193283
+ }
193284
+ __name(authorizeR2Bucket, "authorizeR2Bucket");
193285
+ function getAccountR2Endpoint(accountId) {
193286
+ return `https://${accountId}.r2.cloudflarestorage.com`;
193287
+ }
193288
+ __name(getAccountR2Endpoint, "getAccountR2Endpoint");
193289
+ function validateName(label, name) {
193290
+ if (!name.match(/^[a-zA-Z0-9-]+$/)) {
193291
+ throw new Error(`Must provide a valid ${label}`);
193292
+ }
193293
+ }
193294
+ __name(validateName, "validateName");
193295
+ function parseTransform(spec) {
193296
+ const [script, entrypoint, ...rest] = spec.split(".");
193297
+ if (!script || rest.length > 0) {
193298
+ throw new Error(
193299
+ "Invalid transform: required syntax <script>[.<entrypoint>]"
193300
+ );
193301
+ }
193302
+ return {
193303
+ script,
193304
+ entrypoint: entrypoint || "Transform"
193305
+ };
193306
+ }
193307
+ __name(parseTransform, "parseTransform");
193308
+ function addCreateAndUpdateOptions(yargs) {
193309
+ return yargs.option("secret-access-key", {
193310
+ describe: "The R2 service token Access Key to write data",
193311
+ type: "string",
193312
+ demandOption: false
193313
+ }).option("access-key-id", {
193314
+ describe: "The R2 service token Secret Key to write data",
193315
+ type: "string",
193316
+ demandOption: false
193317
+ }).option("batch-max-mb", {
193318
+ describe: "The approximate maximum size of a batch before flush in megabytes \nDefault: 10",
193319
+ type: "number",
193320
+ demandOption: false
193321
+ }).option("batch-max-rows", {
193322
+ describe: "The approximate maximum size of a batch before flush in rows \nDefault: 10000",
193323
+ type: "number",
193324
+ demandOption: false
193325
+ }).option("batch-max-seconds", {
193326
+ describe: "The approximate maximum duration of a batch before flush in seconds \nDefault: 15",
193327
+ type: "number",
193328
+ demandOption: false
193329
+ }).option("transform", {
193330
+ describe: 'The worker and entrypoint of the PipelineTransform implementation in the format "worker.entrypoint" \nDefault: No transformation worker',
193331
+ type: "string",
193332
+ demandOption: false
193333
+ }).option("compression", {
193334
+ describe: "Sets the compression format of output files \nDefault: gzip",
193335
+ type: "string",
193336
+ choices: ["none", "gzip", "deflate"],
193337
+ demandOption: false
193338
+ }).option("filepath", {
193339
+ describe: "The path to store files in the destination bucket \nDefault: event_date=${date}/hr=${hr}",
193340
+ type: "string",
193341
+ demandOption: false
193342
+ }).option("filename", {
193343
+ describe: 'The name of the file in the bucket. Must contain "${slug}". File extension is optional \nDefault: ${slug}-${hr}.json',
193344
+ type: "string",
193345
+ demandOption: false
193346
+ }).option("authentication", {
193347
+ describe: "Enabling authentication means that data can only be sent to the pipeline via the binding \nDefault: false",
193348
+ type: "boolean",
193349
+ demandOption: false
193350
+ });
193351
+ }
193352
+ __name(addCreateAndUpdateOptions, "addCreateAndUpdateOptions");
193353
+ function pipelines(pipelineYargs) {
193354
+ return pipelineYargs.command(
193355
+ "create <pipeline>",
193356
+ "Create a new pipeline",
193357
+ (yargs) => {
193358
+ return addCreateAndUpdateOptions(yargs).positional("pipeline", {
193359
+ describe: "The name of the new pipeline",
193360
+ type: "string",
193361
+ demandOption: true
193362
+ }).option("r2", {
193363
+ type: "string",
193364
+ describe: "Destination R2 bucket name",
193365
+ demandOption: true
193366
+ });
193367
+ },
193368
+ async (args) => {
193369
+ await printWranglerBanner();
193370
+ const config = readConfig(args.config, args);
193371
+ const bucket = args.r2;
193372
+ const name = args.pipeline;
193373
+ const compression = args.compression === void 0 ? "gzip" : args.compression;
193374
+ const batch = {
193375
+ max_mb: args["batch-max-mb"],
193376
+ max_duration_s: args["batch-max-seconds"],
193377
+ max_rows: args["batch-max-rows"]
193378
+ };
193379
+ if (!bucket) {
193380
+ throw new FatalError("Requires a r2 bucket");
193381
+ }
193382
+ const accountId = await requireAuth(config);
193383
+ const pipelineConfig = {
193384
+ name,
193385
+ metadata: {},
193386
+ source: [
193387
+ {
193388
+ type: "http",
193389
+ format: "json"
193390
+ },
193391
+ {
193392
+ type: "binding",
193393
+ format: "json"
193394
+ }
193395
+ ],
193396
+ transforms: [],
193397
+ destination: {
193398
+ type: "r2",
193399
+ format: "json",
193400
+ compression: {
193401
+ type: compression
193402
+ },
193403
+ batch,
193404
+ path: {
193405
+ bucket
193406
+ },
193407
+ credentials: {
193408
+ endpoint: getAccountR2Endpoint(accountId),
193409
+ access_key_id: args["access-key-id"] || "",
193410
+ secret_access_key: args["secret-access-key"] || ""
193411
+ }
193412
+ }
193413
+ };
193414
+ const destination = pipelineConfig.destination;
193415
+ if (!destination.credentials.access_key_id && !destination.credentials.secret_access_key) {
193416
+ const auth = await authorizeR2Bucket(
193417
+ name,
193418
+ accountId,
193419
+ pipelineConfig.destination.path.bucket
193420
+ );
193421
+ destination.credentials.access_key_id = auth.access_key_id;
193422
+ destination.credentials.secret_access_key = auth.secret_access_key;
193423
+ }
193424
+ if (!destination.credentials.access_key_id) {
193425
+ throw new FatalError("Requires a r2 access key id");
193426
+ }
193427
+ if (!destination.credentials.secret_access_key) {
193428
+ throw new FatalError("Requires a r2 secret access key");
193429
+ }
193430
+ if (args.authentication) {
193431
+ pipelineConfig.source = [
193432
+ {
193433
+ type: "binding",
193434
+ format: "json"
193435
+ }
193436
+ ];
193437
+ }
193438
+ if (args.transform !== void 0) {
193439
+ pipelineConfig.transforms.push(parseTransform(args.transform));
193440
+ }
193441
+ if (args.filepath) {
193442
+ pipelineConfig.destination.path.filepath = args.filepath;
193443
+ }
193444
+ if (args.filename) {
193445
+ pipelineConfig.destination.path.filename = args.filename;
193446
+ }
193447
+ logger.log(`\u{1F300} Creating pipeline named "${name}"`);
193448
+ const pipeline = await createPipeline(accountId, pipelineConfig);
193449
+ await sendMetricsEvent("create pipeline", {
193450
+ sendMetrics: config.send_metrics
193451
+ });
193452
+ logger.log(
193453
+ `\u2705 Successfully created pipeline "${pipeline.name}" with id ${pipeline.id}`
193454
+ );
193455
+ logger.log("\u{1F389} You can now send data to your pipeline!");
193456
+ logger.log(
193457
+ `Example: curl "${pipeline.endpoint}" -d '[{"foo": "bar"}]'`
193458
+ );
193459
+ }
193460
+ ).command(
193461
+ "list",
193462
+ "List current pipelines",
193463
+ (yargs) => yargs,
193464
+ async (args) => {
193465
+ const config = readConfig(args.config, args);
193466
+ const accountId = await requireAuth(config);
193467
+ const list = await listPipelines(accountId);
193468
+ await sendMetricsEvent("list pipelines", {
193469
+ sendMetrics: config.send_metrics
193470
+ });
193471
+ logger.table(
193472
+ list.map((pipeline) => ({
193473
+ name: pipeline.name,
193474
+ id: pipeline.id,
193475
+ endpoint: pipeline.endpoint
193476
+ }))
193477
+ );
193478
+ }
193479
+ ).command(
193480
+ "show <pipeline>",
193481
+ "Show a pipeline configuration",
193482
+ (yargs) => {
193483
+ return yargs.positional("pipeline", {
193484
+ type: "string",
193485
+ describe: "The name of the pipeline to show",
193486
+ demandOption: true
193487
+ });
193488
+ },
193489
+ async (args) => {
193490
+ await printWranglerBanner();
193491
+ const config = readConfig(args.config, args);
193492
+ const accountId = await requireAuth(config);
193493
+ const name = args.pipeline;
193494
+ validateName("pipeline name", name);
193495
+      logger.log(`Retrieving config for pipeline "${name}".`);
+      const pipeline = await getPipeline(accountId, name);
+      await sendMetricsEvent("show pipeline", {
+        sendMetrics: config.send_metrics
+      });
+      logger.log(JSON.stringify(pipeline, null, 2));
+    }
+  ).command(
+    "update <pipeline>",
+    "Update a pipeline",
+    (yargs) => {
+      return addCreateAndUpdateOptions(yargs).positional("pipeline", {
+        describe: "The name of the pipeline to update",
+        type: "string",
+        demandOption: true
+      }).option("r2", {
+        type: "string",
+        describe: "Destination R2 bucket name",
+        demandOption: false
+      });
+    },
+    async (args) => {
+      await printWranglerBanner();
+      const name = args.pipeline;
+      const config = readConfig(args.config, args);
+      const accountId = await requireAuth(config);
+      const pipelineConfig = await getPipeline(accountId, name);
+      if (args.compression) {
+        pipelineConfig.destination.compression.type = args.compression;
+      }
+      if (args["batch-max-mb"]) {
+        pipelineConfig.destination.batch.max_mb = args["batch-max-mb"];
+      }
+      if (args["batch-max-seconds"]) {
+        pipelineConfig.destination.batch.max_duration_s = args["batch-max-seconds"];
+      }
+      if (args["batch-max-rows"]) {
+        pipelineConfig.destination.batch.max_rows = args["batch-max-rows"];
+      }
+      const bucket = args.r2;
+      const accessKeyId = args["access-key-id"];
+      const secretAccessKey = args["secret-access-key"];
+      if (bucket || accessKeyId || secretAccessKey) {
+        const destination = pipelineConfig.destination;
+        if (bucket) {
+          pipelineConfig.destination.path.bucket = bucket;
+        }
+        destination.credentials = {
+          endpoint: getAccountR2Endpoint(accountId),
+          access_key_id: accessKeyId || "",
+          secret_access_key: secretAccessKey || ""
+        };
+        if (!accessKeyId && !secretAccessKey) {
+          const auth = await authorizeR2Bucket(
+            name,
+            accountId,
+            destination.path.bucket
+          );
+          destination.credentials.access_key_id = auth.access_key_id;
+          destination.credentials.secret_access_key = auth.secret_access_key;
+        }
+        if (!destination.credentials.access_key_id) {
+          throw new FatalError("Requires a r2 access key id");
+        }
+        if (!destination.credentials.secret_access_key) {
+          throw new FatalError("Requires a r2 secret access key");
+        }
+      }
+      if (args.authentication !== void 0) {
+        pipelineConfig.source = pipelineConfig.source.filter(
+          (s) => s.type == "http"
+        );
+        if (!args.authentication) {
+          pipelineConfig.source.push({
+            type: "http",
+            format: "json"
+          });
+        }
+      }
+      if (args.transform !== void 0) {
+        pipelineConfig.transforms.push(parseTransform(args.transform));
+      }
+      if (args.filepath) {
+        pipelineConfig.destination.path.filepath = args.filepath;
+      }
+      if (args.filename) {
+        pipelineConfig.destination.path.filename = args.filename;
+      }
+      logger.log(`\u{1F300} Updating pipeline "${name}"`);
+      const pipeline = await updatePipeline(accountId, name, pipelineConfig);
+      await sendMetricsEvent("update pipeline", {
+        sendMetrics: config.send_metrics
+      });
+      logger.log(
+        `\u2705 Successfully updated pipeline "${pipeline.name}" with ID ${pipeline.id}
+`
+      );
+    }
+  ).command(
+    "delete <pipeline>",
+    "Delete a pipeline",
+    (yargs) => {
+      return yargs.positional("pipeline", {
+        type: "string",
+        describe: "The name of the pipeline to delete",
+        demandOption: true
+      });
+    },
+    async (args) => {
+      await printWranglerBanner();
+      const config = readConfig(args.config, args);
+      const accountId = await requireAuth(config);
+      const name = args.pipeline;
+      validateName("pipeline name", name);
+      logger.log(`Deleting pipeline ${name}.`);
+      await deletePipeline(accountId, name);
+      logger.log(`Deleted pipeline ${name}.`);
+      await sendMetricsEvent("delete pipeline", {
+        sendMetrics: config.send_metrics
+      });
+    }
+  );
+}
+__name(pipelines, "pipelines");
+
 // src/pubsub/pubsub-commands.ts
 init_import_meta_url();
 
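The update handler above resolves R2 credentials in a fixed order: explicit --access-key-id/--secret-access-key flags win, and only when both are absent does wrangler mint scoped credentials via authorizeR2Bucket. A minimal TypeScript sketch of that logic, assuming simplified types; getAccountR2Endpoint and authorizeR2Bucket are declared here only to make the sketch self-contained and mirror the calls in the bundled source:

// Assumed signatures, mirroring the calls in the bundled source above.
declare function getAccountR2Endpoint(accountId: string): string;
declare function authorizeR2Bucket(
  pipelineName: string,
  accountId: string,
  bucket: string
): Promise<{ access_key_id: string; secret_access_key: string }>;

interface R2Credentials {
  endpoint: string;
  access_key_id: string;
  secret_access_key: string;
}

async function resolveR2Credentials(
  accountId: string,
  pipelineName: string,
  bucket: string,
  explicitKeyId?: string,
  explicitSecret?: string
): Promise<R2Credentials> {
  const credentials: R2Credentials = {
    endpoint: getAccountR2Endpoint(accountId),
    access_key_id: explicitKeyId ?? "",
    secret_access_key: explicitSecret ?? ""
  };
  // Scoped credentials are only generated when *neither* key was supplied;
  // passing exactly one key falls through to the error checks below, which
  // matches the FatalError behaviour in the handler.
  if (!explicitKeyId && !explicitSecret) {
    const auth = await authorizeR2Bucket(pipelineName, accountId, bucket);
    credentials.access_key_id = auth.access_key_id;
    credentials.secret_access_key = auth.secret_access_key;
  }
  if (!credentials.access_key_id || !credentials.secret_access_key) {
    throw new Error("An R2 access key id and secret access key are required");
  }
  return credentials;
}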
@@ -195369,7 +195976,7 @@ var secret2 = /* @__PURE__ */ __name((secretYargs) => {
     } catch (e3) {
       if (e3 instanceof APIError && e3.code === VERSION_NOT_DEPLOYED_ERR_CODE) {
         throw new UserError(
-          "Secret edit failed. You attempted to modify a secret, but the latest version of your Worker isn't currently deployed. Please ensure that the latest version of your Worker is fully deployed (wrangler versions deploy --x-versions) before modifying secrets. Alternatively, you can use the Cloudflare dashboard to modify secrets and deploy the version.\n\nNote: This limitation will be addressed in an upcoming release."
+          "Secret edit failed. You attempted to modify a secret, but the latest version of your Worker isn't currently deployed. Please ensure that the latest version of your Worker is fully deployed (wrangler versions deploy) before modifying secrets. Alternatively, you can use the Cloudflare dashboard to modify secrets and deploy the version.\n\nNote: This limitation will be addressed in an upcoming release."
         );
       } else {
         throw e3;
@@ -205316,7 +205923,11 @@ async function generateTypes(configToDTS, config, envInterface, outputPath) {
   }
   if (configToDTS.unsafe?.bindings) {
     for (const unsafe of configToDTS.unsafe.bindings) {
-      envTypeStructure.push(constructType(unsafe.name, "any"));
+      if (unsafe.type === "ratelimit") {
+        envTypeStructure.push(constructType(unsafe.name, "RateLimit"));
+      } else {
+        envTypeStructure.push(constructType(unsafe.name, "any"));
+      }
     }
   }
   if (configToDTS.queues) {
@@ -206677,7 +207288,7 @@ async function versionsSecretPutBulkHandler(args) {
   }
   logger.log(
     `\u2728 Success! Created version ${newVersion.id} with ${secrets.length} secrets.
-\u27A1\uFE0F To deploy this version to production traffic use the command "wrangler versions deploy --x-versions".`
+\u27A1\uFE0F To deploy this version to production traffic use the command "wrangler versions deploy".`
   );
 }
 __name(versionsSecretPutBulkHandler, "versionsSecretPutBulkHandler");
@@ -206755,7 +207366,7 @@ async function versionsSecretDeleteHandler(args) {
   });
   logger.log(
     `\u2728 Success! Created version ${newVersion.id} with deleted secret ${args.key}.
-\u27A1\uFE0F To deploy this version without the secret ${args.key} to production traffic use the command "wrangler versions deploy --x-versions".`
+\u27A1\uFE0F To deploy this version without the secret ${args.key} to production traffic use the command "wrangler versions deploy".`
   );
 }
 }
@@ -206891,7 +207502,7 @@ async function versionsSecretPutHandler(args) {
   });
   logger.log(
     `\u2728 Success! Created version ${newVersion.id} with secret ${args.key}.
-\u27A1\uFE0F To deploy this version with secret ${args.key} to production traffic use the command "wrangler versions deploy --x-versions".`
+\u27A1\uFE0F To deploy this version with secret ${args.key} to production traffic use the command "wrangler versions deploy".`
   );
 }
 __name(versionsSecretPutHandler, "versionsSecretPutHandler");
@@ -207081,7 +207692,6 @@ __name(parseModules, "parseModules");
 init_import_meta_url();
 var import_node_fs36 = require("node:fs");
 var import_node_path61 = __toESM(require("node:path"));
-var import_node_url16 = require("node:url");
 var scriptStartupErrorRegex2 = /startup/i;
 function errIsScriptSize2(err) {
   if (!err) {
@@ -207192,7 +207802,7 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
   const destination = props.outDir ?? getWranglerTmpDir(props.projectRoot, "deploy");
   const start = Date.now();
   const workerName = scriptName;
-  const workerUrl = `/accounts/${accountId}/workers/scripts/${scriptName}/versions`;
+  const workerUrl = `/accounts/${accountId}/workers/scripts/${scriptName}`;
   const { format: format10 } = props.entry;
   if (config.wasm_modules && format10 === "modules") {
     throw new UserError(
@@ -207209,6 +207819,7 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
       "You cannot configure [data_blobs] with an ES module worker. Instead, import the file directly in your code, and optionally configure `[rules]` in your wrangler.toml"
     );
   }
+  let hasPreview = false;
   try {
     if (props.noBundle) {
       const destinationDir = typeof destination === "string" ? destination : destination.path;
@@ -207374,25 +207985,16 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
   let bindingsPrinted = false;
   try {
     const body = createWorkerUploadForm(worker);
-    const result = await fetchResult(
-      workerUrl,
-      {
-        method: "POST",
-        body,
-        headers: await getMetricsUsageHeaders(config.send_metrics)
-      },
-      new import_node_url16.URLSearchParams({
-        include_subdomain_availability: "true",
-        // pass excludeScript so the whole body of the
-        // script doesn't get included in the response
-        excludeScript: "true"
-      })
-    );
+    const result = await fetchResult(`${workerUrl}/versions`, {
+      method: "POST",
+      body,
+      headers: await getMetricsUsageHeaders(config.send_metrics)
+    });
     logger.log("Worker Startup Time:", result.startup_time_ms, "ms");
     bindingsPrinted = true;
     printBindings({ ...withoutStaticAssets, vars: maskedVars });
-    logger.log("Worker Version ID:", result.id);
     versionId = result.id;
+    hasPreview = result.metadata.has_preview;
   } catch (err) {
     if (!bindingsPrinted) {
       printBindings({ ...withoutStaticAssets, vars: maskedVars });
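The hunk above moves the upload from the one-shot script endpoint to the versions endpoint, drops the old query parameters, and reads the version id plus a has_preview flag from the response. A sketch of the new call shape; fetchResult and the result type are declared here as assumptions based only on the fields used in the diff:

// Assumed helper signature; fetchResult is wrangler's internal wrapper
// around the Cloudflare API and is declared here only for the sketch.
declare function fetchResult<T>(
  resource: string,
  init?: { method?: string; body?: FormData; headers?: Record<string, string> }
): Promise<T>;

// Assumed response shape, based on the fields read in the diff above.
interface VersionUploadResult {
  id: string;
  startup_time_ms: number;
  metadata: { has_preview: boolean };
}

async function uploadWorkerVersion(
  workerUrl: string, // `/accounts/${accountId}/workers/scripts/${scriptName}`
  body: FormData,
  headers: Record<string, string>
): Promise<VersionUploadResult> {
  // The old include_subdomain_availability/excludeScript query parameters
  // are gone; the versions endpoint takes only the form body and headers.
  return fetchResult<VersionUploadResult>(`${workerUrl}/versions`, {
    method: "POST",
    body,
    headers
  });
}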
@@ -207435,12 +208037,19 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
   }
   const uploadMs = Date.now() - start;
   logger.log("Uploaded", workerName, formatTime3(uploadMs));
-  const cmdVersionsDeploy = blue(
-    "wrangler versions deploy --experimental-versions"
-  );
-  const cmdTriggersDeploy = blue(
-    "wrangler triggers deploy --experimental-versions"
-  );
+  logger.log("Worker Version ID:", versionId);
+  if (versionId && hasPreview) {
+    const { enabled: available_on_subdomain } = await fetchResult(`${workerUrl}/subdomain`);
+    if (available_on_subdomain) {
+      const userSubdomain = await getWorkersDevSubdomain(accountId);
+      const shortVersion = versionId.slice(0, 8);
+      logger.log(
+        `Version Preview URL: https://${shortVersion}-${workerName}.${userSubdomain}.workers.dev`
+      );
+    }
+  }
+  const cmdVersionsDeploy = blue("wrangler versions deploy");
+  const cmdTriggersDeploy = blue("wrangler triggers deploy");
   logger.info(
     gray(`
To deploy this version to production traffic use the command ${cmdVersionsDeploy}
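The new preview block composes the URL from the first eight characters of the version id, the worker name, and the account's workers.dev subdomain. A self-contained sketch; the URL shape comes straight from the diff, while the sample values are invented:

function versionPreviewUrl(
  versionId: string,
  workerName: string,
  userSubdomain: string
): string {
  const shortVersion = versionId.slice(0, 8); // first 8 chars of the version UUID
  return `https://${shortVersion}-${workerName}.${userSubdomain}.workers.dev`;
}

// Example (invented values):
//   versionPreviewUrl("a1b2c3d4-5678-...", "my-worker", "my-team")
//   -> "https://a1b2c3d4-my-worker.my-team.workers.dev"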
@@ -208084,11 +208693,11 @@ async function versionsDeploymentsViewHandler(args) {
   await printWranglerBanner();
   if (args.deploymentId === void 0) {
     throw new UserError(
-      "`wrangler deployments view` has been renamed `wrangler deployments status --x-versions`. Please use that command instead."
+      "`wrangler deployments view` has been renamed `wrangler deployments status`. Please use that command instead."
     );
   } else {
     throw new UserError(
-      "`wrangler deployments view <deployment-id>` has been renamed `wrangler versions view [version-id] --x-versions`. Please use that command instead."
+      "`wrangler deployments view <deployment-id>` has been renamed `wrangler versions view [version-id]`. Please use that command instead."
     );
   }
 }
@@ -208751,9 +209360,17 @@ function createCLIParser(argv) {
       return workerNamespaceCommands(workerNamespaceYargs, subHelp);
     }
   );
-  wrangler.command("ai", "\u{1F916} Manage AI models\n", (aiYargs) => {
+  wrangler.command("ai", "\u{1F916} Manage AI models", (aiYargs) => {
     return ai(aiYargs.command(subHelp));
   });
+  wrangler.command(
+    "pipelines",
+    `\u{1F6B0} Manage Worker Pipelines ${source_default.hex(betaCmdColor)("[open beta]")}
+`,
+    (pipelinesYargs) => {
+      return pipelines(pipelinesYargs.command(subHelp));
+    }
+  );
   wrangler.command(
     // this needs scopes as an option?
     "login",
@@ -211005,7 +211622,7 @@ __name(ConfigController, "ConfigController");
 
 // src/api/startDevWorker/LocalRuntimeController.ts
 init_import_meta_url();
-var import_node_crypto9 = require("node:crypto");
+var import_node_crypto10 = require("node:crypto");
 var import_promises27 = require("node:fs/promises");
 var import_miniflare19 = require("miniflare");
 async function getBinaryFileContents2(file) {
@@ -211114,7 +211731,7 @@ var LocalRuntimeController = class extends RuntimeController {
   // This is given as a shared secret to the Proxy and User workers
   // so that the User Worker can trust aspects of HTTP requests from the Proxy Worker
   // if it provides the secret in a `MF-Proxy-Shared-Secret` header.
-  #proxyToUserWorkerAuthenticationSecret = (0, import_node_crypto9.randomUUID)();
+  #proxyToUserWorkerAuthenticationSecret = (0, import_node_crypto10.randomUUID)();
   // `buildMiniflareOptions()` is asynchronous, meaning if multiple bundle
   // updates were submitted, the second may apply before the first. Therefore,
   // wrap updates in a mutex, so they're always applied in invocation order.
@@ -211231,7 +211848,7 @@ __name(LocalRuntimeController, "LocalRuntimeController");
 // src/api/startDevWorker/ProxyController.ts
 init_import_meta_url();
 var import_node_assert24 = __toESM(require("node:assert"));
-var import_node_crypto10 = require("node:crypto");
+var import_node_crypto11 = require("node:crypto");
 var import_node_events5 = __toESM(require("node:events"));
 var import_node_path67 = __toESM(require("node:path"));
 var import_miniflare20 = require("miniflare");
@@ -211256,7 +211873,7 @@ var ProxyController = class extends Controller {
   inspectorProxyWorkerWebSocket;
   latestConfig;
   latestBundle;
-  secret = (0, import_node_crypto10.randomUUID)();
+  secret = (0, import_node_crypto11.randomUUID)();
   createProxyWorker() {
     if (this._torndown) {
       return;