wrangler 3.71.0 → 3.72.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -80778,7 +80778,7 @@ var require_send = __commonJS({
  var join18 = path74.join;
  var normalize4 = path74.normalize;
  var resolve22 = path74.resolve;
- var sep3 = path74.sep;
+ var sep4 = path74.sep;
  var BYTES_RANGE_REGEXP = /^ *bytes=/;
  var MAX_MAXAGE = 60 * 60 * 24 * 365 * 1e3;
  var UP_PATH_REGEXP = /(?:^|[\\/])\.\.(?:[\\/]|$)/;
@@ -80988,14 +80988,14 @@ var require_send = __commonJS({
  var parts;
  if (root !== null) {
  if (path75) {
- path75 = normalize4("." + sep3 + path75);
+ path75 = normalize4("." + sep4 + path75);
  }
  if (UP_PATH_REGEXP.test(path75)) {
  debug('malicious path "%s"', path75);
  this.error(403);
  return res;
  }
- parts = path75.split(sep3);
+ parts = path75.split(sep4);
  path75 = normalize4(join18(root, path75));
  } else {
  if (UP_PATH_REGEXP.test(path75)) {
@@ -81003,7 +81003,7 @@ var require_send = __commonJS({
  this.error(403);
  return res;
  }
- parts = normalize4(path75).split(sep3);
+ parts = normalize4(path75).split(sep4);
  path75 = resolve22(path75);
  }
  if (containsDotFile(parts)) {
@@ -81102,7 +81102,7 @@ var require_send = __commonJS({
  var self2 = this;
  debug('stat "%s"', path75);
  fs26.stat(path75, /* @__PURE__ */ __name(function onstat(err, stat6) {
- if (err && err.code === "ENOENT" && !extname5(path75) && path75[path75.length - 1] !== sep3) {
+ if (err && err.code === "ENOENT" && !extname5(path75) && path75[path75.length - 1] !== sep4) {
  return next(err);
  }
  if (err)
@@ -100662,9 +100662,9 @@ var require_prompts = __commonJS({
  $3.date = (args) => toPrompt("DatePrompt", args);
  $3.confirm = (args) => toPrompt("ConfirmPrompt", args);
  $3.list = (args) => {
- const sep3 = args.separator || ",";
+ const sep4 = args.separator || ",";
  return toPrompt("TextPrompt", args, {
- onSubmit: (str) => str.split(sep3).map((s) => s.trim())
+ onSubmit: (str) => str.split(sep4).map((s) => s.trim())
  });
  };
  $3.toggle = (args) => toPrompt("TogglePrompt", args);
@@ -103117,9 +103117,9 @@ var require_prompts2 = __commonJS({
  $3.date = (args) => toPrompt("DatePrompt", args);
  $3.confirm = (args) => toPrompt("ConfirmPrompt", args);
  $3.list = (args) => {
- const sep3 = args.separator || ",";
+ const sep4 = args.separator || ",";
  return toPrompt("TextPrompt", args, {
- onSubmit: (str) => str.split(sep3).map((s) => s.trim())
+ onSubmit: (str) => str.split(sep4).map((s) => s.trim())
  });
  };
  $3.toggle = (args) => toPrompt("TogglePrompt", args);
@@ -137868,13 +137868,13 @@ ${err.stack}`;
  });
 
  // ../pages-shared/asset-server/rulesEngine.ts
- var ESCAPE_REGEX_CHARACTERS, escapeRegex, HOST_PLACEHOLDER_REGEX, PLACEHOLDER_REGEX2, replacer, generateRulesMatcher;
+ var ESCAPE_REGEX_CHARACTERS2, escapeRegex2, HOST_PLACEHOLDER_REGEX, PLACEHOLDER_REGEX2, replacer, generateRulesMatcher;
  var init_rulesEngine = __esm({
  "../pages-shared/asset-server/rulesEngine.ts"() {
  init_import_meta_url();
- ESCAPE_REGEX_CHARACTERS = /[-/\\^$*+?.()|[\]{}]/g;
- escapeRegex = /* @__PURE__ */ __name((str) => {
- return str.replace(ESCAPE_REGEX_CHARACTERS, "\\$&");
+ ESCAPE_REGEX_CHARACTERS2 = /[-/\\^$*+?.()|[\]{}]/g;
+ escapeRegex2 = /* @__PURE__ */ __name((str) => {
+ return str.replace(ESCAPE_REGEX_CHARACTERS2, "\\$&");
  }, "escapeRegex");
  HOST_PLACEHOLDER_REGEX = /(?<=^https:\\\/\\\/[^/]*?):([A-Za-z]\w*)(?=\\)/g;
  PLACEHOLDER_REGEX2 = /:([A-Za-z]\w*)/g;
@@ -137890,7 +137890,7 @@ var init_rulesEngine = __esm({
  }
  const compiledRules = Object.entries(rules).map(([rule, match]) => {
  const crossHost = rule.startsWith("https://");
- rule = rule.split("*").map(escapeRegex).join("(?<splat>.*)");
+ rule = rule.split("*").map(escapeRegex2).join("(?<splat>.*)");
  const host_matches = rule.matchAll(HOST_PLACEHOLDER_REGEX);
  for (const host_match of host_matches) {
  rule = rule.split(host_match[0]).join(`(?<${host_match[1]}>[^/.]+)`);
@@ -151359,6 +151359,10 @@ function convertCfWorkerInitBindingstoBindings(inputBindings) {
  }
  break;
  }
+ case "experimental_assets": {
+ output[info["binding"]] = { type: "assets" };
+ break;
+ }
  default: {
  assertNever(type);
  }
@@ -151389,7 +151393,8 @@ async function convertBindingsToCfWorkerInitBindings(inputBindings) {
  dispatch_namespaces: void 0,
  mtls_certificates: void 0,
  logfwdr: void 0,
- unsafe: void 0
+ unsafe: void 0,
+ experimental_assets: void 0
  };
  const fetchers = {};
  for (const [name, binding] of Object.entries(inputBindings ?? {})) {
@@ -152688,7 +152693,7 @@ init_import_meta_url();
  init_import_meta_url();
 
  // package.json
- var version = "3.71.0";
+ var version = "3.72.0";
  var package_default = {
  name: "wrangler",
  version,
@@ -158994,6 +158999,10 @@ async function writeAdditionalModules(modules, destination) {
  __name(writeAdditionalModules, "writeAdditionalModules");
 
  // src/deployment-bundle/bundle.ts
+ var ESCAPE_REGEX_CHARACTERS = /[-/\\^$*+?.()|[\]{}]/g;
+ var escapeRegex = /* @__PURE__ */ __name((str) => {
+ return str.replace(ESCAPE_REGEX_CHARACTERS, "\\$&");
+ }, "escapeRegex");
  var COMMON_ESBUILD_OPTIONS = {
  // Our workerd runtime uses the same V8 version as recent Chrome, which is highly ES2022 compliant: https://kangax.github.io/compat-table/es2016plus/
  target: "es2022",
@@ -159111,6 +159120,30 @@ async function bundleWorker(entry, destination, {
  if (watch9) {
  inject.push(path12.resolve(getBasePath(), "templates/modules-watch-stub.js"));
  }
+ const aliasPlugin = {
+ name: "alias",
+ setup(build5) {
+ if (!alias) {
+ return;
+ }
+ const filter = new RegExp(
+ Object.keys(alias).map((key) => escapeRegex(key)).join("|")
+ );
+ build5.onResolve({ filter }, (args) => {
+ const aliasPath = alias[args.path];
+ if (aliasPath) {
+ return {
+ // resolve with node resolution
+ path: require.resolve(aliasPath, {
+ // From the esbuild alias docs: "Note that when an import path is substituted using an alias, the resulting import path is resolved in the working directory instead of in the directory containing the source file with the import path."
+ // https://esbuild.github.io/api/#alias:~:text=Note%20that%20when%20an%20import%20path%20is%20substituted%20using%20an%20alias%2C%20the%20resulting%20import%20path%20is%20resolved%20in%20the%20working%20directory%20instead%20of%20in%20the%20directory%20containing%20the%20source%20file%20with%20the%20import%20path.
+ paths: [entry.directory]
+ })
+ };
+ }
+ });
+ }
+ };
  const buildOptions2 = {
  // Don't use entryFile here as the file may have been changed when applying the middleware
  entryPoints: [entry.file],
@@ -159144,12 +159177,12 @@ async function bundleWorker(entry, destination, {
  ...define2
  }
  },
- alias,
  loader: {
  ...COMMON_ESBUILD_OPTIONS.loader,
  ...loader || {}
  },
  plugins: [
+ aliasPlugin,
  moduleCollector.plugin,
  ...nodejsCompatMode === "legacy" ? [
  (0, import_node_globals_polyfill.default)({ buffer: true }),
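Note: the `alias,` option is no longer handed to esbuild directly (removed in the hunk above); wrangler now registers its own `aliasPlugin`, built on the `escapeRegex` helper hoisted into bundle.ts, so that a substituted import specifier is resolved with Node's resolution algorithm rooted at the entry directory, per the esbuild documentation quoted inside the plugin. A minimal standalone sketch of the same pattern follows; the names `makeAliasPlugin` and `projectDir` are illustrative and not part of wrangler's API.

    import { createRequire } from "node:module";
    import type { Plugin } from "esbuild";

    const nodeRequire = createRequire(import.meta.url);

    // Escape regex metacharacters so alias keys can be embedded in a RegExp,
    // mirroring the escapeRegex helper added to bundle.ts above.
    const escapeRegex = (str: string) => str.replace(/[-/\\^$*+?.()|[\]{}]/g, "\\$&");

    export function makeAliasPlugin(alias: Record<string, string>, projectDir: string): Plugin {
      return {
        name: "alias",
        setup(build) {
          const keys = Object.keys(alias);
          if (keys.length === 0) {
            return;
          }
          // One unanchored pattern matching any alias key, as in the bundled code above.
          const filter = new RegExp(keys.map(escapeRegex).join("|"));
          build.onResolve({ filter }, (args) => {
            const target = alias[args.path];
            if (!target) {
              return undefined;
            }
            // Resolve the substituted specifier with Node resolution, rooted at the
            // project directory rather than at the importing file's directory.
            return { path: nodeRequire.resolve(target, { paths: [projectDir] }) };
          });
        },
      };
    }

Because the filter is unanchored, any specifier containing an alias key reaches the resolver; the exact-match lookup `alias[args.path]` then decides whether a substitution actually happens, which matches the behaviour of the plugin added above.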
@@ -162997,7 +163030,10 @@ ${err}`
  host: "127.0.0.1",
  port: 0
  }
- ]
+ ],
+ assetsPath: config.experimentalAssets.directory,
+ assetsKVBindingName: "ASSETS_KV_NAMESPACE",
+ assetsManifestBindingName: "ASSETS_MANIFEST"
  }
  ];
  }
@@ -163223,6 +163259,11 @@ function useLocalWorker(props) {
  });
  server.addEventListener("error", ({ error: error2 }) => {
  if (typeof error2 === "object" && error2 !== null && "code" in error2 && error2.code === "ERR_RUNTIME_FAILURE") {
+ if (process.platform === "win32") {
+ logger.error(
+ "Check that you have the latest Microsoft Visual C++ Redistributable library installed.\nSee https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist."
+ );
+ }
  logger.error(String(error2));
  } else {
  logger.error("Error reloading local server:", error2);
@@ -163302,6 +163343,8 @@ var shapes = {
  dash: "\u2500",
  radioInactive: "\u25CB",
  radioActive: "\u25CF",
+ backActive: "\u25C0",
+ backInactive: "\u25C1",
  bar: "\u2502",
  leftT: "\u251C",
  rigthT: "\u2524",
@@ -163591,12 +163634,12 @@ function createWorkerUploadForm(worker) {
  tail_consumers,
  limits,
  annotations,
- experimental_assets
+ experimental_assets_jwt
  } = worker;
- if (experimental_assets?.staticAssetsOnly) {
+ if (main2.name === "no-op-assets-worker.js" && experimental_assets_jwt) {
  formData.set(
  "metadata",
- JSON.stringify({ assets: experimental_assets.jwt })
+ JSON.stringify({ assets: experimental_assets_jwt })
  );
  return formData;
  }
@@ -163761,6 +163804,12 @@ function createWorkerUploadForm(worker) {
  type: "version_metadata"
  });
  }
+ if (bindings.experimental_assets !== void 0) {
+ metadataBindings.push({
+ name: bindings.experimental_assets.binding,
+ type: "assets"
+ });
+ }
  for (const [name, filePath] of Object.entries(bindings.text_blobs || {})) {
  metadataBindings.push({
  name,
@@ -163880,7 +163929,8 @@ function createWorkerUploadForm(worker) {
  ...placement && { placement },
  ...tail_consumers && { tail_consumers },
  ...limits && { limits },
- ...annotations && { annotations }
+ ...annotations && { annotations },
+ ...experimental_assets_jwt && { assets: experimental_assets_jwt }
  };
  if (bindings.unsafe?.metadata !== void 0) {
  for (const key of Object.keys(bindings.unsafe.metadata)) {
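Note: taken together, the three `createWorkerUploadForm` hunks above replace the old `experimental_assets` object (`{ jwt, staticAssetsOnly }`) on the worker with a plain `experimental_assets_jwt` string: the JWT is attached to the top-level metadata as `assets`, an `assets`-typed binding is emitted whenever `bindings.experimental_assets` is configured, and the assets-only short-circuit now keys off the `no-op-assets-worker.js` entry name. A rough sketch of the resulting metadata, inferred from this diff rather than from a documented upload schema:

    // Inferred shape only; field names come from the hunks above, everything else is elided.
    interface UploadMetadataSketch {
      bindings: Array<
        | { name: string; type: "assets" } // pushed when bindings.experimental_assets is set
        | { name: string; type: string }   // all other binding kinds
      >;
      assets?: string; // experimental_assets_jwt, when present
    }

    // Assets-only uploads (entry module "no-op-assets-worker.js") short-circuit to just:
    const assetsOnlyMetadata = (jwt: string): string => JSON.stringify({ assets: jwt });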
@@ -165046,13 +165096,13 @@ function usage(yargs, shim3) {
  };
  self2.stringifiedValues = /* @__PURE__ */ __name(function stringifiedValues(values, separator) {
  let string = "";
- const sep3 = separator || ", ";
+ const sep4 = separator || ", ";
  const array = [].concat(values);
  if (!values || !array.length)
  return string;
  array.forEach((value) => {
  if (string.length)
- string += sep3;
+ string += sep4;
  string += JSON.stringify(value);
  });
  return string;
@@ -168921,7 +168971,7 @@ var getSelectRenderers = /* @__PURE__ */ __name((config) => {
  const color = isInListOfValues || active2 ? blue : white;
  const text = active2 ? color.underline(optionLabel) : color(optionLabel);
  const sublabel = opt.sublabel ? color.grey(opt.sublabel) : "";
- const indicator = isInListOfValues || active2 && !Array.isArray(value) ? color(shapes.radioActive) : color(shapes.radioInactive);
+ const indicator = isInListOfValues || active2 && !Array.isArray(value) ? color(opt.activeIcon ?? shapes.radioActive) : color(opt.inactiveIcon ?? shapes.radioInactive);
  return `${space(2)}${indicator} ${text} ${sublabel}`;
  }, "renderOption");
  const renderOptionCondition = /* @__PURE__ */ __name((_3, i) => {
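Note: combined with the `backActive`/`backInactive` glyphs added to `shapes` earlier, the `opt.activeIcon ?? ...` / `opt.inactiveIcon ?? ...` fallbacks above let an individual select option override the radio indicator, for example for a "go back" entry. A hedged sketch of such an option; the option shape is assumed from the fields this renderer reads, not from a published type:

    // Assumed option shape, based only on the fields read in renderOption above.
    interface SelectOptionSketch {
      label: string;
      value: string;
      sublabel?: string;
      activeIcon?: string;   // falls back to shapes.radioActive
      inactiveIcon?: string; // falls back to shapes.radioInactive
    }

    const backOption: SelectOptionSketch = {
      label: "Go back",
      value: "__back__",      // illustrative sentinel value
      activeIcon: "\u25C0",   // shapes.backActive
      inactiveIcon: "\u25C1", // shapes.backInactive
    };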
@@ -170496,19 +170546,15 @@ init_import_meta_url();
  // ../cli/args.ts
  init_import_meta_url();
  var processArgument = /* @__PURE__ */ __name(async (args, name, promptConfig) => {
- let value = args[name];
- const renderSubmitted = getRenderers(promptConfig).submit;
- if (value !== void 0) {
- const error2 = promptConfig.validate?.(value);
- if (error2) {
- crash(error2);
- }
- const lines = renderSubmitted({ value });
- logRaw(lines.join("\n"));
- return value;
- }
- value = await inputPrompt(promptConfig);
- return value;
+ const value = args[name];
+ const result = await inputPrompt({
+ ...promptConfig,
+ // Accept the default value if the arg is already set
+ acceptDefault: promptConfig.acceptDefault ?? value !== void 0,
+ defaultValue: value ?? promptConfig.defaultValue
+ });
+ args[name] = result;
+ return result;
  }, "processArgument");
 
  // src/cloudchamber/cli/index.ts
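Note: `processArgument` no longer bypasses the prompt when the CLI argument is already present. It now always delegates to `inputPrompt`, passing the supplied value through as `defaultValue` and auto-accepting it via `acceptDefault`, then writes the result back onto `args`. A hedged usage sketch; `processArgument` and its prompt config are wrangler-internal, and the `type`/`question`/`label` fields shown here are assumptions, not a documented API:

    // Hypothetical call site.
    const args: { name?: string } = { name: "my-worker" };

    const name = await processArgument(args, "name", {
      type: "text",
      question: "Name of the Worker",
      label: "name",
      defaultValue: "worker",
    });
    // Because args.name was provided, acceptDefault resolves to true, so inputPrompt
    // renders "my-worker" as an already-submitted answer instead of blocking on stdin,
    // and args.name is overwritten with the returned value.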
@@ -178010,10 +178056,10 @@ var import_node_path36 = __toESM(require("node:path"));
 
  // src/experimental-assets.ts
  init_import_meta_url();
- var import_node_assert16 = __toESM(require("node:assert"));
+ var import_node_assert15 = __toESM(require("node:assert"));
  var import_node_fs20 = require("node:fs");
- var import_promises14 = require("node:fs/promises");
- var path40 = __toESM(require("node:path"));
+ var import_promises13 = require("node:fs/promises");
+ var path39 = __toESM(require("node:path"));
  var import_mime3 = __toESM(require_mime2());
 
  // ../../node_modules/.pnpm/p-queue@7.2.0/node_modules/p-queue/dist/index.js
@@ -178881,11 +178927,291 @@ function Progress({ done, total }) {
178881
178927
  }
178882
178928
  __name(Progress, "Progress");
178883
178929
 
178930
+ // src/experimental-assets.ts
178931
+ var BULK_UPLOAD_CONCURRENCY2 = 3;
178932
+ var MAX_ASSET_COUNT2 = 2e4;
178933
+ var MAX_ASSET_SIZE2 = 25 * 1024 * 1024;
178934
+ var MAX_UPLOAD_ATTEMPTS2 = 5;
178935
+ var MAX_UPLOAD_GATEWAY_ERRORS2 = 5;
178936
+ var syncExperimentalAssets = /* @__PURE__ */ __name(async (accountId, scriptName, assetDirectory) => {
178937
+ (0, import_node_assert15.default)(accountId, "Missing accountId");
178938
+ logger.info("\u{1F300} Building list of assets...");
178939
+ const manifest = await walk(assetDirectory, {});
178940
+ logger.info("\u{1F300} Starting asset upload...");
178941
+ const initializeAssetsResponse = await fetchResult(
178942
+ `/accounts/${accountId}/workers/scripts/${scriptName}/assets-upload-session`,
178943
+ {
178944
+ headers: { "Content-Type": "application/json" },
178945
+ method: "POST",
178946
+ body: JSON.stringify({ manifest })
178947
+ }
178948
+ );
178949
+ if (initializeAssetsResponse.buckets.flat().length === 0) {
178950
+ if (!initializeAssetsResponse.jwt) {
178951
+ throw new FatalError(
178952
+ "Could not find assets information to attach to deployment. Please try again.",
178953
+ 1
178954
+ );
178955
+ }
178956
+ logger.info(`No files to upload. Proceeding with deployment...`);
178957
+ return initializeAssetsResponse.jwt;
178958
+ }
178959
+ const numberFilesToUpload = initializeAssetsResponse.buckets.flat().length;
178960
+ logger.info(
178961
+ `\u{1F300} Found ${numberFilesToUpload} file${numberFilesToUpload > 1 ? "s" : ""} to upload. Proceeding with upload...`
178962
+ );
178963
+ const manifestLookup = Object.entries(manifest);
178964
+ let assetLogCount = 0;
178965
+ const assetBuckets = initializeAssetsResponse.buckets.map((bucket) => {
178966
+ return bucket.map((fileHash) => {
178967
+ const manifestEntry = manifestLookup.find(
178968
+ (file) => file[1].hash === fileHash
178969
+ );
178970
+ if (manifestEntry === void 0) {
178971
+ throw new FatalError(
178972
+ `A file was requested that does not appear to exist.`,
178973
+ 1
178974
+ );
178975
+ }
178976
+ assetLogCount = logAssetUpload(`+ ${manifestEntry[0]}`, assetLogCount);
178977
+ return manifestEntry;
178978
+ });
178979
+ });
178980
+ const queue = new PQueue({ concurrency: BULK_UPLOAD_CONCURRENCY2 });
178981
+ let attempts = 0;
178982
+ const start = Date.now();
178983
+ let completionJwt = "";
178984
+ for (const [bucketIndex, bucket] of assetBuckets.entries()) {
178985
+ attempts = 0;
178986
+ let gatewayErrors = 0;
178987
+ const doUpload = /* @__PURE__ */ __name(async () => {
178988
+ const payload = await Promise.all(
178989
+ bucket.map(async (manifestEntry) => {
178990
+ const absFilePath = path39.join(assetDirectory, manifestEntry[0]);
178991
+ return {
178992
+ base64: true,
178993
+ key: manifestEntry[1].hash,
178994
+ metadata: {
178995
+ contentType: (0, import_mime3.getType)(absFilePath) || "application/octet-stream"
178996
+ },
178997
+ value: (await (0, import_promises13.readFile)(absFilePath)).toString("base64")
178998
+ };
178999
+ })
179000
+ );
179001
+ try {
179002
+ const res = await fetchResult(
179003
+ `/accounts/${accountId}/workers/assets/upload`,
179004
+ {
179005
+ method: "POST",
179006
+ headers: {
179007
+ "Content-Type": "application/jsonl",
179008
+ Authorization: `Bearer ${initializeAssetsResponse.jwt}`
179009
+ },
179010
+ body: payload.map((x2) => JSON.stringify(x2)).join("\n")
179011
+ }
179012
+ );
179013
+ logger.info(
179014
+ `Uploaded bucket ${bucketIndex + 1}/${initializeAssetsResponse.buckets.length}`
179015
+ );
179016
+ return res;
179017
+ } catch (e3) {
179018
+ if (attempts < MAX_UPLOAD_ATTEMPTS2) {
179019
+ logger.info(
179020
+ source_default.dim(
179021
+ `Bucket ${bucketIndex + 1}/${initializeAssetsResponse.buckets.length} upload failed. Retrying...
179022
+ `,
179023
+ e3
179024
+ )
179025
+ );
179026
+ await new Promise(
179027
+ (resolvePromise) => setTimeout(resolvePromise, Math.pow(2, attempts) * 1e3)
179028
+ );
179029
+ if (e3 instanceof APIError && e3.isGatewayError()) {
179030
+ queue.concurrency = 1;
179031
+ await new Promise(
179032
+ (resolvePromise) => setTimeout(resolvePromise, Math.pow(2, gatewayErrors) * 5e3)
179033
+ );
179034
+ gatewayErrors++;
179035
+ if (gatewayErrors >= MAX_UPLOAD_GATEWAY_ERRORS2) {
179036
+ attempts++;
179037
+ }
179038
+ } else {
179039
+ attempts++;
179040
+ }
179041
+ return doUpload();
179042
+ } else if (isJwtExpired(initializeAssetsResponse.jwt)) {
179043
+ throw new FatalError(
179044
+ `Upload took too long.
179045
+ Asset upload took too long on bucket ${bucketIndex + 1}/${initializeAssetsResponse.buckets.length}. Please try again.
179046
+ Assets already uploaded have been saved, so the next attempt will automatically resume from this point.`
179047
+ );
179048
+ } else {
179049
+ throw e3;
179050
+ }
179051
+ }
179052
+ }, "doUpload");
179053
+ void queue.add(
179054
+ () => doUpload().then((res) => {
179055
+ completionJwt = res.jwt || completionJwt;
179056
+ })
179057
+ );
179058
+ }
179059
+ queue.on("error", (error2) => {
179060
+ logger.error(error2.message);
179061
+ throw error2;
179062
+ });
179063
+ await queue.onIdle();
179064
+ if (!completionJwt) {
179065
+ throw new FatalError(
179066
+ "Failed to complete asset upload. Please try again.",
179067
+ 1
179068
+ );
179069
+ }
179070
+ const uploadMs = Date.now() - start;
179071
+ const skipped = Object.keys(manifest).length - numberFilesToUpload;
179072
+ const skippedMessage = skipped > 0 ? `(${skipped} already uploaded) ` : "";
179073
+ logger.log(
179074
+ `\u2728 Success! Uploaded ${numberFilesToUpload} file${numberFilesToUpload > 1 ? "s" : ""} ${skippedMessage}${formatTime2(uploadMs)}
179075
+ `
179076
+ );
179077
+ return completionJwt;
179078
+ }, "syncExperimentalAssets");
179079
+ var walk = /* @__PURE__ */ __name(async (dir, manifest, startingDir = dir) => {
179080
+ const files = await (0, import_promises13.readdir)(dir);
179081
+ let counter = 0;
179082
+ await Promise.all(
179083
+ files.map(async (file) => {
179084
+ const filepath = path39.join(dir, file);
179085
+ const relativeFilepath = path39.relative(startingDir, filepath);
179086
+ const filestat = await (0, import_promises13.stat)(filepath);
179087
+ if (filestat.isSymbolicLink()) {
179088
+ return;
179089
+ }
179090
+ if (filestat.isDirectory()) {
179091
+ manifest = await walk(filepath, manifest, startingDir);
179092
+ } else {
179093
+ if (counter >= MAX_ASSET_COUNT2) {
179094
+ throw new UserError(
179095
+ `Maximum number of assets exceeded.
179096
+ Cloudflare Workers supports up to ${MAX_ASSET_COUNT2.toLocaleString()} assets in a version. We found ${counter.toLocaleString()} files in the specified assets directory "${startingDir}".
179097
+ Ensure your assets directory contains a maximum of ${MAX_ASSET_COUNT2.toLocaleString()} files, and that you have specified your assets directory correctly.`
179098
+ );
179099
+ }
179100
+ if (filestat.size > MAX_ASSET_SIZE2) {
179101
+ throw new UserError(
179102
+ `Asset too large.
179103
+ Cloudflare Workers supports assets with sizes of up to ${prettyBytes(
179104
+ MAX_ASSET_SIZE2,
179105
+ {
179106
+ binary: true
179107
+ }
179108
+ )}. We found a file ${filepath} with a size of ${prettyBytes(
179109
+ filestat.size,
179110
+ {
179111
+ binary: true
179112
+ }
179113
+ )}.
179114
+ Ensure all assets in your assets directory "${startingDir}" conform with the Workers maximum size requirement.`
179115
+ );
179116
+ }
179117
+ manifest[encodeFilePath(relativeFilepath)] = {
179118
+ hash: hashFile(filepath),
179119
+ size: filestat.size
179120
+ };
179121
+ counter++;
179122
+ }
179123
+ })
179124
+ );
179125
+ return manifest;
179126
+ }, "walk");
179127
+ var MAX_DIFF_LINES = 100;
179128
+ function logAssetUpload(line, diffCount) {
179129
+ const level = logger.loggerLevel;
179130
+ if (LOGGER_LEVELS[level] >= LOGGER_LEVELS.debug) {
179131
+ logger.debug(line);
179132
+ } else if (diffCount < MAX_DIFF_LINES) {
179133
+ logger.info(line);
179134
+ } else if (diffCount === MAX_DIFF_LINES) {
179135
+ const msg = " (truncating changed assets log, set `WRANGLER_LOG=debug` environment variable to see full diff)";
179136
+ logger.info(source_default.dim(msg));
179137
+ }
179138
+ return diffCount++;
179139
+ }
179140
+ __name(logAssetUpload, "logAssetUpload");
179141
+ function getExperimentalAssetsBasePath(config, experimentalAssetsCommandLineArg) {
179142
+ return experimentalAssetsCommandLineArg ? process.cwd() : path39.resolve(path39.dirname(config.configPath ?? "wrangler.toml"));
179143
+ }
179144
+ __name(getExperimentalAssetsBasePath, "getExperimentalAssetsBasePath");
179145
+ function processExperimentalAssetsArg(args, config) {
179146
+ const experimentalAssets = args.experimentalAssets ? { directory: args.experimentalAssets } : config.experimental_assets;
179147
+ if (experimentalAssets) {
179148
+ const experimentalAssetsBasePath = getExperimentalAssetsBasePath(
179149
+ config,
179150
+ args.experimentalAssets
179151
+ );
179152
+ const resolvedExperimentalAssetsPath = path39.resolve(
179153
+ experimentalAssetsBasePath,
179154
+ experimentalAssets.directory
179155
+ );
179156
+ if (!(0, import_node_fs20.existsSync)(resolvedExperimentalAssetsPath)) {
179157
+ const sourceOfTruthMessage = args.experimentalAssets ? '"--experimental-assets" command line argument' : '"experimental_assets.directory" field in your configuration file';
179158
+ throw new UserError(
179159
+ `The directory specified by the ${sourceOfTruthMessage} does not exist:
179160
+ ${resolvedExperimentalAssetsPath}`
179161
+ );
179162
+ }
179163
+ experimentalAssets.directory = resolvedExperimentalAssetsPath;
179164
+ }
179165
+ return experimentalAssets;
179166
+ }
179167
+ __name(processExperimentalAssetsArg, "processExperimentalAssetsArg");
179168
+ var encodeFilePath = /* @__PURE__ */ __name((filePath) => {
179169
+ const encodedPath = filePath.split(path39.sep).map((segment) => encodeURIComponent(segment)).join("/");
179170
+ return "/" + encodedPath;
179171
+ }, "encodeFilePath");
179172
+
179173
+ // src/output.ts
179174
+ init_import_meta_url();
179175
+ var import_node_crypto6 = require("node:crypto");
179176
+ var import_node_fs21 = require("node:fs");
179177
+ var import_node_path35 = require("node:path");
179178
+ function writeOutput(entry) {
179179
+ if (outputFilePath === void 0) {
179180
+ outputFilePath = getOutputFilePath();
179181
+ }
179182
+ if (outputFilePath !== null) {
179183
+ ensureDirectoryExistsSync(outputFilePath);
179184
+ const entryJSON = JSON.stringify({
179185
+ ...entry,
179186
+ timestamp: (/* @__PURE__ */ new Date()).toISOString()
179187
+ });
179188
+ (0, import_node_fs21.appendFileSync)(outputFilePath, entryJSON + "\n");
179189
+ }
179190
+ }
179191
+ __name(writeOutput, "writeOutput");
179192
+ var outputFilePath = void 0;
179193
+ function getOutputFilePath() {
179194
+ const outputFilePathFromEnv = getOutputFilePathFromEnv();
179195
+ if (outputFilePathFromEnv) {
179196
+ return outputFilePathFromEnv;
179197
+ }
179198
+ const outputFileDirectoryFromEnv = getOutputFileDirectoryFromEnv();
179199
+ if (outputFileDirectoryFromEnv) {
179200
+ const date = (/* @__PURE__ */ new Date()).toISOString().replaceAll(":", "-").replace(".", "_").replace("T", "_").replace("Z", "");
179201
+ return (0, import_node_path35.resolve)(
179202
+ outputFileDirectoryFromEnv,
179203
+ `wrangler-output-${date}-${(0, import_node_crypto6.randomBytes)(3).toString("hex")}.json`
179204
+ );
179205
+ }
179206
+ return null;
179207
+ }
179208
+ __name(getOutputFilePath, "getOutputFilePath");
179209
+
178884
179210
  // src/sites.ts
178885
179211
  init_import_meta_url();
178886
- var import_node_assert15 = __toESM(require("node:assert"));
178887
- var import_promises13 = require("node:fs/promises");
178888
- var path39 = __toESM(require("node:path"));
179212
+ var import_node_assert16 = __toESM(require("node:assert"));
179213
+ var import_promises14 = require("node:fs/promises");
179214
+ var path40 = __toESM(require("node:path"));
178889
179215
  var import_ignore = __toESM(require_ignore());
178890
179216
 
178891
179217
  // ../../node_modules/.pnpm/xxhash-wasm@1.0.1/node_modules/xxhash-wasm/esm/xxhash-wasm.js
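Note: inside the relocated `src/experimental-assets.ts` block above, two behavioural changes are easy to miss among the identifier renames: manifest keys are now produced by the new `encodeFilePath` helper (each path segment passed through `encodeURIComponent`, joined with "/") instead of the old `urlSafe(path.join("/", name))`, and the bulk-upload request now sends `Content-Type: application/jsonl` rather than `application/x-ndjson`. A small self-contained illustration of the key encoding, assuming a POSIX path separator:

    import * as path from "node:path";

    // Same logic as the encodeFilePath helper added above: URL-encode each segment
    // of the asset's relative path and join with "/" to form the manifest key.
    const encodeFilePath = (filePath: string): string =>
      "/" + filePath.split(path.sep).map(encodeURIComponent).join("/");

    console.log(encodeFilePath("blog posts/héllo.html"));
    // => "/blog%20posts/h%C3%A9llo.html"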
@@ -178955,7 +179281,7 @@ var HIDDEN_FILES_TO_INCLUDE = /* @__PURE__ */ new Set([
178955
179281
  // See https://datatracker.ietf.org/doc/html/rfc8615
178956
179282
  ]);
178957
179283
  async function* getFilesInFolder(dirPath) {
178958
- const files = await (0, import_promises13.readdir)(dirPath, { withFileTypes: true });
179284
+ const files = await (0, import_promises14.readdir)(dirPath, { withFileTypes: true });
178959
179285
  for (const file of files) {
178960
179286
  if (ALWAYS_IGNORE.has(file.name)) {
178961
179287
  continue;
@@ -178964,9 +179290,9 @@ async function* getFilesInFolder(dirPath) {
178964
179290
  continue;
178965
179291
  }
178966
179292
  if (file.isDirectory()) {
178967
- yield* await getFilesInFolder(path39.join(dirPath, file.name));
179293
+ yield* await getFilesInFolder(path40.join(dirPath, file.name));
178968
179294
  } else {
178969
- yield path39.join(dirPath, file.name);
179295
+ yield path40.join(dirPath, file.name);
178970
179296
  }
178971
179297
  }
178972
179298
  }
@@ -178976,11 +179302,11 @@ function hashFileContent(hasher, content) {
178976
179302
  }
178977
179303
  __name(hashFileContent, "hashFileContent");
178978
179304
  function hashAsset(hasher, filePath, content) {
178979
- const extName = path39.extname(filePath) || "";
178980
- const baseName = path39.basename(filePath, extName);
178981
- const directory = path39.dirname(filePath);
179305
+ const extName = path40.extname(filePath) || "";
179306
+ const baseName = path40.basename(filePath, extName);
179307
+ const directory = path40.dirname(filePath);
178982
179308
  const hash = hashFileContent(hasher, content);
178983
- return urlSafe(path39.join(directory, `${baseName}.${hash}${extName}`));
179309
+ return urlSafe(path40.join(directory, `${baseName}.${hash}${extName}`));
178984
179310
  }
178985
179311
  __name(hashAsset, "hashAsset");
178986
179312
  async function createKVNamespaceIfNotAlreadyExisting(title, accountId) {
@@ -178997,7 +179323,7 @@ async function createKVNamespaceIfNotAlreadyExisting(title, accountId) {
178997
179323
  };
178998
179324
  }
178999
179325
  __name(createKVNamespaceIfNotAlreadyExisting, "createKVNamespaceIfNotAlreadyExisting");
179000
- var MAX_DIFF_LINES = 100;
179326
+ var MAX_DIFF_LINES2 = 100;
179001
179327
  var MAX_BUCKET_SIZE2 = 98 * 1e3 * 1e3;
179002
179328
  var MAX_BUCKET_KEYS = BATCH_KEY_MAX;
179003
179329
  var MAX_BATCH_OPERATIONS = 5;
@@ -179013,7 +179339,7 @@ async function syncLegacyAssets(accountId, scriptName, siteAssets, preview, dryR
179013
179339
  logger.log("(Note: doing a dry run, not uploading or deleting anything.)");
179014
179340
  return { manifest: void 0, namespace: void 0 };
179015
179341
  }
179016
- (0, import_node_assert15.default)(accountId, "Missing accountId");
179342
+ (0, import_node_assert16.default)(accountId, "Missing accountId");
179017
179343
  const title = `__${scriptName}-workers_sites_assets${preview ? "_preview" : ""}`;
179018
179344
  const { id: namespace } = await createKVNamespaceIfNotAlreadyExisting(
179019
179345
  title,
@@ -179023,7 +179349,7 @@ async function syncLegacyAssets(accountId, scriptName, siteAssets, preview, dryR
179023
179349
  const namespaceKeysResponse = await listKVNamespaceKeys(accountId, namespace);
179024
179350
  const namespaceKeyInfoMap = new Map(namespaceKeysResponse.map((x2) => [x2.name, x2]));
179025
179351
  const namespaceKeys = new Set(namespaceKeysResponse.map((x2) => x2.name));
179026
- const assetDirectory = path39.join(
179352
+ const assetDirectory = path40.join(
179027
179353
  siteAssets.baseDirectory,
179028
179354
  siteAssets.assetDirectory
179029
179355
  );
@@ -179041,9 +179367,9 @@ async function syncLegacyAssets(accountId, scriptName, siteAssets, preview, dryR
179041
179367
  const level = logger.loggerLevel;
179042
179368
  if (LOGGER_LEVELS[level] >= LOGGER_LEVELS.debug) {
179043
179369
  logger.debug(line);
179044
- } else if (diffCount < MAX_DIFF_LINES) {
179370
+ } else if (diffCount < MAX_DIFF_LINES2) {
179045
179371
  logger.info(line);
179046
- } else if (diffCount === MAX_DIFF_LINES) {
179372
+ } else if (diffCount === MAX_DIFF_LINES2) {
179047
179373
  const msg = " (truncating changed assets log, set `WRANGLER_LOG=debug` environment variable to see full diff)";
179048
179374
  logger.info(source_default.dim(msg));
179049
179375
  }
@@ -179052,11 +179378,11 @@ async function syncLegacyAssets(accountId, scriptName, siteAssets, preview, dryR
179052
179378
  __name(logDiff, "logDiff");
179053
179379
  logger.info("Building list of assets to upload...");
179054
179380
  for await (const absAssetFile of getFilesInFolder(assetDirectory)) {
179055
- const assetFile = path39.relative(assetDirectory, absAssetFile);
179381
+ const assetFile = path40.relative(assetDirectory, absAssetFile);
179056
179382
  if (!include(assetFile) || exclude(assetFile)) {
179057
179383
  continue;
179058
179384
  }
179059
- const content = await (0, import_promises13.readFile)(absAssetFile, "base64");
179385
+ const content = await (0, import_promises14.readFile)(absAssetFile, "base64");
179060
179386
  const assetSize = Buffer.byteLength(content);
179061
179387
  await validateAssetSize(absAssetFile, assetFile);
179062
179388
  const assetKey = hashAsset(hasher, assetFile, content);
@@ -179078,7 +179404,7 @@ async function syncLegacyAssets(accountId, scriptName, siteAssets, preview, dryR
179078
179404
  skipCount++;
179079
179405
  }
179080
179406
  namespaceKeys.delete(assetKey);
179081
- const manifestKey = urlSafe(path39.relative(assetDirectory, absAssetFile));
179407
+ const manifestKey = urlSafe(path40.relative(assetDirectory, absAssetFile));
179082
179408
  manifest[manifestKey] = assetKey;
179083
179409
  }
179084
179410
  if (uploadBucket.length > 0) {
@@ -179111,7 +179437,7 @@ async function syncLegacyAssets(accountId, scriptName, siteAssets, preview, dryR
179111
179437
  for (const [absAssetFile, assetKey] of nextBucket) {
179112
179438
  bucket.push({
179113
179439
  key: assetKey,
179114
- value: await (0, import_promises13.readFile)(absAssetFile, "base64"),
179440
+ value: await (0, import_promises14.readFile)(absAssetFile, "base64"),
179115
179441
  base64: true
179116
179442
  });
179117
179443
  if (controller.signal.aborted) {
@@ -179199,7 +179525,7 @@ function createPatternMatcher(patterns, exclude) {
179199
179525
  }
179200
179526
  __name(createPatternMatcher, "createPatternMatcher");
179201
179527
  async function validateAssetSize(absFilePath, relativeFilePath) {
179202
- const { size } = await (0, import_promises13.stat)(absFilePath);
179528
+ const { size } = await (0, import_promises14.stat)(absFilePath);
179203
179529
  if (size > 25 * 1024 * 1024) {
179204
179530
  throw new UserError(
179205
179531
  `File ${relativeFilePath} is too big, it should be under 25 MiB. See https://developers.cloudflare.com/workers/platform/limits#kv-limits`
@@ -179220,7 +179546,7 @@ function urlSafe(filePath) {
179220
179546
  }
179221
179547
  __name(urlSafe, "urlSafe");
179222
179548
  function getLegacyAssetPaths(config, assetDirectory) {
179223
- const baseDirectory = assetDirectory ? process.cwd() : path39.resolve(path39.dirname(config.configPath ?? "wrangler.toml"));
179549
+ const baseDirectory = assetDirectory ? process.cwd() : path40.resolve(path40.dirname(config.configPath ?? "wrangler.toml"));
179224
179550
  assetDirectory ??= typeof config.legacy_assets === "string" ? config.legacy_assets : config.legacy_assets !== void 0 ? config.legacy_assets.bucket : void 0;
179225
179551
  const includePatterns = typeof config.legacy_assets !== "string" && config.legacy_assets?.include || [];
179226
179552
  const excludePatterns = typeof config.legacy_assets !== "string" && config.legacy_assets?.exclude || [];
@@ -179233,7 +179559,7 @@ function getLegacyAssetPaths(config, assetDirectory) {
179233
179559
  }
179234
179560
  __name(getLegacyAssetPaths, "getLegacyAssetPaths");
179235
179561
  function getSiteAssetPaths(config, assetDirectory, includePatterns = config.site?.include ?? [], excludePatterns = config.site?.exclude ?? []) {
179236
- const baseDirectory = assetDirectory ? process.cwd() : path39.resolve(path39.dirname(config.configPath ?? "wrangler.toml"));
179562
+ const baseDirectory = assetDirectory ? process.cwd() : path40.resolve(path40.dirname(config.configPath ?? "wrangler.toml"));
179237
179563
  assetDirectory ??= config.site?.bucket;
179238
179564
  if (assetDirectory) {
179239
179565
  return {
@@ -179248,283 +179574,6 @@ function getSiteAssetPaths(config, assetDirectory, includePatterns = config.site
179248
179574
  }
179249
179575
  __name(getSiteAssetPaths, "getSiteAssetPaths");
179250
179576
 
179251
- // src/experimental-assets.ts
179252
- var BULK_UPLOAD_CONCURRENCY2 = 3;
179253
- var MAX_ASSET_COUNT2 = 2e4;
179254
- var MAX_ASSET_SIZE2 = 25 * 1024 * 1024;
179255
- var MAX_UPLOAD_ATTEMPTS2 = 5;
179256
- var MAX_UPLOAD_GATEWAY_ERRORS2 = 5;
179257
- var syncExperimentalAssets = /* @__PURE__ */ __name(async (accountId, scriptName, assetDirectory) => {
179258
- (0, import_node_assert16.default)(accountId, "Missing accountId");
179259
- logger.info("\u{1F300} Building list of assets...");
179260
- const manifest = await walk(assetDirectory, {});
179261
- logger.info("\u{1F300} Starting asset upload...");
179262
- const initializeAssetsResponse = await fetchResult(
179263
- `/accounts/${accountId}/workers/scripts/${scriptName}/assets-upload-session`,
179264
- {
179265
- headers: { "Content-Type": "application/json" },
179266
- method: "POST",
179267
- body: JSON.stringify({ manifest })
179268
- }
179269
- );
179270
- if (initializeAssetsResponse.buckets.flat().length === 0) {
179271
- if (!initializeAssetsResponse.jwt) {
179272
- throw new FatalError(
179273
- "Could not find assets information to attach to deployment. Please try again.",
179274
- 1
179275
- );
179276
- }
179277
- logger.info(`No files to upload. Proceeding with deployment...`);
179278
- return initializeAssetsResponse.jwt;
179279
- }
179280
- const numberFilesToUpload = initializeAssetsResponse.buckets.flat().length;
179281
- logger.info(
179282
- `\u{1F300} Found ${numberFilesToUpload} file${numberFilesToUpload > 1 ? "s" : ""} to upload. Proceeding with upload...`
179283
- );
179284
- const manifestLookup = Object.entries(manifest);
179285
- let assetLogCount = 0;
179286
- const assetBuckets = initializeAssetsResponse.buckets.map((bucket) => {
179287
- return bucket.map((fileHash) => {
179288
- const manifestEntry = manifestLookup.find(
179289
- (file) => file[1].hash === fileHash
179290
- );
179291
- if (manifestEntry === void 0) {
179292
- throw new FatalError(
179293
- `A file was requested that does not appear to exist.`,
179294
- 1
179295
- );
179296
- }
179297
- assetLogCount = logAssetUpload(`+ ${manifestEntry[0]}`, assetLogCount);
179298
- return manifestEntry;
179299
- });
179300
- });
179301
- const queue = new PQueue({ concurrency: BULK_UPLOAD_CONCURRENCY2 });
179302
- let attempts = 0;
179303
- const start = Date.now();
179304
- let completionJwt = "";
179305
- for (const [bucketIndex, bucket] of assetBuckets.entries()) {
179306
- attempts = 0;
179307
- let gatewayErrors = 0;
179308
- const doUpload = /* @__PURE__ */ __name(async () => {
179309
- const payload = await Promise.all(
179310
- bucket.map(async (manifestEntry) => {
179311
- const absFilePath = path40.join(assetDirectory, manifestEntry[0]);
179312
- return {
179313
- base64: true,
179314
- key: manifestEntry[1].hash,
179315
- metadata: {
179316
- contentType: (0, import_mime3.getType)(absFilePath) || "application/octet-stream"
179317
- },
179318
- value: (await (0, import_promises14.readFile)(absFilePath)).toString("base64")
179319
- };
179320
- })
179321
- );
179322
- try {
179323
- const res = await fetchResult(
179324
- `/accounts/${accountId}/workers/assets/upload`,
179325
- {
179326
- method: "POST",
179327
- headers: {
179328
- "Content-Type": "application/x-ndjson",
179329
- Authorization: `Bearer ${initializeAssetsResponse.jwt}`
179330
- },
179331
- body: payload.map((x2) => JSON.stringify(x2)).join("\n")
179332
- }
179333
- );
179334
- logger.info(
179335
- `Uploaded bucket ${bucketIndex + 1}/${initializeAssetsResponse.buckets.length}`
179336
- );
179337
- return res;
179338
- } catch (e3) {
179339
- if (attempts < MAX_UPLOAD_ATTEMPTS2) {
179340
- logger.info(
179341
- source_default.dim(
179342
- `Bucket ${bucketIndex + 1}/${initializeAssetsResponse.buckets.length} upload failed. Retrying...
179343
- `,
179344
- e3
179345
- )
179346
- );
179347
- await new Promise(
179348
- (resolvePromise) => setTimeout(resolvePromise, Math.pow(2, attempts) * 1e3)
179349
- );
179350
- if (e3 instanceof APIError && e3.isGatewayError()) {
179351
- queue.concurrency = 1;
179352
- await new Promise(
179353
- (resolvePromise) => setTimeout(resolvePromise, Math.pow(2, gatewayErrors) * 5e3)
179354
- );
179355
- gatewayErrors++;
179356
- if (gatewayErrors >= MAX_UPLOAD_GATEWAY_ERRORS2) {
179357
- attempts++;
179358
- }
179359
- } else {
179360
- attempts++;
179361
- }
179362
- return doUpload();
179363
- } else if (isJwtExpired(initializeAssetsResponse.jwt)) {
179364
- throw new FatalError(
179365
- `Upload took too long.
179366
- Asset upload took too long on bucket ${bucketIndex + 1}/${initializeAssetsResponse.buckets.length}. Please try again.
179367
- Assets already uploaded have been saved, so the next attempt will automatically resume from this point.`
179368
- );
179369
- } else {
179370
- throw e3;
179371
- }
179372
- }
179373
- }, "doUpload");
179374
- void queue.add(
179375
- () => doUpload().then((res) => {
179376
- completionJwt = res.jwt || completionJwt;
179377
- })
179378
- );
179379
- }
179380
- queue.on("error", (error2) => {
179381
- logger.error(error2.message);
179382
- throw error2;
179383
- });
179384
- await queue.onIdle();
179385
- if (!completionJwt) {
179386
- throw new FatalError(
179387
- "Failed to complete asset upload. Please try again.",
179388
- 1
179389
- );
179390
- }
179391
- const uploadMs = Date.now() - start;
179392
- const skipped = Object.keys(manifest).length - numberFilesToUpload;
179393
- const skippedMessage = skipped > 0 ? `(${skipped} already uploaded) ` : "";
179394
- logger.log(
179395
- `\u2728 Success! Uploaded ${numberFilesToUpload} file${numberFilesToUpload > 1 ? "s" : ""} ${skippedMessage}${formatTime2(uploadMs)}
179396
- `
179397
- );
179398
- return completionJwt;
179399
- }, "syncExperimentalAssets");
179400
- var walk = /* @__PURE__ */ __name(async (dir, manifest, startingDir = dir) => {
179401
- const files = await (0, import_promises14.readdir)(dir);
179402
- let counter = 0;
179403
- await Promise.all(
179404
- files.map(async (file) => {
179405
- const filepath = path40.join(dir, file);
179406
- const relativeFilepath = path40.relative(startingDir, filepath);
179407
- const filestat = await (0, import_promises14.stat)(filepath);
179408
- if (filestat.isSymbolicLink()) {
179409
- return;
179410
- }
179411
- if (filestat.isDirectory()) {
179412
- manifest = await walk(filepath, manifest, startingDir);
179413
- } else {
179414
- if (counter >= MAX_ASSET_COUNT2) {
179415
- throw new UserError(
179416
- `Maximum number of assets exceeded.
179417
- Cloudflare Workers supports up to ${MAX_ASSET_COUNT2.toLocaleString()} assets in a version. We found ${counter.toLocaleString()} files in the specified assets directory "${startingDir}".
179418
- Ensure your assets directory contains a maximum of ${MAX_ASSET_COUNT2.toLocaleString()} files, and that you have specified your assets directory correctly.`
179419
- );
179420
- }
179421
- const name = urlSafe(relativeFilepath);
179422
- if (filestat.size > MAX_ASSET_SIZE2) {
179423
- throw new UserError(
179424
- `Asset too large.
179425
- Cloudflare Workers supports assets with sizes of up to ${prettyBytes(
179426
- MAX_ASSET_SIZE2,
179427
- {
179428
- binary: true
179429
- }
179430
- )}. We found a file ${filepath} with a size of ${prettyBytes(
179431
- filestat.size,
179432
- {
179433
- binary: true
179434
- }
179435
- )}.
179436
- Ensure all assets in your assets directory "${startingDir}" conform with the Workers maximum size requirement.`
179437
- );
179438
- }
179439
- manifest[urlSafe(path40.join("/", name))] = {
179440
- hash: hashFile(filepath),
179441
- size: filestat.size
179442
- };
179443
- counter++;
179444
- }
179445
- })
179446
- );
179447
- return manifest;
179448
- }, "walk");
179449
- var MAX_DIFF_LINES2 = 100;
179450
- function logAssetUpload(line, diffCount) {
179451
- const level = logger.loggerLevel;
179452
- if (LOGGER_LEVELS[level] >= LOGGER_LEVELS.debug) {
179453
- logger.debug(line);
179454
- } else if (diffCount < MAX_DIFF_LINES2) {
179455
- logger.info(line);
179456
- } else if (diffCount === MAX_DIFF_LINES2) {
179457
- const msg = " (truncating changed assets log, set `WRANGLER_LOG=debug` environment variable to see full diff)";
179458
- logger.info(source_default.dim(msg));
179459
- }
179460
- return diffCount++;
179461
- }
179462
- __name(logAssetUpload, "logAssetUpload");
179463
- function getExperimentalAssetsBasePath(config, experimentalAssetsCommandLineArg) {
179464
- return experimentalAssetsCommandLineArg ? process.cwd() : path40.resolve(path40.dirname(config.configPath ?? "wrangler.toml"));
179465
- }
179466
- __name(getExperimentalAssetsBasePath, "getExperimentalAssetsBasePath");
179467
- function processExperimentalAssetsArg(args, config) {
179468
- const experimentalAssets = args.experimentalAssets ? { directory: args.experimentalAssets } : config.experimental_assets;
179469
- if (experimentalAssets) {
179470
- const experimentalAssetsBasePath = getExperimentalAssetsBasePath(
179471
- config,
179472
- args.experimentalAssets
179473
- );
179474
- const resolvedExperimentalAssetsPath = path40.resolve(
179475
- experimentalAssetsBasePath,
179476
- experimentalAssets.directory
179477
- );
179478
- if (!(0, import_node_fs20.existsSync)(resolvedExperimentalAssetsPath)) {
179479
- const sourceOfTruthMessage = args.experimentalAssets ? '"--experimental-assets" command line argument' : '"experimental_assets.directory" field in your configuration file';
179480
- throw new UserError(
179481
- `The directory specified by the ${sourceOfTruthMessage} does not exist:
179482
- ${resolvedExperimentalAssetsPath}`
179483
- );
179484
- }
179485
- experimentalAssets.directory = resolvedExperimentalAssetsPath;
179486
- }
179487
- return experimentalAssets;
179488
- }
179489
- __name(processExperimentalAssetsArg, "processExperimentalAssetsArg");
179490
-
179491
- // src/output.ts
179492
- init_import_meta_url();
179493
- var import_node_crypto6 = require("node:crypto");
179494
- var import_node_fs21 = require("node:fs");
179495
- var import_node_path35 = require("node:path");
179496
- function writeOutput(entry) {
179497
- if (outputFilePath === void 0) {
179498
- outputFilePath = getOutputFilePath();
179499
- }
179500
- if (outputFilePath !== null) {
179501
- ensureDirectoryExistsSync(outputFilePath);
179502
- const entryJSON = JSON.stringify({
179503
- ...entry,
179504
- timestamp: (/* @__PURE__ */ new Date()).toISOString()
179505
- });
179506
- (0, import_node_fs21.appendFileSync)(outputFilePath, entryJSON + "\n");
179507
- }
179508
- }
179509
- __name(writeOutput, "writeOutput");
179510
- var outputFilePath = void 0;
179511
- function getOutputFilePath() {
179512
- const outputFilePathFromEnv = getOutputFilePathFromEnv();
179513
- if (outputFilePathFromEnv) {
179514
- return outputFilePathFromEnv;
179515
- }
179516
- const outputFileDirectoryFromEnv = getOutputFileDirectoryFromEnv();
179517
- if (outputFileDirectoryFromEnv) {
179518
- const date = (/* @__PURE__ */ new Date()).toISOString().replaceAll(":", "-").replace(".", "_").replace("T", "_").replace("Z", "");
179519
- return (0, import_node_path35.resolve)(
179520
- outputFileDirectoryFromEnv,
179521
- `wrangler-output-${date}-${(0, import_node_crypto6.randomBytes)(3).toString("hex")}.json`
179522
- );
179523
- }
179524
- return null;
179525
- }
179526
- __name(getOutputFilePath, "getOutputFilePath");
179527
-
179528
179577
  // src/utils/collectKeyValues.ts
179529
179578
  init_import_meta_url();
179530
179579
  function collectKeyValues(array) {
@@ -179774,10 +179823,6 @@ Please shift to the --legacy-assets command to preserve the current functionalit
179774
179823
  if (!args.dryRun) {
179775
179824
  await standardPricingWarning(config);
179776
179825
  }
179777
- const experimentalAssetsOptions = experimentalAssets ? {
179778
- ...experimentalAssets,
179779
- staticAssetsOnly: !(args.script || config.main)
179780
- } : void 0;
179781
179826
  const beforeUpload = Date.now();
179782
179827
  const name = getScriptName(args, config);
179783
179828
  const { sourceMapSize, deploymentId, workerTag } = await deploy({
@@ -179797,7 +179842,7 @@ Please shift to the --legacy-assets command to preserve the current functionalit
179797
179842
  jsxFragment: args.jsxFragment,
179798
179843
  tsconfig: args.tsconfig,
179799
179844
  routes: args.routes,
179800
- experimentalAssets: experimentalAssetsOptions,
179845
+ experimentalAssets,
179801
179846
  legacyAssetPaths,
179802
179847
  legacyEnv: isLegacyEnv(config),
179803
179848
  minify: args.minify,
@@ -181740,7 +181785,8 @@ function createWorkerBundleFormData(workerBundle, config) {
181740
181785
  data_blobs: void 0,
181741
181786
  dispatch_namespaces: void 0,
181742
181787
  logfwdr: void 0,
181743
- unsafe: void 0
181788
+ unsafe: void 0,
181789
+ experimental_assets: void 0
181744
181790
  };
181745
181791
  const placement = config?.placement?.mode === "smart" ? { mode: "smart" } : void 0;
181746
181792
  const worker = {
@@ -181759,7 +181805,7 @@ function createWorkerBundleFormData(workerBundle, config) {
181759
181805
  placement,
181760
181806
  tail_consumers: void 0,
181761
181807
  limits: config?.limits,
181762
- experimental_assets: void 0
181808
+ experimental_assets_jwt: void 0
181763
181809
  };
181764
181810
  return createWorkerUploadForm(worker);
181765
181811
  }
@@ -188879,6 +188925,7 @@ async function createDraftWorker({
188879
188925
  dispatch_namespaces: [],
188880
188926
  mtls_certificates: [],
188881
188927
  logfwdr: { bindings: [] },
188928
+ experimental_assets: void 0,
188882
188929
  unsafe: {
188883
188930
  bindings: void 0,
188884
188931
  metadata: void 0,
@@ -188898,7 +188945,7 @@ async function createDraftWorker({
188898
188945
  placement: void 0,
188899
188946
  tail_consumers: void 0,
188900
188947
  limits: void 0,
188901
- experimental_assets: void 0
188948
+ experimental_assets_jwt: void 0
188902
188949
  })
188903
188950
  }
188904
188951
  );
@@ -201638,7 +201685,7 @@ async function copyWorkerVersionWithNewSecrets({
201638
201685
  "workers/message": versionMessage,
201639
201686
  "workers/tag": versionTag
201640
201687
  },
201641
- experimental_assets: void 0
201688
+ experimental_assets_jwt: void 0
201642
201689
  };
201643
201690
  const body = createWorkerUploadForm(worker);
201644
201691
  const result = await fetchResult(
@@ -201999,14 +202046,11 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
  legacyEnv: props.legacyEnv,
  env: props.env
  }) : void 0;
- const experimentalAssetsWorkerInfo = props.experimentalAssets && !props.dryRun ? {
- jwt: await syncExperimentalAssets(
- accountId,
- scriptName,
- props.experimentalAssets.directory
- ),
- staticAssetsOnly: props.experimentalAssets.staticAssetsOnly
- } : void 0;
+ const experimentalAssetsJwt = props.experimentalAssets && !props.dryRun ? await syncExperimentalAssets(
+ accountId,
+ scriptName,
+ props.experimentalAssets.directory
+ ) : void 0;
  const bindings = {
  kv_namespaces: config.kv_namespaces || [],
  send_email: config.send_email,
@@ -202030,6 +202074,7 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
  dispatch_namespaces: config.dispatch_namespaces,
  mtls_certificates: config.mtls_certificates,
  logfwdr: config.logfwdr,
+ experimental_assets: config.experimental_assets?.binding ? { binding: config.experimental_assets?.binding } : void 0,
  unsafe: {
  bindings: config.unsafe.bindings,
  metadata: config.unsafe.metadata,
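Note: deploy now forwards an `experimental_assets` binding whenever the configuration declares one. Across this diff, the only fields read from `config.experimental_assets` are `directory` (where assets are synced from) and `binding` (the optional name surfaced to the user Worker); a sketch of that shape, inferred from usage here rather than taken from wrangler's published types:

    // Inferred from the fields this diff reads off config.experimental_assets.
    interface ExperimentalAssetsConfigSketch {
      directory: string; // resolved against the config file's directory (or cwd for the CLI flag)
      binding?: string;  // when set, emitted as a { type: "assets" } binding
    }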
@@ -202065,7 +202110,7 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
  "workers/message": props.message,
  "workers/tag": props.tag
  },
- experimental_assets: experimentalAssetsWorkerInfo
+ experimental_assets_jwt: experimentalAssetsJwt
  };
  await printBundleSize(
  { name: import_node_path56.default.basename(resolvedEntryPointPath), content },
@@ -202540,11 +202585,6 @@ async function versionsUploadHandler(args) {
202540
202585
  "Using the latest version of the Workers runtime. To silence this warning, please choose a specific version of the runtime with --compatibility-date, or add a compatibility_date to your wrangler.toml.\n"
202541
202586
  );
202542
202587
  }
202543
- const experimentalAssetsOptions = experimentalAssets ? {
202544
- ...experimentalAssets,
202545
- // Flag use of assets without user worker
202546
- staticAssetsOnly: !(args.script || config.main)
202547
- } : void 0;
202548
202588
  const cliVars = collectKeyValues(args.var);
202549
202589
  const cliDefines = collectKeyValues(args.define);
202550
202590
  const cliAlias = collectKeyValues(args.alias);
@@ -202567,7 +202607,7 @@ async function versionsUploadHandler(args) {
202567
202607
  jsxFactory: args.jsxFactory,
202568
202608
  jsxFragment: args.jsxFragment,
202569
202609
  tsconfig: args.tsconfig,
202570
- experimentalAssets: experimentalAssetsOptions,
202610
+ experimentalAssets,
202571
202611
  minify: args.minify,
202572
202612
  uploadSourceMaps: args.uploadSourceMaps,
202573
202613
  nodeCompat: args.nodeCompat,
@@ -205511,14 +205551,11 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
205511
205551
  legacyEnv: props.legacyEnv,
205512
205552
  env: props.env
205513
205553
  }) : void 0;
205514
- const experimentalAssetsWorkerInfo = props.experimentalAssets && !props.dryRun ? {
205515
- jwt: await syncExperimentalAssets(
205516
- accountId,
205517
- scriptName,
205518
- props.experimentalAssets.directory
205519
- ),
205520
- staticAssetsOnly: props.experimentalAssets.staticAssetsOnly
205521
- } : void 0;
205554
+ const experimentalAssetsJwt = props.experimentalAssets && !props.dryRun ? await syncExperimentalAssets(
205555
+ accountId,
205556
+ scriptName,
205557
+ props.experimentalAssets.directory
205558
+ ) : void 0;
205522
205559
  const legacyAssets = await syncLegacyAssets(
205523
205560
  accountId,
205524
205561
  // When we're using the newer service environments, we wouldn't
@@ -205561,6 +205598,7 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
205561
205598
  dispatch_namespaces: config.dispatch_namespaces,
205562
205599
  mtls_certificates: config.mtls_certificates,
205563
205600
  logfwdr: config.logfwdr,
205601
+ experimental_assets: config.experimental_assets?.binding ? { binding: config.experimental_assets.binding } : void 0,
205564
205602
  unsafe: {
205565
205603
  bindings: config.unsafe.bindings,
205566
205604
  metadata: config.unsafe.metadata,
@@ -205599,7 +205637,7 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
205599
205637
  placement,
205600
205638
  tail_consumers: config.tail_consumers,
205601
205639
  limits: config.limits,
205602
- experimental_assets: experimentalAssetsWorkerInfo
205640
+ experimental_assets_jwt: experimentalAssetsJwt
205603
205641
  };
205604
205642
  sourceMapSize = worker.sourceMaps?.reduce(
205605
205643
  (acc, m2) => acc + m2.content.length,
@@ -206145,7 +206183,13 @@ var path63 = __toESM(require("node:path"));
  var import_miniflare15 = require("miniflare");
  var CERT_EXPIRY_DAYS = 30;
  var ONE_DAY_IN_MS = 864e5;
- function getHttpsOptions(customHttpsKeyPath, customHttpsCertPath) {
+ var getHttpsKeyPathFromEnv = getEnvironmentVariableFactory({
+ variableName: "WRANGLER_HTTPS_KEY_PATH"
+ });
+ var getHttpsCertPathFromEnv = getEnvironmentVariableFactory({
+ variableName: "WRANGLER_HTTPS_CERT_PATH"
+ });
+ function getHttpsOptions(customHttpsKeyPath = getHttpsKeyPathFromEnv(), customHttpsCertPath = getHttpsCertPathFromEnv()) {
  if (customHttpsKeyPath !== void 0 || customHttpsCertPath !== void 0) {
  if (customHttpsKeyPath === void 0 || customHttpsCertPath === void 0) {
  throw new UserError(
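Note: `getHttpsOptions` keeps its explicit parameters but now defaults them from the new `WRANGLER_HTTPS_KEY_PATH` and `WRANGLER_HTTPS_CERT_PATH` environment variables, still requiring both or neither. A minimal sketch of the same default-from-environment pattern; the return shape and error wording are illustrative only:

    // Explicit arguments win; environment variables fill in when the caller passes
    // nothing; the pair must be complete or absent.
    function getHttpsOptionsSketch(
      keyPath: string | undefined = process.env.WRANGLER_HTTPS_KEY_PATH,
      certPath: string | undefined = process.env.WRANGLER_HTTPS_CERT_PATH
    ) {
      if (keyPath !== undefined || certPath !== undefined) {
        if (keyPath === undefined || certPath === undefined) {
          throw new Error("Provide both a custom HTTPS key and certificate, or neither.");
        }
        return { keyPath, certPath };
      }
      return undefined; // caller falls back to a locally generated dev certificate
    }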
@@ -207100,7 +207144,7 @@ async function createRemoteWorkerInit(props) {
207100
207144
  // no tail consumers in dev - TODO revisit?
207101
207145
  limits: void 0,
207102
207146
  // no limits in preview - not supported yet but can be added
207103
- experimental_assets: void 0
207147
+ experimental_assets_jwt: void 0
207104
207148
  // no remote mode for assets
207105
207149
  };
207106
207150
  return init3;
@@ -209425,7 +209469,8 @@ use --persist-to=./wrangler-local-state to keep using the old path.`
209425
209469
  dispatch_namespaces: void 0,
209426
209470
  mtls_certificates: void 0,
209427
209471
  logfwdr: void 0,
209428
- unsafe: void 0
209472
+ unsafe: void 0,
209473
+ experimental_assets: void 0
209429
209474
  })
209430
209475
  },
209431
209476
  dev: {
@@ -210040,7 +210085,8 @@ function getBindings(configParam, env7, local, args) {
210040
210085
  capnp: configParam.unsafe.capnp
210041
210086
  },
210042
210087
  mtls_certificates: configParam.mtls_certificates,
210043
- send_email: configParam.send_email
210088
+ send_email: configParam.send_email,
210089
+ experimental_assets: configParam.experimental_assets?.binding ? { binding: configParam.experimental_assets?.binding } : void 0
210044
210090
  };
210045
210091
  return bindings;
210046
210092
  }
@@ -212814,4 +212860,3 @@ yargs-parser/build/lib/index.js:
  * SPDX-License-Identifier: ISC
  *)
  */
- //# sourceMappingURL=cli.js.map