wrangler 3.105.0 → 3.106.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +9 -9
- package/wrangler-dist/cli.js +141 -78
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "wrangler",
-  "version": "3.105.0",
+  "version": "3.106.0",
   "description": "Command-line interface for all things Cloudflare Workers",
   "keywords": [
     "wrangler",
@@ -55,15 +55,15 @@
     "blake3-wasm": "2.1.5",
     "esbuild": "0.17.19",
     "path-to-regexp": "6.3.0",
-    "unenv": "2.0.0-rc.
-    "workerd": "1.
+    "unenv": "2.0.0-rc.1",
+    "workerd": "1.20250124.0",
     "@cloudflare/kv-asset-handler": "0.3.4",
-    "miniflare": "3.
+    "miniflare": "3.20250124.0"
   },
   "devDependencies": {
     "@aws-sdk/client-s3": "^3.721.0",
     "@cloudflare/types": "6.18.4",
-    "@cloudflare/workers-types": "^4.
+    "@cloudflare/workers-types": "^4.20250121.0",
     "@cspotcode/source-map-support": "0.8.1",
     "@iarna/toml": "^3.0.0",
     "@microsoft/api-extractor": "^7.47.0",
@@ -137,14 +137,14 @@
     "xdg-app-paths": "^8.3.0",
     "xxhash-wasm": "^1.0.1",
     "yargs": "^17.7.2",
-    "@cloudflare/eslint-config-worker": "1.1.0",
-    "@cloudflare/pages-shared": "^0.13.2",
     "@cloudflare/cli": "1.1.1",
-    "@cloudflare/
+    "@cloudflare/pages-shared": "^0.13.3",
+    "@cloudflare/eslint-config-worker": "1.1.0",
+    "@cloudflare/workers-shared": "0.12.5",
     "@cloudflare/workers-tsconfig": "0.0.0"
   },
   "peerDependencies": {
-    "@cloudflare/workers-types": "^4.
+    "@cloudflare/workers-types": "^4.20250121.0"
   },
   "peerDependenciesMeta": {
     "@cloudflare/workers-types": {
package/wrangler-dist/cli.js
CHANGED
@@ -98108,7 +98108,7 @@ var import_undici3 = __toESM(require_undici());
 
 // package.json
 var name = "wrangler";
-var version = "3.105.0";
+var version = "3.106.0";
 
 // src/environment-variables/misc-variables.ts
 init_import_meta_url();
@@ -98229,22 +98229,24 @@ var getRegistryPath = getEnvironmentVariableFactory({
 // src/errors.ts
 init_import_meta_url();
 var UserError = class extends Error {
-
-
+  telemetryMessage;
+  constructor(message, options30) {
+    super(message, options30);
     Object.setPrototypeOf(this, new.target.prototype);
+    this.telemetryMessage = options30?.telemetryMessage === true ? message : options30?.telemetryMessage;
   }
 };
 __name(UserError, "UserError");
 var DeprecationError = class extends UserError {
-  constructor(message) {
+  constructor(message, options30) {
     super(`Deprecation:
-${message}`);
+${message}`, options30);
   }
 };
 __name(DeprecationError, "DeprecationError");
 var FatalError = class extends UserError {
-  constructor(message, code) {
-    super(message);
+  constructor(message, code, options30) {
+    super(message, options30);
     this.code = code;
   }
 };
@@ -98253,23 +98255,29 @@ var CommandLineArgsError = class extends UserError {
 };
 __name(CommandLineArgsError, "CommandLineArgsError");
 var JsonFriendlyFatalError = class extends FatalError {
-  constructor(message, code) {
-    super(message);
+  constructor(message, code, options30) {
+    super(message, code, options30);
     this.code = code;
   }
 };
 __name(JsonFriendlyFatalError, "JsonFriendlyFatalError");
 var MissingConfigError = class extends Error {
+  telemetryMessage;
   constructor(key) {
     super(`Missing config value for ${key}`);
+    this.telemetryMessage = `Missing config value for ${key}`;
   }
 };
 __name(MissingConfigError, "MissingConfigError");
-function createFatalError(message, isJson, code) {
+function createFatalError(message, isJson, code, telemetryMessage) {
   if (isJson) {
-    return new JsonFriendlyFatalError(
+    return new JsonFriendlyFatalError(
+      JSON.stringify(message),
+      code,
+      telemetryMessage
+    );
   } else {
-    return new FatalError(`${message}`, code);
+    return new FatalError(`${message}`, code, telemetryMessage);
   }
 }
 __name(createFatalError, "createFatalError");
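The src/errors.ts changes above add an opt-in telemetryMessage to wrangler's error hierarchy; it is what the metrics dispatch at the end of this diff reads back as errorMessage. A minimal sketch of the pattern, reconstructed from the bundled output above (the TypeScript types are illustrative, not wrangler's published declarations):

type TelemetryOptions = ErrorOptions & { telemetryMessage?: string | true };

class UserError extends Error {
  telemetryMessage: string | undefined;
  constructor(message?: string, options?: TelemetryOptions) {
    super(message, options);
    Object.setPrototypeOf(this, new.target.prototype);
    // `telemetryMessage: true` marks the user-facing message as safe to report
    // verbatim; a string supplies a sanitized stand-in instead.
    this.telemetryMessage =
      options?.telemetryMessage === true ? message : options?.telemetryMessage;
  }
}

// Call sites opt in per error, for example:
throw new UserError("The assets directory cannot be an empty string.", {
  telemetryMessage: true, // no user data in the message, report it as-is
});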
@@ -99959,8 +99967,8 @@ var ParseError = class extends UserError {
   notes;
   location;
   kind;
-  constructor({ text, notes, location, kind }) {
-    super(text);
+  constructor({ text, notes, location, kind, telemetryMessage }) {
+    super(text, { telemetryMessage });
     this.name = this.constructor.name;
     this.text = text;
     this.notes = notes ?? [];
@@ -100017,43 +100025,37 @@ function parseTOML(input, file) {
       file,
       fileText: input
     };
-    throw new ParseError({
+    throw new ParseError({
+      text,
+      location,
+      telemetryMessage: "TOML parse error"
+    });
   }
 }
 __name(parseTOML, "parseTOML");
-var JSON_ERROR_SUFFIX = " in JSON at position ";
 function parsePackageJSON(input, file) {
   return parseJSON(input, file);
 }
 __name(parsePackageJSON, "parsePackageJSON");
 function parseJSON(input, file) {
-
-
-
-
-
-    if (index < 0) {
-      throw err;
-    }
-    const text = message.substring(0, index);
-    const position = parseInt(
-      message.substring(index + JSON_ERROR_SUFFIX.length)
-    );
-    const location = indexLocation({ file, fileText: input }, position);
-    throw new ParseError({ text, location });
-  }
+  return parseJSONC(input, file, {
+    allowEmptyContent: false,
+    allowTrailingComma: false,
+    disallowComments: true
+  });
 }
 __name(parseJSON, "parseJSON");
-function parseJSONC(input, file) {
+function parseJSONC(input, file, options30 = { allowTrailingComma: true }) {
   const errors = [];
-  const data = parse2(input, errors,
+  const data = parse2(input, errors, options30);
   if (errors.length) {
     throw new ParseError({
       text: printParseErrorCode(errors[0].error),
       location: {
         ...indexLocation({ file, fileText: input }, errors[0].offset + 1),
         length: errors[0].length
-      }
+      },
+      telemetryMessage: "JSON(C) parse error"
     });
   }
   return data;
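parseJSON no longer reverse-engineers positions from the " in JSON at position " suffix of JSON.parse errors; it now delegates to parseJSONC (the jsonc-parser package, aliased as parse2/printParseErrorCode in the bundle) with comments and trailing commas disallowed. A hedged sketch of the equivalent behavior using jsonc-parser directly:

import { parse, printParseErrorCode, type ParseError as JSONCError } from "jsonc-parser";

// Illustrative stand-in for wrangler's parseJSON after this change.
function parseStrictJSON(input: string, file?: string): unknown {
  const errors: JSONCError[] = [];
  const data = parse(input, errors, {
    allowEmptyContent: false,
    allowTrailingComma: false,
    disallowComments: true, // plain .json files: comments become parse errors
  });
  if (errors.length > 0) {
    const { error, offset } = errors[0];
    // wrangler wraps this in its ParseError with a file/offset location and a
    // "JSON(C) parse error" telemetryMessage; a plain Error keeps the sketch small.
    throw new Error(`${printParseErrorCode(error)} in ${file ?? "<input>"} at offset ${offset}`);
  }
  return data;
}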
@@ -100086,7 +100088,8 @@ function readFileSync5(file) {
       {
         text: message.replace(file, (0, import_node_path4.resolve)(file))
       }
-      ]
+      ],
+      telemetryMessage: "Could not read file"
     });
   }
 }
@@ -100773,10 +100776,7 @@ function findRedirectedWranglerConfig(cwd2, userConfigPath) {
   let redirectedConfigPath;
   const deployConfigFile = readFileSync5(deployConfigPath);
   try {
-    const deployConfig = parseJSONC(
-      deployConfigFile,
-      deployConfigPath
-    );
+    const deployConfig = parseJSONC(deployConfigFile, deployConfigPath);
     redirectedConfigPath = deployConfig.configPath && import_node_path7.default.resolve(import_node_path7.default.dirname(deployConfigPath), deployConfig.configPath);
   } catch (e7) {
     throw new UserError(
@@ -106852,7 +106852,9 @@ init_import_meta_url();
 var import_prompts = __toESM(require_prompts3());
 var NoDefaultValueProvided = class extends UserError {
   constructor() {
-    super("This command cannot be run in a non-interactive context");
+    super("This command cannot be run in a non-interactive context", {
+      telemetryMessage: true
+    });
     Object.setPrototypeOf(this, new.target.prototype);
   }
 };
@@ -117455,7 +117457,7 @@ async function deleteCORSPolicy(accountId, bucketName, jurisdiction) {
 }
 __name(deleteCORSPolicy, "deleteCORSPolicy");
 function isValidR2BucketName(name2) {
-  return typeof name2 === "string" && /^[a-
+  return typeof name2 === "string" && /^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$/.test(name2);
 }
 __name(isValidR2BucketName, "isValidR2BucketName");
 var CHUNK_SIZE = 1024;
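The tightened check above encodes the naming rules in a single regex: 3 to 63 characters, lowercase letters, digits, and hyphens only, starting and ending with an alphanumeric character. For reference:

const isValidR2BucketName = (name: unknown): boolean =>
  typeof name === "string" && /^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$/.test(name);

isValidR2BucketName("my-bucket-01"); // true
isValidR2BucketName("My_Bucket");    // false: uppercase and underscore
isValidR2BucketName("ab");           // false: shorter than 3 characters
isValidR2BucketName("-bucket-");     // false: must start and end alphanumeric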
@@ -118791,7 +118793,7 @@ init_import_meta_url();
 var import_node_buffer2 = require("node:buffer");
 var import_node_zlib = require("node:zlib");
 var ONE_KIB_BYTES = 1024;
-var
+var MAX_GZIP_SIZE_BYTES = 3 * ONE_KIB_BYTES * ONE_KIB_BYTES;
 async function getSize(modules) {
   const gzipSize = (0, import_node_zlib.gzipSync)(
     await new import_node_buffer2.Blob(modules.map((file) => file.content)).arrayBuffer()
@@ -118803,7 +118805,7 @@ __name(getSize, "getSize");
 async function printBundleSize(main2, modules) {
   const { size, gzipSize } = await getSize([...modules, main2]);
   const bundleReport = `${(size / ONE_KIB_BYTES).toFixed(2)} KiB / gzip: ${(gzipSize / ONE_KIB_BYTES).toFixed(2)} KiB`;
-  const percentage = gzipSize /
+  const percentage = gzipSize / MAX_GZIP_SIZE_BYTES * 100;
   const colorizedReport = percentage > 90 ? source_default.red(bundleReport) : percentage > 70 ? source_default.yellow(bundleReport) : source_default.green(bundleReport);
   logger.log(`Total Upload: ${colorizedReport}`);
 }
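With MAX_GZIP_SIZE_BYTES in place, the upload report is colorized against a 3 MiB gzip ceiling. A small sketch of the same arithmetic (color names stand in for the chalk calls in the bundle):

const ONE_KIB_BYTES = 1024;
const MAX_GZIP_SIZE_BYTES = 3 * ONE_KIB_BYTES * ONE_KIB_BYTES; // 3 MiB

function bundleReportColor(gzipSize: number): "red" | "yellow" | "green" {
  const percentage = (gzipSize / MAX_GZIP_SIZE_BYTES) * 100;
  return percentage > 90 ? "red" : percentage > 70 ? "yellow" : "green";
}

bundleReportColor(2.8 * 1024 * 1024); // "red"   (about 93% of the limit)
bundleReportColor(1024 * 1024);       // "green" (about 33%)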
@@ -120867,7 +120869,11 @@ async function retryOnAPIFailure(action, backoff = 0, attempts = MAX_ATTEMPTS) {
   try {
     return await action();
   } catch (err) {
-    if (err instanceof APIError
+    if (err instanceof APIError) {
+      if (!err.isRetryable()) {
+        throw err;
+      }
+    } else if (!(err instanceof TypeError)) {
       throw err;
     }
     logger.info(source_default.dim(`Retrying API call after error...`));
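retryOnAPIFailure now asks an APIError whether the failure is worth retrying instead of treating every APIError the same; TypeErrors (fetch-level network failures) still retry, and anything else is rethrown immediately. A hedged sketch of that gate, with an illustrative APIError stub and loop shape (wrangler's real backoff and MAX_ATTEMPTS handling are outside this hunk):

class APIError extends Error {
  constructor(message: string, readonly status?: number) {
    super(message);
  }
  // Illustrative policy: retry server-side failures, give up on client errors.
  isRetryable(): boolean {
    return this.status === undefined || this.status >= 500;
  }
}

async function retryOnAPIFailure<T>(
  action: () => Promise<T>,
  backoffMs = 0,
  attempts = 3
): Promise<T> {
  for (let attempt = 1; ; attempt++) {
    try {
      return await action();
    } catch (err) {
      const retryable =
        (err instanceof APIError && err.isRetryable()) || err instanceof TypeError;
      if (!retryable || attempt >= attempts) {
        throw err;
      }
      await new Promise((resolve) => setTimeout(resolve, backoffMs * attempt));
    }
  }
}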
@@ -122854,7 +122860,8 @@ var syncAssets = /* @__PURE__ */ __name(async (accountId, assetDirectory, script
   if (!initializeAssetsResponse.jwt) {
     throw new FatalError(
       "Could not find assets information to attach to deployment. Please try again.",
-      1
+      1,
+      { telemetryMessage: true }
     );
   }
   logger.info(`No files to upload. Proceeding with deployment...`);
@@ -122874,7 +122881,10 @@ var syncAssets = /* @__PURE__ */ __name(async (accountId, assetDirectory, script
   if (manifestEntry === void 0) {
     throw new FatalError(
       `A file was requested that does not appear to exist.`,
-      1
+      1,
+      {
+        telemetryMessage: "A file was requested that does not appear to exist. (asset manifest upload)"
+      }
     );
   }
   assetLogCount = logAssetUpload(`+ ${manifestEntry[0]}`, assetLogCount);
@@ -122945,7 +122955,9 @@ var syncAssets = /* @__PURE__ */ __name(async (accountId, assetDirectory, script
       throw new FatalError(
         `Upload took too long.
 Asset upload took too long on bucket ${bucketIndex + 1}/${initializeAssetsResponse.buckets.length}. Please try again.
-Assets already uploaded have been saved, so the next attempt will automatically resume from this point
+Assets already uploaded have been saved, so the next attempt will automatically resume from this point.`,
+        void 0,
+        { telemetryMessage: "Asset upload took too long" }
       );
     } else {
       throw e7;
@@ -122966,7 +122978,8 @@ Assets already uploaded have been saved, so the next attempt will automatically
   if (!completionJwt) {
     throw new FatalError(
       "Failed to complete asset upload. Please try again.",
-      1
+      1,
+      { telemetryMessage: true }
     );
   }
   const uploadMs = Date.now() - start;
@@ -122999,7 +123012,8 @@ var buildAssetManifest = /* @__PURE__ */ __name(async (dir) => {
     throw new UserError(
       `Maximum number of assets exceeded.
 Cloudflare Workers supports up to ${MAX_ASSET_COUNT2.toLocaleString()} assets in a version. We found ${counter.toLocaleString()} files in the specified assets directory "${dir}".
-Ensure your assets directory contains a maximum of ${MAX_ASSET_COUNT2.toLocaleString()} files, and that you have specified your assets directory correctly
+Ensure your assets directory contains a maximum of ${MAX_ASSET_COUNT2.toLocaleString()} files, and that you have specified your assets directory correctly.`,
+      { telemetryMessage: "Maximum number of assets exceeded" }
     );
   }
   if (filestat.size > MAX_ASSET_SIZE2) {
@@ -123016,7 +123030,8 @@ Cloudflare Workers supports assets with sizes of up to ${prettyBytes(
         binary: true
       }
     )}.
-Ensure all assets in your assets directory "${dir}" conform with the Workers maximum size requirement
+Ensure all assets in your assets directory "${dir}" conform with the Workers maximum size requirement.`,
+      { telemetryMessage: "Asset too large" }
     );
   }
   manifest[normalizeFilePath(relativeFilepath)] = {
@@ -123055,11 +123070,14 @@ function getAssetsOptions(args, config) {
   const { directory, binding } = assets;
   if (directory === void 0) {
     throw new UserError(
-      "The `assets` property in your configuration is missing the required `directory` property."
+      "The `assets` property in your configuration is missing the required `directory` property.",
+      { telemetryMessage: true }
     );
   }
   if (directory === "") {
-    throw new UserError("`The assets directory cannot be an empty string.");
+    throw new UserError("`The assets directory cannot be an empty string.", {
+      telemetryMessage: true
+    });
   }
   const assetsBasePath = getAssetsBasePath(config, args.assets);
   const resolvedAssetsPath = path32.resolve(assetsBasePath, directory);
@@ -123067,7 +123085,10 @@
   const sourceOfTruthMessage = args.assets ? '"--assets" command line argument' : '"assets.directory" field in your configuration file';
   throw new UserError(
     `The directory specified by the ${sourceOfTruthMessage} does not exist:
-${resolvedAssetsPath}
+${resolvedAssetsPath}`,
+    {
+      telemetryMessage: `The assets directory specified does not exist`
+    }
   );
 }
 const routingConfig = {
@@ -123090,7 +123111,10 @@ __name(getAssetsOptions, "getAssetsOptions");
 function validateAssetsArgsAndConfig(args, config) {
   if ("legacy" in args ? args.assets && args.legacy.legacyAssets : (args.assets || config?.assets) && (args?.legacyAssets || config?.legacy_assets)) {
     throw new UserError(
-      "Cannot use assets and legacy assets in the same Worker.\nPlease remove either the `legacy_assets` or `assets` field from your configuration file."
+      "Cannot use assets and legacy assets in the same Worker.\nPlease remove either the `legacy_assets` or `assets` field from your configuration file.",
+      {
+        telemetryMessage: "Cannot use assets and legacy assets in the same Worker"
+      }
     );
   }
   if ("legacy" in args ? args.assets && args.legacy.site : (args.assets || config?.assets) && (args.site || config?.site)) {
@@ -123104,7 +123128,8 @@ function validateAssetsArgsAndConfig(args, config) {
   );
   if ("legacy" in args ? args.entrypoint === noOpEntrypoint && args.assets?.binding : !(args.script || config?.main) && config?.assets?.binding) {
     throw new UserError(
-      "Cannot use assets with a binding in an assets-only Worker.\nPlease remove the asset binding from your configuration file, or provide a Worker script in your configuration file (`main`)."
+      "Cannot use assets with a binding in an assets-only Worker.\nPlease remove the asset binding from your configuration file, or provide a Worker script in your configuration file (`main`).",
+      { telemetryMessage: true }
     );
   }
   if (config?.placement?.mode === "smart" && config?.assets?.experimental_serve_directly === false) {
@@ -123119,7 +123144,8 @@ function validateAssetsArgsAndConfig(args, config) {
   }
   if ("legacy" in args ? args.entrypoint === noOpEntrypoint && args.assets?.assetConfig?.serve_directly === false : !config?.main && config?.assets?.experimental_serve_directly === false) {
     throw new UserError(
-      "Cannot set experimental_serve_directly=false without a Worker script.\nPlease remove experimental_serve_directly from your configuration file, or provide a Worker script in your configuration file (`main`)."
+      "Cannot set experimental_serve_directly=false without a Worker script.\nPlease remove experimental_serve_directly from your configuration file, or provide a Worker script in your configuration file (`main`).",
+      { telemetryMessage: true }
     );
   }
 }
@@ -123139,12 +123165,15 @@ function errorOnLegacyPagesWorkerJSAsset(file, hasAssetsIgnoreFile) {
   if (!hasAssetsIgnoreFile) {
     const workerJsType = file === "_worker.js" ? "file" : file.startsWith("_worker.js") ? "directory" : null;
     if (workerJsType !== null) {
-      throw new UserError(
+      throw new UserError(
+        dedent2`
       Uploading a Pages _worker.js ${workerJsType} as an asset.
       This could expose your private server-side code to the public Internet. Is this intended?
       If you do not want to upload this ${workerJsType}, either remove it or add an "${CF_ASSETS_IGNORE_FILENAME}" file, to the root of your asset directory, containing "_worker.js" to avoid uploading.
       If you do want to upload this ${workerJsType}, you can add an empty "${CF_ASSETS_IGNORE_FILENAME}" file, to the root of your asset directory, to hide this error.
-
+        `,
+        { telemetryMessage: true }
+      );
     }
   }
 }
@@ -124339,7 +124368,7 @@ function getBindings2(configParam, env7, local, args) {
   );
   const hyperdriveBindings = configParam.hyperdrive.map((hyperdrive2) => {
     const connectionStringFromEnv = process.env[`WRANGLER_HYPERDRIVE_LOCAL_CONNECTION_STRING_${hyperdrive2.binding}`];
-    if (
+    if (local && connectionStringFromEnv === void 0 && hyperdrive2.localConnectionString === void 0) {
       throw new UserError(
         `When developing locally, you should use a local Postgres connection string to emulate Hyperdrive functionality. Please setup Postgres locally and set the value of the 'WRANGLER_HYPERDRIVE_LOCAL_CONNECTION_STRING_${hyperdrive2.binding}' variable or "${hyperdrive2.binding}"'s "localConnectionString" to the Postgres connection string.`
       );
@@ -140243,7 +140272,9 @@ var Handler6 = /* @__PURE__ */ __name(async (args) => {
   if (file && command2) {
     throw createFatalError(
       `Error: can't provide both --command and --file.`,
-      json
+      json,
+      void 0,
+      { telemetryMessage: true }
     );
   }
   const isInteractive3 = process.stdout.isTTY;
@@ -141857,7 +141888,8 @@ async function deleteHandler(args) {
   const config = readConfig(args);
   if (config.pages_build_output_dir) {
     throw new UserError(
-      "It looks like you've run a Workers-specific command in a Pages project.\nFor Pages, please run `wrangler pages project delete` instead."
+      "It looks like you've run a Workers-specific command in a Pages project.\nFor Pages, please run `wrangler pages project delete` instead.",
+      { telemetryMessage: true }
     );
   }
   sendMetricsEvent(
@@ -141869,7 +141901,10 @@ async function deleteHandler(args) {
   const scriptName = getScriptName(args, config);
   if (!scriptName) {
     throw new UserError(
-      `A worker name must be defined, either via --name, or in your ${configFileName(config.configPath)} file
+      `A worker name must be defined, either via --name, or in your ${configFileName(config.configPath)} file`,
+      {
+        telemetryMessage: "`A worker name must be defined, either via --name, or in your config file"
+      }
     );
   }
   if (args.dryRun) {
@@ -142203,7 +142238,7 @@ function deployOptions(yargs) {
   }).option("experimental-auto-create", {
     describe: "Automatically provision draft bindings with new resources",
     type: "boolean",
-    default:
+    default: true,
     hidden: true,
     alias: "x-auto-create"
   });
@@ -146627,6 +146662,12 @@ function prettyPrintLogs(data) {
     logger.log(
       `Queue ${queueName} (${batchSizeMsg}) - ${outcome} @ ${datetime}`
     );
+  } else if (isRpcEvent(eventMessage2.event)) {
+    const outcome = prettifyOutcome(eventMessage2.outcome);
+    const datetime = new Date(eventMessage2.eventTimestamp).toLocaleString();
+    logger.log(
+      `${eventMessage2.entrypoint}.${eventMessage2.event.rpcMethod} - ${outcome} @ ${datetime}`
+    );
   } else {
     const outcome = prettifyOutcome(eventMessage2.outcome);
     const datetime = new Date(eventMessage2.eventTimestamp).toLocaleString();
@@ -146664,6 +146705,10 @@ function isQueueEvent(event) {
   return Boolean(event && "queue" in event);
 }
 __name(isQueueEvent, "isQueueEvent");
+function isRpcEvent(event) {
+  return Boolean(event && "rpcMethod" in event);
+}
+__name(isRpcEvent, "isRpcEvent");
 function isAlarmEvent(event) {
   return Boolean(event && "scheduledTime" in event && !("cron" in event));
 }
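wrangler tail's pretty printer now recognizes JSRPC invocations: a tail event that carries an rpcMethod is logged as "<entrypoint>.<rpcMethod> - <outcome> @ <time>". A small sketch of the detection and formatting (the interfaces are illustrative; wrangler also runs the outcome through prettifyOutcome):

interface RpcEvent {
  rpcMethod: string;
}

interface TailEventMessage {
  event: unknown;
  entrypoint?: string;
  outcome: string;
  eventTimestamp: number;
}

function isRpcEvent(event: unknown): event is RpcEvent {
  return typeof event === "object" && event !== null && "rpcMethod" in event;
}

function formatRpcLine(msg: TailEventMessage): string | undefined {
  if (!isRpcEvent(msg.event)) {
    return undefined;
  }
  const datetime = new Date(msg.eventTimestamp).toLocaleString();
  return `${msg.entrypoint}.${msg.event.rpcMethod} - ${msg.outcome} @ ${datetime}`;
}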
@@ -152879,11 +152924,21 @@ async function authorizeR2Bucket(pipelineName, accountId, bucketName) {
   });
   !__testSkipDelaysFlag && await retryOnAPIFailure(
     async () => {
-
-
-
-
-
+      try {
+        await r22.send(
+          new HeadBucketCommand({
+            Bucket: bucketName
+          })
+        );
+      } catch (err) {
+        if (err instanceof Error && err.name === "401") {
+          throw new AuthAPIError({
+            status: 401,
+            text: "R2 HeadBucket request failed with status: 401"
+          });
+        }
+        throw err;
+      }
     },
     1e3,
     10
@@ -152891,6 +152946,12 @@
   return serviceToken;
 }
 __name(authorizeR2Bucket, "authorizeR2Bucket");
+var AuthAPIError = class extends APIError {
+  isRetryable() {
+    return true;
+  }
+};
+__name(AuthAPIError, "AuthAPIError");
 function getAccountR2Endpoint(accountId) {
   return `https://${accountId}.r2.cloudflarestorage.com`;
 }
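AuthAPIError exists so that the HeadBucket 401 handled above keeps the pipeline setup polling: newly created R2 credentials can take a moment to become usable, and wrapping the 401 in an always-retryable APIError lets retryOnAPIFailure (see the earlier hunk) keep trying with the backoff and attempt count passed above. A hedged sketch, with illustrative stand-ins for wrangler's error classes; the @aws-sdk/client-s3 calls mirror the ones in the diff:

import { HeadBucketCommand, S3Client } from "@aws-sdk/client-s3";

// Minimal stand-ins for wrangler's APIError hierarchy (shapes are illustrative).
class APIError extends Error {
  isRetryable(): boolean {
    return false;
  }
}

class AuthAPIError extends APIError {
  override isRetryable(): boolean {
    return true; // credential propagation delay: always worth another attempt
  }
}

async function headBucketTreating401AsTransient(r2: S3Client, bucketName: string): Promise<void> {
  try {
    await r2.send(new HeadBucketCommand({ Bucket: bucketName }));
  } catch (err) {
    // The bundled code checks err.name === "401" and rewraps it so the retry
    // helper treats the failure as transient rather than fatal.
    if (err instanceof Error && err.name === "401") {
      throw new AuthAPIError("R2 HeadBucket request failed with status: 401");
    }
    throw err;
  }
}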
@@ -154432,7 +154493,7 @@ var r2BucketCreateCommand = createCommand({
   const { name: name2, location, storageClass, jurisdiction } = args;
   if (!isValidR2BucketName(name2)) {
     throw new UserError(
-      `The bucket name "${name2}" is invalid. Bucket names can only
+      `The bucket name "${name2}" is invalid. Bucket names must begin and end with an alphanumeric and can only contain letters (a-z), numbers (0-9), and hyphens (-).`
     );
   }
   if (jurisdiction && location) {
@@ -154691,10 +154752,7 @@ var r2BucketCORSSetCommand = createCommand({
   async handler({ bucket, file, jurisdiction, force }, { config }) {
     const accountId = await requireAuth(config);
     const jsonFilePath = import_node_path65.default.resolve(file);
-    const corsConfig = parseJSON(
-      readFileSync5(jsonFilePath),
-      jsonFilePath
-    );
+    const corsConfig = parseJSON(readFileSync5(jsonFilePath), jsonFilePath);
     if (!corsConfig.rules || !Array.isArray(corsConfig.rules)) {
       throw new UserError(
         `The CORS configuration file must contain a 'rules' array as expected by the request body of the CORS API: https://developers.cloudflare.com/api/operations/r2-put-bucket-cors-policy`
@@ -165651,7 +165709,8 @@ var tailCommand = createCommand({
       throw createFatalError(
         "Tail disconnected, exiting.",
         args.format === "json",
-        1
+        1,
+        { telemetryMessage: true }
       );
     }
     waitingForPong = true;
@@ -170888,7 +170947,9 @@ if (proxy) {
 function createCLIParser(argv) {
   const wrangler = yargs_default(argv).strict().showHelpOnFail(false).fail((msg, error2) => {
     if (!error2 || error2.name === "YError") {
-      error2 = new CommandLineArgsError(msg);
+      error2 = new CommandLineArgsError(msg, {
+        telemetryMessage: "yargs validation error"
+      });
     }
     throw error2;
   }).scriptName("wrangler").wrap(null).locale("en_US").version(false).option("v", {
@@ -170907,7 +170968,7 @@ function createCLIParser(argv) {
     )
   ).option("env", {
     alias: "e",
-    describe: "Environment to use for operations and .env files",
+    describe: "Environment to use for operations, and for selecting .env and .dev.vars files",
     type: "string",
     requiresArg: true
   }).check(demandSingleValue("env")).option("experimental-json-config", {
@@ -170920,7 +170981,8 @@ function createCLIParser(argv) {
   }).check((args) => {
     if (args["experimental-json-config"] === false) {
       throw new CommandLineArgsError(
-        `Wrangler now supports wrangler.json configuration files by default and ignores the value of the \`--experimental-json-config\` flag
+        `Wrangler now supports wrangler.json configuration files by default and ignores the value of the \`--experimental-json-config\` flag.`,
+        { telemetryMessage: true }
       );
     }
     return true;
@@ -171649,7 +171711,8 @@ ${tryRunningItIn}${oneOfThese}`
       durationMs,
       durationSeconds: durationMs / 1e3,
       durationMinutes: durationMs / 1e3 / 60,
-      errorType: errorType ?? (e7 instanceof Error ? e7.constructor.name : void 0)
+      errorType: errorType ?? (e7 instanceof Error ? e7.constructor.name : void 0),
+      errorMessage: e7 instanceof UserError ? e7.telemetryMessage : void 0
     },
     argv
   );