wrangler 3.72.0 → 3.72.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/config-schema.json +9 -0
- package/package.json +8 -8
- package/templates/startDevWorker/ProxyWorker.ts +22 -22
- package/wrangler-dist/ProxyWorker.js +21 -22
- package/wrangler-dist/ProxyWorker.js.map +1 -1
- package/wrangler-dist/cli.d.ts +4 -1
- package/wrangler-dist/cli.js +207 -205
- package/wrangler-dist/cli.js.map +3 -3
package/wrangler-dist/cli.js
CHANGED
@@ -93993,8 +93993,8 @@ function $asStringSmall (str) {
   }
   let code = "return schema";
   if (ref[1]) {
-    const
-    if (
+    const walk = ref[1].split("/");
+    if (walk.length === 1) {
       const targetId = `#${ref[1]}`;
       let dereferenced = idFinder(schema, targetId);
       if (dereferenced === void 0 && !ref[0]) {
@@ -94012,8 +94012,8 @@ function $asStringSmall (str) {
         externalSchema
       };
     } else {
-      for (var i = 1; i <
-      code += `[${JSON.stringify(
+      for (var i = 1; i < walk.length; i++) {
+        code += `[${JSON.stringify(walk[i])}]`;
       }
     }
   }
@@ -94023,8 +94023,8 @@ function $asStringSmall (str) {
   } catch (err) {
   }
   if (result === void 0 && ref[1]) {
-    const
-    findBadKey(schema,
+    const walk = ref[1].split("/");
+    findBadKey(schema, walk.slice(1));
   }
   if (result.$ref) {
     return refFinder(result.$ref, {
@@ -152693,7 +152693,7 @@ init_import_meta_url();
 init_import_meta_url();
 
 // package.json
-var version = "3.72.0";
+var version = "3.72.2";
 var package_default = {
   name: "wrangler",
   version,
@@ -152781,7 +152781,7 @@ var package_default = {
     selfsigned: "^2.0.1",
     "source-map": "^0.6.1",
     unenv: "npm:unenv-nightly@1.10.0-1717606461.a117952",
-    workerd: "1.
+    workerd: "1.20240821.1",
     "xxhash-wasm": "^1.0.1"
   },
   devDependencies: {
@@ -152790,7 +152790,7 @@ var package_default = {
     "@cloudflare/pages-shared": "workspace:^",
     "@cloudflare/types": "^6.18.4",
     "@cloudflare/workers-tsconfig": "workspace:*",
-    "@cloudflare/workers-types": "^4.
+    "@cloudflare/workers-types": "^4.20240821.1",
     "@cspotcode/source-map-support": "0.8.1",
     "@iarna/toml": "^3.0.0",
     "@microsoft/api-extractor": "^7.47.0",
@@ -152873,7 +152873,7 @@ var package_default = {
     "yoga-layout": "file:../../vendor/yoga-layout-2.0.0-beta.1.tgz"
   },
   peerDependencies: {
-    "@cloudflare/workers-types": "^4.
+    "@cloudflare/workers-types": "^4.20240821.1"
   },
   peerDependenciesMeta: {
     "@cloudflare/workers-types": {
@@ -156346,23 +156346,29 @@ Please add a binding for "${configBindingName}" to "env.${envName}.${field}.bind
   return isValid;
 }, "validateBindingArray");
 var validateCloudchamberConfig = /* @__PURE__ */ __name((diagnostics, field, value) => {
-  if (typeof value !== "object" || value === null) {
+  if (typeof value !== "object" || value === null || Array.isArray(value)) {
     diagnostics.errors.push(
       `"cloudchamber" should be an object, but got ${JSON.stringify(value)}`
     );
     return false;
   }
+  const optionalAttrsByType = {
+    string: ["memory", "image", "location"],
+    boolean: ["ipv4"],
+    number: ["vcpu"]
+  };
   let isValid = true;
-
-
-
-
-
-
-
-
-
-
+  Object.entries(optionalAttrsByType).forEach(([attrType, attrNames]) => {
+    attrNames.forEach((key) => {
+      if (!isOptionalProperty(value, key, attrType)) {
+        diagnostics.errors.push(
+          `"${field}" bindings should, optionally, have a ${attrType} "${key}" field but got ${JSON.stringify(
+            value
+          )}.`
+        );
+        isValid = false;
+      }
+    });
   });
   return isValid;
 }, "validateCloudchamberConfig");
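The tightened check above now rejects arrays and validates each optional field against a per-type allow-list. As a rough illustration (my own sketch, not taken from the package), a `cloudchamber` config value that passes the new validation could look like this; the concrete values are made up:

```ts
// Sketch of a config value accepted by the stricter validateCloudchamberConfig.
// Every field is optional, but when present it must have the type listed in
// optionalAttrsByType above. All values here are illustrative.
const cloudchamber = {
  image: "docker.io/org/app:1.2", // string
  location: "sfo06",              // string (example location slug)
  memory: "2GB",                  // string
  vcpu: 2,                        // number
  ipv4: true                      // boolean
};
```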
@@ -158102,86 +158108,42 @@ function handleAliasedNodeJSPackages(build5, alias, external) {
 __name(handleAliasedNodeJSPackages, "handleAliasedNodeJSPackages");
 function handleNodeJSGlobals(build5, inject) {
   const UNENV_GLOBALS_RE = /_virtual_unenv_global_polyfill-([^.]+)\.js$/;
+  const prefix = import_node_path8.default.resolve(
+    getBasePath(),
+    "_virtual_unenv_global_polyfill-"
+  );
   build5.initialOptions.inject = [
     ...build5.initialOptions.inject ?? [],
     //convert unenv's inject keys to absolute specifiers of custom virtual modules that will be provided via a custom onLoad
     ...Object.keys(inject).map(
-      (globalName) =>
-        getBasePath(),
-        `_virtual_unenv_global_polyfill-${encodeToLowerCase(globalName)}.js`
-      )
+      (globalName) => `${prefix}${encodeToLowerCase(globalName)}.js`
     )
   ];
   build5.onResolve({ filter: UNENV_GLOBALS_RE }, ({ path: path74 }) => ({ path: path74 }));
   build5.onLoad({ filter: UNENV_GLOBALS_RE }, ({ path: path74 }) => {
     const globalName = decodeFromLowerCase(path74.match(UNENV_GLOBALS_RE)[1]);
-    const
-    if (typeof globalMapping === "string") {
-      const globalPolyfillSpecifier = globalMapping;
-      return {
-        contents: `
-          import globalVar from "${globalPolyfillSpecifier}";
-
-          ${/*
-            // ESBuild's inject doesn't actually touch globalThis, so let's do it ourselves
-            // by creating an exportable so that we can preserve the globalThis assignment if
-            // the ${globalName} was found in the app, or tree-shake it, if it wasn't
-            // see https://esbuild.github.io/api/#inject
-            */
-          ""}
-          const exportable =
-            ${/*
-              // mark this as a PURE call so it can be ignored and tree-shaken by ESBuild,
-              // when we don't detect 'process', 'global.process', or 'globalThis.process'
-              // in the app code
-              // see https://esbuild.github.io/api/#tree-shaking-and-side-effects
-              */
-          ""}
-            /* @__PURE__ */ (() => {
-              return globalThis.${globalName} = globalVar;
-            })();
-
-          export {
-            exportable as '${globalName}',
-            exportable as 'globalThis.${globalName}',
-          }
-        `
-      };
-    }
-    const [moduleName, exportName] = inject[globalName];
+    const { importStatement, exportName } = getGlobalInject(inject[globalName]);
     return {
-      contents:
-
-
-        ${/*
-          // ESBuild's inject doesn't actually touch globalThis, so let's do it ourselves
-          // by creating an exportable so that we can preserve the globalThis assignment if
-          // the ${globalName} was found in the app, or tree-shake it, if it wasn't
-          // see https://esbuild.github.io/api/#inject
-          */
-        ""}
-        const exportable =
-          ${/*
-            // mark this as a PURE call so it can be ignored and tree-shaken by ESBuild,
-            // when we don't detect 'process', 'global.process', or 'globalThis.process'
-            // in the app code
-            // see https://esbuild.github.io/api/#tree-shaking-and-side-effects
-            */
-        ""}
-          /* @__PURE__ */ (() => {
-            return globalThis.${globalName} = ${exportName};
-          })();
-
-        export {
-          exportable as '${globalName}',
-          exportable as 'global.${globalName}',
-          exportable as 'globalThis.${globalName}'
-        }
-      `
+      contents: `${importStatement}
+      globalThis.${globalName} = ${exportName};`
     };
   });
 }
 __name(handleNodeJSGlobals, "handleNodeJSGlobals");
+function getGlobalInject(globalInject) {
+  if (typeof globalInject === "string") {
+    return {
+      importStatement: `import globalVar from "${globalInject}";`,
+      exportName: "globalVar"
+    };
+  }
+  const [moduleSpecifier, exportName] = globalInject;
+  return {
+    importStatement: `import { ${exportName} } from "${moduleSpecifier}";`,
+    exportName
+  };
+}
+__name(getGlobalInject, "getGlobalInject");
 function encodeToLowerCase(str) {
   return str.replaceAll(/\$/g, () => "$$").replaceAll(/[A-Z]/g, (letter) => `$${letter.toLowerCase()}`);
 }
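For context, the refactor above folds the two duplicated template strings into `getGlobalInject`, which accepts either form of an unenv inject entry. A small sketch of the two shapes and the virtual module they now produce (the module names below are illustrative, not taken from unenv):

```ts
// Two shapes of inject entries handled by getGlobalInject (illustrative values).
const injectEntries: Record<string, string | [string, string]> = {
  process: "some-process-polyfill",          // string -> default import
  Buffer: ["some-buffer-polyfill", "Buffer"] // [module, export] -> named import
};

// For the tuple case the generated virtual polyfill module is now just:
//   import { Buffer } from "some-buffer-polyfill";
//   globalThis.Buffer = Buffer;
```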
@@ -159536,9 +159498,6 @@ function validateNodeCompat({
     node_compat: legacyNodeCompat
   });
   const nodejsCompatV2NotExperimental = compatibilityFlags.includes("nodejs_compat_v2");
-  if (nodejsCompatV2) {
-    compatibilityFlags[compatibilityFlags.indexOf("experimental:nodejs_compat_v2")] = "nodejs_compat_v2";
-  }
   if (nodejsCompat && nodejsCompatV2) {
     throw new UserError(
       "The `nodejs_compat` and `nodejs_compat_v2` compatibility flags cannot be used in together. Please select just one."
@@ -159598,6 +159557,10 @@ function getNodeCompatMode({
   };
 }
 __name(getNodeCompatMode, "getNodeCompatMode");
+function stripExperimentalPrefixes(compatFlags) {
+  return compatFlags?.map((flag) => flag.replace(/^experimental:/, ""));
+}
+__name(stripExperimentalPrefixes, "stripExperimentalPrefixes");
 
 // src/dev-registry.ts
 init_import_meta_url();
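The new helper is what lets `experimental:`-prefixed compatibility flags flow through `wrangler dev` and uploads: the prefix is accepted from user config but stripped before the flags reach Miniflare or the Workers API. A minimal sketch of the behaviour:

```ts
// Mirrors the stripExperimentalPrefixes helper added above.
function stripExperimentalPrefixes(compatFlags?: string[]): string[] | undefined {
  return compatFlags?.map((flag) => flag.replace(/^experimental:/, ""));
}

// ["experimental:nodejs_compat_v2", "nodejs_als"] -> ["nodejs_compat_v2", "nodejs_als"]
console.log(stripExperimentalPrefixes(["experimental:nodejs_compat_v2", "nodejs_als"]));
```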
@@ -162900,6 +162863,9 @@ function handleRuntimeStdio(stdout2, stderr2) {
     },
     isCodeMovedWarning(chunk) {
       return /CODE_MOVED for unknown code block/.test(chunk);
+    },
+    isAccessViolation(chunk) {
+      return chunk.includes("access violation;");
     }
   };
   stdout2.on("data", (chunk) => {
@@ -162922,8 +162888,17 @@ function handleRuntimeStdio(stdout2, stderr2) {
       logger.error(
         `Address already in use (${address}). Please check that you are not already running a server on this address or specify a different port with --port.`
       );
+      logger.debug(chunk);
+    } else if (classifiers.isAccessViolation(chunk)) {
+      let error2 = "There was an access violation in the runtime.";
+      if (process.platform === "win32") {
+        error2 += "\nOn Windows, this may be caused by an outdated Microsoft Visual C++ Redistributable library.\nCheck that you have the latest version installed.\nSee https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist.";
+      }
+      logger.error(error2);
+      logger.debug(chunk);
+    } else {
+      logger.debug(chunk);
     }
-    logger.debug(chunk);
   } else if (classifiers.isWarning(chunk)) {
     logger.warn(chunk);
   } else if (classifiers.isCodeMovedWarning(chunk)) {
@@ -162979,7 +162954,9 @@ async function buildMiniflareOptions(log2, config, proxyToUserWorkerAuthenticati
     {
       name: getName(config),
       compatibilityDate: config.compatibilityDate,
-      compatibilityFlags:
+      compatibilityFlags: stripExperimentalPrefixes(
+        config.compatibilityFlags
+      ),
       ...sourceOptions,
       ...bindingOptions,
       ...sitesOptions,
@@ -163259,11 +163236,6 @@ function useLocalWorker(props) {
   });
   server.addEventListener("error", ({ error: error2 }) => {
     if (typeof error2 === "object" && error2 !== null && "code" in error2 && error2.code === "ERR_RUNTIME_FAILURE") {
-      if (process.platform === "win32") {
-        logger.error(
-          "Check that you have the latest Microsoft Visual C++ Redistributable library installed.\nSee https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist."
-        );
-      }
       logger.error(String(error2));
     } else {
       logger.error("Error reloading local server:", error2);
@@ -163921,7 +163893,9 @@ function createWorkerUploadForm(worker) {
     ...main2.type !== "commonjs" ? { main_module: main2.name } : { body_part: main2.name },
     bindings: metadataBindings,
     ...compatibility_date && { compatibility_date },
-    ...compatibility_flags && {
+    ...compatibility_flags && {
+      compatibility_flags: stripExperimentalPrefixes(compatibility_flags)
+    },
     ...migrations && { migrations },
     capnp_schema: capnpSchemaOutputFile,
     ...keep_bindings && { keep_bindings },
@@ -171334,9 +171308,16 @@ async function createCommand(args, config) {
   );
   const labels = collectLabels(args.label);
   if (!interactWithUser(args)) {
+    if (config.cloudchamber.image != void 0 && args.image == void 0) {
+      args.image = config.cloudchamber.image;
+    }
+    if (config.cloudchamber.location != void 0 && args.location == void 0) {
+      args.location = config.cloudchamber.location;
+    }
     const body = checkEverythingIsSet(args, ["image", "location"]);
     const keysToAdd = args.allSshKeys ? (await pollSSHKeysUntilCondition(() => true)).map((key) => key.id) : [];
-    const
+    const useIpv4 = args.ipv4 ?? config.cloudchamber.ipv4;
+    const network = useIpv4 === true ? { assign_ipv4: "predefined" /* PREDEFINED */ } : void 0;
     const deployment = await DeploymentsService.createDeploymentV2({
       image: body.image,
       location: body.location,
@@ -171422,7 +171403,8 @@ __name(askWhichSSHKeysDoTheyWantToAdd, "askWhichSSHKeysDoTheyWantToAdd");
 async function handleCreateCommand(args, config, environmentVariables, labels) {
   startSection("Create a Cloudflare container", "Step 1 of 2");
   const sshKeyID = await sshPrompts(args);
-  const
+  const givenImage = args.image ?? config.cloudchamber.image;
+  const image = await processArgument({ image: givenImage }, "image", {
     question: whichImageQuestion,
     label: "image",
     validate: (value) => {
@@ -171436,14 +171418,18 @@ async function handleCreateCommand(args, config, environmentVariables, labels) {
        return "we don't allow :latest tags";
      }
    },
-    defaultValue:
-    initialValue:
+    defaultValue: givenImage ?? "",
+    initialValue: givenImage ?? "",
    helpText: 'i.e. "docker.io/org/app:1.2", :latest tags are not allowed!',
    type: "text"
  });
-  const location = await getLocation2(
+  const location = await getLocation2({
+    location: args.location ?? config.cloudchamber.location
+  });
  const keys = await askWhichSSHKeysDoTheyWantToAdd(args, sshKeyID);
-  const network = await getNetworkInput(
+  const network = await getNetworkInput({
+    ipv4: args.ipv4 ?? config.cloudchamber.ipv4
+  });
  const selectedEnvironmentVariables = await promptForEnvironmentVariables(
    environmentVariables,
    [],
@@ -172125,8 +172111,8 @@ async function modifyCommand(modifyArgs, config) {
   const deployment = await DeploymentsService.modifyDeploymentV2(
     modifyArgs.deploymentId,
     {
-      image: modifyArgs.image,
-      location: modifyArgs.location,
+      image: modifyArgs.image ?? config.cloudchamber.image,
+      location: modifyArgs.location ?? config.cloudchamber.location,
       environment_variables: environmentVariables,
       labels,
       ssh_public_key_ids: modifyArgs.sshPublicKeyId,
@@ -172184,8 +172170,9 @@ async function handleModifyCommand(args, config) {
   startSection("Modify deployment");
   const deployment = await pickDeployment(args.deploymentId);
   const keys = await handleSSH(args, config, deployment);
+  const givenImage = args.image ?? config.cloudchamber.image;
   const imagePrompt = await processArgument(
-    { image:
+    { image: givenImage },
     "image",
     {
       question: modifyImageQuestion,
@@ -172198,14 +172185,17 @@ async function handleModifyCommand(args, config) {
        return "we don't allow :latest tags";
      }
    },
-    defaultValue:
-    initialValue:
+    defaultValue: givenImage ?? "",
+    initialValue: givenImage ?? "",
    helpText: "if you don't want to modify the image, press return",
    type: "text"
    }
  );
  const image = !imagePrompt ? void 0 : imagePrompt;
-  const locationPick = await getLocation2(
+  const locationPick = await getLocation2(
+    { location: args.location ?? config.cloudchamber.location },
+    { skipLocation: true }
+  );
  const location = locationPick === "Skip" ? void 0 : locationPick;
  const environmentVariables = collectEnvironmentVariables(
    deployment.environment_variables,
@@ -178582,7 +178572,7 @@ var validate = /* @__PURE__ */ __name(async (args) => {
     "**/.git"
   ].map((pattern) => new import_minimatch.Minimatch(pattern));
   const directory = (0, import_node_path33.resolve)(args.directory);
-  const
+  const walk = /* @__PURE__ */ __name(async (dir, fileMap2 = /* @__PURE__ */ new Map(), startingDir = dir) => {
     const files = await (0, import_promises11.readdir)(dir);
     await Promise.all(
       files.map(async (file) => {
@@ -178598,7 +178588,7 @@ var validate = /* @__PURE__ */ __name(async (args) => {
           return;
         }
         if (filestat.isDirectory()) {
-          fileMap2 = await
+          fileMap2 = await walk(filepath, fileMap2, startingDir);
         } else {
           const name = relativeFilepath.split(import_node_path33.sep).join("/");
           if (filestat.size > MAX_ASSET_SIZE) {
@@ -178624,7 +178614,7 @@ ${name} is ${prettyBytes(filestat.size, {
     );
     return fileMap2;
   }, "walk");
-  const fileMap = await
+  const fileMap = await walk(directory);
   if (fileMap.size > MAX_ASSET_COUNT) {
     throw new FatalError(
       `Error: Pages only supports up to ${MAX_ASSET_COUNT.toLocaleString()} files in a deployment. Ensure you have specified your build output directory correctly.`,
@@ -178929,14 +178919,14 @@ __name(Progress, "Progress");
 
 // src/experimental-assets.ts
 var BULK_UPLOAD_CONCURRENCY2 = 3;
-var MAX_ASSET_COUNT2 = 2e4;
-var MAX_ASSET_SIZE2 = 25 * 1024 * 1024;
 var MAX_UPLOAD_ATTEMPTS2 = 5;
 var MAX_UPLOAD_GATEWAY_ERRORS2 = 5;
+var MAX_ASSET_COUNT2 = 2e4;
+var MAX_ASSET_SIZE2 = 25 * 1024 * 1024;
 var syncExperimentalAssets = /* @__PURE__ */ __name(async (accountId, scriptName, assetDirectory) => {
   (0, import_node_assert15.default)(accountId, "Missing accountId");
   logger.info("\u{1F300} Building list of assets...");
-  const manifest = await
+  const manifest = await buildAssetsManifest(assetDirectory);
   logger.info("\u{1F300} Starting asset upload...");
   const initializeAssetsResponse = await fetchResult(
     `/accounts/${accountId}/workers/scripts/${scriptName}/assets-upload-session`,
@@ -179076,24 +179066,22 @@ Assets already uploaded have been saved, so the next attempt will automatically
   );
   return completionJwt;
 }, "syncExperimentalAssets");
-var
-  const files = await (0, import_promises13.readdir)(dir);
+var buildAssetsManifest = /* @__PURE__ */ __name(async (dir) => {
+  const files = await (0, import_promises13.readdir)(dir, { recursive: true });
+  const manifest = {};
   let counter = 0;
   await Promise.all(
     files.map(async (file) => {
       const filepath = path39.join(dir, file);
-      const relativeFilepath = path39.relative(
+      const relativeFilepath = path39.relative(dir, filepath);
       const filestat = await (0, import_promises13.stat)(filepath);
-      if (filestat.isSymbolicLink()) {
+      if (filestat.isSymbolicLink() || filestat.isDirectory()) {
         return;
-      }
-      if (filestat.isDirectory()) {
-        manifest = await walk(filepath, manifest, startingDir);
       } else {
         if (counter >= MAX_ASSET_COUNT2) {
           throw new UserError(
             `Maximum number of assets exceeded.
-Cloudflare Workers supports up to ${MAX_ASSET_COUNT2.toLocaleString()} assets in a version. We found ${counter.toLocaleString()} files in the specified assets directory "${
+Cloudflare Workers supports up to ${MAX_ASSET_COUNT2.toLocaleString()} assets in a version. We found ${counter.toLocaleString()} files in the specified assets directory "${dir}".
 Ensure your assets directory contains a maximum of ${MAX_ASSET_COUNT2.toLocaleString()} files, and that you have specified your assets directory correctly.`
           );
         }
@@ -179111,7 +179099,7 @@ Cloudflare Workers supports assets with sizes of up to ${prettyBytes(
             binary: true
           }
         )}.
-Ensure all assets in your assets directory "${
+Ensure all assets in your assets directory "${dir}" conform with the Workers maximum size requirement.`
         );
       }
       manifest[encodeFilePath(relativeFilepath)] = {
@@ -179123,7 +179111,7 @@ Ensure all assets in your assets directory "${startingDir}" conform with the Wor
     })
   );
   return manifest;
-}, "
+}, "buildAssetsManifest");
 var MAX_DIFF_LINES = 100;
 function logAssetUpload(line, diffCount) {
   const level = logger.loggerLevel;
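The rewritten `buildAssetsManifest` drops the hand-rolled recursive walk in favour of `readdir(dir, { recursive: true })` and simply skips directories and symlinks. A self-contained sketch of that pattern (size/count limits and manifest encoding omitted; `lstat` is used here so symlinks are detected without following them):

```ts
import { readdir, lstat } from "node:fs/promises";
import * as path from "node:path";

// List regular files under `dir`, as paths relative to `dir`, in one flat pass.
// Requires a Node version with recursive readdir support.
async function listAssetFiles(dir: string): Promise<string[]> {
  const entries = await readdir(dir, { recursive: true });
  const files: string[] = [];
  for (const entry of entries) {
    const stats = await lstat(path.join(dir, entry));
    if (!stats.isDirectory() && !stats.isSymbolicLink()) {
      files.push(entry);
    }
  }
  return files;
}
```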
@@ -181964,71 +181952,7 @@ function buildWorkerFromFunctions({
     doBindings: [],
     // Pages functions don't support internal Durable Objects
     external,
-    plugins: [
-      buildNotifierPlugin(onEnd),
-      {
-        name: "Assets",
-        setup(pluginBuild) {
-          const identifiers = /* @__PURE__ */ new Map();
-          pluginBuild.onResolve({ filter: /^assets:/ }, async (args) => {
-            const directory = (0, import_node_path41.resolve)(
-              args.resolveDir,
-              args.path.slice("assets:".length)
-            );
-            const exists = await (0, import_promises15.access)(directory).then(() => true).catch(() => false);
-            const isDirectory2 = exists && (await (0, import_promises15.lstat)(directory)).isDirectory();
-            if (!isDirectory2) {
-              return {
-                errors: [
-                  {
-                    text: `'${directory}' does not exist or is not a directory.`
-                  }
-                ]
-              };
-            }
-            identifiers.set(directory, nanoid());
-            if (!buildOutputDirectory) {
-              console.warn(
-                "You're attempting to import static assets as part of your Pages Functions, but have not specified a directory in which to put them. You must use 'wrangler pages dev <directory>' rather than 'wrangler pages dev -- <command>' to import static assets in Functions."
-              );
-            }
-            return { path: directory, namespace: "assets" };
-          });
-          pluginBuild.onLoad(
-            { filter: /.*/, namespace: "assets" },
-            async (args) => {
-              const identifier = identifiers.get(args.path);
-              if (buildOutputDirectory) {
-                const staticAssetsOutputDirectory = (0, import_node_path41.join)(
-                  buildOutputDirectory,
-                  "cdn-cgi",
-                  "pages-plugins",
-                  identifier
-                );
-                await (0, import_promises15.rm)(staticAssetsOutputDirectory, {
-                  force: true,
-                  recursive: true
-                });
-                await (0, import_promises15.cp)(args.path, staticAssetsOutputDirectory, {
-                  force: true,
-                  recursive: true
-                });
-                return {
-                  // TODO: Watch args.path for changes and re-copy when updated
-                  contents: `export const onRequest = ({ request, env, functionPath }) => {
-                    const url = new URL(request.url)
-                    const relativePathname = \`/\${url.pathname.replace(functionPath, "") || ""}\`.replace(/^\\/\\//, '/');
-                    url.pathname = '/cdn-cgi/pages-plugins/${identifier}' + relativePathname
-                    request = new Request(url.toString(), request)
-                    return env.ASSETS.fetch(request)
-                  }`
-                };
-              }
-            }
-          );
-        }
-      }
-    ],
+    plugins: [buildNotifierPlugin(onEnd), assetsPlugin(buildOutputDirectory)],
     isOutfile: !outdir,
     serveLegacyAssetsFromWorker: false,
     checkFetch: local,
@@ -182231,6 +182155,71 @@ function blockWorkerJsImports(nodejsCompatMode) {
   };
 }
 __name(blockWorkerJsImports, "blockWorkerJsImports");
+function assetsPlugin(buildOutputDirectory) {
+  return {
+    name: "Assets",
+    setup(pluginBuild) {
+      const identifiers = /* @__PURE__ */ new Map();
+      pluginBuild.onResolve({ filter: /^assets:/ }, async (args) => {
+        const directory = (0, import_node_path41.resolve)(
+          args.resolveDir,
+          args.path.slice("assets:".length)
+        );
+        const exists = await (0, import_promises15.access)(directory).then(() => true).catch(() => false);
+        const isDirectory2 = exists && (await (0, import_promises15.lstat)(directory)).isDirectory();
+        if (!isDirectory2) {
+          return {
+            errors: [
+              {
+                text: `'${directory}' does not exist or is not a directory.`
+              }
+            ]
+          };
+        }
+        identifiers.set(directory, nanoid());
+        if (!buildOutputDirectory) {
+          console.warn(
+            "You're attempting to import static assets as part of your Pages Functions, but have not specified a directory in which to put them. You must use 'wrangler pages dev <directory>' rather than 'wrangler pages dev -- <command>' to import static assets in Functions."
+          );
+        }
+        return { path: directory, namespace: "assets" };
+      });
+      pluginBuild.onLoad(
+        { filter: /.*/, namespace: "assets" },
+        async (args) => {
+          const identifier = identifiers.get(args.path);
+          if (buildOutputDirectory) {
+            const staticAssetsOutputDirectory = (0, import_node_path41.join)(
+              buildOutputDirectory,
+              "cdn-cgi",
+              "pages-plugins",
+              identifier
+            );
+            await (0, import_promises15.rm)(staticAssetsOutputDirectory, {
+              force: true,
+              recursive: true
+            });
+            await (0, import_promises15.cp)(args.path, staticAssetsOutputDirectory, {
+              force: true,
+              recursive: true
+            });
+            return {
+              // TODO: Watch args.path for changes and re-copy when updated
+              contents: `export const onRequest = ({ request, env, functionPath }) => {
+                const url = new URL(request.url);
+                const relativePathname = \`/\${url.pathname.replace(functionPath, "") || ""}\`.replace(/^\\/\\//, '/');
+                url.pathname = '/cdn-cgi/pages-plugins/${identifier}' + relativePathname;
+                request = new Request(url.toString(), request);
+                return env.ASSETS.fetch(request);
+              }`
+            };
+          }
+        }
+      );
+    }
+  };
+}
+__name(assetsPlugin, "assetsPlugin");
 
 // src/pages/functions/buildPlugin.ts
 function buildPluginFromFunctions({
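Functionally the extracted `assetsPlugin` is unchanged: it resolves `assets:`-prefixed imports in Pages Functions, copies the referenced directory into `cdn-cgi/pages-plugins/<id>` in the build output, and loads a virtual module whose `onRequest` handler proxies requests to `env.ASSETS`. A hypothetical Functions file consuming it (the relative path is illustrative):

```ts
// Hypothetical Pages Functions source; the "assets:" specifier is resolved by
// the esbuild plugin above, which generates the onRequest handler for us.
export { onRequest } from "assets:../public";
```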
@@ -199724,7 +199713,7 @@ async function listMetadataIndex(config, indexName) {
   return await fetchResult(
     `/accounts/${accountId}/vectorize/v2/indexes/${indexName}/metadata_index/list`,
     {
-      method: "
+      method: "GET"
     }
   );
 }
@@ -200259,12 +200248,12 @@ function options27(yargs) {
   return yargs.positional("name", {
     type: "string",
     demandOption: true,
-    description: "The name of the Vectorize index
+    description: "The name of the Vectorize index"
   }).options({
     vector: {
       type: "array",
       demandOption: true,
-      describe: "Vector to query the Vectorize Index
+      describe: "Vector to query the Vectorize Index",
       coerce: (arg) => arg.map(
         (value) => typeof value === "string" ? parseFloat(value) : value
       ).filter(
@@ -200274,18 +200263,18 @@ function options27(yargs) {
     "top-k": {
       type: "number",
       default: 5,
-      describe: "The number of results (nearest neighbors) to return
+      describe: "The number of results (nearest neighbors) to return"
     },
     "return-values": {
       type: "boolean",
       default: false,
-      describe: "Specify if the vector values should be included in the results
+      describe: "Specify if the vector values should be included in the results"
     },
     "return-metadata": {
       type: "string",
       choices: ["all", "indexed", "none"],
       default: "none",
-      describe: "Specify if the vector metadata should be included in the results
+      describe: "Specify if the vector metadata should be included in the results"
     },
     namespace: {
       type: "string",
@@ -200293,7 +200282,7 @@ function options27(yargs) {
     },
     filter: {
       type: "string",
-      describe: "Filter the query results based on this metadata filter.
+      describe: "Filter the query results based on this metadata filter.",
       coerce: (jsonStr) => {
         try {
           return JSON.parse(jsonStr);
@@ -200304,7 +200293,17 @@ function options27(yargs) {
       }
     }
   }
-  }).
+  }).example([
+    [
+      `\u276F\u276F wrangler vectorize query --vector 1 2 3 0.5 1.25 6
+Query the Vectorize Index by vector. To read from a json file that contains data in the format [1, 2, 3], you could use a command like
+\`wrangler vectorize query --vector $(jq -r '.[]' data.json | xargs)\`
+`
+    ],
+    [
+      "\u276F\u276F wrangler vectorize query --filter '{ 'p1': 'abc', 'p2': { '$ne': true }, 'p3': 10, 'p4': false, 'nested.p5': 'abcd' }'\n Filter the query results."
+    ]
+  ]).epilogue(vectorizeBetaWarning);
 }
 __name(options27, "options");
 async function handler28(args) {
@@ -202127,7 +202126,6 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
       maskedVars[key] = "(hidden)";
     }
   }
-  printBindings({ ...withoutStaticAssets, vars: maskedVars });
   if (props.dryRun) {
     printBindings({ ...withoutStaticAssets, vars: maskedVars });
   } else {
@@ -203278,7 +203276,8 @@ function createCLIParser(argv) {
   wrangler.updateStrings({
     "Commands:": `${source_default.bold("COMMANDS")}`,
     "Options:": `${source_default.bold("OPTIONS")}`,
-    "Positionals:": `${source_default.bold("POSITIONALS")}
+    "Positionals:": `${source_default.bold("POSITIONALS")}`,
+    "Examples:": `${source_default.bold("EXAMPLES")}`
   });
   wrangler.group(
     ["experimental-json-config", "config", "env", "help", "version"],
@@ -205407,12 +205406,12 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
   const jsxFragment = props.jsxFragment || config.jsx_fragment;
   const keepVars = props.keepVars || config.keep_vars;
   const minify = props.minify ?? config.minify;
+  const compatibilityFlags = props.compatibilityFlags ?? config.compatibility_flags;
   const nodejsCompatMode = validateNodeCompat({
     legacyNodeCompat: props.nodeCompat ?? config.node_compat ?? false,
-    compatibilityFlags
+    compatibilityFlags,
     noBundle: props.noBundle ?? config.no_bundle ?? false
   });
-  const compatibilityFlags = props.compatibilityFlags ?? config.compatibility_flags;
   if (props.noBundle && minify) {
     logger.warn(
       "`--minify` and `--no-bundle` can't be used together. If you want to minify your Worker and disable Wrangler's bundling, please minify as part of your own bundling process."
@@ -207133,7 +207132,7 @@ async function createRemoteWorkerInit(props) {
     migrations: void 0,
     // no migrations in dev
     compatibility_date: props.compatibilityDate,
-    compatibility_flags: props.compatibilityFlags,
+    compatibility_flags: stripExperimentalPrefixes(props.compatibilityFlags),
     keepVars: true,
     keepSecrets: true,
     logpush: false,
@@ -210975,6 +210974,7 @@ var LocalRuntimeController = class extends RuntimeController {
       await convertToConfigBundle(data),
       this.#proxyToUserWorkerAuthenticationSecret
     );
+    options29.liveReload = false;
     if (this.#mf === void 0) {
       logger.log(source_default.dim("\u2394 Starting local server..."));
       this.#mf = new import_miniflare19.Miniflare(options29);
@@ -211187,7 +211187,8 @@ var ProxyController = class extends Controller {
         logger.loggerLevel === "debug" ? "wrangler-ProxyWorker" : "wrangler"
       )
     }),
-    handleRuntimeStdio
+    handleRuntimeStdio,
+    liveReload: false
   };
   const proxyWorkerOptionsChanged = didMiniflareOptionsChange(
     this.proxyWorkerOptions,
@@ -212860,3 +212861,4 @@ yargs-parser/build/lib/index.js:
  * SPDX-License-Identifier: ISC
  *)
  */
+//# sourceMappingURL=cli.js.map