wrangler 2.6.1 → 2.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/__tests__/index.test.ts +2 -0
- package/src/__tests__/init.test.ts +36 -0
- package/src/__tests__/pages.test.ts +1 -0
- package/src/cfetch/internal.ts +10 -3
- package/src/config/index.ts +2 -1
- package/src/d1/migrations/apply.tsx +5 -2
- package/src/dev.tsx +14 -9
- package/src/docs/index.ts +94 -0
- package/src/index.tsx +9 -0
- package/src/init.ts +23 -17
- package/src/metrics/send-event.ts +2 -1
- package/src/pages/publish.tsx +11 -1
- package/src/pages/upload.tsx +26 -5
- package/wrangler-dist/cli.js +147 -34
package/package.json
CHANGED

-  "version": "2.6.1",
+  "version": "2.6.2",

package/src/__tests__/index.test.ts
CHANGED

@@ -31,6 +31,7 @@ describe("wrangler", () => {
       "wrangler

       Commands:
+        wrangler docs [command]    📚 Open wrangler's docs in your browser
         wrangler init [name]       📥 Create a wrangler.toml configuration file
         wrangler dev [script]      👂 Start a local server for developing your worker
         wrangler publish [script]  🆙 Publish your Worker to Cloudflare.

@@ -77,6 +78,7 @@ describe("wrangler", () => {
       wrangler

       Commands:
+        wrangler docs [command]    📚 Open wrangler's docs in your browser
         wrangler init [name]       📥 Create a wrangler.toml configuration file
         wrangler dev [script]      👂 Start a local server for developing your worker
         wrangler publish [script]  🆙 Publish your Worker to Cloudflare.
package/src/__tests__/init.test.ts
CHANGED

@@ -2400,6 +2400,42 @@ describe("init", () => {
   });
   });

+  it("should fail on init --from-dash on non-existent worker name", async () => {
+    setMockResponse(
+      `/accounts/:accountId/workers/services/:scriptName`,
+      "GET",
+      () => mockServiceMetadata
+    );
+    setMockFetchDashScript({
+      accountId: "LCARS",
+      fromDashScriptName: "memory-crystal",
+      environment: mockServiceMetadata.default_environment.environment,
+      mockResponse: mockDashboardScript,
+    });
+    mockConfirm(
+      {
+        text: "Would you like to use git to manage this Worker?",
+        result: false,
+      },
+      {
+        text: "Would you like to use TypeScript?",
+        result: true,
+      },
+      {
+        text: "No package.json found. Would you like to create one?",
+        result: true,
+      },
+      {
+        text: "Would you like to install the type definitions for Workers into your package.json?",
+        result: true,
+      }
+    );
+
+    await expect(
+      runWrangler("init isolinear-optical-chip --from-dash i-dont-exist")
+    ).rejects.toThrowError();
+  });
+
   it("should download source script from dashboard w/ out positional <name>", async () => {
     mockSupportingDashRequests({
       expectedAccountId: "LCARS",
package/src/__tests__/pages.test.ts
CHANGED

@@ -335,6 +335,7 @@ describe("pages", () => {
       --commit-hash     The SHA to attach to this deployment [string]
       --commit-message  The commit message to attach to this deployment [string]
       --commit-dirty    Whether or not the workspace should be considered dirty for this deployment [boolean]
+      --skip-caching    Skip asset caching which speeds up builds [boolean]

   🚧 'wrangler pages <command>' is a beta command. Please report any issues to https://github.com/cloudflare/wrangler2/issues/new/choose"
   `);
package/src/cfetch/internal.ts
CHANGED

@@ -44,8 +44,13 @@ export async function performApiFetch(
   logger.debug(
     `-- START CF API REQUEST: ${method} ${getCloudflareAPIBaseURL()}${resource}${queryString}`
   );
-
-
+  const logHeaders = cloneHeaders(headers);
+  delete logHeaders["Authorization"];
+  logger.debug("HEADERS:", JSON.stringify(logHeaders, null, 2));
+  logger.debug(
+    "INIT:",
+    JSON.stringify({ ...init, headers: logHeaders }, null, 2)
+  );
   logger.debug("-- END CF API REQUEST");
   return await fetch(`${getCloudflareAPIBaseURL()}${resource}${queryString}`, {
     method,

@@ -83,7 +88,9 @@ export async function fetchInternal<ResponseType>(
     response.statusText,
     response.status
   );
-
+  const logHeaders = cloneHeaders(response.headers);
+  delete logHeaders["Authorization"];
+  logger.debug("HEADERS:", JSON.stringify(logHeaders, null, 2));
   logger.debug("RESPONSE:", jsonText);
   logger.debug("-- END CF API RESPONSE");

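Note: the net effect of the two hunks above is that debug logging now includes request and response headers, but only after the Authorization header has been removed, so the API token never reaches debug output. A minimal sketch of the redaction pattern (redactForLog is a hypothetical helper used only for illustration; the real code inlines the same three steps around cloneHeaders):

  function redactForLog(headers: Record<string, string>): Record<string, string> {
    const logHeaders = { ...headers };   // copy, so the real request headers stay intact
    delete logHeaders["Authorization"];  // never write the API token to debug logs
    return logHeaders;
  }

  // logger.debug("HEADERS:", JSON.stringify(redactForLog(requestHeaders), null, 2));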
package/src/config/index.ts
CHANGED

@@ -166,7 +166,8 @@ export function printBindings(bindings: CfWorkerInit["bindings"]) {
       if (database_name) {
         databaseValue = `${database_name} (${database_id})`;
       }
-      if (preview_database_id) {
+      //database_id is local when running `wrangler dev --local`
+      if (preview_database_id && database_id !== "local") {
         databaseValue += `, Preview: (${preview_database_id})`;
       }
       return {

package/src/d1/migrations/apply.tsx
CHANGED

@@ -102,8 +102,11 @@ export const ApplyHandler = withConfig<BaseSqlExecuteArgs>(
       if (!ok) return;
     }

-    render(<Text>🕒 Creating backup...</Text>);
-    await createBackup(accountId, databaseInfo.uuid);
+    // don't backup prod db when applying migrations locally
+    if (!local) {
+      render(<Text>🕒 Creating backup...</Text>);
+      await createBackup(accountId, databaseInfo.uuid);
+    }

     for (const migration of unappliedMigrations) {
       let query = fs.readFileSync(
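Note: these two changes are companions to the dev.tsx change further down: when D1 runs locally the binding gets the placeholder id "local", so printBindings stops advertising a preview database, and `wrangler d1 migrations apply <db> --local` skips the remote backup step entirely. A hedged sketch of how the guard behaves (local comes from the --local flag; createBackup and databaseInfo are as in the diff):

  if (!local) {
    // remote apply: snapshot the production database before touching it
    render(<Text>🕒 Creating backup...</Text>);
    await createBackup(accountId, databaseInfo.uuid);
  }
  // local apply: go straight to executing the unapplied migrations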
package/src/dev.tsx
CHANGED

@@ -442,7 +442,7 @@ export async function startDev(args: StartDevOptions) {

   // eslint-disable-next-line no-inner-declarations
   async function getDevReactElement(configParam: Config) {
-    const { assetPaths, bindings } =
+    const { assetPaths, bindings } = getBindingsAndAssetPaths(
       args,
       configParam
     );

@@ -559,7 +559,7 @@ export async function startApiDev(args: StartDevOptions) {

   // eslint-disable-next-line no-inner-declarations
   async function getDevServer(configParam: Config) {
-    const { assetPaths, bindings } =
+    const { assetPaths, bindings } = getBindingsAndAssetPaths(
       args,
       configParam
     );

@@ -798,14 +798,11 @@ async function validateDevServerSettings(
   };
 }

-
-  args: StartDevOptions,
-  configParam: Config
-) {
+function getBindingsAndAssetPaths(args: StartDevOptions, configParam: Config) {
   const cliVars = collectKeyValues(args.var);

   // now log all available bindings into the terminal
-  const bindings =
+  const bindings = getBindings(configParam, args.env, args.local ?? false, {
     kv: args.kv,
     vars: { ...args.vars, ...cliVars },
     durableObjects: args.durableObjects,

@@ -832,11 +829,12 @@ async function getBindingsAndAssetPaths(
   return { assetPaths, bindings };
 }

-
+function getBindings(
   configParam: Config,
   env: string | undefined,
+  local: boolean,
   args: AdditionalDevProps
-):
+): CfWorkerInit["bindings"] {
   const bindings = {
     kv_namespaces: [
       ...(configParam.kv_namespaces || []).map(

@@ -906,6 +904,13 @@ async function getBindings(
       logfwdr: configParam.logfwdr,
       d1_databases: identifyD1BindingsAsBeta([
         ...(configParam.d1_databases ?? []).map((d1Db) => {
+          //in local dev, bindings don't matter
+          if (local) {
+            return {
+              ...d1Db,
+              database_id: "local",
+            };
+          }
           if (!d1Db.preview_database_id) {
             throw new Error(
               `In development, you should use a separate D1 database than the one you'd use in production. Please create a new D1 database with "wrangler d1 create <name>" and add its id as preview_database_id to the d1_database "${d1Db.binding}" in your wrangler.toml`
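Note: getBindings now takes a `local` flag (wired from `args.local ?? false`). With `wrangler dev --local`, each configured D1 binding is passed through with the placeholder id "local" instead of requiring a preview_database_id, since local mode never talks to the real database; that placeholder is what the config/index.ts check above looks for. A hedged sketch of just the D1 mapping (d1ForDev is an illustrative name; the rest of the non-local path lies outside this hunk):

  const d1ForDev = (configParam.d1_databases ?? []).map((d1Db) => {
    if (local) {
      // local dev never contacts the real D1 database, so any id will do
      return { ...d1Db, database_id: "local" };
    }
    if (!d1Db.preview_database_id) {
      throw new Error(
        `Please create a new D1 database with "wrangler d1 create <name>" and add its id as preview_database_id to "${d1Db.binding}"`
      );
    }
    return d1Db; // the remainder of the remote path is outside this hunk
  });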
package/src/docs/index.ts
ADDED

@@ -0,0 +1,94 @@
+import { printWranglerBanner } from "..";
+import { readConfig } from "../config";
+import { logger } from "../logger";
+import * as metrics from "../metrics";
+import openInBrowser from "../open-in-browser";
+
+import type {
+  CommonYargsOptions,
+  YargsOptionsToInterface,
+} from "../yargs-types";
+import type { ArgumentsCamelCase, Argv } from "yargs";
+
+const argToUrlHash = {
+  init: "init",
+  generate: "generate",
+  dev: "dev",
+  publish: "publish",
+  delete: "delete",
+  "kv:namespace": "kvnamespace",
+  "kv:key": "kvkey",
+  "kv:bulk": "kvbulk",
+  "r2 bucket": "r2-bucket",
+  "r2 object": "r2-object",
+  secret: "secret",
+  "secret:bulk": "secretbulk",
+  tail: "tail",
+  pages: "pages",
+  login: "login",
+  logout: "logout",
+  whoami: "whoami",
+  types: "types",
+  deployments: "deployments",
+};
+
+export function docsOptions(yargs: Argv<CommonYargsOptions>) {
+  return yargs.positional("command", {
+    describe: "Enter the wrangler command you want to know more about",
+    type: "string",
+    // requiresArg: true,
+    choices: [
+      "init",
+      "dev",
+      "publish",
+      "delete",
+      "tail",
+      "secret",
+      "secret:bulk",
+      "kv:namespace",
+      "kv:key",
+      "kv:bulk",
+      "pages",
+      // "queues", //TODO: Undocumented
+      "r2 object",
+      "r2 bucket",
+      // "dispatch-namespace", // TODO: Undocumented - Workers for Platforms
+      // "d1", //TODO: Undocumented
+      // "pubsub", //TODO: Undocumented
+      "login",
+      "logout",
+      "whoami",
+      "types",
+      "deployments",
+      "api",
+    ],
+  });
+}
+
+type DocsArgs = YargsOptionsToInterface<typeof docsOptions>;
+
+function isValidParam(k: string): k is keyof typeof argToUrlHash {
+  return k in argToUrlHash;
+}
+
+export async function docsHandler(args: ArgumentsCamelCase<DocsArgs>) {
+  let urlToOpen =
+    "https://developers.cloudflare.com/workers/wrangler/commands/";
+
+  if (args.command === "api") {
+    //if api, take them to the API docs
+    urlToOpen = "https://developers.cloudflare.com/workers/wrangler/api/";
+  } else if (args.command && isValidParam(args.command)) {
+    //otherwise, they get the wrangler commands page
+    urlToOpen += `#${argToUrlHash[args.command]}`;
+  }
+
+  await printWranglerBanner();
+
+  logger.log(`Opening a link in your default browser: ${urlToOpen}`);
+  await openInBrowser(urlToOpen);
+  const config = readConfig(undefined, {});
+  await metrics.sendMetricsEvent("view docs", {
+    sendMetrics: config.send_metrics,
+  });
+}
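Note: this new file backs the `wrangler docs [command]` command. With no argument it opens the Wrangler commands index, `wrangler docs api` opens the Wrangler API page, and any other recognised command is appended as a URL fragment via argToUrlHash. A small illustration of the URL resolution (resolveDocsUrl is a hypothetical wrapper written only to show the mapping; the real handler does the same inline, then opens the browser and sends the "view docs" metrics event):

  function resolveDocsUrl(command?: string): string {
    let url = "https://developers.cloudflare.com/workers/wrangler/commands/";
    if (command === "api") {
      url = "https://developers.cloudflare.com/workers/wrangler/api/";
    } else if (command && command in argToUrlHash) {
      url += `#${argToUrlHash[command as keyof typeof argToUrlHash]}`;
    }
    return url;
  }

  // resolveDocsUrl("kv:namespace") → ".../workers/wrangler/commands/#kvnamespace"
  // resolveDocsUrl("api")          → ".../workers/wrangler/api/"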
package/src/index.tsx
CHANGED

@@ -26,6 +26,7 @@ import {
 } from "./deprecated";
 import { devHandler, devOptions } from "./dev";
 import { workerNamespaceCommands } from "./dispatch-namespace";
+import { docsHandler, docsOptions } from "./docs";
 import { initHandler, initOptions } from "./init";
 import { kvNamespace, kvKey, kvBulk } from "./kv";
 import { logBuildFailure, logger } from "./logger";

@@ -274,6 +275,14 @@ export function createCLIParser(argv: string[]) {
     generateHandler
   );

+  // docs
+  wrangler.command(
+    "docs [command]",
+    "📚 Open wrangler's docs in your browser",
+    docsOptions,
+    docsHandler
+  );
+
   // init
   wrangler.command(
     "init [name]",
package/src/init.ts
CHANGED

@@ -165,6 +165,18 @@ export async function initHandler(args: ArgumentsCamelCase<InitArgs>) {
   );
   let justCreatedWranglerToml = false;

+  let accountId = "";
+  let serviceMetaData: undefined | ServiceMetadataRes;
+
+  // If --from-dash, check that script actually exists
+  if (fromDashScriptName) {
+    const config = readConfig(args.config as ConfigPath, args);
+    accountId = await requireAuth(config);
+    serviceMetaData = await fetchResult<ServiceMetadataRes>(
+      `/accounts/${accountId}/workers/services/${fromDashScriptName}`
+    );
+  }
+
   if (fs.existsSync(wranglerTomlDestination)) {
     let shouldContinue = false;
     logger.warn(

@@ -452,16 +464,13 @@ export async function initHandler(args: ArgumentsCamelCase<InitArgs>) {
       `After running "wrangler init --from-dash", modifying your worker via the Cloudflare dashboard is discouraged.
 Edits made via the Dashboard will not be synchronized locally and will be overridden by your local code and config when you publish.`
     );
-
-      const accountId = await requireAuth(config);
+
       await mkdir(path.join(creationDirectory, "./src"), {
         recursive: true,
       });
-
-        `/accounts/${accountId}/workers/services/${fromDashScriptName}`
-      );
+
       const defaultEnvironment =
-        serviceMetaData
+        serviceMetaData?.default_environment.environment;
       // I want the default environment, assuming it's the most up to date code.
       const dashScript = await fetchDashboardScript(
         `/accounts/${accountId}/workers/services/${fromDashScriptName}/environments/${defaultEnvironment}/content`

@@ -479,7 +488,7 @@ export async function initHandler(args: ArgumentsCamelCase<InitArgs>) {
         scriptPath: "src/index.ts",
         extraToml: (await getWorkerConfig(accountId, fromDashScriptName, {
           defaultEnvironment,
-          environments: serviceMetaData
+          environments: serviceMetaData?.environments,
         })) as TOML.JsonMap,
       });
     } else {

@@ -493,6 +502,7 @@ export async function initHandler(args: ArgumentsCamelCase<InitArgs>) {
       await mkdir(path.join(creationDirectory, "./src"), {
         recursive: true,
       });
+
       await writeFile(
         path.join(creationDirectory, "./src/index.ts"),
         readFileSync(path.join(getBasePath(), `templates/${template}`))

@@ -527,17 +537,13 @@ export async function initHandler(args: ArgumentsCamelCase<InitArgs>) {
       `After running "wrangler init --from-dash", modifying your worker via the Cloudflare dashboard is discouraged.
 Edits made via the Dashboard will not be synchronized locally and will be overridden by your local code and config when you publish.`
     );
-
-      const accountId = await requireAuth(config);
+
       await mkdir(path.join(creationDirectory, "./src"), {
         recursive: true,
       });

-      const serviceMetaData = await fetchResult<ServiceMetadataRes>(
-        `/accounts/${accountId}/workers/services/${fromDashScriptName}`
-      );
       const defaultEnvironment =
-        serviceMetaData
+        serviceMetaData?.default_environment.environment;

       // I want the default environment, assuming it's the most up to date code.
       const dashScript = await fetchDashboardScript(

@@ -557,7 +563,7 @@ export async function initHandler(args: ArgumentsCamelCase<InitArgs>) {
         //? Should we have Environment argument for `wrangler init --from-dash` - Jacob
         extraToml: (await getWorkerConfig(accountId, fromDashScriptName, {
           defaultEnvironment,
-          environments: serviceMetaData
+          environments: serviceMetaData?.environments,
         })) as TOML.JsonMap,
       });
     } else {

@@ -770,8 +776,8 @@ async function getWorkerConfig(
     defaultEnvironment,
     environments,
   }: {
-    defaultEnvironment: string;
-    environments: ServiceMetadataRes["environments"];
+    defaultEnvironment: string | undefined;
+    environments: ServiceMetadataRes["environments"] | undefined;
   }
 ): Promise<RawConfig> {
   const [bindings, routes, serviceEnvMetadata, cronTriggers] =

@@ -967,7 +973,7 @@ async function getWorkerConfig(
       crons: cronTriggers.schedules.map((scheduled) => scheduled.cron),
     },
     env: environments
-
+      ?.filter((env) => env.environment !== "production")
       // `env` can have multiple Environments, with different configs.
       .reduce((envObj, { environment }) => {
         return { ...envObj, [environment]: {} };
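Note: the shape of this change is that `wrangler init --from-dash <name>` now authenticates and fetches the worker's service metadata once, up front, so a non-existent worker name fails immediately (which is what the new init.test.ts case above asserts); the later --from-dash branches reuse that single serviceMetaData value, whose fields are consequently typed as possibly undefined. A compressed sketch of the early check, with fetchResult and ServiceMetadataRes as in the diff:

  if (fromDashScriptName) {
    const accountId = await requireAuth(readConfig(args.config as ConfigPath, args));
    // rejects (and aborts init) if the named worker does not exist on the account
    await fetchResult<ServiceMetadataRes>(
      `/accounts/${accountId}/workers/services/${fromDashScriptName}`
    );
  }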
package/src/metrics/send-event.ts
CHANGED

@@ -55,7 +55,8 @@ export type EventNames =
   | "build pages functions"
   | "run dev"
   | "run dev (api)"
-  | "run pages dev"
+  | "run pages dev"
+  | "view docs";

 /**
  * Send a metrics event, with no extra properties, to Cloudflare, if usage tracking is enabled.
package/src/pages/publish.tsx
CHANGED

@@ -58,6 +58,10 @@ export function Options(yargs: Argv) {
       description:
         "Whether or not the workspace should be considered dirty for this deployment",
     },
+    "skip-caching": {
+      type: "boolean",
+      description: "Skip asset caching which speeds up builds",
+    },
     config: {
       describe: "Pages does not support wrangler.toml",
       type: "string",

@@ -74,6 +78,7 @@ export const Handler = async ({
   commitHash,
   commitMessage,
   commitDirty,
+  skipCaching,
   config: wranglerConfig,
 }: PublishArgs) => {
   if (wranglerConfig) {

@@ -313,7 +318,12 @@ export const Handler = async ({
     }
   }

-  const manifest = await upload({
+  const manifest = await upload({
+    directory,
+    accountId,
+    projectName,
+    skipCaching: skipCaching ?? false,
+  });

   const formData = new FormData();
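Note: `wrangler pages publish` (and the lower-level upload handler in upload.tsx below) gains a `--skip-caching` boolean, e.g. `wrangler pages publish ./dist --skip-caching`; this is the same option that appears in the pages.test.ts help snapshot above. It defaults to false and is simply threaded into upload():

  const manifest = await upload({
    directory,
    accountId,
    projectName,
    skipCaching: skipCaching ?? false, // --skip-caching, off unless the flag is passed
  });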
package/src/pages/upload.tsx
CHANGED

@@ -40,6 +40,10 @@ export function Options(yargs: Argv) {
       type: "string",
       description: "The name of the project you want to deploy to",
     },
+    "skip-caching": {
+      type: "boolean",
+      description: "Skip asset caching which speeds up builds",
+    },
   })
   .epilogue(pagesBetaWarning);
 }

@@ -47,6 +51,7 @@ export function Options(yargs: Argv) {
 export const Handler = async ({
   directory,
   outputManifestPath,
+  skipCaching,
 }: UploadArgs) => {
   if (!directory) {
     throw new FatalError("Must specify a directory.", 1);

@@ -59,6 +64,7 @@ export const Handler = async ({
   const manifest = await upload({
     directory,
     jwt: process.env.CF_PAGES_UPLOAD_JWT,
+    skipCaching: skipCaching ?? false,
   });

   if (outputManifestPath) {

@@ -74,8 +80,14 @@ export const upload = async (
     | {
         directory: string;
         jwt: string;
+        skipCaching: boolean;
+      }
+    | {
+        directory: string;
+        accountId: string;
+        projectName: string;
+        skipCaching: boolean;
       }
-    | { directory: string; accountId: string; projectName: string }
 ) => {
   async function fetchJwt(): Promise<string> {
     if ("jwt" in args) {

@@ -184,7 +196,12 @@ export const upload = async (
   const start = Date.now();

   let attempts = 0;
-  const getMissingHashes = async (): Promise<string[]> => {
+  const getMissingHashes = async (skipCaching: boolean): Promise<string[]> => {
+    if (skipCaching) {
+      console.debug("Force skipping cache");
+      return files.map(({ hash }) => hash);
+    }
+
     try {
       return await fetchResult<string[]>(`/pages/assets/check-missing`, {
         method: "POST",

@@ -207,13 +224,13 @@ export const upload = async (
         // Looks like the JWT expired, fetch another one
         jwt = await fetchJwt();
       }
-      return getMissingHashes();
+      return getMissingHashes(skipCaching);
     } else {
       throw e;
     }
   }
 };
-  const missingHashes = await getMissingHashes();
+  const missingHashes = await getMissingHashes(args.skipCaching);

   const sortedFiles = files
     .filter((file) => missingHashes.includes(file.hash))

@@ -283,7 +300,8 @@ export const upload = async (
       );

       try {
-
+        console.debug("POST /pages/assets/upload");
+        const res = await fetchResult(`/pages/assets/upload`, {
           method: "POST",
           headers: {
             "Content-Type": "application/json",

@@ -291,8 +309,10 @@ export const upload = async (
           },
           body: JSON.stringify(payload),
         });
+        console.debug("result:", res);
       } catch (e) {
         if (attempts < MAX_UPLOAD_ATTEMPTS) {
+          console.debug("failed:", e, "retrying...");
           // Exponential backoff, 1 second first time, then 2 second, then 4 second etc.
           await new Promise((resolvePromise) =>
             setTimeout(resolvePromise, Math.pow(2, attempts++) * 1000)

@@ -304,6 +324,7 @@ export const upload = async (
         }
         return doUpload();
       } else {
+        console.debug("failed:", e);
         throw e;
       }
     }
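Note: inside upload(), skipCaching short-circuits the `/pages/assets/check-missing` call: every file hash is reported as missing, so all assets are re-uploaded instead of being skipped on a cache hit. A hedged sketch of the short-circuit, where `files` is the { hash, ... } list upload() builds from the directory:

  const getMissingHashes = async (skipCaching: boolean): Promise<string[]> => {
    if (skipCaching) {
      // --skip-caching: pretend nothing is cached so every asset is uploaded again
      return files.map(({ hash }) => hash);
    }
    // otherwise ask the API which hashes it does not already have
    return fetchResult<string[]>(`/pages/assets/check-missing`, {
      method: "POST",
      // ...auth headers and the hash payload, as in the diff
    });
  };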
package/wrangler-dist/cli.js
CHANGED

@@ -140164,7 +140164,7 @@ function printBindings(bindings) {
       if (database_name) {
         databaseValue = `${database_name} (${database_id})`;
       }
-      if (preview_database_id) {
+      if (preview_database_id && database_id !== "local") {
         databaseValue += `, Preview: (${preview_database_id})`;
       }
       return {

@@ -141895,7 +141895,7 @@ var import_websocket_server = __toESM(require_websocket_server2(), 1);
 var wrapper_default = import_websocket.default;

 // package.json
-var version = "2.6.1";
+var version = "2.6.2";
 var package_default = {
   name: "wrangler",
   version,

@@ -144385,8 +144385,13 @@ async function performApiFetch(resource, init = {}, queryParams, abortSignal) {
   logger.debug(
     `-- START CF API REQUEST: ${method} ${getCloudflareAPIBaseURL()}${resource}${queryString}`
   );
-
-
+  const logHeaders = cloneHeaders(headers);
+  delete logHeaders["Authorization"];
+  logger.debug("HEADERS:", JSON.stringify(logHeaders, null, 2));
+  logger.debug(
+    "INIT:",
+    JSON.stringify({ ...init, headers: logHeaders }, null, 2)
+  );
   logger.debug("-- END CF API REQUEST");
   return await (0, import_undici4.fetch)(`${getCloudflareAPIBaseURL()}${resource}${queryString}`, {
     method,

@@ -144409,7 +144414,9 @@ async function fetchInternal(resource, init = {}, queryParams, abortSignal) {
     response.statusText,
     response.status
   );
-
+  const logHeaders = cloneHeaders(response.headers);
+  delete logHeaders["Authorization"];
+  logger.debug("HEADERS:", JSON.stringify(logHeaders, null, 2));
   logger.debug("RESPONSE:", jsonText);
   logger.debug("-- END CF API RESPONSE");
   try {
@@ -155755,8 +155762,10 @@ Your database may not be available to serve requests during the migration, conti
     if (!ok)
       return;
   }
-  (
-
+  if (!local) {
+    (0, import_ink10.render)(/* @__PURE__ */ import_react14.default.createElement(import_ink10.Text, null, "\u{1F552} Creating backup..."));
+    await createBackup(accountId, databaseInfo.uuid);
+  }
   for (const migration of unappliedMigrations) {
     let query = import_node_fs14.default.readFileSync(
       `${migrationsPath}/${migration.Name}`,

@@ -156439,6 +156448,15 @@ Have you considered using Cloudflare Pages instead? See https://pages.cloudflare
     "./wrangler.toml"
   );
   let justCreatedWranglerToml = false;
+  let accountId = "";
+  let serviceMetaData;
+  if (fromDashScriptName) {
+    const config = readConfig(args.config, args);
+    accountId = await requireAuth(config);
+    serviceMetaData = await fetchResult(
+      `/accounts/${accountId}/workers/services/${fromDashScriptName}`
+    );
+  }
   if (fs12.existsSync(wranglerTomlDestination)) {
     let shouldContinue = false;
     logger.warn(

@@ -156664,15 +156682,10 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
     `After running "wrangler init --from-dash", modifying your worker via the Cloudflare dashboard is discouraged.
 Edits made via the Dashboard will not be synchronized locally and will be overridden by your local code and config when you publish.`
   );
-  const config = readConfig(args.config, args);
-  const accountId = await requireAuth(config);
   await (0, import_promises9.mkdir)(import_node_path24.default.join(creationDirectory, "./src"), {
     recursive: true
   });
-  const
-    `/accounts/${accountId}/workers/services/${fromDashScriptName}`
-  );
-  const defaultEnvironment = serviceMetaData.default_environment.environment;
+  const defaultEnvironment = serviceMetaData?.default_environment.environment;
   const dashScript = await fetchDashboardScript(
     `/accounts/${accountId}/workers/services/${fromDashScriptName}/environments/${defaultEnvironment}/content`
   );

@@ -156687,7 +156700,7 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
     scriptPath: "src/index.ts",
     extraToml: await getWorkerConfig(accountId, fromDashScriptName, {
       defaultEnvironment,
-      environments: serviceMetaData
+      environments: serviceMetaData?.environments
     })
   });
 } else {

@@ -156728,15 +156741,10 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
     `After running "wrangler init --from-dash", modifying your worker via the Cloudflare dashboard is discouraged.
 Edits made via the Dashboard will not be synchronized locally and will be overridden by your local code and config when you publish.`
   );
-  const config = readConfig(args.config, args);
-  const accountId = await requireAuth(config);
   await (0, import_promises9.mkdir)(import_node_path24.default.join(creationDirectory, "./src"), {
     recursive: true
   });
-  const
-    `/accounts/${accountId}/workers/services/${fromDashScriptName}`
-  );
-  const defaultEnvironment = serviceMetaData.default_environment.environment;
+  const defaultEnvironment = serviceMetaData?.default_environment.environment;
   const dashScript = await fetchDashboardScript(
     `/accounts/${accountId}/workers/services/${fromDashScriptName}/environments/${defaultEnvironment}/content`
   );

@@ -156751,7 +156759,7 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
     scriptPath: "src/index.ts",
     extraToml: await getWorkerConfig(accountId, fromDashScriptName, {
       defaultEnvironment,
-      environments: serviceMetaData
+      environments: serviceMetaData?.environments
     })
   });
 } else {

@@ -157075,7 +157083,7 @@ async function getWorkerConfig(accountId, fromDashScriptName, {
   triggers: {
     crons: cronTriggers.schedules.map((scheduled) => scheduled.cron)
   },
-  env: environments
+  env: environments?.filter((env5) => env5.environment !== "production").reduce((envObj, { environment }) => {
     return { ...envObj, [environment]: {} };
   }, {}),
   ...mappedBindings
@@ -157466,6 +157474,75 @@ function workerNamespaceCommands(workerNamespaceYargs, subHelp) {
   );
 }

+// src/docs/index.ts
+init_import_meta_url();
+var argToUrlHash = {
+  init: "init",
+  generate: "generate",
+  dev: "dev",
+  publish: "publish",
+  delete: "delete",
+  "kv:namespace": "kvnamespace",
+  "kv:key": "kvkey",
+  "kv:bulk": "kvbulk",
+  "r2 bucket": "r2-bucket",
+  "r2 object": "r2-object",
+  secret: "secret",
+  "secret:bulk": "secretbulk",
+  tail: "tail",
+  pages: "pages",
+  login: "login",
+  logout: "logout",
+  whoami: "whoami",
+  types: "types",
+  deployments: "deployments"
+};
+function docsOptions(yargs) {
+  return yargs.positional("command", {
+    describe: "Enter the wrangler command you want to know more about",
+    type: "string",
+    choices: [
+      "init",
+      "dev",
+      "publish",
+      "delete",
+      "tail",
+      "secret",
+      "secret:bulk",
+      "kv:namespace",
+      "kv:key",
+      "kv:bulk",
+      "pages",
+      "r2 object",
+      "r2 bucket",
+      "login",
+      "logout",
+      "whoami",
+      "types",
+      "deployments",
+      "api"
+    ]
+  });
+}
+function isValidParam(k) {
+  return k in argToUrlHash;
+}
+async function docsHandler(args) {
+  let urlToOpen = "https://developers.cloudflare.com/workers/wrangler/commands/";
+  if (args.command === "api") {
+    urlToOpen = "https://developers.cloudflare.com/workers/wrangler/api/";
+  } else if (args.command && isValidParam(args.command)) {
+    urlToOpen += `#${argToUrlHash[args.command]}`;
+  }
+  await printWranglerBanner();
+  logger.log(`Opening a link in your default browser: ${urlToOpen}`);
+  await openInBrowser(urlToOpen);
+  const config = readConfig(void 0, {});
+  await sendMetricsEvent("view docs", {
+    sendMetrics: config.send_metrics
+  });
+}
+
 // src/kv/index.ts
 init_import_meta_url();
 var import_node_string_decoder = require("node:string_decoder");
@@ -160697,12 +160774,17 @@ function Options8(yargs) {
   "output-manifest-path": {
     type: "string",
     description: "The name of the project you want to deploy to"
+  },
+  "skip-caching": {
+    type: "boolean",
+    description: "Skip asset caching which speeds up builds"
   }
 }).epilogue(pagesBetaWarning);
 }
 var Handler8 = async ({
   directory,
-  outputManifestPath
+  outputManifestPath,
+  skipCaching
 }) => {
   if (!directory) {
     throw new FatalError("Must specify a directory.", 1);

@@ -160712,7 +160794,8 @@ var Handler8 = async ({
   }
   const manifest = await upload({
     directory,
-    jwt: process.env.CF_PAGES_UPLOAD_JWT
+    jwt: process.env.CF_PAGES_UPLOAD_JWT,
+    skipCaching: skipCaching ?? false
   });
   if (outputManifestPath) {
     await (0, import_promises15.mkdir)((0, import_node_path34.dirname)(outputManifestPath), { recursive: true });

@@ -160791,7 +160874,11 @@ ${name} is ${prettyBytes(filestat.size)} in size`,
   let jwt = await fetchJwt();
   const start = Date.now();
   let attempts = 0;
-  const getMissingHashes = async () => {
+  const getMissingHashes = async (skipCaching) => {
+    if (skipCaching) {
+      console.debug("Force skipping cache");
+      return files.map(({ hash }) => hash);
+    }
     try {
       return await fetchResult(`/pages/assets/check-missing`, {
         method: "POST",

@@ -160811,13 +160898,13 @@ ${name} is ${prettyBytes(filestat.size)} in size`,
         if (e2.code === 8000013) {
           jwt = await fetchJwt();
         }
-        return getMissingHashes();
+        return getMissingHashes(skipCaching);
       } else {
         throw e2;
       }
     }
   };
-  const missingHashes = await getMissingHashes();
+  const missingHashes = await getMissingHashes(args.skipCaching);
   const sortedFiles = files.filter((file) => missingHashes.includes(file.hash)).sort((a, b) => b.sizeInBytes - a.sizeInBytes);
   const buckets = new Array(BULK_UPLOAD_CONCURRENCY).fill(null).map(() => ({
     files: [],

@@ -160867,7 +160954,8 @@ ${name} is ${prettyBytes(filestat.size)} in size`,
   }))
   );
   try {
-
+    console.debug("POST /pages/assets/upload");
+    const res = await fetchResult(`/pages/assets/upload`, {
       method: "POST",
       headers: {
         "Content-Type": "application/json",

@@ -160875,8 +160963,10 @@ ${name} is ${prettyBytes(filestat.size)} in size`,
       },
       body: JSON.stringify(payload)
     });
+    console.debug("result:", res);
   } catch (e2) {
     if (attempts < MAX_UPLOAD_ATTEMPTS) {
+      console.debug("failed:", e2, "retrying...");
       await new Promise(
         (resolvePromise) => setTimeout(resolvePromise, Math.pow(2, attempts++) * 1e3)
       );

@@ -160885,6 +160975,7 @@ ${name} is ${prettyBytes(filestat.size)} in size`,
   }
   return doUpload();
 } else {
+  console.debug("failed:", e2);
   throw e2;
 }
 }
@@ -161013,6 +161104,10 @@ function Options9(yargs) {
     type: "boolean",
     description: "Whether or not the workspace should be considered dirty for this deployment"
   },
+  "skip-caching": {
+    type: "boolean",
+    description: "Skip asset caching which speeds up builds"
+  },
   config: {
     describe: "Pages does not support wrangler.toml",
     type: "string",

@@ -161027,6 +161122,7 @@ var Handler9 = async ({
   commitHash,
   commitMessage,
   commitDirty,
+  skipCaching,
   config: wranglerConfig
 }) => {
   if (wranglerConfig) {

@@ -161209,7 +161305,12 @@ To silence this warning, pass in --commit-dirty=true`
     }
   }
 }
-  const manifest = await upload({
+  const manifest = await upload({
+    directory,
+    accountId,
+    projectName,
+    skipCaching: skipCaching ?? false
+  });
   const formData = new import_undici10.FormData();
   formData.append("manifest", JSON.stringify(manifest));
   if (branch) {
@@ -164394,6 +164495,12 @@ function createCLIParser(argv) {
     generateOptions,
     generateHandler2
   );
+  wrangler.command(
+    "docs [command]",
+    "\u{1F4DA} Open wrangler's docs in your browser",
+    docsOptions,
+    docsHandler
+  );
   wrangler.command(
     "init [name]",
     "\u{1F4E5} Create a wrangler.toml configuration file",

@@ -164961,7 +165068,7 @@ Switch out --local for ${import_chalk8.default.bold(
   { sendMetrics: config.send_metrics, offline: args.local }
   );
   async function getDevReactElement(configParam) {
-    const { assetPaths, bindings } =
+    const { assetPaths, bindings } = getBindingsAndAssetPaths(
      args,
      configParam
    );

@@ -165056,7 +165163,7 @@ async function startApiDev(args) {
   { sendMetrics: config.send_metrics, offline: args.local }
   );
   async function getDevServer(configParam) {
-    const { assetPaths, bindings } =
+    const { assetPaths, bindings } = getBindingsAndAssetPaths(
      args,
      configParam
    );

@@ -165241,9 +165348,9 @@ use --persist-to=./wrangler-local-state to keep using the old path.`
     localPersistencePath
   };
 }
-
+function getBindingsAndAssetPaths(args, configParam) {
   const cliVars = collectKeyValues(args.var);
-  const bindings =
+  const bindings = getBindings(configParam, args.env, args.local ?? false, {
     kv: args.kv,
     vars: { ...args.vars, ...cliVars },
     durableObjects: args.durableObjects,

@@ -165263,7 +165370,7 @@ async function getBindingsAndAssetPaths(args, configParam) {
   );
   return { assetPaths, bindings };
 }
-
+function getBindings(configParam, env5, local, args) {
   const bindings = {
     kv_namespaces: [
       ...(configParam.kv_namespaces || []).map(

@@ -165322,6 +165429,12 @@ async function getBindings(configParam, env5, args) {
   logfwdr: configParam.logfwdr,
   d1_databases: identifyD1BindingsAsBeta([
     ...(configParam.d1_databases ?? []).map((d1Db) => {
+      if (local) {
+        return {
+          ...d1Db,
+          database_id: "local"
+        };
+      }
       if (!d1Db.preview_database_id) {
         throw new Error(
           `In development, you should use a separate D1 database than the one you'd use in production. Please create a new D1 database with "wrangler d1 create <name>" and add its id as preview_database_id to the d1_database "${d1Db.binding}" in your wrangler.toml`