wrangler 3.78.12 → 3.79.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +5 -5
- package/wrangler-dist/cli.js +285 -418
- package/wrangler-dist/cli.js.map +0 -7
package/wrangler-dist/cli.js
CHANGED
@@ -19059,7 +19059,7 @@ var require_lodash = __commonJS({
  if (typeof func != "function") {
  throw new TypeError2(FUNC_ERROR_TEXT);
  }
- return
+ return setTimeout5(function() {
  func.apply(undefined2, args);
  }, wait);
  }
@@ -21045,7 +21045,7 @@ var require_lodash = __commonJS({
  }
  __name(safeGet, "safeGet");
  var setData = shortOut(baseSetData);
- var
+ var setTimeout5 = ctxSetTimeout || function(func, wait) {
  return root.setTimeout(func, wait);
  };
  var setToString = shortOut(baseSetToString);
@@ -21925,7 +21925,7 @@ var require_lodash = __commonJS({
  __name(invokeFunc, "invokeFunc");
  function leadingEdge(time) {
  lastInvokeTime = time;
- timerId =
+ timerId = setTimeout5(timerExpired, wait);
  return leading ? invokeFunc(time) : result2;
  }
  __name(leadingEdge, "leadingEdge");
@@ -21944,7 +21944,7 @@ var require_lodash = __commonJS({
  if (shouldInvoke(time)) {
  return trailingEdge(time);
  }
- timerId =
+ timerId = setTimeout5(timerExpired, remainingWait(time));
  }
  __name(timerExpired, "timerExpired");
  function trailingEdge(time) {
@@ -21979,12 +21979,12 @@ var require_lodash = __commonJS({
  }
  if (maxing) {
  clearTimeout2(timerId);
- timerId =
+ timerId = setTimeout5(timerExpired, wait);
  return invokeFunc(lastCallTime);
  }
  }
  if (timerId === undefined2) {
- timerId =
+ timerId = setTimeout5(timerExpired, wait);
  }
  return result2;
  }
@@ -113156,7 +113156,7 @@ A subrequest is a call to fetch(), a redirect, or a call to any Cache API method
  var import_assert32 = __toModule(require("assert"));
  var import_async_hooks22 = __toModule(require("async_hooks"));
  var import_timers = __toModule(require("timers"));
- var
+ var import_promises29 = __toModule(require("timers/promises"));
  var inputGateStorage = new import_async_hooks22.AsyncLocalStorage();
  var outputGateStorage = new import_async_hooks22.AsyncLocalStorage();
  function waitForOpenInputGate() {
@@ -113198,7 +113198,7 @@ A subrequest is a call to fetch(), a redirect, or a call to any Cache API method
  return inputGateStorage.run(this, closure);
  }
  async waitForOpen() {
- await (0,
+ await (0, import_promises29.setImmediate)();
  if (this.#lockCount === 0)
  return;
  return new Promise((resolve22) => this.#resolveQueue.push(resolve22));
@@ -113223,7 +113223,7 @@ A subrequest is a call to fetch(), a redirect, or a call to any Cache API method
  this.#lockCount--;
  while (this.#lockCount === 0 && this.#resolveQueue.length) {
  this.#resolveQueue.shift()();
- await (0,
+ await (0, import_promises29.setImmediate)();
  }
  if (this.#parent)
  return this.#parent.#unlock();
@@ -136216,7 +136216,7 @@ var require_src8 = __commonJS({
  }
  __name(formatSize, "formatSize");
  var import_assert22 = __toModule(require("assert"));
- var
+ var import_promises29 = __toModule(require("fs/promises"));
  var import_path22 = __toModule(require("path"));
  var import_shared8 = __toModule(require_src6());
  var import_dotenv2 = __toModule(require_main4());
@@ -137953,7 +137953,7 @@ The \`binding\` key should be used to define binding names.`);
  if (envPath) {
  envPath = import_path22.default.resolve(this.ctx.rootPath, envPath);
  try {
- Object.assign(bindings, import_dotenv2.default.parse(await
+ Object.assign(bindings, import_dotenv2.default.parse(await import_promises29.default.readFile(envPath, "utf8")));
  } catch (e3) {
  if (!(e3.code === "ENOENT" && this.envPath === true))
  throw e3;
@@ -137963,21 +137963,21 @@ The \`binding\` key should be used to define binding names.`);
  if (this.wasmBindings) {
  for (let [name2, wasmPath] of Object.entries(this.wasmBindings)) {
  wasmPath = import_path22.default.resolve(this.ctx.rootPath, wasmPath);
- bindings[name2] = new WebAssembly.Module(await
+ bindings[name2] = new WebAssembly.Module(await import_promises29.default.readFile(wasmPath));
  watch9.push(wasmPath);
  }
  }
  if (this.textBlobBindings) {
  for (let [name2, textPath] of Object.entries(this.textBlobBindings)) {
  textPath = import_path22.default.resolve(this.ctx.rootPath, textPath);
- bindings[name2] = await
+ bindings[name2] = await import_promises29.default.readFile(textPath, "utf-8");
  watch9.push(textPath);
  }
  }
  if (this.dataBlobBindings) {
  for (let [name2, dataPath] of Object.entries(this.dataBlobBindings)) {
  dataPath = import_path22.default.resolve(this.ctx.rootPath, dataPath);
- const fileContent = await
+ const fileContent = await import_promises29.default.readFile(dataPath);
  bindings[name2] = (0, import_shared8.viewToBuffer)(fileContent);
  watch9.push(dataPath);
  }
@@ -138213,7 +138213,7 @@ Make sure "${service}" is mounted so Miniflare knows where to find it.`);
  }
  __name(_populateBuildConfig, "_populateBuildConfig");
  var import_buffer = __toModule(require("buffer"));
- var
+ var import_promises210 = __toModule(require("fs/promises"));
  var import_path42 = __toModule(require("path"));
  var import_web5 = __toModule(require("stream/web"));
  var import_web6 = __toModule(require("stream/web"));
@@ -138598,7 +138598,7 @@ Make sure "${service}" is mounted so Miniflare knows where to find it.`);
  if (packagePath) {
  packagePath = import_path42.default.resolve(this.ctx.rootPath, packagePath);
  try {
- const pkg = JSON.parse(await
+ const pkg = JSON.parse(await import_promises210.default.readFile(packagePath, "utf8"));
  scriptPath3 = this.modules ? pkg.module : pkg.main;
  scriptPath3 &&= import_path42.default.resolve(import_path42.default.dirname(packagePath), scriptPath3);
  } catch (e3) {
@@ -138610,7 +138610,7 @@ Make sure "${service}" is mounted so Miniflare knows where to find it.`);
  }
  if (scriptPath3 !== void 0) {
  scriptPath3 = import_path42.default.resolve(this.ctx.rootPath, scriptPath3);
- const code = await
+ const code = await import_promises210.default.readFile(scriptPath3, "utf8");
  watch9.push(scriptPath3);
  return {
  globals,
@@ -149426,166 +149426,6 @@ var require_dist7 = __commonJS({
  }
  });

- // ../../node_modules/.pnpm/workerd@1.20240925.0/node_modules/workerd/lib/main.js
- var require_main5 = __commonJS({
- "../../node_modules/.pnpm/workerd@1.20240925.0/node_modules/workerd/lib/main.js"(exports2, module3) {
- init_import_meta_url();
- var __create2 = Object.create;
- var __defProp2 = Object.defineProperty;
- var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor;
- var __getOwnPropNames2 = Object.getOwnPropertyNames;
- var __getProtoOf2 = Object.getPrototypeOf;
- var __hasOwnProp2 = Object.prototype.hasOwnProperty;
- var __export2 = /* @__PURE__ */ __name((target, all2) => {
- for (var name2 in all2)
- __defProp2(target, name2, { get: all2[name2], enumerable: true });
- }, "__export");
- var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => {
- if (from && typeof from === "object" || typeof from === "function") {
- for (let key of __getOwnPropNames2(from))
- if (!__hasOwnProp2.call(to, key) && key !== except)
- __defProp2(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable });
- }
- return to;
- }, "__copyProps");
- var __toESM2 = /* @__PURE__ */ __name((mod, isNodeMode, target) => (target = mod != null ? __create2(__getProtoOf2(mod)) : {}, __copyProps2(
- // If the importer is in node compatibility mode or this is not an ESM
- // file that has been converted to a CommonJS file using a Babel-
- // compatible transform (i.e. "__esModule" has not been set), then set
- // "default" to the CommonJS "module.exports" for node compatibility.
- isNodeMode || !mod || !mod.__esModule ? __defProp2(target, "default", { value: mod, enumerable: true }) : target,
- mod
- )), "__toESM");
- var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS");
- var node_path_exports = {};
- __export2(node_path_exports, {
- compatibilityDate: () => compatibilityDate,
- default: () => node_path_default,
- version: () => version4
- });
- module3.exports = __toCommonJS2(node_path_exports);
- var import_fs16 = __toESM2(require("fs"));
- var import_os4 = __toESM2(require("os"));
- var import_path20 = __toESM2(require("path"));
- var knownPackages = {
- "darwin arm64 LE": "@cloudflare/workerd-darwin-arm64",
- "darwin x64 LE": "@cloudflare/workerd-darwin-64",
- "linux arm64 LE": "@cloudflare/workerd-linux-arm64",
- "linux x64 LE": "@cloudflare/workerd-linux-64",
- "win32 x64 LE": "@cloudflare/workerd-windows-64"
- };
- var maybeExeExtension = process.platform === "win32" ? ".exe" : "";
- function pkgAndSubpathForCurrentPlatform() {
- let pkg;
- let subpath;
- let platformKey = `${process.platform} ${import_os4.default.arch()} ${import_os4.default.endianness()}`;
- if (platformKey in knownPackages) {
- pkg = knownPackages[platformKey];
- subpath = `bin/workerd${maybeExeExtension}`;
- } else {
- throw new Error(`Unsupported platform: ${platformKey}`);
- }
- return { pkg, subpath };
- }
- __name(pkgAndSubpathForCurrentPlatform, "pkgAndSubpathForCurrentPlatform");
- function pkgForSomeOtherPlatform() {
- const libMain = require.resolve("workerd");
- const nodeModulesDirectory = import_path20.default.dirname(
- import_path20.default.dirname(import_path20.default.dirname(libMain))
- );
- if (import_path20.default.basename(nodeModulesDirectory) === "node_modules") {
- for (const unixKey in knownPackages) {
- try {
- const pkg = knownPackages[unixKey];
- if (import_fs16.default.existsSync(import_path20.default.join(nodeModulesDirectory, pkg)))
- return pkg;
- } catch {
- }
- }
- }
- return null;
- }
- __name(pkgForSomeOtherPlatform, "pkgForSomeOtherPlatform");
- function downloadedBinPath(pkg, subpath) {
- const libDir = import_path20.default.dirname(require.resolve("workerd"));
- return import_path20.default.join(libDir, `downloaded-${pkg.replace("/", "-")}-${import_path20.default.basename(subpath)}${maybeExeExtension}`);
- }
- __name(downloadedBinPath, "downloadedBinPath");
- function generateBinPath() {
- const { pkg, subpath } = pkgAndSubpathForCurrentPlatform();
- let binPath2;
- try {
- binPath2 = require.resolve(`${pkg}/${subpath}`);
- } catch (e3) {
- binPath2 = downloadedBinPath(pkg, subpath);
- if (!import_fs16.default.existsSync(binPath2)) {
- try {
- require.resolve(pkg);
- } catch {
- const otherPkg = pkgForSomeOtherPlatform();
- if (otherPkg) {
- throw new Error(`
- You installed workerd on another platform than the one you're currently using.
- This won't work because workerd is written with native code and needs to
- install a platform-specific binary executable.
-
- Specifically the "${otherPkg}" package is present but this platform
- needs the "${pkg}" package instead. People often get into this
- situation by installing workerd on macOS and copying "node_modules"
- into a Docker image that runs Linux.
-
- If you are installing with npm, you can try not copying the "node_modules"
- directory when you copy the files over, and running "npm ci" or "npm install"
- on the destination platform after the copy. Or you could consider using yarn
- instead which has built-in support for installing a package on multiple
- platforms simultaneously.
-
- If you are installing with yarn, you can try listing both this platform and the
- other platform in your ".yarnrc.yml" file using the "supportedArchitectures"
- feature: https://yarnpkg.com/configuration/yarnrc/#supportedArchitectures
- Keep in mind that this means multiple copies of workerd will be present.
- `);
- }
- throw new Error(`The package "${pkg}" could not be found, and is needed by workerd.
-
- If you are installing workerd with npm, make sure that you don't specify the
- "--no-optional" flag. The "optionalDependencies" package.json feature is used
- by workerd to install the correct binary executable for your current platform.`);
- }
- throw e3;
- }
- }
- let pnpapi;
- try {
- pnpapi = require("pnpapi");
- } catch (e3) {
- }
- if (pnpapi) {
- const root = pnpapi.getPackageInformation(pnpapi.topLevel).packageLocation;
- const binTargetPath = import_path20.default.join(
- root,
- "node_modules",
- ".cache",
- "workerd",
- `pnpapi-${pkg.replace("/", "-")}-${"1.20240925.0"}-${import_path20.default.basename(subpath)}`
- );
- if (!import_fs16.default.existsSync(binTargetPath)) {
- import_fs16.default.mkdirSync(import_path20.default.dirname(binTargetPath), { recursive: true });
- import_fs16.default.copyFileSync(binPath2, binTargetPath);
- import_fs16.default.chmodSync(binTargetPath, 493);
- }
- return { binPath: binTargetPath };
- }
- return { binPath: binPath2 };
- }
- __name(generateBinPath, "generateBinPath");
- var { binPath } = generateBinPath();
- var node_path_default = binPath;
- var compatibilityDate = "2024-09-25";
- var version4 = "1.20240925.0";
- }
- });
-
  // src/cli.ts
  var cli_exports2 = {};
  __export(cli_exports2, {
@@ -151676,7 +151516,7 @@ __name(fakeResolvedInput, "fakeResolvedInput");
  init_import_meta_url();
  var import_node_assert13 = __toESM(require("node:assert"));
  var import_node_fs15 = require("node:fs");
- var
+ var import_promises10 = require("node:fs/promises");
  var path22 = __toESM(require("node:path"));

  // ../workers-shared/index.ts
@@ -158104,7 +157944,7 @@ var import_undici3 = __toESM(require_undici());

  // package.json
  var name = "wrangler";
- var version = "3.
+ var version = "3.79.0";

  // src/user/index.ts
  init_import_meta_url();
@@ -168881,6 +168721,22 @@ ${dashLink}`);
  }
  __name(triggersDeploy, "triggersDeploy");

+ // src/utils/retry.ts
+ init_import_meta_url();
+ var import_promises7 = require("node:timers/promises");
+ async function retryOnError(action, backoff = 2e3, attempts = 3) {
+ try {
+ return await action();
+ } catch (err) {
+ if (attempts <= 1) {
+ throw err;
+ }
+ await (0, import_promises7.setTimeout)(backoff);
+ return retryOnError(action, backoff, attempts - 1);
+ }
+ }
+ __name(retryOnError, "retryOnError");
+
  // src/versions/api.ts
  init_import_meta_url();
  async function fetchVersion(accountId, workerName, versionId, versionCache) {
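The new src/utils/retry.ts helper shown in the hunk above retries a failing action a fixed number of times with a constant delay between attempts; the deploy path in the following hunks wraps its version-upload fetchResult calls in it. A minimal TypeScript sketch of the same behaviour for reference (the wrapped uploadVersion call is a hypothetical stand-in, not Wrangler's API):

import { setTimeout as sleep } from "node:timers/promises";

async function retryOnError<T>(action: () => Promise<T>, backoff = 2_000, attempts = 3): Promise<T> {
  try {
    return await action();
  } catch (err) {
    if (attempts <= 1) {
      throw err; // out of attempts: surface the last error
    }
    await sleep(backoff); // constant (not exponential) backoff between tries
    return retryOnError(action, backoff, attempts - 1);
  }
}

// Usage in the same shape as the deploy code below:
// const result = await retryOnError(async () => uploadVersion(form));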
@@ -171730,13 +171586,15 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
  try {
  let result;
  if (canUseNewVersionsDeploymentsApi) {
- const versionResult = await
-
-
-
-
-
-
+ const versionResult = await retryOnError(
+ async () => fetchResult(
+ `/accounts/${accountId}/workers/scripts/${scriptName}/versions`,
+ {
+ method: "POST",
+ body: createWorkerUploadForm(worker),
+ headers: await getMetricsUsageHeaders(config.send_metrics)
+ }
+ )
  );
  const versionMap = /* @__PURE__ */ new Map();
  versionMap.set(versionResult.id, 100);
@@ -171764,19 +171622,21 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
  startup_time_ms: versionResult.startup_time_ms
  };
  } else {
- result = await
-
-
-
-
-
-
-
-
-
-
-
+ result = await retryOnError(
+ async () => fetchResult(
+ workerUrl,
+ {
+ method: "PUT",
+ body: createWorkerUploadForm(worker),
+ headers: await getMetricsUsageHeaders(config.send_metrics)
+ },
+ new import_node_url8.URLSearchParams({
+ include_subdomain_availability: "true",
+ // pass excludeScript so the whole body of the
+ // script doesn't get included in the response
+ excludeScript: "true"
+ })
+ )
  );
  }
  if (result.startup_time_ms) {
@@ -172113,14 +171973,14 @@ init_hash();

  // src/pages/upload.tsx
  init_import_meta_url();
- var
+ var import_promises9 = require("node:fs/promises");
  var import_node_path23 = require("node:path");
  var import_ink = __toESM(require_build2());
  var import_ink_spinner = __toESM(require_build3());

  // src/pages/validate.tsx
  init_import_meta_url();
- var
+ var import_promises8 = require("node:fs/promises");
  var import_node_path22 = require("node:path");
  var import_mime2 = __toESM(require_mime());
  var import_minimatch = __toESM(require_minimatch());
@@ -172154,12 +172014,12 @@ var validate = /* @__PURE__ */ __name(async (args) => {
  ].map((pattern) => new import_minimatch.Minimatch(pattern));
  const directory = (0, import_node_path22.resolve)(args.directory);
  const walk = /* @__PURE__ */ __name(async (dir, fileMap2 = /* @__PURE__ */ new Map(), startingDir = dir) => {
- const files = await (0,
+ const files = await (0, import_promises8.readdir)(dir);
  await Promise.all(
  files.map(async (file) => {
  const filepath = (0, import_node_path22.join)(dir, file);
  const relativeFilepath = (0, import_node_path22.relative)(startingDir, filepath);
- const filestat = await (0,
+ const filestat = await (0, import_promises8.stat)(filepath);
  for (const minimatch of IGNORE_LIST) {
  if (minimatch.match(relativeFilepath)) {
  return;
@@ -172242,8 +172102,8 @@ var Handler2 = /* @__PURE__ */ __name(async ({
  skipCaching: skipCaching ?? false
  });
  if (outputManifestPath) {
- await (0,
- await (0,
+ await (0, import_promises9.mkdir)((0, import_node_path23.dirname)(outputManifestPath), { recursive: true });
+ await (0, import_promises9.writeFile)(outputManifestPath, JSON.stringify(manifest));
  }
  logger.log(`\u2728 Upload complete!`);
  }, "Handler");
@@ -172331,7 +172191,7 @@ var upload = /* @__PURE__ */ __name(async (args) => {
  const payload = await Promise.all(
  bucket.files.map(async (file) => ({
  key: file.hash,
- value: (await (0,
+ value: (await (0, import_promises9.readFile)(file.path)).toString("base64"),
  metadata: {
  contentType: file.contentType
  },
@@ -172561,7 +172421,7 @@ var syncAssets = /* @__PURE__ */ __name(async (accountId, scriptName, assetDirec
  payload.append(
  manifestEntry[1].hash,
  new import_undici7.File(
- [(await (0,
+ [(await (0, import_promises10.readFile)(absFilePath)).toString("base64")],
  manifestEntry[1].hash,
  {
  type: getContentType(absFilePath)
@@ -172644,7 +172504,7 @@ Assets already uploaded have been saved, so the next attempt will automatically
  return completionJwt;
  }, "syncAssets");
  var buildAssetManifest = /* @__PURE__ */ __name(async (dir) => {
- const files = await (0,
+ const files = await (0, import_promises10.readdir)(dir, { recursive: true });
  const manifest = {};
  let counter = 0;
  const ignoreFn = await createAssetIgnoreFunction(dir);
@@ -172655,7 +172515,7 @@ var buildAssetManifest = /* @__PURE__ */ __name(async (dir) => {
  return;
  }
  const filepath = path22.join(dir, relativeFilepath);
- const filestat = await (0,
+ const filestat = await (0, import_promises10.stat)(filepath);
  if (filestat.isSymbolicLink() || filestat.isDirectory()) {
  return;
  } else {
@@ -172770,7 +172630,7 @@ async function createAssetIgnoreFunction(dir) {
  if (!(0, import_node_fs15.existsSync)(cfAssetIgnorePath)) {
  return null;
  }
- const ignorePatterns = (await (0,
+ const ignorePatterns = (await (0, import_promises10.readFile)(cfAssetIgnorePath, { encoding: "utf8" })).split("\n");
  ignorePatterns.push(CF_ASSETS_IGNORE_FILENAME);
  return createPatternMatcher(ignorePatterns, true);
  }
@@ -173042,7 +172902,7 @@ __name(generateAddScriptNameExamples, "generateAddScriptNameExamples");
  init_import_meta_url();
  var import_node_events = __toESM(require("node:events"));
  var import_node_fs17 = require("node:fs");
- var
+ var import_promises11 = require("node:fs/promises");
  var import_node_http2 = require("node:http");
  var import_node_net = __toESM(require("node:net"));
  var import_node_path29 = __toESM(require("node:path"));
@@ -173063,17 +172923,17 @@ var globalWatcher;
  var globalWorkers;
  var heartbeats = /* @__PURE__ */ new Map();
  async function loadWorkerDefinitions() {
- await (0,
+ await (0, import_promises11.mkdir)(DEV_REGISTRY_PATH, { recursive: true });
  globalWorkers ??= {};
  const newWorkers = /* @__PURE__ */ new Set();
- const workerDefinitions = await (0,
+ const workerDefinitions = await (0, import_promises11.readdir)(DEV_REGISTRY_PATH);
  for (const workerName of workerDefinitions) {
  try {
- const file = await (0,
+ const file = await (0, import_promises11.readFile)(
  import_node_path29.default.join(DEV_REGISTRY_PATH, workerName),
  "utf8"
  );
- const stats = await (0,
+ const stats = await (0, import_promises11.stat)(import_node_path29.default.join(DEV_REGISTRY_PATH, workerName));
  if (stats.mtime.getTime() < Date.now() - 6e5) {
  await unregisterWorker(workerName);
  } else {
@@ -173179,8 +173039,8 @@ async function registerWorker(name2, definition) {
  if (existingHeartbeat) {
  clearInterval(existingHeartbeat);
  }
- await (0,
- await (0,
+ await (0, import_promises11.mkdir)(DEV_REGISTRY_PATH, { recursive: true });
+ await (0, import_promises11.writeFile)(
  import_node_path29.default.join(DEV_REGISTRY_PATH, name2),
  // We don't currently do anything with the stored Wrangler version,
  // but if we need to make breaking changes to this format in the future
@@ -173218,7 +173078,7 @@ __name(registerWorker, "registerWorker");
  async function unregisterWorker(name2) {
  if (getFlag("FILE_BASED_REGISTRY")) {
  try {
- await (0,
+ await (0, import_promises11.unlink)(import_node_path29.default.join(DEV_REGISTRY_PATH, name2));
  const existingHeartbeat = heartbeats.get(name2);
  if (existingHeartbeat) {
  clearInterval(existingHeartbeat);
@@ -176429,7 +176289,7 @@ __name(registerDevHotKeys, "registerDevHotKeys");
  // src/dev/start-server.ts
  init_import_meta_url();
  var import_node_events2 = require("node:events");
- var
+ var import_promises12 = require("node:fs/promises");
  var path39 = __toESM(require("node:path"));
  var util3 = __toESM(require("node:util"));
  var import_signal_exit7 = __toESM(require_signal_exit());
@@ -176820,7 +176680,7 @@ async function runEsbuild({
  dependencies: bundleResult?.dependencies ?? {},
  sourceMapPath: bundleResult?.sourceMapPath,
  sourceMapMetadata: bundleResult?.sourceMapMetadata,
- entrypointSource: await (0,
+ entrypointSource: await (0, import_promises12.readFile)(entrypointPath, "utf8")
  };
  }
  __name(runEsbuild, "runEsbuild");
@@ -176898,6 +176758,66 @@ async function startLocalServer(props) {
  }
  __name(startLocalServer, "startLocalServer");

+ // src/pages/utils.ts
+ init_import_meta_url();
+ var import_node_path38 = __toESM(require("node:path"));
+ var RUNNING_BUILDERS = [];
+ var CLEANUP_CALLBACKS = [];
+ var CLEANUP = /* @__PURE__ */ __name(() => {
+ CLEANUP_CALLBACKS.forEach((callback) => callback());
+ RUNNING_BUILDERS.forEach((builder) => builder.stop?.());
+ }, "CLEANUP");
+ function isUrl(maybeUrl) {
+ if (!maybeUrl) {
+ return false;
+ }
+ try {
+ new URL(maybeUrl);
+ return true;
+ } catch (e3) {
+ return false;
+ }
+ }
+ __name(isUrl, "isUrl");
+ var projectRootCacheCwd;
+ var projectRootCache;
+ var tmpDirCacheProjectRoot;
+ var tmpDirCache;
+ function getPagesProjectRoot() {
+ const cwd2 = process.cwd();
+ if (projectRootCache !== void 0 && projectRootCacheCwd === cwd2) {
+ return projectRootCache;
+ }
+ const packagePath = findUpSync("package.json");
+ projectRootCache = packagePath ? import_node_path38.default.dirname(packagePath) : process.cwd();
+ projectRootCacheCwd = cwd2;
+ return projectRootCache;
+ }
+ __name(getPagesProjectRoot, "getPagesProjectRoot");
+ function getPagesTmpDir() {
+ const projectRoot = getPagesProjectRoot();
+ if (tmpDirCache !== void 0 && tmpDirCacheProjectRoot === projectRoot) {
+ return tmpDirCache;
+ }
+ const tmpDir = getWranglerTmpDir(getPagesProjectRoot(), "pages");
+ tmpDirCache = tmpDir.path;
+ tmpDirCacheProjectRoot = projectRoot;
+ return tmpDirCache;
+ }
+ __name(getPagesTmpDir, "getPagesTmpDir");
+ function debounce(fn2, delayMs = 100) {
+ let crrTimeoutId;
+ return () => {
+ if (crrTimeoutId) {
+ clearTimeout(crrTimeoutId);
+ }
+ crrTimeoutId = setTimeout(() => {
+ fn2();
+ }, delayMs);
+ };
+ }
+ __name(debounce, "debounce");
+
  // src/utils/collectKeyValues.ts
  init_import_meta_url();
  function collectKeyValues(array) {
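The debounce helper added in the src/pages/utils.ts hunk above is trailing-edge only: every call resets the timer, and the wrapped function runs once, delayMs after the last call. A minimal usage sketch in TypeScript (the rebuild callback and wiring are hypothetical, not Wrangler's code):

function debounce(fn: () => void, delayMs = 100): () => void {
  let timeoutId: ReturnType<typeof setTimeout> | undefined;
  return () => {
    if (timeoutId) {
      clearTimeout(timeoutId);
    }
    // Only the last call within `delayMs` actually triggers `fn`.
    timeoutId = setTimeout(() => fn(), delayMs);
  };
}

const rebuild = () => console.log("rebuilding Pages Functions...");
const onFileChange = debounce(rebuild, 100);
onFileChange();
onFileChange();
onFileChange(); // a burst of change events collapses into a single rebuild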
@@ -180639,7 +180559,7 @@ init_import_meta_url();

  // src/cloudchamber/common.ts
  init_import_meta_url();
- var
+ var import_promises13 = require("fs/promises");
  var import_process2 = require("process");

  // src/cloudchamber/client/index.ts
@@ -181613,7 +181533,7 @@ __name(promiseSpinner, "promiseSpinner");
  async function fillOpenAPIConfiguration(config, json) {
  const headers = OpenAPI.HEADERS !== void 0 ? { ...OpenAPI.HEADERS } : {};
  if (Object.keys(getConfigCache("wrangler-account.json")).length === 0) {
- await wrap2((0,
+ await wrap2((0, import_promises13.mkdir)("node_modules", {}));
  purgeConfigCaches();
  }
  const scopes = getScopes();
@@ -182360,7 +182280,7 @@ __name(getNetworkInput, "getNetworkInput");

  // src/cloudchamber/ssh/ssh.ts
  init_import_meta_url();
- var
+ var import_promises14 = require("fs/promises");
  var import_os3 = require("os");
  var import_process4 = require("process");

@@ -182424,7 +182344,7 @@ function createSSHPublicKeyOptionalYargs(yargs) {
  __name(createSSHPublicKeyOptionalYargs, "createSSHPublicKeyOptionalYargs");
  async function retrieveSSHKey(sshKeyPath, { json } = { json: false }) {
  try {
- const file = (await (0,
+ const file = (await (0, import_promises14.readFile)(sshKeyPath)).toString();
  validatePublicSSHKeyCLI(file, { json });
  return file;
  } catch (err) {
@@ -182505,10 +182425,10 @@ async function tryToRetrieveAllDefaultSSHKeyPaths() {
  const path74 = `${HOME}/.ssh`;
  const paths = [];
  try {
- const dirList = await (0,
+ const dirList = await (0, import_promises14.readdir)(path74);
  for (const file of dirList) {
  if (file.endsWith(".pub")) {
- const s = await (0,
+ const s = await (0, import_promises14.stat)(`${path74}/${file}`);
  if (s.isFile()) {
  paths.push(`${path74}/${file}`);
  }
@@ -182550,7 +182470,7 @@ async function shouldPromptForNewSSHKeyAppear(keys = void 0) {
  }
  let foundValidSSHKeyThatDontExist = false;
  for (const defaultSSHKeyPath of defaultSSHKeyPaths) {
- const file = (await (0,
+ const file = (await (0, import_promises14.readFile)(defaultSSHKeyPath)).toString().trim();
  try {
  validateSSHKey(file);
  } catch {
@@ -183853,8 +183773,8 @@ init_import_meta_url();

  // src/d1/backups.tsx
  init_import_meta_url();
- var
- var
+ var import_promises15 = __toESM(require("node:fs/promises"));
+ var path42 = __toESM(require("path"));
  var import_ink_table2 = __toESM(require_dist4());

  // src/utils/render.ts
@@ -186953,7 +186873,7 @@ var DownloadHandler = withConfig(
  accountId,
  name2
  );
- const filename = output ||
+ const filename = output || path42.resolve(`${name2}.${backupId.slice(0, 8)}.sqlite3`);
  logger.log(`\u{1F300} Downloading backup ${backupId} from '${name2}'`);
  const response = await getBackupResponse(accountId, db.uuid, backupId);
  if (!response.ok) {
@@ -186963,7 +186883,7 @@ var DownloadHandler = withConfig(
  }
  logger.log(`\u{1F300} Saving to ${filename}`);
  const buffer = await response.arrayBuffer();
- await
+ await import_promises15.default.writeFile(filename, new Buffer(buffer));
  logger.log(`\u{1F300} Done!`);
  }
  );
@@ -187088,7 +187008,7 @@ var Handler5 = withConfig(
  init_import_meta_url();
  var import_fs9 = require("fs");
  var import_node_assert19 = __toESM(require("node:assert"));
- var
+ var import_node_path39 = __toESM(require("node:path"));
  var import_ink7 = __toESM(require_build2());
  var import_ink_table3 = __toESM(require_dist4());
  var import_md5_file = __toESM(require_md5_file());
@@ -187416,7 +187336,7 @@ async function executeLocally({
  }
  const id = localDB.previewDatabaseUuid ?? localDB.uuid;
  const persistencePath = getLocalPersistencePath(persistTo, config.configPath);
- const d1Persist =
+ const d1Persist = import_node_path39.default.join(persistencePath, "v3", "d1");
  logger.log(
  `\u{1F300} Executing on local database ${name2} (${id}) from ${readableRelative(
  d1Persist
@@ -187680,8 +187600,8 @@ __name(checkForSQLiteBinary, "checkForSQLiteBinary");

  // src/d1/export.ts
  init_import_meta_url();
- var
- var
+ var import_promises16 = __toESM(require("node:fs/promises"));
+ var import_node_path40 = __toESM(require("node:path"));
  var import_miniflare15 = require("miniflare");
  var import_undici15 = __toESM(require_undici());
  function Options7(yargs) {
@@ -187745,7 +187665,7 @@ async function exportLocal(config, name2, output, tables, noSchema, noData) {
  }
  const id = localDB.previewDatabaseUuid ?? localDB.uuid;
  const persistencePath = getLocalPersistencePath(void 0, config.configPath);
- const d1Persist =
+ const d1Persist = import_node_path40.default.join(persistencePath, "v3", "d1");
  logger.log(
  `\u{1F300} Exporting local database ${name2} (${id}) from ${readableRelative(
  d1Persist
@@ -187764,7 +187684,7 @@ async function exportLocal(config, name2, output, tables, noSchema, noData) {
  logger.log(`\u{1F300} Exporting SQL to ${output}...`);
  try {
  const dump = await db.prepare(`PRAGMA miniflare_d1_export(?,?,?);`).bind(noSchema, noData, ...tables).raw();
- await
+ await import_promises16.default.writeFile(output, dump[0].join("\n"));
  } catch (e3) {
  throw new UserError(e3.message);
  } finally {
@@ -187804,7 +187724,7 @@ async function exportRemotely(config, name2, output, tables, noSchema, noData) {
  startMessage: `Downloading SQL to ${output}`,
  async promise() {
  const contents = await (0, import_undici15.fetch)(finalResponse.result.signed_url);
- await
+ await import_promises16.default.writeFile(output, contents.body || "");
  }
  });
  logger.log(`\u{1F300} Downloaded to ${output} successfully!`);
@@ -189076,7 +188996,7 @@ __name(checkAndConfirmForceDeleteIfNecessary, "checkAndConfirmForceDeleteIfNeces
  // src/deploy/index.ts
  init_import_meta_url();
  var import_node_assert21 = __toESM(require("node:assert"));
- var
+ var import_node_path41 = __toESM(require("node:path"));

  // src/match-tag.ts
  init_import_meta_url();
@@ -189302,8 +189222,8 @@ async function deployHandler(args) {
  To learn more about Workers with assets, visit our documentation at https://developers.cloudflare.com/workers/frameworks/.`
  );
  }
- const configPath = args.config || args.script && findWranglerToml(
- const projectRoot = configPath &&
+ const configPath = args.config || args.script && findWranglerToml(import_node_path41.default.dirname(args.script));
+ const projectRoot = configPath && import_node_path41.default.dirname(configPath);
  const config = readConfig(configPath, args);
  const entry = await getEntry(args, config, "deploy");
  if (args.public) {
@@ -189352,7 +189272,7 @@ To learn more about Workers with assets, visit our documentation at https://deve
  await verifyWorkerMatchesCITag(
  accountId,
  name2,
-
+ import_node_path41.default.relative(entry.directory, config.configPath ?? "wrangler.toml")
  );
  }
  const { sourceMapSize, versionId, workerTag, targets } = await deploy({
@@ -189420,8 +189340,8 @@ var import_undici16 = __toESM(require_undici());
  // src/init.ts
  init_import_meta_url();
  var fs20 = __toESM(require("node:fs"));
- var
- var
+ var import_promises17 = require("node:fs/promises");
+ var import_node_path44 = __toESM(require("node:path"));
  var import_toml4 = __toESM(require_toml());
  init_execa();

@@ -189429,7 +189349,7 @@ init_execa();
  init_import_meta_url();
  var import_node_fs24 = __toESM(require("node:fs"));
  var import_node_os8 = __toESM(require("node:os"));
- var
+ var import_node_path42 = __toESM(require("node:path"));
  init_execa();

  // ../../node_modules/.pnpm/semiver@1.1.0/node_modules/semiver/dist/semiver.mjs
@@ -189500,7 +189420,7 @@ async function cloneIntoDirectory(remote, targetDirectory, subdirectory) {
  args.push(remote.substring(0, tagIndex));
  }
  const tempDir = import_node_fs24.default.mkdtempSync(
-
+ import_node_path42.default.join(import_node_os8.default.tmpdir(), `wrangler-generate-repo-`)
  );
  args.push(tempDir);
  await execa("git", args);
@@ -189509,7 +189429,7 @@ async function cloneIntoDirectory(remote, targetDirectory, subdirectory) {
  cwd: tempDir
  });
  }
- const templatePath = subdirectory !== void 0 ?
+ const templatePath = subdirectory !== void 0 ? import_node_path42.default.join(tempDir, subdirectory) : tempDir;
  try {
  import_node_fs24.default.renameSync(templatePath, targetDirectory);
  } catch (err) {
@@ -189528,7 +189448,7 @@ async function cloneIntoDirectory(remote, targetDirectory, subdirectory) {
  throw new UserError(`Failed to find "${subdirectory}" in ${remote}`);
  }
  }
- import_node_fs24.default.rmSync(
+ import_node_fs24.default.rmSync(import_node_path42.default.join(targetDirectory, ".git"), {
  recursive: true,
  force: true
  });
@@ -189538,7 +189458,7 @@ __name(cloneIntoDirectory, "cloneIntoDirectory");
  // src/package-manager.ts
  init_import_meta_url();
  var import_node_fs25 = require("node:fs");
- var
+ var import_node_path43 = require("node:path");
  var import_node_process11 = require("node:process");
  init_execa();
  async function getPackageManager2(cwd2) {
@@ -189547,9 +189467,9 @@ async function getPackageManager2(cwd2) {
  supportsNpm(),
  supportsPnpm()
  ]);
- const hasYarnLock = (0, import_node_fs25.existsSync)((0,
- const hasNpmLock = (0, import_node_fs25.existsSync)((0,
- const hasPnpmLock = (0, import_node_fs25.existsSync)((0,
+ const hasYarnLock = (0, import_node_fs25.existsSync)((0, import_node_path43.join)(cwd2, "yarn.lock"));
+ const hasNpmLock = (0, import_node_fs25.existsSync)((0, import_node_path43.join)(cwd2, "package-lock.json"));
+ const hasPnpmLock = (0, import_node_fs25.existsSync)((0, import_node_path43.join)(cwd2, "pnpm-lock.yaml"));
  const userAgent = sniffUserAgent();
  if (hasNpmLock) {
  if (hasNpm) {
@@ -189791,15 +189711,15 @@ async function initHandler(args) {
  const instructions = [];
  let shouldRunPackageManagerInstall = false;
  const fromDashWorkerName = args.fromDash;
- const creationDirectory =
+ const creationDirectory = import_node_path44.default.resolve(
  process.cwd(),
  (args.name ? args.name : fromDashWorkerName) ?? ""
  );
  assertNoTypeArg(args);
  assertNoSiteArg(args, creationDirectory);
- const workerName =
+ const workerName = import_node_path44.default.basename(creationDirectory).toLowerCase().replaceAll(/[^a-z0-9\-_]/gm, "-");
  const packageManager = await getPackageManager2(creationDirectory);
- const wranglerTomlDestination =
+ const wranglerTomlDestination = import_node_path44.default.join(
  creationDirectory,
  "./wrangler.toml"
  );
@@ -189849,7 +189769,7 @@ The \`init\` command will be removed in a future version.`
  if (fs20.existsSync(wranglerTomlDestination)) {
  let shouldContinue = false;
  logger.warn(
- `${
+ `${import_node_path44.default.relative(process.cwd(), wranglerTomlDestination)} already exists!`
  );
  if (!fromDashWorkerName) {
  shouldContinue = await confirm(
@@ -189890,10 +189810,10 @@ The \`init\` command will be removed in a future version.`
  return;
  }
  }
- await (0,
+ await (0, import_promises17.mkdir)(creationDirectory, { recursive: true });
  const compatibilityDate = (/* @__PURE__ */ new Date()).toISOString().substring(0, 10);
  try {
- await (0,
+ await (0, import_promises17.writeFile)(
  wranglerTomlDestination,
  import_toml4.default.stringify({
  name: workerName,
@@ -189901,12 +189821,12 @@ The \`init\` command will be removed in a future version.`
  }) + "\n"
  );
  logger.log(
- `\u2728 Created ${
+ `\u2728 Created ${import_node_path44.default.relative(process.cwd(), wranglerTomlDestination)}`
  );
  justCreatedWranglerToml = true;
  } catch (err) {
  throw new Error(
- `Failed to create ${
+ `Failed to create ${import_node_path44.default.relative(
  process.cwd(),
  wranglerTomlDestination
  )}.
@@ -189918,12 +189838,12 @@ ${err.message ?? err}`
  const shouldInitGit = yesFlag || await confirm("Would you like to use git to manage this Worker?");
  if (shouldInitGit) {
  await initializeGit(creationDirectory);
- await (0,
-
- readFileSync5(
+ await (0, import_promises17.writeFile)(
+ import_node_path44.default.join(creationDirectory, ".gitignore"),
+ readFileSync5(import_node_path44.default.join(getBasePath(), "templates/gitignore"))
  );
  logger.log(
- args.name && args.name !== "." ? `\u2728 Initialized git repository at ${
+ args.name && args.name !== "." ? `\u2728 Initialized git repository at ${import_node_path44.default.relative(
  process.cwd(),
  creationDirectory
  )}` : `\u2728 Initialized git repository`
@@ -189942,8 +189862,8 @@ ${err.message ?? err}`
  if (!pathToPackageJson) {
  shouldCreatePackageJson = yesFlag || await confirm("No package.json found. Would you like to create one?");
  if (shouldCreatePackageJson) {
- await (0,
-
+ await (0, import_promises17.writeFile)(
+ import_node_path44.default.join(creationDirectory, "./package.json"),
  JSON.stringify(
  {
  name: workerName,
@@ -189958,9 +189878,9 @@ ${err.message ?? err}`
  ) + "\n"
  );
  shouldRunPackageManagerInstall = true;
- pathToPackageJson =
+ pathToPackageJson = import_node_path44.default.join(creationDirectory, "package.json");
  logger.log(
- `\u2728 Created ${
+ `\u2728 Created ${import_node_path44.default.relative(process.cwd(), pathToPackageJson)}`
  );
  } else {
  return;
@@ -189972,7 +189892,7 @@ ${err.message ?? err}`
  );
  if (!(packageJson.devDependencies?.wrangler || packageJson.dependencies?.wrangler)) {
  const shouldInstall = yesFlag || await confirm(
- `Would you like to install wrangler into ${
+ `Would you like to install wrangler into ${import_node_path44.default.relative(
  process.cwd(),
  pathToPackageJson
  )}?`
@@ -189992,15 +189912,15 @@ ${err.message ?? err}`
  if (!pathToTSConfig) {
  if (yesFlag || await confirm("Would you like to use TypeScript?")) {
  isTypescriptProject = true;
- await (0,
-
- readFileSync5(
+ await (0, import_promises17.writeFile)(
+ import_node_path44.default.join(creationDirectory, "./tsconfig.json"),
+ readFileSync5(import_node_path44.default.join(getBasePath(), "templates/tsconfig.init.json"))
  );
  devDepsToInstall.push("@cloudflare/workers-types");
  devDepsToInstall.push("typescript");
- pathToTSConfig =
+ pathToTSConfig = import_node_path44.default.join(creationDirectory, "tsconfig.json");
  logger.log(
- `\u2728 Created ${
+ `\u2728 Created ${import_node_path44.default.relative(process.cwd(), pathToTSConfig)}`
  );
  }
  } else {
@@ -190016,7 +189936,7 @@ ${err.message ?? err}`
  if (shouldInstall) {
  devDepsToInstall.push("@cloudflare/workers-types");
  instructions.push(
- `\u{1F6A8} Please add "@cloudflare/workers-types" to compilerOptions.types in ${
+ `\u{1F6A8} Please add "@cloudflare/workers-types" to compilerOptions.types in ${import_node_path44.default.relative(
  process.cwd(),
  pathToTSConfig
  )}`
@@ -190054,10 +189974,10 @@ ${err.message ?? err}`
  };
  fs20.writeFileSync(wranglerTomlDestination, import_toml4.default.stringify(newToml));
  }
- const isNamedWorker = isCreatingWranglerToml &&
+ const isNamedWorker = isCreatingWranglerToml && import_node_path44.default.dirname(packagePath) !== process.cwd();
  const isAddingTestScripts = isAddingTests && !packageJsonContent.scripts?.test;
  if (isWritingScripts) {
- await (0,
+ await (0, import_promises17.writeFile)(
  packagePath,
  JSON.stringify(
  {
@@ -190095,25 +190015,25 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
  }
  __name(writePackageJsonScriptsAndUpdateWranglerToml, "writePackageJsonScriptsAndUpdateWranglerToml");
  if (isTypescriptProject) {
- if (!fs20.existsSync(
- const newWorkerFilename =
+ if (!fs20.existsSync(import_node_path44.default.join(creationDirectory, "./src/index.ts"))) {
+ const newWorkerFilename = import_node_path44.default.relative(
  process.cwd(),
-
+ import_node_path44.default.join(creationDirectory, "./src/index.ts")
  );
  const newWorkerType = yesFlag ? "fetch" : await getNewWorkerType(newWorkerFilename);
  if (newWorkerType !== "none") {
  const template = getNewWorkerTemplate("ts", newWorkerType);
- await (0,
+ await (0, import_promises17.mkdir)(import_node_path44.default.join(creationDirectory, "./src"), {
  recursive: true
  });
- await (0,
-
- readFileSync5(
+ await (0, import_promises17.writeFile)(
+ import_node_path44.default.join(creationDirectory, "./src/index.ts"),
+ readFileSync5(import_node_path44.default.join(getBasePath(), `templates/${template}`))
  );
  logger.log(
- `\u2728 Created ${
+ `\u2728 Created ${import_node_path44.default.relative(
  process.cwd(),
-
+ import_node_path44.default.join(creationDirectory, "./src/index.ts")
  )}`
  );
  shouldCreateTests = yesFlag || await confirm(
@@ -190125,19 +190045,19 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
  }
  newWorkerTestType = "vitest";
  devDepsToInstall.push(newWorkerTestType);
- await (0,
-
+ await (0, import_promises17.writeFile)(
+ import_node_path44.default.join(creationDirectory, "./src/index.test.ts"),
  readFileSync5(
-
+ import_node_path44.default.join(
  getBasePath(),
  `templates/init-tests/test-${newWorkerTestType}-new-worker.ts`
  )
  )
  );
  logger.log(
- `\u2728 Created ${
+ `\u2728 Created ${import_node_path44.default.relative(
  process.cwd(),
-
+ import_node_path44.default.join(creationDirectory, "./src/index.test.ts")
  )}`
  );
  }
@@ -190153,10 +190073,10 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
  }
  }
  } else {
- if (!fs20.existsSync(
- const newWorkerFilename =
+ if (!fs20.existsSync(import_node_path44.default.join(creationDirectory, "./src/index.js"))) {
+ const newWorkerFilename = import_node_path44.default.relative(
  process.cwd(),
-
+ import_node_path44.default.join(creationDirectory, "./src/index.js")
  );
  if (fromDashWorkerName) {
  logger.warn(
@@ -190166,7 +190086,7 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
  accountId,
  fromDashWorkerName
  );
- await (0,
+ await (0, import_promises17.mkdir)(import_node_path44.default.join(creationDirectory, "./src"), {
  recursive: true
  });
  config.main = `src/${config.main}`;
@@ -190174,13 +190094,13 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
|
|
190174
190094
|
for (const files of createBatches(modules, 10)) {
|
190175
190095
|
await Promise.all(
|
190176
190096
|
files.map(async (file) => {
|
190177
|
-
const filepath =
|
190097
|
+
const filepath = import_node_path44.default.join(
|
190178
190098
|
creationDirectory,
|
190179
190099
|
`./src/${file.name}`
|
190180
190100
|
);
|
190181
|
-
const directory = (0,
|
190182
|
-
await (0,
|
190183
|
-
await (0,
|
190101
|
+
const directory = (0, import_node_path44.dirname)(filepath);
|
190102
|
+
await (0, import_promises17.mkdir)(directory, { recursive: true });
|
190103
|
+
await (0, import_promises17.writeFile)(filepath, file.stream());
|
190184
190104
|
})
|
190185
190105
|
);
|
190186
190106
|
}
|
@@ -190196,36 +190116,36 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
|
|
190196
190116
|
const newWorkerType = yesFlag ? "fetch" : await getNewWorkerType(newWorkerFilename);
|
190197
190117
|
if (newWorkerType !== "none") {
|
190198
190118
|
const template = getNewWorkerTemplate("js", newWorkerType);
|
190199
|
-
await (0,
|
190119
|
+
await (0, import_promises17.mkdir)(import_node_path44.default.join(creationDirectory, "./src"), {
|
190200
190120
|
recursive: true
|
190201
190121
|
});
|
190202
|
-
await (0,
|
190203
|
-
|
190204
|
-
readFileSync5(
|
190122
|
+
await (0, import_promises17.writeFile)(
|
190123
|
+
import_node_path44.default.join(creationDirectory, "./src/index.js"),
|
190124
|
+
readFileSync5(import_node_path44.default.join(getBasePath(), `templates/${template}`))
|
190205
190125
|
);
|
190206
190126
|
logger.log(
|
190207
|
-
`\u2728 Created ${
|
190127
|
+
`\u2728 Created ${import_node_path44.default.relative(
|
190208
190128
|
process.cwd(),
|
190209
|
-
|
190129
|
+
import_node_path44.default.join(creationDirectory, "./src/index.js")
|
190210
190130
|
)}`
|
190211
190131
|
);
|
190212
190132
|
shouldCreateTests = yesFlag || await confirm("Would you like us to write your first test?");
|
190213
190133
|
if (shouldCreateTests) {
|
190214
190134
|
newWorkerTestType = await getNewWorkerTestType(yesFlag);
|
190215
190135
|
devDepsToInstall.push(newWorkerTestType);
|
190216
|
-
await (0,
|
190217
|
-
|
190136
|
+
await (0, import_promises17.writeFile)(
|
190137
|
+
import_node_path44.default.join(creationDirectory, "./src/index.test.js"),
|
190218
190138
|
readFileSync5(
|
190219
|
-
|
190139
|
+
import_node_path44.default.join(
|
190220
190140
|
getBasePath(),
|
190221
190141
|
`templates/init-tests/test-${newWorkerTestType}-new-worker.js`
|
190222
190142
|
)
|
190223
190143
|
)
|
190224
190144
|
);
|
190225
190145
|
logger.log(
|
190226
|
-
`\u2728 Created ${
|
190146
|
+
`\u2728 Created ${import_node_path44.default.relative(
|
190227
190147
|
process.cwd(),
|
190228
|
-
|
190148
|
+
import_node_path44.default.join(creationDirectory, "./src/index.test.js")
|
190229
190149
|
)}`
|
190230
190150
|
);
|
190231
190151
|
}
|
@@ -190338,7 +190258,7 @@ function getNewWorkerToml(workerType) {
|
|
190338
190258
|
__name(getNewWorkerToml, "getNewWorkerToml");
|
190339
190259
|
async function findPath(isolatedInit, cwd2, basename5) {
|
190340
190260
|
if (isolatedInit) {
|
190341
|
-
return fs20.existsSync(
|
190261
|
+
return fs20.existsSync(import_node_path44.default.resolve(cwd2, basename5)) ? import_node_path44.default.resolve(cwd2, basename5) : void 0;
|
190342
190262
|
} else {
|
190343
190263
|
return await findUp(basename5, {
|
190344
190264
|
cwd: cwd2
|
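Note: findPath above either checks a single directory (isolated init) or walks up parent directories for the file. The sketch below mirrors that lookup and assumes the find-up package, whose findUp(name, { cwd }) call has the same shape as the bundled helper used above.

import fs from "node:fs";
import path from "node:path";
import { findUp } from "find-up";

// When `isolated` is true, only consider `cwd` itself; otherwise search upwards from `cwd`.
async function findConfigPath(
  isolated: boolean,
  cwd: string,
  filename: string
): Promise<string | undefined> {
  if (isolated) {
    const candidate = path.resolve(cwd, filename);
    return fs.existsSync(candidate) ? candidate : undefined;
  }
  return findUp(filename, { cwd });
}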
@@ -190685,7 +190605,7 @@ function assertNoTypeArg(args) {
|
|
190685
190605
|
__name(assertNoTypeArg, "assertNoTypeArg");
|
190686
190606
|
function assertNoSiteArg(args, creationDirectory) {
|
190687
190607
|
if (args.site) {
|
190688
|
-
const gitDirectory = creationDirectory !== process.cwd() ?
|
190608
|
+
const gitDirectory = creationDirectory !== process.cwd() ? import_node_path44.default.basename(creationDirectory) : "my-site";
|
190689
190609
|
const message = `The --site option is no longer supported.
|
190690
190610
|
If you wish to create a brand new Worker Sites project then clone the \`worker-sites-template\` starter repository:
|
190691
190611
|
|
@@ -191320,7 +191240,7 @@ __name(docsHandler, "docsHandler");
|
|
191320
191240
|
// src/generate/index.ts
|
191321
191241
|
init_import_meta_url();
|
191322
191242
|
var import_node_fs26 = __toESM(require("node:fs"));
|
191323
|
-
var
|
191243
|
+
var import_node_path45 = __toESM(require("node:path"));
|
191324
191244
|
init_execa();
|
191325
191245
|
function generateOptions(yargs) {
|
191326
191246
|
return yargs.positional("name", {
|
@@ -191380,7 +191300,7 @@ Please refer to https://developers.cloudflare.com/workers/wrangler/deprecations/
|
|
191380
191300
|
}
|
191381
191301
|
const creationDirectory = generateWorkerDirectoryName(args.name);
|
191382
191302
|
if (args.site) {
|
191383
|
-
const gitDirectory = creationDirectory !== process.cwd() ?
|
191303
|
+
const gitDirectory = creationDirectory !== process.cwd() ? import_node_path45.default.basename(creationDirectory) : "my-site";
|
191384
191304
|
const message = `The --site option is no longer supported.
|
191385
191305
|
If you wish to create a brand new Worker Sites project then clone the \`worker-sites-template\` starter repository:
|
191386
191306
|
|
@@ -191412,7 +191332,7 @@ Running \`npm create cloudflare@latest\` for you instead.
|
|
191412
191332
|
return;
|
191413
191333
|
}
|
191414
191334
|
logger.log(
|
191415
|
-
`Creating a worker in ${
|
191335
|
+
`Creating a worker in ${import_node_path45.default.basename(creationDirectory)} from ${args.template}`
|
191416
191336
|
);
|
191417
191337
|
const { remote, subdirectory } = parseTemplatePath(args.template);
|
191418
191338
|
await cloneIntoDirectory(remote, creationDirectory, subdirectory);
|
@@ -191421,10 +191341,10 @@ Running \`npm create cloudflare@latest\` for you instead.
|
|
191421
191341
|
}
|
191422
191342
|
__name(generateHandler2, "generateHandler");
|
191423
191343
|
function generateWorkerDirectoryName(workerName) {
|
191424
|
-
let workerDirectoryPath =
|
191344
|
+
let workerDirectoryPath = import_node_path45.default.resolve(process.cwd(), workerName);
|
191425
191345
|
let i = 1;
|
191426
191346
|
while (import_node_fs26.default.existsSync(workerDirectoryPath)) {
|
191427
|
-
workerDirectoryPath =
|
191347
|
+
workerDirectoryPath = import_node_path45.default.resolve(process.cwd(), `${workerName}-${i}`);
|
191428
191348
|
i++;
|
191429
191349
|
}
|
191430
191350
|
return workerDirectoryPath;
|
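Note: generateWorkerDirectoryName above resolves <cwd>/<name> and appends -1, -2, ... until it finds a directory that does not exist yet. A self-contained sketch of that pattern (hypothetical helper name):

import fs from "node:fs";
import path from "node:path";

// Pick a directory name that is not already taken: "<name>", then "<name>-1", "<name>-2", ...
function uniqueDirectoryName(name: string): string {
  let candidate = path.resolve(process.cwd(), name);
  let i = 1;
  while (fs.existsSync(candidate)) {
    candidate = path.resolve(process.cwd(), `${name}-${i}`);
    i++;
  }
  return candidate;
}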
@@ -192718,13 +192638,13 @@ init_import_meta_url();
|
|
192718
192638
|
init_import_meta_url();
|
192719
192639
|
var import_node_crypto7 = require("node:crypto");
|
192720
192640
|
var import_node_fs29 = require("node:fs");
|
192721
|
-
var
|
192641
|
+
var import_promises22 = require("node:fs/promises");
|
192722
192642
|
var import_node_path53 = __toESM(require("node:path"));
|
192723
192643
|
|
192724
192644
|
// src/api/pages/create-worker-bundle-contents.ts
|
192725
192645
|
init_import_meta_url();
|
192726
192646
|
var import_node_fs27 = require("node:fs");
|
192727
|
-
var
|
192647
|
+
var import_node_path46 = __toESM(require("node:path"));
|
192728
192648
|
var import_undici18 = __toESM(require_undici());
|
192729
192649
|
async function createUploadWorkerBundleContents(workerBundle, config) {
|
192730
192650
|
const workerBundleFormData = createWorkerBundleFormData(workerBundle, config);
|
@@ -192738,7 +192658,7 @@ async function createUploadWorkerBundleContents(workerBundle, config) {
|
|
192738
192658
|
__name(createUploadWorkerBundleContents, "createUploadWorkerBundleContents");
|
192739
192659
|
function createWorkerBundleFormData(workerBundle, config) {
|
192740
192660
|
const mainModule = {
|
192741
|
-
name:
|
192661
|
+
name: import_node_path46.default.basename(workerBundle.resolvedEntryPointPath),
|
192742
192662
|
filePath: workerBundle.resolvedEntryPointPath,
|
192743
192663
|
content: (0, import_node_fs27.readFileSync)(workerBundle.resolvedEntryPointPath, {
|
192744
192664
|
encoding: "utf-8"
|
@@ -192803,72 +192723,12 @@ var import_node_path52 = require("node:path");
|
|
192803
192723
|
|
192804
192724
|
// src/pages/functions/buildPlugin.ts
|
192805
192725
|
init_import_meta_url();
|
192806
|
-
var
|
192726
|
+
var import_promises19 = require("node:fs/promises");
|
192807
192727
|
var import_node_path48 = require("node:path");
|
192808
192728
|
|
192809
|
-
// src/pages/utils.ts
|
192810
|
-
init_import_meta_url();
|
192811
|
-
var import_node_path46 = __toESM(require("node:path"));
|
192812
|
-
var RUNNING_BUILDERS = [];
|
192813
|
-
var CLEANUP_CALLBACKS = [];
|
192814
|
-
var CLEANUP = /* @__PURE__ */ __name(() => {
|
192815
|
-
CLEANUP_CALLBACKS.forEach((callback) => callback());
|
192816
|
-
RUNNING_BUILDERS.forEach((builder) => builder.stop?.());
|
192817
|
-
}, "CLEANUP");
|
192818
|
-
function isUrl(maybeUrl) {
|
192819
|
-
if (!maybeUrl) {
|
192820
|
-
return false;
|
192821
|
-
}
|
192822
|
-
try {
|
192823
|
-
new URL(maybeUrl);
|
192824
|
-
return true;
|
192825
|
-
} catch (e3) {
|
192826
|
-
return false;
|
192827
|
-
}
|
192828
|
-
}
|
192829
|
-
__name(isUrl, "isUrl");
|
192830
|
-
var projectRootCacheCwd;
|
192831
|
-
var projectRootCache;
|
192832
|
-
var tmpDirCacheProjectRoot;
|
192833
|
-
var tmpDirCache;
|
192834
|
-
function getPagesProjectRoot() {
|
192835
|
-
const cwd2 = process.cwd();
|
192836
|
-
if (projectRootCache !== void 0 && projectRootCacheCwd === cwd2) {
|
192837
|
-
return projectRootCache;
|
192838
|
-
}
|
192839
|
-
const packagePath = findUpSync("package.json");
|
192840
|
-
projectRootCache = packagePath ? import_node_path46.default.dirname(packagePath) : process.cwd();
|
192841
|
-
projectRootCacheCwd = cwd2;
|
192842
|
-
return projectRootCache;
|
192843
|
-
}
|
192844
|
-
__name(getPagesProjectRoot, "getPagesProjectRoot");
|
192845
|
-
function getPagesTmpDir() {
|
192846
|
-
const projectRoot = getPagesProjectRoot();
|
192847
|
-
if (tmpDirCache !== void 0 && tmpDirCacheProjectRoot === projectRoot) {
|
192848
|
-
return tmpDirCache;
|
192849
|
-
}
|
192850
|
-
const tmpDir = getWranglerTmpDir(getPagesProjectRoot(), "pages");
|
192851
|
-
tmpDirCache = tmpDir.path;
|
192852
|
-
tmpDirCacheProjectRoot = projectRoot;
|
192853
|
-
return tmpDirCache;
|
192854
|
-
}
|
192855
|
-
__name(getPagesTmpDir, "getPagesTmpDir");
|
192856
|
-
function debounce(fn2, delayMs = 100) {
|
192857
|
-
let crrTimeoutId;
|
192858
|
-
return () => {
|
192859
|
-
if (crrTimeoutId) {
|
192860
|
-
clearTimeout(crrTimeoutId);
|
192861
|
-
}
|
192862
|
-
crrTimeoutId = setTimeout(() => {
|
192863
|
-
fn2();
|
192864
|
-
}, delayMs);
|
192865
|
-
};
|
192866
|
-
}
|
192867
|
-
__name(debounce, "debounce");
|
192868
|
-
|
192869
192729
|
// src/pages/functions/buildWorker.ts
|
192870
192730
|
init_import_meta_url();
|
192871
|
-
var
|
192731
|
+
var import_promises18 = require("node:fs/promises");
|
192872
192732
|
var import_node_path47 = require("node:path");
|
192873
192733
|
var import_esbuild3 = require("esbuild");
|
192874
192734
|
|
@@ -193164,8 +193024,8 @@ function assetsPlugin(buildOutputDirectory) {
|
|
193164
193024
|
args.resolveDir,
|
193165
193025
|
args.path.slice("assets:".length)
|
193166
193026
|
);
|
193167
|
-
const exists = await (0,
|
193168
|
-
const isDirectory2 = exists && (await (0,
|
193027
|
+
const exists = await (0, import_promises18.access)(directory).then(() => true).catch(() => false);
|
193028
|
+
const isDirectory2 = exists && (await (0, import_promises18.lstat)(directory)).isDirectory();
|
193169
193029
|
if (!isDirectory2) {
|
193170
193030
|
return {
|
193171
193031
|
errors: [
|
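Note: both assets plugins in this diff test whether an "assets:" path points at an existing directory by combining access() (existence) with lstat().isDirectory(). A self-contained sketch of that check:

import { access, lstat } from "node:fs/promises";

// True only if `p` exists and is a directory (lstat, so a symlink to a directory returns false).
async function isExistingDirectory(p: string): Promise<boolean> {
  const exists = await access(p).then(() => true).catch(() => false);
  return exists && (await lstat(p)).isDirectory();
}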
@@ -193194,11 +193054,11 @@ function assetsPlugin(buildOutputDirectory) {
|
|
193194
193054
|
"pages-plugins",
|
193195
193055
|
identifier
|
193196
193056
|
);
|
193197
|
-
await (0,
|
193057
|
+
await (0, import_promises18.rm)(staticAssetsOutputDirectory, {
|
193198
193058
|
force: true,
|
193199
193059
|
recursive: true
|
193200
193060
|
});
|
193201
|
-
await (0,
|
193061
|
+
await (0, import_promises18.cp)(args.path, staticAssetsOutputDirectory, {
|
193202
193062
|
force: true,
|
193203
193063
|
recursive: true
|
193204
193064
|
});
|
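Note: the hunk above refreshes the static-assets output directory by deleting it and copying the source tree back in. A sketch of that replace-directory step (fs.promises.cp requires Node 16.7+):

import { cp, rm } from "node:fs/promises";

// Replace `destination` with a fresh copy of `source`.
async function replaceDirectory(source: string, destination: string): Promise<void> {
  // `force: true` makes rm a no-op when the destination does not exist yet.
  await rm(destination, { force: true, recursive: true });
  await cp(source, destination, { force: true, recursive: true });
}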
@@ -193273,8 +193133,8 @@ function buildPluginFromFunctions({
|
|
193273
193133
|
args.resolveDir,
|
193274
193134
|
args.path.slice("assets:".length)
|
193275
193135
|
);
|
193276
|
-
const exists = await (0,
|
193277
|
-
const isDirectory2 = exists && (await (0,
|
193136
|
+
const exists = await (0, import_promises19.access)(directory).then(() => true).catch(() => false);
|
193137
|
+
const isDirectory2 = exists && (await (0, import_promises19.lstat)(directory)).isDirectory();
|
193278
193138
|
if (!isDirectory2) {
|
193279
193139
|
return {
|
193280
193140
|
errors: [
|
@@ -193320,7 +193180,7 @@ __name(buildPluginFromFunctions, "buildPluginFromFunctions");
|
|
193320
193180
|
|
193321
193181
|
// src/pages/functions/filepath-routing.ts
|
193322
193182
|
init_import_meta_url();
|
193323
|
-
var
|
193183
|
+
var import_promises20 = __toESM(require("node:fs/promises"));
|
193324
193184
|
var import_node_path49 = __toESM(require("node:path"));
|
193325
193185
|
var import_esbuild4 = require("esbuild");
|
193326
193186
|
async function generateConfigFromFileTree({
|
@@ -193452,7 +193312,7 @@ async function forEachFile(baseDir, fn2) {
|
|
193452
193312
|
const returnValues = [];
|
193453
193313
|
while (isNotEmpty(searchPaths)) {
|
193454
193314
|
const cwd2 = searchPaths.shift();
|
193455
|
-
const dir = await
|
193315
|
+
const dir = await import_promises20.default.readdir(cwd2, { withFileTypes: true });
|
193456
193316
|
for (const entry of dir) {
|
193457
193317
|
const pathname = import_node_path49.default.join(cwd2, entry.name);
|
193458
193318
|
if (entry.isDirectory()) {
|
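Note: forEachFile above walks a directory tree with a queue of search paths and readdir({ withFileTypes: true }). A sketch of that traversal that simply collects file paths (helper name is illustrative):

import fs from "node:fs/promises";
import path from "node:path";

// Breadth-first walk of `baseDir`, returning every file path found.
async function listFiles(baseDir: string): Promise<string[]> {
  const files: string[] = [];
  const searchPaths = [baseDir];
  while (searchPaths.length > 0) {
    const cwd = searchPaths.shift()!;
    // `withFileTypes` returns Dirent objects, avoiding a stat call per entry.
    for (const entry of await fs.readdir(cwd, { withFileTypes: true })) {
      const pathname = path.join(cwd, entry.name);
      if (entry.isDirectory()) {
        searchPaths.push(pathname);
      } else {
        files.push(pathname);
      }
    }
  }
  return files;
}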
@@ -193497,7 +193357,7 @@ __name(convertSimpleParams, "convertSimpleParams");
|
|
193497
193357
|
|
193498
193358
|
// src/pages/functions/routes.ts
|
193499
193359
|
init_import_meta_url();
|
193500
|
-
var
|
193360
|
+
var import_promises21 = __toESM(require("node:fs/promises"));
|
193501
193361
|
var import_node_path50 = __toESM(require("node:path"));
|
193502
193362
|
|
193503
193363
|
// src/pages/functions/identifiers.ts
|
@@ -193577,7 +193437,7 @@ async function writeRoutesModule({
|
|
193577
193437
|
}) {
|
193578
193438
|
const { importMap, routes } = parseConfig(config, srcDir);
|
193579
193439
|
const routesModule = generateRoutesModule(importMap, routes);
|
193580
|
-
await
|
193440
|
+
await import_promises21.default.writeFile(outfile, routesModule);
|
193581
193441
|
return outfile;
|
193582
193442
|
}
|
193583
193443
|
__name(writeRoutesModule, "writeRoutesModule");
|
@@ -194113,7 +193973,7 @@ async function maybeReadPagesConfig(args) {
|
|
194113
193973
|
);
|
194114
193974
|
return {
|
194115
193975
|
...config,
|
194116
|
-
hash: (0, import_node_crypto7.createHash)("sha256").update(await (0,
|
193976
|
+
hash: (0, import_node_crypto7.createHash)("sha256").update(await (0, import_promises22.readFile)(configPath)).digest("hex")
|
194117
193977
|
};
|
194118
193978
|
} catch (e3) {
|
194119
193979
|
if (e3 instanceof FatalError && e3.code === EXIT_CODE_INVALID_PAGES_CONFIG) {
|
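Note: maybeReadPagesConfig above (and the Pages deploy handler further down) fingerprint the config file with a SHA-256 hex digest of its raw contents. Sketch:

import { createHash } from "node:crypto";
import { readFile } from "node:fs/promises";

// SHA-256 hex digest of a file's raw bytes, e.g. for change detection.
async function hashFile(filePath: string): Promise<string> {
  return createHash("sha256").update(await readFile(filePath)).digest("hex");
}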
@@ -194316,7 +194176,7 @@ var import_ink_select_input3 = __toESM(require_build4());
|
|
194316
194176
|
init_import_meta_url();
|
194317
194177
|
var import_node_crypto8 = require("node:crypto");
|
194318
194178
|
var import_node_fs31 = require("node:fs");
|
194319
|
-
var
|
194179
|
+
var import_promises23 = require("node:fs/promises");
|
194320
194180
|
var import_node_path55 = __toESM(require("node:path"));
|
194321
194181
|
var import_node_process12 = require("node:process");
|
194322
194182
|
var import_undici19 = __toESM(require_undici());
|
@@ -194466,7 +194326,7 @@ async function deploy2({
|
|
194466
194326
|
);
|
194467
194327
|
}
|
194468
194328
|
if (config !== void 0 && config.configPath !== void 0 && config.pages_build_output_dir) {
|
194469
|
-
const configHash = (0, import_node_crypto8.createHash)("sha256").update(await (0,
|
194329
|
+
const configHash = (0, import_node_crypto8.createHash)("sha256").update(await (0, import_promises23.readFile)(config.configPath)).digest("hex");
|
194470
194330
|
const outputDir = import_node_path55.default.relative(
|
194471
194331
|
process.cwd(),
|
194472
194332
|
config.pages_build_output_dir
|
@@ -195253,7 +195113,7 @@ __name(promptSelectExistingOrNewProject, "promptSelectExistingOrNewProject");
|
|
195253
195113
|
|
195254
195114
|
// src/pages/deployment-tails.ts
|
195255
195115
|
init_import_meta_url();
|
195256
|
-
var
|
195116
|
+
var import_promises24 = require("node:timers/promises");
|
195257
195117
|
var import_signal_exit8 = __toESM(require_signal_exit());
|
195258
195118
|
|
195259
195119
|
// src/tail/createTail.ts
|
@@ -195760,10 +195620,10 @@ async function Handler13({
|
|
195760
195620
|
while (tail.readyState !== tail.OPEN) {
|
195761
195621
|
switch (tail.readyState) {
|
195762
195622
|
case tail.CONNECTING:
|
195763
|
-
await (0,
|
195623
|
+
await (0, import_promises24.setTimeout)(100);
|
195764
195624
|
break;
|
195765
195625
|
case tail.CLOSING:
|
195766
|
-
await (0,
|
195626
|
+
await (0, import_promises24.setTimeout)(100);
|
195767
195627
|
break;
|
195768
195628
|
case tail.CLOSED:
|
195769
195629
|
await sendMetricsEvent("end log stream", {
|
@@ -196714,7 +196574,7 @@ __name(getBindingsFromArgs, "getBindingsFromArgs");
|
|
196714
196574
|
// src/pages/download-config.ts
|
196715
196575
|
init_import_meta_url();
|
196716
196576
|
var import_fs10 = require("fs");
|
196717
|
-
var
|
196577
|
+
var import_promises25 = require("node:fs/promises");
|
196718
196578
|
var import_toml6 = __toESM(require_toml());
|
196719
196579
|
var import_miniflare16 = require("miniflare");
|
196720
196580
|
async function toEnvironment(deploymentConfig, accountId) {
|
@@ -196827,7 +196687,7 @@ __name(toEnvironment, "toEnvironment");
|
|
196827
196687
|
async function writeWranglerToml(toml) {
|
196828
196688
|
let tomlString = import_toml6.default.stringify(toml);
|
196829
196689
|
tomlString = tomlString.split("\n").map((line) => line.trimStart()).join("\n");
|
196830
|
-
await (0,
|
196690
|
+
await (0, import_promises25.writeFile)(
|
196831
196691
|
"wrangler.toml",
|
196832
196692
|
`# Generated by Wrangler on ${/* @__PURE__ */ new Date()}
|
196833
196693
|
${tomlString}`
|
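Note: writeWranglerToml above serializes the config, strips the indentation the TOML serializer adds, and prepends a generated-on header before writing wrangler.toml. A sketch of that write step, taking the already serialized TOML string as input:

import { writeFile } from "node:fs/promises";

// Flush-left every line of the serialized TOML and prepend a header comment.
async function writeConfigFile(outPath: string, serializedToml: string): Promise<void> {
  const flushLeft = serializedToml
    .split("\n")
    .map((line) => line.trimStart())
    .join("\n");
  await writeFile(outPath, `# Generated on ${new Date()}\n${flushLeft}`);
}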
@@ -209629,7 +209489,7 @@ __name(closeSentry, "closeSentry");
|
|
209629
209489
|
|
209630
209490
|
// src/tail/index.ts
|
209631
209491
|
init_import_meta_url();
|
209632
|
-
var
|
209492
|
+
var import_promises26 = require("node:timers/promises");
|
209633
209493
|
var import_signal_exit9 = __toESM(require_signal_exit());
|
209634
209494
|
function tailOptions(yargs) {
|
209635
209495
|
return yargs.positional("worker", {
|
@@ -209734,10 +209594,10 @@ async function tailHandler(args) {
|
|
209734
209594
|
while (tail.readyState !== tail.OPEN) {
|
209735
209595
|
switch (tail.readyState) {
|
209736
209596
|
case tail.CONNECTING:
|
209737
|
-
await (0,
|
209597
|
+
await (0, import_promises26.setTimeout)(100);
|
209738
209598
|
break;
|
209739
209599
|
case tail.CLOSING:
|
209740
|
-
await (0,
|
209600
|
+
await (0, import_promises26.setTimeout)(100);
|
209741
209601
|
break;
|
209742
209602
|
case tail.CLOSED:
|
209743
209603
|
await sendMetricsEvent("end log stream", {
|
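Note: both tail handlers in this diff (Pages deployment tails and src/tail) poll the tail WebSocket's readyState with a 100 ms setTimeout from node:timers/promises until it is OPEN. A simplified sketch; the real code also reports metrics and raises a user-facing error on CLOSED.

import { setTimeout as sleep } from "node:timers/promises";

// Minimal shape of the socket used above; the numeric constants mirror the WebSocket spec.
interface TailSocket {
  readyState: number;
  CONNECTING: number;
  OPEN: number;
  CLOSING: number;
  CLOSED: number;
}

async function waitForOpen(tail: TailSocket): Promise<void> {
  while (tail.readyState !== tail.OPEN) {
    if (tail.readyState === tail.CLOSED) {
      throw new Error("Connection to the tail closed before it was established.");
    }
    // CONNECTING or CLOSING: check again shortly.
    await sleep(100);
  }
}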
@@ -209864,9 +209724,9 @@ var import_miniflare20 = require("miniflare");
|
|
209864
209724
|
// src/type-generation/runtime/index.ts
|
209865
209725
|
init_import_meta_url();
|
209866
209726
|
var import_fs14 = require("fs");
|
209867
|
-
var
|
209727
|
+
var import_promises27 = require("fs/promises");
|
209868
209728
|
var import_miniflare19 = require("miniflare");
|
209869
|
-
var import_workerd =
|
209729
|
+
var import_workerd = require("workerd");
|
209870
209730
|
var DEFAULT_OUTFILE_RELATIVE_PATH = "./.wrangler/types/runtime.d.ts";
|
209871
209731
|
async function generateRuntimeTypes({
|
209872
209732
|
config: { compatibility_date, compatibility_flags = [] },
|
@@ -209878,7 +209738,7 @@ async function generateRuntimeTypes({
|
|
209878
209738
|
await ensureDirectoryExists(outFile);
|
209879
209739
|
const header = `// Runtime types generated with workerd@${import_workerd.version} ${compatibility_date} ${compatibility_flags.join(",")}`;
|
209880
209740
|
try {
|
209881
|
-
const existingTypes = await (0,
|
209741
|
+
const existingTypes = await (0, import_promises27.readFile)(outFile, "utf8");
|
209882
209742
|
if (existingTypes.split("\n")[0] === header) {
|
209883
209743
|
logger.debug("Using cached runtime types: ", header);
|
209884
209744
|
return { outFile };
|
@@ -209895,7 +209755,7 @@ async function generateRuntimeTypes({
|
|
209895
209755
|
(flag) => !flag.includes("nodejs_compat")
|
209896
209756
|
)
|
209897
209757
|
});
|
209898
|
-
await (0,
|
209758
|
+
await (0, import_promises27.writeFile)(outFile, `${header}
|
209899
209759
|
${types}`, "utf8");
|
209900
209760
|
return {
|
209901
209761
|
outFile
|
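Note: generateRuntimeTypes above caches its output by writing a header line (workerd version, compatibility date and flags) as the first line of the .d.ts file and skipping regeneration when that header is unchanged. A generic sketch of that header-based cache (helper name is illustrative):

import { readFile, writeFile } from "node:fs/promises";

// Regenerate `outFile` only when its first line no longer matches `header`.
async function writeIfHeaderChanged(
  outFile: string,
  header: string,
  generate: () => Promise<string>
): Promise<void> {
  try {
    const existing = await readFile(outFile, "utf8");
    if (existing.split("\n")[0] === header) {
      return; // Cached output is still valid for this header.
    }
  } catch {
    // No existing file yet; fall through and generate it.
  }
  await writeFile(outFile, `${header}\n${await generate()}`, "utf8");
}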
@@ -212284,11 +212144,13 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
|
|
212284
212144
|
let bindingsPrinted = false;
|
212285
212145
|
try {
|
212286
212146
|
const body = createWorkerUploadForm(worker);
|
212287
|
-
const result = await
|
212288
|
-
|
212289
|
-
|
212290
|
-
|
212291
|
-
|
212147
|
+
const result = await retryOnError(
|
212148
|
+
async () => fetchResult(`${workerUrl}/versions`, {
|
212149
|
+
method: "POST",
|
212150
|
+
body,
|
212151
|
+
headers: await getMetricsUsageHeaders(config.send_metrics)
|
212152
|
+
})
|
212153
|
+
);
|
212292
212154
|
logger.log("Worker Startup Time:", result.startup_time_ms, "ms");
|
212293
212155
|
bindingsPrinted = true;
|
212294
212156
|
printBindings({ ...withoutStaticAssets, vars: maskedVars });
|
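Note: the hunk above wraps the versions upload (a fetchResult POST to `${workerUrl}/versions`) in retryOnError. retryOnError is internal to the bundle; the sketch below is a generic retry wrapper of the same shape, with the attempt count and delay chosen arbitrarily.

import { setTimeout as sleep } from "node:timers/promises";

// Retry an async action a few times, rethrowing the last error if every attempt fails.
async function retryOnError<T>(
  action: () => Promise<T>,
  attempts = 3,
  delayMs = 1000
): Promise<T> {
  let lastError: unknown;
  for (let attempt = 1; attempt <= attempts; attempt++) {
    try {
      return await action();
    } catch (error) {
      lastError = error;
      if (attempt < attempts) {
        await sleep(delayMs); // brief pause before the next attempt
      }
    }
  }
  throw lastError;
}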
@@ -214551,13 +214413,16 @@ use --persist-to=./wrangler-local-state to keep using the old path.`
|
|
214551
214413
|
if (assetsOptions && !args.assets) {
|
214552
214414
|
await assetsWatcher?.close();
|
214553
214415
|
if (assetsOptions) {
|
214416
|
+
const debouncedRerender = debounce(async () => {
|
214417
|
+
rerender(await getDevReactElement(config));
|
214418
|
+
}, 100);
|
214554
214419
|
assetsWatcher = (0, import_chokidar6.watch)(assetsOptions.directory, {
|
214555
214420
|
persistent: true,
|
214556
214421
|
ignoreInitial: true
|
214557
214422
|
}).on("all", async (eventName, changedPath) => {
|
214558
214423
|
const message = getAssetChangeMessage(eventName, changedPath);
|
214559
214424
|
logger.log(`\u{1F300} ${message}...`);
|
214560
|
-
|
214425
|
+
debouncedRerender();
|
214561
214426
|
});
|
214562
214427
|
}
|
214563
214428
|
}
|
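Note: the two dev-server hunks here debounce the Ink re-render triggered by asset watcher events, using a debounce helper like the one removed from src/pages/utils.ts earlier in this diff. A sketch of that helper plus a runnable usage example (the logging callback is illustrative):

// Collapse a burst of calls into one invocation, `delayMs` after the last call.
function debounce(fn: () => void, delayMs = 100): () => void {
  let timeoutId: ReturnType<typeof setTimeout> | undefined;
  return () => {
    if (timeoutId) {
      clearTimeout(timeoutId);
    }
    timeoutId = setTimeout(() => {
      fn();
    }, delayMs);
  };
}

// Example: five rapid watcher events produce a single re-render message.
const debouncedRerender = debounce(() => console.log("assets changed, re-rendering..."), 100);
for (let i = 0; i < 5; i++) {
  debouncedRerender();
}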
@@ -214671,13 +214536,16 @@ use --persist-to=./wrangler-local-state to keep using the old path.`
|
|
214671
214536
|
const devReactElement = (0, import_ink13.render)(await getDevReactElement(config));
|
214672
214537
|
rerender = devReactElement.rerender;
|
214673
214538
|
if (assetsOptions && !args.experimentalDevEnv) {
|
214539
|
+
const debouncedRerender = debounce(async () => {
|
214540
|
+
rerender(await getDevReactElement(config));
|
214541
|
+
}, 100);
|
214674
214542
|
assetsWatcher = (0, import_chokidar6.watch)(assetsOptions.directory, {
|
214675
214543
|
persistent: true,
|
214676
214544
|
ignoreInitial: true
|
214677
214545
|
}).on("all", async (eventName, filePath) => {
|
214678
214546
|
const message = getAssetChangeMessage(eventName, filePath);
|
214679
214547
|
logger.log(`\u{1F300} ${message}...`);
|
214680
|
-
|
214548
|
+
debouncedRerender();
|
214681
214549
|
});
|
214682
214550
|
}
|
214683
214551
|
return {
|
@@ -215950,7 +215818,7 @@ __name(ConfigController, "ConfigController");
|
|
215950
215818
|
// src/api/startDevWorker/LocalRuntimeController.ts
|
215951
215819
|
init_import_meta_url();
|
215952
215820
|
var import_node_crypto10 = require("node:crypto");
|
215953
|
-
var
|
215821
|
+
var import_promises28 = require("node:fs/promises");
|
215954
215822
|
var import_miniflare21 = require("miniflare");
|
215955
215823
|
async function getBinaryFileContents2(file) {
|
215956
215824
|
if ("contents" in file) {
|
@@ -215959,7 +215827,7 @@ async function getBinaryFileContents2(file) {
|
|
215959
215827
|
}
|
215960
215828
|
return Buffer.from(file.contents);
|
215961
215829
|
}
|
215962
|
-
return (0,
|
215830
|
+
return (0, import_promises28.readFile)(file.path);
|
215963
215831
|
}
|
215964
215832
|
__name(getBinaryFileContents2, "getBinaryFileContents");
|
215965
215833
|
async function getTextFileContents(file) {
|
@@ -215972,7 +215840,7 @@ async function getTextFileContents(file) {
|
|
215972
215840
|
}
|
215973
215841
|
return Buffer.from(file.contents).toString();
|
215974
215842
|
}
|
215975
|
-
return (0,
|
215843
|
+
return (0, import_promises28.readFile)(file.path, "utf8");
|
215976
215844
|
}
|
215977
215845
|
__name(getTextFileContents, "getTextFileContents");
|
215978
215846
|
var DEFAULT_WORKER_NAME2 = "worker";
|
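Note: getBinaryFileContents/getTextFileContents above accept a file that either carries inline contents or a path on disk. A sketch of that union-typed file source:

import { readFile } from "node:fs/promises";

// A file is either provided inline or referenced by path.
type FileSource = { contents: string | Uint8Array } | { path: string };

async function getBinaryContents(file: FileSource): Promise<Buffer> {
  if ("contents" in file) {
    return Buffer.from(file.contents);
  }
  return readFile(file.path);
}

async function getTextContents(file: FileSource): Promise<string> {
  if ("contents" in file) {
    return Buffer.from(file.contents).toString();
  }
  return readFile(file.path, "utf8");
}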
@@ -217971,4 +217839,3 @@ yargs-parser/build/lib/index.js:
|
|
217971
217839
|
* SPDX-License-Identifier: ISC
|
217972
217840
|
*)
|
217973
217841
|
*/
|
217974
|
-
//# sourceMappingURL=cli.js.map
|