wrangler 3.78.11 → 3.79.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +4 -4
- package/wrangler-dist/cli.js +287 -424
- package/wrangler-dist/cli.js.map +0 -7
package/wrangler-dist/cli.js
CHANGED
@@ -19059,7 +19059,7 @@ var require_lodash = __commonJS({
  if (typeof func != "function") {
  throw new TypeError2(FUNC_ERROR_TEXT);
  }
- return
+ return setTimeout5(function() {
  func.apply(undefined2, args);
  }, wait);
  }
@@ -21045,7 +21045,7 @@ var require_lodash = __commonJS({
  }
  __name(safeGet, "safeGet");
  var setData = shortOut(baseSetData);
- var
+ var setTimeout5 = ctxSetTimeout || function(func, wait) {
  return root.setTimeout(func, wait);
  };
  var setToString = shortOut(baseSetToString);
@@ -21925,7 +21925,7 @@ var require_lodash = __commonJS({
  __name(invokeFunc, "invokeFunc");
  function leadingEdge(time) {
  lastInvokeTime = time;
- timerId =
+ timerId = setTimeout5(timerExpired, wait);
  return leading ? invokeFunc(time) : result2;
  }
  __name(leadingEdge, "leadingEdge");
@@ -21944,7 +21944,7 @@ var require_lodash = __commonJS({
  if (shouldInvoke(time)) {
  return trailingEdge(time);
  }
- timerId =
+ timerId = setTimeout5(timerExpired, remainingWait(time));
  }
  __name(timerExpired, "timerExpired");
  function trailingEdge(time) {
@@ -21979,12 +21979,12 @@ var require_lodash = __commonJS({
  }
  if (maxing) {
  clearTimeout2(timerId);
- timerId =
+ timerId = setTimeout5(timerExpired, wait);
  return invokeFunc(lastCallTime);
  }
  }
  if (timerId === undefined2) {
- timerId =
+ timerId = setTimeout5(timerExpired, wait);
  }
  return result2;
  }
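In the five lodash hunks above, the new side routes timer scheduling through a `setTimeout5` helper that prefers a context-provided `ctxSetTimeout` and falls back to `root.setTimeout`; `delay` and `debounce` then schedule their timers through it. A minimal standalone sketch of that injection pattern follows; `makeDelay` and `Schedule` are hypothetical names used only for illustration.

// Sketch of the scheduling pattern visible in the + lines: an injectable
// setTimeout with a fallback to the global one.
type Schedule = (fn: () => void, wait: number) => ReturnType<typeof setTimeout>;

function makeDelay(ctxSetTimeout?: Schedule) {
  // Use the host-provided scheduler when present, otherwise the global timer.
  const schedule: Schedule = ctxSetTimeout ?? ((fn, wait) => setTimeout(fn, wait));
  return function delay<A extends unknown[]>(func: (...args: A) => void, wait: number, ...args: A) {
    if (typeof func !== "function") {
      throw new TypeError("Expected a function");
    }
    return schedule(() => func(...args), wait);
  };
}

// e.g. makeDelay()(console.log, 100, "hello") logs "hello" after ~100 ms.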
@@ -113156,7 +113156,7 @@ A subrequest is a call to fetch(), a redirect, or a call to any Cache API method
  var import_assert32 = __toModule(require("assert"));
  var import_async_hooks22 = __toModule(require("async_hooks"));
  var import_timers = __toModule(require("timers"));
- var
+ var import_promises29 = __toModule(require("timers/promises"));
  var inputGateStorage = new import_async_hooks22.AsyncLocalStorage();
  var outputGateStorage = new import_async_hooks22.AsyncLocalStorage();
  function waitForOpenInputGate() {
@@ -113198,7 +113198,7 @@ A subrequest is a call to fetch(), a redirect, or a call to any Cache API method
  return inputGateStorage.run(this, closure);
  }
  async waitForOpen() {
- await (0,
+ await (0, import_promises29.setImmediate)();
  if (this.#lockCount === 0)
  return;
  return new Promise((resolve22) => this.#resolveQueue.push(resolve22));
@@ -113223,7 +113223,7 @@ A subrequest is a call to fetch(), a redirect, or a call to any Cache API method
  this.#lockCount--;
  while (this.#lockCount === 0 && this.#resolveQueue.length) {
  this.#resolveQueue.shift()();
- await (0,
+ await (0, import_promises29.setImmediate)();
  }
  if (this.#parent)
  return this.#parent.#unlock();
@@ -136216,7 +136216,7 @@ var require_src8 = __commonJS({
  }
  __name(formatSize, "formatSize");
  var import_assert22 = __toModule(require("assert"));
- var
+ var import_promises29 = __toModule(require("fs/promises"));
  var import_path22 = __toModule(require("path"));
  var import_shared8 = __toModule(require_src6());
  var import_dotenv2 = __toModule(require_main4());
@@ -137953,7 +137953,7 @@ The \`binding\` key should be used to define binding names.`);
  if (envPath) {
  envPath = import_path22.default.resolve(this.ctx.rootPath, envPath);
  try {
- Object.assign(bindings, import_dotenv2.default.parse(await
+ Object.assign(bindings, import_dotenv2.default.parse(await import_promises29.default.readFile(envPath, "utf8")));
  } catch (e3) {
  if (!(e3.code === "ENOENT" && this.envPath === true))
  throw e3;
@@ -137963,21 +137963,21 @@ The \`binding\` key should be used to define binding names.`);
  if (this.wasmBindings) {
  for (let [name2, wasmPath] of Object.entries(this.wasmBindings)) {
  wasmPath = import_path22.default.resolve(this.ctx.rootPath, wasmPath);
- bindings[name2] = new WebAssembly.Module(await
+ bindings[name2] = new WebAssembly.Module(await import_promises29.default.readFile(wasmPath));
  watch9.push(wasmPath);
  }
  }
  if (this.textBlobBindings) {
  for (let [name2, textPath] of Object.entries(this.textBlobBindings)) {
  textPath = import_path22.default.resolve(this.ctx.rootPath, textPath);
- bindings[name2] = await
+ bindings[name2] = await import_promises29.default.readFile(textPath, "utf-8");
  watch9.push(textPath);
  }
  }
  if (this.dataBlobBindings) {
  for (let [name2, dataPath] of Object.entries(this.dataBlobBindings)) {
  dataPath = import_path22.default.resolve(this.ctx.rootPath, dataPath);
- const fileContent = await
+ const fileContent = await import_promises29.default.readFile(dataPath);
  bindings[name2] = (0, import_shared8.viewToBuffer)(fileContent);
  watch9.push(dataPath);
  }
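The two hunks above complete previously truncated `await` expressions: the bundled Miniflare bindings plugin reads `.env`, Wasm, text-blob and data-blob bindings with `fs/promises.readFile` after resolving each path against the plugin's root. The sketch below illustrates that read loop in isolation; it is not Miniflare's actual plugin API, only the pattern shown in the + lines.

// Hedged sketch: resolve binding paths against a root and read them with fs/promises.
import { readFile } from "node:fs/promises";
import path from "node:path";

async function loadBlobBindings(
  rootPath: string,
  textBlobBindings: Record<string, string> = {},
  dataBlobBindings: Record<string, string> = {}
): Promise<Record<string, string | Buffer>> {
  const bindings: Record<string, string | Buffer> = {};
  for (const [name, textPath] of Object.entries(textBlobBindings)) {
    // Text blobs are read as UTF-8 strings.
    bindings[name] = await readFile(path.resolve(rootPath, textPath), "utf-8");
  }
  for (const [name, dataPath] of Object.entries(dataBlobBindings)) {
    // Data blobs stay as raw bytes (Buffer).
    bindings[name] = await readFile(path.resolve(rootPath, dataPath));
  }
  return bindings;
}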
@@ -138213,7 +138213,7 @@ Make sure "${service}" is mounted so Miniflare knows where to find it.`);
  }
  __name(_populateBuildConfig, "_populateBuildConfig");
  var import_buffer = __toModule(require("buffer"));
- var
+ var import_promises210 = __toModule(require("fs/promises"));
  var import_path42 = __toModule(require("path"));
  var import_web5 = __toModule(require("stream/web"));
  var import_web6 = __toModule(require("stream/web"));
@@ -138598,7 +138598,7 @@ Make sure "${service}" is mounted so Miniflare knows where to find it.`);
  if (packagePath) {
  packagePath = import_path42.default.resolve(this.ctx.rootPath, packagePath);
  try {
- const pkg = JSON.parse(await
+ const pkg = JSON.parse(await import_promises210.default.readFile(packagePath, "utf8"));
  scriptPath3 = this.modules ? pkg.module : pkg.main;
  scriptPath3 &&= import_path42.default.resolve(import_path42.default.dirname(packagePath), scriptPath3);
  } catch (e3) {
@@ -138610,7 +138610,7 @@ Make sure "${service}" is mounted so Miniflare knows where to find it.`);
  }
  if (scriptPath3 !== void 0) {
  scriptPath3 = import_path42.default.resolve(this.ctx.rootPath, scriptPath3);
- const code = await
+ const code = await import_promises210.default.readFile(scriptPath3, "utf8");
  watch9.push(scriptPath3);
  return {
  globals,
@@ -149426,166 +149426,6 @@ var require_dist7 = __commonJS({
  }
  });

- // ../../node_modules/.pnpm/workerd@1.20240925.0/node_modules/workerd/lib/main.js
- var require_main5 = __commonJS({
- "../../node_modules/.pnpm/workerd@1.20240925.0/node_modules/workerd/lib/main.js"(exports2, module3) {
- init_import_meta_url();
- var __create2 = Object.create;
- var __defProp2 = Object.defineProperty;
- var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor;
- var __getOwnPropNames2 = Object.getOwnPropertyNames;
- var __getProtoOf2 = Object.getPrototypeOf;
- var __hasOwnProp2 = Object.prototype.hasOwnProperty;
- var __export2 = /* @__PURE__ */ __name((target, all2) => {
- for (var name2 in all2)
- __defProp2(target, name2, { get: all2[name2], enumerable: true });
- }, "__export");
- var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => {
- if (from && typeof from === "object" || typeof from === "function") {
- for (let key of __getOwnPropNames2(from))
- if (!__hasOwnProp2.call(to, key) && key !== except)
- __defProp2(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable });
- }
- return to;
- }, "__copyProps");
- var __toESM2 = /* @__PURE__ */ __name((mod, isNodeMode, target) => (target = mod != null ? __create2(__getProtoOf2(mod)) : {}, __copyProps2(
- // If the importer is in node compatibility mode or this is not an ESM
- // file that has been converted to a CommonJS file using a Babel-
- // compatible transform (i.e. "__esModule" has not been set), then set
- // "default" to the CommonJS "module.exports" for node compatibility.
- isNodeMode || !mod || !mod.__esModule ? __defProp2(target, "default", { value: mod, enumerable: true }) : target,
- mod
- )), "__toESM");
- var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS");
- var node_path_exports = {};
- __export2(node_path_exports, {
- compatibilityDate: () => compatibilityDate,
- default: () => node_path_default,
- version: () => version4
- });
- module3.exports = __toCommonJS2(node_path_exports);
- var import_fs16 = __toESM2(require("fs"));
- var import_os4 = __toESM2(require("os"));
- var import_path20 = __toESM2(require("path"));
- var knownPackages = {
- "darwin arm64 LE": "@cloudflare/workerd-darwin-arm64",
- "darwin x64 LE": "@cloudflare/workerd-darwin-64",
- "linux arm64 LE": "@cloudflare/workerd-linux-arm64",
- "linux x64 LE": "@cloudflare/workerd-linux-64",
- "win32 x64 LE": "@cloudflare/workerd-windows-64"
- };
- var maybeExeExtension = process.platform === "win32" ? ".exe" : "";
- function pkgAndSubpathForCurrentPlatform() {
- let pkg;
- let subpath;
- let platformKey = `${process.platform} ${import_os4.default.arch()} ${import_os4.default.endianness()}`;
- if (platformKey in knownPackages) {
- pkg = knownPackages[platformKey];
- subpath = `bin/workerd${maybeExeExtension}`;
- } else {
- throw new Error(`Unsupported platform: ${platformKey}`);
- }
- return { pkg, subpath };
- }
- __name(pkgAndSubpathForCurrentPlatform, "pkgAndSubpathForCurrentPlatform");
- function pkgForSomeOtherPlatform() {
- const libMain = require.resolve("workerd");
- const nodeModulesDirectory = import_path20.default.dirname(
- import_path20.default.dirname(import_path20.default.dirname(libMain))
- );
- if (import_path20.default.basename(nodeModulesDirectory) === "node_modules") {
- for (const unixKey in knownPackages) {
- try {
- const pkg = knownPackages[unixKey];
- if (import_fs16.default.existsSync(import_path20.default.join(nodeModulesDirectory, pkg)))
- return pkg;
- } catch {
- }
- }
- }
- return null;
- }
- __name(pkgForSomeOtherPlatform, "pkgForSomeOtherPlatform");
- function downloadedBinPath(pkg, subpath) {
- const libDir = import_path20.default.dirname(require.resolve("workerd"));
- return import_path20.default.join(libDir, `downloaded-${pkg.replace("/", "-")}-${import_path20.default.basename(subpath)}${maybeExeExtension}`);
- }
- __name(downloadedBinPath, "downloadedBinPath");
- function generateBinPath() {
- const { pkg, subpath } = pkgAndSubpathForCurrentPlatform();
- let binPath2;
- try {
- binPath2 = require.resolve(`${pkg}/${subpath}`);
- } catch (e3) {
- binPath2 = downloadedBinPath(pkg, subpath);
- if (!import_fs16.default.existsSync(binPath2)) {
- try {
- require.resolve(pkg);
- } catch {
- const otherPkg = pkgForSomeOtherPlatform();
- if (otherPkg) {
- throw new Error(`
- You installed workerd on another platform than the one you're currently using.
- This won't work because workerd is written with native code and needs to
- install a platform-specific binary executable.
-
- Specifically the "${otherPkg}" package is present but this platform
- needs the "${pkg}" package instead. People often get into this
- situation by installing workerd on macOS and copying "node_modules"
- into a Docker image that runs Linux.
-
- If you are installing with npm, you can try not copying the "node_modules"
- directory when you copy the files over, and running "npm ci" or "npm install"
- on the destination platform after the copy. Or you could consider using yarn
- instead which has built-in support for installing a package on multiple
- platforms simultaneously.
-
- If you are installing with yarn, you can try listing both this platform and the
- other platform in your ".yarnrc.yml" file using the "supportedArchitectures"
- feature: https://yarnpkg.com/configuration/yarnrc/#supportedArchitectures
- Keep in mind that this means multiple copies of workerd will be present.
- `);
- }
- throw new Error(`The package "${pkg}" could not be found, and is needed by workerd.
-
- If you are installing workerd with npm, make sure that you don't specify the
- "--no-optional" flag. The "optionalDependencies" package.json feature is used
- by workerd to install the correct binary executable for your current platform.`);
- }
- throw e3;
- }
- }
- let pnpapi;
- try {
- pnpapi = require("pnpapi");
- } catch (e3) {
- }
- if (pnpapi) {
- const root = pnpapi.getPackageInformation(pnpapi.topLevel).packageLocation;
- const binTargetPath = import_path20.default.join(
- root,
- "node_modules",
- ".cache",
- "workerd",
- `pnpapi-${pkg.replace("/", "-")}-${"1.20240925.0"}-${import_path20.default.basename(subpath)}`
- );
- if (!import_fs16.default.existsSync(binTargetPath)) {
- import_fs16.default.mkdirSync(import_path20.default.dirname(binTargetPath), { recursive: true });
- import_fs16.default.copyFileSync(binPath2, binTargetPath);
- import_fs16.default.chmodSync(binTargetPath, 493);
- }
- return { binPath: binTargetPath };
- }
- return { binPath: binPath2 };
- }
- __name(generateBinPath, "generateBinPath");
- var { binPath } = generateBinPath();
- var node_path_default = binPath;
- var compatibilityDate = "2024-09-25";
- var version4 = "1.20240925.0";
- }
- });
-
  // src/cli.ts
  var cli_exports2 = {};
  __export(cli_exports2, {
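The 160 removed lines above are the previously inlined copy of workerd's lib/main.js binary locator: it keys the @cloudflare/workerd-* optional packages by platform, architecture and endianness, then resolves bin/workerd from the matching package. A standalone sketch of that lookup, taken from the removed code, follows.

// Platform lookup as implemented in the removed loader above.
import os from "node:os";

const knownPackages: Record<string, string> = {
  "darwin arm64 LE": "@cloudflare/workerd-darwin-arm64",
  "darwin x64 LE": "@cloudflare/workerd-darwin-64",
  "linux arm64 LE": "@cloudflare/workerd-linux-arm64",
  "linux x64 LE": "@cloudflare/workerd-linux-64",
  "win32 x64 LE": "@cloudflare/workerd-windows-64",
};

function pkgAndSubpathForCurrentPlatform(): { pkg: string; subpath: string } {
  const exe = process.platform === "win32" ? ".exe" : "";
  const platformKey = `${process.platform} ${os.arch()} ${os.endianness()}`;
  const pkg = knownPackages[platformKey];
  if (pkg === undefined) {
    throw new Error(`Unsupported platform: ${platformKey}`);
  }
  // The removed code then resolves `${pkg}/${subpath}` to find the executable.
  return { pkg, subpath: `bin/workerd${exe}` };
}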
@@ -151676,7 +151516,7 @@ __name(fakeResolvedInput, "fakeResolvedInput");
  init_import_meta_url();
  var import_node_assert13 = __toESM(require("node:assert"));
  var import_node_fs15 = require("node:fs");
- var
+ var import_promises10 = require("node:fs/promises");
  var path22 = __toESM(require("node:path"));

  // ../workers-shared/index.ts
@@ -158104,7 +157944,7 @@ var import_undici3 = __toESM(require_undici());

  // package.json
  var name = "wrangler";
- var version = "3.
+ var version = "3.79.0";

  // src/user/index.ts
  init_import_meta_url();
@@ -168881,6 +168721,22 @@ ${dashLink}`);
  }
  __name(triggersDeploy, "triggersDeploy");

+ // src/utils/retry.ts
+ init_import_meta_url();
+ var import_promises7 = require("node:timers/promises");
+ async function retryOnError(action, backoff = 2e3, attempts = 3) {
+ try {
+ return await action();
+ } catch (err) {
+ if (attempts <= 1) {
+ throw err;
+ }
+ await (0, import_promises7.setTimeout)(backoff);
+ return retryOnError(action, backoff, attempts - 1);
+ }
+ }
+ __name(retryOnError, "retryOnError");
+
  // src/versions/api.ts
  init_import_meta_url();
  async function fetchVersion(accountId, workerName, versionId, versionCache) {
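The added src/utils/retry.ts helper retries a failing async action with a fixed back-off: up to `attempts` tries, sleeping `backoff` milliseconds between them via node:timers/promises. The next two hunks wrap the version-upload and script-upload fetchResult calls in it. A small usage sketch follows; `uploadOnce` is a hypothetical flaky call introduced only for illustration.

import { setTimeout as sleep } from "node:timers/promises";

// Same shape as the bundled retryOnError above: recurse until attempts run out.
async function retryOnError<T>(action: () => Promise<T>, backoff = 2000, attempts = 3): Promise<T> {
  try {
    return await action();
  } catch (err) {
    if (attempts <= 1) {
      throw err;
    }
    await sleep(backoff);
    return retryOnError(action, backoff, attempts - 1);
  }
}

// Hypothetical flaky upload: retried up to 3 times, 2 seconds apart.
declare function uploadOnce(): Promise<{ id: string }>;
async function uploadWithRetry() {
  return retryOnError(() => uploadOnce());
}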
@@ -171730,13 +171586,15 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
  try {
  let result;
  if (canUseNewVersionsDeploymentsApi) {
- const versionResult = await
-
-
-
-
-
-
+ const versionResult = await retryOnError(
+ async () => fetchResult(
+ `/accounts/${accountId}/workers/scripts/${scriptName}/versions`,
+ {
+ method: "POST",
+ body: createWorkerUploadForm(worker),
+ headers: await getMetricsUsageHeaders(config.send_metrics)
+ }
+ )
  );
  const versionMap = /* @__PURE__ */ new Map();
  versionMap.set(versionResult.id, 100);
@@ -171764,19 +171622,21 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
  startup_time_ms: versionResult.startup_time_ms
  };
  } else {
- result = await
-
-
-
-
-
-
-
-
-
-
-
-
+ result = await retryOnError(
+ async () => fetchResult(
+ workerUrl,
+ {
+ method: "PUT",
+ body: createWorkerUploadForm(worker),
+ headers: await getMetricsUsageHeaders(config.send_metrics)
+ },
+ new import_node_url8.URLSearchParams({
+ include_subdomain_availability: "true",
+ // pass excludeScript so the whole body of the
+ // script doesn't get included in the response
+ excludeScript: "true"
+ })
+ )
  );
  }
  if (result.startup_time_ms) {
@@ -172113,14 +171973,14 @@ init_hash();

  // src/pages/upload.tsx
  init_import_meta_url();
- var
+ var import_promises9 = require("node:fs/promises");
  var import_node_path23 = require("node:path");
  var import_ink = __toESM(require_build2());
  var import_ink_spinner = __toESM(require_build3());

  // src/pages/validate.tsx
  init_import_meta_url();
- var
+ var import_promises8 = require("node:fs/promises");
  var import_node_path22 = require("node:path");
  var import_mime2 = __toESM(require_mime());
  var import_minimatch = __toESM(require_minimatch());
@@ -172154,12 +172014,12 @@ var validate = /* @__PURE__ */ __name(async (args) => {
  ].map((pattern) => new import_minimatch.Minimatch(pattern));
  const directory = (0, import_node_path22.resolve)(args.directory);
  const walk = /* @__PURE__ */ __name(async (dir, fileMap2 = /* @__PURE__ */ new Map(), startingDir = dir) => {
- const files = await (0,
+ const files = await (0, import_promises8.readdir)(dir);
  await Promise.all(
  files.map(async (file) => {
  const filepath = (0, import_node_path22.join)(dir, file);
  const relativeFilepath = (0, import_node_path22.relative)(startingDir, filepath);
- const filestat = await (0,
+ const filestat = await (0, import_promises8.stat)(filepath);
  for (const minimatch of IGNORE_LIST) {
  if (minimatch.match(relativeFilepath)) {
  return;
@@ -172242,8 +172102,8 @@ var Handler2 = /* @__PURE__ */ __name(async ({
  skipCaching: skipCaching ?? false
  });
  if (outputManifestPath) {
- await (0,
- await (0,
+ await (0, import_promises9.mkdir)((0, import_node_path23.dirname)(outputManifestPath), { recursive: true });
+ await (0, import_promises9.writeFile)(outputManifestPath, JSON.stringify(manifest));
  }
  logger.log(`\u2728 Upload complete!`);
  }, "Handler");
@@ -172331,7 +172191,7 @@ var upload = /* @__PURE__ */ __name(async (args) => {
  const payload = await Promise.all(
  bucket.files.map(async (file) => ({
  key: file.hash,
- value: (await (0,
+ value: (await (0, import_promises9.readFile)(file.path)).toString("base64"),
  metadata: {
  contentType: file.contentType
  },
@@ -172561,7 +172421,7 @@ var syncAssets = /* @__PURE__ */ __name(async (accountId, scriptName, assetDirec
  payload.append(
  manifestEntry[1].hash,
  new import_undici7.File(
- [(await (0,
+ [(await (0, import_promises10.readFile)(absFilePath)).toString("base64")],
  manifestEntry[1].hash,
  {
  type: getContentType(absFilePath)
@@ -172644,7 +172504,7 @@ Assets already uploaded have been saved, so the next attempt will automatically 
  return completionJwt;
  }, "syncAssets");
  var buildAssetManifest = /* @__PURE__ */ __name(async (dir) => {
- const files = await (0,
+ const files = await (0, import_promises10.readdir)(dir, { recursive: true });
  const manifest = {};
  let counter = 0;
  const ignoreFn = await createAssetIgnoreFunction(dir);
@@ -172655,7 +172515,7 @@ var buildAssetManifest = /* @__PURE__ */ __name(async (dir) => {
  return;
  }
  const filepath = path22.join(dir, relativeFilepath);
- const filestat = await (0,
+ const filestat = await (0, import_promises10.stat)(filepath);
  if (filestat.isSymbolicLink() || filestat.isDirectory()) {
  return;
  } else {
@@ -172770,7 +172630,7 @@ async function createAssetIgnoreFunction(dir) {
  if (!(0, import_node_fs15.existsSync)(cfAssetIgnorePath)) {
  return null;
  }
- const ignorePatterns = (await (0,
+ const ignorePatterns = (await (0, import_promises10.readFile)(cfAssetIgnorePath, { encoding: "utf8" })).split("\n");
  ignorePatterns.push(CF_ASSETS_IGNORE_FILENAME);
  return createPatternMatcher(ignorePatterns, true);
  }
@@ -173042,7 +172902,7 @@ __name(generateAddScriptNameExamples, "generateAddScriptNameExamples");
  init_import_meta_url();
  var import_node_events = __toESM(require("node:events"));
  var import_node_fs17 = require("node:fs");
- var
+ var import_promises11 = require("node:fs/promises");
  var import_node_http2 = require("node:http");
  var import_node_net = __toESM(require("node:net"));
  var import_node_path29 = __toESM(require("node:path"));
@@ -173063,17 +172923,17 @@ var globalWatcher;
  var globalWorkers;
  var heartbeats = /* @__PURE__ */ new Map();
  async function loadWorkerDefinitions() {
- await (0,
+ await (0, import_promises11.mkdir)(DEV_REGISTRY_PATH, { recursive: true });
  globalWorkers ??= {};
  const newWorkers = /* @__PURE__ */ new Set();
- const workerDefinitions = await (0,
+ const workerDefinitions = await (0, import_promises11.readdir)(DEV_REGISTRY_PATH);
  for (const workerName of workerDefinitions) {
  try {
- const file = await (0,
+ const file = await (0, import_promises11.readFile)(
  import_node_path29.default.join(DEV_REGISTRY_PATH, workerName),
  "utf8"
  );
- const stats = await (0,
+ const stats = await (0, import_promises11.stat)(import_node_path29.default.join(DEV_REGISTRY_PATH, workerName));
  if (stats.mtime.getTime() < Date.now() - 6e5) {
  await unregisterWorker(workerName);
  } else {
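The dev-registry hunk above switches directory and file access to named node:fs/promises imports: each locally running Worker is registered as a JSON file under DEV_REGISTRY_PATH, and entries whose mtime is older than ten minutes (6e5 ms) are unregistered as stale. The simplified read-path sketch below is illustrative only; the registry location and definition shape here are assumptions, not wrangler's actual values.

import { mkdir, readdir, readFile, stat } from "node:fs/promises";
import os from "node:os";
import path from "node:path";

// Assumed location for this sketch; wrangler computes its own registry path.
const DEV_REGISTRY_PATH = path.join(os.homedir(), ".wrangler", "registry");
const STALE_MS = 6e5; // ten minutes, as in the hunk above

async function loadWorkerDefinitions(): Promise<Record<string, unknown>> {
  await mkdir(DEV_REGISTRY_PATH, { recursive: true });
  const workers: Record<string, unknown> = {};
  for (const workerName of await readdir(DEV_REGISTRY_PATH)) {
    const filePath = path.join(DEV_REGISTRY_PATH, workerName);
    const stats = await stat(filePath);
    if (stats.mtime.getTime() < Date.now() - STALE_MS) {
      continue; // stale entry; the real code unregisters it instead
    }
    workers[workerName] = JSON.parse(await readFile(filePath, "utf8"));
  }
  return workers;
}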
@@ -173179,8 +173039,8 @@ async function registerWorker(name2, definition) {
  if (existingHeartbeat) {
  clearInterval(existingHeartbeat);
  }
- await (0,
- await (0,
+ await (0, import_promises11.mkdir)(DEV_REGISTRY_PATH, { recursive: true });
+ await (0, import_promises11.writeFile)(
  import_node_path29.default.join(DEV_REGISTRY_PATH, name2),
  // We don't currently do anything with the stored Wrangler version,
  // but if we need to make breaking changes to this format in the future
@@ -173218,7 +173078,7 @@ __name(registerWorker, "registerWorker");
  async function unregisterWorker(name2) {
  if (getFlag("FILE_BASED_REGISTRY")) {
  try {
- await (0,
+ await (0, import_promises11.unlink)(import_node_path29.default.join(DEV_REGISTRY_PATH, name2));
  const existingHeartbeat = heartbeats.get(name2);
  if (existingHeartbeat) {
  clearInterval(existingHeartbeat);
@@ -175939,9 +175799,7 @@ function DevSession(props) {
  )
  });
  if (!props.local && (props.bindings.queues?.length || props.queueConsumers?.length)) {
- logger.warn(
- "Queues are currently in Beta and are not supported in wrangler dev remote mode."
- );
+ logger.warn("Queues are not yet supported in wrangler dev remote mode.");
  }
  const classNamesWhichUseSQLite = getClassNamesWhichUseSQLite(
  props.migrations
@@ -176431,7 +176289,7 @@ __name(registerDevHotKeys, "registerDevHotKeys");
  // src/dev/start-server.ts
  init_import_meta_url();
  var import_node_events2 = require("node:events");
- var
+ var import_promises12 = require("node:fs/promises");
  var path39 = __toESM(require("node:path"));
  var util3 = __toESM(require("node:util"));
  var import_signal_exit7 = __toESM(require_signal_exit());
@@ -176822,7 +176680,7 @@ async function runEsbuild({
  dependencies: bundleResult?.dependencies ?? {},
  sourceMapPath: bundleResult?.sourceMapPath,
  sourceMapMetadata: bundleResult?.sourceMapMetadata,
- entrypointSource: await (0,
+ entrypointSource: await (0, import_promises12.readFile)(entrypointPath, "utf8")
  };
  }
  __name(runEsbuild, "runEsbuild");
@@ -176900,6 +176758,66 @@ async function startLocalServer(props) {
  }
  __name(startLocalServer, "startLocalServer");

+ // src/pages/utils.ts
+ init_import_meta_url();
+ var import_node_path38 = __toESM(require("node:path"));
+ var RUNNING_BUILDERS = [];
+ var CLEANUP_CALLBACKS = [];
+ var CLEANUP = /* @__PURE__ */ __name(() => {
+ CLEANUP_CALLBACKS.forEach((callback) => callback());
+ RUNNING_BUILDERS.forEach((builder) => builder.stop?.());
+ }, "CLEANUP");
+ function isUrl(maybeUrl) {
+ if (!maybeUrl) {
+ return false;
+ }
+ try {
+ new URL(maybeUrl);
+ return true;
+ } catch (e3) {
+ return false;
+ }
+ }
+ __name(isUrl, "isUrl");
+ var projectRootCacheCwd;
+ var projectRootCache;
+ var tmpDirCacheProjectRoot;
+ var tmpDirCache;
+ function getPagesProjectRoot() {
+ const cwd2 = process.cwd();
+ if (projectRootCache !== void 0 && projectRootCacheCwd === cwd2) {
+ return projectRootCache;
+ }
+ const packagePath = findUpSync("package.json");
+ projectRootCache = packagePath ? import_node_path38.default.dirname(packagePath) : process.cwd();
+ projectRootCacheCwd = cwd2;
+ return projectRootCache;
+ }
+ __name(getPagesProjectRoot, "getPagesProjectRoot");
+ function getPagesTmpDir() {
+ const projectRoot = getPagesProjectRoot();
+ if (tmpDirCache !== void 0 && tmpDirCacheProjectRoot === projectRoot) {
+ return tmpDirCache;
+ }
+ const tmpDir = getWranglerTmpDir(getPagesProjectRoot(), "pages");
+ tmpDirCache = tmpDir.path;
+ tmpDirCacheProjectRoot = projectRoot;
+ return tmpDirCache;
+ }
+ __name(getPagesTmpDir, "getPagesTmpDir");
+ function debounce(fn2, delayMs = 100) {
+ let crrTimeoutId;
+ return () => {
+ if (crrTimeoutId) {
+ clearTimeout(crrTimeoutId);
+ }
+ crrTimeoutId = setTimeout(() => {
+ fn2();
+ }, delayMs);
+ };
+ }
+ __name(debounce, "debounce");
+
  // src/utils/collectKeyValues.ts
  init_import_meta_url();
  function collectKeyValues(array) {
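Among the helpers added in src/pages/utils.ts above, `debounce` collapses a burst of calls into a single trailing call after `delayMs` of quiet, and `getPagesProjectRoot`/`getPagesTmpDir` memoise their results per working directory. A usage sketch of the debounce part follows; the "rebuild" example is hypothetical.

// Same behaviour as the debounce added above: only the last call in a burst fires.
function debounce(fn: () => void, delayMs = 100): () => void {
  let timeoutId: ReturnType<typeof setTimeout> | undefined;
  return () => {
    if (timeoutId) {
      clearTimeout(timeoutId);
    }
    timeoutId = setTimeout(() => fn(), delayMs);
  };
}

// e.g. coalesce a burst of file-change events into a single rebuild.
const scheduleRebuild = debounce(() => console.log("rebuild"), 100);
scheduleRebuild();
scheduleRebuild(); // one "rebuild" is logged roughly 100 ms after the last call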
@@ -180641,7 +180559,7 @@ init_import_meta_url();

  // src/cloudchamber/common.ts
  init_import_meta_url();
- var
+ var import_promises13 = require("fs/promises");
  var import_process2 = require("process");

  // src/cloudchamber/client/index.ts
@@ -181615,7 +181533,7 @@ __name(promiseSpinner, "promiseSpinner");
  async function fillOpenAPIConfiguration(config, json) {
  const headers = OpenAPI.HEADERS !== void 0 ? { ...OpenAPI.HEADERS } : {};
  if (Object.keys(getConfigCache("wrangler-account.json")).length === 0) {
- await wrap2((0,
+ await wrap2((0, import_promises13.mkdir)("node_modules", {}));
  purgeConfigCaches();
  }
  const scopes = getScopes();
@@ -182362,7 +182280,7 @@ __name(getNetworkInput, "getNetworkInput");

  // src/cloudchamber/ssh/ssh.ts
  init_import_meta_url();
- var
+ var import_promises14 = require("fs/promises");
  var import_os3 = require("os");
  var import_process4 = require("process");

@@ -182426,7 +182344,7 @@ function createSSHPublicKeyOptionalYargs(yargs) {
  __name(createSSHPublicKeyOptionalYargs, "createSSHPublicKeyOptionalYargs");
  async function retrieveSSHKey(sshKeyPath, { json } = { json: false }) {
  try {
- const file = (await (0,
+ const file = (await (0, import_promises14.readFile)(sshKeyPath)).toString();
  validatePublicSSHKeyCLI(file, { json });
  return file;
  } catch (err) {
@@ -182507,10 +182425,10 @@ async function tryToRetrieveAllDefaultSSHKeyPaths() {
  const path74 = `${HOME}/.ssh`;
  const paths = [];
  try {
- const dirList = await (0,
+ const dirList = await (0, import_promises14.readdir)(path74);
  for (const file of dirList) {
  if (file.endsWith(".pub")) {
- const s = await (0,
+ const s = await (0, import_promises14.stat)(`${path74}/${file}`);
  if (s.isFile()) {
  paths.push(`${path74}/${file}`);
  }
@@ -182552,7 +182470,7 @@ async function shouldPromptForNewSSHKeyAppear(keys = void 0) {
  }
  let foundValidSSHKeyThatDontExist = false;
  for (const defaultSSHKeyPath of defaultSSHKeyPaths) {
- const file = (await (0,
+ const file = (await (0, import_promises14.readFile)(defaultSSHKeyPath)).toString().trim();
  try {
  validateSSHKey(file);
  } catch {
@@ -183855,8 +183773,8 @@ init_import_meta_url();

  // src/d1/backups.tsx
  init_import_meta_url();
- var
- var
+ var import_promises15 = __toESM(require("node:fs/promises"));
+ var path42 = __toESM(require("path"));
  var import_ink_table2 = __toESM(require_dist4());

  // src/utils/render.ts
@@ -186955,7 +186873,7 @@ var DownloadHandler = withConfig(
  accountId,
  name2
  );
- const filename = output ||
+ const filename = output || path42.resolve(`${name2}.${backupId.slice(0, 8)}.sqlite3`);
  logger.log(`\u{1F300} Downloading backup ${backupId} from '${name2}'`);
  const response = await getBackupResponse(accountId, db.uuid, backupId);
  if (!response.ok) {
@@ -186965,7 +186883,7 @@ var DownloadHandler = withConfig(
  }
  logger.log(`\u{1F300} Saving to ${filename}`);
  const buffer = await response.arrayBuffer();
- await
+ await import_promises15.default.writeFile(filename, new Buffer(buffer));
  logger.log(`\u{1F300} Done!`);
  }
  );
@@ -187090,7 +187008,7 @@ var Handler5 = withConfig(
  init_import_meta_url();
  var import_fs9 = require("fs");
  var import_node_assert19 = __toESM(require("node:assert"));
- var
+ var import_node_path39 = __toESM(require("node:path"));
  var import_ink7 = __toESM(require_build2());
  var import_ink_table3 = __toESM(require_dist4());
  var import_md5_file = __toESM(require_md5_file());
@@ -187418,7 +187336,7 @@ async function executeLocally({
  }
  const id = localDB.previewDatabaseUuid ?? localDB.uuid;
  const persistencePath = getLocalPersistencePath(persistTo, config.configPath);
- const d1Persist =
+ const d1Persist = import_node_path39.default.join(persistencePath, "v3", "d1");
  logger.log(
  `\u{1F300} Executing on local database ${name2} (${id}) from ${readableRelative(
  d1Persist
@@ -187682,8 +187600,8 @@ __name(checkForSQLiteBinary, "checkForSQLiteBinary");

  // src/d1/export.ts
  init_import_meta_url();
- var
- var
+ var import_promises16 = __toESM(require("node:fs/promises"));
+ var import_node_path40 = __toESM(require("node:path"));
  var import_miniflare15 = require("miniflare");
  var import_undici15 = __toESM(require_undici());
  function Options7(yargs) {
@@ -187747,7 +187665,7 @@ async function exportLocal(config, name2, output, tables, noSchema, noData) {
  }
  const id = localDB.previewDatabaseUuid ?? localDB.uuid;
  const persistencePath = getLocalPersistencePath(void 0, config.configPath);
- const d1Persist =
+ const d1Persist = import_node_path40.default.join(persistencePath, "v3", "d1");
  logger.log(
  `\u{1F300} Exporting local database ${name2} (${id}) from ${readableRelative(
  d1Persist
@@ -187766,7 +187684,7 @@ async function exportLocal(config, name2, output, tables, noSchema, noData) {
  logger.log(`\u{1F300} Exporting SQL to ${output}...`);
  try {
  const dump = await db.prepare(`PRAGMA miniflare_d1_export(?,?,?);`).bind(noSchema, noData, ...tables).raw();
- await
+ await import_promises16.default.writeFile(output, dump[0].join("\n"));
  } catch (e3) {
  throw new UserError(e3.message);
  } finally {
@@ -187806,7 +187724,7 @@ async function exportRemotely(config, name2, output, tables, noSchema, noData) {
  startMessage: `Downloading SQL to ${output}`,
  async promise() {
  const contents = await (0, import_undici15.fetch)(finalResponse.result.signed_url);
- await
+ await import_promises16.default.writeFile(output, contents.body || "");
  }
  });
  logger.log(`\u{1F300} Downloaded to ${output} successfully!`);
@@ -189078,7 +188996,7 @@ __name(checkAndConfirmForceDeleteIfNecessary, "checkAndConfirmForceDeleteIfNeces
  // src/deploy/index.ts
  init_import_meta_url();
  var import_node_assert21 = __toESM(require("node:assert"));
- var
+ var import_node_path41 = __toESM(require("node:path"));

  // src/match-tag.ts
  init_import_meta_url();
@@ -189304,8 +189222,8 @@ async function deployHandler(args) {
  To learn more about Workers with assets, visit our documentation at https://developers.cloudflare.com/workers/frameworks/.`
  );
  }
- const configPath = args.config || args.script && findWranglerToml(
- const projectRoot = configPath &&
+ const configPath = args.config || args.script && findWranglerToml(import_node_path41.default.dirname(args.script));
+ const projectRoot = configPath && import_node_path41.default.dirname(configPath);
  const config = readConfig(configPath, args);
  const entry = await getEntry(args, config, "deploy");
  if (args.public) {
@@ -189354,7 +189272,7 @@ To learn more about Workers with assets, visit our documentation at https://deve
  await verifyWorkerMatchesCITag(
  accountId,
  name2,
-
+ import_node_path41.default.relative(entry.directory, config.configPath ?? "wrangler.toml")
  );
  }
  const { sourceMapSize, versionId, workerTag, targets } = await deploy({
@@ -189422,8 +189340,8 @@ var import_undici16 = __toESM(require_undici());
  // src/init.ts
  init_import_meta_url();
  var fs20 = __toESM(require("node:fs"));
- var
- var
+ var import_promises17 = require("node:fs/promises");
+ var import_node_path44 = __toESM(require("node:path"));
  var import_toml4 = __toESM(require_toml());
  init_execa();

@@ -189431,7 +189349,7 @@ init_execa();
  init_import_meta_url();
  var import_node_fs24 = __toESM(require("node:fs"));
  var import_node_os8 = __toESM(require("node:os"));
- var
+ var import_node_path42 = __toESM(require("node:path"));
  init_execa();

  // ../../node_modules/.pnpm/semiver@1.1.0/node_modules/semiver/dist/semiver.mjs
@@ -189502,7 +189420,7 @@ async function cloneIntoDirectory(remote, targetDirectory, subdirectory) {
  args.push(remote.substring(0, tagIndex));
  }
  const tempDir = import_node_fs24.default.mkdtempSync(
-
+ import_node_path42.default.join(import_node_os8.default.tmpdir(), `wrangler-generate-repo-`)
  );
  args.push(tempDir);
  await execa("git", args);
@@ -189511,7 +189429,7 @@ async function cloneIntoDirectory(remote, targetDirectory, subdirectory) {
  cwd: tempDir
  });
  }
- const templatePath = subdirectory !== void 0 ?
+ const templatePath = subdirectory !== void 0 ? import_node_path42.default.join(tempDir, subdirectory) : tempDir;
  try {
  import_node_fs24.default.renameSync(templatePath, targetDirectory);
  } catch (err) {
@@ -189530,7 +189448,7 @@ async function cloneIntoDirectory(remote, targetDirectory, subdirectory) {
  throw new UserError(`Failed to find "${subdirectory}" in ${remote}`);
  }
  }
- import_node_fs24.default.rmSync(
+ import_node_fs24.default.rmSync(import_node_path42.default.join(targetDirectory, ".git"), {
  recursive: true,
  force: true
  });
@@ -189540,7 +189458,7 @@ __name(cloneIntoDirectory, "cloneIntoDirectory");
  // src/package-manager.ts
  init_import_meta_url();
  var import_node_fs25 = require("node:fs");
- var
+ var import_node_path43 = require("node:path");
  var import_node_process11 = require("node:process");
  init_execa();
  async function getPackageManager2(cwd2) {
@@ -189549,9 +189467,9 @@ async function getPackageManager2(cwd2) {
  supportsNpm(),
  supportsPnpm()
  ]);
- const hasYarnLock = (0, import_node_fs25.existsSync)((0,
- const hasNpmLock = (0, import_node_fs25.existsSync)((0,
- const hasPnpmLock = (0, import_node_fs25.existsSync)((0,
+ const hasYarnLock = (0, import_node_fs25.existsSync)((0, import_node_path43.join)(cwd2, "yarn.lock"));
+ const hasNpmLock = (0, import_node_fs25.existsSync)((0, import_node_path43.join)(cwd2, "package-lock.json"));
+ const hasPnpmLock = (0, import_node_fs25.existsSync)((0, import_node_path43.join)(cwd2, "pnpm-lock.yaml"));
  const userAgent = sniffUserAgent();
  if (hasNpmLock) {
  if (hasNpm) {
@@ -189793,15 +189711,15 @@ async function initHandler(args) {
  const instructions = [];
  let shouldRunPackageManagerInstall = false;
  const fromDashWorkerName = args.fromDash;
- const creationDirectory =
+ const creationDirectory = import_node_path44.default.resolve(
  process.cwd(),
  (args.name ? args.name : fromDashWorkerName) ?? ""
  );
  assertNoTypeArg(args);
  assertNoSiteArg(args, creationDirectory);
- const workerName =
+ const workerName = import_node_path44.default.basename(creationDirectory).toLowerCase().replaceAll(/[^a-z0-9\-_]/gm, "-");
  const packageManager = await getPackageManager2(creationDirectory);
- const wranglerTomlDestination =
+ const wranglerTomlDestination = import_node_path44.default.join(
  creationDirectory,
  "./wrangler.toml"
  );
@@ -189851,7 +189769,7 @@ The \`init\` command will be removed in a future version.`
  if (fs20.existsSync(wranglerTomlDestination)) {
  let shouldContinue = false;
  logger.warn(
- `${
+ `${import_node_path44.default.relative(process.cwd(), wranglerTomlDestination)} already exists!`
  );
  if (!fromDashWorkerName) {
  shouldContinue = await confirm(
@@ -189892,10 +189810,10 @@ The \`init\` command will be removed in a future version.`
  return;
  }
  }
- await (0,
+ await (0, import_promises17.mkdir)(creationDirectory, { recursive: true });
  const compatibilityDate = (/* @__PURE__ */ new Date()).toISOString().substring(0, 10);
  try {
- await (0,
+ await (0, import_promises17.writeFile)(
  wranglerTomlDestination,
  import_toml4.default.stringify({
  name: workerName,
@@ -189903,12 +189821,12 @@ The \`init\` command will be removed in a future version.`
  }) + "\n"
  );
  logger.log(
- `\u2728 Created ${
+ `\u2728 Created ${import_node_path44.default.relative(process.cwd(), wranglerTomlDestination)}`
  );
  justCreatedWranglerToml = true;
  } catch (err) {
  throw new Error(
- `Failed to create ${
+ `Failed to create ${import_node_path44.default.relative(
  process.cwd(),
  wranglerTomlDestination
  )}.
@@ -189920,12 +189838,12 @@ ${err.message ?? err}`
  const shouldInitGit = yesFlag || await confirm("Would you like to use git to manage this Worker?");
  if (shouldInitGit) {
  await initializeGit(creationDirectory);
- await (0,
-
- readFileSync5(
+ await (0, import_promises17.writeFile)(
+ import_node_path44.default.join(creationDirectory, ".gitignore"),
+ readFileSync5(import_node_path44.default.join(getBasePath(), "templates/gitignore"))
  );
  logger.log(
- args.name && args.name !== "." ? `\u2728 Initialized git repository at ${
+ args.name && args.name !== "." ? `\u2728 Initialized git repository at ${import_node_path44.default.relative(
  process.cwd(),
  creationDirectory
  )}` : `\u2728 Initialized git repository`
@@ -189944,8 +189862,8 @@ ${err.message ?? err}`
  if (!pathToPackageJson) {
  shouldCreatePackageJson = yesFlag || await confirm("No package.json found. Would you like to create one?");
  if (shouldCreatePackageJson) {
- await (0,
-
+ await (0, import_promises17.writeFile)(
+ import_node_path44.default.join(creationDirectory, "./package.json"),
  JSON.stringify(
  {
  name: workerName,
@@ -189960,9 +189878,9 @@ ${err.message ?? err}`
  ) + "\n"
  );
  shouldRunPackageManagerInstall = true;
- pathToPackageJson =
+ pathToPackageJson = import_node_path44.default.join(creationDirectory, "package.json");
  logger.log(
- `\u2728 Created ${
+ `\u2728 Created ${import_node_path44.default.relative(process.cwd(), pathToPackageJson)}`
  );
  } else {
  return;
@@ -189974,7 +189892,7 @@ ${err.message ?? err}`
  );
  if (!(packageJson.devDependencies?.wrangler || packageJson.dependencies?.wrangler)) {
  const shouldInstall = yesFlag || await confirm(
- `Would you like to install wrangler into ${
+ `Would you like to install wrangler into ${import_node_path44.default.relative(
  process.cwd(),
  pathToPackageJson
  )}?`
@@ -189994,15 +189912,15 @@ ${err.message ?? err}`
  if (!pathToTSConfig) {
  if (yesFlag || await confirm("Would you like to use TypeScript?")) {
  isTypescriptProject = true;
- await (0,
-
- readFileSync5(
+ await (0, import_promises17.writeFile)(
+ import_node_path44.default.join(creationDirectory, "./tsconfig.json"),
+ readFileSync5(import_node_path44.default.join(getBasePath(), "templates/tsconfig.init.json"))
  );
  devDepsToInstall.push("@cloudflare/workers-types");
  devDepsToInstall.push("typescript");
- pathToTSConfig =
+ pathToTSConfig = import_node_path44.default.join(creationDirectory, "tsconfig.json");
  logger.log(
- `\u2728 Created ${
+ `\u2728 Created ${import_node_path44.default.relative(process.cwd(), pathToTSConfig)}`
  );
  }
  } else {
@@ -190018,7 +189936,7 @@ ${err.message ?? err}`
  if (shouldInstall) {
  devDepsToInstall.push("@cloudflare/workers-types");
  instructions.push(
- `\u{1F6A8} Please add "@cloudflare/workers-types" to compilerOptions.types in ${
+ `\u{1F6A8} Please add "@cloudflare/workers-types" to compilerOptions.types in ${import_node_path44.default.relative(
  process.cwd(),
  pathToTSConfig
  )}`
@@ -190056,10 +189974,10 @@ ${err.message ?? err}`
  };
  fs20.writeFileSync(wranglerTomlDestination, import_toml4.default.stringify(newToml));
  }
- const isNamedWorker = isCreatingWranglerToml &&
+ const isNamedWorker = isCreatingWranglerToml && import_node_path44.default.dirname(packagePath) !== process.cwd();
  const isAddingTestScripts = isAddingTests && !packageJsonContent.scripts?.test;
  if (isWritingScripts) {
- await (0,
+ await (0, import_promises17.writeFile)(
  packagePath,
  JSON.stringify(
  {
@@ -190097,25 +190015,25 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
  }
  __name(writePackageJsonScriptsAndUpdateWranglerToml, "writePackageJsonScriptsAndUpdateWranglerToml");
  if (isTypescriptProject) {
- if (!fs20.existsSync(
- const newWorkerFilename =
+ if (!fs20.existsSync(import_node_path44.default.join(creationDirectory, "./src/index.ts"))) {
+ const newWorkerFilename = import_node_path44.default.relative(
  process.cwd(),
-
+ import_node_path44.default.join(creationDirectory, "./src/index.ts")
  );
  const newWorkerType = yesFlag ? "fetch" : await getNewWorkerType(newWorkerFilename);
  if (newWorkerType !== "none") {
  const template = getNewWorkerTemplate("ts", newWorkerType);
- await (0,
+ await (0, import_promises17.mkdir)(import_node_path44.default.join(creationDirectory, "./src"), {
  recursive: true
  });
- await (0,
-
- readFileSync5(
+ await (0, import_promises17.writeFile)(
+ import_node_path44.default.join(creationDirectory, "./src/index.ts"),
+ readFileSync5(import_node_path44.default.join(getBasePath(), `templates/${template}`))
  );
  logger.log(
- `\u2728 Created ${
+ `\u2728 Created ${import_node_path44.default.relative(
  process.cwd(),
-
+ import_node_path44.default.join(creationDirectory, "./src/index.ts")
  )}`
  );
  shouldCreateTests = yesFlag || await confirm(
@@ -190127,19 +190045,19 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
|
|
190127
190045
|
}
|
190128
190046
|
newWorkerTestType = "vitest";
|
190129
190047
|
devDepsToInstall.push(newWorkerTestType);
|
190130
|
-
await (0,
|
190131
|
-
|
190048
|
+
await (0, import_promises17.writeFile)(
|
190049
|
+
import_node_path44.default.join(creationDirectory, "./src/index.test.ts"),
|
190132
190050
|
readFileSync5(
|
190133
|
-
|
190051
|
+
import_node_path44.default.join(
|
190134
190052
|
getBasePath(),
|
190135
190053
|
`templates/init-tests/test-${newWorkerTestType}-new-worker.ts`
|
190136
190054
|
)
|
190137
190055
|
)
|
190138
190056
|
);
|
190139
190057
|
logger.log(
|
190140
|
-
`\u2728 Created ${
|
190058
|
+
`\u2728 Created ${import_node_path44.default.relative(
|
190141
190059
|
process.cwd(),
|
190142
|
-
|
190060
|
+
import_node_path44.default.join(creationDirectory, "./src/index.test.ts")
|
190143
190061
|
)}`
|
190144
190062
|
);
|
190145
190063
|
}
|
@@ -190155,10 +190073,10 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
|
|
190155
190073
|
}
|
190156
190074
|
}
|
190157
190075
|
} else {
|
190158
|
-
if (!fs20.existsSync(
|
190159
|
-
const newWorkerFilename =
|
190076
|
+
if (!fs20.existsSync(import_node_path44.default.join(creationDirectory, "./src/index.js"))) {
|
190077
|
+
const newWorkerFilename = import_node_path44.default.relative(
|
190160
190078
|
process.cwd(),
|
190161
|
-
|
190079
|
+
import_node_path44.default.join(creationDirectory, "./src/index.js")
|
190162
190080
|
);
|
190163
190081
|
if (fromDashWorkerName) {
|
190164
190082
|
logger.warn(
|
@@ -190168,7 +190086,7 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
 accountId,
 fromDashWorkerName
 );
-await (0,
+await (0, import_promises17.mkdir)(import_node_path44.default.join(creationDirectory, "./src"), {
 recursive: true
 });
 config.main = `src/${config.main}`;
@@ -190176,13 +190094,13 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
 for (const files of createBatches(modules, 10)) {
 await Promise.all(
 files.map(async (file) => {
-const filepath =
+const filepath = import_node_path44.default.join(
 creationDirectory,
 `./src/${file.name}`
 );
-const directory = (0,
-await (0,
-await (0,
+const directory = (0, import_node_path44.dirname)(filepath);
+await (0, import_promises17.mkdir)(directory, { recursive: true });
+await (0, import_promises17.writeFile)(filepath, file.stream());
 })
 );
 }
@@ -190198,36 +190116,36 @@ To start developing your Worker, run \`npx wrangler dev\`${isCreatingWranglerTom
 const newWorkerType = yesFlag ? "fetch" : await getNewWorkerType(newWorkerFilename);
 if (newWorkerType !== "none") {
 const template = getNewWorkerTemplate("js", newWorkerType);
-await (0,
+await (0, import_promises17.mkdir)(import_node_path44.default.join(creationDirectory, "./src"), {
 recursive: true
 });
-await (0,
-
-readFileSync5(
+await (0, import_promises17.writeFile)(
+import_node_path44.default.join(creationDirectory, "./src/index.js"),
+readFileSync5(import_node_path44.default.join(getBasePath(), `templates/${template}`))
 );
 logger.log(
-`\u2728 Created ${
+`\u2728 Created ${import_node_path44.default.relative(
 process.cwd(),
-
+import_node_path44.default.join(creationDirectory, "./src/index.js")
 )}`
 );
 shouldCreateTests = yesFlag || await confirm("Would you like us to write your first test?");
 if (shouldCreateTests) {
 newWorkerTestType = await getNewWorkerTestType(yesFlag);
 devDepsToInstall.push(newWorkerTestType);
-await (0,
-
+await (0, import_promises17.writeFile)(
+import_node_path44.default.join(creationDirectory, "./src/index.test.js"),
 readFileSync5(
-
+import_node_path44.default.join(
 getBasePath(),
 `templates/init-tests/test-${newWorkerTestType}-new-worker.js`
 )
 )
 );
 logger.log(
-`\u2728 Created ${
+`\u2728 Created ${import_node_path44.default.relative(
 process.cwd(),
-
+import_node_path44.default.join(creationDirectory, "./src/index.test.js")
 )}`
 );
 }
@@ -190340,7 +190258,7 @@ function getNewWorkerToml(workerType) {
 __name(getNewWorkerToml, "getNewWorkerToml");
 async function findPath(isolatedInit, cwd2, basename5) {
 if (isolatedInit) {
-return fs20.existsSync(
+return fs20.existsSync(import_node_path44.default.resolve(cwd2, basename5)) ? import_node_path44.default.resolve(cwd2, basename5) : void 0;
 } else {
 return await findUp(basename5, {
 cwd: cwd2
@@ -190687,7 +190605,7 @@ function assertNoTypeArg(args) {
 __name(assertNoTypeArg, "assertNoTypeArg");
 function assertNoSiteArg(args, creationDirectory) {
 if (args.site) {
-const gitDirectory = creationDirectory !== process.cwd() ?
+const gitDirectory = creationDirectory !== process.cwd() ? import_node_path44.default.basename(creationDirectory) : "my-site";
 const message = `The --site option is no longer supported.
 If you wish to create a brand new Worker Sites project then clone the \`worker-sites-template\` starter repository:
 
@@ -191322,7 +191240,7 @@ __name(docsHandler, "docsHandler");
 // src/generate/index.ts
 init_import_meta_url();
 var import_node_fs26 = __toESM(require("node:fs"));
-var
+var import_node_path45 = __toESM(require("node:path"));
 init_execa();
 function generateOptions(yargs) {
 return yargs.positional("name", {
@@ -191382,7 +191300,7 @@ Please refer to https://developers.cloudflare.com/workers/wrangler/deprecations/
 }
 const creationDirectory = generateWorkerDirectoryName(args.name);
 if (args.site) {
-const gitDirectory = creationDirectory !== process.cwd() ?
+const gitDirectory = creationDirectory !== process.cwd() ? import_node_path45.default.basename(creationDirectory) : "my-site";
 const message = `The --site option is no longer supported.
 If you wish to create a brand new Worker Sites project then clone the \`worker-sites-template\` starter repository:
 
@@ -191414,7 +191332,7 @@ Running \`npm create cloudflare@latest\` for you instead.
 return;
 }
 logger.log(
-`Creating a worker in ${
+`Creating a worker in ${import_node_path45.default.basename(creationDirectory)} from ${args.template}`
 );
 const { remote, subdirectory } = parseTemplatePath(args.template);
 await cloneIntoDirectory(remote, creationDirectory, subdirectory);
@@ -191423,10 +191341,10 @@ Running \`npm create cloudflare@latest\` for you instead.
 }
 __name(generateHandler2, "generateHandler");
 function generateWorkerDirectoryName(workerName) {
-let workerDirectoryPath =
+let workerDirectoryPath = import_node_path45.default.resolve(process.cwd(), workerName);
 let i = 1;
 while (import_node_fs26.default.existsSync(workerDirectoryPath)) {
-workerDirectoryPath =
+workerDirectoryPath = import_node_path45.default.resolve(process.cwd(), `${workerName}-${i}`);
 i++;
 }
 return workerDirectoryPath;
@@ -192720,13 +192638,13 @@ init_import_meta_url();
 init_import_meta_url();
 var import_node_crypto7 = require("node:crypto");
 var import_node_fs29 = require("node:fs");
-var
+var import_promises22 = require("node:fs/promises");
 var import_node_path53 = __toESM(require("node:path"));
 
 // src/api/pages/create-worker-bundle-contents.ts
 init_import_meta_url();
 var import_node_fs27 = require("node:fs");
-var
+var import_node_path46 = __toESM(require("node:path"));
 var import_undici18 = __toESM(require_undici());
 async function createUploadWorkerBundleContents(workerBundle, config) {
 const workerBundleFormData = createWorkerBundleFormData(workerBundle, config);
@@ -192740,7 +192658,7 @@ async function createUploadWorkerBundleContents(workerBundle, config) {
 __name(createUploadWorkerBundleContents, "createUploadWorkerBundleContents");
 function createWorkerBundleFormData(workerBundle, config) {
 const mainModule = {
-name:
+name: import_node_path46.default.basename(workerBundle.resolvedEntryPointPath),
 filePath: workerBundle.resolvedEntryPointPath,
 content: (0, import_node_fs27.readFileSync)(workerBundle.resolvedEntryPointPath, {
 encoding: "utf-8"
@@ -192805,72 +192723,12 @@ var import_node_path52 = require("node:path");
 
 // src/pages/functions/buildPlugin.ts
 init_import_meta_url();
-var
+var import_promises19 = require("node:fs/promises");
 var import_node_path48 = require("node:path");
 
-// src/pages/utils.ts
-init_import_meta_url();
-var import_node_path46 = __toESM(require("node:path"));
-var RUNNING_BUILDERS = [];
-var CLEANUP_CALLBACKS = [];
-var CLEANUP = /* @__PURE__ */ __name(() => {
-CLEANUP_CALLBACKS.forEach((callback) => callback());
-RUNNING_BUILDERS.forEach((builder) => builder.stop?.());
-}, "CLEANUP");
-function isUrl(maybeUrl) {
-if (!maybeUrl) {
-return false;
-}
-try {
-new URL(maybeUrl);
-return true;
-} catch (e3) {
-return false;
-}
-}
-__name(isUrl, "isUrl");
-var projectRootCacheCwd;
-var projectRootCache;
-var tmpDirCacheProjectRoot;
-var tmpDirCache;
-function getPagesProjectRoot() {
-const cwd2 = process.cwd();
-if (projectRootCache !== void 0 && projectRootCacheCwd === cwd2) {
-return projectRootCache;
-}
-const packagePath = findUpSync("package.json");
-projectRootCache = packagePath ? import_node_path46.default.dirname(packagePath) : process.cwd();
-projectRootCacheCwd = cwd2;
-return projectRootCache;
-}
-__name(getPagesProjectRoot, "getPagesProjectRoot");
-function getPagesTmpDir() {
-const projectRoot = getPagesProjectRoot();
-if (tmpDirCache !== void 0 && tmpDirCacheProjectRoot === projectRoot) {
-return tmpDirCache;
-}
-const tmpDir = getWranglerTmpDir(getPagesProjectRoot(), "pages");
-tmpDirCache = tmpDir.path;
-tmpDirCacheProjectRoot = projectRoot;
-return tmpDirCache;
-}
-__name(getPagesTmpDir, "getPagesTmpDir");
-function debounce(fn2, delayMs = 100) {
-let crrTimeoutId;
-return () => {
-if (crrTimeoutId) {
-clearTimeout(crrTimeoutId);
-}
-crrTimeoutId = setTimeout(() => {
-fn2();
-}, delayMs);
-};
-}
-__name(debounce, "debounce");
-
 // src/pages/functions/buildWorker.ts
 init_import_meta_url();
-var
+var import_promises18 = require("node:fs/promises");
 var import_node_path47 = require("node:path");
 var import_esbuild3 = require("esbuild");
 
@@ -193166,8 +193024,8 @@ function assetsPlugin(buildOutputDirectory) {
 args.resolveDir,
 args.path.slice("assets:".length)
 );
-const exists = await (0,
-const isDirectory2 = exists && (await (0,
+const exists = await (0, import_promises18.access)(directory).then(() => true).catch(() => false);
+const isDirectory2 = exists && (await (0, import_promises18.lstat)(directory)).isDirectory();
 if (!isDirectory2) {
 return {
 errors: [
@@ -193196,11 +193054,11 @@ function assetsPlugin(buildOutputDirectory) {
 "pages-plugins",
 identifier
 );
-await (0,
+await (0, import_promises18.rm)(staticAssetsOutputDirectory, {
 force: true,
 recursive: true
 });
-await (0,
+await (0, import_promises18.cp)(args.path, staticAssetsOutputDirectory, {
 force: true,
 recursive: true
 });
@@ -193275,8 +193133,8 @@ function buildPluginFromFunctions({
 args.resolveDir,
 args.path.slice("assets:".length)
 );
-const exists = await (0,
-const isDirectory2 = exists && (await (0,
+const exists = await (0, import_promises19.access)(directory).then(() => true).catch(() => false);
+const isDirectory2 = exists && (await (0, import_promises19.lstat)(directory)).isDirectory();
 if (!isDirectory2) {
 return {
 errors: [
@@ -193322,7 +193180,7 @@ __name(buildPluginFromFunctions, "buildPluginFromFunctions");
 
 // src/pages/functions/filepath-routing.ts
 init_import_meta_url();
-var
+var import_promises20 = __toESM(require("node:fs/promises"));
 var import_node_path49 = __toESM(require("node:path"));
 var import_esbuild4 = require("esbuild");
 async function generateConfigFromFileTree({
@@ -193454,7 +193312,7 @@ async function forEachFile(baseDir, fn2) {
 const returnValues = [];
 while (isNotEmpty(searchPaths)) {
 const cwd2 = searchPaths.shift();
-const dir = await
+const dir = await import_promises20.default.readdir(cwd2, { withFileTypes: true });
 for (const entry of dir) {
 const pathname = import_node_path49.default.join(cwd2, entry.name);
 if (entry.isDirectory()) {
@@ -193499,7 +193357,7 @@ __name(convertSimpleParams, "convertSimpleParams");
 
 // src/pages/functions/routes.ts
 init_import_meta_url();
-var
+var import_promises21 = __toESM(require("node:fs/promises"));
 var import_node_path50 = __toESM(require("node:path"));
 
 // src/pages/functions/identifiers.ts
@@ -193579,7 +193437,7 @@ async function writeRoutesModule({
 }) {
 const { importMap, routes } = parseConfig(config, srcDir);
 const routesModule = generateRoutesModule(importMap, routes);
-await
+await import_promises21.default.writeFile(outfile, routesModule);
 return outfile;
 }
 __name(writeRoutesModule, "writeRoutesModule");
@@ -194115,7 +193973,7 @@ async function maybeReadPagesConfig(args) {
 );
 return {
 ...config,
-hash: (0, import_node_crypto7.createHash)("sha256").update(await (0,
+hash: (0, import_node_crypto7.createHash)("sha256").update(await (0, import_promises22.readFile)(configPath)).digest("hex")
 };
 } catch (e3) {
 if (e3 instanceof FatalError && e3.code === EXIT_CODE_INVALID_PAGES_CONFIG) {
@@ -194318,7 +194176,7 @@ var import_ink_select_input3 = __toESM(require_build4());
 init_import_meta_url();
 var import_node_crypto8 = require("node:crypto");
 var import_node_fs31 = require("node:fs");
-var
+var import_promises23 = require("node:fs/promises");
 var import_node_path55 = __toESM(require("node:path"));
 var import_node_process12 = require("node:process");
 var import_undici19 = __toESM(require_undici());
@@ -194468,7 +194326,7 @@ async function deploy2({
 );
 }
 if (config !== void 0 && config.configPath !== void 0 && config.pages_build_output_dir) {
-const configHash = (0, import_node_crypto8.createHash)("sha256").update(await (0,
+const configHash = (0, import_node_crypto8.createHash)("sha256").update(await (0, import_promises23.readFile)(config.configPath)).digest("hex");
 const outputDir = import_node_path55.default.relative(
 process.cwd(),
 config.pages_build_output_dir
@@ -195255,7 +195113,7 @@ __name(promptSelectExistingOrNewProject, "promptSelectExistingOrNewProject");
 
 // src/pages/deployment-tails.ts
 init_import_meta_url();
-var
+var import_promises24 = require("node:timers/promises");
 var import_signal_exit8 = __toESM(require_signal_exit());
 
 // src/tail/createTail.ts
@@ -195762,10 +195620,10 @@ async function Handler13({
 while (tail.readyState !== tail.OPEN) {
 switch (tail.readyState) {
 case tail.CONNECTING:
-await (0,
+await (0, import_promises24.setTimeout)(100);
 break;
 case tail.CLOSING:
-await (0,
+await (0, import_promises24.setTimeout)(100);
 break;
 case tail.CLOSED:
 await sendMetricsEvent("end log stream", {
@@ -196716,7 +196574,7 @@ __name(getBindingsFromArgs, "getBindingsFromArgs");
 // src/pages/download-config.ts
 init_import_meta_url();
 var import_fs10 = require("fs");
-var
+var import_promises25 = require("node:fs/promises");
 var import_toml6 = __toESM(require_toml());
 var import_miniflare16 = require("miniflare");
 async function toEnvironment(deploymentConfig, accountId) {
@@ -196829,7 +196687,7 @@ __name(toEnvironment, "toEnvironment");
 async function writeWranglerToml(toml) {
 let tomlString = import_toml6.default.stringify(toml);
 tomlString = tomlString.split("\n").map((line) => line.trimStart()).join("\n");
-await (0,
+await (0, import_promises25.writeFile)(
 "wrangler.toml",
 `# Generated by Wrangler on ${/* @__PURE__ */ new Date()}
 ${tomlString}`
@@ -209631,7 +209489,7 @@ __name(closeSentry, "closeSentry");
 
 // src/tail/index.ts
 init_import_meta_url();
-var
+var import_promises26 = require("node:timers/promises");
 var import_signal_exit9 = __toESM(require_signal_exit());
 function tailOptions(yargs) {
 return yargs.positional("worker", {
@@ -209736,10 +209594,10 @@ async function tailHandler(args) {
 while (tail.readyState !== tail.OPEN) {
 switch (tail.readyState) {
 case tail.CONNECTING:
-await (0,
+await (0, import_promises26.setTimeout)(100);
 break;
 case tail.CLOSING:
-await (0,
+await (0, import_promises26.setTimeout)(100);
 break;
 case tail.CLOSED:
 await sendMetricsEvent("end log stream", {
@@ -209866,9 +209724,9 @@ var import_miniflare20 = require("miniflare");
 // src/type-generation/runtime/index.ts
 init_import_meta_url();
 var import_fs14 = require("fs");
-var
+var import_promises27 = require("fs/promises");
 var import_miniflare19 = require("miniflare");
-var import_workerd =
+var import_workerd = require("workerd");
 var DEFAULT_OUTFILE_RELATIVE_PATH = "./.wrangler/types/runtime.d.ts";
 async function generateRuntimeTypes({
 config: { compatibility_date, compatibility_flags = [] },
@@ -209880,7 +209738,7 @@ async function generateRuntimeTypes({
 await ensureDirectoryExists(outFile);
 const header = `// Runtime types generated with workerd@${import_workerd.version} ${compatibility_date} ${compatibility_flags.join(",")}`;
 try {
-const existingTypes = await (0,
+const existingTypes = await (0, import_promises27.readFile)(outFile, "utf8");
 if (existingTypes.split("\n")[0] === header) {
 logger.debug("Using cached runtime types: ", header);
 return { outFile };
@@ -209897,7 +209755,7 @@ async function generateRuntimeTypes({
 (flag) => !flag.includes("nodejs_compat")
 )
 });
-await (0,
+await (0, import_promises27.writeFile)(outFile, `${header}
 ${types}`, "utf8");
 return {
 outFile
@@ -212286,11 +212144,13 @@ See https://developers.cloudflare.com/workers/platform/compatibility-dates for m
 let bindingsPrinted = false;
 try {
 const body = createWorkerUploadForm(worker);
-const result = await
-
-
-
-
+const result = await retryOnError(
+async () => fetchResult(`${workerUrl}/versions`, {
+method: "POST",
+body,
+headers: await getMetricsUsageHeaders(config.send_metrics)
+})
+);
 logger.log("Worker Startup Time:", result.startup_time_ms, "ms");
 bindingsPrinted = true;
 printBindings({ ...withoutStaticAssets, vars: maskedVars });
@@ -214553,13 +214413,16 @@ use --persist-to=./wrangler-local-state to keep using the old path.`
 if (assetsOptions && !args.assets) {
 await assetsWatcher?.close();
 if (assetsOptions) {
+const debouncedRerender = debounce(async () => {
+rerender(await getDevReactElement(config));
+}, 100);
 assetsWatcher = (0, import_chokidar6.watch)(assetsOptions.directory, {
 persistent: true,
 ignoreInitial: true
 }).on("all", async (eventName, changedPath) => {
 const message = getAssetChangeMessage(eventName, changedPath);
 logger.log(`\u{1F300} ${message}...`);
-
+debouncedRerender();
 });
 }
 }
@@ -214673,13 +214536,16 @@ use --persist-to=./wrangler-local-state to keep using the old path.`
 const devReactElement = (0, import_ink13.render)(await getDevReactElement(config));
 rerender = devReactElement.rerender;
 if (assetsOptions && !args.experimentalDevEnv) {
+const debouncedRerender = debounce(async () => {
+rerender(await getDevReactElement(config));
+}, 100);
 assetsWatcher = (0, import_chokidar6.watch)(assetsOptions.directory, {
 persistent: true,
 ignoreInitial: true
 }).on("all", async (eventName, filePath) => {
 const message = getAssetChangeMessage(eventName, filePath);
 logger.log(`\u{1F300} ${message}...`);
-
+debouncedRerender();
 });
 }
 return {
@@ -215832,9 +215698,7 @@ async function resolveConfig(config, input) {
 }
 const queues2 = extractBindingsOfType("queue", resolved.bindings);
 if (resolved.dev.remote && (queues2?.length || resolved.triggers?.some((t3) => t3.type === "queue-consumer"))) {
-logger.warn(
-"Queues are currently in Beta and are not supported in wrangler dev remote mode."
-);
+logger.warn("Queues are not yet supported in wrangler dev remote mode.");
 }
 const classNamesWhichUseSQLite = getClassNamesWhichUseSQLite(
 resolved.migrations
@@ -215954,7 +215818,7 @@ __name(ConfigController, "ConfigController");
 // src/api/startDevWorker/LocalRuntimeController.ts
 init_import_meta_url();
 var import_node_crypto10 = require("node:crypto");
-var
+var import_promises28 = require("node:fs/promises");
 var import_miniflare21 = require("miniflare");
 async function getBinaryFileContents2(file) {
 if ("contents" in file) {
@@ -215963,7 +215827,7 @@ async function getBinaryFileContents2(file) {
 }
 return Buffer.from(file.contents);
 }
-return (0,
+return (0, import_promises28.readFile)(file.path);
 }
 __name(getBinaryFileContents2, "getBinaryFileContents");
 async function getTextFileContents(file) {
@@ -215976,7 +215840,7 @@ async function getTextFileContents(file) {
 }
 return Buffer.from(file.contents).toString();
 }
-return (0,
+return (0, import_promises28.readFile)(file.path, "utf8");
 }
 __name(getTextFileContents, "getTextFileContents");
 var DEFAULT_WORKER_NAME2 = "worker";
@@ -217975,4 +217839,3 @@ yargs-parser/build/lib/index.js:
 * SPDX-License-Identifier: ISC
 *)
 */
-//# sourceMappingURL=cli.js.map