@hot-updater/aws 0.20.10 → 0.20.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/iac/index.cjs +206 -345
- package/dist/iac/index.js +204 -343
- package/dist/index.cjs +14 -22
- package/dist/index.js +13 -21
- package/dist/lambda/index.cjs +125 -207
- package/package.json +4 -4
package/dist/iac/index.cjs CHANGED
@@ -24,8 +24,6 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 }) : target, mod));
 
 //#endregion
-let fs = require("fs");
-fs = __toESM(fs);
 let __aws_sdk_credential_providers = require("@aws-sdk/credential-providers");
 __aws_sdk_credential_providers = __toESM(__aws_sdk_credential_providers);
 let node_util = require("node:util");
@@ -48,6 +46,8 @@ let node_path = require("node:path");
 node_path = __toESM(node_path);
 let path = require("path");
 path = __toESM(path);
+let fs = require("fs");
+fs = __toESM(fs);
 let node_timers_promises = require("node:timers/promises");
 node_timers_promises = __toESM(node_timers_promises);
 let node_os = require("node:os");
@@ -64,10 +64,10 @@ let node_stream = require("node:stream");
 node_stream = __toESM(node_stream);
 let node_buffer = require("node:buffer");
 node_buffer = __toESM(node_buffer);
-let crypto = require("crypto");
-crypto = __toESM(crypto);
 let __aws_sdk_client_cloudfront = require("@aws-sdk/client-cloudfront");
 __aws_sdk_client_cloudfront = __toESM(__aws_sdk_client_cloudfront);
+let crypto = require("crypto");
+crypto = __toESM(crypto);
 let __aws_sdk_client_iam = require("@aws-sdk/client-iam");
 __aws_sdk_client_iam = __toESM(__aws_sdk_client_iam);
 let __aws_sdk_client_lambda = require("@aws-sdk/client-lambda");
@@ -473,16 +473,16 @@ function G(t, u$2, F$1) {
 `).map((e$1) => oD(e$1, u$2, F$1)).join(`
 `);
 }
-const
-
-
-
-
-
-
-
-
-
+const c$1 = {
+actions: new Set([
+"up",
+"down",
+"left",
+"right",
+"space",
+"enter",
+"cancel"
+]),
 aliases: new Map([
 ["k", "up"],
 ["j", "down"],
@@ -522,9 +522,8 @@ function cD({ input: t = node_process.stdin, output: u$2 = node_process.stdout,
 });
 node_readline.emitKeypressEvents(t, s), t.isTTY && t.setRawMode(!0);
 const i$1 = (D$1, { name: C$1, sequence: o$2 }) => {
-const E = String(D$1);
 if (k$1([
-
+String(D$1),
 C$1,
 o$2
 ], "cancel")) {
@@ -713,10 +712,7 @@ let dD = class extends x {
 if (u$2.group === !0) {
 const F$1 = u$2.value, e$1 = this.getGroupItems(F$1);
 this.isGroupSelected(F$1) ? this.value = this.value.filter((s) => e$1.findIndex((i$1) => i$1.value === s) === -1) : this.value = [...this.value, ...e$1.map((s) => s.value)], this.value = Array.from(new Set(this.value));
-} else
-const F$1 = this.value.includes(u$2.value);
-this.value = F$1 ? this.value.filter((e$1) => e$1 !== u$2.value) : [...this.value, u$2.value];
-}
+} else this.value = this.value.includes(u$2.value) ? this.value.filter((e$1) => e$1 !== u$2.value) : [...this.value, u$2.value];
 }
 };
 var bD = Object.defineProperty, mD = (t, u$2, F$1) => u$2 in t ? bD(t, u$2, {
@@ -749,12 +745,10 @@ let wD = class extends x {
 return this.options[this.cursor].value;
 }
 toggleAll() {
-
-this.value = u$2 ? [] : this.options.map((F$1) => F$1.value);
+this.value = this.value.length === this.options.length ? [] : this.options.map((F$1) => F$1.value);
 }
 toggleValue() {
-
-this.value = u$2 ? this.value.filter((F$1) => F$1 !== this._value) : [...this.value, this._value];
+this.value = this.value.includes(this._value) ? this.value.filter((F$1) => F$1 !== this._value) : [...this.value, this._value];
 }
 };
 var yD = Object.defineProperty, _D = (t, u$2, F$1) => u$2 in t ? yD(t, u$2, {
@@ -1181,8 +1175,7 @@ ${J}${r$1.trimStart()}`), s = 3 + (0, node_util.stripVTControlCharacters)(r$1.tr
 ], s = V ? 80 : 120, r$1 = process.env.CI === "true";
 let i$1, a$1, c$2 = !1, l$1 = "", $$2, p$1 = performance.now();
 const M$1 = (m$1) => {
-
-c$2 && N$1(h$2, m$1);
+c$2 && N$1(m$1 > 1 ? "Something went wrong" : "Canceled", m$1);
 }, v$1 = () => M$1(2), x$1 = () => M$1(1), j$1 = () => {
 process.on("uncaughtExceptionMonitor", v$1), process.on("unhandledRejection", v$1), process.on("SIGINT", x$1), process.on("SIGTERM", x$1), process.on("exit", M$1);
 }, E = () => {
@@ -1298,8 +1291,7 @@ const stringToUint8Array = (string) => textEncoder$1.encode(string);
 const textDecoder = new TextDecoder();
 const uint8ArrayToString = (uint8Array) => textDecoder.decode(uint8Array);
 const joinToString = (uint8ArraysOrStrings, encoding) => {
-
-return strings.join("");
+return uint8ArraysToStrings(uint8ArraysOrStrings, encoding).join("");
 };
 const uint8ArraysToStrings = (uint8ArraysOrStrings, encoding) => {
 if (encoding === "utf8" && uint8ArraysOrStrings.every((uint8ArrayOrString) => typeof uint8ArrayOrString === "string")) return uint8ArraysOrStrings;
@@ -1354,8 +1346,7 @@ const parseTemplate = ({ templates, expressions, tokens, index, template }) => {
 const newTokens = concatTokens(tokens, nextTokens, leadingWhitespaces);
 if (index === expressions.length) return newTokens;
 const expression = expressions[index];
-
-return concatTokens(newTokens, expressionTokens, trailingWhitespaces);
+return concatTokens(newTokens, Array.isArray(expression) ? expression.map((expression$1) => parseExpression(expression$1)) : [parseExpression(expression)], trailingWhitespaces);
 };
 const splitByWhitespaces = (template, rawTemplate) => {
 if (rawTemplate.length === 0) return {
@@ -1442,8 +1433,7 @@ const normalizeFdSpecificOptions = (options) => {
 };
 const normalizeFdSpecificOption = (options, optionName) => {
 const optionBaseArray = Array.from({ length: getStdioLength(options) + 1 });
-
-return addDefaultValue$1(optionArray, optionName);
+return addDefaultValue$1(normalizeFdSpecificValue(options[optionName], optionBaseArray, optionName), optionName);
 };
 const getStdioLength = ({ stdio }) => Array.isArray(stdio) ? Math.max(stdio.length, STANDARD_STREAMS_ALIASES.length) : STANDARD_STREAMS_ALIASES.length;
 const normalizeFdSpecificValue = (optionValue, optionArray, optionName) => isPlainObject$1(optionValue) ? normalizeOptionObject(optionValue, optionArray, optionName) : optionArray.fill(optionValue);
@@ -1473,12 +1463,11 @@ const parseFd = (fdName) => {
 };
 const FD_REGEXP = /^fd(\d+)$/;
 const addDefaultValue$1 = (optionArray, optionName) => optionArray.map((optionValue) => optionValue === void 0 ? DEFAULT_OPTIONS[optionName] : optionValue);
-const verboseDefault = (0, node_util.debuglog)("execa").enabled ? "full" : "none";
 const DEFAULT_OPTIONS = {
 lines: false,
 buffer: true,
 maxBuffer: 1e3 * 1e3 * 100,
-verbose:
+verbose: (0, node_util.debuglog)("execa").enabled ? "full" : "none",
 stripFinalNewline: true
 };
 const FD_SPECIFIC_OPTIONS = [
@@ -1511,11 +1500,9 @@ const VERBOSE_VALUES = [
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/arguments/escape.js
 const joinCommand = (filePath, rawArguments) => {
 const fileAndArguments = [filePath, ...rawArguments];
-const command = fileAndArguments.join(" ");
-const escapedCommand = fileAndArguments.map((fileAndArgument) => quoteString(escapeControlCharacters(fileAndArgument))).join(" ");
 return {
-command,
-escapedCommand
+command: fileAndArguments.join(" "),
+escapedCommand: fileAndArguments.map((fileAndArgument) => quoteString(escapeControlCharacters(fileAndArgument))).join(" ")
 };
 };
 const escapeLines = (lines) => (0, node_util.stripVTControlCharacters)(lines).split("\n").map((line) => escapeControlCharacters(line)).join("\n");
@@ -1956,13 +1943,11 @@ const appendNewline = (printedLine) => printedLine.endsWith("\n") ? printedLine
 //#endregion
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/verbose/log.js
 const verboseLog = ({ type, verboseMessage, fdNumber, verboseInfo, result }) => {
-const
+const finalLines = applyVerboseOnLines(getPrintedLines(verboseMessage, getVerboseObject({
 type,
 result,
 verboseInfo
-});
-const printedLines = getPrintedLines(verboseMessage, verboseObject);
-const finalLines = applyVerboseOnLines(printedLines, verboseInfo, fdNumber);
+})), verboseInfo, fdNumber);
 if (finalLines !== "") console.warn(finalLines.slice(0, -1));
 };
 const getVerboseObject = ({ type, result, verboseInfo: { escapedCommand, commandId, rawOptions: { piped = false,...options } } }) => ({
@@ -1979,16 +1964,13 @@ const getPrintedLines = (verboseMessage, verboseObject) => verboseMessage.split(
 message
 }));
 const getPrintedLine = (verboseObject) => {
-const verboseLine = defaultVerboseFunction(verboseObject);
 return {
-verboseLine,
+verboseLine: defaultVerboseFunction(verboseObject),
 verboseObject
 };
 };
 const serializeVerboseMessage = (message) => {
-
-const escapedMessage = escapeLines(messageString);
-return escapedMessage.replaceAll("	", " ".repeat(TAB_SIZE));
+return escapeLines(typeof message === "string" ? message : (0, node_util.inspect)(message)).replaceAll("	", " ".repeat(TAB_SIZE));
 };
 const TAB_SIZE = 2;
 
@@ -2007,11 +1989,10 @@ const logCommand = (escapedCommand, verboseInfo) => {
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/verbose/info.js
 const getVerboseInfo = (verbose, escapedCommand, rawOptions) => {
 validateVerbose(verbose);
-const commandId = getCommandId(verbose);
 return {
 verbose,
 escapedCommand,
-commandId,
+commandId: getCommandId(verbose),
 rawOptions
 };
 };
@@ -2038,8 +2019,7 @@ const getDurationMs = (startTime) => Number(node_process.hrtime.bigint() - start
 const handleCommand = (filePath, rawArguments, rawOptions) => {
 const startTime = getStartTime();
 const { command, escapedCommand } = joinCommand(filePath, rawArguments);
-const
-const verboseInfo = getVerboseInfo(verbose, escapedCommand, { ...rawOptions });
+const verboseInfo = getVerboseInfo(normalizeFdSpecificOption(rawOptions, "verbose"), escapedCommand, { ...rawOptions });
 logCommand(escapedCommand, verboseInfo);
 return {
 command,
@@ -2107,8 +2087,7 @@ var require_mode = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/isexe@
 var g$2 = parseInt("010", 8);
 var o$2 = parseInt("001", 8);
 var ug = u$2 | g$2;
-
-return ret;
+return mod & o$2 || mod & g$2 && gid === myGid || mod & u$2 && uid === myUid || mod & ug && myUid === 0;
 }
 }) });
 
@@ -2190,8 +2169,7 @@ var require_which = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/which
 const ppRaw = pathEnv[i$1];
 const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw;
 const pCmd = path$9.join(pathPart, cmd);
-
-resolve(subStep(p$1, i$1, 0));
+resolve(subStep(!pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd : pCmd, i$1, 0));
 });
 const subStep = (p$1, i$1, ii) => new Promise((resolve, reject) => {
 if (ii === pathExt.length) return resolve(step(i$1 + 1));
@@ -2216,8 +2194,7 @@ var require_which = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/which
 for (let j$1 = 0; j$1 < pathExt.length; j$1++) {
 const cur = p$1 + pathExt[j$1];
 try {
-
-if (is) if (opt.all) found.push(cur);
+if (isexe.sync(cur, { pathExt: pathExtExe })) if (opt.all) found.push(cur);
 else return cur;
 } catch (ex) {}
 }
@@ -2235,8 +2212,7 @@ var require_which = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/which
 var require_path_key = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/path-key@3.1.1/node_modules/path-key/index.js": ((exports, module) => {
 const pathKey$1 = (options = {}) => {
 const environment = options.env || process.env;
-
-if (platform$1 !== "win32") return "PATH";
+if ((options.platform || process.platform) !== "win32") return "PATH";
 return Object.keys(environment).reverse().find((key) => key.toUpperCase() === "PATH") || "Path";
 };
 module.exports = pathKey$1;
@@ -2364,12 +2340,11 @@ var require_parse = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/cross
 parsed.command = path$7.normalize(parsed.command);
 parsed.command = escape.command(parsed.command);
 parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars));
-const shellCommand = [parsed.command].concat(parsed.args).join(" ");
 parsed.args = [
 "/d",
 "/s",
 "/c",
-`"${
+`"${[parsed.command].concat(parsed.args).join(" ")}"`
 ];
 parsed.command = process.env.comspec || "cmd.exe";
 parsed.options.windowsVerbatimArguments = true;
@@ -2522,9 +2497,7 @@ const npmRunPathEnv = ({ env: env$1 = node_process.default.env,...options } = {}
 //#endregion
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/return/final-error.js
 const getFinalError = (originalError, message, isSync) => {
-
-const options = originalError instanceof DiscardedError ? {} : { cause: originalError };
-return new ErrorClass(message, options);
+return new (isSync ? ExecaSyncError : ExecaError)(message, originalError instanceof DiscardedError ? {} : { cause: originalError });
 };
 var DiscardedError = class extends Error {};
 const setErrorName = (ErrorClass, value) => {
@@ -2843,16 +2816,14 @@ const SIGNALS = [
 //#region ../../node_modules/.pnpm/human-signals@8.0.0/node_modules/human-signals/build/src/signals.js
 const getSignals = () => {
 const realtimeSignals = getRealtimeSignals();
-
-return signals$1;
+return [...SIGNALS, ...realtimeSignals].map(normalizeSignal$1);
 };
 const normalizeSignal$1 = ({ name, number: defaultNumber, description, action, forced = false, standard }) => {
 const { signals: { [name]: constantSignal } } = node_os.constants;
 const supported = constantSignal !== void 0;
-const number = supported ? constantSignal : defaultNumber;
 return {
 name,
-number,
+number: supported ? constantSignal : defaultNumber,
 description,
 supported,
 action,
@@ -3264,8 +3235,7 @@ const isConnected = (anyProcess) => {
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/ipc/strict.js
 const handleSendStrict = ({ anyProcess, channel, isSubprocess, message, strict }) => {
 if (!strict) return message;
-const
-const hasListeners = hasMessageListeners(anyProcess, ipcEmitter);
+const hasListeners = hasMessageListeners(anyProcess, getIpcEmitter(anyProcess, channel, isSubprocess));
 return {
 id: count++,
 type: REQUEST_TYPE,
@@ -3338,11 +3308,9 @@ const RESPONSE_TYPE = "execa:ipc:response";
 const startSendMessage = (anyProcess, wrappedMessage, strict) => {
 if (!OUTGOING_MESSAGES.has(anyProcess)) OUTGOING_MESSAGES.set(anyProcess, /* @__PURE__ */ new Set());
 const outgoingMessages = OUTGOING_MESSAGES.get(anyProcess);
-const onMessageSent = createDeferred();
-const id = strict ? wrappedMessage.id : void 0;
 const outgoingMessage = {
-onMessageSent,
-id
+onMessageSent: createDeferred(),
+id: strict ? wrappedMessage.id : void 0
 };
 outgoingMessages.add(outgoingMessage);
 return {
@@ -3503,8 +3471,7 @@ const throwOnGracefulCancel = ({ subprocess, cancelSignal, gracefulCancel, force
 })] : [];
 const sendOnAbort = async ({ subprocess, cancelSignal, forceKillAfterDelay, context, controller: { signal } }) => {
 await onAbortedSignal(cancelSignal, signal);
-
-await sendAbort(subprocess, reason);
+await sendAbort(subprocess, getReason(cancelSignal));
 killOnTimeout({
 kill: subprocess.kill,
 forceKillAfterDelay,
@@ -3678,8 +3645,7 @@ const normalizeOptions = (filePath, rawArguments, rawOptions) => {
 rawOptions.cwd = normalizeCwd(rawOptions.cwd);
 const [processedFile, processedArguments, processedOptions] = handleNodeOption(filePath, rawArguments, rawOptions);
 const { command: file, args: commandArguments, options: initialOptions } = import_cross_spawn.default._parse(processedFile, processedArguments, processedOptions);
-const
-const options = addDefaultOptions(fdOptions);
+const options = addDefaultOptions(normalizeFdSpecificOptions(initialOptions));
 validateTimeout(options);
 validateEncoding(options);
 validateIpcInputOption(options);
@@ -3836,7 +3802,7 @@ const u = Object.create(a, {
 }
 });
 function h({ preventCancel: r$1 = !1 } = {}) {
-const
+const t = new c(this.getReader(), r$1), s = Object.create(u);
 return s[n] = t, s;
 }
 
@@ -3885,18 +3851,14 @@ const getStreamContents$1 = async (stream, { init, convertChunk, getSize, trunca
 const state = init();
 state.length = 0;
 try {
-for await (const chunk of asyncIterable) {
-
-
-
-
-
-
-
-addChunk,
-maxBuffer
-});
-}
+for await (const chunk of asyncIterable) appendChunk({
+convertedChunk: convertChunk[getChunkType(chunk)](chunk, state),
+state,
+getSize,
+truncateChunk,
+addChunk,
+maxBuffer
+});
 appendFinalChunk({
 state,
 convertChunk,
@@ -4086,10 +4048,9 @@ const stringMethods = {
 const handleMaxBuffer = ({ error, stream, readableObjectMode, lines, encoding, fdNumber }) => {
 if (!(error instanceof MaxBufferError)) throw error;
 if (fdNumber === "all") return error;
-const unit = getMaxBufferUnit(readableObjectMode, lines, encoding);
 error.maxBufferInfo = {
 fdNumber,
-unit
+unit: getMaxBufferUnit(readableObjectMode, lines, encoding)
 };
 stream.destroy();
 throw error;
@@ -4159,19 +4120,16 @@ const createMessages = ({ stdio, all, ipcOutput, originalError, signal, signalDe
 killSignal
 });
 const originalMessage = getOriginalMessage(originalError, cwd);
-const
-const shortMessage = `${prefix}: ${escapedCommand}${suffix}`;
-const messageStdio = all === void 0 ? [stdio[2], stdio[1]] : [all];
-const message = [
-shortMessage,
-...messageStdio,
-...stdio.slice(3),
-ipcOutput.map((ipcMessage) => serializeIpcMessage(ipcMessage)).join("\n")
-].map((messagePart) => escapeLines(stripFinalNewline(serializeMessagePart(messagePart)))).filter(Boolean).join("\n\n");
+const shortMessage = `${prefix}: ${escapedCommand}${originalMessage === void 0 ? "" : `\n${originalMessage}`}`;
 return {
 originalMessage,
 shortMessage,
-message
+message: [
+shortMessage,
+...all === void 0 ? [stdio[2], stdio[1]] : [all],
+...stdio.slice(3),
+ipcOutput.map((ipcMessage) => serializeIpcMessage(ipcMessage)).join("\n")
+].map((messagePart) => escapeLines(stripFinalNewline(serializeMessagePart(messagePart)))).filter(Boolean).join("\n\n")
 };
 };
 const getErrorPrefix = ({ originalError, timedOut, timeout, isMaxBuffer, maxBuffer, errorCode, signal, signalDescription, exitCode, isCanceled, isGracefullyCanceled, isForcefullyTerminated, forceKillAfterDelay, killSignal }) => {
@@ -4192,8 +4150,7 @@ const getErrorPrefix = ({ originalError, timedOut, timeout, isMaxBuffer, maxBuff
 const getForcefulSuffix = (isForcefullyTerminated, forceKillAfterDelay) => isForcefullyTerminated ? ` and was forcefully terminated after ${forceKillAfterDelay} milliseconds` : "";
 const getOriginalMessage = (originalError, cwd) => {
 if (originalError instanceof DiscardedError) return;
-const
-const escapedOriginalMessage = escapeLines(fixCwdError(originalMessage, cwd));
+const escapedOriginalMessage = escapeLines(fixCwdError(isExecaError(originalError) ? originalError.originalMessage : String(originalError?.message ?? originalError), cwd));
 return escapedOriginalMessage === "" ? void 0 : escapedOriginalMessage;
 };
 const serializeIpcMessage = (ipcMessage) => typeof ipcMessage === "string" ? ipcMessage : (0, node_util.inspect)(ipcMessage);
@@ -4315,11 +4272,10 @@ const omitUndefinedProperties = (result) => Object.fromEntries(Object.entries(re
 const normalizeExitPayload = (rawExitCode, rawSignal) => {
 const exitCode = rawExitCode === null ? void 0 : rawExitCode;
 const signal = rawSignal === null ? void 0 : rawSignal;
-const signalDescription = signal === void 0 ? void 0 : getSignalDescription(rawSignal);
 return {
 exitCode,
 signal,
-signalDescription
+signalDescription: signal === void 0 ? void 0 : getSignalDescription(rawSignal)
 };
 };
 
@@ -4384,8 +4340,7 @@ function prettyMilliseconds(milliseconds, options) {
 let result = [];
 const floorDecimals = (value, decimalDigits) => {
 const flooredInterimValue = Math.floor(value * 10 ** decimalDigits + SECOND_ROUNDING_EPSILON);
-
-return flooredValue.toFixed(decimalDigits);
+return (Math.round(flooredInterimValue) / 10 ** decimalDigits).toFixed(decimalDigits);
 };
 const add = (value, long, short, valueString) => {
 if ((result.length === 0 || !options.colonNotation) && isZero(value) && !(options.colonNotation && short === "m")) return;
@@ -4422,14 +4377,11 @@ function prettyMilliseconds(milliseconds, options) {
 } else {
 const millisecondsAndBelow = milliseconds$1 + microseconds / 1e3 + nanoseconds / 1e6;
 const millisecondsDecimalDigits = typeof options.millisecondsDecimalDigits === "number" ? options.millisecondsDecimalDigits : 0;
-const
-const millisecondsString = millisecondsDecimalDigits ? millisecondsAndBelow.toFixed(millisecondsDecimalDigits) : roundedMilliseconds;
+const millisecondsString = millisecondsDecimalDigits ? millisecondsAndBelow.toFixed(millisecondsDecimalDigits) : millisecondsAndBelow >= 1 ? Math.round(millisecondsAndBelow) : Math.ceil(millisecondsAndBelow);
 add(Number.parseFloat(millisecondsString), "millisecond", "ms", millisecondsString);
 }
 } else {
-const
-const secondsDecimalDigits = typeof options.secondsDecimalDigits === "number" ? options.secondsDecimalDigits : 1;
-const secondsFixed = floorDecimals(seconds, secondsDecimalDigits);
+const secondsFixed = floorDecimals((isBigInt ? Number(milliseconds % ONE_DAY_IN_MILLISECONDS) : milliseconds) / 1e3 % 60, typeof options.secondsDecimalDigits === "number" ? options.secondsDecimalDigits : 1);
 const secondsString = options.keepDecimalsOnWholeSeconds ? secondsFixed : secondsFixed.replace(/\.0+$/, "");
 add(Number.parseFloat(secondsString), "second", "s", secondsString);
 }
@@ -4458,10 +4410,9 @@ const logResult = (result, verboseInfo) => {
 logDuration(result, verboseInfo);
 };
 const logDuration = (result, verboseInfo) => {
-const verboseMessage = `(done in ${prettyMilliseconds(result.durationMs)})`;
 verboseLog({
 type: "duration",
-verboseMessage
+verboseMessage: `(done in ${prettyMilliseconds(result.durationMs)})`,
 verboseInfo,
 result
 });
@@ -4588,18 +4539,16 @@ const TYPE_TO_MESSAGE = {
 const getTransformObjectModes = (objectMode, index, newTransforms, direction) => direction === "output" ? getOutputObjectModes(objectMode, index, newTransforms) : getInputObjectModes(objectMode, index, newTransforms);
 const getOutputObjectModes = (objectMode, index, newTransforms) => {
 const writableObjectMode = index !== 0 && newTransforms[index - 1].value.readableObjectMode;
-const readableObjectMode = objectMode ?? writableObjectMode;
 return {
 writableObjectMode,
-readableObjectMode
+readableObjectMode: objectMode ?? writableObjectMode
 };
 };
 const getInputObjectModes = (objectMode, index, newTransforms) => {
 const writableObjectMode = index === 0 ? objectMode === true : newTransforms[index - 1].value.readableObjectMode;
-const readableObjectMode = index !== newTransforms.length - 1 && (objectMode ?? writableObjectMode);
 return {
 writableObjectMode,
-readableObjectMode
+readableObjectMode: index !== newTransforms.length - 1 && (objectMode ?? writableObjectMode)
 };
 };
 const getFdObjectMode = (stdioItems, direction) => {
@@ -4908,8 +4857,7 @@ const validateDuplicateStreamSync = ({ otherStdioItems, type, value, optionName,
 const getDuplicateStreamInstance = ({ otherStdioItems, type, value, optionName, direction }) => {
 const duplicateStdioItems = otherStdioItems.filter((stdioItem) => hasSameValue(stdioItem, value));
 if (duplicateStdioItems.length === 0) return;
-
-throwOnDuplicateStream(differentStdioItem, optionName, type);
+throwOnDuplicateStream(duplicateStdioItems.find((stdioItem) => stdioItem.direction !== direction), optionName, type);
 return direction === "output" ? duplicateStdioItems[0].stream : void 0;
 };
 const hasSameValue = ({ type, value }, secondValue) => {
@@ -4918,8 +4866,7 @@ const hasSameValue = ({ type, value }, secondValue) => {
 return value === secondValue;
 };
 const validateDuplicateTransform = ({ otherStdioItems, type, value, optionName }) => {
-
-throwOnDuplicateStream(duplicateStdioItem, optionName, type);
+throwOnDuplicateStream(otherStdioItems.find(({ value: { transform } }) => transform === value.transform), optionName, type);
 };
 const throwOnDuplicateStream = (stdioItem, optionName, type) => {
 if (stdioItem !== void 0) throw new TypeError(`The \`${stdioItem.optionName}\` and \`${optionName}\` options must not target ${TYPE_TO_MESSAGE[type]} that is the same.`);
@@ -4928,15 +4875,13 @@ const throwOnDuplicateStream = (stdioItem, optionName, type) => {
 //#endregion
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/stdio/handle.js
 const handleStdio = (addProperties$2, options, verboseInfo, isSync) => {
-const stdio = normalizeStdioOption(options, verboseInfo, isSync);
-const initialFileDescriptors = stdio.map((stdioOption, fdNumber) => getFileDescriptor({
-stdioOption,
-fdNumber,
-options,
-isSync
-}));
 const fileDescriptors = getFinalFileDescriptors({
-initialFileDescriptors,
+initialFileDescriptors: normalizeStdioOption(options, verboseInfo, isSync).map((stdioOption, fdNumber) => getFileDescriptor({
+stdioOption,
+fdNumber,
+options,
+isSync
+})),
 addProperties: addProperties$2,
 options,
 isSync
@@ -4953,14 +4898,13 @@ const getFileDescriptor = ({ stdioOption, fdNumber, options, isSync }) => {
 optionName
 });
 const direction = getStreamDirection(initialStdioItems, fdNumber, optionName);
-const
+const normalizedStdioItems = normalizeTransforms(initialStdioItems.map((stdioItem) => handleNativeStream({
 stdioItem,
 isStdioArray,
 fdNumber,
 direction,
 isSync
-}));
-const normalizedStdioItems = normalizeTransforms(stdioItems, optionName, direction, options);
+})), optionName, direction, options);
 const objectMode = getFdObjectMode(normalizedStdioItems, direction);
 validateFileObjectMode(normalizedStdioItems, objectMode);
 return {
@@ -4970,9 +4914,7 @@ const getFileDescriptor = ({ stdioOption, fdNumber, options, isSync }) => {
 };
 };
 const initializeStdioItems = ({ stdioOption, fdNumber, options, optionName }) => {
-const
-const initialStdioItems = [...values.map((value) => initializeStdioItem(value, optionName)), ...handleInputOptions(options, fdNumber)];
-const stdioItems = filterDuplicates(initialStdioItems);
+const stdioItems = filterDuplicates([...(Array.isArray(stdioOption) ? stdioOption : [stdioOption]).map((value) => initializeStdioItem(value, optionName)), ...handleInputOptions(options, fdNumber)]);
 const isStdioArray = stdioItems.length > 1;
 validateStdioArray(stdioItems, isStdioArray, optionName);
 validateStreams(stdioItems);
@@ -5022,18 +4964,17 @@ const getFinalFileDescriptors = ({ initialFileDescriptors, addProperties: addPro
 }
 };
 const getFinalFileDescriptor = ({ fileDescriptor: { direction, objectMode, stdioItems }, fileDescriptors, addProperties: addProperties$2, options, isSync }) => {
-const finalStdioItems = stdioItems.map((stdioItem) => addStreamProperties({
-stdioItem,
-addProperties: addProperties$2,
-direction,
-options,
-fileDescriptors,
-isSync
-}));
 return {
 direction,
 objectMode,
-stdioItems:
+stdioItems: stdioItems.map((stdioItem) => addStreamProperties({
+stdioItem,
+addProperties: addProperties$2,
+direction,
+options,
+fileDescriptors,
+isSync
+}))
 };
 };
 const addStreamProperties = ({ stdioItem, addProperties: addProperties$2, direction, options, fileDescriptors, isSync }) => {
@@ -5163,8 +5104,7 @@ const appendNewlineGenerator = function* ({ isWindowsNewline = false }, chunk) {
 yield chunk;
 return;
 }
-
-yield concatBytes(chunk, newline);
+yield concatBytes(chunk, isWindowsNewline ? windowsNewline : unixNewline);
 };
 const concatString = (firstChunk, secondChunk) => `${firstChunk}${secondChunk}`;
 const linesStringInfo = {
@@ -5309,7 +5249,7 @@ const generatorToStream = ({ value, value: { transform, final, writableObjectMod
 const transformMethod = transformAsync ? pushChunks.bind(void 0, transformChunk, state) : pushChunksSync.bind(void 0, transformChunkSync);
 const finalMethod = transformAsync || finalAsync ? pushChunks.bind(void 0, finalChunks, state) : pushChunksSync.bind(void 0, finalChunksSync);
 const destroyMethod = transformAsync || finalAsync ? destroyTransform.bind(void 0, state) : void 0;
-
+return { stream: new node_stream.Transform({
 writableObjectMode,
 writableHighWaterMark: (0, node_stream.getDefaultHighWaterMark)(writableObjectMode),
 readableObjectMode,
@@ -5325,16 +5265,12 @@ const generatorToStream = ({ value, value: { transform, final, writableObjectMod
 finalMethod([generators], this, done);
 },
 destroy: destroyMethod
-});
-return { stream };
+}) };
 };
 const runGeneratorsSync = (chunks, stdioItems, encoding, isInput) => {
 const generators = stdioItems.filter(({ type }) => type === "generator");
 const reversedGenerators = isInput ? generators.reverse() : generators;
-for (const { value, optionName } of reversedGenerators)
-const generators$1 = addInternalGenerators(value, encoding, optionName);
-chunks = runTransformSync(generators$1, chunks);
-}
+for (const { value, optionName } of reversedGenerators) chunks = runTransformSync(addInternalGenerators(value, encoding, optionName), chunks);
 return chunks;
 };
 const addInternalGenerators = ({ transform, final, binary, writableObjectMode, readableObjectMode, preserveNewlines }, encoding, optionName) => {
@@ -5371,9 +5307,7 @@ const addInputOptionSync = (fileDescriptors, fdNumber, options) => {
 const [{ type, optionName }] = allStdioItems;
 throw new TypeError(`Only the \`stdin\` option, not \`${optionName}\`, can be ${TYPE_TO_MESSAGE[type]} with synchronous methods.`);
 }
-
-const transformedContents = allContents.map((contents) => applySingleInputGeneratorsSync(contents, stdioItems));
-options.input = joinToUint8Array(transformedContents);
+options.input = joinToUint8Array(allStdioItems.map(({ contents }) => contents).map((contents) => applySingleInputGeneratorsSync(contents, stdioItems)));
 };
 const applySingleInputGeneratorsSync = (contents, stdioItems) => {
 const newContents = runGeneratorsSync(contents, stdioItems, "utf8", true);
@@ -5398,10 +5332,9 @@ const logLinesSync = (linesArray, fdNumber, verboseInfo) => {
 };
 const isPipingStream = (stream) => stream._readableState.pipes.length > 0;
 const logLine = (line, fdNumber, verboseInfo) => {
-const verboseMessage = serializeVerboseMessage(line);
 verboseLog({
 type: "output",
-verboseMessage,
+verboseMessage: serializeVerboseMessage(line),
 fdNumber,
 verboseInfo
 });
@@ -5413,28 +5346,25 @@ const transformOutputSync = ({ fileDescriptors, syncResult: { output }, options,
 if (output === null) return { output: Array.from({ length: 3 }) };
 const state = {};
 const outputFiles = /* @__PURE__ */ new Set([]);
-const transformedOutput = output.map((result, fdNumber) => transformOutputResultSync({
-result,
-fileDescriptors,
-fdNumber,
-state,
-outputFiles,
-isMaxBuffer,
-verboseInfo
-}, options));
 return {
-output:
+output: output.map((result, fdNumber) => transformOutputResultSync({
+result,
+fileDescriptors,
+fdNumber,
+state,
+outputFiles,
+isMaxBuffer,
+verboseInfo
+}, options)),
 ...state
 };
 };
 const transformOutputResultSync = ({ result, fileDescriptors, fdNumber, state, outputFiles, isMaxBuffer, verboseInfo }, { buffer, encoding, lines, stripFinalNewline: stripFinalNewline$1, maxBuffer }) => {
 if (result === null) return;
-const
-const uint8ArrayResult = bufferToUint8Array(truncatedResult);
+const uint8ArrayResult = bufferToUint8Array(truncateMaxBufferSync(result, isMaxBuffer, maxBuffer));
 const { stdioItems, objectMode } = fileDescriptors[fdNumber];
-const chunks = runOutputGeneratorsSync([uint8ArrayResult], stdioItems, encoding, state);
 const { serializedResult, finalResult = serializedResult } = serializeChunks({
-chunks,
+chunks: runOutputGeneratorsSync([uint8ArrayResult], stdioItems, encoding, state),
 objectMode,
 encoding,
 lines,
@@ -5545,14 +5475,12 @@ const isFailedExit = (exitCode, signal) => exitCode !== 0 || signal !== null;
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/resolve/exit-sync.js
 const getExitResultSync = ({ error, status: exitCode, signal, output }, { maxBuffer }) => {
 const resultError = getResultError(error, exitCode, signal);
-const timedOut = resultError?.code === "ETIMEDOUT";
-const isMaxBuffer = isMaxBufferSync(resultError, output, maxBuffer);
 return {
 resultError,
 exitCode,
 signal,
-timedOut,
-isMaxBuffer
+timedOut: resultError?.code === "ETIMEDOUT",
+isMaxBuffer: isMaxBufferSync(resultError, output, maxBuffer)
 };
 };
 const getResultError = (error, exitCode, signal) => {
@@ -5564,7 +5492,7 @@ const getResultError = (error, exitCode, signal) => {
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/methods/main-sync.js
 const execaCoreSync = (rawFile, rawArguments, rawOptions) => {
 const { file, commandArguments, command, escapedCommand, startTime, verboseInfo, options, fileDescriptors } = handleSyncArguments(rawFile, rawArguments, rawOptions);
-
+return handleResult(spawnSubprocessSync({
 file,
 commandArguments,
 options,
@@ -5573,15 +5501,12 @@ const execaCoreSync = (rawFile, rawArguments, rawOptions) => {
 verboseInfo,
 fileDescriptors,
 startTime
-});
-return handleResult(result, verboseInfo, options);
+}), verboseInfo, options);
 };
 const handleSyncArguments = (rawFile, rawArguments, rawOptions) => {
 const { command, escapedCommand, startTime, verboseInfo } = handleCommand(rawFile, rawArguments, rawOptions);
-const
-const { file, commandArguments, options } = normalizeOptions(rawFile, rawArguments, syncOptions);
+const { file, commandArguments, options } = normalizeOptions(rawFile, rawArguments, normalizeSyncOptions(rawOptions));
 validateSyncOptions(options);
-const fileDescriptors = handleStdioSync(options, verboseInfo);
 return {
 file,
 commandArguments,
@@ -5590,7 +5515,7 @@ const handleSyncArguments = (rawFile, rawArguments, rawOptions) => {
 startTime,
 verboseInfo,
 options,
-fileDescriptors
+fileDescriptors: handleStdioSync(options, verboseInfo)
 };
 };
 const normalizeSyncOptions = (options) => options.node && !options.ipc ? {
@@ -5625,16 +5550,14 @@ const spawnSubprocessSync = ({ file, commandArguments, options, command, escaped
 isMaxBuffer,
 verboseInfo
 });
-const stdio = output.map((stdioOutput, fdNumber) => stripNewline(stdioOutput, options, fdNumber));
-const all = stripNewline(getAllSync(output, options), options, "all");
 return getSyncResult({
 error,
 exitCode,
 signal,
 timedOut,
 isMaxBuffer,
-stdio,
-all,
+stdio: output.map((stdioOutput, fdNumber) => stripNewline(stdioOutput, options, fdNumber)),
+all: stripNewline(getAllSync(output, options), options, "all"),
 options,
 command,
 escapedCommand,
@@ -5644,8 +5567,7 @@ const spawnSubprocessSync = ({ file, commandArguments, options, command, escaped
 const runSubprocessSync = ({ file, commandArguments, options, command, escapedCommand, fileDescriptors, startTime }) => {
 try {
 addInputOptionsSync(fileDescriptors, options);
-
-return (0, node_child_process.spawnSync)(file, commandArguments, normalizedOptions);
+return (0, node_child_process.spawnSync)(file, commandArguments, normalizeSpawnSyncOptions(options));
 } catch (error) {
 return makeEarlyError({
 error,
@@ -5863,19 +5785,17 @@ const handleEarlyError = ({ error, command, escapedCommand, fileDescriptors, opt
 writable,
 duplex
 });
-const earlyError = makeEarlyError({
-error,
-command,
-escapedCommand,
-fileDescriptors,
-options,
-startTime,
-isSync: false
-});
-const promise = handleDummyPromise(earlyError, verboseInfo, options);
 return {
 subprocess,
-promise
+promise: handleDummyPromise(makeEarlyError({
+error,
+command,
+escapedCommand,
+fileDescriptors,
+options,
+startTime,
+isSync: false
+}), verboseInfo, options)
 };
 };
 const createDummyStreams = (subprocess, fileDescriptors) => {
@@ -5924,8 +5844,7 @@ const addProperties = {
 nodeStream: ({ value }) => ({ stream: value }),
 webTransform({ value: { transform, writableObjectMode, readableObjectMode } }) {
 const objectMode = writableObjectMode || readableObjectMode;
-
-return { stream };
+return { stream: node_stream.Duplex.fromWeb(transform, { objectMode }) };
 },
 duplex: ({ value: { transform } }) => ({ stream: transform }),
 native() {}
@@ -6171,10 +6090,7 @@ const pipeOutputAsync = (subprocess, fileDescriptors, controller) => {
 controller
 });
 }
-for (const [outputStream, inputStreams] of pipeGroups.entries())
-const inputStream = inputStreams.length === 1 ? inputStreams[0] : mergeStreams(inputStreams);
-pipeStreams(inputStream, outputStream);
-}
+for (const [outputStream, inputStreams] of pipeGroups.entries()) pipeStreams(inputStreams.length === 1 ? inputStreams[0] : mergeStreams(inputStreams), outputStream);
 };
 const pipeTransform = (subprocess, stream, direction, fdNumber) => {
 if (direction === "output") pipeStreams(subprocess.stdio[fdNumber], stream);
@@ -6436,10 +6352,9 @@ const normalizePipeArguments = ({ source, sourcePromise, boundOptions, createNes
 const getDestinationStream = (boundOptions, createNested, pipeArguments) => {
 try {
 const { destination, pipeOptions: { from, to, unpipeSignal } = {} } = getDestination(boundOptions, createNested, ...pipeArguments);
-const destinationStream = getToStream(destination, to);
 return {
 destination,
-destinationStream,
+destinationStream: getToStream(destination, to),
 from,
 unpipeSignal
 };
@@ -6448,19 +6363,15 @@ const getDestinationStream = (boundOptions, createNested, pipeArguments) => {
 }
 };
 const getDestination = (boundOptions, createNested, firstArgument, ...pipeArguments) => {
-if (Array.isArray(firstArgument)) {
-
-
-
-pipeOptions: boundOptions
-};
-}
+if (Array.isArray(firstArgument)) return {
+destination: createNested(mapDestinationArguments, boundOptions)(firstArgument, ...pipeArguments),
+pipeOptions: boundOptions
+};
 if (typeof firstArgument === "string" || firstArgument instanceof URL || isDenoExecPath(firstArgument)) {
 if (Object.keys(boundOptions).length > 0) throw new TypeError("Please use .pipe(\"file\", ..., options) or .pipe(execa(\"file\", ..., options)) instead of .pipe(options)(\"file\", ...).");
 const [rawFile, rawArguments, rawOptions] = normalizeParameters(firstArgument, ...pipeArguments);
-const destination = createNested(mapDestinationArguments)(rawFile, rawArguments, rawOptions);
 return {
-destination,
+destination: createNested(mapDestinationArguments)(rawFile, rawArguments, rawOptions),
 pipeOptions: rawOptions
 };
 }
@@ -6480,8 +6391,7 @@ const mapDestinationArguments = ({ options }) => ({ options: {
 } });
 const getSourceStream = (source, from) => {
 try {
-
-return { sourceStream };
+return { sourceStream: getFromStream(source, from) };
 } catch (error) {
 return { sourceError: error };
 }
@@ -6575,9 +6485,8 @@ const unpipeOnAbort = (unpipeSignal, unpipeContext) => unpipeSignal === void 0 ?
 const unpipeOnSignalAbort = async (unpipeSignal, { sourceStream, mergedStream, fileDescriptors, sourceOptions, startTime }) => {
 await (0, node_util.aborted)(unpipeSignal, sourceStream);
 await mergedStream.remove(sourceStream);
-const error = /* @__PURE__ */ new Error("Pipe canceled by `unpipeSignal` option.");
 throw createNonCommandError({
-error,
+error: /* @__PURE__ */ new Error("Pipe canceled by `unpipeSignal` option."),
 fileDescriptors,
 sourceOptions,
 startTime
@@ -6680,13 +6589,12 @@ const stopReadingOnStreamEnd = async (onStreamEnd, controller, stream) => {
 }
 };
 const iterateOnStream = ({ stream, controller, binary, shouldEncode, encoding, shouldSplit, preserveNewlines }) => {
-const onStdoutChunk = (0, node_events.on)(stream, "data", {
-signal: controller.signal,
-highWaterMark: HIGH_WATER_MARK,
-highWatermark: HIGH_WATER_MARK
-});
 return iterateOnData({
-onStdoutChunk,
+onStdoutChunk: (0, node_events.on)(stream, "data", {
+signal: controller.signal,
+highWaterMark: HIGH_WATER_MARK,
+highWatermark: HIGH_WATER_MARK
+}),
 controller,
 binary,
 shouldEncode,
@@ -6731,13 +6639,12 @@ const getStreamOutput = async ({ stream, onStreamEnd, fdNumber, encoding, buffer
 await Promise.all([resumeStream(stream), logPromise]);
 return;
 }
-const stripFinalNewlineValue = getStripFinalNewline(stripFinalNewline$1, fdNumber);
 const iterable = iterateForResult({
 stream,
 onStreamEnd,
 lines,
 encoding,
-stripFinalNewline:
+stripFinalNewline: getStripFinalNewline(stripFinalNewline$1, fdNumber),
 allMixed
 });
 const [output] = await Promise.all([getStreamContents({
@@ -6757,15 +6664,14 @@ const logOutputAsync = async ({ stream, onStreamEnd, fdNumber, encoding, allMixe
 verboseInfo,
 fdNumber
 })) return;
-
+await logLines(iterateForResult({
 stream,
 onStreamEnd,
 lines: true,
 encoding,
 stripFinalNewline: true,
 allMixed
-});
-await logLines(linesIterable, stream, fdNumber, verboseInfo);
+}), stream, fdNumber, verboseInfo);
 };
 const resumeStream = async (stream) => {
 await (0, node_timers_promises.setImmediate)();
@@ -6915,10 +6821,9 @@ const getAllMixed = ({ all, stdout, stderr }) => all && stdout && stderr && stdo
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/verbose/ipc.js
 const shouldLogIpc = (verboseInfo) => isFullVerbose(verboseInfo, "ipc");
 const logIpcOutput = (message, verboseInfo) => {
-const verboseMessage = serializeVerboseMessage(message);
 verboseLog({
 type: "ipc",
-verboseMessage,
+verboseMessage: serializeVerboseMessage(message),
 fdNumber: "ipc",
 verboseInfo
 });
@@ -7061,9 +6966,8 @@ const addConcurrentStream = (concurrentStreams, stream, waitName) => {
 const promises = weakMap.get(stream);
 const promise = createDeferred();
 promises.push(promise);
-const resolve = promise.resolve.bind(promise);
 return {
-resolve,
+resolve: promise.resolve.bind(promise),
 promises
 };
 };
@@ -7144,10 +7048,9 @@ const createReadable = ({ subprocess, concurrentStreams, encoding }, { from, bin
 };
 const getSubprocessStdout = (subprocess, from, concurrentStreams) => {
 const subprocessStdout = getFromStream(subprocess, from);
-const waitReadableDestroy = addConcurrentStream(concurrentStreams, subprocessStdout, "readableDestroy");
 return {
 subprocessStdout,
-waitReadableDestroy
+waitReadableDestroy: addConcurrentStream(concurrentStreams, subprocessStdout, "readableDestroy")
 };
 };
 const getReadableOptions = ({ readableEncoding, readableObjectMode, readableHighWaterMark }, binary) => binary ? {
@@ -7225,12 +7128,10 @@ const createWritable = ({ subprocess, concurrentStreams }, { to } = {}) => {
 };
 const getSubprocessStdin = (subprocess, to, concurrentStreams) => {
 const subprocessStdin = getToStream(subprocess, to);
-const waitWritableFinal = addConcurrentStream(concurrentStreams, subprocessStdin, "writableFinal");
-const waitWritableDestroy = addConcurrentStream(concurrentStreams, subprocessStdin, "writableDestroy");
 return {
 subprocessStdin,
-waitWritableFinal,
-waitWritableDestroy
+waitWritableFinal: addConcurrentStream(concurrentStreams, subprocessStdin, "writableFinal"),
+waitWritableDestroy: addConcurrentStream(concurrentStreams, subprocessStdin, "writableDestroy")
 };
 };
 const getWritableMethods = (subprocessStdin, subprocess, waitWritableFinal) => ({
@@ -7326,15 +7227,14 @@ const onDuplexDestroy = async ({ subprocessStdout, subprocessStdin, subprocess,
  const createIterable = (subprocess, encoding, { from, binary: binaryOption = false, preserveNewlines = false } = {}) => {
  const binary = binaryOption || BINARY_ENCODINGS.has(encoding);
  const subprocessStdout = getFromStream(subprocess, from);
-
+ return iterateOnStdoutData(iterateOnSubprocessStream({
  subprocessStdout,
  subprocess,
  binary,
  shouldEncode: true,
  encoding,
  preserveNewlines
- });
- return iterateOnStdoutData(onStdoutData, subprocessStdout, subprocess);
+ }), subprocessStdout, subprocess);
  };
  const iterateOnStdoutData = async function* (onStdoutData, subprocessStdout, subprocess) {
  try {
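createIterable is what backs async iteration over a subprocess's output. A hedged usage sketch, assuming execa's public iterable() API that this bundled helper appears to implement (the command and options are illustrative only):

import { execa } from "execa";

// Iterate stdout line by line; the options mirror the parameters seen above
// (from, binary, preserveNewlines).
const subprocess = execa("ls", ["-1"]);
for await (const line of subprocess.iterable({ from: "stdout", preserveNewlines: false })) {
  console.log("entry:", line);
}
await subprocess;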
@@ -7416,7 +7316,6 @@ const handleAsyncArguments = (rawFile, rawArguments, rawOptions) => {
  const { command, escapedCommand, startTime, verboseInfo } = handleCommand(rawFile, rawArguments, rawOptions);
  const { file, commandArguments, options: normalizedOptions } = normalizeOptions(rawFile, rawArguments, rawOptions);
  const options = handleAsyncOptions(normalizedOptions);
- const fileDescriptors = handleStdioAsync(options, verboseInfo);
  return {
  file,
  commandArguments,
@@ -7425,7 +7324,7 @@ const handleAsyncArguments = (rawFile, rawArguments, rawOptions) => {
  startTime,
  verboseInfo,
  options,
- fileDescriptors
+ fileDescriptors: handleStdioAsync(options, verboseInfo)
  };
  };
  const handleAsyncOptions = ({ timeout, signal,...options }) => {
@@ -7498,22 +7397,19 @@ const handlePromise = async ({ subprocess, options, startTime, verboseInfo, file
  });
  controller.abort();
  onInternalError.resolve();
-
- const all = stripNewline(allResult, options, "all");
- const result = getAsyncResult({
+ return handleResult(getAsyncResult({
  errorInfo,
  exitCode,
  signal,
- stdio,
- all,
+ stdio: stdioResults.map((stdioResult, fdNumber) => stripNewline(stdioResult, options, fdNumber)),
+ all: stripNewline(allResult, options, "all"),
  ipcOutput,
  context,
  options,
  command,
  escapedCommand,
  startTime
- });
- return handleResult(result, verboseInfo, options);
+ }), verboseInfo, options);
  };
  const getAsyncResult = ({ errorInfo, exitCode, signal, stdio, all, ipcOutput, context, options, command, escapedCommand, startTime }) => "error" in errorInfo ? makeError({
  error: errorInfo.error,
@@ -7586,8 +7482,7 @@ const callBoundExeca = ({ mapArguments, deepOptions = {}, boundOptions = {}, set
  return isSync ? execaCoreSync(file, commandArguments, options) : execaCoreAsync(file, commandArguments, options, createNested);
  };
  const parseArguments = ({ mapArguments, firstArgument, nextArguments, deepOptions, boundOptions }) => {
- const
- const [initialFile, initialArguments, initialOptions] = normalizeParameters(...callArguments);
+ const [initialFile, initialArguments, initialOptions] = normalizeParameters(...isTemplateString(firstArgument) ? parseTemplates(firstArgument, nextArguments) : [firstArgument, ...nextArguments]);
  const mergedOptions = mergeOptions(mergeOptions(deepOptions, boundOptions), initialOptions);
  const { file = initialFile, commandArguments = initialArguments, options = mergedOptions, isSync = false } = mapArguments({
  file: initialFile,
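parseArguments now folds the template-string branch into the normalizeParameters call. For context, both call styles that the branch distinguishes, using execa's documented API:

import { execa } from "execa";

// Array form: firstArgument is the file, nextArguments carry the arguments.
const { stdout: a } = await execa("echo", ["hello"]);

// Tagged-template form: firstArgument is a template strings array, so the
// parseTemplates branch seen above turns it into [file, arguments] first.
const { stdout: b } = await execa`echo hello`;

console.log(a === b); // true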
@@ -7693,8 +7588,7 @@ function delay(ms, { signal } = {}) {
  function isPlainObject(value) {
  if (!value || typeof value !== "object") return false;
  const proto = Object.getPrototypeOf(value);
-
- if (!hasObjectPrototype) return false;
+ if (!(proto === null || proto === Object.prototype || Object.getPrototypeOf(proto) === null)) return false;
  return Object.prototype.toString.call(value) === "[object Object]";
  }
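The rewritten guard folds the old hasObjectPrototype temporary into a single condition; the accepted values are unchanged. The same check in isolation, with the usual edge cases:

// Accept only null-prototype objects, direct Object.prototype instances,
// or plain objects from another realm (whose Object.prototype's prototype is null).
function isPlainObject(value) {
  if (!value || typeof value !== "object") return false;
  const proto = Object.getPrototypeOf(value);
  if (!(proto === null || proto === Object.prototype || Object.getPrototypeOf(proto) === null)) return false;
  return Object.prototype.toString.call(value) === "[object Object]";
}

console.log(isPlainObject({}));                  // true
console.log(isPlainObject(Object.create(null))); // true
console.log(isPlainObject([]));                  // false
console.log(isPlainObject(new Date()));          // false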
@@ -7741,8 +7635,7 @@ var CloudFrontManager = class {
  region: this.region,
  credentials: this.credentials
  });
- const
- const existingKeyGroup = listKgResp.KeyGroupList?.Items?.find((kg) => kg.KeyGroup?.KeyGroupConfig?.Name?.startsWith(`HotUpdaterKeyGroup-${publicKeyHash}`));
+ const existingKeyGroup = (await cloudfrontClient.listKeyGroups({})).KeyGroupList?.Items?.find((kg) => kg.KeyGroup?.KeyGroupConfig?.Name?.startsWith(`HotUpdaterKeyGroup-${publicKeyHash}`));
  const existingPublicKeyId = existingKeyGroup?.KeyGroup?.KeyGroupConfig?.Items?.[0];
  const existingKeyGroupId = existingKeyGroup?.KeyGroup?.Id;
  if (existingPublicKeyId && existingKeyGroupId) return {
@@ -7756,8 +7649,7 @@ var CloudFrontManager = class {
  EncodedKey: publicKey,
  Comment: "HotUpdater public key for signed URL"
  };
- const
- const publicKeyId = createPubKeyResp.PublicKey?.Id;
+ const publicKeyId = (await cloudfrontClient.createPublicKey({ PublicKeyConfig: publicKeyConfig })).PublicKey?.Id;
  if (!publicKeyId) throw new Error("Failed to create CloudFront public key");
  const callerReferenceKg = `HotUpdaterKeyGroup-${publicKeyHash}`;
  const keyGroupConfig = {
@@ -7766,8 +7658,7 @@ var CloudFrontManager = class {
  Comment: "HotUpdater key group for signed URL",
  Items: [publicKeyId]
  };
- const
- const keyGroupId = createKgResp.KeyGroup?.Id;
+ const keyGroupId = (await cloudfrontClient.createKeyGroup({ KeyGroupConfig: keyGroupConfig })).KeyGroup?.Id;
  if (!keyGroupId) throw new Error("Failed to create Key Group");
  f.success(`Created new Key Group: ${keyGroupConfig.Name}`);
  return {
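Unminified, the three key-group hunks above reduce to a list-then-create flow against CloudFront. A readable sketch of that flow; the publicKeyHash derivation and CallerReference values are assumptions for illustration, only the resource names and response fields come from the diff:

import { CloudFront } from "@aws-sdk/client-cloudfront";
import crypto from "node:crypto";

const getOrCreateKeyGroup = async (cloudfrontClient, publicKey) => {
  // Assumed hash scheme; the bundled code derives publicKeyHash outside these hunks.
  const publicKeyHash = crypto.createHash("sha256").update(publicKey).digest("hex").slice(0, 8);

  // Reuse an existing key group whose name encodes the same public key hash.
  const existing = (await cloudfrontClient.listKeyGroups({})).KeyGroupList?.Items?.find(
    (kg) => kg.KeyGroup?.KeyGroupConfig?.Name?.startsWith(`HotUpdaterKeyGroup-${publicKeyHash}`)
  );
  if (existing?.KeyGroup?.Id && existing.KeyGroup.KeyGroupConfig?.Items?.[0]) {
    return { publicKeyId: existing.KeyGroup.KeyGroupConfig.Items[0], keyGroupId: existing.KeyGroup.Id };
  }

  // Otherwise create the public key, then a key group referencing it.
  const publicKeyId = (await cloudfrontClient.createPublicKey({
    PublicKeyConfig: {
      CallerReference: `HotUpdaterPublicKey-${publicKeyHash}`, // assumed value
      Name: `HotUpdaterPublicKey-${publicKeyHash}`,            // assumed value
      EncodedKey: publicKey,
      Comment: "HotUpdater public key for signed URL"
    }
  })).PublicKey?.Id;
  if (!publicKeyId) throw new Error("Failed to create CloudFront public key");

  const keyGroupId = (await cloudfrontClient.createKeyGroup({
    KeyGroupConfig: {
      Name: `HotUpdaterKeyGroup-${publicKeyHash}`,
      Comment: "HotUpdater key group for signed URL",
      Items: [publicKeyId]
    }
  })).KeyGroup?.Id;
  if (!keyGroupId) throw new Error("Failed to create Key Group");
  return { publicKeyId, keyGroupId };
};

// const ids = await getOrCreateKeyGroup(new CloudFront({ region: "us-east-1" }), pemPublicKey);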
@@ -7781,37 +7672,28 @@ var CloudFrontManager = class {
  credentials: this.credentials
  });
  let oacId;
-
- if (!accountId) throw new Error("Failed to get AWS account ID");
+ if (!options.functionArn.split(":")[4]) throw new Error("Failed to get AWS account ID");
  try {
- const
- const existingOac = listOacResp.OriginAccessControlList?.Items?.find((oac) => oac.Name === "HotUpdaterOAC");
+ const existingOac = (await cloudfrontClient.listOriginAccessControls({})).OriginAccessControlList?.Items?.find((oac) => oac.Name === "HotUpdaterOAC");
  if (existingOac?.Id) oacId = existingOac.Id;
- else {
-
-
-
-
-
-
- oacId = createOacResp.OriginAccessControl?.Id;
- }
- } catch (error) {
+ else oacId = (await cloudfrontClient.createOriginAccessControl({ OriginAccessControlConfig: {
+ Name: "HotUpdaterOAC",
+ OriginAccessControlOriginType: "s3",
+ SigningBehavior: "always",
+ SigningProtocol: "sigv4"
+ } })).OriginAccessControl?.Id;
+ } catch {
  throw new Error("Failed to get or create Origin Access Control");
  }
  if (!oacId) throw new Error("Failed to get Origin Access Control ID");
  const bucketDomain = `${options.bucketName}.s3.${this.region}.amazonaws.com`;
  const matchingDistributions = [];
  try {
- const
- const items
-
-
-
- Id: dist.Id,
- DomainName: dist.DomainName
- });
- }
+ const items = (await cloudfrontClient.listDistributions({})).DistributionList?.Items || [];
+ for (const dist of items) if ((dist.Origins?.Items || []).some((origin) => origin.DomainName === bucketDomain)) matchingDistributions.push({
+ Id: dist.Id,
+ DomainName: dist.DomainName
+ });
  } catch (error) {
  console.error("Error listing CloudFront distributions:", error);
  }
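The accountId temporary is replaced by splitting the Lambda@Edge function ARN in place; the fifth colon-separated field of an ARN is the account ID. For example:

// arn:partition:service:region:account-id:resource...
const functionArn = "arn:aws:lambda:us-east-1:123456789012:function:hot-updater-edge:1"; // example ARN
const accountId = functionArn.split(":")[4];
console.log(accountId); // "123456789012"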
@@ -8133,10 +8015,9 @@ var CloudFrontManager = class {
  title: "Waiting for CloudFront distribution to complete...",
  task: async (message) => {
  while (retryCount < 600) try {
-
- if (status.Distribution?.Status === "Deployed") return "CloudFront distribution deployment completed.";
+ if ((await cloudfrontClient.getDistribution({ Id: distributionId })).Distribution?.Status === "Deployed") return "CloudFront distribution deployment completed.";
  throw new Error("Retry");
- } catch (
+ } catch (_err) {
  if (retryCount++ >= 5) message(`CloudFront distribution is still in progress. This may take a few minutes. (${retryCount})`);
  await delay(1e3);
  }
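The deployment wait is a plain poll of getDistribution until the status reads "Deployed". The same pattern as a generic helper (a sketch; the 600-attempt budget and 1-second interval mirror the hunk above):

import { setTimeout as delay } from "node:timers/promises";

// Poll `check` until it returns a truthy value or the attempt budget runs out.
const pollUntil = async (check, { intervalMs = 1000, maxAttempts = 600 } = {}) => {
  for (let attempt = 0; attempt < maxAttempts; attempt += 1) {
    if (await check(attempt)) return;
    await delay(intervalMs);
  }
  throw new Error("Timed out waiting for condition");
};

// Usage mirroring the hunk:
// await pollUntil(async () =>
//   (await cloudfrontClient.getDistribution({ Id: distributionId })).Distribution?.Status === "Deployed");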
@@ -8184,14 +8065,13 @@ var IAMManager = class {
  f.info(`Using existing IAM role: ${roleName} (${existingRole.Arn})`);
  return existingRole.Arn;
  }
- } catch
+ } catch {
  try {
- const
+ const lambdaRoleArn = (await iamClient.createRole({
  RoleName: roleName,
  AssumeRolePolicyDocument: assumeRolePolicyDocument,
  Description: "Role for Lambda@Edge to access S3"
- });
- const lambdaRoleArn = createRoleResp.Role?.Arn;
+ })).Role?.Arn;
  f.info(`Created IAM role: ${roleName} (${lambdaRoleArn})`);
  await iamClient.attachRolePolicy({
  RoleName: roleName,
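createRole receives an assumeRolePolicyDocument that sits outside this hunk. For Lambda@Edge the trust policy has to allow both lambda.amazonaws.com and edgelambda.amazonaws.com to assume the role; a typical document (an assumption, not necessarily the package's exact policy):

// Trust policy letting Lambda and Lambda@Edge assume the role.
const assumeRolePolicyDocument = JSON.stringify({
  Version: "2012-10-17",
  Statement: [{
    Effect: "Allow",
    Principal: { Service: ["lambda.amazonaws.com", "edgelambda.amazonaws.com"] },
    Action: "sts:AssumeRole"
  }]
});

// Passed to IAM exactly as in the hunk above:
// await iamClient.createRole({ RoleName: roleName, AssumeRolePolicyDocument: assumeRolePolicyDocument, Description: "..." });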
@@ -8228,8 +8108,7 @@ var LambdaEdgeDeployer = class {
  });
  if (BD(lambdaName)) process.exit(1);
  const lambdaPath = require.resolve("@hot-updater/aws/lambda");
- const
- const { tmpDir, removeTmpDir } = await (0, __hot_updater_plugin_core.copyDirToTmp)(lambdaDir);
+ const { tmpDir, removeTmpDir } = await (0, __hot_updater_plugin_core.copyDirToTmp)(path.default.dirname(lambdaPath));
  const indexPath = path.default.join(tmpDir, "index.cjs");
  const code = (0, __hot_updater_plugin_core.transformEnv)(indexPath, {
  CLOUDFRONT_KEY_PAIR_ID: keyPair.publicKey,
@@ -8255,7 +8134,7 @@ var LambdaEdgeDeployer = class {
  targetDir: tmpDir
  });
  return "Compressed Lambda code to zip";
- } catch
+ } catch {
  throw new Error("Failed to create zip archive of Lambda function code");
  }
  }
@@ -8357,7 +8236,7 @@ var S3Migration = class {
  backupMapping = /* @__PURE__ */ new Map();
  async doUpdateFile(key, content, { cacheControl } = {}) {
  const normalizedKey = key.startsWith("/") ? key.substring(1) : key;
-
+ await new __aws_sdk_lib_storage.Upload({
  client: this.s3,
  params: {
  Bucket: this.bucketName,
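doUpdateFile now chains .done() straight onto the Upload instance instead of keeping an upload variable. The same call in isolation (assuming @aws-sdk/lib-storage and @aws-sdk/client-s3; bucket, key, and body are placeholders):

import { S3Client } from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";

const s3 = new S3Client({ region: "us-east-1" });

// Multipart-capable upload; .done() resolves once the object is stored.
await new Upload({
  client: s3,
  params: {
    Bucket: "my-hot-updater-bucket",    // placeholder
    Key: "production/ios/update.json",  // placeholder
    Body: JSON.stringify([]),
    CacheControl: "max-age=31536000"
  }
}).done();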
@@ -8365,12 +8244,11 @@ var S3Migration = class {
  Body: content,
  CacheControl: cacheControl
  }
- });
- await upload.done();
+ }).done();
  }
  async getKeys(prefix) {
  const keys = [];
- let continuationToken
+ let continuationToken;
  do {
  const command = new __aws_sdk_client_s3.ListObjectsV2Command({
  Bucket: this.bucketName,
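getKeys pages through ListObjectsV2 with the continuationToken declared just above. The loop written out on its own (error handling omitted):

import { S3Client, ListObjectsV2Command } from "@aws-sdk/client-s3";

const listAllKeys = async (s3, bucket, prefix) => {
  const keys = [];
  let continuationToken;
  do {
    const response = await s3.send(new ListObjectsV2Command({
      Bucket: bucket,
      Prefix: prefix,
      ContinuationToken: continuationToken
    }));
    keys.push(...(response.Contents ?? []).map((object) => object.Key));
    continuationToken = response.NextContinuationToken;
  } while (continuationToken);
  return keys;
};

// const keys = await listAllKeys(new S3Client({ region: "us-east-1" }), "my-bucket", "production/");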
@@ -8423,8 +8301,7 @@ var S3Migration = class {
  console.log(import_picocolors$1.default.yellow(`[DRY RUN] Updated ${import_picocolors$1.default.bold(normalizedKey)}`));
  return;
  }
-
- if (originalContent !== null) await this.backupFile(key);
+ if (await this.readFile(key) !== null) await this.backupFile(key);
  await this.doUpdateFile(normalizedKey, content, { cacheControl });
  console.log(import_picocolors$1.default.green(`Updated ${import_picocolors$1.default.bold(normalizedKey)}`));
  }
@@ -8528,37 +8405,32 @@ var S3Migrator = class {
  async saveMigrationRecords(dryRun) {
  if (dryRun) return;
  const body = JSON.stringify(this.migrationRecords, null, 2);
-
+ await new __aws_sdk_lib_storage.Upload({
  client: this.s3,
  params: {
  Bucket: this.bucketName,
  Key: this.migrationRecordKey,
  Body: body
  }
- });
- await upload.done();
+ }).done();
  }
  /**
  * Returns a JSON object containing applied and pending migrations.
  */
  async list() {
  await this.loadMigrationRecords();
- const applied = this.migrationRecords.map((record) => ({
- name: record.name,
- appliedAt: record.appliedAt
- }));
- const pendingMigrations = this.migrations.filter((migration) => !this.migrationRecords.some((record) => record.name === migration.name));
- const pending = pendingMigrations.map((migration) => ({ name: migration.name }));
  return {
- applied
-
+ applied: this.migrationRecords.map((record) => ({
+ name: record.name,
+ appliedAt: record.appliedAt
+ })),
+ pending: this.migrations.filter((migration) => !this.migrationRecords.some((record) => record.name === migration.name)).map((migration) => ({ name: migration.name }))
  };
  }
  async migrate({ dryRun }) {
  await this.loadMigrationRecords();
  for (const migration of this.migrations) {
-
- if (alreadyApplied) continue;
+ if (this.migrationRecords.some((record) => record.name === migration.name)) continue;
  console.log(import_picocolors$1.default.magenta(`Applying migration ${migration.name}...`));
  migration.s3 = this.s3;
  migration.bucketName = this.bucketName;
@@ -8599,8 +8471,7 @@ var Migration0001HotUpdater0_13_0 = class extends S3Migration {
  const newKey = `production/${key}`;
  await this.moveFile(key, newKey);
  }
- const
- const updateKeys = productionKeys.filter((key) => key.endsWith("update.json"));
+ const updateKeys = (await this.getKeys("production/")).filter((key) => key.endsWith("update.json"));
  for (const key of updateKeys) {
  const data = await this.readJson(key);
  if (data && Array.isArray(data)) {
@@ -8620,8 +8491,7 @@ var Migration0001HotUpdater0_13_0 = class extends S3Migration {
  var Migration0001HotUpdater0_18_0 = class extends S3Migration {
  name = "hot-updater_0.18.0";
  async migrate() {
- const
- const updateKeys = keys.filter((key) => key.endsWith("update.json"));
+ const updateKeys = (await this.getKeys("")).filter((key) => key.endsWith("update.json"));
  for (const key of updateKeys) {
  const data = await this.readJson(key);
  if (data && Array.isArray(data)) {
@@ -8685,23 +8555,20 @@ var S3Manager = class {
  region: "us-east-1",
  credentials: this.credentials
  });
- const
-
- const bucketInfos = await Promise.all(buckets.filter((bucket) => bucket.Name).map(async (bucket) => {
+ const buckets = (await s3Client.listBuckets({})).Buckets ?? [];
+ return await Promise.all(buckets.filter((bucket) => bucket.Name).map(async (bucket) => {
  const { LocationConstraint: region } = await s3Client.getBucketLocation({ Bucket: bucket.Name });
  return {
  name: bucket.Name,
  region
  };
  }));
- return bucketInfos;
  }
  async createBucket(bucketName, region) {
-
+ await new __aws_sdk_client_s3.S3({
  region,
  credentials: this.credentials
- })
- await s3Client.createBucket({
+ }).createBucket({
  Bucket: bucketName,
  ...region === "us-east-1" ? {} : { CreateBucketConfiguration: { LocationConstraint: region } }
  });
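createBucket keeps the us-east-1 special case: S3 rejects a CreateBucketConfiguration whose LocationConstraint is us-east-1, so the configuration is only attached for other regions. In isolation:

import { S3 } from "@aws-sdk/client-s3";

const createBucket = async (bucketName, region, credentials) => {
  await new S3({ region, credentials }).createBucket({
    Bucket: bucketName,
    // us-east-1 must omit CreateBucketConfiguration entirely.
    ...(region === "us-east-1" ? {} : { CreateBucketConfiguration: { LocationConstraint: region } })
  });
};

// Related quirk: GetBucketLocation (used by listBuckets above) returns an empty
// LocationConstraint for us-east-1 buckets, so callers typically fall back:
// const bucketRegion = LocationConstraint || "us-east-1";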
@@ -8780,11 +8647,10 @@ var SSMKeyPairManager = class {
  }
  }
  async putParameter(name, value) {
-
+ await new __aws_sdk_client_ssm.SSM({
  region: this.region,
  credentials: this.credentials
- })
- await ssm.putParameter({
+ }).putParameter({
  Name: name,
  Value: value,
  Type: "SecureString",
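putParameter stores the serialized key pair as a SecureString. A minimal round trip (a sketch; the parameter name mirrors the /hot-updater/<bucket>/keypair path used later in this diff, and Overwrite is an assumption):

import { SSM } from "@aws-sdk/client-ssm";

const ssm = new SSM({ region: "us-east-1" });
const name = "/hot-updater/my-bucket/keypair"; // placeholder bucket name

// Write: stored encrypted as a SecureString, as in the hunk above.
await ssm.putParameter({
  Name: name,
  Value: JSON.stringify({ keyPairId: "HOTUPDATER-XXXX" }), // placeholder payload
  Type: "SecureString",
  Overwrite: true // assumption; not visible in this hunk
});

// Read it back decrypted.
const { Parameter } = await ssm.getParameter({ Name: name, WithDecryption: true });
console.log(JSON.parse(Parameter.Value).keyPairId);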
@@ -8809,9 +8675,8 @@ var SSMKeyPairManager = class {
  format: "pem"
  }
  });
- const keyPairId = `HOTUPDATER-${crypto.default.randomBytes(4).toString("hex").toUpperCase()}`;
  const keyPair = {
- keyPairId
+ keyPairId: `HOTUPDATER-${crypto.default.randomBytes(4).toString("hex").toUpperCase()}`,
  publicKey,
  privateKey
  };
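getOrCreateKeyPair generates a PEM key pair (the format: "pem" context above) plus a random HOTUPDATER- identifier. CloudFront signed URLs expect a 2048-bit RSA key in PEM encoding; a sketch of that generation step, where only the PEM format and the id scheme are visible in the hunk and the remaining parameters are assumptions:

import crypto from "node:crypto";

// RSA key pair suitable for CloudFront signed URLs (assumed parameters).
const { publicKey, privateKey } = crypto.generateKeyPairSync("rsa", {
  modulusLength: 2048,
  publicKeyEncoding: { type: "spki", format: "pem" },
  privateKeyEncoding: { type: "pkcs8", format: "pem" }
});

// Random, human-readable key-pair id, as in the hunk above.
const keyPairId = `HOTUPDATER-${crypto.randomBytes(4).toString("hex").toUpperCase()}`;

console.log(keyPairId, publicKey.startsWith("-----BEGIN PUBLIC KEY-----")); // e.g. HOTUPDATER-1A2B3C4D true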
@@ -8884,17 +8749,16 @@ const checkIfAwsCliInstalled = async () => {
  try {
  await execa("aws", ["--version"]);
  return true;
- } catch
+ } catch {
  return false;
  }
  };
  const runInit = async ({ build }) => {
-
- if (!isAwsCliInstalled) {
+ if (!await checkIfAwsCliInstalled()) {
  f.error(`AWS CLI is not installed. Please visit ${(0, __hot_updater_plugin_core.link)("https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html")} for installation instructions`);
  process.exit(1);
  }
- let credentials
+ let credentials;
  const mode = await ve({
  message: "Select the mode to login to AWS",
  options: [{
@@ -9005,14 +8869,11 @@ const runInit = async ({ build }) => {
  region: bucketRegion,
  migrations: [new Migration0001HotUpdater0_13_0(), new Migration0001HotUpdater0_18_0()]
  });
- const
- const
- const ssmKeyPairManager = new SSMKeyPairManager(bucketRegion, credentials);
- const keyPair = await ssmKeyPairManager.getOrCreateKeyPair(`/hot-updater/${bucketName}/keypair`);
+ const lambdaRoleArn = await new IAMManager(bucketRegion, credentials).createOrSelectRole();
+ const keyPair = await new SSMKeyPairManager(bucketRegion, credentials).getOrCreateKeyPair(`/hot-updater/${bucketName}/keypair`);
  const cloudFrontManager = new CloudFrontManager(bucketRegion, credentials);
  const { publicKeyId, keyGroupId } = await cloudFrontManager.getOrCreateKeyGroup(keyPair.publicKey);
- const
- const { functionArn } = await lambdaEdgeDeployer.deploy(lambdaRoleArn, {
+ const { functionArn } = await new LambdaEdgeDeployer(credentials).deploy(lambdaRoleArn, {
  publicKey: publicKeyId,
  privateKey: keyPair.privateKey
  });