@hot-updater/aws 0.20.11 → 0.20.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/iac/index.cjs +206 -345
- package/dist/iac/index.js +204 -343
- package/dist/index.cjs +14 -22
- package/dist/index.js +13 -21
- package/dist/lambda/index.cjs +125 -207
- package/package.json +4 -4
package/dist/iac/index.js
CHANGED
@@ -1,5 +1,4 @@
 import { createRequire } from "node:module";
-import fs from "fs";
 import { fromSSO } from "@aws-sdk/credential-providers";
 import { aborted, callbackify, debuglog, inspect, promisify, stripVTControlCharacters } from "node:util";
 import g, { execArgv, execPath, hrtime, platform, stdin, stdout } from "node:process";
@@ -12,6 +11,7 @@ import { ChildProcess, execFile, spawn, spawnSync } from "node:child_process";
 import { StringDecoder } from "node:string_decoder";
 import path from "node:path";
 import path$1 from "path";
+import fs from "fs";
 import { scheduler, setImmediate, setTimeout as setTimeout$1 } from "node:timers/promises";
 import { constants } from "node:os";
 import { EventEmitter, addAbortListener, on, once, setMaxListeners } from "node:events";
@@ -20,8 +20,8 @@ import { appendFileSync, createReadStream, createWriteStream, readFileSync, stat
 import { finished } from "node:stream/promises";
 import { Duplex, PassThrough, Readable, Transform, Writable, getDefaultHighWaterMark } from "node:stream";
 import { Buffer as Buffer$1 } from "node:buffer";
-import crypto from "crypto";
 import { CloudFront } from "@aws-sdk/client-cloudfront";
+import crypto from "crypto";
 import { IAM } from "@aws-sdk/client-iam";
 import { Lambda } from "@aws-sdk/client-lambda";
 import fs$1 from "fs/promises";
@@ -448,16 +448,16 @@ function G(t, u$2, F$1) {
 `).map((e$1) => oD(e$1, u$2, F$1)).join(`
 `);
 }
-const
-
-
-
-
-
-
-
-
-
+const c$1 = {
+actions: new Set([
+"up",
+"down",
+"left",
+"right",
+"space",
+"enter",
+"cancel"
+]),
 aliases: new Map([
 ["k", "up"],
 ["j", "down"],
@@ -497,9 +497,8 @@ function cD({ input: t = stdin, output: u$2 = stdout, overwrite: F$1 = !0, hideC
 });
 f$1.emitKeypressEvents(t, s), t.isTTY && t.setRawMode(!0);
 const i$1 = (D$1, { name: C$1, sequence: o$2 }) => {
-const E = String(D$1);
 if (k$1([
-
+String(D$1),
 C$1,
 o$2
 ], "cancel")) {
@@ -688,10 +687,7 @@ let dD = class extends x {
 if (u$2.group === !0) {
 const F$1 = u$2.value, e$1 = this.getGroupItems(F$1);
 this.isGroupSelected(F$1) ? this.value = this.value.filter((s) => e$1.findIndex((i$1) => i$1.value === s) === -1) : this.value = [...this.value, ...e$1.map((s) => s.value)], this.value = Array.from(new Set(this.value));
-} else
-const F$1 = this.value.includes(u$2.value);
-this.value = F$1 ? this.value.filter((e$1) => e$1 !== u$2.value) : [...this.value, u$2.value];
-}
+} else this.value = this.value.includes(u$2.value) ? this.value.filter((e$1) => e$1 !== u$2.value) : [...this.value, u$2.value];
 }
 };
 var bD = Object.defineProperty, mD = (t, u$2, F$1) => u$2 in t ? bD(t, u$2, {
@@ -724,12 +720,10 @@ let wD = class extends x {
 return this.options[this.cursor].value;
 }
 toggleAll() {
-
-this.value = u$2 ? [] : this.options.map((F$1) => F$1.value);
+this.value = this.value.length === this.options.length ? [] : this.options.map((F$1) => F$1.value);
 }
 toggleValue() {
-
-this.value = u$2 ? this.value.filter((F$1) => F$1 !== this._value) : [...this.value, this._value];
+this.value = this.value.includes(this._value) ? this.value.filter((F$1) => F$1 !== this._value) : [...this.value, this._value];
 }
 };
 var yD = Object.defineProperty, _D = (t, u$2, F$1) => u$2 in t ? yD(t, u$2, {
@@ -1156,8 +1150,7 @@ ${J}${r$1.trimStart()}`), s = 3 + stripVTControlCharacters(r$1.trimStart()).leng
 ], s = V ? 80 : 120, r$1 = process.env.CI === "true";
 let i$1, a$1, c$2 = !1, l$1 = "", $$1, p$1 = performance.now();
 const M$1 = (m$1) => {
-
-c$2 && N$1(h$2, m$1);
+c$2 && N$1(m$1 > 1 ? "Something went wrong" : "Canceled", m$1);
 }, v$1 = () => M$1(2), x$1 = () => M$1(1), j = () => {
 process.on("uncaughtExceptionMonitor", v$1), process.on("unhandledRejection", v$1), process.on("SIGINT", x$1), process.on("SIGTERM", x$1), process.on("exit", M$1);
 }, E = () => {
@@ -1273,8 +1266,7 @@ const stringToUint8Array = (string) => textEncoder$1.encode(string);
 const textDecoder = new TextDecoder();
 const uint8ArrayToString = (uint8Array) => textDecoder.decode(uint8Array);
 const joinToString = (uint8ArraysOrStrings, encoding) => {
-
-return strings.join("");
+return uint8ArraysToStrings(uint8ArraysOrStrings, encoding).join("");
 };
 const uint8ArraysToStrings = (uint8ArraysOrStrings, encoding) => {
 if (encoding === "utf8" && uint8ArraysOrStrings.every((uint8ArrayOrString) => typeof uint8ArrayOrString === "string")) return uint8ArraysOrStrings;
@@ -1329,8 +1321,7 @@ const parseTemplate = ({ templates, expressions, tokens, index, template }) => {
 const newTokens = concatTokens(tokens, nextTokens, leadingWhitespaces);
 if (index === expressions.length) return newTokens;
 const expression = expressions[index];
-
-return concatTokens(newTokens, expressionTokens, trailingWhitespaces);
+return concatTokens(newTokens, Array.isArray(expression) ? expression.map((expression$1) => parseExpression(expression$1)) : [parseExpression(expression)], trailingWhitespaces);
 };
 const splitByWhitespaces = (template, rawTemplate) => {
 if (rawTemplate.length === 0) return {
@@ -1417,8 +1408,7 @@ const normalizeFdSpecificOptions = (options) => {
 };
 const normalizeFdSpecificOption = (options, optionName) => {
 const optionBaseArray = Array.from({ length: getStdioLength(options) + 1 });
-
-return addDefaultValue$1(optionArray, optionName);
+return addDefaultValue$1(normalizeFdSpecificValue(options[optionName], optionBaseArray, optionName), optionName);
 };
 const getStdioLength = ({ stdio }) => Array.isArray(stdio) ? Math.max(stdio.length, STANDARD_STREAMS_ALIASES.length) : STANDARD_STREAMS_ALIASES.length;
 const normalizeFdSpecificValue = (optionValue, optionArray, optionName) => isPlainObject$1(optionValue) ? normalizeOptionObject(optionValue, optionArray, optionName) : optionArray.fill(optionValue);
@@ -1448,12 +1438,11 @@ const parseFd = (fdName) => {
 };
 const FD_REGEXP = /^fd(\d+)$/;
 const addDefaultValue$1 = (optionArray, optionName) => optionArray.map((optionValue) => optionValue === void 0 ? DEFAULT_OPTIONS[optionName] : optionValue);
-const verboseDefault = debuglog("execa").enabled ? "full" : "none";
 const DEFAULT_OPTIONS = {
 lines: false,
 buffer: true,
 maxBuffer: 1e3 * 1e3 * 100,
-verbose:
+verbose: debuglog("execa").enabled ? "full" : "none",
 stripFinalNewline: true
 };
 const FD_SPECIFIC_OPTIONS = [
@@ -1486,11 +1475,9 @@ const VERBOSE_VALUES = [
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/arguments/escape.js
 const joinCommand = (filePath, rawArguments) => {
 const fileAndArguments = [filePath, ...rawArguments];
-const command = fileAndArguments.join(" ");
-const escapedCommand = fileAndArguments.map((fileAndArgument) => quoteString(escapeControlCharacters(fileAndArgument))).join(" ");
 return {
-command,
-escapedCommand
+command: fileAndArguments.join(" "),
+escapedCommand: fileAndArguments.map((fileAndArgument) => quoteString(escapeControlCharacters(fileAndArgument))).join(" ")
 };
 };
 const escapeLines = (lines) => stripVTControlCharacters(lines).split("\n").map((line) => escapeControlCharacters(line)).join("\n");
@@ -1931,13 +1918,11 @@ const appendNewline = (printedLine) => printedLine.endsWith("\n") ? printedLine
 //#endregion
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/verbose/log.js
 const verboseLog = ({ type, verboseMessage, fdNumber, verboseInfo, result }) => {
-const
+const finalLines = applyVerboseOnLines(getPrintedLines(verboseMessage, getVerboseObject({
 type,
 result,
 verboseInfo
-});
-const printedLines = getPrintedLines(verboseMessage, verboseObject);
-const finalLines = applyVerboseOnLines(printedLines, verboseInfo, fdNumber);
+})), verboseInfo, fdNumber);
 if (finalLines !== "") console.warn(finalLines.slice(0, -1));
 };
 const getVerboseObject = ({ type, result, verboseInfo: { escapedCommand, commandId, rawOptions: { piped = false,...options } } }) => ({
@@ -1954,16 +1939,13 @@ const getPrintedLines = (verboseMessage, verboseObject) => verboseMessage.split(
 message
 }));
 const getPrintedLine = (verboseObject) => {
-const verboseLine = defaultVerboseFunction(verboseObject);
 return {
-verboseLine,
+verboseLine: defaultVerboseFunction(verboseObject),
 verboseObject
 };
 };
 const serializeVerboseMessage = (message) => {
-
-const escapedMessage = escapeLines(messageString);
-return escapedMessage.replaceAll(" ", " ".repeat(TAB_SIZE));
+return escapeLines(typeof message === "string" ? message : inspect(message)).replaceAll(" ", " ".repeat(TAB_SIZE));
 };
 const TAB_SIZE = 2;
@@ -1982,11 +1964,10 @@ const logCommand = (escapedCommand, verboseInfo) => {
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/verbose/info.js
 const getVerboseInfo = (verbose, escapedCommand, rawOptions) => {
 validateVerbose(verbose);
-const commandId = getCommandId(verbose);
 return {
 verbose,
 escapedCommand,
-commandId,
+commandId: getCommandId(verbose),
 rawOptions
 };
 };
@@ -2013,8 +1994,7 @@ const getDurationMs = (startTime) => Number(hrtime.bigint() - startTime) / 1e6;
 const handleCommand = (filePath, rawArguments, rawOptions) => {
 const startTime = getStartTime();
 const { command, escapedCommand } = joinCommand(filePath, rawArguments);
-const
-const verboseInfo = getVerboseInfo(verbose, escapedCommand, { ...rawOptions });
+const verboseInfo = getVerboseInfo(normalizeFdSpecificOption(rawOptions, "verbose"), escapedCommand, { ...rawOptions });
 logCommand(escapedCommand, verboseInfo);
 return {
 command,
@@ -2082,8 +2062,7 @@ var require_mode = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/isexe@
 var g$2 = parseInt("010", 8);
 var o$2 = parseInt("001", 8);
 var ug = u$2 | g$2;
-
-return ret;
+return mod & o$2 || mod & g$2 && gid === myGid || mod & u$2 && uid === myUid || mod & ug && myUid === 0;
 }
 }) });
@@ -2165,8 +2144,7 @@ var require_which = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/which
 const ppRaw = pathEnv[i$1];
 const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw;
 const pCmd = path$4.join(pathPart, cmd);
-
-resolve(subStep(p$1, i$1, 0));
+resolve(subStep(!pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd : pCmd, i$1, 0));
 });
 const subStep = (p$1, i$1, ii) => new Promise((resolve, reject) => {
 if (ii === pathExt.length) return resolve(step(i$1 + 1));
@@ -2191,8 +2169,7 @@ var require_which = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/which
 for (let j = 0; j < pathExt.length; j++) {
 const cur = p$1 + pathExt[j];
 try {
-
-if (is) if (opt.all) found.push(cur);
+if (isexe.sync(cur, { pathExt: pathExtExe })) if (opt.all) found.push(cur);
 else return cur;
 } catch (ex) {}
 }
@@ -2210,8 +2187,7 @@ var require_which = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/which
 var require_path_key = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/path-key@3.1.1/node_modules/path-key/index.js": ((exports, module) => {
 const pathKey$1 = (options = {}) => {
 const environment = options.env || process.env;
-
-if (platform$1 !== "win32") return "PATH";
+if ((options.platform || process.platform) !== "win32") return "PATH";
 return Object.keys(environment).reverse().find((key) => key.toUpperCase() === "PATH") || "Path";
 };
 module.exports = pathKey$1;
@@ -2339,12 +2315,11 @@ var require_parse = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/cross
 parsed.command = path$2.normalize(parsed.command);
 parsed.command = escape.command(parsed.command);
 parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars));
-const shellCommand = [parsed.command].concat(parsed.args).join(" ");
 parsed.args = [
 "/d",
 "/s",
 "/c",
-`"${
+`"${[parsed.command].concat(parsed.args).join(" ")}"`
 ];
 parsed.command = process.env.comspec || "cmd.exe";
 parsed.options.windowsVerbatimArguments = true;
@@ -2497,9 +2472,7 @@ const npmRunPathEnv = ({ env: env$1 = g.env,...options } = {}) => {
 //#endregion
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/return/final-error.js
 const getFinalError = (originalError, message, isSync) => {
-
-const options = originalError instanceof DiscardedError ? {} : { cause: originalError };
-return new ErrorClass(message, options);
+return new (isSync ? ExecaSyncError : ExecaError)(message, originalError instanceof DiscardedError ? {} : { cause: originalError });
 };
 var DiscardedError = class extends Error {};
 const setErrorName = (ErrorClass, value) => {
@@ -2818,16 +2791,14 @@ const SIGNALS = [
 //#region ../../node_modules/.pnpm/human-signals@8.0.0/node_modules/human-signals/build/src/signals.js
 const getSignals = () => {
 const realtimeSignals = getRealtimeSignals();
-
-return signals$1;
+return [...SIGNALS, ...realtimeSignals].map(normalizeSignal$1);
 };
 const normalizeSignal$1 = ({ name, number: defaultNumber, description, action, forced = false, standard }) => {
 const { signals: { [name]: constantSignal } } = constants;
 const supported = constantSignal !== void 0;
-const number = supported ? constantSignal : defaultNumber;
 return {
 name,
-number,
+number: supported ? constantSignal : defaultNumber,
 description,
 supported,
 action,
@@ -3239,8 +3210,7 @@ const isConnected = (anyProcess) => {
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/ipc/strict.js
 const handleSendStrict = ({ anyProcess, channel, isSubprocess, message, strict }) => {
 if (!strict) return message;
-const
-const hasListeners = hasMessageListeners(anyProcess, ipcEmitter);
+const hasListeners = hasMessageListeners(anyProcess, getIpcEmitter(anyProcess, channel, isSubprocess));
 return {
 id: count++,
 type: REQUEST_TYPE,
@@ -3313,11 +3283,9 @@ const RESPONSE_TYPE = "execa:ipc:response";
 const startSendMessage = (anyProcess, wrappedMessage, strict) => {
 if (!OUTGOING_MESSAGES.has(anyProcess)) OUTGOING_MESSAGES.set(anyProcess, /* @__PURE__ */ new Set());
 const outgoingMessages = OUTGOING_MESSAGES.get(anyProcess);
-const onMessageSent = createDeferred();
-const id = strict ? wrappedMessage.id : void 0;
 const outgoingMessage = {
-onMessageSent,
-id
+onMessageSent: createDeferred(),
+id: strict ? wrappedMessage.id : void 0
 };
 outgoingMessages.add(outgoingMessage);
 return {
@@ -3478,8 +3446,7 @@ const throwOnGracefulCancel = ({ subprocess, cancelSignal, gracefulCancel, force
 })] : [];
 const sendOnAbort = async ({ subprocess, cancelSignal, forceKillAfterDelay, context, controller: { signal } }) => {
 await onAbortedSignal(cancelSignal, signal);
-
-await sendAbort(subprocess, reason);
+await sendAbort(subprocess, getReason(cancelSignal));
 killOnTimeout({
 kill: subprocess.kill,
 forceKillAfterDelay,
@@ -3653,8 +3620,7 @@ const normalizeOptions = (filePath, rawArguments, rawOptions) => {
 rawOptions.cwd = normalizeCwd(rawOptions.cwd);
 const [processedFile, processedArguments, processedOptions] = handleNodeOption(filePath, rawArguments, rawOptions);
 const { command: file, args: commandArguments, options: initialOptions } = import_cross_spawn.default._parse(processedFile, processedArguments, processedOptions);
-const
-const options = addDefaultOptions(fdOptions);
+const options = addDefaultOptions(normalizeFdSpecificOptions(initialOptions));
 validateTimeout(options);
 validateEncoding(options);
 validateIpcInputOption(options);
@@ -3811,7 +3777,7 @@ const u = Object.create(a, {
 }
 });
 function h({ preventCancel: r$1 = !1 } = {}) {
-const
+const t = new c(this.getReader(), r$1), s = Object.create(u);
 return s[n] = t, s;
 }
@@ -3860,18 +3826,14 @@ const getStreamContents$1 = async (stream, { init, convertChunk, getSize, trunca
 const state = init();
 state.length = 0;
 try {
-for await (const chunk of asyncIterable) {
-
-
-
-
-
-
-
-addChunk,
-maxBuffer
-});
-}
+for await (const chunk of asyncIterable) appendChunk({
+convertedChunk: convertChunk[getChunkType(chunk)](chunk, state),
+state,
+getSize,
+truncateChunk,
+addChunk,
+maxBuffer
+});
 appendFinalChunk({
 state,
 convertChunk,
@@ -4061,10 +4023,9 @@ const stringMethods = {
 const handleMaxBuffer = ({ error, stream, readableObjectMode, lines, encoding, fdNumber }) => {
 if (!(error instanceof MaxBufferError)) throw error;
 if (fdNumber === "all") return error;
-const unit = getMaxBufferUnit(readableObjectMode, lines, encoding);
 error.maxBufferInfo = {
 fdNumber,
-unit
+unit: getMaxBufferUnit(readableObjectMode, lines, encoding)
 };
 stream.destroy();
 throw error;
@@ -4134,19 +4095,16 @@ const createMessages = ({ stdio, all, ipcOutput, originalError, signal, signalDe
 killSignal
 });
 const originalMessage = getOriginalMessage(originalError, cwd);
-const
-const shortMessage = `${prefix}: ${escapedCommand}${suffix}`;
-const messageStdio = all === void 0 ? [stdio[2], stdio[1]] : [all];
-const message = [
-shortMessage,
-...messageStdio,
-...stdio.slice(3),
-ipcOutput.map((ipcMessage) => serializeIpcMessage(ipcMessage)).join("\n")
-].map((messagePart) => escapeLines(stripFinalNewline(serializeMessagePart(messagePart)))).filter(Boolean).join("\n\n");
+const shortMessage = `${prefix}: ${escapedCommand}${originalMessage === void 0 ? "" : `\n${originalMessage}`}`;
 return {
 originalMessage,
 shortMessage,
-message
+message: [
+shortMessage,
+...all === void 0 ? [stdio[2], stdio[1]] : [all],
+...stdio.slice(3),
+ipcOutput.map((ipcMessage) => serializeIpcMessage(ipcMessage)).join("\n")
+].map((messagePart) => escapeLines(stripFinalNewline(serializeMessagePart(messagePart)))).filter(Boolean).join("\n\n")
 };
 };
 const getErrorPrefix = ({ originalError, timedOut, timeout, isMaxBuffer, maxBuffer, errorCode, signal, signalDescription, exitCode, isCanceled, isGracefullyCanceled, isForcefullyTerminated, forceKillAfterDelay, killSignal }) => {
@@ -4167,8 +4125,7 @@ const getErrorPrefix = ({ originalError, timedOut, timeout, isMaxBuffer, maxBuff
 const getForcefulSuffix = (isForcefullyTerminated, forceKillAfterDelay) => isForcefullyTerminated ? ` and was forcefully terminated after ${forceKillAfterDelay} milliseconds` : "";
 const getOriginalMessage = (originalError, cwd) => {
 if (originalError instanceof DiscardedError) return;
-const
-const escapedOriginalMessage = escapeLines(fixCwdError(originalMessage, cwd));
+const escapedOriginalMessage = escapeLines(fixCwdError(isExecaError(originalError) ? originalError.originalMessage : String(originalError?.message ?? originalError), cwd));
 return escapedOriginalMessage === "" ? void 0 : escapedOriginalMessage;
 };
 const serializeIpcMessage = (ipcMessage) => typeof ipcMessage === "string" ? ipcMessage : inspect(ipcMessage);
@@ -4290,11 +4247,10 @@ const omitUndefinedProperties = (result) => Object.fromEntries(Object.entries(re
 const normalizeExitPayload = (rawExitCode, rawSignal) => {
 const exitCode = rawExitCode === null ? void 0 : rawExitCode;
 const signal = rawSignal === null ? void 0 : rawSignal;
-const signalDescription = signal === void 0 ? void 0 : getSignalDescription(rawSignal);
 return {
 exitCode,
 signal,
-signalDescription
+signalDescription: signal === void 0 ? void 0 : getSignalDescription(rawSignal)
 };
 };
@@ -4359,8 +4315,7 @@ function prettyMilliseconds(milliseconds, options) {
 let result = [];
 const floorDecimals = (value, decimalDigits) => {
 const flooredInterimValue = Math.floor(value * 10 ** decimalDigits + SECOND_ROUNDING_EPSILON);
-
-return flooredValue.toFixed(decimalDigits);
+return (Math.round(flooredInterimValue) / 10 ** decimalDigits).toFixed(decimalDigits);
 };
 const add = (value, long, short, valueString) => {
 if ((result.length === 0 || !options.colonNotation) && isZero(value) && !(options.colonNotation && short === "m")) return;
@@ -4397,14 +4352,11 @@ function prettyMilliseconds(milliseconds, options) {
 } else {
 const millisecondsAndBelow = milliseconds$1 + microseconds / 1e3 + nanoseconds / 1e6;
 const millisecondsDecimalDigits = typeof options.millisecondsDecimalDigits === "number" ? options.millisecondsDecimalDigits : 0;
-const
-const millisecondsString = millisecondsDecimalDigits ? millisecondsAndBelow.toFixed(millisecondsDecimalDigits) : roundedMilliseconds;
+const millisecondsString = millisecondsDecimalDigits ? millisecondsAndBelow.toFixed(millisecondsDecimalDigits) : millisecondsAndBelow >= 1 ? Math.round(millisecondsAndBelow) : Math.ceil(millisecondsAndBelow);
 add(Number.parseFloat(millisecondsString), "millisecond", "ms", millisecondsString);
 }
 } else {
-const
-const secondsDecimalDigits = typeof options.secondsDecimalDigits === "number" ? options.secondsDecimalDigits : 1;
-const secondsFixed = floorDecimals(seconds, secondsDecimalDigits);
+const secondsFixed = floorDecimals((isBigInt ? Number(milliseconds % ONE_DAY_IN_MILLISECONDS) : milliseconds) / 1e3 % 60, typeof options.secondsDecimalDigits === "number" ? options.secondsDecimalDigits : 1);
 const secondsString = options.keepDecimalsOnWholeSeconds ? secondsFixed : secondsFixed.replace(/\.0+$/, "");
 add(Number.parseFloat(secondsString), "second", "s", secondsString);
 }
@@ -4433,10 +4385,9 @@ const logResult = (result, verboseInfo) => {
 logDuration(result, verboseInfo);
 };
 const logDuration = (result, verboseInfo) => {
-const verboseMessage = `(done in ${prettyMilliseconds(result.durationMs)})`;
 verboseLog({
 type: "duration",
-verboseMessage
+verboseMessage: `(done in ${prettyMilliseconds(result.durationMs)})`,
 verboseInfo,
 result
 });
@@ -4563,18 +4514,16 @@ const TYPE_TO_MESSAGE = {
 const getTransformObjectModes = (objectMode, index, newTransforms, direction) => direction === "output" ? getOutputObjectModes(objectMode, index, newTransforms) : getInputObjectModes(objectMode, index, newTransforms);
 const getOutputObjectModes = (objectMode, index, newTransforms) => {
 const writableObjectMode = index !== 0 && newTransforms[index - 1].value.readableObjectMode;
-const readableObjectMode = objectMode ?? writableObjectMode;
 return {
 writableObjectMode,
-readableObjectMode
+readableObjectMode: objectMode ?? writableObjectMode
 };
 };
 const getInputObjectModes = (objectMode, index, newTransforms) => {
 const writableObjectMode = index === 0 ? objectMode === true : newTransforms[index - 1].value.readableObjectMode;
-const readableObjectMode = index !== newTransforms.length - 1 && (objectMode ?? writableObjectMode);
 return {
 writableObjectMode,
-readableObjectMode
+readableObjectMode: index !== newTransforms.length - 1 && (objectMode ?? writableObjectMode)
 };
 };
 const getFdObjectMode = (stdioItems, direction) => {
@@ -4883,8 +4832,7 @@ const validateDuplicateStreamSync = ({ otherStdioItems, type, value, optionName,
 const getDuplicateStreamInstance = ({ otherStdioItems, type, value, optionName, direction }) => {
 const duplicateStdioItems = otherStdioItems.filter((stdioItem) => hasSameValue(stdioItem, value));
 if (duplicateStdioItems.length === 0) return;
-
-throwOnDuplicateStream(differentStdioItem, optionName, type);
+throwOnDuplicateStream(duplicateStdioItems.find((stdioItem) => stdioItem.direction !== direction), optionName, type);
 return direction === "output" ? duplicateStdioItems[0].stream : void 0;
 };
 const hasSameValue = ({ type, value }, secondValue) => {
@@ -4893,8 +4841,7 @@ const hasSameValue = ({ type, value }, secondValue) => {
 return value === secondValue;
 };
 const validateDuplicateTransform = ({ otherStdioItems, type, value, optionName }) => {
-
-throwOnDuplicateStream(duplicateStdioItem, optionName, type);
+throwOnDuplicateStream(otherStdioItems.find(({ value: { transform } }) => transform === value.transform), optionName, type);
 };
 const throwOnDuplicateStream = (stdioItem, optionName, type) => {
 if (stdioItem !== void 0) throw new TypeError(`The \`${stdioItem.optionName}\` and \`${optionName}\` options must not target ${TYPE_TO_MESSAGE[type]} that is the same.`);
@@ -4903,15 +4850,13 @@ const throwOnDuplicateStream = (stdioItem, optionName, type) => {
 //#endregion
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/stdio/handle.js
 const handleStdio = (addProperties$2, options, verboseInfo, isSync) => {
-const stdio = normalizeStdioOption(options, verboseInfo, isSync);
-const initialFileDescriptors = stdio.map((stdioOption, fdNumber) => getFileDescriptor({
-stdioOption,
-fdNumber,
-options,
-isSync
-}));
 const fileDescriptors = getFinalFileDescriptors({
-initialFileDescriptors,
+initialFileDescriptors: normalizeStdioOption(options, verboseInfo, isSync).map((stdioOption, fdNumber) => getFileDescriptor({
+stdioOption,
+fdNumber,
+options,
+isSync
+})),
 addProperties: addProperties$2,
 options,
 isSync
@@ -4928,14 +4873,13 @@ const getFileDescriptor = ({ stdioOption, fdNumber, options, isSync }) => {
 optionName
 });
 const direction = getStreamDirection(initialStdioItems, fdNumber, optionName);
-const
+const normalizedStdioItems = normalizeTransforms(initialStdioItems.map((stdioItem) => handleNativeStream({
 stdioItem,
 isStdioArray,
 fdNumber,
 direction,
 isSync
-}));
-const normalizedStdioItems = normalizeTransforms(stdioItems, optionName, direction, options);
+})), optionName, direction, options);
 const objectMode = getFdObjectMode(normalizedStdioItems, direction);
 validateFileObjectMode(normalizedStdioItems, objectMode);
 return {
@@ -4945,9 +4889,7 @@ const getFileDescriptor = ({ stdioOption, fdNumber, options, isSync }) => {
 };
 };
 const initializeStdioItems = ({ stdioOption, fdNumber, options, optionName }) => {
-const
-const initialStdioItems = [...values.map((value) => initializeStdioItem(value, optionName)), ...handleInputOptions(options, fdNumber)];
-const stdioItems = filterDuplicates(initialStdioItems);
+const stdioItems = filterDuplicates([...(Array.isArray(stdioOption) ? stdioOption : [stdioOption]).map((value) => initializeStdioItem(value, optionName)), ...handleInputOptions(options, fdNumber)]);
 const isStdioArray = stdioItems.length > 1;
 validateStdioArray(stdioItems, isStdioArray, optionName);
 validateStreams(stdioItems);
@@ -4997,18 +4939,17 @@ const getFinalFileDescriptors = ({ initialFileDescriptors, addProperties: addPro
 }
 };
 const getFinalFileDescriptor = ({ fileDescriptor: { direction, objectMode, stdioItems }, fileDescriptors, addProperties: addProperties$2, options, isSync }) => {
-const finalStdioItems = stdioItems.map((stdioItem) => addStreamProperties({
-stdioItem,
-addProperties: addProperties$2,
-direction,
-options,
-fileDescriptors,
-isSync
-}));
 return {
 direction,
 objectMode,
-stdioItems:
+stdioItems: stdioItems.map((stdioItem) => addStreamProperties({
+stdioItem,
+addProperties: addProperties$2,
+direction,
+options,
+fileDescriptors,
+isSync
+}))
 };
 };
 const addStreamProperties = ({ stdioItem, addProperties: addProperties$2, direction, options, fileDescriptors, isSync }) => {
@@ -5138,8 +5079,7 @@ const appendNewlineGenerator = function* ({ isWindowsNewline = false }, chunk) {
 yield chunk;
 return;
 }
-
-yield concatBytes(chunk, newline);
+yield concatBytes(chunk, isWindowsNewline ? windowsNewline : unixNewline);
 };
 const concatString = (firstChunk, secondChunk) => `${firstChunk}${secondChunk}`;
 const linesStringInfo = {
@@ -5284,7 +5224,7 @@ const generatorToStream = ({ value, value: { transform, final, writableObjectMod
 const transformMethod = transformAsync ? pushChunks.bind(void 0, transformChunk, state) : pushChunksSync.bind(void 0, transformChunkSync);
 const finalMethod = transformAsync || finalAsync ? pushChunks.bind(void 0, finalChunks, state) : pushChunksSync.bind(void 0, finalChunksSync);
 const destroyMethod = transformAsync || finalAsync ? destroyTransform.bind(void 0, state) : void 0;
-
+return { stream: new Transform({
 writableObjectMode,
 writableHighWaterMark: getDefaultHighWaterMark(writableObjectMode),
 readableObjectMode,
@@ -5300,16 +5240,12 @@ const generatorToStream = ({ value, value: { transform, final, writableObjectMod
 finalMethod([generators], this, done);
 },
 destroy: destroyMethod
-});
-return { stream };
+}) };
 };
 const runGeneratorsSync = (chunks, stdioItems, encoding, isInput) => {
 const generators = stdioItems.filter(({ type }) => type === "generator");
 const reversedGenerators = isInput ? generators.reverse() : generators;
-for (const { value, optionName } of reversedGenerators)
-const generators$1 = addInternalGenerators(value, encoding, optionName);
-chunks = runTransformSync(generators$1, chunks);
-}
+for (const { value, optionName } of reversedGenerators) chunks = runTransformSync(addInternalGenerators(value, encoding, optionName), chunks);
 return chunks;
 };
 const addInternalGenerators = ({ transform, final, binary, writableObjectMode, readableObjectMode, preserveNewlines }, encoding, optionName) => {
@@ -5346,9 +5282,7 @@ const addInputOptionSync = (fileDescriptors, fdNumber, options) => {
 const [{ type, optionName }] = allStdioItems;
 throw new TypeError(`Only the \`stdin\` option, not \`${optionName}\`, can be ${TYPE_TO_MESSAGE[type]} with synchronous methods.`);
 }
-
-const transformedContents = allContents.map((contents) => applySingleInputGeneratorsSync(contents, stdioItems));
-options.input = joinToUint8Array(transformedContents);
+options.input = joinToUint8Array(allStdioItems.map(({ contents }) => contents).map((contents) => applySingleInputGeneratorsSync(contents, stdioItems)));
 };
 const applySingleInputGeneratorsSync = (contents, stdioItems) => {
 const newContents = runGeneratorsSync(contents, stdioItems, "utf8", true);
@@ -5373,10 +5307,9 @@ const logLinesSync = (linesArray, fdNumber, verboseInfo) => {
 };
 const isPipingStream = (stream) => stream._readableState.pipes.length > 0;
 const logLine = (line, fdNumber, verboseInfo) => {
-const verboseMessage = serializeVerboseMessage(line);
 verboseLog({
 type: "output",
-verboseMessage,
+verboseMessage: serializeVerboseMessage(line),
 fdNumber,
 verboseInfo
 });
@@ -5388,28 +5321,25 @@ const transformOutputSync = ({ fileDescriptors, syncResult: { output }, options,
 if (output === null) return { output: Array.from({ length: 3 }) };
 const state = {};
 const outputFiles = /* @__PURE__ */ new Set([]);
-const transformedOutput = output.map((result, fdNumber) => transformOutputResultSync({
-result,
-fileDescriptors,
-fdNumber,
-state,
-outputFiles,
-isMaxBuffer,
-verboseInfo
-}, options));
 return {
-output:
+output: output.map((result, fdNumber) => transformOutputResultSync({
+result,
+fileDescriptors,
+fdNumber,
+state,
+outputFiles,
+isMaxBuffer,
+verboseInfo
+}, options)),
 ...state
 };
 };
 const transformOutputResultSync = ({ result, fileDescriptors, fdNumber, state, outputFiles, isMaxBuffer, verboseInfo }, { buffer, encoding, lines, stripFinalNewline: stripFinalNewline$1, maxBuffer }) => {
 if (result === null) return;
-const
-const uint8ArrayResult = bufferToUint8Array(truncatedResult);
+const uint8ArrayResult = bufferToUint8Array(truncateMaxBufferSync(result, isMaxBuffer, maxBuffer));
 const { stdioItems, objectMode } = fileDescriptors[fdNumber];
-const chunks = runOutputGeneratorsSync([uint8ArrayResult], stdioItems, encoding, state);
 const { serializedResult, finalResult = serializedResult } = serializeChunks({
-chunks,
+chunks: runOutputGeneratorsSync([uint8ArrayResult], stdioItems, encoding, state),
 objectMode,
 encoding,
 lines,
@@ -5520,14 +5450,12 @@ const isFailedExit = (exitCode, signal) => exitCode !== 0 || signal !== null;
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/resolve/exit-sync.js
 const getExitResultSync = ({ error, status: exitCode, signal, output }, { maxBuffer }) => {
 const resultError = getResultError(error, exitCode, signal);
-const timedOut = resultError?.code === "ETIMEDOUT";
-const isMaxBuffer = isMaxBufferSync(resultError, output, maxBuffer);
 return {
 resultError,
 exitCode,
 signal,
-timedOut,
-isMaxBuffer
+timedOut: resultError?.code === "ETIMEDOUT",
+isMaxBuffer: isMaxBufferSync(resultError, output, maxBuffer)
 };
 };
 const getResultError = (error, exitCode, signal) => {
@@ -5539,7 +5467,7 @@ const getResultError = (error, exitCode, signal) => {
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/methods/main-sync.js
 const execaCoreSync = (rawFile, rawArguments, rawOptions) => {
 const { file, commandArguments, command, escapedCommand, startTime, verboseInfo, options, fileDescriptors } = handleSyncArguments(rawFile, rawArguments, rawOptions);
-
+return handleResult(spawnSubprocessSync({
 file,
 commandArguments,
 options,
@@ -5548,15 +5476,12 @@ const execaCoreSync = (rawFile, rawArguments, rawOptions) => {
 verboseInfo,
 fileDescriptors,
 startTime
-});
-return handleResult(result, verboseInfo, options);
+}), verboseInfo, options);
 };
 const handleSyncArguments = (rawFile, rawArguments, rawOptions) => {
 const { command, escapedCommand, startTime, verboseInfo } = handleCommand(rawFile, rawArguments, rawOptions);
-const
-const { file, commandArguments, options } = normalizeOptions(rawFile, rawArguments, syncOptions);
+const { file, commandArguments, options } = normalizeOptions(rawFile, rawArguments, normalizeSyncOptions(rawOptions));
 validateSyncOptions(options);
-const fileDescriptors = handleStdioSync(options, verboseInfo);
 return {
 file,
 commandArguments,
@@ -5565,7 +5490,7 @@ const handleSyncArguments = (rawFile, rawArguments, rawOptions) => {
 startTime,
 verboseInfo,
 options,
-fileDescriptors
+fileDescriptors: handleStdioSync(options, verboseInfo)
 };
 };
 const normalizeSyncOptions = (options) => options.node && !options.ipc ? {
@@ -5600,16 +5525,14 @@ const spawnSubprocessSync = ({ file, commandArguments, options, command, escaped
 isMaxBuffer,
 verboseInfo
 });
-const stdio = output.map((stdioOutput, fdNumber) => stripNewline(stdioOutput, options, fdNumber));
-const all = stripNewline(getAllSync(output, options), options, "all");
 return getSyncResult({
 error,
 exitCode,
 signal,
 timedOut,
 isMaxBuffer,
-stdio,
-all,
+stdio: output.map((stdioOutput, fdNumber) => stripNewline(stdioOutput, options, fdNumber)),
+all: stripNewline(getAllSync(output, options), options, "all"),
 options,
 command,
 escapedCommand,
@@ -5619,8 +5542,7 @@ const runSubprocessSync = ({ file, commandArguments, options, command, escapedCo
 const runSubprocessSync = ({ file, commandArguments, options, command, escapedCommand, fileDescriptors, startTime }) => {
 try {
 addInputOptionsSync(fileDescriptors, options);
-
-return spawnSync(file, commandArguments, normalizedOptions);
+return spawnSync(file, commandArguments, normalizeSpawnSyncOptions(options));
 } catch (error) {
 return makeEarlyError({
 error,
@@ -5838,19 +5760,17 @@ const handleEarlyError = ({ error, command, escapedCommand, fileDescriptors, opt
 writable,
 duplex
 });
-const earlyError = makeEarlyError({
-error,
-command,
-escapedCommand,
-fileDescriptors,
-options,
-startTime,
-isSync: false
-});
-const promise = handleDummyPromise(earlyError, verboseInfo, options);
 return {
 subprocess,
-promise
+promise: handleDummyPromise(makeEarlyError({
+error,
+command,
+escapedCommand,
+fileDescriptors,
+options,
+startTime,
+isSync: false
+}), verboseInfo, options)
 };
 };
 const createDummyStreams = (subprocess, fileDescriptors) => {
@@ -5899,8 +5819,7 @@ const addProperties = {
 nodeStream: ({ value }) => ({ stream: value }),
 webTransform({ value: { transform, writableObjectMode, readableObjectMode } }) {
 const objectMode = writableObjectMode || readableObjectMode;
-
-return { stream };
+return { stream: Duplex.fromWeb(transform, { objectMode }) };
 },
 duplex: ({ value: { transform } }) => ({ stream: transform }),
 native() {}
@@ -6146,10 +6065,7 @@ const pipeOutputAsync = (subprocess, fileDescriptors, controller) => {
 controller
 });
 }
-for (const [outputStream, inputStreams] of pipeGroups.entries())
-const inputStream = inputStreams.length === 1 ? inputStreams[0] : mergeStreams(inputStreams);
-pipeStreams(inputStream, outputStream);
-}
+for (const [outputStream, inputStreams] of pipeGroups.entries()) pipeStreams(inputStreams.length === 1 ? inputStreams[0] : mergeStreams(inputStreams), outputStream);
 };
 const pipeTransform = (subprocess, stream, direction, fdNumber) => {
 if (direction === "output") pipeStreams(subprocess.stdio[fdNumber], stream);
@@ -6411,10 +6327,9 @@ const normalizePipeArguments = ({ source, sourcePromise, boundOptions, createNes
 const getDestinationStream = (boundOptions, createNested, pipeArguments) => {
 try {
 const { destination, pipeOptions: { from, to, unpipeSignal } = {} } = getDestination(boundOptions, createNested, ...pipeArguments);
-const destinationStream = getToStream(destination, to);
 return {
 destination,
-destinationStream,
+destinationStream: getToStream(destination, to),
 from,
 unpipeSignal
 };
@@ -6423,19 +6338,15 @@ const getDestinationStream = (boundOptions, createNested, pipeArguments) => {
 }
 };
 const getDestination = (boundOptions, createNested, firstArgument, ...pipeArguments) => {
-if (Array.isArray(firstArgument)) {
-
-
-
-pipeOptions: boundOptions
-};
-}
+if (Array.isArray(firstArgument)) return {
+destination: createNested(mapDestinationArguments, boundOptions)(firstArgument, ...pipeArguments),
+pipeOptions: boundOptions
+};
 if (typeof firstArgument === "string" || firstArgument instanceof URL || isDenoExecPath(firstArgument)) {
 if (Object.keys(boundOptions).length > 0) throw new TypeError("Please use .pipe(\"file\", ..., options) or .pipe(execa(\"file\", ..., options)) instead of .pipe(options)(\"file\", ...).");
 const [rawFile, rawArguments, rawOptions] = normalizeParameters(firstArgument, ...pipeArguments);
-const destination = createNested(mapDestinationArguments)(rawFile, rawArguments, rawOptions);
 return {
-destination,
+destination: createNested(mapDestinationArguments)(rawFile, rawArguments, rawOptions),
 pipeOptions: rawOptions
 };
 }
@@ -6455,8 +6366,7 @@ const mapDestinationArguments = ({ options }) => ({ options: {
 } });
 const getSourceStream = (source, from) => {
 try {
-
-return { sourceStream };
+return { sourceStream: getFromStream(source, from) };
 } catch (error) {
 return { sourceError: error };
 }
@@ -6550,9 +6460,8 @@ const unpipeOnAbort = (unpipeSignal, unpipeContext) => unpipeSignal === void 0 ?
 const unpipeOnSignalAbort = async (unpipeSignal, { sourceStream, mergedStream, fileDescriptors, sourceOptions, startTime }) => {
 await aborted(unpipeSignal, sourceStream);
 await mergedStream.remove(sourceStream);
-const error = /* @__PURE__ */ new Error("Pipe canceled by `unpipeSignal` option.");
 throw createNonCommandError({
-error,
+error: /* @__PURE__ */ new Error("Pipe canceled by `unpipeSignal` option."),
 fileDescriptors,
 sourceOptions,
 startTime
@@ -6655,13 +6564,12 @@ const stopReadingOnStreamEnd = async (onStreamEnd, controller, stream) => {
 }
 };
 const iterateOnStream = ({ stream, controller, binary, shouldEncode, encoding, shouldSplit, preserveNewlines }) => {
-const onStdoutChunk = on(stream, "data", {
-signal: controller.signal,
-highWaterMark: HIGH_WATER_MARK,
-highWatermark: HIGH_WATER_MARK
-});
 return iterateOnData({
-onStdoutChunk,
+onStdoutChunk: on(stream, "data", {
+signal: controller.signal,
+highWaterMark: HIGH_WATER_MARK,
+highWatermark: HIGH_WATER_MARK
+}),
 controller,
 binary,
 shouldEncode,
@@ -6706,13 +6614,12 @@ const getStreamOutput = async ({ stream, onStreamEnd, fdNumber, encoding, buffer
 await Promise.all([resumeStream(stream), logPromise]);
 return;
 }
-const stripFinalNewlineValue = getStripFinalNewline(stripFinalNewline$1, fdNumber);
 const iterable = iterateForResult({
 stream,
 onStreamEnd,
 lines,
 encoding,
-stripFinalNewline:
+stripFinalNewline: getStripFinalNewline(stripFinalNewline$1, fdNumber),
 allMixed
 });
 const [output] = await Promise.all([getStreamContents({
@@ -6732,15 +6639,14 @@ const logOutputAsync = async ({ stream, onStreamEnd, fdNumber, encoding, allMixe
 verboseInfo,
 fdNumber
 })) return;
-
+await logLines(iterateForResult({
 stream,
 onStreamEnd,
 lines: true,
 encoding,
 stripFinalNewline: true,
 allMixed
-});
-await logLines(linesIterable, stream, fdNumber, verboseInfo);
+}), stream, fdNumber, verboseInfo);
 };
 const resumeStream = async (stream) => {
 await setImmediate();
@@ -6890,10 +6796,9 @@ const getAllMixed = ({ all, stdout: stdout$1, stderr }) => all && stdout$1 && st
 //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/verbose/ipc.js
 const shouldLogIpc = (verboseInfo) => isFullVerbose(verboseInfo, "ipc");
 const logIpcOutput = (message, verboseInfo) => {
-const verboseMessage = serializeVerboseMessage(message);
 verboseLog({
 type: "ipc",
-verboseMessage,
+verboseMessage: serializeVerboseMessage(message),
 fdNumber: "ipc",
 verboseInfo
 });
@@ -7036,9 +6941,8 @@ const addConcurrentStream = (concurrentStreams, stream, waitName) => {
 const promises = weakMap.get(stream);
 const promise = createDeferred();
 promises.push(promise);
-const resolve = promise.resolve.bind(promise);
 return {
-resolve,
+resolve: promise.resolve.bind(promise),
 promises
 };
 };
@@ -7119,10 +7023,9 @@ const createReadable = ({ subprocess, concurrentStreams, encoding }, { from, bin
 };
 const getSubprocessStdout = (subprocess, from, concurrentStreams) => {
 const subprocessStdout = getFromStream(subprocess, from);
-const waitReadableDestroy = addConcurrentStream(concurrentStreams, subprocessStdout, "readableDestroy");
 return {
 subprocessStdout,
-waitReadableDestroy
+waitReadableDestroy: addConcurrentStream(concurrentStreams, subprocessStdout, "readableDestroy")
 };
 };
 const getReadableOptions = ({ readableEncoding, readableObjectMode, readableHighWaterMark }, binary) => binary ? {
@@ -7200,12 +7103,10 @@ const createWritable = ({ subprocess, concurrentStreams }, { to } = {}) => {
 };
 const getSubprocessStdin = (subprocess, to, concurrentStreams) => {
 const subprocessStdin = getToStream(subprocess, to);
-const waitWritableFinal = addConcurrentStream(concurrentStreams, subprocessStdin, "writableFinal");
-const waitWritableDestroy = addConcurrentStream(concurrentStreams, subprocessStdin, "writableDestroy");
 return {
 subprocessStdin,
-waitWritableFinal,
-waitWritableDestroy
+waitWritableFinal: addConcurrentStream(concurrentStreams, subprocessStdin, "writableFinal"),
+waitWritableDestroy: addConcurrentStream(concurrentStreams, subprocessStdin, "writableDestroy")
 };
 };
 const getWritableMethods = (subprocessStdin, subprocess, waitWritableFinal) => ({
@@ -7301,15 +7202,14 @@ const onDuplexDestroy = async ({ subprocessStdout, subprocessStdin, subprocess,
 const createIterable = (subprocess, encoding, { from, binary: binaryOption = false, preserveNewlines = false } = {}) => {
 const binary = binaryOption || BINARY_ENCODINGS.has(encoding);
 const subprocessStdout = getFromStream(subprocess, from);
-
+return iterateOnStdoutData(iterateOnSubprocessStream({
 subprocessStdout,
 subprocess,
 binary,
 shouldEncode: true,
 encoding,
 preserveNewlines
-});
-return iterateOnStdoutData(onStdoutData, subprocessStdout, subprocess);
+}), subprocessStdout, subprocess);
 };
 const iterateOnStdoutData = async function* (onStdoutData, subprocessStdout, subprocess) {
 try {
|
|
|
7391
7291
|
const { command, escapedCommand, startTime, verboseInfo } = handleCommand(rawFile, rawArguments, rawOptions);
|
|
7392
7292
|
const { file, commandArguments, options: normalizedOptions } = normalizeOptions(rawFile, rawArguments, rawOptions);
|
|
7393
7293
|
const options = handleAsyncOptions(normalizedOptions);
|
|
7394
|
-
const fileDescriptors = handleStdioAsync(options, verboseInfo);
|
|
7395
7294
|
return {
|
|
7396
7295
|
file,
|
|
7397
7296
|
commandArguments,
|
|
@@ -7400,7 +7299,7 @@ const handleAsyncArguments = (rawFile, rawArguments, rawOptions) => {
    startTime,
    verboseInfo,
    options,
-   fileDescriptors
+   fileDescriptors: handleStdioAsync(options, verboseInfo)
  };
};
const handleAsyncOptions = ({ timeout, signal,...options }) => {
@@ -7473,22 +7372,19 @@ const handlePromise = async ({ subprocess, options, startTime, verboseInfo, file
  });
  controller.abort();
  onInternalError.resolve();
-
- const all = stripNewline(allResult, options, "all");
- const result = getAsyncResult({
+ return handleResult(getAsyncResult({
    errorInfo,
    exitCode,
    signal,
-   stdio,
-   all,
+   stdio: stdioResults.map((stdioResult, fdNumber) => stripNewline(stdioResult, options, fdNumber)),
+   all: stripNewline(allResult, options, "all"),
    ipcOutput,
    context,
    options,
    command,
    escapedCommand,
    startTime
- });
- return handleResult(result, verboseInfo, options);
+ }), verboseInfo, options);
};
const getAsyncResult = ({ errorInfo, exitCode, signal, stdio, all, ipcOutput, context, options, command, escapedCommand, startTime }) => "error" in errorInfo ? makeError({
  error: errorInfo.error,
@@ -7561,8 +7457,7 @@ const callBoundExeca = ({ mapArguments, deepOptions = {}, boundOptions = {}, set
  return isSync ? execaCoreSync(file, commandArguments, options) : execaCoreAsync(file, commandArguments, options, createNested);
};
const parseArguments = ({ mapArguments, firstArgument, nextArguments, deepOptions, boundOptions }) => {
- const
- const [initialFile, initialArguments, initialOptions] = normalizeParameters(...callArguments);
+ const [initialFile, initialArguments, initialOptions] = normalizeParameters(...isTemplateString(firstArgument) ? parseTemplates(firstArgument, nextArguments) : [firstArgument, ...nextArguments]);
  const mergedOptions = mergeOptions(mergeOptions(deepOptions, boundOptions), initialOptions);
  const { file = initialFile, commandArguments = initialArguments, options = mergedOptions, isSync = false } = mapArguments({
    file: initialFile,
@@ -7668,8 +7563,7 @@ function delay(ms, { signal } = {}) {
function isPlainObject(value) {
  if (!value || typeof value !== "object") return false;
  const proto = Object.getPrototypeOf(value);
-
- if (!hasObjectPrototype) return false;
+ if (!(proto === null || proto === Object.prototype || Object.getPrototypeOf(proto) === null)) return false;
  return Object.prototype.toString.call(value) === "[object Object]";
}

@@ -7716,8 +7610,7 @@ var CloudFrontManager = class {
    region: this.region,
    credentials: this.credentials
  });
- const
- const existingKeyGroup = listKgResp.KeyGroupList?.Items?.find((kg) => kg.KeyGroup?.KeyGroupConfig?.Name?.startsWith(`HotUpdaterKeyGroup-${publicKeyHash}`));
+ const existingKeyGroup = (await cloudfrontClient.listKeyGroups({})).KeyGroupList?.Items?.find((kg) => kg.KeyGroup?.KeyGroupConfig?.Name?.startsWith(`HotUpdaterKeyGroup-${publicKeyHash}`));
  const existingPublicKeyId = existingKeyGroup?.KeyGroup?.KeyGroupConfig?.Items?.[0];
  const existingKeyGroupId = existingKeyGroup?.KeyGroup?.Id;
  if (existingPublicKeyId && existingKeyGroupId) return {
@@ -7731,8 +7624,7 @@ var CloudFrontManager = class {
    EncodedKey: publicKey,
    Comment: "HotUpdater public key for signed URL"
  };
- const
- const publicKeyId = createPubKeyResp.PublicKey?.Id;
+ const publicKeyId = (await cloudfrontClient.createPublicKey({ PublicKeyConfig: publicKeyConfig })).PublicKey?.Id;
  if (!publicKeyId) throw new Error("Failed to create CloudFront public key");
  const callerReferenceKg = `HotUpdaterKeyGroup-${publicKeyHash}`;
  const keyGroupConfig = {
@@ -7741,8 +7633,7 @@ var CloudFrontManager = class {
    Comment: "HotUpdater key group for signed URL",
    Items: [publicKeyId]
  };
- const
- const keyGroupId = createKgResp.KeyGroup?.Id;
+ const keyGroupId = (await cloudfrontClient.createKeyGroup({ KeyGroupConfig: keyGroupConfig })).KeyGroup?.Id;
  if (!keyGroupId) throw new Error("Failed to create Key Group");
  f.success(`Created new Key Group: ${keyGroupConfig.Name}`);
  return {
@@ -7756,37 +7647,28 @@ var CloudFrontManager = class {
    credentials: this.credentials
  });
  let oacId;
-
- if (!accountId) throw new Error("Failed to get AWS account ID");
+ if (!options.functionArn.split(":")[4]) throw new Error("Failed to get AWS account ID");
  try {
-   const
-   const existingOac = listOacResp.OriginAccessControlList?.Items?.find((oac) => oac.Name === "HotUpdaterOAC");
+   const existingOac = (await cloudfrontClient.listOriginAccessControls({})).OriginAccessControlList?.Items?.find((oac) => oac.Name === "HotUpdaterOAC");
    if (existingOac?.Id) oacId = existingOac.Id;
-   else {
-
-
-
-
-
-     oacId = createOacResp.OriginAccessControl?.Id;
-   }
- } catch (error) {
+   else oacId = (await cloudfrontClient.createOriginAccessControl({ OriginAccessControlConfig: {
+     Name: "HotUpdaterOAC",
+     OriginAccessControlOriginType: "s3",
+     SigningBehavior: "always",
+     SigningProtocol: "sigv4"
+   } })).OriginAccessControl?.Id;
+ } catch {
    throw new Error("Failed to get or create Origin Access Control");
  }
  if (!oacId) throw new Error("Failed to get Origin Access Control ID");
  const bucketDomain = `${options.bucketName}.s3.${this.region}.amazonaws.com`;
  const matchingDistributions = [];
  try {
-   const
-   const items
-
-
-
-     Id: dist.Id,
-     DomainName: dist.DomainName
-   });
- }
+   const items = (await cloudfrontClient.listDistributions({})).DistributionList?.Items || [];
+   for (const dist of items) if ((dist.Origins?.Items || []).some((origin) => origin.DomainName === bucketDomain)) matchingDistributions.push({
+     Id: dist.Id,
+     DomainName: dist.DomainName
+   });
  } catch (error) {
    console.error("Error listing CloudFront distributions:", error);
  }
@@ -8108,10 +7990,9 @@ var CloudFrontManager = class {
  title: "Waiting for CloudFront distribution to complete...",
  task: async (message) => {
    while (retryCount < 600) try {
-
-     if (status.Distribution?.Status === "Deployed") return "CloudFront distribution deployment completed.";
+     if ((await cloudfrontClient.getDistribution({ Id: distributionId })).Distribution?.Status === "Deployed") return "CloudFront distribution deployment completed.";
      throw new Error("Retry");
-   } catch (
+   } catch (_err) {
      if (retryCount++ >= 5) message(`CloudFront distribution is still in progress. This may take a few minutes. (${retryCount})`);
      await delay(1e3);
    }
@@ -8159,14 +8040,13 @@ var IAMManager = class {
      f.info(`Using existing IAM role: ${roleName} (${existingRole.Arn})`);
      return existingRole.Arn;
    }
- } catch
+ } catch {
    try {
-     const
+     const lambdaRoleArn = (await iamClient.createRole({
        RoleName: roleName,
        AssumeRolePolicyDocument: assumeRolePolicyDocument,
        Description: "Role for Lambda@Edge to access S3"
-     });
-     const lambdaRoleArn = createRoleResp.Role?.Arn;
+     })).Role?.Arn;
      f.info(`Created IAM role: ${roleName} (${lambdaRoleArn})`);
      await iamClient.attachRolePolicy({
        RoleName: roleName,
@@ -8203,8 +8083,7 @@ var LambdaEdgeDeployer = class {
  });
  if (BD(lambdaName)) process.exit(1);
  const lambdaPath = __require.resolve("@hot-updater/aws/lambda");
- const
- const { tmpDir, removeTmpDir } = await copyDirToTmp(lambdaDir);
+ const { tmpDir, removeTmpDir } = await copyDirToTmp(path$1.dirname(lambdaPath));
  const indexPath = path$1.join(tmpDir, "index.cjs");
  const code = transformEnv(indexPath, {
    CLOUDFRONT_KEY_PAIR_ID: keyPair.publicKey,
@@ -8230,7 +8109,7 @@ var LambdaEdgeDeployer = class {
      targetDir: tmpDir
    });
    return "Compressed Lambda code to zip";
- } catch
+ } catch {
    throw new Error("Failed to create zip archive of Lambda function code");
  }
}
@@ -8332,7 +8211,7 @@ var S3Migration = class {
  backupMapping = /* @__PURE__ */ new Map();
  async doUpdateFile(key, content, { cacheControl } = {}) {
    const normalizedKey = key.startsWith("/") ? key.substring(1) : key;
-
+   await new Upload({
      client: this.s3,
      params: {
        Bucket: this.bucketName,
@@ -8340,12 +8219,11 @@ var S3Migration = class {
        Body: content,
        CacheControl: cacheControl
      }
-   });
-   await upload.done();
+   }).done();
  }
  async getKeys(prefix) {
    const keys = [];
-   let continuationToken
+   let continuationToken;
    do {
      const command = new ListObjectsV2Command({
        Bucket: this.bucketName,
@@ -8398,8 +8276,7 @@ var S3Migration = class {
      console.log(import_picocolors$1.default.yellow(`[DRY RUN] Updated ${import_picocolors$1.default.bold(normalizedKey)}`));
      return;
    }
-
-   if (originalContent !== null) await this.backupFile(key);
+   if (await this.readFile(key) !== null) await this.backupFile(key);
    await this.doUpdateFile(normalizedKey, content, { cacheControl });
    console.log(import_picocolors$1.default.green(`Updated ${import_picocolors$1.default.bold(normalizedKey)}`));
  }
@@ -8503,37 +8380,32 @@ var S3Migrator = class {
  async saveMigrationRecords(dryRun) {
    if (dryRun) return;
    const body = JSON.stringify(this.migrationRecords, null, 2);
-
+   await new Upload({
      client: this.s3,
      params: {
        Bucket: this.bucketName,
        Key: this.migrationRecordKey,
        Body: body
      }
-   });
-   await upload.done();
+   }).done();
  }
  /**
  * Returns a JSON object containing applied and pending migrations.
  */
  async list() {
    await this.loadMigrationRecords();
-   const applied = this.migrationRecords.map((record) => ({
-     name: record.name,
-     appliedAt: record.appliedAt
-   }));
-   const pendingMigrations = this.migrations.filter((migration) => !this.migrationRecords.some((record) => record.name === migration.name));
-   const pending = pendingMigrations.map((migration) => ({ name: migration.name }));
    return {
-     applied
-
+     applied: this.migrationRecords.map((record) => ({
+       name: record.name,
+       appliedAt: record.appliedAt
+     })),
+     pending: this.migrations.filter((migration) => !this.migrationRecords.some((record) => record.name === migration.name)).map((migration) => ({ name: migration.name }))
    };
  }
  async migrate({ dryRun }) {
    await this.loadMigrationRecords();
    for (const migration of this.migrations) {
-
-     if (alreadyApplied) continue;
+     if (this.migrationRecords.some((record) => record.name === migration.name)) continue;
      console.log(import_picocolors$1.default.magenta(`Applying migration ${migration.name}...`));
      migration.s3 = this.s3;
      migration.bucketName = this.bucketName;
@@ -8574,8 +8446,7 @@ var Migration0001HotUpdater0_13_0 = class extends S3Migration {
      const newKey = `production/${key}`;
      await this.moveFile(key, newKey);
    }
-   const
-   const updateKeys = productionKeys.filter((key) => key.endsWith("update.json"));
+   const updateKeys = (await this.getKeys("production/")).filter((key) => key.endsWith("update.json"));
    for (const key of updateKeys) {
      const data = await this.readJson(key);
      if (data && Array.isArray(data)) {
@@ -8595,8 +8466,7 @@ var Migration0001HotUpdater0_13_0 = class extends S3Migration {
var Migration0001HotUpdater0_18_0 = class extends S3Migration {
  name = "hot-updater_0.18.0";
  async migrate() {
-   const
-   const updateKeys = keys.filter((key) => key.endsWith("update.json"));
+   const updateKeys = (await this.getKeys("")).filter((key) => key.endsWith("update.json"));
    for (const key of updateKeys) {
      const data = await this.readJson(key);
      if (data && Array.isArray(data)) {
@@ -8660,23 +8530,20 @@ var S3Manager = class {
      region: "us-east-1",
      credentials: this.credentials
    });
-   const
-
-   const bucketInfos = await Promise.all(buckets.filter((bucket) => bucket.Name).map(async (bucket) => {
+   const buckets = (await s3Client.listBuckets({})).Buckets ?? [];
+   return await Promise.all(buckets.filter((bucket) => bucket.Name).map(async (bucket) => {
      const { LocationConstraint: region } = await s3Client.getBucketLocation({ Bucket: bucket.Name });
      return {
        name: bucket.Name,
        region
      };
    }));
-   return bucketInfos;
  }
  async createBucket(bucketName, region) {
-
+   await new S3({
      region,
      credentials: this.credentials
-   })
-   await s3Client.createBucket({
+   }).createBucket({
      Bucket: bucketName,
      ...region === "us-east-1" ? {} : { CreateBucketConfiguration: { LocationConstraint: region } }
    });
@@ -8755,11 +8622,10 @@ var SSMKeyPairManager = class {
    }
  }
  async putParameter(name, value) {
-
+   await new SSM({
      region: this.region,
      credentials: this.credentials
-   })
-   await ssm.putParameter({
+   }).putParameter({
      Name: name,
      Value: value,
      Type: "SecureString",
@@ -8784,9 +8650,8 @@ var SSMKeyPairManager = class {
      format: "pem"
    }
  });
- const keyPairId = `HOTUPDATER-${crypto.randomBytes(4).toString("hex").toUpperCase()}`;
  const keyPair = {
-   keyPairId
+   keyPairId: `HOTUPDATER-${crypto.randomBytes(4).toString("hex").toUpperCase()}`,
    publicKey,
    privateKey
  };
@@ -8859,17 +8724,16 @@ const checkIfAwsCliInstalled = async () => {
  try {
    await execa("aws", ["--version"]);
    return true;
- } catch
+ } catch {
    return false;
  }
};
const runInit = async ({ build }) => {
-
- if (!isAwsCliInstalled) {
+ if (!await checkIfAwsCliInstalled()) {
    f.error(`AWS CLI is not installed. Please visit ${link("https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html")} for installation instructions`);
    process.exit(1);
  }
- let credentials
+ let credentials;
  const mode = await ve({
    message: "Select the mode to login to AWS",
    options: [{
@@ -8980,14 +8844,11 @@ const runInit = async ({ build }) => {
    region: bucketRegion,
    migrations: [new Migration0001HotUpdater0_13_0(), new Migration0001HotUpdater0_18_0()]
  });
- const
- const
- const ssmKeyPairManager = new SSMKeyPairManager(bucketRegion, credentials);
- const keyPair = await ssmKeyPairManager.getOrCreateKeyPair(`/hot-updater/${bucketName}/keypair`);
+ const lambdaRoleArn = await new IAMManager(bucketRegion, credentials).createOrSelectRole();
+ const keyPair = await new SSMKeyPairManager(bucketRegion, credentials).getOrCreateKeyPair(`/hot-updater/${bucketName}/keypair`);
  const cloudFrontManager = new CloudFrontManager(bucketRegion, credentials);
  const { publicKeyId, keyGroupId } = await cloudFrontManager.getOrCreateKeyGroup(keyPair.publicKey);
- const
- const { functionArn } = await lambdaEdgeDeployer.deploy(lambdaRoleArn, {
+ const { functionArn } = await new LambdaEdgeDeployer(credentials).deploy(lambdaRoleArn, {
    publicKey: publicKeyId,
    privateKey: keyPair.privateKey
  });