@coana-tech/cli 14.12.159 → 14.12.161
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.mjs +528 -154
- package/package.json +1 -1
- package/reachability-analyzers-cli.mjs +269 -87
- package/repos/coana-tech/goana/bin/goana-darwin-amd64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-darwin-arm64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-linux-amd64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-linux-arm64.gz +0 -0
- package/repos/coana-tech/javap-service/javap-service.jar +0 -0
package/cli.mjs
CHANGED
@@ -69910,7 +69910,7 @@ var require_lockfile = __commonJS({
 }
 const file = _ref22;
 if (yield exists2(file)) {
-return
+return readFile38(file);
 }
 }
 return null;
@@ -69929,7 +69929,7 @@ var require_lockfile = __commonJS({
 })();
 let readJsonAndFile = exports3.readJsonAndFile = (() => {
 var _ref24 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (loc) {
-const file = yield
+const file = yield readFile38(loc);
 try {
 return {
 object: (0, (_map || _load_map()).default)(JSON.parse(stripBOM2(file))),
@@ -70169,7 +70169,7 @@ var require_lockfile = __commonJS({
 };
 })();
 exports3.copy = copy;
-exports3.readFile =
+exports3.readFile = readFile38;
 exports3.readFileRaw = readFileRaw;
 exports3.normalizeOS = normalizeOS;
 var _fs;
@@ -70267,7 +70267,7 @@ var require_lockfile = __commonJS({
 });
 });
 }
-function
+function readFile38(loc) {
 return _readFile(loc, "utf8").then(normalizeOS);
 }
 function readFileRaw(loc) {
@@ -199895,7 +199895,7 @@ var {
 } = import_index.default;

 // dist/index.js
-import { mkdir as mkdir7, mkdtemp as mkdtemp2, readFile as
+import { mkdir as mkdir7, mkdtemp as mkdtemp2, readFile as readFile37, rm as rm3, writeFile as writeFile17 } from "fs/promises";
 import { tmpdir as tmpdir5 } from "os";
 import { dirname as dirname26, join as join35, resolve as resolve44 } from "path";

@@ -205220,53 +205220,131 @@ var Spinner = class _Spinner {

 // ../utils/src/telemetry/telemetry-collector.ts
 import { execFile } from "child_process";
-import {
+import { readFile as readFile3 } from "fs/promises";
+import { platform } from "process";
 import { promisify } from "util";
 var execFileAsync = promisify(execFile);
-var TelemetryCollector = class {
+var TelemetryCollector = class _TelemetryCollector {
+constructor(pid) {
+this.pid = pid;
+}
+previousCpuState;
+clockTicksPerSecond;
+pageSize;
+isCollecting = false;
+static create(pid) {
+if (!Number.isInteger(pid) || pid <= 0 || !["darwin", "linux", "win32"].includes(platform)) return void 0;
+return new _TelemetryCollector(pid);
+}
 /**
-* Collect
-* Uses OS-specific
-*
-* @param pid - The process ID to query
-* @returns TelemetryMetrics or undefined if the process doesn't exist or query fails
+* Collect metrics for the child process.
+* Uses OS-specific methods to query memory and CPU usage.
 */
-async collectChildProcessMetrics(
-if (
+async collectChildProcessMetrics() {
+if (this.isCollecting) {
 return void 0;
 }
+this.isCollecting = true;
 try {
-
-
-
+if (platform === "darwin") {
+return await this.collectDarwinProcessMetrics();
+}
+if (platform === "linux") {
+return await this.collectLinuxProcessMetrics();
+}
+if (platform === "win32") {
+return await this.collectWindowsProcessMetrics();
 }
 return void 0;
 } catch {
 return void 0;
+} finally {
+this.isCollecting = false;
 }
 }
 /**
-* Collect metrics
+* Collect metrics on macOS using ps command.
 */
-async
+async collectDarwinProcessMetrics() {
 try {
-const { stdout } = await execFileAsync("ps", ["-o", "rss=,pcpu=", "-p", String(pid)], {
+const { stdout } = await execFileAsync("ps", ["-o", "rss=,pcpu=", "-p", String(this.pid)], {
 timeout: 5e3
 });
+const parts = stdout.trim().split(/\s+/);
+if (parts.length < 2) return void 0;
+const rssKb = parseInt(parts[0], 10);
+const cpuPercent = parseFloat(parts[1]);
+if (isNaN(rssKb) || isNaN(cpuPercent)) return void 0;
+return {
+rss: rssKb * 1024,
+// Convert KB to bytes
+// Note: cpuPercent can exceed 100% on multi-core systems (e.g., 250% = 2.5 cores used)
+cpuPercent
+};
+} catch {
+return void 0;
+}
+}
+/**
+* Collect metrics on Linux using /proc filesystem.
+* This works on all Linux distributions including Alpine (BusyBox).
+*/
+async collectLinuxProcessMetrics() {
+try {
+const statmContent = await readFile3(`/proc/${this.pid}/statm`, "utf-8");
+const statmParts = statmContent.trim().split(/\s+/);
+if (statmParts.length < 2) {
+return void 0;
+}
+const residentPages = parseInt(statmParts[1], 10);
+if (isNaN(residentPages)) {
+return void 0;
+}
+const pageSize = await this.getPageSize();
+const rssBytes = residentPages * pageSize;
+const statContent = await readFile3(`/proc/${this.pid}/stat`, "utf-8");
+const cpuPercent = await this.calculateCpuPercent(statContent);
+return {
+rss: rssBytes,
+cpuPercent
+};
+} catch {
+return void 0;
+}
+}
+/**
+* Collect metrics on Windows using PowerShell.
+* Uses Get-Process for memory and CPU time tracking for CPU%.
+*/
+async collectWindowsProcessMetrics() {
+try {
+const psScript = "$p = Get-Process -Id $args[0] -ErrorAction Stop; @{WorkingSet64=$p.WorkingSet64; TotalMs=$p.TotalProcessorTime.TotalMilliseconds} | ConvertTo-Json";
+const { stdout } = await execFileAsync(
+"powershell",
+["-NoProfile", "-Command", psScript, String(this.pid)],
+{ timeout: 5e3 }
+);
 const trimmed = stdout.trim();
 if (!trimmed) {
 return void 0;
 }
-const
-
+const data2 = JSON.parse(trimmed);
+const rssBytes = data2.WorkingSet64;
+const totalCpuMs = data2.TotalMs;
+if (typeof rssBytes !== "number" || typeof totalCpuMs !== "number") {
 return void 0;
 }
-const
-const
-
-
+const now = Date.now();
+const currentState = { totalCpuTime: totalCpuMs, timestamp: now };
+let cpuPercent = 0;
+if (this.previousCpuState) {
+const timeDeltaMs = now - this.previousCpuState.timestamp;
+const cpuTimeDelta = totalCpuMs - this.previousCpuState.totalCpuTime;
+if (timeDeltaMs > 0 && cpuTimeDelta >= 0) {
+cpuPercent = Math.max(0, cpuTimeDelta / timeDeltaMs * 100);
+}
 }
-
+this.previousCpuState = currentState;
 return {
 rss: rssBytes,
 cpuPercent
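For context, the Darwin branch above samples the child from the outside with `ps -o rss=,pcpu=`. A minimal standalone sketch of that sampling technique (the helper name `sampleWithPs` and the self-sampling demo are illustrative, not part of this package):

```js
// Sketch: one sample of RSS (bytes) and CPU% for a pid via `ps -o rss=,pcpu=`.
// Works on macOS/Linux; `sampleWithPs` is an illustrative helper, not the package's API.
import { execFile } from "child_process";
import { promisify } from "util";

const execFileAsync = promisify(execFile);

async function sampleWithPs(pid) {
  const { stdout } = await execFileAsync("ps", ["-o", "rss=,pcpu=", "-p", String(pid)], {
    timeout: 5000,
  });
  const [rssKb, pcpu] = stdout.trim().split(/\s+/).map(Number);
  if (!Number.isFinite(rssKb) || !Number.isFinite(pcpu)) return undefined;
  return { rss: rssKb * 1024, cpuPercent: pcpu }; // ps reports RSS in KiB
}

// Example: sample this very process, which is guaranteed to exist.
sampleWithPs(process.pid).then((metrics) => console.log(metrics));
```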
@@ -205275,6 +205353,74 @@ var TelemetryCollector = class {
 return void 0;
 }
 }
+/**
+* Calculate CPU percentage from /proc/<pid>/stat.
+* Requires tracking state between calls to compute the delta.
+*/
+async calculateCpuPercent(statContent) {
+try {
+const lastParen = statContent.lastIndexOf(")");
+if (lastParen === -1) {
+return 0;
+}
+const fieldsAfterComm = statContent.slice(lastParen + 2).split(/\s+/);
+const utime = parseInt(fieldsAfterComm[11], 10);
+const stime = parseInt(fieldsAfterComm[12], 10);
+if (isNaN(utime) || isNaN(stime)) {
+return 0;
+}
+const totalCpuTime = utime + stime;
+const now = Date.now();
+const clockTicks = await this.getClockTicksPerSecond();
+const currentState = { totalCpuTime, timestamp: now };
+if (!this.previousCpuState) {
+this.previousCpuState = currentState;
+return 0;
+}
+const timeDeltaMs = now - this.previousCpuState.timestamp;
+const cpuTimeDelta = totalCpuTime - this.previousCpuState.totalCpuTime;
+this.previousCpuState = currentState;
+if (timeDeltaMs <= 0 || cpuTimeDelta < 0) {
+return 0;
+}
+const cpuPercent = cpuTimeDelta * 1e3 * 100 / (clockTicks * timeDeltaMs);
+return Math.max(0, cpuPercent);
+} catch {
+return 0;
+}
+}
+/**
+* Get the system page size in bytes (cached after first call).
+*/
+async getPageSize() {
+if (this.pageSize !== void 0) {
+return this.pageSize;
+}
+try {
+const { stdout } = await execFileAsync("getconf", ["PAGE_SIZE"], { timeout: 1e3 });
+const parsed = parseInt(stdout.trim(), 10);
+this.pageSize = isNaN(parsed) ? 4096 : parsed;
+} catch {
+this.pageSize = 4096;
+}
+return this.pageSize;
+}
+/**
+* Get the number of clock ticks per second (used for CPU time conversion).
+*/
+async getClockTicksPerSecond() {
+if (this.clockTicksPerSecond !== void 0) {
+return this.clockTicksPerSecond;
+}
+try {
+const { stdout } = await execFileAsync("getconf", ["CLK_TCK"], { timeout: 1e3 });
+const ticks = parseInt(stdout.trim(), 10);
+this.clockTicksPerSecond = isNaN(ticks) ? 100 : ticks;
+} catch {
+this.clockTicksPerSecond = 100;
+}
+return this.clockTicksPerSecond;
+}
 };

 // ../utils/src/telemetry/analyzer-telemetry-server.ts
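The Linux helpers above turn two readings of `/proc/<pid>/stat` into a CPU percentage: utime + stime are clock ticks, converted to milliseconds via CLK_TCK and divided by the wall-clock delta between samples. A self-contained sketch of the same arithmetic, assuming the standard procfs field layout and the common 100 Hz default (`cpuPercentOver` is an illustrative name, not an export of this package):

```js
// Sketch: CPU% of a pid from two `/proc/<pid>/stat` samples (Linux only).
import { readFile } from "fs/promises";

function cpuTicks(stat) {
  // Skip past the "(comm)" field, which may itself contain spaces; in the remainder,
  // utime and stime are the fields at 0-based indices 11 and 12.
  const rest = stat.slice(stat.lastIndexOf(")") + 2).split(/\s+/);
  return parseInt(rest[11], 10) + parseInt(rest[12], 10);
}

export async function cpuPercentOver(pid, windowMs = 1000, clkTck = 100) {
  const path = `/proc/${pid}/stat`;
  const start = Date.now();
  const before = cpuTicks(await readFile(path, "utf-8"));
  await new Promise((resolve) => setTimeout(resolve, windowMs));
  const after = cpuTicks(await readFile(path, "utf-8"));
  const elapsedMs = Date.now() - start;
  // ticks -> ms via CLK_TCK, then expressed as a percentage of the elapsed window.
  return Math.max(0, ((after - before) * 1000 * 100) / (clkTck * elapsedMs));
}
```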
@@ -205376,14 +205522,14 @@ function startHeartbeat(options) {
 return () => clearInterval(timer);
 }
 var DEFAULT_TELEMETRY_INTERVAL_MS = 5e3;
-function startTelemetry(
-const collector =
+function startTelemetry(subprocess, handler) {
+const collector = TelemetryCollector.create(subprocess.pid);
+if (!collector) return;
+handler.onInit?.(subprocess);
 const intervalMs = handler.intervalMs ?? DEFAULT_TELEMETRY_INTERVAL_MS;
 const collectAndReport = async () => {
-const metrics = await collector.collectChildProcessMetrics(
-if (metrics)
-handler.onTelemetry(metrics);
-}
+const metrics = await collector.collectChildProcessMetrics();
+if (metrics) handler.onTelemetry(metrics);
 };
 collectAndReport().catch((err) => {
 logger.debug("Initial telemetry collection failed:", err);
@@ -205395,6 +205541,48 @@ function startTelemetry(pid, handler) {
 timer.unref?.();
 return () => clearInterval(timer);
 }
+function wrapWithMemoryLimit(cmd, options) {
+const memoryLimitKiB = Math.ceil(options.memoryLimitInMB * 1024);
+if (memoryLimitKiB <= 0)
+throw new Error(`memoryLimitInMB * 1024 must be a positive number, got: ${memoryLimitKiB}`);
+switch (process.platform) {
+case "darwin":
+{
+const prevHandler = options.telemetryHandler;
+let subprocess;
+options.telemetryHandler = {
+intervalMs: prevHandler?.intervalMs ?? 2e3,
+onInit: (sp) => {
+subprocess = sp;
+prevHandler?.onInit?.(sp);
+},
+onTelemetry(metrics) {
+if (subprocess?.exitCode === null && metrics.rss >= memoryLimitKiB * 1024) {
+logger.debug(
+`Memory limit of ${options.memoryLimitInMB} MiB exceeded (RSS: ${(metrics.rss / 1024 / 1024).toFixed(
+2
+)} MiB). Terminating process.`
+);
+subprocess.kill(options.killSignal ?? "SIGTERM");
+subprocess = void 0;
+}
+prevHandler?.onTelemetry(metrics);
+}
+};
+}
+break;
+case "linux":
+return [
+"sh",
+...typeof cmd === "string" ? ["-c", `ulimit -v ${memoryLimitKiB} && eval "$1"`, "_", cmd] : ["-c", `ulimit -v ${memoryLimitKiB} && exec "$@"`, "_", ...cmd]
+];
+default:
+logger.debug(
+`Memory limit enforcement is not supported on platform: ${process.platform}. Ignoring memory limit option.`
+);
+}
+return cmd;
+}
 async function execNeverFail(cmd, dir, options) {
 const stopHeartbeat = options?.heartbeat ? startHeartbeat(options.heartbeat) : void 0;
 let stopTelemetry;
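wrapWithMemoryLimit enforces the limit differently per platform: on Linux it rewrites the command to run under `sh -c 'ulimit -v <KiB> && exec "$@"'`, so the kernel caps the child's virtual address space, while on macOS it piggybacks on the telemetry handler and kills the child once RSS crosses the limit. A runnable sketch of the Linux wrapping; the wrapper name and the `cat /proc/self/limits` demo are illustrative assumptions, not part of this package:

```js
// Sketch: cap a child's virtual address space with `ulimit -v` before exec'ing it (Linux).
import { execFile } from "child_process";

function withVirtualMemoryLimit(argv, limitKiB) {
  // `exec "$@"` replaces the shell with the real command once the limit is in place;
  // "_" fills $0 so the wrapped command starts at $1.
  return ["sh", "-c", `ulimit -v ${limitKiB} && exec "$@"`, "_", ...argv];
}

// Demo: the child prints the address-space limit the kernel actually applied to it.
const [cmd, ...args] = withVirtualMemoryLimit(["cat", "/proc/self/limits"], 256 * 1024);
execFile(cmd, args, (error, stdout) => {
  if (error) throw error;
  console.log(stdout.split("\n").find((line) => line.startsWith("Max address space")));
});
```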
@@ -205404,6 +205592,8 @@ async function execNeverFail(cmd, dir, options) {
 analyzerTelemetryServer = new AnalyzerTelemetryServer(options.analyzerTelemetryHandler);
 analyzerTelemetryFilePath = await analyzerTelemetryServer.start();
 }
+if (options?.memoryLimitInMB !== void 0)
+cmd = wrapWithMemoryLimit(cmd, options);
 try {
 return await new Promise((resolve45) => {
 let args2;
@@ -205418,9 +205608,8 @@ async function execNeverFail(cmd, dir, options) {
 resolve45({ error, stdout, stderr });
 }
 );
-if (options?.telemetryHandler && childProcess.pid)
-stopTelemetry = startTelemetry(childProcess
-}
+if (options?.telemetryHandler && childProcess.pid)
+stopTelemetry = startTelemetry(childProcess, options.telemetryHandler);
 if (options?.pipe) {
 childProcess.stdout?.on("data", (data2) => {
 Spinner.instance().suspend(() => {
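The darwin path of the same feature is telemetry-driven: the collector's periodic RSS samples are compared against the limit and the child is sent SIGTERM when it crosses it. A standalone sketch of that poll-and-kill loop, using `ps` as the sampler and an idle `node -e` child purely for illustration (none of these names come from the package):

```js
// Sketch: poll a child's RSS and terminate it if a memory limit is exceeded.
import { execFile, spawn } from "child_process";
import { promisify } from "util";

const execFileAsync = promisify(execFile);
const LIMIT_BYTES = 512 * 1024 * 1024; // illustrative 512 MiB limit

// Illustrative child: idles for ~8 seconds, then exits on its own.
const child = spawn("node", ["-e", "setTimeout(() => {}, 8000)"], { stdio: "ignore" });

const timer = setInterval(async () => {
  if (child.exitCode !== null) return; // already exited; the exit handler stops the timer
  try {
    const { stdout } = await execFileAsync("ps", ["-o", "rss=", "-p", String(child.pid)]);
    const rssBytes = parseInt(stdout.trim(), 10) * 1024; // ps reports RSS in KiB
    if (rssBytes >= LIMIT_BYTES) {
      console.log(`RSS ${rssBytes} bytes exceeds limit, sending SIGTERM`);
      child.kill("SIGTERM");
    }
  } catch {
    // ps failed (process likely already gone); nothing to do this tick
  }
}, 2000);
timer.unref?.(); // as in the diff, don't keep the event loop alive just for polling
child.on("exit", () => clearInterval(timer));
```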
@@ -212545,7 +212734,7 @@ function splitLines(text3) {

 // ../fixing-management/src/fixing-management/utils/socket-patch-utils.ts
 import { existsSync as existsSync3 } from "node:fs";
-import { readFile as
+import { readFile as readFile4, writeFile as writeFile3 } from "node:fs/promises";
 import { resolve as resolve4 } from "node:path";

 // ../utils/src/version-comparison/version-satisfies.ts
@@ -214068,7 +214257,7 @@ async function applyPatches(ecosystem, rootDir, patches, ctxt, patchAppliedMessa
 if (!existsSync3(fullPath)) {
 await writeFile3(fullPath, "", "utf-8");
 }
-let fileContent = await
+let fileContent = await readFile4(fullPath, "utf-8");
 const groupedPatches = groupPatches(rootDir, patches2);
 const resolvedPatches = resolveConflicts(ecosystem, groupedPatches, ctxt);
 const sortedPatches = resolvedPatches.sort(
@@ -214176,7 +214365,7 @@ function resolveConflicts(ecosystem, patches, ctxt) {

 // ../utils/src/go-mod-utils.ts
 import { dirname as dirname6, resolve as resolve5 } from "node:path";
-import { readFile as
+import { readFile as readFile5 } from "node:fs/promises";

 // ../utils/src/go-mod-parser.ts
 function recordError(state, message2, col) {
@@ -214872,7 +215061,7 @@ function parseGoMod(content, options = {}) {
 // ../utils/src/go-mod-utils.ts
 async function parseGoModFile(rootDir, goModPath) {
 const fullPath = resolve5(rootDir, goModPath);
-const content = await
+const content = await readFile5(fullPath, "utf-8");
 const result = parseGoMod(content, { includeOffsets: true });
 return {
 ...result,
@@ -215225,12 +215414,12 @@ replace ${modulePath} ${currentVersion} => ${modulePath} ${newVersion}

 // ../fixing-management/src/fixing-management/maven/gradle-fixing-manager.ts
 import { existsSync as existsSync6 } from "node:fs";
-import { readFile as
+import { readFile as readFile9 } from "node:fs/promises";
 import { join as join7, resolve as resolve9 } from "node:path";

 // ../fixing-management/src/fixing-management/utils/coana-patch-application.ts
 import { existsSync as existsSync4 } from "node:fs";
-import { readFile as
+import { readFile as readFile6, writeFile as writeFile4 } from "node:fs/promises";
 import { resolve as resolve7 } from "node:path";
 function detectPatchConflicts(rootDir, patchResults) {
 const patchesByFile = /* @__PURE__ */ new Map();
@@ -215363,7 +215552,7 @@ async function applyPatchResults(ecosystem, rootDir, patchResults) {
 if (!existsSync4(filePath)) {
 await writeFile4(filePath, "", "utf-8");
 }
-let fileContent = await
+let fileContent = await readFile6(filePath, "utf-8");
 for (const patch of sortedPatches) {
 const start = patch.offset;
 const end2 = patch.offset + (patch.oldText?.length ?? 0);
@@ -215375,7 +215564,7 @@ async function applyPatchResults(ecosystem, rootDir, patchResults) {

 // ../fixing-management/src/fixing-management/maven/gradle-build-file-helper.ts
 var import_good_enough_parser = __toESM(require_cjs(), 1);
-import { readFile as
+import { readFile as readFile7 } from "node:fs/promises";

 // ../fixing-management/src/fixing-management/maven/utils.ts
 import { existsSync as existsSync5 } from "node:fs";
@@ -215597,7 +215786,7 @@ var treeQuery = import_good_enough_parser.query.tree({
 });
 async function findDependencyDeclsAndCatalogFiles(filePath) {
 const gradleLang = import_good_enough_parser.lang.createLang("groovy");
-const cursor = gradleLang.parse(await
+const cursor = gradleLang.parse(await readFile7(filePath, "utf-8"));
 const ctx = gradleLang.query(cursor, treeQuery, {
 mem: {},
 depDecls: [],
@@ -215633,7 +215822,7 @@ ${getConstraintsBlockString(groupId, artifactId, classifier, version4, indentati
 }

 // ../fixing-management/src/fixing-management/maven/gradle-version-catalog-helper.ts
-import { readFile as
+import { readFile as readFile8 } from "node:fs/promises";

 // ../utils/src/toml-utils.ts
 var tomlParser = __toESM(require_lib10(), 1);
@@ -215900,7 +216089,7 @@ function parseDependencyObject(valueNode) {
 };
 }
 async function findVersionCatalogDeclarations(filePath) {
-const catalogData = parseVersionCatalog(await
+const catalogData = parseVersionCatalog(await readFile8(filePath, "utf-8"));
 return {
 depDecls: catalogData.dependencies,
 versionDecls: catalogData.versions
@@ -216105,7 +216294,7 @@ var GradleFixingManager = class {
 newText: constraintStr + "\n"
 };
 } else {
-const fileContent = await
+const fileContent = await readFile9(targetBuildFile, "utf-8");
 const indentationSize = getIndentationSize(fileContent);
 const prependNewline = fileContent.split("\n").some((line) => !line.trim());
 const finalConstraintStr = getDependencyConstraintString(
@@ -216292,7 +216481,7 @@ var GradleFixingManager = class {
 async createConstraintsForFile(buildFile, fixes) {
 const { dependenciesBlocks, constraintsBlocks } = await findDependencyDeclsAndCatalogFiles(buildFile);
 const fileType = buildFile.endsWith(".kts") ? "kotlin" : "groovy";
-const fileContent = existsSync6(buildFile) ? await
+const fileContent = existsSync6(buildFile) ? await readFile9(buildFile, "utf-8") : "";
 const indentationSize = getIndentationSize(fileContent);
 const constraintDeclarations = fixes.map(({ dependencyDetails, fixedVersion }) => {
 const [groupId, artifactId] = dependencyDetails.packageName.split(":");
@@ -216399,7 +216588,7 @@ import { resolve as resolve11 } from "node:path";

 // ../utils/src/pom-utils.ts
 var import_parse_xml2 = __toESM(require_dist(), 1);
-import { readFile as
+import { readFile as readFile10 } from "node:fs/promises";
 import { existsSync as existsSync7 } from "node:fs";
 import { resolve as resolve10, join as join8, relative as relative3, dirname as dirname8 } from "node:path";

@@ -216535,7 +216724,7 @@ async function loadPom(rootDir, pomFile, validateFile, visited = /* @__PURE__ */
 if (!validatedPomFile || !existsSync7(validatedPomFile)) return void 0;
 if (visited.has(validatedPomFile)) return void 0;
 visited.add(validatedPomFile);
-const sourceText = await
+const sourceText = await readFile10(validatedPomFile, "utf-8");
 const xml2 = (0, import_parse_xml2.parseXml)(sourceText, { includeOffsets: true });
 const indentation = inferIndentationFromParsedXml(xml2, sourceText);
 const pom = {
@@ -217823,11 +218012,11 @@ import { dirname as dirname10, resolve as resolve14 } from "node:path";
 import assert7 from "node:assert";

 // ../fixing-management/src/fixing-management/maven/gradle-lockfile-utils.ts
-import { readFile as
+import { readFile as readFile11 } from "node:fs/promises";
 import { resolve as resolve13 } from "node:path";
 async function loadLockFile(rootDir, lockfilePath) {
 const file = resolve13(rootDir, lockfilePath);
-return { rootDir, file, sourceText: await
+return { rootDir, file, sourceText: await readFile11(file, "utf-8") };
 }

 // ../fixing-management/src/fixing-management/maven/handlers/gradle-lockfile-upgrade-handler.ts
@@ -217898,13 +218087,13 @@ var GradleLockfileUpgradeHandler = class {

 // ../fixing-management/src/fixing-management/maven/handlers/sbt-upgrade-handler.ts
 import { existsSync as existsSync8 } from "node:fs";
-import { readFile as
+import { readFile as readFile13 } from "node:fs/promises";
 import { basename as basename4, dirname as dirname11, resolve as resolve15 } from "node:path";
 import assert8 from "node:assert";

 // ../fixing-management/src/fixing-management/maven/sbt-project-utils.ts
 var import_good_enough_parser2 = __toESM(require_cjs(), 1);
-import { readFile as
+import { readFile as readFile12 } from "node:fs/promises";
 var pathQuery2 = import_good_enough_parser2.query.sym((ctx, { offset, value: value2 }) => {
 return { ...ctx, pathOffset: offset, pathValue: value2 };
 }).many(
@@ -218081,7 +218270,7 @@ var treeQuery2 = import_good_enough_parser2.query.tree({
 });
 async function loadSbtProject(filePath, acc = { mem: {}, moduleIds: [] }) {
 const scalaLang = import_good_enough_parser2.lang.createLang("scala");
-const cursor = scalaLang.parse(await
+const cursor = scalaLang.parse(await readFile12(filePath, "utf-8"));
 return scalaLang.query(cursor, treeQuery2, acc) ?? acc;
 }
 function evaluate2(v) {
@@ -218224,7 +218413,7 @@ ${indent(1, indentationSize)}`)}
 } else {
 let fileContent;
 try {
-fileContent = await
+fileContent = await readFile13(dependencyOverridesFile, "utf-8");
 } catch (error) {
 ctxt.statusUpdater?.({
 status: "error",
@@ -218353,7 +218542,7 @@ var MavenSocketUpgradeManager = class {

 // ../fixing-management/src/fixing-management/maven/sbt-fixing-manager.ts
 import { existsSync as existsSync9 } from "node:fs";
-import { readFile as
+import { readFile as readFile14 } from "node:fs/promises";
 import { join as join9 } from "node:path";
 var SbtFixingManager = class {
 constructor(rootDir, subprojectPath, otherModulesCommunicator) {
@@ -218555,7 +218744,7 @@ var SbtFixingManager = class {
 `
 };
 } else {
-const fileContent = await
+const fileContent = await readFile14(workspaceBuildSbtPath, "utf-8");
 const prependNewline = fileContent.split("\n").some((line) => !line.trim());
 return {
 file: workspaceBuildSbtPath,
@@ -218634,7 +218823,7 @@ ${indent(1, indentationSize)}`)}
 newText: overrideText
 };
 } else {
-const fileContent = await
+const fileContent = await readFile14(workspaceBuildSbtPath, "utf-8");
 const indentationSize = getIndentationSize(fileContent);
 const prependNewline = fileContent.length > 0 && !fileContent.endsWith("\n\n");
 const overrideText = `dependencyOverrides ++= Seq(
@@ -218653,7 +218842,7 @@ ${indent(1, indentationSize)}`)}

 // ../fixing-management/src/fixing-management/npm/npm-socket-upgrade-manager.ts
 import { existsSync as existsSync15 } from "fs";
-import { readFile as
+import { readFile as readFile20 } from "fs/promises";
 import assert10 from "node:assert";
 import { dirname as dirname14, join as join14, relative as relative9, resolve as resolve24 } from "path";

@@ -224295,53 +224484,131 @@ var Spinner2 = class _Spinner {

 // ../utils/dist/telemetry/telemetry-collector.js
 import { execFile as execFile3 } from "child_process";
-import {
+import { readFile as readFile15 } from "fs/promises";
+import { platform as platform7 } from "process";
 import { promisify as promisify2 } from "util";
 var execFileAsync2 = promisify2(execFile3);
-var TelemetryCollector2 = class {
+var TelemetryCollector2 = class _TelemetryCollector {
+pid;
+previousCpuState;
+clockTicksPerSecond;
+pageSize;
+isCollecting = false;
+constructor(pid) {
+this.pid = pid;
+}
+static create(pid) {
+if (!Number.isInteger(pid) || pid <= 0 || !["darwin", "linux", "win32"].includes(platform7))
+return void 0;
+return new _TelemetryCollector(pid);
+}
 /**
-* Collect
-* Uses OS-specific
-*
-* @param pid - The process ID to query
-* @returns TelemetryMetrics or undefined if the process doesn't exist or query fails
+* Collect metrics for the child process.
+* Uses OS-specific methods to query memory and CPU usage.
 */
-async collectChildProcessMetrics(
-if (
+async collectChildProcessMetrics() {
+if (this.isCollecting) {
 return void 0;
 }
+this.isCollecting = true;
 try {
-
-
-
+if (platform7 === "darwin") {
+return await this.collectDarwinProcessMetrics();
+}
+if (platform7 === "linux") {
+return await this.collectLinuxProcessMetrics();
+}
+if (platform7 === "win32") {
+return await this.collectWindowsProcessMetrics();
 }
 return void 0;
 } catch {
 return void 0;
+} finally {
+this.isCollecting = false;
 }
 }
 /**
-* Collect metrics
+* Collect metrics on macOS using ps command.
 */
-async
+async collectDarwinProcessMetrics() {
 try {
-const { stdout } = await execFileAsync2("ps", ["-o", "rss=,pcpu=", "-p", String(pid)], {
+const { stdout } = await execFileAsync2("ps", ["-o", "rss=,pcpu=", "-p", String(this.pid)], {
 timeout: 5e3
 });
+const parts = stdout.trim().split(/\s+/);
+if (parts.length < 2)
+return void 0;
+const rssKb = parseInt(parts[0], 10);
+const cpuPercent = parseFloat(parts[1]);
+if (isNaN(rssKb) || isNaN(cpuPercent))
+return void 0;
+return {
+rss: rssKb * 1024,
+// Convert KB to bytes
+// Note: cpuPercent can exceed 100% on multi-core systems (e.g., 250% = 2.5 cores used)
+cpuPercent
+};
+} catch {
+return void 0;
+}
+}
+/**
+* Collect metrics on Linux using /proc filesystem.
+* This works on all Linux distributions including Alpine (BusyBox).
+*/
+async collectLinuxProcessMetrics() {
+try {
+const statmContent = await readFile15(`/proc/${this.pid}/statm`, "utf-8");
+const statmParts = statmContent.trim().split(/\s+/);
+if (statmParts.length < 2) {
+return void 0;
+}
+const residentPages = parseInt(statmParts[1], 10);
+if (isNaN(residentPages)) {
+return void 0;
+}
+const pageSize = await this.getPageSize();
+const rssBytes = residentPages * pageSize;
+const statContent = await readFile15(`/proc/${this.pid}/stat`, "utf-8");
+const cpuPercent = await this.calculateCpuPercent(statContent);
+return {
+rss: rssBytes,
+cpuPercent
+};
+} catch {
+return void 0;
+}
+}
+/**
+* Collect metrics on Windows using PowerShell.
+* Uses Get-Process for memory and CPU time tracking for CPU%.
+*/
+async collectWindowsProcessMetrics() {
+try {
+const psScript = "$p = Get-Process -Id $args[0] -ErrorAction Stop; @{WorkingSet64=$p.WorkingSet64; TotalMs=$p.TotalProcessorTime.TotalMilliseconds} | ConvertTo-Json";
+const { stdout } = await execFileAsync2("powershell", ["-NoProfile", "-Command", psScript, String(this.pid)], { timeout: 5e3 });
 const trimmed = stdout.trim();
 if (!trimmed) {
 return void 0;
 }
-const
-
+const data2 = JSON.parse(trimmed);
+const rssBytes = data2.WorkingSet64;
+const totalCpuMs = data2.TotalMs;
+if (typeof rssBytes !== "number" || typeof totalCpuMs !== "number") {
 return void 0;
 }
-const
-const
-
-
+const now = Date.now();
+const currentState = { totalCpuTime: totalCpuMs, timestamp: now };
+let cpuPercent = 0;
+if (this.previousCpuState) {
+const timeDeltaMs = now - this.previousCpuState.timestamp;
+const cpuTimeDelta = totalCpuMs - this.previousCpuState.totalCpuTime;
+if (timeDeltaMs > 0 && cpuTimeDelta >= 0) {
+cpuPercent = Math.max(0, cpuTimeDelta / timeDeltaMs * 100);
+}
 }
-
+this.previousCpuState = currentState;
 return {
 rss: rssBytes,
 cpuPercent
@@ -224350,6 +224617,74 @@ var TelemetryCollector2 = class {
 return void 0;
 }
 }
+/**
+* Calculate CPU percentage from /proc/<pid>/stat.
+* Requires tracking state between calls to compute the delta.
+*/
+async calculateCpuPercent(statContent) {
+try {
+const lastParen = statContent.lastIndexOf(")");
+if (lastParen === -1) {
+return 0;
+}
+const fieldsAfterComm = statContent.slice(lastParen + 2).split(/\s+/);
+const utime = parseInt(fieldsAfterComm[11], 10);
+const stime = parseInt(fieldsAfterComm[12], 10);
+if (isNaN(utime) || isNaN(stime)) {
+return 0;
+}
+const totalCpuTime = utime + stime;
+const now = Date.now();
+const clockTicks = await this.getClockTicksPerSecond();
+const currentState = { totalCpuTime, timestamp: now };
+if (!this.previousCpuState) {
+this.previousCpuState = currentState;
+return 0;
+}
+const timeDeltaMs = now - this.previousCpuState.timestamp;
+const cpuTimeDelta = totalCpuTime - this.previousCpuState.totalCpuTime;
+this.previousCpuState = currentState;
+if (timeDeltaMs <= 0 || cpuTimeDelta < 0) {
+return 0;
+}
+const cpuPercent = cpuTimeDelta * 1e3 * 100 / (clockTicks * timeDeltaMs);
+return Math.max(0, cpuPercent);
+} catch {
+return 0;
+}
+}
+/**
+* Get the system page size in bytes (cached after first call).
+*/
+async getPageSize() {
+if (this.pageSize !== void 0) {
+return this.pageSize;
+}
+try {
+const { stdout } = await execFileAsync2("getconf", ["PAGE_SIZE"], { timeout: 1e3 });
+const parsed = parseInt(stdout.trim(), 10);
+this.pageSize = isNaN(parsed) ? 4096 : parsed;
+} catch {
+this.pageSize = 4096;
+}
+return this.pageSize;
+}
+/**
+* Get the number of clock ticks per second (used for CPU time conversion).
+*/
+async getClockTicksPerSecond() {
+if (this.clockTicksPerSecond !== void 0) {
+return this.clockTicksPerSecond;
+}
+try {
+const { stdout } = await execFileAsync2("getconf", ["CLK_TCK"], { timeout: 1e3 });
+const ticks = parseInt(stdout.trim(), 10);
+this.clockTicksPerSecond = isNaN(ticks) ? 100 : ticks;
+} catch {
+this.clockTicksPerSecond = 100;
+}
+return this.clockTicksPerSecond;
+}
 };

 // ../utils/dist/telemetry/analyzer-telemetry-server.js
@@ -224452,14 +224787,16 @@ function startHeartbeat2(options) {
 return () => clearInterval(timer);
 }
 var DEFAULT_TELEMETRY_INTERVAL_MS2 = 5e3;
-function startTelemetry2(
-const collector =
+function startTelemetry2(subprocess, handler) {
+const collector = TelemetryCollector2.create(subprocess.pid);
+if (!collector)
+return;
+handler.onInit?.(subprocess);
 const intervalMs = handler.intervalMs ?? DEFAULT_TELEMETRY_INTERVAL_MS2;
 const collectAndReport = async () => {
-const metrics = await collector.collectChildProcessMetrics(
-if (metrics)
+const metrics = await collector.collectChildProcessMetrics();
+if (metrics)
 handler.onTelemetry(metrics);
-}
 };
 collectAndReport().catch((err) => {
 logger.debug("Initial telemetry collection failed:", err);
@@ -224471,6 +224808,42 @@ function startTelemetry2(pid, handler) {
 timer.unref?.();
 return () => clearInterval(timer);
 }
+function wrapWithMemoryLimit2(cmd, options) {
+const memoryLimitKiB = Math.ceil(options.memoryLimitInMB * 1024);
+if (memoryLimitKiB <= 0)
+throw new Error(`memoryLimitInMB * 1024 must be a positive number, got: ${memoryLimitKiB}`);
+switch (process.platform) {
+case "darwin":
+{
+const prevHandler = options.telemetryHandler;
+let subprocess;
+options.telemetryHandler = {
+intervalMs: prevHandler?.intervalMs ?? 2e3,
+onInit: (sp) => {
+subprocess = sp;
+prevHandler?.onInit?.(sp);
+},
+onTelemetry(metrics) {
+if (subprocess?.exitCode === null && metrics.rss >= memoryLimitKiB * 1024) {
+logger.debug(`Memory limit of ${options.memoryLimitInMB} MiB exceeded (RSS: ${(metrics.rss / 1024 / 1024).toFixed(2)} MiB). Terminating process.`);
+subprocess.kill(options.killSignal ?? "SIGTERM");
+subprocess = void 0;
+}
+prevHandler?.onTelemetry(metrics);
+}
+};
+}
+break;
+case "linux":
+return [
+"sh",
+...typeof cmd === "string" ? ["-c", `ulimit -v ${memoryLimitKiB} && eval "$1"`, "_", cmd] : ["-c", `ulimit -v ${memoryLimitKiB} && exec "$@"`, "_", ...cmd]
+];
+default:
+logger.debug(`Memory limit enforcement is not supported on platform: ${process.platform}. Ignoring memory limit option.`);
+}
+return cmd;
+}
 async function execNeverFail3(cmd, dir, options) {
 const stopHeartbeat = options?.heartbeat ? startHeartbeat2(options.heartbeat) : void 0;
 let stopTelemetry;
@@ -224480,6 +224853,8 @@ async function execNeverFail3(cmd, dir, options) {
 analyzerTelemetryServer = new AnalyzerTelemetryServer2(options.analyzerTelemetryHandler);
 analyzerTelemetryFilePath = await analyzerTelemetryServer.start();
 }
+if (options?.memoryLimitInMB !== void 0)
+cmd = wrapWithMemoryLimit2(cmd, options);
 try {
 return await new Promise((resolve45) => {
 let args2;
@@ -224490,9 +224865,8 @@ async function execNeverFail3(cmd, dir, options) {
 const childProcess = execFile4(cmd, args2, { ...options, env, cwd: dir, maxBuffer: 1024 * 1024 * 1024, shell: args2 === void 0, timeout }, (error, stdout, stderr) => {
 resolve45({ error, stdout, stderr });
 });
-if (options?.telemetryHandler && childProcess.pid)
-stopTelemetry = startTelemetry2(childProcess
-}
+if (options?.telemetryHandler && childProcess.pid)
+stopTelemetry = startTelemetry2(childProcess, options.telemetryHandler);
 if (options?.pipe) {
 childProcess.stdout?.on("data", (data2) => {
 Spinner2.instance().suspend(() => {
@@ -224708,12 +225082,12 @@ async function getWorkspacePathsFromPnpmLockFile(lockFileDir, useDotWhenNoWorksp
|
|
|
224708
225082
|
// ../fixing-management/src/fixing-management/npm/yarn-utils.ts
|
|
224709
225083
|
var lockfile = __toESM(require_lockfile(), 1);
|
|
224710
225084
|
var import_parsers = __toESM(require_lib26(), 1);
|
|
224711
|
-
import { readFile as
|
|
225085
|
+
import { readFile as readFile16 } from "fs/promises";
|
|
224712
225086
|
import { resolve as resolve19 } from "path";
|
|
224713
225087
|
async function getYarnType(projectDir) {
|
|
224714
225088
|
const yarnLockLocation = resolve19(projectDir, "yarn.lock");
|
|
224715
225089
|
try {
|
|
224716
|
-
const content = await
|
|
225090
|
+
const content = await readFile16(yarnLockLocation, "utf8");
|
|
224717
225091
|
if (!content || content.length === 0) {
|
|
224718
225092
|
return void 0;
|
|
224719
225093
|
}
|
|
@@ -224734,7 +225108,7 @@ async function getYarnType(projectDir) {
|
|
|
224734
225108
|
}
|
|
224735
225109
|
|
|
224736
225110
|
// ../fixing-management/src/fixing-management/npm/npm-fixing-manager.ts
|
|
224737
|
-
import { readFile as
|
|
225111
|
+
import { readFile as readFile17, writeFile as writeFile6 } from "fs/promises";
|
|
224738
225112
|
import { relative as relative5, resolve as resolve20 } from "path";
|
|
224739
225113
|
|
|
224740
225114
|
// ../fixing-management/src/fixing-management/npm/npm-ecosystem-fixing-manager.ts
|
|
@@ -224796,7 +225170,7 @@ var NpmFixingManager = class extends NpmEcosystemFixingManager {
|
|
|
224796
225170
|
}
|
|
224797
225171
|
async applySecurityFixesSpecificPackageManager(fixes) {
|
|
224798
225172
|
const pkgLockLocation = resolve20(this.rootDir, this.subprojectPath, "package-lock.json");
|
|
224799
|
-
const packageLockContent = await
|
|
225173
|
+
const packageLockContent = await readFile17(pkgLockLocation, "utf-8");
|
|
224800
225174
|
const getPackageName = (pkgPath) => {
|
|
224801
225175
|
const strings = pkgPath.split("node_modules/");
|
|
224802
225176
|
return strings[strings.length - 1];
|
|
@@ -224834,7 +225208,7 @@ var NpmFixingManager = class extends NpmEcosystemFixingManager {
|
|
|
224834
225208
|
};
|
|
224835
225209
|
|
|
224836
225210
|
// ../fixing-management/src/fixing-management/npm/pnpm-fixing-manager.ts
|
|
224837
|
-
import { readFile as
|
|
225211
|
+
import { readFile as readFile18, writeFile as writeFile7 } from "fs/promises";
|
|
224838
225212
|
import { relative as relative6, resolve as resolve21 } from "path";
|
|
224839
225213
|
var import_yaml = __toESM(require_dist10(), 1);
|
|
224840
225214
|
var import_lockfile_file2 = __toESM(require_lib25(), 1);
|
|
@@ -224986,7 +225360,7 @@ function getVersionNumber(version4) {
|
|
|
224986
225360
|
return match2 ? `${match2[1]}` : version4;
|
|
224987
225361
|
}
|
|
224988
225362
|
async function readYamlFile(workspaceYamlFile) {
|
|
224989
|
-
const workspaceYamlString = await
|
|
225363
|
+
const workspaceYamlString = await readFile18(workspaceYamlFile, "utf8");
|
|
224990
225364
|
const parser2 = new import_yaml.Parser();
|
|
224991
225365
|
const [ast] = parser2.parse(workspaceYamlString);
|
|
224992
225366
|
return ast;
|
|
@@ -225023,7 +225397,7 @@ function updateCatalog(update3, map2) {
|
|
|
225023
225397
|
}
|
|
225024
225398
|
|
|
225025
225399
|
// ../fixing-management/src/fixing-management/npm/yarn-fixing-manager.ts
|
|
225026
|
-
import { readFile as
|
|
225400
|
+
import { readFile as readFile19, writeFile as writeFile8 } from "fs/promises";
|
|
225027
225401
|
import { relative as relative8, resolve as resolve23 } from "path";
|
|
225028
225402
|
|
|
225029
225403
|
// ../utils/src/package-utils.ts
|
|
@@ -225131,7 +225505,7 @@ var YarnFixingManager = class extends NpmEcosystemFixingManager {
|
|
|
225131
225505
|
logger.debug("Installation completed.");
|
|
225132
225506
|
}
|
|
225133
225507
|
async getYarnLockObj(filePath) {
|
|
225134
|
-
const fileString = await
|
|
225508
|
+
const fileString = await readFile19(filePath, "utf8");
|
|
225135
225509
|
const yarnType = await this.getYarnType();
|
|
225136
225510
|
return yarnType === "classic" ? (0, import_yarnlock_parse_raw.parseYarnLockRawV1)(fileString) : (0, import_yarnlock_parse_raw.parseYarnLockRawV2)(fileString);
|
|
225137
225511
|
}
|
|
@@ -225449,7 +225823,7 @@ var NpmSocketUpgradeManager = class {
|
|
|
225449
225823
|
const patches = [];
|
|
225450
225824
|
let packageJsonContent;
|
|
225451
225825
|
try {
|
|
225452
|
-
packageJsonContent = await
|
|
225826
|
+
packageJsonContent = await readFile20(resolve24(this.rootDir, mf.file), "utf-8");
|
|
225453
225827
|
} catch (error) {
|
|
225454
225828
|
ctxt.statusUpdater?.({
|
|
225455
225829
|
status: "error",
|
|
@@ -225512,7 +225886,7 @@ var RushFixingManager = class {
|
|
|
225512
225886
|
};
|
|
225513
225887
|
|
|
225514
225888
|
// ../fixing-management/src/fixing-management/nuget/nuget-fixing-manager.ts
|
|
225515
|
-
import { readFile as
|
|
225889
|
+
import { readFile as readFile21, writeFile as writeFile9 } from "fs/promises";
|
|
225516
225890
|
import { join as join15 } from "path";
|
|
225517
225891
|
|
|
225518
225892
|
// ../utils/src/nuget-utils.ts
|
|
@@ -225615,14 +225989,14 @@ var NugetFixingManager = class {
|
|
|
225615
225989
|
if (projectFiles.length !== 1)
|
|
225616
225990
|
throw new Error("Applying fixes to workspaces with more than 1 project file currently not supported");
|
|
225617
225991
|
const projectFilePath = join15(this.getAbsWsPath(wsPath), projectFiles[0]);
|
|
225618
|
-
const initialProjectFile = await
|
|
225992
|
+
const initialProjectFile = await readFile21(projectFilePath, "utf-8");
|
|
225619
225993
|
const initialLockFile = await this.restoreWorkspaceAndParseLockFile(wsPath);
|
|
225620
225994
|
await applySeries(fixesWithId, async ({ fixId, vulnerabilityFixes }) => {
|
|
225621
225995
|
await this.applySecurityFixesForWorkspace(wsPath, projectFilePath, vulnerabilityFixes, dependencyTree);
|
|
225622
225996
|
signalFixApplied2?.(fixId, this.subprojectPath, wsPath, vulnerabilityFixes);
|
|
225623
225997
|
});
|
|
225624
|
-
const finalProjectFile = await
|
|
225625
|
-
const finalLockFile = JSON.parse(await
|
|
225998
|
+
const finalProjectFile = await readFile21(projectFilePath, "utf-8");
|
|
225999
|
+
const finalLockFile = JSON.parse(await readFile21(this.getLockFilePath(wsPath), "utf-8"));
|
|
225626
226000
|
await writeFile9(projectFilePath, initialProjectFile);
|
|
225627
226001
|
await writeFile9(this.getLockFilePath(wsPath), JSON.stringify(initialLockFile, null, 2));
|
|
225628
226002
|
return { projectFile: finalProjectFile, lockFile: finalLockFile };
|
|
@@ -225654,7 +226028,7 @@ var NugetFixingManager = class {
|
|
|
225654
226028
|
}
|
|
225655
226029
|
}
|
|
225656
226030
|
async applySecurityFixesForWorkspace(wsPath, projectFilePath, vulnFixes, dependencyTree) {
|
|
225657
|
-
const initialProjectFile = await
|
|
226031
|
+
const initialProjectFile = await readFile21(projectFilePath, "utf-8");
|
|
225658
226032
|
const initialLockFile = await this.restoreWorkspaceAndParseLockFile(wsPath);
|
|
225659
226033
|
const typeCache = new Cache();
|
|
225660
226034
|
const requestedCache = new Cache();
|
|
@@ -225744,7 +226118,7 @@ var NugetFixingManager = class {
|
|
|
225744
226118
|
async restoreWorkspaceAndParseLockFile(wsPath) {
|
|
225745
226119
|
const succeeded = await execAndLogOnFailure2("dotnet restore --use-lock-file", this.getAbsWsPath(wsPath));
|
|
225746
226120
|
if (!succeeded) throw new Error(`Error applying fix - could not restore project ${this.subprojectPath}/${wsPath}`);
|
|
225747
|
-
return JSON.parse(await
|
|
226121
|
+
return JSON.parse(await readFile21(this.getLockFilePath(wsPath), "utf-8"));
|
|
225748
226122
|
}
|
|
225749
226123
|
getLockFilePath(wsPath, lockFileName = "packages.lock.json") {
|
|
225750
226124
|
return join15(this.getAbsWsPath(wsPath), lockFileName);
|
|
@@ -225822,7 +226196,7 @@ import { dirname as dirname16, resolve as resolve26 } from "node:path";
|
|
|
225822
226196
|
|
|
225823
226197
|
// ../utils/src/nuget-project-utils.ts
|
|
225824
226198
|
var import_parse_xml3 = __toESM(require_dist(), 1);
|
|
225825
|
-
import { readFile as
|
|
226199
|
+
import { readFile as readFile22 } from "node:fs/promises";
|
|
225826
226200
|
import { dirname as dirname15, join as join17, relative as relative10, resolve as resolve25, basename as basename8, extname } from "node:path";
|
|
225827
226201
|
import { existsSync as existsSync16 } from "node:fs";
|
|
225828
226202
|
|
|
@@ -227380,7 +227754,7 @@ async function loadNuGetProjectOrTarget(rootDir, projectFile, mainProject, visit
|
|
|
227380
227754
|
if (!validatedProjectPath || !existsSync16(validatedProjectPath)) return void 0;
|
|
227381
227755
|
if (visited.has(validatedProjectPath)) return void 0;
|
|
227382
227756
|
visited.set(validatedProjectPath);
|
|
227383
|
-
const sourceText = await
|
|
227757
|
+
const sourceText = await readFile22(validatedProjectPath, "utf-8");
|
|
227384
227758
|
const xml2 = (0, import_parse_xml3.parseXml)(sourceText, { includeOffsets: true });
|
|
227385
227759
|
const indentation = inferIndentationFromParsedXml2(xml2, sourceText);
|
|
227386
227760
|
const currentProject = {
|
|
@@ -227453,7 +227827,7 @@ async function loadNuGetProjectOrTarget(rootDir, projectFile, mainProject, visit
|
|
|
227453
227827
|
async function loadPackagesConfig(rootDir, file, validateFile) {
|
|
227454
227828
|
const validatedConfigPath = validateFile(resolve25(rootDir, file));
|
|
227455
227829
|
if (!validatedConfigPath || !existsSync16(validatedConfigPath)) return void 0;
|
|
227456
|
-
const sourceText = await
|
|
227830
|
+
const sourceText = await readFile22(validatedConfigPath, "utf-8");
|
|
227457
227831
|
const configXml = (0, import_parse_xml3.parseXml)(sourceText, { includeOffsets: true });
|
|
227458
227832
|
const packages = extractPackagesFromXml(configXml, sourceText);
|
|
227459
227833
|
return {
|
|
@@ -228274,17 +228648,17 @@ import { dirname as dirname18, relative as relative11, resolve as resolve28 } fr
|
|
|
228274
228648
|
var import_picomatch6 = __toESM(require_picomatch2(), 1);
|
|
228275
228649
|
var import_semver4 = __toESM(require_semver2(), 1);
|
|
228276
228650
|
import assert12 from "node:assert";
|
|
228277
|
-
import { readFile as
|
|
228651
|
+
import { readFile as readFile24, writeFile as writeFile10 } from "node:fs/promises";
|
|
228278
228652
|
|
|
228279
228653
|
// ../utils/src/cargo-utils.ts
|
|
228280
|
-
import { readFile as
|
|
228654
|
+
import { readFile as readFile23 } from "node:fs/promises";
|
|
228281
228655
|
import { dirname as dirname17, resolve as resolve27 } from "node:path";
|
|
228282
228656
|
var import_picomatch5 = __toESM(require_picomatch2(), 1);
|
|
228283
228657
|
async function getCargoTomlFilesForCargoLockFile(rootDir, cargoLockFile, cargoTomlFiles) {
|
|
228284
228658
|
const lockDir = dirname17(cargoLockFile);
|
|
228285
228659
|
const rootTomlFile = cargoTomlFiles.find((file) => dirname17(file) === lockDir);
|
|
228286
228660
|
if (!rootTomlFile) return void 0;
|
|
228287
|
-
const rootTomlContent = await
|
|
228661
|
+
const rootTomlContent = await readFile23(resolve27(rootDir, rootTomlFile), "utf-8");
|
|
228288
228662
|
const toml = parseTOML2(rootTomlContent);
|
|
228289
228663
|
if (!toml) return void 0;
|
|
228290
228664
|
const memberPatterns = [];
|
|
@@ -228381,7 +228755,7 @@ var CargoSocketUpgradeManager = class {
|
|
|
228381
228755
|
const path9 = resolve28(this.rootDir, file);
|
|
228382
228756
|
if (!restoreMap.has(path9)) {
|
|
228383
228757
|
restoreMap.set(path9, {
|
|
228384
|
-
content: await
|
|
228758
|
+
content: await readFile24(path9, "utf-8"),
|
|
228385
228759
|
artifacts: []
|
|
228386
228760
|
});
|
|
228387
228761
|
}
|
|
@@ -228445,7 +228819,7 @@ var CargoSocketUpgradeManager = class {
  const fullPath = resolve28(this.rootDir, mf.file);
  let content;
  try {
- content = await
+ content = await readFile24(fullPath, "utf-8");
  } catch (error) {
  ctxt.statusUpdater?.({
  status: "error",
@@ -228545,7 +228919,7 @@ var CargoSocketUpgradeManager = class {
  const fullPath = resolve28(this.rootDir, tomlFile);
  let content;
  try {
- content = await
+ content = await readFile24(fullPath, "utf-8");
  } catch (error) {
  ctxt.statusUpdater?.({
  status: "error",
@@ -228631,7 +229005,7 @@ ${newDependencyLine}`
  var import_picomatch8 = __toESM(require_picomatch2(), 1);
  import { dirname as dirname20, join as join19, resolve as resolve31 } from "node:path";
  import assert13 from "node:assert";
- import { readFile as
+ import { readFile as readFile27 } from "node:fs/promises";

  // ../fixing-management/src/fixing-management/pip/pip-requirements-parser.ts
  function parsePipRequirementsFileLoosely(src, _options) {
@@ -228984,7 +229358,7 @@ function createPep508VersionPatches(file, idx, requirement, oldVersion, upgradeV

  // ../utils/src/pip-utils.ts
  import { existsSync as existsSync17 } from "node:fs";
- import { readFile as
+ import { readFile as readFile26 } from "node:fs/promises";
  import { dirname as dirname19, resolve as resolve30 } from "node:path";
  import util4 from "node:util";

@@ -228993,7 +229367,7 @@ var import_lodash6 = __toESM(require_lodash(), 1);
  var import_semver5 = __toESM(require_semver2(), 1);
  import { execFileSync } from "child_process";
  import { constants as constants3 } from "fs";
- import { access as access3, readFile as
+ import { access as access3, readFile as readFile25 } from "fs/promises";
  import { join as join18, resolve as resolve29 } from "path";
  import util3 from "util";
  var { once: once2 } = import_lodash6.default;
@@ -229016,14 +229390,14 @@ function normalizePackageName(packageName) {
  return packageName.replace(/[-_.]+/g, "-").toLowerCase();
  }
  async function isSetupPySetuptools(file) {
- const content = await
+ const content = await readFile26(file, "utf-8");
  return content.includes("setup(") && (/^\s*from\s+(?:setuptools|distutils\.core)\s+import\s+.*setup/m.test(content) || /^\s*import\s+(?:setuptools|distutils\.core)/m.test(content));
  }
  async function getPyprojectTomlFilesForLockFile(rootDir, uvLockfile, pyprojectFiles) {
  const lockDir = dirname19(uvLockfile);
  const rootTomlFile = pyprojectFiles.find((file) => dirname19(file) === lockDir);
  if (!rootTomlFile) return void 0;
- const rootTomlContent = await
+ const rootTomlContent = await readFile26(resolve30(rootDir, rootTomlFile), "utf-8");
  const toml = parseTOML2(rootTomlContent);
  if (!toml) return void 0;
  const memberPatterns = [];
@@ -229196,7 +229570,7 @@ var PipSocketUpgradeManager = class {
  const fullPath = resolve31(this.rootDir, lockFile);
  let content;
  try {
- content = await
+ content = await readFile27(fullPath, "utf-8");
  } catch {
  return;
  }
@@ -229254,7 +229628,7 @@ var PipSocketUpgradeManager = class {
  const fullPath = resolve31(this.rootDir, lockFile);
  let content;
  try {
- content = await
+ content = await readFile27(fullPath, "utf-8");
  } catch {
  return;
  }
@@ -229294,7 +229668,7 @@ var PipSocketUpgradeManager = class {
  assert13(artifact.name);
  assert13(artifact.version);
  const fullPath = resolve31(this.rootDir, lockFile);
- const content = await
+ const content = await readFile27(fullPath, "utf-8");
  const packageName = normalizePackageName(artifact.name);
  const patches = [];
  const toml = parseTOML2(content);
@@ -229423,7 +229797,7 @@ var PipSocketUpgradeManager = class {
  const refStart = ref.start;
  const refEnd = ref.end;
  try {
- const content = await
+ const content = await readFile27(fullPath, "utf-8");
  const requirements = parsePipRequirementsFileLoosely(content, { includeLocations: true });
  const foundRequirement = requirements.filter((req) => req.data.type === "ProjectName").find((req) => refStart <= req.location.startIdx && req.location.endIdx <= refEnd);
  if (foundRequirement) {
@@ -229473,7 +229847,7 @@ var PipSocketUpgradeManager = class {
  assert13(artifact.version);
  const patches = [];
  try {
- const content = await
+ const content = await readFile27(fullPath, "utf-8");
  const newText = `${artifact.name}==${upgradeVersion}`;
  patches.push({
  file: requirementsFile,
@@ -229501,7 +229875,7 @@ ${newText}
  const fullPath = resolve31(this.rootDir, tomlFile);
  let content;
  try {
- content = await
+ content = await readFile27(fullPath, "utf-8");
  } catch (error) {
  ctxt.statusUpdater?.({
  status: "error",
@@ -229647,7 +230021,7 @@ ${newText}
  assert13(artifact.version);
  const patches = [];
  try {
- const content = await
+ const content = await readFile27(resolve31(this.rootDir, pyprojectToml), "utf-8");
  const toml = parseTOML2(content);
  if (!toml) {
  return patches;
@@ -229714,7 +230088,7 @@ function parseSourceString(source) {
  async function buildDependencyTreesFromUvLock(rootDir, uvLockFile) {
  let lockToml;
  try {
- const lockContent = await
+ const lockContent = await readFile27(resolve31(rootDir, uvLockFile), "utf-8");
  lockToml = parseTOML2(lockContent);
  } catch {
  return void 0;
@@ -230185,7 +230559,7 @@ function parseGemfileLock(content) {
  }

  // ../fixing-management/src/fixing-management/rubygems/rubygems-socket-upgrade-manager.ts
- import { readFile as
+ import { readFile as readFile28, writeFile as writeFile11 } from "node:fs/promises";

  // ../fixing-management/src/fixing-management/rubygems/rubygems-patch-utils.ts
  function createRubygemVersionPatches(gem, idx, upgradeVersion, rangeStyle, statusUpdater) {
@@ -230398,7 +230772,7 @@ var RubygemsSocketUpgradeManager = class {
  for (const mf of artifact.manifestFiles ?? []) {
  if (this.gemfileLockMatcher(mf.file)) {
  if (ctxt.wsFilter && !ctxt.wsFilter(dirname22(mf.file) || ".")) continue;
- const lockfileContent = await
+ const lockfileContent = await readFile28(resolve33(this.rootDir, mf.file), "utf-8");
  const gemfileLock = parseGemfileLock(lockfileContent);
  const pathGems = [];
  for (const [pathGemName, deps] of gemfileLock.pathDependencies) {
@@ -230449,7 +230823,7 @@ var RubygemsSocketUpgradeManager = class {
  const path9 = resolve33(this.rootDir, file);
  if (!restoreMap.has(path9)) {
  restoreMap.set(path9, {
- content: await
+ content: await readFile28(path9, "utf-8"),
  artifacts: []
  });
  }
@@ -230541,7 +230915,7 @@ var RubygemsSocketUpgradeManager = class {
  const gemfilePatches = [];
  const artifact = ctxt.artifacts[idx];
  try {
- const gemfileContent = await
+ const gemfileContent = await readFile28(resolve33(this.rootDir, gemfilePath), "utf-8");
  const gemfile = parseGemfile(this.rootDir, gemfilePath, gemfileContent);
  const gemspecFiles = /* @__PURE__ */ new Set();
  for (const gem of gemfile.gems) {
@@ -230562,7 +230936,7 @@ var RubygemsSocketUpgradeManager = class {
  let foundInGemspec = false;
  for (const gemspecFile of gemspecFiles) {
  try {
- const gemspecContent = await
+ const gemspecContent = await readFile28(resolve33(this.rootDir, gemspecFile), "utf-8");
  const { parseGemspec: parseGemspec2 } = await Promise.resolve().then(() => (init_gemspec_utils(), gemspec_utils_exports));
  const gemspec = parseGemspec2(this.rootDir, gemspecFile, gemspecContent);
  for (const gem of gemspec.dependencies) {
@@ -230627,7 +231001,7 @@ var RubygemsSocketUpgradeManager = class {
  const [version4] = artifact.version.split("-");
  const patches = [];
  try {
- const sourceText = await
+ const sourceText = await readFile28(resolve33(this.rootDir, file), "utf-8");
  const gemfile = parseGemfile(this.rootDir, file, sourceText);
  for (const gem of gemfile.gems) {
  if (evaluate4(gem.name) !== packageName) continue;
@@ -230673,7 +231047,7 @@ var RubygemsSocketUpgradeManager = class {
  const patches = [];
  let content;
  try {
- content = await
+ content = await readFile28(resolve33(this.rootDir, file), "utf-8");
  } catch (error) {
  ctxt.statusUpdater?.({
  status: "error",
@@ -230947,7 +231321,7 @@ function flattenDockerSpec({
  var import_winston2 = __toESM(require_winston(), 1);
  import { Console as Console2 } from "console";
  import { createWriteStream as createWriteStream3 } from "fs";
- import { readFile as
+ import { readFile as readFile29 } from "fs/promises";

  // ../web-compat-utils/dist/util-formatter.js
  import { format as format3 } from "util";
@@ -231150,7 +231524,7 @@ var CLILogger2 = class {
  await this.finish();
  let logContent;
  try {
- logContent = await
+ logContent = await readFile29(logFilePath, "utf-8");
  } catch (e) {
  console.error("Error reading log file", e);
  }
@@ -231195,13 +231569,13 @@ async function detectVariantMaven(projectDir) {
  // ../docker-management/src/maven/gradle-version-detector.ts
  import { existsSync as existsSync20 } from "fs";
  import { join as join23 } from "path";
- import { readFile as
+ import { readFile as readFile30 } from "fs/promises";
  async function detectVariantGradle(projectDir) {
  return sanitizeJvmVariant("GRADLE", projectDir, await detect(projectDir));
  }
  async function detect(projectDir) {
  const gradleWrapperPropertiesPath = join23(projectDir, "gradle", "wrapper", "gradle-wrapper.properties");
- const gradleWrapperProperties = existsSync20(gradleWrapperPropertiesPath) ? (await
+ const gradleWrapperProperties = existsSync20(gradleWrapperPropertiesPath) ? (await readFile30(gradleWrapperPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
  if (!gradleWrapperProperties) return void 0;
  const distributionUrlRegex = /.*gradle-(\d+(\.\d+(\.\d+)?)?)/;
  for (const prop2 of gradleWrapperProperties) {
@@ -231217,13 +231591,13 @@ async function detect(projectDir) {
  // ../docker-management/src/maven/sbt-version-detector.ts
  import { existsSync as existsSync21 } from "fs";
  import { join as join24 } from "path";
- import { readFile as
+ import { readFile as readFile31 } from "fs/promises";
  async function detectVariantSbt(projectDir) {
  return sanitizeJvmVariant("SBT", projectDir, await detect2(projectDir));
  }
  async function detect2(projectDir) {
  const sbtBuildPropertiesPath = join24(projectDir, "project", "build.properties");
- const sbtBuildProperties = existsSync21(sbtBuildPropertiesPath) ? (await
+ const sbtBuildProperties = existsSync21(sbtBuildPropertiesPath) ? (await readFile31(sbtBuildPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
  if (!sbtBuildProperties) return void 0;
  for (const prop2 of sbtBuildProperties) {
  const [key, value2] = prop2.split("=");
@@ -231337,7 +231711,7 @@ async function findReachabilityAnalyzersDockerImage(ecosystem) {
  // ../other-modules-communicator/src/other-modules-communicator.ts
  var import_lodash12 = __toESM(require_lodash(), 1);
  import { rmSync } from "fs";
- import { mkdir as mkdir5, readFile as
+ import { mkdir as mkdir5, readFile as readFile32, writeFile as writeFile12 } from "fs/promises";
  import assert15 from "node:assert";
  import { platform as platform8 } from "os";
  import { join as join27, posix as posix2, relative as relative16, sep as sep3 } from "path";
@@ -231839,7 +232213,7 @@ var OtherModulesCommunicator = class {
  COANA_API_KEY: this.apiKey.type === "present" ? this.apiKey.value : ""
  }
  );
- return JSON.parse(await
+ return JSON.parse(await readFile32(outputFilePathThisProcess, "utf-8")).result;
  }
  async runReachabilityAnalyzerCommand(commandName, ecosystem, subprojectPath, workspacePath, args2, env, rootWorkingDirOverride, displaySubprojectPath) {
  const tmpDir = await this.getTmpDirForSubproject(displaySubprojectPath ?? subprojectPath);
@@ -231913,7 +232287,7 @@ var OtherModulesCommunicator = class {
  rootWorkingDirOverride,
  displaySubprojectPath
  );
- return JSON.parse(await
+ return JSON.parse(await readFile32(outputFilePathThisProcess, "utf-8")).result;
  }
  async runInDocker(ecosystem, image, entryPoint, commandName, args2, subprojectPath, tmpDir, env = process.env) {
  if (!await pullDockerImage(image)) return false;
@@ -233295,7 +233669,7 @@ import { join as join29, relative as relative17, resolve as resolve38 } from "pa

  // ../project-management/src/project-management/ecosystem-management/ecosystem-specs.ts
  import { existsSync as existsSync23 } from "fs";
- import { readdir as readdir5, readFile as
+ import { readdir as readdir5, readFile as readFile33 } from "fs/promises";
  import { join as join28, sep as sep4 } from "path";
  var specs = {
  NPM: [
@@ -233374,7 +233748,7 @@ function packageManagerIfPackageJSONExistsAndValid(packageManager) {
  if (!existsSync23(join28(projectDir, "package.json"))) return void 0;
  const packageJSONPath = join28(projectDir, "package.json");
  try {
- JSON.parse(await
+ JSON.parse(await readFile33(packageJSONPath, "utf-8"));
  return packageManager;
  } catch (e) {
  throw new InvalidProjectFileError(projectDir, "package.json");
@@ -234854,7 +235228,7 @@ var DEFAULT_REPORT_FILENAME_BASE = "coana-report";

  // dist/internal/exclude-dirs-from-configuration-files.js
  import { existsSync as existsSync25 } from "fs";
- import { readFile as
+ import { readFile as readFile34 } from "fs/promises";
  import { basename as basename10, resolve as resolve41 } from "path";
  var import_yaml2 = __toESM(require_dist11(), 1);
  async function inferExcludeDirsFromConfigurationFiles(rootWorkingDir) {
@@ -234868,7 +235242,7 @@ async function inferExcludeDirsFromConfigurationFiles(rootWorkingDir) {
  }
  async function inferExcludeDirsFromSocketConfig(socketConfigFile) {
  try {
- const config3 = (0, import_yaml2.parse)(await
+ const config3 = (0, import_yaml2.parse)(await readFile34(socketConfigFile, "utf8"));
  const version4 = config3.version;
  const ignorePaths = config3[version4 === 1 ? "ignore" : "projectIgnorePaths"];
  if (!ignorePaths)
@@ -234885,7 +235259,7 @@ async function inferExcludeDirsFromSocketConfig(socketConfigFile) {
  // dist/internal/manifest-upload.js
  var import_fast_glob = __toESM(require_out4(), 1);
  var import_ignore3 = __toESM(require_ignore(), 1);
- import { readFile as
+ import { readFile as readFile35 } from "fs/promises";
  import { join as join31 } from "path";
  var DEFAULT_IGNORE_PATTERNS = [
  "**/node_modules/**",
@@ -234901,7 +235275,7 @@ var DEFAULT_IGNORE_PATTERNS = [
  async function loadGitignore(rootDir) {
  try {
  const gitignorePath = join31(rootDir, ".gitignore");
- const content = await
+ const content = await readFile35(gitignorePath, "utf-8");
  return (0, import_ignore3.default)().add(content);
  } catch {
  return void 0;
@@ -236016,7 +236390,7 @@ function toSocketFactsSocketDependencyTree(artifacts, vulnerabilities, tier1Reac
  }

  // dist/internal/vulnerability-scanning.js
- import { readFile as
+ import { readFile as readFile36 } from "fs/promises";

  // ../security-auditor/security-auditor-builder/src/mongo-connection.ts
  var import_mongodb = __toESM(require_lib31(), 1);
@@ -250885,7 +251259,7 @@ async function scanForVulnerabilities(dependencyTree, offlineVulnerabilityScanne
  }
  async function offlineScan(dependencyTree, offlineVulnerabilityScannerDBPath) {
  logger.info("using offline vulnerability scanner db");
- const offlineVulnerabilityScannerDB = JSON.parse(await
+ const offlineVulnerabilityScannerDB = JSON.parse(await readFile36(offlineVulnerabilityScannerDBPath, "utf-8"));
  const { ecosystemToUrlToVulnerabilityDetails, vulnerabilityDatabase } = offlineVulnerabilityScannerDB;
  const coanaSupportedVulnerabilitiesLoader = CoanaSupportedVulnerabilitiesLoader.create(ecosystemToUrlToVulnerabilityDetails);
  const vulnerabilityAccessPathLoader = CoanaSupportedVulnerabilitiesLoader.create(ecosystemToUrlToVulnerabilityDetails);
@@ -250903,7 +251277,7 @@ async function onlineScan(dependencyTree, apiKey, timeout) {
  }

  // dist/version.js
- var version3 = "14.12.159";
+ var version3 = "14.12.161";

  // dist/cli-core.js
  var { mapValues, omit, partition, pickBy: pickBy2 } = import_lodash15.default;
@@ -251979,7 +252353,7 @@ async function initializeComputeFixesAndUpgradePurls(path9, options) {
  var compareReportsCommand = new Command();
  compareReportsCommand.name("compare-reports").argument("<baselineReportPath>", "Path to the baseline report").argument("<newReportPath>", "Path to the new report").option("--api-key <key>", "Set the Coana dashboard API key.").option("-d, --debug", "Enable debug logging", false).option("--no-pr-comment", "Disable pull request comments (only relevant when run from a PR)", true).option("--no-block", "Do not fail with a non-zero exit code when new reachable vulnerabilities are detected", true).option("--ignore-undeterminable-reachability", "Ignore vulnerabilities with undeterminable reachability", false).action(async (baselineReportPath, newReportPath, options) => {
  async function readReport(reportPath) {
- return JSON.parse(await
+ return JSON.parse(await readFile37(reportPath, "utf-8"));
  }
  const baselineReport = await readReport(baselineReportPath);
  const newReport = await readReport(newReportPath);