@coana-tech/cli 14.12.160 → 14.12.161
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.mjs +427 -97
- package/package.json +1 -1
- package/reachability-analyzers-cli.mjs +207 -40
- package/repos/coana-tech/goana/bin/goana-darwin-amd64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-darwin-arm64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-linux-amd64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-linux-arm64.gz +0 -0
- package/repos/coana-tech/javap-service/javap-service.jar +0 -0
package/cli.mjs
CHANGED
@@ -69910,7 +69910,7 @@ var require_lockfile = __commonJS({
 }
 const file = _ref22;
 if (yield exists2(file)) {
-return
+return readFile38(file);
 }
 }
 return null;
@@ -69929,7 +69929,7 @@ var require_lockfile = __commonJS({
 })();
 let readJsonAndFile = exports3.readJsonAndFile = (() => {
 var _ref24 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (loc) {
-const file = yield
+const file = yield readFile38(loc);
 try {
 return {
 object: (0, (_map || _load_map()).default)(JSON.parse(stripBOM2(file))),
@@ -70169,7 +70169,7 @@ var require_lockfile = __commonJS({
 };
 })();
 exports3.copy = copy;
-exports3.readFile =
+exports3.readFile = readFile38;
 exports3.readFileRaw = readFileRaw;
 exports3.normalizeOS = normalizeOS;
 var _fs;
@@ -70267,7 +70267,7 @@ var require_lockfile = __commonJS({
 });
 });
 }
-function
+function readFile38(loc) {
 return _readFile(loc, "utf8").then(normalizeOS);
 }
 function readFileRaw(loc) {
@@ -199895,7 +199895,7 @@ var {
 } = import_index.default;

 // dist/index.js
-import { mkdir as mkdir7, mkdtemp as mkdtemp2, readFile as
+import { mkdir as mkdir7, mkdtemp as mkdtemp2, readFile as readFile37, rm as rm3, writeFile as writeFile17 } from "fs/promises";
 import { tmpdir as tmpdir5 } from "os";
 import { dirname as dirname26, join as join35, resolve as resolve44 } from "path";

@@ -205220,6 +205220,7 @@ var Spinner = class _Spinner {

 // ../utils/src/telemetry/telemetry-collector.ts
 import { execFile } from "child_process";
+import { readFile as readFile3 } from "fs/promises";
 import { platform } from "process";
 import { promisify } from "util";
 var execFileAsync = promisify(execFile);
@@ -205227,14 +205228,44 @@ var TelemetryCollector = class _TelemetryCollector {
 constructor(pid) {
 this.pid = pid;
 }
+previousCpuState;
+clockTicksPerSecond;
+pageSize;
+isCollecting = false;
 static create(pid) {
-if (!Number.isInteger(pid) || pid <= 0 || !["darwin", "linux"].includes(platform)) return void 0;
+if (!Number.isInteger(pid) || pid <= 0 || !["darwin", "linux", "win32"].includes(platform)) return void 0;
 return new _TelemetryCollector(pid);
 }
 /**
-* Collect metrics for
+* Collect metrics for the child process.
+* Uses OS-specific methods to query memory and CPU usage.
 */
 async collectChildProcessMetrics() {
+if (this.isCollecting) {
+return void 0;
+}
+this.isCollecting = true;
+try {
+if (platform === "darwin") {
+return await this.collectDarwinProcessMetrics();
+}
+if (platform === "linux") {
+return await this.collectLinuxProcessMetrics();
+}
+if (platform === "win32") {
+return await this.collectWindowsProcessMetrics();
+}
+return void 0;
+} catch {
+return void 0;
+} finally {
+this.isCollecting = false;
+}
+}
+/**
+* Collect metrics on macOS using ps command.
+*/
+async collectDarwinProcessMetrics() {
 try {
 const { stdout } = await execFileAsync("ps", ["-o", "rss=,pcpu=", "-p", String(this.pid)], {
 timeout: 5e3
@@ -205254,6 +205285,142 @@ var TelemetryCollector = class _TelemetryCollector {
 return void 0;
 }
 }
+/**
+* Collect metrics on Linux using /proc filesystem.
+* This works on all Linux distributions including Alpine (BusyBox).
+*/
+async collectLinuxProcessMetrics() {
+try {
+const statmContent = await readFile3(`/proc/${this.pid}/statm`, "utf-8");
+const statmParts = statmContent.trim().split(/\s+/);
+if (statmParts.length < 2) {
+return void 0;
+}
+const residentPages = parseInt(statmParts[1], 10);
+if (isNaN(residentPages)) {
+return void 0;
+}
+const pageSize = await this.getPageSize();
+const rssBytes = residentPages * pageSize;
+const statContent = await readFile3(`/proc/${this.pid}/stat`, "utf-8");
+const cpuPercent = await this.calculateCpuPercent(statContent);
+return {
+rss: rssBytes,
+cpuPercent
+};
+} catch {
+return void 0;
+}
+}
+/**
+* Collect metrics on Windows using PowerShell.
+* Uses Get-Process for memory and CPU time tracking for CPU%.
+*/
+async collectWindowsProcessMetrics() {
+try {
+const psScript = "$p = Get-Process -Id $args[0] -ErrorAction Stop; @{WorkingSet64=$p.WorkingSet64; TotalMs=$p.TotalProcessorTime.TotalMilliseconds} | ConvertTo-Json";
+const { stdout } = await execFileAsync(
+"powershell",
+["-NoProfile", "-Command", psScript, String(this.pid)],
+{ timeout: 5e3 }
+);
+const trimmed = stdout.trim();
+if (!trimmed) {
+return void 0;
+}
+const data2 = JSON.parse(trimmed);
+const rssBytes = data2.WorkingSet64;
+const totalCpuMs = data2.TotalMs;
+if (typeof rssBytes !== "number" || typeof totalCpuMs !== "number") {
+return void 0;
+}
+const now = Date.now();
+const currentState = { totalCpuTime: totalCpuMs, timestamp: now };
+let cpuPercent = 0;
+if (this.previousCpuState) {
+const timeDeltaMs = now - this.previousCpuState.timestamp;
+const cpuTimeDelta = totalCpuMs - this.previousCpuState.totalCpuTime;
+if (timeDeltaMs > 0 && cpuTimeDelta >= 0) {
+cpuPercent = Math.max(0, cpuTimeDelta / timeDeltaMs * 100);
+}
+}
+this.previousCpuState = currentState;
+return {
+rss: rssBytes,
+cpuPercent
+};
+} catch {
+return void 0;
+}
+}
+/**
+* Calculate CPU percentage from /proc/<pid>/stat.
+* Requires tracking state between calls to compute the delta.
+*/
+async calculateCpuPercent(statContent) {
+try {
+const lastParen = statContent.lastIndexOf(")");
+if (lastParen === -1) {
+return 0;
+}
+const fieldsAfterComm = statContent.slice(lastParen + 2).split(/\s+/);
+const utime = parseInt(fieldsAfterComm[11], 10);
+const stime = parseInt(fieldsAfterComm[12], 10);
+if (isNaN(utime) || isNaN(stime)) {
+return 0;
+}
+const totalCpuTime = utime + stime;
+const now = Date.now();
+const clockTicks = await this.getClockTicksPerSecond();
+const currentState = { totalCpuTime, timestamp: now };
+if (!this.previousCpuState) {
+this.previousCpuState = currentState;
+return 0;
+}
+const timeDeltaMs = now - this.previousCpuState.timestamp;
+const cpuTimeDelta = totalCpuTime - this.previousCpuState.totalCpuTime;
+this.previousCpuState = currentState;
+if (timeDeltaMs <= 0 || cpuTimeDelta < 0) {
+return 0;
+}
+const cpuPercent = cpuTimeDelta * 1e3 * 100 / (clockTicks * timeDeltaMs);
+return Math.max(0, cpuPercent);
+} catch {
+return 0;
+}
+}
+/**
+* Get the system page size in bytes (cached after first call).
+*/
+async getPageSize() {
+if (this.pageSize !== void 0) {
+return this.pageSize;
+}
+try {
+const { stdout } = await execFileAsync("getconf", ["PAGE_SIZE"], { timeout: 1e3 });
+const parsed = parseInt(stdout.trim(), 10);
+this.pageSize = isNaN(parsed) ? 4096 : parsed;
+} catch {
+this.pageSize = 4096;
+}
+return this.pageSize;
+}
+/**
+* Get the number of clock ticks per second (used for CPU time conversion).
+*/
+async getClockTicksPerSecond() {
+if (this.clockTicksPerSecond !== void 0) {
+return this.clockTicksPerSecond;
+}
+try {
+const { stdout } = await execFileAsync("getconf", ["CLK_TCK"], { timeout: 1e3 });
+const ticks = parseInt(stdout.trim(), 10);
+this.clockTicksPerSecond = isNaN(ticks) ? 100 : ticks;
+} catch {
+this.clockTicksPerSecond = 100;
+}
+return this.clockTicksPerSecond;
+}
 };

 // ../utils/src/telemetry/analyzer-telemetry-server.ts
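The calculateCpuPercent method added above converts a delta of CPU ticks read from /proc/<pid>/stat into a percentage of the wall-clock interval between two samples. A minimal standalone sketch of that arithmetic follows, shown only to make the formula easier to read; the function name and the sample numbers are illustrative and not part of the package:

// Two samples of cumulative CPU time (utime + stime, in clock ticks) taken timeDeltaMs apart.
function cpuPercentFromSamples(prevTicks, currTicks, timeDeltaMs, clockTicksPerSecond) {
  const cpuTimeDelta = currTicks - prevTicks;
  if (timeDeltaMs <= 0 || cpuTimeDelta < 0) return 0; // ignore clock skew or counter resets
  // ticks -> ms: (delta / CLK_TCK) * 1000; expressed as a share of the elapsed wall-clock time: * 100
  return Math.max(0, (cpuTimeDelta * 1000 * 100) / (clockTicksPerSecond * timeDeltaMs));
}

// Example: 50 ticks over 1000 ms at CLK_TCK = 100 is 500 ms of CPU time, i.e. 50%.
cpuPercentFromSamples(1200, 1250, 1000, 100); // 50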
@@ -212567,7 +212734,7 @@ function splitLines(text3) {

 // ../fixing-management/src/fixing-management/utils/socket-patch-utils.ts
 import { existsSync as existsSync3 } from "node:fs";
-import { readFile as
+import { readFile as readFile4, writeFile as writeFile3 } from "node:fs/promises";
 import { resolve as resolve4 } from "node:path";

 // ../utils/src/version-comparison/version-satisfies.ts
@@ -214090,7 +214257,7 @@ async function applyPatches(ecosystem, rootDir, patches, ctxt, patchAppliedMessa
 if (!existsSync3(fullPath)) {
 await writeFile3(fullPath, "", "utf-8");
 }
-let fileContent = await
+let fileContent = await readFile4(fullPath, "utf-8");
 const groupedPatches = groupPatches(rootDir, patches2);
 const resolvedPatches = resolveConflicts(ecosystem, groupedPatches, ctxt);
 const sortedPatches = resolvedPatches.sort(
@@ -214198,7 +214365,7 @@ function resolveConflicts(ecosystem, patches, ctxt) {

 // ../utils/src/go-mod-utils.ts
 import { dirname as dirname6, resolve as resolve5 } from "node:path";
-import { readFile as
+import { readFile as readFile5 } from "node:fs/promises";

 // ../utils/src/go-mod-parser.ts
 function recordError(state, message2, col) {
@@ -214894,7 +215061,7 @@ function parseGoMod(content, options = {}) {
 // ../utils/src/go-mod-utils.ts
 async function parseGoModFile(rootDir, goModPath) {
 const fullPath = resolve5(rootDir, goModPath);
-const content = await
+const content = await readFile5(fullPath, "utf-8");
 const result = parseGoMod(content, { includeOffsets: true });
 return {
 ...result,
@@ -215247,12 +215414,12 @@ replace ${modulePath} ${currentVersion} => ${modulePath} ${newVersion}

 // ../fixing-management/src/fixing-management/maven/gradle-fixing-manager.ts
 import { existsSync as existsSync6 } from "node:fs";
-import { readFile as
+import { readFile as readFile9 } from "node:fs/promises";
 import { join as join7, resolve as resolve9 } from "node:path";

 // ../fixing-management/src/fixing-management/utils/coana-patch-application.ts
 import { existsSync as existsSync4 } from "node:fs";
-import { readFile as
+import { readFile as readFile6, writeFile as writeFile4 } from "node:fs/promises";
 import { resolve as resolve7 } from "node:path";
 function detectPatchConflicts(rootDir, patchResults) {
 const patchesByFile = /* @__PURE__ */ new Map();
@@ -215385,7 +215552,7 @@ async function applyPatchResults(ecosystem, rootDir, patchResults) {
 if (!existsSync4(filePath)) {
 await writeFile4(filePath, "", "utf-8");
 }
-let fileContent = await
+let fileContent = await readFile6(filePath, "utf-8");
 for (const patch of sortedPatches) {
 const start = patch.offset;
 const end2 = patch.offset + (patch.oldText?.length ?? 0);
@@ -215397,7 +215564,7 @@ async function applyPatchResults(ecosystem, rootDir, patchResults) {

 // ../fixing-management/src/fixing-management/maven/gradle-build-file-helper.ts
 var import_good_enough_parser = __toESM(require_cjs(), 1);
-import { readFile as
+import { readFile as readFile7 } from "node:fs/promises";

 // ../fixing-management/src/fixing-management/maven/utils.ts
 import { existsSync as existsSync5 } from "node:fs";
@@ -215619,7 +215786,7 @@ var treeQuery = import_good_enough_parser.query.tree({
 });
 async function findDependencyDeclsAndCatalogFiles(filePath) {
 const gradleLang = import_good_enough_parser.lang.createLang("groovy");
-const cursor = gradleLang.parse(await
+const cursor = gradleLang.parse(await readFile7(filePath, "utf-8"));
 const ctx = gradleLang.query(cursor, treeQuery, {
 mem: {},
 depDecls: [],
@@ -215655,7 +215822,7 @@ ${getConstraintsBlockString(groupId, artifactId, classifier, version4, indentati
 }

 // ../fixing-management/src/fixing-management/maven/gradle-version-catalog-helper.ts
-import { readFile as
+import { readFile as readFile8 } from "node:fs/promises";

 // ../utils/src/toml-utils.ts
 var tomlParser = __toESM(require_lib10(), 1);
@@ -215922,7 +216089,7 @@ function parseDependencyObject(valueNode) {
 };
 }
 async function findVersionCatalogDeclarations(filePath) {
-const catalogData = parseVersionCatalog(await
+const catalogData = parseVersionCatalog(await readFile8(filePath, "utf-8"));
 return {
 depDecls: catalogData.dependencies,
 versionDecls: catalogData.versions
@@ -216127,7 +216294,7 @@ var GradleFixingManager = class {
 newText: constraintStr + "\n"
 };
 } else {
-const fileContent = await
+const fileContent = await readFile9(targetBuildFile, "utf-8");
 const indentationSize = getIndentationSize(fileContent);
 const prependNewline = fileContent.split("\n").some((line) => !line.trim());
 const finalConstraintStr = getDependencyConstraintString(
@@ -216314,7 +216481,7 @@ var GradleFixingManager = class {
 async createConstraintsForFile(buildFile, fixes) {
 const { dependenciesBlocks, constraintsBlocks } = await findDependencyDeclsAndCatalogFiles(buildFile);
 const fileType = buildFile.endsWith(".kts") ? "kotlin" : "groovy";
-const fileContent = existsSync6(buildFile) ? await
+const fileContent = existsSync6(buildFile) ? await readFile9(buildFile, "utf-8") : "";
 const indentationSize = getIndentationSize(fileContent);
 const constraintDeclarations = fixes.map(({ dependencyDetails, fixedVersion }) => {
 const [groupId, artifactId] = dependencyDetails.packageName.split(":");
@@ -216421,7 +216588,7 @@ import { resolve as resolve11 } from "node:path";

 // ../utils/src/pom-utils.ts
 var import_parse_xml2 = __toESM(require_dist(), 1);
-import { readFile as
+import { readFile as readFile10 } from "node:fs/promises";
 import { existsSync as existsSync7 } from "node:fs";
 import { resolve as resolve10, join as join8, relative as relative3, dirname as dirname8 } from "node:path";

@@ -216557,7 +216724,7 @@ async function loadPom(rootDir, pomFile, validateFile, visited = /* @__PURE__ */
 if (!validatedPomFile || !existsSync7(validatedPomFile)) return void 0;
 if (visited.has(validatedPomFile)) return void 0;
 visited.add(validatedPomFile);
-const sourceText = await
+const sourceText = await readFile10(validatedPomFile, "utf-8");
 const xml2 = (0, import_parse_xml2.parseXml)(sourceText, { includeOffsets: true });
 const indentation = inferIndentationFromParsedXml(xml2, sourceText);
 const pom = {
@@ -217845,11 +218012,11 @@ import { dirname as dirname10, resolve as resolve14 } from "node:path";
 import assert7 from "node:assert";

 // ../fixing-management/src/fixing-management/maven/gradle-lockfile-utils.ts
-import { readFile as
+import { readFile as readFile11 } from "node:fs/promises";
 import { resolve as resolve13 } from "node:path";
 async function loadLockFile(rootDir, lockfilePath) {
 const file = resolve13(rootDir, lockfilePath);
-return { rootDir, file, sourceText: await
+return { rootDir, file, sourceText: await readFile11(file, "utf-8") };
 }

 // ../fixing-management/src/fixing-management/maven/handlers/gradle-lockfile-upgrade-handler.ts
@@ -217920,13 +218087,13 @@ var GradleLockfileUpgradeHandler = class {

 // ../fixing-management/src/fixing-management/maven/handlers/sbt-upgrade-handler.ts
 import { existsSync as existsSync8 } from "node:fs";
-import { readFile as
+import { readFile as readFile13 } from "node:fs/promises";
 import { basename as basename4, dirname as dirname11, resolve as resolve15 } from "node:path";
 import assert8 from "node:assert";

 // ../fixing-management/src/fixing-management/maven/sbt-project-utils.ts
 var import_good_enough_parser2 = __toESM(require_cjs(), 1);
-import { readFile as
+import { readFile as readFile12 } from "node:fs/promises";
 var pathQuery2 = import_good_enough_parser2.query.sym((ctx, { offset, value: value2 }) => {
 return { ...ctx, pathOffset: offset, pathValue: value2 };
 }).many(
@@ -218103,7 +218270,7 @@ var treeQuery2 = import_good_enough_parser2.query.tree({
 });
 async function loadSbtProject(filePath, acc = { mem: {}, moduleIds: [] }) {
 const scalaLang = import_good_enough_parser2.lang.createLang("scala");
-const cursor = scalaLang.parse(await
+const cursor = scalaLang.parse(await readFile12(filePath, "utf-8"));
 return scalaLang.query(cursor, treeQuery2, acc) ?? acc;
 }
 function evaluate2(v) {
@@ -218246,7 +218413,7 @@ ${indent(1, indentationSize)}`)}
 } else {
 let fileContent;
 try {
-fileContent = await
+fileContent = await readFile13(dependencyOverridesFile, "utf-8");
 } catch (error) {
 ctxt.statusUpdater?.({
 status: "error",
@@ -218375,7 +218542,7 @@ var MavenSocketUpgradeManager = class {

 // ../fixing-management/src/fixing-management/maven/sbt-fixing-manager.ts
 import { existsSync as existsSync9 } from "node:fs";
-import { readFile as
+import { readFile as readFile14 } from "node:fs/promises";
 import { join as join9 } from "node:path";
 var SbtFixingManager = class {
 constructor(rootDir, subprojectPath, otherModulesCommunicator) {
@@ -218577,7 +218744,7 @@ var SbtFixingManager = class {
 `
 };
 } else {
-const fileContent = await
+const fileContent = await readFile14(workspaceBuildSbtPath, "utf-8");
 const prependNewline = fileContent.split("\n").some((line) => !line.trim());
 return {
 file: workspaceBuildSbtPath,
@@ -218656,7 +218823,7 @@ ${indent(1, indentationSize)}`)}
 newText: overrideText
 };
 } else {
-const fileContent = await
+const fileContent = await readFile14(workspaceBuildSbtPath, "utf-8");
 const indentationSize = getIndentationSize(fileContent);
 const prependNewline = fileContent.length > 0 && !fileContent.endsWith("\n\n");
 const overrideText = `dependencyOverrides ++= Seq(
@@ -218675,7 +218842,7 @@ ${indent(1, indentationSize)}`)}

 // ../fixing-management/src/fixing-management/npm/npm-socket-upgrade-manager.ts
 import { existsSync as existsSync15 } from "fs";
-import { readFile as
+import { readFile as readFile20 } from "fs/promises";
 import assert10 from "node:assert";
 import { dirname as dirname14, join as join14, relative as relative9, resolve as resolve24 } from "path";

@@ -224317,23 +224484,54 @@ var Spinner2 = class _Spinner {

 // ../utils/dist/telemetry/telemetry-collector.js
 import { execFile as execFile3 } from "child_process";
+import { readFile as readFile15 } from "fs/promises";
 import { platform as platform7 } from "process";
 import { promisify as promisify2 } from "util";
 var execFileAsync2 = promisify2(execFile3);
 var TelemetryCollector2 = class _TelemetryCollector {
 pid;
+previousCpuState;
+clockTicksPerSecond;
+pageSize;
+isCollecting = false;
 constructor(pid) {
 this.pid = pid;
 }
 static create(pid) {
-if (!Number.isInteger(pid) || pid <= 0 || !["darwin", "linux"].includes(platform7))
+if (!Number.isInteger(pid) || pid <= 0 || !["darwin", "linux", "win32"].includes(platform7))
 return void 0;
 return new _TelemetryCollector(pid);
 }
 /**
-* Collect metrics for
+* Collect metrics for the child process.
+* Uses OS-specific methods to query memory and CPU usage.
 */
 async collectChildProcessMetrics() {
+if (this.isCollecting) {
+return void 0;
+}
+this.isCollecting = true;
+try {
+if (platform7 === "darwin") {
+return await this.collectDarwinProcessMetrics();
+}
+if (platform7 === "linux") {
+return await this.collectLinuxProcessMetrics();
+}
+if (platform7 === "win32") {
+return await this.collectWindowsProcessMetrics();
+}
+return void 0;
+} catch {
+return void 0;
+} finally {
+this.isCollecting = false;
+}
+}
+/**
+* Collect metrics on macOS using ps command.
+*/
+async collectDarwinProcessMetrics() {
 try {
 const { stdout } = await execFileAsync2("ps", ["-o", "rss=,pcpu=", "-p", String(this.pid)], {
 timeout: 5e3
@@ -224355,6 +224553,138 @@ var TelemetryCollector2 = class _TelemetryCollector {
 return void 0;
 }
 }
+/**
+* Collect metrics on Linux using /proc filesystem.
+* This works on all Linux distributions including Alpine (BusyBox).
+*/
+async collectLinuxProcessMetrics() {
+try {
+const statmContent = await readFile15(`/proc/${this.pid}/statm`, "utf-8");
+const statmParts = statmContent.trim().split(/\s+/);
+if (statmParts.length < 2) {
+return void 0;
+}
+const residentPages = parseInt(statmParts[1], 10);
+if (isNaN(residentPages)) {
+return void 0;
+}
+const pageSize = await this.getPageSize();
+const rssBytes = residentPages * pageSize;
+const statContent = await readFile15(`/proc/${this.pid}/stat`, "utf-8");
+const cpuPercent = await this.calculateCpuPercent(statContent);
+return {
+rss: rssBytes,
+cpuPercent
+};
+} catch {
+return void 0;
+}
+}
+/**
+* Collect metrics on Windows using PowerShell.
+* Uses Get-Process for memory and CPU time tracking for CPU%.
+*/
+async collectWindowsProcessMetrics() {
+try {
+const psScript = "$p = Get-Process -Id $args[0] -ErrorAction Stop; @{WorkingSet64=$p.WorkingSet64; TotalMs=$p.TotalProcessorTime.TotalMilliseconds} | ConvertTo-Json";
+const { stdout } = await execFileAsync2("powershell", ["-NoProfile", "-Command", psScript, String(this.pid)], { timeout: 5e3 });
+const trimmed = stdout.trim();
+if (!trimmed) {
+return void 0;
+}
+const data2 = JSON.parse(trimmed);
+const rssBytes = data2.WorkingSet64;
+const totalCpuMs = data2.TotalMs;
+if (typeof rssBytes !== "number" || typeof totalCpuMs !== "number") {
+return void 0;
+}
+const now = Date.now();
+const currentState = { totalCpuTime: totalCpuMs, timestamp: now };
+let cpuPercent = 0;
+if (this.previousCpuState) {
+const timeDeltaMs = now - this.previousCpuState.timestamp;
+const cpuTimeDelta = totalCpuMs - this.previousCpuState.totalCpuTime;
+if (timeDeltaMs > 0 && cpuTimeDelta >= 0) {
+cpuPercent = Math.max(0, cpuTimeDelta / timeDeltaMs * 100);
+}
+}
+this.previousCpuState = currentState;
+return {
+rss: rssBytes,
+cpuPercent
+};
+} catch {
+return void 0;
+}
+}
+/**
+* Calculate CPU percentage from /proc/<pid>/stat.
+* Requires tracking state between calls to compute the delta.
+*/
+async calculateCpuPercent(statContent) {
+try {
+const lastParen = statContent.lastIndexOf(")");
+if (lastParen === -1) {
+return 0;
+}
+const fieldsAfterComm = statContent.slice(lastParen + 2).split(/\s+/);
+const utime = parseInt(fieldsAfterComm[11], 10);
+const stime = parseInt(fieldsAfterComm[12], 10);
+if (isNaN(utime) || isNaN(stime)) {
+return 0;
+}
+const totalCpuTime = utime + stime;
+const now = Date.now();
+const clockTicks = await this.getClockTicksPerSecond();
+const currentState = { totalCpuTime, timestamp: now };
+if (!this.previousCpuState) {
+this.previousCpuState = currentState;
+return 0;
+}
+const timeDeltaMs = now - this.previousCpuState.timestamp;
+const cpuTimeDelta = totalCpuTime - this.previousCpuState.totalCpuTime;
+this.previousCpuState = currentState;
+if (timeDeltaMs <= 0 || cpuTimeDelta < 0) {
+return 0;
+}
+const cpuPercent = cpuTimeDelta * 1e3 * 100 / (clockTicks * timeDeltaMs);
+return Math.max(0, cpuPercent);
+} catch {
+return 0;
+}
+}
+/**
+* Get the system page size in bytes (cached after first call).
+*/
+async getPageSize() {
+if (this.pageSize !== void 0) {
+return this.pageSize;
+}
+try {
+const { stdout } = await execFileAsync2("getconf", ["PAGE_SIZE"], { timeout: 1e3 });
+const parsed = parseInt(stdout.trim(), 10);
+this.pageSize = isNaN(parsed) ? 4096 : parsed;
+} catch {
+this.pageSize = 4096;
+}
+return this.pageSize;
+}
+/**
+* Get the number of clock ticks per second (used for CPU time conversion).
+*/
+async getClockTicksPerSecond() {
+if (this.clockTicksPerSecond !== void 0) {
+return this.clockTicksPerSecond;
+}
+try {
+const { stdout } = await execFileAsync2("getconf", ["CLK_TCK"], { timeout: 1e3 });
+const ticks = parseInt(stdout.trim(), 10);
+this.clockTicksPerSecond = isNaN(ticks) ? 100 : ticks;
+} catch {
+this.clockTicksPerSecond = 100;
+}
+return this.clockTicksPerSecond;
+}
 };

 // ../utils/dist/telemetry/analyzer-telemetry-server.js
@@ -224752,12 +225082,12 @@ async function getWorkspacePathsFromPnpmLockFile(lockFileDir, useDotWhenNoWorksp
|
|
|
224752
225082
|
// ../fixing-management/src/fixing-management/npm/yarn-utils.ts
|
|
224753
225083
|
var lockfile = __toESM(require_lockfile(), 1);
|
|
224754
225084
|
var import_parsers = __toESM(require_lib26(), 1);
|
|
224755
|
-
import { readFile as
|
|
225085
|
+
import { readFile as readFile16 } from "fs/promises";
|
|
224756
225086
|
import { resolve as resolve19 } from "path";
|
|
224757
225087
|
async function getYarnType(projectDir) {
|
|
224758
225088
|
const yarnLockLocation = resolve19(projectDir, "yarn.lock");
|
|
224759
225089
|
try {
|
|
224760
|
-
const content = await
|
|
225090
|
+
const content = await readFile16(yarnLockLocation, "utf8");
|
|
224761
225091
|
if (!content || content.length === 0) {
|
|
224762
225092
|
return void 0;
|
|
224763
225093
|
}
|
|
@@ -224778,7 +225108,7 @@ async function getYarnType(projectDir) {
|
|
|
224778
225108
|
}
|
|
224779
225109
|
|
|
224780
225110
|
// ../fixing-management/src/fixing-management/npm/npm-fixing-manager.ts
|
|
224781
|
-
import { readFile as
|
|
225111
|
+
import { readFile as readFile17, writeFile as writeFile6 } from "fs/promises";
|
|
224782
225112
|
import { relative as relative5, resolve as resolve20 } from "path";
|
|
224783
225113
|
|
|
224784
225114
|
// ../fixing-management/src/fixing-management/npm/npm-ecosystem-fixing-manager.ts
|
|
@@ -224840,7 +225170,7 @@ var NpmFixingManager = class extends NpmEcosystemFixingManager {
|
|
|
224840
225170
|
}
|
|
224841
225171
|
async applySecurityFixesSpecificPackageManager(fixes) {
|
|
224842
225172
|
const pkgLockLocation = resolve20(this.rootDir, this.subprojectPath, "package-lock.json");
|
|
224843
|
-
const packageLockContent = await
|
|
225173
|
+
const packageLockContent = await readFile17(pkgLockLocation, "utf-8");
|
|
224844
225174
|
const getPackageName = (pkgPath) => {
|
|
224845
225175
|
const strings = pkgPath.split("node_modules/");
|
|
224846
225176
|
return strings[strings.length - 1];
|
|
@@ -224878,7 +225208,7 @@ var NpmFixingManager = class extends NpmEcosystemFixingManager {
|
|
|
224878
225208
|
};
|
|
224879
225209
|
|
|
224880
225210
|
// ../fixing-management/src/fixing-management/npm/pnpm-fixing-manager.ts
|
|
224881
|
-
import { readFile as
|
|
225211
|
+
import { readFile as readFile18, writeFile as writeFile7 } from "fs/promises";
|
|
224882
225212
|
import { relative as relative6, resolve as resolve21 } from "path";
|
|
224883
225213
|
var import_yaml = __toESM(require_dist10(), 1);
|
|
224884
225214
|
var import_lockfile_file2 = __toESM(require_lib25(), 1);
|
|
@@ -225030,7 +225360,7 @@ function getVersionNumber(version4) {
|
|
|
225030
225360
|
return match2 ? `${match2[1]}` : version4;
|
|
225031
225361
|
}
|
|
225032
225362
|
async function readYamlFile(workspaceYamlFile) {
|
|
225033
|
-
const workspaceYamlString = await
|
|
225363
|
+
const workspaceYamlString = await readFile18(workspaceYamlFile, "utf8");
|
|
225034
225364
|
const parser2 = new import_yaml.Parser();
|
|
225035
225365
|
const [ast] = parser2.parse(workspaceYamlString);
|
|
225036
225366
|
return ast;
|
|
@@ -225067,7 +225397,7 @@ function updateCatalog(update3, map2) {
|
|
|
225067
225397
|
}
|
|
225068
225398
|
|
|
225069
225399
|
// ../fixing-management/src/fixing-management/npm/yarn-fixing-manager.ts
|
|
225070
|
-
import { readFile as
|
|
225400
|
+
import { readFile as readFile19, writeFile as writeFile8 } from "fs/promises";
|
|
225071
225401
|
import { relative as relative8, resolve as resolve23 } from "path";
|
|
225072
225402
|
|
|
225073
225403
|
// ../utils/src/package-utils.ts
|
|
@@ -225175,7 +225505,7 @@ var YarnFixingManager = class extends NpmEcosystemFixingManager {
|
|
|
225175
225505
|
logger.debug("Installation completed.");
|
|
225176
225506
|
}
|
|
225177
225507
|
async getYarnLockObj(filePath) {
|
|
225178
|
-
const fileString = await
|
|
225508
|
+
const fileString = await readFile19(filePath, "utf8");
|
|
225179
225509
|
const yarnType = await this.getYarnType();
|
|
225180
225510
|
return yarnType === "classic" ? (0, import_yarnlock_parse_raw.parseYarnLockRawV1)(fileString) : (0, import_yarnlock_parse_raw.parseYarnLockRawV2)(fileString);
|
|
225181
225511
|
}
|
|
@@ -225493,7 +225823,7 @@ var NpmSocketUpgradeManager = class {
|
|
|
225493
225823
|
const patches = [];
|
|
225494
225824
|
let packageJsonContent;
|
|
225495
225825
|
try {
|
|
225496
|
-
packageJsonContent = await
|
|
225826
|
+
packageJsonContent = await readFile20(resolve24(this.rootDir, mf.file), "utf-8");
|
|
225497
225827
|
} catch (error) {
|
|
225498
225828
|
ctxt.statusUpdater?.({
|
|
225499
225829
|
status: "error",
|
|
@@ -225556,7 +225886,7 @@ var RushFixingManager = class {
|
|
|
225556
225886
|
};
|
|
225557
225887
|
|
|
225558
225888
|
// ../fixing-management/src/fixing-management/nuget/nuget-fixing-manager.ts
|
|
225559
|
-
import { readFile as
|
|
225889
|
+
import { readFile as readFile21, writeFile as writeFile9 } from "fs/promises";
|
|
225560
225890
|
import { join as join15 } from "path";
|
|
225561
225891
|
|
|
225562
225892
|
// ../utils/src/nuget-utils.ts
|
|
@@ -225659,14 +225989,14 @@ var NugetFixingManager = class {
|
|
|
225659
225989
|
if (projectFiles.length !== 1)
|
|
225660
225990
|
throw new Error("Applying fixes to workspaces with more than 1 project file currently not supported");
|
|
225661
225991
|
const projectFilePath = join15(this.getAbsWsPath(wsPath), projectFiles[0]);
|
|
225662
|
-
const initialProjectFile = await
|
|
225992
|
+
const initialProjectFile = await readFile21(projectFilePath, "utf-8");
|
|
225663
225993
|
const initialLockFile = await this.restoreWorkspaceAndParseLockFile(wsPath);
|
|
225664
225994
|
await applySeries(fixesWithId, async ({ fixId, vulnerabilityFixes }) => {
|
|
225665
225995
|
await this.applySecurityFixesForWorkspace(wsPath, projectFilePath, vulnerabilityFixes, dependencyTree);
|
|
225666
225996
|
signalFixApplied2?.(fixId, this.subprojectPath, wsPath, vulnerabilityFixes);
|
|
225667
225997
|
});
|
|
225668
|
-
const finalProjectFile = await
|
|
225669
|
-
const finalLockFile = JSON.parse(await
|
|
225998
|
+
const finalProjectFile = await readFile21(projectFilePath, "utf-8");
|
|
225999
|
+
const finalLockFile = JSON.parse(await readFile21(this.getLockFilePath(wsPath), "utf-8"));
|
|
225670
226000
|
await writeFile9(projectFilePath, initialProjectFile);
|
|
225671
226001
|
await writeFile9(this.getLockFilePath(wsPath), JSON.stringify(initialLockFile, null, 2));
|
|
225672
226002
|
return { projectFile: finalProjectFile, lockFile: finalLockFile };
|
|
@@ -225698,7 +226028,7 @@ var NugetFixingManager = class {
|
|
|
225698
226028
|
}
|
|
225699
226029
|
}
|
|
225700
226030
|
async applySecurityFixesForWorkspace(wsPath, projectFilePath, vulnFixes, dependencyTree) {
|
|
225701
|
-
const initialProjectFile = await
|
|
226031
|
+
const initialProjectFile = await readFile21(projectFilePath, "utf-8");
|
|
225702
226032
|
const initialLockFile = await this.restoreWorkspaceAndParseLockFile(wsPath);
|
|
225703
226033
|
const typeCache = new Cache();
|
|
225704
226034
|
const requestedCache = new Cache();
|
|
@@ -225788,7 +226118,7 @@ var NugetFixingManager = class {
|
|
|
225788
226118
|
async restoreWorkspaceAndParseLockFile(wsPath) {
|
|
225789
226119
|
const succeeded = await execAndLogOnFailure2("dotnet restore --use-lock-file", this.getAbsWsPath(wsPath));
|
|
225790
226120
|
if (!succeeded) throw new Error(`Error applying fix - could not restore project ${this.subprojectPath}/${wsPath}`);
|
|
225791
|
-
return JSON.parse(await
|
|
226121
|
+
return JSON.parse(await readFile21(this.getLockFilePath(wsPath), "utf-8"));
|
|
225792
226122
|
}
|
|
225793
226123
|
getLockFilePath(wsPath, lockFileName = "packages.lock.json") {
|
|
225794
226124
|
return join15(this.getAbsWsPath(wsPath), lockFileName);
|
|
@@ -225866,7 +226196,7 @@ import { dirname as dirname16, resolve as resolve26 } from "node:path";
|
|
|
225866
226196
|
|
|
225867
226197
|
// ../utils/src/nuget-project-utils.ts
|
|
225868
226198
|
var import_parse_xml3 = __toESM(require_dist(), 1);
|
|
225869
|
-
import { readFile as
|
|
226199
|
+
import { readFile as readFile22 } from "node:fs/promises";
|
|
225870
226200
|
import { dirname as dirname15, join as join17, relative as relative10, resolve as resolve25, basename as basename8, extname } from "node:path";
|
|
225871
226201
|
import { existsSync as existsSync16 } from "node:fs";
|
|
225872
226202
|
|
|
@@ -227424,7 +227754,7 @@ async function loadNuGetProjectOrTarget(rootDir, projectFile, mainProject, visit
|
|
|
227424
227754
|
if (!validatedProjectPath || !existsSync16(validatedProjectPath)) return void 0;
|
|
227425
227755
|
if (visited.has(validatedProjectPath)) return void 0;
|
|
227426
227756
|
visited.set(validatedProjectPath);
|
|
227427
|
-
const sourceText = await
|
|
227757
|
+
const sourceText = await readFile22(validatedProjectPath, "utf-8");
|
|
227428
227758
|
const xml2 = (0, import_parse_xml3.parseXml)(sourceText, { includeOffsets: true });
|
|
227429
227759
|
const indentation = inferIndentationFromParsedXml2(xml2, sourceText);
|
|
227430
227760
|
const currentProject = {
|
|
@@ -227497,7 +227827,7 @@ async function loadNuGetProjectOrTarget(rootDir, projectFile, mainProject, visit
|
|
|
227497
227827
|
async function loadPackagesConfig(rootDir, file, validateFile) {
|
|
227498
227828
|
const validatedConfigPath = validateFile(resolve25(rootDir, file));
|
|
227499
227829
|
if (!validatedConfigPath || !existsSync16(validatedConfigPath)) return void 0;
|
|
227500
|
-
const sourceText = await
|
|
227830
|
+
const sourceText = await readFile22(validatedConfigPath, "utf-8");
|
|
227501
227831
|
const configXml = (0, import_parse_xml3.parseXml)(sourceText, { includeOffsets: true });
|
|
227502
227832
|
const packages = extractPackagesFromXml(configXml, sourceText);
|
|
227503
227833
|
return {
|
|
@@ -228318,17 +228648,17 @@ import { dirname as dirname18, relative as relative11, resolve as resolve28 } fr
|
|
|
228318
228648
|
var import_picomatch6 = __toESM(require_picomatch2(), 1);
|
|
228319
228649
|
var import_semver4 = __toESM(require_semver2(), 1);
|
|
228320
228650
|
import assert12 from "node:assert";
|
|
228321
|
-
import { readFile as
|
|
228651
|
+
import { readFile as readFile24, writeFile as writeFile10 } from "node:fs/promises";
|
|
228322
228652
|
|
|
228323
228653
|
// ../utils/src/cargo-utils.ts
|
|
228324
|
-
import { readFile as
|
|
228654
|
+
import { readFile as readFile23 } from "node:fs/promises";
|
|
228325
228655
|
import { dirname as dirname17, resolve as resolve27 } from "node:path";
|
|
228326
228656
|
var import_picomatch5 = __toESM(require_picomatch2(), 1);
|
|
228327
228657
|
async function getCargoTomlFilesForCargoLockFile(rootDir, cargoLockFile, cargoTomlFiles) {
|
|
228328
228658
|
const lockDir = dirname17(cargoLockFile);
|
|
228329
228659
|
const rootTomlFile = cargoTomlFiles.find((file) => dirname17(file) === lockDir);
|
|
228330
228660
|
if (!rootTomlFile) return void 0;
|
|
228331
|
-
const rootTomlContent = await
|
|
228661
|
+
const rootTomlContent = await readFile23(resolve27(rootDir, rootTomlFile), "utf-8");
|
|
228332
228662
|
const toml = parseTOML2(rootTomlContent);
|
|
228333
228663
|
if (!toml) return void 0;
|
|
228334
228664
|
const memberPatterns = [];
|
|
@@ -228425,7 +228755,7 @@ var CargoSocketUpgradeManager = class {
|
|
|
228425
228755
|
const path9 = resolve28(this.rootDir, file);
|
|
228426
228756
|
if (!restoreMap.has(path9)) {
|
|
228427
228757
|
restoreMap.set(path9, {
|
|
228428
|
-
content: await
|
|
228758
|
+
content: await readFile24(path9, "utf-8"),
|
|
228429
228759
|
artifacts: []
|
|
228430
228760
|
});
|
|
228431
228761
|
}
|
|
@@ -228489,7 +228819,7 @@ var CargoSocketUpgradeManager = class {
|
|
|
228489
228819
|
const fullPath = resolve28(this.rootDir, mf.file);
|
|
228490
228820
|
let content;
|
|
228491
228821
|
try {
|
|
228492
|
-
content = await
|
|
228822
|
+
content = await readFile24(fullPath, "utf-8");
|
|
228493
228823
|
} catch (error) {
|
|
228494
228824
|
ctxt.statusUpdater?.({
|
|
228495
228825
|
status: "error",
|
|
@@ -228589,7 +228919,7 @@ var CargoSocketUpgradeManager = class {
|
|
|
228589
228919
|
const fullPath = resolve28(this.rootDir, tomlFile);
|
|
228590
228920
|
let content;
|
|
228591
228921
|
try {
|
|
228592
|
-
content = await
|
|
228922
|
+
content = await readFile24(fullPath, "utf-8");
|
|
228593
228923
|
} catch (error) {
|
|
228594
228924
|
ctxt.statusUpdater?.({
|
|
228595
228925
|
status: "error",
|
|
@@ -228675,7 +229005,7 @@ ${newDependencyLine}`
|
|
|
228675
229005
|
var import_picomatch8 = __toESM(require_picomatch2(), 1);
|
|
228676
229006
|
import { dirname as dirname20, join as join19, resolve as resolve31 } from "node:path";
|
|
228677
229007
|
import assert13 from "node:assert";
|
|
228678
|
-
import { readFile as
|
|
229008
|
+
import { readFile as readFile27 } from "node:fs/promises";
|
|
228679
229009
|
|
|
228680
229010
|
// ../fixing-management/src/fixing-management/pip/pip-requirements-parser.ts
|
|
228681
229011
|
function parsePipRequirementsFileLoosely(src, _options) {
|
|
@@ -229028,7 +229358,7 @@ function createPep508VersionPatches(file, idx, requirement, oldVersion, upgradeV
|
|
|
229028
229358
|
|
|
229029
229359
|
// ../utils/src/pip-utils.ts
|
|
229030
229360
|
import { existsSync as existsSync17 } from "node:fs";
|
|
229031
|
-
import { readFile as
|
|
229361
|
+
import { readFile as readFile26 } from "node:fs/promises";
|
|
229032
229362
|
import { dirname as dirname19, resolve as resolve30 } from "node:path";
|
|
229033
229363
|
import util4 from "node:util";
|
|
229034
229364
|
|
|
@@ -229037,7 +229367,7 @@ var import_lodash6 = __toESM(require_lodash(), 1);
|
|
|
229037
229367
|
var import_semver5 = __toESM(require_semver2(), 1);
|
|
229038
229368
|
import { execFileSync } from "child_process";
|
|
229039
229369
|
import { constants as constants3 } from "fs";
|
|
229040
|
-
import { access as access3, readFile as
|
|
229370
|
+
import { access as access3, readFile as readFile25 } from "fs/promises";
|
|
229041
229371
|
import { join as join18, resolve as resolve29 } from "path";
|
|
229042
229372
|
import util3 from "util";
|
|
229043
229373
|
var { once: once2 } = import_lodash6.default;
|
|
@@ -229060,14 +229390,14 @@ function normalizePackageName(packageName) {
|
|
|
229060
229390
|
return packageName.replace(/[-_.]+/g, "-").toLowerCase();
|
|
229061
229391
|
}
|
|
229062
229392
|
async function isSetupPySetuptools(file) {
|
|
229063
|
-
const content = await
|
|
229393
|
+
const content = await readFile26(file, "utf-8");
|
|
229064
229394
|
return content.includes("setup(") && (/^\s*from\s+(?:setuptools|distutils\.core)\s+import\s+.*setup/m.test(content) || /^\s*import\s+(?:setuptools|distutils\.core)/m.test(content));
|
|
229065
229395
|
}
|
|
229066
229396
|
async function getPyprojectTomlFilesForLockFile(rootDir, uvLockfile, pyprojectFiles) {
|
|
229067
229397
|
const lockDir = dirname19(uvLockfile);
|
|
229068
229398
|
const rootTomlFile = pyprojectFiles.find((file) => dirname19(file) === lockDir);
|
|
229069
229399
|
if (!rootTomlFile) return void 0;
|
|
229070
|
-
const rootTomlContent = await
|
|
229400
|
+
const rootTomlContent = await readFile26(resolve30(rootDir, rootTomlFile), "utf-8");
|
|
229071
229401
|
const toml = parseTOML2(rootTomlContent);
|
|
229072
229402
|
if (!toml) return void 0;
|
|
229073
229403
|
const memberPatterns = [];
|
|
@@ -229240,7 +229570,7 @@ var PipSocketUpgradeManager = class {
|
|
|
229240
229570
|
const fullPath = resolve31(this.rootDir, lockFile);
|
|
229241
229571
|
let content;
|
|
229242
229572
|
try {
|
|
229243
|
-
content = await
|
|
229573
|
+
content = await readFile27(fullPath, "utf-8");
|
|
229244
229574
|
} catch {
|
|
229245
229575
|
return;
|
|
229246
229576
|
}
|
|
@@ -229298,7 +229628,7 @@ var PipSocketUpgradeManager = class {
|
|
|
229298
229628
|
const fullPath = resolve31(this.rootDir, lockFile);
|
|
229299
229629
|
let content;
|
|
229300
229630
|
try {
|
|
229301
|
-
content = await
|
|
229631
|
+
content = await readFile27(fullPath, "utf-8");
|
|
229302
229632
|
} catch {
|
|
229303
229633
|
return;
|
|
229304
229634
|
}
|
|
@@ -229338,7 +229668,7 @@ var PipSocketUpgradeManager = class {
|
|
|
229338
229668
|
assert13(artifact.name);
|
|
229339
229669
|
assert13(artifact.version);
|
|
229340
229670
|
const fullPath = resolve31(this.rootDir, lockFile);
|
|
229341
|
-
const content = await
|
|
229671
|
+
const content = await readFile27(fullPath, "utf-8");
|
|
229342
229672
|
const packageName = normalizePackageName(artifact.name);
|
|
229343
229673
|
const patches = [];
|
|
229344
229674
|
const toml = parseTOML2(content);
|
|
@@ -229467,7 +229797,7 @@ var PipSocketUpgradeManager = class {
|
|
|
229467
229797
|
const refStart = ref.start;
|
|
229468
229798
|
const refEnd = ref.end;
|
|
229469
229799
|
try {
|
|
229470
|
-
const content = await
|
|
229800
|
+
const content = await readFile27(fullPath, "utf-8");
|
|
229471
229801
|
const requirements = parsePipRequirementsFileLoosely(content, { includeLocations: true });
|
|
229472
229802
|
const foundRequirement = requirements.filter((req) => req.data.type === "ProjectName").find((req) => refStart <= req.location.startIdx && req.location.endIdx <= refEnd);
|
|
229473
229803
|
if (foundRequirement) {
|
|
@@ -229517,7 +229847,7 @@ var PipSocketUpgradeManager = class {
|
|
|
229517
229847
|
assert13(artifact.version);
|
|
229518
229848
|
const patches = [];
|
|
229519
229849
|
try {
|
|
229520
|
-
const content = await
|
|
229850
|
+
const content = await readFile27(fullPath, "utf-8");
|
|
229521
229851
|
const newText = `${artifact.name}==${upgradeVersion}`;
|
|
229522
229852
|
patches.push({
|
|
229523
229853
|
file: requirementsFile,
|
|
@@ -229545,7 +229875,7 @@ ${newText}
|
|
|
229545
229875
|
const fullPath = resolve31(this.rootDir, tomlFile);
|
|
229546
229876
|
let content;
|
|
229547
229877
|
try {
|
|
229548
|
-
content = await
|
|
229878
|
+
content = await readFile27(fullPath, "utf-8");
|
|
229549
229879
|
} catch (error) {
|
|
229550
229880
|
ctxt.statusUpdater?.({
|
|
229551
229881
|
status: "error",
|
|
@@ -229691,7 +230021,7 @@ ${newText}
|
|
|
229691
230021
|
assert13(artifact.version);
|
|
229692
230022
|
const patches = [];
|
|
229693
230023
|
try {
|
|
229694
|
-
const content = await
|
|
230024
|
+
const content = await readFile27(resolve31(this.rootDir, pyprojectToml), "utf-8");
|
|
229695
230025
|
const toml = parseTOML2(content);
|
|
229696
230026
|
if (!toml) {
|
|
229697
230027
|
return patches;
|
|
@@ -229758,7 +230088,7 @@ function parseSourceString(source) {
|
|
|
229758
230088
|
async function buildDependencyTreesFromUvLock(rootDir, uvLockFile) {
|
|
229759
230089
|
let lockToml;
|
|
229760
230090
|
try {
|
|
229761
|
-
const lockContent = await
|
|
230091
|
+
const lockContent = await readFile27(resolve31(rootDir, uvLockFile), "utf-8");
|
|
229762
230092
|
lockToml = parseTOML2(lockContent);
|
|
229763
230093
|
} catch {
|
|
229764
230094
|
return void 0;
|
|
@@ -230229,7 +230559,7 @@ function parseGemfileLock(content) {
|
|
|
230229
230559
|
}
|
|
230230
230560
|
|
|
230231
230561
|
// ../fixing-management/src/fixing-management/rubygems/rubygems-socket-upgrade-manager.ts
|
|
230232
|
-
import { readFile as
|
|
230562
|
+
import { readFile as readFile28, writeFile as writeFile11 } from "node:fs/promises";
|
|
230233
230563
|
|
|
230234
230564
|
// ../fixing-management/src/fixing-management/rubygems/rubygems-patch-utils.ts
|
|
230235
230565
|
function createRubygemVersionPatches(gem, idx, upgradeVersion, rangeStyle, statusUpdater) {
|
|
@@ -230442,7 +230772,7 @@ var RubygemsSocketUpgradeManager = class {
|
|
|
230442
230772
|
for (const mf of artifact.manifestFiles ?? []) {
|
|
230443
230773
|
if (this.gemfileLockMatcher(mf.file)) {
|
|
230444
230774
|
if (ctxt.wsFilter && !ctxt.wsFilter(dirname22(mf.file) || ".")) continue;
|
|
230445
|
-
const lockfileContent = await
|
|
230775
|
+
const lockfileContent = await readFile28(resolve33(this.rootDir, mf.file), "utf-8");
|
|
230446
230776
|
const gemfileLock = parseGemfileLock(lockfileContent);
|
|
230447
230777
|
const pathGems = [];
|
|
230448
230778
|
for (const [pathGemName, deps] of gemfileLock.pathDependencies) {
|
|
@@ -230493,7 +230823,7 @@ var RubygemsSocketUpgradeManager = class {
|
|
|
230493
230823
|
const path9 = resolve33(this.rootDir, file);
|
|
230494
230824
|
if (!restoreMap.has(path9)) {
|
|
230495
230825
|
restoreMap.set(path9, {
|
|
230496
|
-
content: await
|
|
230826
|
+
content: await readFile28(path9, "utf-8"),
|
|
230497
230827
|
artifacts: []
|
|
230498
230828
|
});
|
|
230499
230829
|
}
|
|
@@ -230585,7 +230915,7 @@ var RubygemsSocketUpgradeManager = class {
|
|
|
230585
230915
|
const gemfilePatches = [];
|
|
230586
230916
|
const artifact = ctxt.artifacts[idx];
|
|
230587
230917
|
try {
|
|
230588
|
-
const gemfileContent = await
|
|
230918
|
+
const gemfileContent = await readFile28(resolve33(this.rootDir, gemfilePath), "utf-8");
|
|
230589
230919
|
const gemfile = parseGemfile(this.rootDir, gemfilePath, gemfileContent);
|
|
230590
230920
|
const gemspecFiles = /* @__PURE__ */ new Set();
|
|
230591
230921
|
for (const gem of gemfile.gems) {
|
|
@@ -230606,7 +230936,7 @@ var RubygemsSocketUpgradeManager = class {
|
|
|
230606
230936
|
let foundInGemspec = false;
|
|
230607
230937
|
for (const gemspecFile of gemspecFiles) {
|
|
230608
230938
|
try {
|
|
230609
|
-
const gemspecContent = await
|
|
230939
|
+
const gemspecContent = await readFile28(resolve33(this.rootDir, gemspecFile), "utf-8");
|
|
230610
230940
|
const { parseGemspec: parseGemspec2 } = await Promise.resolve().then(() => (init_gemspec_utils(), gemspec_utils_exports));
|
|
230611
230941
|
const gemspec = parseGemspec2(this.rootDir, gemspecFile, gemspecContent);
|
|
230612
230942
|
for (const gem of gemspec.dependencies) {
|
|
@@ -230671,7 +231001,7 @@ var RubygemsSocketUpgradeManager = class {
|
|
|
230671
231001
|
const [version4] = artifact.version.split("-");
|
|
230672
231002
|
const patches = [];
|
|
230673
231003
|
try {
|
|
230674
|
-
const sourceText = await
|
|
231004
|
+
const sourceText = await readFile28(resolve33(this.rootDir, file), "utf-8");
|
|
230675
231005
|
const gemfile = parseGemfile(this.rootDir, file, sourceText);
|
|
230676
231006
|
for (const gem of gemfile.gems) {
|
|
230677
231007
|
if (evaluate4(gem.name) !== packageName) continue;
|
|
@@ -230717,7 +231047,7 @@ var RubygemsSocketUpgradeManager = class {
|
|
|
230717
231047
|
const patches = [];
|
|
230718
231048
|
let content;
|
|
230719
231049
|
try {
|
|
230720
|
-
content = await
|
|
231050
|
+
content = await readFile28(resolve33(this.rootDir, file), "utf-8");
|
|
230721
231051
|
} catch (error) {
|
|
230722
231052
|
ctxt.statusUpdater?.({
|
|
230723
231053
|
status: "error",
|
|
@@ -230991,7 +231321,7 @@ function flattenDockerSpec({
 var import_winston2 = __toESM(require_winston(), 1);
 import { Console as Console2 } from "console";
 import { createWriteStream as createWriteStream3 } from "fs";
-import { readFile as
+import { readFile as readFile29 } from "fs/promises";
 
 // ../web-compat-utils/dist/util-formatter.js
 import { format as format3 } from "util";
@@ -231194,7 +231524,7 @@ var CLILogger2 = class {
 await this.finish();
 let logContent;
 try {
-logContent = await
+logContent = await readFile29(logFilePath, "utf-8");
 } catch (e) {
 console.error("Error reading log file", e);
 }
@@ -231239,13 +231569,13 @@ async function detectVariantMaven(projectDir) {
 // ../docker-management/src/maven/gradle-version-detector.ts
 import { existsSync as existsSync20 } from "fs";
 import { join as join23 } from "path";
-import { readFile as
+import { readFile as readFile30 } from "fs/promises";
 async function detectVariantGradle(projectDir) {
 return sanitizeJvmVariant("GRADLE", projectDir, await detect(projectDir));
 }
 async function detect(projectDir) {
 const gradleWrapperPropertiesPath = join23(projectDir, "gradle", "wrapper", "gradle-wrapper.properties");
-const gradleWrapperProperties = existsSync20(gradleWrapperPropertiesPath) ? (await
+const gradleWrapperProperties = existsSync20(gradleWrapperPropertiesPath) ? (await readFile30(gradleWrapperPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
 if (!gradleWrapperProperties) return void 0;
 const distributionUrlRegex = /.*gradle-(\d+(\.\d+(\.\d+)?)?)/;
 for (const prop2 of gradleWrapperProperties) {
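The hunk above shows how the Gradle variant is detected: gradle/wrapper/gradle-wrapper.properties is read, comments and blank lines are dropped, and the version is pulled out of the distributionUrl with a regex. Below is a minimal standalone sketch of that logic; the loop body past the visible lines (exactly which property is matched) is an assumption.

// Sketch, assuming the regex is applied to the non-comment property lines.
import { existsSync } from "fs";
import { readFile } from "fs/promises";
import { join } from "path";

async function detectGradleVersion(projectDir) {
  const propsPath = join(projectDir, "gradle", "wrapper", "gradle-wrapper.properties");
  if (!existsSync(propsPath)) return undefined;
  const lines = (await readFile(propsPath, "utf-8"))
    .split("\n")
    .map((line) => line.trim())
    .filter((line) => line && !line.startsWith("#"));
  const distributionUrlRegex = /.*gradle-(\d+(\.\d+(\.\d+)?)?)/;
  for (const line of lines) {
    const match = distributionUrlRegex.exec(line);
    if (match) return match[1]; // e.g. "8.5" from ...distributions/gradle-8.5-bin.zip
  }
  return undefined;
}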
@@ -231261,13 +231591,13 @@ async function detect(projectDir) {
 // ../docker-management/src/maven/sbt-version-detector.ts
 import { existsSync as existsSync21 } from "fs";
 import { join as join24 } from "path";
-import { readFile as
+import { readFile as readFile31 } from "fs/promises";
 async function detectVariantSbt(projectDir) {
 return sanitizeJvmVariant("SBT", projectDir, await detect2(projectDir));
 }
 async function detect2(projectDir) {
 const sbtBuildPropertiesPath = join24(projectDir, "project", "build.properties");
-const sbtBuildProperties = existsSync21(sbtBuildPropertiesPath) ? (await
+const sbtBuildProperties = existsSync21(sbtBuildPropertiesPath) ? (await readFile31(sbtBuildPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
 if (!sbtBuildProperties) return void 0;
 for (const prop2 of sbtBuildProperties) {
 const [key, value2] = prop2.split("=");
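The sbt case above mirrors the Gradle one: project/build.properties is read, comments are skipped, and each remaining line is split on "=". A companion sketch follows; the "sbt.version" key name is the standard sbt convention and is assumed here, since the hunk only shows the key/value split.

// Sketch, assuming the detector looks up the standard sbt.version key.
import { existsSync } from "fs";
import { readFile } from "fs/promises";
import { join } from "path";

async function detectSbtVersion(projectDir) {
  const propsPath = join(projectDir, "project", "build.properties");
  if (!existsSync(propsPath)) return undefined;
  const lines = (await readFile(propsPath, "utf-8"))
    .split("\n")
    .map((line) => line.trim())
    .filter((line) => line && !line.startsWith("#"));
  for (const line of lines) {
    const [key, value] = line.split("=");
    if (key?.trim() === "sbt.version") return value?.trim();
  }
  return undefined;
}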
@@ -231381,7 +231711,7 @@ async function findReachabilityAnalyzersDockerImage(ecosystem) {
 // ../other-modules-communicator/src/other-modules-communicator.ts
 var import_lodash12 = __toESM(require_lodash(), 1);
 import { rmSync } from "fs";
-import { mkdir as mkdir5, readFile as
+import { mkdir as mkdir5, readFile as readFile32, writeFile as writeFile12 } from "fs/promises";
 import assert15 from "node:assert";
 import { platform as platform8 } from "os";
 import { join as join27, posix as posix2, relative as relative16, sep as sep3 } from "path";
@@ -231883,7 +232213,7 @@ var OtherModulesCommunicator = class {
 COANA_API_KEY: this.apiKey.type === "present" ? this.apiKey.value : ""
 }
 );
-return JSON.parse(await
+return JSON.parse(await readFile32(outputFilePathThisProcess, "utf-8")).result;
 }
 async runReachabilityAnalyzerCommand(commandName, ecosystem, subprojectPath, workspacePath, args2, env, rootWorkingDirOverride, displaySubprojectPath) {
 const tmpDir = await this.getTmpDirForSubproject(displaySubprojectPath ?? subprojectPath);
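The visible line suggests a result-file handoff: a helper process writes a JSON object to a temp file and the parent reads the .result field back. A hypothetical sketch of that pattern follows; all names are illustrative and not the bundle's actual API, since only the reading side is shown in the diff.

// Hypothetical sketch of a JSON result-file handoff between processes.
import { readFile, writeFile } from "fs/promises";

async function writeResultFile(outputFilePath, result) {
  // Helper process side: wrap the payload so the parent can distinguish
  // "no result yet" from an empty result.
  await writeFile(outputFilePath, JSON.stringify({ result }), "utf-8");
}

async function readResultFile(outputFilePath) {
  // Parent side: read the file back and unwrap the payload.
  return JSON.parse(await readFile(outputFilePath, "utf-8")).result;
}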
@@ -231957,7 +232287,7 @@ var OtherModulesCommunicator = class {
 rootWorkingDirOverride,
 displaySubprojectPath
 );
-return JSON.parse(await
+return JSON.parse(await readFile32(outputFilePathThisProcess, "utf-8")).result;
 }
 async runInDocker(ecosystem, image, entryPoint, commandName, args2, subprojectPath, tmpDir, env = process.env) {
 if (!await pullDockerImage(image)) return false;
@@ -233339,7 +233669,7 @@ import { join as join29, relative as relative17, resolve as resolve38 } from "pa
 
 // ../project-management/src/project-management/ecosystem-management/ecosystem-specs.ts
 import { existsSync as existsSync23 } from "fs";
-import { readdir as readdir5, readFile as
+import { readdir as readdir5, readFile as readFile33 } from "fs/promises";
 import { join as join28, sep as sep4 } from "path";
 var specs = {
 NPM: [
@@ -233418,7 +233748,7 @@ function packageManagerIfPackageJSONExistsAndValid(packageManager) {
 if (!existsSync23(join28(projectDir, "package.json"))) return void 0;
 const packageJSONPath = join28(projectDir, "package.json");
 try {
-JSON.parse(await
+JSON.parse(await readFile33(packageJSONPath, "utf-8"));
 return packageManager;
 } catch (e) {
 throw new InvalidProjectFileError(projectDir, "package.json");
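The hunk above validates a project by parsing its package.json: the package manager is only reported when the file exists and is valid JSON, and a parse failure is turned into a project-level error. A minimal sketch of that check follows; the error type used here is illustrative (the bundle throws its own InvalidProjectFileError).

// Sketch: report a package manager only if package.json exists and parses.
import { existsSync } from "fs";
import { readFile } from "fs/promises";
import { join } from "path";

async function packageManagerIfValidPackageJSON(projectDir, packageManager) {
  const packageJSONPath = join(projectDir, "package.json");
  if (!existsSync(packageJSONPath)) return undefined;
  try {
    JSON.parse(await readFile(packageJSONPath, "utf-8")); // throws on invalid JSON
    return packageManager;
  } catch {
    throw new Error(`Invalid package.json in ${projectDir}`);
  }
}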
@@ -234898,7 +235228,7 @@ var DEFAULT_REPORT_FILENAME_BASE = "coana-report";
 
 // dist/internal/exclude-dirs-from-configuration-files.js
 import { existsSync as existsSync25 } from "fs";
-import { readFile as
+import { readFile as readFile34 } from "fs/promises";
 import { basename as basename10, resolve as resolve41 } from "path";
 var import_yaml2 = __toESM(require_dist11(), 1);
 async function inferExcludeDirsFromConfigurationFiles(rootWorkingDir) {
@@ -234912,7 +235242,7 @@ async function inferExcludeDirsFromConfigurationFiles(rootWorkingDir) {
 }
 async function inferExcludeDirsFromSocketConfig(socketConfigFile) {
 try {
-const config3 = (0, import_yaml2.parse)(await
+const config3 = (0, import_yaml2.parse)(await readFile34(socketConfigFile, "utf8"));
 const version4 = config3.version;
 const ignorePaths = config3[version4 === 1 ? "ignore" : "projectIgnorePaths"];
 if (!ignorePaths)
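The hunk above parses the Socket config file as YAML and picks the ignore list whose key depends on the config version: "ignore" for version 1, "projectIgnorePaths" otherwise. A minimal sketch of that lookup follows, using the "yaml" package that the bundle wraps; the empty-list fallback is an assumption.

// Sketch: read ignore paths from a Socket config file, keyed by config version.
import { readFile } from "fs/promises";
import { parse } from "yaml";

async function readIgnorePaths(socketConfigFile) {
  // Throws if the file is missing or not valid YAML; callers would catch.
  const config = parse(await readFile(socketConfigFile, "utf8"));
  const ignorePaths = config[config.version === 1 ? "ignore" : "projectIgnorePaths"];
  return ignorePaths ?? [];
}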
@@ -234929,7 +235259,7 @@ async function inferExcludeDirsFromSocketConfig(socketConfigFile) {
 // dist/internal/manifest-upload.js
 var import_fast_glob = __toESM(require_out4(), 1);
 var import_ignore3 = __toESM(require_ignore(), 1);
-import { readFile as
+import { readFile as readFile35 } from "fs/promises";
 import { join as join31 } from "path";
 var DEFAULT_IGNORE_PATTERNS = [
 "**/node_modules/**",
@@ -234945,7 +235275,7 @@ var DEFAULT_IGNORE_PATTERNS = [
 async function loadGitignore(rootDir) {
 try {
 const gitignorePath = join31(rootDir, ".gitignore");
-const content = await
+const content = await readFile35(gitignorePath, "utf-8");
 return (0, import_ignore3.default)().add(content);
 } catch {
 return void 0;
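The hunk above builds a .gitignore matcher with the "ignore" package and falls back to undefined when the file is missing or unreadable. The same logic, written as standalone code rather than bundled output, looks roughly like this:

// Sketch: load .gitignore (if any) into an "ignore" matcher.
import { readFile } from "fs/promises";
import { join } from "path";
import ignore from "ignore";

async function loadGitignore(rootDir) {
  try {
    const content = await readFile(join(rootDir, ".gitignore"), "utf-8");
    return ignore().add(content); // matcher exposes .ignores(relativePath)
  } catch {
    return undefined; // no .gitignore: callers fall back to default patterns
  }
}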
@@ -236060,7 +236390,7 @@ function toSocketFactsSocketDependencyTree(artifacts, vulnerabilities, tier1Reac
 }
 
 // dist/internal/vulnerability-scanning.js
-import { readFile as
+import { readFile as readFile36 } from "fs/promises";
 
 // ../security-auditor/security-auditor-builder/src/mongo-connection.ts
 var import_mongodb = __toESM(require_lib31(), 1);
@@ -250929,7 +251259,7 @@ async function scanForVulnerabilities(dependencyTree, offlineVulnerabilityScanne
 }
 async function offlineScan(dependencyTree, offlineVulnerabilityScannerDBPath) {
 logger.info("using offline vulnerability scanner db");
-const offlineVulnerabilityScannerDB = JSON.parse(await
+const offlineVulnerabilityScannerDB = JSON.parse(await readFile36(offlineVulnerabilityScannerDBPath, "utf-8"));
 const { ecosystemToUrlToVulnerabilityDetails, vulnerabilityDatabase } = offlineVulnerabilityScannerDB;
 const coanaSupportedVulnerabilitiesLoader = CoanaSupportedVulnerabilitiesLoader.create(ecosystemToUrlToVulnerabilityDetails);
 const vulnerabilityAccessPathLoader = CoanaSupportedVulnerabilitiesLoader.create(ecosystemToUrlToVulnerabilityDetails);
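In the offline path shown above, the scanner database is a single JSON file that is read and destructured into its two visible top-level parts. A minimal sketch of that load step follows; any structure beyond these two fields is not visible in the diff and is not assumed here.

// Sketch: load the offline vulnerability scanner database from disk.
import { readFile } from "fs/promises";

async function loadOfflineVulnerabilityDB(dbPath) {
  const db = JSON.parse(await readFile(dbPath, "utf-8"));
  const { ecosystemToUrlToVulnerabilityDetails, vulnerabilityDatabase } = db;
  return { ecosystemToUrlToVulnerabilityDetails, vulnerabilityDatabase };
}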
@@ -250947,7 +251277,7 @@ async function onlineScan(dependencyTree, apiKey, timeout) {
 }
 
 // dist/version.js
-var version3 = "14.12.
+var version3 = "14.12.161";
 
 // dist/cli-core.js
 var { mapValues, omit, partition, pickBy: pickBy2 } = import_lodash15.default;
@@ -252023,7 +252353,7 @@ async function initializeComputeFixesAndUpgradePurls(path9, options) {
 var compareReportsCommand = new Command();
 compareReportsCommand.name("compare-reports").argument("<baselineReportPath>", "Path to the baseline report").argument("<newReportPath>", "Path to the new report").option("--api-key <key>", "Set the Coana dashboard API key.").option("-d, --debug", "Enable debug logging", false).option("--no-pr-comment", "Disable pull request comments (only relevant when run from a PR)", true).option("--no-block", "Do not fail with a non-zero exit code when new reachable vulnerabilities are detected", true).option("--ignore-undeterminable-reachability", "Ignore vulnerabilities with undeterminable reachability", false).action(async (baselineReportPath, newReportPath, options) => {
 async function readReport(reportPath) {
-return JSON.parse(await
+return JSON.parse(await readFile37(reportPath, "utf-8"));
 }
 const baselineReport = await readReport(baselineReportPath);
 const newReport = await readReport(newReportPath);
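The compare-reports command above loads both the baseline and the new report as plain JSON files before comparing them. A minimal sketch of that loader follows; the explicit error wrapping is an addition for illustration (the visible code simply lets readFile/JSON.parse throw).

// Sketch: read a coana report file as JSON, with an explanatory error.
import { readFile } from "fs/promises";

async function readReport(reportPath) {
  try {
    return JSON.parse(await readFile(reportPath, "utf-8"));
  } catch (e) {
    throw new Error(`Could not read report at ${reportPath}: ${e}`);
  }
}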