@coana-tech/cli 14.12.101 → 14.12.102
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.mjs +87 -116
- package/package.json +1 -1
- package/repos/coana-tech/goana/bin/goana-darwin-amd64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-darwin-arm64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-linux-amd64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-linux-arm64.gz +0 -0
- package/repos/coana-tech/javap-service/javap-service.jar +0 -0
package/cli.mjs
CHANGED
@@ -86512,7 +86512,7 @@ var require_lockfile = __commonJS({
 }
 const file = _ref22;
 if (yield exists2(file)) {
-return
+return readFile35(file);
 }
 }
 return null;
@@ -86531,7 +86531,7 @@ var require_lockfile = __commonJS({
 })();
 let readJsonAndFile = exports3.readJsonAndFile = (() => {
 var _ref24 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (loc) {
-const file = yield
+const file = yield readFile35(loc);
 try {
 return {
 object: (0, (_map || _load_map()).default)(JSON.parse(stripBOM2(file))),
@@ -86771,7 +86771,7 @@ var require_lockfile = __commonJS({
 };
 })();
 exports3.copy = copy;
-exports3.readFile =
+exports3.readFile = readFile35;
 exports3.readFileRaw = readFileRaw;
 exports3.normalizeOS = normalizeOS;
 var _fs;
@@ -86869,7 +86869,7 @@ var require_lockfile = __commonJS({
 });
 });
 }
-function
+function readFile35(loc) {
 return _readFile(loc, "utf8").then(normalizeOS);
 }
 function readFileRaw(loc) {
@@ -201554,7 +201554,7 @@ var {
 } = import_index.default;
 
 // dist/index.js
-import { mkdir as mkdir7, mkdtemp as mkdtemp2, readFile as
+import { mkdir as mkdir7, mkdtemp as mkdtemp2, readFile as readFile34, rm as rm2, writeFile as writeFile15 } from "fs/promises";
 import { tmpdir as tmpdir3 } from "os";
 import { dirname as dirname27, join as join30, resolve as resolve44 } from "path";
 
@@ -213271,7 +213271,6 @@ var GoFixingManager = class {
 
 // ../fixing-management/src/fixing-management/go/go-socket-upgrade-manager.ts
 import { dirname as dirname7, resolve as resolve7 } from "node:path";
-import { readFile as readFile5 } from "node:fs/promises";
 var import_picomatch = __toESM(require_picomatch4(), 1);
 import assert4 from "node:assert";
 
@@ -216351,7 +216350,6 @@ replace ${modulePath} ${currentVersion} => ${modulePath} ${newVersion}
 async refreshChecksumFiles(checksumFileToArtifacts, ctxt) {
 await asyncForEach(Object.entries(checksumFileToArtifacts), async ([checksumFile, artifacts]) => {
 const goModDir = dirname7(resolve7(this.rootDir, checksumFile));
-const oldFileContent = await readFile5(resolve7(this.rootDir, checksumFile), "utf-8").catch(() => "");
 const result = await execNeverFail2(["go", "mod", "tidy"], goModDir);
 if (result.error) {
 ctxt.statusUpdater?.({
@@ -216361,12 +216359,10 @@ replace ${modulePath} ${currentVersion} => ${modulePath} ${newVersion}
 message: `Failed to update checksum file: ${result.error.message ?? "Unknown error"}`
 });
 } else {
-const finalFileContent = await readFile5(resolve7(this.rootDir, checksumFile), "utf-8").catch(() => "");
 ctxt.statusUpdater?.({
 status: "success",
 file: checksumFile,
 message: "Checksum file updated",
-patch: createPatch(checksumFile, oldFileContent, finalFileContent, void 0, void 0, { context: 3 }),
 artifacts: i3(artifacts)
 });
 }
@@ -216376,12 +216372,12 @@ replace ${modulePath} ${currentVersion} => ${modulePath} ${newVersion}
 
 // ../fixing-management/src/fixing-management/maven/gradle-fixing-manager.ts
 import { existsSync as existsSync5 } from "node:fs";
-import { readFile as
+import { readFile as readFile8 } from "node:fs/promises";
 import { join as join6, resolve as resolve10 } from "node:path";
 
 // ../fixing-management/src/fixing-management/utils/coana-patch-application.ts
 import { existsSync as existsSync3 } from "node:fs";
-import { readFile as
+import { readFile as readFile5, writeFile as writeFile3 } from "node:fs/promises";
 import { resolve as resolve8 } from "node:path";
 function detectPatchConflicts(rootDir, patchResults) {
 const patchesByFile = /* @__PURE__ */ new Map();
@@ -216514,7 +216510,7 @@ async function applyPatchResults(ecosystem, rootDir, patchResults) {
 if (!existsSync3(filePath)) {
 await writeFile3(filePath, "", "utf-8");
 }
-let fileContent = await
+let fileContent = await readFile5(filePath, "utf-8");
 for (const patch of sortedPatches) {
 const start = patch.offset;
 const end2 = patch.offset + (patch.oldText?.length ?? 0);
@@ -216526,7 +216522,7 @@ async function applyPatchResults(ecosystem, rootDir, patchResults) {
 
 // ../fixing-management/src/fixing-management/maven/gradle-build-file-helper.ts
 var import_good_enough_parser = __toESM(require_cjs(), 1);
-import { readFile as
+import { readFile as readFile6 } from "node:fs/promises";
 
 // ../fixing-management/src/fixing-management/maven/utils.ts
 import { existsSync as existsSync4 } from "node:fs";
@@ -216748,7 +216744,7 @@ var treeQuery = import_good_enough_parser.query.tree({
 });
 async function findDependencyDeclsAndCatalogFiles(filePath) {
 const gradleLang = import_good_enough_parser.lang.createLang("groovy");
-const cursor = gradleLang.parse(await
+const cursor = gradleLang.parse(await readFile6(filePath, "utf-8"));
 const ctx = gradleLang.query(cursor, treeQuery, {
 mem: {},
 depDecls: [],
@@ -216784,7 +216780,7 @@ ${getConstraintsBlockString(groupId, artifactId, classifier, version4, indentati
 }
 
 // ../fixing-management/src/fixing-management/maven/gradle-version-catalog-helper.ts
-import { readFile as
+import { readFile as readFile7 } from "node:fs/promises";
 
 // ../utils/src/toml-utils.ts
 var tomlParser = __toESM(require_lib10(), 1);
@@ -217051,7 +217047,7 @@ function parseDependencyObject(valueNode) {
 };
 }
 async function findVersionCatalogDeclarations(filePath) {
-const catalogData = parseVersionCatalog(await
+const catalogData = parseVersionCatalog(await readFile7(filePath, "utf-8"));
 return {
 depDecls: catalogData.dependencies,
 versionDecls: catalogData.versions
@@ -217256,7 +217252,7 @@ var GradleFixingManager = class {
 newText: constraintStr + "\n"
 };
 } else {
-const fileContent = await
+const fileContent = await readFile8(targetBuildFile, "utf-8");
 const indentationSize = getIndentationSize(fileContent);
 const prependNewline = fileContent.split("\n").some((line) => !line.trim());
 const finalConstraintStr = getDependencyConstraintString(
@@ -217443,7 +217439,7 @@ var GradleFixingManager = class {
 async createConstraintsForFile(buildFile, fixes) {
 const { dependenciesBlocks, constraintsBlocks } = await findDependencyDeclsAndCatalogFiles(buildFile);
 const fileType = buildFile.endsWith(".kts") ? "kotlin" : "groovy";
-const fileContent = existsSync5(buildFile) ? await
+const fileContent = existsSync5(buildFile) ? await readFile8(buildFile, "utf-8") : "";
 const indentationSize = getIndentationSize(fileContent);
 const constraintDeclarations = fixes.map(({ dependencyDetails, fixedVersion }) => {
 const [groupId, artifactId] = dependencyDetails.packageName.split(":");
@@ -217550,7 +217546,7 @@ import { resolve as resolve12 } from "node:path";
 
 // ../utils/src/pom-utils.ts
 var import_parse_xml2 = __toESM(require_dist(), 1);
-import { readFile as
+import { readFile as readFile9 } from "node:fs/promises";
 import { existsSync as existsSync6 } from "node:fs";
 import { resolve as resolve11, join as join7, relative as relative4, dirname as dirname8 } from "node:path";
 
@@ -217686,7 +217682,7 @@ async function loadPom(rootDir, pomFile, validateFile, visited = /* @__PURE__ */
 if (!validatedPomFile || !existsSync6(validatedPomFile)) return void 0;
 if (visited.has(validatedPomFile)) return void 0;
 visited.add(validatedPomFile);
-const sourceText = await
+const sourceText = await readFile9(validatedPomFile, "utf-8");
 const xml2 = (0, import_parse_xml2.parseXml)(sourceText, { includeOffsets: true });
 const indentation = inferIndentationFromParsedXml(xml2, sourceText);
 const pom = {
@@ -218857,11 +218853,11 @@ import { dirname as dirname10, resolve as resolve15 } from "node:path";
 import assert7 from "node:assert";
 
 // ../fixing-management/src/fixing-management/maven/gradle-lockfile-utils.ts
-import { readFile as
+import { readFile as readFile10 } from "node:fs/promises";
 import { resolve as resolve14 } from "node:path";
 async function loadLockFile(rootDir, lockfilePath) {
 const file = resolve14(rootDir, lockfilePath);
-return { rootDir, file, sourceText: await
+return { rootDir, file, sourceText: await readFile10(file, "utf-8") };
 }
 
 // ../fixing-management/src/fixing-management/maven/handlers/gradle-lockfile-upgrade-handler.ts
@@ -218921,13 +218917,13 @@ var GradleLockfileUpgradeHandler = class {
 
 // ../fixing-management/src/fixing-management/maven/handlers/sbt-upgrade-handler.ts
 import { existsSync as existsSync7 } from "node:fs";
-import { readFile as
+import { readFile as readFile12 } from "node:fs/promises";
 import { basename as basename4, dirname as dirname11, resolve as resolve16 } from "node:path";
 import assert8 from "node:assert";
 
 // ../fixing-management/src/fixing-management/maven/sbt-project-utils.ts
 var import_good_enough_parser2 = __toESM(require_cjs(), 1);
-import { readFile as
+import { readFile as readFile11 } from "node:fs/promises";
 var pathQuery2 = import_good_enough_parser2.query.sym((ctx, { offset, value: value2 }) => {
 return { ...ctx, pathOffset: offset, pathValue: value2 };
 }).many(
@@ -219104,7 +219100,7 @@ var treeQuery2 = import_good_enough_parser2.query.tree({
 });
 async function loadSbtProject(filePath, acc = { mem: {}, moduleIds: [] }) {
 const scalaLang = import_good_enough_parser2.lang.createLang("scala");
-const cursor = scalaLang.parse(await
+const cursor = scalaLang.parse(await readFile11(filePath, "utf-8"));
 return scalaLang.query(cursor, treeQuery2, acc) ?? acc;
 }
 function evaluate2(v) {
@@ -219245,7 +219241,7 @@ ${indent(1, indentationSize)}`)}
 `
 };
 } else {
-const fileContent = await
+const fileContent = await readFile12(dependencyOverridesFile, "utf-8");
 const indentationSize = getIndentationSize(fileContent);
 const prependNewline = fileContent.length > 0 && !fileContent.endsWith("\n\n");
 return {
@@ -219365,7 +219361,7 @@ var MavenSocketUpgradeManager = class {
 
 // ../fixing-management/src/fixing-management/maven/sbt-fixing-manager.ts
 import { existsSync as existsSync8 } from "node:fs";
-import { readFile as
+import { readFile as readFile13 } from "node:fs/promises";
 import { join as join8 } from "node:path";
 var SbtFixingManager = class {
 constructor(rootDir, subprojectPath, otherModulesCommunicator) {
@@ -219567,7 +219563,7 @@ var SbtFixingManager = class {
 `
 };
 } else {
-const fileContent = await
+const fileContent = await readFile13(workspaceBuildSbtPath, "utf-8");
 const prependNewline = fileContent.split("\n").some((line) => !line.trim());
 return {
 file: workspaceBuildSbtPath,
@@ -219646,7 +219642,7 @@ ${indent(1, indentationSize)}`)}
 newText: overrideText
 };
 } else {
-const fileContent = await
+const fileContent = await readFile13(workspaceBuildSbtPath, "utf-8");
 const indentationSize = getIndentationSize(fileContent);
 const prependNewline = fileContent.length > 0 && !fileContent.endsWith("\n\n");
 const overrideText = `dependencyOverrides ++= Seq(
@@ -219665,7 +219661,7 @@ ${indent(1, indentationSize)}`)}
 
 // ../fixing-management/src/fixing-management/npm/npm-socket-upgrade-manager.ts
 import { existsSync as existsSync12 } from "fs";
-import { readFile as
+import { readFile as readFile18 } from "fs/promises";
 import assert10 from "node:assert";
 import { dirname as dirname15, join as join11, relative as relative8, resolve as resolve25 } from "path";
 
@@ -225511,7 +225507,7 @@ async function getWorkspacePathsFromPnpmLockFile(lockFileDir, useDotWhenNoWorksp
 }
 
 // ../fixing-management/src/fixing-management/npm/npm-fixing-manager.ts
-import { readFile as
+import { readFile as readFile14, writeFile as writeFile4 } from "fs/promises";
 import { resolve as resolve20 } from "path";
 
 // ../fixing-management/src/fixing-management/npm/npm-ecosystem-fixing-manager.ts
@@ -225568,7 +225564,7 @@ var NpmFixingManager = class extends NpmEcosystemFixingManager {
 }
 async applySecurityFixesSpecificPackageManager(fixes) {
 const pkgLockLocation = resolve20(this.rootDir, this.subprojectPath, "package-lock.json");
-const packageLockContent = await
+const packageLockContent = await readFile14(pkgLockLocation, "utf-8");
 const getPackageName = (pkgPath) => {
 const strings = pkgPath.split("node_modules/");
 return strings[strings.length - 1];
@@ -225597,7 +225593,7 @@ var NpmFixingManager = class extends NpmEcosystemFixingManager {
 };
 
 // ../fixing-management/src/fixing-management/npm/pnpm-fixing-manager.ts
-import { readFile as
+import { readFile as readFile15, writeFile as writeFile5 } from "fs/promises";
 import { resolve as resolve21 } from "path";
 var import_yaml = __toESM(require_dist10(), 1);
 var import_lockfile_file2 = __toESM(require_lib25(), 1);
@@ -225741,7 +225737,7 @@ function getVersionNumber(version4) {
 return match2 ? `${match2[1]}` : version4;
 }
 async function readYamlFile(workspaceYamlFile) {
-const workspaceYamlString = await
+const workspaceYamlString = await readFile15(workspaceYamlFile, "utf8");
 const parser2 = new import_yaml.Parser();
 const [ast] = parser2.parse(workspaceYamlString);
 return ast;
@@ -225778,7 +225774,7 @@ function updateCatalog(update3, map2) {
 }
 
 // ../fixing-management/src/fixing-management/npm/yarn-fixing-manager.ts
-import { readFile as
+import { readFile as readFile17, writeFile as writeFile6 } from "fs/promises";
 import { resolve as resolve24 } from "path";
 
 // ../utils/src/package-utils.ts
@@ -225813,12 +225809,12 @@ var import_yarnlock_parse_raw = __toESM(require_yarnlock_parse_raw(), 1);
 // ../fixing-management/src/fixing-management/npm/yarn-utils.ts
 var lockfile = __toESM(require_lockfile(), 1);
 var import_parsers = __toESM(require_lib27(), 1);
-import { readFile as
+import { readFile as readFile16 } from "fs/promises";
 import { resolve as resolve23 } from "path";
 async function getYarnType(projectDir) {
 const yarnLockLocation = resolve23(projectDir, "yarn.lock");
 try {
-const content = await
+const content = await readFile16(yarnLockLocation, "utf8");
 if (!content || content.length === 0) {
 return void 0;
 }
@@ -225916,7 +225912,7 @@ var YarnFixingManager = class extends NpmEcosystemFixingManager {
 logger.debug("Installation completed.");
 }
 async getYarnLockObj(filePath) {
-const fileString = await
+const fileString = await readFile17(filePath, "utf8");
 const yarnType = await this.getYarnType();
 return yarnType === "classic" ? (0, import_yarnlock_parse_raw.parseYarnLockRawV1)(fileString) : (0, import_yarnlock_parse_raw.parseYarnLockRawV2)(fileString);
 }
@@ -226110,15 +226106,12 @@ var NpmSocketUpgradeManager = class {
 }));
 const lockfileName = this.getLockfileName(subprojectDir);
 const lockfilePath = join11(subprojectDir, lockfileName);
-const oldLockfileContent = await readFile19(resolve25(this.rootDir, lockfilePath), "utf-8");
 try {
 await fixingManager.applySecurityFixesSpecificPackageManager(upgradesTransformed);
-const newLockfileContent = await readFile19(resolve25(this.rootDir, lockfilePath), "utf-8");
 ctxt.statusUpdater?.({
 status: "success",
 file: lockfilePath,
 message: "Lockfile updated with dependency upgrades",
-patch: createPatch(lockfilePath, oldLockfileContent, newLockfileContent, void 0, void 0, { context: 3 }),
 artifacts: i3(allUpgrades.map((u8) => u8.idx))
 });
 } catch (e) {
@@ -226152,22 +226145,12 @@ var NpmSocketUpgradeManager = class {
 await applyPatches("NPM", this.rootDir, directPatches, ctxt);
 }
 }
-const lockfileContentBeforeFinalize = await readFile19(resolve25(this.rootDir, lockfilePath), "utf-8");
 try {
 await fixingManager.finalizeFixes();
-const lockfileContentAfterFinalize = await readFile19(resolve25(this.rootDir, lockfilePath), "utf-8");
 ctxt.statusUpdater?.({
 status: "success",
 file: lockfilePath,
 message: "Lockfile finalized",
-patch: createPatch(
-lockfilePath,
-lockfileContentBeforeFinalize,
-lockfileContentAfterFinalize,
-void 0,
-void 0,
-{ context: 3 }
-),
 artifacts: i3(allUpgrades.map((u8) => u8.idx))
 });
 } catch (e) {
@@ -226215,7 +226198,7 @@ var NpmSocketUpgradeManager = class {
 assert10(artifact.name);
 assert10(artifact.version);
 const patches = [];
-const packageJsonContent = await
+const packageJsonContent = await readFile18(resolve25(this.rootDir, mf.file), "utf-8");
 if (mf?.start !== void 0 && mf?.end !== void 0) {
 const originalVersionString = packageJsonContent.substring(mf.start, mf.end);
 let newVersionString;
@@ -226269,7 +226252,7 @@ var RushFixingManager = class {
 };
 
 // ../fixing-management/src/fixing-management/nuget/nuget-fixing-manager.ts
-import { readFile as
+import { readFile as readFile19, writeFile as writeFile7 } from "fs/promises";
 import { join as join12 } from "path";
 
 // ../utils/src/nuget-utils.ts
@@ -226372,14 +226355,14 @@ var NugetFixingManager = class {
 if (projectFiles.length !== 1)
 throw new Error("Applying fixes to workspaces with more than 1 project file currently not supported");
 const projectFilePath = join12(this.getAbsWsPath(wsPath), projectFiles[0]);
-const initialProjectFile = await
+const initialProjectFile = await readFile19(projectFilePath, "utf-8");
 const initialLockFile = await this.restoreWorkspaceAndParseLockFile(wsPath);
 await applySeries(fixesWithId, async ({ fixId, vulnerabilityFixes }) => {
 await this.applySecurityFixesForWorkspace(wsPath, projectFilePath, vulnerabilityFixes, dependencyTree);
 signalFixApplied2?.(fixId, this.subprojectPath, wsPath, vulnerabilityFixes);
 });
-const finalProjectFile = await
-const finalLockFile = JSON.parse(await
+const finalProjectFile = await readFile19(projectFilePath, "utf-8");
+const finalLockFile = JSON.parse(await readFile19(this.getLockFilePath(wsPath), "utf-8"));
 await writeFile7(projectFilePath, initialProjectFile);
 await writeFile7(this.getLockFilePath(wsPath), JSON.stringify(initialLockFile, null, 2));
 return { projectFile: finalProjectFile, lockFile: finalLockFile };
@@ -226411,7 +226394,7 @@ var NugetFixingManager = class {
 }
 }
 async applySecurityFixesForWorkspace(wsPath, projectFilePath, vulnFixes, dependencyTree) {
-const initialProjectFile = await
+const initialProjectFile = await readFile19(projectFilePath, "utf-8");
 const initialLockFile = await this.restoreWorkspaceAndParseLockFile(wsPath);
 const typeCache = new Cache();
 const requestedCache = new Cache();
@@ -226501,7 +226484,7 @@ var NugetFixingManager = class {
 async restoreWorkspaceAndParseLockFile(wsPath) {
 const succeeded = await execAndLogOnFailure2("dotnet restore --use-lock-file", this.getAbsWsPath(wsPath));
 if (!succeeded) throw new Error(`Error applying fix - could not restore project ${this.subprojectPath}/${wsPath}`);
-return JSON.parse(await
+return JSON.parse(await readFile19(this.getLockFilePath(wsPath), "utf-8"));
 }
 getLockFilePath(wsPath, lockFileName = "packages.lock.json") {
 return join12(this.getAbsWsPath(wsPath), lockFileName);
@@ -226579,7 +226562,7 @@ import { dirname as dirname17, resolve as resolve27 } from "node:path";
|
|
|
226579
226562
|
|
|
226580
226563
|
// ../utils/src/nuget-project-utils.ts
|
|
226581
226564
|
var import_parse_xml3 = __toESM(require_dist(), 1);
|
|
226582
|
-
import { readFile as
|
|
226565
|
+
import { readFile as readFile20 } from "node:fs/promises";
|
|
226583
226566
|
import { dirname as dirname16, join as join14, relative as relative9, resolve as resolve26, basename as basename7, extname } from "node:path";
|
|
226584
226567
|
import { existsSync as existsSync13 } from "node:fs";
|
|
226585
226568
|
|
|
@@ -228137,7 +228120,7 @@ async function loadNuGetProjectOrTarget(rootDir, projectFile, mainProject, visit
|
|
|
228137
228120
|
if (!validatedProjectPath || !existsSync13(validatedProjectPath)) return void 0;
|
|
228138
228121
|
if (visited.has(validatedProjectPath)) return void 0;
|
|
228139
228122
|
visited.set(validatedProjectPath);
|
|
228140
|
-
const sourceText = await
|
|
228123
|
+
const sourceText = await readFile20(validatedProjectPath, "utf-8");
|
|
228141
228124
|
const xml2 = (0, import_parse_xml3.parseXml)(sourceText, { includeOffsets: true });
|
|
228142
228125
|
const indentation = inferIndentationFromParsedXml2(xml2, sourceText);
|
|
228143
228126
|
const currentProject = {
|
|
@@ -228210,7 +228193,7 @@ async function loadNuGetProjectOrTarget(rootDir, projectFile, mainProject, visit
|
|
|
228210
228193
|
async function loadPackagesConfig(rootDir, file, validateFile) {
|
|
228211
228194
|
const validatedConfigPath = validateFile(resolve26(rootDir, file));
|
|
228212
228195
|
if (!validatedConfigPath || !existsSync13(validatedConfigPath)) return void 0;
|
|
228213
|
-
const sourceText = await
|
|
228196
|
+
const sourceText = await readFile20(validatedConfigPath, "utf-8");
|
|
228214
228197
|
const configXml = (0, import_parse_xml3.parseXml)(sourceText, { includeOffsets: true });
|
|
228215
228198
|
const packages = extractPackagesFromXml(configXml, sourceText);
|
|
228216
228199
|
return {
|
|
@@ -229031,17 +229014,17 @@ import { dirname as dirname19, relative as relative10, resolve as resolve29 } fr
|
|
|
229031
229014
|
var import_picomatch5 = __toESM(require_picomatch4(), 1);
|
|
229032
229015
|
var import_semver4 = __toESM(require_semver2(), 1);
|
|
229033
229016
|
import assert12 from "node:assert";
|
|
229034
|
-
import { readFile as
|
|
229017
|
+
import { readFile as readFile22, writeFile as writeFile8 } from "node:fs/promises";
|
|
229035
229018
|
|
|
229036
229019
|
// ../utils/src/cargo-utils.ts
|
|
229037
|
-
import { readFile as
|
|
229020
|
+
import { readFile as readFile21 } from "node:fs/promises";
|
|
229038
229021
|
import { dirname as dirname18, resolve as resolve28 } from "node:path";
|
|
229039
229022
|
var import_picomatch4 = __toESM(require_picomatch4(), 1);
|
|
229040
229023
|
async function getCargoTomlFilesForCargoLockFile(rootDir, cargoLockFile, cargoTomlFiles) {
|
|
229041
229024
|
const lockDir = dirname18(cargoLockFile);
|
|
229042
229025
|
const rootTomlFile = cargoTomlFiles.find((file) => dirname18(file) === lockDir);
|
|
229043
229026
|
if (!rootTomlFile) return void 0;
|
|
229044
|
-
const rootTomlContent = await
|
|
229027
|
+
const rootTomlContent = await readFile21(resolve28(rootDir, rootTomlFile), "utf-8");
|
|
229045
229028
|
const toml = parseTOML2(rootTomlContent);
|
|
229046
229029
|
if (!toml) return void 0;
|
|
229047
229030
|
const memberPatterns = [];
|
|
@@ -229138,7 +229121,7 @@ var CargoSocketUpgradeManager = class {
 const path9 = resolve29(this.rootDir, file);
 if (!restoreMap.has(path9)) {
 restoreMap.set(path9, {
-content: await
+content: await readFile22(path9, "utf-8"),
 artifacts: []
 });
 }
@@ -229200,7 +229183,7 @@ var CargoSocketUpgradeManager = class {
 */
 async createDirectDependencyPatches(mf, idx, upgradeVersion, ctxt) {
 const fullPath = resolve29(this.rootDir, mf.file);
-const content = await
+const content = await readFile22(fullPath, "utf-8");
 const toml = parseTOML2(content);
 if (!toml) {
 ctxt.statusUpdater?.({
@@ -229289,7 +229272,7 @@ var CargoSocketUpgradeManager = class {
 */
 async createTransitiveDependencyPatches(tomlFile, idx, upgradeVersion, ctxt) {
 const fullPath = resolve29(this.rootDir, tomlFile);
-const content = await
+const content = await readFile22(fullPath, "utf-8");
 const toml = parseTOML2(content);
 if (!toml) {
 ctxt.statusUpdater?.({
@@ -229329,7 +229312,6 @@ ${newDependencyLine}`
 async refreshLockfiles(lockfileToArtifacts, ctxt, _mode2) {
 await asyncForEach(Object.entries(lockfileToArtifacts), async ([lockfile2, artifacts]) => {
 const lockfileDir = dirname19(resolve29(this.rootDir, lockfile2));
-const oldFileContent = await readFile23(resolve29(this.rootDir, lockfile2), "utf-8");
 let result;
 if (this.cargoLockMatcher(lockfile2)) {
 result = await execNeverFail2(["cargo", "fetch"], lockfileDir);
@@ -229343,12 +229325,10 @@ ${newDependencyLine}`
 return;
 }
 if (!result.error) {
-const finalFileContent = await readFile23(resolve29(this.rootDir, lockfile2), "utf-8");
 ctxt.statusUpdater?.({
 status: "success",
 file: lockfile2,
 message: "Lockfile updated",
-patch: createPatch(lockfile2, oldFileContent, finalFileContent, void 0, void 0, { context: 3 }),
 artifacts: i3(artifacts)
 });
 } else {
@@ -229369,12 +229349,12 @@ ${newDependencyLine}`
 import { dirname as dirname21, relative as relative11, resolve as resolve32 } from "node:path";
 var import_picomatch7 = __toESM(require_picomatch4(), 1);
 import assert13 from "node:assert";
-import { readFile as
+import { readFile as readFile25, writeFile as writeFile9 } from "node:fs/promises";
 var import_pip_requirements_js = __toESM(require_dist11(), 1);
 
 // ../utils/src/pip-utils.ts
 import { existsSync as existsSync14 } from "node:fs";
-import { readFile as
+import { readFile as readFile24 } from "node:fs/promises";
 import { dirname as dirname20, resolve as resolve31 } from "node:path";
 import util4 from "node:util";
 
@@ -229383,7 +229363,7 @@ var import_lodash6 = __toESM(require_lodash(), 1);
 var import_semver5 = __toESM(require_semver2(), 1);
 import { execFileSync } from "child_process";
 import { constants as constants3 } from "fs";
-import { access as access3, readFile as
+import { access as access3, readFile as readFile23 } from "fs/promises";
 import { join as join15, resolve as resolve30 } from "path";
 import util3 from "util";
 var { once: once2 } = import_lodash6.default;
@@ -229403,14 +229383,14 @@ var hasPyenv = once2(async () => !(await execNeverFail2("which pyenv")).error);
 // ../utils/src/pip-utils.ts
 var import_picomatch6 = __toESM(require_picomatch4(), 1);
 async function isSetupPySetuptools(file) {
-const content = await
+const content = await readFile24(file, "utf-8");
 return content.includes("setup(") && (/^\s*from\s+(?:setuptools|distutils\.core)\s+import\s+.*setup/m.test(content) || /^\s*import\s+(?:setuptools|distutils\.core)/m.test(content));
 }
 async function getPyprojectTomlFilesForLockFile(rootDir, uvLockfile, pyprojectFiles) {
 const lockDir = dirname20(uvLockfile);
 const rootTomlFile = pyprojectFiles.find((file) => dirname20(file) === lockDir);
 if (!rootTomlFile) return void 0;
-const rootTomlContent = await
+const rootTomlContent = await readFile24(resolve31(rootDir, rootTomlFile), "utf-8");
 const toml = parseTOML2(rootTomlContent);
 if (!toml) return void 0;
 const memberPatterns = [];
@@ -229843,7 +229823,7 @@ var PipSocketUpgradeManager = class {
 const path9 = resolve32(this.rootDir, file);
 if (!restoreMap.has(path9)) {
 restoreMap.set(path9, {
-content: await
+content: await readFile25(path9, "utf-8"),
 artifacts: []
 });
 }
@@ -229920,7 +229900,7 @@ var PipSocketUpgradeManager = class {
 const refStart = ref.start;
 const refEnd = ref.end;
 try {
-const content = await
+const content = await readFile25(fullPath, "utf-8");
 const requirements = (0, import_pip_requirements_js.parsePipRequirementsFileLoosely)(content, { includeLocations: true });
 const foundRequirement = requirements.filter((req) => req.data.type === "ProjectName").find((req) => refStart <= req.location.startIdx && req.location.endIdx <= refEnd);
 if (foundRequirement) {
@@ -229953,7 +229933,7 @@ var PipSocketUpgradeManager = class {
 assert13(artifact.version);
 const patches = [];
 try {
-const content = await
+const content = await readFile25(fullPath, "utf-8");
 const newText = `${artifact.name}==${upgradeVersion}`;
 patches.push({
 file: requirementsFile,
@@ -229979,7 +229959,7 @@ ${newText}
 */
 async createPyprojectTomlDirectDependencyPatches(tomlFile, idx, upgradeVersion, ctxt) {
 const fullPath = resolve32(this.rootDir, tomlFile);
-const content = await
+const content = await readFile25(fullPath, "utf-8");
 const toml = parseTOML2(content);
 if (!toml) {
 ctxt.statusUpdater?.({
@@ -230098,7 +230078,7 @@ ${newText}
 assert13(artifact.version);
 const patches = [];
 try {
-const content = await
+const content = await readFile25(resolve32(this.rootDir, pyprojectToml), "utf-8");
 const toml = parseTOML2(content);
 if (!toml) {
 ctxt.statusUpdater?.({
@@ -230154,7 +230134,7 @@ ${newText}
 assert13(artifact.version);
 const patches = [];
 try {
-const content = await
+const content = await readFile25(resolve32(this.rootDir, pyprojectToml), "utf-8");
 const toml = parseTOML2(content);
 if (!toml) {
 ctxt.statusUpdater?.({
@@ -230225,7 +230205,6 @@ ${" ".repeat(4)}"${artifact.name}==${upgradeVersion}",
 async refreshLockfiles(lockfileToArtifacts, ctxt, _mode2) {
 await asyncForEach(Object.entries(lockfileToArtifacts), async ([lockfile2, artifacts]) => {
 const lockfileDir = dirname21(resolve32(this.rootDir, lockfile2));
-const oldFileContent = await readFile26(resolve32(this.rootDir, lockfile2), "utf-8");
 let result;
 if (this.uvLockMatcher(lockfile2)) {
 result = await execNeverFail2(["uv", "lock"], lockfileDir);
@@ -230239,12 +230218,10 @@ ${" ".repeat(4)}"${artifact.name}==${upgradeVersion}",
|
|
|
230239
230218
|
return;
|
|
230240
230219
|
}
|
|
230241
230220
|
if (!result.error) {
|
|
230242
|
-
const finalFileContent = await readFile26(resolve32(this.rootDir, lockfile2), "utf-8");
|
|
230243
230221
|
ctxt.statusUpdater?.({
|
|
230244
230222
|
status: "success",
|
|
230245
230223
|
file: lockfile2,
|
|
230246
230224
|
message: "Lockfile updated",
|
|
230247
|
-
patch: createPatch(lockfile2, oldFileContent, finalFileContent, void 0, void 0, { context: 3 }),
|
|
230248
230225
|
artifacts: i3(artifacts)
|
|
230249
230226
|
});
|
|
230250
230227
|
} else {
|
|
@@ -230265,7 +230242,7 @@ function canonicalizePyPIName(name2) {
|
|
|
230265
230242
|
}
|
|
230266
230243
|
async function getDependenciesMapFromUvLock(rootDir, lockFile, pyprojectTomlFilesForLockFile) {
|
|
230267
230244
|
const result = /* @__PURE__ */ new Map();
|
|
230268
|
-
const lockToml = parseTOML2(await
|
|
230245
|
+
const lockToml = parseTOML2(await readFile25(resolve32(rootDir, lockFile), "utf-8"));
|
|
230269
230246
|
if (!lockToml) return result;
|
|
230270
230247
|
const children2 = /* @__PURE__ */ new Map();
|
|
230271
230248
|
const packages = lockToml.package;
|
|
@@ -230289,7 +230266,7 @@ async function getDependenciesMapFromUvLock(rootDir, lockFile, pyprojectTomlFile
|
|
|
230289
230266
|
}
|
|
230290
230267
|
const tomlFileToName = new Map(
|
|
230291
230268
|
await asyncFilterMap(pyprojectTomlFilesForLockFile, async (tomlFile) => {
|
|
230292
|
-
const toml = parseTOML2(await
|
|
230269
|
+
const toml = parseTOML2(await readFile25(resolve32(rootDir, tomlFile), "utf-8"));
|
|
230293
230270
|
return toml && toml.project instanceof TOMLTable && toml.project.name instanceof TOMLScalar && typeof toml.project.name[value] === "string" ? [tomlFile, toml.project.name[value]] : void 0;
|
|
230294
230271
|
})
|
|
230295
230272
|
);
|
|
@@ -230679,7 +230656,7 @@ function parseGemfileLock(content) {
|
|
|
230679
230656
|
}
|
|
230680
230657
|
|
|
230681
230658
|
// ../fixing-management/src/fixing-management/rubygems/rubygems-socket-upgrade-manager.ts
|
|
230682
|
-
import { readFile as
|
|
230659
|
+
import { readFile as readFile26, writeFile as writeFile10 } from "node:fs/promises";
|
|
230683
230660
|
|
|
230684
230661
|
// ../fixing-management/src/fixing-management/rubygems/rubygems-patch-utils.ts
|
|
230685
230662
|
function createRubygemVersionPatches(gem, idx, upgradeVersion, rangeStyle, statusUpdater) {
|
|
@@ -230892,7 +230869,7 @@ var RubygemsSocketUpgradeManager = class {
|
|
|
230892
230869
|
for (const mf of artifact.manifestFiles ?? []) {
|
|
230893
230870
|
if (this.gemfileLockMatcher(mf.file)) {
|
|
230894
230871
|
if (ctxt.wsFilter && !ctxt.wsFilter(dirname23(mf.file) || ".")) continue;
|
|
230895
|
-
const lockfileContent = await
|
|
230872
|
+
const lockfileContent = await readFile26(resolve34(this.rootDir, mf.file), "utf-8");
|
|
230896
230873
|
const gemfileLock = parseGemfileLock(lockfileContent);
|
|
230897
230874
|
const pathGems = [];
|
|
230898
230875
|
for (const [pathGemName, deps] of gemfileLock.pathDependencies) {
|
|
@@ -230943,7 +230920,7 @@ var RubygemsSocketUpgradeManager = class {
|
|
|
230943
230920
|
const path9 = resolve34(this.rootDir, file);
|
|
230944
230921
|
if (!restoreMap.has(path9)) {
|
|
230945
230922
|
restoreMap.set(path9, {
|
|
230946
|
-
content: await
|
|
230923
|
+
content: await readFile26(path9, "utf-8"),
|
|
230947
230924
|
artifacts: []
|
|
230948
230925
|
});
|
|
230949
230926
|
}
|
|
@@ -230965,15 +230942,12 @@ var RubygemsSocketUpgradeManager = class {
 if (lockfile2 !== void 0) (lockfileToArtifacts[lockfile2] ??= []).push(...artifacts);
 }
 await asyncForEach(Object.entries(lockfileToArtifacts), async ([file, artifacts]) => {
-const oldFileContent = await readFile27(resolve34(this.rootDir, file), "utf-8");
 const result = await execNeverFail2("bundle lock", dirname23(resolve34(this.rootDir, file)));
-const updatedFileContent = await readFile27(resolve34(this.rootDir, file), "utf-8");
 if (!result.error) {
 ctxt.statusUpdater?.({
 status: "success",
 file,
 message: "Lockfile updated",
-patch: createPatch(file, oldFileContent, updatedFileContent, void 0, void 0, { context: 3 }),
 artifacts: i3(artifacts)
 });
 } else {
@@ -231007,15 +230981,12 @@ var RubygemsSocketUpgradeManager = class {
 });
 await applyPatches("RUBYGEMS", this.rootDir, directPatches, ctxt);
 await asyncForEach(Object.entries(lockfileToArtifacts), async ([file, artifacts]) => {
-const oldFileContent = await readFile27(resolve34(this.rootDir, file), "utf-8");
 const result = await execNeverFail2(cmdt`bundler lock`, dirname23(resolve34(this.rootDir, file)));
-const updatedFileContent = await readFile27(resolve34(this.rootDir, file), "utf-8");
 if (!result.error) {
 ctxt.statusUpdater?.({
 status: "success",
 file,
 message: "Lockfile updated",
-patch: createPatch(file, oldFileContent, updatedFileContent, void 0, void 0, { context: 3 }),
 artifacts
 });
 } else {
@@ -231041,7 +231012,7 @@ var RubygemsSocketUpgradeManager = class {
 const gemfilePatches = [];
 const artifact = ctxt.artifacts[idx];
 try {
-const gemfileContent = await
+const gemfileContent = await readFile26(resolve34(this.rootDir, gemfilePath), "utf-8");
 const gemfile = parseGemfile(this.rootDir, gemfilePath, gemfileContent);
 const gemspecFiles = /* @__PURE__ */ new Set();
 for (const gem of gemfile.gems) {
@@ -231062,7 +231033,7 @@ var RubygemsSocketUpgradeManager = class {
 let foundInGemspec = false;
 for (const gemspecFile of gemspecFiles) {
 try {
-const gemspecContent = await
+const gemspecContent = await readFile26(resolve34(this.rootDir, gemspecFile), "utf-8");
 const { parseGemspec: parseGemspec2 } = await Promise.resolve().then(() => (init_gemspec_utils(), gemspec_utils_exports));
 const gemspec = parseGemspec2(this.rootDir, gemspecFile, gemspecContent);
 for (const gem of gemspec.dependencies) {
@@ -231127,7 +231098,7 @@ var RubygemsSocketUpgradeManager = class {
 const [version4] = artifact.version.split("-");
 const patches = [];
 try {
-const sourceText = await
+const sourceText = await readFile26(resolve34(this.rootDir, file), "utf-8");
 const gemfile = parseGemfile(this.rootDir, file, sourceText);
 for (const gem of gemfile.gems) {
 if (evaluate4(gem.name) !== packageName) continue;
@@ -231170,7 +231141,7 @@ var RubygemsSocketUpgradeManager = class {
 */
 async createAddGemPatch(file, idx, updateVersion, ctxt) {
 const artifact = ctxt.artifacts[idx];
-const content = await
+const content = await readFile26(resolve34(this.rootDir, file), "utf-8");
 const trimmedLength = content.trimEnd().length;
 const needsNewline = trimmedLength > 0 && !content[trimmedLength - 1].match(/\n/);
 return {
@@ -231434,7 +231405,7 @@ function flattenDockerSpec({
 var import_winston2 = __toESM(require_winston(), 1);
 import { Console as Console2 } from "console";
 import { createWriteStream as createWriteStream3 } from "fs";
-import { readFile as
+import { readFile as readFile27 } from "fs/promises";
 
 // ../web-compat-utils/dist/util-formatter.js
 import { format as format3 } from "util";
@@ -231590,7 +231561,7 @@ var CLILogger2 = class {
 await this.finish();
 let logContent;
 try {
-logContent = await
+logContent = await readFile27(logFilePath, "utf-8");
 } catch (e) {
 console.error("Error reading log file", e);
 }
@@ -231635,13 +231606,13 @@ async function detectVariantMaven(projectDir) {
 // ../docker-management/src/maven/gradle-version-detector.ts
 import { existsSync as existsSync17 } from "fs";
 import { join as join19 } from "path";
-import { readFile as
+import { readFile as readFile28 } from "fs/promises";
 async function detectVariantGradle(projectDir) {
 return sanitizeJvmVariant("GRADLE", projectDir, await detect(projectDir));
 }
 async function detect(projectDir) {
 const gradleWrapperPropertiesPath = join19(projectDir, "gradle", "wrapper", "gradle-wrapper.properties");
-const gradleWrapperProperties = existsSync17(gradleWrapperPropertiesPath) ? (await
+const gradleWrapperProperties = existsSync17(gradleWrapperPropertiesPath) ? (await readFile28(gradleWrapperPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
 if (!gradleWrapperProperties) return void 0;
 const distributionUrlRegex = /.*gradle-(\d+(\.\d+(\.\d+)?)?)/;
 for (const prop2 of gradleWrapperProperties) {
@@ -231657,13 +231628,13 @@ async function detect(projectDir) {
 // ../docker-management/src/maven/sbt-version-detector.ts
 import { existsSync as existsSync18 } from "fs";
 import { join as join20 } from "path";
-import { readFile as
+import { readFile as readFile29 } from "fs/promises";
 async function detectVariantSbt(projectDir) {
 return sanitizeJvmVariant("SBT", projectDir, await detect2(projectDir));
 }
 async function detect2(projectDir) {
 const sbtBuildPropertiesPath = join20(projectDir, "project", "build.properties");
-const sbtBuildProperties = existsSync18(sbtBuildPropertiesPath) ? (await
+const sbtBuildProperties = existsSync18(sbtBuildPropertiesPath) ? (await readFile29(sbtBuildPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
 if (!sbtBuildProperties) return void 0;
 for (const prop2 of sbtBuildProperties) {
 const [key, value2] = prop2.split("=");
@@ -231777,7 +231748,7 @@ async function findReachabilityAnalyzersDockerImage(ecosystem) {
 // ../other-modules-communicator/src/other-modules-communicator.ts
 var import_lodash12 = __toESM(require_lodash(), 1);
 import { rmSync } from "fs";
-import { mkdir as mkdir5, readFile as
+import { mkdir as mkdir5, readFile as readFile30, writeFile as writeFile11 } from "fs/promises";
 import assert15 from "node:assert";
 import { platform as platform6 } from "os";
 import { join as join23, posix as posix2, relative as relative15, sep as sep3 } from "path";
@@ -232221,7 +232192,7 @@ var OtherModulesCommunicator = class {
 COANA_API_KEY: this.apiKey.type === "present" ? this.apiKey.value : ""
 }
 );
-return JSON.parse(await
+return JSON.parse(await readFile30(outputFilePathThisProcess, "utf-8")).result;
 }
 async runReachabilityAnalyzerCommand(commandName, ecosystem, subprojectPath, workspacePath, args2, env) {
 const tmpDir = await this.getTmpDirForSubproject(subprojectPath);
@@ -232290,7 +232261,7 @@ var OtherModulesCommunicator = class {
 [...args2, "-o", outputFilePathOtherProcess],
 env
 );
-return JSON.parse(await
+return JSON.parse(await readFile30(outputFilePathThisProcess, "utf-8")).result;
 }
 async runInDocker(ecosystem, image, entryPoint, commandName, args2, subprojectPath, tmpDir, env = process.env) {
 if (!await pullDockerImage(image)) return false;
@@ -233661,7 +233632,7 @@ import { join as join25, relative as relative16, resolve as resolve39 } from "pa
 
 // ../project-management/src/project-management/ecosystem-management/ecosystem-specs.ts
 import { existsSync as existsSync20 } from "fs";
-import { readdir as readdir5, readFile as
+import { readdir as readdir5, readFile as readFile31 } from "fs/promises";
 import { join as join24, sep as sep4 } from "path";
 var specs = {
 NPM: [
@@ -233740,7 +233711,7 @@ function packageManagerIfPackageJSONExistsAndValid(packageManager) {
 if (!existsSync20(join24(projectDir, "package.json"))) return void 0;
 const packageJSONPath = join24(projectDir, "package.json");
 try {
-JSON.parse(await
+JSON.parse(await readFile31(packageJSONPath, "utf-8"));
 return packageManager;
 } catch (e) {
 throw new InvalidProjectFileError(projectDir, "package.json");
@@ -235419,7 +235390,7 @@ var DEFAULT_REPORT_FILENAME_BASE = "coana-report";
 
 // dist/internal/exclude-dirs-from-configuration-files.js
 import { existsSync as existsSync22 } from "fs";
-import { readFile as
+import { readFile as readFile32 } from "fs/promises";
 import { basename as basename10, resolve as resolve42 } from "path";
 var import_yaml2 = __toESM(require_dist12(), 1);
 async function inferExcludeDirsFromConfigurationFiles(rootWorkingDir) {
@@ -235433,7 +235404,7 @@ async function inferExcludeDirsFromConfigurationFiles(rootWorkingDir) {
 }
 async function inferExcludeDirsFromSocketConfig(socketConfigFile) {
 try {
-const config3 = (0, import_yaml2.parse)(await
+const config3 = (0, import_yaml2.parse)(await readFile32(socketConfigFile, "utf8"));
 const version4 = config3.version;
 const ignorePaths = config3[version4 === 1 ? "ignore" : "projectIgnorePaths"];
 if (!ignorePaths)
@@ -235733,7 +235704,7 @@ function toSocketFactsSocketDependencyTree(artifacts, vulnerabilities, tier1Reac
 }
 
 // dist/internal/vulnerability-scanning.js
-import { readFile as
+import { readFile as readFile33 } from "fs/promises";
 
 // ../security-auditor/security-auditor-builder/src/mongo-connection.ts
 var import_mongodb = __toESM(require_lib31(), 1);
@@ -250602,7 +250573,7 @@ async function scanForVulnerabilities(dependencyTree, offlineVulnerabilityScanne
 }
 async function offlineScan(dependencyTree, offlineVulnerabilityScannerDBPath) {
 logger.info("using offline vulnerability scanner db");
-const offlineVulnerabilityScannerDB = JSON.parse(await
+const offlineVulnerabilityScannerDB = JSON.parse(await readFile33(offlineVulnerabilityScannerDBPath, "utf-8"));
 const { ecosystemToUrlToVulnerabilityDetails, vulnerabilityDatabase } = offlineVulnerabilityScannerDB;
 const coanaSupportedVulnerabilitiesLoader = CoanaSupportedVulnerabilitiesLoader.create(ecosystemToUrlToVulnerabilityDetails);
 const vulnerabilityAccessPathLoader = CoanaSupportedVulnerabilitiesLoader.create(ecosystemToUrlToVulnerabilityDetails);
@@ -250620,7 +250591,7 @@ async function onlineScan(dependencyTree, apiKey, timeout) {
 }
 
 // dist/version.js
-var version3 = "14.12.
+var version3 = "14.12.102";
 
 // dist/cli-core.js
 var { mapValues, omit, partition, pick } = import_lodash15.default;
@@ -251510,7 +251481,7 @@ async function initializeComputeFixesAndUpgradePurls(path9, options) {
 var compareReportsCommand = new Command();
 compareReportsCommand.name("compare-reports").argument("<baselineReportPath>", "Path to the baseline report").argument("<newReportPath>", "Path to the new report").option("--api-key <key>", "Set the Coana dashboard API key.").option("-d, --debug", "Enable debug logging", false).option("--no-pr-comment", "Disable pull request comments (only relevant when run from a PR)", true).option("--no-block", "Do not fail with a non-zero exit code when new reachable vulnerabilities are detected", true).option("--ignore-undeterminable-reachability", "Ignore vulnerabilities with undeterminable reachability", false).action(async (baselineReportPath, newReportPath, options) => {
 async function readReport(reportPath) {
-return JSON.parse(await
+return JSON.parse(await readFile34(reportPath, "utf-8"));
 }
 const baselineReport = await readReport(baselineReportPath);
 const newReport = await readReport(newReportPath);
package/package.json
CHANGED
Binary file
Binary file
Binary file
Binary file
Binary file