@coana-tech/cli 14.12.21 → 14.12.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.mjs +8 -5
- package/package.json +1 -1
- package/repos/coana-tech/alucard/alucard.jar +0 -0
- package/repos/coana-tech/class-graph-analysis/dist/bundle/class-graph-analysis-cli.mjs +16 -8
- package/repos/coana-tech/cocoa/release/Coana.Cocoa.dll +0 -0
- package/repos/coana-tech/goana/bin/goana-darwin-amd64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-darwin-arm64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-linux-amd64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-linux-arm64.gz +0 -0
- package/repos/coana-tech/mambalade/dist/mambalade-0.3.12-py3-none-any.whl +0 -0
package/cli.mjs
CHANGED
@@ -198475,13 +198475,14 @@ async function getLatestBucketsSocket(subprojectPath, workspacePath) {
 return void 0;
 }
 }
-async function useSocketComputeFixEndpoint(autofixRunId, artifacts, vulnerableArtifactIdsForGhsas) {
+async function useSocketComputeFixEndpoint(autofixRunId, artifacts, vulnerableArtifactIdsForGhsas, config3) {
 try {
 const url2 = getSocketApiUrl("fixes/compute-fixes");
 const data2 = {
 autofixRunId,
 artifacts,
-vulnerableArtifactIndexes: vulnerableArtifactIdsForGhsas
+vulnerableArtifactIndexes: vulnerableArtifactIdsForGhsas,
+config: config3
 };
 return (await axios2.post(url2, data2, { headers: getAuthHeaders() })).data;
 } catch (error) {

@@ -227213,7 +227214,7 @@ async function onlineScan(dependencyTree, apiKey, timeout) {
 }
 
 // dist/version.js
-var version2 = "14.12.21";
+var version2 = "14.12.23";
 
 // dist/cli-core.js
 var { mapValues, omit, partition, pick } = import_lodash15.default;

@@ -228057,7 +228058,9 @@ async function computeFixesAndUpgradePurls(path2, options, logFile) {
 return;
 }
 const ghsaToVulnerableArtifactIdsToApply = options.applyFixesTo.includes("all") ? ghsaToVulnerableArtifactIds : Object.fromEntries(Object.entries(ghsaToVulnerableArtifactIds).filter(([ghsa]) => options.applyFixesTo.includes(ghsa)));
-const computedFix = await useSocketComputeFixEndpoint(autofixRunId, artifacts, ghsaToVulnerableArtifactIdsToApply);
+const computedFix = await useSocketComputeFixEndpoint(autofixRunId, artifacts, ghsaToVulnerableArtifactIdsToApply, {
+noMajorUpdates: options.disableMajorUpdates
+});
 if (computedFix.type !== "success") {
 throw new Error(`No fix found for the given vulnerabilities`);
 }

@@ -228231,7 +228234,7 @@ upgradePurls.name("upgrade-purls").argument("<path>", "File system path to the f
 });
 }).configureHelp({ sortOptions: true });
 var computeFixesAndUpgradePurlsCmd = new Command();
-computeFixesAndUpgradePurlsCmd.name("compute-fixes-and-upgrade-purls").argument("<path>", "File system path to the folder containing the project").option("-a, --apply-fixes-to <ghsas...>", 'GHSA IDs to compute fixes for. Use "all" to compute fixes for all vulnerabilities.', []).option("--dry-run", "Show what changes would be made without actually making them", false).option("-g, --glob <pattern>", "Glob pattern to filter workspaces by absolute file path").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).option("--range-style <style>", 'Range style to use for the output. Currently only "pin" is supported and it only works for npm.').addOption(new Option("--run-without-docker", "Run package managers without using docker").default(process.env.RUN_WITHOUT_DOCKER === "true").hideHelp()).addOption(new Option("--manifests-tar-hash <hash>", "Hash of the tarball containing all manifest files already uploaded to Socket. If provided, Socket will be used for computing dependency trees.").hideHelp()).version(version2).action(async (path2, options) => {
+computeFixesAndUpgradePurlsCmd.name("compute-fixes-and-upgrade-purls").argument("<path>", "File system path to the folder containing the project").option("-a, --apply-fixes-to <ghsas...>", 'GHSA IDs to compute fixes for. Use "all" to compute fixes for all vulnerabilities.', []).option("--dry-run", "Show what changes would be made without actually making them", false).option("-g, --glob <pattern>", "Glob pattern to filter workspaces by absolute file path").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).option("--range-style <style>", 'Range style to use for the output. Currently only "pin" is supported and it only works for npm.').option("--disable-major-updates", "Do not suggest major updates. If only major update are available, the fix will not be applied.", false).addOption(new Option("--run-without-docker", "Run package managers without using docker").default(process.env.RUN_WITHOUT_DOCKER === "true").hideHelp()).addOption(new Option("--manifests-tar-hash <hash>", "Hash of the tarball containing all manifest files already uploaded to Socket. If provided, Socket will be used for computing dependency trees.").hideHelp()).version(version2).action(async (path2, options) => {
 process.env.DOCKER_IMAGE_TAG ??= version2;
 if (options.rangeStyle && options.rangeStyle === "preserve") {
 options.rangeStyle = void 0;
package/package.json
CHANGED

package/repos/coana-tech/alucard/alucard.jar
Binary file

package/repos/coana-tech/class-graph-analysis/dist/bundle/class-graph-analysis-cli.mjs
CHANGED
@@ -7749,8 +7749,10 @@ var AlucardResolutionManager = class {
 const inputFile = resolve(tmpDir, "input.json");
 await writeFile(inputFile, JSON.stringify(inputFileData));
 const execResult = await execNeverFail(cmdt`java -jar ${this.alucardPath} resolveNodes --batch-mode --input-file=${inputFile} --output-dir=${tmpDir} --timeout=${this.timeoutInSeconds}`);
-if (execResult.error)
-throw Error(
+if (execResult.error) {
+throw Error(`alucard could not resolve nodes: ${execResult.error.message}${execResult.stderr ? `
+stderr: ${execResult.stderr}` : ""}`);
+}
 const nodes = JSON.parse(await readFile5(resolve(tmpDir, "nodes.json"), "utf-8"));
 const fatJarExcludes = new Set(nodes.filter((node) => deps[node.packageId] && node.nodeType === "type").map((node) => node.fullyQualifiedName));
 graph.addNodes(nodes.filter((node) => !apps[node.nodeId] && node.nodeType === "type" && fatJarExcludes.has(node.fullyQualifiedName)));

@@ -7768,8 +7770,10 @@ var AlucardResolutionManager = class {
 const inputFile = resolve(tmpDir, "input.json");
 await writeFile(inputFile, JSON.stringify(inputFileData));
 const execResult = await execNeverFail(cmdt`java -jar ${this.alucardPath} resolveEdges --batch-mode ${mode === "DIRECT_DEPENDENCIES" && "--entry-edges-only"} --input-file=${inputFile} --output-dir=${tmpDir} --timeout=${this.timeoutInSeconds}`);
-if (execResult.error)
-throw
+if (execResult.error) {
+throw Error(`alucard could not resolve edges: ${execResult.error.message}${execResult.stderr ? `
+stderr: ${execResult.stderr}` : ""}`);
+}
 for (const idx of JSON.parse(await readFile5(resolve(tmpDir, "visited.json"), "utf-8"))) {
 visited.add(idx);
 }

@@ -17518,8 +17522,10 @@ var CocoaResolutionManager = class {
 await writeFile4(inputFile, JSON.stringify(inputFileData));
 const cmd = cmdt`dotnet ${this.cocoaPath} resolveNodes --batch-mode --input-file=${inputFile} --output-dir=${tmpDir} --timeout=${this.timeoutInSeconds}`;
 const execResult = await execNeverFail(cmd, ".");
-if (execResult.error)
-throw
+if (execResult.error) {
+throw Error(`cocoa could not resolve nodes: ${execResult.error.message}${execResult.stderr ? `
+stderr: ${execResult.stderr}` : ""}`);
+}
 const nodes = JSON.parse(await readFile9(resolve3(tmpDir, "nodes.json"), "utf-8"));
 graph.addNodes(nodes);
 });

@@ -17537,8 +17543,10 @@ var CocoaResolutionManager = class {
 await writeFile4(inputFile, JSON.stringify(inputFileData));
 const cmd = cmdt`dotnet ${this.cocoaPath} resolveEdges --batch-mode ${mode === "DIRECT_DEPENDENCIES" && "--entry-edges-only"} --input-file=${inputFile} --output-dir=${tmpDir} --timeout=${this.timeoutInSeconds}`;
 const execResult = await execNeverFail(cmd, ".");
-if (execResult.error)
-throw
+if (execResult.error) {
+throw Error(`cocoa could not resolve edges: ${execResult.error.message}${execResult.stderr ? `
+stderr: ${execResult.stderr}` : ""}`);
+}
 for (const idx of JSON.parse(await readFile9(resolve3(tmpDir, "visited.json"), "utf-8"))) {
 visited.add(idx);
 }
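The four class-graph-analysis hunks apply the same fix: when the external `java` or `dotnet` resolver exits with an error, the thrown message now names the failed step and appends the process stderr when it is non-empty, replacing the previously bare `throw`. A minimal sketch of that pattern in isolation; `runResolver` and its result shape are assumptions for illustration, not the bundle's actual `execNeverFail` API:

```js
// Sketch only: `runResolver` is a hypothetical stand-in for the bundle's exec
// wrapper and is assumed to resolve to { error, stderr, stdout } without throwing.
async function resolveNodesOrThrow(runResolver, cmd) {
  const result = await runResolver(cmd);
  if (result.error) {
    // Surface both the error message and any captured stderr, mirroring the new hunks.
    throw Error(
      `resolver could not resolve nodes: ${result.error.message}` +
        (result.stderr ? `\nstderr: ${result.stderr}` : "")
    );
  }
  return result.stdout;
}
```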
package/repos/coana-tech/cocoa/release/Coana.Cocoa.dll
Binary file

package/repos/coana-tech/goana/bin/goana-darwin-amd64.gz
Binary file

package/repos/coana-tech/goana/bin/goana-darwin-arm64.gz
Binary file

package/repos/coana-tech/goana/bin/goana-linux-amd64.gz
Binary file

package/repos/coana-tech/goana/bin/goana-linux-arm64.gz
Binary file

package/repos/coana-tech/mambalade/dist/mambalade-0.3.12-py3-none-any.whl
Binary file