@socketsecurity/cli-with-sentry 1.0.95 → 1.0.97
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +164 -164
- package/dist/cli.js.map +1 -1
- package/dist/constants.js +3 -3
- package/dist/constants.js.map +1 -1
- package/dist/shadow-npm-bin.js +3 -3
- package/dist/shadow-npm-bin.js.map +1 -1
- package/dist/shadow-npm-inject.js +29 -49
- package/dist/shadow-npm-inject.js.map +1 -1
- package/dist/tsconfig.dts.tsbuildinfo +1 -1
- package/dist/types/commands/fix/shared.d.mts +2 -1
- package/dist/types/commands/fix/shared.d.mts.map +1 -1
- package/dist/types/shadow/npm/arborist/lib/arborist/index.d.mts.map +1 -1
- package/dist/types/shadow/npm/arborist-helpers.d.mts +5 -5
- package/dist/types/shadow/npm/arborist-helpers.d.mts.map +1 -1
- package/dist/types/utils/alert/severity.d.mts.map +1 -1
- package/dist/types/utils/alerts-map.d.mts +4 -3
- package/dist/types/utils/alerts-map.d.mts.map +1 -1
- package/dist/types/utils/config.d.mts +5 -4
- package/dist/types/utils/config.d.mts.map +1 -1
- package/dist/types/utils/filter-config.d.mts +5 -0
- package/dist/types/utils/filter-config.d.mts.map +1 -0
- package/dist/types/utils/sdk.d.mts +3 -3
- package/dist/types/utils/sdk.d.mts.map +1 -1
- package/dist/types/utils/socket-package-alert.d.mts +7 -5
- package/dist/types/utils/socket-package-alert.d.mts.map +1 -1
- package/dist/utils.js +207 -207
- package/dist/utils.js.map +1 -1
- package/dist/vendor.js +38 -36
- package/external/@coana-tech/cli/cli-wrapper.mjs +1 -0
- package/external/@coana-tech/cli/cli.mjs +281 -172
- package/external/@coana-tech/cli/reachability-analyzers-cli.mjs +134 -63
- package/external/@coana-tech/cli/repos/coana-tech/alucard/alucard.jar +0 -0
- package/external/@coana-tech/cli/repos/coana-tech/class-graph-analysis/dist/bundle/class-graph-analysis-cli.mjs +2337 -2821
- package/external/@coana-tech/cli/repos/coana-tech/cocoa/release/Coana.Cocoa.dll +0 -0
- package/external/@coana-tech/cli/repos/coana-tech/goana/bin/goana-darwin-amd64.gz +0 -0
- package/external/@coana-tech/cli/repos/coana-tech/goana/bin/goana-darwin-arm64.gz +0 -0
- package/external/@coana-tech/cli/repos/coana-tech/goana/bin/goana-linux-amd64.gz +0 -0
- package/external/@coana-tech/cli/repos/coana-tech/goana/bin/goana-linux-arm64.gz +0 -0
- package/external/@socketsecurity/registry/lib/objects.js +16 -0
- package/package.json +7 -7
- package/dist/types/utils/strings.d.mts +0 -2
- package/dist/types/utils/strings.d.mts.map +0 -1
@@ -68747,7 +68747,7 @@ var {
 } = import_index.default;

 // dist/reachability-analyzers-cli.js
-import { readFile as
+import { readFile as readFile12, writeFile as writeFile9 } from "fs/promises";

 // ../web-compat-utils/src/logger-singleton.ts
 var import_winston = __toESM(require_winston(), 1);
@@ -68768,6 +68768,7 @@ function utilFormatter() {
 }

 // ../web-compat-utils/src/logger-singleton.ts
+import { readFile } from "fs/promises";
 var CLILogger = class {
 logger = console;
 writeStream;
@@ -68847,6 +68848,16 @@ var CLILogger = class {
 });
 });
 }
+async getLogContent(logFilePath) {
+await this.finish();
+let logContent;
+try {
+logContent = await readFile(logFilePath, "utf-8");
+} catch (e) {
+console.error("Error reading log file", e);
+}
+return logContent;
+}
 set silent(silent) {
 if (!(this.logger instanceof import_winston.Logger)) throw new Error("Cannot set silent mode on console logger");
 this.logger.silent = silent;
@@ -73185,7 +73196,7 @@ function getCoanaAPI() {

 // ../utils/src/dashboard-api/socket-api.ts
 var import_form_data2 = __toESM(require_form_data2(), 1);
-import { readFile } from "fs/promises";
+import { readFile as readFile2 } from "fs/promises";
 import { join } from "path";

 // ../web-compat-utils/src/ghsa.ts
@@ -73388,6 +73399,62 @@ async function getLatestBucketsSocket(subprojectPath, workspacePath) {
 return void 0;
 }
 }
+async function registerAutofixOrUpgradePurlRun(manifestsTarHash, repositoryName, options, cliCommand) {
+try {
+const url2 = getSocketApiUrl(`orgs/${process.env.SOCKET_ORG_SLUG}/fixes/register-autofix-or-upgrade-cli-run`);
+const data2 = {
+manifestsTarHash,
+repositoryName,
+options,
+cliCommand
+};
+const response = await axios2.post(url2, data2, { headers: getAuthHeaders() });
+return response.data.id;
+} catch (error) {
+handleError(error, "Error registering autofix or upgrade purl run", false);
+}
+}
+async function finalizeAutofixRun(autofixRunId, status, stackTrace, logFileContent) {
+try {
+const url2 = getSocketApiUrl(`orgs/${process.env.SOCKET_ORG_SLUG}/fixes/finalize-autofix-run`);
+const data2 = {
+autofixRunId,
+status,
+stackTrace,
+logFileContent
+};
+await axios2.post(url2, data2, { headers: getAuthHeaders() });
+} catch (error) {
+handleError(error, "Error finalizing autofix run", false);
+}
+}
+async function registerUpgradePurlRun(autofixRunId, upgradeSpecs) {
+try {
+const url2 = getSocketApiUrl(`orgs/${process.env.SOCKET_ORG_SLUG}/fixes/register-upgrade-purl-run`);
+const data2 = {
+cliRunId: autofixRunId,
+upgradeSpecs
+};
+const response = await axios2.post(url2, data2, { headers: getAuthHeaders() });
+return response.data.id;
+} catch (error) {
+handleError(error, "Error registering upgrade purl run", false);
+}
+}
+async function finalizeUpgradePurlRun(upgradePurlRunId, status, stackTrace, logFileContent) {
+try {
+const url2 = getSocketApiUrl(`orgs/${process.env.SOCKET_ORG_SLUG}/fixes/finalize-upgrade-purl-run`);
+const data2 = {
+upgradePurlRunId,
+status,
+stackTrace,
+logFileContent
+};
+await axios2.post(url2, data2, { headers: getAuthHeaders() });
+} catch (error) {
+handleError(error, "Error finalizing upgrade purl run", false);
+}
+}
 function getSocketAPI() {
 return {
 createSocketTier1Scan,
@@ -73395,7 +73462,11 @@ function getSocketAPI() {
 registerSubprojectsSocket,
 registerCLIProgressSocket,
 registerAnalysisMetadataSocket,
-getLatestBucketsSocket
+getLatestBucketsSocket,
+registerAutofixOrUpgradePurlRun,
+finalizeAutofixRun,
+registerUpgradePurlRun,
+finalizeUpgradePurlRun
 };
 }

@@ -74116,7 +74187,7 @@ import { resolve as resolve13 } from "path";

 // ../utils/src/pip-utils.ts
 import { existsSync as existsSync2 } from "fs";
-import { readFile as
+import { readFile as readFile4 } from "fs/promises";
 import { resolve as resolve3 } from "path";
 import util4 from "util";

@@ -74125,7 +74196,7 @@ var import_lodash4 = __toESM(require_lodash(), 1);
 var import_semver = __toESM(require_semver2(), 1);
 import { execFileSync } from "child_process";
 import { constants as constants2 } from "fs";
-import { access as access2, readFile as
+import { access as access2, readFile as readFile3 } from "fs/promises";
 import { join as join4, resolve as resolve2 } from "path";
 import util3 from "util";
 var { once } = import_lodash4.default;
@@ -74164,7 +74235,7 @@ var PythonVersionsManager = class _PythonVersionsManager {
 const pyenvRoot = process.env.PYENV_ROOT ?? await runCommandResolveStdOut("pyenv root");
 if (pyenvOrigin !== join4(pyenvRoot, "version"))
 try {
-return [(await
+return [(await readFile3(pyenvOrigin, "utf-8")).split("\n")[0].trim()];
 } catch (e) {
 if (e.code !== "ENOENT") logger.warn("Failed to read python version file with error", e);
 }
@@ -74351,7 +74422,7 @@ function addPathToTrie(root3, vulnPath) {
 var import_lodash14 = __toESM(require_lodash(), 1);
 import assert6 from "assert";
 import { existsSync as existsSync10 } from "fs";
-import { cp as cp5, readdir as readdir3, readFile as
+import { cp as cp5, readdir as readdir3, readFile as readFile10, rm as rm5, writeFile as writeFile8 } from "fs/promises";
 import { basename as basename9, dirname as dirname12, join as join20, resolve as resolve11, sep as sep3 } from "path";
 import util5 from "util";

@@ -74518,7 +74589,7 @@ function assertDefined(value) {

 // dist/whole-program-code-aware-vulnerability-scanner/dotnet/dotnet-code-aware-vulnerability-scanner.js
 var import_adm_zip = __toESM(require_adm_zip(), 1);
-import { mkdir, readFile as
+import { mkdir, readFile as readFile5, writeFile as writeFile3 } from "fs/promises";
 var import_packageurl_js4 = __toESM(require_packageurl_js(), 1);
 import { randomUUID } from "crypto";

@@ -74547,7 +74618,7 @@ var DotnetCodeAwareVulnerabilityScanner = class _DotnetCodeAwareVulnerabilitySca
 }
 static initFromDependencyTree(dependencyTree, timeoutInSeconds, statusUpdater) {
 return new _DotnetCodeAwareVulnerabilityScanner({
-[
+[serializeNugetDependencyToPackageUrl(dependencyTree)]: {
 src: dependencyTree.src,
 bin: dependencyTree.bin,
 ecosystemSpecificPackageInfo: {
@@ -74611,7 +74682,7 @@ var DotnetCodeAwareVulnerabilityScanner = class _DotnetCodeAwareVulnerabilitySca
 try {
 const nugetDependencyChain = await convertDependencyChain(dependencyChain, tmpDir);
 const scanner = new _DotnetCodeAwareVulnerabilityScanner({
-[
+[serializeNugetDependencyToPackageUrl(nugetDependencyChain[0])]: {
 src: nugetDependencyChain[0].src,
 bin: nugetDependencyChain[0].bin,
 ecosystemSpecificPackageInfo: {
@@ -74653,8 +74724,8 @@ var DotnetCodeAwareVulnerabilityScanner = class _DotnetCodeAwareVulnerabilitySca
 const result = await execNeverFail(cmdt`node ${classGraphAnalysisCliPath} runDotnetDirectDependencyAnalysis -i ${inputFile} -o ${outputFile} --cocoa ${cocoaPath} --tree-sitter-c-sharp ${treeSitterCSharpPath}`);
 if (result.error)
 return void 0;
-const
-return
+const packageIds = JSON.parse(await readFile5(outputFile, "utf-8")).result;
+return packageIds?.filter((packageId) => !Object.hasOwn(this.apps, packageId))?.map((packageId) => parsePackageUrlToNugetDependency(packageId).packageName);
 });
 }
 async runAnalysis(vulnerabilities, heuristic, _analyzesAllVulns, _experiment) {
@@ -74684,7 +74755,7 @@ var DotnetCodeAwareVulnerabilityScanner = class _DotnetCodeAwareVulnerabilitySca
 const result = await execNeverFail(cmdt`node ${classGraphAnalysisCliPath} runDotnetReachabilityAnalysis -i ${inputFile} -o ${outputFile} --cocoa ${cocoaPath} --tree-sitter-c-sharp ${treeSitterCSharpPath}`);
 if (result.error)
 return { type: "error", message: result.error.message ?? "unknown error" };
-const { success, error, analysisDiagnostics: diagnostics, vulnerablePaths } = JSON.parse(await
+const { success, error, analysisDiagnostics: diagnostics, vulnerablePaths } = JSON.parse(await readFile5(outputFile, "utf-8")).result;
 if (!success)
 return { type: "error", message: error ?? "unknown error" };
 return {
@@ -74692,19 +74763,18 @@ var DotnetCodeAwareVulnerabilityScanner = class _DotnetCodeAwareVulnerabilitySca
 diagnostics,
 terminatedEarly: diagnostics.timeout,
 reachedDependencies: true,
-computeDetectedOccurrences: ({ vulnerabilityAccessPaths: vulnerabilityAccessPaths2 }) => _DotnetCodeAwareVulnerabilityScanner.computeDetectedOccurrences(appPackageIds, vulnerablePaths, vulnerabilityAccessPaths2)
+computeDetectedOccurrences: ({ vulnerabilityAccessPaths: vulnerabilityAccessPaths2 }) => _DotnetCodeAwareVulnerabilityScanner.computeDetectedOccurrences(appPackageIds, vulnerablePaths, i(vulnerabilityAccessPaths2.map((vulnerabilityAccessPath) => vulnerabilityAccessPath.slice(1).split(":")[0])))
 };
 });
 }
-static computeDetectedOccurrences(appPackageIds, vulnerablePaths,
+static computeDetectedOccurrences(appPackageIds, vulnerablePaths, vulnerableClasses) {
 const affectedPackages = /* @__PURE__ */ new Set();
 const classStacks = [];
-for (const
-const
-
-if (!vulnerablePathsForMethod)
+for (const vulnerableClass of vulnerableClasses) {
+const vulnerablePathsForClass = vulnerablePaths[vulnerableClass];
+if (!vulnerablePathsForClass)
 continue;
-classStacks.push(...
+classStacks.push(...vulnerablePathsForClass.map((vulnPath) => {
 if (vulnPath.length < 2)
 throw new Error("The path should always have length at least two.");
 return vulnPath.map(({ fullyQualifiedName, confidence, packageId }) => ({
@@ -74713,7 +74783,7 @@ var DotnetCodeAwareVulnerabilityScanner = class _DotnetCodeAwareVulnerabilitySca
 confidence
 }));
 }));
-
+vulnerablePathsForClass.flatMap((vulnPath) => vulnPath).filter(({ packageId }) => !appPackageIds.has(packageId)).map(({ packageId }) => parsePackageUrlToNugetDependency(packageId)).forEach((node) => affectedPackages.add(`${node.packageName}@${node.version}`));
 }
 return {
 analysisLevel: "class-level",
@@ -74783,7 +74853,7 @@ async function convertSocketArtifacts(artifacts, tmpDir) {
 // dist/whole-program-code-aware-vulnerability-scanner/java/java-code-aware-vulnerability-scanner.js
 var import_lodash8 = __toESM(require_lodash(), 1);
 import { existsSync as existsSync7 } from "fs";
-import { mkdir as mkdir2, readFile as
+import { mkdir as mkdir2, readFile as readFile6, writeFile as writeFile4 } from "fs/promises";
 import { basename as basename5, dirname as dirname4, join as join14 } from "path";

 // ../../node_modules/.pnpm/cheerio@1.0.0-rc.12/node_modules/cheerio/lib/esm/options.js
@@ -88392,7 +88462,7 @@ var JavaCodeAwareVulnerabilityScanner = class _JavaCodeAwareVulnerabilityScanner
 }
 static initFromDependencyTree(dependencyTree, timeoutInSeconds, statusUpdater) {
 return new _JavaCodeAwareVulnerabilityScanner({
-[
+[serializeMavenDependencyToPackageUrl(dependencyTree)]: {
 src: dependencyTree.src,
 bin: dependencyTree.bin,
 ecosystemSpecificPackageInfo: {
@@ -88456,7 +88526,7 @@ var JavaCodeAwareVulnerabilityScanner = class _JavaCodeAwareVulnerabilityScanner
 try {
 const mavenDependencyChain = await convertDependencyChain2(dependencyChain, tmpDir);
 const scanner = new _JavaCodeAwareVulnerabilityScanner({
-[
+[serializeMavenDependencyToPackageUrl(mavenDependencyChain[0])]: {
 src: mavenDependencyChain[0].src,
 bin: mavenDependencyChain[0].bin,
 ecosystemSpecificPackageInfo: {
@@ -88498,8 +88568,8 @@ var JavaCodeAwareVulnerabilityScanner = class _JavaCodeAwareVulnerabilityScanner
 const result = await execNeverFail(cmdt`node ${classGraphAnalysisCliPath} runJvmDirectDependencyAnalysis -i ${inputFile} -o ${outputFile} --alucard ${alucardPath} --tree-sitter-java ${treeSitterJavaPath} --tree-sitter-kotlin ${treeSitterKotlinPath} --tree-sitter-scala ${treeSitterScalaPath}`);
 if (result.error)
 return void 0;
-const
-return
+const packageIds = JSON.parse(await readFile6(outputFile, "utf-8")).result;
+return packageIds?.filter((packageId) => !Object.hasOwn(this.apps, packageId))?.map((packageId) => parsePackageUrlToMavenDependency(packageId).packageName);
 });
 }
 async runAnalysis(vulnerabilities, heuristic, _analyzesAllVulns, _experiment) {
@@ -88529,7 +88599,7 @@ var JavaCodeAwareVulnerabilityScanner = class _JavaCodeAwareVulnerabilityScanner
 const result = await execNeverFail(cmdt`node ${classGraphAnalysisCliPath} runJvmReachabilityAnalysis -i ${inputFile} -o ${outputFile} --alucard ${alucardPath} --tree-sitter-java ${treeSitterJavaPath} --tree-sitter-kotlin ${treeSitterKotlinPath} --tree-sitter-scala ${treeSitterScalaPath}`);
 if (result.error)
 return { type: "error", message: result.error.message ?? "unknown error" };
-const { success, error, analysisDiagnostics: diagnostics, vulnerablePaths } = JSON.parse(await
+const { success, error, analysisDiagnostics: diagnostics, vulnerablePaths } = JSON.parse(await readFile6(outputFile, "utf-8")).result;
 if (!success)
 return { type: "error", message: error ?? "unknown error" };
 return {
@@ -88537,19 +88607,18 @@ var JavaCodeAwareVulnerabilityScanner = class _JavaCodeAwareVulnerabilityScanner
 diagnostics,
 terminatedEarly: diagnostics.timeout,
 reachedDependencies: true,
-computeDetectedOccurrences: ({ vulnerabilityAccessPaths: vulnerabilityAccessPaths2 }) => _JavaCodeAwareVulnerabilityScanner.computeDetectedOccurrences(appPackageIds, vulnerablePaths, vulnerabilityAccessPaths2)
+computeDetectedOccurrences: ({ vulnerabilityAccessPaths: vulnerabilityAccessPaths2 }) => _JavaCodeAwareVulnerabilityScanner.computeDetectedOccurrences(appPackageIds, vulnerablePaths, i(vulnerabilityAccessPaths2.map((vulnerabilityAccessPath) => vulnerabilityAccessPath.slice(1).split(":")[0])))
 };
 });
 }
-static computeDetectedOccurrences(appPackageIds, vulnerablePaths,
+static computeDetectedOccurrences(appPackageIds, vulnerablePaths, vulnerableClasses) {
 const affectedPackages = /* @__PURE__ */ new Set();
 const classStacks = [];
-for (const
-const
-
-if (!vulnerablePathsForMethod)
+for (const vulnerableClass of vulnerableClasses) {
+const vulnerablePathsForClass = vulnerablePaths[vulnerableClass];
+if (!vulnerablePathsForClass)
 continue;
-classStacks.push(...
+classStacks.push(...vulnerablePathsForClass.map((vulnPath) => {
 if (vulnPath.length < 2)
 throw new Error("The path should always have length at least two.");
 return vulnPath.map(({ fullyQualifiedName, confidence, packageId }) => ({
@@ -88558,7 +88627,7 @@ var JavaCodeAwareVulnerabilityScanner = class _JavaCodeAwareVulnerabilityScanner
 confidence
 }));
 }));
-
+vulnerablePathsForClass.flatMap((vulnPath) => vulnPath).filter(({ packageId }) => !appPackageIds.has(packageId)).map(({ packageId }) => parsePackageUrlToMavenDependency(packageId)).forEach((node) => affectedPackages.add(`${node.packageName}@${node.version}`));
 }
 return {
 analysisLevel: "class-level",
@@ -88645,7 +88714,7 @@ async function convertSocketArtifacts2(artifacts, tmpDir) {

 // dist/whole-program-code-aware-vulnerability-scanner/js/jelly-runner.js
 var import_lodash9 = __toESM(require_lodash(), 1);
-import { readFile as
+import { readFile as readFile7, rm as rm2, writeFile as writeFile5 } from "fs/promises";
 import { relative as relative4, resolve as resolve6 } from "path";
 var { map: map2, uniq: uniq4 } = import_lodash9.default;
 var PRINT_JELLY_COMMAND = false;
@@ -88704,15 +88773,15 @@ async function runJellyAnalysis(mainProjectRoot, projectRoot, jellyOptions, reac
 experiment && reachabilityAnalysisOptions.timeoutInSeconds ? { timeout: reachabilityAnalysisOptions.timeoutInSeconds * 1e3 * 1.5 } : void 0
 );
 if (reachabilityAnalysisOptions.printLogFile)
-logger.info("JS analysis log file:", await
-const analysisDiagnostics = JSON.parse(await
+logger.info("JS analysis log file:", await readFile7(logFile, "utf-8"));
+const analysisDiagnostics = JSON.parse(await readFile7(diagnosticsFile, "utf-8"));
 analysisDiagnostics.time = analysisDiagnostics.analysisTime;
 delete analysisDiagnostics.analysisTime;
 analysisDiagnostics.timings = {
 analysisTime: analysisDiagnostics.time,
 patternMatchingTime: analysisDiagnostics.patternMatchingTime
 };
-const callStacks = JSON.parse(await
+const callStacks = JSON.parse(await readFile7(callStackFile, "utf-8"));
 const matches = {};
 for (const { vulnerability, paths } of callStacks) {
 const transformedStacks = transformJellyCallStacks(projectRoot, paths);
@@ -88746,7 +88815,7 @@ async function runJellyPhantomDependencyAnalysis(projectRoot) {
 projectRoot
 ];
 await runCommandResolveStdOut(jellyCmd);
-return JSON.parse(await
+return JSON.parse(await readFile7(reachablePackagesFile, "utf-8"));
 } finally {
 await rm2(tmpFolder, { recursive: true });
 }
@@ -95049,7 +95118,7 @@ function transformSourceLocations(fileMappings, detectedOccurrences) {
 var import_lodash11 = __toESM(require_lodash(), 1);
 import assert4 from "assert";
 import { existsSync as existsSync9, createReadStream, createWriteStream as createWriteStream2 } from "fs";
-import { readFile as
+import { readFile as readFile8, rm as rm4, cp as cp4 } from "fs/promises";
 import zlib2 from "zlib";
 import { join as join17, resolve as resolve9, sep } from "path";
 import { pipeline } from "stream/promises";
@@ -95102,9 +95171,9 @@ var GoCodeAwareVulnerabilityScanner = class {
 if (stderr)
 logger.debug(`Go code-aware analysis stderr
 ${stderr}`);
-const diagnostics = JSON.parse(await
+const diagnostics = JSON.parse(await readFile8(diagnosticsOutputFile, "utf8"));
 logger.debug("Diagnostics", diagnostics);
-const result = JSON.parse(await
+const result = JSON.parse(await readFile8(vulnsOutputFile, "utf8"));
 logger.debug("Analysis results", result);
 return {
 type: "success",
@@ -95199,7 +95268,7 @@ ${stderr}`);

 // dist/whole-program-code-aware-vulnerability-scanner/rust/rust-code-aware-vulnerability-scanner.js
 var import_lodash12 = __toESM(require_lodash(), 1);
-import { readFile as
+import { readFile as readFile9, writeFile as writeFile7 } from "fs/promises";
 import { basename as basename8, dirname as dirname11, join as join19 } from "path";

 // dist/whole-program-code-aware-vulnerability-scanner/rust/heuristics.js
@@ -95940,7 +96009,7 @@ var RustCodeAwareVulnerabilityScanner = class _RustCodeAwareVulnerabilityScanner
 }
 static initFromDependencyTree(dependencyTree, timeoutInSeconds, statusUpdater) {
 return new _RustCodeAwareVulnerabilityScanner({
-[
+[serializeRustDependencyToPackageUrl(dependencyTree)]: {
 src: dependencyTree.src,
 ecosystemSpecificPackageInfo: {
 type: "RUST",
@@ -96004,7 +96073,7 @@ var RustCodeAwareVulnerabilityScanner = class _RustCodeAwareVulnerabilityScanner
 try {
 const rustDependencyChain = await convertDependencyChain3(dependencyChain, tmpDir);
 const scanner = new _RustCodeAwareVulnerabilityScanner({
-[
+[serializeRustDependencyToPackageUrl(rustDependencyChain[0])]: {
 src: rustDependencyChain[0].src,
 ecosystemSpecificPackageInfo: {
 type: "RUST",
@@ -96046,8 +96115,8 @@ var RustCodeAwareVulnerabilityScanner = class _RustCodeAwareVulnerabilityScanner
 const result = await execNeverFail(cmdt`node ${classGraphAnalysisCliPath} runRustDirectDependencyAnalysis -i ${inputFile} -o ${outputFile} --tree-sitter-rust ${treeSitterRustPath}`);
 if (result.error)
 return void 0;
-const
-return
+const packageIds = JSON.parse(await readFile9(outputFile, "utf-8")).result;
+return packageIds?.filter((packageId) => !Object.hasOwn(this.apps, packageId))?.map((packageId) => parsePackageUrlToRustDependency(packageId).packageName);
 });
 }
 async runAnalysis(vulnerabilities, heuristic, _analyzesAllVulns) {
@@ -96078,7 +96147,7 @@ var RustCodeAwareVulnerabilityScanner = class _RustCodeAwareVulnerabilityScanner
 const result = await execNeverFail(cmdt`node ${classGraphAnalysisCliPath} runRustReachabilityAnalysis -i ${inputFile} -o ${outputFile} --tree-sitter-rust ${treeSitterRustPath}`);
 if (result.error)
 return { type: "error", message: result.error.message ?? "unknown error" };
-const { success, error, analysisDiagnostics: diagnostics, vulnerablePaths } = JSON.parse(await
+const { success, error, analysisDiagnostics: diagnostics, vulnerablePaths } = JSON.parse(await readFile9(outputFile, "utf-8")).result;
 if (!success)
 return { type: "error", message: error ?? "unknown error" };
 return {
@@ -96086,19 +96155,21 @@ var RustCodeAwareVulnerabilityScanner = class _RustCodeAwareVulnerabilityScanner
 diagnostics,
 terminatedEarly: diagnostics.timeout,
 reachedDependencies: true,
-computeDetectedOccurrences: ({ vulnerabilityAccessPaths: vulnerabilityAccessPaths2 }) => _RustCodeAwareVulnerabilityScanner.computeDetectedOccurrences(appPackageIds, vulnerablePaths, vulnerabilityAccessPaths2
+computeDetectedOccurrences: ({ vulnerabilityAccessPaths: vulnerabilityAccessPaths2 }) => _RustCodeAwareVulnerabilityScanner.computeDetectedOccurrences(appPackageIds, vulnerablePaths, i(vulnerabilityAccessPaths2.map(
+// Note, rust uses '::' as path separator, so we need to split on ': ' and not only ':'
+(vulnerabilityAccessPath) => vulnerabilityAccessPath.slice(1).split(": ")[0]
+)))
 };
 });
 }
-static computeDetectedOccurrences(appPackageIds, vulnerablePaths,
+static computeDetectedOccurrences(appPackageIds, vulnerablePaths, vulnerableClasses) {
 const affectedPackages = /* @__PURE__ */ new Set();
 const classStacks = [];
-for (const
-const
-
-if (!vulnerablePathsForMethod)
+for (const vulnerableClass of vulnerableClasses) {
+const vulnerablePathsForClass = vulnerablePaths[vulnerableClass];
+if (!vulnerablePathsForClass)
 continue;
-classStacks.push(...
+classStacks.push(...vulnerablePathsForClass.map((vulnPath) => {
 if (vulnPath.length < 2)
 throw new Error("The path should always have length at least two.");
 return vulnPath.map(({ fullyQualifiedName, confidence, packageId }) => ({
@@ -96107,7 +96178,7 @@ var RustCodeAwareVulnerabilityScanner = class _RustCodeAwareVulnerabilityScanner
 confidence
 }));
 }));
-
+vulnerablePathsForClass.flatMap((vulnPath) => vulnPath).filter(({ packageId }) => !appPackageIds.has(packageId)).map(({ packageId }) => parsePackageUrlToRustDependency(packageId)).forEach((node) => affectedPackages.add(`${node.packageName}@${node.version}`));
 }
 return {
 analysisLevel: "class-level",
@@ -96175,7 +96246,7 @@ async function getCrateInfo(cargoTomlPath) {
 let examples;
 let tests;
 const cargoTomlDir = dirname11(cargoTomlPath);
-const content = await
+const content = await readFile9(cargoTomlPath, "utf-8");
 const parsed = parse14(content);
 if (typeof parsed.package === "object" && "name" in parsed.package) {
 const crateName = parsed.package.name;
@@ -96452,10 +96523,10 @@ ${vulnAccPaths.join("\n")}`);
 if (errors.length > 0)
 logger.info(`Error messages from mambalade:
 ${errors.join("\n")}`);
-const result = JSON.parse(await
+const result = JSON.parse(await readFile10(vulnsOutputFile, "utf-8"));
 logger.debug("Analysis result:", JSON.stringify(result, null, 2));
 logger.debug("About to read diagnostics output file");
-const { modules, ...mambaladeDiagnosticsOutput } = JSON.parse(await
+const { modules, ...mambaladeDiagnosticsOutput } = JSON.parse(await readFile10(diagnosticsOutputFile, "utf-8"));
 logger.debug("Done reading diagnostics output file");
 const getTimes = (...keys) => (
 // Mambalade outputs times in seconds, we convert them to milliseconds
@@ -97500,13 +97571,13 @@ async function runReachabilityAnalysis(state) {
 }

 // dist/reachability-analysis-state.js
-import { readFile as
+import { readFile as readFile11 } from "fs/promises";
 async function getReachabilityAnalyzersStateFromInput(rootWorkingDir, subprojectDir, workspacePath, inputFile) {
 return {
 rootWorkingDir,
 subprojectDir,
 workspacePath,
-...JSON.parse(await
+...JSON.parse(await readFile11(inputFile, "utf-8"))
 };
 }

@@ -97522,7 +97593,7 @@ var runReachabilityAnalysisCmd = new Command().name("runReachabilityAnalysis").a
 }
 }));
 var runOnDependencyChainCmd = new Command().name("runOnDependencyChain").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).option("--coana-log-path <logPath>", "Coana log path").option("--silent-spinner", "Silence spinner").requiredOption("-i, --input-file <inputFile>", "Input file for data and vulnerabilities").requiredOption("-o, --output-file <outputFile>", "Output directory for the results").configureHelp({ sortSubcommands: true, sortOptions: true }).action(async (options) => withLoggerAndSpinner("Coana Reachability Analyzers", options, async () => {
-const { ecosystem, dependencyChain, vulnerability } = JSON.parse(await
+const { ecosystem, dependencyChain, vulnerability } = JSON.parse(await readFile12(options.inputFile, "utf-8"));
 const result = await analyzePackages(ecosystem, deserializeDependencyChain(ecosystem, dependencyChain), vulnerability);
 if (options.outputFile) {
 logger.debug("Writing result to file", options.outputFile);