@coana-tech/cli 14.12.2 → 14.12.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.mjs +108 -102
- package/package.json +1 -1
- package/reachability-analyzers-cli.mjs +75 -69
- package/repos/coana-tech/alucard/alucard.jar +0 -0
- package/repos/coana-tech/goana/bin/goana-darwin-amd64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-darwin-arm64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-linux-amd64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-linux-arm64.gz +0 -0
- package/repos/coana-tech/mambalade/dist/mambalade-0.3.11-py3-none-any.whl +0 -0
package/cli.mjs
CHANGED

@@ -6151,7 +6151,7 @@ var require_safe_stable_stringify = __commonJS({
 return circularValue;
 }
 let res = "";
- let
+ let join28 = ",";
 const originalIndentation = indentation;
 if (Array.isArray(value)) {
 if (value.length === 0) {
@@ -6165,7 +6165,7 @@ var require_safe_stable_stringify = __commonJS({
 indentation += spacer;
 res += `
 ${indentation}`;
-
+ join28 = `,
 ${indentation}`;
 }
 const maximumValuesToStringify = Math.min(value.length, maximumBreadth);
@@ -6173,13 +6173,13 @@ ${indentation}`;
 for (; i6 < maximumValuesToStringify - 1; i6++) {
 const tmp2 = stringifyFnReplacer(String(i6), value, stack2, replacer, spacer, indentation);
 res += tmp2 !== void 0 ? tmp2 : "null";
- res +=
+ res += join28;
 }
 const tmp = stringifyFnReplacer(String(i6), value, stack2, replacer, spacer, indentation);
 res += tmp !== void 0 ? tmp : "null";
 if (value.length - 1 > maximumBreadth) {
 const removedKeys = value.length - maximumBreadth - 1;
- res += `${
+ res += `${join28}"... ${getItemCount(removedKeys)} not stringified"`;
 }
 if (spacer !== "") {
 res += `
@@ -6200,7 +6200,7 @@ ${originalIndentation}`;
 let separator = "";
 if (spacer !== "") {
 indentation += spacer;
-
+ join28 = `,
 ${indentation}`;
 whitespace2 = " ";
 }
@@ -6214,13 +6214,13 @@ ${indentation}`;
 const tmp = stringifyFnReplacer(key2, value, stack2, replacer, spacer, indentation);
 if (tmp !== void 0) {
 res += `${separator}${strEscape(key2)}:${whitespace2}${tmp}`;
- separator =
+ separator = join28;
 }
 }
 if (keyLength > maximumBreadth) {
 const removedKeys = keyLength - maximumBreadth;
 res += `${separator}"...":${whitespace2}"${getItemCount(removedKeys)} not stringified"`;
- separator =
+ separator = join28;
 }
 if (spacer !== "" && separator.length > 1) {
 res = `
@@ -6261,7 +6261,7 @@ ${originalIndentation}`;
 }
 const originalIndentation = indentation;
 let res = "";
- let
+ let join28 = ",";
 if (Array.isArray(value)) {
 if (value.length === 0) {
 return "[]";
@@ -6274,7 +6274,7 @@ ${originalIndentation}`;
 indentation += spacer;
 res += `
 ${indentation}`;
-
+ join28 = `,
 ${indentation}`;
 }
 const maximumValuesToStringify = Math.min(value.length, maximumBreadth);
@@ -6282,13 +6282,13 @@ ${indentation}`;
 for (; i6 < maximumValuesToStringify - 1; i6++) {
 const tmp2 = stringifyArrayReplacer(String(i6), value[i6], stack2, replacer, spacer, indentation);
 res += tmp2 !== void 0 ? tmp2 : "null";
- res +=
+ res += join28;
 }
 const tmp = stringifyArrayReplacer(String(i6), value[i6], stack2, replacer, spacer, indentation);
 res += tmp !== void 0 ? tmp : "null";
 if (value.length - 1 > maximumBreadth) {
 const removedKeys = value.length - maximumBreadth - 1;
- res += `${
+ res += `${join28}"... ${getItemCount(removedKeys)} not stringified"`;
 }
 if (spacer !== "") {
 res += `
@@ -6301,7 +6301,7 @@ ${originalIndentation}`;
 let whitespace2 = "";
 if (spacer !== "") {
 indentation += spacer;
-
+ join28 = `,
 ${indentation}`;
 whitespace2 = " ";
 }
@@ -6310,7 +6310,7 @@ ${indentation}`;
 const tmp = stringifyArrayReplacer(key2, value[key2], stack2, replacer, spacer, indentation);
 if (tmp !== void 0) {
 res += `${separator}${strEscape(key2)}:${whitespace2}${tmp}`;
- separator =
+ separator = join28;
 }
 }
 if (spacer !== "" && separator.length > 1) {
@@ -6368,20 +6368,20 @@ ${originalIndentation}`;
 indentation += spacer;
 let res2 = `
 ${indentation}`;
- const
+ const join29 = `,
 ${indentation}`;
 const maximumValuesToStringify = Math.min(value.length, maximumBreadth);
 let i6 = 0;
 for (; i6 < maximumValuesToStringify - 1; i6++) {
 const tmp2 = stringifyIndent(String(i6), value[i6], stack2, spacer, indentation);
 res2 += tmp2 !== void 0 ? tmp2 : "null";
- res2 +=
+ res2 += join29;
 }
 const tmp = stringifyIndent(String(i6), value[i6], stack2, spacer, indentation);
 res2 += tmp !== void 0 ? tmp : "null";
 if (value.length - 1 > maximumBreadth) {
 const removedKeys = value.length - maximumBreadth - 1;
- res2 += `${
+ res2 += `${join29}"... ${getItemCount(removedKeys)} not stringified"`;
 }
 res2 += `
 ${originalIndentation}`;
@@ -6397,16 +6397,16 @@ ${originalIndentation}`;
 return '"[Object]"';
 }
 indentation += spacer;
- const
+ const join28 = `,
 ${indentation}`;
 let res = "";
 let separator = "";
 let maximumPropertiesToStringify = Math.min(keyLength, maximumBreadth);
 if (isTypedArrayWithEntries(value)) {
- res += stringifyTypedArray(value,
+ res += stringifyTypedArray(value, join28, maximumBreadth);
 keys = keys.slice(value.length);
 maximumPropertiesToStringify -= value.length;
- separator =
+ separator = join28;
 }
 if (deterministic) {
 keys = insertSort(keys);
@@ -6417,13 +6417,13 @@ ${indentation}`;
 const tmp = stringifyIndent(key2, value[key2], stack2, spacer, indentation);
 if (tmp !== void 0) {
 res += `${separator}${strEscape(key2)}: ${tmp}`;
- separator =
+ separator = join28;
 }
 }
 if (keyLength > maximumBreadth) {
 const removedKeys = keyLength - maximumBreadth;
 res += `${separator}"...": "${getItemCount(removedKeys)} not stringified"`;
- separator =
+ separator = join28;
 }
 if (separator !== "") {
 res = `
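
The hunks above all serve one refactor inside the bundled safe-stable-stringify module: the element separator (named join28/join29 by the bundler) is now computed once per stringify call, as either a bare comma or a comma plus newline and the current indentation, and reused between array items and object entries. A minimal sketch of the same pattern, with illustrative names that are not the library's actual internals:

    // Sketch: hoist the separator out of the loop and reuse it.
    // `stringifyArray` and its signature are assumptions for illustration.
    function stringifyArray(values, spacer, indentation) {
      let join = ",";
      if (spacer !== "") {
        indentation += spacer;
        join = `,\n${indentation}`; // the separator carries the per-level indentation
      }
      let res = "";
      for (let i = 0; i < values.length; i++) {
        if (i > 0) res += join;
        res += JSON.stringify(values[i]) ?? "null"; // undefined entries render as null
      }
      return `[${res}]`;
    }

    console.log(stringifyArray([1, { a: 2 }], "  ", "")); // -> [1,\n  {"a":2}]
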
@@ -7990,7 +7990,7 @@ var require_buffer_list = __commonJS({
 }
 }, {
 key: "join",
- value: function
+ value: function join28(s4) {
 if (this.length === 0) return "";
 var p3 = this.head;
 var ret = "" + p3.data;
@@ -19073,7 +19073,7 @@ var require_lodash = __commonJS({
 }
 return mapped.length && mapped[0] === arrays[0] ? baseIntersection(mapped, undefined2, comparator) : [];
 });
- function
+ function join28(array, separator) {
 return array == null ? "" : nativeJoin.call(array, separator);
 }
 function last2(array) {
@@ -20992,7 +20992,7 @@ var require_lodash = __commonJS({
 lodash16.isUndefined = isUndefined2;
 lodash16.isWeakMap = isWeakMap;
 lodash16.isWeakSet = isWeakSet;
- lodash16.join =
+ lodash16.join = join28;
 lodash16.kebabCase = kebabCase;
 lodash16.last = last2;
 lodash16.lastIndexOf = lastIndexOf;
@@ -29988,7 +29988,7 @@ var require_builder = __commonJS({
 }
 };
 exports2.SeqBuilder = SeqBuilder;
- function
+ function join28(first2, second, ...others) {
 const seq = new SeqBuilder(first2, second);
 if (!others.length) {
 return seq;
@@ -29997,7 +29997,7 @@ var require_builder = __commonJS({
 return res.join(query);
 }, seq);
 }
- exports2.join =
+ exports2.join = join28;
 var SymBuilder = class extends AbstractBuilder {
 constructor(opts) {
 super();
@@ -190952,25 +190952,25 @@ var Spinner = class _Spinner {
 };

 // ../utils/src/command-utils.ts
- async function execAndLogOnFailure(cmd, dir, options) {
+ async function execAndLogOnFailure(cmd, dir, options, logLevel = "info") {
 const result = await execNeverFail(cmd, dir, options);
- if (result.error) logCommandOutput(result, cmd, dir);
+ if (result.error) logCommandOutput(result, cmd, dir, logLevel);
 return !result.error;
 }
 async function execPipeAndLogOnFailure(cmd, dir, options) {
 return execAndLogOnFailure(cmd, dir, { ...options, pipe: true });
 }
- function logCommandOutput(cmdResult, cmd, dir) {
+ function logCommandOutput(cmdResult, cmd, dir, logLevel = "info") {
 const { error, stdout, stderr } = cmdResult;
- logger
- logger
+ logger[logLevel](error ? `Error running command: ${cmd}` : `Result of running command: ${cmd}`);
+ logger[logLevel](`Directory: ${dir}`);
 if (error) {
 const em = error.message;
- logger
+ logger[logLevel](`Error: ${em?.endsWith?.(`
 ${stderr}`) ? em.slice(0, -stderr.length - 1) : em}`);
 }
- logger
- logger
+ logger[logLevel](`stdout: ${stdout}`);
+ logger[logLevel](`stderr: ${stderr}`);
 }
 async function execNeverFail(cmd, dir, options) {
 return new Promise((resolve24) => {
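
The command-utils change threads an optional logLevel through execAndLogOnFailure and logCommandOutput and dispatches on the logger by property name (logger[logLevel](...)), so callers can demote expected failures from info to debug. The dispatch idea in isolation, with a console-backed stand-in for the bundle's winston logger:

    // Sketch: level dispatch via computed property access.
    // This `logger` object is an illustrative stand-in, not the bundle's logger.
    const logger = {
      info: (...args) => console.log("[info]", ...args),
      debug: (...args) => console.debug("[debug]", ...args),
    };

    function logCommandOutput(cmdResult, cmd, dir, logLevel = "info") {
      const { error, stdout, stderr } = cmdResult;
      logger[logLevel](error ? `Error running command: ${cmd}` : `Result of running command: ${cmd}`);
      logger[logLevel](`Directory: ${dir}`);
      logger[logLevel](`stdout: ${stdout}`);
      logger[logLevel](`stderr: ${stderr}`);
    }

    // A caller that expects failures can route the noise to debug:
    logCommandOutput({ error: null, stdout: "ok", stderr: "" }, "which uv", "/tmp", "debug");
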
@@ -197761,6 +197761,14 @@ function parseSocketResponse(responseData) {
 throw new Error(`Unexpected response type from Socket API: ${typeof responseData}`);
 }
 }
+ function parseComputeArtifactsResponse(responseData) {
+ const response = parseSocketResponse(responseData);
+ return {
+ artifacts: response.filter((r2) => r2.type === "artifact").map((r2) => r2.value),
+ metadata: response.filter((r2) => r2.type === "metadata").flatMap((r2) => r2.value)
+ // There should always only be one metadata object
+ };
+ }
 async function createSocketTier1Scan(cliOptions, coanaCliVersion) {
 try {
 const url2 = getSocketApiUrl("tier1-reachability-scan");
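
parseComputeArtifactsResponse splits the mixed compute-artifacts payload into artifact values and a metadata value, keyed on each entry's type tag. The same partitioning pattern, with the entry shape inferred from the hunk rather than from documented API types:

    // Sketch: partition a tagged response array by its `type` field.
    // The { type, value } entry shape is inferred from the diff above.
    function parseComputeArtifactsResponse(response) {
      return {
        artifacts: response.filter((r) => r.type === "artifact").map((r) => r.value),
        // The bundle's comment notes there should only ever be one metadata object.
        metadata: response.filter((r) => r.type === "metadata").flatMap((r) => r.value),
      };
    }

    const parsed = parseComputeArtifactsResponse([
      { type: "artifact", value: { id: "a1" } },
      { type: "metadata", value: [{ scanId: "s1" }] },
    ]);
    console.log(parsed.artifacts.length, parsed.metadata); // 1 [ { scanId: 's1' } ]
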
@@ -197948,7 +197956,7 @@ async function fetchArtifactsFromManifestsTarHash(manifestsTarHash) {
 try {
 const url2 = getSocketApiUrl(`orgs/${process.env.SOCKET_ORG_SLUG}/compute-artifacts?tarHash=${manifestsTarHash}`);
 const responseData = (await axios2.post(url2, {}, { headers: getAuthHeaders() })).data;
- return
+ return parseComputeArtifactsResponse(responseData);
 } catch (e) {
 if (e instanceof AxiosError2) {
 prettyPrintAxiosError(e);
@@ -197975,12 +197983,7 @@ async function computeSocketFactArtifacts(rootDir, relativeManifestFilePaths) {
 if (!uploadData.tarHash) {
 throw new Error("No tarHash received from upload-manifest-files response");
 }
-
- `orgs/${process.env.SOCKET_ORG_SLUG}/compute-artifacts?tarHash=${uploadData.tarHash}`
- );
- const computeResponse = await axios2.post(computeUrl, {}, { headers: getAuthHeaders() });
- const responseData = computeResponse.data;
- return parseSocketResponse(responseData);
+ return (await fetchArtifactsFromManifestsTarHash(uploadData.tarHash)).artifacts;
 } catch (error) {
 logger.warn("Failed to compute socket fact artifacts", error);
 return void 0;
@@ -205296,23 +205299,23 @@ var Spinner2 = class _Spinner {
 };

 // ../utils/dist/command-utils.js
- async function execAndLogOnFailure2(cmd, dir, options) {
+ async function execAndLogOnFailure2(cmd, dir, options, logLevel = "info") {
 const result = await execNeverFail2(cmd, dir, options);
 if (result.error)
- logCommandOutput2(result, cmd, dir);
+ logCommandOutput2(result, cmd, dir, logLevel);
 return !result.error;
 }
- function logCommandOutput2(cmdResult, cmd, dir) {
+ function logCommandOutput2(cmdResult, cmd, dir, logLevel = "info") {
 const { error, stdout, stderr } = cmdResult;
- logger
- logger
+ logger[logLevel](error ? `Error running command: ${cmd}` : `Result of running command: ${cmd}`);
+ logger[logLevel](`Directory: ${dir}`);
 if (error) {
 const em = error.message;
- logger
+ logger[logLevel](`Error: ${em?.endsWith?.(`
 ${stderr}`) ? em.slice(0, -stderr.length - 1) : em}`);
 }
- logger
- logger
+ logger[logLevel](`stdout: ${stdout}`);
+ logger[logLevel](`stderr: ${stderr}`);
 }
 async function execNeverFail2(cmd, dir, options) {
 return new Promise((resolve24) => {
@@ -206483,18 +206486,19 @@ import { access as access2, cp, readdir as readdir3, stat as stat2 } from "fs/promises";
 import { basename as basename4, join as join11, relative as relative6, resolve as resolve13 } from "path";
 var { uniq } = import_lodash5.default;
 var { isMatch } = import_micromatch.default;
- function
- let curr = dir;
- let last2 = dir;
+ function* parents(dir) {
+ let [curr, last2] = [dir, dir];
 do {
-
-
- return curr;
- last2 = curr;
- curr = resolve13(curr, "..");
+ yield curr;
+ [last2, curr] = [curr, resolve13(curr, "..")];
 } while (curr !== last2);
 return void 0;
 }
+ function findParent(dir, predicate, wholePath) {
+ for (const parent2 of parents(dir))
+ if (predicate(wholePath ? parent2 : basename4(parent2)))
+ return parent2;
+ }

 // ../utils/dist/constants.js
 var { once: once2 } = import_lodash6.default;
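
The inline upward directory walk becomes a parents generator that yields every ancestor until resolve(curr, "..") stops changing the path (the filesystem root), with findParent as a small consumer. The same pattern in isolation:

    // Sketch of the generator-based ancestor walk used above.
    import { resolve, basename } from "path";

    function* parents(dir) {
      let [curr, last] = [dir, dir];
      do {
        yield curr;
        [last, curr] = [curr, resolve(curr, "..")]; // step one level up
      } while (curr !== last); // at the root, the parent resolves to itself
    }

    function findParent(dir, predicate, wholePath) {
      for (const parent of parents(dir))
        if (predicate(wholePath ? parent : basename(parent))) return parent;
    }

    // e.g. locate an enclosing directory named "node_modules", if any:
    console.log(findParent(process.cwd(), (name) => name === "node_modules"));
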
@@ -207378,17 +207382,18 @@ import { access as access3, cp as cp2, readdir as readdir4, stat as stat3 } from "fs/promises";
 import { basename as basename5, join as join16, relative as relative7, resolve as resolve15 } from "path";
 var { uniq: uniq2 } = import_lodash8.default;
 var { isMatch: isMatch2 } = import_micromatch2.default;
- function
- let curr = dir;
- let last2 = dir;
+ function* parents2(dir) {
+ let [curr, last2] = [dir, dir];
 do {
-
-
- last2 = curr;
- curr = resolve15(curr, "..");
+ yield curr;
+ [last2, curr] = [curr, resolve15(curr, "..")];
 } while (curr !== last2);
 return void 0;
 }
+ function findParent2(dir, predicate, wholePath) {
+ for (const parent2 of parents2(dir))
+ if (predicate(wholePath ? parent2 : basename5(parent2))) return parent2;
+ }
 async function getFilesRelative(dir, excludeDirs) {
 async function helper(subDir, arrayOfFiles) {
 for (const item of await readdir4(join16(dir, subDir), { withFileTypes: true })) {
@@ -209354,6 +209359,7 @@ import { join as join20, resolve as resolve18 } from "path";
 import util3 from "util";
 var { once: once7 } = import_lodash13.default;
 var systemPython = once7(() => execFileSync2("which", ["python"], { encoding: "utf8" }).trim());
+ var hasPyenv = once7(async () => !(await execNeverFail("which pyenv")).error);

 // ../utils/src/pip-utils.ts
 async function isSetupPySetuptools(file) {
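
hasPyenv probes for the binary once via which and memoizes the resulting promise with lodash's once, so repeated availability checks don't spawn new processes. A sketch of the probe using child_process directly in place of the bundle's execNeverFail helper:

    // Sketch: memoized async availability probe (lodash `once` caches the promise).
    import { execFile } from "child_process";
    import once from "lodash/once.js";

    const hasPyenv = once(
      () => new Promise((resolve) => execFile("which", ["pyenv"], (error) => resolve(!error)))
    );

    hasPyenv().then((available) => console.log(available ? "pyenv found" : "pyenv missing"));
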
@@ -209805,7 +209811,7 @@ var kleur_default = $;
 // dist/cli-core.js
 var import_lodash15 = __toESM(require_lodash(), 1);
 import os from "os";
- import { join as
+ import { join as join25, relative as relative11, resolve as resolve23 } from "path";

 // ../utils/src/dashboard-api/shared-api.ts
 var DashboardAPI = class {
@@ -210102,8 +210108,8 @@ function getVulnerabilityDependencyType(vulnChainDetails, directDependencies, af
 finalDepType = depType;
 }
 }
- const
- for (const p3 of
+ const parents4 = vcd.parentsMap.get(devIdentifier);
+ for (const p3 of parents4 ?? []) {
 if (p3 === ROOT_NODE_STR) continue;
 const parentNode = vcd.transitiveDependencies[p3];
 if (afd && !afd.has(parentNode)) continue;
@@ -210225,17 +210231,17 @@ function computeVulnChainDetails(dependencyTree, dependencyIdentifier, parentsMa
 function addNode(currentIdentifier, childIdentifier, visited) {
 if (visited.has(currentIdentifier))
 return;
- const
+ const parents4 = parentsMap.get(currentIdentifier);
 const newCurrentNode = transformToVulnChainNode(dependencyTree.transitiveDependencies[currentIdentifier]);
 res.transitiveDependencies[currentIdentifier] = newCurrentNode;
 if (childIdentifier && !newCurrentNode.children.includes(childIdentifier))
 newCurrentNode.children.push(childIdentifier);
 if (!childIdentifier)
 newCurrentNode.vulnerable = true;
- if (!
+ if (!parents4)
 return res;
 visited.add(currentIdentifier);
- for (const parent2 of
+ for (const parent2 of parents4) {
 if (parent2 === ROOT_IDENTIFIER)
 res.children.push(currentIdentifier);
 else
@@ -210253,9 +210259,9 @@ function transformToVulnChainNode(dependencyTree) {
 }

 // dist/internal/socket-mode-helpers-socket-dependency-trees.js
- var import_picomatch2 = __toESM(require_picomatch2(), 1);
- import { basename as basename7, dirname as dirname8, sep as sep5 } from "path";
 var import_packageurl_js = __toESM(require_packageurl_js(), 1);
+ var import_picomatch2 = __toESM(require_picomatch2(), 1);
+ import { basename as basename7, dirname as dirname8, join as join23, sep as sep5 } from "path";
 var REQUIREMENTS_FILES_SEARCH_DEPTH2 = 3;
 function inferWorkspaceFromManifestPath(ecosystem, manifestPath, properPythonProjects) {
 switch (ecosystem) {
@@ -210339,7 +210345,7 @@ function getAllToplevelAncestors(artifactMap, artifactId) {
 async function fetchArtifactsFromSocket(rootWorkingDirectory, manifestsTarHash) {
 logger.info("Fetching artifacts from Socket backend using manifests tar hash", manifestsTarHash);
 try {
- const artifacts = await fetchArtifactsFromManifestsTarHash(manifestsTarHash);
+ const { artifacts } = await fetchArtifactsFromManifestsTarHash(manifestsTarHash);
 const properPythonProjects = [];
 const venvExcludes = [
 "venv",
@@ -210368,7 +210374,7 @@ async function fetchArtifactsFromSocket(rootWorkingDirectory, manifestsTarHash)
 for (const file of allFiles) {
 const base = basename7(file);
 const workspaceDir = dirname8(file) || ".";
- if (base === "pyproject.toml" || base === "setup.py" && await isSetupPySetuptools(file)) {
+ if (base === "pyproject.toml" || base === "setup.py" && await isSetupPySetuptools(join23(rootWorkingDirectory, file))) {
 if (!properPythonProjects.includes(workspaceDir)) {
 properPythonProjects.push(workspaceDir);
 }
@@ -210442,7 +210448,7 @@ async function fetchArtifactsFromSocket(rootWorkingDirectory, manifestsTarHash)
 name: artifact.name ?? "",
 dependency: artifact.name ?? "",
 vulnChainDetails: computeVulnChainDetails2(artifacts, artifact.id),
- vulnerabilityAccessPaths: vuln.reachabilityData?.pattern ?? null,
+ vulnerabilityAccessPaths: vuln.reachabilityData?.undeterminableReachability ? vuln.reachabilityData.publicComment ?? "" : vuln.reachabilityData?.pattern ?? null,
 ecosystem,
 artifactId: artifact.id
 };
@@ -210493,7 +210499,7 @@ function computeVulnChainDetails2(artifacts, vulnerableArtifactId) {
 const currentArtifact = artifactMap.get(currentId);
 if (!currentArtifact)
 return;
- const
+ const parents4 = parentsMap.get(currentId);
 const newCurrentNode = {
 packageName: getNameFromNamespaceAndName(currentArtifact.type, currentArtifact.namespace, currentArtifact.name),
 version: currentArtifact.version ?? void 0,
@@ -210512,8 +210518,8 @@ function computeVulnChainDetails2(artifacts, vulnerableArtifactId) {
 }
 }
 visited.add(currentId);
- if (
- for (const parentId of
+ if (parents4) {
+ for (const parentId of parents4) {
 addNode(parentId, currentId, visited);
 }
 }
@@ -213022,7 +213028,7 @@ __export(traversing_exports, {
 nextUntil: () => nextUntil,
 not: () => not,
 parent: () => parent,
- parents: () =>
+ parents: () => parents3,
 parentsUntil: () => parentsUntil,
 prev: () => prev,
 prevAll: () => prevAll,
@@ -214284,7 +214290,7 @@ function _removeDuplicates(elems) {
 return Array.from(new Set(elems));
 }
 var parent = _singleMatcher(({ parent: parent2 }) => parent2 && !isDocument(parent2) ? parent2 : null, _removeDuplicates);
- var
+ var parents3 = _matcher((elem) => {
 const matched = [];
 while (elem.parent && !isDocument(elem.parent)) {
 matched.push(elem.parent);
@@ -224212,7 +224218,7 @@ var { root: root2 } = static_exports;
 // ../utils/src/maven-utils.ts
 var import_lodash14 = __toESM(require_lodash(), 1);
 import { existsSync as existsSync20, readdirSync as readdirSync4, statSync as statSync4 } from "fs";
- import { join as
+ import { join as join24 } from "path";
 var { memoize: memoize3 } = import_lodash14.default;
 var memoizedParseShellArgs = memoize3(parseShellArgs);
 var MAVEN_PUBLIC_REPOSITORIES = [
@@ -225028,10 +225034,10 @@ var FixesTask = class {
 return;
 }
 }
- const
+ const parents4 = this.getParents(pId, vulnChainDetails);
 let allowedVersionsForCId = potentialVersionsForFix[cId] ? [...potentialVersionsForFix[cId]] : await this.getSafeVersionsOfPackage(vulnChainDetails.transitiveDependencies[cId].packageName);
- if (
- for (const parent2 of
+ if (parents4.length !== 0) {
+ for (const parent2 of parents4) {
 await computeFix(parent2, pId, [key, ...visited]);
 if (res[pId])
 allowedVersionsForCId = await this.filterVersionsAllowedByParent(pId, res[pId], cId, allowedVersionsForCId);
@@ -225060,11 +225066,11 @@ var FixesTask = class {
 const deps = vulnChainDetails.transitiveDependencies;
 const vulnerablePackageIdentifiers = Object.entries(deps ?? []).filter(([_identifier, node]) => node.vulnerable).map(([identifier, _node]) => identifier);
 for (const pId of vulnerablePackageIdentifiers) {
- const
- if (
+ const parents4 = this.getParents(pId, vulnChainDetails);
+ if (parents4.length === 0) {
 pickVersionWrapper(pId, [...potentialVersionsForFix[pId]]);
 } else {
- for (const parent2 of
+ for (const parent2 of parents4) {
 await computeFix(parent2, pId, []);
 }
 }
@@ -225125,9 +225131,9 @@ var FixesTask = class {
 safeVersionsForC
 );
 const vs = await filterVersions(pId, versionsOfPAllowingSomeSafeVersions);
- const
- if (
- for (const parent2 of
+ const parents4 = this.getParents(pId, vuln.vulnChainDetails);
+ if (parents4.length !== 0) {
+ for (const parent2 of parents4) {
 await computePotentialVersionsForFixWithCache(parent2, pId, vs);
 }
 } else {
@@ -225139,17 +225145,17 @@ var FixesTask = class {
 const deps = vuln.vulnChainDetails?.transitiveDependencies;
 const vulnerablePackageIdentifiers = Object.entries(deps ?? []).filter(([_identifier, node]) => node.vulnerable).map(([identifier, _node]) => identifier);
 for (const pId of vulnerablePackageIdentifiers) {
- const
+ const parents4 = this.getParents(pId, vuln.vulnChainDetails);
 const safeVersionsForVulnerablePackage = await safeVersions(pId);
 const { upgrades, downgrades } = this.groupVersionsInUpgradesAndDowngrades(
 assertDefined(this.packageStructure.transitiveDependencies[pId].version),
 safeVersionsForVulnerablePackage
 );
- if (
+ if (parents4.length === 0) {
 if (upgrades.length > 0) res[pId] = upgrades;
 else if (downgrades.length > 0) res[pId] = downgrades;
 } else {
- for (const parent2 of
+ for (const parent2 of parents4) {
 const resClone = { ...res };
 const alreadyComputedCacheClone = new Map(alreadyComputedCache);
 try {
@@ -225583,7 +225589,7 @@ async function onlineScan(dependencyTree, apiKey, timeout) {
 }

 // dist/version.js
- var version2 = "14.12.2";
+ var version2 = "14.12.5";

 // dist/cli-core.js
 var { mapValues, omit, partition, pick } = import_lodash15.default;
@@ -225685,7 +225691,7 @@ var CliCore = class {
 }
 }
 async main() {
- this.coanaLogPath =
+ this.coanaLogPath = join25(await createTmpDirectory("coana-cli-"), "coana-log.txt");
 logger.initWinstonLogger(this.options.debug, this.coanaLogPath);
 logger.silent = this.options.silent;
 try {
@@ -226266,7 +226272,7 @@ async function getGitDataToMetadataIfAvailable(rootWorkingDirectory) {
 }

 // dist/cli-upgrade-purl.js
- import { join as
+ import { join as join26, relative as relative12 } from "node:path";
 var import_packageurl_js2 = __toESM(require_packageurl_js(), 1);
 var ECOSYSTEMS_WITH_SOCKET_UPGRADES = ["NPM", "MAVEN"];
 async function upgradePurl(path2, upgrades, options, logFile, cliFixRunId) {
@@ -226347,7 +226353,7 @@ ${upgrades.map((upgrade) => ` ${upgrade.purl} -> ${upgrade.upgradeVersion}`).joi
 const subprojectPromiseQueue = new PromiseQueue(Number(options.concurrency));
 supportedSubprojects.forEach((subproject) => {
 subprojectPromiseQueue.enqueueTask(async () => {
- const workspacePathsMatchingGlob = subproject.workspacePaths.filter((wsPath) => minimatch(
+ const workspacePathsMatchingGlob = subproject.workspacePaths.filter((wsPath) => minimatch(join26(subproject.subprojectPath, wsPath), options.globPattern ?? "**"));
 if (workspacePathsMatchingGlob.length === 0)
 return;
 logger.info(`Found workspaces for subproject ${subproject.subprojectPath}${options.globPattern ? `matching glob ${options.globPattern}` : ""}:
@@ -226376,7 +226382,7 @@ ${workspacePathsMatchingGlob.map((wsPath) => ` ${wsPath}`).join("\n")}`);
 });
 if (vulnerabilityFixes.length === 0)
 return;
- logger.info(`Found ${vulnerabilityFixes.length} ${vulnerabilityFixes.length === 1 ? "dependency" : "dependencies"} matching upgrade specs for ${
+ logger.info(`Found ${vulnerabilityFixes.length} ${vulnerabilityFixes.length === 1 ? "dependency" : "dependencies"} matching upgrade specs for ${join26(subproject.subprojectPath, wsPath)}`);
 workspaceToFixes[wsPath] = [
 {
 fixId: "dummy",
@@ -226397,7 +226403,7 @@ ${workspacePathsMatchingGlob.map((wsPath) => ` ${wsPath}`).join("\n")}`);
 }
 }
 var signalFixApplied = (_fixId, subprojectPath, workspacePath, vulnerabilityFixes) => {
- logger.info(`Successfully upgraded purls for: ${
+ logger.info(`Successfully upgraded purls for: ${join26(subprojectPath, workspacePath)}`);
 logger.info(`Upgraded:
 ${vulnerabilityFixes.map((fix) => ` ${fix.dependencyName} from ${fix.currentVersion} to ${fix.fixedVersion}`).join("\n")}`);
 };
@@ -226554,7 +226560,7 @@ function computeSBOMTaskArtifacts(dependencyTrees) {
 }

 // dist/index.js
- import { join as
+ import { join as join27 } from "path";
 var program2 = new Command();
 var run2 = new Command();
 run2.name("run").argument("<path>", "File system path to folder containing the project").option("-o, --output-dir <path>", "Write json report to <path>/coana-report.json").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).option("-p, --print-report", "Print the report to the console", false).option("--offline-database <path>", "Path to a coana-offline-db.json file for running the CLI without internet connectivity", void 0).option("-t, --timeout <timeout>", "Set API <timeout> in milliseconds to Coana backend.", "300000").option("-a, --analysis-timeout <timeout>", "Set <timeout> in seconds for each reachability analysis run").option("--memory-limit <memoryInMB>", "Set memory limit for analysis to <memoryInMB> megabytes of memory.", "8192").option("-c, --concurrency <concurrency>", "Set the maximum number of concurrent reachability analysis runs. It's recommended to choose a concurrency level that ensures that each analysis run has at least the --memory-limit amount of memory available.", "1").option("--api-key <key>", "Set the Coana dashboard API key. By setting you also enable the dashboard integration.").addOption(new Option("--write-report-to-file", "Write the report dashboard-compatible report to dashboard-report.json. This report may help the Coana team debug issues with the report insertion mechanism.").default(false).hideHelp()).option("--project-name <repoName>", "Set the name of the repository. Used for dashboard integration.").option("--repo-url <repoUrl>", "Set the URL of the repository. Used for dashboard integration.").option("--include-dirs <relativeDirs...>", "globs for directories to include from the detection of subprojects (space-separated)(use relative paths from the project root). Notice, projects that are not included may still be scanned if they are referenced from included projects.").option("--exclude-dirs <relativeDirs...>", "globs for directories to exclude from the detection of subprojects (space-separated)(use relative paths from the project root). Notice, excluded projects may still be scanned if they are referenced from non-excluded projects.").option("--disable-analysis-splitting", "Limits Coana to at most 1 reachability analysis run per workspace").option("--print-analysis-log-file", "Store log output from the JavaScript/TypeScript reachability analysis in the file js-analysis.log file in the root of each workspace", false).option("--entry-points <entryPoints...>", "List of files to analyze for root workspace. The reachability analysis automatically analyzes all files used by the entry points. If not provided, all JavaScript and TypeScript files are considered entry points. For non-root workspaces, all JavaScript and TypeScript files are analyzed as well.").option("--include-projects-with-no-reachability-support", "Also runs Coana on projects where we support traditional SCA, but does not yet support reachability analysis.", false).option("--ecosystems <ecosystems...>", "List of ecosystems to analyze (space-separated). Currently NPM, PIP, MAVEN, NUGET and GO are supported. Default is all supported ecosystems.").addOption(new Option("--purl-types <purlTypes...>", "List of PURL types to analyze (space-separated). Currently npm, pypi, maven, nuget, golang and cargo are supported. Default is all supported purl types.").hideHelp()).option("--changed-files <files...>", "List of files that have changed. 
If provided, Coana only analyzes workspaces and modules that contain changed files.").option("--disable-report-submission", "Disable the submission of the report to the Coana dashboard. Used by the pipeline blocking feature.", false).option("--disable-analytics-sharing", "Disable analytics sharing.", false).option("--provider-project <path>", "File system path to folder containing the provider project (Only supported for Maven, Gradle, and SBT)").option("--provider-workspaces <dirs...>", "List of workspaces that build the provided runtime environment (Only supported for Maven, Gradle, and SBT)", (paths) => paths.split(" ")).option("--lightweight-reachability", "Runs Coana in lightweight mode. This increases analysis speed but also raises the risk of Coana misclassifying the reachability of certain complex vulnerabilities. Recommended only for use with Coana Guardrail mode.", false).addOption(new Option("--run-without-docker", "Run package managers and reachability analyzers without using docker").default(process.env.RUN_WITHOUT_DOCKER === "true").hideHelp()).addOption(new Option("--run-env <env>", "Specifies the environment in which the CLI is run. So far only MANAGED_SCAN and UNKNOWN are supported.").default("UNKNOWN").choices(["UNKNOWN", "MANAGED_SCAN"]).hideHelp()).addOption(new Option("--guardrail-mode", "Run Coana in guardrail mode. This mode is used to prevent new reachable vulnerabilities from being introduced into the codebase. Usually run as a CI check when pushing new commits to a pull request.")).option("--ignore-failing-workspaces", "Continue processing when a workspace fails instead of exiting. Failed workspaces will be logged at termination.", false).addOption(new Option("--socket-mode <output-file>", "Run Coana in socket mode and write report to <output-file>").hideHelp()).addOption(new Option("--manifests-tar-hash <hash>", "Hash of the tarball containing all manifest files already uploaded to Socket. If provided, Socket will be used for computing dependency trees.").hideHelp()).version(version2).configureHelp({ sortOptions: true }).action(async (path2, options) => {
@@ -226572,7 +226578,7 @@ var upgradePurls = new Command();
 upgradePurls.name("upgrade-purls").argument("<path>", "File system path to the folder containing the project").argument("<specs...>", "Package upgrade specifications in the format 'purl -> newVersion' (e.g., 'pkg:maven/io.micrometer/micrometer-core@1.10.9 -> 1.15.0')").option("-g, --glob <pattern>", "Glob pattern to filter workspaces by absolute file path").option("-c, --concurrency <concurrency>", "Set the maximum number of concurrent reachability analysis runs. It's recommended to choose a concurrency level that ensures that each analysis run has at least the --memory-limit amount of memory available.", "1").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).addOption(new Option("--run-without-docker", "Run package managers without using docker").default(process.env.RUN_WITHOUT_DOCKER === "true").hideHelp()).addOption(new Option("--socket-mode", "Use Socket for computing dependency trees").default(process.env.SOCKET_MODE === "true").hideHelp()).version(version2).action(async (path2, specs2, options) => {
 process.env.DOCKER_IMAGE_TAG ??= version2;
 await withTmpDirectory("upgrade-purls", async (tmpDir) => {
- const logFile =
+ const logFile = join27(tmpDir, "upgrade-purls.log");
 logger.initWinstonLogger(options.debug, logFile);
 const upgradeSpecs = specs2.map((spec) => {
 const [purl, upgradeVersion] = spec.split("->").map((s4) => s4.trim());
@@ -226590,7 +226596,7 @@ var computeFixesAndUpgradePurlsCmd = new Command();
 computeFixesAndUpgradePurlsCmd.name("compute-fixes-and-upgrade-purls").argument("<path>", "File system path to the folder containing the project").option("-a, --apply-fixes-to <ghsas...>", 'GHSA IDs to compute fixes for. Use "all" to compute fixes for all vulnerabilities.', []).option("--dry-run", "Show what changes would be made without actually making them", false).option("-g, --glob <pattern>", "Glob pattern to filter workspaces by absolute file path").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).addOption(new Option("--run-without-docker", "Run package managers without using docker").default(process.env.RUN_WITHOUT_DOCKER === "true").hideHelp()).addOption(new Option("--manifests-tar-hash <hash>", "Hash of the tarball containing all manifest files already uploaded to Socket. If provided, Socket will be used for computing dependency trees.").hideHelp()).version(version2).action(async (path2, options) => {
 process.env.DOCKER_IMAGE_TAG ??= version2;
 await withTmpDirectory("compute-fixes-and-upgrade-purls", async (tmpDir) => {
- const logFile =
+ const logFile = join27(tmpDir, "compute-fixes-and-upgrade-purls.log");
 logger.initWinstonLogger(options.debug, logFile);
 await computeFixesAndUpgradePurls(path2, options, logFile);
 });
package/package.json
CHANGED

package/reachability-analyzers-cli.mjs
CHANGED
@@ -73587,22 +73587,22 @@ import { join as join3 } from "path";
 // ../utils/src/command-utils.ts
 import assert from "assert";
 import { execFile } from "child_process";
- async function execAndLogOnFailure(cmd, dir, options) {
+ async function execAndLogOnFailure(cmd, dir, options, logLevel = "info") {
 const result = await execNeverFail(cmd, dir, options);
- if (result.error) logCommandOutput(result, cmd, dir);
+ if (result.error) logCommandOutput(result, cmd, dir, logLevel);
 return !result.error;
 }
- function logCommandOutput(cmdResult, cmd, dir) {
+ function logCommandOutput(cmdResult, cmd, dir, logLevel = "info") {
 const { error, stdout, stderr } = cmdResult;
- logger
- logger
+ logger[logLevel](error ? `Error running command: ${cmd}` : `Result of running command: ${cmd}`);
+ logger[logLevel](`Directory: ${dir}`);
 if (error) {
 const em = error.message;
- logger
+ logger[logLevel](`Error: ${em?.endsWith?.(`
 ${stderr}`) ? em.slice(0, -stderr.length - 1) : em}`);
 }
- logger
- logger
+ logger[logLevel](`stdout: ${stdout}`);
+ logger[logLevel](`stderr: ${stderr}`);
 }
 async function execNeverFail(cmd, dir, options) {
 return new Promise((resolve16) => {
@@ -73747,17 +73747,18 @@ function excludeFiles(excludedDirsRoot, filesRoot, files, excludeDirs) {
 )
 ).map((f2) => relative(filesRoot, f2));
 }
- function
- let curr = dir;
- let last2 = dir;
+ function* parents(dir) {
+ let [curr, last2] = [dir, dir];
 do {
-
-
- last2 = curr;
- curr = resolve(curr, "..");
+ yield curr;
+ [last2, curr] = [curr, resolve(curr, "..")];
 } while (curr !== last2);
 return void 0;
 }
+ function findParent(dir, predicate, wholePath) {
+ for (const parent2 of parents(dir))
+ if (predicate(wholePath ? parent2 : basename(parent2))) return parent2;
+ }
 async function getFiles(dir, excludeDirs) {
 async function helper(currDir, arrayOfFiles) {
 for (const item of await readdir(currDir, { withFileTypes: true })) {
@@ -74201,6 +74202,7 @@ import { join as join4, resolve as resolve2 } from "path";
 import util3 from "util";
 var { once } = import_lodash4.default;
 var systemPython = once(() => execFileSync("which", ["python"], { encoding: "utf8" }).trim());
+ var hasPyenv = once(async () => !(await execNeverFail("which pyenv")).error);
 async function getPythonVersion(executable) {
 return runCommandResolveStdOut([executable, "-SIc", `import sys; print(*sys.version_info[:3], sep='.')`]);
 }
@@ -74231,11 +74233,9 @@ var PythonVersionsManager = class _PythonVersionsManager {
 // Extracts the python version specifier from the workspace and returns it as an array of semver parts.
 async getPythonSpecifier(workspacePath, checkPyProject = true) {
 const absPath = resolve2(this.projectDir, workspacePath);
- const
- const pyenvRoot = process.env.PYENV_ROOT ?? await runCommandResolveStdOut("pyenv root");
- if (pyenvOrigin !== join4(pyenvRoot, "version"))
+ for (const parent2 of parents(absPath))
 try {
- return [(await readFile3(
+ return [(await readFile3(join4(parent2, ".python-version"), "utf-8")).split("\n")[0].trim()];
 } catch (e) {
 if (e.code !== "ENOENT") logger.warn("Failed to read python version file with error", e);
 }
@@ -74283,7 +74283,12 @@ var PythonVersionsManager = class _PythonVersionsManager {
 if (semVerSpec) {
 const systemVer = await getPythonVersion(systemPython());
 if (versionMatchesSemverParts(systemVer, semVerSpec)) return systemPython();
-
+ if (!await hasPyenv())
+ throw Error(
+ `System Python (${systemVer}) does not satisfy the specifier '${semVerSpec.join(", ")}'. A matching interpreter can automatically be installed if 'pyenv' is available.`
+ );
+ } else if (!await hasPyenv() || _PythonVersionsManager.getGlobalPythonVersion() === "system")
+ return systemPython();
 return resolve2(await _PythonVersionsManager.getPythonPrefixMatchingSpecifier(semVerSpec), "bin", "python");
 }
 // Throws an error if the python version is not installed.
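
Read together, the last three hunks change how a Python interpreter is selected: the .python-version specifier is discovered by walking ancestors with parents(absPath) instead of consulting pyenv's origin file, and when the system interpreter doesn't match, the manager either defers to pyenv or fails with an actionable message. A simplified sketch of that selection order; every helper here is a hypothetical stand-in for the bundle's utilities:

    // All names below are illustrative stand-ins, not the bundle's real helpers.
    function versionMatches(version, specParts) {
      return specParts.every((part, i) => version.split(".")[i] === String(part));
    }

    async function pickPython(systemPython, semVerSpec, { hasPyenv, getPythonVersion, pyenvPythonFor }) {
      const systemVer = await getPythonVersion(systemPython);
      if (versionMatches(systemVer, semVerSpec)) return systemPython; // prefer the system interpreter
      if (!(await hasPyenv()))
        throw new Error(
          `System Python (${systemVer}) does not satisfy '${semVerSpec.join(", ")}'; ` +
            "a matching interpreter can be installed automatically if pyenv is available."
        );
      return pyenvPythonFor(semVerSpec); // otherwise fall back to a pyenv-managed install
    }
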
@@ -77166,7 +77171,7 @@ __export(traversing_exports, {
 nextUntil: () => nextUntil,
 not: () => not,
 parent: () => parent,
- parents: () =>
+ parents: () => parents2,
 parentsUntil: () => parentsUntil,
 prev: () => prev,
 prevAll: () => prevAll,
@@ -78428,7 +78433,7 @@ function _removeDuplicates(elems) {
 return Array.from(new Set(elems));
 }
 var parent = _singleMatcher(({ parent: parent2 }) => parent2 && !isDocument(parent2) ? parent2 : null, _removeDuplicates);
- var
+ var parents2 = _matcher((elem) => {
 const matched = [];
 while (elem.parent && !isDocument(elem.parent)) {
 matched.push(elem.parent);
@@ -96448,9 +96453,9 @@ var PythonCodeAwareVulnerabilityScanner = class {
 const packagesToExclude = heuristic.getPackagesToExcludeFromAnalysis?.(vulns);
 const packagesToInstall = uniqBy(preInstalledDepInfos.filter((n) => !packagesToExclude?.has(n.packageName)), "packageName");
 if (!await this.tryUsingPreinstalledVirtualEnv(packagesToInstall)) {
- logger.info(
+ logger.info(`Setting up virtual environment`);
 await this.prepareVirtualEnv(packagesToInstall);
- logger.
+ logger.info("Done setting up virtual environment");
 }
 }
 async runAnalysis(vulns, heuristic, analyzesAllVulns) {
@@ -96512,7 +96517,7 @@ runpy.run_module("mambalade", alter_sys=True)
 "--",
 ...filesToAnalyze
 ];
- logger.
+ logger.debug(`Running mambalade on ${filesToAnalyze.length} files for vulnerabilities:
 ${vulnAccPaths.join("\n")}`);
 logger.debug(`Running python executable: ${pythonExecutable}`);
 logger.debug(`With args: ${mambaladeArgs.slice(1).join(" ")}`);
@@ -96521,7 +96526,7 @@ ${vulnAccPaths.join("\n")}`);
 logger.debug("Done running mambalade");
 const errors = stderr.split("\n").filter((line) => line.startsWith("ERROR:") && !/^ERROR: Excluded distribution/.test(line));
 if (errors.length > 0)
- logger.
+ logger.debug(`Error messages from mambalade:
 ${errors.join("\n")}`);
 const result = JSON.parse(await readFile10(vulnsOutputFile, "utf-8"));
 logger.debug("Analysis result:", JSON.stringify(result, null, 2));
@@ -96546,8 +96551,8 @@ ${errors.join("\n")}`);
 packageInstallationStats: this.virtualEnvInfo.packageInstallationStats
 // Including stats in all analysis diagnostics since we might discard the first one that actually installs it due to analysis timeout.
 };
- logger.
- logger.
+ logger.debug("Analysis diagnostics:");
+ logger.debug(JSON.stringify(omit(diagnostics, this.numberAnalysesRun === 0 ? [] : ["packageInstallationStats"]), null, 2));
 return {
 type: "success",
 diagnostics,
@@ -96592,21 +96597,25 @@ ${msg}`;
 rootWorkingDir: projectTmpDir,
 reachabilityAnalysisOptions: options
 }, projectTmpDir);
-
-
-
- const
-
-
-
-
-
-
-
-
-
-
-
+ try {
+ await scanner.prepareVirtualEnv([]);
+ const sitePackagesDir = scanner.virtualEnvInfo.virtualEnvPathToSitePackages;
+ for (const dep of dependencies) {
+ const dependencyDir = join20(sitePackagesDir, basename9(dep));
+ logger.info(`Copying ${dep} to ${dependencyDir}`);
+ await cp5(dep, dependencyDir, { recursive: true });
+ fileMappings.set(dependencyDir, dep);
+ }
+ const result = await scanner.runAnalysis([vuln], MambaladeHeuristics.ALL_PACKAGES, false);
+ if (result.type === "error")
+ return { error: result.message, terminatedEarly: true };
+ return {
+ detectedOccurrences: transformSourceLocations2(app, fileMappings, result.computeDetectedOccurrences({ ...vuln, url: "" })),
+ terminatedEarly: result.terminatedEarly
+ };
+ } finally {
+ await scanner.cleanup();
+ }
 });
 }
 static async runOnDependencyChain(chain, vuln, options) {
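
The rewritten scanner block wraps environment preparation, dependency copying, and the analysis run in try/finally so scanner.cleanup() always runs, whether the analysis succeeds, returns early on error, or throws. The control-flow skeleton, with an illustrative scanner interface:

    // Skeleton of the try/finally pattern above; `scanner` is an assumed interface.
    async function runScanWithCleanup(scanner, dependencies) {
      try {
        await scanner.prepareVirtualEnv([]);
        for (const dep of dependencies) {
          await scanner.copyIntoSitePackages(dep); // hypothetical helper
        }
        const result = await scanner.runAnalysis();
        if (result.type === "error") return { error: result.message, terminatedEarly: true };
        return { occurrences: result.occurrences, terminatedEarly: result.terminatedEarly };
      } finally {
        await scanner.cleanup(); // runs on success, early return, and thrown errors alike
      }
    }
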
@@ -96628,7 +96637,7 @@ ${msg}`;
 const candidate = findBestWheel(packageName, version3, meta);
 if (candidate) {
 const filename = candidate.url.split("/").at(-1);
- if (await downloadFile(candidate.url, join20(tmpDir, filename)) && await execAndLogOnFailure(["unzip", filename], tmpDir))
+ if (await downloadFile(candidate.url, join20(tmpDir, filename)) && await execAndLogOnFailure(["unzip", filename], tmpDir, void 0, "debug"))
 return;
 }
 await exec(cmdt`uv pip install --python-platform ${uvPythonPlatform} --target ${tmpDir} --no-deps ${packageName}==${version3}`);
@@ -96677,6 +96686,8 @@ ${msg}`;
 }
 // public for testing only
 async prepareVirtualEnv(packages) {
+ if (!await hasUv())
+ throw new Error("uv (https://docs.astral.sh/uv/) is missing, but is required for Python analysis");
 const tmpDir = await createTmpDirectory("coana-python-analysis-venv");
 const virtualEnvFolder = join20(tmpDir, ".venv");
 const pythonExecutable = await this.vm.getPythonExecutableForWorkspace(this.projectDir, false);
@@ -96709,12 +96720,12 @@ ${msg}`;
 return true;
 const filename = candidate.url.split("/").at(-1);
 if (await downloadFile(candidate.url, join20(tmpDir, filename)) && await execAndLogOnFailure(cmdt`${uvTool(pythonExecutable)} --from installer==0.7.0 python -m installer
- --no-compile-bytecode --prefix .venv ${filename}`, tmpDir)) {
+ --no-compile-bytecode --prefix .venv ${filename}`, tmpDir, void 0, "debug")) {
 installStats.installedUsingSpecializedInstallCommand.push(packageName);
 return false;
 }
 } catch (e) {
- logger.
+ logger.debug(`Failed to construct specialized install command for ${packageName}==${version3}`, e);
 }
 return true;
 }, 4);
@@ -96723,13 +96734,7 @@ ${msg}`;
 const installPipDeps = once3(async () => exec([...uvInstallBase, "pip", "wheel"]));
 for (const { packageName, version: version3, requirement } of failingPackages) {
 const requirementToInstall = requirement ?? `${packageName}==${version3}`;
- let success = await execAndLogOnFailure([
- ...uvInstallBase,
- "--no-deps",
- "--no-binary",
- packageName,
- requirementToInstall
- ]);
+ let success = await execAndLogOnFailure([...uvInstallBase, "--no-deps", "--no-binary", packageName, requirementToInstall], void 0, void 0, "debug");
 if (!success) {
 await installPipDeps();
 success = await execAndLogOnFailure(
@@ -96738,7 +96743,9 @@ ${msg}`;
 cmdt`.venv/bin/python -m pip
 --no-input --require-virtualenv --disable-pip-version-check --no-cache-dir --isolated install
 --no-deps --ignore-requires-python --no-compile --no-binary ${packageName} ${requirementToInstall}`,
- tmpDir
+ tmpDir,
+ void 0,
+ "debug"
 );
 }
 (success ? installStats.installedWithoutOnlyBinary : installStats.failedToInstall).push(packageName);
@@ -96829,7 +96836,7 @@ async function getPythonInterpreter() {
 }
 async function setupMambalade() {
 const venvDir = await createTmpDirectory("mambalade-venv");
- logger.
+ logger.debug("Creating Mambalade virtual environment");
 const pythonInterpreter = await getPythonInterpreter();
 await exec(cmdt`${pythonInterpreter} -SIm venv ${venvDir}`);
 const mambaladeWheelsPath = join20(COANA_REPOS_PATH(), "mambalade", "dist");
@@ -96837,11 +96844,12 @@ async function setupMambalade() {
 const mambaladeWheels = wheelFiles.filter((f2) => f2.endsWith(".whl")).map((f2) => join20(mambaladeWheelsPath, f2));
 if (!mambaladeWheels.length)
 throw new Error(`No mambalade wheel files found in ${mambaladeWheelsPath}`);
- logger.
+ logger.debug(`Installing mambalade wheels: ${mambaladeWheels.join(", ")}`);
 await exec(cmdt`${venvDir}/bin/pip install --no-deps ${mambaladeWheels}`);
- logger.
+ logger.debug("Mambalade virtual environment setup complete");
 return venvDir;
 }
+ var hasUv = once3(async () => !(await execNeverFail("which uv")).error);

 // dist/whole-program-code-aware-vulnerability-scanner/python/phantom-deps.js
 var { uniq: uniq8 } = import_lodash15.default;
@@ -96937,8 +96945,7 @@ var PipAnalyzer = class {
 this.heuristic = MambaladeHeuristics.createOnlyVulnPathPackagesHeuristic(this.preInstalledDepInfos);
 }
 prepareScanner = once4(async () => {
-
- await this.scanner.prepareDependencies(this.preInstalledDepInfos, vulnerabilities.filter((v) => Array.isArray(v.vulnerabilityAccessPaths)), this.heuristic);
+ await this.scanner.prepareDependencies(this.preInstalledDepInfos, this.state.vulnerabilities.filter((v) => Array.isArray(v.vulnerabilityAccessPaths)), this.heuristic);
 return this.scanner;
 });
 async runPhantomDependencyAnalysis() {
@@ -96970,14 +96977,13 @@ function getPreInstalledDepInfos(workspaceData) {
 }));
 } else {
 workspaceData.type;
-
+ return workspaceData.data.artifacts.filter((a2) => {
 if (!a2.version) {
 logger.warn(`Artifact ${a2.name} has no version information`);
 return false;
 }
 return true;
- });
- return artifactsWithVersion.map((a2) => ({ packageName: a2.name, version: a2.version }));
+ }).map(({ name: name2, version: version3 }) => ({ packageName: name2, version: version3 }));
 }
 }

@@ -97168,6 +97174,7 @@ async function analyzeWithHeuristics(state, vulns, heuristicsInOrder, doNotRecom
 const enqueueWithoutSplitting = !allowSplitInBuckets && initialBucketContainingAllVulns && !state.reachabilityAnalysisOptions.timeoutInSeconds;
 await sendErrorAnalysisMetadata(result.message, !allowSplitInBuckets && isLastHeuristic(bucket.heuristic.name) && !enqueueWithoutSplitting, !allowSplitInBuckets);
 if (enqueueWithoutSplitting) {
+ logger.info("Analysis failed, retrying different configuration.");
 enqueueBucket(vulnDepIdentifiers);
 return;
 }
@@ -97177,6 +97184,7 @@ async function analyzeWithHeuristics(state, vulns, heuristicsInOrder, doNotRecom
 }
 }
 if (allowSplitInBuckets) {
+ logger.info("Analysis failed, rerunning analysis multiple times with fewer vulnerabilities per run.");
 const middle = Math.floor(vulnDepIdentifiers.length / 2);
 enqueueBucket(vulnDepIdentifiers.slice(0, middle));
 enqueueBucket(vulnDepIdentifiers.slice(middle));
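
When a reachability run fails and splitting is allowed, the scheduler now logs the retry and halves the bucket, re-enqueuing both halves so each rerun analyzes fewer vulnerabilities. The recursive halving strategy, reduced to its essentials (analyze is a hypothetical batch operation):

    // Sketch: retry a failing batch by splitting it in half until singletons fail.
    async function analyzeWithSplitting(items, analyze) {
      try {
        return await analyze(items);
      } catch (e) {
        if (items.length <= 1) throw e; // a single item cannot be split further
        console.info("Analysis failed, rerunning with fewer items per run.");
        const middle = Math.floor(items.length / 2);
        return [
          ...(await analyzeWithSplitting(items.slice(0, middle), analyze)),
          ...(await analyzeWithSplitting(items.slice(middle), analyze)),
        ];
      }
    }
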
@@ -97279,9 +97287,6 @@ function getHeuristicFromName(state, heuristicName, ecosystem) {
 if (ecosystem === "NPM") {
 return heuristics[heuristicName];
 } else if (ecosystem === "PIP") {
- if (state.workspaceData.type !== "coana") {
- throw new Error("MambaladeHeuristics only supports Coana data for analysis");
- }
 if (heuristicName in MambaladeHeuristics)
 return MambaladeHeuristics[heuristicName];
 else if (heuristicName === "ONLY_VULN_PATH_PACKAGES") {
@@ -97517,16 +97522,16 @@ function canDismissVulnerability(phantomDependencies, vulnChainDetails) {
 const recHelper = (nodeIdentifier, depth) => {
 if (depth === 0)
 return void 0;
- const
+ const parents3 = parentsMap.get(nodeIdentifier).filter((parent2) => parent2 !== ROOT_NODE_STR);
 const thisReachabilityPrecomp = nodeIdentifier === vulnNodeIdentifier ? "Reachable" : vulnChainDetails.transitiveDependencies[nodeIdentifier].reachabilityPrecomp;
 if (!thisReachabilityPrecomp)
 return void 0;
 const thisMayReachVulnerableNode = ["Reachable", "Unknown"].includes(thisReachabilityPrecomp);
- if (
+ if (parents3.length === 0 && thisMayReachVulnerableNode) {
 canDismiss = false;
 }
- if (
- const parentsReachabilityPrecomp =
+ if (parents3) {
+ const parentsReachabilityPrecomp = parents3.map((p) => recHelper(p, depth - 1));
 if (parentsReachabilityPrecomp.some((reachabilityPrecomp) => !reachabilityPrecomp) && thisMayReachVulnerableNode) {
 canDismiss = false;
 }
@@ -97555,6 +97560,7 @@ var dashboardAPI2 = new DashboardAPI(process.env.SOCKET_MODE === "true", process
 async function runReachabilityAnalysis(state) {
 const projectDir = resolve15(state.subprojectDir, state.workspacePath);
 const ecosystem = state.workspaceData.data.type;
+ logger.info(`Preparing for running reachability analysis for project at "${relative6(state.rootWorkingDir, projectDir) || "."}" (${ecosystem})`);
 const constructor = ecosystemAnalyzer[ecosystem];
 if (!constructor)
 throw Error(`No analyzer associated with ecosystem ${ecosystem}`);
package/repos/coana-tech/alucard/alucard.jar
Binary file

package/repos/coana-tech/goana/bin/goana-darwin-amd64.gz
Binary file

package/repos/coana-tech/goana/bin/goana-darwin-arm64.gz
Binary file

package/repos/coana-tech/goana/bin/goana-linux-amd64.gz
Binary file

package/repos/coana-tech/goana/bin/goana-linux-arm64.gz
Binary file

package/repos/coana-tech/mambalade/dist/mambalade-0.3.11-py3-none-any.whl
Binary file