owasp-depscan 5.4.8__py3-none-any.whl → 6.0.0a2__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Potentially problematic release.
This version of owasp-depscan might be problematic.
- depscan/__init__.py +8 -0
- depscan/cli.py +719 -827
- depscan/cli_options.py +302 -0
- depscan/lib/audit.py +3 -1
- depscan/lib/bom.py +390 -288
- depscan/lib/config.py +86 -337
- depscan/lib/explainer.py +363 -98
- depscan/lib/license.py +11 -10
- depscan/lib/logger.py +65 -17
- depscan/lib/package_query/__init__.py +0 -0
- depscan/lib/package_query/cargo_pkg.py +124 -0
- depscan/lib/package_query/metadata.py +170 -0
- depscan/lib/package_query/npm_pkg.py +345 -0
- depscan/lib/package_query/pkg_query.py +195 -0
- depscan/lib/package_query/pypi_pkg.py +113 -0
- depscan/lib/tomlparse.py +116 -0
- depscan/lib/utils.py +34 -188
- owasp_depscan-6.0.0a2.dist-info/METADATA +390 -0
- {owasp_depscan-5.4.8.dist-info → owasp_depscan-6.0.0a2.dist-info}/RECORD +28 -25
- {owasp_depscan-5.4.8.dist-info → owasp_depscan-6.0.0a2.dist-info}/WHEEL +1 -1
- vendor/choosealicense.com/_licenses/cern-ohl-p-2.0.txt +1 -1
- vendor/choosealicense.com/_licenses/cern-ohl-s-2.0.txt +1 -1
- vendor/choosealicense.com/_licenses/cern-ohl-w-2.0.txt +2 -2
- vendor/choosealicense.com/_licenses/mit-0.txt +1 -1
- vendor/spdx/json/licenses.json +904 -677
- depscan/lib/analysis.py +0 -1550
- depscan/lib/csaf.py +0 -1860
- depscan/lib/normalize.py +0 -312
- depscan/lib/orasclient.py +0 -142
- depscan/lib/pkg_query.py +0 -532
- owasp_depscan-5.4.8.dist-info/METADATA +0 -580
- {owasp_depscan-5.4.8.dist-info → owasp_depscan-6.0.0a2.dist-info}/entry_points.txt +0 -0
- {owasp_depscan-5.4.8.dist-info → owasp_depscan-6.0.0a2.dist-info/licenses}/LICENSE +0 -0
- {owasp_depscan-5.4.8.dist-info → owasp_depscan-6.0.0a2.dist-info}/top_level.txt +0 -0
depscan/lib/analysis.py
DELETED
|
@@ -1,1550 +0,0 @@
|
|
|
1
|
-
# -*- coding: utf-8 -*-
|
|
2
|
-
|
|
3
|
-
import contextlib
|
|
4
|
-
import json
|
|
5
|
-
import os.path
|
|
6
|
-
import re
|
|
7
|
-
from collections import OrderedDict, defaultdict
|
|
8
|
-
from dataclasses import dataclass
|
|
9
|
-
from typing import Dict, List, Optional
|
|
10
|
-
|
|
11
|
-
import cvss
|
|
12
|
-
from cvss import CVSSError
|
|
13
|
-
from packageurl import PackageURL
|
|
14
|
-
from rich import box
|
|
15
|
-
from rich.markdown import Markdown
|
|
16
|
-
from rich.panel import Panel
|
|
17
|
-
from rich.style import Style
|
|
18
|
-
from rich.table import Table
|
|
19
|
-
from rich.tree import Tree
|
|
20
|
-
from vdb.lib import CPE_FULL_REGEX
|
|
21
|
-
from vdb.lib.config import placeholder_exclude_version, placeholder_fix_version
|
|
22
|
-
from vdb.lib.utils import parse_cpe, parse_purl
|
|
23
|
-
|
|
24
|
-
from depscan.lib import config
|
|
25
|
-
from depscan.lib.logger import LOG, console
|
|
26
|
-
from depscan.lib.utils import max_version
|
|
27
|
-
|
|
28
|
-
NEWLINE = "\\n"
|
|
29
|
-
|
|
30
|
-
CWE_SPLITTER = re.compile(r"(?<=CWE-)[0-9]\d{0,5}", re.IGNORECASE)
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
def best_fixed_location(sug_version, orig_fixed_location):
|
|
34
|
-
"""
|
|
35
|
-
Compares the suggested version with the version from the original fixed
|
|
36
|
-
location and returns the best version based on the major versions.
|
|
37
|
-
See: https://github.com/AppThreat/dep-scan/issues/72
|
|
38
|
-
|
|
39
|
-
:param sug_version: Suggested version
|
|
40
|
-
:param orig_fixed_location: Version from original fixed location
|
|
41
|
-
:return: Version
|
|
42
|
-
"""
|
|
43
|
-
if (
|
|
44
|
-
not orig_fixed_location
|
|
45
|
-
and sug_version
|
|
46
|
-
and sug_version != placeholder_fix_version
|
|
47
|
-
):
|
|
48
|
-
return sug_version
|
|
49
|
-
if sug_version and orig_fixed_location:
|
|
50
|
-
if sug_version == placeholder_fix_version:
|
|
51
|
-
return ""
|
|
52
|
-
tmp_a = sug_version.split(".")[0]
|
|
53
|
-
tmp_b = orig_fixed_location.split(".")[0]
|
|
54
|
-
if tmp_a == tmp_b:
|
|
55
|
-
return sug_version
|
|
56
|
-
# Handle the placeholder version used by OS distros
|
|
57
|
-
if orig_fixed_location == placeholder_fix_version:
|
|
58
|
-
return ""
|
|
59
|
-
return orig_fixed_location
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
def distro_package(package_issue):
|
|
63
|
-
"""
|
|
64
|
-
Determines if a given Common Platform Enumeration (CPE) belongs to an
|
|
65
|
-
operating system (OS) distribution.
|
|
66
|
-
TODO: Clarify parameter
|
|
67
|
-
:param package_issue: An object
|
|
68
|
-
:return: bool
|
|
69
|
-
"""
|
|
70
|
-
if package_issue:
|
|
71
|
-
all_parts = CPE_FULL_REGEX.match(
|
|
72
|
-
package_issue["affected_location"].get("cpe_uri")
|
|
73
|
-
)
|
|
74
|
-
if (
|
|
75
|
-
all_parts
|
|
76
|
-
and all_parts.group("vendor")
|
|
77
|
-
and all_parts.group("vendor") in config.LINUX_DISTRO_WITH_EDITIONS
|
|
78
|
-
and all_parts.group("edition")
|
|
79
|
-
and all_parts.group("edition") != "*"
|
|
80
|
-
):
|
|
81
|
-
return True
|
|
82
|
-
return False
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
def retrieve_bom_dependency_tree(bom_file):
|
|
86
|
-
"""
|
|
87
|
-
Method to retrieve the dependency tree from a CycloneDX SBOM
|
|
88
|
-
|
|
89
|
-
:param bom_file: Sbom to be loaded
|
|
90
|
-
:return: Dependency tree as a list
|
|
91
|
-
"""
|
|
92
|
-
if not bom_file:
|
|
93
|
-
return [], None
|
|
94
|
-
try:
|
|
95
|
-
with open(bom_file, encoding="utf-8") as bfp:
|
|
96
|
-
bom_data = json.load(bfp)
|
|
97
|
-
if bom_data:
|
|
98
|
-
return bom_data.get("dependencies", []), bom_data
|
|
99
|
-
except json.JSONDecodeError:
|
|
100
|
-
pass
|
|
101
|
-
return [], None
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
def retrieve_oci_properties(bom_data):
|
|
105
|
-
"""
|
|
106
|
-
Retrieves OCI properties from the given BOM data.
|
|
107
|
-
|
|
108
|
-
:param bom_data: The BOM data to retrieve OCI properties from.
|
|
109
|
-
:type bom_data: dict
|
|
110
|
-
|
|
111
|
-
:return: A dictionary containing the retrieved OCI properties.
|
|
112
|
-
:rtype: dict
|
|
113
|
-
"""
|
|
114
|
-
props = {}
|
|
115
|
-
if not bom_data:
|
|
116
|
-
return props
|
|
117
|
-
for p in bom_data.get("metadata", {}).get("properties", []):
|
|
118
|
-
if p.get("name", "").startswith("oci:image:"):
|
|
119
|
-
props[p.get("name")] = p.get("value")
|
|
120
|
-
return props
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
def get_pkg_display(tree_pkg, current_pkg, extra_text=None):
|
|
124
|
-
"""
|
|
125
|
-
Construct a string that can be used for display
|
|
126
|
-
|
|
127
|
-
:param tree_pkg: Package to display
|
|
128
|
-
:param current_pkg: The package currently being processed
|
|
129
|
-
:param extra_text: Additional text to append to the display string
|
|
130
|
-
:return: Constructed display string
|
|
131
|
-
"""
|
|
132
|
-
full_pkg_display = current_pkg
|
|
133
|
-
highlightable = tree_pkg and (
|
|
134
|
-
tree_pkg == current_pkg or tree_pkg in current_pkg
|
|
135
|
-
)
|
|
136
|
-
if tree_pkg:
|
|
137
|
-
if current_pkg.startswith("pkg:"):
|
|
138
|
-
purl_obj = parse_purl(current_pkg)
|
|
139
|
-
if purl_obj:
|
|
140
|
-
version_used = purl_obj.get("version")
|
|
141
|
-
if version_used:
|
|
142
|
-
full_pkg_display = (
|
|
143
|
-
f"""{purl_obj.get("name")}@{version_used}"""
|
|
144
|
-
)
|
|
145
|
-
if extra_text and highlightable:
|
|
146
|
-
full_pkg_display = f"{full_pkg_display} {extra_text}"
|
|
147
|
-
return full_pkg_display
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
def get_tree_style(purl, p):
|
|
151
|
-
"""
|
|
152
|
-
Return a rich style to be used in a tree
|
|
153
|
-
|
|
154
|
-
:param purl: Package purl to compare
|
|
155
|
-
:param p: Package reference to check against purl
|
|
156
|
-
:return: The rich style to be used in a tree visualization.
|
|
157
|
-
"""
|
|
158
|
-
if purl and (purl == p or purl in p):
|
|
159
|
-
return Style(color="#FF753D", bold=True, italic=False)
|
|
160
|
-
return Style(color="#7C8082", bold=False, italic=True)
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
def pkg_sub_tree(
|
|
164
|
-
purl,
|
|
165
|
-
full_pkg,
|
|
166
|
-
bom_dependency_tree,
|
|
167
|
-
pkg_severity=None,
|
|
168
|
-
as_tree=False,
|
|
169
|
-
extra_text=None,
|
|
170
|
-
):
|
|
171
|
-
"""
|
|
172
|
-
Method to locate and return a package tree from a dependency tree
|
|
173
|
-
|
|
174
|
-
:param purl: The package purl to compare.
|
|
175
|
-
:param full_pkg: The package reference to check against purl.
|
|
176
|
-
:param bom_dependency_tree: The dependency tree.
|
|
177
|
-
:param pkg_severity: The severity of the package vulnerability.
|
|
178
|
-
:param as_tree: Flag indicating whether to return as a rich tree object.
|
|
179
|
-
:param extra_text: Additional text to append to the display string.
|
|
180
|
-
"""
|
|
181
|
-
pkg_tree = []
|
|
182
|
-
if full_pkg and not purl:
|
|
183
|
-
purl = full_pkg
|
|
184
|
-
if not bom_dependency_tree:
|
|
185
|
-
return [purl], Tree(
|
|
186
|
-
get_pkg_display(purl, purl, extra_text=extra_text),
|
|
187
|
-
style=Style(
|
|
188
|
-
color="bright_red" if pkg_severity == "CRITICAL" else None
|
|
189
|
-
),
|
|
190
|
-
)
|
|
191
|
-
if len(bom_dependency_tree) > 1:
|
|
192
|
-
for dep in bom_dependency_tree[1:]:
|
|
193
|
-
ref = dep.get("ref")
|
|
194
|
-
depends_on = dep.get("dependsOn", [])
|
|
195
|
-
if purl in ref:
|
|
196
|
-
if not pkg_tree or (pkg_tree and ref != pkg_tree[-1]):
|
|
197
|
-
pkg_tree.append(ref)
|
|
198
|
-
elif purl in depends_on and purl not in pkg_tree:
|
|
199
|
-
pkg_tree.append(ref)
|
|
200
|
-
pkg_tree.append(purl)
|
|
201
|
-
break
|
|
202
|
-
# We need to iterate again to identify any parent for the parent
|
|
203
|
-
if pkg_tree and len(bom_dependency_tree) > 1:
|
|
204
|
-
for dep in bom_dependency_tree[1:]:
|
|
205
|
-
if pkg_tree[0] in dep.get("dependsOn", []):
|
|
206
|
-
if dep.get("ref") not in pkg_tree:
|
|
207
|
-
pkg_tree.insert(0, dep.get("ref"))
|
|
208
|
-
break
|
|
209
|
-
if as_tree and pkg_tree:
|
|
210
|
-
tree = Tree(
|
|
211
|
-
get_pkg_display(purl, pkg_tree[0], extra_text=extra_text),
|
|
212
|
-
style=get_tree_style(purl, pkg_tree[0]),
|
|
213
|
-
)
|
|
214
|
-
if len(pkg_tree) > 1:
|
|
215
|
-
subtree = tree
|
|
216
|
-
for p in pkg_tree[1:]:
|
|
217
|
-
subtree = subtree.add(
|
|
218
|
-
get_pkg_display(purl, p, extra_text=extra_text),
|
|
219
|
-
style=get_tree_style(purl, p),
|
|
220
|
-
)
|
|
221
|
-
return pkg_tree, tree
|
|
222
|
-
return pkg_tree, Tree(
|
|
223
|
-
get_pkg_display(purl, purl, extra_text=extra_text),
|
|
224
|
-
style=Style(color="bright_red" if pkg_severity == "CRITICAL" else None),
|
|
225
|
-
)
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
def is_lang_sw_edition(package_issue):
|
|
229
|
-
"""Check if the specified sw_edition belongs to any application package type"""
|
|
230
|
-
if package_issue and package_issue["affected_location"].get("cpe_uri"):
|
|
231
|
-
all_parts = CPE_FULL_REGEX.match(
|
|
232
|
-
package_issue["affected_location"].get("cpe_uri")
|
|
233
|
-
)
|
|
234
|
-
if not all_parts or all_parts.group("sw_edition") in ("*", "-"):
|
|
235
|
-
return True
|
|
236
|
-
if (
|
|
237
|
-
config.LANG_PKG_TYPES.get(all_parts.group("sw_edition"))
|
|
238
|
-
or all_parts.group("sw_edition") in config.LANG_PKG_TYPES.values()
|
|
239
|
-
):
|
|
240
|
-
return True
|
|
241
|
-
return False
|
|
242
|
-
return True
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
def is_os_target_sw(package_issue):
|
|
246
|
-
"""
|
|
247
|
-
Since we rely on NVD, we filter those target_sw that definitely belong to a language
|
|
248
|
-
"""
|
|
249
|
-
if package_issue and package_issue["affected_location"].get("cpe_uri"):
|
|
250
|
-
all_parts = CPE_FULL_REGEX.match(
|
|
251
|
-
package_issue["affected_location"].get("cpe_uri")
|
|
252
|
-
)
|
|
253
|
-
if (
|
|
254
|
-
all_parts
|
|
255
|
-
and all_parts.group("target_sw") not in ("*", "-")
|
|
256
|
-
and (
|
|
257
|
-
config.LANG_PKG_TYPES.get(all_parts.group("target_sw"))
|
|
258
|
-
or all_parts.group("target_sw")
|
|
259
|
-
in config.LANG_PKG_TYPES.values()
|
|
260
|
-
)
|
|
261
|
-
):
|
|
262
|
-
return False
|
|
263
|
-
return True
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
@dataclass
|
|
267
|
-
class PrepareVdrOptions:
|
|
268
|
-
project_type: str
|
|
269
|
-
results: List
|
|
270
|
-
pkg_aliases: Dict
|
|
271
|
-
purl_aliases: Dict
|
|
272
|
-
sug_version_dict: Dict
|
|
273
|
-
scoped_pkgs: Dict
|
|
274
|
-
no_vuln_table: bool
|
|
275
|
-
bom_file: Optional[str]
|
|
276
|
-
direct_purls: Dict
|
|
277
|
-
reached_purls: Dict
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
def prepare_vdr(options: PrepareVdrOptions):
|
|
281
|
-
"""
|
|
282
|
-
Generates a report summary of the dependency scan results, creates a
|
|
283
|
-
vulnerability table and a top priority table for packages that require
|
|
284
|
-
attention, prints the recommendations, and returns a list of
|
|
285
|
-
vulnerability details.
|
|
286
|
-
|
|
287
|
-
:param options: An instance of PrepareVdrOptions containing the function parameters.
|
|
288
|
-
:return: Vulnerability details, dictionary of prioritized items
|
|
289
|
-
:rtype: Tuple[List, Dict]
|
|
290
|
-
"""
|
|
291
|
-
if not options.results:
|
|
292
|
-
return [], {}
|
|
293
|
-
table = Table(
|
|
294
|
-
title=f"Dependency Scan Results ({options.project_type.upper()})",
|
|
295
|
-
box=box.DOUBLE_EDGE,
|
|
296
|
-
header_style="bold magenta",
|
|
297
|
-
show_lines=True,
|
|
298
|
-
min_width=150,
|
|
299
|
-
)
|
|
300
|
-
ids_seen = {}
|
|
301
|
-
direct_purls = options.direct_purls or {}
|
|
302
|
-
reached_purls = options.reached_purls or {}
|
|
303
|
-
required_pkgs = options.scoped_pkgs.get("required", [])
|
|
304
|
-
optional_pkgs = options.scoped_pkgs.get("optional", [])
|
|
305
|
-
fp_count = 0
|
|
306
|
-
pkg_attention_count = 0
|
|
307
|
-
critical_count = 0
|
|
308
|
-
malicious_count = 0
|
|
309
|
-
has_poc_count = 0
|
|
310
|
-
has_reachable_poc_count = 0
|
|
311
|
-
has_exploit_count = 0
|
|
312
|
-
has_reachable_exploit_count = 0
|
|
313
|
-
fix_version_count = 0
|
|
314
|
-
wont_fix_version_count = 0
|
|
315
|
-
has_os_packages = False
|
|
316
|
-
has_redhat_packages = False
|
|
317
|
-
has_ubuntu_packages = False
|
|
318
|
-
distro_packages_count = 0
|
|
319
|
-
pkg_group_rows = defaultdict(list)
|
|
320
|
-
pkg_vulnerabilities = []
|
|
321
|
-
# Retrieve any dependency tree from the SBOM
|
|
322
|
-
bom_dependency_tree, bom_data = retrieve_bom_dependency_tree(
|
|
323
|
-
options.bom_file
|
|
324
|
-
)
|
|
325
|
-
oci_props = retrieve_oci_properties(bom_data)
|
|
326
|
-
oci_product_types = oci_props.get("oci:image:componentTypes", "")
|
|
327
|
-
for h in [
|
|
328
|
-
"Dependency Tree" if len(bom_dependency_tree) > 0 else "CVE",
|
|
329
|
-
"Insights",
|
|
330
|
-
"Fix Version",
|
|
331
|
-
"Severity",
|
|
332
|
-
"Score",
|
|
333
|
-
]:
|
|
334
|
-
justify = "left"
|
|
335
|
-
if h == "Score":
|
|
336
|
-
justify = "right"
|
|
337
|
-
table.add_column(header=h, justify=justify, vertical="top")
|
|
338
|
-
for vuln_occ_dict in options.results:
|
|
339
|
-
vid = vuln_occ_dict.get("id")
|
|
340
|
-
problem_type = vuln_occ_dict.get("problem_type")
|
|
341
|
-
cwes = []
|
|
342
|
-
if problem_type:
|
|
343
|
-
cwes = split_cwe(problem_type)
|
|
344
|
-
has_flagged_cwe = False
|
|
345
|
-
package_issue = vuln_occ_dict.get("package_issue")
|
|
346
|
-
matched_by = vuln_occ_dict.get("matched_by")
|
|
347
|
-
full_pkg = package_issue["affected_location"].get("package")
|
|
348
|
-
project_type_pkg = (
|
|
349
|
-
f"{options.project_type}:"
|
|
350
|
-
f"{package_issue['affected_location'].get('package')}"
|
|
351
|
-
)
|
|
352
|
-
if package_issue["affected_location"].get("vendor"):
|
|
353
|
-
full_pkg = (
|
|
354
|
-
f"{package_issue['affected_location'].get('vendor')}:"
|
|
355
|
-
f"{package_issue['affected_location'].get('package')}"
|
|
356
|
-
)
|
|
357
|
-
elif package_issue["affected_location"].get("cpe_uri"):
|
|
358
|
-
vendor, _, _, _ = parse_cpe(
|
|
359
|
-
package_issue["affected_location"].get("cpe_uri")
|
|
360
|
-
)
|
|
361
|
-
if vendor:
|
|
362
|
-
full_pkg = (
|
|
363
|
-
f"{vendor}:"
|
|
364
|
-
f"{package_issue['affected_location'].get('package')}"
|
|
365
|
-
)
|
|
366
|
-
if matched_by:
|
|
367
|
-
version = matched_by.split("|")[-1]
|
|
368
|
-
full_pkg = full_pkg + ":" + version
|
|
369
|
-
# De-alias package names
|
|
370
|
-
if options.pkg_aliases.get(full_pkg):
|
|
371
|
-
full_pkg = options.pkg_aliases.get(full_pkg)
|
|
372
|
-
else:
|
|
373
|
-
full_pkg = options.pkg_aliases.get(full_pkg.lower(), full_pkg)
|
|
374
|
-
version_used = package_issue["affected_location"].get("version")
|
|
375
|
-
purl = options.purl_aliases.get(full_pkg, full_pkg)
|
|
376
|
-
package_type = None
|
|
377
|
-
insights = []
|
|
378
|
-
plain_insights = []
|
|
379
|
-
if vid.startswith("MAL-"):
|
|
380
|
-
insights.append("[bright_red]:stop_sign: Malicious[/bright_red]")
|
|
381
|
-
plain_insights.append("Malicious")
|
|
382
|
-
malicious_count += 1
|
|
383
|
-
purl_obj = None
|
|
384
|
-
vendor = package_issue["affected_location"].get("vendor")
|
|
385
|
-
# If the match was based on name and version alone then the alias might legitimately lack a full purl
|
|
386
|
-
# Such results are usually false positives but could yield good hits at times
|
|
387
|
-
# So, instead of suppressing fully we try our best to tune and reduce the FP
|
|
388
|
-
if not purl.startswith("pkg:"):
|
|
389
|
-
if options.project_type in config.OS_PKG_TYPES:
|
|
390
|
-
if vendor and (
|
|
391
|
-
vendor in config.LANG_PKG_TYPES.values()
|
|
392
|
-
or config.LANG_PKG_TYPES.get(vendor)
|
|
393
|
-
):
|
|
394
|
-
fp_count += 1
|
|
395
|
-
continue
|
|
396
|
-
# Some nvd data might match application CVEs for
|
|
397
|
-
# OS vendors which can be filtered
|
|
398
|
-
if not is_os_target_sw(package_issue):
|
|
399
|
-
fp_count += 1
|
|
400
|
-
continue
|
|
401
|
-
# Issue #320 - Malware matches without purl are false positives
|
|
402
|
-
if vid.startswith("MAL-"):
|
|
403
|
-
fp_count += 1
|
|
404
|
-
malicious_count -= 1
|
|
405
|
-
continue
|
|
406
|
-
else:
|
|
407
|
-
purl_obj = parse_purl(purl)
|
|
408
|
-
# Issue #320 - Malware matches without purl are false positives
|
|
409
|
-
if not purl_obj and vid.startswith("MAL-"):
|
|
410
|
-
fp_count += 1
|
|
411
|
-
malicious_count -= 1
|
|
412
|
-
continue
|
|
413
|
-
if purl_obj:
|
|
414
|
-
version_used = purl_obj.get("version")
|
|
415
|
-
package_type = purl_obj.get("type")
|
|
416
|
-
qualifiers = purl_obj.get("qualifiers", {})
|
|
417
|
-
# Filter application CVEs from distros
|
|
418
|
-
if (
|
|
419
|
-
config.LANG_PKG_TYPES.get(package_type)
|
|
420
|
-
or package_type in config.LANG_PKG_TYPES.values()
|
|
421
|
-
) and (
|
|
422
|
-
(vendor and vendor in config.OS_PKG_TYPES)
|
|
423
|
-
or not is_lang_sw_edition(package_issue)
|
|
424
|
-
):
|
|
425
|
-
fp_count += 1
|
|
426
|
-
continue
|
|
427
|
-
if package_type in config.OS_PKG_TYPES:
|
|
428
|
-
# Bug #208 - do not report application CVEs
|
|
429
|
-
if vendor and (
|
|
430
|
-
vendor in config.LANG_PKG_TYPES.values()
|
|
431
|
-
or config.LANG_PKG_TYPES.get(vendor)
|
|
432
|
-
):
|
|
433
|
-
fp_count += 1
|
|
434
|
-
continue
|
|
435
|
-
if package_type and (
|
|
436
|
-
package_type in config.LANG_PKG_TYPES.values()
|
|
437
|
-
or config.LANG_PKG_TYPES.get(package_type)
|
|
438
|
-
):
|
|
439
|
-
fp_count += 1
|
|
440
|
-
continue
|
|
441
|
-
if (
|
|
442
|
-
vendor
|
|
443
|
-
and oci_product_types
|
|
444
|
-
and vendor not in oci_product_types
|
|
445
|
-
):
|
|
446
|
-
# Bug #170 - do not report CVEs belonging to other distros
|
|
447
|
-
if vendor in config.OS_PKG_TYPES:
|
|
448
|
-
fp_count += 1
|
|
449
|
-
continue
|
|
450
|
-
# Some nvd data might match application CVEs for
|
|
451
|
-
# OS vendors which can be filtered
|
|
452
|
-
if not is_os_target_sw(package_issue):
|
|
453
|
-
fp_count += 1
|
|
454
|
-
continue
|
|
455
|
-
insights.append(
|
|
456
|
-
f"[#7C8082]:telescope: Vendor {vendor}[/#7C8082]"
|
|
457
|
-
)
|
|
458
|
-
plain_insights.append(f"Vendor {vendor}")
|
|
459
|
-
has_os_packages = True
|
|
460
|
-
for acwe in cwes:
|
|
461
|
-
if acwe in config.OS_VULN_KEY_CWES:
|
|
462
|
-
has_flagged_cwe = True
|
|
463
|
-
break
|
|
464
|
-
# Don't flag the cwe for ignorable os packages
|
|
465
|
-
if has_flagged_cwe and (
|
|
466
|
-
purl_obj.get("name") in config.OS_PKG_UNINSTALLABLE
|
|
467
|
-
or purl_obj.get("name") in config.OS_PKG_IGNORABLE
|
|
468
|
-
or vendor in config.OS_PKG_IGNORABLE
|
|
469
|
-
):
|
|
470
|
-
has_flagged_cwe = False
|
|
471
|
-
else:
|
|
472
|
-
if (
|
|
473
|
-
purl_obj.get("name") in config.OS_PKG_IGNORABLE
|
|
474
|
-
or vendor in config.OS_PKG_IGNORABLE
|
|
475
|
-
):
|
|
476
|
-
insights.append(
|
|
477
|
-
"[#7C8082]:mute: Suppress for containers[/#7C8082]"
|
|
478
|
-
)
|
|
479
|
-
plain_insights.append("Suppress for containers")
|
|
480
|
-
elif (
|
|
481
|
-
purl_obj.get("name") in config.OS_PKG_UNINSTALLABLE
|
|
482
|
-
):
|
|
483
|
-
insights.append(
|
|
484
|
-
"[#7C8082]:scissors: Uninstall candidate[/#7C8082]"
|
|
485
|
-
)
|
|
486
|
-
plain_insights.append("Uninstall candidate")
|
|
487
|
-
# If the flag remains after all the suppressions then add it as an insight
|
|
488
|
-
if has_flagged_cwe:
|
|
489
|
-
insights.append(
|
|
490
|
-
"[#7C8082]:triangular_flag: Flagged weakness[/#7C8082]"
|
|
491
|
-
)
|
|
492
|
-
plain_insights.append("Flagged weakness")
|
|
493
|
-
if qualifiers:
|
|
494
|
-
if "ubuntu" in qualifiers.get("distro", ""):
|
|
495
|
-
has_ubuntu_packages = True
|
|
496
|
-
if "rhel" in qualifiers.get("distro", ""):
|
|
497
|
-
has_redhat_packages = True
|
|
498
|
-
if ids_seen.get(vid + purl):
|
|
499
|
-
fp_count += 1
|
|
500
|
-
continue
|
|
501
|
-
# Mark this CVE + pkg as seen to avoid duplicates
|
|
502
|
-
ids_seen[vid + purl] = True
|
|
503
|
-
# Find the best fix version
|
|
504
|
-
fixed_location = best_fixed_location(
|
|
505
|
-
options.sug_version_dict.get(purl), package_issue["fixed_location"]
|
|
506
|
-
)
|
|
507
|
-
if (
|
|
508
|
-
options.sug_version_dict.get(purl) == placeholder_fix_version
|
|
509
|
-
or package_issue["fixed_location"] == placeholder_fix_version
|
|
510
|
-
):
|
|
511
|
-
wont_fix_version_count += 1
|
|
512
|
-
package_usage = "N/A"
|
|
513
|
-
plain_package_usage = "N/A"
|
|
514
|
-
pkg_severity = vuln_occ_dict.get("severity")
|
|
515
|
-
is_required = False
|
|
516
|
-
pkg_requires_attn = False
|
|
517
|
-
related_urls = vuln_occ_dict.get("related_urls")
|
|
518
|
-
clinks = classify_links(
|
|
519
|
-
related_urls,
|
|
520
|
-
)
|
|
521
|
-
if direct_purls.get(purl):
|
|
522
|
-
is_required = True
|
|
523
|
-
elif not direct_purls and (
|
|
524
|
-
purl in required_pkgs
|
|
525
|
-
or full_pkg in required_pkgs
|
|
526
|
-
or project_type_pkg in required_pkgs
|
|
527
|
-
):
|
|
528
|
-
is_required = True
|
|
529
|
-
if pkg_severity in ("CRITICAL", "HIGH"):
|
|
530
|
-
if is_required:
|
|
531
|
-
pkg_attention_count += 1
|
|
532
|
-
if fixed_location:
|
|
533
|
-
fix_version_count += 1
|
|
534
|
-
if (
|
|
535
|
-
clinks.get("vendor") or package_type in config.OS_PKG_TYPES
|
|
536
|
-
) and pkg_severity == "CRITICAL":
|
|
537
|
-
critical_count += 1
|
|
538
|
-
# Locate this package in the tree
|
|
539
|
-
pkg_tree_list, p_rich_tree = pkg_sub_tree(
|
|
540
|
-
purl,
|
|
541
|
-
full_pkg.replace(":", "/"),
|
|
542
|
-
bom_dependency_tree,
|
|
543
|
-
pkg_severity=pkg_severity,
|
|
544
|
-
as_tree=True,
|
|
545
|
-
extra_text=f":left_arrow: {vid}",
|
|
546
|
-
)
|
|
547
|
-
if is_required and package_type not in config.OS_PKG_TYPES:
|
|
548
|
-
if direct_purls.get(purl):
|
|
549
|
-
package_usage = (
|
|
550
|
-
f":direct_hit: Used in [info]"
|
|
551
|
-
f"{str(direct_purls.get(purl))}"
|
|
552
|
-
f"[/info] locations"
|
|
553
|
-
)
|
|
554
|
-
plain_package_usage = (
|
|
555
|
-
f"Used in {str(direct_purls.get(purl))} locations"
|
|
556
|
-
)
|
|
557
|
-
else:
|
|
558
|
-
package_usage = ":direct_hit: Direct dependency"
|
|
559
|
-
plain_package_usage = "Direct dependency"
|
|
560
|
-
elif (
|
|
561
|
-
not optional_pkgs and pkg_tree_list and len(pkg_tree_list) > 1
|
|
562
|
-
) or (
|
|
563
|
-
purl in optional_pkgs
|
|
564
|
-
or full_pkg in optional_pkgs
|
|
565
|
-
or project_type_pkg in optional_pkgs
|
|
566
|
-
):
|
|
567
|
-
if package_type in config.OS_PKG_TYPES:
|
|
568
|
-
package_usage = (
|
|
569
|
-
"[spring_green4]:notebook: Local install[/spring_green4]"
|
|
570
|
-
)
|
|
571
|
-
plain_package_usage = "Local install"
|
|
572
|
-
has_os_packages = True
|
|
573
|
-
else:
|
|
574
|
-
package_usage = (
|
|
575
|
-
"[spring_green4]:notebook: Indirect "
|
|
576
|
-
"dependency[/spring_green4]"
|
|
577
|
-
)
|
|
578
|
-
plain_package_usage = "Indirect dependency"
|
|
579
|
-
if package_usage != "N/A":
|
|
580
|
-
insights.append(package_usage)
|
|
581
|
-
plain_insights.append(plain_package_usage)
|
|
582
|
-
if clinks.get("poc") or clinks.get("Bug Bounty"):
|
|
583
|
-
if reached_purls.get(purl):
|
|
584
|
-
insights.append(
|
|
585
|
-
"[yellow]:notebook_with_decorative_cover: Reachable Bounty target[/yellow]"
|
|
586
|
-
)
|
|
587
|
-
plain_insights.append("Reachable Bounty target")
|
|
588
|
-
has_reachable_poc_count += 1
|
|
589
|
-
has_reachable_exploit_count += 1
|
|
590
|
-
pkg_requires_attn = True
|
|
591
|
-
elif direct_purls.get(purl):
|
|
592
|
-
insights.append(
|
|
593
|
-
"[yellow]:notebook_with_decorative_cover: Bug Bounty target[/yellow]"
|
|
594
|
-
)
|
|
595
|
-
plain_insights.append("Bug Bounty target")
|
|
596
|
-
else:
|
|
597
|
-
insights.append(
|
|
598
|
-
"[yellow]:notebook_with_decorative_cover: Has PoC[/yellow]"
|
|
599
|
-
)
|
|
600
|
-
plain_insights.append("Has PoC")
|
|
601
|
-
has_poc_count += 1
|
|
602
|
-
if pkg_severity in ("CRITICAL", "HIGH"):
|
|
603
|
-
pkg_requires_attn = True
|
|
604
|
-
if (clinks.get("vendor") and package_type not in config.OS_PKG_TYPES) or reached_purls.get(purl):
|
|
605
|
-
if reached_purls.get(purl):
|
|
606
|
-
# If it has a poc, an insight might have gotten added above
|
|
607
|
-
if not pkg_requires_attn:
|
|
608
|
-
insights.append(":receipt: Reachable")
|
|
609
|
-
plain_insights.append("Reachable")
|
|
610
|
-
else:
|
|
611
|
-
insights.append(":receipt: Vendor Confirmed")
|
|
612
|
-
plain_insights.append("Vendor Confirmed")
|
|
613
|
-
if clinks.get("exploit"):
|
|
614
|
-
if reached_purls.get(purl) or direct_purls.get(purl):
|
|
615
|
-
insights.append(
|
|
616
|
-
"[bright_red]:exclamation_mark: Reachable and Exploitable[/bright_red]"
|
|
617
|
-
)
|
|
618
|
-
plain_insights.append("Reachable and Exploitable")
|
|
619
|
-
has_reachable_exploit_count += 1
|
|
620
|
-
# Fail safe. Packages with exploits and direct usage without
|
|
621
|
-
# a reachable flow are still considered reachable to reduce
|
|
622
|
-
# false negatives
|
|
623
|
-
if not reached_purls.get(purl):
|
|
624
|
-
reached_purls[purl] = 1
|
|
625
|
-
elif has_flagged_cwe:
|
|
626
|
-
if (vendor and vendor in ("gnu",)) or (
|
|
627
|
-
purl_obj and purl_obj.get("name") in ("glibc", "openssl")
|
|
628
|
-
):
|
|
629
|
-
insights.append(
|
|
630
|
-
"[bright_red]:exclamation_mark: Reachable and Exploitable[/bright_red]"
|
|
631
|
-
)
|
|
632
|
-
plain_insights.append("Reachable and Exploitable")
|
|
633
|
-
has_reachable_exploit_count += 1
|
|
634
|
-
else:
|
|
635
|
-
insights.append(
|
|
636
|
-
"[bright_red]:exclamation_mark: Exploitable[/bright_red]"
|
|
637
|
-
)
|
|
638
|
-
plain_insights.append("Exploitable")
|
|
639
|
-
has_exploit_count += 1
|
|
640
|
-
else:
|
|
641
|
-
insights.append(
|
|
642
|
-
"[bright_red]:exclamation_mark: Known Exploits[/bright_red]"
|
|
643
|
-
)
|
|
644
|
-
plain_insights.append("Known Exploits")
|
|
645
|
-
has_exploit_count += 1
|
|
646
|
-
pkg_requires_attn = True
|
|
647
|
-
if distro_package(package_issue):
|
|
648
|
-
insights.append(
|
|
649
|
-
"[spring_green4]:direct_hit: Distro specific[/spring_green4]"
|
|
650
|
-
)
|
|
651
|
-
plain_insights.append("Distro specific")
|
|
652
|
-
distro_packages_count += 1
|
|
653
|
-
has_os_packages = True
|
|
654
|
-
if pkg_requires_attn and fixed_location and purl:
|
|
655
|
-
pkg_group_rows[purl].append(
|
|
656
|
-
{
|
|
657
|
-
"id": vid,
|
|
658
|
-
"fixed_location": fixed_location,
|
|
659
|
-
"p_rich_tree": p_rich_tree,
|
|
660
|
-
}
|
|
661
|
-
)
|
|
662
|
-
if not options.no_vuln_table:
|
|
663
|
-
table.add_row(
|
|
664
|
-
p_rich_tree,
|
|
665
|
-
"\n".join(insights),
|
|
666
|
-
fixed_location,
|
|
667
|
-
f"""{"[bright_red]" if pkg_severity == "CRITICAL" else ""}{vuln_occ_dict.get("severity")}""",
|
|
668
|
-
f"""{"[bright_red]" if pkg_severity == "CRITICAL" else ""}{vuln_occ_dict.get("cvss_score")}""",
|
|
669
|
-
)
|
|
670
|
-
if purl:
|
|
671
|
-
source = {}
|
|
672
|
-
if vid.startswith("CVE"):
|
|
673
|
-
source = {
|
|
674
|
-
"name": "NVD",
|
|
675
|
-
"url": f"https://nvd.nist.gov/vuln/detail/{vid}",
|
|
676
|
-
}
|
|
677
|
-
elif vid.startswith("GHSA") or vid.startswith("npm"):
|
|
678
|
-
source = {
|
|
679
|
-
"name": "GitHub",
|
|
680
|
-
"url": f"https://github.com/advisories/{vid}",
|
|
681
|
-
}
|
|
682
|
-
versions = [{"version": version_used, "status": "affected"}]
|
|
683
|
-
recommendation = ""
|
|
684
|
-
if fixed_location:
|
|
685
|
-
versions.append(
|
|
686
|
-
{"version": fixed_location, "status": "unaffected"}
|
|
687
|
-
)
|
|
688
|
-
recommendation = f"Update to {fixed_location} or later"
|
|
689
|
-
affects = [{"ref": purl, "versions": versions}]
|
|
690
|
-
analysis = {}
|
|
691
|
-
if clinks.get("exploit"):
|
|
692
|
-
analysis = {
|
|
693
|
-
"state": "exploitable",
|
|
694
|
-
"detail": f'See {clinks.get("exploit")}',
|
|
695
|
-
}
|
|
696
|
-
elif clinks.get("poc"):
|
|
697
|
-
analysis = {
|
|
698
|
-
"state": "in_triage",
|
|
699
|
-
"detail": f'See {clinks.get("poc")}',
|
|
700
|
-
}
|
|
701
|
-
elif pkg_tree_list and len(pkg_tree_list) > 1:
|
|
702
|
-
analysis = {
|
|
703
|
-
"state": "in_triage",
|
|
704
|
-
"detail": f"Dependency Tree: {json.dumps(pkg_tree_list)}",
|
|
705
|
-
}
|
|
706
|
-
ratings = cvss_to_vdr_rating(vuln_occ_dict)
|
|
707
|
-
properties = [
|
|
708
|
-
{
|
|
709
|
-
"name": "depscan:insights",
|
|
710
|
-
"value": "\\n".join(plain_insights),
|
|
711
|
-
},
|
|
712
|
-
{
|
|
713
|
-
"name": "depscan:prioritized",
|
|
714
|
-
"value": "true" if pkg_group_rows.get(purl) else "false",
|
|
715
|
-
},
|
|
716
|
-
]
|
|
717
|
-
affected_version_range = get_version_range(package_issue, purl)
|
|
718
|
-
if affected_version_range:
|
|
719
|
-
properties.append(affected_version_range)
|
|
720
|
-
advisories = []
|
|
721
|
-
for k, v in clinks.items():
|
|
722
|
-
advisories.append({"title": k, "url": v})
|
|
723
|
-
vuln = {
|
|
724
|
-
"bom-ref": f"{vid}/{purl}",
|
|
725
|
-
"id": vid,
|
|
726
|
-
"source": source,
|
|
727
|
-
"ratings": ratings,
|
|
728
|
-
"cwes": cwes,
|
|
729
|
-
"description": vuln_occ_dict.get("short_description"),
|
|
730
|
-
"recommendation": recommendation,
|
|
731
|
-
"advisories": advisories,
|
|
732
|
-
"analysis": analysis,
|
|
733
|
-
"affects": affects,
|
|
734
|
-
"properties": properties,
|
|
735
|
-
}
|
|
736
|
-
if source_orig_time := vuln_occ_dict.get("source_orig_time"):
|
|
737
|
-
vuln["published"] = source_orig_time
|
|
738
|
-
if source_update_time := vuln_occ_dict.get("source_update_time"):
|
|
739
|
-
vuln["updated"] = source_update_time
|
|
740
|
-
pkg_vulnerabilities.append(vuln)
|
|
741
|
-
# If the user doesn't want any table output return quickly
|
|
742
|
-
if options.no_vuln_table:
|
|
743
|
-
return pkg_vulnerabilities, pkg_group_rows
|
|
744
|
-
if pkg_vulnerabilities:
|
|
745
|
-
console.print()
|
|
746
|
-
console.print(table)
|
|
747
|
-
if pkg_group_rows:
|
|
748
|
-
psection = Markdown(
|
|
749
|
-
"""
|
|
750
|
-
Next Steps
|
|
751
|
-
----------
|
|
752
|
-
|
|
753
|
-
Below are the vulnerabilities prioritized by depscan. Follow your team's remediation workflow to mitigate these findings.
|
|
754
|
-
""",
|
|
755
|
-
justify="left",
|
|
756
|
-
)
|
|
757
|
-
console.print(psection)
|
|
758
|
-
utable = Table(
|
|
759
|
-
title=f"Top Priority ({options.project_type.upper()})",
|
|
760
|
-
box=box.DOUBLE_EDGE,
|
|
761
|
-
header_style="bold magenta",
|
|
762
|
-
show_lines=True,
|
|
763
|
-
min_width=150,
|
|
764
|
-
)
|
|
765
|
-
for h in ("Package", "CVEs", "Fix Version", "Reachable"):
|
|
766
|
-
utable.add_column(header=h, vertical="top")
|
|
767
|
-
for k, v in pkg_group_rows.items():
|
|
768
|
-
cve_list = []
|
|
769
|
-
fv = None
|
|
770
|
-
for c in v:
|
|
771
|
-
cve_list.append(c.get("id"))
|
|
772
|
-
if not fv:
|
|
773
|
-
fv = c.get("fixed_location")
|
|
774
|
-
utable.add_row(
|
|
775
|
-
v[0].get("p_rich_tree"),
|
|
776
|
-
"\n".join(sorted(cve_list, reverse=True)),
|
|
777
|
-
f"[bright_green]{fv}[/bright_green]",
|
|
778
|
-
"[warning]Yes[/warning]" if reached_purls.get(k) else "",
|
|
779
|
-
)
|
|
780
|
-
console.print()
|
|
781
|
-
console.print(utable)
|
|
782
|
-
console.print()
|
|
783
|
-
if malicious_count:
|
|
784
|
-
rmessage = ":stop_sign: Malicious package found! Treat this as a [bold]security incident[/bold] and follow your organization's playbook to remove this package from all affected applications."
|
|
785
|
-
if malicious_count > 1:
|
|
786
|
-
rmessage = f":stop_sign: {malicious_count} malicious packages found in this project! Treat this as a [bold]security incident[/bold] and follow your organization's playbook to remove the packages from all affected applications."
|
|
787
|
-
console.print(
|
|
788
|
-
Panel(
|
|
789
|
-
rmessage,
|
|
790
|
-
title="Action Required",
|
|
791
|
-
expand=False,
|
|
792
|
-
)
|
|
793
|
-
)
|
|
794
|
-
elif options.scoped_pkgs or has_exploit_count:
|
|
795
|
-
if not pkg_attention_count and has_exploit_count:
|
|
796
|
-
if has_reachable_exploit_count:
|
|
797
|
-
rmessage = (
|
|
798
|
-
f":point_right: [magenta]{has_reachable_exploit_count}"
|
|
799
|
-
f"[/magenta] out of {len(pkg_vulnerabilities)} vulnerabilities "
|
|
800
|
-
f"have [dark magenta]reachable[/dark magenta] exploits and requires your ["
|
|
801
|
-
f"magenta]immediate[/magenta] attention."
|
|
802
|
-
)
|
|
803
|
-
else:
|
|
804
|
-
rmessage = (
|
|
805
|
-
f":point_right: [magenta]{has_exploit_count}"
|
|
806
|
-
f"[/magenta] out of {len(pkg_vulnerabilities)} vulnerabilities "
|
|
807
|
-
f"have known exploits and requires your ["
|
|
808
|
-
f"magenta]immediate[/magenta] attention."
|
|
809
|
-
)
|
|
810
|
-
if not has_os_packages:
|
|
811
|
-
rmessage += (
|
|
812
|
-
"\nAdditional workarounds and configuration "
|
|
813
|
-
"changes might be required to remediate these "
|
|
814
|
-
"vulnerabilities."
|
|
815
|
-
)
|
|
816
|
-
if not options.scoped_pkgs:
|
|
817
|
-
rmessage += (
|
|
818
|
-
"\nNOTE: Package usage analysis was not "
|
|
819
|
-
"performed for this project."
|
|
820
|
-
)
|
|
821
|
-
else:
|
|
822
|
-
rmessage += (
|
|
823
|
-
"\n:scissors: Consider trimming this image by removing any "
|
|
824
|
-
"unwanted packages. Alternatively, use a slim "
|
|
825
|
-
"base image."
|
|
826
|
-
)
|
|
827
|
-
if distro_packages_count and distro_packages_count < len(
|
|
828
|
-
pkg_vulnerabilities
|
|
829
|
-
):
|
|
830
|
-
if (
|
|
831
|
-
len(pkg_vulnerabilities)
|
|
832
|
-
> config.max_distro_vulnerabilities
|
|
833
|
-
):
|
|
834
|
-
rmessage += f"\nNOTE: Check if the base image or the kernel version used is End-of-Life (EOL)."
|
|
835
|
-
else:
|
|
836
|
-
rmessage += (
|
|
837
|
-
f"\nNOTE: [magenta]{distro_packages_count}"
|
|
838
|
-
f"[/magenta] distro-specific vulnerabilities "
|
|
839
|
-
f"out of {len(pkg_vulnerabilities)} could be prioritized "
|
|
840
|
-
f"for updates."
|
|
841
|
-
)
|
|
842
|
-
if has_redhat_packages:
|
|
843
|
-
rmessage += """\nNOTE: Vulnerabilities in RedHat packages with status "out of support" or "won't fix" are excluded from this result."""
|
|
844
|
-
if has_ubuntu_packages:
|
|
845
|
-
rmessage += """\nNOTE: Vulnerabilities in Ubuntu packages with status "DNE" or "needs-triaging" are excluded from this result."""
|
|
846
|
-
console.print(
|
|
847
|
-
Panel(
|
|
848
|
-
rmessage,
|
|
849
|
-
title="Recommendation",
|
|
850
|
-
expand=False,
|
|
851
|
-
)
|
|
852
|
-
)
|
|
853
|
-
elif pkg_attention_count:
|
|
854
|
-
if has_reachable_exploit_count:
|
|
855
|
-
rmessage = (
|
|
856
|
-
f":point_right: Prioritize the [magenta]{has_reachable_exploit_count}"
|
|
857
|
-
f"[/magenta] [bold magenta]reachable[/bold magenta] vulnerabilities with known exploits."
|
|
858
|
-
)
|
|
859
|
-
elif has_exploit_count:
|
|
860
|
-
rmessage = (
|
|
861
|
-
f":point_right: Prioritize the [magenta]{has_exploit_count}"
|
|
862
|
-
f"[/magenta] vulnerabilities with known exploits."
|
|
863
|
-
)
|
|
864
|
-
else:
|
|
865
|
-
rmessage = (
|
|
866
|
-
f":point_right: [info]{pkg_attention_count}"
|
|
867
|
-
f"[/info] out of {len(pkg_vulnerabilities)} vulnerabilities "
|
|
868
|
-
f"requires your attention."
|
|
869
|
-
)
|
|
870
|
-
if fix_version_count:
|
|
871
|
-
if fix_version_count == pkg_attention_count:
|
|
872
|
-
rmessage += (
|
|
873
|
-
"\n:white_heavy_check_mark: You can update ["
|
|
874
|
-
"bright_green]all[/bright_green] the "
|
|
875
|
-
"packages using the mentioned fix version to "
|
|
876
|
-
"remediate."
|
|
877
|
-
)
|
|
878
|
-
else:
|
|
879
|
-
v_text = (
|
|
880
|
-
"vulnerability"
|
|
881
|
-
if fix_version_count == 1
|
|
882
|
-
else "vulnerabilities"
|
|
883
|
-
)
|
|
884
|
-
rmessage += (
|
|
885
|
-
f"\nYou can remediate [bright_green]"
|
|
886
|
-
f"{fix_version_count}[/bright_green] "
|
|
887
|
-
f"{v_text} "
|
|
888
|
-
f"by updating the packages using the fix "
|
|
889
|
-
f"version :thumbsup:"
|
|
890
|
-
)
|
|
891
|
-
console.print(
|
|
892
|
-
Panel(
|
|
893
|
-
rmessage,
|
|
894
|
-
title="Recommendation",
|
|
895
|
-
expand=False,
|
|
896
|
-
)
|
|
897
|
-
)
|
|
898
|
-
elif critical_count:
|
|
899
|
-
console.print(
|
|
900
|
-
Panel(
|
|
901
|
-
f":white_medium_small_square: Prioritize the [magenta]{critical_count}"
|
|
902
|
-
f"[/magenta] critical vulnerabilities confirmed by the "
|
|
903
|
-
f"vendor.",
|
|
904
|
-
title="Recommendation",
|
|
905
|
-
expand=False,
|
|
906
|
-
)
|
|
907
|
-
)
|
|
908
|
-
else:
|
|
909
|
-
if has_os_packages:
|
|
910
|
-
rmessage = (
|
|
911
|
-
":white_medium_small_square: Prioritize any vulnerabilities in libraries such "
|
|
912
|
-
"as glibc, openssl, or libcurl.\nAdditionally, "
|
|
913
|
-
"prioritize the vulnerabilities with 'Flagged weakness' under insights."
|
|
914
|
-
)
|
|
915
|
-
rmessage += (
|
|
916
|
-
"\nVulnerabilities in Linux Kernel packages can "
|
|
917
|
-
"be usually ignored in containerized "
|
|
918
|
-
"environments as long as the vulnerability "
|
|
919
|
-
"doesn't lead to any 'container-escape' type "
|
|
920
|
-
"vulnerabilities."
|
|
921
|
-
)
|
|
922
|
-
if has_redhat_packages:
|
|
923
|
-
rmessage += """\nNOTE: Vulnerabilities in RedHat packages
|
|
924
|
-
with status "out of support" or "won't fix" are excluded
|
|
925
|
-
from this result."""
|
|
926
|
-
if has_ubuntu_packages:
|
|
927
|
-
rmessage += """\nNOTE: Vulnerabilities in Ubuntu packages
|
|
928
|
-
with status "DNE" or "needs-triaging" are excluded from
|
|
929
|
-
this result."""
|
|
930
|
-
console.print(Panel(rmessage, title="Recommendation"))
|
|
931
|
-
else:
|
|
932
|
-
rmessage = None
|
|
933
|
-
if reached_purls:
|
|
934
|
-
rmessage = ":white_check_mark: No package requires immediate attention since the major vulnerabilities are not reachable."
|
|
935
|
-
elif direct_purls:
|
|
936
|
-
rmessage = ":white_check_mark: No package requires immediate attention since the major vulnerabilities are found only in dev packages and indirect dependencies."
|
|
937
|
-
if rmessage:
|
|
938
|
-
console.print(
|
|
939
|
-
Panel(
|
|
940
|
-
rmessage,
|
|
941
|
-
title="Recommendation",
|
|
942
|
-
expand=False,
|
|
943
|
-
)
|
|
944
|
-
)
|
|
945
|
-
elif critical_count:
|
|
946
|
-
console.print(
|
|
947
|
-
Panel(
|
|
948
|
-
f":white_medium_small_square: Prioritize the [magenta]{critical_count}"
|
|
949
|
-
f"[/magenta] critical vulnerabilities confirmed by the vendor.",
|
|
950
|
-
title="Recommendation",
|
|
951
|
-
expand=False,
|
|
952
|
-
)
|
|
953
|
-
)
|
|
954
|
-
if reached_purls:
|
|
955
|
-
sorted_reached_purls = sorted(
|
|
956
|
-
((value, key) for (key, value) in reached_purls.items()),
|
|
957
|
-
reverse=True,
|
|
958
|
-
)[:3]
|
|
959
|
-
sorted_reached_dict = OrderedDict(
|
|
960
|
-
(k, v) for v, k in sorted_reached_purls
|
|
961
|
-
)
|
|
962
|
-
rsection = Markdown(
|
|
963
|
-
"""
|
|
964
|
-
Proactive Measures
|
|
965
|
-
------------------
|
|
966
|
-
|
|
967
|
-
Below are the top reachable packages identified by depscan. Setup alerts and notifications to actively monitor these packages for new vulnerabilities and exploits.
|
|
968
|
-
""",
|
|
969
|
-
justify="left",
|
|
970
|
-
)
|
|
971
|
-
console.print(rsection)
|
|
972
|
-
rtable = Table(
|
|
973
|
-
title="Top Reachable Packages",
|
|
974
|
-
box=box.DOUBLE_EDGE,
|
|
975
|
-
header_style="bold magenta",
|
|
976
|
-
show_lines=True,
|
|
977
|
-
min_width=150,
|
|
978
|
-
)
|
|
979
|
-
for h in ("Package", "Reachable Flows"):
|
|
980
|
-
rtable.add_column(header=h, vertical="top")
|
|
981
|
-
for k, v in sorted_reached_dict.items():
|
|
982
|
-
rtable.add_row(k, str(v))
|
|
983
|
-
console.print()
|
|
984
|
-
console.print(rtable)
|
|
985
|
-
console.print()
|
|
986
|
-
return pkg_vulnerabilities, pkg_group_rows
|
|
987
|
-
|
|
988
|
-
|
|
989
|
-
def get_version_range(package_issue, purl):
|
|
990
|
-
"""
|
|
991
|
-
Generates a version range object for inclusion in the vdr file.
|
|
992
|
-
|
|
993
|
-
:param package_issue: Vulnerability data dict
|
|
994
|
-
:param purl: Package URL string
|
|
995
|
-
|
|
996
|
-
:return: A list containing a dictionary with version range information.
|
|
997
|
-
"""
|
|
998
|
-
new_prop = {}
|
|
999
|
-
if (affected_location := package_issue.get("affected_location")) and (
|
|
1000
|
-
affected_version := affected_location.get("version")
|
|
1001
|
-
):
|
|
1002
|
-
try:
|
|
1003
|
-
ppurl = PackageURL.from_string(purl)
|
|
1004
|
-
new_prop = {
|
|
1005
|
-
"name": "affectedVersionRange",
|
|
1006
|
-
"value": f"{ppurl.name}@" f"{affected_version}",
|
|
1007
|
-
}
|
|
1008
|
-
if ppurl.namespace:
|
|
1009
|
-
new_prop["value"] = f'{ppurl.namespace}/{new_prop["value"]}'
|
|
1010
|
-
except ValueError:
|
|
1011
|
-
ppurl = purl.split("@")
|
|
1012
|
-
if len(ppurl) == 2:
|
|
1013
|
-
new_prop = {
|
|
1014
|
-
"name": "affectedVersionRange",
|
|
1015
|
-
"value": f"{ppurl[0]}@{affected_version}",
|
|
1016
|
-
}
|
|
1017
|
-
|
|
1018
|
-
return new_prop
|
|
1019
|
-
|
|
1020
|
-
|
|
1021
|
-
def cvss_to_vdr_rating(vuln_occ_dict):
|
|
1022
|
-
"""
|
|
1023
|
-
Generates a rating object for inclusion in the vdr file.
|
|
1024
|
-
|
|
1025
|
-
:param vuln_occ_dict: Vulnerability data
|
|
1026
|
-
|
|
1027
|
-
:return: A list containing a dictionary with CVSS score information.
|
|
1028
|
-
"""
|
|
1029
|
-
cvss_score = vuln_occ_dict.get("cvss_score", 2.0)
|
|
1030
|
-
with contextlib.suppress(ValueError, TypeError):
|
|
1031
|
-
cvss_score = float(cvss_score)
|
|
1032
|
-
if (pkg_severity := vuln_occ_dict.get("severity", "").lower()) not in (
|
|
1033
|
-
"critical",
|
|
1034
|
-
"high",
|
|
1035
|
-
"medium",
|
|
1036
|
-
"low",
|
|
1037
|
-
"info",
|
|
1038
|
-
"none",
|
|
1039
|
-
):
|
|
1040
|
-
pkg_severity = "unknown"
|
|
1041
|
-
ratings = [
|
|
1042
|
-
{
|
|
1043
|
-
"score": cvss_score,
|
|
1044
|
-
"severity": pkg_severity,
|
|
1045
|
-
}
|
|
1046
|
-
]
|
|
1047
|
-
method = "31"
|
|
1048
|
-
if vuln_occ_dict.get("cvss_v3") and (
|
|
1049
|
-
vector_string := vuln_occ_dict["cvss_v3"].get("vector_string")
|
|
1050
|
-
):
|
|
1051
|
-
ratings[0]["vector"] = vector_string
|
|
1052
|
-
with contextlib.suppress(CVSSError):
|
|
1053
|
-
method = cvss.CVSS3(vector_string).as_json().get("version")
|
|
1054
|
-
method = method.replace(".", "").replace("0", "")
|
|
1055
|
-
ratings[0]["method"] = f"CVSSv{method}"
|
|
1056
|
-
|
|
1057
|
-
return ratings
|
|
1058
|
-
|
|
1059
|
-
|
|
1060
|
-
def split_cwe(cwe):
|
|
1061
|
-
"""
|
|
1062
|
-
Split the given CWE string into a list of CWE IDs.
|
|
1063
|
-
|
|
1064
|
-
:param cwe: The problem issue taken from a vulnerability object
|
|
1065
|
-
|
|
1066
|
-
:return: A list of CWE IDs
|
|
1067
|
-
:rtype: list
|
|
1068
|
-
"""
|
|
1069
|
-
cwe_ids = []
|
|
1070
|
-
|
|
1071
|
-
if isinstance(cwe, str):
|
|
1072
|
-
cwe_ids = re.findall(CWE_SPLITTER, cwe)
|
|
1073
|
-
elif isinstance(cwe, list):
|
|
1074
|
-
cwes = "|".join(cwe)
|
|
1075
|
-
cwe_ids = re.findall(CWE_SPLITTER, cwes)
|
|
1076
|
-
|
|
1077
|
-
with contextlib.suppress(ValueError, TypeError):
|
|
1078
|
-
cwe_ids = [int(cwe_id) for cwe_id in cwe_ids]
|
|
1079
|
-
return cwe_ids
|
|
1080
|
-
|
|
1081
|
-
|
|
1082
|
-
def summary_stats(results):
|
|
1083
|
-
"""
|
|
1084
|
-
Generate summary stats
|
|
1085
|
-
|
|
1086
|
-
:param results: List of scan results objects with severity attribute.
|
|
1087
|
-
:return: A dictionary containing the summary statistics for the severity
|
|
1088
|
-
levels of the vulnerabilities in the results list.
|
|
1089
|
-
"""
|
|
1090
|
-
if not results:
|
|
1091
|
-
LOG.info("No oss vulnerabilities detected ✅")
|
|
1092
|
-
return None
|
|
1093
|
-
summary = {
|
|
1094
|
-
"UNSPECIFIED": 0,
|
|
1095
|
-
"LOW": 0,
|
|
1096
|
-
"MEDIUM": 0,
|
|
1097
|
-
"HIGH": 0,
|
|
1098
|
-
"CRITICAL": 0,
|
|
1099
|
-
}
|
|
1100
|
-
for res in results:
|
|
1101
|
-
summary[res.get("severity")] += 1
|
|
1102
|
-
return summary
|
|
1103
|
-
|
|
1104
|
-
|
|
1105
|
-
def jsonl_report(
|
|
1106
|
-
project_type,
|
|
1107
|
-
results,
|
|
1108
|
-
pkg_aliases,
|
|
1109
|
-
purl_aliases,
|
|
1110
|
-
sug_version_dict,
|
|
1111
|
-
scoped_pkgs,
|
|
1112
|
-
out_file_name,
|
|
1113
|
-
direct_purls,
|
|
1114
|
-
reached_purls,
|
|
1115
|
-
):
|
|
1116
|
-
"""
|
|
1117
|
-
DEPRECATED: Produce vulnerability occurrence report in jsonlines format
|
|
1118
|
-
This method should use the pkg_vulnerabilities from prepare_vdr
|
|
1119
|
-
|
|
1120
|
-
:param scoped_pkgs: A dict of lists of required/optional/excluded packages.
|
|
1121
|
-
:param sug_version_dict: A dict mapping package names to suggested versions.
|
|
1122
|
-
:param purl_aliases: A dict mapping package names to their purl aliases.
|
|
1123
|
-
:param project_type: Project type
|
|
1124
|
-
:param results: List of vulnerabilities found
|
|
1125
|
-
:param pkg_aliases: Package alias
|
|
1126
|
-
:param out_file_name: Output filename
|
|
1127
|
-
:param direct_purls: A list of direct purls
|
|
1128
|
-
:param reached_purls: A list of reached purls
|
|
1129
|
-
"""
|
|
1130
|
-
ids_seen = {}
|
|
1131
|
-
required_pkgs = scoped_pkgs.get("required", [])
|
|
1132
|
-
optional_pkgs = scoped_pkgs.get("optional", [])
|
|
1133
|
-
excluded_pkgs = scoped_pkgs.get("excluded", [])
|
|
1134
|
-
with open(out_file_name, "w", encoding="utf-8") as outfile:
|
|
1135
|
-
for vuln_occ_dict in results:
|
|
1136
|
-
vid = vuln_occ_dict.get("id")
|
|
1137
|
-
package_issue = vuln_occ_dict.get("package_issue")
|
|
1138
|
-
if not package_issue.get("affected_location"):
|
|
1139
|
-
continue
|
|
1140
|
-
full_pkg = package_issue["affected_location"].get("package")
|
|
1141
|
-
if package_issue["affected_location"].get("vendor"):
|
|
1142
|
-
full_pkg = (
|
|
1143
|
-
f"{package_issue['affected_location'].get('vendor')}:"
|
|
1144
|
-
f"{package_issue['affected_location'].get('package')}"
|
|
1145
|
-
)
|
|
1146
|
-
elif package_issue["affected_location"].get("cpe_uri"):
|
|
1147
|
-
vendor, _, _, _ = parse_cpe(
|
|
1148
|
-
package_issue["affected_location"].get("cpe_uri")
|
|
1149
|
-
)
|
|
1150
|
-
if vendor:
|
|
1151
|
-
full_pkg = (
|
|
1152
|
-
f"{vendor}:"
|
|
1153
|
-
f"{package_issue['affected_location'].get('package')}"
|
|
1154
|
-
)
|
|
1155
|
-
# De-alias package names
|
|
1156
|
-
full_pkg = pkg_aliases.get(full_pkg, full_pkg)
|
|
1157
|
-
full_pkg_display = full_pkg
|
|
1158
|
-
version_used = package_issue["affected_location"].get("version")
|
|
1159
|
-
purl = purl_aliases.get(full_pkg, full_pkg)
|
|
1160
|
-
if purl:
|
|
1161
|
-
purl_obj = parse_purl(purl)
|
|
1162
|
-
if purl_obj:
|
|
1163
|
-
version_used = purl_obj.get("version")
|
|
1164
|
-
if purl_obj.get("namespace"):
|
|
1165
|
-
full_pkg = f"""{purl_obj.get("namespace")}/
|
|
1166
|
-
{purl_obj.get("name")}@{purl_obj.get("version")}"""
|
|
1167
|
-
else:
|
|
1168
|
-
full_pkg = f"""{purl_obj.get("name")}@{purl_obj
|
|
1169
|
-
.get("version")}"""
|
|
1170
|
-
if ids_seen.get(vid + purl):
|
|
1171
|
-
continue
|
|
1172
|
-
# On occasions, this could still result in duplicates if the
|
|
1173
|
-
# package exists with and without a purl
|
|
1174
|
-
ids_seen[vid + purl] = True
|
|
1175
|
-
project_type_pkg = f"""{project_type}:{package_issue["affected_location"].get("package")}"""
|
|
1176
|
-
fixed_location = best_fixed_location(
|
|
1177
|
-
sug_version_dict.get(purl),
|
|
1178
|
-
package_issue["fixed_location"],
|
|
1179
|
-
)
|
|
1180
|
-
package_usage = "N/A"
|
|
1181
|
-
if (
|
|
1182
|
-
direct_purls.get(purl)
|
|
1183
|
-
or purl in required_pkgs
|
|
1184
|
-
or full_pkg in required_pkgs
|
|
1185
|
-
or project_type_pkg in required_pkgs
|
|
1186
|
-
):
|
|
1187
|
-
package_usage = "required"
|
|
1188
|
-
elif (
|
|
1189
|
-
purl in optional_pkgs
|
|
1190
|
-
or full_pkg in optional_pkgs
|
|
1191
|
-
or project_type_pkg in optional_pkgs
|
|
1192
|
-
):
|
|
1193
|
-
package_usage = "optional"
|
|
1194
|
-
elif (
|
|
1195
|
-
purl in excluded_pkgs
|
|
1196
|
-
or full_pkg in excluded_pkgs
|
|
1197
|
-
or project_type_pkg in excluded_pkgs
|
|
1198
|
-
):
|
|
1199
|
-
package_usage = "excluded"
|
|
1200
|
-
data_obj = {
|
|
1201
|
-
"id": vid,
|
|
1202
|
-
"package": full_pkg_display,
|
|
1203
|
-
"purl": purl,
|
|
1204
|
-
"package_type": vuln_occ_dict.get("type"),
|
|
1205
|
-
"package_usage": package_usage,
|
|
1206
|
-
"version": version_used,
|
|
1207
|
-
"fix_version": fixed_location,
|
|
1208
|
-
"severity": vuln_occ_dict.get("severity"),
|
|
1209
|
-
"cvss_score": vuln_occ_dict.get("cvss_score"),
|
|
1210
|
-
"short_description": vuln_occ_dict.get("short_description"),
|
|
1211
|
-
"related_urls": vuln_occ_dict.get("related_urls"),
|
|
1212
|
-
"occurrence_count": direct_purls.get(purl, 0),
|
|
1213
|
-
"reachable_flows": reached_purls.get(purl, 0),
|
|
1214
|
-
}
|
|
1215
|
-
json.dump(data_obj, outfile)
|
|
1216
|
-
outfile.write("\n")
|
|
1217
|
-
|
|
1218
|
-
|
|
1219
|
-
def analyse_pkg_risks(
|
|
1220
|
-
project_type, scoped_pkgs, risk_results, risk_report_file=None
|
|
1221
|
-
):
|
|
1222
|
-
"""
|
|
1223
|
-
Identify package risk and write to a json file
|
|
1224
|
-
|
|
1225
|
-
:param project_type: Project type
|
|
1226
|
-
:param scoped_pkgs: A dict of lists of required/optional/excluded packages.
|
|
1227
|
-
:param risk_results: A dict of the risk metrics and scope for each package.
|
|
1228
|
-
:param risk_report_file: Path to the JSON file for the risk audit findings.
|
|
1229
|
-
"""
|
|
1230
|
-
if not risk_results:
|
|
1231
|
-
return
|
|
1232
|
-
table = Table(
|
|
1233
|
-
title=f"Risk Audit Summary ({project_type})",
|
|
1234
|
-
box=box.DOUBLE_EDGE,
|
|
1235
|
-
header_style="bold magenta",
|
|
1236
|
-
min_width=150,
|
|
1237
|
-
)
|
|
1238
|
-
report_data = []
|
|
1239
|
-
required_pkgs = scoped_pkgs.get("required", [])
|
|
1240
|
-
optional_pkgs = scoped_pkgs.get("optional", [])
|
|
1241
|
-
excluded_pkgs = scoped_pkgs.get("excluded", [])
|
|
1242
|
-
headers = ["Package", "Used?", "Risk Score", "Identified Risks"]
|
|
1243
|
-
for h in headers:
|
|
1244
|
-
justify = "left"
|
|
1245
|
-
if h == "Risk Score":
|
|
1246
|
-
justify = "right"
|
|
1247
|
-
table.add_column(header=h, justify=justify)
|
|
1248
|
-
for pkg, risk_obj in risk_results.items():
|
|
1249
|
-
if not risk_obj:
|
|
1250
|
-
continue
|
|
1251
|
-
risk_metrics = risk_obj.get("risk_metrics")
|
|
1252
|
-
scope = risk_obj.get("scope")
|
|
1253
|
-
project_type_pkg = f"{project_type}:{pkg}".lower()
|
|
1254
|
-
if project_type_pkg in required_pkgs:
|
|
1255
|
-
scope = "required"
|
|
1256
|
-
elif project_type_pkg in optional_pkgs:
|
|
1257
|
-
scope = "optional"
|
|
1258
|
-
elif project_type_pkg in excluded_pkgs:
|
|
1259
|
-
scope = "excluded"
|
|
1260
|
-
package_usage = "N/A"
|
|
1261
|
-
package_usage_simple = "N/A"
|
|
1262
|
-
if scope == "required":
|
|
1263
|
-
package_usage = "[bright_green][bold]Yes"
|
|
1264
|
-
package_usage_simple = "Yes"
|
|
1265
|
-
if scope == "optional":
|
|
1266
|
-
package_usage = "[magenta]No"
|
|
1267
|
-
package_usage_simple = "No"
|
|
1268
|
-
if not risk_metrics:
|
|
1269
|
-
continue
|
|
1270
|
-
if risk_metrics.get("risk_score") and (
|
|
1271
|
-
risk_metrics.get("risk_score") > config.pkg_max_risk_score
|
|
1272
|
-
or risk_metrics.get("pkg_private_on_public_registry_risk")
|
|
1273
|
-
or risk_metrics.get("pkg_deprecated_risk")
|
|
1274
|
-
):
|
|
1275
|
-
risk_score = f"""{round(risk_metrics.get("risk_score"), 2)}"""
|
|
1276
|
-
data = [
|
|
1277
|
-
pkg,
|
|
1278
|
-
package_usage,
|
|
1279
|
-
risk_score,
|
|
1280
|
-
]
|
|
1281
|
-
edata = [
|
|
1282
|
-
pkg,
|
|
1283
|
-
package_usage_simple,
|
|
1284
|
-
risk_score,
|
|
1285
|
-
]
|
|
1286
|
-
risk_categories = []
|
|
1287
|
-
risk_categories_simple = []
|
|
1288
|
-
for rk, rv in risk_metrics.items():
|
|
1289
|
-
if rk.endswith("_risk") and rv is True:
|
|
1290
|
-
rcat = rk.replace("_risk", "")
|
|
1291
|
-
help_text = config.risk_help_text.get(rcat)
|
|
1292
|
-
# Only add texts that are available.
|
|
1293
|
-
if help_text:
|
|
1294
|
-
if rcat in (
|
|
1295
|
-
"pkg_deprecated",
|
|
1296
|
-
"pkg_private_on_public_registry",
|
|
1297
|
-
):
|
|
1298
|
-
risk_categories.append(f":cross_mark: {help_text}")
|
|
1299
|
-
else:
|
|
1300
|
-
risk_categories.append(f":warning: {help_text}")
|
|
1301
|
-
risk_categories_simple.append(help_text)
|
|
1302
|
-
data.append("\n".join(risk_categories))
|
|
1303
|
-
edata.append(", ".join(risk_categories_simple))
|
|
1304
|
-
table.add_row(*data)
|
|
1305
|
-
report_data.append(dict(zip(headers, edata)))
|
|
1306
|
-
if report_data:
|
|
1307
|
-
console.print(table)
|
|
1308
|
-
# Store the risk audit findings in jsonl format
|
|
1309
|
-
if risk_report_file:
|
|
1310
|
-
with open(risk_report_file, "w", encoding="utf-8") as outfile:
|
|
1311
|
-
for row in report_data:
|
|
1312
|
-
json.dump(row, outfile)
|
|
1313
|
-
outfile.write("\n")
|
|
1314
|
-
else:
|
|
1315
|
-
LOG.info("No package risks detected ✅")
|
|
1316
|
-
|
|
1317
|
-
|
|
1318
|
-


def analyse_licenses(project_type, licenses_results, license_report_file=None):
    """
    Analyze package licenses

    :param project_type: Project type
    :param licenses_results: A dict with the license results for each package.
    :param license_report_file: Output filename for the license report.
    """
    if not licenses_results:
        return
    table = Table(
        title=f"License Scan Summary ({project_type})",
        box=box.DOUBLE_EDGE,
        header_style="bold magenta",
        min_width=150,
    )
    headers = ["Package", "Version", "License Id", "License conditions"]
    for h in headers:
        table.add_column(header=h)
    report_data = []
    for pkg, ll in licenses_results.items():
        pkg_ver = pkg.split("@")
        for lic in ll:
            if not lic:
                data = [*pkg_ver, "Unknown license"]
                table.add_row(*data)
                report_data.append(dict(zip(headers, data)))
            elif lic["condition_flag"]:
                conditions_str = ", ".join(lic["conditions"])
                if "http" not in conditions_str:
                    conditions_str = (
                        conditions_str.replace("--", " for ")
                        .replace("-", " ")
                        .title()
                    )
                data = [
                    *pkg_ver,
                    "{}{}".format(
                        (
                            "[cyan]"
                            if "GPL" in lic["spdx-id"]
                            or "CC-BY-" in lic["spdx-id"]
                            or "Facebook" in lic["spdx-id"]
                            or "WTFPL" in lic["spdx-id"]
                            else ""
                        ),
                        lic["spdx-id"],
                    ),
                    conditions_str,
                ]
                table.add_row(*data)
                report_data.append(dict(zip(headers, data)))
    if report_data:
        console.print(table)
        # Store the license scan findings in jsonl format
        if license_report_file:
            with open(license_report_file, "w", encoding="utf-8") as outfile:
                for row in report_data:
                    json.dump(row, outfile)
                    outfile.write("\n")
    else:
        LOG.info("No license violation detected ✅")
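

# Usage sketch (illustrative only, not part of the original module). The
# mapping below is hypothetical but mirrors the shape analyse_licenses()
# expects: "name@version" keys pointing to license dicts carrying "spdx-id",
# "condition_flag" and "conditions".
def _example_analyse_licenses():
    licenses_results = {
        "demo-lib@1.0.0": [
            {
                "spdx-id": "GPL-3.0-only",
                "condition_flag": True,
                "conditions": ["include-copyright", "disclose-source"],
            }
        ]
    }
    analyse_licenses("python", licenses_results, license_report_file=None)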


def suggest_version(results, pkg_aliases=None, purl_aliases=None):
    """
    Provide version suggestions

    :param results: List of package issue objects or dicts
    :param pkg_aliases: Dict of package names and aliases
    :param purl_aliases: Dict of purl names and aliases
    :return: Dict mapping each package to its suggested version
    """
    pkg_fix_map = {}
    sug_map = {}
    if not pkg_aliases:
        pkg_aliases = {}
    if not purl_aliases:
        purl_aliases = {}
    for res in results:
        if isinstance(res, dict):
            full_pkg = res.get("package")
            fixed_location = res.get("fix_version")
            matched_by = res.get("matched_by")
        else:
            package_issue = res.package_issue
            full_pkg = package_issue.affected_location.package
            fixed_location = package_issue.fixed_location
            matched_by = res.matched_by
            if package_issue.affected_location.vendor:
                full_pkg = (
                    f"{package_issue.affected_location.vendor}:"
                    f"{package_issue.affected_location.package}"
                )
        if matched_by:
            version = matched_by.split("|")[-1]
            full_pkg = full_pkg + ":" + version
        # De-alias package names
        if purl_aliases.get(full_pkg):
            full_pkg = purl_aliases.get(full_pkg)
        else:
            full_pkg = pkg_aliases.get(full_pkg, full_pkg)
        version_upgrades = pkg_fix_map.get(full_pkg, set())
        if fixed_location not in (
            placeholder_fix_version,
            placeholder_exclude_version,
        ):
            version_upgrades.add(fixed_location)
        pkg_fix_map[full_pkg] = version_upgrades
    for k, v in pkg_fix_map.items():
        # Don't go near certain packages
        if "kernel" in k or "openssl" in k or "openssh" in k:
            continue
        if v:
            mversion = max_version(list(v))
            if mversion:
                sug_map[k] = mversion
    return sug_map
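

# Usage sketch (illustrative only, not part of the original module). Dict-style
# results with made-up package data; the highest known fix version is kept, so
# this is expected to return {"demo-lib": "2.1.0"}.
def _example_suggest_version():
    results = [
        {"package": "demo-lib", "fix_version": "2.0.1", "matched_by": None},
        {"package": "demo-lib", "fix_version": "2.1.0", "matched_by": None},
    ]
    return suggest_version(results)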


def classify_links(related_urls):
    """
    Method to classify and identify well-known links

    :param related_urls: List of URLs
    :return: Dictionary of classified links and URLs
    """
    clinks = {}
    for rurl in related_urls:
        if "github.com" in rurl and "/pull" in rurl:
            clinks["GitHub PR"] = rurl
        elif "github.com" in rurl and "/issues" in rurl:
            clinks["GitHub Issue"] = rurl
        elif "poc" in rurl:
            clinks["poc"] = rurl
        elif "apache.org" in rurl and "security" in rurl:
            clinks["Apache Security"] = rurl
            clinks["vendor"] = rurl
        elif "debian.org" in rurl and "security" in rurl:
            clinks["Debian Security"] = rurl
            clinks["vendor"] = rurl
        elif "security.gentoo.org" in rurl:
            clinks["Gentoo Security"] = rurl
            clinks["vendor"] = rurl
        elif "usn.ubuntu.com" in rurl:
            clinks["Ubuntu Security"] = rurl
            clinks["vendor"] = rurl
        elif "rubyonrails-security" in rurl:
            clinks["Ruby Security"] = rurl
            clinks["vendor"] = rurl
        elif "support.apple.com" in rurl:
            clinks["Apple Security"] = rurl
            clinks["vendor"] = rurl
        elif "gitlab.alpinelinux.org" in rurl or "bugs.busybox.net" in rurl:
            clinks["vendor"] = rurl
        elif "redhat.com" in rurl or "oracle.com" in rurl:
            clinks["vendor"] = rurl
        elif (
            "openwall.com" in rurl
            or "oss-security" in rurl
            or "www.mail-archive.com" in rurl
            or "lists.debian.org" in rurl
            or "lists.fedoraproject.org" in rurl
            or "portal.msrc.microsoft.com" in rurl
            or "lists.opensuse.org" in rurl
        ):
            clinks["Mailing List"] = rurl
            clinks["vendor"] = rurl
        elif (
            "exploit-db" in rurl
            or "exploit-database" in rurl
            or "seebug.org" in rurl
            or "seclists.org" in rurl
            or "nu11secur1ty" in rurl
        ):
            clinks["exploit"] = rurl
        elif "github.com/advisories" in rurl:
            clinks["GitHub Advisory"] = rurl
        elif (
            "hackerone" in rurl
            or "bugcrowd" in rurl
            or "bug-bounty" in rurl
            or "huntr.dev" in rurl
            or "bounties" in rurl
        ):
            clinks["Bug Bounty"] = rurl
        elif "cwe.mitre.org" in rurl:
            clinks["cwe"] = rurl
        else:
            clinks["other"] = rurl
    return clinks
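

# Usage sketch (illustrative only, not part of the original module). The URLs
# below are hypothetical; each one lands in its first matching bucket, and the
# mailing-list link also sets the generic "vendor" key.
def _example_classify_links():
    return classify_links(
        [
            "https://github.com/advisories/GHSA-xxxx-xxxx-xxxx",
            "https://seclists.org/fulldisclosure/2024/Jan/0",
            "https://www.openwall.com/lists/oss-security/2024/01/01/1",
        ]
    )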


def find_purl_usages(bom_file, src_dir, reachables_slices_file):
    """
    Generates a list of reachable elements based on the given BOM file.

    :param bom_file: The path to the BOM file.
    :type bom_file: str
    :param src_dir: Source directory
    :type src_dir: str
    :param reachables_slices_file: Path to the reachables slices file
    :type reachables_slices_file: str

    :return: Tuple of direct_purls and reached_purls based on the occurrence and
             callstack evidences from the BOM. If reachables slices json were
             found, the file is read first.
    """
    direct_purls = defaultdict(int)
    reached_purls = defaultdict(int)
    if (
        not reachables_slices_file
        and src_dir
        and os.path.exists(os.path.join(src_dir, "reachables.slices.json"))
    ):
        reachables_slices_file = os.path.join(src_dir, "reachables.slices.json")
    if reachables_slices_file:
        with open(reachables_slices_file, "r", encoding="utf-8") as f:
            reachables = json.load(f).get("reachables")
        for flow in reachables:
            if len(flow.get("purls", [])) > 0:
                for apurl in flow.get("purls"):
                    reached_purls[apurl] += 1
    if bom_file and os.path.exists(bom_file):
        # For now we will also include usability slice as well
        with open(bom_file, "r", encoding="utf-8") as f:
            data = json.load(f)

        for c in data["components"]:
            purl = c.get("purl", "")
            if c.get("evidence") and c["evidence"].get("occurrences"):
                direct_purls[purl] += len(c["evidence"].get("occurrences"))
    return dict(direct_purls), dict(reached_purls)
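

# Usage sketch (illustrative only, not part of the original module). The paths
# are hypothetical: occurrence evidence in the CycloneDX BOM drives
# direct_purls, while a reachables.slices.json found under src_dir (or passed
# explicitly) drives reached_purls.
def _example_find_purl_usages():
    direct_purls, reached_purls = find_purl_usages(
        bom_file="reports/bom.json",
        src_dir=".",
        reachables_slices_file=None,
    )
    return direct_purls, reached_purls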