dt-sbom-scanner 1.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dt_sbom_scanner/AnsiColors.py +22 -0
- dt_sbom_scanner/__init__.py +0 -0
- dt_sbom_scanner/sbom_utils.py +180 -0
- dt_sbom_scanner/scan.py +1038 -0
- dt_sbom_scanner-1.8.0.dist-info/LICENSE.txt +21 -0
- dt_sbom_scanner-1.8.0.dist-info/METADATA +265 -0
- dt_sbom_scanner-1.8.0.dist-info/RECORD +9 -0
- dt_sbom_scanner-1.8.0.dist-info/WHEEL +4 -0
- dt_sbom_scanner-1.8.0.dist-info/entry_points.txt +3 -0
dt_sbom_scanner/scan.py
ADDED
|
@@ -0,0 +1,1038 @@
|
|
|
1
|
+
import argparse
import glob
import json
import logging
import os
import re
import ssl
import sys
from dataclasses import dataclass
from enum import Enum
from functools import cache, cached_property
from logging import Logger
from pathlib import Path
from time import sleep
from typing import Optional

import requests
from cyclonedx.model.bom import Bom
from cyclonedx.schema import SchemaVersion

from dt_sbom_scanner import sbom_utils
from dt_sbom_scanner.AnsiColors import AnsiColors
|
|
22
|
+
|
|
23
|
+
# Module-level logger.
# NOTE: the logging docs forbid instantiating Logger directly — a logger
# created that way is detached from the logging hierarchy, so its records
# never propagate to root handlers. logging.getLogger() is the supported API.
LOGGER = logging.getLogger(__name__)


# SSL context with certificate verification disabled, for servers using
# self-signed certificates (opt-in; regular requests honor verify_ssl).
INSECURE_SSL_CTX = ssl.create_default_context()
INSECURE_SSL_CTX.check_hostname = False
INSECURE_SSL_CTX.verify_mode = ssl.CERT_NONE

# Content type used for JSON request/response bodies.
MIME_APPLICATION_JSON = "application/json"

# Lower-case string values interpreted as boolean "true".
IS_STR_TRUE = ["true", "yes", "1"]
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
@dataclass
class DtSeverity:
    """A Dependency Track severity level with its risk-score weight and display color."""

    # Severity name as reported by Dependency Track (e.g. "Critical").
    name: str
    # Weight added to a project's risk score per finding of this severity.
    # See: https://docs.dependencytrack.org/terminology/#risk-score
    risk_score: int
    # ANSI escape sequence used when printing findings of this severity.
    color: str
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
# Severity levels indexed by Dependency Track's numeric "severityRank":
# do_scan() indexes this list directly with vuln.get("severityRank", 5),
# so the order (0=Critical ... 4=Informational, 5=Unassigned) must not change.
SEVERITY_RANKS = [
    DtSeverity("Critical", 10, AnsiColors.HRED),
    DtSeverity("High", 5, AnsiColors.RED),
    DtSeverity("Medium", 3, AnsiColors.YELLOW),
    DtSeverity("Low", 1, AnsiColors.GREEN),
    DtSeverity("Informational", 0, AnsiColors.RESET),
    DtSeverity("Unassigned", 5, AnsiColors.PURPLE),
]
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class CollectionLogic(str, Enum):
    """Dependency Track collection logics.

    See: https://github.com/DependencyTrack/dependency-track/blob/master/src/main/java/org/dependencytrack/model/ProjectCollectionLogic.java#L30
    """

    # Project is not a collection project.
    NONE = "NONE"
    # Project aggregates all of its direct children.
    ALL = "AGGREGATE_DIRECT_CHILDREN"
    # Project aggregates direct children carrying a specific tag.
    TAG = "AGGREGATE_DIRECT_CHILDREN_WITH_TAG"
    # Project aggregates only direct children marked as latest.
    LATEST = "AGGREGATE_LATEST_VERSION_CHILDREN"

    def __str__(self) -> str:
        # Display the member name (e.g. "ALL"), not the API value.
        return self.name
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
class DtPermission(str, Enum):
    """Dependency Track permissions.

    See: https://github.com/DependencyTrack/dependency-track/blob/master/src/main/java/org/dependencytrack/auth/Permissions.java#L27
    """

    # Upload CycloneDX Software Bill of Materials (SBOM) files.
    BOM_UPLOAD = "BOM_UPLOAD"
    # View the portfolio of projects, components, and licenses.
    VIEW_PORTFOLIO = "VIEW_PORTFOLIO"
    # Create, modify, and delete data in the portfolio.
    PORTFOLIO_MANAGEMENT = "PORTFOLIO_MANAGEMENT"
    # View the vulnerabilities projects are affected by.
    VIEW_VULNERABILITY = "VIEW_VULNERABILITY"
    # Make analysis decisions on vulnerabilities.
    VULNERABILITY_ANALYSIS = "VULNERABILITY_ANALYSIS"
    # View policy violations.
    VIEW_POLICY_VIOLATION = "VIEW_POLICY_VIOLATION"
    # Manage internally-defined vulnerabilities.
    VULNERABILITY_MANAGEMENT = "VULNERABILITY_MANAGEMENT"
    # Make analysis decisions on policy violations.
    POLICY_VIOLATION_ANALYSIS = "POLICY_VIOLATION_ANALYSIS"
    # Manage users, teams, and API keys.
    ACCESS_MANAGEMENT = "ACCESS_MANAGEMENT"
    # Configure the system: notifications, repositories, email settings.
    SYSTEM_CONFIGURATION = "SYSTEM_CONFIGURATION"
    # Optionally create a project (if non-existent) on BOM or scan upload.
    PROJECT_CREATION_UPLOAD = "PROJECT_CREATION_UPLOAD"
    # Create, modify, and delete policies.
    POLICY_MANAGEMENT = "POLICY_MANAGEMENT"

    def __str__(self) -> str:
        # Display the member name rather than the raw value.
        return self.name
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
class DtProjectDef:
    """A Dependency Track project reference.

    The definition is either a "#"-prefixed UUID, or a "name[@version]" pair.
    """

    def __init__(
        self,
        definition: str,
    ):
        self.definition = definition

    @property
    def is_uuid(self) -> bool:
        """True when the definition is a "#"-prefixed UUID reference."""
        return self.definition.startswith("#")

    @property
    def uuid(self) -> Optional[str]:
        """The UUID without its "#" prefix, or None for name/version definitions."""
        if self.is_uuid:
            return self.definition[1:]
        return None

    @property
    def name(self) -> Optional[str]:
        """The project name (text before the first "@"), or None for UUIDs."""
        if self.is_uuid:
            return None
        return self.definition.split("@")[0]

    @property
    def version(self) -> Optional[str]:
        """The project version (text after the first "@"), or None when absent."""
        if self.is_uuid or "@" not in self.definition:
            return None
        return self.definition.split("@")[1]

    @property
    def params(self) -> dict[str, str]:
        """Request parameters identifying this project for the DT API."""
        if self.is_uuid:
            # A UUID fully identifies the target project.
            return {"project": self.uuid}
        # Name/version target: the server resolves it, or auto-creates it
        # when the caller holds the PROJECT_CREATION_UPLOAD permission.
        return {"projectName": self.name, "projectVersion": self.version}
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
class Version:
    """A Semantic Versioning 2.0.0 version with ordering support.

    Parses "MAJOR.MINOR.PATCH[-PRERELEASE][+BUILD]" and orders versions by
    the numeric core, then pre-release identifiers (a pre-release sorts
    before the corresponding release). NOTE: unlike strict SemVer, build
    metadata also participates in the ordering here.
    """

    def __init__(self, version_str):
        self.version_str = version_str
        self.major, self.minor, self.patch, self.prerelease, self.build = self._parse(
            version_str
        )

    def _parse(self, version_str):
        """Split a SemVer string into (major, minor, patch, prerelease, build).

        Raises:
            ValueError: if version_str is not a valid semantic version.
        """
        # Official SemVer 2.0.0 regex (https://semver.org/)
        regex = r"^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<build>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
        match = re.match(regex, version_str)
        if match:
            major = int(match.group("major"))
            minor = int(match.group("minor"))
            patch = int(match.group("patch"))
            prerelease = match.group("prerelease")
            build = match.group("build")
            return major, minor, patch, prerelease, build
        else:
            raise ValueError(f"Invalid semantic version: {version_str}")

    def __str__(self):
        """Serialize back to canonical "x.y.z[-prerelease][+build]" form."""
        version_str = f"{self.major}.{self.minor}.{self.patch}"
        if self.prerelease:
            # BUG FIX: prerelease is already a dot-separated *string*;
            # '.'.join(self.prerelease) joined its characters
            # ("alpha.1" -> "a.l.p.h.a...1"). Append it verbatim.
            version_str += f"-{self.prerelease}"
        if self.build:
            # Same fix for build metadata.
            version_str += f"+{self.build}"
        return version_str

    def __lt__(self, other):
        return self._compare(other) < 0

    def __le__(self, other):
        return self._compare(other) <= 0

    def __eq__(self, other):
        return self._compare(other) == 0

    def __ge__(self, other):
        return self._compare(other) >= 0

    def __gt__(self, other):
        return self._compare(other) > 0

    def __ne__(self, other):
        return self._compare(other) != 0

    def _compare(self, other):
        """Three-way comparison: -1, 0 or 1. `other` may be any str()-able value."""
        if not isinstance(other, Version):
            other = Version(str(other))

        # Numeric core dominates.
        if self.major != other.major:
            return 1 if self.major > other.major else -1
        if self.minor != other.minor:
            return 1 if self.minor > other.minor else -1
        if self.patch != other.patch:
            return 1 if self.patch > other.patch else -1

        # Handle pre-release versions: identifier-wise comparison; a version
        # with a pre-release sorts before the plain release.
        if self.prerelease and other.prerelease:
            self_prerelease = [
                self._parse_prerelease(x) for x in self.prerelease.split(".")
            ]
            other_prerelease = [
                self._parse_prerelease(x) for x in other.prerelease.split(".")
            ]
            for i in range(min(len(self_prerelease), len(other_prerelease))):
                if self_prerelease[i] != other_prerelease[i]:
                    return 1 if self_prerelease[i] > other_prerelease[i] else -1
            # All shared identifiers equal: the longer list sorts higher.
            if len(self_prerelease) != len(other_prerelease):
                return 1 if len(self_prerelease) > len(other_prerelease) else -1
        elif self.prerelease:
            return -1
        elif other.prerelease:
            return 1

        # Handle build metadata (non-standard: SemVer ignores it in ordering,
        # but this implementation deliberately compares it).
        if self.build and other.build:
            self_build = [int(x) if x.isdigit() else x for x in self.build.split(".")]
            other_build = [int(x) if x.isdigit() else x for x in other.build.split(".")]
            for i in range(min(len(self_build), len(other_build))):
                if self_build[i] != other_build[i]:
                    return 1 if self_build[i] > other_build[i] else -1
            if len(self_build) != len(other_build):
                return 1 if len(self_build) > len(other_build) else -1
        elif self.build:
            return 1
        elif other.build:
            return -1

        return 0

    def _parse_prerelease(self, prerelease_str):
        """Normalize one pre-release identifier into a comparable (int, str) pair."""
        digits = "".join(c for c in prerelease_str if c.isdigit())
        alpha = "".join(c for c in prerelease_str if not c.isdigit())
        if digits:
            return int(digits), alpha
        else:
            return 0, alpha
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
class ApiClient:
    """Thin wrapper around a requests.Session pre-configured for the DT REST API.

    Every request path is resolved against base_api_url, and each request
    carries the X-API-Key header with a JSON accept header.
    """

    def __init__(self, base_api_url: str, api_key: str, verify_ssl: bool):
        self.base_api_url = base_api_url
        session = requests.Session()
        session.headers.update(
            {"X-API-Key": api_key, "accept": MIME_APPLICATION_JSON}
        )
        session.verify = verify_ssl
        self.session = session

    def _url(self, path: str) -> str:
        # Resolve an API path against the configured base URL.
        return f"{self.base_api_url}{path}"

    def get(self, path, **kwargs):
        """HTTP GET on the given API path."""
        return self.session.get(self._url(path), **kwargs)

    def post(self, path, **kwargs):
        """HTTP POST on the given API path."""
        return self.session.post(self._url(path), **kwargs)

    def put(self, path, **kwargs):
        """HTTP PUT on the given API path."""
        return self.session.put(self._url(path), **kwargs)
|
|
271
|
+
|
|
272
|
+
|
|
273
|
+
class Scanner:
|
|
274
|
+
    def __init__(
        self,
        base_api_url: str,
        api_key: str,
        project_path: str,
        path_separator: str = "/",
        purl_max_len: int = -1,
        merge: bool = False,
        merge_output: Optional[str] = None,
        verify_ssl: bool = True,
        show_findings: bool = False,
        risk_score_threshold: int = -1,
        tags: str = "",
        parent_collection_logic: str = CollectionLogic.ALL.name,
        parent_collection_logic_tag: str = "",
        upload_vex: bool = False,
        merged_vex_file=None,
        **_: None,
    ):
        """Configure a scanner bound to one Dependency Track server.

        Args:
            base_api_url: DT REST API base URL.
            api_key: DT API key, sent as the X-API-Key header.
            project_path: target project path template; may use the
                {file_prefix}/{sbom_type}/{sbom_name}/{sbom_version}
                placeholders (see publish()).
            path_separator: separator between project definitions in a path.
            purl_max_len: PURL trim length; negative means "derive from the
                server version" (see purl_max_len property).
            merge: SBOM merge mode flag — stored here but not read in this
                module chunk (presumably consumed by the CLI entry point).
            merge_output: output file for the merged SBOM (same caveat).
            verify_ssl: whether to verify the server TLS certificate.
            show_findings: print individual findings after each upload.
            risk_score_threshold: scan fails when the computed risk score
                reaches this value; negative disables the check.
            tags: comma-separated list of project tags.
            parent_collection_logic: CollectionLogic member *name* applied to
                auto-created parent projects.
            parent_collection_logic_tag: tag used with the TAG collection logic.
            upload_vex: import a VEX file after each SBOM upload.
            merged_vex_file: VEX file used with the merged SBOM — not read in
                this chunk; TODO confirm expected type (path-like).
            **_: swallows unknown keyword arguments (tolerates extra keys,
                e.g. from vars(argparse.Namespace)).
        """
        self.api_client = ApiClient(base_api_url, api_key, verify_ssl)
        self.project_path = project_path
        self.path_separator = path_separator
        # Backing field for the purl_max_len property (negative = undecided).
        self._purl_max_len = purl_max_len
        self.merge = merge
        self.merge_output = merge_output
        self.show_findings = show_findings
        self.risk_score_threshold = risk_score_threshold
        # "a, b,,c" -> ["a", "b", "c"]: split on commas, strip, drop empties.
        self.tags = list(filter(None, map(str.strip, tags.split(",")))) if tags else []
        self.parent_collection_logic = parent_collection_logic
        self.parent_collection_logic_tag = parent_collection_logic_tag
        # Running counters; sbom_scan_failed is bumped by do_scan() on
        # threshold breaches.
        self.sbom_count = 0
        self.sbom_scan_failed = 0
        self.upload_vex = upload_vex
        self.merged_vex_file = merged_vex_file
|
|
308
|
+
|
|
309
|
+
@property
|
|
310
|
+
@cache
|
|
311
|
+
def dt_version(self) -> Version:
|
|
312
|
+
"""Determines the DT server version."""
|
|
313
|
+
return Version(self.api_client.get("/version").json()["version"])
|
|
314
|
+
|
|
315
|
+
@property
|
|
316
|
+
@cache
|
|
317
|
+
def cdx_schema_version(self) -> SchemaVersion:
|
|
318
|
+
"""Determines the most suitable CycloneDX schema version depending on the DT server version."""
|
|
319
|
+
return SchemaVersion.V1_5
|
|
320
|
+
|
|
321
|
+
@property
|
|
322
|
+
@cache
|
|
323
|
+
def purl_max_len(self) -> int:
|
|
324
|
+
"""Determines the PURL max length depending on the DT server version."""
|
|
325
|
+
if self._purl_max_len < 0:
|
|
326
|
+
# see: https://github.com/DependencyTrack/dependency-track/pull/3560
|
|
327
|
+
ver = self.dt_version
|
|
328
|
+
self._purl_max_len = 255 if ver < Version("4.11.0") else 786
|
|
329
|
+
print(
|
|
330
|
+
f"Max PURLs length: {AnsiColors.BLUE}{self._purl_max_len}{AnsiColors.RESET} (server version {ver})"
|
|
331
|
+
)
|
|
332
|
+
|
|
333
|
+
return self._purl_max_len
|
|
334
|
+
|
|
335
|
+
@property
|
|
336
|
+
@cache
|
|
337
|
+
def event_token_path(self) -> str:
|
|
338
|
+
"""Determines the DT bom/token or event/token path depending on the DT server version."""
|
|
339
|
+
return "bom/token" if self.dt_version < Version("4.11.0") else "event/token"
|
|
340
|
+
|
|
341
|
+
@property
|
|
342
|
+
@cache
|
|
343
|
+
def need_findings(self) -> bool:
|
|
344
|
+
return self.show_findings or self.risk_score_threshold >= 0
|
|
345
|
+
|
|
346
|
+
@cache
|
|
347
|
+
def get_permissions(self) -> list[DtPermission]:
|
|
348
|
+
return [
|
|
349
|
+
permission["name"]
|
|
350
|
+
for permission in self.api_client.get("/v1/team/self").json()["permissions"]
|
|
351
|
+
]
|
|
352
|
+
|
|
353
|
+
def has_permission(self, perm: DtPermission) -> bool:
|
|
354
|
+
return perm in self.get_permissions()
|
|
355
|
+
|
|
356
|
+
    # rewinds the given project path and creates a DT project for each non-UUID defined project
    # returns the tail project UUID
    # NOTE(review): @cache on a bound method keys the cache on `self` and keeps
    # the instance alive (ruff B019) — acceptable for a short-lived CLI, but
    # consider a per-instance memo.
    @cache
    def get_or_create_project(
        self, project_path: str, classifier="application", is_parent: bool = False
    ) -> str:
        """Get or create the DT project addressed by project_path; return its UUID.

        Resolution order for the path tail: a "#"-prefixed UUID is returned
        as-is; an exact name/version match is reused; a sibling version is
        cloned; otherwise the project (and, recursively, its non-UUID
        ancestors) is created.

        Args:
            project_path: project definitions joined by self.path_separator.
            classifier: DT classifier applied to newly created projects.
            is_parent: True when creating an ancestor — enables collection
                logic on servers >= 4.13.0.

        Returns:
            The UUID of the existing, cloned, or newly created tail project.

        Raises:
            requests.exceptions.HTTPError: on any failed DT API call.
        """
        project_path_parts = project_path.split(self.path_separator)
        project_def = DtProjectDef(project_path_parts[-1])
        if project_def.is_uuid:
            print(
                f"- {AnsiColors.YELLOW}{project_path}{AnsiColors.RESET} is UUID: assume exists..."
            )
            return project_def.uuid

        # project is defined by name/version...
        resp = self.api_client.get(
            "/v1/project",
            params={"name": project_def.name},
        )
        resp.raise_for_status()
        # find project with matching name/version
        project_versions: list[dict] = resp.json()
        exact_match = next(
            filter(
                lambda prj: prj["name"] == project_def.name
                and prj.get("version") == project_def.version,
                project_versions,
            ),
            None,
        )
        if exact_match:
            # project already exists: replace name with found UUID
            print(
                f"- {AnsiColors.YELLOW}{project_path}{AnsiColors.RESET} found (by name/version): {exact_match['uuid']}..."
            )
            return exact_match["uuid"]
        # if project exists but not the version, we have to CLONE it
        name_match = next(
            filter(
                lambda prj: prj["name"] == project_def.name,
                project_versions,
            ),
            None,
        )
        if name_match:
            print(
                f"- {AnsiColors.YELLOW}{project_path}{AnsiColors.RESET} found sibling (version: {name_match.get('version')}): {name_match['uuid']}..."
            )
            # now create a clone of the project
            resp = self.api_client.put(
                "/v1/project/clone",
                headers={
                    "content-type": MIME_APPLICATION_JSON,
                },
                json={
                    "project": name_match["uuid"],
                    "version": project_def.version,
                    "includeTags": True,
                    "includeProperties": True,
                    "includeComponents": True,
                    "includeServices": True,
                    "includeAuditHistory": True,
                    "includeACL": True,
                },
            )
            try:
                resp.raise_for_status()
                # TODO: clone doesn't return UUID :(
                # so look the clone up by name/version to recover it
                resp = self.api_client.get(
                    "/v1/project/lookup",
                    headers={
                        "accept": MIME_APPLICATION_JSON,
                    },
                    params={"name": project_def.name, "version": project_def.version},
                )
                resp.raise_for_status()
                # retrieve UUID from response and return
                created_uuid = resp.json()["uuid"]
                print(
                    f"- {AnsiColors.YELLOW}{project_path}{AnsiColors.RESET} {AnsiColors.HGREEN}successfully{AnsiColors.RESET} cloned (from sibling): {created_uuid}"
                )
                return created_uuid
            except requests.exceptions.HTTPError as he:
                print(
                    f"- create {AnsiColors.YELLOW}{project_path}{AnsiColors.RESET} {AnsiColors.HRED}failed{AnsiColors.RESET} (err {he.response.status_code}): {AnsiColors.HGRAY}{he.response.text}{AnsiColors.RESET}",
                )
                raise

        # project does not exist: create it
        data = {
            "name": project_def.name,
            "version": project_def.version,
            "classifier": classifier.upper(),
            "active": True,
        }

        # Set up collection logic if supported (DT >= 4.13.0)
        if is_parent and self.dt_version >= Version("4.13.0"):
            data["collectionLogic"] = CollectionLogic[
                self.parent_collection_logic
            ].value
            # str-Enum comparison: the stored .value string compares equal to
            # the CollectionLogic.TAG member itself.
            if data["collectionLogic"] == CollectionLogic.TAG:
                data["collectionTag"] = {
                    "name": self.parent_collection_logic_tag.strip()
                }

        # TODO: externalReferences
        # data["externalReferences"] = [{"type":"vcs","url":project_url}],
        if len(project_path_parts) > 1:
            # project to create is not a root project: retrieve parent
            parent_def = DtProjectDef(project_path_parts[-2])
            if not parent_def.is_uuid:
                # create parent project (recursive: walks up the whole path)
                parent_uuid = self.get_or_create_project(
                    self.path_separator.join(project_path_parts[:-1]),
                    classifier=classifier,
                    is_parent=True,
                )
                # now parent def must be a UUID
                parent_def = DtProjectDef("#" + parent_uuid)
            # add parent UUID to params
            data["parent"] = {"uuid": parent_def.uuid}

        if self.tags:
            data["tags"] = self.tags

        print(
            f"- {AnsiColors.YELLOW}{project_path}{AnsiColors.RESET} not found: create with params {AnsiColors.HGRAY}{json.dumps(data)}{AnsiColors.RESET}..."
        )
        resp = self.api_client.put(
            "/v1/project",
            headers={
                "content-type": MIME_APPLICATION_JSON,
            },
            json=data,
        )
        try:
            resp.raise_for_status()
            # retrieve UUID from response and return
            created_uuid = resp.json()["uuid"]
            print(
                f"- {AnsiColors.YELLOW}{project_path}{AnsiColors.RESET} {AnsiColors.HGREEN}successfully{AnsiColors.RESET} created: {created_uuid}"
            )
            return created_uuid
        except requests.exceptions.HTTPError as he:
            print(
                f"- create {AnsiColors.YELLOW}{project_path}{AnsiColors.RESET} {AnsiColors.HRED}failed{AnsiColors.RESET} (err {he.response.status_code}): {AnsiColors.HGRAY}{he.response.text}{AnsiColors.RESET}",
            )
            raise
|
|
443
|
+
|
|
444
|
+
# project does not exist: create it
|
|
445
|
+
data = {
|
|
446
|
+
"name": project_def.name,
|
|
447
|
+
"version": project_def.version,
|
|
448
|
+
"classifier": classifier.upper(),
|
|
449
|
+
"active": True,
|
|
450
|
+
}
|
|
451
|
+
|
|
452
|
+
# Set up collection logic if supported
|
|
453
|
+
if is_parent and self.dt_version >= Version("4.13.0"):
|
|
454
|
+
data["collectionLogic"] = CollectionLogic[
|
|
455
|
+
self.parent_collection_logic
|
|
456
|
+
].value
|
|
457
|
+
if data["collectionLogic"] == CollectionLogic.TAG:
|
|
458
|
+
data["collectionTag"] = {
|
|
459
|
+
"name": self.parent_collection_logic_tag.strip()
|
|
460
|
+
}
|
|
461
|
+
|
|
462
|
+
# TODO: externalReferences
|
|
463
|
+
# data["externalReferences"] = [{"type":"vcs","url":project_url}],
|
|
464
|
+
if len(project_path_parts) > 1:
|
|
465
|
+
# project to create is not a root project: retrieve parent
|
|
466
|
+
parent_def = DtProjectDef(project_path_parts[-2])
|
|
467
|
+
if not parent_def.is_uuid:
|
|
468
|
+
# create parent project
|
|
469
|
+
parent_uuid = self.get_or_create_project(
|
|
470
|
+
self.path_separator.join(project_path_parts[:-1]),
|
|
471
|
+
classifier=classifier,
|
|
472
|
+
is_parent=True,
|
|
473
|
+
)
|
|
474
|
+
# now parent def must be a UUID
|
|
475
|
+
parent_def = DtProjectDef("#" + parent_uuid)
|
|
476
|
+
# add parent UUID to params
|
|
477
|
+
data["parent"] = {"uuid": parent_def.uuid}
|
|
478
|
+
|
|
479
|
+
if self.tags:
|
|
480
|
+
data["tags"] = self.tags
|
|
481
|
+
|
|
482
|
+
print(
|
|
483
|
+
f"- {AnsiColors.YELLOW}{project_path}{AnsiColors.RESET} not found: create with params {AnsiColors.HGRAY}{json.dumps(data)}{AnsiColors.RESET}..."
|
|
484
|
+
)
|
|
485
|
+
resp = self.api_client.put(
|
|
486
|
+
"/v1/project",
|
|
487
|
+
headers={
|
|
488
|
+
"content-type": MIME_APPLICATION_JSON,
|
|
489
|
+
},
|
|
490
|
+
json=data,
|
|
491
|
+
)
|
|
492
|
+
try:
|
|
493
|
+
resp.raise_for_status()
|
|
494
|
+
# retrieve UUID from response and return
|
|
495
|
+
created_uuid = resp.json()["uuid"]
|
|
496
|
+
print(
|
|
497
|
+
f"- {AnsiColors.YELLOW}{project_path}{AnsiColors.RESET} {AnsiColors.HGREEN}successfully{AnsiColors.RESET} created: {created_uuid}"
|
|
498
|
+
)
|
|
499
|
+
return created_uuid
|
|
500
|
+
except requests.exceptions.HTTPError as he:
|
|
501
|
+
print(
|
|
502
|
+
f"- create {AnsiColors.YELLOW}{project_path}{AnsiColors.RESET} {AnsiColors.HRED}failed{AnsiColors.RESET} (err {he.response.status_code}): {AnsiColors.HGRAY}{he.response.text}{AnsiColors.RESET}",
|
|
503
|
+
)
|
|
504
|
+
raise
|
|
505
|
+
|
|
506
|
+
def publish(self, sbom: Bom, file_prefix: str, vex_file_path: Path):
|
|
507
|
+
sbom_type = None
|
|
508
|
+
sbom_name = None
|
|
509
|
+
sbom_version = None
|
|
510
|
+
if sbom.metadata and sbom.metadata.component:
|
|
511
|
+
sbom_md_cmp = sbom.metadata.component
|
|
512
|
+
sbom_type = sbom_md_cmp.type.value
|
|
513
|
+
sbom_name = sbom_md_cmp.name
|
|
514
|
+
sbom_version = sbom_md_cmp.version
|
|
515
|
+
print(
|
|
516
|
+
f"- file_prefix: {AnsiColors.HGRAY}{file_prefix}{AnsiColors.RESET}; sbom_type: {AnsiColors.HGRAY}{sbom_type}{AnsiColors.RESET}; sbom_name: {AnsiColors.HGRAY}{sbom_name}{AnsiColors.RESET}; sbom_version: {AnsiColors.HGRAY}{sbom_version}{AnsiColors.RESET}"
|
|
517
|
+
)
|
|
518
|
+
|
|
519
|
+
# compute the target project path
|
|
520
|
+
project_path = str.format(
|
|
521
|
+
self.project_path,
|
|
522
|
+
file_prefix=file_prefix,
|
|
523
|
+
sbom_type=sbom_type or "unk",
|
|
524
|
+
sbom_name=sbom_name or "unk",
|
|
525
|
+
sbom_version=sbom_version or "",
|
|
526
|
+
)
|
|
527
|
+
print(f"- target project: {AnsiColors.YELLOW}{project_path}{AnsiColors.RESET}")
|
|
528
|
+
|
|
529
|
+
# finally trim purls
|
|
530
|
+
if self.purl_max_len > 0:
|
|
531
|
+
print(
|
|
532
|
+
f"- trim PURLs to {AnsiColors.HGRAY}{self.purl_max_len}{AnsiColors.RESET} charaters..."
|
|
533
|
+
)
|
|
534
|
+
sbom_utils.trim_purls(sbom, self.purl_max_len)
|
|
535
|
+
|
|
536
|
+
self.do_publish(
|
|
537
|
+
sbom_utils.to_json(sbom, self.cdx_schema_version),
|
|
538
|
+
project_path,
|
|
539
|
+
sbom_type,
|
|
540
|
+
vex_file_path,
|
|
541
|
+
)
|
|
542
|
+
|
|
543
|
+
    def do_publish(
        self,
        sbom_json: str,
        project_path: str,
        sbom_type: str,
        vex_file_path: Path,
        allow_retry=True,
    ):
        """Upload a serialized SBOM to the project addressed by project_path.

        On a 404 (target project missing) — and when the key has the
        PORTFOLIO_MANAGEMENT + VIEW_PORTFOLIO permissions — the project
        hierarchy is created and the upload is retried exactly once
        (allow_retry=False on the recursive call). After a successful
        upload, optionally imports the VEX file and fetches findings.

        Args:
            sbom_json: CycloneDX JSON document to upload.
            project_path: project definitions joined by self.path_separator.
            sbom_type: BOM metadata component type, used as classifier when
                creating missing projects.
            vex_file_path: VEX file for the optional import step.
            allow_retry: internal guard preventing more than one retry.

        Raises:
            requests.exceptions.HTTPError: when the upload fails and the
                create-and-retry path is not applicable.
        """
        project_path_parts = project_path.split(self.path_separator)
        # determine publish params
        project_def = DtProjectDef(project_path_parts[-1])
        params = project_def.params

        # With PROJECT_CREATION_UPLOAD the server can create the target (and
        # its parent) on the fly, so pass the auto-create hints along.
        if self.has_permission(DtPermission.PROJECT_CREATION_UPLOAD):
            params["autoCreate"] = "true"
            if len(project_path_parts) > 1:
                parent_def = DtProjectDef(project_path_parts[-2])
                if parent_def.is_uuid:
                    params["parentUUID"] = parent_def.uuid
                else:
                    params["parentName"] = parent_def.name
                    params["parentVersion"] = parent_def.version
            if self.tags:
                params["projectTags"] = self.tags

        # publish SBOM
        print(
            f"- publish params: {AnsiColors.HGRAY}{json.dumps(params)}{AnsiColors.RESET}..."
        )
        resp = self.api_client.post(
            "/v1/bom",
            files={"bom": sbom_json},
            data=params,
        )
        try:
            resp.raise_for_status()
            print(
                f"- publish {AnsiColors.HGREEN}succeeded{AnsiColors.RESET}: {AnsiColors.HGRAY}{resp.text}{AnsiColors.RESET}"
            )
        except requests.exceptions.HTTPError as he:
            print(
                f"- publish {AnsiColors.HRED}failed{AnsiColors.RESET} (err {he.response.status_code}): {AnsiColors.HGRAY}{he.response.text}{AnsiColors.RESET}",
            )
            if (
                he.response.status_code == 404
                and self.has_permission(DtPermission.PORTFOLIO_MANAGEMENT)
                and self.has_permission(DtPermission.VIEW_PORTFOLIO)
                and allow_retry
            ):
                # try to create parent projects
                print("- create projects...")
                # replace last path part with project UUID
                # TODO: retrieve classifier from SBOM
                project_path_parts[-1] = "#" + self.get_or_create_project(
                    project_path, sbom_type, is_parent=False
                )
                # then retry (once: allow_retry=False)
                print("- retry publish...")
                self.do_publish(
                    sbom_json,
                    self.path_separator.join(project_path_parts),
                    sbom_type,
                    vex_file_path,
                    allow_retry=False,
                )
                # to prevent do_scan one more time (must have been done in the retried do_publish())
                return
            else:
                raise

        # token identifying the server-side processing event of this upload
        event_id = resp.json().get("token")

        # import VEX file (replaces the event token with the VEX import's one)
        if self.upload_vex:
            event_id = self.do_vex_publish(project_def, vex_file_path, event_id)

        if self.need_findings:
            self.do_scan(project_def, event_id)
|
|
621
|
+
|
|
622
|
+
    def do_vex_publish(
        self, project_def: DtProjectDef, vex_file_path: Path, event_id: str
    ):
        """Import a VEX file for the project once the BOM upload event completed.

        Waits for the BOM processing event first (the VEX analysis decisions
        must apply to the freshly uploaded components).

        Args:
            project_def: target project (UUID or name/version).
            vex_file_path: VEX document to import; silently skipped when missing.
            event_id: token of the preceding BOM upload event.

        Returns:
            The VEX import's event token, or the original event_id when the
            VEX file does not exist.

        Raises:
            requests.exceptions.HTTPError: when the VEX import is rejected.
        """
        self.wait_for_event_processing(event_id)

        if not vex_file_path.exists():
            print(
                f"- VEX file {AnsiColors.YELLOW}not found, skipping upload{AnsiColors.RESET}: {AnsiColors.HGRAY}{vex_file_path}{AnsiColors.RESET}"
            )
            return event_id

        with open(vex_file_path, "r") as vex_file:
            params = project_def.params
            resp = self.api_client.post(
                "/v1/vex",
                files={"vex": vex_file},
                data=params,
            )
        try:
            resp.raise_for_status()
            print(
                f"- VEX import {AnsiColors.HGREEN}succeeded{AnsiColors.RESET}: {AnsiColors.HGRAY}{resp.text}{AnsiColors.RESET}"
            )
        except requests.exceptions.HTTPError as he:
            print(
                f"- VEX import {AnsiColors.HRED}failed{AnsiColors.RESET} (err {he.response.status_code}): {AnsiColors.HGRAY}{he.response.text}{AnsiColors.RESET}",
            )
            raise

        return resp.json().get("token")
|
|
652
|
+
|
|
653
|
+
    def do_scan(self, project_def: DtProjectDef, event_id: str):
        """Fetch findings for the project, print them, and enforce the risk-score threshold.

        Resolves the project UUID (looking it up by name/version when
        needed), waits for the analysis event to finish, sums severity
        risk-score weights over all findings, and increments
        self.sbom_scan_failed when the total reaches
        self.risk_score_threshold (only when the threshold is >= 0).

        Args:
            project_def: target project (UUID or name/version).
            event_id: token of the event to wait for before reading findings.
        """
        print(f"- scan: {AnsiColors.HGRAY}{event_id}{AnsiColors.RESET}...")
        if project_def.is_uuid:
            project_id = project_def.uuid
        else:
            params = {}
            params["name"] = project_def.name
            if project_def.version:
                params["version"] = project_def.version
            # NOTE(review): no raise_for_status() here — a failed lookup
            # yields project_id=None and a confusing findings request;
            # confirm whether that is intended.
            resp = self.api_client.get(
                "/v1/project/lookup",
                params=params,
            )
            project_id = resp.json().get("uuid")

        self.wait_for_event_processing(event_id)
        # MAYBE: get SBOM with VEX curl -sSf f"{self.base_api_url}/v1/bom/cyclonedx/project/{project_id}?variant=withVulnerabilities"
        resp = self.api_client.get(
            f"/v1/finding/project/{project_id}",
        )
        resp.raise_for_status()
        risk_score = 0
        # Sort findings by descending CVSSv3 base score for display.
        # NOTE(review): a finding with cvssV3BaseScore present-but-null would
        # make this sort key None and break the comparison — TODO confirm the
        # API never returns null here.
        findings = sorted(
            resp.json(),
            key=lambda o: o.get("vulnerability", {}).get("cvssV3BaseScore", 0),
            reverse=True,
        )
        for o in findings:
            vuln = o.get("vulnerability", {})
            component = o.get("component", {})
            # severityRank indexes SEVERITY_RANKS; unknown rank defaults to
            # 5 = Unassigned.
            severity = SEVERITY_RANKS[vuln.get("severityRank", 5)]
            cwes = (cwe["name"] for cwe in vuln.get("cwes", []))
            risk_score += severity.risk_score
            if self.show_findings:
                print(
                    f" - {vuln['vulnId']} {severity.color}{severity.name}{AnsiColors.RESET}: {component.get('group', '')}:{component.get('name')}:{component.get('version', '')} - {' '.join(cwes)}"
                )
                # collapse blank lines in the vulnerability description
                print(re.sub("\n+", "\n", vuln.get("description", "").strip()))
                print()
        if self.risk_score_threshold < 0 or risk_score < self.risk_score_threshold:
            print(
                f"- scan {AnsiColors.HGREEN}succeeded{AnsiColors.RESET}: {len(findings)} vulnerabilities found {AnsiColors.HGRAY}risk score: {risk_score}{AnsiColors.RESET}"
            )
        else:
            self.sbom_scan_failed += 1
            print(
                f"- scan {AnsiColors.HRED}failed{AnsiColors.RESET}: risk score {risk_score} exceeds threshold {self.risk_score_threshold} - failing the scan: {AnsiColors.HGRAY}{len(findings)} vulnerabilities found{AnsiColors.RESET}"
            )
|
|
701
|
+
|
|
702
|
+
def wait_for_event_processing(self, event_id: str):
|
|
703
|
+
for n in range(8): # ~5 minutes
|
|
704
|
+
sleep(2**n)
|
|
705
|
+
resp = self.api_client.get(
|
|
706
|
+
f"/v1/{self.event_token_path}/{event_id}",
|
|
707
|
+
)
|
|
708
|
+
if not resp.json().get("processing", False):
|
|
709
|
+
break
|
|
710
|
+
|
|
711
|
+
def scan(self, sbom_patterns: list[str]) -> None:
    """Find SBOM files matching *sbom_patterns* and publish them to Dependency Track.

    Workflow:
      1. probe connectivity and authentication, then verify the API key has
         every permission the requested features need (fail() exits on error);
      2. glob each pattern, load every matching SBOM and either publish it
         immediately or collect it for merging (--merge);
      3. when merging, build one combined SBOM, optionally save it to disk,
         and publish it (with the merged VEX file when configured).

    Side effects: prints progress to stdout, increments self.sbom_count per
    SBOM file found, and may call sys.exit(1) via fail().

    :param sbom_patterns: glob patterns (recursive '**' supported) to scan for
    """
    try:
        # try to connect to Dependency Track server
        # NOTE(review): bare attribute access — presumably a property whose
        # getter performs the HTTP request; confirm against the class definition
        self.dt_version
    except requests.exceptions.RequestException as err:
        fail(
            f"Unable to connect to Dependency Track server - check the API URL and network configuration: {err}"
        )
    try:
        # try an authenticated request to Dependency Track server
        self.get_permissions()
    except requests.exceptions.RequestException as err:
        fail(
            f"Unable to authenticate to Dependency Track server - check the API key: {err}"
        )

    print(
        f"🗝 API key has permissions: {AnsiColors.BLUE}{', '.join(self.get_permissions())}{AnsiColors.RESET}"
    )
    print()
    # uploading SBOMs is the tool's core job: hard requirement
    if not self.has_permission(DtPermission.BOM_UPLOAD):
        fail(
            "BOM_UPLOAD permission is mandatory to publish SBOM files to Dependency Track server"
        )
    # findings/risk-score features need read access to vulnerabilities and portfolio
    if self.need_findings:
        if not self.has_permission(DtPermission.VIEW_VULNERABILITY):
            fail(
                "VIEW_VULNERABILITY permission is mandatory to show finding or compute risk score after SBOM analysis"
            )
        if not self.has_permission(DtPermission.VIEW_PORTFOLIO):
            fail(
                "VIEW_PORTFOLIO permission is mandatory to show finding or compute risk score after SBOM analysis"
            )
    if self.upload_vex and not self.has_permission(
        DtPermission.VULNERABILITY_ANALYSIS
    ):
        fail("VULNERABILITY_ANALYSIS permission is mandatory to import VEX files")

    # scan for SBOM files
    sboms = []  # only filled in merge mode; publishing is deferred to the end
    for pattern in sbom_patterns:
        for file in glob.glob(pattern, recursive=True):
            print(
                f"{AnsiColors.BOLD}📄 SBOM: {AnsiColors.BLUE}{file}{AnsiColors.RESET}"
            )
            # load the SBOM and VEX content
            sbom_file_path = Path(file)
            # first dotted segment of the file name, e.g. 'app.cyclonedx.json' -> 'app';
            # the companion VEX file is expected as '<prefix>.vex.json' next to the SBOM
            sbom_file_prefix = sbom_file_path.name.split(".")[0]
            vex_file_path = sbom_file_path.with_name(f"{sbom_file_prefix}.vex.json")

            sbom = sbom_utils.load_bom(sbom_file_path)
            if self.merge:
                sboms.append(sbom)
            else:
                self.publish(sbom, sbom_file_prefix, vex_file_path)

            print()
            self.sbom_count += 1

    if self.sbom_count == 0:
        print(
            f"- {AnsiColors.YELLOW}WARN{AnsiColors.RESET} no SBOM file found - nothing to publish",
        )
    elif self.merge:
        # extract name and version from path
        print(
            f"{AnsiColors.BOLD}📄 Merge SBOMs: {AnsiColors.BLUE}{self.merge_output or 'in memory'}{AnsiColors.RESET}"
        )
        # expand the project path template with placeholder values suitable
        # for a merged SBOM (no single source file to derive them from)
        project_path = str.format(
            self.project_path,
            file_prefix="merged",
            sbom_type="unk",
            sbom_name="unk",
            sbom_version="",
        )
        project_path_parts = project_path.split(self.path_separator)
        # the last path segment identifies the target project (name@version or UUID)
        project_def = DtProjectDef(project_path_parts[-1])
        if project_def.is_uuid:
            # a UUID carries no usable name/version; fall back to a generic name
            sbom_name = "merged"
            sbom_version = None
        else:
            sbom_name = project_def.name
            sbom_version = project_def.version

        merged_sbom = sbom_utils.merge_boms(
            sbom_name, sbom_version, root_group=None, boms=sboms
        )
        if self.merge_output:
            # debugging aid: persist the merged SBOM before uploading it
            sbom_utils.save_bom(
                merged_sbom, Path(self.merge_output), self.cdx_schema_version
            )
        vex_file_path = Path(self.merged_vex_file) if self.merged_vex_file else None

        self.publish(merged_sbom, "merged", vex_file_path)
|
|
805
|
+
|
|
806
|
+
|
|
807
|
+
def fail(msg: str) -> None:
    """Report a fatal error on stdout and abort the process with exit code 1.

    :param msg: human-readable description of the failure
    """
    error_tag = f"{AnsiColors.HRED}ERROR{AnsiColors.RESET}"
    print(f"{error_tag} {msg}")
    sys.exit(1)
|
|
810
|
+
|
|
811
|
+
|
|
812
|
+
def _env_flag(name: str) -> bool:
    """Return True when the environment variable *name* holds a truthy value.

    The comparison is case-insensitive and ignores surrounding whitespace,
    so "true", "TRUE", " Yes " and "1" are all accepted (see IS_STR_TRUE).
    An unset variable is falsy.
    """
    return (os.getenv(name) or "").strip().lower() in IS_STR_TRUE


def run() -> None:
    """CLI entry point: parse arguments, validate them and run the SBOM scan.

    Every CLI option can also be supplied via a DEPTRACK_* environment
    variable (used as the argparse default). Exits with status 1 (via
    fail()) on invalid configuration or when at least one SBOM scan
    exceeds the risk score threshold.
    """
    # define command parser
    parser = argparse.ArgumentParser(
        prog="sbom-scanner",
        description="This tool scans for SBOM files and publishes them to a Dependency Track server.",
    )
    dt_platform_group = parser.add_argument_group("Dependency Track connection")
    dt_platform_group.add_argument(
        "-u",
        "--base-api-url",
        default=os.getenv("DEPTRACK_BASE_API_URL"),
        help="Dependency Track server base API url (includes '/api')",
    )
    dt_platform_group.add_argument(
        "-k",
        "--api-key",
        default=os.getenv("DEPTRACK_API_KEY"),
        help="Dependency Track API key",
    )
    dt_platform_group.add_argument(
        "-i",
        "--insecure",
        action="store_true",
        # case-insensitive env flag ("true"/"yes"/"1" in any casing)
        default=_env_flag("DEPTRACK_INSECURE"),
        help="Skip SSL verification",
    )

    project_selection_group = parser.add_argument_group("Project settings")
    project_selection_group.add_argument(
        "-p",
        "--project-path",
        default=os.getenv("DEPTRACK_PROJECT_PATH"),
        help="Dependency Track target project path to publish SBOM files to (see doc)",
    )
    project_selection_group.add_argument(
        "-s",
        "--path-separator",
        default=os.getenv("DEPTRACK_PATH_SEPARATOR", "/"),
        help="Separator to use in project path (default: '/')",
    )
    project_selection_group.add_argument(
        "-t",
        "--tags",
        type=str,
        default=os.getenv("DEPTRACK_TAGS", ""),
        help="Comma separated list of tags to attach to the project",
    )
    project_selection_group.add_argument(
        "--parent-collection-logic",
        type=str,
        default=os.getenv(
            "DEPTRACK_PARENT_COLLECTION_LOGIC",
            CollectionLogic.ALL.name,
        ),
        # enum member names are the accepted CLI values
        choices=[logic.name for logic in CollectionLogic],
        help="Set up how the parent aggregates its direct children (ALL: all, TAG: with tag matching --parent-collection-logic-tag, LATEST: flagged as latest, NONE: disable), default is ALL (DT version >= 4.13.0)",
    )
    project_selection_group.add_argument(
        "--parent-collection-logic-tag",
        type=str,
        default=os.getenv("DEPTRACK_PARENT_COLLECTION_LOGIC_TAG", ""),
        help="Tag for aggregation if --parent-collection-logic is set to TAG",
    )

    sbom_management_group = parser.add_argument_group("SBOM management")
    sbom_management_group.add_argument(
        "-m",
        "--merge",
        action="store_true",
        default=_env_flag("DEPTRACK_MERGE"),
        help="Merge all SBOM files into one",
    )
    sbom_management_group.add_argument(
        "-o",
        "--merge-output",
        default=os.getenv("DEPTRACK_MERGE_OUTPUT"),
        help="Output merged SBOM file (only used with merge enabled) - for debugging purpose",
    )
    # <0: auto (from DT version) / 0: no trim / >0 max length
    sbom_management_group.add_argument(
        "-l",
        "--purl-max-len",
        type=int,
        default=int(os.getenv("DEPTRACK_PURL_MAX_LEN", "-1")),
        help="PURLs max length (-1: auto, 0: no trim, >0: trim to size - default: -1)",
    )

    vex_group = parser.add_argument_group("VEX")
    vex_group.add_argument(
        "-U",
        "--upload-vex",
        action="store_true",
        default=_env_flag("DEPTRACK_UPLOAD_VEX"),
        help="Upload VEX file after SBOM analysis (requires VULNERABILITY_ANALYSIS permission). The VEX file(s) are resolved based on the sbom pattern(s). The first part of the SBOM file name is used to match it with a VEX file (e.g. if there is an SBOM file 'example.cyclonedx.json', the corresponding VEX file name must be 'example.vex.json')",
    )
    vex_group.add_argument(
        "-V",
        "--merged-vex-file",
        type=str,
        default=os.getenv("DEPTRACK_MERGED_VEX_FILE"),
        help="The VEX file to upload if multiple SBOMS are merged (--merge). Can only be used with --upload-vex and --merge.",
    )

    misc_group = parser.add_argument_group("Miscellaneous")
    misc_group.add_argument(
        "-S",
        "--show-findings",
        action="store_true",
        default=_env_flag("DEPTRACK_SHOW_FINDINGS"),
        help="Wait for analysis and display found vulnerabilities",
    )
    misc_group.add_argument(
        "-R",
        "--risk-score-threshold",
        type=int,
        default=int(os.getenv("DEPTRACK_RISK_SCORE_THRESHOLD", "-1")),
        help="Risk score threshold to fail the scan (<0: disabled - default: -1)",
    )

    parser.add_argument(
        "sbom_patterns",
        nargs="*",
        # split() without a separator collapses any run of whitespace and
        # never produces empty patterns (unlike split(" "))
        default=os.getenv(
            "DEPTRACK_SBOM_PATTERNS", "**/*.cyclonedx.json **/*.cyclonedx.xml"
        ).split(),
        help="SBOM file patterns to publish (supports glob patterns). Default: '**/*.cyclonedx.json **/*.cyclonedx.xml'",
    )

    # parse command and args
    args = parser.parse_args()

    # check required args (these have no usable defaults)
    if not args.base_api_url:
        fail(
            "Dependency Track server base API url is required (use --base-api-url CLI option or DEPTRACK_BASE_API_URL variable)"
        )
    if not args.api_key:
        fail(
            "Dependency Track API key is required (use --api-key CLI option or DEPTRACK_API_KEY variable)"
        )
    if not args.project_path:
        fail(
            "Dependency Track target project path is required (use --project-path CLI option or DEPTRACK_PROJECT_PATH variable)"
        )
    # cross-option consistency checks
    if (
        not args.parent_collection_logic_tag
        and args.parent_collection_logic == CollectionLogic.TAG.name
    ):
        fail(
            f"You need to specify a tag with --parent-collection-logic-tag (or DEPTRACK_PARENT_COLLECTION_LOGIC_TAG env var) if parent collection logic has been set to {CollectionLogic.TAG.name}"
        )
    if args.merge and args.upload_vex and not args.merged_vex_file:
        fail(
            "You need to specify a VEX file with --merged-vex-file (or DEPTRACK_MERGED_VEX_FILE env var) if you want to upload a VEX file and are merging SBOM files (--merge)"
        )
    if not args.merge and args.upload_vex and args.merged_vex_file:
        fail(
            "You cannot specify a VEX file with --merged-vex-file (or DEPTRACK_MERGED_VEX_FILE env var) if you are NOT merging SBOM files (--merge is not set)"
        )

    # print execution parameters
    print("Scanning SBOM files...")
    print(
        f"- base API url (--base-api-url): {AnsiColors.CYAN}{args.base_api_url}{AnsiColors.RESET}"
    )
    print(
        f"- project path (--project-path): {AnsiColors.CYAN}{args.project_path}{AnsiColors.RESET}"
    )
    print(
        f"- project tags (--tags): {AnsiColors.CYAN}{args.tags}{AnsiColors.RESET}"
    )
    print(
        f"- parent collection logic (--parent-collection-logic): {AnsiColors.CYAN}{args.parent_collection_logic}{AnsiColors.RESET}"
        + (
            f" matching {AnsiColors.CYAN}{args.parent_collection_logic_tag}{AnsiColors.RESET} (--parent-collection-logic-tag)"
            if args.parent_collection_logic == CollectionLogic.TAG.name
            else ""
        )
    )
    print(
        f"- path separator (--path-separator): {AnsiColors.CYAN}{args.path_separator}{AnsiColors.RESET}"
    )
    print(
        f"- PURLs max length (--purl-max-len): {AnsiColors.CYAN}{'auto (-1)' if args.purl_max_len < 0 else 'no trim (0)' if args.purl_max_len == 0 else args.purl_max_len}{AnsiColors.RESET}"
    )
    print(
        f"- merge SBOM files (--merge) : {AnsiColors.CYAN}{args.merge}{AnsiColors.RESET}"
    )
    print(
        f"- merge output (--merge-output): {AnsiColors.CYAN}{args.merge_output}{AnsiColors.RESET}"
    )
    print(
        f"- show findings (--show-findings): {AnsiColors.CYAN}{args.show_findings}{AnsiColors.RESET}"
    )
    print(
        f"- risk score (--risk-score-threshold): {AnsiColors.CYAN}{args.risk_score_threshold}{AnsiColors.RESET}"
    )
    print(
        f"- insecure (--insecure): {AnsiColors.CYAN}{args.insecure}{AnsiColors.RESET}"
    )
    print(
        f"- Upload VEX (--upload-vex): {AnsiColors.CYAN}{args.upload_vex}{AnsiColors.RESET}"
    )
    print(
        f"- VEX file path for merged SBOM (--merged-vex-file): {AnsiColors.CYAN}{args.merged_vex_file}{AnsiColors.RESET}"
    )
    print(
        f"- SBOM file pattern : {AnsiColors.CYAN}{', '.join(args.sbom_patterns)}{AnsiColors.RESET}"
    )
    print()

    # execute the scan
    # every parsed argument is forwarded to Scanner by keyword; verify_ssl is
    # derived from --insecure (Scanner presumably tolerates the extra
    # 'insecure'/'sbom_patterns' kwargs — confirm against its signature)
    scanner = Scanner(
        **vars(args),
        verify_ssl=not args.insecure,
    )
    scanner.scan(args.sbom_patterns)

    print("Done!")
    print(
        "----------------------------------------------------------------------------------------------"
    )
    print(f"Summary: {scanner.sbom_count} SBOM published")
    # propagate scan failures (risk score over threshold) as a non-zero exit
    if scanner.sbom_count and scanner.sbom_scan_failed:
        fail(
            f"{scanner.sbom_scan_failed} SBOM scan failed. Check the logs for details."
        )
|