regscale-cli 6.16.2.0__py3-none-any.whl → 6.16.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- regscale/__init__.py +1 -1
- regscale/core/app/utils/api_handler.py +4 -11
- regscale/integrations/commercial/crowdstrike.py +0 -1
- regscale/integrations/commercial/qualys.py +50 -61
- regscale/integrations/commercial/servicenow.py +1 -0
- regscale/integrations/commercial/snyk.py +2 -2
- regscale/integrations/commercial/synqly/ticketing.py +29 -0
- regscale/integrations/commercial/veracode.py +1 -1
- regscale/integrations/scanner_integration.py +53 -18
- regscale/models/integration_models/cisa_kev_data.json +50 -7
- regscale/models/integration_models/flat_file_importer/__init__.py +29 -8
- regscale/models/integration_models/snyk.py +141 -15
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/integration_models/veracode.py +91 -48
- regscale/models/regscale_models/user.py +3 -4
- regscale/utils/version.py +3 -5
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.3.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.3.0.dist-info}/RECORD +22 -22
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.3.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.3.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.3.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.3.0.dist-info}/top_level.txt +0 -0
regscale/models/integration_models/flat_file_importer/__init__.py

@@ -82,7 +82,7 @@ class FlatFileIntegration(ScannerIntegration):
         """
         self.asset_identifier_field = asset_identifier_field

-    def fetch_assets(self, *args:
+    def fetch_assets(self, *args: Tuple, **kwargs: dict) -> Iterator["IntegrationAsset"]:
         """
         Fetches assets from FlatFileImporter

@@ -220,16 +220,19 @@ class FlatFileImporter(ABC):
         )
         self.clean_up()

-    def parse_finding(self, vuln: Vulnerability) -> Optional["IntegrationFinding"]:
+    def parse_finding(self, vuln: Union[Vulnerability, "IntegrationFinding"]) -> Optional["IntegrationFinding"]:
         """
         Parses a vulnerability object into an IntegrationFinding object

-        :param Vulnerability vuln: A vulnerability object
+        :param Union[Vulnerability, IntegrationFinding] vuln: A vulnerability object
         :return: The parsed IntegrationFinding or None if parsing fails
         :rtype: Optional[IntegrationFinding]
         """
         from regscale.integrations.scanner_integration import IntegrationFinding

+        if isinstance(vuln, IntegrationFinding):
+            return vuln
+
         try:
             asset_id = vuln.dns or vuln.ipAddress
             if not asset_id:

@@ -237,9 +240,9 @@ class FlatFileImporter(ABC):

             severity = self.finding_severity_map.get(vuln.severity.capitalize(), regscale_models.IssueSeverity.Low)
             status = self.map_status_to_issue_status(vuln.status)
-            cve: Optional[str] = vuln
-            extract_vuln: Any = self.extract_ghsa_strings(vuln
-            plugin_name = vuln
+            cve: Optional[str] = getattr(vuln, "cve", "")
+            extract_vuln: Any = self.extract_ghsa_strings(getattr(vuln, "plugInName", ""))
+            plugin_name = getattr(vuln, "plugInName", getattr(vuln, "title", ""))
             if not self.assert_valid_cve(cve):
                 if isinstance(extract_vuln, list):
                     cve = ", ".join(extract_vuln)

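The replaced lines above swap direct attribute access for `getattr(..., default)`, which is what lets `parse_finding` read fields that exist on only one of the two accepted types. A minimal standalone sketch of that pattern, using hypothetical stand-in classes rather than the real RegScale models:

```python
# Sketch of the getattr-with-default pattern; LegacyVuln and NewFinding are
# hypothetical stand-ins, not the real RegScale models.
from dataclasses import dataclass
from typing import Optional


@dataclass
class LegacyVuln:          # stand-in for Vulnerability: exposes cve/plugInName
    cve: Optional[str] = None
    plugInName: str = ""


@dataclass
class NewFinding:          # stand-in for IntegrationFinding: exposes title only
    title: str = ""


def read_plugin_name(vuln) -> str:
    # getattr with a default never raises AttributeError, so either type works
    return getattr(vuln, "plugInName", getattr(vuln, "title", ""))


print(read_plugin_name(LegacyVuln(plugInName="openssl")))        # -> openssl
print(read_plugin_name(NewFinding(title="Prototype Pollution"))) # -> Prototype Pollution
```

Because `getattr` returns the supplied default instead of raising, the same code path can serve both legacy `Vulnerability` rows and pre-built `IntegrationFinding` objects.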
@@ -286,16 +289,19 @@ class FlatFileImporter(ABC):
             self.attributes.logger.error("Error parsing Wiz finding: %s", str(e), exc_info=True)
             return None

-    def parse_asset(self, asset: Asset) -> "IntegrationAsset":
+    def parse_asset(self, asset: Union[Asset, "IntegrationAsset"]) -> "IntegrationAsset":
         """
         Converts Asset -> IntegrationAsset

-        :param Asset asset: The asset to parse
+        :param Union[Asset, IntegrationAsset] asset: The asset to parse
         :return: The parsed IntegrationAsset
         :rtype: IntegrationAsset
         """
         from regscale.integrations.scanner_integration import IntegrationAsset

+        if isinstance(asset, IntegrationAsset):
+            return asset
+
         return IntegrationAsset(
             name=asset.name,
             external_id=asset.otherTrackingNumber,

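Both `parse_finding` and `parse_asset` now short-circuit when the object is already in the integration type and only convert otherwise. A small sketch of that pass-through, with simplified placeholder classes in place of the RegScale models:

```python
# Sketch of the isinstance short-circuit used by parse_finding/parse_asset.
# RawAsset and IntegrationAsset here are simplified placeholders.
from dataclasses import dataclass
from typing import Union


@dataclass
class RawAsset:
    name: str


@dataclass
class IntegrationAsset:
    name: str


def parse_asset(asset: Union[RawAsset, IntegrationAsset]) -> IntegrationAsset:
    if isinstance(asset, IntegrationAsset):
        return asset                              # already converted, pass it through
    return IntegrationAsset(name=asset.name)      # otherwise convert field by field


assert parse_asset(IntegrationAsset(name="web-01")).name == "web-01"
assert parse_asset(RawAsset(name="web-02")).name == "web-02"
```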
@@ -514,6 +520,8 @@ class FlatFileImporter(ABC):
         """
         Process the assets in the data
         """
+        from regscale.integrations.scanner_integration import IntegrationAsset
+
         # The passed function creates asset objects. Convert to IntegrationAsset here
         if isinstance(self.file_data, list):
             for dat in self.file_data:

@@ -523,6 +531,8 @@ class FlatFileImporter(ABC):
         if isinstance(self.data["assets"], Iterator):
             self.integration_assets = self.data["assets"]
             return None
+        elif isinstance(self.data["assets"], IntegrationAsset):
+            self.data["assets"] = [self.data["assets"]]
         self.integration_assets = (self.parse_asset(asset) for asset in self.data["assets"])

     def process_asset_data(self, dat: Any, func: Callable) -> None:

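The asset-processing hunk above additionally accepts a single `IntegrationAsset` by wrapping it in a one-element list before the generator expression runs. A tiny standalone sketch of the same normalization, with placeholder values instead of asset objects:

```python
# Sketch of the normalization added above: iterators pass through unchanged,
# a single item is wrapped, and lists are converted element by element.
from collections.abc import Iterator


def normalize_assets(assets) -> Iterator:
    if isinstance(assets, Iterator):
        return assets                     # already a lazy stream, use as-is
    if not isinstance(assets, list):
        assets = [assets]                 # single parsed asset -> one-element list
    return (a for a in assets)            # lazily convert each raw entry


print(list(normalize_assets("web-01")))     # -> ['web-01']
print(list(normalize_assets(["a", "b"])))   # -> ['a', 'b']
print(list(normalize_assets(iter(["c"]))))  # -> ['c']
```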
@@ -712,6 +722,17 @@ class FlatFileImporter(ABC):

         with create_progress_object() as vuln_progress:
             vuln_task = vuln_progress.add_task("Processing vulnerabilities...", total=len(self.file_data))
+            try:
+                res = func(self.file_data)
+                if isinstance(res, list):
+                    self.integration_findings = res
+                    self.data["vulns"] = res
+                    vuln_progress.update(vuln_task, completed=len(self.file_data))
+                    return
+            except Exception as e:
+                self.attributes.logger.debug(
+                    "Cannot process vulns as a whole, now iterating all data to parse vulns: %s", str(e)
+                )
             for ix, dat in enumerate(self.file_data):
                 vuln = func(dat, index=ix)
                 if not vuln:

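The vulnerability loop now offers the entire parsed file to the callback first and only falls back to row-by-row parsing when that attempt fails or does not return a list. A standalone sketch of the control flow, where `parse` is a hypothetical callback standing in for `vuln_func`:

```python
# Sketch of the "try the whole payload first, then fall back per row" flow.
from typing import Any, Callable, List


def process_vulns(rows: List[Any], parse: Callable) -> List[Any]:
    try:
        res = parse(rows)            # some importers can consume the whole file at once
        if isinstance(res, list):
            return res
    except Exception:
        pass                         # quietly fall back to row-by-row parsing
    results = []
    for row in rows:
        item = parse(row)
        if item:
            results.append(item)
    return results


print(process_vulns([1, 2, 3], lambda x: [i * 10 for i in x] if isinstance(x, list) else None))
# -> [10, 20, 30]
```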
regscale/models/integration_models/snyk.py

@@ -3,15 +3,14 @@ Snyk Scan information
 """

 from datetime import datetime
-from typing import Optional
+from typing import Optional, Union

 from regscale.core.app.application import Application
 from regscale.core.app.logz import create_logger
 from regscale.core.app.utils.app_utils import epoch_to_datetime, get_current_datetime, is_valid_fqdn
-from regscale.
+from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
+from regscale.models import Asset, AssetCategory, AssetStatus, AssetType, ImportValidater, IssueStatus, Vulnerability
 from regscale.models.integration_models.flat_file_importer import FlatFileImporter
-from regscale.models.regscale_models.asset import Asset
-from regscale.models.regscale_models.vulnerability import Vulnerability


 class Snyk(FlatFileImporter):

@@ -20,19 +19,29 @@ class Snyk(FlatFileImporter):
     """

     def __init__(self, **kwargs):
+        self.not_implemented_error = "Unsupported file type for Snyk integration. Only XLSX and JSON are supported."
         self.name = kwargs.get("name")
-        self.vuln_title = "PROBLEM_TITLE"
-        self.project_name = "PROJECT_NAME"
-        self.issue_severity = "ISSUE_SEVERITY"
         self.auto_fixable = "AUTOFIXABLE"
         self.fmt = "%Y-%m-%d"
         self.dt_format = "%Y-%m-%d %H:%M:%S"
-
-        self.project_name
-        self.issue_severity
-        self.vuln_title
-        self.
-
+        if "json" in kwargs.get("file_type", ""):
+            self.project_name = "projectName"
+            self.issue_severity = "severity"
+            self.vuln_title = "title"
+            self.required_headers = [
+                "projectName",
+                "vulnerabilities",
+            ]
+        else:
+            self.project_name = "PROJECT_NAME"
+            self.issue_severity = "ISSUE_SEVERITY"
+            self.vuln_title = "PROBLEM_TITLE"
+            self.required_headers = [
+                self.project_name,
+                self.issue_severity,
+                self.vuln_title,
+                self.auto_fixable,
+            ]
         self.mapping_file = kwargs.get("mappings_path")
         self.disable_mapping = kwargs.get("disable_mapping")
         self.validater = ImportValidater(

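The constructor now chooses between two column vocabularies based on the uploaded file type: the keys of Snyk's JSON output versus the upper-case column names of the XLSX export. Purely as an illustration (not an exhaustive mapping), the two vocabularies used in this change line up as follows:

```python
# Illustration only: how the two Snyk export vocabularies line up in this release.
SNYK_FIELD_NAMES = {
    # logical field     (JSON export key, XLSX export column)
    "project name":    ("projectName", "PROJECT_NAME"),
    "issue severity":  ("severity", "ISSUE_SEVERITY"),
    "issue title":     ("title", "PROBLEM_TITLE"),
}
```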
@@ -40,6 +49,12 @@ class Snyk(FlatFileImporter):
         )
         self.headers = self.validater.parsed_headers
         self.mapping = self.validater.mapping
+        if "json" in kwargs.get("file_type", ""):
+            asset_count = 1
+            vuln_count = len(self.mapping.get_value(self.validater.data, "vulnerabilities", []))
+        else:
+            asset_count = None
+            vuln_count = None
         logger = create_logger()
         self.logger = logger
         super().__init__(

@@ -48,6 +63,8 @@ class Snyk(FlatFileImporter):
             headers=self.headers,
             asset_func=self.create_asset,
             vuln_func=self.create_vuln,
+            asset_count=asset_count,
+            vuln_count=vuln_count,
             extra_headers_allowed=True,
             **kwargs,
         )

@@ -65,7 +82,22 @@ class Snyk(FlatFileImporter):
             self.mapping.get_value(dat, "FIRST_INTRODUCED", datetime.now().time()),
         ).strftime(self.dt_format)

-    def create_asset(self, dat: Optional[dict] = None) -> Asset:
+    def create_asset(self, dat: Optional[dict] = None) -> Union[Asset, IntegrationAsset]:
+        """
+        Create an asset from a row in the Snyk file
+
+        :param Optional[dict] dat: Data row from XLSX file or JSON file, defaults to None
+        :return: RegScale Asset if XLSX, IntegrationAsset if JSON
+        :rtype: Union[Asset, IntegrationAsset]
+        """
+        if "json" in self.attributes.file_type:
+            return self._parse_json_asset(data=dat)
+        elif "xlsx" in self.attributes.file_type:
+            return self._parse_xlsx_asset(dat)
+        else:
+            raise NotImplementedError(self.not_implemented_error)
+
+    def _parse_xlsx_asset(self, dat: Optional[dict] = None) -> Asset:
         """
         Create an asset from a row in the Snyk file

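`create_asset` (and `create_vuln` in the next hunk) is now a thin dispatcher keyed on the uploaded file's extension. A simplified, self-contained sketch of that dispatch, with a hypothetical importer class standing in for `Snyk`:

```python
# Simplified sketch of the file-type dispatch used by create_asset/create_vuln.
# SnykLikeImporter and its return values are hypothetical stand-ins.
class SnykLikeImporter:
    def __init__(self, file_type: str):
        self.file_type = file_type
        self.not_implemented_error = "Only XLSX and JSON are supported."

    def create_asset(self, dat: dict):
        if "json" in self.file_type:
            return self._parse_json_asset(data=dat)
        elif "xlsx" in self.file_type:
            return self._parse_xlsx_asset(dat)
        raise NotImplementedError(self.not_implemented_error)

    def _parse_json_asset(self, data: dict) -> dict:
        return {"name": data.get("projectName"), "source": "json"}

    def _parse_xlsx_asset(self, dat: dict) -> dict:
        return {"name": dat.get("PROJECT_NAME"), "source": "xlsx"}


print(SnykLikeImporter(".json").create_asset({"projectName": "acme/api"}))
# -> {'name': 'acme/api', 'source': 'json'}
```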
@@ -95,7 +127,50 @@ class Snyk(FlatFileImporter):
             }
         )

-    def
+    def _parse_json_asset(self, **kwargs) -> IntegrationAsset:
+        """
+        Parse assets from Snyk json scan data.
+
+        :return: Integration asset
+        :rtype: IntegrationAsset
+        """
+        data = kwargs.pop("data")
+        name = self.extract_host(self.mapping.get_value(data, self.project_name))
+        valid_name = is_valid_fqdn(name)
+        return IntegrationAsset(
+            identifier=name,
+            name=name,
+            status=AssetStatus.Active,
+            asset_category=AssetCategory.Software,
+            is_latest_scan=True,
+            is_authenticated_scan=True,
+            scanning_tool=self.name,
+            asset_type=AssetType.Other,
+            fqdn=name if valid_name else None,
+            system_administrator_id=self.config["userId"],
+            parent_id=self.attributes.parent_id,
+            parent_module=self.attributes.parent_module,
+        )
+
+    def create_vuln(
+        self, dat: Optional[dict] = None, **kwargs
+    ) -> Optional[Union[list[IntegrationFinding], Vulnerability]]:
+        """
+        Create a vulnerability from a row in the Snyk file
+
+        :param Optional[dict] dat: Data row from XLSX or JSON file, defaults to None
+        :raises TypeError: If dat is not a dictionary
+        :return: RegScale Vulnerability object if xlsx or list of IntegrationFindings if JSON
+        :rtype: Optional[Union[list[IntegrationFinding], Vulnerability]]
+        """
+        if "json" in self.attributes.file_type:
+            return self._parse_json_findings(**kwargs)
+        elif "xlsx" in self.attributes.file_type:
+            return self._parse_xlsx_finding(dat, **kwargs)
+        else:
+            raise NotImplementedError(self.not_implemented_error)
+
+    def _parse_xlsx_finding(self, dat: Optional[dict] = None, **_) -> Optional[Vulnerability]:
         """
         Create a vulnerability from a row in the Snyk csv file

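Taken together, `_parse_json_asset` above and `_parse_json_findings` in the next hunk only read a handful of keys from the Snyk JSON report. An illustrative, heavily trimmed payload shape (a real Snyk JSON report carries many more fields, and the CVE id here is only an example):

```python
# Illustrative shape only; not a complete Snyk report schema.
snyk_report = {
    "projectName": "acme/api",
    "vulnerabilities": [
        {
            "id": "SNYK-JS-LODASH-567746",
            "title": "Prototype Pollution",
            "description": "Prototype Pollution in lodash",
            "severity": "high",
            "cvssScore": 7.4,
            "identifiers": {"CVE": ["CVE-2020-8203"]},
            "isUpgradeable": True,
            "isPatchable": False,
        }
    ],
}
```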
@@ -139,6 +214,57 @@ class Snyk(FlatFileImporter):
         )
         return regscale_vuln

+    def _parse_json_findings(self, **kwargs) -> list[IntegrationFinding]:
+        """
+        Create a vulnerability from a row in the Snyk csv file
+
+        :return: List of IntegrationFinding objects
+        :rtype: list[IntegrationFinding]
+        """
+        findings = []
+        vulns = self.mapping.get_value(kwargs.get("data", self.validater.data), "vulnerabilities", [])
+        if not vulns:
+            return findings
+        for dat in vulns:
+            severity = self.finding_severity_map.get(dat.get(self.issue_severity, "Low").title())
+            hostname = self.extract_host(self.mapping.get_value(dat, self.project_name)) or self.extract_host(
+                self.mapping.get_value(self.validater.data, self.project_name)
+            )
+            description = self.mapping.get_value(dat, "description") or self.mapping.get_value(dat, self.vuln_title)
+            solution = self.mapping.get_value(dat, self.auto_fixable)
+            # if auto fixable is not available, check for upgradeable or patchable, this is for .json files
+            if not solution:
+                upgradeable = self.mapping.get_value(dat, "isUpgradeable", False)
+                patchable = self.mapping.get_value(dat, "isPatchable", False)
+                if upgradeable or patchable:
+                    solution = "Upgrade or patch the vulnerable component."
+            cves = ", ".join(self.mapping.get_value(dat, "CVE", ""))
+            if not cves:
+                cves = ", ".join(dat.get("identifiers", {}).get("CVE", []))
+            findings.append(
+                IntegrationFinding(
+                    title=dat.get("title") or description,
+                    description=description,
+                    severity=severity,
+                    status=IssueStatus.Open,
+                    plugin_name=description,
+                    plugin_id=dat.get("id"),
+                    plugin_text=self.mapping.get_value(dat, self.vuln_title),
+                    asset_identifier=hostname,
+                    cve=cves,
+                    cvss_score=dat.get("cvssScore"),
+                    first_seen=self.determine_first_seen(dat),
+                    last_seen=get_current_datetime(),
+                    scan_date=self.attributes.scan_date,
+                    dns=hostname,
+                    vpr_score=None,
+                    remediation=solution,
+                    category="Software",
+                    control_labels=[],
+                )
+            )
+        return findings
+
     @staticmethod
     def extract_host(s: str) -> str:
         """