regscale-cli 6.16.1.0__py3-none-any.whl → 6.16.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (35)
  1. regscale/__init__.py +1 -1
  2. regscale/core/app/internal/login.py +1 -1
  3. regscale/core/app/internal/poam_editor.py +1 -1
  4. regscale/integrations/commercial/__init__.py +2 -2
  5. regscale/integrations/commercial/ad.py +1 -1
  6. regscale/integrations/commercial/grype/__init__.py +3 -0
  7. regscale/integrations/commercial/grype/commands.py +72 -0
  8. regscale/integrations/commercial/grype/scanner.py +390 -0
  9. regscale/integrations/commercial/import_all/import_all_cmd.py +2 -2
  10. regscale/integrations/commercial/opentext/__init__.py +6 -0
  11. regscale/integrations/commercial/opentext/commands.py +77 -0
  12. regscale/integrations/commercial/opentext/scanner.py +449 -85
  13. regscale/integrations/commercial/trivy/__init__.py +5 -0
  14. regscale/integrations/commercial/trivy/commands.py +74 -0
  15. regscale/integrations/commercial/trivy/scanner.py +276 -0
  16. regscale/integrations/commercial/wizv2/utils.py +1 -1
  17. regscale/integrations/jsonl_scanner_integration.py +869 -0
  18. regscale/integrations/public/fedramp/fedramp_common.py +4 -4
  19. regscale/integrations/public/fedramp/inventory_items.py +3 -3
  20. regscale/integrations/scanner_integration.py +172 -41
  21. regscale/models/integration_models/cisa_kev_data.json +20 -5
  22. regscale/models/integration_models/synqly_models/capabilities.json +1 -1
  23. regscale/models/integration_models/tenable_models/integration.py +42 -7
  24. regscale/models/regscale_models/regscale_model.py +1 -1
  25. regscale/models/regscale_models/vulnerability.py +21 -0
  26. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.2.0.dist-info}/METADATA +3 -3
  27. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.2.0.dist-info}/RECORD +32 -27
  28. regscale/integrations/commercial/grype.py +0 -165
  29. regscale/integrations/commercial/opentext/click.py +0 -99
  30. regscale/integrations/commercial/trivy.py +0 -162
  31. /regscale/models/integration_models/{flat_file_importer.py → flat_file_importer/__init__.py} +0 -0
  32. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.2.0.dist-info}/LICENSE +0 -0
  33. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.2.0.dist-info}/WHEEL +0 -0
  34. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.2.0.dist-info}/entry_points.txt +0 -0
  35. {regscale_cli-6.16.1.0.dist-info → regscale_cli-6.16.2.0.dist-info}/top_level.txt +0 -0
regscale/integrations/commercial/trivy/scanner.py (new file)
@@ -0,0 +1,276 @@
+"""
+Trivy Scanner Integration for RegScale.
+
+This module provides integration between Trivy scanner and RegScale,
+allowing you to import Trivy scan results into RegScale as assets and findings.
+"""
+
+import logging
+import os
+from typing import Any, Dict, List, Optional, Union, Tuple, TypeVar
+
+from pathlib import Path
+
+from regscale.core.app.utils.parser_utils import safe_datetime_str
+from regscale.integrations.jsonl_scanner_integration import JSONLScannerIntegration
+from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
+from regscale.models import IssueSeverity, AssetStatus, IssueStatus
+
+logger = logging.getLogger("regscale")
+
+# Define generic types for items that can be written to file
+T = TypeVar("T")
+ItemType = TypeVar("ItemType", IntegrationAsset, IntegrationFinding)
+
+
+class TrivyIntegration(JSONLScannerIntegration):
+    """Class for handling Trivy scanner integration."""
+
+    title: str = "Trivy"
+    asset_identifier_field: str = "otherTrackingNumber"
+    finding_severity_map: Dict[str, Any] = {
+        "CRITICAL": IssueSeverity.Critical.value,
+        "HIGH": IssueSeverity.High.value,
+        "MEDIUM": IssueSeverity.Moderate.value,
+        "LOW": IssueSeverity.Low.value,
+        "UNKNOWN": IssueSeverity.High.value,
+        "NEGLIGIBLE": IssueSeverity.High.value,
+    }
+
+    # Constants for file paths
+    ASSETS_FILE = "./artifacts/trivy_assets.jsonl"
+    FINDINGS_FILE = "./artifacts/trivy_findings.jsonl"
+
+    def __init__(self, *args, **kwargs):
+        """
+        Initialize the TrivyIntegration object.
+
+        :param Any kwargs: Keyword arguments
+        """
+        kwargs["read_files_only"] = True
+        kwargs["file_pattern"] = "*.json"
+        self.disable_mapping = kwargs["disable_mapping"] = True
+        super().__init__(*args, **kwargs)
+
+    def is_valid_file(self, data: Any, file_path: Union[Path, str]) -> Tuple[bool, Optional[Dict[str, Any]]]:
+        """
+        Check if the provided data is a valid Trivy scan result.
+
+        Validates that the data is from a Trivy JSON file with the required structure.
+        Logs a warning with the file path and returns (False, None) if invalid.
+
+        :param Any data: Data parsed from the file (string content when read_files_only is True, or file path otherwise)
+        :param Union[Path, str] file_path: Path to the file being processed
+        :return: Tuple of (is_valid, validated_data) where validated_data is the parsed JSON if valid
+        :rtype: Tuple[bool, Optional[Dict[str, Any]]]
+        """
+
+        # Check Trivy-specific structure
+        if not isinstance(data, dict):
+            logger.warning(f"File {file_path} is not a dict, skipping")
+            return False, None
+
+        if "Results" not in data:
+            logger.warning(f"File {file_path} has no 'Results' key, skipping")
+            return False, None
+
+        if not isinstance(data.get("Results"), list):
+            logger.warning(f"File {file_path} 'Results' is not a list, skipping")
+            return False, None
+
+        if "Metadata" not in data:
+            logger.warning(f"File {file_path} has no 'Metadata' key, skipping")
+            return False, None
+
+        return True, data
+
+    def parse_asset(self, file_path: Union[Path, str], data: Dict[str, Any]) -> IntegrationAsset:
+        """
+        Parse a single asset from Trivy scan data.
+
+        :param Union[Path, str] file_path: Path to the file containing the asset data
+        :param Dict[str, Any] data: The parsed JSON data
+        :return: IntegrationAsset object
+        :rtype: IntegrationAsset
+        """
+        # Convert path to string if it's not already
+        file_path_str = str(file_path)
+
+        # Get metadata and OS information
+        metadata = data.get("Metadata", {})
+        os_data = metadata.get("OS", {})
+
+        # Determine identifier from file name or data
+        if "sha256-" in file_path_str:
+            # Extract the sha256 from the filename
+            base_name = os.path.basename(file_path_str)
+            identifier = "sha256-" + base_name.split("sha256-")[1].split(".json")[0]
+        else:
+            identifier = metadata.get("ImageID", "Unknown")
+
+        # Get artifact name for other tracking number and fqdn
+        artifact_name = data.get("ArtifactName", identifier)
+
+        # Create and return the asset
+        return IntegrationAsset(
+            identifier=identifier,
+            name=identifier,
+            ip_address="0.0.0.0",
+            cpu=0,
+            ram=0,
+            status=AssetStatus.Active,
+            asset_type="Other",
+            asset_category="Software",
+            operating_system=f"{os_data.get('Family', '')} {os_data.get('Name', '')}",
+            notes=f"{os.path.basename(file_path_str)}",
+            other_tracking_number=artifact_name,
+            parent_id=self.plan_id,
+            parent_module="securityplans",
+            fqdn=artifact_name,
+        )
+
+    def _get_findings_data_from_file(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
+        """
+        Extract findings data from Trivy file data.
+
+        :param Dict[str, Any] data: The data from the Trivy file
+        :return: List of finding items
+        :rtype: List[Dict[str, Any]]
+        """
+        if not data or not isinstance(data, dict):
+            return []
+
+        findings = []
+
+        # Process all results
+        for result in data.get("Results", []):
+            if not isinstance(result, dict):
+                continue
+
+            # Extract vulnerabilities from the result
+            vulnerabilities = result.get("Vulnerabilities", [])
+            if not isinstance(vulnerabilities, list):
+                continue
+
+            # Add each vulnerability to the findings list
+            findings.extend(vulnerabilities)
+
+        return findings
+
+    def parse_finding(self, asset_identifier: str, data: Dict[str, Any], item: Dict[str, Any]) -> IntegrationFinding:
+        """
+        Parse a single finding from Trivy scan data.
+
+        :param str asset_identifier: The identifier of the asset this finding belongs to
+        :param Dict[str, Any] data: The parsed JSON data (for metadata)
+        :param Dict[str, Any] item: The finding data
+        :return: IntegrationFinding object
+        :rtype: IntegrationFinding
+        """
+        # Get scan date from the finding or use current time
+        scan_date = safe_datetime_str(data.get("CreatedAt"))
+
+        # Process severity
+        severity_str = item.get("Severity", "UNKNOWN")
+        severity_value = self.finding_severity_map.get(severity_str.upper(), IssueSeverity.High.value)
+        try:
+            severity = IssueSeverity(severity_value)
+        except ValueError:
+            severity = IssueSeverity.High
+
+        # Get CVSS fields
+        cvss_fields = self._get_cvss_score(item)
+
+        # Get data source information
+        data_source = item.get("DataSource", {})
+        plugin_name = data_source.get("Name", self.title)
+        plugin_id = data_source.get("ID", self.title)
+
+        metadata = data.get("Metadata", {})
+        os_family = metadata.get("OS", {}).get("Family", "")
+        os_name = metadata.get("OS", {}).get("Name", "")
+        if os_family and os_name == "unknown":
+            affected_os = "unknown"
+        else:
+            affected_os = f"{os_family} {os_name}"
+
+        # Set image digest from artifact name
+        artifact_name = data.get("ArtifactName", "")
+        image_digest = ""
+        if "@" in artifact_name:
+            image_digest = artifact_name.split("@")[1]
+
+        build_version = (
+            metadata.get("ImageConfig", {}).get("config", {}).get("Labels", {}).get("io.buildah.version", "")
+        )
+        pkg_name = item.get("PkgName", "")
+        cve = item.get("VulnerabilityID", "")
+
+        # Create and return the finding
+        return IntegrationFinding(
+            title=f"{cve}: {pkg_name}" if cve else pkg_name,
+            description=item.get("Description", "No description available"),
+            severity=severity,
+            status=IssueStatus.Open,
+            cvss_v3_score=cvss_fields.get("V3Score"),
+            cvss_v3_vector=cvss_fields.get("V3Vector") or "",
+            cvss_v2_score=cvss_fields.get("V2Score"),
+            cvss_v2_vector=cvss_fields.get("V2Vector") or "",
+            plugin_name=plugin_name,
+            plugin_id=plugin_id,
+            asset_identifier=asset_identifier,
+            cve=cve,
+            first_seen=scan_date,
+            last_seen=scan_date,
+            scan_date=scan_date,
+            date_created=scan_date,
+            category="Software",
+            control_labels=[],
+            installed_versions=item.get("InstalledVersion", ""),
+            affected_os=affected_os,
+            affected_packages=item.get("PkgID", ""),
+            image_digest=image_digest,
+            package_path=item.get("PkgIdentifier", {}).get("PURL", ""),
+            build_version=build_version,
+            fixed_versions=item.get("FixedVersion", ""),
+            fix_status=item.get("Status", ""),
+        )
+
+    @staticmethod
+    def _get_cvss_score(finding: Dict) -> dict:
+        """
+        Get the CVSS v3 and v2 scores and vectors from the cvss data.
+
+        :param Dict finding: The cvss data
+        :return: The CVSS fields
+        :rtype: dict
+        """
+        values = {
+            "V3Score": None,
+            "V2Score": None,
+            "V3Vector": None,
+            "V2Vector": None,
+        }
+
+        if cvs := finding.get("CVSS"):
+            if nvd := cvs.get("nvd"):
+                values["V3Score"] = nvd.get("V3Score", None)
+                values["V3Vector"] = nvd.get("V3Vector", None)
+                values["V2Score"] = nvd.get("V2Score", None)
+                values["V2Vector"] = nvd.get("V2Vector", None)
+            elif redhat := cvs.get("redhat"):
+                values["V3Score"] = redhat.get("V3Score", None)
+                values["V3Vector"] = redhat.get("V3Vector", None)
+                values["V2Score"] = redhat.get("V2Score", None)
+                values["V2Vector"] = redhat.get("V2Vector", None)
+            elif ghsa := cvs.get("ghsa"):
+                values["V3Score"] = ghsa.get("V3Score", None)
+                values["V3Vector"] = ghsa.get("V3Vector", None)
+                values["V2Score"] = ghsa.get("V2Score", None)
+                values["V2Vector"] = ghsa.get("V2Vector", None)
+            elif bitnami := cvs.get("bitnami"):
+                values["V3Score"] = bitnami.get("V3Score", None)
+                values["V3Vector"] = bitnami.get("V3Vector", None)
+                values["V2Score"] = bitnami.get("V2Score", None)
+                values["V2Vector"] = bitnami.get("V2Vector", None)
+        return values
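
For orientation, here is a minimal sketch of the Trivy JSON report shape this scanner consumes. The key names mirror the fields read in the code above; the values are purely illustrative and not taken from a real scan. is_valid_file only requires a dict with a list-valued "Results" key and a "Metadata" key; every other field degrades to a default via .get().

# Illustrative sample only: key names mirror what TrivyIntegration reads above;
# the values are invented for this sketch, not taken from a real scan.
sample_trivy_report = {
    "CreatedAt": "2025-01-01T00:00:00Z",  # mapped to first_seen / last_seen / scan_date
    "ArtifactName": "registry.example.com/app@sha256:abc123",  # other tracking number, fqdn, image digest
    "Metadata": {  # required by is_valid_file
        "ImageID": "sha256:abc123",  # fallback asset identifier
        "OS": {"Family": "alpine", "Name": "3.19"},  # operating_system / affected_os
        "ImageConfig": {"config": {"Labels": {"io.buildah.version": "1.33"}}},  # build_version
    },
    "Results": [  # required by is_valid_file and must be a list
        {
            "Vulnerabilities": [  # flattened into findings by _get_findings_data_from_file
                {
                    "VulnerabilityID": "CVE-2024-0001",
                    "PkgName": "openssl",
                    "PkgID": "openssl@3.1.4-r5",
                    "PkgIdentifier": {"PURL": "pkg:apk/alpine/openssl@3.1.4-r5"},
                    "InstalledVersion": "3.1.4-r5",
                    "FixedVersion": "3.1.4-r6",
                    "Status": "fixed",
                    "Severity": "HIGH",
                    "Description": "Example description.",
                    "DataSource": {"ID": "alpine", "Name": "Alpine Secdb"},
                    "CVSS": {"nvd": {"V3Score": 7.5, "V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H"}},
                }
            ]
        }
    ],
}
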
regscale/integrations/commercial/wizv2/utils.py
@@ -708,7 +708,7 @@ def _sync_compliance(
     }
     sync_framework = framework_mapping.get(framework)
     snake_framework = sync_framework.replace(" ", "_")
-    logger.info(snake_framework)
+    logger.debug(f"{snake_framework=}")
     logger.info("Fetching Wiz compliance report for project ID %s", wiz_project_id)
     report_data = fetch_framework_report(wiz_project_id, snake_framework)
     report_models = []
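
The replaced line switches from logging the raw value at info level to Python's self-documenting f-string form (available since Python 3.8) at debug level, which prints the variable name together with its repr. A minimal illustration, using a hypothetical framework value:

snake_framework = "NIST_800-53_Rev_5"  # hypothetical value for illustration
print(f"{snake_framework=}")  # -> snake_framework='NIST_800-53_Rev_5'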