regscale-cli 6.18.0.0__py3-none-any.whl → 6.19.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of regscale-cli might be problematic.
- regscale/__init__.py +1 -1
- regscale/integrations/api_paginator.py +932 -0
- regscale/integrations/api_paginator_example.py +348 -0
- regscale/integrations/commercial/__init__.py +11 -10
- regscale/integrations/commercial/{qualys.py → qualys/__init__.py} +756 -105
- regscale/integrations/commercial/qualys/scanner.py +1051 -0
- regscale/integrations/commercial/qualys/variables.py +21 -0
- regscale/integrations/commercial/sicura/api.py +1 -0
- regscale/integrations/commercial/stigv2/click_commands.py +36 -8
- regscale/integrations/commercial/stigv2/stig_integration.py +63 -9
- regscale/integrations/commercial/tenablev2/__init__.py +9 -0
- regscale/integrations/commercial/tenablev2/authenticate.py +23 -2
- regscale/integrations/commercial/tenablev2/commands.py +779 -0
- regscale/integrations/commercial/tenablev2/jsonl_scanner.py +1999 -0
- regscale/integrations/commercial/tenablev2/sc_scanner.py +600 -0
- regscale/integrations/commercial/tenablev2/scanner.py +7 -5
- regscale/integrations/commercial/tenablev2/utils.py +21 -4
- regscale/integrations/commercial/tenablev2/variables.py +4 -0
- regscale/integrations/jsonl_scanner_integration.py +523 -142
- regscale/integrations/scanner_integration.py +102 -26
- regscale/integrations/transformer/__init__.py +17 -0
- regscale/integrations/transformer/data_transformer.py +445 -0
- regscale/integrations/transformer/mappings/__init__.py +8 -0
- regscale/integrations/variables.py +2 -0
- regscale/models/__init__.py +5 -2
- regscale/models/integration_models/cisa_kev_data.json +6 -6
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/regscale_models/asset.py +5 -2
- regscale/models/regscale_models/file.py +5 -2
- regscale/models/regscale_models/group.py +2 -1
- regscale/models/regscale_models/user_group.py +1 -1
- regscale/regscale.py +3 -1
- {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/METADATA +1 -1
- {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/RECORD +46 -30
- tests/regscale/core/test_version.py +22 -0
- tests/regscale/integrations/__init__.py +0 -0
- tests/regscale/integrations/test_api_paginator.py +597 -0
- tests/regscale/integrations/test_integration_mapping.py +60 -0
- tests/regscale/integrations/test_issue_creation.py +317 -0
- tests/regscale/integrations/test_issue_due_date.py +46 -0
- tests/regscale/integrations/transformer/__init__.py +0 -0
- tests/regscale/integrations/transformer/test_data_transformer.py +850 -0
- regscale/integrations/commercial/tenablev2/click.py +0 -1641
- {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/LICENSE +0 -0
- {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/WHEEL +0 -0
- {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.18.0.0.dist-info → regscale_cli-6.19.0.1.dist-info}/top_level.txt +0 -0
regscale/integrations/commercial/tenablev2/sc_scanner.py
@@ -0,0 +1,600 @@
+"""
+Module for Tenable SC vulnerability scanning integration.
+"""
+
+import logging
+import queue
+import re
+import tempfile
+from concurrent.futures import ThreadPoolExecutor, wait
+from threading import current_thread, get_ident, get_native_id
+from typing import Any, Iterator, List, Optional, Tuple
+
+from pathlib import Path
+from tenable.sc.analysis import AnalysisResultsIterator
+
+from regscale.core.app.application import Application
+from regscale.core.app.utils.app_utils import epoch_to_datetime
+from regscale.core.app.utils.pickle_file_handler import PickleFileHandler
+from regscale.integrations.commercial.tenablev2.authenticate import gen_tsc
+from regscale.integrations.commercial.tenablev2.utils import get_filtered_severities
+from regscale.integrations.integration_override import IntegrationOverride
+from regscale.integrations.scanner_integration import (
+    IntegrationAsset,
+    IntegrationFinding,
+    ScannerIntegration,
+    issue_due_date,
+)
+from regscale.models import regscale_models
+from regscale.models.integration_models.tenable_models.models import TenableAsset
+from regscale.utils.threading import ThreadSafeCounter
+
+logger = logging.getLogger("regscale")
+
+
+class SCIntegration(ScannerIntegration):
+    """
+    Tenable SC Integration class that is responsible for fetching assets and findings from Tenable
+    """
+
+    ASSETS_FILE = "./artifacts/tenable_sc_assets.jsonl"
+    FINDINGS_FILE = "./artifacts/tenable_sc_findings.jsonl"
+
+    finding_severity_map = {
+        "Info": regscale_models.IssueSeverity.NotAssigned,
+        "Low": regscale_models.IssueSeverity.Low,
+        "Medium": regscale_models.IssueSeverity.Moderate,
+        "High": regscale_models.IssueSeverity.High,
+        "Critical": regscale_models.IssueSeverity.Critical,
+    }
+    # Required fields from ScannerIntegration
+    title = "Tenable SC"
+    asset_identifier_field = "tenableId"
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        """
+        Initializes the SCIntegration class
+
+        :param Tuple args: Additional arguments
+        :param dict kwargs: Additional keyword arguments
+        """
+        super().__init__(*args, **kwargs)
+        self.scan_date = kwargs.get("scan_date")
+        self.plan_id = kwargs.get("plan_id")
+        self.client = None
+        self.closed_count = 0
+        self.batch_size = kwargs.get("batch_size", 1000)  # Default to 1000 if not provided
+
+    def authenticate(self) -> None:
+        """Authenticate to Tenable SC."""
+        self.client = gen_tsc()
+
+    def fetch_assets(self, *args: Any, **kwargs: Any) -> Iterator[IntegrationAsset]:
+        """
+        Fetches assets from SCIntegration
+
+        :param Tuple args: Additional arguments
+        :param dict kwargs: Additional keyword arguments
+        :yields: Iterator[IntegrationAsset]
+        """
+        integration_assets = kwargs.get("integration_assets", [])
+        yield from integration_assets
+
+    def fetch_findings(self, *args: Tuple, **kwargs: dict) -> Iterator[IntegrationFinding]:
+        """
+        Fetches findings from the SCIntegration
+
+        :param Tuple args: Additional arguments
+        :param dict kwargs: Additional keyword arguments
+        :yields: Iterator[IntegrationFinding]
+
+        """
+        integration_findings = kwargs.get("integration_findings", [])
+        yield from integration_findings
+
+    def parse_findings(self, vuln: TenableAsset, integration_mapping: Any) -> List[IntegrationFinding]:
+        """
+        Parses a TenableAsset into an IntegrationFinding object
+
+        :param TenableAsset vuln: The Tenable SC finding
+        :param Any integration_mapping: The IntegrationMapping object
+        :return: A list of IntegrationFinding objects
+        :rtype: List[IntegrationFinding]
+        """
+        findings = []
+        try:
+            severity = self.finding_severity_map.get(vuln.severity.name, regscale_models.IssueSeverity.Low)
+            cve_set = set(vuln.cve.split(",")) if vuln.cve else set()
+            if severity in get_filtered_severities():
+                if cve_set:
+                    for cve in cve_set:
+                        findings.append(
+                            self._create_finding(vuln=vuln, cve=cve, integration_mapping=integration_mapping)
+                        )
+                else:
+                    findings.append(self._create_finding(vuln=vuln, cve="", integration_mapping=integration_mapping))
+        except (KeyError, TypeError, ValueError) as e:
+            logger.error("Error parsing Tenable SC finding: %s", str(e), exc_info=True)
+
+        return findings
+
+    def _create_finding(
+        self, vuln: TenableAsset, cve: str, integration_mapping: IntegrationOverride
+    ) -> IntegrationFinding:
+        """
+        Helper method to create an IntegrationFinding object
+
+        :param TenableAsset vuln: The Tenable SC finding
+        :param str cve: The CVE identifier
+        :param IntegrationOverride integration_mapping: The IntegrationMapping object
+        :return: An IntegrationFinding object
+        :rtype: IntegrationFinding
+        """
+
+        def getter(field_name: str) -> Optional[str]:
+            """
+            Helper method to get the field value from the integration mapping
+
+            :param str field_name: The field name to get the value for
+            :return: The field value
+            :rtype: Optional[str]
+            """
+            if val := integration_mapping.load("tenable_sc", field_name):
+                return getattr(vuln, val, None)
+            return None
+
+        validated_match = integration_mapping.field_map_validation(obj=vuln, model_type="asset")
+        asset_identifier = validated_match or vuln.dnsName or vuln.dns or vuln.ip
+        cvss_scores = self.get_cvss_scores(vuln)
+        severity = self.finding_severity_map.get(vuln.severity.name, regscale_models.IssueSeverity.Low)
+
+        installed_versions_str = ""
+        fixed_versions_str = ""
+        package_path_str = ""
+
+        if "Installed package" in vuln.pluginText:
+            installed_versions = re.findall(r"Installed package\s*:\s*(\S+)", vuln.pluginText)
+            installed_versions_str = ", ".join(installed_versions)
+        if "Fixed package" in vuln.pluginText:
+            fixed_versions = re.findall(r"Fixed package\s*:\s*(\S+)", vuln.pluginText)
+            fixed_versions_str = ", ".join(fixed_versions)
+        if "Path" in vuln.pluginText:
+            package_path = re.findall(r"Path\s*:\s*(\S+)", vuln.pluginText)
+            package_path_str = ", ".join(package_path)
+        if "Installed version" in vuln.pluginText:
+            installed_versions = re.findall(r"Installed version\s*:\s*(.+)", vuln.pluginText)
+            installed_versions_str = ", ".join(installed_versions)
+        if "Fixed version" in vuln.pluginText:
+            fixed_versions = re.findall(r"Fixed version\s*:\s*(.+)", vuln.pluginText)
+            fixed_versions_str = ", ".join(fixed_versions)
+
+        first_seen = epoch_to_datetime(vuln.firstSeen) if vuln.firstSeen else self.scan_date
+        return IntegrationFinding(
+            control_labels=[],  # Add an empty list for control_labels
+            category="Tenable SC Vulnerability",  # Add a default category
+            dns=vuln.dnsName,
+            title=getter("title") or f"{cve}: {vuln.synopsis}" if cve else (vuln.synopsis or vuln.pluginName),
+            description=getter("description") or (vuln.description or vuln.pluginInfo),
+            severity=severity,
+            status=regscale_models.IssueStatus.Open,  # Findings of > Low are considered as FAIL
+            asset_identifier=asset_identifier,
+            external_id=vuln.pluginID,  # Weakness Source Identifier
+            first_seen=first_seen,
+            last_seen=epoch_to_datetime(vuln.lastSeen),
+            date_created=first_seen,
+            date_last_updated=epoch_to_datetime(vuln.lastSeen),
+            recommendation_for_mitigation=vuln.solution,
+            cve=cve,
+            cvss_v3_score=cvss_scores.get("cvss_v3_base_score", 0.0),
+            cvss_score=cvss_scores.get("cvss_v3_base_score", 0.0),
+            cvss_v3_vector=vuln.cvssV3Vector,
+            cvss_v2_score=cvss_scores.get("cvss_v2_base_score", 0.0),
+            cvss_v2_vector=vuln.cvssVector,
+            vpr_score=float(vuln.vprScore) if vuln.vprScore else None,
+            comments=vuln.cvssV3Vector,
+            plugin_id=vuln.pluginID,
+            plugin_name=vuln.pluginName,
+            rule_id=vuln.pluginID,
+            rule_version=vuln.pluginName,
+            basis_for_adjustment="Tenable SC import",
+            vulnerability_type="Tenable SC Vulnerability",
+            vulnerable_asset=vuln.dnsName,
+            build_version="",
+            affected_os=vuln.operatingSystem,
+            affected_packages=vuln.pluginName,
+            package_path=package_path_str,
+            installed_versions=installed_versions_str,
+            fixed_versions=fixed_versions_str,
+            fix_status="",
+            scan_date=self.scan_date,
+            due_date=issue_due_date(
+                severity=severity, created_date=first_seen, title="tenable", config=self.app.config
+            ),
+        )
+
+    def get_cvss_scores(self, vuln: TenableAsset) -> dict:
+        """
+        Returns the CVSS score for the finding
+
+        :param TenableAsset vuln: The Tenable SC finding
+        :return: The CVSS score
+        :rtype: float
+        """
+        res = {}
+        try:
+            res["cvss_v3_base_score"] = float(vuln.cvssV3BaseScore) if vuln.cvssV3BaseScore else 0.0
+            res["cvss_v2_base_score"] = float(vuln.baseScore) if vuln.baseScore else 0.0
+        except (ValueError, TypeError):
+            res["cvss_v3_base_score"] = 0.0
+            res["cvss_v2_base_score"] = 0.0
+
+        return res
+
+    def to_integration_asset(self, asset: TenableAsset, **kwargs: dict) -> IntegrationAsset:
+        """Converts a TenableAsset object to an IntegrationAsset object
+
+        :param TenableAsset asset: The Tenable SC asset
+        :param dict **kwargs: Additional keyword arguments
+        :return: An IntegrationAsset object
+        :rtype: IntegrationAsset
+        """
+        app = kwargs.get("app")
+        config = app.config
+        override = kwargs.get("override")
+
+        validated_match = override.field_map_validation(obj=asset, model_type="asset")
+        asset_identifier = validated_match or asset.dnsName or asset.dns or asset.ip
+        name = asset.dnsName or asset.ip
+
+        return IntegrationAsset(
+            name=name,
+            identifier=asset_identifier,
+            ip_address=asset.ip,
+            mac_address=asset.macAddress,
+            asset_owner_id=config["userId"],
+            status="Active (On Network)" if asset.family.type else "Off-Network",
+            asset_type="Other",
+            asset_category="Hardware",
+        )
+
+    def is_empty(self, file_path: Path) -> bool:
+        """
+        Check if the file is empty.
+
+        :param Path file_path: The path to the file
+        :return: True if the file is empty, False otherwise
+        :rtype: bool
+        """
+        try:
+            return file_path.stat().st_size == 0
+        except FileNotFoundError:
+            return True
+
+    def fetch_vulns_by_query_id(self, query_id: int) -> None:
+        """
+        Fetch vulnerabilities from Tenable SC by query ID and sync to RegScale
+
+        :param int query_id: Tenable SC query ID to retrieve via API
+        """
+        # Ensure authentication
+        self._ensure_authenticated()
+
+        # Log query information
+        logger.info(f"Fetching vulnerabilities from Tenable SC using query ID: {query_id}")
+        logger.info(f"Using batch size of {self.batch_size} for processing")
+
+        # Get vulnerability iterator from Tenable SC
+        vulns = self.client.analysis.vulns(query_id=query_id)
+
+        # Process in temporary directory to avoid disk space issues
+        with tempfile.TemporaryDirectory() as temp_dir:
+            logger.info(f"Saving Tenable SC data to disk: {temp_dir}")
+
+            # Process data and get counts
+            assets_count, findings_count = self._process_and_save_data(vulns, temp_dir)
+
+            # Sync to RegScale
+            self._sync_processed_data_to_regscale(temp_dir, assets_count, findings_count)
+
+    def _ensure_authenticated(self) -> None:
+        """
+        Ensure client is authenticated to Tenable SC.
+
+        :raises ValueError: If authentication fails
+        """
+        if not self.client:
+            self.authenticate()
+
+        if not self.client:
+            raise ValueError("Failed to authenticate to Tenable SC")
+
+    def _process_and_save_data(self, vulns: AnalysisResultsIterator, temp_dir: str) -> Tuple[int, int]:
+        """
+        Process vulnerability data and save to temporary directory.
+
+        :param AnalysisResultsIterator vulns: Vulnerability iterator
+        :param str temp_dir: Temporary directory path
+        :return: Tuple of (assets_count, findings_count)
+        :rtype: Tuple[int, int]
+        """
+        return self.consume_iterator_to_file(iterator=vulns, dir_path=Path(temp_dir))
+
+    def _sync_processed_data_to_regscale(self, temp_dir: str, assets_count: int, findings_count: int) -> None:
+        """
+        Sync processed data to RegScale.
+
+        :param str temp_dir: Temporary directory path
+        :param int assets_count: Number of assets processed
+        :param int findings_count: Number of findings processed
+        :raises IndexError: If data processing fails
+        """
+        try:
+            # Get iterables from disk
+            iterables = self.tenable_dir_to_tuple_generator(Path(temp_dir))
+
+            # Sync assets
+            self.sync_assets(
+                plan_id=self.plan_id,
+                integration_assets=(asset for sublist in iterables[0] for asset in sublist),
+                asset_count=assets_count,
+            )
+
+            # Sync findings
+            self.sync_findings(
+                plan_id=self.plan_id,
+                integration_findings=(finding for sublist in iterables[1] for finding in sublist),
+                finding_count=findings_count,
+            )
+
+            logger.info(f"Successfully synced {assets_count} assets and {findings_count} findings")
+        except IndexError as ex:
+            logger.error(f"Error processing Tenable SC data: {str(ex)}")
+            raise
+
+    def consume_iterator_to_file(self, iterator: AnalysisResultsIterator, dir_path: Path) -> Tuple[int, int]:
+        """
+        Consume an iterator and write the results to a file
+
+        :param AnalysisResultsIterator iterator: Tenable SC iterator
+        :param Path dir_path: The directory to save the pickled files
+        :return: The total count of assets and findings processed
+        :rtype: Tuple[int, int]
+        """
+        app = Application()
+        logger.info("Consuming Tenable SC iterator...")
+        override = IntegrationOverride(app)
+
+        # Initialize counters and thread tracking
+        counters = self._initialize_thread_safe_counters()
+        process_queue = queue.Queue()
+        futures_list = []
+
+        # Process data in batches using threads
+        with ThreadPoolExecutor(max_workers=5) as executor:
+            futures_list = self._process_iterator_in_batches(
+                iterator=iterator,
+                executor=executor,
+                process_queue=process_queue,
+                counters=counters,
+                app=app,
+                dir_path=dir_path,
+                override=override,
+            )
+
+            # Collect results from all threads
+            asset_count, finding_count = self._collect_thread_results(futures_list)
+
+        if counters["total"].value == 0:
+            logger.warning("No Tenable SC data found.")
+
+        return asset_count, finding_count
+
+    def _initialize_thread_safe_counters(self) -> dict:
+        """
+        Initialize thread-safe counters for processing.
+
+        :return: Dictionary of counter objects
+        :rtype: dict
+        """
+        return {"total": ThreadSafeCounter(), "page": ThreadSafeCounter(), "record": ThreadSafeCounter()}
+
+    def _process_iterator_in_batches(
+        self,
+        iterator: AnalysisResultsIterator,
+        executor: ThreadPoolExecutor,
+        process_queue: queue.Queue,
+        counters: dict,
+        app: Application,
+        dir_path: Path,
+        override: IntegrationOverride,
+    ) -> List:
+        """
+        Process an iterator in batches, submitting each batch to the executor.
+
+        :param AnalysisResultsIterator iterator: The data iterator
+        :param ThreadPoolExecutor executor: Thread pool executor
+        :param queue.Queue process_queue: Queue for items to process
+        :param dict counters: Thread-safe counters
+        :param Application app: Application instance
+        :param Path dir_path: Directory to save files
+        :param IntegrationOverride override: Integration override
+        :return: List of futures
+        :rtype: List
+        """
+        futures = []
+
+        for item in iterator:
+            # Add item to queue and update counters
+            counters["total"].increment()
+            process_queue.put(item)
+            counters["record"].increment()
+
+            # When we've accumulated a full page of data, process it
+            if counters["record"].value == len(iterator.page):
+                counters["page"].increment()
+
+                # Extract items from queue
+                items = self._extract_items_from_queue(process_queue, len(iterator.page))
+
+                # Submit batch for processing
+                futures.append(
+                    executor.submit(
+                        self.process_sc_chunk,
+                        app=app,
+                        vulns=items,
+                        page=counters["page"].value,
+                        dir_path=dir_path,
+                        override=override,
+                    )
+                )
+
+                # Reset record counter for next batch
+                counters["record"].set(0)
+
+        return futures
+
+    def _extract_items_from_queue(self, queue_obj: queue.Queue, max_items: int) -> List[Any]:
+        """
+        Extract up to max_items from a queue.
+
+        :param queue.Queue queue_obj: Queue to extract from
+        :param int max_items: Maximum number of items to extract
+        :return: List of extracted items
+        :rtype: List[Any]
+        """
+        items = []
+
+        for _ in range(max_items):
+            if not queue_obj.empty():
+                items.append(queue_obj.get())
+            else:
+                break
+
+        return items
+
+    def _collect_thread_results(self, futures: List) -> Tuple[int, int]:
+        """
+        Collect results from completed futures.
+
+        :param List futures: List of futures to collect from
+        :return: Counts of assets and findings
+        :rtype: Tuple[int, int]
+        """
+        # Wait for all threads to complete
+        wait(futures)
+
+        # Collect results
+        asset_count = 0
+        finding_count = 0
+
+        for future in futures:
+            findings, assets = future.result()
+            finding_count += findings
+            asset_count += assets
+
+        return asset_count, finding_count
+
+    def process_sc_chunk(self, **kwargs) -> Tuple[int, int]:
+        """
+        Process Tenable SC chunk
+
+        :param kwargs: Keyword arguments
+        :return: Tuple of findings and assets
+        :rtype: Tuple[int, int]
+        """
+        integration_mapping = kwargs.get("override")
+        vulns = kwargs.get("vulns")
+        dir_path = kwargs.get("dir_path")
+        page_num = kwargs.get("page")
+
+        # If no vulnerabilities, return early
+        if not vulns:
+            return (0, 0)
+
+        # Set up file handler
+        generated_file_name = f"tenable_scan_page_{page_num}.pkl"
+        pickled_file_handler = PickleFileHandler(str(dir_path / generated_file_name))
+
+        # Process vulnerabilities into findings and assets
+        findings, assets = self._process_vulnerability_data(vulns, integration_mapping, kwargs)
+
+        # Write results to file
+        pickled_file_handler.write({"assets": assets, "findings": findings})
+
+        # Log progress
+        thread = current_thread()
+        logger.info(
+            "Submitting %i findings and %i assets to the CLI Job Queue from Tenable SC Page %i...",
+            len(findings),
+            len(assets),
+            page_num,
+        )
+        logger.debug(f"Completed thread: name={thread.name}, ident={get_ident()}, id={get_native_id()}")
+
+        return (len(findings), len(assets))
+
+    def _process_vulnerability_data(
+        self, vulns: List[dict], integration_mapping: IntegrationOverride, kwargs: dict
+    ) -> Tuple[List, List]:
+        """
+        Process vulnerability data into findings and assets.
+
+        :param List[dict] vulns: List of vulnerability dictionaries
+        :param IntegrationOverride integration_mapping: Integration mapping
+        :param dict kwargs: Additional parameters
+        :return: Tuple of (findings, assets)
+        :rtype: Tuple[List, List]
+        """
+        # Convert to TenableAsset objects
+        tenable_vulns = [TenableAsset(**vuln) for vuln in vulns]
+
+        # Ensure DNS name is set
+        for vuln in tenable_vulns:
+            if not vuln.dnsName:
+                vuln.dnsName = vuln.ip
+
+        # Generate findings and assets
+        findings = []
+        assets = set()  # Use a set to track unique asset names
+        asset_objects = []
+
+        for vuln in tenable_vulns:
+            # Add findings for this vulnerability
+            findings.extend(self.parse_findings(vuln=vuln, integration_mapping=integration_mapping))
+
+            # Add asset if not already processed
+            if vuln.dnsName not in assets:
+                assets.add(vuln.dnsName)
+                asset_objects.append(self.to_integration_asset(vuln, **kwargs))
+
+        return findings, asset_objects
+
+    def tenable_dir_to_tuple_generator(self, dir_path: Path) -> Tuple[Any, Any]:
+        """
+        Generate a tuple of chained generators for Tenable directories.
+
+        :param Path dir_path: Directory path containing pickled files
+        :return: Tuple of asset and finding generators
+        :rtype: Tuple[Any, Any]
+        """
+        from itertools import chain
+
+        assets_gen = chain.from_iterable(
+            (dat["assets"] for dat in PickleFileHandler(file).read()) for file in dir_path.iterdir()
+        )
+        findings_gen = chain.from_iterable(
+            (dat["findings"] for dat in PickleFileHandler(file).read()) for file in dir_path.iterdir()
+        )
+
+        return assets_gen, findings_gen
+
+    def fetch_vulns_query(self, query_id: int) -> None:
+        """
+        Class method to fetch and sync vulnerabilities from Tenable SC by query ID
+
+        :param int query_id: Tenable SC query ID
+        :raises ValueError: If authentication to Tenable SC fails
+        :raises Exception: If an error occurs during the fetch process
+        """
+        logger.info(f"Fetching vulnerabilities from Tenable SC for query ID {query_id} and plan ID {self.plan_id}")
+        self.fetch_vulns_by_query_id(query_id=query_id)
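For orientation, a minimal usage sketch of the new SCIntegration class added above; the plan ID, query ID, and scan date below are placeholders, and the real wiring lives in the new tenablev2/commands.py, which may pass additional options.

    # Hypothetical driver for the Tenable SC scanner class (values are illustrative only)
    from regscale.integrations.commercial.tenablev2.sc_scanner import SCIntegration

    integration = SCIntegration(plan_id=123, scan_date="2025-01-01", batch_size=1000)
    # Authenticates via gen_tsc(), pages the query results in a thread pool, pickles each
    # page to a temporary directory, then syncs assets and findings to RegScale.
    integration.fetch_vulns_query(query_id=456)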
regscale/integrations/commercial/tenablev2/scanner.py
@@ -6,9 +6,9 @@ import datetime
 import json
 import linecache
 import logging
-from pathlib import Path
 from typing import Any, Dict, Iterator, List, Optional, Tuple
 
+from pathlib import Path
 from tenable.errors import TioExportsError
 
 from regscale.core.app.utils.app_utils import get_current_datetime
@@ -29,6 +29,8 @@ from regscale.models import regscale_models
 
 logger = logging.getLogger("regscale")
 
+ARTIFACTS_PATH = "./artifacts"
+
 
 class TenableIntegration(ScannerIntegration):
     """Integration class for Tenable vulnerability scanning."""
@@ -66,9 +68,9 @@ class TenableIntegration(ScannerIntegration):
         """
         tags: List[Tuple[str, str]] = kwargs.get("tags", [])
         # Create artifacts directory if not exist
-        Path.mkdir(Path(
+        Path.mkdir(Path(ARTIFACTS_PATH), exist_ok=True, parents=True)
         current_datetime = datetime.datetime.now().strftime("%Y%m%d%H")
-        cache_file = Path(
+        cache_file = Path(ARTIFACTS_PATH) / Path(f"tenable_assets_{self.plan_id}_{current_datetime}.json")
 
         if (
             cache_file.exists()
@@ -355,9 +357,9 @@ class TenableIntegration(ScannerIntegration):
         """
         plan_id: int = int(kwargs.get("plan_id", self.plan_id))
         tags: List[Tuple[str, str]] = kwargs.get("tags", [])
-        Path.mkdir(Path(
+        Path.mkdir(Path(ARTIFACTS_PATH), exist_ok=True, parents=True)
         current_datetime = datetime.datetime.now().strftime("%Y%m%d%H")
-        cache_file = Path(
+        cache_file = Path(ARTIFACTS_PATH) / Path(f"tenable_findings_{self.plan_id}_{current_datetime}.json")
 
         if (
             cache_file.exists()
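The two hunks above introduce a shared ARTIFACTS_PATH constant and an hour-granular cache file name. A small standalone sketch of the resulting naming scheme, with a placeholder plan ID:

    import datetime
    from pathlib import Path

    ARTIFACTS_PATH = "./artifacts"
    plan_id = 123  # placeholder security plan ID, not from the release
    current_datetime = datetime.datetime.now().strftime("%Y%m%d%H")  # e.g. "2025010614"
    cache_file = Path(ARTIFACTS_PATH) / Path(f"tenable_assets_{plan_id}_{current_datetime}.json")
    print(cache_file)  # artifacts/tenable_assets_123_2025010614.json
    # The surrounding code checks cache_file.exists(), so a file written earlier in the
    # same hour appears to be reused rather than re-pulled from Tenable.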
regscale/integrations/commercial/tenablev2/utils.py
@@ -62,10 +62,27 @@ def get_filtered_severities() -> List[regscale_models.IssueSeverity]:
             regscale_models.IssueSeverity.High,
             regscale_models.IssueSeverity.Critical,
         ],
-        "moderate": [
-
-
-
+        "moderate": [
+            regscale_models.IssueSeverity.NotAssigned,
+            regscale_models.IssueSeverity.Moderate,
+            regscale_models.IssueSeverity.High,
+            regscale_models.IssueSeverity.Critical,
+        ],
+        "medium": [
+            regscale_models.IssueSeverity.NotAssigned,
+            regscale_models.IssueSeverity.Moderate,
+            regscale_models.IssueSeverity.High,
+            regscale_models.IssueSeverity.Critical,
+        ],
+        "high": [
+            regscale_models.IssueSeverity.NotAssigned,
+            regscale_models.IssueSeverity.High,
+            regscale_models.IssueSeverity.Critical,
+        ],
+        "critical": [
+            regscale_models.IssueSeverity.NotAssigned,
+            regscale_models.IssueSeverity.Critical,
+        ],
     }
     return severity_map.get(
         severity_filter,
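For illustration, the alias behavior introduced above can be reproduced standalone; this is a copy of the new mapping entries from the hunk, not an import of the module-private dict inside get_filtered_severities():

    from regscale.models import regscale_models as rm

    # "moderate" and "medium" are aliases, and each level keeps everything
    # at or above it (plus NotAssigned).
    severity_map = {
        "moderate": [rm.IssueSeverity.NotAssigned, rm.IssueSeverity.Moderate, rm.IssueSeverity.High, rm.IssueSeverity.Critical],
        "medium": [rm.IssueSeverity.NotAssigned, rm.IssueSeverity.Moderate, rm.IssueSeverity.High, rm.IssueSeverity.Critical],
        "high": [rm.IssueSeverity.NotAssigned, rm.IssueSeverity.High, rm.IssueSeverity.Critical],
        "critical": [rm.IssueSeverity.NotAssigned, rm.IssueSeverity.Critical],
    }
    assert severity_map["moderate"] == severity_map["medium"]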
regscale/integrations/commercial/tenablev2/variables.py
@@ -11,7 +11,11 @@ class TenableVariables(metaclass=RsVariablesMeta):
     """
 
     # Define class-level attributes with type annotations and examples
+    # Tenable.io variables
     tenableAccessKey: RsVariableType(str, "xxxxxxxxxxxxxxxxxxxxxx", sensitive=True)  # type: ignore # noqa: F722,F821
     tenableSecretKey: RsVariableType(str, "xxxxxxxxxxxxxxxxxxxxxx", sensitive=True)  # type: ignore # noqa: F722,F821
     tenableUrl: RsVariableType(str, "https://cloud.tenable.com")  # type: ignore # noqa: F722,F821
     tenableMinimumSeverityFilter: RsVariableType(str, "critical")  # type: ignore # noqa: F722,F821
+
+    # Tenable.sc variables
+    tenableBatchSize: RsVariableType(int, "1000", default=1000, required=False)  # type: ignore # noqa: F722,F821
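A hedged sketch of how the new tenableBatchSize setting might be consumed, assuming RsVariablesMeta resolves the annotation to a plain class attribute at runtime; the released CLI may instead resolve it through the RegScale configuration, and the plan ID below is a placeholder.

    from regscale.integrations.commercial.tenablev2.variables import TenableVariables
    from regscale.integrations.commercial.tenablev2.sc_scanner import SCIntegration

    # Assumption: the metaclass exposes the configured value (or the 1000 default) here.
    batch_size = TenableVariables.tenableBatchSize
    integration = SCIntegration(plan_id=123, batch_size=batch_size)  # placeholder plan ID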