regscale-cli 6.16.3.0-py3-none-any.whl → 6.16.4.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of regscale-cli might be problematic.
- regscale/__init__.py +1 -1
- regscale/core/app/internal/control_editor.py +26 -2
- regscale/core/app/internal/model_editor.py +39 -26
- regscale/integrations/commercial/grype/scanner.py +37 -29
- regscale/integrations/commercial/opentext/commands.py +2 -0
- regscale/integrations/commercial/opentext/scanner.py +45 -31
- regscale/integrations/commercial/qualys.py +3 -1
- regscale/integrations/commercial/sicura/commands.py +9 -14
- regscale/integrations/commercial/tenablev2/click.py +25 -13
- regscale/integrations/commercial/tenablev2/scanner.py +12 -3
- regscale/integrations/commercial/trivy/scanner.py +14 -6
- regscale/integrations/commercial/wizv2/click.py +15 -37
- regscale/integrations/jsonl_scanner_integration.py +120 -16
- regscale/integrations/public/fedramp/click.py +8 -8
- regscale/integrations/public/fedramp/fedramp_cis_crm.py +499 -106
- regscale/integrations/public/fedramp/ssp_logger.py +2 -9
- regscale/integrations/scanner_integration.py +14 -9
- regscale/models/integration_models/cisa_kev_data.json +39 -8
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/integration_models/tenable_models/integration.py +23 -3
- regscale/models/regscale_models/control_implementation.py +18 -0
- regscale/models/regscale_models/control_objective.py +2 -1
- regscale/models/regscale_models/facility.py +10 -26
- regscale/models/regscale_models/functional_roles.py +38 -0
- regscale/models/regscale_models/issue.py +3 -1
- regscale/models/regscale_models/parameter.py +21 -3
- regscale/models/regscale_models/profile.py +22 -0
- regscale/models/regscale_models/profile_mapping.py +48 -3
- regscale/models/regscale_models/regscale_model.py +2 -0
- regscale/models/regscale_models/risk.py +38 -30
- regscale/models/regscale_models/security_plan.py +1 -0
- regscale/models/regscale_models/supply_chain.py +1 -1
- regscale/models/regscale_models/user.py +16 -2
- regscale/utils/threading/__init__.py +1 -0
- regscale/utils/threading/threadsafe_list.py +10 -0
- regscale/utils/threading/threadsafe_set.py +116 -0
- {regscale_cli-6.16.3.0.dist-info → regscale_cli-6.16.4.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.16.3.0.dist-info → regscale_cli-6.16.4.0.dist-info}/RECORD +42 -40
- {regscale_cli-6.16.3.0.dist-info → regscale_cli-6.16.4.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.16.3.0.dist-info → regscale_cli-6.16.4.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.16.3.0.dist-info → regscale_cli-6.16.4.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.16.3.0.dist-info → regscale_cli-6.16.4.0.dist-info}/top_level.txt +0 -0
regscale/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = "6.16.3.0"
+__version__ = "6.16.4.0"
regscale/core/app/internal/control_editor.py
CHANGED
@@ -20,6 +20,7 @@ if TYPE_CHECKING:
 from openpyxl import Workbook, load_workbook
 from openpyxl.styles import Alignment, PatternFill, Protection
 from openpyxl.worksheet.datavalidation import DataValidation
+import warnings


 from regscale.core.app.logz import create_logger
@@ -48,6 +49,12 @@ def control_editor():
     """
     Performs actions on Control Editor Feature to edit controls to RegScale.
     """
+    warnings.filterwarnings("always", category=DeprecationWarning)
+    warnings.warn(
+        "Control Editor is deprecated and will be removed in a future release. Use `regscale model` with the `--model control` argument instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )


     # Get data and pull into Excel worksheets.
@@ -68,7 +75,13 @@ def generate_data_download(regscale_id: int, regscale_module: str, path: Path):
     This function will build and populate a spreadsheet of all control implementations
     with the selected RegScale Parent Id and RegScale Module.
     """
-
+    warnings.filterwarnings("always", category=DeprecationWarning)
+    warnings.warn(
+        "Control Editor is deprecated and will be removed in a future release. Use `regscale model generate --model control` instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    data_load(regscale_id, regscale_module, path)


 def data_load(parent_id: int, parent_module: str, path: Path) -> None:
@@ -359,8 +372,13 @@ def _fetch_implementations(api: "Api", parent_id: int, parent_module: str) -> "p
 def generate_db_update(path: Path, skip_prompt: bool):
     """
     This function will check changes made to spreadsheet and upload any changes made to RegScale.
-
     """
+    warnings.filterwarnings("always", category=DeprecationWarning)
+    warnings.warn(
+        "Control Editor is deprecated and will be removed in a future release. Use `regscale model load --model control` instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
     db_update(path, skip_prompt)


@@ -514,6 +532,12 @@ def build_implementation(i: dict, regscale_parent_id: int, regscale_parent_modul
     )
 def generate_delete_file(path: Path):
     """This command will delete files used during the Control editing process."""
+    warnings.filterwarnings("always", category=DeprecationWarning)
+    warnings.warn(
+        "Control Editor is deprecated and will be removed in a future release. Use `regscale model` with the `--model control` argument instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
     delete_file(path)


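All five Control Editor entry points now emit the same deprecation pattern: re-enable the DeprecationWarning filter (Python suppresses that category by default) and warn with stacklevel=2 so the message points at the caller rather than the wrapper. A minimal standalone sketch of the pattern — the function name here is hypothetical, not part of the package:

import warnings

def legacy_entry_point():
    """Stand-in for a deprecated CLI command (hypothetical name)."""
    # Python hides DeprecationWarning by default; "always" makes it visible to CLI users.
    warnings.filterwarnings("always", category=DeprecationWarning)
    warnings.warn(
        "legacy_entry_point is deprecated; use the replacement command instead.",
        DeprecationWarning,
        stacklevel=2,  # attribute the warning to the caller's line, not this function
    )

legacy_entry_point()  # emits a DeprecationWarning pointing at this call site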
regscale/core/app/internal/model_editor.py
CHANGED
@@ -328,7 +328,6 @@ def upload_data(path: Path, obj_type: str) -> None:
     :return: None
     :rtype: None
     """
-    import numpy as np  # Optimize import performance
     import pandas as pd

     app = Application()
@@ -356,30 +355,25 @@ def upload_data(path: Path, obj_type: str) -> None:
         if df1.equals(df2):
             error_and_exit("No differences detected.")

-        [19 removed lines not preserved in the source rendering]
-        app.logger.info(
-            "Please check differences.txt file located in %s to see changes made.",
-            path,
-        )
-        upload_existing_data(app, api, path, obj_type, all_workbook_filename)
+        app.logger.warning("Differences found!")
+        # Need to strip out any net new rows before doing this comparison
+        df3 = strip_any_net_new_rows(app, df2, all_workbook_filename, obj_type, path, new_workbook_filename)
+        try:
+            changes = compare_dataframes(df1, df3)
+        except ValueError:
+            changes = compare_dataframes(df1, df2)
+        changes.to_csv(
+            os.path.join(path, DIFFERENCES_FILE),
+            header=True,
+            index=True,
+            sep=" ",
+            mode="w+",
+        )
+        app.logger.info(
+            "Please check differences.txt file located in %s to see changes made.",
+            path,
+        )
+        upload_existing_data(app, api, path, obj_type, all_workbook_filename)
     else:
         app.logger.info("No files found for the specified type to load to RegScale.")
         return app.logger.info(
@@ -388,7 +382,26 @@ def upload_data(path: Path, obj_type: str) -> None:
     )


-
+def compare_dataframes(df1: "pd.DataFrame", df2: "pd.DataFrame") -> "pd.DataFrame":
+    """
+    Compare two DataFrames and return a DataFrame with the differences.
+
+    :param pd.DataFrame df1: The first DataFrame to compare
+    :param pd.DataFrame df2: The second DataFrame to compare
+    :return: A DataFrame with the differences between the two DataFrames
+    :rtype: pd.DataFrame
+    """
+    import numpy as np
+    import pandas as pd
+
+    diff_mask = (df1 != df2) & ~(df1.isnull() & df2.isnull())
+    ne_stacked = diff_mask.stack()
+    changed = ne_stacked[ne_stacked]
+    changed.index.names = ["Id", "Column"]
+    difference_locations = np.nonzero(diff_mask)
+    changed_from = df1.values[difference_locations]
+    changed_to = df2.values[difference_locations]
+    return pd.DataFrame({"From": changed_from, "To": changed_to}, index=changed.index)


 @model.command(name="delete_files")
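The new compare_dataframes helper builds a boolean mask of changed cells, stacks it into (row, column) pairs, and reads the before/after values via np.nonzero. A small standalone sketch of the same logic with hypothetical workbook data:

import numpy as np
import pandas as pd

def compare_dataframes(df1, df2):
    # Same approach as the diff above: True where values differ and are not both null.
    diff_mask = (df1 != df2) & ~(df1.isnull() & df2.isnull())
    stacked = diff_mask.stack()
    changed = stacked[stacked]
    changed.index.names = ["Id", "Column"]
    rows, cols = np.nonzero(diff_mask.to_numpy())
    return pd.DataFrame({"From": df1.values[rows, cols], "To": df2.values[rows, cols]}, index=changed.index)

before = pd.DataFrame({"status": ["Open", "Closed"], "owner": ["alice", "bob"]})
after = pd.DataFrame({"status": ["Open", "Closed"], "owner": ["alice", "carol"]})
print(compare_dataframes(before, after))
#           From     To
# Id Column
# 1  owner   bob  carol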
regscale/integrations/commercial/grype/scanner.py
CHANGED
@@ -17,7 +17,7 @@ from pathlib import Path

 from regscale.core.app.utils.parser_utils import safe_datetime_str
 from regscale.integrations.jsonl_scanner_integration import JSONLScannerIntegration
-from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
+from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding, issue_due_date
 from regscale.models import IssueSeverity, AssetStatus, IssueStatus

 logger = logging.getLogger("regscale")
@@ -143,53 +143,61 @@ class GrypeIntegration(JSONLScannerIntegration):
         finding_info = self._extract_finding_info(item, data)
         artifact_info = self._extract_artifact_info(item)
         severity_info = self._determine_severity(finding_info)
-        cvss_fields = self._get_cvss_fields(finding_info
-
+        cvss_fields = self._get_cvss_fields(finding_info.get("cvss"))
+        file_scan_date = safe_datetime_str(finding_info.get("descriptor", {}).get("timestamp", ""))
+        if not self.scan_date:
+            self.scan_date = file_scan_date
         evidence = self._build_evidence(artifact_info)
         observations = self._build_observations(finding_info)
-        remediation_info = self._build_remediation_info(finding_info
+        remediation_info = self._build_remediation_info(finding_info.get("fix"))
+
+        severity = severity_info["severity"]

         return IntegrationFinding(
             title=(
-                f"{severity_info
-                if severity_info
-                else artifact_info
+                f"{severity_info.get('cve_id')}: {artifact_info.get('name', 'unknown')}"
+                if severity_info.get("cve_id")
+                else artifact_info.get("name", "unknown")
             ),
             description=severity_info["description"],
-            severity=
+            severity=severity,
             status=IssueStatus.Open,
             cvss_v3_score=cvss_fields.get("V3Score"),
             cvss_v3_vector=cvss_fields.get("V3Vector") or "",
             cvss_v2_score=cvss_fields.get("V2Score"),
             cvss_v2_vector=cvss_fields.get("V2Vector") or "",
-            plugin_name=artifact_info
+            plugin_name=artifact_info.get("name"),
             plugin_id=self.title,
             asset_identifier=asset_identifier,
             category="Vulnerability",
-            cve=severity_info
+            cve=severity_info.get("cve_id"),
             control_labels=["CM-7", "SI-2"],
             evidence=evidence,
             observations=observations,
-            identified_risk=f"Vulnerable {artifact_info
+            identified_risk=f"Vulnerable {artifact_info.get('type', 'package')} detected: {artifact_info.get('name', 'unknown')} {artifact_info.get('version', 'unknown')}",
             recommendation_for_mitigation=remediation_info["remediation"],
-            scan_date=scan_date,
-            first_seen=
-            last_seen=scan_date,
-            [15 removed lines not preserved in the source rendering]
+            scan_date=self.scan_date,
+            first_seen=file_scan_date,
+            last_seen=self.scan_date,
+            date_created=self.scan_date,
+            vulnerability_type=finding_info.get("type"),
+            rule_id=finding_info.get("id"),
+            source_rule_id=finding_info.get("id"),
+            remediation=remediation_info.get("remediation"),
+            vulnerable_asset=f"{artifact_info.get('name', 'unknown')}:{artifact_info.get('version', 'unknown')}",
+            security_check=finding_info.get("matcher"),
+            external_id=finding_info.get("data_source"),
+            installed_versions=artifact_info.get("version"),
+            affected_os=finding_info.get("affected_os"),
+            affected_packages=artifact_info.get("name"),
+            image_digest=finding_info.get("manifest_digest"),
+            package_path=artifact_info.get("purl"),
+            build_version=finding_info.get("build_version"),
+            fixed_versions=remediation_info.get("fixed_versions"),
+            fix_status=remediation_info.get("fix_status"),
+            due_date=issue_due_date(
+                severity=severity, created_date=file_scan_date, title="grype", config=self.app.config
+            ),
         )

     def _extract_finding_info(self, item: Dict[str, Any], data: Dict[str, Any]) -> Dict[str, Any]:
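The due_date values above come from issue_due_date, imported from scanner_integration; only its call signature (severity, created_date, title, config) is visible in this diff, not its body. A purely hypothetical sketch of what such a helper typically does — severity-based SLA days added to the creation date; the SLA table and config key below are assumptions, not the library's actual behavior:

from datetime import datetime, timedelta

ASSUMED_SLA_DAYS = {"Critical": 30, "High": 30, "Moderate": 90, "Low": 365}  # assumption

def issue_due_date_sketch(severity, created_date, title, config):
    # Hypothetical: allow a per-scanner override from config, else fall back to the SLA table.
    days = config.get(f"{title}IssueDueDateDays", ASSUMED_SLA_DAYS.get(severity, 365))
    return (datetime.fromisoformat(created_date) + timedelta(days=days)).isoformat()

print(issue_due_date_sketch("High", "2025-01-15T00:00:00", "grype", {}))  # 2025-02-14T00:00:00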
regscale/integrations/commercial/opentext/commands.py
CHANGED
@@ -61,6 +61,8 @@ def import_scans(
     Import and process a folder of Fortify WebInspect XML file(s).
     """
     # Use the new WebInspectIntegration class to sync assets and findings
+    if s3_bucket and not folder_path:
+        folder_path = s3_bucket
     wi = WebInspectIntegration(
         plan_id=regscale_ssp_id,
         file_path=str(folder_path) if folder_path else None,
regscale/integrations/commercial/opentext/scanner.py
CHANGED
@@ -14,9 +14,10 @@ from typing import Any, Dict, List, Optional, Union, Tuple, cast, Iterator, Set

 from pathlib import Path

+from regscale.core.app.utils.app_utils import check_license
 from regscale.core.app.utils.file_utils import find_files, read_file
 from regscale.integrations.jsonl_scanner_integration import JSONLScannerIntegration
-from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
+from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding, issue_due_date
 from regscale.models import IssueSeverity, AssetStatus, IssueStatus, ImportValidater

 logger = logging.getLogger("regscale")
@@ -37,15 +38,19 @@ class WebInspectIntegration(JSONLScannerIntegration):
     # Constants for file paths
     ASSETS_FILE = "./artifacts/webinspect_assets.jsonl"
     FINDINGS_FILE = "./artifacts/webinspect_findings.jsonl"
+    file_date: Optional[str] = None

     def __init__(self, *args, **kwargs):
         """Initialize the WebInspectIntegration."""
+        self.app = check_license()
         # Override file_pattern for XML files
         kwargs["file_pattern"] = "*.xml"
         kwargs["read_files_only"] = True
         self.disable_mapping = kwargs["disable_mapping"] = True
-
+        self.set_scan_date(kwargs.get("scan_date"))
+        # logger.debug(f"scan_date: {self.scan_date}"
         super().__init__(*args, **kwargs)
+        logger.debug(f"WebInspectIntegration initialized with scan date: {self.scan_date}")

     def is_valid_file(self, data: Any, file_path: Union[Path, str]) -> Tuple[bool, Optional[Dict[str, Any]]]:
         """
@@ -232,6 +237,22 @@ class WebInspectIntegration(JSONLScannerIntegration):
         asset = self._prepare_asset(file, data)
         self._write_asset_if_new(asset, assets_file, asset_tracker)

+        # Extract the date from the file name
+        file_name = os.path.basename(str(file))
+        # Extract the string after " - " and before ".xml" in the file name
+        parsed_string = file_name.split(" - ")[1].rsplit(".xml", 1)[0] if " - " in file_name else ""
+        # Convert parsed_string to a date in "%Y-%m-%d %H:%M:%S" format
+        try:
+            if len(parsed_string) == 6:  # Ensure the string is in "MMDDYY" format
+                month = int(parsed_string[:2])
+                day = int(parsed_string[2:4])
+                year = int(parsed_string[4:])
+                self.file_date = f"{year + 2000:04d}-{month:02d}-{day:02d}"
+            else:
+                self.file_date = None
+        except ValueError:
+            self.file_date = None
+
         findings_data = self._get_findings_data_from_file(data)
         logger.info(f"Found {len(findings_data)} findings in file: {file}")
         findings_added = self._write_findings(findings_data, asset.identifier, findings_file, finding_tracker)
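The MMDDYY parsing above slices the token by position; the same filename convention can also be validated with datetime.strptime, as in this standalone sketch (the sample filename is hypothetical):

from datetime import datetime
from typing import Optional

def parse_webinspect_file_date(file_name: str) -> Optional[str]:
    """Parse the 'MMDDYY' token after ' - ' and before '.xml', e.g. 'site - 011525.xml' -> '2025-01-15'."""
    token = file_name.split(" - ")[1].rsplit(".xml", 1)[0] if " - " in file_name else ""
    if len(token) != 6:
        return None
    try:
        # strptime rejects non-numeric tokens and out-of-range dates; %y maps 00-68 to 2000-2068.
        return datetime.strptime(token, "%m%d%y").strftime("%Y-%m-%d")
    except ValueError:
        return None

print(parse_webinspect_file_date("acme scan - 011525.xml"))  # 2025-01-15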
@@ -300,7 +321,7 @@ class WebInspectIntegration(JSONLScannerIntegration):
         """
         findings_added = 0
         for finding_item in findings_data:
-            finding = self.parse_finding(asset_id,
+            finding = self.parse_finding(asset_id, findings_data, finding_item)  # Pass empty dict for data if unused
             finding_dict = dataclasses.asdict(finding)
             if not self.disable_mapping and self.mapping:
                 mapped_finding_dict = self._apply_mapping(
@@ -384,6 +405,7 @@ class WebInspectIntegration(JSONLScannerIntegration):

         # Get the host from the first issue
         host = issues[0].get("Host", "Unknown Host")
+        url = issues[0].get("URL", "")

         # Create and return the asset
         return IntegrationAsset(
@@ -395,6 +417,7 @@ class WebInspectIntegration(JSONLScannerIntegration):
             asset_category="Hardware",
             parent_id=self.plan_id,
             parent_module="securityplans",
+            fqdn=url,
         )

     def _get_findings_data_from_file(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
@@ -457,51 +480,42 @@ class WebInspectIntegration(JSONLScannerIntegration):
         """
         severity_int = int(item.get("Severity", 3))
         severity_value = self.finding_severity_map.get(severity_int, IssueSeverity.High.value)
-
         try:
             severity = IssueSeverity(severity_value)
         except ValueError:
             severity = IssueSeverity.High

+        if self.scan_date is None:
+            self.scan_date = self.file_date
+
         title = item.get("Name", "")
-        host = item.get("Host", asset_identifier)
         plugin_id = item.get("VulnerabilityID", "")
-        external_id = str(
+        external_id = str(asset_identifier + plugin_id)
         sections = item.get("ReportSection", [])

         # Extract description and mitigation from report sections
         description = self._parse_report_section(sections, "Summary")
         mitigation = self._parse_report_section(sections, "Fix")

-        # Only create findings for certain severity levels
-        if severity in (IssueSeverity.Critical, IssueSeverity.High, IssueSeverity.Moderate, IssueSeverity.Low):
-            return IntegrationFinding(
-                external_id=external_id,
-                asset_identifier=host,
-                control_labels=[],
-                description=description,
-                status=IssueStatus.Open,
-                title=title,
-                severity=severity,
-                category=f"{self.title} Vulnerability",
-                plugin_id=plugin_id,
-                plugin_name=title,
-                rule_id=plugin_id,
-                recommendation_for_mitigation=mitigation,
-                source_report=self.title,
-            )
-        # Return a default finding for severities we skip
         return IntegrationFinding(
-            external_id=
-            asset_identifier=
+            external_id=external_id,
+            asset_identifier=asset_identifier,
             control_labels=[],
-            description=
-            status=IssueStatus.
-            title=
-            severity=
-            category=f"{self.title}
+            description=description,
+            status=IssueStatus.Open,
+            title=title,
+            severity=severity,
+            category=f"{self.title} Vulnerability",
+            scan_date=self.scan_date,
+            first_seen=self.scan_date,
+            last_seen=self.scan_date,
+            date_created=self.scan_date,
             plugin_id=plugin_id,
             plugin_name=title,
             rule_id=plugin_id,
+            recommendation_for_mitigation=mitigation,
             source_report=self.title,
+            due_date=issue_due_date(
+                severity=severity, created_date=self.scan_date, title="opentext", config=self.app.config
+            ),
         )
regscale/integrations/commercial/qualys.py
CHANGED
@@ -723,7 +723,7 @@ def _get_qualys_api():

     # set the auth for the QUALYS_API session
     QUALYS_API.auth = (config.get("qualysUserName"), config.get("qualysPassword"))
-    QUALYS_API.verify = config.get("
+    QUALYS_API.verify = config.get("sslVerify", True)
     qualys_url = config.get("qualysUrl")
     return qualys_url, QUALYS_API

@@ -749,12 +749,14 @@ def import_total_cloud_data_from_qualys_api(security_plan_id: int, include_tags:
         params["tag_set_exclude"] = exclude_tags
     if include_tags:
         params["tag_set_include"] = include_tags
+    logger.info("Fetching Qualys Total Cloud data...")
     response = QUALYS_API.get(
         url=urljoin(qualys_url, "/api/2.0/fo/asset/host/vm/detection/"),
         headers=HEADERS,
         params=params,
     )
     if response and response.ok:
+        logger.info("Total cloud data fetched. processing...")
         response_data = xmltodict.parse(response.text)
         qt = QualysTotalCloudIntegration(plan_id=security_plan_id, xml_data=response_data)
         qt.fetch_assets()
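The Total Cloud import parses the detection response with xmltodict, which maps XML elements to nested dictionaries keyed by tag name. A minimal illustration — the XML shape below is invented for the example, not the real Qualys response schema:

import xmltodict

xml = "<HOST_LIST><HOST><ID>1</ID><IP>10.0.0.5</IP></HOST></HOST_LIST>"
data = xmltodict.parse(xml)
print(data["HOST_LIST"]["HOST"]["IP"])  # 10.0.0.5

# Note: a single child element parses to a dict while repeated elements parse to a
# list, so real consumers typically normalize with xmltodict's force_list option.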
regscale/integrations/commercial/sicura/commands.py
CHANGED
@@ -5,11 +5,10 @@ This module contains the Click command group for Sicura.
 import logging

 import click
-
+
 from regscale.models import regscale_id

 logger = logging.getLogger("regscale")
-console = Console()


 @click.group()
@@ -23,7 +22,7 @@ def sicura():

 @sicura.command(name="sync_assets")
 @regscale_id(help="RegScale will create and update assets as children of this record.")
-def sync_assets(regscale_id):
+def sync_assets(regscale_id: int):
     """
     Sync Sicura assets to RegScale.

@@ -36,20 +35,18 @@ def sync_assets(regscale_id):
             plan_id=regscale_id,
         )

-
-
-        integration.sync_assets(plan_id=regscale_id)
+        # Using import_assets method which handles the synchronization
+        integration.sync_assets(plan_id=regscale_id)

-
+        logger.info("[bold green]Sicura asset synchronization complete.")

     except Exception as e:
         logger.error(f"Error syncing assets: {e}", exc_info=True)
-        console.print(f"[bold red]Error syncing assets: {e}")


 @sicura.command(name="sync_findings")
 @regscale_id(help="RegScale will create and update findings as children of this record.")
-def sync_findings(regscale_id):
+def sync_findings(regscale_id: int):
     """
     Sync Sicura findings to RegScale.

@@ -62,12 +59,10 @@ def sync_findings(regscale_id):
             plan_id=regscale_id,
         )

-
-
-        integration.sync_findings(plan_id=regscale_id)
+        # Using import_findings method which handles the synchronization
+        integration.sync_findings(plan_id=regscale_id)

-
+        logger.info("[bold green]Finding synchronization complete.")

     except Exception as e:
         logger.error(f"Error syncing findings: {e}", exc_info=True)
-        console.print(f"[bold red]Error syncing findings: {e}")
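The new success messages embed rich-style markup ([bold green]) directly in logger calls; that only renders as color if the "regscale" logger is backed by a markup-aware handler such as rich's RichHandler — otherwise the tags print literally. A minimal sketch of that assumption:

import logging
from rich.logging import RichHandler

# Assumption: the CLI wires its logger through a markup-aware rich handler.
logging.basicConfig(level="INFO", handlers=[RichHandler(markup=True)])
logging.getLogger("regscale").info("[bold green]Sicura asset synchronization complete.")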
regscale/integrations/commercial/tenablev2/click.py
CHANGED
@@ -4,7 +4,7 @@

 import queue
 from concurrent.futures import wait
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING

 from regscale.integrations.integration_override import IntegrationOverride

@@ -262,13 +262,17 @@ def get_queries() -> list:
     required=True,
 )
 @regscale_ssp_id()
+@click.option(
+    "--scan_date",
+    "-sd",
+    type=click.DateTime(formats=["%Y-%m-%d"]),
+    help="The scan date of the file.",
+    required=False,
+)
 # Add Prompt for RegScale SSP name
-def query_vuln(query_id: int, regscale_ssp_id: int):
+def query_vuln(query_id: int, regscale_ssp_id: int, scan_date: datetime = None):
     """Query Tenable vulnerabilities and sync assets to RegScale."""
-    q_vuln(
-        query_id=query_id,
-        ssp_id=regscale_ssp_id,
-    )
+    q_vuln(query_id=query_id, ssp_id=regscale_ssp_id, scan_date=scan_date)

 @io.command(name="sync_assets")
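click.DateTime parses the --scan_date string into a datetime.datetime before the command body runs, rejecting input that matches none of the given formats. A standalone sketch of the same option on a hypothetical command:

import click

@click.command()
@click.option(
    "--scan_date",
    "-sd",
    type=click.DateTime(formats=["%Y-%m-%d"]),
    help="The scan date of the file.",
    required=False,
)
def demo(scan_date):
    # scan_date is already a datetime.datetime, or None when the option is omitted.
    click.echo(f"scan_date={scan_date!r}")

if __name__ == "__main__":
    demo()  # e.g. `python demo.py --scan_date 2025-01-15`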
@@ -300,14 +304,20 @@ def query_assets(regscale_ssp_id: int, tags: Optional[List[Tuple[str, str]]] = N
     required=False,
     callback=validate_tags,
 )
-
-
+@click.option(
+    "--scan_date",
+    "-sd",
+    type=click.DateTime(formats=["%Y-%m-%d"]),
+    help="The scan date of the file.",
+    required=False,
+)
+def query_vulns(regscale_ssp_id: int, tags: Optional[List[Tuple[str, str]]] = None, scan_date: datetime = None):
     """
     Query Tenable vulnerabilities and sync assets, vulnerabilities and issues to RegScale.
     """
     from regscale.integrations.commercial.tenablev2.scanner import TenableIntegration

-    TenableIntegration.sync_findings(plan_id=regscale_ssp_id, tags=tags)
+    TenableIntegration.sync_findings(plan_id=regscale_ssp_id, tags=tags, scan_date=scan_date)


 def validate_regscale_security_plan(parent_id: int) -> bool:
@@ -409,18 +419,19 @@ def process_vulnerabilities(counts: collections.Counter, reg_assets: list, ssp_i
     return update_assets


-def q_vuln(query_id: int, ssp_id: int) -> list:
+def q_vuln(query_id: int, ssp_id: int, scan_date: datetime = None) -> list:
     """
     Query Tenable vulnerabilities

     :param int query_id: Tenable query ID
     :param int ssp_id: RegScale System Security Plan ID
+    :param datetime scan_date: Scan date, defaults to None
     :return: List of queries from Tenable
     :rtype: list
     """
     check_license()
     # At SSP level, provide a list of vulnerabilities and the counts of each
-    fetch_vulns(query_id=query_id, regscale_ssp_id=ssp_id)
+    fetch_vulns(query_id=query_id, regscale_ssp_id=ssp_id, scan_date=scan_date)


 def process_vuln(counts: collections.Counter, reg_assets: list, ssp_id: int, vuln: TenableAsset) -> list:
@@ -708,18 +719,19 @@ def fetch_assets(ssp_id: int) -> list[TenableIOAsset]:
     return assets


-def fetch_vulns(query_id: int = 0, regscale_ssp_id: int = 0):
+def fetch_vulns(query_id: int = 0, regscale_ssp_id: int = 0, scan_date: datetime = None):
     """
     Fetch vulnerabilities from Tenable by query ID

     :param int query_id: Tenable query ID, defaults to 0
     :param int regscale_ssp_id: RegScale System Security Plan ID, defaults to 0
+    :param datetime scan_date: Scan date, defaults to None
     """

     client = gen_client()
     if query_id and client._env_base == "TSC":
         vulns = client.analysis.vulns(query_id=query_id)
-        sc = SCIntegration(plan_id=regscale_ssp_id)
+        sc = SCIntegration(plan_id=regscale_ssp_id, scan_date=scan_date)
         # Create pickle file to cache data
         # make sure folder exists
         with tempfile.TemporaryDirectory() as temp_dir:
regscale/integrations/commercial/tenablev2/scanner.py
CHANGED
@@ -6,9 +6,9 @@ import datetime
 import json
 import linecache
 import logging
-from pathlib import Path
 from typing import Any, Dict, Iterator, List, Optional, Tuple

+from pathlib import Path
 from tenable.errors import TioExportsError

 from regscale.core.app.utils.app_utils import get_current_datetime
@@ -18,7 +18,12 @@ from regscale.integrations.commercial.tenablev2.authenticate import gen_tio
 from regscale.integrations.commercial.tenablev2.stig_parsers import parse_stig_output
 from regscale.integrations.commercial.tenablev2.utils import get_last_pull_epoch
 from regscale.integrations.commercial.tenablev2.variables import TenableVariables
-from regscale.integrations.scanner_integration import
+from regscale.integrations.scanner_integration import (
+    IntegrationAsset,
+    IntegrationFinding,
+    ScannerIntegration,
+    issue_due_date,
+)
 from regscale.integrations.variables import ScannerVariables
 from regscale.models import regscale_models
@@ -44,9 +49,10 @@ class TenableIntegration(ScannerIntegration):
         :param int plan_id: The ID of the security plan
         :param int tenant_id: The ID of the tenant, defaults to 1
         """
-        super().__init__(plan_id, tenant_id)
+        super().__init__(plan_id, tenant_id, **kwargs)
         self.client = None
         self.tags = tags or []
+        self.scan_date = kwargs.get("scan_date", get_current_datetime())

     def authenticate(self) -> None:
         """Authenticate to Tenable."""
@@ -482,6 +488,9 @@ class TenableIntegration(ScannerIntegration):
             poam_comments=None,
             vulnerable_asset=asset_id,
             source_rule_id=str(plugin.get("id", "")),
+            due_date=issue_due_date(
+                severity=severity, created_date=self.scan_date, title="tenable", config=self.app.config
+            ),
         )
         if is_stig:
             integration_finding = parse_stig_output(output=plugin_output, finding=integration_finding)