regscale-cli 6.16.2.0__py3-none-any.whl → 6.16.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of regscale-cli has been flagged as possibly problematic.
- regscale/__init__.py +1 -1
- regscale/core/app/internal/control_editor.py +26 -2
- regscale/core/app/internal/model_editor.py +39 -26
- regscale/core/app/utils/api_handler.py +4 -11
- regscale/integrations/commercial/crowdstrike.py +0 -1
- regscale/integrations/commercial/grype/scanner.py +37 -29
- regscale/integrations/commercial/opentext/commands.py +2 -0
- regscale/integrations/commercial/opentext/scanner.py +45 -31
- regscale/integrations/commercial/qualys.py +52 -61
- regscale/integrations/commercial/servicenow.py +1 -0
- regscale/integrations/commercial/sicura/commands.py +9 -14
- regscale/integrations/commercial/snyk.py +2 -2
- regscale/integrations/commercial/synqly/ticketing.py +29 -0
- regscale/integrations/commercial/tenablev2/click.py +25 -13
- regscale/integrations/commercial/tenablev2/scanner.py +12 -3
- regscale/integrations/commercial/trivy/scanner.py +14 -6
- regscale/integrations/commercial/veracode.py +1 -1
- regscale/integrations/commercial/wizv2/click.py +15 -37
- regscale/integrations/jsonl_scanner_integration.py +120 -16
- regscale/integrations/public/fedramp/click.py +8 -8
- regscale/integrations/public/fedramp/fedramp_cis_crm.py +499 -106
- regscale/integrations/public/fedramp/ssp_logger.py +2 -9
- regscale/integrations/scanner_integration.py +67 -27
- regscale/models/integration_models/cisa_kev_data.json +86 -12
- regscale/models/integration_models/flat_file_importer/__init__.py +29 -8
- regscale/models/integration_models/snyk.py +141 -15
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/integration_models/tenable_models/integration.py +23 -3
- regscale/models/integration_models/veracode.py +91 -48
- regscale/models/regscale_models/control_implementation.py +18 -0
- regscale/models/regscale_models/control_objective.py +2 -1
- regscale/models/regscale_models/facility.py +10 -26
- regscale/models/regscale_models/functional_roles.py +38 -0
- regscale/models/regscale_models/issue.py +3 -1
- regscale/models/regscale_models/parameter.py +21 -3
- regscale/models/regscale_models/profile.py +22 -0
- regscale/models/regscale_models/profile_mapping.py +48 -3
- regscale/models/regscale_models/regscale_model.py +2 -0
- regscale/models/regscale_models/risk.py +38 -30
- regscale/models/regscale_models/security_plan.py +1 -0
- regscale/models/regscale_models/supply_chain.py +1 -1
- regscale/models/regscale_models/user.py +19 -6
- regscale/utils/threading/__init__.py +1 -0
- regscale/utils/threading/threadsafe_list.py +10 -0
- regscale/utils/threading/threadsafe_set.py +116 -0
- regscale/utils/version.py +3 -5
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.4.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.4.0.dist-info}/RECORD +52 -50
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.4.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.4.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.4.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.16.2.0.dist-info → regscale_cli-6.16.4.0.dist-info}/top_level.txt +0 -0
regscale/__init__.py
CHANGED

```diff
@@ -1 +1 @@
-__version__ = "6.16.2.0"
+__version__ = "6.16.4.0"
```
regscale/core/app/internal/control_editor.py
CHANGED

```diff
@@ -20,6 +20,7 @@ if TYPE_CHECKING:
     from openpyxl import Workbook, load_workbook
     from openpyxl.styles import Alignment, PatternFill, Protection
     from openpyxl.worksheet.datavalidation import DataValidation
+import warnings


 from regscale.core.app.logz import create_logger
@@ -48,6 +49,12 @@ def control_editor():
     """
     Performs actions on Control Editor Feature to edit controls to RegScale.
     """
+    warnings.filterwarnings("always", category=DeprecationWarning)
+    warnings.warn(
+        "Control Editor is deprecated and will be removed in a future release. Use `regscale model` with the `--model control` argument instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )


     # Get data and pull into Excel worksheets.
@@ -68,7 +75,13 @@ def generate_data_download(regscale_id: int, regscale_module: str, path: Path):
     This function will build and populate a spreadsheet of all control implementations
     with the selected RegScale Parent Id and RegScale Module.
     """
-
+    warnings.filterwarnings("always", category=DeprecationWarning)
+    warnings.warn(
+        "Control Editor is deprecated and will be removed in a future release. Use `regscale model generate --model control` instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    data_load(regscale_id, regscale_module, path)


 def data_load(parent_id: int, parent_module: str, path: Path) -> None:
@@ -359,8 +372,13 @@ def _fetch_implementations(api: "Api", parent_id: int, parent_module: str) -> "p
 def generate_db_update(path: Path, skip_prompt: bool):
     """
     This function will check changes made to spreadsheet and upload any changes made to RegScale.
-
     """
+    warnings.filterwarnings("always", category=DeprecationWarning)
+    warnings.warn(
+        "Control Editor is deprecated and will be removed in a future release. Use `regscale model load --model control` instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
     db_update(path, skip_prompt)


@@ -514,6 +532,12 @@ def build_implementation(i: dict, regscale_parent_id: int, regscale_parent_modul
     )
 def generate_delete_file(path: Path):
     """This command will delete files used during the Control editing process."""
+    warnings.filterwarnings("always", category=DeprecationWarning)
+    warnings.warn(
+        "Control Editor is deprecated and will be removed in a future release. Use `regscale model` with the `--model control` argument instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
     delete_file(path)


```
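The same three-line deprecation pattern repeats in every Control Editor entry point above. For context, a minimal standalone sketch of how it surfaces to a caller (the function name here is illustrative, not part of the package):

```python
import warnings

def deprecated_command():
    # "always" ensures the warning is emitted even though DeprecationWarning is filtered by default
    warnings.filterwarnings("always", category=DeprecationWarning)
    # stacklevel=2 attributes the warning to the caller's line, not to this function
    warnings.warn("deprecated; use `regscale model` instead", DeprecationWarning, stacklevel=2)

deprecated_command()  # DeprecationWarning: deprecated; use `regscale model` instead
```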
regscale/core/app/internal/model_editor.py
CHANGED

```diff
@@ -328,7 +328,6 @@ def upload_data(path: Path, obj_type: str) -> None:
     :return: None
     :rtype: None
     """
-    import numpy as np  # Optimize import performance
     import pandas as pd

     app = Application()
@@ -356,30 +355,25 @@ def upload_data(path: Path, obj_type: str) -> None:
         if df1.equals(df2):
             error_and_exit("No differences detected.")

-        app.logger.info(
-            "Please check differences.txt file located in %s to see changes made.",
-            path,
-        )
-        upload_existing_data(app, api, path, obj_type, all_workbook_filename)
+        app.logger.warning("Differences found!")
+        # Need to strip out any net new rows before doing this comparison
+        df3 = strip_any_net_new_rows(app, df2, all_workbook_filename, obj_type, path, new_workbook_filename)
+        try:
+            changes = compare_dataframes(df1, df3)
+        except ValueError:
+            changes = compare_dataframes(df1, df2)
+        changes.to_csv(
+            os.path.join(path, DIFFERENCES_FILE),
+            header=True,
+            index=True,
+            sep=" ",
+            mode="w+",
+        )
+        app.logger.info(
+            "Please check differences.txt file located in %s to see changes made.",
+            path,
+        )
+        upload_existing_data(app, api, path, obj_type, all_workbook_filename)
     else:
         app.logger.info("No files found for the specified type to load to RegScale.")
         return app.logger.info(
@@ -388,7 +382,26 @@ def upload_data(path: Path, obj_type: str) -> None:
     )


-
+def compare_dataframes(df1: "pd.DataFrame", df2: "pd.DataFrame") -> "pd.DataFrame":
+    """
+    Compare two DataFrames and return a DataFrame with the differences.
+
+    :param pd.DataFrame df1: The first DataFrame to compare
+    :param pd.DataFrame df2: The second DataFrame to compare
+    :return: A DataFrame with the differences between the two DataFrames
+    :rtype: pd.DataFrame
+    """
+    import numpy as np
+    import pandas as pd
+
+    diff_mask = (df1 != df2) & ~(df1.isnull() & df2.isnull())
+    ne_stacked = diff_mask.stack()
+    changed = ne_stacked[ne_stacked]
+    changed.index.names = ["Id", "Column"]
+    difference_locations = np.nonzero(diff_mask)
+    changed_from = df1.values[difference_locations]
+    changed_to = df2.values[difference_locations]
+    return pd.DataFrame({"From": changed_from, "To": changed_to}, index=changed.index)


 @model.command(name="delete_files")
```
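The new `compare_dataframes` helper reports cell-level changes as a From/To table. A small usage sketch, assuming both frames share the same shape and labels (the sample data is invented):

```python
import numpy as np
import pandas as pd

df1 = pd.DataFrame({"Status": ["Open", "Closed"], "Owner": ["alice", "bob"]})
df2 = pd.DataFrame({"Status": ["Open", "Open"], "Owner": ["alice", "bob"]})

# Same logic as the helper added above
diff_mask = (df1 != df2) & ~(df1.isnull() & df2.isnull())
changed = diff_mask.stack()[diff_mask.stack()]       # keep only cells that differ
changed.index.names = ["Id", "Column"]
locs = np.nonzero(diff_mask)                         # row/column positions of differences
print(pd.DataFrame({"From": df1.values[locs], "To": df2.values[locs]}, index=changed.index))
#              From    To
# Id Column
# 1  Status  Closed  Open
```

Elementwise `!=` raises `ValueError` for differently-labeled frames, which is why the caller above falls back from `compare_dataframes(df1, df3)` to `compare_dataframes(df1, df2)`.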
regscale/core/app/utils/api_handler.py
CHANGED

```diff
@@ -67,18 +67,11 @@ class APIHandler(Application):
     :return: The version string
     :rtype: str
     """
+        from regscale.utils.version import RegscaleVersion
+
+        rs_version = RegscaleVersion()
         if self._regscale_version is None:
-            try:
-                response = self.get("/assets/json/version.json")
-                if response.status_code == 200:
-                    version_data = response.json()
-                    self._regscale_version = version_data.get("version", "Unknown")
-                else:
-                    logger.error(f"Failed to fetch version. Status code: {response.status_code}")
-                    self._regscale_version = "Unknown"
-            except Exception as e:
-                logger.error(f"Error fetching version: {e}")
-                self._regscale_version = "Unknown"
+            self._regscale_version = rs_version.current_version
         return self._regscale_version

     def _make_request(
```
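The refactor replaces the inline HTTP fetch with the shared `RegscaleVersion` helper while keeping the same memoization: the value is resolved once, then cached on the instance. A generic sketch of that pattern (names are illustrative, not the package's API):

```python
from typing import Callable, Optional

class CachedValue:
    """Resolve a value lazily on first access, then reuse it."""

    def __init__(self, fetch: Callable[[], str]):
        self._fetch = fetch                # e.g. lambda: RegscaleVersion().current_version
        self._value: Optional[str] = None

    @property
    def value(self) -> str:
        if self._value is None:            # first access triggers the (possibly slow) lookup
            self._value = self._fetch()
        return self._value
```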
regscale/integrations/commercial/crowdstrike.py
CHANGED

```diff
@@ -1010,7 +1010,6 @@ def create_new_control_implementation(
     cim = ControlImplementation(
         controlOwnerId=user_id,
         dateLastAssessed=get_current_datetime(),
-        implementation=control.get("implementation", None),
         status=status,
         controlID=control["id"],
         parentId=parent_id,
```
regscale/integrations/commercial/grype/scanner.py
CHANGED

```diff
@@ -17,7 +17,7 @@ from pathlib import Path

 from regscale.core.app.utils.parser_utils import safe_datetime_str
 from regscale.integrations.jsonl_scanner_integration import JSONLScannerIntegration
-from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
+from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding, issue_due_date
 from regscale.models import IssueSeverity, AssetStatus, IssueStatus

 logger = logging.getLogger("regscale")
@@ -143,53 +143,61 @@ class GrypeIntegration(JSONLScannerIntegration):
         finding_info = self._extract_finding_info(item, data)
         artifact_info = self._extract_artifact_info(item)
         severity_info = self._determine_severity(finding_info)
-        cvss_fields = self._get_cvss_fields(finding_info
+        cvss_fields = self._get_cvss_fields(finding_info.get("cvss"))
+        file_scan_date = safe_datetime_str(finding_info.get("descriptor", {}).get("timestamp", ""))
+        if not self.scan_date:
+            self.scan_date = file_scan_date
         evidence = self._build_evidence(artifact_info)
         observations = self._build_observations(finding_info)
-        remediation_info = self._build_remediation_info(finding_info
+        remediation_info = self._build_remediation_info(finding_info.get("fix"))
+
+        severity = severity_info["severity"]

         return IntegrationFinding(
             title=(
-                f"{severity_info
-                if severity_info
-                else artifact_info
+                f"{severity_info.get('cve_id')}: {artifact_info.get('name', 'unknown')}"
+                if severity_info.get("cve_id")
+                else artifact_info.get("name", "unknown")
             ),
             description=severity_info["description"],
-            severity=
+            severity=severity,
             status=IssueStatus.Open,
             cvss_v3_score=cvss_fields.get("V3Score"),
             cvss_v3_vector=cvss_fields.get("V3Vector") or "",
             cvss_v2_score=cvss_fields.get("V2Score"),
             cvss_v2_vector=cvss_fields.get("V2Vector") or "",
-            plugin_name=artifact_info
+            plugin_name=artifact_info.get("name"),
             plugin_id=self.title,
             asset_identifier=asset_identifier,
             category="Vulnerability",
-            cve=severity_info
+            cve=severity_info.get("cve_id"),
             control_labels=["CM-7", "SI-2"],
             evidence=evidence,
             observations=observations,
-            identified_risk=f"Vulnerable {artifact_info
+            identified_risk=f"Vulnerable {artifact_info.get('type', 'package')} detected: {artifact_info.get('name', 'unknown')} {artifact_info.get('version', 'unknown')}",
             recommendation_for_mitigation=remediation_info["remediation"],
-            scan_date=scan_date,
-            first_seen=
-            last_seen=scan_date,
+            scan_date=self.scan_date,
+            first_seen=file_scan_date,
+            last_seen=self.scan_date,
+            date_created=self.scan_date,
+            vulnerability_type=finding_info.get("type"),
+            rule_id=finding_info.get("id"),
+            source_rule_id=finding_info.get("id"),
+            remediation=remediation_info.get("remediation"),
+            vulnerable_asset=f"{artifact_info.get('name', 'unknown')}:{artifact_info.get('version', 'unknown')}",
+            security_check=finding_info.get("matcher"),
+            external_id=finding_info.get("data_source"),
+            installed_versions=artifact_info.get("version"),
+            affected_os=finding_info.get("affected_os"),
+            affected_packages=artifact_info.get("name"),
+            image_digest=finding_info.get("manifest_digest"),
+            package_path=artifact_info.get("purl"),
+            build_version=finding_info.get("build_version"),
+            fixed_versions=remediation_info.get("fixed_versions"),
+            fix_status=remediation_info.get("fix_status"),
+            due_date=issue_due_date(
+                severity=severity, created_date=file_scan_date, title="grype", config=self.app.config
+            ),
         )

     def _extract_finding_info(self, item: Dict[str, Any], data: Dict[str, Any]) -> Dict[str, Any]:
```
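Both scanners in this release now pass `severity`, `created_date`, `title`, and `config` to `issue_due_date` from `scanner_integration`. Its real implementation is not part of this diff; the sketch below only illustrates the likely shape of such a helper (the SLA table and config lookup are assumptions, not the package's actual logic):

```python
from datetime import datetime, timedelta

# Hypothetical severity-to-days table; the real values would come from RegScale configuration
DEFAULT_SLA_DAYS = {"Critical": 30, "High": 60, "Moderate": 90, "Low": 365}

def issue_due_date_sketch(severity: str, created_date: str, title: str, config: dict) -> str:
    """Sketch only: add a severity-based SLA window to the finding's creation date."""
    days = config.get("issues", {}).get(title, {}).get(severity, DEFAULT_SLA_DAYS.get(severity, 365))
    return (datetime.fromisoformat(created_date) + timedelta(days=days)).isoformat()

print(issue_due_date_sketch("High", "2025-01-05", "grype", {}))  # 2025-03-06T00:00:00
```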
regscale/integrations/commercial/opentext/commands.py
CHANGED

```diff
@@ -61,6 +61,8 @@ def import_scans(
     Import and process a folder of Fortify WebInspect XML file(s).
     """
     # Use the new WebInspectIntegration class to sync assets and findings
+    if s3_bucket and not folder_path:
+        folder_path = s3_bucket
     wi = WebInspectIntegration(
         plan_id=regscale_ssp_id,
         file_path=str(folder_path) if folder_path else None,
```
regscale/integrations/commercial/opentext/scanner.py
CHANGED

```diff
@@ -14,9 +14,10 @@ from typing import Any, Dict, List, Optional, Union, Tuple, cast, Iterator, Set

 from pathlib import Path

+from regscale.core.app.utils.app_utils import check_license
 from regscale.core.app.utils.file_utils import find_files, read_file
 from regscale.integrations.jsonl_scanner_integration import JSONLScannerIntegration
-from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
+from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding, issue_due_date
 from regscale.models import IssueSeverity, AssetStatus, IssueStatus, ImportValidater

 logger = logging.getLogger("regscale")
@@ -37,15 +38,19 @@ class WebInspectIntegration(JSONLScannerIntegration):
     # Constants for file paths
     ASSETS_FILE = "./artifacts/webinspect_assets.jsonl"
     FINDINGS_FILE = "./artifacts/webinspect_findings.jsonl"
+    file_date: Optional[str] = None

     def __init__(self, *args, **kwargs):
         """Initialize the WebInspectIntegration."""
+        self.app = check_license()
         # Override file_pattern for XML files
         kwargs["file_pattern"] = "*.xml"
         kwargs["read_files_only"] = True
         self.disable_mapping = kwargs["disable_mapping"] = True
-
+        self.set_scan_date(kwargs.get("scan_date"))
+        # logger.debug(f"scan_date: {self.scan_date}"
         super().__init__(*args, **kwargs)
+        logger.debug(f"WebInspectIntegration initialized with scan date: {self.scan_date}")
@@ -232,6 +237,22 @@ class WebInspectIntegration(JSONLScannerIntegration):
         asset = self._prepare_asset(file, data)
         self._write_asset_if_new(asset, assets_file, asset_tracker)

+        # Extract the date from the file name
+        file_name = os.path.basename(str(file))
+        # Extract the string after " - " and before ".xml" in the file name
+        parsed_string = file_name.split(" - ")[1].rsplit(".xml", 1)[0] if " - " in file_name else ""
+        # Convert parsed_string to a date in "%Y-%m-%d %H:%M:%S" format
+        try:
+            if len(parsed_string) == 6:  # Ensure the string is in "MMDDYY" format
+                month = int(parsed_string[:2])
+                day = int(parsed_string[2:4])
+                year = int(parsed_string[4:])
+                self.file_date = f"{year + 2000:04d}-{month:02d}-{day:02d}"
+            else:
+                self.file_date = None
+        except ValueError:
+            self.file_date = None
+
         findings_data = self._get_findings_data_from_file(data)
         logger.info(f"Found {len(findings_data)} findings in file: {file}")
         findings_added = self._write_findings(findings_data, asset.identifier, findings_file, finding_tracker)
```
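The parsing added above expects WebInspect export names ending in ` - MMDDYY.xml`. A quick worked check of that logic (the file name is invented):

```python
import os

file_name = os.path.basename("/scans/MyApp Baseline - 010525.xml")
parsed_string = file_name.split(" - ")[1].rsplit(".xml", 1)[0] if " - " in file_name else ""
# parsed_string == "010525" -> month 01, day 05, year 25
month, day, year = int(parsed_string[:2]), int(parsed_string[2:4]), int(parsed_string[4:])
print(f"{year + 2000:04d}-{month:02d}-{day:02d}")  # 2025-01-05
```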
regscale/integrations/commercial/opentext/scanner.py (continued)

```diff
@@ -300,7 +321,7 @@ class WebInspectIntegration(JSONLScannerIntegration):
     """
     findings_added = 0
     for finding_item in findings_data:
-        finding = self.parse_finding(asset_id,
+        finding = self.parse_finding(asset_id, findings_data, finding_item)  # Pass empty dict for data if unused
         finding_dict = dataclasses.asdict(finding)
         if not self.disable_mapping and self.mapping:
             mapped_finding_dict = self._apply_mapping(
@@ -384,6 +405,7 @@ class WebInspectIntegration(JSONLScannerIntegration):

     # Get the host from the first issue
     host = issues[0].get("Host", "Unknown Host")
+    url = issues[0].get("URL", "")

     # Create and return the asset
     return IntegrationAsset(
@@ -395,6 +417,7 @@ class WebInspectIntegration(JSONLScannerIntegration):
         asset_category="Hardware",
         parent_id=self.plan_id,
         parent_module="securityplans",
+        fqdn=url,
     )

 def _get_findings_data_from_file(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
@@ -457,51 +480,42 @@ class WebInspectIntegration(JSONLScannerIntegration):
     """
     severity_int = int(item.get("Severity", 3))
     severity_value = self.finding_severity_map.get(severity_int, IssueSeverity.High.value)
-
     try:
         severity = IssueSeverity(severity_value)
     except ValueError:
         severity = IssueSeverity.High

+    if self.scan_date is None:
+        self.scan_date = self.file_date
+
     title = item.get("Name", "")
-    host = item.get("Host", asset_identifier)
     plugin_id = item.get("VulnerabilityID", "")
-    external_id = str(
+    external_id = str(asset_identifier + plugin_id)
     sections = item.get("ReportSection", [])

     # Extract description and mitigation from report sections
     description = self._parse_report_section(sections, "Summary")
     mitigation = self._parse_report_section(sections, "Fix")

-    # Only create findings for certain severity levels
-    if severity in (IssueSeverity.Critical, IssueSeverity.High, IssueSeverity.Moderate, IssueSeverity.Low):
-        return IntegrationFinding(
-            external_id=external_id,
-            asset_identifier=host,
-            control_labels=[],
-            description=description,
-            status=IssueStatus.Open,
-            title=title,
-            severity=severity,
-            category=f"{self.title} Vulnerability",
-            plugin_id=plugin_id,
-            plugin_name=title,
-            rule_id=plugin_id,
-            recommendation_for_mitigation=mitigation,
-            source_report=self.title,
-        )
-    # Return a default finding for severities we skip
     return IntegrationFinding(
-        external_id=
-        asset_identifier=
+        external_id=external_id,
+        asset_identifier=asset_identifier,
         control_labels=[],
-        description=
-        status=IssueStatus.
-        title=
-        severity=
-        category=f"{self.title}
+        description=description,
+        status=IssueStatus.Open,
+        title=title,
+        severity=severity,
+        category=f"{self.title} Vulnerability",
+        scan_date=self.scan_date,
+        first_seen=self.scan_date,
+        last_seen=self.scan_date,
+        date_created=self.scan_date,
         plugin_id=plugin_id,
         plugin_name=title,
         rule_id=plugin_id,
+        recommendation_for_mitigation=mitigation,
         source_report=self.title,
+        due_date=issue_due_date(
+            severity=severity, created_date=self.scan_date, title="opentext", config=self.app.config
+        ),
     )
```