regscale-cli 6.16.0.0-py3-none-any.whl → 6.16.2.0-py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries, and is provided for informational purposes only.
This version of regscale-cli has been flagged as potentially problematic.
- regscale/__init__.py +1 -1
- regscale/core/app/application.py +1 -0
- regscale/core/app/internal/login.py +1 -1
- regscale/core/app/internal/poam_editor.py +1 -1
- regscale/core/app/utils/app_utils.py +1 -1
- regscale/core/app/utils/parser_utils.py +2 -2
- regscale/integrations/commercial/__init__.py +2 -2
- regscale/integrations/commercial/ad.py +1 -1
- regscale/integrations/commercial/azure/intune.py +1 -0
- regscale/integrations/commercial/grype/__init__.py +3 -0
- regscale/integrations/commercial/grype/commands.py +72 -0
- regscale/integrations/commercial/grype/scanner.py +390 -0
- regscale/integrations/commercial/import_all/import_all_cmd.py +2 -2
- regscale/integrations/commercial/nessus/scanner.py +3 -0
- regscale/integrations/commercial/opentext/__init__.py +6 -0
- regscale/integrations/commercial/opentext/commands.py +77 -0
- regscale/integrations/commercial/opentext/scanner.py +449 -85
- regscale/integrations/commercial/sap/sysdig/sysdig_scanner.py +4 -0
- regscale/integrations/commercial/sap/tenable/click.py +1 -1
- regscale/integrations/commercial/sap/tenable/scanner.py +8 -2
- regscale/integrations/commercial/tenablev2/click.py +39 -16
- regscale/integrations/commercial/trivy/__init__.py +5 -0
- regscale/integrations/commercial/trivy/commands.py +74 -0
- regscale/integrations/commercial/trivy/scanner.py +276 -0
- regscale/integrations/commercial/wizv2/click.py +9 -21
- regscale/integrations/commercial/wizv2/scanner.py +2 -1
- regscale/integrations/commercial/wizv2/utils.py +146 -70
- regscale/integrations/jsonl_scanner_integration.py +869 -0
- regscale/integrations/public/fedramp/fedramp_common.py +4 -4
- regscale/integrations/public/fedramp/import_workbook.py +1 -1
- regscale/integrations/public/fedramp/inventory_items.py +3 -3
- regscale/integrations/public/fedramp/poam/scanner.py +51 -44
- regscale/integrations/public/fedramp/ssp_logger.py +6 -6
- regscale/integrations/scanner_integration.py +268 -64
- regscale/models/app_models/mapping.py +3 -3
- regscale/models/integration_models/amazon_models/inspector.py +15 -17
- regscale/models/integration_models/aqua.py +1 -5
- regscale/models/integration_models/cisa_kev_data.json +100 -10
- regscale/models/integration_models/ecr_models/ecr.py +2 -6
- regscale/models/integration_models/{flat_file_importer.py → flat_file_importer/__init__.py} +7 -4
- regscale/models/integration_models/grype_import.py +3 -3
- regscale/models/integration_models/prisma.py +3 -3
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/integration_models/synqly_models/connectors/assets.py +1 -0
- regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +2 -0
- regscale/models/integration_models/tenable_models/integration.py +46 -10
- regscale/models/integration_models/trivy_import.py +1 -1
- regscale/models/integration_models/xray.py +1 -1
- regscale/models/regscale_models/__init__.py +2 -0
- regscale/models/regscale_models/control_implementation.py +18 -44
- regscale/models/regscale_models/inherited_control.py +61 -0
- regscale/models/regscale_models/issue.py +3 -2
- regscale/models/regscale_models/mixins/parent_cache.py +1 -1
- regscale/models/regscale_models/regscale_model.py +73 -7
- regscale/models/regscale_models/vulnerability.py +61 -8
- {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/METADATA +3 -3
- {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/RECORD +62 -56
- tests/regscale/core/test_logz.py +8 -0
- regscale/integrations/commercial/grype.py +0 -165
- regscale/integrations/commercial/opentext/click.py +0 -99
- regscale/integrations/commercial/trivy.py +0 -162
- {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/top_level.txt +0 -0
regscale/integrations/commercial/opentext/commands.py (new file)

```diff
@@ -0,0 +1,77 @@
+"""
+This module contains the Click commands for the opentext integration.
+"""
+
+from datetime import datetime
+from typing import Optional
+
+import click
+from pathlib import Path
+
+from regscale.integrations.commercial.opentext.scanner import WebInspectIntegration
+from regscale.models.integration_models.flat_file_importer import FlatFileImporter
+
+
+# pylint: disable=W0621
+
+
+@click.group()
+def fortify():
+    """Performs actions on the OpenText Fortify"""
+
+
+@fortify.group(name="web_inspect")
+def web_inspect():
+    """Performs actions on the OpenText Web Inspect files."""
+
+
+@web_inspect.command(name="import_scans")
+@FlatFileImporter.common_scanner_options(
+    message="File path to the folder containing JFrog XRay .json files to process to RegScale.",
+    prompt="File path for Grype files",
+    import_name="grype",
+)
+@click.option(
+    "--destination",
+    "-d",
+    help="Path to download the files to. If not provided, files will be downloaded to the temporary directory.",
+    type=click.Path(exists=True, dir_okay=True),
+    required=False,
+)
+@click.option(
+    "--file_pattern",
+    "-fp",
+    help="[Optional] File pattern to match (e.g., '*.json')",
+    required=False,
+)
+def import_scans(
+    destination: Optional[Path],
+    file_pattern: str,
+    folder_path: Path,
+    regscale_ssp_id: int,
+    scan_date: datetime,
+    mappings_path: Path,
+    disable_mapping: bool,
+    s3_bucket: str,
+    s3_prefix: str,
+    aws_profile: str,
+    upload_file: bool = False,
+):
+    """
+    Import and process a folder of Fortify WebInspect XML file(s).
+    """
+    # Use the new WebInspectIntegration class to sync assets and findings
+    wi = WebInspectIntegration(
+        plan_id=regscale_ssp_id,
+        file_path=str(folder_path) if folder_path else None,
+        s3_bucket=s3_bucket,
+        s3_prefix=s3_prefix,
+        aws_profile=aws_profile,
+        scan_date=scan_date,
+        mappings_path=str(mappings_path) if mappings_path else None,
+        disable_mapping=disable_mapping,
+        download_destination=destination,
+        file_pattern=file_pattern,
+        upload_file=upload_file,
+    )
+    wi.sync_assets_and_findings()
```
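For orientation, the command body above can also be driven directly from Python. The following is a minimal sketch, not part of the package: it mirrors the `import_scans` body, the plan id and scan folder are hypothetical placeholders, and the keyword arguments the command normally supplies are assumed to default sensibly in `JSONLScannerIntegration.__init__`.

```python
# Minimal sketch mirroring the import_scans body above; the plan id and
# folder path are hypothetical placeholders, and omitted keyword arguments
# are assumed to default sensibly in JSONLScannerIntegration.__init__.
from regscale.integrations.commercial.opentext.scanner import WebInspectIntegration

wi = WebInspectIntegration(
    plan_id=123,                     # hypothetical RegScale security plan id
    file_path="./webinspect_scans",  # folder of WebInspect XML exports
)
wi.sync_assets_and_findings()        # parses the XML, writes JSONL artifacts, syncs to RegScale
```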
regscale/integrations/commercial/opentext/scanner.py

```diff
@@ -1,97 +1,434 @@
 """
-
+WebInspect Scanner Integration for RegScale.
+
+This module provides integration between OpenText WebInspect scanner and RegScale,
+allowing you to import WebInspect scan results into RegScale as assets and findings.
 """
 
+import dataclasses
+import json
 import logging
-
-
+import os
+import traceback
+from typing import Any, Dict, List, Optional, Union, Tuple, cast, Iterator, Set
+
+from pathlib import Path
 
+from regscale.core.app.utils.file_utils import find_files, read_file
+from regscale.integrations.jsonl_scanner_integration import JSONLScannerIntegration
 from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
-from regscale.models import ImportValidater
-from regscale.models import IssueSeverity, IssueStatus, regscale_models
-from regscale.models.integration_models.flat_file_importer import FlatFileImporter
+from regscale.models import IssueSeverity, AssetStatus, IssueStatus, ImportValidater
 
 logger = logging.getLogger("regscale")
-XML = "*.xml"
 
 
-class
-
-
-
-
-
-
+class WebInspectIntegration(JSONLScannerIntegration):
+    """Class for handling OpenText WebInspect scanner integration."""
+
+    title: str = "WebInspect"
+    finding_severity_map: Dict[int, Any] = {
+        4: IssueSeverity.Critical.value,
+        3: IssueSeverity.High.value,
+        2: IssueSeverity.Moderate.value,
+        1: IssueSeverity.Low.value,
+        0: IssueSeverity.NotAssigned.value,
     }
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    # Constants for file paths
+    ASSETS_FILE = "./artifacts/webinspect_assets.jsonl"
+    FINDINGS_FILE = "./artifacts/webinspect_findings.jsonl"
+
+    def __init__(self, *args, **kwargs):
+        """Initialize the WebInspectIntegration."""
+        # Override file_pattern for XML files
+        kwargs["file_pattern"] = "*.xml"
+        kwargs["read_files_only"] = True
+        self.disable_mapping = kwargs["disable_mapping"] = True
+        # kwargs["re"]
+        super().__init__(*args, **kwargs)
+
+    def is_valid_file(self, data: Any, file_path: Union[Path, str]) -> Tuple[bool, Optional[Dict[str, Any]]]:
+        """
+        Check if the provided data is a valid WebInspect scan result.
+
+        Validates that the data is from a WebInspect XML file with the required structure.
+        Logs a warning with the file path and returns (False, None) if invalid.
+
+        :param Any data: Data parsed from the file (string content for XML when read_files_only is True, or file path otherwise)
+        :param Union[Path, str] file_path: Path to the file being processed
+        :return: Tuple of (is_valid, validated_data) where validated_data includes validater, mapping, and data if valid
+        :rtype: Tuple[bool, Optional[Dict[str, Any]]]
+        """
+        if self.read_files_only:
+            # Data is the XML content as a string
+            if not isinstance(data, str):
+                logger.warning(f"Data is not a string (expected XML content) for file {file_path}")
+                return False, None
+
+            try:
+                # Create a temporary file since ImportValidater requires a file path
+                import tempfile
+
+                with tempfile.NamedTemporaryFile(mode="w", suffix=".xml", delete=False) as temp_file:
+                    temp_file.write(data)
+                    temp_path = temp_file.name
+
+                validater = ImportValidater(
+                    required_headers=["Issues"],
+                    file_path=temp_path,
+                    mapping_file_path="",  # Empty string instead of None
+                    disable_mapping=True,
+                    xml_tag="Scan",  # XML root tag
+                )
+
+                # Clean up the temporary file
+                try:
+                    os.unlink(temp_path)
+                except OSError:
+                    pass
+            except Exception:
+                error_message = traceback.format_exc()
+                logger.warning(f"Error processing WebInspect XML content for file {file_path}: {str(error_message)}")
+                return False, None
+        else:
+            # Data is the file path
+            if not isinstance(data, (str, Path)):
+                logger.warning(f"Data is not a file path when read_files_only is False for file {file_path}")
+                return False, None
+
+            try:
+                validater = ImportValidater(
+                    required_headers=["Issues"],
+                    file_path=str(data),
+                    mapping_file_path="",  # Empty string instead of None
+                    disable_mapping=True,
+                    xml_tag="Scan",  # XML root tag
+                )
+            except Exception as e:
+                logger.warning(f"Error processing WebInspect file {data} for file {file_path}: {str(e)}")
+                return False, None
+
+        # Check if validater produced usable data
+        if not validater.data or not validater.parsed_headers:
+            logger.warning(f"Data is not a valid WebInspect XML structure for file {file_path}")
+            return False, None
+
+        # Extract mapping and issues data
+        mapping = validater.mapping
+        issues_data = mapping.get_value(cast(Dict[str, Any], validater.data), "Issues", {})
+        # issues_data = parent_issues_data.get("Issue", [])
+        # Validate that issues data contains 'Issue' elements
+        if not issues_data or "Issue" not in issues_data:
+            logger.warning(f"Data has no 'Issues' with 'Issue' elements for file {file_path}")
+            return False, None
+
+        return True, issues_data
+
+    def _process_files(
+        self,
+        file_path: Union[str, Path],
+        assets_output_file: str,
+        findings_output_file: str,
+        empty_assets_file: bool = True,
+        empty_findings_file: bool = True,
+    ) -> Tuple[int, int]:
+        """
+        Process files (local or S3) to extract both assets and findings in a single pass.
+
+        Optimizes file processing by reading each file once to extract asset and finding data.
+
+        :param Union[str, Path] file_path: Path to source file or directory (local or S3 URI)
+        :param str assets_output_file: Path to output JSONL file for assets
+        :param str findings_output_file: Path to output JSONL file for findings
+        :param bool empty_assets_file: Whether to empty the assets file before writing (default: True)
+        :param bool empty_findings_file: Whether to empty the findings file before writing (default: True)
+        :return: Tuple of total asset and finding counts
+        :rtype: Tuple[int, int]
+        """
+        asset_tracker = self._setup_tracker(assets_output_file, empty_assets_file, "asset")
+        finding_tracker = self._setup_tracker(findings_output_file, empty_findings_file, "finding")
+        processed_files = set()
+
+        with open(assets_output_file, "a") as assets_file, open(findings_output_file, "a") as findings_file:
+            self._process_file_data(
+                file_path, assets_file, findings_file, asset_tracker, finding_tracker, processed_files
+            )
+
+        self._log_completion(asset_tracker.new_items, assets_output_file, "assets")
+        self._log_completion(finding_tracker.new_items, findings_output_file, "findings")
+        return asset_tracker.total_items, finding_tracker.total_items
+
+    def _setup_tracker(self, output_file: str, empty_file: bool, item_type: str) -> "ItemTracker":
+        """
+        Set up a tracker for counting items.
+
+        :param str output_file: Path to the output file
+        :param bool empty_file: Whether to empty the file before processing
+        :param str item_type: Type of items ('asset' or 'finding')
+        :return: Tracker object for managing item counts
+        :rtype: ItemTracker
+        """
+        from dataclasses import dataclass
+
+        @dataclass
+        class ItemTracker:
+            existing_items: Dict[str, bool]
+            new_items: int = 0
+            total_items: int = 0
+
+        existing_items = self._prepare_output_file(output_file, empty_file, item_type)
+        return ItemTracker(existing_items=existing_items, total_items=len(existing_items))
+
+    def _process_file_data(
+        self,
+        file_path: Union[str, Path],
+        assets_file: Any,
+        findings_file: Any,
+        asset_tracker: "ItemTracker",
+        finding_tracker: "ItemTracker",
+        processed_files: Set[str],
+    ) -> None:
+        """
+        Process data from all files in the given path.
+
+        :param Union[str, Path] file_path: Path to source file or directory
+        :param Any assets_file: Open file handle for writing assets
+        :param Any findings_file: Open file handle for writing findings
+        :param ItemTracker asset_tracker: Tracker for asset counts
+        :param ItemTracker finding_tracker: Tracker for finding counts
+        :param Set[str] processed_files: Set of processed file paths
+        :rtype: None
+        """
+        for file, data in self.find_valid_files(file_path):
+            file_str = str(file)
+            if file_str in processed_files:
+                continue
+
+            processed_files.add(file_str)
+            self._handle_single_file(file, data, assets_file, findings_file, asset_tracker, finding_tracker)
 
-def
+    def _handle_single_file(
+        self,
+        file: Union[Path, str],
+        data: Optional[Dict[str, Any]],
+        assets_file: Any,
+        findings_file: Any,
+        asset_tracker: "ItemTracker",
+        finding_tracker: "ItemTracker",
+    ) -> None:
         """
-
+        Handle processing of a single file's data.
 
-:
+        :param Union[Path, str] file: Path to the file being processed
+        :param Optional[Dict[str, Any]] data: Parsed data from the file
+        :param Any assets_file: Open file handle for writing assets
+        :param Any findings_file: Open file handle for writing findings
+        :param ItemTracker asset_tracker: Tracker for asset counts
+        :param ItemTracker finding_tracker: Tracker for finding counts
+        :rtype: None
         """
-
-
-
-
-yield from self.parse_asset(issue)
+        try:
+            logger.info(f"Processing file: {file}")
+            asset = self._prepare_asset(file, data)
+            self._write_asset_if_new(asset, assets_file, asset_tracker)
 
-
+            findings_data = self._get_findings_data_from_file(data)
+            logger.info(f"Found {len(findings_data)} findings in file: {file}")
+            findings_added = self._write_findings(findings_data, asset.identifier, findings_file, finding_tracker)
+
+            if findings_added > 0:
+                logger.info(f"Added {findings_added} new findings from file {file}")
+        except Exception as e:
+            logger.error(f"Error processing file {file}: {str(e)}")
+
+    def _prepare_asset(self, file: Union[Path, str], data: Optional[Dict[str, Any]]) -> IntegrationAsset:
+        """
+        Prepare and validate an asset from file data.
+
+        :param Union[Path, str] file: Path to the file being processed
+        :param Optional[Dict[str, Any]] data: Parsed data from the file
+        :return: Processed and validated asset object
+        :rtype: IntegrationAsset
+        """
+        asset = self.parse_asset(file, data)
+        asset_dict = dataclasses.asdict(asset)
+        if not self.disable_mapping and self.mapping:
+            mapped_asset_dict = self._apply_mapping(
+                data or {}, asset_dict, getattr(self.mapping, "fields", {}).get("asset_mapping", {})
+            )
+            mapped_asset = IntegrationAsset(**mapped_asset_dict)
+        else:
+            mapped_asset = asset
+        self._validate_fields(mapped_asset, self.required_asset_fields)
+        return mapped_asset
+
+    def _write_asset_if_new(self, asset: IntegrationAsset, assets_file: Any, tracker: "ItemTracker") -> None:
+        """
+        Write an asset to the file if it's new.
+
+        :param IntegrationAsset asset: Asset object to write
+        :param Any assets_file: Open file handle for writing assets
+        :param ItemTracker tracker: Tracker for asset counts
+        :rtype: None
+        """
+        asset_key = asset.identifier
+        if asset_key not in tracker.existing_items:
+            assets_file.write(json.dumps(dataclasses.asdict(asset)) + "\n")
+            assets_file.flush()
+            tracker.existing_items[asset_key] = True
+            tracker.new_items += 1
+            tracker.total_items += 1
+        else:
+            logger.debug(f"Asset with identifier {asset_key} already exists, skipping")
+
+    def _write_findings(
+        self,
+        findings_data: List[Dict[str, Any]],
+        asset_id: str,
+        findings_file: Any,
+        tracker: "ItemTracker",
+    ) -> int:
+        """
+        Write new findings to the file and track counts.
+
+        :param List[Dict[str, Any]] findings_data: List of finding items
+        :param str asset_id: Identifier of the associated asset
+        :param Any findings_file: Open file handle for writing findings
+        :param ItemTracker tracker: Tracker for finding counts
+        :return: Number of new findings added
+        :rtype: int
+        """
+        findings_added = 0
+        for finding_item in findings_data:
+            finding = self.parse_finding(asset_id, {}, finding_item)  # Pass empty dict for data if unused
+            finding_dict = dataclasses.asdict(finding)
+            if not self.disable_mapping and self.mapping:
+                mapped_finding_dict = self._apply_mapping(
+                    finding_item, finding_dict, getattr(self.mapping, "fields", {}).get("finding_mapping", {})
+                )
+                mapped_finding = IntegrationFinding(**mapped_finding_dict)
+            else:
+                mapped_finding = finding
+            self._validate_fields(mapped_finding, self.required_finding_fields)
+
+            finding_key = self._get_item_key(dataclasses.asdict(mapped_finding), "finding")
+            if finding_key not in tracker.existing_items:
+                findings_file.write(json.dumps(dataclasses.asdict(mapped_finding)) + "\n")
+                findings_file.flush()
+                tracker.existing_items[finding_key] = True
+                tracker.new_items += 1
+                tracker.total_items += 1
+                findings_added += 1
+            else:
+                logger.debug(f"Finding with key {finding_key} already exists, skipping")
+        return findings_added
+
+    def _log_completion(self, new_count: int, output_file: str, item_type: str) -> None:
+        """
+        Log the completion of processing items.
+
+        :param int new_count: Number of new items added
+        :param str output_file: Path to the output file
+        :param str item_type: Type of items processed ('assets' or 'findings')
+        :rtype: None
+        """
+        logger.info(f"Added {new_count} new {item_type} to {output_file}")
+
+    def find_valid_files(self, path: Union[Path, str]) -> Iterator[Tuple[Union[Path, str], Dict[str, Any]]]:
+        """
+        Find all valid WebInspect scan files in the given path.
+
+        Overrides the parent method to handle XML files instead of JSON, passing content or path to is_valid_file.
+
+        :param Union[Path, str] path: Path to a file or directory (local or S3 URI)
+        :return: Iterator yielding tuples of (file_path, validated_data) for valid files
+        :rtype: Iterator[Tuple[Union[Path, str], Dict[str, Any]]]
+        """
+        files = find_files(path, self.file_pattern)
+        for file in files:
+            try:
+                if self.read_files_only:
+                    content = read_file(file)  # Get raw XML content as string
+                else:
+                    content = file  # Pass file path directly
+                is_valid, validated_data = self.is_valid_file(content, file)
+                if is_valid and validated_data is not None:
+                    yield file, validated_data
+            except Exception as e:
+                logger.error(f"Error processing file {file}: {str(e)}")
+
+    def parse_asset(self, file_path: Union[Path, str], data: Dict[str, Any]) -> IntegrationAsset:
         """
-Parse
+        Parse a single asset from WebInspect scan data.
 
-:param
-:
+        :param Union[Path, str] file_path: Path to the file containing the asset data
+        :param Dict[str, Any] data: The parsed data containing validater, mapping, and data
+        :return: IntegrationAsset object
+        :rtype: IntegrationAsset
         """
-
-
-
+        # Get the first issue to extract host information
+        issues = data.get("Issue", [])
+        if not issues:
+            # If no issues found, create a default asset based on the file name
+            file_name = os.path.basename(str(file_path))
+            return IntegrationAsset(
+                identifier=file_name,
+                name=file_name,
+                ip_address="0.0.0.0",
+                status=AssetStatus.Active,
+                asset_type="Other",
+                asset_category="Hardware",
+                parent_id=self.plan_id,
+                parent_module="securityplans",
+            )
+
+        # Get the host from the first issue
+        host = issues[0].get("Host", "Unknown Host")
+
+        # Create and return the asset
+        return IntegrationAsset(
+            identifier=host,
+            name=host,
+            ip_address="0.0.0.0",  # Default IP address
+            status=AssetStatus.Active,
+            asset_type="Other",
+            asset_category="Hardware",
+            parent_id=self.plan_id,
+            parent_module="securityplans",
+        )
 
-def
+    def _get_findings_data_from_file(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
         """
-
+        Extract findings data from WebInspect file data.
 
-:
-:
+        :param Dict[str, Any] data: The data from the WebInspect file
+        :return: List of finding items
+        :rtype: List[Dict[str, Any]]
         """
-
-
-
-
-
-
+        if not data or not isinstance(data, dict):
+            return []
+
+        # Get the issues from the data
+        issues = data.get("Issue", [])
+        if not isinstance(issues, list):
+            return []
+
+        # Filter out findings with severity levels we don't want to include
+        filtered_issues = []
+        for issue in issues:
+            severity_int = int(issue.get("Severity", 3))
+            severity_value = self.finding_severity_map.get(severity_int, IssueSeverity.High.value)
+
+            try:
+                severity = IssueSeverity(severity_value)
+                # Only include findings with certain severity levels
+                if severity in (IssueSeverity.Critical, IssueSeverity.High, IssueSeverity.Moderate, IssueSeverity.Low):
+                    filtered_issues.append(issue)
+            except ValueError:
+                # Include by default if we can't determine severity
+                filtered_issues.append(issue)
+
+        return filtered_issues
 
     @staticmethod
     def _parse_report_section(sections: List[dict], section_name: str) -> str:
@@ -103,26 +440,40 @@ class WebInspect(FlatFileImporter):
         :return: Text from the specified section
         :rtype: str
         """
+        if not sections:
+            return ""
+
         return next((section.get("SectionText", "") for section in sections if section.get("Name") == section_name), "")
 
-    def parse_finding(self,
+    def parse_finding(self, asset_identifier: str, data: Dict[str, Any], item: Dict[str, Any]) -> IntegrationFinding:
         """
-Parse
+        Parse a single finding from WebInspect scan data.
 
-:param
-:
-:
+        :param str asset_identifier: The identifier of the asset this finding belongs to
+        :param Dict[str, Any] data: The parsed data (not used here, kept for interface compatibility)
+        :param Dict[str, Any] item: The finding data
+        :return: IntegrationFinding object
+        :rtype: IntegrationFinding
         """
-severity_int = int(
-
-
-
-
+        severity_int = int(item.get("Severity", 3))
+        severity_value = self.finding_severity_map.get(severity_int, IssueSeverity.High.value)
+
+        try:
+            severity = IssueSeverity(severity_value)
+        except ValueError:
+            severity = IssueSeverity.High
+
+        title = item.get("Name", "")
+        host = item.get("Host", asset_identifier)
+        plugin_id = item.get("VulnerabilityID", "")
         external_id = str(host + plugin_id)
-sections =
+        sections = item.get("ReportSection", [])
+
+        # Extract description and mitigation from report sections
         description = self._parse_report_section(sections, "Summary")
         mitigation = self._parse_report_section(sections, "Fix")
 
+        # Only create findings for certain severity levels
         if severity in (IssueSeverity.Critical, IssueSeverity.High, IssueSeverity.Moderate, IssueSeverity.Low):
            return IntegrationFinding(
                external_id=external_id,
@@ -132,12 +483,25 @@ class WebInspect(FlatFileImporter):
                status=IssueStatus.Open,
                title=title,
                severity=severity,
-
-category=f"{self.name} Vulnerability",
+                category=f"{self.title} Vulnerability",
                plugin_id=plugin_id,
                plugin_name=title,
                rule_id=plugin_id,
                recommendation_for_mitigation=mitigation,
-source_report=self.
+                source_report=self.title,
            )
-
+        # Return a default finding for severities we skip
+        return IntegrationFinding(
+            external_id=f"skip-{external_id}",
+            asset_identifier=host,
+            control_labels=[],
+            description="Skipped finding due to low severity",
+            status=IssueStatus.Closed,
+            title=f"Skipped: {title}",
+            severity=IssueSeverity.NotAssigned,
+            category=f"{self.title} Skipped",
+            plugin_id=plugin_id,
+            plugin_name=title,
+            rule_id=plugin_id,
+            source_report=self.title,
+        )
```