regscale-cli 6.16.0.0__py3-none-any.whl → 6.16.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of regscale-cli has been flagged as potentially problematic; see the package registry's advisory page for details.
- regscale/__init__.py +1 -1
- regscale/core/app/application.py +1 -0
- regscale/core/app/internal/login.py +1 -1
- regscale/core/app/internal/poam_editor.py +1 -1
- regscale/core/app/utils/app_utils.py +1 -1
- regscale/core/app/utils/parser_utils.py +2 -2
- regscale/integrations/commercial/__init__.py +2 -2
- regscale/integrations/commercial/ad.py +1 -1
- regscale/integrations/commercial/azure/intune.py +1 -0
- regscale/integrations/commercial/grype/__init__.py +3 -0
- regscale/integrations/commercial/grype/commands.py +72 -0
- regscale/integrations/commercial/grype/scanner.py +390 -0
- regscale/integrations/commercial/import_all/import_all_cmd.py +2 -2
- regscale/integrations/commercial/nessus/scanner.py +3 -0
- regscale/integrations/commercial/opentext/__init__.py +6 -0
- regscale/integrations/commercial/opentext/commands.py +77 -0
- regscale/integrations/commercial/opentext/scanner.py +449 -85
- regscale/integrations/commercial/sap/sysdig/sysdig_scanner.py +4 -0
- regscale/integrations/commercial/sap/tenable/click.py +1 -1
- regscale/integrations/commercial/sap/tenable/scanner.py +8 -2
- regscale/integrations/commercial/tenablev2/click.py +39 -16
- regscale/integrations/commercial/trivy/__init__.py +5 -0
- regscale/integrations/commercial/trivy/commands.py +74 -0
- regscale/integrations/commercial/trivy/scanner.py +276 -0
- regscale/integrations/commercial/wizv2/click.py +9 -21
- regscale/integrations/commercial/wizv2/scanner.py +2 -1
- regscale/integrations/commercial/wizv2/utils.py +146 -70
- regscale/integrations/jsonl_scanner_integration.py +869 -0
- regscale/integrations/public/fedramp/fedramp_common.py +4 -4
- regscale/integrations/public/fedramp/import_workbook.py +1 -1
- regscale/integrations/public/fedramp/inventory_items.py +3 -3
- regscale/integrations/public/fedramp/poam/scanner.py +51 -44
- regscale/integrations/public/fedramp/ssp_logger.py +6 -6
- regscale/integrations/scanner_integration.py +268 -64
- regscale/models/app_models/mapping.py +3 -3
- regscale/models/integration_models/amazon_models/inspector.py +15 -17
- regscale/models/integration_models/aqua.py +1 -5
- regscale/models/integration_models/cisa_kev_data.json +100 -10
- regscale/models/integration_models/ecr_models/ecr.py +2 -6
- regscale/models/integration_models/{flat_file_importer.py → flat_file_importer/__init__.py} +7 -4
- regscale/models/integration_models/grype_import.py +3 -3
- regscale/models/integration_models/prisma.py +3 -3
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/integration_models/synqly_models/connectors/assets.py +1 -0
- regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +2 -0
- regscale/models/integration_models/tenable_models/integration.py +46 -10
- regscale/models/integration_models/trivy_import.py +1 -1
- regscale/models/integration_models/xray.py +1 -1
- regscale/models/regscale_models/__init__.py +2 -0
- regscale/models/regscale_models/control_implementation.py +18 -44
- regscale/models/regscale_models/inherited_control.py +61 -0
- regscale/models/regscale_models/issue.py +3 -2
- regscale/models/regscale_models/mixins/parent_cache.py +1 -1
- regscale/models/regscale_models/regscale_model.py +73 -7
- regscale/models/regscale_models/vulnerability.py +61 -8
- {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/METADATA +3 -3
- {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/RECORD +62 -56
- tests/regscale/core/test_logz.py +8 -0
- regscale/integrations/commercial/grype.py +0 -165
- regscale/integrations/commercial/opentext/click.py +0 -99
- regscale/integrations/commercial/trivy.py +0 -162
- {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.16.0.0.dist-info → regscale_cli-6.16.2.0.dist-info}/top_level.txt +0 -0
regscale/__init__.py
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
__version__ = "6.16.
|
|
1
|
+
__version__ = "6.16.2.0"
|
regscale/core/app/application.py
CHANGED
|
@@ -86,6 +86,7 @@ class Application(metaclass=Singleton):
|
|
|
86
86
|
"dependabotRepo": "<myGithubRepoNameGoesHere>",
|
|
87
87
|
"dependabotToken": "<myGithubPersonalAccessTokenGoesHere>",
|
|
88
88
|
"domain": "https://regscale.yourcompany.com/",
|
|
89
|
+
"disableCache": False,
|
|
89
90
|
"evidenceFolder": "./evidence",
|
|
90
91
|
"passScore": 80,
|
|
91
92
|
"failScore": 30,
|
|
@@ -70,7 +70,7 @@ def login(
|
|
|
70
70
|
config["domain"] = (host if host and host != "None" else config.get("REGSCALE_DOMAIN")) or os.getenv(
|
|
71
71
|
"REGSCALE_DOMAIN"
|
|
72
72
|
)
|
|
73
|
-
app.logger.
|
|
73
|
+
app.logger.debug("Running in Airflow, logging in with token: %s", token)
|
|
74
74
|
else:
|
|
75
75
|
config["domain"] = host or config["domain"]
|
|
76
76
|
token = token or os.getenv("REGSCALE_TOKEN")
|
|
@@ -452,7 +452,7 @@ def all_issues(regscale_id: int, regscale_module: str, path: Path) -> None:
|
|
|
452
452
|
error_and_exit("There was an error creating your workbook for the given RegScale Id and RegScale Module.")
|
|
453
453
|
|
|
454
454
|
logger.info(
|
|
455
|
-
"Your data has
|
|
455
|
+
"Your data has been loaded. Please open the all_issues workbook and make your desired changes %s. " % path
|
|
456
456
|
)
|
|
457
457
|
return None
|
|
458
458
|
|
|
@@ -642,7 +642,7 @@ def save_to_json(file: Path, data: Any, output_log: bool) -> None:
|
|
|
642
642
|
with open(file, "w", encoding="utf-8") as outfile:
|
|
643
643
|
outfile.write(str(data))
|
|
644
644
|
if output_log:
|
|
645
|
-
logger.info("Data successfully saved to %s", file.
|
|
645
|
+
logger.info("Data successfully saved to %s", file.absolute())
|
|
646
646
|
|
|
647
647
|
|
|
648
648
|
def save_data_to(file: Path, data: Any, output_log: bool = True, transpose_data: bool = True) -> None:
|
|
@@ -23,7 +23,7 @@ def safe_float(value: Any, default: float = 0.0, field_name: str = "value") -> f
|
|
|
23
23
|
try:
|
|
24
24
|
return float(value)
|
|
25
25
|
except (ValueError, TypeError):
|
|
26
|
-
logger.
|
|
26
|
+
logger.debug(f"Invalid float {field_name}: {value}. Defaulting to {default}")
|
|
27
27
|
return default
|
|
28
28
|
|
|
29
29
|
|
|
@@ -43,7 +43,7 @@ def safe_int(value: Any, default: int = 0, field_name: str = "value") -> int:
|
|
|
43
43
|
try:
|
|
44
44
|
return int(value)
|
|
45
45
|
except (ValueError, TypeError):
|
|
46
|
-
logger.
|
|
46
|
+
logger.debug(f"Invalid integer {field_name}: {value}. Defaulting to {default}")
|
|
47
47
|
return default
|
|
48
48
|
|
|
49
49
|
|
|
@@ -161,7 +161,7 @@ show_mapping(ecr, "ecr")
|
|
|
161
161
|
@click.group(
|
|
162
162
|
cls=LazyGroup,
|
|
163
163
|
lazy_subcommands={
|
|
164
|
-
"import_file": "regscale.integrations.commercial.opentext.
|
|
164
|
+
"import_file": "regscale.integrations.commercial.opentext.commands.import_scans",
|
|
165
165
|
},
|
|
166
166
|
name="opentext",
|
|
167
167
|
)
|
|
@@ -320,7 +320,7 @@ def trivy():
|
|
|
320
320
|
@click.group(
|
|
321
321
|
cls=LazyGroup,
|
|
322
322
|
lazy_subcommands={
|
|
323
|
-
"import_scans": "regscale.integrations.commercial.grype.import_scans",
|
|
323
|
+
"import_scans": "regscale.integrations.commercial.grype.commands.import_scans",
|
|
324
324
|
},
|
|
325
325
|
name="grype",
|
|
326
326
|
)
|
|
@@ -278,7 +278,7 @@ def get_group(str_group: str) -> None:
|
|
|
278
278
|
file=Path(f"./artifacts/adMemberList-{group_id}.json"),
|
|
279
279
|
data=member_data,
|
|
280
280
|
)
|
|
281
|
-
logger.
|
|
281
|
+
logger.debug(member_data)
|
|
282
282
|
# retrieve the list of RegScale users
|
|
283
283
|
url_users = f'{config["domain"]}/api/accounts/getList'
|
|
284
284
|
try:
|
|
@@ -0,0 +1,72 @@
|
|
|
"""
This module contains the CLI commands for the Grype scanner integration.
"""

from datetime import datetime
from pathlib import Path
from typing import Optional

import click

from regscale.models.integration_models.flat_file_importer import FlatFileImporter


@click.group()
def grype():
    """Performs actions from the Grype scanner integration."""
    pass


@grype.command("import_scans")
@FlatFileImporter.common_scanner_options(
    # Fixed: the help text previously said "JFrog XRay .json files" — a
    # copy/paste from the XRay importer; this command imports Grype results.
    message="File path to the folder containing Grype .json files to process to RegScale.",
    prompt="File path for Grype files",
    import_name="grype",
)
@click.option(
    "--destination",
    "-d",
    help="Path to download the files to. If not provided, files will be downloaded to the temporary directory.",
    type=click.Path(exists=True, dir_okay=True),
    required=False,
)
@click.option(
    "--file_pattern",
    "-fp",
    help="[Optional] File pattern to match (e.g., '*.json')",
    required=False,
)
def import_scans(
    destination: Optional[Path],
    file_pattern: str,
    folder_path: Path,
    regscale_ssp_id: int,
    scan_date: datetime,
    mappings_path: Path,
    disable_mapping: bool,
    s3_bucket: str,
    s3_prefix: str,
    aws_profile: str,
    upload_file: bool,
) -> None:
    """
    Process Grype scan results from a folder containing Grype scan files and load into RegScale.
    """
    # Deferred import: the scanner module pulls in heavy dependencies, so it
    # is only loaded when this command actually runs.
    from regscale.integrations.commercial.grype.scanner import GrypeIntegration

    # When only an S3 bucket is supplied, the bucket name stands in for the
    # local folder path — the integration resolves it downstream.
    if s3_bucket and not folder_path:
        folder_path = s3_bucket
    gi = GrypeIntegration(
        plan_id=regscale_ssp_id,
        file_path=str(folder_path) if folder_path else None,
        s3_bucket=s3_bucket,
        s3_prefix=s3_prefix,
        aws_profile=aws_profile,
        scan_date=scan_date,
        mappings_path=str(mappings_path) if mappings_path else None,
        disable_mapping=disable_mapping,
        download_destination=destination,
        file_pattern=file_pattern,
        upload_file=upload_file,
    )
    gi.sync_assets_and_findings()
|
@@ -0,0 +1,390 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Grype scanner integration class.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
import os
|
|
7
|
+
from typing import (
|
|
8
|
+
Any,
|
|
9
|
+
Dict,
|
|
10
|
+
Optional,
|
|
11
|
+
Union,
|
|
12
|
+
Tuple,
|
|
13
|
+
TypeVar,
|
|
14
|
+
)
|
|
15
|
+
|
|
16
|
+
from pathlib import Path
|
|
17
|
+
|
|
18
|
+
from regscale.core.app.utils.parser_utils import safe_datetime_str
|
|
19
|
+
from regscale.integrations.jsonl_scanner_integration import JSONLScannerIntegration
|
|
20
|
+
from regscale.integrations.scanner_integration import IntegrationAsset, IntegrationFinding
|
|
21
|
+
from regscale.models import IssueSeverity, AssetStatus, IssueStatus
|
|
22
|
+
|
|
23
|
+
logger = logging.getLogger("regscale")
|
|
24
|
+
|
|
25
|
+
# Define generic types for items that can be written to file
|
|
26
|
+
T = TypeVar("T")
|
|
27
|
+
ItemType = TypeVar("ItemType", IntegrationAsset, IntegrationFinding)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class GrypeIntegration(JSONLScannerIntegration):
    """Class for handling Grype scanner integration."""

    title: str = "Grype"
    # RegScale asset field used to correlate findings with their asset.
    asset_identifier_field: str = "otherTrackingNumber"
    # Maps Grype severity strings (upper-cased) to RegScale IssueSeverity values.
    # NOTE(review): UNKNOWN and NEGLIGIBLE both map to High — presumably a
    # conservative default; confirm NEGLIGIBLE should not map to Low instead.
    finding_severity_map: Dict[str, Any] = {
        "CRITICAL": IssueSeverity.Critical.value,
        "HIGH": IssueSeverity.High.value,
        "MEDIUM": IssueSeverity.Moderate.value,
        "LOW": IssueSeverity.Low.value,
        "UNKNOWN": IssueSeverity.High.value,
        "NEGLIGIBLE": IssueSeverity.High.value,
    }

    # Constants for file paths (intermediate JSONL artifacts)
    ASSETS_FILE = "./artifacts/grype_assets.jsonl"
    FINDINGS_FILE = "./artifacts/grype_findings.jsonl"

    def __init__(self, *args, **kwargs):
        """
        Initialize the GrypeIntegration object.

        :param Any kwargs: Keyword arguments forwarded to JSONLScannerIntegration
        """
        kwargs["read_files_only"] = True
        kwargs["file_pattern"] = "*.json"
        # Mapping support is always disabled for Grype; this deliberately
        # overrides any caller-supplied disable_mapping value.
        self.disable_mapping = kwargs["disable_mapping"] = True
        super().__init__(*args, **kwargs)
|
+
def is_valid_file(self, data: Any, file_path: Union[Path, str]) -> Tuple[bool, Optional[Dict[str, Any]]]:
|
|
60
|
+
"""
|
|
61
|
+
Check if a file is a valid Grype scan file.
|
|
62
|
+
|
|
63
|
+
:param Any data: Data parsed from the file to validate
|
|
64
|
+
:param Union[Path, str] file_path: Path to the file being processed
|
|
65
|
+
:return: Tuple of (is_valid, data) where is_valid indicates validity and data is the validated content or None
|
|
66
|
+
:rtype: Tuple[bool, Optional[Dict[str, Any]]]
|
|
67
|
+
"""
|
|
68
|
+
try:
|
|
69
|
+
# Check if this looks like a Grype scan file
|
|
70
|
+
# Grype files should have matches array and source object
|
|
71
|
+
if not isinstance(data, dict):
|
|
72
|
+
logger.warning(f"File {file_path} is not a dict, skipping")
|
|
73
|
+
return False, None
|
|
74
|
+
|
|
75
|
+
if "matches" not in data:
|
|
76
|
+
logger.warning(f"File {file_path} has no 'matches' key, skipping")
|
|
77
|
+
return False, None
|
|
78
|
+
|
|
79
|
+
if not isinstance(data.get("matches"), list):
|
|
80
|
+
logger.warning(f"File {file_path} 'matches' is not a list, skipping")
|
|
81
|
+
return False, None
|
|
82
|
+
|
|
83
|
+
if "source" not in data:
|
|
84
|
+
logger.warning(f"File {file_path} has no 'source' key, skipping")
|
|
85
|
+
return False, None
|
|
86
|
+
|
|
87
|
+
logger.debug(f"File {file_path} validated successfully")
|
|
88
|
+
return True, data
|
|
89
|
+
except Exception as e:
|
|
90
|
+
logger.warning(f"Error reading file {file_path}: {str(e)}")
|
|
91
|
+
return False, None
|
|
92
|
+
|
|
93
|
+
def parse_asset(self, file_path: Union[Path, str], data: Dict[str, Any]) -> IntegrationAsset:
|
|
94
|
+
"""
|
|
95
|
+
Parse a single asset from Grype scan data.
|
|
96
|
+
|
|
97
|
+
:param Union[Path, str] file_path: Path to the file containing the asset data
|
|
98
|
+
:param Dict[str, Any] data: The parsed JSON data
|
|
99
|
+
:return: IntegrationAsset object
|
|
100
|
+
:rtype: IntegrationAsset
|
|
101
|
+
"""
|
|
102
|
+
source_target_data = data.get("source", {}).get("target", {})
|
|
103
|
+
# Convert path to string if it's not already
|
|
104
|
+
file_path_str = str(file_path)
|
|
105
|
+
|
|
106
|
+
# Determine identifier from file name or data
|
|
107
|
+
if "sha256-" in file_path_str:
|
|
108
|
+
# Extract the sha256 from the filename
|
|
109
|
+
base_name = os.path.basename(file_path_str)
|
|
110
|
+
identifier = "sha256-" + base_name.split("sha256-")[1].split(".json")[0]
|
|
111
|
+
else:
|
|
112
|
+
identifier = source_target_data.get("imageID", "Unknown")
|
|
113
|
+
|
|
114
|
+
return IntegrationAsset(
|
|
115
|
+
identifier=identifier,
|
|
116
|
+
name=identifier,
|
|
117
|
+
ip_address="0.0.0.0",
|
|
118
|
+
cpu=0,
|
|
119
|
+
ram=0,
|
|
120
|
+
status=AssetStatus.Active,
|
|
121
|
+
asset_type="Other",
|
|
122
|
+
asset_category="Software",
|
|
123
|
+
operating_system=source_target_data.get("os", source_target_data.get("OS", "Linux")),
|
|
124
|
+
notes=f"{os.path.basename(file_path_str)}",
|
|
125
|
+
other_tracking_number=source_target_data.get("userInput", source_target_data.get("UserInput", "Unknown")),
|
|
126
|
+
fqdn=source_target_data.get("userInput", source_target_data.get("UserInput", "Unknown")),
|
|
127
|
+
parent_id=self.plan_id,
|
|
128
|
+
parent_module="securityplans",
|
|
129
|
+
)
|
|
130
|
+
|
|
131
|
+
    def parse_finding(self, asset_identifier: str, data: Dict[str, Any], item: Dict[str, Any]) -> IntegrationFinding:
        """
        Parse a single finding from Grype scan data.

        Constructs a finding object by extracting and processing vulnerability details.

        :param str asset_identifier: Identifier of the asset this finding belongs to
        :param Dict[str, Any] data: Parsed scan data containing metadata
        :param Dict[str, Any] item: Individual finding data
        :return: Parsed finding object
        :rtype: IntegrationFinding
        """
        # Gather the pieces first; _determine_severity may swap in NVD-sourced
        # data (CVE id, severity, cvss), so its output — not finding_info —
        # drives the title/description/severity/cve fields below.
        finding_info = self._extract_finding_info(item, data)
        artifact_info = self._extract_artifact_info(item)
        severity_info = self._determine_severity(finding_info)
        cvss_fields = self._get_cvss_fields(finding_info["cvss"])
        # Scan timestamp comes from the Grype descriptor; also reused for
        # first_seen/last_seen below since a single file is one point in time.
        scan_date = safe_datetime_str(finding_info["descriptor"].get("timestamp", ""))
        evidence = self._build_evidence(artifact_info)
        observations = self._build_observations(finding_info)
        remediation_info = self._build_remediation_info(finding_info["fix"])

        return IntegrationFinding(
            title=(
                f"{severity_info['cve_id']}: {artifact_info['name']}"
                if severity_info["cve_id"]
                else artifact_info["name"]
            ),
            description=severity_info["description"],
            severity=severity_info["severity"],
            status=IssueStatus.Open,
            # Vectors default to "" (not None) for downstream string handling.
            cvss_v3_score=cvss_fields.get("V3Score"),
            cvss_v3_vector=cvss_fields.get("V3Vector") or "",
            cvss_v2_score=cvss_fields.get("V2Score"),
            cvss_v2_vector=cvss_fields.get("V2Vector") or "",
            plugin_name=artifact_info["name"],
            plugin_id=self.title,
            asset_identifier=asset_identifier,
            category="Vulnerability",
            cve=severity_info["cve_id"],
            # Fixed control mapping for vulnerability findings (least
            # functionality / flaw remediation).
            control_labels=["CM-7", "SI-2"],
            evidence=evidence,
            observations=observations,
            identified_risk=f"Vulnerable {artifact_info['type'] or 'package'} detected: {artifact_info['name'] or 'unknown'} {artifact_info['version'] or 'unknown'}",
            recommendation_for_mitigation=remediation_info["remediation"],
            scan_date=scan_date,
            first_seen=scan_date,
            last_seen=scan_date,
            vulnerability_type=finding_info["type"],
            rule_id=finding_info["id"],
            source_rule_id=finding_info["id"],
            remediation=remediation_info["remediation"],
            vulnerable_asset=f"{artifact_info['name'] or 'unknown'}:{artifact_info['version'] or 'unknown'}",
            security_check=finding_info["matcher"],
            external_id=finding_info["data_source"],
            installed_versions=artifact_info["version"],
            affected_os=finding_info["affected_os"],
            affected_packages=artifact_info["name"],
            image_digest=finding_info["manifest_digest"],
            package_path=artifact_info["purl"],
            build_version=finding_info["build_version"],
            fixed_versions=remediation_info["fixed_versions"],
            fix_status=remediation_info["fix_status"],
        )
def _extract_finding_info(self, item: Dict[str, Any], data: Dict[str, Any]) -> Dict[str, Any]:
|
|
196
|
+
"""
|
|
197
|
+
Extract core finding information from Grype scan data.
|
|
198
|
+
|
|
199
|
+
:param Dict[str, Any] item: Individual finding data
|
|
200
|
+
:param Dict[str, Any] data: Parsed scan data containing metadata
|
|
201
|
+
:return: Dictionary of extracted finding details
|
|
202
|
+
:rtype: Dict[str, Any]
|
|
203
|
+
"""
|
|
204
|
+
finding = item.get("vulnerability", {})
|
|
205
|
+
source_target = data.get("source", {}).get("target", {})
|
|
206
|
+
labels = source_target.get("labels", {})
|
|
207
|
+
return {
|
|
208
|
+
"id": finding.get("id", ""),
|
|
209
|
+
"type": finding.get("type", ""),
|
|
210
|
+
"data_source": finding.get("dataSource", ""),
|
|
211
|
+
"namespace": finding.get("namespace", ""),
|
|
212
|
+
"urls": finding.get("urls", []),
|
|
213
|
+
"cvss": finding.get("cvss", []),
|
|
214
|
+
"related_vulns": item.get("relatedVulnerabilities", []),
|
|
215
|
+
"descriptor": data.get("descriptor", {}),
|
|
216
|
+
"match_details": item.get("matchDetails", []),
|
|
217
|
+
"description": finding.get("description", "No description available"),
|
|
218
|
+
"build_version": str(labels.get("io.buildah.version", "")),
|
|
219
|
+
"manifest_digest": source_target.get("manifestDigest", ""),
|
|
220
|
+
"affected_os": labels.get("org.opencontainers.image.base.name")
|
|
221
|
+
or f"{labels.get('org.opencontainers.image.ref.name', '')} {labels.get('org.opencontainers.image.version', '')}".strip(),
|
|
222
|
+
"fix": finding.get("fix", {}),
|
|
223
|
+
"severity": finding.get("severity", "UNKNOWN"),
|
|
224
|
+
"matcher": item.get("matchDetails", [{}])[0].get("matcher", ""),
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
def _extract_artifact_info(self, item: Dict[str, Any]) -> Dict[str, Any]:
|
|
228
|
+
"""
|
|
229
|
+
Extract artifact-related information from Grype finding data.
|
|
230
|
+
|
|
231
|
+
:param Dict[str, Any] item: Individual finding data
|
|
232
|
+
:return: Dictionary of artifact details
|
|
233
|
+
:rtype: Dict[str, Any]
|
|
234
|
+
"""
|
|
235
|
+
artifact = item.get("artifact", {})
|
|
236
|
+
locations = [loc.get("path", "") for loc in artifact.get("locations", [])]
|
|
237
|
+
return {
|
|
238
|
+
"type": artifact.get("type", ""),
|
|
239
|
+
"version": artifact.get("version", ""),
|
|
240
|
+
"name": artifact.get("name", ""),
|
|
241
|
+
"licenses": ", ".join(artifact.get("licenses", [])),
|
|
242
|
+
"purl": artifact.get("purl", ""),
|
|
243
|
+
"locations": locations,
|
|
244
|
+
}
|
|
245
|
+
|
|
246
|
+
def _determine_severity(self, finding_info: Dict[str, Any]) -> Dict[str, str]:
|
|
247
|
+
"""
|
|
248
|
+
Determine the severity and related details for the finding.
|
|
249
|
+
|
|
250
|
+
Prefers NVD data if available among related vulnerabilities.
|
|
251
|
+
|
|
252
|
+
:param Dict[str, Any] finding_info: Extracted finding details
|
|
253
|
+
:return: Dictionary with severity, CVE ID, and description
|
|
254
|
+
:rtype: Dict[str, str]
|
|
255
|
+
"""
|
|
256
|
+
severity_str = finding_info["severity"]
|
|
257
|
+
cve_id = finding_info["id"]
|
|
258
|
+
description = finding_info["description"]
|
|
259
|
+
cvss = finding_info["cvss"]
|
|
260
|
+
|
|
261
|
+
for vuln in finding_info["related_vulns"]:
|
|
262
|
+
if "nvd" in vuln.get("dataSource", ""):
|
|
263
|
+
cve_id = vuln.get("id", cve_id)
|
|
264
|
+
severity_str = vuln.get("severity", severity_str)
|
|
265
|
+
description = vuln.get("description", description)
|
|
266
|
+
cvss = vuln.get("cvss", cvss)
|
|
267
|
+
break
|
|
268
|
+
|
|
269
|
+
severity_value = self.finding_severity_map.get(severity_str.upper(), IssueSeverity.High.value)
|
|
270
|
+
try:
|
|
271
|
+
severity = IssueSeverity(severity_value)
|
|
272
|
+
except ValueError:
|
|
273
|
+
severity = IssueSeverity.High
|
|
274
|
+
|
|
275
|
+
return {"severity": severity, "cve_id": cve_id, "description": description, "cvss": cvss}
|
|
276
|
+
|
|
277
|
+
def _build_evidence(self, artifact_info: Dict[str, Any]) -> str:
|
|
278
|
+
"""
|
|
279
|
+
Build the evidence string for the finding.
|
|
280
|
+
|
|
281
|
+
:param Dict[str, Any] artifact_info: Artifact details
|
|
282
|
+
:return: Formatted evidence string
|
|
283
|
+
:rtype: str
|
|
284
|
+
"""
|
|
285
|
+
evidence = f"Found in {artifact_info['name']} {artifact_info['version']}"
|
|
286
|
+
details = [
|
|
287
|
+
f"type: {artifact_info['type']}" if artifact_info["type"] else "",
|
|
288
|
+
f"Locations: {', '.join(artifact_info['locations'])}" if artifact_info["locations"] else "",
|
|
289
|
+
f"Licenses: {artifact_info['licenses']}" if artifact_info["licenses"] else "",
|
|
290
|
+
f"Package URL: {artifact_info['purl']}" if artifact_info["purl"] else "",
|
|
291
|
+
f"References: {', '.join(artifact_info.get('urls', []))}" if artifact_info.get("urls", []) else "",
|
|
292
|
+
]
|
|
293
|
+
return evidence + "\n".join(filter(None, details))
|
|
294
|
+
|
|
295
|
+
def _build_observations(self, finding_info: Dict[str, Any]) -> str:
|
|
296
|
+
"""
|
|
297
|
+
Build the observations string for the finding.
|
|
298
|
+
|
|
299
|
+
:param Dict[str, Any] finding_info: Extracted finding details
|
|
300
|
+
:return: Formatted observations string
|
|
301
|
+
:rtype: str
|
|
302
|
+
"""
|
|
303
|
+
match_type = finding_info["match_details"][0].get("type", "") if finding_info["match_details"] else ""
|
|
304
|
+
matcher = finding_info["matcher"]
|
|
305
|
+
return "\n".join(
|
|
306
|
+
filter(
|
|
307
|
+
None,
|
|
308
|
+
[
|
|
309
|
+
f"Match type: {match_type}" if match_type else "",
|
|
310
|
+
f"Matcher: {matcher}" if matcher else "",
|
|
311
|
+
f"Data source: {finding_info['data_source']}" if finding_info["data_source"] else "",
|
|
312
|
+
f"Namespace: {finding_info['namespace']}" if finding_info["namespace"] else "",
|
|
313
|
+
],
|
|
314
|
+
)
|
|
315
|
+
)
|
|
316
|
+
|
|
317
|
+
def _build_remediation_info(self, fix_info: Dict[str, Any]) -> Dict[str, str]:
|
|
318
|
+
"""
|
|
319
|
+
Build remediation information for the finding.
|
|
320
|
+
|
|
321
|
+
:param Dict[str, Any] fix_info: Fix details from the finding
|
|
322
|
+
:return: Dictionary with remediation details
|
|
323
|
+
:rtype: Dict[str, str]
|
|
324
|
+
"""
|
|
325
|
+
state = fix_info.get("state", "No fix available")
|
|
326
|
+
remediation = f"State: {state}"
|
|
327
|
+
fixed_versions = ""
|
|
328
|
+
if versions := fix_info.get("versions", []):
|
|
329
|
+
fixed_versions = ", ".join(versions)
|
|
330
|
+
remediation += f", Fixed versions: {fixed_versions}"
|
|
331
|
+
return {"remediation": remediation, "fixed_versions": fixed_versions, "fix_status": state}
|
|
332
|
+
|
|
333
|
+
def _get_findings_data_from_file(self, data: Dict[str, Any]) -> list:
|
|
334
|
+
"""
|
|
335
|
+
Extract findings data from Grype file data.
|
|
336
|
+
|
|
337
|
+
:param Dict[str, Any] data: The data from the Grype file
|
|
338
|
+
:return: List of finding items
|
|
339
|
+
:rtype: list
|
|
340
|
+
"""
|
|
341
|
+
if not data or not isinstance(data, dict):
|
|
342
|
+
return []
|
|
343
|
+
|
|
344
|
+
matches = data.get("matches", [])
|
|
345
|
+
if not isinstance(matches, list):
|
|
346
|
+
return []
|
|
347
|
+
return matches
|
|
348
|
+
|
|
349
|
+
@staticmethod
|
|
350
|
+
def _get_cvss_fields(cvss):
|
|
351
|
+
"""
|
|
352
|
+
Get the CVSS v3 and v2 scores and vectors from the cvss data.
|
|
353
|
+
|
|
354
|
+
:param Dict finding: The cvss data
|
|
355
|
+
:return: The CVSS fields
|
|
356
|
+
:rtype: dict
|
|
357
|
+
"""
|
|
358
|
+
values = {
|
|
359
|
+
"V3Score": None,
|
|
360
|
+
"V2Score": None,
|
|
361
|
+
"V3Vector": None,
|
|
362
|
+
"V2Vector": None,
|
|
363
|
+
}
|
|
364
|
+
|
|
365
|
+
i = 0
|
|
366
|
+
while i < len(cvss):
|
|
367
|
+
item = cvss[i]
|
|
368
|
+
cvss_type = item.get("type", "")
|
|
369
|
+
version = item.get("version")
|
|
370
|
+
if "3." in version and cvss_type == "Primary":
|
|
371
|
+
values["V3Score"] = item.get("metrics", {}).get("baseScore", None)
|
|
372
|
+
values["V3Vector"] = item.get("vector", "")
|
|
373
|
+
elif "2." in version and item.get("type") == "Primary":
|
|
374
|
+
values["V2Score"] = item.get("metrics", {}).get("baseScore", None)
|
|
375
|
+
values["V2Vector"] = item.get("vector", "")
|
|
376
|
+
|
|
377
|
+
if values["V3Score"] is not None and values["V2Score"] is not None:
|
|
378
|
+
break
|
|
379
|
+
|
|
380
|
+
if values["V3Score"] is None and "3." in version and cvss_type == "Secondary":
|
|
381
|
+
values["V3Score"] = item.get("metrics", {}).get("baseScore", None)
|
|
382
|
+
values["V3Vector"] = item.get("vector", "")
|
|
383
|
+
|
|
384
|
+
if values["V2Score"] is None and "2." in version and cvss_type == "Secondary":
|
|
385
|
+
values["V2Score"] = item.get("metrics", {}).get("baseScore", None)
|
|
386
|
+
values["V2Vector"] = item.get("vector", "")
|
|
387
|
+
|
|
388
|
+
i += 1
|
|
389
|
+
|
|
390
|
+
return values
|
|
@@ -89,10 +89,10 @@ def import_all_scans(
|
|
|
89
89
|
from regscale.integrations.commercial.burp import import_burp
|
|
90
90
|
from regscale.integrations.commercial.defender import import_alerts
|
|
91
91
|
from regscale.integrations.commercial.ecr import import_ecr
|
|
92
|
-
from regscale.integrations.commercial.grype import import_scans as import_grype_scans
|
|
92
|
+
from regscale.integrations.commercial.grype.commands import import_scans as import_grype_scans
|
|
93
93
|
from regscale.integrations.commercial.ibm import import_appscan
|
|
94
94
|
from regscale.integrations.commercial.nexpose import import_nexpose
|
|
95
|
-
from regscale.integrations.commercial.opentext.
|
|
95
|
+
from regscale.integrations.commercial.opentext.commands import import_scans as import_opentext_file
|
|
96
96
|
from regscale.integrations.commercial.prisma import import_prisma
|
|
97
97
|
from regscale.integrations.commercial.qualys import import_scans as import_qualys
|
|
98
98
|
from regscale.integrations.commercial.snyk import import_snyk
|
|
@@ -79,6 +79,7 @@ class NessusIntegration(ScannerIntegration):
|
|
|
79
79
|
self.log_file_warning_and_exit(path)
|
|
80
80
|
if not self.check_collection(file_collection, path):
|
|
81
81
|
return
|
|
82
|
+
self.num_findings_to_process = 0
|
|
82
83
|
for file in iterate_files(file_collection):
|
|
83
84
|
content = read_file(file)
|
|
84
85
|
root = ET.fromstring(content)
|
|
@@ -87,6 +88,7 @@ class NessusIntegration(ScannerIntegration):
|
|
|
87
88
|
for nessus_vulnerability in root.iterfind(f"./Report/ReportHost[@name='{asset_name}']/ReportItem"):
|
|
88
89
|
parsed_vulnerability = self.parse_finding(nessus_vulnerability, asset_name)
|
|
89
90
|
if parsed_vulnerability:
|
|
91
|
+
self.num_findings_to_process += 1
|
|
90
92
|
yield parsed_vulnerability
|
|
91
93
|
self.move_files(file_collection)
|
|
92
94
|
|
|
@@ -214,6 +216,7 @@ class NessusIntegration(ScannerIntegration):
|
|
|
214
216
|
tree = ElementTree(root)
|
|
215
217
|
assets = nfr.scan.report_hosts(root)
|
|
216
218
|
cpe_items = cpe_xml_to_dict(tree) # type: ignore
|
|
219
|
+
self.num_assets_to_process = len(assets)
|
|
217
220
|
for asset in assets:
|
|
218
221
|
asset_properties = self.get_asset_properties(root, cpe_items, asset)
|
|
219
222
|
parsed_asset = self.parse_asset(asset_properties)
|