regscale-cli 6.27.2.0__py3-none-any.whl → 6.28.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of regscale-cli might be problematic.
- regscale/_version.py +1 -1
- regscale/core/app/application.py +1 -0
- regscale/core/app/internal/control_editor.py +73 -21
- regscale/core/app/internal/login.py +4 -1
- regscale/core/app/internal/model_editor.py +219 -64
- regscale/core/app/utils/app_utils.py +11 -2
- regscale/core/login.py +21 -4
- regscale/core/utils/date.py +77 -1
- regscale/dev/cli.py +26 -0
- regscale/dev/version.py +72 -0
- regscale/integrations/commercial/__init__.py +15 -1
- regscale/integrations/commercial/amazon/amazon/__init__.py +0 -0
- regscale/integrations/commercial/amazon/amazon/common.py +204 -0
- regscale/integrations/commercial/amazon/common.py +48 -58
- regscale/integrations/commercial/aws/audit_manager_compliance.py +2671 -0
- regscale/integrations/commercial/aws/cli.py +3093 -55
- regscale/integrations/commercial/aws/cloudtrail_control_mappings.py +333 -0
- regscale/integrations/commercial/aws/cloudtrail_evidence.py +501 -0
- regscale/integrations/commercial/aws/cloudwatch_control_mappings.py +357 -0
- regscale/integrations/commercial/aws/cloudwatch_evidence.py +490 -0
- regscale/integrations/commercial/aws/config_compliance.py +914 -0
- regscale/integrations/commercial/aws/conformance_pack_mappings.py +198 -0
- regscale/integrations/commercial/aws/evidence_generator.py +283 -0
- regscale/integrations/commercial/aws/guardduty_control_mappings.py +340 -0
- regscale/integrations/commercial/aws/guardduty_evidence.py +1053 -0
- regscale/integrations/commercial/aws/iam_control_mappings.py +368 -0
- regscale/integrations/commercial/aws/iam_evidence.py +574 -0
- regscale/integrations/commercial/aws/inventory/__init__.py +223 -22
- regscale/integrations/commercial/aws/inventory/base.py +107 -5
- regscale/integrations/commercial/aws/inventory/resources/audit_manager.py +513 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudtrail.py +315 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudtrail_logs_metadata.py +476 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudwatch.py +191 -0
- regscale/integrations/commercial/aws/inventory/resources/compute.py +66 -9
- regscale/integrations/commercial/aws/inventory/resources/config.py +464 -0
- regscale/integrations/commercial/aws/inventory/resources/containers.py +74 -9
- regscale/integrations/commercial/aws/inventory/resources/database.py +106 -31
- regscale/integrations/commercial/aws/inventory/resources/guardduty.py +286 -0
- regscale/integrations/commercial/aws/inventory/resources/iam.py +470 -0
- regscale/integrations/commercial/aws/inventory/resources/inspector.py +476 -0
- regscale/integrations/commercial/aws/inventory/resources/integration.py +175 -61
- regscale/integrations/commercial/aws/inventory/resources/kms.py +447 -0
- regscale/integrations/commercial/aws/inventory/resources/networking.py +103 -67
- regscale/integrations/commercial/aws/inventory/resources/s3.py +394 -0
- regscale/integrations/commercial/aws/inventory/resources/security.py +268 -72
- regscale/integrations/commercial/aws/inventory/resources/securityhub.py +473 -0
- regscale/integrations/commercial/aws/inventory/resources/storage.py +53 -29
- regscale/integrations/commercial/aws/inventory/resources/systems_manager.py +657 -0
- regscale/integrations/commercial/aws/inventory/resources/vpc.py +655 -0
- regscale/integrations/commercial/aws/kms_control_mappings.py +288 -0
- regscale/integrations/commercial/aws/kms_evidence.py +879 -0
- regscale/integrations/commercial/aws/ocsf/__init__.py +7 -0
- regscale/integrations/commercial/aws/ocsf/constants.py +115 -0
- regscale/integrations/commercial/aws/ocsf/mapper.py +435 -0
- regscale/integrations/commercial/aws/org_control_mappings.py +286 -0
- regscale/integrations/commercial/aws/org_evidence.py +666 -0
- regscale/integrations/commercial/aws/s3_control_mappings.py +356 -0
- regscale/integrations/commercial/aws/s3_evidence.py +632 -0
- regscale/integrations/commercial/aws/scanner.py +853 -205
- regscale/integrations/commercial/aws/security_hub.py +319 -0
- regscale/integrations/commercial/aws/session_manager.py +282 -0
- regscale/integrations/commercial/aws/ssm_control_mappings.py +291 -0
- regscale/integrations/commercial/aws/ssm_evidence.py +492 -0
- regscale/integrations/commercial/synqly/query_builder.py +4 -1
- regscale/integrations/compliance_integration.py +308 -38
- regscale/integrations/control_matcher.py +78 -23
- regscale/integrations/due_date_handler.py +3 -0
- regscale/integrations/public/csam/csam.py +572 -763
- regscale/integrations/public/csam/csam_agency_defined.py +179 -0
- regscale/integrations/public/csam/csam_common.py +154 -0
- regscale/integrations/public/csam/csam_controls.py +432 -0
- regscale/integrations/public/csam/csam_poam.py +124 -0
- regscale/integrations/public/fedramp/click.py +17 -4
- regscale/integrations/public/fedramp/fedramp_cis_crm.py +271 -62
- regscale/integrations/public/fedramp/poam/scanner.py +74 -7
- regscale/integrations/scanner_integration.py +415 -85
- regscale/models/integration_models/cisa_kev_data.json +80 -20
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +44 -3
- regscale/models/integration_models/synqly_models/ocsf_mapper.py +41 -12
- regscale/models/platform.py +3 -0
- regscale/models/regscale_models/__init__.py +5 -0
- regscale/models/regscale_models/assessment.py +2 -1
- regscale/models/regscale_models/component.py +1 -1
- regscale/models/regscale_models/control_implementation.py +55 -24
- regscale/models/regscale_models/control_objective.py +74 -5
- regscale/models/regscale_models/file.py +2 -0
- regscale/models/regscale_models/issue.py +2 -5
- regscale/models/regscale_models/organization.py +3 -0
- regscale/models/regscale_models/regscale_model.py +17 -5
- regscale/models/regscale_models/security_plan.py +1 -0
- regscale/regscale.py +11 -1
- {regscale_cli-6.27.2.0.dist-info → regscale_cli-6.28.0.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.27.2.0.dist-info → regscale_cli-6.28.0.0.dist-info}/RECORD +140 -57
- tests/regscale/core/test_login.py +171 -4
- tests/regscale/integrations/commercial/aws/__init__.py +0 -0
- tests/regscale/integrations/commercial/aws/test_audit_manager_compliance.py +1304 -0
- tests/regscale/integrations/commercial/aws/test_audit_manager_evidence_aggregation.py +341 -0
- tests/regscale/integrations/commercial/aws/test_aws_audit_manager_collector.py +1155 -0
- tests/regscale/integrations/commercial/aws/test_aws_cloudtrail_collector.py +534 -0
- tests/regscale/integrations/commercial/aws/test_aws_config_collector.py +400 -0
- tests/regscale/integrations/commercial/aws/test_aws_guardduty_collector.py +315 -0
- tests/regscale/integrations/commercial/aws/test_aws_iam_collector.py +458 -0
- tests/regscale/integrations/commercial/aws/test_aws_inspector_collector.py +353 -0
- tests/regscale/integrations/commercial/aws/test_aws_inventory_integration.py +530 -0
- tests/regscale/integrations/commercial/aws/test_aws_kms_collector.py +919 -0
- tests/regscale/integrations/commercial/aws/test_aws_s3_collector.py +722 -0
- tests/regscale/integrations/commercial/aws/test_aws_scanner_integration.py +722 -0
- tests/regscale/integrations/commercial/aws/test_aws_securityhub_collector.py +792 -0
- tests/regscale/integrations/commercial/aws/test_aws_systems_manager_collector.py +918 -0
- tests/regscale/integrations/commercial/aws/test_aws_vpc_collector.py +996 -0
- tests/regscale/integrations/commercial/aws/test_cli_evidence.py +431 -0
- tests/regscale/integrations/commercial/aws/test_cloudtrail_control_mappings.py +452 -0
- tests/regscale/integrations/commercial/aws/test_cloudtrail_evidence.py +788 -0
- tests/regscale/integrations/commercial/aws/test_config_compliance.py +298 -0
- tests/regscale/integrations/commercial/aws/test_conformance_pack_mappings.py +200 -0
- tests/regscale/integrations/commercial/aws/test_evidence_generator.py +386 -0
- tests/regscale/integrations/commercial/aws/test_guardduty_control_mappings.py +564 -0
- tests/regscale/integrations/commercial/aws/test_guardduty_evidence.py +1041 -0
- tests/regscale/integrations/commercial/aws/test_iam_control_mappings.py +718 -0
- tests/regscale/integrations/commercial/aws/test_iam_evidence.py +1375 -0
- tests/regscale/integrations/commercial/aws/test_kms_control_mappings.py +656 -0
- tests/regscale/integrations/commercial/aws/test_kms_evidence.py +1163 -0
- tests/regscale/integrations/commercial/aws/test_ocsf_mapper.py +370 -0
- tests/regscale/integrations/commercial/aws/test_org_control_mappings.py +546 -0
- tests/regscale/integrations/commercial/aws/test_org_evidence.py +1240 -0
- tests/regscale/integrations/commercial/aws/test_s3_control_mappings.py +672 -0
- tests/regscale/integrations/commercial/aws/test_s3_evidence.py +987 -0
- tests/regscale/integrations/commercial/aws/test_scanner_evidence.py +373 -0
- tests/regscale/integrations/commercial/aws/test_security_hub_config_filtering.py +539 -0
- tests/regscale/integrations/commercial/aws/test_session_manager.py +516 -0
- tests/regscale/integrations/commercial/aws/test_ssm_control_mappings.py +588 -0
- tests/regscale/integrations/commercial/aws/test_ssm_evidence.py +735 -0
- tests/regscale/integrations/commercial/test_aws.py +55 -56
- tests/regscale/integrations/test_control_matcher.py +24 -0
- tests/regscale/models/test_control_implementation.py +118 -3
- {regscale_cli-6.27.2.0.dist-info → regscale_cli-6.28.0.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.27.2.0.dist-info → regscale_cli-6.28.0.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.27.2.0.dist-info → regscale_cli-6.28.0.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.27.2.0.dist-info → regscale_cli-6.28.0.0.dist-info}/top_level.txt +0 -0
@@ -7,9 +7,10 @@ import time
 from typing import Any, Dict, Iterator, List, Optional, Tuple
 
 from regscale.core.utils.date import date_str, datetime_str
-from regscale.integrations.commercial.amazon.common import (
+from regscale.integrations.commercial.amazon.amazon.common import (
     check_finding_severity,
     determine_status_and_results,
+    fetch_aws_findings,
     get_comments,
     get_due_date,
 )
@@ -36,7 +37,9 @@ class AWSInventoryIntegration(ScannerIntegration):
 
     title = "AWS"
     asset_identifier_field = "awsIdentifier"
-    issue_identifier_field = "awsIdentifier"
+    issue_identifier_field = ""  # Use default otherIdentifier - awsIdentifier doesn't exist on Issue model
+    suppress_asset_not_found_errors = True  # Suppress asset not found errors for AWS findings
+    enable_cci_mapping = False  # AWS findings don't use CCI references
     finding_severity_map = {
         "CRITICAL": regscale_models.IssueSeverity.High,
         "HIGH": regscale_models.IssueSeverity.High,
@@ -57,6 +60,8 @@ class AWSInventoryIntegration(ScannerIntegration):
         :param int plan_id: The RegScale plan ID
         """
         super().__init__(plan_id=plan_id, kwargs=kwargs)
+        # Override parent's default - suppress asset not found errors for AWS
+        self.suppress_asset_not_found_errors = True
         self.collector: Optional[AWSInventoryCollector] = None
         self.discovered_assets: List[IntegrationAsset] = []
         self.processed_asset_identifiers: set = set()  # Track processed assets to avoid duplicates
@@ -67,20 +72,30 @@ class AWSInventoryIntegration(ScannerIntegration):
         aws_secret_access_key: Optional[str],
         region: str = os.getenv("AWS_REGION", "us-east-1"),
         aws_session_token: Optional[str] = os.getenv("AWS_SESSION_TOKEN"),
+        profile: Optional[str] = None,
+        account_id: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
     ) -> None:
         """
         Authenticate with AWS and initialize the inventory collector.
 
-        :param str aws_access_key_id: Optional AWS access key ID
-        :param str aws_secret_access_key: Optional AWS secret access key
+        :param str aws_access_key_id: Optional AWS access key ID (overrides profile)
+        :param str aws_secret_access_key: Optional AWS secret access key (overrides profile)
         :param str region: AWS region to collect inventory from
-        :param str aws_session_token: Optional AWS session
+        :param str aws_session_token: Optional AWS session token (overrides profile)
+        :param str profile: Optional AWS profile name from ~/.aws/credentials
+        :param str account_id: Optional AWS account ID to filter resources
+        :param dict tags: Optional dictionary of tag key-value pairs to filter resources
         """
         self.collector = AWSInventoryCollector(
             region=region,
+            profile=profile,
             aws_access_key_id=aws_access_key_id,
             aws_secret_access_key=aws_secret_access_key,
             aws_session_token=aws_session_token,
+            account_id=account_id,
+            tags=tags,
+            collect_findings=False,  # Disable findings collection for asset-only sync
         )
 
     def fetch_aws_data_if_needed(
@@ -89,29 +104,43 @@ class AWSInventoryIntegration(ScannerIntegration):
         aws_access_key_id: Optional[str],
         aws_secret_access_key: Optional[str],
         aws_session_token: Optional[str] = None,
+        profile: Optional[str] = None,
+        account_id: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        force_refresh: bool = False,
     ) -> Dict[str, Any]:
         """
         Fetch AWS inventory data, using cached data if available and not expired.
 
         :param str region: AWS region to collect inventory from
-        :param str aws_access_key_id: Optional AWS access key ID
-        :param str aws_secret_access_key: Optional AWS secret access key
-        :param str aws_session_token: Optional AWS session
+        :param str aws_access_key_id: Optional AWS access key ID (overrides profile)
+        :param str aws_secret_access_key: Optional AWS secret access key (overrides profile)
+        :param str aws_session_token: Optional AWS session token (overrides profile)
+        :param str profile: Optional AWS profile name from ~/.aws/credentials
+        :param str account_id: Optional AWS account ID to filter resources
+        :param dict tags: Optional dictionary of tag key-value pairs to filter resources
+        :param bool force_refresh: Force refresh inventory data, ignoring cache
         :return: Dictionary containing AWS inventory data
         :rtype: Dict[str, Any]
         """
         from regscale.models import DateTimeEncoder
 
-        # Check if we have cached data that's still valid
-        if os.path.exists(INVENTORY_FILE_PATH):
+        # Check if we have cached data that's still valid (unless force_refresh is True)
+        if not force_refresh and os.path.exists(INVENTORY_FILE_PATH):
             file_age = time.time() - os.path.getmtime(INVENTORY_FILE_PATH)
             if file_age < CACHE_TTL_SECONDS:
+                logger.info(f"Using cached AWS inventory data (age: {int(file_age / 60)} minutes)")
                 with open(INVENTORY_FILE_PATH, "r", encoding="utf-8") as file:
                     return json.load(file)
 
+        if force_refresh and os.path.exists(INVENTORY_FILE_PATH):
+            logger.info("Force refresh enabled - ignoring cached inventory data")
+
         # No valid cache, need to fetch new data
         if not self.collector:
-            self.authenticate(
+            self.authenticate(
+                aws_access_key_id, aws_secret_access_key, region, aws_session_token, profile, account_id, tags
+            )
 
         if not self.collector:
             raise RuntimeError("Failed to initialize AWS inventory collector")
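The reworked fetch_aws_data_if_needed above only reuses the cached inventory file when it is younger than the TTL and force_refresh is not set. A minimal standalone sketch of that mtime-based check follows; the path and TTL constants here are illustrative, not the module's actual values:

    import json
    import os
    import time
    from typing import Optional

    INVENTORY_FILE_PATH = "./artifacts/aws_inventory.json"  # hypothetical path
    CACHE_TTL_SECONDS = 60 * 60  # assumed one-hour TTL for illustration

    def load_cached_inventory(force_refresh: bool = False) -> Optional[dict]:
        """Return the cached inventory if it is fresh enough, otherwise None."""
        if force_refresh or not os.path.exists(INVENTORY_FILE_PATH):
            return None
        file_age = time.time() - os.path.getmtime(INVENTORY_FILE_PATH)
        if file_age >= CACHE_TTL_SECONDS:
            return None  # stale cache; caller should re-collect from AWS
        with open(INVENTORY_FILE_PATH, "r", encoding="utf-8") as file:
            return json.load(file)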
@@ -138,17 +167,11 @@ class AWSInventoryIntegration(ScannerIntegration):
         :yield: Iterator[IntegrationAsset]
         """
         for asset in assets:
-            if not isinstance(asset, dict)
+            if not isinstance(asset, dict):
                 logger.warning(f"Skipping {asset_type} due to invalid data format: {asset}")
                 continue
             try:
-
-                    for user in assets[asset]:
-                        self.num_assets_to_process += 1
-                        yield parser_method(user)
-                else:
-                    self.num_assets_to_process += 1
-                    yield parser_method(asset)
+                yield parser_method(asset)
             except Exception as e:
                 logger.error(f"Error parsing {asset_type} {asset}: {str(e)}", exc_info=True)
 
@@ -164,7 +187,14 @@ class AWSInventoryIntegration(ScannerIntegration):
         :param callable parser_method: Method to parse the asset
         :yield: Iterator[IntegrationAsset]
         """
-
+        section_data = inventory.get(section_key, [])
+
+        # Handle special case for IAM - need to extract Roles list from IAM dict
+        if section_key == "IAM" and isinstance(section_data, dict):
+            assets = section_data.get(asset_type, [])
+        else:
+            assets = section_data
+
         yield from self._process_asset_collection(assets, asset_type, parser_method)
 
     def get_asset_configs(self) -> List[Tuple[str, str, callable]]:
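The _process_inventory_section change above normalizes the IAM section, which arrives as a dict keyed by sub-collection (for example "Roles"), while every other inventory section is already a flat list. An illustrative sketch of that normalization, using made-up inventory data:

    from typing import Any, Dict, List

    def extract_section_assets(inventory: Dict[str, Any], section_key: str, asset_type: str) -> List[dict]:
        """Return the list of raw assets for a section, unwrapping the IAM dict."""
        section_data = inventory.get(section_key, [])
        if section_key == "IAM" and isinstance(section_data, dict):
            return section_data.get(asset_type, [])
        return section_data

    # Hypothetical inventory shapes:
    inventory = {"EC2": [{"InstanceId": "i-0abc"}], "IAM": {"Roles": [{"RoleName": "Admin"}]}}
    assert extract_section_assets(inventory, "EC2", "EC2") == [{"InstanceId": "i-0abc"}]
    assert extract_section_assets(inventory, "IAM", "Roles") == [{"RoleName": "Admin"}]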
@@ -192,17 +222,34 @@ class AWSInventoryIntegration(ScannerIntegration):
         aws_access_key_id: Optional[str] = None,
         aws_secret_access_key: Optional[str] = None,
         aws_session_token: Optional[str] = None,
+        profile: Optional[str] = None,
+        account_id: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        force_refresh: bool = False,
     ) -> Iterator[IntegrationAsset]:
         """
         Fetch AWS assets from the inventory.
 
         :param str region: AWS region to collect inventory from
-        :param str aws_access_key_id: Optional AWS access key ID
-        :param str aws_secret_access_key: Optional AWS secret access key
-        :param str aws_session_token: Optional AWS session
+        :param str aws_access_key_id: Optional AWS access key ID (overrides profile)
+        :param str aws_secret_access_key: Optional AWS secret access key (overrides profile)
+        :param str aws_session_token: Optional AWS session token (overrides profile)
+        :param str profile: Optional AWS profile name from ~/.aws/credentials
+        :param str account_id: Optional AWS account ID to filter resources
+        :param dict tags: Optional dictionary of tag key-value pairs to filter resources
+        :param bool force_refresh: Force refresh inventory data, ignoring cache
         :yield: Iterator[IntegrationAsset]
         """
-        inventory = self.fetch_aws_data_if_needed(
+        inventory = self.fetch_aws_data_if_needed(
+            region,
+            aws_access_key_id,
+            aws_secret_access_key,
+            aws_session_token,
+            profile,
+            account_id,
+            tags,
+            force_refresh,
+        )
         # Process each asset type using the corresponding parser
         asset_configs = self.get_asset_configs()
 
@@ -211,6 +258,91 @@ class AWSInventoryIntegration(ScannerIntegration):
         for section_key, asset_type, parser_method in asset_configs:
             yield from self._process_inventory_section(inventory, section_key, asset_type, parser_method)
 
+    def _calculate_ec2_storage(self, instance: Dict[str, Any]) -> int:
+        """
+        Calculate total storage from EC2 block devices.
+
+        :param Dict[str, Any] instance: The EC2 instance data
+        :return: Total storage in GB
+        :rtype: int
+        """
+        total_storage = 0
+        for device in instance.get("BlockDeviceMappings", []):
+            if "Ebs" in device:
+                # Note: We need to add a call to describe_volumes to get actual size
+                total_storage += 8  # Default to 8 GB if size unknown
+        return total_storage
+
+    def _determine_ec2_asset_type(
+        self, image_name: str, platform: Optional[str]
+    ) -> tuple[Any, Any, Any, Any, list[str]]:
+        """
+        Determine EC2 asset type, category, component type, and names based on image and platform.
+
+        :param str image_name: Lowercase image name
+        :param Optional[str] platform: Platform type (e.g., 'windows')
+        :return: Tuple of (operating_system, asset_type, asset_category, component_type, component_names)
+        :rtype: tuple
+        """
+        # Check for Palo Alto device first
+        if "pa-vm-aws" in image_name:
+            return (
+                regscale_models.AssetOperatingSystem.PaloAlto,
+                regscale_models.AssetType.Appliance,
+                regscale_models.AssetCategory.Hardware,
+                regscale_models.ComponentType.Hardware,
+                ["Palo Alto Networks IDPS"],
+            )
+
+        # Check for Windows platform
+        if platform == "windows":
+            return (
+                regscale_models.AssetOperatingSystem.WindowsServer,
+                regscale_models.AssetType.VM,
+                regscale_models.AssetCategory.Hardware,
+                regscale_models.ComponentType.Hardware,
+                [EC_INSTANCES],
+            )
+
+        # Default to Linux
+        return (
+            regscale_models.AssetOperatingSystem.Linux,
+            regscale_models.AssetType.VM,
+            regscale_models.AssetCategory.Hardware,
+            regscale_models.ComponentType.Hardware,
+            [EC_INSTANCES],
+        )
+
+    def _build_ec2_notes(
+        self, description: str, instance: Dict[str, Any], image_info: Dict[str, Any], cpu_count: int, ram: int
+    ) -> str:
+        """
+        Build detailed notes for EC2 instance.
+
+        :param str description: Instance description
+        :param Dict[str, Any] instance: The EC2 instance data
+        :param Dict[str, Any] image_info: AMI image information
+        :param int cpu_count: Number of vCPUs
+        :param int ram: RAM in GB
+        :return: Formatted notes string
+        :rtype: str
+        """
+        return f"""Description: {description}
+AMI ID: {instance.get('ImageId', '')}
+AMI Description: {image_info.get('Description', '')}
+Architecture: {instance.get('Architecture', '')}
+Root Device Type: {image_info.get('RootDeviceType', '')}
+Virtualization: {image_info.get('VirtualizationType', '')}
+Instance Type: {instance.get('InstanceType', '')}
+vCPUs: {cpu_count}
+RAM: {ram}GB
+State: {instance.get('State')}
+Platform Details: {instance.get('PlatformDetails', 'Linux')}
+Private IP: {instance.get('PrivateIpAddress', 'N/A')}
+Public IP: {instance.get('PublicIpAddress', 'N/A')}
+VPC ID: {instance.get('VpcId', 'N/A')}
+Subnet ID: {instance.get('SubnetId', 'N/A')}"""
+
     def parse_ec2_instance(self, instance: Dict[str, Any]) -> IntegrationAsset:
         """Parse EC2 instance data into an IntegrationAsset.
 
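The new _calculate_ec2_storage helper still assumes 8 GB per EBS mapping, because the block device mappings returned with an instance description do not carry volume sizes. One way to resolve actual sizes, as the in-code note suggests, is a follow-up describe_volumes call; this is a hedged sketch only, not part of the release:

    import boto3

    def ec2_storage_gb(instance: dict, region: str = "us-east-1") -> int:
        """Sum the sizes (GiB) of EBS volumes attached to a described EC2 instance."""
        volume_ids = [
            mapping["Ebs"]["VolumeId"]
            for mapping in instance.get("BlockDeviceMappings", [])
            if "Ebs" in mapping and "VolumeId" in mapping["Ebs"]
        ]
        if not volume_ids:
            return 0
        ec2 = boto3.client("ec2", region_name=region)
        response = ec2.describe_volumes(VolumeIds=volume_ids)
        return sum(volume.get("Size", 0) for volume in response.get("Volumes", []))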
@@ -224,15 +356,8 @@ class AWSInventoryIntegration(ScannerIntegration):
         )
         name = instance_name
 
-        # Calculate
-        total_storage =
-        for device in instance.get("BlockDeviceMappings", []):
-            if "Ebs" in device:
-                # Note: We need to add a call to describe_volumes to get actual size
-                total_storage += 8  # Default to 8 GB if size unknown
-
-        # Calculate RAM based on instance type
-        # This would need a mapping of instance types to RAM
+        # Calculate resources
+        total_storage = self._calculate_ec2_storage(instance)
         ram = 16  # Default to 16 GB for c5.2xlarge
 
         # Get CPU info
@@ -246,26 +371,10 @@ class AWSInventoryIntegration(ScannerIntegration):
         image_info = instance.get("ImageInfo", {})
         image_name = image_info.get("Name", "").lower()
 
-        #
-
-
-
-            asset_type = regscale_models.AssetType.Appliance
-            asset_category = regscale_models.AssetCategory.Hardware
-            component_type = regscale_models.ComponentType.Hardware
-            component_names = ["Palo Alto Networks IDPS"]
-        elif instance.get("Platform") == "windows":
-            operating_system = regscale_models.AssetOperatingSystem.WindowsServer
-            asset_type = regscale_models.AssetType.VM
-            asset_category = regscale_models.AssetCategory.Hardware
-            component_type = regscale_models.ComponentType.Hardware
-            component_names = [EC_INSTANCES]
-        else:
-            operating_system = regscale_models.AssetOperatingSystem.Linux
-            asset_type = regscale_models.AssetType.VM
-            asset_category = regscale_models.AssetCategory.Hardware
-            component_type = regscale_models.ComponentType.Hardware
-            component_names = [EC_INSTANCES]
+        # Determine asset type and OS
+        operating_system, asset_type, asset_category, component_type, component_names = self._determine_ec2_asset_type(
+            image_name, instance.get("Platform")
+        )
 
         os_version = image_info.get("Description", "")
 
@@ -280,29 +389,21 @@ class AWSInventoryIntegration(ScannerIntegration):
         # Create description
         description = f"{instance_name} - {instance.get('PlatformDetails', 'Linux')} instance running on {instance.get('InstanceType', '')} with {cpu_count} vCPUs and {ram}GB RAM"
 
-        # Build notes
-        notes =
-
-
-
-
-
-
-vCPUs: {cpu_count}
-RAM: {ram}GB
-State: {instance.get('State')}
-Platform Details: {instance.get('PlatformDetails', 'Linux')}
-Private IP: {instance.get('PrivateIpAddress', 'N/A')}
-Public IP: {instance.get('PublicIpAddress', 'N/A')}
-VPC ID: {instance.get('VpcId', 'N/A')}
-Subnet ID: {instance.get('SubnetId', 'N/A')}"""
+        # Build notes
+        notes = self._build_ec2_notes(description, instance, image_info, cpu_count, ram)
+
+        # Build full ARN for EC2 instance: arn:aws:ec2:region:account-id:instance/instance-id
+        instance_id = instance.get("InstanceId", "")
+        region = instance.get("Region", "us-east-1")
+        account_id = instance.get("OwnerId", "")
+        instance_arn = f"arn:aws:ec2:{region}:{account_id}:instance/{instance_id}"
 
         # Create URI for AWS Console link
-        uri = f"https://console.aws.amazon.com/ec2/v2/home?region={
+        uri = f"https://console.aws.amazon.com/ec2/v2/home?region={region}#InstanceDetails:instanceId={instance_id}"
 
         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=instance_arn,
             asset_type=asset_type,
             asset_category=asset_category,
             component_type=component_type,
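The identifier and aws_identifier fields now carry the full instance ARN instead of a bare instance ID, so Security Hub findings (which reference resources by ARN) can be matched back to the inventory asset. The ARN shape, with placeholder values:

    def ec2_instance_arn(region: str, account_id: str, instance_id: str) -> str:
        """ARN format used for EC2 instance asset/finding matching."""
        return f"arn:aws:ec2:{region}:{account_id}:instance/{instance_id}"

    # Example with made-up identifiers:
    print(ec2_instance_arn("us-east-1", "123456789012", "i-0abc123def4567890"))
    # arn:aws:ec2:us-east-1:123456789012:instance/i-0abc123def4567890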
@@ -322,12 +423,12 @@ Subnet ID: {instance.get('SubnetId', 'N/A')}"""
             ram=ram,
             operating_system=operating_system,
             os_version=os_version,
-            location=
+            location=region,
             notes=notes,
             model=instance.get("InstanceType"),
             manufacturer="AWS",
             is_public_facing=is_public_facing,
-            aws_identifier=
+            aws_identifier=instance_arn,  # Use full ARN for asset matching with findings
             vlan_id=instance.get("SubnetId"),
             uri=uri,
             source_data=instance,
@@ -368,7 +469,7 @@ Description: {description if isinstance(description, str) else ''}"""
         return IntegrationAsset(
             # Required fields
             name=name,
-            identifier=str(function.get("
+            identifier=str(function.get("FunctionArn", "")),
             asset_type=regscale_models.AssetType.Other,
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -447,11 +548,11 @@ Description: {description if isinstance(description, str) else ''}"""
         :rtype: IntegrationAsset
         """
         name = bucket.get("Name", "")
-
+        arn = f"arn:aws:s3:::{bucket.get('Name')}"
         return IntegrationAsset(
             # Required fields
             name=name,
-            identifier=
+            identifier=arn,
             asset_type=regscale_models.AssetType.Other,
             asset_category=regscale_models.AssetCategory.Hardware,
             component_type=regscale_models.ComponentType.Hardware,
@@ -464,7 +565,7 @@ Description: {description if isinstance(description, str) else ''}"""
             location=bucket.get("Region"),
             # Cloud identifiers
             external_id=bucket.get("Name"),
-            aws_identifier=
+            aws_identifier=arn,
             uri=f"https://{bucket.get('Name')}.s3.amazonaws.com",
             # Additional metadata
             manufacturer="AWS",
@@ -491,7 +592,7 @@ Description: {description if isinstance(description, str) else ''}"""
         return IntegrationAsset(
             # Required fields
             name=name,
-            identifier=str(db.get("
+            identifier=str(db.get("DBInstanceArn", "")),
             asset_type=regscale_models.AssetType.VM,
             asset_category=regscale_models.AssetCategory.Hardware,
             component_type=regscale_models.ComponentType.Hardware,
@@ -539,7 +640,7 @@ Description: {description if isinstance(description, str) else ''}"""
         return IntegrationAsset(
             # Required fields
             name=name,
-            identifier=str(table.get("
+            identifier=str(table.get("TableArn", "")),
             asset_type=regscale_models.AssetType.Other,
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -578,10 +679,16 @@ Description: {description if isinstance(description, str) else ''}"""
         if vpc.get("IsDefault"):
             notes = "Default VPC\n" + notes
 
+        # Build full ARN for VPC: arn:aws:ec2:region:account-id:vpc/vpc-id
+        vpc_id = vpc.get("VpcId", "")
+        region = vpc.get("Region", "us-east-1")
+        account_id = vpc.get("OwnerId", "")
+        vpc_arn = f"arn:aws:ec2:{region}:{account_id}:vpc/{vpc_id}"
+
         return IntegrationAsset(
             # Required fields
             name=name,
-            identifier=
+            identifier=vpc_arn,
             asset_type=regscale_models.AssetType.NetworkRouter,
             asset_category=regscale_models.AssetCategory.Hardware,
             component_type=regscale_models.ComponentType.Hardware,
@@ -595,12 +702,12 @@ Description: {description if isinstance(description, str) else ''}"""
                 if vpc.get("State") == "available"
                 else regscale_models.AssetStatus.Inactive
             ),
-            location=
+            location=region,
             # Network information
-            vlan_id=
+            vlan_id=vpc_id,
             # Cloud identifiers
-            external_id=
-            aws_identifier=
+            external_id=vpc_id,
+            aws_identifier=vpc_arn,  # Use full ARN for asset matching with findings
             # Additional metadata
             manufacturer="AWS",
             notes=f"CIDR: {vpc.get('CidrBlock')}",
@@ -623,7 +730,7 @@ Description: {description if isinstance(description, str) else ''}"""
         return IntegrationAsset(
             # Required fields
             name=name,
-            identifier=
+            identifier=lb.get("LoadBalancerArn"),
             asset_type=regscale_models.AssetType.NetworkRouter,
             asset_category=regscale_models.AssetCategory.Hardware,
             component_type=regscale_models.ComponentType.Hardware,
@@ -673,7 +780,7 @@ Description: {description if isinstance(description, str) else ''}"""
         return IntegrationAsset(
             # Required fields
             name=name,
-            identifier=str(repo.get("
+            identifier=str(repo.get("RepositoryArn", "")),
             asset_type=regscale_models.AssetType.Other,
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -693,6 +800,94 @@ Description: {description if isinstance(description, str) else ''}"""
             source_data=repo,
         )
 
+    def _validate_aws_credentials(
+        self,
+        profile: Optional[str],
+        aws_secret_key_id: Optional[str],
+        aws_secret_access_key: Optional[str],
+        region: Optional[str],
+    ) -> None:
+        """
+        Validate AWS credentials and region are provided.
+
+        :param profile: AWS profile name
+        :param aws_secret_key_id: AWS access key ID
+        :param aws_secret_access_key: AWS secret access key
+        :param region: AWS region
+        :raises ValueError: If credentials are not provided
+        """
+        if not profile and (not aws_secret_key_id or not aws_secret_access_key):
+            raise ValueError(
+                "AWS Profile or Access Credentials are required.\nPlease provide --profile or set environment "
+                "variables (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY) or pass as arguments."
+            )
+        if not region:
+            logger.warning("AWS region not provided. Defaulting to 'us-east-1'.")
+
+    def _get_severity_config(self) -> Optional[str]:
+        """
+        Get minimum severity from config.
+
+        :return: Minimum severity or None
+        :rtype: Optional[str]
+        """
+        try:
+            minimum_severity = self.app.config.get("issues", {}).get("amazon", {}).get("minimumSeverity")
+            if minimum_severity:
+                logger.info(f"Using minimumSeverity from config: {minimum_severity}")
+            return minimum_severity
+        except (KeyError, AttributeError):
+            logger.debug("No minimumSeverity configured, fetching all findings")
+            return None
+
+    def _get_posture_management_config(self) -> bool:
+        """
+        Get posture management only setting from config.
+
+        :return: Posture management only setting (defaults to True)
+        :rtype: bool
+        """
+        try:
+            posture_management_only = (
+                self.app.config.get("issues", {}).get("amazon", {}).get("postureManagementOnly", True)
+            )
+            if posture_management_only:
+                logger.info("Fetching posture management findings only (security standards compliance checks)")
+            else:
+                logger.info("Fetching all Security Hub findings (including non-compliance findings)")
+            return posture_management_only
+        except (KeyError, AttributeError):
+            logger.debug("No postureManagementOnly configured, defaulting to True")
+            return True
+
+    def _create_aws_session(
+        self,
+        aws_secret_key_id: Optional[str],
+        aws_secret_access_key: Optional[str],
+        region: str,
+        profile: Optional[str],
+        **kwargs,
+    ):
+        """
+        Create AWS session with profile or explicit credentials.
+
+        :param aws_secret_key_id: AWS access key ID
+        :param aws_secret_access_key: AWS secret access key
+        :param region: AWS region
+        :param profile: AWS profile name
+        :return: Boto3 session
+        """
+        import boto3
+
+        if aws_secret_key_id or aws_secret_access_key:
+            return boto3.Session(
+                region_name=region,
+                aws_access_key_id=aws_secret_key_id,
+                aws_secret_access_key=aws_secret_access_key,
+                aws_session_token=kwargs.get("aws_session_token"),
+            )
+        return boto3.Session(profile_name=profile, region_name=region)
+
     def fetch_findings(self, *args, **kwargs) -> Iterator[IntegrationFinding]:
         """
         Fetch security findings from AWS Security Hub.
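The new _create_aws_session helper gives explicit keys precedence over a named profile: if either key is supplied, a credential-based boto3 session is built; otherwise the profile from ~/.aws/credentials is used. A condensed sketch of that precedence rule, with illustrative parameter names:

    from typing import Optional
    import boto3

    def make_session(
        access_key_id: Optional[str],
        secret_access_key: Optional[str],
        region: str,
        profile: Optional[str] = None,
        session_token: Optional[str] = None,
    ) -> boto3.Session:
        # Explicit credentials win over a configured profile.
        if access_key_id or secret_access_key:
            return boto3.Session(
                region_name=region,
                aws_access_key_id=access_key_id,
                aws_secret_access_key=secret_access_key,
                aws_session_token=session_token,
            )
        return boto3.Session(profile_name=profile, region_name=region)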
@@ -700,39 +895,35 @@ Description: {description if isinstance(description, str) else ''}"""
 
         :yield: Iterator[IntegrationFinding]
         """
-        import boto3
-
-        from regscale.integrations.commercial.amazon.common import fetch_aws_findings
-
         aws_secret_key_id = kwargs.get("aws_access_key_id") or os.getenv("AWS_ACCESS_KEY_ID")
         aws_secret_access_key = kwargs.get("aws_secret_access_key") or os.getenv("AWS_SECRET_ACCESS_KEY")
-        region = kwargs.get("region") or os.getenv("AWS_REGION")
-
-
-
-
-
-
-
-
-
-
-
-            aws_secret_access_key
-
-        )
+        region = kwargs.get("region") or os.getenv("AWS_REGION", "us-east-1")
+        profile = kwargs.get("profile")
+
+        self._validate_aws_credentials(profile, aws_secret_key_id, aws_secret_access_key, region)
+
+        minimum_severity = self._get_severity_config()
+        posture_management_only = self._get_posture_management_config()
+
+        # Create a copy of kwargs excluding parameters we're passing explicitly
+        session_kwargs = {
+            k: v
+            for k, v in kwargs.items()
+            if k not in ("aws_access_key_id", "aws_secret_access_key", "region", "profile")
+        }
+        session = self._create_aws_session(aws_secret_key_id, aws_secret_access_key, region, profile, **session_kwargs)
         client = session.client("securityhub")
-
-
-
+
+        aws_findings = fetch_aws_findings(
+            aws_client=client, minimum_severity=minimum_severity, posture_management_only=posture_management_only
+        )
+
         self.discovered_assets.clear()
         self.processed_asset_identifiers.clear()
 
-        self.num_findings_to_process = len(aws_findings)
         for finding in aws_findings:
             yield from iter(self.parse_finding(finding))
 
-        # Log discovered assets count
         if self.discovered_assets:
             logger.info(f"Discovered {len(self.discovered_assets)} assets from Security Hub findings")
 
@@ -755,34 +946,109 @@ Description: {description if isinstance(description, str) else ''}"""
         :return: Tuple of (findings_processed, assets_processed)
         :rtype: tuple[int, int]
         """
+        from regscale.core.app.utils.app_utils import create_progress_object
+
         logger.info("Starting AWS Security Hub findings and assets sync...")
 
-        #
-
+        # Create progress bar context for the entire operation
+        with create_progress_object() as progress:
+            # Store progress object for use by nested methods
+            self.finding_progress = progress
 
-
-
-        self.processed_asset_identifiers.clear()
+            # First, fetch findings to discover assets (but don't sync findings yet)
+            logger.info("Discovering assets from AWS Security Hub findings...")
 
-
-
+            # Reset discovered assets for this run
+            self.discovered_assets.clear()
+            self.processed_asset_identifiers.clear()
 
-
-
-            logger.info(f"Creating {len(self.discovered_assets)} assets discovered from findings...")
-            self.num_assets_to_process = len(self.discovered_assets)
-            assets_processed = self.update_regscale_assets(self.get_discovered_assets())
-            logger.info(f"Successfully created {assets_processed} assets")
-        else:
-            logger.info("No assets discovered from findings")
-            assets_processed = 0
+            # Fetch findings to discover assets - store them to avoid re-fetching
+            findings_list = list(self.fetch_findings(**kwargs))
 
-
-
-
+            # Sync the discovered assets first
+            if self.discovered_assets:
+                logger.info(f"Creating {len(self.discovered_assets)} assets discovered from findings...")
+                self.num_assets_to_process = len(self.discovered_assets)
+                assets_processed = self.update_regscale_assets(self.get_discovered_assets())
+                logger.info(f"Successfully created {assets_processed} assets")
+            else:
+                logger.info("No assets discovered from findings")
+                assets_processed = 0
+
+            # Now process the findings we already fetched (avoid double-fetching)
+            logger.info("Now syncing findings with created assets...")
+            findings_processed = self.update_regscale_findings(findings_list)
+
+            # Log completion summary
+            logger.info(
+                f"AWS Security Hub sync completed successfully: {findings_processed} findings processed, {assets_processed} assets created"
+            )
 
         return findings_processed, assets_processed
 
+    @classmethod
+    def sync_findings(cls, plan_id: int, **kwargs) -> int:
+        """
+        Sync AWS Security Hub findings to RegScale.
+
+        :param int plan_id: The RegScale plan ID
+        :param kwargs: Additional keyword arguments including:
+            - region (str): AWS region
+            - profile (Optional[str]): AWS profile name
+            - aws_access_key_id (Optional[str]): AWS access key ID
+            - aws_secret_access_key (Optional[str]): AWS secret access key
+            - aws_session_token (Optional[str]): AWS session token
+            - account_id (Optional[str]): AWS account ID to filter by
+            - tags (Optional[Dict[str, str]]): Tags to filter by
+            - import_all_findings (bool): Import all findings even without matching assets
+        :return: Number of findings processed
+        :rtype: int
+        """
+        # Extract parameters from kwargs
+        region = kwargs.get("region", "us-east-1")
+        profile = kwargs.get("profile")
+        aws_access_key_id = kwargs.get("aws_access_key_id")
+        aws_secret_access_key = kwargs.get("aws_secret_access_key")
+        aws_session_token = kwargs.get("aws_session_token")
+        account_id = kwargs.get("account_id")
+        tags = kwargs.get("tags")
+        import_all_findings = kwargs.get("import_all_findings", False)
+
+        instance = cls(plan_id=plan_id, import_all_findings=import_all_findings)
+        instance.authenticate(
+            aws_access_key_id=aws_access_key_id,
+            aws_secret_access_key=aws_secret_access_key,
+            region=region,
+            aws_session_token=aws_session_token,
+            profile=profile,
+            account_id=account_id,
+            tags=tags,
+        )
+
+        # Load assets first
+        logger.info("Loading asset map from RegScale...")
+        instance.asset_map_by_identifier.update(instance.get_asset_map())
+
+        # Fetch and sync findings
+        logger.info("Fetching and syncing AWS Security Hub findings...")
+        findings = list(
+            instance.fetch_findings(
+                profile=profile,
+                aws_access_key_id=aws_access_key_id,
+                aws_secret_access_key=aws_secret_access_key,
+                aws_session_token=aws_session_token,
+                region=region,
+                account_id=account_id,
+                tags=tags,
+            )
+        )
+
+        # Use progress bar context for findings processing
+        with instance.finding_progress:
+            findings_processed = instance.update_regscale_findings(findings)
+
+        return findings_processed
+
     def get_configured_issue_status(self) -> IssueStatus:
         """
         Get the configured issue status from the configuration.
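The new sync_findings classmethod wires the whole flow together: instantiate the integration, authenticate, load the existing asset map, fetch Security Hub findings, then push them through update_regscale_findings. A hypothetical invocation for illustration only; the plan ID, profile, and tag values are placeholders:

    findings_count = AWSInventoryIntegration.sync_findings(
        plan_id=123,
        region="us-east-1",
        profile="my-aws-profile",
        tags={"Environment": "prod"},
        import_all_findings=False,
    )
    print(f"Processed {findings_count} Security Hub findings")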
@@ -840,6 +1106,70 @@ Description: {description if isinstance(description, str) else ''}"""
 
         return should_process
 
+    def is_service_enabled_for_resource(self, resource_type: str) -> bool:
+        """
+        Check if the AWS service for a given resource type is enabled in config.
+
+        :param str resource_type: AWS resource type (e.g., 'AwsEc2Instance', 'AwsS3Bucket')
+        :return: True if the service is enabled or config not found, False otherwise
+        :rtype: bool
+        """
+        # Map resource types to service configuration keys
+        resource_to_service_map = {
+            "AwsEc2Instance": ("compute", "ec2"),
+            "AwsEc2SecurityGroup": ("security", "securityhub"),
+            "AwsEc2Subnet": ("networking", "vpc"),
+            "AwsS3Bucket": ("storage", "s3"),
+            "AwsRdsDbInstance": ("database", "rds"),
+            "AwsLambdaFunction": ("compute", "lambda"),
+            "AwsEcrRepository": ("containers", "ecr"),
+            "AwsIamUser": ("security", "iam"),
+            "AwsIamRole": ("security", "iam"),
+            "AwsDynamoDbTable": ("database", "dynamodb"),
+            "AwsKmsKey": ("security", "kms"),
+            "AwsSecretsManagerSecret": ("security", "secrets_manager"),
+            "AwsCloudTrailTrail": ("security", "cloudtrail"),
+            "AwsConfigConfigurationRecorder": ("security", "config"),
+            "AwsGuardDutyDetector": ("security", "guardduty"),
+            "AwsInspector2": ("security", "inspector"),
+            "AwsAuditManagerAssessment": ("security", "audit_manager"),
+        }
+
+        try:
+            # Get the service category and service name for this resource type
+            service_info = resource_to_service_map.get(resource_type)
+            if not service_info:
+                # If resource type not in map, allow it by default (don't filter unknowns)
+                logger.debug(f"Resource type '{resource_type}' not in service map, allowing by default")
+                return True
+
+            category, service_name = service_info
+
+            # Check if the service is enabled in config
+            enabled_services = self.app.config.get("aws", {}).get("inventory", {}).get("enabled_services", {})
+
+            # Check if category is enabled
+            category_config = enabled_services.get(category, {})
+            if not category_config.get("enabled", True):
+                logger.debug(f"Service category '{category}' is disabled, filtering resource type '{resource_type}'")
+                return False
+
+            # Check if specific service is enabled
+            services = category_config.get("services", {})
+            is_enabled = services.get(service_name, True)
+
+            if not is_enabled:
+                logger.debug(
+                    f"Service '{service_name}' in category '{category}' is disabled, filtering resource type '{resource_type}'"
+                )
+
+            return is_enabled
+
+        except (KeyError, AttributeError) as e:
+            # If config not found or malformed, allow by default (don't filter)
+            logger.debug(f"Could not check service enablement for '{resource_type}': {e}. Allowing by default.")
+            return True
+
     @staticmethod
     def get_baseline(resource: dict) -> str:
         """
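is_service_enabled_for_resource walks a nested aws.inventory.enabled_services structure: a category can be switched off wholesale, individual services within it can be disabled, and anything missing defaults to enabled. A sketch of a config shape the lookup would accept (keys mirror the code's .get() chain; the real configuration file layout may differ):

    config = {
        "aws": {
            "inventory": {
                "enabled_services": {
                    "storage": {"enabled": True, "services": {"s3": False}},   # S3 filtered out
                    "security": {"enabled": True, "services": {"iam": True}},  # IAM allowed
                    "compute": {"enabled": False},                             # whole category disabled
                }
            }
        }
    }

    def service_enabled(category: str, service: str) -> bool:
        enabled_services = config.get("aws", {}).get("inventory", {}).get("enabled_services", {})
        category_config = enabled_services.get(category, {})
        if not category_config.get("enabled", True):
            return False
        return category_config.get("services", {}).get(service, True)

    assert service_enabled("storage", "s3") is False
    assert service_enabled("security", "iam") is True
    assert service_enabled("compute", "ec2") is False
    assert service_enabled("database", "rds") is True  # absent entries default to enabled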
@@ -877,6 +1207,306 @@ Description: {description if isinstance(description, str) else ''}"""
|
|
|
877
1207
|
except IndexError:
|
|
878
1208
|
return None
|
|
879
1209
|
|
|
1210
|
+
def _discover_asset_from_resource(self, resource: dict, finding: dict) -> None:
|
|
1211
|
+
"""
|
|
1212
|
+
Discover and track asset from finding resource.
|
|
1213
|
+
|
|
1214
|
+
:param dict resource: AWS Security Hub resource
|
|
1215
|
+
:param dict finding: AWS Security Hub finding
|
|
1216
|
+
"""
|
|
1217
|
+
asset = self.parse_resource_to_asset(resource, finding)
|
|
1218
|
+
if asset and asset.identifier not in self.processed_asset_identifiers:
|
|
1219
|
+
self.discovered_assets.append(asset)
|
|
1220
|
+
self.processed_asset_identifiers.add(asset.identifier)
|
|
1221
|
+
logger.debug(f"Discovered asset from finding: {asset.name} ({asset.identifier})")
|
|
1222
|
+
|
|
1223
|
+
def _get_friendly_severity(self, severity: str) -> str:
|
|
1224
|
+
"""
|
|
1225
|
+
Convert severity level to friendly name.
|
|
1226
|
+
|
|
1227
|
+
:param str severity: Raw severity level
|
|
1228
|
+
:return: Friendly severity name (low, moderate, high)
|
|
1229
|
+
:rtype: str
|
|
1230
|
+
"""
|
|
1231
|
+
if severity in ["CRITICAL", "HIGH"]:
|
|
1232
|
+
return "high"
|
|
1233
|
+
elif severity in ["MEDIUM", "MODERATE"]:
|
|
1234
|
+
return "moderate"
|
|
1235
|
+
return "low"
|
|
1236
|
+
|
|
1237
|
+
def _get_due_date_for_finding(self, finding: dict, friendly_sev: str) -> str:
|
|
1238
|
+
"""
|
|
1239
|
+
Calculate due date for finding based on severity.
|
|
1240
|
+
|
|
1241
|
+
:param dict finding: AWS Security Hub finding
|
|
1242
|
+
:param str friendly_sev: Friendly severity name
|
|
1243
|
+
:return: Due date string
|
|
1244
|
+
:rtype: str
|
|
1245
|
+
"""
|
|
1246
|
+
try:
|
|
1247
|
+
days = self.app.config["issues"]["amazon"][friendly_sev]
|
|
1248
|
+
except KeyError:
|
|
1249
|
+
logger.warning("Invalid severity level, defaulting to 30 day due date")
|
|
1250
|
+
days = 30
|
|
1251
|
+
return datetime_str(get_due_date(date_str(finding["CreatedAt"]), days))
|
|
1252
|
+
|
|
1253
|
+
def _construct_plugin_id(self, finding: dict, resource: dict = None) -> tuple[str, str]:
|
|
1254
|
+
"""
|
|
1255
|
+
Construct plugin name and ID from finding.
|
|
1256
|
+
|
|
1257
|
+
:param dict finding: AWS Security Hub finding
|
|
1258
|
+
:param dict resource: Optional resource dict for per-resource plugin ID
|
|
1259
|
+
:return: Tuple of (plugin_name, plugin_id)
|
|
1260
|
+
:rtype: tuple[str, str]
|
|
1261
|
+
"""
|
|
1262
|
+
plugin_name = next(iter(finding.get("Types", [])), "Unknown")
|
|
1263
|
+
finding_id = finding.get("Id", "")
|
|
1264
|
+
|
|
1265
|
+
# Extract UUID from ARN or full ID
|
|
1266
|
+
if "/" in finding_id:
|
|
1267
|
+
finding_uuid = finding_id.split("/")[-1]
|
|
1268
|
+
else:
|
|
1269
|
+
finding_uuid = finding_id.split(":")[-1]
|
|
1270
|
+
|
|
1271
|
+
# Sanitize plugin name for ID
|
|
1272
|
+
sanitized_name = plugin_name.replace(" ", "_").replace("/", "_").replace(":", "_")
|
|
1273
|
+
|
|
1274
|
+
# If we have multiple resources for this finding, include resource identifier
|
|
1275
|
+
# This ensures proper deduplication when a finding affects multiple resources
|
|
1276
|
+
if resource and len(finding.get("Resources", [])) > 1:
|
|
1277
|
+
resource_id = resource.get("Id", "")
|
|
1278
|
+
# Extract just the resource identifier part from ARN
|
|
1279
|
+
if "/" in resource_id:
|
|
1280
|
+
resource_suffix = resource_id.split("/")[-1]
|
|
1281
|
+
elif ":" in resource_id:
|
|
1282
|
+
resource_suffix = resource_id.split(":")[-1]
|
|
1283
|
+
else:
|
|
1284
|
+
resource_suffix = resource_id
|
|
1285
|
+
|
|
1286
|
+
# Sanitize and append resource suffix
|
|
1287
|
+
resource_suffix = resource_suffix.replace(" ", "_").replace("/", "_").replace(":", "_")
|
|
1288
|
+
plugin_id = f"{sanitized_name}_{finding_uuid}_{resource_suffix}"
|
|
1289
|
+
else:
|
|
1290
|
+
plugin_id = f"{sanitized_name}_{finding_uuid}"
|
|
1291
|
+
|
|
1292
|
+
return plugin_name, plugin_id
|
|
1293
|
+
|
|
1294
|
+
def _extract_cvss_scores(self, cvss_list: list) -> list:
|
|
1295
|
+
"""
|
|
1296
|
+
Extract CVSS scores from vulnerability data.
|
|
1297
|
+
|
|
1298
|
+
:param list cvss_list: List of CVSS data
|
|
1299
|
+
:return: List of formatted CVSS score strings
|
|
1300
|
+
:rtype: list
|
|
1301
|
+
"""
|
|
1302
|
+
cvss_scores = []
|
|
1303
|
+
for cvss in cvss_list:
|
|
1304
|
+
cvss_version = cvss.get("Version", "")
|
|
1305
|
+
cvss_score = cvss.get("BaseScore", 0)
|
|
1306
|
+
cvss_vector = cvss.get("BaseVector", "")
|
|
1307
|
+
if cvss_score:
|
|
1308
|
+
score_str = f"CVSS{cvss_version}: {cvss_score}"
|
|
1309
|
+
if cvss_vector:
|
|
1310
|
+
score_str += f" ({cvss_vector})"
|
|
1311
|
+
cvss_scores.append(score_str)
|
|
1312
|
+
return cvss_scores
|
|
1313
|
+
|
|
1314
|
+
def _extract_vendor_info(self, vendor: dict) -> str:
|
|
1315
|
+
"""
|
|
1316
|
+
Extract vendor information from vulnerability data.
|
|
1317
|
+
|
|
1318
|
+
:param dict vendor: Vendor data
|
|
1319
|
+
:return: Formatted vendor info string
|
|
1320
|
+
:rtype: str
|
|
1321
|
+
"""
|
|
1322
|
+
vendor_name = vendor.get("Name", "")
|
|
1323
|
+
vendor_url = vendor.get("Url", "")
|
|
1324
|
+
if not vendor_name:
|
|
1325
|
+
return ""
|
|
1326
|
+
return f"{vendor_name}: {vendor_url}" if vendor_url else vendor_name
|
|
1327
|
+
|
|
1328
|
+
def _build_package_version_string(self, pkg: dict) -> str:
|
|
1329
|
+
"""
|
|
1330
|
+
Build version string from package data.
|
|
1331
|
+
|
|
1332
|
+
:param dict pkg: Package data
|
|
1333
|
+
:return: Formatted version string
|
|
1334
|
+
:rtype: str
|
|
1335
|
+
"""
|
|
1336
|
+
pkg_version = pkg.get("Version", "")
|
|
1337
|
+
if not pkg_version:
|
|
1338
|
+
return ""
|
|
1339
|
+
|
|
1340
|
+
version_str = pkg_version
|
|
1341
|
+
if pkg_epoch := pkg.get("Epoch", ""):
|
|
1342
|
+
version_str = f"{pkg_epoch}:{version_str}"
|
|
1343
|
+
if pkg_release := pkg.get("Release", ""):
|
|
1344
|
+
version_str = f"{version_str}-{pkg_release}"
|
|
1345
|
+
if pkg_arch := pkg.get("Architecture", ""):
|
|
1346
|
+
version_str = f"{version_str}.{pkg_arch}"
|
|
1347
|
+
return version_str
|
|
1348
|
+
|
|
+    def _extract_package_details(self, pkg: dict) -> str:
+        """
+        Extract package details from vulnerable package data.
+
+        :param dict pkg: Package data
+        :return: Formatted package details string
+        :rtype: str
+        """
+        pkg_details = []
+
+        if pkg_name := pkg.get("Name", ""):
+            pkg_details.append(f"Package: {pkg_name}")
+
+        if version_str := self._build_package_version_string(pkg):
+            pkg_details.append(f"Installed Version: {version_str}")
+
+        if fixed_version := pkg.get("FixedInVersion", ""):
+            pkg_details.append(f"Fixed In: {fixed_version}")
+
+        return " | ".join(pkg_details) if pkg_details else ""
+
+    def _process_vulnerability(self, vuln: dict, cve_data: dict) -> None:
+        """
+        Process a single vulnerability and update CVE data dictionary.
+
+        :param dict vuln: Vulnerability data
+        :param dict cve_data: CVE data dictionary to update
+        """
+        if cve_id := vuln.get("Id", ""):
+            cve_data["cve_ids"].append(cve_id)
+
+        if cvss_list := vuln.get("Cvss", []):
+            cve_data["cvss_scores"].extend(self._extract_cvss_scores(cvss_list))
+
+        if vendor := vuln.get("Vendor", {}):
+            if vendor_info := self._extract_vendor_info(vendor):
+                cve_data["vendor_info"].append(vendor_info)
+
+        if ref_urls := vuln.get("ReferenceUrls", []):
+            cve_data["reference_urls"].extend(ref_urls)
+
+        for pkg in vuln.get("VulnerablePackages", []):
+            if pkg_details := self._extract_package_details(pkg):
+                cve_data["vulnerability_details"].append(pkg_details)
+
+    def _extract_cve_data(self, finding: dict) -> dict:
+        """
+        Extract CVE and vulnerability data from AWS Security Hub finding.
+
+        :param dict finding: AWS Security Hub finding
+        :return: Dictionary with CVE data
+        :rtype: dict
+        """
+        cve_data: dict = {
+            "cve_ids": [],
+            "cvss_scores": [],
+            "vulnerability_details": [],
+            "vendor_info": [],
+            "reference_urls": [],
+        }
+
+        vulnerabilities = finding.get("Vulnerabilities", [])
+        if not vulnerabilities:
+            return cve_data
+
+        for vuln in vulnerabilities:
+            self._process_vulnerability(vuln, cve_data)
+
+        return cve_data
+
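Given the "Vulnerabilities" list on a Security Hub finding, the extraction above yields one flat dictionary per finding. A hedged sketch of the expected shape for a minimal, invented payload:

    # Sketch only; the finding fragment below is fabricated to illustrate the output shape.
    finding = {
        "Vulnerabilities": [
            {
                "Id": "CVE-2023-12345",
                "Cvss": [{"Version": "3.1", "BaseScore": 7.5, "BaseVector": "AV:N/AC:L"}],
                "ReferenceUrls": ["https://example.com/advisory"],
            }
        ]
    }
    # Expected result, per the logic in the diff above:
    # {"cve_ids": ["CVE-2023-12345"],
    #  "cvss_scores": ["CVSS3.1: 7.5 (AV:N/AC:L)"],
    #  "vulnerability_details": [], "vendor_info": [],
    #  "reference_urls": ["https://example.com/advisory"]}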
+    def _create_integration_finding(
+        self,
+        resource: dict,
+        finding: dict,
+        severity: str,
+        comments: str,
+        status: str,
+        results: str,
+        due_date: str,
+        plugin_name: str,
+        plugin_id: str,
+    ) -> IntegrationFinding:
+        """
+        Create IntegrationFinding from processed finding data.
+
+        :param dict resource: AWS resource from finding
+        :param dict finding: AWS Security Hub finding
+        :param str severity: Severity level
+        :param str comments: Finding comments
+        :param str status: Compliance status
+        :param str results: Test results
+        :param str due_date: Due date string
+        :param str plugin_name: Plugin name
+        :param str plugin_id: Plugin ID
+        :return: Integration finding
+        :rtype: IntegrationFinding
+        """
+        # Extract CVE data from finding
+        cve_data = self._extract_cve_data(finding)
+
+        # Build enhanced comments with CVE information
+        enhanced_comments = comments
+        if cve_data["cve_ids"]:
+            enhanced_comments += f"\n\nCVE IDs: {', '.join(cve_data['cve_ids'])}"
+        if cve_data["cvss_scores"]:
+            enhanced_comments += f"\nCVSS Scores: {'; '.join(cve_data['cvss_scores'])}"
+        if cve_data["vulnerability_details"]:
+            enhanced_comments += "\n\nVulnerable Packages:\n" + "\n".join(
+                f"- {detail}" for detail in cve_data["vulnerability_details"]
+            )
+        if cve_data["vendor_info"]:
+            enhanced_comments += f"\n\nVendor Info: {'; '.join(cve_data['vendor_info'])}"
+        if cve_data["reference_urls"]:
+            enhanced_comments += "\n\nReferences:\n" + "\n".join(
+                f"- {url}" for url in cve_data["reference_urls"][:5]  # Limit to first 5 URLs
+            )
+
+        # Build observations with CVE details
+        observations = enhanced_comments
+
+        # Build gaps field with vulnerability details
+        gaps = ""
+        if cve_data["vulnerability_details"]:
+            gaps = "Vulnerable packages identified:\n" + "\n".join(cve_data["vulnerability_details"])
+
+        # Build evidence field with reference URLs
+        evidence = ""
+        if cve_data["reference_urls"]:
+            evidence = "Reference URLs:\n" + "\n".join(cve_data["reference_urls"])
+
+        # Determine vulnerability number (primary CVE ID)
+        vulnerability_number = cve_data["cve_ids"][0] if cve_data["cve_ids"] else ""
+
+        return IntegrationFinding(
+            asset_identifier=resource["Id"],
+            external_id=finding.get("Id", ""),
+            control_labels=[],
+            title=finding["Title"],
+            category="SecurityHub",
+            issue_title=finding["Title"],
+            severity=self.finding_severity_map.get(severity),
+            description=finding["Description"],
+            status=self.get_configured_issue_status(),
+            checklist_status=self.get_checklist_status(status),
+            vulnerability_number=vulnerability_number,
+            results=results,
+            recommendation_for_mitigation=finding.get("Remediation", {}).get("Recommendation", {}).get("Text", ""),
+            comments=enhanced_comments,
+            poam_comments=enhanced_comments,
+            date_created=date_str(finding["CreatedAt"]),
+            due_date=due_date,
+            plugin_name=plugin_name,
+            plugin_id=plugin_id,
+            baseline=self.get_baseline(resource),
+            observations=observations,
+            gaps=gaps,
+            evidence=evidence,
+            impact="",
+            vulnerability_type="Vulnerability Scan",
+        )
+
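The enhanced-comments assembly above folds the extracted CVE data into the issue and POA&M text. A standalone sketch of that concatenation for an invented cve_data value:

    # Sketch only; mirrors the comment-building order shown in the diff above.
    cve_data = {"cve_ids": ["CVE-2023-12345"], "cvss_scores": ["CVSS3.1: 7.5"],
                "vulnerability_details": ["Package: openssl | Fixed In: 1.0.2k"],
                "vendor_info": [], "reference_urls": []}
    comments = "Inspector finding on i-0abc123def456"
    if cve_data["cve_ids"]:
        comments += f"\n\nCVE IDs: {', '.join(cve_data['cve_ids'])}"
    if cve_data["cvss_scores"]:
        comments += f"\nCVSS Scores: {'; '.join(cve_data['cvss_scores'])}"
    if cve_data["vulnerability_details"]:
        comments += "\n\nVulnerable Packages:\n" + "\n".join(f"- {d}" for d in cve_data["vulnerability_details"])
    print(comments)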
     def parse_finding(self, finding: dict) -> list[IntegrationFinding]:
         """
         Parse AWS Security Hub to RegScale IntegrationFinding format.
@@ -889,78 +1519,96 @@ Description: {description if isinstance(description, str) else ''}"""
         findings = []
         try:
             for resource in finding["Resources"]:
-                #
-
-                if
-
-
-
-
-
+                # Check if the service for this resource type is enabled
+                resource_type = resource.get("Type", "")
+                if not self.is_service_enabled_for_resource(resource_type):
+                    logger.debug(f"Skipping finding for disabled service resource type '{resource_type}'")
+                    continue
+
+                # Discover asset from resource
+                self._discover_asset_from_resource(resource, finding)
+
+                # Determine status and severity
                 status, results = determine_status_and_results(finding)
                 comments = get_comments(finding)
                 severity = check_finding_severity(comments)
-                friendly_sev =
-                if severity in ["CRITICAL", "HIGH"]:
-                    friendly_sev = "high"
-                elif severity in ["MEDIUM", "MODERATE"]:
-                    friendly_sev = "moderate"
+                friendly_sev = self._get_friendly_severity(severity)

-                # Filter
+                # Filter by minimum severity
                 if not self.should_process_finding_by_severity(severity):
                     logger.debug(f"Skipping finding with severity '{severity}' - below minimum threshold")
                     continue
-
-
-
-
-
-
-
-
-                # Create a unique plugin_id using the finding ID to ensure each finding creates a separate issue
-                finding_id = finding.get("Id", "")
-                # Extract just the finding UUID from the full ARN for a cleaner ID
-                finding_uuid = finding_id.split("/")[-1] if "/" in finding_id else finding_id.split(":")[-1]
-                plugin_id = f"{plugin_name.replace(' ', '_').replace('/', '_').replace(':', '_')}_{finding_uuid}"
-
-                findings.append(
-                    IntegrationFinding(
-                        asset_identifier=self.extract_name_from_arn(resource["Id"]),
-                        external_id=finding_id,  # Use the full finding ID as external_id for uniqueness
-                        control_labels=[],  # Determine how to populate this
-                        title=finding["Title"],
-                        category="SecurityHub",
-                        issue_title=finding["Title"],
-                        severity=self.finding_severity_map.get(severity),
-                        description=finding["Description"],
-                        status=self.get_configured_issue_status(),
-                        checklist_status=self.get_checklist_status(status),
-                        vulnerability_number="",
-                        results=results,
-                        recommendation_for_mitigation=finding.get("Remediation", {})
-                        .get("Recommendation", {})
-                        .get("Text", ""),
-                        comments=comments,
-                        poam_comments=comments,
-                        date_created=date_str(finding["CreatedAt"]),
-                        due_date=due_date,
-                        plugin_name=plugin_name,
-                        plugin_id=plugin_id,  # Add the sanitized plugin_id
-                        baseline=self.get_baseline(resource),
-                        observations=comments,
-                        gaps="",
-                        evidence="",
-                        impact="",
-                        vulnerability_type="Vulnerability Scan",
-                    )
+
+                # Calculate due date and construct IDs
+                due_date = self._get_due_date_for_finding(finding, friendly_sev)
+                plugin_name, plugin_id = self._construct_plugin_id(finding, resource)
+
+                # Create finding object
+                integration_finding = self._create_integration_finding(
+                    resource, finding, severity, comments, status, results, due_date, plugin_name, plugin_id
                 )
+                findings.append(integration_finding)

         except Exception as e:
             logger.error(f"Error parsing AWS Security Hub finding: {str(e)}", exc_info=True)

         return findings

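The inline severity mapping removed here (CRITICAL/HIGH to "high", MEDIUM/MODERATE to "moderate") now lives behind _get_friendly_severity, whose body is not shown in this hunk. The sketch below only restates the mapping the removed lines performed and is an assumption about the helper's behavior:

    # Sketch only; restates the removed inline mapping, not the actual helper.
    def friendly_severity(severity: str) -> str:
        if severity in ["CRITICAL", "HIGH"]:
            return "high"
        if severity in ["MEDIUM", "MODERATE"]:
            return "moderate"
        return "low"  # assumed default; the removed default value is truncated in this view

    print(friendly_severity("CRITICAL"))  # high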
+    def process_findings_with_evidence(
+        self,
+        findings: List[dict],
+        service_name: str,
+        generate_evidence: bool = False,
+        ssp_id: Optional[int] = None,
+        control_ids: Optional[List[int]] = None,
+        ocsf_format: bool = False,
+    ) -> tuple[List[IntegrationFinding], Optional[Any]]:
+        """
+        Process findings and optionally generate evidence
+
+        :param List[dict] findings: Raw AWS findings
+        :param str service_name: AWS service name
+        :param bool generate_evidence: Whether to generate evidence record
+        :param Optional[int] ssp_id: SSP ID to link evidence
+        :param Optional[List[int]] control_ids: Control IDs to link
+        :param bool ocsf_format: Whether to generate OCSF format
+        :return: Tuple of (parsed findings, evidence record)
+        :rtype: tuple[List[IntegrationFinding], Optional[Any]]
+        """
+        from regscale.integrations.commercial.aws.evidence_generator import AWSEvidenceGenerator
+        from regscale.integrations.commercial.aws.ocsf.mapper import AWSOCSFMapper
+
+        # Parse findings to IntegrationFinding objects
+        integration_findings = []
+        for finding in findings:
+            integration_findings.extend(self.parse_finding(finding))
+
+        # Generate OCSF data if requested
+        ocsf_data = None
+        if ocsf_format:
+            mapper = AWSOCSFMapper()
+            if service_name == "SecurityHub":
+                ocsf_data = [mapper.securityhub_to_ocsf(f) for f in findings]
+            elif service_name == "GuardDuty":
+                ocsf_data = [mapper.guardduty_to_ocsf(f) for f in findings]
+            elif service_name == "CloudTrail":
+                ocsf_data = [mapper.cloudtrail_event_to_ocsf(f) for f in findings]
+
+        # Generate evidence if requested
+        evidence_record = None
+        if generate_evidence:
+            from regscale.core.app.api import Api
+
+            evidence_gen = AWSEvidenceGenerator(api=Api(), ssp_id=ssp_id)
+            evidence_record = evidence_gen.create_evidence_from_scan(
+                service_name=service_name,
+                findings=findings,
+                ocsf_data=ocsf_data,
+                control_ids=control_ids,
+            )
+
+        return integration_findings, evidence_record
+
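The OCSF branch above dispatches on the service name before building evidence. A standalone sketch of the same dispatch pattern with stub mappers (the stub functions and their return values are invented for illustration):

    # Sketch only; stubs stand in for AWSOCSFMapper's per-service methods.
    def securityhub_to_ocsf(finding: dict) -> dict:
        return {"mapped_from": "SecurityHub", "raw": finding}

    def guardduty_to_ocsf(finding: dict) -> dict:
        return {"mapped_from": "GuardDuty", "raw": finding}

    mappers = {"SecurityHub": securityhub_to_ocsf, "GuardDuty": guardduty_to_ocsf}
    findings = [{"Id": "example-finding"}]
    service_name = "SecurityHub"
    ocsf_data = [mappers[service_name](f) for f in findings] if service_name in mappers else None
    print(ocsf_data)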
     def parse_resource_to_asset(self, resource: dict, finding: dict) -> Optional[IntegrationAsset]:
         """
         Parse AWS Security Hub resource to RegScale IntegrationAsset format.
@@ -1031,7 +1679,7 @@ Description: {description if isinstance(description, str) else ''}"""

         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.Firewall,  # Security groups act like firewalls
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -1043,7 +1691,7 @@ Description: {description if isinstance(description, str) else ''}"""
             location=region,
             notes=notes,
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             vlan_id=details.get("VpcId"),
             uri=uri,
             source_data=resource,
@@ -1084,7 +1732,7 @@ Description: {description if isinstance(description, str) else ''}"""

         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.NetworkRouter,  # Subnets are network infrastructure
             asset_category=regscale_models.AssetCategory.Hardware,
             component_type=regscale_models.ComponentType.Hardware,
@@ -1096,7 +1744,7 @@ Description: {description if isinstance(description, str) else ''}"""
             location=region,
             notes=notes,
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             vlan_id=details.get("VpcId"),
             uri=uri,
             source_data=resource,
@@ -1120,7 +1768,7 @@ Description: {description if isinstance(description, str) else ''}"""

         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.Other,  # IAM users don't fit standard asset types
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -1132,7 +1780,7 @@ Description: {description if isinstance(description, str) else ''}"""
             location=region,
             notes="AWS IAM User Account",
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             uri=uri,
             source_data=resource,
             is_virtual=True,
@@ -1161,7 +1809,7 @@ Description: {description if isinstance(description, str) else ''}"""

         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.VM,
             asset_category=regscale_models.AssetCategory.Hardware,
             component_type=regscale_models.ComponentType.Hardware,
@@ -1174,7 +1822,7 @@ Description: {description if isinstance(description, str) else ''}"""
             notes=f"AWS EC2 Instance - {instance_type}",
             model=instance_type,
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             vlan_id=details.get("SubnetId"),
             uri=uri,
             source_data=resource,
@@ -1197,7 +1845,7 @@ Description: {description if isinstance(description, str) else ''}"""

         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.Other,  # S3 buckets are storage, closest to Other
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -1209,7 +1857,7 @@ Description: {description if isinstance(description, str) else ''}"""
             location=region,
             notes="AWS S3 Storage Bucket",
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             uri=uri,
             source_data=resource,
             is_virtual=True,
@@ -1236,7 +1884,7 @@ Description: {description if isinstance(description, str) else ''}"""

         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.VM,  # RDS instances are virtual database servers
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -1250,7 +1898,7 @@ Description: {description if isinstance(description, str) else ''}"""
             model=db_class,
             software_name=engine,
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             uri=uri,
             source_data=resource,
             is_virtual=True,
@@ -1276,7 +1924,7 @@ Description: {description if isinstance(description, str) else ''}"""

         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.Other,  # Lambda functions are serverless, closest to Other
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -1289,7 +1937,7 @@ Description: {description if isinstance(description, str) else ''}"""
             notes=f"AWS Lambda Function - {runtime}",
             software_name=runtime,
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             uri=uri,
             source_data=resource,
             is_virtual=True,
@@ -1311,7 +1959,7 @@ Description: {description if isinstance(description, str) else ''}"""

         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.Other,  # ECR repositories are container registries
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -1323,7 +1971,7 @@ Description: {description if isinstance(description, str) else ''}"""
             location=region,
             notes="AWS ECR Container Repository",
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             uri=uri,
             source_data=resource,
             is_virtual=True,
@@ -1342,7 +1990,7 @@ Description: {description if isinstance(description, str) else ''}"""

         return IntegrationAsset(
             name=name,
-            identifier=
+            identifier=resource_id,
             asset_type=regscale_models.AssetType.Other,
             asset_category=regscale_models.AssetCategory.Software,
             component_type=regscale_models.ComponentType.Software,
@@ -1354,7 +2002,7 @@ Description: {description if isinstance(description, str) else ''}"""
             location=region,
             notes=f"AWS {resource_type}",
             manufacturer="AWS",
-            aws_identifier=
+            aws_identifier=resource_id,  # Use full ARN for asset matching
             source_data=resource,
             is_virtual=True,
         )
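Across these asset hunks the pattern is uniform: both identifier and aws_identifier now carry the full resource ARN rather than a shortened name, so findings (which reference resource["Id"]) and assets key on the same value. A standalone sketch of the difference, using an invented ARN:

    # Sketch only; contrasts the old short-name key with the new full-ARN key.
    arn = "arn:aws:s3:::example-bucket"
    short_name = arn.split("/")[-1] if "/" in arn else arn.split(":")[-1]  # old-style key: "example-bucket"
    full_arn_key = arn                                                     # new-style key: the whole ARN
    finding_asset_identifier = arn
    print(finding_asset_identifier == short_name)    # False - keys would not match
    print(finding_asset_identifier == full_arn_key)  # True  - findings and assets now align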