regscale-cli 6.27.3.0__py3-none-any.whl → 6.28.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of regscale-cli might be problematic.
- regscale/_version.py +1 -1
- regscale/core/app/utils/app_utils.py +11 -2
- regscale/dev/cli.py +26 -0
- regscale/dev/version.py +72 -0
- regscale/integrations/commercial/__init__.py +15 -1
- regscale/integrations/commercial/amazon/amazon/__init__.py +0 -0
- regscale/integrations/commercial/amazon/amazon/common.py +204 -0
- regscale/integrations/commercial/amazon/common.py +48 -58
- regscale/integrations/commercial/aws/audit_manager_compliance.py +2671 -0
- regscale/integrations/commercial/aws/cli.py +3093 -55
- regscale/integrations/commercial/aws/cloudtrail_control_mappings.py +333 -0
- regscale/integrations/commercial/aws/cloudtrail_evidence.py +501 -0
- regscale/integrations/commercial/aws/cloudwatch_control_mappings.py +357 -0
- regscale/integrations/commercial/aws/cloudwatch_evidence.py +490 -0
- regscale/integrations/commercial/aws/config_compliance.py +914 -0
- regscale/integrations/commercial/aws/conformance_pack_mappings.py +198 -0
- regscale/integrations/commercial/aws/evidence_generator.py +283 -0
- regscale/integrations/commercial/aws/guardduty_control_mappings.py +340 -0
- regscale/integrations/commercial/aws/guardduty_evidence.py +1053 -0
- regscale/integrations/commercial/aws/iam_control_mappings.py +368 -0
- regscale/integrations/commercial/aws/iam_evidence.py +574 -0
- regscale/integrations/commercial/aws/inventory/__init__.py +223 -22
- regscale/integrations/commercial/aws/inventory/base.py +107 -5
- regscale/integrations/commercial/aws/inventory/resources/audit_manager.py +513 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudtrail.py +315 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudtrail_logs_metadata.py +476 -0
- regscale/integrations/commercial/aws/inventory/resources/cloudwatch.py +191 -0
- regscale/integrations/commercial/aws/inventory/resources/compute.py +66 -9
- regscale/integrations/commercial/aws/inventory/resources/config.py +464 -0
- regscale/integrations/commercial/aws/inventory/resources/containers.py +74 -9
- regscale/integrations/commercial/aws/inventory/resources/database.py +106 -31
- regscale/integrations/commercial/aws/inventory/resources/guardduty.py +286 -0
- regscale/integrations/commercial/aws/inventory/resources/iam.py +470 -0
- regscale/integrations/commercial/aws/inventory/resources/inspector.py +476 -0
- regscale/integrations/commercial/aws/inventory/resources/integration.py +175 -61
- regscale/integrations/commercial/aws/inventory/resources/kms.py +447 -0
- regscale/integrations/commercial/aws/inventory/resources/networking.py +103 -67
- regscale/integrations/commercial/aws/inventory/resources/s3.py +394 -0
- regscale/integrations/commercial/aws/inventory/resources/security.py +268 -72
- regscale/integrations/commercial/aws/inventory/resources/securityhub.py +473 -0
- regscale/integrations/commercial/aws/inventory/resources/storage.py +53 -29
- regscale/integrations/commercial/aws/inventory/resources/systems_manager.py +657 -0
- regscale/integrations/commercial/aws/inventory/resources/vpc.py +655 -0
- regscale/integrations/commercial/aws/kms_control_mappings.py +288 -0
- regscale/integrations/commercial/aws/kms_evidence.py +879 -0
- regscale/integrations/commercial/aws/ocsf/__init__.py +7 -0
- regscale/integrations/commercial/aws/ocsf/constants.py +115 -0
- regscale/integrations/commercial/aws/ocsf/mapper.py +435 -0
- regscale/integrations/commercial/aws/org_control_mappings.py +286 -0
- regscale/integrations/commercial/aws/org_evidence.py +666 -0
- regscale/integrations/commercial/aws/s3_control_mappings.py +356 -0
- regscale/integrations/commercial/aws/s3_evidence.py +632 -0
- regscale/integrations/commercial/aws/scanner.py +851 -206
- regscale/integrations/commercial/aws/security_hub.py +319 -0
- regscale/integrations/commercial/aws/session_manager.py +282 -0
- regscale/integrations/commercial/aws/ssm_control_mappings.py +291 -0
- regscale/integrations/commercial/aws/ssm_evidence.py +492 -0
- regscale/integrations/commercial/synqly/ticketing.py +27 -0
- regscale/integrations/compliance_integration.py +308 -38
- regscale/integrations/due_date_handler.py +3 -0
- regscale/integrations/scanner_integration.py +399 -84
- regscale/models/integration_models/cisa_kev_data.json +65 -5
- regscale/models/integration_models/synqly_models/capabilities.json +1 -1
- regscale/models/integration_models/synqly_models/connectors/vulnerabilities.py +17 -9
- regscale/models/regscale_models/assessment.py +2 -1
- regscale/models/regscale_models/control_objective.py +74 -5
- regscale/models/regscale_models/file.py +2 -0
- regscale/models/regscale_models/issue.py +2 -5
- {regscale_cli-6.27.3.0.dist-info → regscale_cli-6.28.1.0.dist-info}/METADATA +1 -1
- {regscale_cli-6.27.3.0.dist-info → regscale_cli-6.28.1.0.dist-info}/RECORD +113 -34
- tests/regscale/integrations/commercial/aws/__init__.py +0 -0
- tests/regscale/integrations/commercial/aws/test_audit_manager_compliance.py +1304 -0
- tests/regscale/integrations/commercial/aws/test_audit_manager_evidence_aggregation.py +341 -0
- tests/regscale/integrations/commercial/aws/test_aws_audit_manager_collector.py +1155 -0
- tests/regscale/integrations/commercial/aws/test_aws_cloudtrail_collector.py +534 -0
- tests/regscale/integrations/commercial/aws/test_aws_config_collector.py +400 -0
- tests/regscale/integrations/commercial/aws/test_aws_guardduty_collector.py +315 -0
- tests/regscale/integrations/commercial/aws/test_aws_iam_collector.py +458 -0
- tests/regscale/integrations/commercial/aws/test_aws_inspector_collector.py +353 -0
- tests/regscale/integrations/commercial/aws/test_aws_inventory_integration.py +530 -0
- tests/regscale/integrations/commercial/aws/test_aws_kms_collector.py +919 -0
- tests/regscale/integrations/commercial/aws/test_aws_s3_collector.py +722 -0
- tests/regscale/integrations/commercial/aws/test_aws_scanner_integration.py +722 -0
- tests/regscale/integrations/commercial/aws/test_aws_securityhub_collector.py +792 -0
- tests/regscale/integrations/commercial/aws/test_aws_systems_manager_collector.py +918 -0
- tests/regscale/integrations/commercial/aws/test_aws_vpc_collector.py +996 -0
- tests/regscale/integrations/commercial/aws/test_cli_evidence.py +431 -0
- tests/regscale/integrations/commercial/aws/test_cloudtrail_control_mappings.py +452 -0
- tests/regscale/integrations/commercial/aws/test_cloudtrail_evidence.py +788 -0
- tests/regscale/integrations/commercial/aws/test_config_compliance.py +298 -0
- tests/regscale/integrations/commercial/aws/test_conformance_pack_mappings.py +200 -0
- tests/regscale/integrations/commercial/aws/test_evidence_generator.py +386 -0
- tests/regscale/integrations/commercial/aws/test_guardduty_control_mappings.py +564 -0
- tests/regscale/integrations/commercial/aws/test_guardduty_evidence.py +1041 -0
- tests/regscale/integrations/commercial/aws/test_iam_control_mappings.py +718 -0
- tests/regscale/integrations/commercial/aws/test_iam_evidence.py +1375 -0
- tests/regscale/integrations/commercial/aws/test_kms_control_mappings.py +656 -0
- tests/regscale/integrations/commercial/aws/test_kms_evidence.py +1163 -0
- tests/regscale/integrations/commercial/aws/test_ocsf_mapper.py +370 -0
- tests/regscale/integrations/commercial/aws/test_org_control_mappings.py +546 -0
- tests/regscale/integrations/commercial/aws/test_org_evidence.py +1240 -0
- tests/regscale/integrations/commercial/aws/test_s3_control_mappings.py +672 -0
- tests/regscale/integrations/commercial/aws/test_s3_evidence.py +987 -0
- tests/regscale/integrations/commercial/aws/test_scanner_evidence.py +373 -0
- tests/regscale/integrations/commercial/aws/test_security_hub_config_filtering.py +539 -0
- tests/regscale/integrations/commercial/aws/test_session_manager.py +516 -0
- tests/regscale/integrations/commercial/aws/test_ssm_control_mappings.py +588 -0
- tests/regscale/integrations/commercial/aws/test_ssm_evidence.py +735 -0
- tests/regscale/integrations/commercial/test_aws.py +55 -56
- {regscale_cli-6.27.3.0.dist-info → regscale_cli-6.28.1.0.dist-info}/LICENSE +0 -0
- {regscale_cli-6.27.3.0.dist-info → regscale_cli-6.28.1.0.dist-info}/WHEEL +0 -0
- {regscale_cli-6.27.3.0.dist-info → regscale_cli-6.28.1.0.dist-info}/entry_points.txt +0 -0
- {regscale_cli-6.27.3.0.dist-info → regscale_cli-6.28.1.0.dist-info}/top_level.txt +0 -0
regscale/integrations/commercial/aws/security_hub.py
@@ -0,0 +1,319 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""AWS Security Hub optimized data puller with rate limiting and pagination."""

import logging
import time
from typing import Any, Dict, List, Optional

import boto3
from botocore.client import BaseClient
from botocore.exceptions import ClientError

logger = logging.getLogger("regscale")


class SecurityHubPuller:
    """
    Optimized AWS Security Hub data puller with intelligent rate limiting and pagination.

    This class provides enhanced functionality for fetching Security Hub findings with:
    - Automatic pagination handling
    - Exponential backoff retry logic
    - Rate limiting compliance
    - Efficient batch processing
    """

    def __init__(
        self,
        region_name: str = "us-east-1",
        profile_name: Optional[str] = None,
        aws_access_key_id: Optional[str] = None,
        aws_secret_access_key: Optional[str] = None,
        aws_session_token: Optional[str] = None,
        max_retries: int = 5,
        initial_delay: float = 1.0,
    ):
        """
        Initialize SecurityHub puller.

        :param str region_name: AWS region name
        :param Optional[str] profile_name: AWS profile name
        :param Optional[str] aws_access_key_id: AWS access key ID
        :param Optional[str] aws_secret_access_key: AWS secret access key
        :param Optional[str] aws_session_token: AWS session token
        :param int max_retries: Maximum number of retries for failed requests
        :param float initial_delay: Initial delay in seconds for exponential backoff
        """
        self.region_name = region_name
        self.max_retries = max_retries
        self.initial_delay = initial_delay

        # Create boto3 session
        if profile_name:
            session = boto3.Session(profile_name=profile_name, region_name=region_name)
        elif aws_access_key_id and aws_secret_access_key:
            session = boto3.Session(
                region_name=region_name,
                aws_access_key_id=aws_access_key_id,
                aws_secret_access_key=aws_secret_access_key,
                aws_session_token=aws_session_token,
            )
        else:
            session = boto3.Session(region_name=region_name)

        # Create Security Hub client
        self.client: BaseClient = session.client("securityhub")

    def get_all_findings_with_retries(
        self,
        filters: Optional[Dict[str, Any]] = None,
        max_results: int = 100,
    ) -> List[Dict[str, Any]]:
        """
        Fetch all Security Hub findings with automatic pagination and retry logic.

        :param Optional[Dict[str, Any]] filters: Security Hub filters to apply
        :param int max_results: Maximum results per page (1-100)
        :return: List of all findings
        :rtype: List[Dict[str, Any]]
        """
        all_findings = []
        next_token = None
        page_count = 0

        # Default filters if none provided
        if filters is None:
            filters = {
                "RecordState": [{"Value": "ACTIVE", "Comparison": "EQUALS"}],
                # Include NEW and NOTIFIED (In Progress) workflow statuses
                # Exclude SUPPRESSED and RESOLVED
                "WorkflowStatus": [
                    {"Value": "NEW", "Comparison": "EQUALS"},
                    {"Value": "NOTIFIED", "Comparison": "EQUALS"},
                ],
            }

        logger.info("Starting Security Hub findings retrieval with pagination...")

        while True:
            page_count += 1
            try:
                # Build request parameters
                params = {
                    "Filters": filters,
                    "MaxResults": max_results,
                }

                if next_token:
                    params["NextToken"] = next_token

                # Fetch findings with retry logic
                response = self._get_findings_with_retry(params)

                # Extract findings from response
                findings = response.get("Findings", [])
                all_findings.extend(findings)

                logger.info(f"Retrieved page {page_count}: {len(findings)} findings (Total: {len(all_findings)})")

                # Check for next page
                next_token = response.get("NextToken")
                if not next_token:
                    break

                # Brief pause between pages to respect rate limits
                time.sleep(0.2)

            except ClientError as e:
                error_code = e.response.get("Error", {}).get("Code", "Unknown")
                logger.error(f"Failed to retrieve findings on page {page_count}: {error_code} - {str(e)}")
                break
            except Exception as e:
                logger.error(f"Unexpected error on page {page_count}: {str(e)}")
                break

        logger.info(f"Completed Security Hub retrieval: {len(all_findings)} total findings across {page_count} pages")

        return all_findings

    def _get_findings_with_retry(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """
        Execute get_findings with exponential backoff retry logic.

        :param Dict[str, Any] params: Parameters for get_findings call
        :return: API response
        :rtype: Dict[str, Any]
        :raises ClientError: If all retries are exhausted
        """
        delay = self.initial_delay

        for attempt in range(self.max_retries):
            try:
                response = self.client.get_findings(**params)
                return response

            except ClientError as e:
                error_code = e.response.get("Error", {}).get("Code", "Unknown")

                # Handle throttling errors with exponential backoff
                if error_code in ["ThrottlingException", "TooManyRequestsException"]:
                    if attempt < self.max_retries - 1:
                        logger.warning(
                            f"Rate limit hit (attempt {attempt + 1}/{self.max_retries}). "
                            f"Retrying in {delay:.1f}s..."
                        )
                        time.sleep(delay)
                        delay *= 2  # Exponential backoff
                        continue
                    else:
                        logger.error(f"Rate limit exceeded after {self.max_retries} attempts")
                        raise
                else:
                    # For non-throttling errors, raise immediately
                    logger.error(f"Security Hub API error: {error_code} - {str(e)}")
                    raise

            except Exception as e:
                logger.error(f"Unexpected error calling Security Hub API: {str(e)}")
                raise

        # Should not reach here, but just in case
        raise RuntimeError(f"Failed to retrieve findings after {self.max_retries} attempts")

    def get_findings_by_severity(self, severity_labels: List[str], max_results: int = 100) -> List[Dict[str, Any]]:
        """
        Fetch findings filtered by severity levels.

        :param List[str] severity_labels: Severity labels to filter (CRITICAL, HIGH, MEDIUM, LOW, INFORMATIONAL)
        :param int max_results: Maximum results per page
        :return: List of findings matching severity criteria
        :rtype: List[Dict[str, Any]]
        """
        filters = {
            "RecordState": [{"Value": "ACTIVE", "Comparison": "EQUALS"}],
            "SeverityLabel": [{"Value": label, "Comparison": "EQUALS"} for label in severity_labels],
            # Include NEW and NOTIFIED workflow statuses, exclude SUPPRESSED and RESOLVED
            "WorkflowStatus": [
                {"Value": "NEW", "Comparison": "EQUALS"},
                {"Value": "NOTIFIED", "Comparison": "EQUALS"},
            ],
        }

        logger.info(f"Fetching findings with severity: {', '.join(severity_labels)}")
        return self.get_all_findings_with_retries(filters=filters, max_results=max_results)

    @staticmethod
    def get_severity_filters_from_minimum(minimum_severity: str) -> List[str]:
        """
        Get list of severity labels to filter based on minimum severity threshold.

        :param str minimum_severity: Minimum severity level (CRITICAL, HIGH, MEDIUM, LOW, INFORMATIONAL)
        :return: List of severity labels at or above the minimum threshold
        :rtype: List[str]
        """
        severity_hierarchy = ["INFORMATIONAL", "LOW", "MEDIUM", "MODERATE", "HIGH", "CRITICAL"]
        min_sev_upper = minimum_severity.upper()

        # Handle MODERATE as alias for MEDIUM
        if min_sev_upper == "MODERATE":
            min_sev_upper = "MEDIUM"

        # Find the index of the minimum severity
        if min_sev_upper not in severity_hierarchy:
            logger.warning(f"Unknown minimum severity '{minimum_severity}', defaulting to LOW")
            min_sev_upper = "LOW"

        min_index = severity_hierarchy.index(min_sev_upper)

        # Return all severities at or above the minimum (excluding MODERATE since it's an alias)
        return [sev for sev in severity_hierarchy[min_index:] if sev != "MODERATE"]

    def get_findings_by_compliance_status(
        self, compliance_statuses: List[str], max_results: int = 100
    ) -> List[Dict[str, Any]]:
        """
        Fetch findings filtered by compliance status (for posture management findings).

        :param List[str] compliance_statuses: Compliance statuses (PASSED, FAILED, WARNING, NOT_AVAILABLE)
        :param int max_results: Maximum results per page
        :return: List of findings matching compliance criteria
        :rtype: List[Dict[str, Any]]
        """
        filters = {
            "RecordState": [{"Value": "ACTIVE", "Comparison": "EQUALS"}],
            "ComplianceStatus": [{"Value": status, "Comparison": "EQUALS"} for status in compliance_statuses],
            # Include NEW and NOTIFIED workflow statuses, exclude SUPPRESSED and RESOLVED
            "WorkflowStatus": [
                {"Value": "NEW", "Comparison": "EQUALS"},
                {"Value": "NOTIFIED", "Comparison": "EQUALS"},
            ],
        }

        logger.info(f"Fetching findings with compliance status: {', '.join(compliance_statuses)}")
        return self.get_all_findings_with_retries(filters=filters, max_results=max_results)

    def get_posture_management_findings(
        self, severity_labels: Optional[List[str]] = None, max_results: int = 100
    ) -> List[Dict[str, Any]]:
        """
        Fetch posture management findings (compliance checks from security standards).
        These are findings that have a ComplianceStatus, indicating they come from
        enabled security standards like CIS, PCI-DSS, AWS Foundational Security Best Practices, etc.

        :param Optional[List[str]] severity_labels: Optional severity filter
        :param int max_results: Maximum results per page
        :return: List of posture management findings
        :rtype: List[Dict[str, Any]]
        """
        filters = {
            "RecordState": [{"Value": "ACTIVE", "Comparison": "EQUALS"}],
            # Include NEW and NOTIFIED workflow statuses, exclude SUPPRESSED and RESOLVED
            "WorkflowStatus": [
                {"Value": "NEW", "Comparison": "EQUALS"},
                {"Value": "NOTIFIED", "Comparison": "EQUALS"},
            ],
            # Posture management findings have FAILED compliance status
            # (PASSED findings are not vulnerabilities)
            "ComplianceStatus": [{"Value": "FAILED", "Comparison": "EQUALS"}],
        }

        # Add severity filter if provided
        if severity_labels:
            filters["SeverityLabel"] = [{"Value": label, "Comparison": "EQUALS"} for label in severity_labels]
            logger.info(f"Fetching posture management findings with severity: {', '.join(severity_labels)}")
        else:
            logger.info("Fetching all posture management findings (FAILED compliance checks)")

        return self.get_all_findings_with_retries(filters=filters, max_results=max_results)

    def get_findings_count(self, filters: Optional[Dict[str, Any]] = None) -> int:
        """
        Get count of findings matching filters without retrieving full data.

        :param Optional[Dict[str, Any]] filters: Security Hub filters
        :return: Count of matching findings
        :rtype: int
        """
        try:
            if filters is None:
                filters = {"RecordState": [{"Value": "ACTIVE", "Comparison": "EQUALS"}]}

            # Use MaxResults=1 to minimize data transfer
            response = self.client.get_findings(Filters=filters, MaxResults=1)

            # The total count is typically not directly available, but we can
            # estimate from pagination. For exact count, we'd need to paginate fully.
            # This is a lightweight check to see if any findings exist.
            findings_count = len(response.get("Findings", []))

            if response.get("NextToken"):
                logger.info("More than 1 finding exists (exact count requires full pagination)")
                # For a rough estimate, we could paginate a few pages
                return findings_count  # Would need full pagination for exact count
            else:
                return findings_count

        except ClientError as e:
            logger.error(f"Failed to get findings count: {str(e)}")
            return 0
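For context, a minimal usage sketch of the new SecurityHubPuller is shown below. It is not part of the diff; the profile name, region, and minimum-severity value are illustrative assumptions, and the import path is inferred from the file location listed above.

from regscale.integrations.commercial.aws.security_hub import SecurityHubPuller

# Assumed example values; substitute your own profile and region.
puller = SecurityHubPuller(region_name="us-east-1", profile_name="my-audit-profile")

# Expand a minimum severity threshold into the labels Security Hub expects,
# then pull ACTIVE NEW/NOTIFIED findings at or above that threshold.
labels = SecurityHubPuller.get_severity_filters_from_minimum("HIGH")
findings = puller.get_findings_by_severity(labels)

# Failed compliance checks from enabled security standards (posture management).
failed_checks = puller.get_posture_management_findings(severity_labels=labels)
print(f"{len(findings)} findings, {len(failed_checks)} failed compliance checks")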
regscale/integrations/commercial/aws/session_manager.py
@@ -0,0 +1,282 @@
"""AWS Session Token Manager for RegScale CLI.

This module provides functionality to generate, cache, and manage temporary AWS session tokens.
Session tokens provide better security than long-term access keys and support MFA authentication.
"""

import json
import logging
import os
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, Optional, Tuple

logger = logging.getLogger("regscale")


class AWSSessionManager:
    """Manages AWS session tokens with local caching and automatic expiration."""

    def __init__(self, cache_dir: Optional[str] = None):
        """
        Initialize the AWS session manager.

        :param Optional[str] cache_dir: Directory to store cached session tokens.
            Defaults to ~/.regscale/aws_sessions/
        """
        if cache_dir:
            self.cache_dir = Path(cache_dir)
        else:
            self.cache_dir = Path.home() / ".regscale" / "aws_sessions"

        # Create cache directory if it doesn't exist
        self.cache_dir.mkdir(parents=True, exist_ok=True)

        # Set restrictive permissions on the cache directory (owner read/write only)
        if os.name != "nt":  # Unix-like systems
            os.chmod(self.cache_dir, 0o700)

    def get_session_token(
        self,
        profile: Optional[str] = None,
        aws_access_key_id: Optional[str] = None,
        aws_secret_access_key: Optional[str] = None,
        mfa_serial: Optional[str] = None,
        mfa_code: Optional[str] = None,
        role_arn: Optional[str] = None,
        role_session_name: Optional[str] = None,
        duration_seconds: int = 3600,
    ) -> Dict[str, str]:
        """
        Generate a new AWS session token using STS.

        :param Optional[str] profile: AWS profile name to use
        :param Optional[str] aws_access_key_id: AWS access key ID
        :param Optional[str] aws_secret_access_key: AWS secret access key
        :param Optional[str] mfa_serial: ARN of MFA device (e.g., arn:aws:iam::123456789012:mfa/user)
        :param Optional[str] mfa_code: 6-digit MFA code from authenticator app
        :param Optional[str] role_arn: ARN of role to assume
        :param Optional[str] role_session_name: Name for the assumed role session
        :param int duration_seconds: Duration for session token (900-43200 seconds, default 3600)
        :return: Dictionary with temporary credentials
        :rtype: Dict[str, str]
        """
        import boto3

        # Create initial session
        if aws_access_key_id and aws_secret_access_key:
            session = boto3.Session(
                aws_access_key_id=aws_access_key_id,
                aws_secret_access_key=aws_secret_access_key,
            )
        elif profile:
            session = boto3.Session(profile_name=profile)
        else:
            # Use default credential chain
            session = boto3.Session()

        sts_client = session.client("sts")

        try:
            if role_arn:
                # Assume role (with or without MFA)
                logger.info(f"Assuming role: {role_arn}")
                assume_role_params = {
                    "RoleArn": role_arn,
                    "RoleSessionName": role_session_name or f"regscale-{datetime.now().strftime('%Y%m%d%H%M%S')}",
                    "DurationSeconds": duration_seconds,
                }

                if mfa_serial and mfa_code:
                    assume_role_params["SerialNumber"] = mfa_serial
                    assume_role_params["TokenCode"] = mfa_code

                response = sts_client.assume_role(**assume_role_params)
                credentials = response["Credentials"]

                return {
                    "aws_access_key_id": credentials["AccessKeyId"],
                    "aws_secret_access_key": credentials["SecretAccessKey"],
                    "aws_session_token": credentials["SessionToken"],
                    "expiration": credentials["Expiration"].isoformat(),
                }
            else:
                # Get session token (with or without MFA)
                logger.info("Getting session token from AWS STS")
                get_session_params = {"DurationSeconds": duration_seconds}

                if mfa_serial and mfa_code:
                    get_session_params["SerialNumber"] = mfa_serial
                    get_session_params["TokenCode"] = mfa_code
                    logger.info(f"Using MFA device: {mfa_serial}")

                response = sts_client.get_session_token(**get_session_params)
                credentials = response["Credentials"]

                return {
                    "aws_access_key_id": credentials["AccessKeyId"],
                    "aws_secret_access_key": credentials["SecretAccessKey"],
                    "aws_session_token": credentials["SessionToken"],
                    "expiration": credentials["Expiration"].isoformat(),
                }

        except Exception as e:
            logger.error(f"Failed to get AWS session token: {e}")
            raise

    def cache_session(
        self,
        session_name: str,
        credentials: Dict[str, str],
        region: Optional[str] = None,
    ) -> None:
        """
        Cache session credentials to local file.

        :param str session_name: Name for this session (e.g., profile name or custom name)
        :param Dict[str, str] credentials: Credentials dictionary from get_session_token()
        :param Optional[str] region: AWS region to associate with this session
        """
        cache_file = self.cache_dir / f"{session_name}.json"

        cache_data = {
            "session_name": session_name,
            "credentials": credentials,
            "region": region,
            "cached_at": datetime.now().isoformat(),
        }

        with open(cache_file, "w", encoding="utf-8") as f:
            json.dump(cache_data, f, indent=2)

        # Set restrictive permissions on the cache file (owner read/write only)
        if os.name != "nt":  # Unix-like systems
            os.chmod(cache_file, 0o600)

        logger.info(f"Cached session credentials to: {cache_file}")
        logger.info(f"Session expires at: {credentials['expiration']}")

    def get_cached_session(self, session_name: str) -> Optional[Dict[str, str]]:
        """
        Retrieve cached session credentials if they exist and are not expired.

        :param str session_name: Name of the cached session
        :return: Credentials dictionary or None if not found/expired
        :rtype: Optional[Dict[str, str]]
        """
        cache_file = self.cache_dir / f"{session_name}.json"

        if not cache_file.exists():
            logger.debug(f"No cached session found: {session_name}")
            return None

        try:
            with open(cache_file, "r", encoding="utf-8") as f:
                cache_data = json.load(f)

            credentials = cache_data["credentials"]
            expiration = datetime.fromisoformat(credentials["expiration"])

            # Check if session is expired (with 5 minute buffer for safety)
            if expiration - timedelta(minutes=5) < datetime.now(expiration.tzinfo):
                logger.warning(f"Cached session expired: {session_name}")
                # Clean up expired cache file
                cache_file.unlink()
                return None

            logger.info(f"Using cached session: {session_name}")
            logger.info(f"Session expires at: {credentials['expiration']}")
            return cache_data

        except Exception as e:
            logger.error(f"Failed to read cached session: {e}")
            return None

    def clear_session(self, session_name: str) -> bool:
        """
        Clear a cached session.

        :param str session_name: Name of the session to clear
        :return: True if session was cleared, False if not found
        :rtype: bool
        """
        cache_file = self.cache_dir / f"{session_name}.json"

        if cache_file.exists():
            cache_file.unlink()
            logger.info(f"Cleared cached session: {session_name}")
            return True
        else:
            logger.warning(f"No cached session found: {session_name}")
            return False

    def clear_all_sessions(self) -> int:
        """
        Clear all cached sessions.

        :return: Number of sessions cleared
        :rtype: int
        """
        count = 0
        for cache_file in self.cache_dir.glob("*.json"):
            cache_file.unlink()
            count += 1

        logger.info(f"Cleared {count} cached session(s)")
        return count

    def list_sessions(self) -> list[Dict[str, str]]:
        """
        List all cached sessions with their expiration status.

        :return: List of session information dictionaries
        :rtype: list[Dict[str, str]]
        """
        sessions = []

        for cache_file in self.cache_dir.glob("*.json"):
            try:
                with open(cache_file, "r", encoding="utf-8") as f:
                    cache_data = json.load(f)

                credentials = cache_data["credentials"]
                expiration = datetime.fromisoformat(credentials["expiration"])
                is_expired = expiration < datetime.now(expiration.tzinfo)

                sessions.append(
                    {
                        "name": cache_data["session_name"],
                        "region": cache_data.get("region", "N/A"),
                        "expiration": credentials["expiration"],
                        "expired": is_expired,
                        "cached_at": cache_data.get("cached_at", "Unknown"),
                    }
                )

            except Exception as e:
                logger.error(f"Failed to read session file {cache_file}: {e}")

        return sorted(sessions, key=lambda x: x["expiration"], reverse=True)

    def get_credentials_for_session(
        self, session_name: str
    ) -> Optional[Tuple[Optional[str], Optional[str], Optional[str], Optional[str]]]:
        """
        Get AWS credentials from cached session for use in CLI commands.

        :param str session_name: Name of the cached session
        :return: Tuple of (access_key_id, secret_access_key, session_token, region) or None
        :rtype: Optional[Tuple[Optional[str], Optional[str], Optional[str], Optional[str]]]
        """
        cache_data = self.get_cached_session(session_name)

        if not cache_data:
            return None

        credentials = cache_data["credentials"]
        return (
            credentials["aws_access_key_id"],
            credentials["aws_secret_access_key"],
            credentials["aws_session_token"],
            cache_data.get("region"),
        )
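For context, a minimal usage sketch of the new AWSSessionManager is shown below. It is not part of the diff; the session name "dev", the profile name, and the region are illustrative assumptions, and the import path is inferred from the file location listed above.

from regscale.integrations.commercial.aws.session_manager import AWSSessionManager

manager = AWSSessionManager()  # caches under ~/.regscale/aws_sessions/ by default

# Reuse a cached session if one is still valid; otherwise mint and cache a new one.
cached = manager.get_cached_session("dev")  # "dev" is an assumed session name
if cached is None:
    token = manager.get_session_token(profile="default", duration_seconds=3600)
    manager.cache_session("dev", token, region="us-east-1")

# Pull the temporary credentials back out for use in other CLI commands.
creds = manager.get_credentials_for_session("dev")
if creds:
    access_key, secret_key, session_token, region = creds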