aws-cis-controls-assessment 1.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aws_cis_assessment/__init__.py +11 -0
- aws_cis_assessment/cli/__init__.py +3 -0
- aws_cis_assessment/cli/examples.py +274 -0
- aws_cis_assessment/cli/main.py +1259 -0
- aws_cis_assessment/cli/utils.py +356 -0
- aws_cis_assessment/config/__init__.py +1 -0
- aws_cis_assessment/config/config_loader.py +328 -0
- aws_cis_assessment/config/rules/cis_controls_ig1.yaml +590 -0
- aws_cis_assessment/config/rules/cis_controls_ig2.yaml +412 -0
- aws_cis_assessment/config/rules/cis_controls_ig3.yaml +100 -0
- aws_cis_assessment/controls/__init__.py +1 -0
- aws_cis_assessment/controls/base_control.py +400 -0
- aws_cis_assessment/controls/ig1/__init__.py +239 -0
- aws_cis_assessment/controls/ig1/control_1_1.py +586 -0
- aws_cis_assessment/controls/ig1/control_2_2.py +231 -0
- aws_cis_assessment/controls/ig1/control_3_3.py +718 -0
- aws_cis_assessment/controls/ig1/control_3_4.py +235 -0
- aws_cis_assessment/controls/ig1/control_4_1.py +461 -0
- aws_cis_assessment/controls/ig1/control_access_keys.py +310 -0
- aws_cis_assessment/controls/ig1/control_advanced_security.py +512 -0
- aws_cis_assessment/controls/ig1/control_backup_recovery.py +510 -0
- aws_cis_assessment/controls/ig1/control_cloudtrail_logging.py +197 -0
- aws_cis_assessment/controls/ig1/control_critical_security.py +422 -0
- aws_cis_assessment/controls/ig1/control_data_protection.py +898 -0
- aws_cis_assessment/controls/ig1/control_iam_advanced.py +573 -0
- aws_cis_assessment/controls/ig1/control_iam_governance.py +493 -0
- aws_cis_assessment/controls/ig1/control_iam_policies.py +383 -0
- aws_cis_assessment/controls/ig1/control_instance_optimization.py +100 -0
- aws_cis_assessment/controls/ig1/control_network_enhancements.py +203 -0
- aws_cis_assessment/controls/ig1/control_network_security.py +672 -0
- aws_cis_assessment/controls/ig1/control_s3_enhancements.py +173 -0
- aws_cis_assessment/controls/ig1/control_s3_security.py +422 -0
- aws_cis_assessment/controls/ig1/control_vpc_security.py +235 -0
- aws_cis_assessment/controls/ig2/__init__.py +172 -0
- aws_cis_assessment/controls/ig2/control_3_10.py +698 -0
- aws_cis_assessment/controls/ig2/control_3_11.py +1330 -0
- aws_cis_assessment/controls/ig2/control_5_2.py +393 -0
- aws_cis_assessment/controls/ig2/control_advanced_encryption.py +355 -0
- aws_cis_assessment/controls/ig2/control_codebuild_security.py +263 -0
- aws_cis_assessment/controls/ig2/control_encryption_rest.py +382 -0
- aws_cis_assessment/controls/ig2/control_encryption_transit.py +382 -0
- aws_cis_assessment/controls/ig2/control_network_ha.py +467 -0
- aws_cis_assessment/controls/ig2/control_remaining_encryption.py +426 -0
- aws_cis_assessment/controls/ig2/control_remaining_rules.py +363 -0
- aws_cis_assessment/controls/ig2/control_service_logging.py +402 -0
- aws_cis_assessment/controls/ig3/__init__.py +49 -0
- aws_cis_assessment/controls/ig3/control_12_8.py +395 -0
- aws_cis_assessment/controls/ig3/control_13_1.py +467 -0
- aws_cis_assessment/controls/ig3/control_3_14.py +523 -0
- aws_cis_assessment/controls/ig3/control_7_1.py +359 -0
- aws_cis_assessment/core/__init__.py +1 -0
- aws_cis_assessment/core/accuracy_validator.py +425 -0
- aws_cis_assessment/core/assessment_engine.py +1266 -0
- aws_cis_assessment/core/audit_trail.py +491 -0
- aws_cis_assessment/core/aws_client_factory.py +313 -0
- aws_cis_assessment/core/error_handler.py +607 -0
- aws_cis_assessment/core/models.py +166 -0
- aws_cis_assessment/core/scoring_engine.py +459 -0
- aws_cis_assessment/reporters/__init__.py +8 -0
- aws_cis_assessment/reporters/base_reporter.py +454 -0
- aws_cis_assessment/reporters/csv_reporter.py +835 -0
- aws_cis_assessment/reporters/html_reporter.py +2162 -0
- aws_cis_assessment/reporters/json_reporter.py +561 -0
- aws_cis_controls_assessment-1.0.3.dist-info/METADATA +248 -0
- aws_cis_controls_assessment-1.0.3.dist-info/RECORD +77 -0
- aws_cis_controls_assessment-1.0.3.dist-info/WHEEL +5 -0
- aws_cis_controls_assessment-1.0.3.dist-info/entry_points.txt +2 -0
- aws_cis_controls_assessment-1.0.3.dist-info/licenses/LICENSE +21 -0
- aws_cis_controls_assessment-1.0.3.dist-info/top_level.txt +2 -0
- docs/README.md +94 -0
- docs/assessment-logic.md +766 -0
- docs/cli-reference.md +698 -0
- docs/config-rule-mappings.md +393 -0
- docs/developer-guide.md +858 -0
- docs/installation.md +299 -0
- docs/troubleshooting.md +634 -0
- docs/user-guide.md +487 -0
|
@@ -0,0 +1,491 @@
|
|
|
1
|
+
"""Audit trail system for tracking assessment activities and errors."""
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
from typing import Dict, List, Optional, Any, Union
|
|
7
|
+
from dataclasses import dataclass, field, asdict
|
|
8
|
+
from datetime import datetime, timedelta
|
|
9
|
+
from enum import Enum
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
|
|
12
|
+
logger = logging.getLogger(__name__)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class AuditEventType(Enum):
    """Types of audit events.

    Each member's value doubles as the string written to the audit log,
    so members must not be renamed without migrating existing log files.
    """
    # Lifecycle of a full assessment run.
    ASSESSMENT_START = "ASSESSMENT_START"
    ASSESSMENT_COMPLETE = "ASSESSMENT_COMPLETE"
    ASSESSMENT_ERROR = "ASSESSMENT_ERROR"
    # Per-control / per-service activity during a run.
    CONTROL_EVALUATION = "CONTROL_EVALUATION"
    SERVICE_ACCESS = "SERVICE_ACCESS"
    CREDENTIAL_VALIDATION = "CREDENTIAL_VALIDATION"
    ERROR_RECOVERY = "ERROR_RECOVERY"
    # Supporting activities (loading rule config, producing reports).
    CONFIGURATION_LOAD = "CONFIGURATION_LOAD"
    REPORT_GENERATION = "REPORT_GENERATION"
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@dataclass
class AuditEvent:
    """A single immutable-by-convention audit record for one logged activity.

    Instances are created by :class:`AuditTrail` and serialized to JSONL /
    CSV via :meth:`to_dict`.
    """
    event_id: str
    timestamp: datetime
    event_type: AuditEventType
    user_id: str
    account_id: str
    region: str
    service_name: str
    operation: str
    status: str  # one of SUCCESS, FAILURE, WARNING
    message: str
    details: Dict[str, Any] = field(default_factory=dict)
    duration_ms: Optional[int] = None
    error_id: Optional[str] = None

    def to_dict(self) -> Dict[str, Any]:
        """Return a JSON-friendly dictionary representation of this event."""
        serialized = asdict(self)
        # asdict() leaves datetime and Enum values as-is; swap in their
        # string forms so the result can be fed straight to json.dump().
        serialized.update(
            timestamp=self.timestamp.isoformat(),
            event_type=self.event_type.value,
        )
        return serialized
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class AuditTrail:
    """Comprehensive audit trail system for assessment activities.

    Events are kept in an in-memory session buffer (``self.session_events``)
    AND appended to a JSONL file (one JSON object per line). File rotation
    and retention cleanup are best-effort: any failure is logged as a
    warning and never interrupts the assessment itself.
    """

    def __init__(self, audit_file_path: Optional[str] = None,
                 max_file_size_mb: int = 100,
                 retention_days: int = 90,
                 enable_console_logging: bool = True):
        """Initialize audit trail system.

        Args:
            audit_file_path: Path to audit log file. If None, uses default location.
            max_file_size_mb: Maximum audit file size before rotation
            retention_days: Number of days to retain audit logs
            enable_console_logging: Whether to also log to console
        """
        self.max_file_size_mb = max_file_size_mb
        self.retention_days = retention_days
        self.enable_console_logging = enable_console_logging

        # Set up audit file path
        if audit_file_path:
            self.audit_file_path = Path(audit_file_path)
        else:
            # Default to user's home directory
            audit_dir = Path.home() / ".aws_cis_assessment" / "audit"
            audit_dir.mkdir(parents=True, exist_ok=True)
            self.audit_file_path = audit_dir / "assessment_audit.jsonl"

        # Ensure audit directory exists (covers the caller-supplied-path case)
        self.audit_file_path.parent.mkdir(parents=True, exist_ok=True)

        # In-memory event buffer for current session
        self.session_events: List[AuditEvent] = []
        self.session_start_time = datetime.now()

        # Set up file logging (creates the file, rotates, prunes old logs)
        self._setup_file_logging()

        logger.info(f"Audit trail initialized: {self.audit_file_path}")

    def _setup_file_logging(self) -> None:
        """Set up file-based audit logging.

        NOTE(review): rotation and cleanup run only here, at construction
        time — a long-lived session can grow the file past
        ``max_file_size_mb`` until the next AuditTrail is created.
        """
        try:
            # Create audit file if it doesn't exist
            if not self.audit_file_path.exists():
                self.audit_file_path.touch()

            # Check file size and rotate if necessary
            self._rotate_audit_file_if_needed()

            # Clean up old audit files
            self._cleanup_old_audit_files()

        except Exception as e:
            # Audit logging is best-effort; never fail the assessment over it.
            logger.warning(f"Failed to set up audit file logging: {e}")

    def log_event(self, event_type: AuditEventType, user_id: str = "unknown",
                  account_id: str = "unknown", region: str = "unknown",
                  service_name: str = "", operation: str = "",
                  status: str = "SUCCESS", message: str = "",
                  details: Optional[Dict[str, Any]] = None,
                  duration_ms: Optional[int] = None,
                  error_id: Optional[str] = None) -> str:
        """Log an audit event.

        Args:
            event_type: Type of event
            user_id: User identifier
            account_id: AWS account ID
            region: AWS region
            service_name: AWS service name
            operation: Operation being performed
            status: Operation status (SUCCESS, FAILURE, WARNING)
            message: Human-readable message
            details: Additional event details
            duration_ms: Operation duration in milliseconds
            error_id: Associated error ID if applicable

        Returns:
            Event ID for the logged event
        """
        # Generate unique event ID: millisecond timestamp plus the current
        # session-buffer length, so IDs are unique within one session even
        # when several events land in the same millisecond.
        event_id = f"AE_{int(datetime.now().timestamp() * 1000)}_{len(self.session_events):04d}"

        # Create audit event (timestamps are naive local time — TODO confirm
        # whether UTC-aware timestamps are expected downstream)
        event = AuditEvent(
            event_id=event_id,
            timestamp=datetime.now(),
            event_type=event_type,
            user_id=user_id,
            account_id=account_id,
            region=region,
            service_name=service_name,
            operation=operation,
            status=status,
            message=message,
            details=details or {},
            duration_ms=duration_ms,
            error_id=error_id
        )

        # Add to session events
        self.session_events.append(event)

        # Write to file (best-effort; failures only warn)
        self._write_event_to_file(event)

        # Log to console if enabled
        if self.enable_console_logging:
            self._log_event_to_console(event)

        return event_id

    def log_assessment_start(self, account_id: str, regions: List[str],
                             implementation_groups: List[str]) -> str:
        """Log assessment start event.

        Args:
            account_id: AWS account ID
            regions: List of regions being assessed
            implementation_groups: List of IGs being assessed

        Returns:
            Event ID
        """
        return self.log_event(
            event_type=AuditEventType.ASSESSMENT_START,
            account_id=account_id,
            message=f"Assessment started for account {account_id}",
            details={
                "regions": regions,
                "implementation_groups": implementation_groups,
                "session_start": self.session_start_time.isoformat()
            }
        )

    def log_assessment_complete(self, account_id: str, overall_score: float,
                                total_resources: int, duration: timedelta) -> str:
        """Log assessment completion event.

        Args:
            account_id: AWS account ID
            overall_score: Overall compliance score
            total_resources: Total resources evaluated
            duration: Assessment duration

        Returns:
            Event ID
        """
        return self.log_event(
            event_type=AuditEventType.ASSESSMENT_COMPLETE,
            account_id=account_id,
            message=f"Assessment completed for account {account_id}",
            details={
                "overall_score": overall_score,
                "total_resources": total_resources,
                "session_events": len(self.session_events)
            },
            # timedelta -> whole milliseconds for the duration_ms field
            duration_ms=int(duration.total_seconds() * 1000)
        )

    def log_control_evaluation(self, control_id: str, config_rule_name: str,
                               region: str, resource_count: int,
                               compliant_count: int, duration_ms: int) -> str:
        """Log control evaluation event.

        Args:
            control_id: CIS Control ID
            config_rule_name: AWS Config rule name
            region: AWS region
            resource_count: Total resources evaluated
            compliant_count: Number of compliant resources
            duration_ms: Evaluation duration in milliseconds

        Returns:
            Event ID
        """
        # Guard against division by zero when no resources were evaluated.
        compliance_percentage = (compliant_count / resource_count * 100) if resource_count > 0 else 0

        return self.log_event(
            event_type=AuditEventType.CONTROL_EVALUATION,
            region=region,
            operation=config_rule_name,
            message=f"Evaluated control {control_id} in {region}",
            details={
                "control_id": control_id,
                "config_rule_name": config_rule_name,
                "resource_count": resource_count,
                "compliant_count": compliant_count,
                "compliance_percentage": compliance_percentage
            },
            duration_ms=duration_ms
        )

    def log_service_access(self, service_name: str, region: str,
                           operation: str, status: str, message: str = "") -> str:
        """Log service access event.

        Args:
            service_name: AWS service name
            region: AWS region
            operation: Operation attempted
            status: Access status (SUCCESS, FAILURE)
            message: Additional message

        Returns:
            Event ID
        """
        return self.log_event(
            event_type=AuditEventType.SERVICE_ACCESS,
            region=region,
            service_name=service_name,
            operation=operation,
            status=status,
            # Fall back to a generic description when no message is supplied.
            message=message or f"Service access: {service_name} in {region}"
        )

    def log_error_recovery(self, error_id: str, recovery_strategy: str,
                           success: bool, details: Dict[str, Any]) -> str:
        """Log error recovery attempt.

        Args:
            error_id: Original error ID
            recovery_strategy: Recovery strategy used
            success: Whether recovery was successful
            details: Recovery details

        Returns:
            Event ID
        """
        return self.log_event(
            event_type=AuditEventType.ERROR_RECOVERY,
            status="SUCCESS" if success else "FAILURE",
            message=f"Error recovery {'succeeded' if success else 'failed'}: {recovery_strategy}",
            details=details,
            # Link the recovery attempt back to the original error record.
            error_id=error_id
        )

    def _write_event_to_file(self, event: AuditEvent) -> None:
        """Write audit event to file.

        Appends one JSON object per line (JSONL). Write failures are
        downgraded to warnings so audit I/O never aborts an assessment.

        Args:
            event: Audit event to write
        """
        try:
            with open(self.audit_file_path, 'a', encoding='utf-8') as f:
                json.dump(event.to_dict(), f, ensure_ascii=False)
                f.write('\n')
        except Exception as e:
            logger.warning(f"Failed to write audit event to file: {e}")

    def _log_event_to_console(self, event: AuditEvent) -> None:
        """Log audit event to console.

        Maps the event's status to a logging level: FAILURE -> error,
        WARNING -> warning, anything else -> info.

        Args:
            event: Audit event to log
        """
        log_message = f"[AUDIT] {event.event_type.value}: {event.message}"

        # Only append the (Service, Region) suffix when a service is set;
        # region alone is omitted.
        if event.service_name:
            log_message += f" (Service: {event.service_name}"
            if event.region:
                log_message += f", Region: {event.region}"
            log_message += ")"

        if event.status == "FAILURE":
            logger.error(log_message)
        elif event.status == "WARNING":
            logger.warning(log_message)
        else:
            logger.info(log_message)

    def _rotate_audit_file_if_needed(self) -> None:
        """Rotate audit file if it exceeds maximum size."""
        try:
            if not self.audit_file_path.exists():
                return

            file_size_mb = self.audit_file_path.stat().st_size / (1024 * 1024)

            if file_size_mb > self.max_file_size_mb:
                # Create rotated filename with timestamp, e.g.
                # assessment_audit.jsonl -> assessment_audit.20240101_120000.jsonl
                timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
                rotated_path = self.audit_file_path.with_suffix(f".{timestamp}.jsonl")

                # Move current file to rotated name; a fresh file is created
                # lazily on the next append.
                self.audit_file_path.rename(rotated_path)

                logger.info(f"Rotated audit file: {rotated_path}")

        except Exception as e:
            logger.warning(f"Failed to rotate audit file: {e}")

    def _cleanup_old_audit_files(self) -> None:
        """Clean up audit files older than retention period.

        Deletes any *.jsonl sibling (i.e. rotated logs) whose mtime is older
        than ``retention_days``; the live audit file is always skipped.
        """
        try:
            cutoff_date = datetime.now() - timedelta(days=self.retention_days)
            audit_dir = self.audit_file_path.parent

            for file_path in audit_dir.glob("*.jsonl"):
                if file_path == self.audit_file_path:
                    continue  # Skip current audit file

                # Per-file try/except so one bad file doesn't stop cleanup.
                try:
                    file_mtime = datetime.fromtimestamp(file_path.stat().st_mtime)
                    if file_mtime < cutoff_date:
                        file_path.unlink()
                        logger.info(f"Deleted old audit file: {file_path}")
                except Exception as e:
                    logger.warning(f"Failed to delete old audit file {file_path}: {e}")

        except Exception as e:
            logger.warning(f"Failed to cleanup old audit files: {e}")

    def get_session_summary(self) -> Dict[str, Any]:
        """Get summary of current session events.

        Returns:
            Dictionary with session summary. When no events were logged,
            only ``{"total_events": 0}`` is returned.
        """
        if not self.session_events:
            return {"total_events": 0}

        # Count events by type and status
        event_type_counts = {}
        status_counts = {}

        for event in self.session_events:
            event_type_counts[event.event_type.value] = event_type_counts.get(event.event_type.value, 0) + 1
            status_counts[event.status] = status_counts.get(event.status, 0) + 1

        # Calculate session duration
        session_duration = datetime.now() - self.session_start_time

        return {
            "total_events": len(self.session_events),
            "session_duration_seconds": int(session_duration.total_seconds()),
            "events_by_type": event_type_counts,
            "events_by_status": status_counts,
            "first_event": self.session_events[0].timestamp.isoformat(),
            "last_event": self.session_events[-1].timestamp.isoformat()
        }

    def query_events(self, event_type: Optional[AuditEventType] = None,
                     status: Optional[str] = None,
                     service_name: Optional[str] = None,
                     start_time: Optional[datetime] = None,
                     end_time: Optional[datetime] = None,
                     limit: int = 100) -> List[AuditEvent]:
        """Query audit events with filters.

        Searches only the in-memory session buffer, not the audit file.

        Args:
            event_type: Filter by event type
            status: Filter by status
            service_name: Filter by service name
            start_time: Filter events after this time
            end_time: Filter events before this time
            limit: Maximum number of events to return

        Returns:
            List of matching audit events
        """
        filtered_events = []

        for event in reversed(self.session_events):  # Most recent first
            # Apply filters
            if event_type and event.event_type != event_type:
                continue
            if status and event.status != status:
                continue
            if service_name and event.service_name != service_name:
                continue
            if start_time and event.timestamp < start_time:
                continue
            if end_time and event.timestamp > end_time:
                continue

            filtered_events.append(event)

            # Stop early once the requested page size is reached.
            if len(filtered_events) >= limit:
                break

        return filtered_events

    def export_session_events(self, output_path: str, format: str = "json") -> bool:
        """Export session events to file.

        Args:
            output_path: Output file path
            format: Export format ("json" or "csv")

        Returns:
            True if export successful, False otherwise
        """
        try:
            output_file = Path(output_path)
            output_file.parent.mkdir(parents=True, exist_ok=True)

            if format.lower() == "json":
                with open(output_file, 'w', encoding='utf-8') as f:
                    events_data = [event.to_dict() for event in self.session_events]
                    json.dump(events_data, f, indent=2, ensure_ascii=False)

            elif format.lower() == "csv":
                import csv

                with open(output_file, 'w', newline='', encoding='utf-8') as f:
                    # No events: an empty file (no header) still counts as success.
                    if not self.session_events:
                        return True

                    fieldnames = ['event_id', 'timestamp', 'event_type', 'user_id',
                                  'account_id', 'region', 'service_name', 'operation',
                                  'status', 'message', 'duration_ms', 'error_id']

                    writer = csv.DictWriter(f, fieldnames=fieldnames)
                    writer.writeheader()

                    for event in self.session_events:
                        row = event.to_dict()
                        # Remove complex details for CSV
                        row.pop('details', None)
                        writer.writerow(row)

            else:
                logger.error(f"Unsupported export format: {format}")
                return False

            logger.info(f"Exported {len(self.session_events)} events to {output_file}")
            return True

        except Exception as e:
            logger.error(f"Failed to export session events: {e}")
            return False

    def clear_session_events(self) -> None:
        """Clear current session events from memory.

        Also restarts the session clock; the audit file on disk is untouched.
        """
        self.session_events.clear()
        self.session_start_time = datetime.now()
        logger.info("Session events cleared")
|