exploitgraph 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- core/__init__.py +0 -0
- core/attack_graph.py +83 -0
- core/aws_client.py +284 -0
- core/config.py +83 -0
- core/console.py +469 -0
- core/context_engine.py +172 -0
- core/correlator.py +476 -0
- core/http_client.py +243 -0
- core/logger.py +97 -0
- core/module_loader.py +69 -0
- core/risk_engine.py +47 -0
- core/session_manager.py +254 -0
- exploitgraph-1.0.0.dist-info/METADATA +429 -0
- exploitgraph-1.0.0.dist-info/RECORD +42 -0
- exploitgraph-1.0.0.dist-info/WHEEL +5 -0
- exploitgraph-1.0.0.dist-info/entry_points.txt +2 -0
- exploitgraph-1.0.0.dist-info/licenses/LICENSE +21 -0
- exploitgraph-1.0.0.dist-info/top_level.txt +2 -0
- modules/__init__.py +0 -0
- modules/base.py +82 -0
- modules/cloud/__init__.py +0 -0
- modules/cloud/aws_credential_validator.py +340 -0
- modules/cloud/azure_enum.py +289 -0
- modules/cloud/cloudtrail_analyzer.py +494 -0
- modules/cloud/gcp_enum.py +272 -0
- modules/cloud/iam_enum.py +321 -0
- modules/cloud/iam_privilege_escalation.py +515 -0
- modules/cloud/metadata_check.py +315 -0
- modules/cloud/s3_enum.py +469 -0
- modules/discovery/__init__.py +0 -0
- modules/discovery/http_enum.py +235 -0
- modules/discovery/subdomain_enum.py +260 -0
- modules/exploitation/__init__.py +0 -0
- modules/exploitation/api_exploit.py +403 -0
- modules/exploitation/jwt_attack.py +346 -0
- modules/exploitation/ssrf_scanner.py +258 -0
- modules/reporting/__init__.py +0 -0
- modules/reporting/html_report.py +446 -0
- modules/reporting/json_export.py +107 -0
- modules/secrets/__init__.py +0 -0
- modules/secrets/file_secrets.py +358 -0
- modules/secrets/git_secrets.py +267 -0
|
@@ -0,0 +1,494 @@
|
|
|
1
|
+
"""
|
|
2
|
+
ExploitGraph Module: CloudTrail Log Analyzer
|
|
3
|
+
Category: cloud
|
|
4
|
+
|
|
5
|
+
Automatically decompresses and parses AWS CloudTrail logs found in S3 buckets.
|
|
6
|
+
This is the critical next step after s3_enum downloads .json.gz log files.
|
|
7
|
+
|
|
8
|
+
Real attack scenario (flaws2.cloud):
|
|
9
|
+
s3_enum finds s3://flaws2-logs → downloads CloudTrail .json.gz files
|
|
10
|
+
→ this module parses them → extracts IAM access keys, usernames, API calls
|
|
11
|
+
→ those credentials feed aws_credential_validator → full compromise
|
|
12
|
+
|
|
13
|
+
What CloudTrail logs reveal:
|
|
14
|
+
- Which IAM users/roles made API calls
|
|
15
|
+
- Access key IDs used (AKIA...)
|
|
16
|
+
- IP addresses of callers
|
|
17
|
+
- Every AWS API call made (CreateUser, PutRolePolicy, etc.)
|
|
18
|
+
- Error codes revealing permission attempts
|
|
19
|
+
|
|
20
|
+
MITRE: T1530, T1078.004, T1552.005
|
|
21
|
+
"""
|
|
22
|
+
from __future__ import annotations
|
|
23
|
+
import gzip
|
|
24
|
+
import json
|
|
25
|
+
import re
|
|
26
|
+
import io
|
|
27
|
+
from collections import defaultdict
|
|
28
|
+
from typing import TYPE_CHECKING
|
|
29
|
+
|
|
30
|
+
import requests
|
|
31
|
+
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
|
32
|
+
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
|
|
33
|
+
|
|
34
|
+
from modules.base import BaseModule, ModuleResult
|
|
35
|
+
|
|
36
|
+
if TYPE_CHECKING:
|
|
37
|
+
from core.session_manager import Session
|
|
38
|
+
|
|
39
|
+
# API calls that indicate privilege escalation attempts
# (IAM mutations that grant or extend access — the classic IAM priv-esc set)
PRIVESC_EVENTS = {
    "CreateUser", "CreateAccessKey", "AttachUserPolicy", "AttachRolePolicy",
    "PutUserPolicy", "PutRolePolicy", "CreatePolicy", "CreatePolicyVersion",
    "SetDefaultPolicyVersion", "PassRole", "AssumeRole", "AssumeRoleWithWebIdentity",
    "UpdateAssumeRolePolicy", "AddUserToGroup", "CreateGroup", "AttachGroupPolicy",
    "PutGroupPolicy", "CreateLoginProfile", "UpdateLoginProfile",
}

# API calls that indicate data exfiltration
# (read access to S3 objects, Secrets Manager and SSM Parameter Store)
EXFIL_EVENTS = {
    "GetObject", "ListObjects", "ListBuckets", "GetBucketAcl",
    "GetBucketPolicy", "ListObjectsV2", "GetSecretValue",
    "GetParameter", "GetParameters", "DescribeSecret",
}

# Reconnaissance events
# NOTE(review): RECON_EVENTS is not referenced by any method visible in this
# file — confirm whether another module consumes it before removing.
RECON_EVENTS = {
    "DescribeInstances", "ListUsers", "ListRoles", "ListPolicies",
    "GetAccountSummary", "ListBuckets", "DescribeSecurityGroups",
    "GetCallerIdentity", "ListAttachedUserPolicies", "ListGroupsForUser",
    "GetUserPolicy", "ListAccessKeys", "GetAccountAuthorizationDetails",
}

# Events that GuardDuty would flag
NOISY_EVENTS = {
    "GetCallerIdentity",  # Reconnaissance — always flagged
    "ListUsers",  # IAM enumeration
    "ListRoles",
    "GetAccountAuthorizationDetails",  # Very noisy — full IAM dump
    "CreateAccessKey",  # Credential creation
    "DeleteTrail",  # Covering tracks
    "StopLogging",  # Covering tracks
}
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
class CloudTrailAnalyzer(BaseModule):
    """Parse AWS CloudTrail logs already downloaded into the session.

    Consumes the ``.json.gz`` / ``.json`` files recorded in
    ``session.exposed_files`` (typically by the s3_enum module), extracts
    IAM credentials, principal activity, and attack indicators, and emits
    findings plus attack-graph nodes/edges.
    """

    NAME = "cloudtrail_analyzer"
    DESCRIPTION = "Parse AWS CloudTrail logs from downloaded .json.gz files — extract credentials, IAM activity, and attack indicators"
    AUTHOR = "ExploitGraph Team"
    VERSION = "1.1.0"
    CATEGORY = "cloud"
    SEVERITY = "CRITICAL"
    # MITRE ATT&CK technique IDs this module maps to (also see module docstring)
    MITRE = ["T1530", "T1078.004", "T1552.005"]
    # Equivalent native AWS CLI command, shown to the operator for reference
    AWS_PARALLEL = "aws cloudtrail lookup-events --lookup-attributes AttributeKey=Username,AttributeValue=<user>"

    # User-tunable options, read at runtime via self.get_option(...)
    OPTIONS = {
        "TARGET": {"default": "", "required": False, "description": "Target URL (optional, for fetching additional logs)"},
        "MAX_EVENTS": {"default": "1000", "required": False, "description": "Maximum events to parse per log file"},
        "DOWNLOAD_DIR": {"default": "downloads", "required": False, "description": "Directory for saving decompressed logs"},
        "DETECT_PRIVESC": {"default": "true", "required": False, "description": "Flag privilege escalation API calls"},
        "DETECT_EXFIL": {"default": "true", "required": False, "description": "Flag data exfiltration API calls"},
        "GUARDDUTY_MODE": {"default": "true", "required": False, "description": "Flag events that would trigger GuardDuty"},
    }
|
|
94
|
+
|
|
95
|
+
def run(self, session: "Session") -> ModuleResult:
    """Parse every CloudTrail log file recorded in the session.

    Aggregates normalised events across all files, harvests credentials,
    principals and source IPs, generates findings, and wires the results
    into the session's attack graph.

    Returns a ModuleResult whose data dict carries summary counters.
    """
    from core.logger import log
    # Fixed: the original also did an unused `import os` here.

    self._timer_start()
    log.section("CloudTrail Log Analyzer")
    log.info("MITRE: T1530 — Data from Cloud Storage / T1078.004 — Valid Accounts")
    log.info("Parsing downloaded CloudTrail .json.gz files from s3_enum")

    # Find all CloudTrail log files recorded in the session by s3_enum.
    log_files = self._find_cloudtrail_files(session)

    if not log_files:
        log.warning("No CloudTrail log files found in session.")
        log.step("Run cloud/s3_enum first to download logs from exposed buckets.")
        return ModuleResult(True, {"events_parsed": 0,
                                   "skipped_reason": "No CloudTrail files in session"})

    log.info(f"Found {len(log_files)} CloudTrail log files to analyze")

    # Accumulators aggregated across all log files.
    all_events = []
    credentials_found = []
    principals = defaultdict(list)  # ARN → [event names]
    ips = defaultdict(int)          # source IP → call count
    privesc_events = []
    exfil_events = []
    noisy_events = []
    error_events = []

    for file_info in log_files:
        # BUGFIX: entries matched only by "url" in _find_cloudtrail_files
        # have no "path" key — the original file_info['path'] raised KeyError.
        log.step(f"Parsing: {file_info.get('path') or file_info.get('url', '')}")
        events = self._parse_log_file(file_info, session)
        if events:
            log.success(f"  Parsed {len(events)} events")
            all_events.extend(events)

            # Categorise each event and harvest credentials/principals/IPs.
            for event in events:
                self._analyze_event(event, credentials_found, principals,
                                    ips, privesc_events, exfil_events,
                                    noisy_events, error_events)

    # Turn the aggregated analysis into session findings/secrets.
    self._generate_findings(
        all_events, credentials_found, principals, ips,
        privesc_events, exfil_events, noisy_events, error_events, session
    )

    # Add to attack graph: logs themselves, then any extracted credentials.
    if all_events or credentials_found:
        session.add_graph_node(
            "cloudtrail", f"CloudTrail Logs\n{len(all_events)} events",
            "asset", "HIGH",
            f"{len(credentials_found)} credentials, {len(privesc_events)} priv-esc events"
        )
        session.add_graph_edge("s3_exposure", "cloudtrail",
                               "log analysis", "T1530")

    if credentials_found:
        session.add_graph_node(
            "cloudtrail_creds",
            f"IAM Creds in Logs\n{len(credentials_found)} found",
            "credential", "CRITICAL"
        )
        session.add_graph_edge("cloudtrail", "cloudtrail_creds",
                               "extract credentials", "T1078.004")

    # Print summary (timer result is recorded by _timer_stop itself;
    # the original bound it to an unused local).
    self._timer_stop()
    log.newline()
    log.banner("CLOUDTRAIL ANALYSIS COMPLETE")
    log.kv("Log files analyzed", str(len(log_files)))
    log.kv("Total events", str(len(all_events)))
    log.kv("Unique principals", str(len(principals)))
    log.kv("Credentials found", str(len(credentials_found)))
    log.kv("Priv-esc events", str(len(privesc_events)))
    log.kv("Exfiltration events", str(len(exfil_events)))
    log.kv("GuardDuty-noisy", str(len(noisy_events)))

    if credentials_found:
        log.newline()
        log.critical("CREDENTIALS EXTRACTED FROM CLOUDTRAIL LOGS:")
        for cred in credentials_found:
            log.secret(cred["type"], cred["value"])
            log.step(f"Used by: {cred.get('principal', 'unknown')}")
            log.step(f"Source IP: {cred.get('ip', 'unknown')}")

    return ModuleResult(True, {
        "log_files": len(log_files),
        "events_parsed": len(all_events),
        "credentials": len(credentials_found),
        "principals": len(principals),
        "privesc_events": len(privesc_events),
        "exfil_events": len(exfil_events),
    })
|
|
191
|
+
|
|
192
|
+
# ── File discovery ─────────────────────────────────────────────────────
|
|
193
|
+
|
|
194
|
+
def _find_cloudtrail_files(self, session: "Session") -> list[dict]:
|
|
195
|
+
"""Find CloudTrail log files from session's downloaded content."""
|
|
196
|
+
files = []
|
|
197
|
+
for ef in session.exposed_files:
|
|
198
|
+
path = ef.get("path", "") or ef.get("url", "")
|
|
199
|
+
# Match CloudTrail patterns: AWSLogs/*/CloudTrail/*.json.gz
|
|
200
|
+
if (("CloudTrail" in path or "AWSLogs" in path) and
|
|
201
|
+
(path.endswith(".json.gz") or path.endswith(".json"))):
|
|
202
|
+
files.append(ef)
|
|
203
|
+
# Also match any .json.gz we downloaded
|
|
204
|
+
elif path.endswith(".json.gz"):
|
|
205
|
+
files.append(ef)
|
|
206
|
+
return files
|
|
207
|
+
|
|
208
|
+
# ── Log parsing ────────────────────────────────────────────────────────
|
|
209
|
+
|
|
210
|
+
def _parse_log_file(self, file_info: dict, session: "Session") -> list[dict]:
    """Decompress and parse one CloudTrail log file into normalised events.

    Accepts in-memory bytes ("raw_bytes"), already-decoded text ("content"),
    or a URL that is re-fetched when neither is present. Best-effort by
    design: any fetch/decompress/parse failure returns [] so one bad file
    never aborts the whole run.

    (``session`` is currently unused here but kept for interface parity
    with the other per-file hooks.)
    """
    from core.logger import log

    # Robustness fix: a malformed MAX_EVENTS option used to raise ValueError
    # and crash the module; fall back to the documented default instead.
    try:
        max_events = int(self.get_option("MAX_EVENTS", "1000"))
    except (TypeError, ValueError):
        max_events = 1000
    events: list[dict] = []

    raw_bytes = file_info.get("raw_bytes")
    content = file_info.get("content")
    url = file_info.get("url", "")

    # If we only have the URL, re-fetch it.
    # verify=False intentionally: lab targets often use self-signed certs.
    if not raw_bytes and not content and url.startswith("http"):
        try:
            r = requests.get(url, timeout=15, verify=False)
            if r.status_code == 200:
                raw_bytes = r.content
                log.step(f"  Re-fetched: {len(raw_bytes)} bytes")
        except Exception as e:
            log.warning(f"  Could not fetch: {url}: {e}")
            return []

    # Decompress if gzipped (gzip magic bytes: 0x1f 0x8b).
    if raw_bytes:
        try:
            if raw_bytes[:2] == b'\x1f\x8b':
                with gzip.open(io.BytesIO(raw_bytes)) as gz:
                    content = gz.read().decode("utf-8", errors="ignore")
                log.step(f"  Decompressed: {len(content)} chars")
            else:
                content = raw_bytes.decode("utf-8", errors="ignore")
        except Exception as e:
            log.warning(f"  Decompression failed: {e}")
            return []

    if not content:
        return []

    # Parse JSON — CloudTrail delivers {"Records": [...]}.
    try:
        data = json.loads(content)
        # Robustness fix: a top-level JSON array/scalar used to raise an
        # uncaught AttributeError on .get(); treat it as "no records".
        records = data.get("Records", []) if isinstance(data, dict) else []
        log.step(f"  Records: {len(records)}")

        for record in records[:max_events]:
            events.append(self._normalize_event(record))

    except json.JSONDecodeError as e:
        log.warning(f"  JSON parse error: {e}")
        # Fall back to scraping flat (non-nested) JSON objects out of the
        # damaged text — partial recovery beats losing the whole file.
        for match in re.finditer(r'\{[^{}]+\}', content):
            try:
                events.append(self._normalize_event(json.loads(match.group(0))))
            except Exception:
                pass

    return events
|
|
270
|
+
|
|
271
|
+
def _normalize_event(self, record: dict) -> dict:
|
|
272
|
+
"""Extract the key fields from a CloudTrail record."""
|
|
273
|
+
identity = record.get("userIdentity", {})
|
|
274
|
+
return {
|
|
275
|
+
"event_time": record.get("eventTime", ""),
|
|
276
|
+
"event_name": record.get("eventName", ""),
|
|
277
|
+
"event_source": record.get("eventSource", ""),
|
|
278
|
+
"source_ip": record.get("sourceIPAddress", ""),
|
|
279
|
+
"user_agent": record.get("userAgent", ""),
|
|
280
|
+
"region": record.get("awsRegion", ""),
|
|
281
|
+
"request_params": record.get("requestParameters") or {},
|
|
282
|
+
"response_elements": record.get("responseElements") or {},
|
|
283
|
+
"error_code": record.get("errorCode", ""),
|
|
284
|
+
"error_message": record.get("errorMessage", ""),
|
|
285
|
+
# Identity
|
|
286
|
+
"principal_type": identity.get("type", ""),
|
|
287
|
+
"principal_arn": identity.get("arn", ""),
|
|
288
|
+
"principal_account": identity.get("accountId", ""),
|
|
289
|
+
"access_key_id": identity.get("accessKeyId", ""),
|
|
290
|
+
"username": identity.get("userName", ""),
|
|
291
|
+
# Assumed role session
|
|
292
|
+
"session_issuer_arn": (identity.get("sessionContext", {})
|
|
293
|
+
.get("sessionIssuer", {}).get("arn", "")),
|
|
294
|
+
}
|
|
295
|
+
|
|
296
|
+
# ── Analysis ───────────────────────────────────────────────────────────
|
|
297
|
+
|
|
298
|
+
def _analyze_event(self, event: dict, credentials_found: list,
                   principals: dict, ips: dict,
                   privesc_events: list, exfil_events: list,
                   noisy_events: list, error_events: list) -> None:
    """Categorise one normalised event and harvest credentials/principals/IPs.

    Mutates the accumulator arguments in place; returns nothing.
    """
    event_name = event["event_name"]
    source_ip = event["source_ip"]
    arn = event["principal_arn"] or event.get("session_issuer_arn", "")
    access_key = event["access_key_id"]

    # Track which principal made which calls
    if arn:
        principals[arn].append(event_name)

    # Track caller IPs ("AWS Internal" is service-to-service noise)
    if source_ip and source_ip not in ("AWS Internal",):
        ips[source_ip] += 1

    # Extract the access key ID used by the caller (long-term AKIA keys only)
    if access_key and re.match(r'AKIA[A-Z0-9]{16}', access_key):
        if not any(c["value"] == access_key for c in credentials_found):
            credentials_found.append({
                "type": "AWS_ACCESS_KEY",
                "value": access_key,
                "principal": arn or event.get("username", "unknown"),
                "ip": source_ip,
                "source": "CloudTrail log",
            })

    # Extract access keys minted during the logged session
    # (CreateAccessKey responses carry the new key ID).
    if event_name == "CreateAccessKey":
        resp = event.get("response_elements", {}) or {}
        new_key = (resp.get("accessKey", {}) or {}).get("accessKeyId", "")
        # BUGFIX: this branch previously lacked the dedupe check the branch
        # above has, so the same created key was appended once per
        # occurrence across events/log files.
        if (new_key and re.match(r'AKIA[A-Z0-9]{16}', new_key)
                and not any(c["value"] == new_key for c in credentials_found)):
            credentials_found.append({
                "type": "AWS_ACCESS_KEY",
                "value": new_key,
                "principal": arn,
                "ip": source_ip,
                "source": "CloudTrail CreateAccessKey response",
                "note": "Key was CREATED during the logged session",
            })

    # Categorise the event for the finding generators
    if event_name in PRIVESC_EVENTS:
        privesc_events.append(event)
    if event_name in EXFIL_EVENTS:
        exfil_events.append(event)
    if event_name in NOISY_EVENTS:
        noisy_events.append(event)
    if event.get("error_code"):
        error_events.append(event)
|
|
350
|
+
|
|
351
|
+
# ── Finding generation ──────────────────────────────────────────────────
|
|
352
|
+
|
|
353
|
+
def _generate_findings(self, all_events: list, credentials_found: list,
                       principals: dict, ips: dict,
                       privesc_events: list, exfil_events: list,
                       noisy_events: list, error_events: list,
                       session: "Session") -> None:
    """Convert the aggregated analysis into session findings and secrets.

    Emits up to five finding families (credentials, priv-esc, data access,
    GuardDuty-noisy calls, external IPs) plus a console summary of the
    most active principals.

    NOTE(review): ``error_events`` is accepted but not used below —
    confirm whether an "access-denied probing" finding was intended.
    """
    from core.logger import log

    # Finding 1: Credentials found in logs — one secret + one finding per key
    for cred in credentials_found:
        session.add_secret(
            secret_type = cred["type"],
            value = cred["value"],
            source = cred["source"],
            severity = "CRITICAL",
            description = f"AWS Access Key ID found in CloudTrail log. Used by: {cred['principal']}",
            aws_parallel = "Leaked IAM access key — validate with: aws sts get-caller-identity",
        )
        session.add_finding(
            module = self.NAME,
            title = f"AWS Access Key Found in CloudTrail Logs",
            severity = "CRITICAL",
            description = (
                f"Access Key ID extracted from CloudTrail audit logs. "
                f"This key was actively used to make AWS API calls. "
                f"Principal: {cred['principal']}"
            ),
            evidence = (
                f"Key: {cred['value']}\n"
                f"Principal: {cred['principal']}\n"
                f"Source IP: {cred['ip']}\n"
                f"Found in: {cred['source']}"
            ),
            recommendation = (
                "Immediately check if this key is still active:\n"
                f"aws iam list-access-keys --user-name <user>\n"
                "Disable if active:\n"
                f"aws iam update-access-key --access-key-id {cred['value']} --status Inactive"
            ),
            cvss_score = 9.8,
            aws_parallel = "Stolen IAM access key from CloudTrail — validate with STS GetCallerIdentity",
            mitre_technique = "T1078.004",
        )

    # Finding 2: Privilege escalation events (names deduped via a set)
    if privesc_events:
        event_names = list({e["event_name"] for e in privesc_events})
        session.add_finding(
            module = self.NAME,
            title = f"Privilege Escalation Activity in CloudTrail ({len(privesc_events)} events)",
            severity = "CRITICAL",
            description = (
                f"CloudTrail logs contain {len(privesc_events)} events associated with "
                f"privilege escalation: {', '.join(event_names[:5])}"
            ),
            evidence = self._format_events(privesc_events[:5]),
            recommendation = (
                "Review all IAM changes. Check for unauthorized policy attachments. "
                "Enable IAM Access Analyzer. Review CloudTrail for the full timeline."
            ),
            cvss_score = 9.5,
            aws_parallel = "IAM privilege escalation via PutUserPolicy, AssumeRole chains",
            mitre_technique = "T1078.004",
        )

    # Finding 3: Data exfiltration events (S3 GetObject called out separately)
    if exfil_events:
        s3_gets = [e for e in exfil_events if e["event_name"] == "GetObject"]
        session.add_finding(
            module = self.NAME,
            title = f"Data Access Events in CloudTrail ({len(exfil_events)} events)",
            severity = "HIGH",
            description = (
                f"{len(exfil_events)} data access events found. "
                f"{len(s3_gets)} S3 GetObject calls."
            ),
            evidence = self._format_events(exfil_events[:5]),
            recommendation = (
                "Review S3 access logs. Enable S3 server access logging. "
                "Use Macie to detect sensitive data exposure."
            ),
            cvss_score = 7.5,
            aws_parallel = "S3 GetObject calls — potential data exfiltration",
            mitre_technique = "T1530",
        )

    # Finding 4: GuardDuty-noisy events — only when GUARDDUTY_MODE is enabled
    if noisy_events and self.get_option("GUARDDUTY_MODE", "true").lower() == "true":
        event_names = list({e["event_name"] for e in noisy_events})
        log.warning(f"GuardDuty would flag these API calls: {', '.join(event_names)}")
        session.add_finding(
            module = self.NAME,
            title = "GuardDuty Alert — High-Signal API Calls Detected",
            severity = "HIGH",
            description = (
                f"The following API calls would trigger GuardDuty findings: "
                f"{', '.join(event_names)}. "
                "These indicate reconnaissance or credential abuse."
            ),
            evidence = self._format_events(noisy_events[:3]),
            recommendation = "Enable GuardDuty in all regions. Review and respond to findings.",
            cvss_score = 7.0,
            aws_parallel = "GuardDuty: UnauthorizedAccess:IAMUser/TorIPCaller or Recon:IAMUser/UserPermissions",
            mitre_technique = "T1078.004",
        )

    # Finding 5: Unique external IPs.
    # NOTE(review): the "172." prefix excludes ALL of 172.0.0.0/8, not just
    # the RFC 1918 range 172.16.0.0/12 — some public IPs are filtered out.
    external_ips = {ip: cnt for ip, cnt in ips.items()
                    if not ip.startswith(("10.", "172.", "192.168.", "AWS"))}
    if external_ips:
        # Most active external callers first
        top_ips = sorted(external_ips.items(), key=lambda x: x[1], reverse=True)[:5]
        session.add_finding(
            module = self.NAME,
            title = f"External IP Addresses in CloudTrail ({len(external_ips)} unique)",
            severity = "MEDIUM",
            description = (
                f"CloudTrail shows API calls from {len(external_ips)} external IPs. "
                "Unexpected IPs may indicate compromised credentials."
            ),
            evidence = "\n".join(f"{ip}: {cnt} calls" for ip, cnt in top_ips),
            recommendation = "Investigate unexpected source IPs. Use IAM condition keys to restrict by IP.",
            cvss_score = 6.0,
            aws_parallel = "GuardDuty: UnauthorizedAccess:IAMUser/ConsoleLoginSuccess.B",
            mitre_technique = "T1078.004",
        )

    # Finding 6: Console summary of who did what (top 5 principals by volume;
    # dict.fromkeys preserves first-seen order while deduping event names)
    if principals:
        top_principals = sorted(principals.items(),
                                key=lambda x: len(x[1]), reverse=True)[:5]
        for arn, events_list in top_principals:
            log.step(f"Principal: {arn} → {len(events_list)} API calls")
            event_summary = ", ".join(list(dict.fromkeys(events_list))[:8])
            log.step(f"  Calls: {event_summary}")
|
|
486
|
+
|
|
487
|
+
def _format_events(self, events: list) -> str:
|
|
488
|
+
lines = []
|
|
489
|
+
for e in events:
|
|
490
|
+
lines.append(
|
|
491
|
+
f"{e['event_time'][:19]} | {e['event_name']:<35} | "
|
|
492
|
+
f"{e['principal_arn'][:40]} | {e['source_ip']}"
|
|
493
|
+
)
|
|
494
|
+
return "\n".join(lines)
|