exploitgraph 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- core/__init__.py +0 -0
- core/attack_graph.py +83 -0
- core/aws_client.py +284 -0
- core/config.py +83 -0
- core/console.py +469 -0
- core/context_engine.py +172 -0
- core/correlator.py +476 -0
- core/http_client.py +243 -0
- core/logger.py +97 -0
- core/module_loader.py +69 -0
- core/risk_engine.py +47 -0
- core/session_manager.py +254 -0
- exploitgraph-1.0.0.dist-info/METADATA +429 -0
- exploitgraph-1.0.0.dist-info/RECORD +42 -0
- exploitgraph-1.0.0.dist-info/WHEEL +5 -0
- exploitgraph-1.0.0.dist-info/entry_points.txt +2 -0
- exploitgraph-1.0.0.dist-info/licenses/LICENSE +21 -0
- exploitgraph-1.0.0.dist-info/top_level.txt +2 -0
- modules/__init__.py +0 -0
- modules/base.py +82 -0
- modules/cloud/__init__.py +0 -0
- modules/cloud/aws_credential_validator.py +340 -0
- modules/cloud/azure_enum.py +289 -0
- modules/cloud/cloudtrail_analyzer.py +494 -0
- modules/cloud/gcp_enum.py +272 -0
- modules/cloud/iam_enum.py +321 -0
- modules/cloud/iam_privilege_escalation.py +515 -0
- modules/cloud/metadata_check.py +315 -0
- modules/cloud/s3_enum.py +469 -0
- modules/discovery/__init__.py +0 -0
- modules/discovery/http_enum.py +235 -0
- modules/discovery/subdomain_enum.py +260 -0
- modules/exploitation/__init__.py +0 -0
- modules/exploitation/api_exploit.py +403 -0
- modules/exploitation/jwt_attack.py +346 -0
- modules/exploitation/ssrf_scanner.py +258 -0
- modules/reporting/__init__.py +0 -0
- modules/reporting/html_report.py +446 -0
- modules/reporting/json_export.py +107 -0
- modules/secrets/__init__.py +0 -0
- modules/secrets/file_secrets.py +358 -0
- modules/secrets/git_secrets.py +267 -0
|
@@ -0,0 +1,272 @@
|
|
|
1
|
+
"""
|
|
2
|
+
ExploitGraph Module: GCP Storage Bucket Enumerator
|
|
3
|
+
Category: cloud
|
|
4
|
+
|
|
5
|
+
Detects publicly accessible Google Cloud Storage buckets.
|
|
6
|
+
Works without credentials (HTTP/JSON API mode).
|
|
7
|
+
|
|
8
|
+
Real-world misconfigurations:
|
|
9
|
+
- allUsers READ permission on bucket
|
|
10
|
+
- allAuthenticatedUsers READ (all Google accounts)
|
|
11
|
+
- Public bucket with sensitive data
|
|
12
|
+
- Firebase storage rules set to public
|
|
13
|
+
|
|
14
|
+
GCP Parallel:
|
|
15
|
+
gsutil ls gs://bucket-name
|
|
16
|
+
curl https://storage.googleapis.com/bucket-name/?list-type=2
|
|
17
|
+
"""
|
|
18
|
+
from __future__ import annotations
|
|
19
|
+
import re
|
|
20
|
+
import json
|
|
21
|
+
from typing import TYPE_CHECKING
|
|
22
|
+
|
|
23
|
+
import requests
|
|
24
|
+
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
|
25
|
+
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
|
|
26
|
+
|
|
27
|
+
from modules.base import BaseModule, ModuleResult
|
|
28
|
+
|
|
29
|
+
if TYPE_CHECKING:
|
|
30
|
+
from core.session_manager import Session
|
|
31
|
+
|
|
32
|
+
# Unauthenticated GCS endpoints: XML API host and JSON API bucket resource root.
GCS_API = "https://storage.googleapis.com"
GCS_JSON = "https://www.googleapis.com/storage/v1/b"

# Suffixes commonly appended to an organisation/app base name when buckets are
# created by hand; used to expand one discovered base name into candidates.
COMMON_SUFFIXES = [
    # backup / storage naming
    "-backup", "-backups", "-storage",
    # static content naming
    "-assets", "-static", "-public", "-media", "-files",
    # data / ops naming
    "-data", "-uploads", "-logs", "-config",
    # App Engine default bucket + environment names
    ".appspot.com", "-dev", "-prod", "-staging",
]
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class GcpEnum(BaseModule):
    """Enumerate GCP Cloud Storage buckets and Firebase endpoints for public access.

    Works unauthenticated over HTTP: candidate bucket names are derived from the
    target hostname and from GCS/Firebase references scraped out of the target's
    HTML, then each candidate is probed via the XML and JSON storage APIs.
    """

    NAME = "gcp_enum"
    DESCRIPTION = "Enumerate GCP Cloud Storage buckets — detect public access and Firebase misconfigurations"
    AUTHOR = "ExploitGraph Team"
    VERSION = "1.0.0"
    CATEGORY = "cloud"
    SEVERITY = "CRITICAL"
    MITRE = ["T1530", "T1580"]
    AWS_PARALLEL = "GCP equivalent of: aws s3 ls s3://bucket --no-sign-request"

    OPTIONS = {
        "TARGET": {"default": "", "required": True, "description": "Target URL or GCP project ID"},
        "PROJECT_ID": {"default": "", "required": False, "description": "GCP project ID to enumerate"},
        "BUCKET_NAME": {"default": "", "required": False, "description": "Specific bucket name to test"},
        "WORDLIST": {"default": "", "required": False, "description": "Custom bucket name wordlist"},
        "TIMEOUT": {"default": "8", "required": False, "description": "Request timeout"},
        "CHECK_FIREBASE": {"default": "true", "required": False, "description": "Check Firebase storage endpoints"},
    }

    def run(self, session: "Session") -> ModuleResult:
        """Discover candidate bucket names, probe each for public access,
        optionally probe Firebase, and record findings/graph nodes on *session*.

        Returns a successful ModuleResult with the count of public buckets found.
        """
        from core.logger import log

        target = self.get_option("TARGET") or session.target
        timeout = int(self.get_option("TIMEOUT", "8"))

        self._timer_start()
        log.section("GCP Cloud Storage Enumeration")
        log.info("MITRE: T1530 — Data from Cloud Storage Object")
        log.info("GCP cmd: gsutil ls gs://bucket-name (no credentials needed if public)")

        found_buckets: list[dict] = []

        bucket_names = self._discover_bucket_names(target, timeout)
        if b := self.get_option("BUCKET_NAME"):
            # An explicitly supplied bucket is tested first.
            bucket_names.insert(0, b)

        # Cap the number of probes so a large scrape doesn't turn into a flood.
        for name in bucket_names[:25]:
            result = self._test_bucket(name, timeout, session)
            if result:
                found_buckets.append(result)

        if self.get_option("CHECK_FIREBASE", "true").lower() == "true":
            self._check_firebase(target, timeout, session)

        if found_buckets:
            session.add_graph_node("gcp_exposure",
                                   f"GCP Storage\nExposed ({len(found_buckets)} buckets)",
                                   "exposure", "CRITICAL")
            session.add_graph_edge("http_enum", "gcp_exposure",
                                   "public GCS access", "T1530")

        elapsed = self._timer_stop()
        log.success(f"GCP enum done in {elapsed}s — {len(found_buckets)} public buckets")

        return ModuleResult(True, {"buckets_found": len(found_buckets)})

    def _discover_bucket_names(self, target: str, timeout: int) -> list[str]:
        """Build candidate bucket names from the target hostname and from
        GCS/Firebase URLs and project IDs found in the target's response body."""
        from core.logger import log
        from urllib.parse import urlparse

        names: set[str] = set()

        hostname = urlparse(target).hostname or ""
        base = hostname.split(".")[0]

        if base and base not in ("localhost", "127"):
            names.add(base)
            for suffix in COMMON_SUFFIXES:
                names.add(f"{base}{suffix}")

        # Scrape the target page for GCS/Firebase references.
        log.step("Scanning for GCS/Firebase references...")
        try:
            # verify=False is deliberate: scan targets often have broken TLS.
            r = requests.get(target, timeout=timeout, verify=False)

            # GCS bucket URL patterns (path-style, virtual-hosted, gs://, Firebase).
            for pattern in [
                r'storage\.googleapis\.com/([a-z0-9][-a-z0-9_.]{1,220})',
                r'([a-z0-9][-a-z0-9_.]{1,220})\.storage\.googleapis\.com',
                r'gs://([a-z0-9][-a-z0-9_.]{1,220})',
                r'firebasestorage\.googleapis\.com/v0/b/([^/\s"\']+)',
            ]:
                for match in re.finditer(pattern, r.text, re.IGNORECASE):
                    name = match.group(1).lower().rstrip("/")
                    names.add(name)
                    log.found(f"GCS reference: {name}")

            # Project IDs (e.g. embedded Firebase config) — a project's default
            # App Engine bucket is <project>.appspot.com.
            for match in re.finditer(r'"projectId"\s*:\s*"([^"]+)"', r.text):
                pid = match.group(1)
                names.add(pid)
                names.add(f"{pid}.appspot.com")
                log.found(f"GCP project ID: {pid}")

        except Exception:
            pass  # network/connection error — continue with hostname-derived names

        return list(names)

    def _test_bucket(self, name: str, timeout: int, session: "Session") -> dict | None:
        """Probe one bucket name for anonymous listing (XML + JSON APIs).

        Returns a summary dict for a publicly listable bucket, else None.
        Also records findings and captures interesting exposed objects.
        """
        from core.logger import log

        urls_to_try = [
            f"{GCS_API}/{name}/",
            f"{GCS_API}/{name}/?list-type=2",
            f"https://{name}.storage.googleapis.com/",
        ]

        for url in urls_to_try:
            try:
                r = requests.get(url, timeout=timeout, verify=False)

                # Public bucket with listing enabled (XML or JSON listing markers).
                if r.status_code == 200 and (
                    "<ListBucketResult" in r.text or
                    '"kind": "storage#objects"' in r.text or
                    "<Contents>" in r.text
                ):
                    log.critical(f"PUBLIC GCS BUCKET: {name}")
                    log.info(f"GCP cmd: gsutil ls gs://{name}")

                    # Parse object names from either response format.
                    objects = re.findall(r'"name":\s*"([^"]+)"', r.text)
                    objects += re.findall(r'<Key>([^<]+)</Key>', r.text)
                    log.success(f"  Objects: {len(objects)}")
                    for obj in objects[:5]:
                        log.secret("Object", obj)

                    session.add_finding(
                        module="gcp_enum",
                        title=f"Public GCS Bucket: {name}",
                        severity="CRITICAL",
                        description=f"GCS bucket '{name}' is publicly accessible with object listing enabled.",
                        evidence=f"URL: {url}\nHTTP 200\nObjects: {', '.join(objects[:5])}",
                        recommendation=(
                            f"Remove allUsers permission:\n"
                            f"gsutil iam ch -d allUsers:objectViewer gs://{name}\n"
                            f"gsutil iam ch -d allUsers:legacyBucketReader gs://{name}"
                        ),
                        cvss_score=9.8,
                        aws_parallel="S3 bucket with public-read ACL and no Block Public Access",
                        mitre_technique="T1530",
                    )

                    # Capture objects whose names suggest secrets/config/backups.
                    for obj in objects:
                        if any(x in obj.lower() for x in [".env", "config", "secret", "key", "backup", ".sql"]):
                            try:
                                obj_r = requests.get(f"{GCS_API}/{name}/{obj}", timeout=timeout)
                                if obj_r.status_code == 200:
                                    log.found(f"Downloaded: {obj}")
                                    session.exposed_files.append({
                                        "url": f"{GCS_API}/{name}/{obj}",
                                        "path": obj,
                                        "content": obj_r.text,
                                        "source": "gcp_enum",
                                    })
                            except Exception:
                                pass  # network/connection error — continue scanning

                    return {"name": name, "url": url, "objects": len(objects)}

                # Bucket exists but listing is denied — still useful intel.
                elif r.status_code == 403:
                    log.step(f"[403] gs://{name} — exists but private")

            except Exception:
                pass  # network/connection error — try the next URL form

        # JSON API: an anonymously readable IAM policy is itself a misconfiguration.
        try:
            r = requests.get(f"{GCS_JSON}/{name}/iam", timeout=timeout)
            if r.status_code == 200:
                iam = r.json()
                for b in iam.get("bindings", []):
                    members = b.get("members", [])
                    if "allUsers" in members or "allAuthenticatedUsers" in members:
                        log.critical(f"PUBLIC IAM on gs://{name}: {b['role']} → {b['members']}")
                        session.add_finding(
                            module="gcp_enum",
                            title=f"GCS Bucket Public IAM: {name}",
                            severity="CRITICAL",
                            description=f"Bucket '{name}' has IAM policy granting {b['role']} to {b['members']}.",
                            evidence=f"IAM binding: {b}",
                            recommendation=f"gsutil iam ch -d allUsers:{b['role'].split('/')[-1]} gs://{name}",
                            cvss_score=9.5,
                            aws_parallel="S3 bucket policy with Principal: '*'",
                            mitre_technique="T1530",
                        )
        except Exception:
            pass  # network error or non-JSON body — nothing to report

        return None

    def _check_firebase(self, target: str, timeout: int, session: "Session") -> None:
        """Probe the conventional Firebase Realtime Database URL for the target's
        base name and record a finding if it is publicly readable."""
        from core.logger import log
        from urllib.parse import urlparse

        hostname = urlparse(target).hostname or ""
        base = hostname.split(".")[0]
        if not base or base in ("localhost", "127"):
            return

        firebase_db = f"https://{base}-default-rtdb.firebaseio.com/.json"
        try:
            r = requests.get(firebase_db, timeout=timeout)
        except Exception:
            return  # endpoint unreachable — nothing to report

        if r.status_code != 200:
            return

        log.critical(f"PUBLIC FIREBASE DATABASE: {firebase_db}")

        # FIX: previously `data_preview` was left unbound when the JSON parse
        # failed, and the finding was emitted outside the 200 branch — which
        # could raise NameError. Default to an empty preview instead.
        data_preview = ""
        try:
            data_preview = str(r.json())[:200]
        except Exception:
            pass  # body is not JSON — the open endpoint is still reportable

        session.add_finding(
            module="gcp_enum",
            title="Public Firebase Realtime Database",
            severity="CRITICAL",
            description="Firebase Realtime Database is publicly readable without authentication.",
            evidence=f"URL: {firebase_db}\nHTTP 200\nData: {data_preview}",
            recommendation="Set Firebase security rules to require authentication:\n"
                           '{"rules": {".read": "auth != null", ".write": "auth != null"}}',
            cvss_score=10.0,
            aws_parallel="DynamoDB table with no IAM conditions — publicly readable",
            mitre_technique="T1530",
        )
|
|
@@ -0,0 +1,321 @@
|
|
|
1
|
+
"""
|
|
2
|
+
ExploitGraph Module: AWS IAM Enumerator
|
|
3
|
+
Category: cloud
|
|
4
|
+
Read-only IAM enumeration using boto3 or discovered AWS credentials.
|
|
5
|
+
All operations are NON-DESTRUCTIVE (list/get only).
|
|
6
|
+
|
|
7
|
+
Real AWS misconfigurations detected:
|
|
8
|
+
- Users without MFA enabled
|
|
9
|
+
- Access keys older than 90 days
|
|
10
|
+
- Overly permissive policies (AdministratorAccess attached)
|
|
11
|
+
- Inline policies with wildcard actions
|
|
12
|
+
- Roles with overly broad trust policies
|
|
13
|
+
"""
|
|
14
|
+
from __future__ import annotations
|
|
15
|
+
from typing import TYPE_CHECKING
|
|
16
|
+
|
|
17
|
+
from modules.base import BaseModule, ModuleResult
|
|
18
|
+
|
|
19
|
+
if TYPE_CHECKING:
|
|
20
|
+
from core.session_manager import Session
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class IamEnum(BaseModule):
    """Read-only AWS IAM enumeration: users, roles, and account summary.

    Uses boto3 when available (explicit keys > profile > default credential
    chain); degrades to an HTTP-indicator mode otherwise. All API calls are
    non-destructive list/get operations.
    """

    NAME = "iam_enum"
    DESCRIPTION = "Enumerate AWS IAM users, roles, and policies — detect privilege escalation paths and MFA gaps"
    AUTHOR = "ExploitGraph Team"
    VERSION = "1.0.0"
    CATEGORY = "cloud"
    SEVERITY = "HIGH"
    MITRE = ["T1078.004", "T1580", "T1069.003"]
    AWS_PARALLEL = "aws iam list-users | aws iam list-roles | aws iam get-account-summary"

    OPTIONS = {
        "AWS_PROFILE": {"default": "", "required": False, "description": "AWS CLI profile name"},
        "AWS_REGION": {"default": "us-east-1", "required": False, "description": "AWS region"},
        "AWS_ACCESS_KEY": {"default": "", "required": False, "description": "AWS Access Key ID (from secrets module)"},
        "AWS_SECRET_KEY": {"default": "", "required": False, "description": "AWS Secret Access Key (from secrets module)"},
        "CHECK_MFA": {"default": "true", "required": False, "description": "Check for users without MFA"},
        "CHECK_KEY_AGE": {"default": "true", "required": False, "description": "Check for old access keys (>90 days)"},
        "CHECK_POLICIES": {"default": "true", "required": False, "description": "Check for dangerous policies"},
        "MAX_ITEMS": {"default": "100", "required": False, "description": "Max items to enumerate per list call"},
    }

    def run(self, session: "Session") -> ModuleResult:
        """Entry point. Returns ModuleResult with the finding count and account
        ID on success, or a failed ModuleResult carrying the error string."""
        from core.logger import log

        self._timer_start()
        log.section("AWS IAM Enumeration")
        log.info("MITRE: T1580 — Cloud Infrastructure Discovery")
        log.info("All operations are READ-ONLY (list/get only)")

        # Auto-populate credentials from file_secrets findings.
        self._auto_populate_creds(session)

        try:
            import boto3  # noqa: F401 — availability probe only
        except ImportError:
            # FIX: the original try had two `except ImportError` clauses; the
            # first swallowed the error, so this fallback path was unreachable.
            log.info("HTTP-based IAM detection running instead...")
            return self._http_fallback(session)

        try:
            client = self._get_client("iam", session)
            findings: list[str] = []

            # Identify caller. Pre-set account so a failed STS call cannot
            # leave it unbound (FIX: previously assigned only in a duplicated,
            # unreachable exception handler).
            account = "unknown"
            sts = self._get_client("sts", session)
            try:
                identity = sts.get_caller_identity()
                account = identity.get("Account", "unknown")
                arn = identity.get("Arn", "unknown")
                log.found(f"Authenticated as: {arn}")
                log.found(f"Account ID: {account}")
                session.add_graph_node("aws_identity", f"AWS Identity\n{arn.split('/')[-1]}",
                                       "asset", "INFO", arn)
            except Exception:
                pass  # identity lookup failed — proceed with account "unknown"

            # Enumerate users and roles, then account-level settings.
            findings.extend(self._enum_users(client, account, session))
            findings.extend(self._enum_roles(client, session))
            self._check_account_summary(client, session)

            elapsed = self._timer_stop()
            log.success(f"IAM enumeration done in {elapsed}s — {len(findings)} findings")

            return ModuleResult(True, {
                "findings": len(findings),
                "account": account,
            })

        except Exception as e:
            # FIX: duplicate `except Exception` removed — previously the first
            # clause `pass`ed, the failure result was unreachable, and run()
            # fell off the end returning None.
            return ModuleResult(False, {}, str(e))

    def _auto_populate_creds(self, session: "Session"):
        """If file_secrets found AWS keys, use them automatically."""
        from core.logger import log
        access_key = secret_key = None
        for s in session.secrets:
            if s["secret_type"] == "AWS_ACCESS_KEY" and not self.get_option("AWS_ACCESS_KEY"):
                access_key = s["value"]
                self.set_option("AWS_ACCESS_KEY", access_key)
            elif s["secret_type"] == "AWS_SECRET_KEY" and not self.get_option("AWS_SECRET_KEY"):
                secret_key = s["value"]
                self.set_option("AWS_SECRET_KEY", secret_key)

        if access_key:
            log.info(f"Auto-using discovered AWS Access Key: {access_key[:12]}...")

    def _get_client(self, service: str, session: "Session"):
        """Build a boto3 client. Precedence: explicit keys > named profile >
        default credential chain (env vars, instance metadata, etc.)."""
        import boto3
        profile = self.get_option("AWS_PROFILE", "")
        region = self.get_option("AWS_REGION", "us-east-1")
        access_key = self.get_option("AWS_ACCESS_KEY", "")
        secret_key = self.get_option("AWS_SECRET_KEY", "")

        if access_key and secret_key:
            return boto3.client(service, region_name=region,
                                aws_access_key_id=access_key,
                                aws_secret_access_key=secret_key)
        elif profile:
            boto_session = boto3.Session(profile_name=profile, region_name=region)
            return boto_session.client(service)
        else:
            return boto3.client(service, region_name=region)

    def _enum_users(self, iam, account: str, session: "Session") -> list:
        """List IAM users and flag missing MFA, stale active keys (>90 days),
        and attached Administrator/FullAccess policies. Returns finding tags."""
        from core.logger import log
        import datetime
        findings: list[str] = []
        max_items = int(self.get_option("MAX_ITEMS", "100"))

        try:
            paginator = iam.get_paginator("list_users")
            users = []
            for page in paginator.paginate(PaginationConfig={"MaxItems": max_items}):
                users.extend(page["Users"])

            log.info(f"IAM Users found: {len(users)}")

            for user in users:
                name = user["UserName"]
                arn = user["Arn"]
                log.step(f"User: {name} ({arn})")

                # Check MFA
                if self.get_option("CHECK_MFA", "true").lower() == "true":
                    try:
                        mfa = iam.list_mfa_devices(UserName=name)
                        if not mfa["MFADevices"]:
                            log.warning(f"  No MFA: {name}")
                            findings.append(f"no_mfa:{name}")
                            session.add_finding(
                                module="iam_enum",
                                title=f"IAM User Without MFA: {name}",
                                severity="HIGH",
                                description=f"IAM user '{name}' does not have MFA enabled.",
                                evidence=f"User ARN: {arn}\nMFA devices: 0",
                                # FIX: was a plain string — {name} was never interpolated.
                                recommendation=f"Enable MFA: aws iam enable-mfa-device --user-name {name} --serial-number arn:aws:iam::ACCOUNT:mfa/{name} --authentication-code1 CODE1 --authentication-code2 CODE2",
                                cvss_score=7.5,
                                aws_parallel="IAM user without MFA — vulnerable to credential stuffing",
                                mitre_technique="T1078.004",
                            )
                    except Exception:
                        pass  # per-user API error — continue with next check

                # Check access key age
                if self.get_option("CHECK_KEY_AGE", "true").lower() == "true":
                    try:
                        keys = iam.list_access_keys(UserName=name)
                        for key in keys["AccessKeyMetadata"]:
                            if key["Status"] == "Active":
                                age = (datetime.datetime.now(datetime.timezone.utc) -
                                       key["CreateDate"].astimezone(datetime.timezone.utc)).days
                                if age > 90:
                                    log.warning(f"  Old key ({age} days): {key['AccessKeyId']}")
                                    findings.append(f"old_key:{key['AccessKeyId']}")
                                    session.add_finding(
                                        module="iam_enum",
                                        title=f"Access Key Older Than 90 Days: {name}",
                                        severity="MEDIUM",
                                        description=f"Access key {key['AccessKeyId']} for user {name} is {age} days old.",
                                        evidence=f"Key: {key['AccessKeyId']}\nAge: {age} days\nStatus: Active",
                                        # FIX: was a plain string with dangling {name}/{old_key} placeholders.
                                        recommendation=f"Rotate: aws iam create-access-key --user-name {name} && aws iam delete-access-key --access-key-id {key['AccessKeyId']}",
                                        cvss_score=5.5,
                                        aws_parallel="Stale IAM access key — increased exposure window",
                                        mitre_technique="T1078.004",
                                    )
                    except Exception:
                        pass  # per-user API error — continue with next check

                # Check for admin policies
                if self.get_option("CHECK_POLICIES", "true").lower() == "true":
                    try:
                        attached = iam.list_attached_user_policies(UserName=name)
                        for policy in attached["AttachedPolicies"]:
                            if "AdministratorAccess" in policy["PolicyName"] or \
                               "FullAccess" in policy["PolicyName"]:
                                log.critical(f"  ADMIN POLICY: {policy['PolicyName']} on {name}")
                                findings.append(f"admin_policy:{name}:{policy['PolicyName']}")
                                session.add_finding(
                                    module="iam_enum",
                                    title=f"Overly Permissive Policy: {name}",
                                    severity="HIGH",
                                    description=f"User {name} has {policy['PolicyName']} attached.",
                                    evidence=f"Policy: {policy['PolicyArn']}",
                                    recommendation="Apply least-privilege. Remove AdministratorAccess. Use specific service policies.",
                                    cvss_score=8.0,
                                    aws_parallel="IAM user with AdministratorAccess — violates least privilege",
                                    mitre_technique="T1078.004",
                                )
                    except Exception:
                        pass  # per-user API error — continue with next user

        except Exception:
            # FIX: duplicated handler collapsed — list_users failure (e.g. no
            # iam:ListUsers permission) yields whatever was gathered so far.
            pass

        return findings

    def _enum_roles(self, iam, session: "Session") -> list:
        """List IAM roles and flag wildcard principals in trust policies.
        Returns finding tags."""
        from core.logger import log
        import json
        findings: list[str] = []

        try:
            paginator = iam.get_paginator("list_roles")
            roles = []
            for page in paginator.paginate():
                roles.extend(page["Roles"])

            log.info(f"IAM Roles found: {len(roles)}")

            for role in roles:
                name = role["RoleName"]
                trust = role.get("AssumeRolePolicyDocument", {})
                trust_str = json.dumps(trust)

                # Check for overly broad trust (any principal may assume the role).
                if '"*"' in trust_str or '"AWS": "*"' in trust_str:
                    log.critical(f"  Wildcard trust policy: {name}")
                    findings.append(f"wildcard_trust:{name}")
                    session.add_finding(
                        module="iam_enum",
                        title=f"IAM Role with Wildcard Trust Policy: {name}",
                        severity="CRITICAL",
                        description=f"Role '{name}' can be assumed by any AWS principal (trust policy uses *).",
                        evidence=f"Trust policy: {trust_str[:200]}",
                        recommendation="Restrict trust policy to specific accounts/services/roles.",
                        cvss_score=9.0,
                        aws_parallel="iam:AssumeRole with wildcard principal — privilege escalation path",
                        mitre_technique="T1548",
                    )

        except Exception:
            # FIX: duplicated handler collapsed — a list_roles failure returns
            # whatever was gathered so far.
            pass

        return findings

    def _check_account_summary(self, iam, session: "Session"):
        """Check account-level security posture: currently root MFA status."""
        from core.logger import log
        try:
            summary = iam.get_account_summary()["SummaryMap"]
            root_mfa = summary.get("AccountMFAEnabled", 0)
            # NOTE(review): the original also read AccountSigningCertificatesPresent
            # into an unused `pw_policy` variable — that key is unrelated to the
            # password policy, so the dead read was removed.

            log.info(f"Root account MFA: {'Enabled' if root_mfa else 'DISABLED'}")
            if not root_mfa:
                session.add_finding(
                    module="iam_enum",
                    title="CRITICAL: Root Account MFA Not Enabled",
                    severity="CRITICAL",
                    description="The AWS root account does not have MFA enabled.",
                    evidence="AccountMFAEnabled: 0",
                    recommendation="Enable MFA on root account immediately via AWS Console.",
                    cvss_score=9.5,
                    aws_parallel="Root account without MFA — highest possible AWS security risk",
                    mitre_technique="T1078.004",
                )
        except Exception:
            pass  # summary unavailable (missing permission) — skip this check

    def _http_fallback(self, session: "Session") -> ModuleResult:
        """When boto3 not available, look for IAM clues in HTTP responses."""
        from core.logger import log
        log.info("Running HTTP-based IAM indicator detection...")

        # Check if any found secrets look like AWS keys.
        aws_keys = [s for s in session.secrets if s["secret_type"] == "AWS_ACCESS_KEY"]
        if aws_keys:
            log.warning(f"Found {len(aws_keys)} AWS Access Keys but cannot verify without boto3")
            log.info("Install boto3 for full IAM analysis: pip install boto3")
            session.add_finding(
                module="iam_enum",
                title="AWS Credentials Found — boto3 Required for Full Analysis",
                severity="HIGH",
                description="AWS access keys were discovered but boto3 is not available for API verification.",
                evidence=f"{len(aws_keys)} AWS Access Key(s) found",
                recommendation="Install boto3 and rerun: pip install boto3",
                cvss_score=7.0,
                aws_parallel="Unverified IAM credentials — may grant significant AWS access",
                mitre_technique="T1078.004",
            )

        return ModuleResult(True, {"boto3_available": False, "aws_keys_found": len(aws_keys)})
|