exploitgraph 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. core/__init__.py +0 -0
  2. core/attack_graph.py +83 -0
  3. core/aws_client.py +284 -0
  4. core/config.py +83 -0
  5. core/console.py +469 -0
  6. core/context_engine.py +172 -0
  7. core/correlator.py +476 -0
  8. core/http_client.py +243 -0
  9. core/logger.py +97 -0
  10. core/module_loader.py +69 -0
  11. core/risk_engine.py +47 -0
  12. core/session_manager.py +254 -0
  13. exploitgraph-1.0.0.dist-info/METADATA +429 -0
  14. exploitgraph-1.0.0.dist-info/RECORD +42 -0
  15. exploitgraph-1.0.0.dist-info/WHEEL +5 -0
  16. exploitgraph-1.0.0.dist-info/entry_points.txt +2 -0
  17. exploitgraph-1.0.0.dist-info/licenses/LICENSE +21 -0
  18. exploitgraph-1.0.0.dist-info/top_level.txt +2 -0
  19. modules/__init__.py +0 -0
  20. modules/base.py +82 -0
  21. modules/cloud/__init__.py +0 -0
  22. modules/cloud/aws_credential_validator.py +340 -0
  23. modules/cloud/azure_enum.py +289 -0
  24. modules/cloud/cloudtrail_analyzer.py +494 -0
  25. modules/cloud/gcp_enum.py +272 -0
  26. modules/cloud/iam_enum.py +321 -0
  27. modules/cloud/iam_privilege_escalation.py +515 -0
  28. modules/cloud/metadata_check.py +315 -0
  29. modules/cloud/s3_enum.py +469 -0
  30. modules/discovery/__init__.py +0 -0
  31. modules/discovery/http_enum.py +235 -0
  32. modules/discovery/subdomain_enum.py +260 -0
  33. modules/exploitation/__init__.py +0 -0
  34. modules/exploitation/api_exploit.py +403 -0
  35. modules/exploitation/jwt_attack.py +346 -0
  36. modules/exploitation/ssrf_scanner.py +258 -0
  37. modules/reporting/__init__.py +0 -0
  38. modules/reporting/html_report.py +446 -0
  39. modules/reporting/json_export.py +107 -0
  40. modules/secrets/__init__.py +0 -0
  41. modules/secrets/file_secrets.py +358 -0
  42. modules/secrets/git_secrets.py +267 -0
@@ -0,0 +1,289 @@
1
+ """
2
+ ExploitGraph Module: Azure Blob Storage Enumerator
3
+ Category: cloud
4
+
5
+ Detects publicly accessible Azure Blob Storage containers.
6
+ Works without credentials (anonymous HTTP mode).
7
+ With Azure SDK: deeper analysis including SAS token validation.
8
+
9
+ Real-world misconfigurations detected:
10
+ - Public blob containers (anonymous read access)
11
+ - Storage accounts with public network access
12
+ - Exposed SAS tokens in URLs/responses
13
+ - Misconfigured CORS on storage accounts
14
+
15
+ Azure Parallel:
16
+ az storage container list --account-name ACCOUNT --auth-mode login
17
+ az storage blob list --container-name CONTAINER --account-name ACCOUNT
18
+ """
19
+ from __future__ import annotations
20
+ import re
21
+ from typing import TYPE_CHECKING
22
+
23
+ import requests
24
+ from requests.packages.urllib3.exceptions import InsecureRequestWarning
25
+ requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
26
+
27
+ from modules.base import BaseModule, ModuleResult
28
+
29
+ if TYPE_CHECKING:
30
+ from core.session_manager import Session
31
+
32
# Common Azure storage account naming patterns.
# Appended to the target's base hostname to brute-force candidate
# storage account names (both hyphenated and plain variants).
AZURE_SUFFIXES = [
    "-storage", "-backup", "-backups", "-assets", "-static", "-media",
    "-files", "-data", "-uploads", "-logs", "-config", "-prod", "-dev",
    "storage", "backup", "assets", "static", "files",
]

# Common container names inside storage accounts.
# "$web" is the reserved container Azure uses for static-website hosting.
COMMON_CONTAINERS = [
    "$web", "public", "static", "assets", "files", "images", "media",
    "uploads", "backups", "backup", "data", "logs", "config", "documents",
    "reports", "exports", "archive", "temp", "web", "cdn",
]

# SAS token pattern: matches a full blob-endpoint URL carrying a Shared
# Access Signature query string — an "sv=" (service version) parameter
# followed later by a "sig=" signature value.
SAS_PATTERN = re.compile(
    r'(?:https?://[a-z0-9]+\.blob\.core\.windows\.net[^\s"\'<>]*\?sv=[^\s"\'<>]*sig=[A-Za-z0-9%+/=]+)',
    re.IGNORECASE
)
51
+
52
+
53
class AzureEnum(BaseModule):
    """Enumerate Azure Blob Storage for public access and leaked SAS tokens.

    Works without credentials: harvests candidate storage account names from
    the target, probes the blob endpoints for anonymous container/blob
    listing, optionally downloads interesting blobs for secret analysis, and
    scans the target's responses for exposed Shared Access Signature URLs.
    """

    NAME = "azure_enum"
    DESCRIPTION = "Enumerate Azure Blob Storage containers — detect public access and exposed SAS tokens"
    AUTHOR = "ExploitGraph Team"
    VERSION = "1.0.0"
    CATEGORY = "cloud"
    SEVERITY = "CRITICAL"
    MITRE = ["T1530", "T1580"]
    AWS_PARALLEL = "Azure equivalent of: aws s3 ls s3://bucket --no-sign-request"

    OPTIONS = {
        "TARGET": {"default": "", "required": True, "description": "Target URL or Azure storage account name"},
        "ACCOUNT_NAME": {"default": "", "required": False, "description": "Azure storage account name to test directly"},
        "WORDLIST": {"default": "", "required": False, "description": "Custom storage account name wordlist"},
        "TIMEOUT": {"default": "8", "required": False, "description": "Request timeout in seconds"},
        "CONTAINERS": {"default": "", "required": False, "description": "Comma-separated container names to probe"},
        "DOWNLOAD_FILES": {"default": "true", "required": False, "description": "Download discovered files for secret analysis"},
    }

    # Azure storage account names must be 3-24 characters, lowercase letters
    # and digits only (no hyphens). Used to discard impossible candidates
    # before spending HTTP requests on them.
    _VALID_ACCOUNT_RE = re.compile(r"^[a-z0-9]{3,24}$")

    def run(self, session: "Session") -> ModuleResult:
        """Run the full enumeration workflow against the configured target.

        Returns a ModuleResult whose data summarizes how many accounts,
        containers and SAS tokens were discovered. Findings/secrets are
        recorded on *session* as a side effect.
        """
        from core.logger import log

        target = self.get_option("TARGET") or session.target
        timeout = int(self.get_option("TIMEOUT", "8"))
        download = self.get_option("DOWNLOAD_FILES", "true").lower() == "true"

        self._timer_start()
        log.section("Azure Blob Storage Enumeration")
        log.info("MITRE: T1530 — Data from Cloud Storage Object")
        log.info("Azure: az storage container list --auth-mode anonymous")

        found_accounts = []
        found_containers = []

        # Step 1: harvest candidate account names from the target itself.
        account_names = self._discover_account_names(target, timeout)

        # Step 2: an explicitly supplied account is tested first (unfiltered,
        # since the operator asked for it by name).
        if acct := self.get_option("ACCOUNT_NAME"):
            account_names.insert(0, acct)

        # Step 3: probe each candidate account; operator-supplied container
        # names take precedence over the built-in wordlist.
        containers_to_test = COMMON_CONTAINERS.copy()
        if custom := self.get_option("CONTAINERS"):
            containers_to_test = [c.strip() for c in custom.split(",")] + containers_to_test

        # Cap at 20 accounts to keep runtime and request volume bounded.
        for account in account_names[:20]:
            result = self._test_account(account, containers_to_test, timeout, download, session)
            if result:
                found_accounts.append(result)
                found_containers.extend(result.get("containers", []))

        # Step 4: look for leaked SAS tokens in the target's responses.
        sas_tokens = self._scan_for_sas_tokens(target, timeout, session)

        elapsed = self._timer_stop()
        log.success(f"Azure enum done in {elapsed}s — {len(found_accounts)} accounts, "
                    f"{len(found_containers)} containers, {len(sas_tokens)} SAS tokens")

        if found_accounts or found_containers:
            session.add_graph_node("azure_exposure",
                                   f"Azure Storage\nExposed ({len(found_containers)} containers)",
                                   "exposure", "CRITICAL")
            session.add_graph_edge("http_enum", "azure_exposure",
                                   "public blob access", "T1530")

        return ModuleResult(True, {
            "accounts_found": len(found_accounts),
            "containers_found": len(found_containers),
            "sas_tokens_found": len(sas_tokens),
        })

    def _discover_account_names(self, target: str, timeout: int) -> list[str]:
        """Build a list of candidate storage account names for *target*.

        Candidates come from the target hostname combined with common
        suffixes, and from any ``*.blob.core.windows.net`` references found
        in the target's HTTP response. Only syntactically valid Azure
        account names (3-24 lowercase alphanumerics) are returned, since
        anything else cannot exist and would waste probe requests.
        """
        from core.logger import log
        from urllib.parse import urlparse
        names = set()

        hostname = urlparse(target).hostname or ""
        base = hostname.split(".")[0].lower()

        # Generate candidates from the base hostname. Hyphens are illegal in
        # storage account names, so hyphenated combinations are flattened
        # (e.g. "my-site" + "-storage" -> "mysitestorage") instead of being
        # emitted verbatim.
        if base and base not in ("localhost", "127"):
            names.add(base.replace("-", ""))
            for prefix in (base, base.replace("-", "")):
                for suffix in AZURE_SUFFIXES:
                    names.add(f"{prefix}{suffix}".replace("-", ""))

        # Scan the target response for explicit Azure storage references —
        # these are the highest-confidence candidates.
        log.step("Scanning for Azure storage references in target responses...")
        try:
            r = requests.get(target, timeout=timeout, verify=False)
            for match in re.finditer(
                r'([a-z0-9]{3,24})\.blob\.core\.windows\.net', r.text, re.IGNORECASE
            ):
                name = match.group(1).lower()
                if 3 <= len(name) <= 24:
                    names.add(name)
                    log.found(f"Azure storage reference: {name}.blob.core.windows.net")
        except requests.RequestException:
            pass  # network/connection error — continue scanning

        # Drop anything that cannot be a real storage account name.
        return [n for n in names if self._VALID_ACCOUNT_RE.match(n)]

    def _test_account(self, account: str, containers: list[str],
                      timeout: int, download: bool, session: "Session") -> dict | None:
        """Probe one storage account for anonymous container/blob listing.

        Returns a dict with the account name, base URL and any publicly
        listable containers, or None when nothing public was observed.
        Records CRITICAL findings on *session* for each exposure.
        """
        from core.logger import log

        base_url = f"https://{account}.blob.core.windows.net"
        acct_data = {"name": account, "url": base_url, "containers": []}

        # Account-level anonymous container listing (?comp=list). Use a
        # local probe list rather than rebinding the caller's parameter.
        probe_list = containers
        list_url = f"{base_url}/?comp=list"
        try:
            r = requests.get(list_url, timeout=timeout, verify=False)
            if r.status_code == 200 and "<EnumerationResults" in r.text:
                log.critical(f"PUBLIC AZURE ACCOUNT: {account}.blob.core.windows.net")
                log.info(f"Azure cmd: az storage container list --account-name {account} --auth-mode anonymous")

                # Parse container names out of the listing XML.
                listed = re.findall(r"<Name>([^<]+)</Name>", r.text)
                log.success(f"  Containers listed: {listed}")

                session.add_finding(
                    module="azure_enum",
                    title=f"Public Azure Storage Account: {account}",
                    severity="CRITICAL",
                    description=f"Azure storage account '{account}' allows unauthenticated container listing.",
                    evidence=f"URL: {list_url}\nHTTP 200\nContainers: {listed}",
                    recommendation=(
                        "Disable anonymous access:\n"
                        f"az storage account update --name {account} "
                        "--allow-blob-public-access false"
                    ),
                    cvss_score=9.8,
                    aws_parallel="S3 bucket with s3:ListBucket granted to AllUsers",
                    mitre_technique="T1530",
                )
                # Probe the actually-listed containers first, then fall back
                # to the wordlist entries not already covered.
                probe_list = listed + [c for c in containers if c not in listed]

        except requests.RequestException:
            pass  # network/connection error — continue scanning

        # Container-level probing (capped to bound request volume).
        for container in probe_list[:30]:
            url = f"{base_url}/{container}?restype=container&comp=list"
            try:
                r = requests.get(url, timeout=timeout, verify=False)
                if r.status_code == 200 and ("<EnumerationResults" in r.text or "<Blobs>" in r.text):
                    blobs = re.findall(r"<Name>([^<]+)</Name>", r.text)
                    log.found(f"Public container: {account}/{container} ({len(blobs)} blobs)")
                    for b in blobs[:5]:
                        log.secret("Blob", b)

                    acct_data["containers"].append({
                        "name": container, "blobs": blobs,
                        "url": f"{base_url}/{container}",
                    })

                    session.add_finding(
                        module="azure_enum",
                        title=f"Public Azure Container: {account}/{container}",
                        severity="CRITICAL",
                        description=f"Container '{container}' in storage account '{account}' is publicly accessible.",
                        evidence=f"URL: {url}\nBlobs: {', '.join(blobs[:5])}{'...' if len(blobs)>5 else ''}",
                        recommendation=(
                            f"az storage container set-permission --name {container} "
                            f"--account-name {account} --public-access off"
                        ),
                        cvss_score=9.5,
                        aws_parallel="S3 bucket object with public-read ACL",
                        mitre_technique="T1530",
                    )

                    if download:
                        self._download_blobs(account, container, blobs[:10], timeout, session)

            except requests.RequestException:
                pass  # network/connection error — continue scanning

        return acct_data if acct_data["containers"] else None

    def _download_blobs(self, account: str, container: str, blobs: list[str],
                        timeout: int, session: "Session"):
        """Download blobs whose names suggest secret-bearing content.

        Successful downloads are appended to ``session.exposed_files``;
        bodies over ~1 MB are not retained in memory (content stored as
        None) to bound memory use.
        """
        from core.logger import log
        INTERESTING = [".env", ".json", ".yaml", ".yml", ".conf", ".ini",
                       ".key", ".pem", ".zip", ".sql", "config", "secret",
                       "password", "credential", "backup"]
        for blob in blobs:
            if any(ext in blob.lower() for ext in INTERESTING):
                url = f"https://{account}.blob.core.windows.net/{container}/{blob}"
                try:
                    r = requests.get(url, timeout=timeout, verify=False)
                    if r.status_code == 200:
                        log.found(f"Downloaded blob: {blob} ({len(r.content)} bytes)")
                        session.exposed_files.append({
                            "url": url, "path": blob,
                            "content": r.text if len(r.content) < 1_000_000 else None,
                            "source": "azure_enum",
                        })
                except requests.RequestException:
                    pass  # network/connection error — continue scanning

    def _scan_for_sas_tokens(self, target: str, timeout: int, session: "Session") -> list[str]:
        """Fetch *target* and scan the response body for exposed SAS URLs.

        Each match is recorded on *session* both as a secret and as a
        CRITICAL finding; the list of matched token URLs is returned.
        """
        from core.logger import log
        tokens = []
        try:
            r = requests.get(target, timeout=timeout, verify=False)
            for match in SAS_PATTERN.finditer(r.text):
                token_url = match.group(0)
                tokens.append(token_url)
                log.critical(f"SAS TOKEN EXPOSED: {token_url[:80]}...")
                session.add_secret(
                    secret_type="AZURE_SAS_TOKEN",
                    value=token_url,
                    source=target,
                    severity="CRITICAL",
                    description="Azure SAS token exposed in HTTP response. Grants time-limited storage access.",
                    aws_parallel="Pre-signed S3 URL exposed in response — grants temporary access",
                )
                session.add_finding(
                    module="azure_enum",
                    title="Azure SAS Token Exposed",
                    severity="CRITICAL",
                    description="A Shared Access Signature (SAS) token was found in the application response.",
                    evidence=f"Source: {target}\nToken: {token_url[:80]}...",
                    recommendation="Rotate SAS token immediately. Use Managed Identities instead of SAS tokens.",
                    cvss_score=9.0,
                    aws_parallel="Pre-signed S3 URL leaked in JavaScript/HTML source",
                    mitre_technique="T1552.001",
                )
        except requests.RequestException:
            pass  # network/connection error — continue scanning
        return tokens