opencode-api-security-testing 2.1.0 → 2.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/SKILL.md +1797 -0
- package/core/advanced_recon.py +788 -0
- package/core/agentic_analyzer.py +445 -0
- package/core/analyzers/api_parser.py +210 -0
- package/core/analyzers/response_analyzer.py +212 -0
- package/core/analyzers/sensitive_finder.py +184 -0
- package/core/api_fuzzer.py +422 -0
- package/core/api_interceptor.py +525 -0
- package/core/api_parser.py +955 -0
- package/core/browser_tester.py +479 -0
- package/core/cloud_storage_tester.py +1330 -0
- package/core/collectors/__init__.py +23 -0
- package/core/collectors/api_path_finder.py +300 -0
- package/core/collectors/browser_collect.py +645 -0
- package/core/collectors/browser_collector.py +411 -0
- package/core/collectors/http_client.py +111 -0
- package/core/collectors/js_collector.py +490 -0
- package/core/collectors/js_parser.py +780 -0
- package/core/collectors/url_collector.py +319 -0
- package/core/context_manager.py +682 -0
- package/core/deep_api_tester_v35.py +844 -0
- package/core/deep_api_tester_v55.py +366 -0
- package/core/dynamic_api_analyzer.py +532 -0
- package/core/http_client.py +179 -0
- package/core/models.py +296 -0
- package/core/orchestrator.py +890 -0
- package/core/prerequisite.py +227 -0
- package/core/reasoning_engine.py +1042 -0
- package/core/response_classifier.py +606 -0
- package/core/runner.py +938 -0
- package/core/scan_engine.py +599 -0
- package/core/skill_executor.py +435 -0
- package/core/skill_executor_v2.py +670 -0
- package/core/skill_executor_v3.py +704 -0
- package/core/smart_analyzer.py +687 -0
- package/core/strategy_pool.py +707 -0
- package/core/testers/auth_tester.py +264 -0
- package/core/testers/idor_tester.py +200 -0
- package/core/testers/sqli_tester.py +211 -0
- package/core/testing_loop.py +655 -0
- package/core/utils/base_path_dict.py +255 -0
- package/core/utils/payload_lib.py +167 -0
- package/core/utils/ssrf_detector.py +220 -0
- package/core/verifiers/vuln_verifier.py +536 -0
- package/package.json +17 -13
- package/references/asset-discovery.md +119 -612
- package/references/graphql-guidance.md +65 -641
- package/references/intake.md +84 -0
- package/references/report-template.md +131 -38
- package/references/rest-guidance.md +55 -526
- package/references/severity-model.md +52 -264
- package/references/test-matrix.md +65 -263
- package/references/validation.md +53 -400
- package/scripts/postinstall.js +46 -0
- package/agents/cyber-supervisor.md +0 -55
- package/agents/probing-miner.md +0 -42
- package/agents/resource-specialist.md +0 -31
- package/commands/api-security-testing-scan.md +0 -59
- package/commands/api-security-testing-test.md +0 -49
- package/commands/api-security-testing.md +0 -72
- package/tsconfig.json +0 -17
|
@@ -0,0 +1,788 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Advanced Recon - 高级侦察模块
|
|
4
|
+
超越 JS 采集的多源信息收集
|
|
5
|
+
|
|
6
|
+
参考渗透测试工程师的侦察方法:
|
|
7
|
+
1. Swagger/OpenAPI 发现
|
|
8
|
+
2. WebSocket 探测
|
|
9
|
+
3. DNS/子域名枚举
|
|
10
|
+
4. 错误信息分析
|
|
11
|
+
5. 技术栈指纹
|
|
12
|
+
6. 响应差异分析
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
import re
|
|
16
|
+
import socket
|
|
17
|
+
import time
|
|
18
|
+
from typing import Dict, List, Set, Tuple, Optional, Any
|
|
19
|
+
from dataclasses import dataclass, field
|
|
20
|
+
from urllib.parse import urljoin, urlparse, parse_qs
|
|
21
|
+
import requests
|
|
22
|
+
|
|
23
|
+
try:
|
|
24
|
+
import dns.resolver
|
|
25
|
+
HAS_DNS = True
|
|
26
|
+
except ImportError:
|
|
27
|
+
HAS_DNS = False
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
@dataclass
class ReconResult:
    """Aggregated output of one reconnaissance run."""

    # Discovered Swagger/OpenAPI documentation URLs.
    swagger_endpoints: List[str] = field(default_factory=list)
    # WebSocket URLs harvested from JavaScript and response headers.
    websocket_urls: List[str] = field(default_factory=list)
    # Candidate subdomains of the target's base domain.
    subdomains: Set[str] = field(default_factory=set)
    # Detected technologies, keyed by category (frontend/backend/api).
    tech_stack: Dict[str, str] = field(default_factory=dict)
    # API URL prefixes that responded (e.g. '/api/v1').
    api_patterns: Set[str] = field(default_factory=set)
    # Noteworthy paths annotated with their response status.
    interesting_urls: List[str] = field(default_factory=list)
    # Error-message information leaks found in responses.
    error_leaks: List[Dict] = field(default_factory=list)
    # Miscellaneous fingerprint data.
    fingerprint: Dict[str, Any] = field(default_factory=dict)
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
class SwaggerDiscoverer:
    """Discover Swagger/OpenAPI documentation endpoints.

    Probes a list of well-known documentation paths, optionally rooted at
    previously discovered API parent paths — probing under known API parents
    has a much higher hit rate than probing the site root alone.
    """

    COMMON_SWAGGER_PATHS = [
        '/swagger-ui.html',
        '/swagger-ui/index.html',
        '/swagger-ui/',
        '/api-docs',
        '/api-docs/',
        '/v3/api-docs',
        '/v3/api-docs/',
        '/v3/api-docs.yaml',
        '/v2/api-docs',
        '/v2/api-docs.yaml',
        '/swagger.json',
        '/swagger/v1/swagger.json',
        '/swagger/v2/swagger.json',
        '/openapi.json',
        '/openapi.yaml',
        '/openapi/3.0.yaml',
        '/api/openapi.json',
        '/api/swagger.json',
        '/doc.json',
        '/api-doc',
        '/swagger-doc',
        '/api.html',
        '/dev/api-doc',
        '/qa/api-doc',
        '/test/api-doc',
    ]

    def __init__(self, session=None):
        """session: any object with a requests-like .get(); a fresh
        requests.Session is created lazily when none is supplied, so the
        class stays importable without requests installed."""
        if session is None:
            import requests
            session = requests.Session()
        self.session = session
        self.discovered: List[Dict] = []

    def discover(self, base_url: str, api_parent_paths: List[str] = None) -> List[Dict]:
        """Discover Swagger documents under base_url.

        Args:
            base_url: target base URL.
            api_parent_paths: optional list of already-discovered API parent
                paths; at most the first 15 are probed (cost control).

        Returns:
            List of hit dicts (see _check_swagger_url); also stored on
            self.discovered. Each URL is checked at most once.
        """
        results = []
        checked_urls = set()
        root = base_url.rstrip('/')

        def probe(url: str) -> None:
            # Deduplicate: parent-rooted and root-rooted probes can collide.
            if url in checked_urls:
                return
            checked_urls.add(url)
            found = self._check_swagger_url(url)
            if found:
                results.append(found)

        # 1. Probe under known API parent paths first (more efficient).
        if api_parent_paths:
            for parent in api_parent_paths[:15]:
                for swagger_path in self.COMMON_SWAGGER_PATHS:
                    probe(root + parent + swagger_path)

        # 2. Then probe from the site root.
        for swagger_path in self.COMMON_SWAGGER_PATHS:
            probe(root + swagger_path)

        # NOTE: a stale, unreachable copy of an older discover() loop that sat
        # after _check_swagger_url's final `return None` has been removed; its
        # behavior is fully covered by the root-path probing above.
        self.discovered = results
        return results

    def _check_swagger_url(self, url: str) -> Optional[Dict]:
        """Check whether a single URL serves a Swagger/OpenAPI document.

        Returns a descriptor dict on a hit, or None. 401/403 responses that
        still mention swagger/openapi are reported as 'swagger-protected'.
        Network errors are treated as a miss (best-effort probing).
        """
        try:
            resp = self.session.get(url, timeout=5, allow_redirects=True)

            if resp.status_code == 200:
                content_type = resp.headers.get('Content-Type', '')

                if 'json' in content_type or url.endswith(('.json', '.yaml', '.yml')):
                    return {
                        'url': url,
                        'type': 'openapi',
                        'status': 200,
                        'is_json': True
                    }
                elif 'html' in content_type and 'swagger' in resp.text.lower():
                    return {
                        'url': url,
                        'type': 'swagger-ui',
                        'status': 200,
                        'is_json': False
                    }
                elif 'html' in content_type:
                    # SPA catch-all routes answer 200 for any path; flag so the
                    # caller can discount these.
                    return {
                        'url': url,
                        'type': 'spa-fallback',
                        'status': 200,
                        'is_json': False
                    }

            elif resp.status_code in [401, 403]:
                if 'swagger' in resp.text.lower() or 'openapi' in resp.text.lower():
                    return {
                        'url': url,
                        'type': 'swagger-protected',
                        'status': resp.status_code
                    }

        except Exception:
            pass

        return None

    def parse_swagger(self, url: str) -> Optional[Dict]:
        """Fetch and parse a Swagger/OpenAPI document.

        Only JSON bodies are parsed (YAML returns None). Returns a summary
        dict with title, version, the endpoint list and its count, or None
        on any failure.
        """
        try:
            resp = self.session.get(url, timeout=10)

            if resp.status_code == 200:
                content = resp.text

                if content.strip().startswith('{'):
                    import json
                    spec = json.loads(content)
                else:
                    return None

                info = spec.get('info', {})
                paths = spec.get('paths', {})

                endpoints = []
                for path, methods in paths.items():
                    for method, details in methods.items():
                        if method.upper() in ['GET', 'POST', 'PUT', 'DELETE', 'PATCH']:
                            endpoints.append({
                                'path': path,
                                'method': method.upper(),
                                'summary': details.get('summary', ''),
                                'parameters': details.get('parameters', []),
                            })

                return {
                    'title': info.get('title', ''),
                    'version': info.get('version', ''),
                    'endpoints': endpoints,
                    'total': len(endpoints)
                }

        except Exception:
            pass

        return None
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
class WebSocketDiscoverer:
    """Discover WebSocket endpoints from JavaScript source and HTTP headers."""

    def __init__(self, session=None):
        # May be None; only discover_from_headers needs a live session.
        self.session = session

    def discover_from_js(self, js_content: str, base_url: str) -> List[str]:
        """Extract candidate WebSocket URLs from JavaScript source.

        base_url is accepted for interface compatibility; relative paths
        (e.g. '/ws/...') are returned as-is rather than resolved against it.
        Returns a de-duplicated list (order not guaranteed).
        """
        patterns = [
            r'new\s+WebSocket\s*\(\s*["\']([^"\']+)["\']',
            r'wss?://[^\s"\'<>]+',
            r'["\'](\/ws[s]?[^"\']+)["\']',
            r'websocket\s*:\s*["\']([^"\']+)["\']',
            r'SocketIO\s*\(\s*["\']([^"\']+)["\']',
            r'socket\s*:\s*["\']([^"\']+)["\']',
        ]

        ws_urls = []
        for pattern in patterns:
            for match in re.findall(pattern, js_content, re.IGNORECASE):
                # Patterns without a capture group yield the full match string.
                if isinstance(match, str) and match:
                    ws_urls.append(match)

        return list(set(ws_urls))

    def discover_from_headers(self, base_url: str) -> List[str]:
        """Probe base_url and report WebSocket-related response headers.

        Best-effort: network failures return whatever was gathered so far.
        """
        ws_hints = []

        try:
            resp = self.session.get(base_url, timeout=5)

            upgrade = resp.headers.get('Upgrade', '')
            if upgrade and 'websocket' in upgrade.lower():
                ws_hints.append('websocket-supported')

            sec_websocket = resp.headers.get('Sec-WebSocket-Extensions', '')
            if sec_websocket:
                ws_hints.append(sec_websocket)

        except Exception:  # narrowed from a bare except: don't mask SystemExit/KeyboardInterrupt
            pass

        return ws_hints
|
|
284
|
+
|
|
285
|
+
|
|
286
|
+
class TechFingerprinter:
    """Identify frontend/backend/API technologies via regex fingerprints."""

    FINGERPRINTS = {
        'frontend': {
            'Vue.js': [r'vue(@|/)', r'chunk-vendors', r'__VUE__', r'Vue\.js', r'create-vue'],
            'React': [r'react(@|/)', r'__REACT__', r'create-react-app', r'nextjs'],
            'Angular': [r'@angular', r'ng-version', r'Angular', r'zone\.js'],
            'jQuery': [r'jquery', r'\.jquery', r'jQuery'],
            'Bootstrap': [r'bootstrap', r'bootstrap\.js'],
        },
        'backend': {
            'Spring': [r'Spring', r'springframework', r'jvm'],
            'Django': [r'django', r'CSRF_COOKIE', r'csrftoken'],
            'Flask': [r'flask', r'Werkzeug'],
            'Express': [r'express', r'Node\.js'],
            'FastAPI': [r'fastapi', r'Swagger/FastAPI'],
            'Laravel': [r'laravel', r'laravel_session'],
            'Tomcat': [r'Apache-Coyote', r'tomcat'],
            'Nginx': [r'nginx', r'nginx/'],
            'Apache': [r'apache', r'Apache/'],
        },
        'api': {
            'GraphQL': [r'graphql', r'__schema', r'GraphQL'],
            'REST': [r'/api/', r'/v1/', r'/v2/'],
            'gRPC': [r'grpc', r'protocolbuffers'],
            'WebSocket': [r'websocket', r'SocketIO'],
            'Socket.IO': [r'socket\.io', r'SocketIO'],
        }
    }

    def __init__(self, session=None):
        # May be None; only fingerprint_from_response needs a live session.
        self.session = session

    def _scan(self, texts: List[str]) -> Dict[str, List[str]]:
        """Return {category: [tech, ...]} for fingerprints matching any text.

        Shared by both public methods; matching is case-insensitive and a
        tech is reported once per category (list order not guaranteed).
        """
        found = {category: set() for category in self.FINGERPRINTS}
        for category, patterns in self.FINGERPRINTS.items():
            for tech, pattern_list in patterns.items():
                for pattern in pattern_list:
                    if any(re.search(pattern, text, re.IGNORECASE) for text in texts):
                        found[category].add(tech)
        return {k: list(v) for k, v in found.items()}

    # BUG FIX: return annotations previously claimed Dict[str, Set[str]] but
    # both methods have always returned lists.
    def fingerprint_from_response(self, url: str) -> Dict[str, List[str]]:
        """Fetch url and fingerprint its body and headers.

        Network failures yield a result with empty category lists.
        """
        texts: List[str] = []
        try:
            resp = self.session.get(url, timeout=5)
            # Headers are matched against their lowercased str() rendering,
            # consistent with the original implementation.
            texts = [resp.text, str(dict(resp.headers)).lower()]
        except Exception:
            pass
        return self._scan(texts)

    def fingerprint_from_js(self, js_content: str) -> Dict[str, List[str]]:
        """Fingerprint technologies from raw JavaScript source."""
        return self._scan([js_content])
|
|
360
|
+
|
|
361
|
+
|
|
362
|
+
class ResponseDifferentialAnalyzer:
    """Compare responses against a recorded baseline to spot behavioral drift."""

    def __init__(self, session=None):
        self.session = session
        # Populated by set_baseline(); analyze() diffs against these.
        self.baseline_response = None
        self.baseline_hash = ""

    @staticmethod
    def _digest(payload: bytes) -> str:
        """Hex MD5 of payload — a cheap content fingerprint, not security."""
        import hashlib
        return hashlib.md5(payload).hexdigest()

    def set_baseline(self, url: str):
        """Record the baseline response for url; failures are silently ignored."""
        try:
            response = self.session.get(url, timeout=5)
            self.baseline_response = response
            self.baseline_hash = self._digest(response.content)
        except Exception:
            pass

    def analyze(self, url: str, params: Dict = None) -> Dict:
        """Fetch url (POST json=params when params given, else GET) and diff
        it against the baseline.

        Returns a report dict; any error leaves the partially-filled report
        as-is (best-effort semantics, matching set_baseline).
        """
        report = {
            'is_different': False,
            'status_changed': False,
            'length_diff': 0,
            'content_hash': '',
            'interesting': False,
            'reason': '',
        }

        try:
            if params:
                response = self.session.post(url, json=params, timeout=5)
            else:
                response = self.session.get(url, timeout=5)

            digest = self._digest(response.content)
            report['content_hash'] = digest

            if digest != self.baseline_hash:
                report['is_different'] = True
                # Raises AttributeError when no baseline was recorded;
                # swallowed below like every other failure here.
                report['length_diff'] = len(response.content) - len(self.baseline_response.content)

            if response.status_code != self.baseline_response.status_code:
                report['status_changed'] = True

            if report['is_different'] and abs(report['length_diff']) > 100:
                report['interesting'] = True
                report['reason'] = 'Significant content difference'

            # Status changes take precedence in the reported reason.
            if report['status_changed']:
                report['interesting'] = True
                report['reason'] = f'Status changed: {self.baseline_response.status_code} -> {response.status_code}'

        except Exception:
            pass

        return report
|
|
420
|
+
|
|
421
|
+
|
|
422
|
+
class SubdomainEnumerator:
    """Enumerate candidate subdomains from a common wordlist."""

    COMMON_SUBDOMAINS = [
        'api', 'api1', 'api2', 'dev', 'test', 'staging', 'prod',
        'admin', 'adm', 'manage', 'dashboard',
        'auth', 'login', 'sso', 'oauth',
        'cdn', 'static', 'assets', 'img', 'images',
        'mail', 'smtp', 'pop', 'imap',
        'ftp', 'sftp', 'ssh', 'vpn',
        'git', 'svn', 'ci', 'cd', 'jenkins',
        'db', 'database', 'mysql', 'postgres', 'mongo',
        'redis', 'memcache', 'cache',
        'search', 'elasticsearch', 'solr',
        'queue', 'kafka', 'rabbitmq',
        'k8s', 'kubernetes', 'docker', 'registry',
        'backup', 'backup1', 'backups',
        'office', 'corp', 'internal', 'intranet',
        'mobile', 'm', 'app',
        'docs', 'doc', 'wiki',
        'status', 'monitor', 'health',
    ]

    def __init__(self, session=None):
        # May be None; only needed when check_availability=True.
        self.session = session

    def enumerate(self, domain: str, check_availability: bool = True) -> List[str]:
        """Enumerate candidate subdomains of domain's base domain.

        Args:
            domain: hostname the target was reached at (may itself be a
                subdomain; reduced to its last two labels first).
            check_availability: when True, only subdomains answering an HTTP
                request with status < 500 are returned; when False, the full
                candidate list is returned without any network traffic.
        """
        subdomains = []

        base_domain = self._extract_base_domain(domain)

        for sub in self.COMMON_SUBDOMAINS:
            subdomain = f"{sub}.{base_domain}"

            if check_availability:
                try:
                    resp = self.session.get(
                        f"http://{subdomain}",
                        timeout=3,
                        allow_redirects=True
                    )
                    if resp.status_code < 500:
                        subdomains.append(subdomain)
                except Exception:
                    pass
            else:
                subdomains.append(subdomain)

        return subdomains

    def enumerate_via_dns(self, domain: str) -> List[str]:
        """Resolve common subdomains of domain via DNS A lookups.

        Returns '<subdomain> -> <address>' strings. Requires dnspython
        (HAS_DNS); returns an empty list when it is unavailable. Only the
        first 20 wordlist entries are tried (cost control).
        """
        subdomains = []

        if not HAS_DNS:
            return subdomains

        try:
            resolver = dns.resolver.Resolver()
            resolver.timeout = 2

            for sub in self.COMMON_SUBDOMAINS[:20]:
                subdomain = f"{sub}.{domain}"

                try:
                    answers = resolver.resolve(subdomain, 'A')
                    if answers:
                        for rdata in answers:
                            subdomains.append(f"{subdomain} -> {rdata}")
                except Exception:  # narrowed from a bare except (NXDOMAIN, timeouts, ...)
                    pass

        except Exception:
            pass

        return subdomains

    def _extract_base_domain(self, domain: str) -> str:
        """Reduce a hostname to its last two labels (naive — does not handle
        multi-part public suffixes like .co.uk)."""
        parts = domain.split('.')

        if len(parts) >= 2:
            return '.'.join(parts[-2:])

        return domain
|
|
508
|
+
|
|
509
|
+
|
|
510
|
+
class ErrorLeakAnalyzer:
    """Detect error-message information leaks in HTTP responses."""

    ERROR_PATTERNS = {
        'SQL Error': [
            r'sql\s*syntax',
            r'mysql.*error',
            r'postgresql.*error',
            r'microsoft.*sql',
            r'ora-\d{5}',
            r'sqlite.*error',
        ],
        'Path Traversal': [
            r'\.\.(\/|\\)',
            r'path.*not.*found',
            r'file.*not.*found',
            r'cannot.*read',
        ],
        'Command Injection': [
            r'system\(\)',
            r'exec\(\)',
            r'shell_exec',
            r'passthru',
            r'popen',
        ],
        'XXE': [
            r'<!ENTITY',
            r'<!DOCTYPE.*\[',
            r'SimpleXMLElement',
        ],
        'SSRF': [
            r'url=',
            r'fetch=',
            r'request=',
            r'endpoint=',
        ],
        'IDOR': [
            r'user.*not.*found',
            r'access.*denied',
            r'unauthorized',
            r'forbidden',
        ],
        'Information Disclosure': [
            r'file://',
            r'php://',
            r'http://',
            r'https://',
            r'localhost',
            r'127\.0\.0\.1',
            r'/etc/passwd',
            r'c:\\windows',
        ]
    }

    def __init__(self, session=None):
        # May be None; only fuzz_and_analyze needs a live session.
        self.session = session

    def analyze_response(self, url: str, response_text: str) -> List[Dict]:
        """Scan response_text for known error signatures.

        Returns one record per regex hit, each carrying up to 50 characters
        of context on either side of the match.
        """
        findings = []
        text_length = len(response_text)

        for leak_type, signatures in self.ERROR_PATTERNS.items():
            for signature in signatures:
                for hit in re.finditer(signature, response_text, re.IGNORECASE):
                    window_start = max(0, hit.start() - 50)
                    window_end = min(text_length, hit.end() + 50)

                    findings.append({
                        'type': leak_type,
                        'pattern': signature,
                        'context': response_text[window_start:window_end],
                        'url': url
                    })

        return findings

    def fuzz_and_analyze(self, url: str, method: str = 'GET') -> List[Dict]:
        """Send simple fuzz payloads (parameter 'q') and collect leaked errors.

        Best-effort: request failures are skipped silently.
        """
        findings = []

        fuzz_payloads = {
            'sql': ["'", "1 OR 1=1", "1 AND 1=1", "1 UNION SELECT"],
            'xss': ["<script>alert(1)</script>", "<img src=x onerror=alert(1)>"],
            'path': ["../etc/passwd", "..\\..\\windows\\win.ini"],
            'cmd': ["; ls", "| cat /etc/passwd", "&& whoami"],
        }

        for _category, candidates in fuzz_payloads.items():
            for candidate in candidates:
                try:
                    if method == 'GET':
                        response = self.session.get(url, params={'q': candidate}, timeout=5)
                    else:
                        response = self.session.post(url, data={'q': candidate}, timeout=5)

                    findings.extend(self.analyze_response(url, response.text))

                except Exception:
                    pass

        return findings
|
|
614
|
+
|
|
615
|
+
|
|
616
|
+
class AdvancedRecon:
    """
    High-level reconnaissance engine.

    Combines the individual discovery components (Swagger discovery, tech
    fingerprinting, subdomain enumeration, WebSocket discovery, API pattern
    probing and interesting-URL collection) into one pipeline.
    """

    def __init__(self, session=None):
        """session: requests-like session; one is created lazily when None."""
        if session is None:
            import requests
            session = requests.Session()
        self.session = session

        # BUG FIX: the raw constructor argument (possibly None) was previously
        # handed to every sub-component instead of the guaranteed session,
        # breaking default construction (AdvancedRecon() gave them session=None).
        self.swagger = SwaggerDiscoverer(self.session)
        self.websocket = WebSocketDiscoverer(self.session)
        self.fingerprinter = TechFingerprinter(self.session)
        self.diff_analyzer = ResponseDifferentialAnalyzer(self.session)
        self.subdomain_enum = SubdomainEnumerator(self.session)
        self.error_analyzer = ErrorLeakAnalyzer(self.session)

        self.result = ReconResult()

    def run(self, target_url: str) -> "ReconResult":
        """Execute the full recon pipeline against target_url.

        Populates and returns self.result. Progress is printed to stdout;
        each stage is best-effort.
        """
        print("[*] Starting advanced reconnaissance...")

        parsed = urlparse(target_url)
        domain = parsed.netloc

        print(f"[*] Discovering Swagger/OpenAPI...")
        swagger_results = self.swagger.discover(target_url)
        self.result.swagger_endpoints = [s['url'] for s in swagger_results]

        print(f"[*] Fingerprinting tech stack...")
        self.result.tech_stack = self.fingerprinter.fingerprint_from_response(target_url)

        print(f"[*] Enumerating subdomains...")
        # check_availability=False: candidate list only, no network traffic.
        subdomains = self.subdomain_enum.enumerate(domain, check_availability=False)
        self.result.subdomains = set(subdomains)

        print(f"[*] Discovering WebSocket endpoints...")
        try:
            resp = self.session.get(target_url, timeout=5)
            self.result.websocket_urls = self.websocket.discover_from_js(resp.text, target_url)
        except Exception:  # narrowed from a bare except
            pass

        print(f"[*] Analyzing API patterns...")
        for endpoint in self._discover_api_patterns(target_url):
            self.result.api_patterns.add(endpoint)

        print(f"[*] Collecting interesting URLs...")
        self.result.interesting_urls = self._collect_interesting_urls(target_url)

        return self.result

    def _discover_api_patterns(self, base_url: str) -> List[str]:
        """Probe common API prefixes with HEAD; status < 500 counts as a hit."""
        patterns = []

        pattern_tests = [
            '/api/v1',
            '/api/v2',
            '/api/v3',
            '/rest',
            '/graphql',
            '/rpc',
        ]

        for pattern in pattern_tests:
            url = base_url.rstrip('/') + pattern
            try:
                resp = self.session.head(url, timeout=3, allow_redirects=False)
                if resp.status_code < 500:
                    patterns.append(pattern)
            except Exception:  # narrowed from a bare except
                pass

        return patterns

    def _collect_interesting_urls(self, base_url: str) -> List[str]:
        """Probe noteworthy paths; report 'path -> status' for 200/401/403/500."""
        urls = []

        interesting_paths = [
            '/admin', '/login', '/register', '/password/reset',
            '/api', '/api-docs', '/swagger', '/openapi',
            '/.git/config', '/.env', '/config', '/settings',
            '/debug', '/test', '/debug/pprof',
            '/actuator', '/env', '/heapdump',
        ]

        for path in interesting_paths:
            url = base_url.rstrip('/') + path
            try:
                resp = self.session.get(url, timeout=3, allow_redirects=False)
                if resp.status_code in [200, 401, 403, 500]:
                    urls.append(f"{path} -> {resp.status_code}")
            except Exception:  # narrowed from a bare except
                pass

        return urls
|
|
717
|
+
|
|
718
|
+
|
|
719
|
+
def run_full_recon(target_url: str) -> Dict:
    """Run the complete recon pipeline and flatten the result into a dict.

    Creates a browser-like session, runs AdvancedRecon, then parses every
    discovered Swagger document into 'swagger_details'.
    """
    http = requests.Session()
    http.headers.update({
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
    })

    engine = AdvancedRecon(http)
    recon_result = engine.run(target_url)

    # Keep only the Swagger docs that parsed successfully.
    swagger_details = [
        parsed
        for parsed in (engine.swagger.parse_swagger(doc_url)
                       for doc_url in recon_result.swagger_endpoints)
        if parsed
    ]

    return {
        'target': target_url,
        'tech_stack': recon_result.tech_stack,
        'swagger_endpoints': recon_result.swagger_endpoints,
        'swagger_details': swagger_details,
        'websocket_urls': recon_result.websocket_urls,
        'subdomains': list(recon_result.subdomains),
        'api_patterns': list(recon_result.api_patterns),
        'interesting_urls': recon_result.interesting_urls,
        'error_leaks': recon_result.error_leaks,
    }
|
|
746
|
+
|
|
747
|
+
|
|
748
|
+
if __name__ == "__main__":
    import sys

    # Default target is only used when no CLI argument is supplied.
    target = "http://49.65.100.160:6004"
    if len(sys.argv) > 1:
        target = sys.argv[1]

    result = run_full_recon(target)

    banner = "=" * 70
    print("\n" + banner)
    print(" Advanced Recon Results")
    print(banner)

    print(f"\n[*] Tech Stack:")
    for category, techs in result['tech_stack'].items():
        if techs:
            print(f" {category}: {', '.join(techs)}")

    print(f"\n[*] Swagger/OpenAPI: {len(result['swagger_endpoints'])}")
    for doc_url in result['swagger_endpoints']:
        print(f" - {doc_url}")

    print(f"\n[*] WebSocket: {len(result['websocket_urls'])}")
    for ws_url in result['websocket_urls'][:5]:
        print(f" - {ws_url}")

    print(f"\n[*] Subdomains: {len(result['subdomains'])}")
    for host in list(result['subdomains'])[:10]:
        print(f" - {host}")

    print(f"\n[*] API Patterns: {len(result['api_patterns'])}")
    for prefix in result['api_patterns']:
        print(f" - {prefix}")

    print(f"\n[*] Interesting URLs: {len(result['interesting_urls'])}")
    for hit in result['interesting_urls'][:10]:
        print(f" - {hit}")

    if result['swagger_details']:
        print(f"\n[*] Swagger Details:")
        for details in result['swagger_details']:
            print(f" Title: {details.get('title', 'N/A')}")
            print(f" Version: {details.get('version', 'N/A')}")
            print(f" Endpoints: {details.get('total', 0)}")