opencode-api-security-testing 3.0.10 → 3.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +74 -0
- package/SKILL.md +1797 -0
- package/core/advanced_recon.py +788 -0
- package/core/agentic_analyzer.py +445 -0
- package/core/analyzers/api_parser.py +210 -0
- package/core/analyzers/response_analyzer.py +212 -0
- package/core/analyzers/sensitive_finder.py +184 -0
- package/core/api_fuzzer.py +422 -0
- package/core/api_interceptor.py +525 -0
- package/core/api_parser.py +955 -0
- package/core/browser_tester.py +479 -0
- package/core/cloud_storage_tester.py +1330 -0
- package/core/collectors/__init__.py +23 -0
- package/core/collectors/api_path_finder.py +300 -0
- package/core/collectors/browser_collect.py +645 -0
- package/core/collectors/browser_collector.py +411 -0
- package/core/collectors/http_client.py +111 -0
- package/core/collectors/js_collector.py +490 -0
- package/core/collectors/js_parser.py +780 -0
- package/core/collectors/url_collector.py +319 -0
- package/core/context_manager.py +682 -0
- package/core/deep_api_tester_v35.py +844 -0
- package/core/deep_api_tester_v55.py +366 -0
- package/core/dynamic_api_analyzer.py +532 -0
- package/core/http_client.py +179 -0
- package/core/models.py +296 -0
- package/core/orchestrator.py +890 -0
- package/core/prerequisite.py +227 -0
- package/core/reasoning_engine.py +1042 -0
- package/core/response_classifier.py +606 -0
- package/core/runner.py +938 -0
- package/core/scan_engine.py +599 -0
- package/core/skill_executor.py +435 -0
- package/core/skill_executor_v2.py +670 -0
- package/core/skill_executor_v3.py +704 -0
- package/core/smart_analyzer.py +687 -0
- package/core/strategy_pool.py +707 -0
- package/core/testers/auth_tester.py +264 -0
- package/core/testers/idor_tester.py +200 -0
- package/core/testers/sqli_tester.py +211 -0
- package/core/testing_loop.py +655 -0
- package/core/utils/base_path_dict.py +255 -0
- package/core/utils/payload_lib.py +167 -0
- package/core/utils/ssrf_detector.py +220 -0
- package/core/verifiers/vuln_verifier.py +536 -0
- package/package.json +1 -1
- package/references/README.md +72 -0
- package/references/asset-discovery.md +119 -0
- package/references/fuzzing-patterns.md +129 -0
- package/references/graphql-guidance.md +108 -0
- package/references/intake.md +84 -0
- package/references/pua-agent.md +192 -0
- package/references/report-template.md +156 -0
- package/references/rest-guidance.md +76 -0
- package/references/severity-model.md +76 -0
- package/references/test-matrix.md +86 -0
- package/references/validation.md +78 -0
- package/references/vulnerabilities/01-sqli-tests.md +1128 -0
- package/references/vulnerabilities/02-user-enum-tests.md +423 -0
- package/references/vulnerabilities/03-jwt-tests.md +499 -0
- package/references/vulnerabilities/04-idor-tests.md +362 -0
- package/references/vulnerabilities/05-sensitive-data-tests.md +466 -0
- package/references/vulnerabilities/06-biz-logic-tests.md +501 -0
- package/references/vulnerabilities/07-security-config-tests.md +511 -0
- package/references/vulnerabilities/08-brute-force-tests.md +457 -0
- package/references/vulnerabilities/09-vulnerability-chains.md +465 -0
- package/references/vulnerabilities/10-auth-tests.md +537 -0
- package/references/vulnerabilities/11-graphql-tests.md +355 -0
- package/references/vulnerabilities/12-ssrf-tests.md +396 -0
- package/references/vulnerabilities/README.md +148 -0
- package/references/workflows.md +192 -0
|
@@ -0,0 +1,411 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Browser Collector - 浏览器动态采集器
|
|
4
|
+
使用无头浏览器采集动态渲染的 JS、API 请求等
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
import time
from dataclasses import dataclass, field
from typing import Any, Callable, Dict, List, Optional, Set
from urllib.parse import urljoin, urlparse
|
|
12
|
+
|
|
13
|
+
try:
|
|
14
|
+
from playwright.sync_api import sync_playwright, Page, Browser, BrowserContext
|
|
15
|
+
HAS_PLAYWRIGHT = True
|
|
16
|
+
except ImportError:
|
|
17
|
+
HAS_PLAYWRIGHT = False
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
@dataclass
class BrowserResource:
    """A single network resource captured by the headless browser."""
    # Absolute URL of the request.
    url: str
    # Playwright resource type, e.g. "script", "xhr", "fetch", "document".
    resource_type: str
    method: str = "GET"
    request_headers: Dict = field(default_factory=dict)
    response_headers: Dict = field(default_factory=dict)
    # Raw request body for POST/PUT-style requests, if any.
    post_data: Optional[str] = None
    response_body: Optional[str] = None
    # 0 until (and unless) a matching response is observed.
    status_code: int = 0
    content_length: int = 0
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@dataclass
class BrowserCollectionResult:
    """Aggregated output of one browser collection run."""
    # URLs of all JavaScript files loaded by the page.
    js_urls: List[str] = field(default_factory=list)
    # XHR/Fetch requests observed during the run.
    api_requests: List[BrowserResource] = field(default_factory=list)
    static_resources: List[BrowserResource] = field(default_factory=list)
    # URLs of WebSocket endpoints the page connected to.
    websocket_connections: List[str] = field(default_factory=list)
    xhr_requests: List[BrowserResource] = field(default_factory=list)
    # Pages explicitly navigated to (initial target plus interaction targets).
    fetched_urls: Set[str] = field(default_factory=set)
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class HeadlessBrowserCollector:
    """
    Headless browser collector.

    Capabilities:
    - dynamic JS collection
    - API request capture (XHR/Fetch)
    - WebSocket connection capture
    - static resource collection
    - page screenshots
    - console log capture
    """

    def __init__(self, headless: bool = True, timeout: int = 30000):
        # NOTE: timeout is in milliseconds (Playwright convention).
        self.headless = headless
        self.timeout = timeout
        self.browser: Optional["Browser"] = None
        self.context: Optional["BrowserContext"] = None
        self.page = None
        # Every request seen on the page, in arrival order.
        self.resources: List["BrowserResource"] = []
        # Subset of `resources` whose resource_type is xhr/fetch.
        self.api_requests: List["BrowserResource"] = []
        self.ws_connections: List[str] = []
        self.console_logs: List[str] = []
        self.js_urls: Set[str] = set()

    def start(self) -> bool:
        """Launch Chromium and create a fresh context/page; return success."""
        if not HAS_PLAYWRIGHT:
            print("[!] Playwright not installed. Run: pip install playwright")
            return False

        try:
            self.playwright = sync_playwright().start()
            self.browser = self.playwright.chromium.launch(
                headless=self.headless,
                args=[
                    '--no-sandbox',
                    '--disable-dev-shm-usage',
                    '--disable-blink-features=AutomationControlled'
                ]
            )
            self.context = self.browser.new_context(
                ignore_https_errors=True,
                user_agent='Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
            )
            self.page = self.context.new_page()
            self._setup_listeners()
            return True
        except Exception as e:
            print(f"[!] Browser start failed: {e}")
            return False

    def _setup_listeners(self):
        """Attach request/response/websocket/console listeners to the page."""
        if not self.page:
            return

        def on_request(request):
            resource = BrowserResource(
                url=request.url,
                resource_type=request.resource_type,
                method=request.method,
                request_headers=dict(request.headers),
                post_data=request.post_data
            )
            self.resources.append(resource)

            if request.resource_type == 'script':
                self.js_urls.add(request.url)

            if request.resource_type in ['xhr', 'fetch']:
                self.api_requests.append(resource)

        def on_response(response):
            # Back-fill response metadata onto every captured request with
            # the same URL (requests/responses are matched by URL only).
            for resource in self.resources:
                if resource.url == response.url:
                    resource.status_code = response.status
                    resource.response_headers = dict(response.headers)
                    try:
                        # Playwright's Response.body is a *method*; the
                        # previous revision did len(response.body), which
                        # always raised and left content_length at 0.
                        resource.content_length = len(response.body())
                    except Exception:
                        # Body may be unavailable (e.g. redirects); keep 0.
                        pass

        def on_websocket(ws):
            self.ws_connections.append(ws.url)

        def on_console(msg):
            self.console_logs.append(f"[{msg.type}] {msg.text}")

        self.page.on("request", on_request)
        self.page.on("response", on_response)
        self.page.on("websocket", on_websocket)
        self.page.on("console", on_console)

    def navigate(self, url: str, wait_for_selector: Optional[str] = None, delay: int = 0) -> bool:
        """Navigate to *url*; optionally sleep *delay* seconds and/or wait
        for *wait_for_selector* (falls back to network-idle). Returns success."""
        if not self.page:
            return False

        try:
            self.page.goto(url, timeout=self.timeout)

            if delay > 0:
                time.sleep(delay)

            if wait_for_selector:
                try:
                    self.page.wait_for_selector(wait_for_selector, timeout=self.timeout)
                except Exception:
                    # Selector never appeared; best-effort, continue anyway.
                    pass
            else:
                self.page.wait_for_load_state("networkidle", timeout=self.timeout)

            return True
        except Exception as e:
            print(f"[!] Navigation failed: {e}")
            return False

    def click_and_intercept(self, selector: str, intercept_api: bool = True) -> bool:
        """Click *selector* and give triggered requests a moment to fire.

        `intercept_api` is kept for backward compatibility but unused:
        API capture always happens via the page listeners.
        """
        if not self.page:
            return False

        try:
            self.page.click(selector)
            time.sleep(1)  # let any click-triggered XHR/fetch land
            return True
        except Exception as e:
            print(f"[!] Click failed: {e}")
            return False

    def fill_form_and_submit(self, form_data: Dict[str, str], submit_selector: str = "button[type='submit']") -> bool:
        """Fill named input/textarea fields from *form_data* and submit."""
        if not self.page:
            return False

        try:
            for field_name, value in form_data.items():
                try:
                    self.page.fill(f"input[name='{field_name}']", value)
                except Exception:
                    try:
                        # Field may be a textarea rather than an input.
                        self.page.fill(f"textarea[name='{field_name}']", value)
                    except Exception:
                        pass  # best-effort: skip fields we cannot locate

            time.sleep(0.5)
            self.page.click(submit_selector)
            time.sleep(2)  # wait for the submit request/response
            return True
        except Exception as e:
            print(f"[!] Form submit failed: {e}")
            return False

    def execute_script(self, script: str) -> Any:
        """Evaluate JavaScript in the page; None on failure or no page."""
        if not self.page:
            return None

        try:
            return self.page.evaluate(script)
        except Exception as e:
            print(f"[!] Script execution failed: {e}")
            return None

    def get_dynamic_js_urls(self) -> List[str]:
        """Return URLs of dynamically loaded JS files."""
        return list(self.js_urls)

    def get_api_requests(self) -> List["BrowserResource"]:
        """Return captured XHR/Fetch requests."""
        return self.api_requests

    def get_websocket_connections(self) -> List[str]:
        """Return captured WebSocket endpoint URLs."""
        return self.ws_connections

    def get_console_logs(self) -> List[str]:
        """Return captured console log lines."""
        return self.console_logs

    def get_local_storage(self) -> Dict[str, str]:
        """Return the page's localStorage as a dict (empty on failure)."""
        if not self.page:
            return {}

        try:
            # evaluate() returns a JSON *string*; parse it so the declared
            # Dict[str, str] return type actually holds (previously the raw
            # string was returned).
            raw = self.page.evaluate("() => JSON.stringify(localStorage)")
            return json.loads(raw) if raw else {}
        except Exception:
            return {}

    def get_session_storage(self) -> Dict[str, str]:
        """Return the page's sessionStorage as a dict (empty on failure)."""
        if not self.page:
            return {}

        try:
            # Same JSON round-trip fix as get_local_storage.
            raw = self.page.evaluate("() => JSON.stringify(sessionStorage)")
            return json.loads(raw) if raw else {}
        except Exception:
            return {}

    def get_cookies(self) -> List[Dict]:
        """Return context cookies as plain dicts (empty on failure)."""
        if not self.context:
            return []

        try:
            return [dict(c) for c in self.context.cookies()]
        except Exception:
            return []

    def screenshot(self, path: str, full_page: bool = False) -> bool:
        """Save a screenshot to *path*; return success."""
        if not self.page:
            return False

        try:
            self.page.screenshot(path=path, full_page=full_page)
            return True
        except Exception as e:
            print(f"[!] Screenshot failed: {e}")
            return False

    def stop(self):
        """Close the browser and stop Playwright (best-effort)."""
        try:
            if self.browser:
                self.browser.close()
            if hasattr(self, 'playwright'):
                self.playwright.stop()
        except Exception:
            pass  # shutdown is best-effort

    def collect(self, target_url: str, interactions: List[Dict] = None) -> "BrowserCollectionResult":
        """
        Run the full collection flow: navigate, interact, harvest, stop.

        Args:
            target_url: URL to open first.
            interactions: optional action list, e.g.
                [{"type": "click", "selector": ".btn"}, ...]
        """
        result = BrowserCollectionResult()

        if not self.start():
            return result

        try:
            print(f"[*] Navigating to {target_url}")
            self.navigate(target_url)
            result.fetched_urls.add(target_url)

            if interactions:
                for action in interactions:
                    action_type = action.get('type')

                    if action_type == 'click':
                        self.click_and_intercept(action.get('selector', ''))
                    elif action_type == 'fill':
                        self.fill_form_and_submit(
                            action.get('data', {}),
                            action.get('submit', "button[type='submit']")
                        )
                    elif action_type == 'navigate':
                        url = action.get('url')
                        if url:
                            self.navigate(url)
                            result.fetched_urls.add(url)
                    elif action_type == 'wait':
                        time.sleep(action.get('seconds', 1))

            # Harvest exactly once, after all interactions. The previous
            # revision extended api_requests both before AND after the
            # interaction loop, duplicating every pre-interaction request.
            result.js_urls = self.get_dynamic_js_urls()
            result.api_requests = list(self.get_api_requests())
            result.websocket_connections = list(self.get_websocket_connections())
        finally:
            self.stop()

        return result
|
|
331
|
+
|
|
332
|
+
|
|
333
|
+
class BrowserCollectorFacade:
    """Facade exposing a single-call interface over HeadlessBrowserCollector.

    Fix over the previous revision: console logs, cookies and the optional
    screenshot are now captured *while the browser session is still open*.
    Previously they were read after collect() had already stopped the
    browser, so the `screenshot` and `capture_storage` options could never
    produce anything.
    """

    def __init__(self, headless: bool = True):
        self.headless = headless
        self.collector = HeadlessBrowserCollector(headless=headless)

    def collect_all(self, target_url: str, config: Dict = None) -> Dict:
        """
        Run a full collection pass against *target_url*.

        Args:
            target_url: URL to load first.
            config: optional settings:
                - interactions: action list for the collector
                - capture_console: capture console logs
                - capture_storage: capture cookies
                - screenshot: save a screenshot under /tmp

        Returns:
            dict with target_url, js_urls, api_requests,
            websocket_connections, fetched_urls, console_logs, cookies,
            and 'screenshot' (path) when one was taken.
        """
        config = config or {}
        interactions = config.get('interactions', [])
        collector = self.collector

        js_urls: List[str] = []
        api_requests: List = []
        ws_connections: List[str] = []
        fetched: List[str] = []
        console_logs: List[str] = []
        cookies: List[Dict] = []
        screenshot_path = None

        if collector.start():
            try:
                print(f"[*] Navigating to {target_url}")
                collector.navigate(target_url)
                fetched.append(target_url)

                for action in interactions:
                    action_type = action.get('type')
                    if action_type == 'click':
                        collector.click_and_intercept(action.get('selector', ''))
                    elif action_type == 'fill':
                        collector.fill_form_and_submit(
                            action.get('data', {}),
                            action.get('submit', "button[type='submit']")
                        )
                    elif action_type == 'navigate':
                        url = action.get('url')
                        if url:
                            collector.navigate(url)
                            if url not in fetched:
                                fetched.append(url)
                    elif action_type == 'wait':
                        time.sleep(action.get('seconds', 1))

                # Read everything out while the page/context still exists;
                # after stop() these would all come back empty or fail.
                js_urls = collector.get_dynamic_js_urls()
                api_requests = list(collector.get_api_requests())
                ws_connections = list(collector.get_websocket_connections())
                if config.get('capture_console'):
                    console_logs = collector.get_console_logs()
                if config.get('capture_storage'):
                    cookies = collector.get_cookies()
                if config.get('screenshot'):
                    path = f"/tmp/screenshot_{int(time.time())}.png"
                    if collector.screenshot(path):
                        screenshot_path = path
            finally:
                collector.stop()

        output = {
            'target_url': target_url,
            'js_urls': js_urls,
            'api_requests': [
                {
                    'url': r.url,
                    'method': r.method,
                    'type': r.resource_type,
                    'post_data': r.post_data,
                    'status_code': r.status_code,
                }
                for r in api_requests
            ],
            'websocket_connections': ws_connections,
            'fetched_urls': fetched,
            'console_logs': console_logs,
            'cookies': cookies,
        }

        if screenshot_path:
            output['screenshot'] = screenshot_path

        return output
|
|
382
|
+
|
|
383
|
+
|
|
384
|
+
# CLI interface
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="Browser Collector")
    parser.add_argument("--target", required=True, help="Target URL")
    # Previously --headless was store_true with default=True, which made it
    # impossible to ever run headed; keep --headless for compatibility and
    # add an explicit opt-out flag.
    parser.add_argument("--headless", dest="headless", action="store_true",
                        default=True, help="Run headless (default)")
    parser.add_argument("--no-headless", dest="headless", action="store_false",
                        help="Run the browser with a visible window")
    parser.add_argument("--output", help="Output file")
    parser.add_argument("--screenshot", action="store_true")

    args = parser.parse_args()

    facade = BrowserCollectorFacade(headless=args.headless)
    result = facade.collect_all(args.target, {
        'capture_console': True,
        'screenshot': args.screenshot
    })

    print("\n=== Collection Results ===")
    print(f"Target: {result['target_url']}")
    print(f"JS URLs found: {len(result['js_urls'])}")
    print(f"API requests: {len(result['api_requests'])}")
    print(f"WebSocket connections: {len(result['websocket_connections'])}")

    if args.output:
        with open(args.output, 'w') as f:
            json.dump(result, f, indent=2)
        print(f"\nResults saved to {args.output}")
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
"""
|
|
2
|
+
HTTP请求能力 - 快速HTTP探测
|
|
3
|
+
输入: {url, method, headers, body, timeout}
|
|
4
|
+
输出: {status, headers, body, elapsed}
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import requests
|
|
8
|
+
import time
|
|
9
|
+
|
|
10
|
+
# Silence InsecureRequestWarning noise: every request below uses verify=False
# on purpose (security probing against targets with self-signed certs).
requests.packages.urllib3.disable_warnings()
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def http_request(config):
    """
    Send one HTTP request described by *config*.

    Input keys:
        url: str - target URL
        method: str - HTTP method (GET, POST, PUT, DELETE, ...); default GET
        headers?: dict - request headers
        body?: dict | str - request body; a dict is serialized as JSON
        timeout?: number - timeout in seconds (default 10)

    Output keys:
        status: int - HTTP status code (0 on transport failure)
        headers: dict - response headers
        body: str - response body (error text on failure)
        elapsed: float - wall-clock request duration in seconds
        content_type: str - response Content-Type (success only)
        error: str - error message (failure only)
    """
    url = config.get('url')
    method = config.get('method', 'GET').upper()
    headers = config.get('headers', {})
    body = config.get('body')
    timeout = config.get('timeout', 10)

    start = time.time()

    try:
        # One generic requests.request() call replaces the per-method
        # if/elif ladder. This also fixes bodies being silently dropped
        # for methods the ladder didn't special-case (e.g. PATCH).
        kwargs = {'headers': headers, 'timeout': timeout, 'verify': False}
        if body is not None and method not in ('GET', 'HEAD', 'DELETE'):
            if isinstance(body, dict):
                kwargs['json'] = body  # dict body -> JSON payload
            else:
                kwargs['data'] = body  # anything else -> raw body

        resp = requests.request(method, url, **kwargs)

        elapsed = time.time() - start

        return {
            'status': resp.status_code,
            'headers': dict(resp.headers),
            'body': resp.text,
            'elapsed': elapsed,
            'content_type': resp.headers.get('Content-Type', '')
        }
    except Exception as e:
        # Transport-level failure (DNS, refused, timeout, ...): status 0
        # plus the error text, mirroring the success dict's shape.
        elapsed = time.time() - start
        return {
            'status': 0,
            'headers': {},
            'body': str(e),
            'elapsed': elapsed,
            'error': str(e)
        }
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def check_health(url, timeout=5):
    """
    Check whether an endpoint is reachable.

    Input:
        url: str - target URL
        timeout?: number - timeout in seconds (default 5)

    Output:
        healthy: bool - reachable and not a 5xx response
        latency: float - round-trip time in milliseconds (0 on failure)
        status: int - HTTP status code (0 on failure)
    """
    try:
        start = time.time()
        resp = requests.get(url, timeout=timeout, verify=False)
        latency = (time.time() - start) * 1000
        return {
            'healthy': resp.status_code < 500,
            'latency': latency,
            'status': resp.status_code
        }
    # Narrowed from a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit.
    except Exception:
        return {
            'healthy': False,
            'latency': 0,
            'status': 0
        }
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
if __name__ == '__main__':
    # Smoke test against a public echo service.
    demo = http_request({'url': 'https://httpbin.org/get', 'method': 'GET'})
    print(f"Status: {demo['status']}, Elapsed: {demo['elapsed']:.2f}s")
|