crawlo-1.3.3-py3-none-any.whl → crawlo-1.3.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crawlo might be problematic.
- crawlo/__init__.py +87 -63
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +341 -323
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +312 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +46 -2
- crawlo/core/engine.py +439 -365
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +257 -256
- crawlo/crawler.py +639 -1167
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +273 -273
- crawlo/downloader/aiohttp_downloader.py +228 -226
- crawlo/downloader/cffi_downloader.py +245 -245
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +39 -39
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +61 -52
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +28 -0
- crawlo/factories/base.py +69 -0
- crawlo/factories/crawler.py +104 -0
- crawlo/factories/registry.py +85 -0
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +257 -234
- crawlo/filters/memory_filter.py +269 -269
- crawlo/framework.py +292 -0
- crawlo/initialization/__init__.py +40 -0
- crawlo/initialization/built_in.py +426 -0
- crawlo/initialization/context.py +142 -0
- crawlo/initialization/core.py +194 -0
- crawlo/initialization/phases.py +149 -0
- crawlo/initialization/registry.py +146 -0
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -22
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +38 -0
- crawlo/logging/config.py +97 -0
- crawlo/logging/factory.py +129 -0
- crawlo/logging/manager.py +112 -0
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +386 -386
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +163 -163
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/middleware/simple_proxy.py +65 -65
- crawlo/mode_manager.py +212 -187
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +379 -379
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +222 -222
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +325 -318
- crawlo/pipelines/pipeline_manager.py +76 -75
- crawlo/pipelines/redis_dedup_pipeline.py +166 -166
- crawlo/project.py +327 -325
- crawlo/queue/pqueue.py +43 -37
- crawlo/queue/queue_manager.py +503 -379
- crawlo/queue/redis_priority_queue.py +326 -306
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +321 -225
- crawlo/settings/setting_manager.py +214 -198
- crawlo/spider/__init__.py +657 -639
- crawlo/stats_collector.py +73 -59
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +139 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +168 -267
- crawlo/templates/project/settings_distributed.py.tmpl +167 -180
- crawlo/templates/project/settings_gentle.py.tmpl +167 -61
- crawlo/templates/project/settings_high_performance.py.tmpl +168 -131
- crawlo/templates/project/settings_minimal.py.tmpl +66 -35
- crawlo/templates/project/settings_simple.py.tmpl +165 -102
- crawlo/templates/project/spiders/__init__.py.tmpl +10 -6
- crawlo/templates/run.py.tmpl +34 -38
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/templates/spiders_init.py.tmpl +10 -0
- crawlo/tools/__init__.py +200 -200
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_formatter.py +225 -225
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +388 -388
- crawlo/tools/encoding_converter.py +127 -127
- crawlo/tools/network_diagnostic.py +365 -0
- crawlo/tools/request_tools.py +82 -82
- crawlo/tools/retry_mechanism.py +224 -224
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +34 -34
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/class_loader.py +26 -0
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +142 -142
- crawlo/utils/error_handler.py +165 -124
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +80 -200
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +388 -351
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +225 -218
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- {crawlo-1.3.3.dist-info → crawlo-1.3.5.dist-info}/METADATA +1126 -1020
- crawlo-1.3.5.dist-info/RECORD +288 -0
- examples/__init__.py +7 -7
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +107 -107
- tests/baidu_performance_test.py +109 -0
- tests/baidu_test.py +60 -0
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +213 -0
- tests/comprehensive_test.py +82 -0
- tests/comprehensive_testing_summary.md +187 -0
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +70 -0
- tests/debug_framework_logger.py +85 -0
- tests/debug_log_config.py +127 -0
- tests/debug_log_levels.py +64 -0
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +234 -0
- tests/distributed_test.py +67 -0
- tests/distributed_test_debug.py +77 -0
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/final_command_test_report.md +0 -0
- tests/final_comprehensive_test.py +152 -0
- tests/final_log_test.py +261 -0
- tests/final_validation_test.py +183 -0
- tests/fix_log_test.py +143 -0
- tests/framework_performance_test.py +203 -0
- tests/log_buffering_test.py +112 -0
- tests/log_generation_timing_test.py +154 -0
- tests/optimized_performance_test.py +212 -0
- tests/performance_comparison.py +246 -0
- tests/queue_blocking_test.py +114 -0
- tests/queue_test.py +90 -0
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +139 -0
- tests/scrapy_comparison/scrapy_test.py +134 -0
- tests/simple_command_test.py +120 -0
- tests/simple_crawlo_test.py +128 -0
- tests/simple_log_test.py +58 -0
- tests/simple_log_test2.py +138 -0
- tests/simple_optimization_test.py +129 -0
- tests/simple_spider_test.py +50 -0
- tests/simple_test.py +48 -0
- tests/spider_log_timing_test.py +178 -0
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +231 -0
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_batch_processor.py +179 -0
- tests/test_cleaners.py +54 -54
- tests/test_component_factory.py +175 -0
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +80 -0
- tests/test_crawlo_proxy_integration.py +108 -108
- tests/test_date_tools.py +123 -123
- tests/test_default_header_middleware.py +158 -158
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +207 -207
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +246 -0
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +253 -0
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +103 -103
- tests/test_framework_logger.py +67 -0
- tests/test_framework_startup.py +65 -0
- tests/test_get_component_logger.py +84 -0
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_config.py +113 -0
- tests/test_large_scale_helper.py +236 -0
- tests/test_logging_system.py +283 -0
- tests/test_mode_change.py +73 -0
- tests/test_mode_consistency.py +51 -51
- tests/test_offsite_middleware.py +221 -221
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +116 -0
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_middleware_refactored.py +184 -184
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_empty_check.py +42 -0
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +176 -176
- tests/test_random_user_agent.py +72 -72
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +241 -241
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agents.py +96 -96
- tests/tools_example.py +260 -260
- tests/untested_features_report.md +139 -0
- tests/verify_debug.py +52 -0
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +112 -0
- crawlo-1.3.3.dist-info/RECORD +0 -219
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +0 -82
- {crawlo-1.3.3.dist-info → crawlo-1.3.5.dist-info}/WHEEL +0 -0
- {crawlo-1.3.3.dist-info → crawlo-1.3.5.dist-info}/entry_points.txt +0 -0
- {crawlo-1.3.3.dist-info → crawlo-1.3.5.dist-info}/top_level.txt +0 -0
tests/test_performance_monitor.py
ADDED
@@ -0,0 +1,116 @@

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Performance monitoring utility tests.
Tests PerformanceMonitor, PerformanceTimer and performance_monitor_decorator.
"""
import sys
import os
import unittest
from unittest.mock import Mock, patch, MagicMock
import asyncio

# Add the project root directory to the Python path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

# Try to import the performance monitoring utilities
try:
    from crawlo.utils.performance_monitor import PerformanceMonitor, PerformanceTimer, performance_monitor_decorator
    PSUTIL_AVAILABLE = True
except ImportError:
    PSUTIL_AVAILABLE = False
    PerformanceMonitor = None
    PerformanceTimer = None
    performance_monitor_decorator = None


class TestPerformanceTimer(unittest.TestCase):
    """Performance timer tests."""

    @unittest.skipIf(not PSUTIL_AVAILABLE, "psutil not available")
    def setUp(self):
        """Set up before each test."""
        self.timer = PerformanceTimer("test_timer")

    @unittest.skipIf(not PSUTIL_AVAILABLE, "psutil not available")
    def test_timer_initialization(self):
        """Test timer initialization."""
        self.assertEqual(self.timer.name, "test_timer")
        self.assertIsNone(self.timer.start_time)
        self.assertIsNone(self.timer.end_time)

    @unittest.skipIf(not PSUTIL_AVAILABLE, "psutil not available")
    def test_timer_context_manager(self):
        """Test the timer as a context manager."""
        with self.timer as t:
            # Do some work here
            result = 1 + 1

        self.assertEqual(result, 2)
        # Verify the timer was started and stopped
        self.assertIsNotNone(t.start_time)
        self.assertIsNotNone(t.end_time)

    @unittest.skipIf(not PSUTIL_AVAILABLE, "psutil not available")
    def test_timer_start_stop(self):
        """Test starting and stopping the timer."""
        self.timer.start()
        self.assertIsNotNone(self.timer.start_time)

        # Wait a short while
        import time
        time.sleep(0.01)

        elapsed = self.timer.stop()
        self.assertIsNotNone(self.timer.end_time)
        self.assertIsInstance(elapsed, float)
        self.assertGreater(elapsed, 0)


class TestPerformanceMonitor(unittest.TestCase):
    """Performance monitor tests."""

    @unittest.skipIf(not PSUTIL_AVAILABLE, "psutil not available")
    def setUp(self):
        """Set up before each test."""
        self.monitor = PerformanceMonitor("test_monitor")

    @unittest.skipIf(not PSUTIL_AVAILABLE, "psutil not available")
    def test_monitor_initialization(self):
        """Test monitor initialization."""
        self.assertEqual(self.monitor.start_time, self.monitor.start_time)
        self.assertIsInstance(self.monitor.metrics, dict)
        self.assertIn('cpu_usage', self.monitor.metrics)
        self.assertIn('memory_usage', self.monitor.metrics)
        self.assertIn('network_io', self.monitor.metrics)
        self.assertIn('disk_io', self.monitor.metrics)


class TestPerformanceMonitorDecorator(unittest.TestCase):
    """Performance monitoring decorator tests."""

    @unittest.skipIf(not PSUTIL_AVAILABLE, "psutil not available")
    def test_performance_monitor_decorator_sync(self):
        """Test the decorator on a synchronous function."""
        @performance_monitor_decorator(name="test_sync_function")
        def sync_function():
            return "test_result"

        result = sync_function()
        self.assertEqual(result, "test_result")

    @unittest.skipIf(not PSUTIL_AVAILABLE, "psutil not available")
    def test_performance_monitor_decorator_async(self):
        """Test the decorator on an asynchronous function."""
        @performance_monitor_decorator(name="test_async_function")
        async def async_function():
            await asyncio.sleep(0.01)  # Simulate an asynchronous operation
            return "async_result"

        # Run the async function with an event loop
        result = asyncio.run(async_function())
        self.assertEqual(result, "async_result")


if __name__ == '__main__':
    unittest.main()
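Taken together, the new tests pin down a small public surface for the performance utilities: a named timer usable as a context manager or via explicit start()/stop(), a monitor exposing a metrics dict keyed by cpu_usage/memory_usage/network_io/disk_io, and a decorator that wraps both sync and async callables. Below is a minimal usage sketch inferred only from the assertions above; the names parse_page, db_write, my_spider and fetch are illustrative, and anything PerformanceMonitor does beyond the attributes the tests check is an assumption.

# Minimal usage sketch based only on what tests/test_performance_monitor.py asserts.
# Assumes psutil is installed (the tests are skipped otherwise); behaviour beyond
# the attributes checked in the tests is NOT guaranteed.
import asyncio
import time

from crawlo.utils.performance_monitor import (
    PerformanceMonitor,
    PerformanceTimer,
    performance_monitor_decorator,
)

# Context-manager form: start_time/end_time are populated on exit.
with PerformanceTimer("parse_page") as timer:
    time.sleep(0.01)
print(timer.start_time, timer.end_time)

# Explicit form: stop() returns the elapsed time as a float (seconds).
t = PerformanceTimer("db_write")
t.start()
time.sleep(0.01)
print(f"db_write took {t.stop():.4f}s")

# The monitor exposes a metrics dict with cpu/memory/network/disk buckets.
monitor = PerformanceMonitor("my_spider")
print(sorted(monitor.metrics.keys()))

# The decorator accepts a name and wraps sync and async callables alike.
@performance_monitor_decorator(name="fetch")
async def fetch():
    await asyncio.sleep(0.01)
    return "ok"

print(asyncio.run(fetch()))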
tests/test_proxy_api.py
CHANGED
@@ -1,265 +1,265 @@
This hunk removes and re-adds all 265 lines; the removed and re-added text renders identically in this view (only the closing asyncio.run(main()) line is marked unchanged), so the file content appears once below.

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Proxy API test script
=====================
Tests whether the specified proxy API endpoint works correctly.
"""

import asyncio
import aiohttp
import sys
import os
from urllib.parse import urlparse

# Add the project root directory to the Python path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from crawlo.middleware.proxy import ProxyMiddleware
from crawlo.network.request import Request
from crawlo.settings.setting_manager import SettingManager


async def test_proxy_api(proxy_api_url):
    """Test the proxy API endpoint."""
    print(f"=== Testing the proxy API endpoint ===")
    print(f"API address: {proxy_api_url}")

    try:
        timeout = aiohttp.ClientTimeout(total=10)
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get(proxy_api_url) as response:
                print(f"Status code: {response.status}")
                print(f"Response headers: {response.headers.get('content-type', 'Unknown')}")

                # Try to parse the response as JSON
                try:
                    data = await response.json()
                    print(f"Response data: {data}")
                    return data
                except Exception as e:
                    # If it is not JSON, fall back to text
                    try:
                        text = await response.text()
                        print(f"Response text: {text[:200]}{'...' if len(text) > 200 else ''}")
                        return text
                    except Exception as e2:
                        print(f"Unable to parse the response content: {e2}")
                        return None

    except asyncio.TimeoutError:
        print("Request timed out")
        return None
    except Exception as e:
        print(f"Request failed: {e}")
        return None


def extract_proxy_url(proxy_data):
    """Extract the proxy URL from the API response."""
    proxy_url = None

    if isinstance(proxy_data, dict):
        # Check for a status field indicating success
        if proxy_data.get('status') == 0:
            # Get the proxy field
            proxy_info = proxy_data.get('proxy', {})
            if isinstance(proxy_info, dict):
                # Prefer the https proxy, otherwise use the http one
                proxy_url = proxy_info.get('https') or proxy_info.get('http')
            elif isinstance(proxy_info, str):
                proxy_url = proxy_info
        else:
            # Try common field names directly
            for key in ['proxy', 'data', 'url', 'http', 'https']:
                if key in proxy_data:
                    value = proxy_data[key]
                    if isinstance(value, str):
                        proxy_url = value
                        break
                    elif isinstance(value, dict):
                        proxy_url = value.get('https') or value.get('http')
                        break

            # If still not found, look one level deeper into nested values
            if not proxy_url:
                for key, value in proxy_data.items():
                    if isinstance(value, str) and (value.startswith('http://') or value.startswith('https://')):
                        proxy_url = value
                        break
                    elif isinstance(value, dict):
                        # Recursive lookup
                        for sub_key, sub_value in value.items():
                            if isinstance(sub_value, str) and (sub_value.startswith('http://') or sub_value.startswith('https://')):
                                proxy_url = sub_value
                                break
                        if proxy_url:
                            break

    elif isinstance(proxy_data, str):
        # If the response is a string, use it directly
        if proxy_data.startswith('http://') or proxy_data.startswith('https://'):
            proxy_url = proxy_data

    return proxy_url


async def test_target_url_without_proxy(target_url):
    """Access the target URL directly, without a proxy."""
    print(f"\n=== Accessing the target URL directly (no proxy) ===")
    print(f"Target URL: {target_url}")

    try:
        timeout = aiohttp.ClientTimeout(total=15)
        async with aiohttp.ClientSession(timeout=timeout) as session:
            # Add a User-Agent header to avoid anti-crawling blocks
            headers = {
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36'
            }
            async with session.get(target_url, headers=headers) as response:
                print(f"Status code: {response.status}")
                print(f"Response headers: {response.headers.get('content-type', 'Unknown')}")

                # Only check the response status, do not try to decode the body
                return response.status == 200

    except asyncio.TimeoutError:
        print("Request timed out")
        return False
    except Exception as e:
        print(f"Request failed: {e}")
        return False


async def test_target_url_with_proxy(proxy_url, target_url, max_retries=3):
    """Access the target URL through the proxy."""
    print(f"\n=== Accessing the target URL through the proxy ===")
    print(f"Proxy address: {proxy_url}")
    print(f"Target URL: {target_url}")

    # Add a User-Agent header to avoid anti-crawling blocks
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36'
    }

    for attempt in range(max_retries):
        if attempt > 0:
            print(f"\nRetry attempt {attempt + 1}...")

        try:
            # Create an aiohttp client session
            timeout = aiohttp.ClientTimeout(total=15)
            async with aiohttp.ClientSession(timeout=timeout, headers=headers) as session:
                # Handle the proxy URL, supporting authenticated proxies
                if isinstance(proxy_url, str) and "@" in proxy_url and "://" in proxy_url:
                    parsed = urlparse(proxy_url)
                    if parsed.username and parsed.password:
                        # Extract the credentials
                        auth = aiohttp.BasicAuth(parsed.username, parsed.password)
                        # Clean the proxy URL, removing the credentials
                        clean_proxy = f"{parsed.scheme}://{parsed.hostname}"
                        if parsed.port:
                            clean_proxy += f":{parsed.port}"

                        print(f"Using authenticated proxy: {clean_proxy}")
                        async with session.get(target_url, proxy=clean_proxy, proxy_auth=auth) as response:
                            print(f"Status code: {response.status}")
                            print(f"Response headers: {response.headers.get('content-type', 'Unknown')}")
                            return response.status == 200
                    else:
                        # Proxy without credentials
                        print(f"Using plain proxy: {proxy_url}")
                        async with session.get(target_url, proxy=proxy_url) as response:
                            print(f"Status code: {response.status}")
                            print(f"Response headers: {response.headers.get('content-type', 'Unknown')}")
                            return response.status == 200
                else:
                    # Use the proxy URL as-is
                    print(f"Using proxy: {proxy_url}")
                    async with session.get(target_url, proxy=proxy_url) as response:
                        print(f"Status code: {response.status}")
                        print(f"Response headers: {response.headers.get('content-type', 'Unknown')}")
                        return response.status == 200

        except asyncio.TimeoutError:
            print("Request timed out")
            if attempt < max_retries - 1:
                await asyncio.sleep(2)  # Wait 2 seconds before retrying
                continue
        except aiohttp.ClientConnectorError as e:
            print(f"Connection error: {e}")
            if attempt < max_retries - 1:
                await asyncio.sleep(2)  # Wait 2 seconds before retrying
                continue
        except aiohttp.ClientHttpProxyError as e:
            print(f"Proxy HTTP error: {e}")
            if attempt < max_retries - 1:
                await asyncio.sleep(2)  # Wait 2 seconds before retrying
                continue
        except aiohttp.ServerDisconnectedError as e:
            print(f"Server disconnected: {e}")
            if attempt < max_retries - 1:
                await asyncio.sleep(2)  # Wait 2 seconds before retrying
                continue
        except Exception as e:
            print(f"Request failed: {e}")
            if attempt < max_retries - 1:
                await asyncio.sleep(2)  # Wait 2 seconds before retrying
                continue

    return False


async def main():
    """Main test routine."""
    # The proxy API and the test link
    proxy_api = 'http://test.proxy.api:8080/proxy/getitem/'
    target_url = 'https://stock.10jqka.com.cn/20240315/c655957791.shtml'

    print("Starting proxy API and target link tests...\n")

    # 1. Test the proxy API endpoint
    proxy_data = await test_proxy_api(proxy_api)

    if not proxy_data:
        print("Proxy API test failed, no proxy information available")
        return

    # 2. Extract the proxy URL from the API response
    proxy_url = extract_proxy_url(proxy_data)

    if not proxy_url:
        print("Unable to extract a proxy URL from the API response")
        print(f"API response content: {proxy_data}")
        return

    print(f"\nExtracted proxy URL: {proxy_url}")

    # 3. First try direct access to confirm the target URL is reachable
    print("\n=== Testing direct access to the target URL ===")
    direct_success = await test_target_url_without_proxy(target_url)

    if direct_success:
        print("Direct access to the target URL succeeded")
    else:
        print("Direct access to the target URL failed")

    # 4. Access the target URL through the proxy
    print("\n=== Testing access to the target URL through the proxy ===")
    proxy_success = await test_target_url_with_proxy(proxy_url, target_url)

    if proxy_success:
        print(f"Proxy test succeeded! Proxy {proxy_url} can reach the target link")
    else:
        print(f"Proxy test failed! Proxy {proxy_url} cannot reach the target link")

    # 5. Summary
    print(f"\n=== Test summary ===")
    print(f"Proxy API access: {'success' if proxy_data else 'failure'}")
    print(f"Proxy extraction: {'success' if proxy_url else 'failure'}")
    print(f"Direct access: {'success' if direct_success else 'failure'}")
    print(f"Proxy access: {'success' if proxy_success else 'failure'}")


if __name__ == "__main__":
    asyncio.run(main())
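The payload shapes this script can handle are implicit in extract_proxy_url; the short sketch below walks through them with hypothetical responses. The proxy addresses are placeholders, and importing from tests.test_proxy_api assumes the repository root is on sys.path with crawlo installed.

# Illustration of extract_proxy_url with hypothetical API payloads; the proxy
# addresses below are placeholders, not real endpoints.
from tests.test_proxy_api import extract_proxy_url

# status == 0 with a proxy dict: the https entry is preferred over http.
resp = {"status": 0, "proxy": {"http": "http://10.0.0.1:8000",
                               "https": "http://10.0.0.1:8443"}}
assert extract_proxy_url(resp) == "http://10.0.0.1:8443"

# No status field: common keys such as 'data' are scanned for a URL string.
assert extract_proxy_url({"data": "http://10.0.0.2:8000"}) == "http://10.0.0.2:8000"

# A bare string response is accepted as-is when it looks like a URL.
assert extract_proxy_url("http://user:pass@10.0.0.3:8000") == "http://user:pass@10.0.0.3:8000"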