crawlo-1.4.2-py3-none-any.whl → crawlo-1.4.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crawlo might be problematic.
- crawlo/__init__.py +93 -93
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +341 -341
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +312 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +438 -439
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +291 -257
- crawlo/crawler.py +650 -650
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +273 -273
- crawlo/downloader/aiohttp_downloader.py +233 -233
- crawlo/downloader/cffi_downloader.py +245 -245
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +63 -63
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +61 -61
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +103 -103
- crawlo/factories/registry.py +84 -84
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +257 -257
- crawlo/filters/memory_filter.py +269 -269
- crawlo/framework.py +292 -292
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +425 -425
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +193 -193
- crawlo/initialization/phases.py +148 -148
- crawlo/initialization/registry.py +145 -145
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +45 -37
- crawlo/logging/async_handler.py +181 -0
- crawlo/logging/config.py +196 -96
- crawlo/logging/factory.py +171 -128
- crawlo/logging/manager.py +111 -111
- crawlo/logging/monitor.py +153 -0
- crawlo/logging/sampler.py +167 -0
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +386 -386
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/middleware/simple_proxy.py +65 -65
- crawlo/mode_manager.py +219 -219
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +379 -379
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +197 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +105 -105
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +325 -325
- crawlo/pipelines/pipeline_manager.py +100 -84
- crawlo/pipelines/redis_dedup_pipeline.py +156 -156
- crawlo/project.py +349 -338
- crawlo/queue/pqueue.py +42 -42
- crawlo/queue/queue_manager.py +526 -522
- crawlo/queue/redis_priority_queue.py +370 -367
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +284 -284
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +657 -657
- crawlo/stats_collector.py +73 -73
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +170 -170
- crawlo/templates/project/settings_distributed.py.tmpl +169 -169
- crawlo/templates/project/settings_gentle.py.tmpl +166 -166
- crawlo/templates/project/settings_high_performance.py.tmpl +167 -167
- crawlo/templates/project/settings_minimal.py.tmpl +65 -65
- crawlo/templates/project/settings_simple.py.tmpl +164 -164
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +34 -34
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/templates/spiders_init.py.tmpl +9 -9
- crawlo/tools/__init__.py +200 -200
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_formatter.py +225 -225
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/encoding_converter.py +127 -127
- crawlo/tools/network_diagnostic.py +364 -364
- crawlo/tools/request_tools.py +82 -82
- crawlo/tools/retry_mechanism.py +224 -224
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +34 -34
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/class_loader.py +25 -25
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +142 -142
- crawlo/utils/error_handler.py +165 -165
- crawlo/utils/fingerprint.py +122 -122
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +79 -79
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +388 -388
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.4.3.dist-info/METADATA +190 -0
- crawlo-1.4.3.dist-info/RECORD +326 -0
- examples/__init__.py +7 -7
- examples/test_project/__init__.py +7 -7
- examples/test_project/run.py +34 -34
- examples/test_project/test_project/__init__.py +3 -3
- examples/test_project/test_project/items.py +17 -17
- examples/test_project/test_project/middlewares.py +118 -118
- examples/test_project/test_project/pipelines.py +96 -96
- examples/test_project/test_project/settings.py +169 -169
- examples/test_project/test_project/spiders/__init__.py +9 -9
- examples/test_project/test_project/spiders/of_week_dis.py +143 -143
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +106 -106
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +245 -245
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +127 -127
- tests/simple_log_test.py +57 -57
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_spider_test.py +49 -49
- tests/simple_test.py +47 -47
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_component_factory.py +174 -174
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawlo_proxy_integration.py +108 -108
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +125 -0
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_framework_env_usage.py +103 -103
- tests/test_framework_logger.py +66 -66
- tests/test_framework_startup.py +64 -64
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_config.py +112 -112
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +375 -0
- tests/test_logging_final.py +185 -0
- tests/test_logging_integration.py +313 -0
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +142 -0
- tests/test_mode_change.py +72 -72
- tests/test_mode_consistency.py +51 -51
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +212 -0
- tests/test_priority_consistency.py +152 -0
- tests/test_priority_consistency_fixed.py +250 -0
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_middleware_refactored.py +184 -184
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +131 -0
- tests/test_random_headers_default.py +323 -0
- tests/test_random_headers_necessity.py +309 -0
- tests/test_random_user_agent.py +72 -72
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +130 -0
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +334 -242
- tests/test_retry_middleware_realistic.py +274 -0
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +177 -0
- tests/test_user_agents.py +96 -96
- tests/tools_example.py +260 -260
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- crawlo-1.4.2.dist-info/METADATA +0 -1199
- crawlo-1.4.2.dist-info/RECORD +0 -309
- {crawlo-1.4.2.dist-info → crawlo-1.4.3.dist-info}/WHEEL +0 -0
- {crawlo-1.4.2.dist-info → crawlo-1.4.3.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.2.dist-info → crawlo-1.4.3.dist-info}/top_level.txt +0 -0
crawlo/middleware/proxy.py
CHANGED
@@ -1,386 +1,386 @@
All 386 lines of this file were removed and re-added in this release; the removed and added text is identical as shown in this diff, so the file content appears once below.

#!/usr/bin/python
# -*- coding: UTF-8 -*-
import time
import asyncio
import socket
from urllib.parse import urlparse
from typing import Optional, Dict, Any, Callable, Union, TYPE_CHECKING, List

from crawlo import Request, Response
from crawlo.exceptions import NotConfiguredError
from crawlo.utils.log import get_logger

if TYPE_CHECKING:
    import aiohttp

try:
    import httpx

    HTTPX_EXCEPTIONS = (httpx.NetworkError, httpx.TimeoutException, httpx.ReadError, httpx.ConnectError)
except ImportError:
    HTTPX_EXCEPTIONS = ()
    httpx = None

try:
    import aiohttp

    AIOHTTP_EXCEPTIONS = (
        aiohttp.ClientError, aiohttp.ClientConnectorError, aiohttp.ClientResponseError, aiohttp.ServerTimeoutError,
        aiohttp.ServerDisconnectedError)
except ImportError:
    AIOHTTP_EXCEPTIONS = ()
    aiohttp = None

try:
    from curl_cffi import requests as cffi_requests

    CURL_CFFI_EXCEPTIONS = (cffi_requests.RequestsError,)
except (ImportError, AttributeError):
    CURL_CFFI_EXCEPTIONS = ()
    cffi_requests = None

NETWORK_EXCEPTIONS = (
    asyncio.TimeoutError,
    socket.gaierror,
    ConnectionError,
    TimeoutError,
) + HTTPX_EXCEPTIONS + AIOHTTP_EXCEPTIONS + CURL_CFFI_EXCEPTIONS

ProxyExtractor = Callable[[Dict[str, Any]], Union[None, str, Dict[str, str]]]


class Proxy:
    """Proxy object holding the proxy address and its usage statistics."""

    def __init__(self, proxy_str: str):
        self.proxy_str = proxy_str
        self.success_count = 0
        self.failure_count = 0
        self.last_used_time = 0.0
        self.is_healthy = True

    @property
    def success_rate(self) -> float:
        """Compute the proxy's success rate."""
        total = self.success_count + self.failure_count
        if total == 0:
            return 1.0
        return self.success_count / total

    def mark_success(self):
        """Record a successful use of the proxy."""
        self.success_count += 1
        self.last_used_time = time.time()
        self.is_healthy = True

    def mark_failure(self):
        """Record a failed use of the proxy."""
        self.failure_count += 1
        self.last_used_time = time.time()
        # Mark the proxy unhealthy if the failure rate gets too high
        if self.failure_count > 3 and self.success_rate < 0.5:
            self.is_healthy = False


class ProxyMiddleware:
    def __init__(self, settings, log_level):
        self.logger = get_logger(self.__class__.__name__, log_level)

        self._session: Optional[Any] = None  # aiohttp.ClientSession when aiohttp is available
        # Keep a pool of proxies instead of a single proxy
        self._proxy_pool: List[Proxy] = []
        self._current_proxy_index: int = 0
        self._last_fetch_time: float = 0

        self.proxy_extractor = settings.get("PROXY_EXTRACTOR", "proxy")
        self.refresh_interval = settings.get_float("PROXY_REFRESH_INTERVAL", 60)
        self.timeout = settings.get_float("PROXY_API_TIMEOUT", 10)
        # New setting: proxy pool size
        self.proxy_pool_size = settings.get_int("PROXY_POOL_SIZE", 5)
        # New setting: health-check threshold
        self.health_check_threshold = settings.get_float("PROXY_HEALTH_CHECK_THRESHOLD", 0.5)

        self.enabled = settings.get_bool("PROXY_ENABLED", True)

        if not self.enabled:
            self.logger.info("ProxyMiddleware disabled")
            return

        self.api_url = settings.get("PROXY_API_URL")
        if not self.api_url:
            raise NotConfiguredError("PROXY_API_URL not configured, ProxyMiddleware disabled")

        self.logger.info(
            f"Proxy middleware enabled | API: {self.api_url} | Refresh interval: {self.refresh_interval}s | Proxy pool size: {self.proxy_pool_size}")

    @classmethod
    def create_instance(cls, crawler):
        return cls(settings=crawler.settings, log_level=crawler.settings.get("LOG_LEVEL"))

    def _compile_extractor(self) -> ProxyExtractor:
        if callable(self.proxy_extractor):
            return self.proxy_extractor

        if isinstance(self.proxy_extractor, str):
            keys = self.proxy_extractor.split(".")

            def extract(data: Dict[str, Any]) -> Union[None, str, Dict[str, str]]:
                for k in keys:
                    if isinstance(data, dict):
                        data = data.get(k)
                    else:
                        return None
                    if data is None:
                        break
                return data

            return extract

        raise ValueError(f"PROXY_EXTRACTOR must be a str or callable, got: {type(self.proxy_extractor)}")

    async def _close_session(self):
        if self._session:
            try:
                await self._session.close()
                self.logger.debug("aiohttp session closed.")
            except Exception as e:
                self.logger.warning(f"Error closing aiohttp session: {e}")
            finally:
                self._session = None

    async def _get_session(self) -> Any:  # returns aiohttp.ClientSession when aiohttp is available
        if aiohttp is None:
            raise RuntimeError("aiohttp not installed, cannot use ProxyMiddleware")

        if self._session is None or self._session.closed:
            if self._session and self._session.closed:
                self.logger.debug("Existing session closed, creating new session...")
            timeout = aiohttp.ClientTimeout(total=self.timeout)
            self._session = aiohttp.ClientSession(timeout=timeout)
            self.logger.debug("New aiohttp session created.")
        return self._session

    async def _fetch_raw_data(self) -> Optional[Dict[str, Any]]:
        max_retries = 2
        retry_count = 0

        while retry_count <= max_retries:
            session = await self._get_session()
            try:
                async with session.get(self.api_url) as resp:
                    content_type = resp.content_type.lower()
                    if 'application/json' not in content_type:
                        self.logger.warning(
                            f"Proxy API returned non-JSON content type: {content_type} (URL: {self.api_url})")
                        try:
                            text = await resp.text()
                            return {"__raw_text__": text.strip(), "__content_type__": content_type}
                        except Exception as e:
                            self.logger.error(f"Failed to read non-JSON response body: {repr(e)}")
                            return None

                    if resp.status != 200:
                        try:
                            error_text = await resp.text()
                        except:
                            error_text = "<Unable to read response body>"
                        self.logger.error(f"Proxy API status code error: {resp.status}, Response body: {error_text}")
                        if 400 <= resp.status < 500:
                            return None
                        return None

                    return await resp.json()

            except NETWORK_EXCEPTIONS as e:
                retry_count += 1
                self.logger.warning(f"Failed to request proxy API (attempt {retry_count}/{max_retries + 1}): {repr(e)}")
                if retry_count <= max_retries:
                    self.logger.info("Closing and rebuilding session for retry...")
                    await self._close_session()
                else:
                    self.logger.error(
                        f"Failed to request proxy API, maximum retry attempts reached ({max_retries + 1}): {repr(e)}")
                    return None

            except aiohttp.ContentTypeError as e:
                self.logger.error(f"Proxy API response content type error: {repr(e)}")
                return None

            except Exception as e:
                self.logger.critical(f"Unexpected error occurred while requesting proxy API: {repr(e)}", exc_info=True)
                return None

        return None

    async def _extract_proxy(self, data: Dict[str, Any]) -> Optional[Union[str, Dict[str, str]]]:
        extractor = self._compile_extractor()
        try:
            result = extractor(data)
            if isinstance(result, str) and result.strip():
                return result.strip()
            elif isinstance(result, dict):
                cleaned = {k: v.strip() if isinstance(v, str) else v for k, v in result.items()}
                return cleaned if cleaned else None
            return None
        except Exception as e:
            self.logger.error(f"Error executing PROXY_EXTRACTOR: {repr(e)}")
            return None

    async def _get_proxy_from_api(self) -> Optional[Union[str, Dict[str, str]]]:
        raw_data = await self._fetch_raw_data()
        if not raw_data:
            return None

        if "__raw_text__" in raw_data:
            text = raw_data["__raw_text__"]
            if text.startswith("http://") or text.startswith("https://"):
                return text

        return await self._extract_proxy(raw_data)

    def _parse_proxy_data(self, proxy_data: Union[str, Dict[str, Any]]) -> List[str]:
        """Parse proxy data and extract a list of proxy URLs."""
        new_proxies = []
        if isinstance(proxy_data, str):
            # A single proxy
            if proxy_data.startswith("http://") or proxy_data.startswith("https://"):
                new_proxies = [proxy_data]
        elif isinstance(proxy_data, dict):
            # For a dict, try to extract a list of proxies
            for key, value in proxy_data.items():
                if isinstance(value, str) and (value.startswith("http://") or value.startswith("https://")):
                    new_proxies.append(value)
                elif isinstance(value, list):
                    # If the value is a list, add every valid proxy it contains
                    for item in value:
                        if isinstance(item, str) and (item.startswith("http://") or item.startswith("https://")):
                            new_proxies.append(item)
        return new_proxies

    def _get_healthy_proxies(self) -> List[Proxy]:
        """Return all healthy proxies."""
        return [p for p in self._proxy_pool if p.is_healthy and p.success_rate >= self.health_check_threshold]

    async def _update_proxy_pool(self):
        """Refresh the proxy pool."""
        if not self.enabled:
            self.logger.debug("ProxyMiddleware disabled, skipping proxy fetch.")
            return

        now = asyncio.get_event_loop().time()
        if (now - self._last_fetch_time) < self.refresh_interval:
            return

        # Fetch a fresh proxy list
        proxy_data = await self._get_proxy_from_api()
        if not proxy_data:
            self.logger.warning("Failed to get new proxies, proxy pool will remain unchanged.")
            return

        # Parse the proxy data
        new_proxies = self._parse_proxy_data(proxy_data)

        # Build the new proxy pool
        if new_proxies:
            self._proxy_pool = [Proxy(proxy_str) for proxy_str in new_proxies[:self.proxy_pool_size]]
            self._current_proxy_index = 0
            self._last_fetch_time = now
            self.logger.info(f"Updated proxy pool, added {len(self._proxy_pool)} proxies")
        else:
            self.logger.warning("No valid proxies parsed, proxy pool will remain unchanged.")

    async def _get_healthy_proxy(self) -> Optional[Proxy]:
        """Get a healthy proxy from the pool."""
        if not self._proxy_pool:
            await self._update_proxy_pool()

        if not self._proxy_pool:
            return None

        # Look for healthy proxies
        healthy_proxies = self._get_healthy_proxies()

        if not healthy_proxies:
            # No healthy proxies: try refreshing the pool
            await self._update_proxy_pool()
            healthy_proxies = self._get_healthy_proxies()

        if not healthy_proxies:
            return None

        # Pick a proxy round-robin
        self._current_proxy_index = (self._current_proxy_index + 1) % len(healthy_proxies)
        selected_proxy = healthy_proxies[self._current_proxy_index]
        return selected_proxy

    @staticmethod
    def _is_https(request: Request) -> bool:
        return urlparse(request.url).scheme == "https"

    async def process_request(self, request: Request, spider) -> Optional[Request]:
        if not self.enabled:
            self.logger.debug(f"ProxyMiddleware disabled, request will connect directly: {request.url}")
            return None

        if request.proxy:
            return None

        proxy_obj = await self._get_healthy_proxy()
        if proxy_obj:
            proxy = proxy_obj.proxy_str
            # Handle proxy URLs that carry credentials
            if isinstance(proxy, str) and "@" in proxy and "://" in proxy:
                # Parse the authenticated proxy URL
                parsed = urlparse(proxy)
                if parsed.username and parsed.password:
                    # The aiohttp downloader needs the credentials handled separately
                    downloader_type = spider.crawler.settings.get("DOWNLOADER_TYPE", "aiohttp")
                    if downloader_type == "aiohttp":
                        # Store the credentials in meta for the downloader to apply
                        request.meta["proxy_auth"] = {
                            "username": parsed.username,
                            "password": parsed.password
                        }
                        # Strip the credentials from the proxy URL
                        clean_proxy = f"{parsed.scheme}://{parsed.hostname}"
                        if parsed.port:
                            clean_proxy += f":{parsed.port}"
                        request.proxy = clean_proxy
                    else:
                        # Other downloaders can use the authenticated URL directly
                        request.proxy = proxy
                else:
                    request.proxy = proxy
            else:
                request.proxy = proxy

            # Remember which proxy was used
            request.meta["_used_proxy"] = proxy_obj
            self.logger.info(f"Assigned proxy → {proxy} | {request.url}")
        else:
            self.logger.warning(f"No proxy obtained, request connecting directly: {request.url}")

        return None

    def process_response(self, request: Request, response: Response, spider) -> Response:
        proxy_obj = request.meta.get("_used_proxy")
        if proxy_obj and isinstance(proxy_obj, Proxy):
            proxy_obj.mark_success()
            status_code = getattr(response, 'status_code', 'N/A')
            self.logger.debug(f"Proxy success: {proxy_obj.proxy_str} | {request.url} | Status: {status_code}")
        elif request.proxy:
            status_code = getattr(response, 'status_code', 'N/A')
            self.logger.debug(f"Proxy success: {request.proxy} | {request.url} | Status: {status_code}")
        return response

    def process_exception(self, request: Request, exception: Exception, spider) -> Optional[Request]:
        proxy_obj = request.meta.get("_used_proxy")
        if proxy_obj and isinstance(proxy_obj, Proxy):
            proxy_obj.mark_failure()
            self.logger.warning(f"Proxy request failed: {proxy_obj.proxy_str} | {request.url} | {repr(exception)}")
        elif request.proxy:
            self.logger.warning(f"Proxy request failed: {request.proxy} | {request.url} | {repr(exception)}")
        return None

    async def close(self):
        await self._close_session()
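For orientation, a minimal configuration sketch for this middleware follows. Only the setting names are taken from ProxyMiddleware.__init__ above; the API URL, the JSON layout assumed by the callable extractor, and the helper name extract_proxies are hypothetical examples, not part of the package.

# Hypothetical settings sketch for ProxyMiddleware (e.g. in a project's settings.py).
# The setting names come from the code above; the values and the proxy API are made-up examples.
PROXY_ENABLED = True
PROXY_API_URL = "http://proxy-provider.example.com/api/get"  # assumed to return JSON
PROXY_REFRESH_INTERVAL = 60          # seconds between proxy-pool refreshes
PROXY_API_TIMEOUT = 10               # timeout (seconds) for requests to the proxy API
PROXY_POOL_SIZE = 5                  # maximum number of proxies kept in the pool
PROXY_HEALTH_CHECK_THRESHOLD = 0.5   # minimum success rate for a proxy to count as healthy

# PROXY_EXTRACTOR may be a dotted key path into the JSON response (e.g. "data.proxy")
# or a callable. This callable assumes a hypothetical payload such as
# {"data": {"proxy_list": ["http://1.2.3.4:8080", "http://5.6.7.8:8080"]}} and returns a
# dict whose list values _parse_proxy_data() will flatten into proxy URLs.
def extract_proxies(payload):
    return {"proxies": payload.get("data", {}).get("proxy_list", [])}

PROXY_EXTRACTOR = extract_proxies

Note that process_request leaves requests that already carry request.proxy untouched, so a proxy set explicitly by a spider takes precedence over the pool.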