crawlo 1.4.2__py3-none-any.whl → 1.4.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo has been flagged as possibly problematic.

Files changed (326)
  1. crawlo/__init__.py +93 -93
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/help.py +138 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +341 -341
  10. crawlo/commands/startproject.py +436 -436
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +196 -196
  13. crawlo/config.py +312 -312
  14. crawlo/config_validator.py +277 -277
  15. crawlo/core/__init__.py +52 -52
  16. crawlo/core/engine.py +438 -439
  17. crawlo/core/processor.py +47 -47
  18. crawlo/core/scheduler.py +291 -257
  19. crawlo/crawler.py +650 -650
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -194
  22. crawlo/downloader/__init__.py +273 -273
  23. crawlo/downloader/aiohttp_downloader.py +233 -233
  24. crawlo/downloader/cffi_downloader.py +245 -245
  25. crawlo/downloader/httpx_downloader.py +259 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +402 -402
  28. crawlo/downloader/selenium_downloader.py +472 -472
  29. crawlo/event.py +11 -11
  30. crawlo/exceptions.py +81 -81
  31. crawlo/extension/__init__.py +63 -63
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +94 -94
  34. crawlo/extension/log_stats.py +70 -70
  35. crawlo/extension/logging_extension.py +61 -61
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/factories/__init__.py +27 -27
  40. crawlo/factories/base.py +68 -68
  41. crawlo/factories/crawler.py +103 -103
  42. crawlo/factories/registry.py +84 -84
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +257 -257
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/framework.py +292 -292
  47. crawlo/initialization/__init__.py +44 -44
  48. crawlo/initialization/built_in.py +425 -425
  49. crawlo/initialization/context.py +141 -141
  50. crawlo/initialization/core.py +193 -193
  51. crawlo/initialization/phases.py +148 -148
  52. crawlo/initialization/registry.py +145 -145
  53. crawlo/items/__init__.py +23 -23
  54. crawlo/items/base.py +23 -23
  55. crawlo/items/fields.py +52 -52
  56. crawlo/items/items.py +104 -104
  57. crawlo/logging/__init__.py +45 -37
  58. crawlo/logging/async_handler.py +181 -0
  59. crawlo/logging/config.py +196 -96
  60. crawlo/logging/factory.py +171 -128
  61. crawlo/logging/manager.py +111 -111
  62. crawlo/logging/monitor.py +153 -0
  63. crawlo/logging/sampler.py +167 -0
  64. crawlo/middleware/__init__.py +21 -21
  65. crawlo/middleware/default_header.py +132 -132
  66. crawlo/middleware/download_delay.py +104 -104
  67. crawlo/middleware/middleware_manager.py +135 -135
  68. crawlo/middleware/offsite.py +123 -123
  69. crawlo/middleware/proxy.py +386 -386
  70. crawlo/middleware/request_ignore.py +86 -86
  71. crawlo/middleware/response_code.py +150 -150
  72. crawlo/middleware/response_filter.py +136 -136
  73. crawlo/middleware/retry.py +124 -124
  74. crawlo/middleware/simple_proxy.py +65 -65
  75. crawlo/mode_manager.py +219 -219
  76. crawlo/network/__init__.py +21 -21
  77. crawlo/network/request.py +379 -379
  78. crawlo/network/response.py +359 -359
  79. crawlo/pipelines/__init__.py +21 -21
  80. crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
  81. crawlo/pipelines/console_pipeline.py +39 -39
  82. crawlo/pipelines/csv_pipeline.py +316 -316
  83. crawlo/pipelines/database_dedup_pipeline.py +197 -197
  84. crawlo/pipelines/json_pipeline.py +218 -218
  85. crawlo/pipelines/memory_dedup_pipeline.py +105 -105
  86. crawlo/pipelines/mongo_pipeline.py +131 -131
  87. crawlo/pipelines/mysql_pipeline.py +325 -325
  88. crawlo/pipelines/pipeline_manager.py +100 -84
  89. crawlo/pipelines/redis_dedup_pipeline.py +156 -156
  90. crawlo/project.py +349 -338
  91. crawlo/queue/pqueue.py +42 -42
  92. crawlo/queue/queue_manager.py +526 -522
  93. crawlo/queue/redis_priority_queue.py +370 -367
  94. crawlo/settings/__init__.py +7 -7
  95. crawlo/settings/default_settings.py +284 -284
  96. crawlo/settings/setting_manager.py +219 -219
  97. crawlo/spider/__init__.py +657 -657
  98. crawlo/stats_collector.py +73 -73
  99. crawlo/subscriber.py +129 -129
  100. crawlo/task_manager.py +138 -138
  101. crawlo/templates/crawlo.cfg.tmpl +10 -10
  102. crawlo/templates/project/__init__.py.tmpl +3 -3
  103. crawlo/templates/project/items.py.tmpl +17 -17
  104. crawlo/templates/project/middlewares.py.tmpl +118 -118
  105. crawlo/templates/project/pipelines.py.tmpl +96 -96
  106. crawlo/templates/project/settings.py.tmpl +170 -170
  107. crawlo/templates/project/settings_distributed.py.tmpl +169 -169
  108. crawlo/templates/project/settings_gentle.py.tmpl +166 -166
  109. crawlo/templates/project/settings_high_performance.py.tmpl +167 -167
  110. crawlo/templates/project/settings_minimal.py.tmpl +65 -65
  111. crawlo/templates/project/settings_simple.py.tmpl +164 -164
  112. crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
  113. crawlo/templates/run.py.tmpl +34 -34
  114. crawlo/templates/spider/spider.py.tmpl +143 -143
  115. crawlo/templates/spiders_init.py.tmpl +9 -9
  116. crawlo/tools/__init__.py +200 -200
  117. crawlo/tools/anti_crawler.py +268 -268
  118. crawlo/tools/authenticated_proxy.py +240 -240
  119. crawlo/tools/data_formatter.py +225 -225
  120. crawlo/tools/data_validator.py +180 -180
  121. crawlo/tools/date_tools.py +289 -289
  122. crawlo/tools/distributed_coordinator.py +384 -384
  123. crawlo/tools/encoding_converter.py +127 -127
  124. crawlo/tools/network_diagnostic.py +364 -364
  125. crawlo/tools/request_tools.py +82 -82
  126. crawlo/tools/retry_mechanism.py +224 -224
  127. crawlo/tools/scenario_adapter.py +262 -262
  128. crawlo/tools/text_cleaner.py +232 -232
  129. crawlo/utils/__init__.py +34 -34
  130. crawlo/utils/batch_processor.py +259 -259
  131. crawlo/utils/class_loader.py +25 -25
  132. crawlo/utils/controlled_spider_mixin.py +439 -439
  133. crawlo/utils/db_helper.py +343 -343
  134. crawlo/utils/enhanced_error_handler.py +356 -356
  135. crawlo/utils/env_config.py +142 -142
  136. crawlo/utils/error_handler.py +165 -165
  137. crawlo/utils/fingerprint.py +122 -122
  138. crawlo/utils/func_tools.py +82 -82
  139. crawlo/utils/large_scale_config.py +286 -286
  140. crawlo/utils/large_scale_helper.py +344 -344
  141. crawlo/utils/log.py +79 -79
  142. crawlo/utils/performance_monitor.py +285 -285
  143. crawlo/utils/queue_helper.py +175 -175
  144. crawlo/utils/redis_connection_pool.py +388 -388
  145. crawlo/utils/redis_key_validator.py +198 -198
  146. crawlo/utils/request.py +267 -267
  147. crawlo/utils/request_serializer.py +225 -225
  148. crawlo/utils/spider_loader.py +61 -61
  149. crawlo/utils/system.py +11 -11
  150. crawlo/utils/tools.py +4 -4
  151. crawlo/utils/url.py +39 -39
  152. crawlo-1.4.3.dist-info/METADATA +190 -0
  153. crawlo-1.4.3.dist-info/RECORD +326 -0
  154. examples/__init__.py +7 -7
  155. examples/test_project/__init__.py +7 -7
  156. examples/test_project/run.py +34 -34
  157. examples/test_project/test_project/__init__.py +3 -3
  158. examples/test_project/test_project/items.py +17 -17
  159. examples/test_project/test_project/middlewares.py +118 -118
  160. examples/test_project/test_project/pipelines.py +96 -96
  161. examples/test_project/test_project/settings.py +169 -169
  162. examples/test_project/test_project/spiders/__init__.py +9 -9
  163. examples/test_project/test_project/spiders/of_week_dis.py +143 -143
  164. tests/__init__.py +7 -7
  165. tests/advanced_tools_example.py +275 -275
  166. tests/authenticated_proxy_example.py +106 -106
  167. tests/baidu_performance_test.py +108 -108
  168. tests/baidu_test.py +59 -59
  169. tests/cleaners_example.py +160 -160
  170. tests/comprehensive_framework_test.py +212 -212
  171. tests/comprehensive_test.py +81 -81
  172. tests/comprehensive_testing_summary.md +186 -186
  173. tests/config_validation_demo.py +142 -142
  174. tests/controlled_spider_example.py +205 -205
  175. tests/date_tools_example.py +180 -180
  176. tests/debug_configure.py +69 -69
  177. tests/debug_framework_logger.py +84 -84
  178. tests/debug_log_config.py +126 -126
  179. tests/debug_log_levels.py +63 -63
  180. tests/debug_pipelines.py +66 -66
  181. tests/detailed_log_test.py +233 -233
  182. tests/distributed_test.py +66 -66
  183. tests/distributed_test_debug.py +76 -76
  184. tests/dynamic_loading_example.py +523 -523
  185. tests/dynamic_loading_test.py +104 -104
  186. tests/env_config_example.py +133 -133
  187. tests/error_handling_example.py +171 -171
  188. tests/final_comprehensive_test.py +151 -151
  189. tests/final_log_test.py +260 -260
  190. tests/final_validation_test.py +182 -182
  191. tests/fix_log_test.py +142 -142
  192. tests/framework_performance_test.py +202 -202
  193. tests/log_buffering_test.py +111 -111
  194. tests/log_generation_timing_test.py +153 -153
  195. tests/optimized_performance_test.py +211 -211
  196. tests/performance_comparison.py +245 -245
  197. tests/queue_blocking_test.py +113 -113
  198. tests/queue_test.py +89 -89
  199. tests/redis_key_validation_demo.py +130 -130
  200. tests/request_params_example.py +150 -150
  201. tests/response_improvements_example.py +144 -144
  202. tests/scrapy_comparison/ofweek_scrapy.py +138 -138
  203. tests/scrapy_comparison/scrapy_test.py +133 -133
  204. tests/simple_command_test.py +119 -119
  205. tests/simple_crawlo_test.py +127 -127
  206. tests/simple_log_test.py +57 -57
  207. tests/simple_log_test2.py +137 -137
  208. tests/simple_optimization_test.py +128 -128
  209. tests/simple_queue_type_test.py +41 -41
  210. tests/simple_spider_test.py +49 -49
  211. tests/simple_test.py +47 -47
  212. tests/spider_log_timing_test.py +177 -177
  213. tests/test_advanced_tools.py +148 -148
  214. tests/test_all_commands.py +230 -230
  215. tests/test_all_pipeline_fingerprints.py +133 -133
  216. tests/test_all_redis_key_configs.py +145 -145
  217. tests/test_authenticated_proxy.py +141 -141
  218. tests/test_batch_processor.py +178 -178
  219. tests/test_cleaners.py +54 -54
  220. tests/test_component_factory.py +174 -174
  221. tests/test_comprehensive.py +146 -146
  222. tests/test_config_consistency.py +80 -80
  223. tests/test_config_merge.py +152 -152
  224. tests/test_config_validator.py +182 -182
  225. tests/test_controlled_spider_mixin.py +79 -79
  226. tests/test_crawlo_proxy_integration.py +108 -108
  227. tests/test_date_tools.py +123 -123
  228. tests/test_dedup_fix.py +220 -220
  229. tests/test_dedup_pipeline_consistency.py +125 -0
  230. tests/test_default_header_middleware.py +313 -313
  231. tests/test_distributed.py +65 -65
  232. tests/test_double_crawlo_fix.py +204 -204
  233. tests/test_double_crawlo_fix_simple.py +124 -124
  234. tests/test_download_delay_middleware.py +221 -221
  235. tests/test_downloader_proxy_compatibility.py +268 -268
  236. tests/test_dynamic_downloaders_proxy.py +124 -124
  237. tests/test_dynamic_proxy.py +92 -92
  238. tests/test_dynamic_proxy_config.py +146 -146
  239. tests/test_dynamic_proxy_real.py +109 -109
  240. tests/test_edge_cases.py +303 -303
  241. tests/test_enhanced_error_handler.py +270 -270
  242. tests/test_enhanced_error_handler_comprehensive.py +245 -245
  243. tests/test_env_config.py +121 -121
  244. tests/test_error_handler_compatibility.py +112 -112
  245. tests/test_factories.py +252 -252
  246. tests/test_final_validation.py +153 -153
  247. tests/test_fingerprint_consistency.py +135 -135
  248. tests/test_fingerprint_simple.py +51 -51
  249. tests/test_framework_env_usage.py +103 -103
  250. tests/test_framework_logger.py +66 -66
  251. tests/test_framework_startup.py +64 -64
  252. tests/test_get_component_logger.py +83 -83
  253. tests/test_hash_performance.py +99 -99
  254. tests/test_integration.py +169 -169
  255. tests/test_item_dedup_redis_key.py +122 -122
  256. tests/test_large_scale_config.py +112 -112
  257. tests/test_large_scale_helper.py +235 -235
  258. tests/test_logging_enhancements.py +375 -0
  259. tests/test_logging_final.py +185 -0
  260. tests/test_logging_integration.py +313 -0
  261. tests/test_logging_system.py +282 -282
  262. tests/test_middleware_debug.py +142 -0
  263. tests/test_mode_change.py +72 -72
  264. tests/test_mode_consistency.py +51 -51
  265. tests/test_offsite_middleware.py +244 -244
  266. tests/test_offsite_middleware_simple.py +203 -203
  267. tests/test_parsel.py +29 -29
  268. tests/test_performance.py +327 -327
  269. tests/test_performance_monitor.py +115 -115
  270. tests/test_pipeline_fingerprint_consistency.py +86 -86
  271. tests/test_priority_behavior.py +212 -0
  272. tests/test_priority_consistency.py +152 -0
  273. tests/test_priority_consistency_fixed.py +250 -0
  274. tests/test_proxy_api.py +264 -264
  275. tests/test_proxy_health_check.py +32 -32
  276. tests/test_proxy_middleware.py +121 -121
  277. tests/test_proxy_middleware_enhanced.py +216 -216
  278. tests/test_proxy_middleware_integration.py +136 -136
  279. tests/test_proxy_middleware_refactored.py +184 -184
  280. tests/test_proxy_providers.py +56 -56
  281. tests/test_proxy_stats.py +19 -19
  282. tests/test_proxy_strategies.py +59 -59
  283. tests/test_queue_empty_check.py +41 -41
  284. tests/test_queue_manager_double_crawlo.py +173 -173
  285. tests/test_queue_manager_redis_key.py +179 -179
  286. tests/test_queue_naming.py +154 -154
  287. tests/test_queue_type.py +106 -106
  288. tests/test_queue_type_redis_config_consistency.py +131 -0
  289. tests/test_random_headers_default.py +323 -0
  290. tests/test_random_headers_necessity.py +309 -0
  291. tests/test_random_user_agent.py +72 -72
  292. tests/test_real_scenario_proxy.py +195 -195
  293. tests/test_redis_config.py +28 -28
  294. tests/test_redis_connection_pool.py +294 -294
  295. tests/test_redis_key_naming.py +181 -181
  296. tests/test_redis_key_validator.py +123 -123
  297. tests/test_redis_queue.py +224 -224
  298. tests/test_redis_queue_name_fix.py +175 -175
  299. tests/test_redis_queue_type_fallback.py +130 -0
  300. tests/test_request_ignore_middleware.py +182 -182
  301. tests/test_request_params.py +111 -111
  302. tests/test_request_serialization.py +70 -70
  303. tests/test_response_code_middleware.py +349 -349
  304. tests/test_response_filter_middleware.py +427 -427
  305. tests/test_response_improvements.py +152 -152
  306. tests/test_retry_middleware.py +334 -242
  307. tests/test_retry_middleware_realistic.py +274 -0
  308. tests/test_scheduler.py +252 -252
  309. tests/test_scheduler_config_update.py +133 -133
  310. tests/test_simple_response.py +61 -61
  311. tests/test_telecom_spider_redis_key.py +205 -205
  312. tests/test_template_content.py +87 -87
  313. tests/test_template_redis_key.py +134 -134
  314. tests/test_tools.py +159 -159
  315. tests/test_user_agent_randomness.py +177 -0
  316. tests/test_user_agents.py +96 -96
  317. tests/tools_example.py +260 -260
  318. tests/untested_features_report.md +138 -138
  319. tests/verify_debug.py +51 -51
  320. tests/verify_distributed.py +117 -117
  321. tests/verify_log_fix.py +111 -111
  322. crawlo-1.4.2.dist-info/METADATA +0 -1199
  323. crawlo-1.4.2.dist-info/RECORD +0 -309
  324. {crawlo-1.4.2.dist-info → crawlo-1.4.3.dist-info}/WHEEL +0 -0
  325. {crawlo-1.4.2.dist-info → crawlo-1.4.3.dist-info}/entry_points.txt +0 -0
  326. {crawlo-1.4.2.dist-info → crawlo-1.4.3.dist-info}/top_level.txt +0 -0
crawlo/middleware/proxy.py
@@ -1,386 +1,386 @@
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import time
import asyncio
import socket
from urllib.parse import urlparse
from typing import Optional, Dict, Any, Callable, Union, TYPE_CHECKING, List

from crawlo import Request, Response
from crawlo.exceptions import NotConfiguredError
from crawlo.utils.log import get_logger

if TYPE_CHECKING:
    import aiohttp

try:
    import httpx

    HTTPX_EXCEPTIONS = (httpx.NetworkError, httpx.TimeoutException, httpx.ReadError, httpx.ConnectError)
except ImportError:
    HTTPX_EXCEPTIONS = ()
    httpx = None

try:
    import aiohttp

    AIOHTTP_EXCEPTIONS = (
        aiohttp.ClientError, aiohttp.ClientConnectorError, aiohttp.ClientResponseError, aiohttp.ServerTimeoutError,
        aiohttp.ServerDisconnectedError)
except ImportError:
    AIOHTTP_EXCEPTIONS = ()
    aiohttp = None

try:
    from curl_cffi import requests as cffi_requests

    CURL_CFFI_EXCEPTIONS = (cffi_requests.RequestsError,)
except (ImportError, AttributeError):
    CURL_CFFI_EXCEPTIONS = ()
    cffi_requests = None

NETWORK_EXCEPTIONS = (
    asyncio.TimeoutError,
    socket.gaierror,
    ConnectionError,
    TimeoutError,
) + HTTPX_EXCEPTIONS + AIOHTTP_EXCEPTIONS + CURL_CFFI_EXCEPTIONS

ProxyExtractor = Callable[[Dict[str, Any]], Union[None, str, Dict[str, str]]]


class Proxy:
    """Proxy object holding the proxy address and its usage statistics."""

    def __init__(self, proxy_str: str):
        self.proxy_str = proxy_str
        self.success_count = 0
        self.failure_count = 0
        self.last_used_time = 0.0
        self.is_healthy = True

    @property
    def success_rate(self) -> float:
        """Compute this proxy's success rate."""
        total = self.success_count + self.failure_count
        if total == 0:
            return 1.0
        return self.success_count / total

    def mark_success(self):
        """Record a successful use of this proxy."""
        self.success_count += 1
        self.last_used_time = time.time()
        self.is_healthy = True

    def mark_failure(self):
        """Record a failed use of this proxy."""
        self.failure_count += 1
        self.last_used_time = time.time()
        # Mark the proxy unhealthy once its failure rate gets too high
        if self.failure_count > 3 and self.success_rate < 0.5:
            self.is_healthy = False


class ProxyMiddleware:
    def __init__(self, settings, log_level):
        self.logger = get_logger(self.__class__.__name__, log_level)

        self._session: Optional[Any] = None  # aiohttp.ClientSession when aiohttp is available
        # A pool of proxies instead of a single proxy
        self._proxy_pool: List[Proxy] = []
        self._current_proxy_index: int = 0
        self._last_fetch_time: float = 0

        self.proxy_extractor = settings.get("PROXY_EXTRACTOR", "proxy")
        self.refresh_interval = settings.get_float("PROXY_REFRESH_INTERVAL", 60)
        self.timeout = settings.get_float("PROXY_API_TIMEOUT", 10)
        # New setting: proxy pool size
        self.proxy_pool_size = settings.get_int("PROXY_POOL_SIZE", 5)
        # New setting: health-check threshold
        self.health_check_threshold = settings.get_float("PROXY_HEALTH_CHECK_THRESHOLD", 0.5)

        self.enabled = settings.get_bool("PROXY_ENABLED", True)

        if not self.enabled:
            self.logger.info("ProxyMiddleware disabled")
            return

        self.api_url = settings.get("PROXY_API_URL")
        if not self.api_url:
            raise NotConfiguredError("PROXY_API_URL not configured, ProxyMiddleware disabled")

        self.logger.info(
            f"Proxy middleware enabled | API: {self.api_url} | Refresh interval: {self.refresh_interval}s | Proxy pool size: {self.proxy_pool_size}")

    @classmethod
    def create_instance(cls, crawler):
        return cls(settings=crawler.settings, log_level=crawler.settings.get("LOG_LEVEL"))

    def _compile_extractor(self) -> ProxyExtractor:
        if callable(self.proxy_extractor):
            return self.proxy_extractor

        if isinstance(self.proxy_extractor, str):
            keys = self.proxy_extractor.split(".")

            def extract(data: Dict[str, Any]) -> Union[None, str, Dict[str, str]]:
                for k in keys:
                    if isinstance(data, dict):
                        data = data.get(k)
                    else:
                        return None
                    if data is None:
                        break
                return data

            return extract

        raise ValueError(f"PROXY_EXTRACTOR must be a str or a callable, got: {type(self.proxy_extractor)}")

    async def _close_session(self):
        if self._session:
            try:
                await self._session.close()
                self.logger.debug("aiohttp session closed.")
            except Exception as e:
                self.logger.warning(f"Error closing aiohttp session: {e}")
            finally:
                self._session = None

    async def _get_session(self) -> Any:  # returns aiohttp.ClientSession when aiohttp is available
        if aiohttp is None:
            raise RuntimeError("aiohttp not installed, cannot use ProxyMiddleware")

        if self._session is None or self._session.closed:
            if self._session and self._session.closed:
                self.logger.debug("Existing session closed, creating new session...")
            timeout = aiohttp.ClientTimeout(total=self.timeout)
            self._session = aiohttp.ClientSession(timeout=timeout)
            self.logger.debug("New aiohttp session created.")
        return self._session

    async def _fetch_raw_data(self) -> Optional[Dict[str, Any]]:
        max_retries = 2
        retry_count = 0

        while retry_count <= max_retries:
            session = await self._get_session()
            try:
                async with session.get(self.api_url) as resp:
                    content_type = resp.content_type.lower()
                    if 'application/json' not in content_type:
                        self.logger.warning(
                            f"Proxy API returned non-JSON content type: {content_type} (URL: {self.api_url})")
                        try:
                            text = await resp.text()
                            return {"__raw_text__": text.strip(), "__content_type__": content_type}
                        except Exception as e:
                            self.logger.error(f"Failed to read non-JSON response body: {repr(e)}")
                            return None

                    if resp.status != 200:
                        try:
                            error_text = await resp.text()
                        except Exception:
                            error_text = "<Unable to read response body>"
                        self.logger.error(f"Proxy API status code error: {resp.status}, Response body: {error_text}")
                        return None

                    return await resp.json()

            except NETWORK_EXCEPTIONS as e:
                retry_count += 1
                self.logger.warning(f"Failed to request proxy API (attempt {retry_count}/{max_retries + 1}): {repr(e)}")
                if retry_count <= max_retries:
                    self.logger.info("Closing and rebuilding session for retry...")
                    await self._close_session()
                else:
                    self.logger.error(
                        f"Failed to request proxy API, maximum retry attempts reached ({max_retries + 1}): {repr(e)}")
                    return None

            except aiohttp.ContentTypeError as e:
                self.logger.error(f"Proxy API response content type error: {repr(e)}")
                return None

            except Exception as e:
                self.logger.critical(f"Unexpected error occurred while requesting proxy API: {repr(e)}", exc_info=True)
                return None

        return None

    async def _extract_proxy(self, data: Dict[str, Any]) -> Optional[Union[str, Dict[str, str]]]:
        extractor = self._compile_extractor()
        try:
            result = extractor(data)
            if isinstance(result, str) and result.strip():
                return result.strip()
            elif isinstance(result, dict):
                cleaned = {k: v.strip() if isinstance(v, str) else v for k, v in result.items()}
                return cleaned if cleaned else None
            return None
        except Exception as e:
            self.logger.error(f"Error executing PROXY_EXTRACTOR: {repr(e)}")
            return None

    async def _get_proxy_from_api(self) -> Optional[Union[str, Dict[str, str]]]:
        raw_data = await self._fetch_raw_data()
        if not raw_data:
            return None

        if "__raw_text__" in raw_data:
            text = raw_data["__raw_text__"]
            if text.startswith("http://") or text.startswith("https://"):
                return text

        return await self._extract_proxy(raw_data)

    def _parse_proxy_data(self, proxy_data: Union[str, Dict[str, Any]]) -> List[str]:
        """Parse proxy data and extract a list of proxy URLs."""
        new_proxies = []
        if isinstance(proxy_data, str):
            # A single proxy
            if proxy_data.startswith("http://") or proxy_data.startswith("https://"):
                new_proxies = [proxy_data]
        elif isinstance(proxy_data, dict):
            # For a dict, try to extract a list of proxies
            for key, value in proxy_data.items():
                if isinstance(value, str) and (value.startswith("http://") or value.startswith("https://")):
                    new_proxies.append(value)
                elif isinstance(value, list):
                    # If the value is a list, add every valid proxy in it
                    for item in value:
                        if isinstance(item, str) and (item.startswith("http://") or item.startswith("https://")):
                            new_proxies.append(item)
        return new_proxies

    def _get_healthy_proxies(self) -> List[Proxy]:
        """Return all healthy proxies in the pool."""
        return [p for p in self._proxy_pool if p.is_healthy and p.success_rate >= self.health_check_threshold]

    async def _update_proxy_pool(self):
        """Refresh the proxy pool."""
        if not self.enabled:
            self.logger.debug("ProxyMiddleware disabled, skipping proxy fetch.")
            return

        now = asyncio.get_event_loop().time()
        if (now - self._last_fetch_time) < self.refresh_interval:
            return

        # Fetch a fresh proxy list
        proxy_data = await self._get_proxy_from_api()
        if not proxy_data:
            self.logger.warning("Failed to get new proxies, proxy pool will remain unchanged.")
            return

        # Parse the proxy data
        new_proxies = self._parse_proxy_data(proxy_data)

        # Build the new proxy pool
        if new_proxies:
            self._proxy_pool = [Proxy(proxy_str) for proxy_str in new_proxies[:self.proxy_pool_size]]
            self._current_proxy_index = 0
            self._last_fetch_time = now
            self.logger.info(f"Updated proxy pool, added {len(self._proxy_pool)} proxies")
        else:
            self.logger.warning("No valid proxies parsed, proxy pool will remain unchanged.")

    async def _get_healthy_proxy(self) -> Optional[Proxy]:
        """Pick one healthy proxy from the pool."""
        if not self._proxy_pool:
            await self._update_proxy_pool()

        if not self._proxy_pool:
            return None

        # Look for healthy proxies
        healthy_proxies = self._get_healthy_proxies()

        if not healthy_proxies:
            # No healthy proxies left; try refreshing the pool
            await self._update_proxy_pool()
            healthy_proxies = self._get_healthy_proxies()

        if not healthy_proxies:
            return None

        # Select a proxy round-robin
        self._current_proxy_index = (self._current_proxy_index + 1) % len(healthy_proxies)
        selected_proxy = healthy_proxies[self._current_proxy_index]
        return selected_proxy

    @staticmethod
    def _is_https(request: Request) -> bool:
        return urlparse(request.url).scheme == "https"

    async def process_request(self, request: Request, spider) -> Optional[Request]:
        if not self.enabled:
            self.logger.debug(f"ProxyMiddleware disabled, request will connect directly: {request.url}")
            return None

        if request.proxy:
            return None

        proxy_obj = await self._get_healthy_proxy()
        if proxy_obj:
            proxy = proxy_obj.proxy_str
            # Handle proxy URLs that carry credentials
            if isinstance(proxy, str) and "@" in proxy and "://" in proxy:
                # Parse the authenticated proxy URL
                parsed = urlparse(proxy)
                if parsed.username and parsed.password:
                    # The aiohttp downloader needs the credentials handled separately
                    downloader_type = spider.crawler.settings.get("DOWNLOADER_TYPE", "aiohttp")
                    if downloader_type == "aiohttp":
                        # Stash the credentials in meta for the downloader to apply
                        request.meta["proxy_auth"] = {
                            "username": parsed.username,
                            "password": parsed.password
                        }
                        # Strip the credentials from the URL
                        clean_proxy = f"{parsed.scheme}://{parsed.hostname}"
                        if parsed.port:
                            clean_proxy += f":{parsed.port}"
                        request.proxy = clean_proxy
                    else:
                        # Other downloaders can use the authenticated URL directly
                        request.proxy = proxy
                else:
                    request.proxy = proxy
            else:
                request.proxy = proxy

            # Record which proxy was used
            request.meta["_used_proxy"] = proxy_obj
            self.logger.info(f"Assigned proxy → {proxy} | {request.url}")
        else:
            self.logger.warning(f"No proxy obtained, request connecting directly: {request.url}")

        return None

    def process_response(self, request: Request, response: Response, spider) -> Response:
        proxy_obj = request.meta.get("_used_proxy")
        if proxy_obj and isinstance(proxy_obj, Proxy):
            proxy_obj.mark_success()
            status_code = getattr(response, 'status_code', 'N/A')
            self.logger.debug(f"Proxy success: {proxy_obj.proxy_str} | {request.url} | Status: {status_code}")
        elif request.proxy:
            status_code = getattr(response, 'status_code', 'N/A')
            self.logger.debug(f"Proxy success: {request.proxy} | {request.url} | Status: {status_code}")
        return response

    def process_exception(self, request: Request, exception: Exception, spider) -> Optional[Request]:
        proxy_obj = request.meta.get("_used_proxy")
        if proxy_obj and isinstance(proxy_obj, Proxy):
            proxy_obj.mark_failure()
            self.logger.warning(f"Proxy request failed: {proxy_obj.proxy_str} | {request.url} | {repr(exception)}")
        elif request.proxy:
            self.logger.warning(f"Proxy request failed: {request.proxy} | {request.url} | {repr(exception)}")
        return None

    async def close(self):
        await self._close_session()
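For orientation, here is a minimal sketch of how the settings read in ProxyMiddleware.__init__ might look in a project's settings.py. The key names and fallback values come from the code above; the API URL is a hypothetical placeholder:

PROXY_ENABLED = True
PROXY_API_URL = "https://proxy-vendor.example.com/api/get"  # hypothetical endpoint; required, else NotConfiguredError
PROXY_EXTRACTOR = "data.proxy"          # dotted path into the API's JSON payload
PROXY_REFRESH_INTERVAL = 60             # seconds between pool refreshes
PROXY_API_TIMEOUT = 10                  # total aiohttp ClientTimeout, in seconds
PROXY_POOL_SIZE = 5                     # proxies kept per refresh
PROXY_HEALTH_CHECK_THRESHOLD = 0.5      # minimum success_rate to stay in rotation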
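Because _compile_extractor accepts a callable as well as a dotted-path string, PROXY_EXTRACTOR can be a function when the vendor's JSON does not fit a simple path. A hedged sketch, assuming a hypothetical response shape of {"code": 0, "data": {"proxies": [...]}}:

def pick_proxies(payload):
    # Hypothetical vendor payload; adjust the keys to the real API.
    if payload.get("code") != 0:
        return None
    candidates = payload.get("data", {}).get("proxies") or []
    # Returning a dict whose values are URL lists matches what
    # _parse_proxy_data walks when it fills the pool.
    return {"proxies": [p for p in candidates if p.startswith(("http://", "https://"))]}

PROXY_EXTRACTOR = pick_proxies

Note also the credential handling in process_request: with the aiohttp downloader, an illustrative authenticated URL such as http://user:pass@1.2.3.4:8080 is split so that request.meta["proxy_auth"] carries the username and password while request.proxy becomes the bare http://1.2.3.4:8080; other downloaders receive the authenticated URL unchanged.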