crawlo 1.2.8-py3-none-any.whl → 1.3.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo has been flagged for review.

Files changed (221)
  1. crawlo/__init__.py +63 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/help.py +138 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +314 -323
  10. crawlo/commands/startproject.py +436 -436
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +186 -186
  13. crawlo/config.py +312 -312
  14. crawlo/config_validator.py +277 -251
  15. crawlo/core/__init__.py +2 -2
  16. crawlo/core/engine.py +365 -365
  17. crawlo/core/processor.py +40 -40
  18. crawlo/core/scheduler.py +256 -251
  19. crawlo/crawler.py +1097 -1099
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -107
  22. crawlo/downloader/__init__.py +273 -266
  23. crawlo/downloader/aiohttp_downloader.py +226 -228
  24. crawlo/downloader/cffi_downloader.py +245 -256
  25. crawlo/downloader/httpx_downloader.py +259 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +402 -402
  28. crawlo/downloader/selenium_downloader.py +472 -472
  29. crawlo/event.py +11 -11
  30. crawlo/exceptions.py +81 -81
  31. crawlo/extension/__init__.py +39 -39
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +57 -57
  34. crawlo/extension/log_stats.py +81 -81
  35. crawlo/extension/logging_extension.py +45 -43
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/filters/__init__.py +154 -154
  40. crawlo/filters/aioredis_filter.py +234 -234
  41. crawlo/filters/memory_filter.py +269 -269
  42. crawlo/items/__init__.py +23 -23
  43. crawlo/items/base.py +21 -21
  44. crawlo/items/fields.py +52 -52
  45. crawlo/items/items.py +104 -104
  46. crawlo/middleware/__init__.py +21 -21
  47. crawlo/middleware/default_header.py +132 -132
  48. crawlo/middleware/download_delay.py +104 -104
  49. crawlo/middleware/middleware_manager.py +136 -136
  50. crawlo/middleware/offsite.py +114 -114
  51. crawlo/middleware/proxy.py +386 -368
  52. crawlo/middleware/request_ignore.py +86 -86
  53. crawlo/middleware/response_code.py +163 -163
  54. crawlo/middleware/response_filter.py +136 -136
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/middleware/simple_proxy.py +65 -0
  57. crawlo/mode_manager.py +212 -211
  58. crawlo/network/__init__.py +21 -21
  59. crawlo/network/request.py +379 -338
  60. crawlo/network/response.py +359 -359
  61. crawlo/pipelines/__init__.py +21 -21
  62. crawlo/pipelines/bloom_dedup_pipeline.py +157 -157
  63. crawlo/pipelines/console_pipeline.py +39 -39
  64. crawlo/pipelines/csv_pipeline.py +316 -316
  65. crawlo/pipelines/database_dedup_pipeline.py +223 -223
  66. crawlo/pipelines/json_pipeline.py +218 -218
  67. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  68. crawlo/pipelines/mongo_pipeline.py +131 -131
  69. crawlo/pipelines/mysql_pipeline.py +317 -317
  70. crawlo/pipelines/pipeline_manager.py +74 -62
  71. crawlo/pipelines/redis_dedup_pipeline.py +167 -167
  72. crawlo/project.py +284 -315
  73. crawlo/queue/pqueue.py +37 -37
  74. crawlo/queue/queue_manager.py +379 -378
  75. crawlo/queue/redis_priority_queue.py +306 -306
  76. crawlo/settings/__init__.py +7 -7
  77. crawlo/settings/default_settings.py +216 -220
  78. crawlo/settings/setting_manager.py +175 -122
  79. crawlo/spider/__init__.py +639 -639
  80. crawlo/stats_collector.py +59 -59
  81. crawlo/subscriber.py +129 -129
  82. crawlo/task_manager.py +30 -30
  83. crawlo/templates/crawlo.cfg.tmpl +10 -10
  84. crawlo/templates/project/__init__.py.tmpl +3 -3
  85. crawlo/templates/project/items.py.tmpl +17 -17
  86. crawlo/templates/project/middlewares.py.tmpl +118 -118
  87. crawlo/templates/project/pipelines.py.tmpl +96 -96
  88. crawlo/templates/project/settings.py.tmpl +261 -288
  89. crawlo/templates/project/settings_distributed.py.tmpl +174 -157
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -100
  91. crawlo/templates/project/settings_high_performance.py.tmpl +125 -134
  92. crawlo/templates/project/settings_minimal.py.tmpl +30 -0
  93. crawlo/templates/project/settings_simple.py.tmpl +96 -98
  94. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  95. crawlo/templates/run.py.tmpl +47 -47
  96. crawlo/templates/spider/spider.py.tmpl +143 -143
  97. crawlo/tools/__init__.py +200 -182
  98. crawlo/tools/anti_crawler.py +268 -268
  99. crawlo/tools/authenticated_proxy.py +240 -240
  100. crawlo/{cleaners → tools}/data_formatter.py +225 -225
  101. crawlo/tools/data_validator.py +180 -180
  102. crawlo/tools/date_tools.py +290 -36
  103. crawlo/tools/distributed_coordinator.py +388 -387
  104. crawlo/{cleaners → tools}/encoding_converter.py +127 -126
  105. crawlo/tools/request_tools.py +83 -0
  106. crawlo/tools/retry_mechanism.py +224 -221
  107. crawlo/tools/scenario_adapter.py +262 -262
  108. crawlo/{cleaners → tools}/text_cleaner.py +232 -232
  109. crawlo/utils/__init__.py +35 -35
  110. crawlo/utils/batch_processor.py +259 -259
  111. crawlo/utils/controlled_spider_mixin.py +439 -439
  112. crawlo/utils/db_helper.py +343 -343
  113. crawlo/utils/enhanced_error_handler.py +356 -356
  114. crawlo/utils/env_config.py +142 -142
  115. crawlo/utils/error_handler.py +123 -123
  116. crawlo/utils/func_tools.py +82 -82
  117. crawlo/utils/large_scale_config.py +286 -286
  118. crawlo/utils/large_scale_helper.py +344 -344
  119. crawlo/utils/log.py +146 -128
  120. crawlo/utils/performance_monitor.py +285 -285
  121. crawlo/utils/queue_helper.py +175 -175
  122. crawlo/utils/redis_connection_pool.py +351 -351
  123. crawlo/utils/redis_key_validator.py +198 -198
  124. crawlo/utils/request.py +267 -267
  125. crawlo/utils/request_serializer.py +218 -218
  126. crawlo/utils/spider_loader.py +61 -61
  127. crawlo/utils/system.py +11 -11
  128. crawlo/utils/tools.py +4 -4
  129. crawlo/utils/url.py +39 -39
  130. {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/METADATA +1011 -764
  131. crawlo-1.3.0.dist-info/RECORD +219 -0
  132. examples/__init__.py +7 -7
  133. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  134. tests/__init__.py +7 -7
  135. tests/advanced_tools_example.py +275 -275
  136. tests/authenticated_proxy_example.py +107 -237
  137. tests/cleaners_example.py +160 -160
  138. tests/config_validation_demo.py +143 -103
  139. tests/controlled_spider_example.py +205 -205
  140. tests/date_tools_example.py +180 -180
  141. tests/debug_pipelines.py +67 -0
  142. tests/dynamic_loading_example.py +523 -523
  143. tests/dynamic_loading_test.py +104 -104
  144. tests/env_config_example.py +133 -133
  145. tests/error_handling_example.py +171 -171
  146. tests/redis_key_validation_demo.py +130 -130
  147. tests/request_params_example.py +151 -0
  148. tests/response_improvements_example.py +144 -144
  149. tests/test_advanced_tools.py +148 -148
  150. tests/test_all_redis_key_configs.py +145 -145
  151. tests/test_authenticated_proxy.py +141 -141
  152. tests/test_cleaners.py +54 -54
  153. tests/test_comprehensive.py +146 -146
  154. tests/test_config_consistency.py +80 -80
  155. tests/test_config_merge.py +153 -0
  156. tests/test_config_validator.py +182 -193
  157. tests/test_crawlo_proxy_integration.py +109 -173
  158. tests/test_date_tools.py +123 -123
  159. tests/test_default_header_middleware.py +158 -158
  160. tests/test_distributed.py +65 -0
  161. tests/test_double_crawlo_fix.py +207 -207
  162. tests/test_double_crawlo_fix_simple.py +124 -124
  163. tests/test_download_delay_middleware.py +221 -221
  164. tests/test_downloader_proxy_compatibility.py +268 -268
  165. tests/test_dynamic_downloaders_proxy.py +124 -124
  166. tests/test_dynamic_proxy.py +92 -92
  167. tests/test_dynamic_proxy_config.py +146 -146
  168. tests/test_dynamic_proxy_real.py +109 -109
  169. tests/test_edge_cases.py +303 -303
  170. tests/test_enhanced_error_handler.py +270 -270
  171. tests/test_env_config.py +121 -121
  172. tests/test_error_handler_compatibility.py +112 -112
  173. tests/test_final_validation.py +153 -153
  174. tests/test_framework_env_usage.py +103 -103
  175. tests/test_integration.py +169 -357
  176. tests/test_item_dedup_redis_key.py +122 -122
  177. tests/test_mode_consistency.py +51 -51
  178. tests/test_offsite_middleware.py +221 -221
  179. tests/test_parsel.py +29 -29
  180. tests/test_performance.py +327 -327
  181. tests/test_proxy_api.py +264 -264
  182. tests/test_proxy_health_check.py +32 -32
  183. tests/test_proxy_middleware.py +121 -121
  184. tests/test_proxy_middleware_enhanced.py +216 -216
  185. tests/test_proxy_middleware_integration.py +136 -136
  186. tests/test_proxy_middleware_refactored.py +185 -0
  187. tests/test_proxy_providers.py +56 -56
  188. tests/test_proxy_stats.py +19 -19
  189. tests/test_proxy_strategies.py +59 -59
  190. tests/test_queue_manager_double_crawlo.py +173 -173
  191. tests/test_queue_manager_redis_key.py +176 -176
  192. tests/test_random_user_agent.py +73 -0
  193. tests/test_real_scenario_proxy.py +195 -195
  194. tests/test_redis_config.py +28 -28
  195. tests/test_redis_connection_pool.py +294 -294
  196. tests/test_redis_key_naming.py +181 -181
  197. tests/test_redis_key_validator.py +123 -123
  198. tests/test_redis_queue.py +224 -224
  199. tests/test_request_ignore_middleware.py +182 -182
  200. tests/test_request_params.py +112 -0
  201. tests/test_request_serialization.py +70 -70
  202. tests/test_response_code_middleware.py +349 -349
  203. tests/test_response_filter_middleware.py +427 -427
  204. tests/test_response_improvements.py +152 -152
  205. tests/test_retry_middleware.py +241 -241
  206. tests/test_scheduler.py +252 -252
  207. tests/test_scheduler_config_update.py +133 -133
  208. tests/test_simple_response.py +61 -61
  209. tests/test_telecom_spider_redis_key.py +205 -205
  210. tests/test_template_content.py +87 -87
  211. tests/test_template_redis_key.py +134 -134
  212. tests/test_tools.py +159 -153
  213. tests/test_user_agents.py +97 -0
  214. tests/tools_example.py +260 -257
  215. tests/verify_distributed.py +117 -0
  216. crawlo/cleaners/__init__.py +0 -61
  217. crawlo/utils/date_tools.py +0 -290
  218. crawlo-1.2.8.dist-info/RECORD +0 -209
  219. {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/WHEEL +0 -0
  220. {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/entry_points.txt +0 -0
  221. {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/top_level.txt +0 -0
@@ -1,368 +1,386 @@
- #!/usr/bin/python
- # -*- coding: UTF-8 -*-
- import time
- import asyncio
- import socket
- from urllib.parse import urlparse
- from typing import Optional, Dict, Any, Callable, Union, TYPE_CHECKING, List
-
-
- from crawlo import Request, Response
- from crawlo.exceptions import NotConfiguredError
- from crawlo.utils.log import get_logger
-
- if TYPE_CHECKING:
-     import aiohttp
-
- try:
-     import httpx
-     HTTPX_EXCEPTIONS = (httpx.NetworkError, httpx.TimeoutException, httpx.ReadError, httpx.ConnectError)
- except ImportError:
-     HTTPX_EXCEPTIONS = ()
-     httpx = None
-
- try:
-     import aiohttp
-     AIOHTTP_EXCEPTIONS = (
-         aiohttp.ClientError, aiohttp.ClientConnectorError, aiohttp.ClientResponseError, aiohttp.ServerTimeoutError,
-         aiohttp.ServerDisconnectedError)
- except ImportError:
-     AIOHTTP_EXCEPTIONS = ()
-     aiohttp = None
-
- try:
-     from curl_cffi import requests as cffi_requests
-     CURL_CFFI_EXCEPTIONS = (cffi_requests.RequestsError,)
- except (ImportError, AttributeError):
-     CURL_CFFI_EXCEPTIONS = ()
-     cffi_requests = None
-
- NETWORK_EXCEPTIONS = (
-     asyncio.TimeoutError,
-     socket.gaierror,
-     ConnectionError,
-     TimeoutError,
- ) + HTTPX_EXCEPTIONS + AIOHTTP_EXCEPTIONS + CURL_CFFI_EXCEPTIONS
-
- ProxyExtractor = Callable[[Dict[str, Any]], Union[None, str, Dict[str, str]]]
-
- class Proxy:
-     """Proxy object holding proxy info and usage statistics."""
-     def __init__(self, proxy_str: str):
-         self.proxy_str = proxy_str
-         self.success_count = 0
-         self.failure_count = 0
-         self.last_used_time = 0.0
-         self.is_healthy = True
-
-     @property
-     def success_rate(self) -> float:
-         """Compute the proxy's success rate."""
-         total = self.success_count + self.failure_count
-         if total == 0:
-             return 1.0
-         return self.success_count / total
-
-     def mark_success(self):
-         """Mark a successful use of the proxy."""
-         self.success_count += 1
-         self.last_used_time = time.time()
-         self.is_healthy = True
-
-     def mark_failure(self):
-         """Mark a failed use of the proxy."""
-         self.failure_count += 1
-         self.last_used_time = time.time()
-         # Mark unhealthy if the failure rate is too high
-         if self.failure_count > 3 and self.success_rate < 0.5:
-             self.is_healthy = False
-
- class ProxyMiddleware:
-     def __init__(self, settings, log_level):
-         self.logger = get_logger(self.__class__.__name__, log_level)
-
-         self._session: Optional[Any] = None  # aiohttp.ClientSession when aiohttp is available
-         # Replace the single proxy with a proxy pool
-         self._proxy_pool: List[Proxy] = []
-         self._current_proxy_index: int = 0
-         self._last_fetch_time: float = 0
-
-         self.proxy_extractor = settings.get("PROXY_EXTRACTOR", "proxy")
-         self.refresh_interval = settings.get_float("PROXY_REFRESH_INTERVAL", 60)
-         self.timeout = settings.get_float("PROXY_API_TIMEOUT", 10)
-         # New setting: proxy pool size
-         self.proxy_pool_size = settings.get_int("PROXY_POOL_SIZE", 5)
-         # New setting: health check threshold
-         self.health_check_threshold = settings.get_float("PROXY_HEALTH_CHECK_THRESHOLD", 0.5)
-
-         self.enabled = settings.get_bool("PROXY_ENABLED", True)
-
-         if not self.enabled:
-             self.logger.info("ProxyMiddleware disabled (PROXY_ENABLED=False)")
-             return
-
-         self.api_url = settings.get("PROXY_API_URL")
-         if not self.api_url:
-             raise NotConfiguredError("PROXY_API_URL not configured, ProxyMiddleware disabled")
-
-         self.logger.info(f"Proxy middleware enabled | API: {self.api_url} | Refresh interval: {self.refresh_interval}s | Proxy pool size: {self.proxy_pool_size}")
-
-     @classmethod
-     def create_instance(cls, crawler):
-         return cls(settings=crawler.settings, log_level=crawler.settings.get("LOG_LEVEL"))
-
-     def _compile_extractor(self) -> ProxyExtractor:
-         if callable(self.proxy_extractor):
-             return self.proxy_extractor
-
-         if isinstance(self.proxy_extractor, str):
-             keys = self.proxy_extractor.split(".")
-
-             def extract(data: Dict[str, Any]) -> Union[None, str, Dict[str, str]]:
-                 for k in keys:
-                     if isinstance(data, dict):
-                         data = data.get(k)
-                     else:
-                         return None
-                     if data is None:
-                         break
-                 return data
-
-             return extract
-
-         raise ValueError(f"PROXY_EXTRACTOR must be a str or callable, got: {type(self.proxy_extractor)}")
-
-     async def _close_session(self):
-         if self._session:
-             try:
-                 await self._session.close()
-                 self.logger.debug("aiohttp session closed.")
-             except Exception as e:
-                 self.logger.warning(f"Error closing aiohttp session: {e}")
-             finally:
-                 self._session = None
-
-     async def _get_session(self) -> Any:  # returns aiohttp.ClientSession when aiohttp is available
-         if aiohttp is None:
-             raise RuntimeError("aiohttp not installed, cannot use ProxyMiddleware")
-
-         if self._session is None or self._session.closed:
-             if self._session and self._session.closed:
-                 self.logger.debug("Existing session closed, creating new session...")
-             timeout = aiohttp.ClientTimeout(total=self.timeout)
-             self._session = aiohttp.ClientSession(timeout=timeout)
-             self.logger.debug("New aiohttp session created.")
-         return self._session
-
-     async def _fetch_raw_data(self) -> Optional[Dict[str, Any]]:
-         max_retries = 2
-         retry_count = 0
-
-         while retry_count <= max_retries:
-             session = await self._get_session()
-             try:
-                 async with session.get(self.api_url) as resp:
-                     content_type = resp.content_type.lower()
-                     if 'application/json' not in content_type:
-                         self.logger.warning(f"Proxy API returned non-JSON content type: {content_type} (URL: {self.api_url})")
-                         try:
-                             text = await resp.text()
-                             return {"__raw_text__": text.strip(), "__content_type__": content_type}
-                         except Exception as e:
-                             self.logger.error(f"Failed to read non-JSON response body: {repr(e)}")
-                             return None
-
-                     if resp.status != 200:
-                         try:
-                             error_text = await resp.text()
-                         except:
-                             error_text = "<Unable to read response body>"
-                         self.logger.error(f"Proxy API status code error: {resp.status}, Response body: {error_text}")
-                         if 400 <= resp.status < 500:
-                             return None
-                         return None
-
-                     return await resp.json()
-
-             except NETWORK_EXCEPTIONS as e:
-                 retry_count += 1
-                 self.logger.warning(f"Failed to request proxy API (attempt {retry_count}/{max_retries + 1}): {repr(e)}")
-                 if retry_count <= max_retries:
-                     self.logger.info("Closing and rebuilding session for retry...")
-                     await self._close_session()
-                 else:
-                     self.logger.error(f"Failed to request proxy API, maximum retry attempts reached ({max_retries + 1}): {repr(e)}")
-                     return None
-
-             except aiohttp.ContentTypeError as e:
-                 self.logger.error(f"Proxy API response content type error: {repr(e)}")
-                 return None
-
-             except Exception as e:
-                 self.logger.critical(f"Unexpected error occurred while requesting proxy API: {repr(e)}", exc_info=True)
-                 return None
-
-         return None
-
-     async def _extract_proxy(self, data: Dict[str, Any]) -> Optional[Union[str, Dict[str, str]]]:
-         extractor = self._compile_extractor()
-         try:
-             result = extractor(data)
-             if isinstance(result, str) and result.strip():
-                 return result.strip()
-             elif isinstance(result, dict):
-                 cleaned = {k: v.strip() if isinstance(v, str) else v for k, v in result.items()}
-                 return cleaned if cleaned else None
-             return None
-         except Exception as e:
-             self.logger.error(f"Error executing PROXY_EXTRACTOR: {repr(e)}")
-             return None
-
-     async def _get_proxy_from_api(self) -> Optional[Union[str, Dict[str, str]]]:
-         raw_data = await self._fetch_raw_data()
-         if not raw_data:
-             return None
-
-         if "__raw_text__" in raw_data:
-             text = raw_data["__raw_text__"]
-             if text.startswith("http://") or text.startswith("https://"):
-                 return text
-
-         return await self._extract_proxy(raw_data)
-
-     async def _update_proxy_pool(self):
-         """Refresh the proxy pool."""
-         if not self.enabled:
-             self.logger.debug("ProxyMiddleware disabled, skipping proxy fetch.")
-             return
-
-         now = asyncio.get_event_loop().time()
-         if (now - self._last_fetch_time) < self.refresh_interval:
-             return
-
-         # Fetch a fresh list of proxies
-         proxy_data = await self._get_proxy_from_api()
-         if not proxy_data:
-             self.logger.warning("Failed to get new proxies, proxy pool will remain unchanged.")
-             return
-
-         # Parse the proxy data
-         new_proxies = []
-         if isinstance(proxy_data, str):
-             # A single proxy
-             new_proxies = [proxy_data]
-         elif isinstance(proxy_data, dict):
-             # For a dict, try to extract a list of proxies
-             for key, value in proxy_data.items():
-                 if isinstance(value, str) and (value.startswith("http://") or value.startswith("https://")):
-                     new_proxies.append(value)
-                 elif isinstance(value, list):
-                     # If the value is a list, add every valid proxy
-                     for item in value:
-                         if isinstance(item, str) and (item.startswith("http://") or item.startswith("https://")):
-                             new_proxies.append(item)
-
-         # Build the new proxy pool
-         if new_proxies:
-             self._proxy_pool = [Proxy(proxy_str) for proxy_str in new_proxies[:self.proxy_pool_size]]
-             self._current_proxy_index = 0
-             self._last_fetch_time = now
-             self.logger.info(f"Updated proxy pool, added {len(self._proxy_pool)} proxies")
-         else:
-             self.logger.warning("No valid proxies parsed, proxy pool will remain unchanged.")
-
-     async def _get_healthy_proxy(self) -> Optional[Proxy]:
-         """Get a healthy proxy from the pool."""
-         if not self._proxy_pool:
-             await self._update_proxy_pool()
-
-         if not self._proxy_pool:
-             return None
-
-         # Look for healthy proxies
-         healthy_proxies = [p for p in self._proxy_pool if p.is_healthy and p.success_rate >= self.health_check_threshold]
-
-         if not healthy_proxies:
-             # If none are healthy, try refreshing the pool
-             await self._update_proxy_pool()
-             healthy_proxies = [p for p in self._proxy_pool if p.is_healthy and p.success_rate >= self.health_check_threshold]
-
-         if not healthy_proxies:
-             return None
-
-         # Select a proxy round-robin
-         self._current_proxy_index = (self._current_proxy_index + 1) % len(healthy_proxies)
-         selected_proxy = healthy_proxies[self._current_proxy_index]
-         return selected_proxy
-
-     @staticmethod
-     def _is_https(request: Request) -> bool:
-         return urlparse(request.url).scheme == "https"
-
-     async def process_request(self, request: Request, spider) -> Optional[Request]:
-         if not self.enabled:
-             self.logger.debug(f"ProxyMiddleware disabled, request will connect directly: {request.url}")
-             return None
-
-         if request.proxy:
-             return None
-
-         proxy_obj = await self._get_healthy_proxy()
-         if proxy_obj:
-             proxy = proxy_obj.proxy_str
-             # Handle proxy URLs that carry credentials
-             if isinstance(proxy, str) and "@" in proxy and "://" in proxy:
-                 # Parse the authenticated proxy URL
-                 parsed = urlparse(proxy)
-                 if parsed.username and parsed.password:
-                     # The aiohttp downloader needs special handling for the credentials
-                     downloader_type = spider.crawler.settings.get("DOWNLOADER_TYPE", "aiohttp")
-                     if downloader_type == "aiohttp":
-                         # Store the credentials in meta for the downloader to handle
-                         request.meta["proxy_auth"] = {
-                             "username": parsed.username,
-                             "password": parsed.password
-                         }
-                         # Strip the credentials from the URL
-                         clean_proxy = f"{parsed.scheme}://{parsed.hostname}"
-                         if parsed.port:
-                             clean_proxy += f":{parsed.port}"
-                         request.proxy = clean_proxy
-                     else:
-                         # Other downloaders can use the authenticated URL directly
-                         request.proxy = proxy
-                 else:
-                     request.proxy = proxy
-             else:
-                 request.proxy = proxy
-
-             # Record which proxy was used
-             request.meta["_used_proxy"] = proxy_obj
-             self.logger.info(f"Assigned proxy → {proxy} | {request.url}")
-         else:
-             self.logger.warning(f"No proxy obtained, request connecting directly: {request.url}")
-
-         return None
-
-     def process_response(self, request: Request, response: Response, spider) -> Response:
-         proxy_obj = request.meta.get("_used_proxy")
-         if proxy_obj and isinstance(proxy_obj, Proxy):
-             proxy_obj.mark_success()
-             status_code = getattr(response, 'status_code', 'N/A')
-             self.logger.debug(f"Proxy success: {proxy_obj.proxy_str} | {request.url} | Status: {status_code}")
-         elif request.proxy:
-             status_code = getattr(response, 'status_code', 'N/A')
-             self.logger.debug(f"Proxy success: {request.proxy} | {request.url} | Status: {status_code}")
-         return response
-
-     def process_exception(self, request: Request, exception: Exception, spider) -> Optional[Request]:
-         proxy_obj = request.meta.get("_used_proxy")
-         if proxy_obj and isinstance(proxy_obj, Proxy):
-             proxy_obj.mark_failure()
-             self.logger.warning(f"Proxy request failed: {proxy_obj.proxy_str} | {request.url} | {repr(exception)}")
-         elif request.proxy:
-             self.logger.warning(f"Proxy request failed: {request.proxy} | {request.url} | {repr(exception)}")
-         return None
-
-     async def close(self):
-         await self._close_session()
+ #!/usr/bin/python
+ # -*- coding: UTF-8 -*-
+ import time
+ import asyncio
+ import socket
+ from urllib.parse import urlparse
+ from typing import Optional, Dict, Any, Callable, Union, TYPE_CHECKING, List
+
+ from crawlo import Request, Response
+ from crawlo.exceptions import NotConfiguredError
+ from crawlo.utils.log import get_logger
+
+ if TYPE_CHECKING:
+     import aiohttp
+
+ try:
+     import httpx
+
+     HTTPX_EXCEPTIONS = (httpx.NetworkError, httpx.TimeoutException, httpx.ReadError, httpx.ConnectError)
+ except ImportError:
+     HTTPX_EXCEPTIONS = ()
+     httpx = None
+
+ try:
+     import aiohttp
+
+     AIOHTTP_EXCEPTIONS = (
+         aiohttp.ClientError, aiohttp.ClientConnectorError, aiohttp.ClientResponseError, aiohttp.ServerTimeoutError,
+         aiohttp.ServerDisconnectedError)
+ except ImportError:
+     AIOHTTP_EXCEPTIONS = ()
+     aiohttp = None
+
+ try:
+     from curl_cffi import requests as cffi_requests
+
+     CURL_CFFI_EXCEPTIONS = (cffi_requests.RequestsError,)
+ except (ImportError, AttributeError):
+     CURL_CFFI_EXCEPTIONS = ()
+     cffi_requests = None
+
+ NETWORK_EXCEPTIONS = (
+     asyncio.TimeoutError,
+     socket.gaierror,
+     ConnectionError,
+     TimeoutError,
+ ) + HTTPX_EXCEPTIONS + AIOHTTP_EXCEPTIONS + CURL_CFFI_EXCEPTIONS
+
+ ProxyExtractor = Callable[[Dict[str, Any]], Union[None, str, Dict[str, str]]]
+
+
+ class Proxy:
+     """Proxy object holding proxy info and usage statistics."""
+
+     def __init__(self, proxy_str: str):
+         self.proxy_str = proxy_str
+         self.success_count = 0
+         self.failure_count = 0
+         self.last_used_time = 0.0
+         self.is_healthy = True
+
+     @property
+     def success_rate(self) -> float:
+         """Compute the proxy's success rate."""
+         total = self.success_count + self.failure_count
+         if total == 0:
+             return 1.0
+         return self.success_count / total
+
+     def mark_success(self):
+         """Mark a successful use of the proxy."""
+         self.success_count += 1
+         self.last_used_time = time.time()
+         self.is_healthy = True
+
+     def mark_failure(self):
+         """Mark a failed use of the proxy."""
+         self.failure_count += 1
+         self.last_used_time = time.time()
+         # Mark unhealthy if the failure rate is too high
+         if self.failure_count > 3 and self.success_rate < 0.5:
+             self.is_healthy = False
+
+
+ class ProxyMiddleware:
+     def __init__(self, settings, log_level):
+         self.logger = get_logger(self.__class__.__name__, log_level)
+
+         self._session: Optional[Any] = None  # aiohttp.ClientSession when aiohttp is available
+         # Replace the single proxy with a proxy pool
+         self._proxy_pool: List[Proxy] = []
+         self._current_proxy_index: int = 0
+         self._last_fetch_time: float = 0
+
+         self.proxy_extractor = settings.get("PROXY_EXTRACTOR", "proxy")
+         self.refresh_interval = settings.get_float("PROXY_REFRESH_INTERVAL", 60)
+         self.timeout = settings.get_float("PROXY_API_TIMEOUT", 10)
+         # New setting: proxy pool size
+         self.proxy_pool_size = settings.get_int("PROXY_POOL_SIZE", 5)
+         # New setting: health check threshold
+         self.health_check_threshold = settings.get_float("PROXY_HEALTH_CHECK_THRESHOLD", 0.5)
+
+         self.enabled = settings.get_bool("PROXY_ENABLED", True)
+
+         if not self.enabled:
+             self.logger.info("ProxyMiddleware disabled")
+             return
+
+         self.api_url = settings.get("PROXY_API_URL")
+         if not self.api_url:
+             raise NotConfiguredError("PROXY_API_URL not configured, ProxyMiddleware disabled")
+
+         self.logger.info(
+             f"Proxy middleware enabled | API: {self.api_url} | Refresh interval: {self.refresh_interval}s | Proxy pool size: {self.proxy_pool_size}")
+
+     @classmethod
+     def create_instance(cls, crawler):
+         return cls(settings=crawler.settings, log_level=crawler.settings.get("LOG_LEVEL"))
+
+     def _compile_extractor(self) -> ProxyExtractor:
+         if callable(self.proxy_extractor):
+             return self.proxy_extractor
+
+         if isinstance(self.proxy_extractor, str):
+             keys = self.proxy_extractor.split(".")
+
+             def extract(data: Dict[str, Any]) -> Union[None, str, Dict[str, str]]:
+                 for k in keys:
+                     if isinstance(data, dict):
+                         data = data.get(k)
+                     else:
+                         return None
+                     if data is None:
+                         break
+                 return data
+
+             return extract
+
+         raise ValueError(f"PROXY_EXTRACTOR must be a str or callable, got: {type(self.proxy_extractor)}")
+
+     async def _close_session(self):
+         if self._session:
+             try:
+                 await self._session.close()
+                 self.logger.debug("aiohttp session closed.")
+             except Exception as e:
+                 self.logger.warning(f"Error closing aiohttp session: {e}")
+             finally:
+                 self._session = None
+
+     async def _get_session(self) -> Any:  # returns aiohttp.ClientSession when aiohttp is available
+         if aiohttp is None:
+             raise RuntimeError("aiohttp not installed, cannot use ProxyMiddleware")
+
+         if self._session is None or self._session.closed:
+             if self._session and self._session.closed:
+                 self.logger.debug("Existing session closed, creating new session...")
+             timeout = aiohttp.ClientTimeout(total=self.timeout)
+             self._session = aiohttp.ClientSession(timeout=timeout)
+             self.logger.debug("New aiohttp session created.")
+         return self._session
+
+     async def _fetch_raw_data(self) -> Optional[Dict[str, Any]]:
+         max_retries = 2
+         retry_count = 0
+
+         while retry_count <= max_retries:
+             session = await self._get_session()
+             try:
+                 async with session.get(self.api_url) as resp:
+                     content_type = resp.content_type.lower()
+                     if 'application/json' not in content_type:
+                         self.logger.warning(
+                             f"Proxy API returned non-JSON content type: {content_type} (URL: {self.api_url})")
+                         try:
+                             text = await resp.text()
+                             return {"__raw_text__": text.strip(), "__content_type__": content_type}
+                         except Exception as e:
+                             self.logger.error(f"Failed to read non-JSON response body: {repr(e)}")
+                             return None
+
+                     if resp.status != 200:
+                         try:
+                             error_text = await resp.text()
+                         except:
+                             error_text = "<Unable to read response body>"
+                         self.logger.error(f"Proxy API status code error: {resp.status}, Response body: {error_text}")
+                         if 400 <= resp.status < 500:
+                             return None
+                         return None
+
+                     return await resp.json()
+
+             except NETWORK_EXCEPTIONS as e:
+                 retry_count += 1
+                 self.logger.warning(f"Failed to request proxy API (attempt {retry_count}/{max_retries + 1}): {repr(e)}")
+                 if retry_count <= max_retries:
+                     self.logger.info("Closing and rebuilding session for retry...")
+                     await self._close_session()
+                 else:
+                     self.logger.error(
+                         f"Failed to request proxy API, maximum retry attempts reached ({max_retries + 1}): {repr(e)}")
+                     return None
+
+             except aiohttp.ContentTypeError as e:
+                 self.logger.error(f"Proxy API response content type error: {repr(e)}")
+                 return None
+
+             except Exception as e:
+                 self.logger.critical(f"Unexpected error occurred while requesting proxy API: {repr(e)}", exc_info=True)
+                 return None
+
+         return None
+
+     async def _extract_proxy(self, data: Dict[str, Any]) -> Optional[Union[str, Dict[str, str]]]:
+         extractor = self._compile_extractor()
+         try:
+             result = extractor(data)
+             if isinstance(result, str) and result.strip():
+                 return result.strip()
+             elif isinstance(result, dict):
+                 cleaned = {k: v.strip() if isinstance(v, str) else v for k, v in result.items()}
+                 return cleaned if cleaned else None
+             return None
+         except Exception as e:
+             self.logger.error(f"Error executing PROXY_EXTRACTOR: {repr(e)}")
+             return None
+
+     async def _get_proxy_from_api(self) -> Optional[Union[str, Dict[str, str]]]:
+         raw_data = await self._fetch_raw_data()
+         if not raw_data:
+             return None
+
+         if "__raw_text__" in raw_data:
+             text = raw_data["__raw_text__"]
+             if text.startswith("http://") or text.startswith("https://"):
+                 return text
+
+         return await self._extract_proxy(raw_data)
+
+     def _parse_proxy_data(self, proxy_data: Union[str, Dict[str, Any]]) -> List[str]:
+         """Parse proxy data and extract a list of proxy URLs."""
+         new_proxies = []
+         if isinstance(proxy_data, str):
+             # A single proxy
+             if proxy_data.startswith("http://") or proxy_data.startswith("https://"):
+                 new_proxies = [proxy_data]
+         elif isinstance(proxy_data, dict):
+             # For a dict, try to extract a list of proxies
+             for key, value in proxy_data.items():
+                 if isinstance(value, str) and (value.startswith("http://") or value.startswith("https://")):
+                     new_proxies.append(value)
+                 elif isinstance(value, list):
+                     # If the value is a list, add every valid proxy
+                     for item in value:
+                         if isinstance(item, str) and (item.startswith("http://") or item.startswith("https://")):
+                             new_proxies.append(item)
+         return new_proxies
+
+     def _get_healthy_proxies(self) -> List[Proxy]:
+         """Return all healthy proxies."""
+         return [p for p in self._proxy_pool if p.is_healthy and p.success_rate >= self.health_check_threshold]
+
+     async def _update_proxy_pool(self):
+         """Refresh the proxy pool."""
+         if not self.enabled:
+             self.logger.debug("ProxyMiddleware disabled, skipping proxy fetch.")
+             return
+
+         now = asyncio.get_event_loop().time()
+         if (now - self._last_fetch_time) < self.refresh_interval:
+             return
+
+         # Fetch a fresh list of proxies
+         proxy_data = await self._get_proxy_from_api()
+         if not proxy_data:
+             self.logger.warning("Failed to get new proxies, proxy pool will remain unchanged.")
+             return
+
+         # Parse the proxy data
+         new_proxies = self._parse_proxy_data(proxy_data)
+
+         # Build the new proxy pool
+         if new_proxies:
+             self._proxy_pool = [Proxy(proxy_str) for proxy_str in new_proxies[:self.proxy_pool_size]]
+             self._current_proxy_index = 0
+             self._last_fetch_time = now
+             self.logger.info(f"Updated proxy pool, added {len(self._proxy_pool)} proxies")
+         else:
+             self.logger.warning("No valid proxies parsed, proxy pool will remain unchanged.")
+
+     async def _get_healthy_proxy(self) -> Optional[Proxy]:
+         """Get a healthy proxy from the pool."""
+         if not self._proxy_pool:
+             await self._update_proxy_pool()
+
+         if not self._proxy_pool:
+             return None
+
+         # Look for healthy proxies
+         healthy_proxies = self._get_healthy_proxies()
+
+         if not healthy_proxies:
+             # If none are healthy, try refreshing the pool
+             await self._update_proxy_pool()
+             healthy_proxies = self._get_healthy_proxies()
+
+         if not healthy_proxies:
+             return None
+
+         # Select a proxy round-robin
+         self._current_proxy_index = (self._current_proxy_index + 1) % len(healthy_proxies)
+         selected_proxy = healthy_proxies[self._current_proxy_index]
+         return selected_proxy
+
+     @staticmethod
+     def _is_https(request: Request) -> bool:
+         return urlparse(request.url).scheme == "https"
+
+     async def process_request(self, request: Request, spider) -> Optional[Request]:
+         if not self.enabled:
+             self.logger.debug(f"ProxyMiddleware disabled, request will connect directly: {request.url}")
+             return None
+
+         if request.proxy:
+             return None
+
+         proxy_obj = await self._get_healthy_proxy()
+         if proxy_obj:
+             proxy = proxy_obj.proxy_str
+             # Handle proxy URLs that carry credentials
+             if isinstance(proxy, str) and "@" in proxy and "://" in proxy:
+                 # Parse the authenticated proxy URL
+                 parsed = urlparse(proxy)
+                 if parsed.username and parsed.password:
+                     # The aiohttp downloader needs special handling for the credentials
+                     downloader_type = spider.crawler.settings.get("DOWNLOADER_TYPE", "aiohttp")
+                     if downloader_type == "aiohttp":
+                         # Store the credentials in meta for the downloader to handle
+                         request.meta["proxy_auth"] = {
+                             "username": parsed.username,
+                             "password": parsed.password
+                         }
+                         # Strip the credentials from the URL
+                         clean_proxy = f"{parsed.scheme}://{parsed.hostname}"
+                         if parsed.port:
+                             clean_proxy += f":{parsed.port}"
+                         request.proxy = clean_proxy
+                     else:
+                         # Other downloaders can use the authenticated URL directly
+                         request.proxy = proxy
+                 else:
+                     request.proxy = proxy
+             else:
+                 request.proxy = proxy
+
+             # Record which proxy was used
+             request.meta["_used_proxy"] = proxy_obj
+             self.logger.info(f"Assigned proxy → {proxy} | {request.url}")
+         else:
+             self.logger.warning(f"No proxy obtained, request connecting directly: {request.url}")
+
+         return None
+
+     def process_response(self, request: Request, response: Response, spider) -> Response:
+         proxy_obj = request.meta.get("_used_proxy")
+         if proxy_obj and isinstance(proxy_obj, Proxy):
+             proxy_obj.mark_success()
+             status_code = getattr(response, 'status_code', 'N/A')
+             self.logger.debug(f"Proxy success: {proxy_obj.proxy_str} | {request.url} | Status: {status_code}")
+         elif request.proxy:
+             status_code = getattr(response, 'status_code', 'N/A')
+             self.logger.debug(f"Proxy success: {request.proxy} | {request.url} | Status: {status_code}")
+         return response
+
+     def process_exception(self, request: Request, exception: Exception, spider) -> Optional[Request]:
+         proxy_obj = request.meta.get("_used_proxy")
+         if proxy_obj and isinstance(proxy_obj, Proxy):
+             proxy_obj.mark_failure()
+             self.logger.warning(f"Proxy request failed: {proxy_obj.proxy_str} | {request.url} | {repr(exception)}")
+         elif request.proxy:
+             self.logger.warning(f"Proxy request failed: {request.proxy} | {request.url} | {repr(exception)}")
+         return None
+
+     async def close(self):
+         await self._close_session()
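
The substantive change in this diff is the crawlo/middleware/proxy.py refactor: proxy parsing and health filtering move into the new _parse_proxy_data() and _get_healthy_proxies() helpers, and a single-string proxy is now validated against an http(s) scheme before entering the pool. For reference, a minimal configuration sketch — the setting names are the ones read in ProxyMiddleware.__init__ above, but the concrete values and the API URL are illustrative assumptions, not defaults shipped with crawlo:

# settings.py — illustrative values only; the setting names come from the diff above
PROXY_ENABLED = True
PROXY_API_URL = "https://proxy-provider.example.com/api/fetch"  # hypothetical provider endpoint
PROXY_EXTRACTOR = "data.proxies"       # dotted path into the provider's JSON response
PROXY_REFRESH_INTERVAL = 60            # seconds between proxy pool refreshes
PROXY_API_TIMEOUT = 10                 # total aiohttp client timeout, in seconds
PROXY_POOL_SIZE = 5                    # proxies kept per refresh
PROXY_HEALTH_CHECK_THRESHOLD = 0.5     # minimum success_rate for a proxy to be selected

The health bookkeeping itself can be checked in isolation. Per the Proxy class above, a proxy only becomes unhealthy after more than three failures with a success rate below 0.5, and any single success restores it (the proxy URL below is a placeholder):

p = Proxy("http://127.0.0.1:8080")  # hypothetical proxy URL
for _ in range(4):
    p.mark_failure()                # 4 failures, 0 successes -> success_rate == 0.0
assert not p.is_healthy             # failure_count > 3 and success_rate < 0.5
p.mark_success()                    # mark_success() always resets is_healthy
assert p.is_healthy and abs(p.success_rate - 0.2) < 1e-9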