crawlo 1.3.3__py3-none-any.whl → 1.3.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of crawlo might be problematic.

Files changed (279)
  1. crawlo/__init__.py +87 -63
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/help.py +138 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +341 -323
  10. crawlo/commands/startproject.py +436 -436
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +196 -196
  13. crawlo/config.py +312 -312
  14. crawlo/config_validator.py +277 -277
  15. crawlo/core/__init__.py +46 -2
  16. crawlo/core/engine.py +439 -365
  17. crawlo/core/processor.py +40 -40
  18. crawlo/core/scheduler.py +257 -256
  19. crawlo/crawler.py +639 -1167
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -194
  22. crawlo/downloader/__init__.py +273 -273
  23. crawlo/downloader/aiohttp_downloader.py +228 -226
  24. crawlo/downloader/cffi_downloader.py +245 -245
  25. crawlo/downloader/httpx_downloader.py +259 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +402 -402
  28. crawlo/downloader/selenium_downloader.py +472 -472
  29. crawlo/event.py +11 -11
  30. crawlo/exceptions.py +81 -81
  31. crawlo/extension/__init__.py +39 -39
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +57 -57
  34. crawlo/extension/log_stats.py +81 -81
  35. crawlo/extension/logging_extension.py +61 -52
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/factories/__init__.py +28 -0
  40. crawlo/factories/base.py +69 -0
  41. crawlo/factories/crawler.py +104 -0
  42. crawlo/factories/registry.py +85 -0
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +257 -234
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/framework.py +292 -0
  47. crawlo/initialization/__init__.py +40 -0
  48. crawlo/initialization/built_in.py +426 -0
  49. crawlo/initialization/context.py +142 -0
  50. crawlo/initialization/core.py +194 -0
  51. crawlo/initialization/phases.py +149 -0
  52. crawlo/initialization/registry.py +146 -0
  53. crawlo/items/__init__.py +23 -23
  54. crawlo/items/base.py +23 -22
  55. crawlo/items/fields.py +52 -52
  56. crawlo/items/items.py +104 -104
  57. crawlo/logging/__init__.py +38 -0
  58. crawlo/logging/config.py +97 -0
  59. crawlo/logging/factory.py +129 -0
  60. crawlo/logging/manager.py +112 -0
  61. crawlo/middleware/__init__.py +21 -21
  62. crawlo/middleware/default_header.py +132 -132
  63. crawlo/middleware/download_delay.py +104 -104
  64. crawlo/middleware/middleware_manager.py +135 -135
  65. crawlo/middleware/offsite.py +123 -123
  66. crawlo/middleware/proxy.py +386 -386
  67. crawlo/middleware/request_ignore.py +86 -86
  68. crawlo/middleware/response_code.py +163 -163
  69. crawlo/middleware/response_filter.py +136 -136
  70. crawlo/middleware/retry.py +124 -124
  71. crawlo/middleware/simple_proxy.py +65 -65
  72. crawlo/mode_manager.py +212 -187
  73. crawlo/network/__init__.py +21 -21
  74. crawlo/network/request.py +379 -379
  75. crawlo/network/response.py +359 -359
  76. crawlo/pipelines/__init__.py +21 -21
  77. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  78. crawlo/pipelines/console_pipeline.py +39 -39
  79. crawlo/pipelines/csv_pipeline.py +316 -316
  80. crawlo/pipelines/database_dedup_pipeline.py +222 -222
  81. crawlo/pipelines/json_pipeline.py +218 -218
  82. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  83. crawlo/pipelines/mongo_pipeline.py +131 -131
  84. crawlo/pipelines/mysql_pipeline.py +318 -318
  85. crawlo/pipelines/pipeline_manager.py +76 -75
  86. crawlo/pipelines/redis_dedup_pipeline.py +166 -166
  87. crawlo/project.py +327 -325
  88. crawlo/queue/pqueue.py +43 -37
  89. crawlo/queue/queue_manager.py +503 -379
  90. crawlo/queue/redis_priority_queue.py +326 -306
  91. crawlo/settings/__init__.py +7 -7
  92. crawlo/settings/default_settings.py +321 -225
  93. crawlo/settings/setting_manager.py +214 -198
  94. crawlo/spider/__init__.py +657 -639
  95. crawlo/stats_collector.py +73 -59
  96. crawlo/subscriber.py +129 -129
  97. crawlo/task_manager.py +139 -30
  98. crawlo/templates/crawlo.cfg.tmpl +10 -10
  99. crawlo/templates/project/__init__.py.tmpl +3 -3
  100. crawlo/templates/project/items.py.tmpl +17 -17
  101. crawlo/templates/project/middlewares.py.tmpl +118 -118
  102. crawlo/templates/project/pipelines.py.tmpl +96 -96
  103. crawlo/templates/project/settings.py.tmpl +168 -267
  104. crawlo/templates/project/settings_distributed.py.tmpl +167 -180
  105. crawlo/templates/project/settings_gentle.py.tmpl +167 -61
  106. crawlo/templates/project/settings_high_performance.py.tmpl +168 -131
  107. crawlo/templates/project/settings_minimal.py.tmpl +66 -35
  108. crawlo/templates/project/settings_simple.py.tmpl +165 -102
  109. crawlo/templates/project/spiders/__init__.py.tmpl +10 -6
  110. crawlo/templates/run.py.tmpl +34 -38
  111. crawlo/templates/spider/spider.py.tmpl +143 -143
  112. crawlo/templates/spiders_init.py.tmpl +10 -0
  113. crawlo/tools/__init__.py +200 -200
  114. crawlo/tools/anti_crawler.py +268 -268
  115. crawlo/tools/authenticated_proxy.py +240 -240
  116. crawlo/tools/data_formatter.py +225 -225
  117. crawlo/tools/data_validator.py +180 -180
  118. crawlo/tools/date_tools.py +289 -289
  119. crawlo/tools/distributed_coordinator.py +388 -388
  120. crawlo/tools/encoding_converter.py +127 -127
  121. crawlo/tools/network_diagnostic.py +365 -0
  122. crawlo/tools/request_tools.py +82 -82
  123. crawlo/tools/retry_mechanism.py +224 -224
  124. crawlo/tools/scenario_adapter.py +262 -262
  125. crawlo/tools/text_cleaner.py +232 -232
  126. crawlo/utils/__init__.py +34 -34
  127. crawlo/utils/batch_processor.py +259 -259
  128. crawlo/utils/class_loader.py +26 -0
  129. crawlo/utils/controlled_spider_mixin.py +439 -439
  130. crawlo/utils/db_helper.py +343 -343
  131. crawlo/utils/enhanced_error_handler.py +356 -356
  132. crawlo/utils/env_config.py +142 -142
  133. crawlo/utils/error_handler.py +165 -124
  134. crawlo/utils/func_tools.py +82 -82
  135. crawlo/utils/large_scale_config.py +286 -286
  136. crawlo/utils/large_scale_helper.py +344 -344
  137. crawlo/utils/log.py +44 -200
  138. crawlo/utils/performance_monitor.py +285 -285
  139. crawlo/utils/queue_helper.py +175 -175
  140. crawlo/utils/redis_connection_pool.py +388 -351
  141. crawlo/utils/redis_key_validator.py +198 -198
  142. crawlo/utils/request.py +267 -267
  143. crawlo/utils/request_serializer.py +225 -218
  144. crawlo/utils/spider_loader.py +61 -61
  145. crawlo/utils/system.py +11 -11
  146. crawlo/utils/tools.py +4 -4
  147. crawlo/utils/url.py +39 -39
  148. {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/METADATA +1126 -1020
  149. crawlo-1.3.4.dist-info/RECORD +278 -0
  150. examples/__init__.py +7 -7
  151. tests/__init__.py +7 -7
  152. tests/advanced_tools_example.py +275 -275
  153. tests/authenticated_proxy_example.py +107 -107
  154. tests/baidu_performance_test.py +109 -0
  155. tests/baidu_test.py +60 -0
  156. tests/cleaners_example.py +160 -160
  157. tests/comprehensive_framework_test.py +213 -0
  158. tests/comprehensive_test.py +82 -0
  159. tests/comprehensive_testing_summary.md +187 -0
  160. tests/config_validation_demo.py +142 -142
  161. tests/controlled_spider_example.py +205 -205
  162. tests/date_tools_example.py +180 -180
  163. tests/debug_configure.py +70 -0
  164. tests/debug_framework_logger.py +85 -0
  165. tests/debug_log_levels.py +64 -0
  166. tests/debug_pipelines.py +66 -66
  167. tests/distributed_test.py +67 -0
  168. tests/distributed_test_debug.py +77 -0
  169. tests/dynamic_loading_example.py +523 -523
  170. tests/dynamic_loading_test.py +104 -104
  171. tests/env_config_example.py +133 -133
  172. tests/error_handling_example.py +171 -171
  173. tests/final_command_test_report.md +0 -0
  174. tests/final_comprehensive_test.py +152 -0
  175. tests/final_validation_test.py +183 -0
  176. tests/framework_performance_test.py +203 -0
  177. tests/optimized_performance_test.py +212 -0
  178. tests/performance_comparison.py +246 -0
  179. tests/queue_blocking_test.py +114 -0
  180. tests/queue_test.py +90 -0
  181. tests/redis_key_validation_demo.py +130 -130
  182. tests/request_params_example.py +150 -150
  183. tests/response_improvements_example.py +144 -144
  184. tests/scrapy_comparison/ofweek_scrapy.py +139 -0
  185. tests/scrapy_comparison/scrapy_test.py +134 -0
  186. tests/simple_command_test.py +120 -0
  187. tests/simple_crawlo_test.py +128 -0
  188. tests/simple_log_test.py +58 -0
  189. tests/simple_optimization_test.py +129 -0
  190. tests/simple_spider_test.py +50 -0
  191. tests/simple_test.py +48 -0
  192. tests/test_advanced_tools.py +148 -148
  193. tests/test_all_commands.py +231 -0
  194. tests/test_all_redis_key_configs.py +145 -145
  195. tests/test_authenticated_proxy.py +141 -141
  196. tests/test_batch_processor.py +179 -0
  197. tests/test_cleaners.py +54 -54
  198. tests/test_component_factory.py +175 -0
  199. tests/test_comprehensive.py +146 -146
  200. tests/test_config_consistency.py +80 -80
  201. tests/test_config_merge.py +152 -152
  202. tests/test_config_validator.py +182 -182
  203. tests/test_controlled_spider_mixin.py +80 -0
  204. tests/test_crawlo_proxy_integration.py +108 -108
  205. tests/test_date_tools.py +123 -123
  206. tests/test_default_header_middleware.py +158 -158
  207. tests/test_distributed.py +65 -65
  208. tests/test_double_crawlo_fix.py +207 -207
  209. tests/test_double_crawlo_fix_simple.py +124 -124
  210. tests/test_download_delay_middleware.py +221 -221
  211. tests/test_downloader_proxy_compatibility.py +268 -268
  212. tests/test_dynamic_downloaders_proxy.py +124 -124
  213. tests/test_dynamic_proxy.py +92 -92
  214. tests/test_dynamic_proxy_config.py +146 -146
  215. tests/test_dynamic_proxy_real.py +109 -109
  216. tests/test_edge_cases.py +303 -303
  217. tests/test_enhanced_error_handler.py +270 -270
  218. tests/test_enhanced_error_handler_comprehensive.py +246 -0
  219. tests/test_env_config.py +121 -121
  220. tests/test_error_handler_compatibility.py +112 -112
  221. tests/test_factories.py +253 -0
  222. tests/test_final_validation.py +153 -153
  223. tests/test_framework_env_usage.py +103 -103
  224. tests/test_framework_logger.py +67 -0
  225. tests/test_framework_startup.py +65 -0
  226. tests/test_integration.py +169 -169
  227. tests/test_item_dedup_redis_key.py +122 -122
  228. tests/test_large_scale_config.py +113 -0
  229. tests/test_large_scale_helper.py +236 -0
  230. tests/test_mode_change.py +73 -0
  231. tests/test_mode_consistency.py +51 -51
  232. tests/test_offsite_middleware.py +221 -221
  233. tests/test_parsel.py +29 -29
  234. tests/test_performance.py +327 -327
  235. tests/test_performance_monitor.py +116 -0
  236. tests/test_proxy_api.py +264 -264
  237. tests/test_proxy_health_check.py +32 -32
  238. tests/test_proxy_middleware.py +121 -121
  239. tests/test_proxy_middleware_enhanced.py +216 -216
  240. tests/test_proxy_middleware_integration.py +136 -136
  241. tests/test_proxy_middleware_refactored.py +184 -184
  242. tests/test_proxy_providers.py +56 -56
  243. tests/test_proxy_stats.py +19 -19
  244. tests/test_proxy_strategies.py +59 -59
  245. tests/test_queue_empty_check.py +42 -0
  246. tests/test_queue_manager_double_crawlo.py +173 -173
  247. tests/test_queue_manager_redis_key.py +176 -176
  248. tests/test_random_user_agent.py +72 -72
  249. tests/test_real_scenario_proxy.py +195 -195
  250. tests/test_redis_config.py +28 -28
  251. tests/test_redis_connection_pool.py +294 -294
  252. tests/test_redis_key_naming.py +181 -181
  253. tests/test_redis_key_validator.py +123 -123
  254. tests/test_redis_queue.py +224 -224
  255. tests/test_request_ignore_middleware.py +182 -182
  256. tests/test_request_params.py +111 -111
  257. tests/test_request_serialization.py +70 -70
  258. tests/test_response_code_middleware.py +349 -349
  259. tests/test_response_filter_middleware.py +427 -427
  260. tests/test_response_improvements.py +152 -152
  261. tests/test_retry_middleware.py +241 -241
  262. tests/test_scheduler.py +252 -252
  263. tests/test_scheduler_config_update.py +133 -133
  264. tests/test_simple_response.py +61 -61
  265. tests/test_telecom_spider_redis_key.py +205 -205
  266. tests/test_template_content.py +87 -87
  267. tests/test_template_redis_key.py +134 -134
  268. tests/test_tools.py +159 -159
  269. tests/test_user_agents.py +96 -96
  270. tests/tools_example.py +260 -260
  271. tests/untested_features_report.md +139 -0
  272. tests/verify_debug.py +52 -0
  273. tests/verify_distributed.py +117 -117
  274. tests/verify_log_fix.py +112 -0
  275. crawlo-1.3.3.dist-info/RECORD +0 -219
  276. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +0 -82
  277. {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/WHEEL +0 -0
  278. {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/entry_points.txt +0 -0
  279. {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/top_level.txt +0 -0
crawlo/filters/aioredis_filter.py
@@ -1,234 +1,257 @@
  from typing import Optional
  import redis.asyncio as aioredis

  from crawlo.filters import BaseFilter
  from crawlo.utils.log import get_logger
  from crawlo.utils.request import request_fingerprint
  from crawlo.utils.redis_connection_pool import get_redis_pool


  class AioRedisFilter(BaseFilter):
      """
      Asynchronous request-deduplication filter backed by a Redis set.

      Features:
      - deduplication data shared across distributed crawler nodes
      - automatic TTL-based expiry and cleanup
      - pipelined batch operations for performance
      - fault-tolerant design with connection-pool management

      Suitable for:
      - distributed crawler systems
      - large-scale data processing
      - scenarios that need persistent deduplication
      """

      def __init__(
          self,
          redis_key: str,
          client: aioredis.Redis,
          stats: dict,
          debug: bool = False,
          log_level: str = 'INFO',
          cleanup_fp: bool = False,
          ttl: Optional[int] = None
      ):
          """
          Initialize the Redis filter.

          :param redis_key: Redis key under which fingerprints are stored
          :param client: Redis client instance (may be None and initialized later)
          :param stats: statistics store
          :param debug: whether to enable debug mode
          :param log_level: log level
          :param cleanup_fp: whether to clear fingerprints on close
          :param ttl: fingerprint expiry time in seconds
          """
          self.logger = get_logger(self.__class__.__name__, log_level)
          super().__init__(self.logger, stats, debug)

          self.redis_key = redis_key
          self.redis = client
          self.cleanup_fp = cleanup_fp
          self.ttl = ttl

          # Connection pool reference (for lazy initialization)
          self._redis_pool = None

          # Performance counters
          self._redis_operations = 0
          self._pipeline_operations = 0

          # Connection-state flag, so a failed Redis is not retried repeatedly
          self._connection_failed = False

      @classmethod
      def create_instance(cls, crawler) -> 'BaseFilter':
          """Create a filter instance from the crawler configuration."""
          redis_url = crawler.settings.get('REDIS_URL', 'redis://localhost:6379')
          # Keep decode_responses=False to avoid encoding problems
          decode_responses = False  # crawler.settings.get_bool('DECODE_RESPONSES', False)
          ttl_setting = crawler.settings.get_int('REDIS_TTL')

          # Normalize the TTL setting
          ttl = None
          if ttl_setting is not None:
              ttl = max(0, int(ttl_setting)) if ttl_setting > 0 else None

          try:
              # Use the optimized connection pool; decode_responses stays False
              redis_pool = get_redis_pool(
                  redis_url,
                  max_connections=20,
                  socket_connect_timeout=5,
                  socket_timeout=30,
                  health_check_interval=30,
                  retry_on_timeout=True,
                  decode_responses=decode_responses,  # never auto-decode responses
                  encoding='utf-8'
              )

              # Note: no await here, because create_instance is not an async
              # method; the connection is acquired when it is actually used
              redis_client = None  # lazy initialization
          except Exception as e:
              raise RuntimeError(f"Redis connection pool initialization failed: {redis_url} - {str(e)}")

          # Unified Redis key naming scheme: crawlo:{project_name}:filter:fingerprint
          project_name = crawler.settings.get('PROJECT_NAME', 'default')
          redis_key = f"crawlo:{project_name}:filter:fingerprint"

          instance = cls(
              redis_key=redis_key,
              client=redis_client,
              stats=crawler.stats,
              cleanup_fp=crawler.settings.get_bool('CLEANUP_FP', False),
              ttl=ttl,
              debug=crawler.settings.get_bool('FILTER_DEBUG', False),
              log_level=crawler.settings.get('LOG_LEVEL', 'INFO')
          )

          # Keep a pool reference so a connection can be fetched when needed
          instance._redis_pool = redis_pool
          return instance

      async def _get_redis_client(self):
          """Return the Redis client instance (lazily initialized)."""
          # If the connection failed before, return None immediately
          if self._connection_failed:
              return None

          if self.redis is None and self._redis_pool is not None:
              try:
                  self.redis = await self._redis_pool.get_connection()
              except Exception as e:
                  self._connection_failed = True
                  self.logger.error(f"Redis connection failed, falling back to local deduplication: {e}")
                  return None
          return self.redis

      async def requested(self, request) -> bool:
          """
          Check whether the request has been seen before (optimized version).

          :param request: request object
          :return: True for a duplicate, False for a new request
          """
          try:
              # Make sure the Redis client is initialized
              redis_client = await self._get_redis_client()

              # If Redis is unavailable, report "not duplicate" so no request is lost
              if redis_client is None:
                  return False

              fp = str(request_fingerprint(request))
              self._redis_operations += 1

              # Use a pipeline for better performance
              pipe = redis_client.pipeline()
              pipe.sismember(self.redis_key, fp)

              results = await pipe.execute()
              exists = results[0]

              self._pipeline_operations += 1

              if exists:
                  if self.debug:
                      self.logger.debug(f"Duplicate request found: {fp[:20]}...")
                  return True

              # Unknown fingerprint: add it and apply the TTL
              await self.add_fingerprint(fp)
              return False

          except Exception as e:
              self.logger.error(f"Request check failed: {getattr(request, 'url', 'unknown URL')} - {e}")
              # Return False on network errors so no request is lost
              return False

      async def add_fingerprint(self, fp: str) -> bool:
          """
          Add a new fingerprint to the Redis set (optimized version).

          :param fp: request fingerprint string
          :return: True if newly added, False if it already existed
          """
          try:
              # Make sure the Redis client is initialized
              redis_client = await self._get_redis_client()

              # If Redis is unavailable, report the add as failed
              if redis_client is None:
                  return False

              fp = str(fp)

              # Use a pipeline for better performance
              pipe = redis_client.pipeline()
              pipe.sadd(self.redis_key, fp)

              if self.ttl and self.ttl > 0:
                  pipe.expire(self.redis_key, self.ttl)

              results = await pipe.execute()
              added = results[0] == 1  # sadd returns 1 for a newly added member

              self._pipeline_operations += 1

              if self.debug and added:
                  self.logger.debug(f"Added new fingerprint: {fp[:20]}...")

              return added

          except Exception as e:
              self.logger.error(f"Failed to add fingerprint: {fp[:20]}... - {e}")
              return False

-     async def __contains__(self, fp: str) -> bool:
-         """
-         Check whether a fingerprint exists in the Redis set.
-
-         :param fp: request fingerprint string
-         :return: whether it exists
-         """
-         try:
-             # Make sure the Redis client is initialized
-             redis_client = await self._get_redis_client()
-
-             # If Redis is unavailable, report the fingerprint as absent
-             if redis_client is None:
-                 return False
-
-             # Check whether the fingerprint exists
-             exists = await redis_client.sismember(self.redis_key, str(fp))
-             return exists
-         except Exception as e:
-             self.logger.error(f"Fingerprint existence check failed: {fp[:20]}... - {e}")
-             # Return False on network errors so no request is lost
-             return False
+     def __contains__(self, fp: str) -> bool:
+         """
+         Check whether a fingerprint exists in the Redis set (synchronous).
+
+         Note: the magic method __contains__ cannot be asynchronous in
+         Python, so this method only provides a synchronous interface for
+         basic existence checks. For scenarios that need an asynchronous
+         check, use the contains_async() method instead.
+
+         :param fp: request fingerprint string
+         :return: whether it exists
+         """
+         # Because __contains__ cannot be async, only a basic synchronous
+         # check is possible. If the Redis client is not initialized,
+         # return False.
+         if self.redis is None:
+             return False
+
+         # A real Redis query cannot be performed synchronously here, so
+         # return False to avoid blocking the caller. The real asynchronous
+         # check is contains_async().
+         return False
+
+     async def contains_async(self, fp: str) -> bool:
+         """
+         Asynchronously check whether a fingerprint exists in the Redis set.
+
+         This is the real asynchronous check and should be preferred over
+         __contains__.
+
+         :param fp: request fingerprint string
+         :return: whether it exists
+         """
+         try:
+             # Make sure the Redis client is initialized
+             redis_client = await self._get_redis_client()
+
+             # If Redis is unavailable, report the fingerprint as absent
+             if redis_client is None:
+                 return False
+
+             # Check whether the fingerprint exists
+             exists = await redis_client.sismember(self.redis_key, str(fp))
+             return exists
+         except Exception as e:
+             self.logger.error(f"Fingerprint existence check failed: {fp[:20]}... - {e}")
+             # Return False on network errors so no request is lost
+             return False


  # Exported for compatibility
  __all__ = ['AioRedisFilter']
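
The only substantive change in this file is the __contains__ rework; the rest of the hunk is unchanged context. In 1.3.3 the magic method was declared async def, so a plain `fp in filter` check evaluated the truthiness of an un-awaited coroutine: it always came back True (with a "coroutine was never awaited" RuntimeWarning) and never queried Redis. In 1.3.4, __contains__ is a synchronous stub that always returns False, and the real lookup moves to contains_async(). Below is a minimal sketch of the new API, with hypothetical wiring (a direct redis.asyncio client and an empty stats dict standing in for what create_instance() normally builds), assuming a Redis instance at redis://localhost:6379:

    import asyncio
    import redis.asyncio as aioredis

    from crawlo.filters.aioredis_filter import AioRedisFilter


    async def main():
        # Hypothetical wiring for illustration only; in the framework,
        # create_instance() builds the key, pool, and stats from settings.
        client = aioredis.from_url("redis://localhost:6379", decode_responses=False)
        dedup = AioRedisFilter(
            redis_key="crawlo:default:filter:fingerprint",
            client=client,
            stats={},
        )

        fp = "0123456789abcdef"
        await dedup.add_fingerprint(fp)

        # 1.3.4 behaviour: `in` is synchronous, never touches Redis, and
        # always reports False, even for a fingerprint just stored.
        print(fp in dedup)                     # False

        # The awaitable method performs the actual SISMEMBER lookup.
        print(await dedup.contains_async(fp))  # True


    asyncio.run(main())

Callers should therefore not rely on the `in` operator for membership after this release; under 1.3.3 it was already broken in the opposite direction (always True), which is presumably what motivated the change.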