crawlo 1.2.8-py3-none-any.whl → 1.3.0-py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.

Potentially problematic release.

Files changed (221)
  1. crawlo/__init__.py +63 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/help.py +138 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +314 -323
  10. crawlo/commands/startproject.py +436 -436
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +186 -186
  13. crawlo/config.py +312 -312
  14. crawlo/config_validator.py +277 -251
  15. crawlo/core/__init__.py +2 -2
  16. crawlo/core/engine.py +365 -365
  17. crawlo/core/processor.py +40 -40
  18. crawlo/core/scheduler.py +256 -251
  19. crawlo/crawler.py +1097 -1099
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -107
  22. crawlo/downloader/__init__.py +273 -266
  23. crawlo/downloader/aiohttp_downloader.py +226 -228
  24. crawlo/downloader/cffi_downloader.py +245 -256
  25. crawlo/downloader/httpx_downloader.py +259 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +402 -402
  28. crawlo/downloader/selenium_downloader.py +472 -472
  29. crawlo/event.py +11 -11
  30. crawlo/exceptions.py +81 -81
  31. crawlo/extension/__init__.py +39 -39
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +57 -57
  34. crawlo/extension/log_stats.py +81 -81
  35. crawlo/extension/logging_extension.py +45 -43
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/filters/__init__.py +154 -154
  40. crawlo/filters/aioredis_filter.py +234 -234
  41. crawlo/filters/memory_filter.py +269 -269
  42. crawlo/items/__init__.py +23 -23
  43. crawlo/items/base.py +21 -21
  44. crawlo/items/fields.py +52 -52
  45. crawlo/items/items.py +104 -104
  46. crawlo/middleware/__init__.py +21 -21
  47. crawlo/middleware/default_header.py +132 -132
  48. crawlo/middleware/download_delay.py +104 -104
  49. crawlo/middleware/middleware_manager.py +136 -136
  50. crawlo/middleware/offsite.py +114 -114
  51. crawlo/middleware/proxy.py +386 -368
  52. crawlo/middleware/request_ignore.py +86 -86
  53. crawlo/middleware/response_code.py +163 -163
  54. crawlo/middleware/response_filter.py +136 -136
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/middleware/simple_proxy.py +65 -0
  57. crawlo/mode_manager.py +212 -211
  58. crawlo/network/__init__.py +21 -21
  59. crawlo/network/request.py +379 -338
  60. crawlo/network/response.py +359 -359
  61. crawlo/pipelines/__init__.py +21 -21
  62. crawlo/pipelines/bloom_dedup_pipeline.py +157 -157
  63. crawlo/pipelines/console_pipeline.py +39 -39
  64. crawlo/pipelines/csv_pipeline.py +316 -316
  65. crawlo/pipelines/database_dedup_pipeline.py +223 -223
  66. crawlo/pipelines/json_pipeline.py +218 -218
  67. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  68. crawlo/pipelines/mongo_pipeline.py +131 -131
  69. crawlo/pipelines/mysql_pipeline.py +317 -317
  70. crawlo/pipelines/pipeline_manager.py +74 -62
  71. crawlo/pipelines/redis_dedup_pipeline.py +167 -167
  72. crawlo/project.py +284 -315
  73. crawlo/queue/pqueue.py +37 -37
  74. crawlo/queue/queue_manager.py +379 -378
  75. crawlo/queue/redis_priority_queue.py +306 -306
  76. crawlo/settings/__init__.py +7 -7
  77. crawlo/settings/default_settings.py +216 -220
  78. crawlo/settings/setting_manager.py +175 -122
  79. crawlo/spider/__init__.py +639 -639
  80. crawlo/stats_collector.py +59 -59
  81. crawlo/subscriber.py +129 -129
  82. crawlo/task_manager.py +30 -30
  83. crawlo/templates/crawlo.cfg.tmpl +10 -10
  84. crawlo/templates/project/__init__.py.tmpl +3 -3
  85. crawlo/templates/project/items.py.tmpl +17 -17
  86. crawlo/templates/project/middlewares.py.tmpl +118 -118
  87. crawlo/templates/project/pipelines.py.tmpl +96 -96
  88. crawlo/templates/project/settings.py.tmpl +261 -288
  89. crawlo/templates/project/settings_distributed.py.tmpl +174 -157
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -100
  91. crawlo/templates/project/settings_high_performance.py.tmpl +125 -134
  92. crawlo/templates/project/settings_minimal.py.tmpl +30 -0
  93. crawlo/templates/project/settings_simple.py.tmpl +96 -98
  94. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  95. crawlo/templates/run.py.tmpl +47 -47
  96. crawlo/templates/spider/spider.py.tmpl +143 -143
  97. crawlo/tools/__init__.py +200 -182
  98. crawlo/tools/anti_crawler.py +268 -268
  99. crawlo/tools/authenticated_proxy.py +240 -240
  100. crawlo/{cleaners → tools}/data_formatter.py +225 -225
  101. crawlo/tools/data_validator.py +180 -180
  102. crawlo/tools/date_tools.py +290 -36
  103. crawlo/tools/distributed_coordinator.py +388 -387
  104. crawlo/{cleaners → tools}/encoding_converter.py +127 -126
  105. crawlo/tools/request_tools.py +83 -0
  106. crawlo/tools/retry_mechanism.py +224 -221
  107. crawlo/tools/scenario_adapter.py +262 -262
  108. crawlo/{cleaners → tools}/text_cleaner.py +232 -232
  109. crawlo/utils/__init__.py +35 -35
  110. crawlo/utils/batch_processor.py +259 -259
  111. crawlo/utils/controlled_spider_mixin.py +439 -439
  112. crawlo/utils/db_helper.py +343 -343
  113. crawlo/utils/enhanced_error_handler.py +356 -356
  114. crawlo/utils/env_config.py +142 -142
  115. crawlo/utils/error_handler.py +123 -123
  116. crawlo/utils/func_tools.py +82 -82
  117. crawlo/utils/large_scale_config.py +286 -286
  118. crawlo/utils/large_scale_helper.py +344 -344
  119. crawlo/utils/log.py +146 -128
  120. crawlo/utils/performance_monitor.py +285 -285
  121. crawlo/utils/queue_helper.py +175 -175
  122. crawlo/utils/redis_connection_pool.py +351 -351
  123. crawlo/utils/redis_key_validator.py +198 -198
  124. crawlo/utils/request.py +267 -267
  125. crawlo/utils/request_serializer.py +218 -218
  126. crawlo/utils/spider_loader.py +61 -61
  127. crawlo/utils/system.py +11 -11
  128. crawlo/utils/tools.py +4 -4
  129. crawlo/utils/url.py +39 -39
  130. {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/METADATA +1011 -764
  131. crawlo-1.3.0.dist-info/RECORD +219 -0
  132. examples/__init__.py +7 -7
  133. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  134. tests/__init__.py +7 -7
  135. tests/advanced_tools_example.py +275 -275
  136. tests/authenticated_proxy_example.py +107 -237
  137. tests/cleaners_example.py +160 -160
  138. tests/config_validation_demo.py +143 -103
  139. tests/controlled_spider_example.py +205 -205
  140. tests/date_tools_example.py +180 -180
  141. tests/debug_pipelines.py +67 -0
  142. tests/dynamic_loading_example.py +523 -523
  143. tests/dynamic_loading_test.py +104 -104
  144. tests/env_config_example.py +133 -133
  145. tests/error_handling_example.py +171 -171
  146. tests/redis_key_validation_demo.py +130 -130
  147. tests/request_params_example.py +151 -0
  148. tests/response_improvements_example.py +144 -144
  149. tests/test_advanced_tools.py +148 -148
  150. tests/test_all_redis_key_configs.py +145 -145
  151. tests/test_authenticated_proxy.py +141 -141
  152. tests/test_cleaners.py +54 -54
  153. tests/test_comprehensive.py +146 -146
  154. tests/test_config_consistency.py +80 -80
  155. tests/test_config_merge.py +153 -0
  156. tests/test_config_validator.py +182 -193
  157. tests/test_crawlo_proxy_integration.py +109 -173
  158. tests/test_date_tools.py +123 -123
  159. tests/test_default_header_middleware.py +158 -158
  160. tests/test_distributed.py +65 -0
  161. tests/test_double_crawlo_fix.py +207 -207
  162. tests/test_double_crawlo_fix_simple.py +124 -124
  163. tests/test_download_delay_middleware.py +221 -221
  164. tests/test_downloader_proxy_compatibility.py +268 -268
  165. tests/test_dynamic_downloaders_proxy.py +124 -124
  166. tests/test_dynamic_proxy.py +92 -92
  167. tests/test_dynamic_proxy_config.py +146 -146
  168. tests/test_dynamic_proxy_real.py +109 -109
  169. tests/test_edge_cases.py +303 -303
  170. tests/test_enhanced_error_handler.py +270 -270
  171. tests/test_env_config.py +121 -121
  172. tests/test_error_handler_compatibility.py +112 -112
  173. tests/test_final_validation.py +153 -153
  174. tests/test_framework_env_usage.py +103 -103
  175. tests/test_integration.py +169 -357
  176. tests/test_item_dedup_redis_key.py +122 -122
  177. tests/test_mode_consistency.py +51 -51
  178. tests/test_offsite_middleware.py +221 -221
  179. tests/test_parsel.py +29 -29
  180. tests/test_performance.py +327 -327
  181. tests/test_proxy_api.py +264 -264
  182. tests/test_proxy_health_check.py +32 -32
  183. tests/test_proxy_middleware.py +121 -121
  184. tests/test_proxy_middleware_enhanced.py +216 -216
  185. tests/test_proxy_middleware_integration.py +136 -136
  186. tests/test_proxy_middleware_refactored.py +185 -0
  187. tests/test_proxy_providers.py +56 -56
  188. tests/test_proxy_stats.py +19 -19
  189. tests/test_proxy_strategies.py +59 -59
  190. tests/test_queue_manager_double_crawlo.py +173 -173
  191. tests/test_queue_manager_redis_key.py +176 -176
  192. tests/test_random_user_agent.py +73 -0
  193. tests/test_real_scenario_proxy.py +195 -195
  194. tests/test_redis_config.py +28 -28
  195. tests/test_redis_connection_pool.py +294 -294
  196. tests/test_redis_key_naming.py +181 -181
  197. tests/test_redis_key_validator.py +123 -123
  198. tests/test_redis_queue.py +224 -224
  199. tests/test_request_ignore_middleware.py +182 -182
  200. tests/test_request_params.py +112 -0
  201. tests/test_request_serialization.py +70 -70
  202. tests/test_response_code_middleware.py +349 -349
  203. tests/test_response_filter_middleware.py +427 -427
  204. tests/test_response_improvements.py +152 -152
  205. tests/test_retry_middleware.py +241 -241
  206. tests/test_scheduler.py +252 -252
  207. tests/test_scheduler_config_update.py +133 -133
  208. tests/test_simple_response.py +61 -61
  209. tests/test_telecom_spider_redis_key.py +205 -205
  210. tests/test_template_content.py +87 -87
  211. tests/test_template_redis_key.py +134 -134
  212. tests/test_tools.py +159 -153
  213. tests/test_user_agents.py +97 -0
  214. tests/tools_example.py +260 -257
  215. tests/verify_distributed.py +117 -0
  216. crawlo/cleaners/__init__.py +0 -61
  217. crawlo/utils/date_tools.py +0 -290
  218. crawlo-1.2.8.dist-info/RECORD +0 -209
  219. {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/WHEEL +0 -0
  220. {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/entry_points.txt +0 -0
  221. {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/top_level.txt +0 -0
crawlo/filters/aioredis_filter.py
@@ -1,234 +1,234 @@
- from typing import Optional
- import redis.asyncio as aioredis
-
- from crawlo.filters import BaseFilter
- from crawlo.utils.log import get_logger
- from crawlo.utils.request import request_fingerprint
- from crawlo.utils.redis_connection_pool import get_redis_pool
-
-
- class AioRedisFilter(BaseFilter):
-     """
-     Asynchronous request-deduplication filter backed by a Redis set.
-
-     Features:
-     - Dedup data shared across the nodes of a distributed crawler
-     - Automatic cleanup via TTL expiry
-     - Pipeline batching for better performance
-     - Fault-tolerant design with connection-pool management
-
-     Typical scenarios:
-     - Distributed crawler systems
-     - Large-scale data processing
-     - Workloads that need persistent deduplication
-     """
-
-     def __init__(
-         self,
-         redis_key: str,
-         client: aioredis.Redis,
-         stats: dict,
-         debug: bool = False,
-         log_level: str = 'INFO',
-         cleanup_fp: bool = False,
-         ttl: Optional[int] = None
-     ):
-         """
-         Initialize the Redis filter.
-
-         :param redis_key: Redis key under which fingerprints are stored
-         :param client: Redis client instance (may be None and initialized later)
-         :param stats: statistics store
-         :param debug: whether to enable debug mode
-         :param log_level: log level
-         :param cleanup_fp: whether to clear fingerprints on close
-         :param ttl: fingerprint expiry time in seconds
-         """
-         self.logger = get_logger(self.__class__.__name__, log_level)
-         super().__init__(self.logger, stats, debug)
-
-         self.redis_key = redis_key
-         self.redis = client
-         self.cleanup_fp = cleanup_fp
-         self.ttl = ttl
-
-         # Connection-pool reference (used for lazy initialization)
-         self._redis_pool = None
-
-         # Performance counters
-         self._redis_operations = 0
-         self._pipeline_operations = 0
-
-         # Connection-state flag; avoids retrying a Redis instance that already failed
-         self._connection_failed = False
-
-     @classmethod
-     def create_instance(cls, crawler) -> 'BaseFilter':
-         """Create a filter instance from the crawler settings."""
-         redis_url = crawler.settings.get('REDIS_URL', 'redis://localhost:6379')
-         # Keep decode_responses=False to avoid encoding issues
-         decode_responses = False  # crawler.settings.get_bool('DECODE_RESPONSES', False)
-         ttl_setting = crawler.settings.get_int('REDIS_TTL')
-
-         # Normalize the TTL setting
-         ttl = None
-         if ttl_setting is not None:
-             ttl = max(0, int(ttl_setting)) if ttl_setting > 0 else None
-
-         try:
-             # Use the optimized connection pool, keeping decode_responses=False
-             redis_pool = get_redis_pool(
-                 redis_url,
-                 max_connections=20,
-                 socket_connect_timeout=5,
-                 socket_timeout=30,
-                 health_check_interval=30,
-                 retry_on_timeout=True,
-                 decode_responses=decode_responses,  # do not auto-decode responses
-                 encoding='utf-8'
-             )
-
-             # Note: no await here, because create_instance is not an async method;
-             # the connection is acquired when it is actually used
-             redis_client = None  # lazy initialization
-         except Exception as e:
-             raise RuntimeError(f"Failed to initialize Redis connection pool: {redis_url} - {str(e)}")
-
-         # Unified Redis key naming convention: crawlo:{project_name}:filter:fingerprint
-         project_name = crawler.settings.get('PROJECT_NAME', 'default')
-         redis_key = f"crawlo:{project_name}:filter:fingerprint"
-
-         instance = cls(
-             redis_key=redis_key,
-             client=redis_client,
-             stats=crawler.stats,
-             cleanup_fp=crawler.settings.get_bool('CLEANUP_FP', False),
-             ttl=ttl,
-             debug=crawler.settings.get_bool('FILTER_DEBUG', False),
-             log_level=crawler.settings.get('LOG_LEVEL', 'INFO')
-         )
-
-         # Keep the pool reference so a connection can be acquired when needed
-         instance._redis_pool = redis_pool
-         return instance
-
-     async def _get_redis_client(self):
-         """Return the Redis client instance (lazy initialization)."""
-         # If a previous connection attempt failed, return None immediately
-         if self._connection_failed:
-             return None
-
-         if self.redis is None and self._redis_pool is not None:
-             try:
-                 self.redis = await self._redis_pool.get_connection()
-             except Exception as e:
-                 self._connection_failed = True
-                 self.logger.error(f"Redis connection failed, falling back to local dedup: {e}")
-                 return None
-         return self.redis
-
-     async def requested(self, request) -> bool:
-         """
-         Check whether a request has already been seen (optimized version).
-
-         :param request: request object
-         :return: True for a duplicate, False for a new request
-         """
-         try:
-             # Make sure the Redis client is initialized
-             redis_client = await self._get_redis_client()
-
-             # If Redis is unavailable, return False (not a duplicate) to avoid losing requests
-             if redis_client is None:
-                 return False
-
-             fp = str(request_fingerprint(request))
-             self._redis_operations += 1
-
-             # Use a pipeline for better performance
-             pipe = redis_client.pipeline()
-             pipe.sismember(self.redis_key, fp)
-
-             results = await pipe.execute()
-             exists = results[0]
-
-             self._pipeline_operations += 1
-
-             if exists:
-                 if self.debug:
-                     self.logger.debug(f"Duplicate request found: {fp[:20]}...")
-                 return True
-
-             # Not seen before: add the fingerprint and set the TTL
-             await self.add_fingerprint(fp)
-             return False
-
-         except Exception as e:
-             self.logger.error(f"Request check failed: {getattr(request, 'url', 'unknown URL')} - {e}")
-             # Return False on network errors to avoid losing requests
-             return False
-
-     async def add_fingerprint(self, fp: str) -> bool:
-         """
-         Add a new fingerprint to the Redis set (optimized version).
-
-         :param fp: request fingerprint string
-         :return: True if the fingerprint was newly added, False if it already existed
-         """
-         try:
-             # Make sure the Redis client is initialized
-             redis_client = await self._get_redis_client()
-
-             # If Redis is unavailable, return False to indicate the add failed
-             if redis_client is None:
-                 return False
-
-             fp = str(fp)
-
-             # Use a pipeline for better performance
-             pipe = redis_client.pipeline()
-             pipe.sadd(self.redis_key, fp)
-
-             if self.ttl and self.ttl > 0:
-                 pipe.expire(self.redis_key, self.ttl)
-
-             results = await pipe.execute()
-             added = results[0] == 1  # sadd returns 1 when the member is new
-
-             self._pipeline_operations += 1
-
-             if self.debug and added:
-                 self.logger.debug(f"Added new fingerprint: {fp[:20]}...")
-
-             return added
-
-         except Exception as e:
-             self.logger.error(f"Failed to add fingerprint: {fp[:20]}... - {e}")
-             return False
-
-     async def __contains__(self, fp: str) -> bool:
-         """
-         Check whether a fingerprint exists in the Redis set.
-
-         :param fp: request fingerprint string
-         :return: whether it exists
-         """
-         try:
-             # Make sure the Redis client is initialized
-             redis_client = await self._get_redis_client()
-
-             # If Redis is unavailable, return False (treated as not present)
-             if redis_client is None:
-                 return False
-
-             # Check whether the fingerprint exists
-             exists = await redis_client.sismember(self.redis_key, str(fp))
-             return exists
-         except Exception as e:
-             self.logger.error(f"Fingerprint existence check failed: {fp[:20]}... - {e}")
-             # Return False on network errors to avoid losing requests
-             return False
-
-
- # Export the class explicitly for compatibility
- __all__ = ['AioRedisFilter']
+ from typing import Optional
+ import redis.asyncio as aioredis
+
+ from crawlo.filters import BaseFilter
+ from crawlo.utils.log import get_logger
+ from crawlo.utils.request import request_fingerprint
+ from crawlo.utils.redis_connection_pool import get_redis_pool
+
+
+ class AioRedisFilter(BaseFilter):
+     """
+     Asynchronous request-deduplication filter backed by a Redis set.
+
+     Features:
+     - Dedup data shared across the nodes of a distributed crawler
+     - Automatic cleanup via TTL expiry
+     - Pipeline batching for better performance
+     - Fault-tolerant design with connection-pool management
+
+     Typical scenarios:
+     - Distributed crawler systems
+     - Large-scale data processing
+     - Workloads that need persistent deduplication
+     """
+
+     def __init__(
+         self,
+         redis_key: str,
+         client: aioredis.Redis,
+         stats: dict,
+         debug: bool = False,
+         log_level: str = 'INFO',
+         cleanup_fp: bool = False,
+         ttl: Optional[int] = None
+     ):
+         """
+         Initialize the Redis filter.
+
+         :param redis_key: Redis key under which fingerprints are stored
+         :param client: Redis client instance (may be None and initialized later)
+         :param stats: statistics store
+         :param debug: whether to enable debug mode
+         :param log_level: log level
+         :param cleanup_fp: whether to clear fingerprints on close
+         :param ttl: fingerprint expiry time in seconds
+         """
+         self.logger = get_logger(self.__class__.__name__, log_level)
+         super().__init__(self.logger, stats, debug)
+
+         self.redis_key = redis_key
+         self.redis = client
+         self.cleanup_fp = cleanup_fp
+         self.ttl = ttl
+
+         # Connection-pool reference (used for lazy initialization)
+         self._redis_pool = None
+
+         # Performance counters
+         self._redis_operations = 0
+         self._pipeline_operations = 0
+
+         # Connection-state flag; avoids retrying a Redis instance that already failed
+         self._connection_failed = False
+
+     @classmethod
+     def create_instance(cls, crawler) -> 'BaseFilter':
+         """Create a filter instance from the crawler settings."""
+         redis_url = crawler.settings.get('REDIS_URL', 'redis://localhost:6379')
+         # Keep decode_responses=False to avoid encoding issues
+         decode_responses = False  # crawler.settings.get_bool('DECODE_RESPONSES', False)
+         ttl_setting = crawler.settings.get_int('REDIS_TTL')
+
+         # Normalize the TTL setting
+         ttl = None
+         if ttl_setting is not None:
+             ttl = max(0, int(ttl_setting)) if ttl_setting > 0 else None
+
+         try:
+             # Use the optimized connection pool, keeping decode_responses=False
+             redis_pool = get_redis_pool(
+                 redis_url,
+                 max_connections=20,
+                 socket_connect_timeout=5,
+                 socket_timeout=30,
+                 health_check_interval=30,
+                 retry_on_timeout=True,
+                 decode_responses=decode_responses,  # do not auto-decode responses
+                 encoding='utf-8'
+             )
+
+             # Note: no await here, because create_instance is not an async method;
+             # the connection is acquired when it is actually used
+             redis_client = None  # lazy initialization
+         except Exception as e:
+             raise RuntimeError(f"Failed to initialize Redis connection pool: {redis_url} - {str(e)}")
+
+         # Unified Redis key naming convention: crawlo:{project_name}:filter:fingerprint
+         project_name = crawler.settings.get('PROJECT_NAME', 'default')
+         redis_key = f"crawlo:{project_name}:filter:fingerprint"
+
+         instance = cls(
+             redis_key=redis_key,
+             client=redis_client,
+             stats=crawler.stats,
+             cleanup_fp=crawler.settings.get_bool('CLEANUP_FP', False),
+             ttl=ttl,
+             debug=crawler.settings.get_bool('FILTER_DEBUG', False),
+             log_level=crawler.settings.get('LOG_LEVEL', 'INFO')
+         )
+
+         # Keep the pool reference so a connection can be acquired when needed
+         instance._redis_pool = redis_pool
+         return instance
+
+     async def _get_redis_client(self):
+         """Return the Redis client instance (lazy initialization)."""
+         # If a previous connection attempt failed, return None immediately
+         if self._connection_failed:
+             return None
+
+         if self.redis is None and self._redis_pool is not None:
+             try:
+                 self.redis = await self._redis_pool.get_connection()
+             except Exception as e:
+                 self._connection_failed = True
+                 self.logger.error(f"Redis connection failed, falling back to local dedup: {e}")
+                 return None
+         return self.redis
+
+     async def requested(self, request) -> bool:
+         """
+         Check whether a request has already been seen (optimized version).
+
+         :param request: request object
+         :return: True for a duplicate, False for a new request
+         """
+         try:
+             # Make sure the Redis client is initialized
+             redis_client = await self._get_redis_client()
+
+             # If Redis is unavailable, return False (not a duplicate) to avoid losing requests
+             if redis_client is None:
+                 return False
+
+             fp = str(request_fingerprint(request))
+             self._redis_operations += 1
+
+             # Use a pipeline for better performance
+             pipe = redis_client.pipeline()
+             pipe.sismember(self.redis_key, fp)
+
+             results = await pipe.execute()
+             exists = results[0]
+
+             self._pipeline_operations += 1
+
+             if exists:
+                 if self.debug:
+                     self.logger.debug(f"Duplicate request found: {fp[:20]}...")
+                 return True
+
+             # Not seen before: add the fingerprint and set the TTL
+             await self.add_fingerprint(fp)
+             return False
+
+         except Exception as e:
+             self.logger.error(f"Request check failed: {getattr(request, 'url', 'unknown URL')} - {e}")
+             # Return False on network errors to avoid losing requests
+             return False
+
+     async def add_fingerprint(self, fp: str) -> bool:
+         """
+         Add a new fingerprint to the Redis set (optimized version).
+
+         :param fp: request fingerprint string
+         :return: True if the fingerprint was newly added, False if it already existed
+         """
+         try:
+             # Make sure the Redis client is initialized
+             redis_client = await self._get_redis_client()
+
+             # If Redis is unavailable, return False to indicate the add failed
+             if redis_client is None:
+                 return False
+
+             fp = str(fp)
+
+             # Use a pipeline for better performance
+             pipe = redis_client.pipeline()
+             pipe.sadd(self.redis_key, fp)
+
+             if self.ttl and self.ttl > 0:
+                 pipe.expire(self.redis_key, self.ttl)
+
+             results = await pipe.execute()
+             added = results[0] == 1  # sadd returns 1 when the member is new
+
+             self._pipeline_operations += 1
+
+             if self.debug and added:
+                 self.logger.debug(f"Added new fingerprint: {fp[:20]}...")
+
+             return added
+
+         except Exception as e:
+             self.logger.error(f"Failed to add fingerprint: {fp[:20]}... - {e}")
+             return False
+
+     async def __contains__(self, fp: str) -> bool:
+         """
+         Check whether a fingerprint exists in the Redis set.
+
+         :param fp: request fingerprint string
+         :return: whether it exists
+         """
+         try:
+             # Make sure the Redis client is initialized
+             redis_client = await self._get_redis_client()
+
+             # If Redis is unavailable, return False (treated as not present)
+             if redis_client is None:
+                 return False
+
+             # Check whether the fingerprint exists
+             exists = await redis_client.sismember(self.redis_key, str(fp))
+             return exists
+         except Exception as e:
+             self.logger.error(f"Fingerprint existence check failed: {fp[:20]}... - {e}")
+             # Return False on network errors to avoid losing requests
+             return False
+
+
+ # Export the class explicitly for compatibility
+ __all__ = ['AioRedisFilter']
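
For reference, the settings that AioRedisFilter.create_instance reads from the crawler configuration can be sketched as below. This is an illustrative example, not part of the diff: the setting names and the key format come from the code above, while the concrete values and the settings-module layout are assumptions.

# Illustrative settings sketch; values are placeholders, names are those read by create_instance above.
PROJECT_NAME = 'myproject'            # fingerprints are stored under crawlo:myproject:filter:fingerprint
REDIS_URL = 'redis://localhost:6379'  # shared dedup store for all crawler nodes
REDIS_TTL = 7 * 24 * 3600             # fingerprint expiry in seconds; unset or <= 0 disables expiry
CLEANUP_FP = False                    # whether fingerprints are cleared when the filter closes
FILTER_DEBUG = False                  # log duplicate hits and newly added fingerprints
LOG_LEVEL = 'INFO'

Note the fail-open behaviour visible in the code: if the Redis connection cannot be established, requested() returns False, so requests are processed rather than silently dropped.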