crawlo-1.4.2-py3-none-any.whl → crawlo-1.4.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- crawlo/__init__.py +93 -93
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +341 -341
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +312 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +438 -439
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +291 -257
- crawlo/crawler.py +650 -650
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +273 -273
- crawlo/downloader/aiohttp_downloader.py +233 -233
- crawlo/downloader/cffi_downloader.py +245 -245
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +63 -63
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +61 -61
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +103 -103
- crawlo/factories/registry.py +84 -84
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +257 -257
- crawlo/filters/memory_filter.py +269 -269
- crawlo/framework.py +292 -292
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +425 -425
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +193 -193
- crawlo/initialization/phases.py +148 -148
- crawlo/initialization/registry.py +145 -145
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +45 -37
- crawlo/logging/async_handler.py +181 -0
- crawlo/logging/config.py +196 -96
- crawlo/logging/factory.py +171 -128
- crawlo/logging/manager.py +111 -111
- crawlo/logging/monitor.py +153 -0
- crawlo/logging/sampler.py +167 -0
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +386 -386
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/middleware/simple_proxy.py +65 -65
- crawlo/mode_manager.py +219 -219
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +379 -379
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +197 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +105 -105
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +325 -325
- crawlo/pipelines/pipeline_manager.py +100 -84
- crawlo/pipelines/redis_dedup_pipeline.py +156 -156
- crawlo/project.py +349 -338
- crawlo/queue/pqueue.py +42 -42
- crawlo/queue/queue_manager.py +526 -522
- crawlo/queue/redis_priority_queue.py +370 -367
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +284 -284
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +657 -657
- crawlo/stats_collector.py +73 -73
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +170 -170
- crawlo/templates/project/settings_distributed.py.tmpl +169 -169
- crawlo/templates/project/settings_gentle.py.tmpl +166 -166
- crawlo/templates/project/settings_high_performance.py.tmpl +167 -167
- crawlo/templates/project/settings_minimal.py.tmpl +65 -65
- crawlo/templates/project/settings_simple.py.tmpl +164 -164
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +34 -34
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/templates/spiders_init.py.tmpl +9 -9
- crawlo/tools/__init__.py +200 -200
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_formatter.py +225 -225
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/encoding_converter.py +127 -127
- crawlo/tools/network_diagnostic.py +364 -364
- crawlo/tools/request_tools.py +82 -82
- crawlo/tools/retry_mechanism.py +224 -224
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +34 -34
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/class_loader.py +25 -25
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +142 -142
- crawlo/utils/error_handler.py +165 -165
- crawlo/utils/fingerprint.py +122 -122
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +79 -79
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +388 -388
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.4.3.dist-info/METADATA +190 -0
- crawlo-1.4.3.dist-info/RECORD +326 -0
- examples/__init__.py +7 -7
- examples/test_project/__init__.py +7 -7
- examples/test_project/run.py +34 -34
- examples/test_project/test_project/__init__.py +3 -3
- examples/test_project/test_project/items.py +17 -17
- examples/test_project/test_project/middlewares.py +118 -118
- examples/test_project/test_project/pipelines.py +96 -96
- examples/test_project/test_project/settings.py +169 -169
- examples/test_project/test_project/spiders/__init__.py +9 -9
- examples/test_project/test_project/spiders/of_week_dis.py +143 -143
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +106 -106
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +245 -245
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +127 -127
- tests/simple_log_test.py +57 -57
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_spider_test.py +49 -49
- tests/simple_test.py +47 -47
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_component_factory.py +174 -174
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawlo_proxy_integration.py +108 -108
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +125 -0
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_framework_env_usage.py +103 -103
- tests/test_framework_logger.py +66 -66
- tests/test_framework_startup.py +64 -64
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_config.py +112 -112
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +375 -0
- tests/test_logging_final.py +185 -0
- tests/test_logging_integration.py +313 -0
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +142 -0
- tests/test_mode_change.py +72 -72
- tests/test_mode_consistency.py +51 -51
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +212 -0
- tests/test_priority_consistency.py +152 -0
- tests/test_priority_consistency_fixed.py +250 -0
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_middleware_refactored.py +184 -184
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +131 -0
- tests/test_random_headers_default.py +323 -0
- tests/test_random_headers_necessity.py +309 -0
- tests/test_random_user_agent.py +72 -72
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +130 -0
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +334 -242
- tests/test_retry_middleware_realistic.py +274 -0
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +177 -0
- tests/test_user_agents.py +96 -96
- tests/tools_example.py +260 -260
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- crawlo-1.4.2.dist-info/METADATA +0 -1199
- crawlo-1.4.2.dist-info/RECORD +0 -309
- {crawlo-1.4.2.dist-info → crawlo-1.4.3.dist-info}/WHEEL +0 -0
- {crawlo-1.4.2.dist-info → crawlo-1.4.3.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.2.dist-info → crawlo-1.4.3.dist-info}/top_level.txt +0 -0
crawlo/filters/aioredis_filter.py

@@ -1,257 +1,257 @@

The hunk removes and re-adds the whole file; the 257 deleted lines and the 257 inserted lines are identical. The file reads:

from typing import Optional
import redis.asyncio as aioredis

from crawlo.filters import BaseFilter
from crawlo.utils.log import get_logger
from crawlo.utils.request import request_fingerprint
from crawlo.utils.redis_connection_pool import get_redis_pool


class AioRedisFilter(BaseFilter):
    """
    Asynchronous request-deduplication filter backed by a Redis set.

    Features:
    - Deduplication data shared across distributed crawler nodes
    - Automatic TTL-based expiry
    - Pipelined batch operations for better performance
    - Fault-tolerant design with connection-pool management

    Suitable for:
    - Distributed crawler systems
    - Large-scale data processing
    - Scenarios that need persistent deduplication
    """

    def __init__(
        self,
        redis_key: str,
        client: aioredis.Redis,
        stats: dict,
        debug: bool = False,
        log_level: str = 'INFO',
        cleanup_fp: bool = False,
        ttl: Optional[int] = None
    ):
        """
        Initialize the Redis filter.

        :param redis_key: Redis key that stores the fingerprints
        :param client: Redis client instance (may be None and initialized later)
        :param stats: statistics store
        :param debug: whether to enable debug mode
        :param log_level: log level
        :param cleanup_fp: whether to clear fingerprints on close
        :param ttl: fingerprint expiry time (seconds)
        """
        self.logger = get_logger(self.__class__.__name__, log_level)
        super().__init__(self.logger, stats, debug)

        self.redis_key = redis_key
        self.redis = client
        self.cleanup_fp = cleanup_fp
        self.ttl = ttl

        # Keep a reference to the connection pool (for lazy initialization)
        self._redis_pool = None

        # Performance counters
        self._redis_operations = 0
        self._pipeline_operations = 0

        # Connection-state flag: avoid retrying a Redis connection that already failed
        self._connection_failed = False

    @classmethod
    def create_instance(cls, crawler) -> 'BaseFilter':
        """Create a filter instance from the crawler settings."""
        redis_url = crawler.settings.get('REDIS_URL', 'redis://localhost:6379')
        # Keep decode_responses=False to avoid encoding problems
        decode_responses = False  # crawler.settings.get_bool('DECODE_RESPONSES', False)
        ttl_setting = crawler.settings.get_int('REDIS_TTL')

        # Normalize the TTL setting
        ttl = None
        if ttl_setting is not None:
            ttl = max(0, int(ttl_setting)) if ttl_setting > 0 else None

        try:
            # Use the optimized connection pool; make sure decode_responses=False
            redis_pool = get_redis_pool(
                redis_url,
                max_connections=20,
                socket_connect_timeout=5,
                socket_timeout=30,
                health_check_interval=30,
                retry_on_timeout=True,
                decode_responses=decode_responses,  # never auto-decode responses
                encoding='utf-8'
            )

            # Note: no await here, because create_instance is not an async method;
            # the connection is acquired when it is actually used.
            redis_client = None  # lazy initialization
        except Exception as e:
            raise RuntimeError(f"Redis connection pool initialization failed: {redis_url} - {str(e)}")

        # Unified Redis key naming scheme: crawlo:{project_name}:filter:fingerprint
        project_name = crawler.settings.get('PROJECT_NAME', 'default')
        redis_key = f"crawlo:{project_name}:filter:fingerprint"

        instance = cls(
            redis_key=redis_key,
            client=redis_client,
            stats=crawler.stats,
            cleanup_fp=crawler.settings.get_bool('CLEANUP_FP', False),
            ttl=ttl,
            debug=crawler.settings.get_bool('FILTER_DEBUG', False),
            log_level=crawler.settings.get('LOG_LEVEL', 'INFO')
        )

        # Keep the pool reference so a connection can be fetched when needed
        instance._redis_pool = redis_pool
        return instance

    async def _get_redis_client(self):
        """Return the Redis client instance (lazy initialization)."""
        # If a previous connection attempt failed, return None right away
        if self._connection_failed:
            return None

        if self.redis is None and self._redis_pool is not None:
            try:
                self.redis = await self._redis_pool.get_connection()
            except Exception as e:
                self._connection_failed = True
                self.logger.error(f"Redis connection failed, falling back to local deduplication: {e}")
                return None
        return self.redis

    async def requested(self, request) -> bool:
        """
        Check whether the request has been seen before (optimized version).

        :param request: the request object
        :return: True if it is a duplicate, False if it is new
        """
        try:
            # Make sure the Redis client is initialized
            redis_client = await self._get_redis_client()

            # If Redis is unavailable, treat the request as new to avoid losing it
            if redis_client is None:
                return False

            fp = str(request_fingerprint(request))
            self._redis_operations += 1

            # Use a pipeline for better performance
            pipe = redis_client.pipeline()
            pipe.sismember(self.redis_key, fp)

            results = await pipe.execute()
            exists = results[0]

            self._pipeline_operations += 1

            if exists:
                if self.debug:
                    self.logger.debug(f"Duplicate request found: {fp[:20]}...")
                return True

            # Not seen before: add the fingerprint and set the TTL
            await self.add_fingerprint(fp)
            return False

        except Exception as e:
            self.logger.error(f"Request check failed: {getattr(request, 'url', 'unknown URL')} - {e}")
            # On network errors, return False to avoid losing requests
            return False

    async def add_fingerprint(self, fp: str) -> bool:
        """
        Add a new fingerprint to the Redis set (optimized version).

        :param fp: request fingerprint string
        :return: whether it was added (True = newly added, False = already present)
        """
        try:
            # Make sure the Redis client is initialized
            redis_client = await self._get_redis_client()

            # If Redis is unavailable, report the add as failed
            if redis_client is None:
                return False

            fp = str(fp)

            # Use a pipeline for better performance
            pipe = redis_client.pipeline()
            pipe.sadd(self.redis_key, fp)

            if self.ttl and self.ttl > 0:
                pipe.expire(self.redis_key, self.ttl)

            results = await pipe.execute()
            added = results[0] == 1  # sadd returns 1 when the member is new

            self._pipeline_operations += 1

            if self.debug and added:
                self.logger.debug(f"Added new fingerprint: {fp[:20]}...")

            return added

        except Exception as e:
            self.logger.error(f"Failed to add fingerprint: {fp[:20]}... - {e}")
            return False

    def __contains__(self, fp: str) -> bool:
        """
        Check whether a fingerprint exists in the Redis set (synchronous).

        Note: Python's __contains__ magic method cannot be asynchronous, so
        this method only offers a synchronous interface for basic existence
        checks. For scenarios that need an async check, use contains_async().

        :param fp: request fingerprint string
        :return: whether it exists
        """
        # Because __contains__ cannot be async, only a basic synchronous check is possible.
        # If the Redis client is not initialized, return False.
        if self.redis is None:
            return False

        # A real Redis query cannot be issued from synchronous code here,
        # so return False instead of blocking the caller.
        # Use contains_async() for a genuine asynchronous check.
        return False

    async def contains_async(self, fp: str) -> bool:
        """
        Asynchronously check whether a fingerprint exists in the Redis set.

        This is the real async check and should be preferred over __contains__.

        :param fp: request fingerprint string
        :return: whether it exists
        """
        try:
            # Make sure the Redis client is initialized
            redis_client = await self._get_redis_client()

            # If Redis is unavailable, report the fingerprint as absent
            if redis_client is None:
                return False

            # Check whether the fingerprint exists
            exists = await redis_client.sismember(self.redis_key, str(fp))
            return exists
        except Exception as e:
            self.logger.error(f"Fingerprint existence check failed: {fp[:20]}... - {e}")
            # On network errors, return False to avoid losing requests
            return False


# Exported explicitly for compatibility
__all__ = ['AioRedisFilter']
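For context, a minimal sketch of exercising this filter directly, using only the constructor and methods shown above. The Redis URL, key name, stats dict, and TTL here are illustrative assumptions, not values taken from the package:

import asyncio
import redis.asyncio as aioredis

from crawlo.filters.aioredis_filter import AioRedisFilter


async def main():
    # Assumption: a Redis server is reachable at localhost:6379;
    # decode_responses=False matches what create_instance() enforces.
    client = aioredis.from_url("redis://localhost:6379", decode_responses=False)

    flt = AioRedisFilter(
        redis_key="crawlo:demo:filter:fingerprint",  # illustrative key, same scheme as create_instance()
        client=client,
        stats={},
        ttl=3600,  # hypothetical: fingerprints expire after one hour
    )

    # add_fingerprint() returns True only the first time a fingerprint is seen.
    print(await flt.add_fingerprint("abc123"))   # True: newly added
    print(await flt.add_fingerprint("abc123"))   # False: already present
    print(await flt.contains_async("abc123"))    # True

    await client.aclose()  # close the connection (redis-py >= 5)


asyncio.run(main())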