crawlo 1.4.6-py3-none-any.whl → 1.4.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +90 -89
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +186 -186
- crawlo/commands/help.py +140 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +379 -341
- crawlo/commands/startproject.py +460 -460
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +320 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +451 -438
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +290 -291
- crawlo/crawler.py +698 -657
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +280 -276
- crawlo/downloader/aiohttp_downloader.py +233 -233
- crawlo/downloader/cffi_downloader.py +250 -247
- crawlo/downloader/httpx_downloader.py +265 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +425 -402
- crawlo/downloader/selenium_downloader.py +486 -472
- crawlo/event.py +45 -11
- crawlo/exceptions.py +215 -82
- crawlo/extension/__init__.py +65 -64
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +53 -61
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +104 -103
- crawlo/factories/registry.py +84 -84
- crawlo/factories/utils.py +135 -0
- crawlo/filters/__init__.py +170 -153
- crawlo/filters/aioredis_filter.py +348 -264
- crawlo/filters/memory_filter.py +261 -276
- crawlo/framework.py +306 -292
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +391 -434
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +240 -194
- crawlo/initialization/phases.py +230 -149
- crawlo/initialization/registry.py +143 -145
- crawlo/initialization/utils.py +49 -0
- crawlo/interfaces.py +23 -23
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +42 -46
- crawlo/logging/config.py +277 -197
- crawlo/logging/factory.py +175 -171
- crawlo/logging/manager.py +104 -112
- crawlo/middleware/__init__.py +87 -24
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +142 -142
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +209 -209
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +287 -253
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +375 -379
- crawlo/network/response.py +569 -664
- crawlo/pipelines/__init__.py +53 -22
- crawlo/pipelines/base_pipeline.py +452 -0
- crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +197 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +105 -105
- crawlo/pipelines/mongo_pipeline.py +140 -132
- crawlo/pipelines/mysql_pipeline.py +469 -476
- crawlo/pipelines/pipeline_manager.py +100 -100
- crawlo/pipelines/redis_dedup_pipeline.py +155 -156
- crawlo/project.py +347 -347
- crawlo/queue/__init__.py +10 -0
- crawlo/queue/pqueue.py +38 -38
- crawlo/queue/queue_manager.py +591 -525
- crawlo/queue/redis_priority_queue.py +519 -370
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +284 -277
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +657 -657
- crawlo/stats_collector.py +81 -81
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +2 -4
- crawlo/templates/project/items.py.tmpl +13 -17
- crawlo/templates/project/middlewares.py.tmpl +38 -38
- crawlo/templates/project/pipelines.py.tmpl +35 -36
- crawlo/templates/project/settings.py.tmpl +109 -111
- crawlo/templates/project/settings_distributed.py.tmpl +156 -159
- crawlo/templates/project/settings_gentle.py.tmpl +170 -176
- crawlo/templates/project/settings_high_performance.py.tmpl +171 -177
- crawlo/templates/project/settings_minimal.py.tmpl +98 -100
- crawlo/templates/project/settings_simple.py.tmpl +168 -174
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +23 -23
- crawlo/templates/spider/spider.py.tmpl +32 -40
- crawlo/templates/spiders_init.py.tmpl +5 -10
- crawlo/tools/__init__.py +86 -189
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +50 -50
- crawlo/utils/batch_processor.py +276 -259
- crawlo/utils/config_manager.py +442 -0
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +250 -250
- crawlo/utils/error_handler.py +410 -410
- crawlo/utils/fingerprint.py +121 -121
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/leak_detector.py +335 -0
- crawlo/utils/log.py +79 -79
- crawlo/utils/misc.py +81 -81
- crawlo/utils/mongo_connection_pool.py +157 -0
- crawlo/utils/mysql_connection_pool.py +197 -0
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_checker.py +91 -0
- crawlo/utils/redis_connection_pool.py +578 -388
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +278 -256
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/resource_manager.py +337 -0
- crawlo/utils/selector_helper.py +137 -137
- crawlo/utils/singleton.py +70 -0
- crawlo/utils/spider_loader.py +201 -201
- crawlo/utils/text_helper.py +94 -94
- crawlo/utils/{url.py → url_utils.py} +39 -39
- crawlo-1.4.7.dist-info/METADATA +689 -0
- crawlo-1.4.7.dist-info/RECORD +347 -0
- examples/__init__.py +7 -7
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +217 -275
- tests/authenticated_proxy_example.py +110 -110
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/bug_check_test.py +250 -250
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/direct_selector_helper_test.py +96 -96
- tests/distributed_dedup_test.py +467 -0
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/error_handling_example.py +171 -171
- tests/explain_mysql_update_behavior.py +76 -76
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/monitor_redis_dedup.sh +72 -0
- tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -12
- tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -100
- tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -13
- tests/ofweek_scrapy/ofweek_scrapy/settings.py +84 -84
- tests/ofweek_scrapy/scrapy.cfg +11 -11
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +244 -244
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_cli_test.py +55 -0
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +126 -126
- tests/simple_follow_test.py +38 -38
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_response_selector_test.py +94 -94
- tests/simple_selector_helper_test.py +154 -154
- tests/simple_selector_test.py +207 -207
- tests/simple_spider_test.py +49 -49
- tests/simple_url_test.py +73 -73
- tests/simulate_mysql_update_test.py +139 -139
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_asyncmy_usage.py +56 -56
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_cli_arguments.py +119 -0
- tests/test_component_factory.py +174 -174
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawler_process_import.py +38 -38
- tests/test_crawler_process_spider_modules.py +47 -47
- tests/test_crawlo_proxy_integration.py +114 -114
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +124 -124
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +272 -272
- tests/test_edge_cases.py +305 -305
- tests/test_encoding_core.py +56 -56
- tests/test_encoding_detection.py +126 -126
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_factory_compatibility.py +196 -196
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +374 -374
- tests/test_logging_final.py +184 -184
- tests/test_logging_integration.py +312 -312
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +141 -141
- tests/test_mode_consistency.py +51 -51
- tests/test_multi_directory.py +67 -67
- tests/test_multiple_spider_modules.py +80 -80
- tests/test_mysql_pipeline_config.py +164 -164
- tests/test_mysql_pipeline_error.py +98 -98
- tests/test_mysql_pipeline_init_log.py +82 -82
- tests/test_mysql_pipeline_integration.py +132 -132
- tests/test_mysql_pipeline_refactor.py +143 -143
- tests/test_mysql_pipeline_refactor_simple.py +85 -85
- tests/test_mysql_pipeline_robustness.py +195 -195
- tests/test_mysql_pipeline_types.py +88 -88
- tests/test_mysql_update_columns.py +93 -93
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_optimized_selector_naming.py +100 -100
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +211 -211
- tests/test_priority_consistency.py +151 -151
- tests/test_priority_consistency_fixed.py +249 -249
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +217 -217
- tests/test_proxy_middleware_enhanced.py +212 -212
- tests/test_proxy_middleware_integration.py +142 -142
- tests/test_proxy_middleware_refactored.py +207 -207
- tests/test_proxy_only.py +83 -83
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_proxy_with_downloader.py +152 -152
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +130 -130
- tests/test_random_headers_default.py +322 -322
- tests/test_random_headers_necessity.py +308 -308
- tests/test_random_user_agent.py +72 -72
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +129 -129
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_follow.py +104 -104
- tests/test_response_improvements.py +152 -152
- tests/test_response_selector_methods.py +92 -92
- tests/test_response_url_methods.py +70 -70
- tests/test_response_urljoin.py +86 -86
- tests/test_retry_middleware.py +333 -333
- tests/test_retry_middleware_realistic.py +273 -273
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_scrapy_style_encoding.py +112 -112
- tests/test_selector_helper.py +100 -100
- tests/test_selector_optimizations.py +146 -146
- tests/test_simple_response.py +61 -61
- tests/test_spider_loader.py +49 -49
- tests/test_spider_loader_comprehensive.py +69 -69
- tests/test_spider_modules.py +84 -84
- tests/test_spiders/test_spider.py +9 -9
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +176 -176
- tests/test_user_agents.py +96 -96
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- tests/verify_mysql_warnings.py +109 -109
- crawlo/logging/async_handler.py +0 -181
- crawlo/logging/monitor.py +0 -153
- crawlo/logging/sampler.py +0 -167
- crawlo/tools/authenticated_proxy.py +0 -241
- crawlo/tools/data_formatter.py +0 -226
- crawlo/tools/data_validator.py +0 -181
- crawlo/tools/encoding_converter.py +0 -127
- crawlo/tools/network_diagnostic.py +0 -365
- crawlo/tools/request_tools.py +0 -83
- crawlo/tools/retry_mechanism.py +0 -224
- crawlo/utils/env_config.py +0 -143
- crawlo/utils/large_scale_config.py +0 -287
- crawlo/utils/system.py +0 -11
- crawlo/utils/tools.py +0 -5
- crawlo-1.4.6.dist-info/METADATA +0 -329
- crawlo-1.4.6.dist-info/RECORD +0 -361
- tests/env_config_example.py +0 -134
- tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +0 -162
- tests/test_authenticated_proxy.py +0 -142
- tests/test_comprehensive.py +0 -147
- tests/test_dynamic_downloaders_proxy.py +0 -125
- tests/test_dynamic_proxy.py +0 -93
- tests/test_dynamic_proxy_config.py +0 -147
- tests/test_dynamic_proxy_real.py +0 -110
- tests/test_env_config.py +0 -122
- tests/test_framework_env_usage.py +0 -104
- tests/test_large_scale_config.py +0 -113
- tests/test_proxy_api.py +0 -265
- tests/test_real_scenario_proxy.py +0 -196
- tests/tools_example.py +0 -261
- {crawlo-1.4.6.dist-info → crawlo-1.4.7.dist-info}/WHEEL +0 -0
- {crawlo-1.4.6.dist-info → crawlo-1.4.7.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.6.dist-info → crawlo-1.4.7.dist-info}/top_level.txt +0 -0
tests/distributed_dedup_test.py
ADDED
@@ -0,0 +1,467 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+Multi-node deduplication test for distributed mode
+==========================
+
+Test goals:
+1. Verify that the Redis deduplication mechanism works correctly when several nodes run at the same time
+2. Confirm that the same URL is not processed by more than one node
+3. Verify the consistency of AioRedisFilter in a distributed scenario
+4. Check that the item deduplication pipeline is effective
+
+Test method:
+- Start several crawler instances (simulating multiple nodes)
+- Use the same Redis configuration
+- Crawl the same URL list
+- Count the number of URLs actually processed
+- Check whether any URL is processed more than once
+
+Usage:
+    python tests/distributed_dedup_test.py
+"""
+
+import asyncio
+import sys
+import os
+import time
+import redis
+from pathlib import Path
+from typing import List, Dict, Set
+from collections import defaultdict
+
+# Add the project root to the Python path
+project_root = Path(__file__).parent.parent
+sys.path.insert(0, str(project_root))
+
+from crawlo import Spider
+from crawlo.network.request import Request
+from crawlo.crawler import Crawler
+from crawlo.settings.setting_manager import SettingManager as Settings
+
+
+class DedupTestSpider(Spider):
+    """Spider dedicated to deduplication testing"""
+
+    name = 'dedup_test_spider'
+
+    # Test URL list (contains duplicates)
+    test_urls = [
+        'http://httpbin.org/delay/1',
+        'http://httpbin.org/delay/2',
+        'http://httpbin.org/delay/1',  # duplicate
+        'http://httpbin.org/html',
+        'http://httpbin.org/json',
+        'http://httpbin.org/html',  # duplicate
+        'http://httpbin.org/uuid',
+        'http://httpbin.org/delay/1',  # duplicate
+        'http://httpbin.org/json',  # duplicate
+        'http://httpbin.org/uuid',  # duplicate
+    ]
+
+    def __init__(self, instance_id: int = 0, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.instance_id = instance_id
+        self.processed_urls: Set[str] = set()
+        self.duplicate_count = 0
+
+    def start_requests(self):
+        """Generate the initial requests"""
+        self.logger.info(f"[Instance {self.instance_id}] Generating requests")
+        for url in self.test_urls:
+            yield Request(
+                url=url,
+                callback=self.parse,
+                meta={'instance_id': self.instance_id}
+            )
+
+    async def parse(self, response):
+        """Parse the response"""
+        url = response.url
+        instance_id = response.meta.get('instance_id', self.instance_id)
+
+        # Record the processed URL
+        if url in self.processed_urls:
+            self.duplicate_count += 1
+            self.logger.warning(
+                f"[Instance {instance_id}] ⚠️ Duplicate processing detected: {url}"
+            )
+        else:
+            self.processed_urls.add(url)
+            self.logger.info(
+                f"[Instance {instance_id}] ✓ Processing new URL: {url}"
+            )
+
+        # Yield the data item
+        yield {
+            'url': url,
+            'instance_id': instance_id,
+            'timestamp': time.time(),
+            'status': response.status_code,  # use status_code
+        }
+
+
+class DistributedDedupTest:
+    """Distributed deduplication test manager"""
+
+    def __init__(self, num_instances: int = 3):
+        """
+        Initialize the test
+
+        :param num_instances: number of simulated nodes
+        """
+        self.num_instances = num_instances
+        self.redis_config = {
+            'host': '127.0.0.1',
+            'port': 6379,
+            'db': 15,  # use a dedicated database to avoid pollution
+            'password': '',
+        }
+        self.project_name = 'dedup_test'
+        self.results: Dict[int, Dict] = {}
+
+    def _check_redis_connection(self) -> bool:
+        """Check the Redis connection"""
+        try:
+            r = redis.Redis(
+                host=self.redis_config['host'],
+                port=self.redis_config['port'],
+                db=self.redis_config['db'],
+                password=self.redis_config['password'] or None,
+                decode_responses=True,
+                socket_connect_timeout=5,
+            )
+            r.ping()
+            print(f"✓ Redis connection OK: {self.redis_config['host']}:{self.redis_config['port']}/{self.redis_config['db']}")
+            return True
+        except Exception as e:
+            print(f"✗ Redis connection failed: {e}")
+            print(f"  Please make sure the Redis service is running")
+            return False
+
+    def _cleanup_redis(self):
+        """Clean up test data in Redis"""
+        try:
+            r = redis.Redis(
+                host=self.redis_config['host'],
+                port=self.redis_config['port'],
+                db=self.redis_config['db'],
+                password=self.redis_config['password'] or None,
+                decode_responses=True,
+            )
+
+            # Delete all keys related to this project
+            pattern = f"crawlo:{self.project_name}:*"
+            keys = list(r.scan_iter(pattern))
+            if keys:
+                deleted = r.delete(*keys)
+                print(f"✓ Cleaned up {deleted} Redis keys")
+            else:
+                print(f"✓ No Redis keys to clean up")
+
+        except Exception as e:
+            print(f"⚠ Redis cleanup failed: {e}")
+
+    def _create_settings(self, instance_id: int) -> Settings:
+        """Create the crawler settings"""
+        settings = Settings()
+
+        # Basic project configuration
+        settings.set('PROJECT_NAME', self.project_name)
+        settings.set('RUN_MODE', 'distributed')
+
+        # Redis configuration
+        settings.set('REDIS_HOST', self.redis_config['host'])
+        settings.set('REDIS_PORT', self.redis_config['port'])
+        settings.set('REDIS_DB', self.redis_config['db'])
+        settings.set('REDIS_PASSWORD', self.redis_config['password'])
+
+        # Build the Redis URL
+        if self.redis_config['password']:
+            redis_url = (
+                f"redis://:{self.redis_config['password']}@"
+                f"{self.redis_config['host']}:{self.redis_config['port']}/{self.redis_config['db']}"
+            )
+        else:
+            redis_url = (
+                f"redis://{self.redis_config['host']}:{self.redis_config['port']}/{self.redis_config['db']}"
+            )
+        settings.set('REDIS_URL', redis_url)
+
+        # Queue and filter configuration
+        settings.set('QUEUE_TYPE', 'redis')
+        settings.set('FILTER_CLASS', 'crawlo.filters.aioredis_filter.AioRedisFilter')
+        settings.set('DEFAULT_DEDUP_PIPELINE', 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline')
+
+        # Pipeline configuration
+        settings.set('PIPELINES', [
+            'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline',
+        ])
+
+        # Concurrency configuration
+        settings.set('CONCURRENCY', 5)
+        settings.set('DOWNLOAD_DELAY', 0.5)
+
+        # Logging configuration
+        settings.set('LOG_LEVEL', 'INFO')
+        settings.set('LOG_FILE', f'logs/dedup_test_instance_{instance_id}.log')
+        settings.set('LOG_ENCODING', 'utf-8')
+        settings.set('STATS_DUMP', True)
+
+        # Disable extensions to simplify the test
+        settings.set('EXTENSIONS', [])
+
+        return settings
+
+    async def _run_instance(self, instance_id: int):
+        """Run a single crawler instance"""
+        print(f"\n{'='*60}")
+        print(f"Starting instance {instance_id}")
+        print(f"{'='*60}")
+
+        # Create the settings
+        settings = self._create_settings(instance_id)
+
+        # Create the spider instance
+        spider = DedupTestSpider(instance_id=instance_id)
+
+        # Create the Crawler (pass the spider class, not an instance)
+        crawler = Crawler(DedupTestSpider, settings)
+        # Manually set the spider instance so it carries instance_id
+        crawler._spider = spider
+
+        try:
+            # Run the crawler
+            await crawler.crawl()
+
+            # Collect statistics
+            stats = crawler.stats.get_stats() if crawler.stats else {}
+
+            self.results[instance_id] = {
+                'spider': spider,
+                'stats': stats,
+                'processed_urls': spider.processed_urls.copy(),
+                'duplicate_count': spider.duplicate_count,
+            }
+
+            print(f"\n[Instance {instance_id}] Finished:")
+            print(f"  - URLs processed: {len(spider.processed_urls)}")
+            print(f"  - Duplicates detected: {spider.duplicate_count}")
+
+        except Exception as e:
+            print(f"\n[Instance {instance_id}] Run failed: {e}")
+            import traceback
+            traceback.print_exc()
+        finally:
+            # Clean up resources
+            await crawler.close()
+
+    async def run_parallel_test(self):
+        """Run multiple instances in parallel (realistic distributed scenario)"""
+        print(f"\n{'='*60}")
+        print(f"Parallel test: starting {self.num_instances} instances at the same time")
+        print(f"{'='*60}")
+
+        # Create all tasks
+        tasks = [
+            self._run_instance(i)
+            for i in range(self.num_instances)
+        ]
+
+        # Execute in parallel
+        await asyncio.gather(*tasks)
+
+    async def run_sequential_test(self):
+        """Run multiple instances sequentially (verify basic deduplication)"""
+        print(f"\n{'='*60}")
+        print(f"Sequential test: running {self.num_instances} instances one after another")
+        print(f"{'='*60}")
+
+        for i in range(self.num_instances):
+            await self._run_instance(i)
+            # Wait a short while
+            await asyncio.sleep(1)
+
+    def _analyze_results(self):
+        """Analyze the test results"""
+        print(f"\n{'='*60}")
+        print(f"Test result analysis")
+        print(f"{'='*60}")
+
+        if not self.results:
+            print("⚠ No results were collected")
+            return
+
+        # Aggregate the URLs processed by all instances
+        all_processed_urls: Set[str] = set()
+        total_duplicates = 0
+        total_requests = 0
+
+        print(f"\nPer-instance statistics:")
+        for instance_id, result in sorted(self.results.items()):
+            spider = result['spider']
+            stats = result['stats']
+
+            processed_count = len(result['processed_urls'])
+            duplicate_count = result['duplicate_count']
+
+            all_processed_urls.update(result['processed_urls'])
+            total_duplicates += duplicate_count
+            total_requests += stats.get('request/success_count', 0)
+
+            print(f"\n  Instance {instance_id}:")
+            print(f"    - Unique URLs processed: {processed_count}")
+            print(f"    - Duplicates detected locally: {duplicate_count}")
+            print(f"    - Successful requests: {stats.get('request/success_count', 0)}")
+            print(f"    - Failed requests: {stats.get('request/failed_count', 0)}")
+
+        # Inspect the data stored in Redis
+        print(f"\n{'='*60}")
+        print(f"Redis data check:")
+        print(f"{'='*60}")
+
+        try:
+            r = redis.Redis(
+                host=self.redis_config['host'],
+                port=self.redis_config['port'],
+                db=self.redis_config['db'],
+                password=self.redis_config['password'] or None,
+                decode_responses=True,
+            )
+
+            # Check request-filter fingerprints
+            filter_key = f"crawlo:{self.project_name}:filter:fingerprint"
+            filter_count = r.scard(filter_key)
+            print(f"  - Filter fingerprint count: {filter_count}")
+
+            # Check item fingerprints
+            item_key = f"crawlo:{self.project_name}:item:fingerprint"
+            item_count = r.scard(item_key)
+            print(f"  - Item fingerprint count: {item_count}")
+
+            # Check the request queue
+            queue_key = f"crawlo:{self.project_name}:queue:requests"
+            queue_len = r.zcard(queue_key)
+            print(f"  - Remaining queue length: {queue_len}")
+
+        except Exception as e:
+            print(f"  ⚠ Redis check failed: {e}")
+
+        # Overall statistics
+        print(f"\n{'='*60}")
+        print(f"Overall statistics:")
+        print(f"{'='*60}")
+
+        unique_urls_count = len(all_processed_urls)
+        expected_unique_urls = len(set(DedupTestSpider.test_urls))
+
+        print(f"  - Number of instances: {len(self.results)}")
+        print(f"  - Unique URLs processed across all instances: {unique_urls_count}")
+        print(f"  - Expected number of unique URLs: {expected_unique_urls}")
+        print(f"  - Total requests: {total_requests}")
+        print(f"  - Total duplicates detected locally: {total_duplicates}")
+
+        # Verify the deduplication result
+        print(f"\n{'='*60}")
+        print(f"Deduplication verification:")
+        print(f"{'='*60}")
+
+        # Key check: the total number of URLs processed by all instances should equal the number of unique URLs
+        if unique_urls_count == expected_unique_urls:
+            print(f"  ✓ Test passed!")
+            print(f"    All instances together processed {unique_urls_count} unique URLs")
+            print(f"    No URL was processed by more than one node")
+            return True
+        else:
+            print(f"  ✗ Test failed!")
+            print(f"    Expected {expected_unique_urls} unique URLs to be processed")
+            print(f"    Actually processed {unique_urls_count} unique URLs")
+
+            # Check whether any URL was missed or duplicated
+            expected_urls = set(DedupTestSpider.test_urls)
+            missing_urls = expected_urls - all_processed_urls
+
+            if missing_urls:
+                print(f"    Missing URLs: {missing_urls}")
+
+            if unique_urls_count > expected_unique_urls:
+                print(f"    Duplicate processing may have occurred")
+
+            return False
+
+    async def run(self, mode: str = 'parallel'):
+        """
+        Run the test
+
+        :param mode: test mode, 'parallel' or 'sequential'
+        """
+        print(f"\n{'='*70}")
+        print(f"Distributed deduplication test")
+        print(f"{'='*70}")
+        print(f"Test mode: {mode}")
+        print(f"Number of instances: {self.num_instances}")
+        print(f"Redis: {self.redis_config['host']}:{self.redis_config['port']}/{self.redis_config['db']}")
+
+        # Check the Redis connection
+        if not self._check_redis_connection():
+            print("\n⚠ Redis is unavailable, aborting the test")
+            return False
+
+        # Clean up old data
+        print(f"\nCleaning up old Redis data...")
+        self._cleanup_redis()
+
+        # Run the test
+        try:
+            if mode == 'parallel':
+                await self.run_parallel_test()
+            else:
+                await self.run_sequential_test()
+
+            # Wait a moment to make sure all data has been written to Redis
+            await asyncio.sleep(2)
+
+            # Analyze the results
+            return self._analyze_results()
+
+        except Exception as e:
+            print(f"\n✗ Test execution failed: {e}")
+            import traceback
+            traceback.print_exc()
+            return False
+        finally:
+            # Optional: clean up data after the test
+            print(f"\nCleaning up test data...")
+            self._cleanup_redis()
+
+
+async def main():
+    """Main entry point"""
+    import argparse
+
+    parser = argparse.ArgumentParser(description='Distributed deduplication test')
+    parser.add_argument(
+        '--instances',
+        type=int,
+        default=3,
+        help='number of simulated nodes (default: 3)'
+    )
+    parser.add_argument(
+        '--mode',
+        choices=['parallel', 'sequential'],
+        default='parallel',
+        help='test mode: parallel or sequential'
+    )
+
+    args = parser.parse_args()
+
+    # Create and run the test
+    test = DistributedDedupTest(num_instances=args.instances)
+    success = await test.run(mode=args.mode)
+
+    # Exit with a status code
+    sys.exit(0 if success else 1)
+
+
+if __name__ == '__main__':
+    asyncio.run(main())
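For reference, a minimal usage sketch of the test harness added above, assuming Redis is reachable on 127.0.0.1:6379 and the repository root is on sys.path (tests/ ships an __init__.py, so the import below should resolve); the CLI equivalent is python tests/distributed_dedup_test.py --instances 2 --mode parallel, using the flags defined in main():

import asyncio
from tests.distributed_dedup_test import DistributedDedupTest

async def demo():
    # Simulate two nodes sharing one Redis instance; mode may be 'parallel' or 'sequential'.
    test = DistributedDedupTest(num_instances=2)
    passed = await test.run(mode='parallel')
    print('dedup test passed' if passed else 'dedup test failed')

asyncio.run(demo())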
tests/distributed_test.py
CHANGED
@@ -1,67 +1,67 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-Distributed mode test script
-"""
-
-import sys
-import os
-import asyncio
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
-
-from crawlo.spider import Spider
-from crawlo import Request
-
-
-class DistributedTestSpider(Spider):
-    """Distributed test spider"""
-    name = 'distributed_test_spider'
-
-    def start_requests(self):
-        """Issue test requests"""
-        # Generate a few test requests
-        for i in range(5):
-            yield Request(f'https://httpbin.org/get?page={i}', callback=self.parse)
-
-    def parse(self, response):
-        """Parse the response"""
-        print(f"Successfully fetched response: {response.url}")
-        print(f"Status code: {response.status_code}")
-        return []
-
-
-async def test_distributed_mode():
-    """Test distributed mode"""
-    print("Starting distributed mode test...")
-
-    # Initialize the framework in distributed mode
-    from crawlo.initialization import initialize_framework
-    custom_settings = {
-        'RUN_MODE': 'distributed',
-        'QUEUE_TYPE': 'redis',
-        'FILTER_CLASS': 'crawlo.filters.aioredis_filter.AioRedisFilter',
-        'REDIS_HOST': '127.0.0.1',
-        'REDIS_PORT': 6379,
-        'REDIS_DB': 15,  # use the test database
-        'PROJECT_NAME': 'distributed_test'
-    }
-    settings = initialize_framework(custom_settings)
-
-    # Create the crawler process
-    from crawlo.crawler import CrawlerProcess
-    process = CrawlerProcess(settings=settings)
-
-    # Run the crawler
-    await process.crawl(DistributedTestSpider)
-
-    print("Distributed mode test finished!")
-
-
-def main():
-    """Main entry point"""
-    print("Starting distributed mode test...")
-    asyncio.run(test_distributed_mode())
-
-
-if __name__ == "__main__":
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+Distributed mode test script
+"""
+
+import sys
+import os
+import asyncio
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+from crawlo.spider import Spider
+from crawlo import Request
+
+
+class DistributedTestSpider(Spider):
+    """Distributed test spider"""
+    name = 'distributed_test_spider'
+
+    def start_requests(self):
+        """Issue test requests"""
+        # Generate a few test requests
+        for i in range(5):
+            yield Request(f'https://httpbin.org/get?page={i}', callback=self.parse)
+
+    def parse(self, response):
+        """Parse the response"""
+        print(f"Successfully fetched response: {response.url}")
+        print(f"Status code: {response.status_code}")
+        return []
+
+
+async def test_distributed_mode():
+    """Test distributed mode"""
+    print("Starting distributed mode test...")
+
+    # Initialize the framework in distributed mode
+    from crawlo.initialization import initialize_framework
+    custom_settings = {
+        'RUN_MODE': 'distributed',
+        'QUEUE_TYPE': 'redis',
+        'FILTER_CLASS': 'crawlo.filters.aioredis_filter.AioRedisFilter',
+        'REDIS_HOST': '127.0.0.1',
+        'REDIS_PORT': 6379,
+        'REDIS_DB': 15,  # use the test database
+        'PROJECT_NAME': 'distributed_test'
+    }
+    settings = initialize_framework(custom_settings)
+
+    # Create the crawler process
+    from crawlo.crawler import CrawlerProcess
+    process = CrawlerProcess(settings=settings)
+
+    # Run the crawler
+    await process.crawl(DistributedTestSpider)
+
+    print("Distributed mode test finished!")
+
+
+def main():
+    """Main entry point"""
+    print("Starting distributed mode test...")
+    asyncio.run(test_distributed_mode())
+
+
+if __name__ == "__main__":
     main()