crawlo 1.4.1__py3-none-any.whl → 1.4.3__py3-none-any.whl
This diff shows the published contents of two package versions as they appear in their public registry. It is provided for informational purposes only and reflects the changes between the two releases.
Potentially problematic release: this version of crawlo was flagged as possibly problematic by the registry.
- crawlo/__init__.py +93 -93
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +341 -341
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +312 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +438 -439
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +291 -257
- crawlo/crawler.py +650 -650
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +273 -273
- crawlo/downloader/aiohttp_downloader.py +233 -228
- crawlo/downloader/cffi_downloader.py +245 -245
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +63 -63
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +61 -61
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +103 -103
- crawlo/factories/registry.py +84 -84
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +257 -257
- crawlo/filters/memory_filter.py +269 -269
- crawlo/framework.py +292 -292
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +425 -425
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +193 -193
- crawlo/initialization/phases.py +148 -148
- crawlo/initialization/registry.py +145 -145
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +45 -37
- crawlo/logging/async_handler.py +181 -0
- crawlo/logging/config.py +196 -96
- crawlo/logging/factory.py +171 -128
- crawlo/logging/manager.py +111 -111
- crawlo/logging/monitor.py +153 -0
- crawlo/logging/sampler.py +167 -0
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +386 -386
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/middleware/simple_proxy.py +65 -65
- crawlo/mode_manager.py +219 -219
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +379 -379
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +197 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +105 -105
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +325 -325
- crawlo/pipelines/pipeline_manager.py +100 -84
- crawlo/pipelines/redis_dedup_pipeline.py +156 -156
- crawlo/project.py +349 -338
- crawlo/queue/pqueue.py +42 -42
- crawlo/queue/queue_manager.py +526 -522
- crawlo/queue/redis_priority_queue.py +370 -367
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +284 -284
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +657 -657
- crawlo/stats_collector.py +73 -73
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +170 -170
- crawlo/templates/project/settings_distributed.py.tmpl +169 -169
- crawlo/templates/project/settings_gentle.py.tmpl +166 -166
- crawlo/templates/project/settings_high_performance.py.tmpl +167 -167
- crawlo/templates/project/settings_minimal.py.tmpl +65 -65
- crawlo/templates/project/settings_simple.py.tmpl +164 -164
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +34 -34
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/templates/spiders_init.py.tmpl +9 -9
- crawlo/tools/__init__.py +200 -200
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_formatter.py +225 -225
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/encoding_converter.py +127 -127
- crawlo/tools/network_diagnostic.py +364 -364
- crawlo/tools/request_tools.py +82 -82
- crawlo/tools/retry_mechanism.py +224 -224
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +34 -34
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/class_loader.py +25 -25
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +142 -142
- crawlo/utils/error_handler.py +165 -165
- crawlo/utils/fingerprint.py +122 -122
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +79 -79
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +388 -388
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.4.3.dist-info/METADATA +190 -0
- crawlo-1.4.3.dist-info/RECORD +326 -0
- examples/__init__.py +7 -7
- examples/test_project/__init__.py +7 -7
- examples/test_project/run.py +34 -34
- examples/test_project/test_project/__init__.py +3 -3
- examples/test_project/test_project/items.py +17 -17
- examples/test_project/test_project/middlewares.py +118 -118
- examples/test_project/test_project/pipelines.py +96 -96
- examples/test_project/test_project/settings.py +169 -169
- examples/test_project/test_project/spiders/__init__.py +9 -9
- examples/test_project/test_project/spiders/of_week_dis.py +143 -143
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +106 -106
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +245 -245
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +127 -127
- tests/simple_log_test.py +57 -57
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_spider_test.py +49 -49
- tests/simple_test.py +47 -47
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_component_factory.py +174 -174
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawlo_proxy_integration.py +108 -108
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +125 -0
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_framework_env_usage.py +103 -103
- tests/test_framework_logger.py +66 -66
- tests/test_framework_startup.py +64 -64
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_config.py +112 -112
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +375 -0
- tests/test_logging_final.py +185 -0
- tests/test_logging_integration.py +313 -0
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +142 -0
- tests/test_mode_change.py +72 -72
- tests/test_mode_consistency.py +51 -51
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +212 -0
- tests/test_priority_consistency.py +152 -0
- tests/test_priority_consistency_fixed.py +250 -0
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_middleware_refactored.py +184 -184
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +131 -0
- tests/test_random_headers_default.py +323 -0
- tests/test_random_headers_necessity.py +309 -0
- tests/test_random_user_agent.py +72 -72
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +130 -0
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +334 -242
- tests/test_retry_middleware_realistic.py +274 -0
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +177 -0
- tests/test_user_agents.py +96 -96
- tests/tools_example.py +260 -260
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- crawlo-1.4.1.dist-info/METADATA +0 -1199
- crawlo-1.4.1.dist-info/RECORD +0 -309
- {crawlo-1.4.1.dist-info → crawlo-1.4.3.dist-info}/WHEEL +0 -0
- {crawlo-1.4.1.dist-info → crawlo-1.4.3.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.1.dist-info → crawlo-1.4.3.dist-info}/top_level.txt +0 -0
tests/untested_features_report.md
CHANGED

@@ -1,139 +1,139 @@

(The diff removes and re-adds each line with identical text, a whitespace/line-ending-only rewrite. The file content, shown once and translated from Chinese:)

# Untested Features Report

## Overview

A full review of the Crawlo framework found that the following modules lack dedicated test cases. Parts of their functionality may be covered indirectly by other tests, but they have no targeted unit or integration tests.

## Modules With Completed Tests

### 1. Factory-Pattern Modules

**Module path**: `crawlo/factories/`

**Test file**: `tests/test_factories.py`

**Tested components**:
- `ComponentRegistry` - component registry
- `ComponentFactory` - component factory base class
- `DefaultComponentFactory` - default component factory
- `CrawlerComponentFactory` - crawler component factory

### 2. Batch-Processing Utilities

**Module path**: `crawlo/utils/batch_processor.py`

**Test file**: `tests/test_batch_processor.py`

**Tested components**:
- `BatchProcessor` - batch processor
- `RedisBatchProcessor` - Redis batch processor
- `batch_process` - convenience batch-processing function

### 3. Controlled-Spider Mixins

**Module path**: `crawlo/utils/controlled_spider_mixin.py`

**Test file**: `tests/test_controlled_spider_mixin.py`

**Tested components**:
- `ControlledRequestMixin` - controlled request-generation mixin
- `AsyncControlledRequestMixin` - asynchronous controlled-request mixin

### 4. Large-Scale Configuration Utilities

**Module path**: `crawlo/utils/large_scale_config.py`

**Test file**: `tests/test_large_scale_config.py`

**Tested components**:
- `LargeScaleConfig` - large-scale crawl configuration class
- `apply_large_scale_config` - function that applies a large-scale configuration

### 5. Large-Scale Crawl Helpers

**Module path**: `crawlo/utils/large_scale_helper.py`

**Test file**: `tests/test_large_scale_helper.py`

**Tested components**:
- `LargeScaleHelper` - large-scale crawl helper class
- `ProgressManager` - progress manager
- `MemoryOptimizer` - memory optimizer
- `DataSourceAdapter` - data-source adapter
- `LargeScaleSpiderMixin` - large-scale spider mixin

### 6. Enhanced Error-Handling Utilities

**Module path**: `crawlo/utils/enhanced_error_handler.py`

**Test files**:
- `tests/test_enhanced_error_handler.py` (basic tests)
- `tests/test_enhanced_error_handler_comprehensive.py` (comprehensive tests)

**Tested components**:
- `ErrorContext` - error-context information
- `DetailedException` - detailed exception base class
- `EnhancedErrorHandler` - enhanced error handler
- `handle_exception` decorator

## Untested Modules

### 1. Performance-Monitoring Utilities

**Module path**: `crawlo/utils/performance_monitor.py`

**Test file**: `tests/test_performance_monitor.py` (partial tests; depends on psutil)

**Insufficiently tested components**:
- `PerformanceMonitor` - performance monitor
- `PerformanceTimer` - performance timer
- `performance_monitor_decorator` - performance-monitoring decorator

**Risk**: performance monitoring is a key tool for optimization and diagnostics; without tests, it may report inaccurate data or fail silently.

## Suggested Testing Strategy

### 1. Prioritization

**High priority** (directly affects core functionality):
- (done)

**Medium priority** (affects performance and stability):
- performance-monitoring utilities

**Low priority** (auxiliary features):
- (done)

### 2. Recommended Test Types

**Unit tests**:
- test each class's methods in isolation
- verify boundary conditions and error cases
- validate configuration parameters

**Integration tests**:
- test collaboration between modules
- verify interaction with external services such as Redis
- test behavior in real crawl scenarios

**Performance tests**:
- verify the performance benefit of the batch-processing utilities
- measure the memory usage of the large-scale processing utilities
- verify the accuracy of the performance-monitoring utilities

### 3. Coverage Recommendations

**Core-functionality coverage**:
- happy-path tests
- failure-path tests
- boundary-condition tests
- concurrency-safety tests

**Configuration coverage**:
- tests across different configuration parameters
- comparison of default versus custom configuration
- dynamic tests of configuration updates

## Conclusion

Test cases have been created for the factory-pattern modules, batch-processing utilities, controlled-spider mixins, large-scale configuration utilities, large-scale crawl helpers, and enhanced error-handling utilities, so these core components now have baseline test coverage. We recommend adding tests for the performance-monitoring utilities next (once psutil is installed) to ensure the framework's completeness and stability.
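To make the report's recommendation for the performance-monitoring utilities concrete, a timing utility can be unit-tested without psutil by bounding the measured duration. The sketch below is self-contained and hedged: `SimpleTimer` is a hypothetical stand-in, since the actual `PerformanceTimer` API is not visible in this diff.

```python
import time


class SimpleTimer:
    """Hypothetical stand-in for a timer like crawlo's PerformanceTimer."""

    def __enter__(self):
        self._start = time.perf_counter()
        return self

    def __exit__(self, exc_type, exc, tb):
        self.elapsed = time.perf_counter() - self._start
        return False


def test_timer_measures_at_least_the_sleep_duration():
    # Elapsed time should not undercut the sleep; assert a lower bound
    # (with a little slack for platform timer granularity) so the test
    # stays deterministic.
    with SimpleTimer() as t:
        time.sleep(0.05)
    assert t.elapsed >= 0.04


def test_timer_is_small_for_an_empty_block():
    # The upper bound is generous to tolerate scheduler jitter.
    with SimpleTimer() as t:
        pass
    assert 0.0 <= t.elapsed < 0.05
```

Run with `pytest -q`; the same lower/upper-bound pattern applies to whatever interface the real `PerformanceTimer` exposes.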
tests/verify_debug.py
CHANGED

@@ -1,52 +1,52 @@

(The diff removes and re-adds the file's lines with identical text, a line-ending-only rewrite. The script, shown once with comments and strings translated from Chinese:)

```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Simple test verifying the run-mode log-level change.
"""
import os

# Remove any old log file
log_file = 'verify_debug.log'
if os.path.exists(log_file):
    os.remove(log_file)

# Minimal log-level test
from crawlo.utils.log import LoggerManager

# Configure the logging system
LoggerManager.configure(
    LOG_LEVEL='INFO',
    LOG_FILE=log_file
)

from crawlo.utils.log import get_logger

# Create a test logger
test_logger = get_logger('crawlo.framework')

# Emit test output
test_logger.info("INFO-level test message")
test_logger.debug("DEBUG-level test message (should not appear in an INFO-level log)")
test_logger.debug("Using standalone mode - simple and fast, suited to development and small-to-medium crawls")

print("Test finished")

# Inspect the log file
if os.path.exists(log_file):
    with open(log_file, 'r', encoding='utf-8') as f:
        content = f.read()
    print(f"Log file content ({len(content)} characters):")
    print(content)

    # Check for DEBUG output that should have been filtered out
    if "DEBUG" in content:
        print("❌ Found DEBUG-level output (should not appear)")
    else:
        print("✅ No DEBUG-level output (correct)")

    if "Using standalone mode" in content:
        print("❌ Found run-mode message (should not appear at INFO level)")
    else:
        print("✅ No run-mode message (correct)")
else:
    print("❌ Log file was not created")
```
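The property this script verifies is ordinary logger-level filtering: records below the configured level never reach the handlers. A minimal self-contained sketch of the same check using only Python's standard library (no crawlo imports, so nothing about `LoggerManager` is assumed):

```python
import io
import logging

# Route records to an in-memory buffer with the logger set to INFO.
buffer = io.StringIO()
handler = logging.StreamHandler(buffer)
logger = logging.getLogger("demo.level_filter")
logger.setLevel(logging.INFO)
logger.addHandler(handler)

logger.info("visible at INFO")
logger.debug("filtered out at INFO")

output = buffer.getvalue()
assert "visible at INFO" in output
assert "filtered out" not in output
print("✅ DEBUG records were filtered as expected")
```

Presumably crawlo's `LoggerManager` delegates to this same stdlib machinery, which is why configuring `LOG_LEVEL='INFO'` suppresses the run-mode DEBUG message.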
tests/verify_distributed.py
CHANGED

@@ -1,117 +1,117 @@

(The diff removes and re-adds every line with identical text, a line-ending-only rewrite. The script, shown once with comments and strings translated from Chinese:)

```python
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Distributed-crawl verification script.
Checks that the Crawlo framework's distributed crawling works as expected.
"""

import redis
import json
import os
import sys

# Add the project root to the Python path
project_root = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, project_root)


def verify_distributed_functionality():
    """Verify distributed crawling."""
    print("=== Crawlo distributed-crawl verification ===\n")

    # 1. Connect to Redis
    try:
        r = redis.Redis(host='localhost', port=6379, db=2, decode_responses=False)
        r.ping()
        print("✓ Redis connection succeeded")
    except Exception as e:
        print(f"✗ Redis connection failed: {e}")
        return False

    # 2. Check the project configuration
    try:
        with open('../examples/ofweek_distributed/crawlo.cfg', 'r') as f:
            config_content = f.read()
            if 'ofweek_distributed.settings' in config_content:
                print("✓ Project configuration file is correct")
            else:
                print("✗ Project configuration file is incorrect")
                return False
    except Exception as e:
        print(f"✗ Could not read the configuration file: {e}")
        return False

    # 3. Check the settings file
    try:
        with open('../examples/ofweek_distributed/ofweek_distributed/settings.py', 'r') as f:
            settings_content = f.read()
            checks = [
                ('RUN_MODE = \'distributed\'', 'run mode set to distributed'),
                ('QUEUE_TYPE = \'redis\'', 'queue type set to Redis'),
                ('FILTER_CLASS = \'crawlo.filters.aioredis_filter.AioRedisFilter\'', 'filter set to the Redis filter'),
                ('REDIS_HOST = \'127.0.0.1\'', 'Redis host configured correctly'),
            ]

            all_passed = True
            for check, description in checks:
                if check in settings_content:
                    print(f"✓ {description}")
                else:
                    print(f"✗ {description}")
                    all_passed = False

            if not all_passed:
                return False
    except Exception as e:
        print(f"✗ Could not read the settings file: {e}")
        return False

    # 4. Check the data in Redis
    try:
        # Request-dedup fingerprints
        request_fingerprints = r.scard("crawlo:ofweek_distributed:filter:fingerprint")
        print(f"✓ Request-dedup fingerprint count: {request_fingerprints}")

        # Item-dedup fingerprints
        item_fingerprints = r.scard("crawlo:ofweek_distributed:item:fingerprint")
        print(f"✓ Item-dedup fingerprint count: {item_fingerprints}")

        # Request queue
        queue_size = r.zcard("crawlo:ofweek_distributed:queue:requests")
        print(f"✓ Request queue size: {queue_size}")

        # Verify that data exists
        if request_fingerprints > 0 and item_fingerprints > 0:
            print("✓ Distributed-crawl data found in Redis")
        else:
            print("⚠ Distributed-crawl data in Redis is empty")

    except Exception as e:
        print(f"✗ Redis data check failed: {e}")
        return False

    # 5. Check output files
    try:
        import glob
        json_files = glob.glob("output/*.json")
        if json_files:
            latest_file = max(json_files, key=os.path.getctime)
            file_size = os.path.getsize(latest_file)
            print(f"✓ Output file exists: {latest_file} ({file_size} bytes)")
        else:
            print("⚠ No output file found")
    except Exception as e:
        print(f"✗ Output file check failed: {e}")

    print("\n=== Verification result ===")
    print("✓ Crawlo distributed crawling works!")
    print("  - Redis connection OK")
    print("  - distributed configuration correct")
    print("  - Redis data storage OK")
    print("  - crawl tasks executed OK")

    return True


if __name__ == '__main__':
    verify_distributed_functionality()
```
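The keys the script inspects follow the `crawlo:<project>:<component>` naming convention that several tests in the file list above exercise (`test_redis_key_naming.py`, `test_queue_naming.py`, `test_redis_key_validator.py`). A small helper that generalizes the same checks to any project name; this is a sketch inferred from the script, not an official crawlo API:

```python
import redis

# Assumed key layout, inferred from tests/verify_distributed.py:
#   crawlo:<project>:filter:fingerprint   SET of request fingerprints
#   crawlo:<project>:item:fingerprint     SET of item fingerprints
#   crawlo:<project>:queue:requests       ZSET used as the priority queue


def distributed_key_stats(project: str, host: str = "localhost",
                          port: int = 6379, db: int = 2) -> dict:
    """Return counts for the dedup sets and request queue of a project."""
    r = redis.Redis(host=host, port=port, db=db)
    prefix = f"crawlo:{project}"
    return {
        "request_fingerprints": r.scard(f"{prefix}:filter:fingerprint"),
        "item_fingerprints": r.scard(f"{prefix}:item:fingerprint"),
        "queued_requests": r.zcard(f"{prefix}:queue:requests"),
    }


if __name__ == "__main__":
    print(distributed_key_stats("ofweek_distributed"))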