crawlo 1.3.3-py3-none-any.whl → 1.3.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- crawlo/__init__.py +87 -63
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +341 -323
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +312 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +46 -2
- crawlo/core/engine.py +439 -365
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +257 -256
- crawlo/crawler.py +639 -1167
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +273 -273
- crawlo/downloader/aiohttp_downloader.py +228 -226
- crawlo/downloader/cffi_downloader.py +245 -245
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +39 -39
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +61 -52
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +28 -0
- crawlo/factories/base.py +69 -0
- crawlo/factories/crawler.py +104 -0
- crawlo/factories/registry.py +85 -0
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +257 -234
- crawlo/filters/memory_filter.py +269 -269
- crawlo/framework.py +292 -0
- crawlo/initialization/__init__.py +40 -0
- crawlo/initialization/built_in.py +426 -0
- crawlo/initialization/context.py +142 -0
- crawlo/initialization/core.py +194 -0
- crawlo/initialization/phases.py +149 -0
- crawlo/initialization/registry.py +146 -0
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -22
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +38 -0
- crawlo/logging/config.py +97 -0
- crawlo/logging/factory.py +129 -0
- crawlo/logging/manager.py +112 -0
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +386 -386
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +163 -163
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/middleware/simple_proxy.py +65 -65
- crawlo/mode_manager.py +212 -187
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +379 -379
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +222 -222
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +318 -318
- crawlo/pipelines/pipeline_manager.py +76 -75
- crawlo/pipelines/redis_dedup_pipeline.py +166 -166
- crawlo/project.py +327 -325
- crawlo/queue/pqueue.py +43 -37
- crawlo/queue/queue_manager.py +503 -379
- crawlo/queue/redis_priority_queue.py +326 -306
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +321 -225
- crawlo/settings/setting_manager.py +214 -198
- crawlo/spider/__init__.py +657 -639
- crawlo/stats_collector.py +73 -59
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +139 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +168 -267
- crawlo/templates/project/settings_distributed.py.tmpl +167 -180
- crawlo/templates/project/settings_gentle.py.tmpl +167 -61
- crawlo/templates/project/settings_high_performance.py.tmpl +168 -131
- crawlo/templates/project/settings_minimal.py.tmpl +66 -35
- crawlo/templates/project/settings_simple.py.tmpl +165 -102
- crawlo/templates/project/spiders/__init__.py.tmpl +10 -6
- crawlo/templates/run.py.tmpl +34 -38
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/templates/spiders_init.py.tmpl +10 -0
- crawlo/tools/__init__.py +200 -200
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_formatter.py +225 -225
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +388 -388
- crawlo/tools/encoding_converter.py +127 -127
- crawlo/tools/network_diagnostic.py +365 -0
- crawlo/tools/request_tools.py +82 -82
- crawlo/tools/retry_mechanism.py +224 -224
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +34 -34
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/class_loader.py +26 -0
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +142 -142
- crawlo/utils/error_handler.py +165 -124
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +44 -200
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +388 -351
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +225 -218
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/METADATA +1126 -1020
- crawlo-1.3.4.dist-info/RECORD +278 -0
- examples/__init__.py +7 -7
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +107 -107
- tests/baidu_performance_test.py +109 -0
- tests/baidu_test.py +60 -0
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +213 -0
- tests/comprehensive_test.py +82 -0
- tests/comprehensive_testing_summary.md +187 -0
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +70 -0
- tests/debug_framework_logger.py +85 -0
- tests/debug_log_levels.py +64 -0
- tests/debug_pipelines.py +66 -66
- tests/distributed_test.py +67 -0
- tests/distributed_test_debug.py +77 -0
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/final_command_test_report.md +0 -0
- tests/final_comprehensive_test.py +152 -0
- tests/final_validation_test.py +183 -0
- tests/framework_performance_test.py +203 -0
- tests/optimized_performance_test.py +212 -0
- tests/performance_comparison.py +246 -0
- tests/queue_blocking_test.py +114 -0
- tests/queue_test.py +90 -0
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +139 -0
- tests/scrapy_comparison/scrapy_test.py +134 -0
- tests/simple_command_test.py +120 -0
- tests/simple_crawlo_test.py +128 -0
- tests/simple_log_test.py +58 -0
- tests/simple_optimization_test.py +129 -0
- tests/simple_spider_test.py +50 -0
- tests/simple_test.py +48 -0
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +231 -0
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_batch_processor.py +179 -0
- tests/test_cleaners.py +54 -54
- tests/test_component_factory.py +175 -0
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +80 -0
- tests/test_crawlo_proxy_integration.py +108 -108
- tests/test_date_tools.py +123 -123
- tests/test_default_header_middleware.py +158 -158
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +207 -207
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +246 -0
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +253 -0
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +103 -103
- tests/test_framework_logger.py +67 -0
- tests/test_framework_startup.py +65 -0
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_config.py +113 -0
- tests/test_large_scale_helper.py +236 -0
- tests/test_mode_change.py +73 -0
- tests/test_mode_consistency.py +51 -51
- tests/test_offsite_middleware.py +221 -221
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +116 -0
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_middleware_refactored.py +184 -184
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_empty_check.py +42 -0
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +176 -176
- tests/test_random_user_agent.py +72 -72
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +241 -241
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agents.py +96 -96
- tests/tools_example.py +260 -260
- tests/untested_features_report.md +139 -0
- tests/verify_debug.py +52 -0
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +112 -0
- crawlo-1.3.3.dist-info/RECORD +0 -219
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +0 -82
- {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/WHEEL +0 -0
- {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/entry_points.txt +0 -0
- {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/top_level.txt +0 -0
tests/test_large_scale_config.py
ADDED
@@ -0,0 +1,113 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Tests for the large-scale configuration utilities
+Covers LargeScaleConfig and apply_large_scale_config
+"""
+import sys
+import os
+import unittest
+from unittest.mock import Mock, patch, MagicMock
+
+# Add the project root to the Python path
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+from crawlo.utils.large_scale_config import LargeScaleConfig, apply_large_scale_config
+
+
+class TestLargeScaleConfig(unittest.TestCase):
+    """Tests for the large-scale configuration utilities"""
+
+    def test_conservative_config(self):
+        """Test the conservative profile"""
+        config = LargeScaleConfig.conservative_config(concurrency=8)
+
+        # Verify the basic settings
+        self.assertEqual(config['CONCURRENCY'], 8)
+        self.assertEqual(config['SCHEDULER_MAX_QUEUE_SIZE'], 80)  # 8 * 10
+        self.assertEqual(config['DOWNLOAD_DELAY'], 0.2)
+        self.assertEqual(config['MAX_RUNNING_SPIDERS'], 1)
+
+        # Verify the connection-pool settings
+        self.assertEqual(config['CONNECTION_POOL_LIMIT'], 16)  # 8 * 2
+
+        # Verify the retry settings
+        self.assertEqual(config['MAX_RETRY_TIMES'], 2)
+
+    def test_balanced_config(self):
+        """Test the balanced profile"""
+        config = LargeScaleConfig.balanced_config(concurrency=16)
+
+        # Verify the basic settings
+        self.assertEqual(config['CONCURRENCY'], 16)
+        self.assertEqual(config['SCHEDULER_MAX_QUEUE_SIZE'], 240)  # 16 * 15
+        self.assertEqual(config['DOWNLOAD_DELAY'], 0.1)
+        self.assertEqual(config['MAX_RUNNING_SPIDERS'], 2)
+
+        # Verify the connection-pool settings
+        self.assertEqual(config['CONNECTION_POOL_LIMIT'], 48)  # 16 * 3
+
+        # Verify the retry settings
+        self.assertEqual(config['MAX_RETRY_TIMES'], 3)
+
+    def test_aggressive_config(self):
+        """Test the aggressive profile"""
+        config = LargeScaleConfig.aggressive_config(concurrency=32)
+
+        # Verify the basic settings
+        self.assertEqual(config['CONCURRENCY'], 32)
+        self.assertEqual(config['SCHEDULER_MAX_QUEUE_SIZE'], 640)  # 32 * 20
+        self.assertEqual(config['DOWNLOAD_DELAY'], 0.05)
+        self.assertEqual(config['MAX_RUNNING_SPIDERS'], 3)
+
+        # Verify the connection-pool settings
+        self.assertEqual(config['CONNECTION_POOL_LIMIT'], 128)  # 32 * 4
+
+        # Verify the retry settings
+        self.assertEqual(config['MAX_RETRY_TIMES'], 5)
+
+    def test_memory_optimized_config(self):
+        """Test the memory-optimized profile"""
+        config = LargeScaleConfig.memory_optimized_config(concurrency=12)
+
+        # Verify the basic settings
+        self.assertEqual(config['CONCURRENCY'], 12)
+        self.assertEqual(config['SCHEDULER_MAX_QUEUE_SIZE'], 60)  # 12 * 5
+        self.assertEqual(config['DOWNLOAD_DELAY'], 0.1)
+        self.assertEqual(config['MAX_RUNNING_SPIDERS'], 1)
+
+        # Verify the connection-pool settings
+        self.assertEqual(config['CONNECTION_POOL_LIMIT'], 12)  # 12 * 1
+
+        # Verify the memory-limit settings
+        self.assertEqual(config['DOWNLOAD_MAXSIZE'], 2 * 1024 * 1024)  # 2MB
+        self.assertEqual(config['DOWNLOAD_WARN_SIZE'], 512 * 1024)  # 512KB
+
+        # Verify the retry settings
+        self.assertEqual(config['MAX_RETRY_TIMES'], 2)
+
+    def test_apply_large_scale_config(self):
+        """Test applying a large-scale config"""
+        settings_dict = {}
+
+        # Apply the balanced profile
+        apply_large_scale_config(settings_dict, "balanced", 16)
+
+        # Verify the config was applied
+        self.assertEqual(settings_dict['CONCURRENCY'], 16)
+        self.assertEqual(settings_dict['SCHEDULER_MAX_QUEUE_SIZE'], 240)
+        self.assertEqual(settings_dict['DOWNLOAD_DELAY'], 0.1)
+
+    def test_apply_large_scale_config_invalid_type(self):
+        """Test applying an invalid config type"""
+        settings_dict = {}
+
+        # Applying an invalid config type should raise
+        with self.assertRaises(ValueError) as context:
+            apply_large_scale_config(settings_dict, "invalid_type", 16)
+
+        self.assertIn("不支持的配置类型", str(context.exception))  # "unsupported config type"
+
+
+if __name__ == '__main__':
+    unittest.main()
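
The assertions above pin down the scaling rules each profile encodes: queue size, connection-pool limit, download delay, and retry count all derive from the concurrency argument (x10/x2 for conservative, x15/x3 for balanced, x20/x4 for aggressive, x5/x1 for memory-optimized). A minimal usage sketch, assuming only the behavior these tests assert (the profile names, the in-place mutation of the settings dict, and the multipliers):

    from crawlo.utils.large_scale_config import apply_large_scale_config

    settings = {}
    apply_large_scale_config(settings, "balanced", 16)  # mutates the dict in place

    assert settings['CONCURRENCY'] == 16
    assert settings['SCHEDULER_MAX_QUEUE_SIZE'] == 16 * 15  # 240
    assert settings['DOWNLOAD_DELAY'] == 0.1
    # Per the balanced-profile test, CONNECTION_POOL_LIMIT should be 16 * 3 == 48.
    # An unknown profile name raises ValueError (test_apply_large_scale_config_invalid_type).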
tests/test_large_scale_helper.py
ADDED
@@ -0,0 +1,236 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Tests for the large-scale crawling helpers
+Covers LargeScaleHelper, ProgressManager, MemoryOptimizer, DataSourceAdapter, LargeScaleSpiderMixin
+"""
+import sys
+import os
+import unittest
+from unittest.mock import Mock, patch, MagicMock
+import asyncio
+import json
+import tempfile
+
+# Add the project root to the Python path
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+from crawlo.utils.large_scale_helper import (
+    LargeScaleHelper,
+    ProgressManager,
+    MemoryOptimizer,
+    DataSourceAdapter,
+    LargeScaleSpiderMixin
+)
+
+
+class TestLargeScaleHelper(unittest.TestCase):
+    """Tests for the large-scale crawling helper class"""
+
+    def setUp(self):
+        """Set up before each test"""
+        self.helper = LargeScaleHelper(batch_size=100, checkpoint_interval=500)
+
+    def test_helper_initialization(self):
+        """Test helper initialization"""
+        self.assertEqual(self.helper.batch_size, 100)
+        self.assertEqual(self.helper.checkpoint_interval, 500)
+
+    def test_batch_iterator_with_list(self):
+        """Test the batch iterator with a list data source"""
+        data = list(range(250))  # 250 elements
+        batches = list(self.helper.batch_iterator(data))
+
+        # Verify the number of batches
+        self.assertEqual(len(batches), 3)  # 250/100 = 3 batches (rounded up)
+
+        # Verify the size of each batch
+        self.assertEqual(len(batches[0]), 100)
+        self.assertEqual(len(batches[1]), 100)
+        self.assertEqual(len(batches[2]), 50)  # the final batch
+
+        # Verify data integrity
+        all_data = []
+        for batch in batches:
+            all_data.extend(batch)
+        self.assertEqual(all_data, data)
+
+    def test_batch_iterator_with_offset(self):
+        """Test the batch iterator with a start offset"""
+        data = list(range(250))  # 250 elements
+        batches = list(self.helper.batch_iterator(data, start_offset=50))
+
+        # Verify the number of batches
+        self.assertEqual(len(batches), 2)  # 200 elements remain, so 2 batches
+
+        # Verify data correctness
+        all_data = []
+        for batch in batches:
+            all_data.extend(batch)
+        self.assertEqual(all_data, list(range(50, 250)))
+
+    def test_batch_iterator_invalid_source(self):
+        """Test the batch iterator with an invalid data source"""
+        with self.assertRaises(ValueError) as context:
+            list(self.helper.batch_iterator(123))  # an int is not a valid data source
+        self.assertIn("不支持的数据源类型", str(context.exception))  # "unsupported data source type"
+
+
+class TestProgressManager(unittest.TestCase):
+    """Tests for the progress manager"""
+
+    def setUp(self):
+        """Set up before each test"""
+        # Create a temporary file for the test
+        self.temp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.json')
+        self.temp_file.close()
+        self.progress_manager = ProgressManager(self.temp_file.name)
+
+    def tearDown(self):
+        """Clean up after each test"""
+        try:
+            os.unlink(self.temp_file.name)
+        except:
+            pass
+
+    def test_progress_manager_initialization(self):
+        """Test progress manager initialization"""
+        self.assertEqual(self.progress_manager.progress_file, self.temp_file.name)
+
+    def test_save_and_load_progress(self):
+        """Test saving and loading progress"""
+        # Save progress
+        self.progress_manager.save_progress(
+            batch_num=10,
+            processed_count=1000,
+            skipped_count=50
+        )
+
+        # Load progress
+        progress = self.progress_manager.load_progress()
+
+        # Verify the progress data
+        self.assertEqual(progress['batch_num'], 10)
+        self.assertEqual(progress['processed_count'], 1000)
+        self.assertEqual(progress['skipped_count'], 50)
+        self.assertIn('timestamp', progress)
+        self.assertIn('formatted_time', progress)
+
+    def test_load_progress_file_not_found(self):
+        """Test loading a progress file that does not exist"""
+        # Create a new progress manager pointing at a missing file
+        non_existent_file = tempfile.gettempdir() + "/non_existent_progress.json"
+        pm = ProgressManager(non_existent_file)
+
+        # Loading progress should return the defaults
+        progress = pm.load_progress()
+        self.assertEqual(progress['batch_num'], 0)
+        self.assertEqual(progress['processed_count'], 0)
+        self.assertEqual(progress['skipped_count'], 0)
+
+
+class TestMemoryOptimizer(unittest.TestCase):
+    """Tests for the memory optimizer"""
+
+    def setUp(self):
+        """Set up before each test"""
+        self.optimizer = MemoryOptimizer(max_memory_mb=100)
+
+    def test_optimizer_initialization(self):
+        """Test memory optimizer initialization"""
+        self.assertEqual(self.optimizer.max_memory_mb, 100)
+
+    def test_should_pause_for_memory_without_psutil(self):
+        """Test the memory check when psutil is unavailable"""
+        # Without psutil, should_pause_for_memory should return False
+        result = self.optimizer.should_pause_for_memory()
+        self.assertFalse(result)
+
+    def test_force_garbage_collection(self):
+        """Test forced garbage collection"""
+        # The method should run without raising
+        try:
+            self.optimizer.force_garbage_collection()
+            success = True
+        except:
+            success = False
+        self.assertTrue(success)
+
+
+class TestDataSourceAdapter(unittest.TestCase):
+    """Tests for the data source adapter"""
+
+    def test_from_file_adapter(self):
+        """Test the file data source adapter"""
+        # Create a temporary file
+        with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
+            for i in range(10):
+                f.write(f"line {i}\n")
+            temp_file_name = f.name
+
+        try:
+            # Create a file data source adapter
+            adapter = DataSourceAdapter.from_file(temp_file_name, batch_size=5)
+
+            # Fetch the first batch
+            batch = adapter(0, 5)
+            self.assertEqual(len(batch), 5)
+            self.assertEqual(batch[0], "line 0")
+            self.assertEqual(batch[4], "line 4")
+
+            # Fetch the second batch
+            batch = adapter(5, 5)
+            self.assertEqual(len(batch), 5)
+            self.assertEqual(batch[0], "line 5")
+            self.assertEqual(batch[4], "line 9")
+
+        finally:
+            # Clean up the temporary file
+            os.unlink(temp_file_name)
+
+
+class TestLargeScaleSpiderMixin(unittest.TestCase):
+    """Tests for the large-scale spider mixin"""
+
+    def test_mixin_initialization(self):
+        """Test mixin initialization"""
+        # Create a mock spider class
+        class MockSpider:
+            def __init__(self):
+                self.name = "test_spider"
+
+        class TestSpider(MockSpider, LargeScaleSpiderMixin):
+            def __init__(self):
+                MockSpider.__init__(self)
+                LargeScaleSpiderMixin.__init__(self)
+
+        spider = TestSpider()
+
+        # Verify initialization
+        self.assertEqual(spider.name, "test_spider")
+        self.assertIsNotNone(spider.large_scale_helper)
+        self.assertIsNotNone(spider.progress_manager)
+        self.assertIsNotNone(spider.memory_optimizer)
+
+    def test_mixin_attributes(self):
+        """Test mixin attributes"""
+        # Create a mock spider class
+        class MockSpider:
+            def __init__(self):
+                self.name = "test_spider"
+
+        class TestSpider(MockSpider, LargeScaleSpiderMixin):
+            def __init__(self):
+                MockSpider.__init__(self)
+                LargeScaleSpiderMixin.__init__(self)
+
+        spider = TestSpider()
+
+        # Verify the attribute types
+        self.assertIsInstance(spider.large_scale_helper, LargeScaleHelper)
+        self.assertIsInstance(spider.progress_manager, ProgressManager)
+        self.assertIsInstance(spider.memory_optimizer, MemoryOptimizer)
+
+
+if __name__ == '__main__':
+    unittest.main()
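
Together these tests document a small resumable-batching toolkit: LargeScaleHelper slices a data source into batches, ProgressManager checkpoints position to a JSON file (returning zeroed defaults when the file is missing), and DataSourceAdapter.from_file yields a callable taking (offset, limit). A sketch of how the pieces could compose into a resumable loop, using only the signatures exercised above; the glue logic and the checkpoint path are assumptions, not code from the package:

    from crawlo.utils.large_scale_helper import LargeScaleHelper, ProgressManager

    helper = LargeScaleHelper(batch_size=100, checkpoint_interval=500)
    pm = ProgressManager("crawl_progress.json")  # hypothetical checkpoint path

    progress = pm.load_progress()                # zeroed defaults if no file yet
    data = list(range(10_000))                   # stand-in for real work items

    processed = progress['processed_count']
    for num, batch in enumerate(helper.batch_iterator(data, start_offset=processed),
                                start=progress['batch_num']):
        processed += len(batch)                  # replace with real per-item processing
        pm.save_progress(batch_num=num, processed_count=processed, skipped_count=0)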
tests/test_mode_change.py
ADDED
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+Test the run-mode log-level change
+"""
+import sys
+import os
+sys.path.insert(0, '/')
+
+def test_mode_log_level():
+    print("=== Testing the run-mode log-level change ===")
+
+    # Remove the old log file
+    test_log_file = '/Users/oscar/projects/Crawlo/test_mode_change.log'
+    if os.path.exists(test_log_file):
+        os.remove(test_log_file)
+
+    # Prepare test settings
+    test_settings = {
+        'PROJECT_NAME': 'test_mode_change',
+        'LOG_LEVEL': 'INFO',
+        'LOG_FILE': test_log_file,
+        'RUN_MODE': 'standalone'
+    }
+
+    try:
+        # Initialize the framework
+        from crawlo.core.framework_initializer import initialize_framework
+        settings = initialize_framework(test_settings)
+
+        print(f"Settings initialized: {settings.get('PROJECT_NAME')}")
+
+        # Check whether the log file contains run-mode messages
+        if os.path.exists(test_log_file):
+            with open(test_log_file, 'r', encoding='utf-8') as f:
+                content = f.read()
+                print(f"Log file length: {len(content)} characters")
+
+            # Look for run-mode messages still at INFO level ('使用单机模式' = "using standalone mode")
+            info_lines = [line for line in content.split('\n') if 'INFO' in line and '使用单机模式' in line]
+            debug_lines = [line for line in content.split('\n') if 'DEBUG' in line and '使用单机模式' in line]
+
+            if info_lines:
+                print("❌ Run-mode messages still found at INFO level:")
+                for line in info_lines:
+                    print(f"  {line}")
+            else:
+                print("✅ No INFO-level run-mode messages found")
+
+            if debug_lines:
+                print("✅ Run-mode messages found at DEBUG level:")
+                for line in debug_lines:
+                    print(f"  {line}")
+            else:
+                print("❌ No DEBUG-level run-mode messages found")
+
+            print("\nFull log contents:")
+            lines = content.split('\n')
+            for i, line in enumerate(lines, 1):
+                if line.strip():
+                    print(f"{i:3d}: {line}")
+        else:
+            print("❌ Log file was not created")
+
+    except Exception as e:
+        print(f"Error: {e}")
+        import traceback
+        traceback.print_exc()
+
+    print("=== Test complete ===")
+
+if __name__ == "__main__":
+    test_mode_log_level()
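
The script above eyeballs the log with prints rather than asserting. A compact, assert-based variant of the same check, assuming the framework's standalone-mode message (the Chinese literal '使用单机模式') is unchanged; the helper name is hypothetical:

    def assert_mode_logged_at_debug(log_file: str) -> None:
        # Fail if the run-mode message still appears at INFO, or never at DEBUG.
        with open(log_file, encoding='utf-8') as f:
            lines = f.read().splitlines()
        info = [l for l in lines if 'INFO' in l and '使用单机模式' in l]
        debug = [l for l in lines if 'DEBUG' in l and '使用单机模式' in l]
        assert not info, f"run-mode message still logged at INFO: {info}"
        assert debug, "expected the run-mode message at DEBUG level"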
tests/test_mode_consistency.py
CHANGED
@@ -1,52 +1,52 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-"""
-Test the mode-consistency hint
-"""
-import asyncio
-import sys
-import os
-
-# Add the project root to the path
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
-
-from crawlo.crawler import CrawlerProcess
-from crawlo.spider import Spider
-from crawlo import Request
-
-
-class TestSpider(Spider):
-    name = "test_mode_spider"
-
-    def start_requests(self):
-        yield Request("https://httpbin.org/get")
-
-    def parse(self, response):
-        yield {"url": response.url, "status": response.status
-
-
-async def test_mode_consistency():
-    """Test the mode-consistency hint"""
-    print("Testing the mode-consistency hint...")
-
-    try:
-        # Create the crawler process
-        process = CrawlerProcess()
-
-        # Add the spider
-        await process.crawl(TestSpider)
-
-        print("Mode-consistency test finished")
-
-    except Exception as e:
-        print(f"Test failed: {e}")
-        import traceback
-        traceback.print_exc()
-
-
-if __name__ == "__main__":
-    # Set the log level
-    import logging
-    logging.basicConfig(level=logging.INFO)
-
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Test the mode-consistency hint
+"""
+import asyncio
+import sys
+import os
+
+# Add the project root to the path
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+from crawlo.crawler import CrawlerProcess
+from crawlo.spider import Spider
+from crawlo import Request
+
+
+class TestSpider(Spider):
+    name = "test_mode_spider"
+
+    def start_requests(self):
+        yield Request("https://httpbin.org/get")
+
+    def parse(self, response):
+        yield {"url": response.url, "status": response.status_code}  # Fixed: use status_code instead of status
+
+
+async def test_mode_consistency():
+    """Test the mode-consistency hint"""
+    print("Testing the mode-consistency hint...")
+
+    try:
+        # Create the crawler process
+        process = CrawlerProcess()
+
+        # Add the spider
+        await process.crawl(TestSpider)
+
+        print("Mode-consistency test finished")
+
+    except Exception as e:
+        print(f"Test failed: {e}")
+        import traceback
+        traceback.print_exc()
+
+
+if __name__ == "__main__":
+    # Set the log level
+    import logging
+    logging.basicConfig(level=logging.INFO)
+
     asyncio.run(test_mode_consistency())