crawlo-1.2.0-py3-none-any.whl → crawlo-1.2.1-py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +61 -61
- crawlo/__version__.py +1 -1
- crawlo/cleaners/__init__.py +60 -60
- crawlo/cleaners/data_formatter.py +225 -225
- crawlo/cleaners/encoding_converter.py +125 -125
- crawlo/cleaners/text_cleaner.py +232 -232
- crawlo/cli.py +65 -65
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +142 -132
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +292 -292
- crawlo/commands/startproject.py +418 -418
- crawlo/commands/stats.py +188 -188
- crawlo/commands/utils.py +186 -186
- crawlo/config.py +312 -312
- crawlo/config_validator.py +252 -252
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +354 -354
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +143 -143
- crawlo/crawler.py +1027 -1027
- crawlo/downloader/__init__.py +266 -266
- crawlo/downloader/aiohttp_downloader.py +220 -220
- crawlo/downloader/cffi_downloader.py +256 -256
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +213 -213
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +37 -37
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +43 -43
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +280 -280
- crawlo/filters/memory_filter.py +269 -269
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +53 -53
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -32
- crawlo/middleware/download_delay.py +105 -28
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +116 -0
- crawlo/middleware/proxy.py +366 -272
- crawlo/middleware/request_ignore.py +88 -30
- crawlo/middleware/response_code.py +164 -18
- crawlo/middleware/response_filter.py +138 -26
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +211 -211
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +338 -338
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +224 -224
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +316 -316
- crawlo/pipelines/pipeline_manager.py +61 -61
- crawlo/pipelines/redis_dedup_pipeline.py +167 -167
- crawlo/project.py +187 -187
- crawlo/queue/pqueue.py +37 -37
- crawlo/queue/queue_manager.py +337 -337
- crawlo/queue/redis_priority_queue.py +298 -298
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +226 -219
- crawlo/settings/setting_manager.py +122 -122
- crawlo/spider/__init__.py +639 -639
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +130 -130
- crawlo/task_manager.py +30 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -109
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/run.py.tmpl +45 -45
- crawlo/templates/project/settings.py.tmpl +327 -326
- crawlo/templates/project/settings_distributed.py.tmpl +119 -119
- crawlo/templates/project/settings_gentle.py.tmpl +94 -94
- crawlo/templates/project/settings_high_performance.py.tmpl +151 -151
- crawlo/templates/project/settings_simple.py.tmpl +68 -68
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +143 -141
- crawlo/tools/__init__.py +182 -182
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +35 -35
- crawlo/tools/distributed_coordinator.py +386 -386
- crawlo/tools/retry_mechanism.py +220 -220
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/utils/__init__.py +35 -35
- crawlo/utils/batch_processor.py +260 -260
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/date_tools.py +290 -290
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +359 -359
- crawlo/utils/env_config.py +105 -105
- crawlo/utils/error_handler.py +125 -125
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +343 -343
- crawlo/utils/log.py +128 -128
- crawlo/utils/performance_monitor.py +284 -284
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +334 -334
- crawlo/utils/redis_key_validator.py +199 -199
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +219 -219
- crawlo/utils/spider_loader.py +62 -62
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- {crawlo-1.2.0.dist-info → crawlo-1.2.1.dist-info}/METADATA +692 -697
- crawlo-1.2.1.dist-info/RECORD +220 -0
- examples/__init__.py +7 -7
- examples/aiohttp_settings.py +42 -0
- examples/curl_cffi_settings.py +41 -0
- examples/default_header_middleware_example.py +107 -0
- examples/default_header_spider_example.py +129 -0
- examples/download_delay_middleware_example.py +160 -0
- examples/httpx_settings.py +42 -0
- examples/multi_downloader_proxy_example.py +81 -0
- examples/offsite_middleware_example.py +55 -0
- examples/offsite_spider_example.py +107 -0
- examples/proxy_spider_example.py +166 -0
- examples/request_ignore_middleware_example.py +51 -0
- examples/request_ignore_spider_example.py +99 -0
- examples/response_code_middleware_example.py +52 -0
- examples/response_filter_middleware_example.py +67 -0
- examples/tong_hua_shun_settings.py +62 -0
- examples/tong_hua_shun_spider.py +170 -0
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +236 -236
- tests/cleaners_example.py +160 -160
- tests/config_validation_demo.py +102 -102
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/redis_key_validation_demo.py +130 -130
- tests/response_improvements_example.py +144 -144
- tests/test_advanced_tools.py +148 -148
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_cleaners.py +54 -54
- tests/test_comprehensive.py +146 -146
- tests/test_config_validator.py +193 -193
- tests/test_crawlo_proxy_integration.py +173 -0
- tests/test_date_tools.py +123 -123
- tests/test_default_header_middleware.py +159 -0
- tests/test_double_crawlo_fix.py +207 -207
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +222 -0
- tests/test_downloader_proxy_compatibility.py +269 -0
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +103 -103
- tests/test_integration.py +356 -356
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_offsite_middleware.py +222 -0
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_proxy_api.py +265 -0
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +122 -0
- tests/test_proxy_middleware_enhanced.py +217 -0
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +176 -176
- tests/test_real_scenario_proxy.py +196 -0
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_request_ignore_middleware.py +183 -0
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +350 -0
- tests/test_response_filter_middleware.py +428 -0
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +242 -0
- tests/test_scheduler.py +241 -241
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +153 -153
- tests/tools_example.py +257 -257
- crawlo-1.2.0.dist-info/RECORD +0 -190
- {crawlo-1.2.0.dist-info → crawlo-1.2.1.dist-info}/WHEEL +0 -0
- {crawlo-1.2.0.dist-info → crawlo-1.2.1.dist-info}/entry_points.txt +0 -0
- {crawlo-1.2.0.dist-info → crawlo-1.2.1.dist-info}/top_level.txt +0 -0
tests/test_double_crawlo_fix_simple.py
@@ -1,125 +1,125 @@
(Every line of this file was removed and re-added with identical content; only the final line, sys.exit(exit_code), is unchanged context. The file is shown once below.)

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Simple test script for the double "crawlo" prefix fix.
Verifies the fix for the double "crawlo" prefix issue in Redis queue names,
without depending on an actual Redis connection.
"""
import sys
import os
import asyncio
import traceback

# Add the project root directory to the path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

# Import the module under test
from crawlo.queue.redis_priority_queue import RedisPriorityQueue


def test_redis_queue_naming():
    """Test the Redis queue naming fix"""
    print("🚀 Starting Redis queue naming fix test...")
    print("=" * 50)

    test_cases = [
        {
            "name": "normal naming",
            "queue_name": "crawlo:test_project:queue:requests",
            "expected_queue": "crawlo:test_project:queue:requests",
            "expected_processing": "crawlo:test_project:queue:processing",
            "expected_failed": "crawlo:test_project:queue:failed"
        },
        {
            "name": "double crawlo prefix",
            "queue_name": "crawlo:crawlo:queue:requests",
            "expected_queue": "crawlo:crawlo:queue:requests",  # original name kept unchanged
            "expected_processing": "crawlo:crawlo:queue:processing",  # double prefix kept consistent
            "expected_failed": "crawlo:crawlo:queue:failed"
        },
        {
            "name": "triple crawlo prefix",
            "queue_name": "crawlo:crawlo:crawlo:queue:requests",
            "expected_queue": "crawlo:crawlo:crawlo:queue:requests",  # original name kept unchanged
            "expected_processing": "crawlo:crawlo:crawlo:queue:processing",  # prefix kept consistent
            "expected_failed": "crawlo:crawlo:crawlo:queue:failed"
        },
        {
            "name": "no crawlo prefix",
            "queue_name": "test_project:queue:requests",
            "expected_queue": "test_project:queue:requests",  # original name kept unchanged
            "expected_processing": "test_project:queue:processing",
            "expected_failed": "test_project:queue:failed"
        }
    ]

    try:
        for i, test_case in enumerate(test_cases, 1):
            print(f"Test {i}: {test_case['name']}")
            print(f"  Input queue name: {test_case['queue_name']}")

            # Test RedisPriorityQueue initialization
            try:
                queue = RedisPriorityQueue(
                    redis_url="redis://127.0.0.1:6379/15",
                    queue_name=test_case['queue_name'],
                    module_name="test_project"
                )

                print(f"  Queue name after fix: {queue.queue_name}")
                print(f"  Processing queue after fix: {queue.processing_queue}")
                print(f"  Failed queue after fix: {queue.failed_queue}")

                # Verify the results
                assert queue.queue_name == test_case['expected_queue'], \
                    f"Queue name mismatch: {queue.queue_name} != {test_case['expected_queue']}"
                assert queue.processing_queue == test_case['expected_processing'], \
                    f"Processing queue name mismatch: {queue.processing_queue} != {test_case['expected_processing']}"
                assert queue.failed_queue == test_case['expected_failed'], \
                    f"Failed queue name mismatch: {queue.failed_queue} != {test_case['expected_failed']}"

                print("  ✅ Test passed")
            except Exception as e:
                print(f"  ❌ Test failed: {e}")
                traceback.print_exc()
                return False

            print()

        print("✅ Redis queue naming fix test passed!")
        return True

    except Exception as e:
        print(f"❌ Redis queue naming fix test failed: {e}")
        traceback.print_exc()
        return False


def main():
    """Main test function"""
    print("🚀 Starting double crawlo prefix fix test...")
    print("=" * 50)

    try:
        # Test the Redis queue naming fix
        redis_test_success = test_redis_queue_naming()
        print()

        print("=" * 50)
        if redis_test_success:
            print("🎉 Redis queue naming fix test passed! The double crawlo prefix issue is fixed")
        else:
            print("❌ Redis queue naming fix test failed, please check the implementation")
            return 1

    except Exception as e:
        print("=" * 50)
        print(f"❌ An exception occurred during testing: {e}")
        traceback.print_exc()
        return 1

    return 0


if __name__ == "__main__":
    exit_code = main()
    sys.exit(exit_code)
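The naming rule these cases pin down is simple: the requests queue name is used exactly as given (no prefix stripping or deduplication), and the processing/failed names are derived from it. A minimal sketch of that rule, assuming the derivation replaces the last colon-separated segment; this is an illustration inferred from the test expectations, not crawlo's actual RedisPriorityQueue code:

# Minimal sketch of the naming rule implied by the tests above.
# NOTE: illustrative only; not crawlo's actual RedisPriorityQueue implementation.
def derive_queue_names(queue_name: str) -> dict:
    """Keep the requests queue name unchanged; derive the processing and
    failed queue names by replacing the trailing ':requests' segment."""
    base = queue_name.rsplit(":", 1)[0]  # e.g. 'crawlo:test_project:queue'
    return {
        "queue": queue_name,
        "processing": f"{base}:processing",
        "failed": f"{base}:failed",
    }

# Matches the "double crawlo prefix" case: the prefix is preserved verbatim.
assert derive_queue_names("crawlo:crawlo:queue:requests") == {
    "queue": "crawlo:crawlo:queue:requests",
    "processing": "crawlo:crawlo:queue:processing",
    "failed": "crawlo:crawlo:queue:failed",
}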
tests/test_download_delay_middleware.py
@@ -0,0 +1,222 @@
(New file: all 222 lines added.)

#!/usr/bin/python
# -*- coding:UTF-8 -*-
"""
Tests for DownloadDelayMiddleware
Exercises the functionality of the download-delay middleware
"""

import asyncio
import unittest
from unittest.mock import Mock, patch

from crawlo.middleware.download_delay import DownloadDelayMiddleware
from crawlo.exceptions import NotConfiguredError
from crawlo.settings.setting_manager import SettingManager


class MockLogger:
    """Mock logger used to capture log output"""
    def __init__(self, name, level=None):
        self.name = name
        self.level = level
        self.logs = []

    def debug(self, msg):
        self.logs.append(('debug', msg))

    def info(self, msg):
        self.logs.append(('info', msg))

    def warning(self, msg):
        self.logs.append(('warning', msg))

    def error(self, msg):
        self.logs.append(('error', msg))


class MockStats:
    """Mock stats collector used to inspect recorded statistics"""
    def __init__(self):
        self.stats = {}

    def inc_value(self, key, value=1):
        if key in self.stats:
            self.stats[key] += value
        else:
            self.stats[key] = value


class TestDownloadDelayMiddleware(unittest.TestCase):
    """Tests for DownloadDelayMiddleware"""

    def setUp(self):
        """Set up test fixtures"""
        # Create the settings manager
        self.settings = SettingManager()

        # Create a mock crawler
        self.crawler = Mock()
        self.crawler.settings = self.settings

        # Create mock request and spider objects
        self.request = Mock()
        self.spider = Mock()

    @patch('crawlo.utils.log.get_logger')
    def test_middleware_initialization_without_delay(self, mock_get_logger):
        """Initialization fails when DOWNLOAD_DELAY is not set"""
        # Set DOWNLOAD_DELAY to 0
        self.settings.set('DOWNLOAD_DELAY', 0)
        mock_get_logger.return_value = MockLogger('DownloadDelayMiddleware')

        # A NotConfiguredError should be raised
        with self.assertRaises(NotConfiguredError) as context:
            DownloadDelayMiddleware.create_instance(self.crawler)

        self.assertIn("DOWNLOAD_DELAY not set or is zero", str(context.exception))

    @patch('crawlo.utils.log.get_logger')
    def test_middleware_initialization_with_delay(self, mock_get_logger):
        """Initialization succeeds when DOWNLOAD_DELAY is set correctly"""
        # Configure DOWNLOAD_DELAY
        self.settings.set('DOWNLOAD_DELAY', 2.0)
        self.settings.set('RANDOMNESS', False)
        self.settings.set('RANDOM_RANGE', [0.5, 1.5])
        self.settings.set('LOG_LEVEL', 'INFO')

        mock_get_logger.return_value = MockLogger('DownloadDelayMiddleware')

        # The instance should be created normally
        middleware = DownloadDelayMiddleware.create_instance(self.crawler)
        self.assertIsInstance(middleware, DownloadDelayMiddleware)
        self.assertEqual(middleware.delay, 2.0)
        self.assertFalse(middleware.randomness)

    @patch('crawlo.utils.log.get_logger')
    def test_middleware_initialization_with_randomness(self, mock_get_logger):
        """Initialization with random delay enabled"""
        # Configure DOWNLOAD_DELAY and the randomness settings
        self.settings.set('DOWNLOAD_DELAY', 1.0)
        self.settings.set('RANDOMNESS', True)
        self.settings.set('RANDOM_RANGE', [0.5, 2.0])
        self.settings.set('LOG_LEVEL', 'INFO')

        mock_get_logger.return_value = MockLogger('DownloadDelayMiddleware')

        # The instance should be created normally
        middleware = DownloadDelayMiddleware.create_instance(self.crawler)
        self.assertIsInstance(middleware, DownloadDelayMiddleware)
        self.assertEqual(middleware.delay, 1.0)
        self.assertTrue(middleware.randomness)
        self.assertEqual(middleware.floor, 0.5)
        self.assertEqual(middleware.upper, 2.0)

    @patch('crawlo.utils.log.get_logger')
    def test_middleware_initialization_with_invalid_random_range(self, mock_get_logger):
        """Initialization with an invalid RANDOM_RANGE falls back to the defaults"""
        # Configure DOWNLOAD_DELAY and an invalid random range
        self.settings.set('DOWNLOAD_DELAY', 1.0)
        self.settings.set('RANDOMNESS', True)
        self.settings.set('RANDOM_RANGE', ['invalid', 'range'])
        self.settings.set('LOG_LEVEL', 'INFO')

        mock_get_logger.return_value = MockLogger('DownloadDelayMiddleware')

        # The instance should be created normally with the default random range
        middleware = DownloadDelayMiddleware.create_instance(self.crawler)
        self.assertIsInstance(middleware, DownloadDelayMiddleware)
        self.assertEqual(middleware.floor, 0.5)
        self.assertEqual(middleware.upper, 1.5)

    @patch('crawlo.utils.log.get_logger')
    def test_middleware_initialization_with_incomplete_random_range(self, mock_get_logger):
        """Initialization with an incomplete RANDOM_RANGE falls back to the defaults"""
        # Configure DOWNLOAD_DELAY and an incomplete random range
        self.settings.set('DOWNLOAD_DELAY', 1.0)
        self.settings.set('RANDOMNESS', True)
        self.settings.set('RANDOM_RANGE', [0.8])  # only one value
        self.settings.set('LOG_LEVEL', 'INFO')

        mock_get_logger.return_value = MockLogger('DownloadDelayMiddleware')

        # The instance should be created normally with the default random range
        middleware = DownloadDelayMiddleware.create_instance(self.crawler)
        self.assertIsInstance(middleware, DownloadDelayMiddleware)
        self.assertEqual(middleware.floor, 0.5)
        self.assertEqual(middleware.upper, 1.5)

    @patch('crawlo.middleware.download_delay.sleep')
    @patch('crawlo.utils.log.get_logger')
    def test_process_request_without_randomness(self, mock_get_logger, mock_sleep):
        """Request processing with random delay disabled"""
        # Configure DOWNLOAD_DELAY
        self.settings.set('DOWNLOAD_DELAY', 1.5)
        self.settings.set('RANDOMNESS', False)
        self.settings.set('LOG_LEVEL', 'DEBUG')  # DEBUG level enables logging

        mock_logger = MockLogger('DownloadDelayMiddleware')
        mock_get_logger.return_value = mock_logger

        middleware = DownloadDelayMiddleware.create_instance(self.crawler)

        # Process the request
        asyncio.run(middleware.process_request(self.request, self.spider))

        # Verify sleep was called with the correct argument
        mock_sleep.assert_called_once_with(1.5)

    @patch('crawlo.middleware.download_delay.sleep')
    @patch('crawlo.middleware.download_delay.uniform')
    @patch('crawlo.utils.log.get_logger')
    def test_process_request_with_randomness(self, mock_get_logger, mock_uniform, mock_sleep):
        """Request processing with random delay enabled"""
        # Configure DOWNLOAD_DELAY and the randomness settings
        self.settings.set('DOWNLOAD_DELAY', 2.0)
        self.settings.set('RANDOMNESS', True)
        self.settings.set('RANDOM_RANGE', [0.5, 1.5])
        self.settings.set('LOG_LEVEL', 'DEBUG')  # DEBUG level enables logging

        mock_logger = MockLogger('DownloadDelayMiddleware')
        mock_get_logger.return_value = mock_logger
        mock_uniform.return_value = 2.5  # make the random draw return 2.5

        middleware = DownloadDelayMiddleware.create_instance(self.crawler)

        # Process the request
        asyncio.run(middleware.process_request(self.request, self.spider))

        # Verify uniform was called with the correct bounds
        mock_uniform.assert_called_once_with(1.0, 3.0)  # 2.0*0.5=1.0, 2.0*1.5=3.0
        # Verify sleep was called with the drawn value
        mock_sleep.assert_called_once_with(2.5)

    @patch('crawlo.middleware.download_delay.sleep')
    @patch('crawlo.utils.log.get_logger')
    def test_process_request_with_stats(self, mock_get_logger, mock_sleep):
        """Request processing with a stats collector attached"""
        # Configure DOWNLOAD_DELAY
        self.settings.set('DOWNLOAD_DELAY', 1.0)
        self.settings.set('RANDOMNESS', False)
        self.settings.set('LOG_LEVEL', 'INFO')

        # Attach a stats collector to the crawler
        mock_stats = MockStats()
        self.crawler.stats = mock_stats

        mock_logger = MockLogger('DownloadDelayMiddleware')
        mock_get_logger.return_value = mock_logger

        middleware = DownloadDelayMiddleware.create_instance(self.crawler)

        # Process the request
        asyncio.run(middleware.process_request(self.request, self.spider))

        # Verify the recorded statistics
        self.assertIn('download_delay/fixed_count', mock_stats.stats)
        self.assertEqual(mock_stats.stats['download_delay/fixed_count'], 1)
        self.assertIn('download_delay/fixed_total_time', mock_stats.stats)
        self.assertEqual(mock_stats.stats['download_delay/fixed_total_time'], 1.0)


if __name__ == '__main__':
    unittest.main()