crawlo-1.2.8-py3-none-any.whl → crawlo-1.3.0-py3-none-any.whl
This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +63 -61
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +314 -323
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +186 -186
- crawlo/config.py +312 -312
- crawlo/config_validator.py +277 -251
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +365 -365
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +256 -251
- crawlo/crawler.py +1097 -1099
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -107
- crawlo/downloader/__init__.py +273 -266
- crawlo/downloader/aiohttp_downloader.py +226 -228
- crawlo/downloader/cffi_downloader.py +245 -256
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +39 -39
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +45 -43
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +234 -234
- crawlo/filters/memory_filter.py +269 -269
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +136 -136
- crawlo/middleware/offsite.py +114 -114
- crawlo/middleware/proxy.py +386 -368
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +163 -163
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/middleware/simple_proxy.py +65 -0
- crawlo/mode_manager.py +212 -211
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +379 -338
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +157 -157
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +223 -223
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +317 -317
- crawlo/pipelines/pipeline_manager.py +74 -62
- crawlo/pipelines/redis_dedup_pipeline.py +167 -167
- crawlo/project.py +284 -315
- crawlo/queue/pqueue.py +37 -37
- crawlo/queue/queue_manager.py +379 -378
- crawlo/queue/redis_priority_queue.py +306 -306
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +216 -220
- crawlo/settings/setting_manager.py +175 -122
- crawlo/spider/__init__.py +639 -639
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +30 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +261 -288
- crawlo/templates/project/settings_distributed.py.tmpl +174 -157
- crawlo/templates/project/settings_gentle.py.tmpl +95 -100
- crawlo/templates/project/settings_high_performance.py.tmpl +125 -134
- crawlo/templates/project/settings_minimal.py.tmpl +30 -0
- crawlo/templates/project/settings_simple.py.tmpl +96 -98
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/run.py.tmpl +47 -47
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/tools/__init__.py +200 -182
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/{cleaners → tools}/data_formatter.py +225 -225
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +290 -36
- crawlo/tools/distributed_coordinator.py +388 -387
- crawlo/{cleaners → tools}/encoding_converter.py +127 -126
- crawlo/tools/request_tools.py +83 -0
- crawlo/tools/retry_mechanism.py +224 -221
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/{cleaners → tools}/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +35 -35
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +142 -142
- crawlo/utils/error_handler.py +123 -123
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +146 -128
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +351 -351
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +218 -218
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/METADATA +1011 -764
- crawlo-1.3.0.dist-info/RECORD +219 -0
- examples/__init__.py +7 -7
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +107 -237
- tests/cleaners_example.py +160 -160
- tests/config_validation_demo.py +143 -103
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_pipelines.py +67 -0
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +151 -0
- tests/response_improvements_example.py +144 -144
- tests/test_advanced_tools.py +148 -148
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_cleaners.py +54 -54
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +153 -0
- tests/test_config_validator.py +182 -193
- tests/test_crawlo_proxy_integration.py +109 -173
- tests/test_date_tools.py +123 -123
- tests/test_default_header_middleware.py +158 -158
- tests/test_distributed.py +65 -0
- tests/test_double_crawlo_fix.py +207 -207
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +103 -103
- tests/test_integration.py +169 -357
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_mode_consistency.py +51 -51
- tests/test_offsite_middleware.py +221 -221
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_middleware_refactored.py +185 -0
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +176 -176
- tests/test_random_user_agent.py +73 -0
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +112 -0
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +241 -241
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -153
- tests/test_user_agents.py +97 -0
- tests/tools_example.py +260 -257
- tests/verify_distributed.py +117 -0
- crawlo/cleaners/__init__.py +0 -61
- crawlo/utils/date_tools.py +0 -290
- crawlo-1.2.8.dist-info/RECORD +0 -209
- {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/WHEEL +0 -0
- {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/entry_points.txt +0 -0
- {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/top_level.txt +0 -0
tests/test_proxy_strategies.py
CHANGED

@@ -1,60 +1,60 @@

Lines 1-59 are removed and re-added with no visible content change (likely a whitespace or line-ending rewrite); line 60 is unchanged context. The resulting file:

```python
# tests/test_proxy_strategies.py
import pytest
from crawlo import Request
from crawlo.proxy.strategies import STRATEGIES


@pytest.fixture
def mock_proxies():
    """Provide a proxy list for testing."""
    return [
        {'url': 'http://p1:8080'},
        {'url': 'http://p2:8080'},
        {'url': 'http://p3:8080'},
    ]


@pytest.fixture
def mock_stats():
    """Provide usage statistics for testing."""
    return {
        'http://p1:8080': {'total': 10},
        'http://p2:8080': {'total': 5},
        'http://p3:8080': {'total': 1},
    }


@pytest.fixture
def mock_request():
    """Provide a request object for testing."""
    return Request("https://example.com")


def test_random_strategy(mock_proxies, mock_request, mock_stats):
    """Test the random strategy."""
    strategy = STRATEGIES['random']
    chosen = strategy(mock_proxies, mock_request, mock_stats)
    assert chosen in [p['url'] for p in mock_proxies]


def test_least_used_strategy(mock_proxies, mock_request, mock_stats):
    """Test the least-used strategy."""
    strategy = STRATEGIES['least_used']
    chosen = strategy(mock_proxies, mock_request, mock_stats)
    assert chosen == 'http://p3:8080'  # total=1


def test_domain_rule_strategy(mock_proxies, mock_request, mock_stats):
    """Test the domain-rule strategy."""
    from crawlo.proxy.strategies.domain_rule import domain_rule_strategy
    request = Request("https://taobao.com/item/123")
    rules = {'taobao.com': 'http://special:8080'}

    # Monkey-patch to guarantee a known fallback strategy
    old_strategy = STRATEGIES['least_used']
    try:
        STRATEGIES['least_used'] = lambda p, r, s: 'http://fallback:8080'
        chosen = domain_rule_strategy(mock_proxies, request, mock_stats, rules)
        assert chosen == 'http://special:8080'
    finally:
        STRATEGIES['least_used'] = old_strategy
```
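For orientation, here is a minimal sketch of the two built-in strategies these tests exercise. The `(proxies, request, stats)` call signature and the proxy-URL return value follow from the test calls and assertions above; the function bodies are illustrative assumptions, not crawlo's actual implementation.

```python
import random


def random_strategy(proxies, request, stats):
    # Any proxy URL from the pool satisfies test_random_strategy.
    return random.choice(proxies)['url']


def least_used_strategy(proxies, request, stats):
    # test_least_used_strategy expects the URL with the smallest 'total';
    # proxies missing from stats count as never used.
    return min(proxies,
               key=lambda p: stats.get(p['url'], {}).get('total', 0))['url']


# Registry keyed by strategy name, mirroring how the tests index STRATEGIES.
STRATEGIES = {'random': random_strategy, 'least_used': least_used_strategy}

proxies = [{'url': 'http://p1:8080'}, {'url': 'http://p3:8080'}]
stats = {'http://p1:8080': {'total': 10}, 'http://p3:8080': {'total': 1}}
print(STRATEGIES['least_used'](proxies, None, stats))  # http://p3:8080
```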
tests/test_queue_manager_double_crawlo.py
CHANGED

@@ -1,174 +1,174 @@

Lines 1-173 are removed and re-added; line 174 is unchanged context. Several print strings on the removed side are truncated in this extraction, so the added side is shown in full:

```python
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Test script for the double "crawlo" prefix issue in the queue manager.
Verifies the queue manager's behavior when a queue name carries a doubled
"crawlo" prefix.
"""
import sys
import os
import asyncio
import traceback

# Add the project root to the path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

# Import the modules under test
from crawlo.queue.queue_manager import QueueManager, QueueConfig, QueueType


async def test_queue_manager_naming():
    """Test project-name extraction in the queue manager."""
    print("Testing project-name extraction in the queue manager...")
    print("=" * 50)

    test_cases = [
        {
            "name": "normal naming",
            "queue_name": "crawlo:test_project:queue:requests",
            "expected_module": "test_project"
        },
        {
            "name": "double crawlo prefix",
            "queue_name": "crawlo:crawlo:queue:requests",
            "expected_module": "queue"  # the third segment is the project name
        },
        {
            "name": "triple crawlo prefix",
            "queue_name": "crawlo:crawlo:crawlo:queue:requests",
            "expected_module": "crawlo"  # the third segment is the project name
        },
        {
            "name": "no crawlo prefix",
            "queue_name": "test_project:queue:requests",
            "expected_module": "test_project"
        }
    ]

    try:
        for i, test_case in enumerate(test_cases, 1):
            print(f"Test {i}: {test_case['name']}")
            print(f"  input queue name: {test_case['queue_name']}")

            # Use the revised project-name extraction logic
            project_name = "default"
            if ':' in test_case['queue_name']:
                parts = test_case['queue_name'].split(':')
                # Skip every "crawlo" prefix; take the first non-"crawlo"
                # segment as the project name
                for part in parts:
                    if part != "crawlo":
                        project_name = part
                        break
            else:
                project_name = test_case['queue_name'] or "default"

            print(f"  extracted project name: {project_name}")
            print(f"  expected project name: {test_case['expected_module']}")

            # Verify the result
            assert project_name == test_case['expected_module'], \
                f"project name mismatch: {project_name} != {test_case['expected_module']}"

            print("  test passed")
            print()

        print("Queue manager project-name extraction test passed!")
        return True

    except Exception as e:
        print(f"Queue manager project-name extraction test failed: {e}")
        traceback.print_exc()
        return False


async def test_queue_manager_create_queue():
    """Test queue creation through the queue manager."""
    print("Testing queue creation through the queue manager...")
    print("=" * 50)

    test_cases = [
        {
            "name": "normal naming",
            "queue_name": "crawlo:test_project:queue:requests",
            "expected_queue": "crawlo:test_project:queue:requests",
            "expected_processing": "crawlo:test_project:queue:processing",
            "expected_failed": "crawlo:test_project:queue:failed"
        },
        {
            "name": "double crawlo prefix",
            "queue_name": "crawlo:crawlo:queue:requests",
            "expected_queue": "crawlo:crawlo:queue:requests",
            "expected_processing": "crawlo:crawlo:queue:processing",
            "expected_failed": "crawlo:crawlo:queue:failed"
        }
    ]

    try:
        for i, test_case in enumerate(test_cases, 1):
            print(f"Test {i}: {test_case['name']}")
            print(f"  input queue name: {test_case['queue_name']}")

            try:
                # Build the queue configuration
                config = QueueConfig(
                    queue_type=QueueType.REDIS,
                    redis_url="redis://127.0.0.1:6379/15",
                    queue_name=test_case['queue_name'],
                    max_queue_size=1000,
                    max_retries=3,
                    timeout=300
                )

                # Create the queue manager
                queue_manager = QueueManager(config)

                # Use the revised project-name extraction logic
                project_name = "default"
                if ':' in test_case['queue_name']:
                    parts = test_case['queue_name'].split(':')
                    # Skip every "crawlo" prefix; take the first non-"crawlo"
                    # segment as the project name
                    for part in parts:
                        if part != "crawlo":
                            project_name = part
                            break
                else:
                    project_name = test_case['queue_name'] or "default"

                print(f"  extracted project name: {project_name}")

                # Create the Redis queue instance
                from crawlo.queue.redis_priority_queue import RedisPriorityQueue
                queue = RedisPriorityQueue(
                    redis_url=config.redis_url,
                    queue_name=config.queue_name,
                    max_retries=config.max_retries,
                    timeout=config.timeout,
                    module_name=project_name  # pass the project name as module_name
                )

                print(f"  created queue name: {queue.queue_name}")
                print(f"  created processing queue: {queue.processing_queue}")
                print(f"  created failed queue: {queue.failed_queue}")

                # Verify the results
                assert queue.queue_name == test_case['expected_queue'], \
                    f"queue name mismatch: {queue.queue_name} != {test_case['expected_queue']}"
                assert queue.processing_queue == test_case['expected_processing'], \
                    f"processing queue mismatch: {queue.processing_queue} != {test_case['expected_processing']}"
                assert queue.failed_queue == test_case['expected_failed'], \
                    f"failed queue mismatch: {queue.failed_queue} != {test_case['expected_failed']}"

                print("  test passed")
            except Exception as e:
                print(f"  test failed: {e}")
                traceback.print_exc()
                return False

            print()

        print("Queue manager queue-creation test passed!")
        return True

    except Exception as e:
        print(f"Queue manager queue-creation test failed: {e}")
        traceback.print_exc()
        return False
```
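Both tests above inline the same extraction rule: split the queue name on `:`, skip every `crawlo` segment, and take the first remaining segment, defaulting to `default`. Distilled as a standalone helper, it looks like the sketch below; this is derived from the test code, not a function crawlo exports. Note that for the "triple crawlo prefix" input this rule yields `queue`, not the `crawlo` that test case expects.

```python
def extract_project_name(queue_name: str) -> str:
    """Return the project segment of a colon-delimited queue name.

    Skips every "crawlo" segment and returns the first remaining one;
    falls back to "default" when nothing usable is left.
    """
    if ':' in queue_name:
        for part in queue_name.split(':'):
            if part != "crawlo":
                return part
        return "default"  # every segment was "crawlo"
    return queue_name or "default"


# The consistent cases from test_queue_manager_naming:
assert extract_project_name("crawlo:test_project:queue:requests") == "test_project"
assert extract_project_name("crawlo:crawlo:queue:requests") == "queue"
assert extract_project_name("test_project:queue:requests") == "test_project"
```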