crawlo 1.1.4-py3-none-any.whl → 1.1.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic.

Files changed (190)
  1. crawlo/__init__.py +61 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +61 -0
  4. crawlo/cleaners/data_formatter.py +226 -0
  5. crawlo/cleaners/encoding_converter.py +126 -0
  6. crawlo/cleaners/text_cleaner.py +233 -0
  7. crawlo/cli.py +40 -40
  8. crawlo/commands/__init__.py +13 -13
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/list.py +155 -155
  12. crawlo/commands/run.py +292 -285
  13. crawlo/commands/startproject.py +419 -196
  14. crawlo/commands/stats.py +188 -188
  15. crawlo/commands/utils.py +186 -186
  16. crawlo/config.py +312 -279
  17. crawlo/config_validator.py +253 -0
  18. crawlo/core/__init__.py +2 -2
  19. crawlo/core/engine.py +346 -172
  20. crawlo/core/processor.py +40 -40
  21. crawlo/core/scheduler.py +137 -166
  22. crawlo/crawler.py +1027 -1027
  23. crawlo/downloader/__init__.py +266 -242
  24. crawlo/downloader/aiohttp_downloader.py +220 -212
  25. crawlo/downloader/cffi_downloader.py +256 -251
  26. crawlo/downloader/httpx_downloader.py +259 -259
  27. crawlo/downloader/hybrid_downloader.py +214 -0
  28. crawlo/downloader/playwright_downloader.py +403 -0
  29. crawlo/downloader/selenium_downloader.py +473 -0
  30. crawlo/event.py +11 -11
  31. crawlo/exceptions.py +81 -81
  32. crawlo/extension/__init__.py +37 -37
  33. crawlo/extension/health_check.py +141 -141
  34. crawlo/extension/log_interval.py +57 -57
  35. crawlo/extension/log_stats.py +81 -81
  36. crawlo/extension/logging_extension.py +43 -43
  37. crawlo/extension/memory_monitor.py +104 -88
  38. crawlo/extension/performance_profiler.py +133 -117
  39. crawlo/extension/request_recorder.py +107 -107
  40. crawlo/filters/__init__.py +154 -154
  41. crawlo/filters/aioredis_filter.py +281 -242
  42. crawlo/filters/memory_filter.py +269 -269
  43. crawlo/items/__init__.py +23 -23
  44. crawlo/items/base.py +21 -21
  45. crawlo/items/fields.py +53 -53
  46. crawlo/items/items.py +104 -104
  47. crawlo/middleware/__init__.py +21 -21
  48. crawlo/middleware/default_header.py +32 -32
  49. crawlo/middleware/download_delay.py +28 -28
  50. crawlo/middleware/middleware_manager.py +135 -135
  51. crawlo/middleware/proxy.py +272 -248
  52. crawlo/middleware/request_ignore.py +30 -30
  53. crawlo/middleware/response_code.py +18 -18
  54. crawlo/middleware/response_filter.py +26 -26
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/mode_manager.py +212 -201
  57. crawlo/network/__init__.py +21 -21
  58. crawlo/network/request.py +338 -311
  59. crawlo/network/response.py +360 -271
  60. crawlo/pipelines/__init__.py +21 -21
  61. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  62. crawlo/pipelines/console_pipeline.py +39 -39
  63. crawlo/pipelines/csv_pipeline.py +316 -316
  64. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  65. crawlo/pipelines/json_pipeline.py +218 -218
  66. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  67. crawlo/pipelines/mongo_pipeline.py +131 -131
  68. crawlo/pipelines/mysql_pipeline.py +316 -316
  69. crawlo/pipelines/pipeline_manager.py +61 -56
  70. crawlo/pipelines/redis_dedup_pipeline.py +167 -162
  71. crawlo/project.py +188 -153
  72. crawlo/queue/pqueue.py +37 -37
  73. crawlo/queue/queue_manager.py +334 -307
  74. crawlo/queue/redis_priority_queue.py +299 -209
  75. crawlo/settings/__init__.py +7 -7
  76. crawlo/settings/default_settings.py +219 -278
  77. crawlo/settings/setting_manager.py +123 -100
  78. crawlo/spider/__init__.py +639 -639
  79. crawlo/stats_collector.py +59 -59
  80. crawlo/subscriber.py +130 -130
  81. crawlo/task_manager.py +30 -30
  82. crawlo/templates/crawlo.cfg.tmpl +10 -10
  83. crawlo/templates/project/__init__.py.tmpl +3 -3
  84. crawlo/templates/project/items.py.tmpl +17 -17
  85. crawlo/templates/project/middlewares.py.tmpl +110 -110
  86. crawlo/templates/project/pipelines.py.tmpl +97 -97
  87. crawlo/templates/project/run.py.tmpl +251 -251
  88. crawlo/templates/project/settings.py.tmpl +326 -279
  89. crawlo/templates/project/settings_distributed.py.tmpl +120 -0
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -0
  91. crawlo/templates/project/settings_high_performance.py.tmpl +152 -0
  92. crawlo/templates/project/settings_simple.py.tmpl +69 -0
  93. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  94. crawlo/templates/spider/spider.py.tmpl +141 -141
  95. crawlo/tools/__init__.py +183 -0
  96. crawlo/tools/anti_crawler.py +269 -0
  97. crawlo/tools/authenticated_proxy.py +241 -0
  98. crawlo/tools/data_validator.py +181 -0
  99. crawlo/tools/date_tools.py +36 -0
  100. crawlo/tools/distributed_coordinator.py +387 -0
  101. crawlo/tools/retry_mechanism.py +221 -0
  102. crawlo/tools/scenario_adapter.py +263 -0
  103. crawlo/utils/__init__.py +35 -7
  104. crawlo/utils/batch_processor.py +261 -0
  105. crawlo/utils/controlled_spider_mixin.py +439 -439
  106. crawlo/utils/date_tools.py +290 -233
  107. crawlo/utils/db_helper.py +343 -343
  108. crawlo/utils/enhanced_error_handler.py +360 -0
  109. crawlo/utils/env_config.py +106 -0
  110. crawlo/utils/error_handler.py +126 -0
  111. crawlo/utils/func_tools.py +82 -82
  112. crawlo/utils/large_scale_config.py +286 -286
  113. crawlo/utils/large_scale_helper.py +343 -343
  114. crawlo/utils/log.py +128 -128
  115. crawlo/utils/performance_monitor.py +285 -0
  116. crawlo/utils/queue_helper.py +175 -175
  117. crawlo/utils/redis_connection_pool.py +335 -0
  118. crawlo/utils/redis_key_validator.py +200 -0
  119. crawlo/utils/request.py +267 -267
  120. crawlo/utils/request_serializer.py +219 -219
  121. crawlo/utils/spider_loader.py +62 -62
  122. crawlo/utils/system.py +11 -11
  123. crawlo/utils/tools.py +4 -4
  124. crawlo/utils/url.py +39 -39
  125. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/METADATA +401 -403
  126. crawlo-1.1.6.dist-info/RECORD +189 -0
  127. examples/__init__.py +7 -7
  128. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +82 -0
  129. tests/__init__.py +7 -7
  130. tests/advanced_tools_example.py +276 -0
  131. tests/authenticated_proxy_example.py +237 -0
  132. tests/cleaners_example.py +161 -0
  133. tests/config_validation_demo.py +103 -0
  134. {examples → tests}/controlled_spider_example.py +205 -205
  135. tests/date_tools_example.py +181 -0
  136. tests/dynamic_loading_example.py +524 -0
  137. tests/dynamic_loading_test.py +105 -0
  138. tests/env_config_example.py +134 -0
  139. tests/error_handling_example.py +172 -0
  140. tests/redis_key_validation_demo.py +131 -0
  141. tests/response_improvements_example.py +145 -0
  142. tests/test_advanced_tools.py +149 -0
  143. tests/test_all_redis_key_configs.py +146 -0
  144. tests/test_authenticated_proxy.py +142 -0
  145. tests/test_cleaners.py +55 -0
  146. tests/test_comprehensive.py +147 -0
  147. tests/test_config_validator.py +194 -0
  148. tests/test_date_tools.py +124 -0
  149. tests/test_double_crawlo_fix.py +208 -0
  150. tests/test_double_crawlo_fix_simple.py +125 -0
  151. tests/test_dynamic_downloaders_proxy.py +125 -0
  152. tests/test_dynamic_proxy.py +93 -0
  153. tests/test_dynamic_proxy_config.py +147 -0
  154. tests/test_dynamic_proxy_real.py +110 -0
  155. tests/test_edge_cases.py +304 -0
  156. tests/test_enhanced_error_handler.py +271 -0
  157. tests/test_env_config.py +122 -0
  158. tests/test_error_handler_compatibility.py +113 -0
  159. tests/test_final_validation.py +153 -153
  160. tests/test_framework_env_usage.py +104 -0
  161. tests/test_integration.py +357 -0
  162. tests/test_item_dedup_redis_key.py +123 -0
  163. tests/test_parsel.py +30 -0
  164. tests/test_performance.py +328 -0
  165. tests/test_proxy_health_check.py +32 -32
  166. tests/test_proxy_middleware_integration.py +136 -136
  167. tests/test_proxy_providers.py +56 -56
  168. tests/test_proxy_stats.py +19 -19
  169. tests/test_proxy_strategies.py +59 -59
  170. tests/test_queue_manager_double_crawlo.py +231 -0
  171. tests/test_queue_manager_redis_key.py +177 -0
  172. tests/test_redis_config.py +28 -28
  173. tests/test_redis_connection_pool.py +295 -0
  174. tests/test_redis_key_naming.py +182 -0
  175. tests/test_redis_key_validator.py +124 -0
  176. tests/test_redis_queue.py +224 -224
  177. tests/test_request_serialization.py +70 -70
  178. tests/test_response_improvements.py +153 -0
  179. tests/test_scheduler.py +241 -241
  180. tests/test_simple_response.py +62 -0
  181. tests/test_telecom_spider_redis_key.py +206 -0
  182. tests/test_template_content.py +88 -0
  183. tests/test_template_redis_key.py +135 -0
  184. tests/test_tools.py +154 -0
  185. tests/tools_example.py +258 -0
  186. crawlo/core/enhanced_engine.py +0 -190
  187. crawlo-1.1.4.dist-info/RECORD +0 -117
  188. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/WHEEL +0 -0
  189. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/entry_points.txt +0 -0
  190. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/top_level.txt +0 -0
tests/test_proxy_providers.py CHANGED
@@ -1,57 +1,57 @@
- # tests/test_proxy_providers.py
- import pytest
- import pytest
- import respx
- from httpx import Response
- from crawlo.proxy.providers import StaticProxyProvider, FileProxyProvider, APIProxyProvider
- import tempfile
- import os
-
-
- @pytest.mark.asyncio
- async def test_static_provider():
-     """Test the static proxy provider"""
-     provider = StaticProxyProvider(['http://1.1.1.1:8080', 'http://2.2.2.2:8080'])
-     proxies = await provider.fetch_proxies()
-     assert len(proxies) == 2
-     assert 'http://1.1.1.1:8080' in proxies
-     assert 'http://2.2.2.2:8080' in proxies
-
-
- @pytest.mark.asyncio
- async def test_file_provider():
-     """Test the file-based proxy provider"""
-     with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
-         f.write("http://a.com:8080\nhttp://b.com:8080\n")
-         temp_path = f.name
-     try:
-         provider = FileProxyProvider(temp_path)
-         proxies = await provider.fetch_proxies()
-         assert len(proxies) == 2
-         assert 'http://a.com:8080' in proxies
-         assert 'http://b.com:8080' in proxies
-     finally:
-         os.unlink(temp_path)
-
-
- @pytest.mark.asyncio
- @respx.mock
- async def test_api_provider():
-     """Use respx to intercept HTTP requests; simpler and more reliable"""
-     # Intercept the GET request
-     respx.get("https://api.example.com").mock(
-         return_value=Response(
-             200,
-             json=[
-                 {"ip": "1.1.1.1", "port": 8080},
-                 {"ip": "2.2.2.2", "port": 8080}
-             ]
-         )
-     )
-
-     provider = APIProxyProvider(url="https://api.example.com")
-     proxies = await provider.fetch_proxies()
-
-     assert len(proxies) == 2
-     assert "http://1.1.1.1:8080" in proxies
+ # tests/test_proxy_providers.py
+ import pytest
+ import pytest
+ import respx
+ from httpx import Response
+ from crawlo.proxy.providers import StaticProxyProvider, FileProxyProvider, APIProxyProvider
+ import tempfile
+ import os
+
+
+ @pytest.mark.asyncio
+ async def test_static_provider():
+     """Test the static proxy provider"""
+     provider = StaticProxyProvider(['http://1.1.1.1:8080', 'http://2.2.2.2:8080'])
+     proxies = await provider.fetch_proxies()
+     assert len(proxies) == 2
+     assert 'http://1.1.1.1:8080' in proxies
+     assert 'http://2.2.2.2:8080' in proxies
+
+
+ @pytest.mark.asyncio
+ async def test_file_provider():
+     """Test the file-based proxy provider"""
+     with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
+         f.write("http://a.com:8080\nhttp://b.com:8080\n")
+         temp_path = f.name
+     try:
+         provider = FileProxyProvider(temp_path)
+         proxies = await provider.fetch_proxies()
+         assert len(proxies) == 2
+         assert 'http://a.com:8080' in proxies
+         assert 'http://b.com:8080' in proxies
+     finally:
+         os.unlink(temp_path)
+
+
+ @pytest.mark.asyncio
+ @respx.mock
+ async def test_api_provider():
+     """Use respx to intercept HTTP requests; simpler and more reliable"""
+     # Intercept the GET request
+     respx.get("https://api.example.com").mock(
+         return_value=Response(
+             200,
+             json=[
+                 {"ip": "1.1.1.1", "port": 8080},
+                 {"ip": "2.2.2.2", "port": 8080}
+             ]
+         )
+     )
+
+     provider = APIProxyProvider(url="https://api.example.com")
+     proxies = await provider.fetch_proxies()
+
+     assert len(proxies) == 2
+     assert "http://1.1.1.1:8080" in proxies
      assert "http://2.2.2.2:8080" in proxies
tests/test_proxy_stats.py CHANGED
@@ -1,20 +1,20 @@
- # tests/test_proxy_stats.py
- from crawlo.proxy.stats import ProxyStats
-
-
- def test_proxy_stats():
-     """Test proxy statistics tracking"""
-     stats = ProxyStats()
-     url = 'http://proxy1:8080'
-
-     stats.record(url, 'success')
-     stats.record(url, 'success')
-     stats.record(url, 'failure')
-
-     assert stats.get(url)['success'] == 2
-     assert stats.get(url)['failure'] == 1
-     assert stats.get(url)['total'] == 3
-
-     all_data = stats.all()
-     assert url in all_data
+ # tests/test_proxy_stats.py
+ from crawlo.proxy.stats import ProxyStats
+
+
+ def test_proxy_stats():
+     """Test proxy statistics tracking"""
+     stats = ProxyStats()
+     url = 'http://proxy1:8080'
+
+     stats.record(url, 'success')
+     stats.record(url, 'success')
+     stats.record(url, 'failure')
+
+     assert stats.get(url)['success'] == 2
+     assert stats.get(url)['failure'] == 1
+     assert stats.get(url)['total'] == 3
+
+     all_data = stats.all()
+     assert url in all_data
      assert all_data[url]['success'] == 2
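
The test pins down the ProxyStats contract: record(url, outcome) increments a per-outcome counter and a running total, get(url) returns that counter dict, and all() returns the full mapping. A minimal sketch that satisfies these assertions (the shipped crawlo.proxy.stats implementation may differ):

from collections import defaultdict

class ProxyStats:
    """Sketch of the contract asserted above; not the shipped implementation."""

    def __init__(self):
        self._stats = defaultdict(lambda: {'success': 0, 'failure': 0, 'total': 0})

    def record(self, url: str, outcome: str) -> None:
        # outcome is 'success' or 'failure'
        self._stats[url][outcome] += 1
        self._stats[url]['total'] += 1

    def get(self, url: str) -> dict:
        return self._stats[url]

    def all(self) -> dict:
        return dict(self._stats)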
tests/test_proxy_strategies.py CHANGED
@@ -1,60 +1,60 @@
- # tests/test_proxy_strategies.py
- import pytest
- from crawlo import Request
- from crawlo.proxy.strategies import STRATEGIES
-
-
- @pytest.fixture
- def mock_proxies():
-     """Provide a proxy list for testing"""
-     return [
-         {'url': 'http://p1:8080'},
-         {'url': 'http://p2:8080'},
-         {'url': 'http://p3:8080'},
-     ]
-
-
- @pytest.fixture
- def mock_stats():
-     """Provide usage statistics for testing"""
-     return {
-         'http://p1:8080': {'total': 10},
-         'http://p2:8080': {'total': 5},
-         'http://p3:8080': {'total': 1},
-     }
-
-
- @pytest.fixture
- def mock_request():
-     """Provide a request object for testing"""
-     return Request("https://example.com")
-
-
- def test_random_strategy(mock_proxies, mock_request, mock_stats):
-     """Test the random strategy"""
-     strategy = STRATEGIES['random']
-     chosen = strategy(mock_proxies, mock_request, mock_stats)
-     assert chosen in [p['url'] for p in mock_proxies]
-
-
- def test_least_used_strategy(mock_proxies, mock_request, mock_stats):
-     """Test the least-used strategy"""
-     strategy = STRATEGIES['least_used']
-     chosen = strategy(mock_proxies, mock_request, mock_stats)
-     assert chosen == 'http://p3:8080'  # total=1
-
-
- def test_domain_rule_strategy(mock_proxies, mock_request, mock_stats):
-     """Test the domain-rule strategy"""
-     from crawlo.proxy.strategies.domain_rule import domain_rule_strategy
-     request = Request("https://taobao.com/item/123")
-     rules = {'taobao.com': 'http://special:8080'}
-
-     # Monkey-patch to ensure a fallback strategy exists
-     old_strategy = STRATEGIES['least_used']
-     try:
-         STRATEGIES['least_used'] = lambda p, r, s: 'http://fallback:8080'
-         chosen = domain_rule_strategy(mock_proxies, request, mock_stats, rules)
-         assert chosen == 'http://special:8080'
-     finally:
+ # tests/test_proxy_strategies.py
+ import pytest
+ from crawlo import Request
+ from crawlo.proxy.strategies import STRATEGIES
+
+
+ @pytest.fixture
+ def mock_proxies():
+     """Provide a proxy list for testing"""
+     return [
+         {'url': 'http://p1:8080'},
+         {'url': 'http://p2:8080'},
+         {'url': 'http://p3:8080'},
+     ]
+
+
+ @pytest.fixture
+ def mock_stats():
+     """Provide usage statistics for testing"""
+     return {
+         'http://p1:8080': {'total': 10},
+         'http://p2:8080': {'total': 5},
+         'http://p3:8080': {'total': 1},
+     }
+
+
+ @pytest.fixture
+ def mock_request():
+     """Provide a request object for testing"""
+     return Request("https://example.com")
+
+
+ def test_random_strategy(mock_proxies, mock_request, mock_stats):
+     """Test the random strategy"""
+     strategy = STRATEGIES['random']
+     chosen = strategy(mock_proxies, mock_request, mock_stats)
+     assert chosen in [p['url'] for p in mock_proxies]
+
+
+ def test_least_used_strategy(mock_proxies, mock_request, mock_stats):
+     """Test the least-used strategy"""
+     strategy = STRATEGIES['least_used']
+     chosen = strategy(mock_proxies, mock_request, mock_stats)
+     assert chosen == 'http://p3:8080'  # total=1
+
+
+ def test_domain_rule_strategy(mock_proxies, mock_request, mock_stats):
+     """Test the domain-rule strategy"""
+     from crawlo.proxy.strategies.domain_rule import domain_rule_strategy
+     request = Request("https://taobao.com/item/123")
+     rules = {'taobao.com': 'http://special:8080'}
+
+     # Monkey-patch to ensure a fallback strategy exists
+     old_strategy = STRATEGIES['least_used']
+     try:
+         STRATEGIES['least_used'] = lambda p, r, s: 'http://fallback:8080'
+         chosen = domain_rule_strategy(mock_proxies, request, mock_stats, rules)
+         assert chosen == 'http://special:8080'
+     finally:
          STRATEGIES['least_used'] = old_strategy
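
As these tests show, a strategy is a callable taking (proxies, request, stats) and returning a proxy URL, looked up through the STRATEGIES registry. A sketch of two strategies consistent with the assertions above, including a least_used that picks the total=1 proxy (an illustration under those assumptions, not crawlo's shipped code):

import random

def random_strategy(proxies, request, stats):
    # Pick any proxy uniformly at random
    return random.choice(proxies)['url']

def least_used_strategy(proxies, request, stats):
    # Pick the proxy with the smallest recorded 'total' usage count
    return min(proxies, key=lambda p: stats.get(p['url'], {}).get('total', 0))['url']

STRATEGIES = {'random': random_strategy, 'least_used': least_used_strategy}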
tests/test_queue_manager_double_crawlo.py ADDED
@@ -0,0 +1,231 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ """
+ Test script for the double "crawlo" prefix problem in the queue manager.
+ Verifies how the queue manager behaves when queue names carry a doubled crawlo prefix.
+ """
+ import sys
+ import os
+ import asyncio
+ import traceback
+
+ # Add the project root to the path
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ # Import the modules under test
+ from crawlo.queue.queue_manager import QueueManager, QueueConfig, QueueType
+
+
+ async def test_queue_manager_naming():
+     """Test project-name extraction in the queue manager"""
+     print("🚀 Starting queue manager project-name extraction test...")
+     print("=" * 50)
+
+     test_cases = [
+         {
+             "name": "normal naming",
+             "queue_name": "crawlo:test_project:queue:requests",
+             "expected_module": "test_project"
+         },
+         {
+             "name": "double crawlo prefix",
+             "queue_name": "crawlo:crawlo:queue:requests",
+             "expected_module": "queue"  # the third segment is the project name
+         },
+         {
+             "name": "triple crawlo prefix",
+             "queue_name": "crawlo:crawlo:crawlo:queue:requests",
+             "expected_module": "crawlo"  # the third segment is the project name
+         },
+         {
+             "name": "no crawlo prefix",
+             "queue_name": "test_project:queue:requests",
+             "expected_module": "test_project"
+         }
+     ]
+
+     try:
+         for i, test_case in enumerate(test_cases, 1):
+             print(f"Test {i}: {test_case['name']}")
+             print(f"  input queue name: {test_case['queue_name']}")
+
+             # Exercise the project-name extraction logic used by the queue manager
+             project_name = "default"
+             if ':' in test_case['queue_name']:
+                 parts = test_case['queue_name'].split(':')
+                 if len(parts) >= 2:
+                     # Handle a possible double crawlo prefix
+                     if parts[0] == "crawlo" and parts[1] == "crawlo":
+                         # Double crawlo prefix: take the third segment as the project name
+                         if len(parts) >= 3:
+                             project_name = parts[2]
+                         else:
+                             project_name = "default"
+                     elif parts[0] == "crawlo":
+                         # Normal crawlo prefix: take the second segment as the project name
+                         project_name = parts[1]
+                     else:
+                         # No crawlo prefix: take the first segment as the project name
+                         project_name = parts[0]
+                 else:
+                     project_name = test_case['queue_name'] or "default"
+             else:
+                 project_name = test_case['queue_name'] or "default"
+
+             print(f"  extracted project name: {project_name}")
+             print(f"  expected project name: {test_case['expected_module']}")
+
+             # Verify the result
+             assert project_name == test_case['expected_module'], \
+                 f"project name mismatch: {project_name} != {test_case['expected_module']}"
+
+             print("  ✅ test passed")
+             print()
+
+         print("✅ Queue manager project-name extraction test passed!")
+         return True
+
+     except Exception as e:
+         print(f"❌ Queue manager project-name extraction test failed: {e}")
+         traceback.print_exc()
+         return False
+
+
+ async def test_queue_manager_create_queue():
+     """Test queue creation through the queue manager"""
+     print("🚀 Starting queue manager queue-creation test...")
+     print("=" * 50)
+
+     test_cases = [
+         {
+             "name": "normal naming",
+             "queue_name": "crawlo:test_project:queue:requests",
+             "expected_queue": "crawlo:test_project:queue:requests",
+             "expected_processing": "crawlo:test_project:queue:processing",
+             "expected_failed": "crawlo:test_project:queue:failed"
+         },
+         {
+             "name": "double crawlo prefix",
+             "queue_name": "crawlo:crawlo:queue:requests",
+             "expected_queue": "crawlo:crawlo:queue:requests",
+             "expected_processing": "crawlo:crawlo:queue:processing",
+             "expected_failed": "crawlo:crawlo:queue:failed"
+         }
+     ]
+
+     try:
+         for i, test_case in enumerate(test_cases, 1):
+             print(f"Test {i}: {test_case['name']}")
+             print(f"  input queue name: {test_case['queue_name']}")
+
+             try:
+                 # Build the queue configuration
+                 config = QueueConfig(
+                     queue_type=QueueType.REDIS,
+                     redis_url="redis://127.0.0.1:6379/15",
+                     queue_name=test_case['queue_name'],
+                     max_queue_size=1000,
+                     max_retries=3,
+                     timeout=300
+                 )
+
+                 # Create the queue manager
+                 queue_manager = QueueManager(config)
+
+                 # Create a queue instance (mirrors the manager's _create_queue method)
+                 project_name = "default"
+                 if ':' in test_case['queue_name']:
+                     parts = test_case['queue_name'].split(':')
+                     if len(parts) >= 2:
+                         # Handle a possible double crawlo prefix
+                         if parts[0] == "crawlo" and parts[1] == "crawlo":
+                             # Double crawlo prefix: take the third segment as the project name
+                             if len(parts) >= 3:
+                                 project_name = parts[2]
+                             else:
+                                 project_name = "default"
+                         elif parts[0] == "crawlo":
+                             # Normal crawlo prefix: take the second segment as the project name
+                             project_name = parts[1]
+                         else:
+                             # No crawlo prefix: take the first segment as the project name
+                             project_name = parts[0]
+                     else:
+                         project_name = test_case['queue_name'] or "default"
+                 else:
+                     project_name = test_case['queue_name'] or "default"
+
+                 print(f"  extracted project name: {project_name}")
+
+                 # Create the Redis queue instance
+                 from crawlo.queue.redis_priority_queue import RedisPriorityQueue
+                 queue = RedisPriorityQueue(
+                     redis_url=config.redis_url,
+                     queue_name=config.queue_name,
+                     max_retries=config.max_retries,
+                     timeout=config.timeout,
+                     module_name=project_name  # pass the project name as module_name
+                 )
+
+                 print(f"  created queue name: {queue.queue_name}")
+                 print(f"  created processing queue: {queue.processing_queue}")
+                 print(f"  created failed queue: {queue.failed_queue}")
+
+                 # Verify the results
+                 assert queue.queue_name == test_case['expected_queue'], \
+                     f"queue name mismatch: {queue.queue_name} != {test_case['expected_queue']}"
+                 assert queue.processing_queue == test_case['expected_processing'], \
+                     f"processing queue name mismatch: {queue.processing_queue} != {test_case['expected_processing']}"
+                 assert queue.failed_queue == test_case['expected_failed'], \
+                     f"failed queue name mismatch: {queue.failed_queue} != {test_case['expected_failed']}"
+
+                 print("  ✅ test passed")
+             except Exception as e:
+                 print(f"  ❌ test failed: {e}")
+                 traceback.print_exc()
+                 return False
+
+             print()
+
+         print("✅ Queue manager queue-creation test passed!")
+         return True
+
+     except Exception as e:
+         print(f"❌ Queue manager queue-creation test failed: {e}")
+         traceback.print_exc()
+         return False
+
+
+ async def main():
+     """Main test entry point"""
+     print("🚀 Starting double crawlo prefix tests for the queue manager...")
+     print("=" * 50)
+
+     try:
+         # Test project-name extraction
+         naming_test_success = await test_queue_manager_naming()
+         print()
+
+         # Test queue creation
+         create_test_success = await test_queue_manager_create_queue()
+         print()
+
+         print("=" * 50)
+         if naming_test_success and create_test_success:
+             print("🎉 Double crawlo prefix tests passed!")
+         else:
+             print("❌ Some tests failed; please check the implementation")
+             return 1
+
+     except Exception as e:
+         print("=" * 50)
+         print(f"❌ Exception during testing: {e}")
+         traceback.print_exc()
+         return 1
+
+     return 0
+
+
+ if __name__ == "__main__":
+     exit_code = asyncio.run(main())
+     sys.exit(exit_code)
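
The prefix-handling logic above is inlined twice in the script; factored out, it reduces to one small function. A hypothetical helper (the name extract_project_name is ours, not crawlo's) that reproduces every case the test asserts:

def extract_project_name(queue_name: str) -> str:
    """Mirror of the inlined extraction logic exercised by the test above."""
    if not queue_name:
        return "default"
    parts = queue_name.split(':')
    if len(parts) >= 2 and parts[0] == "crawlo" and parts[1] == "crawlo":
        # Doubled prefix: the third segment is the project name
        return parts[2] if len(parts) >= 3 else "default"
    if len(parts) >= 2 and parts[0] == "crawlo":
        # Normal prefix: the second segment is the project name
        return parts[1]
    # No crawlo prefix: the first segment is the project name
    return parts[0]

assert extract_project_name("crawlo:test_project:queue:requests") == "test_project"
assert extract_project_name("crawlo:crawlo:queue:requests") == "queue"
assert extract_project_name("crawlo:crawlo:crawlo:queue:requests") == "crawlo"
assert extract_project_name("test_project:queue:requests") == "test_project"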
tests/test_queue_manager_redis_key.py ADDED
@@ -0,0 +1,177 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ """
+ QueueManager Redis key test script.
+ Verifies that QueueManager passes the module_name parameter correctly when creating a RedisPriorityQueue.
+ """
+ import asyncio
+ import sys
+ import os
+ import traceback
+
+ # Add the project root to the path
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ from crawlo.queue.queue_manager import QueueManager, QueueConfig, QueueType
+
+
+ class MockSettings:
+     """Mock settings class"""
+     def __init__(self, project_name="test_project"):
+         self.project_name = project_name
+         self.REDIS_URL = "redis://127.0.0.1:6379/15"  # use the test database
+         self.REDIS_TTL = 0
+         self.CLEANUP_FP = 0
+         self.FILTER_DEBUG = True
+         self.LOG_LEVEL = "INFO"
+         self.DECODE_RESPONSES = True
+
+     def get(self, key, default=None):
+         if key == 'PROJECT_NAME':
+             return self.project_name
+         elif key == 'REDIS_URL':
+             return self.REDIS_URL
+         elif key == 'FILTER_DEBUG':
+             return self.FILTER_DEBUG
+         elif key == 'LOG_LEVEL':
+             return self.LOG_LEVEL
+         elif key == 'DECODE_RESPONSES':
+             return self.DECODE_RESPONSES
+         return default
+
+     def get_bool(self, key, default=False):
+         if key == 'FILTER_DEBUG':
+             return self.FILTER_DEBUG
+         elif key == 'DECODE_RESPONSES':
+             return self.DECODE_RESPONSES
+         elif key == 'CLEANUP_FP':
+             return self.CLEANUP_FP
+         return default
+
+     def get_int(self, key, default=0):
+         if key == 'REDIS_TTL':
+             return self.REDIS_TTL
+         elif key == 'REDIS_PORT':
+             return 6379
+         elif key == 'REDIS_DB':
+             return 0
+         elif key == 'SCHEDULER_MAX_QUEUE_SIZE':
+             return 1000
+         elif key == 'QUEUE_MAX_RETRIES':
+             return 3
+         elif key == 'QUEUE_TIMEOUT':
+             return 300
+         return default
+
+
+ async def test_queue_manager_redis_key():
+     """Test Redis key naming when QueueManager creates a queue"""
+     print("🔍 Testing Redis key naming in QueueManager...")
+
+     try:
+         # Exercise different queue-name configurations
+         test_cases = [
+             {
+                 "queue_name": "crawlo:books_distributed:queue:requests",
+                 "expected_module_name": "books_distributed",
+                 "description": "standard project name"
+             },
+             {
+                 "queue_name": "crawlo:api_data_collection:queue:requests",
+                 "expected_module_name": "api_data_collection",
+                 "description": "API data collection project"
+             },
+             {
+                 "queue_name": "crawlo:test_project:queue:requests",
+                 "expected_module_name": "test_project",
+                 "description": "test project"
+             },
+             {
+                 "queue_name": "simple_queue_name",
+                 "expected_module_name": "simple_queue_name",
+                 "description": "plain queue name"
+             },
+             {
+                 "queue_name": "",
+                 "expected_module_name": "default",
+                 "description": "empty queue name"
+             }
+         ]
+
+         for i, test_case in enumerate(test_cases, 1):
+             print(f"  {i}. testing {test_case['description']}...")
+
+             # Build the QueueConfig
+             config = QueueConfig(
+                 queue_type=QueueType.REDIS,
+                 redis_url="redis://127.0.0.1:6379/15",
+                 queue_name=test_case["queue_name"],
+                 max_queue_size=1000,
+                 max_retries=3,
+                 timeout=300
+             )
+
+             # Create the QueueManager
+             queue_manager = QueueManager(config)
+
+             # Create the queue instance
+             queue = await queue_manager._create_queue(QueueType.REDIS)
+
+             # Verify module_name was set correctly
+             assert hasattr(queue, 'module_name'), "RedisPriorityQueue is missing the module_name attribute"
+             assert queue.module_name == test_case["expected_module_name"], \
+                 f"module_name mismatch: {queue.module_name} != {test_case['expected_module_name']}"
+
+             # Verify the queue names follow the naming convention
+             expected_queue_name = f"crawlo:{queue.module_name}:queue:requests"
+             expected_processing_queue = f"crawlo:{queue.module_name}:queue:processing"
+             expected_failed_queue = f"crawlo:{queue.module_name}:queue:failed"
+
+             assert queue.queue_name == expected_queue_name, \
+                 f"queue name mismatch: {queue.queue_name} != {expected_queue_name}"
+             assert queue.processing_queue == expected_processing_queue, \
+                 f"processing queue name mismatch: {queue.processing_queue} != {expected_processing_queue}"
+             assert queue.failed_queue == expected_failed_queue, \
+                 f"failed queue name mismatch: {queue.failed_queue} != {expected_failed_queue}"
+
+             print(f"     ✅ module_name: {queue.module_name}")
+             print(f"     ✅ queue name: {queue.queue_name}")
+             print(f"     ✅ processing queue name: {queue.processing_queue}")
+             print(f"     ✅ failed queue name: {queue.failed_queue}")
+
+         print("✅ QueueManager Redis key naming test passed!")
+         return True
+
+     except Exception as e:
+         print(f"❌ QueueManager Redis key naming test failed: {e}")
+         traceback.print_exc()
+         return False
+
+
+ async def main():
+     """Main test entry point"""
+     print("🚀 Starting QueueManager Redis key naming test...")
+     print("=" * 50)
+
+     try:
+         success = await test_queue_manager_redis_key()
+
+         print("=" * 50)
+         if success:
+             print("🎉 All tests passed! QueueManager passes module_name correctly")
+         else:
+             print("❌ Tests failed; please check the implementation")
+             return 1
+
+     except Exception as e:
+         print("=" * 50)
+         print(f"❌ Exception during testing: {e}")
+         traceback.print_exc()
+         return 1
+
+     return 0
+
+
+ if __name__ == "__main__":
+     exit_code = asyncio.run(main())
+     sys.exit(exit_code)
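
Both scripts converge on the same Redis key scheme: crawlo:<project>:queue:requests for the pending queue, plus :processing and :failed siblings. A compact sketch of that naming convention (the helper and its comments are illustrative, not part of crawlo's API):

def redis_queue_keys(project_name: str) -> dict:
    """Key scheme asserted by the tests above: crawlo:<project>:queue:<suffix>."""
    base = f"crawlo:{project_name}:queue"
    return {
        'requests': f"{base}:requests",      # pending requests
        'processing': f"{base}:processing",  # requests currently being handled
        'failed': f"{base}:failed",          # requests that exhausted retries
    }

# e.g. redis_queue_keys("books_distributed")["requests"]
# -> "crawlo:books_distributed:queue:requests"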