crawlo-1.2.5-py3-none-any.whl → crawlo-1.2.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crawlo might be problematic.
- crawlo/__init__.py +61 -61
- crawlo/__version__.py +1 -1
- crawlo/cleaners/__init__.py +60 -60
- crawlo/cleaners/data_formatter.py +225 -225
- crawlo/cleaners/encoding_converter.py +125 -125
- crawlo/cleaners/text_cleaner.py +232 -232
- crawlo/cli.py +75 -88
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -144
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +323 -323
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +186 -186
- crawlo/config.py +312 -312
- crawlo/config_validator.py +251 -251
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +365 -354
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +251 -143
- crawlo/crawler.py +1099 -1110
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +107 -107
- crawlo/downloader/__init__.py +266 -266
- crawlo/downloader/aiohttp_downloader.py +228 -221
- crawlo/downloader/cffi_downloader.py +256 -256
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +39 -38
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +43 -43
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +234 -281
- crawlo/filters/memory_filter.py +269 -269
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +131 -131
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +136 -135
- crawlo/middleware/offsite.py +114 -114
- crawlo/middleware/proxy.py +367 -367
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +163 -163
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +211 -211
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +338 -338
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +222 -222
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +317 -317
- crawlo/pipelines/pipeline_manager.py +62 -61
- crawlo/pipelines/redis_dedup_pipeline.py +166 -165
- crawlo/project.py +314 -279
- crawlo/queue/pqueue.py +37 -37
- crawlo/queue/queue_manager.py +377 -337
- crawlo/queue/redis_priority_queue.py +306 -299
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +219 -217
- crawlo/settings/setting_manager.py +122 -122
- crawlo/spider/__init__.py +639 -639
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +30 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +288 -324
- crawlo/templates/project/settings_distributed.py.tmpl +157 -154
- crawlo/templates/project/settings_gentle.py.tmpl +101 -128
- crawlo/templates/project/settings_high_performance.py.tmpl +135 -150
- crawlo/templates/project/settings_simple.py.tmpl +99 -103
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/run.py.tmpl +45 -47
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/tools/__init__.py +182 -182
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +35 -35
- crawlo/tools/distributed_coordinator.py +386 -386
- crawlo/tools/retry_mechanism.py +220 -220
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/utils/__init__.py +35 -35
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/date_tools.py +290 -290
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +143 -106
- crawlo/utils/error_handler.py +123 -123
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +128 -128
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +351 -334
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +218 -218
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- {crawlo-1.2.5.dist-info → crawlo-1.2.7.dist-info}/METADATA +764 -764
- crawlo-1.2.7.dist-info/RECORD +209 -0
- examples/__init__.py +7 -7
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +236 -236
- tests/cleaners_example.py +160 -160
- tests/config_validation_demo.py +102 -102
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/redis_key_validation_demo.py +130 -130
- tests/response_improvements_example.py +144 -144
- tests/test_advanced_tools.py +148 -148
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_cleaners.py +54 -54
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +81 -0
- tests/test_config_validator.py +193 -193
- tests/test_crawlo_proxy_integration.py +172 -172
- tests/test_date_tools.py +123 -123
- tests/test_default_header_middleware.py +158 -158
- tests/test_double_crawlo_fix.py +207 -207
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +103 -103
- tests/test_integration.py +356 -356
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_mode_consistency.py +52 -0
- tests/test_offsite_middleware.py +221 -221
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +176 -176
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +241 -241
- tests/test_scheduler.py +252 -241
- tests/test_scheduler_config_update.py +134 -0
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +153 -153
- tests/tools_example.py +257 -257
- crawlo-1.2.5.dist-info/RECORD +0 -206
- {crawlo-1.2.5.dist-info → crawlo-1.2.7.dist-info}/WHEEL +0 -0
- {crawlo-1.2.5.dist-info → crawlo-1.2.7.dist-info}/entry_points.txt +0 -0
- {crawlo-1.2.5.dist-info → crawlo-1.2.7.dist-info}/top_level.txt +0 -0
tests/test_simple_response.py
CHANGED

@@ -1,62 +1,62 @@

Lines 1–61 were removed and re-added with textually identical content (likely only whitespace or line endings changed); line 62 is unchanged. The file:

#!/usr/bin/python
# -*- coding:UTF-8 -*-
"""
Simple functional tests for Response
"""
import sys
import os

# Add the project root directory to the path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from crawlo.network.response import Response


def test_basic_functionality():
    """Test basic functionality"""
    print("Testing basic functionality...")

    # Create a simple HTML response
    html_content = """
    <html>
    <head>
        <title>Test Page</title>
    </head>
    <body>
        <div class="content">
            <h1>Main Heading</h1>
            <p class="intro">This is the intro paragraph</p>
        </div>
    </body>
    </html>
    """

    response = Response(
        url="https://example.com/test",
        body=html_content.encode('utf-8'),
        headers={"content-type": "text/html; charset=utf-8"}
    )

    # Test basic attributes
    print(f"URL: {response.url}")
    print(f"Status code: {response.status_code}")

    # Test text extraction (using the new method)
    title = response.extract_text('title')
    print(f"Title: {title}")

    h1_text = response.extract_text('.content h1')
    print(f"H1 text: {h1_text}")

    intro_text = response.extract_text('.intro')
    print(f"Intro text: {intro_text}")

    # Test XPath (using the new method)
    title_xpath = response.extract_text('//title')
    print(f"XPath title: {title_xpath}")

    print("Basic functionality test complete")


if __name__ == '__main__':
    test_basic_functionality()
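The test above drives Response.extract_text with both CSS selectors ('title', '.content h1', '.intro') and an XPath expression ('//title') against the same response. A minimal self-checking variant is sketched below; it assumes only the Response constructor and extract_text API used in the test, and the containment checks (rather than exact-match assertions) hedge against extract_text returning surrounding whitespace:

from crawlo.network.response import Response

def test_extract_text_selectors():
    # Same document shape as the test above: a title, a heading, and an intro paragraph
    html = (
        "<html><head><title>Test Page</title></head>"
        "<body><div class='content'><h1>Main Heading</h1>"
        "<p class='intro'>This is the intro paragraph</p></div></body></html>"
    )
    response = Response(
        url="https://example.com/test",
        body=html.encode("utf-8"),
        headers={"content-type": "text/html; charset=utf-8"},
    )
    # Containment checks: we only assume extract_text returns the element's text
    assert "Test Page" in response.extract_text("title")           # CSS selector
    assert "Main Heading" in response.extract_text(".content h1")  # descendant CSS selector
    assert "This is the intro paragraph" in response.extract_text(".intro")
    assert "Test Page" in response.extract_text("//title")         # XPath expression

if __name__ == "__main__":
    test_extract_text_selectors()
    print("extract_text selector checks passed")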
tests/test_telecom_spider_redis_key.py
CHANGED

@@ -1,206 +1,206 @@

Lines 1–205 were removed and re-added with textually identical content (likely only whitespace or line endings changed); line 206 is unchanged. The file:

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Redis key test script for the telecom equipment license spider
Verifies that the distributed spider follows the new Redis key naming convention
"""
import sys
import os
import asyncio
import tempfile
import shutil
from pathlib import Path

# Add the project root directory to the path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

# Import the modules under test
from crawlo.queue.queue_manager import QueueManager, QueueConfig, QueueType
from crawlo.filters.aioredis_filter import AioRedisFilter
from crawlo.pipelines.redis_dedup_pipeline import RedisDedupPipeline


class MockSettings:
    """Mock settings class"""
    def __init__(self, project_name="telecom_licenses_distributed"):
        self.project_name = project_name
        self.REDIS_HOST = '127.0.0.1'
        self.REDIS_PORT = 6379
        self.REDIS_PASSWORD = ''
        self.REDIS_DB = 2
        self.REDIS_URL = f'redis://127.0.0.1:6379/{self.REDIS_DB}'
        self.REDIS_TTL = 0
        self.CLEANUP_FP = 0
        self.FILTER_DEBUG = True
        self.LOG_LEVEL = "INFO"
        self.DECODE_RESPONSES = True
        self.SCHEDULER_QUEUE_NAME = f'crawlo:{project_name}:queue:requests'

    def get(self, key, default=None):
        if key == 'PROJECT_NAME':
            return self.project_name
        elif key == 'REDIS_HOST':
            return self.REDIS_HOST
        elif key == 'REDIS_PASSWORD':
            return self.REDIS_PASSWORD
        elif key == 'REDIS_URL':
            return self.REDIS_URL
        elif key == 'FILTER_DEBUG':
            return self.FILTER_DEBUG
        elif key == 'LOG_LEVEL':
            return self.LOG_LEVEL
        elif key == 'DECODE_RESPONSES':
            return self.DECODE_RESPONSES
        elif key == 'SCHEDULER_QUEUE_NAME':
            return self.SCHEDULER_QUEUE_NAME
        return default

    def get_bool(self, key, default=False):
        if key == 'FILTER_DEBUG':
            return self.FILTER_DEBUG
        elif key == 'DECODE_RESPONSES':
            return self.DECODE_RESPONSES
        elif key == 'CLEANUP_FP':
            return self.CLEANUP_FP
        return default

    def get_int(self, key, default=0):  # fixed method name
        if key == 'REDIS_TTL':
            return self.REDIS_TTL
        elif key == 'REDIS_PORT':
            return self.REDIS_PORT
        elif key == 'REDIS_DB':
            return self.REDIS_DB
        elif key == 'SCHEDULER_MAX_QUEUE_SIZE':
            return 1000
        elif key == 'QUEUE_MAX_RETRIES':
            return 3
        elif key == 'QUEUE_TIMEOUT':
            return 300
        return default


class MockCrawler:
    """Mock crawler class"""
    def __init__(self, project_name="telecom_licenses_distributed"):
        self.settings = MockSettings(project_name)
        self.stats = {}


async def test_telecom_spider_redis_key():
    """Test the Redis key naming convention for the telecom equipment license spider"""
    print("🔍 Testing the Redis key naming convention for the telecom equipment license spider...")

    project_name = "telecom_licenses_distributed"
    expected_prefix = f"crawlo:{project_name}"

    try:
        # 1. Test QueueManager and RedisPriorityQueue
        print("  1. Testing the queue manager...")
        queue_config = QueueConfig(
            queue_type=QueueType.REDIS,
            redis_url="redis://127.0.0.1:6379/2",
            queue_name=f"crawlo:{project_name}:queue:requests",  # unified naming convention
            max_queue_size=1000,
            max_retries=3,
            timeout=300
        )

        queue_manager = QueueManager(queue_config)
        queue = await queue_manager._create_queue(QueueType.REDIS)

        # Verify that the queue names follow the convention
        expected_queue_name = f"{expected_prefix}:queue:requests"
        expected_processing_queue = f"{expected_prefix}:queue:processing"
        expected_failed_queue = f"{expected_prefix}:queue:failed"

        assert queue.queue_name == expected_queue_name, f"Queue name mismatch: {queue.queue_name} != {expected_queue_name}"
        assert queue.processing_queue == expected_processing_queue, f"Processing queue name mismatch: {queue.processing_queue} != {expected_processing_queue}"
        assert queue.failed_queue == expected_failed_queue, f"Failed queue name mismatch: {queue.failed_queue} != {expected_failed_queue}"

        print(f"   ✅ Request queue: {queue.queue_name}")
        print(f"   ✅ Processing queue: {queue.processing_queue}")
        print(f"   ✅ Failed queue: {queue.failed_queue}")

        # 2. Test AioRedisFilter
        print("  2. Testing the request dedup filter...")
        mock_crawler = MockCrawler(project_name)
        filter_instance = AioRedisFilter.create_instance(mock_crawler)

        expected_filter_key = f"{expected_prefix}:filter:fingerprint"
        assert filter_instance.redis_key == expected_filter_key, f"Filter key mismatch: {filter_instance.redis_key} != {expected_filter_key}"

        print(f"   ✅ Request dedup key: {filter_instance.redis_key}")

        # 3. Test RedisDedupPipeline
        print("  3. Testing the item dedup pipeline...")
        dedup_pipeline = RedisDedupPipeline.from_crawler(mock_crawler)

        expected_item_key = f"{expected_prefix}:item:fingerprint"
        assert dedup_pipeline.redis_key == expected_item_key, f"Item dedup key mismatch: {dedup_pipeline.redis_key} != {expected_item_key}"

        print(f"   ✅ Item dedup key: {dedup_pipeline.redis_key}")

        # 4. Verify that all keys use the unified prefix
        print("  4. Verifying the unified prefix...")
        all_keys = [
            queue.queue_name,
            queue.processing_queue,
            queue.failed_queue,
            filter_instance.redis_key,
            dedup_pipeline.redis_key
        ]

        for key in all_keys:
            assert key.startswith(expected_prefix), f"Key does not use the unified prefix: {key}"
            print(f"   ✅ {key}")

        print("✅ Redis key naming convention test for the telecom equipment license spider passed!")
        return True

    except Exception as e:
        print(f"❌ Test failed: {e}")
        import traceback
        traceback.print_exc()
        return False
    finally:
        # Clean up resources
        try:
            if 'queue' in locals():
                await queue.close()
            if 'filter_instance' in locals() and hasattr(filter_instance, 'redis'):
                await filter_instance.redis.close()
            if 'dedup_pipeline' in locals() and hasattr(dedup_pipeline, 'redis_client'):
                dedup_pipeline.redis_client.close()
        except Exception:
            pass


async def main():
    """Main test entry point"""
    print("🚀 Starting the Redis key naming convention test for the telecom equipment license spider...")
    print("=" * 60)

    try:
        success = await test_telecom_spider_redis_key()

        print("=" * 60)
        if success:
            print("🎉 All tests passed! The telecom equipment license spider follows the new Redis key naming convention")
        else:
            print("❌ Test failed; please check the implementation")
            return 1

    except Exception as e:
        print("=" * 60)
        print(f"❌ An exception occurred during the test: {e}")
        import traceback
        traceback.print_exc()
        return 1

    return 0


if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)
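The convention this test enforces is a single crawlo:<project_name> prefix shared by every Redis key a spider touches. A small sketch of that derivation, using only the five key suffixes asserted above (the expected_redis_keys helper itself is hypothetical, not a crawlo API):

def expected_redis_keys(project_name: str) -> dict:
    """Derive the key set implied by the crawlo:<project_name> naming convention."""
    prefix = f"crawlo:{project_name}"
    return {
        "requests": f"{prefix}:queue:requests",        # pending requests
        "processing": f"{prefix}:queue:processing",    # requests being handled
        "failed": f"{prefix}:queue:failed",            # requests that exhausted retries
        "request_fp": f"{prefix}:filter:fingerprint",  # request dedup fingerprints
        "item_fp": f"{prefix}:item:fingerprint",       # item dedup fingerprints
    }

keys = expected_redis_keys("telecom_licenses_distributed")
assert all(key.startswith("crawlo:telecom_licenses_distributed:") for key in keys.values())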