crawlo-1.1.4-py3-none-any.whl → crawlo-1.1.6-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +61 -34
- crawlo/__version__.py +1 -1
- crawlo/cleaners/__init__.py +61 -0
- crawlo/cleaners/data_formatter.py +226 -0
- crawlo/cleaners/encoding_converter.py +126 -0
- crawlo/cleaners/text_cleaner.py +233 -0
- crawlo/cli.py +40 -40
- crawlo/commands/__init__.py +13 -13
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +292 -285
- crawlo/commands/startproject.py +419 -196
- crawlo/commands/stats.py +188 -188
- crawlo/commands/utils.py +186 -186
- crawlo/config.py +312 -279
- crawlo/config_validator.py +253 -0
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +346 -172
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +137 -166
- crawlo/crawler.py +1027 -1027
- crawlo/downloader/__init__.py +266 -242
- crawlo/downloader/aiohttp_downloader.py +220 -212
- crawlo/downloader/cffi_downloader.py +256 -251
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +214 -0
- crawlo/downloader/playwright_downloader.py +403 -0
- crawlo/downloader/selenium_downloader.py +473 -0
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +37 -37
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +43 -43
- crawlo/extension/memory_monitor.py +104 -88
- crawlo/extension/performance_profiler.py +133 -117
- crawlo/extension/request_recorder.py +107 -107
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +281 -242
- crawlo/filters/memory_filter.py +269 -269
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +53 -53
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +32 -32
- crawlo/middleware/download_delay.py +28 -28
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/proxy.py +272 -248
- crawlo/middleware/request_ignore.py +30 -30
- crawlo/middleware/response_code.py +18 -18
- crawlo/middleware/response_filter.py +26 -26
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +212 -201
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +338 -311
- crawlo/network/response.py +360 -271
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +224 -224
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +316 -316
- crawlo/pipelines/pipeline_manager.py +61 -56
- crawlo/pipelines/redis_dedup_pipeline.py +167 -162
- crawlo/project.py +188 -153
- crawlo/queue/pqueue.py +37 -37
- crawlo/queue/queue_manager.py +334 -307
- crawlo/queue/redis_priority_queue.py +299 -209
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +219 -278
- crawlo/settings/setting_manager.py +123 -100
- crawlo/spider/__init__.py +639 -639
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +130 -130
- crawlo/task_manager.py +30 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +110 -110
- crawlo/templates/project/pipelines.py.tmpl +97 -97
- crawlo/templates/project/run.py.tmpl +251 -251
- crawlo/templates/project/settings.py.tmpl +326 -279
- crawlo/templates/project/settings_distributed.py.tmpl +120 -0
- crawlo/templates/project/settings_gentle.py.tmpl +95 -0
- crawlo/templates/project/settings_high_performance.py.tmpl +152 -0
- crawlo/templates/project/settings_simple.py.tmpl +69 -0
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +141 -141
- crawlo/tools/__init__.py +183 -0
- crawlo/tools/anti_crawler.py +269 -0
- crawlo/tools/authenticated_proxy.py +241 -0
- crawlo/tools/data_validator.py +181 -0
- crawlo/tools/date_tools.py +36 -0
- crawlo/tools/distributed_coordinator.py +387 -0
- crawlo/tools/retry_mechanism.py +221 -0
- crawlo/tools/scenario_adapter.py +263 -0
- crawlo/utils/__init__.py +35 -7
- crawlo/utils/batch_processor.py +261 -0
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/date_tools.py +290 -233
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +360 -0
- crawlo/utils/env_config.py +106 -0
- crawlo/utils/error_handler.py +126 -0
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +343 -343
- crawlo/utils/log.py +128 -128
- crawlo/utils/performance_monitor.py +285 -0
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +335 -0
- crawlo/utils/redis_key_validator.py +200 -0
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +219 -219
- crawlo/utils/spider_loader.py +62 -62
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/METADATA +401 -403
- crawlo-1.1.6.dist-info/RECORD +189 -0
- examples/__init__.py +7 -7
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +82 -0
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +276 -0
- tests/authenticated_proxy_example.py +237 -0
- tests/cleaners_example.py +161 -0
- tests/config_validation_demo.py +103 -0
- {examples → tests}/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +181 -0
- tests/dynamic_loading_example.py +524 -0
- tests/dynamic_loading_test.py +105 -0
- tests/env_config_example.py +134 -0
- tests/error_handling_example.py +172 -0
- tests/redis_key_validation_demo.py +131 -0
- tests/response_improvements_example.py +145 -0
- tests/test_advanced_tools.py +149 -0
- tests/test_all_redis_key_configs.py +146 -0
- tests/test_authenticated_proxy.py +142 -0
- tests/test_cleaners.py +55 -0
- tests/test_comprehensive.py +147 -0
- tests/test_config_validator.py +194 -0
- tests/test_date_tools.py +124 -0
- tests/test_double_crawlo_fix.py +208 -0
- tests/test_double_crawlo_fix_simple.py +125 -0
- tests/test_dynamic_downloaders_proxy.py +125 -0
- tests/test_dynamic_proxy.py +93 -0
- tests/test_dynamic_proxy_config.py +147 -0
- tests/test_dynamic_proxy_real.py +110 -0
- tests/test_edge_cases.py +304 -0
- tests/test_enhanced_error_handler.py +271 -0
- tests/test_env_config.py +122 -0
- tests/test_error_handler_compatibility.py +113 -0
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +104 -0
- tests/test_integration.py +357 -0
- tests/test_item_dedup_redis_key.py +123 -0
- tests/test_parsel.py +30 -0
- tests/test_performance.py +328 -0
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_manager_double_crawlo.py +231 -0
- tests/test_queue_manager_redis_key.py +177 -0
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +295 -0
- tests/test_redis_key_naming.py +182 -0
- tests/test_redis_key_validator.py +124 -0
- tests/test_redis_queue.py +224 -224
- tests/test_request_serialization.py +70 -70
- tests/test_response_improvements.py +153 -0
- tests/test_scheduler.py +241 -241
- tests/test_simple_response.py +62 -0
- tests/test_telecom_spider_redis_key.py +206 -0
- tests/test_template_content.py +88 -0
- tests/test_template_redis_key.py +135 -0
- tests/test_tools.py +154 -0
- tests/tools_example.py +258 -0
- crawlo/core/enhanced_engine.py +0 -190
- crawlo-1.1.4.dist-info/RECORD +0 -117
- {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/WHEEL +0 -0
- {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/entry_points.txt +0 -0
- {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/top_level.txt +0 -0
tests/test_simple_response.py
ADDED

@@ -0,0 +1,62 @@
#!/usr/bin/python
# -*- coding:UTF-8 -*-
"""
Simple functional test for Response
"""
import sys
import os

# Add the project root directory to the path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from crawlo.network.response import Response


def test_basic_functionality():
    """Test basic functionality"""
    print("Testing basic functionality...")

    # Create a simple HTML response
    html_content = """
    <html>
    <head>
        <title>Test Page</title>
    </head>
    <body>
        <div class="content">
            <h1>Main Heading</h1>
            <p class="intro">This is the intro paragraph</p>
        </div>
    </body>
    </html>
    """

    response = Response(
        url="https://example.com/test",
        body=html_content.encode('utf-8'),
        headers={"content-type": "text/html; charset=utf-8"}
    )

    # Test basic attributes
    print(f"URL: {response.url}")
    print(f"Status code: {response.status_code}")

    # Test text extraction (using the new method)
    title = response.extract_text('title')
    print(f"Title: {title}")

    h1_text = response.extract_text('.content h1')
    print(f"H1 text: {h1_text}")

    intro_text = response.extract_text('.intro')
    print(f"Intro text: {intro_text}")

    # Test XPath (using the new method)
    title_xpath = response.extract_text('//title')
    print(f"XPath title: {title_xpath}")

    print("Basic functionality test complete")


if __name__ == '__main__':
    test_basic_functionality()
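Note that extract_text in the calls above accepts both CSS selectors ('title', '.content h1') and XPath ('//title'). The diff does not show the implementation in crawlo/network/response.py; purely as an illustration, a minimal sketch of such a unified accessor built on parsel (which this release also exercises in tests/test_parsel.py) could look like the following. The dispatch rule is an assumption, not crawlo's actual code:

from parsel import Selector  # assumption: a parsel-style selector backs Response

def extract_text(html: str, query: str) -> str:
    """Return the joined text content matched by a CSS or XPath query."""
    sel = Selector(text=html)
    # Hypothetical dispatch: queries starting with "/" or "(" are XPath,
    # everything else is treated as a CSS selector.
    matched = sel.xpath(query) if query.startswith(("/", "(")) else sel.css(query)
    texts = matched.xpath(".//text()").getall()
    return " ".join(t.strip() for t in texts if t.strip())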
tests/test_telecom_spider_redis_key.py
ADDED

@@ -0,0 +1,206 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Redis key test script for the telecom equipment licence spider.
Verifies that the distributed spider follows the new Redis key naming convention.
"""
import sys
import os
import asyncio
import tempfile
import shutil
from pathlib import Path

# Add the project root directory to the path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

# Import the modules under test
from crawlo.queue.queue_manager import QueueManager, QueueConfig, QueueType
from crawlo.filters.aioredis_filter import AioRedisFilter
from crawlo.pipelines.redis_dedup_pipeline import RedisDedupPipeline


class MockSettings:
    """Mock settings class"""
    def __init__(self, project_name="telecom_licenses_distributed"):
        self.project_name = project_name
        self.REDIS_HOST = '127.0.0.1'
        self.REDIS_PORT = 6379
        self.REDIS_PASSWORD = ''
        self.REDIS_DB = 2
        self.REDIS_URL = f'redis://127.0.0.1:6379/{self.REDIS_DB}'
        self.REDIS_TTL = 0
        self.CLEANUP_FP = 0
        self.FILTER_DEBUG = True
        self.LOG_LEVEL = "INFO"
        self.DECODE_RESPONSES = True
        self.SCHEDULER_QUEUE_NAME = f'crawlo:{project_name}:queue:requests'

    def get(self, key, default=None):
        if key == 'PROJECT_NAME':
            return self.project_name
        elif key == 'REDIS_HOST':
            return self.REDIS_HOST
        elif key == 'REDIS_PASSWORD':
            return self.REDIS_PASSWORD
        elif key == 'REDIS_URL':
            return self.REDIS_URL
        elif key == 'FILTER_DEBUG':
            return self.FILTER_DEBUG
        elif key == 'LOG_LEVEL':
            return self.LOG_LEVEL
        elif key == 'DECODE_RESPONSES':
            return self.DECODE_RESPONSES
        elif key == 'SCHEDULER_QUEUE_NAME':
            return self.SCHEDULER_QUEUE_NAME
        return default

    def get_bool(self, key, default=False):
        if key == 'FILTER_DEBUG':
            return self.FILTER_DEBUG
        elif key == 'DECODE_RESPONSES':
            return self.DECODE_RESPONSES
        elif key == 'CLEANUP_FP':
            return self.CLEANUP_FP
        return default

    def get_int(self, key, default=0):  # fixed method name
        if key == 'REDIS_TTL':
            return self.REDIS_TTL
        elif key == 'REDIS_PORT':
            return self.REDIS_PORT
        elif key == 'REDIS_DB':
            return self.REDIS_DB
        elif key == 'SCHEDULER_MAX_QUEUE_SIZE':
            return 1000
        elif key == 'QUEUE_MAX_RETRIES':
            return 3
        elif key == 'QUEUE_TIMEOUT':
            return 300
        return default


class MockCrawler:
    """Mock crawler class"""
    def __init__(self, project_name="telecom_licenses_distributed"):
        self.settings = MockSettings(project_name)
        self.stats = {}


async def test_telecom_spider_redis_key():
    """Test the telecom licence spider's Redis key naming convention"""
    print("🔍 Testing the telecom licence spider's Redis key naming convention...")

    project_name = "telecom_licenses_distributed"
    expected_prefix = f"crawlo:{project_name}"

    try:
        # 1. Test QueueManager and RedisPriorityQueue
        print(" 1. Testing the queue manager...")
        queue_config = QueueConfig(
            queue_type=QueueType.REDIS,
            redis_url="redis://127.0.0.1:6379/2",
            queue_name=f"crawlo:{project_name}:queue:requests",  # unified naming convention
            max_queue_size=1000,
            max_retries=3,
            timeout=300
        )

        queue_manager = QueueManager(queue_config)
        queue = await queue_manager._create_queue(QueueType.REDIS)

        # Verify the queue names follow the convention
        expected_queue_name = f"{expected_prefix}:queue:requests"
        expected_processing_queue = f"{expected_prefix}:queue:processing"
        expected_failed_queue = f"{expected_prefix}:queue:failed"

        assert queue.queue_name == expected_queue_name, f"Queue name mismatch: {queue.queue_name} != {expected_queue_name}"
        assert queue.processing_queue == expected_processing_queue, f"Processing queue name mismatch: {queue.processing_queue} != {expected_processing_queue}"
        assert queue.failed_queue == expected_failed_queue, f"Failed queue name mismatch: {queue.failed_queue} != {expected_failed_queue}"

        print(f"   ✅ Request queue: {queue.queue_name}")
        print(f"   ✅ Processing queue: {queue.processing_queue}")
        print(f"   ✅ Failed queue: {queue.failed_queue}")

        # 2. Test AioRedisFilter
        print(" 2. Testing the request dedup filter...")
        mock_crawler = MockCrawler(project_name)
        filter_instance = AioRedisFilter.create_instance(mock_crawler)

        expected_filter_key = f"{expected_prefix}:filter:fingerprint"
        assert filter_instance.redis_key == expected_filter_key, f"Filter key mismatch: {filter_instance.redis_key} != {expected_filter_key}"

        print(f"   ✅ Request dedup key: {filter_instance.redis_key}")

        # 3. Test RedisDedupPipeline
        print(" 3. Testing the item dedup pipeline...")
        dedup_pipeline = RedisDedupPipeline.from_crawler(mock_crawler)

        expected_item_key = f"{expected_prefix}:item:fingerprint"
        assert dedup_pipeline.redis_key == expected_item_key, f"Item dedup key mismatch: {dedup_pipeline.redis_key} != {expected_item_key}"

        print(f"   ✅ Item dedup key: {dedup_pipeline.redis_key}")

        # 4. Verify that every key uses the unified prefix
        print(" 4. Verifying the unified prefix...")
        all_keys = [
            queue.queue_name,
            queue.processing_queue,
            queue.failed_queue,
            filter_instance.redis_key,
            dedup_pipeline.redis_key
        ]

        for key in all_keys:
            assert key.startswith(expected_prefix), f"Key does not use the unified prefix: {key}"
            print(f"   ✅ {key}")

        print("✅ Telecom licence spider Redis key naming convention test passed!")
        return True

    except Exception as e:
        print(f"❌ Test failed: {e}")
        import traceback
        traceback.print_exc()
        return False
    finally:
        # Clean up resources
        try:
            if 'queue' in locals():
                await queue.close()
            if 'filter_instance' in locals() and hasattr(filter_instance, 'redis'):
                await filter_instance.redis.close()
            if 'dedup_pipeline' in locals() and hasattr(dedup_pipeline, 'redis_client'):
                dedup_pipeline.redis_client.close()
        except Exception:
            pass


async def main():
    """Main test entry point"""
    print("🚀 Starting the telecom licence spider Redis key naming convention test...")
    print("=" * 60)

    try:
        success = await test_telecom_spider_redis_key()

        print("=" * 60)
        if success:
            print("🎉 All tests passed! The telecom licence spider follows the new Redis key naming convention")
        else:
            print("❌ Test failed, please check the implementation")
            return 1

    except Exception as e:
        print("=" * 60)
        print(f"❌ Exception raised during the test: {e}")
        import traceback
        traceback.print_exc()
        return 1

    return 0


if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)
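All five assertions above reduce to one pattern: every Redis key is "crawlo:<project>:<namespace>:<name>". A hypothetical helper (not part of crawlo's public API, shown only to make the convention explicit) would be:

def redis_key(project_name: str, namespace: str, name: str) -> str:
    # Illustrative only: encodes the "crawlo:<project>:<namespace>:<name>"
    # convention that the assertions above check against.
    return f"crawlo:{project_name}:{namespace}:{name}"

# redis_key("telecom_licenses_distributed", "queue", "requests")
#   -> "crawlo:telecom_licenses_distributed:queue:requests"
# redis_key("telecom_licenses_distributed", "filter", "fingerprint")
#   -> "crawlo:telecom_licenses_distributed:filter:fingerprint"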
tests/test_template_content.py
ADDED

@@ -0,0 +1,88 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Template file content test script.
Verifies that the template files follow the new Redis key naming convention.
"""
import sys
import os

# Add the project root directory to the path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))


def test_template_content():
    """Test the template file content"""
    print("🔍 Testing template file content...")

    try:
        # Check the settings.py.tmpl template file
        template_file = "crawlo/templates/project/settings.py.tmpl"
        if not os.path.exists(template_file):
            print(f"❌ Template file does not exist: {template_file}")
            return False

        with open(template_file, 'r', encoding='utf-8') as f:
            content = f.read()

        # Check that the old REDIS_KEY setting has been removed
        old_config = "REDIS_KEY = f'{{project_name}}:fingerprint'"
        if old_config in content:
            print("❌ The old REDIS_KEY setting is still present")
            return False
        print(" ✅ Old REDIS_KEY setting removed")

        # Check that the new comments have been added
        filter_comment = "# crawlo:{project_name}:filter:fingerprint (request dedup)"
        if filter_comment not in content:
            print("❌ Missing the Redis key naming convention comment for request dedup")
            return False
        print(" ✅ Contains the Redis key naming convention comment for request dedup")

        item_comment = "# crawlo:{project_name}:item:fingerprint (item dedup)"
        if item_comment not in content:
            print("❌ Missing the Redis key naming convention comment for item dedup")
            return False
        print(" ✅ Contains the Redis key naming convention comment for item dedup")

        # Check that the queue name setting is kept
        queue_config = "SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'"
        if queue_config not in content:
            print("❌ Missing the queue name setting")
            return False
        print(" ✅ Contains the queue name setting")

        print("✅ Template file content test passed!")
        return True

    except Exception as e:
        print(f"❌ Error raised during the test: {e}")
        return False


def main():
    """Main test entry point"""
    print("🚀 Starting the template file content test...")
    print("=" * 50)

    try:
        success = test_template_content()

        print("=" * 50)
        if success:
            print("🎉 All tests passed! The template files follow the new Redis key naming convention")
        else:
            print("❌ Test failed, please check the template files")
            return 1

    except Exception as e:
        print("=" * 50)
        print(f"❌ Exception raised during the test: {e}")
        return 1

    return 0


if __name__ == "__main__":
    exit_code = main()
    sys.exit(exit_code)
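Taken together, the three checks imply that settings.py.tmpl now carries a fragment along these lines (reconstructed from the literal strings the test searches for; the surrounding template text is not shown in this diff):

# Redis keys are derived from the project name by the framework:
# crawlo:{project_name}:filter:fingerprint (request dedup)
# crawlo:{project_name}:item:fingerprint (item dedup)
SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'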
tests/test_template_redis_key.py
ADDED

@@ -0,0 +1,135 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Template project Redis key test script.
Verifies that a project generated from the templates follows the new Redis key naming convention.
"""
import sys
import os
import tempfile
import shutil
import subprocess
from pathlib import Path

# Add the project root directory to the path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))


def test_template_project_redis_key():
    """Test the template project's Redis key naming convention"""
    print("🔍 Testing the template project's Redis key naming convention...")

    # Create a temporary directory
    with tempfile.TemporaryDirectory() as temp_dir:
        original_cwd = os.getcwd()
        try:
            # Create the project in the original working directory, then move it into the temp directory
            # 1. Create the test project
            print(" 1. Creating the test project...")
            cmd_path = os.path.join(original_cwd, "crawlo", "commands", "startproject.py")
            result = subprocess.run([
                sys.executable, cmd_path, "test_project"
            ], cwd=original_cwd, capture_output=True, text=True)

            if result.returncode != 0:
                print(f"❌ Project creation failed: {result.stderr}")
                return False

            print(" ✅ Project created successfully")

            # Check the generated files
            project_dir = Path(original_cwd) / "test_project"
            if not project_dir.exists():
                print("❌ Project directory was not created")
                return False

            # Move the project into the temporary directory
            target_dir = Path(temp_dir) / "test_project"
            shutil.move(str(project_dir), str(target_dir))
            project_dir = target_dir

            settings_file = project_dir / "test_project" / "settings.py"
            if not settings_file.exists():
                print("❌ settings.py was not created")
                return False

            # Read the settings.py content
            with open(settings_file, 'r', encoding='utf-8') as f:
                settings_content = f.read()

            # Check that the old REDIS_KEY setting has been removed
            if "REDIS_KEY = f'{{project_name}}:fingerprint'" in settings_content:
                print("❌ The old REDIS_KEY setting is still present")
                return False

            # Check that the new comments have been added
            if "# crawlo:{project_name}:filter:fingerprint (request dedup)" not in settings_content:
                print("❌ Missing the new Redis key naming convention comment")
                return False

            if "# crawlo:{project_name}:item:fingerprint (item dedup)" not in settings_content:
                print("❌ Missing the Redis key naming convention comment for item dedup")
                return False

            print(" ✅ settings.py follows the new Redis key naming convention")

            # Check crawlo.cfg
            cfg_file = project_dir / "crawlo.cfg"
            if not cfg_file.exists():
                print("❌ crawlo.cfg was not created")
                return False

            with open(cfg_file, 'r', encoding='utf-8') as f:
                cfg_content = f.read()

            if "default = test_project.settings" not in cfg_content:
                print("❌ crawlo.cfg is misconfigured")
                return False

            print(" ✅ crawlo.cfg is configured correctly")

            print("✅ Template project Redis key naming convention test passed!")
            return True

        except Exception as e:
            print(f"❌ Error raised during the test: {e}")
            import traceback
            traceback.print_exc()
            return False
        finally:
            # Clean up the generated project directory
            project_dir = Path(original_cwd) / "test_project"
            if project_dir.exists():
                shutil.rmtree(str(project_dir), ignore_errors=True)

            # Restore the original working directory
            os.chdir(original_cwd)


def main():
    """Main test entry point"""
    print("🚀 Starting the template project Redis key naming convention test...")
    print("=" * 50)

    try:
        success = test_template_project_redis_key()

        print("=" * 50)
        if success:
            print("🎉 All tests passed! The template project follows the new Redis key naming convention")
        else:
            print("❌ Test failed, please check the template files")
            return 1

    except Exception as e:
        print("=" * 50)
        print(f"❌ Exception raised during the test: {e}")
        return 1

    return 0


if __name__ == "__main__":
    exit_code = main()
    sys.exit(exit_code)
tests/test_tools.py
ADDED

@@ -0,0 +1,154 @@
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Tools package tests
"""
import unittest
from crawlo.tools import (
    # date tools
    parse_time,
    format_time,
    time_diff,

    # data cleaning tools
    clean_text,
    format_currency,
    extract_emails,

    # data validation tools
    validate_email,
    validate_url,
    validate_phone,
    validate_chinese_id_card,
    validate_date,
    validate_number_range,

    # request handling tools
    build_url,
    add_query_params,
    merge_headers,

    # anti-crawling tools
    get_random_user_agent,
    rotate_proxy,

    # distributed coordination tools
    generate_task_id,
    get_cluster_info
)


class TestTools(unittest.TestCase):
    """Tools package test case"""

    def test_date_tools(self):
        """Test the date tools"""
        # Time parsing
        time_str = "2025-09-10 14:30:00"
        parsed_time = parse_time(time_str)
        self.assertIsNotNone(parsed_time)

        # Time formatting
        formatted_time = format_time(parsed_time, "%Y-%m-%d")
        self.assertEqual(formatted_time, "2025-09-10")

        # Time difference calculation
        time_str2 = "2025-09-11 14:30:00"
        parsed_time2 = parse_time(time_str2)
        diff = time_diff(parsed_time2, parsed_time)
        self.assertEqual(diff, 86400)  # 24 hours = 86400 seconds

    def test_data_cleaning_tools(self):
        """Test the data cleaning tools"""
        # Text cleaning
        dirty_text = "<p>This is a <b>test</b>&text</p>"
        clean_result = clean_text(dirty_text)
        self.assertEqual(clean_result, "This is a test&text")

        # Currency formatting
        price = 1234.567
        formatted_price = format_currency(price, "¥", 2)
        self.assertEqual(formatted_price, "¥1,234.57")

        # Email extraction
        text_with_email = "Contact emails: test@example.com, support@crawler.com"
        emails = extract_emails(text_with_email)
        self.assertIn("test@example.com", emails)
        self.assertIn("support@crawler.com", emails)

    def test_data_validation_tools(self):
        """Test the data validation tools"""
        # Email validation
        self.assertTrue(validate_email("test@example.com"))
        self.assertFalse(validate_email("invalid-email"))

        # URL validation
        self.assertTrue(validate_url("https://example.com"))
        self.assertFalse(validate_url("invalid-url"))

        # Phone number validation
        self.assertTrue(validate_phone("13812345678"))
        self.assertFalse(validate_phone("12345"))

        # Chinese ID card validation
        self.assertTrue(validate_chinese_id_card("110101199001011234"))
        self.assertFalse(validate_chinese_id_card("invalid-id"))

        # Date validation
        self.assertTrue(validate_date("2025-09-10"))
        self.assertFalse(validate_date("invalid-date"))

        # Number range validation
        self.assertTrue(validate_number_range(50, 1, 100))
        self.assertFalse(validate_number_range(150, 1, 100))

    def test_request_handling_tools(self):
        """Test the request handling tools"""
        # URL building
        base_url = "https://api.example.com"
        path = "/v1/users"
        query_params = {"page": 1, "limit": 10}
        full_url = build_url(base_url, path, query_params)
        self.assertIn("https://api.example.com/v1/users", full_url)
        self.assertIn("page=1", full_url)
        self.assertIn("limit=10", full_url)

        # Adding query parameters
        existing_url = "https://api.example.com/v1/users?page=1"
        new_params = {"sort": "name"}
        updated_url = add_query_params(existing_url, new_params)
        self.assertIn("sort=name", updated_url)

        # Merging request headers
        base_headers = {"Content-Type": "application/json"}
        additional_headers = {"Authorization": "Bearer token123"}
        merged_headers = merge_headers(base_headers, additional_headers)
        self.assertEqual(merged_headers["Content-Type"], "application/json")
        self.assertEqual(merged_headers["Authorization"], "Bearer token123")

    def test_anti_crawler_tools(self):
        """Test the anti-crawling tools"""
        # Random User-Agent
        user_agent = get_random_user_agent()
        self.assertIsInstance(user_agent, str)
        self.assertGreater(len(user_agent), 0)

        # Proxy rotation
        proxy = rotate_proxy()
        self.assertIsInstance(proxy, dict)

    def test_distributed_coordinator_tools(self):
        """Test the distributed coordination tools"""
        # Task ID generation
        task_id = generate_task_id("https://example.com", "test_spider")
        self.assertIsInstance(task_id, str)
        self.assertEqual(len(task_id), 32)  # length of an MD5 hash

        # Cluster info retrieval
        cluster_info = get_cluster_info()
        self.assertIsInstance(cluster_info, dict)
        self.assertIn("worker_count", cluster_info)


if __name__ == '__main__':
    unittest.main()
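The last test only pins down the shape of generate_task_id: a 32-character string, i.e. an MD5 hex digest. A plausible reimplementation consistent with those assertions (the real one in crawlo/tools/distributed_coordinator.py may differ) is:

import hashlib

def generate_task_id(url: str, spider_name: str) -> str:
    # Assumption: a task ID is a stable MD5 digest over spider + URL,
    # which satisfies the 32-character check in the test above.
    return hashlib.md5(f"{spider_name}:{url}".encode("utf-8")).hexdigest()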