crawlo-1.3.2-py3-none-any.whl → crawlo-1.3.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- crawlo/__init__.py +24 -0
- crawlo/__version__.py +1 -1
- crawlo/commands/run.py +58 -32
- crawlo/core/__init__.py +44 -0
- crawlo/core/engine.py +119 -45
- crawlo/core/scheduler.py +4 -3
- crawlo/crawler.py +603 -1133
- crawlo/downloader/aiohttp_downloader.py +4 -2
- crawlo/extension/__init__.py +1 -1
- crawlo/extension/logging_extension.py +23 -7
- crawlo/factories/__init__.py +28 -0
- crawlo/factories/base.py +69 -0
- crawlo/factories/crawler.py +104 -0
- crawlo/factories/registry.py +85 -0
- crawlo/filters/aioredis_filter.py +25 -2
- crawlo/framework.py +292 -0
- crawlo/initialization/__init__.py +40 -0
- crawlo/initialization/built_in.py +426 -0
- crawlo/initialization/context.py +142 -0
- crawlo/initialization/core.py +194 -0
- crawlo/initialization/phases.py +149 -0
- crawlo/initialization/registry.py +146 -0
- crawlo/items/base.py +2 -1
- crawlo/logging/__init__.py +38 -0
- crawlo/logging/config.py +97 -0
- crawlo/logging/factory.py +129 -0
- crawlo/logging/manager.py +112 -0
- crawlo/middleware/middleware_manager.py +1 -1
- crawlo/middleware/offsite.py +1 -1
- crawlo/mode_manager.py +26 -1
- crawlo/pipelines/pipeline_manager.py +2 -1
- crawlo/project.py +76 -46
- crawlo/queue/pqueue.py +11 -5
- crawlo/queue/queue_manager.py +143 -19
- crawlo/queue/redis_priority_queue.py +69 -49
- crawlo/settings/default_settings.py +110 -14
- crawlo/settings/setting_manager.py +29 -13
- crawlo/spider/__init__.py +34 -16
- crawlo/stats_collector.py +17 -3
- crawlo/task_manager.py +112 -3
- crawlo/templates/project/settings.py.tmpl +103 -202
- crawlo/templates/project/settings_distributed.py.tmpl +122 -135
- crawlo/templates/project/settings_gentle.py.tmpl +149 -43
- crawlo/templates/project/settings_high_performance.py.tmpl +127 -90
- crawlo/templates/project/settings_minimal.py.tmpl +46 -15
- crawlo/templates/project/settings_simple.py.tmpl +138 -75
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -1
- crawlo/templates/run.py.tmpl +10 -14
- crawlo/templates/spiders_init.py.tmpl +10 -0
- crawlo/tools/network_diagnostic.py +365 -0
- crawlo/utils/class_loader.py +26 -0
- crawlo/utils/error_handler.py +76 -35
- crawlo/utils/log.py +41 -144
- crawlo/utils/redis_connection_pool.py +43 -6
- crawlo/utils/request_serializer.py +8 -1
- {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/METADATA +120 -14
- {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/RECORD +104 -45
- tests/authenticated_proxy_example.py +2 -2
- tests/baidu_performance_test.py +109 -0
- tests/baidu_test.py +60 -0
- tests/comprehensive_framework_test.py +213 -0
- tests/comprehensive_test.py +82 -0
- tests/comprehensive_testing_summary.md +187 -0
- tests/debug_configure.py +70 -0
- tests/debug_framework_logger.py +85 -0
- tests/debug_log_levels.py +64 -0
- tests/distributed_test.py +67 -0
- tests/distributed_test_debug.py +77 -0
- tests/final_command_test_report.md +0 -0
- tests/final_comprehensive_test.py +152 -0
- tests/final_validation_test.py +183 -0
- tests/framework_performance_test.py +203 -0
- tests/optimized_performance_test.py +212 -0
- tests/performance_comparison.py +246 -0
- tests/queue_blocking_test.py +114 -0
- tests/queue_test.py +90 -0
- tests/scrapy_comparison/ofweek_scrapy.py +139 -0
- tests/scrapy_comparison/scrapy_test.py +134 -0
- tests/simple_command_test.py +120 -0
- tests/simple_crawlo_test.py +128 -0
- tests/simple_log_test.py +58 -0
- tests/simple_optimization_test.py +129 -0
- tests/simple_spider_test.py +50 -0
- tests/simple_test.py +48 -0
- tests/test_all_commands.py +231 -0
- tests/test_batch_processor.py +179 -0
- tests/test_component_factory.py +175 -0
- tests/test_controlled_spider_mixin.py +80 -0
- tests/test_enhanced_error_handler_comprehensive.py +246 -0
- tests/test_factories.py +253 -0
- tests/test_framework_logger.py +67 -0
- tests/test_framework_startup.py +65 -0
- tests/test_large_scale_config.py +113 -0
- tests/test_large_scale_helper.py +236 -0
- tests/test_mode_change.py +73 -0
- tests/test_mode_consistency.py +1 -1
- tests/test_performance_monitor.py +116 -0
- tests/test_queue_empty_check.py +42 -0
- tests/untested_features_report.md +139 -0
- tests/verify_debug.py +52 -0
- tests/verify_log_fix.py +112 -0
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +0 -82
- {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/WHEEL +0 -0
- {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/entry_points.txt +0 -0
- {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/top_level.txt +0 -0
tests/baidu_test.py
ADDED
@@ -0,0 +1,60 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Baidu website test script.
Tests the basic functionality of the Crawlo framework.
"""
import asyncio
import sys
import os

# Add the project root directory to the Python path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from crawlo import Spider, Request
from crawlo.crawler import CrawlerProcess


class BaiduSpider(Spider):
    name = 'baidu_test'

    def start_requests(self):
        # Test the Baidu home page
        yield Request('https://www.baidu.com/', callback=self.parse_home)

    def parse_home(self, response):
        self.logger.info(f"Successfully fetched the Baidu home page: {response.url}")
        self.logger.info(f"Response status code: {response.status_code}")
        self.logger.info(f"Page title: {response.xpath('//title/text()').get()}")

        # Extract a few links for testing
        links = response.xpath('//a/@href').getall()[:5]  # take only the first 5 links
        self.logger.info(f"Number of links extracted: {len(links)}")

        # Optionally follow a few of the links
        for i, link in enumerate(links):
            if i >= 2:  # follow only the first 2 links
                break
            # Make sure the link is an absolute URL
            if link.startswith('http'):
                yield Request(link, callback=self.parse_link)

    def parse_link(self, response):
        self.logger.info(f"Visited link: {response.url}")
        self.logger.info(f"Response status code: {response.status_code}")


async def main():
    # Create the crawler process
    process = CrawlerProcess(settings={
        'CONCURRENCY': 4,       # concurrency level
        'DOWNLOAD_DELAY': 1,    # download delay
        'LOG_LEVEL': 'INFO',    # log level
    })

    # Run the spider
    await process.crawl(BaiduSpider)


if __name__ == '__main__':
    asyncio.run(main())
tests/comprehensive_framework_test.py
ADDED
@@ -0,0 +1,213 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Comprehensive framework test script.
Exercises all of the framework's core functionality.
"""

import sys
import os
import asyncio
import time

# Add the project root directory to the Python path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from crawlo.spider import Spider
from crawlo import Request


class TestSpider(Spider):
    """Test spider"""
    name = 'framework_test_spider'

    def start_requests(self):
        """Issue test requests"""
        # Generate a few test requests
        for i in range(3):
            yield Request(f'https://httpbin.org/get?page={i}', callback=self.parse)

    def parse(self, response):
        """Parse the response"""
        print(f"Fetched response: {response.url}")
        print(f"Status code: {response.status_code}")
        return []


async def test_framework_initialization():
    """Test framework initialization"""
    print("Testing framework initialization...")

    from crawlo.initialization import initialize_framework

    # Test the default configuration
    settings = initialize_framework()
    print(f"Default config - RUN_MODE: {settings.get('RUN_MODE')}")
    print(f"Default config - QUEUE_TYPE: {settings.get('QUEUE_TYPE')}")

    # Test a custom configuration
    custom_settings = {
        'PROJECT_NAME': 'framework_test',
        'SCHEDULER_MAX_QUEUE_SIZE': 50
    }

    settings = initialize_framework(custom_settings)
    print(f"Custom config - PROJECT_NAME: {settings.get('PROJECT_NAME')}")
    print(f"Custom config - SCHEDULER_MAX_QUEUE_SIZE: {settings.get('SCHEDULER_MAX_QUEUE_SIZE')}")


async def test_crawler_execution():
    """Test crawler execution"""
    print("Testing crawler execution...")

    from crawlo.initialization import initialize_framework
    from crawlo.crawler import CrawlerProcess

    # Initialize the framework
    settings = initialize_framework({
        'PROJECT_NAME': 'framework_test'
    })

    # Create the crawler process
    process = CrawlerProcess(settings=settings)

    # Run the spider
    await process.crawl(TestSpider)


async def test_queue_system():
    """Test the queue system"""
    print("Testing the queue system...")

    from crawlo.queue.queue_manager import QueueConfig, QueueManager
    from crawlo import Request

    # Create a small queue configuration for testing
    queue_config = QueueConfig(
        queue_type='memory',
        max_queue_size=5
    )

    # Create the queue manager
    queue_manager = QueueManager(queue_config)
    await queue_manager.initialize()

    # Test adding requests
    print("Adding requests to the queue...")
    for i in range(3):
        request = Request(f'https://example.com/test{i}')
        await queue_manager.put(request)
        print(f"Added request {i}")

    # Test fetching requests
    print("Fetching requests from the queue...")
    for i in range(3):
        request = await queue_manager.get(timeout=1.0)
        if request:
            print(f"Fetched request: {request.url}")

    # Close the queue
    await queue_manager.close()


async def test_spider_registry():
    """Test the spider registry"""
    print("Testing the spider registry...")

    from crawlo.spider import get_global_spider_registry, is_spider_registered, get_spider_names

    # Check whether the test spider is registered
    spider_name = TestSpider.name
    is_registered = is_spider_registered(spider_name)
    print(f"Spider '{spider_name}' registered: {is_registered}")

    # Get the names of all registered spiders
    spider_names = get_spider_names()
    print(f"All registered spiders: {spider_names}")


async def test_logging_system():
    """Test the logging system"""
    print("Testing the logging system...")

    from crawlo.logging import get_logger, configure_logging

    # Configure the logging system
    configure_logging({
        'LOG_LEVEL': 'INFO',
        'LOG_FILE': 'logs/test_framework.log'
    })

    # Get a logger and write some records
    logger = get_logger('test_framework')
    logger.info("This is a test info message")
    logger.warning("This is a test warning message")
    logger.error("This is a test error message")


async def test_settings_system():
    """Test the settings system"""
    print("Testing the settings system...")

    from crawlo.settings.setting_manager import SettingManager

    # Create the settings manager
    settings = SettingManager()

    # Test a plain setting
    settings.set('TEST_KEY', 'test_value')
    test_value = settings.get('TEST_KEY')
    print(f"Value of setting TEST_KEY: {test_value}")

    # Test settings of different types
    settings.set('TEST_INT', 42)
    test_int = settings.get_int('TEST_INT')
    print(f"Value of setting TEST_INT: {test_int}")

    settings.set('TEST_BOOL', True)
    test_bool = settings.get_bool('TEST_BOOL')
    print(f"Value of setting TEST_BOOL: {test_bool}")


async def main():
    """Main entry point"""
    print("Starting the comprehensive framework tests...")
    print("=" * 50)

    try:
        # 1. Test framework initialization
        await test_framework_initialization()
        print()

        # 2. Test the settings system
        await test_settings_system()
        print()

        # 3. Test the logging system
        await test_logging_system()
        print()

        # 4. Test the queue system
        await test_queue_system()
        print()

        # 5. Test the spider registry
        await test_spider_registry()
        print()

        # 6. Test crawler execution
        await test_crawler_execution()
        print()

        print("=" * 50)
        print("All tests passed! The framework is working correctly.")

    except Exception as e:
        print("=" * 50)
        print(f"Test failed: {e}")
        import traceback
        traceback.print_exc()


if __name__ == "__main__":
    asyncio.run(main())
tests/comprehensive_test.py
ADDED
@@ -0,0 +1,82 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Comprehensive test script.
Tests all of the Crawlo framework's optimization features.
"""
import asyncio
import sys
import os

# Add the project root directory to the Python path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from crawlo import Spider, Request
from crawlo.crawler import CrawlerProcess


class ComprehensiveSpider(Spider):
    name = 'comprehensive_test'

    def start_requests(self):
        # Test several URLs
        urls = [
            'https://www.baidu.com/',
            'https://www.baidu.com/s?wd=python',
            'https://www.baidu.com/s?wd=爬虫',
            'https://www.baidu.com/s?wd=框架',
            'https://www.baidu.com/s?wd=异步',
        ]

        for i, url in enumerate(urls):
            # Assign different priorities
            priority = -i  # negative values encode priority; the smaller the value, the higher the priority
            yield Request(url, callback=self.parse, priority=priority)

    def parse(self, response):
        self.logger.info(f"Visited URL: {response.url}")
        self.logger.info(f"Response status code: {response.status_code}")
        self.logger.info(f"Page title: {response.xpath('//title/text()').get()}")

        # Extract a few links for further testing
        links = response.xpath('//a/@href').getall()[:3]  # take only the first 3 links

        # Follow the links, tracking depth
        for link in links:
            if link.startswith('http'):
                # Create a new request with an increased depth
                meta = response.meta.copy()
                meta['depth'] = meta.get('depth', 0) + 1
                yield Request(link, callback=self.parse_link, meta=meta)

    def parse_link(self, response):
        self.logger.info(f"Followed link: {response.url}")
        self.logger.info(f"Response status code: {response.status_code}")
        self.logger.info(f"Page depth: {response.meta.get('depth', 0)}")


async def main():
    # Create the crawler process
    process = CrawlerProcess(settings={
        'CONCURRENCY': 4,                 # concurrency level
        'DOWNLOAD_DELAY': 0.5,            # download delay
        'LOG_LEVEL': 'INFO',              # log level
        'SCHEDULER_MAX_QUEUE_SIZE': 100,  # maximum queue size
    })

    # Run the spider
    await process.crawl(ComprehensiveSpider)

    # Print statistics
    if hasattr(process, 'get_metrics'):
        metrics = process.get_metrics()
        print(f"\n=== Crawl statistics ===")
        print(f"Total duration: {metrics.get('total_duration', 0):.2f}s")
        print(f"Total requests: {metrics.get('total_requests', 0)}")
        print(f"Successful requests: {metrics.get('total_success', 0)}")
        print(f"Failed requests: {metrics.get('total_errors', 0)}")
        print(f"Average success rate: {metrics.get('average_success_rate', 0):.2f}%")


if __name__ == '__main__':
    asyncio.run(main())
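The priority values used in ComprehensiveSpider above follow the usual min-heap convention: more negative means higher priority, and the smallest value is dequeued first. Purely as a generic illustration of that ordering — this is not crawlo's queue implementation, whose internals this diff does not show — the behaviour can be reproduced with the standard library:

```python
import heapq

# Hypothetical illustration: a min-heap pops the smallest priority value first,
# so priority -4 ("highest") comes out before priority 0.
queue = []
for priority, url in [(0, 'https://www.baidu.com/'),
                      (-2, 'https://www.baidu.com/s?wd=python'),
                      (-4, 'https://www.baidu.com/s?wd=异步')]:
    heapq.heappush(queue, (priority, url))

while queue:
    priority, url = heapq.heappop(queue)
    print(priority, url)  # printed in order -4, -2, 0: most negative first
```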
tests/comprehensive_testing_summary.md
ADDED
@@ -0,0 +1,187 @@
# Comprehensive Testing Summary Report

## Overview

The goal of this testing effort was to create complete test cases for the Crawlo framework modules that previously lacked adequate coverage, in order to improve the framework's stability and reliability. Through systematic analysis and testing, we built comprehensive test coverage for several core components.

## Completed Testing Work

### 1. Factory-Pattern Modules

**Test file**: `tests/test_factories.py`

**Components covered**:
- `ComponentRegistry` - component registry
- `ComponentFactory` - component factory base class
- `DefaultComponentFactory` - default component factory
- `CrawlerComponentFactory` - crawler component factory

**Test scope**:
- Creating and registering component specifications
- Singleton support in the default component factory
- Registration and creation through the component registry
- Use of the global component registry
- Dependency checks and creation in the crawler component factory

**Test result**: all 10 test cases passed

### 2. Batch-Processing Utilities

**Test file**: `tests/test_batch_processor.py`

**Components covered**:
- `BatchProcessor` - batch processor
- `RedisBatchProcessor` - Redis batch processor
- `batch_process` - convenience batch-processing function

**Test scope**:
- Batch processor initialization and configuration
- Synchronous and asynchronous batch processing
- Splitting large data sets into batches
- Exception handling and error recovery
- Bulk set, get, and delete operations in the Redis batch processor
- Use of the convenience batch-processing function

**Test result**: all 12 test cases passed

### 3. Controlled Spider Mixins

**Test file**: `tests/test_controlled_spider_mixin.py`

**Components covered**:
- `ControlledRequestMixin` - controlled request-generation mixin
- `AsyncControlledRequestMixin` - asynchronous controlled request mixin

**Test scope**:
- Mixin initialization and configuration
- Modifying and validating configuration parameters
- Retrieving generation statistics

**Test result**: all 5 test cases passed

### 4. Large-Scale Configuration Utilities

**Test file**: `tests/test_large_scale_config.py`

**Components covered**:
- `LargeScaleConfig` - large-scale crawl configuration class
- `apply_large_scale_config` - function that applies a large-scale configuration

**Test scope**:
- Validation of the conservative, balanced, aggressive, and memory-optimized presets
- Computation of configuration parameters for different concurrency levels
- Applying a configuration
- Error handling for invalid configuration types

**Test result**: all 6 test cases passed

### 5. Large-Scale Crawl Helpers

**Test file**: `tests/test_large_scale_helper.py`

**Components covered**:
- `LargeScaleHelper` - large-scale crawl helper class
- `ProgressManager` - progress manager
- `MemoryOptimizer` - memory optimizer
- `DataSourceAdapter` - data source adapter
- `LargeScaleSpiderMixin` - large-scale spider mixin

**Test scope**:
- Compatibility of the batch iterator with different data sources
- Saving and loading progress
- Basic functionality of the memory optimizer
- Use of the file data-source adapter
- Mixin initialization and attribute validation

**Test result**: all 13 test cases passed

### 6. Enhanced Error-Handling Utilities

**Test file**: `tests/test_enhanced_error_handler_comprehensive.py`

**Components covered**:
- `ErrorContext` - error context information
- `DetailedException` - detailed exception base class
- `EnhancedErrorHandler` - enhanced error handler
- `handle_exception` decorator

**Test scope**:
- Creating error contexts and their string representation
- Initialization of detailed exceptions and the information they wrap
- Error handling, safe invocation, and history tracking in the enhanced error handler
- Decorator support for both synchronous and asynchronous functions
- Integration of detailed exceptions with the decorator

**Test result**: all 15 test cases passed

## Overall Test Statistics

| Module | Test file | Test cases | Passed | Pass rate |
|------|----------|------------|--------|--------|
| Factory pattern | test_factories.py | 10 | 10 | 100% |
| Batch-processing utilities | test_batch_processor.py | 12 | 12 | 100% |
| Controlled spider mixins | test_controlled_spider_mixin.py | 5 | 5 | 100% |
| Large-scale configuration utilities | test_large_scale_config.py | 6 | 6 | 100% |
| Large-scale crawl helpers | test_large_scale_helper.py | 13 | 13 | 100% |
| Enhanced error-handling utilities | test_enhanced_error_handler_comprehensive.py | 15 | 15 | 100% |
| **Total** | **6 files** | **61** | **61** | **100%** |

## Outstanding Testing Work

### Performance-Monitoring Utilities

**Module path**: `crawlo/utils/performance_monitor.py`

**Issue**: depends on the psutil module, which is not installed in the current environment.

**Recommendation**: after installing psutil, run the existing test file `tests/test_performance_monitor.py` to verify the performance-monitoring utilities.

## Test Quality Assessment

### Code Coverage

This testing effort provides comprehensive coverage for the following core components:

1. **Factory pattern**: the framework's core component-creation mechanism
2. **Batch-processing utilities**: key components for large-scale data processing
3. **Controlled spider mixins**: components that solve concurrency-control problems when generating large numbers of requests
4. **Large-scale configuration utilities**: configuration management for large-scale crawl scenarios
5. **Large-scale crawl helpers**: supporting components for large-scale crawl scenarios
6. **Enhanced error-handling utilities**: key components for system stability

### Test Types Covered

The tests cover the following areas:

1. **Unit tests**: each class's methods tested in isolation
2. **Integration tests**: collaboration between modules
3. **Exception-handling tests**: boundary conditions and error cases
4. **Configuration tests**: validity of different configuration parameters
5. **Mock tests**: mock objects used to simulate external dependencies

## Recommendations and Follow-Up Work

### 1. Environment Setup

Install the psutil module to complete testing of the performance-monitoring utilities:
```bash
pip install psutil
```

### 2. Continuous Integration

Integrate these test cases into the continuous integration (CI) pipeline so that every code change runs the tests automatically.

### 3. Performance Testing

Add performance tests for the batch-processing and large-scale utilities to verify how they behave under real workloads.

### 4. Extended Integration Testing

Create more integration tests that verify how these components work together in real crawl scenarios.

## Conclusion

Through this systematic testing effort we built comprehensive test coverage for several core components of the Crawlo framework, significantly improving its stability and reliability. These test cases not only verify the correctness of existing functionality but also provide a safety net for future feature work and refactoring.

The team should continue to maintain and extend these test cases to keep the framework healthy over the long term.
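Section 6 of the report above notes that the `handle_exception` decorator supports both synchronous and asynchronous functions. The decorator's actual signature is not part of this diff; purely as a hedged sketch of how such dual support is commonly implemented in Python, the usual pattern branches on whether the wrapped callable is a coroutine function:

```python
import asyncio
import functools
import logging

def handle_exception_sketch(func):
    """Illustrative only (not crawlo's implementation): log and re-raise errors
    from either a sync or an async callable."""
    if asyncio.iscoroutinefunction(func):
        @functools.wraps(func)
        async def async_wrapper(*args, **kwargs):
            try:
                return await func(*args, **kwargs)
            except Exception:
                logging.exception("error in %s", func.__name__)
                raise
        return async_wrapper

    @functools.wraps(func)
    def sync_wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            logging.exception("error in %s", func.__name__)
            raise
    return sync_wrapper
```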
tests/debug_configure.py
ADDED
@@ -0,0 +1,70 @@
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Step-by-step debugging of LoggerManager.configure
"""
import sys
import os
sys.path.insert(0, '/')

from crawlo.utils.log import LoggerManager

print("=== Step-by-step debugging of LoggerManager.configure ===")

print("1. Check the initial state")
print(f"   _early_initialized: {LoggerManager._early_initialized}")
print(f"   _configured: {LoggerManager._configured}")

print("2. Call early_initialize")
LoggerManager.early_initialize()
print(f"   _early_initialized: {LoggerManager._early_initialized}")

print("3. Entering configure - start")
# Simulate the individual steps of the configure method
with LoggerManager._config_lock:
    print("   acquired the lock")

    if LoggerManager._configured:
        print("   already configured, returning")
    else:
        print("   starting configuration...")

        # Update the state
        print("   setting _log_state")
        from crawlo.utils.log import _log_state
        _log_state['current_step'] = 'basic_setup'

        print("   processing arguments")
        kwargs = {'LOG_LEVEL': 'INFO', 'LOG_FILE': 'test.log'}
        get_val = lambda k, d=None: kwargs.get(k, d)

        filename = get_val('LOG_FILE')
        level = get_val('LOG_LEVEL', None)
        if level is None:
            level = 'INFO'

        print(f"   filename: {filename}")
        print(f"   level: {level}")

        print("   setting defaults")
        LoggerManager._default_filename = filename
        LoggerManager._default_level = LoggerManager._to_level(level)
        LoggerManager._default_file_level = LoggerManager._to_level(level)
        LoggerManager._default_console_level = LoggerManager._default_level

        print("   clearing the cache")
        LoggerManager.logger_cache.clear()

        print("   marking as configured")
        LoggerManager._configured = True
        _log_state['current_step'] = 'full_config'

        print("   configuration finished")

print("4. Test creating a logger")
from crawlo.utils.log import get_logger
logger = get_logger('test')
print(f"   Logger: {logger}")
print(f"   Handlers: {len(logger.handlers)}")

print("=== Debugging finished ===")
tests/debug_framework_logger.py
ADDED
@@ -0,0 +1,85 @@
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Debug the framework logger configuration
"""
import sys
import os
sys.path.insert(0, '/')

from crawlo.core.framework_initializer import initialize_framework, get_framework_initializer
from crawlo.utils.log import get_logger, LoggerManager
import logging

def debug_framework_logger():
    print("=== Debugging the framework logger configuration ===")

    # 1. Initialize the framework, mimicking the ofweek_standalone configuration
    print("1. Initializing the framework...")
    custom_settings = {
        'LOG_LEVEL': 'INFO',
        'LOG_FILE': 'logs/debug_framework.log',
        'PROJECT_NAME': 'debug_test',
        'RUN_MODE': 'standalone'
    }

    # Make sure the log directory exists
    os.makedirs('../logs', exist_ok=True)

    settings = initialize_framework(custom_settings)
    print(f"   LOG_LEVEL: {settings.get('LOG_LEVEL')}")
    print(f"   LOG_FILE: {settings.get('LOG_FILE')}")

    # 2. Get the framework initialization manager
    init_manager = get_framework_initializer()
    print(f"   framework ready: {init_manager.is_ready}")
    print(f"   initialization phase: {init_manager.phase}")

    # 3. Inspect the framework logger
    framework_logger = init_manager.logger
    print(f"   framework logger: {framework_logger}")
    if framework_logger:
        print(f"   name: {framework_logger.name}")
        print(f"   level: {framework_logger.level} ({logging.getLevelName(framework_logger.level)})")
        print(f"   number of handlers: {len(framework_logger.handlers)}")

        for i, handler in enumerate(framework_logger.handlers):
            handler_type = type(handler).__name__
            handler_level = handler.level
            print(f"     handler {i}: {handler_type}, level: {handler_level} ({logging.getLevelName(handler_level)})")
            if hasattr(handler, 'baseFilename'):
                print(f"       file: {handler.baseFilename}")

    # 4. Manually create a crawlo.framework logger for comparison
    manual_logger = get_logger('crawlo.framework')
    print(f"   manually created logger: {manual_logger}")
    if manual_logger:
        print(f"   name: {manual_logger.name}")
        print(f"   level: {manual_logger.level} ({logging.getLevelName(manual_logger.level)})")
        print(f"   number of handlers: {len(manual_logger.handlers)}")

        for i, handler in enumerate(manual_logger.handlers):
            handler_type = type(handler).__name__
            handler_level = handler.level
            print(f"     handler {i}: {handler_type}, level: {handler_level} ({logging.getLevelName(handler_level)})")
            if hasattr(handler, 'baseFilename'):
                print(f"       file: {handler.baseFilename}")

    # 5. Test log output
    print("2. Testing log output...")

    if framework_logger:
        framework_logger.info("Framework logger test message - INFO level")
        framework_logger.debug("Framework logger test message - DEBUG level")

    if manual_logger:
        manual_logger.info("Manual logger test message - INFO level")
        manual_logger.debug("Manual logger test message - DEBUG level")

    # 6. Check whether the two are the same instance
    print(f"3. Same instance: {framework_logger is manual_logger}")

    print("=== Debugging finished ===")

if __name__ == "__main__":
    debug_framework_logger()
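The debug script above prints both the logger level and each handler's level because both thresholds apply: a record must first pass the logger's own level and is then filtered again by every attached handler. As background only, a minimal stdlib-only sketch of that interaction (independent of crawlo; the logger name and file path here are placeholders):

```python
import logging
import sys

logger = logging.getLogger("level_demo")
logger.setLevel(logging.DEBUG)           # the logger itself accepts DEBUG and above

console = logging.StreamHandler(sys.stdout)
console.setLevel(logging.INFO)           # but this handler drops DEBUG records
logger.addHandler(console)

file_handler = logging.FileHandler("level_demo.log")
file_handler.setLevel(logging.DEBUG)     # the file handler keeps everything
logger.addHandler(file_handler)

logger.debug("written to the file only")     # filtered out by the console handler
logger.info("written to both destinations")
```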