crawlo 1.3.2__py3-none-any.whl → 1.3.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of crawlo might be problematic.

Files changed (105)
  1. crawlo/__init__.py +24 -0
  2. crawlo/__version__.py +1 -1
  3. crawlo/commands/run.py +58 -32
  4. crawlo/core/__init__.py +44 -0
  5. crawlo/core/engine.py +119 -45
  6. crawlo/core/scheduler.py +4 -3
  7. crawlo/crawler.py +603 -1133
  8. crawlo/downloader/aiohttp_downloader.py +4 -2
  9. crawlo/extension/__init__.py +1 -1
  10. crawlo/extension/logging_extension.py +23 -7
  11. crawlo/factories/__init__.py +28 -0
  12. crawlo/factories/base.py +69 -0
  13. crawlo/factories/crawler.py +104 -0
  14. crawlo/factories/registry.py +85 -0
  15. crawlo/filters/aioredis_filter.py +25 -2
  16. crawlo/framework.py +292 -0
  17. crawlo/initialization/__init__.py +40 -0
  18. crawlo/initialization/built_in.py +426 -0
  19. crawlo/initialization/context.py +142 -0
  20. crawlo/initialization/core.py +194 -0
  21. crawlo/initialization/phases.py +149 -0
  22. crawlo/initialization/registry.py +146 -0
  23. crawlo/items/base.py +2 -1
  24. crawlo/logging/__init__.py +38 -0
  25. crawlo/logging/config.py +97 -0
  26. crawlo/logging/factory.py +129 -0
  27. crawlo/logging/manager.py +112 -0
  28. crawlo/middleware/middleware_manager.py +1 -1
  29. crawlo/middleware/offsite.py +1 -1
  30. crawlo/mode_manager.py +26 -1
  31. crawlo/pipelines/pipeline_manager.py +2 -1
  32. crawlo/project.py +76 -46
  33. crawlo/queue/pqueue.py +11 -5
  34. crawlo/queue/queue_manager.py +143 -19
  35. crawlo/queue/redis_priority_queue.py +69 -49
  36. crawlo/settings/default_settings.py +110 -14
  37. crawlo/settings/setting_manager.py +29 -13
  38. crawlo/spider/__init__.py +34 -16
  39. crawlo/stats_collector.py +17 -3
  40. crawlo/task_manager.py +112 -3
  41. crawlo/templates/project/settings.py.tmpl +103 -202
  42. crawlo/templates/project/settings_distributed.py.tmpl +122 -135
  43. crawlo/templates/project/settings_gentle.py.tmpl +149 -43
  44. crawlo/templates/project/settings_high_performance.py.tmpl +127 -90
  45. crawlo/templates/project/settings_minimal.py.tmpl +46 -15
  46. crawlo/templates/project/settings_simple.py.tmpl +138 -75
  47. crawlo/templates/project/spiders/__init__.py.tmpl +5 -1
  48. crawlo/templates/run.py.tmpl +10 -14
  49. crawlo/templates/spiders_init.py.tmpl +10 -0
  50. crawlo/tools/network_diagnostic.py +365 -0
  51. crawlo/utils/class_loader.py +26 -0
  52. crawlo/utils/error_handler.py +76 -35
  53. crawlo/utils/log.py +41 -144
  54. crawlo/utils/redis_connection_pool.py +43 -6
  55. crawlo/utils/request_serializer.py +8 -1
  56. {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/METADATA +120 -14
  57. {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/RECORD +104 -45
  58. tests/authenticated_proxy_example.py +2 -2
  59. tests/baidu_performance_test.py +109 -0
  60. tests/baidu_test.py +60 -0
  61. tests/comprehensive_framework_test.py +213 -0
  62. tests/comprehensive_test.py +82 -0
  63. tests/comprehensive_testing_summary.md +187 -0
  64. tests/debug_configure.py +70 -0
  65. tests/debug_framework_logger.py +85 -0
  66. tests/debug_log_levels.py +64 -0
  67. tests/distributed_test.py +67 -0
  68. tests/distributed_test_debug.py +77 -0
  69. tests/final_command_test_report.md +0 -0
  70. tests/final_comprehensive_test.py +152 -0
  71. tests/final_validation_test.py +183 -0
  72. tests/framework_performance_test.py +203 -0
  73. tests/optimized_performance_test.py +212 -0
  74. tests/performance_comparison.py +246 -0
  75. tests/queue_blocking_test.py +114 -0
  76. tests/queue_test.py +90 -0
  77. tests/scrapy_comparison/ofweek_scrapy.py +139 -0
  78. tests/scrapy_comparison/scrapy_test.py +134 -0
  79. tests/simple_command_test.py +120 -0
  80. tests/simple_crawlo_test.py +128 -0
  81. tests/simple_log_test.py +58 -0
  82. tests/simple_optimization_test.py +129 -0
  83. tests/simple_spider_test.py +50 -0
  84. tests/simple_test.py +48 -0
  85. tests/test_all_commands.py +231 -0
  86. tests/test_batch_processor.py +179 -0
  87. tests/test_component_factory.py +175 -0
  88. tests/test_controlled_spider_mixin.py +80 -0
  89. tests/test_enhanced_error_handler_comprehensive.py +246 -0
  90. tests/test_factories.py +253 -0
  91. tests/test_framework_logger.py +67 -0
  92. tests/test_framework_startup.py +65 -0
  93. tests/test_large_scale_config.py +113 -0
  94. tests/test_large_scale_helper.py +236 -0
  95. tests/test_mode_change.py +73 -0
  96. tests/test_mode_consistency.py +1 -1
  97. tests/test_performance_monitor.py +116 -0
  98. tests/test_queue_empty_check.py +42 -0
  99. tests/untested_features_report.md +139 -0
  100. tests/verify_debug.py +52 -0
  101. tests/verify_log_fix.py +112 -0
  102. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +0 -82
  103. {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/WHEEL +0 -0
  104. {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/entry_points.txt +0 -0
  105. {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/top_level.txt +0 -0
tests/debug_log_levels.py
@@ -0,0 +1,64 @@
+ #!/usr/bin/env python
+ # -*- coding: UTF-8 -*-
+ """
+ Log-level configuration debug script
+ """
+ import sys
+ import os
+ sys.path.insert(0, '/')
+
+ from crawlo.core.framework_initializer import initialize_framework
+ from crawlo.utils.log import LoggerManager, get_logger
+ import logging
+
+ def main():
+     print("=== Starting log-level configuration debug ===")
+
+     # Initialize the framework
+     print("1. Initializing framework...")
+     settings = initialize_framework()
+
+     # Print configuration info
+     print(f"2. Configuration info:")
+     print(f"   LOG_LEVEL: {settings.get('LOG_LEVEL')}")
+     print(f"   LOG_FILE: {settings.get('LOG_FILE')}")
+     print(f"   LoggerManager._default_level: {LoggerManager._default_level}")
+     print(f"   LoggerManager._default_console_level: {LoggerManager._default_console_level}")
+     print(f"   LoggerManager._default_file_level: {LoggerManager._default_file_level}")
+
+     # Check the log level of each component
+     components = [
+         'crawlo.framework',
+         'crawlo.crawler',
+         'QueueManager',
+         'Scheduler',
+         'AioHttpDownloader',
+         'MiddlewareManager',
+         'PipelineManager',
+         'ExtensionManager',
+         'of_week_standalone'
+     ]
+
+     print("3. Component log-level check:")
+     for component_name in components:
+         logger = get_logger(component_name)
+         print(f"   {component_name}:")
+         print(f"     logger.level: {logger.level} ({logging.getLevelName(logger.level)})")
+
+         for handler in logger.handlers:
+             handler_type = type(handler).__name__
+             print(f"     {handler_type}.level: {handler.level} ({logging.getLevelName(handler.level)})")
+
+     # Exercise actual log output
+     print("4. Testing log output:")
+     test_logger = get_logger('TestLogger')
+
+     print("   The console should show the following logs:")
+     test_logger.debug("DEBUG-level log - should NOT appear on the console")
+     test_logger.info("INFO-level log - should appear on the console")
+     test_logger.warning("WARNING-level log - should appear on the console")
+
+     print("=== Debug complete ===")
+
+ if __name__ == '__main__':
+     main()
tests/distributed_test.py
@@ -0,0 +1,67 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ """
+ Distributed mode test script
+ """
+
+ import sys
+ import os
+ import asyncio
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ from crawlo.spider import Spider
+ from crawlo import Request
+
+
+ class DistributedTestSpider(Spider):
+     """Distributed test spider"""
+     name = 'distributed_test_spider'
+
+     def start_requests(self):
+         """Issue the test requests"""
+         # Generate a few test requests
+         for i in range(5):
+             yield Request(f'https://httpbin.org/get?page={i}', callback=self.parse)
+
+     def parse(self, response):
+         """Parse the response"""
+         print(f"Successfully fetched response: {response.url}")
+         print(f"Status code: {response.status_code}")
+         return []
+
+
+ async def test_distributed_mode():
+     """Test distributed mode"""
+     print("Starting distributed mode test...")
+
+     # Initialize the framework in distributed mode
+     from crawlo.initialization import initialize_framework
+     custom_settings = {
+         'RUN_MODE': 'distributed',
+         'QUEUE_TYPE': 'redis',
+         'FILTER_CLASS': 'crawlo.filters.aioredis_filter.AioRedisFilter',
+         'REDIS_HOST': '127.0.0.1',
+         'REDIS_PORT': 6379,
+         'REDIS_DB': 15,  # use a dedicated test database
+         'PROJECT_NAME': 'distributed_test'
+     }
+     settings = initialize_framework(custom_settings)
+
+     # Create the crawler process
+     from crawlo.crawler import CrawlerProcess
+     process = CrawlerProcess(settings=settings)
+
+     # Run the spider
+     await process.crawl(DistributedTestSpider)
+
+     print("Distributed mode test complete!")
+
+
+ def main():
+     """Main entry point"""
+     print("Starting distributed mode test...")
+     asyncio.run(test_distributed_mode())
+
+
+ if __name__ == "__main__":
+     main()
tests/distributed_test_debug.py
@@ -0,0 +1,77 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ """
+ Distributed mode debug test script
+ """
+
+ import sys
+ import os
+ import asyncio
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ from crawlo.spider import Spider
+ from crawlo import Request
+
+
+ class DistributedTestSpider(Spider):
+     """Distributed test spider"""
+     name = 'distributed_test_spider'
+
+     def start_requests(self):
+         """Issue the test requests"""
+         # Generate a few test requests
+         for i in range(3):
+             yield Request(f'https://httpbin.org/get?page={i}', callback=self.parse)
+
+     def parse(self, response):
+         """Parse the response"""
+         print(f"Successfully fetched response: {response.url}")
+         print(f"Status code: {response.status_code}")
+         return []
+
+
+ async def test_distributed_mode():
+     """Test distributed mode"""
+     print("Starting distributed mode test...")
+
+     # Initialize the framework in distributed mode
+     from crawlo.initialization import initialize_framework
+     custom_settings = {
+         'RUN_MODE': 'distributed',
+         'QUEUE_TYPE': 'redis',
+         'FILTER_CLASS': 'crawlo.filters.aioredis_filter.AioRedisFilter',
+         'REDIS_HOST': '127.0.0.1',
+         'REDIS_PORT': 6379,
+         'REDIS_DB': 15,  # use a dedicated test database
+         'PROJECT_NAME': 'distributed_test'
+     }
+
+     print("Custom settings:")
+     for key, value in custom_settings.items():
+         print(f"  {key}: {value}")
+
+     settings = initialize_framework(custom_settings)
+
+     print("Settings after initialization:")
+     print(f"  RUN_MODE: {settings.get('RUN_MODE')}")
+     print(f"  QUEUE_TYPE: {settings.get('QUEUE_TYPE')}")
+     print(f"  FILTER_CLASS: {settings.get('FILTER_CLASS')}")
+
+     # Create the crawler process
+     from crawlo.crawler import CrawlerProcess
+     process = CrawlerProcess(settings=settings)
+
+     # Run the spider
+     await process.crawl(DistributedTestSpider)
+
+     print("Distributed mode test complete!")
+
+
+ def main():
+     """Main entry point"""
+     print("Starting distributed mode debug test...")
+     asyncio.run(test_distributed_mode())
+
+
+ if __name__ == "__main__":
+     main()
tests/final_command_test_report.md (file without changes)
tests/final_comprehensive_test.py
@@ -0,0 +1,152 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ """
+ Final comprehensive test script
+ Exercises all core framework features, especially the parts we modified
+ """
+
+ import sys
+ import os
+ import asyncio
+ import time
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ from crawlo.spider import Spider
+ from crawlo import Request
+
+
+ class TestSpider(Spider):
+     """Test spider"""
+     name = 'final_test_spider'
+
+     def start_requests(self):
+         """Issue the test requests"""
+         # Generate a few test requests
+         for i in range(5):
+             yield Request(f'https://httpbin.org/get?page={i}', callback=self.parse)
+
+     def parse(self, response):
+         """Parse the response"""
+         print(f"Successfully fetched response: {response.url}")
+         print(f"Status code: {response.status_code}")
+         return []
+
+
+ async def test_queue_blocking_behavior():
+     """Test queue blocking behavior"""
+     print("Testing queue blocking behavior...")
+
+     from crawlo.queue.queue_manager import QueueConfig, QueueManager
+
+     # Create a small queue configuration
+     queue_config = QueueConfig(
+         queue_type='memory',
+         max_queue_size=3  # a very small queue
+     )
+
+     # Create the queue manager
+     queue_manager = QueueManager(queue_config)
+     await queue_manager.initialize()
+
+     # Try adding more requests than the queue can hold
+     print("Adding 6 requests to a queue of size 3...")
+     start_time = time.time()
+
+     # Producer task
+     async def producer():
+         for i in range(6):
+             request = Request(f'https://example.com/test{i}')
+             await queue_manager.put(request)
+             print(f"Added request {i}")
+
+     # Consumer task
+     async def consumer():
+         retrieved = 0
+         while retrieved < 6:
+             request = await queue_manager.get(timeout=2.0)
+             if request:
+                 print(f"Got request: {request.url}")
+                 retrieved += 1
+                 await asyncio.sleep(0.1)  # simulate processing time
+
+     # Run producer and consumer concurrently
+     await asyncio.gather(producer(), consumer())
+
+     end_time = time.time()
+     print(f"Queue test finished in {end_time - start_time:.2f} seconds")
+
+     # Close the queue
+     await queue_manager.close()
+
+
+ async def test_framework_initialization():
+     """Test framework initialization"""
+     print("Testing framework initialization...")
+
+     from crawlo.initialization import initialize_framework
+
+     # Test the default settings
+     settings = initialize_framework()
+     print(f"Default settings - RUN_MODE: {settings.get('RUN_MODE')}")
+     print(f"Default settings - QUEUE_TYPE: {settings.get('QUEUE_TYPE')}")
+
+     # Test custom settings
+     custom_settings = {
+         'PROJECT_NAME': 'final_test',
+         'SCHEDULER_MAX_QUEUE_SIZE': 100
+     }
+
+     settings = initialize_framework(custom_settings)
+     print(f"Custom settings - PROJECT_NAME: {settings.get('PROJECT_NAME')}")
+     print(f"Custom settings - SCHEDULER_MAX_QUEUE_SIZE: {settings.get('SCHEDULER_MAX_QUEUE_SIZE')}")
+
+
+ async def test_crawler_execution():
+     """Test crawler execution"""
+     print("Testing crawler execution...")
+
+     from crawlo.initialization import initialize_framework
+     from crawlo.crawler import CrawlerProcess
+
+     # Initialize the framework
+     settings = initialize_framework({
+         'PROJECT_NAME': 'final_test'
+     })
+
+     # Create the crawler process
+     process = CrawlerProcess(settings=settings)
+
+     # Run the spider
+     await process.crawl(TestSpider)
+
+
+ async def main():
+     """Main entry point"""
+     print("Starting final comprehensive test...")
+     print("=" * 50)
+
+     try:
+         # 1. Test framework initialization
+         await test_framework_initialization()
+         print()
+
+         # 2. Test queue blocking behavior
+         await test_queue_blocking_behavior()
+         print()
+
+         # 3. Test crawler execution
+         await test_crawler_execution()
+         print()
+
+         print("=" * 50)
+         print("All tests passed! The framework is working correctly.")
+
+     except Exception as e:
+         print("=" * 50)
+         print(f"Test failed: {e}")
+         import traceback
+         traceback.print_exc()
+
+
+ if __name__ == "__main__":
+     asyncio.run(main())
tests/final_validation_test.py
@@ -0,0 +1,183 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ """
+ Final validation test script
+ Verifies that all of the modified features work correctly
+ """
+
+ import sys
+ import os
+ import asyncio
+ import time
+
+ # Add the project root to the Python path
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ from crawlo.spider import Spider
+ from crawlo import Request
+
+
+ class ValidationTestSpider(Spider):
+     """Validation test spider"""
+     name = 'validation_test_spider'
+
+     def start_requests(self):
+         """Issue the test requests"""
+         # Generate a few test requests
+         for i in range(3):
+             yield Request(f'https://httpbin.org/get?page={i}', callback=self.parse)
+
+     def parse(self, response):
+         """Parse the response"""
+         print(f"Successfully fetched response: {response.url}")
+         print(f"Status code: {response.status_code}")
+         return []
+
+
+ async def test_framework_startup_logging():
+     """Test framework startup logging"""
+     print("Testing framework startup logging...")
+
+     from crawlo.initialization import initialize_framework
+     from crawlo.logging import get_logger
+
+     # Initialize the framework
+     settings = initialize_framework({
+         'PROJECT_NAME': 'validation_test'
+     })
+
+     # Get the framework logger and check the startup log
+     logger = get_logger('crawlo.framework')
+     print("Framework startup log recorded")
+
+
+ async def test_queue_blocking_behavior():
+     """Test queue blocking behavior"""
+     print("Testing queue blocking behavior...")
+
+     from crawlo.queue.queue_manager import QueueConfig, QueueManager
+     from crawlo import Request
+
+     # Create a small queue configuration for the test
+     queue_config = QueueConfig(
+         queue_type='memory',
+         max_queue_size=3  # a very small queue
+     )
+
+     # Create the queue manager
+     queue_manager = QueueManager(queue_config)
+     await queue_manager.initialize()
+
+     # Try adding more requests than the queue can hold
+     print("Adding 5 requests to a queue of size 3...")
+     start_time = time.time()
+
+     # Producer task
+     async def producer():
+         for i in range(5):
+             request = Request(f'https://example.com/test{i}')
+             await queue_manager.put(request)
+             print(f"Added request {i}")
+
+     # Consumer task
+     async def consumer():
+         retrieved = 0
+         while retrieved < 5:
+             request = await queue_manager.get(timeout=2.0)
+             if request:
+                 print(f"Got request: {request.url}")
+                 retrieved += 1
+                 await asyncio.sleep(0.1)  # simulate processing time
+
+     # Run producer and consumer concurrently
+     await asyncio.gather(producer(), consumer())
+
+     end_time = time.time()
+     print(f"Queue test finished in {end_time - start_time:.2f} seconds")
+
+     # Close the queue
+     await queue_manager.close()
+
+
+ async def test_stats_output():
+     """Test stats output"""
+     print("Testing stats output...")
+
+     from crawlo.initialization import initialize_framework
+     from crawlo.crawler import CrawlerProcess
+
+     # Initialize the framework
+     settings = initialize_framework({
+         'PROJECT_NAME': 'stats_test'
+     })
+
+     # Create the crawler process
+     process = CrawlerProcess(settings=settings)
+
+     # Run the spider
+     await process.crawl(ValidationTestSpider)
+
+     print("Stats output test complete")
+
+
+ async def test_spider_auto_import():
+     """Test spider auto-import"""
+     print("Testing spider auto-import...")
+
+     from crawlo.initialization import initialize_framework
+     from crawlo.crawler import CrawlerProcess
+
+     # Initialize the framework
+     settings = initialize_framework({
+         'PROJECT_NAME': 'auto_import_test'
+     })
+
+     # Create the crawler process with spider_modules specified
+     spider_modules = ['crawlo.spider']  # use the framework's spider module for this test
+     process = CrawlerProcess(settings=settings, spider_modules=spider_modules)
+
+     # Check whether the spider is registered
+     spider_name = ValidationTestSpider.name
+     is_registered = process.is_spider_registered(spider_name)
+     print(f"Spider '{spider_name}' registered: {is_registered}")
+
+     if is_registered:
+         print("Spider auto-import works correctly")
+     else:
+         print("Spider auto-import is not working")
+
+
+ async def main():
+     """Main entry point"""
+     print("Starting final validation test...")
+     print("=" * 50)
+
+     try:
+         # 1. Test framework startup logging
+         await test_framework_startup_logging()
+         print()
+
+         # 2. Test queue blocking behavior
+         await test_queue_blocking_behavior()
+         print()
+
+         # 3. Test stats output
+         await test_stats_output()
+         print()
+
+         # 4. Test spider auto-import
+         await test_spider_auto_import()
+         print()
+
+         print("=" * 50)
+         print("All validation tests passed! The modified framework features work correctly.")
+
+     except Exception as e:
+         print("=" * 50)
+         print(f"Validation test failed: {e}")
+         import traceback
+         traceback.print_exc()
+
+
+ if __name__ == "__main__":
+     asyncio.run(main())