crawlo 1.3.2__py3-none-any.whl → 1.3.4__py3-none-any.whl

This diff shows the changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release.

Files changed (105)
  1. crawlo/__init__.py +24 -0
  2. crawlo/__version__.py +1 -1
  3. crawlo/commands/run.py +58 -32
  4. crawlo/core/__init__.py +44 -0
  5. crawlo/core/engine.py +119 -45
  6. crawlo/core/scheduler.py +4 -3
  7. crawlo/crawler.py +603 -1133
  8. crawlo/downloader/aiohttp_downloader.py +4 -2
  9. crawlo/extension/__init__.py +1 -1
  10. crawlo/extension/logging_extension.py +23 -7
  11. crawlo/factories/__init__.py +28 -0
  12. crawlo/factories/base.py +69 -0
  13. crawlo/factories/crawler.py +104 -0
  14. crawlo/factories/registry.py +85 -0
  15. crawlo/filters/aioredis_filter.py +25 -2
  16. crawlo/framework.py +292 -0
  17. crawlo/initialization/__init__.py +40 -0
  18. crawlo/initialization/built_in.py +426 -0
  19. crawlo/initialization/context.py +142 -0
  20. crawlo/initialization/core.py +194 -0
  21. crawlo/initialization/phases.py +149 -0
  22. crawlo/initialization/registry.py +146 -0
  23. crawlo/items/base.py +2 -1
  24. crawlo/logging/__init__.py +38 -0
  25. crawlo/logging/config.py +97 -0
  26. crawlo/logging/factory.py +129 -0
  27. crawlo/logging/manager.py +112 -0
  28. crawlo/middleware/middleware_manager.py +1 -1
  29. crawlo/middleware/offsite.py +1 -1
  30. crawlo/mode_manager.py +26 -1
  31. crawlo/pipelines/pipeline_manager.py +2 -1
  32. crawlo/project.py +76 -46
  33. crawlo/queue/pqueue.py +11 -5
  34. crawlo/queue/queue_manager.py +143 -19
  35. crawlo/queue/redis_priority_queue.py +69 -49
  36. crawlo/settings/default_settings.py +110 -14
  37. crawlo/settings/setting_manager.py +29 -13
  38. crawlo/spider/__init__.py +34 -16
  39. crawlo/stats_collector.py +17 -3
  40. crawlo/task_manager.py +112 -3
  41. crawlo/templates/project/settings.py.tmpl +103 -202
  42. crawlo/templates/project/settings_distributed.py.tmpl +122 -135
  43. crawlo/templates/project/settings_gentle.py.tmpl +149 -43
  44. crawlo/templates/project/settings_high_performance.py.tmpl +127 -90
  45. crawlo/templates/project/settings_minimal.py.tmpl +46 -15
  46. crawlo/templates/project/settings_simple.py.tmpl +138 -75
  47. crawlo/templates/project/spiders/__init__.py.tmpl +5 -1
  48. crawlo/templates/run.py.tmpl +10 -14
  49. crawlo/templates/spiders_init.py.tmpl +10 -0
  50. crawlo/tools/network_diagnostic.py +365 -0
  51. crawlo/utils/class_loader.py +26 -0
  52. crawlo/utils/error_handler.py +76 -35
  53. crawlo/utils/log.py +41 -144
  54. crawlo/utils/redis_connection_pool.py +43 -6
  55. crawlo/utils/request_serializer.py +8 -1
  56. {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/METADATA +120 -14
  57. {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/RECORD +104 -45
  58. tests/authenticated_proxy_example.py +2 -2
  59. tests/baidu_performance_test.py +109 -0
  60. tests/baidu_test.py +60 -0
  61. tests/comprehensive_framework_test.py +213 -0
  62. tests/comprehensive_test.py +82 -0
  63. tests/comprehensive_testing_summary.md +187 -0
  64. tests/debug_configure.py +70 -0
  65. tests/debug_framework_logger.py +85 -0
  66. tests/debug_log_levels.py +64 -0
  67. tests/distributed_test.py +67 -0
  68. tests/distributed_test_debug.py +77 -0
  69. tests/final_command_test_report.md +0 -0
  70. tests/final_comprehensive_test.py +152 -0
  71. tests/final_validation_test.py +183 -0
  72. tests/framework_performance_test.py +203 -0
  73. tests/optimized_performance_test.py +212 -0
  74. tests/performance_comparison.py +246 -0
  75. tests/queue_blocking_test.py +114 -0
  76. tests/queue_test.py +90 -0
  77. tests/scrapy_comparison/ofweek_scrapy.py +139 -0
  78. tests/scrapy_comparison/scrapy_test.py +134 -0
  79. tests/simple_command_test.py +120 -0
  80. tests/simple_crawlo_test.py +128 -0
  81. tests/simple_log_test.py +58 -0
  82. tests/simple_optimization_test.py +129 -0
  83. tests/simple_spider_test.py +50 -0
  84. tests/simple_test.py +48 -0
  85. tests/test_all_commands.py +231 -0
  86. tests/test_batch_processor.py +179 -0
  87. tests/test_component_factory.py +175 -0
  88. tests/test_controlled_spider_mixin.py +80 -0
  89. tests/test_enhanced_error_handler_comprehensive.py +246 -0
  90. tests/test_factories.py +253 -0
  91. tests/test_framework_logger.py +67 -0
  92. tests/test_framework_startup.py +65 -0
  93. tests/test_large_scale_config.py +113 -0
  94. tests/test_large_scale_helper.py +236 -0
  95. tests/test_mode_change.py +73 -0
  96. tests/test_mode_consistency.py +1 -1
  97. tests/test_performance_monitor.py +116 -0
  98. tests/test_queue_empty_check.py +42 -0
  99. tests/untested_features_report.md +139 -0
  100. tests/verify_debug.py +52 -0
  101. tests/verify_log_fix.py +112 -0
  102. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +0 -82
  103. {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/WHEEL +0 -0
  104. {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/entry_points.txt +0 -0
  105. {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/top_level.txt +0 -0
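
The added tests all drive the framework through the same `CrawlerProcess` entry point that this release reworks (see `crawlo/crawler.py`, `crawlo/factories/`, and `crawlo/initialization/` above). A minimal sketch of that pattern, condensed from the test files in the hunks below (the spider name, URL, and settings keys are taken from those tests, not from crawlo's documented API):

import asyncio

from crawlo import Request
from crawlo.crawler import CrawlerProcess
from crawlo.spider import Spider

class PingSpider(Spider):
    # Hypothetical minimal spider, mirroring the shape of the test spiders below.
    name = "ping"

    def start_requests(self):
        yield Request("https://httpbin.org/get", callback=self.parse)

    def parse(self, response):
        # Yield a plain dict, as tests/simple_optimization_test.py does.
        yield {"url": response.url, "status": response.status_code}

async def main():
    # Settings keys as used in the added tests (CONCURRENCY, DOWNLOAD_DELAY, LOG_LEVEL).
    process = CrawlerProcess(settings={
        "CONCURRENCY": 4,
        "DOWNLOAD_DELAY": 0.1,
        "LOG_LEVEL": "ERROR",
    })
    await process.crawl(PingSpider)

if __name__ == "__main__":
    asyncio.run(main())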
tests/simple_crawlo_test.py ADDED
@@ -0,0 +1,128 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ """
+ Simple Crawlo performance test
+ """
+ import asyncio
+ import time
+ import sys
+ import os
+
+ # Add the project root to the Python path
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ from crawlo import Spider, Request
+ from crawlo.crawler import CrawlerProcess
+ from crawlo.items import Item, Field
+
+ class NewsItem(Item):
+     title = Field()
+     publish_time = Field()
+     url = Field()
+     source = Field()
+     content = Field()
+
+ class OfweekSimpleSpider(Spider):
+     name = "ofweek_simple"
+
+     def start_requests(self):
+         headers = {
+             "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
+             "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
+             "Cache-Control": "no-cache",
+             "Connection": "keep-alive",
+             "Pragma": "no-cache",
+             "Referer": "https://ee.ofweek.com/CATList-2800-8100-ee-2.html",
+             "Sec-Fetch-Dest": "document",
+             "Sec-Fetch-Mode": "navigate",
+             "Sec-Fetch-Site": "same-origin",
+             "Sec-Fetch-User": "?1",
+             "Upgrade-Insecure-Requests": "1",
+             "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/139.0.0.0 Safari/537.36",
+         }
+         cookies = {
+             "__utmz": "57425525.1730117117.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none)",
+             "Hm_lvt_abe9900db162c6d089cdbfd107db0f03": "1739244841",
+             "Hm_lvt_af50e2fc51af73da7720fb324b88a975": "1740100727",
+             "JSESSIONID": "FEA96D3B5FC31350B2285E711BF2A541",
+             "Hm_lvt_28a416fcfc17063eb9c4f9bb1a1f5cda": "1757477622",
+             "HMACCOUNT": "08DF0D235A291EAA",
+             "__utma": "57425525.2080994505.1730117117.1747970718.1757477622.50",
+             "__utmc": "57425525",
+             "__utmt": "1",
+             "__utmb": "57425525.2.10.1757477622",
+             "Hm_lpvt_28a416fcfc17063eb9c4f9bb1a1f5cda": "1757477628",
+             "index_burying_point": "c64d6c31e69d560efe319cc9f8be279f"
+         }
+
+         # Use fewer pages to keep the test fast
+         max_page = 5
+         for page in range(1, max_page + 1):
+             url = f"https://ee.ofweek.com/CATList-2800-8100-ee-{page}.html"
+             yield Request(url=url, callback=self.parse, headers=headers, cookies=cookies)
+
+     def parse(self, response):
+         from urllib.parse import urljoin
+         rows = response.xpath("//div[@class=\"main_left\"]/div[@class=\"list_model\"]/div[@class=\"model_right model_right2\"]")
+
+         for row in rows:
+             try:
+                 url = row.xpath("./h3/a/@href").extract_first()
+                 title = row.xpath("./h3/a/text()").extract_first()
+
+                 if not url or not title:
+                     continue
+
+                 absolute_url = urljoin(response.url, url)
+                 if not absolute_url.startswith(("http://", "https://")):
+                     continue
+
+                 yield Request(
+                     url=absolute_url,
+                     meta={"title": title.strip() if title else "", "parent_url": response.url},
+                     callback=self.parse_detail
+                 )
+             except Exception:
+                 continue
+
+     def parse_detail(self, response):
+         title = response.meta.get("title", "")
+         content_elements = response.xpath("//div[@class=\"TRS_Editor\"]|//*[@id=\"articleC\"]")
+         if content_elements:
+             content = content_elements.xpath(".//text()").extract()
+             content = "\n".join([text.strip() for text in content if text.strip()])
+         else:
+             content = ""
+
+         publish_time = response.xpath("//div[@class=\"time fl\"]/text()").extract_first()
+         if publish_time:
+             publish_time = publish_time.strip()
+
+         source = response.xpath("//div[@class=\"source-name\"]/text()").extract_first()
+
+         item = NewsItem()
+         item["title"] = title.strip() if title else ""
+         item["publish_time"] = publish_time if publish_time else ""
+         item["url"] = response.url
+         item["source"] = source if source else ""
+         item["content"] = content
+
+         yield item
+
+ async def main():
+     start_time = time.time()
+
+     process = CrawlerProcess(settings={
+         "CONCURRENCY": 8,
+         "DOWNLOAD_DELAY": 0.1,
+         "LOG_LEVEL": "ERROR",  # Reduce log output to improve performance
+     })
+     await process.crawl(OfweekSimpleSpider)
+
+     end_time = time.time()
+     execution_time = end_time - start_time
+
+     print(f"Crawlo execution time: {execution_time:.2f}s")
+
+ if __name__ == "__main__":
+     asyncio.run(main())
tests/simple_log_test.py ADDED
@@ -0,0 +1,58 @@
+ #!/usr/bin/env python
+ # -*- coding: UTF-8 -*-
+ """
+ Simple logging system test
+ """
+ import sys
+ import os
+ sys.path.insert(0, '/')
+
+ # Make sure the log directory exists
+ os.makedirs('/examples/ofweek_standalone/logs', exist_ok=True)
+
+ # Exercise the logging system
+ from crawlo.utils.log import LoggerManager, get_logger
+
+ print("=== Simple logging system test ===")
+
+ # 1. Configure the logging system directly
+ print("1. Configuring the logging system...")
+ LoggerManager.configure(
+     LOG_LEVEL='INFO',
+     LOG_FILE='/Users/oscar/projects/Crawlo/examples/ofweek_standalone/logs/simple_test.log'
+ )
+
+ # 2. Create a logger
+ print("2. Creating a logger...")
+ logger = get_logger('test.logger')
+ print(f"   Logger: {logger}")
+ print(f"   Handlers: {len(logger.handlers)}")
+
+ for i, handler in enumerate(logger.handlers):
+     handler_type = type(handler).__name__
+     print(f"   Handler {i}: {handler_type}")
+     if hasattr(handler, 'baseFilename'):
+         print(f"     File: {handler.baseFilename}")
+
+ # 3. Test log output
+ print("3. Testing log output...")
+ logger.info("This is a test INFO message")
+ logger.debug("This is a test DEBUG message")
+ logger.warning("This is a test WARNING message")
+
+ print("4. Checking the log file...")
+ log_file = '/Users/oscar/projects/Crawlo/examples/ofweek_standalone/logs/simple_test.log'
+ if os.path.exists(log_file):
+     print(f"   Log file exists: {log_file}")
+     with open(log_file, 'r', encoding='utf-8') as f:
+         content = f.read()
+     print(f"   Content length: {len(content)} characters")
+     if content:
+         print("   Contents:")
+         print(content)
+     else:
+         print("   File is empty")
+ else:
+     print(f"   Log file does not exist: {log_file}")
+
+ print("=== Test complete ===")
tests/simple_optimization_test.py ADDED
@@ -0,0 +1,129 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ """
+ Simple Crawlo framework optimization validation test
+ Used to quickly verify the effect of the optimizations
+ """
+
+ import asyncio
+ import time
+ import sys
+ import os
+ from pathlib import Path
+
+ # Add the project root to the Python path
+ project_root = Path(__file__).parent.parent
+ sys.path.insert(0, str(project_root))
+
+ from crawlo.crawler import CrawlerProcess
+ from crawlo.spider import Spider
+ from crawlo import Request, Item
+
+
+ class SimpleTestSpider(Spider):
+     """Simple test spider"""
+     name = 'simple_test'
+
+     def __init__(self, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         # Use fewer test pages to speed up the test
+         self.start_urls = ['https://httpbin.org/delay/0' for _ in range(10)]
+
+     def parse(self, response):
+         """Parse the response minimally"""
+         yield {'url': response.url, 'status': response.status_code}
+
+
+ async def test_concurrent_performance():
+     """Test concurrent performance"""
+     print("Starting concurrent performance test...")
+
+     # Configure settings
+     settings = {
+         'CONCURRENT_REQUESTS': 10,
+         'DOWNLOAD_DELAY': 0.1,
+         'RANDOMIZE_DOWNLOAD_DELAY': False,
+         'SCHEDULER_MAX_QUEUE_SIZE': 5000,
+         'BACKPRESSURE_RATIO': 0.9,
+     }
+
+     # Create the crawler process
+     process = CrawlerProcess(settings=settings)
+
+     # Record the start time
+     start_time = time.time()
+
+     # Run the test spider
+     crawler = await process.crawl(SimpleTestSpider)
+
+     # Compute performance metrics
+     metrics = crawler.metrics
+     duration = metrics.get_total_duration()
+     pages = 10
+     rps = pages / duration if duration > 0 else 0
+
+     print(f"Completion time: {duration:.2f} s")
+     print(f"Requests per second: {rps:.2f} RPS")
+
+     return duration, rps
+
+
+ async def test_sequential_performance():
+     """Test sequential execution performance"""
+     print("\nStarting sequential execution performance test...")
+
+     # Configure settings
+     settings = {
+         'CONCURRENT_REQUESTS': 1,  # sequential execution
+         'DOWNLOAD_DELAY': 0.1,
+         'RANDOMIZE_DOWNLOAD_DELAY': False,
+         'SCHEDULER_MAX_QUEUE_SIZE': 5000,
+         'BACKPRESSURE_RATIO': 0.9,
+     }
+
+     # Create the crawler process
+     process = CrawlerProcess(settings=settings)
+
+     # Record the start time
+     start_time = time.time()
+
+     # Run the test spider
+     crawler = await process.crawl(SimpleTestSpider)
+
+     # Compute performance metrics
+     metrics = crawler.metrics
+     duration = metrics.get_total_duration()
+     pages = 10
+     rps = pages / duration if duration > 0 else 0
+
+     print(f"Completion time: {duration:.2f} s")
+     print(f"Requests per second: {rps:.2f} RPS")
+
+     return duration, rps
+
+
+ async def main():
+     """Entry point"""
+     print("Crawlo framework optimization validation test")
+     print("=" * 40)
+
+     # Test concurrent performance
+     concurrent_duration, concurrent_rps = await test_concurrent_performance()
+
+     # Test sequential execution performance
+     sequential_duration, sequential_rps = await test_sequential_performance()
+
+     # Print the comparison
+     print("\n=== Performance comparison ===")
+     print(f"Concurrent: {concurrent_duration:.2f}s, {concurrent_rps:.2f} RPS")
+     print(f"Sequential: {sequential_duration:.2f}s, {sequential_rps:.2f} RPS")
+
+     if concurrent_duration < sequential_duration:
+         improvement = (sequential_duration - concurrent_duration) / sequential_duration * 100
+         print(f"Concurrent execution is {improvement:.1f}% faster than sequential")
+     else:
+         print("Concurrent performance fell short of expectations; check the optimizations")
+
+
+ if __name__ == '__main__':
+     asyncio.run(main())
tests/simple_spider_test.py ADDED
@@ -0,0 +1,50 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ """
+ Simple spider test script
+ """
+
+ import sys
+ import os
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ from crawlo.spider import Spider
+ from crawlo import Request
+
+
+ class TestSpider(Spider):
+     """Test spider"""
+     name = 'test_spider'
+
+     def start_requests(self):
+         """Issue a test request"""
+         yield Request('https://httpbin.org/get', callback=self.parse)
+
+     def parse(self, response):
+         """Parse the response"""
+         print(f"Got response: {response.url}")
+         print(f"Status code: {response.status_code}")
+         return []
+
+
+ def main():
+     """Entry point"""
+     print("Testing spider functionality...")
+
+     # Initialize the framework
+     from crawlo.initialization import initialize_framework
+     settings = initialize_framework()
+
+     # Create the crawler process
+     from crawlo.crawler import CrawlerProcess
+     process = CrawlerProcess(settings=settings)
+
+     # Run the spider
+     import asyncio
+     asyncio.run(process.crawl(TestSpider))
+
+     print("Spider test complete!")
+
+
+ if __name__ == "__main__":
+     main()
tests/simple_test.py ADDED
@@ -0,0 +1,48 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ """
+ Simplified framework test
+ """
+ import os
+ import sys
+ sys.path.insert(0, '/')
+
+ # Basic setup
+ test_log_file = '/Users/oscar/projects/Crawlo/simple_test.log'
+ if os.path.exists(test_log_file):
+     os.remove(test_log_file)
+
+ # The simplest possible test
+ try:
+     from crawlo.utils.log import LoggerManager
+
+     print("Configuring the logging system...")
+     LoggerManager.configure(
+         LOG_LEVEL='INFO',
+         LOG_FILE=test_log_file
+     )
+
+     from crawlo.utils.log import get_logger
+     logger = get_logger('test.simple')
+
+     print("Testing log output...")
+     logger.info("This is a test message")
+     logger.info("Crawlo framework initialization complete")
+     logger.info("Crawlo Framework Started 1.3.3")
+
+     print("Checking the log file...")
+     if os.path.exists(test_log_file):
+         with open(test_log_file, 'r', encoding='utf-8') as f:
+             content = f.read()
+             print(f"Log file content: {len(content)} characters")
+             print("Contents:")
+             print(content)
+     else:
+         print("Log file was not created")
+
+ except Exception as e:
+     print(f"Error: {e}")
+     import traceback
+     traceback.print_exc()
+
+ print("Test complete")
tests/test_all_commands.py ADDED
@@ -0,0 +1,231 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ """
+ Test all crawlo commands
+ """
+
+ import sys
+ import os
+ import subprocess
+ import tempfile
+ import shutil
+ from pathlib import Path
+
+ # Add the project root to the Python path
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ def run_command(cmd, cwd=None, capture_output=True):
+     """Run a command and return its result"""
+     try:
+         result = subprocess.run(
+             cmd,
+             shell=True,
+             cwd=cwd,
+             capture_output=capture_output,
+             text=True,
+             timeout=30
+         )
+         return result.returncode, result.stdout, result.stderr
+     except subprocess.TimeoutExpired:
+         return -1, "", "Command timed out"
+     except Exception as e:
+         return -1, "", str(e)
+
+ def test_help_command():
+     """Test the help command"""
+     print("Testing the help command...")
+
+     # Test the -h flag
+     code, stdout, stderr = run_command("python -m crawlo.cli -h")
+     assert code == 0, f"help command failed: {stderr}"
+     assert "Crawlo" in stdout, "help output does not contain the framework name"
+
+     # Test the --help flag
+     code, stdout, stderr = run_command("python -m crawlo.cli --help")
+     assert code == 0, f"help command failed: {stderr}"
+     assert "Crawlo" in stdout, "help output does not contain the framework name"
+
+     print("✅ help command test passed")
+
+ def test_version_command():
+     """Test the version command"""
+     print("Testing the version command...")
+
+     # Test the -v flag
+     code, stdout, stderr = run_command("python -m crawlo.cli -v")
+     assert code == 0, f"version command failed: {stderr}"
+     assert "Crawlo" in stdout, "version output does not contain the framework name"
+
+     # Test the --version flag
+     code, stdout, stderr = run_command("python -m crawlo.cli --version")
+     assert code == 0, f"version command failed: {stderr}"
+     assert "Crawlo" in stdout, "version output does not contain the framework name"
+
+     print("✅ version command test passed")
+
+ def test_startproject_command():
+     """Test the startproject command"""
+     print("Testing the startproject command...")
+
+     # Create a temporary directory for the test
+     with tempfile.TemporaryDirectory() as temp_dir:
+         project_name = "test_project"
+         project_path = Path(temp_dir) / project_name
+
+         # Test project creation
+         code, stdout, stderr = run_command(
+             f"python -m crawlo.cli startproject {project_name}",
+             cwd=temp_dir
+         )
+
+         # Check that the project was created
+         assert code == 0, f"startproject command failed: {stderr}"
+         assert project_path.exists(), f"Project directory was not created: {project_path}"
+
+         # Check that the required files exist
+         required_files = [
+             "crawlo.cfg",
+             "settings.py",
+             "spiders/__init__.py",
+             "items.py",
+             "middlewares.py"
+         ]
+
+         for file_path in required_files:
+             full_path = project_path / file_path
+             assert full_path.exists(), f"Required file missing: {full_path}"
+
+     print("✅ startproject command test passed")
+
+ def test_list_command():
+     """Test the list command"""
+     print("Testing the list command...")
+
+     # Test the list command in the example project directory
+     example_dir = Path(__file__).parent.parent / "examples" / "ofweek_standalone"
+
+     # Test the plain list command
+     code, stdout, stderr = run_command("python -m crawlo.cli list", cwd=example_dir)
+     # The list command may fail due to environment issues, but check for output
+     assert code == 0 or len(stdout) > 0, f"list command failed: {stderr}"
+
+     # Test the --json flag
+     code, stdout, stderr = run_command("python -m crawlo.cli list --json", cwd=example_dir)
+     # The list command may fail due to environment issues, but check for output
+     assert code == 0 or len(stdout) > 0, f"list --json command failed: {stderr}"
+
+     print("✅ list command test passed")
+
+ def test_genspider_command():
+     """Test the genspider command"""
+     print("Testing the genspider command...")
+
+     # Create a temporary project for the test
+     with tempfile.TemporaryDirectory() as temp_dir:
+         project_name = "test_project"
+         project_path = Path(temp_dir) / project_name
+
+         # Create the project first
+         code, stdout, stderr = run_command(
+             f"python -m crawlo.cli startproject {project_name}",
+             cwd=temp_dir
+         )
+         assert code == 0, f"Project creation failed: {stderr}"
+
+         # Test spider generation
+         spider_name = "test_spider"
+         domain = "example.com"
+         code, stdout, stderr = run_command(
+             f"python -m crawlo.cli genspider {spider_name} {domain}",
+             cwd=project_path
+         )
+         assert code == 0, f"genspider command failed: {stderr}"
+
+         # Check that the spider file was created
+         spider_file = project_path / project_name / "spiders" / f"{spider_name}.py"
+         assert spider_file.exists(), f"Spider file was not created: {spider_file}"
+
+         # Check the file contents
+         with open(spider_file, 'r', encoding='utf-8') as f:
+             content = f.read()
+             assert spider_name in content, "Spider file does not contain the spider name"
+             assert domain in content, "Spider file does not contain the domain"
+
+     print("✅ genspider command test passed")
+
+ def test_check_command():
+     """Test the check command"""
+     print("Testing the check command...")
+
+     # Test the check command in the example project directory
+     example_dir = Path(__file__).parent.parent / "examples" / "ofweek_standalone"
+
+     # Test the plain check command
+     code, stdout, stderr = run_command("python -m crawlo.cli check", cwd=example_dir)
+     # The check command may fail due to environment issues, but check for output
+     assert code == 0 or len(stdout) > 0, f"check command failed: {stderr}"
+
+     print("✅ check command test passed")
+
+ def test_stats_command():
+     """Test the stats command"""
+     print("Testing the stats command...")
+
+     # Test the stats command in the example project directory
+     example_dir = Path(__file__).parent.parent / "examples" / "ofweek_standalone"
+
+     # Test the plain stats command
+     code, stdout, stderr = run_command("python -m crawlo.cli stats", cwd=example_dir)
+     # The stats command may return nonzero when no statistics exist, but check for output
+     assert code == 0 or len(stdout) > 0, f"stats command failed: {stderr}"
+
+     print("✅ stats command test passed")
+
+ def main():
+     """Entry point"""
+     print("Testing all crawlo commands...")
+     print("=" * 50)
+
+     try:
+         # Test the help command
+         test_help_command()
+         print()
+
+         # Test the version command
+         test_version_command()
+         print()
+
+         # Test the startproject command
+         test_startproject_command()
+         print()
+
+         # Test the genspider command
+         test_genspider_command()
+         print()
+
+         # Test the list command
+         test_list_command()
+         print()
+
+         # Test the check command
+         test_check_command()
+         print()
+
+         # Test the stats command
+         test_stats_command()
+         print()
+
+         print("=" * 50)
+         print("All command tests passed!")
+
+     except Exception as e:
+         print("=" * 50)
+         print(f"Test failed: {e}")
+         import traceback
+         traceback.print_exc()
+         return 1
+
+     return 0
+
+ if __name__ == "__main__":
+     sys.exit(main())