crawlo 1.1.4__py3-none-any.whl → 1.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic. Click here for more details.

Files changed (186) hide show
  1. crawlo/__init__.py +61 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +61 -0
  4. crawlo/cleaners/data_formatter.py +226 -0
  5. crawlo/cleaners/encoding_converter.py +126 -0
  6. crawlo/cleaners/text_cleaner.py +233 -0
  7. crawlo/cli.py +40 -40
  8. crawlo/commands/__init__.py +13 -13
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/list.py +155 -155
  12. crawlo/commands/run.py +285 -285
  13. crawlo/commands/startproject.py +300 -196
  14. crawlo/commands/stats.py +188 -188
  15. crawlo/commands/utils.py +186 -186
  16. crawlo/config.py +309 -279
  17. crawlo/config_validator.py +253 -0
  18. crawlo/core/__init__.py +2 -2
  19. crawlo/core/engine.py +346 -172
  20. crawlo/core/processor.py +40 -40
  21. crawlo/core/scheduler.py +137 -166
  22. crawlo/crawler.py +1027 -1027
  23. crawlo/downloader/__init__.py +266 -242
  24. crawlo/downloader/aiohttp_downloader.py +220 -212
  25. crawlo/downloader/cffi_downloader.py +256 -251
  26. crawlo/downloader/httpx_downloader.py +259 -259
  27. crawlo/downloader/hybrid_downloader.py +214 -0
  28. crawlo/downloader/playwright_downloader.py +403 -0
  29. crawlo/downloader/selenium_downloader.py +473 -0
  30. crawlo/event.py +11 -11
  31. crawlo/exceptions.py +81 -81
  32. crawlo/extension/__init__.py +37 -37
  33. crawlo/extension/health_check.py +141 -141
  34. crawlo/extension/log_interval.py +57 -57
  35. crawlo/extension/log_stats.py +81 -81
  36. crawlo/extension/logging_extension.py +43 -43
  37. crawlo/extension/memory_monitor.py +104 -88
  38. crawlo/extension/performance_profiler.py +133 -117
  39. crawlo/extension/request_recorder.py +107 -107
  40. crawlo/filters/__init__.py +154 -154
  41. crawlo/filters/aioredis_filter.py +280 -242
  42. crawlo/filters/memory_filter.py +269 -269
  43. crawlo/items/__init__.py +23 -23
  44. crawlo/items/base.py +21 -21
  45. crawlo/items/fields.py +53 -53
  46. crawlo/items/items.py +104 -104
  47. crawlo/middleware/__init__.py +21 -21
  48. crawlo/middleware/default_header.py +32 -32
  49. crawlo/middleware/download_delay.py +28 -28
  50. crawlo/middleware/middleware_manager.py +135 -135
  51. crawlo/middleware/proxy.py +272 -248
  52. crawlo/middleware/request_ignore.py +30 -30
  53. crawlo/middleware/response_code.py +18 -18
  54. crawlo/middleware/response_filter.py +26 -26
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/mode_manager.py +206 -201
  57. crawlo/network/__init__.py +21 -21
  58. crawlo/network/request.py +338 -311
  59. crawlo/network/response.py +360 -271
  60. crawlo/pipelines/__init__.py +21 -21
  61. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  62. crawlo/pipelines/console_pipeline.py +39 -39
  63. crawlo/pipelines/csv_pipeline.py +316 -316
  64. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  65. crawlo/pipelines/json_pipeline.py +218 -218
  66. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  67. crawlo/pipelines/mongo_pipeline.py +131 -131
  68. crawlo/pipelines/mysql_pipeline.py +316 -316
  69. crawlo/pipelines/pipeline_manager.py +56 -56
  70. crawlo/pipelines/redis_dedup_pipeline.py +166 -162
  71. crawlo/project.py +153 -153
  72. crawlo/queue/pqueue.py +37 -37
  73. crawlo/queue/queue_manager.py +320 -307
  74. crawlo/queue/redis_priority_queue.py +277 -209
  75. crawlo/settings/__init__.py +7 -7
  76. crawlo/settings/default_settings.py +216 -278
  77. crawlo/settings/setting_manager.py +99 -99
  78. crawlo/spider/__init__.py +639 -639
  79. crawlo/stats_collector.py +59 -59
  80. crawlo/subscriber.py +130 -130
  81. crawlo/task_manager.py +30 -30
  82. crawlo/templates/crawlo.cfg.tmpl +10 -10
  83. crawlo/templates/project/__init__.py.tmpl +3 -3
  84. crawlo/templates/project/items.py.tmpl +17 -17
  85. crawlo/templates/project/middlewares.py.tmpl +110 -110
  86. crawlo/templates/project/pipelines.py.tmpl +97 -97
  87. crawlo/templates/project/run.py.tmpl +251 -251
  88. crawlo/templates/project/settings.py.tmpl +326 -279
  89. crawlo/templates/project/settings_distributed.py.tmpl +120 -0
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -0
  91. crawlo/templates/project/settings_high_performance.py.tmpl +152 -0
  92. crawlo/templates/project/settings_simple.py.tmpl +69 -0
  93. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  94. crawlo/templates/spider/spider.py.tmpl +141 -141
  95. crawlo/tools/__init__.py +183 -0
  96. crawlo/tools/anti_crawler.py +269 -0
  97. crawlo/tools/authenticated_proxy.py +241 -0
  98. crawlo/tools/data_validator.py +181 -0
  99. crawlo/tools/date_tools.py +36 -0
  100. crawlo/tools/distributed_coordinator.py +387 -0
  101. crawlo/tools/retry_mechanism.py +221 -0
  102. crawlo/tools/scenario_adapter.py +263 -0
  103. crawlo/utils/__init__.py +35 -7
  104. crawlo/utils/batch_processor.py +261 -0
  105. crawlo/utils/controlled_spider_mixin.py +439 -439
  106. crawlo/utils/date_tools.py +290 -233
  107. crawlo/utils/db_helper.py +343 -343
  108. crawlo/utils/enhanced_error_handler.py +360 -0
  109. crawlo/utils/env_config.py +106 -0
  110. crawlo/utils/error_handler.py +126 -0
  111. crawlo/utils/func_tools.py +82 -82
  112. crawlo/utils/large_scale_config.py +286 -286
  113. crawlo/utils/large_scale_helper.py +343 -343
  114. crawlo/utils/log.py +128 -128
  115. crawlo/utils/performance_monitor.py +285 -0
  116. crawlo/utils/queue_helper.py +175 -175
  117. crawlo/utils/redis_connection_pool.py +335 -0
  118. crawlo/utils/redis_key_validator.py +200 -0
  119. crawlo/utils/request.py +267 -267
  120. crawlo/utils/request_serializer.py +219 -219
  121. crawlo/utils/spider_loader.py +62 -62
  122. crawlo/utils/system.py +11 -11
  123. crawlo/utils/tools.py +4 -4
  124. crawlo/utils/url.py +39 -39
  125. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/METADATA +401 -403
  126. crawlo-1.1.5.dist-info/RECORD +185 -0
  127. examples/__init__.py +7 -7
  128. tests/__init__.py +7 -7
  129. tests/advanced_tools_example.py +276 -0
  130. tests/authenticated_proxy_example.py +237 -0
  131. tests/cleaners_example.py +161 -0
  132. tests/config_validation_demo.py +103 -0
  133. {examples → tests}/controlled_spider_example.py +205 -205
  134. tests/date_tools_example.py +181 -0
  135. tests/dynamic_loading_example.py +524 -0
  136. tests/dynamic_loading_test.py +105 -0
  137. tests/env_config_example.py +134 -0
  138. tests/error_handling_example.py +172 -0
  139. tests/redis_key_validation_demo.py +131 -0
  140. tests/response_improvements_example.py +145 -0
  141. tests/test_advanced_tools.py +149 -0
  142. tests/test_all_redis_key_configs.py +146 -0
  143. tests/test_authenticated_proxy.py +142 -0
  144. tests/test_cleaners.py +55 -0
  145. tests/test_comprehensive.py +147 -0
  146. tests/test_config_validator.py +194 -0
  147. tests/test_date_tools.py +124 -0
  148. tests/test_dynamic_downloaders_proxy.py +125 -0
  149. tests/test_dynamic_proxy.py +93 -0
  150. tests/test_dynamic_proxy_config.py +147 -0
  151. tests/test_dynamic_proxy_real.py +110 -0
  152. tests/test_edge_cases.py +304 -0
  153. tests/test_enhanced_error_handler.py +271 -0
  154. tests/test_env_config.py +122 -0
  155. tests/test_error_handler_compatibility.py +113 -0
  156. tests/test_final_validation.py +153 -153
  157. tests/test_framework_env_usage.py +104 -0
  158. tests/test_integration.py +357 -0
  159. tests/test_item_dedup_redis_key.py +123 -0
  160. tests/test_parsel.py +30 -0
  161. tests/test_performance.py +328 -0
  162. tests/test_proxy_health_check.py +32 -32
  163. tests/test_proxy_middleware_integration.py +136 -136
  164. tests/test_proxy_providers.py +56 -56
  165. tests/test_proxy_stats.py +19 -19
  166. tests/test_proxy_strategies.py +59 -59
  167. tests/test_queue_manager_redis_key.py +177 -0
  168. tests/test_redis_config.py +28 -28
  169. tests/test_redis_connection_pool.py +295 -0
  170. tests/test_redis_key_naming.py +182 -0
  171. tests/test_redis_key_validator.py +124 -0
  172. tests/test_redis_queue.py +224 -224
  173. tests/test_request_serialization.py +70 -70
  174. tests/test_response_improvements.py +153 -0
  175. tests/test_scheduler.py +241 -241
  176. tests/test_simple_response.py +62 -0
  177. tests/test_telecom_spider_redis_key.py +206 -0
  178. tests/test_template_content.py +88 -0
  179. tests/test_template_redis_key.py +135 -0
  180. tests/test_tools.py +154 -0
  181. tests/tools_example.py +258 -0
  182. crawlo/core/enhanced_engine.py +0 -190
  183. crawlo-1.1.4.dist-info/RECORD +0 -117
  184. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/WHEEL +0 -0
  185. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/entry_points.txt +0 -0
  186. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/top_level.txt +0 -0
@@ -1,196 +1,300 @@
1
- #!/usr/bin/python
2
- # -*- coding: UTF-8 -*-
3
- """
4
- # @Time : 2025-08-31 22:36
5
- # @Author : crawl-coder
6
- # @Desc : 命令行入口:crawlo startproject baidu,创建项目。
7
- """
8
- import shutil
9
- import re
10
- from pathlib import Path
11
- from rich.console import Console
12
- from rich.panel import Panel
13
- from rich.text import Text
14
-
15
- from .utils import show_error_panel, show_success_panel
16
-
17
- # 初始化 rich 控制台
18
- console = Console()
19
-
20
- TEMPLATES_DIR = Path(__file__).parent.parent / 'templates'
21
-
22
-
23
- def _render_template(tmpl_path, context):
24
- """读取模板文件,替换 {{key}} 为 context 中的值"""
25
- with open(tmpl_path, 'r', encoding='utf-8') as f:
26
- content = f.read()
27
- for key, value in context.items():
28
- content = content.replace(f'{{{{{key}}}}}', str(value))
29
- return content
30
-
31
-
32
- def _copytree_with_templates(src, dst, context):
33
- """
34
- 递归复制目录,将 .tmpl 文件渲染后复制(去除 .tmpl 后缀),其他文件直接复制。
35
- """
36
- src_path = Path(src)
37
- dst_path = Path(dst)
38
- dst_path.mkdir(parents=True, exist_ok=True)
39
-
40
- for item in src_path.rglob('*'):
41
- rel_path = item.relative_to(src_path)
42
- dst_item = dst_path / rel_path
43
-
44
- if item.is_dir():
45
- dst_item.mkdir(parents=True, exist_ok=True)
46
- else:
47
- if item.suffix == '.tmpl':
48
- rendered_content = _render_template(item, context)
49
- final_dst = dst_item.with_suffix('')
50
- final_dst.parent.mkdir(parents=True, exist_ok=True)
51
- with open(final_dst, 'w', encoding='utf-8') as f:
52
- f.write(rendered_content)
53
- else:
54
- shutil.copy2(item, dst_item)
55
-
56
-
57
- def validate_project_name(project_name: str) -> tuple[bool, str]:
58
- """
59
- 验证项目名称是否有效
60
-
61
- Returns:
62
- tuple[bool, str]: (是否有效, 错误信息)
63
- """
64
- # 检查是否为空
65
- if not project_name or not project_name.strip():
66
- return False, "Project name cannot be empty"
67
-
68
- project_name = project_name.strip()
69
-
70
- # 检查长度
71
- if len(project_name) > 50:
72
- return False, "Project name too long (max 50 characters)"
73
-
74
- # 检查是否为Python关键字
75
- python_keywords = {
76
- 'False', 'None', 'True', 'and', 'as', 'assert', 'break', 'class',
77
- 'continue', 'def', 'del', 'elif', 'else', 'except', 'finally',
78
- 'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda',
79
- 'nonlocal', 'not', 'or', 'pass', 'raise', 'return', 'try',
80
- 'while', 'with', 'yield'
81
- }
82
- if project_name in python_keywords:
83
- return False, f"'{project_name}' is a Python keyword and cannot be used as project name"
84
-
85
- # 检查是否为有效的Python标识符
86
- if not project_name.isidentifier():
87
- return False, "Project name must be a valid Python identifier"
88
-
89
- # 检查格式(建议使用snake_case)
90
- if not re.match(r'^[a-z][a-z0-9_]*$', project_name):
91
- return False, (
92
- "Project name should start with lowercase letter and "
93
- "contain only lowercase letters, numbers, and underscores"
94
- )
95
-
96
- # 检查是否以数字结尾(不推荐)
97
- if project_name[-1].isdigit():
98
- return False, "Project name should not end with a number"
99
-
100
- return True, ""
101
-
102
-
103
- def main(args):
104
- if len(args) != 1:
105
- console.print("[bold red]Error:[/bold red] Usage: [blue]crawlo startproject[/blue] <project_name>")
106
- console.print("💡 Examples:")
107
- console.print(" [blue]crawlo startproject[/blue] my_spider_project")
108
- console.print(" [blue]crawlo startproject[/blue] news_crawler")
109
- console.print(" [blue]crawlo startproject[/blue] ecommerce_spider")
110
- return 1
111
-
112
- project_name = args[0]
113
-
114
- # 验证项目名称
115
- is_valid, error_msg = validate_project_name(project_name)
116
- if not is_valid:
117
- show_error_panel(
118
- "Invalid Project Name",
119
- f"[cyan]{project_name}[/cyan] is not a valid project name.\n"
120
- f"❌ {error_msg}\n\n"
121
- "💡 Project name should:\n"
122
- " • Start with lowercase letter\n"
123
- " • Contain only lowercase letters, numbers, and underscores\n"
124
- " • Be a valid Python identifier\n"
125
- " • Not be a Python keyword"
126
- )
127
- return 1
128
-
129
- project_dir = Path(project_name)
130
-
131
- if project_dir.exists():
132
- show_error_panel(
133
- "Directory Exists",
134
- f"Directory '[cyan]{project_dir}[/cyan]' already exists.\n"
135
- "💡 Choose a different project name or remove the existing directory."
136
- )
137
- return 1
138
-
139
- context = {'project_name': project_name}
140
- template_dir = TEMPLATES_DIR / 'project'
141
-
142
- try:
143
- # 1. 创建项目根目录
144
- project_dir.mkdir()
145
-
146
- # 2. 渲染 crawlo.cfg.tmpl
147
- cfg_template = TEMPLATES_DIR / 'crawlo.cfg.tmpl'
148
- if cfg_template.exists():
149
- cfg_content = _render_template(cfg_template, context)
150
- (project_dir / 'crawlo.cfg').write_text(cfg_content, encoding='utf-8')
151
- console.print(f":white_check_mark: Created [green]{project_dir / 'crawlo.cfg'}[/green]")
152
- else:
153
- console.print("[yellow]⚠ Warning:[/yellow] Template 'crawlo.cfg.tmpl' not found.")
154
-
155
- # 3. 复制并渲染项目包内容
156
- package_dir = project_dir / project_name
157
- _copytree_with_templates(template_dir, package_dir, context)
158
- console.print(f":white_check_mark: Created project package: [green]{package_dir}[/green]")
159
-
160
- # 4. 创建 logs 目录
161
- (project_dir / 'logs').mkdir(exist_ok=True)
162
- console.print(":white_check_mark: Created logs directory")
163
-
164
- # 5. 创建 output 目录(用于数据输出)
165
- (project_dir / 'output').mkdir(exist_ok=True)
166
- console.print(":white_check_mark: Created output directory")
167
-
168
- # 成功面板
169
- success_text = Text.from_markup(f"Project '[bold cyan]{project_name}[/bold cyan]' created successfully!")
170
- console.print(Panel(success_text, title=":rocket: Success", border_style="green", padding=(1, 2)))
171
-
172
- # 下一步操作提示(对齐美观 + 语法高亮)
173
- next_steps = f"""
174
- [bold]🚀 Next steps:[/bold]
175
- [blue]cd[/blue] {project_name}
176
- [blue]crawlo genspider[/blue] example example.com
177
- [blue]crawlo run[/blue] example
178
-
179
- [bold]📚 Learn more:[/bold]
180
- [blue]crawlo list[/blue] # List all spiders
181
- [blue]crawlo check[/blue] example # Check spider validity
182
- [blue]crawlo stats[/blue] # View statistics
183
- """.strip()
184
- console.print(next_steps)
185
-
186
- return 0
187
-
188
- except Exception as e:
189
- show_error_panel(
190
- "Creation Failed",
191
- f"Failed to create project: {e}"
192
- )
193
- if project_dir.exists():
194
- shutil.rmtree(project_dir, ignore_errors=True)
195
- console.print("[red]:cross_mark: Cleaned up partially created project.[/red]")
196
- return 1
1
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
# @Time    :    2025-08-31 22:36
# @Author  :   crawl-coder
# @Desc    :   Command-line entry point: ``crawlo startproject <name>`` creates a new project.
"""
import shutil
import re
import sys
import os
from pathlib import Path
from typing import Optional

# Add the project root to sys.path so the utils module can be imported.
# NOTE(review): mutating sys.path at import time is a side effect; the
# absolute-import fallback below may make this unnecessary — confirm.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))

try:
    from rich.console import Console
    from rich.panel import Panel
    from rich.text import Text
    from rich.table import Table
    RICH_AVAILABLE = True
except ImportError:
    RICH_AVAILABLE = False

try:
    from .utils import show_error_panel, show_success_panel
    UTILS_AVAILABLE = True
except ImportError:
    # Relative import fails when run as a plain script; try absolute import.
    # NOTE(review): the functions imported here are shadowed by the local
    # definitions below, so UTILS_AVAILABLE is effectively unused — confirm.
    try:
        from crawlo.commands.utils import show_error_panel, show_success_panel
        UTILS_AVAILABLE = True
    except ImportError:
        UTILS_AVAILABLE = False

# Initialise the rich console (when the library is available).
if RICH_AVAILABLE:
    console = Console()
else:
    # Minimal stand-in so the rest of the module can call console.print().
    # NOTE(review): rich markup such as "[bold red]..." is printed verbatim
    # on this path.
    class Console:
        def print(self, text):
            print(text)
    console = Console()

# Root directory holding the project/spider templates shipped with crawlo.
TEMPLATES_DIR = Path(__file__).parent.parent / 'templates'

# Available template types (CLI value -> human-readable description).
TEMPLATE_TYPES = {
    'default': '默认模板 - 通用配置,适合大多数项目',
    'simple': '简化模板 - 最小配置,适合快速开始',
    'distributed': '分布式模板 - 针对分布式爬取优化',
    'high-performance': '高性能模板 - 针对大规模高并发优化',
    'gentle': '温和模板 - 低负载配置,对目标网站友好'
}
58
+
59
+
60
def show_error_panel(title, content):
    """Display an error panel.

    Uses a red rich Panel when rich is available; otherwise falls back to
    plain ``print`` output.

    Args:
        title:   Panel title (may contain rich markup).
        content: Panel body text (may contain rich markup).
    """
    if RICH_AVAILABLE:
        # Panel is already imported at module level when RICH_AVAILABLE is
        # True, so no local re-import is needed.
        console.print(Panel(content, title=title, border_style="red"))
    else:
        # Mirror show_success_panel's "✅" marker; the original printed a
        # bare leading space here (apparently a lost "❌").
        print(f"❌ {title}")
        print(content)
68
+
69
def show_success_panel(title, content):
    """Display a success panel.

    Renders a green rich Panel when rich is installed, otherwise prints a
    plain-text equivalent.

    Args:
        title:   Panel title (may contain rich markup).
        content: Panel body text (may contain rich markup).
    """
    if not RICH_AVAILABLE:
        print(f"✅ {title}")
        print(content)
        return
    from rich.panel import Panel
    console.print(Panel(content, title=title, border_style="green"))
77
+
78
+ def _render_template(tmpl_path, context):
79
+ """读取模板文件,替换 {{key}} context 中的值"""
80
+ with open(tmpl_path, 'r', encoding='utf-8') as f:
81
+ content = f.read()
82
+ for key, value in context.items():
83
+ content = content.replace(f'{{{{{key}}}}}', str(value))
84
+ return content
85
+
86
+
87
+ def _copytree_with_templates(src, dst, context, template_type='default'):
88
+ """
89
+ 递归复制目录,将 .tmpl 文件渲染后复制(去除 .tmpl 后缀),其他文件直接复制。
90
+ """
91
+ src_path = Path(src)
92
+ dst_path = Path(dst)
93
+ dst_path.mkdir(parents=True, exist_ok=True)
94
+
95
+ for item in src_path.rglob('*'):
96
+ rel_path = item.relative_to(src_path)
97
+ dst_item = dst_path / rel_path
98
+
99
+ if item.is_dir():
100
+ dst_item.mkdir(parents=True, exist_ok=True)
101
+ else:
102
+ if item.suffix == '.tmpl':
103
+ # 处理特定模板类型的设置文件
104
+ if item.name == 'settings.py.tmpl' and template_type != 'default':
105
+ # 使用特定模板类型的设置文件
106
+ template_file_name = f'settings_{template_type}.py.tmpl'
107
+ template_file_path = src_path / template_file_name
108
+ if template_file_path.exists():
109
+ rendered_content = _render_template(template_file_path, context)
110
+ else:
111
+ # 如果特定模板不存在,使用默认模板
112
+ rendered_content = _render_template(item, context)
113
+ else:
114
+ rendered_content = _render_template(item, context)
115
+
116
+ final_dst = dst_item.with_suffix('')
117
+ final_dst.parent.mkdir(parents=True, exist_ok=True)
118
+ with open(final_dst, 'w', encoding='utf-8') as f:
119
+ f.write(rendered_content)
120
+ else:
121
+ shutil.copy2(item, dst_item)
122
+
123
+
124
def validate_project_name(project_name: str) -> tuple[bool, str]:
    """Check whether *project_name* is acceptable for a new crawlo project.

    A valid name is a non-empty, at most 50-character, snake_case Python
    identifier that is not a keyword and does not end with a digit.

    Returns:
        tuple[bool, str]: (is_valid, error message — empty when valid)
    """
    # Reject empty / whitespace-only input.
    if not project_name or not project_name.strip():
        return False, "Project name cannot be empty"

    name = project_name.strip()

    # Length guard.
    if len(name) > 50:
        return False, "Project name too long (max 50 characters)"

    # Reject Python keywords (fixed literal set, matching the original).
    reserved = {
        'False', 'None', 'True', 'and', 'as', 'assert', 'break', 'class',
        'continue', 'def', 'del', 'elif', 'else', 'except', 'finally',
        'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda',
        'nonlocal', 'not', 'or', 'pass', 'raise', 'return', 'try',
        'while', 'with', 'yield'
    }
    if name in reserved:
        return False, f"'{name}' is a Python keyword and cannot be used as project name"

    # Must be importable as a module name.
    if not name.isidentifier():
        return False, "Project name must be a valid Python identifier"

    # Enforce snake_case style.
    if re.match(r'^[a-z][a-z0-9_]*$', name) is None:
        return False, (
            "Project name should start with lowercase letter and "
            "contain only lowercase letters, numbers, and underscores"
        )

    # Trailing digits are discouraged.
    if name[-1].isdigit():
        return False, "Project name should not end with a number"

    return True, ""
168
+
169
+
170
def show_template_options():
    """Print the available project template types.

    Renders a rich table when rich is installed, otherwise a plain
    indented list.
    """
    if not RICH_AVAILABLE:
        print("可用模板类型:")
        for name, desc in TEMPLATE_TYPES.items():
            print(f" {name}: {desc}")
        return

    table = Table(title="可用模板类型", show_header=True, header_style="bold magenta")
    table.add_column("模板类型", style="cyan", no_wrap=True)
    table.add_column("描述", style="green")
    for name, desc in TEMPLATE_TYPES.items():
        table.add_row(name, desc)
    console.print(table)
185
+
186
+
187
def main(args):
    """Entry point for ``crawlo startproject <project_name> [template_type]``.

    Validates the arguments, creates the project directory, renders
    ``crawlo.cfg`` and the project package from the bundled templates, and
    creates ``logs/`` and ``output/`` directories.

    Args:
        args: CLI argument list (without the command name itself).

    Returns:
        0 on success, 1 on any validation or creation error.  A partially
        created project directory is removed on failure.
    """
    if len(args) < 1 or len(args) > 2:
        console.print("[bold red]Error:[/bold red] Usage: [blue]crawlo startproject[/blue] <project_name> [template_type]")
        console.print("💡 Examples:")
        console.print(" [blue]crawlo startproject[/blue] my_spider_project")
        console.print(" [blue]crawlo startproject[/blue] news_crawler simple")
        console.print(" [blue]crawlo startproject[/blue] ecommerce_spider distributed")
        show_template_options()
        return 1

    project_name = args[0]
    template_type = args[1] if len(args) > 1 else 'default'

    # Validate the requested template type.
    if template_type not in TEMPLATE_TYPES:
        show_error_panel(
            "Invalid Template Type",
            f"Template type '[cyan]{template_type}[/cyan]' is not supported.\n"
        )
        show_template_options()
        return 1

    # Validate the project name.
    is_valid, error_msg = validate_project_name(project_name)
    if not is_valid:
        show_error_panel(
            "Invalid Project Name",
            f"[cyan]{project_name}[/cyan] is not a valid project name.\n"
            f"❌ {error_msg}\n\n"
            "💡 Project name should:\n"
            " • Start with lowercase letter\n"
            " • Contain only lowercase letters, numbers, and underscores\n"
            " • Be a valid Python identifier\n"
            " • Not be a Python keyword"
        )
        return 1

    project_dir = Path(project_name)

    if project_dir.exists():
        show_error_panel(
            "Directory Exists",
            f"Directory '[cyan]{project_dir}[/cyan]' already exists.\n"
            "💡 Choose a different project name or remove the existing directory."
        )
        return 1

    context = {'project_name': project_name}
    template_dir = TEMPLATES_DIR / 'project'

    try:
        # 1. Create the project root directory.
        project_dir.mkdir()

        # 2. Render crawlo.cfg.tmpl into the project root.
        cfg_template = TEMPLATES_DIR / 'crawlo.cfg.tmpl'
        if cfg_template.exists():
            cfg_content = _render_template(cfg_template, context)
            (project_dir / 'crawlo.cfg').write_text(cfg_content, encoding='utf-8')
            console.print(f":white_check_mark: Created [green]{project_dir / 'crawlo.cfg'}[/green]")
        else:
            console.print("[yellow]⚠ Warning:[/yellow] Template 'crawlo.cfg.tmpl' not found.")

        # 3. Copy and render the project package contents.
        package_dir = project_dir / project_name
        _copytree_with_templates(template_dir, package_dir, context, template_type)
        console.print(f":white_check_mark: Created project package: [green]{package_dir}[/green]")

        # 4. Create the logs directory.
        (project_dir / 'logs').mkdir(exist_ok=True)
        console.print(":white_check_mark: Created logs directory")

        # 5. Create the output directory (for exported data).
        (project_dir / 'output').mkdir(exist_ok=True)
        console.print(":white_check_mark: Created output directory")

        # Success panel.
        # NOTE(review): Text and Panel are bound only when rich imported
        # successfully above — this raises NameError when rich is missing.
        success_text = Text.from_markup(f"Project '[bold cyan]{project_name}[/bold cyan]' created successfully!")
        console.print(Panel(success_text, title=":rocket: Success", border_style="green", padding=(1, 2)))

        # Report which (non-default) template type was used.
        if template_type != 'default':
            console.print(f":information: 使用模板类型: [bold blue]{template_type}[/bold blue] - {TEMPLATE_TYPES[template_type]}")

        # Next-step hints (aligned, with rich markup highlighting).
        next_steps = f"""
[bold]🚀 Next steps:[/bold]
[blue]cd[/blue] {project_name}
[blue]crawlo genspider[/blue] example example.com
[blue]crawlo run[/blue] example

[bold]📚 Learn more:[/bold]
[blue]crawlo list[/blue] # List all spiders
[blue]crawlo check[/blue] example # Check spider validity
[blue]crawlo stats[/blue] # View statistics
""".strip()
        console.print(next_steps)

        return 0

    except Exception as e:
        show_error_panel(
            "Creation Failed",
            f"Failed to create project: {e}"
        )
        # Roll back a partially created project directory.
        if project_dir.exists():
            shutil.rmtree(project_dir, ignore_errors=True)
            console.print("[red]:cross_mark: Cleaned up partially created project.[/red]")
        return 1
296
+
297
if __name__ == "__main__":
    # `sys` is already imported at module top; the original re-imported it
    # here redundantly.
    sys.exit(main(sys.argv[1:]))