crawlo 1.1.8__py3-none-any.whl → 1.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic. Click here for more details.

Files changed (191)
  1. crawlo/__init__.py +61 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +60 -60
  4. crawlo/cleaners/data_formatter.py +225 -225
  5. crawlo/cleaners/encoding_converter.py +125 -125
  6. crawlo/cleaners/text_cleaner.py +232 -232
  7. crawlo/cli.py +65 -65
  8. crawlo/commands/__init__.py +14 -14
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/help.py +132 -132
  12. crawlo/commands/list.py +155 -155
  13. crawlo/commands/run.py +292 -292
  14. crawlo/commands/startproject.py +418 -418
  15. crawlo/commands/stats.py +188 -188
  16. crawlo/commands/utils.py +186 -186
  17. crawlo/config.py +312 -312
  18. crawlo/config_validator.py +252 -252
  19. crawlo/core/__init__.py +2 -2
  20. crawlo/core/engine.py +354 -345
  21. crawlo/core/processor.py +40 -40
  22. crawlo/core/scheduler.py +143 -136
  23. crawlo/crawler.py +1027 -1027
  24. crawlo/downloader/__init__.py +266 -266
  25. crawlo/downloader/aiohttp_downloader.py +220 -220
  26. crawlo/downloader/cffi_downloader.py +256 -256
  27. crawlo/downloader/httpx_downloader.py +259 -259
  28. crawlo/downloader/hybrid_downloader.py +213 -213
  29. crawlo/downloader/playwright_downloader.py +402 -402
  30. crawlo/downloader/selenium_downloader.py +472 -472
  31. crawlo/event.py +11 -11
  32. crawlo/exceptions.py +81 -81
  33. crawlo/extension/__init__.py +37 -37
  34. crawlo/extension/health_check.py +141 -141
  35. crawlo/extension/log_interval.py +57 -57
  36. crawlo/extension/log_stats.py +81 -81
  37. crawlo/extension/logging_extension.py +43 -43
  38. crawlo/extension/memory_monitor.py +104 -104
  39. crawlo/extension/performance_profiler.py +133 -133
  40. crawlo/extension/request_recorder.py +107 -107
  41. crawlo/filters/__init__.py +154 -154
  42. crawlo/filters/aioredis_filter.py +280 -280
  43. crawlo/filters/memory_filter.py +269 -269
  44. crawlo/items/__init__.py +23 -23
  45. crawlo/items/base.py +21 -21
  46. crawlo/items/fields.py +53 -53
  47. crawlo/items/items.py +104 -104
  48. crawlo/middleware/__init__.py +21 -21
  49. crawlo/middleware/default_header.py +32 -32
  50. crawlo/middleware/download_delay.py +28 -28
  51. crawlo/middleware/middleware_manager.py +135 -135
  52. crawlo/middleware/proxy.py +272 -272
  53. crawlo/middleware/request_ignore.py +30 -30
  54. crawlo/middleware/response_code.py +18 -18
  55. crawlo/middleware/response_filter.py +26 -26
  56. crawlo/middleware/retry.py +124 -124
  57. crawlo/mode_manager.py +211 -211
  58. crawlo/network/__init__.py +21 -21
  59. crawlo/network/request.py +338 -338
  60. crawlo/network/response.py +359 -359
  61. crawlo/pipelines/__init__.py +21 -21
  62. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  63. crawlo/pipelines/console_pipeline.py +39 -39
  64. crawlo/pipelines/csv_pipeline.py +316 -316
  65. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  66. crawlo/pipelines/json_pipeline.py +218 -218
  67. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  68. crawlo/pipelines/mongo_pipeline.py +131 -131
  69. crawlo/pipelines/mysql_pipeline.py +316 -316
  70. crawlo/pipelines/pipeline_manager.py +61 -61
  71. crawlo/pipelines/redis_dedup_pipeline.py +167 -167
  72. crawlo/project.py +187 -187
  73. crawlo/queue/pqueue.py +37 -37
  74. crawlo/queue/queue_manager.py +337 -334
  75. crawlo/queue/redis_priority_queue.py +298 -298
  76. crawlo/settings/__init__.py +7 -7
  77. crawlo/settings/default_settings.py +219 -219
  78. crawlo/settings/setting_manager.py +122 -122
  79. crawlo/spider/__init__.py +639 -639
  80. crawlo/stats_collector.py +59 -59
  81. crawlo/subscriber.py +130 -130
  82. crawlo/task_manager.py +30 -30
  83. crawlo/templates/crawlo.cfg.tmpl +10 -10
  84. crawlo/templates/project/__init__.py.tmpl +3 -3
  85. crawlo/templates/project/items.py.tmpl +17 -17
  86. crawlo/templates/project/middlewares.py.tmpl +109 -109
  87. crawlo/templates/project/pipelines.py.tmpl +96 -96
  88. crawlo/templates/project/run.py.tmpl +45 -45
  89. crawlo/templates/project/settings.py.tmpl +326 -326
  90. crawlo/templates/project/settings_distributed.py.tmpl +119 -119
  91. crawlo/templates/project/settings_gentle.py.tmpl +94 -94
  92. crawlo/templates/project/settings_high_performance.py.tmpl +151 -151
  93. crawlo/templates/project/settings_simple.py.tmpl +68 -68
  94. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  95. crawlo/templates/spider/spider.py.tmpl +141 -141
  96. crawlo/tools/__init__.py +182 -182
  97. crawlo/tools/anti_crawler.py +268 -268
  98. crawlo/tools/authenticated_proxy.py +240 -240
  99. crawlo/tools/data_validator.py +180 -180
  100. crawlo/tools/date_tools.py +35 -35
  101. crawlo/tools/distributed_coordinator.py +386 -386
  102. crawlo/tools/retry_mechanism.py +220 -220
  103. crawlo/tools/scenario_adapter.py +262 -262
  104. crawlo/utils/__init__.py +35 -35
  105. crawlo/utils/batch_processor.py +260 -260
  106. crawlo/utils/controlled_spider_mixin.py +439 -439
  107. crawlo/utils/date_tools.py +290 -290
  108. crawlo/utils/db_helper.py +343 -343
  109. crawlo/utils/enhanced_error_handler.py +359 -359
  110. crawlo/utils/env_config.py +105 -105
  111. crawlo/utils/error_handler.py +125 -125
  112. crawlo/utils/func_tools.py +82 -82
  113. crawlo/utils/large_scale_config.py +286 -286
  114. crawlo/utils/large_scale_helper.py +343 -343
  115. crawlo/utils/log.py +128 -128
  116. crawlo/utils/performance_monitor.py +284 -284
  117. crawlo/utils/queue_helper.py +175 -175
  118. crawlo/utils/redis_connection_pool.py +334 -334
  119. crawlo/utils/redis_key_validator.py +199 -199
  120. crawlo/utils/request.py +267 -267
  121. crawlo/utils/request_serializer.py +219 -219
  122. crawlo/utils/spider_loader.py +62 -62
  123. crawlo/utils/system.py +11 -11
  124. crawlo/utils/tools.py +4 -4
  125. crawlo/utils/url.py +39 -39
  126. crawlo-1.2.0.dist-info/METADATA +697 -0
  127. crawlo-1.2.0.dist-info/RECORD +190 -0
  128. examples/__init__.py +7 -7
  129. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  130. tests/__init__.py +7 -7
  131. tests/advanced_tools_example.py +275 -275
  132. tests/authenticated_proxy_example.py +236 -236
  133. tests/cleaners_example.py +160 -160
  134. tests/config_validation_demo.py +102 -102
  135. tests/controlled_spider_example.py +205 -205
  136. tests/date_tools_example.py +180 -180
  137. tests/dynamic_loading_example.py +523 -523
  138. tests/dynamic_loading_test.py +104 -104
  139. tests/env_config_example.py +133 -133
  140. tests/error_handling_example.py +171 -171
  141. tests/redis_key_validation_demo.py +130 -130
  142. tests/response_improvements_example.py +144 -144
  143. tests/test_advanced_tools.py +148 -148
  144. tests/test_all_redis_key_configs.py +145 -145
  145. tests/test_authenticated_proxy.py +141 -141
  146. tests/test_cleaners.py +54 -54
  147. tests/test_comprehensive.py +146 -146
  148. tests/test_config_validator.py +193 -193
  149. tests/test_date_tools.py +123 -123
  150. tests/test_double_crawlo_fix.py +207 -207
  151. tests/test_double_crawlo_fix_simple.py +124 -124
  152. tests/test_dynamic_downloaders_proxy.py +124 -124
  153. tests/test_dynamic_proxy.py +92 -92
  154. tests/test_dynamic_proxy_config.py +146 -146
  155. tests/test_dynamic_proxy_real.py +109 -109
  156. tests/test_edge_cases.py +303 -303
  157. tests/test_enhanced_error_handler.py +270 -270
  158. tests/test_env_config.py +121 -121
  159. tests/test_error_handler_compatibility.py +112 -112
  160. tests/test_final_validation.py +153 -153
  161. tests/test_framework_env_usage.py +103 -103
  162. tests/test_integration.py +356 -356
  163. tests/test_item_dedup_redis_key.py +122 -122
  164. tests/test_parsel.py +29 -29
  165. tests/test_performance.py +327 -327
  166. tests/test_proxy_health_check.py +32 -32
  167. tests/test_proxy_middleware_integration.py +136 -136
  168. tests/test_proxy_providers.py +56 -56
  169. tests/test_proxy_stats.py +19 -19
  170. tests/test_proxy_strategies.py +59 -59
  171. tests/test_queue_manager_double_crawlo.py +174 -231
  172. tests/test_queue_manager_redis_key.py +176 -176
  173. tests/test_redis_config.py +28 -28
  174. tests/test_redis_connection_pool.py +294 -294
  175. tests/test_redis_key_naming.py +181 -181
  176. tests/test_redis_key_validator.py +123 -123
  177. tests/test_redis_queue.py +224 -224
  178. tests/test_request_serialization.py +70 -70
  179. tests/test_response_improvements.py +152 -152
  180. tests/test_scheduler.py +241 -241
  181. tests/test_simple_response.py +61 -61
  182. tests/test_telecom_spider_redis_key.py +205 -205
  183. tests/test_template_content.py +87 -87
  184. tests/test_template_redis_key.py +134 -134
  185. tests/test_tools.py +153 -153
  186. tests/tools_example.py +257 -257
  187. crawlo-1.1.8.dist-info/METADATA +0 -626
  188. crawlo-1.1.8.dist-info/RECORD +0 -190
  189. {crawlo-1.1.8.dist-info → crawlo-1.2.0.dist-info}/WHEEL +0 -0
  190. {crawlo-1.1.8.dist-info → crawlo-1.2.0.dist-info}/entry_points.txt +0 -0
  191. {crawlo-1.1.8.dist-info → crawlo-1.2.0.dist-info}/top_level.txt +0 -0
@@ -1,419 +1,419 @@
1
- #!/usr/bin/python
2
- # -*- coding: UTF-8 -*-
3
- """
4
- # @Time : 2025-08-31 22:36
5
- # @Author : crawl-coder
6
- # @Desc : 命令行入口:crawlo startproject baidu,创建项目。
7
- """
8
- import shutil
9
- import re
10
- import sys
11
- import os
12
- from pathlib import Path
13
- from typing import Optional, List
14
-
15
- # 添加项目根目录到路径,以便能够导入utils模块
16
- sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
17
-
18
- try:
19
- from rich.console import Console
20
- from rich.panel import Panel
21
- from rich.text import Text
22
- from rich.table import Table
23
- RICH_AVAILABLE = True
24
- except ImportError:
25
- RICH_AVAILABLE = False
26
-
27
- try:
28
- from .utils import show_error_panel, show_success_panel
29
- UTILS_AVAILABLE = True
30
- except ImportError:
31
- # 如果相对导入失败,尝试绝对导入
32
- try:
33
- from crawlo.commands.utils import show_error_panel, show_success_panel
34
- UTILS_AVAILABLE = True
35
- except ImportError:
36
- UTILS_AVAILABLE = False
37
-
38
- # 初始化 rich 控制台(如果可用)
39
- if RICH_AVAILABLE:
40
- console = Console()
41
- else:
42
- # 简单的控制台输出替代
43
- class Console:
44
- def print(self, text):
45
- print(text)
46
- console = Console()
47
-
48
- TEMPLATES_DIR = Path(__file__).parent.parent / 'templates'
49
-
50
- # 可用的模板类型
51
- TEMPLATE_TYPES = {
52
- 'default': '默认模板 - 通用配置,适合大多数项目',
53
- 'simple': '简化模板 - 最小配置,适合快速开始',
54
- 'distributed': '分布式模板 - 针对分布式爬取优化',
55
- 'high-performance': '高性能模板 - 针对大规模高并发优化',
56
- 'gentle': '温和模板 - 低负载配置,对目标网站友好'
57
- }
58
-
59
- # 可选的模块组件
60
- OPTIONAL_MODULES = {
61
- 'mysql': 'MySQL数据库支持',
62
- 'mongodb': 'MongoDB数据库支持',
63
- 'redis': 'Redis支持(分布式队列和去重)',
64
- 'proxy': '代理支持',
65
- 'monitoring': '监控和性能分析',
66
- 'dedup': '去重功能',
67
- 'httpx': 'HttpX下载器',
68
- 'aiohttp': 'AioHttp下载器',
69
- 'curl': 'CurlCffi下载器'
70
- }
71
-
72
-
73
- def show_error_panel(title, content):
74
- """显示错误面板的简单实现"""
75
- if RICH_AVAILABLE:
76
- from rich.panel import Panel
77
- console.print(Panel(content, title=title, border_style="red"))
78
- else:
79
- print(f"❌ {title}")
80
- print(content)
81
-
82
- def show_success_panel(title, content):
83
- """显示成功面板的简单实现"""
84
- if RICH_AVAILABLE:
85
- from rich.panel import Panel
86
- console.print(Panel(content, title=title, border_style="green"))
87
- else:
88
- print(f"✅ {title}")
89
- print(content)
90
-
91
- def _render_template(tmpl_path, context):
92
- """读取模板文件,替换 {{key}} 为 context 中的值"""
93
- with open(tmpl_path, 'r', encoding='utf-8') as f:
94
- content = f.read()
95
- for key, value in context.items():
96
- content = content.replace(f'{{{{{key}}}}}', str(value))
97
- return content
98
-
99
-
100
- def _copytree_with_templates(src, dst, context, template_type='default', modules: List[str] = None):
101
- """
102
- 递归复制目录,将 .tmpl 文件渲染后复制(去除 .tmpl 后缀),其他文件直接复制。
103
- 支持选择性模块复制。
104
- """
105
- src_path = Path(src)
106
- dst_path = Path(dst)
107
- dst_path.mkdir(parents=True, exist_ok=True)
108
-
109
- for item in src_path.rglob('*'):
110
- rel_path = item.relative_to(src_path)
111
- dst_item = dst_path / rel_path
112
-
113
- # 检查是否应该包含此文件(基于模块选择)
114
- if not _should_include_file(rel_path, modules):
115
- continue
116
-
117
- if item.is_dir():
118
- dst_item.mkdir(parents=True, exist_ok=True)
119
- else:
120
- if item.suffix == '.tmpl':
121
- rendered_content = None
122
- # 处理特定模板类型的设置文件
123
- if item.name == 'settings.py.tmpl':
124
- # 对于设置文件,根据模板类型选择相应的内容模板
125
- if template_type != 'default':
126
- # 使用特定模板类型的设置文件
127
- template_file_name = f'settings_{template_type}.py.tmpl'
128
- template_file_path = src_path / template_file_name
129
- if template_file_path.exists():
130
- rendered_content = _render_template(template_file_path, context)
131
- else:
132
- # 如果特定模板不存在,使用默认模板
133
- rendered_content = _render_template(item, context)
134
- else:
135
- # 使用默认模板
136
- rendered_content = _render_template(item, context)
137
- # 跳过其他以 settings_ 开头的模板文件,避免重复处理
138
- elif item.name.startswith('settings_') and item.name.endswith('.py.tmpl'):
139
- continue
140
- else:
141
- rendered_content = _render_template(item, context)
142
-
143
- # 确保设置文件始终命名为 settings.py
144
- if item.name == 'settings.py.tmpl':
145
- # 特殊处理设置模板文件,统一生成为 settings.py
146
- final_dst = dst_item.parent / 'settings.py'
147
- else:
148
- final_dst = dst_item.with_suffix('')
149
-
150
- final_dst.parent.mkdir(parents=True, exist_ok=True)
151
- with open(final_dst, 'w', encoding='utf-8') as f:
152
- f.write(rendered_content)
153
- else:
154
- shutil.copy2(item, dst_item)
155
-
156
-
157
- def _should_include_file(rel_path, modules: List[str]) -> bool:
158
- """
159
- 根据选择的模块决定是否包含文件
160
- """
161
- if modules is None:
162
- # 如果没有指定模块,则包含所有文件
163
- return True
164
-
165
- # 基础文件始终包含
166
- basic_files = [
167
- '__init__.py.tmpl',
168
- 'settings.py.tmpl',
169
- 'spiders/__init__.py.tmpl',
170
- 'items.py.tmpl',
171
- 'middlewares.py.tmpl',
172
- 'run.py.tmpl'
173
- ]
174
-
175
- path_str = str(rel_path).replace('\\', '/')
176
-
177
- # 始终包含基础文件
178
- if path_str in basic_files:
179
- return True
180
-
181
- # 根据模块选择包含特定文件
182
- if 'mysql' in modules and 'mysql' in path_str:
183
- return True
184
- if 'mongodb' in modules and 'mongo' in path_str:
185
- return True
186
- if 'redis' in modules and 'redis' in path_str:
187
- return True
188
- if 'proxy' in modules and 'proxy' in path_str:
189
- return True
190
- if 'monitoring' in modules and ('monitor' in path_str or 'stats' in path_str):
191
- return True
192
- if 'dedup' in modules and 'dedup' in path_str:
193
- return True
194
- if 'httpx' in modules and 'httpx' in path_str:
195
- return True
196
- if 'aiohttp' in modules and 'aiohttp' in path_str:
197
- return True
198
- if 'curl' in modules and 'cffi' in path_str:
199
- return True
200
-
201
- # 默认不包含特定模块文件
202
- return False
203
-
204
-
205
- def validate_project_name(project_name: str) -> tuple[bool, str]:
206
- """
207
- 验证项目名称是否有效
208
-
209
- Returns:
210
- tuple[bool, str]: (是否有效, 错误信息)
211
- """
212
- # 检查是否为空
213
- if not project_name or not project_name.strip():
214
- return False, "Project name cannot be empty"
215
-
216
- project_name = project_name.strip()
217
-
218
- # 检查长度
219
- if len(project_name) > 50:
220
- return False, "Project name too long (max 50 characters)"
221
-
222
- # 检查是否为Python关键字
223
- python_keywords = {
224
- 'False', 'None', 'True', 'and', 'as', 'assert', 'break', 'class',
225
- 'continue', 'def', 'del', 'elif', 'else', 'except', 'finally',
226
- 'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda',
227
- 'nonlocal', 'not', 'or', 'pass', 'raise', 'return', 'try',
228
- 'while', 'with', 'yield'
229
- }
230
- if project_name in python_keywords:
231
- return False, f"'{project_name}' is a Python keyword and cannot be used as project name"
232
-
233
- # 检查是否为有效的Python标识符
234
- if not project_name.isidentifier():
235
- return False, "Project name must be a valid Python identifier"
236
-
237
- # 检查格式(建议使用snake_case)
238
- if not re.match(r'^[a-z][a-z0-9_]*$', project_name):
239
- return False, (
240
- "Project name should start with lowercase letter and "
241
- "contain only lowercase letters, numbers, and underscores"
242
- )
243
-
244
- # 检查是否以数字结尾(不推荐)
245
- if project_name[-1].isdigit():
246
- return False, "Project name should not end with a number"
247
-
248
- return True, ""
249
-
250
-
251
- def show_template_options():
252
- """显示可用的模板选项"""
253
- if RICH_AVAILABLE:
254
- table = Table(title="可用模板类型", show_header=True, header_style="bold magenta")
255
- table.add_column("模板类型", style="cyan", no_wrap=True)
256
- table.add_column("描述", style="green")
257
-
258
- for template_type, description in TEMPLATE_TYPES.items():
259
- table.add_row(template_type, description)
260
-
261
- console.print(table)
262
- else:
263
- print("可用模板类型:")
264
- for template_type, description in TEMPLATE_TYPES.items():
265
- print(f" {template_type}: {description}")
266
-
267
-
268
- def show_module_options():
269
- """显示可用的模块选项"""
270
- if RICH_AVAILABLE:
271
- table = Table(title="可选模块组件", show_header=True, header_style="bold magenta")
272
- table.add_column("模块", style="cyan", no_wrap=True)
273
- table.add_column("描述", style="green")
274
-
275
- for module, description in OPTIONAL_MODULES.items():
276
- table.add_row(module, description)
277
-
278
- console.print(table)
279
- else:
280
- print("可选模块组件:")
281
- for module, description in OPTIONAL_MODULES.items():
282
- print(f" {module}: {description}")
283
-
284
-
285
- def main(args):
286
- if len(args) < 1:
287
- console.print("[bold red]Error:[/bold red] Usage: [blue]crawlo startproject[/blue] <project_name> [template_type] [--modules module1,module2]")
288
- console.print("💡 Examples:")
289
- console.print(" [blue]crawlo startproject[/blue] my_spider_project")
290
- console.print(" [blue]crawlo startproject[/blue] news_crawler simple")
291
- console.print(" [blue]crawlo startproject[/blue] ecommerce_spider distributed --modules mysql,proxy")
292
- show_template_options()
293
- show_module_options()
294
- return 1
295
-
296
- # 解析参数
297
- project_name = args[0]
298
- template_type = 'default'
299
- modules = None
300
-
301
- # 解析可选参数
302
- if len(args) > 1:
303
- for i, arg in enumerate(args[1:], 1):
304
- if arg.startswith('--modules='):
305
- modules_str = arg.split('=', 1)[1]
306
- modules = [m.strip() for m in modules_str.split(',') if m.strip()]
307
- elif arg.startswith('--modules'):
308
- # 处理 --modules module1,module2 格式
309
- if i + 1 < len(args):
310
- modules_str = args[i + 1]
311
- modules = [m.strip() for m in modules_str.split(',') if m.strip()]
312
- elif not arg.startswith('--') and arg in TEMPLATE_TYPES:
313
- template_type = arg
314
-
315
- # 验证模板类型
316
- if template_type not in TEMPLATE_TYPES:
317
- show_error_panel(
318
- "Invalid Template Type",
319
- f"Template type '[cyan]{template_type}[/cyan]' is not supported.\n"
320
- )
321
- show_template_options()
322
- return 1
323
-
324
- # 验证项目名称
325
- is_valid, error_msg = validate_project_name(project_name)
326
- if not is_valid:
327
- show_error_panel(
328
- "Invalid Project Name",
329
- f"[cyan]{project_name}[/cyan] is not a valid project name.\n"
330
- f"❌ {error_msg}\n\n"
331
- "💡 Project name should:\n"
332
- " • Start with lowercase letter\n"
333
- " • Contain only lowercase letters, numbers, and underscores\n"
334
- " • Be a valid Python identifier\n"
335
- " • Not be a Python keyword"
336
- )
337
- return 1
338
-
339
- project_dir = Path(project_name)
340
-
341
- if project_dir.exists():
342
- show_error_panel(
343
- "Directory Exists",
344
- f"Directory '[cyan]{project_dir}[/cyan]' already exists.\n"
345
- "💡 Choose a different project name or remove the existing directory."
346
- )
347
- return 1
348
-
349
- context = {'project_name': project_name}
350
- template_dir = TEMPLATES_DIR / 'project'
351
-
352
- try:
353
- # 1. 创建项目根目录
354
- project_dir.mkdir()
355
-
356
- # 2. 渲染 crawlo.cfg.tmpl
357
- cfg_template = TEMPLATES_DIR / 'crawlo.cfg.tmpl'
358
- if cfg_template.exists():
359
- cfg_content = _render_template(cfg_template, context)
360
- (project_dir / 'crawlo.cfg').write_text(cfg_content, encoding='utf-8')
361
- console.print(f":white_check_mark: Created [green]{project_dir / 'crawlo.cfg'}[/green]")
362
- else:
363
- console.print("[yellow]⚠ Warning:[/yellow] Template 'crawlo.cfg.tmpl' not found.")
364
-
365
- # 3. 复制并渲染项目包内容
366
- package_dir = project_dir / project_name
367
- _copytree_with_templates(template_dir, package_dir, context, template_type, modules)
368
- console.print(f":white_check_mark: Created project package: [green]{package_dir}[/green]")
369
-
370
- # 4. 创建 logs 目录
371
- (project_dir / 'logs').mkdir(exist_ok=True)
372
- console.print(":white_check_mark: Created logs directory")
373
-
374
- # 5. 创建 output 目录(用于数据输出)
375
- (project_dir / 'output').mkdir(exist_ok=True)
376
- console.print(":white_check_mark: Created output directory")
377
-
378
- # 成功面板
379
- success_text = Text.from_markup(f"Project '[bold cyan]{project_name}[/bold cyan]' created successfully!")
380
- console.print(Panel(success_text, title=":rocket: Success", border_style="green", padding=(1, 2)))
381
-
382
- # 显示使用的模板类型
383
- if template_type != 'default':
384
- console.print(f":information: 使用模板类型: [bold blue]{template_type}[/bold blue] - {TEMPLATE_TYPES[template_type]}")
385
-
386
- # 显示选择的模块
387
- if modules:
388
- console.print(f":information: 选择的模块: [bold blue]{', '.join(modules)}[/bold blue]")
389
-
390
- # 下一步操作提示(对齐美观 + 语法高亮)
391
- next_steps = f"""
392
- [bold]🚀 Next steps:[/bold]
393
- [blue]cd[/blue] {project_name}
394
- [blue]crawlo genspider[/blue] example example.com
395
- [blue]crawlo run[/blue] example
396
-
397
- [bold]📚 Learn more:[/bold]
398
- [blue]crawlo list[/blue] # List all spiders
399
- [blue]crawlo check[/blue] example # Check spider validity
400
- [blue]crawlo stats[/blue] # View statistics
401
- """.strip()
402
- console.print(next_steps)
403
-
404
- return 0
405
-
406
- except Exception as e:
407
- show_error_panel(
408
- "Creation Failed",
409
- f"Failed to create project: {e}"
410
- )
411
- if project_dir.exists():
412
- shutil.rmtree(project_dir, ignore_errors=True)
413
- console.print("[red]:cross_mark: Cleaned up partially created project.[/red]")
414
- return 1
415
-
416
- if __name__ == "__main__":
417
- import sys
418
- exit_code = main(sys.argv[1:])
1
+ #!/usr/bin/python
2
+ # -*- coding: UTF-8 -*-
3
+ """
4
+ # @Time : 2025-08-31 22:36
5
+ # @Author : crawl-coder
6
+ # @Desc : 命令行入口:crawlo startproject baidu,创建项目。
7
+ """
8
+ import shutil
9
+ import re
10
+ import sys
11
+ import os
12
+ from pathlib import Path
13
+ from typing import Optional, List
14
+
15
+ # 添加项目根目录到路径,以便能够导入utils模块
16
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
17
+
18
+ try:
19
+ from rich.console import Console
20
+ from rich.panel import Panel
21
+ from rich.text import Text
22
+ from rich.table import Table
23
+ RICH_AVAILABLE = True
24
+ except ImportError:
25
+ RICH_AVAILABLE = False
26
+
27
+ try:
28
+ from .utils import show_error_panel, show_success_panel
29
+ UTILS_AVAILABLE = True
30
+ except ImportError:
31
+ # 如果相对导入失败,尝试绝对导入
32
+ try:
33
+ from crawlo.commands.utils import show_error_panel, show_success_panel
34
+ UTILS_AVAILABLE = True
35
+ except ImportError:
36
+ UTILS_AVAILABLE = False
37
+
38
+ # 初始化 rich 控制台(如果可用)
39
+ if RICH_AVAILABLE:
40
+ console = Console()
41
+ else:
42
+ # 简单的控制台输出替代
43
+ class Console:
44
+ def print(self, text):
45
+ print(text)
46
+ console = Console()
47
+
48
+ TEMPLATES_DIR = Path(__file__).parent.parent / 'templates'
49
+
50
+ # 可用的模板类型
51
+ TEMPLATE_TYPES = {
52
+ 'default': '默认模板 - 通用配置,适合大多数项目',
53
+ 'simple': '简化模板 - 最小配置,适合快速开始',
54
+ 'distributed': '分布式模板 - 针对分布式爬取优化',
55
+ 'high-performance': '高性能模板 - 针对大规模高并发优化',
56
+ 'gentle': '温和模板 - 低负载配置,对目标网站友好'
57
+ }
58
+
59
+ # 可选的模块组件
60
+ OPTIONAL_MODULES = {
61
+ 'mysql': 'MySQL数据库支持',
62
+ 'mongodb': 'MongoDB数据库支持',
63
+ 'redis': 'Redis支持(分布式队列和去重)',
64
+ 'proxy': '代理支持',
65
+ 'monitoring': '监控和性能分析',
66
+ 'dedup': '去重功能',
67
+ 'httpx': 'HttpX下载器',
68
+ 'aiohttp': 'AioHttp下载器',
69
+ 'curl': 'CurlCffi下载器'
70
+ }
71
+
72
+
73
+ def show_error_panel(title, content):
74
+ """显示错误面板的简单实现"""
75
+ if RICH_AVAILABLE:
76
+ from rich.panel import Panel
77
+ console.print(Panel(content, title=title, border_style="red"))
78
+ else:
79
+ print(f"❌ {title}")
80
+ print(content)
81
+
82
+ def show_success_panel(title, content):
83
+ """显示成功面板的简单实现"""
84
+ if RICH_AVAILABLE:
85
+ from rich.panel import Panel
86
+ console.print(Panel(content, title=title, border_style="green"))
87
+ else:
88
+ print(f"✅ {title}")
89
+ print(content)
90
+
91
+ def _render_template(tmpl_path, context):
92
+ """读取模板文件,替换 {{key}} 为 context 中的值"""
93
+ with open(tmpl_path, 'r', encoding='utf-8') as f:
94
+ content = f.read()
95
+ for key, value in context.items():
96
+ content = content.replace(f'{{{{{key}}}}}', str(value))
97
+ return content
98
+
99
+
100
+ def _copytree_with_templates(src, dst, context, template_type='default', modules: List[str] = None):
101
+ """
102
+ 递归复制目录,将 .tmpl 文件渲染后复制(去除 .tmpl 后缀),其他文件直接复制。
103
+ 支持选择性模块复制。
104
+ """
105
+ src_path = Path(src)
106
+ dst_path = Path(dst)
107
+ dst_path.mkdir(parents=True, exist_ok=True)
108
+
109
+ for item in src_path.rglob('*'):
110
+ rel_path = item.relative_to(src_path)
111
+ dst_item = dst_path / rel_path
112
+
113
+ # 检查是否应该包含此文件(基于模块选择)
114
+ if not _should_include_file(rel_path, modules):
115
+ continue
116
+
117
+ if item.is_dir():
118
+ dst_item.mkdir(parents=True, exist_ok=True)
119
+ else:
120
+ if item.suffix == '.tmpl':
121
+ rendered_content = None
122
+ # 处理特定模板类型的设置文件
123
+ if item.name == 'settings.py.tmpl':
124
+ # 对于设置文件,根据模板类型选择相应的内容模板
125
+ if template_type != 'default':
126
+ # 使用特定模板类型的设置文件
127
+ template_file_name = f'settings_{template_type}.py.tmpl'
128
+ template_file_path = src_path / template_file_name
129
+ if template_file_path.exists():
130
+ rendered_content = _render_template(template_file_path, context)
131
+ else:
132
+ # 如果特定模板不存在,使用默认模板
133
+ rendered_content = _render_template(item, context)
134
+ else:
135
+ # 使用默认模板
136
+ rendered_content = _render_template(item, context)
137
+ # 跳过其他以 settings_ 开头的模板文件,避免重复处理
138
+ elif item.name.startswith('settings_') and item.name.endswith('.py.tmpl'):
139
+ continue
140
+ else:
141
+ rendered_content = _render_template(item, context)
142
+
143
+ # 确保设置文件始终命名为 settings.py
144
+ if item.name == 'settings.py.tmpl':
145
+ # 特殊处理设置模板文件,统一生成为 settings.py
146
+ final_dst = dst_item.parent / 'settings.py'
147
+ else:
148
+ final_dst = dst_item.with_suffix('')
149
+
150
+ final_dst.parent.mkdir(parents=True, exist_ok=True)
151
+ with open(final_dst, 'w', encoding='utf-8') as f:
152
+ f.write(rendered_content)
153
+ else:
154
+ shutil.copy2(item, dst_item)
155
+
156
+
157
+ def _should_include_file(rel_path, modules: List[str]) -> bool:
158
+ """
159
+ 根据选择的模块决定是否包含文件
160
+ """
161
+ if modules is None:
162
+ # 如果没有指定模块,则包含所有文件
163
+ return True
164
+
165
+ # 基础文件始终包含
166
+ basic_files = [
167
+ '__init__.py.tmpl',
168
+ 'settings.py.tmpl',
169
+ 'spiders/__init__.py.tmpl',
170
+ 'items.py.tmpl',
171
+ 'middlewares.py.tmpl',
172
+ 'run.py.tmpl'
173
+ ]
174
+
175
+ path_str = str(rel_path).replace('\\', '/')
176
+
177
+ # 始终包含基础文件
178
+ if path_str in basic_files:
179
+ return True
180
+
181
+ # 根据模块选择包含特定文件
182
+ if 'mysql' in modules and 'mysql' in path_str:
183
+ return True
184
+ if 'mongodb' in modules and 'mongo' in path_str:
185
+ return True
186
+ if 'redis' in modules and 'redis' in path_str:
187
+ return True
188
+ if 'proxy' in modules and 'proxy' in path_str:
189
+ return True
190
+ if 'monitoring' in modules and ('monitor' in path_str or 'stats' in path_str):
191
+ return True
192
+ if 'dedup' in modules and 'dedup' in path_str:
193
+ return True
194
+ if 'httpx' in modules and 'httpx' in path_str:
195
+ return True
196
+ if 'aiohttp' in modules and 'aiohttp' in path_str:
197
+ return True
198
+ if 'curl' in modules and 'cffi' in path_str:
199
+ return True
200
+
201
+ # 默认不包含特定模块文件
202
+ return False
203
+
204
+
205
+ def validate_project_name(project_name: str) -> tuple[bool, str]:
206
+ """
207
+ 验证项目名称是否有效
208
+
209
+ Returns:
210
+ tuple[bool, str]: (是否有效, 错误信息)
211
+ """
212
+ # 检查是否为空
213
+ if not project_name or not project_name.strip():
214
+ return False, "Project name cannot be empty"
215
+
216
+ project_name = project_name.strip()
217
+
218
+ # 检查长度
219
+ if len(project_name) > 50:
220
+ return False, "Project name too long (max 50 characters)"
221
+
222
+ # 检查是否为Python关键字
223
+ python_keywords = {
224
+ 'False', 'None', 'True', 'and', 'as', 'assert', 'break', 'class',
225
+ 'continue', 'def', 'del', 'elif', 'else', 'except', 'finally',
226
+ 'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda',
227
+ 'nonlocal', 'not', 'or', 'pass', 'raise', 'return', 'try',
228
+ 'while', 'with', 'yield'
229
+ }
230
+ if project_name in python_keywords:
231
+ return False, f"'{project_name}' is a Python keyword and cannot be used as project name"
232
+
233
+ # 检查是否为有效的Python标识符
234
+ if not project_name.isidentifier():
235
+ return False, "Project name must be a valid Python identifier"
236
+
237
+ # 检查格式(建议使用snake_case)
238
+ if not re.match(r'^[a-z][a-z0-9_]*$', project_name):
239
+ return False, (
240
+ "Project name should start with lowercase letter and "
241
+ "contain only lowercase letters, numbers, and underscores"
242
+ )
243
+
244
+ # 检查是否以数字结尾(不推荐)
245
+ if project_name[-1].isdigit():
246
+ return False, "Project name should not end with a number"
247
+
248
+ return True, ""
249
+
250
+
251
+ def show_template_options():
252
+ """显示可用的模板选项"""
253
+ if RICH_AVAILABLE:
254
+ table = Table(title="可用模板类型", show_header=True, header_style="bold magenta")
255
+ table.add_column("模板类型", style="cyan", no_wrap=True)
256
+ table.add_column("描述", style="green")
257
+
258
+ for template_type, description in TEMPLATE_TYPES.items():
259
+ table.add_row(template_type, description)
260
+
261
+ console.print(table)
262
+ else:
263
+ print("可用模板类型:")
264
+ for template_type, description in TEMPLATE_TYPES.items():
265
+ print(f" {template_type}: {description}")
266
+
267
+
268
+ def show_module_options():
269
+ """显示可用的模块选项"""
270
+ if RICH_AVAILABLE:
271
+ table = Table(title="可选模块组件", show_header=True, header_style="bold magenta")
272
+ table.add_column("模块", style="cyan", no_wrap=True)
273
+ table.add_column("描述", style="green")
274
+
275
+ for module, description in OPTIONAL_MODULES.items():
276
+ table.add_row(module, description)
277
+
278
+ console.print(table)
279
+ else:
280
+ print("可选模块组件:")
281
+ for module, description in OPTIONAL_MODULES.items():
282
+ print(f" {module}: {description}")
283
+
284
+
285
+ def main(args):
286
+ if len(args) < 1:
287
+ console.print("[bold red]Error:[/bold red] Usage: [blue]crawlo startproject[/blue] <project_name> [template_type] [--modules module1,module2]")
288
+ console.print("💡 Examples:")
289
+ console.print(" [blue]crawlo startproject[/blue] my_spider_project")
290
+ console.print(" [blue]crawlo startproject[/blue] news_crawler simple")
291
+ console.print(" [blue]crawlo startproject[/blue] ecommerce_spider distributed --modules mysql,proxy")
292
+ show_template_options()
293
+ show_module_options()
294
+ return 1
295
+
296
+ # 解析参数
297
+ project_name = args[0]
298
+ template_type = 'default'
299
+ modules = None
300
+
301
+ # 解析可选参数
302
+ if len(args) > 1:
303
+ for i, arg in enumerate(args[1:], 1):
304
+ if arg.startswith('--modules='):
305
+ modules_str = arg.split('=', 1)[1]
306
+ modules = [m.strip() for m in modules_str.split(',') if m.strip()]
307
+ elif arg.startswith('--modules'):
308
+ # 处理 --modules module1,module2 格式
309
+ if i + 1 < len(args):
310
+ modules_str = args[i + 1]
311
+ modules = [m.strip() for m in modules_str.split(',') if m.strip()]
312
+ elif not arg.startswith('--') and arg in TEMPLATE_TYPES:
313
+ template_type = arg
314
+
315
+ # 验证模板类型
316
+ if template_type not in TEMPLATE_TYPES:
317
+ show_error_panel(
318
+ "Invalid Template Type",
319
+ f"Template type '[cyan]{template_type}[/cyan]' is not supported.\n"
320
+ )
321
+ show_template_options()
322
+ return 1
323
+
324
+ # 验证项目名称
325
+ is_valid, error_msg = validate_project_name(project_name)
326
+ if not is_valid:
327
+ show_error_panel(
328
+ "Invalid Project Name",
329
+ f"[cyan]{project_name}[/cyan] is not a valid project name.\n"
330
+ f"❌ {error_msg}\n\n"
331
+ "💡 Project name should:\n"
332
+ " • Start with lowercase letter\n"
333
+ " • Contain only lowercase letters, numbers, and underscores\n"
334
+ " • Be a valid Python identifier\n"
335
+ " • Not be a Python keyword"
336
+ )
337
+ return 1
338
+
339
+ project_dir = Path(project_name)
340
+
341
+ if project_dir.exists():
342
+ show_error_panel(
343
+ "Directory Exists",
344
+ f"Directory '[cyan]{project_dir}[/cyan]' already exists.\n"
345
+ "💡 Choose a different project name or remove the existing directory."
346
+ )
347
+ return 1
348
+
349
+ context = {'project_name': project_name}
350
+ template_dir = TEMPLATES_DIR / 'project'
351
+
352
+ try:
353
+ # 1. 创建项目根目录
354
+ project_dir.mkdir()
355
+
356
+ # 2. 渲染 crawlo.cfg.tmpl
357
+ cfg_template = TEMPLATES_DIR / 'crawlo.cfg.tmpl'
358
+ if cfg_template.exists():
359
+ cfg_content = _render_template(cfg_template, context)
360
+ (project_dir / 'crawlo.cfg').write_text(cfg_content, encoding='utf-8')
361
+ console.print(f":white_check_mark: Created [green]{project_dir / 'crawlo.cfg'}[/green]")
362
+ else:
363
+ console.print("[yellow]⚠ Warning:[/yellow] Template 'crawlo.cfg.tmpl' not found.")
364
+
365
+ # 3. 复制并渲染项目包内容
366
+ package_dir = project_dir / project_name
367
+ _copytree_with_templates(template_dir, package_dir, context, template_type, modules)
368
+ console.print(f":white_check_mark: Created project package: [green]{package_dir}[/green]")
369
+
370
+ # 4. 创建 logs 目录
371
+ (project_dir / 'logs').mkdir(exist_ok=True)
372
+ console.print(":white_check_mark: Created logs directory")
373
+
374
+ # 5. 创建 output 目录(用于数据输出)
375
+ (project_dir / 'output').mkdir(exist_ok=True)
376
+ console.print(":white_check_mark: Created output directory")
377
+
378
+ # 成功面板
379
+ success_text = Text.from_markup(f"Project '[bold cyan]{project_name}[/bold cyan]' created successfully!")
380
+ console.print(Panel(success_text, title=":rocket: Success", border_style="green", padding=(1, 2)))
381
+
382
+ # 显示使用的模板类型
383
+ if template_type != 'default':
384
+ console.print(f":information: 使用模板类型: [bold blue]{template_type}[/bold blue] - {TEMPLATE_TYPES[template_type]}")
385
+
386
+ # 显示选择的模块
387
+ if modules:
388
+ console.print(f":information: 选择的模块: [bold blue]{', '.join(modules)}[/bold blue]")
389
+
390
+ # 下一步操作提示(对齐美观 + 语法高亮)
391
+ next_steps = f"""
392
+ [bold]🚀 Next steps:[/bold]
393
+ [blue]cd[/blue] {project_name}
394
+ [blue]crawlo genspider[/blue] example example.com
395
+ [blue]crawlo run[/blue] example
396
+
397
+ [bold]📚 Learn more:[/bold]
398
+ [blue]crawlo list[/blue] # List all spiders
399
+ [blue]crawlo check[/blue] example # Check spider validity
400
+ [blue]crawlo stats[/blue] # View statistics
401
+ """.strip()
402
+ console.print(next_steps)
403
+
404
+ return 0
405
+
406
+ except Exception as e:
407
+ show_error_panel(
408
+ "Creation Failed",
409
+ f"Failed to create project: {e}"
410
+ )
411
+ if project_dir.exists():
412
+ shutil.rmtree(project_dir, ignore_errors=True)
413
+ console.print("[red]:cross_mark: Cleaned up partially created project.[/red]")
414
+ return 1
415
+
416
if __name__ == "__main__":
    # Allow invoking this command module directly as a script.
    import sys

    sys.exit(main(sys.argv[1:]))