crawlo 1.2.6__py3-none-any.whl → 1.2.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo has been flagged as potentially problematic; consult the registry's advisory page for details.

Files changed (209)
  1. crawlo/__init__.py +61 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +60 -60
  4. crawlo/cleaners/data_formatter.py +225 -225
  5. crawlo/cleaners/encoding_converter.py +125 -125
  6. crawlo/cleaners/text_cleaner.py +232 -232
  7. crawlo/cli.py +75 -88
  8. crawlo/commands/__init__.py +14 -14
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/help.py +138 -144
  12. crawlo/commands/list.py +155 -155
  13. crawlo/commands/run.py +323 -323
  14. crawlo/commands/startproject.py +436 -436
  15. crawlo/commands/stats.py +187 -187
  16. crawlo/commands/utils.py +186 -186
  17. crawlo/config.py +312 -312
  18. crawlo/config_validator.py +251 -251
  19. crawlo/core/__init__.py +2 -2
  20. crawlo/core/engine.py +365 -356
  21. crawlo/core/processor.py +40 -40
  22. crawlo/core/scheduler.py +251 -239
  23. crawlo/crawler.py +1099 -1110
  24. crawlo/data/__init__.py +5 -5
  25. crawlo/data/user_agents.py +107 -107
  26. crawlo/downloader/__init__.py +266 -266
  27. crawlo/downloader/aiohttp_downloader.py +228 -221
  28. crawlo/downloader/cffi_downloader.py +256 -256
  29. crawlo/downloader/httpx_downloader.py +259 -259
  30. crawlo/downloader/hybrid_downloader.py +212 -212
  31. crawlo/downloader/playwright_downloader.py +402 -402
  32. crawlo/downloader/selenium_downloader.py +472 -472
  33. crawlo/event.py +11 -11
  34. crawlo/exceptions.py +81 -81
  35. crawlo/extension/__init__.py +39 -38
  36. crawlo/extension/health_check.py +141 -141
  37. crawlo/extension/log_interval.py +57 -57
  38. crawlo/extension/log_stats.py +81 -81
  39. crawlo/extension/logging_extension.py +43 -43
  40. crawlo/extension/memory_monitor.py +104 -104
  41. crawlo/extension/performance_profiler.py +133 -133
  42. crawlo/extension/request_recorder.py +107 -107
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +234 -234
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/items/__init__.py +23 -23
  47. crawlo/items/base.py +21 -21
  48. crawlo/items/fields.py +52 -52
  49. crawlo/items/items.py +104 -104
  50. crawlo/middleware/__init__.py +21 -21
  51. crawlo/middleware/default_header.py +131 -131
  52. crawlo/middleware/download_delay.py +104 -104
  53. crawlo/middleware/middleware_manager.py +136 -135
  54. crawlo/middleware/offsite.py +114 -114
  55. crawlo/middleware/proxy.py +367 -367
  56. crawlo/middleware/request_ignore.py +86 -86
  57. crawlo/middleware/response_code.py +163 -163
  58. crawlo/middleware/response_filter.py +136 -136
  59. crawlo/middleware/retry.py +124 -124
  60. crawlo/mode_manager.py +211 -211
  61. crawlo/network/__init__.py +21 -21
  62. crawlo/network/request.py +338 -338
  63. crawlo/network/response.py +359 -359
  64. crawlo/pipelines/__init__.py +21 -21
  65. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  66. crawlo/pipelines/console_pipeline.py +39 -39
  67. crawlo/pipelines/csv_pipeline.py +316 -316
  68. crawlo/pipelines/database_dedup_pipeline.py +222 -222
  69. crawlo/pipelines/json_pipeline.py +218 -218
  70. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  71. crawlo/pipelines/mongo_pipeline.py +131 -131
  72. crawlo/pipelines/mysql_pipeline.py +317 -317
  73. crawlo/pipelines/pipeline_manager.py +62 -61
  74. crawlo/pipelines/redis_dedup_pipeline.py +166 -165
  75. crawlo/project.py +314 -279
  76. crawlo/queue/pqueue.py +37 -37
  77. crawlo/queue/queue_manager.py +377 -376
  78. crawlo/queue/redis_priority_queue.py +306 -306
  79. crawlo/settings/__init__.py +7 -7
  80. crawlo/settings/default_settings.py +219 -215
  81. crawlo/settings/setting_manager.py +122 -122
  82. crawlo/spider/__init__.py +639 -639
  83. crawlo/stats_collector.py +59 -59
  84. crawlo/subscriber.py +129 -129
  85. crawlo/task_manager.py +30 -30
  86. crawlo/templates/crawlo.cfg.tmpl +10 -10
  87. crawlo/templates/project/__init__.py.tmpl +3 -3
  88. crawlo/templates/project/items.py.tmpl +17 -17
  89. crawlo/templates/project/middlewares.py.tmpl +118 -118
  90. crawlo/templates/project/pipelines.py.tmpl +96 -96
  91. crawlo/templates/project/settings.py.tmpl +288 -288
  92. crawlo/templates/project/settings_distributed.py.tmpl +157 -157
  93. crawlo/templates/project/settings_gentle.py.tmpl +100 -100
  94. crawlo/templates/project/settings_high_performance.py.tmpl +134 -134
  95. crawlo/templates/project/settings_simple.py.tmpl +98 -98
  96. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  97. crawlo/templates/run.py.tmpl +45 -45
  98. crawlo/templates/spider/spider.py.tmpl +143 -143
  99. crawlo/tools/__init__.py +182 -182
  100. crawlo/tools/anti_crawler.py +268 -268
  101. crawlo/tools/authenticated_proxy.py +240 -240
  102. crawlo/tools/data_validator.py +180 -180
  103. crawlo/tools/date_tools.py +35 -35
  104. crawlo/tools/distributed_coordinator.py +386 -386
  105. crawlo/tools/retry_mechanism.py +220 -220
  106. crawlo/tools/scenario_adapter.py +262 -262
  107. crawlo/utils/__init__.py +35 -35
  108. crawlo/utils/batch_processor.py +259 -259
  109. crawlo/utils/controlled_spider_mixin.py +439 -439
  110. crawlo/utils/date_tools.py +290 -290
  111. crawlo/utils/db_helper.py +343 -343
  112. crawlo/utils/enhanced_error_handler.py +356 -356
  113. crawlo/utils/env_config.py +143 -106
  114. crawlo/utils/error_handler.py +123 -123
  115. crawlo/utils/func_tools.py +82 -82
  116. crawlo/utils/large_scale_config.py +286 -286
  117. crawlo/utils/large_scale_helper.py +344 -344
  118. crawlo/utils/log.py +128 -128
  119. crawlo/utils/performance_monitor.py +285 -285
  120. crawlo/utils/queue_helper.py +175 -175
  121. crawlo/utils/redis_connection_pool.py +351 -351
  122. crawlo/utils/redis_key_validator.py +198 -198
  123. crawlo/utils/request.py +267 -267
  124. crawlo/utils/request_serializer.py +218 -218
  125. crawlo/utils/spider_loader.py +61 -61
  126. crawlo/utils/system.py +11 -11
  127. crawlo/utils/tools.py +4 -4
  128. crawlo/utils/url.py +39 -39
  129. {crawlo-1.2.6.dist-info → crawlo-1.2.7.dist-info}/METADATA +764 -764
  130. crawlo-1.2.7.dist-info/RECORD +209 -0
  131. examples/__init__.py +7 -7
  132. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  133. tests/__init__.py +7 -7
  134. tests/advanced_tools_example.py +275 -275
  135. tests/authenticated_proxy_example.py +236 -236
  136. tests/cleaners_example.py +160 -160
  137. tests/config_validation_demo.py +102 -102
  138. tests/controlled_spider_example.py +205 -205
  139. tests/date_tools_example.py +180 -180
  140. tests/dynamic_loading_example.py +523 -523
  141. tests/dynamic_loading_test.py +104 -104
  142. tests/env_config_example.py +133 -133
  143. tests/error_handling_example.py +171 -171
  144. tests/redis_key_validation_demo.py +130 -130
  145. tests/response_improvements_example.py +144 -144
  146. tests/test_advanced_tools.py +148 -148
  147. tests/test_all_redis_key_configs.py +145 -145
  148. tests/test_authenticated_proxy.py +141 -141
  149. tests/test_cleaners.py +54 -54
  150. tests/test_comprehensive.py +146 -146
  151. tests/test_config_consistency.py +81 -0
  152. tests/test_config_validator.py +193 -193
  153. tests/test_crawlo_proxy_integration.py +172 -172
  154. tests/test_date_tools.py +123 -123
  155. tests/test_default_header_middleware.py +158 -158
  156. tests/test_double_crawlo_fix.py +207 -207
  157. tests/test_double_crawlo_fix_simple.py +124 -124
  158. tests/test_download_delay_middleware.py +221 -221
  159. tests/test_downloader_proxy_compatibility.py +268 -268
  160. tests/test_dynamic_downloaders_proxy.py +124 -124
  161. tests/test_dynamic_proxy.py +92 -92
  162. tests/test_dynamic_proxy_config.py +146 -146
  163. tests/test_dynamic_proxy_real.py +109 -109
  164. tests/test_edge_cases.py +303 -303
  165. tests/test_enhanced_error_handler.py +270 -270
  166. tests/test_env_config.py +121 -121
  167. tests/test_error_handler_compatibility.py +112 -112
  168. tests/test_final_validation.py +153 -153
  169. tests/test_framework_env_usage.py +103 -103
  170. tests/test_integration.py +356 -356
  171. tests/test_item_dedup_redis_key.py +122 -122
  172. tests/test_mode_consistency.py +52 -0
  173. tests/test_offsite_middleware.py +221 -221
  174. tests/test_parsel.py +29 -29
  175. tests/test_performance.py +327 -327
  176. tests/test_proxy_api.py +264 -264
  177. tests/test_proxy_health_check.py +32 -32
  178. tests/test_proxy_middleware.py +121 -121
  179. tests/test_proxy_middleware_enhanced.py +216 -216
  180. tests/test_proxy_middleware_integration.py +136 -136
  181. tests/test_proxy_providers.py +56 -56
  182. tests/test_proxy_stats.py +19 -19
  183. tests/test_proxy_strategies.py +59 -59
  184. tests/test_queue_manager_double_crawlo.py +173 -173
  185. tests/test_queue_manager_redis_key.py +176 -176
  186. tests/test_real_scenario_proxy.py +195 -195
  187. tests/test_redis_config.py +28 -28
  188. tests/test_redis_connection_pool.py +294 -294
  189. tests/test_redis_key_naming.py +181 -181
  190. tests/test_redis_key_validator.py +123 -123
  191. tests/test_redis_queue.py +224 -224
  192. tests/test_request_ignore_middleware.py +182 -182
  193. tests/test_request_serialization.py +70 -70
  194. tests/test_response_code_middleware.py +349 -349
  195. tests/test_response_filter_middleware.py +427 -427
  196. tests/test_response_improvements.py +152 -152
  197. tests/test_retry_middleware.py +241 -241
  198. tests/test_scheduler.py +252 -241
  199. tests/test_scheduler_config_update.py +134 -0
  200. tests/test_simple_response.py +61 -61
  201. tests/test_telecom_spider_redis_key.py +205 -205
  202. tests/test_template_content.py +87 -87
  203. tests/test_template_redis_key.py +134 -134
  204. tests/test_tools.py +153 -153
  205. tests/tools_example.py +257 -257
  206. crawlo-1.2.6.dist-info/RECORD +0 -206
  207. {crawlo-1.2.6.dist-info → crawlo-1.2.7.dist-info}/WHEEL +0 -0
  208. {crawlo-1.2.6.dist-info → crawlo-1.2.7.dist-info}/entry_points.txt +0 -0
  209. {crawlo-1.2.6.dist-info → crawlo-1.2.7.dist-info}/top_level.txt +0 -0
@@ -1,437 +1,437 @@
1
- #!/usr/bin/python
2
- # -*- coding: UTF-8 -*-
3
- """
4
- # @Time : 2025-08-31 22:36
5
- # @Author : crawl-coder
6
- # @Desc : 命令行入口:crawlo startproject baidu,创建项目。
7
- """
8
- import shutil
9
- import re
10
- import sys
11
- import os
12
- from pathlib import Path
13
- from typing import Optional, List
14
-
15
- # 添加项目根目录到路径,以便能够导入utils模块
16
- sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
17
-
18
- try:
19
- from rich.console import Console
20
- from rich.panel import Panel
21
- from rich.text import Text
22
- from rich.table import Table
23
- RICH_AVAILABLE = True
24
- except ImportError:
25
- RICH_AVAILABLE = False
26
-
27
- try:
28
- from .utils import show_error_panel, show_success_panel
29
- UTILS_AVAILABLE = True
30
- except ImportError:
31
- # 如果相对导入失败,尝试绝对导入
32
- try:
33
- from crawlo.commands.utils import show_error_panel, show_success_panel
34
- UTILS_AVAILABLE = True
35
- except ImportError:
36
- UTILS_AVAILABLE = False
37
-
38
- # 初始化 rich 控制台(如果可用)
39
- if RICH_AVAILABLE:
40
- console = Console()
41
- else:
42
- # 简单的控制台输出替代
43
- class Console:
44
- def print(self, text):
45
- print(text)
46
- console = Console()
47
-
48
- TEMPLATES_DIR = Path(__file__).parent.parent / 'templates'
49
-
50
- # 可用的模板类型
51
- TEMPLATE_TYPES = {
52
- 'default': '默认模板 - 通用配置,适合大多数项目',
53
- 'simple': '简化模板 - 最小配置,适合快速开始',
54
- 'distributed': '分布式模板 - 针对分布式爬取优化',
55
- 'high-performance': '高性能模板 - 针对大规模高并发优化',
56
- 'gentle': '温和模板 - 低负载配置,对目标网站友好'
57
- }
58
-
59
- # 可选的模块组件
60
- OPTIONAL_MODULES = {
61
- 'mysql': 'MySQL数据库支持',
62
- 'mongodb': 'MongoDB数据库支持',
63
- 'redis': 'Redis支持(分布式队列和去重)',
64
- 'proxy': '代理支持',
65
- 'monitoring': '监控和性能分析',
66
- 'dedup': '去重功能',
67
- 'httpx': 'HttpX下载器',
68
- 'aiohttp': 'AioHttp下载器',
69
- 'curl': 'CurlCffi下载器'
70
- }
71
-
72
-
73
- def show_error_panel(title, content):
74
- """显示错误面板的简单实现"""
75
- if RICH_AVAILABLE:
76
- from rich.panel import Panel
77
- console.print(Panel(content, title=title, border_style="red"))
78
- else:
79
- print(f"❌ {title}")
80
- print(content)
81
-
82
- def show_success_panel(title, content):
83
- """显示成功面板的简单实现"""
84
- if RICH_AVAILABLE:
85
- from rich.panel import Panel
86
- console.print(Panel(content, title=title, border_style="green"))
87
- else:
88
- print(f"✅ {title}")
89
- print(content)
90
-
91
- def _render_template(tmpl_path, context):
92
- """读取模板文件,替换 {{key}} 为 context 中的值"""
93
- with open(tmpl_path, 'r', encoding='utf-8') as f:
94
- content = f.read()
95
- for key, value in context.items():
96
- content = content.replace(f'{{{{{key}}}}}', str(value))
97
- return content
98
-
99
-
100
- def _copytree_with_templates(src, dst, context, template_type='default', modules: List[str] = None):
101
- """
102
- 递归复制目录,将 .tmpl 文件渲染后复制(去除 .tmpl 后缀),其他文件直接复制。
103
- 支持选择性模块复制。
104
- """
105
- src_path = Path(src)
106
- dst_path = Path(dst)
107
- dst_path.mkdir(parents=True, exist_ok=True)
108
-
109
- for item in src_path.rglob('*'):
110
- rel_path = item.relative_to(src_path)
111
- # 对于run.py.tmpl文件,需要特殊处理,将其放到项目根目录
112
- if item.name == 'run.py.tmpl':
113
- dst_item = dst_path.parent / rel_path # 放到项目根目录
114
- else:
115
- dst_item = dst_path / rel_path
116
-
117
- # 检查是否应该包含此文件
118
- path_str = str(rel_path).replace('\\', '/')
119
-
120
- # 所有文件根据模块选择决定是否包含
121
- if not _should_include_file(rel_path, modules):
122
- continue
123
-
124
- if item.is_dir():
125
- dst_item.mkdir(parents=True, exist_ok=True)
126
- else:
127
- if item.suffix == '.tmpl':
128
- rendered_content = None
129
- # 处理特定模板类型的设置文件
130
- if item.name == 'settings.py.tmpl':
131
- # 对于设置文件,根据模板类型选择相应的内容模板
132
- if template_type != 'default':
133
- # 使用特定模板类型的设置文件
134
- template_file_name = f'settings_{template_type}.py.tmpl'
135
- template_file_path = src_path / template_file_name
136
- if template_file_path.exists():
137
- rendered_content = _render_template(template_file_path, context)
138
- else:
139
- # 如果特定模板不存在,使用默认模板
140
- rendered_content = _render_template(item, context)
141
- else:
142
- # 使用默认模板
143
- rendered_content = _render_template(item, context)
144
- # 跳过其他以 settings_ 开头的模板文件,避免重复处理
145
- elif item.name.startswith('settings_') and item.name.endswith('.py.tmpl'):
146
- continue
147
- else:
148
- rendered_content = _render_template(item, context)
149
-
150
- # 确保设置文件始终命名为 settings.py
151
- if item.name == 'settings.py.tmpl':
152
- # 特殊处理设置模板文件,统一生成为 settings.py
153
- final_dst = dst_item.parent / 'settings.py'
154
- # 特殊处理run.py.tmpl文件
155
- elif item.name == 'run.py.tmpl':
156
- final_dst = dst_item.with_suffix('') # 去掉.tmpl后缀
157
- else:
158
- final_dst = dst_item.with_suffix('')
159
-
160
- final_dst.parent.mkdir(parents=True, exist_ok=True)
161
- with open(final_dst, 'w', encoding='utf-8') as f:
162
- f.write(rendered_content)
163
- else:
164
- shutil.copy2(item, dst_item)
165
-
166
-
167
- def _should_include_file(rel_path, modules: List[str]) -> bool:
168
- """
169
- 根据选择的模块决定是否包含文件
170
- """
171
- if modules is None:
172
- # 如果没有指定模块,则包含所有文件
173
- return True
174
-
175
- # 基础文件始终包含
176
- basic_files = [
177
- '__init__.py.tmpl',
178
- 'settings.py.tmpl',
179
- 'spiders/__init__.py.tmpl',
180
- 'items.py.tmpl',
181
- 'middlewares.py.tmpl'
182
- # 移除了'run.py.tmpl',因为它现在在模板根目录
183
- ]
184
-
185
- path_str = str(rel_path).replace('\\', '/')
186
-
187
- # 始终包含基础文件
188
- if path_str in basic_files:
189
- return True
190
-
191
- # 根据模块选择包含特定文件
192
- if 'mysql' in modules and 'mysql' in path_str:
193
- return True
194
- if 'mongodb' in modules and 'mongo' in path_str:
195
- return True
196
- if 'redis' in modules and 'redis' in path_str:
197
- return True
198
- if 'proxy' in modules and 'proxy' in path_str:
199
- return True
200
- if 'monitoring' in modules and ('monitor' in path_str or 'stats' in path_str):
201
- return True
202
- if 'dedup' in modules and 'dedup' in path_str:
203
- return True
204
- if 'httpx' in modules and 'httpx' in path_str:
205
- return True
206
- if 'aiohttp' in modules and 'aiohttp' in path_str:
207
- return True
208
- if 'curl' in modules and 'cffi' in path_str:
209
- return True
210
-
211
- # 默认不包含特定模块文件
212
- return False
213
-
214
-
215
- def validate_project_name(project_name: str) -> tuple[bool, str]:
216
- """
217
- 验证项目名称是否有效
218
-
219
- Returns:
220
- tuple[bool, str]: (是否有效, 错误信息)
221
- """
222
- # 检查是否为空
223
- if not project_name or not project_name.strip():
224
- return False, "项目名称不能为空"
225
-
226
- project_name = project_name.strip()
227
-
228
- # 检查长度
229
- if len(project_name) > 50:
230
- return False, "项目名称太长(最多50个字符)"
231
-
232
- # 检查是否为Python关键字
233
- python_keywords = {
234
- 'False', 'None', 'True', 'and', 'as', 'assert', 'break', 'class',
235
- 'continue', 'def', 'del', 'elif', 'else', 'except', 'finally',
236
- 'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda',
237
- 'nonlocal', 'not', 'or', 'pass', 'raise', 'return', 'try',
238
- 'while', 'with', 'yield'
239
- }
240
- if project_name in python_keywords:
241
- return False, f"'{project_name}' 是Python关键字,不能用作项目名称"
242
-
243
- # 检查是否为有效的Python标识符
244
- if not project_name.isidentifier():
245
- return False, "项目名称必须是有效的Python标识符"
246
-
247
- # 检查格式(建议使用snake_case)
248
- if not re.match(r'^[a-z][a-z0-9_]*$', project_name):
249
- return False, (
250
- "项目名称应以小写字母开头,只能包含小写字母、数字和下划线"
251
- )
252
-
253
- # 检查是否以数字结尾(不推荐)
254
- if project_name[-1].isdigit():
255
- return False, "项目名称不应以数字结尾"
256
-
257
- return True, ""
258
-
259
-
260
- def show_template_options():
261
- """显示可用的模板选项"""
262
- if RICH_AVAILABLE:
263
- table = Table(title="可用模板类型", show_header=True, header_style="bold magenta")
264
- table.add_column("模板类型", style="cyan", no_wrap=True)
265
- table.add_column("描述", style="green")
266
-
267
- for template_type, description in TEMPLATE_TYPES.items():
268
- table.add_row(template_type, description)
269
-
270
- console.print(table)
271
- else:
272
- print("可用模板类型:")
273
- for template_type, description in TEMPLATE_TYPES.items():
274
- print(f" {template_type}: {description}")
275
-
276
-
277
- def show_module_options():
278
- """显示可用的模块选项"""
279
- if RICH_AVAILABLE:
280
- table = Table(title="可选模块组件", show_header=True, header_style="bold magenta")
281
- table.add_column("模块", style="cyan", no_wrap=True)
282
- table.add_column("描述", style="green")
283
-
284
- for module, description in OPTIONAL_MODULES.items():
285
- table.add_row(module, description)
286
-
287
- console.print(table)
288
- else:
289
- print("可选模块组件:")
290
- for module, description in OPTIONAL_MODULES.items():
291
- print(f" {module}: {description}")
292
-
293
-
294
- def main(args):
295
- if len(args) < 1:
296
- console.print("[bold red]错误:[/bold red] 用法: [blue]crawlo startproject[/blue] <项目名称> [模板类型] [--modules 模块1,模块2]")
297
- console.print("💡 示例:")
298
- console.print(" [blue]crawlo startproject[/blue] my_spider_project")
299
- console.print(" [blue]crawlo startproject[/blue] news_crawler simple")
300
- console.print(" [blue]crawlo startproject[/blue] ecommerce_spider distributed --modules mysql,proxy")
301
- show_template_options()
302
- show_module_options()
303
- return 1
304
-
305
- # 解析参数
306
- project_name = args[0]
307
- template_type = 'default'
308
- modules = None
309
-
310
- # 解析可选参数
311
- if len(args) > 1:
312
- for i, arg in enumerate(args[1:], 1):
313
- if arg.startswith('--modules='):
314
- modules_str = arg.split('=', 1)[1]
315
- modules = [m.strip() for m in modules_str.split(',') if m.strip()]
316
- elif arg.startswith('--modules'):
317
- # 处理 --modules module1,module2 格式
318
- if i + 1 < len(args):
319
- modules_str = args[i + 1]
320
- modules = [m.strip() for m in modules_str.split(',') if m.strip()]
321
- elif not arg.startswith('--') and arg in TEMPLATE_TYPES:
322
- template_type = arg
323
-
324
- # 验证模板类型
325
- if template_type not in TEMPLATE_TYPES:
326
- show_error_panel(
327
- "无效的模板类型",
328
- f"不支持模板类型 '[cyan]{template_type}[/cyan]'。\n"
329
- )
330
- show_template_options()
331
- return 1
332
-
333
- # 验证项目名称
334
- is_valid, error_msg = validate_project_name(project_name)
335
- if not is_valid:
336
- show_error_panel(
337
- "无效的项目名称",
338
- f"[cyan]{project_name}[/cyan] 不是有效的项目名称。\n"
339
- f"❌ {error_msg}\n\n"
340
- "💡 项目名称应:\n"
341
- " • 以小写字母开头\n"
342
- " • 只能包含小写字母、数字和下划线\n"
343
- " • 是有效的Python标识符\n"
344
- " • 不能是Python关键字"
345
- )
346
- return 1
347
-
348
- project_dir = Path(project_name)
349
-
350
- if project_dir.exists():
351
- show_error_panel(
352
- "目录已存在",
353
- f"目录 '[cyan]{project_dir}[/cyan]' 已存在。\n"
354
- "💡 请选择不同的项目名称或删除现有目录。"
355
- )
356
- return 1
357
-
358
- context = {'project_name': project_name}
359
- template_dir = TEMPLATES_DIR / 'project'
360
-
361
- try:
362
- # 1. 创建项目根目录
363
- project_dir.mkdir()
364
-
365
- # 2. 渲染 crawlo.cfg.tmpl
366
- cfg_template = TEMPLATES_DIR / 'crawlo.cfg.tmpl'
367
- if cfg_template.exists():
368
- cfg_content = _render_template(cfg_template, context)
369
- (project_dir / 'crawlo.cfg').write_text(cfg_content, encoding='utf-8')
370
- console.print(f":white_check_mark: 已创建 [green]{project_dir / 'crawlo.cfg'}[/green]")
371
- else:
372
- console.print("[yellow]⚠ 警告:[/yellow] 找不到模板 'crawlo.cfg.tmpl'。")
373
-
374
- # 3. 渲染 run.py.tmpl (放在项目根目录)
375
- run_template = TEMPLATES_DIR / 'run.py.tmpl'
376
- if run_template.exists():
377
- run_content = _render_template(run_template, context)
378
- (project_dir / 'run.py').write_text(run_content, encoding='utf-8')
379
- console.print(f":white_check_mark: 已创建 [green]{project_dir / 'run.py'}[/green]")
380
- else:
381
- console.print("[yellow]⚠ 警告:[/yellow] 找不到模板 'run.py.tmpl'。")
382
-
383
- # 4. 复制并渲染项目包内容
384
- package_dir = project_dir / project_name
385
- _copytree_with_templates(template_dir, package_dir, context, template_type, modules)
386
- console.print(f":white_check_mark: 已创建项目包: [green]{package_dir}[/green]")
387
-
388
- # 5. 创建 logs 目录
389
- (project_dir / 'logs').mkdir(exist_ok=True)
390
- console.print(":white_check_mark: 已创建 logs 目录")
391
-
392
- # 6. 创建 output 目录(用于数据输出)
393
- (project_dir / 'output').mkdir(exist_ok=True)
394
- console.print(":white_check_mark: 已创建 output 目录")
395
-
396
- # 成功面板
397
- success_text = Text.from_markup(f"项目 '[bold cyan]{project_name}[/bold cyan]' 创建成功!")
398
- console.print(Panel(success_text, title=":rocket: 成功", border_style="green", padding=(1, 2)))
399
-
400
- # 显示使用的模板类型
401
- if template_type != 'default':
402
- console.print(f":information: 使用模板类型: [bold blue]{template_type}[/bold blue] - {TEMPLATE_TYPES[template_type]}")
403
-
404
- # 显示选择的模块
405
- if modules:
406
- console.print(f":information: 选择的模块: [bold blue]{', '.join(modules)}[/bold blue]")
407
-
408
- # 下一步操作提示(对齐美观 + 语法高亮)
409
- next_steps = f"""
410
- [bold]🚀 下一步操作:[/bold]
411
- [blue]cd[/blue] {project_name}
412
- [blue]crawlo genspider[/blue] example example.com
413
- [blue]crawlo run[/blue] example
414
-
415
- [bold]📚 了解更多:[/bold]
416
- [blue]crawlo list[/blue] # 列出所有爬虫
417
- [blue]crawlo check[/blue] example # 检查爬虫有效性
418
- [blue]crawlo stats[/blue] # 查看统计信息
419
- """.strip()
420
- console.print(next_steps)
421
-
422
- return 0
423
-
424
- except Exception as e:
425
- show_error_panel(
426
- "创建失败",
427
- f"创建项目失败: {e}"
428
- )
429
- if project_dir.exists():
430
- shutil.rmtree(project_dir, ignore_errors=True)
431
- console.print("[red]:cross_mark: 已清理部分创建的项目。[/red]")
432
- return 1
433
-
434
- if __name__ == "__main__":
435
- import sys
436
- exit_code = main(sys.argv[1:])
1
+ #!/usr/bin/python
2
+ # -*- coding: UTF-8 -*-
3
+ """
4
+ # @Time : 2025-08-31 22:36
5
+ # @Author : crawl-coder
6
+ # @Desc : 命令行入口:crawlo startproject baidu,创建项目。
7
+ """
8
+ import shutil
9
+ import re
10
+ import sys
11
+ import os
12
+ from pathlib import Path
13
+ from typing import Optional, List
14
+
15
+ # 添加项目根目录到路径,以便能够导入utils模块
16
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
17
+
18
+ try:
19
+ from rich.console import Console
20
+ from rich.panel import Panel
21
+ from rich.text import Text
22
+ from rich.table import Table
23
+ RICH_AVAILABLE = True
24
+ except ImportError:
25
+ RICH_AVAILABLE = False
26
+
27
+ try:
28
+ from .utils import show_error_panel, show_success_panel
29
+ UTILS_AVAILABLE = True
30
+ except ImportError:
31
+ # 如果相对导入失败,尝试绝对导入
32
+ try:
33
+ from crawlo.commands.utils import show_error_panel, show_success_panel
34
+ UTILS_AVAILABLE = True
35
+ except ImportError:
36
+ UTILS_AVAILABLE = False
37
+
38
+ # 初始化 rich 控制台(如果可用)
39
+ if RICH_AVAILABLE:
40
+ console = Console()
41
+ else:
42
+ # 简单的控制台输出替代
43
+ class Console:
44
+ def print(self, text):
45
+ print(text)
46
+ console = Console()
47
+
48
+ TEMPLATES_DIR = Path(__file__).parent.parent / 'templates'
49
+
50
+ # 可用的模板类型
51
+ TEMPLATE_TYPES = {
52
+ 'default': '默认模板 - 通用配置,适合大多数项目',
53
+ 'simple': '简化模板 - 最小配置,适合快速开始',
54
+ 'distributed': '分布式模板 - 针对分布式爬取优化',
55
+ 'high-performance': '高性能模板 - 针对大规模高并发优化',
56
+ 'gentle': '温和模板 - 低负载配置,对目标网站友好'
57
+ }
58
+
59
+ # 可选的模块组件
60
+ OPTIONAL_MODULES = {
61
+ 'mysql': 'MySQL数据库支持',
62
+ 'mongodb': 'MongoDB数据库支持',
63
+ 'redis': 'Redis支持(分布式队列和去重)',
64
+ 'proxy': '代理支持',
65
+ 'monitoring': '监控和性能分析',
66
+ 'dedup': '去重功能',
67
+ 'httpx': 'HttpX下载器',
68
+ 'aiohttp': 'AioHttp下载器',
69
+ 'curl': 'CurlCffi下载器'
70
+ }
71
+
72
+
73
+ def show_error_panel(title, content):
74
+ """显示错误面板的简单实现"""
75
+ if RICH_AVAILABLE:
76
+ from rich.panel import Panel
77
+ console.print(Panel(content, title=title, border_style="red"))
78
+ else:
79
+ print(f"❌ {title}")
80
+ print(content)
81
+
82
+ def show_success_panel(title, content):
83
+ """显示成功面板的简单实现"""
84
+ if RICH_AVAILABLE:
85
+ from rich.panel import Panel
86
+ console.print(Panel(content, title=title, border_style="green"))
87
+ else:
88
+ print(f"✅ {title}")
89
+ print(content)
90
+
91
+ def _render_template(tmpl_path, context):
92
+ """读取模板文件,替换 {{key}} 为 context 中的值"""
93
+ with open(tmpl_path, 'r', encoding='utf-8') as f:
94
+ content = f.read()
95
+ for key, value in context.items():
96
+ content = content.replace(f'{{{{{key}}}}}', str(value))
97
+ return content
98
+
99
+
100
+ def _copytree_with_templates(src, dst, context, template_type='default', modules: List[str] = None):
101
+ """
102
+ 递归复制目录,将 .tmpl 文件渲染后复制(去除 .tmpl 后缀),其他文件直接复制。
103
+ 支持选择性模块复制。
104
+ """
105
+ src_path = Path(src)
106
+ dst_path = Path(dst)
107
+ dst_path.mkdir(parents=True, exist_ok=True)
108
+
109
+ for item in src_path.rglob('*'):
110
+ rel_path = item.relative_to(src_path)
111
+ # 对于run.py.tmpl文件,需要特殊处理,将其放到项目根目录
112
+ if item.name == 'run.py.tmpl':
113
+ dst_item = dst_path.parent / rel_path # 放到项目根目录
114
+ else:
115
+ dst_item = dst_path / rel_path
116
+
117
+ # 检查是否应该包含此文件
118
+ path_str = str(rel_path).replace('\\', '/')
119
+
120
+ # 所有文件根据模块选择决定是否包含
121
+ if not _should_include_file(rel_path, modules):
122
+ continue
123
+
124
+ if item.is_dir():
125
+ dst_item.mkdir(parents=True, exist_ok=True)
126
+ else:
127
+ if item.suffix == '.tmpl':
128
+ rendered_content = None
129
+ # 处理特定模板类型的设置文件
130
+ if item.name == 'settings.py.tmpl':
131
+ # 对于设置文件,根据模板类型选择相应的内容模板
132
+ if template_type != 'default':
133
+ # 使用特定模板类型的设置文件
134
+ template_file_name = f'settings_{template_type}.py.tmpl'
135
+ template_file_path = src_path / template_file_name
136
+ if template_file_path.exists():
137
+ rendered_content = _render_template(template_file_path, context)
138
+ else:
139
+ # 如果特定模板不存在,使用默认模板
140
+ rendered_content = _render_template(item, context)
141
+ else:
142
+ # 使用默认模板
143
+ rendered_content = _render_template(item, context)
144
+ # 跳过其他以 settings_ 开头的模板文件,避免重复处理
145
+ elif item.name.startswith('settings_') and item.name.endswith('.py.tmpl'):
146
+ continue
147
+ else:
148
+ rendered_content = _render_template(item, context)
149
+
150
+ # 确保设置文件始终命名为 settings.py
151
+ if item.name == 'settings.py.tmpl':
152
+ # 特殊处理设置模板文件,统一生成为 settings.py
153
+ final_dst = dst_item.parent / 'settings.py'
154
+ # 特殊处理run.py.tmpl文件
155
+ elif item.name == 'run.py.tmpl':
156
+ final_dst = dst_item.with_suffix('') # 去掉.tmpl后缀
157
+ else:
158
+ final_dst = dst_item.with_suffix('')
159
+
160
+ final_dst.parent.mkdir(parents=True, exist_ok=True)
161
+ with open(final_dst, 'w', encoding='utf-8') as f:
162
+ f.write(rendered_content)
163
+ else:
164
+ shutil.copy2(item, dst_item)
165
+
166
+
167
+ def _should_include_file(rel_path, modules: List[str]) -> bool:
168
+ """
169
+ 根据选择的模块决定是否包含文件
170
+ """
171
+ if modules is None:
172
+ # 如果没有指定模块,则包含所有文件
173
+ return True
174
+
175
+ # 基础文件始终包含
176
+ basic_files = [
177
+ '__init__.py.tmpl',
178
+ 'settings.py.tmpl',
179
+ 'spiders/__init__.py.tmpl',
180
+ 'items.py.tmpl',
181
+ 'middlewares.py.tmpl'
182
+ # 移除了'run.py.tmpl',因为它现在在模板根目录
183
+ ]
184
+
185
+ path_str = str(rel_path).replace('\\', '/')
186
+
187
+ # 始终包含基础文件
188
+ if path_str in basic_files:
189
+ return True
190
+
191
+ # 根据模块选择包含特定文件
192
+ if 'mysql' in modules and 'mysql' in path_str:
193
+ return True
194
+ if 'mongodb' in modules and 'mongo' in path_str:
195
+ return True
196
+ if 'redis' in modules and 'redis' in path_str:
197
+ return True
198
+ if 'proxy' in modules and 'proxy' in path_str:
199
+ return True
200
+ if 'monitoring' in modules and ('monitor' in path_str or 'stats' in path_str):
201
+ return True
202
+ if 'dedup' in modules and 'dedup' in path_str:
203
+ return True
204
+ if 'httpx' in modules and 'httpx' in path_str:
205
+ return True
206
+ if 'aiohttp' in modules and 'aiohttp' in path_str:
207
+ return True
208
+ if 'curl' in modules and 'cffi' in path_str:
209
+ return True
210
+
211
+ # 默认不包含特定模块文件
212
+ return False
213
+
214
+
215
+ def validate_project_name(project_name: str) -> tuple[bool, str]:
216
+ """
217
+ 验证项目名称是否有效
218
+
219
+ Returns:
220
+ tuple[bool, str]: (是否有效, 错误信息)
221
+ """
222
+ # 检查是否为空
223
+ if not project_name or not project_name.strip():
224
+ return False, "项目名称不能为空"
225
+
226
+ project_name = project_name.strip()
227
+
228
+ # 检查长度
229
+ if len(project_name) > 50:
230
+ return False, "项目名称太长(最多50个字符)"
231
+
232
+ # 检查是否为Python关键字
233
+ python_keywords = {
234
+ 'False', 'None', 'True', 'and', 'as', 'assert', 'break', 'class',
235
+ 'continue', 'def', 'del', 'elif', 'else', 'except', 'finally',
236
+ 'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda',
237
+ 'nonlocal', 'not', 'or', 'pass', 'raise', 'return', 'try',
238
+ 'while', 'with', 'yield'
239
+ }
240
+ if project_name in python_keywords:
241
+ return False, f"'{project_name}' 是Python关键字,不能用作项目名称"
242
+
243
+ # 检查是否为有效的Python标识符
244
+ if not project_name.isidentifier():
245
+ return False, "项目名称必须是有效的Python标识符"
246
+
247
+ # 检查格式(建议使用snake_case)
248
+ if not re.match(r'^[a-z][a-z0-9_]*$', project_name):
249
+ return False, (
250
+ "项目名称应以小写字母开头,只能包含小写字母、数字和下划线"
251
+ )
252
+
253
+ # 检查是否以数字结尾(不推荐)
254
+ if project_name[-1].isdigit():
255
+ return False, "项目名称不应以数字结尾"
256
+
257
+ return True, ""
258
+
259
+
260
+ def show_template_options():
261
+ """显示可用的模板选项"""
262
+ if RICH_AVAILABLE:
263
+ table = Table(title="可用模板类型", show_header=True, header_style="bold magenta")
264
+ table.add_column("模板类型", style="cyan", no_wrap=True)
265
+ table.add_column("描述", style="green")
266
+
267
+ for template_type, description in TEMPLATE_TYPES.items():
268
+ table.add_row(template_type, description)
269
+
270
+ console.print(table)
271
+ else:
272
+ print("可用模板类型:")
273
+ for template_type, description in TEMPLATE_TYPES.items():
274
+ print(f" {template_type}: {description}")
275
+
276
+
277
def show_module_options():
    """Print the optional module components (rich table if possible)."""
    if not RICH_AVAILABLE:
        # Plain-text fallback when rich is not installed.
        print("可选模块组件:")
        for name, desc in OPTIONAL_MODULES.items():
            print(f"  {name}: {desc}")
        return

    table = Table(title="可选模块组件", show_header=True, header_style="bold magenta")
    table.add_column("模块", style="cyan", no_wrap=True)
    table.add_column("描述", style="green")
    for name, desc in OPTIONAL_MODULES.items():
        table.add_row(name, desc)
    console.print(table)
292
+
293
+
294
def main(args):
    """
    Entry point for the ``crawlo startproject`` command.

    Usage::

        crawlo startproject <project_name> [template_type] [--modules m1,m2]

    Validates the arguments, then scaffolds a new project: config file,
    run script, project package, plus ``logs/`` and ``output/`` directories.

    Args:
        args: Command-line arguments after the subcommand name.

    Returns:
        int: 0 on success, 1 on validation or creation failure.
    """
    if len(args) < 1:
        console.print("[bold red]错误:[/bold red] 用法: [blue]crawlo startproject[/blue] <项目名称> [模板类型] [--modules 模块1,模块2]")
        console.print("💡 示例:")
        console.print("  [blue]crawlo startproject[/blue] my_spider_project")
        console.print("  [blue]crawlo startproject[/blue] news_crawler simple")
        console.print("  [blue]crawlo startproject[/blue] ecommerce_spider distributed --modules mysql,proxy")
        show_template_options()
        show_module_options()
        return 1

    # --- Argument parsing ------------------------------------------------
    project_name = args[0]
    template_type = 'default'
    modules = None

    idx = 1
    while idx < len(args):
        arg = args[idx]
        if arg.startswith('--modules='):
            # --modules=m1,m2 form.
            modules_str = arg.split('=', 1)[1]
            modules = [m.strip() for m in modules_str.split(',') if m.strip()]
        elif arg == '--modules':
            # --modules m1,m2 form: consume the value argument as well, so
            # it is not re-parsed as a template type on the next iteration.
            # (Exact match instead of startswith, so '--modulesX' no longer
            # silently swallows the next argument.)
            if idx + 1 < len(args):
                modules_str = args[idx + 1]
                modules = [m.strip() for m in modules_str.split(',') if m.strip()]
                idx += 1
        elif not arg.startswith('--'):
            # Any bare positional argument is taken as the template type.
            # Unknown values are rejected by the check below; previously an
            # unknown value was silently ignored, which left that error
            # branch unreachable.
            template_type = arg
        idx += 1

    # Validate the template type.
    if template_type not in TEMPLATE_TYPES:
        show_error_panel(
            "无效的模板类型",
            f"不支持模板类型 '[cyan]{template_type}[/cyan]'。\n"
        )
        show_template_options()
        return 1

    # Validate the project name.
    is_valid, error_msg = validate_project_name(project_name)
    if not is_valid:
        show_error_panel(
            "无效的项目名称",
            f"[cyan]{project_name}[/cyan] 不是有效的项目名称。\n"
            f"❌ {error_msg}\n\n"
            "💡 项目名称应:\n"
            "  • 以小写字母开头\n"
            "  • 只能包含小写字母、数字和下划线\n"
            "  • 是有效的Python标识符\n"
            "  • 不能是Python关键字"
        )
        return 1

    project_dir = Path(project_name)

    if project_dir.exists():
        show_error_panel(
            "目录已存在",
            f"目录 '[cyan]{project_dir}[/cyan]' 已存在。\n"
            "💡 请选择不同的项目名称或删除现有目录。"
        )
        return 1

    context = {'project_name': project_name}
    template_dir = TEMPLATES_DIR / 'project'

    try:
        # 1. Create the project root directory.
        project_dir.mkdir()

        # 2. Render crawlo.cfg.tmpl.
        cfg_template = TEMPLATES_DIR / 'crawlo.cfg.tmpl'
        if cfg_template.exists():
            cfg_content = _render_template(cfg_template, context)
            (project_dir / 'crawlo.cfg').write_text(cfg_content, encoding='utf-8')
            console.print(f":white_check_mark: 已创建 [green]{project_dir / 'crawlo.cfg'}[/green]")
        else:
            console.print("[yellow]⚠ 警告:[/yellow] 找不到模板 'crawlo.cfg.tmpl'。")

        # 3. Render run.py.tmpl into the project root.
        run_template = TEMPLATES_DIR / 'run.py.tmpl'
        if run_template.exists():
            run_content = _render_template(run_template, context)
            (project_dir / 'run.py').write_text(run_content, encoding='utf-8')
            console.print(f":white_check_mark: 已创建 [green]{project_dir / 'run.py'}[/green]")
        else:
            console.print("[yellow]⚠ 警告:[/yellow] 找不到模板 'run.py.tmpl'。")

        # 4. Copy and render the project package contents.
        package_dir = project_dir / project_name
        _copytree_with_templates(template_dir, package_dir, context, template_type, modules)
        console.print(f":white_check_mark: 已创建项目包: [green]{package_dir}[/green]")

        # 5. Create the logs directory.
        (project_dir / 'logs').mkdir(exist_ok=True)
        console.print(":white_check_mark: 已创建 logs 目录")

        # 6. Create the output directory (for exported data).
        (project_dir / 'output').mkdir(exist_ok=True)
        console.print(":white_check_mark: 已创建 output 目录")

        # Success panel.
        success_text = Text.from_markup(f"项目 '[bold cyan]{project_name}[/bold cyan]' 创建成功!")
        console.print(Panel(success_text, title=":rocket: 成功", border_style="green", padding=(1, 2)))

        # Report the template type actually used.
        if template_type != 'default':
            console.print(f":information: 使用模板类型: [bold blue]{template_type}[/bold blue] - {TEMPLATE_TYPES[template_type]}")

        # Report the selected optional modules.
        if modules:
            console.print(f":information: 选择的模块: [bold blue]{', '.join(modules)}[/bold blue]")

        # Next-step hints (aligned, with markup highlighting).
        next_steps = f"""
[bold]🚀 下一步操作:[/bold]
  [blue]cd[/blue] {project_name}
  [blue]crawlo genspider[/blue] example example.com
  [blue]crawlo run[/blue] example

[bold]📚 了解更多:[/bold]
  [blue]crawlo list[/blue]  # 列出所有爬虫
  [blue]crawlo check[/blue] example  # 检查爬虫有效性
  [blue]crawlo stats[/blue]  # 查看统计信息
""".strip()
        console.print(next_steps)

        return 0

    except Exception as e:
        show_error_panel(
            "创建失败",
            f"创建项目失败: {e}"
        )
        # Best-effort cleanup of the partially created project tree.
        if project_dir.exists():
            shutil.rmtree(project_dir, ignore_errors=True)
            console.print("[red]:cross_mark: 已清理部分创建的项目。[/red]")
        return 1
433
+
434
if __name__ == "__main__":
    # Allow running this module directly: forward CLI args to main() and
    # propagate its return value as the process exit status.
    import sys

    sys.exit(main(sys.argv[1:]))