crawlo 1.2.6-py3-none-any.whl → 1.2.8-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crawlo might be problematic. Click here for more details.
- crawlo/__init__.py +61 -61
- crawlo/__version__.py +1 -1
- crawlo/cleaners/__init__.py +60 -60
- crawlo/cleaners/data_formatter.py +225 -225
- crawlo/cleaners/encoding_converter.py +125 -125
- crawlo/cleaners/text_cleaner.py +232 -232
- crawlo/cli.py +75 -88
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -144
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +323 -323
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +186 -186
- crawlo/config.py +312 -312
- crawlo/config_validator.py +251 -251
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +365 -356
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +251 -239
- crawlo/crawler.py +1099 -1110
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +107 -107
- crawlo/downloader/__init__.py +266 -266
- crawlo/downloader/aiohttp_downloader.py +228 -221
- crawlo/downloader/cffi_downloader.py +256 -256
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +39 -38
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +43 -43
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +234 -234
- crawlo/filters/memory_filter.py +269 -269
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +131 -131
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +136 -135
- crawlo/middleware/offsite.py +114 -114
- crawlo/middleware/proxy.py +367 -367
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +163 -163
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +211 -211
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +338 -338
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +222 -222
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +317 -317
- crawlo/pipelines/pipeline_manager.py +62 -61
- crawlo/pipelines/redis_dedup_pipeline.py +166 -165
- crawlo/project.py +314 -279
- crawlo/queue/pqueue.py +37 -37
- crawlo/queue/queue_manager.py +377 -376
- crawlo/queue/redis_priority_queue.py +306 -306
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +219 -215
- crawlo/settings/setting_manager.py +122 -122
- crawlo/spider/__init__.py +639 -639
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +30 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +288 -288
- crawlo/templates/project/settings_distributed.py.tmpl +157 -157
- crawlo/templates/project/settings_gentle.py.tmpl +100 -100
- crawlo/templates/project/settings_high_performance.py.tmpl +134 -134
- crawlo/templates/project/settings_simple.py.tmpl +98 -98
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/run.py.tmpl +47 -45
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/tools/__init__.py +182 -182
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +35 -35
- crawlo/tools/distributed_coordinator.py +386 -386
- crawlo/tools/retry_mechanism.py +220 -220
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/utils/__init__.py +35 -35
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/date_tools.py +290 -290
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +143 -106
- crawlo/utils/error_handler.py +123 -123
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +128 -128
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +351 -351
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +218 -218
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- {crawlo-1.2.6.dist-info → crawlo-1.2.8.dist-info}/METADATA +764 -764
- crawlo-1.2.8.dist-info/RECORD +209 -0
- examples/__init__.py +7 -7
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +236 -236
- tests/cleaners_example.py +160 -160
- tests/config_validation_demo.py +102 -102
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/redis_key_validation_demo.py +130 -130
- tests/response_improvements_example.py +144 -144
- tests/test_advanced_tools.py +148 -148
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_cleaners.py +54 -54
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +81 -0
- tests/test_config_validator.py +193 -193
- tests/test_crawlo_proxy_integration.py +172 -172
- tests/test_date_tools.py +123 -123
- tests/test_default_header_middleware.py +158 -158
- tests/test_double_crawlo_fix.py +207 -207
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +103 -103
- tests/test_integration.py +356 -356
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_mode_consistency.py +52 -0
- tests/test_offsite_middleware.py +221 -221
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +176 -176
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +241 -241
- tests/test_scheduler.py +252 -241
- tests/test_scheduler_config_update.py +134 -0
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +153 -153
- tests/tools_example.py +257 -257
- crawlo-1.2.6.dist-info/RECORD +0 -206
- {crawlo-1.2.6.dist-info → crawlo-1.2.8.dist-info}/WHEEL +0 -0
- {crawlo-1.2.6.dist-info → crawlo-1.2.8.dist-info}/entry_points.txt +0 -0
- {crawlo-1.2.6.dist-info → crawlo-1.2.8.dist-info}/top_level.txt +0 -0
crawlo/commands/list.py
CHANGED
|
@@ -1,156 +1,156 @@
|
|
|
1
|
-
#!/usr/bin/python
|
|
2
|
-
# -*- coding: UTF-8 -*-
|
|
3
|
-
"""
|
|
4
|
-
# @Time : 2025-08-31 22:33
|
|
5
|
-
# @Author : crawl-coder
|
|
6
|
-
# @Desc : 命令行入口:crawlo list,用于列出所有已注册的爬虫
|
|
7
|
-
"""
|
|
8
|
-
import sys
|
|
9
|
-
from pathlib import Path
|
|
10
|
-
from importlib import import_module
|
|
11
|
-
|
|
12
|
-
from rich.console import Console
|
|
13
|
-
from rich.table import Table
|
|
14
|
-
from rich.panel import Panel
|
|
15
|
-
from rich.text import Text
|
|
16
|
-
from rich import box
|
|
17
|
-
|
|
18
|
-
from crawlo.crawler import CrawlerProcess
|
|
19
|
-
from crawlo.utils.log import get_logger
|
|
20
|
-
from .utils import validate_project_environment, show_error_panel
|
|
21
|
-
|
|
22
|
-
logger = get_logger(__name__)
|
|
23
|
-
console = Console()
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
def main(args):
|
|
27
|
-
"""
|
|
28
|
-
主函数:列出所有可用爬虫
|
|
29
|
-
用法: crawlo list [--json]
|
|
30
|
-
"""
|
|
31
|
-
show_json = "--json" in args
|
|
32
|
-
|
|
33
|
-
# 过滤掉参数后检查是否有额外参数
|
|
34
|
-
filtered_args = [arg for arg in args if not arg.startswith('--')]
|
|
35
|
-
if filtered_args:
|
|
36
|
-
if show_json:
|
|
37
|
-
console.print_json(data={"success": False, "error": "用法: crawlo list [--json]"})
|
|
38
|
-
else:
|
|
39
|
-
console.print("[bold red]❌ 错误:[/bold red] 用法: [blue]crawlo list[/blue] [--json]")
|
|
40
|
-
return 1
|
|
41
|
-
|
|
42
|
-
try:
|
|
43
|
-
# 验证项目环境
|
|
44
|
-
is_valid, project_package, error_msg = validate_project_environment()
|
|
45
|
-
if not is_valid:
|
|
46
|
-
if show_json:
|
|
47
|
-
console.print_json(data={"success": False, "error": error_msg})
|
|
48
|
-
else:
|
|
49
|
-
show_error_panel("非Crawlo项目", error_msg)
|
|
50
|
-
return 1
|
|
51
|
-
|
|
52
|
-
# 初始化 CrawlerProcess 并加载爬虫模块
|
|
53
|
-
spider_modules = [f"{project_package}.spiders"]
|
|
54
|
-
process = CrawlerProcess(spider_modules=spider_modules)
|
|
55
|
-
|
|
56
|
-
# 获取所有爬虫名称
|
|
57
|
-
spider_names = process.get_spider_names()
|
|
58
|
-
if not spider_names:
|
|
59
|
-
if show_json:
|
|
60
|
-
console.print_json(data={
|
|
61
|
-
"success": True,
|
|
62
|
-
"spiders": [],
|
|
63
|
-
"message": "项目中未找到爬虫"
|
|
64
|
-
})
|
|
65
|
-
else:
|
|
66
|
-
console.print(Panel(
|
|
67
|
-
Text.from_markup(
|
|
68
|
-
":envelope_with_arrow: [bold]未找到爬虫[/bold] 于 '[cyan]spiders/[/cyan]' 目录。\n\n"
|
|
69
|
-
"[bold]💡 确保:[/bold]\n"
|
|
70
|
-
" • 爬虫类继承自 [blue]`crawlo.spider.Spider`[/blue]\n"
|
|
71
|
-
" • 每个爬虫都有 [green]`name`[/green] 属性\n"
|
|
72
|
-
" • 爬虫已在 [cyan]`spiders/__init__.py`[/cyan] 中导入 (如果使用包)"
|
|
73
|
-
),
|
|
74
|
-
title="📭 未找到爬虫",
|
|
75
|
-
border_style="yellow",
|
|
76
|
-
padding=(1, 2)
|
|
77
|
-
))
|
|
78
|
-
return 0
|
|
79
|
-
|
|
80
|
-
# 准备爬虫信息
|
|
81
|
-
spider_info = []
|
|
82
|
-
for name in sorted(spider_names):
|
|
83
|
-
spider_cls = process.get_spider_class(name)
|
|
84
|
-
module_name = spider_cls.__module__.replace(f"{project_package}.", "")
|
|
85
|
-
|
|
86
|
-
# 获取额外信息
|
|
87
|
-
start_urls_count = len(getattr(spider_cls, 'start_urls', []))
|
|
88
|
-
allowed_domains = getattr(spider_cls, 'allowed_domains', [])
|
|
89
|
-
custom_settings = getattr(spider_cls, 'custom_settings', {})
|
|
90
|
-
|
|
91
|
-
spider_info.append({
|
|
92
|
-
"name": name,
|
|
93
|
-
"class": spider_cls.__name__,
|
|
94
|
-
"module": module_name,
|
|
95
|
-
"start_urls_count": start_urls_count,
|
|
96
|
-
"allowed_domains": allowed_domains,
|
|
97
|
-
"has_custom_settings": bool(custom_settings)
|
|
98
|
-
})
|
|
99
|
-
|
|
100
|
-
# JSON 输出
|
|
101
|
-
if show_json:
|
|
102
|
-
console.print_json(data={
|
|
103
|
-
"success": True,
|
|
104
|
-
"count": len(spider_info),
|
|
105
|
-
"spiders": spider_info
|
|
106
|
-
})
|
|
107
|
-
return 0
|
|
108
|
-
|
|
109
|
-
# 表格输出
|
|
110
|
-
table = Table(
|
|
111
|
-
title=f"📋 找到 {len(spider_names)} 个爬虫",
|
|
112
|
-
box=box.ROUNDED,
|
|
113
|
-
show_header=True,
|
|
114
|
-
header_style="bold magenta",
|
|
115
|
-
title_style="bold green"
|
|
116
|
-
)
|
|
117
|
-
table.add_column("名称", style="cyan", no_wrap=True)
|
|
118
|
-
table.add_column("类名", style="green")
|
|
119
|
-
table.add_column("模块", style="dim")
|
|
120
|
-
table.add_column("URL数", style="blue", justify="center")
|
|
121
|
-
table.add_column("域名", style="yellow")
|
|
122
|
-
table.add_column("自定义设置", style="magenta", justify="center")
|
|
123
|
-
|
|
124
|
-
for info in spider_info:
|
|
125
|
-
domains_display = ", ".join(info["allowed_domains"][:2]) # 显示前2个域名
|
|
126
|
-
if len(info["allowed_domains"]) > 2:
|
|
127
|
-
domains_display += f" (+{len(info['allowed_domains'])-2})"
|
|
128
|
-
elif not domains_display:
|
|
129
|
-
domains_display = "-"
|
|
130
|
-
|
|
131
|
-
table.add_row(
|
|
132
|
-
info["name"],
|
|
133
|
-
info["class"],
|
|
134
|
-
info["module"],
|
|
135
|
-
str(info["start_urls_count"]),
|
|
136
|
-
domains_display,
|
|
137
|
-
"✓" if info["has_custom_settings"] else "-"
|
|
138
|
-
)
|
|
139
|
-
|
|
140
|
-
console.print(table)
|
|
141
|
-
|
|
142
|
-
# 显示使用提示
|
|
143
|
-
console.print("\n[bold]🚀 下一步操作:[/bold]")
|
|
144
|
-
console.print(" [blue]crawlo run[/blue] <爬虫名称> # 运行指定爬虫")
|
|
145
|
-
console.print(" [blue]crawlo run[/blue] all # 运行所有爬虫")
|
|
146
|
-
console.print(" [blue]crawlo check[/blue] <爬虫名称> # 检查爬虫有效性")
|
|
147
|
-
|
|
148
|
-
return 0
|
|
149
|
-
|
|
150
|
-
except Exception as e:
|
|
151
|
-
if show_json:
|
|
152
|
-
console.print_json(data={"success": False, "error": str(e)})
|
|
153
|
-
else:
|
|
154
|
-
console.print(f"[bold red]❌ 意外错误:[/bold red] {e}")
|
|
155
|
-
logger.exception("执行 'crawlo list' 时发生异常")
|
|
1
|
+
#!/usr/bin/python
|
|
2
|
+
# -*- coding: UTF-8 -*-
|
|
3
|
+
"""
|
|
4
|
+
# @Time : 2025-08-31 22:33
|
|
5
|
+
# @Author : crawl-coder
|
|
6
|
+
# @Desc : 命令行入口:crawlo list,用于列出所有已注册的爬虫
|
|
7
|
+
"""
|
|
8
|
+
import sys
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from importlib import import_module
|
|
11
|
+
|
|
12
|
+
from rich.console import Console
|
|
13
|
+
from rich.table import Table
|
|
14
|
+
from rich.panel import Panel
|
|
15
|
+
from rich.text import Text
|
|
16
|
+
from rich import box
|
|
17
|
+
|
|
18
|
+
from crawlo.crawler import CrawlerProcess
|
|
19
|
+
from crawlo.utils.log import get_logger
|
|
20
|
+
from .utils import validate_project_environment, show_error_panel
|
|
21
|
+
|
|
22
|
+
logger = get_logger(__name__)
|
|
23
|
+
console = Console()
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def main(args):
|
|
27
|
+
"""
|
|
28
|
+
主函数:列出所有可用爬虫
|
|
29
|
+
用法: crawlo list [--json]
|
|
30
|
+
"""
|
|
31
|
+
show_json = "--json" in args
|
|
32
|
+
|
|
33
|
+
# 过滤掉参数后检查是否有额外参数
|
|
34
|
+
filtered_args = [arg for arg in args if not arg.startswith('--')]
|
|
35
|
+
if filtered_args:
|
|
36
|
+
if show_json:
|
|
37
|
+
console.print_json(data={"success": False, "error": "用法: crawlo list [--json]"})
|
|
38
|
+
else:
|
|
39
|
+
console.print("[bold red]❌ 错误:[/bold red] 用法: [blue]crawlo list[/blue] [--json]")
|
|
40
|
+
return 1
|
|
41
|
+
|
|
42
|
+
try:
|
|
43
|
+
# 验证项目环境
|
|
44
|
+
is_valid, project_package, error_msg = validate_project_environment()
|
|
45
|
+
if not is_valid:
|
|
46
|
+
if show_json:
|
|
47
|
+
console.print_json(data={"success": False, "error": error_msg})
|
|
48
|
+
else:
|
|
49
|
+
show_error_panel("非Crawlo项目", error_msg)
|
|
50
|
+
return 1
|
|
51
|
+
|
|
52
|
+
# 初始化 CrawlerProcess 并加载爬虫模块
|
|
53
|
+
spider_modules = [f"{project_package}.spiders"]
|
|
54
|
+
process = CrawlerProcess(spider_modules=spider_modules)
|
|
55
|
+
|
|
56
|
+
# 获取所有爬虫名称
|
|
57
|
+
spider_names = process.get_spider_names()
|
|
58
|
+
if not spider_names:
|
|
59
|
+
if show_json:
|
|
60
|
+
console.print_json(data={
|
|
61
|
+
"success": True,
|
|
62
|
+
"spiders": [],
|
|
63
|
+
"message": "项目中未找到爬虫"
|
|
64
|
+
})
|
|
65
|
+
else:
|
|
66
|
+
console.print(Panel(
|
|
67
|
+
Text.from_markup(
|
|
68
|
+
":envelope_with_arrow: [bold]未找到爬虫[/bold] 于 '[cyan]spiders/[/cyan]' 目录。\n\n"
|
|
69
|
+
"[bold]💡 确保:[/bold]\n"
|
|
70
|
+
" • 爬虫类继承自 [blue]`crawlo.spider.Spider`[/blue]\n"
|
|
71
|
+
" • 每个爬虫都有 [green]`name`[/green] 属性\n"
|
|
72
|
+
" • 爬虫已在 [cyan]`spiders/__init__.py`[/cyan] 中导入 (如果使用包)"
|
|
73
|
+
),
|
|
74
|
+
title="📭 未找到爬虫",
|
|
75
|
+
border_style="yellow",
|
|
76
|
+
padding=(1, 2)
|
|
77
|
+
))
|
|
78
|
+
return 0
|
|
79
|
+
|
|
80
|
+
# 准备爬虫信息
|
|
81
|
+
spider_info = []
|
|
82
|
+
for name in sorted(spider_names):
|
|
83
|
+
spider_cls = process.get_spider_class(name)
|
|
84
|
+
module_name = spider_cls.__module__.replace(f"{project_package}.", "")
|
|
85
|
+
|
|
86
|
+
# 获取额外信息
|
|
87
|
+
start_urls_count = len(getattr(spider_cls, 'start_urls', []))
|
|
88
|
+
allowed_domains = getattr(spider_cls, 'allowed_domains', [])
|
|
89
|
+
custom_settings = getattr(spider_cls, 'custom_settings', {})
|
|
90
|
+
|
|
91
|
+
spider_info.append({
|
|
92
|
+
"name": name,
|
|
93
|
+
"class": spider_cls.__name__,
|
|
94
|
+
"module": module_name,
|
|
95
|
+
"start_urls_count": start_urls_count,
|
|
96
|
+
"allowed_domains": allowed_domains,
|
|
97
|
+
"has_custom_settings": bool(custom_settings)
|
|
98
|
+
})
|
|
99
|
+
|
|
100
|
+
# JSON 输出
|
|
101
|
+
if show_json:
|
|
102
|
+
console.print_json(data={
|
|
103
|
+
"success": True,
|
|
104
|
+
"count": len(spider_info),
|
|
105
|
+
"spiders": spider_info
|
|
106
|
+
})
|
|
107
|
+
return 0
|
|
108
|
+
|
|
109
|
+
# 表格输出
|
|
110
|
+
table = Table(
|
|
111
|
+
title=f"📋 找到 {len(spider_names)} 个爬虫",
|
|
112
|
+
box=box.ROUNDED,
|
|
113
|
+
show_header=True,
|
|
114
|
+
header_style="bold magenta",
|
|
115
|
+
title_style="bold green"
|
|
116
|
+
)
|
|
117
|
+
table.add_column("名称", style="cyan", no_wrap=True)
|
|
118
|
+
table.add_column("类名", style="green")
|
|
119
|
+
table.add_column("模块", style="dim")
|
|
120
|
+
table.add_column("URL数", style="blue", justify="center")
|
|
121
|
+
table.add_column("域名", style="yellow")
|
|
122
|
+
table.add_column("自定义设置", style="magenta", justify="center")
|
|
123
|
+
|
|
124
|
+
for info in spider_info:
|
|
125
|
+
domains_display = ", ".join(info["allowed_domains"][:2]) # 显示前2个域名
|
|
126
|
+
if len(info["allowed_domains"]) > 2:
|
|
127
|
+
domains_display += f" (+{len(info['allowed_domains'])-2})"
|
|
128
|
+
elif not domains_display:
|
|
129
|
+
domains_display = "-"
|
|
130
|
+
|
|
131
|
+
table.add_row(
|
|
132
|
+
info["name"],
|
|
133
|
+
info["class"],
|
|
134
|
+
info["module"],
|
|
135
|
+
str(info["start_urls_count"]),
|
|
136
|
+
domains_display,
|
|
137
|
+
"✓" if info["has_custom_settings"] else "-"
|
|
138
|
+
)
|
|
139
|
+
|
|
140
|
+
console.print(table)
|
|
141
|
+
|
|
142
|
+
# 显示使用提示
|
|
143
|
+
console.print("\n[bold]🚀 下一步操作:[/bold]")
|
|
144
|
+
console.print(" [blue]crawlo run[/blue] <爬虫名称> # 运行指定爬虫")
|
|
145
|
+
console.print(" [blue]crawlo run[/blue] all # 运行所有爬虫")
|
|
146
|
+
console.print(" [blue]crawlo check[/blue] <爬虫名称> # 检查爬虫有效性")
|
|
147
|
+
|
|
148
|
+
return 0
|
|
149
|
+
|
|
150
|
+
except Exception as e:
|
|
151
|
+
if show_json:
|
|
152
|
+
console.print_json(data={"success": False, "error": str(e)})
|
|
153
|
+
else:
|
|
154
|
+
console.print(f"[bold red]❌ 意外错误:[/bold red] {e}")
|
|
155
|
+
logger.exception("执行 'crawlo list' 时发生异常")
|
|
156
156
|
return 1
|