crawlo 1.1.0__py3-none-any.whl → 1.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crawlo might be problematic. Click here for more details.
- crawlo/__init__.py +34 -24
- crawlo/__version__.py +1 -1
- crawlo/cli.py +40 -40
- crawlo/commands/__init__.py +13 -13
- crawlo/commands/check.py +594 -155
- crawlo/commands/genspider.py +152 -111
- crawlo/commands/list.py +156 -119
- crawlo/commands/run.py +285 -170
- crawlo/commands/startproject.py +196 -101
- crawlo/commands/stats.py +188 -167
- crawlo/commands/utils.py +187 -0
- crawlo/config.py +280 -0
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +171 -158
- crawlo/core/enhanced_engine.py +190 -0
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +162 -57
- crawlo/crawler.py +1028 -493
- crawlo/downloader/__init__.py +242 -78
- crawlo/downloader/aiohttp_downloader.py +212 -199
- crawlo/downloader/cffi_downloader.py +252 -277
- crawlo/downloader/httpx_downloader.py +257 -246
- crawlo/event.py +11 -11
- crawlo/exceptions.py +78 -78
- crawlo/extension/__init__.py +31 -31
- crawlo/extension/log_interval.py +49 -49
- crawlo/extension/log_stats.py +44 -44
- crawlo/extension/logging_extension.py +34 -34
- crawlo/filters/__init__.py +154 -37
- crawlo/filters/aioredis_filter.py +242 -150
- crawlo/filters/memory_filter.py +269 -202
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +53 -53
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +32 -32
- crawlo/middleware/download_delay.py +28 -28
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/proxy.py +248 -245
- crawlo/middleware/request_ignore.py +30 -30
- crawlo/middleware/response_code.py +18 -18
- crawlo/middleware/response_filter.py +26 -26
- crawlo/middleware/retry.py +125 -90
- crawlo/mode_manager.py +201 -0
- crawlo/network/__init__.py +21 -7
- crawlo/network/request.py +311 -203
- crawlo/network/response.py +269 -166
- crawlo/pipelines/__init__.py +13 -13
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +317 -0
- crawlo/pipelines/json_pipeline.py +219 -0
- crawlo/pipelines/mongo_pipeline.py +116 -116
- crawlo/pipelines/mysql_pipeline.py +195 -195
- crawlo/pipelines/pipeline_manager.py +56 -56
- crawlo/project.py +153 -0
- crawlo/queue/pqueue.py +37 -0
- crawlo/queue/queue_manager.py +304 -0
- crawlo/queue/redis_priority_queue.py +192 -0
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +226 -169
- crawlo/settings/setting_manager.py +99 -99
- crawlo/spider/__init__.py +639 -129
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +106 -106
- crawlo/task_manager.py +30 -27
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +87 -76
- crawlo/templates/project/pipelines.py.tmpl +336 -64
- crawlo/templates/project/run.py.tmpl +239 -0
- crawlo/templates/project/settings.py.tmpl +248 -54
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +178 -32
- crawlo/utils/__init__.py +7 -7
- crawlo/utils/controlled_spider_mixin.py +336 -0
- crawlo/utils/date_tools.py +233 -233
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +287 -0
- crawlo/utils/large_scale_helper.py +344 -0
- crawlo/utils/log.py +128 -128
- crawlo/utils/queue_helper.py +176 -0
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +220 -0
- crawlo/utils/spider_loader.py +62 -62
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.1.2.dist-info/METADATA +567 -0
- crawlo-1.1.2.dist-info/RECORD +108 -0
- examples/__init__.py +7 -0
- tests/__init__.py +7 -7
- tests/test_final_validation.py +154 -0
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_redis_config.py +29 -0
- tests/test_redis_queue.py +225 -0
- tests/test_request_serialization.py +71 -0
- tests/test_scheduler.py +242 -0
- crawlo/pipelines/mysql_batch_pipline.py +0 -273
- crawlo/utils/concurrency_manager.py +0 -125
- crawlo/utils/pqueue.py +0 -174
- crawlo/utils/project.py +0 -197
- crawlo-1.1.0.dist-info/METADATA +0 -49
- crawlo-1.1.0.dist-info/RECORD +0 -97
- examples/gxb/items.py +0 -36
- examples/gxb/run.py +0 -16
- examples/gxb/settings.py +0 -72
- examples/gxb/spider/__init__.py +0 -2
- examples/gxb/spider/miit_spider.py +0 -180
- examples/gxb/spider/telecom_device.py +0 -129
- {examples/gxb → crawlo/queue}/__init__.py +0 -0
- {crawlo-1.1.0.dist-info → crawlo-1.1.2.dist-info}/WHEEL +0 -0
- {crawlo-1.1.0.dist-info → crawlo-1.1.2.dist-info}/entry_points.txt +0 -0
- {crawlo-1.1.0.dist-info → crawlo-1.1.2.dist-info}/top_level.txt +0 -0
crawlo/commands/genspider.py
CHANGED
|
@@ -1,111 +1,152 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
print("
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
#
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
1
|
+
#!/usr/bin/python
|
|
2
|
+
# -*- coding: UTF-8 -*-
|
|
3
|
+
"""
|
|
4
|
+
# @Time : 2025-08-31 22:36
|
|
5
|
+
# @Author : crawl-coder
|
|
6
|
+
# @Desc : 命令行入口:crawlo genspider baidu,创建爬虫。
|
|
7
|
+
"""
|
|
8
|
+
import sys
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
import configparser
|
|
11
|
+
import importlib
|
|
12
|
+
from rich.console import Console
|
|
13
|
+
|
|
14
|
+
from .utils import (
|
|
15
|
+
get_project_root,
|
|
16
|
+
validate_project_environment,
|
|
17
|
+
show_error_panel,
|
|
18
|
+
show_success_panel,
|
|
19
|
+
validate_spider_name,
|
|
20
|
+
is_valid_domain
|
|
21
|
+
)
|
|
22
|
+
|
|
23
|
+
# 初始化 rich 控制台
|
|
24
|
+
console = Console()
|
|
25
|
+
|
|
26
|
+
TEMPLATES_DIR = Path(__file__).parent.parent / 'templates'
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _render_template(tmpl_path, context):
|
|
30
|
+
"""读取模板文件,替换 {{key}} 为 context 中的值"""
|
|
31
|
+
with open(tmpl_path, 'r', encoding='utf-8') as f:
|
|
32
|
+
content = f.read()
|
|
33
|
+
for key, value in context.items():
|
|
34
|
+
content = content.replace(f'{{{{{key}}}}}', str(value))
|
|
35
|
+
return content
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def main(args) -> int:
    """Entry point for ``crawlo genspider <spider_name> <domain>``.

    Validates the arguments and the project environment, picks a default
    item class from the project's ``items`` module when possible, and
    renders a new spider file from the bundled template.

    Args:
        args: Positional CLI arguments (spider name, then domain).

    Returns:
        ``0`` on success, ``1`` on any validation or I/O failure.
    """
    if len(args) < 2:
        console.print("[bold red]Error:[/bold red] Usage: [blue]crawlo genspider[/blue] <spider_name> <domain>")
        console.print("💡 Examples:")
        console.print(" [blue]crawlo genspider[/blue] news_spider news.example.com")
        console.print(" [blue]crawlo genspider[/blue] product_spider shop.example.com")
        return 1

    spider_name = args[0]
    domain = args[1]

    # Validate the spider name (lowercase Python identifier).
    if not validate_spider_name(spider_name):
        show_error_panel(
            "Invalid Spider Name",
            f"Spider name '[cyan]{spider_name}[/cyan]' is invalid.\n"
            "💡 Spider name should:\n"
            " • Start with lowercase letter\n"
            " • Contain only lowercase letters, numbers, and underscores\n"
            " • Be a valid Python identifier"
        )
        return 1

    # Validate the domain format.
    if not is_valid_domain(domain):
        show_error_panel(
            "Invalid Domain",
            f"Domain '[cyan]{domain}[/cyan]' format is invalid.\n"
            "💡 Please provide a valid domain name like 'example.com'"
        )
        return 1

    # Verify we are inside a crawlo project before touching the filesystem.
    is_valid, project_package, error_msg = validate_project_environment()
    if not is_valid:
        show_error_panel("Not a Crawlo Project", error_msg)
        return 1

    project_root = get_project_root()

    # Dotted path of the project's items module.
    items_module_path = f"{project_package}.items"

    # Try to import the items module to choose a sensible default item class.
    default_item_class = "ExampleItem"  # default fallback
    try:
        items_module = importlib.import_module(items_module_path)
        # Collect every class in the module whose name is capitalized.
        # NOTE(review): this also picks up classes *imported into* items.py
        # (e.g. a base Item class) — presumably acceptable; verify.
        item_classes = [
            cls for cls in items_module.__dict__.values()
            if isinstance(cls, type) and cls.__name__[0].isupper()  # capitalized name
        ]

        if item_classes:
            default_item_class = item_classes[0].__name__
        else:
            console.print("[yellow]:warning: Warning:[/yellow] No item class found in [cyan]items.py[/cyan], using [green]ExampleItem[/green].")

    except ImportError as e:
        console.print(f"[yellow]:warning: Warning:[/yellow] Failed to import [cyan]{items_module_path}[/cyan]: {e}")
        # Keep the ExampleItem fallback; do not abort spider creation.

    # Create the spiders directory (idempotent) and the target file path.
    spiders_dir = project_root / project_package / 'spiders'
    spiders_dir.mkdir(parents=True, exist_ok=True)

    spider_file = spiders_dir / f'{spider_name}.py'
    if spider_file.exists():
        show_error_panel(
            "Spider Already Exists",
            f"Spider '[cyan]{spider_name}[/cyan]' already exists at\n[green]{spider_file}[/green]"
        )
        return 1

    # Locate the spider template shipped with the package.
    tmpl_path = TEMPLATES_DIR / 'spider' / 'spider.py.tmpl'
    if not tmpl_path.exists():
        show_error_panel(
            "Template Not Found",
            f"Template file not found at [cyan]{tmpl_path}[/cyan]"
        )
        return 1

    # Derive the class name, e.g. 'news_spider' -> 'NewsspiderSpider'.
    class_name = f"{spider_name.replace('_', '').capitalize()}Spider"

    context = {
        'spider_name': spider_name,
        'domain': domain,
        'project_name': project_package,
        'item_class': default_item_class,
        'class_name': class_name
    }

    try:
        content = _render_template(tmpl_path, context)
        with open(spider_file, 'w', encoding='utf-8') as f:
            f.write(content)

        console.print(f":white_check_mark: [green]Spider '[bold]{spider_name}[/bold]' created successfully![/green]")
        console.print(f" → Location: [cyan]{spider_file}[/cyan]")
        console.print(f" → Class: [yellow]{class_name}[/yellow]")
        console.print(f" → Domain: [blue]{domain}[/blue]")
        console.print("\n[bold]Next steps:[/bold]")
        console.print(f" [blue]crawlo run[/blue] {spider_name}")
        console.print(f" [blue]crawlo check[/blue] {spider_name}")

        return 0

    except Exception as e:
        # Catch-all at the CLI boundary: report and exit non-zero.
        show_error_panel(
            "Creation Failed",
            f"Failed to create spider: {e}"
        )
        return 1
|
crawlo/commands/list.py
CHANGED
|
@@ -1,119 +1,156 @@
|
|
|
1
|
-
#!/usr/bin/python
|
|
2
|
-
# -*- coding: UTF-8 -*-
|
|
3
|
-
"""
|
|
4
|
-
# @Time : 2025-08-31 22:33
|
|
5
|
-
# @Author : crawl-coder
|
|
6
|
-
# @Desc : 命令行入口:crawlo list,用于列出所有已注册的爬虫
|
|
7
|
-
"""
|
|
8
|
-
|
|
9
|
-
import
|
|
10
|
-
import
|
|
11
|
-
|
|
12
|
-
from
|
|
13
|
-
|
|
14
|
-
from
|
|
15
|
-
from
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
1
|
+
#!/usr/bin/python
|
|
2
|
+
# -*- coding: UTF-8 -*-
|
|
3
|
+
"""
|
|
4
|
+
# @Time : 2025-08-31 22:33
|
|
5
|
+
# @Author : crawl-coder
|
|
6
|
+
# @Desc : 命令行入口:crawlo list,用于列出所有已注册的爬虫
|
|
7
|
+
"""
|
|
8
|
+
import sys
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from importlib import import_module
|
|
11
|
+
|
|
12
|
+
from rich.console import Console
|
|
13
|
+
from rich.table import Table
|
|
14
|
+
from rich.panel import Panel
|
|
15
|
+
from rich.text import Text
|
|
16
|
+
from rich import box
|
|
17
|
+
|
|
18
|
+
from crawlo.crawler import CrawlerProcess
|
|
19
|
+
from crawlo.utils.log import get_logger
|
|
20
|
+
from .utils import validate_project_environment, show_error_panel
|
|
21
|
+
|
|
22
|
+
logger = get_logger(__name__)
|
|
23
|
+
console = Console()
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def main(args) -> int:
    """List every spider registered in the current crawlo project.

    Usage: ``crawlo list [--json]``

    With ``--json`` the result (or any error) is emitted as JSON;
    otherwise a rich table plus usage hints is printed.

    Args:
        args: CLI arguments; ``--json`` toggles machine-readable output.

    Returns:
        ``0`` on success (including "no spiders found"), ``1`` on error.
    """
    show_json = "--json" in args

    # After stripping flag-style arguments, any remainder is a usage error.
    filtered_args = [arg for arg in args if not arg.startswith('--')]
    if filtered_args:
        if show_json:
            console.print_json(data={"success": False, "error": "Usage: crawlo list [--json]"})
        else:
            console.print("[bold red]❌ Error:[/bold red] Usage: [blue]crawlo list[/blue] [--json]")
        return 1

    try:
        # Verify we are inside a crawlo project.
        is_valid, project_package, error_msg = validate_project_environment()
        if not is_valid:
            if show_json:
                console.print_json(data={"success": False, "error": error_msg})
            else:
                show_error_panel("Not a Crawlo Project", error_msg)
            return 1

        # Initialize a CrawlerProcess to discover the project's spider modules.
        spider_modules = [f"{project_package}.spiders"]
        process = CrawlerProcess(spider_modules=spider_modules)

        # Fetch all registered spider names; an empty result is not an error.
        spider_names = process.get_spider_names()
        if not spider_names:
            if show_json:
                console.print_json(data={
                    "success": True,
                    "spiders": [],
                    "message": "No spiders found in project"
                })
            else:
                console.print(Panel(
                    Text.from_markup(
                        ":envelope_with_arrow: [bold]No spiders found[/bold] in '[cyan]spiders/[/cyan]' directory.\n\n"
                        "[bold]💡 Make sure:[/bold]\n"
                        " • Spider classes inherit from [blue]`crawlo.spider.Spider`[/blue]\n"
                        " • Each spider has a [green]`name`[/green] attribute\n"
                        " • Spiders are imported in [cyan]`spiders/__init__.py`[/cyan] (if using package)"
                    ),
                    title="📭 No Spiders Found",
                    border_style="yellow",
                    padding=(1, 2)
                ))
            return 0

        # Build one info record per spider, sorted by name for stable output.
        spider_info = []
        for name in sorted(spider_names):
            spider_cls = process.get_spider_class(name)
            # Show the module path relative to the project package.
            module_name = spider_cls.__module__.replace(f"{project_package}.", "")

            # Optional class attributes; default when a spider omits them.
            start_urls_count = len(getattr(spider_cls, 'start_urls', []))
            allowed_domains = getattr(spider_cls, 'allowed_domains', [])
            custom_settings = getattr(spider_cls, 'custom_settings', {})

            spider_info.append({
                "name": name,
                "class": spider_cls.__name__,
                "module": module_name,
                "start_urls_count": start_urls_count,
                "allowed_domains": allowed_domains,
                "has_custom_settings": bool(custom_settings)
            })

        # JSON output mode: dump the records and stop.
        if show_json:
            console.print_json(data={
                "success": True,
                "count": len(spider_info),
                "spiders": spider_info
            })
            return 0

        # Human-readable table output.
        table = Table(
            title=f"📋 Found {len(spider_names)} spider(s)",
            box=box.ROUNDED,
            show_header=True,
            header_style="bold magenta",
            title_style="bold green"
        )
        table.add_column("Name", style="cyan", no_wrap=True)
        table.add_column("Class", style="green")
        table.add_column("Module", style="dim")
        table.add_column("URLs", style="blue", justify="center")
        table.add_column("Domains", style="yellow")
        table.add_column("Custom Settings", style="magenta", justify="center")

        for info in spider_info:
            # Show at most the first two domains; summarize the rest.
            domains_display = ", ".join(info["allowed_domains"][:2])
            if len(info["allowed_domains"]) > 2:
                domains_display += f" (+{len(info['allowed_domains'])-2})"
            elif not domains_display:
                domains_display = "-"

            table.add_row(
                info["name"],
                info["class"],
                info["module"],
                str(info["start_urls_count"]),
                domains_display,
                "✓" if info["has_custom_settings"] else "-"
            )

        console.print(table)

        # Follow-up usage hints.
        console.print("\n[bold]🚀 Next steps:[/bold]")
        console.print(" [blue]crawlo run[/blue] <spider_name> # Run a specific spider")
        console.print(" [blue]crawlo run[/blue] all # Run all spiders")
        console.print(" [blue]crawlo check[/blue] <spider_name> # Check spider validity")

        return 0

    except Exception as e:
        # Catch-all at the CLI boundary: report in the selected output mode
        # and log the full traceback for debugging.
        if show_json:
            console.print_json(data={"success": False, "error": str(e)})
        else:
            console.print(f"[bold red]❌ Unexpected error:[/bold red] {e}")
        logger.exception("Exception during 'crawlo list'")
        return 1
|