crawlo 1.0.9__py3-none-any.whl → 1.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crawlo might be problematic. Click here for more details.
- crawlo/__init__.py +33 -24
- crawlo/__version__.py +1 -1
- crawlo/cli.py +40 -40
- crawlo/commands/__init__.py +13 -13
- crawlo/commands/check.py +594 -106
- crawlo/commands/genspider.py +125 -110
- crawlo/commands/list.py +147 -92
- crawlo/commands/run.py +286 -181
- crawlo/commands/startproject.py +111 -101
- crawlo/commands/stats.py +188 -59
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +158 -158
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +57 -57
- crawlo/crawler.py +494 -492
- crawlo/downloader/__init__.py +78 -78
- crawlo/downloader/aiohttp_downloader.py +199 -199
- crawlo/downloader/cffi_downloader.py +242 -277
- crawlo/downloader/httpx_downloader.py +246 -246
- crawlo/event.py +11 -11
- crawlo/exceptions.py +78 -78
- crawlo/extension/__init__.py +31 -31
- crawlo/extension/log_interval.py +49 -49
- crawlo/extension/log_stats.py +44 -44
- crawlo/extension/logging_extension.py +34 -34
- crawlo/filters/__init__.py +37 -37
- crawlo/filters/aioredis_filter.py +150 -150
- crawlo/filters/memory_filter.py +202 -202
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +53 -53
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +32 -32
- crawlo/middleware/download_delay.py +28 -28
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/proxy.py +245 -245
- crawlo/middleware/request_ignore.py +30 -30
- crawlo/middleware/response_code.py +18 -18
- crawlo/middleware/response_filter.py +26 -26
- crawlo/middleware/retry.py +90 -90
- crawlo/network/__init__.py +7 -7
- crawlo/network/request.py +203 -203
- crawlo/network/response.py +166 -166
- crawlo/pipelines/__init__.py +13 -13
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/mongo_pipeline.py +116 -116
- crawlo/pipelines/mysql_batch_pipline.py +272 -272
- crawlo/pipelines/mysql_pipeline.py +195 -195
- crawlo/pipelines/pipeline_manager.py +56 -56
- crawlo/project.py +153 -0
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +166 -168
- crawlo/settings/setting_manager.py +99 -99
- crawlo/spider/__init__.py +129 -129
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +106 -106
- crawlo/task_manager.py +27 -27
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +75 -75
- crawlo/templates/project/pipelines.py.tmpl +63 -63
- crawlo/templates/project/settings.py.tmpl +54 -54
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +31 -31
- crawlo/utils/__init__.py +7 -7
- crawlo/utils/date_tools.py +233 -233
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/log.py +128 -128
- crawlo/utils/pqueue.py +173 -173
- crawlo/utils/request.py +267 -267
- crawlo/utils/spider_loader.py +62 -62
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.1.1.dist-info/METADATA +220 -0
- crawlo-1.1.1.dist-info/RECORD +100 -0
- examples/__init__.py +7 -0
- examples/baidu_spider/__init__.py +7 -0
- examples/baidu_spider/demo.py +94 -0
- examples/baidu_spider/items.py +46 -0
- examples/baidu_spider/middleware.py +49 -0
- examples/baidu_spider/pipeline.py +55 -0
- examples/baidu_spider/run.py +27 -0
- examples/baidu_spider/settings.py +121 -0
- examples/baidu_spider/spiders/__init__.py +7 -0
- examples/baidu_spider/spiders/bai_du.py +61 -0
- examples/baidu_spider/spiders/miit.py +159 -0
- examples/baidu_spider/spiders/sina.py +79 -0
- tests/__init__.py +7 -7
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- crawlo/utils/concurrency_manager.py +0 -125
- crawlo/utils/project.py +0 -197
- crawlo-1.0.9.dist-info/METADATA +0 -49
- crawlo-1.0.9.dist-info/RECORD +0 -97
- examples/gxb/__init__.py +0 -0
- examples/gxb/items.py +0 -36
- examples/gxb/run.py +0 -16
- examples/gxb/settings.py +0 -72
- examples/gxb/spider/__init__.py +0 -0
- examples/gxb/spider/miit_spider.py +0 -180
- examples/gxb/spider/telecom_device.py +0 -129
- {crawlo-1.0.9.dist-info → crawlo-1.1.1.dist-info}/WHEEL +0 -0
- {crawlo-1.0.9.dist-info → crawlo-1.1.1.dist-info}/entry_points.txt +0 -0
- {crawlo-1.0.9.dist-info → crawlo-1.1.1.dist-info}/top_level.txt +0 -0
crawlo/commands/genspider.py
CHANGED
|
@@ -1,111 +1,126 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
#
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
1
|
+
#!/usr/bin/python
|
|
2
|
+
# -*- coding: UTF-8 -*-
|
|
3
|
+
"""
|
|
4
|
+
# @Time : 2025-08-31 22:36
|
|
5
|
+
# @Author : crawl-coder
|
|
6
|
+
# @Desc : 命令行入口:crawlo genspider baidu,创建爬虫。
|
|
7
|
+
"""
|
|
8
|
+
import sys
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
import configparser
|
|
11
|
+
import importlib
|
|
12
|
+
from rich.console import Console
|
|
13
|
+
|
|
14
|
+
# Rich console used for all user-facing output from this command.
console = Console()

# Directory holding the bundled code templates (crawlo/templates).
TEMPLATES_DIR = Path(__file__).parent.parent / 'templates'
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def _render_template(tmpl_path, context):
|
|
21
|
+
"""读取模板文件,替换 {{key}} 为 context 中的值"""
|
|
22
|
+
with open(tmpl_path, 'r', encoding='utf-8') as f:
|
|
23
|
+
content = f.read()
|
|
24
|
+
for key, value in context.items():
|
|
25
|
+
content = content.replace(f'{{{{{key}}}}}', str(value))
|
|
26
|
+
return content
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _find_project_root():
    """Walk upward from the CWD looking for a ``crawlo.cfg`` marker.

    Returns the directory containing the marker as a ``Path``, or ``None``
    when the filesystem root is reached without finding one.
    """
    current = Path.cwd()
    while True:
        if (current / 'crawlo.cfg').exists():
            return current
        parent = current.parent
        if parent == current:  # reached filesystem root
            return None
        current = parent


def _camel_case(name):
    """Convert a snake_case spider name to CamelCase (``bai_du`` -> ``BaiDu``)."""
    return ''.join(part.capitalize() for part in name.split('_') if part)


def main(args):
    """Entry point for ``crawlo genspider <spider_name> <domain>``.

    Renders the bundled spider template into ``<project>/spiders/<name>.py``.
    Returns 0 on success, 1 on any error (bad usage, not inside a project,
    broken crawlo.cfg, spider already exists, missing template).
    """
    if len(args) < 2:
        console.print("[bold red]Error:[/bold red] Usage: [blue]crawlo genspider[/blue] <spider_name> <domain>")
        return 1

    spider_name = args[0]
    domain = args[1]

    # Locate the project root (the directory holding crawlo.cfg).
    project_root = _find_project_root()
    if not project_root:
        console.print("[bold red]:cross_mark: Error:[/bold red] Not a crawlo project. [cyan]crawlo.cfg[/cyan] not found.")
        return 1

    # Make the project package importable.
    if str(project_root) not in sys.path:
        sys.path.insert(0, str(project_root))

    # Read the settings module path from crawlo.cfg to derive the package name.
    # Use an explicit path instead of relying on a variable leaking out of the
    # search loop above.
    cfg_file = project_root / 'crawlo.cfg'
    config = configparser.ConfigParser()
    try:
        config.read(cfg_file, encoding='utf-8')
        settings_module = config.get('settings', 'default')
        project_package = settings_module.split('.')[0]  # e.g., myproject.settings -> myproject
    except Exception as e:
        console.print(f"[bold red]:cross_mark: Error reading crawlo.cfg:[/bold red] {e}")
        return 1

    # Module path of the project's item definitions.
    items_module_path = f"{project_package}.items"

    # Pick a default item class for the template: the first class *defined*
    # in the project's items module. Filtering on __module__ avoids selecting
    # base classes that items.py merely imports (the previous code could pick
    # e.g. a framework Item re-exported into the module namespace).
    default_item_class = "ExampleItem"  # fallback
    try:
        items_module = importlib.import_module(items_module_path)
        item_classes = [
            cls for cls in items_module.__dict__.values()
            if isinstance(cls, type)
            and cls.__module__ == items_module.__name__
            and cls.__name__[0].isupper()
        ]
        if item_classes:
            default_item_class = item_classes[0].__name__
        else:
            console.print("[yellow]:warning: Warning:[/yellow] No item class found in [cyan]items.py[/cyan], using [green]ExampleItem[/green].")
    except ImportError as e:
        console.print(f"[yellow]:warning: Warning:[/yellow] Failed to import [cyan]{items_module_path}[/cyan]: {e}")
        # Best effort: keep the ExampleItem fallback rather than aborting.

    # Ensure the spiders package directory exists.
    spiders_dir = project_root / project_package / 'spiders'
    spiders_dir.mkdir(parents=True, exist_ok=True)

    spider_file = spiders_dir / f'{spider_name}.py'
    if spider_file.exists():
        console.print(f"[bold red]:cross_mark: Error:[/bold red] Spider '[cyan]{spider_name}[/cyan]' already exists at [green]{spider_file}[/green]")
        return 1

    # Template path bundled with the package.
    tmpl_path = TEMPLATES_DIR / 'spider' / 'spider.py.tmpl'
    if not tmpl_path.exists():
        console.print(f"[bold red]:cross_mark: Error:[/bold red] Template file not found at [cyan]{tmpl_path}[/cyan]")
        return 1

    # Build a proper CamelCase class name; bare capitalize() would turn
    # "bai_du" into the invalid-looking "Bai_duSpider".
    class_name = f"{_camel_case(spider_name)}Spider"

    context = {
        'spider_name': spider_name,
        'domain': domain,
        'project_name': project_package,
        'item_class': default_item_class,
        'class_name': class_name
    }

    content = _render_template(tmpl_path, context)

    with open(spider_file, 'w', encoding='utf-8') as f:
        f.write(content)

    console.print(f":white_check_mark: [green]Spider '[bold]{spider_name}[/bold]' created successfully![/green]")
    console.print(f"  → Location: [cyan]{spider_file}[/cyan]")
    console.print("\n[bold]Next step:[/bold]")
    console.print(f"  [blue]crawlo run[/blue] {spider_name}")

    return 0
|
crawlo/commands/list.py
CHANGED
|
@@ -1,92 +1,147 @@
|
|
|
1
|
-
#!/usr/bin/python
|
|
2
|
-
# -*- coding:UTF-8 -*-
|
|
3
|
-
"""
|
|
4
|
-
# @Time :
|
|
5
|
-
# @Author :
|
|
6
|
-
# @Desc :
|
|
7
|
-
"""
|
|
8
|
-
import sys
|
|
9
|
-
import configparser
|
|
10
|
-
|
|
11
|
-
from
|
|
12
|
-
|
|
13
|
-
from
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
1
|
+
#!/usr/bin/python
|
|
2
|
+
# -*- coding: UTF-8 -*-
|
|
3
|
+
"""
|
|
4
|
+
# @Time : 2025-08-31 22:33
|
|
5
|
+
# @Author : crawl-coder
|
|
6
|
+
# @Desc : 命令行入口:crawlo list,用于列出所有已注册的爬虫
|
|
7
|
+
"""
|
|
8
|
+
import sys
|
|
9
|
+
import configparser
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from importlib import import_module
|
|
12
|
+
|
|
13
|
+
from rich.console import Console
|
|
14
|
+
from rich.table import Table
|
|
15
|
+
from rich.panel import Panel
|
|
16
|
+
from rich.text import Text
|
|
17
|
+
from rich import box
|
|
18
|
+
|
|
19
|
+
from crawlo.crawler import CrawlerProcess
|
|
20
|
+
from crawlo.utils.log import get_logger
|
|
21
|
+
|
|
22
|
+
# Module-level logger (project logging helper) and rich console for output.
logger = get_logger(__name__)
console = Console()
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def get_project_root():
    """Locate the project root by walking upward from the CWD.

    Checks the current directory and at most nine ancestors for a
    ``crawlo.cfg`` marker file. Returns the containing directory as a
    string, or ``None`` when no marker is found.
    """
    directory = Path.cwd()
    remaining = 10
    while remaining > 0:
        if (directory / "crawlo.cfg").exists():
            return str(directory)
        parent = directory.parent
        if parent == directory:  # reached the filesystem root
            return None
        directory = parent
        remaining -= 1
    return None  # searched 10 levels without a match
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def main(args):
    """
    Entry point for ``crawlo list``.

    Discovers the enclosing crawlo project, loads its spider modules via
    CrawlerProcess and prints a table of all registered spiders.
    Returns 0 on success, 1 on any error.
    """
    if args:
        console.print("[bold red]❌ Error:[/bold red] Usage: [blue]crawlo list[/blue]")
        return 1

    try:
        # 1. Locate the project root.
        project_root = get_project_root()
        if not project_root:
            console.print(Panel(
                Text.from_markup(
                    ":cross_mark: [bold red]Cannot find 'crawlo.cfg'[/bold red]\n"
                    "💡 Run this command inside your project directory.\n"
                    "🚀 Or create a new project with:\n"
                    "   [blue]crawlo startproject myproject[/blue]"
                ),
                title="❌ Not in a Crawlo Project",
                border_style="red",
                padding=(1, 2)
            ))
            return 1

        project_root_path = Path(project_root)
        project_root_str = str(project_root_path)

        # 2. Make the project package importable.
        if project_root_str not in sys.path:
            sys.path.insert(0, project_root_str)

        # 3. Read crawlo.cfg to find the settings module.
        cfg_file = project_root_path / "crawlo.cfg"
        config = configparser.ConfigParser()
        config.read(cfg_file, encoding="utf-8")

        if not config.has_section("settings") or not config.has_option("settings", "default"):
            console.print(Panel(
                ":cross_mark: [bold red]Invalid crawlo.cfg[/bold red]\n"
                "Missing [settings] section or 'default' option.",
                title="❌ Config Error",
                border_style="red"
            ))
            return 1

        settings_module = config.get("settings", "default")
        project_package = settings_module.split(".")[0]

        # 4. Verify the project package is importable before going further.
        try:
            import_module(project_package)
        except ImportError as e:
            console.print(Panel(
                f":cross_mark: Failed to import project package '[cyan]{project_package}[/cyan]':\n{e}",
                title="❌ Import Error",
                border_style="red"
            ))
            return 1

        # 5. Initialise the CrawlerProcess so it loads the spider modules.
        spider_modules = [f"{project_package}.spiders"]
        process = CrawlerProcess(spider_modules=spider_modules)

        # 6. Collect all registered spider names.
        spider_names = process.get_spider_names()
        if not spider_names:
            console.print(Panel(
                Text.from_markup(
                    ":envelope_with_arrow: [bold]No spiders found[/bold] in '[cyan]spiders/[/cyan]' directory.\n\n"
                    "[bold]💡 Make sure:[/bold]\n"
                    "  • Spider classes inherit from [blue]`crawlo.spider.Spider`[/blue]\n"
                    "  • Each spider has a [green]`name`[/green] attribute\n"
                    "  • Spiders are imported in [cyan]`spiders/__init__.py`[/cyan] (if using package)"
                ),
                title="📭 No Spiders Found",
                border_style="yellow",
                padding=(1, 2)
            ))
            return 1

        # 7. Render the spiders as a table.
        table = Table(
            title=f"📋 Found {len(spider_names)} spider(s)",
            box=box.ROUNDED,
            show_header=True,
            header_style="bold magenta",
            title_style="bold green"
        )
        table.add_column("Name", style="cyan", no_wrap=True)
        table.add_column("Class", style="green")
        table.add_column("Module", style="dim")

        prefix = f"{project_package}."
        for name in sorted(spider_names):
            spider_cls = process.get_spider_class(name)
            # Strip only the *leading* package prefix. The previous
            # str.replace() removed the prefix anywhere in the dotted path,
            # mangling module names that repeat the package name.
            module_name = spider_cls.__module__
            if module_name.startswith(prefix):
                module_name = module_name[len(prefix):]
            table.add_row(name, spider_cls.__name__, module_name)

        console.print(table)
        return 0

    except Exception as e:
        # CLI boundary: report the failure to the user and log the traceback.
        console.print(f"[bold red]❌ Unexpected error:[/bold red] {e}")
        logger.exception("Exception during 'crawlo list'")
        return 1
|