crawlo-1.1.0-py3-none-any.whl → crawlo-1.1.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic. Click here for more details.

Files changed (111)
  1. crawlo/__init__.py +33 -24
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +40 -40
  4. crawlo/commands/__init__.py +13 -13
  5. crawlo/commands/check.py +594 -155
  6. crawlo/commands/genspider.py +125 -110
  7. crawlo/commands/list.py +147 -119
  8. crawlo/commands/run.py +285 -170
  9. crawlo/commands/startproject.py +111 -101
  10. crawlo/commands/stats.py +188 -167
  11. crawlo/core/__init__.py +2 -2
  12. crawlo/core/engine.py +158 -158
  13. crawlo/core/processor.py +40 -40
  14. crawlo/core/scheduler.py +57 -57
  15. crawlo/crawler.py +494 -492
  16. crawlo/downloader/__init__.py +78 -78
  17. crawlo/downloader/aiohttp_downloader.py +199 -199
  18. crawlo/downloader/cffi_downloader.py +242 -277
  19. crawlo/downloader/httpx_downloader.py +246 -246
  20. crawlo/event.py +11 -11
  21. crawlo/exceptions.py +78 -78
  22. crawlo/extension/__init__.py +31 -31
  23. crawlo/extension/log_interval.py +49 -49
  24. crawlo/extension/log_stats.py +44 -44
  25. crawlo/extension/logging_extension.py +34 -34
  26. crawlo/filters/__init__.py +37 -37
  27. crawlo/filters/aioredis_filter.py +150 -150
  28. crawlo/filters/memory_filter.py +202 -202
  29. crawlo/items/__init__.py +23 -23
  30. crawlo/items/base.py +21 -21
  31. crawlo/items/fields.py +53 -53
  32. crawlo/items/items.py +104 -104
  33. crawlo/middleware/__init__.py +21 -21
  34. crawlo/middleware/default_header.py +32 -32
  35. crawlo/middleware/download_delay.py +28 -28
  36. crawlo/middleware/middleware_manager.py +135 -135
  37. crawlo/middleware/proxy.py +245 -245
  38. crawlo/middleware/request_ignore.py +30 -30
  39. crawlo/middleware/response_code.py +18 -18
  40. crawlo/middleware/response_filter.py +26 -26
  41. crawlo/middleware/retry.py +90 -90
  42. crawlo/network/__init__.py +7 -7
  43. crawlo/network/request.py +203 -203
  44. crawlo/network/response.py +166 -166
  45. crawlo/pipelines/__init__.py +13 -13
  46. crawlo/pipelines/console_pipeline.py +39 -39
  47. crawlo/pipelines/mongo_pipeline.py +116 -116
  48. crawlo/pipelines/mysql_batch_pipline.py +272 -272
  49. crawlo/pipelines/mysql_pipeline.py +195 -195
  50. crawlo/pipelines/pipeline_manager.py +56 -56
  51. crawlo/project.py +153 -0
  52. crawlo/settings/__init__.py +7 -7
  53. crawlo/settings/default_settings.py +166 -168
  54. crawlo/settings/setting_manager.py +99 -99
  55. crawlo/spider/__init__.py +129 -129
  56. crawlo/stats_collector.py +59 -59
  57. crawlo/subscriber.py +106 -106
  58. crawlo/task_manager.py +27 -27
  59. crawlo/templates/crawlo.cfg.tmpl +10 -10
  60. crawlo/templates/project/__init__.py.tmpl +3 -3
  61. crawlo/templates/project/items.py.tmpl +17 -17
  62. crawlo/templates/project/middlewares.py.tmpl +75 -75
  63. crawlo/templates/project/pipelines.py.tmpl +63 -63
  64. crawlo/templates/project/settings.py.tmpl +54 -54
  65. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  66. crawlo/templates/spider/spider.py.tmpl +31 -31
  67. crawlo/utils/__init__.py +7 -7
  68. crawlo/utils/date_tools.py +233 -233
  69. crawlo/utils/db_helper.py +343 -343
  70. crawlo/utils/func_tools.py +82 -82
  71. crawlo/utils/log.py +128 -128
  72. crawlo/utils/pqueue.py +173 -173
  73. crawlo/utils/request.py +267 -267
  74. crawlo/utils/spider_loader.py +62 -62
  75. crawlo/utils/system.py +11 -11
  76. crawlo/utils/tools.py +4 -4
  77. crawlo/utils/url.py +39 -39
  78. crawlo-1.1.1.dist-info/METADATA +220 -0
  79. crawlo-1.1.1.dist-info/RECORD +100 -0
  80. examples/__init__.py +7 -0
  81. examples/baidu_spider/__init__.py +7 -0
  82. examples/baidu_spider/demo.py +94 -0
  83. examples/baidu_spider/items.py +46 -0
  84. examples/baidu_spider/middleware.py +49 -0
  85. examples/baidu_spider/pipeline.py +55 -0
  86. examples/baidu_spider/run.py +27 -0
  87. examples/baidu_spider/settings.py +121 -0
  88. examples/baidu_spider/spiders/__init__.py +7 -0
  89. examples/baidu_spider/spiders/bai_du.py +61 -0
  90. examples/baidu_spider/spiders/miit.py +159 -0
  91. examples/baidu_spider/spiders/sina.py +79 -0
  92. tests/__init__.py +7 -7
  93. tests/test_proxy_health_check.py +32 -32
  94. tests/test_proxy_middleware_integration.py +136 -136
  95. tests/test_proxy_providers.py +56 -56
  96. tests/test_proxy_stats.py +19 -19
  97. tests/test_proxy_strategies.py +59 -59
  98. crawlo/utils/concurrency_manager.py +0 -125
  99. crawlo/utils/project.py +0 -197
  100. crawlo-1.1.0.dist-info/METADATA +0 -49
  101. crawlo-1.1.0.dist-info/RECORD +0 -97
  102. examples/gxb/__init__.py +0 -0
  103. examples/gxb/items.py +0 -36
  104. examples/gxb/run.py +0 -16
  105. examples/gxb/settings.py +0 -72
  106. examples/gxb/spider/__init__.py +0 -2
  107. examples/gxb/spider/miit_spider.py +0 -180
  108. examples/gxb/spider/telecom_device.py +0 -129
  109. {crawlo-1.1.0.dist-info → crawlo-1.1.1.dist-info}/WHEEL +0 -0
  110. {crawlo-1.1.0.dist-info → crawlo-1.1.1.dist-info}/entry_points.txt +0 -0
  111. {crawlo-1.1.0.dist-info → crawlo-1.1.1.dist-info}/top_level.txt +0 -0
@@ -1,111 +1,126 @@
1
- import os
2
- import sys
3
- from pathlib import Path
4
- import configparser
5
- import importlib
6
-
7
- TEMPLATES_DIR = Path(__file__).parent.parent / 'templates'
8
-
9
-
10
- def _render_template(tmpl_path, context):
11
- """读取模板文件,替换 {{key}} 为 context 中的值"""
12
- with open(tmpl_path, 'r', encoding='utf-8') as f:
13
- content = f.read()
14
- for key, value in context.items():
15
- content = content.replace(f'{{{{{key}}}}}', str(value))
16
- return content
17
-
18
-
19
- def main(args):
20
- if len(args) < 2:
21
- print("Usage: crawlo genspider <spider_name> <domain>")
22
- return 1
23
-
24
- spider_name = args[0]
25
- domain = args[1]
26
-
27
- # 查找项目根目录
28
- project_root = None
29
- current = Path.cwd()
30
- while True:
31
- cfg_file = current / 'crawlo.cfg'
32
- if cfg_file.exists():
33
- project_root = current
34
- break
35
- parent = current.parent
36
- if parent == current:
37
- break
38
- current = parent
39
-
40
- if not project_root:
41
- print("Error: Not a crawlo project. crawlo.cfg not found.")
42
- return 1
43
-
44
- # 将项目根目录加入 sys.path
45
- if str(project_root) not in sys.path:
46
- sys.path.insert(0, str(project_root))
47
-
48
- # crawlo.cfg 读取 settings 模块,获取项目包名
49
- config = configparser.ConfigParser()
50
- try:
51
- config.read(cfg_file, encoding='utf-8')
52
- settings_module = config.get('settings', 'default')
53
- project_package = settings_module.split('.')[0] # e.g., myproject.settings -> myproject
54
- except Exception as e:
55
- print(f"Error reading crawlo.cfg: {e}")
56
- return 1
57
-
58
- # 确定 items 模块的路径
59
- items_module_path = f"{project_package}.items"
60
-
61
- # 尝试导入 items 模块
62
- try:
63
- items_module = importlib.import_module(items_module_path)
64
- # 获取模块中所有大写开头的类
65
- item_classes = [cls for cls in items_module.__dict__.values()
66
- if isinstance(cls, type) and cls.__name__.isupper()]
67
-
68
- # 如果找到了类,使用第一个作为默认
69
- if item_classes:
70
- default_item_class = item_classes[0].__name__
71
- else:
72
- default_item_class = "ExampleItem" # 回退到示例
73
- except ImportError as e:
74
- print(f"Error importing items module '{items_module_path}': {e}")
75
- default_item_class = "ExampleItem"
76
-
77
- # 创建爬虫文件
78
- spiders_dir = project_root / project_package / 'spiders'
79
- if not spiders_dir.exists():
80
- spiders_dir.mkdir(parents=True)
81
-
82
- spider_file = spiders_dir / f'{spider_name}.py'
83
- if spider_file.exists():
84
- print(f"Error: Spider '{spider_name}' already exists.")
85
- return 1
86
-
87
- # 修正模板路径
88
- tmpl_path = TEMPLATES_DIR / 'spider' / 'spider.py.tmpl'
89
-
90
- if not tmpl_path.exists():
91
- print(f"Error: Template file not found at {tmpl_path}")
92
- return 1
93
-
94
- # 生成正确的类名
95
- class_name = f"{spider_name.capitalize()}Spider"
96
-
97
- context = {
98
- 'spider_name': spider_name,
99
- 'domain': domain,
100
- 'project_name': project_package,
101
- 'item_class': default_item_class,
102
- 'class_name': class_name # 添加处理好的类名
103
- }
104
-
105
- content = _render_template(tmpl_path, context)
106
-
107
- with open(spider_file, 'w', encoding='utf-8') as f:
108
- f.write(content)
109
-
110
- print(f"Spider '{spider_name}' created in {spider_file}")
1
+ #!/usr/bin/python
2
+ # -*- coding: UTF-8 -*-
3
+ """
4
+ # @Time : 2025-08-31 22:36
5
+ # @Author : crawl-coder
6
+ # @Desc : 命令行入口:crawlo genspider baidu,创建爬虫。
7
+ """
8
+ import sys
9
+ from pathlib import Path
10
+ import configparser
11
+ import importlib
12
+ from rich.console import Console
13
+
14
+ # 初始化 rich 控制台
15
+ console = Console()
16
+
17
+ TEMPLATES_DIR = Path(__file__).parent.parent / 'templates'
18
+
19
+
20
+ def _render_template(tmpl_path, context):
21
+ """读取模板文件,替换 {{key}} context 中的值"""
22
+ with open(tmpl_path, 'r', encoding='utf-8') as f:
23
+ content = f.read()
24
+ for key, value in context.items():
25
+ content = content.replace(f'{{{{{key}}}}}', str(value))
26
+ return content
27
+
28
+
29
+ def main(args):
30
+ if len(args) < 2:
31
+ console.print("[bold red]Error:[/bold red] Usage: [blue]crawlo genspider[/blue] <spider_name> <domain>")
32
+ return 1
33
+
34
+ spider_name = args[0]
35
+ domain = args[1]
36
+
37
+ # 查找项目根目录
38
+ project_root = None
39
+ current = Path.cwd()
40
+ while True:
41
+ cfg_file = current / 'crawlo.cfg'
42
+ if cfg_file.exists():
43
+ project_root = current
44
+ break
45
+ parent = current.parent
46
+ if parent == current:
47
+ break
48
+ current = parent
49
+
50
+ if not project_root:
51
+ console.print("[bold red]:cross_mark: Error:[/bold red] Not a crawlo project. [cyan]crawlo.cfg[/cyan] not found.")
52
+ return 1
53
+
54
+ # 将项目根目录加入 sys.path
55
+ if str(project_root) not in sys.path:
56
+ sys.path.insert(0, str(project_root))
57
+
58
+ # crawlo.cfg 读取 settings 模块,获取项目包名
59
+ config = configparser.ConfigParser()
60
+ try:
61
+ config.read(cfg_file, encoding='utf-8')
62
+ settings_module = config.get('settings', 'default')
63
+ project_package = settings_module.split('.')[0] # e.g., myproject.settings -> myproject
64
+ except Exception as e:
65
+ console.print(f"[bold red]:cross_mark: Error reading crawlo.cfg:[/bold red] {e}")
66
+ return 1
67
+
68
+ # 确定 items 模块的路径
69
+ items_module_path = f"{project_package}.items"
70
+
71
+ # 尝试导入 items 模块
72
+ default_item_class = "ExampleItem" # 默认回退
73
+ try:
74
+ items_module = importlib.import_module(items_module_path)
75
+ # 获取模块中所有大写开头的类
76
+ item_classes = [
77
+ cls for cls in items_module.__dict__.values()
78
+ if isinstance(cls, type) and cls.__name__[0].isupper() # 首字母大写
79
+ ]
80
+
81
+ if item_classes:
82
+ default_item_class = item_classes[0].__name__
83
+ else:
84
+ console.print("[yellow]:warning: Warning:[/yellow] No item class found in [cyan]items.py[/cyan], using [green]ExampleItem[/green].")
85
+
86
+ except ImportError as e:
87
+ console.print(f"[yellow]:warning: Warning:[/yellow] Failed to import [cyan]{items_module_path}[/cyan]: {e}")
88
+ # 仍使用默认 ExampleItem,不中断流程
89
+
90
+ # 创建爬虫文件
91
+ spiders_dir = project_root / project_package / 'spiders'
92
+ spiders_dir.mkdir(parents=True, exist_ok=True)
93
+
94
+ spider_file = spiders_dir / f'{spider_name}.py'
95
+ if spider_file.exists():
96
+ console.print(f"[bold red]:cross_mark: Error:[/bold red] Spider '[cyan]{spider_name}[/cyan]' already exists at [green]{spider_file}[/green]")
97
+ return 1
98
+
99
+ # 模板路径
100
+ tmpl_path = TEMPLATES_DIR / 'spider' / 'spider.py.tmpl'
101
+ if not tmpl_path.exists():
102
+ console.print(f"[bold red]:cross_mark: Error:[/bold red] Template file not found at [cyan]{tmpl_path}[/cyan]")
103
+ return 1
104
+
105
+ # 生成类名
106
+ class_name = f"{spider_name.capitalize()}Spider"
107
+
108
+ context = {
109
+ 'spider_name': spider_name,
110
+ 'domain': domain,
111
+ 'project_name': project_package,
112
+ 'item_class': default_item_class,
113
+ 'class_name': class_name
114
+ }
115
+
116
+ content = _render_template(tmpl_path, context)
117
+
118
+ with open(spider_file, 'w', encoding='utf-8') as f:
119
+ f.write(content)
120
+
121
+ console.print(f":white_check_mark: [green]Spider '[bold]{spider_name}[/bold]' created successfully![/green]")
122
+ console.print(f" → Location: [cyan]{spider_file}[/cyan]")
123
+ console.print("\n[bold]Next step:[/bold]")
124
+ console.print(f" [blue]crawlo run[/blue] {spider_name}")
125
+
111
126
  return 0
crawlo/commands/list.py CHANGED
@@ -1,119 +1,147 @@
1
- #!/usr/bin/python
2
- # -*- coding: UTF-8 -*-
3
- """
4
- # @Time : 2025-08-31 22:33
5
- # @Author : crawl-coder
6
- # @Desc : 命令行入口:crawlo list,用于列出所有已注册的爬虫
7
- """
8
-
9
- import sys
10
- import configparser
11
- from pathlib import Path
12
- from importlib import import_module
13
-
14
- from crawlo.crawler import CrawlerProcess
15
- from crawlo.utils.log import get_logger
16
-
17
-
18
- logger = get_logger(__name__)
19
-
20
-
21
- def get_project_root():
22
- """
23
- 自动检测项目根目录:从当前目录向上查找 crawlo.cfg
24
- 找到后返回该目录路径(字符串),最多向上查找10层。
25
- """
26
- current = Path.cwd()
27
-
28
- for _ in range(10):
29
- cfg = current / "crawlo.cfg"
30
- if cfg.exists():
31
- return str(current)
32
-
33
- # 到达文件系统根目录
34
- if current == current.parent:
35
- break
36
- current = current.parent
37
-
38
- return None # 未找到
39
-
40
-
41
- def main(args):
42
- """
43
- 主函数:列出所有可用爬虫
44
- 用法: crawlo list
45
- """
46
- if args:
47
- print("❌ Usage: crawlo list")
48
- return 1
49
-
50
- try:
51
- # 1. 查找项目根目录
52
- project_root = get_project_root()
53
- if not project_root:
54
- print("❌ Error: Cannot find 'crawlo.cfg'. Are you in a crawlo project?")
55
- print("💡 Tip: Run this command inside your project directory, or create a project with 'crawlo startproject'.")
56
- return 1
57
-
58
- project_root_path = Path(project_root)
59
- project_root_str = str(project_root_path)
60
-
61
- # 2. 将项目根加入 Python 路径,以便导入项目模块
62
- if project_root_str not in sys.path:
63
- sys.path.insert(0, project_root_str)
64
-
65
- # 3. 读取 crawlo.cfg 获取 settings 模块
66
- cfg_file = project_root_path / "crawlo.cfg"
67
- config = configparser.ConfigParser()
68
- config.read(cfg_file, encoding="utf-8")
69
-
70
- if not config.has_section("settings") or not config.has_option("settings", "default"):
71
- print("❌ Error: Invalid crawlo.cfg missing [settings] or 'default' option.")
72
- return 1
73
-
74
- settings_module = config.get("settings", "default")
75
- project_package = settings_module.split(".")[0]
76
-
77
- # 4. 确保项目包可导入(可选:尝试导入以触发异常)
78
- try:
79
- import_module(project_package)
80
- except ImportError as e:
81
- print(f"❌ Failed to import project package '{project_package}': {e}")
82
- return 1
83
-
84
- # 5. 初始化 CrawlerProcess 并加载爬虫模块
85
- spider_modules = [f"{project_package}.spiders"]
86
- process = CrawlerProcess(spider_modules=spider_modules)
87
-
88
- # 6. 获取所有爬虫名称
89
- spider_names = process.get_spider_names()
90
- if not spider_names:
91
- print("📭 No spiders found in 'spiders/' directory.")
92
- print("💡 Make sure:")
93
- print(" • Spider classes inherit from `crawlo.spider.Spider`")
94
- print(" • Each spider has a `name` attribute")
95
- print(" • Spiders are imported in `spiders/__init__.py` (if using package)")
96
- return 1
97
-
98
- # 7. 输出爬虫列表
99
- print(f"📋 Found {len(spider_names)} spider(s):")
100
- print("-" * 60)
101
- for name in sorted(spider_names):
102
- spider_cls = process.get_spider_class(name)
103
- module_name = spider_cls.__module__.replace(f"{project_package}.", "")
104
- print(f"🕷️ {name:<20} {spider_cls.__name__:<25} ({module_name})")
105
- print("-" * 60)
106
- return 0
107
-
108
- except Exception as e:
109
- print(f"❌ Unexpected error: {e}")
110
- logger.exception("Exception during 'crawlo list'")
111
- return 1
112
-
113
-
114
- if __name__ == "__main__":
115
- """
116
- 支持直接运行:
117
- python -m crawlo.commands.list
118
- """
119
- sys.exit(main(sys.argv[1:]))
1
+ #!/usr/bin/python
2
+ # -*- coding: UTF-8 -*-
3
+ """
4
+ # @Time : 2025-08-31 22:33
5
+ # @Author : crawl-coder
6
+ # @Desc : 命令行入口:crawlo list,用于列出所有已注册的爬虫
7
+ """
8
+ import sys
9
+ import configparser
10
+ from pathlib import Path
11
+ from importlib import import_module
12
+
13
+ from rich.console import Console
14
+ from rich.table import Table
15
+ from rich.panel import Panel
16
+ from rich.text import Text
17
+ from rich import box
18
+
19
+ from crawlo.crawler import CrawlerProcess
20
+ from crawlo.utils.log import get_logger
21
+
22
+ logger = get_logger(__name__)
23
+ console = Console()
24
+
25
+
26
+ def get_project_root():
27
+ """
28
+ 自动检测项目根目录:从当前目录向上查找 crawlo.cfg
29
+ 找到后返回该目录路径(字符串),最多向上查找10层。
30
+ """
31
+ current = Path.cwd()
32
+ for _ in range(10):
33
+ cfg = current / "crawlo.cfg"
34
+ if cfg.exists():
35
+ return str(current)
36
+ if current == current.parent:
37
+ break
38
+ current = current.parent
39
+ return None # 未找到
40
+
41
+
42
+ def main(args):
43
+ """
44
+ 主函数:列出所有可用爬虫
45
+ 用法: crawlo list
46
+ """
47
+ if args:
48
+ console.print("[bold red]❌ Error:[/bold red] Usage: [blue]crawlo list[/blue]")
49
+ return 1
50
+
51
+ try:
52
+ # 1. 查找项目根目录
53
+ project_root = get_project_root()
54
+ if not project_root:
55
+ console.print(Panel(
56
+ Text.from_markup(
57
+ ":cross_mark: [bold red]Cannot find 'crawlo.cfg'[/bold red]\n"
58
+ "💡 Run this command inside your project directory.\n"
59
+ "🚀 Or create a new project with:\n"
60
+ " [blue]crawlo startproject myproject[/blue]"
61
+ ),
62
+ title="❌ Not in a Crawlo Project",
63
+ border_style="red",
64
+ padding=(1, 2)
65
+ ))
66
+ return 1
67
+
68
+ project_root_path = Path(project_root)
69
+ project_root_str = str(project_root_path)
70
+
71
+ # 2. 将项目根加入 Python 路径
72
+ if project_root_str not in sys.path:
73
+ sys.path.insert(0, project_root_str)
74
+
75
+ # 3. 读取 crawlo.cfg 获取 settings 模块
76
+ cfg_file = project_root_path / "crawlo.cfg"
77
+ config = configparser.ConfigParser()
78
+ config.read(cfg_file, encoding="utf-8")
79
+
80
+ if not config.has_section("settings") or not config.has_option("settings", "default"):
81
+ console.print(Panel(
82
+ ":cross_mark: [bold red]Invalid crawlo.cfg[/bold red]\n"
83
+ "Missing [settings] section or 'default' option.",
84
+ title="❌ Config Error",
85
+ border_style="red"
86
+ ))
87
+ return 1
88
+
89
+ settings_module = config.get("settings", "default")
90
+ project_package = settings_module.split(".")[0]
91
+
92
+ # 4. 确保项目包可导入
93
+ try:
94
+ import_module(project_package)
95
+ except ImportError as e:
96
+ console.print(Panel(
97
+ f":cross_mark: Failed to import project package '[cyan]{project_package}[/cyan]':\n{e}",
98
+ title="❌ Import Error",
99
+ border_style="red"
100
+ ))
101
+ return 1
102
+
103
+ # 5. 初始化 CrawlerProcess 并加载爬虫模块
104
+ spider_modules = [f"{project_package}.spiders"]
105
+ process = CrawlerProcess(spider_modules=spider_modules)
106
+
107
+ # 6. 获取所有爬虫名称
108
+ spider_names = process.get_spider_names()
109
+ if not spider_names:
110
+ console.print(Panel(
111
+ Text.from_markup(
112
+ ":envelope_with_arrow: [bold]No spiders found[/bold] in '[cyan]spiders/[/cyan]' directory.\n\n"
113
+ "[bold]💡 Make sure:[/bold]\n"
114
+ " • Spider classes inherit from [blue]`crawlo.spider.Spider`[/blue]\n"
115
+ " • Each spider has a [green]`name`[/green] attribute\n"
116
+ " • Spiders are imported in [cyan]`spiders/__init__.py`[/cyan] (if using package)"
117
+ ),
118
+ title="📭 No Spiders Found",
119
+ border_style="yellow",
120
+ padding=(1, 2)
121
+ ))
122
+ return 1
123
+
124
+ # 7. 输出爬虫列表 —— 使用表格
125
+ table = Table(
126
+ title=f"📋 Found {len(spider_names)} spider(s)",
127
+ box=box.ROUNDED,
128
+ show_header=True,
129
+ header_style="bold magenta",
130
+ title_style="bold green"
131
+ )
132
+ table.add_column("Name", style="cyan", no_wrap=True)
133
+ table.add_column("Class", style="green")
134
+ table.add_column("Module", style="dim")
135
+
136
+ for name in sorted(spider_names):
137
+ spider_cls = process.get_spider_class(name)
138
+ module_name = spider_cls.__module__.replace(f"{project_package}.", "")
139
+ table.add_row(name, spider_cls.__name__, module_name)
140
+
141
+ console.print(table)
142
+ return 0
143
+
144
+ except Exception as e:
145
+ console.print(f"[bold red]❌ Unexpected error:[/bold red] {e}")
146
+ logger.exception("Exception during 'crawlo list'")
147
+ return 1