crawlo-1.1.4-py3-none-any.whl → crawlo-1.1.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic.

Files changed (186)
  1. crawlo/__init__.py +61 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +61 -0
  4. crawlo/cleaners/data_formatter.py +226 -0
  5. crawlo/cleaners/encoding_converter.py +126 -0
  6. crawlo/cleaners/text_cleaner.py +233 -0
  7. crawlo/cli.py +40 -40
  8. crawlo/commands/__init__.py +13 -13
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/list.py +155 -155
  12. crawlo/commands/run.py +285 -285
  13. crawlo/commands/startproject.py +300 -196
  14. crawlo/commands/stats.py +188 -188
  15. crawlo/commands/utils.py +186 -186
  16. crawlo/config.py +309 -279
  17. crawlo/config_validator.py +253 -0
  18. crawlo/core/__init__.py +2 -2
  19. crawlo/core/engine.py +346 -172
  20. crawlo/core/processor.py +40 -40
  21. crawlo/core/scheduler.py +137 -166
  22. crawlo/crawler.py +1027 -1027
  23. crawlo/downloader/__init__.py +266 -242
  24. crawlo/downloader/aiohttp_downloader.py +220 -212
  25. crawlo/downloader/cffi_downloader.py +256 -251
  26. crawlo/downloader/httpx_downloader.py +259 -259
  27. crawlo/downloader/hybrid_downloader.py +214 -0
  28. crawlo/downloader/playwright_downloader.py +403 -0
  29. crawlo/downloader/selenium_downloader.py +473 -0
  30. crawlo/event.py +11 -11
  31. crawlo/exceptions.py +81 -81
  32. crawlo/extension/__init__.py +37 -37
  33. crawlo/extension/health_check.py +141 -141
  34. crawlo/extension/log_interval.py +57 -57
  35. crawlo/extension/log_stats.py +81 -81
  36. crawlo/extension/logging_extension.py +43 -43
  37. crawlo/extension/memory_monitor.py +104 -88
  38. crawlo/extension/performance_profiler.py +133 -117
  39. crawlo/extension/request_recorder.py +107 -107
  40. crawlo/filters/__init__.py +154 -154
  41. crawlo/filters/aioredis_filter.py +280 -242
  42. crawlo/filters/memory_filter.py +269 -269
  43. crawlo/items/__init__.py +23 -23
  44. crawlo/items/base.py +21 -21
  45. crawlo/items/fields.py +53 -53
  46. crawlo/items/items.py +104 -104
  47. crawlo/middleware/__init__.py +21 -21
  48. crawlo/middleware/default_header.py +32 -32
  49. crawlo/middleware/download_delay.py +28 -28
  50. crawlo/middleware/middleware_manager.py +135 -135
  51. crawlo/middleware/proxy.py +272 -248
  52. crawlo/middleware/request_ignore.py +30 -30
  53. crawlo/middleware/response_code.py +18 -18
  54. crawlo/middleware/response_filter.py +26 -26
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/mode_manager.py +206 -201
  57. crawlo/network/__init__.py +21 -21
  58. crawlo/network/request.py +338 -311
  59. crawlo/network/response.py +360 -271
  60. crawlo/pipelines/__init__.py +21 -21
  61. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  62. crawlo/pipelines/console_pipeline.py +39 -39
  63. crawlo/pipelines/csv_pipeline.py +316 -316
  64. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  65. crawlo/pipelines/json_pipeline.py +218 -218
  66. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  67. crawlo/pipelines/mongo_pipeline.py +131 -131
  68. crawlo/pipelines/mysql_pipeline.py +316 -316
  69. crawlo/pipelines/pipeline_manager.py +56 -56
  70. crawlo/pipelines/redis_dedup_pipeline.py +166 -162
  71. crawlo/project.py +153 -153
  72. crawlo/queue/pqueue.py +37 -37
  73. crawlo/queue/queue_manager.py +320 -307
  74. crawlo/queue/redis_priority_queue.py +277 -209
  75. crawlo/settings/__init__.py +7 -7
  76. crawlo/settings/default_settings.py +216 -278
  77. crawlo/settings/setting_manager.py +99 -99
  78. crawlo/spider/__init__.py +639 -639
  79. crawlo/stats_collector.py +59 -59
  80. crawlo/subscriber.py +130 -130
  81. crawlo/task_manager.py +30 -30
  82. crawlo/templates/crawlo.cfg.tmpl +10 -10
  83. crawlo/templates/project/__init__.py.tmpl +3 -3
  84. crawlo/templates/project/items.py.tmpl +17 -17
  85. crawlo/templates/project/middlewares.py.tmpl +110 -110
  86. crawlo/templates/project/pipelines.py.tmpl +97 -97
  87. crawlo/templates/project/run.py.tmpl +251 -251
  88. crawlo/templates/project/settings.py.tmpl +326 -279
  89. crawlo/templates/project/settings_distributed.py.tmpl +120 -0
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -0
  91. crawlo/templates/project/settings_high_performance.py.tmpl +152 -0
  92. crawlo/templates/project/settings_simple.py.tmpl +69 -0
  93. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  94. crawlo/templates/spider/spider.py.tmpl +141 -141
  95. crawlo/tools/__init__.py +183 -0
  96. crawlo/tools/anti_crawler.py +269 -0
  97. crawlo/tools/authenticated_proxy.py +241 -0
  98. crawlo/tools/data_validator.py +181 -0
  99. crawlo/tools/date_tools.py +36 -0
  100. crawlo/tools/distributed_coordinator.py +387 -0
  101. crawlo/tools/retry_mechanism.py +221 -0
  102. crawlo/tools/scenario_adapter.py +263 -0
  103. crawlo/utils/__init__.py +35 -7
  104. crawlo/utils/batch_processor.py +261 -0
  105. crawlo/utils/controlled_spider_mixin.py +439 -439
  106. crawlo/utils/date_tools.py +290 -233
  107. crawlo/utils/db_helper.py +343 -343
  108. crawlo/utils/enhanced_error_handler.py +360 -0
  109. crawlo/utils/env_config.py +106 -0
  110. crawlo/utils/error_handler.py +126 -0
  111. crawlo/utils/func_tools.py +82 -82
  112. crawlo/utils/large_scale_config.py +286 -286
  113. crawlo/utils/large_scale_helper.py +343 -343
  114. crawlo/utils/log.py +128 -128
  115. crawlo/utils/performance_monitor.py +285 -0
  116. crawlo/utils/queue_helper.py +175 -175
  117. crawlo/utils/redis_connection_pool.py +335 -0
  118. crawlo/utils/redis_key_validator.py +200 -0
  119. crawlo/utils/request.py +267 -267
  120. crawlo/utils/request_serializer.py +219 -219
  121. crawlo/utils/spider_loader.py +62 -62
  122. crawlo/utils/system.py +11 -11
  123. crawlo/utils/tools.py +4 -4
  124. crawlo/utils/url.py +39 -39
  125. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/METADATA +401 -403
  126. crawlo-1.1.5.dist-info/RECORD +185 -0
  127. examples/__init__.py +7 -7
  128. tests/__init__.py +7 -7
  129. tests/advanced_tools_example.py +276 -0
  130. tests/authenticated_proxy_example.py +237 -0
  131. tests/cleaners_example.py +161 -0
  132. tests/config_validation_demo.py +103 -0
  133. {examples → tests}/controlled_spider_example.py +205 -205
  134. tests/date_tools_example.py +181 -0
  135. tests/dynamic_loading_example.py +524 -0
  136. tests/dynamic_loading_test.py +105 -0
  137. tests/env_config_example.py +134 -0
  138. tests/error_handling_example.py +172 -0
  139. tests/redis_key_validation_demo.py +131 -0
  140. tests/response_improvements_example.py +145 -0
  141. tests/test_advanced_tools.py +149 -0
  142. tests/test_all_redis_key_configs.py +146 -0
  143. tests/test_authenticated_proxy.py +142 -0
  144. tests/test_cleaners.py +55 -0
  145. tests/test_comprehensive.py +147 -0
  146. tests/test_config_validator.py +194 -0
  147. tests/test_date_tools.py +124 -0
  148. tests/test_dynamic_downloaders_proxy.py +125 -0
  149. tests/test_dynamic_proxy.py +93 -0
  150. tests/test_dynamic_proxy_config.py +147 -0
  151. tests/test_dynamic_proxy_real.py +110 -0
  152. tests/test_edge_cases.py +304 -0
  153. tests/test_enhanced_error_handler.py +271 -0
  154. tests/test_env_config.py +122 -0
  155. tests/test_error_handler_compatibility.py +113 -0
  156. tests/test_final_validation.py +153 -153
  157. tests/test_framework_env_usage.py +104 -0
  158. tests/test_integration.py +357 -0
  159. tests/test_item_dedup_redis_key.py +123 -0
  160. tests/test_parsel.py +30 -0
  161. tests/test_performance.py +328 -0
  162. tests/test_proxy_health_check.py +32 -32
  163. tests/test_proxy_middleware_integration.py +136 -136
  164. tests/test_proxy_providers.py +56 -56
  165. tests/test_proxy_stats.py +19 -19
  166. tests/test_proxy_strategies.py +59 -59
  167. tests/test_queue_manager_redis_key.py +177 -0
  168. tests/test_redis_config.py +28 -28
  169. tests/test_redis_connection_pool.py +295 -0
  170. tests/test_redis_key_naming.py +182 -0
  171. tests/test_redis_key_validator.py +124 -0
  172. tests/test_redis_queue.py +224 -224
  173. tests/test_request_serialization.py +70 -70
  174. tests/test_response_improvements.py +153 -0
  175. tests/test_scheduler.py +241 -241
  176. tests/test_simple_response.py +62 -0
  177. tests/test_telecom_spider_redis_key.py +206 -0
  178. tests/test_template_content.py +88 -0
  179. tests/test_template_redis_key.py +135 -0
  180. tests/test_tools.py +154 -0
  181. tests/tools_example.py +258 -0
  182. crawlo/core/enhanced_engine.py +0 -190
  183. crawlo-1.1.4.dist-info/RECORD +0 -117
  184. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/WHEEL +0 -0
  185. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/entry_points.txt +0 -0
  186. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/top_level.txt +0 -0
crawlo/commands/stats.py CHANGED
@@ -1,188 +1,188 @@
(The removed and re-added halves of this hunk are textually identical — all 188 lines are deleted and restored unchanged, consistent with a whitespace or line-ending-only change — so the file content is shown once below.)

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
# @Time   : 2025-08-31 22:36
# @Author : crawl-coder
# @Desc   : CLI entry point for `crawlo stats`: view statistics from recent spider runs.
"""
import json
from pathlib import Path
from datetime import datetime
from typing import Dict, Any

from rich.console import Console
from rich.table import Table
from rich.panel import Panel
from rich.text import Text
from rich import box

from crawlo.utils.log import get_logger


logger = get_logger(__name__)
console = Console()

# Default storage directory (relative to the project root)
STATS_DIR = "logs/stats"


def get_stats_dir() -> Path:
    """
    Locate the stats directory, preferring logs/stats/ under the project root.
    Falls back to the current directory when not inside a project.
    """
    current = Path.cwd()
    for _ in range(10):
        if (current / "crawlo.cfg").exists():
            return current / STATS_DIR
        if current == current.parent:
            break
        current = current.parent
    return Path.cwd() / STATS_DIR


def record_stats(crawler):
    """
    [Called at spider runtime] Write the spider's final statistics to a JSON file.
    Must be invoked from the Crawler's closed callback.
    """
    spider_name = getattr(crawler.spider, "name", "unknown")
    stats = crawler.stats.get_stats() if crawler.stats else {}

    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    stats_dir = Path(get_stats_dir())
    stats_dir.mkdir(parents=True, exist_ok=True)

    filename = stats_dir / f"{spider_name}_{timestamp}.json"
    try:
        with open(filename, "w", encoding="utf-8") as f:
            json.dump({
                "spider": spider_name,
                "timestamp": datetime.now().isoformat(),
                "stats": stats
            }, f, ensure_ascii=False, indent=2, default=str)
        logger.info(f"📊 Stats saved for spider '{spider_name}' → {filename}")
    except Exception as e:
        logger.error(f"Failed to save stats for '{spider_name}': {e}")


def load_all_stats() -> Dict[str, list]:
    """
    Load every saved stats file, grouped by spider name.
    Returns: {spider_name: [stats_record, ...]}
    """
    stats_dir = get_stats_dir()
    if not stats_dir.exists():
        return {}

    result = {}
    json_files = sorted(stats_dir.glob("*.json"), key=lambda x: x.stat().st_mtime, reverse=True)

    for file in json_files:
        try:
            with open(file, "r", encoding="utf-8") as f:
                data = json.load(f)
            spider_name = data.get("spider", "unknown")
            result.setdefault(spider_name, []).append(data)
        except Exception as e:
            logger.warning(f"Failed to load stats file {file}: {e}")
    return result


def format_value(v: Any) -> str:
    """Format a value, guarding against overly long or unprintable output."""
    if isinstance(v, float):
        return f"{v:.4f}"
    s = str(v)
    if len(s) > 80:
        return s[:77] + "..."
    return s


def display_stats_table(stats_data: dict, title: str = "Statistics"):
    """Shared helper: render statistics with rich.table."""
    table = Table(title=title, box=box.ROUNDED, show_header=True, header_style="bold magenta")
    table.add_column("Key", style="cyan", no_wrap=True)
    table.add_column("Value", style="green")

    for k in sorted(stats_data.keys()):
        table.add_row(k, format_value(stats_data[k]))

    console.print(table)


def main(args):
    """
    Entry point: view statistics.
    Usage:
        crawlo stats                  → show the last run of every spider
        crawlo stats myspider         → show the last run of the given spider
        crawlo stats myspider --all   → show every recorded run of the given spider
    """
    if len(args) > 2:
        console.print("[bold red]Error:[/bold red] Usage: [blue]crawlo stats[/blue] [spider_name] [--all]")
        return 1

    spider_name = None
    show_all = False

    if args:
        spider_name = args[0]
        show_all = "--all" in args or "-a" in args

    all_stats = load_all_stats()

    if not all_stats:
        console.print(Panel(
            Text.from_markup(
                ":chart_with_upwards_trend: [bold]No stats found.[/bold]\n"
                "💡 Run a spider first to generate statistics.\n"
                f"📁 Stats directory: [cyan]{get_stats_dir()}[/cyan]"
            ),
            title="📊 Statistics",
            border_style="yellow",
            padding=(1, 2)
        ))
        return 0

    # Show the most recent run of every spider
    if not spider_name:
        console.print(Panel(
            "[bold]Recent Spider Statistics (last run)[/bold]",
            title="📊 Spider Stats Overview",
            border_style="green",
            padding=(0, 1)
        ))

        for name, runs in all_stats.items():
            latest = runs[0]
            ts = latest['timestamp'][:19]
            console.print(f"🕷️ [bold cyan]{name}[/bold cyan] ([green]{ts}[/green])")
            display_stats_table(latest["stats"], title=f"Stats for {name}")
            console.print()  # blank line between spiders

        return 0

    # Show history for the requested spider
    if spider_name not in all_stats:
        console.print(f"[bold red]:cross_mark: No stats found for spider '[cyan]{spider_name}[/cyan]'[/bold red]")
        available = ', '.join(all_stats.keys())
        if available:
            console.print(f":bulb: Available spiders: [green]{available}[/green]")
        return 1

    runs = all_stats[spider_name]
    if show_all:
        console.print(f":bar_chart: [bold]All runs for '[cyan]{spider_name}[/cyan]' ({len(runs)} runs):[/bold]")
    else:
        runs = runs[:1]
        console.print(f":bar_chart: [bold]Last run for '[cyan]{spider_name}[/cyan]':[/bold]")

    for i, run in enumerate(runs, 1):
        ts = run['timestamp']
        subtitle = f"Run #{i} · {ts}" if show_all else f"Last Run · {ts}"
        display_stats_table(run["stats"], title=f"Stats for {spider_name} — {subtitle}")
        if i < len(runs):
            console.print("─" * 60)

    return 0
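
The docstring for record_stats says it must be invoked from the Crawler's closed callback, and the function only reads two attributes: crawler.spider.name and crawler.stats.get_stats(). As a minimal sketch of that contract — the _Stats class, the counter names, and the SimpleNamespace wiring below are stand-ins invented for illustration, not crawlo's actual Crawler API:

    from types import SimpleNamespace
    from crawlo.commands.stats import record_stats, load_all_stats

    # Stand-in for a stats collector; real crawlo stats keys may differ.
    class _Stats:
        def get_stats(self):
            return {"items_scraped": 120, "requests_sent": 340, "elapsed_seconds": 12.5}

    # Mimic the two attributes record_stats actually touches.
    crawler = SimpleNamespace(spider=SimpleNamespace(name="myspider"), stats=_Stats())

    record_stats(crawler)           # writes logs/stats/myspider_<timestamp>.json
    print(sorted(load_all_stats())) # -> ['myspider'] (plus any other recorded spiders)

The written file has the shape {"spider": ..., "timestamp": ..., "stats": {...}}, which is exactly what load_all_stats groups by spider name and what `crawlo stats` renders as rich tables.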