crawlo-1.1.2-py3-none-any.whl → crawlo-1.1.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (113)
  1. crawlo/__init__.py +34 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +40 -40
  4. crawlo/commands/__init__.py +13 -13
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/list.py +155 -155
  8. crawlo/commands/run.py +285 -285
  9. crawlo/commands/startproject.py +196 -196
  10. crawlo/commands/stats.py +188 -188
  11. crawlo/commands/utils.py +186 -186
  12. crawlo/config.py +279 -279
  13. crawlo/core/__init__.py +2 -2
  14. crawlo/core/engine.py +171 -171
  15. crawlo/core/enhanced_engine.py +189 -189
  16. crawlo/core/processor.py +40 -40
  17. crawlo/core/scheduler.py +166 -162
  18. crawlo/crawler.py +1027 -1027
  19. crawlo/downloader/__init__.py +242 -242
  20. crawlo/downloader/aiohttp_downloader.py +212 -212
  21. crawlo/downloader/cffi_downloader.py +251 -251
  22. crawlo/downloader/httpx_downloader.py +259 -257
  23. crawlo/event.py +11 -11
  24. crawlo/exceptions.py +82 -78
  25. crawlo/extension/__init__.py +31 -31
  26. crawlo/extension/log_interval.py +49 -49
  27. crawlo/extension/log_stats.py +44 -44
  28. crawlo/extension/logging_extension.py +34 -34
  29. crawlo/filters/__init__.py +154 -154
  30. crawlo/filters/aioredis_filter.py +242 -242
  31. crawlo/filters/memory_filter.py +269 -269
  32. crawlo/items/__init__.py +23 -23
  33. crawlo/items/base.py +21 -21
  34. crawlo/items/fields.py +53 -53
  35. crawlo/items/items.py +104 -104
  36. crawlo/middleware/__init__.py +21 -21
  37. crawlo/middleware/default_header.py +32 -32
  38. crawlo/middleware/download_delay.py +28 -28
  39. crawlo/middleware/middleware_manager.py +135 -135
  40. crawlo/middleware/proxy.py +248 -248
  41. crawlo/middleware/request_ignore.py +30 -30
  42. crawlo/middleware/response_code.py +18 -18
  43. crawlo/middleware/response_filter.py +26 -26
  44. crawlo/middleware/retry.py +125 -125
  45. crawlo/mode_manager.py +200 -200
  46. crawlo/network/__init__.py +21 -21
  47. crawlo/network/request.py +311 -311
  48. crawlo/network/response.py +271 -269
  49. crawlo/pipelines/__init__.py +22 -13
  50. crawlo/pipelines/bloom_dedup_pipeline.py +157 -0
  51. crawlo/pipelines/console_pipeline.py +39 -39
  52. crawlo/pipelines/csv_pipeline.py +316 -316
  53. crawlo/pipelines/database_dedup_pipeline.py +225 -0
  54. crawlo/pipelines/json_pipeline.py +218 -218
  55. crawlo/pipelines/memory_dedup_pipeline.py +116 -0
  56. crawlo/pipelines/mongo_pipeline.py +116 -116
  57. crawlo/pipelines/mysql_pipeline.py +195 -195
  58. crawlo/pipelines/pipeline_manager.py +56 -56
  59. crawlo/pipelines/redis_dedup_pipeline.py +163 -0
  60. crawlo/project.py +153 -153
  61. crawlo/queue/pqueue.py +37 -37
  62. crawlo/queue/queue_manager.py +307 -303
  63. crawlo/queue/redis_priority_queue.py +208 -191
  64. crawlo/settings/__init__.py +7 -7
  65. crawlo/settings/default_settings.py +245 -226
  66. crawlo/settings/setting_manager.py +99 -99
  67. crawlo/spider/__init__.py +639 -639
  68. crawlo/stats_collector.py +59 -59
  69. crawlo/subscriber.py +106 -106
  70. crawlo/task_manager.py +30 -30
  71. crawlo/templates/crawlo.cfg.tmpl +10 -10
  72. crawlo/templates/project/__init__.py.tmpl +3 -3
  73. crawlo/templates/project/items.py.tmpl +17 -17
  74. crawlo/templates/project/middlewares.py.tmpl +86 -86
  75. crawlo/templates/project/pipelines.py.tmpl +341 -335
  76. crawlo/templates/project/run.py.tmpl +251 -238
  77. crawlo/templates/project/settings.py.tmpl +250 -247
  78. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  79. crawlo/templates/spider/spider.py.tmpl +177 -177
  80. crawlo/utils/__init__.py +7 -7
  81. crawlo/utils/controlled_spider_mixin.py +439 -335
  82. crawlo/utils/date_tools.py +233 -233
  83. crawlo/utils/db_helper.py +343 -343
  84. crawlo/utils/func_tools.py +82 -82
  85. crawlo/utils/large_scale_config.py +286 -286
  86. crawlo/utils/large_scale_helper.py +343 -343
  87. crawlo/utils/log.py +128 -128
  88. crawlo/utils/queue_helper.py +175 -175
  89. crawlo/utils/request.py +267 -267
  90. crawlo/utils/request_serializer.py +219 -219
  91. crawlo/utils/spider_loader.py +62 -62
  92. crawlo/utils/system.py +11 -11
  93. crawlo/utils/tools.py +4 -4
  94. crawlo/utils/url.py +39 -39
  95. {crawlo-1.1.2.dist-info → crawlo-1.1.3.dist-info}/METADATA +635 -567
  96. crawlo-1.1.3.dist-info/RECORD +113 -0
  97. examples/__init__.py +7 -7
  98. examples/controlled_spider_example.py +205 -0
  99. tests/__init__.py +7 -7
  100. tests/test_final_validation.py +153 -153
  101. tests/test_proxy_health_check.py +32 -32
  102. tests/test_proxy_middleware_integration.py +136 -136
  103. tests/test_proxy_providers.py +56 -56
  104. tests/test_proxy_stats.py +19 -19
  105. tests/test_proxy_strategies.py +59 -59
  106. tests/test_redis_config.py +28 -28
  107. tests/test_redis_queue.py +224 -224
  108. tests/test_request_serialization.py +70 -70
  109. tests/test_scheduler.py +241 -241
  110. crawlo-1.1.2.dist-info/RECORD +0 -108
  111. {crawlo-1.1.2.dist-info → crawlo-1.1.3.dist-info}/WHEEL +0 -0
  112. {crawlo-1.1.2.dist-info → crawlo-1.1.3.dist-info}/entry_points.txt +0 -0
  113. {crawlo-1.1.2.dist-info → crawlo-1.1.3.dist-info}/top_level.txt +0 -0
crawlo/commands/stats.py CHANGED
@@ -1,188 +1,188 @@

(The hunk replaces all 188 lines, but the removed and added text render identically — most likely a whitespace or line-ending normalization — so the file is shown once below.)

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
# @Time   : 2025-08-31 22:36
# @Author : crawl-coder
# @Desc   : Command-line entry point: `crawlo stats`, for viewing statistics from recent spider runs.
"""
import json
from pathlib import Path
from datetime import datetime
from typing import Dict, Any

from rich.console import Console
from rich.table import Table
from rich.panel import Panel
from rich.text import Text
from rich import box

from crawlo.utils.log import get_logger


logger = get_logger(__name__)
console = Console()

# Default storage directory (relative to the project root)
STATS_DIR = "logs/stats"

def get_stats_dir() -> Path:
    """
    Resolve the stats storage directory, preferring logs/stats/ under the
    project root; fall back to the current working directory when not
    inside a project.
    """
    current = Path.cwd()
    for _ in range(10):
        if (current / "crawlo.cfg").exists():
            return current / STATS_DIR
        if current == current.parent:
            break
        current = current.parent
    return Path.cwd() / STATS_DIR

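For example, run from any subdirectory of a project whose root contains crawlo.cfg, the walk above lands on that root. A small self-contained demonstration (the directory layout here is hypothetical):

    import os
    import tempfile
    from pathlib import Path
    from crawlo.commands.stats import get_stats_dir

    # Build a throwaway project: <root>/crawlo.cfg with a nested working dir.
    root = Path(tempfile.mkdtemp()).resolve()
    (root / "crawlo.cfg").touch()
    nested = root / "spiders" / "news"
    nested.mkdir(parents=True)
    os.chdir(nested)

    # The walk climbs from spiders/news up to the root holding crawlo.cfg.
    assert get_stats_dir() == root / "logs" / "stats"
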
def record_stats(crawler):
    """
    [Called at spider runtime] Write a spider's end-of-run statistics to a
    JSON file. Must be invoked from the Crawler's `closed` callback.
    """
    spider_name = getattr(crawler.spider, "name", "unknown")
    stats = crawler.stats.get_stats() if crawler.stats else {}

    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    stats_dir = Path(get_stats_dir())
    stats_dir.mkdir(parents=True, exist_ok=True)

    filename = stats_dir / f"{spider_name}_{timestamp}.json"
    try:
        with open(filename, "w", encoding="utf-8") as f:
            json.dump({
                "spider": spider_name,
                "timestamp": datetime.now().isoformat(),
                "stats": stats
            }, f, ensure_ascii=False, indent=2, default=str)
        logger.info(f"📊 Stats saved for spider '{spider_name}' → {filename}")
    except Exception as e:
        logger.error(f"Failed to save stats for '{spider_name}': {e}")

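The docstring says record_stats must run from the Crawler's `closed` callback. A minimal wiring sketch — the extension class and its `closed` hook name are assumptions, since crawlo's signal/extension API is not part of this diff:

    # Assumed wiring — crawlo's real hook registration is not shown here.
    from crawlo.commands.stats import record_stats

    class StatsRecorder:
        """Persist final stats when the crawler closes."""

        def __init__(self, crawler):
            self.crawler = crawler

        def closed(self):
            # Writes logs/stats/<spider_name>_<timestamp>.json
            record_stats(self.crawler)
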
def load_all_stats() -> Dict[str, list]:
    """
    Load every saved stats file, grouped by spider name.
    Returns: {spider_name: [stats_record, ...]}, newest first.
    """
    stats_dir = get_stats_dir()
    if not stats_dir.exists():
        return {}

    result = {}
    json_files = sorted(stats_dir.glob("*.json"), key=lambda x: x.stat().st_mtime, reverse=True)

    for file in json_files:
        try:
            with open(file, "r", encoding="utf-8") as f:
                data = json.load(f)
            spider_name = data.get("spider", "unknown")
            result.setdefault(spider_name, []).append(data)
        except Exception as e:
            logger.warning(f"Failed to load stats file {file}: {e}")
    return result

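Each grouped record has the shape written by record_stats above ("spider", "timestamp", "stats"). A sketch of consuming the grouping — the stat key queried is illustrative, not a key crawlo is known to emit:

    from crawlo.commands.stats import load_all_stats

    # Files are sorted newest-first, so runs[0] is each spider's latest run.
    for spider, runs in load_all_stats().items():
        latest = runs[0]
        print(spider, latest["timestamp"], latest["stats"].get("item_count"))
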
def format_value(v: Any) -> str:
    """Format a value, truncating anything too long to print cleanly."""
    if isinstance(v, float):
        return f"{v:.4f}"
    s = str(v)
    if len(s) > 80:
        return s[:77] + "..."
    return s

def display_stats_table(stats_data: dict, title: str = "Statistics"):
    """Shared helper: render a stats dict as a rich Table."""
    table = Table(title=title, box=box.ROUNDED, show_header=True, header_style="bold magenta")
    table.add_column("Key", style="cyan", no_wrap=True)
    table.add_column("Value", style="green")

    for k in sorted(stats_data.keys()):
        table.add_row(k, format_value(stats_data[k]))

    console.print(table)

def main(args):
    """
    Main entry point: view statistics.
    Usage:
        crawlo stats                  → show the most recent run of every spider
        crawlo stats myspider         → show the given spider's last run
        crawlo stats myspider --all   → show the spider's full run history
    """
    if len(args) > 2:
        console.print("[bold red]Error:[/bold red] Usage: [blue]crawlo stats[/blue] [spider_name] [--all]")
        return 1

    spider_name = None
    show_all = False

    if args:
        show_all = "--all" in args or "-a" in args
        # Take the first non-flag argument as the spider name, so that
        # `crawlo stats --all` is not mistaken for a spider named "--all".
        positional = [a for a in args if a not in ("--all", "-a")]
        if positional:
            spider_name = positional[0]

    all_stats = load_all_stats()

    if not all_stats:
        console.print(Panel(
            Text.from_markup(
                ":chart_with_upwards_trend: [bold]No stats found.[/bold]\n"
                "💡 Run a spider first to generate statistics.\n"
                f"📁 Stats directory: [cyan]{get_stats_dir()}[/cyan]"
            ),
            title="📊 Statistics",
            border_style="yellow",
            padding=(1, 2)
        ))
        return 0

    # Show every spider's most recent run
    if not spider_name:
        console.print(Panel(
            "[bold]Recent Spider Statistics (last run)[/bold]",
            title="📊 Spider Stats Overview",
            border_style="green",
            padding=(0, 1)
        ))

        for name, runs in all_stats.items():
            latest = runs[0]
            ts = latest['timestamp'][:19]
            console.print(f"🕷️ [bold cyan]{name}[/bold cyan] ([green]{ts}[/green])")
            display_stats_table(latest["stats"], title=f"Stats for {name}")
            console.print()  # blank line between spiders

        return 0

    # Show history for the requested spider
    if spider_name not in all_stats:
        console.print(f"[bold red]:cross_mark: No stats found for spider '[cyan]{spider_name}[/cyan]'[/bold red]")
        available = ', '.join(all_stats.keys())
        if available:
            console.print(f":bulb: Available spiders: [green]{available}[/green]")
        return 1

    runs = all_stats[spider_name]
    if show_all:
        console.print(f":bar_chart: [bold]All runs for '[cyan]{spider_name}[/cyan]' ({len(runs)} runs):[/bold]")
    else:
        runs = runs[:1]
        console.print(f":bar_chart: [bold]Last run for '[cyan]{spider_name}[/cyan]':[/bold]")

    for i, run in enumerate(runs, 1):
        ts = run['timestamp']
        subtitle = f"Run #{i} · {ts}" if show_all else f"Last Run · {ts}"
        display_stats_table(run["stats"], title=f"Stats for {spider_name} — {subtitle}")
        if i < len(runs):
            console.print("─" * 60)

    return 0