crawlo-1.1.3-py3-none-any.whl → crawlo-1.1.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo has been flagged as possibly problematic.

Files changed (118)
  1. crawlo/__init__.py +34 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +40 -40
  4. crawlo/commands/__init__.py +13 -13
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/list.py +155 -155
  8. crawlo/commands/run.py +285 -285
  9. crawlo/commands/startproject.py +196 -196
  10. crawlo/commands/stats.py +188 -188
  11. crawlo/commands/utils.py +186 -186
  12. crawlo/config.py +279 -279
  13. crawlo/core/__init__.py +2 -2
  14. crawlo/core/engine.py +171 -171
  15. crawlo/core/enhanced_engine.py +189 -189
  16. crawlo/core/processor.py +40 -40
  17. crawlo/core/scheduler.py +165 -165
  18. crawlo/crawler.py +1027 -1027
  19. crawlo/downloader/__init__.py +242 -242
  20. crawlo/downloader/aiohttp_downloader.py +212 -212
  21. crawlo/downloader/cffi_downloader.py +251 -251
  22. crawlo/downloader/httpx_downloader.py +259 -259
  23. crawlo/event.py +11 -11
  24. crawlo/exceptions.py +81 -81
  25. crawlo/extension/__init__.py +38 -31
  26. crawlo/extension/health_check.py +142 -0
  27. crawlo/extension/log_interval.py +58 -49
  28. crawlo/extension/log_stats.py +82 -44
  29. crawlo/extension/logging_extension.py +44 -35
  30. crawlo/extension/memory_monitor.py +89 -0
  31. crawlo/extension/performance_profiler.py +118 -0
  32. crawlo/extension/request_recorder.py +108 -0
  33. crawlo/filters/__init__.py +154 -154
  34. crawlo/filters/aioredis_filter.py +241 -241
  35. crawlo/filters/memory_filter.py +269 -269
  36. crawlo/items/__init__.py +23 -23
  37. crawlo/items/base.py +21 -21
  38. crawlo/items/fields.py +53 -53
  39. crawlo/items/items.py +104 -104
  40. crawlo/middleware/__init__.py +21 -21
  41. crawlo/middleware/default_header.py +32 -32
  42. crawlo/middleware/download_delay.py +28 -28
  43. crawlo/middleware/middleware_manager.py +135 -135
  44. crawlo/middleware/proxy.py +248 -248
  45. crawlo/middleware/request_ignore.py +30 -30
  46. crawlo/middleware/response_code.py +18 -18
  47. crawlo/middleware/response_filter.py +26 -26
  48. crawlo/middleware/retry.py +124 -124
  49. crawlo/mode_manager.py +200 -200
  50. crawlo/network/__init__.py +21 -21
  51. crawlo/network/request.py +311 -311
  52. crawlo/network/response.py +271 -271
  53. crawlo/pipelines/__init__.py +21 -21
  54. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  55. crawlo/pipelines/console_pipeline.py +39 -39
  56. crawlo/pipelines/csv_pipeline.py +316 -316
  57. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  58. crawlo/pipelines/json_pipeline.py +218 -218
  59. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  60. crawlo/pipelines/mongo_pipeline.py +132 -117
  61. crawlo/pipelines/mysql_pipeline.py +317 -195
  62. crawlo/pipelines/pipeline_manager.py +56 -56
  63. crawlo/pipelines/redis_dedup_pipeline.py +162 -162
  64. crawlo/project.py +153 -153
  65. crawlo/queue/pqueue.py +37 -37
  66. crawlo/queue/queue_manager.py +307 -307
  67. crawlo/queue/redis_priority_queue.py +208 -208
  68. crawlo/settings/__init__.py +7 -7
  69. crawlo/settings/default_settings.py +278 -244
  70. crawlo/settings/setting_manager.py +99 -99
  71. crawlo/spider/__init__.py +639 -639
  72. crawlo/stats_collector.py +59 -59
  73. crawlo/subscriber.py +131 -106
  74. crawlo/task_manager.py +30 -30
  75. crawlo/templates/crawlo.cfg.tmpl +10 -10
  76. crawlo/templates/project/__init__.py.tmpl +3 -3
  77. crawlo/templates/project/items.py.tmpl +17 -17
  78. crawlo/templates/project/middlewares.py.tmpl +111 -87
  79. crawlo/templates/project/pipelines.py.tmpl +97 -341
  80. crawlo/templates/project/run.py.tmpl +251 -251
  81. crawlo/templates/project/settings.py.tmpl +279 -250
  82. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  83. crawlo/templates/spider/spider.py.tmpl +142 -178
  84. crawlo/utils/__init__.py +7 -7
  85. crawlo/utils/controlled_spider_mixin.py +439 -439
  86. crawlo/utils/date_tools.py +233 -233
  87. crawlo/utils/db_helper.py +343 -343
  88. crawlo/utils/func_tools.py +82 -82
  89. crawlo/utils/large_scale_config.py +286 -286
  90. crawlo/utils/large_scale_helper.py +343 -343
  91. crawlo/utils/log.py +128 -128
  92. crawlo/utils/queue_helper.py +175 -175
  93. crawlo/utils/request.py +267 -267
  94. crawlo/utils/request_serializer.py +219 -219
  95. crawlo/utils/spider_loader.py +62 -62
  96. crawlo/utils/system.py +11 -11
  97. crawlo/utils/tools.py +4 -4
  98. crawlo/utils/url.py +39 -39
  99. crawlo-1.1.4.dist-info/METADATA +403 -0
  100. crawlo-1.1.4.dist-info/RECORD +117 -0
  101. examples/__init__.py +7 -7
  102. examples/controlled_spider_example.py +205 -205
  103. tests/__init__.py +7 -7
  104. tests/test_final_validation.py +153 -153
  105. tests/test_proxy_health_check.py +32 -32
  106. tests/test_proxy_middleware_integration.py +136 -136
  107. tests/test_proxy_providers.py +56 -56
  108. tests/test_proxy_stats.py +19 -19
  109. tests/test_proxy_strategies.py +59 -59
  110. tests/test_redis_config.py +28 -28
  111. tests/test_redis_queue.py +224 -224
  112. tests/test_request_serialization.py +70 -70
  113. tests/test_scheduler.py +241 -241
  114. crawlo-1.1.3.dist-info/METADATA +0 -635
  115. crawlo-1.1.3.dist-info/RECORD +0 -113
  116. {crawlo-1.1.3.dist-info → crawlo-1.1.4.dist-info}/WHEEL +0 -0
  117. {crawlo-1.1.3.dist-info → crawlo-1.1.4.dist-info}/entry_points.txt +0 -0
  118. {crawlo-1.1.3.dist-info → crawlo-1.1.4.dist-info}/top_level.txt +0 -0
@@ -1,196 +1,196 @@
- #!/usr/bin/python
- # -*- coding: UTF-8 -*-
- """
- # @Time : 2025-08-31 22:36
- # @Author : crawl-coder
- # @Desc : Command-line entry point: `crawlo startproject baidu` creates a project.
- """
- import shutil
- import re
- from pathlib import Path
- from rich.console import Console
- from rich.panel import Panel
- from rich.text import Text
-
- from .utils import show_error_panel, show_success_panel
-
- # Initialize the rich console
- console = Console()
-
- TEMPLATES_DIR = Path(__file__).parent.parent / 'templates'
-
-
- def _render_template(tmpl_path, context):
-     """Read a template file, replacing each {{key}} with its value from context."""
-     with open(tmpl_path, 'r', encoding='utf-8') as f:
-         content = f.read()
-     for key, value in context.items():
-         content = content.replace(f'{{{{{key}}}}}', str(value))
-     return content
-
-
- def _copytree_with_templates(src, dst, context):
-     """
-     Recursively copy a directory tree, rendering .tmpl files (stripping the .tmpl suffix) and copying all other files as-is.
-     """
-     src_path = Path(src)
-     dst_path = Path(dst)
-     dst_path.mkdir(parents=True, exist_ok=True)
-
-     for item in src_path.rglob('*'):
-         rel_path = item.relative_to(src_path)
-         dst_item = dst_path / rel_path
-
-         if item.is_dir():
-             dst_item.mkdir(parents=True, exist_ok=True)
-         else:
-             if item.suffix == '.tmpl':
-                 rendered_content = _render_template(item, context)
-                 final_dst = dst_item.with_suffix('')
-                 final_dst.parent.mkdir(parents=True, exist_ok=True)
-                 with open(final_dst, 'w', encoding='utf-8') as f:
-                     f.write(rendered_content)
-             else:
-                 shutil.copy2(item, dst_item)
-
-
- def validate_project_name(project_name: str) -> tuple[bool, str]:
-     """
-     Validate the project name.
-
-     Returns:
-         tuple[bool, str]: (is_valid, error_message)
-     """
-     # Check for an empty name
-     if not project_name or not project_name.strip():
-         return False, "Project name cannot be empty"
-
-     project_name = project_name.strip()
-
-     # Check the length
-     if len(project_name) > 50:
-         return False, "Project name too long (max 50 characters)"
-
-     # Check against Python keywords
-     python_keywords = {
-         'False', 'None', 'True', 'and', 'as', 'assert', 'break', 'class',
-         'continue', 'def', 'del', 'elif', 'else', 'except', 'finally',
-         'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda',
-         'nonlocal', 'not', 'or', 'pass', 'raise', 'return', 'try',
-         'while', 'with', 'yield'
-     }
-     if project_name in python_keywords:
-         return False, f"'{project_name}' is a Python keyword and cannot be used as project name"
-
-     # Check that it is a valid Python identifier
-     if not project_name.isidentifier():
-         return False, "Project name must be a valid Python identifier"
-
-     # Check the format (snake_case recommended)
-     if not re.match(r'^[a-z][a-z0-9_]*$', project_name):
-         return False, (
-             "Project name should start with lowercase letter and "
-             "contain only lowercase letters, numbers, and underscores"
-         )
-
-     # Check that the name does not end with a digit (not recommended)
-     if project_name[-1].isdigit():
-         return False, "Project name should not end with a number"
-
-     return True, ""
-
-
- def main(args):
-     if len(args) != 1:
-         console.print("[bold red]Error:[/bold red] Usage: [blue]crawlo startproject[/blue] <project_name>")
-         console.print("💡 Examples:")
-         console.print(" [blue]crawlo startproject[/blue] my_spider_project")
-         console.print(" [blue]crawlo startproject[/blue] news_crawler")
-         console.print(" [blue]crawlo startproject[/blue] ecommerce_spider")
-         return 1
-
-     project_name = args[0]
-
-     # Validate the project name
-     is_valid, error_msg = validate_project_name(project_name)
-     if not is_valid:
-         show_error_panel(
-             "Invalid Project Name",
-             f"[cyan]{project_name}[/cyan] is not a valid project name.\n"
-             f"❌ {error_msg}\n\n"
-             "💡 Project name should:\n"
-             " • Start with lowercase letter\n"
-             " • Contain only lowercase letters, numbers, and underscores\n"
-             " • Be a valid Python identifier\n"
-             " • Not be a Python keyword"
-         )
-         return 1
-
-     project_dir = Path(project_name)
-
-     if project_dir.exists():
-         show_error_panel(
-             "Directory Exists",
-             f"Directory '[cyan]{project_dir}[/cyan]' already exists.\n"
-             "💡 Choose a different project name or remove the existing directory."
-         )
-         return 1
-
-     context = {'project_name': project_name}
-     template_dir = TEMPLATES_DIR / 'project'
-
-     try:
-         # 1. Create the project root directory
-         project_dir.mkdir()
-
-         # 2. Render crawlo.cfg.tmpl
-         cfg_template = TEMPLATES_DIR / 'crawlo.cfg.tmpl'
-         if cfg_template.exists():
-             cfg_content = _render_template(cfg_template, context)
-             (project_dir / 'crawlo.cfg').write_text(cfg_content, encoding='utf-8')
-             console.print(f":white_check_mark: Created [green]{project_dir / 'crawlo.cfg'}[/green]")
-         else:
-             console.print("[yellow]⚠ Warning:[/yellow] Template 'crawlo.cfg.tmpl' not found.")
-
-         # 3. Copy and render the project package contents
-         package_dir = project_dir / project_name
-         _copytree_with_templates(template_dir, package_dir, context)
-         console.print(f":white_check_mark: Created project package: [green]{package_dir}[/green]")
-
-         # 4. Create the logs directory
-         (project_dir / 'logs').mkdir(exist_ok=True)
-         console.print(":white_check_mark: Created logs directory")
-
-         # 5. Create the output directory (for data output)
-         (project_dir / 'output').mkdir(exist_ok=True)
-         console.print(":white_check_mark: Created output directory")
-
-         # Success panel
-         success_text = Text.from_markup(f"Project '[bold cyan]{project_name}[/bold cyan]' created successfully!")
-         console.print(Panel(success_text, title=":rocket: Success", border_style="green", padding=(1, 2)))
-
-         # Next-step hints (clean alignment + syntax highlighting)
-         next_steps = f"""
- [bold]🚀 Next steps:[/bold]
- [blue]cd[/blue] {project_name}
- [blue]crawlo genspider[/blue] example example.com
- [blue]crawlo run[/blue] example
-
- [bold]📚 Learn more:[/bold]
- [blue]crawlo list[/blue] # List all spiders
- [blue]crawlo check[/blue] example # Check spider validity
- [blue]crawlo stats[/blue] # View statistics
- """.strip()
-         console.print(next_steps)
-
-         return 0
-
-     except Exception as e:
-         show_error_panel(
-             "Creation Failed",
-             f"Failed to create project: {e}"
-         )
-         if project_dir.exists():
-             shutil.rmtree(project_dir, ignore_errors=True)
-             console.print("[red]:cross_mark: Cleaned up partially created project.[/red]")
-         return 1
+ #!/usr/bin/python
+ # -*- coding: UTF-8 -*-
+ """
+ # @Time : 2025-08-31 22:36
+ # @Author : crawl-coder
+ # @Desc : Command-line entry point: `crawlo startproject baidu` creates a project.
+ """
+ import shutil
+ import re
+ from pathlib import Path
+ from rich.console import Console
+ from rich.panel import Panel
+ from rich.text import Text
+
+ from .utils import show_error_panel, show_success_panel
+
+ # Initialize the rich console
+ console = Console()
+
+ TEMPLATES_DIR = Path(__file__).parent.parent / 'templates'
+
+
+ def _render_template(tmpl_path, context):
+     """Read a template file, replacing each {{key}} with its value from context."""
+     with open(tmpl_path, 'r', encoding='utf-8') as f:
+         content = f.read()
+     for key, value in context.items():
+         content = content.replace(f'{{{{{key}}}}}', str(value))
+     return content
+
+
+ def _copytree_with_templates(src, dst, context):
+     """
+     Recursively copy a directory tree, rendering .tmpl files (stripping the .tmpl suffix) and copying all other files as-is.
+     """
+     src_path = Path(src)
+     dst_path = Path(dst)
+     dst_path.mkdir(parents=True, exist_ok=True)
+
+     for item in src_path.rglob('*'):
+         rel_path = item.relative_to(src_path)
+         dst_item = dst_path / rel_path
+
+         if item.is_dir():
+             dst_item.mkdir(parents=True, exist_ok=True)
+         else:
+             if item.suffix == '.tmpl':
+                 rendered_content = _render_template(item, context)
+                 final_dst = dst_item.with_suffix('')
+                 final_dst.parent.mkdir(parents=True, exist_ok=True)
+                 with open(final_dst, 'w', encoding='utf-8') as f:
+                     f.write(rendered_content)
+             else:
+                 shutil.copy2(item, dst_item)
+
+
+ def validate_project_name(project_name: str) -> tuple[bool, str]:
+     """
+     Validate the project name.
+
+     Returns:
+         tuple[bool, str]: (is_valid, error_message)
+     """
+     # Check for an empty name
+     if not project_name or not project_name.strip():
+         return False, "Project name cannot be empty"
+
+     project_name = project_name.strip()
+
+     # Check the length
+     if len(project_name) > 50:
+         return False, "Project name too long (max 50 characters)"
+
+     # Check against Python keywords
+     python_keywords = {
+         'False', 'None', 'True', 'and', 'as', 'assert', 'break', 'class',
+         'continue', 'def', 'del', 'elif', 'else', 'except', 'finally',
+         'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda',
+         'nonlocal', 'not', 'or', 'pass', 'raise', 'return', 'try',
+         'while', 'with', 'yield'
+     }
+     if project_name in python_keywords:
+         return False, f"'{project_name}' is a Python keyword and cannot be used as project name"
+
+     # Check that it is a valid Python identifier
+     if not project_name.isidentifier():
+         return False, "Project name must be a valid Python identifier"
+
+     # Check the format (snake_case recommended)
+     if not re.match(r'^[a-z][a-z0-9_]*$', project_name):
+         return False, (
+             "Project name should start with lowercase letter and "
+             "contain only lowercase letters, numbers, and underscores"
+         )
+
+     # Check that the name does not end with a digit (not recommended)
+     if project_name[-1].isdigit():
+         return False, "Project name should not end with a number"
+
+     return True, ""
+
+
+ def main(args):
+     if len(args) != 1:
+         console.print("[bold red]Error:[/bold red] Usage: [blue]crawlo startproject[/blue] <project_name>")
+         console.print("💡 Examples:")
+         console.print(" [blue]crawlo startproject[/blue] my_spider_project")
+         console.print(" [blue]crawlo startproject[/blue] news_crawler")
+         console.print(" [blue]crawlo startproject[/blue] ecommerce_spider")
+         return 1
+
+     project_name = args[0]
+
+     # Validate the project name
+     is_valid, error_msg = validate_project_name(project_name)
+     if not is_valid:
+         show_error_panel(
+             "Invalid Project Name",
+             f"[cyan]{project_name}[/cyan] is not a valid project name.\n"
+             f"❌ {error_msg}\n\n"
+             "💡 Project name should:\n"
+             " • Start with lowercase letter\n"
+             " • Contain only lowercase letters, numbers, and underscores\n"
+             " • Be a valid Python identifier\n"
+             " • Not be a Python keyword"
+         )
+         return 1
+
+     project_dir = Path(project_name)
+
+     if project_dir.exists():
+         show_error_panel(
+             "Directory Exists",
+             f"Directory '[cyan]{project_dir}[/cyan]' already exists.\n"
+             "💡 Choose a different project name or remove the existing directory."
+         )
+         return 1
+
+     context = {'project_name': project_name}
+     template_dir = TEMPLATES_DIR / 'project'
+
+     try:
+         # 1. Create the project root directory
+         project_dir.mkdir()
+
+         # 2. Render crawlo.cfg.tmpl
+         cfg_template = TEMPLATES_DIR / 'crawlo.cfg.tmpl'
+         if cfg_template.exists():
+             cfg_content = _render_template(cfg_template, context)
+             (project_dir / 'crawlo.cfg').write_text(cfg_content, encoding='utf-8')
+             console.print(f":white_check_mark: Created [green]{project_dir / 'crawlo.cfg'}[/green]")
+         else:
+             console.print("[yellow]⚠ Warning:[/yellow] Template 'crawlo.cfg.tmpl' not found.")
+
+         # 3. Copy and render the project package contents
+         package_dir = project_dir / project_name
+         _copytree_with_templates(template_dir, package_dir, context)
+         console.print(f":white_check_mark: Created project package: [green]{package_dir}[/green]")
+
+         # 4. Create the logs directory
+         (project_dir / 'logs').mkdir(exist_ok=True)
+         console.print(":white_check_mark: Created logs directory")
+
+         # 5. Create the output directory (for data output)
+         (project_dir / 'output').mkdir(exist_ok=True)
+         console.print(":white_check_mark: Created output directory")
+
+         # Success panel
+         success_text = Text.from_markup(f"Project '[bold cyan]{project_name}[/bold cyan]' created successfully!")
+         console.print(Panel(success_text, title=":rocket: Success", border_style="green", padding=(1, 2)))
+
+         # Next-step hints (clean alignment + syntax highlighting)
+         next_steps = f"""
+ [bold]🚀 Next steps:[/bold]
+ [blue]cd[/blue] {project_name}
+ [blue]crawlo genspider[/blue] example example.com
+ [blue]crawlo run[/blue] example
+
+ [bold]📚 Learn more:[/bold]
+ [blue]crawlo list[/blue] # List all spiders
+ [blue]crawlo check[/blue] example # Check spider validity
+ [blue]crawlo stats[/blue] # View statistics
+ """.strip()
+         console.print(next_steps)
+
+         return 0
+
+     except Exception as e:
+         show_error_panel(
+             "Creation Failed",
+             f"Failed to create project: {e}"
+         )
+         if project_dir.exists():
+             shutil.rmtree(project_dir, ignore_errors=True)
+             console.print("[red]:cross_mark: Cleaned up partially created project.[/red]")
+         return 1
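
For readers skimming the diff: the scaffolding above relies on plain `{{key}}` string substitution in `_render_template`, not a template engine. A minimal standalone sketch of that mechanic follows; the file name, template body, and context values are hypothetical illustrations, not taken from the package.

import tempfile
from pathlib import Path

def render_template(tmpl_path: Path, context: dict) -> str:
    # Same substitution as _render_template above: each literal '{{key}}'
    # is replaced by str(value); no escaping or nesting is supported.
    content = tmpl_path.read_text(encoding='utf-8')
    for key, value in context.items():
        content = content.replace(f'{{{{{key}}}}}', str(value))
    return content

with tempfile.TemporaryDirectory() as tmp:
    tmpl = Path(tmp) / 'crawlo.cfg.tmpl'
    # Hypothetical template body for illustration only.
    tmpl.write_text('default = {{project_name}}.settings\n', encoding='utf-8')
    print(render_template(tmpl, {'project_name': 'news_crawler'}))
    # prints: default = news_crawler.settings

Because replacement is a plain str.replace over the whole file, any brace pair not matching a context key passes through unchanged, which is why the .tmpl files can contain literal Rich markup like [blue]...[/blue] without interference.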