crawlo 1.1.8-py3-none-any.whl → 1.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crawlo might be problematic.
- crawlo/__init__.py +61 -61
- crawlo/__version__.py +1 -1
- crawlo/cleaners/__init__.py +60 -60
- crawlo/cleaners/data_formatter.py +225 -225
- crawlo/cleaners/encoding_converter.py +125 -125
- crawlo/cleaners/text_cleaner.py +232 -232
- crawlo/cli.py +65 -65
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +132 -132
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +292 -292
- crawlo/commands/startproject.py +418 -418
- crawlo/commands/stats.py +188 -188
- crawlo/commands/utils.py +186 -186
- crawlo/config.py +312 -312
- crawlo/config_validator.py +252 -252
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +354 -345
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +143 -136
- crawlo/crawler.py +1027 -1027
- crawlo/downloader/__init__.py +266 -266
- crawlo/downloader/aiohttp_downloader.py +220 -220
- crawlo/downloader/cffi_downloader.py +256 -256
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +213 -213
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +37 -37
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +43 -43
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +280 -280
- crawlo/filters/memory_filter.py +269 -269
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +53 -53
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +32 -32
- crawlo/middleware/download_delay.py +28 -28
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/proxy.py +272 -272
- crawlo/middleware/request_ignore.py +30 -30
- crawlo/middleware/response_code.py +18 -18
- crawlo/middleware/response_filter.py +26 -26
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +211 -211
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +338 -338
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +224 -224
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +316 -316
- crawlo/pipelines/pipeline_manager.py +61 -61
- crawlo/pipelines/redis_dedup_pipeline.py +167 -167
- crawlo/project.py +187 -187
- crawlo/queue/pqueue.py +37 -37
- crawlo/queue/queue_manager.py +337 -334
- crawlo/queue/redis_priority_queue.py +298 -298
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +219 -219
- crawlo/settings/setting_manager.py +122 -122
- crawlo/spider/__init__.py +639 -639
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +130 -130
- crawlo/task_manager.py +30 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +109 -109
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/run.py.tmpl +45 -45
- crawlo/templates/project/settings.py.tmpl +326 -326
- crawlo/templates/project/settings_distributed.py.tmpl +119 -119
- crawlo/templates/project/settings_gentle.py.tmpl +94 -94
- crawlo/templates/project/settings_high_performance.py.tmpl +151 -151
- crawlo/templates/project/settings_simple.py.tmpl +68 -68
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +141 -141
- crawlo/tools/__init__.py +182 -182
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +35 -35
- crawlo/tools/distributed_coordinator.py +386 -386
- crawlo/tools/retry_mechanism.py +220 -220
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/utils/__init__.py +35 -35
- crawlo/utils/batch_processor.py +260 -260
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/date_tools.py +290 -290
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +359 -359
- crawlo/utils/env_config.py +105 -105
- crawlo/utils/error_handler.py +125 -125
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +343 -343
- crawlo/utils/log.py +128 -128
- crawlo/utils/performance_monitor.py +284 -284
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +334 -334
- crawlo/utils/redis_key_validator.py +199 -199
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +219 -219
- crawlo/utils/spider_loader.py +62 -62
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.2.0.dist-info/METADATA +697 -0
- crawlo-1.2.0.dist-info/RECORD +190 -0
- examples/__init__.py +7 -7
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +236 -236
- tests/cleaners_example.py +160 -160
- tests/config_validation_demo.py +102 -102
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/redis_key_validation_demo.py +130 -130
- tests/response_improvements_example.py +144 -144
- tests/test_advanced_tools.py +148 -148
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_cleaners.py +54 -54
- tests/test_comprehensive.py +146 -146
- tests/test_config_validator.py +193 -193
- tests/test_date_tools.py +123 -123
- tests/test_double_crawlo_fix.py +207 -207
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +103 -103
- tests/test_integration.py +356 -356
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_manager_double_crawlo.py +174 -231
- tests/test_queue_manager_redis_key.py +176 -176
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_request_serialization.py +70 -70
- tests/test_response_improvements.py +152 -152
- tests/test_scheduler.py +241 -241
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +153 -153
- tests/tools_example.py +257 -257
- crawlo-1.1.8.dist-info/METADATA +0 -626
- crawlo-1.1.8.dist-info/RECORD +0 -190
- {crawlo-1.1.8.dist-info → crawlo-1.2.0.dist-info}/WHEEL +0 -0
- {crawlo-1.1.8.dist-info → crawlo-1.2.0.dist-info}/entry_points.txt +0 -0
- {crawlo-1.1.8.dist-info → crawlo-1.2.0.dist-info}/top_level.txt +0 -0
crawlo/utils/large_scale_config.py
@@ -1,287 +1,287 @@
(The removed and re-added lines are identical, so the file content is listed once below.)

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Large-scale crawler configuration helper
Provides optimized configurations for workloads of tens of thousands of requests
"""
from typing import Dict, Any

from crawlo.utils.queue_helper import QueueHelper


class LargeScaleConfig:
    """Large-scale crawler configuration class"""

    @staticmethod
    def conservative_config(concurrency: int = 8) -> Dict[str, Any]:
        """
        Conservative configuration - for environments with limited resources

        Characteristics:
        - Smaller queue capacity
        - Lower concurrency
        - Longer delays
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:conservative",
            max_retries=3,
            timeout=300
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 10,  # queue capacity is 10x the concurrency
            'MAX_RUNNING_SPIDERS': 1,

            # Request control
            'DOWNLOAD_DELAY': 0.2,
            'RANDOMNESS': True,
            'RANDOM_RANGE': (0.8, 1.5),

            # Memory control
            'DOWNLOAD_MAXSIZE': 5 * 1024 * 1024,  # 5MB
            'CONNECTION_POOL_LIMIT': concurrency * 2,

            # Retry policy
            'MAX_RETRY_TIMES': 2,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config

    @staticmethod
    def balanced_config(concurrency: int = 16) -> Dict[str, Any]:
        """
        Balanced configuration - for typical production environments

        Characteristics:
        - Medium queue capacity
        - Balanced concurrency
        - Moderate delays
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:balanced",
            max_retries=5,
            timeout=600
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 15,
            'MAX_RUNNING_SPIDERS': 2,

            # Request control
            'DOWNLOAD_DELAY': 0.1,
            'RANDOMNESS': True,
            'RANDOM_RANGE': (0.5, 1.2),

            # Memory control
            'DOWNLOAD_MAXSIZE': 10 * 1024 * 1024,  # 10MB
            'CONNECTION_POOL_LIMIT': concurrency * 3,

            # Retry policy
            'MAX_RETRY_TIMES': 3,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config

    @staticmethod
    def aggressive_config(concurrency: int = 32) -> Dict[str, Any]:
        """
        Aggressive configuration - for high-performance environments

        Characteristics:
        - Large queue capacity
        - High concurrency
        - Shorter delays
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:aggressive",
            max_retries=10,
            timeout=900
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 20,
            'MAX_RUNNING_SPIDERS': 3,

            # Request control
            'DOWNLOAD_DELAY': 0.05,
            'RANDOMNESS': True,
            'RANDOM_RANGE': (0.3, 1.0),

            # Memory control
            'DOWNLOAD_MAXSIZE': 20 * 1024 * 1024,  # 20MB
            'CONNECTION_POOL_LIMIT': concurrency * 4,

            # Retry policy
            'MAX_RETRY_TIMES': 5,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config

    @staticmethod
    def memory_optimized_config(concurrency: int = 12) -> Dict[str, Any]:
        """
        Memory-optimized configuration - for large-scale crawls with constrained memory

        Characteristics:
        - Small queue with fast turnover
        - Strict memory control
        - Uses Redis to reduce memory pressure
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:memory_optimized",
            max_retries=3,
            timeout=300
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 5,  # small queue
            'MAX_RUNNING_SPIDERS': 1,

            # Request control
            'DOWNLOAD_DELAY': 0.1,
            'RANDOMNESS': False,  # less randomness, lower memory usage

            # Strict memory control
            'DOWNLOAD_MAXSIZE': 2 * 1024 * 1024,  # 2MB
            'DOWNLOAD_WARN_SIZE': 512 * 1024,  # 512KB
            'CONNECTION_POOL_LIMIT': concurrency,

            # Retry policy
            'MAX_RETRY_TIMES': 2,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config


def apply_large_scale_config(settings_dict: Dict[str, Any], config_type: str = "balanced", concurrency: int = None):
    """
    Apply a large-scale configuration

    Args:
        settings_dict: settings dictionary
        config_type: configuration type ("conservative", "balanced", "aggressive", "memory_optimized")
        concurrency: concurrency level (optional; the preset default is used if omitted)
    """
    config_map = {
        "conservative": LargeScaleConfig.conservative_config,
        "balanced": LargeScaleConfig.balanced_config,
        "aggressive": LargeScaleConfig.aggressive_config,
        "memory_optimized": LargeScaleConfig.memory_optimized_config
    }

    if config_type not in config_map:
        raise ValueError(f"Unsupported configuration type: {config_type}")

    if concurrency:
        config = config_map[config_type](concurrency)
    else:
        config = config_map[config_type]()

    settings_dict.update(config)

    return config


# Usage examples and notes
USAGE_GUIDE = """
# Large-Scale Crawler Configuration Guide

## 1. Choose an appropriate configuration type

### Conservative
- Suited for: resource-constrained or unstable-network environments
- Concurrency: 8 (default)
- Queue capacity: 80
- Delay: 200ms
- Typical use: personal development, small-scale crawling

### Balanced
- Suited for: typical production environments
- Concurrency: 16 (default)
- Queue capacity: 240
- Delay: 100ms
- Typical use: small and medium business production environments

### Aggressive
- Suited for: high-performance servers with demanding speed requirements
- Concurrency: 32 (default)
- Queue capacity: 640
- Delay: 50ms
- Typical use: large organizations with high-concurrency needs

### Memory Optimized
- Suited for: large-scale crawls with constrained memory
- Concurrency: 12 (default)
- Queue capacity: 60 (small queue, fast turnover)
- Delay: 100ms
- Typical use: tens to hundreds of thousands of requests with limited memory

## 2. Usage

```python
# Option 1: configure directly in settings.py
from crawlo.utils.large_scale_config import apply_large_scale_config

# Balanced configuration with concurrency 16
apply_large_scale_config(locals(), "balanced", 16)

# Option 2: configure dynamically in crawler code
from crawlo.crawler import CrawlerProcess
from crawlo.utils.large_scale_config import LargeScaleConfig

process = CrawlerProcess()
config = LargeScaleConfig.memory_optimized_config(20)  # memory-optimized, concurrency 20
process.settings.update(config)

# Option 3: customize a preset
config = LargeScaleConfig.balanced_config(24)  # concurrency 24
config['DOWNLOAD_DELAY'] = 0.05  # custom delay
process.settings.update(config)
```

## 3. Recommendations for different scenarios

### Handling 50k+ requests
```python
# Memory-optimized configuration recommended
apply_large_scale_config(locals(), "memory_optimized", 20)
```

### High-speed crawling on capable servers
```python
# Aggressive configuration recommended
apply_large_scale_config(locals(), "aggressive", 40)
```

### Limited resources, but stable operation required
```python
# Conservative configuration recommended
apply_large_scale_config(locals(), "conservative", 6)
```

### Balancing performance and stability
```python
# Balanced configuration recommended
apply_large_scale_config(locals(), "balanced", 18)
```
"""
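The embedded USAGE_GUIDE already shows the intended entry points. As a quick sanity check of the values each preset derives from the concurrency level, the sketch below applies a preset to a plain dict and inspects the result; it assumes crawlo 1.2.0 is importable and that `QueueHelper.use_redis_queue` only builds a settings dict rather than opening a Redis connection at this point.

```python
# Minimal sketch (assumes crawlo 1.2.0 is installed and that
# QueueHelper.use_redis_queue merely returns a settings dict,
# without connecting to Redis).
from crawlo.utils.large_scale_config import LargeScaleConfig, apply_large_scale_config

settings = {}
applied = apply_large_scale_config(settings, "balanced", 16)

# Queue capacity is derived from concurrency: 16 * 15 = 240 for "balanced".
assert settings['CONCURRENCY'] == 16
assert settings['SCHEDULER_MAX_QUEUE_SIZE'] == 240
assert applied['MAX_RETRY_TIMES'] == 3

# Compare presets side by side before picking one.
for name, factory in [
    ("conservative", LargeScaleConfig.conservative_config),
    ("memory_optimized", LargeScaleConfig.memory_optimized_config),
]:
    preset = factory()  # preset defaults: 8 and 12 concurrency respectively
    print(name,
          preset['CONCURRENCY'],
          preset['SCHEDULER_MAX_QUEUE_SIZE'],
          preset['DOWNLOAD_DELAY'])
```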