crawlo-1.1.2-py3-none-any.whl → crawlo-1.1.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (113)
  1. crawlo/__init__.py +34 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +40 -40
  4. crawlo/commands/__init__.py +13 -13
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/list.py +155 -155
  8. crawlo/commands/run.py +285 -285
  9. crawlo/commands/startproject.py +196 -196
  10. crawlo/commands/stats.py +188 -188
  11. crawlo/commands/utils.py +186 -186
  12. crawlo/config.py +279 -279
  13. crawlo/core/__init__.py +2 -2
  14. crawlo/core/engine.py +171 -171
  15. crawlo/core/enhanced_engine.py +189 -189
  16. crawlo/core/processor.py +40 -40
  17. crawlo/core/scheduler.py +166 -162
  18. crawlo/crawler.py +1027 -1027
  19. crawlo/downloader/__init__.py +242 -242
  20. crawlo/downloader/aiohttp_downloader.py +212 -212
  21. crawlo/downloader/cffi_downloader.py +251 -251
  22. crawlo/downloader/httpx_downloader.py +259 -257
  23. crawlo/event.py +11 -11
  24. crawlo/exceptions.py +82 -78
  25. crawlo/extension/__init__.py +31 -31
  26. crawlo/extension/log_interval.py +49 -49
  27. crawlo/extension/log_stats.py +44 -44
  28. crawlo/extension/logging_extension.py +34 -34
  29. crawlo/filters/__init__.py +154 -154
  30. crawlo/filters/aioredis_filter.py +242 -242
  31. crawlo/filters/memory_filter.py +269 -269
  32. crawlo/items/__init__.py +23 -23
  33. crawlo/items/base.py +21 -21
  34. crawlo/items/fields.py +53 -53
  35. crawlo/items/items.py +104 -104
  36. crawlo/middleware/__init__.py +21 -21
  37. crawlo/middleware/default_header.py +32 -32
  38. crawlo/middleware/download_delay.py +28 -28
  39. crawlo/middleware/middleware_manager.py +135 -135
  40. crawlo/middleware/proxy.py +248 -248
  41. crawlo/middleware/request_ignore.py +30 -30
  42. crawlo/middleware/response_code.py +18 -18
  43. crawlo/middleware/response_filter.py +26 -26
  44. crawlo/middleware/retry.py +125 -125
  45. crawlo/mode_manager.py +200 -200
  46. crawlo/network/__init__.py +21 -21
  47. crawlo/network/request.py +311 -311
  48. crawlo/network/response.py +271 -269
  49. crawlo/pipelines/__init__.py +22 -13
  50. crawlo/pipelines/bloom_dedup_pipeline.py +157 -0
  51. crawlo/pipelines/console_pipeline.py +39 -39
  52. crawlo/pipelines/csv_pipeline.py +316 -316
  53. crawlo/pipelines/database_dedup_pipeline.py +225 -0
  54. crawlo/pipelines/json_pipeline.py +218 -218
  55. crawlo/pipelines/memory_dedup_pipeline.py +116 -0
  56. crawlo/pipelines/mongo_pipeline.py +116 -116
  57. crawlo/pipelines/mysql_pipeline.py +195 -195
  58. crawlo/pipelines/pipeline_manager.py +56 -56
  59. crawlo/pipelines/redis_dedup_pipeline.py +163 -0
  60. crawlo/project.py +153 -153
  61. crawlo/queue/pqueue.py +37 -37
  62. crawlo/queue/queue_manager.py +307 -303
  63. crawlo/queue/redis_priority_queue.py +208 -191
  64. crawlo/settings/__init__.py +7 -7
  65. crawlo/settings/default_settings.py +245 -226
  66. crawlo/settings/setting_manager.py +99 -99
  67. crawlo/spider/__init__.py +639 -639
  68. crawlo/stats_collector.py +59 -59
  69. crawlo/subscriber.py +106 -106
  70. crawlo/task_manager.py +30 -30
  71. crawlo/templates/crawlo.cfg.tmpl +10 -10
  72. crawlo/templates/project/__init__.py.tmpl +3 -3
  73. crawlo/templates/project/items.py.tmpl +17 -17
  74. crawlo/templates/project/middlewares.py.tmpl +86 -86
  75. crawlo/templates/project/pipelines.py.tmpl +341 -335
  76. crawlo/templates/project/run.py.tmpl +251 -238
  77. crawlo/templates/project/settings.py.tmpl +250 -247
  78. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  79. crawlo/templates/spider/spider.py.tmpl +177 -177
  80. crawlo/utils/__init__.py +7 -7
  81. crawlo/utils/controlled_spider_mixin.py +439 -335
  82. crawlo/utils/date_tools.py +233 -233
  83. crawlo/utils/db_helper.py +343 -343
  84. crawlo/utils/func_tools.py +82 -82
  85. crawlo/utils/large_scale_config.py +286 -286
  86. crawlo/utils/large_scale_helper.py +343 -343
  87. crawlo/utils/log.py +128 -128
  88. crawlo/utils/queue_helper.py +175 -175
  89. crawlo/utils/request.py +267 -267
  90. crawlo/utils/request_serializer.py +219 -219
  91. crawlo/utils/spider_loader.py +62 -62
  92. crawlo/utils/system.py +11 -11
  93. crawlo/utils/tools.py +4 -4
  94. crawlo/utils/url.py +39 -39
  95. {crawlo-1.1.2.dist-info → crawlo-1.1.3.dist-info}/METADATA +635 -567
  96. crawlo-1.1.3.dist-info/RECORD +113 -0
  97. examples/__init__.py +7 -7
  98. examples/controlled_spider_example.py +205 -0
  99. tests/__init__.py +7 -7
  100. tests/test_final_validation.py +153 -153
  101. tests/test_proxy_health_check.py +32 -32
  102. tests/test_proxy_middleware_integration.py +136 -136
  103. tests/test_proxy_providers.py +56 -56
  104. tests/test_proxy_stats.py +19 -19
  105. tests/test_proxy_strategies.py +59 -59
  106. tests/test_redis_config.py +28 -28
  107. tests/test_redis_queue.py +224 -224
  108. tests/test_request_serialization.py +70 -70
  109. tests/test_scheduler.py +241 -241
  110. crawlo-1.1.2.dist-info/RECORD +0 -108
  111. {crawlo-1.1.2.dist-info → crawlo-1.1.3.dist-info}/WHEEL +0 -0
  112. {crawlo-1.1.2.dist-info → crawlo-1.1.3.dist-info}/entry_points.txt +0 -0
  113. {crawlo-1.1.2.dist-info → crawlo-1.1.3.dist-info}/top_level.txt +0 -0
crawlo/utils/large_scale_config.py
@@ -1,287 +1,287 @@

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Large-scale crawler configuration helper.
Provides optimized configurations for scenarios with tens of thousands of requests.
"""
from typing import Dict, Any, Optional

from crawlo.utils.queue_helper import QueueHelper


class LargeScaleConfig:
    """Configuration presets for large-scale crawling."""

    @staticmethod
    def conservative_config(concurrency: int = 8) -> Dict[str, Any]:
        """
        Conservative preset - for resource-constrained environments.

        Characteristics:
        - Smaller queue capacity
        - Lower concurrency
        - Longer delays
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:conservative",
            max_retries=3,
            timeout=300
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 10,  # queue capacity = 10x concurrency
            'MAX_RUNNING_SPIDERS': 1,

            # Request control
            'DOWNLOAD_DELAY': 0.2,
            'RANDOMNESS': True,
            'RANDOM_RANGE': (0.8, 1.5),

            # Memory control
            'DOWNLOAD_MAXSIZE': 5 * 1024 * 1024,  # 5 MB
            'CONNECTION_POOL_LIMIT': concurrency * 2,

            # Retry policy
            'MAX_RETRY_TIMES': 2,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config

    @staticmethod
    def balanced_config(concurrency: int = 16) -> Dict[str, Any]:
        """
        Balanced preset - for typical production environments.

        Characteristics:
        - Medium queue capacity
        - Balanced concurrency
        - Moderate delays
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:balanced",
            max_retries=5,
            timeout=600
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 15,
            'MAX_RUNNING_SPIDERS': 2,

            # Request control
            'DOWNLOAD_DELAY': 0.1,
            'RANDOMNESS': True,
            'RANDOM_RANGE': (0.5, 1.2),

            # Memory control
            'DOWNLOAD_MAXSIZE': 10 * 1024 * 1024,  # 10 MB
            'CONNECTION_POOL_LIMIT': concurrency * 3,

            # Retry policy
            'MAX_RETRY_TIMES': 3,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config

    @staticmethod
    def aggressive_config(concurrency: int = 32) -> Dict[str, Any]:
        """
        Aggressive preset - for high-performance environments.

        Characteristics:
        - Large queue capacity
        - High concurrency
        - Short delays
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:aggressive",
            max_retries=10,
            timeout=900
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 20,
            'MAX_RUNNING_SPIDERS': 3,

            # Request control
            'DOWNLOAD_DELAY': 0.05,
            'RANDOMNESS': True,
            'RANDOM_RANGE': (0.3, 1.0),

            # Memory control
            'DOWNLOAD_MAXSIZE': 20 * 1024 * 1024,  # 20 MB
            'CONNECTION_POOL_LIMIT': concurrency * 4,

            # Retry policy
            'MAX_RETRY_TIMES': 5,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config

    @staticmethod
    def memory_optimized_config(concurrency: int = 12) -> Dict[str, Any]:
        """
        Memory-optimized preset - for large-scale crawls under tight memory limits.

        Characteristics:
        - Small queue with fast turnover
        - Strict memory limits
        - Uses Redis to relieve memory pressure
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:memory_optimized",
            max_retries=3,
            timeout=300
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 5,  # small queue
            'MAX_RUNNING_SPIDERS': 1,

            # Request control
            'DOWNLOAD_DELAY': 0.1,
            'RANDOMNESS': False,  # less randomness, lower memory use

            # Strict memory limits
            'DOWNLOAD_MAXSIZE': 2 * 1024 * 1024,  # 2 MB
            'DOWNLOAD_WARN_SIZE': 512 * 1024,  # 512 KB
            'CONNECTION_POOL_LIMIT': concurrency,

            # Retry policy
            'MAX_RETRY_TIMES': 2,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config


def apply_large_scale_config(settings_dict: Dict[str, Any], config_type: str = "balanced", concurrency: Optional[int] = None):
    """
    Apply a large-scale configuration preset.

    Args:
        settings_dict: settings dictionary to update in place
        config_type: preset type ("conservative", "balanced", "aggressive", "memory_optimized")
        concurrency: concurrency level (optional; the preset default is used if omitted)
    """
    config_map = {
        "conservative": LargeScaleConfig.conservative_config,
        "balanced": LargeScaleConfig.balanced_config,
        "aggressive": LargeScaleConfig.aggressive_config,
        "memory_optimized": LargeScaleConfig.memory_optimized_config
    }

    if config_type not in config_map:
        raise ValueError(f"Unsupported config type: {config_type}")

    if concurrency is not None:
        config = config_map[config_type](concurrency)
    else:
        config = config_map[config_type]()

    settings_dict.update(config)

    return config


# Usage examples and notes
USAGE_GUIDE = """
# Large-Scale Crawler Configuration Guide

## 1. Choose the right preset

### Conservative
- Best for: resource-constrained environments with unstable networks
- Concurrency: 8 (default)
- Queue capacity: 80
- Delay: 200ms
- Typical use: personal development, small crawls

### Balanced
- Best for: typical production environments
- Concurrency: 16 (default)
- Queue capacity: 240
- Delay: 100ms
- Typical use: small-to-medium production deployments

### Aggressive
- Best for: high-performance servers where speed matters most
- Concurrency: 32 (default)
- Queue capacity: 640
- Delay: 50ms
- Typical use: large organizations, high-concurrency workloads

### Memory Optimized
- Best for: large-scale crawls with limited memory
- Concurrency: 12 (default)
- Queue capacity: 60 (small queue, fast turnover)
- Delay: 100ms
- Typical use: tens to hundreds of thousands of requests on limited memory

## 2. How to use

```python
# Option 1: configure directly in settings.py
from crawlo.utils.large_scale_config import apply_large_scale_config

# Balanced preset with 16 concurrent requests
apply_large_scale_config(locals(), "balanced", 16)

# Option 2: configure dynamically in spider code
from crawlo.crawler import CrawlerProcess
from crawlo.utils.large_scale_config import LargeScaleConfig

process = CrawlerProcess()
config = LargeScaleConfig.memory_optimized_config(20)  # memory-optimized, 20 concurrent
process.settings.update(config)

# Option 3: customize a preset
config = LargeScaleConfig.balanced_config(24)  # 24 concurrent
config['DOWNLOAD_DELAY'] = 0.05  # custom delay
process.settings.update(config)
```

## 3. Recommendations by scenario

### Handling 50,000+ requests
```python
# Memory-optimized preset recommended
apply_large_scale_config(locals(), "memory_optimized", 20)
```

### Fast crawling on capable hardware
```python
# Aggressive preset recommended
apply_large_scale_config(locals(), "aggressive", 40)
```

### Limited resources, stability first
```python
# Conservative preset recommended
apply_large_scale_config(locals(), "conservative", 6)
```

### Balance of performance and stability
```python
# Balanced preset recommended
apply_large_scale_config(locals(), "balanced", 18)
```
"""