crawlo 1.1.8-py3-none-any.whl → 1.1.9-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo has been flagged as potentially problematic.

Files changed (190):
  1. crawlo/__init__.py +61 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +60 -60
  4. crawlo/cleaners/data_formatter.py +225 -225
  5. crawlo/cleaners/encoding_converter.py +125 -125
  6. crawlo/cleaners/text_cleaner.py +232 -232
  7. crawlo/cli.py +65 -65
  8. crawlo/commands/__init__.py +14 -14
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/help.py +132 -132
  12. crawlo/commands/list.py +155 -155
  13. crawlo/commands/run.py +292 -292
  14. crawlo/commands/startproject.py +418 -418
  15. crawlo/commands/stats.py +188 -188
  16. crawlo/commands/utils.py +186 -186
  17. crawlo/config.py +312 -312
  18. crawlo/config_validator.py +252 -252
  19. crawlo/core/__init__.py +2 -2
  20. crawlo/core/engine.py +345 -345
  21. crawlo/core/processor.py +40 -40
  22. crawlo/core/scheduler.py +136 -136
  23. crawlo/crawler.py +1027 -1027
  24. crawlo/downloader/__init__.py +266 -266
  25. crawlo/downloader/aiohttp_downloader.py +220 -220
  26. crawlo/downloader/cffi_downloader.py +256 -256
  27. crawlo/downloader/httpx_downloader.py +259 -259
  28. crawlo/downloader/hybrid_downloader.py +213 -213
  29. crawlo/downloader/playwright_downloader.py +402 -402
  30. crawlo/downloader/selenium_downloader.py +472 -472
  31. crawlo/event.py +11 -11
  32. crawlo/exceptions.py +81 -81
  33. crawlo/extension/__init__.py +37 -37
  34. crawlo/extension/health_check.py +141 -141
  35. crawlo/extension/log_interval.py +57 -57
  36. crawlo/extension/log_stats.py +81 -81
  37. crawlo/extension/logging_extension.py +43 -43
  38. crawlo/extension/memory_monitor.py +104 -104
  39. crawlo/extension/performance_profiler.py +133 -133
  40. crawlo/extension/request_recorder.py +107 -107
  41. crawlo/filters/__init__.py +154 -154
  42. crawlo/filters/aioredis_filter.py +280 -280
  43. crawlo/filters/memory_filter.py +269 -269
  44. crawlo/items/__init__.py +23 -23
  45. crawlo/items/base.py +21 -21
  46. crawlo/items/fields.py +53 -53
  47. crawlo/items/items.py +104 -104
  48. crawlo/middleware/__init__.py +21 -21
  49. crawlo/middleware/default_header.py +32 -32
  50. crawlo/middleware/download_delay.py +28 -28
  51. crawlo/middleware/middleware_manager.py +135 -135
  52. crawlo/middleware/proxy.py +272 -272
  53. crawlo/middleware/request_ignore.py +30 -30
  54. crawlo/middleware/response_code.py +18 -18
  55. crawlo/middleware/response_filter.py +26 -26
  56. crawlo/middleware/retry.py +124 -124
  57. crawlo/mode_manager.py +211 -211
  58. crawlo/network/__init__.py +21 -21
  59. crawlo/network/request.py +338 -338
  60. crawlo/network/response.py +359 -359
  61. crawlo/pipelines/__init__.py +21 -21
  62. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  63. crawlo/pipelines/console_pipeline.py +39 -39
  64. crawlo/pipelines/csv_pipeline.py +316 -316
  65. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  66. crawlo/pipelines/json_pipeline.py +218 -218
  67. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  68. crawlo/pipelines/mongo_pipeline.py +131 -131
  69. crawlo/pipelines/mysql_pipeline.py +316 -316
  70. crawlo/pipelines/pipeline_manager.py +61 -61
  71. crawlo/pipelines/redis_dedup_pipeline.py +167 -167
  72. crawlo/project.py +187 -187
  73. crawlo/queue/pqueue.py +37 -37
  74. crawlo/queue/queue_manager.py +334 -334
  75. crawlo/queue/redis_priority_queue.py +298 -298
  76. crawlo/settings/__init__.py +7 -7
  77. crawlo/settings/default_settings.py +219 -219
  78. crawlo/settings/setting_manager.py +122 -122
  79. crawlo/spider/__init__.py +639 -639
  80. crawlo/stats_collector.py +59 -59
  81. crawlo/subscriber.py +130 -130
  82. crawlo/task_manager.py +30 -30
  83. crawlo/templates/crawlo.cfg.tmpl +10 -10
  84. crawlo/templates/project/__init__.py.tmpl +3 -3
  85. crawlo/templates/project/items.py.tmpl +17 -17
  86. crawlo/templates/project/middlewares.py.tmpl +109 -109
  87. crawlo/templates/project/pipelines.py.tmpl +96 -96
  88. crawlo/templates/project/run.py.tmpl +45 -45
  89. crawlo/templates/project/settings.py.tmpl +326 -326
  90. crawlo/templates/project/settings_distributed.py.tmpl +119 -119
  91. crawlo/templates/project/settings_gentle.py.tmpl +94 -94
  92. crawlo/templates/project/settings_high_performance.py.tmpl +151 -151
  93. crawlo/templates/project/settings_simple.py.tmpl +68 -68
  94. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  95. crawlo/templates/spider/spider.py.tmpl +141 -141
  96. crawlo/tools/__init__.py +182 -182
  97. crawlo/tools/anti_crawler.py +268 -268
  98. crawlo/tools/authenticated_proxy.py +240 -240
  99. crawlo/tools/data_validator.py +180 -180
  100. crawlo/tools/date_tools.py +35 -35
  101. crawlo/tools/distributed_coordinator.py +386 -386
  102. crawlo/tools/retry_mechanism.py +220 -220
  103. crawlo/tools/scenario_adapter.py +262 -262
  104. crawlo/utils/__init__.py +35 -35
  105. crawlo/utils/batch_processor.py +260 -260
  106. crawlo/utils/controlled_spider_mixin.py +439 -439
  107. crawlo/utils/date_tools.py +290 -290
  108. crawlo/utils/db_helper.py +343 -343
  109. crawlo/utils/enhanced_error_handler.py +359 -359
  110. crawlo/utils/env_config.py +105 -105
  111. crawlo/utils/error_handler.py +125 -125
  112. crawlo/utils/func_tools.py +82 -82
  113. crawlo/utils/large_scale_config.py +286 -286
  114. crawlo/utils/large_scale_helper.py +343 -343
  115. crawlo/utils/log.py +128 -128
  116. crawlo/utils/performance_monitor.py +284 -284
  117. crawlo/utils/queue_helper.py +175 -175
  118. crawlo/utils/redis_connection_pool.py +334 -334
  119. crawlo/utils/redis_key_validator.py +199 -199
  120. crawlo/utils/request.py +267 -267
  121. crawlo/utils/request_serializer.py +219 -219
  122. crawlo/utils/spider_loader.py +62 -62
  123. crawlo/utils/system.py +11 -11
  124. crawlo/utils/tools.py +4 -4
  125. crawlo/utils/url.py +39 -39
  126. {crawlo-1.1.8.dist-info → crawlo-1.1.9.dist-info}/METADATA +626 -626
  127. crawlo-1.1.9.dist-info/RECORD +190 -0
  128. examples/__init__.py +7 -7
  129. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  130. tests/__init__.py +7 -7
  131. tests/advanced_tools_example.py +275 -275
  132. tests/authenticated_proxy_example.py +236 -236
  133. tests/cleaners_example.py +160 -160
  134. tests/config_validation_demo.py +102 -102
  135. tests/controlled_spider_example.py +205 -205
  136. tests/date_tools_example.py +180 -180
  137. tests/dynamic_loading_example.py +523 -523
  138. tests/dynamic_loading_test.py +104 -104
  139. tests/env_config_example.py +133 -133
  140. tests/error_handling_example.py +171 -171
  141. tests/redis_key_validation_demo.py +130 -130
  142. tests/response_improvements_example.py +144 -144
  143. tests/test_advanced_tools.py +148 -148
  144. tests/test_all_redis_key_configs.py +145 -145
  145. tests/test_authenticated_proxy.py +141 -141
  146. tests/test_cleaners.py +54 -54
  147. tests/test_comprehensive.py +146 -146
  148. tests/test_config_validator.py +193 -193
  149. tests/test_date_tools.py +123 -123
  150. tests/test_double_crawlo_fix.py +207 -207
  151. tests/test_double_crawlo_fix_simple.py +124 -124
  152. tests/test_dynamic_downloaders_proxy.py +124 -124
  153. tests/test_dynamic_proxy.py +92 -92
  154. tests/test_dynamic_proxy_config.py +146 -146
  155. tests/test_dynamic_proxy_real.py +109 -109
  156. tests/test_edge_cases.py +303 -303
  157. tests/test_enhanced_error_handler.py +270 -270
  158. tests/test_env_config.py +121 -121
  159. tests/test_error_handler_compatibility.py +112 -112
  160. tests/test_final_validation.py +153 -153
  161. tests/test_framework_env_usage.py +103 -103
  162. tests/test_integration.py +356 -356
  163. tests/test_item_dedup_redis_key.py +122 -122
  164. tests/test_parsel.py +29 -29
  165. tests/test_performance.py +327 -327
  166. tests/test_proxy_health_check.py +32 -32
  167. tests/test_proxy_middleware_integration.py +136 -136
  168. tests/test_proxy_providers.py +56 -56
  169. tests/test_proxy_stats.py +19 -19
  170. tests/test_proxy_strategies.py +59 -59
  171. tests/test_queue_manager_double_crawlo.py +230 -230
  172. tests/test_queue_manager_redis_key.py +176 -176
  173. tests/test_redis_config.py +28 -28
  174. tests/test_redis_connection_pool.py +294 -294
  175. tests/test_redis_key_naming.py +181 -181
  176. tests/test_redis_key_validator.py +123 -123
  177. tests/test_redis_queue.py +224 -224
  178. tests/test_request_serialization.py +70 -70
  179. tests/test_response_improvements.py +152 -152
  180. tests/test_scheduler.py +241 -241
  181. tests/test_simple_response.py +61 -61
  182. tests/test_telecom_spider_redis_key.py +205 -205
  183. tests/test_template_content.py +87 -87
  184. tests/test_template_redis_key.py +134 -134
  185. tests/test_tools.py +153 -153
  186. tests/tools_example.py +257 -257
  187. crawlo-1.1.8.dist-info/RECORD +0 -190
  188. {crawlo-1.1.8.dist-info → crawlo-1.1.9.dist-info}/WHEEL +0 -0
  189. {crawlo-1.1.8.dist-info → crawlo-1.1.9.dist-info}/entry_points.txt +0 -0
  190. {crawlo-1.1.8.dist-info → crawlo-1.1.9.dist-info}/top_level.txt +0 -0
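
Every source file in this release shows a symmetric +N/−N count, i.e. each file was rewritten in full with the same line count, which usually points to a byte-level change (such as line endings or a re-upload) rather than a logic change. A minimal sketch for confirming that locally, assuming both wheels have already been downloaded into the working directory (the filenames are the standard wheel names, not part of crawlo's API):

```python
import zipfile

OLD = "crawlo-1.1.8-py3-none-any.whl"  # assumed local filenames
NEW = "crawlo-1.1.9-py3-none-any.whl"

def read_member(wheel_path: str, member: str) -> bytes:
    """Read one file out of a wheel (wheels are plain zip archives)."""
    with zipfile.ZipFile(wheel_path) as zf:
        return zf.read(member)

member = "crawlo/utils/large_scale_config.py"
old_bytes = read_member(OLD, member)
new_bytes = read_member(NEW, member)

# Compare raw bytes first, then again with line endings normalized,
# to distinguish a real edit from a CRLF/LF-only rewrite.
print("identical bytes:", old_bytes == new_bytes)
print("identical after newline normalization:",
      old_bytes.replace(b"\r\n", b"\n") == new_bytes.replace(b"\r\n", b"\n"))
```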
The old and new revisions of this file are line-for-line identical, so the content is shown once below (comments and docstrings translated from Chinese).

crawlo/utils/large_scale_config.py
@@ -1,287 +1,287 @@

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Large-scale crawler configuration helper.
Provides optimized configurations for workloads of tens of thousands of requests.
"""
from typing import Dict, Any

from crawlo.utils.queue_helper import QueueHelper


class LargeScaleConfig:
    """Configuration presets for large-scale crawling."""

    @staticmethod
    def conservative_config(concurrency: int = 8) -> Dict[str, Any]:
        """
        Conservative preset - for resource-constrained environments.

        Characteristics:
        - Small queue capacity
        - Low concurrency
        - Longer delays
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:conservative",
            max_retries=3,
            timeout=300
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 10,  # queue capacity = 10x concurrency
            'MAX_RUNNING_SPIDERS': 1,

            # Request control
            'DOWNLOAD_DELAY': 0.2,
            'RANDOMNESS': True,
            'RANDOM_RANGE': (0.8, 1.5),

            # Memory control
            'DOWNLOAD_MAXSIZE': 5 * 1024 * 1024,  # 5MB
            'CONNECTION_POOL_LIMIT': concurrency * 2,

            # Retry policy
            'MAX_RETRY_TIMES': 2,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config

    @staticmethod
    def balanced_config(concurrency: int = 16) -> Dict[str, Any]:
        """
        Balanced preset - for typical production environments.

        Characteristics:
        - Medium queue capacity
        - Balanced concurrency
        - Moderate delays
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:balanced",
            max_retries=5,
            timeout=600
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 15,
            'MAX_RUNNING_SPIDERS': 2,

            # Request control
            'DOWNLOAD_DELAY': 0.1,
            'RANDOMNESS': True,
            'RANDOM_RANGE': (0.5, 1.2),

            # Memory control
            'DOWNLOAD_MAXSIZE': 10 * 1024 * 1024,  # 10MB
            'CONNECTION_POOL_LIMIT': concurrency * 3,

            # Retry policy
            'MAX_RETRY_TIMES': 3,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config

    @staticmethod
    def aggressive_config(concurrency: int = 32) -> Dict[str, Any]:
        """
        Aggressive preset - for high-performance environments.

        Characteristics:
        - Large queue capacity
        - High concurrency
        - Short delays
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:aggressive",
            max_retries=10,
            timeout=900
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 20,
            'MAX_RUNNING_SPIDERS': 3,

            # Request control
            'DOWNLOAD_DELAY': 0.05,
            'RANDOMNESS': True,
            'RANDOM_RANGE': (0.3, 1.0),

            # Memory control
            'DOWNLOAD_MAXSIZE': 20 * 1024 * 1024,  # 20MB
            'CONNECTION_POOL_LIMIT': concurrency * 4,

            # Retry policy
            'MAX_RETRY_TIMES': 5,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config

    @staticmethod
    def memory_optimized_config(concurrency: int = 12) -> Dict[str, Any]:
        """
        Memory-optimized preset - for large-scale crawls under memory pressure.

        Characteristics:
        - Small queue with fast turnover
        - Strict memory limits
        - Uses Redis to reduce in-process memory pressure
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:memory_optimized",
            max_retries=3,
            timeout=300
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 5,  # small queue
            'MAX_RUNNING_SPIDERS': 1,

            # Request control
            'DOWNLOAD_DELAY': 0.1,
            'RANDOMNESS': False,  # less randomness, lower memory use

            # Strict memory limits
            'DOWNLOAD_MAXSIZE': 2 * 1024 * 1024,  # 2MB
            'DOWNLOAD_WARN_SIZE': 512 * 1024,  # 512KB
            'CONNECTION_POOL_LIMIT': concurrency,

            # Retry policy
            'MAX_RETRY_TIMES': 2,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config


def apply_large_scale_config(settings_dict: Dict[str, Any], config_type: str = "balanced", concurrency: int = None):
    """
    Apply a large-scale configuration preset.

    Args:
        settings_dict: the settings dictionary to update
        config_type: preset type ("conservative", "balanced", "aggressive", "memory_optimized")
        concurrency: concurrency level (optional; the preset default is used if omitted)
    """
    config_map = {
        "conservative": LargeScaleConfig.conservative_config,
        "balanced": LargeScaleConfig.balanced_config,
        "aggressive": LargeScaleConfig.aggressive_config,
        "memory_optimized": LargeScaleConfig.memory_optimized_config
    }

    if config_type not in config_map:
        raise ValueError(f"Unsupported config type: {config_type}")

    if concurrency:
        config = config_map[config_type](concurrency)
    else:
        config = config_map[config_type]()

    settings_dict.update(config)

    return config


# Usage examples and notes
USAGE_GUIDE = """
# Large-Scale Crawler Configuration Guide

## 1. Choose the right preset

### Conservative
- Scenario: limited resources, unstable networks
- Concurrency: 8 (default)
- Queue capacity: 80
- Delay: 200ms
- Typical use: personal development, small crawls

### Balanced
- Scenario: typical production environments
- Concurrency: 16 (default)
- Queue capacity: 240
- Delay: 100ms
- Typical use: small/medium production deployments

### Aggressive
- Scenario: high-performance servers, speed-critical crawls
- Concurrency: 32 (default)
- Queue capacity: 640
- Delay: 50ms
- Typical use: large organizations, high-concurrency workloads

### Memory Optimized
- Scenario: large-scale crawls with limited memory
- Concurrency: 12 (default)
- Queue capacity: 60 (small queue, fast turnover)
- Delay: 100ms
- Typical use: tens to hundreds of thousands of requests under memory limits

## 2. Usage

```python
# Option 1: configure directly in settings.py
from crawlo.utils.large_scale_config import apply_large_scale_config

# Balanced preset with 16 concurrent requests
apply_large_scale_config(locals(), "balanced", 16)

# Option 2: configure dynamically in crawler code
from crawlo.crawler import CrawlerProcess
from crawlo.utils.large_scale_config import LargeScaleConfig

process = CrawlerProcess()
config = LargeScaleConfig.memory_optimized_config(20)  # memory-optimized preset, 20 concurrent requests
process.settings.update(config)

# Option 3: customize a preset
config = LargeScaleConfig.balanced_config(24)  # 24 concurrent requests
config['DOWNLOAD_DELAY'] = 0.05  # custom delay
process.settings.update(config)
```

## 3. Recommendations by scenario

### Processing 50k+ requests
```python
# Memory-optimized preset recommended
apply_large_scale_config(locals(), "memory_optimized", 20)
```

### High-speed crawling on capable hardware
```python
# Aggressive preset recommended
apply_large_scale_config(locals(), "aggressive", 40)
```

### Limited resources, stability first
```python
# Conservative preset recommended
apply_large_scale_config(locals(), "conservative", 6)
```

### Balancing performance and stability
```python
# Balanced preset recommended
apply_large_scale_config(locals(), "balanced", 18)
```
"""