crawlo-1.2.2-py3-none-any.whl → crawlo-1.2.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (222)
  1. crawlo/__init__.py +61 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +60 -60
  4. crawlo/cleaners/data_formatter.py +225 -225
  5. crawlo/cleaners/encoding_converter.py +125 -125
  6. crawlo/cleaners/text_cleaner.py +232 -232
  7. crawlo/cli.py +81 -81
  8. crawlo/commands/__init__.py +14 -14
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/help.py +144 -142
  12. crawlo/commands/list.py +155 -155
  13. crawlo/commands/run.py +323 -292
  14. crawlo/commands/startproject.py +420 -418
  15. crawlo/commands/stats.py +188 -188
  16. crawlo/commands/utils.py +186 -186
  17. crawlo/config.py +312 -312
  18. crawlo/config_validator.py +251 -252
  19. crawlo/core/__init__.py +2 -2
  20. crawlo/core/engine.py +354 -354
  21. crawlo/core/processor.py +40 -40
  22. crawlo/core/scheduler.py +143 -143
  23. crawlo/crawler.py +1110 -1027
  24. crawlo/data/__init__.py +6 -0
  25. crawlo/data/user_agents.py +108 -0
  26. crawlo/downloader/__init__.py +266 -266
  27. crawlo/downloader/aiohttp_downloader.py +220 -220
  28. crawlo/downloader/cffi_downloader.py +256 -256
  29. crawlo/downloader/httpx_downloader.py +259 -259
  30. crawlo/downloader/hybrid_downloader.py +212 -213
  31. crawlo/downloader/playwright_downloader.py +402 -402
  32. crawlo/downloader/selenium_downloader.py +472 -472
  33. crawlo/event.py +11 -11
  34. crawlo/exceptions.py +81 -81
  35. crawlo/extension/__init__.py +37 -37
  36. crawlo/extension/health_check.py +141 -141
  37. crawlo/extension/log_interval.py +57 -57
  38. crawlo/extension/log_stats.py +81 -81
  39. crawlo/extension/logging_extension.py +43 -43
  40. crawlo/extension/memory_monitor.py +104 -104
  41. crawlo/extension/performance_profiler.py +133 -133
  42. crawlo/extension/request_recorder.py +107 -107
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +280 -280
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/items/__init__.py +23 -23
  47. crawlo/items/base.py +21 -21
  48. crawlo/items/fields.py +52 -53
  49. crawlo/items/items.py +104 -104
  50. crawlo/middleware/__init__.py +21 -21
  51. crawlo/middleware/default_header.py +131 -131
  52. crawlo/middleware/download_delay.py +104 -104
  53. crawlo/middleware/middleware_manager.py +135 -135
  54. crawlo/middleware/offsite.py +114 -115
  55. crawlo/middleware/proxy.py +367 -366
  56. crawlo/middleware/request_ignore.py +86 -87
  57. crawlo/middleware/response_code.py +163 -164
  58. crawlo/middleware/response_filter.py +136 -137
  59. crawlo/middleware/retry.py +124 -124
  60. crawlo/mode_manager.py +211 -211
  61. crawlo/network/__init__.py +21 -21
  62. crawlo/network/request.py +338 -338
  63. crawlo/network/response.py +359 -359
  64. crawlo/pipelines/__init__.py +21 -21
  65. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  66. crawlo/pipelines/console_pipeline.py +39 -39
  67. crawlo/pipelines/csv_pipeline.py +316 -316
  68. crawlo/pipelines/database_dedup_pipeline.py +222 -224
  69. crawlo/pipelines/json_pipeline.py +218 -218
  70. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  71. crawlo/pipelines/mongo_pipeline.py +131 -131
  72. crawlo/pipelines/mysql_pipeline.py +317 -316
  73. crawlo/pipelines/pipeline_manager.py +61 -61
  74. crawlo/pipelines/redis_dedup_pipeline.py +165 -167
  75. crawlo/project.py +279 -187
  76. crawlo/queue/pqueue.py +37 -37
  77. crawlo/queue/queue_manager.py +337 -337
  78. crawlo/queue/redis_priority_queue.py +298 -298
  79. crawlo/settings/__init__.py +7 -7
  80. crawlo/settings/default_settings.py +217 -226
  81. crawlo/settings/setting_manager.py +122 -122
  82. crawlo/spider/__init__.py +639 -639
  83. crawlo/stats_collector.py +59 -59
  84. crawlo/subscriber.py +129 -130
  85. crawlo/task_manager.py +30 -30
  86. crawlo/templates/crawlo.cfg.tmpl +10 -10
  87. crawlo/templates/project/__init__.py.tmpl +3 -3
  88. crawlo/templates/project/items.py.tmpl +17 -17
  89. crawlo/templates/project/middlewares.py.tmpl +118 -118
  90. crawlo/templates/project/pipelines.py.tmpl +96 -96
  91. crawlo/templates/project/run.py.tmpl +47 -45
  92. crawlo/templates/project/settings.py.tmpl +350 -327
  93. crawlo/templates/project/settings_distributed.py.tmpl +160 -119
  94. crawlo/templates/project/settings_gentle.py.tmpl +133 -94
  95. crawlo/templates/project/settings_high_performance.py.tmpl +155 -151
  96. crawlo/templates/project/settings_simple.py.tmpl +108 -68
  97. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  98. crawlo/templates/spider/spider.py.tmpl +143 -143
  99. crawlo/tools/__init__.py +182 -182
  100. crawlo/tools/anti_crawler.py +268 -268
  101. crawlo/tools/authenticated_proxy.py +240 -240
  102. crawlo/tools/data_validator.py +180 -180
  103. crawlo/tools/date_tools.py +35 -35
  104. crawlo/tools/distributed_coordinator.py +386 -386
  105. crawlo/tools/retry_mechanism.py +220 -220
  106. crawlo/tools/scenario_adapter.py +262 -262
  107. crawlo/utils/__init__.py +35 -35
  108. crawlo/utils/batch_processor.py +259 -260
  109. crawlo/utils/controlled_spider_mixin.py +439 -439
  110. crawlo/utils/date_tools.py +290 -290
  111. crawlo/utils/db_helper.py +343 -343
  112. crawlo/utils/enhanced_error_handler.py +356 -359
  113. crawlo/utils/env_config.py +105 -105
  114. crawlo/utils/error_handler.py +123 -125
  115. crawlo/utils/func_tools.py +82 -82
  116. crawlo/utils/large_scale_config.py +286 -286
  117. crawlo/utils/large_scale_helper.py +344 -343
  118. crawlo/utils/log.py +128 -128
  119. crawlo/utils/performance_monitor.py +285 -284
  120. crawlo/utils/queue_helper.py +175 -175
  121. crawlo/utils/redis_connection_pool.py +334 -334
  122. crawlo/utils/redis_key_validator.py +198 -199
  123. crawlo/utils/request.py +267 -267
  124. crawlo/utils/request_serializer.py +218 -219
  125. crawlo/utils/spider_loader.py +61 -62
  126. crawlo/utils/system.py +11 -11
  127. crawlo/utils/tools.py +4 -4
  128. crawlo/utils/url.py +39 -39
  129. {crawlo-1.2.2.dist-info → crawlo-1.2.4.dist-info}/METADATA +764 -692
  130. crawlo-1.2.4.dist-info/RECORD +206 -0
  131. examples/__init__.py +7 -7
  132. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  133. tests/__init__.py +7 -7
  134. tests/advanced_tools_example.py +275 -275
  135. tests/authenticated_proxy_example.py +236 -236
  136. tests/cleaners_example.py +160 -160
  137. tests/config_validation_demo.py +102 -102
  138. tests/controlled_spider_example.py +205 -205
  139. tests/date_tools_example.py +180 -180
  140. tests/dynamic_loading_example.py +523 -523
  141. tests/dynamic_loading_test.py +104 -104
  142. tests/env_config_example.py +133 -133
  143. tests/error_handling_example.py +171 -171
  144. tests/redis_key_validation_demo.py +130 -130
  145. tests/response_improvements_example.py +144 -144
  146. tests/test_advanced_tools.py +148 -148
  147. tests/test_all_redis_key_configs.py +145 -145
  148. tests/test_authenticated_proxy.py +141 -141
  149. tests/test_cleaners.py +54 -54
  150. tests/test_comprehensive.py +146 -146
  151. tests/test_config_validator.py +193 -193
  152. tests/test_crawlo_proxy_integration.py +172 -172
  153. tests/test_date_tools.py +123 -123
  154. tests/test_default_header_middleware.py +158 -158
  155. tests/test_double_crawlo_fix.py +207 -207
  156. tests/test_double_crawlo_fix_simple.py +124 -124
  157. tests/test_download_delay_middleware.py +221 -221
  158. tests/test_downloader_proxy_compatibility.py +268 -268
  159. tests/test_dynamic_downloaders_proxy.py +124 -124
  160. tests/test_dynamic_proxy.py +92 -92
  161. tests/test_dynamic_proxy_config.py +146 -146
  162. tests/test_dynamic_proxy_real.py +109 -109
  163. tests/test_edge_cases.py +303 -303
  164. tests/test_enhanced_error_handler.py +270 -270
  165. tests/test_env_config.py +121 -121
  166. tests/test_error_handler_compatibility.py +112 -112
  167. tests/test_final_validation.py +153 -153
  168. tests/test_framework_env_usage.py +103 -103
  169. tests/test_integration.py +356 -356
  170. tests/test_item_dedup_redis_key.py +122 -122
  171. tests/test_offsite_middleware.py +221 -221
  172. tests/test_parsel.py +29 -29
  173. tests/test_performance.py +327 -327
  174. tests/test_proxy_api.py +264 -264
  175. tests/test_proxy_health_check.py +32 -32
  176. tests/test_proxy_middleware.py +121 -121
  177. tests/test_proxy_middleware_enhanced.py +216 -216
  178. tests/test_proxy_middleware_integration.py +136 -136
  179. tests/test_proxy_providers.py +56 -56
  180. tests/test_proxy_stats.py +19 -19
  181. tests/test_proxy_strategies.py +59 -59
  182. tests/test_queue_manager_double_crawlo.py +173 -173
  183. tests/test_queue_manager_redis_key.py +176 -176
  184. tests/test_real_scenario_proxy.py +195 -195
  185. tests/test_redis_config.py +28 -28
  186. tests/test_redis_connection_pool.py +294 -294
  187. tests/test_redis_key_naming.py +181 -181
  188. tests/test_redis_key_validator.py +123 -123
  189. tests/test_redis_queue.py +224 -224
  190. tests/test_request_ignore_middleware.py +182 -182
  191. tests/test_request_serialization.py +70 -70
  192. tests/test_response_code_middleware.py +349 -349
  193. tests/test_response_filter_middleware.py +427 -427
  194. tests/test_response_improvements.py +152 -152
  195. tests/test_retry_middleware.py +241 -241
  196. tests/test_scheduler.py +241 -241
  197. tests/test_simple_response.py +61 -61
  198. tests/test_telecom_spider_redis_key.py +205 -205
  199. tests/test_template_content.py +87 -87
  200. tests/test_template_redis_key.py +134 -134
  201. tests/test_tools.py +153 -153
  202. tests/tools_example.py +257 -257
  203. crawlo-1.2.2.dist-info/RECORD +0 -220
  204. examples/aiohttp_settings.py +0 -42
  205. examples/curl_cffi_settings.py +0 -41
  206. examples/default_header_middleware_example.py +0 -107
  207. examples/default_header_spider_example.py +0 -129
  208. examples/download_delay_middleware_example.py +0 -160
  209. examples/httpx_settings.py +0 -42
  210. examples/multi_downloader_proxy_example.py +0 -81
  211. examples/offsite_middleware_example.py +0 -55
  212. examples/offsite_spider_example.py +0 -107
  213. examples/proxy_spider_example.py +0 -166
  214. examples/request_ignore_middleware_example.py +0 -51
  215. examples/request_ignore_spider_example.py +0 -99
  216. examples/response_code_middleware_example.py +0 -52
  217. examples/response_filter_middleware_example.py +0 -67
  218. examples/tong_hua_shun_settings.py +0 -62
  219. examples/tong_hua_shun_spider.py +0 -170
  220. {crawlo-1.2.2.dist-info → crawlo-1.2.4.dist-info}/WHEEL +0 -0
  221. {crawlo-1.2.2.dist-info → crawlo-1.2.4.dist-info}/entry_points.txt +0 -0
  222. {crawlo-1.2.2.dist-info → crawlo-1.2.4.dist-info}/top_level.txt +0 -0
@@ -1,287 +1,287 @@
(The hunk removes and re-adds every line of crawlo/utils/large_scale_config.py unchanged; the file content follows once.)

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Large-scale crawler configuration helper.
Provides optimized configurations for scenarios with tens of thousands of requests.
"""
from typing import Dict, Any, Optional

from crawlo.utils.queue_helper import QueueHelper


class LargeScaleConfig:
    """Configuration presets for large-scale crawls."""

    @staticmethod
    def conservative_config(concurrency: int = 8) -> Dict[str, Any]:
        """
        Conservative profile, for resource-constrained environments.

        Characteristics:
        - Small queue capacity
        - Low concurrency
        - Longer delays
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:conservative",
            max_retries=3,
            timeout=300
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 10,  # queue capacity is 10x the concurrency
            'MAX_RUNNING_SPIDERS': 1,

            # Request control
            'DOWNLOAD_DELAY': 0.2,
            'RANDOMNESS': True,
            'RANDOM_RANGE': (0.8, 1.5),

            # Memory control
            'DOWNLOAD_MAXSIZE': 5 * 1024 * 1024,  # 5MB
            'CONNECTION_POOL_LIMIT': concurrency * 2,

            # Retry policy
            'MAX_RETRY_TIMES': 2,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config

    @staticmethod
    def balanced_config(concurrency: int = 16) -> Dict[str, Any]:
        """
        Balanced profile, for typical production environments.

        Characteristics:
        - Medium queue capacity
        - Moderate concurrency
        - Moderate delays
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:balanced",
            max_retries=5,
            timeout=600
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 15,
            'MAX_RUNNING_SPIDERS': 2,

            # Request control
            'DOWNLOAD_DELAY': 0.1,
            'RANDOMNESS': True,
            'RANDOM_RANGE': (0.5, 1.2),

            # Memory control
            'DOWNLOAD_MAXSIZE': 10 * 1024 * 1024,  # 10MB
            'CONNECTION_POOL_LIMIT': concurrency * 3,

            # Retry policy
            'MAX_RETRY_TIMES': 3,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config

    @staticmethod
    def aggressive_config(concurrency: int = 32) -> Dict[str, Any]:
        """
        Aggressive profile, for high-performance environments.

        Characteristics:
        - Large queue capacity
        - High concurrency
        - Short delays
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:aggressive",
            max_retries=10,
            timeout=900
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 20,
            'MAX_RUNNING_SPIDERS': 3,

            # Request control
            'DOWNLOAD_DELAY': 0.05,
            'RANDOMNESS': True,
            'RANDOM_RANGE': (0.3, 1.0),

            # Memory control
            'DOWNLOAD_MAXSIZE': 20 * 1024 * 1024,  # 20MB
            'CONNECTION_POOL_LIMIT': concurrency * 4,

            # Retry policy
            'MAX_RETRY_TIMES': 5,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config

    @staticmethod
    def memory_optimized_config(concurrency: int = 12) -> Dict[str, Any]:
        """
        Memory-optimized profile, for large-scale crawls under tight memory limits.

        Characteristics:
        - Small queue with fast turnover
        - Strict memory limits
        - Uses Redis to reduce memory pressure
        """
        config = QueueHelper.use_redis_queue(
            queue_name="crawlo:memory_optimized",
            max_retries=3,
            timeout=300
        )

        config.update({
            # Concurrency control
            'CONCURRENCY': concurrency,
            'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 5,  # small queue
            'MAX_RUNNING_SPIDERS': 1,

            # Request control
            'DOWNLOAD_DELAY': 0.1,
            'RANDOMNESS': False,  # less randomness lowers memory usage

            # Strict memory limits
            'DOWNLOAD_MAXSIZE': 2 * 1024 * 1024,  # 2MB
            'DOWNLOAD_WARN_SIZE': 512 * 1024,  # 512KB
            'CONNECTION_POOL_LIMIT': concurrency,

            # Retry policy
            'MAX_RETRY_TIMES': 2,

            # Use the enhanced engine
            'ENGINE_CLASS': 'crawlo.core.enhanced_engine.EnhancedEngine'
        })

        return config


def apply_large_scale_config(settings_dict: Dict[str, Any], config_type: str = "balanced", concurrency: Optional[int] = None):
    """
    Apply a large-scale configuration profile.

    Args:
        settings_dict: the settings dictionary to update
        config_type: profile name ("conservative", "balanced", "aggressive", "memory_optimized")
        concurrency: concurrency level (optional; the profile default is used if omitted)
    """
    config_map = {
        "conservative": LargeScaleConfig.conservative_config,
        "balanced": LargeScaleConfig.balanced_config,
        "aggressive": LargeScaleConfig.aggressive_config,
        "memory_optimized": LargeScaleConfig.memory_optimized_config
    }

    if config_type not in config_map:
        raise ValueError(f"Unsupported configuration type: {config_type}")

    if concurrency:
        config = config_map[config_type](concurrency)
    else:
        config = config_map[config_type]()

    settings_dict.update(config)

    return config


# Usage examples and notes
USAGE_GUIDE = """
# Large-Scale Crawler Configuration Guide

## 1. Choose the right profile

### Conservative
- Suited for: resource-constrained environments or unstable networks
- Concurrency: 8 (default)
- Queue capacity: 80
- Delay: 200ms
- Typical use: personal development, small crawls

### Balanced
- Suited for: typical production environments
- Concurrency: 16 (default)
- Queue capacity: 240
- Delay: 100ms
- Typical use: small and mid-size production deployments

### Aggressive
- Suited for: high-performance servers where speed matters
- Concurrency: 32 (default)
- Queue capacity: 640
- Delay: 50ms
- Typical use: large organizations, high-concurrency workloads

### Memory Optimized
- Suited for: large-scale crawls with limited memory
- Concurrency: 12 (default)
- Queue capacity: 60 (small queue, fast turnover)
- Delay: 100ms
- Typical use: tens or hundreds of thousands of requests under memory limits

## 2. Usage

```python
# Option 1: configure directly in settings.py
from crawlo.utils.large_scale_config import apply_large_scale_config

# Balanced profile with 16 concurrent requests
apply_large_scale_config(locals(), "balanced", 16)

# Option 2: configure dynamically in spider code
from crawlo.crawler import CrawlerProcess
from crawlo.utils.large_scale_config import LargeScaleConfig

process = CrawlerProcess()
config = LargeScaleConfig.memory_optimized_config(20)  # memory-optimized profile at 20 concurrency
process.settings.update(config)

# Option 3: customize a profile
config = LargeScaleConfig.balanced_config(24)  # 24 concurrency
config['DOWNLOAD_DELAY'] = 0.05  # custom delay
process.settings.update(config)
```

## 3. Recommendations by scenario

### 50,000+ requests
```python
# The memory-optimized profile is recommended
apply_large_scale_config(locals(), "memory_optimized", 20)
```

### High-speed crawling on a capable server
```python
# The aggressive profile is recommended
apply_large_scale_config(locals(), "aggressive", 40)
```

### Limited resources, but stable operation required
```python
# The conservative profile is recommended
apply_large_scale_config(locals(), "conservative", 6)
```

### Balancing performance and stability
```python
# The balanced profile is recommended
apply_large_scale_config(locals(), "balanced", 18)
```
"""