crawlo 1.4.6__py3-none-any.whl → 1.4.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (162)
  1. crawlo/__init__.py +2 -1
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +2 -2
  4. crawlo/commands/check.py +1 -1
  5. crawlo/commands/help.py +5 -3
  6. crawlo/commands/list.py +1 -1
  7. crawlo/commands/run.py +49 -11
  8. crawlo/commands/stats.py +1 -1
  9. crawlo/config.py +12 -4
  10. crawlo/config_validator.py +1 -1
  11. crawlo/core/engine.py +20 -7
  12. crawlo/core/processor.py +1 -1
  13. crawlo/core/scheduler.py +4 -5
  14. crawlo/crawler.py +51 -10
  15. crawlo/downloader/__init__.py +7 -3
  16. crawlo/downloader/aiohttp_downloader.py +18 -18
  17. crawlo/downloader/cffi_downloader.py +5 -2
  18. crawlo/downloader/httpx_downloader.py +9 -3
  19. crawlo/downloader/hybrid_downloader.py +2 -2
  20. crawlo/downloader/playwright_downloader.py +38 -15
  21. crawlo/downloader/selenium_downloader.py +16 -2
  22. crawlo/event.py +42 -8
  23. crawlo/exceptions.py +157 -24
  24. crawlo/extension/__init__.py +10 -9
  25. crawlo/extension/health_check.py +7 -7
  26. crawlo/extension/log_interval.py +6 -6
  27. crawlo/extension/log_stats.py +2 -2
  28. crawlo/extension/logging_extension.py +4 -12
  29. crawlo/extension/memory_monitor.py +5 -5
  30. crawlo/extension/performance_profiler.py +5 -5
  31. crawlo/extension/request_recorder.py +6 -6
  32. crawlo/factories/base.py +1 -1
  33. crawlo/factories/crawler.py +61 -60
  34. crawlo/factories/utils.py +135 -0
  35. crawlo/filters/__init__.py +19 -2
  36. crawlo/filters/aioredis_filter.py +133 -49
  37. crawlo/filters/memory_filter.py +6 -21
  38. crawlo/framework.py +22 -8
  39. crawlo/initialization/built_in.py +24 -67
  40. crawlo/initialization/core.py +65 -19
  41. crawlo/initialization/phases.py +83 -2
  42. crawlo/initialization/registry.py +5 -7
  43. crawlo/initialization/utils.py +49 -0
  44. crawlo/logging/__init__.py +6 -10
  45. crawlo/logging/config.py +106 -22
  46. crawlo/logging/factory.py +12 -8
  47. crawlo/logging/manager.py +19 -27
  48. crawlo/middleware/__init__.py +72 -9
  49. crawlo/middleware/default_header.py +2 -2
  50. crawlo/middleware/download_delay.py +2 -2
  51. crawlo/middleware/middleware_manager.py +6 -6
  52. crawlo/middleware/offsite.py +2 -2
  53. crawlo/middleware/proxy.py +2 -2
  54. crawlo/middleware/request_ignore.py +4 -4
  55. crawlo/middleware/response_code.py +2 -2
  56. crawlo/middleware/response_filter.py +2 -2
  57. crawlo/middleware/retry.py +1 -1
  58. crawlo/mode_manager.py +38 -4
  59. crawlo/network/request.py +54 -26
  60. crawlo/network/response.py +69 -135
  61. crawlo/pipelines/__init__.py +40 -9
  62. crawlo/pipelines/base_pipeline.py +452 -0
  63. crawlo/pipelines/bloom_dedup_pipeline.py +4 -5
  64. crawlo/pipelines/console_pipeline.py +2 -2
  65. crawlo/pipelines/csv_pipeline.py +4 -4
  66. crawlo/pipelines/database_dedup_pipeline.py +4 -5
  67. crawlo/pipelines/json_pipeline.py +4 -4
  68. crawlo/pipelines/memory_dedup_pipeline.py +4 -5
  69. crawlo/pipelines/mongo_pipeline.py +23 -14
  70. crawlo/pipelines/mysql_pipeline.py +31 -39
  71. crawlo/pipelines/pipeline_manager.py +8 -8
  72. crawlo/pipelines/redis_dedup_pipeline.py +13 -14
  73. crawlo/project.py +1 -1
  74. crawlo/queue/__init__.py +10 -0
  75. crawlo/queue/queue_manager.py +79 -13
  76. crawlo/queue/redis_priority_queue.py +196 -47
  77. crawlo/settings/default_settings.py +16 -6
  78. crawlo/spider/__init__.py +6 -5
  79. crawlo/stats_collector.py +2 -2
  80. crawlo/task_manager.py +1 -1
  81. crawlo/templates/crawlo.cfg.tmpl +3 -3
  82. crawlo/templates/project/__init__.py.tmpl +1 -3
  83. crawlo/templates/project/items.py.tmpl +2 -6
  84. crawlo/templates/project/middlewares.py.tmpl +1 -1
  85. crawlo/templates/project/pipelines.py.tmpl +1 -2
  86. crawlo/templates/project/settings.py.tmpl +12 -10
  87. crawlo/templates/project/settings_distributed.py.tmpl +14 -13
  88. crawlo/templates/project/settings_gentle.py.tmpl +21 -23
  89. crawlo/templates/project/settings_high_performance.py.tmpl +21 -23
  90. crawlo/templates/project/settings_minimal.py.tmpl +10 -8
  91. crawlo/templates/project/settings_simple.py.tmpl +21 -23
  92. crawlo/templates/run.py.tmpl +1 -1
  93. crawlo/templates/spider/spider.py.tmpl +4 -12
  94. crawlo/templates/spiders_init.py.tmpl +3 -8
  95. crawlo/tools/__init__.py +0 -103
  96. crawlo/tools/scenario_adapter.py +1 -1
  97. crawlo/utils/__init__.py +25 -1
  98. crawlo/utils/batch_processor.py +23 -6
  99. crawlo/utils/config_manager.py +442 -0
  100. crawlo/utils/controlled_spider_mixin.py +1 -1
  101. crawlo/utils/db_helper.py +1 -1
  102. crawlo/utils/encoding_helper.py +190 -0
  103. crawlo/utils/error_handler.py +2 -2
  104. crawlo/utils/large_scale_helper.py +1 -1
  105. crawlo/utils/leak_detector.py +335 -0
  106. crawlo/utils/mongo_connection_pool.py +157 -0
  107. crawlo/utils/mysql_connection_pool.py +197 -0
  108. crawlo/utils/performance_monitor.py +1 -1
  109. crawlo/utils/redis_checker.py +91 -0
  110. crawlo/utils/redis_connection_pool.py +260 -70
  111. crawlo/utils/redis_key_validator.py +1 -1
  112. crawlo/utils/request.py +24 -2
  113. crawlo/utils/request_serializer.py +1 -1
  114. crawlo/utils/resource_manager.py +337 -0
  115. crawlo/utils/response_helper.py +113 -0
  116. crawlo/utils/selector_helper.py +3 -2
  117. crawlo/utils/singleton.py +70 -0
  118. crawlo/utils/spider_loader.py +1 -1
  119. crawlo/utils/text_helper.py +1 -1
  120. crawlo-1.4.8.dist-info/METADATA +831 -0
  121. {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/RECORD +131 -145
  122. tests/advanced_tools_example.py +10 -68
  123. tests/distributed_dedup_test.py +467 -0
  124. tests/monitor_redis_dedup.sh +72 -0
  125. tests/ofweek_scrapy/ofweek_scrapy/spiders/__init__.py +4 -4
  126. tests/simple_cli_test.py +55 -0
  127. tests/test_cli_arguments.py +119 -0
  128. tests/test_dedup_fix.py +10 -10
  129. crawlo/logging/async_handler.py +0 -181
  130. crawlo/logging/monitor.py +0 -153
  131. crawlo/logging/sampler.py +0 -167
  132. crawlo/tools/authenticated_proxy.py +0 -241
  133. crawlo/tools/data_formatter.py +0 -226
  134. crawlo/tools/data_validator.py +0 -181
  135. crawlo/tools/encoding_converter.py +0 -127
  136. crawlo/tools/network_diagnostic.py +0 -365
  137. crawlo/tools/request_tools.py +0 -83
  138. crawlo/tools/retry_mechanism.py +0 -224
  139. crawlo/utils/env_config.py +0 -143
  140. crawlo/utils/large_scale_config.py +0 -287
  141. crawlo/utils/log.py +0 -80
  142. crawlo/utils/system.py +0 -11
  143. crawlo/utils/tools.py +0 -5
  144. crawlo/utils/url.py +0 -40
  145. crawlo-1.4.6.dist-info/METADATA +0 -329
  146. tests/env_config_example.py +0 -134
  147. tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +0 -162
  148. tests/test_authenticated_proxy.py +0 -142
  149. tests/test_comprehensive.py +0 -147
  150. tests/test_dynamic_downloaders_proxy.py +0 -125
  151. tests/test_dynamic_proxy.py +0 -93
  152. tests/test_dynamic_proxy_config.py +0 -147
  153. tests/test_dynamic_proxy_real.py +0 -110
  154. tests/test_env_config.py +0 -122
  155. tests/test_framework_env_usage.py +0 -104
  156. tests/test_large_scale_config.py +0 -113
  157. tests/test_proxy_api.py +0 -265
  158. tests/test_real_scenario_proxy.py +0 -196
  159. tests/tools_example.py +0 -261
  160. {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/WHEEL +0 -0
  161. {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/entry_points.txt +0 -0
  162. {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/top_level.txt +0 -0
crawlo/tools/retry_mechanism.py (deleted)
@@ -1,224 +0,0 @@
- #!/usr/bin/python
- # -*- coding: UTF-8 -*-
- """
- # @Time : 2025-09-10 22:00
- # @Author : crawl-coder
- # @Desc : Retry mechanism utilities
- """
-
- import time
- import random
- import asyncio
- from typing import Callable, Any, Optional, Tuple, Set
- from functools import wraps
-
-
- class RetryMechanism:
-     """Retry mechanism utility class."""
-
-     # HTTP status codes retried by default
-     DEFAULT_RETRY_STATUS_CODES = {429, 500, 502, 503, 504}
-
-     # Exception types retried by default
-     DEFAULT_RETRY_EXCEPTIONS = (
-         ConnectionError,
-         TimeoutError,
-         asyncio.TimeoutError,
-     )
-
-     def __init__(self, max_retries: int = 3,
-                  retry_status_codes: Optional[Set[int]] = None,
-                  retry_exceptions: Optional[Tuple[type, ...]] = None):
-         """
-         Initialize the retry mechanism.
-
-         Args:
-             max_retries (int): Maximum number of retry attempts
-             retry_status_codes (Optional[Set[int]]): HTTP status codes that should trigger a retry
-             retry_exceptions (Optional[Tuple[type, ...]]): Exception types that should trigger a retry
-         """
-         self.max_retries = max_retries
-         self.retry_status_codes = retry_status_codes or self.DEFAULT_RETRY_STATUS_CODES
-         self.retry_exceptions = retry_exceptions or self.DEFAULT_RETRY_EXCEPTIONS
-
-     def should_retry(self, status_code: Optional[int] = None,
-                      exception: Optional[Exception] = None) -> bool:
-         """
-         Decide whether a retry is warranted.
-
-         Args:
-             status_code (Optional[int]): HTTP status code
-             exception (Optional[Exception]): Exception instance
-
-         Returns:
-             bool: Whether to retry
-         """
-         # If a status code is given, check it against the retry set
-         if status_code is not None and status_code in self.retry_status_codes:
-             return True
-
-         # If an exception is given, check it against the retryable types
-         if exception is not None and isinstance(exception, self.retry_exceptions):
-             return True
-
-         return False
-
-     def exponential_backoff(self, attempt: int, base_delay: float = 1.0,
-                             max_delay: float = 60.0) -> float:
-         """
-         Compute the exponential backoff delay.
-
-         Args:
-             attempt (int): Current attempt number
-             base_delay (float): Base delay in seconds
-             max_delay (float): Maximum delay in seconds
-
-         Returns:
-             float: Delay in seconds
-         """
-         # Base delay: base_delay * (2 ** attempt)
-         delay = base_delay * (2 ** attempt)
-
-         # Add random jitter to avoid a thundering herd
-         jitter = random.uniform(0, 0.1) * delay
-
-         # Return the final delay, capped at max_delay
-         return min(delay + jitter, max_delay)
-
-     async def async_retry(self, func: Callable, *args, **kwargs) -> Any:
-         """
-         Execute an async function with retries.
-
-         Args:
-             func (Callable): Function to execute
-             *args: Positional arguments
-             **kwargs: Keyword arguments
-
-         Returns:
-             Any: Result of the function call
-
-         Raises:
-             Exception: The last exception if all retries are exhausted
-         """
-         last_exception = None
-
-         for attempt in range(self.max_retries + 1):
-             try:
-                 result = await func(*args, **kwargs)
-
-                 # If the result exposes a status code, check whether to retry
-                 if hasattr(result, 'status') and self.should_retry(status_code=result.status):
-                     if attempt < self.max_retries:
-                         delay = self.exponential_backoff(attempt)
-                         await asyncio.sleep(delay)
-                         continue
-                     else:
-                         raise Exception(f"HTTP {result.status} after {self.max_retries} retries")
-
-                 return result
-
-             except Exception as e:
-                 last_exception = e
-
-                 # Check whether the exception is retryable
-                 if self.should_retry(exception=e) and attempt < self.max_retries:
-                     delay = self.exponential_backoff(attempt)
-                     await asyncio.sleep(delay)
-                     continue
-                 else:
-                     raise e
-
-         # Reaching this point means every retry failed
-         raise last_exception
-
-     def sync_retry(self, func: Callable, *args, **kwargs) -> Any:
-         """
-         Execute a synchronous function with retries.
-
-         Args:
-             func (Callable): Function to execute
-             *args: Positional arguments
-             **kwargs: Keyword arguments
-
-         Returns:
-             Any: Result of the function call
-
-         Raises:
-             Exception: The last exception if all retries are exhausted
-         """
-         last_exception = None
-
-         for attempt in range(self.max_retries + 1):
-             try:
-                 result = func(*args, **kwargs)
-
-                 # If the result exposes a status code, check whether to retry
-                 if hasattr(result, 'status') and self.should_retry(status_code=result.status):
-                     if attempt < self.max_retries:
-                         delay = self.exponential_backoff(attempt)
-                         time.sleep(delay)
-                         continue
-                     else:
-                         raise Exception(f"HTTP {result.status} after {self.max_retries} retries")
-
-                 return result
-
-             except Exception as e:
-                 last_exception = e
-
-                 # Check whether the exception is retryable
-                 if self.should_retry(exception=e) and attempt < self.max_retries:
-                     delay = self.exponential_backoff(attempt)
-                     time.sleep(delay)
-                     continue
-                 else:
-                     raise e
-
-         # Reaching this point means every retry failed
-         raise last_exception
-
-
- def retry(max_retries: int = 3,
-           retry_status_codes: Optional[Set[int]] = None,
-           retry_exceptions: Optional[Tuple[type, ...]] = None):
-     """
-     Retry decorator.
-
-     Args:
-         max_retries (int): Maximum number of retry attempts
-         retry_status_codes (Optional[Set[int]]): HTTP status codes that should trigger a retry
-         retry_exceptions (Optional[Tuple[type, ...]]): Exception types that should trigger a retry
-     """
-
-     def decorator(func: Callable) -> Callable:
-         retry_mechanism = RetryMechanism(max_retries, retry_status_codes, retry_exceptions)
-
-         if asyncio.iscoroutinefunction(func):
-             @wraps(func)
-             async def async_wrapper(*args, **kwargs):
-                 return await retry_mechanism.async_retry(func, *args, **kwargs)
-
-             return async_wrapper
-         else:
-             @wraps(func)
-             def sync_wrapper(*args, **kwargs):
-                 return retry_mechanism.sync_retry(func, *args, **kwargs)
-
-             return sync_wrapper
-
-     return decorator
-
-
- # Convenience functions
- def should_retry(status_code: Optional[int] = None,
-                  exception: Optional[Exception] = None) -> bool:
-     """Decide whether a retry is warranted."""
-     retry_mechanism = RetryMechanism()
-     return retry_mechanism.should_retry(status_code, exception)
-
-
- def exponential_backoff(attempt: int, base_delay: float = 1.0,
-                         max_delay: float = 60.0) -> float:
-     """Compute the exponential backoff delay."""
-     retry_mechanism = RetryMechanism()
-     return retry_mechanism.exponential_backoff(attempt, base_delay, max_delay)
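For downstream code, the visible impact of this deletion is the loss of the `retry` decorator and the `should_retry`/`exponential_backoff` helpers, with no direct replacement among the files added above. A minimal sketch of the kind of call site affected, written against the 1.4.6 API shown in the hunk (the `flaky_fetch` function is illustrative, not part of crawlo):

```python
# Runs against crawlo 1.4.6 only: crawlo.tools.retry_mechanism is removed in 1.4.8.
import asyncio

from crawlo.tools.retry_mechanism import retry


@retry(max_retries=3)
async def flaky_fetch(url: str, attempts: list) -> str:
    # Fails twice with a retryable exception, then succeeds; between attempts
    # the decorator sleeps per RetryMechanism.exponential_backoff.
    attempts.append(url)
    if len(attempts) < 3:
        raise ConnectionError("transient network failure")
    return f"payload from {url}"


if __name__ == "__main__":
    calls: list = []
    print(asyncio.run(flaky_fetch("https://example.com", calls)))  # succeeds on attempt 3
```

Projects upgrading to 1.4.8 that relied on this module will need to vendor the decorator or reimplement equivalent backoff logic.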
crawlo/utils/env_config.py (deleted)
@@ -1,143 +0,0 @@
- #!/usr/bin/python
- # -*- coding:UTF-8 -*-
- """
- Environment variable configuration utilities
- Provide a unified mechanism for reading environment variables and managing configuration
- """
- import os
- import re
- from typing import Any
-
-
- class EnvConfigManager:
-     """Environment variable configuration manager."""
-
-     @staticmethod
-     def get_env_var(var_name: str, default: Any = None, var_type: type = str) -> Any:
-         """
-         Read an environment variable.
-
-         Args:
-             var_name: Name of the environment variable
-             default: Default value
-             var_type: Target type (str, int, float, bool)
-
-         Returns:
-             The converted value, or the default
-         """
-         value = os.getenv(var_name)
-         if value is None:
-             return default
-
-         try:
-             if var_type == bool:
-                 return value.lower() in ('1', 'true', 'yes', 'on')
-             elif var_type == int:
-                 return int(value)
-             elif var_type == float:
-                 return float(value)
-             else:
-                 return value
-         except (ValueError, TypeError):
-             return default
-
-     @staticmethod
-     def get_redis_config() -> dict:
-         """
-         Build the Redis configuration.
-
-         Returns:
-             Redis configuration dict
-         """
-         return {
-             'REDIS_HOST': EnvConfigManager.get_env_var('REDIS_HOST', '127.0.0.1', str),
-             'REDIS_PORT': EnvConfigManager.get_env_var('REDIS_PORT', 6379, int),
-             'REDIS_PASSWORD': EnvConfigManager.get_env_var('REDIS_PASSWORD', '', str),
-             'REDIS_DB': EnvConfigManager.get_env_var('REDIS_DB', 0, int),
-         }
-
-     @staticmethod
-     def get_runtime_config() -> dict:
-         """
-         Build the runtime configuration.
-
-         Returns:
-             Runtime configuration dict
-         """
-         return {
-             'CRAWLO_MODE': EnvConfigManager.get_env_var('CRAWLO_MODE', 'standalone', str),
-             'PROJECT_NAME': EnvConfigManager.get_env_var('PROJECT_NAME', 'crawlo', str),
-             'CONCURRENCY': EnvConfigManager.get_env_var('CONCURRENCY', 8, int),
-         }
-
-     @staticmethod
-     def get_version() -> str:
-         """
-         Read the framework version.
-
-         Returns:
-             Version string
-         """
-         # Locate the version file
-         version_file = os.path.join(os.path.dirname(__file__), '..', '__version__.py')
-         default_version = '1.0.0'
-
-         if os.path.exists(version_file):
-             try:
-                 with open(version_file, 'r', encoding='utf-8') as f:
-                     content = f.read()
-                 # Extract the version number with a regular expression
-                 version_match = re.search(r"__version__\s*=\s*['\"]([^'\"]*)['\"]", content)
-                 if version_match:
-                     return version_match.group(1)
-             except Exception:
-                 # Fall back to the default version if reading fails
-                 pass
-
-         return default_version
-
-
- # Convenience functions
- def get_env_var(var_name: str, default: Any = None, var_type: type = str) -> Any:
-     """
-     Convenience function: read an environment variable.
-
-     Args:
-         var_name: Name of the environment variable
-         default: Default value
-         var_type: Target type (str, int, float, bool)
-
-     Returns:
-         The converted value, or the default
-     """
-     return EnvConfigManager.get_env_var(var_name, default, var_type)
-
-
- def get_redis_config() -> dict:
-     """
-     Convenience function: build the Redis configuration.
-
-     Returns:
-         Redis configuration dict
-     """
-     return EnvConfigManager.get_redis_config()
-
-
- def get_runtime_config() -> dict:
-     """
-     Convenience function: build the runtime configuration.
-
-     Returns:
-         Runtime configuration dict
-     """
-     return EnvConfigManager.get_runtime_config()
-
-
- def get_version() -> str:
-     """
-     Convenience function: read the framework version.
-
-     Returns:
-         Version string
-     """
-     return EnvConfigManager.get_version()
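The file list above adds crawlo/utils/config_manager.py (+442), which plausibly absorbs this role, but its API is not shown in this diff. For reference, a short sketch of the removed module's behavior as defined in the hunk (runs against 1.4.6 only):

```python
# Runs against crawlo 1.4.6 only: crawlo.utils.env_config is removed in 1.4.8.
import os

from crawlo.utils.env_config import get_env_var, get_redis_config

os.environ["REDIS_PORT"] = "6380"
os.environ["CONCURRENCY"] = "not-a-number"

print(get_env_var("REDIS_PORT", 6379, int))  # 6380, parsed from the environment
print(get_env_var("CONCURRENCY", 8, int))    # 8: an unparseable value falls back to the default
print(get_redis_config()["REDIS_HOST"])      # '127.0.0.1' unless REDIS_HOST is set
```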
crawlo/utils/large_scale_config.py (deleted)
@@ -1,287 +0,0 @@
- #!/usr/bin/env python3
- # -*- coding: utf-8 -*-
- """
- Configuration helper for large-scale crawls
- Provides optimized presets for workloads of tens of thousands of requests
- """
- from typing import Dict, Any
-
- from crawlo.utils.queue_helper import QueueHelper
-
-
- class LargeScaleConfig:
-     """Configuration presets for large-scale crawls."""
-
-     @staticmethod
-     def conservative_config(concurrency: int = 8) -> Dict[str, Any]:
-         """
-         Conservative preset - for resource-constrained environments.
-
-         Characteristics:
-         - Small queue capacity
-         - Low concurrency
-         - Longer delays
-         """
-         config = QueueHelper.use_redis_queue(
-             queue_name="crawlo:conservative",
-             max_retries=3,
-             timeout=300
-         )
-
-         config.update({
-             # Concurrency control
-             'CONCURRENCY': concurrency,
-             'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 10,  # Queue capacity is 10x the concurrency
-             'MAX_RUNNING_SPIDERS': 1,
-
-             # Request control
-             'DOWNLOAD_DELAY': 0.2,
-             'RANDOMNESS': True,
-             'RANDOM_RANGE': (0.8, 1.5),
-
-             # Memory control
-             'DOWNLOAD_MAXSIZE': 5 * 1024 * 1024,  # 5MB
-             'CONNECTION_POOL_LIMIT': concurrency * 2,
-
-             # Retry policy
-             'MAX_RETRY_TIMES': 2,
-
-             # Use the enhanced engine
-             'ENGINE_CLASS': 'crawlo.core.engine.Engine'
-         })
-
-         return config
-
-     @staticmethod
-     def balanced_config(concurrency: int = 16) -> Dict[str, Any]:
-         """
-         Balanced preset - for typical production environments.
-
-         Characteristics:
-         - Medium queue capacity
-         - Balanced concurrency
-         - Moderate delays
-         """
-         config = QueueHelper.use_redis_queue(
-             queue_name="crawlo:balanced",
-             max_retries=5,
-             timeout=600
-         )
-
-         config.update({
-             # Concurrency control
-             'CONCURRENCY': concurrency,
-             'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 15,
-             'MAX_RUNNING_SPIDERS': 2,
-
-             # Request control
-             'DOWNLOAD_DELAY': 0.1,
-             'RANDOMNESS': True,
-             'RANDOM_RANGE': (0.5, 1.2),
-
-             # Memory control
-             'DOWNLOAD_MAXSIZE': 10 * 1024 * 1024,  # 10MB
-             'CONNECTION_POOL_LIMIT': concurrency * 3,
-
-             # Retry policy
-             'MAX_RETRY_TIMES': 3,
-
-             # Use the enhanced engine
-             'ENGINE_CLASS': 'crawlo.core.engine.Engine'
-         })
-
-         return config
-
-     @staticmethod
-     def aggressive_config(concurrency: int = 32) -> Dict[str, Any]:
-         """
-         Aggressive preset - for high-performance environments.
-
-         Characteristics:
-         - Large queue capacity
-         - High concurrency
-         - Short delays
-         """
-         config = QueueHelper.use_redis_queue(
-             queue_name="crawlo:aggressive",
-             max_retries=10,
-             timeout=900
-         )
-
-         config.update({
-             # Concurrency control
-             'CONCURRENCY': concurrency,
-             'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 20,
-             'MAX_RUNNING_SPIDERS': 3,
-
-             # Request control
-             'DOWNLOAD_DELAY': 0.05,
-             'RANDOMNESS': True,
-             'RANDOM_RANGE': (0.3, 1.0),
-
-             # Memory control
-             'DOWNLOAD_MAXSIZE': 20 * 1024 * 1024,  # 20MB
-             'CONNECTION_POOL_LIMIT': concurrency * 4,
-
-             # Retry policy
-             'MAX_RETRY_TIMES': 5,
-
-             # Use the enhanced engine
-             'ENGINE_CLASS': 'crawlo.core.engine.Engine'
-         })
-
-         return config
-
-     @staticmethod
-     def memory_optimized_config(concurrency: int = 12) -> Dict[str, Any]:
-         """
-         Memory-optimized preset - for large-scale crawls under memory pressure.
-
-         Characteristics:
-         - Small queue with fast turnover
-         - Strict memory limits
-         - Uses Redis to reduce memory pressure
-         """
-         config = QueueHelper.use_redis_queue(
-             queue_name="crawlo:memory_optimized",
-             max_retries=3,
-             timeout=300
-         )
-
-         config.update({
-             # Concurrency control
-             'CONCURRENCY': concurrency,
-             'SCHEDULER_MAX_QUEUE_SIZE': concurrency * 5,  # Small queue
-             'MAX_RUNNING_SPIDERS': 1,
-
-             # Request control
-             'DOWNLOAD_DELAY': 0.1,
-             'RANDOMNESS': False,  # Disable randomness to reduce memory use
-
-             # Strict memory limits
-             'DOWNLOAD_MAXSIZE': 2 * 1024 * 1024,  # 2MB
-             'DOWNLOAD_WARN_SIZE': 512 * 1024,  # 512KB
-             'CONNECTION_POOL_LIMIT': concurrency,
-
-             # Retry policy
-             'MAX_RETRY_TIMES': 2,
-
-             # Use the enhanced engine
-             'ENGINE_CLASS': 'crawlo.core.engine.Engine'
-         })
-
-         return config
-
-
- def apply_large_scale_config(settings_dict: Dict[str, Any], config_type: str = "balanced", concurrency: int = None):
-     """
-     Apply a large-scale configuration preset.
-
-     Args:
-         settings_dict: Settings dictionary
-         config_type: Preset type ("conservative", "balanced", "aggressive", "memory_optimized")
-         concurrency: Concurrency (optional; the preset default is used if omitted)
-     """
-     config_map = {
-         "conservative": LargeScaleConfig.conservative_config,
-         "balanced": LargeScaleConfig.balanced_config,
-         "aggressive": LargeScaleConfig.aggressive_config,
-         "memory_optimized": LargeScaleConfig.memory_optimized_config
-     }
-
-     if config_type not in config_map:
-         raise ValueError(f"Unsupported config type: {config_type}")
-
-     if concurrency:
-         config = config_map[config_type](concurrency)
-     else:
-         config = config_map[config_type]()
-
-     settings_dict.update(config)
-
-     return config
-
-
- # Usage examples and notes
- USAGE_GUIDE = """
- # Configuration guide for large-scale crawls
-
- ## 1. Pick the right preset
-
- ### Conservative
- - Scenario: limited resources, unstable networks
- - Concurrency: 8 (default)
- - Queue capacity: 80
- - Delay: 200ms
- - Typical use: personal development, small crawls
-
- ### Balanced
- - Scenario: typical production environments
- - Concurrency: 16 (default)
- - Queue capacity: 240
- - Delay: 100ms
- - Typical use: small and mid-sized production deployments
-
- ### Aggressive
- - Scenario: high-performance servers, speed-critical crawls
- - Concurrency: 32 (default)
- - Queue capacity: 640
- - Delay: 50ms
- - Typical use: large deployments with high-concurrency needs
-
- ### Memory Optimized
- - Scenario: large-scale crawls with limited memory
- - Concurrency: 12 (default)
- - Queue capacity: 60 (small queue, fast turnover)
- - Delay: 100ms
- - Typical use: tens or hundreds of thousands of requests under memory limits
-
- ## 2. How to use
-
- ```python
- # Option 1: configure directly in settings.py
- from crawlo.utils.large_scale_config import apply_large_scale_config
-
- # Balanced preset with 16 concurrent requests
- apply_large_scale_config(locals(), "balanced", 16)
-
- # Option 2: configure dynamically in spider code
- from crawlo.crawler import CrawlerProcess
- from crawlo.utils.large_scale_config import LargeScaleConfig
-
- process = CrawlerProcess()
- config = LargeScaleConfig.memory_optimized_config(20)  # memory-optimized preset, 20 concurrency
- process.settings.update(config)
-
- # Option 3: customize a preset
- config = LargeScaleConfig.balanced_config(24)  # 24 concurrency
- config['DOWNLOAD_DELAY'] = 0.05  # custom delay
- process.settings.update(config)
- ```
-
- ## 3. Recommendations per scenario
-
- ### 50k+ requests
- ```python
- # Memory-optimized preset recommended
- apply_large_scale_config(locals(), "memory_optimized", 20)
- ```
-
- ### Fast crawling on capable servers
- ```python
- # Aggressive preset recommended
- apply_large_scale_config(locals(), "aggressive", 40)
- ```
-
- ### Limited resources, stable operation required
- ```python
- # Conservative preset recommended
- apply_large_scale_config(locals(), "conservative", 6)
- ```
-
- ### Balance performance and stability
- ```python
- # Balanced preset recommended
- apply_large_scale_config(locals(), "balanced", 18)
- ```
- """