crawlo 1.3.3__py3-none-any.whl → 1.3.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (279)
  1. crawlo/__init__.py +87 -63
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/help.py +138 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +341 -323
  10. crawlo/commands/startproject.py +436 -436
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +196 -196
  13. crawlo/config.py +312 -312
  14. crawlo/config_validator.py +277 -277
  15. crawlo/core/__init__.py +46 -2
  16. crawlo/core/engine.py +439 -365
  17. crawlo/core/processor.py +40 -40
  18. crawlo/core/scheduler.py +257 -256
  19. crawlo/crawler.py +639 -1167
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -194
  22. crawlo/downloader/__init__.py +273 -273
  23. crawlo/downloader/aiohttp_downloader.py +228 -226
  24. crawlo/downloader/cffi_downloader.py +245 -245
  25. crawlo/downloader/httpx_downloader.py +259 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +402 -402
  28. crawlo/downloader/selenium_downloader.py +472 -472
  29. crawlo/event.py +11 -11
  30. crawlo/exceptions.py +81 -81
  31. crawlo/extension/__init__.py +39 -39
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +57 -57
  34. crawlo/extension/log_stats.py +81 -81
  35. crawlo/extension/logging_extension.py +61 -52
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/factories/__init__.py +28 -0
  40. crawlo/factories/base.py +69 -0
  41. crawlo/factories/crawler.py +104 -0
  42. crawlo/factories/registry.py +85 -0
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +257 -234
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/framework.py +292 -0
  47. crawlo/initialization/__init__.py +40 -0
  48. crawlo/initialization/built_in.py +426 -0
  49. crawlo/initialization/context.py +142 -0
  50. crawlo/initialization/core.py +194 -0
  51. crawlo/initialization/phases.py +149 -0
  52. crawlo/initialization/registry.py +146 -0
  53. crawlo/items/__init__.py +23 -23
  54. crawlo/items/base.py +23 -22
  55. crawlo/items/fields.py +52 -52
  56. crawlo/items/items.py +104 -104
  57. crawlo/logging/__init__.py +38 -0
  58. crawlo/logging/config.py +97 -0
  59. crawlo/logging/factory.py +129 -0
  60. crawlo/logging/manager.py +112 -0
  61. crawlo/middleware/__init__.py +21 -21
  62. crawlo/middleware/default_header.py +132 -132
  63. crawlo/middleware/download_delay.py +104 -104
  64. crawlo/middleware/middleware_manager.py +135 -135
  65. crawlo/middleware/offsite.py +123 -123
  66. crawlo/middleware/proxy.py +386 -386
  67. crawlo/middleware/request_ignore.py +86 -86
  68. crawlo/middleware/response_code.py +163 -163
  69. crawlo/middleware/response_filter.py +136 -136
  70. crawlo/middleware/retry.py +124 -124
  71. crawlo/middleware/simple_proxy.py +65 -65
  72. crawlo/mode_manager.py +212 -187
  73. crawlo/network/__init__.py +21 -21
  74. crawlo/network/request.py +379 -379
  75. crawlo/network/response.py +359 -359
  76. crawlo/pipelines/__init__.py +21 -21
  77. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  78. crawlo/pipelines/console_pipeline.py +39 -39
  79. crawlo/pipelines/csv_pipeline.py +316 -316
  80. crawlo/pipelines/database_dedup_pipeline.py +222 -222
  81. crawlo/pipelines/json_pipeline.py +218 -218
  82. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  83. crawlo/pipelines/mongo_pipeline.py +131 -131
  84. crawlo/pipelines/mysql_pipeline.py +318 -318
  85. crawlo/pipelines/pipeline_manager.py +76 -75
  86. crawlo/pipelines/redis_dedup_pipeline.py +166 -166
  87. crawlo/project.py +327 -325
  88. crawlo/queue/pqueue.py +43 -37
  89. crawlo/queue/queue_manager.py +503 -379
  90. crawlo/queue/redis_priority_queue.py +326 -306
  91. crawlo/settings/__init__.py +7 -7
  92. crawlo/settings/default_settings.py +321 -225
  93. crawlo/settings/setting_manager.py +214 -198
  94. crawlo/spider/__init__.py +657 -639
  95. crawlo/stats_collector.py +73 -59
  96. crawlo/subscriber.py +129 -129
  97. crawlo/task_manager.py +139 -30
  98. crawlo/templates/crawlo.cfg.tmpl +10 -10
  99. crawlo/templates/project/__init__.py.tmpl +3 -3
  100. crawlo/templates/project/items.py.tmpl +17 -17
  101. crawlo/templates/project/middlewares.py.tmpl +118 -118
  102. crawlo/templates/project/pipelines.py.tmpl +96 -96
  103. crawlo/templates/project/settings.py.tmpl +168 -267
  104. crawlo/templates/project/settings_distributed.py.tmpl +167 -180
  105. crawlo/templates/project/settings_gentle.py.tmpl +167 -61
  106. crawlo/templates/project/settings_high_performance.py.tmpl +168 -131
  107. crawlo/templates/project/settings_minimal.py.tmpl +66 -35
  108. crawlo/templates/project/settings_simple.py.tmpl +165 -102
  109. crawlo/templates/project/spiders/__init__.py.tmpl +10 -6
  110. crawlo/templates/run.py.tmpl +34 -38
  111. crawlo/templates/spider/spider.py.tmpl +143 -143
  112. crawlo/templates/spiders_init.py.tmpl +10 -0
  113. crawlo/tools/__init__.py +200 -200
  114. crawlo/tools/anti_crawler.py +268 -268
  115. crawlo/tools/authenticated_proxy.py +240 -240
  116. crawlo/tools/data_formatter.py +225 -225
  117. crawlo/tools/data_validator.py +180 -180
  118. crawlo/tools/date_tools.py +289 -289
  119. crawlo/tools/distributed_coordinator.py +388 -388
  120. crawlo/tools/encoding_converter.py +127 -127
  121. crawlo/tools/network_diagnostic.py +365 -0
  122. crawlo/tools/request_tools.py +82 -82
  123. crawlo/tools/retry_mechanism.py +224 -224
  124. crawlo/tools/scenario_adapter.py +262 -262
  125. crawlo/tools/text_cleaner.py +232 -232
  126. crawlo/utils/__init__.py +34 -34
  127. crawlo/utils/batch_processor.py +259 -259
  128. crawlo/utils/class_loader.py +26 -0
  129. crawlo/utils/controlled_spider_mixin.py +439 -439
  130. crawlo/utils/db_helper.py +343 -343
  131. crawlo/utils/enhanced_error_handler.py +356 -356
  132. crawlo/utils/env_config.py +142 -142
  133. crawlo/utils/error_handler.py +165 -124
  134. crawlo/utils/func_tools.py +82 -82
  135. crawlo/utils/large_scale_config.py +286 -286
  136. crawlo/utils/large_scale_helper.py +344 -344
  137. crawlo/utils/log.py +44 -200
  138. crawlo/utils/performance_monitor.py +285 -285
  139. crawlo/utils/queue_helper.py +175 -175
  140. crawlo/utils/redis_connection_pool.py +388 -351
  141. crawlo/utils/redis_key_validator.py +198 -198
  142. crawlo/utils/request.py +267 -267
  143. crawlo/utils/request_serializer.py +225 -218
  144. crawlo/utils/spider_loader.py +61 -61
  145. crawlo/utils/system.py +11 -11
  146. crawlo/utils/tools.py +4 -4
  147. crawlo/utils/url.py +39 -39
  148. {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/METADATA +1126 -1020
  149. crawlo-1.3.4.dist-info/RECORD +278 -0
  150. examples/__init__.py +7 -7
  151. tests/__init__.py +7 -7
  152. tests/advanced_tools_example.py +275 -275
  153. tests/authenticated_proxy_example.py +107 -107
  154. tests/baidu_performance_test.py +109 -0
  155. tests/baidu_test.py +60 -0
  156. tests/cleaners_example.py +160 -160
  157. tests/comprehensive_framework_test.py +213 -0
  158. tests/comprehensive_test.py +82 -0
  159. tests/comprehensive_testing_summary.md +187 -0
  160. tests/config_validation_demo.py +142 -142
  161. tests/controlled_spider_example.py +205 -205
  162. tests/date_tools_example.py +180 -180
  163. tests/debug_configure.py +70 -0
  164. tests/debug_framework_logger.py +85 -0
  165. tests/debug_log_levels.py +64 -0
  166. tests/debug_pipelines.py +66 -66
  167. tests/distributed_test.py +67 -0
  168. tests/distributed_test_debug.py +77 -0
  169. tests/dynamic_loading_example.py +523 -523
  170. tests/dynamic_loading_test.py +104 -104
  171. tests/env_config_example.py +133 -133
  172. tests/error_handling_example.py +171 -171
  173. tests/final_command_test_report.md +0 -0
  174. tests/final_comprehensive_test.py +152 -0
  175. tests/final_validation_test.py +183 -0
  176. tests/framework_performance_test.py +203 -0
  177. tests/optimized_performance_test.py +212 -0
  178. tests/performance_comparison.py +246 -0
  179. tests/queue_blocking_test.py +114 -0
  180. tests/queue_test.py +90 -0
  181. tests/redis_key_validation_demo.py +130 -130
  182. tests/request_params_example.py +150 -150
  183. tests/response_improvements_example.py +144 -144
  184. tests/scrapy_comparison/ofweek_scrapy.py +139 -0
  185. tests/scrapy_comparison/scrapy_test.py +134 -0
  186. tests/simple_command_test.py +120 -0
  187. tests/simple_crawlo_test.py +128 -0
  188. tests/simple_log_test.py +58 -0
  189. tests/simple_optimization_test.py +129 -0
  190. tests/simple_spider_test.py +50 -0
  191. tests/simple_test.py +48 -0
  192. tests/test_advanced_tools.py +148 -148
  193. tests/test_all_commands.py +231 -0
  194. tests/test_all_redis_key_configs.py +145 -145
  195. tests/test_authenticated_proxy.py +141 -141
  196. tests/test_batch_processor.py +179 -0
  197. tests/test_cleaners.py +54 -54
  198. tests/test_component_factory.py +175 -0
  199. tests/test_comprehensive.py +146 -146
  200. tests/test_config_consistency.py +80 -80
  201. tests/test_config_merge.py +152 -152
  202. tests/test_config_validator.py +182 -182
  203. tests/test_controlled_spider_mixin.py +80 -0
  204. tests/test_crawlo_proxy_integration.py +108 -108
  205. tests/test_date_tools.py +123 -123
  206. tests/test_default_header_middleware.py +158 -158
  207. tests/test_distributed.py +65 -65
  208. tests/test_double_crawlo_fix.py +207 -207
  209. tests/test_double_crawlo_fix_simple.py +124 -124
  210. tests/test_download_delay_middleware.py +221 -221
  211. tests/test_downloader_proxy_compatibility.py +268 -268
  212. tests/test_dynamic_downloaders_proxy.py +124 -124
  213. tests/test_dynamic_proxy.py +92 -92
  214. tests/test_dynamic_proxy_config.py +146 -146
  215. tests/test_dynamic_proxy_real.py +109 -109
  216. tests/test_edge_cases.py +303 -303
  217. tests/test_enhanced_error_handler.py +270 -270
  218. tests/test_enhanced_error_handler_comprehensive.py +246 -0
  219. tests/test_env_config.py +121 -121
  220. tests/test_error_handler_compatibility.py +112 -112
  221. tests/test_factories.py +253 -0
  222. tests/test_final_validation.py +153 -153
  223. tests/test_framework_env_usage.py +103 -103
  224. tests/test_framework_logger.py +67 -0
  225. tests/test_framework_startup.py +65 -0
  226. tests/test_integration.py +169 -169
  227. tests/test_item_dedup_redis_key.py +122 -122
  228. tests/test_large_scale_config.py +113 -0
  229. tests/test_large_scale_helper.py +236 -0
  230. tests/test_mode_change.py +73 -0
  231. tests/test_mode_consistency.py +51 -51
  232. tests/test_offsite_middleware.py +221 -221
  233. tests/test_parsel.py +29 -29
  234. tests/test_performance.py +327 -327
  235. tests/test_performance_monitor.py +116 -0
  236. tests/test_proxy_api.py +264 -264
  237. tests/test_proxy_health_check.py +32 -32
  238. tests/test_proxy_middleware.py +121 -121
  239. tests/test_proxy_middleware_enhanced.py +216 -216
  240. tests/test_proxy_middleware_integration.py +136 -136
  241. tests/test_proxy_middleware_refactored.py +184 -184
  242. tests/test_proxy_providers.py +56 -56
  243. tests/test_proxy_stats.py +19 -19
  244. tests/test_proxy_strategies.py +59 -59
  245. tests/test_queue_empty_check.py +42 -0
  246. tests/test_queue_manager_double_crawlo.py +173 -173
  247. tests/test_queue_manager_redis_key.py +176 -176
  248. tests/test_random_user_agent.py +72 -72
  249. tests/test_real_scenario_proxy.py +195 -195
  250. tests/test_redis_config.py +28 -28
  251. tests/test_redis_connection_pool.py +294 -294
  252. tests/test_redis_key_naming.py +181 -181
  253. tests/test_redis_key_validator.py +123 -123
  254. tests/test_redis_queue.py +224 -224
  255. tests/test_request_ignore_middleware.py +182 -182
  256. tests/test_request_params.py +111 -111
  257. tests/test_request_serialization.py +70 -70
  258. tests/test_response_code_middleware.py +349 -349
  259. tests/test_response_filter_middleware.py +427 -427
  260. tests/test_response_improvements.py +152 -152
  261. tests/test_retry_middleware.py +241 -241
  262. tests/test_scheduler.py +252 -252
  263. tests/test_scheduler_config_update.py +133 -133
  264. tests/test_simple_response.py +61 -61
  265. tests/test_telecom_spider_redis_key.py +205 -205
  266. tests/test_template_content.py +87 -87
  267. tests/test_template_redis_key.py +134 -134
  268. tests/test_tools.py +159 -159
  269. tests/test_user_agents.py +96 -96
  270. tests/tools_example.py +260 -260
  271. tests/untested_features_report.md +139 -0
  272. tests/verify_debug.py +52 -0
  273. tests/verify_distributed.py +117 -117
  274. tests/verify_log_fix.py +112 -0
  275. crawlo-1.3.3.dist-info/RECORD +0 -219
  276. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +0 -82
  277. {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/WHEEL +0 -0
  278. {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/entry_points.txt +0 -0
  279. {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/top_level.txt +0 -0
@@ -1,213 +1,213 @@

This hunk removes and re-adds lines 1–212 of `crawlo/downloader/hybrid_downloader.py` with identical content as rendered (line 213 is unchanged context), so the file body is shown once below.

```python
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Hybrid Downloader
=================
Intelligently selects the appropriate downloader for each request, supporting
both plain protocol requests and dynamically loaded content.

Supported scenarios:
1. Both list pages and detail pages require dynamic loading
2. List pages use protocol requests, detail pages use dynamic loading
3. List pages use dynamic loading, detail pages use protocol requests

Features:
1. Detects request characteristics and selects the appropriate downloader
2. Downloader selection based on URL patterns
3. Downloader selection based on request flags
4. Unified interface and response format
5. Automatic resource management and optimization
"""
from typing import Optional, Dict, Type
from urllib.parse import urlparse

from crawlo.downloader import DownloaderBase
from crawlo.network.request import Request
from crawlo.network.response import Response
from crawlo.utils.log import get_logger

# Import downloaders lazily (avoids circular imports)
try:
    from .aiohttp_downloader import AioHttpDownloader
except ImportError:
    AioHttpDownloader = None

try:
    from .httpx_downloader import HttpXDownloader
except ImportError:
    HttpXDownloader = None

try:
    from .cffi_downloader import CurlCffiDownloader
except ImportError:
    CurlCffiDownloader = None

try:
    from .selenium_downloader import SeleniumDownloader
except ImportError:
    SeleniumDownloader = None

try:
    from .playwright_downloader import PlaywrightDownloader
except ImportError:
    PlaywrightDownloader = None


class HybridDownloader(DownloaderBase):
    """
    Hybrid downloader - selects the appropriate downloader based on request characteristics.
    """

    def __init__(self, crawler):
        super().__init__(crawler)
        self.logger = get_logger(self.__class__.__name__, crawler.settings.get("LOG_LEVEL"))

        # Cache of downloader instances
        self._downloaders: Dict[str, DownloaderBase] = {}

        # Configuration options
        self.default_protocol_downloader = crawler.settings.get("HYBRID_DEFAULT_PROTOCOL_DOWNLOADER", "aiohttp")
        self.default_dynamic_downloader = crawler.settings.get("HYBRID_DEFAULT_DYNAMIC_DOWNLOADER", "playwright")

        # URL pattern configuration
        self.dynamic_url_patterns = set(crawler.settings.get_list("HYBRID_DYNAMIC_URL_PATTERNS", []))
        self.protocol_url_patterns = set(crawler.settings.get_list("HYBRID_PROTOCOL_URL_PATTERNS", []))

        # Domain configuration
        self.dynamic_domains = set(crawler.settings.get_list("HYBRID_DYNAMIC_DOMAINS", []))
        self.protocol_domains = set(crawler.settings.get_list("HYBRID_PROTOCOL_DOMAINS", []))

    def open(self):
        super().open()
        self.logger.info("Opening HybridDownloader")

        # Initialize the default downloaders
        self._initialize_default_downloaders()

    def _initialize_default_downloaders(self):
        """Initialize the default downloaders."""
        # Protocol downloader
        protocol_downloader_cls = self._get_downloader_class(self.default_protocol_downloader)
        if protocol_downloader_cls:
            self._downloaders["protocol"] = protocol_downloader_cls(self.crawler)
            self._downloaders["protocol"].open()

        # Dynamic downloader
        dynamic_downloader_cls = self._get_downloader_class(self.default_dynamic_downloader)
        if dynamic_downloader_cls:
            self._downloaders["dynamic"] = dynamic_downloader_cls(self.crawler)
            # Initialize the downloader via the standard open() method
            self._downloaders["dynamic"].open()

        self.logger.debug("Default downloaders initialized")

    def _get_downloader_class(self, downloader_type: str) -> Optional[Type[DownloaderBase]]:
        """Resolve a downloader class from its type name."""
        downloader_map = {
            "aiohttp": AioHttpDownloader,
            "httpx": HttpXDownloader,
            "curl_cffi": CurlCffiDownloader,
            "selenium": SeleniumDownloader,
            "playwright": PlaywrightDownloader
        }
        return downloader_map.get(downloader_type.lower())

    async def download(self, request: Request) -> Optional[Response]:
        """Pick the appropriate downloader for the request and perform the download."""
        # Determine which downloader type to use
        downloader_type = self._determine_downloader_type(request)

        # Fetch the corresponding downloader
        downloader = self._get_or_create_downloader(downloader_type)
        if not downloader:
            raise RuntimeError(f"No downloader available for type: {downloader_type}")

        self.logger.debug(f"Using {downloader_type} downloader for {request.url}")

        # Perform the download
        return await downloader.download(request)

    def _determine_downloader_type(self, request: Request) -> str:
        """Determine the downloader type from request characteristics."""
        url = request.url
        parsed_url = urlparse(url)
        domain = parsed_url.netloc.lower()

        # 1. Check explicit request flags
        if request.meta.get("use_dynamic_loader"):
            return "dynamic"
        elif request.meta.get("use_protocol_loader"):
            return "protocol"

        # 2. Check URL patterns
        for pattern in self.dynamic_url_patterns:
            if pattern in url:
                return "dynamic"

        for pattern in self.protocol_url_patterns:
            if pattern in url:
                return "protocol"

        # 3. Check domains
        if domain in self.dynamic_domains:
            return "dynamic"

        if domain in self.protocol_domains:
            return "protocol"

        # 4. Check file extensions (dynamic content usually lacks a static extension)
        path = parsed_url.path.lower()
        static_extensions = {'.js', '.css', '.jpg', '.jpeg', '.png', '.gif', '.ico', '.pdf', '.zip', '.doc', '.docx'}
        if any(path.endswith(ext) for ext in static_extensions):
            return "protocol"

        # 5. Check the request method (POST requests are more likely to need dynamic loading)
        if request.method.upper() == "POST":
            return "dynamic"

        # 6. Fallback heuristic: look for typical dynamic-content markers in the URL
        dynamic_indicators = ['ajax', 'api', 'dynamic', 'spa', 'react', 'vue', 'angular']
        if any(indicator in url.lower() for indicator in dynamic_indicators):
            return "dynamic"

        # Default to the protocol downloader
        return "protocol"

    def _get_or_create_downloader(self, downloader_type: str) -> Optional[DownloaderBase]:
        """Get or create a downloader instance."""
        # Return the cached instance if one exists
        if downloader_type in self._downloaders:
            return self._downloaders[downloader_type]

        # Create a new downloader instance
        if downloader_type == "protocol":
            downloader_cls = self._get_downloader_class(self.default_protocol_downloader)
        elif downloader_type == "dynamic":
            downloader_cls = self._get_downloader_class(self.default_dynamic_downloader)
        else:
            return None

        if not downloader_cls:
            return None

        downloader = downloader_cls(self.crawler)
        # Initialize the downloader via the standard open() method
        downloader.open()

        self._downloaders[downloader_type] = downloader
        return downloader

    async def close(self) -> None:
        """Close all downloaders."""
        for name, downloader in self._downloaders.items():
            try:
                if hasattr(downloader, 'close_async'):
                    await downloader.close_async()
                else:
                    await downloader.close()
                self.logger.debug(f"Closed {name} downloader")
            except Exception as e:
                self.logger.warning(f"Error closing {name} downloader: {e}")

        self._downloaders.clear()
        self.logger.info("HybridDownloader closed.")
```
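The selection logic above applies its checks in strict order: explicit request flags first, then configured URL patterns, configured domains, static file extensions, the HTTP method, and finally keyword heuristics, defaulting to the protocol downloader. Below is a minimal sketch of driving that logic from a project's settings and spider code. The `HYBRID_*` keys and `meta` flag names come directly from the implementation above; the setting values, URLs, and the Scrapy-style `start_requests` hook are illustrative assumptions, not confirmed crawlo API.

```python
# settings.py -- illustrative values; only the HYBRID_* key names are taken
# from the implementation above.
HYBRID_DEFAULT_PROTOCOL_DOWNLOADER = "aiohttp"     # handles plain HTTP requests
HYBRID_DEFAULT_DYNAMIC_DOWNLOADER = "playwright"   # handles browser-rendered requests
HYBRID_DYNAMIC_URL_PATTERNS = ["/detail/"]         # substring match against the full URL
HYBRID_PROTOCOL_DOMAINS = ["static.example.com"]   # exact lowercased netloc match


# spider code -- per-request flags override every other rule (check #1 above).
from crawlo.network.request import Request

def start_requests(self):  # assumed Scrapy-style entry point
    # Forced through the dynamic (browser) downloader, regardless of URL:
    yield Request("https://example.com/app", meta={"use_dynamic_loader": True})
    # Forced through the plain protocol downloader:
    yield Request("https://example.com/export.json", meta={"use_protocol_loader": True})
    # No flag: "/detail/" matches HYBRID_DYNAMIC_URL_PATTERNS, so it goes dynamic.
    yield Request("https://example.com/detail/42")
```

Because instances are cached in `_downloaders`, at most one protocol and one dynamic downloader are created per crawler, no matter how many requests each path serves.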