crawlo-1.4.7-py3-none-any.whl → crawlo-1.4.8-py3-none-any.whl
This diff compares publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +90 -90
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +186 -186
- crawlo/commands/help.py +140 -140
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +379 -379
- crawlo/commands/startproject.py +460 -460
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +320 -320
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +451 -451
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +290 -290
- crawlo/crawler.py +698 -698
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +280 -280
- crawlo/downloader/aiohttp_downloader.py +233 -233
- crawlo/downloader/cffi_downloader.py +250 -250
- crawlo/downloader/httpx_downloader.py +265 -265
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +425 -425
- crawlo/downloader/selenium_downloader.py +486 -486
- crawlo/event.py +45 -45
- crawlo/exceptions.py +214 -214
- crawlo/extension/__init__.py +64 -64
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +53 -53
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +104 -104
- crawlo/factories/registry.py +84 -84
- crawlo/factories/utils.py +134 -134
- crawlo/filters/__init__.py +170 -170
- crawlo/filters/aioredis_filter.py +347 -347
- crawlo/filters/memory_filter.py +261 -261
- crawlo/framework.py +306 -306
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +391 -391
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +240 -240
- crawlo/initialization/phases.py +229 -229
- crawlo/initialization/registry.py +143 -143
- crawlo/initialization/utils.py +48 -48
- crawlo/interfaces.py +23 -23
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +42 -42
- crawlo/logging/config.py +280 -276
- crawlo/logging/factory.py +175 -175
- crawlo/logging/manager.py +104 -104
- crawlo/middleware/__init__.py +87 -87
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +142 -142
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +209 -209
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +287 -287
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +408 -376
- crawlo/network/response.py +598 -569
- crawlo/pipelines/__init__.py +52 -52
- crawlo/pipelines/base_pipeline.py +452 -452
- crawlo/pipelines/bloom_dedup_pipeline.py +145 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +196 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +104 -105
- crawlo/pipelines/mongo_pipeline.py +140 -139
- crawlo/pipelines/mysql_pipeline.py +468 -469
- crawlo/pipelines/pipeline_manager.py +100 -100
- crawlo/pipelines/redis_dedup_pipeline.py +155 -155
- crawlo/project.py +347 -347
- crawlo/queue/__init__.py +9 -9
- crawlo/queue/pqueue.py +38 -38
- crawlo/queue/queue_manager.py +591 -591
- crawlo/queue/redis_priority_queue.py +518 -518
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +287 -284
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +658 -657
- crawlo/stats_collector.py +81 -81
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +1 -1
- crawlo/templates/project/items.py.tmpl +13 -13
- crawlo/templates/project/middlewares.py.tmpl +38 -38
- crawlo/templates/project/pipelines.py.tmpl +35 -35
- crawlo/templates/project/settings.py.tmpl +113 -109
- crawlo/templates/project/settings_distributed.py.tmpl +160 -156
- crawlo/templates/project/settings_gentle.py.tmpl +174 -170
- crawlo/templates/project/settings_high_performance.py.tmpl +175 -171
- crawlo/templates/project/settings_minimal.py.tmpl +102 -98
- crawlo/templates/project/settings_simple.py.tmpl +172 -168
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +23 -23
- crawlo/templates/spider/spider.py.tmpl +32 -32
- crawlo/templates/spiders_init.py.tmpl +4 -4
- crawlo/tools/__init__.py +86 -86
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +74 -50
- crawlo/utils/batch_processor.py +276 -276
- crawlo/utils/config_manager.py +442 -442
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +250 -250
- crawlo/utils/encoding_helper.py +190 -0
- crawlo/utils/error_handler.py +410 -410
- crawlo/utils/fingerprint.py +121 -121
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/leak_detector.py +335 -335
- crawlo/utils/misc.py +81 -81
- crawlo/utils/mongo_connection_pool.py +157 -157
- crawlo/utils/mysql_connection_pool.py +197 -197
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_checker.py +90 -90
- crawlo/utils/redis_connection_pool.py +578 -578
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +278 -278
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/resource_manager.py +337 -337
- crawlo/utils/response_helper.py +113 -0
- crawlo/utils/selector_helper.py +138 -137
- crawlo/utils/singleton.py +69 -69
- crawlo/utils/spider_loader.py +201 -201
- crawlo/utils/text_helper.py +94 -94
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/METADATA +831 -689
- crawlo-1.4.8.dist-info/RECORD +347 -0
- examples/__init__.py +7 -7
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +217 -217
- tests/authenticated_proxy_example.py +110 -110
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/bug_check_test.py +250 -250
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/direct_selector_helper_test.py +96 -96
- tests/distributed_dedup_test.py +467 -467
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/error_handling_example.py +171 -171
- tests/explain_mysql_update_behavior.py +76 -76
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/monitor_redis_dedup.sh +72 -72
- tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -12
- tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -100
- tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -13
- tests/ofweek_scrapy/ofweek_scrapy/settings.py +84 -84
- tests/ofweek_scrapy/ofweek_scrapy/spiders/__init__.py +4 -4
- tests/ofweek_scrapy/scrapy.cfg +11 -11
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +244 -244
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_cli_test.py +54 -54
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +126 -126
- tests/simple_follow_test.py +38 -38
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_response_selector_test.py +94 -94
- tests/simple_selector_helper_test.py +154 -154
- tests/simple_selector_test.py +207 -207
- tests/simple_spider_test.py +49 -49
- tests/simple_url_test.py +73 -73
- tests/simulate_mysql_update_test.py +139 -139
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_asyncmy_usage.py +56 -56
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_cli_arguments.py +118 -118
- tests/test_component_factory.py +174 -174
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawler_process_import.py +38 -38
- tests/test_crawler_process_spider_modules.py +47 -47
- tests/test_crawlo_proxy_integration.py +114 -114
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +124 -124
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +272 -272
- tests/test_edge_cases.py +305 -305
- tests/test_encoding_core.py +56 -56
- tests/test_encoding_detection.py +126 -126
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_factory_compatibility.py +196 -196
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +374 -374
- tests/test_logging_final.py +184 -184
- tests/test_logging_integration.py +312 -312
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +141 -141
- tests/test_mode_consistency.py +51 -51
- tests/test_multi_directory.py +67 -67
- tests/test_multiple_spider_modules.py +80 -80
- tests/test_mysql_pipeline_config.py +164 -164
- tests/test_mysql_pipeline_error.py +98 -98
- tests/test_mysql_pipeline_init_log.py +82 -82
- tests/test_mysql_pipeline_integration.py +132 -132
- tests/test_mysql_pipeline_refactor.py +143 -143
- tests/test_mysql_pipeline_refactor_simple.py +85 -85
- tests/test_mysql_pipeline_robustness.py +195 -195
- tests/test_mysql_pipeline_types.py +88 -88
- tests/test_mysql_update_columns.py +93 -93
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_optimized_selector_naming.py +100 -100
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +211 -211
- tests/test_priority_consistency.py +151 -151
- tests/test_priority_consistency_fixed.py +249 -249
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +217 -217
- tests/test_proxy_middleware_enhanced.py +212 -212
- tests/test_proxy_middleware_integration.py +142 -142
- tests/test_proxy_middleware_refactored.py +207 -207
- tests/test_proxy_only.py +83 -83
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_proxy_with_downloader.py +152 -152
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +130 -130
- tests/test_random_headers_default.py +322 -322
- tests/test_random_headers_necessity.py +308 -308
- tests/test_random_user_agent.py +72 -72
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +129 -129
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_follow.py +104 -104
- tests/test_response_improvements.py +152 -152
- tests/test_response_selector_methods.py +92 -92
- tests/test_response_url_methods.py +70 -70
- tests/test_response_urljoin.py +86 -86
- tests/test_retry_middleware.py +333 -333
- tests/test_retry_middleware_realistic.py +273 -273
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_scrapy_style_encoding.py +112 -112
- tests/test_selector_helper.py +100 -100
- tests/test_selector_optimizations.py +146 -146
- tests/test_simple_response.py +61 -61
- tests/test_spider_loader.py +49 -49
- tests/test_spider_loader_comprehensive.py +69 -69
- tests/test_spider_modules.py +84 -84
- tests/test_spiders/test_spider.py +9 -9
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +176 -176
- tests/test_user_agents.py +96 -96
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- tests/verify_mysql_warnings.py +109 -109
- crawlo/utils/log.py +0 -80
- crawlo/utils/url_utils.py +0 -40
- crawlo-1.4.7.dist-info/RECORD +0 -347
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/WHEEL +0 -0
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/top_level.txt +0 -0
@@ -1,233 +1,233 @@

Every one of the 233 removed lines is re-added unchanged, so the hunk amounts to a full rewrite of the file with identical content:

```python
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import asyncio
from yarl import URL
from typing import Optional
from aiohttp import (
    ClientSession,
    TCPConnector,
    ClientTimeout,
    TraceConfig,
    ClientResponse,
    ClientError,
    BasicAuth,
)

from crawlo.network.response import Response
from crawlo.logging import get_logger
from crawlo.downloader import DownloaderBase


class AioHttpDownloader(DownloaderBase):
    """
    High-performance asynchronous downloader
    - Built on a persistent ClientSession
    - Understands the Request's high-level semantics (json_body/form_data)
    - Supports GET/POST/PUT/DELETE and other methods
    - Supports proxies set by middleware (HTTP/HTTPS)
    - Guards against memory exhaustion
    """

    def __init__(self, crawler):
        super().__init__(crawler)
        self.session: Optional[ClientSession] = None
        self.max_download_size: int = 0
        self.logger = get_logger(self.__class__.__name__)

    def open(self):
        super().open()
        # Key downloader start-up messages restored to INFO level
        # Read configuration
        timeout_secs = self.crawler.settings.get_int("DOWNLOAD_TIMEOUT", 30)
        verify_ssl = self.crawler.settings.get_bool("VERIFY_SSL", True)
        pool_limit = self.crawler.settings.get_int("CONNECTION_POOL_LIMIT", 300)  # raised from 200 to 300
        pool_per_host = self.crawler.settings.get_int("CONNECTION_POOL_LIMIT_PER_HOST", 100)  # raised from 50 to 100
        self.max_download_size = self.crawler.settings.get_int("DOWNLOAD_MAXSIZE", 10 * 1024 * 1024)  # 10MB

        # Create the connector
        connector = TCPConnector(
            verify_ssl=verify_ssl,
            limit=pool_limit,
            limit_per_host=pool_per_host,
            ttl_dns_cache=300,
            keepalive_timeout=15,
            force_close=False,
            use_dns_cache=True,  # enable DNS caching
            family=0,  # allow both IPv4 and IPv6
        )

        # Timeout control - additional timeout settings
        timeout = ClientTimeout(
            total=timeout_secs,
            connect=timeout_secs/2,  # connect timeout
            sock_read=timeout_secs,  # read timeout
            sock_connect=timeout_secs/2  # socket connect timeout
        )

        # Request tracing
        trace_config = TraceConfig()
        trace_config.on_request_start.append(self._on_request_start)
        trace_config.on_request_end.append(self._on_request_end)
        trace_config.on_request_exception.append(self._on_request_exception)

        # Create the global session
        self.session = ClientSession(
            connector=connector,
            timeout=timeout,
            trace_configs=[trace_config],
            auto_decompress=True,
        )

        # Gather a summary of the downloader configuration
        spider_name = getattr(self.crawler.spider, 'name', 'Unknown')
        concurrency = self.crawler.settings.get('CONCURRENCY', 4)

    async def download(self, request) -> Optional[Response]:
        """Download a request and return the response."""
        if not self.session or self.session.closed:
            raise RuntimeError("AioHttpDownloader session is not open.")

        start_time = None
        if self.crawler.settings.get_bool("DOWNLOAD_STATS", True):
            import time
            start_time = time.time()

        try:
            # Generic send logic (supports all HTTP methods)
            async with await self._send_request(self.session, request) as resp:
                # Safety check: guard against OOM from oversized response bodies
                content_length = resp.headers.get("Content-Length")
                if content_length and int(content_length) > self.max_download_size:
                    raise OverflowError(f"Response too large: {content_length} > {self.max_download_size}")

                body = await resp.read()
                response = self._structure_response(request, resp, body)

                # Record download statistics
                if start_time:
                    download_time = time.time() - start_time
                    self.logger.debug(f"Downloaded {request.url} in {download_time:.3f}s, size: {len(body)} bytes")

                return response

        except ClientError as e:
            self.logger.error(f"Client error for {request.url}: {e}")
            raise
        except Exception as e:
            self.logger.critical(f"Unexpected error for {request.url}: {e}", exc_info=True)
            raise

    @staticmethod
    async def _send_request(session: ClientSession, request) -> ClientResponse:
        """
        Send the request according to its method and high-level semantics.
        Supports a proxy set by middleware, in either of these formats:
        - str: "http://user:pass@host:port"
        - dict: {"http": "...", "https": "..."} (the http or https field is picked automatically)
        """
        method = request.method.lower()
        if not hasattr(session, method):
            raise ValueError(f"Unsupported HTTP method: {request.method}")

        method_func = getattr(session, method)

        # Build the keyword arguments
        kwargs = {
            "headers": request.headers,
            "cookies": request.cookies,
            "allow_redirects": request.allow_redirects,
        }

        # === Proxy handling ===
        proxy = getattr(request, "proxy", None)
        proxy_auth = None

        if proxy:
            # Accept the dict format: {"http": "http://...", "https": "http://..."}
            if isinstance(proxy, dict):
                # Prefer https, fall back to http
                proxy = proxy.get("https") or proxy.get("http")

            if not isinstance(proxy, (str, URL)):
                raise ValueError(f"proxy must be str or URL, got {type(proxy)}")

            try:
                proxy_url = URL(proxy)
                if proxy_url.scheme not in ("http", "https"):
                    raise ValueError(f"Unsupported proxy scheme: {proxy_url.scheme}, only HTTP/HTTPS supported.")

                # Extract the credentials
                if proxy_url.user and proxy_url.password:
                    proxy_auth = BasicAuth(proxy_url.user, proxy_url.password)
                    # URL with the credentials stripped
                    proxy = str(proxy_url.with_user(None))
                else:
                    proxy = str(proxy_url)

                kwargs["proxy"] = proxy
                if proxy_auth:
                    kwargs["proxy_auth"] = proxy_auth

            except Exception as e:
                raise ValueError(f"Invalid proxy URL: {proxy}") from e

        # Proxy credentials passed through meta
        meta_proxy_auth = request.meta.get("proxy_auth")
        if meta_proxy_auth and isinstance(meta_proxy_auth, dict):
            username = meta_proxy_auth.get("username")
            password = meta_proxy_auth.get("password")
            if username and password:
                kwargs["proxy_auth"] = BasicAuth(username, password)

        # === Request body handling ===
        if hasattr(request, "_json_body") and request._json_body is not None:
            kwargs["json"] = request._json_body
        elif isinstance(request.body, (dict, list)):
            kwargs["json"] = request.body
        else:
            if request.body is not None:
                kwargs["data"] = request.body

        return await method_func(request.url, **kwargs)

    @staticmethod
    def _structure_response(request, resp: ClientResponse, body: bytes) -> Response:
        """Build the Response object the framework expects."""
        return Response(
            url=str(resp.url),
            headers=dict(resp.headers),
            status_code=resp.status,
            body=body,
            request=request,
        )

    # --- Request tracing hooks ---
    async def _on_request_start(self, session, trace_config_ctx, params):
        """Callback fired when a request starts."""
        pass

    async def _on_request_end(self, session, trace_config_ctx, params):
        """Callback fired when a request completes successfully."""
        pass

    async def _on_request_exception(self, session, trace_config_ctx, params):
        """Callback fired when a request raises an exception."""
        pass

    async def close(self) -> None:
        """Release the session resources."""
        if self.session and not self.session.closed:
            self.logger.info("Closing AioHttpDownloader session...")
            try:
                # Close the session
                await self.session.close()

                # Wait briefly so connections shut down cleanly
                # See: https://docs.aiohttp.org/en/stable/client_advanced.html#graceful-shutdown
                await asyncio.sleep(0.25)
            except Exception as e:
                self.logger.warning(f"Error during session close: {e}")
            finally:
                self.session = None

        self.logger.debug("AioHttpDownloader closed.")
```
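For reference, `open()` above reads its configuration from the crawler settings. The sketch below lists those setting names with the defaults shown in the diff; whether a project overrides them in its settings module is an assumption based on the project templates in the file list:

```python
# Settings consumed by AioHttpDownloader.open(), with the defaults the
# diff shows. A sketch: names come straight from the code above, but the
# override location (a project settings module) is assumed.
DOWNLOAD_TIMEOUT = 30                  # total request timeout, seconds
VERIFY_SSL = True                      # TCPConnector certificate checks
CONNECTION_POOL_LIMIT = 300            # connector-wide connection cap
CONNECTION_POOL_LIMIT_PER_HOST = 100   # per-host connection cap
DOWNLOAD_MAXSIZE = 10 * 1024 * 1024    # larger bodies raise OverflowError
DOWNLOAD_STATS = True                  # log per-request timing at DEBUG
```

And a minimal sketch of how `_send_request` normalizes a middleware-set proxy and a dict body. The `SimpleNamespace` stand-in is hypothetical and exists only to mimic the attributes the method reads (`method`, `headers`, `cookies`, `allow_redirects`, `meta`, `body`, `proxy`, `_json_body`); a real crawlo `Request` would normally be used instead, and the proxy address is a placeholder:

```python
import asyncio
from types import SimpleNamespace

from aiohttp import ClientSession
from crawlo.downloader.aiohttp_downloader import AioHttpDownloader


async def main():
    # Hypothetical stand-in for a crawlo Request; attribute names match
    # what _send_request reads.
    request = SimpleNamespace(
        url="https://httpbin.org/post",
        method="POST",
        headers={"User-Agent": "crawlo-demo"},
        cookies={},
        allow_redirects=True,
        meta={},                    # could carry {"proxy_auth": {...}}
        body={"q": "demo"},         # dict body is sent as JSON
        proxy={"https": "http://user:pass@127.0.0.1:8080"},  # placeholder
        _json_body=None,
    )
    async with ClientSession() as session:
        # The dict proxy collapses to its https entry, user:pass is split
        # out into a BasicAuth, and the credential-free URL goes to aiohttp.
        resp = await AioHttpDownloader._send_request(session, request)
        print(resp.status)
        resp.release()


asyncio.run(main())
```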