crawlo-1.4.2-py3-none-any.whl → crawlo-1.4.3-py3-none-any.whl
This diff compares the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +93 -93
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +341 -341
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +312 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +438 -439
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +291 -257
- crawlo/crawler.py +650 -650
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +273 -273
- crawlo/downloader/aiohttp_downloader.py +233 -233
- crawlo/downloader/cffi_downloader.py +245 -245
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +63 -63
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +61 -61
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +103 -103
- crawlo/factories/registry.py +84 -84
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +257 -257
- crawlo/filters/memory_filter.py +269 -269
- crawlo/framework.py +292 -292
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +425 -425
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +193 -193
- crawlo/initialization/phases.py +148 -148
- crawlo/initialization/registry.py +145 -145
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +45 -37
- crawlo/logging/async_handler.py +181 -0
- crawlo/logging/config.py +196 -96
- crawlo/logging/factory.py +171 -128
- crawlo/logging/manager.py +111 -111
- crawlo/logging/monitor.py +153 -0
- crawlo/logging/sampler.py +167 -0
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +386 -386
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/middleware/simple_proxy.py +65 -65
- crawlo/mode_manager.py +219 -219
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +379 -379
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +197 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +105 -105
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +325 -325
- crawlo/pipelines/pipeline_manager.py +100 -84
- crawlo/pipelines/redis_dedup_pipeline.py +156 -156
- crawlo/project.py +349 -338
- crawlo/queue/pqueue.py +42 -42
- crawlo/queue/queue_manager.py +526 -522
- crawlo/queue/redis_priority_queue.py +370 -367
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +284 -284
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +657 -657
- crawlo/stats_collector.py +73 -73
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +170 -170
- crawlo/templates/project/settings_distributed.py.tmpl +169 -169
- crawlo/templates/project/settings_gentle.py.tmpl +166 -166
- crawlo/templates/project/settings_high_performance.py.tmpl +167 -167
- crawlo/templates/project/settings_minimal.py.tmpl +65 -65
- crawlo/templates/project/settings_simple.py.tmpl +164 -164
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +34 -34
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/templates/spiders_init.py.tmpl +9 -9
- crawlo/tools/__init__.py +200 -200
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_formatter.py +225 -225
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/encoding_converter.py +127 -127
- crawlo/tools/network_diagnostic.py +364 -364
- crawlo/tools/request_tools.py +82 -82
- crawlo/tools/retry_mechanism.py +224 -224
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +34 -34
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/class_loader.py +25 -25
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +142 -142
- crawlo/utils/error_handler.py +165 -165
- crawlo/utils/fingerprint.py +122 -122
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +79 -79
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +388 -388
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.4.3.dist-info/METADATA +190 -0
- crawlo-1.4.3.dist-info/RECORD +326 -0
- examples/__init__.py +7 -7
- examples/test_project/__init__.py +7 -7
- examples/test_project/run.py +34 -34
- examples/test_project/test_project/__init__.py +3 -3
- examples/test_project/test_project/items.py +17 -17
- examples/test_project/test_project/middlewares.py +118 -118
- examples/test_project/test_project/pipelines.py +96 -96
- examples/test_project/test_project/settings.py +169 -169
- examples/test_project/test_project/spiders/__init__.py +9 -9
- examples/test_project/test_project/spiders/of_week_dis.py +143 -143
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +106 -106
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +245 -245
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +127 -127
- tests/simple_log_test.py +57 -57
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_spider_test.py +49 -49
- tests/simple_test.py +47 -47
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_component_factory.py +174 -174
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawlo_proxy_integration.py +108 -108
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +125 -0
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_framework_env_usage.py +103 -103
- tests/test_framework_logger.py +66 -66
- tests/test_framework_startup.py +64 -64
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_config.py +112 -112
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +375 -0
- tests/test_logging_final.py +185 -0
- tests/test_logging_integration.py +313 -0
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +142 -0
- tests/test_mode_change.py +72 -72
- tests/test_mode_consistency.py +51 -51
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +212 -0
- tests/test_priority_consistency.py +152 -0
- tests/test_priority_consistency_fixed.py +250 -0
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_middleware_refactored.py +184 -184
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +131 -0
- tests/test_random_headers_default.py +323 -0
- tests/test_random_headers_necessity.py +309 -0
- tests/test_random_user_agent.py +72 -72
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +130 -0
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +334 -242
- tests/test_retry_middleware_realistic.py +274 -0
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +177 -0
- tests/test_user_agents.py +96 -96
- tests/tools_example.py +260 -260
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- crawlo-1.4.2.dist-info/METADATA +0 -1199
- crawlo-1.4.2.dist-info/RECORD +0 -309
- {crawlo-1.4.2.dist-info → crawlo-1.4.3.dist-info}/WHEEL +0 -0
- {crawlo-1.4.2.dist-info → crawlo-1.4.3.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.2.dist-info → crawlo-1.4.3.dist-info}/top_level.txt +0 -0
--- a/crawlo/tools/authenticated_proxy.py
+++ b/crawlo/tools/authenticated_proxy.py
@@ -1,241 +1,241 @@
-#!/usr/bin/python
-# -*- coding: UTF-8 -*-
-"""
-Authenticated Proxy Utilities
-=============================
-A unified utility for handling proxies with username/password authentication as well as unauthenticated proxies.
-
-Features:
-- Supports HTTP/HTTPS/SOCKS proxies
-- Supports authenticated and unauthenticated proxies
-- Unified proxy format handling
-- Proxy validity checking
-"""
-
-from typing import Dict, Any, Optional, Union
-from urllib.parse import urlparse, urlunparse
-
-
-class AuthenticatedProxy:
-    """Handler for proxies with optional authentication."""
-
-    def __init__(self, proxy_url: str):
-        """
-        Initialize the proxy object.
-
-        Args:
-            proxy_url (str): Proxy URL, with or without embedded credentials
-                - With auth: http://username:password@proxy.example.com:8080
-                - Without auth: http://proxy.example.com:8080
-        """
-        self.proxy_url = proxy_url
-        self.parsed = urlparse(proxy_url)
-
-        # Extract the credentials
-        self.username = self.parsed.username
-        self.password = self.parsed.password
-
-        # Build the URL without credentials
-        self.clean_url = urlunparse((
-            self.parsed.scheme,
-            f"{self.parsed.hostname}:{self.parsed.port}" if self.parsed.port else self.parsed.hostname,
-            self.parsed.path,
-            self.parsed.params,
-            self.parsed.query,
-            self.parsed.fragment
-        ))
-
-        # Build a downloader-compatible proxy dict
-        self.proxy_dict = {
-            "http": self.clean_url,
-            "https": self.clean_url
-        }
-
-        # Build the auth string if credentials are present
-        if self.username and self.password:
-            self.auth_string = f"{self.username}:{self.password}"
-        else:
-            self.auth_string = None
-
-    def get_proxy_for_downloader(self) -> Union[str, Dict[str, str]]:
-        """
-        Get the proxy configuration for a downloader.
-
-        Returns:
-            Union[str, Dict[str, str]]: Proxy configuration
-                - For AioHttp/CurlCffi: a dict of the form {"http": "...", "https": "..."}
-                - For HttpX: a string or the dict form can be used directly
-        """
-        return self.proxy_dict
-
-    def get_auth_credentials(self) -> Optional[Dict[str, str]]:
-        """
-        Get the authentication credentials.
-
-        Returns:
-            Optional[Dict[str, str]]: Credentials as {"username": "...", "password": "..."}
-        """
-        if self.username and self.password:
-            return {
-                "username": self.username,
-                "password": self.password
-            }
-        return None
-
-    def get_auth_header(self) -> Optional[str]:
-        """
-        Get the Basic Auth header value.
-
-        Returns:
-            Optional[str]: Basic Auth header value
-        """
-        if self.username and self.password:
-            import base64
-            credentials = f"{self.username}:{self.password}"
-            encoded = base64.b64encode(credentials.encode()).decode()
-            return f"Basic {encoded}"
-        return None
-
-    def is_valid(self) -> bool:
-        """
-        Check whether the proxy URL is valid.
-
-        Returns:
-            bool: whether the proxy URL is valid
-        """
-        # Check the scheme
-        if self.parsed.scheme not in ["http", "https", "socks4", "socks5"]:
-            return False
-
-        # Check the hostname
-        if not self.parsed.hostname:
-            return False
-
-        # Check the port (if specified)
-        if self.parsed.port and (self.parsed.port < 1 or self.parsed.port > 65535):
-            return False
-
-        return True
-
-    def __str__(self) -> str:
-        return self.proxy_url
-
-    def __repr__(self) -> str:
-        return f"AuthenticatedProxy(url='{self.proxy_url}', username={self.username is not None})"
-
-
-def create_proxy_config(proxy_url: str) -> Dict[str, Any]:
-    """
-    Create a proxy configuration compatible with the various downloaders.
-
-    Args:
-        proxy_url (str): Proxy URL
-
-    Returns:
-        Dict[str, Any]: Proxy configuration dict
-    """
-    proxy = AuthenticatedProxy(proxy_url)
-
-    if not proxy.is_valid():
-        raise ValueError(f"Invalid proxy URL: {proxy_url}")
-
-    config = {
-        "url": proxy.clean_url,
-        "proxy_dict": proxy.proxy_dict,
-        "has_auth": proxy.auth_string is not None
-    }
-
-    if proxy.auth_string:
-        config["auth"] = proxy.get_auth_credentials()
-        config["auth_header"] = proxy.get_auth_header()
-
-    return config
-
-
-def format_proxy_for_request(proxy_config: Dict[str, Any], downloader_type: str = "aiohttp") -> Dict[str, Any]:
-    """
-    Format a proxy configuration for a specific downloader.
-
-    Args:
-        proxy_config (Dict[str, Any]): Proxy configuration
-        downloader_type (str): Downloader type (aiohttp, httpx, curl_cffi)
-
-    Returns:
-        Dict[str, Any]: Proxy configuration adapted to the downloader
-    """
-    formatted = {}
-
-    if downloader_type.lower() == "aiohttp":
-        # AioHttp takes proxy and proxy_auth parameters
-        formatted["proxy"] = proxy_config["url"]
-        if proxy_config.get("has_auth") and proxy_config.get("auth"):
-            from aiohttp import BasicAuth
-            auth = proxy_config["auth"]
-            formatted["proxy_auth"] = BasicAuth(auth["username"], auth["password"])
-
-    elif downloader_type.lower() == "httpx":
-        # HttpX can use the proxy URL string or dict directly
-        formatted["proxy"] = proxy_config["url"]
-
-    elif downloader_type.lower() == "curl_cffi":
-        # CurlCffi takes a proxies dict
-        formatted["proxies"] = proxy_config["proxy_dict"]
-        # Credentials are embedded in the URL or passed via headers
-        if proxy_config.get("auth_header"):
-            formatted["headers"] = {"Proxy-Authorization": proxy_config["auth_header"]}
-
-    return formatted
-
-
-# Convenience functions
-def parse_proxy_url(proxy_url: str) -> Dict[str, Any]:
-    """
-    Parse a proxy URL and return its details.
-
-    Args:
-        proxy_url (str): Proxy URL
-
-    Returns:
-        Dict[str, Any]: Proxy details
-    """
-    return create_proxy_config(proxy_url)
-
-
-def validate_proxy_url(proxy_url: str) -> bool:
-    """
-    Validate whether a proxy URL is valid.
-
-    Args:
-        proxy_url (str): Proxy URL
-
-    Returns:
-        bool: whether it is valid
-    """
-    try:
-        proxy = AuthenticatedProxy(proxy_url)
-        return proxy.is_valid()
-    except:
-        return False
-
-
-def get_proxy_info(proxy_url: str) -> Dict[str, Any]:
-    """
-    Get detailed information about a proxy.
-
-    Args:
-        proxy_url (str): Proxy URL
-
-    Returns:
-        Dict[str, Any]: Proxy details
-    """
-    proxy = AuthenticatedProxy(proxy_url)
-    return {
-        "original_url": proxy.proxy_url,
-        "clean_url": proxy.clean_url,
-        "scheme": proxy.parsed.scheme,
-        "hostname": proxy.parsed.hostname,
-        "port": proxy.parsed.port,
-        "has_auth": proxy.auth_string is not None,
-        "username": proxy.username,
-        "is_valid": proxy.is_valid()
+#!/usr/bin/python
+# -*- coding: UTF-8 -*-
+"""
+Authenticated Proxy Utilities
+=============================
+A unified utility for handling proxies with username/password authentication as well as unauthenticated proxies.
+
+Features:
+- Supports HTTP/HTTPS/SOCKS proxies
+- Supports authenticated and unauthenticated proxies
+- Unified proxy format handling
+- Proxy validity checking
+"""
+
+from typing import Dict, Any, Optional, Union
+from urllib.parse import urlparse, urlunparse
+
+
+class AuthenticatedProxy:
+    """Handler for proxies with optional authentication."""
+
+    def __init__(self, proxy_url: str):
+        """
+        Initialize the proxy object.
+
+        Args:
+            proxy_url (str): Proxy URL, with or without embedded credentials
+                - With auth: http://username:password@proxy.example.com:8080
+                - Without auth: http://proxy.example.com:8080
+        """
+        self.proxy_url = proxy_url
+        self.parsed = urlparse(proxy_url)
+
+        # Extract the credentials
+        self.username = self.parsed.username
+        self.password = self.parsed.password
+
+        # Build the URL without credentials
+        self.clean_url = urlunparse((
+            self.parsed.scheme,
+            f"{self.parsed.hostname}:{self.parsed.port}" if self.parsed.port else self.parsed.hostname,
+            self.parsed.path,
+            self.parsed.params,
+            self.parsed.query,
+            self.parsed.fragment
+        ))
+
+        # Build a downloader-compatible proxy dict
+        self.proxy_dict = {
+            "http": self.clean_url,
+            "https": self.clean_url
+        }
+
+        # Build the auth string if credentials are present
+        if self.username and self.password:
+            self.auth_string = f"{self.username}:{self.password}"
+        else:
+            self.auth_string = None
+
+    def get_proxy_for_downloader(self) -> Union[str, Dict[str, str]]:
+        """
+        Get the proxy configuration for a downloader.
+
+        Returns:
+            Union[str, Dict[str, str]]: Proxy configuration
+                - For AioHttp/CurlCffi: a dict of the form {"http": "...", "https": "..."}
+                - For HttpX: a string or the dict form can be used directly
+        """
+        return self.proxy_dict
+
+    def get_auth_credentials(self) -> Optional[Dict[str, str]]:
+        """
+        Get the authentication credentials.
+
+        Returns:
+            Optional[Dict[str, str]]: Credentials as {"username": "...", "password": "..."}
+        """
+        if self.username and self.password:
+            return {
+                "username": self.username,
+                "password": self.password
+            }
+        return None
+
+    def get_auth_header(self) -> Optional[str]:
+        """
+        Get the Basic Auth header value.
+
+        Returns:
+            Optional[str]: Basic Auth header value
+        """
+        if self.username and self.password:
+            import base64
+            credentials = f"{self.username}:{self.password}"
+            encoded = base64.b64encode(credentials.encode()).decode()
+            return f"Basic {encoded}"
+        return None
+
+    def is_valid(self) -> bool:
+        """
+        Check whether the proxy URL is valid.
+
+        Returns:
+            bool: whether the proxy URL is valid
+        """
+        # Check the scheme
+        if self.parsed.scheme not in ["http", "https", "socks4", "socks5"]:
+            return False
+
+        # Check the hostname
+        if not self.parsed.hostname:
+            return False
+
+        # Check the port (if specified)
+        if self.parsed.port and (self.parsed.port < 1 or self.parsed.port > 65535):
+            return False
+
+        return True
+
+    def __str__(self) -> str:
+        return self.proxy_url
+
+    def __repr__(self) -> str:
+        return f"AuthenticatedProxy(url='{self.proxy_url}', username={self.username is not None})"
+
+
+def create_proxy_config(proxy_url: str) -> Dict[str, Any]:
+    """
+    Create a proxy configuration compatible with the various downloaders.
+
+    Args:
+        proxy_url (str): Proxy URL
+
+    Returns:
+        Dict[str, Any]: Proxy configuration dict
+    """
+    proxy = AuthenticatedProxy(proxy_url)
+
+    if not proxy.is_valid():
+        raise ValueError(f"Invalid proxy URL: {proxy_url}")
+
+    config = {
+        "url": proxy.clean_url,
+        "proxy_dict": proxy.proxy_dict,
+        "has_auth": proxy.auth_string is not None
+    }
+
+    if proxy.auth_string:
+        config["auth"] = proxy.get_auth_credentials()
+        config["auth_header"] = proxy.get_auth_header()
+
+    return config
+
+
+def format_proxy_for_request(proxy_config: Dict[str, Any], downloader_type: str = "aiohttp") -> Dict[str, Any]:
+    """
+    Format a proxy configuration for a specific downloader.
+
+    Args:
+        proxy_config (Dict[str, Any]): Proxy configuration
+        downloader_type (str): Downloader type (aiohttp, httpx, curl_cffi)
+
+    Returns:
+        Dict[str, Any]: Proxy configuration adapted to the downloader
+    """
+    formatted = {}
+
+    if downloader_type.lower() == "aiohttp":
+        # AioHttp takes proxy and proxy_auth parameters
+        formatted["proxy"] = proxy_config["url"]
+        if proxy_config.get("has_auth") and proxy_config.get("auth"):
+            from aiohttp import BasicAuth
+            auth = proxy_config["auth"]
+            formatted["proxy_auth"] = BasicAuth(auth["username"], auth["password"])
+
+    elif downloader_type.lower() == "httpx":
+        # HttpX can use the proxy URL string or dict directly
+        formatted["proxy"] = proxy_config["url"]
+
+    elif downloader_type.lower() == "curl_cffi":
+        # CurlCffi takes a proxies dict
+        formatted["proxies"] = proxy_config["proxy_dict"]
+        # Credentials are embedded in the URL or passed via headers
+        if proxy_config.get("auth_header"):
+            formatted["headers"] = {"Proxy-Authorization": proxy_config["auth_header"]}
+
+    return formatted
+
+
+# Convenience functions
+def parse_proxy_url(proxy_url: str) -> Dict[str, Any]:
+    """
+    Parse a proxy URL and return its details.
+
+    Args:
+        proxy_url (str): Proxy URL
+
+    Returns:
+        Dict[str, Any]: Proxy details
+    """
+    return create_proxy_config(proxy_url)
+
+
+def validate_proxy_url(proxy_url: str) -> bool:
+    """
+    Validate whether a proxy URL is valid.
+
+    Args:
+        proxy_url (str): Proxy URL
+
+    Returns:
+        bool: whether it is valid
+    """
+    try:
+        proxy = AuthenticatedProxy(proxy_url)
+        return proxy.is_valid()
+    except:
+        return False
+
+
+def get_proxy_info(proxy_url: str) -> Dict[str, Any]:
+    """
+    Get detailed information about a proxy.
+
+    Args:
+        proxy_url (str): Proxy URL
+
+    Returns:
+        Dict[str, Any]: Proxy details
+    """
+    proxy = AuthenticatedProxy(proxy_url)
+    return {
+        "original_url": proxy.proxy_url,
+        "clean_url": proxy.clean_url,
+        "scheme": proxy.parsed.scheme,
+        "hostname": proxy.parsed.hostname,
+        "port": proxy.parsed.port,
+        "has_auth": proxy.auth_string is not None,
+        "username": proxy.username,
+        "is_valid": proxy.is_valid()
     }
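
For reference, here is a minimal usage sketch of the helpers shown in the diff above. The proxy endpoint and credentials are hypothetical placeholders (not taken from the package), the aiohttp branch assumes aiohttp is installed, and only functions defined in crawlo/tools/authenticated_proxy.py are called.

# Usage sketch (hypothetical proxy endpoint and credentials)
from crawlo.tools.authenticated_proxy import (
    create_proxy_config,
    format_proxy_for_request,
    get_proxy_info,
    validate_proxy_url,
)

proxy_url = "http://user:secret@proxy.example.com:8080"  # hypothetical endpoint

assert validate_proxy_url(proxy_url)

config = create_proxy_config(proxy_url)
assert config["url"] == "http://proxy.example.com:8080"   # credentials stripped
assert config["has_auth"] is True
assert config["auth_header"] == "Basic dXNlcjpzZWNyZXQ="  # base64("user:secret")

# The same config adapted to each downloader family:
aiohttp_kwargs = format_proxy_for_request(config, "aiohttp")  # {"proxy": ..., "proxy_auth": BasicAuth(...)}
httpx_kwargs = format_proxy_for_request(config, "httpx")      # {"proxy": "http://proxy.example.com:8080"}
curl_kwargs = format_proxy_for_request(config, "curl_cffi")   # {"proxies": {...}, "headers": {"Proxy-Authorization": "Basic ..."}}

print(get_proxy_info(proxy_url)["hostname"])  # proxy.example.com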