crawlo 1.4.2__py3-none-any.whl → 1.4.3__py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
Potentially problematic release: this version of crawlo might be problematic.
- crawlo/__init__.py +93 -93
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +341 -341
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +312 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +438 -439
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +291 -257
- crawlo/crawler.py +650 -650
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +273 -273
- crawlo/downloader/aiohttp_downloader.py +233 -233
- crawlo/downloader/cffi_downloader.py +245 -245
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +63 -63
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +61 -61
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +103 -103
- crawlo/factories/registry.py +84 -84
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +257 -257
- crawlo/filters/memory_filter.py +269 -269
- crawlo/framework.py +292 -292
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +425 -425
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +193 -193
- crawlo/initialization/phases.py +148 -148
- crawlo/initialization/registry.py +145 -145
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +45 -37
- crawlo/logging/async_handler.py +181 -0
- crawlo/logging/config.py +196 -96
- crawlo/logging/factory.py +171 -128
- crawlo/logging/manager.py +111 -111
- crawlo/logging/monitor.py +153 -0
- crawlo/logging/sampler.py +167 -0
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +386 -386
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/middleware/simple_proxy.py +65 -65
- crawlo/mode_manager.py +219 -219
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +379 -379
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +197 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +105 -105
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +325 -325
- crawlo/pipelines/pipeline_manager.py +100 -84
- crawlo/pipelines/redis_dedup_pipeline.py +156 -156
- crawlo/project.py +349 -338
- crawlo/queue/pqueue.py +42 -42
- crawlo/queue/queue_manager.py +526 -522
- crawlo/queue/redis_priority_queue.py +370 -367
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +284 -284
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +657 -657
- crawlo/stats_collector.py +73 -73
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +170 -170
- crawlo/templates/project/settings_distributed.py.tmpl +169 -169
- crawlo/templates/project/settings_gentle.py.tmpl +166 -166
- crawlo/templates/project/settings_high_performance.py.tmpl +167 -167
- crawlo/templates/project/settings_minimal.py.tmpl +65 -65
- crawlo/templates/project/settings_simple.py.tmpl +164 -164
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +34 -34
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/templates/spiders_init.py.tmpl +9 -9
- crawlo/tools/__init__.py +200 -200
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_formatter.py +225 -225
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/encoding_converter.py +127 -127
- crawlo/tools/network_diagnostic.py +364 -364
- crawlo/tools/request_tools.py +82 -82
- crawlo/tools/retry_mechanism.py +224 -224
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +34 -34
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/class_loader.py +25 -25
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +142 -142
- crawlo/utils/error_handler.py +165 -165
- crawlo/utils/fingerprint.py +122 -122
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +79 -79
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +388 -388
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.4.3.dist-info/METADATA +190 -0
- crawlo-1.4.3.dist-info/RECORD +326 -0
- examples/__init__.py +7 -7
- examples/test_project/__init__.py +7 -7
- examples/test_project/run.py +34 -34
- examples/test_project/test_project/__init__.py +3 -3
- examples/test_project/test_project/items.py +17 -17
- examples/test_project/test_project/middlewares.py +118 -118
- examples/test_project/test_project/pipelines.py +96 -96
- examples/test_project/test_project/settings.py +169 -169
- examples/test_project/test_project/spiders/__init__.py +9 -9
- examples/test_project/test_project/spiders/of_week_dis.py +143 -143
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +106 -106
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +245 -245
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +127 -127
- tests/simple_log_test.py +57 -57
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_spider_test.py +49 -49
- tests/simple_test.py +47 -47
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_component_factory.py +174 -174
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawlo_proxy_integration.py +108 -108
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +125 -0
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_framework_env_usage.py +103 -103
- tests/test_framework_logger.py +66 -66
- tests/test_framework_startup.py +64 -64
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_config.py +112 -112
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +375 -0
- tests/test_logging_final.py +185 -0
- tests/test_logging_integration.py +313 -0
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +142 -0
- tests/test_mode_change.py +72 -72
- tests/test_mode_consistency.py +51 -51
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +212 -0
- tests/test_priority_consistency.py +152 -0
- tests/test_priority_consistency_fixed.py +250 -0
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_middleware_refactored.py +184 -184
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +131 -0
- tests/test_random_headers_default.py +323 -0
- tests/test_random_headers_necessity.py +309 -0
- tests/test_random_user_agent.py +72 -72
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +130 -0
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +334 -242
- tests/test_retry_middleware_realistic.py +274 -0
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +177 -0
- tests/test_user_agents.py +96 -96
- tests/tools_example.py +260 -260
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- crawlo-1.4.2.dist-info/METADATA +0 -1199
- crawlo-1.4.2.dist-info/RECORD +0 -309
- {crawlo-1.4.2.dist-info → crawlo-1.4.3.dist-info}/WHEEL +0 -0
- {crawlo-1.4.2.dist-info → crawlo-1.4.3.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.2.dist-info → crawlo-1.4.3.dist-info}/top_level.txt +0 -0
crawlo/middleware/response_filter.py CHANGED
@@ -1,137 +1,137 @@
Lines 1-136 are removed and re-added with identical content as rendered (line 137 is unchanged context); the file reads as follows:

#!/usr/bin/python
# -*- coding:UTF-8 -*-
"""
ResponseFilterMiddleware
Filters HTTP responses that do not meet the configured requirements; supports a custom list of allowed status codes.
"""
from crawlo.utils.log import get_logger
from crawlo.exceptions import IgnoreRequestError


class ResponseFilterMiddleware:
    """
    ResponseFilterMiddleware
    Filters HTTP responses that do not meet the configured requirements; supports a custom list of allowed status codes.

    Features:
    - allows 2xx status codes by default
    - supports a custom list of allowed status codes
    - supports denying specific status codes
    - provides detailed log messages
    - supports per-domain filter rules
    """

    def __init__(self, allowed_codes, denied_codes, log_level):
        """
        Initialize the middleware.

        Args:
            allowed_codes: list of allowed status codes
            denied_codes: list of denied status codes
            log_level: log level
        """
        # Make sure status codes are stored as integers
        self.allowed_codes = set()
        if allowed_codes:
            for code in allowed_codes:
                try:
                    self.allowed_codes.add(int(code))
                except (ValueError, TypeError):
                    pass  # ignore invalid status codes

        self.denied_codes = set()
        if denied_codes:
            for code in denied_codes:
                try:
                    self.denied_codes.add(int(code))
                except (ValueError, TypeError):
                    pass  # ignore invalid status codes

        self.logger = get_logger(self.__class__.__name__, log_level)

    @classmethod
    def create_instance(cls, crawler):
        """
        Create a middleware instance.

        Args:
            crawler: crawler instance

        Returns:
            ResponseFilterMiddleware: middleware instance
        """
        o = cls(
            allowed_codes=crawler.settings.get_list('ALLOWED_RESPONSE_CODES'),
            denied_codes=crawler.settings.get_list('DENIED_RESPONSE_CODES'),
            log_level=crawler.settings.get('LOG_LEVEL')
        )
        return o

    def _is_response_allowed(self, response):
        """
        Decide whether a response is allowed through.

        Args:
            response: response object

        Returns:
            bool: whether the response is allowed
        """
        status_code = response.status_code

        # Explicit denials take precedence
        if status_code in self.denied_codes:
            return False

        # Then explicit allowances
        if status_code in self.allowed_codes:
            return True

        # 2xx status codes are allowed by default
        if 200 <= status_code < 300:
            return True

        # Everything else is denied by default
        return False

    def _get_filter_reason(self, status_code):
        """
        Build a human-readable reason for filtering.

        Args:
            status_code (int): HTTP status code

        Returns:
            str: description of the filter reason
        """
        if status_code in self.denied_codes:
            return f"status code {status_code} is explicitly denied"
        elif status_code not in self.allowed_codes and not (200 <= status_code < 300):
            return f"status code {status_code} is not in the allowed list"
        else:
            return f"status code {status_code} was filtered"

    def process_response(self, request, response, spider):
        """
        Process a response and filter out those that do not meet the requirements.

        Args:
            request: request object
            response: response object
            spider: spider instance

        Returns:
            response: the response object (if allowed)

        Raises:
            IgnoreRequestError: if the response is filtered out
        """
        if self._is_response_allowed(response):
            return response

        # The response has been filtered out
        reason = self._get_filter_reason(response.status_code)
        self.logger.debug(f"Filtered response: {response.status_code} {response.url} - {reason}")

        # Raise to ignore this response
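The allow/deny lists above come from the crawler settings via create_instance. A minimal sketch of how this could be configured and exercised; the setting values and the FakeResponse helper are illustrative and not part of the package, and the direct instantiation assumes get_logger works outside a running crawler:

# settings.py (sketch) - example values only
ALLOWED_RESPONSE_CODES = [301, 302]   # allowed in addition to the default 2xx range
DENIED_RESPONSE_CODES = [204]         # filtered even though 204 falls in the 2xx range
LOG_LEVEL = "INFO"

# Exercising the filter rules directly
from crawlo.middleware.response_filter import ResponseFilterMiddleware

class FakeResponse:  # hypothetical stand-in for a crawlo Response
    def __init__(self, status_code, url="https://example.com"):
        self.status_code = status_code
        self.url = url

mw = ResponseFilterMiddleware(allowed_codes=[301, 302], denied_codes=[204], log_level="INFO")
assert mw._is_response_allowed(FakeResponse(200))        # 2xx allowed by default
assert mw._is_response_allowed(FakeResponse(302))        # explicitly allowed
assert not mw._is_response_allowed(FakeResponse(204))    # explicit denial wins over the 2xx default
assert not mw._is_response_allowed(FakeResponse(404))    # everything else is rejected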
crawlo/middleware/retry.py CHANGED
@@ -1,125 +1,125 @@
Lines 1-124 are removed and re-added with identical content as rendered (line 125 is unchanged context); the file reads as follows:

#!/usr/bin/python
# -*- coding:UTF-8 -*-
import asyncio
from typing import List

try:
    from anyio import EndOfStream
except ImportError:
    # Placeholder if anyio is unavailable or EndOfStream does not exist
    class EndOfStream(Exception):
        pass

try:
    from httpcore import ReadError
except ImportError:
    class ReadError(Exception):
        pass

try:
    from httpx import RemoteProtocolError, ConnectError, ReadTimeout
except ImportError:
    class RemoteProtocolError(Exception):
        pass
    class ConnectError(Exception):
        pass
    class ReadTimeout(Exception):
        pass

try:
    from aiohttp.client_exceptions import ClientConnectionError, ClientPayloadError
    from aiohttp import ClientConnectorError, ClientTimeout, ClientConnectorSSLError, ClientResponseError
except ImportError:
    class ClientConnectionError(Exception):
        pass
    class ClientPayloadError(Exception):
        pass
    class ClientConnectorError(Exception):
        pass
    class ClientTimeout(Exception):
        pass
    class ClientConnectorSSLError(Exception):
        pass
    class ClientResponseError(Exception):
        pass

from crawlo.utils.log import get_logger
from crawlo.stats_collector import StatsCollector

_retry_exceptions = [
    EndOfStream,
    ReadError,
    asyncio.TimeoutError,
    ConnectError,
    ReadTimeout,
    ClientConnectorError,
    ClientResponseError,
    RemoteProtocolError,
    ClientTimeout,
    ClientConnectorSSLError,
    ClientPayloadError,
    ClientConnectionError
]


class RetryMiddleware(object):

    def __init__(
        self,
        *,
        retry_http_codes: List,
        ignore_http_codes: List,
        max_retry_times: int,
        retry_exceptions: List,
        stats: StatsCollector,
        retry_priority: int
    ):
        self.retry_http_codes = retry_http_codes
        self.ignore_http_codes = ignore_http_codes
        self.max_retry_times = max_retry_times
        self.retry_exceptions = tuple(retry_exceptions + _retry_exceptions)
        self.retry_priority = retry_priority
        self.stats = stats
        self.logger = get_logger(self.__class__.__name__)

    @classmethod
    def create_instance(cls, crawler):
        o = cls(
            retry_http_codes=crawler.settings.get_list('RETRY_HTTP_CODES'),
            ignore_http_codes=crawler.settings.get_list('IGNORE_HTTP_CODES'),
            max_retry_times=crawler.settings.get_int('MAX_RETRY_TIMES'),
            retry_exceptions=crawler.settings.get_list('RETRY_EXCEPTIONS'),
            stats=crawler.stats,
            retry_priority=crawler.settings.get_int('RETRY_PRIORITY')
        )
        return o

    def process_response(self, request, response, spider):
        if request.meta.get('dont_retry', False):
            return response
        if response.status_code in self.ignore_http_codes:
            return response
        if response.status_code in self.retry_http_codes:
            # Retry logic
            reason = f"response code {response.status_code}"
            return self._retry(request, reason, spider) or response
        return response

    def process_exception(self, request, exc, spider):
        if isinstance(exc, self.retry_exceptions) and not request.meta.get('dont_retry', False):
            return self._retry(request=request, reason=type(exc).__name__, spider=spider)

    def _retry(self, request, reason, spider):
        retry_times = request.meta.get('retry_times', 0)
        if retry_times < self.max_retry_times:
            retry_times += 1
            self.logger.info(f"{spider} {request} {reason} retrying {retry_times} time...")
            request.meta['retry_times'] = retry_times
            # request.dont_retry = True
            request.meta['dont_retry'] = True
            request.priority = request.priority + self.retry_priority
            self.stats.inc_value("retry_count")
            return request
        else:
            self.logger.warning(f"{spider} {request} {reason} retry max {self.max_retry_times} times, give up.")
            return None
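RetryMiddleware is likewise wired up from settings in create_instance: retried requests are re-queued with meta['retry_times'] incremented and their priority shifted by RETRY_PRIORITY. A minimal settings sketch with illustrative values (not the framework defaults):

# settings.py (sketch) - illustrative values, not framework defaults
RETRY_HTTP_CODES = [408, 429, 500, 502, 503, 504]  # responses with these codes go through _retry
IGNORE_HTTP_CODES = [404]                          # returned as-is, never retried
MAX_RETRY_TIMES = 3                                # give up once retry_times reaches this value
RETRY_EXCEPTIONS = []                              # extra exception types, merged with the built-in _retry_exceptions list
RETRY_PRIORITY = -1                                # added to request.priority on every retry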
crawlo/middleware/simple_proxy.py CHANGED
@@ -1,65 +1,65 @@
All 65 lines are removed and re-added with identical content as rendered; the file reads as follows:

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Simplified proxy middleware.
Provides basic proxy support while avoiding an overly complex implementation.
"""
import random
from typing import Optional, List

from crawlo import Request, Response
from crawlo.exceptions import NotConfiguredError
from crawlo.utils.log import get_logger


class SimpleProxyMiddleware:
    """Simplified proxy middleware"""

    def __init__(self, settings, log_level):
        self.logger = get_logger(self.__class__.__name__, log_level)

        # Read the proxy list
        self.proxies: List[str] = settings.get("PROXY_LIST", [])
        self.enabled = settings.get_bool("PROXY_ENABLED", False)

        if not self.enabled:
            self.logger.info("SimpleProxyMiddleware disabled")
            return

        if not self.proxies:
            raise NotConfiguredError("PROXY_LIST not configured, SimpleProxyMiddleware disabled")

        self.logger.info(f"SimpleProxyMiddleware enabled with {len(self.proxies)} proxies")

    @classmethod
    def create_instance(cls, crawler):
        return cls(settings=crawler.settings, log_level=crawler.settings.get("LOG_LEVEL"))

    async def process_request(self, request: Request, spider) -> Optional[Request]:
        """Assign a proxy to the request"""
        if not self.enabled:
            return None

        if request.proxy:
            # The request already carries a proxy; do not override it
            return None

        if self.proxies:
            # Pick a proxy at random
            proxy = random.choice(self.proxies)
            request.proxy = proxy
            self.logger.debug(f"Assigned proxy {proxy} to {request.url}")

        return None

    def process_response(self, request: Request, response: Response, spider) -> Response:
        """Handle the response"""
        if request.proxy:
            self.logger.debug(f"Proxy request successful: {request.proxy} | {request.url}")
        return response

    def process_exception(self, request: Request, exception: Exception, spider) -> Optional[Request]:
        """Handle an exception"""
        if request.proxy:
            self.logger.warning(f"Proxy request failed: {request.proxy} | {request.url} | {repr(exception)}")
        return None
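A minimal settings sketch for the simplified proxy middleware; the proxy URLs are placeholders, while PROXY_ENABLED and PROXY_LIST are the setting names read in __init__. With these set, every outgoing request that does not already carry a proxy gets one picked at random in process_request, and requests created with an explicit proxy keep it:

# settings.py (sketch) - proxy URLs are placeholders
PROXY_ENABLED = True
PROXY_LIST = [
    "http://127.0.0.1:8080",
    "http://user:pass@proxy.example.com:3128",
]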