crawlo-1.4.7-py3-none-any.whl → crawlo-1.4.8-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- crawlo/__init__.py +90 -90
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +186 -186
- crawlo/commands/help.py +140 -140
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +379 -379
- crawlo/commands/startproject.py +460 -460
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +320 -320
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +451 -451
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +290 -290
- crawlo/crawler.py +698 -698
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +280 -280
- crawlo/downloader/aiohttp_downloader.py +233 -233
- crawlo/downloader/cffi_downloader.py +250 -250
- crawlo/downloader/httpx_downloader.py +265 -265
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +425 -425
- crawlo/downloader/selenium_downloader.py +486 -486
- crawlo/event.py +45 -45
- crawlo/exceptions.py +214 -214
- crawlo/extension/__init__.py +64 -64
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +53 -53
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +104 -104
- crawlo/factories/registry.py +84 -84
- crawlo/factories/utils.py +134 -134
- crawlo/filters/__init__.py +170 -170
- crawlo/filters/aioredis_filter.py +347 -347
- crawlo/filters/memory_filter.py +261 -261
- crawlo/framework.py +306 -306
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +391 -391
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +240 -240
- crawlo/initialization/phases.py +229 -229
- crawlo/initialization/registry.py +143 -143
- crawlo/initialization/utils.py +48 -48
- crawlo/interfaces.py +23 -23
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +42 -42
- crawlo/logging/config.py +280 -276
- crawlo/logging/factory.py +175 -175
- crawlo/logging/manager.py +104 -104
- crawlo/middleware/__init__.py +87 -87
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +142 -142
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +209 -209
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +287 -287
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +408 -376
- crawlo/network/response.py +598 -569
- crawlo/pipelines/__init__.py +52 -52
- crawlo/pipelines/base_pipeline.py +452 -452
- crawlo/pipelines/bloom_dedup_pipeline.py +145 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +196 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +104 -105
- crawlo/pipelines/mongo_pipeline.py +140 -139
- crawlo/pipelines/mysql_pipeline.py +468 -469
- crawlo/pipelines/pipeline_manager.py +100 -100
- crawlo/pipelines/redis_dedup_pipeline.py +155 -155
- crawlo/project.py +347 -347
- crawlo/queue/__init__.py +9 -9
- crawlo/queue/pqueue.py +38 -38
- crawlo/queue/queue_manager.py +591 -591
- crawlo/queue/redis_priority_queue.py +518 -518
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +287 -284
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +658 -657
- crawlo/stats_collector.py +81 -81
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +1 -1
- crawlo/templates/project/items.py.tmpl +13 -13
- crawlo/templates/project/middlewares.py.tmpl +38 -38
- crawlo/templates/project/pipelines.py.tmpl +35 -35
- crawlo/templates/project/settings.py.tmpl +113 -109
- crawlo/templates/project/settings_distributed.py.tmpl +160 -156
- crawlo/templates/project/settings_gentle.py.tmpl +174 -170
- crawlo/templates/project/settings_high_performance.py.tmpl +175 -171
- crawlo/templates/project/settings_minimal.py.tmpl +102 -98
- crawlo/templates/project/settings_simple.py.tmpl +172 -168
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +23 -23
- crawlo/templates/spider/spider.py.tmpl +32 -32
- crawlo/templates/spiders_init.py.tmpl +4 -4
- crawlo/tools/__init__.py +86 -86
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +74 -50
- crawlo/utils/batch_processor.py +276 -276
- crawlo/utils/config_manager.py +442 -442
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +250 -250
- crawlo/utils/encoding_helper.py +190 -0
- crawlo/utils/error_handler.py +410 -410
- crawlo/utils/fingerprint.py +121 -121
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/leak_detector.py +335 -335
- crawlo/utils/misc.py +81 -81
- crawlo/utils/mongo_connection_pool.py +157 -157
- crawlo/utils/mysql_connection_pool.py +197 -197
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_checker.py +90 -90
- crawlo/utils/redis_connection_pool.py +578 -578
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +278 -278
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/resource_manager.py +337 -337
- crawlo/utils/response_helper.py +113 -0
- crawlo/utils/selector_helper.py +138 -137
- crawlo/utils/singleton.py +69 -69
- crawlo/utils/spider_loader.py +201 -201
- crawlo/utils/text_helper.py +94 -94
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/METADATA +831 -689
- crawlo-1.4.8.dist-info/RECORD +347 -0
- examples/__init__.py +7 -7
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +217 -217
- tests/authenticated_proxy_example.py +110 -110
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/bug_check_test.py +250 -250
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/direct_selector_helper_test.py +96 -96
- tests/distributed_dedup_test.py +467 -467
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/error_handling_example.py +171 -171
- tests/explain_mysql_update_behavior.py +76 -76
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/monitor_redis_dedup.sh +72 -72
- tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -12
- tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -100
- tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -13
- tests/ofweek_scrapy/ofweek_scrapy/settings.py +84 -84
- tests/ofweek_scrapy/ofweek_scrapy/spiders/__init__.py +4 -4
- tests/ofweek_scrapy/scrapy.cfg +11 -11
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +244 -244
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_cli_test.py +54 -54
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +126 -126
- tests/simple_follow_test.py +38 -38
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_response_selector_test.py +94 -94
- tests/simple_selector_helper_test.py +154 -154
- tests/simple_selector_test.py +207 -207
- tests/simple_spider_test.py +49 -49
- tests/simple_url_test.py +73 -73
- tests/simulate_mysql_update_test.py +139 -139
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_asyncmy_usage.py +56 -56
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_cli_arguments.py +118 -118
- tests/test_component_factory.py +174 -174
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawler_process_import.py +38 -38
- tests/test_crawler_process_spider_modules.py +47 -47
- tests/test_crawlo_proxy_integration.py +114 -114
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +124 -124
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +272 -272
- tests/test_edge_cases.py +305 -305
- tests/test_encoding_core.py +56 -56
- tests/test_encoding_detection.py +126 -126
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_factory_compatibility.py +196 -196
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +374 -374
- tests/test_logging_final.py +184 -184
- tests/test_logging_integration.py +312 -312
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +141 -141
- tests/test_mode_consistency.py +51 -51
- tests/test_multi_directory.py +67 -67
- tests/test_multiple_spider_modules.py +80 -80
- tests/test_mysql_pipeline_config.py +164 -164
- tests/test_mysql_pipeline_error.py +98 -98
- tests/test_mysql_pipeline_init_log.py +82 -82
- tests/test_mysql_pipeline_integration.py +132 -132
- tests/test_mysql_pipeline_refactor.py +143 -143
- tests/test_mysql_pipeline_refactor_simple.py +85 -85
- tests/test_mysql_pipeline_robustness.py +195 -195
- tests/test_mysql_pipeline_types.py +88 -88
- tests/test_mysql_update_columns.py +93 -93
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_optimized_selector_naming.py +100 -100
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +211 -211
- tests/test_priority_consistency.py +151 -151
- tests/test_priority_consistency_fixed.py +249 -249
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +217 -217
- tests/test_proxy_middleware_enhanced.py +212 -212
- tests/test_proxy_middleware_integration.py +142 -142
- tests/test_proxy_middleware_refactored.py +207 -207
- tests/test_proxy_only.py +83 -83
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_proxy_with_downloader.py +152 -152
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +130 -130
- tests/test_random_headers_default.py +322 -322
- tests/test_random_headers_necessity.py +308 -308
- tests/test_random_user_agent.py +72 -72
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +129 -129
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_follow.py +104 -104
- tests/test_response_improvements.py +152 -152
- tests/test_response_selector_methods.py +92 -92
- tests/test_response_url_methods.py +70 -70
- tests/test_response_urljoin.py +86 -86
- tests/test_retry_middleware.py +333 -333
- tests/test_retry_middleware_realistic.py +273 -273
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_scrapy_style_encoding.py +112 -112
- tests/test_selector_helper.py +100 -100
- tests/test_selector_optimizations.py +146 -146
- tests/test_simple_response.py +61 -61
- tests/test_spider_loader.py +49 -49
- tests/test_spider_loader_comprehensive.py +69 -69
- tests/test_spider_modules.py +84 -84
- tests/test_spiders/test_spider.py +9 -9
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +176 -176
- tests/test_user_agents.py +96 -96
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- tests/verify_mysql_warnings.py +109 -109
- crawlo/utils/log.py +0 -80
- crawlo/utils/url_utils.py +0 -40
- crawlo-1.4.7.dist-info/RECORD +0 -347
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/WHEEL +0 -0
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/top_level.txt +0 -0
crawlo/middleware/response_code.py
@@ -1,151 +1,151 @@
Lines 1-150 are deleted and re-added with identical rendered content; only the final `return response` on line 151 carries over unchanged. The full file (comments, docstrings, and log strings translated from Chinese):

#!/usr/bin/python
# -*- coding:UTF-8 -*-
"""
ResponseCodeMiddleware
Handles HTTP response status codes, records statistics, and supports special-case handling of particular codes.
"""
from crawlo.logging import get_logger


class ResponseCodeMiddleware(object):
    """
    ResponseCodeMiddleware
    Handles HTTP response status codes, records statistics, and supports special-case handling of particular codes.

    Features:
    - counts occurrences of each HTTP status code
    - supports detailed handling of special status codes
    - provides detailed log output
    - groups status codes into categories (2xx, 3xx, 4xx, 5xx)
    """

    def __init__(self, stats, log_level):
        """
        Initialize the middleware.

        Args:
            stats: statistics collector
            log_level: log level
        """
        self.logger = get_logger(self.__class__.__name__)
        self.stats = stats

    @classmethod
    def create_instance(cls, crawler):
        """
        Create a middleware instance.

        Args:
            crawler: crawler instance

        Returns:
            ResponseCodeMiddleware: middleware instance
        """
        o = cls(stats=crawler.stats, log_level=crawler.settings.get('LOG_LEVEL'))
        return o

    def _get_status_category(self, status_code):
        """
        Get the category of a status code.

        Args:
            status_code (int): HTTP status code

        Returns:
            str: status-code category (2xx, 3xx, 4xx, 5xx, other)
        """
        if 200 <= status_code < 300:
            return "2xx"
        elif 300 <= status_code < 400:
            return "3xx"
        elif 400 <= status_code < 500:
            return "4xx"
        elif 500 <= status_code < 600:
            return "5xx"
        else:
            return "other"

    def _is_success_response(self, status_code):
        """
        Check whether the response is a success.

        Args:
            status_code (int): HTTP status code

        Returns:
            bool: whether the response is a success
        """
        return 200 <= status_code < 300

    def _is_redirect_response(self, status_code):
        """
        Check whether the response is a redirect.

        Args:
            status_code (int): HTTP status code

        Returns:
            bool: whether the response is a redirect
        """
        return 300 <= status_code < 400

    def _is_client_error(self, status_code):
        """
        Check whether the response is a client error.

        Args:
            status_code (int): HTTP status code

        Returns:
            bool: whether the response is a client error
        """
        return 400 <= status_code < 500

    def _is_server_error(self, status_code):
        """
        Check whether the response is a server error.

        Args:
            status_code (int): HTTP status code

        Returns:
            bool: whether the response is a server error
        """
        return 500 <= status_code < 600

    def process_response(self, request, response, spider):
        """
        Process a response and record status-code statistics.

        Args:
            request: request object
            response: response object
            spider: spider instance

        Returns:
            response: the response object
        """
        status_code = response.status_code

        # Record only aggregate statistics, not per-domain or per-status detail.
        # Record the status-code category.
        category = self._get_status_category(status_code)
        self.stats.inc_value(f'response_status_code/category/{category}')

        # Record success/error counts.
        if self._is_success_response(status_code):
            self.stats.inc_value('response_status_code/success_count')
        elif self._is_client_error(status_code) or self._is_server_error(status_code):
            self.stats.inc_value('response_status_code/error_count')

        # Record response-size statistics.
        if hasattr(response, 'content_length') and response.content_length:
            self.stats.inc_value('response_total_bytes', response.content_length)

        # Detailed log entry.
        self.logger.debug(
            f'Received response: {status_code} {response.url} '
            f'(category: {category}, size: {getattr(response, "content_length", "unknown")} bytes)'
        )

        return response
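process_response only ever increments aggregate counters. A minimal sketch of the counters it produces, assuming crawlo is importable; StubStats and StubResponse are hypothetical test doubles, not crawlo APIs:

from crawlo.middleware.response_code import ResponseCodeMiddleware

class StubStats:
    """Hypothetical stand-in for crawlo's stats collector."""
    def __init__(self):
        self.values = {}
    def inc_value(self, key, count=1):
        self.values[key] = self.values.get(key, 0) + count

class StubResponse:
    """Hypothetical response exposing only the attributes the middleware reads."""
    def __init__(self, status_code, url, content_length=None):
        self.status_code = status_code
        self.url = url
        self.content_length = content_length

mw = ResponseCodeMiddleware(stats=StubStats(), log_level='DEBUG')
for code in (200, 200, 301, 404, 503):
    mw.process_response(None, StubResponse(code, 'http://example.com', 1024), None)

# mw.stats.values now holds:
#   response_status_code/category/2xx -> 2, 3xx -> 1, 4xx -> 1, 5xx -> 1
#   response_status_code/success_count -> 2, error_count -> 2
#   response_total_bytes -> 5120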
crawlo/middleware/response_filter.py
@@ -1,137 +1,137 @@
Again, lines 1-136 are deleted and re-added with identical rendered content; only the final `raise` on line 137 carries over unchanged. The full file (comments, docstrings, and log strings translated from Chinese):

#!/usr/bin/python
# -*- coding:UTF-8 -*-
"""
ResponseFilterMiddleware
Filters out HTTP responses that do not meet requirements; supports a custom list of allowed status codes.
"""
from crawlo.logging import get_logger
from crawlo.exceptions import IgnoreRequestError


class ResponseFilterMiddleware:
    """
    ResponseFilterMiddleware
    Filters out HTTP responses that do not meet requirements; supports a custom list of allowed status codes.

    Features:
    - allows 2xx status codes by default
    - supports a custom list of allowed status codes
    - supports denying specific status codes
    - provides detailed log output
    - supports per-domain filter rules
    """

    def __init__(self, allowed_codes, denied_codes, log_level):
        """
        Initialize the middleware.

        Args:
            allowed_codes: list of allowed status codes
            denied_codes: list of denied status codes
            log_level: log level
        """
        # Make sure the status codes are integers.
        self.allowed_codes = set()
        if allowed_codes:
            for code in allowed_codes:
                try:
                    self.allowed_codes.add(int(code))
                except (ValueError, TypeError):
                    pass  # ignore invalid status codes

        self.denied_codes = set()
        if denied_codes:
            for code in denied_codes:
                try:
                    self.denied_codes.add(int(code))
                except (ValueError, TypeError):
                    pass  # ignore invalid status codes

        self.logger = get_logger(self.__class__.__name__)

    @classmethod
    def create_instance(cls, crawler):
        """
        Create a middleware instance.

        Args:
            crawler: crawler instance

        Returns:
            ResponseFilterMiddleware: middleware instance
        """
        o = cls(
            allowed_codes=crawler.settings.get_list('ALLOWED_RESPONSE_CODES'),
            denied_codes=crawler.settings.get_list('DENIED_RESPONSE_CODES'),
            log_level=crawler.settings.get('LOG_LEVEL')
        )
        return o

    def _is_response_allowed(self, response):
        """
        Decide whether a response is allowed through.

        Args:
            response: response object

        Returns:
            bool: whether the response is allowed
        """
        status_code = response.status_code

        # First check whether the code is explicitly denied.
        if status_code in self.denied_codes:
            return False

        # Then check whether it is explicitly allowed.
        if status_code in self.allowed_codes:
            return True

        # Allow 2xx status codes by default.
        if 200 <= status_code < 300:
            return True

        # Deny all other status codes by default.
        return False

    def _get_filter_reason(self, status_code):
        """
        Build a human-readable filter reason.

        Args:
            status_code (int): HTTP status code

        Returns:
            str: filter reason
        """
        if status_code in self.denied_codes:
            return f"status code {status_code} is explicitly denied"
        elif status_code not in self.allowed_codes and not (200 <= status_code < 300):
            return f"status code {status_code} is not in the allowed list"
        else:
            return f"status code {status_code} was filtered"

    def process_response(self, request, response, spider):
        """
        Process a response, filtering out those that do not meet requirements.

        Args:
            request: request object
            response: response object
            spider: spider instance

        Returns:
            response: the response object (if allowed)

        Raises:
            IgnoreRequestError: if the response was filtered
        """
        if self._is_response_allowed(response):
            return response

        # The response was filtered.
        reason = self._get_filter_reason(response.status_code)
        self.logger.debug(f"Filtered response: {response.status_code} {response.url} - {reason}")

        # Raise to ignore this response.
        raise IgnoreRequestError(f"response filtered: {reason} - {response.status_code} {response.url}")
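The precedence in _is_response_allowed is: explicit deny first, then explicit allow, then the 2xx default. A short sketch under the same assumptions; the code lists here are illustrative values, which in a project would normally arrive via the ALLOWED_RESPONSE_CODES and DENIED_RESPONSE_CODES settings through create_instance:

from types import SimpleNamespace
from crawlo.middleware.response_filter import ResponseFilterMiddleware

def resp(code):
    # Hypothetical stand-in exposing only the attributes the middleware reads.
    return SimpleNamespace(status_code=code, url='http://example.com')

mw = ResponseFilterMiddleware(
    allowed_codes=[301, 302, 'abc'],  # 'abc' is silently dropped by the int() guard
    denied_codes=[302],               # an explicit deny beats an explicit allow
    log_level='DEBUG',
)

assert mw._is_response_allowed(resp(200))       # allowed: 2xx default
assert mw._is_response_allowed(resp(301))       # allowed: explicitly listed
assert not mw._is_response_allowed(resp(302))   # denied: deny is checked first
assert not mw._is_response_allowed(resp(404))   # denied: not listed, not 2xx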