crawlo-1.4.5-py3-none-any.whl → crawlo-1.4.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +90 -89
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +186 -186
- crawlo/commands/help.py +140 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +379 -341
- crawlo/commands/startproject.py +460 -460
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +320 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +451 -438
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +290 -291
- crawlo/crawler.py +698 -657
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +280 -276
- crawlo/downloader/aiohttp_downloader.py +233 -233
- crawlo/downloader/cffi_downloader.py +250 -245
- crawlo/downloader/httpx_downloader.py +265 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +425 -402
- crawlo/downloader/selenium_downloader.py +486 -472
- crawlo/event.py +45 -11
- crawlo/exceptions.py +215 -82
- crawlo/extension/__init__.py +65 -64
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +53 -61
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +104 -103
- crawlo/factories/registry.py +84 -84
- crawlo/factories/utils.py +135 -0
- crawlo/filters/__init__.py +170 -153
- crawlo/filters/aioredis_filter.py +348 -264
- crawlo/filters/memory_filter.py +261 -276
- crawlo/framework.py +306 -292
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +391 -434
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +240 -194
- crawlo/initialization/phases.py +230 -149
- crawlo/initialization/registry.py +143 -145
- crawlo/initialization/utils.py +49 -0
- crawlo/interfaces.py +23 -23
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +42 -46
- crawlo/logging/config.py +277 -197
- crawlo/logging/factory.py +175 -171
- crawlo/logging/manager.py +104 -112
- crawlo/middleware/__init__.py +87 -24
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +142 -142
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +209 -386
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +287 -253
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +375 -379
- crawlo/network/response.py +569 -664
- crawlo/pipelines/__init__.py +53 -22
- crawlo/pipelines/base_pipeline.py +452 -0
- crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +197 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +105 -105
- crawlo/pipelines/mongo_pipeline.py +140 -132
- crawlo/pipelines/mysql_pipeline.py +470 -326
- crawlo/pipelines/pipeline_manager.py +100 -100
- crawlo/pipelines/redis_dedup_pipeline.py +155 -156
- crawlo/project.py +347 -347
- crawlo/queue/__init__.py +10 -0
- crawlo/queue/pqueue.py +38 -38
- crawlo/queue/queue_manager.py +591 -525
- crawlo/queue/redis_priority_queue.py +519 -370
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +285 -270
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +657 -657
- crawlo/stats_collector.py +82 -73
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +2 -4
- crawlo/templates/project/items.py.tmpl +13 -17
- crawlo/templates/project/middlewares.py.tmpl +38 -38
- crawlo/templates/project/pipelines.py.tmpl +35 -36
- crawlo/templates/project/settings.py.tmpl +110 -157
- crawlo/templates/project/settings_distributed.py.tmpl +156 -161
- crawlo/templates/project/settings_gentle.py.tmpl +170 -171
- crawlo/templates/project/settings_high_performance.py.tmpl +171 -172
- crawlo/templates/project/settings_minimal.py.tmpl +99 -77
- crawlo/templates/project/settings_simple.py.tmpl +168 -169
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +23 -30
- crawlo/templates/spider/spider.py.tmpl +33 -144
- crawlo/templates/spiders_init.py.tmpl +5 -10
- crawlo/tools/__init__.py +86 -189
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +50 -50
- crawlo/utils/batch_processor.py +276 -259
- crawlo/utils/config_manager.py +442 -0
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +250 -244
- crawlo/utils/error_handler.py +410 -410
- crawlo/utils/fingerprint.py +121 -121
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/leak_detector.py +335 -0
- crawlo/utils/log.py +79 -79
- crawlo/utils/misc.py +81 -81
- crawlo/utils/mongo_connection_pool.py +157 -0
- crawlo/utils/mysql_connection_pool.py +197 -0
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_checker.py +91 -0
- crawlo/utils/redis_connection_pool.py +578 -388
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +278 -256
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/resource_manager.py +337 -0
- crawlo/utils/selector_helper.py +137 -137
- crawlo/utils/singleton.py +70 -0
- crawlo/utils/spider_loader.py +201 -201
- crawlo/utils/text_helper.py +94 -94
- crawlo/utils/{url.py → url_utils.py} +39 -39
- crawlo-1.4.7.dist-info/METADATA +689 -0
- crawlo-1.4.7.dist-info/RECORD +347 -0
- examples/__init__.py +7 -7
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +217 -275
- tests/authenticated_proxy_example.py +110 -106
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/bug_check_test.py +250 -250
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/direct_selector_helper_test.py +96 -96
- tests/distributed_dedup_test.py +467 -0
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/error_handling_example.py +171 -171
- tests/explain_mysql_update_behavior.py +77 -0
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/monitor_redis_dedup.sh +72 -0
- tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -12
- tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -100
- tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -13
- tests/ofweek_scrapy/ofweek_scrapy/settings.py +84 -84
- tests/ofweek_scrapy/scrapy.cfg +11 -11
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +244 -244
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_cli_test.py +55 -0
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +126 -126
- tests/simple_follow_test.py +38 -38
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_response_selector_test.py +94 -94
- tests/simple_selector_helper_test.py +154 -154
- tests/simple_selector_test.py +207 -207
- tests/simple_spider_test.py +49 -49
- tests/simple_url_test.py +73 -73
- tests/simulate_mysql_update_test.py +140 -0
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_asyncmy_usage.py +57 -0
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_cli_arguments.py +119 -0
- tests/test_component_factory.py +174 -174
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawler_process_import.py +38 -38
- tests/test_crawler_process_spider_modules.py +47 -47
- tests/test_crawlo_proxy_integration.py +114 -108
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +124 -124
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +272 -268
- tests/test_edge_cases.py +305 -305
- tests/test_encoding_core.py +56 -56
- tests/test_encoding_detection.py +126 -126
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_factory_compatibility.py +196 -196
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +374 -374
- tests/test_logging_final.py +184 -184
- tests/test_logging_integration.py +312 -312
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +141 -141
- tests/test_mode_consistency.py +51 -51
- tests/test_multi_directory.py +67 -67
- tests/test_multiple_spider_modules.py +80 -80
- tests/test_mysql_pipeline_config.py +165 -0
- tests/test_mysql_pipeline_error.py +99 -0
- tests/test_mysql_pipeline_init_log.py +83 -0
- tests/test_mysql_pipeline_integration.py +133 -0
- tests/test_mysql_pipeline_refactor.py +144 -0
- tests/test_mysql_pipeline_refactor_simple.py +86 -0
- tests/test_mysql_pipeline_robustness.py +196 -0
- tests/test_mysql_pipeline_types.py +89 -0
- tests/test_mysql_update_columns.py +94 -0
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_optimized_selector_naming.py +100 -100
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +211 -211
- tests/test_priority_consistency.py +151 -151
- tests/test_priority_consistency_fixed.py +249 -249
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +217 -121
- tests/test_proxy_middleware_enhanced.py +212 -216
- tests/test_proxy_middleware_integration.py +142 -137
- tests/test_proxy_middleware_refactored.py +207 -184
- tests/test_proxy_only.py +84 -0
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_proxy_with_downloader.py +153 -0
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +130 -130
- tests/test_random_headers_default.py +322 -322
- tests/test_random_headers_necessity.py +308 -308
- tests/test_random_user_agent.py +72 -72
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +129 -129
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_follow.py +104 -104
- tests/test_response_improvements.py +152 -152
- tests/test_response_selector_methods.py +92 -92
- tests/test_response_url_methods.py +70 -70
- tests/test_response_urljoin.py +86 -86
- tests/test_retry_middleware.py +333 -333
- tests/test_retry_middleware_realistic.py +273 -273
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_scrapy_style_encoding.py +112 -112
- tests/test_selector_helper.py +100 -100
- tests/test_selector_optimizations.py +146 -146
- tests/test_simple_response.py +61 -61
- tests/test_spider_loader.py +49 -49
- tests/test_spider_loader_comprehensive.py +69 -69
- tests/test_spider_modules.py +84 -84
- tests/test_spiders/test_spider.py +9 -9
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +176 -176
- tests/test_user_agents.py +96 -96
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- tests/verify_mysql_warnings.py +110 -0
- crawlo/logging/async_handler.py +0 -181
- crawlo/logging/monitor.py +0 -153
- crawlo/logging/sampler.py +0 -167
- crawlo/middleware/simple_proxy.py +0 -65
- crawlo/tools/authenticated_proxy.py +0 -241
- crawlo/tools/data_formatter.py +0 -226
- crawlo/tools/data_validator.py +0 -181
- crawlo/tools/encoding_converter.py +0 -127
- crawlo/tools/network_diagnostic.py +0 -365
- crawlo/tools/request_tools.py +0 -83
- crawlo/tools/retry_mechanism.py +0 -224
- crawlo/utils/env_config.py +0 -143
- crawlo/utils/large_scale_config.py +0 -287
- crawlo/utils/system.py +0 -11
- crawlo/utils/tools.py +0 -5
- crawlo-1.4.5.dist-info/METADATA +0 -329
- crawlo-1.4.5.dist-info/RECORD +0 -347
- tests/env_config_example.py +0 -134
- tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +0 -162
- tests/test_authenticated_proxy.py +0 -142
- tests/test_comprehensive.py +0 -147
- tests/test_dynamic_downloaders_proxy.py +0 -125
- tests/test_dynamic_proxy.py +0 -93
- tests/test_dynamic_proxy_config.py +0 -147
- tests/test_dynamic_proxy_real.py +0 -110
- tests/test_env_config.py +0 -122
- tests/test_framework_env_usage.py +0 -104
- tests/test_large_scale_config.py +0 -113
- tests/test_proxy_api.py +0 -265
- tests/test_real_scenario_proxy.py +0 -196
- tests/tools_example.py +0 -261
- {crawlo-1.4.5.dist-info → crawlo-1.4.7.dist-info}/WHEEL +0 -0
- {crawlo-1.4.5.dist-info → crawlo-1.4.7.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.5.dist-info → crawlo-1.4.7.dist-info}/top_level.txt +0 -0
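The per-file counts above are simply the number of added and removed lines in each archive member's unified diff. As an illustrative sketch (not part of crawlo or the registry tooling; the wheel filenames are assumed to be local downloads), a listing like this can be reproduced from two wheels with only the standard library:

import zipfile
import difflib

OLD = "crawlo-1.4.5-py3-none-any.whl"   # assumed local paths
NEW = "crawlo-1.4.7-py3-none-any.whl"

def read_texts(path):
    """Read every text member of a wheel (a wheel is a zip archive)."""
    texts = {}
    with zipfile.ZipFile(path) as zf:
        for name in zf.namelist():
            try:
                texts[name] = zf.read(name).decode("utf-8").splitlines()
            except UnicodeDecodeError:
                pass  # skip binary members
    return texts

old, new = read_texts(OLD), read_texts(NEW)
for name in sorted(set(old) | set(new)):
    added = removed = 0
    for line in difflib.unified_diff(old.get(name, []), new.get(name, []), lineterm=""):
        if line.startswith("+") and not line.startswith("+++"):
            added += 1
        elif line.startswith("-") and not line.startswith("---"):
            removed += 1
    if added or removed:
        print(f"{name} +{added} -{removed}")

A file that is byte-identical in both wheels produces an empty diff and is skipped, which is why renamed-but-unchanged entries such as the WHEEL and entry_points.txt files show +0 -0.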
The diff body rendered on this page is the single hunk for tests/test_random_headers_necessity.py (+308 -308):

@@ -1,309 +1,309 @@

All 308 lines are removed and re-added with identical visible text; only line 309 (the closing main() call) is unchanged context. The file content:

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Test the necessity of the RANDOM_HEADERS setting.
Verify whether the existing User-Agent features alone can meet the requirements.
"""

import sys
import os
import random
from unittest.mock import Mock, patch

# Add the project root directory to the Python path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from crawlo.middleware.default_header import DefaultHeaderMiddleware
from crawlo.settings.setting_manager import SettingManager
from crawlo.exceptions import NotConfiguredError
from crawlo.data.user_agents import get_random_user_agent


class MockLogger:
    """Mock logger class used to capture log output in tests."""
    def __init__(self, name, level=None):
        self.name = name
        self.level = level
        self.logs = []

    def debug(self, msg):
        self.logs.append(('debug', msg))

    def info(self, msg):
        self.logs.append(('info', msg))

    def warning(self, msg):
        self.logs.append(('warning', msg))

    def error(self, msg):
        self.logs.append(('error', msg))

    def isEnabledFor(self, level):
        return True


def test_current_user_agent_functionality():
    """Test whether the current User-Agent features are sufficient."""
    print("=== Testing whether the current User-Agent features are sufficient ===")

    # Create the settings manager
    settings = SettingManager()
    settings.set('DEFAULT_REQUEST_HEADERS', {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    })
    settings.set('RANDOM_USER_AGENT_ENABLED', True)  # Enable random User-Agent
    settings.set('LOG_LEVEL', 'DEBUG')
    settings.set('RANDOMNESS', True)  # Enable randomization

    # Create a mock crawler object
    crawler = Mock()
    crawler.settings = settings

    logger = MockLogger('DefaultHeaderMiddleware')
    with patch('crawlo.middleware.default_header.get_logger', return_value=logger):
        try:
            # The instance should be created without errors
            middleware = DefaultHeaderMiddleware.create_instance(crawler)
            print("  ✅ Middleware created successfully with random User-Agent enabled")

            # Check the configuration
            print(f"  Random User-Agent enabled: {middleware.random_user_agent_enabled}")
            print(f"  Number of User-Agents: {len(middleware.user_agents)}")
            print(f"  User-Agent device type: {middleware.user_agent_device_type}")

            # Test processing a request
            request = Mock()
            request.headers = {}
            request.url = 'https://example.com'

            spider = Mock()
            middleware.process_request(request, spider)

            # Check whether a User-Agent was added
            if 'User-Agent' in request.headers:
                print("  ✅ Random User-Agent correctly added to the request")
                print(f"  User-Agent: {request.headers['User-Agent'][:50]}...")
                return True
            else:
                print("  ❌ Random User-Agent was not added")
                return False

        except Exception as e:
            print(f"  ❌ Test failed: {e}")
            return False


def test_random_headers_vs_user_agent():
    """Compare the RANDOM_HEADERS feature with the User-Agent feature."""
    print("\n=== Comparing the RANDOM_HEADERS and User-Agent features ===")

    # Settings for the RANDOM_HEADERS feature
    print("  RANDOM_HEADERS feature:")
    settings1 = SettingManager()
    settings1.set('DEFAULT_REQUEST_HEADERS', {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    })
    settings1.set('RANDOM_HEADERS', {
        'X-Custom-Header': ['Value1', 'Value2', 'Value3'],
        'X-Another-Header': 'FixedValue',
        'X-Random-Header': ['A', 'B', 'C', 'D']
    })
    settings1.set('RANDOMNESS', True)
    settings1.set('LOG_LEVEL', 'DEBUG')

    # Settings for the User-Agent feature
    print("  User-Agent feature:")
    settings2 = SettingManager()
    settings2.set('DEFAULT_REQUEST_HEADERS', {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    })
    settings2.set('RANDOM_USER_AGENT_ENABLED', True)
    settings2.set('LOG_LEVEL', 'DEBUG')
    settings2.set('RANDOMNESS', True)

    # Create the crawler objects
    crawler1 = Mock()
    crawler1.settings = settings1
    crawler2 = Mock()
    crawler2.settings = settings2

    logger = MockLogger('DefaultHeaderMiddleware')

    # Test RANDOM_HEADERS
    with patch('crawlo.middleware.default_header.get_logger', return_value=logger):
        try:
            middleware1 = DefaultHeaderMiddleware.create_instance(crawler1)

            # Test randomness across multiple requests
            print("  RANDOM_HEADERS randomness test:")
            header_values = {}
            for i in range(20):
                test_request = Mock()
                test_request.headers = {}
                test_request.url = f'https://example.com/test{i}'

                middleware1.process_request(test_request, Mock())

                # Collect the values of each randomized header
                for header in ['X-Custom-Header', 'X-Another-Header', 'X-Random-Header']:
                    if header in test_request.headers:
                        if header not in header_values:
                            header_values[header] = []
                        header_values[header].append(test_request.headers[header])

            # Analyze the randomness
            for header, values in header_values.items():
                unique_values = set(values)
                print(f"  {header}: {len(unique_values)} distinct values ({list(unique_values)[:3]}...)")

        except Exception as e:
            print(f"  RANDOM_HEADERS test failed: {e}")

    # Test User-Agent
    with patch('crawlo.middleware.default_header.get_logger', return_value=logger):
        try:
            middleware2 = DefaultHeaderMiddleware.create_instance(crawler2)

            # Test randomness across multiple requests
            print("  User-Agent randomness test:")
            ua_values = []
            for i in range(20):
                test_request = Mock()
                test_request.headers = {}
                test_request.url = f'https://example.com/test{i}'

                middleware2.process_request(test_request, Mock())

                if 'User-Agent' in test_request.headers:
                    ua_values.append(test_request.headers['User-Agent'])

            # Analyze the randomness
            unique_uas = set(ua_values)
            print(f"  User-Agent: {len(unique_uas)} distinct values")
            print(f"  Examples: {list(unique_uas)[:3]}")

        except Exception as e:
            print(f"  User-Agent test failed: {e}")


def test_direct_user_agent_usage():
    """Test using the user_agents module directly."""
    print("\n=== Testing direct use of the user_agents module ===")

    # Test the get_random_user_agent function
    print("  Using get_random_user_agent directly:")
    for i in range(5):
        ua = get_random_user_agent()
        print(f"  {i+1}. {ua[:50]}...")

    # Test User-Agents for different device types
    print("  User-Agents by device type:")
    device_types = ["desktop", "mobile", "chrome", "firefox", "safari"]
    for device_type in device_types:
        ua = get_random_user_agent(device_type)
        print(f"  {device_type}: {ua[:50]}...")

    print("  ✅ The user_agents module alone covers User-Agent randomization needs")


def test_alternative_approach():
    """Test the alternative approach: use only the User-Agent features."""
    print("\n=== Testing the alternative approach: User-Agent features only ===")

    print("  Recommended configuration:")
    print("  1. Enable RANDOM_USER_AGENT_ENABLED = True")
    print("  2. Set USER_AGENT_DEVICE_TYPE = 'desktop', 'mobile', etc.")
    print("  3. No RANDOM_HEADERS configuration needed")

    # Simulate the recommended configuration
    settings = SettingManager()
    settings.set('DEFAULT_REQUEST_HEADERS', {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    })
    settings.set('RANDOM_USER_AGENT_ENABLED', True)
    settings.set('USER_AGENT_DEVICE_TYPE', 'desktop')
    settings.set('LOG_LEVEL', 'DEBUG')

    crawler = Mock()
    crawler.settings = settings

    logger = MockLogger('DefaultHeaderMiddleware')
    with patch('crawlo.middleware.default_header.get_logger', return_value=logger):
        try:
            middleware = DefaultHeaderMiddleware.create_instance(crawler)
            print("  ✅ The recommended configuration works")

            # Test processing a request
            request = Mock()
            request.headers = {}
            request.url = 'https://example.com'

            spider = Mock()
            middleware.process_request(request, spider)

            if 'User-Agent' in request.headers:
                print(f"  User-Agent: {request.headers['User-Agent'][:50]}...")

            return True
        except Exception as e:
            print(f"  ❌ The recommended configuration failed: {e}")
            return False


def analyze_necessity():
    """Analyze whether the RANDOM_HEADERS setting is necessary."""
    print("\n=== Analyzing the necessity of the RANDOM_HEADERS setting ===")

    print("Feature comparison:")
    print("  User-Agent feature:")
    print("  ✓ Dedicated to User-Agent randomization")
    print("  ✓ Ships with a large set of real User-Agents")
    print("  ✓ Supports grouping by device type")
    print("  ✓ Easy to use and configure")

    print("  RANDOM_HEADERS feature:")
    print("  ✓ Can add random values to arbitrary headers")
    print("  ✓ More flexible; supports custom headers")
    print("  ✓ Suits scenarios that need other headers randomized")
    print("  ✗ Requires users to supply their own value lists")

    print("\nRecommendations:")
    print("  1. For User-Agent randomization: use RANDOM_USER_AGENT_ENABLED")
    print("  2. For randomizing other headers: use RANDOM_HEADERS")
    print("  3. In most scenarios the User-Agent feature is sufficient")
    print("  4. RANDOM_HEADERS is for special-purpose scenarios")

    print("\nConclusion:")
    print("  RANDOM_HEADERS is not required, but it is useful when other headers must be randomized")
    print("  The existing User-Agent feature already covers most anti-bot needs")


def main():
    print("Starting tests on the necessity of the RANDOM_HEADERS setting...")

    try:
        # Run all tests
        test1_result = test_current_user_agent_functionality()
        test_random_headers_vs_user_agent()
        test_direct_user_agent_usage()
        test2_result = test_alternative_approach()
        analyze_necessity()

        if test1_result and test2_result:
            print("\n🎉 Tests completed!")
            print("\nSummary:")
            print("  1. The existing User-Agent feature covers most randomization needs")
            print("  2. RANDOM_HEADERS provides extra flexibility")
            print("  3. For simple scenarios, the User-Agent feature alone is enough")
            print("  4. For complex scenarios, RANDOM_HEADERS is still valuable")
        else:
            print("\n❌ Some tests failed")

    except Exception as e:
        print(f"\n❌ An error occurred while running the tests: {e}")
        import traceback
        traceback.print_exc()


if __name__ == "__main__":
    main()