crawlo-1.4.1-py3-none-any.whl → crawlo-1.4.3-py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +93 -93
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +341 -341
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +312 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +438 -439
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +291 -257
- crawlo/crawler.py +650 -650
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +273 -273
- crawlo/downloader/aiohttp_downloader.py +233 -228
- crawlo/downloader/cffi_downloader.py +245 -245
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +63 -63
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +61 -61
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +103 -103
- crawlo/factories/registry.py +84 -84
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +257 -257
- crawlo/filters/memory_filter.py +269 -269
- crawlo/framework.py +292 -292
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +425 -425
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +193 -193
- crawlo/initialization/phases.py +148 -148
- crawlo/initialization/registry.py +145 -145
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +45 -37
- crawlo/logging/async_handler.py +181 -0
- crawlo/logging/config.py +196 -96
- crawlo/logging/factory.py +171 -128
- crawlo/logging/manager.py +111 -111
- crawlo/logging/monitor.py +153 -0
- crawlo/logging/sampler.py +167 -0
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +386 -386
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/middleware/simple_proxy.py +65 -65
- crawlo/mode_manager.py +219 -219
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +379 -379
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +197 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +105 -105
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +325 -325
- crawlo/pipelines/pipeline_manager.py +100 -84
- crawlo/pipelines/redis_dedup_pipeline.py +156 -156
- crawlo/project.py +349 -338
- crawlo/queue/pqueue.py +42 -42
- crawlo/queue/queue_manager.py +526 -522
- crawlo/queue/redis_priority_queue.py +370 -367
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +284 -284
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +657 -657
- crawlo/stats_collector.py +73 -73
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +170 -170
- crawlo/templates/project/settings_distributed.py.tmpl +169 -169
- crawlo/templates/project/settings_gentle.py.tmpl +166 -166
- crawlo/templates/project/settings_high_performance.py.tmpl +167 -167
- crawlo/templates/project/settings_minimal.py.tmpl +65 -65
- crawlo/templates/project/settings_simple.py.tmpl +164 -164
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +34 -34
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/templates/spiders_init.py.tmpl +9 -9
- crawlo/tools/__init__.py +200 -200
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_formatter.py +225 -225
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/encoding_converter.py +127 -127
- crawlo/tools/network_diagnostic.py +364 -364
- crawlo/tools/request_tools.py +82 -82
- crawlo/tools/retry_mechanism.py +224 -224
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +34 -34
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/class_loader.py +25 -25
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +142 -142
- crawlo/utils/error_handler.py +165 -165
- crawlo/utils/fingerprint.py +122 -122
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +79 -79
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +388 -388
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.4.3.dist-info/METADATA +190 -0
- crawlo-1.4.3.dist-info/RECORD +326 -0
- examples/__init__.py +7 -7
- examples/test_project/__init__.py +7 -7
- examples/test_project/run.py +34 -34
- examples/test_project/test_project/__init__.py +3 -3
- examples/test_project/test_project/items.py +17 -17
- examples/test_project/test_project/middlewares.py +118 -118
- examples/test_project/test_project/pipelines.py +96 -96
- examples/test_project/test_project/settings.py +169 -169
- examples/test_project/test_project/spiders/__init__.py +9 -9
- examples/test_project/test_project/spiders/of_week_dis.py +143 -143
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +106 -106
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +245 -245
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +127 -127
- tests/simple_log_test.py +57 -57
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_spider_test.py +49 -49
- tests/simple_test.py +47 -47
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_component_factory.py +174 -174
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawlo_proxy_integration.py +108 -108
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +125 -0
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_framework_env_usage.py +103 -103
- tests/test_framework_logger.py +66 -66
- tests/test_framework_startup.py +64 -64
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_config.py +112 -112
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +375 -0
- tests/test_logging_final.py +185 -0
- tests/test_logging_integration.py +313 -0
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +142 -0
- tests/test_mode_change.py +72 -72
- tests/test_mode_consistency.py +51 -51
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +212 -0
- tests/test_priority_consistency.py +152 -0
- tests/test_priority_consistency_fixed.py +250 -0
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_middleware_refactored.py +184 -184
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +131 -0
- tests/test_random_headers_default.py +323 -0
- tests/test_random_headers_necessity.py +309 -0
- tests/test_random_user_agent.py +72 -72
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +130 -0
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +334 -242
- tests/test_retry_middleware_realistic.py +274 -0
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +177 -0
- tests/test_user_agents.py +96 -96
- tests/tools_example.py +260 -260
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- crawlo-1.4.1.dist-info/METADATA +0 -1199
- crawlo-1.4.1.dist-info/RECORD +0 -309
- {crawlo-1.4.1.dist-info → crawlo-1.4.3.dist-info}/WHEEL +0 -0
- {crawlo-1.4.1.dist-info → crawlo-1.4.3.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.1.dist-info → crawlo-1.4.3.dist-info}/top_level.txt +0 -0
crawlo/settings/__init__.py
CHANGED
@@ -1,7 +1,7 @@
-#!/usr/bin/python
-# -*- coding:UTF-8 -*-
-"""
-# @Time : 2025-05-11 11:08
-# @Author : oscar
-# @Desc : None
-"""
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+"""
+# @Time : 2025-05-11 11:08
+# @Author : oscar
+# @Desc : None
+"""

crawlo/settings/default_settings.py
CHANGED
@@ -1,285 +1,285 @@
-# -*- coding:UTF-8 -*-
-"""
-Default configuration file
-Contains all of the Crawlo framework's default settings
-"""
-# Import the environment-variable configuration helpers
-from crawlo.utils.env_config import get_redis_config, get_runtime_config, get_version
-
-# ===========================================================================
-# 1. Framework basics
-# ===========================================================================
-
-# Framework initialization control
-FRAMEWORK_INIT_ORDER = [
-    'log_system',          # Logging system
-    'settings_system',     # Settings system
-    'core_components',     # Core components
-    'extensions',          # Extension components
-    'full_initialization'  # Full initialization
-]
-FRAMEWORK_INIT_STATE = 'uninitialized'
-
-# Project basics
-runtime_config = get_runtime_config()
-PROJECT_NAME = runtime_config['PROJECT_NAME']  # Project name (used for logs, Redis keys, etc.)
-VERSION = get_version()  # Project version, read from the framework's __version__.py; a default is used if it is missing
-RUN_MODE = runtime_config['CRAWLO_MODE']  # Run mode: standalone/distributed/auto
-CONCURRENCY = runtime_config['CONCURRENCY']  # Concurrency level
-
-# ===========================================================================
-# 2. Crawler core settings
-# ===========================================================================
-
-# Downloader settings
-DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"  # Default downloader
-DOWNLOAD_DELAY = 0.5  # Request delay (seconds)
-RANDOMNESS = True  # Whether to randomize the delay
-RANDOM_RANGE = [0.5, 1.5]  # Random delay factor range; actual delay = DOWNLOAD_DELAY * RANDOM_RANGE[0] to DOWNLOAD_DELAY * RANDOM_RANGE[1]
-
-# Scheduler settings
-DEPTH_PRIORITY = 1  # Depth priority (negative = depth-first, positive = breadth-first)
-SCHEDULER_MAX_QUEUE_SIZE = 5000  # Maximum scheduler queue size
-BACKPRESSURE_RATIO = 0.9  # Backpressure threshold (triggered when the queue reaches 90% of its capacity)
-
-# Request generation control
-REQUEST_GENERATION_BATCH_SIZE = 10  # Request generation batch size
-REQUEST_GENERATION_INTERVAL = 0.01  # Request generation interval (seconds)
-ENABLE_CONTROLLED_REQUEST_GENERATION = False  # Whether to enable controlled request generation
-
-# Queue settings
-QUEUE_TYPE = 'auto'  # Queue type: memory/redis/auto
-# SCHEDULER_QUEUE_NAME = f"crawlo:{PROJECT_NAME}:queue:requests"  # Scheduler queue name (follows the unified naming convention)
-QUEUE_MAX_RETRIES = 3  # Maximum retries for queue operations
-QUEUE_TIMEOUT = 300  # Queue operation timeout (seconds)
-
-# ===========================================================================
-# 3. Database and filter settings
-# ===========================================================================
-
-# MySQL settings
-MYSQL_HOST = '127.0.0.1'
-MYSQL_PORT = 3306
-MYSQL_USER = 'root'
-MYSQL_PASSWORD = '123456'
-MYSQL_DB = 'crawl_pro'
-MYSQL_TABLE = 'crawlo'
-MYSQL_BATCH_SIZE = 100
-MYSQL_USE_BATCH = False  # Whether to enable batch inserts
-
-# Redis settings
-redis_config = get_redis_config()
-REDIS_HOST = redis_config['REDIS_HOST']
-REDIS_PORT = redis_config['REDIS_PORT']
-REDIS_PASSWORD = redis_config['REDIS_PASSWORD']
-REDIS_DB = redis_config['REDIS_DB']
-
-# Build a different URL format depending on whether a password is set
-if REDIS_PASSWORD:
-    REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
-else:
-    REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
-
-# Redis key naming conventions are encapsulated in internal framework components; users do not need to configure them manually:
-# - Request dedup:    crawlo:{PROJECT_NAME}:filter:fingerprint
-# - Item dedup:       crawlo:{PROJECT_NAME}:item:fingerprint
-# - Request queue:    crawlo:{PROJECT_NAME}:queue:requests
-# - Processing queue: crawlo:{PROJECT_NAME}:queue:processing
-# - Failed queue:     crawlo:{PROJECT_NAME}:queue:failed
-
-REDIS_TTL = 0  # Fingerprint TTL (0 = never expire)
-CLEANUP_FP = 0  # Whether to clean up fingerprints on exit (0 = do not clean)
-FILTER_DEBUG = True  # Whether to enable dedup debug logging
-DECODE_RESPONSES = True  # Whether Redis responses are decoded to strings
-
-# Filter settings
-DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'  # Use the in-memory filter and dedup pipeline by default
-FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
-
-# Bloom filter settings
-BLOOM_FILTER_CAPACITY = 1000000  # Bloom filter capacity
-BLOOM_FILTER_ERROR_RATE = 0.001  # Bloom filter error rate
-
-# ===========================================================================
-# 4. Middleware settings
-# ===========================================================================
-
-# Framework middleware list (framework defaults + user-defined middleware)
-MIDDLEWARES = [
-    # === Request preprocessing phase ===
-    'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',  # 1. Ignore invalid requests
-    'crawlo.middleware.download_delay.DownloadDelayMiddleware',  # 2. Throttle request rate
-    'crawlo.middleware.default_header.DefaultHeaderMiddleware',  # 3. Add default request headers
-    'crawlo.middleware.offsite.OffsiteMiddleware',  # 5. Filter off-site requests
-
-    # === Response processing phase ===
-    'crawlo.middleware.retry.RetryMiddleware',  # 6. Retry failed requests
-    'crawlo.middleware.response_code.ResponseCodeMiddleware',  # 7. Handle special status codes
-    'crawlo.middleware.response_filter.ResponseFilterMiddleware',  # 8. Filter response content
-]
-
-# ===========================================================================
-# 5. Pipeline settings
-# ===========================================================================
-
-# Framework item pipeline list (framework defaults + user-defined pipelines)
-PIPELINES = [
-    'crawlo.pipelines.console_pipeline.ConsolePipeline',
-]
-
-# ===========================================================================
-# 6. Extension settings
-# ===========================================================================
-
-# Framework extension list (framework defaults + user-defined extensions)
-EXTENSIONS = [
-    'crawlo.extension.log_interval.LogIntervalExtension',  # Periodic logging
-    'crawlo.extension.log_stats.LogStats',  # Statistics
-    'crawlo.extension.logging_extension.CustomLoggerExtension',  # Custom logging
-]
-
-# ===========================================================================
-# 7. Logging and monitoring settings
-# ===========================================================================
-
-# Logging settings
-LOG_LEVEL = None  # Log level: DEBUG/INFO/WARNING/ERROR; defaults to None and is set by the user in the project settings
-STATS_DUMP = True  # Whether to dump statistics periodically
-LOG_FILE = None  # Log file path, set in the project configuration
-LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
-LOG_ENCODING = 'utf-8'
-
-# Log interval settings
-INTERVAL = 60  # Log output interval (seconds)
-
-# Custom logging settings
-LOG_ENABLE_CUSTOM = False  # Whether to enable custom logging
-
-# Memory monitoring settings
-MEMORY_MONITOR_ENABLED = False  # Whether to enable memory monitoring
-MEMORY_MONITOR_INTERVAL = 60  # Memory monitor check interval (seconds)
-MEMORY_WARNING_THRESHOLD = 80.0  # Memory usage warning threshold (percent)
-MEMORY_CRITICAL_THRESHOLD = 90.0  # Memory usage critical threshold (percent)
-
-# Performance profiling settings
-PERFORMANCE_PROFILER_ENABLED = False  # Whether to enable performance profiling
-PERFORMANCE_PROFILER_OUTPUT_DIR = 'profiling'  # Profiler output directory
-PERFORMANCE_PROFILER_INTERVAL = 300  # Profiling interval (seconds)
-
-# Health check settings
-HEALTH_CHECK_ENABLED = True  # Whether to enable health checks
-
-# ===========================================================================
-# 8. Network request settings
-# ===========================================================================
-
-# Default request header settings
-DEFAULT_REQUEST_HEADERS = {
-    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
-    'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
-    'Accept-Encoding': 'gzip, deflate, br',
-}  # Default request headers
-
-# Default User-Agent (a modern browser User-Agent)
-USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36"
-
-# Whether to enable the random User-Agent feature (disabled by default; enable as needed)
-RANDOM_USER_AGENT_ENABLED = False  # Whether to enable random user agents
-
-# Off-site filtering settings
-ALLOWED_DOMAINS = []  # List of allowed domains
-
-# Proxy settings
-PROXY_ENABLED = False  # Whether to enable proxies
-PROXY_LIST = []  # Simple proxy list (for SimpleProxyMiddleware)
-PROXY_API_URL = ""  # Advanced proxy configuration (for ProxyMiddleware)
-PROXY_EXTRACTOR = "proxy"  # Proxy extraction method
-PROXY_REFRESH_INTERVAL = 60  # Proxy refresh interval (seconds)
-PROXY_API_TIMEOUT = 10  # Proxy API request timeout
-PROXY_POOL_SIZE = 5  # Proxy pool size
-PROXY_HEALTH_CHECK_THRESHOLD = 0.5  # Proxy health check threshold
-
-# Common downloader settings
-DOWNLOAD_TIMEOUT = 30  # Download timeout (seconds)
-VERIFY_SSL = True  # Whether to verify SSL certificates
-CONNECTION_POOL_LIMIT = 100  # Connection pool size limit
-CONNECTION_POOL_LIMIT_PER_HOST = 20  # Per-host connection pool size limit
-DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # Maximum download size (bytes)
-DOWNLOAD_STATS = True  # Whether to enable download statistics
-DOWNLOAD_WARN_SIZE = 1024 * 1024  # Download warning size (bytes)
-DOWNLOAD_RETRY_TIMES = 3  # Download retry count
-MAX_RETRY_TIMES = 3  # Maximum retry count
-
-# Downloader health checks
-DOWNLOADER_HEALTH_CHECK = True  # Whether to enable downloader health checks
-HEALTH_CHECK_INTERVAL = 60  # Health check interval (seconds)
-REQUEST_STATS_ENABLED = True  # Whether to enable request statistics
-STATS_RESET_ON_START = False  # Whether to reset statistics on startup
-
-# HttpX downloader settings
-HTTPX_HTTP2 = True  # Whether to enable HTTP/2 support
-HTTPX_FOLLOW_REDIRECTS = True  # Whether to follow redirects automatically
-
-# AioHttp downloader settings
-AIOHTTP_AUTO_DECOMPRESS = True  # Whether to decompress responses automatically
-AIOHTTP_FORCE_CLOSE = False  # Whether to force-close connections
-
-# Curl-Cffi specific settings
-CURL_BROWSER_TYPE = "chrome"  # Browser fingerprint emulation (CurlCffi downloader only)
-CURL_BROWSER_VERSION_MAP = {  # Custom browser version map (can override the default behavior)
-    "chrome": "chrome136",
-    "edge": "edge101",
-    "safari": "safari184",
-    "firefox": "firefox135",
-}
-
-# Selenium downloader settings
-SELENIUM_BROWSER_TYPE = "chrome"  # Browser type: chrome, firefox, edge
-SELENIUM_HEADLESS = True  # Whether to run headless
-SELENIUM_TIMEOUT = 30  # Timeout (seconds)
-SELENIUM_LOAD_TIMEOUT = 10  # Page load timeout (seconds)
-SELENIUM_WINDOW_WIDTH = 1920  # Window width
-SELENIUM_WINDOW_HEIGHT = 1080  # Window height
-SELENIUM_WAIT_FOR_ELEMENT = None  # Selector of an element to wait for
-SELENIUM_ENABLE_JS = True  # Whether to enable JavaScript
-SELENIUM_PROXY = None  # Proxy setting
-SELENIUM_SINGLE_BROWSER_MODE = True  # Single-browser, multi-tab mode
-SELENIUM_MAX_TABS_PER_BROWSER = 10  # Maximum tabs per browser
-
-# Playwright downloader settings
-PLAYWRIGHT_BROWSER_TYPE = "chromium"  # Browser type: chromium, firefox, webkit
-PLAYWRIGHT_HEADLESS = True  # Whether to run headless
-PLAYWRIGHT_TIMEOUT = 30000  # Timeout (milliseconds)
-PLAYWRIGHT_LOAD_TIMEOUT = 10000  # Page load timeout (milliseconds)
-PLAYWRIGHT_VIEWPORT_WIDTH = 1920  # Viewport width
-PLAYWRIGHT_VIEWPORT_HEIGHT = 1080  # Viewport height
-PLAYWRIGHT_WAIT_FOR_ELEMENT = None  # Selector of an element to wait for
-PLAYWRIGHT_PROXY = None  # Proxy setting
-PLAYWRIGHT_SINGLE_BROWSER_MODE = True  # Single-browser, multi-page mode
-PLAYWRIGHT_MAX_PAGES_PER_BROWSER = 10  # Maximum pages per browser
-
-# General optimization settings
-CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
-CONNECTION_KEEPALIVE_TIMEOUT = 15  # Keep-Alive timeout (seconds)
-
-# ===========================================================================
-# 9. Data storage settings
-# ===========================================================================
-
-# CSV pipeline settings
-CSV_DELIMITER = ','  # CSV delimiter
-CSV_QUOTECHAR = '"'  # CSV quote character
-CSV_INCLUDE_HEADERS = True  # Whether to include headers
-CSV_EXTRASACTION = 'ignore'  # Handling of extra fields: ignore, raise
-CSV_FIELDNAMES = None  # Field name list
-CSV_FILE = None  # CSV file path
-CSV_DICT_FILE = None  # CSV dict file path
-CSV_BATCH_SIZE = 100  # CSV batch size
-CSV_BATCH_FILE = None  # CSV batch file path
-
-# Database dedup pipeline settings
-DB_HOST = 'localhost'  # Database host
-DB_PORT = 3306  # Database port
-DB_USER = 'root'  # Database user
-DB_PASSWORD = ''  # Database password
+# -*- coding:UTF-8 -*-
+"""
+Default configuration file
+Contains all of the Crawlo framework's default settings
+"""
+# Import the environment-variable configuration helpers
+from crawlo.utils.env_config import get_redis_config, get_runtime_config, get_version
+
+# ===========================================================================
+# 1. Framework basics
+# ===========================================================================
+
+# Framework initialization control
+FRAMEWORK_INIT_ORDER = [
+    'log_system',          # Logging system
+    'settings_system',     # Settings system
+    'core_components',     # Core components
+    'extensions',          # Extension components
+    'full_initialization'  # Full initialization
+]
+FRAMEWORK_INIT_STATE = 'uninitialized'
+
+# Project basics
+runtime_config = get_runtime_config()
+PROJECT_NAME = runtime_config['PROJECT_NAME']  # Project name (used for logs, Redis keys, etc.)
+VERSION = get_version()  # Project version, read from the framework's __version__.py; a default is used if it is missing
+RUN_MODE = runtime_config['CRAWLO_MODE']  # Run mode: standalone/distributed/auto
+CONCURRENCY = runtime_config['CONCURRENCY']  # Concurrency level
+
+# ===========================================================================
+# 2. Crawler core settings
+# ===========================================================================
+
+# Downloader settings
+DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"  # Default downloader
+DOWNLOAD_DELAY = 0.5  # Request delay (seconds)
+RANDOMNESS = True  # Whether to randomize the delay
+RANDOM_RANGE = [0.5, 1.5]  # Random delay factor range; actual delay = DOWNLOAD_DELAY * RANDOM_RANGE[0] to DOWNLOAD_DELAY * RANDOM_RANGE[1]
+
+# Scheduler settings
+DEPTH_PRIORITY = 1  # Depth priority (negative = depth-first, positive = breadth-first)
+SCHEDULER_MAX_QUEUE_SIZE = 5000  # Maximum scheduler queue size
+BACKPRESSURE_RATIO = 0.9  # Backpressure threshold (triggered when the queue reaches 90% of its capacity)
+
+# Request generation control
+REQUEST_GENERATION_BATCH_SIZE = 10  # Request generation batch size
+REQUEST_GENERATION_INTERVAL = 0.01  # Request generation interval (seconds)
+ENABLE_CONTROLLED_REQUEST_GENERATION = False  # Whether to enable controlled request generation
+
+# Queue settings
+QUEUE_TYPE = 'auto'  # Queue type: memory/redis/auto
+# SCHEDULER_QUEUE_NAME = f"crawlo:{PROJECT_NAME}:queue:requests"  # Scheduler queue name (follows the unified naming convention)
+QUEUE_MAX_RETRIES = 3  # Maximum retries for queue operations
+QUEUE_TIMEOUT = 300  # Queue operation timeout (seconds)
+
+# ===========================================================================
+# 3. Database and filter settings
+# ===========================================================================
+
+# MySQL settings
+MYSQL_HOST = '127.0.0.1'
+MYSQL_PORT = 3306
+MYSQL_USER = 'root'
+MYSQL_PASSWORD = '123456'
+MYSQL_DB = 'crawl_pro'
+MYSQL_TABLE = 'crawlo'
+MYSQL_BATCH_SIZE = 100
+MYSQL_USE_BATCH = False  # Whether to enable batch inserts
+
+# Redis settings
+redis_config = get_redis_config()
+REDIS_HOST = redis_config['REDIS_HOST']
+REDIS_PORT = redis_config['REDIS_PORT']
+REDIS_PASSWORD = redis_config['REDIS_PASSWORD']
+REDIS_DB = redis_config['REDIS_DB']
+
+# Build a different URL format depending on whether a password is set
+if REDIS_PASSWORD:
+    REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
+else:
+    REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
+
+# Redis key naming conventions are encapsulated in internal framework components; users do not need to configure them manually:
+# - Request dedup:    crawlo:{PROJECT_NAME}:filter:fingerprint
+# - Item dedup:       crawlo:{PROJECT_NAME}:item:fingerprint
+# - Request queue:    crawlo:{PROJECT_NAME}:queue:requests
+# - Processing queue: crawlo:{PROJECT_NAME}:queue:processing
+# - Failed queue:     crawlo:{PROJECT_NAME}:queue:failed
+
+REDIS_TTL = 0  # Fingerprint TTL (0 = never expire)
+CLEANUP_FP = 0  # Whether to clean up fingerprints on exit (0 = do not clean)
+FILTER_DEBUG = True  # Whether to enable dedup debug logging
+DECODE_RESPONSES = True  # Whether Redis responses are decoded to strings
+
+# Filter settings
+DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'  # Use the in-memory filter and dedup pipeline by default
+FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
+
+# Bloom filter settings
+BLOOM_FILTER_CAPACITY = 1000000  # Bloom filter capacity
+BLOOM_FILTER_ERROR_RATE = 0.001  # Bloom filter error rate
+
+# ===========================================================================
+# 4. Middleware settings
+# ===========================================================================
+
+# Framework middleware list (framework defaults + user-defined middleware)
+MIDDLEWARES = [
+    # === Request preprocessing phase ===
+    'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',  # 1. Ignore invalid requests
+    'crawlo.middleware.download_delay.DownloadDelayMiddleware',  # 2. Throttle request rate
+    'crawlo.middleware.default_header.DefaultHeaderMiddleware',  # 3. Add default request headers
+    'crawlo.middleware.offsite.OffsiteMiddleware',  # 5. Filter off-site requests
+
+    # === Response processing phase ===
+    'crawlo.middleware.retry.RetryMiddleware',  # 6. Retry failed requests
+    'crawlo.middleware.response_code.ResponseCodeMiddleware',  # 7. Handle special status codes
+    'crawlo.middleware.response_filter.ResponseFilterMiddleware',  # 8. Filter response content
+]
+
+# ===========================================================================
+# 5. Pipeline settings
+# ===========================================================================
+
+# Framework item pipeline list (framework defaults + user-defined pipelines)
+PIPELINES = [
+    'crawlo.pipelines.console_pipeline.ConsolePipeline',
+]
+
+# ===========================================================================
+# 6. Extension settings
+# ===========================================================================
+
+# Framework extension list (framework defaults + user-defined extensions)
+EXTENSIONS = [
+    'crawlo.extension.log_interval.LogIntervalExtension',  # Periodic logging
+    'crawlo.extension.log_stats.LogStats',  # Statistics
+    'crawlo.extension.logging_extension.CustomLoggerExtension',  # Custom logging
+]
+
+# ===========================================================================
+# 7. Logging and monitoring settings
+# ===========================================================================
+
+# Logging settings
+LOG_LEVEL = None  # Log level: DEBUG/INFO/WARNING/ERROR; defaults to None and is set by the user in the project settings
+STATS_DUMP = True  # Whether to dump statistics periodically
+LOG_FILE = None  # Log file path, set in the project configuration
+LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
+LOG_ENCODING = 'utf-8'
+
+# Log interval settings
+INTERVAL = 60  # Log output interval (seconds)
+
+# Custom logging settings
+LOG_ENABLE_CUSTOM = False  # Whether to enable custom logging
+
+# Memory monitoring settings
+MEMORY_MONITOR_ENABLED = False  # Whether to enable memory monitoring
+MEMORY_MONITOR_INTERVAL = 60  # Memory monitor check interval (seconds)
+MEMORY_WARNING_THRESHOLD = 80.0  # Memory usage warning threshold (percent)
+MEMORY_CRITICAL_THRESHOLD = 90.0  # Memory usage critical threshold (percent)
+
+# Performance profiling settings
+PERFORMANCE_PROFILER_ENABLED = False  # Whether to enable performance profiling
+PERFORMANCE_PROFILER_OUTPUT_DIR = 'profiling'  # Profiler output directory
+PERFORMANCE_PROFILER_INTERVAL = 300  # Profiling interval (seconds)
+
+# Health check settings
+HEALTH_CHECK_ENABLED = True  # Whether to enable health checks
+
+# ===========================================================================
+# 8. Network request settings
+# ===========================================================================
+
+# Default request header settings
+DEFAULT_REQUEST_HEADERS = {
+    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
+    'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
+    'Accept-Encoding': 'gzip, deflate, br',
+}  # Default request headers
+
+# Default User-Agent (a modern browser User-Agent)
+USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36"
+
+# Whether to enable the random User-Agent feature (disabled by default; enable as needed)
+RANDOM_USER_AGENT_ENABLED = False  # Whether to enable random user agents
+
+# Off-site filtering settings
+ALLOWED_DOMAINS = []  # List of allowed domains
+
+# Proxy settings
+PROXY_ENABLED = False  # Whether to enable proxies
+PROXY_LIST = []  # Simple proxy list (for SimpleProxyMiddleware)
+PROXY_API_URL = ""  # Advanced proxy configuration (for ProxyMiddleware)
+PROXY_EXTRACTOR = "proxy"  # Proxy extraction method
+PROXY_REFRESH_INTERVAL = 60  # Proxy refresh interval (seconds)
+PROXY_API_TIMEOUT = 10  # Proxy API request timeout
+PROXY_POOL_SIZE = 5  # Proxy pool size
+PROXY_HEALTH_CHECK_THRESHOLD = 0.5  # Proxy health check threshold
+
+# Common downloader settings
+DOWNLOAD_TIMEOUT = 30  # Download timeout (seconds)
+VERIFY_SSL = True  # Whether to verify SSL certificates
+CONNECTION_POOL_LIMIT = 100  # Connection pool size limit
+CONNECTION_POOL_LIMIT_PER_HOST = 20  # Per-host connection pool size limit
+DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # Maximum download size (bytes)
+DOWNLOAD_STATS = True  # Whether to enable download statistics
+DOWNLOAD_WARN_SIZE = 1024 * 1024  # Download warning size (bytes)
+DOWNLOAD_RETRY_TIMES = 3  # Download retry count
+MAX_RETRY_TIMES = 3  # Maximum retry count
+
+# Downloader health checks
+DOWNLOADER_HEALTH_CHECK = True  # Whether to enable downloader health checks
+HEALTH_CHECK_INTERVAL = 60  # Health check interval (seconds)
+REQUEST_STATS_ENABLED = True  # Whether to enable request statistics
+STATS_RESET_ON_START = False  # Whether to reset statistics on startup
+
+# HttpX downloader settings
+HTTPX_HTTP2 = True  # Whether to enable HTTP/2 support
+HTTPX_FOLLOW_REDIRECTS = True  # Whether to follow redirects automatically
+
+# AioHttp downloader settings
+AIOHTTP_AUTO_DECOMPRESS = True  # Whether to decompress responses automatically
+AIOHTTP_FORCE_CLOSE = False  # Whether to force-close connections
+
+# Curl-Cffi specific settings
+CURL_BROWSER_TYPE = "chrome"  # Browser fingerprint emulation (CurlCffi downloader only)
+CURL_BROWSER_VERSION_MAP = {  # Custom browser version map (can override the default behavior)
+    "chrome": "chrome136",
+    "edge": "edge101",
+    "safari": "safari184",
+    "firefox": "firefox135",
+}
+
+# Selenium downloader settings
+SELENIUM_BROWSER_TYPE = "chrome"  # Browser type: chrome, firefox, edge
+SELENIUM_HEADLESS = True  # Whether to run headless
+SELENIUM_TIMEOUT = 30  # Timeout (seconds)
+SELENIUM_LOAD_TIMEOUT = 10  # Page load timeout (seconds)
+SELENIUM_WINDOW_WIDTH = 1920  # Window width
+SELENIUM_WINDOW_HEIGHT = 1080  # Window height
+SELENIUM_WAIT_FOR_ELEMENT = None  # Selector of an element to wait for
+SELENIUM_ENABLE_JS = True  # Whether to enable JavaScript
+SELENIUM_PROXY = None  # Proxy setting
+SELENIUM_SINGLE_BROWSER_MODE = True  # Single-browser, multi-tab mode
+SELENIUM_MAX_TABS_PER_BROWSER = 10  # Maximum tabs per browser
+
+# Playwright downloader settings
+PLAYWRIGHT_BROWSER_TYPE = "chromium"  # Browser type: chromium, firefox, webkit
+PLAYWRIGHT_HEADLESS = True  # Whether to run headless
+PLAYWRIGHT_TIMEOUT = 30000  # Timeout (milliseconds)
+PLAYWRIGHT_LOAD_TIMEOUT = 10000  # Page load timeout (milliseconds)
+PLAYWRIGHT_VIEWPORT_WIDTH = 1920  # Viewport width
+PLAYWRIGHT_VIEWPORT_HEIGHT = 1080  # Viewport height
+PLAYWRIGHT_WAIT_FOR_ELEMENT = None  # Selector of an element to wait for
+PLAYWRIGHT_PROXY = None  # Proxy setting
+PLAYWRIGHT_SINGLE_BROWSER_MODE = True  # Single-browser, multi-page mode
+PLAYWRIGHT_MAX_PAGES_PER_BROWSER = 10  # Maximum pages per browser

+# General optimization settings
+CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
+CONNECTION_KEEPALIVE_TIMEOUT = 15  # Keep-Alive timeout (seconds)
+
+# ===========================================================================
+# 9. Data storage settings
+# ===========================================================================
+
+# CSV pipeline settings
+CSV_DELIMITER = ','  # CSV delimiter
+CSV_QUOTECHAR = '"'  # CSV quote character
+CSV_INCLUDE_HEADERS = True  # Whether to include headers
+CSV_EXTRASACTION = 'ignore'  # Handling of extra fields: ignore, raise
+CSV_FIELDNAMES = None  # Field name list
+CSV_FILE = None  # CSV file path
+CSV_DICT_FILE = None  # CSV dict file path
+CSV_BATCH_SIZE = 100  # CSV batch size
+CSV_BATCH_FILE = None  # CSV batch file path
+
+# Database dedup pipeline settings
+DB_HOST = 'localhost'  # Database host
+DB_PORT = 3306  # Database port
+DB_USER = 'root'  # Database user
+DB_PASSWORD = ''  # Database password
 DB_NAME = 'crawlo'  # Database name
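
The DOWNLOAD_DELAY/RANDOMNESS/RANDOM_RANGE comments in the diff above define the effective per-request delay only by its bounds. As a minimal sketch, assuming the factor is drawn uniformly from RANDOM_RANGE (the diff does not show how the framework actually samples it), the defaults put every delay between 0.25 and 0.75 seconds:

import random

DOWNLOAD_DELAY = 0.5        # base delay from default_settings.py
RANDOMNESS = True           # randomized delay enabled
RANDOM_RANGE = [0.5, 1.5]   # factor range from default_settings.py

def effective_delay() -> float:
    """Per-request delay: DOWNLOAD_DELAY scaled by a factor in RANDOM_RANGE."""
    if not RANDOMNESS:
        return DOWNLOAD_DELAY
    # Uniform sampling is an assumption; the settings comment only fixes the bounds.
    return DOWNLOAD_DELAY * random.uniform(RANDOM_RANGE[0], RANDOM_RANGE[1])

print(round(effective_delay(), 3))  # always within [0.25, 0.75] with the defaults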
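
Likewise, the Redis key comments in the hunk describe the crawlo:{PROJECT_NAME}:... naming convention without showing the code that builds the keys. A small illustration of the documented pattern (build_key and the project name are hypothetical helpers for this sketch, not a crawlo API):

def build_key(project_name: str, *parts: str) -> str:
    # Documented pattern: crawlo:{PROJECT_NAME}:<namespace>:<name>
    return ':'.join(('crawlo', project_name, *parts))

project = 'my_project'  # hypothetical PROJECT_NAME
print(build_key(project, 'filter', 'fingerprint'))   # request dedup
print(build_key(project, 'item', 'fingerprint'))     # item dedup
print(build_key(project, 'queue', 'requests'))       # request queue
print(build_key(project, 'queue', 'processing'))     # processing queue
print(build_key(project, 'queue', 'failed'))         # failed queue
# e.g. crawlo:my_project:queue:requests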