crawlo 1.2.7-py3-none-any.whl → 1.2.9-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of crawlo has been flagged as possibly problematic.
- crawlo/__init__.py +63 -61
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +323 -323
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +186 -186
- crawlo/config.py +312 -312
- crawlo/config_validator.py +277 -251
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +366 -365
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +256 -251
- crawlo/crawler.py +1103 -1100
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -107
- crawlo/downloader/__init__.py +273 -266
- crawlo/downloader/aiohttp_downloader.py +226 -228
- crawlo/downloader/cffi_downloader.py +245 -256
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +39 -39
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +43 -43
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +234 -234
- crawlo/filters/memory_filter.py +269 -269
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +136 -136
- crawlo/middleware/offsite.py +114 -114
- crawlo/middleware/proxy.py +386 -368
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +163 -163
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/middleware/simple_proxy.py +65 -0
- crawlo/mode_manager.py +211 -211
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +379 -338
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +157 -157
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +223 -223
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +317 -317
- crawlo/pipelines/pipeline_manager.py +62 -62
- crawlo/pipelines/redis_dedup_pipeline.py +167 -167
- crawlo/project.py +290 -315
- crawlo/queue/pqueue.py +37 -37
- crawlo/queue/queue_manager.py +379 -378
- crawlo/queue/redis_priority_queue.py +306 -306
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +216 -220
- crawlo/settings/setting_manager.py +163 -122
- crawlo/spider/__init__.py +639 -639
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +30 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +261 -288
- crawlo/templates/project/settings_distributed.py.tmpl +174 -157
- crawlo/templates/project/settings_gentle.py.tmpl +95 -100
- crawlo/templates/project/settings_high_performance.py.tmpl +125 -134
- crawlo/templates/project/settings_minimal.py.tmpl +30 -0
- crawlo/templates/project/settings_simple.py.tmpl +96 -98
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/run.py.tmpl +47 -45
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/tools/__init__.py +200 -182
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/{cleaners → tools}/data_formatter.py +225 -225
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +290 -36
- crawlo/tools/distributed_coordinator.py +388 -387
- crawlo/{cleaners → tools}/encoding_converter.py +127 -126
- crawlo/tools/request_tools.py +83 -0
- crawlo/tools/retry_mechanism.py +224 -221
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/{cleaners → tools}/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +35 -35
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +142 -142
- crawlo/utils/error_handler.py +123 -123
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +187 -128
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +351 -351
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +218 -218
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- {crawlo-1.2.7.dist-info → crawlo-1.2.9.dist-info}/METADATA +1011 -764
- crawlo-1.2.9.dist-info/RECORD +219 -0
- examples/__init__.py +7 -7
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +107 -237
- tests/cleaners_example.py +160 -160
- tests/config_validation_demo.py +143 -103
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_pipelines.py +67 -0
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +151 -0
- tests/response_improvements_example.py +144 -144
- tests/test_advanced_tools.py +148 -148
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_cleaners.py +54 -54
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +153 -0
- tests/test_config_validator.py +182 -193
- tests/test_crawlo_proxy_integration.py +109 -173
- tests/test_date_tools.py +123 -123
- tests/test_default_header_middleware.py +158 -158
- tests/test_distributed.py +65 -0
- tests/test_double_crawlo_fix.py +207 -207
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +103 -103
- tests/test_integration.py +169 -357
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_mode_consistency.py +51 -51
- tests/test_offsite_middleware.py +221 -221
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_middleware_refactored.py +185 -0
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +176 -176
- tests/test_random_user_agent.py +73 -0
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +112 -0
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +241 -241
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -153
- tests/test_user_agents.py +97 -0
- tests/tools_example.py +260 -257
- tests/verify_distributed.py +117 -0
- crawlo/cleaners/__init__.py +0 -61
- crawlo/utils/date_tools.py +0 -290
- crawlo-1.2.7.dist-info/RECORD +0 -209
- {crawlo-1.2.7.dist-info → crawlo-1.2.9.dist-info}/WHEEL +0 -0
- {crawlo-1.2.7.dist-info → crawlo-1.2.9.dist-info}/entry_points.txt +0 -0
- {crawlo-1.2.7.dist-info → crawlo-1.2.9.dist-info}/top_level.txt +0 -0
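Beyond the line-count churn, the listing records a module reshuffle: `data_formatter.py`, `encoding_converter.py`, and `text_cleaner.py` move from `crawlo/cleaners/` into `crawlo/tools/` (with `crawlo/cleaners/__init__.py` deleted), and `date_tools.py` moves from `crawlo/utils/` to `crawlo/tools/`. A minimal migration sketch, assuming downstream import paths simply follow the new file locations (not otherwise confirmed by this diff):

```python
# 1.2.7 layout (old import paths):
# from crawlo.cleaners import text_cleaner, data_formatter, encoding_converter
# from crawlo.utils import date_tools

# 1.2.9 layout, assuming public imports follow the file moves listed above:
from crawlo.tools import text_cleaner, data_formatter, encoding_converter
from crawlo.tools import date_tools
```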
crawlo/templates/project/settings_distributed.py.tmpl

@@ -1,158 +1,175 @@
 # -*- coding: UTF-8 -*-
 """
 {{project_name}} project configuration (distributed edition)
 =============================
 Distributed crawler project configuration based on the Crawlo framework.
 Suited to large-scale data collection and multi-node deployment.
 """
 import os
 from crawlo.config import CrawloConfig

 # ============================== Project basics ==============================
 PROJECT_NAME = '{{project_name}}'

 # ============================== Distributed mode notes ==============================
 RUN_MODE = 'distributed'
 QUEUE_TYPE = 'redis'
 FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'
 DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
 # This template is designed for distributed deployment and fits:
 # - large-scale data collection jobs
 # - projects that need multiple cooperating nodes
 # - high-concurrency, high-throughput workloads
 #
 # Run-mode characteristics:
 # - RUN_MODE = 'distributed' (distributed mode)
 # - QUEUE_TYPE = 'redis' (Redis queue for distributed coordination)
 # - FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter' (Redis filter)
 # - DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline' (Redis dedup)
 #
 # Deployment requirements:
 # - configure the Redis server connection parameters
 # - prefer a dedicated Redis database to avoid data collisions
 # - in multi-node deployments, every node must reach the same Redis instance
 #
 # 🎯 Recommended usage:
 # create the distributed configuration through the config factory:
 # from crawlo.config import CrawloConfig
 # config = CrawloConfig.distributed(
 #     redis_host='your_redis_host',
 #     redis_port=6379,
 #     redis_password='your_password',
 #     project_name='{{project_name}}'
 # )
 # process = CrawlerProcess(settings=config.to_dict())

 # ============================== Distributed configuration ==============================
 # Build the distributed configuration with the config factory
 CONFIG = CrawloConfig.distributed(
     redis_host=os.getenv('REDIS_HOST', '127.0.0.1'),
     redis_port=int(os.getenv('REDIS_PORT', 6379)),
     redis_password=os.getenv('REDIS_PASSWORD', ''),
     project_name='{{project_name}}',
     concurrency=16,
     download_delay=1.0
 )

 # Apply the configuration
 locals().update(CONFIG.to_dict())

 # ============================== Network request configuration ==============================

-# (1.2.7 template body for lines 62-157 is not recoverable from this diff view)
+# Note: the framework already ships default network request settings; the items below rarely need changes.
+# To customize, uncomment and adjust the values.
+
+# DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"
+# DOWNLOAD_TIMEOUT = 60
+# VERIFY_SSL = True
+
+# ============================== Concurrency configuration ==============================
+
+# Note: concurrency is normally set through CrawloConfig; the items below are for fine-grained tuning.
+
+# CONCURRENCY = 16
+# MAX_RUNNING_SPIDERS = 5
+# DOWNLOAD_DELAY = 1.0
+
+# ============================== Queue configuration ==============================
+
+# Note: the queue is normally set through CrawloConfig; the items below are for fine-grained tuning.
+
+# SCHEDULER_MAX_QUEUE_SIZE = 5000
+# QUEUE_MAX_RETRIES = 5
+# QUEUE_TIMEOUT = 300
+
+# ============================== Redis configuration ==============================
+
+# Note: Redis is normally set through CrawloConfig; the items below are for fine-grained tuning.
+
+# REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1')
+# REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
+# REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')
+# REDIS_DB = int(os.getenv('REDIS_DB', 0))
+
+# Build the URL depending on whether a password is set
+# if REDIS_PASSWORD:
+#     REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
+# else:
+#     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
+
+# ============================== Data storage configuration ==============================
+
+# --- MySQL ---
+MYSQL_HOST = os.getenv('MYSQL_HOST', '127.0.0.1')
+MYSQL_PORT = int(os.getenv('MYSQL_PORT', 3306))
+MYSQL_USER = os.getenv('MYSQL_USER', 'root')
+MYSQL_PASSWORD = os.getenv('MYSQL_PASSWORD', '123456')
+MYSQL_DB = os.getenv('MYSQL_DB', '{{project_name}}')
+MYSQL_TABLE = '{{project_name}}_data'
+MYSQL_BATCH_SIZE = 100
+MYSQL_USE_BATCH = True
+
+# --- MongoDB ---
+MONGO_URI = os.getenv('MONGO_URI', 'mongodb://localhost:27017')
+MONGO_DATABASE = '{{project_name}}_db'
+MONGO_COLLECTION = '{{project_name}}_items'
+MONGO_BATCH_SIZE = 100
+MONGO_USE_BATCH = True
+
+# ============================== Deduplication configuration ==============================
+
+# Note: the framework already ships default dedup settings; the items below rarely need changes.
+# To customize, uncomment and adjust the values.
+
+# Explicitly pin the Redis dedup pipeline in distributed mode
+# DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
+# FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'
+# REDIS_TTL = 0
+# CLEANUP_FP = 0
+# FILTER_DEBUG = True
+
+# ============================== Custom middleware configuration ==============================
+# Note: the framework's default middlewares load automatically; add or override them here
+
+# Middleware list (framework defaults + user-defined middlewares)
+# MIDDLEWARES = [
+#     '{{project_name}}.middlewares.CustomMiddleware',  # example custom middleware
+# ]
+
+# ============================== Custom item pipeline configuration ==============================
+# Note: the framework's default pipelines load automatically; add or override them here
+
+# Item pipeline list (framework defaults + user-defined pipelines)
+# PIPELINES = [
+#     '{{project_name}}.pipelines.DatabasePipeline',  # custom database pipeline
+#     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
+#     'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
+# ]
+
+# ============================== Custom extensions ==============================
+# Note: the framework's default extensions load automatically; add or override them here
+
+# Extension list (framework defaults + user-defined extensions)
+# EXTENSIONS = [
+#     'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # memory monitoring
+#     'crawlo.extension.request_recorder.RequestRecorderExtension',  # request recording
+#     'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # performance profiling
+#     'crawlo.extension.health_check.HealthCheckExtension',  # health check
+# ]
+
+# ============================== Logging configuration ==============================
+
+LOG_LEVEL = 'INFO'
+LOG_FILE = f'logs/{{project_name}}.log'
+STATS_DUMP = True
+
+# ============================== Proxy configuration ==============================
+
+PROXY_ENABLED = False
+PROXY_API_URL = ""
+PROXY_EXTRACTOR = "proxy"
+PROXY_REFRESH_INTERVAL = 60
+PROXY_API_TIMEOUT = 10
+
+# ============================== Custom settings ==============================
 # Add project-specific configuration items here
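The rewritten distributed template centers on the config-factory pattern its own comments describe. A runnable sketch of that flow, assuming `CrawlerProcess` is importable from `crawlo.crawler` (the module appears in the listing above, but the exact import path is our assumption):

```python
import os

from crawlo.config import CrawloConfig
from crawlo.crawler import CrawlerProcess  # import path assumed, see lead-in

# Same factory call the template makes, with Redis coordinates read from the environment
config = CrawloConfig.distributed(
    redis_host=os.getenv('REDIS_HOST', '127.0.0.1'),
    redis_port=int(os.getenv('REDIS_PORT', 6379)),
    redis_password=os.getenv('REDIS_PASSWORD', ''),
    project_name='myproject',   # the template substitutes {{project_name}} here
    concurrency=16,             # template default
    download_delay=1.0,         # template default
)

# The settings module applies the factory output by dumping it into module
# globals (locals().update(CONFIG.to_dict()) in the template); a runner passes
# the same dict straight to the crawler process, as the template's comments show:
process = CrawlerProcess(settings=config.to_dict())
```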
crawlo/templates/project/settings_gentle.py.tmpl

@@ -1,101 +1,96 @@
 # -*- coding: UTF-8 -*-
 """
 Gentle mode configuration template
 Low-load configuration that stays friendly to target sites
 """

 # ============================== Project basics ==============================
 PROJECT_NAME = '{{project_name}}'

 # ============================== Gentle run mode ==============================
 # Run mode: 'standalone' (single machine), 'distributed' (distributed), 'auto' (auto-detect)
 RUN_MODE = 'standalone'  # standalone mode - for development and small-scale collection

 # Concurrency settings
 CONCURRENCY = 2  # very low concurrency to reduce pressure on target sites
 DOWNLOAD_DELAY = 3.0  # longer delay to reduce pressure on target sites
 RANDOMNESS = True  # enable random delays
 RANDOM_RANGE = (0.5, 2.0)  # random delay range

 # ============================== Queue configuration ==============================

 # Queue type: 'auto' (auto-select), 'memory' (in-memory queue), 'redis' (distributed queue)
 QUEUE_TYPE = 'auto'  # auto-detect; uses the Redis queue when Redis is available
 SCHEDULER_MAX_QUEUE_SIZE = 500
 SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'
 QUEUE_MAX_RETRIES = 3
 QUEUE_TIMEOUT = 300

 # ============================== Dedup filter configuration ==============================

 # Gentle mode uses the in-memory dedup pipeline and filter
 DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
 FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'

 # --- Redis configuration (for distributed dedup and queues) ---
 REDIS_HOST = '127.0.0.1'
 REDIS_PORT = 6379
 REDIS_PASSWORD = ''  # fill in if a password is set

 # Build the URL depending on whether a password is set
 if REDIS_PASSWORD:
     REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
 else:
     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'

 # Redis key settings live in the individual components and follow a unified naming scheme:
 # crawlo:{project_name}:filter:fingerprint (request dedup)
 # crawlo:{project_name}:item:fingerprint (item dedup)
 # crawlo:{project_name}:queue:requests (request queue)
 # crawlo:{project_name}:queue:processing (in-flight queue)
 # crawlo:{project_name}:queue:failed (failed queue)

 REDIS_TTL = 0
 CLEANUP_FP = 0
 FILTER_DEBUG = True
 DECODE_RESPONSES = True

-# ==============================
-# (1.2.7 template body for lines 59-96 is not recoverable from this diff view; only a stray 'PIPELINES' token survives)
-LOG_LEVEL = 'INFO'
-STATS_DUMP = True
-LOG_FILE = f'logs/{{project_name}}.log'
-LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
+# ============================== Custom middleware configuration ==============================
+# Note: the framework's default middlewares load automatically; add or override them here
+
+# Middleware list (framework defaults + user-defined middlewares)
+# MIDDLEWARES = [
+#     '{{project_name}}.middlewares.CustomMiddleware',  # example custom middleware
+# ]
+
+# ============================== Custom item pipeline configuration ==============================
+# Note: the framework's default pipelines load automatically; add or override them here
+
+# Item pipeline list (framework defaults + user-defined pipelines)
+# PIPELINES = [
+#     '{{project_name}}.pipelines.DatabasePipeline',  # custom database pipeline
+#     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
+#     'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
+# ]
+
+# Explicitly put the default dedup pipeline at the head of the pipeline list
+# PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
+
+# ============================== Custom extensions ==============================
+# Note: the framework's default extensions load automatically; add or override them here
+
+# Extension list (framework defaults + user-defined extensions)
+# EXTENSIONS = [
+#     'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # memory monitoring
+#     'crawlo.extension.request_recorder.RequestRecorderExtension',  # request recording
+#     'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # performance profiling
+#     'crawlo.extension.health_check.HealthCheckExtension',  # health check
+# ]
+
+# ============================== Logging configuration ==============================
+
+LOG_LEVEL = 'INFO'
+STATS_DUMP = True
+LOG_FILE = f'logs/{{project_name}}.log'
+LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
 LOG_ENCODING = 'utf-8'
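Both templates keep the inline `REDIS_URL` construction that inserts the password segment only when one is set. The same logic as a small standalone helper (the function name is ours; the URL formats are copied from the templates):

```python
def build_redis_url(host: str = '127.0.0.1', port: int = 6379,
                    password: str = '', db: int = 0) -> str:
    """Compose a redis:// URL, adding the ':password@' segment only when set."""
    if password:
        return f'redis://:{password}@{host}:{port}/{db}'
    return f'redis://{host}:{port}/{db}'

# Matches the gentle template's two branches (database 0):
assert build_redis_url() == 'redis://127.0.0.1:6379/0'
assert build_redis_url(password='s3cret') == 'redis://:s3cret@127.0.0.1:6379/0'
```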