crawlo 1.2.4-py3-none-any.whl → 1.2.6-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- crawlo/__version__.py +1 -1
- crawlo/cli.py +12 -5
- crawlo/commands/startproject.py +22 -6
- crawlo/core/engine.py +3 -1
- crawlo/core/scheduler.py +102 -6
- crawlo/filters/aioredis_filter.py +44 -91
- crawlo/queue/queue_manager.py +47 -8
- crawlo/queue/redis_priority_queue.py +9 -2
- crawlo/settings/default_settings.py +5 -7
- crawlo/templates/project/settings.py.tmpl +3 -65
- crawlo/templates/project/settings_distributed.py.tmpl +4 -7
- crawlo/templates/project/settings_gentle.py.tmpl +60 -93
- crawlo/templates/project/settings_high_performance.py.tmpl +85 -106
- crawlo/templates/project/settings_simple.py.tmpl +73 -83
- crawlo/templates/{project/run.py.tmpl → run.py.tmpl} +1 -3
- crawlo/utils/redis_connection_pool.py +19 -2
- {crawlo-1.2.4.dist-info → crawlo-1.2.6.dist-info}/METADATA +1 -1
- {crawlo-1.2.4.dist-info → crawlo-1.2.6.dist-info}/RECORD +21 -21
- {crawlo-1.2.4.dist-info → crawlo-1.2.6.dist-info}/WHEEL +0 -0
- {crawlo-1.2.4.dist-info → crawlo-1.2.6.dist-info}/entry_points.txt +0 -0
- {crawlo-1.2.4.dist-info → crawlo-1.2.6.dist-info}/top_level.txt +0 -0
crawlo/templates/project/settings.py.tmpl

```diff
@@ -1,35 +1,9 @@
 # -*- coding: UTF-8 -*-
-"""
-{{project_name}} project configuration file
-=============================
-Spider project configuration based on the Crawlo framework.
-
-🎯 Quick start:
-
-# Option 1: use the default configuration (recommended)
-from crawlo.crawler import CrawlerProcess
-process = CrawlerProcess()  # no configuration needed
-
-# Option 2: use the configuration factory
-from crawlo.config import CrawloConfig
-config = CrawloConfig.standalone()  # standalone mode
-config = CrawloConfig.distributed(redis_host='192.168.1.100')  # distributed mode
-process = CrawlerProcess(settings=config.to_dict())
-
-# Option 3: use environment variables
-from crawlo.config import CrawloConfig
-config = CrawloConfig.from_env()  # read from environment variables
-"""
 import os
 from crawlo.config import CrawloConfig
 
 # ============================== Project info ==============================
 PROJECT_NAME = '{{project_name}}'
-try:
-    from crawlo import __version__
-    VERSION = __version__
-except ImportError:
-    VERSION = '1.0.0'
 
 # ============================== Run mode selection ==============================
 
```
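For reference, the quick-start patterns documented in the removed docstring are collected below; the `CrawlerProcess` and `CrawloConfig` calls are reproduced from that docstring, not verified against the 1.2.6 API.

```python
# Bootstrap patterns reproduced from the removed template docstring.
from crawlo.crawler import CrawlerProcess
from crawlo.config import CrawloConfig

# Option 1: default configuration, no settings needed
process = CrawlerProcess()

# Option 2: configuration factory (standalone or distributed)
config = CrawloConfig.distributed(redis_host='192.168.1.100')
process = CrawlerProcess(settings=config.to_dict())

# Option 3: read settings from environment variables
config = CrawloConfig.from_env()
process = CrawlerProcess(settings=config.to_dict())
```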
```diff
@@ -101,43 +75,6 @@ INTERVAL = 5
 DEPTH_PRIORITY = 1
 MAX_RUNNING_SPIDERS = 3
 
-# ============================== Run mode selection ==============================
-# Run mode: 'standalone', 'distributed', or 'auto' (auto-detect)
-#
-# Best-fit scenarios for the three run modes:
-#
-# 1. standalone:
-#    - Use cases: development and debugging, small-scale collection, personal projects
-#    - Traits: easy to use, low resource footprint, no extra dependencies
-#    - Suggested settings:
-#      * QUEUE_TYPE = 'auto' (queue type chosen automatically)
-#      * FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter' (in-memory filter)
-#      * DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline' (in-memory dedup)
-#    - Hybrid setup (recommended):
-#      * QUEUE_TYPE = 'auto' (chosen automatically)
-#      * FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter' (Redis filter)
-#      * DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline' (Redis dedup)
-#      * Benefit: persistent Redis dedup while deployment stays simple
-#
-# 2. distributed:
-#    - Use cases: large-scale collection, multi-node cooperation, high concurrency
-#    - Traits: scales across nodes with high throughput; requires Redis
-#    - Suggested settings:
-#      * QUEUE_TYPE = 'redis' (Redis queue)
-#      * FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter' (Redis filter)
-#      * DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline' (Redis dedup)
-#    - Deployment: Redis server connection parameters must be configured
-#
-# 3. auto:
-#    - Use cases: let the framework pick the best way to run for the environment
-#    - Traits: detects the environment and selects the run mode automatically
-#    - Suggested settings:
-#      * the framework chooses the queue type based on Redis availability
-#      * in-memory filter and dedup pipeline by default
-#    - Fits: reusing one configuration across different environments
-
-RUN_MODE = 'standalone'  # standalone by default; simple and easy
-
 # ============================== Queue settings (distributed-capable) ==============================
 
 # Queue type: 'auto' (automatic), 'memory' (in-memory queue), 'redis' (distributed queue)
```
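The deleted comment block still reads as useful migration guidance; its "hybrid" recommendation for standalone projects reduces to three settings. The names below are taken verbatim from the removed comments.

```python
# Hybrid standalone setup from the removed comments: automatic queue
# selection plus Redis-backed, persistent dedup.
QUEUE_TYPE = 'auto'
FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'
DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
```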
```diff
@@ -183,8 +120,9 @@ MONGO_USE_BATCH = False  # enable batched inserts
 REQUEST_DIR = '.'
 
 # Explicitly configure the default dedup pipeline and filter to avoid redundant if-else branching
-
-
+# In standalone mode, use Redis dedup when Redis is available; otherwise fall back to in-memory dedup
+DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
+FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'
 
 # --- Redis settings (for distributed dedup and queues) ---
 REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1')
```
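Note the behavioral change here: projects generated from this template now default to the Redis-backed filter and dedup pipeline. A project with no Redis server available would need to switch back to the in-memory implementations; the class paths below appear elsewhere in this diff, but the override itself is a sketch, not an official recipe.

```python
# Override sketch for a Redis-free setup, using the in-memory classes
# referenced elsewhere in this diff.
FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
```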
crawlo/templates/project/settings_distributed.py.tmpl

```diff
@@ -5,20 +5,17 @@
 Distributed spider project configuration based on the Crawlo framework.
 Suited to large-scale collection and multi-node deployment.
 """
-
 import os
 from crawlo.config import CrawloConfig
 
 # ============================== Project info ==============================
 PROJECT_NAME = '{{project_name}}'
-try:
-    from crawlo import __version__
-    VERSION = __version__
-except ImportError:
-    VERSION = '1.0.0'
 
 # ============================== Distributed configuration notes ==============================
-
+RUN_MODE = 'distributed'
+QUEUE_TYPE = 'redis'
+FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'
+DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
 # This template is designed for distributed deployment and fits the following scenarios:
 # - large-scale data-collection jobs
 # - projects that need multiple nodes working together
```
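Pinning these four settings makes every node deterministic: each worker that loads this settings module consumes from the same Redis-backed queue. A minimal per-node override sketch follows, reusing the `os.getenv` pattern that `settings.py.tmpl` uses in this same diff; the environment-variable names are illustrative, not mandated by the template.

```python
import os

# Point each worker at the shared Redis instance, so one settings
# file can serve every node in the cluster.
REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1')
REDIS_PORT = int(os.getenv('REDIS_PORT', '6379'))
REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'
```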
crawlo/templates/project/settings_gentle.py.tmpl

```diff
@@ -1,124 +1,91 @@
 # -*- coding: UTF-8 -*-
 """
-
-
-Gentle spider project configuration based on the Crawlo framework.
-Suited to low-load crawling that is friendly to target sites.
+Gentle-mode configuration template
+Low-load settings that are friendly to target sites
 """
 
-import os
-from crawlo.config import CrawloConfig
-
 # ============================== Project info ==============================
 PROJECT_NAME = '{{project_name}}'
-
-
-
-
-
-
-
-#
-#
-#
-# - long-running monitoring spiders
-# - projects that must stay friendly to the target site
-#
-# Run-mode traits:
-# - RUN_MODE = 'standalone' (standalone mode)
-# - QUEUE_TYPE = 'memory' (in-memory queue)
-# - FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter' (in-memory filter)
-# - DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline' (in-memory dedup)
-#
-# Configuration traits:
-# - low concurrency: CONCURRENCY = 2
-# - high delay: DOWNLOAD_DELAY = 3.0 seconds
-# - randomization: RANDOMNESS enabled for extra jitter
-# - connection-pool limits: fewer connections to avoid pressuring the server
-#
-# Extension tips:
-# - for cross-session dedup, switch FILTER_CLASS and DEFAULT_DEDUP_PIPELINE to the Redis implementations
-# - tune DOWNLOAD_DELAY and RANDOM_RANGE to the characteristics of the target site
-#
-# 🎯 Recommended usage:
-# Create the gentle-mode configuration via the configuration factory:
-# from crawlo.config import CrawloConfig
-# config = CrawloConfig.presets().gentle()
-# process = CrawlerProcess(settings=config.to_dict())
-
-# ============================== Gentle-mode configuration ==============================
-# Build the gentle-mode configuration via the factory
-CONFIG = CrawloConfig.presets().gentle()
-
-# Apply the configuration
-locals().update(CONFIG.to_dict())
-
-# ============================== Network settings ==============================
-DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"
-DOWNLOAD_TIMEOUT = 60
-VERIFY_SSL = True
-
-# ============================== Low-concurrency settings ==============================
-CONCURRENCY = 2
-MAX_RUNNING_SPIDERS = 1
-DOWNLOAD_DELAY = 3.0
-RANDOMNESS = True
-RANDOM_RANGE = (2.0, 5.0)
-
-# ============================== Connection-pool settings ==============================
-CONNECTION_POOL_LIMIT = 10
-CONNECTION_POOL_LIMIT_PER_HOST = 5
-
-# ============================== Retry settings ==============================
-MAX_RETRY_TIMES = 3
-RETRY_HTTP_CODES = [408, 429, 500, 502, 503, 504, 522, 524]
-IGNORE_HTTP_CODES = [403, 404]
+
+# ============================== Gentle run mode ==============================
+# Run mode: 'standalone', 'distributed', or 'auto' (auto-detect)
+RUN_MODE = 'standalone'  # standalone - for development and small-scale collection
+
+# Concurrency settings
+CONCURRENCY = 2  # very low concurrency to reduce pressure on the target site
+DOWNLOAD_DELAY = 3.0  # longer delay to reduce pressure on the target site
+RANDOMNESS = True  # enable random delay
+RANDOM_RANGE = (0.5, 2.0)  # random delay range
 
 # ============================== Queue settings ==============================
-
+
+# Queue type: 'auto' (automatic), 'memory' (in-memory queue), 'redis' (distributed queue)
+QUEUE_TYPE = 'auto'  # auto-detect; uses the Redis queue when Redis is available
+SCHEDULER_MAX_QUEUE_SIZE = 500
+SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'
 QUEUE_MAX_RETRIES = 3
 QUEUE_TIMEOUT = 300
 
-# ==============================
-
-
-MYSQL_PORT = int(os.getenv('MYSQL_PORT', 3306))
-MYSQL_USER = os.getenv('MYSQL_USER', 'root')
-MYSQL_PASSWORD = os.getenv('MYSQL_PASSWORD', '123456')
-MYSQL_DB = os.getenv('MYSQL_DB', '{{project_name}}')
-MYSQL_TABLE = '{{project_name}}_data'
-
-# MongoDB settings
-MONGO_URI = os.getenv('MONGO_URI', 'mongodb://localhost:27017')
-MONGO_DATABASE = '{{project_name}}_db'
-MONGO_COLLECTION = '{{project_name}}_items'
-
-# ============================== Dedup settings ==============================
-# Explicitly use the in-memory dedup pipeline in gentle mode
+# ============================== Dedup / filter settings ==============================
+
+# Gentle mode uses the in-memory dedup pipeline and filter
 DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
 FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
+
+# --- Redis settings (for distributed dedup and queues) ---
+REDIS_HOST = '127.0.0.1'
+REDIS_PORT = 6379
+REDIS_PASSWORD = ''  # fill in if a password is set
+
+# Build the URL depending on whether a password is set
+if REDIS_PASSWORD:
+    REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
+else:
+    REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'
+
+# Redis key configuration has moved into the individual components, following a unified naming scheme:
+# crawlo:{project_name}:filter:fingerprint  (request dedup)
+# crawlo:{project_name}:item:fingerprint  (item dedup)
+# crawlo:{project_name}:queue:requests  (request queue)
+# crawlo:{project_name}:queue:processing  (in-flight queue)
+# crawlo:{project_name}:queue:failed  (failed queue)
+
 REDIS_TTL = 0
 CLEANUP_FP = 0
 FILTER_DEBUG = True
+DECODE_RESPONSES = True
+
+# ============================== Middleware settings ==============================
 
-# ============================== Middleware & pipelines ==============================
 MIDDLEWARES = [
+    # === Request pre-processing stage ===
     'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',
     'crawlo.middleware.download_delay.DownloadDelayMiddleware',
     'crawlo.middleware.default_header.DefaultHeaderMiddleware',
+    'crawlo.middleware.proxy.ProxyMiddleware',
+    'crawlo.middleware.offsite.OffsiteMiddleware',
+
+    # === Response-processing stage ===
     'crawlo.middleware.retry.RetryMiddleware',
     'crawlo.middleware.response_code.ResponseCodeMiddleware',
+    'crawlo.middleware.response_filter.ResponseFilterMiddleware',
 ]
 
+# ============================== Data pipeline settings ==============================
+
+# Data-processing pipelines (enabled storage backends)
 PIPELINES = [
     'crawlo.pipelines.console_pipeline.ConsolePipeline',
-    # '{{project_name}}.pipelines.DatabasePipeline',
+    # '{{project_name}}.pipelines.DatabasePipeline',  # custom database pipeline
+    # 'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
+    # 'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
 ]
 
-#
+# Explicitly put the default dedup pipeline at the head of the pipeline list
 PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
 
 # ============================== Extensions ==============================
+
 EXTENSIONS = [
     'crawlo.extension.log_interval.LogIntervalExtension',
     'crawlo.extension.log_stats.LogStats',
```
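The `PIPELINES.insert(0, ...)` call that both templates keep is load-bearing: the dedup pipeline has to see each item before any storage pipeline does, so duplicates are dropped before they reach the console or a database. A standalone check of the resulting order:

```python
# Reproduces the pipeline-ordering idiom from the templates.
DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
PIPELINES = [
    'crawlo.pipelines.console_pipeline.ConsolePipeline',
]
PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)

# Dedup now runs before any storage pipeline.
assert PIPELINES[0] == DEFAULT_DEDUP_PIPELINE
```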
```diff
@@ -126,9 +93,9 @@ EXTENSIONS = [
 ]
 
 # ============================== Logging settings ==============================
+
 LOG_LEVEL = 'INFO'
-LOG_FILE = f'logs/{{project_name}}.log'
 STATS_DUMP = True
-
-
-
+LOG_FILE = f'logs/{{project_name}}.log'
+LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
+LOG_ENCODING = 'utf-8'
```
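The new `LOG_FORMAT` can be previewed with the standard library alone; how crawlo wires `LOG_FILE` and `LOG_ENCODING` internally is not shown in this diff.

```python
import logging

# Format string copied from the new template setting.
LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
logging.getLogger('my_spider').info('format preview')
# e.g. 2024-01-01 12:00:00,000 - [my_spider] - INFO: format preview
```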
crawlo/templates/project/settings_high_performance.py.tmpl

```diff
@@ -1,156 +1,135 @@
 # -*- coding: UTF-8 -*-
 """
-
-
-High-performance spider project configuration based on the Crawlo framework.
-Optimized for large-scale, high-concurrency scenarios.
+High-performance mode configuration template
+Optimized for large-scale, high-concurrency work
 """
 
-import os
-from crawlo.config import CrawloConfig
-
 # ============================== Project info ==============================
 PROJECT_NAME = '{{project_name}}'
-try:
-    from crawlo import __version__
-    VERSION = __version__
-except ImportError:
-    VERSION = '1.0.0'
-
-# ============================== High-performance configuration ==============================
-# Build the high-performance configuration via the factory
-CONFIG = CrawloConfig.presets().large_scale(
-    redis_host=os.getenv('REDIS_HOST', '127.0.0.1'),
-    project_name='{{project_name}}'
-)
-
-# Apply the configuration
-locals().update(CONFIG.to_dict())
-
-# ============================== Network settings ==============================
-DOWNLOADER = "crawlo.downloader.cffi_downloader.CurlCffiDownloader"
-DOWNLOAD_TIMEOUT = 30
-VERIFY_SSL = True
-USE_SESSION = True
-
-# ============================== High-concurrency settings ==============================
-CONCURRENCY = 32
-MAX_RUNNING_SPIDERS = 10
-DOWNLOAD_DELAY = 0.5
-RANDOMNESS = True
-RANDOM_RANGE = (0.8, 1.2)
-
-# ============================== Connection-pool settings ==============================
-CONNECTION_POOL_LIMIT = 100
-CONNECTION_POOL_LIMIT_PER_HOST = 50
 
-# ==============================
-
-
-
+# ============================== High-performance run mode ==============================
+# Run mode: 'standalone', 'distributed', or 'auto' (auto-detect)
+RUN_MODE = 'standalone'  # standalone - for development and small-scale collection
+
+# Concurrency settings
+CONCURRENCY = 32  # high concurrency to make full use of system resources
+DOWNLOAD_DELAY = 0.1  # minimal delay for higher throughput
+RANDOMNESS = False  # random delay disabled for performance
 
 # ============================== Queue settings ==============================
-
+
+# Queue type: 'auto' (automatic), 'memory' (in-memory queue), 'redis' (distributed queue)
+QUEUE_TYPE = 'auto'  # auto-detect; uses the Redis queue when Redis is available
+SCHEDULER_MAX_QUEUE_SIZE = 5000
 SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'
-QUEUE_MAX_RETRIES =
+QUEUE_MAX_RETRIES = 3
 QUEUE_TIMEOUT = 300
-LARGE_SCALE_BATCH_SIZE = 2000
-LARGE_SCALE_CHECKPOINT_INTERVAL = 5000
 
-# ==============================
-
-
-
-
+# ============================== Dedup / filter settings ==============================
+
+# In high-performance mode, use Redis dedup when Redis is available; otherwise fall back to in-memory dedup
+DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
+FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'
+
+# --- Redis settings (for distributed dedup and queues) ---
+REDIS_HOST = '127.0.0.1'
+REDIS_PORT = 6379
+REDIS_PASSWORD = ''  # fill in if a password is set
 
 # Build the URL depending on whether a password is set
 if REDIS_PASSWORD:
-    REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/
+    REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
 else:
-    REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/
-
-#
-#
-
-
-
-
-
-MYSQL_TABLE = '{{project_name}}_data'
-MYSQL_BATCH_SIZE = 200
-MYSQL_USE_BATCH = True
-MYSQL_POOL_MIN = 10
-MYSQL_POOL_MAX = 50
-
-# MongoDB settings
-MONGO_URI = os.getenv('MONGO_URI', 'mongodb://localhost:27017')
-MONGO_DATABASE = '{{project_name}}_db'
-MONGO_COLLECTION = '{{project_name}}_items'
-MONGO_BATCH_SIZE = 200
-MONGO_USE_BATCH = True
-MONGO_MAX_POOL_SIZE = 300
-MONGO_MIN_POOL_SIZE = 50
-
-# ============================== Dedup settings ==============================
+    REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'
+
+# Redis key configuration has moved into the individual components, following a unified naming scheme:
+# crawlo:{project_name}:filter:fingerprint  (request dedup)
+# crawlo:{project_name}:item:fingerprint  (item dedup)
+# crawlo:{project_name}:queue:requests  (request queue)
+# crawlo:{project_name}:queue:processing  (in-flight queue)
+# crawlo:{project_name}:queue:failed  (failed queue)
+
 REDIS_TTL = 0
 CLEANUP_FP = 0
-FILTER_DEBUG =
+FILTER_DEBUG = True
+DECODE_RESPONSES = True
+
+# ============================== Middleware settings ==============================
 
-# ============================== Middleware & pipelines ==============================
 MIDDLEWARES = [
+    # === Request pre-processing stage ===
     'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',
     'crawlo.middleware.download_delay.DownloadDelayMiddleware',
     'crawlo.middleware.default_header.DefaultHeaderMiddleware',
     'crawlo.middleware.proxy.ProxyMiddleware',
+    'crawlo.middleware.offsite.OffsiteMiddleware',
+
+    # === Response-processing stage ===
     'crawlo.middleware.retry.RetryMiddleware',
     'crawlo.middleware.response_code.ResponseCodeMiddleware',
     'crawlo.middleware.response_filter.ResponseFilterMiddleware',
 ]
 
+# ============================== Data pipeline settings ==============================
+
+# Data-processing pipelines (enabled storage backends)
 PIPELINES = [
     'crawlo.pipelines.console_pipeline.ConsolePipeline',
-    # '{{project_name}}.pipelines.DatabasePipeline',
-    # 'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',
-    # 'crawlo.pipelines.mongo_pipeline.MongoPipeline',
+    # '{{project_name}}.pipelines.DatabasePipeline',  # custom database pipeline
+    # 'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
+    # 'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
 ]
 
+# Explicitly put the default dedup pipeline at the head of the pipeline list
+PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
+
 # ============================== Extensions ==============================
+
 EXTENSIONS = [
     'crawlo.extension.log_interval.LogIntervalExtension',
     'crawlo.extension.log_stats.LogStats',
     'crawlo.extension.logging_extension.CustomLoggerExtension',
-    # 'crawlo.extension.memory_monitor.MemoryMonitorExtension',
-    # 'crawlo.extension.request_recorder.RequestRecorderExtension',
-    # 'crawlo.extension.performance_profiler.PerformanceProfilerExtension',
+    # 'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # memory monitoring
+    # 'crawlo.extension.request_recorder.RequestRecorderExtension',  # request recording
+    # 'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # performance profiling
+    # 'crawlo.extension.health_check.HealthCheckExtension',  # health checks
 ]
 
 # ============================== Logging settings ==============================
+
 LOG_LEVEL = 'INFO'
-LOG_FILE = f'logs/{{project_name}}.log'
 STATS_DUMP = True
+LOG_FILE = f'logs/{{project_name}}.log'
+LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
+LOG_ENCODING = 'utf-8'
+
+# ============================== Performance tuning ==============================
 
-#
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
-
-# ============================== Downloader tuning ==============================
+# Connection-pool settings
+CONNECTION_POOL_LIMIT = 100
+DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # 10MB
+DOWNLOAD_WARN_SIZE = 1024 * 1024  # 1MB
+
+# Downloader tuning
+DOWNLOADER_HEALTH_CHECK = True
+HEALTH_CHECK_INTERVAL = 30
+
+# Request statistics
+REQUEST_STATS_ENABLED = True
+STATS_RESET_ON_START = False
+
+# HttpX downloader-specific settings
 HTTPX_HTTP2 = True
 HTTPX_FOLLOW_REDIRECTS = True
+
+# AioHttp downloader-specific settings
 AIOHTTP_AUTO_DECOMPRESS = True
+AIOHTTP_FORCE_CLOSE = False
+
+# General tuning
 CONNECTION_TTL_DNS_CACHE = 300
 CONNECTION_KEEPALIVE_TIMEOUT = 15
 
-#
-
+# Performance monitoring
+ENABLE_PERFORMANCE_MONITORING = True
+MEMORY_USAGE_WARNING_THRESHOLD = 800  # MB
```