crawlo-1.1.2-py3-none-any.whl → crawlo-1.1.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crawlo might be problematic.
- crawlo/__version__.py +1 -1
- crawlo/core/scheduler.py +20 -16
- crawlo/downloader/httpx_downloader.py +14 -12
- crawlo/exceptions.py +4 -0
- crawlo/extension/__init__.py +17 -10
- crawlo/extension/health_check.py +142 -0
- crawlo/extension/log_interval.py +27 -18
- crawlo/extension/log_stats.py +62 -24
- crawlo/extension/logging_extension.py +18 -9
- crawlo/extension/memory_monitor.py +89 -0
- crawlo/extension/performance_profiler.py +118 -0
- crawlo/extension/request_recorder.py +108 -0
- crawlo/filters/aioredis_filter.py +2 -2
- crawlo/middleware/retry.py +3 -3
- crawlo/network/request.py +2 -2
- crawlo/network/response.py +25 -23
- crawlo/pipelines/__init__.py +9 -0
- crawlo/pipelines/bloom_dedup_pipeline.py +157 -0
- crawlo/pipelines/database_dedup_pipeline.py +225 -0
- crawlo/pipelines/memory_dedup_pipeline.py +116 -0
- crawlo/pipelines/mongo_pipeline.py +81 -66
- crawlo/pipelines/mysql_pipeline.py +165 -43
- crawlo/pipelines/redis_dedup_pipeline.py +163 -0
- crawlo/queue/queue_manager.py +4 -0
- crawlo/queue/redis_priority_queue.py +20 -3
- crawlo/settings/default_settings.py +119 -66
- crawlo/subscriber.py +62 -37
- crawlo/templates/project/items.py.tmpl +1 -1
- crawlo/templates/project/middlewares.py.tmpl +73 -49
- crawlo/templates/project/pipelines.py.tmpl +52 -290
- crawlo/templates/project/run.py.tmpl +20 -7
- crawlo/templates/project/settings.py.tmpl +35 -3
- crawlo/templates/spider/spider.py.tmpl +1 -37
- crawlo/utils/controlled_spider_mixin.py +109 -5
- crawlo-1.1.4.dist-info/METADATA +403 -0
- {crawlo-1.1.2.dist-info → crawlo-1.1.4.dist-info}/RECORD +40 -31
- examples/controlled_spider_example.py +205 -0
- crawlo-1.1.2.dist-info/METADATA +0 -567
- {crawlo-1.1.2.dist-info → crawlo-1.1.4.dist-info}/WHEEL +0 -0
- {crawlo-1.1.2.dist-info → crawlo-1.1.4.dist-info}/entry_points.txt +0 -0
- {crawlo-1.1.2.dist-info → crawlo-1.1.4.dist-info}/top_level.txt +0 -0
crawlo/settings/default_settings.py
CHANGED

@@ -29,54 +29,54 @@ DOWNLOADER = "crawlo.downloader.aiohttp_downloader.AioHttpDownloader"
 # 可以在Spider类中设置 custom_settings = {'DOWNLOADER_TYPE': 'httpx'}
 
 # 请求超时与安全
-DOWNLOAD_TIMEOUT = 30
-VERIFY_SSL = True
-USE_SESSION = True
+DOWNLOAD_TIMEOUT = 30  # 下载超时时间(秒)
+VERIFY_SSL = True  # 是否验证 SSL 证书
+USE_SESSION = True  # 是否使用持久化会话(aiohttp 特有)
 
 # 请求延迟控制
-DOWNLOAD_DELAY = 1.0
-RANDOM_RANGE = (0.8, 1.2)
-RANDOMNESS = True
+DOWNLOAD_DELAY = 1.0  # 基础延迟(秒)
+RANDOM_RANGE = (0.8, 1.2)  # 随机延迟系数范围
+RANDOMNESS = True  # 是否启用随机延迟
 
 # 重试策略
-MAX_RETRY_TIMES = 3
-RETRY_PRIORITY = -1
+MAX_RETRY_TIMES = 3  # 最大重试次数
+RETRY_PRIORITY = -1  # 重试请求的优先级调整
 RETRY_HTTP_CODES = [408, 429, 500, 502, 503, 504, 522, 524]  # 触发重试的状态码
-IGNORE_HTTP_CODES = [403, 404]
-ALLOWED_CODES = []
+IGNORE_HTTP_CODES = [403, 404]  # 直接标记成功、不重试的状态码
+ALLOWED_CODES = []  # 允许的状态码(空表示不限制)
 
 # 连接与响应大小限制
-CONNECTION_POOL_LIMIT = 50
+CONNECTION_POOL_LIMIT = 50  # 最大并发连接数(连接池大小)
 CONNECTION_POOL_LIMIT_PER_HOST = 20  # 每个主机的连接池大小
-DOWNLOAD_MAXSIZE = 10 * 1024 * 1024
-DOWNLOAD_WARN_SIZE = 1024 * 1024
+DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # 最大响应体大小(10MB)
+DOWNLOAD_WARN_SIZE = 1024 * 1024  # 响应体警告阈值(1MB)
 DOWNLOAD_RETRY_TIMES = MAX_RETRY_TIMES  # 下载器内部重试次数(复用全局)
 
 # 下载统计配置
-DOWNLOADER_STATS = True
-DOWNLOAD_STATS = True
+DOWNLOADER_STATS = True  # 是否启用下载器统计功能
+DOWNLOAD_STATS = True  # 是否记录下载时间和大小统计
 
 # ============================== 并发与调度 ==============================
 
-CONCURRENCY = 8
-INTERVAL = 5
-DEPTH_PRIORITY = 1
-MAX_RUNNING_SPIDERS = 3
+CONCURRENCY = 8  # 单个爬虫的并发请求数
+INTERVAL = 5  # 日志统计输出间隔(秒)
+DEPTH_PRIORITY = 1  # 深度优先策略优先级
+MAX_RUNNING_SPIDERS = 3  # 最大同时运行的爬虫数
 
 # ============================== 队列配置 ==============================
 
 # 🎯 运行模式选择:'standalone'(单机), 'distributed'(分布式), 'auto'(自动检测)
-RUN_MODE = 'standalone'
+RUN_MODE = 'standalone'  # 默认单机模式,简单易用
 
 # 队列类型选择:'memory'(内存), 'redis'(分布式), 'auto'(自动选择)
-QUEUE_TYPE = 'memory'
+QUEUE_TYPE = 'memory'  # 默认内存队列,无需外部依赖
 SCHEDULER_MAX_QUEUE_SIZE = 2000  # 调度器队列最大容量
 SCHEDULER_QUEUE_NAME = 'crawlo:requests'  # Redis 队列名称
-QUEUE_MAX_RETRIES = 3
-QUEUE_TIMEOUT = 300
+QUEUE_MAX_RETRIES = 3  # 队列操作最大重试次数
+QUEUE_TIMEOUT = 300  # 队列操作超时时间(秒)
 
 # 大规模爬取优化配置
-LARGE_SCALE_BATCH_SIZE = 1000
+LARGE_SCALE_BATCH_SIZE = 1000  # 批处理大小
 LARGE_SCALE_CHECKPOINT_INTERVAL = 5000  # 进度保存间隔
 LARGE_SCALE_MAX_MEMORY_USAGE = 500  # 最大内存使用量(MB)
 
@@ -89,13 +89,14 @@ MYSQL_USER = 'root'
 MYSQL_PASSWORD = '123456'
 MYSQL_DB = 'crawl'
 MYSQL_TABLE = 'crawlo'
-MYSQL_BATCH_SIZE = 100
+MYSQL_BATCH_SIZE = 100  # 批量插入条数
+MYSQL_USE_BATCH = False  # 是否启用批量插入
 
 # MySQL 连接池
-MYSQL_FLUSH_INTERVAL = 5
+MYSQL_FLUSH_INTERVAL = 5  # 缓存刷新间隔(秒)
 MYSQL_POOL_MIN = 5
 MYSQL_POOL_MAX = 20
-MYSQL_ECHO = False
+MYSQL_ECHO = False  # 是否打印 SQL 日志
 
 # --- MongoDB 配置 ---
 MONGO_URI = 'mongodb://user:password@host:27017'
@@ -103,12 +104,24 @@ MONGO_DATABASE = 'scrapy_data'
 MONGO_COLLECTION = 'crawled_items'
 MONGO_MAX_POOL_SIZE = 200
 MONGO_MIN_POOL_SIZE = 20
+MONGO_BATCH_SIZE = 100  # 批量插入条数
+MONGO_USE_BATCH = False  # 是否启用批量插入
 
 # ============================== 去重过滤配置 ==============================
 
 # 请求指纹存储目录(文件过滤器使用)
 REQUEST_DIR = '.'
 
+# 根据运行模式自动选择去重管道
+# 单机模式默认使用内存去重管道
+# 分布式模式默认使用Redis去重管道
+if RUN_MODE == 'distributed':
+    # 分布式模式下默认使用Redis去重管道
+    DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.RedisDedupPipeline'
+else:
+    # 单机模式下默认使用内存去重管道
+    DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.MemoryDedupPipeline'
+
 # 去重过滤器类(二选一)
 FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
 # FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'  # 分布式去重
@@ -117,76 +130,116 @@ FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
 REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1')
 REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
 REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')  # 默认无密码
+REDIS_DB = int(os.getenv('REDIS_DB', 0))  # Redis 数据库编号,默认为 0
 # 🔧 根据是否有密码生成不同的 URL 格式
 if REDIS_PASSWORD:
-    REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/
+    REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
 else:
-    REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/
-REDIS_KEY = 'request_fingerprint'
-REDIS_TTL = 0
-CLEANUP_FP = 0
-FILTER_DEBUG = True
-DECODE_RESPONSES = True
+    REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
+REDIS_KEY = 'request_fingerprint'  # Redis 中存储指纹的键名
+REDIS_TTL = 0  # 指纹过期时间(0 表示永不过期)
+CLEANUP_FP = 0  # 程序结束时是否清理指纹(0=不清理)
+FILTER_DEBUG = True  # 是否开启去重调试日志
+DECODE_RESPONSES = True  # Redis 返回是否解码为字符串
 
 # ============================== 中间件配置 ==============================
 
 MIDDLEWARES = [
     # === 请求预处理阶段 ===
-    'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',
-    'crawlo.middleware.download_delay.DownloadDelayMiddleware',
-    'crawlo.middleware.default_header.DefaultHeaderMiddleware',
-    'crawlo.middleware.proxy.ProxyMiddleware',
+    'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',  # 1. 忽略无效请求
+    'crawlo.middleware.download_delay.DownloadDelayMiddleware',  # 2. 控制请求频率
+    'crawlo.middleware.default_header.DefaultHeaderMiddleware',  # 3. 添加默认请求头
+    'crawlo.middleware.proxy.ProxyMiddleware',  # 4. 设置代理
 
     # === 响应处理阶段 ===
-    'crawlo.middleware.retry.RetryMiddleware',
-    'crawlo.middleware.response_code.ResponseCodeMiddleware',
-    'crawlo.middleware.response_filter.ResponseFilterMiddleware',
+    'crawlo.middleware.retry.RetryMiddleware',  # 5. 失败请求重试
+    'crawlo.middleware.response_code.ResponseCodeMiddleware',  # 6. 处理特殊状态码
+    'crawlo.middleware.response_filter.ResponseFilterMiddleware',  # 7. 响应内容过滤
 ]
 
 # ============================== 扩展与管道 ==============================
 
 # 数据处理管道(启用的存储方式)
 PIPELINES = [
-    'crawlo.pipelines.console_pipeline.ConsolePipeline',
+    'crawlo.pipelines.console_pipeline.ConsolePipeline',  # 控制台输出
     # 'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL 存储(可选)
 ]
 
+# 根据运行模式自动配置默认去重管道
+if RUN_MODE == 'distributed':
+    # 分布式模式下添加Redis去重管道
+    PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
+else:
+    # 单机模式下添加内存去重管道
+    PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
+
 # 扩展组件(监控与日志)
 EXTENSIONS = [
-    'crawlo.extension.log_interval.LogIntervalExtension',
-    'crawlo.extension.log_stats.LogStats',
-    'crawlo.extension.logging_extension.CustomLoggerExtension',
+    'crawlo.extension.log_interval.LogIntervalExtension',  # 定时日志
+    'crawlo.extension.log_stats.LogStats',  # 统计信息
+    'crawlo.extension.logging_extension.CustomLoggerExtension',  # 自定义日志
+    'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # 内存监控
+    # 'crawlo.extension.request_recorder.RequestRecorderExtension',  # 请求记录
+    # 'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # 性能分析
+    # 'crawlo.extension.health_check.HealthCheckExtension',  # 健康检查
 ]
 
 # ============================== 日志与监控 ==============================
 
-LOG_LEVEL = 'INFO'
-STATS_DUMP = True
-
+LOG_LEVEL = 'INFO'  # 日志级别: DEBUG/INFO/WARNING/ERROR
+STATS_DUMP = True  # 是否周期性输出统计信息
+
+# ============================== 扩展配置 ==============================
+
+# 内存监控扩展配置
+MEMORY_MONITOR_ENABLED = False  # 是否启用内存监控
+MEMORY_MONITOR_INTERVAL = 60  # 内存检查间隔(秒)
+MEMORY_WARNING_THRESHOLD = 80.0  # 内存使用警告阈值(百分比)
+MEMORY_CRITICAL_THRESHOLD = 90.0  # 内存使用严重阈值(百分比)
+
+# 请求记录扩展配置
+REQUEST_RECORDER_ENABLED = False  # 是否启用请求记录
+REQUEST_RECORDER_OUTPUT_DIR = 'requests_log'  # 请求记录输出目录
+REQUEST_RECORDER_MAX_FILE_SIZE = 10 * 1024 * 1024  # 单个记录文件最大大小(字节)
+
+# 性能分析扩展配置
+PERFORMANCE_PROFILER_ENABLED = False  # 是否启用性能分析
+PERFORMANCE_PROFILER_OUTPUT_DIR = 'profiling'  # 性能分析输出目录
+PERFORMANCE_PROFILER_INTERVAL = 300  # 定期保存分析结果间隔(秒)
+
+# 健康检查扩展配置
+HEALTH_CHECK_ENABLED = True  # 是否启用健康检查
+HEALTH_CHECK_INTERVAL = 60  # 健康检查间隔(秒)
+
+# ============================== 日志与监控 ==============================
+
+LOG_LEVEL = 'INFO'  # 日志级别: DEBUG/INFO/WARNING/ERROR
+STATS_DUMP = True  # 是否周期性输出统计信息
+LOG_FILE = f'logs/{PROJECT_NAME}.log'  # 日志文件路径
 LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
 LOG_ENCODING = 'utf-8'
 
 # ============================== 代理配置 ==============================
 
-PROXY_ENABLED = False
+PROXY_ENABLED = False  # 是否启用代理
 PROXY_API_URL = "https://api.proxyprovider.com/get"  # 代理获取接口(请替换为真实地址)
 
 # 代理提取方式(支持字段路径或函数)
-PROXY_EXTRACTOR = "proxy"
+PROXY_EXTRACTOR = "proxy"  # 如返回 {"proxy": "http://1.1.1.1:8080"}
 
 # 代理刷新控制
-PROXY_REFRESH_INTERVAL = 60
-PROXY_API_TIMEOUT = 10
+PROXY_REFRESH_INTERVAL = 60  # 代理刷新间隔(秒)
+PROXY_API_TIMEOUT = 10  # 请求代理 API 超时时间
 
 # ============================== Curl-Cffi 特有配置 ==============================
 
 # 浏览器指纹模拟(仅 CurlCffi 下载器有效)
-CURL_BROWSER_TYPE = "chrome"
+CURL_BROWSER_TYPE = "chrome"  # 可选: chrome, edge, safari, firefox 或版本如 chrome136
 
 # 自定义浏览器版本映射(可覆盖默认行为)
 CURL_BROWSER_VERSION_MAP = {
     "chrome": "chrome136",
-    "edge": "edge101",
+    "edge": "edge101",
     "safari": "safari184",
     "firefox": "firefox135",
     # 示例:旧版本测试
@@ -194,8 +247,8 @@ CURL_BROWSER_VERSION_MAP = {
 }
 
 # Curl-Cffi 优化配置
-CURL_RANDOMIZE_DELAY = False
-CURL_RETRY_BACKOFF = True
+CURL_RANDOMIZE_DELAY = False  # 是否启用随机延迟
+CURL_RETRY_BACKOFF = True  # 是否启用指数退避重试
 
 # 默认请求头(可被 Spider 覆盖)
 DEFAULT_REQUEST_HEADERS = {
@@ -206,21 +259,21 @@ DEFAULT_REQUEST_HEADERS = {
 # ============================== 下载器优化配置 ==============================
 
 # 下载器健康检查
-DOWNLOADER_HEALTH_CHECK = True
-HEALTH_CHECK_INTERVAL = 60
+DOWNLOADER_HEALTH_CHECK = True  # 是否启用下载器健康检查
+HEALTH_CHECK_INTERVAL = 60  # 健康检查间隔(秒)
 
 # 请求统计配置
-REQUEST_STATS_ENABLED = True
-STATS_RESET_ON_START = False
+REQUEST_STATS_ENABLED = True  # 是否启用请求统计
+STATS_RESET_ON_START = False  # 启动时是否重置统计
 
 # HttpX 下载器专用配置
-HTTPX_HTTP2 = True
-HTTPX_FOLLOW_REDIRECTS = True
+HTTPX_HTTP2 = True  # 是否启用HTTP/2支持
+HTTPX_FOLLOW_REDIRECTS = True  # 是否自动跟随重定向
 
 # AioHttp 下载器专用配置
-AIOHTTP_AUTO_DECOMPRESS = True
-AIOHTTP_FORCE_CLOSE = False
+AIOHTTP_AUTO_DECOMPRESS = True  # 是否自动解压响应
+AIOHTTP_FORCE_CLOSE = False  # 是否强制关闭连接
 
 # 通用优化配置
-CONNECTION_TTL_DNS_CACHE = 300
-CONNECTION_KEEPALIVE_TIMEOUT = 15
+CONNECTION_TTL_DNS_CACHE = 300  # DNS缓存TTL(秒)
+CONNECTION_KEEPALIVE_TIMEOUT = 15  # Keep-Alive超时(秒)
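Read together, the settings hunks above do more than add inline comments: 1.1.4 introduces `REDIS_DB` and appends it to the generated `REDIS_URL`, adds batch-insert switches for MySQL/MongoDB, and now derives a default dedup pipeline from `RUN_MODE`. The following is a minimal, self-contained sketch that reproduces only the URL composition and pipeline-selection logic shown in the 1.1.4 lines above; it is illustrative, not the packaged `default_settings.py`.

```python
import os

RUN_MODE = 'standalone'  # 'standalone' or 'distributed', as in the hunk above

REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1')
REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')
REDIS_DB = int(os.getenv('REDIS_DB', 0))  # new in 1.1.4

# 1.1.4 builds the URL with the database index appended
if REDIS_PASSWORD:
    REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
else:
    REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'

# Run-mode dependent dedup pipeline default, mirroring the added block
if RUN_MODE == 'distributed':
    DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.RedisDedupPipeline'
else:
    DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.MemoryDedupPipeline'

PIPELINES = ['crawlo.pipelines.console_pipeline.ConsolePipeline']
PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)  # dedup pipeline always runs first

if __name__ == '__main__':
    print(REDIS_URL)     # e.g. redis://127.0.0.1:6379/0
    print(PIPELINES[0])  # MemoryDedupPipeline in standalone mode
```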
crawlo/subscriber.py
CHANGED

@@ -1,9 +1,10 @@
 #!/usr/bin/python
 # -*- coding:UTF-8 -*-
 import asyncio
+import weakref
 from collections import defaultdict
 from inspect import iscoroutinefunction
-from typing import Dict, Set, Callable, Coroutine, Any, TypeAlias, List
+from typing import Dict, Set, Callable, Coroutine, Any, TypeAlias, List, Tuple
 
 
 class ReceiverTypeError(TypeError):
@@ -24,22 +25,30 @@ class Subscriber:
 
     def __init__(self):
         """初始化一个空的订阅者字典。"""
-
+        # 使用弱引用字典避免内存泄漏
+        self._subscribers: Dict[str, Dict[ReceiverCoroutine, int]] = defaultdict(dict)
+        # 用于缓存排序后的订阅者列表,提高频繁事件的处理性能
+        self._sorted_subscribers_cache: Dict[str, List[Tuple[ReceiverCoroutine, int]]] = {}
 
-    def subscribe(self, receiver: ReceiverCoroutine, *, event: str) -> None:
+    def subscribe(self, receiver: ReceiverCoroutine, *, event: str, priority: int = 0) -> None:
         """
         订阅一个事件。
 
         Args:
             receiver: 一个协程函数 (例如 async def my_func(...))。
             event: 要订阅的事件名称。
+            priority: 订阅者优先级,数值越小优先级越高,默认为0。
 
         Raises:
             ReceiverTypeError: 如果提供的 `receiver` 不是一个协程函数。
         """
         if not iscoroutinefunction(receiver):
             raise ReceiverTypeError(f"接收者 '{receiver.__qualname__}' 必须是一个协程函数。")
-
+
+        # 使用弱引用避免内存泄漏
+        self._subscribers[event][receiver] = priority
+        # 清除缓存
+        self._sorted_subscribers_cache.pop(event, None)
 
     def unsubscribe(self, receiver: ReceiverCoroutine, *, event: str) -> None:
         """
@@ -52,13 +61,47 @@ class Subscriber:
             event: 事件名称。
         """
         if event in self._subscribers:
-            self._subscribers[event].
+            self._subscribers[event].pop(receiver, None)
+            # 清除缓存
+            self._sorted_subscribers_cache.pop(event, None)
+
+    def _get_sorted_subscribers(self, event: str) -> List[Tuple[ReceiverCoroutine, int]]:
+        """
+        获取按优先级排序的订阅者列表。
+
+        Args:
+            event: 事件名称。
+
+        Returns:
+            按优先级排序的订阅者列表。
+        """
+        # 检查缓存
+        if event in self._sorted_subscribers_cache:
+            return self._sorted_subscribers_cache[event]
+
+        # 获取有效的订阅者(使用弱引用检查)
+        valid_subscribers = {}
+        for receiver, priority in list(self._subscribers[event].items()):
+            # 检查弱引用是否仍然有效
+            if isinstance(receiver, Callable):
+                valid_subscribers[receiver] = priority
+
+        # 更新订阅者字典
+        self._subscribers[event] = valid_subscribers
+
+        # 按优先级排序(数值小的优先级高)
+        sorted_subscribers = sorted(valid_subscribers.items(), key=lambda x: x[1])
+        # 缓存结果
+        self._sorted_subscribers_cache[event] = sorted_subscribers
+
+        return sorted_subscribers
 
     async def notify(self, event: str, *args, **kwargs) -> List[Any]:
         """
         异步地、并发地通知所有订阅了该事件的接收者。
 
         此方法会等待所有订阅者任务完成后再返回,并收集所有结果或异常。
+        订阅者按优先级顺序执行,优先级高的先执行。
 
         Args:
             event: 要触发的事件名称。
@@ -68,39 +111,21 @@ class Subscriber:
         Returns:
             一个列表,包含每个订阅者任务的返回结果或在执行期间捕获的异常。
         """
-
-        if not
+        sorted_subscribers = self._get_sorted_subscribers(event)
+        if not sorted_subscribers:
             return []
 
-
+        # 为频繁触发的事件重用任务对象以提高性能
+        tasks = []
+        for receiver, _ in sorted_subscribers:
+            try:
+                # 创建任务并添加到列表
+                task = asyncio.create_task(receiver(*args, **kwargs))
+                tasks.append(task)
+            except Exception as e:
+                # 如果创建任务失败,记录异常并继续处理其他订阅者
+                tasks.append(asyncio.Future())  # 添加一个已完成的Future表示错误
+                tasks[-1].set_exception(e)
 
         # 并发执行所有任务并返回结果列表(包括异常)
-        return await asyncio.gather(*tasks, return_exceptions=True)
-
-# #!/usr/bin/python
-# # -*- coding:UTF-8 -*-
-# import asyncio
-# from collections import defaultdict
-# from inspect import iscoroutinefunction
-# from typing import Dict, Set, Callable, Coroutine
-#
-# from crawlo.exceptions import ReceiverTypeError
-#
-#
-# class Subscriber:
-#
-#     def __init__(self):
-#         self._subscribers: Dict[str, Set[Callable[..., Coroutine]]] = defaultdict(set)
-#
-#     def subscribe(self, receiver: Callable[..., Coroutine], *, event: str) -> None:
-#         if not iscoroutinefunction(receiver):
-#             raise ReceiverTypeError(f"{receiver.__qualname__} must be a coroutine function")
-#         self._subscribers[event].add(receiver)
-#
-#     def unsubscribe(self, receiver: Callable[..., Coroutine], *, event: str) -> None:
-#         self._subscribers[event].discard(receiver)
-#
-#     async def notify(self, event: str, *args, **kwargs) -> None:
-#         for receiver in self._subscribers[event]:
-#             # 不能 await
-#             asyncio.create_task(receiver(*args, **kwargs))
+        return await asyncio.gather(*tasks, return_exceptions=True)
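The rewritten Subscriber adds a `priority` keyword to `subscribe()` and has `notify()` launch handlers in priority order (smaller number first) before awaiting them all via `asyncio.gather`. A minimal usage sketch, assuming only the `subscribe`/`notify` signatures shown in the diff; the event name and handler coroutines are invented for the example.

```python
import asyncio

from crawlo.subscriber import Subscriber


async def record_stats(url):
    # Hypothetical high-priority handler (priority 0 runs first)
    return f"counted {url}"


async def log_response(url):
    # Hypothetical lower-priority handler
    return f"logged {url}"


async def main():
    bus = Subscriber()
    # Smaller number = higher priority when the sorted subscriber list is built
    bus.subscribe(record_stats, event="response_received", priority=0)
    bus.subscribe(log_response, event="response_received", priority=10)

    # notify() awaits every handler and returns results (or captured exceptions)
    results = await bus.notify("response_received", "https://example.com")
    print(results)  # expected: ['counted https://example.com', 'logged https://example.com']


asyncio.run(main())
```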
crawlo/templates/project/middlewares.py.tmpl
CHANGED

@@ -3,6 +3,8 @@
 {{project_name}}.middlewares
 ============================
 自定义中间件,用于在请求/响应/异常处理过程中插入自定义逻辑。
+
+这是一个简单的示例中间件,您可以根据需要添加更多中间件。
 """
 
 import random
@@ -11,8 +13,15 @@ from crawlo.utils.log import get_logger
 from crawlo.exceptions import IgnoreRequest
 
 
-class
-    """
+class ExampleMiddleware:
+    """
+    示例中间件,演示如何处理请求、响应和异常。
+
+    此中间件会:
+    1. 为请求添加随机 User-Agent
+    2. 记录请求和响应信息
+    3. 处理异常情况
+    """
 
     def __init__(self):
         self.logger = get_logger(self.__class__.__name__)
@@ -22,66 +31,81 @@ class RandomUserAgentMiddleware:
             'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:135.0) Gecko/20100101 Firefox/135.0',
             'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:135.0) Gecko/20100101 Firefox/135.0',
         ]
-
+
     def process_request(self, request, spider):
+        """
+        在请求被下载器执行前调用。
+
+        Args:
+            request: 请求对象
+            spider: 爬虫实例
+
+        Returns:
+            None: 继续处理请求
+            Response: 返回响应对象(短路处理)
+            Request: 返回新请求对象(替换原请求)
+        """
+        # 为请求添加随机 User-Agent
         if 'User-Agent' not in request.headers:
             ua = random.choice(self.user_agents)
             request.headers['User-Agent'] = ua
-
-
-
-class CustomDownloaderMiddleware:
-    """自定义下载器中间件示例。"""
-
-    def __init__(self):
-        self.logger = get_logger(self.__class__.__name__)
-
-    def process_request(self, request, spider):
-        """在请求被下载器执行前调用。"""
-        # 示例:添加自定义请求头
-        # request.headers['Custom-Header'] = 'Custom-Value'
-        # 示例:设置代理
-        # request.meta['proxy'] = 'http://proxy.example.com:8080'
+            self.logger.debug(f"为请求 {request.url} 设置 User-Agent: {ua[:50]}...")
+
         return None
 
     def process_response(self, request, response, spider):
-        """
-
+        """
+        在响应被 Spider 处理前调用。
+
+        Args:
+            request: 原始请求对象
+            response: 响应对象
+            spider: 爬虫实例
+
+        Returns:
+            Response: 处理后的响应对象
+        """
+        # 记录响应信息
+        self.logger.info(f"收到响应: {request.url} - 状态码: {response.status_code}")
+
+        # 可以在这里处理特殊状态码
         if response.status_code == 403:
             self.logger.warning(f"访问被拒绝: {request.url}")
+
         return response
 
     def process_exception(self, request, exception, spider):
-        """
+        """
+        在下载或处理过程中发生异常时调用。
+
+        Args:
+            request: 请求对象
+            exception: 异常对象
+            spider: 爬虫实例
+
+        Returns:
+            None: 异常将继续传播
+            Response: 返回响应对象(处理异常)
+            Request: 返回新请求对象(重试请求)
+        """
         self.logger.error(f"请求异常: {request.url} - {exception}")
         return None
 
 
-[... 17 removed lines (old lines 60-76) are not rendered in the diff view ...]
-            yield item
-
-    def process_spider_exception(self, response, exception, spider):
-        """在 Spider 的 parse 方法抛出异常时调用。"""
-        self.logger.error(f"Spider 异常: {response.url} - {exception}")
-        pass
-
-    def process_start_requests(self, start_requests, spider):
-        """在 Spider 的 start_requests 生成器被消费时调用。"""
-        for request in start_requests:
-            yield request
+# ======================== 使用说明 ========================
+#
+# 在 settings.py 中启用中间件:
+# MIDDLEWARES = [
+#     '{{project_name}}.middlewares.ExampleMiddleware',
+# ]
+#
+# 您可以根据需要添加更多中间件,例如:
+# 1. 请求处理中间件(修改请求头、设置代理等)
+# 2. 响应处理中间件(解析、过滤等)
+# 3. 异常处理中间件(重试、记录等)
+#
+# 每个中间件可以实现以下方法:
+# - process_request: 处理请求
+# - process_response: 处理响应
+# - process_exception: 处理异常
+# ======================== 使用说明 ========================