crawlo-1.3.2-py3-none-any.whl → crawlo-1.3.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crawlo has been flagged as possibly problematic.
- crawlo/__init__.py +24 -0
- crawlo/__version__.py +1 -1
- crawlo/commands/run.py +58 -32
- crawlo/core/__init__.py +44 -0
- crawlo/core/engine.py +119 -45
- crawlo/core/scheduler.py +4 -3
- crawlo/crawler.py +603 -1133
- crawlo/downloader/aiohttp_downloader.py +4 -2
- crawlo/extension/__init__.py +1 -1
- crawlo/extension/logging_extension.py +23 -7
- crawlo/factories/__init__.py +28 -0
- crawlo/factories/base.py +69 -0
- crawlo/factories/crawler.py +104 -0
- crawlo/factories/registry.py +85 -0
- crawlo/filters/aioredis_filter.py +25 -2
- crawlo/framework.py +292 -0
- crawlo/initialization/__init__.py +40 -0
- crawlo/initialization/built_in.py +426 -0
- crawlo/initialization/context.py +142 -0
- crawlo/initialization/core.py +194 -0
- crawlo/initialization/phases.py +149 -0
- crawlo/initialization/registry.py +146 -0
- crawlo/items/base.py +2 -1
- crawlo/logging/__init__.py +38 -0
- crawlo/logging/config.py +97 -0
- crawlo/logging/factory.py +129 -0
- crawlo/logging/manager.py +112 -0
- crawlo/middleware/middleware_manager.py +1 -1
- crawlo/middleware/offsite.py +1 -1
- crawlo/mode_manager.py +26 -1
- crawlo/pipelines/pipeline_manager.py +2 -1
- crawlo/project.py +76 -46
- crawlo/queue/pqueue.py +11 -5
- crawlo/queue/queue_manager.py +143 -19
- crawlo/queue/redis_priority_queue.py +69 -49
- crawlo/settings/default_settings.py +110 -14
- crawlo/settings/setting_manager.py +29 -13
- crawlo/spider/__init__.py +34 -16
- crawlo/stats_collector.py +17 -3
- crawlo/task_manager.py +112 -3
- crawlo/templates/project/settings.py.tmpl +103 -202
- crawlo/templates/project/settings_distributed.py.tmpl +122 -135
- crawlo/templates/project/settings_gentle.py.tmpl +149 -43
- crawlo/templates/project/settings_high_performance.py.tmpl +127 -90
- crawlo/templates/project/settings_minimal.py.tmpl +46 -15
- crawlo/templates/project/settings_simple.py.tmpl +138 -75
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -1
- crawlo/templates/run.py.tmpl +10 -14
- crawlo/templates/spiders_init.py.tmpl +10 -0
- crawlo/tools/network_diagnostic.py +365 -0
- crawlo/utils/class_loader.py +26 -0
- crawlo/utils/error_handler.py +76 -35
- crawlo/utils/log.py +41 -144
- crawlo/utils/redis_connection_pool.py +43 -6
- crawlo/utils/request_serializer.py +8 -1
- {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/METADATA +120 -14
- {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/RECORD +104 -45
- tests/authenticated_proxy_example.py +2 -2
- tests/baidu_performance_test.py +109 -0
- tests/baidu_test.py +60 -0
- tests/comprehensive_framework_test.py +213 -0
- tests/comprehensive_test.py +82 -0
- tests/comprehensive_testing_summary.md +187 -0
- tests/debug_configure.py +70 -0
- tests/debug_framework_logger.py +85 -0
- tests/debug_log_levels.py +64 -0
- tests/distributed_test.py +67 -0
- tests/distributed_test_debug.py +77 -0
- tests/final_command_test_report.md +0 -0
- tests/final_comprehensive_test.py +152 -0
- tests/final_validation_test.py +183 -0
- tests/framework_performance_test.py +203 -0
- tests/optimized_performance_test.py +212 -0
- tests/performance_comparison.py +246 -0
- tests/queue_blocking_test.py +114 -0
- tests/queue_test.py +90 -0
- tests/scrapy_comparison/ofweek_scrapy.py +139 -0
- tests/scrapy_comparison/scrapy_test.py +134 -0
- tests/simple_command_test.py +120 -0
- tests/simple_crawlo_test.py +128 -0
- tests/simple_log_test.py +58 -0
- tests/simple_optimization_test.py +129 -0
- tests/simple_spider_test.py +50 -0
- tests/simple_test.py +48 -0
- tests/test_all_commands.py +231 -0
- tests/test_batch_processor.py +179 -0
- tests/test_component_factory.py +175 -0
- tests/test_controlled_spider_mixin.py +80 -0
- tests/test_enhanced_error_handler_comprehensive.py +246 -0
- tests/test_factories.py +253 -0
- tests/test_framework_logger.py +67 -0
- tests/test_framework_startup.py +65 -0
- tests/test_large_scale_config.py +113 -0
- tests/test_large_scale_helper.py +236 -0
- tests/test_mode_change.py +73 -0
- tests/test_mode_consistency.py +1 -1
- tests/test_performance_monitor.py +116 -0
- tests/test_queue_empty_check.py +42 -0
- tests/untested_features_report.md +139 -0
- tests/verify_debug.py +52 -0
- tests/verify_log_fix.py +112 -0
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +0 -82
- {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/WHEEL +0 -0
- {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/entry_points.txt +0 -0
- {crawlo-1.3.2.dist-info → crawlo-1.3.4.dist-info}/top_level.txt +0 -0
--- a/crawlo/templates/project/settings_gentle.py.tmpl
+++ b/crawlo/templates/project/settings_gentle.py.tmpl
@@ -1,61 +1,167 @@
-
-
-
-
+# -*- coding: UTF-8 -*-
+"""
+{{project_name}} project configuration (gentle edition)
+=============================
+A gentle crawler configuration based on the Crawlo framework.
+Friendly to target sites; suitable for long-running jobs.
+"""
+
+# ============================== Project info ==============================
+PROJECT_NAME = '{{project_name}}'
+
+# ============================== Run mode ==============================
+RUN_MODE = 'standalone'
 
-#
-
-
-#
-#
-#
+# ============================== Concurrency ==============================
+CONCURRENCY = 2
+MAX_RUNNING_SPIDERS = 1
+DOWNLOAD_DELAY = 3.0  # Longer delay to reduce pressure on the target site
+RANDOMNESS = True  # Enable randomized delay
+RANDOM_RANGE = [0.5, 1.5]  # Random delay range factors
 
-
-
-
-
+# ============================== Downloader ==============================
+# Available downloaders:
+# DOWNLOADER = 'crawlo.downloader.aiohttp_downloader.AioHttpDownloader'
+# DOWNLOADER = 'crawlo.downloader.httpx_downloader.HttpXDownloader'
+# DOWNLOADER = 'crawlo.downloader.cffi_downloader.CurlCffiDownloader'
+DOWNLOADER = 'crawlo.downloader.aiohttp_downloader.AioHttpDownloader'
 
-# ==============================
-
-# To enable domain filtering, uncomment and configure the allowed domain list
-# ALLOWED_DOMAINS = ['example.com', 'www.example.com']
+# ============================== Queue ==============================
+QUEUE_TYPE = 'memory'
 
-# ==============================
-
+# ============================== Dedup filter ==============================
+FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
 
-#
+# ============================== Default dedup pipeline ==============================
+DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
+
+# ============================== Spider modules ==============================
+SPIDER_MODULES = ['{{project_name}}.spiders']
+
+# ============================== Middlewares ==============================
 # MIDDLEWARES = [
-
+#     'crawlo.middleware.simple_proxy.SimpleProxyMiddleware',
 # ]
 
-# ==============================
-#
+# ============================== Default request headers ==============================
+# Default headers for DefaultHeaderMiddleware
+DEFAULT_REQUEST_HEADERS = {
+    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
+    'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
+    'Accept-Encoding': 'gzip, deflate, br',
+}
 
-#
+# ============================== Allowed domains ==============================
+# Allowed domains for OffsiteMiddleware
+# ALLOWED_DOMAINS = ['example.com']
+
+# ============================== Data pipelines ==============================
 # PIPELINES = [
-
-#     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
-#     'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
+#     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage (async asyncmy library)
 # ]
 
-#
-# PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
-
-# ============================== User-defined extensions ==============================
-# Note: the framework's default extensions load automatically; add or override them here
-
-# Extension list (framework defaults + user extensions)
+# ============================== Extensions ==============================
 # EXTENSIONS = [
-
-
-
-#     'crawlo.extension.health_check.HealthCheckExtension',  # Health check
+#     'crawlo.extension.log_interval.LogIntervalExtension',
+#     'crawlo.extension.log_stats.LogStats',
+#     'crawlo.extension.logging_extension.CustomLoggerExtension',
 # ]
 
 # ============================== Logging ==============================
-
 LOG_LEVEL = 'INFO'
+LOG_FILE = 'logs/{{project_name}}.log'
+LOG_ENCODING = 'utf-8'  # Explicit log file encoding
 STATS_DUMP = True
-
-
-
+
+# ============================== Output ==============================
+OUTPUT_DIR = 'output'
+
+# ============================== Redis ==============================
+REDIS_HOST = '127.0.0.1'
+REDIS_PORT = 6379
+REDIS_PASSWORD = ''
+REDIS_DB = 0
+
+# Build the URL depending on whether a password is set
+if REDIS_PASSWORD:
+    REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
+else:
+    REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
+
+# ============================== MySQL ==============================
+MYSQL_HOST = '127.0.0.1'
+MYSQL_PORT = 3306
+MYSQL_USER = 'root'
+MYSQL_PASSWORD = '123456'
+MYSQL_DB = '{{project_name}}'
+MYSQL_TABLE = '{{project_name}}_data'
+MYSQL_BATCH_SIZE = 100
+MYSQL_USE_BATCH = False  # Whether to enable batch inserts
+
+# ============================== MongoDB ==============================
+MONGO_URI = 'mongodb://localhost:27017'
+MONGO_DATABASE = '{{project_name}}_db'
+MONGO_COLLECTION = '{{project_name}}_items'
+MONGO_MAX_POOL_SIZE = 200
+MONGO_MIN_POOL_SIZE = 20
+MONGO_BATCH_SIZE = 100  # Batch insert size
+MONGO_USE_BATCH = False  # Whether to enable batch inserts
+
+# ============================== Proxy ==============================
+# Proxies are disabled by default; enable and configure them in the project settings if needed
+PROXY_ENABLED = False  # Whether to enable proxies
+
+# Simple proxy settings (for SimpleProxyMiddleware)
+PROXY_LIST = []  # Proxy list, e.g. ["http://proxy1:8080", "http://proxy2:8080"]
+
+# Advanced proxy settings (for ProxyMiddleware)
+PROXY_API_URL = ""  # Proxy fetch API (replace with a real endpoint)
+
+# Proxy extraction (field path or callable)
+# Example: "proxy" for {"proxy": "http://1.1.1.1:8080"}
+# Example: "data.proxy" for {"data": {"proxy": "http://1.1.1.1:8080"}}
+PROXY_EXTRACTOR = "proxy"
+
+# Proxy refresh control
+PROXY_REFRESH_INTERVAL = 60  # Proxy refresh interval (seconds)
+PROXY_API_TIMEOUT = 10  # Proxy API request timeout
+
+# ============================== Curl-Cffi specific ==============================
+# Browser fingerprint emulation (CurlCffi downloader only)
+CURL_BROWSER_TYPE = "chrome"  # Options: chrome, edge, safari, firefox, or a version such as chrome136
+
+# Custom browser version map (overrides the default behavior)
+CURL_BROWSER_VERSION_MAP = {
+    "chrome": "chrome136",
+    "edge": "edge101",
+    "safari": "safari184",
+    "firefox": "firefox135",
+}
+
+# ============================== Downloader tuning ==============================
+# Downloader health checks
+DOWNLOADER_HEALTH_CHECK = True  # Whether to enable downloader health checks
+HEALTH_CHECK_INTERVAL = 60  # Health check interval (seconds)
+
+# Request statistics
+REQUEST_STATS_ENABLED = True  # Whether to collect request statistics
+STATS_RESET_ON_START = False  # Whether to reset statistics on startup
+
+# HttpX downloader settings
+HTTPX_HTTP2 = True  # Whether to enable HTTP/2
+HTTPX_FOLLOW_REDIRECTS = True  # Whether to follow redirects automatically
+
+# AioHttp downloader settings
+AIOHTTP_AUTO_DECOMPRESS = True  # Whether to auto-decompress responses
+AIOHTTP_FORCE_CLOSE = False  # Whether to force-close connections
+
+# General tuning
+CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
+CONNECTION_KEEPALIVE_TIMEOUT = 15  # Keep-Alive timeout (seconds)
+
+# ============================== Memory monitoring ==============================
+# The memory monitor extension is disabled by default; enable it in the project settings if needed
+MEMORY_MONITOR_ENABLED = False  # Whether to enable memory monitoring
+MEMORY_MONITOR_INTERVAL = 60  # Check interval (seconds)
+MEMORY_WARNING_THRESHOLD = 80.0  # Memory usage warning threshold (percent)
+MEMORY_CRITICAL_THRESHOLD = 90.0  # Memory usage critical threshold (percent)
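The rebuilt gentle template replaces the old sparse file with explicit sections for every subsystem. One setting worth a closer look is PROXY_EXTRACTOR, which takes a dotted field path into the proxy API's JSON response. The resolver itself is not part of this diff; below is a minimal hypothetical sketch of such a dotted-path lookup (extract_by_path is an illustrative name, not a crawlo API), matching the two examples given in the template comments:

from typing import Any

def extract_by_path(payload: dict, path: str) -> Any:
    """Walk a dotted field path such as 'data.proxy' through nested dicts."""
    value: Any = payload
    for key in path.split('.'):
        value = value[key]
    return value

# The two examples from the template comments:
assert extract_by_path({"proxy": "http://1.1.1.1:8080"}, "proxy") == "http://1.1.1.1:8080"
assert extract_by_path({"data": {"proxy": "http://1.1.1.1:8080"}}, "data.proxy") == "http://1.1.1.1:8080"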
--- a/crawlo/templates/project/settings_high_performance.py.tmpl
+++ b/crawlo/templates/project/settings_high_performance.py.tmpl
@@ -1,131 +1,168 @@
 # -*- coding: UTF-8 -*-
 """
-
-
+{{project_name}} project configuration (high-performance edition)
+=============================
+A high-performance crawler configuration based on the Crawlo framework.
+Suitable for large-scale, high-concurrency scenarios.
 """
 
 # ============================== Project info ==============================
 PROJECT_NAME = '{{project_name}}'
 
-# ==============================
-
-RUN_MODE = 'standalone'  # Standalone mode - for development and small-scale collection
+# ============================== Run mode ==============================
+RUN_MODE = 'standalone'
 
-# Concurrency
-CONCURRENCY = 32
-
+# ============================== Concurrency ==============================
+CONCURRENCY = 32
+MAX_RUNNING_SPIDERS = 10
+DOWNLOAD_DELAY = 0.1
 RANDOMNESS = False  # Disable randomized delay for performance
 
-# ==============================
-
-#
-
-
-
-QUEUE_MAX_RETRIES = 3
-QUEUE_TIMEOUT = 300
+# ============================== Downloader ==============================
+# Available downloaders:
+# DOWNLOADER = 'crawlo.downloader.aiohttp_downloader.AioHttpDownloader'
+# DOWNLOADER = 'crawlo.downloader.httpx_downloader.HttpXDownloader'
+# DOWNLOADER = 'crawlo.downloader.cffi_downloader.CurlCffiDownloader'
+DOWNLOADER = 'crawlo.downloader.aiohttp_downloader.AioHttpDownloader'
 
-# ==============================
+# ============================== Queue ==============================
+QUEUE_TYPE = 'auto'
 
+# ============================== Dedup filter ==============================
 # In high-performance mode, use Redis-based dedup when Redis is available, otherwise memory dedup
-DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
 FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'
 
-#
-
-
-REDIS_PASSWORD = ''  # Fill in if a password is set
-
-# Build the URL depending on whether a password is set
-if REDIS_PASSWORD:
-    REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
-else:
-    REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'
-
-# Redis key settings moved into the individual components, under a unified naming scheme
-# crawlo:{project_name}:filter:fingerprint (request dedup)
-# crawlo:{project_name}:item:fingerprint (item dedup)
-# crawlo:{project_name}:queue:requests (request queue)
-# crawlo:{project_name}:queue:processing (processing queue)
-# crawlo:{project_name}:queue:failed (failed queue)
-
-REDIS_TTL = 0
-CLEANUP_FP = 0
-FILTER_DEBUG = True
-DECODE_RESPONSES = True
+# ============================== Default dedup pipeline ==============================
+# In high-performance mode, use Redis-based dedup when Redis is available, otherwise memory dedup
+DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
 
-# ==============================
-
+# ============================== Spider modules ==============================
+SPIDER_MODULES = ['{{project_name}}.spiders']
 
-#
+# ============================== Middlewares ==============================
 # MIDDLEWARES = [
-
+#     'crawlo.middleware.simple_proxy.SimpleProxyMiddleware',
 # ]
 
-# ==============================
-#
+# ============================== Default request headers ==============================
+# Default headers for DefaultHeaderMiddleware
+DEFAULT_REQUEST_HEADERS = {
+    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
+    'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
+    'Accept-Encoding': 'gzip, deflate, br',
+}
+
+# ============================== Allowed domains ==============================
+# Allowed domains for OffsiteMiddleware
+# ALLOWED_DOMAINS = ['example.com']
 
-#
+# ============================== Data pipelines ==============================
 # PIPELINES = [
-
-#     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
-#     'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
+#     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage (async asyncmy library)
 # ]
 
-#
-# PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
-
-# ============================== User-defined extensions ==============================
-# Note: the framework's default extensions load automatically; add or override them here
-
-# Extension list (framework defaults + user extensions)
+# ============================== Extensions ==============================
 # EXTENSIONS = [
-
-
-
-#     'crawlo.extension.health_check.HealthCheckExtension',  # Health check
+#     'crawlo.extension.log_interval.LogIntervalExtension',
+#     'crawlo.extension.log_stats.LogStats',
+#     'crawlo.extension.logging_extension.CustomLoggerExtension',
 # ]
 
-# ============================== Domain filtering ==============================
-# OffsiteMiddleware settings, restricting the crawler to the listed domains
-# To enable domain filtering, uncomment and configure the allowed domain list
-# ALLOWED_DOMAINS = ['example.com', 'www.example.com']
-
 # ============================== Logging ==============================
-
 LOG_LEVEL = 'INFO'
+LOG_FILE = 'logs/{{project_name}}.log'
+LOG_ENCODING = 'utf-8'  # Explicit log file encoding
 STATS_DUMP = True
-LOG_FILE = f'logs/{{project_name}}.log'
-LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
-LOG_ENCODING = 'utf-8'
 
-# ==============================
+# ============================== Output ==============================
+OUTPUT_DIR = 'output'
 
-#
-
-
-
+# ============================== Redis ==============================
+REDIS_HOST = '127.0.0.1'
+REDIS_PORT = 6379
+REDIS_PASSWORD = ''
+REDIS_DB = 0
 
-#
-
-
+# Build the URL depending on whether a password is set
+if REDIS_PASSWORD:
+    REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
+else:
+    REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
+
+# ============================== MySQL ==============================
+MYSQL_HOST = '127.0.0.1'
+MYSQL_PORT = 3306
+MYSQL_USER = 'root'
+MYSQL_PASSWORD = '123456'
+MYSQL_DB = '{{project_name}}'
+MYSQL_TABLE = '{{project_name}}_data'
+MYSQL_BATCH_SIZE = 100
+MYSQL_USE_BATCH = True  # Whether to enable batch inserts
+
+# ============================== MongoDB ==============================
+MONGO_URI = 'mongodb://localhost:27017'
+MONGO_DATABASE = '{{project_name}}_db'
+MONGO_COLLECTION = '{{project_name}}_items'
+MONGO_MAX_POOL_SIZE = 200
+MONGO_MIN_POOL_SIZE = 20
+MONGO_BATCH_SIZE = 100  # Batch insert size
+MONGO_USE_BATCH = True  # Whether to enable batch inserts
+
+# ============================== Proxy ==============================
+# Proxies are disabled by default; enable and configure them in the project settings if needed
+PROXY_ENABLED = False  # Whether to enable proxies
+
+# Simple proxy settings (for SimpleProxyMiddleware)
+PROXY_LIST = []  # Proxy list, e.g. ["http://proxy1:8080", "http://proxy2:8080"]
+
+# Advanced proxy settings (for ProxyMiddleware)
+PROXY_API_URL = ""  # Proxy fetch API (replace with a real endpoint)
+
+# Proxy extraction (field path or callable)
+# Example: "proxy" for {"proxy": "http://1.1.1.1:8080"}
+# Example: "data.proxy" for {"data": {"proxy": "http://1.1.1.1:8080"}}
+PROXY_EXTRACTOR = "proxy"
+
+# Proxy refresh control
+PROXY_REFRESH_INTERVAL = 60  # Proxy refresh interval (seconds)
+PROXY_API_TIMEOUT = 10  # Proxy API request timeout
+
+# ============================== Curl-Cffi specific ==============================
+# Browser fingerprint emulation (CurlCffi downloader only)
+CURL_BROWSER_TYPE = "chrome"  # Options: chrome, edge, safari, firefox, or a version such as chrome136
+
+# Custom browser version map (overrides the default behavior)
+CURL_BROWSER_VERSION_MAP = {
+    "chrome": "chrome136",
+    "edge": "edge101",
+    "safari": "safari184",
+    "firefox": "firefox135",
+}
+
+# ============================== Downloader tuning ==============================
+# Downloader health checks
+DOWNLOADER_HEALTH_CHECK = True  # Whether to enable downloader health checks
+HEALTH_CHECK_INTERVAL = 30  # Health check interval (seconds)
 
 # Request statistics
-REQUEST_STATS_ENABLED = True
-STATS_RESET_ON_START = False
+REQUEST_STATS_ENABLED = True  # Whether to collect request statistics
+STATS_RESET_ON_START = False  # Whether to reset statistics on startup
 
 # HttpX downloader settings
-HTTPX_HTTP2 = True
-HTTPX_FOLLOW_REDIRECTS = True
+HTTPX_HTTP2 = True  # Whether to enable HTTP/2
+HTTPX_FOLLOW_REDIRECTS = True  # Whether to follow redirects automatically
 
 # AioHttp downloader settings
-AIOHTTP_AUTO_DECOMPRESS = True
-AIOHTTP_FORCE_CLOSE = False
+AIOHTTP_AUTO_DECOMPRESS = True  # Whether to auto-decompress responses
+AIOHTTP_FORCE_CLOSE = False  # Whether to force-close connections
 
 # General tuning
-CONNECTION_TTL_DNS_CACHE = 300
-CONNECTION_KEEPALIVE_TIMEOUT = 15
-
-#
-
-
+CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
+CONNECTION_KEEPALIVE_TIMEOUT = 15  # Keep-Alive timeout (seconds)
+
+# ============================== Memory monitoring ==============================
+# The memory monitor extension is disabled by default; enable it in the project settings if needed
+MEMORY_MONITOR_ENABLED = False  # Whether to enable memory monitoring
+MEMORY_MONITOR_INTERVAL = 60  # Check interval (seconds)
+MEMORY_WARNING_THRESHOLD = 80.0  # Memory usage warning threshold (percent)
+MEMORY_CRITICAL_THRESHOLD = 90.0  # Memory usage critical threshold (percent)
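The high-performance template differs from the gentle one mainly in throughput settings: CONCURRENCY jumps from 2 to 32, DOWNLOAD_DELAY drops from 3.0 to 0.1, RANDOMNESS is off, QUEUE_TYPE is 'auto' instead of 'memory', and dedup moves to the Redis-backed filter and pipeline. How RANDOM_RANGE is applied is not shown in this diff; a plausible reading of the "random delay range factors" comment is a multiplicative jitter, sketched below (hypothetical, not crawlo's actual scheduling code):

import random

DOWNLOAD_DELAY = 3.0       # gentle-template default
RANDOMNESS = True          # enable randomized delay
RANDOM_RANGE = [0.5, 1.5]  # random delay range factors

def effective_delay() -> float:
    # Scale the base delay by a uniform random factor when randomness is on.
    if RANDOMNESS:
        low, high = RANDOM_RANGE
        return DOWNLOAD_DELAY * random.uniform(low, high)
    return DOWNLOAD_DELAY

Under this reading the gentle defaults yield 1.5 to 4.5 seconds between requests, while the high-performance template uses a flat 0.1 seconds.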
--- a/crawlo/templates/project/settings_minimal.py.tmpl
+++ b/crawlo/templates/project/settings_minimal.py.tmpl
@@ -1,35 +1,66 @@
 # -*- coding: UTF-8 -*-
 """
-{{project_name}}
+{{project_name}} project configuration (minimal edition)
 =============================
-
+A minimal crawler configuration based on the Crawlo framework.
+Contains only the most basic and commonly used settings.
 """
 
 # ============================== Project info ==============================
 PROJECT_NAME = '{{project_name}}'
 
-# ==============================
-
-CONCURRENCY = 4
+# ============================== Run mode ==============================
+RUN_MODE = 'standalone'
 
-#
+# ============================== Concurrency ==============================
+CONCURRENCY = 4
+MAX_RUNNING_SPIDERS = 1
 DOWNLOAD_DELAY = 1.0
 
-# ==============================
-
+# ============================== Downloader ==============================
+DOWNLOADER = 'crawlo.downloader.aiohttp_downloader.AioHttpDownloader'
+
+# ============================== Queue ==============================
+QUEUE_TYPE = 'memory'
+
+# ============================== Dedup filter ==============================
+FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
+
+# ============================== Default dedup pipeline ==============================
+DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
+
+# ============================== Spider modules ==============================
+SPIDER_MODULES = ['{{project_name}}.spiders']
+
+# ============================== Default request headers ==============================
+# Default headers for DefaultHeaderMiddleware
+DEFAULT_REQUEST_HEADERS = {
+    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
+    'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
+    'Accept-Encoding': 'gzip, deflate, br',
+}
+
+# ============================== Allowed domains ==============================
+# Allowed domains for OffsiteMiddleware
+# ALLOWED_DOMAINS = ['example.com']
+
+# ============================== Data pipelines ==============================
 PIPELINES = [
     'crawlo.pipelines.json_pipeline.JsonPipeline',
 ]
 
-# ==============================
-#
-#
-#
+# ============================== Extensions ==============================
+# EXTENSIONS = [
+#     'crawlo.extension.log_interval.LogIntervalExtension',
+#     'crawlo.extension.log_stats.LogStats',
+#     'crawlo.extension.logging_extension.CustomLoggerExtension',
+# ]
 
 # ============================== Logging ==============================
 LOG_LEVEL = 'INFO'
-LOG_FILE =
+LOG_FILE = 'logs/{{project_name}}.log'
+LOG_ENCODING = 'utf-8'  # Explicit log file encoding
 STATS_DUMP = True
 
-# ==============================
-
+# ============================== Output ==============================
+OUTPUT_DIR = 'output'