crawlo-1.1.4-py3-none-any.whl → crawlo-1.1.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (190)
  1. crawlo/__init__.py +61 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +61 -0
  4. crawlo/cleaners/data_formatter.py +226 -0
  5. crawlo/cleaners/encoding_converter.py +126 -0
  6. crawlo/cleaners/text_cleaner.py +233 -0
  7. crawlo/cli.py +40 -40
  8. crawlo/commands/__init__.py +13 -13
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/list.py +155 -155
  12. crawlo/commands/run.py +292 -285
  13. crawlo/commands/startproject.py +419 -196
  14. crawlo/commands/stats.py +188 -188
  15. crawlo/commands/utils.py +186 -186
  16. crawlo/config.py +312 -279
  17. crawlo/config_validator.py +253 -0
  18. crawlo/core/__init__.py +2 -2
  19. crawlo/core/engine.py +346 -172
  20. crawlo/core/processor.py +40 -40
  21. crawlo/core/scheduler.py +137 -166
  22. crawlo/crawler.py +1027 -1027
  23. crawlo/downloader/__init__.py +266 -242
  24. crawlo/downloader/aiohttp_downloader.py +220 -212
  25. crawlo/downloader/cffi_downloader.py +256 -251
  26. crawlo/downloader/httpx_downloader.py +259 -259
  27. crawlo/downloader/hybrid_downloader.py +214 -0
  28. crawlo/downloader/playwright_downloader.py +403 -0
  29. crawlo/downloader/selenium_downloader.py +473 -0
  30. crawlo/event.py +11 -11
  31. crawlo/exceptions.py +81 -81
  32. crawlo/extension/__init__.py +37 -37
  33. crawlo/extension/health_check.py +141 -141
  34. crawlo/extension/log_interval.py +57 -57
  35. crawlo/extension/log_stats.py +81 -81
  36. crawlo/extension/logging_extension.py +43 -43
  37. crawlo/extension/memory_monitor.py +104 -88
  38. crawlo/extension/performance_profiler.py +133 -117
  39. crawlo/extension/request_recorder.py +107 -107
  40. crawlo/filters/__init__.py +154 -154
  41. crawlo/filters/aioredis_filter.py +281 -242
  42. crawlo/filters/memory_filter.py +269 -269
  43. crawlo/items/__init__.py +23 -23
  44. crawlo/items/base.py +21 -21
  45. crawlo/items/fields.py +53 -53
  46. crawlo/items/items.py +104 -104
  47. crawlo/middleware/__init__.py +21 -21
  48. crawlo/middleware/default_header.py +32 -32
  49. crawlo/middleware/download_delay.py +28 -28
  50. crawlo/middleware/middleware_manager.py +135 -135
  51. crawlo/middleware/proxy.py +272 -248
  52. crawlo/middleware/request_ignore.py +30 -30
  53. crawlo/middleware/response_code.py +18 -18
  54. crawlo/middleware/response_filter.py +26 -26
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/mode_manager.py +212 -201
  57. crawlo/network/__init__.py +21 -21
  58. crawlo/network/request.py +338 -311
  59. crawlo/network/response.py +360 -271
  60. crawlo/pipelines/__init__.py +21 -21
  61. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  62. crawlo/pipelines/console_pipeline.py +39 -39
  63. crawlo/pipelines/csv_pipeline.py +316 -316
  64. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  65. crawlo/pipelines/json_pipeline.py +218 -218
  66. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  67. crawlo/pipelines/mongo_pipeline.py +131 -131
  68. crawlo/pipelines/mysql_pipeline.py +316 -316
  69. crawlo/pipelines/pipeline_manager.py +61 -56
  70. crawlo/pipelines/redis_dedup_pipeline.py +167 -162
  71. crawlo/project.py +188 -153
  72. crawlo/queue/pqueue.py +37 -37
  73. crawlo/queue/queue_manager.py +334 -307
  74. crawlo/queue/redis_priority_queue.py +299 -209
  75. crawlo/settings/__init__.py +7 -7
  76. crawlo/settings/default_settings.py +219 -278
  77. crawlo/settings/setting_manager.py +123 -100
  78. crawlo/spider/__init__.py +639 -639
  79. crawlo/stats_collector.py +59 -59
  80. crawlo/subscriber.py +130 -130
  81. crawlo/task_manager.py +30 -30
  82. crawlo/templates/crawlo.cfg.tmpl +10 -10
  83. crawlo/templates/project/__init__.py.tmpl +3 -3
  84. crawlo/templates/project/items.py.tmpl +17 -17
  85. crawlo/templates/project/middlewares.py.tmpl +110 -110
  86. crawlo/templates/project/pipelines.py.tmpl +97 -97
  87. crawlo/templates/project/run.py.tmpl +251 -251
  88. crawlo/templates/project/settings.py.tmpl +326 -279
  89. crawlo/templates/project/settings_distributed.py.tmpl +120 -0
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -0
  91. crawlo/templates/project/settings_high_performance.py.tmpl +152 -0
  92. crawlo/templates/project/settings_simple.py.tmpl +69 -0
  93. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  94. crawlo/templates/spider/spider.py.tmpl +141 -141
  95. crawlo/tools/__init__.py +183 -0
  96. crawlo/tools/anti_crawler.py +269 -0
  97. crawlo/tools/authenticated_proxy.py +241 -0
  98. crawlo/tools/data_validator.py +181 -0
  99. crawlo/tools/date_tools.py +36 -0
  100. crawlo/tools/distributed_coordinator.py +387 -0
  101. crawlo/tools/retry_mechanism.py +221 -0
  102. crawlo/tools/scenario_adapter.py +263 -0
  103. crawlo/utils/__init__.py +35 -7
  104. crawlo/utils/batch_processor.py +261 -0
  105. crawlo/utils/controlled_spider_mixin.py +439 -439
  106. crawlo/utils/date_tools.py +290 -233
  107. crawlo/utils/db_helper.py +343 -343
  108. crawlo/utils/enhanced_error_handler.py +360 -0
  109. crawlo/utils/env_config.py +106 -0
  110. crawlo/utils/error_handler.py +126 -0
  111. crawlo/utils/func_tools.py +82 -82
  112. crawlo/utils/large_scale_config.py +286 -286
  113. crawlo/utils/large_scale_helper.py +343 -343
  114. crawlo/utils/log.py +128 -128
  115. crawlo/utils/performance_monitor.py +285 -0
  116. crawlo/utils/queue_helper.py +175 -175
  117. crawlo/utils/redis_connection_pool.py +335 -0
  118. crawlo/utils/redis_key_validator.py +200 -0
  119. crawlo/utils/request.py +267 -267
  120. crawlo/utils/request_serializer.py +219 -219
  121. crawlo/utils/spider_loader.py +62 -62
  122. crawlo/utils/system.py +11 -11
  123. crawlo/utils/tools.py +4 -4
  124. crawlo/utils/url.py +39 -39
  125. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/METADATA +401 -403
  126. crawlo-1.1.6.dist-info/RECORD +189 -0
  127. examples/__init__.py +7 -7
  128. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +82 -0
  129. tests/__init__.py +7 -7
  130. tests/advanced_tools_example.py +276 -0
  131. tests/authenticated_proxy_example.py +237 -0
  132. tests/cleaners_example.py +161 -0
  133. tests/config_validation_demo.py +103 -0
  134. {examples → tests}/controlled_spider_example.py +205 -205
  135. tests/date_tools_example.py +181 -0
  136. tests/dynamic_loading_example.py +524 -0
  137. tests/dynamic_loading_test.py +105 -0
  138. tests/env_config_example.py +134 -0
  139. tests/error_handling_example.py +172 -0
  140. tests/redis_key_validation_demo.py +131 -0
  141. tests/response_improvements_example.py +145 -0
  142. tests/test_advanced_tools.py +149 -0
  143. tests/test_all_redis_key_configs.py +146 -0
  144. tests/test_authenticated_proxy.py +142 -0
  145. tests/test_cleaners.py +55 -0
  146. tests/test_comprehensive.py +147 -0
  147. tests/test_config_validator.py +194 -0
  148. tests/test_date_tools.py +124 -0
  149. tests/test_double_crawlo_fix.py +208 -0
  150. tests/test_double_crawlo_fix_simple.py +125 -0
  151. tests/test_dynamic_downloaders_proxy.py +125 -0
  152. tests/test_dynamic_proxy.py +93 -0
  153. tests/test_dynamic_proxy_config.py +147 -0
  154. tests/test_dynamic_proxy_real.py +110 -0
  155. tests/test_edge_cases.py +304 -0
  156. tests/test_enhanced_error_handler.py +271 -0
  157. tests/test_env_config.py +122 -0
  158. tests/test_error_handler_compatibility.py +113 -0
  159. tests/test_final_validation.py +153 -153
  160. tests/test_framework_env_usage.py +104 -0
  161. tests/test_integration.py +357 -0
  162. tests/test_item_dedup_redis_key.py +123 -0
  163. tests/test_parsel.py +30 -0
  164. tests/test_performance.py +328 -0
  165. tests/test_proxy_health_check.py +32 -32
  166. tests/test_proxy_middleware_integration.py +136 -136
  167. tests/test_proxy_providers.py +56 -56
  168. tests/test_proxy_stats.py +19 -19
  169. tests/test_proxy_strategies.py +59 -59
  170. tests/test_queue_manager_double_crawlo.py +231 -0
  171. tests/test_queue_manager_redis_key.py +177 -0
  172. tests/test_redis_config.py +28 -28
  173. tests/test_redis_connection_pool.py +295 -0
  174. tests/test_redis_key_naming.py +182 -0
  175. tests/test_redis_key_validator.py +124 -0
  176. tests/test_redis_queue.py +224 -224
  177. tests/test_request_serialization.py +70 -70
  178. tests/test_response_improvements.py +153 -0
  179. tests/test_scheduler.py +241 -241
  180. tests/test_simple_response.py +62 -0
  181. tests/test_telecom_spider_redis_key.py +206 -0
  182. tests/test_template_content.py +88 -0
  183. tests/test_template_redis_key.py +135 -0
  184. tests/test_tools.py +154 -0
  185. tests/tools_example.py +258 -0
  186. crawlo/core/enhanced_engine.py +0 -190
  187. crawlo-1.1.4.dist-info/RECORD +0 -117
  188. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/WHEEL +0 -0
  189. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/entry_points.txt +0 -0
  190. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/top_level.txt +0 -0
crawlo/settings/default_settings.py
@@ -1,279 +1,220 @@
- #!/usr/bin/python
- # -*- coding: UTF-8 -*-
- """
- ==================================
- Crawlo project configuration file
- ==================================
- Notes:
- - All settings are grouped by functional module.
- - Sensitive settings (e.g. Redis and MySQL passwords) can be overridden via environment variables.
- - Components (MySQL, Redis, Proxy, etc.) can be enabled or disabled as needed.
- """
- import os
-
- # ============================== Core info ==============================
- PROJECT_NAME = 'crawlo'
-
- # ============================== Network request settings ==============================
-
- # Downloader selection (three options)
- # Option 1: specify the class path directly
- DOWNLOADER = "crawlo.downloader.aiohttp_downloader.AioHttpDownloader"
- # DOWNLOADER = "crawlo.downloader.cffi_downloader.CurlCffiDownloader"  # browser fingerprint support
- # DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"  # HTTP/2 support
-
- # Option 2: use a short name (recommended)
- # DOWNLOADER_TYPE = 'aiohttp'  # options: aiohttp, httpx, curl_cffi, cffi
-
- # Option 3: choose dynamically in the Spider
- # e.g. set custom_settings = {'DOWNLOADER_TYPE': 'httpx'} on the Spider class
-
- # Request timeout and safety
- DOWNLOAD_TIMEOUT = 30  # download timeout (seconds)
- VERIFY_SSL = True  # verify SSL certificates
- USE_SESSION = True  # use a persistent session (aiohttp only)
-
- # Request delay control
- DOWNLOAD_DELAY = 1.0  # base delay (seconds)
- RANDOM_RANGE = (0.8, 1.2)  # random delay factor range
- RANDOMNESS = True  # enable random delay
-
- # Retry policy
- MAX_RETRY_TIMES = 3  # maximum retries
- RETRY_PRIORITY = -1  # priority adjustment for retried requests
- RETRY_HTTP_CODES = [408, 429, 500, 502, 503, 504, 522, 524]  # status codes that trigger a retry
- IGNORE_HTTP_CODES = [403, 404]  # status codes marked successful without retrying
- ALLOWED_CODES = []  # allowed status codes (empty = unrestricted)
-
- # Connection and response size limits
- CONNECTION_POOL_LIMIT = 50  # maximum concurrent connections (pool size)
- CONNECTION_POOL_LIMIT_PER_HOST = 20  # connection pool size per host
- DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # maximum response body size (10 MB)
- DOWNLOAD_WARN_SIZE = 1024 * 1024  # response body warning threshold (1 MB)
- DOWNLOAD_RETRY_TIMES = MAX_RETRY_TIMES  # downloader-internal retries (reuses the global value)
-
- # Download statistics
- DOWNLOADER_STATS = True  # enable downloader statistics
- DOWNLOAD_STATS = True  # record download time and size statistics
-
- # ============================== Concurrency and scheduling ==============================
-
- CONCURRENCY = 8  # concurrent requests per spider
- INTERVAL = 5  # statistics logging interval (seconds)
- DEPTH_PRIORITY = 1  # depth-first strategy priority
- MAX_RUNNING_SPIDERS = 3  # maximum number of spiders running at once
-
- # ============================== Queue settings ==============================
-
- # 🎯 Run mode: 'standalone', 'distributed', or 'auto'
- RUN_MODE = 'standalone'  # standalone by default: simple and easy to use
-
- # Queue type: 'memory', 'redis' (distributed), or 'auto'
- QUEUE_TYPE = 'memory'  # in-memory queue by default, no external dependencies
- SCHEDULER_MAX_QUEUE_SIZE = 2000  # maximum scheduler queue size
- SCHEDULER_QUEUE_NAME = 'crawlo:requests'  # Redis queue name
- QUEUE_MAX_RETRIES = 3  # maximum retries for queue operations
- QUEUE_TIMEOUT = 300  # queue operation timeout (seconds)
-
- # Large-scale crawl tuning
- LARGE_SCALE_BATCH_SIZE = 1000  # batch size
- LARGE_SCALE_CHECKPOINT_INTERVAL = 5000  # checkpoint interval
- LARGE_SCALE_MAX_MEMORY_USAGE = 500  # maximum memory usage (MB)
-
- # ============================== Data storage settings ==============================
-
- # --- MySQL ---
- MYSQL_HOST = '127.0.0.1'
- MYSQL_PORT = 3306
- MYSQL_USER = 'root'
- MYSQL_PASSWORD = '123456'
- MYSQL_DB = 'crawl'
- MYSQL_TABLE = 'crawlo'
- MYSQL_BATCH_SIZE = 100  # rows per batch insert
- MYSQL_USE_BATCH = False  # enable batch inserts
-
- # MySQL connection pool
- MYSQL_FLUSH_INTERVAL = 5  # cache flush interval (seconds)
- MYSQL_POOL_MIN = 5
- MYSQL_POOL_MAX = 20
- MYSQL_ECHO = False  # log SQL statements
-
- # --- MongoDB ---
- MONGO_URI = 'mongodb://user:password@host:27017'
- MONGO_DATABASE = 'scrapy_data'
- MONGO_COLLECTION = 'crawled_items'
- MONGO_MAX_POOL_SIZE = 200
- MONGO_MIN_POOL_SIZE = 20
- MONGO_BATCH_SIZE = 100  # rows per batch insert
- MONGO_USE_BATCH = False  # enable batch inserts
-
- # ============================== Deduplication settings ==============================
-
- # Request fingerprint storage directory (used by the file-based filter)
- REQUEST_DIR = '.'
-
- # Select the dedup pipeline automatically by run mode:
- # standalone mode defaults to the in-memory dedup pipeline,
- # distributed mode defaults to the Redis dedup pipeline
- if RUN_MODE == 'distributed':
-     # Redis dedup pipeline by default in distributed mode
-     DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.RedisDedupPipeline'
- else:
-     # in-memory dedup pipeline by default in standalone mode
-     DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.MemoryDedupPipeline'
-
- # Dedup filter class (choose one)
- FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
- # FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'  # distributed dedup
-
- # --- Redis filter settings ---
- REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1')
- REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
- REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')  # no password by default
- REDIS_DB = int(os.getenv('REDIS_DB', 0))  # Redis database number, defaults to 0
- # 🔧 Build the URL differently depending on whether a password is set
- if REDIS_PASSWORD:
-     REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
- else:
-     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
- REDIS_KEY = 'request_fingerprint'  # Redis key that stores fingerprints
- REDIS_TTL = 0  # fingerprint TTL (0 = never expire)
- CLEANUP_FP = 0  # clean up fingerprints on shutdown (0 = keep)
- FILTER_DEBUG = True  # enable dedup debug logging
- DECODE_RESPONSES = True  # decode Redis responses to strings
-
- # ============================== Middleware ==============================
-
- MIDDLEWARES = [
-     # === Request preprocessing ===
-     'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',  # 1. drop invalid requests
-     'crawlo.middleware.download_delay.DownloadDelayMiddleware',  # 2. throttle request rate
-     'crawlo.middleware.default_header.DefaultHeaderMiddleware',  # 3. add default headers
-     'crawlo.middleware.proxy.ProxyMiddleware',  # 4. set proxies
-
-     # === Response processing ===
-     'crawlo.middleware.retry.RetryMiddleware',  # 5. retry failed requests
-     'crawlo.middleware.response_code.ResponseCodeMiddleware',  # 6. handle special status codes
-     'crawlo.middleware.response_filter.ResponseFilterMiddleware',  # 7. filter response content
- ]
-
- # ============================== Extensions and pipelines ==============================
-
- # Item pipelines (enabled storage backends)
- PIPELINES = [
-     'crawlo.pipelines.console_pipeline.ConsolePipeline',  # console output
-     # 'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage (optional)
- ]
-
- # Prepend the default dedup pipeline based on run mode
- if RUN_MODE == 'distributed':
-     # Redis dedup pipeline in distributed mode
-     PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
- else:
-     # in-memory dedup pipeline in standalone mode
-     PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
-
- # Extensions (monitoring and logging)
- EXTENSIONS = [
-     'crawlo.extension.log_interval.LogIntervalExtension',  # periodic logging
-     'crawlo.extension.log_stats.LogStats',  # statistics
-     'crawlo.extension.logging_extension.CustomLoggerExtension',  # custom logger
-     'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # memory monitoring
-     # 'crawlo.extension.request_recorder.RequestRecorderExtension',  # request recording
-     # 'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # profiling
-     # 'crawlo.extension.health_check.HealthCheckExtension',  # health checks
- ]
-
- # ============================== Logging and monitoring ==============================
-
- LOG_LEVEL = 'INFO'  # log level: DEBUG/INFO/WARNING/ERROR
- STATS_DUMP = True  # dump statistics periodically
-
- # ============================== Extension settings ==============================
-
- # Memory monitor extension
- MEMORY_MONITOR_ENABLED = False  # enable memory monitoring
- MEMORY_MONITOR_INTERVAL = 60  # memory check interval (seconds)
- MEMORY_WARNING_THRESHOLD = 80.0  # memory usage warning threshold (percent)
- MEMORY_CRITICAL_THRESHOLD = 90.0  # memory usage critical threshold (percent)
-
- # Request recorder extension
- REQUEST_RECORDER_ENABLED = False  # enable request recording
- REQUEST_RECORDER_OUTPUT_DIR = 'requests_log'  # output directory for request records
- REQUEST_RECORDER_MAX_FILE_SIZE = 10 * 1024 * 1024  # maximum size per record file (bytes)
-
- # Performance profiler extension
- PERFORMANCE_PROFILER_ENABLED = False  # enable profiling
- PERFORMANCE_PROFILER_OUTPUT_DIR = 'profiling'  # profiling output directory
- PERFORMANCE_PROFILER_INTERVAL = 300  # interval for periodically saving profiling results (seconds)
-
- # Health check extension
- HEALTH_CHECK_ENABLED = True  # enable health checks
- HEALTH_CHECK_INTERVAL = 60  # health check interval (seconds)
-
- # ============================== Logging and monitoring ==============================
-
- LOG_LEVEL = 'INFO'  # log level: DEBUG/INFO/WARNING/ERROR
- STATS_DUMP = True  # dump statistics periodically
- LOG_FILE = f'logs/{PROJECT_NAME}.log'  # log file path
- LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
- LOG_ENCODING = 'utf-8'
-
- # ============================== Proxy settings ==============================
-
- PROXY_ENABLED = False  # enable proxies
- PROXY_API_URL = "https://api.proxyprovider.com/get"  # proxy API endpoint (replace with a real one)
-
- # Proxy extraction (supports a field path or a function)
- PROXY_EXTRACTOR = "proxy"  # e.g. for a response like {"proxy": "http://1.1.1.1:8080"}
-
- # Proxy refresh control
- PROXY_REFRESH_INTERVAL = 60  # proxy refresh interval (seconds)
- PROXY_API_TIMEOUT = 10  # proxy API request timeout
-
- # ============================== Curl-Cffi specific settings ==============================
-
- # Browser fingerprint emulation (CurlCffi downloader only)
- CURL_BROWSER_TYPE = "chrome"  # options: chrome, edge, safari, firefox, or a version such as chrome136
-
- # Custom browser version map (overrides the default behavior)
- CURL_BROWSER_VERSION_MAP = {
-     "chrome": "chrome136",
-     "edge": "edge101",
-     "safari": "safari184",
-     "firefox": "firefox135",
-     # example: testing an older version
-     # "chrome_legacy": "chrome110",
- }
-
- # Curl-Cffi tuning
- CURL_RANDOMIZE_DELAY = False  # enable random delay
- CURL_RETRY_BACKOFF = True  # enable exponential backoff on retries
-
- # Default request headers (can be overridden per Spider)
- DEFAULT_REQUEST_HEADERS = {
-     'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
-                   '(KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36',
- }
-
- # ============================== Downloader tuning ==============================
-
- # Downloader health checks
- DOWNLOADER_HEALTH_CHECK = True  # enable downloader health checks
- HEALTH_CHECK_INTERVAL = 60  # health check interval (seconds)
-
- # Request statistics
- REQUEST_STATS_ENABLED = True  # enable request statistics
- STATS_RESET_ON_START = False  # reset statistics on startup
-
- # HttpX downloader settings
- HTTPX_HTTP2 = True  # enable HTTP/2 support
- HTTPX_FOLLOW_REDIRECTS = True  # follow redirects automatically
-
- # AioHttp downloader settings
- AIOHTTP_AUTO_DECOMPRESS = True  # decompress responses automatically
- AIOHTTP_FORCE_CLOSE = False  # force-close connections
-
- # General tuning
- CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
+ #!/usr/bin/python
+ # -*- coding:UTF-8 -*-
+ """
+ Default settings file
+ Contains all of the Crawlo framework's default settings
+ """
+ import os
+
+ # Import the environment-variable configuration helpers
+ from crawlo.utils.env_config import get_redis_config, get_runtime_config
+
+ # ============================== Project basics ==============================
+
+ # Project name (used to identify logs, Redis keys, etc.)
+ PROJECT_NAME = get_runtime_config()['PROJECT_NAME']
+
+ # Framework version
+ VERSION = 1.0
+
+ # Run mode: standalone/distributed/auto
+ RUN_MODE = get_runtime_config()['CRAWLO_MODE']
+
+ # Concurrency
+ CONCURRENCY = get_runtime_config()['CONCURRENCY']
+
+ # ============================== Crawler core settings ==============================
+
+ # Default downloader
+ DOWNLOADER = 'crawlo.downloader.aiohttp_downloader.AioHttpDownloader'
+
+ # Request delay (seconds)
+ DOWNLOAD_DELAY = 1
+
+ # Random delay
+ RANDOMNESS = False  # enable random delay
+ RANDOM_RANGE = [0.5, 1.5]  # random delay factor range; effective delay ranges from DOWNLOAD_DELAY * RANDOM_RANGE[0] to DOWNLOAD_DELAY * RANDOM_RANGE[1]
+
+ # Depth priority (negative = depth-first, positive = breadth-first)
+ DEPTH_PRIORITY = 1
+
+ # Maximum scheduler queue size
+ SCHEDULER_MAX_QUEUE_SIZE = 1000
+
+ # Scheduler queue name (follows the unified naming convention)
+ SCHEDULER_QUEUE_NAME = f"crawlo:{PROJECT_NAME}:queue:requests"
+
+ # Queue type: memory/redis/auto
+ QUEUE_TYPE = 'auto'
+
+ # Default dedup pipeline (selected automatically by run mode)
+ DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
+
+ # Request dedup filter
+ FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
+ # FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'  # distributed dedup
+
+ # --- Redis filter settings ---
+ # Fetch the Redis settings via the environment-variable helper
+ redis_config = get_redis_config()
+ REDIS_HOST = redis_config['REDIS_HOST']
+ REDIS_PORT = redis_config['REDIS_PORT']
+ REDIS_PASSWORD = redis_config['REDIS_PASSWORD']
+ REDIS_DB = redis_config['REDIS_DB']
+
+ # 🔧 Build the URL differently depending on whether a password is set
+ if REDIS_PASSWORD:
+     REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
+ else:
+     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
+
+ # Unified Redis key naming convention
+ # REDIS_KEY_PREFIX has moved into the individual components, which share this convention:
+ #   crawlo:{PROJECT_NAME}:filter:fingerprint  (request dedup)
+ #   crawlo:{PROJECT_NAME}:item:fingerprint  (item dedup)
+ #   crawlo:{PROJECT_NAME}:queue:requests  (request queue)
+ #   crawlo:{PROJECT_NAME}:queue:processing  (in-progress queue)
+ #   crawlo:{PROJECT_NAME}:queue:failed  (failed queue)
+
+ REDIS_TTL = 0  # fingerprint TTL (0 = never expire)
+ CLEANUP_FP = 0  # clean up fingerprints on shutdown (0 = keep)
+ FILTER_DEBUG = True  # enable dedup debug logging
+ DECODE_RESPONSES = True  # decode Redis responses to strings
+
+ # ============================== Middleware ==============================
+
+ MIDDLEWARES = [
+     # === Request preprocessing ===
+     'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',  # 1. drop invalid requests
+     'crawlo.middleware.download_delay.DownloadDelayMiddleware',  # 2. throttle request rate
+     'crawlo.middleware.default_header.DefaultHeaderMiddleware',  # 3. add default headers
+     'crawlo.middleware.proxy.ProxyMiddleware',  # 4. set proxies
+
+     # === Response processing ===
+     'crawlo.middleware.retry.RetryMiddleware',  # 5. retry failed requests
+     'crawlo.middleware.response_code.ResponseCodeMiddleware',  # 6. handle special status codes
+     'crawlo.middleware.response_filter.ResponseFilterMiddleware',  # 7. filter response content
+ ]
+
+ # ============================== Extensions and pipelines ==============================
+
+ # Item pipelines (enabled storage backends)
+ PIPELINES = [
+     'crawlo.pipelines.console_pipeline.ConsolePipeline',  # console output
+     # 'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage (optional)
+ ]
+
+ # Prepend the default dedup pipeline based on run mode
+ if RUN_MODE == 'distributed':
+     # Redis dedup pipeline in distributed mode
+     PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
+ else:
+     # in-memory dedup pipeline in standalone mode
+     PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
+
+ # Extensions (monitoring and logging)
+ EXTENSIONS = [
+     'crawlo.extension.log_interval.LogIntervalExtension',  # periodic logging
+     'crawlo.extension.log_stats.LogStats',  # statistics
+     'crawlo.extension.logging_extension.CustomLoggerExtension',  # custom logger
+     # 'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # memory monitoring
+     # 'crawlo.extension.request_recorder.RequestRecorderExtension',  # request recording
+     # 'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # profiling
+     # 'crawlo.extension.health_check.HealthCheckExtension',  # health checks
+ ]
+
+ # ============================== Logging and monitoring ==============================
+
+ LOG_LEVEL = 'INFO'  # log level: DEBUG/INFO/WARNING/ERROR
+ STATS_DUMP = True  # dump statistics periodically
+ LOG_FILE = f'logs/{PROJECT_NAME}.log'  # log file path
+ LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
+ LOG_ENCODING = 'utf-8'
+
+ # ============================== Proxy settings ==============================
+
+ PROXY_ENABLED = False  # enable proxies
+ PROXY_API_URL = "https://api.proxyprovider.com/get"  # proxy API endpoint (replace with a real one)
+
+ # Proxy extraction (supports a field path or a function)
+ PROXY_EXTRACTOR = "proxy"  # e.g. for a response like {"proxy": "http://1.1.1.1:8080"}
+
+ # Proxy refresh control
+ PROXY_REFRESH_INTERVAL = 60  # proxy refresh interval (seconds)
+ PROXY_API_TIMEOUT = 10  # proxy API request timeout
+
+ # ============================== Curl-Cffi specific settings ==============================
+
+ # Browser fingerprint emulation (CurlCffi downloader only)
+ CURL_BROWSER_TYPE = "chrome"  # options: chrome, edge, safari, firefox, or a version such as chrome136
+
+ # Custom browser version map (overrides the default behavior)
+ CURL_BROWSER_VERSION_MAP = {
+     "chrome": "chrome136",
+     "edge": "edge101",
+     "safari": "safari184",
+     "firefox": "firefox135",
+     # example: testing an older version
+     # "chrome_legacy": "chrome110",
+ }
+
+ # Curl-Cffi tuning
+ CURL_RANDOMIZE_DELAY = False  # enable random delay
+ CURL_RETRY_BACKOFF = True  # enable exponential backoff on retries
+
+ # Default request headers (can be overridden per Spider)
+ DEFAULT_REQUEST_HEADERS = {
+     'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
+                   '(KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36',
+ }
+
+ # ============================== Downloader tuning ==============================
+
+ # Downloader health checks
+ DOWNLOADER_HEALTH_CHECK = True  # enable downloader health checks
+ HEALTH_CHECK_INTERVAL = 60  # health check interval (seconds)
+
+ # Request statistics
+ REQUEST_STATS_ENABLED = True  # enable request statistics
+ STATS_RESET_ON_START = False  # reset statistics on startup
+
+ # HttpX downloader settings
+ HTTPX_HTTP2 = True  # enable HTTP/2 support
+ HTTPX_FOLLOW_REDIRECTS = True  # follow redirects automatically
+
+ # AioHttp downloader settings
+ AIOHTTP_AUTO_DECOMPRESS = True  # decompress responses automatically
+ AIOHTTP_FORCE_CLOSE = False  # force-close connections
+
+ # ============================== Selenium downloader settings ==============================
+
+ # Selenium basics
+ SELENIUM_BROWSER_TYPE = "chrome"  # browser type: chrome, firefox, edge
+ SELENIUM_HEADLESS = True  # headless mode
+ SELENIUM_TIMEOUT = 30  # timeout (seconds)
+ SELENIUM_LOAD_TIMEOUT = 10  # page load timeout (seconds)
+ SELENIUM_WINDOW_WIDTH = 1920  # window width
+ SELENIUM_WINDOW_HEIGHT = 1080  # window height
+ SELENIUM_WAIT_FOR_ELEMENT = None  # selector of an element to wait for
+ SELENIUM_ENABLE_JS = True  # enable JavaScript
+ SELENIUM_PROXY = None  # proxy settings
+ SELENIUM_SINGLE_BROWSER_MODE = True  # one browser with multiple tabs
+ SELENIUM_MAX_TABS_PER_BROWSER = 10  # maximum tabs per browser
+
+ # ============================== Playwright downloader settings ==============================
+
+ # Playwright basics
+ PLAYWRIGHT_BROWSER_TYPE = "chromium"  # browser type: chromium, firefox, webkit
+ PLAYWRIGHT_HEADLESS = True  # headless mode
+ PLAYWRIGHT_TIMEOUT = 30000  # timeout (milliseconds)
+ PLAYWRIGHT_LOAD_TIMEOUT = 10000  # page load timeout (milliseconds)
+ PLAYWRIGHT_VIEWPORT_WIDTH = 1920  # viewport width
+ PLAYWRIGHT_VIEWPORT_HEIGHT = 1080  # viewport height
+ PLAYWRIGHT_WAIT_FOR_ELEMENT = None  # selector of an element to wait for
+ PLAYWRIGHT_PROXY = None  # proxy settings
+ PLAYWRIGHT_SINGLE_BROWSER_MODE = True  # one browser with multiple pages
+ PLAYWRIGHT_MAX_PAGES_PER_BROWSER = 10  # maximum pages per browser
+
+ # General tuning
+ CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
  CONNECTION_KEEPALIVE_TIMEOUT = 15  # Keep-Alive timeout (seconds)
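
The new defaults ship with RANDOMNESS off and RANDOM_RANGE = [0.5, 1.5]; per the inline comment, the effective delay is DOWNLOAD_DELAY scaled by a factor drawn from that range. A minimal sketch of that arithmetic (the effective_delay helper is ours for illustration, not part of the package):

    import random

    DOWNLOAD_DELAY = 1          # base delay in seconds, as in the 1.1.6 defaults
    RANDOM_RANGE = [0.5, 1.5]   # scaling-factor range
    RANDOMNESS = True           # off by default in 1.1.6; enabled here to show the effect

    def effective_delay() -> float:
        # Effective delay as the settings comment describes it:
        # DOWNLOAD_DELAY * RANDOM_RANGE[0] up to DOWNLOAD_DELAY * RANDOM_RANGE[1].
        if not RANDOMNESS:
            return DOWNLOAD_DELAY
        return DOWNLOAD_DELAY * random.uniform(*RANDOM_RANGE)

With these values, each request waits between 0.5 and 1.5 seconds.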
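
Both versions assemble REDIS_URL the same way; what changed is where the parameters come from: 1.1.4 read them directly from environment variables, while 1.1.6 routes them through crawlo.utils.env_config.get_redis_config(). A sketch of the shared URL logic, assuming (not verified from the diff) that get_redis_config() preserves the 1.1.4 defaults:

    import os

    # 1.1.4-style reads; 1.1.6 wraps the same environment variables.
    REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1')
    REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
    REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')
    REDIS_DB = int(os.getenv('REDIS_DB', 0))

    # URL construction exactly as in both versions of the settings file:
    # include the password component only when a password is configured.
    if REDIS_PASSWORD:
        REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
    else:
        REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'

With REDIS_PASSWORD=secret exported this yields redis://:secret@127.0.0.1:6379/0; with no password, redis://127.0.0.1:6379/0.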
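
1.1.6 also replaces the flat key names from 1.1.4 (SCHEDULER_QUEUE_NAME = 'crawlo:requests', REDIS_KEY = 'request_fingerprint') with per-project keys of the form crawlo:{PROJECT_NAME}:{component}:{name}. A sketch of that convention; the redis_key helper is hypothetical, since the package builds these strings inside its own components:

    def redis_key(project_name: str, component: str, name: str) -> str:
        # Unified naming convention documented in the 1.1.6 default settings.
        return f"crawlo:{project_name}:{component}:{name}"

    # The five keys listed in the new settings comments, for a project named 'demo':
    assert redis_key('demo', 'filter', 'fingerprint') == 'crawlo:demo:filter:fingerprint'
    assert redis_key('demo', 'item', 'fingerprint') == 'crawlo:demo:item:fingerprint'
    assert redis_key('demo', 'queue', 'requests') == 'crawlo:demo:queue:requests'
    assert redis_key('demo', 'queue', 'processing') == 'crawlo:demo:queue:processing'
    assert redis_key('demo', 'queue', 'failed') == 'crawlo:demo:queue:failed'

Namespacing by project lets several crawls share one Redis instance without colliding on queues or fingerprints, which the old flat names could not guarantee.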
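
Finally, this release adds Selenium and Playwright downloaders (crawlo/downloader/selenium_downloader.py and crawlo/downloader/playwright_downloader.py in the file list). Following the class-path pattern the settings already use for the aiohttp downloader, enabling one in a project should look roughly like the sketch below; the PlaywrightDownloader class name is our assumption based on that pattern, so check the module before relying on it:

    # Hypothetical overrides in a project's settings.py, following the
    # documented DOWNLOADER class-path pattern. The class name below is an
    # assumption; verify it in crawlo/downloader/playwright_downloader.py.
    DOWNLOADER = 'crawlo.downloader.playwright_downloader.PlaywrightDownloader'
    PLAYWRIGHT_BROWSER_TYPE = 'chromium'       # from the new defaults
    PLAYWRIGHT_HEADLESS = True
    PLAYWRIGHT_WAIT_FOR_ELEMENT = '#content'   # example selector; the default is None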