crawlo-1.2.3-py3-none-any.whl → crawlo-1.2.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: the registry flags this version of crawlo as possibly problematic.

Files changed (222)
  1. crawlo/__init__.py +61 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +60 -60
  4. crawlo/cleaners/data_formatter.py +225 -225
  5. crawlo/cleaners/encoding_converter.py +125 -125
  6. crawlo/cleaners/text_cleaner.py +232 -232
  7. crawlo/cli.py +81 -81
  8. crawlo/commands/__init__.py +14 -14
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/help.py +144 -142
  12. crawlo/commands/list.py +155 -155
  13. crawlo/commands/run.py +323 -292
  14. crawlo/commands/startproject.py +420 -417
  15. crawlo/commands/stats.py +187 -187
  16. crawlo/commands/utils.py +186 -186
  17. crawlo/config.py +312 -312
  18. crawlo/config_validator.py +251 -251
  19. crawlo/core/__init__.py +2 -2
  20. crawlo/core/engine.py +354 -354
  21. crawlo/core/processor.py +40 -40
  22. crawlo/core/scheduler.py +143 -143
  23. crawlo/crawler.py +1110 -1027
  24. crawlo/data/__init__.py +5 -5
  25. crawlo/data/user_agents.py +107 -107
  26. crawlo/downloader/__init__.py +266 -266
  27. crawlo/downloader/aiohttp_downloader.py +220 -220
  28. crawlo/downloader/cffi_downloader.py +256 -256
  29. crawlo/downloader/httpx_downloader.py +259 -259
  30. crawlo/downloader/hybrid_downloader.py +212 -212
  31. crawlo/downloader/playwright_downloader.py +402 -402
  32. crawlo/downloader/selenium_downloader.py +472 -472
  33. crawlo/event.py +11 -11
  34. crawlo/exceptions.py +81 -81
  35. crawlo/extension/__init__.py +37 -37
  36. crawlo/extension/health_check.py +141 -141
  37. crawlo/extension/log_interval.py +57 -57
  38. crawlo/extension/log_stats.py +81 -81
  39. crawlo/extension/logging_extension.py +43 -43
  40. crawlo/extension/memory_monitor.py +104 -104
  41. crawlo/extension/performance_profiler.py +133 -133
  42. crawlo/extension/request_recorder.py +107 -107
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +280 -280
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/items/__init__.py +23 -23
  47. crawlo/items/base.py +21 -21
  48. crawlo/items/fields.py +52 -52
  49. crawlo/items/items.py +104 -104
  50. crawlo/middleware/__init__.py +21 -21
  51. crawlo/middleware/default_header.py +131 -131
  52. crawlo/middleware/download_delay.py +104 -104
  53. crawlo/middleware/middleware_manager.py +135 -135
  54. crawlo/middleware/offsite.py +114 -114
  55. crawlo/middleware/proxy.py +367 -367
  56. crawlo/middleware/request_ignore.py +86 -86
  57. crawlo/middleware/response_code.py +163 -163
  58. crawlo/middleware/response_filter.py +136 -136
  59. crawlo/middleware/retry.py +124 -124
  60. crawlo/mode_manager.py +211 -211
  61. crawlo/network/__init__.py +21 -21
  62. crawlo/network/request.py +338 -338
  63. crawlo/network/response.py +359 -359
  64. crawlo/pipelines/__init__.py +21 -21
  65. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  66. crawlo/pipelines/console_pipeline.py +39 -39
  67. crawlo/pipelines/csv_pipeline.py +316 -316
  68. crawlo/pipelines/database_dedup_pipeline.py +222 -222
  69. crawlo/pipelines/json_pipeline.py +218 -218
  70. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  71. crawlo/pipelines/mongo_pipeline.py +131 -131
  72. crawlo/pipelines/mysql_pipeline.py +317 -317
  73. crawlo/pipelines/pipeline_manager.py +61 -61
  74. crawlo/pipelines/redis_dedup_pipeline.py +165 -165
  75. crawlo/project.py +279 -187
  76. crawlo/queue/pqueue.py +37 -37
  77. crawlo/queue/queue_manager.py +337 -337
  78. crawlo/queue/redis_priority_queue.py +298 -298
  79. crawlo/settings/__init__.py +7 -7
  80. crawlo/settings/default_settings.py +217 -226
  81. crawlo/settings/setting_manager.py +122 -122
  82. crawlo/spider/__init__.py +639 -639
  83. crawlo/stats_collector.py +59 -59
  84. crawlo/subscriber.py +129 -129
  85. crawlo/task_manager.py +30 -30
  86. crawlo/templates/crawlo.cfg.tmpl +10 -10
  87. crawlo/templates/project/__init__.py.tmpl +3 -3
  88. crawlo/templates/project/items.py.tmpl +17 -17
  89. crawlo/templates/project/middlewares.py.tmpl +118 -118
  90. crawlo/templates/project/pipelines.py.tmpl +96 -96
  91. crawlo/templates/project/run.py.tmpl +47 -45
  92. crawlo/templates/project/settings.py.tmpl +350 -325
  93. crawlo/templates/project/settings_distributed.py.tmpl +160 -121
  94. crawlo/templates/project/settings_gentle.py.tmpl +133 -94
  95. crawlo/templates/project/settings_high_performance.py.tmpl +155 -151
  96. crawlo/templates/project/settings_simple.py.tmpl +108 -68
  97. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  98. crawlo/templates/spider/spider.py.tmpl +143 -143
  99. crawlo/tools/__init__.py +182 -182
  100. crawlo/tools/anti_crawler.py +268 -268
  101. crawlo/tools/authenticated_proxy.py +240 -240
  102. crawlo/tools/data_validator.py +180 -180
  103. crawlo/tools/date_tools.py +35 -35
  104. crawlo/tools/distributed_coordinator.py +386 -386
  105. crawlo/tools/retry_mechanism.py +220 -220
  106. crawlo/tools/scenario_adapter.py +262 -262
  107. crawlo/utils/__init__.py +35 -35
  108. crawlo/utils/batch_processor.py +259 -259
  109. crawlo/utils/controlled_spider_mixin.py +439 -439
  110. crawlo/utils/date_tools.py +290 -290
  111. crawlo/utils/db_helper.py +343 -343
  112. crawlo/utils/enhanced_error_handler.py +356 -356
  113. crawlo/utils/env_config.py +105 -105
  114. crawlo/utils/error_handler.py +123 -123
  115. crawlo/utils/func_tools.py +82 -82
  116. crawlo/utils/large_scale_config.py +286 -286
  117. crawlo/utils/large_scale_helper.py +344 -344
  118. crawlo/utils/log.py +128 -128
  119. crawlo/utils/performance_monitor.py +285 -285
  120. crawlo/utils/queue_helper.py +175 -175
  121. crawlo/utils/redis_connection_pool.py +334 -334
  122. crawlo/utils/redis_key_validator.py +198 -198
  123. crawlo/utils/request.py +267 -267
  124. crawlo/utils/request_serializer.py +218 -218
  125. crawlo/utils/spider_loader.py +61 -61
  126. crawlo/utils/system.py +11 -11
  127. crawlo/utils/tools.py +4 -4
  128. crawlo/utils/url.py +39 -39
  129. {crawlo-1.2.3.dist-info → crawlo-1.2.4.dist-info}/METADATA +764 -692
  130. crawlo-1.2.4.dist-info/RECORD +206 -0
  131. examples/__init__.py +7 -7
  132. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  133. tests/__init__.py +7 -7
  134. tests/advanced_tools_example.py +275 -275
  135. tests/authenticated_proxy_example.py +236 -236
  136. tests/cleaners_example.py +160 -160
  137. tests/config_validation_demo.py +102 -102
  138. tests/controlled_spider_example.py +205 -205
  139. tests/date_tools_example.py +180 -180
  140. tests/dynamic_loading_example.py +523 -523
  141. tests/dynamic_loading_test.py +104 -104
  142. tests/env_config_example.py +133 -133
  143. tests/error_handling_example.py +171 -171
  144. tests/redis_key_validation_demo.py +130 -130
  145. tests/response_improvements_example.py +144 -144
  146. tests/test_advanced_tools.py +148 -148
  147. tests/test_all_redis_key_configs.py +145 -145
  148. tests/test_authenticated_proxy.py +141 -141
  149. tests/test_cleaners.py +54 -54
  150. tests/test_comprehensive.py +146 -146
  151. tests/test_config_validator.py +193 -193
  152. tests/test_crawlo_proxy_integration.py +172 -172
  153. tests/test_date_tools.py +123 -123
  154. tests/test_default_header_middleware.py +158 -158
  155. tests/test_double_crawlo_fix.py +207 -207
  156. tests/test_double_crawlo_fix_simple.py +124 -124
  157. tests/test_download_delay_middleware.py +221 -221
  158. tests/test_downloader_proxy_compatibility.py +268 -268
  159. tests/test_dynamic_downloaders_proxy.py +124 -124
  160. tests/test_dynamic_proxy.py +92 -92
  161. tests/test_dynamic_proxy_config.py +146 -146
  162. tests/test_dynamic_proxy_real.py +109 -109
  163. tests/test_edge_cases.py +303 -303
  164. tests/test_enhanced_error_handler.py +270 -270
  165. tests/test_env_config.py +121 -121
  166. tests/test_error_handler_compatibility.py +112 -112
  167. tests/test_final_validation.py +153 -153
  168. tests/test_framework_env_usage.py +103 -103
  169. tests/test_integration.py +356 -356
  170. tests/test_item_dedup_redis_key.py +122 -122
  171. tests/test_offsite_middleware.py +221 -221
  172. tests/test_parsel.py +29 -29
  173. tests/test_performance.py +327 -327
  174. tests/test_proxy_api.py +264 -264
  175. tests/test_proxy_health_check.py +32 -32
  176. tests/test_proxy_middleware.py +121 -121
  177. tests/test_proxy_middleware_enhanced.py +216 -216
  178. tests/test_proxy_middleware_integration.py +136 -136
  179. tests/test_proxy_providers.py +56 -56
  180. tests/test_proxy_stats.py +19 -19
  181. tests/test_proxy_strategies.py +59 -59
  182. tests/test_queue_manager_double_crawlo.py +173 -173
  183. tests/test_queue_manager_redis_key.py +176 -176
  184. tests/test_real_scenario_proxy.py +195 -195
  185. tests/test_redis_config.py +28 -28
  186. tests/test_redis_connection_pool.py +294 -294
  187. tests/test_redis_key_naming.py +181 -181
  188. tests/test_redis_key_validator.py +123 -123
  189. tests/test_redis_queue.py +224 -224
  190. tests/test_request_ignore_middleware.py +182 -182
  191. tests/test_request_serialization.py +70 -70
  192. tests/test_response_code_middleware.py +349 -349
  193. tests/test_response_filter_middleware.py +427 -427
  194. tests/test_response_improvements.py +152 -152
  195. tests/test_retry_middleware.py +241 -241
  196. tests/test_scheduler.py +241 -241
  197. tests/test_simple_response.py +61 -61
  198. tests/test_telecom_spider_redis_key.py +205 -205
  199. tests/test_template_content.py +87 -87
  200. tests/test_template_redis_key.py +134 -134
  201. tests/test_tools.py +153 -153
  202. tests/tools_example.py +257 -257
  203. crawlo-1.2.3.dist-info/RECORD +0 -222
  204. examples/aiohttp_settings.py +0 -42
  205. examples/curl_cffi_settings.py +0 -41
  206. examples/default_header_middleware_example.py +0 -107
  207. examples/default_header_spider_example.py +0 -129
  208. examples/download_delay_middleware_example.py +0 -160
  209. examples/httpx_settings.py +0 -42
  210. examples/multi_downloader_proxy_example.py +0 -81
  211. examples/offsite_middleware_example.py +0 -55
  212. examples/offsite_spider_example.py +0 -107
  213. examples/proxy_spider_example.py +0 -166
  214. examples/request_ignore_middleware_example.py +0 -51
  215. examples/request_ignore_spider_example.py +0 -99
  216. examples/response_code_middleware_example.py +0 -52
  217. examples/response_filter_middleware_example.py +0 -67
  218. examples/tong_hua_shun_settings.py +0 -62
  219. examples/tong_hua_shun_spider.py +0 -170
  220. {crawlo-1.2.3.dist-info → crawlo-1.2.4.dist-info}/WHEEL +0 -0
  221. {crawlo-1.2.3.dist-info → crawlo-1.2.4.dist-info}/entry_points.txt +0 -0
  222. {crawlo-1.2.3.dist-info → crawlo-1.2.4.dist-info}/top_level.txt +0 -0
The hunk below is from crawlo/templates/project/settings.py.tmpl (+350 −325), the template with the most substantive changes in this release: VERSION now follows the installed crawlo package instead of a hardcoded string, QUEUE_TYPE defaults to 'auto' instead of 'memory', the mode-dependent if/else blocks around the dedup pipeline (whose two branches executed identical statements) are replaced by explicit defaults, and the run-mode and queue-type guidance is greatly expanded.

@@ -1,326 +1,351 @@
- # -*- coding: UTF-8 -*-
- """
- {{project_name}} project configuration file
- =============================
- Spider project configuration based on the Crawlo framework.
-
- 🎯 Quick start:
-
- # Option 1: use the default standalone mode (recommended)
- from crawlo.crawler import CrawlerProcess
- process = CrawlerProcess()  # no configuration required
-
- # Option 2: use the config factory
- from crawlo.config import CrawloConfig
- config = CrawloConfig.standalone()  # standalone mode
- config = CrawloConfig.distributed(redis_host='192.168.1.100')  # distributed mode
- process = CrawlerProcess(settings=config.to_dict())
-
- # Option 3: use environment variables
- from crawlo.config import CrawloConfig
- config = CrawloConfig.from_env()  # read from environment variables
- """
- import os
- from crawlo.config import CrawloConfig
-
- # ============================== Project Basics ==============================
- PROJECT_NAME = '{{project_name}}'
- VERSION = '1.0.0'
-
- # ============================== Run Mode Selection ==============================
-
- # 🎯 Pick one configuration style:
-
- # Option 1: the config factory (recommended)
- # Standalone mode (default)
- CONFIG = CrawloConfig.standalone(
-     concurrency=8,
-     download_delay=1.0
- )
-
- # Distributed mode (uncomment and adjust the Redis address)
- # CONFIG = CrawloConfig.distributed(
- #     redis_host='127.0.0.1',
- #     redis_password='your_password',  # if a password is set
- #     project_name='{{project_name}}',
- #     concurrency=16,
- #     download_delay=1.0
- # )
-
- # Auto-detect mode
- # CONFIG = CrawloConfig.auto(concurrency=12)
-
- # Option 2: read from environment variables (suited to deployment)
- # CONFIG = CrawloConfig.from_env()
-
- # Option 3: preset configurations
- # from crawlo.config import Presets
- # CONFIG = Presets.development()  # development environment
- # CONFIG = Presets.production()  # production environment
-
- # Apply the final configuration
- locals().update(CONFIG.to_dict())
-
- # ============================== Network Request Settings ==============================
-
- # Downloader selection (CurlCffi is recommended; it supports browser fingerprint emulation)
- DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"  # HTTP/2 support
- # DOWNLOADER = "crawlo.downloader.cffi_downloader.CurlCffiDownloader"  # browser fingerprinting
- # DOWNLOADER = "crawlo.downloader.aiohttp_downloader.AioHttpDownloader"  # lightweight option
-
- # Request timeout and security
- DOWNLOAD_TIMEOUT = 30
- VERIFY_SSL = True
- USE_SESSION = True
-
- # Request delay control (anti-bot mitigation)
- DOWNLOAD_DELAY = 1.0
- RANDOM_RANGE = (0.5, 1.5)
- RANDOMNESS = False
-
- # Retry policy
- MAX_RETRY_TIMES = 3
- RETRY_PRIORITY = -1
- RETRY_HTTP_CODES = [408, 429, 500, 502, 503, 504, 522, 524]
- IGNORE_HTTP_CODES = [403, 404]
- ALLOWED_RESPONSE_CODES = []  # status codes allowed by ResponseFilterMiddleware
- DENIED_RESPONSE_CODES = []  # status codes rejected by ResponseFilterMiddleware
-
- # Connection pool settings
- CONNECTION_POOL_LIMIT = 50
- DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # 10MB
- DOWNLOAD_WARN_SIZE = 1024 * 1024  # 1MB
- DOWNLOAD_RETRY_TIMES = MAX_RETRY_TIMES  # downloader-internal retries (reuses the global value)
-
- # Download statistics
- DOWNLOADER_STATS = True  # enable downloader statistics
- DOWNLOAD_STATS = True  # record download time and size statistics
-
- # ============================== Concurrency & Scheduling ==============================
-
- CONCURRENCY = 8
- INTERVAL = 5
- DEPTH_PRIORITY = 1
- MAX_RUNNING_SPIDERS = 3
-
- # Run mode: 'standalone', 'distributed', or 'auto'
- RUN_MODE = 'standalone'  # standalone by default; simple and easy to use
-
- # ============================== Queue Settings (distributed-ready) ==============================
-
- # Queue type: 'auto' (automatic), 'memory' (in-memory), 'redis' (distributed)
- QUEUE_TYPE = 'memory'
- SCHEDULER_MAX_QUEUE_SIZE = 2000
- SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'  # unified naming convention
- QUEUE_MAX_RETRIES = 3
- QUEUE_TIMEOUT = 300
-
- # Large-scale crawl tuning
- LARGE_SCALE_BATCH_SIZE = 1000  # batch size
- LARGE_SCALE_CHECKPOINT_INTERVAL = 5000  # checkpoint interval
- LARGE_SCALE_MAX_MEMORY_USAGE = 500  # maximum memory usage (MB)
-
- # ============================== Data Storage ==============================
-
- # --- MySQL settings ---
- MYSQL_HOST = os.getenv('MYSQL_HOST', '127.0.0.1')
- MYSQL_PORT = int(os.getenv('MYSQL_PORT', 3306))
- MYSQL_USER = os.getenv('MYSQL_USER', 'root')
- MYSQL_PASSWORD = os.getenv('MYSQL_PASSWORD', '123456')
- MYSQL_DB = os.getenv('MYSQL_DB', '{{project_name}}')
- MYSQL_TABLE = '{{project_name}}_data'
- MYSQL_BATCH_SIZE = 100
- MYSQL_USE_BATCH = False  # enable batched inserts
-
- # --- MongoDB settings ---
- MONGO_URI = os.getenv('MONGO_URI', 'mongodb://localhost:27017')
- MONGO_DATABASE = '{{project_name}}_db'
- MONGO_COLLECTION = '{{project_name}}_items'
- MONGO_MAX_POOL_SIZE = 200
- MONGO_MIN_POOL_SIZE = 20
- MONGO_BATCH_SIZE = 100  # batch insert size
- MONGO_USE_BATCH = False  # enable batched inserts
-
- # ============================== Deduplication & Filtering ==============================
-
- REQUEST_DIR = '.'
-
- # Select the dedup pipeline automatically from the run mode
- if RUN_MODE == 'distributed':
-     # Distributed mode defaults to the Redis dedup pipeline
-     DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
- else:
-     # Standalone mode defaults to the in-memory dedup pipeline
-     DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
-
- # Dedup filter (the Redis filter is recommended for distributed projects)
- FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
-
- # --- Redis settings (for distributed dedup and queues) ---
- REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1')
- REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
- REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')
-
- # Build the URL according to whether a password is set
- if REDIS_PASSWORD:
-     REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
- else:
-     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'
-
- # Redis key settings now live in the individual components, under a unified naming convention:
- # crawlo:{project_name}:filter:fingerprint  (request dedup)
- # crawlo:{project_name}:item:fingerprint  (item dedup)
- # crawlo:{project_name}:queue:requests  (request queue)
- # crawlo:{project_name}:queue:processing  (in-flight queue)
- # crawlo:{project_name}:queue:failed  (failed queue)
-
- REDIS_TTL = 0
- CLEANUP_FP = 0
- FILTER_DEBUG = True
- DECODE_RESPONSES = True
-
- # ============================== Middleware ==============================
-
- MIDDLEWARES = [
-     # === request preprocessing ===
-     'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',
-     'crawlo.middleware.download_delay.DownloadDelayMiddleware',
-     'crawlo.middleware.default_header.DefaultHeaderMiddleware',
-     'crawlo.middleware.proxy.ProxyMiddleware',
-     'crawlo.middleware.offsite.OffsiteMiddleware',
-
-     # === response processing ===
-     'crawlo.middleware.retry.RetryMiddleware',
-     'crawlo.middleware.response_code.ResponseCodeMiddleware',
-     'crawlo.middleware.response_filter.ResponseFilterMiddleware',
- ]
-
- # ============================== Item Pipelines ==============================
-
- # Item pipelines (enabled storage backends)
- PIPELINES = [
-     'crawlo.pipelines.console_pipeline.ConsolePipeline',
-     # '{{project_name}}.pipelines.DatabasePipeline',  # custom database pipeline
-     # 'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
-     # 'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
- ]
-
- # Prepend the default dedup pipeline according to the run mode
- if RUN_MODE == 'distributed':
-     # Distributed mode: prepend the Redis dedup pipeline
-     PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
- else:
-     # Standalone mode: prepend the in-memory dedup pipeline
-     PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
-
- # ============================== Extensions ==============================
-
- EXTENSIONS = [
-     'crawlo.extension.log_interval.LogIntervalExtension',
-     'crawlo.extension.log_stats.LogStats',
-     'crawlo.extension.logging_extension.CustomLoggerExtension',
-     # 'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # memory monitoring
-     # 'crawlo.extension.request_recorder.RequestRecorderExtension',  # request recording
-     # 'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # performance profiling
-     # 'crawlo.extension.health_check.HealthCheckExtension',  # health checks
- ]
-
- # ============================== Extension Settings ==============================
-
- # Memory monitor extension
- # MEMORY_MONITOR_ENABLED = True  # enable memory monitoring
- # MEMORY_MONITOR_INTERVAL = 60  # memory check interval (seconds)
- # MEMORY_WARNING_THRESHOLD = 80.0  # memory usage warning threshold (percent)
- # MEMORY_CRITICAL_THRESHOLD = 90.0  # memory usage critical threshold (percent)
-
- # Request recorder extension
- # REQUEST_RECORDER_ENABLED = True  # enable request recording
- # REQUEST_RECORDER_OUTPUT_DIR = 'requests_log'  # output directory for request logs
- # REQUEST_RECORDER_MAX_FILE_SIZE = 10 * 1024 * 1024  # maximum size per log file (bytes)
-
- # Performance profiler extension
- # PERFORMANCE_PROFILER_ENABLED = True  # enable performance profiling
- # PERFORMANCE_PROFILER_OUTPUT_DIR = 'profiling'  # profiling output directory
- # PERFORMANCE_PROFILER_INTERVAL = 300  # interval between periodic result dumps (seconds)
-
- # Health check extension
- # HEALTH_CHECK_ENABLED = True  # enable health checks
- # HEALTH_CHECK_INTERVAL = 60  # health check interval (seconds)
-
- # ============================== Logging ==============================
-
- LOG_LEVEL = 'INFO'
- STATS_DUMP = True
- LOG_FILE = f'logs/{{project_name}}.log'
- LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
- LOG_ENCODING = 'utf-8'
-
- # ============================== Proxy Settings ==============================
-
- PROXY_ENABLED = False
- PROXY_API_URL = ""  # fill in a real proxy API endpoint
- PROXY_EXTRACTOR = "proxy"
- PROXY_REFRESH_INTERVAL = 60
- PROXY_API_TIMEOUT = 10
-
- # ============================== Browser Fingerprinting ==============================
-
- # CurlCffi downloader settings
- CURL_BROWSER_TYPE = "chrome"
- CURL_BROWSER_VERSION_MAP = {
-     "chrome": "chrome136",
-     "edge": "edge101",
-     "safari": "safari184",
-     "firefox": "firefox135",
- }
-
- # Default request headers
- DEFAULT_REQUEST_HEADERS = {
-     'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
-                   '(KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36',
-     'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
-     'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
-     'Accept-Encoding': 'gzip, deflate, br',
-     'Connection': 'keep-alive',
-     'Upgrade-Insecure-Requests': '1',
- }
-
- # ============================== Downloader Tuning ==============================
-
- # Downloader health checks
- DOWNLOADER_HEALTH_CHECK = True  # enable downloader health checks
- HEALTH_CHECK_INTERVAL = 60  # health check interval (seconds)
-
- # Request statistics
- REQUEST_STATS_ENABLED = True  # enable request statistics
- STATS_RESET_ON_START = False  # reset statistics at startup
-
- # HttpX downloader settings
- HTTPX_HTTP2 = True  # enable HTTP/2 support
- HTTPX_FOLLOW_REDIRECTS = True  # follow redirects automatically
-
- # AioHttp downloader settings
- AIOHTTP_AUTO_DECOMPRESS = True  # decompress responses automatically
- AIOHTTP_FORCE_CLOSE = False  # force-close connections
-
- # General tuning
- CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
- CONNECTION_KEEPALIVE_TIMEOUT = 15  # keep-alive timeout (seconds)
-
- # ============================== Development & Debugging ==============================
-
- # Development mode
- DEBUG = False
- TESTING = False
-
- # Performance monitoring
- ENABLE_PERFORMANCE_MONITORING = True
- MEMORY_USAGE_WARNING_THRESHOLD = 500  # MB
-
- # ============================== Custom Settings ==============================
- # Add project-specific settings here.
-
- # Example: target-site-specific settings
- # TARGET_DOMAIN = '{{domain}}'
- # MAX_PAGES_PER_DOMAIN = 10000
+ # -*- coding: UTF-8 -*-
+ """
+ {{project_name}} project configuration file
+ =============================
+ Spider project configuration based on the Crawlo framework.
+
+ 🎯 Quick start:
+
+ # Option 1: use the default configuration (recommended)
+ from crawlo.crawler import CrawlerProcess
+ process = CrawlerProcess()  # no configuration required
+
+ # Option 2: use the config factory
+ from crawlo.config import CrawloConfig
+ config = CrawloConfig.standalone()  # standalone mode
+ config = CrawloConfig.distributed(redis_host='192.168.1.100')  # distributed mode
+ process = CrawlerProcess(settings=config.to_dict())
+
+ # Option 3: use environment variables
+ from crawlo.config import CrawloConfig
+ config = CrawloConfig.from_env()  # read from environment variables
+ """
+ import os
+ from crawlo.config import CrawloConfig
+
+ # ============================== Project Basics ==============================
+ PROJECT_NAME = '{{project_name}}'
+ try:
+     from crawlo import __version__
+     VERSION = __version__
+ except ImportError:
+     VERSION = '1.0.0'
+
+ # ============================== Run Mode Selection ==============================
+
+ # 🎯 Pick one configuration style (the config factory is recommended):
+
+ # Standalone mode (default): suited to development, debugging, and small-scale scraping
+ CONFIG = CrawloConfig.standalone(
+     concurrency=8,
+     download_delay=1.0
+ )
+
+ # Distributed mode: suited to large-scale scraping and multi-node coordination
+ # CONFIG = CrawloConfig.distributed(
+ #     redis_host='127.0.0.1',
+ #     redis_password='your_password',  # if a password is set
+ #     project_name='{{project_name}}',
+ #     concurrency=16,
+ #     download_delay=1.0
+ # )
+
+ # Auto-detect mode: picks the best run mode for the current environment
+ # CONFIG = CrawloConfig.auto(concurrency=12)
+
+ # Read the configuration from environment variables: suited to deployment
+ # CONFIG = CrawloConfig.from_env()
+
+ # Apply the final configuration
+ locals().update(CONFIG.to_dict())
+
+ # ============================== Network Request Settings ==============================
+
+ # Downloader selection (CurlCffi is recommended; it supports browser fingerprint emulation)
+ DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"  # HTTP/2 support
+ # DOWNLOADER = "crawlo.downloader.cffi_downloader.CurlCffiDownloader"  # browser fingerprinting
+ # DOWNLOADER = "crawlo.downloader.aiohttp_downloader.AioHttpDownloader"  # lightweight option
+
+ # Request timeout and security
+ DOWNLOAD_TIMEOUT = 30
+ VERIFY_SSL = True
+ USE_SESSION = True
+
+ # Request delay control (anti-bot mitigation)
+ DOWNLOAD_DELAY = 1.0
+ RANDOM_RANGE = (0.5, 1.5)
+ RANDOMNESS = False
+
+ # Retry policy
+ MAX_RETRY_TIMES = 3
+ RETRY_PRIORITY = -1
+ RETRY_HTTP_CODES = [408, 429, 500, 502, 503, 504, 522, 524]
+ IGNORE_HTTP_CODES = [403, 404]
+ ALLOWED_RESPONSE_CODES = []  # status codes allowed by ResponseFilterMiddleware
+ DENIED_RESPONSE_CODES = []  # status codes rejected by ResponseFilterMiddleware
+
+ # Connection pool settings
+ CONNECTION_POOL_LIMIT = 50
+ DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # 10MB
+ DOWNLOAD_WARN_SIZE = 1024 * 1024  # 1MB
+ DOWNLOAD_RETRY_TIMES = MAX_RETRY_TIMES  # downloader-internal retries (reuses the global value)
+
+ # Download statistics
+ DOWNLOADER_STATS = True  # enable downloader statistics
+ DOWNLOAD_STATS = True  # record download time and size statistics
+
+ # ============================== Concurrency & Scheduling ==============================
+
+ CONCURRENCY = 8
+ INTERVAL = 5
+ DEPTH_PRIORITY = 1
+ MAX_RUNNING_SPIDERS = 3
+
+ # ============================== Run Mode Selection ==============================
+ # Run mode: 'standalone', 'distributed', or 'auto'
+ #
+ # Best-fit scenarios for the three run modes:
+ #
+ # 1. standalone:
+ #    - Use for: development and debugging, small-scale scraping, personal projects
+ #    - Traits: simple to use, low resource footprint, no extra dependencies
+ #    - Suggested settings:
+ #      * QUEUE_TYPE = 'auto' (queue type chosen automatically)
+ #      * FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter' (in-memory filter)
+ #      * DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline' (in-memory dedup)
+ #    - Hybrid setup (recommended):
+ #      * QUEUE_TYPE = 'auto' (chosen automatically)
+ #      * FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter' (Redis filter)
+ #      * DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline' (Redis dedup)
+ #      * Benefit: persistent Redis-backed dedup while deployment stays simple
+ #
+ # 2. distributed:
+ #    - Use for: large-scale scraping, multi-node coordination, high-concurrency workloads
+ #    - Traits: scales across nodes, high throughput, requires Redis
+ #    - Suggested settings:
+ #      * QUEUE_TYPE = 'redis' (Redis queue)
+ #      * FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter' (Redis filter)
+ #      * DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline' (Redis dedup)
+ #    - Deployment requirement: Redis server connection parameters must be configured
+ #
+ # 3. auto:
+ #    - Use for: letting the framework choose the best run mode for the environment
+ #    - Traits: detects the environment and selects the run mode automatically
+ #    - Suggested settings:
+ #      * the framework picks the queue type based on Redis availability
+ #      * the in-memory filter and dedup pipeline are used by default
+ #    - Fits: running one configuration across different environments
+
+ RUN_MODE = 'standalone'  # standalone by default; simple and easy to use
+
+ # ============================== Queue Settings (distributed-ready) ==============================
+
+ # Queue type: 'auto' (automatic), 'memory' (in-memory), 'redis' (distributed)
+ #
+ # Queue type selection guide:
+ # - 'auto': recommended for most scenarios; the framework chooses based on Redis availability
+ # - 'memory': for standalone runs that do not need Redis
+ # - 'redis': for distributed deployments; requires a Redis server
+ QUEUE_TYPE = 'auto'
+ SCHEDULER_MAX_QUEUE_SIZE = 2000
+ SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'  # unified naming convention
+ QUEUE_MAX_RETRIES = 3
+ QUEUE_TIMEOUT = 300
+
+ # Large-scale crawl tuning
+ LARGE_SCALE_BATCH_SIZE = 1000  # batch size
+ LARGE_SCALE_CHECKPOINT_INTERVAL = 5000  # checkpoint interval
+ LARGE_SCALE_MAX_MEMORY_USAGE = 500  # maximum memory usage (MB)
+
+ # ============================== Data Storage ==============================
+
+ # --- MySQL settings ---
+ MYSQL_HOST = os.getenv('MYSQL_HOST', '127.0.0.1')
+ MYSQL_PORT = int(os.getenv('MYSQL_PORT', 3306))
+ MYSQL_USER = os.getenv('MYSQL_USER', 'root')
+ MYSQL_PASSWORD = os.getenv('MYSQL_PASSWORD', '123456')
+ MYSQL_DB = os.getenv('MYSQL_DB', '{{project_name}}')
+ MYSQL_TABLE = '{{project_name}}_data'
+ MYSQL_BATCH_SIZE = 100
+ MYSQL_USE_BATCH = False  # enable batched inserts
+
+ # --- MongoDB settings ---
+ MONGO_URI = os.getenv('MONGO_URI', 'mongodb://localhost:27017')
+ MONGO_DATABASE = '{{project_name}}_db'
+ MONGO_COLLECTION = '{{project_name}}_items'
+ MONGO_MAX_POOL_SIZE = 200
+ MONGO_MIN_POOL_SIZE = 20
+ MONGO_BATCH_SIZE = 100  # batch insert size
+ MONGO_USE_BATCH = False  # enable batched inserts
+
+ # ============================== Deduplication & Filtering ==============================
+
+ REQUEST_DIR = '.'
+
+ # Configure the default dedup pipeline and filter explicitly, avoiding redundant if-else branching
+ DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
+ FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
+
+ # --- Redis settings (for distributed dedup and queues) ---
+ REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1')
+ REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
+ REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')
+
+ # Build the URL according to whether a password is set
+ if REDIS_PASSWORD:
+     REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
+ else:
+     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'
+
+ # Redis key settings now live in the individual components, under a unified naming convention:
+ # crawlo:{project_name}:filter:fingerprint  (request dedup)
+ # crawlo:{project_name}:item:fingerprint  (item dedup)
+ # crawlo:{project_name}:queue:requests  (request queue)
+ # crawlo:{project_name}:queue:processing  (in-flight queue)
+ # crawlo:{project_name}:queue:failed  (failed queue)
+
+ REDIS_TTL = 0
+ CLEANUP_FP = 0
+ FILTER_DEBUG = True
+ DECODE_RESPONSES = True
+
+ # ============================== Middleware ==============================
+
+ MIDDLEWARES = [
+     # === request preprocessing ===
+     'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',
+     'crawlo.middleware.download_delay.DownloadDelayMiddleware',
+     'crawlo.middleware.default_header.DefaultHeaderMiddleware',
+     'crawlo.middleware.proxy.ProxyMiddleware',
+     'crawlo.middleware.offsite.OffsiteMiddleware',
+
+     # === response processing ===
+     'crawlo.middleware.retry.RetryMiddleware',
+     'crawlo.middleware.response_code.ResponseCodeMiddleware',
+     'crawlo.middleware.response_filter.ResponseFilterMiddleware',
+ ]
+
+ # ============================== Item Pipelines ==============================
+
+ # Item pipelines (enabled storage backends)
+ PIPELINES = [
+     'crawlo.pipelines.console_pipeline.ConsolePipeline',
+     # '{{project_name}}.pipelines.DatabasePipeline',  # custom database pipeline
+     # 'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
+     # 'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
+ ]
+
+ # Explicitly prepend the default dedup pipeline to the pipeline list
+ PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
+
+ # ============================== Extensions ==============================
+
+ EXTENSIONS = [
+     'crawlo.extension.log_interval.LogIntervalExtension',
+     'crawlo.extension.log_stats.LogStats',
+     'crawlo.extension.logging_extension.CustomLoggerExtension',
+     # 'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # memory monitoring
+     # 'crawlo.extension.request_recorder.RequestRecorderExtension',  # request recording
+     # 'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # performance profiling
+     # 'crawlo.extension.health_check.HealthCheckExtension',  # health checks
+ ]
+
+ # ============================== Extension Settings ==============================
+
+ # Memory monitor extension
+ # MEMORY_MONITOR_ENABLED = True  # enable memory monitoring
+ # MEMORY_MONITOR_INTERVAL = 60  # memory check interval (seconds)
+ # MEMORY_WARNING_THRESHOLD = 80.0  # memory usage warning threshold (percent)
+ # MEMORY_CRITICAL_THRESHOLD = 90.0  # memory usage critical threshold (percent)
+
+ # Request recorder extension
+ # REQUEST_RECORDER_ENABLED = True  # enable request recording
+ # REQUEST_RECORDER_OUTPUT_DIR = 'requests_log'  # output directory for request logs
+ # REQUEST_RECORDER_MAX_FILE_SIZE = 10 * 1024 * 1024  # maximum size per log file (bytes)
+
+ # Performance profiler extension
+ # PERFORMANCE_PROFILER_ENABLED = True  # enable performance profiling
+ # PERFORMANCE_PROFILER_OUTPUT_DIR = 'profiling'  # profiling output directory
+ # PERFORMANCE_PROFILER_INTERVAL = 300  # interval between periodic result dumps (seconds)
+
+ # Health check extension
+ # HEALTH_CHECK_ENABLED = True  # enable health checks
+ # HEALTH_CHECK_INTERVAL = 60  # health check interval (seconds)
+
+ # ============================== Logging ==============================
+
+ LOG_LEVEL = 'INFO'
+ STATS_DUMP = True
+ LOG_FILE = f'logs/{{project_name}}.log'
+ LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
+ LOG_ENCODING = 'utf-8'
+
+ # ============================== Proxy Settings ==============================
+
+ PROXY_ENABLED = False
+ PROXY_API_URL = ""  # fill in a real proxy API endpoint
+ PROXY_EXTRACTOR = "proxy"
+ PROXY_REFRESH_INTERVAL = 60
+ PROXY_API_TIMEOUT = 10
+
+ # ============================== Browser Fingerprinting ==============================
+
+ # CurlCffi downloader settings
+ CURL_BROWSER_TYPE = "chrome"
+ CURL_BROWSER_VERSION_MAP = {
+     "chrome": "chrome136",
+     "edge": "edge101",
+     "safari": "safari184",
+     "firefox": "firefox135",
+ }
+
+ # Default request headers
+ DEFAULT_REQUEST_HEADERS = {
+     'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
+                   '(KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36',
+     'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
+     'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
+     'Accept-Encoding': 'gzip, deflate, br',
+     'Connection': 'keep-alive',
+     'Upgrade-Insecure-Requests': '1',
+ }
+
+ # ============================== Downloader Tuning ==============================
+
+ # Downloader health checks
+ DOWNLOADER_HEALTH_CHECK = True  # enable downloader health checks
+ HEALTH_CHECK_INTERVAL = 60  # health check interval (seconds)
+
+ # Request statistics
+ REQUEST_STATS_ENABLED = True  # enable request statistics
+ STATS_RESET_ON_START = False  # reset statistics at startup
+
+ # HttpX downloader settings
+ HTTPX_HTTP2 = True  # enable HTTP/2 support
+ HTTPX_FOLLOW_REDIRECTS = True  # follow redirects automatically
+
+ # AioHttp downloader settings
+ AIOHTTP_AUTO_DECOMPRESS = True  # decompress responses automatically
+ AIOHTTP_FORCE_CLOSE = False  # force-close connections
+
+ # General tuning
+ CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
+ CONNECTION_KEEPALIVE_TIMEOUT = 15  # keep-alive timeout (seconds)
+
+ # ============================== Development & Debugging ==============================
+
+ # Development mode
+ DEBUG = False
+ TESTING = False
+
+ # Performance monitoring
+ ENABLE_PERFORMANCE_MONITORING = True
+ MEMORY_USAGE_WARNING_THRESHOLD = 500  # MB
+
+ # ============================== Custom Settings ==============================
+ # Add project-specific settings here.
+
+ # Example: target-site-specific settings
+ # TARGET_DOMAIN = '{{domain}}'
+ # MAX_PAGES_PER_DOMAIN = 10000
  # CUSTOM_RATE_LIMIT = 1.5
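
The template's quick-start docstring amounts to three bootstrap styles. A consolidated sketch, using only the calls that appear in the template itself (the Redis address and keyword values are the docstring's illustrative ones):

    # Three bootstrap styles from the template's quick-start docstring;
    # pick one per project.
    from crawlo.config import CrawloConfig
    from crawlo.crawler import CrawlerProcess

    # Style 1: zero configuration - defaults to standalone mode.
    process = CrawlerProcess()

    # Style 2: the config factory, standalone or distributed.
    config = CrawloConfig.distributed(
        redis_host='192.168.1.100',  # illustrative address from the docstring
        concurrency=16,
        download_delay=1.0,
    )
    process = CrawlerProcess(settings=config.to_dict())

    # Style 3: environment-driven, suited to deployment.
    config = CrawloConfig.from_env()
    process = CrawlerProcess(settings=config.to_dict())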
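The expanded run-mode notes also describe a "hybrid" standalone setup: keep standalone scheduling but point the filter and dedup pipeline at Redis so fingerprints persist across restarts. A sketch of those overrides in a project's settings.py, using only setting names from the template (the Redis address is illustrative):

    # Hybrid standalone setup, per the template's run-mode notes.
    RUN_MODE = 'standalone'
    QUEUE_TYPE = 'auto'  # in-memory queue unless Redis is detected
    FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'
    DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
    REDIS_HOST = '127.0.0.1'  # illustrative; match your Redis deployment
    REDIS_PORT = 6379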