crawlo-1.1.4-py3-none-any.whl → crawlo-1.1.6-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

This version of crawlo has been flagged as potentially problematic.

Files changed (190)
  1. crawlo/__init__.py +61 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +61 -0
  4. crawlo/cleaners/data_formatter.py +226 -0
  5. crawlo/cleaners/encoding_converter.py +126 -0
  6. crawlo/cleaners/text_cleaner.py +233 -0
  7. crawlo/cli.py +40 -40
  8. crawlo/commands/__init__.py +13 -13
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/list.py +155 -155
  12. crawlo/commands/run.py +292 -285
  13. crawlo/commands/startproject.py +419 -196
  14. crawlo/commands/stats.py +188 -188
  15. crawlo/commands/utils.py +186 -186
  16. crawlo/config.py +312 -279
  17. crawlo/config_validator.py +253 -0
  18. crawlo/core/__init__.py +2 -2
  19. crawlo/core/engine.py +346 -172
  20. crawlo/core/processor.py +40 -40
  21. crawlo/core/scheduler.py +137 -166
  22. crawlo/crawler.py +1027 -1027
  23. crawlo/downloader/__init__.py +266 -242
  24. crawlo/downloader/aiohttp_downloader.py +220 -212
  25. crawlo/downloader/cffi_downloader.py +256 -251
  26. crawlo/downloader/httpx_downloader.py +259 -259
  27. crawlo/downloader/hybrid_downloader.py +214 -0
  28. crawlo/downloader/playwright_downloader.py +403 -0
  29. crawlo/downloader/selenium_downloader.py +473 -0
  30. crawlo/event.py +11 -11
  31. crawlo/exceptions.py +81 -81
  32. crawlo/extension/__init__.py +37 -37
  33. crawlo/extension/health_check.py +141 -141
  34. crawlo/extension/log_interval.py +57 -57
  35. crawlo/extension/log_stats.py +81 -81
  36. crawlo/extension/logging_extension.py +43 -43
  37. crawlo/extension/memory_monitor.py +104 -88
  38. crawlo/extension/performance_profiler.py +133 -117
  39. crawlo/extension/request_recorder.py +107 -107
  40. crawlo/filters/__init__.py +154 -154
  41. crawlo/filters/aioredis_filter.py +281 -242
  42. crawlo/filters/memory_filter.py +269 -269
  43. crawlo/items/__init__.py +23 -23
  44. crawlo/items/base.py +21 -21
  45. crawlo/items/fields.py +53 -53
  46. crawlo/items/items.py +104 -104
  47. crawlo/middleware/__init__.py +21 -21
  48. crawlo/middleware/default_header.py +32 -32
  49. crawlo/middleware/download_delay.py +28 -28
  50. crawlo/middleware/middleware_manager.py +135 -135
  51. crawlo/middleware/proxy.py +272 -248
  52. crawlo/middleware/request_ignore.py +30 -30
  53. crawlo/middleware/response_code.py +18 -18
  54. crawlo/middleware/response_filter.py +26 -26
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/mode_manager.py +212 -201
  57. crawlo/network/__init__.py +21 -21
  58. crawlo/network/request.py +338 -311
  59. crawlo/network/response.py +360 -271
  60. crawlo/pipelines/__init__.py +21 -21
  61. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  62. crawlo/pipelines/console_pipeline.py +39 -39
  63. crawlo/pipelines/csv_pipeline.py +316 -316
  64. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  65. crawlo/pipelines/json_pipeline.py +218 -218
  66. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  67. crawlo/pipelines/mongo_pipeline.py +131 -131
  68. crawlo/pipelines/mysql_pipeline.py +316 -316
  69. crawlo/pipelines/pipeline_manager.py +61 -56
  70. crawlo/pipelines/redis_dedup_pipeline.py +167 -162
  71. crawlo/project.py +188 -153
  72. crawlo/queue/pqueue.py +37 -37
  73. crawlo/queue/queue_manager.py +334 -307
  74. crawlo/queue/redis_priority_queue.py +299 -209
  75. crawlo/settings/__init__.py +7 -7
  76. crawlo/settings/default_settings.py +219 -278
  77. crawlo/settings/setting_manager.py +123 -100
  78. crawlo/spider/__init__.py +639 -639
  79. crawlo/stats_collector.py +59 -59
  80. crawlo/subscriber.py +130 -130
  81. crawlo/task_manager.py +30 -30
  82. crawlo/templates/crawlo.cfg.tmpl +10 -10
  83. crawlo/templates/project/__init__.py.tmpl +3 -3
  84. crawlo/templates/project/items.py.tmpl +17 -17
  85. crawlo/templates/project/middlewares.py.tmpl +110 -110
  86. crawlo/templates/project/pipelines.py.tmpl +97 -97
  87. crawlo/templates/project/run.py.tmpl +251 -251
  88. crawlo/templates/project/settings.py.tmpl +326 -279
  89. crawlo/templates/project/settings_distributed.py.tmpl +120 -0
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -0
  91. crawlo/templates/project/settings_high_performance.py.tmpl +152 -0
  92. crawlo/templates/project/settings_simple.py.tmpl +69 -0
  93. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  94. crawlo/templates/spider/spider.py.tmpl +141 -141
  95. crawlo/tools/__init__.py +183 -0
  96. crawlo/tools/anti_crawler.py +269 -0
  97. crawlo/tools/authenticated_proxy.py +241 -0
  98. crawlo/tools/data_validator.py +181 -0
  99. crawlo/tools/date_tools.py +36 -0
  100. crawlo/tools/distributed_coordinator.py +387 -0
  101. crawlo/tools/retry_mechanism.py +221 -0
  102. crawlo/tools/scenario_adapter.py +263 -0
  103. crawlo/utils/__init__.py +35 -7
  104. crawlo/utils/batch_processor.py +261 -0
  105. crawlo/utils/controlled_spider_mixin.py +439 -439
  106. crawlo/utils/date_tools.py +290 -233
  107. crawlo/utils/db_helper.py +343 -343
  108. crawlo/utils/enhanced_error_handler.py +360 -0
  109. crawlo/utils/env_config.py +106 -0
  110. crawlo/utils/error_handler.py +126 -0
  111. crawlo/utils/func_tools.py +82 -82
  112. crawlo/utils/large_scale_config.py +286 -286
  113. crawlo/utils/large_scale_helper.py +343 -343
  114. crawlo/utils/log.py +128 -128
  115. crawlo/utils/performance_monitor.py +285 -0
  116. crawlo/utils/queue_helper.py +175 -175
  117. crawlo/utils/redis_connection_pool.py +335 -0
  118. crawlo/utils/redis_key_validator.py +200 -0
  119. crawlo/utils/request.py +267 -267
  120. crawlo/utils/request_serializer.py +219 -219
  121. crawlo/utils/spider_loader.py +62 -62
  122. crawlo/utils/system.py +11 -11
  123. crawlo/utils/tools.py +4 -4
  124. crawlo/utils/url.py +39 -39
  125. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/METADATA +401 -403
  126. crawlo-1.1.6.dist-info/RECORD +189 -0
  127. examples/__init__.py +7 -7
  128. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +82 -0
  129. tests/__init__.py +7 -7
  130. tests/advanced_tools_example.py +276 -0
  131. tests/authenticated_proxy_example.py +237 -0
  132. tests/cleaners_example.py +161 -0
  133. tests/config_validation_demo.py +103 -0
  134. {examples → tests}/controlled_spider_example.py +205 -205
  135. tests/date_tools_example.py +181 -0
  136. tests/dynamic_loading_example.py +524 -0
  137. tests/dynamic_loading_test.py +105 -0
  138. tests/env_config_example.py +134 -0
  139. tests/error_handling_example.py +172 -0
  140. tests/redis_key_validation_demo.py +131 -0
  141. tests/response_improvements_example.py +145 -0
  142. tests/test_advanced_tools.py +149 -0
  143. tests/test_all_redis_key_configs.py +146 -0
  144. tests/test_authenticated_proxy.py +142 -0
  145. tests/test_cleaners.py +55 -0
  146. tests/test_comprehensive.py +147 -0
  147. tests/test_config_validator.py +194 -0
  148. tests/test_date_tools.py +124 -0
  149. tests/test_double_crawlo_fix.py +208 -0
  150. tests/test_double_crawlo_fix_simple.py +125 -0
  151. tests/test_dynamic_downloaders_proxy.py +125 -0
  152. tests/test_dynamic_proxy.py +93 -0
  153. tests/test_dynamic_proxy_config.py +147 -0
  154. tests/test_dynamic_proxy_real.py +110 -0
  155. tests/test_edge_cases.py +304 -0
  156. tests/test_enhanced_error_handler.py +271 -0
  157. tests/test_env_config.py +122 -0
  158. tests/test_error_handler_compatibility.py +113 -0
  159. tests/test_final_validation.py +153 -153
  160. tests/test_framework_env_usage.py +104 -0
  161. tests/test_integration.py +357 -0
  162. tests/test_item_dedup_redis_key.py +123 -0
  163. tests/test_parsel.py +30 -0
  164. tests/test_performance.py +328 -0
  165. tests/test_proxy_health_check.py +32 -32
  166. tests/test_proxy_middleware_integration.py +136 -136
  167. tests/test_proxy_providers.py +56 -56
  168. tests/test_proxy_stats.py +19 -19
  169. tests/test_proxy_strategies.py +59 -59
  170. tests/test_queue_manager_double_crawlo.py +231 -0
  171. tests/test_queue_manager_redis_key.py +177 -0
  172. tests/test_redis_config.py +28 -28
  173. tests/test_redis_connection_pool.py +295 -0
  174. tests/test_redis_key_naming.py +182 -0
  175. tests/test_redis_key_validator.py +124 -0
  176. tests/test_redis_queue.py +224 -224
  177. tests/test_request_serialization.py +70 -70
  178. tests/test_response_improvements.py +153 -0
  179. tests/test_scheduler.py +241 -241
  180. tests/test_simple_response.py +62 -0
  181. tests/test_telecom_spider_redis_key.py +206 -0
  182. tests/test_template_content.py +88 -0
  183. tests/test_template_redis_key.py +135 -0
  184. tests/test_tools.py +154 -0
  185. tests/tools_example.py +258 -0
  186. crawlo/core/enhanced_engine.py +0 -190
  187. crawlo-1.1.4.dist-info/RECORD +0 -117
  188. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/WHEEL +0 -0
  189. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/entry_points.txt +0 -0
  190. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/top_level.txt +0 -0
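
The hunk below is the diff of crawlo/templates/project/settings.py.tmpl (entry 88 above), the largest template change in this release. As a bridge into it, here is a minimal sketch of the configuration-factory flow the template documents; every name is taken from the template's own quick-start docstring, so treat the exact signatures as assumptions rather than crawlo's documented API:

    # Sketch of the flow shown in the template's quick-start docstring.
    # CrawloConfig.standalone/distributed/from_env and CrawlerProcess appear
    # in the template itself; their full signatures are not shown in this diff.
    from crawlo.config import CrawloConfig
    from crawlo.crawler import CrawlerProcess

    config = CrawloConfig.standalone(concurrency=8, download_delay=1.0)  # standalone (default)
    # config = CrawloConfig.distributed(redis_host='192.168.1.100')      # distributed mode
    process = CrawlerProcess(settings=config.to_dict())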
@@ -1,280 +1,327 @@
- # -*- coding: UTF-8 -*-
- """
- {{project_name}} project configuration file
- =============================
- Crawler project configuration based on the Crawlo framework.
-
- 🎯 Quick start:
-
- # Option 1: default standalone mode (recommended)
- from crawlo.crawler import CrawlerProcess
- process = CrawlerProcess()  # no configuration required
-
- # Option 2: configuration factory
- from crawlo.config import CrawloConfig
- config = CrawloConfig.standalone()  # standalone mode
- config = CrawloConfig.distributed(redis_host='192.168.1.100')  # distributed mode
- process = CrawlerProcess(settings=config.to_dict())
-
- # Option 3: environment variables
- from crawlo.config import CrawloConfig
- config = CrawloConfig.from_env()  # read from environment variables
- """
- import os
- from crawlo.config import CrawloConfig
-
- # ============================== Project information ==============================
- PROJECT_NAME = '{{project_name}}'
- VERSION = '1.0.0'
-
- # ============================== Run mode selection ==============================
-
- # 🎯 Choose one configuration style:
-
- # Option 1: configuration factory (recommended)
- # Standalone mode (default)
- CONFIG = CrawloConfig.standalone(
-     concurrency=8,
-     download_delay=1.0
- )
-
- # Distributed mode (uncomment and update the Redis address)
- # CONFIG = CrawloConfig.distributed(
- #     redis_host='127.0.0.1',
- #     redis_password='your_password',  # if a password is set
- #     project_name='{{project_name}}',
- #     concurrency=16,
- #     download_delay=1.0
- # )
-
- # Auto-detect mode
- # CONFIG = CrawloConfig.auto(concurrency=12)
-
- # Option 2: read from environment variables (suited to deployment)
- # CONFIG = CrawloConfig.from_env()
-
- # Option 3: preset configurations
- # from crawlo.config import Presets
- # CONFIG = Presets.development()  # development environment
- # CONFIG = Presets.production()   # production environment
-
- # Apply the final configuration
- locals().update(CONFIG.to_dict())
-
- # ============================== Network request settings ==============================
-
- # Downloader selection (CurlCffi recommended; supports browser-fingerprint emulation)
- DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"  # HTTP/2 support
- # DOWNLOADER = "crawlo.downloader.cffi_downloader.CurlCffiDownloader"  # browser fingerprinting
- # DOWNLOADER = "crawlo.downloader.aiohttp_downloader.AioHttpDownloader"  # lightweight option
-
- # Request timeout and security
- DOWNLOAD_TIMEOUT = 30
- VERIFY_SSL = True
- USE_SESSION = True
-
- # Request delay control (anti-bot mitigation)
- DOWNLOAD_DELAY = 1.0
- RANDOM_RANGE = (0.8, 1.2)
- RANDOMNESS = True
-
- # Retry policy
- MAX_RETRY_TIMES = 3
- RETRY_PRIORITY = -1
- RETRY_HTTP_CODES = [408, 429, 500, 502, 503, 504, 522, 524]
- IGNORE_HTTP_CODES = [403, 404]
- ALLOWED_CODES = []
-
- # Connection pool settings
- CONNECTION_POOL_LIMIT = 50
- DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # 10MB
- DOWNLOAD_WARN_SIZE = 1024 * 1024  # 1MB
-
- # ============================== Concurrency and scheduling ==============================
- CONCURRENCY = 8
- INTERVAL = 5
- DEPTH_PRIORITY = 1
- MAX_RUNNING_SPIDERS = 3
-
- # ============================== Queue settings (distributed-capable) ==============================
-
- # Queue type: 'auto' (auto-select), 'memory' (in-memory queue), 'redis' (distributed queue)
- QUEUE_TYPE = 'auto'
- SCHEDULER_MAX_QUEUE_SIZE = 2000
- SCHEDULER_QUEUE_NAME = f'{{project_name}}:requests'
- QUEUE_MAX_RETRIES = 3
- QUEUE_TIMEOUT = 300
-
- # Large-scale crawling optimizations
- LARGE_SCALE_BATCH_SIZE = 1000
- LARGE_SCALE_CHECKPOINT_INTERVAL = 5000
- LARGE_SCALE_MAX_MEMORY_USAGE = 500
-
- # ============================== Data storage ==============================
-
- # --- MySQL settings ---
- MYSQL_HOST = os.getenv('MYSQL_HOST', '127.0.0.1')
- MYSQL_PORT = int(os.getenv('MYSQL_PORT', 3306))
- MYSQL_USER = os.getenv('MYSQL_USER', 'root')
- MYSQL_PASSWORD = os.getenv('MYSQL_PASSWORD', '123456')
- MYSQL_DB = os.getenv('MYSQL_DB', '{{project_name}}')
- MYSQL_TABLE = '{{project_name}}_data'
- MYSQL_BATCH_SIZE = 100
- MYSQL_USE_BATCH = False  # enable batch inserts
-
- # MySQL connection pool
- MYSQL_FLUSH_INTERVAL = 5
- MYSQL_POOL_MIN = 5
- MYSQL_POOL_MAX = 20
- MYSQL_ECHO = False
-
- # --- MongoDB settings ---
- MONGO_URI = os.getenv('MONGO_URI', 'mongodb://localhost:27017')
- MONGO_DATABASE = '{{project_name}}_db'
- MONGO_COLLECTION = '{{project_name}}_items'
- MONGO_MAX_POOL_SIZE = 200
- MONGO_MIN_POOL_SIZE = 20
- MONGO_BATCH_SIZE = 100  # batch insert size
- MONGO_USE_BATCH = False  # enable batch inserts
-
- # ============================== Deduplication and filtering ==============================
-
- REQUEST_DIR = '.'
-
- # Dedup filter (the Redis filter is recommended for distributed projects)
- FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
- # FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'  # distributed dedup
-
- # --- Redis settings (for distributed dedup and queues) ---
- REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1')
- REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
- REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')
-
- # Build the URL depending on whether a password is set
- if REDIS_PASSWORD:
-     REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
- else:
-     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'
-
- REDIS_KEY = f'{{project_name}}:fingerprint'
- REDIS_TTL = 0
- CLEANUP_FP = 0
- FILTER_DEBUG = True
- DECODE_RESPONSES = True
-
- # ============================== Middleware ==============================
-
- MIDDLEWARES = [
-     # === Request pre-processing stage ===
-     'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',
-     'crawlo.middleware.download_delay.DownloadDelayMiddleware',
-     'crawlo.middleware.default_header.DefaultHeaderMiddleware',
-     'crawlo.middleware.proxy.ProxyMiddleware',
-
-     # === Response processing stage ===
-     'crawlo.middleware.retry.RetryMiddleware',
-     'crawlo.middleware.response_code.ResponseCodeMiddleware',
-     'crawlo.middleware.response_filter.ResponseFilterMiddleware',
- ]
-
- # ============================== Item pipelines ==============================
-
- PIPELINES = [
-     # The default dedup pipeline is selected automatically by run mode:
-     # standalone mode: crawlo.pipelines.MemoryDedupPipeline
-     # distributed mode: crawlo.pipelines.RedisDedupPipeline
-     'crawlo.pipelines.console_pipeline.ConsolePipeline',
-     # '{{project_name}}.pipelines.DatabasePipeline',  # custom database pipeline
-     # 'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
-     # 'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
- ]
-
- # ============================== Extensions ==============================
-
- EXTENSIONS = [
-     'crawlo.extension.log_interval.LogIntervalExtension',
-     'crawlo.extension.log_stats.LogStats',
-     'crawlo.extension.logging_extension.CustomLoggerExtension',
-     # 'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # memory monitoring
-     # 'crawlo.extension.request_recorder.RequestRecorderExtension',  # request recording
-     # 'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # performance profiling
-     # 'crawlo.extension.health_check.HealthCheckExtension',  # health checks
- ]
-
- # ============================== Extension settings ==============================
-
- # Memory monitor extension
- # MEMORY_MONITOR_ENABLED = True  # enable memory monitoring
- # MEMORY_MONITOR_INTERVAL = 60  # check interval (seconds)
- # MEMORY_WARNING_THRESHOLD = 80.0  # memory usage warning threshold (percent)
- # MEMORY_CRITICAL_THRESHOLD = 90.0  # memory usage critical threshold (percent)
-
- # Request recorder extension
- # REQUEST_RECORDER_ENABLED = True  # enable request recording
- # REQUEST_RECORDER_OUTPUT_DIR = 'requests_log'  # output directory
- # REQUEST_RECORDER_MAX_FILE_SIZE = 10 * 1024 * 1024  # max size per log file (bytes)
-
- # Performance profiler extension
- # PERFORMANCE_PROFILER_ENABLED = True  # enable profiling
- # PERFORMANCE_PROFILER_OUTPUT_DIR = 'profiling'  # output directory
- # PERFORMANCE_PROFILER_INTERVAL = 300  # interval between saved profiles (seconds)
-
- # Health check extension
- # HEALTH_CHECK_ENABLED = True  # enable health checks
- # HEALTH_CHECK_INTERVAL = 60  # check interval (seconds)
-
- # ============================== Logging ==============================
-
- LOG_LEVEL = 'INFO'
- STATS_DUMP = True
- LOG_FILE = f'logs/{{project_name}}.log'
- LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
- LOG_ENCODING = 'utf-8'
-
- # ============================== Proxy settings ==============================
-
- PROXY_ENABLED = False
- PROXY_API_URL = ""  # fill in a real proxy API endpoint
- PROXY_EXTRACTOR = "proxy"
- PROXY_REFRESH_INTERVAL = 60
- PROXY_API_TIMEOUT = 10
-
- # ============================== Browser fingerprinting ==============================
-
- # CurlCffi downloader-specific settings
- CURL_BROWSER_TYPE = "chrome"
- CURL_BROWSER_VERSION_MAP = {
-     "chrome": "chrome136",
-     "edge": "edge101",
-     "safari": "safari184",
-     "firefox": "firefox135",
- }
-
- # Default request headers
- DEFAULT_REQUEST_HEADERS = {
-     'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
-                   '(KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36',
-     'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
-     'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
-     'Accept-Encoding': 'gzip, deflate, br',
-     'Connection': 'keep-alive',
-     'Upgrade-Insecure-Requests': '1',
- }
-
- # ============================== Development and debugging ==============================
-
- # Development mode
- DEBUG = False
- TESTING = False
-
- # Performance monitoring
- ENABLE_PERFORMANCE_MONITORING = True
- MEMORY_USAGE_WARNING_THRESHOLD = 500  # MB
-
- # ============================== Custom configuration area ==============================
- # Add project-specific settings here
-
- # Example: target-site-specific settings
- # TARGET_DOMAIN = '{{domain}}'
- # MAX_PAGES_PER_DOMAIN = 10000
+ # -*- coding: UTF-8 -*-
+ """
+ {{project_name}} project configuration file
+ =============================
+ Crawler project configuration based on the Crawlo framework.
+
+ 🎯 Quick start:
+
+ # Option 1: default standalone mode (recommended)
+ from crawlo.crawler import CrawlerProcess
+ process = CrawlerProcess()  # no configuration required
+
+ # Option 2: configuration factory
+ from crawlo.config import CrawloConfig
+ config = CrawloConfig.standalone()  # standalone mode
+ config = CrawloConfig.distributed(redis_host='192.168.1.100')  # distributed mode
+ process = CrawlerProcess(settings=config.to_dict())
+
+ # Option 3: environment variables
+ from crawlo.config import CrawloConfig
+ config = CrawloConfig.from_env()  # read from environment variables
+ """
+ import os
+ from crawlo.config import CrawloConfig
+
+ # ============================== Project information ==============================
+ PROJECT_NAME = '{{project_name}}'
+ VERSION = '1.0.0'
+
+ # ============================== Run mode selection ==============================
+
+ # 🎯 Choose one configuration style:
+
+ # Option 1: configuration factory (recommended)
+ # Standalone mode (default)
+ CONFIG = CrawloConfig.standalone(
+     concurrency=8,
+     download_delay=1.0
+ )
+
+ # Distributed mode (uncomment and update the Redis address)
+ # CONFIG = CrawloConfig.distributed(
+ #     redis_host='127.0.0.1',
+ #     redis_password='your_password',  # if a password is set
+ #     project_name='{{project_name}}',
+ #     concurrency=16,
+ #     download_delay=1.0
+ # )
+
+ # Auto-detect mode
+ # CONFIG = CrawloConfig.auto(concurrency=12)
+
+ # Option 2: read from environment variables (suited to deployment)
+ # CONFIG = CrawloConfig.from_env()
+
+ # Option 3: preset configurations
+ # from crawlo.config import Presets
+ # CONFIG = Presets.development()  # development environment
+ # CONFIG = Presets.production()   # production environment
+
+ # Apply the final configuration
+ locals().update(CONFIG.to_dict())
+
+ # ============================== Network request settings ==============================
+
+ # Downloader selection (CurlCffi recommended; supports browser-fingerprint emulation)
+ DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"  # HTTP/2 support
+ # DOWNLOADER = "crawlo.downloader.cffi_downloader.CurlCffiDownloader"  # browser fingerprinting
+ # DOWNLOADER = "crawlo.downloader.aiohttp_downloader.AioHttpDownloader"  # lightweight option
+
+ # Request timeout and security
+ DOWNLOAD_TIMEOUT = 30
+ VERIFY_SSL = True
+ USE_SESSION = True
+
+ # Request delay control (anti-bot mitigation)
+ DOWNLOAD_DELAY = 1.0
+ RANDOM_RANGE = (0.8, 1.2)
+ RANDOMNESS = True
+
+ # Retry policy
+ MAX_RETRY_TIMES = 3
+ RETRY_PRIORITY = -1
+ RETRY_HTTP_CODES = [408, 429, 500, 502, 503, 504, 522, 524]
+ IGNORE_HTTP_CODES = [403, 404]
+ ALLOWED_CODES = []
+
+ # Connection pool settings
+ CONNECTION_POOL_LIMIT = 50
+ DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # 10MB
+ DOWNLOAD_WARN_SIZE = 1024 * 1024  # 1MB
+ DOWNLOAD_RETRY_TIMES = MAX_RETRY_TIMES  # downloader-internal retry count (reuses the global value)
+
+ # Download statistics
+ DOWNLOADER_STATS = True  # enable downloader statistics
+ DOWNLOAD_STATS = True  # record download time and size statistics
+
+ # ============================== Concurrency and scheduling ==============================
+
+ CONCURRENCY = 8
+ INTERVAL = 5
+ DEPTH_PRIORITY = 1
+ MAX_RUNNING_SPIDERS = 3
+
+ # Run mode: 'standalone', 'distributed', or 'auto' (auto-detect)
+ RUN_MODE = 'standalone'  # standalone by default; simple and easy to use
+
+ # ============================== Queue settings (distributed-capable) ==============================
+
+ # Queue type: 'auto' (auto-select), 'memory' (in-memory queue), 'redis' (distributed queue)
+ QUEUE_TYPE = 'auto'
+ SCHEDULER_MAX_QUEUE_SIZE = 2000
+ SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'  # unified naming convention
+ QUEUE_MAX_RETRIES = 3
+ QUEUE_TIMEOUT = 300
+
+ # Large-scale crawling optimizations
+ LARGE_SCALE_BATCH_SIZE = 1000  # batch size
+ LARGE_SCALE_CHECKPOINT_INTERVAL = 5000  # checkpoint interval
+ LARGE_SCALE_MAX_MEMORY_USAGE = 500  # max memory usage (MB)
+
+ # ============================== Data storage ==============================
+
+ # --- MySQL settings ---
+ MYSQL_HOST = os.getenv('MYSQL_HOST', '127.0.0.1')
+ MYSQL_PORT = int(os.getenv('MYSQL_PORT', 3306))
+ MYSQL_USER = os.getenv('MYSQL_USER', 'root')
+ MYSQL_PASSWORD = os.getenv('MYSQL_PASSWORD', '123456')
+ MYSQL_DB = os.getenv('MYSQL_DB', '{{project_name}}')
+ MYSQL_TABLE = '{{project_name}}_data'
+ MYSQL_BATCH_SIZE = 100
+ MYSQL_USE_BATCH = False  # enable batch inserts
+
+ # --- MongoDB settings ---
+ MONGO_URI = os.getenv('MONGO_URI', 'mongodb://localhost:27017')
+ MONGO_DATABASE = '{{project_name}}_db'
+ MONGO_COLLECTION = '{{project_name}}_items'
+ MONGO_MAX_POOL_SIZE = 200
+ MONGO_MIN_POOL_SIZE = 20
+ MONGO_BATCH_SIZE = 100  # batch insert size
+ MONGO_USE_BATCH = False  # enable batch inserts
+
+ # ============================== Deduplication and filtering ==============================
+
+ REQUEST_DIR = '.'
+
+ # Select the dedup pipeline automatically by run mode:
+ # standalone mode defaults to the in-memory dedup pipeline
+ # distributed mode defaults to the Redis dedup pipeline
+ if RUN_MODE == 'distributed':
+     # Redis dedup pipeline by default in distributed mode
+     DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.RedisDedupPipeline'
+ else:
+     # in-memory dedup pipeline by default in standalone mode
+     DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.MemoryDedupPipeline'
+
+ # Dedup filter (the Redis filter is recommended for distributed projects)
+ FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
+ # FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'  # distributed dedup
+
+ # --- Redis settings (for distributed dedup and queues) ---
+ REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1')
+ REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
+ REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')
+
+ # Build the URL depending on whether a password is set
+ if REDIS_PASSWORD:
+     REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
+ else:
+     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'
+
+ # Redis key configuration has moved into the individual components, which follow the unified naming scheme:
+ # crawlo:{project_name}:filter:fingerprint (request dedup)
+ # crawlo:{project_name}:item:fingerprint (item dedup)
+ # crawlo:{project_name}:queue:requests (request queue)
+ # crawlo:{project_name}:queue:processing (in-progress queue)
+ # crawlo:{project_name}:queue:failed (failed queue)
+
+ REDIS_TTL = 0
+ CLEANUP_FP = 0
+ FILTER_DEBUG = True
+ DECODE_RESPONSES = True
+
+ # ============================== Middleware ==============================
+
+ MIDDLEWARES = [
+     # === Request pre-processing stage ===
+     'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',
+     'crawlo.middleware.download_delay.DownloadDelayMiddleware',
+     'crawlo.middleware.default_header.DefaultHeaderMiddleware',
+     'crawlo.middleware.proxy.ProxyMiddleware',
+
+     # === Response processing stage ===
+     'crawlo.middleware.retry.RetryMiddleware',
+     'crawlo.middleware.response_code.ResponseCodeMiddleware',
+     'crawlo.middleware.response_filter.ResponseFilterMiddleware',
+ ]
+
+ # ============================== Item pipelines ==============================
+
+ # Item-processing pipelines (enabled storage backends)
+ PIPELINES = [
+     'crawlo.pipelines.console_pipeline.ConsolePipeline',
+     # '{{project_name}}.pipelines.DatabasePipeline',  # custom database pipeline
+     # 'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
+     # 'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
+ ]
+
+ # Automatically prepend the default dedup pipeline according to the run mode
+ if RUN_MODE == 'distributed':
+     # add the Redis dedup pipeline in distributed mode
+     PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
+ else:
+     # add the in-memory dedup pipeline in standalone mode
+     PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
+
+ # ============================== Extensions ==============================
+
+ EXTENSIONS = [
+     'crawlo.extension.log_interval.LogIntervalExtension',
+     'crawlo.extension.log_stats.LogStats',
+     'crawlo.extension.logging_extension.CustomLoggerExtension',
+     # 'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # memory monitoring
+     # 'crawlo.extension.request_recorder.RequestRecorderExtension',  # request recording
+     # 'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # performance profiling
+     # 'crawlo.extension.health_check.HealthCheckExtension',  # health checks
+ ]
+
+ # ============================== Extension settings ==============================
+
+ # Memory monitor extension
+ # MEMORY_MONITOR_ENABLED = True  # enable memory monitoring
+ # MEMORY_MONITOR_INTERVAL = 60  # check interval (seconds)
+ # MEMORY_WARNING_THRESHOLD = 80.0  # memory usage warning threshold (percent)
+ # MEMORY_CRITICAL_THRESHOLD = 90.0  # memory usage critical threshold (percent)
+
+ # Request recorder extension
+ # REQUEST_RECORDER_ENABLED = True  # enable request recording
+ # REQUEST_RECORDER_OUTPUT_DIR = 'requests_log'  # output directory
+ # REQUEST_RECORDER_MAX_FILE_SIZE = 10 * 1024 * 1024  # max size per log file (bytes)
+
+ # Performance profiler extension
+ # PERFORMANCE_PROFILER_ENABLED = True  # enable profiling
+ # PERFORMANCE_PROFILER_OUTPUT_DIR = 'profiling'  # output directory
+ # PERFORMANCE_PROFILER_INTERVAL = 300  # interval between saved profiles (seconds)
+
+ # Health check extension
+ # HEALTH_CHECK_ENABLED = True  # enable health checks
+ # HEALTH_CHECK_INTERVAL = 60  # check interval (seconds)
+
+ # ============================== Logging ==============================
+
+ LOG_LEVEL = 'INFO'
+ STATS_DUMP = True
+ LOG_FILE = f'logs/{{project_name}}.log'
+ LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
+ LOG_ENCODING = 'utf-8'
+
+ # ============================== Proxy settings ==============================
+
+ PROXY_ENABLED = False
+ PROXY_API_URL = ""  # fill in a real proxy API endpoint
+ PROXY_EXTRACTOR = "proxy"
+ PROXY_REFRESH_INTERVAL = 60
+ PROXY_API_TIMEOUT = 10
+
+ # ============================== Browser fingerprinting ==============================
+
+ # CurlCffi downloader-specific settings
+ CURL_BROWSER_TYPE = "chrome"
+ CURL_BROWSER_VERSION_MAP = {
+     "chrome": "chrome136",
+     "edge": "edge101",
+     "safari": "safari184",
+     "firefox": "firefox135",
+ }
+
+ # Default request headers
+ DEFAULT_REQUEST_HEADERS = {
+     'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
+                   '(KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36',
+     'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
+     'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
+     'Accept-Encoding': 'gzip, deflate, br',
+     'Connection': 'keep-alive',
+     'Upgrade-Insecure-Requests': '1',
+ }
+
+ # ============================== Downloader optimizations ==============================
+
+ # Downloader health checks
+ DOWNLOADER_HEALTH_CHECK = True  # enable downloader health checks
+ HEALTH_CHECK_INTERVAL = 60  # check interval (seconds)
+
+ # Request statistics
+ REQUEST_STATS_ENABLED = True  # enable request statistics
+ STATS_RESET_ON_START = False  # reset statistics on startup
+
+ # HttpX downloader-specific settings
+ HTTPX_HTTP2 = True  # enable HTTP/2 support
+ HTTPX_FOLLOW_REDIRECTS = True  # follow redirects automatically
+
+ # AioHttp downloader-specific settings
+ AIOHTTP_AUTO_DECOMPRESS = True  # auto-decompress responses
+ AIOHTTP_FORCE_CLOSE = False  # force-close connections
+
+ # General optimizations
+ CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
+ CONNECTION_KEEPALIVE_TIMEOUT = 15  # keep-alive timeout (seconds)
+
+ # ============================== Development and debugging ==============================
+
+ # Development mode
+ DEBUG = False
+ TESTING = False
+
+ # Performance monitoring
+ ENABLE_PERFORMANCE_MONITORING = True
+ MEMORY_USAGE_WARNING_THRESHOLD = 500  # MB
+
+ # ============================== Custom configuration area ==============================
+ # Add project-specific settings here
+
+ # Example: target-site-specific settings
+ # TARGET_DOMAIN = '{{domain}}'
+ # MAX_PAGES_PER_DOMAIN = 10000
  # CUSTOM_RATE_LIMIT = 1.5
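
Both versions of the template apply the factory output with locals().update(CONFIG.to_dict()). This works because at module scope locals() returns the module's global namespace, so the update turns every config entry into a module-level setting; inside a function the same call would not reliably rebind names. A self-contained sketch of just that mechanism:

    # Why locals().update(...) works at module scope: there, locals() is the
    # module's global namespace (the same dict returned by globals()).
    CONFIG = {"CONCURRENCY": 8, "DOWNLOAD_DELAY": 1.0}
    locals().update(CONFIG)
    assert CONCURRENCY == 8  # CONCURRENCY is now a module-level name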
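
The template also documents 1.1.6's unified Redis key scheme (crawlo:{project_name}:{component}:{name}), which several of the added tests (e.g. tests/test_redis_key_naming.py) appear to exercise. A small sketch reproducing the five keys listed in the template's comments; the helper below is hypothetical and not crawlo's API:

    # Hypothetical helper mirroring the key pattern listed in the template's
    # comments; crawlo's own helpers (crawlo/utils/redis_key_validator.py) may differ.
    def redis_key(project_name: str, component: str, name: str) -> str:
        return f"crawlo:{project_name}:{component}:{name}"

    assert redis_key("demo", "filter", "fingerprint") == "crawlo:demo:filter:fingerprint"
    assert redis_key("demo", "item", "fingerprint") == "crawlo:demo:item:fingerprint"
    assert redis_key("demo", "queue", "requests") == "crawlo:demo:queue:requests"
    assert redis_key("demo", "queue", "processing") == "crawlo:demo:queue:processing"
    assert redis_key("demo", "queue", "failed") == "crawlo:demo:queue:failed"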