crawlo 1.2.8__py3-none-any.whl → 1.2.9__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (221)
  1. crawlo/__init__.py +63 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/help.py +138 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +323 -323
  10. crawlo/commands/startproject.py +436 -436
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +186 -186
  13. crawlo/config.py +312 -312
  14. crawlo/config_validator.py +277 -251
  15. crawlo/core/__init__.py +2 -2
  16. crawlo/core/engine.py +366 -365
  17. crawlo/core/processor.py +40 -40
  18. crawlo/core/scheduler.py +256 -251
  19. crawlo/crawler.py +1103 -1100
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -107
  22. crawlo/downloader/__init__.py +273 -266
  23. crawlo/downloader/aiohttp_downloader.py +226 -228
  24. crawlo/downloader/cffi_downloader.py +245 -256
  25. crawlo/downloader/httpx_downloader.py +259 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +402 -402
  28. crawlo/downloader/selenium_downloader.py +472 -472
  29. crawlo/event.py +11 -11
  30. crawlo/exceptions.py +81 -81
  31. crawlo/extension/__init__.py +39 -39
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +57 -57
  34. crawlo/extension/log_stats.py +81 -81
  35. crawlo/extension/logging_extension.py +43 -43
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/filters/__init__.py +154 -154
  40. crawlo/filters/aioredis_filter.py +234 -234
  41. crawlo/filters/memory_filter.py +269 -269
  42. crawlo/items/__init__.py +23 -23
  43. crawlo/items/base.py +21 -21
  44. crawlo/items/fields.py +52 -52
  45. crawlo/items/items.py +104 -104
  46. crawlo/middleware/__init__.py +21 -21
  47. crawlo/middleware/default_header.py +132 -132
  48. crawlo/middleware/download_delay.py +104 -104
  49. crawlo/middleware/middleware_manager.py +136 -136
  50. crawlo/middleware/offsite.py +114 -114
  51. crawlo/middleware/proxy.py +386 -368
  52. crawlo/middleware/request_ignore.py +86 -86
  53. crawlo/middleware/response_code.py +163 -163
  54. crawlo/middleware/response_filter.py +136 -136
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/middleware/simple_proxy.py +65 -0
  57. crawlo/mode_manager.py +211 -211
  58. crawlo/network/__init__.py +21 -21
  59. crawlo/network/request.py +379 -338
  60. crawlo/network/response.py +359 -359
  61. crawlo/pipelines/__init__.py +21 -21
  62. crawlo/pipelines/bloom_dedup_pipeline.py +157 -157
  63. crawlo/pipelines/console_pipeline.py +39 -39
  64. crawlo/pipelines/csv_pipeline.py +316 -316
  65. crawlo/pipelines/database_dedup_pipeline.py +223 -223
  66. crawlo/pipelines/json_pipeline.py +218 -218
  67. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  68. crawlo/pipelines/mongo_pipeline.py +131 -131
  69. crawlo/pipelines/mysql_pipeline.py +317 -317
  70. crawlo/pipelines/pipeline_manager.py +62 -62
  71. crawlo/pipelines/redis_dedup_pipeline.py +167 -167
  72. crawlo/project.py +290 -315
  73. crawlo/queue/pqueue.py +37 -37
  74. crawlo/queue/queue_manager.py +379 -378
  75. crawlo/queue/redis_priority_queue.py +306 -306
  76. crawlo/settings/__init__.py +7 -7
  77. crawlo/settings/default_settings.py +216 -220
  78. crawlo/settings/setting_manager.py +163 -122
  79. crawlo/spider/__init__.py +639 -639
  80. crawlo/stats_collector.py +59 -59
  81. crawlo/subscriber.py +129 -129
  82. crawlo/task_manager.py +30 -30
  83. crawlo/templates/crawlo.cfg.tmpl +10 -10
  84. crawlo/templates/project/__init__.py.tmpl +3 -3
  85. crawlo/templates/project/items.py.tmpl +17 -17
  86. crawlo/templates/project/middlewares.py.tmpl +118 -118
  87. crawlo/templates/project/pipelines.py.tmpl +96 -96
  88. crawlo/templates/project/settings.py.tmpl +261 -288
  89. crawlo/templates/project/settings_distributed.py.tmpl +174 -157
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -100
  91. crawlo/templates/project/settings_high_performance.py.tmpl +125 -134
  92. crawlo/templates/project/settings_minimal.py.tmpl +30 -0
  93. crawlo/templates/project/settings_simple.py.tmpl +96 -98
  94. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  95. crawlo/templates/run.py.tmpl +47 -47
  96. crawlo/templates/spider/spider.py.tmpl +143 -143
  97. crawlo/tools/__init__.py +200 -182
  98. crawlo/tools/anti_crawler.py +268 -268
  99. crawlo/tools/authenticated_proxy.py +240 -240
  100. crawlo/{cleaners → tools}/data_formatter.py +225 -225
  101. crawlo/tools/data_validator.py +180 -180
  102. crawlo/tools/date_tools.py +290 -36
  103. crawlo/tools/distributed_coordinator.py +388 -387
  104. crawlo/{cleaners → tools}/encoding_converter.py +127 -126
  105. crawlo/tools/request_tools.py +83 -0
  106. crawlo/tools/retry_mechanism.py +224 -221
  107. crawlo/tools/scenario_adapter.py +262 -262
  108. crawlo/{cleaners → tools}/text_cleaner.py +232 -232
  109. crawlo/utils/__init__.py +35 -35
  110. crawlo/utils/batch_processor.py +259 -259
  111. crawlo/utils/controlled_spider_mixin.py +439 -439
  112. crawlo/utils/db_helper.py +343 -343
  113. crawlo/utils/enhanced_error_handler.py +356 -356
  114. crawlo/utils/env_config.py +142 -142
  115. crawlo/utils/error_handler.py +123 -123
  116. crawlo/utils/func_tools.py +82 -82
  117. crawlo/utils/large_scale_config.py +286 -286
  118. crawlo/utils/large_scale_helper.py +344 -344
  119. crawlo/utils/log.py +187 -128
  120. crawlo/utils/performance_monitor.py +285 -285
  121. crawlo/utils/queue_helper.py +175 -175
  122. crawlo/utils/redis_connection_pool.py +351 -351
  123. crawlo/utils/redis_key_validator.py +198 -198
  124. crawlo/utils/request.py +267 -267
  125. crawlo/utils/request_serializer.py +218 -218
  126. crawlo/utils/spider_loader.py +61 -61
  127. crawlo/utils/system.py +11 -11
  128. crawlo/utils/tools.py +4 -4
  129. crawlo/utils/url.py +39 -39
  130. {crawlo-1.2.8.dist-info → crawlo-1.2.9.dist-info}/METADATA +1011 -764
  131. crawlo-1.2.9.dist-info/RECORD +219 -0
  132. examples/__init__.py +7 -7
  133. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  134. tests/__init__.py +7 -7
  135. tests/advanced_tools_example.py +275 -275
  136. tests/authenticated_proxy_example.py +107 -237
  137. tests/cleaners_example.py +160 -160
  138. tests/config_validation_demo.py +143 -103
  139. tests/controlled_spider_example.py +205 -205
  140. tests/date_tools_example.py +180 -180
  141. tests/debug_pipelines.py +67 -0
  142. tests/dynamic_loading_example.py +523 -523
  143. tests/dynamic_loading_test.py +104 -104
  144. tests/env_config_example.py +133 -133
  145. tests/error_handling_example.py +171 -171
  146. tests/redis_key_validation_demo.py +130 -130
  147. tests/request_params_example.py +151 -0
  148. tests/response_improvements_example.py +144 -144
  149. tests/test_advanced_tools.py +148 -148
  150. tests/test_all_redis_key_configs.py +145 -145
  151. tests/test_authenticated_proxy.py +141 -141
  152. tests/test_cleaners.py +54 -54
  153. tests/test_comprehensive.py +146 -146
  154. tests/test_config_consistency.py +80 -80
  155. tests/test_config_merge.py +153 -0
  156. tests/test_config_validator.py +182 -193
  157. tests/test_crawlo_proxy_integration.py +109 -173
  158. tests/test_date_tools.py +123 -123
  159. tests/test_default_header_middleware.py +158 -158
  160. tests/test_distributed.py +65 -0
  161. tests/test_double_crawlo_fix.py +207 -207
  162. tests/test_double_crawlo_fix_simple.py +124 -124
  163. tests/test_download_delay_middleware.py +221 -221
  164. tests/test_downloader_proxy_compatibility.py +268 -268
  165. tests/test_dynamic_downloaders_proxy.py +124 -124
  166. tests/test_dynamic_proxy.py +92 -92
  167. tests/test_dynamic_proxy_config.py +146 -146
  168. tests/test_dynamic_proxy_real.py +109 -109
  169. tests/test_edge_cases.py +303 -303
  170. tests/test_enhanced_error_handler.py +270 -270
  171. tests/test_env_config.py +121 -121
  172. tests/test_error_handler_compatibility.py +112 -112
  173. tests/test_final_validation.py +153 -153
  174. tests/test_framework_env_usage.py +103 -103
  175. tests/test_integration.py +169 -357
  176. tests/test_item_dedup_redis_key.py +122 -122
  177. tests/test_mode_consistency.py +51 -51
  178. tests/test_offsite_middleware.py +221 -221
  179. tests/test_parsel.py +29 -29
  180. tests/test_performance.py +327 -327
  181. tests/test_proxy_api.py +264 -264
  182. tests/test_proxy_health_check.py +32 -32
  183. tests/test_proxy_middleware.py +121 -121
  184. tests/test_proxy_middleware_enhanced.py +216 -216
  185. tests/test_proxy_middleware_integration.py +136 -136
  186. tests/test_proxy_middleware_refactored.py +185 -0
  187. tests/test_proxy_providers.py +56 -56
  188. tests/test_proxy_stats.py +19 -19
  189. tests/test_proxy_strategies.py +59 -59
  190. tests/test_queue_manager_double_crawlo.py +173 -173
  191. tests/test_queue_manager_redis_key.py +176 -176
  192. tests/test_random_user_agent.py +73 -0
  193. tests/test_real_scenario_proxy.py +195 -195
  194. tests/test_redis_config.py +28 -28
  195. tests/test_redis_connection_pool.py +294 -294
  196. tests/test_redis_key_naming.py +181 -181
  197. tests/test_redis_key_validator.py +123 -123
  198. tests/test_redis_queue.py +224 -224
  199. tests/test_request_ignore_middleware.py +182 -182
  200. tests/test_request_params.py +112 -0
  201. tests/test_request_serialization.py +70 -70
  202. tests/test_response_code_middleware.py +349 -349
  203. tests/test_response_filter_middleware.py +427 -427
  204. tests/test_response_improvements.py +152 -152
  205. tests/test_retry_middleware.py +241 -241
  206. tests/test_scheduler.py +252 -252
  207. tests/test_scheduler_config_update.py +133 -133
  208. tests/test_simple_response.py +61 -61
  209. tests/test_telecom_spider_redis_key.py +205 -205
  210. tests/test_template_content.py +87 -87
  211. tests/test_template_redis_key.py +134 -134
  212. tests/test_tools.py +159 -153
  213. tests/test_user_agents.py +97 -0
  214. tests/tools_example.py +260 -257
  215. tests/verify_distributed.py +117 -0
  216. crawlo/cleaners/__init__.py +0 -61
  217. crawlo/utils/date_tools.py +0 -290
  218. crawlo-1.2.8.dist-info/RECORD +0 -209
  219. {crawlo-1.2.8.dist-info → crawlo-1.2.9.dist-info}/WHEEL +0 -0
  220. {crawlo-1.2.8.dist-info → crawlo-1.2.9.dist-info}/entry_points.txt +0 -0
  221. {crawlo-1.2.8.dist-info → crawlo-1.2.9.dist-info}/top_level.txt +0 -0
crawlo/templates/project/settings.py.tmpl
@@ -1,289 +1,262 @@
- # -*- coding: UTF-8 -*-
- import os
- from crawlo.config import CrawloConfig
-
- # ============================== Basic project information ==============================
- PROJECT_NAME = '{{project_name}}'
-
- # ============================== Run mode selection ==============================
-
- # 🎯 Choose one configuration style (the config factory is recommended):
-
- # Standalone mode (default): for development, debugging, and small-scale crawling
- CONFIG = CrawloConfig.standalone(
-     concurrency=8,
-     download_delay=1.0
- )
-
- # Distributed mode: for large-scale crawling and multi-node coordination
- # CONFIG = CrawloConfig.distributed(
- #     redis_host='127.0.0.1',
- #     redis_password='your_password',  # if a password is set
- #     project_name='{{project_name}}',
- #     concurrency=16,
- #     download_delay=1.0
- # )
-
- # Auto-detect mode: automatically picks the best run mode for the current environment
- # CONFIG = CrawloConfig.auto(concurrency=12)
-
- # Read configuration from environment variables: for deployment environments
- # CONFIG = CrawloConfig.from_env()
-
- # Materialize the final configuration
- locals().update(CONFIG.to_dict())
-
- # ============================== Network request settings ==============================
-
- # Downloader selection (CurlCffi recommended; supports browser-fingerprint emulation)
- DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"  # HTTP/2 support
- # DOWNLOADER = "crawlo.downloader.cffi_downloader.CurlCffiDownloader"  # browser fingerprinting
- # DOWNLOADER = "crawlo.downloader.aiohttp_downloader.AioHttpDownloader"  # lightweight option
-
- # Request timeouts and security
- DOWNLOAD_TIMEOUT = 30
- VERIFY_SSL = True
- USE_SESSION = True
-
- # Request delay control (anti-bot)
- DOWNLOAD_DELAY = 1.0
- RANDOM_RANGE = (0.5, 1.5)
- RANDOMNESS = False
-
- # Retry policy
- MAX_RETRY_TIMES = 3
- RETRY_PRIORITY = -1
- RETRY_HTTP_CODES = [408, 429, 500, 502, 503, 504, 522, 524]
- IGNORE_HTTP_CODES = [403, 404]
- ALLOWED_RESPONSE_CODES = []  # status codes allowed by ResponseFilterMiddleware
- DENIED_RESPONSE_CODES = []  # status codes rejected by ResponseFilterMiddleware
-
- # Connection pool settings
- CONNECTION_POOL_LIMIT = 50
- DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # 10 MB
- DOWNLOAD_WARN_SIZE = 1024 * 1024  # 1 MB
- DOWNLOAD_RETRY_TIMES = MAX_RETRY_TIMES  # downloader-internal retries (reuses the global value)
-
- # Download statistics
- DOWNLOADER_STATS = True  # enable downloader statistics
- DOWNLOAD_STATS = True  # record download time and size statistics
-
- # ============================== Concurrency and scheduling ==============================
-
- CONCURRENCY = 8
- INTERVAL = 5
- DEPTH_PRIORITY = 1
- MAX_RUNNING_SPIDERS = 3
-
- # ============================== Queue settings (distributed-capable) ==============================
-
- # Queue type: 'auto' (automatic), 'memory' (in-memory queue), 'redis' (distributed queue)
- #
- # Queue type selection guide:
- # - 'auto': recommended for most scenarios; the framework chooses based on Redis availability
- # - 'memory': for standalone runs that do not need Redis
- # - 'redis': for distributed deployments; requires a Redis server
- QUEUE_TYPE = 'auto'
- SCHEDULER_MAX_QUEUE_SIZE = 2000
- SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'  # unified naming convention
- QUEUE_MAX_RETRIES = 3
- QUEUE_TIMEOUT = 300
-
- # Large-scale crawl tuning
- LARGE_SCALE_BATCH_SIZE = 1000  # batch size
- LARGE_SCALE_CHECKPOINT_INTERVAL = 5000  # checkpoint save interval
- LARGE_SCALE_MAX_MEMORY_USAGE = 500  # maximum memory usage (MB)
-
- # ============================== Data storage settings ==============================
-
- # --- MySQL settings ---
- MYSQL_HOST = os.getenv('MYSQL_HOST', '127.0.0.1')
- MYSQL_PORT = int(os.getenv('MYSQL_PORT', 3306))
- MYSQL_USER = os.getenv('MYSQL_USER', 'root')
- MYSQL_PASSWORD = os.getenv('MYSQL_PASSWORD', '123456')
- MYSQL_DB = os.getenv('MYSQL_DB', '{{project_name}}')
- MYSQL_TABLE = '{{project_name}}_data'
- MYSQL_BATCH_SIZE = 100
- MYSQL_USE_BATCH = False  # enable batch inserts
-
- # --- MongoDB settings ---
- MONGO_URI = os.getenv('MONGO_URI', 'mongodb://localhost:27017')
- MONGO_DATABASE = '{{project_name}}_db'
- MONGO_COLLECTION = '{{project_name}}_items'
- MONGO_MAX_POOL_SIZE = 200
- MONGO_MIN_POOL_SIZE = 20
- MONGO_BATCH_SIZE = 100  # batch insert size
- MONGO_USE_BATCH = False  # enable batch inserts
-
- # ============================== Deduplication and filtering ==============================
-
- REQUEST_DIR = '.'
-
- # Explicitly configure the default dedup pipeline and filter to avoid redundant if-else checks
- # In standalone mode, Redis-based dedup is used when Redis is available, otherwise in-memory dedup
- DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
- FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'
-
- # --- Redis settings (for distributed dedup and queues) ---
- REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1')
- REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
- REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')
-
- # Build the URL depending on whether a password is set
- if REDIS_PASSWORD:
-     REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
- else:
-     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'
-
- # Redis key settings now live in each component and follow the unified naming convention:
- # crawlo:{project_name}:filter:fingerprint (request dedup)
- # crawlo:{project_name}:item:fingerprint (item dedup)
- # crawlo:{project_name}:queue:requests (request queue)
- # crawlo:{project_name}:queue:processing (in-progress queue)
- # crawlo:{project_name}:queue:failed (failed queue)
-
- REDIS_TTL = 0
- CLEANUP_FP = 0
- FILTER_DEBUG = True
- DECODE_RESPONSES = True
-
- # ============================== Middleware settings ==============================
-
- MIDDLEWARES = [
-     # === Request preprocessing phase ===
-     'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',
-     'crawlo.middleware.download_delay.DownloadDelayMiddleware',
-     'crawlo.middleware.default_header.DefaultHeaderMiddleware',
-     'crawlo.middleware.proxy.ProxyMiddleware',
-     'crawlo.middleware.offsite.OffsiteMiddleware',
-
-     # === Response processing phase ===
-     'crawlo.middleware.retry.RetryMiddleware',
-     'crawlo.middleware.response_code.ResponseCodeMiddleware',
-     'crawlo.middleware.response_filter.ResponseFilterMiddleware',
- ]
-
- # ============================== Data pipeline settings ==============================
-
- # Data processing pipelines (enabled storage backends)
- PIPELINES = [
-     'crawlo.pipelines.console_pipeline.ConsolePipeline',
-     # '{{project_name}}.pipelines.DatabasePipeline',  # custom database pipeline
-     # 'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
-     # 'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
- ]
-
- # Explicitly prepend the default dedup pipeline to the pipeline list
- PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
-
- # ============================== Extensions ==============================
-
- EXTENSIONS = [
-     'crawlo.extension.log_interval.LogIntervalExtension',
-     'crawlo.extension.log_stats.LogStats',
-     'crawlo.extension.logging_extension.CustomLoggerExtension',
-     # 'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # memory monitoring
-     # 'crawlo.extension.request_recorder.RequestRecorderExtension',  # request recording
-     # 'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # performance profiling
-     # 'crawlo.extension.health_check.HealthCheckExtension',  # health checks
- ]
-
- # ============================== Extension settings ==============================
-
- # Memory monitor extension
- # MEMORY_MONITOR_ENABLED = True  # enable memory monitoring
- # MEMORY_MONITOR_INTERVAL = 60  # memory check interval (seconds)
- # MEMORY_WARNING_THRESHOLD = 80.0  # memory usage warning threshold (percent)
- # MEMORY_CRITICAL_THRESHOLD = 90.0  # memory usage critical threshold (percent)
-
- # Request recorder extension
- # REQUEST_RECORDER_ENABLED = True  # enable request recording
- # REQUEST_RECORDER_OUTPUT_DIR = 'requests_log'  # request log output directory
- # REQUEST_RECORDER_MAX_FILE_SIZE = 10 * 1024 * 1024  # maximum size per log file (bytes)
-
- # Performance profiler extension
- # PERFORMANCE_PROFILER_ENABLED = True  # enable performance profiling
- # PERFORMANCE_PROFILER_OUTPUT_DIR = 'profiling'  # profiling output directory
- # PERFORMANCE_PROFILER_INTERVAL = 300  # interval for periodically saving results (seconds)
-
- # Health check extension
- # HEALTH_CHECK_ENABLED = True  # enable health checks
- # HEALTH_CHECK_INTERVAL = 60  # health check interval (seconds)
-
- # ============================== Logging ==============================
-
- LOG_LEVEL = 'INFO'
- STATS_DUMP = True
- LOG_FILE = f'logs/{{project_name}}.log'
- LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
- LOG_ENCODING = 'utf-8'
-
- # ============================== Proxy settings ==============================
-
- PROXY_ENABLED = False
- PROXY_API_URL = ""  # fill in a real proxy API URL
- PROXY_EXTRACTOR = "proxy"
- PROXY_REFRESH_INTERVAL = 60
- PROXY_API_TIMEOUT = 10
-
- # ============================== Browser fingerprint settings ==============================
-
- # CurlCffi downloader-specific settings
- CURL_BROWSER_TYPE = "chrome"
- CURL_BROWSER_VERSION_MAP = {
-     "chrome": "chrome136",
-     "edge": "edge101",
-     "safari": "safari184",
-     "firefox": "firefox135",
- }
-
- # Default request headers
- DEFAULT_REQUEST_HEADERS = {
-     'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
-                   '(KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36',
-     'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
-     'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
-     'Accept-Encoding': 'gzip, deflate, br',
-     'Connection': 'keep-alive',
-     'Upgrade-Insecure-Requests': '1',
- }
-
- # ============================== Downloader tuning ==============================
-
- # Downloader health checks
- DOWNLOADER_HEALTH_CHECK = True  # enable downloader health checks
- HEALTH_CHECK_INTERVAL = 60  # health check interval (seconds)
-
- # Request statistics
- REQUEST_STATS_ENABLED = True  # enable request statistics
- STATS_RESET_ON_START = False  # reset statistics on startup
-
- # HttpX downloader-specific settings
- HTTPX_HTTP2 = True  # enable HTTP/2 support
- HTTPX_FOLLOW_REDIRECTS = True  # follow redirects automatically
-
- # AioHttp downloader-specific settings
- AIOHTTP_AUTO_DECOMPRESS = True  # auto-decompress responses
- AIOHTTP_FORCE_CLOSE = False  # force-close connections
-
- # General tuning
- CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
- CONNECTION_KEEPALIVE_TIMEOUT = 15  # keep-alive timeout (seconds)
-
- # ============================== Development and debugging ==============================
-
- # Development mode
- DEBUG = False
- TESTING = False
-
- # Performance monitoring
- ENABLE_PERFORMANCE_MONITORING = True
- MEMORY_USAGE_WARNING_THRESHOLD = 500  # MB
-
- # ============================== Custom settings area ==============================
- # Add project-specific settings here
-
- # Example: target-site-specific settings
- # TARGET_DOMAIN = '{{domain}}'
- # MAX_PAGES_PER_DOMAIN = 10000
+ # -*- coding: UTF-8 -*-
+ import os
+ from crawlo.config import CrawloConfig
+
+ # ============================== Basic project information ==============================
+ PROJECT_NAME = '{{project_name}}'
+
+ # ============================== Run mode selection ==============================
+
+ # 🎯 Choose one configuration style (the config factory is recommended):
+
+ # Standalone mode (default): for development, debugging, and small-scale crawling
+ CONFIG = CrawloConfig.standalone(
+     concurrency=8,
+     download_delay=1.0
+ )
+
+ # Distributed mode: for large-scale crawling and multi-node coordination
+ # CONFIG = CrawloConfig.distributed(
+ #     redis_host='127.0.0.1',
+ #     redis_password='your_password',  # if a password is set
+ #     project_name='{{project_name}}',
+ #     concurrency=16,
+ #     download_delay=1.0
+ # )
+
+ # Auto-detect mode: automatically picks the best run mode for the current environment
+ # CONFIG = CrawloConfig.auto(concurrency=12)
+
+ # Read configuration from environment variables: for deployment environments
+ # CONFIG = CrawloConfig.from_env()
+
+ # Materialize the final configuration
+ locals().update(CONFIG.to_dict())
+
+ # ============================== Network request settings ==============================
+
+ # Note: the framework ships default network request settings; the options below usually need no changes
+ # To customize, uncomment and adjust the values
+
+ # Downloader selection (CurlCffi recommended; supports browser-fingerprint emulation)
+ # DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"  # HTTP/2 support
+ # DOWNLOADER = "crawlo.downloader.cffi_downloader.CurlCffiDownloader"  # browser fingerprinting
+ # DOWNLOADER = "crawlo.downloader.aiohttp_downloader.AioHttpDownloader"  # lightweight option
+
+ # Request timeouts and security
+ # DOWNLOAD_TIMEOUT = 30
+ # VERIFY_SSL = True
+ # USE_SESSION = True
+
+ # Request delay control (anti-bot)
+ # DOWNLOAD_DELAY = 1.0
+ # RANDOM_RANGE = (0.5, 1.5)
+ # RANDOMNESS = False
+
+ # Retry policy
+ # MAX_RETRY_TIMES = 3
+ # RETRY_PRIORITY = -1
+ # RETRY_HTTP_CODES = [408, 429, 500, 502, 503, 504, 522, 524]
+ # IGNORE_HTTP_CODES = [403, 404]
+ # ALLOWED_RESPONSE_CODES = []  # status codes allowed by ResponseFilterMiddleware
+ # DENIED_RESPONSE_CODES = []  # status codes rejected by ResponseFilterMiddleware
+
+ # Connection pool settings
+ # CONNECTION_POOL_LIMIT = 50
+ # DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # 10 MB
+ # DOWNLOAD_WARN_SIZE = 1024 * 1024  # 1 MB
+ # DOWNLOAD_RETRY_TIMES = MAX_RETRY_TIMES  # downloader-internal retries (reuses the global value)
+
+ # Download statistics
+ # DOWNLOADER_STATS = True  # enable downloader statistics
+ # DOWNLOAD_STATS = True  # record download time and size statistics
+
+ # ============================== Concurrency and scheduling ==============================
+
+ # Note: concurrency is usually set via CrawloConfig; the options below are for fine-grained tuning
+
+ # CONCURRENCY = 8
+ # INTERVAL = 5
+ # DEPTH_PRIORITY = 1
+ # MAX_RUNNING_SPIDERS = 3
+
+ # ============================== Queue settings (distributed-capable) ==============================
+
+ # Note: queues are usually set via CrawloConfig; the options below are for fine-grained tuning
+
+ # Queue type: 'auto' (automatic), 'memory' (in-memory queue), 'redis' (distributed queue)
+ #
+ # Queue type selection guide:
+ # - 'auto': recommended for most scenarios; the framework chooses based on Redis availability
+ # - 'memory': for standalone runs that do not need Redis
+ # - 'redis': for distributed deployments; requires a Redis server
+ # QUEUE_TYPE = 'auto'
+ # SCHEDULER_MAX_QUEUE_SIZE = 2000
+ # SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'  # unified naming convention
+ # QUEUE_MAX_RETRIES = 3
+ # QUEUE_TIMEOUT = 300
+
+ # Large-scale crawl tuning
+ # LARGE_SCALE_BATCH_SIZE = 1000  # batch size
+ # LARGE_SCALE_CHECKPOINT_INTERVAL = 5000  # checkpoint save interval
+ # LARGE_SCALE_MAX_MEMORY_USAGE = 500  # maximum memory usage (MB)
+
+ # ============================== Data storage settings ==============================
+
+ # --- MySQL settings ---
+ MYSQL_HOST = os.getenv('MYSQL_HOST', '127.0.0.1')
+ MYSQL_PORT = int(os.getenv('MYSQL_PORT', 3306))
+ MYSQL_USER = os.getenv('MYSQL_USER', 'root')
+ MYSQL_PASSWORD = os.getenv('MYSQL_PASSWORD', '123456')
+ MYSQL_DB = os.getenv('MYSQL_DB', '{{project_name}}')
+ MYSQL_TABLE = '{{project_name}}_data'
+ MYSQL_BATCH_SIZE = 100
+ MYSQL_USE_BATCH = False  # enable batch inserts
+
+ # --- MongoDB settings ---
+ MONGO_URI = os.getenv('MONGO_URI', 'mongodb://localhost:27017')
+ MONGO_DATABASE = '{{project_name}}_db'
+ MONGO_COLLECTION = '{{project_name}}_items'
+ MONGO_MAX_POOL_SIZE = 200
+ MONGO_MIN_POOL_SIZE = 20
+ MONGO_BATCH_SIZE = 100  # batch insert size
+ MONGO_USE_BATCH = False  # enable batch inserts
+
+ # ============================== Deduplication and filtering ==============================
+
+ # Note: the framework ships default dedup settings; the options below usually need no changes
+ # To customize, uncomment and adjust the values
+
+ # REQUEST_DIR = '.'
+
+ # In standalone mode, Redis-based dedup is used when Redis is available, otherwise in-memory dedup
+ # DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
+ # FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'
+
+ # --- Redis settings (for distributed dedup and queues) ---
+ # REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1')
+ # REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
+ # REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')
+
+ # Build the URL depending on whether a password is set
+ # if REDIS_PASSWORD:
+ #     REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
+ # else:
+ #     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'
+
+ # Redis key settings now live in each component and follow the unified naming convention:
+ # crawlo:{project_name}:filter:fingerprint (request dedup)
+ # crawlo:{project_name}:item:fingerprint (item dedup)
+ # crawlo:{project_name}:queue:requests (request queue)
+ # crawlo:{project_name}:queue:processing (in-progress queue)
+ # crawlo:{project_name}:queue:failed (failed queue)
+
+ # REDIS_TTL = 0
+ # CLEANUP_FP = 0
+ # FILTER_DEBUG = True
+ # DECODE_RESPONSES = True
+
+ # ============================== User-defined middleware ==============================
+ # Note: the framework's default middlewares are loaded automatically; add to or override them here
+ # To enable proxying, uncomment ProxyMiddleware and configure the proxy parameters
+
+ # Middleware list (framework defaults + user-defined middlewares)
+ # MIDDLEWARES = [
+ #     '{{project_name}}.middlewares.CustomMiddleware',  # example custom middleware
+ #     'crawlo.middleware.proxy.ProxyMiddleware',  # enables proxying (requires PROXY_API_URL)
+ # ]
+
+ # ============================== User-defined data pipelines ==============================
+ # Note: the framework's default pipelines are loaded automatically; add to or override them here
+
+ # Data pipeline list (framework defaults + user-defined pipelines)
+ # PIPELINES = [
+ #     '{{project_name}}.pipelines.DatabasePipeline',  # custom database pipeline
+ #     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
+ #     'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
+ # ]
+
+ # ============================== User-defined extensions ==============================
+ # Note: the framework's default extensions are loaded automatically; add to or override them here
+
+ # Extension list (framework defaults + user-defined extensions)
+ # EXTENSIONS = [
+ #     'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # memory monitoring
+ #     'crawlo.extension.request_recorder.RequestRecorderExtension',  # request recording
+ #     'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # performance profiling
+ #     'crawlo.extension.health_check.HealthCheckExtension',  # health checks
+ # ]
+
+ # ============================== Logging ==============================
+
+ LOG_LEVEL = 'INFO'
+ STATS_DUMP = True
+ LOG_FILE = f'logs/{{project_name}}.log'
+ LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
+ LOG_ENCODING = 'utf-8'
+
+ # ============================== Proxy settings ==============================
+
+ # Proxying is disabled by default; to use it, uncomment and configure the parameters below
+ # PROXY_ENABLED = True
+ # PROXY_API_URL = "https://api.proxyprovider.com/get"  # fill in a real proxy API URL
+
+ # Proxy extraction method (supports field paths or functions)
+ # Example: "proxy" for {"proxy": "http://1.1.1.1:8080"}
+ # Example: "data.proxy" for {"data": {"proxy": "http://1.1.1.1:8080"}}
+ # PROXY_EXTRACTOR = "proxy"
+
+ # Proxy refresh control
+ # PROXY_REFRESH_INTERVAL = 60  # proxy refresh interval (seconds)
+ # PROXY_API_TIMEOUT = 10  # proxy API request timeout
+
+ # ============================== Browser fingerprint settings ==============================
+
+ # CurlCffi downloader-specific settings
+ CURL_BROWSER_TYPE = "chrome"
+ CURL_BROWSER_VERSION_MAP = {
+     "chrome": "chrome136",
+     "edge": "edge101",
+     "safari": "safari184",
+     "firefox": "firefox135",
+ }
+
+ # ============================== Downloader tuning ==============================
+
+ # Downloader health checks
+ DOWNLOADER_HEALTH_CHECK = True  # enable downloader health checks
+ HEALTH_CHECK_INTERVAL = 60  # health check interval (seconds)
+
+ # Request statistics
+ REQUEST_STATS_ENABLED = True  # enable request statistics
+ STATS_RESET_ON_START = False  # reset statistics on startup
+
+ # HttpX downloader-specific settings
+ HTTPX_HTTP2 = True  # enable HTTP/2 support
+ HTTPX_FOLLOW_REDIRECTS = True  # follow redirects automatically
+
+ # AioHttp downloader-specific settings
+ AIOHTTP_AUTO_DECOMPRESS = True  # auto-decompress responses
+ AIOHTTP_FORCE_CLOSE = False  # force-close connections
+
+ # General tuning
+ CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
+ CONNECTION_KEEPALIVE_TIMEOUT = 15  # keep-alive timeout (seconds)
+
+ # ============================== Development and debugging ==============================
+
+ # Development mode
+ DEBUG = False
+ TESTING = False
+
+ # Performance monitoring
+ ENABLE_PERFORMANCE_MONITORING = True
+ MEMORY_USAGE_WARNING_THRESHOLD = 500  # MB
+
+ # ============================== Custom settings area ==============================
+ # Add project-specific settings here
+
+ # Example: target-site-specific settings
+ # TARGET_DOMAIN = '{{domain}}'
+ # MAX_PAGES_PER_DOMAIN = 10000
  # CUSTOM_RATE_LIMIT = 1.5
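
Both versions of the template rest on the same config-factory pattern: a CrawloConfig factory method builds the base settings, locals().update(CONFIG.to_dict()) exports them as module-level names that crawlo reads, and any plain constants defined afterwards act as project-level overrides on top of the factory output and the framework's built-in defaults. A minimal sketch of a generated settings.py under those assumptions (the project name and override values here are illustrative, not from the package):

# -*- coding: UTF-8 -*-
# Minimal settings.py sketch, assuming the CrawloConfig API shown in the template above
import os
from crawlo.config import CrawloConfig

PROJECT_NAME = 'myproject'  # illustrative; generated projects substitute {{project_name}}

# The factory produces the base settings for the chosen run mode
CONFIG = CrawloConfig.standalone(concurrency=8, download_delay=1.0)

# At module level, locals() is the module dict, so this exports every
# factory setting as a module-level name that crawlo can read
locals().update(CONFIG.to_dict())

# Plain assignments after the update win within this module; per the
# template's notes, anything not set here falls back to framework defaults
LOG_LEVEL = 'INFO'
MYSQL_HOST = os.getenv('MYSQL_HOST', '127.0.0.1')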