crawlo 1.2.8__py3-none-any.whl → 1.2.9__py3-none-any.whl

This diff shows the content changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Note: this release has been flagged as potentially problematic.

Files changed (221)
  1. crawlo/__init__.py +63 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/help.py +138 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +323 -323
  10. crawlo/commands/startproject.py +436 -436
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +186 -186
  13. crawlo/config.py +312 -312
  14. crawlo/config_validator.py +277 -251
  15. crawlo/core/__init__.py +2 -2
  16. crawlo/core/engine.py +366 -365
  17. crawlo/core/processor.py +40 -40
  18. crawlo/core/scheduler.py +256 -251
  19. crawlo/crawler.py +1103 -1100
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -107
  22. crawlo/downloader/__init__.py +273 -266
  23. crawlo/downloader/aiohttp_downloader.py +226 -228
  24. crawlo/downloader/cffi_downloader.py +245 -256
  25. crawlo/downloader/httpx_downloader.py +259 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +402 -402
  28. crawlo/downloader/selenium_downloader.py +472 -472
  29. crawlo/event.py +11 -11
  30. crawlo/exceptions.py +81 -81
  31. crawlo/extension/__init__.py +39 -39
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +57 -57
  34. crawlo/extension/log_stats.py +81 -81
  35. crawlo/extension/logging_extension.py +43 -43
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/filters/__init__.py +154 -154
  40. crawlo/filters/aioredis_filter.py +234 -234
  41. crawlo/filters/memory_filter.py +269 -269
  42. crawlo/items/__init__.py +23 -23
  43. crawlo/items/base.py +21 -21
  44. crawlo/items/fields.py +52 -52
  45. crawlo/items/items.py +104 -104
  46. crawlo/middleware/__init__.py +21 -21
  47. crawlo/middleware/default_header.py +132 -132
  48. crawlo/middleware/download_delay.py +104 -104
  49. crawlo/middleware/middleware_manager.py +136 -136
  50. crawlo/middleware/offsite.py +114 -114
  51. crawlo/middleware/proxy.py +386 -368
  52. crawlo/middleware/request_ignore.py +86 -86
  53. crawlo/middleware/response_code.py +163 -163
  54. crawlo/middleware/response_filter.py +136 -136
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/middleware/simple_proxy.py +65 -0
  57. crawlo/mode_manager.py +211 -211
  58. crawlo/network/__init__.py +21 -21
  59. crawlo/network/request.py +379 -338
  60. crawlo/network/response.py +359 -359
  61. crawlo/pipelines/__init__.py +21 -21
  62. crawlo/pipelines/bloom_dedup_pipeline.py +157 -157
  63. crawlo/pipelines/console_pipeline.py +39 -39
  64. crawlo/pipelines/csv_pipeline.py +316 -316
  65. crawlo/pipelines/database_dedup_pipeline.py +223 -223
  66. crawlo/pipelines/json_pipeline.py +218 -218
  67. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  68. crawlo/pipelines/mongo_pipeline.py +131 -131
  69. crawlo/pipelines/mysql_pipeline.py +317 -317
  70. crawlo/pipelines/pipeline_manager.py +62 -62
  71. crawlo/pipelines/redis_dedup_pipeline.py +167 -167
  72. crawlo/project.py +290 -315
  73. crawlo/queue/pqueue.py +37 -37
  74. crawlo/queue/queue_manager.py +379 -378
  75. crawlo/queue/redis_priority_queue.py +306 -306
  76. crawlo/settings/__init__.py +7 -7
  77. crawlo/settings/default_settings.py +216 -220
  78. crawlo/settings/setting_manager.py +163 -122
  79. crawlo/spider/__init__.py +639 -639
  80. crawlo/stats_collector.py +59 -59
  81. crawlo/subscriber.py +129 -129
  82. crawlo/task_manager.py +30 -30
  83. crawlo/templates/crawlo.cfg.tmpl +10 -10
  84. crawlo/templates/project/__init__.py.tmpl +3 -3
  85. crawlo/templates/project/items.py.tmpl +17 -17
  86. crawlo/templates/project/middlewares.py.tmpl +118 -118
  87. crawlo/templates/project/pipelines.py.tmpl +96 -96
  88. crawlo/templates/project/settings.py.tmpl +261 -288
  89. crawlo/templates/project/settings_distributed.py.tmpl +174 -157
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -100
  91. crawlo/templates/project/settings_high_performance.py.tmpl +125 -134
  92. crawlo/templates/project/settings_minimal.py.tmpl +30 -0
  93. crawlo/templates/project/settings_simple.py.tmpl +96 -98
  94. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  95. crawlo/templates/run.py.tmpl +47 -47
  96. crawlo/templates/spider/spider.py.tmpl +143 -143
  97. crawlo/tools/__init__.py +200 -182
  98. crawlo/tools/anti_crawler.py +268 -268
  99. crawlo/tools/authenticated_proxy.py +240 -240
  100. crawlo/{cleaners → tools}/data_formatter.py +225 -225
  101. crawlo/tools/data_validator.py +180 -180
  102. crawlo/tools/date_tools.py +290 -36
  103. crawlo/tools/distributed_coordinator.py +388 -387
  104. crawlo/{cleaners → tools}/encoding_converter.py +127 -126
  105. crawlo/tools/request_tools.py +83 -0
  106. crawlo/tools/retry_mechanism.py +224 -221
  107. crawlo/tools/scenario_adapter.py +262 -262
  108. crawlo/{cleaners → tools}/text_cleaner.py +232 -232
  109. crawlo/utils/__init__.py +35 -35
  110. crawlo/utils/batch_processor.py +259 -259
  111. crawlo/utils/controlled_spider_mixin.py +439 -439
  112. crawlo/utils/db_helper.py +343 -343
  113. crawlo/utils/enhanced_error_handler.py +356 -356
  114. crawlo/utils/env_config.py +142 -142
  115. crawlo/utils/error_handler.py +123 -123
  116. crawlo/utils/func_tools.py +82 -82
  117. crawlo/utils/large_scale_config.py +286 -286
  118. crawlo/utils/large_scale_helper.py +344 -344
  119. crawlo/utils/log.py +187 -128
  120. crawlo/utils/performance_monitor.py +285 -285
  121. crawlo/utils/queue_helper.py +175 -175
  122. crawlo/utils/redis_connection_pool.py +351 -351
  123. crawlo/utils/redis_key_validator.py +198 -198
  124. crawlo/utils/request.py +267 -267
  125. crawlo/utils/request_serializer.py +218 -218
  126. crawlo/utils/spider_loader.py +61 -61
  127. crawlo/utils/system.py +11 -11
  128. crawlo/utils/tools.py +4 -4
  129. crawlo/utils/url.py +39 -39
  130. {crawlo-1.2.8.dist-info → crawlo-1.2.9.dist-info}/METADATA +1011 -764
  131. crawlo-1.2.9.dist-info/RECORD +219 -0
  132. examples/__init__.py +7 -7
  133. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  134. tests/__init__.py +7 -7
  135. tests/advanced_tools_example.py +275 -275
  136. tests/authenticated_proxy_example.py +107 -237
  137. tests/cleaners_example.py +160 -160
  138. tests/config_validation_demo.py +143 -103
  139. tests/controlled_spider_example.py +205 -205
  140. tests/date_tools_example.py +180 -180
  141. tests/debug_pipelines.py +67 -0
  142. tests/dynamic_loading_example.py +523 -523
  143. tests/dynamic_loading_test.py +104 -104
  144. tests/env_config_example.py +133 -133
  145. tests/error_handling_example.py +171 -171
  146. tests/redis_key_validation_demo.py +130 -130
  147. tests/request_params_example.py +151 -0
  148. tests/response_improvements_example.py +144 -144
  149. tests/test_advanced_tools.py +148 -148
  150. tests/test_all_redis_key_configs.py +145 -145
  151. tests/test_authenticated_proxy.py +141 -141
  152. tests/test_cleaners.py +54 -54
  153. tests/test_comprehensive.py +146 -146
  154. tests/test_config_consistency.py +80 -80
  155. tests/test_config_merge.py +153 -0
  156. tests/test_config_validator.py +182 -193
  157. tests/test_crawlo_proxy_integration.py +109 -173
  158. tests/test_date_tools.py +123 -123
  159. tests/test_default_header_middleware.py +158 -158
  160. tests/test_distributed.py +65 -0
  161. tests/test_double_crawlo_fix.py +207 -207
  162. tests/test_double_crawlo_fix_simple.py +124 -124
  163. tests/test_download_delay_middleware.py +221 -221
  164. tests/test_downloader_proxy_compatibility.py +268 -268
  165. tests/test_dynamic_downloaders_proxy.py +124 -124
  166. tests/test_dynamic_proxy.py +92 -92
  167. tests/test_dynamic_proxy_config.py +146 -146
  168. tests/test_dynamic_proxy_real.py +109 -109
  169. tests/test_edge_cases.py +303 -303
  170. tests/test_enhanced_error_handler.py +270 -270
  171. tests/test_env_config.py +121 -121
  172. tests/test_error_handler_compatibility.py +112 -112
  173. tests/test_final_validation.py +153 -153
  174. tests/test_framework_env_usage.py +103 -103
  175. tests/test_integration.py +169 -357
  176. tests/test_item_dedup_redis_key.py +122 -122
  177. tests/test_mode_consistency.py +51 -51
  178. tests/test_offsite_middleware.py +221 -221
  179. tests/test_parsel.py +29 -29
  180. tests/test_performance.py +327 -327
  181. tests/test_proxy_api.py +264 -264
  182. tests/test_proxy_health_check.py +32 -32
  183. tests/test_proxy_middleware.py +121 -121
  184. tests/test_proxy_middleware_enhanced.py +216 -216
  185. tests/test_proxy_middleware_integration.py +136 -136
  186. tests/test_proxy_middleware_refactored.py +185 -0
  187. tests/test_proxy_providers.py +56 -56
  188. tests/test_proxy_stats.py +19 -19
  189. tests/test_proxy_strategies.py +59 -59
  190. tests/test_queue_manager_double_crawlo.py +173 -173
  191. tests/test_queue_manager_redis_key.py +176 -176
  192. tests/test_random_user_agent.py +73 -0
  193. tests/test_real_scenario_proxy.py +195 -195
  194. tests/test_redis_config.py +28 -28
  195. tests/test_redis_connection_pool.py +294 -294
  196. tests/test_redis_key_naming.py +181 -181
  197. tests/test_redis_key_validator.py +123 -123
  198. tests/test_redis_queue.py +224 -224
  199. tests/test_request_ignore_middleware.py +182 -182
  200. tests/test_request_params.py +112 -0
  201. tests/test_request_serialization.py +70 -70
  202. tests/test_response_code_middleware.py +349 -349
  203. tests/test_response_filter_middleware.py +427 -427
  204. tests/test_response_improvements.py +152 -152
  205. tests/test_retry_middleware.py +241 -241
  206. tests/test_scheduler.py +252 -252
  207. tests/test_scheduler_config_update.py +133 -133
  208. tests/test_simple_response.py +61 -61
  209. tests/test_telecom_spider_redis_key.py +205 -205
  210. tests/test_template_content.py +87 -87
  211. tests/test_template_redis_key.py +134 -134
  212. tests/test_tools.py +159 -153
  213. tests/test_user_agents.py +97 -0
  214. tests/tools_example.py +260 -257
  215. tests/verify_distributed.py +117 -0
  216. crawlo/cleaners/__init__.py +0 -61
  217. crawlo/utils/date_tools.py +0 -290
  218. crawlo-1.2.8.dist-info/RECORD +0 -209
  219. {crawlo-1.2.8.dist-info → crawlo-1.2.9.dist-info}/WHEEL +0 -0
  220. {crawlo-1.2.8.dist-info → crawlo-1.2.9.dist-info}/entry_points.txt +0 -0
  221. {crawlo-1.2.8.dist-info → crawlo-1.2.9.dist-info}/top_level.txt +0 -0
@@ -1,135 +1,126 @@
- # -*- coding: UTF-8 -*-
- """
- High-performance mode configuration template
- Optimized for large-scale, high-concurrency crawling
- """
-
- # ============================== Project information ==============================
- PROJECT_NAME = '{{project_name}}'
-
- # ============================== High-performance run mode ==============================
- # Run mode: 'standalone', 'distributed', or 'auto' (auto-detect)
- RUN_MODE = 'standalone'  # Standalone mode, for development and small-scale crawls
-
- # Concurrency settings
- CONCURRENCY = 32  # High concurrency to make full use of system resources
- DOWNLOAD_DELAY = 0.1  # Very small delay to maximize throughput
- RANDOMNESS = False  # Disable randomized delays for consistent performance
-
- # ============================== Queue configuration ==============================
-
- # Queue type: 'auto' (auto-select), 'memory' (in-memory queue), 'redis' (distributed queue)
- QUEUE_TYPE = 'auto'  # Auto-detect; uses the Redis queue when Redis is available
- SCHEDULER_MAX_QUEUE_SIZE = 5000
- SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'
- QUEUE_MAX_RETRIES = 3
- QUEUE_TIMEOUT = 300
-
- # ============================== Deduplication and filtering ==============================
-
- # In high-performance mode, use Redis-based deduplication when Redis is available, otherwise in-memory
- DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
- FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'
-
- # --- Redis settings (for distributed deduplication and queues) ---
- REDIS_HOST = '127.0.0.1'
- REDIS_PORT = 6379
- REDIS_PASSWORD = ''  # Fill in if a password is set
-
- # Build the URL depending on whether a password is set
- if REDIS_PASSWORD:
-     REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
- else:
-     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'
-
- # Redis key configuration has moved into the individual components, under a unified naming scheme:
- # crawlo:{project_name}:filter:fingerprint (request deduplication)
- # crawlo:{project_name}:item:fingerprint (item deduplication)
- # crawlo:{project_name}:queue:requests (request queue)
- # crawlo:{project_name}:queue:processing (in-progress queue)
- # crawlo:{project_name}:queue:failed (failed queue)
-
- REDIS_TTL = 0
- CLEANUP_FP = 0
- FILTER_DEBUG = True
- DECODE_RESPONSES = True
-
- # ============================== Middleware configuration ==============================
-
- MIDDLEWARES = [
-     # === Request preprocessing stage ===
-     'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',
-     'crawlo.middleware.download_delay.DownloadDelayMiddleware',
-     'crawlo.middleware.default_header.DefaultHeaderMiddleware',
-     'crawlo.middleware.proxy.ProxyMiddleware',
-     'crawlo.middleware.offsite.OffsiteMiddleware',
-
-     # === Response processing stage ===
-     'crawlo.middleware.retry.RetryMiddleware',
-     'crawlo.middleware.response_code.ResponseCodeMiddleware',
-     'crawlo.middleware.response_filter.ResponseFilterMiddleware',
- ]
-
- # ============================== Pipeline configuration ==============================
-
- # Item pipelines (enabled storage backends)
- PIPELINES = [
-     'crawlo.pipelines.console_pipeline.ConsolePipeline',
-     # '{{project_name}}.pipelines.DatabasePipeline',  # Custom database pipeline
-     # 'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
-     # 'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
- ]
-
- # Explicitly insert the default dedup pipeline at the head of the pipeline list
- PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
-
- # ============================== Extensions ==============================
-
- EXTENSIONS = [
-     'crawlo.extension.log_interval.LogIntervalExtension',
-     'crawlo.extension.log_stats.LogStats',
-     'crawlo.extension.logging_extension.CustomLoggerExtension',
-     # 'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # Memory monitoring
-     # 'crawlo.extension.request_recorder.RequestRecorderExtension',  # Request recording
-     # 'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # Performance profiling
-     # 'crawlo.extension.health_check.HealthCheckExtension',  # Health check
- ]
-
- # ============================== Logging configuration ==============================
-
- LOG_LEVEL = 'INFO'
- STATS_DUMP = True
- LOG_FILE = f'logs/{{project_name}}.log'
- LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
- LOG_ENCODING = 'utf-8'
-
- # ============================== Performance tuning ==============================
-
- # Connection pool settings
- CONNECTION_POOL_LIMIT = 100
- DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # 10MB
- DOWNLOAD_WARN_SIZE = 1024 * 1024  # 1MB
-
- # Downloader tuning
- DOWNLOADER_HEALTH_CHECK = True
- HEALTH_CHECK_INTERVAL = 30
-
- # Request statistics
- REQUEST_STATS_ENABLED = True
- STATS_RESET_ON_START = False
-
- # HttpX downloader settings
- HTTPX_HTTP2 = True
- HTTPX_FOLLOW_REDIRECTS = True
-
- # AioHttp downloader settings
- AIOHTTP_AUTO_DECOMPRESS = True
- AIOHTTP_FORCE_CLOSE = False
-
- # General tuning
- CONNECTION_TTL_DNS_CACHE = 300
- CONNECTION_KEEPALIVE_TIMEOUT = 15
-
- # Performance monitoring
- ENABLE_PERFORMANCE_MONITORING = True
+ # -*- coding: UTF-8 -*-
+ """
+ High-performance mode configuration template
+ Optimized for large-scale, high-concurrency crawling
+ """
+
+ # ============================== Project information ==============================
+ PROJECT_NAME = '{{project_name}}'
+
+ # ============================== High-performance run mode ==============================
+ # Run mode: 'standalone', 'distributed', or 'auto' (auto-detect)
+ RUN_MODE = 'standalone'  # Standalone mode, for development and small-scale crawls
+
+ # Concurrency settings
+ CONCURRENCY = 32  # High concurrency to make full use of system resources
+ DOWNLOAD_DELAY = 0.1  # Very small delay to maximize throughput
+ RANDOMNESS = False  # Disable randomized delays for consistent performance
+
+ # ============================== Queue configuration ==============================
+
+ # Queue type: 'auto' (auto-select), 'memory' (in-memory queue), 'redis' (distributed queue)
+ QUEUE_TYPE = 'auto'  # Auto-detect; uses the Redis queue when Redis is available
+ SCHEDULER_MAX_QUEUE_SIZE = 5000
+ SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'
+ QUEUE_MAX_RETRIES = 3
+ QUEUE_TIMEOUT = 300
+
+ # ============================== Deduplication and filtering ==============================
+
+ # In high-performance mode, use Redis-based deduplication when Redis is available, otherwise in-memory
+ DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
+ FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'
+
+ # --- Redis settings (for distributed deduplication and queues) ---
+ REDIS_HOST = '127.0.0.1'
+ REDIS_PORT = 6379
+ REDIS_PASSWORD = ''  # Fill in if a password is set
+
+ # Build the URL depending on whether a password is set
+ if REDIS_PASSWORD:
+     REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
+ else:
+     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'
+
+ # Redis key configuration has moved into the individual components, under a unified naming scheme:
+ # crawlo:{project_name}:filter:fingerprint (request deduplication)
+ # crawlo:{project_name}:item:fingerprint (item deduplication)
+ # crawlo:{project_name}:queue:requests (request queue)
+ # crawlo:{project_name}:queue:processing (in-progress queue)
+ # crawlo:{project_name}:queue:failed (failed queue)
+
+ REDIS_TTL = 0
+ CLEANUP_FP = 0
+ FILTER_DEBUG = True
+ DECODE_RESPONSES = True
+
+ # ============================== User-defined middleware ==============================
+ # Note: the framework's default middlewares are loaded automatically; add or override them here
+
+ # Middleware list (framework defaults + user-defined middlewares)
+ # MIDDLEWARES = [
+ #     '{{project_name}}.middlewares.CustomMiddleware',  # Example custom middleware
+ # ]
+
+ # ============================== User-defined pipelines ==============================
+ # Note: the framework's default pipelines are loaded automatically; add or override them here
+
+ # Pipeline list (framework defaults + user-defined pipelines)
+ # PIPELINES = [
+ #     '{{project_name}}.pipelines.DatabasePipeline',  # Custom database pipeline
+ #     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
+ #     'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
+ # ]
+
+ # Explicitly insert the default dedup pipeline at the head of the pipeline list
+ # PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
+
+ # ============================== User-defined extensions ==============================
+ # Note: the framework's default extensions are loaded automatically; add or override them here
+
+ # Extension list (framework defaults + user-defined extensions)
+ # EXTENSIONS = [
+ #     'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # Memory monitoring
+ #     'crawlo.extension.request_recorder.RequestRecorderExtension',  # Request recording
+ #     'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # Performance profiling
+ #     'crawlo.extension.health_check.HealthCheckExtension',  # Health check
+ # ]
+
+ # ============================== Logging configuration ==============================
+
+ LOG_LEVEL = 'INFO'
+ STATS_DUMP = True
+ LOG_FILE = f'logs/{{project_name}}.log'
+ LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
+ LOG_ENCODING = 'utf-8'
+
+ # ============================== Performance tuning ==============================
+
+ # Connection pool settings
+ CONNECTION_POOL_LIMIT = 100
+ DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # 10MB
+ DOWNLOAD_WARN_SIZE = 1024 * 1024  # 1MB
+
+ # Downloader tuning
+ DOWNLOADER_HEALTH_CHECK = True
+ HEALTH_CHECK_INTERVAL = 30
+
+ # Request statistics
+ REQUEST_STATS_ENABLED = True
+ STATS_RESET_ON_START = False
+
+ # HttpX downloader settings
+ HTTPX_HTTP2 = True
+ HTTPX_FOLLOW_REDIRECTS = True
+
+ # AioHttp downloader settings
+ AIOHTTP_AUTO_DECOMPRESS = True
+ AIOHTTP_FORCE_CLOSE = False
+
+ # General tuning
+ CONNECTION_TTL_DNS_CACHE = 300
+ CONNECTION_KEEPALIVE_TIMEOUT = 15
+
+ # Performance monitoring
+ ENABLE_PERFORMANCE_MONITORING = True
  MEMORY_USAGE_WARNING_THRESHOLD = 800 # MB
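
The substantive change in this template: 1.2.8 declared MIDDLEWARES, PIPELINES, and EXTENSIONS outright, while in 1.2.9 those lists are commented out because the framework's defaults are loaded automatically and user lists only add to or override them. As a sketch of the new convention (the CustomMiddleware path is the template's own illustrative placeholder; the exact merge semantics are not shown in this diff), a generated settings.py that enables one custom middleware might look like this:

    # settings.py rendered from the 1.2.9 high-performance template
    # for a project named "myproject" (name is illustrative).
    PROJECT_NAME = 'myproject'
    CONCURRENCY = 32
    DOWNLOAD_DELAY = 0.1

    # Framework default middlewares load automatically in 1.2.9;
    # defining MIDDLEWARES only adds to (or overrides) the defaults.
    MIDDLEWARES = [
        'myproject.middlewares.CustomMiddleware',  # hypothetical user middleware
    ]

Note that PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE) is also commented out in 1.2.9, which suggests the default deduplication pipeline is now registered by the framework itself.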
@@ -0,0 +1,30 @@
+ # -*- coding: UTF-8 -*-
+ """
+ {{project_name}} minimal configuration file
+ =============================
+ Contains only the most essential and commonly used settings; suited to quick starts and simple projects.
+ """
+
+ # ============================== Project information ==============================
+ PROJECT_NAME = '{{project_name}}'
+
+ # ============================== Core settings ==============================
+ # Concurrency
+ CONCURRENCY = 4
+
+ # Request delay (seconds)
+ DOWNLOAD_DELAY = 1.0
+
+ # ============================== Data storage ==============================
+ # JSON file storage (enabled by default)
+ PIPELINES = [
+     'crawlo.pipelines.json_pipeline.JsonPipeline',
+ ]
+
+ # ============================== Logging configuration ==============================
+ LOG_LEVEL = 'INFO'
+ LOG_FILE = f'logs/{{project_name}}.log'
+ STATS_DUMP = True
+
+ # ============================== Custom settings ==============================
+ # Add project-specific settings here
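
settings_minimal.py.tmpl is a new file in 1.2.9, and the only substitution it performs is {{project_name}}. As a sketch, the rendered output for a hypothetical project named "quotes" would be:

    # Rendered settings_minimal.py for a project named "quotes" (illustrative).
    PROJECT_NAME = 'quotes'

    CONCURRENCY = 4          # concurrent requests
    DOWNLOAD_DELAY = 1.0     # seconds between requests

    PIPELINES = [
        'crawlo.pipelines.json_pipeline.JsonPipeline',  # JSON file output
    ]

    LOG_LEVEL = 'INFO'
    LOG_FILE = f'logs/quotes.log'
    STATS_DUMP = True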
@@ -1,99 +1,97 @@
- # -*- coding: UTF-8 -*-
- """
- Simple mode configuration template
- Minimal configuration, suited to quick starts and simple projects
- """
-
- # ============================== Project information ==============================
- PROJECT_NAME = '{{project_name}}'
-
- # ============================== Simple run mode ==============================
- # Run mode: 'standalone', 'distributed', or 'auto' (auto-detect)
- RUN_MODE = 'standalone'  # Standalone mode, for development and small-scale crawls
-
- # Concurrency settings
- CONCURRENCY = 4  # Low concurrency to reduce resource usage
- DOWNLOAD_DELAY = 1.0  # Longer delay to reduce load on the target site
-
- # ============================== Queue configuration ==============================
-
- # Queue type: 'auto' (auto-select), 'memory' (in-memory queue), 'redis' (distributed queue)
- QUEUE_TYPE = 'auto'  # Auto-detect; uses the Redis queue when Redis is available
- SCHEDULER_MAX_QUEUE_SIZE = 1000
- SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'
- QUEUE_MAX_RETRIES = 3
- QUEUE_TIMEOUT = 300
-
- # ============================== Deduplication and filtering ==============================
-
- # Simple mode uses the in-memory deduplication pipeline and filter
- DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
- FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
-
- # --- Redis settings (for distributed deduplication and queues) ---
- REDIS_HOST = '127.0.0.1'
- REDIS_PORT = 6379
- REDIS_PASSWORD = ''  # Fill in if a password is set
-
- # Build the URL depending on whether a password is set
- if REDIS_PASSWORD:
-     REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
- else:
-     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'
-
- # Redis key configuration has moved into the individual components, under a unified naming scheme:
- # crawlo:{project_name}:filter:fingerprint (request deduplication)
- # crawlo:{project_name}:item:fingerprint (item deduplication)
- # crawlo:{project_name}:queue:requests (request queue)
- # crawlo:{project_name}:queue:processing (in-progress queue)
- # crawlo:{project_name}:queue:failed (failed queue)
-
- REDIS_TTL = 0
- CLEANUP_FP = 0
- FILTER_DEBUG = True
- DECODE_RESPONSES = True
-
- # ============================== Middleware configuration ==============================
-
- MIDDLEWARES = [
-     # === Request preprocessing stage ===
-     'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',
-     'crawlo.middleware.download_delay.DownloadDelayMiddleware',
-     'crawlo.middleware.default_header.DefaultHeaderMiddleware',
-     'crawlo.middleware.proxy.ProxyMiddleware',
-     'crawlo.middleware.offsite.OffsiteMiddleware',
-
-     # === Response processing stage ===
-     'crawlo.middleware.retry.RetryMiddleware',
-     'crawlo.middleware.response_code.ResponseCodeMiddleware',
-     'crawlo.middleware.response_filter.ResponseFilterMiddleware',
- ]
-
- # ============================== Pipeline configuration ==============================
-
- # Item pipelines (enabled storage backends)
- PIPELINES = [
-     'crawlo.pipelines.console_pipeline.ConsolePipeline',
-     # '{{project_name}}.pipelines.DatabasePipeline',  # Custom database pipeline
-     # 'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
-     # 'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
- ]
-
- # Explicitly insert the default dedup pipeline at the head of the pipeline list
- PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)
-
- # ============================== Extensions ==============================
-
- EXTENSIONS = [
-     'crawlo.extension.log_interval.LogIntervalExtension',
-     'crawlo.extension.log_stats.LogStats',
-     'crawlo.extension.logging_extension.CustomLoggerExtension',
- ]
-
- # ============================== Logging configuration ==============================
-
- LOG_LEVEL = 'INFO'
- STATS_DUMP = True
- LOG_FILE = f'logs/{{project_name}}.log'
- LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
+ # -*- coding: UTF-8 -*-
+ """
+ Simple mode configuration template
+ Minimal configuration, suited to quick starts and simple projects
+ """
+
+ # ============================== Project information ==============================
+ PROJECT_NAME = '{{project_name}}'
+
+ # ============================== Simple run mode ==============================
+ # Run mode: 'standalone', 'distributed', or 'auto' (auto-detect)
+ RUN_MODE = 'standalone'  # Standalone mode, for development and small-scale crawls
+
+ # Concurrency settings
+ CONCURRENCY = 4  # Low concurrency to reduce resource usage
+ DOWNLOAD_DELAY = 1.0  # Longer delay to reduce load on the target site
+
+ # ============================== Queue configuration ==============================
+
+ # Note: the framework ships default queue settings; the options below rarely need changing
+ # To customize, uncomment and adjust the values
+
+ # Queue type: 'auto' (auto-select), 'memory' (in-memory queue), 'redis' (distributed queue)
+ # QUEUE_TYPE = 'auto'  # Auto-detect; uses the Redis queue when Redis is available
+ # SCHEDULER_MAX_QUEUE_SIZE = 1000
+ # SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'
+ # QUEUE_MAX_RETRIES = 3
+ # QUEUE_TIMEOUT = 300
+
+ # ============================== Deduplication and filtering ==============================
+
+ # Note: the framework ships default deduplication settings; the options below rarely need changing
+ # To customize, uncomment and adjust the values
+
+ # Simple mode uses the in-memory deduplication pipeline and filter
+ # DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
+ # FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
+
+ # --- Redis settings (for distributed deduplication and queues) ---
+ # REDIS_HOST = '127.0.0.1'
+ # REDIS_PORT = 6379
+ # REDIS_PASSWORD = ''  # Fill in if a password is set
+
+ # Build the URL depending on whether a password is set
+ # if REDIS_PASSWORD:
+ #     REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
+ # else:
+ #     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'
+
+ # Redis key configuration has moved into the individual components, under a unified naming scheme:
+ # crawlo:{project_name}:filter:fingerprint (request deduplication)
+ # crawlo:{project_name}:item:fingerprint (item deduplication)
+ # crawlo:{project_name}:queue:requests (request queue)
+ # crawlo:{project_name}:queue:processing (in-progress queue)
+ # crawlo:{project_name}:queue:failed (failed queue)
+
+ # REDIS_TTL = 0
+ # CLEANUP_FP = 0
+ # FILTER_DEBUG = True
+ # DECODE_RESPONSES = True
+
+ # ============================== User-defined middleware ==============================
+ # Note: the framework's default middlewares are loaded automatically; add or override them here
+
+ # Middleware list (framework defaults + user-defined middlewares)
+ # MIDDLEWARES = [
+ #     '{{project_name}}.middlewares.CustomMiddleware',  # Example custom middleware
+ # ]
+
+ # ============================== User-defined pipelines ==============================
+ # Note: the framework's default pipelines are loaded automatically; add or override them here
+
+ # Pipeline list (framework defaults + user-defined pipelines)
+ # PIPELINES = [
+ #     '{{project_name}}.pipelines.DatabasePipeline',  # Custom database pipeline
+ #     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
+ #     'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
+ # ]
+
+ # ============================== User-defined extensions ==============================
+ # Note: the framework's default extensions are loaded automatically; add or override them here
+
+ # Extension list (framework defaults + user-defined extensions)
+ # EXTENSIONS = [
+ #     'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # Memory monitoring
+ #     'crawlo.extension.request_recorder.RequestRecorderExtension',  # Request recording
+ #     'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # Performance profiling
+ #     'crawlo.extension.health_check.HealthCheckExtension',  # Health check
+ # ]
+
+ # ============================== Logging configuration ==============================
+
+ LOG_LEVEL = 'INFO'
+ STATS_DUMP = True
+ LOG_FILE = f'logs/{{project_name}}.log'
+ LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
  LOG_ENCODING = 'utf-8'
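
Both templates build REDIS_URL from the host, port, and password with the same if/else; the password form places the password after a colon with an empty username, the standard redis:// URL shape. A quick check of both branches, using the templates' own expressions (the 's3cret' password is a hypothetical value):

    # Both branches of the templates' REDIS_URL construction,
    # with the template defaults for host and port.
    REDIS_HOST, REDIS_PORT = '127.0.0.1', 6379

    REDIS_PASSWORD = ''  # template default: no password
    REDIS_URL = (f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
                 if REDIS_PASSWORD else f'redis://{REDIS_HOST}:{REDIS_PORT}/0')
    assert REDIS_URL == 'redis://127.0.0.1:6379/0'

    REDIS_PASSWORD = 's3cret'  # hypothetical password
    REDIS_URL = (f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
                 if REDIS_PASSWORD else f'redis://{REDIS_HOST}:{REDIS_PORT}/0')
    assert REDIS_URL == 'redis://:s3cret@127.0.0.1:6379/0'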
@@ -1,6 +1,6 @@
- # -*- coding: UTF-8 -*-
- """
- {{project_name}}.spiders
- ========================
- All spiders live here.
+ # -*- coding: UTF-8 -*-
+ """
+ {{project_name}}.spiders
+ ========================
+ All spiders live here.
  """
@@ -1,48 +1,48 @@
- #!/usr/bin/env python
- # -*- coding: UTF-8 -*-
- """
- {{project_name}} run script
- ============================
- A simplified spider launcher based on the Crawlo framework.
- """
-
- import sys
- import os
- import asyncio
-
- # Add the project root to the Python path
- project_root = os.path.dirname(os.path.abspath(__file__))
- sys.path.insert(0, project_root)
-
- # Change to the project root directory
- os.chdir(project_root)
-
- from crawlo.crawler import CrawlerProcess
-
-
- def main():
-     """Main entry point: run a fixed spider"""
-     print("🚀 Starting the {{project_name}} spider")
-
-     # Create the crawler process (default settings load automatically)
-     try:
-         # Make sure the spider modules are imported correctly
-         spider_modules = ['{{project_name}}.spiders']
-         process = CrawlerProcess(spider_modules=spider_modules)
-         print("✅ Crawler process initialized")
-
-         # Run the fixed spider
-         # TODO: replace 'your_spider_name' with your actual spider name
-         asyncio.run(process.crawl('your_spider_name'))
-
-         print("✅ Spider run complete")
-
-     except Exception as e:
-         print(f"❌ Run failed: {e}")
-         import traceback
-         traceback.print_exc()
-         sys.exit(1)
-
-
- if __name__ == '__main__':
+ #!/usr/bin/env python
+ # -*- coding: UTF-8 -*-
+ """
+ {{project_name}} run script
+ ============================
+ A simplified spider launcher based on the Crawlo framework.
+ """
+
+ import sys
+ import os
+ import asyncio
+
+ # Add the project root to the Python path
+ project_root = os.path.dirname(os.path.abspath(__file__))
+ sys.path.insert(0, project_root)
+
+ # Change to the project root directory
+ os.chdir(project_root)
+
+ from crawlo.crawler import CrawlerProcess
+
+
+ def main():
+     """Main entry point: run a fixed spider"""
+     print("🚀 Starting the {{project_name}} spider")
+
+     # Create the crawler process (default settings load automatically)
+     try:
+         # Make sure the spider modules are imported correctly
+         spider_modules = ['{{project_name}}.spiders']
+         process = CrawlerProcess(spider_modules=spider_modules)
+         print("✅ Crawler process initialized")
+
+         # Run the fixed spider
+         # TODO: replace 'your_spider_name' with your actual spider name
+         asyncio.run(process.crawl('your_spider_name'))
+
+         print("✅ Spider run complete")
+
+     except Exception as e:
+         print(f"❌ Run failed: {e}")
+         import traceback
+         traceback.print_exc()
+         sys.exit(1)
+
+
+ if __name__ == '__main__':
  main()
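
The run.py template ships with a TODO placeholder for the spider name. A minimal filled-in sketch, assuming a project named "quotes" with a spider registered as "quotes_spider" (both names illustrative; CrawlerProcess and crawl() are used exactly as the template uses them):

    #!/usr/bin/env python
    # Minimal run.py adapted from run.py.tmpl (crawlo 1.2.9).
    import asyncio
    from crawlo.crawler import CrawlerProcess


    def main():
        # spider_modules tells the process where to discover spider classes
        process = CrawlerProcess(spider_modules=['quotes.spiders'])
        # crawl() takes the spider's registered name (the template's TODO slot)
        asyncio.run(process.crawl('quotes_spider'))


    if __name__ == '__main__':
        main()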