crawlo-1.4.5-py3-none-any.whl → crawlo-1.4.7-py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.

Files changed (375)
  1. crawlo/__init__.py +90 -89
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +186 -186
  7. crawlo/commands/help.py +140 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +379 -341
  10. crawlo/commands/startproject.py +460 -460
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +196 -196
  13. crawlo/config.py +320 -312
  14. crawlo/config_validator.py +277 -277
  15. crawlo/core/__init__.py +52 -52
  16. crawlo/core/engine.py +451 -438
  17. crawlo/core/processor.py +47 -47
  18. crawlo/core/scheduler.py +290 -291
  19. crawlo/crawler.py +698 -657
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -194
  22. crawlo/downloader/__init__.py +280 -276
  23. crawlo/downloader/aiohttp_downloader.py +233 -233
  24. crawlo/downloader/cffi_downloader.py +250 -245
  25. crawlo/downloader/httpx_downloader.py +265 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +425 -402
  28. crawlo/downloader/selenium_downloader.py +486 -472
  29. crawlo/event.py +45 -11
  30. crawlo/exceptions.py +215 -82
  31. crawlo/extension/__init__.py +65 -64
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +94 -94
  34. crawlo/extension/log_stats.py +70 -70
  35. crawlo/extension/logging_extension.py +53 -61
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/factories/__init__.py +27 -27
  40. crawlo/factories/base.py +68 -68
  41. crawlo/factories/crawler.py +104 -103
  42. crawlo/factories/registry.py +84 -84
  43. crawlo/factories/utils.py +135 -0
  44. crawlo/filters/__init__.py +170 -153
  45. crawlo/filters/aioredis_filter.py +348 -264
  46. crawlo/filters/memory_filter.py +261 -276
  47. crawlo/framework.py +306 -292
  48. crawlo/initialization/__init__.py +44 -44
  49. crawlo/initialization/built_in.py +391 -434
  50. crawlo/initialization/context.py +141 -141
  51. crawlo/initialization/core.py +240 -194
  52. crawlo/initialization/phases.py +230 -149
  53. crawlo/initialization/registry.py +143 -145
  54. crawlo/initialization/utils.py +49 -0
  55. crawlo/interfaces.py +23 -23
  56. crawlo/items/__init__.py +23 -23
  57. crawlo/items/base.py +23 -23
  58. crawlo/items/fields.py +52 -52
  59. crawlo/items/items.py +104 -104
  60. crawlo/logging/__init__.py +42 -46
  61. crawlo/logging/config.py +277 -197
  62. crawlo/logging/factory.py +175 -171
  63. crawlo/logging/manager.py +104 -112
  64. crawlo/middleware/__init__.py +87 -24
  65. crawlo/middleware/default_header.py +132 -132
  66. crawlo/middleware/download_delay.py +104 -104
  67. crawlo/middleware/middleware_manager.py +142 -142
  68. crawlo/middleware/offsite.py +123 -123
  69. crawlo/middleware/proxy.py +209 -386
  70. crawlo/middleware/request_ignore.py +86 -86
  71. crawlo/middleware/response_code.py +150 -150
  72. crawlo/middleware/response_filter.py +136 -136
  73. crawlo/middleware/retry.py +124 -124
  74. crawlo/mode_manager.py +287 -253
  75. crawlo/network/__init__.py +21 -21
  76. crawlo/network/request.py +375 -379
  77. crawlo/network/response.py +569 -664
  78. crawlo/pipelines/__init__.py +53 -22
  79. crawlo/pipelines/base_pipeline.py +452 -0
  80. crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
  81. crawlo/pipelines/console_pipeline.py +39 -39
  82. crawlo/pipelines/csv_pipeline.py +316 -316
  83. crawlo/pipelines/database_dedup_pipeline.py +197 -197
  84. crawlo/pipelines/json_pipeline.py +218 -218
  85. crawlo/pipelines/memory_dedup_pipeline.py +105 -105
  86. crawlo/pipelines/mongo_pipeline.py +140 -132
  87. crawlo/pipelines/mysql_pipeline.py +470 -326
  88. crawlo/pipelines/pipeline_manager.py +100 -100
  89. crawlo/pipelines/redis_dedup_pipeline.py +155 -156
  90. crawlo/project.py +347 -347
  91. crawlo/queue/__init__.py +10 -0
  92. crawlo/queue/pqueue.py +38 -38
  93. crawlo/queue/queue_manager.py +591 -525
  94. crawlo/queue/redis_priority_queue.py +519 -370
  95. crawlo/settings/__init__.py +7 -7
  96. crawlo/settings/default_settings.py +285 -270
  97. crawlo/settings/setting_manager.py +219 -219
  98. crawlo/spider/__init__.py +657 -657
  99. crawlo/stats_collector.py +82 -73
  100. crawlo/subscriber.py +129 -129
  101. crawlo/task_manager.py +138 -138
  102. crawlo/templates/crawlo.cfg.tmpl +10 -10
  103. crawlo/templates/project/__init__.py.tmpl +2 -4
  104. crawlo/templates/project/items.py.tmpl +13 -17
  105. crawlo/templates/project/middlewares.py.tmpl +38 -38
  106. crawlo/templates/project/pipelines.py.tmpl +35 -36
  107. crawlo/templates/project/settings.py.tmpl +110 -157
  108. crawlo/templates/project/settings_distributed.py.tmpl +156 -161
  109. crawlo/templates/project/settings_gentle.py.tmpl +170 -171
  110. crawlo/templates/project/settings_high_performance.py.tmpl +171 -172
  111. crawlo/templates/project/settings_minimal.py.tmpl +99 -77
  112. crawlo/templates/project/settings_simple.py.tmpl +168 -169
  113. crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
  114. crawlo/templates/run.py.tmpl +23 -30
  115. crawlo/templates/spider/spider.py.tmpl +33 -144
  116. crawlo/templates/spiders_init.py.tmpl +5 -10
  117. crawlo/tools/__init__.py +86 -189
  118. crawlo/tools/date_tools.py +289 -289
  119. crawlo/tools/distributed_coordinator.py +384 -384
  120. crawlo/tools/scenario_adapter.py +262 -262
  121. crawlo/tools/text_cleaner.py +232 -232
  122. crawlo/utils/__init__.py +50 -50
  123. crawlo/utils/batch_processor.py +276 -259
  124. crawlo/utils/config_manager.py +442 -0
  125. crawlo/utils/controlled_spider_mixin.py +439 -439
  126. crawlo/utils/db_helper.py +250 -244
  127. crawlo/utils/error_handler.py +410 -410
  128. crawlo/utils/fingerprint.py +121 -121
  129. crawlo/utils/func_tools.py +82 -82
  130. crawlo/utils/large_scale_helper.py +344 -344
  131. crawlo/utils/leak_detector.py +335 -0
  132. crawlo/utils/log.py +79 -79
  133. crawlo/utils/misc.py +81 -81
  134. crawlo/utils/mongo_connection_pool.py +157 -0
  135. crawlo/utils/mysql_connection_pool.py +197 -0
  136. crawlo/utils/performance_monitor.py +285 -285
  137. crawlo/utils/queue_helper.py +175 -175
  138. crawlo/utils/redis_checker.py +91 -0
  139. crawlo/utils/redis_connection_pool.py +578 -388
  140. crawlo/utils/redis_key_validator.py +198 -198
  141. crawlo/utils/request.py +278 -256
  142. crawlo/utils/request_serializer.py +225 -225
  143. crawlo/utils/resource_manager.py +337 -0
  144. crawlo/utils/selector_helper.py +137 -137
  145. crawlo/utils/singleton.py +70 -0
  146. crawlo/utils/spider_loader.py +201 -201
  147. crawlo/utils/text_helper.py +94 -94
  148. crawlo/utils/{url.py → url_utils.py} +39 -39
  149. crawlo-1.4.7.dist-info/METADATA +689 -0
  150. crawlo-1.4.7.dist-info/RECORD +347 -0
  151. examples/__init__.py +7 -7
  152. tests/__init__.py +7 -7
  153. tests/advanced_tools_example.py +217 -275
  154. tests/authenticated_proxy_example.py +110 -106
  155. tests/baidu_performance_test.py +108 -108
  156. tests/baidu_test.py +59 -59
  157. tests/bug_check_test.py +250 -250
  158. tests/cleaners_example.py +160 -160
  159. tests/comprehensive_framework_test.py +212 -212
  160. tests/comprehensive_test.py +81 -81
  161. tests/comprehensive_testing_summary.md +186 -186
  162. tests/config_validation_demo.py +142 -142
  163. tests/controlled_spider_example.py +205 -205
  164. tests/date_tools_example.py +180 -180
  165. tests/debug_configure.py +69 -69
  166. tests/debug_framework_logger.py +84 -84
  167. tests/debug_log_config.py +126 -126
  168. tests/debug_log_levels.py +63 -63
  169. tests/debug_pipelines.py +66 -66
  170. tests/detailed_log_test.py +233 -233
  171. tests/direct_selector_helper_test.py +96 -96
  172. tests/distributed_dedup_test.py +467 -0
  173. tests/distributed_test.py +66 -66
  174. tests/distributed_test_debug.py +76 -76
  175. tests/dynamic_loading_example.py +523 -523
  176. tests/dynamic_loading_test.py +104 -104
  177. tests/error_handling_example.py +171 -171
  178. tests/explain_mysql_update_behavior.py +77 -0
  179. tests/final_comprehensive_test.py +151 -151
  180. tests/final_log_test.py +260 -260
  181. tests/final_validation_test.py +182 -182
  182. tests/fix_log_test.py +142 -142
  183. tests/framework_performance_test.py +202 -202
  184. tests/log_buffering_test.py +111 -111
  185. tests/log_generation_timing_test.py +153 -153
  186. tests/monitor_redis_dedup.sh +72 -0
  187. tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -12
  188. tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -100
  189. tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -13
  190. tests/ofweek_scrapy/ofweek_scrapy/settings.py +84 -84
  191. tests/ofweek_scrapy/scrapy.cfg +11 -11
  192. tests/optimized_performance_test.py +211 -211
  193. tests/performance_comparison.py +244 -244
  194. tests/queue_blocking_test.py +113 -113
  195. tests/queue_test.py +89 -89
  196. tests/redis_key_validation_demo.py +130 -130
  197. tests/request_params_example.py +150 -150
  198. tests/response_improvements_example.py +144 -144
  199. tests/scrapy_comparison/ofweek_scrapy.py +138 -138
  200. tests/scrapy_comparison/scrapy_test.py +133 -133
  201. tests/simple_cli_test.py +55 -0
  202. tests/simple_command_test.py +119 -119
  203. tests/simple_crawlo_test.py +126 -126
  204. tests/simple_follow_test.py +38 -38
  205. tests/simple_log_test2.py +137 -137
  206. tests/simple_optimization_test.py +128 -128
  207. tests/simple_queue_type_test.py +41 -41
  208. tests/simple_response_selector_test.py +94 -94
  209. tests/simple_selector_helper_test.py +154 -154
  210. tests/simple_selector_test.py +207 -207
  211. tests/simple_spider_test.py +49 -49
  212. tests/simple_url_test.py +73 -73
  213. tests/simulate_mysql_update_test.py +140 -0
  214. tests/spider_log_timing_test.py +177 -177
  215. tests/test_advanced_tools.py +148 -148
  216. tests/test_all_commands.py +230 -230
  217. tests/test_all_pipeline_fingerprints.py +133 -133
  218. tests/test_all_redis_key_configs.py +145 -145
  219. tests/test_asyncmy_usage.py +57 -0
  220. tests/test_batch_processor.py +178 -178
  221. tests/test_cleaners.py +54 -54
  222. tests/test_cli_arguments.py +119 -0
  223. tests/test_component_factory.py +174 -174
  224. tests/test_config_consistency.py +80 -80
  225. tests/test_config_merge.py +152 -152
  226. tests/test_config_validator.py +182 -182
  227. tests/test_controlled_spider_mixin.py +79 -79
  228. tests/test_crawler_process_import.py +38 -38
  229. tests/test_crawler_process_spider_modules.py +47 -47
  230. tests/test_crawlo_proxy_integration.py +114 -108
  231. tests/test_date_tools.py +123 -123
  232. tests/test_dedup_fix.py +220 -220
  233. tests/test_dedup_pipeline_consistency.py +124 -124
  234. tests/test_default_header_middleware.py +313 -313
  235. tests/test_distributed.py +65 -65
  236. tests/test_double_crawlo_fix.py +204 -204
  237. tests/test_double_crawlo_fix_simple.py +124 -124
  238. tests/test_download_delay_middleware.py +221 -221
  239. tests/test_downloader_proxy_compatibility.py +272 -268
  240. tests/test_edge_cases.py +305 -305
  241. tests/test_encoding_core.py +56 -56
  242. tests/test_encoding_detection.py +126 -126
  243. tests/test_enhanced_error_handler.py +270 -270
  244. tests/test_enhanced_error_handler_comprehensive.py +245 -245
  245. tests/test_error_handler_compatibility.py +112 -112
  246. tests/test_factories.py +252 -252
  247. tests/test_factory_compatibility.py +196 -196
  248. tests/test_final_validation.py +153 -153
  249. tests/test_fingerprint_consistency.py +135 -135
  250. tests/test_fingerprint_simple.py +51 -51
  251. tests/test_get_component_logger.py +83 -83
  252. tests/test_hash_performance.py +99 -99
  253. tests/test_integration.py +169 -169
  254. tests/test_item_dedup_redis_key.py +122 -122
  255. tests/test_large_scale_helper.py +235 -235
  256. tests/test_logging_enhancements.py +374 -374
  257. tests/test_logging_final.py +184 -184
  258. tests/test_logging_integration.py +312 -312
  259. tests/test_logging_system.py +282 -282
  260. tests/test_middleware_debug.py +141 -141
  261. tests/test_mode_consistency.py +51 -51
  262. tests/test_multi_directory.py +67 -67
  263. tests/test_multiple_spider_modules.py +80 -80
  264. tests/test_mysql_pipeline_config.py +165 -0
  265. tests/test_mysql_pipeline_error.py +99 -0
  266. tests/test_mysql_pipeline_init_log.py +83 -0
  267. tests/test_mysql_pipeline_integration.py +133 -0
  268. tests/test_mysql_pipeline_refactor.py +144 -0
  269. tests/test_mysql_pipeline_refactor_simple.py +86 -0
  270. tests/test_mysql_pipeline_robustness.py +196 -0
  271. tests/test_mysql_pipeline_types.py +89 -0
  272. tests/test_mysql_update_columns.py +94 -0
  273. tests/test_offsite_middleware.py +244 -244
  274. tests/test_offsite_middleware_simple.py +203 -203
  275. tests/test_optimized_selector_naming.py +100 -100
  276. tests/test_parsel.py +29 -29
  277. tests/test_performance.py +327 -327
  278. tests/test_performance_monitor.py +115 -115
  279. tests/test_pipeline_fingerprint_consistency.py +86 -86
  280. tests/test_priority_behavior.py +211 -211
  281. tests/test_priority_consistency.py +151 -151
  282. tests/test_priority_consistency_fixed.py +249 -249
  283. tests/test_proxy_health_check.py +32 -32
  284. tests/test_proxy_middleware.py +217 -121
  285. tests/test_proxy_middleware_enhanced.py +212 -216
  286. tests/test_proxy_middleware_integration.py +142 -137
  287. tests/test_proxy_middleware_refactored.py +207 -184
  288. tests/test_proxy_only.py +84 -0
  289. tests/test_proxy_providers.py +56 -56
  290. tests/test_proxy_stats.py +19 -19
  291. tests/test_proxy_strategies.py +59 -59
  292. tests/test_proxy_with_downloader.py +153 -0
  293. tests/test_queue_empty_check.py +41 -41
  294. tests/test_queue_manager_double_crawlo.py +173 -173
  295. tests/test_queue_manager_redis_key.py +179 -179
  296. tests/test_queue_naming.py +154 -154
  297. tests/test_queue_type.py +106 -106
  298. tests/test_queue_type_redis_config_consistency.py +130 -130
  299. tests/test_random_headers_default.py +322 -322
  300. tests/test_random_headers_necessity.py +308 -308
  301. tests/test_random_user_agent.py +72 -72
  302. tests/test_redis_config.py +28 -28
  303. tests/test_redis_connection_pool.py +294 -294
  304. tests/test_redis_key_naming.py +181 -181
  305. tests/test_redis_key_validator.py +123 -123
  306. tests/test_redis_queue.py +224 -224
  307. tests/test_redis_queue_name_fix.py +175 -175
  308. tests/test_redis_queue_type_fallback.py +129 -129
  309. tests/test_request_ignore_middleware.py +182 -182
  310. tests/test_request_params.py +111 -111
  311. tests/test_request_serialization.py +70 -70
  312. tests/test_response_code_middleware.py +349 -349
  313. tests/test_response_filter_middleware.py +427 -427
  314. tests/test_response_follow.py +104 -104
  315. tests/test_response_improvements.py +152 -152
  316. tests/test_response_selector_methods.py +92 -92
  317. tests/test_response_url_methods.py +70 -70
  318. tests/test_response_urljoin.py +86 -86
  319. tests/test_retry_middleware.py +333 -333
  320. tests/test_retry_middleware_realistic.py +273 -273
  321. tests/test_scheduler.py +252 -252
  322. tests/test_scheduler_config_update.py +133 -133
  323. tests/test_scrapy_style_encoding.py +112 -112
  324. tests/test_selector_helper.py +100 -100
  325. tests/test_selector_optimizations.py +146 -146
  326. tests/test_simple_response.py +61 -61
  327. tests/test_spider_loader.py +49 -49
  328. tests/test_spider_loader_comprehensive.py +69 -69
  329. tests/test_spider_modules.py +84 -84
  330. tests/test_spiders/test_spider.py +9 -9
  331. tests/test_telecom_spider_redis_key.py +205 -205
  332. tests/test_template_content.py +87 -87
  333. tests/test_template_redis_key.py +134 -134
  334. tests/test_tools.py +159 -159
  335. tests/test_user_agent_randomness.py +176 -176
  336. tests/test_user_agents.py +96 -96
  337. tests/untested_features_report.md +138 -138
  338. tests/verify_debug.py +51 -51
  339. tests/verify_distributed.py +117 -117
  340. tests/verify_log_fix.py +111 -111
  341. tests/verify_mysql_warnings.py +110 -0
  342. crawlo/logging/async_handler.py +0 -181
  343. crawlo/logging/monitor.py +0 -153
  344. crawlo/logging/sampler.py +0 -167
  345. crawlo/middleware/simple_proxy.py +0 -65
  346. crawlo/tools/authenticated_proxy.py +0 -241
  347. crawlo/tools/data_formatter.py +0 -226
  348. crawlo/tools/data_validator.py +0 -181
  349. crawlo/tools/encoding_converter.py +0 -127
  350. crawlo/tools/network_diagnostic.py +0 -365
  351. crawlo/tools/request_tools.py +0 -83
  352. crawlo/tools/retry_mechanism.py +0 -224
  353. crawlo/utils/env_config.py +0 -143
  354. crawlo/utils/large_scale_config.py +0 -287
  355. crawlo/utils/system.py +0 -11
  356. crawlo/utils/tools.py +0 -5
  357. crawlo-1.4.5.dist-info/METADATA +0 -329
  358. crawlo-1.4.5.dist-info/RECORD +0 -347
  359. tests/env_config_example.py +0 -134
  360. tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +0 -162
  361. tests/test_authenticated_proxy.py +0 -142
  362. tests/test_comprehensive.py +0 -147
  363. tests/test_dynamic_downloaders_proxy.py +0 -125
  364. tests/test_dynamic_proxy.py +0 -93
  365. tests/test_dynamic_proxy_config.py +0 -147
  366. tests/test_dynamic_proxy_real.py +0 -110
  367. tests/test_env_config.py +0 -122
  368. tests/test_framework_env_usage.py +0 -104
  369. tests/test_large_scale_config.py +0 -113
  370. tests/test_proxy_api.py +0 -265
  371. tests/test_real_scenario_proxy.py +0 -196
  372. tests/tools_example.py +0 -261
  373. {crawlo-1.4.5.dist-info → crawlo-1.4.7.dist-info}/WHEEL +0 -0
  374. {crawlo-1.4.5.dist-info → crawlo-1.4.7.dist-info}/entry_points.txt +0 -0
  375. {crawlo-1.4.5.dist-info → crawlo-1.4.7.dist-info}/top_level.txt +0 -0
crawlo/queue/redis_priority_queue.py
@@ -1,370 +1,519 @@
-import asyncio
-import asyncio
-import pickle
-import time
-import traceback
-from typing import Optional, TYPE_CHECKING
-
-import redis.asyncio as aioredis
-
-# Use TYPE_CHECKING to avoid circular imports at runtime
-if TYPE_CHECKING:
-    from crawlo import Request
-
-from crawlo.utils.error_handler import ErrorHandler
-from crawlo.utils.log import get_logger
-from crawlo.utils.redis_connection_pool import get_redis_pool, OptimizedRedisConnectionPool
-from crawlo.utils.request_serializer import RequestSerializer
-
-# Lazy initialization to avoid circular dependencies
-_logger = None
-_error_handler = None
-
-
-def get_module_logger():
-    global _logger
-    if _logger is None:
-        _logger = get_logger(__name__)
-    return _logger
-
-
-def get_module_error_handler():
-    global _error_handler
-    if _error_handler is None:
-        _error_handler = ErrorHandler(__name__)
-    return _error_handler
-
-
-class RedisPriorityQueue:
-    """
-    Distributed asynchronous priority queue backed by Redis
-    """
-
-    def __init__(
-        self,
-        redis_url: str = None,
-        queue_name: str = None,  # default changed to None
-        processing_queue: str = None,  # default changed to None
-        failed_queue: str = None,  # default changed to None
-        max_retries: int = 3,
-        timeout: int = 300,  # task processing timeout (seconds)
-        max_connections: int = 10,  # connection pool size
-        module_name: str = "default"  # added module_name parameter
-    ):
-        # os.getenv() is no longer read directly; redis_url must be passed in
-        if redis_url is None:
-            # Without a redis_url, raise and require it to be configured in settings
-            raise ValueError("redis_url must be provided. Configure it in settings instead of using os.getenv()")
-
-        self.redis_url = redis_url
-        self.module_name = module_name  # keep module_name
-
-        # If no queue_name is given, derive one from module_name
-        if queue_name is None:
-            self.queue_name = f"crawlo:{module_name}:queue:requests"
-        else:
-            # Normalize the queue name, collapsing repeated crawlo prefixes
-            self.queue_name = self._normalize_queue_name(queue_name)
-
-        # If no processing_queue is given, derive it from queue_name
-        if processing_queue is None:
-            if ":queue:requests" in self.queue_name:
-                self.processing_queue = self.queue_name.replace(":queue:requests", ":queue:processing")
-            else:
-                self.processing_queue = f"{self.queue_name}:processing"
-        else:
-            self.processing_queue = processing_queue
-
-        # If no failed_queue is given, derive it from queue_name
-        if failed_queue is None:
-            if ":queue:requests" in self.queue_name:
-                self.failed_queue = self.queue_name.replace(":queue:requests", ":queue:failed")
-            else:
-                self.failed_queue = f"{self.queue_name}:failed"
-        else:
-            self.failed_queue = failed_queue
-
-        self.max_retries = max_retries
-        self.timeout = timeout
-        self.max_connections = max_connections
-        self._redis_pool: Optional[OptimizedRedisConnectionPool] = None
-        self._redis: Optional[aioredis.Redis] = None
-        self._lock = asyncio.Lock()  # lock guarding connection initialization
-        self.request_serializer = RequestSerializer()  # handles serialization
-
-    def _normalize_queue_name(self, queue_name: str) -> str:
-        """
-        Normalize the queue name, handling repeated crawlo prefixes
-
-        :param queue_name: original queue name
-        :return: normalized queue name
-        """
-        # Already well-formed (starts with crawlo: but not crawlo:crawlo:): keep as-is
-        if queue_name.startswith("crawlo:") and not queue_name.startswith("crawlo:crawlo:"):
-            return queue_name
-
-        # Collapse a triple crawlo prefix to the standard format
-        if queue_name.startswith("crawlo:crawlo:crawlo:"):
-            # Triple crawlo prefix: reduce to the standard crawlo: form
-            remaining = queue_name[21:]  # strip the "crawlo:crawlo:crawlo:" prefix
-            if remaining:
-                return f"crawlo:{remaining}"
-            else:
-                return "crawlo:requests"  # default name
-
-        # Collapse a double crawlo prefix
-        elif queue_name.startswith("crawlo:crawlo:"):
-            # Double crawlo prefix: reduce to the standard crawlo: form
-            remaining = queue_name[14:]  # strip the "crawlo:crawlo:" prefix
-            if remaining:
-                return f"crawlo:{remaining}"
-            else:
-                return "crawlo:requests"  # default name
-
-        # Handle names without a crawlo prefix
-        elif not queue_name.startswith("crawlo:"):
-            # crawlo prefix missing: add the crawlo: prefix
-            if queue_name:
-                return f"crawlo:{queue_name}"
-            else:
-                return "crawlo:requests"  # default name
-
-        # Otherwise leave the name unchanged
-        else:
-            return queue_name
-
-    async def connect(self, max_retries=3, delay=1):
-        """Connect to Redis asynchronously, with retries"""
-        async with self._lock:
-            if self._redis is not None:
-                # Already connected; check the connection is still alive
-                try:
-                    await self._redis.ping()
-                    return self._redis
-                except Exception:
-                    # Stale connection; reconnect
-                    self._redis = None
-
-            for attempt in range(max_retries):
-                try:
-                    # Use the optimized connection pool; decode_responses=False avoids encoding issues
-                    self._redis_pool = get_redis_pool(
-                        self.redis_url,
-                        max_connections=self.max_connections,
-                        socket_connect_timeout=5,
-                        socket_timeout=30,
-                        health_check_interval=30,
-                        retry_on_timeout=True,
-                        decode_responses=False,  # never auto-decode responses
-                        encoding='utf-8'
-                    )
-
-                    self._redis = await self._redis_pool.get_connection()
-
-                    # Test the connection
-                    await self._redis.ping()
-                    # Log detailed connection info only in debug mode
-                    # get_module_logger().debug(f"Redis connected (Module: {self.module_name})")  # duplicate log, commented out
-                    return self._redis
-                except Exception as e:
-                    error_msg = f"Redis connection failed (attempt {attempt + 1}/{max_retries}, Module: {self.module_name}): {e}"
-                    get_module_logger().warning(error_msg)
-                    get_module_logger().debug(f"Detailed error info:\n{traceback.format_exc()}")
-                    if attempt < max_retries - 1:
-                        await asyncio.sleep(delay)
-                    else:
-                        raise ConnectionError(f"Unable to connect to Redis (Module: {self.module_name}): {e}")
-
-    async def _ensure_connection(self):
-        """Ensure the connection is alive"""
-        if self._redis is None:
-            await self.connect()
-        try:
-            await self._redis.ping()
-        except Exception as e:
-            get_module_logger().warning(f"Redis connection lost (Module: {self.module_name}), reconnecting...: {e}")
-            self._redis = None
-            await self.connect()
-
-    async def put(self, request, priority: int = 0) -> bool:
-        """Push a request onto the queue"""
-        try:
-            await self._ensure_connection()
-            # Fix priority-ordering consistency
-            # Before: score = -priority (larger priority values dequeued first)
-            # Now: score = priority (smaller priority values dequeue first, matching the memory queue)
-            score = priority
-            key = self._get_request_key(request)
-
-            # 🔥 Clean the Request with the dedicated serialization helper
-            clean_request = self.request_serializer.prepare_for_serialization(request)
-
-            # Make sure the serialized payload can be deserialized again
-            try:
-                serialized = pickle.dumps(clean_request)
-                # Verify the serialized data round-trips
-                pickle.loads(serialized)
-            except Exception as serialize_error:
-                get_module_logger().error(f"Request serialization validation failed (Module: {self.module_name}): {serialize_error}")
-                return False
-
-            pipe = self._redis.pipeline()
-            pipe.zadd(self.queue_name, {key: score})
-            pipe.hset(f"{self.queue_name}:data", key, serialized)
-            result = await pipe.execute()
-
-            if result[0] > 0:
-                get_module_logger().debug(f"Enqueued successfully (Module: {self.module_name}): {request.url}")  # duplicate log, commented out
-            return result[0] > 0
-        except Exception as e:
-            get_module_error_handler().handle_error(
-                e,
-                context=f"Failed to enqueue request (Module: {self.module_name})",
-                raise_error=False
-            )
-            return False
-
-    async def get(self, timeout: float = 5.0):
-        """
-        Fetch a request (with timeout)
-        :param timeout: maximum wait in seconds, to avoid endless polling
-        """
-        try:
-            await self._ensure_connection()
-            start_time = asyncio.get_event_loop().time()
-
-            while True:
-                # Try to pop a task
-                result = await self._redis.zpopmin(self.queue_name, count=1)
-                if result:
-                    key, score = result[0]
-                    serialized = await self._redis.hget(f"{self.queue_name}:data", key)
-                    if not serialized:
-                        continue
-
-                    # Move it to the processing queue
-                    processing_key = f"{key}:{int(time.time())}"
-                    pipe = self._redis.pipeline()
-                    pipe.zadd(self.processing_queue, {processing_key: time.time() + self.timeout})
-                    pipe.hset(f"{self.processing_queue}:data", processing_key, serialized)
-                    pipe.hdel(f"{self.queue_name}:data", key)
-                    await pipe.execute()
-
-                    # Safer deserialization
-                    try:
-                        # First try standard pickle deserialization
-                        request = pickle.loads(serialized)
-                        return request
-                    except UnicodeDecodeError:
-                        # On encoding errors, retry with latin1
-                        request = pickle.loads(serialized, encoding='latin1')
-                        return request
-                    except Exception as pickle_error:
-                        # If pickle fails entirely, log the error and skip this task
-                        get_module_logger().error(f"Unable to deserialize request data (Module: {self.module_name}): {pickle_error}")
-                        # Remove the invalid task from the processing queue
-                        await self._redis.zrem(self.processing_queue, processing_key)
-                        await self._redis.hdel(f"{self.processing_queue}:data", processing_key)
-                        # Move on to the next task
-                        continue
-
-                # Check for timeout
-                if asyncio.get_event_loop().time() - start_time > timeout:
-                    return None
-
-                # Sleep briefly to avoid busy polling; kept short for responsiveness
-                await asyncio.sleep(0.001)  # reduced from 0.01 to 0.001
-
-        except Exception as e:
-            get_module_error_handler().handle_error(
-                e,
-                context=f"Failed to fetch task from queue (Module: {self.module_name})",
-                raise_error=False
-            )
-            return None
-
-    async def ack(self, request: "Request"):
-        """Acknowledge task completion"""
-        try:
-            await self._ensure_connection()
-            key = self._get_request_key(request)
-            cursor = 0
-            while True:
-                cursor, keys = await self._redis.zscan(self.processing_queue, cursor, match=f"{key}:*")
-                if keys:
-                    pipe = self._redis.pipeline()
-                    for k in keys:
-                        pipe.zrem(self.processing_queue, k)
-                        pipe.hdel(f"{self.processing_queue}:data", k)
-                    await pipe.execute()
-                if cursor == 0:
-                    break
-        except Exception as e:
-            get_module_error_handler().handle_error(
-                e,
-                context=f"Failed to acknowledge task completion (Module: {self.module_name})",
-                raise_error=False
-            )
-
-    async def fail(self, request: "Request", reason: str = ""):
-        """Mark a task as failed"""
-        try:
-            await self._ensure_connection()
-            key = self._get_request_key(request)
-            await self.ack(request)
-
-            retry_key = f"{self.failed_queue}:retries:{key}"
-            retries = await self._redis.incr(retry_key)
-            await self._redis.expire(retry_key, 86400)
-
-            if retries <= self.max_retries:
-                await self.put(request, priority=request.priority + 1)
-                get_module_logger().info(
-                    f"Task retry [{retries}/{self.max_retries}] (Module: {self.module_name}): {request.url}")
-            else:
-                failed_data = {
-                    "url": request.url,
-                    "reason": reason,
-                    "retries": retries,
-                    "failed_at": time.time(),
-                    "request_pickle": pickle.dumps(request).hex(),  # optional: keep the full request
-                }
-                await self._redis.lpush(self.failed_queue, pickle.dumps(failed_data))
-                get_module_logger().error(f"Task permanently failed [{retries} attempts] (Module: {self.module_name}): {request.url}")
-        except Exception as e:
-            get_module_error_handler().handle_error(
-                e,
-                context=f"Failed to mark task as failed (Module: {self.module_name})",
-                raise_error=False
-            )
-
-    def _get_request_key(self, request) -> str:
-        """Build a unique key for a request"""
-        return f"{self.module_name}:url:{hash(request.url) & 0x7FFFFFFF}"  # force a positive number
-
-    async def qsize(self) -> int:
-        """Get queue size"""
-        try:
-            await self._ensure_connection()
-            return await self._redis.zcard(self.queue_name)
-        except Exception as e:
-            get_module_error_handler().handle_error(
-                e,
-                context=f"Failed to get queue size (Module: {self.module_name})",
-                raise_error=False
-            )
-            return 0
-
-    async def close(self):
-        """Close the connection"""
-        try:
-            # The pool manages its connections; no need to close individual ones here
-            self._redis = None
-            self._redis_pool = None
-            get_module_logger().debug(f"Redis connection released (Module: {self.module_name})")
-        except Exception as e:
-            get_module_error_handler().handle_error(
-                e,
-                context=f"Failed to release Redis connection (Module: {self.module_name})",
-                raise_error=False
-            )
+import asyncio
+import pickle
+import time
+import traceback
+from typing import Optional, TYPE_CHECKING, List, Union, Any
+
+import redis.asyncio as aioredis
+
+# Try to import Redis Cluster support
+try:
+    from redis.asyncio.cluster import RedisCluster
+    REDIS_CLUSTER_AVAILABLE = True
+except ImportError:
+    RedisCluster = None
+    REDIS_CLUSTER_AVAILABLE = False
+
+# Use TYPE_CHECKING to avoid circular imports at runtime
+if TYPE_CHECKING:
+    from crawlo import Request
+
+from crawlo.utils.error_handler import ErrorHandler, ErrorContext
+from crawlo.logging import get_logger
+from crawlo.utils.redis_connection_pool import get_redis_pool, RedisConnectionPool
+from crawlo.utils.request_serializer import RequestSerializer
+
+# Lazy initialization to avoid circular dependencies
+_logger = None
+_error_handler = None
+
+
+def get_module_logger():
+    global _logger
+    if _logger is None:
+        _logger = get_logger(__name__)
+    return _logger
+
+
+def get_module_error_handler():
+    global _error_handler
+    if _error_handler is None:
+        _error_handler = ErrorHandler(__name__)
+    return _error_handler
+
+
+class RedisPriorityQueue:
+    """
+    Distributed asynchronous priority queue backed by Redis
+    """
+
+    def __init__(
+        self,
+        redis_url: Optional[str] = None,
+        queue_name: Optional[str] = None,  # default changed to None
+        processing_queue: Optional[str] = None,  # default changed to None
+        failed_queue: Optional[str] = None,  # default changed to None
+        max_retries: int = 3,
+        timeout: int = 300,  # task processing timeout (seconds)
+        max_connections: int = 10,  # connection pool size
+        module_name: str = "default",  # added module_name parameter
+        is_cluster: bool = False,  # whether cluster mode is enabled
+        cluster_nodes: Optional[List[str]] = None  # list of cluster nodes
+    ):
+        # os.getenv() is no longer read directly; redis_url must be passed in
+        if redis_url is None:
+            # Without a redis_url, raise and require it to be configured in settings
+            raise ValueError("redis_url must be provided. Configure it in settings instead of using os.getenv()")
+
+        self.redis_url = redis_url
+        self.module_name = module_name  # keep module_name
+        self.is_cluster = is_cluster
+        self.cluster_nodes = cluster_nodes
+
+        # If no queue_name is given, derive one from module_name
+        if queue_name is None:
+            self.queue_name = f"crawlo:{module_name}:queue:requests"
+        else:
+            # Normalize the queue name, collapsing repeated crawlo prefixes
+            self.queue_name = self._normalize_queue_name(queue_name)
+
+        # If no processing_queue is given, derive it from queue_name
+        if processing_queue is None:
+            if ":queue:requests" in self.queue_name:
+                self.processing_queue = self.queue_name.replace(":queue:requests", ":queue:processing")
+            else:
+                self.processing_queue = f"{self.queue_name}:processing"
+        else:
+            self.processing_queue = processing_queue
+
+        # If no failed_queue is given, derive it from queue_name
+        if failed_queue is None:
+            if ":queue:requests" in self.queue_name:
+                self.failed_queue = self.queue_name.replace(":queue:requests", ":queue:failed")
+            else:
+                self.failed_queue = f"{self.queue_name}:failed"
+        else:
+            self.failed_queue = failed_queue
+
+        self.max_retries = max_retries
+        self.timeout = timeout
+        self.max_connections = max_connections
+        self._redis_pool: Optional[RedisConnectionPool] = None
+        self._redis: Optional[Any] = None
+        self._lock = asyncio.Lock()  # lock guarding connection initialization
+        self.request_serializer = RequestSerializer()  # handles serialization
+
+    def _normalize_queue_name(self, queue_name: str) -> str:
+        """
+        Normalize the queue name, handling repeated crawlo prefixes
+
+        :param queue_name: original queue name
+        :return: normalized queue name
+        """
+        # Already well-formed (starts with crawlo: but not crawlo:crawlo:): keep as-is
+        if queue_name.startswith("crawlo:") and not queue_name.startswith("crawlo:crawlo:"):
+            return queue_name
+
+        # Collapse a triple crawlo prefix to the standard format
+        if queue_name.startswith("crawlo:crawlo:crawlo:"):
+            # Triple crawlo prefix: reduce to the standard crawlo: form
+            remaining = queue_name[21:]  # strip the "crawlo:crawlo:crawlo:" prefix
+            if remaining:
+                return f"crawlo:{remaining}"
+            else:
+                return "crawlo:requests"  # default name
+
+        # Collapse a double crawlo prefix
+        elif queue_name.startswith("crawlo:crawlo:"):
+            # Double crawlo prefix: reduce to the standard crawlo: form
+            remaining = queue_name[14:]  # strip the "crawlo:crawlo:" prefix
+            if remaining:
+                return f"crawlo:{remaining}"
+            else:
+                return "crawlo:requests"  # default name
+
+        # Handle names without a crawlo prefix
+        elif not queue_name.startswith("crawlo:"):
+            # No crawlo prefix: add the crawlo: prefix
+            if queue_name:
+                return f"crawlo:{queue_name}"
+            else:
+                return "crawlo:requests"  # default name
+
+        # Otherwise leave the name unchanged
+        else:
+            return queue_name
+
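The normalization rules above are easiest to see by example. A minimal sketch (the constructor only stores configuration, so no Redis server is needed to run it; the import path follows the file list above):

    from crawlo.queue.redis_priority_queue import RedisPriorityQueue

    q = RedisPriorityQueue(redis_url="redis://localhost:6379", module_name="demo")
    assert q._normalize_queue_name("crawlo:my:queue:requests") == "crawlo:my:queue:requests"    # well-formed: unchanged
    assert q._normalize_queue_name("crawlo:crawlo:queue:requests") == "crawlo:queue:requests"   # double prefix collapsed
    assert q._normalize_queue_name("queue:requests") == "crawlo:queue:requests"                 # missing prefix added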
+    async def connect(self, max_retries=3, delay=1):
+        """Connect to Redis asynchronously, with retries"""
+        async with self._lock:
+            if self._redis is not None:
+                # Already connected; check the connection is still alive
+                try:
+                    await self._redis.ping()
+                    return self._redis
+                except Exception:
+                    # Stale connection; reconnect
+                    self._redis = None
+
+            for attempt in range(max_retries):
+                try:
+                    # Use the optimized connection pool; decode_responses=False avoids encoding issues
+                    self._redis_pool = get_redis_pool(
+                        self.redis_url,
+                        is_cluster=self.is_cluster,
+                        cluster_nodes=self.cluster_nodes,
+                        max_connections=self.max_connections,
+                        socket_connect_timeout=5,
+                        socket_timeout=30,
+                        health_check_interval=30,
+                        retry_on_timeout=True,
+                        decode_responses=False,  # never auto-decode responses
+                        encoding='utf-8'
+                    )
+
+                    self._redis = await self._redis_pool.get_connection()
+
+                    # Test the connection
+                    if self._redis:
+                        await self._redis.ping()
+                    return self._redis
+                except Exception as e:
+                    error_msg = f"Redis connection failed (attempt {attempt + 1}/{max_retries}, Module: {self.module_name}): {e}"
+                    get_module_logger().warning(error_msg)
+                    get_module_logger().debug(f"Detailed error info:\n{traceback.format_exc()}")
+                    if attempt < max_retries - 1:
+                        await asyncio.sleep(delay)
+                    else:
+                        raise ConnectionError(f"Unable to connect to Redis (Module: {self.module_name}): {e}")
+
+    async def _ensure_connection(self):
+        """Ensure the connection is alive"""
+        if self._redis is None:
+            await self.connect()
+        try:
+            if self._redis:
+                await self._redis.ping()
+        except Exception as e:
+            get_module_logger().warning(f"Redis connection lost (Module: {self.module_name}), reconnecting...: {e}")
+            self._redis = None
+            await self.connect()
+
+    def _is_cluster_mode(self) -> bool:
+        """Check whether cluster mode is active"""
+        if REDIS_CLUSTER_AVAILABLE and RedisCluster is not None:
+            # Check whether _redis is a RedisCluster instance
+            if self._redis is not None and isinstance(self._redis, RedisCluster):
+                return True
+        return False
+
+    async def put(self, request, priority: int = 0) -> bool:
+        """Push a request onto the queue"""
+        try:
+            await self._ensure_connection()
+            if not self._redis:
+                return False
+
+            # Fix priority-ordering consistency
+            # Before: score = -priority (larger priority values dequeued first)
+            # Now: score = priority (smaller priority values dequeue first, matching the memory queue)
+            score = priority
+            key = self._get_request_key(request)
+
+            # 🔥 Clean the Request with the dedicated serialization helper
+            clean_request = self.request_serializer.prepare_for_serialization(request)
+
+            # Make sure the serialized payload can be deserialized again
+            try:
+                serialized = pickle.dumps(clean_request)
+                # Verify the serialized data round-trips
+                pickle.loads(serialized)
+            except Exception as serialize_error:
+                get_module_logger().error(f"Request serialization validation failed (Module: {self.module_name}): {serialize_error}")
+                return False
+
+            # Handle cluster-mode operations
+            if self._is_cluster_mode():
+                # In cluster mode all keys must live in the same slot,
+                # achieved by adding the same hash tag to every key name
+                hash_tag = "{queue}"  # hash tag keeps the keys in one slot
+                queue_name_with_tag = f"{self.queue_name}{hash_tag}"
+                data_key_with_tag = f"{self.queue_name}:data{hash_tag}"
+
+                pipe = self._redis.pipeline()
+                pipe.zadd(queue_name_with_tag, {key: score})
+                pipe.hset(data_key_with_tag, key, serialized)
+                result = await pipe.execute()
+            else:
+                pipe = self._redis.pipeline()
+                pipe.zadd(self.queue_name, {key: score})
+                pipe.hset(f"{self.queue_name}:data", key, serialized)
+                result = await pipe.execute()
+
+            if result[0] > 0:
+                get_module_logger().debug(f"Enqueued successfully (Module: {self.module_name}): {request.url}")
+            return result[0] > 0
+        except Exception as e:
+            error_context = ErrorContext(
+                context=f"Failed to enqueue request (Module: {self.module_name})"
+            )
+            get_module_error_handler().handle_error(
+                e,
+                context=error_context,
+                raise_error=False
+            )
+            return False
+
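Why the "{queue}" hash tag added in the cluster branch matters: Redis Cluster derives a key's slot from a CRC16 of the key, but when the key contains a "{...}" section, only the text between the first "{" and the following "}" is hashed. Keys that share a tag therefore share a slot, which is what lets the multi-key pipeline above run without CROSSSLOT errors. One consequence worth noting: since the tag here is the fixed string "queue" rather than something per-queue, all of these keys land in a single slot. A dependency-free sketch of the slot rule (CRC16/XMODEM is the checksum the Redis Cluster spec names; this reimplements it for illustration only):

    def crc16_xmodem(data: bytes) -> int:
        # CRC16/XMODEM: poly 0x1021, init 0x0000, no reflection
        crc = 0
        for byte in data:
            crc ^= byte << 8
            for _ in range(8):
                crc = ((crc << 1) ^ 0x1021) & 0xFFFF if crc & 0x8000 else (crc << 1) & 0xFFFF
        return crc

    def key_slot(key: str) -> int:
        # Hash only the substring between the first '{' and the next '}', if non-empty
        start = key.find("{")
        if start != -1:
            end = key.find("}", start + 1)
            if end > start + 1:
                key = key[start + 1:end]
        return crc16_xmodem(key.encode()) % 16384

    queue_key = "crawlo:demo:queue:requests{queue}"
    data_key = "crawlo:demo:queue:requests:data{queue}"
    assert key_slot(queue_key) == key_slot(data_key)  # same slot, so the pipeline is safe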
+    async def get(self, timeout: float = 5.0):
+        """
+        Fetch a request (with timeout)
+        :param timeout: maximum wait in seconds, to avoid endless polling
+        """
+        try:
+            await self._ensure_connection()
+            if not self._redis:
+                return None
+
+            start_time = asyncio.get_event_loop().time()
+
+            while True:
+                # Try to pop a task
+                if self._is_cluster_mode():
+                    # cluster-mode handling
+                    hash_tag = "{queue}"
+                    queue_name_with_tag = f"{self.queue_name}{hash_tag}"
+                    result = await self._redis.zpopmin(queue_name_with_tag, count=1)
+                else:
+                    result = await self._redis.zpopmin(self.queue_name, count=1)
+
+                if result:
+                    key, score = result[0]
+                    data_key = f"{self.queue_name}:data"
+                    if self._is_cluster_mode():
+                        hash_tag = "{queue}"
+                        data_key = f"{self.queue_name}:data{hash_tag}"
+
+                    serialized = await self._redis.hget(data_key, key)
+                    if not serialized:
+                        continue
+
+                    # Move it to the processing queue
+                    processing_key = f"{key}:{int(time.time())}"
+                    processing_queue = self.processing_queue
+                    processing_data_key = f"{self.processing_queue}:data"
+
+                    if self._is_cluster_mode():
+                        hash_tag = "{queue}"
+                        processing_queue = f"{self.processing_queue}{hash_tag}"
+                        processing_data_key = f"{self.processing_queue}:data{hash_tag}"
+
+                    if self._is_cluster_mode():
+                        pipe = self._redis.pipeline()
+                        pipe.zadd(processing_queue, {processing_key: time.time() + self.timeout})
+                        pipe.hset(processing_data_key, processing_key, serialized)
+                        pipe.hdel(data_key, key)
+                        await pipe.execute()
+                    else:
+                        pipe = self._redis.pipeline()
+                        pipe.zadd(processing_queue, {processing_key: time.time() + self.timeout})
+                        pipe.hset(processing_data_key, processing_key, serialized)
+                        pipe.hdel(data_key, key)
+                        await pipe.execute()
+
+                    # Safer deserialization
+                    try:
+                        # First try standard pickle deserialization
+                        request = pickle.loads(serialized)
+                        return request
+                    except UnicodeDecodeError:
+                        # On encoding errors, retry with latin1
+                        request = pickle.loads(serialized, encoding='latin1')
+                        return request
+                    except Exception as pickle_error:
+                        # If pickle fails entirely, log the error and skip this task
+                        get_module_logger().error(f"Unable to deserialize request data (Module: {self.module_name}): {pickle_error}")
+                        # Remove the invalid task from the processing queue
+                        if self._is_cluster_mode():
+                            await self._redis.zrem(processing_queue, processing_key)
+                            await self._redis.hdel(processing_data_key, processing_key)
+                        else:
+                            await self._redis.zrem(processing_queue, processing_key)
+                            await self._redis.hdel(processing_data_key, processing_key)
+                        # Move on to the next task
+                        continue
+
+                # Check for timeout
+                if asyncio.get_event_loop().time() - start_time > timeout:
+                    return None
+
+                # Sleep briefly to avoid busy polling; kept short for responsiveness
+                await asyncio.sleep(0.001)  # reduced from 0.01 to 0.001
+
+        except Exception as e:
+            error_context = ErrorContext(
+                context=f"Failed to fetch task from queue (Module: {self.module_name})"
+            )
+            get_module_error_handler().handle_error(
+                e,
+                context=error_context,
+                raise_error=False
+            )
+            return None
+
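The "score = priority" convention fixed in put() is what makes ZPOPMIN return the most urgent request first: lower priority values mean lower scores, and ZPOPMIN always pops the lowest score, matching the in-memory queue's min-first behavior. A minimal sketch against a throwaway key (assumes a local Redis at redis://localhost:6379 and redis-py 5.x, neither of which is part of this package's diff):

    import asyncio
    import redis.asyncio as aioredis

    async def main():
        r = aioredis.Redis.from_url("redis://localhost:6379", decode_responses=True)
        await r.delete("demo:queue")
        await r.zadd("demo:queue", {"urgent": 0, "normal": 5, "background": 10})  # score = priority
        while item := await r.zpopmin("demo:queue", count=1):
            print(item)  # [('urgent', 0.0)], then [('normal', 5.0)], then [('background', 10.0)]
        await r.aclose()

    asyncio.run(main())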
+    async def ack(self, request: "Request"):
+        """Acknowledge task completion"""
+        try:
+            await self._ensure_connection()
+            if not self._redis:
+                return
+
+            key = self._get_request_key(request)
+            processing_queue = self.processing_queue
+            processing_data_key = f"{self.processing_queue}:data"
+
+            if self._is_cluster_mode():
+                hash_tag = "{queue}"
+                processing_queue = f"{self.processing_queue}{hash_tag}"
+                processing_data_key = f"{self.processing_queue}:data{hash_tag}"
+
+            cursor = 0
+            while True:
+                if self._is_cluster_mode():
+                    cursor, keys = await self._redis.zscan(processing_queue, cursor, match=f"{key}:*")
+                else:
+                    cursor, keys = await self._redis.zscan(processing_queue, cursor, match=f"{key}:*")
+                if keys:
+                    if self._is_cluster_mode():
+                        pipe = self._redis.pipeline()
+                        for k in keys:
+                            pipe.zrem(processing_queue, k)
+                            pipe.hdel(processing_data_key, k)
+                        await pipe.execute()
+                    else:
+                        pipe = self._redis.pipeline()
+                        for k in keys:
+                            pipe.zrem(processing_queue, k)
+                            pipe.hdel(processing_data_key, k)
+                        await pipe.execute()
+                if cursor == 0:
+                    break
+        except Exception as e:
+            error_context = ErrorContext(
+                context=f"Failed to acknowledge task completion (Module: {self.module_name})"
+            )
+            get_module_error_handler().handle_error(
+                e,
+                context=error_context,
+                raise_error=False
+            )
+
+    async def fail(self, request: "Request", reason: str = ""):
+        """Mark a task as failed"""
+        try:
+            await self._ensure_connection()
+            if not self._redis:
+                return
+
+            key = self._get_request_key(request)
+            await self.ack(request)
+
+            retry_key = f"{self.failed_queue}:retries:{key}"
+            failed_queue = self.failed_queue
+
+            if self._is_cluster_mode():
+                hash_tag = "{queue}"
+                retry_key = f"{self.failed_queue}:retries:{key}{hash_tag}"
+                failed_queue = f"{self.failed_queue}{hash_tag}"
+
+            retries = await self._redis.incr(retry_key)
+            await self._redis.expire(retry_key, 86400)
+
+            if retries <= self.max_retries:
+                await self.put(request, priority=request.priority + 1)
+                get_module_logger().info(
+                    f"Task retry [{retries}/{self.max_retries}] (Module: {self.module_name}): {request.url}")
+            else:
+                failed_data = {
+                    "url": request.url,
+                    "reason": reason,
+                    "retries": retries,
+                    "failed_at": time.time(),
+                    "request_pickle": pickle.dumps(request).hex(),  # optional: keep the full request
+                }
+                await self._redis.lpush(failed_queue, pickle.dumps(failed_data))
+                get_module_logger().error(f"Task permanently failed [{retries} attempts] (Module: {self.module_name}): {request.url}")
+        except Exception as e:
+            error_context = ErrorContext(
+                context=f"Failed to mark task as failed (Module: {self.module_name})"
+            )
+            get_module_error_handler().handle_error(
+                e,
+                context=error_context,
+                raise_error=False
+            )
+
+    def _get_request_key(self, request) -> str:
+        """Build a unique key for a request"""
+        return f"{self.module_name}:url:{hash(request.url) & 0x7FFFFFFF}"  # force a positive number
+
+    async def qsize(self) -> int:
+        """Get queue size"""
+        try:
+            await self._ensure_connection()
+            if not self._redis:
+                return 0
+
+            if self._is_cluster_mode():
+                hash_tag = "{queue}"
+                queue_name_with_tag = f"{self.queue_name}{hash_tag}"
+                return await self._redis.zcard(queue_name_with_tag)
+            else:
+                return await self._redis.zcard(self.queue_name)
+        except Exception as e:
+            error_context = ErrorContext(
+                context=f"Failed to get queue size (Module: {self.module_name})"
+            )
+            get_module_error_handler().handle_error(
+                e,
+                context=error_context,
+                raise_error=False
+            )
+            return 0
+
+    async def close(self):
+        """Close the connection"""
+        try:
+            # Explicitly close the Redis connection
+            if self._redis is not None:
+                try:
+                    # Try to close the connection
+                    if hasattr(self._redis, 'close'):
+                        close_result = self._redis.close()
+                        if asyncio.iscoroutine(close_result):
+                            await close_result
+
+                    # Wait for the close to finish
+                    if hasattr(self._redis, 'wait_closed'):
+                        wait_result = self._redis.wait_closed()
+                        if asyncio.iscoroutine(wait_result):
+                            await wait_result
+                except Exception as close_error:
+                    get_module_logger().warning(
+                        f"Error closing Redis connection (Module: {self.module_name}): {close_error}"
+                    )
+                finally:
+                    self._redis = None
+
+            # Drop the pool reference (the pool itself is owned by the global manager)
+            self._redis_pool = None
+
+            get_module_logger().debug(f"Redis connection released (Module: {self.module_name})")
+        except Exception as e:
+            error_context = ErrorContext(
+                context=f"Failed to release Redis connection (Module: {self.module_name})"
+            )
+            get_module_error_handler().handle_error(
+                e,
+                context=error_context,
+                raise_error=False
+            )
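Taken together, put/get/ack/fail form a claim-and-acknowledge protocol: get() parks each popped task in the processing queue with a deadline, ack() removes it on success, and fail() either re-queues it with a demoted priority or, after max_retries, archives it to the failed queue. A minimal consumer sketch (assumes a local Redis; Request comes from the package root as in the TYPE_CHECKING import above, and the Request(url=...) signature is an assumption, not confirmed by this diff):

    import asyncio

    from crawlo import Request  # as in the TYPE_CHECKING import above
    from crawlo.queue.redis_priority_queue import RedisPriorityQueue

    async def main():
        # redis_url is mandatory: the queue no longer falls back to os.getenv()
        queue = RedisPriorityQueue(redis_url="redis://localhost:6379", module_name="demo")
        await queue.connect()

        await queue.put(Request(url="https://example.com"), priority=0)  # assumed Request(url=...) signature
        request = await queue.get(timeout=5.0)  # pops and parks the task in the processing queue
        if request is not None:
            try:
                ...  # download / handle the request here
                await queue.ack(request)  # success: drop it from the processing queue
            except Exception as exc:
                await queue.fail(request, reason=str(exc))  # retry with demoted priority, or archive

        await queue.close()

    asyncio.run(main())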