crawlo-1.1.3-py3-none-any.whl → crawlo-1.1.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (115)
  1. crawlo/__init__.py +28 -1
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +61 -0
  4. crawlo/cleaners/data_formatter.py +226 -0
  5. crawlo/cleaners/encoding_converter.py +126 -0
  6. crawlo/cleaners/text_cleaner.py +233 -0
  7. crawlo/commands/startproject.py +117 -13
  8. crawlo/config.py +30 -0
  9. crawlo/config_validator.py +253 -0
  10. crawlo/core/engine.py +185 -11
  11. crawlo/core/scheduler.py +49 -78
  12. crawlo/crawler.py +6 -6
  13. crawlo/downloader/__init__.py +24 -0
  14. crawlo/downloader/aiohttp_downloader.py +8 -0
  15. crawlo/downloader/cffi_downloader.py +5 -0
  16. crawlo/downloader/hybrid_downloader.py +214 -0
  17. crawlo/downloader/playwright_downloader.py +403 -0
  18. crawlo/downloader/selenium_downloader.py +473 -0
  19. crawlo/extension/__init__.py +17 -10
  20. crawlo/extension/health_check.py +142 -0
  21. crawlo/extension/log_interval.py +27 -18
  22. crawlo/extension/log_stats.py +62 -24
  23. crawlo/extension/logging_extension.py +18 -9
  24. crawlo/extension/memory_monitor.py +105 -0
  25. crawlo/extension/performance_profiler.py +134 -0
  26. crawlo/extension/request_recorder.py +108 -0
  27. crawlo/filters/aioredis_filter.py +50 -12
  28. crawlo/middleware/proxy.py +26 -2
  29. crawlo/mode_manager.py +24 -19
  30. crawlo/network/request.py +30 -3
  31. crawlo/network/response.py +114 -25
  32. crawlo/pipelines/mongo_pipeline.py +81 -66
  33. crawlo/pipelines/mysql_pipeline.py +165 -43
  34. crawlo/pipelines/redis_dedup_pipeline.py +7 -3
  35. crawlo/queue/queue_manager.py +15 -2
  36. crawlo/queue/redis_priority_queue.py +144 -76
  37. crawlo/settings/default_settings.py +93 -121
  38. crawlo/subscriber.py +62 -37
  39. crawlo/templates/project/items.py.tmpl +1 -1
  40. crawlo/templates/project/middlewares.py.tmpl +73 -49
  41. crawlo/templates/project/pipelines.py.tmpl +51 -295
  42. crawlo/templates/project/settings.py.tmpl +93 -17
  43. crawlo/templates/project/settings_distributed.py.tmpl +120 -0
  44. crawlo/templates/project/settings_gentle.py.tmpl +95 -0
  45. crawlo/templates/project/settings_high_performance.py.tmpl +152 -0
  46. crawlo/templates/project/settings_simple.py.tmpl +69 -0
  47. crawlo/templates/spider/spider.py.tmpl +2 -38
  48. crawlo/tools/__init__.py +183 -0
  49. crawlo/tools/anti_crawler.py +269 -0
  50. crawlo/tools/authenticated_proxy.py +241 -0
  51. crawlo/tools/data_validator.py +181 -0
  52. crawlo/tools/date_tools.py +36 -0
  53. crawlo/tools/distributed_coordinator.py +387 -0
  54. crawlo/tools/retry_mechanism.py +221 -0
  55. crawlo/tools/scenario_adapter.py +263 -0
  56. crawlo/utils/__init__.py +29 -1
  57. crawlo/utils/batch_processor.py +261 -0
  58. crawlo/utils/date_tools.py +58 -1
  59. crawlo/utils/enhanced_error_handler.py +360 -0
  60. crawlo/utils/env_config.py +106 -0
  61. crawlo/utils/error_handler.py +126 -0
  62. crawlo/utils/performance_monitor.py +285 -0
  63. crawlo/utils/redis_connection_pool.py +335 -0
  64. crawlo/utils/redis_key_validator.py +200 -0
  65. crawlo-1.1.5.dist-info/METADATA +401 -0
  66. crawlo-1.1.5.dist-info/RECORD +185 -0
  67. tests/advanced_tools_example.py +276 -0
  68. tests/authenticated_proxy_example.py +237 -0
  69. tests/cleaners_example.py +161 -0
  70. tests/config_validation_demo.py +103 -0
  71. tests/date_tools_example.py +181 -0
  72. tests/dynamic_loading_example.py +524 -0
  73. tests/dynamic_loading_test.py +105 -0
  74. tests/env_config_example.py +134 -0
  75. tests/error_handling_example.py +172 -0
  76. tests/redis_key_validation_demo.py +131 -0
  77. tests/response_improvements_example.py +145 -0
  78. tests/test_advanced_tools.py +149 -0
  79. tests/test_all_redis_key_configs.py +146 -0
  80. tests/test_authenticated_proxy.py +142 -0
  81. tests/test_cleaners.py +55 -0
  82. tests/test_comprehensive.py +147 -0
  83. tests/test_config_validator.py +194 -0
  84. tests/test_date_tools.py +124 -0
  85. tests/test_dynamic_downloaders_proxy.py +125 -0
  86. tests/test_dynamic_proxy.py +93 -0
  87. tests/test_dynamic_proxy_config.py +147 -0
  88. tests/test_dynamic_proxy_real.py +110 -0
  89. tests/test_edge_cases.py +304 -0
  90. tests/test_enhanced_error_handler.py +271 -0
  91. tests/test_env_config.py +122 -0
  92. tests/test_error_handler_compatibility.py +113 -0
  93. tests/test_framework_env_usage.py +104 -0
  94. tests/test_integration.py +357 -0
  95. tests/test_item_dedup_redis_key.py +123 -0
  96. tests/test_parsel.py +30 -0
  97. tests/test_performance.py +328 -0
  98. tests/test_queue_manager_redis_key.py +177 -0
  99. tests/test_redis_connection_pool.py +295 -0
  100. tests/test_redis_key_naming.py +182 -0
  101. tests/test_redis_key_validator.py +124 -0
  102. tests/test_response_improvements.py +153 -0
  103. tests/test_simple_response.py +62 -0
  104. tests/test_telecom_spider_redis_key.py +206 -0
  105. tests/test_template_content.py +88 -0
  106. tests/test_template_redis_key.py +135 -0
  107. tests/test_tools.py +154 -0
  108. tests/tools_example.py +258 -0
  109. crawlo/core/enhanced_engine.py +0 -190
  110. crawlo-1.1.3.dist-info/METADATA +0 -635
  111. crawlo-1.1.3.dist-info/RECORD +0 -113
  112. {crawlo-1.1.3.dist-info → crawlo-1.1.5.dist-info}/WHEEL +0 -0
  113. {crawlo-1.1.3.dist-info → crawlo-1.1.5.dist-info}/entry_points.txt +0 -0
  114. {crawlo-1.1.3.dist-info → crawlo-1.1.5.dist-info}/top_level.txt +0 -0
  115. {examples → tests}/controlled_spider_example.py +0 -0
crawlo/pipelines/mysql_pipeline.py
@@ -1,11 +1,11 @@
 # -*- coding: utf-8 -*-
 import asyncio
 import aiomysql
-from typing import Optional
+from typing import Optional, List, Dict
 from asyncmy import create_pool
 from crawlo.utils.log import get_logger
 from crawlo.exceptions import ItemDiscard
-from crawlo.utils.db_helper import make_insert_sql, logger
+from crawlo.utils.db_helper import make_insert_sql, make_batch_sql, logger


 class AsyncmyMySQLPipeline:
@@ -24,6 +24,11 @@ class AsyncmyMySQLPipeline:
             f"{crawler.spider.name}_items"
         )

+        # Batch insert configuration
+        self.batch_size = self.settings.get_int('MYSQL_BATCH_SIZE', 100)
+        self.use_batch = self.settings.get_bool('MYSQL_USE_BATCH', False)
+        self.batch_buffer: List[Dict] = []  # batch buffer
+
         # Register the shutdown event
         crawler.subscriber.subscribe(self.spider_closed, event='spider_closed')

@@ -59,30 +64,45 @@ class AsyncmyMySQLPipeline:
         """Core item-processing method"""
         kwargs = kwargs or {}
         spider_name = getattr(spider, 'name', 'unknown')  # spider name
-        try:
-            await self._ensure_pool()
-            item_dict = dict(item)
-            sql = make_insert_sql(table=self.table_name, data=item_dict, **kwargs)
-
-            rowcount = await self._execute_sql(sql=sql)
-            if rowcount > 1:
-                self.logger.info(
-                    f"Spider {spider_name} inserted {rowcount} records into table {self.table_name}"
-                )
-            elif rowcount == 1:
-                self.logger.debug(
-                    f"Spider {spider_name} inserted one record into table {self.table_name}"
-                )
-            else:
-                self.logger.warning(
-                    f"Spider {spider_name}: SQL executed but no new record was inserted - {sql[:100]}..."
-                )
-
+
+        # If batch inserts are enabled, add the item to the buffer
+        if self.use_batch:
+            self.batch_buffer.append(dict(item))
+
+            # When the buffer reaches the batch size, flush it
+            if len(self.batch_buffer) >= self.batch_size:
+                await self._flush_batch(spider_name)
+
             return item
+        else:
+            # Single-insert path
+            try:
+                await self._ensure_pool()
+                item_dict = dict(item)
+                sql = make_insert_sql(table=self.table_name, data=item_dict, **kwargs)

-        except Exception as e:
-            self.logger.error(f"Error while processing item: {e}")
-            raise ItemDiscard(f"Processing failed: {e}")
+                rowcount = await self._execute_sql(sql=sql)
+                if rowcount > 1:
+                    self.logger.info(
+                        f"Spider {spider_name} inserted {rowcount} records into table {self.table_name}"
+                    )
+                elif rowcount == 1:
+                    self.logger.debug(
+                        f"Spider {spider_name} inserted one record into table {self.table_name}"
+                    )
+                else:
+                    self.logger.warning(
+                        f"Spider {spider_name}: SQL executed but no new record was inserted - {sql[:100]}..."
+                    )
+
+                # Stats counting moved here, consistent with AiomysqlMySQLPipeline
+                self.crawler.stats.inc_value('mysql/insert_success')
+                return item
+
+            except Exception as e:
+                self.logger.error(f"Error while processing item: {e}")
+                self.crawler.stats.inc_value('mysql/insert_failed')
+                raise ItemDiscard(f"Processing failed: {e}")

     async def _execute_sql(self, sql: str, values: list = None) -> int:
         """Execute a SQL statement and handle the result"""
@@ -96,15 +116,59 @@ class AsyncmyMySQLPipeline:
                     rowcount = await cursor.execute(sql)

                 await conn.commit()
-                self.crawler.stats.inc_value('mysql/insert_success')
+                # Stats counting removed here
                 return rowcount
             except Exception as e:
                 await conn.rollback()
-                self.crawler.stats.inc_value('mysql/insert_failed')
+                # Stats counting removed here
                 raise ItemDiscard(f"MySQL insert failed: {e}")

+    async def _flush_batch(self, spider_name: str):
+        """Flush the batch buffer and run a batch insert"""
+        if not self.batch_buffer:
+            return
+
+        try:
+            await self._ensure_pool()
+
+            # Use the batch SQL builder
+            batch_result = make_batch_sql(table=self.table_name, datas=self.batch_buffer)
+            if batch_result is None:
+                self.logger.warning("Batch insert data is empty")
+                self.batch_buffer.clear()
+                return
+
+            sql, values_list = batch_result
+
+            async with self.pool.acquire() as conn:
+                async with conn.cursor() as cursor:
+                    try:
+                        # Run the batch insert
+                        rowcount = await cursor.executemany(sql, values_list)
+                        await conn.commit()
+
+                        self.logger.info(
+                            f"Spider {spider_name} batch-inserted {rowcount} records into table {self.table_name}"
+                        )
+                        # Update the stats counters
+                        self.crawler.stats.inc_value('mysql/insert_success', rowcount)
+                        self.batch_buffer.clear()
+                    except Exception as e:
+                        await conn.rollback()
+                        self.crawler.stats.inc_value('mysql/insert_failed', len(self.batch_buffer))
+                        self.logger.error(f"Batch insert failed: {e}")
+                        raise ItemDiscard(f"Batch insert failed: {e}")
+        except Exception as e:
+            self.logger.error(f"Error during batch insert: {e}")
+            raise ItemDiscard(f"Batch insert processing failed: {e}")
+
     async def spider_closed(self):
         """Clean up resources when the spider closes"""
+        # Flush any remaining batched data before shutting down
+        if self.use_batch and self.batch_buffer:
+            spider_name = getattr(self.crawler.spider, 'name', 'unknown')
+            await self._flush_batch(spider_name)
+
         if self.pool:
             self.pool.close()
             await self.pool.wait_closed()
@@ -127,6 +191,11 @@ class AiomysqlMySQLPipeline:
             f"{crawler.spider.name}_items"
         )

+        # Batch insert configuration
+        self.batch_size = self.settings.get_int('MYSQL_BATCH_SIZE', 100)
+        self.use_batch = self.settings.get_bool('MYSQL_USE_BATCH', False)
+        self.batch_buffer: List[Dict] = []  # batch buffer
+
         crawler.subscriber.subscribe(self.spider_closed, event='spider_closed')

     @classmethod
@@ -160,36 +229,89 @@ class AiomysqlMySQLPipeline:

     async def process_item(self, item, spider) -> Optional[dict]:
         """Process an item"""
+        # If batch inserts are enabled, add the item to the buffer
+        if self.use_batch:
+            self.batch_buffer.append(dict(item))
+
+            # When the buffer reaches the batch size, flush it
+            if len(self.batch_buffer) >= self.batch_size:
+                spider_name = getattr(spider, 'name', 'unknown')
+                await self._flush_batch(spider_name)
+
+            return item
+        else:
+            # Single-insert path
+            try:
+                await self._init_pool()
+
+                item_dict = dict(item)
+                # Build the SQL with the make_insert_sql helper
+                sql = make_insert_sql(table=self.table_name, data=item_dict)
+
+                async with self.pool.acquire() as conn:
+                    async with conn.cursor() as cursor:
+                        try:
+                            await cursor.execute(sql)
+                            await conn.commit()
+                            self.crawler.stats.inc_value('mysql/insert_success')
+                        except aiomysql.Error as e:
+                            await conn.rollback()
+                            self.crawler.stats.inc_value('mysql/insert_failed')
+                            raise ItemDiscard(f"MySQL error: {e.args[1]}")
+
+                return item
+
+            except Exception as e:
+                self.logger.error(f"Pipeline processing error: {e}")
+                raise ItemDiscard(f"Processing failed: {e}")
+
+    async def _flush_batch(self, spider_name: str):
+        """Flush the batch buffer and run a batch insert"""
+        if not self.batch_buffer:
+            return
+
         try:
             await self._init_pool()
+
+            # Use the batch SQL builder
+            batch_result = make_batch_sql(table=self.table_name, datas=self.batch_buffer)
+            if batch_result is None:
+                self.logger.warning("Batch insert data is empty")
+                self.batch_buffer.clear()
+                return

-            item_dict = dict(item)
-            sql = f"""
-                INSERT INTO `{self.table_name}`
-                ({', '.join([f'`{k}`' for k in item_dict.keys()])})
-                VALUES ({', '.join(['%s'] * len(item_dict))})
-            """
+            sql, values_list = batch_result

             async with self.pool.acquire() as conn:
                 async with conn.cursor() as cursor:
                     try:
-                        await cursor.execute(sql, list(item_dict.values()))
+                        # Run the batch insert
+                        rowcount = await cursor.executemany(sql, values_list)
                         await conn.commit()
-                        self.crawler.stats.inc_value('mysql/insert_success')
-                    except aiomysql.Error as e:
+
+                        self.logger.info(
+                            f"Spider {spider_name} batch-inserted {rowcount} records into table {self.table_name}"
+                        )
+                        # Update the stats counters
+                        self.crawler.stats.inc_value('mysql/insert_success', rowcount)
+                        self.batch_buffer.clear()
+                    except Exception as e:
                         await conn.rollback()
-                        self.crawler.stats.inc_value('mysql/insert_failed')
-                        raise ItemDiscard(f"MySQL error: {e.args[1]}")
-
-            return item
-
+                        self.crawler.stats.inc_value('mysql/insert_failed', len(self.batch_buffer))
+                        self.logger.error(f"Batch insert failed: {e}")
+                        raise ItemDiscard(f"Batch insert failed: {e}")
         except Exception as e:
-            self.logger.error(f"Pipeline processing error: {e}")
-            raise ItemDiscard(f"Processing failed: {e}")
+            self.logger.error(f"Error during batch insert: {e}")
+            raise ItemDiscard(f"Batch insert processing failed: {e}")

     async def spider_closed(self):
         """Clean up resources"""
+        # Flush any remaining batched data before shutting down
+        if self.use_batch and self.batch_buffer:
+            spider_name = getattr(self.crawler.spider, 'name', 'unknown')
+            await self._flush_batch(spider_name)
+
         if self.pool:
             self.pool.close()
             await self.pool.wait_closed()
-            self.logger.info("aiomysql connection pool released")
+            self.logger.info("aiomysql connection pool released")
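
Both pipelines gate the new path on the same two settings read in `__init__`: `MYSQL_USE_BATCH` (default `False`) and `MYSQL_BATCH_SIZE` (default `100`). A minimal sketch of a project `settings.py` that turns on batched `executemany()` writes, using only the setting names visible in this diff:

    # settings.py — enable batched MySQL writes (setting names from the diff above)
    MYSQL_USE_BATCH = True     # default False: fall back to one INSERT per item
    MYSQL_BATCH_SIZE = 200     # default 100: items buffered before each executemany()

Items still sitting in `batch_buffer` at shutdown are flushed by `spider_closed()` before the connection pool is released, so a trailing partial batch is not lost.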
crawlo/pipelines/redis_dedup_pipeline.py
@@ -72,12 +72,16 @@ class RedisDedupPipeline:
         """Create a pipeline instance from the crawler settings"""
         settings = crawler.settings

+        # Use the unified Redis key naming convention: crawlo:{project_name}:item:fingerprint
+        project_name = settings.get('PROJECT_NAME', 'default')
+        redis_key = f"crawlo:{project_name}:item:fingerprint"
+
         return cls(
             redis_host=settings.get('REDIS_HOST', 'localhost'),
-            redis_port=settings.getint('REDIS_PORT', 6379),
-            redis_db=settings.getint('REDIS_DB', 0),
+            redis_port=settings.get_int('REDIS_PORT', 6379),
+            redis_db=settings.get_int('REDIS_DB', 0),
             redis_password=settings.get('REDIS_PASSWORD') or None,
-            redis_key=settings.get('REDIS_DEDUP_KEY', 'crawlo:item_fingerprints'),
+            redis_key=redis_key,
             log_level=settings.get('LOG_LEVEL', 'INFO')
         )

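Note that `REDIS_DEDUP_KEY` is no longer consulted; the fingerprint key is always derived from `PROJECT_NAME`. A one-line sketch of the rule (the project name here is a hypothetical example):

    # Unified dedup key derivation (mirrors from_crawler above)
    project_name = 'books_distributed'  # hypothetical PROJECT_NAME
    redis_key = f"crawlo:{project_name}:item:fingerprint"
    # -> 'crawlo:books_distributed:item:fingerprint'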
crawlo/queue/queue_manager.py
@@ -12,6 +12,7 @@ import os

 from crawlo.utils.log import get_logger
 from crawlo.utils.request_serializer import RequestSerializer
+from crawlo.utils.error_handler import ErrorHandler
 from crawlo.queue.pqueue import SpiderPriorityQueue
 from crawlo import Request

@@ -87,6 +88,7 @@ class QueueManager:
     def __init__(self, config: QueueConfig):
         self.config = config
         self.logger = get_logger(self.__class__.__name__)
+        self.error_handler = ErrorHandler(self.__class__.__name__)
         self.request_serializer = RequestSerializer()
         self._queue = None
         self._queue_semaphore = None
@@ -110,7 +112,7 @@ class QueueManager:
         except Exception as e:
             # Log detailed error information and the stack trace
             self.logger.error(f"❌ Queue initialization failed: {e}")
-            self.logger.error(f"Detailed error:\n{traceback.format_exc()}")
+            self.logger.debug(f"Detailed error:\n{traceback.format_exc()}")
             self._health_status = "error"
             return False

@@ -259,11 +261,22 @@ class QueueManager:
     async def _create_queue(self, queue_type: QueueType):
         """Create a queue instance"""
         if queue_type == QueueType.REDIS:
+            # Extract the project name from the queue name, for use as module_name
+            # e.g. crawlo:books_distributed:queue:requests -> books_distributed
+            project_name = "default"
+            if ':' in self.config.queue_name:
+                parts = self.config.queue_name.split(':')
+                if len(parts) >= 2:
+                    project_name = parts[1]  # take the second segment as the project name
+            else:
+                project_name = self.config.queue_name or "default"
+
             queue = RedisPriorityQueue(
                 redis_url=self.config.redis_url,
                 queue_name=self.config.queue_name,
                 max_retries=self.config.max_retries,
-                timeout=self.config.timeout
+                timeout=self.config.timeout,
+                module_name=project_name  # pass the project name as module_name
             )
             # No need to connect immediately; lazy connect is used
             return queue
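
`_create_queue` assumes queue names follow the `crawlo:{project}:queue:requests` convention and takes the second colon-separated segment as the project name. A standalone sketch of that extraction, mirroring the branch logic above:

    # Mirrors the project-name extraction in _create_queue
    def extract_project_name(queue_name: str) -> str:
        if ':' in queue_name:
            parts = queue_name.split(':')
            if len(parts) >= 2:
                return parts[1]  # e.g. 'books_distributed'
            return "default"
        return queue_name or "default"

    assert extract_project_name('crawlo:books_distributed:queue:requests') == 'books_distributed'
    assert extract_project_name('legacy_queue') == 'legacy_queue'  # no colon: name used as-is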
crawlo/queue/redis_priority_queue.py
@@ -9,46 +9,71 @@ import os
 from crawlo import Request
 from crawlo.utils.log import get_logger
 from crawlo.utils.request_serializer import RequestSerializer
+from crawlo.utils.error_handler import ErrorHandler
+from crawlo.utils.redis_connection_pool import get_redis_pool, OptimizedRedisConnectionPool


 logger = get_logger(__name__)
+error_handler = ErrorHandler(__name__)


 class RedisPriorityQueue:
     """
-    Distributed async priority queue backed by Redis (production-optimized)
+    Distributed async priority queue backed by Redis
     """

     def __init__(
         self,
         redis_url: str = None,
-        queue_name: str = "crawlo:requests",
-        processing_queue: str = "crawlo:processing",
-        failed_queue: str = "crawlo:failed",
+        queue_name: str = None,  # default changed to None
+        processing_queue: str = None,  # default changed to None
+        failed_queue: str = None,  # default changed to None
         max_retries: int = 3,
         timeout: int = 300,  # task processing timeout (seconds)
         max_connections: int = 10,  # connection pool size
+        module_name: str = "default"  # new module_name parameter
     ):
-        # If redis_url is not provided, build it from environment variables
+        # Direct use of os.getenv() removed; redis_url must be passed in
         if redis_url is None:
-            redis_host = os.getenv('REDIS_HOST', 'localhost')
-            redis_port = os.getenv('REDIS_PORT', '6379')
-            redis_db = os.getenv('REDIS_DB', '0')
-            redis_password = os.getenv('REDIS_PASSWORD', '')
+            # If redis_url is not provided, raise and require it to be configured in settings
+            raise ValueError("redis_url must be provided. Configure it in settings instead of using os.getenv()")
+
+        self.redis_url = redis_url
+        self.module_name = module_name  # store module_name
+
+        # If queue_name is not provided, derive it from module_name
+        if queue_name is None:
+            self.queue_name = f"crawlo:{module_name}:queue:requests"
+        else:
+            # If queue_name is provided but does not follow the convention, normalize it
+            if not queue_name.startswith("crawlo:"):
+                self.queue_name = f"crawlo:{module_name}:queue:requests"
+            else:
+                self.queue_name = queue_name
+
+        # If processing_queue is not provided, derive it from queue_name
+        if processing_queue is None:
+            if ":queue:requests" in self.queue_name:
+                self.processing_queue = self.queue_name.replace(":queue:requests", ":queue:processing")
+            else:
+                self.processing_queue = f"{self.queue_name}:processing"
+        else:
+            self.processing_queue = processing_queue

-        if redis_password:
-            redis_url = f"redis://:{redis_password}@{redis_host}:{redis_port}/{redis_db}"
+        # If failed_queue is not provided, derive it from queue_name
+        if failed_queue is None:
+            if ":queue:requests" in self.queue_name:
+                self.failed_queue = self.queue_name.replace(":queue:requests", ":queue:failed")
             else:
-            redis_url = f"redis://{redis_host}:{redis_port}/{redis_db}"
+                self.failed_queue = f"{self.queue_name}:failed"
+        else:
+            self.failed_queue = failed_queue

-        self.redis_url = redis_url
-        self.queue_name = queue_name
-        self.processing_queue = processing_queue
-        self.failed_queue = failed_queue
         self.max_retries = max_retries
         self.timeout = timeout
         self.max_connections = max_connections
-        self._redis = None
+        self._redis_pool: Optional[OptimizedRedisConnectionPool] = None
+        self._redis: Optional[aioredis.Redis] = None
         self._lock = asyncio.Lock()  # lock for connection initialization
         self.request_serializer = RequestSerializer()  # handles serialization

@@ -60,24 +85,30 @@ class RedisPriorityQueue:

         for attempt in range(max_retries):
             try:
-                self._redis = await aioredis.from_url(
+                # Use the optimized connection pool
+                self._redis_pool = get_redis_pool(
                     self.redis_url,
-                    decode_responses=False,  # pickle needs bytes
                     max_connections=self.max_connections,
                     socket_connect_timeout=5,
                     socket_timeout=30,
+                    health_check_interval=30,
+                    retry_on_timeout=True
                 )
+
+                self._redis = await self._redis_pool.get_connection()
+
                 # Test the connection
                 await self._redis.ping()
-                logger.info("✅ Redis connected")
+                logger.info(f"✅ Redis connected (Module: {self.module_name})")
                 return self._redis
             except Exception as e:
-                logger.warning(f"⚠️ Redis connection failed (attempt {attempt + 1}/{max_retries}): {e}")
-                logger.warning(f"Detailed error:\n{traceback.format_exc()}")
+                error_msg = f"⚠️ Redis connection failed (attempt {attempt + 1}/{max_retries}, Module: {self.module_name}): {e}"
+                logger.warning(error_msg)
+                logger.debug(f"Detailed error:\n{traceback.format_exc()}")
                 if attempt < max_retries - 1:
                     await asyncio.sleep(delay)
                 else:
-                    raise ConnectionError(f"❌ Unable to connect to Redis: {e}")
+                    raise ConnectionError(f"❌ Unable to connect to Redis (Module: {self.module_name}): {e}")

     async def _ensure_connection(self):
         """Ensure the connection is alive"""
@@ -86,16 +117,17 @@ class RedisPriorityQueue:
         try:
             await self._redis.ping()
         except Exception as e:
-            logger.warning(f"🔄 Redis connection lost, reconnecting...: {e}")
+            logger.warning(f"🔄 Redis connection lost (Module: {self.module_name}), reconnecting...: {e}")
             self._redis = None
             await self.connect()

     async def put(self, request: Request, priority: int = 0) -> bool:
         """Put a request onto the queue"""
-        await self._ensure_connection()
-        score = -priority
-        key = self._get_request_key(request)
         try:
+            await self._ensure_connection()
+            score = -priority
+            key = self._get_request_key(request)
+
             # 🔥 Clean the Request with the dedicated serialization helper
             clean_request = self.request_serializer.prepare_for_serialization(request)

@@ -106,11 +138,14 @@ class RedisPriorityQueue:
             result = await pipe.execute()

             if result[0] > 0:
-                logger.debug(f"✅ Enqueued: {request.url}")
+                logger.debug(f"✅ Enqueued (Module: {self.module_name}): {request.url}")
             return result[0] > 0
         except Exception as e:
-            logger.error(f"❌ Failed to put onto the queue: {e}")
-            logger.error(f"Detailed error:\n{traceback.format_exc()}")
+            error_handler.handle_error(
+                e,
+                context=f"Failed to put onto the queue (Module: {self.module_name})",
+                raise_error=False
+            )
             return False

     async def get(self, timeout: float = 5.0) -> Optional[Request]:
@@ -118,11 +153,11 @@ class RedisPriorityQueue:
         Get a request (with timeout)
         :param timeout: maximum wait time (seconds), to avoid endless polling
         """
-        await self._ensure_connection()
-        start_time = asyncio.get_event_loop().time()
+        try:
+            await self._ensure_connection()
+            start_time = asyncio.get_event_loop().time()

-        while True:
-            try:
+            while True:
                 # Try to fetch a task
                 result = await self._redis.zpopmin(self.queue_name, count=1)
                 if result:
@@ -148,62 +183,95 @@ class RedisPriorityQueue:
                 # Sleep briefly to avoid busy polling
                 await asyncio.sleep(0.1)

-            except Exception as e:
-                logger.error(f"❌ Failed to fetch a task from the queue: {e}")
-                logger.error(f"Detailed error:\n{traceback.format_exc()}")
-                return None
+        except Exception as e:
+            error_handler.handle_error(
+                e,
+                context=f"Failed to fetch a task from the queue (Module: {self.module_name})",
+                raise_error=False
+            )
+            return None

     async def ack(self, request: Request):
         """Acknowledge task completion"""
-        await self._ensure_connection()
-        key = self._get_request_key(request)
-        cursor = 0
-        while True:
-            cursor, keys = await self._redis.zscan(self.processing_queue, cursor, match=f"{key}:*")
-            if keys:
-                pipe = self._redis.pipeline()
-                for k in keys:
-                    pipe.zrem(self.processing_queue, k)
-                    pipe.hdel(f"{self.processing_queue}:data", k)
-                await pipe.execute()
-            if cursor == 0:
-                break
+        try:
+            await self._ensure_connection()
+            key = self._get_request_key(request)
+            cursor = 0
+            while True:
+                cursor, keys = await self._redis.zscan(self.processing_queue, cursor, match=f"{key}:*")
+                if keys:
+                    pipe = self._redis.pipeline()
+                    for k in keys:
+                        pipe.zrem(self.processing_queue, k)
+                        pipe.hdel(f"{self.processing_queue}:data", k)
+                    await pipe.execute()
+                if cursor == 0:
+                    break
+        except Exception as e:
+            error_handler.handle_error(
+                e,
+                context=f"Failed to acknowledge task completion (Module: {self.module_name})",
+                raise_error=False
+            )

     async def fail(self, request: Request, reason: str = ""):
         """Mark a task as failed"""
-        await self._ensure_connection()
-        key = self._get_request_key(request)
-        await self.ack(request)
+        try:
+            await self._ensure_connection()
+            key = self._get_request_key(request)
+            await self.ack(request)

-        retry_key = f"{self.failed_queue}:retries:{key}"
-        retries = await self._redis.incr(retry_key)
-        await self._redis.expire(retry_key, 86400)
+            retry_key = f"{self.failed_queue}:retries:{key}"
+            retries = await self._redis.incr(retry_key)
+            await self._redis.expire(retry_key, 86400)

-        if retries <= self.max_retries:
-            await self.put(request, priority=request.priority + 1)
-            logger.info(f"🔁 Task retry [{retries}/{self.max_retries}]: {request.url}")
-        else:
-            failed_data = {
-                "url": request.url,
-                "reason": reason,
-                "retries": retries,
-                "failed_at": time.time(),
-                "request_pickle": pickle.dumps(request).hex(),  # optional: keep the full request
-            }
-            await self._redis.lpush(self.failed_queue, pickle.dumps(failed_data))
-            logger.error(f"❌ Task failed permanently [{retries} attempts]: {request.url}")
+            if retries <= self.max_retries:
+                await self.put(request, priority=request.priority + 1)
+                logger.info(f"🔁 Task retry [{retries}/{self.max_retries}] (Module: {self.module_name}): {request.url}")
+            else:
+                failed_data = {
+                    "url": request.url,
+                    "reason": reason,
+                    "retries": retries,
+                    "failed_at": time.time(),
+                    "request_pickle": pickle.dumps(request).hex(),  # optional: keep the full request
+                }
+                await self._redis.lpush(self.failed_queue, pickle.dumps(failed_data))
+                logger.error(f"❌ Task failed permanently [{retries} attempts] (Module: {self.module_name}): {request.url}")
+        except Exception as e:
+            error_handler.handle_error(
+                e,
+                context=f"Failed to mark task as failed (Module: {self.module_name})",
+                raise_error=False
+            )

     def _get_request_key(self, request: Request) -> str:
         """Generate a unique key for a request"""
-        return f"url:{hash(request.url)}"
+        return f"{self.module_name}:url:{hash(request.url) & 0x7FFFFFFF}"  # ensure a positive number

     async def qsize(self) -> int:
         """Get the queue size"""
-        await self._ensure_connection()
-        return await self._redis.zcard(self.queue_name)
+        try:
+            await self._ensure_connection()
+            return await self._redis.zcard(self.queue_name)
+        except Exception as e:
+            error_handler.handle_error(
+                e,
+                context=f"Failed to get the queue size (Module: {self.module_name})",
+                raise_error=False
+            )
+            return 0

     async def close(self):
         """Close the connection"""
-        if self._redis:
-            await self._redis.close()
-            self._redis = None
+        try:
+            # The pool manages connections automatically; no need to close individual connections here
+            self._redis = None
+            self._redis_pool = None
+            logger.info(f"✅ Redis connection released (Module: {self.module_name})")
+        except Exception as e:
+            error_handler.handle_error(
+                e,
+                context=f"Failed to release the Redis connection (Module: {self.module_name})",
+                raise_error=False
+            )
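
The net effect of the constructor changes: `redis_url` is now mandatory (a missing value raises `ValueError` instead of silently falling back to environment variables), and passing only `module_name` yields a consistent key family. A sketch of the names derived for a hypothetical module:

    # Key family derived by __init__ when only module_name is supplied
    module_name = 'books_distributed'  # hypothetical
    queue_name = f"crawlo:{module_name}:queue:requests"
    processing_queue = queue_name.replace(":queue:requests", ":queue:processing")
    failed_queue = queue_name.replace(":queue:requests", ":queue:failed")
    # -> crawlo:books_distributed:queue:requests
    #    crawlo:books_distributed:queue:processing
    #    crawlo:books_distributed:queue:failed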