crawlo-1.1.4-py3-none-any.whl → crawlo-1.1.6-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.

Potentially problematic release.
Files changed (190)
  1. crawlo/__init__.py +61 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +61 -0
  4. crawlo/cleaners/data_formatter.py +226 -0
  5. crawlo/cleaners/encoding_converter.py +126 -0
  6. crawlo/cleaners/text_cleaner.py +233 -0
  7. crawlo/cli.py +40 -40
  8. crawlo/commands/__init__.py +13 -13
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/list.py +155 -155
  12. crawlo/commands/run.py +292 -285
  13. crawlo/commands/startproject.py +419 -196
  14. crawlo/commands/stats.py +188 -188
  15. crawlo/commands/utils.py +186 -186
  16. crawlo/config.py +312 -279
  17. crawlo/config_validator.py +253 -0
  18. crawlo/core/__init__.py +2 -2
  19. crawlo/core/engine.py +346 -172
  20. crawlo/core/processor.py +40 -40
  21. crawlo/core/scheduler.py +137 -166
  22. crawlo/crawler.py +1027 -1027
  23. crawlo/downloader/__init__.py +266 -242
  24. crawlo/downloader/aiohttp_downloader.py +220 -212
  25. crawlo/downloader/cffi_downloader.py +256 -251
  26. crawlo/downloader/httpx_downloader.py +259 -259
  27. crawlo/downloader/hybrid_downloader.py +214 -0
  28. crawlo/downloader/playwright_downloader.py +403 -0
  29. crawlo/downloader/selenium_downloader.py +473 -0
  30. crawlo/event.py +11 -11
  31. crawlo/exceptions.py +81 -81
  32. crawlo/extension/__init__.py +37 -37
  33. crawlo/extension/health_check.py +141 -141
  34. crawlo/extension/log_interval.py +57 -57
  35. crawlo/extension/log_stats.py +81 -81
  36. crawlo/extension/logging_extension.py +43 -43
  37. crawlo/extension/memory_monitor.py +104 -88
  38. crawlo/extension/performance_profiler.py +133 -117
  39. crawlo/extension/request_recorder.py +107 -107
  40. crawlo/filters/__init__.py +154 -154
  41. crawlo/filters/aioredis_filter.py +281 -242
  42. crawlo/filters/memory_filter.py +269 -269
  43. crawlo/items/__init__.py +23 -23
  44. crawlo/items/base.py +21 -21
  45. crawlo/items/fields.py +53 -53
  46. crawlo/items/items.py +104 -104
  47. crawlo/middleware/__init__.py +21 -21
  48. crawlo/middleware/default_header.py +32 -32
  49. crawlo/middleware/download_delay.py +28 -28
  50. crawlo/middleware/middleware_manager.py +135 -135
  51. crawlo/middleware/proxy.py +272 -248
  52. crawlo/middleware/request_ignore.py +30 -30
  53. crawlo/middleware/response_code.py +18 -18
  54. crawlo/middleware/response_filter.py +26 -26
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/mode_manager.py +212 -201
  57. crawlo/network/__init__.py +21 -21
  58. crawlo/network/request.py +338 -311
  59. crawlo/network/response.py +360 -271
  60. crawlo/pipelines/__init__.py +21 -21
  61. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  62. crawlo/pipelines/console_pipeline.py +39 -39
  63. crawlo/pipelines/csv_pipeline.py +316 -316
  64. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  65. crawlo/pipelines/json_pipeline.py +218 -218
  66. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  67. crawlo/pipelines/mongo_pipeline.py +131 -131
  68. crawlo/pipelines/mysql_pipeline.py +316 -316
  69. crawlo/pipelines/pipeline_manager.py +61 -56
  70. crawlo/pipelines/redis_dedup_pipeline.py +167 -162
  71. crawlo/project.py +188 -153
  72. crawlo/queue/pqueue.py +37 -37
  73. crawlo/queue/queue_manager.py +334 -307
  74. crawlo/queue/redis_priority_queue.py +299 -209
  75. crawlo/settings/__init__.py +7 -7
  76. crawlo/settings/default_settings.py +219 -278
  77. crawlo/settings/setting_manager.py +123 -100
  78. crawlo/spider/__init__.py +639 -639
  79. crawlo/stats_collector.py +59 -59
  80. crawlo/subscriber.py +130 -130
  81. crawlo/task_manager.py +30 -30
  82. crawlo/templates/crawlo.cfg.tmpl +10 -10
  83. crawlo/templates/project/__init__.py.tmpl +3 -3
  84. crawlo/templates/project/items.py.tmpl +17 -17
  85. crawlo/templates/project/middlewares.py.tmpl +110 -110
  86. crawlo/templates/project/pipelines.py.tmpl +97 -97
  87. crawlo/templates/project/run.py.tmpl +251 -251
  88. crawlo/templates/project/settings.py.tmpl +326 -279
  89. crawlo/templates/project/settings_distributed.py.tmpl +120 -0
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -0
  91. crawlo/templates/project/settings_high_performance.py.tmpl +152 -0
  92. crawlo/templates/project/settings_simple.py.tmpl +69 -0
  93. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  94. crawlo/templates/spider/spider.py.tmpl +141 -141
  95. crawlo/tools/__init__.py +183 -0
  96. crawlo/tools/anti_crawler.py +269 -0
  97. crawlo/tools/authenticated_proxy.py +241 -0
  98. crawlo/tools/data_validator.py +181 -0
  99. crawlo/tools/date_tools.py +36 -0
  100. crawlo/tools/distributed_coordinator.py +387 -0
  101. crawlo/tools/retry_mechanism.py +221 -0
  102. crawlo/tools/scenario_adapter.py +263 -0
  103. crawlo/utils/__init__.py +35 -7
  104. crawlo/utils/batch_processor.py +261 -0
  105. crawlo/utils/controlled_spider_mixin.py +439 -439
  106. crawlo/utils/date_tools.py +290 -233
  107. crawlo/utils/db_helper.py +343 -343
  108. crawlo/utils/enhanced_error_handler.py +360 -0
  109. crawlo/utils/env_config.py +106 -0
  110. crawlo/utils/error_handler.py +126 -0
  111. crawlo/utils/func_tools.py +82 -82
  112. crawlo/utils/large_scale_config.py +286 -286
  113. crawlo/utils/large_scale_helper.py +343 -343
  114. crawlo/utils/log.py +128 -128
  115. crawlo/utils/performance_monitor.py +285 -0
  116. crawlo/utils/queue_helper.py +175 -175
  117. crawlo/utils/redis_connection_pool.py +335 -0
  118. crawlo/utils/redis_key_validator.py +200 -0
  119. crawlo/utils/request.py +267 -267
  120. crawlo/utils/request_serializer.py +219 -219
  121. crawlo/utils/spider_loader.py +62 -62
  122. crawlo/utils/system.py +11 -11
  123. crawlo/utils/tools.py +4 -4
  124. crawlo/utils/url.py +39 -39
  125. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/METADATA +401 -403
  126. crawlo-1.1.6.dist-info/RECORD +189 -0
  127. examples/__init__.py +7 -7
  128. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +82 -0
  129. tests/__init__.py +7 -7
  130. tests/advanced_tools_example.py +276 -0
  131. tests/authenticated_proxy_example.py +237 -0
  132. tests/cleaners_example.py +161 -0
  133. tests/config_validation_demo.py +103 -0
  134. {examples → tests}/controlled_spider_example.py +205 -205
  135. tests/date_tools_example.py +181 -0
  136. tests/dynamic_loading_example.py +524 -0
  137. tests/dynamic_loading_test.py +105 -0
  138. tests/env_config_example.py +134 -0
  139. tests/error_handling_example.py +172 -0
  140. tests/redis_key_validation_demo.py +131 -0
  141. tests/response_improvements_example.py +145 -0
  142. tests/test_advanced_tools.py +149 -0
  143. tests/test_all_redis_key_configs.py +146 -0
  144. tests/test_authenticated_proxy.py +142 -0
  145. tests/test_cleaners.py +55 -0
  146. tests/test_comprehensive.py +147 -0
  147. tests/test_config_validator.py +194 -0
  148. tests/test_date_tools.py +124 -0
  149. tests/test_double_crawlo_fix.py +208 -0
  150. tests/test_double_crawlo_fix_simple.py +125 -0
  151. tests/test_dynamic_downloaders_proxy.py +125 -0
  152. tests/test_dynamic_proxy.py +93 -0
  153. tests/test_dynamic_proxy_config.py +147 -0
  154. tests/test_dynamic_proxy_real.py +110 -0
  155. tests/test_edge_cases.py +304 -0
  156. tests/test_enhanced_error_handler.py +271 -0
  157. tests/test_env_config.py +122 -0
  158. tests/test_error_handler_compatibility.py +113 -0
  159. tests/test_final_validation.py +153 -153
  160. tests/test_framework_env_usage.py +104 -0
  161. tests/test_integration.py +357 -0
  162. tests/test_item_dedup_redis_key.py +123 -0
  163. tests/test_parsel.py +30 -0
  164. tests/test_performance.py +328 -0
  165. tests/test_proxy_health_check.py +32 -32
  166. tests/test_proxy_middleware_integration.py +136 -136
  167. tests/test_proxy_providers.py +56 -56
  168. tests/test_proxy_stats.py +19 -19
  169. tests/test_proxy_strategies.py +59 -59
  170. tests/test_queue_manager_double_crawlo.py +231 -0
  171. tests/test_queue_manager_redis_key.py +177 -0
  172. tests/test_redis_config.py +28 -28
  173. tests/test_redis_connection_pool.py +295 -0
  174. tests/test_redis_key_naming.py +182 -0
  175. tests/test_redis_key_validator.py +124 -0
  176. tests/test_redis_queue.py +224 -224
  177. tests/test_request_serialization.py +70 -70
  178. tests/test_response_improvements.py +153 -0
  179. tests/test_scheduler.py +241 -241
  180. tests/test_simple_response.py +62 -0
  181. tests/test_telecom_spider_redis_key.py +206 -0
  182. tests/test_template_content.py +88 -0
  183. tests/test_template_redis_key.py +135 -0
  184. tests/test_tools.py +154 -0
  185. tests/tools_example.py +258 -0
  186. crawlo/core/enhanced_engine.py +0 -190
  187. crawlo-1.1.4.dist-info/RECORD +0 -117
  188. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/WHEEL +0 -0
  189. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/entry_points.txt +0 -0
  190. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/top_level.txt +0 -0
@@ -1,209 +1,299 @@
- import pickle
- import time
- import asyncio
- from typing import Optional
- import redis.asyncio as aioredis
- import traceback
- import os
-
- from crawlo import Request
- from crawlo.utils.log import get_logger
- from crawlo.utils.request_serializer import RequestSerializer
-
-
- logger = get_logger(__name__)
-
-
- class RedisPriorityQueue:
-     """
-     Redis-based distributed async priority queue (production-grade optimized version)
-     """
-
-     def __init__(
-         self,
-         redis_url: str = None,
-         queue_name: str = "crawlo:requests",
-         processing_queue: str = "crawlo:processing",
-         failed_queue: str = "crawlo:failed",
-         max_retries: int = 3,
-         timeout: int = 300,  # task processing timeout (seconds)
-         max_connections: int = 10,  # connection pool size
-     ):
-         # If no redis_url is provided, build one from environment variables
-         if redis_url is None:
-             redis_host = os.getenv('REDIS_HOST', 'localhost')
-             redis_port = os.getenv('REDIS_PORT', '6379')
-             redis_db = os.getenv('REDIS_DB', '0')
-             redis_password = os.getenv('REDIS_PASSWORD', '')
-
-             if redis_password:
-                 redis_url = f"redis://:{redis_password}@{redis_host}:{redis_port}/{redis_db}"
-             else:
-                 redis_url = f"redis://{redis_host}:{redis_port}/{redis_db}"
-
-         self.redis_url = redis_url
-         self.queue_name = queue_name
-         self.processing_queue = processing_queue
-         self.failed_queue = failed_queue
-         self.max_retries = max_retries
-         self.timeout = timeout
-         self.max_connections = max_connections
-         self._redis = None
-         self._lock = asyncio.Lock()  # lock guarding connection initialization
-         self.request_serializer = RequestSerializer()  # handles serialization
-
-     async def connect(self, max_retries=3, delay=1):
-         """Connect to Redis asynchronously, with retries"""
-         async with self._lock:
-             if self._redis is not None:
-                 return self._redis
-
-             for attempt in range(max_retries):
-                 try:
-                     self._redis = await aioredis.from_url(
-                         self.redis_url,
-                         decode_responses=False,  # pickle needs bytes
-                         max_connections=self.max_connections,
-                         socket_connect_timeout=5,
-                         socket_timeout=30,
-                     )
-                     # Test the connection
-                     await self._redis.ping()
-                     logger.info("✅ Redis connected")
-                     return self._redis
-                 except Exception as e:
-                     logger.warning(f"⚠️ Redis connection failed (attempt {attempt + 1}/{max_retries}): {e}")
-                     logger.warning(f"Full traceback:\n{traceback.format_exc()}")
-                     if attempt < max_retries - 1:
-                         await asyncio.sleep(delay)
-                     else:
-                         raise ConnectionError(f"❌ Unable to connect to Redis: {e}")
-
-     async def _ensure_connection(self):
-         """Make sure the connection is still valid"""
-         if self._redis is None:
-             await self.connect()
-         try:
-             await self._redis.ping()
-         except Exception as e:
-             logger.warning(f"🔄 Redis connection lost, reconnecting...: {e}")
-             self._redis = None
-             await self.connect()
-
-     async def put(self, request: Request, priority: int = 0) -> bool:
-         """Push a request onto the queue"""
-         await self._ensure_connection()
-         score = -priority
-         key = self._get_request_key(request)
-         try:
-             # 🔥 Clean the Request with the dedicated serialization helper
-             clean_request = self.request_serializer.prepare_for_serialization(request)
-
-             serialized = pickle.dumps(clean_request)
-             pipe = self._redis.pipeline()
-             pipe.zadd(self.queue_name, {key: score})
-             pipe.hset(f"{self.queue_name}:data", key, serialized)
-             result = await pipe.execute()
-
-             if result[0] > 0:
-                 logger.debug(f"Enqueued: {request.url}")
-             return result[0] > 0
-         except Exception as e:
-             logger.error(f"❌ Enqueue failed: {e}")
-             logger.error(f"Full traceback:\n{traceback.format_exc()}")
-             return False
-
-     async def get(self, timeout: float = 5.0) -> Optional[Request]:
-         """
-         Fetch a request (with timeout)
-         :param timeout: maximum wait time in seconds, to avoid endless polling
-         """
-         await self._ensure_connection()
-         start_time = asyncio.get_event_loop().time()
-
-         while True:
-             try:
-                 # Try to pop a task
-                 result = await self._redis.zpopmin(self.queue_name, count=1)
-                 if result:
-                     key, score = result[0]
-                     serialized = await self._redis.hget(f"{self.queue_name}:data", key)
-                     if not serialized:
-                         continue
-
-                     # Move it to the processing queue
-                     processing_key = f"{key}:{int(time.time())}"
-                     pipe = self._redis.pipeline()
-                     pipe.zadd(self.processing_queue, {processing_key: time.time() + self.timeout})
-                     pipe.hset(f"{self.processing_queue}:data", processing_key, serialized)
-                     pipe.hdel(f"{self.queue_name}:data", key)
-                     await pipe.execute()
-
-                     return pickle.loads(serialized)
-
-                 # Check for timeout
-                 if asyncio.get_event_loop().time() - start_time > timeout:
-                     return None
-
-                 # Brief pause to avoid busy polling
-                 await asyncio.sleep(0.1)
-
-             except Exception as e:
-                 logger.error(f"Failed to fetch task from queue: {e}")
-                 logger.error(f"Full traceback:\n{traceback.format_exc()}")
-                 return None
-
-     async def ack(self, request: Request):
-         """Acknowledge that a task is done"""
-         await self._ensure_connection()
-         key = self._get_request_key(request)
-         cursor = 0
-         while True:
-             cursor, keys = await self._redis.zscan(self.processing_queue, cursor, match=f"{key}:*")
-             if keys:
-                 pipe = self._redis.pipeline()
-                 for k in keys:
-                     pipe.zrem(self.processing_queue, k)
-                     pipe.hdel(f"{self.processing_queue}:data", k)
-                 await pipe.execute()
-             if cursor == 0:
-                 break
-
-     async def fail(self, request: Request, reason: str = ""):
-         """Mark a task as failed"""
-         await self._ensure_connection()
-         key = self._get_request_key(request)
-         await self.ack(request)
-
-         retry_key = f"{self.failed_queue}:retries:{key}"
-         retries = await self._redis.incr(retry_key)
-         await self._redis.expire(retry_key, 86400)
-
-         if retries <= self.max_retries:
-             await self.put(request, priority=request.priority + 1)
-             logger.info(f"🔁 Retrying task [{retries}/{self.max_retries}]: {request.url}")
-         else:
-             failed_data = {
-                 "url": request.url,
-                 "reason": reason,
-                 "retries": retries,
-                 "failed_at": time.time(),
-                 "request_pickle": pickle.dumps(request).hex(),  # optional: keep the full request
-             }
-             await self._redis.lpush(self.failed_queue, pickle.dumps(failed_data))
-             logger.error(f"❌ Task failed permanently [{retries} attempts]: {request.url}")
-
-     def _get_request_key(self, request: Request) -> str:
-         """Build a unique key for a request"""
-         return f"url:{hash(request.url)}"
-
-     async def qsize(self) -> int:
-         """Get the queue size"""
-         await self._ensure_connection()
-         return await self._redis.zcard(self.queue_name)
-
-     async def close(self):
-         """Close the connection"""
-         if self._redis:
-             await self._redis.close()
-             self._redis = None
+ import asyncio
+ import pickle
+ import time
+ import traceback
+ from typing import Optional
+
+ import redis.asyncio as aioredis
+
+ from crawlo import Request
+ from crawlo.utils.error_handler import ErrorHandler
+ from crawlo.utils.log import get_logger
+ from crawlo.utils.redis_connection_pool import get_redis_pool, OptimizedRedisConnectionPool
+ from crawlo.utils.request_serializer import RequestSerializer
+
+ logger = get_logger(__name__)
+ error_handler = ErrorHandler(__name__)
+
+
+ class RedisPriorityQueue:
+     """
+     Redis-based distributed async priority queue
+     """
+
+     def __init__(
+         self,
+         redis_url: str = None,
+         queue_name: str = None,  # default changed to None
+         processing_queue: str = None,  # default changed to None
+         failed_queue: str = None,  # default changed to None
+         max_retries: int = 3,
+         timeout: int = 300,  # task processing timeout (seconds)
+         max_connections: int = 10,  # connection pool size
+         module_name: str = "default"  # new module_name parameter
+     ):
+         # Direct os.getenv() use removed; redis_url must be passed in explicitly
+         if redis_url is None:
+             # Without a redis_url we raise, requiring it to be configured in settings
+             raise ValueError("redis_url must be provided. Configure it in settings instead of using os.getenv()")
+
+         self.redis_url = redis_url
+         self.module_name = module_name  # keep module_name
+
+         # If no queue_name was given, derive one from module_name
+         if queue_name is None:
+             self.queue_name = f"crawlo:{module_name}:queue:requests"
+         else:
+             # Keep a user-supplied queue name untouched
+             self.queue_name = queue_name
+
+         # If no processing_queue was given, derive it from queue_name
+         if processing_queue is None:
+             if ":queue:requests" in self.queue_name:
+                 self.processing_queue = self.queue_name.replace(":queue:requests", ":queue:processing")
+             else:
+                 self.processing_queue = f"{self.queue_name}:processing"
+         else:
+             self.processing_queue = processing_queue
+
+         # If no failed_queue was given, derive it from queue_name
+         if failed_queue is None:
+             if ":queue:requests" in self.queue_name:
+                 self.failed_queue = self.queue_name.replace(":queue:requests", ":queue:failed")
+             else:
+                 self.failed_queue = f"{self.queue_name}:failed"
+         else:
+             self.failed_queue = failed_queue
+
+         self.max_retries = max_retries
+         self.timeout = timeout
+         self.max_connections = max_connections
+         self._redis_pool: Optional[OptimizedRedisConnectionPool] = None
+         self._redis: Optional[aioredis.Redis] = None
+         self._lock = asyncio.Lock()  # lock guarding connection initialization
+         self.request_serializer = RequestSerializer()  # handles serialization
+
+     async def connect(self, max_retries=3, delay=1):
+         """Connect to Redis asynchronously, with retries"""
+         async with self._lock:
+             if self._redis is not None:
+                 return self._redis
+
+             for attempt in range(max_retries):
+                 try:
+                     # Use the optimized connection pool; decode_responses=False avoids encoding issues
+                     self._redis_pool = get_redis_pool(
+                         self.redis_url,
+                         max_connections=self.max_connections,
+                         socket_connect_timeout=5,
+                         socket_timeout=30,
+                         health_check_interval=30,
+                         retry_on_timeout=True,
+                         decode_responses=False,  # never auto-decode responses
+                         encoding='utf-8'
+                     )
+
+                     self._redis = await self._redis_pool.get_connection()
+
+                     # Test the connection
+                     await self._redis.ping()
+                     logger.info(f"✅ Redis connected (Module: {self.module_name})")
+                     return self._redis
+                 except Exception as e:
+                     error_msg = f"⚠️ Redis connection failed (attempt {attempt + 1}/{max_retries}, Module: {self.module_name}): {e}"
+                     logger.warning(error_msg)
+                     logger.debug(f"Full traceback:\n{traceback.format_exc()}")
+                     if attempt < max_retries - 1:
+                         await asyncio.sleep(delay)
+                     else:
+                         raise ConnectionError(f"Unable to connect to Redis (Module: {self.module_name}): {e}")
+
+     async def _ensure_connection(self):
+         """Make sure the connection is still valid"""
+         if self._redis is None:
+             await self.connect()
+         try:
+             await self._redis.ping()
+         except Exception as e:
+             logger.warning(f"🔄 Redis connection lost (Module: {self.module_name}), reconnecting...: {e}")
+             self._redis = None
+             await self.connect()
+
+     async def put(self, request: Request, priority: int = 0) -> bool:
+         """Push a request onto the queue"""
+         try:
+             await self._ensure_connection()
+             score = -priority
+             key = self._get_request_key(request)
+
+             # 🔥 Clean the Request with the dedicated serialization helper
+             clean_request = self.request_serializer.prepare_for_serialization(request)
+
+             # Make sure the serialized payload can be deserialized again
+             try:
+                 serialized = pickle.dumps(clean_request)
+                 # Verify that the serialized data round-trips
+                 pickle.loads(serialized)
+             except Exception as serialize_error:
+                 logger.error(f"❌ Request serialization check failed (Module: {self.module_name}): {serialize_error}")
+                 return False
+
+             pipe = self._redis.pipeline()
+             pipe.zadd(self.queue_name, {key: score})
+             pipe.hset(f"{self.queue_name}:data", key, serialized)
+             result = await pipe.execute()
+
+             if result[0] > 0:
+                 logger.debug(f"✅ Enqueued (Module: {self.module_name}): {request.url}")
+             return result[0] > 0
+         except Exception as e:
+             error_handler.handle_error(
+                 e,
+                 context=f"Enqueue failed (Module: {self.module_name})",
+                 raise_error=False
+             )
+             return False
+
+     async def get(self, timeout: float = 5.0) -> Optional[Request]:
+         """
+         Fetch a request (with timeout)
+         :param timeout: maximum wait time in seconds, to avoid endless polling
+         """
+         try:
+             await self._ensure_connection()
+             start_time = asyncio.get_event_loop().time()
+
+             while True:
+                 # Try to pop a task
+                 result = await self._redis.zpopmin(self.queue_name, count=1)
+                 if result:
+                     key, score = result[0]
+                     serialized = await self._redis.hget(f"{self.queue_name}:data", key)
+                     if not serialized:
+                         continue
+
+                     # Move it to the processing queue
+                     processing_key = f"{key}:{int(time.time())}"
+                     pipe = self._redis.pipeline()
+                     pipe.zadd(self.processing_queue, {processing_key: time.time() + self.timeout})
+                     pipe.hset(f"{self.processing_queue}:data", processing_key, serialized)
+                     pipe.hdel(f"{self.queue_name}:data", key)
+                     await pipe.execute()
+
+                     # Safer deserialization
+                     try:
+                         # First try standard pickle deserialization
+                         request = pickle.loads(serialized)
+                         return request
+                     except UnicodeDecodeError:
+                         # On encoding errors, retry with latin1
+                         request = pickle.loads(serialized, encoding='latin1')
+                         return request
+                     except Exception as pickle_error:
+                         # If pickle deserialization fails, log the error and skip this task
+                         logger.error(f"❌ Could not deserialize request data (Module: {self.module_name}): {pickle_error}")
+                         # Drop the invalid task from the processing queue
+                         await self._redis.zrem(self.processing_queue, processing_key)
+                         await self._redis.hdel(f"{self.processing_queue}:data", processing_key)
+                         # Move on to the next task
+                         continue
+
+                 # Check for timeout
+                 if asyncio.get_event_loop().time() - start_time > timeout:
+                     return None
+
+                 # Brief pause to avoid busy polling
+                 await asyncio.sleep(0.1)
+
+         except Exception as e:
+             error_handler.handle_error(
+                 e,
+                 context=f"Failed to fetch task from queue (Module: {self.module_name})",
+                 raise_error=False
+             )
+             return None
+
+     async def ack(self, request: Request):
+         """Acknowledge that a task is done"""
+         try:
+             await self._ensure_connection()
+             key = self._get_request_key(request)
+             cursor = 0
+             while True:
+                 cursor, keys = await self._redis.zscan(self.processing_queue, cursor, match=f"{key}:*")
+                 if keys:
+                     pipe = self._redis.pipeline()
+                     for k in keys:
+                         pipe.zrem(self.processing_queue, k)
+                         pipe.hdel(f"{self.processing_queue}:data", k)
+                     await pipe.execute()
+                 if cursor == 0:
+                     break
+         except Exception as e:
+             error_handler.handle_error(
+                 e,
+                 context=f"Failed to acknowledge task completion (Module: {self.module_name})",
+                 raise_error=False
+             )
+
+     async def fail(self, request: Request, reason: str = ""):
+         """Mark a task as failed"""
+         try:
+             await self._ensure_connection()
+             key = self._get_request_key(request)
+             await self.ack(request)
+
+             retry_key = f"{self.failed_queue}:retries:{key}"
+             retries = await self._redis.incr(retry_key)
+             await self._redis.expire(retry_key, 86400)
+
+             if retries <= self.max_retries:
+                 await self.put(request, priority=request.priority + 1)
+                 logger.info(f"🔁 Retrying task [{retries}/{self.max_retries}] (Module: {self.module_name}): {request.url}")
+             else:
+                 failed_data = {
+                     "url": request.url,
+                     "reason": reason,
+                     "retries": retries,
+                     "failed_at": time.time(),
+                     "request_pickle": pickle.dumps(request).hex(),  # optional: keep the full request
+                 }
+                 await self._redis.lpush(self.failed_queue, pickle.dumps(failed_data))
+                 logger.error(f"❌ Task failed permanently [{retries} attempts] (Module: {self.module_name}): {request.url}")
+         except Exception as e:
+             error_handler.handle_error(
+                 e,
+                 context=f"Failed to mark task as failed (Module: {self.module_name})",
+                 raise_error=False
+             )
+
+     def _get_request_key(self, request: Request) -> str:
+         """Build a unique key for a request"""
+         return f"{self.module_name}:url:{hash(request.url) & 0x7FFFFFFF}"  # ensure a positive value
+
+     async def qsize(self) -> int:
+         """Get the queue size"""
+         try:
+             await self._ensure_connection()
+             return await self._redis.zcard(self.queue_name)
+         except Exception as e:
+             error_handler.handle_error(
+                 e,
+                 context=f"Failed to get queue size (Module: {self.module_name})",
+                 raise_error=False
+             )
+             return 0
+
+     async def close(self):
+         """Close the connection"""
+         try:
+             # The connection pool manages connections itself; no need to close individual connections here
+             self._redis = None
+             self._redis_pool = None
+             logger.info(f"✅ Redis connection released (Module: {self.module_name})")
+         except Exception as e:
+             error_handler.handle_error(
+                 e,
+                 context=f"Failed to release Redis connection (Module: {self.module_name})",
+                 raise_error=False
+             )
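
The headline change in this hunk is the constructor contract: redis_url is now mandatory (a ValueError replaces the old os.getenv() fallback), and a new module_name parameter drives the default names of the requests, processing, and failed queues. A minimal usage sketch against the new signature, assuming a local Redis at redis://localhost:6379/0, a hypothetical module name "demo", and that Request accepts a url keyword as the diff suggests:

import asyncio

from crawlo import Request
from crawlo.queue.redis_priority_queue import RedisPriorityQueue


async def main():
    # redis_url is now required; omitting it raises ValueError (see the diff above).
    # "demo" is a hypothetical module name; queue names derive from it:
    #   crawlo:demo:queue:requests / crawlo:demo:queue:processing / crawlo:demo:queue:failed
    queue = RedisPriorityQueue(
        redis_url="redis://localhost:6379/0",  # assumed local instance
        module_name="demo",
    )
    await queue.connect()
    await queue.put(Request(url="https://example.com"), priority=10)
    req = await queue.get(timeout=5.0)
    if req is not None:
        await queue.ack(req)  # or: await queue.fail(req, reason="...")
    await queue.close()


asyncio.run(main())

Note that an explicitly supplied queue_name is kept verbatim rather than re-prefixed, which is the behavior the new tests/test_queue_manager_double_crawlo.py and tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md exercise.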
@@ -1,7 +1,7 @@
- #!/usr/bin/python
- # -*- coding:UTF-8 -*-
- """
- # @Time : 2025-05-11 11:08
- # @Author : oscar
- # @Desc : None
- """
+ #!/usr/bin/python
+ # -*- coding:UTF-8 -*-
+ """
+ # @Time : 2025-05-11 11:08
+ # @Author : oscar
+ # @Desc : None
+ """