aury-boot 0.0.35__py3-none-any.whl → 0.0.37__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
aury/boot/_version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
  commit_id: COMMIT_ID
  __commit_id__: COMMIT_ID

- __version__ = version = '0.0.35'
- __version_tuple__ = version_tuple = (0, 0, 35)
+ __version__ = version = '0.0.37'
+ __version_tuple__ = version_tuple = (0, 0, 37)

  __commit_id__ = commit_id = None
@@ -147,13 +147,13 @@ class MQInstanceConfig(MultiInstanceSettings):

      环境变量格式: MQ__{INSTANCE}__{FIELD}
      示例:
-         MQ__DEFAULT__BACKEND=redis
+         MQ__DEFAULT__BACKEND=redis_stream
          MQ__DEFAULT__URL=redis://localhost:6379/4
      """

      backend: str = Field(
-         default="redis",
-         description="消息队列后端 (redis/rabbitmq)"
+         default="redis_stream",
+         description="消息队列后端 (redis/redis_stream/rabbitmq)"
      )
      url: str | None = Field(
          default=None,
@@ -530,24 +530,25 @@ class MessageQueueSettings(BaseModel):
      """消息队列配置。

      环境变量格式: MQ__{FIELD}
-     示例: MQ__BROKER_URL, MQ__DEFAULT_QUEUE, MQ__SERIALIZER
+     示例: MQ__BACKEND, MQ__URL, MQ__BROKER_URL

      与 Task(任务队列)的区别:
      - Task: 基于 Dramatiq,用于异步任务处理(API + Worker 模式)
      - MQ: 通用消息队列,用于服务间通信、事件驱动架构

      支持的后端:
-     - Redis: redis://localhost:6379/0
-     - RabbitMQ: amqp://guest:guest@localhost:5672//
+     - redis: redis://localhost:6379/0
+     - redis_stream: redis://localhost:6379/0
+     - rabbitmq: amqp://guest:guest@localhost:5672//
      """

-     enabled: bool = Field(
-         default=False,
-         description="是否启用消息队列组件"
+     backend: str = Field(
+         default="",
+         description="消息队列后端 (redis/redis_stream/rabbitmq),空字符串表示不启用"
      )
-     broker_url: str | None = Field(
+     url: str | None = Field(
          default=None,
-         description="消息队列代理 URL"
+         description="连接 URL"
      )
      default_queue: str = Field(
          default="default",
@@ -714,7 +715,7 @@ class AlertSettings(BaseModel):

      # 抑制配置
      suppress_seconds: int = Field(
-         default=10,
+         default=300,
          description="告警抑制时间(秒),相同告警在此时间内不重复发送"
      )

@@ -1001,6 +1002,7 @@ class BaseConfig(BaseSettings):
      # ========== 异步与事件 ==========
      task: TaskSettings = Field(default_factory=TaskSettings)
      event: EventSettings = Field(default_factory=EventSettings)
+     mq: MessageQueueSettings = Field(default_factory=MessageQueueSettings)

      # ========== 微服务通信 ==========
      # RPC 客户端配置(调用其他服务)
@@ -1108,10 +1110,18 @@ class BaseConfig(BaseSettings):
          """获取所有消息队列实例配置。

          从环境变量解析 MQ__{INSTANCE}__{FIELD} 格式的配置。
+         如果没有配置多实例,返回从单实例配置转换的 default 实例。
          """
          if self._mqs is None:
              loader = MultiInstanceConfigLoader("MQ", MQInstanceConfig)
              self._mqs = loader.load()
+             if not self._mqs and self.mq.backend:
+                 self._mqs = {
+                     "default": MQInstanceConfig(
+                         backend=self.mq.backend,
+                         url=self.mq.url,
+                     )
+                 }
          return self._mqs

      def get_events(self) -> dict[str, EventInstanceConfig]:
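
A minimal sketch of what this fallback enables. It assumes the accessor shown above is exposed as `get_mqs()` (by analogy with the neighbouring `get_events()`) and that `BaseConfig` lives in `aury.boot.application.config.settings` as listed in RECORD; the environment values are illustrative:

```python
import os

# Single-instance configuration only; no MQ__{INSTANCE}__{FIELD} variables set.
os.environ["MQ__BACKEND"] = "redis_stream"
os.environ["MQ__URL"] = "redis://localhost:6379/0"

from aury.boot.application.config.settings import BaseConfig  # module path assumed

config = BaseConfig()
instances = config.get_mqs()  # multi-instance loader finds nothing, falls back
print(instances["default"].backend)  # -> "redis_stream"
print(instances["default"].url)      # -> "redis://localhost:6379/0"
```
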
@@ -707,8 +707,8 @@ def init(
          console.print(f" [cyan]uv add \"{deps[0]}\"[/cyan]")
          console.print()
      else:
-         # 非交互模式:默认启用 Admin Console
-         with_admin_console = True
+         # 非交互模式:默认禁用 Admin Console
+         with_admin_console = False
      if package_name == ".":
          package_name_snake = None
      elif package_name:
@@ -9,6 +9,7 @@ import sys
  from typing import TYPE_CHECKING

  import typer
+ import uvicorn

  if TYPE_CHECKING:
      from aury.boot.application.app.base import FoundationApp
@@ -416,7 +417,6 @@ def dev(
      os_module.environ["AURIMYTH_RELOAD"] = "1"

      # 热重载模式下,直接使用 uvicorn,传递 app 字符串路径
-     import uvicorn
      uvicorn.run(
          app=app_module_path,
          host=server_host,
@@ -500,19 +500,22 @@ def prod(
      typer.echo(" 热重载: ❌")
      typer.echo(" 调试模式: ❌")

+     # 获取 app 模块路径(多进程模式需要字符串格式)
+     app_module_path = app_path or _detect_app_module()
+     typer.echo(f" 应用模块: {app_module_path}")
+
      try:
-         server = ApplicationServer(
-             app=app_instance,
+         # 多进程模式必须使用字符串路径,否则子进程无法重新加载应用
+         uvicorn.run(
+             app=app_module_path,
              host=server_host,
              port=server_port,
              workers=server_workers,
              reload=False,
              loop="auto",
              http="auto",
-             debug=False,
              access_log=True,
          )
-         server.run()
      except KeyboardInterrupt:
          typer.echo("\n👋 服务器已停止")
      except Exception as e:
@@ -169,7 +169,7 @@ class BaseConfig(BaseSettings):
  )
  ```

- **环境变量命名规则**:`{SECTION}__{FIELD}`
+ **环境变量命名规则**:`{{SECTION}}__{{FIELD}}`

  ```bash
  # 示例
@@ -1,6 +1,6 @@
  # 消息队列(MQ)

- 支持 `redis` 和 `rabbitmq` 后端的消息队列,用于异步任务解耦。
+ 支持 `redis`、`redis_stream` 和 `rabbitmq` 后端的消息队列,用于异步任务解耦。

  ## 14.1 基本用法

@@ -10,93 +10,143 @@ from aury.boot.infrastructure.mq import MQManager
  # 获取实例
  mq = MQManager.get_instance()

- # Redis 后端
+ # Redis List 后端(简单队列)
  await mq.initialize(backend="redis", url="redis://localhost:6379/0")

+ # Redis Stream 后端(推荐,支持消费者组)
+ await mq.initialize(backend="redis_stream", url="redis://localhost:6379/0")
+
  # RabbitMQ 后端
  await mq.initialize(backend="rabbitmq", url="amqp://guest:guest@localhost:5672/")
  ```

- ## 14.2 生产者
+ ## 14.2 后端对比
+
+ **Redis List (redis)**:
+ - 简单的 FIFO 队列(LPUSH/BRPOP)
+ - 适合单消费者场景
+ - 消息不持久化(除非开启 AOF)
+
+ **Redis Stream (redis_stream)** ⭐ 推荐:
+ - 支持消费者组,多实例可并行消费
+ - 消息持久化(配合 AOF)
+ - 支持消息确认(ACK)和重试
+ - 支持消息回放和历史查询
+
+ **RabbitMQ (rabbitmq)**:
+ - 功能最完整的消息队列
+ - 支持多种交换机类型
+ - 适合复杂的消息路由场景
+
+ ## 14.3 生产者

  ```python
+ from aury.boot.infrastructure.mq import MQMessage
+
  # 发送消息
- await mq.publish(
+ await mq.send(
      queue="orders",
      message={{"order_id": "123", "action": "created"}}
  )

- # 批量发送
- await mq.publish_batch(
-     queue="orders",
-     messages=[
-         {{"order_id": "1", "action": "created"}},
-         {{"order_id": "2", "action": "updated"}},
-     ]
+ # 使用 MQMessage 对象(可设置 headers)
+ msg = MQMessage(
+     body={{"order_id": "123"}},
+     headers={{"priority": "high"}}
  )
+ await mq.send("orders", msg)
  ```

- ## 14.3 消费者
+ ## 14.4 消费者

  **文件**: `{package_name}/workers/order_worker.py`

  ```python
- from aury.boot.infrastructure.mq import MQManager
+ from aury.boot.infrastructure.mq import MQManager, MQMessage
  from aury.boot.common.logging import logger

  mq = MQManager.get_instance()


- async def process_order(message: dict):
+ async def process_order(message: MQMessage):
      \"\"\"处理订单消息。\"\"\"
-     logger.info(f"处理订单: {{message['order_id']}}")
+     logger.info(f"处理订单: {{message.body}}")
      # 业务逻辑...


  async def start_consumer():
      \"\"\"启动消费者。\"\"\"
+     # consume 会自动处理 ACK/NACK
      await mq.consume("orders", process_order)
+ ```
+
+ ## 14.5 Redis Stream 特性

+ ```python
+ from aury.boot.infrastructure.mq.backends.redis_stream import RedisStreamMQ
+
+ # 初始化时指定消费者组
+ mq = MQManager.get_instance()
+ await mq.initialize(
+     backend="redis_stream",
+     url="redis://localhost:6379/0"
+ )
+
+ # 获取底层 RedisStreamMQ 实例使用高级特性
+ stream_mq: RedisStreamMQ = mq.backend

- # 带确认的消费
- async def process_with_ack(message: dict, ack, nack):
-     try:
-         await process_order(message)
-         await ack()
-     except Exception:
-         await nack(requeue=True)
+ # 读取所有历史消息(用于重放)
+ messages = await stream_mq.read_all("orders", count=100)

- await mq.consume("orders", process_with_ack, auto_ack=False)
+ # 阻塞读取新消息(用于 SSE/实时推送)
+ messages = await stream_mq.read_blocking(
+     "orders",
+     last_id="$",    # 从最新消息开始
+     count=10,
+     block_ms=5000   # 阻塞 5 秒
+ )
+
+ # 裁剪 Stream(保留最新 1000 条)
+ await stream_mq.trim("orders", maxlen=1000)
+
+ # 获取 Stream 信息
+ info = await stream_mq.stream_info("orders")
  ```

- ## 14.4 多实例
+ ## 14.6 多实例配置
+
+ 框架支持命名多实例,适合不同业务场景使用不同的 MQ:

  ```python
- # 不同用途的 MQ 实例
+ # 代码中使用命名实例
  orders_mq = MQManager.get_instance("orders")
  notifications_mq = MQManager.get_instance("notifications")

  # 分别初始化
- await orders_mq.initialize(backend="rabbitmq", url="amqp://localhost:5672/orders")
- await notifications_mq.initialize(backend="redis", url="redis://localhost:6379/5")
+ await orders_mq.initialize(backend="redis_stream", url="redis://localhost:6379/1")
+ await notifications_mq.initialize(backend="redis", url="redis://localhost:6379/2")
  ```

- ## 14.5 环境变量
+ **环境变量配置**(自动初始化):

  ```bash
- # 默认实例
- MQ__BACKEND=redis
+ # 单实例配置
+ MQ__BACKEND=redis_stream
  MQ__URL=redis://localhost:6379/0

- # 多实例(格式:MQ__{{INSTANCE}}__{{FIELD}})
- MQ__DEFAULT__BACKEND=redis
+ # 多实例配置(格式:MQ__{{INSTANCE}}__{{FIELD}})
+ MQ__DEFAULT__BACKEND=redis_stream
  MQ__DEFAULT__URL=redis://localhost:6379/4
  MQ__ORDERS__BACKEND=rabbitmq
  MQ__ORDERS__URL=amqp://guest:guest@localhost:5672/
- MQ__ORDERS__PREFETCH_COUNT=10
  ```

- ## 14.6 与异步任务(Dramatiq)的区别
+ ## 14.7 与异步任务(Dramatiq)的区别
+
+ - **MQ**:轻量级消息传递,适合简单的生产者-消费者模式、实时通知、多实例消费
+ - **Dramatiq(TaskManager)**:功能更丰富,支持重试、延迟、优先级、Actor 模式

- - **MQ**:轻量级消息传递,适合简单的生产者-消费者模式
- - **Dramatiq(TaskManager)**:功能更丰富,支持重试、延迟、优先级等
+ 选择建议:
+ - 简单的异步解耦 → MQ (redis_stream)
+ - 需要重试/延迟/优先级 → Dramatiq
+ - 多服务实例并行消费 → MQ (redis_stream) 或 RabbitMQ
@@ -19,19 +19,27 @@
  # =============================================================================
  # 消息队列配置 (MQ__)
  # =============================================================================
+ # 支持后端: redis | redis_stream (推荐) | rabbitmq
+ #
  # 单实例配置:
- # MQ__ENABLED=false
- # MQ__BROKER_URL=redis://localhost:6379/4
-
+ # MQ__BACKEND=redis_stream
+ # MQ__URL=redis://localhost:6379/4
+ #
  # 多实例配置 (格式: MQ__{{INSTANCE}}__{{FIELD}}):
- # MQ__DEFAULT__BACKEND=redis
+ # MQ__DEFAULT__BACKEND=redis_stream
  # MQ__DEFAULT__URL=redis://localhost:6379/4
- # MQ__DEFAULT__MAX_CONNECTIONS=10
  #
- # RabbitMQ 后端:
+ # Redis List 后端 (简单 FIFO 队列):
+ # MQ__SIMPLE__BACKEND=redis
+ # MQ__SIMPLE__URL=redis://localhost:6379/4
+ #
+ # Redis Stream 后端 (推荐,支持消费者组/多实例消费):
+ # MQ__TASKS__BACKEND=redis_stream
+ # MQ__TASKS__URL=redis://localhost:6379/4
+ #
+ # RabbitMQ 后端 (复杂消息路由):
  # MQ__ORDERS__BACKEND=rabbitmq
  # MQ__ORDERS__URL=amqp://guest:guest@localhost:5672/orders
- # MQ__ORDERS__PREFETCH_COUNT=10

  # =============================================================================
  # 事件总线配置 (EVENT__)
@@ -107,14 +107,14 @@ class AlertRule:
          # 检查路径
          if self._path_regex:
              endpoint = event.metadata.get("endpoint", "")
-             if not self._path_regex.match(endpoint):
+             if not self._path_regex.fullmatch(endpoint):
                  return False

          # 检查排除路径
          if self._exclude_regexes:
              endpoint = event.metadata.get("endpoint", "")
              for exclude_regex in self._exclude_regexes:
-                 if exclude_regex.match(endpoint):
+                 if exclude_regex.fullmatch(endpoint):
                      return False  # 匹配到排除规则,不触发告警

          # 检查阈值(对于 slow_* 类型)
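
The switch from `match()` to `fullmatch()` means a rule's path pattern must now cover the whole endpoint rather than just a prefix of it. A standard-library illustration (the pattern below is illustrative, not taken from the package):

```python
import re

pattern = re.compile(r"/api/v1/health")

# match() anchors only at the start, so longer endpoints were also matched before:
assert pattern.match("/api/v1/health/deep") is not None

# fullmatch() requires the entire endpoint to match the pattern:
assert pattern.fullmatch("/api/v1/health/deep") is None
assert pattern.fullmatch("/api/v1/health") is not None
```
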
@@ -181,7 +181,23 @@ class AlertingSpanProcessor:
              or name
          )

-         return any(regex.match(path) for regex in self._exclude_regexes)
+         # 检查所有可能的路径来源
+         paths_to_check = [path]
+
+         # 也检查 span name 中的路径(可能包含 HTTP 方法和后缀)
+         # 例如 "GET /api/v1/spaces/{space_id}/subscribe http receive"
+         if name and name != path:
+             # 尝试提取 span name 中的路径部分
+             parts = name.split()
+             for part in parts:
+                 if part.startswith("/"):
+                     paths_to_check.append(part)
+
+         for p in paths_to_check:
+             if any(regex.fullmatch(p) for regex in self._exclude_regexes):
+                 return True
+
+         return False

      def _emit_slow_alert(
          self,
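
A rough trace of the new candidate-path collection, using an illustrative span name and exclude pattern (neither is taken from the package):

```python
import re

exclude = re.compile(r"/api/v1/spaces/[^/]+/subscribe")

# Span name carrying an HTTP method and a suffix, as in the comment above.
name = "GET /api/v1/spaces/{space_id}/subscribe http receive"

# The span name as a whole never fullmatches the exclude pattern...
assert exclude.fullmatch(name) is None

# ...but the path token extracted from it does, so the slow-span alert is suppressed.
candidates = [part for part in name.split() if part.startswith("/")]
assert candidates == ["/api/v1/spaces/{space_id}/subscribe"]
assert any(exclude.fullmatch(p) for p in candidates)
```
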
@@ -2,8 +2,10 @@

  from .rabbitmq import RabbitMQ
  from .redis import RedisMQ
+ from .redis_stream import RedisStreamMQ

  __all__ = [
      "RabbitMQ",
      "RedisMQ",
+     "RedisStreamMQ",
  ]
@@ -0,0 +1,428 @@
+ """Redis Stream 消息队列后端。
+
+ 使用 Redis Stream 实现支持消费者组的消息队列。
+ 相比 Redis List,提供更强的持久化和消费保证。
+ """
+
+ from __future__ import annotations
+
+ import asyncio
+ from collections.abc import Callable
+ import json
+ from typing import TYPE_CHECKING, Any
+
+ from aury.boot.common.logging import logger
+
+ from ..base import IMQ, MQMessage
+
+ if TYPE_CHECKING:
+     from aury.boot.infrastructure.clients.redis import RedisClient
+
+
+ class RedisStreamMQ(IMQ):
+     """Redis Stream 消息队列实现。
+
+     使用 Redis Stream (XADD/XREADGROUP/XACK) 实现可靠的消息队列。
+
+     特性:
+     - 消费者组支持多实例消费
+     - 消息持久化 (配合 AOF)
+     - 消息确认机制
+     - 支持消息重放
+     """
+
+     def __init__(
+         self,
+         url: str | None = None,
+         *,
+         redis_client: RedisClient | None = None,
+         prefix: str = "stream:",
+         consumer_group: str = "default",
+         consumer_name: str | None = None,
+         max_len: int | None = None,
+     ) -> None:
+         """初始化 Redis Stream 消息队列。
+
+         Args:
+             url: Redis 连接 URL(当 redis_client 为 None 时必须提供)
+             redis_client: RedisClient 实例(可选,优先使用)
+             prefix: 队列名称前缀
+             consumer_group: 消费者组名称
+             consumer_name: 消费者名称(默认自动生成)
+             max_len: Stream 最大长度(可选,用于自动裁剪)
+
+         Raises:
+             ValueError: 当 url 和 redis_client 都为 None 时
+         """
+         if redis_client is None and url is None:
+             raise ValueError("Redis Stream 消息队列需要提供 url 或 redis_client 参数")
+
+         self._url = url
+         self._client = redis_client
+         self._prefix = prefix
+         self._consumer_group = consumer_group
+         self._consumer_name = consumer_name or f"consumer-{id(self)}"
+         self._max_len = max_len
+         self._consuming = False
+         self._owns_client = False
+
+     async def _ensure_client(self) -> None:
+         """确保 Redis 客户端已初始化。"""
+         if self._client is None and self._url:
+             from aury.boot.infrastructure.clients.redis import RedisClient
+             # 创建独立实例(不使用 get_instance 避免和全局实例冲突)
+             self._client = RedisClient(name=f"mq-{id(self)}")
+             self._client.configure(url=self._url)
+             await self._client.initialize()
+             self._owns_client = True
+
+     def _stream_key(self, queue: str) -> str:
+         """获取 Stream 的 Redis key。"""
+         return f"{self._prefix}{queue}"
+
+     async def _ensure_group(self, queue: str) -> None:
+         """确保消费者组存在。"""
+         stream_key = self._stream_key(queue)
+         try:
+             await self._client.connection.xgroup_create(
+                 stream_key,
+                 self._consumer_group,
+                 id="0",
+                 mkstream=True,
+             )
+             logger.debug(f"创建消费者组: {self._consumer_group} on {stream_key}")
+         except Exception as e:
+             # 组已存在,忽略
+             if "BUSYGROUP" not in str(e):
+                 raise
+
+     async def send(self, queue: str, message: MQMessage) -> str:
+         """发送消息到 Stream。
+
+         使用 XADD 命令,支持 MAXLEN 自动裁剪。
+         """
+         await self._ensure_client()
+         message.queue = queue
+
+         # 序列化消息
+         data = {
+             "payload": json.dumps(message.to_dict()),
+         }
+
+         stream_key = self._stream_key(queue)
+
+         # XADD with optional MAXLEN
+         if self._max_len:
+             msg_id = await self._client.connection.xadd(
+                 stream_key,
+                 data,
+                 maxlen=self._max_len,
+                 approximate=True,  # ~ 近似裁剪,性能更好
+             )
+         else:
+             msg_id = await self._client.connection.xadd(stream_key, data)
+
+         logger.debug(f"发送消息到 Stream: {stream_key}, id={msg_id}")
+         return message.id
+
+     async def receive(
+         self,
+         queue: str,
+         timeout: float | None = None,
+     ) -> MQMessage | None:
+         """从 Stream 接收消息(不使用消费者组)。
+
+         用于简单场景,直接 XREAD 读取最新消息。
+         """
+         await self._ensure_client()
+
+         stream_key = self._stream_key(queue)
+         timeout_ms = int(timeout * 1000) if timeout else 0
+
+         result = await self._client.connection.xread(
+             streams={stream_key: "$"},
+             count=1,
+             block=timeout_ms,
+         )
+
+         if not result:
+             return None
+
+         # 解析结果: [[stream_key, [(msg_id, data)]]]
+         for stream, messages in result:
+             for msg_id, data in messages:
+                 try:
+                     payload = data.get(b"payload") or data.get("payload")
+                     if isinstance(payload, bytes):
+                         payload = payload.decode()
+                     msg_dict = json.loads(payload)
+                     message = MQMessage.from_dict(msg_dict)
+                     message._stream_id = msg_id  # 保存 stream ID 用于 ACK
+                     return message
+                 except (json.JSONDecodeError, KeyError) as e:
+                     logger.error(f"解析消息失败: {e}")
+                     return None
+
+         return None
+
+     async def receive_group(
+         self,
+         queue: str,
+         timeout: float | None = None,
+     ) -> MQMessage | None:
+         """从 Stream 接收消息(使用消费者组)。
+
+         使用 XREADGROUP 从消费者组读取,支持多实例消费。
+         """
+         await self._ensure_client()
+         await self._ensure_group(queue)
+
+         stream_key = self._stream_key(queue)
+         timeout_ms = int(timeout * 1000) if timeout else 0
+
+         result = await self._client.connection.xreadgroup(
+             groupname=self._consumer_group,
+             consumername=self._consumer_name,
+             streams={stream_key: ">"},  # > 表示只读取新消息
+             count=1,
+             block=timeout_ms,
+         )
+
+         if not result:
+             return None
+
+         # 解析结果
+         for stream, messages in result:
+             for msg_id, data in messages:
+                 try:
+                     payload = data.get(b"payload") or data.get("payload")
+                     if isinstance(payload, bytes):
+                         payload = payload.decode()
+                     msg_dict = json.loads(payload)
+                     message = MQMessage.from_dict(msg_dict)
+                     message._stream_id = msg_id  # 保存用于 ACK
+                     message.queue = queue
+                     return message
+                 except (json.JSONDecodeError, KeyError) as e:
+                     logger.error(f"解析消息失败: {e}")
+                     # ACK 损坏的消息,防止阻塞
+                     await self._client.connection.xack(
+                         stream_key, self._consumer_group, msg_id
+                     )
+                     return None
+
+         return None
+
+     async def ack(self, message: MQMessage) -> None:
+         """确认消息已处理。"""
+         if not message.queue:
+             return
+
+         stream_id = getattr(message, "_stream_id", None)
+         if stream_id:
+             stream_key = self._stream_key(message.queue)
+             await self._client.connection.xack(
+                 stream_key, self._consumer_group, stream_id
+             )
+             logger.debug(f"ACK 消息: {stream_id}")
+
+     async def nack(self, message: MQMessage, requeue: bool = True) -> None:
+         """拒绝消息。
+
+         Redis Stream 没有原生 NACK,通过重新发送实现。
+         """
+         if not message.queue:
+             return
+
+         stream_id = getattr(message, "_stream_id", None)
+         if stream_id:
+             stream_key = self._stream_key(message.queue)
+             # 先 ACK 原消息
+             await self._client.connection.xack(
+                 stream_key, self._consumer_group, stream_id
+             )
+
+         if requeue and message.retry_count < message.max_retries:
+             # 重新发送
+             message.retry_count += 1
+             await self.send(message.queue, message)
+             logger.debug(f"NACK 重新入队: {message.id}, retry={message.retry_count}")
+
+     async def consume(
+         self,
+         queue: str,
+         handler: Callable[[MQMessage], Any],
+         *,
+         prefetch: int = 1,
+     ) -> None:
+         """消费队列消息(使用消费者组)。"""
+         self._consuming = True
+         await self._ensure_group(queue)
+         logger.info(f"开始消费 Stream: {queue}, group={self._consumer_group}")
+
+         while self._consuming:
+             try:
+                 message = await self.receive_group(queue, timeout=1.0)
+                 if message is None:
+                     continue
+
+                 try:
+                     result = handler(message)
+                     if asyncio.iscoroutine(result):
+                         await result
+                     await self.ack(message)
+                 except Exception as e:
+                     logger.error(f"处理消息失败: {e}")
+                     await self.nack(message, requeue=True)
+
+             except Exception as e:
+                 logger.error(f"消费消息异常: {e}")
+                 await asyncio.sleep(1)
+
+     async def read_all(
+         self,
+         queue: str,
+         start: str = "-",
+         end: str = "+",
+         count: int | None = None,
+     ) -> list[MQMessage]:
+         """读取 Stream 中的所有消息(用于 compaction)。
+
+         使用 XRANGE 读取指定范围的消息。
+
+         Args:
+             queue: 队列名称
+             start: 起始 ID("-" 表示最早)
+             end: 结束 ID("+" 表示最新)
+             count: 最大数量
+
+         Returns:
+             消息列表
+         """
+         await self._ensure_client()
+         stream_key = self._stream_key(queue)
+
+         result = await self._client.connection.xrange(
+             stream_key,
+             min=start,
+             max=end,
+             count=count,
+         )
+
+         messages = []
+         for msg_id, data in result:
+             try:
+                 payload = data.get(b"payload") or data.get("payload")
+                 if isinstance(payload, bytes):
+                     payload = payload.decode()
+                 msg_dict = json.loads(payload)
+                 message = MQMessage.from_dict(msg_dict)
+                 message._stream_id = msg_id
+                 messages.append(message)
+             except (json.JSONDecodeError, KeyError) as e:
+                 logger.warning(f"跳过损坏的消息 {msg_id}: {e}")
+
+         return messages
+
+     async def read_blocking(
+         self,
+         queue: str,
+         last_id: str = "$",
+         count: int = 10,
+         block_ms: int = 100,
+     ) -> list[MQMessage]:
+         """阻塞读取 Stream 中的新消息(使用 XREAD BLOCK)。
+
+         Args:
+             queue: 队列名称
+             last_id: 起始 ID("$" 表示只等待新消息,"0" 表示从开头)
+             count: 最大读取数量
+             block_ms: 阻塞等待超时(毫秒),0 表示不阻塞
+
+         Returns:
+             消息列表
+         """
+         await self._ensure_client()
+         stream_key = self._stream_key(queue)
+
+         result = await self._client.connection.xread(
+             streams={stream_key: last_id},
+             count=count,
+             block=block_ms,
+         )
+
+         if not result:
+             return []
+
+         messages = []
+         for stream, stream_messages in result:
+             for msg_id, data in stream_messages:
+                 try:
+                     payload = data.get(b"payload") or data.get("payload")
+                     if isinstance(payload, bytes):
+                         payload = payload.decode()
+                     msg_dict = json.loads(payload)
+                     message = MQMessage.from_dict(msg_dict)
+                     message._stream_id = msg_id
+                     messages.append(message)
+                 except (json.JSONDecodeError, KeyError) as e:
+                     logger.warning(f"跳过损坏的消息 {msg_id}: {e}")
+
+         return messages
+
+     async def trim(
+         self,
+         queue: str,
+         *,
+         maxlen: int | None = None,
+         minid: str | None = None,
+     ) -> int:
+         """裁剪 Stream。
+
+         Args:
+             queue: 队列名称
+             maxlen: 保留的最大长度
+             minid: 保留此 ID 之后的消息
+
+         Returns:
+             删除的消息数量
+         """
+         await self._ensure_client()
+         stream_key = self._stream_key(queue)
+
+         if minid:
+             return await self._client.connection.xtrim(
+                 stream_key, minid=minid, approximate=False
+             )
+         elif maxlen is not None:
+             # maxlen=0 也应该生效(清空 stream)
+             return await self._client.connection.xtrim(
+                 stream_key, maxlen=maxlen, approximate=False
+             )
+         return 0
+
+     async def delete_stream(self, queue: str) -> bool:
+         """删除整个 Stream。"""
+         await self._ensure_client()
+         stream_key = self._stream_key(queue)
+         return await self._client.connection.delete(stream_key) > 0
+
+     async def stream_info(self, queue: str) -> dict[str, Any]:
+         """获取 Stream 信息。"""
+         await self._ensure_client()
+         stream_key = self._stream_key(queue)
+         try:
+             return await self._client.connection.xinfo_stream(stream_key)
+         except Exception:
+             return {}
+
+     async def close(self) -> None:
+         """关闭连接。"""
+         self._consuming = False
+         if self._owns_client and self._client:
+             await self._client.close()
+             self._client = None
+         logger.debug("Redis Stream 消息队列已关闭")
+
+
+ __all__ = ["RedisStreamMQ"]
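
A minimal usage sketch of the backend added above, based only on the constructor and methods in this file and on the `MQMessage` import path shown in the MQ documentation template; the Redis URL, queue name, and consumer-group name are illustrative:

```python
import asyncio

from aury.boot.infrastructure.mq import MQMessage
from aury.boot.infrastructure.mq.backends.redis_stream import RedisStreamMQ


async def main() -> None:
    mq = RedisStreamMQ(
        url="redis://localhost:6379/0",
        consumer_group="billing",  # XGROUP CREATE ... MKSTREAM happens lazily
        max_len=10_000,            # optional: approximate MAXLEN trimming on XADD
    )

    # Publish a message (stored as a JSON "payload" field via XADD).
    await mq.send("orders", MQMessage(body={"order_id": "123", "action": "created"}))

    # Replay recent history without consuming it (XRANGE under the hood).
    for message in await mq.read_all("orders", count=10):
        print(message.body)

    await mq.close()


asyncio.run(main())
```
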
@@ -18,6 +18,7 @@ class MQBackend(Enum):
      """消息队列后端类型。"""

      REDIS = "redis"
+     REDIS_STREAM = "redis_stream"
      RABBITMQ = "rabbitmq"


@@ -12,6 +12,7 @@ from aury.boot.common.logging import logger

  from .backends.rabbitmq import RabbitMQ
  from .backends.redis import RedisMQ
+ from .backends.redis_stream import RedisStreamMQ
  from .base import IMQ, MQBackend, MQMessage

  if TYPE_CHECKING:
@@ -127,6 +128,8 @@ class MQManager:
          # 根据后端类型创建实例,参数校验由后端自己处理
          if backend == MQBackend.REDIS:
              self._backend = RedisMQ(url=url, redis_client=redis_client, prefix=prefix)
+         elif backend == MQBackend.REDIS_STREAM:
+             self._backend = RedisStreamMQ(url=url, redis_client=redis_client, prefix=prefix)
          elif backend == MQBackend.RABBITMQ:
              self._backend = RabbitMQ(url=url)
          else:
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: aury-boot
- Version: 0.0.35
+ Version: 0.0.37
  Summary: Aury Boot - 基于 FastAPI 生态的企业级 API 开发框架
  Requires-Python: >=3.13
  Requires-Dist: alembic>=1.17.2
@@ -1,5 +1,5 @@
  aury/boot/__init__.py,sha256=pCno-EInnpIBa1OtxNYF-JWf9j95Cd2h6vmu0xqa_-4,1791
- aury/boot/_version.py,sha256=w77E3DIE0tp22UaCyYn9461JfWMpEPhsv91t-4n4Bjw,706
+ aury/boot/_version.py,sha256=BV1Ma7idHU0hCvzfaqU-vGA4N-4x9mQtKafasKpzadA,706
  aury/boot/application/__init__.py,sha256=I2KqNVdYg2q5nlOXr0TtFGyHmhj4oWdaR6ZB73Mwg7Y,3041
  aury/boot/application/adapter/__init__.py,sha256=e1bcSb1bxUMfofTwiCuHBZJk5-STkMCWPF2EJXHQ7UU,3976
  aury/boot/application/adapter/base.py,sha256=Ar_66fiHPDEmV-1DKnqXKwc53p3pozG31bgTJTEUriY,15763
@@ -14,7 +14,7 @@ aury/boot/application/app/middlewares.py,sha256=BXe2H14FHzJUVpQM6DZUm-zfZRXSXIi1
  aury/boot/application/app/startup.py,sha256=DHKt3C2G7V5XfFr1SQMl14tNzcuDd9MqUVAxi274HDQ,7873
  aury/boot/application/config/__init__.py,sha256=Dd-myRSBCM18DXXsi863h0cJG5VFrI10xMRtjnvelGo,1894
  aury/boot/application/config/multi_instance.py,sha256=RXSp-xP8-bKMDEhq3SeL7T3lS8-vpRlvBEVBuZVjVK4,6475
- aury/boot/application/config/settings.py,sha256=XGPoA8qFYxmnStmsboCBRHn1OAuniXCWzH7jAtrq4ho,37517
+ aury/boot/application/config/settings.py,sha256=JZuLVKH13cuBdlHdtrt7ZZ4d7KD8as5DWBv0d9enHDk,38026
  aury/boot/application/constants/__init__.py,sha256=DCXs13_VVaQWHqO-qpJoZwRd7HIexiirtw_nu8msTXE,340
  aury/boot/application/constants/components.py,sha256=I4SlsF2DpSzMiLsi1wVrEmdHn4yV5J2h3ikMQqufPmM,1120
  aury/boot/application/constants/scheduler.py,sha256=S77FBIvHlyruvlabRWZJ2J1YAs2xWXPQI2yuGdGUDNA,471
@@ -47,7 +47,7 @@ aury/boot/commands/config.py,sha256=gPkG_jSWrXidjpyVdzABH7uRhoCgX5yrOcdKabtX5wY,
  aury/boot/commands/docker.py,sha256=7mKorZCPZgxH1XFslzo6W-uzpe61hGXz86JKOhOeBlo,9006
  aury/boot/commands/docs.py,sha256=Hz1W-2TW8DzaPxARqEF4UncPhGMI9h97jJ962dlox3U,14327
  aury/boot/commands/generate.py,sha256=WZieSXuofxJOC7NBiVGpBigB9NZ4GMcF2F1ReTNun1I,44420
- aury/boot/commands/init.py,sha256=W_eCL3wydWaMSLqTpadREDnzC0w-LGgNnj3IBjuQAfA,32348
+ aury/boot/commands/init.py,sha256=6reBpZ5jS4O9QTfXHKt4MCXPn3WcubjUfOtB5tKdy0s,32349
  aury/boot/commands/pkg.py,sha256=bw0QPptKscNgQ4I1SfSehTio9Q5KrvxgvkYx4tbZ7Vs,14495
  aury/boot/commands/scheduler.py,sha256=XO3Gq7PqNxXNz5Gw0xNUHa_bEnAKZ9AkzLc062QJ3j8,3669
  aury/boot/commands/worker.py,sha256=OEvfDiiM_pV3Mj73HKhSm1RNqFPuS125iNM0qNCTHFY,4316
@@ -55,13 +55,13 @@ aury/boot/commands/migrate/__init__.py,sha256=W9OhkX8ILdolySofgdP2oYoJGG9loQd5Fe
  aury/boot/commands/migrate/app.py,sha256=phCMKW6cuFYW2wr6PSMSCq0K2uUCiYo3UiFd0_UvA_o,1327
  aury/boot/commands/migrate/commands.py,sha256=892htS_pTtpejLGqRP8bc3xXJPG92WwAejHlY74oI3o,9950
  aury/boot/commands/server/__init__.py,sha256=aP3bPNGn6wT8dHa_OmKw1Dexnxuvf0BhrGA6pEUcsVM,319
- aury/boot/commands/server/app.py,sha256=-A52dLgerab98IM50a-_ptFb0xlMvbdbhYjqoJIIIpU,15795
+ aury/boot/commands/server/app.py,sha256=9QQs7PHOw-CBxm9-soccB5v-mElO3G4-WZDWJQebpp0,16010
  aury/boot/commands/templates/generate/api.py.tpl,sha256=xTbk9uzn5IMtJ-SPMadjmOUNHoM3WoE6g-TIEsGHFUA,3153
  aury/boot/commands/templates/generate/model.py.tpl,sha256=knFwMyGZ7wMpzH4_bQD_V1hFTvmCb2H04G8p3s2xvyA,312
  aury/boot/commands/templates/generate/repository.py.tpl,sha256=Uj9jNEI9Zn8W061FGFlRaIfAy9IhdassYH6noEjG0z0,662
  aury/boot/commands/templates/generate/schema.py.tpl,sha256=HIaY5B0UG_S188nQLrZDEJ0q73WPdb7BmCdc0tseZA4,545
  aury/boot/commands/templates/generate/service.py.tpl,sha256=2hwQ8e4a5d_bIMx_jGDobdmKPMFLBlfQrQVQH4Ym5k4,1842
- aury/boot/commands/templates/project/AGENTS.md.tpl,sha256=sp5qyzU-SGhgQCobpMW4EXRzpGsEsVdmJvspnKAP4AQ,10059
+ aury/boot/commands/templates/project/AGENTS.md.tpl,sha256=KdCqZJeI6cRwjTxX0B2u9fcGQKUi2fJrIjTJBkm3c4U,10063
  aury/boot/commands/templates/project/README.md.tpl,sha256=oCeBiukk6Pa3hrCKybkfM2sIRHsPZ15nlwuFTUSFDwY,2459
  aury/boot/commands/templates/project/admin_console_init.py.tpl,sha256=K81L14thyEhRA8lFCQJVZL_NU22-sBz0xS68MJPeoCo,1541
  aury/boot/commands/templates/project/alert_rules.example.yaml.tpl,sha256=QZH6SC5TcUhgX_2JRXk0k0g26wJf9xNwsdquiEIgg-I,2492
@@ -83,7 +83,7 @@ aury/boot/commands/templates/project/aury_docs/10-storage.md.tpl,sha256=mhe0j0S5
  aury/boot/commands/templates/project/aury_docs/11-logging.md.tpl,sha256=bwxFCGQsO9cTEbwqJF1xcjsZKP82HRWhIMRUS0c9_ZI,2435
  aury/boot/commands/templates/project/aury_docs/12-admin.md.tpl,sha256=6z3mN54qP2jtpTFOJBLVexvEv0ZHXYKjncvpZG4yOdw,1883
  aury/boot/commands/templates/project/aury_docs/13-channel.md.tpl,sha256=aGpf2phQBMRs6Uh1DfjNl06pC_niea91Sm8sTq_NFec,4443
- aury/boot/commands/templates/project/aury_docs/14-mq.md.tpl,sha256=4bxLQBbCi0Fue0VQWOPt6acZ5P00BoLkCoLPQe_8k4U,2396
+ aury/boot/commands/templates/project/aury_docs/14-mq.md.tpl,sha256=irrKal6y8pPAjifntnOLfRzzllvBQinKMjIjKd-_ANc,4016
  aury/boot/commands/templates/project/aury_docs/15-events.md.tpl,sha256=a4wQRgVPuYUGTGmw_lX1HJH_yFTbD30mBz7Arc4zgfs,3361
  aury/boot/commands/templates/project/aury_docs/16-adapter.md.tpl,sha256=pkmJkZw2Ca6_uYk2jZvAb8DozjBa2tWq_t3gtq1lFSk,11456
  aury/boot/commands/templates/project/aury_docs/17-alerting.md.tpl,sha256=2MosApSAuGerBw7SOO-ihk4NTp2qEkgOUyu6pS2m0UY,5709
@@ -93,7 +93,7 @@ aury/boot/commands/templates/project/env_templates/admin.tpl,sha256=wWt3iybOpBHt
  aury/boot/commands/templates/project/env_templates/cache.tpl,sha256=_sK-p_FECj4mVvggNvgb4Wu0yGii0Ocz560syG7DU2c,498
  aury/boot/commands/templates/project/env_templates/database.tpl,sha256=2lWzTKt4X0SpeBBCkrDV90Di4EfoAuqYzhVsh74vTUI,907
  aury/boot/commands/templates/project/env_templates/log.tpl,sha256=x5rkrEFJISH0gaCcr-wTCbDYtyFnlLNJpY789fqjZgc,754
- aury/boot/commands/templates/project/env_templates/messaging.tpl,sha256=SzPRKwN0wO5e1kpjkSwpPJfVmiUDzZkK4Qm-qNsCvVE,2178
+ aury/boot/commands/templates/project/env_templates/messaging.tpl,sha256=AgfsXTRnvDySFERoCVop89jsC_h8hzj1sPeq5MczSXM,2462
  aury/boot/commands/templates/project/env_templates/monitoring.tpl,sha256=Zq0xQzDrCRtbeLCQB3pkEE2p8FFED6IjQo4TqMyd_P8,2584
  aury/boot/commands/templates/project/env_templates/rpc.tpl,sha256=FhweCFakawGLSs01a_BkmZo11UhWax2-VCBudHj68WA,1163
  aury/boot/commands/templates/project/env_templates/scheduler.tpl,sha256=c8Grcs1rgBB58RHlxqmDMPHQl8BnbcqNW473ctmsojU,752
@@ -173,7 +173,7 @@ aury/boot/infrastructure/monitoring/alerting/__init__.py,sha256=UvUsMhSZeGJOjZy0
  aury/boot/infrastructure/monitoring/alerting/aggregator.py,sha256=fiI-lBSqWxXv1eVPfaDNjcigX-81w41fcmhD_vN_XSs,5805
  aury/boot/infrastructure/monitoring/alerting/events.py,sha256=zJvTevQ-9JflIDyYVo1BRzOVyAGhdgEfRlMsD0NcBgM,4056
  aury/boot/infrastructure/monitoring/alerting/manager.py,sha256=vdWox9Pnjl_0IIE6w-Ne9R17IUrqtF9CPhZHwZvke6E,16044
- aury/boot/infrastructure/monitoring/alerting/rules.py,sha256=XcXJXWVrPpdZKKz63BiVWmwkKitIaNQWBfJATrSzG1M,6116
+ aury/boot/infrastructure/monitoring/alerting/rules.py,sha256=FdyGOolQJF31fN_9mqRGi9i_x2JqtoHEOkNOcPyO07o,6124
  aury/boot/infrastructure/monitoring/alerting/notifiers/__init__.py,sha256=dsfxThPHO_Ofb3Wo_dYlL8HvP_N63pb_S_UXm_qSxF8,321
  aury/boot/infrastructure/monitoring/alerting/notifiers/base.py,sha256=_RXZMzWX-YeTG0Up1U8CwK8ADfX34dd0Sh56ugfqOWM,1462
  aury/boot/infrastructure/monitoring/alerting/notifiers/feishu.py,sha256=JAMJiCNRYoDeJrYn29ew_ZVXDGq8OLgiFApRWd4iPY0,7134
@@ -182,15 +182,16 @@ aury/boot/infrastructure/monitoring/health/__init__.py,sha256=nqwFFXl6J9yTfQa1JL
  aury/boot/infrastructure/monitoring/tracing/__init__.py,sha256=YizkpnhY-bcUUcd8YaDzUsluMflhNOH1dAKdVtkW05U,1287
  aury/boot/infrastructure/monitoring/tracing/context.py,sha256=s_k2MzNl4LDDpei9xUP6TFW5BwZneoQg44RPaw95jac,978
  aury/boot/infrastructure/monitoring/tracing/logging.py,sha256=gzuKa1ZiyY4z06fHNTbjgZasS6mLftSEaZQQ-Z6J_RE,2041
- aury/boot/infrastructure/monitoring/tracing/processor.py,sha256=qc37YmS8rslpwqAYHrBDzVvNWmXRIFEwpld34NMmByk,12640
+ aury/boot/infrastructure/monitoring/tracing/processor.py,sha256=36hoiyQ25sk55k7D4vDAKXt7l9d4wCNkZMTpwlPwg_Y,13224
  aury/boot/infrastructure/monitoring/tracing/provider.py,sha256=AnPHUDHnfrCB48WHjp9vLBhCh9BpyfWb3DHGRh6Din4,11553
  aury/boot/infrastructure/monitoring/tracing/tracing.py,sha256=BeWL-FYtlQ05r05wGJ6qjTSpypgCp-7OzdNnZ3uunB0,6890
  aury/boot/infrastructure/mq/__init__.py,sha256=Q7kBk_GeQnxnqkyp29Bh1yFH3Q8xxxjs8oDYLeDj8C0,498
- aury/boot/infrastructure/mq/base.py,sha256=kHrWUysWflMj3qyOnioLZ90it8d9Alq1Wb4PYhpBW4k,3396
- aury/boot/infrastructure/mq/manager.py,sha256=DVXOQhoqx9dz9INajWiAxLnKjLaP-otKmdiBUzxgsAY,7502
- aury/boot/infrastructure/mq/backends/__init__.py,sha256=YRByNFWv0VFibslQR5v4h5XxfSX-HIveCfb6W1jXM54,139
+ aury/boot/infrastructure/mq/base.py,sha256=ld4wtzhO_6y8wJRXL1DagqJiwhd0VQ6MJlJGDQoL6A8,3430
+ aury/boot/infrastructure/mq/manager.py,sha256=Bu4E1Tgz0CzFvJuCS9_fBMj9eAqmXcZp8aFIYhvNUl4,7692
+ aury/boot/infrastructure/mq/backends/__init__.py,sha256=10nggw2V-AzuZ1vvzq_ksoXR4FI3e4BR36EfY49Pek4,200
  aury/boot/infrastructure/mq/backends/rabbitmq.py,sha256=0NWgPKEwtbmI63EVvKINdfXXDNyOvuOOP9LlBzqH91E,5493
  aury/boot/infrastructure/mq/backends/redis.py,sha256=i8KECToIFEZ6CnHyNCk34_xdff5ioK172_knOy6EeUU,5279
+ aury/boot/infrastructure/mq/backends/redis_stream.py,sha256=hbSX03d0BLshE10GryjyvqybtjDBOS4pQUsNHhQFB-Q,14420
  aury/boot/infrastructure/scheduler/__init__.py,sha256=eTRJ5dSPcKvyFvLVtraoQteXTTDDGwIrmw06J2hoNdA,323
  aury/boot/infrastructure/scheduler/exceptions.py,sha256=ROltrhSctVWA-6ulnjuYeHAk3ZF-sykDoesuierYzew,634
  aury/boot/infrastructure/scheduler/manager.py,sha256=OHQOHQlcoN8yFnky4kfuhsEIk39qX6nLZ7xJ51tfg68,23130
@@ -209,7 +210,7 @@ aury/boot/testing/client.py,sha256=KOg1EemuIVsBG68G5y0DjSxZGcIQVdWQ4ASaHE3o1R0,4
  aury/boot/testing/factory.py,sha256=8GvwX9qIDu0L65gzJMlrWB0xbmJ-7zPHuwk3eECULcg,5185
  aury/boot/toolkit/__init__.py,sha256=AcyVb9fDf3CaEmJPNkWC4iGv32qCPyk4BuFKSuNiJRQ,334
  aury/boot/toolkit/http/__init__.py,sha256=zIPmpIZ9Qbqe25VmEr7jixoY2fkRbLm7NkCB9vKpg6I,11039
- aury_boot-0.0.35.dist-info/METADATA,sha256=fyQTvIB-zPVWMQblf2xQC-bPoTyALLCe5Snkc9WG1fA,8694
- aury_boot-0.0.35.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- aury_boot-0.0.35.dist-info/entry_points.txt,sha256=f9KXEkDIGc0BGkgBvsNx_HMz9VhDjNxu26q00jUpDwQ,49
- aury_boot-0.0.35.dist-info/RECORD,,
+ aury_boot-0.0.37.dist-info/METADATA,sha256=8Kgz7a5fhou-Ki53QzjfwAfv68cE8iu83UWQ8aR6BOI,8694
+ aury_boot-0.0.37.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ aury_boot-0.0.37.dist-info/entry_points.txt,sha256=f9KXEkDIGc0BGkgBvsNx_HMz9VhDjNxu26q00jUpDwQ,49
+ aury_boot-0.0.37.dist-info/RECORD,,