basion-agent 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- basion_agent/__init__.py +62 -0
- basion_agent/agent.py +360 -0
- basion_agent/agent_state_client.py +149 -0
- basion_agent/app.py +502 -0
- basion_agent/artifact.py +58 -0
- basion_agent/attachment_client.py +153 -0
- basion_agent/checkpoint_client.py +169 -0
- basion_agent/checkpointer.py +16 -0
- basion_agent/cli.py +139 -0
- basion_agent/conversation.py +103 -0
- basion_agent/conversation_client.py +86 -0
- basion_agent/conversation_message.py +48 -0
- basion_agent/exceptions.py +36 -0
- basion_agent/extensions/__init__.py +1 -0
- basion_agent/extensions/langgraph.py +526 -0
- basion_agent/extensions/pydantic_ai.py +180 -0
- basion_agent/gateway_client.py +531 -0
- basion_agent/gateway_pb2.py +73 -0
- basion_agent/gateway_pb2_grpc.py +101 -0
- basion_agent/heartbeat.py +84 -0
- basion_agent/loki_handler.py +355 -0
- basion_agent/memory.py +73 -0
- basion_agent/memory_client.py +155 -0
- basion_agent/message.py +333 -0
- basion_agent/py.typed +0 -0
- basion_agent/streamer.py +184 -0
- basion_agent/structural/__init__.py +6 -0
- basion_agent/structural/artifact.py +94 -0
- basion_agent/structural/base.py +71 -0
- basion_agent/structural/stepper.py +125 -0
- basion_agent/structural/surface.py +90 -0
- basion_agent/structural/text_block.py +96 -0
- basion_agent/tools/__init__.py +19 -0
- basion_agent/tools/container.py +46 -0
- basion_agent/tools/knowledge_graph.py +306 -0
- basion_agent-0.4.0.dist-info/METADATA +880 -0
- basion_agent-0.4.0.dist-info/RECORD +41 -0
- basion_agent-0.4.0.dist-info/WHEEL +5 -0
- basion_agent-0.4.0.dist-info/entry_points.txt +2 -0
- basion_agent-0.4.0.dist-info/licenses/LICENSE +21 -0
- basion_agent-0.4.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
"""Pydantic AI extension for basion_agent.
|
|
2
|
+
|
|
3
|
+
This module provides integration with Pydantic AI, enabling persistent
|
|
4
|
+
message history storage via the agent-state API.
|
|
5
|
+
|
|
6
|
+
Usage:
|
|
7
|
+
from basion_agent import BasionAgentApp
|
|
8
|
+
from basion_agent.extensions.pydantic_ai import PydanticAIMessageStore
|
|
9
|
+
from pydantic_ai import Agent
|
|
10
|
+
|
|
11
|
+
app = BasionAgentApp(gateway_url="agent-gateway:8080", api_key="key")
|
|
12
|
+
store = PydanticAIMessageStore(app=app)
|
|
13
|
+
|
|
14
|
+
my_agent = Agent('openai:gpt-4o', system_prompt="...")
|
|
15
|
+
|
|
16
|
+
@medical_agent.on_message
|
|
17
|
+
async def handle(message, sender):
|
|
18
|
+
# Load message history
|
|
19
|
+
history = await store.load(message.conversation_id)
|
|
20
|
+
|
|
21
|
+
# Run agent with streaming
|
|
22
|
+
async with medical_agent.streamer(message) as s:
|
|
23
|
+
async with my_agent.run_stream(message.content, message_history=history) as result:
|
|
24
|
+
async for chunk in result.stream_text():
|
|
25
|
+
s.stream(chunk)
|
|
26
|
+
|
|
27
|
+
# Save updated history
|
|
28
|
+
await store.save(message.conversation_id, result.all_messages())
|
|
29
|
+
"""
|
|
30
|
+
|
|
31
|
+
from __future__ import annotations
|
|
32
|
+
|
|
33
|
+
import logging
|
|
34
|
+
from typing import TYPE_CHECKING, List, Any, Optional
|
|
35
|
+
|
|
36
|
+
if TYPE_CHECKING:
|
|
37
|
+
from ..app import BasionAgentApp
|
|
38
|
+
|
|
39
|
+
logger = logging.getLogger(__name__)
|
|
40
|
+
|
|
41
|
+
# Try to import Pydantic AI types, but make them optional
|
|
42
|
+
try:
|
|
43
|
+
from pydantic_ai.messages import (
|
|
44
|
+
ModelMessage,
|
|
45
|
+
ModelMessagesTypeAdapter,
|
|
46
|
+
)
|
|
47
|
+
|
|
48
|
+
PYDANTIC_AI_AVAILABLE = True
|
|
49
|
+
except ImportError:
|
|
50
|
+
PYDANTIC_AI_AVAILABLE = False
|
|
51
|
+
ModelMessage = Any
|
|
52
|
+
ModelMessagesTypeAdapter = None
|
|
53
|
+
|
|
54
|
+
from ..agent_state_client import AgentStateClient
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
class PydanticAIMessageStore:
    """Persistent message store for Pydantic AI.

    Stores and retrieves Pydantic AI native message history using the
    agent-state API. Messages are serialized using Pydantic AI's built-in
    ModelMessagesTypeAdapter for full fidelity.

    Example:
        ```python
        from basion_agent import BasionAgentApp
        from basion_agent.extensions.pydantic_ai import PydanticAIMessageStore
        from pydantic_ai import Agent

        app = BasionAgentApp(gateway_url="...", api_key="...")
        store = PydanticAIMessageStore(app=app)

        my_agent = Agent('openai:gpt-4o')

        # `medical_agent` is a basion_agent Agent registered on `app`.
        @medical_agent.on_message
        async def handle(message, sender):
            history = await store.load(message.conversation_id)

            async with medical_agent.streamer(message) as s:
                async with my_agent.run_stream(
                    message.content,
                    message_history=history
                ) as result:
                    async for chunk in result.stream_text():
                        s.stream(chunk)

            await store.save(message.conversation_id, result.all_messages())
        ```
    """

    # Namespace under which message history is stored in the agent-state API,
    # keeping it separate from other extensions' state.
    NAMESPACE = "pydantic_ai"

    def __init__(self, app: "BasionAgentApp"):
        """Initialize Pydantic AI message store.

        Args:
            app: BasionAgentApp instance to get the gateway client from.

        Raises:
            ImportError: If pydantic-ai is not installed.
        """
        if not PYDANTIC_AI_AVAILABLE:
            raise ImportError(
                "pydantic-ai is required for PydanticAIMessageStore. "
                "Install it with: pip install basion-agent[pydantic]"
            )

        # State is persisted via the gateway's conversation-store proxy.
        base_url = app.gateway_client.conversation_store_url
        self.client = AgentStateClient(base_url)
        self._app = app

    async def load(self, conversation_id: str) -> List[ModelMessage]:
        """Load Pydantic AI message history for a conversation.

        Deserialization failures are logged (with traceback) and treated
        as "no history" so one corrupt record cannot break handling.

        Args:
            conversation_id: The conversation ID to load history for.

        Returns:
            List of ModelMessage objects (empty list if no history).
        """
        result = await self.client.get(conversation_id, self.NAMESPACE)

        if not result:
            logger.debug("No message history found for conversation %s", conversation_id)
            return []

        state = result.get("state", {})
        messages_data = state.get("messages", [])

        if not messages_data:
            return []

        # Deserialize using Pydantic AI's TypeAdapter
        try:
            messages = ModelMessagesTypeAdapter.validate_python(messages_data)
        except Exception as e:
            # Degrade gracefully: incompatible/corrupt data yields empty history.
            logger.exception("Failed to deserialize messages: %s", e)
            return []

        logger.debug(
            "Loaded %d messages for conversation %s", len(messages), conversation_id
        )
        return messages

    async def save(
        self,
        conversation_id: str,
        messages: List[ModelMessage],
    ) -> None:
        """Save Pydantic AI message history for a conversation.

        Args:
            conversation_id: The conversation ID to save history for.
            messages: List of ModelMessage objects to save.

        Raises:
            Exception: Re-raises any serialization error from the TypeAdapter;
                unlike load(), a failed save is not silently ignored.
        """
        # Serialize using Pydantic AI's TypeAdapter (mode="json" produces
        # JSON-safe primitives suitable for the agent-state API).
        try:
            serialized = ModelMessagesTypeAdapter.dump_python(messages, mode="json")
        except Exception as e:
            logger.exception("Failed to serialize messages: %s", e)
            raise

        await self.client.put(
            conversation_id,
            self.NAMESPACE,
            {"messages": serialized},
        )

        logger.debug(
            "Saved %d messages for conversation %s", len(messages), conversation_id
        )

    async def clear(self, conversation_id: str) -> None:
        """Clear message history for a conversation.

        Args:
            conversation_id: The conversation ID to clear history for.
        """
        await self.client.delete(conversation_id, self.NAMESPACE)
        logger.debug("Cleared message history for conversation %s", conversation_id)

    async def close(self) -> None:
        """Close the HTTP client session."""
        await self.client.close()
|
@@ -0,0 +1,531 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Gateway client for connecting to the Agent Gateway service.
|
|
3
|
+
Handles gRPC communication for Kafka and HTTP for other services.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import json
|
|
7
|
+
import logging
|
|
8
|
+
import queue
|
|
9
|
+
import threading
|
|
10
|
+
import time
|
|
11
|
+
import uuid
|
|
12
|
+
from dataclasses import dataclass
|
|
13
|
+
from typing import Callable, Dict, List, Optional, Any, Tuple
|
|
14
|
+
|
|
15
|
+
import grpc
|
|
16
|
+
import requests
|
|
17
|
+
import aiohttp
|
|
18
|
+
|
|
19
|
+
logger = logging.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@dataclass
class KafkaMessage:
    """Represents a message received from Kafka via the gateway."""
    # Topic the record was consumed from.
    topic: str
    # Kafka partition number within the topic.
    partition: int
    # Record offset within its partition.
    offset: int
    # Record key (usually the conversation_id, per GatewayClient.produce).
    key: str
    # Record headers decoded to a plain str -> str mapping.
    headers: Dict[str, str]
    # Payload JSON-decoded into a dict ({} when the body is not valid JSON).
    body: Dict[str, Any]
    # Record timestamp from the gateway — presumably epoch milliseconds,
    # matching GatewayClient.ping(); TODO confirm against the server.
    timestamp: int
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class GatewayClient:
    """
    Client for communicating with the Agent Gateway.

    Provides:
    - gRPC bidirectional streaming for Kafka consume/produce
    - HTTP proxy for AI Inventory and Conversation Store
    """

    def __init__(self, gateway_url: str, api_key: str, secure: bool = False):
        """
        Initialize the gateway client.

        Args:
            gateway_url: Gateway endpoint (e.g., "agent-gateway:8080")
            api_key: API key for authentication
            secure: If True, use TLS for gRPC and HTTPS for HTTP requests
        """
        # Both gRPC and HTTP use the same endpoint (multiplexed via cmux)
        self.grpc_url = gateway_url
        self.secure = secure
        scheme = "https" if secure else "http"
        self.http_url = f"{scheme}://{gateway_url}"
        self.api_key = api_key

        self.channel: Optional[grpc.Channel] = None
        self.stub = None
        self.stream = None

        self.connection_id: Optional[str] = None
        self.subscribed_topics: List[str] = []

        # Outgoing ClientMessages are queued here and drained by
        # _generate_requests(); replaced wholesale on every connect().
        self._running = False
        self._send_queue: queue.Queue = queue.Queue()
        self._lock = threading.Lock()

        # Import generated protobuf code lazily
        self._pb = None
        self._pb_grpc = None

        # Topic handlers (for compatibility, routing done by BasionAgentApp)
        self._handlers: Dict[str, Callable] = {}

        # Async HTTP session, created on first use
        self._http_session: Optional[aiohttp.ClientSession] = None

    def _load_protobuf(self):
        """Lazily load protobuf modules (deferred so import of this module
        doesn't require the generated code until a connection is made)."""
        if self._pb is None:
            from . import gateway_pb2 as pb
            from . import gateway_pb2_grpc as pb_grpc
            self._pb = pb
            self._pb_grpc = pb_grpc

    @property
    def ai_inventory_url(self) -> str:
        """HTTP URL for AI Inventory service via proxy."""
        return f"{self.http_url}/s/ai-inventory"

    @property
    def conversation_store_url(self) -> str:
        """HTTP URL for Conversation Store service via proxy."""
        return f"{self.http_url}/s/conversation-store"

    @property
    def ai_memory_url(self) -> str:
        """HTTP URL for AI Memory service via proxy."""
        return f"{self.http_url}/s/ai-memory"

    @property
    def attachment_url(self) -> str:
        """HTTP URL for Attachment service via proxy."""
        return f"{self.http_url}/s/attachment"

    @property
    def knowledge_graph_url(self) -> str:
        """HTTP URL for Knowledge Graph service via proxy."""
        return f"{self.http_url}/s/knowledge-graph"

    def register_handler(self, topic: str, handler: Callable) -> None:
        """
        Register a handler for a topic.

        Note: Message routing is handled by BasionAgentApp._consume_loop(),
        this method is kept for compatibility.
        """
        self._handlers[topic] = handler
        logger.debug(f"Registered handler for topic: {topic}")

    def connect(self, agent_names: List[str]) -> List[str]:
        """
        Connect to the gateway and authenticate.

        Opens the gRPC channel, starts the bidirectional stream with an
        AuthRequest as its first message, and blocks until the auth
        response (or an error) arrives.

        Args:
            agent_names: List of agent names to subscribe to

        Returns:
            List of subscribed topic names

        Raises:
            AuthenticationError: If the gateway rejects the credentials.
            GatewayError: On gRPC failure or an unexpected first response.
        """
        self._load_protobuf()
        pb = self._pb
        pb_grpc = self._pb_grpc

        logger.info(f"Connecting to gRPC gateway at {self.grpc_url} (secure={self.secure})")

        # Create gRPC channel with keepalive settings
        options = [
            ('grpc.keepalive_time_ms', 30000),  # Send keepalive ping every 30s
            ('grpc.keepalive_timeout_ms', 10000),  # Wait 10s for ping ack
            ('grpc.keepalive_permit_without_calls', True),  # Allow pings without active calls
            ('grpc.http2.min_time_between_pings_ms', 10000),  # Min time between pings
            ('grpc.http2.max_pings_without_data', 0),  # Unlimited pings without data
        ]

        if self.secure:
            # Use system default SSL credentials for TLS
            credentials = grpc.ssl_channel_credentials()
            self.channel = grpc.secure_channel(self.grpc_url, credentials, options=options)
        else:
            self.channel = grpc.insecure_channel(self.grpc_url, options=options)
        self.stub = pb_grpc.AgentGatewayStub(self.channel)

        # Start bidirectional stream.
        # Create a fresh queue to prevent an old generator (from a previous
        # connection) from stealing messages intended for this one.
        self._send_queue = queue.Queue()
        self._running = True

        # Prepare auth message to send first
        auth_request = pb.ClientMessage(
            auth=pb.AuthRequest(
                api_key=self.api_key,
                agent_names=agent_names
            )
        )

        # Create request iterator that sends auth first, then reads from queue
        self._request_iterator = self._generate_requests(auth_request)

        # Start the stream
        self.stream = self.stub.AgentStream(self._request_iterator)

        # Wait for auth response
        try:
            response = next(self.stream)

            if response.HasField("error"):
                error = response.error
                raise AuthenticationError(f"Authentication failed: {error.message}")

            if response.HasField("auth_response"):
                auth_resp = response.auth_response
                self.connection_id = auth_resp.connection_id
                self.subscribed_topics = list(auth_resp.subscribed_topics)

                logger.info(f"Connected to gateway (connection_id: {self.connection_id})")
                logger.info(f"Subscribed to topics: {self.subscribed_topics}")

                return self.subscribed_topics
            else:
                # Neither error nor auth_response as first frame: protocol violation.
                raise GatewayError("Unexpected response type")

        except grpc.RpcError as e:
            raise GatewayError(f"gRPC error during auth: {e}")

    def _generate_requests(self, first_message=None):
        """Generator that yields requests. Sends first_message immediately, then from queue."""
        # Capture local reference to this connection's queue.
        # On reconnect, connect() creates a new _send_queue, so this generator
        # will only drain the old (now-orphaned) queue and won't steal from the new one.
        my_queue = self._send_queue

        # Send the first message (auth) immediately
        if first_message:
            logger.debug("Sending auth message")
            yield first_message

        # Then continue with queued messages; the 0.1s poll lets the loop
        # notice _running going False or the queue being swapped out.
        seq = 0
        while self._running and my_queue is self._send_queue:
            try:
                msg = my_queue.get(timeout=0.1)
                seq += 1
                if hasattr(msg, 'produce') and msg.HasField('produce'):
                    body_preview = msg.produce.body[:80].decode('utf-8', errors='replace') if msg.produce.body else ''
                    logger.info(f"[GENERATOR] Yielding produce #{seq}, key={msg.produce.key}, body={body_preview}")
                yield msg
                logger.info(f"[GENERATOR] Yielded #{seq} successfully (gRPC consumed it)")
            except queue.Empty:
                continue

    def subscribe(self, agent_name: str) -> str:
        """
        Subscribe to an additional agent's topic.

        Queues a SubscribeRequest, then blocks reading the stream for the ack.
        NOTE(review): this calls next(self.stream) directly; if a consume loop
        is reading the same stream concurrently the ack could be consumed
        there instead — confirm single-reader usage.

        Args:
            agent_name: Name of the agent to subscribe to

        Returns:
            Topic name that was subscribed to

        Raises:
            GatewayError: If not connected, on gRPC failure, or on an
                unexpected/ack-less response.
        """
        pb = self._pb

        if not self.stream:
            raise GatewayError("Not connected to gateway")

        msg = pb.ClientMessage(
            subscribe=pb.SubscribeRequest(agent_name=agent_name)
        )
        self._send_queue.put(msg)

        # Wait for response
        try:
            response = next(self.stream)

            if response.HasField("error"):
                raise GatewayError(f"Subscribe failed: {response.error.message}")

            if response.HasField("subscribe_ack"):
                topic = response.subscribe_ack.topic
                self.subscribed_topics.append(topic)
                logger.info(f"Subscribed to topic: {topic}")
                return topic

        except grpc.RpcError as e:
            raise GatewayError(f"Subscribe failed: {e}")

        raise GatewayError("Unexpected response")

    def unsubscribe(self, agent_name: str) -> None:
        """
        Unsubscribe from an agent's topic.

        Fire-and-forget: queues the request and updates local bookkeeping
        without waiting for an acknowledgment.

        Args:
            agent_name: Name of the agent to unsubscribe from

        Raises:
            GatewayError: If not connected to the gateway.
        """
        pb = self._pb

        if not self.stream:
            raise GatewayError("Not connected to gateway")

        msg = pb.ClientMessage(
            unsubscribe=pb.UnsubscribeRequest(agent_name=agent_name)
        )
        self._send_queue.put(msg)

        # Mirror the gateway's "<agent>.inbox" topic naming locally.
        topic = f"{agent_name}.inbox"
        if topic in self.subscribed_topics:
            self.subscribed_topics.remove(topic)
        logger.info(f"Unsubscribed from agent: {agent_name}")

    def produce(
        self,
        topic: str,
        key: str,
        headers: Dict[str, str],
        body: Dict[str, Any],
        correlation_id: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Produce a message to Kafka via the gateway.

        The message is only queued for the stream generator; delivery is
        asynchronous and this call does not wait for a produce_ack.

        Args:
            topic: Target topic (usually "router.inbox")
            key: Message key (usually conversation_id)
            headers: Message headers
            body: Message body
            correlation_id: Optional correlation ID for tracking

        Returns:
            A {"status": "sent", "topic": ...} dict (queued, not acked).

        Raises:
            GatewayError: If not connected to the gateway.
        """
        pb = self._pb

        if not self.stream:
            raise GatewayError("Not connected to gateway")

        # Serialize body to JSON bytes
        body_bytes = json.dumps(body).encode('utf-8')

        msg = pb.ClientMessage(
            produce=pb.ProduceRequest(
                topic=topic,
                key=key,
                headers=headers,
                body=body_bytes,
                correlation_id=correlation_id or ""
            )
        )
        body_preview = body_bytes[:80].decode('utf-8', errors='replace')
        logger.info(f"[PRODUCE] Queuing message, key={key}, queue_size={self._send_queue.qsize()}, body={body_preview}")
        self._send_queue.put(msg)

        # Don't block waiting for ack
        return {"status": "sent", "topic": topic}

    def consume_one(self, timeout: float = 1.0) -> Optional[KafkaMessage]:
        """
        Consume a single message (blocking).

        Args:
            timeout: Maximum time to wait for a message.
                NOTE(review): not currently enforced — next(stream) blocks
                until the server sends a frame; only a DEADLINE_EXCEEDED
                RpcError from the channel yields the timeout-style None.

        Returns:
            KafkaMessage, or None when the frame was not a data message
            (produce_ack, pong, error) or the deadline was exceeded.

        Raises:
            GatewayError: If not connected, the stream ends, or a
                non-deadline gRPC error occurs.
        """
        if not self.stream:
            raise GatewayError("Not connected to gateway")

        try:
            response = next(self.stream)

            if response.HasField("message"):
                kafka_msg = response.message

                # Parse body from JSON bytes; non-JSON payloads become {}.
                try:
                    body = json.loads(kafka_msg.body.decode('utf-8'))
                except json.JSONDecodeError:
                    body = {}

                return KafkaMessage(
                    topic=kafka_msg.topic,
                    partition=kafka_msg.partition,
                    offset=kafka_msg.offset,
                    key=kafka_msg.key,
                    headers=dict(kafka_msg.headers),
                    body=body,
                    timestamp=kafka_msg.timestamp
                )
            elif response.HasField("produce_ack"):
                logger.debug(f"Received produce_ack for topic {response.produce_ack.topic}")
                return None
            elif response.HasField("pong"):
                return None
            elif response.HasField("error"):
                logger.error(f"Gateway error: {response.error.message}")
                return None
            else:
                logger.debug("Received other message type")
                return None

        except StopIteration:
            logger.warning("Stream ended")
            raise GatewayError("Stream ended unexpectedly")
        except grpc.RpcError as e:
            if e.code() == grpc.StatusCode.DEADLINE_EXCEEDED:
                return None
            logger.error(f"Error in consume_one: {e}")
            raise GatewayError(f"Consume error: {e}")

    def ping(self) -> int:
        """
        Send a ping to the gateway.

        Fire-and-forget: the pong is consumed (and discarded) by whoever
        reads the stream, e.g. consume_one().

        Returns:
            Client timestamp sent (epoch milliseconds)

        Raises:
            GatewayError: If not connected to the gateway.
        """
        pb = self._pb

        if not self.stream:
            raise GatewayError("Not connected to gateway")

        timestamp = int(time.time() * 1000)
        msg = pb.ClientMessage(
            ping=pb.PingRequest(timestamp=timestamp)
        )
        self._send_queue.put(msg)
        return timestamp

    async def _get_http_session(self) -> aiohttp.ClientSession:
        """Get or create aiohttp session (recreated if a previous one was closed)."""
        if self._http_session is None or self._http_session.closed:
            timeout = aiohttp.ClientTimeout(total=30.0)
            self._http_session = aiohttp.ClientSession(timeout=timeout)
        return self._http_session

    async def close_async(self):
        """Close async HTTP session."""
        if self._http_session and not self._http_session.closed:
            await self._http_session.close()

    def close(self) -> None:
        """Close the gateway connection and reset connection state.

        Stops the request generator, closes the gRPC channel (best-effort),
        and clears stream/stub/subscription bookkeeping. Safe to call more
        than once. Does not close the async HTTP session (use close_async).
        """
        self._running = False

        if self.channel:
            try:
                self.channel.close()
            except Exception:
                # Best-effort shutdown; a failing close must not mask teardown.
                pass
            self.channel = None

        self.stream = None
        self.stub = None
        self.connection_id = None
        self.subscribed_topics = []
        logger.info("gRPC gateway connection closed")

    # HTTP Proxy Methods (Sync - for registration)

    def http_post(self, service: str, path: str, **kwargs) -> requests.Response:
        """
        Make a sync POST request (used for registration).

        Args:
            service: Service name ("ai-inventory" or "conversation-store")
            path: Request path
            **kwargs: Additional arguments passed to requests.post

        Returns:
            Response object
        """
        url = f"{self.http_url}/s/{service}{path}"
        return requests.post(url, **kwargs)

    # HTTP Proxy Methods (Async - for runtime)
    #
    # NOTE(review): these return the response object *after* the
    # `async with` block has released it, so callers cannot read the body
    # afterwards — confirm callers only use status/headers, or read the
    # payload before these helpers return.

    async def http_get_async(self, service: str, path: str, **kwargs):
        """
        Make an async GET request to a service via the gateway proxy.

        Args:
            service: Service name ("ai-inventory" or "conversation-store")
            path: Request path
            **kwargs: Additional arguments passed to aiohttp

        Returns:
            Response object

        Raises:
            aiohttp.ClientResponseError: On a non-2xx status.
        """
        url = f"{self.http_url}/s/{service}{path}"
        session = await self._get_http_session()
        async with session.get(url, **kwargs) as response:
            response.raise_for_status()
            return response

    async def http_post_async(self, service: str, path: str, **kwargs):
        """
        Make an async POST request to a service via the gateway proxy.

        Args:
            service: Service name ("ai-inventory" or "conversation-store")
            path: Request path
            **kwargs: Additional arguments passed to aiohttp

        Returns:
            Response object

        Raises:
            aiohttp.ClientResponseError: On a non-2xx status.
        """
        url = f"{self.http_url}/s/{service}{path}"
        session = await self._get_http_session()
        async with session.post(url, **kwargs) as response:
            response.raise_for_status()
            return response

    async def http_patch_async(self, service: str, path: str, **kwargs):
        """
        Make an async PATCH request to a service via the gateway proxy.

        Args:
            service: Service name ("ai-inventory" or "conversation-store")
            path: Request path
            **kwargs: Additional arguments passed to aiohttp

        Returns:
            Response object

        Raises:
            aiohttp.ClientResponseError: On a non-2xx status.
        """
        url = f"{self.http_url}/s/{service}{path}"
        session = await self._get_http_session()
        async with session.patch(url, **kwargs) as response:
            response.raise_for_status()
            return response

    async def http_delete_async(self, service: str, path: str, **kwargs):
        """
        Make an async DELETE request to a service via the gateway proxy.

        Args:
            service: Service name ("ai-inventory" or "conversation-store")
            path: Request path
            **kwargs: Additional arguments passed to aiohttp

        Returns:
            Response object

        Raises:
            aiohttp.ClientResponseError: On a non-2xx status.
        """
        url = f"{self.http_url}/s/{service}{path}"
        session = await self._get_http_session()
        async with session.delete(url, **kwargs) as response:
            response.raise_for_status()
            return response
|
522
|
+
|
|
523
|
+
|
|
524
|
+
class GatewayError(Exception):
    """Base exception for gateway errors."""
|
|
527
|
+
|
|
528
|
+
|
|
529
|
+
class AuthenticationError(GatewayError):
    """Raised when authentication fails."""
|