march-agent 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- march_agent/__init__.py +52 -0
- march_agent/agent.py +341 -0
- march_agent/agent_state_client.py +149 -0
- march_agent/app.py +416 -0
- march_agent/artifact.py +58 -0
- march_agent/checkpoint_client.py +169 -0
- march_agent/checkpointer.py +16 -0
- march_agent/cli.py +139 -0
- march_agent/conversation.py +103 -0
- march_agent/conversation_client.py +86 -0
- march_agent/conversation_message.py +48 -0
- march_agent/exceptions.py +36 -0
- march_agent/extensions/__init__.py +1 -0
- march_agent/extensions/langgraph.py +526 -0
- march_agent/extensions/pydantic_ai.py +180 -0
- march_agent/gateway_client.py +506 -0
- march_agent/gateway_pb2.py +73 -0
- march_agent/gateway_pb2_grpc.py +101 -0
- march_agent/heartbeat.py +84 -0
- march_agent/memory.py +73 -0
- march_agent/memory_client.py +155 -0
- march_agent/message.py +80 -0
- march_agent/streamer.py +220 -0
- march_agent-0.1.1.dist-info/METADATA +503 -0
- march_agent-0.1.1.dist-info/RECORD +29 -0
- march_agent-0.1.1.dist-info/WHEEL +5 -0
- march_agent-0.1.1.dist-info/entry_points.txt +2 -0
- march_agent-0.1.1.dist-info/licenses/LICENSE +21 -0
- march_agent-0.1.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,506 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Gateway client for connecting to the Agent Gateway service.
|
|
3
|
+
Handles gRPC communication for Kafka and HTTP for other services.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import json
|
|
7
|
+
import logging
|
|
8
|
+
import queue
|
|
9
|
+
import threading
|
|
10
|
+
import time
|
|
11
|
+
import uuid
|
|
12
|
+
from dataclasses import dataclass
|
|
13
|
+
from typing import Callable, Dict, List, Optional, Any, Tuple
|
|
14
|
+
|
|
15
|
+
import grpc
|
|
16
|
+
import requests
|
|
17
|
+
import aiohttp
|
|
18
|
+
|
|
19
|
+
logger = logging.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@dataclass
class KafkaMessage:
    """Represents a message received from Kafka via the gateway.

    Fields mirror the gateway's KafkaMessage protobuf; ``body`` is the
    JSON-decoded payload (an empty dict when the payload is not valid
    JSON — see GatewayClient.consume_one).
    """
    topic: str               # Kafka topic the message arrived on
    partition: int           # partition within the topic
    offset: int              # offset within the partition
    key: str                 # message key (usually a conversation_id — see produce())
    headers: Dict[str, str]  # Kafka record headers
    body: Dict[str, Any]     # JSON-decoded message payload
    timestamp: int           # broker timestamp (presumably epoch millis — TODO confirm units)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class GatewayClient:
    """
    Client for communicating with the Agent Gateway.

    Provides:
    - gRPC bidirectional streaming for Kafka consume/produce
    - HTTP proxy for AI Inventory and Conversation Store

    A single host:port endpoint serves both protocols (multiplexed via
    cmux on the gateway side); ``secure`` selects TLS/HTTPS for both.
    """
|
|
42
|
+
|
|
43
|
+
def __init__(self, gateway_url: str, api_key: str, secure: bool = False):
|
|
44
|
+
"""
|
|
45
|
+
Initialize the gateway client.
|
|
46
|
+
|
|
47
|
+
Args:
|
|
48
|
+
gateway_url: Gateway endpoint (e.g., "agent-gateway:8080")
|
|
49
|
+
api_key: API key for authentication
|
|
50
|
+
secure: If True, use TLS for gRPC and HTTPS for HTTP requests
|
|
51
|
+
"""
|
|
52
|
+
# Both gRPC and HTTP use the same endpoint (multiplexed via cmux)
|
|
53
|
+
self.grpc_url = gateway_url
|
|
54
|
+
self.secure = secure
|
|
55
|
+
scheme = "https" if secure else "http"
|
|
56
|
+
self.http_url = f"{scheme}://{gateway_url}"
|
|
57
|
+
self.api_key = api_key
|
|
58
|
+
|
|
59
|
+
self.channel: Optional[grpc.Channel] = None
|
|
60
|
+
self.stub = None
|
|
61
|
+
self.stream = None
|
|
62
|
+
|
|
63
|
+
self.connection_id: Optional[str] = None
|
|
64
|
+
self.subscribed_topics: List[str] = []
|
|
65
|
+
|
|
66
|
+
self._running = False
|
|
67
|
+
self._send_queue: queue.Queue = queue.Queue()
|
|
68
|
+
self._lock = threading.Lock()
|
|
69
|
+
|
|
70
|
+
# Import generated protobuf code lazily
|
|
71
|
+
self._pb = None
|
|
72
|
+
self._pb_grpc = None
|
|
73
|
+
|
|
74
|
+
# Topic handlers (for compatibility, routing done by MarchAgentApp)
|
|
75
|
+
self._handlers: Dict[str, Callable] = {}
|
|
76
|
+
|
|
77
|
+
# Async HTTP session
|
|
78
|
+
self._http_session: Optional[aiohttp.ClientSession] = None
|
|
79
|
+
|
|
80
|
+
def _load_protobuf(self):
|
|
81
|
+
"""Lazily load protobuf modules."""
|
|
82
|
+
if self._pb is None:
|
|
83
|
+
from . import gateway_pb2 as pb
|
|
84
|
+
from . import gateway_pb2_grpc as pb_grpc
|
|
85
|
+
self._pb = pb
|
|
86
|
+
self._pb_grpc = pb_grpc
|
|
87
|
+
|
|
88
|
+
@property
|
|
89
|
+
def ai_inventory_url(self) -> str:
|
|
90
|
+
"""HTTP URL for AI Inventory service via proxy."""
|
|
91
|
+
return f"{self.http_url}/s/ai-inventory"
|
|
92
|
+
|
|
93
|
+
@property
|
|
94
|
+
def conversation_store_url(self) -> str:
|
|
95
|
+
"""HTTP URL for Conversation Store service via proxy."""
|
|
96
|
+
return f"{self.http_url}/s/conversation-store"
|
|
97
|
+
|
|
98
|
+
@property
|
|
99
|
+
def ai_memory_url(self) -> str:
|
|
100
|
+
"""HTTP URL for AI Memory service via proxy."""
|
|
101
|
+
return f"{self.http_url}/s/ai-memory"
|
|
102
|
+
|
|
103
|
+
def register_handler(self, topic: str, handler: Callable) -> None:
|
|
104
|
+
"""
|
|
105
|
+
Register a handler for a topic.
|
|
106
|
+
|
|
107
|
+
Note: Message routing is handled by MarchAgentApp._consume_loop(),
|
|
108
|
+
this method is kept for compatibility.
|
|
109
|
+
"""
|
|
110
|
+
self._handlers[topic] = handler
|
|
111
|
+
logger.debug(f"Registered handler for topic: {topic}")
|
|
112
|
+
|
|
113
|
+
def connect(self, agent_names: List[str]) -> List[str]:
|
|
114
|
+
"""
|
|
115
|
+
Connect to the gateway and authenticate.
|
|
116
|
+
|
|
117
|
+
Args:
|
|
118
|
+
agent_names: List of agent names to subscribe to
|
|
119
|
+
|
|
120
|
+
Returns:
|
|
121
|
+
List of subscribed topic names
|
|
122
|
+
"""
|
|
123
|
+
self._load_protobuf()
|
|
124
|
+
pb = self._pb
|
|
125
|
+
pb_grpc = self._pb_grpc
|
|
126
|
+
|
|
127
|
+
logger.info(f"Connecting to gRPC gateway at {self.grpc_url} (secure={self.secure})")
|
|
128
|
+
|
|
129
|
+
# Create gRPC channel with keepalive settings
|
|
130
|
+
options = [
|
|
131
|
+
('grpc.keepalive_time_ms', 30000), # Send keepalive ping every 30s
|
|
132
|
+
('grpc.keepalive_timeout_ms', 10000), # Wait 10s for ping ack
|
|
133
|
+
('grpc.keepalive_permit_without_calls', True), # Allow pings without active calls
|
|
134
|
+
('grpc.http2.min_time_between_pings_ms', 10000), # Min time between pings
|
|
135
|
+
('grpc.http2.max_pings_without_data', 0), # Unlimited pings without data
|
|
136
|
+
]
|
|
137
|
+
|
|
138
|
+
if self.secure:
|
|
139
|
+
# Use system default SSL credentials for TLS
|
|
140
|
+
credentials = grpc.ssl_channel_credentials()
|
|
141
|
+
self.channel = grpc.secure_channel(self.grpc_url, credentials, options=options)
|
|
142
|
+
else:
|
|
143
|
+
self.channel = grpc.insecure_channel(self.grpc_url, options=options)
|
|
144
|
+
self.stub = pb_grpc.AgentGatewayStub(self.channel)
|
|
145
|
+
|
|
146
|
+
# Start bidirectional stream
|
|
147
|
+
self._running = True
|
|
148
|
+
|
|
149
|
+
# Prepare auth message to send first
|
|
150
|
+
auth_request = pb.ClientMessage(
|
|
151
|
+
auth=pb.AuthRequest(
|
|
152
|
+
api_key=self.api_key,
|
|
153
|
+
agent_names=agent_names
|
|
154
|
+
)
|
|
155
|
+
)
|
|
156
|
+
|
|
157
|
+
# Create request iterator that sends auth first, then reads from queue
|
|
158
|
+
self._request_iterator = self._generate_requests(auth_request)
|
|
159
|
+
|
|
160
|
+
# Start the stream
|
|
161
|
+
self.stream = self.stub.AgentStream(self._request_iterator)
|
|
162
|
+
|
|
163
|
+
# Wait for auth response
|
|
164
|
+
try:
|
|
165
|
+
response = next(self.stream)
|
|
166
|
+
|
|
167
|
+
if response.HasField("error"):
|
|
168
|
+
error = response.error
|
|
169
|
+
raise AuthenticationError(f"Authentication failed: {error.message}")
|
|
170
|
+
|
|
171
|
+
if response.HasField("auth_response"):
|
|
172
|
+
auth_resp = response.auth_response
|
|
173
|
+
self.connection_id = auth_resp.connection_id
|
|
174
|
+
self.subscribed_topics = list(auth_resp.subscribed_topics)
|
|
175
|
+
|
|
176
|
+
logger.info(f"Connected to gateway (connection_id: {self.connection_id})")
|
|
177
|
+
logger.info(f"Subscribed to topics: {self.subscribed_topics}")
|
|
178
|
+
|
|
179
|
+
return self.subscribed_topics
|
|
180
|
+
else:
|
|
181
|
+
raise GatewayError(f"Unexpected response type")
|
|
182
|
+
|
|
183
|
+
except grpc.RpcError as e:
|
|
184
|
+
raise GatewayError(f"gRPC error during auth: {e}")
|
|
185
|
+
|
|
186
|
+
def _generate_requests(self, first_message=None):
|
|
187
|
+
"""Generator that yields requests. Sends first_message immediately, then from queue."""
|
|
188
|
+
# Send the first message (auth) immediately
|
|
189
|
+
if first_message:
|
|
190
|
+
logger.debug("Sending auth message")
|
|
191
|
+
yield first_message
|
|
192
|
+
|
|
193
|
+
# Then continue with queued messages
|
|
194
|
+
while self._running:
|
|
195
|
+
try:
|
|
196
|
+
msg = self._send_queue.get(timeout=0.1)
|
|
197
|
+
yield msg
|
|
198
|
+
except queue.Empty:
|
|
199
|
+
continue
|
|
200
|
+
|
|
201
|
+
def subscribe(self, agent_name: str) -> str:
|
|
202
|
+
"""
|
|
203
|
+
Subscribe to an additional agent's topic.
|
|
204
|
+
|
|
205
|
+
Args:
|
|
206
|
+
agent_name: Name of the agent to subscribe to
|
|
207
|
+
|
|
208
|
+
Returns:
|
|
209
|
+
Topic name that was subscribed to
|
|
210
|
+
"""
|
|
211
|
+
pb = self._pb
|
|
212
|
+
|
|
213
|
+
if not self.stream:
|
|
214
|
+
raise GatewayError("Not connected to gateway")
|
|
215
|
+
|
|
216
|
+
msg = pb.ClientMessage(
|
|
217
|
+
subscribe=pb.SubscribeRequest(agent_name=agent_name)
|
|
218
|
+
)
|
|
219
|
+
self._send_queue.put(msg)
|
|
220
|
+
|
|
221
|
+
# Wait for response
|
|
222
|
+
try:
|
|
223
|
+
response = next(self.stream)
|
|
224
|
+
|
|
225
|
+
if response.HasField("error"):
|
|
226
|
+
raise GatewayError(f"Subscribe failed: {response.error.message}")
|
|
227
|
+
|
|
228
|
+
if response.HasField("subscribe_ack"):
|
|
229
|
+
topic = response.subscribe_ack.topic
|
|
230
|
+
self.subscribed_topics.append(topic)
|
|
231
|
+
logger.info(f"Subscribed to topic: {topic}")
|
|
232
|
+
return topic
|
|
233
|
+
|
|
234
|
+
except grpc.RpcError as e:
|
|
235
|
+
raise GatewayError(f"Subscribe failed: {e}")
|
|
236
|
+
|
|
237
|
+
raise GatewayError("Unexpected response")
|
|
238
|
+
|
|
239
|
+
def unsubscribe(self, agent_name: str) -> None:
|
|
240
|
+
"""
|
|
241
|
+
Unsubscribe from an agent's topic.
|
|
242
|
+
|
|
243
|
+
Args:
|
|
244
|
+
agent_name: Name of the agent to unsubscribe from
|
|
245
|
+
"""
|
|
246
|
+
pb = self._pb
|
|
247
|
+
|
|
248
|
+
if not self.stream:
|
|
249
|
+
raise GatewayError("Not connected to gateway")
|
|
250
|
+
|
|
251
|
+
msg = pb.ClientMessage(
|
|
252
|
+
unsubscribe=pb.UnsubscribeRequest(agent_name=agent_name)
|
|
253
|
+
)
|
|
254
|
+
self._send_queue.put(msg)
|
|
255
|
+
|
|
256
|
+
topic = f"{agent_name}.inbox"
|
|
257
|
+
if topic in self.subscribed_topics:
|
|
258
|
+
self.subscribed_topics.remove(topic)
|
|
259
|
+
logger.info(f"Unsubscribed from agent: {agent_name}")
|
|
260
|
+
|
|
261
|
+
def produce(
|
|
262
|
+
self,
|
|
263
|
+
topic: str,
|
|
264
|
+
key: str,
|
|
265
|
+
headers: Dict[str, str],
|
|
266
|
+
body: Dict[str, Any],
|
|
267
|
+
correlation_id: Optional[str] = None
|
|
268
|
+
) -> Dict[str, Any]:
|
|
269
|
+
"""
|
|
270
|
+
Produce a message to Kafka via the gateway.
|
|
271
|
+
|
|
272
|
+
Args:
|
|
273
|
+
topic: Target topic (usually "router.inbox")
|
|
274
|
+
key: Message key (usually conversation_id)
|
|
275
|
+
headers: Message headers
|
|
276
|
+
body: Message body
|
|
277
|
+
correlation_id: Optional correlation ID for tracking
|
|
278
|
+
|
|
279
|
+
Returns:
|
|
280
|
+
Produce acknowledgment with partition and offset
|
|
281
|
+
"""
|
|
282
|
+
pb = self._pb
|
|
283
|
+
|
|
284
|
+
if not self.stream:
|
|
285
|
+
raise GatewayError("Not connected to gateway")
|
|
286
|
+
|
|
287
|
+
# Serialize body to JSON bytes
|
|
288
|
+
body_bytes = json.dumps(body).encode('utf-8')
|
|
289
|
+
|
|
290
|
+
msg = pb.ClientMessage(
|
|
291
|
+
produce=pb.ProduceRequest(
|
|
292
|
+
topic=topic,
|
|
293
|
+
key=key,
|
|
294
|
+
headers=headers,
|
|
295
|
+
body=body_bytes,
|
|
296
|
+
correlation_id=correlation_id or ""
|
|
297
|
+
)
|
|
298
|
+
)
|
|
299
|
+
self._send_queue.put(msg)
|
|
300
|
+
|
|
301
|
+
# Don't block waiting for ack
|
|
302
|
+
return {"status": "sent", "topic": topic}
|
|
303
|
+
|
|
304
|
+
    def consume_one(self, timeout: float = 1.0) -> Optional[KafkaMessage]:
        """
        Consume a single message (blocking).

        Args:
            timeout: Maximum time to wait for a message

        Returns:
            KafkaMessage or None if timeout
        """
        # NOTE(review): `timeout` is never applied to next(self.stream); this
        # call can block indefinitely. The DEADLINE_EXCEEDED branch below
        # suggests a deadline was intended — confirm.
        if not self.stream:
            raise GatewayError("Not connected to gateway")

        try:
            response = next(self.stream)

            if response.HasField("message"):
                kafka_msg = response.message

                # Parse body from JSON bytes
                try:
                    body = json.loads(kafka_msg.body.decode('utf-8'))
                except json.JSONDecodeError:
                    # Non-JSON payloads degrade to an empty body rather than failing.
                    body = {}

                return KafkaMessage(
                    topic=kafka_msg.topic,
                    partition=kafka_msg.partition,
                    offset=kafka_msg.offset,
                    key=kafka_msg.key,
                    headers=dict(kafka_msg.headers),
                    body=body,
                    timestamp=kafka_msg.timestamp
                )
            elif response.HasField("produce_ack"):
                # Acks for fire-and-forget produces are logged and dropped.
                logger.debug(f"Received produce_ack for topic {response.produce_ack.topic}")
                return None
            elif response.HasField("pong"):
                # Heartbeat replies carry no payload for the caller.
                return None
            elif response.HasField("error"):
                # Gateway-reported errors are logged but treated as "no message".
                logger.error(f"Gateway error: {response.error.message}")
                return None
            else:
                logger.debug(f"Received other message type")
                return None

        except StopIteration:
            logger.warning("Stream ended")
            raise GatewayError("Stream ended unexpectedly")
        except grpc.RpcError as e:
            # DEADLINE_EXCEEDED maps to the "timeout -> None" contract;
            # any other RPC error is fatal.
            if e.code() == grpc.StatusCode.DEADLINE_EXCEEDED:
                return None
            logger.error(f"Error in consume_one: {e}")
            raise GatewayError(f"Consume error: {e}")
|
|
358
|
+
|
|
359
|
+
def ping(self) -> int:
|
|
360
|
+
"""
|
|
361
|
+
Send a ping to the gateway.
|
|
362
|
+
|
|
363
|
+
Returns:
|
|
364
|
+
Client timestamp sent
|
|
365
|
+
"""
|
|
366
|
+
pb = self._pb
|
|
367
|
+
|
|
368
|
+
if not self.stream:
|
|
369
|
+
raise GatewayError("Not connected to gateway")
|
|
370
|
+
|
|
371
|
+
timestamp = int(time.time() * 1000)
|
|
372
|
+
msg = pb.ClientMessage(
|
|
373
|
+
ping=pb.PingRequest(timestamp=timestamp)
|
|
374
|
+
)
|
|
375
|
+
self._send_queue.put(msg)
|
|
376
|
+
return timestamp
|
|
377
|
+
|
|
378
|
+
async def _get_http_session(self) -> aiohttp.ClientSession:
|
|
379
|
+
"""Get or create aiohttp session."""
|
|
380
|
+
if self._http_session is None or self._http_session.closed:
|
|
381
|
+
timeout = aiohttp.ClientTimeout(total=30.0)
|
|
382
|
+
self._http_session = aiohttp.ClientSession(timeout=timeout)
|
|
383
|
+
return self._http_session
|
|
384
|
+
|
|
385
|
+
async def close_async(self):
|
|
386
|
+
"""Close async HTTP session."""
|
|
387
|
+
if self._http_session and not self._http_session.closed:
|
|
388
|
+
await self._http_session.close()
|
|
389
|
+
|
|
390
|
+
def close(self) -> None:
|
|
391
|
+
"""Close the gateway connection."""
|
|
392
|
+
self._running = False
|
|
393
|
+
|
|
394
|
+
if self.channel:
|
|
395
|
+
try:
|
|
396
|
+
self.channel.close()
|
|
397
|
+
except Exception:
|
|
398
|
+
pass
|
|
399
|
+
self.channel = None
|
|
400
|
+
|
|
401
|
+
self.stream = None
|
|
402
|
+
self.stub = None
|
|
403
|
+
self.connection_id = None
|
|
404
|
+
self.subscribed_topics = []
|
|
405
|
+
logger.info("gRPC gateway connection closed")
|
|
406
|
+
|
|
407
|
+
# HTTP Proxy Methods (Sync - for registration)
|
|
408
|
+
|
|
409
|
+
def http_post(self, service: str, path: str, **kwargs) -> requests.Response:
|
|
410
|
+
"""
|
|
411
|
+
Make a sync POST request (used for registration).
|
|
412
|
+
|
|
413
|
+
Args:
|
|
414
|
+
service: Service name ("ai-inventory" or "conversation-store")
|
|
415
|
+
path: Request path
|
|
416
|
+
**kwargs: Additional arguments passed to requests.post
|
|
417
|
+
|
|
418
|
+
Returns:
|
|
419
|
+
Response object
|
|
420
|
+
"""
|
|
421
|
+
url = f"{self.http_url}/s/{service}{path}"
|
|
422
|
+
return requests.post(url, **kwargs)
|
|
423
|
+
|
|
424
|
+
# HTTP Proxy Methods (Async - for runtime)
|
|
425
|
+
|
|
426
|
+
async def http_get_async(self, service: str, path: str, **kwargs):
|
|
427
|
+
"""
|
|
428
|
+
Make an async GET request to a service via the gateway proxy.
|
|
429
|
+
|
|
430
|
+
Args:
|
|
431
|
+
service: Service name ("ai-inventory" or "conversation-store")
|
|
432
|
+
path: Request path
|
|
433
|
+
**kwargs: Additional arguments passed to aiohttp
|
|
434
|
+
|
|
435
|
+
Returns:
|
|
436
|
+
Response object
|
|
437
|
+
"""
|
|
438
|
+
url = f"{self.http_url}/s/{service}{path}"
|
|
439
|
+
session = await self._get_http_session()
|
|
440
|
+
async with session.get(url, **kwargs) as response:
|
|
441
|
+
response.raise_for_status()
|
|
442
|
+
return response
|
|
443
|
+
|
|
444
|
+
async def http_post_async(self, service: str, path: str, **kwargs):
|
|
445
|
+
"""
|
|
446
|
+
Make an async POST request to a service via the gateway proxy.
|
|
447
|
+
|
|
448
|
+
Args:
|
|
449
|
+
service: Service name ("ai-inventory" or "conversation-store")
|
|
450
|
+
path: Request path
|
|
451
|
+
**kwargs: Additional arguments passed to aiohttp
|
|
452
|
+
|
|
453
|
+
Returns:
|
|
454
|
+
Response object
|
|
455
|
+
"""
|
|
456
|
+
url = f"{self.http_url}/s/{service}{path}"
|
|
457
|
+
session = await self._get_http_session()
|
|
458
|
+
async with session.post(url, **kwargs) as response:
|
|
459
|
+
response.raise_for_status()
|
|
460
|
+
return response
|
|
461
|
+
|
|
462
|
+
async def http_patch_async(self, service: str, path: str, **kwargs):
|
|
463
|
+
"""
|
|
464
|
+
Make an async PATCH request to a service via the gateway proxy.
|
|
465
|
+
|
|
466
|
+
Args:
|
|
467
|
+
service: Service name ("ai-inventory" or "conversation-store")
|
|
468
|
+
path: Request path
|
|
469
|
+
**kwargs: Additional arguments passed to aiohttp
|
|
470
|
+
|
|
471
|
+
Returns:
|
|
472
|
+
Response object
|
|
473
|
+
"""
|
|
474
|
+
url = f"{self.http_url}/s/{service}{path}"
|
|
475
|
+
session = await self._get_http_session()
|
|
476
|
+
async with session.patch(url, **kwargs) as response:
|
|
477
|
+
response.raise_for_status()
|
|
478
|
+
return response
|
|
479
|
+
|
|
480
|
+
async def http_delete_async(self, service: str, path: str, **kwargs):
|
|
481
|
+
"""
|
|
482
|
+
Make an async DELETE request to a service via the gateway proxy.
|
|
483
|
+
|
|
484
|
+
Args:
|
|
485
|
+
service: Service name ("ai-inventory" or "conversation-store")
|
|
486
|
+
path: Request path
|
|
487
|
+
**kwargs: Additional arguments passed to aiohttp
|
|
488
|
+
|
|
489
|
+
Returns:
|
|
490
|
+
Response object
|
|
491
|
+
"""
|
|
492
|
+
url = f"{self.http_url}/s/{service}{path}"
|
|
493
|
+
session = await self._get_http_session()
|
|
494
|
+
async with session.delete(url, **kwargs) as response:
|
|
495
|
+
response.raise_for_status()
|
|
496
|
+
return response
|
|
497
|
+
|
|
498
|
+
|
|
499
|
+
class GatewayError(Exception):
    """Base class for all errors raised by the gateway client."""
|
|
502
|
+
|
|
503
|
+
|
|
504
|
+
class AuthenticationError(GatewayError):
    """Raised when the gateway rejects the client's credentials."""
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: gateway.proto
# Protobuf Python Version: 6.31.1
# NOTE(review): machine-generated module — regenerate with protoc from
# gateway.proto rather than editing by hand.
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
    _runtime_version.Domain.PUBLIC,
    6,
    31,
    1,
    '',
    'gateway.proto'
)
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\rgateway.proto\x12\x07gateway\"\xf6\x01\n\rClientMessage\x12$\n\x04\x61uth\x18\x01 \x01(\x0b\x32\x14.gateway.AuthRequestH\x00\x12.\n\tsubscribe\x18\x02 \x01(\x0b\x32\x19.gateway.SubscribeRequestH\x00\x12\x32\n\x0bunsubscribe\x18\x03 \x01(\x0b\x32\x1b.gateway.UnsubscribeRequestH\x00\x12*\n\x07produce\x18\x04 \x01(\x0b\x32\x17.gateway.ProduceRequestH\x00\x12$\n\x04ping\x18\x05 \x01(\x0b\x32\x14.gateway.PingRequestH\x00\x42\t\n\x07payload\"\xd4\x02\n\rServerMessage\x12.\n\rauth_response\x18\x01 \x01(\x0b\x32\x15.gateway.AuthResponseH\x00\x12(\n\x07message\x18\x02 \x01(\x0b\x32\x15.gateway.KafkaMessageH\x00\x12*\n\x0bproduce_ack\x18\x03 \x01(\x0b\x32\x13.gateway.ProduceAckH\x00\x12.\n\rsubscribe_ack\x18\x04 \x01(\x0b\x32\x15.gateway.SubscribeAckH\x00\x12\x32\n\x0funsubscribe_ack\x18\x05 \x01(\x0b\x32\x17.gateway.UnsubscribeAckH\x00\x12%\n\x04pong\x18\x06 \x01(\x0b\x32\x15.gateway.PongResponseH\x00\x12\'\n\x05\x65rror\x18\x07 \x01(\x0b\x32\x16.gateway.ErrorResponseH\x00\x42\t\n\x07payload\"3\n\x0b\x41uthRequest\x12\x0f\n\x07\x61pi_key\x18\x01 \x01(\t\x12\x13\n\x0b\x61gent_names\x18\x02 \x03(\t\"@\n\x0c\x41uthResponse\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x19\n\x11subscribed_topics\x18\x02 \x03(\t\"&\n\x10SubscribeRequest\x12\x12\n\nagent_name\x18\x01 \x01(\t\"\x1d\n\x0cSubscribeAck\x12\r\n\x05topic\x18\x01 \x01(\t\"(\n\x12UnsubscribeRequest\x12\x12\n\nagent_name\x18\x01 \x01(\t\"$\n\x0eUnsubscribeAck\x12\x12\n\nagent_name\x18\x01 \x01(\t\"\xb9\x01\n\x0eProduceRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12\x35\n\x07headers\x18\x03 \x03(\x0b\x32$.gateway.ProduceRequest.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"V\n\nProduceAck\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpartition\x18\x02 \x01(\x05\x12\x0e\n\x06offset\x18\x03 \x01(\x03\x12\x16\n\x0e\x63orrelation_id\x18\x04 \x01(\t\"\xd3\x01\n\x0cKafkaMessage\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpartition\x18\x02 \x01(\x05\x12\x0e\n\x06offset\x18\x03 \x01(\x03\x12\x0b\n\x03key\x18\x04 \x01(\t\x12\x33\n\x07headers\x18\x05 \x03(\x0b\x32\".gateway.KafkaMessage.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x06 \x01(\x0c\x12\x11\n\ttimestamp\x18\x07 \x01(\x03\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\" \n\x0bPingRequest\x12\x11\n\ttimestamp\x18\x01 \x01(\x03\"B\n\x0cPongResponse\x12\x18\n\x10\x63lient_timestamp\x18\x01 \x01(\x03\x12\x18\n\x10server_timestamp\x18\x02 \x01(\x03\"F\n\rErrorResponse\x12\x0c\n\x04\x63ode\x18\x01 \x01(\t\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x03 \x01(\t2Q\n\x0c\x41gentGateway\x12\x41\n\x0b\x41gentStream\x12\x16.gateway.ClientMessage\x1a\x16.gateway.ServerMessage(\x01\x30\x01\x42 Z\x1e\x61gent-gateway/internal/grpc/pbb\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'gateway_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'Z\036agent-gateway/internal/grpc/pb'
  _globals['_PRODUCEREQUEST_HEADERSENTRY']._loaded_options = None
  _globals['_PRODUCEREQUEST_HEADERSENTRY']._serialized_options = b'8\001'
  _globals['_KAFKAMESSAGE_HEADERSENTRY']._loaded_options = None
  _globals['_KAFKAMESSAGE_HEADERSENTRY']._serialized_options = b'8\001'
  _globals['_CLIENTMESSAGE']._serialized_start=27
  _globals['_CLIENTMESSAGE']._serialized_end=273
  _globals['_SERVERMESSAGE']._serialized_start=276
  _globals['_SERVERMESSAGE']._serialized_end=616
  _globals['_AUTHREQUEST']._serialized_start=618
  _globals['_AUTHREQUEST']._serialized_end=669
  _globals['_AUTHRESPONSE']._serialized_start=671
  _globals['_AUTHRESPONSE']._serialized_end=735
  _globals['_SUBSCRIBEREQUEST']._serialized_start=737
  _globals['_SUBSCRIBEREQUEST']._serialized_end=775
  _globals['_SUBSCRIBEACK']._serialized_start=777
  _globals['_SUBSCRIBEACK']._serialized_end=806
  _globals['_UNSUBSCRIBEREQUEST']._serialized_start=808
  _globals['_UNSUBSCRIBEREQUEST']._serialized_end=848
  _globals['_UNSUBSCRIBEACK']._serialized_start=850
  _globals['_UNSUBSCRIBEACK']._serialized_end=886
  _globals['_PRODUCEREQUEST']._serialized_start=889
  _globals['_PRODUCEREQUEST']._serialized_end=1074
  _globals['_PRODUCEREQUEST_HEADERSENTRY']._serialized_start=1028
  _globals['_PRODUCEREQUEST_HEADERSENTRY']._serialized_end=1074
  _globals['_PRODUCEACK']._serialized_start=1076
  _globals['_PRODUCEACK']._serialized_end=1162
  _globals['_KAFKAMESSAGE']._serialized_start=1165
  _globals['_KAFKAMESSAGE']._serialized_end=1376
  _globals['_KAFKAMESSAGE_HEADERSENTRY']._serialized_start=1028
  _globals['_KAFKAMESSAGE_HEADERSENTRY']._serialized_end=1074
  _globals['_PINGREQUEST']._serialized_start=1378
  _globals['_PINGREQUEST']._serialized_end=1410
  _globals['_PONGRESPONSE']._serialized_start=1412
  _globals['_PONGRESPONSE']._serialized_end=1478
  _globals['_ERRORRESPONSE']._serialized_start=1480
  _globals['_ERRORRESPONSE']._serialized_end=1550
  _globals['_AGENTGATEWAY']._serialized_start=1552
  _globals['_AGENTGATEWAY']._serialized_end=1633
# @@protoc_insertion_point(module_scope)
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
# NOTE(review): machine-generated module — regenerate with grpcio-tools from
# gateway.proto rather than editing by hand.
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings

from . import gateway_pb2 as gateway__pb2

GRPC_GENERATED_VERSION = '1.76.0'
GRPC_VERSION = grpc.__version__
_version_not_supported = False

try:
    from grpc._utilities import first_version_is_lower
    _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
except ImportError:
    _version_not_supported = True

if _version_not_supported:
    raise RuntimeError(
        f'The grpc package installed is at version {GRPC_VERSION},'
        + ' but the generated code in gateway_pb2_grpc.py depends on'
        + f' grpcio>={GRPC_GENERATED_VERSION}.'
        + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
        + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
    )


class AgentGatewayStub(object):
    """Bidirectional streaming service for agent communication
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.AgentStream = channel.stream_stream(
                '/gateway.AgentGateway/AgentStream',
                request_serializer=gateway__pb2.ClientMessage.SerializeToString,
                response_deserializer=gateway__pb2.ServerMessage.FromString,
                _registered_method=True)


class AgentGatewayServicer(object):
    """Bidirectional streaming service for agent communication
    """

    def AgentStream(self, request_iterator, context):
        """Main bidirectional stream for all agent communication
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_AgentGatewayServicer_to_server(servicer, server):
    rpc_method_handlers = {
            'AgentStream': grpc.stream_stream_rpc_method_handler(
                    servicer.AgentStream,
                    request_deserializer=gateway__pb2.ClientMessage.FromString,
                    response_serializer=gateway__pb2.ServerMessage.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'gateway.AgentGateway', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
    server.add_registered_method_handlers('gateway.AgentGateway', rpc_method_handlers)


# This class is part of an EXPERIMENTAL API.
class AgentGateway(object):
    """Bidirectional streaming service for agent communication
    """

    @staticmethod
    def AgentStream(request_iterator,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.stream_stream(
            request_iterator,
            target,
            '/gateway.AgentGateway/AgentStream',
            gateway__pb2.ClientMessage.SerializeToString,
            gateway__pb2.ServerMessage.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)
|