agentic-fabriq-sdk 0.1.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of agentic-fabriq-sdk might be problematic.
- af_sdk/__init__.py +55 -0
- af_sdk/auth/__init__.py +31 -0
- af_sdk/auth/dpop.py +43 -0
- af_sdk/auth/oauth.py +247 -0
- af_sdk/auth/token_cache.py +318 -0
- af_sdk/connectors/__init__.py +23 -0
- af_sdk/connectors/base.py +231 -0
- af_sdk/connectors/registry.py +262 -0
- af_sdk/dx/__init__.py +12 -0
- af_sdk/dx/decorators.py +40 -0
- af_sdk/dx/runtime.py +170 -0
- af_sdk/events.py +699 -0
- af_sdk/exceptions.py +140 -0
- af_sdk/fabriq_client.py +198 -0
- af_sdk/models/__init__.py +47 -0
- af_sdk/models/audit.py +44 -0
- af_sdk/models/types.py +242 -0
- af_sdk/py.typed +0 -0
- af_sdk/transport/__init__.py +7 -0
- af_sdk/transport/http.py +366 -0
- af_sdk/vault.py +500 -0
- agentic_fabriq_sdk-0.1.3.dist-info/METADATA +81 -0
- agentic_fabriq_sdk-0.1.3.dist-info/RECORD +24 -0
- agentic_fabriq_sdk-0.1.3.dist-info/WHEEL +4 -0
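For orientation: the distribution is named agentic-fabriq-sdk, while the import package is af_sdk (per the RECORD and file list above). A minimal, hypothetical install-and-import sketch, assuming the wheel is published under its normalized distribution name:

# Hypothetical install command; the hosting registry is not identified on this page.
#   pip install agentic-fabriq-sdk==0.1.3
# Import names below are taken from the file list above.
from af_sdk.events import EventStreamManager, EventPublisher, EventSubscriber
from af_sdk.exceptions import EventError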
af_sdk/events.py
ADDED
@@ -0,0 +1,699 @@
"""
NATS JetStream Integration for Agentic Fabric
============================================

This module provides integration with NATS JetStream for event streaming and async operations.
"""

import asyncio
import json
import logging
from datetime import datetime
from typing import Dict, List, Optional, Any, Callable, Awaitable, Union
from uuid import uuid4

import nats
from nats.aio.client import Client as NATSClient
from nats.js import JetStreamContext
from nats.js.api import StreamInfo
from pydantic import BaseModel, Field

from .exceptions import EventError

logger = logging.getLogger(__name__)


class EventMetadata(BaseModel):
    """Event metadata model"""
    event_id: str = Field(default_factory=lambda: str(uuid4()))
    timestamp: datetime = Field(default_factory=datetime.utcnow)
    source: str
    version: str = "1.0"
    correlation_id: Optional[str] = None
    tenant_id: Optional[str] = None
    user_id: Optional[str] = None
    trace_id: Optional[str] = None


class AgentEvent(BaseModel):
    """Agent lifecycle event"""
    event_type: str  # registered, updated, deleted, invoked
    agent_id: str
    agent_name: str
    agent_type: str
    tenant_id: str
    metadata: EventMetadata
    payload: Dict[str, Any] = Field(default_factory=dict)


class ToolEvent(BaseModel):
    """Tool lifecycle event"""
    event_type: str  # registered, updated, deleted, invoked
    tool_id: str
    tool_name: str
    tool_type: str
    tenant_id: str
    metadata: EventMetadata
    payload: Dict[str, Any] = Field(default_factory=dict)


class InvocationEvent(BaseModel):
    """Agent/Tool invocation event"""
    event_type: str  # started, completed, failed
    invocation_id: str
    target_type: str  # agent, tool
    target_id: str
    tenant_id: str
    user_id: str
    duration_ms: Optional[int] = None
    error: Optional[str] = None
    metadata: EventMetadata
    payload: Dict[str, Any] = Field(default_factory=dict)


class SecretEvent(BaseModel):
    """Secret management event"""
    event_type: str  # created, updated, deleted, rotated
    secret_path: str
    tenant_id: str
    user_id: str
    metadata: EventMetadata
    payload: Dict[str, Any] = Field(default_factory=dict)


Event = Union[AgentEvent, ToolEvent, InvocationEvent, SecretEvent]


class EventStreamConfig(BaseModel):
    """Event stream configuration"""
    name: str
    subjects: List[str]
    description: str = ""
    max_msgs: int = 1000000
    max_age: int = 86400  # 24 hours in seconds
    storage: str = "file"  # file or memory
    replicas: int = 1
    retention: str = "limits"  # limits, interest, workqueue


class EventConsumerConfig(BaseModel):
    """Event consumer configuration"""
    name: str
    stream: str
    filter_subject: Optional[str] = None
    deliver_policy: str = "all"  # all, last, new
    ack_policy: str = "explicit"  # explicit, none, all
    max_deliver: int = 5
    replay_policy: str = "instant"  # instant, original
    durable: bool = True


class EventPublisher:
    """Event publisher for NATS JetStream"""

    def __init__(self, client: NATSClient, js: JetStreamContext):
        """Initialize event publisher"""
        self.client = client
        self.js = js

    async def publish_agent_event(
        self,
        event_type: str,
        agent_id: str,
        agent_name: str,
        agent_type: str,
        tenant_id: str,
        source: str,
        payload: Optional[Dict] = None,
        correlation_id: Optional[str] = None
    ):
        """Publish agent lifecycle event"""
        metadata = EventMetadata(
            source=source,
            correlation_id=correlation_id,
            tenant_id=tenant_id
        )

        event = AgentEvent(
            event_type=event_type,
            agent_id=agent_id,
            agent_name=agent_name,
            agent_type=agent_type,
            tenant_id=tenant_id,
            metadata=metadata,
            payload=payload or {}
        )

        subject = f"agents.{event_type}.{tenant_id}"
        await self._publish_event(subject, event)

    async def publish_tool_event(
        self,
        event_type: str,
        tool_id: str,
        tool_name: str,
        tool_type: str,
        tenant_id: str,
        source: str,
        payload: Optional[Dict] = None,
        correlation_id: Optional[str] = None
    ):
        """Publish tool lifecycle event"""
        metadata = EventMetadata(
            source=source,
            correlation_id=correlation_id,
            tenant_id=tenant_id
        )

        event = ToolEvent(
            event_type=event_type,
            tool_id=tool_id,
            tool_name=tool_name,
            tool_type=tool_type,
            tenant_id=tenant_id,
            metadata=metadata,
            payload=payload or {}
        )

        subject = f"tools.{event_type}.{tenant_id}"
        await self._publish_event(subject, event)

    async def publish_invocation_event(
        self,
        event_type: str,
        invocation_id: str,
        target_type: str,
        target_id: str,
        tenant_id: str,
        user_id: str,
        source: str,
        duration_ms: Optional[int] = None,
        error: Optional[str] = None,
        payload: Optional[Dict] = None,
        correlation_id: Optional[str] = None
    ):
        """Publish invocation event"""
        metadata = EventMetadata(
            source=source,
            correlation_id=correlation_id,
            tenant_id=tenant_id,
            user_id=user_id
        )

        event = InvocationEvent(
            event_type=event_type,
            invocation_id=invocation_id,
            target_type=target_type,
            target_id=target_id,
            tenant_id=tenant_id,
            user_id=user_id,
            duration_ms=duration_ms,
            error=error,
            metadata=metadata,
            payload=payload or {}
        )

        subject = f"invocations.{event_type}.{tenant_id}"
        await self._publish_event(subject, event)

    async def publish_secret_event(
        self,
        event_type: str,
        secret_path: str,
        tenant_id: str,
        user_id: str,
        source: str,
        payload: Optional[Dict] = None,
        correlation_id: Optional[str] = None
    ):
        """Publish secret management event"""
        metadata = EventMetadata(
            source=source,
            correlation_id=correlation_id,
            tenant_id=tenant_id,
            user_id=user_id
        )

        event = SecretEvent(
            event_type=event_type,
            secret_path=secret_path,
            tenant_id=tenant_id,
            user_id=user_id,
            metadata=metadata,
            payload=payload or {}
        )

        subject = f"secrets.{event_type}.{tenant_id}"
        await self._publish_event(subject, event)

    async def _publish_event(self, subject: str, event: Event):
        """Publish event to NATS JetStream"""
        try:
            message_data = event.model_dump_json()

            # Publish to JetStream
            await self.js.publish(
                subject=subject,
                payload=message_data.encode(),
                headers={
                    "event_id": event.metadata.event_id,
                    "timestamp": event.metadata.timestamp.isoformat(),
                    "source": event.metadata.source,
                    "version": event.metadata.version,
                    "tenant_id": event.metadata.tenant_id or "",
                    "user_id": event.metadata.user_id or "",
                    "correlation_id": event.metadata.correlation_id or "",
                }
            )

            logger.info(f"Published event {event.metadata.event_id} to {subject}")

        except Exception as e:
            logger.error(f"Failed to publish event: {e}")
            raise EventError(f"Failed to publish event: {e}")


class EventSubscriber:
    """Event subscriber for NATS JetStream"""

    def __init__(self, client: NATSClient, js: JetStreamContext):
        """Initialize event subscriber"""
        self.client = client
        self.js = js
        self._subscriptions: Dict[str, Any] = {}

    async def subscribe_to_agent_events(
        self,
        handler: Callable[[AgentEvent], Awaitable[None]],
        tenant_id: Optional[str] = None,
        event_type: Optional[str] = None,
        consumer_name: str = "agent-events-consumer"
    ):
        """Subscribe to agent events"""
        subject = "agents.*"
        if tenant_id:
            subject += f".{tenant_id}"
        if event_type:
            subject = f"agents.{event_type}.*"

        await self._subscribe(subject, handler, consumer_name, AgentEvent)

    async def subscribe_to_tool_events(
        self,
        handler: Callable[[ToolEvent], Awaitable[None]],
        tenant_id: Optional[str] = None,
        event_type: Optional[str] = None,
        consumer_name: str = "tool-events-consumer"
    ):
        """Subscribe to tool events"""
        subject = "tools.*"
        if tenant_id:
            subject += f".{tenant_id}"
        if event_type:
            subject = f"tools.{event_type}.*"

        await self._subscribe(subject, handler, consumer_name, ToolEvent)

    async def subscribe_to_invocation_events(
        self,
        handler: Callable[[InvocationEvent], Awaitable[None]],
        tenant_id: Optional[str] = None,
        event_type: Optional[str] = None,
        consumer_name: str = "invocation-events-consumer"
    ):
        """Subscribe to invocation events"""
        subject = "invocations.*"
        if tenant_id:
            subject += f".{tenant_id}"
        if event_type:
            subject = f"invocations.{event_type}.*"

        await self._subscribe(subject, handler, consumer_name, InvocationEvent)

    async def subscribe_to_secret_events(
        self,
        handler: Callable[[SecretEvent], Awaitable[None]],
        tenant_id: Optional[str] = None,
        event_type: Optional[str] = None,
        consumer_name: str = "secret-events-consumer"
    ):
        """Subscribe to secret events"""
        subject = "secrets.*"
        if tenant_id:
            subject += f".{tenant_id}"
        if event_type:
            subject = f"secrets.{event_type}.*"

        await self._subscribe(subject, handler, consumer_name, SecretEvent)

    async def _subscribe(
        self,
        subject: str,
        handler: Callable,
        consumer_name: str,
        event_class: type
    ):
        """Subscribe to events with given handler"""
        try:
            async def message_handler(msg):
                try:
                    # Parse event data
                    event_data = json.loads(msg.data.decode())
                    event = event_class.model_validate(event_data)

                    # Call handler
                    await handler(event)

                    # Acknowledge message
                    await msg.ack()

                except Exception as e:
                    logger.error(f"Error processing event: {e}")
                    await msg.nak()

            # Subscribe to the subject
            subscription = await self.js.subscribe(
                subject=subject,
                cb=message_handler,
                durable=consumer_name,
                config=nats.js.api.ConsumerConfig(
                    durable_name=consumer_name,
                    ack_policy=nats.js.api.AckPolicy.EXPLICIT,
                    max_deliver=5,
                    deliver_policy=nats.js.api.DeliverPolicy.ALL,
                )
            )

            self._subscriptions[consumer_name] = subscription
            logger.info(f"Subscribed to {subject} with consumer {consumer_name}")

        except Exception as e:
            logger.error(f"Failed to subscribe to {subject}: {e}")
            raise EventError(f"Failed to subscribe to {subject}: {e}")

    async def unsubscribe(self, consumer_name: str):
        """Unsubscribe from events"""
        if consumer_name in self._subscriptions:
            await self._subscriptions[consumer_name].unsubscribe()
            del self._subscriptions[consumer_name]
            logger.info(f"Unsubscribed consumer {consumer_name}")


class EventStreamManager:
    """NATS JetStream manager for event streaming"""

    def __init__(self, nats_url: str = "nats://localhost:4222", publish_timeout: float = 5.0):
        """Initialize event stream manager

        Args:
            nats_url: NATS server URL
            publish_timeout: Timeout in seconds for publish operations (default: 5.0)
        """
        self.nats_url = nats_url
        self.publish_timeout = publish_timeout
        self.client: Optional[NATSClient] = None
        self.js: Optional[JetStreamContext] = None
        self.publisher: Optional[EventPublisher] = None
        self.subscriber: Optional[EventSubscriber] = None

    async def connect(self):
        """Connect to NATS JetStream"""
        try:
            # Connect with longer timeout and reconnect settings
            self.client = await nats.connect(
                self.nats_url,
                connect_timeout=10,
                reconnect_time_wait=2,
                max_reconnect_attempts=5,
            )
            # jetstream() may be async in mocks; support both
            js_obj = self.client.jetstream()
            self.js = await js_obj if asyncio.iscoroutine(js_obj) else js_obj

            # Initialize publisher and subscriber
            self.publisher = EventPublisher(self.client, self.js)
            self.subscriber = EventSubscriber(self.client, self.js)

            logger.info(f"Connected to NATS JetStream at {self.nats_url}")

        except Exception as e:
            logger.error(f"Failed to connect to NATS: {e}")
            raise EventError(f"Failed to connect to NATS: {e}")

    def is_connected(self) -> bool:
        """Check if NATS client is connected"""
        return self.client is not None and self.client.is_connected and self.js is not None

    async def emit_audit_event(self, subject: str, payload: Dict[str, Any]) -> None:
        """Publish an audit event to the AUDIT stream. Subject should start with 'audit.'"""
        # Check connection health first
        if not self.is_connected():
            logger.warning("NATS not connected, skipping audit event publish")
            return

        if not subject.startswith("audit."):
            subject = f"audit.{subject}"
        try:
            # Ensure JSON-serializable payload (e.g., datetime)
            def _json_default(obj):
                if isinstance(obj, datetime):
                    return obj.isoformat()
                return str(obj)
            serialized = json.dumps(payload, default=_json_default)

            # Publish with timeout
            await asyncio.wait_for(
                self.js.publish(subject=subject, payload=serialized.encode()),
                timeout=self.publish_timeout
            )
        except asyncio.TimeoutError:
            logger.warning(f"Audit event publish timed out after {self.publish_timeout}s")
        except Exception as e:
            logger.warning(f"Failed to publish audit event: {e}")
            # Don't raise - audit is best-effort

    async def disconnect(self):
        """Disconnect from NATS JetStream"""
        if self.client:
            await self.client.close()
            self.client = None
            self.js = None
            self.publisher = None
            self.subscriber = None
            logger.info("Disconnected from NATS JetStream")

    async def create_stream(self, config: EventStreamConfig):
        """Create or update a JetStream stream"""
        try:
            if not self.js:
                raise EventError("Not connected to NATS JetStream")

            stream_config = nats.js.api.StreamConfig(
                name=config.name,
                subjects=config.subjects,
                description=config.description,
                max_msgs=config.max_msgs,
                max_age=config.max_age,
                storage=nats.js.api.StorageType.FILE if config.storage == "file" else nats.js.api.StorageType.MEMORY,
                num_replicas=config.replicas,
                retention=nats.js.api.RetentionPolicy.LIMITS,
            )

            # Create or update stream
            await self.js.add_stream(stream_config)
            logger.info(f"Created/updated stream: {config.name}")

        except Exception as e:
            logger.error(f"Failed to create stream {config.name}: {e}")
            raise EventError(f"Failed to create stream {config.name}: {e}")

    async def delete_stream(self, stream_name: str):
        """Delete a JetStream stream"""
        try:
            if not self.js:
                raise EventError("Not connected to NATS JetStream")

            await self.js.delete_stream(stream_name)
            logger.info(f"Deleted stream: {stream_name}")

        except Exception as e:
            logger.error(f"Failed to delete stream {stream_name}: {e}")
            raise EventError(f"Failed to delete stream {stream_name}: {e}")

    async def list_streams(self) -> List[StreamInfo]:
        """List all JetStream streams"""
        try:
            if not self.js:
                raise EventError("Not connected to NATS JetStream")

            # Use the correct method to get stream info
            stream_iterator = await self.js.streams_info_iterator()
            stream_dicts = stream_iterator.streams

            # Convert dict responses to StreamInfo objects if needed
            streams = []
            for stream_dict in stream_dicts:
                if hasattr(stream_dict, 'config'):
                    # Already a StreamInfo object
                    streams.append(stream_dict)
                else:
                    # Convert dict to StreamInfo using NATS StreamInfo.from_response
                    from nats.js.api import StreamInfo
                    stream_info = StreamInfo.from_response(stream_dict)
                    streams.append(stream_info)

            return streams

        except Exception as e:
            logger.error(f"Failed to list streams: {e}")
            raise EventError(f"Failed to list streams: {e}")

    async def get_stream_info(self, stream_name: str) -> StreamInfo:
        """Get information about a stream"""
        try:
            if not self.js:
                raise EventError("Not connected to NATS JetStream")

            return await self.js.stream_info(stream_name)

        except Exception as e:
            logger.error(f"Failed to get stream info for {stream_name}: {e}")
            raise EventError(f"Failed to get stream info for {stream_name}: {e}")

    async def setup_default_streams(self):
        """Set up default event streams"""
        default_streams = [
            EventStreamConfig(
                name="AGENTS",
                subjects=["agents.>"],
                description="Agent lifecycle events",
                max_msgs=1000000,
                max_age=604800,  # 7 days
            ),
            EventStreamConfig(
                name="TOOLS",
                subjects=["tools.>"],
                description="Tool lifecycle events",
                max_msgs=1000000,
                max_age=604800,  # 7 days
            ),
            EventStreamConfig(
                name="INVOCATIONS",
                subjects=["invocations.>"],
                description="Agent and tool invocation events",
                max_msgs=5000000,
                max_age=2592000,  # 30 days
            ),
            EventStreamConfig(
                name="SECRETS",
                subjects=["secrets.>"],
                description="Secret management events",
                max_msgs=100000,
                max_age=7776000,  # 90 days
            ),
        ]

        for stream_config in default_streams:
            await self.create_stream(stream_config)

        logger.info("Set up default event streams")

    async def setup_audit_stream(self) -> None:
        """Create AUDIT stream for audit logging events."""
        if not self.js:
            raise EventError("Not connected to NATS JetStream")

        audit_stream = EventStreamConfig(
            name="AUDIT",
            subjects=["audit.>"],
            description="Audit logging events",
            max_msgs=10_000_000,
            max_age=90 * 24 * 3600,  # 90 days
            storage="file",
            replicas=1,
        )
        await self.create_stream(audit_stream)

    async def __aenter__(self):
        """Async context manager entry"""
        await self.connect()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Async context manager exit"""
        await self.disconnect()


# Convenience functions
async def setup_event_streaming(nats_url: str = "nats://localhost:4222") -> EventStreamManager:
    """Set up event streaming with default configuration"""
    manager = EventStreamManager(nats_url)
    await manager.connect()
    await manager.setup_default_streams()
    return manager


async def publish_agent_registered(
    publisher: EventPublisher,
    agent_id: str,
    agent_name: str,
    agent_type: str,
    tenant_id: str,
    payload: Optional[Dict] = None
):
    """Convenience function to publish agent registered event"""
    await publisher.publish_agent_event(
        event_type="registered",
        agent_id=agent_id,
        agent_name=agent_name,
        agent_type=agent_type,
        tenant_id=tenant_id,
        source="af_gateway",
        payload=payload
    )


async def publish_invocation_started(
    publisher: EventPublisher,
    invocation_id: str,
    target_type: str,
    target_id: str,
    tenant_id: str,
    user_id: str,
    payload: Optional[Dict] = None
):
    """Convenience function to publish invocation started event"""
    await publisher.publish_invocation_event(
        event_type="started",
        invocation_id=invocation_id,
        target_type=target_type,
        target_id=target_id,
        tenant_id=tenant_id,
        user_id=user_id,
        source="af_gateway",
        payload=payload
    )


async def publish_invocation_completed(
    publisher: EventPublisher,
    invocation_id: str,
    target_type: str,
    target_id: str,
    tenant_id: str,
    user_id: str,
    duration_ms: int,
    payload: Optional[Dict] = None
):
    """Convenience function to publish invocation completed event"""
    await publisher.publish_invocation_event(
        event_type="completed",
        invocation_id=invocation_id,
        target_type=target_type,
        target_id=target_id,
        tenant_id=tenant_id,
        user_id=user_id,
        source="af_gateway",
        duration_ms=duration_ms,
        payload=payload
    )