django-agent-runtime 0.3.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- django_agent_runtime/__init__.py +25 -0
- django_agent_runtime/admin.py +155 -0
- django_agent_runtime/api/__init__.py +26 -0
- django_agent_runtime/api/permissions.py +109 -0
- django_agent_runtime/api/serializers.py +114 -0
- django_agent_runtime/api/views.py +472 -0
- django_agent_runtime/apps.py +26 -0
- django_agent_runtime/conf.py +241 -0
- django_agent_runtime/examples/__init__.py +10 -0
- django_agent_runtime/examples/langgraph_adapter.py +164 -0
- django_agent_runtime/examples/langgraph_tools.py +179 -0
- django_agent_runtime/examples/simple_chat.py +69 -0
- django_agent_runtime/examples/tool_agent.py +157 -0
- django_agent_runtime/management/__init__.py +2 -0
- django_agent_runtime/management/commands/__init__.py +2 -0
- django_agent_runtime/management/commands/runagent.py +419 -0
- django_agent_runtime/migrations/0001_initial.py +117 -0
- django_agent_runtime/migrations/0002_persistence_models.py +129 -0
- django_agent_runtime/migrations/0003_persistenceconversation_active_branch_id_and_more.py +212 -0
- django_agent_runtime/migrations/0004_add_anonymous_session_id.py +18 -0
- django_agent_runtime/migrations/__init__.py +2 -0
- django_agent_runtime/models/__init__.py +54 -0
- django_agent_runtime/models/base.py +450 -0
- django_agent_runtime/models/concrete.py +146 -0
- django_agent_runtime/persistence/__init__.py +60 -0
- django_agent_runtime/persistence/helpers.py +148 -0
- django_agent_runtime/persistence/models.py +506 -0
- django_agent_runtime/persistence/stores.py +1191 -0
- django_agent_runtime/runtime/__init__.py +23 -0
- django_agent_runtime/runtime/events/__init__.py +65 -0
- django_agent_runtime/runtime/events/base.py +135 -0
- django_agent_runtime/runtime/events/db.py +129 -0
- django_agent_runtime/runtime/events/redis.py +228 -0
- django_agent_runtime/runtime/events/sync.py +140 -0
- django_agent_runtime/runtime/interfaces.py +475 -0
- django_agent_runtime/runtime/llm/__init__.py +91 -0
- django_agent_runtime/runtime/llm/anthropic.py +249 -0
- django_agent_runtime/runtime/llm/litellm_adapter.py +173 -0
- django_agent_runtime/runtime/llm/openai.py +230 -0
- django_agent_runtime/runtime/queue/__init__.py +75 -0
- django_agent_runtime/runtime/queue/base.py +158 -0
- django_agent_runtime/runtime/queue/postgres.py +248 -0
- django_agent_runtime/runtime/queue/redis_streams.py +336 -0
- django_agent_runtime/runtime/queue/sync.py +277 -0
- django_agent_runtime/runtime/registry.py +186 -0
- django_agent_runtime/runtime/runner.py +540 -0
- django_agent_runtime/runtime/tracing/__init__.py +48 -0
- django_agent_runtime/runtime/tracing/langfuse.py +117 -0
- django_agent_runtime/runtime/tracing/noop.py +36 -0
- django_agent_runtime/urls.py +39 -0
- django_agent_runtime-0.3.6.dist-info/METADATA +723 -0
- django_agent_runtime-0.3.6.dist-info/RECORD +55 -0
- django_agent_runtime-0.3.6.dist-info/WHEEL +5 -0
- django_agent_runtime-0.3.6.dist-info/licenses/LICENSE +22 -0
- django_agent_runtime-0.3.6.dist-info/top_level.txt +1 -0
django_agent_runtime/runtime/__init__.py
@@ -0,0 +1,23 @@
+"""
+Runtime module - core execution engine for agent runs.
+
+This module contains:
+- interfaces: Public API contracts (AgentRuntime, RunContext, etc.)
+- registry: Plugin discovery and registration
+- runner: Main execution loop with leasing, retries, cancellation
+"""
+
+from django_agent_runtime.runtime.interfaces import (
+    AgentRuntime,
+    RunContext,
+    RunResult,
+    EventType,
+)
+
+__all__ = [
+    "AgentRuntime",
+    "RunContext",
+    "RunResult",
+    "EventType",
+]
+
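This __init__.py re-exports the runtime's public contracts from the package root, so downstream code does not need to import from interfaces directly. A minimal import sketch; only the four re-exported names are assumed here, since the interfaces module itself is not shown in this hunk:

    # Sketch: importing the public contracts re-exported above.
    # All four names originate in django_agent_runtime.runtime.interfaces.
    from django_agent_runtime.runtime import (
        AgentRuntime,
        RunContext,
        RunResult,
        EventType,
    )
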
django_agent_runtime/runtime/events/__init__.py
@@ -0,0 +1,65 @@
+"""
+Event bus for streaming agent events to UI.
+
+Provides:
+- EventBus: Abstract async interface for event publishing/subscribing
+- SyncEventBus: Abstract sync interface for event publishing
+- DatabaseEventBus: Async database-backed event bus
+- SyncDatabaseEventBus: Sync database-backed event bus
+- RedisEventBus: Uses Redis pub/sub for real-time streaming
+"""
+
+from django_agent_runtime.runtime.events.base import EventBus, Event
+from django_agent_runtime.runtime.events.sync import SyncEventBus, SyncDatabaseEventBus
+
+__all__ = [
+    # Async
+    "EventBus",
+    "Event",
+    # Sync
+    "SyncEventBus",
+    "SyncDatabaseEventBus",
+    # Factory functions
+    "get_event_bus",
+    "get_sync_event_bus",
+]
+
+
+def get_event_bus(backend: str = "db", **kwargs) -> EventBus:
+    """
+    Factory function to get an async event bus instance.
+
+    Args:
+        backend: "db" or "redis"
+        **kwargs: Backend-specific configuration
+
+    Returns:
+        EventBus instance
+    """
+    if backend == "db":
+        from django_agent_runtime.runtime.events.db import DatabaseEventBus
+
+        return DatabaseEventBus(**kwargs)
+    elif backend == "redis":
+        from django_agent_runtime.runtime.events.redis import RedisEventBus
+
+        return RedisEventBus(**kwargs)
+    else:
+        raise ValueError(f"Unknown event bus backend: {backend}")
+
+
+def get_sync_event_bus(backend: str = "db", **kwargs) -> SyncEventBus:
+    """
+    Factory function to get a synchronous event bus instance.
+
+    Args:
+        backend: "db" (only db supported for sync)
+        **kwargs: Backend-specific configuration
+
+    Returns:
+        SyncEventBus instance
+    """
+    if backend == "db":
+        return SyncDatabaseEventBus(**kwargs)
+    else:
+        raise ValueError(f"Unknown or unsupported sync event bus backend: {backend}")
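Both factories simply forward **kwargs to the chosen backend class, so callers select a backend by name and pass that backend's constructor arguments. A usage sketch; the Redis URL is illustrative, and the no-argument SyncDatabaseEventBus call assumes its constructor needs no required parameters (its definition is not shown in this diff):

    # Sketch: selecting event bus backends via the factory helpers.
    from django_agent_runtime.runtime.events import get_event_bus, get_sync_event_bus

    db_bus = get_event_bus("db", poll_interval=0.25)     # DatabaseEventBus
    redis_bus = get_event_bus(                           # RedisEventBus
        "redis",
        redis_url="redis://localhost:6379/0",            # illustrative URL
        persist_to_db=True,
    )
    sync_bus = get_sync_event_bus("db")                  # SyncDatabaseEventBus

    try:
        get_event_bus("kafka")
    except ValueError as exc:
        print(exc)  # Unknown event bus backend: kafka
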
django_agent_runtime/runtime/events/base.py
@@ -0,0 +1,135 @@
+"""
+Abstract base class for event bus implementations.
+"""
+
+from abc import ABC, abstractmethod
+from dataclasses import dataclass, field
+from datetime import datetime
+from typing import AsyncIterator, Optional
+from uuid import UUID
+
+
+@dataclass
+class Event:
+    """
+    An event emitted by an agent runtime.
+
+    Events are the communication channel between workers and UI.
+
+    Visibility:
+    - ui_visible: Whether this event should be displayed in UI
+    - visibility_level: "internal", "debug", or "user"
+
+    The visibility is determined by the event type and configuration.
+    See conf.py EVENT_VISIBILITY for default mappings.
+    """
+
+    run_id: UUID
+    seq: int
+    event_type: str
+    payload: dict = field(default_factory=dict)
+    timestamp: datetime = field(default_factory=datetime.utcnow)
+    visibility_level: str = field(default="user")  # "internal", "debug", "user"
+    ui_visible: bool = field(default=True)  # Computed based on visibility_level and DEBUG_MODE
+
+    def to_dict(self) -> dict:
+        """Convert to dictionary for serialization."""
+        return {
+            "run_id": str(self.run_id),
+            "seq": self.seq,
+            "type": self.event_type,
+            "payload": self.payload,
+            "ts": self.timestamp.isoformat(),
+            "visibility_level": self.visibility_level,
+            "ui_visible": self.ui_visible,
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict) -> "Event":
+        """Create from dictionary."""
+        return cls(
+            run_id=UUID(data["run_id"]),
+            seq=data["seq"],
+            event_type=data["type"],
+            payload=data.get("payload", {}),
+            timestamp=datetime.fromisoformat(data["ts"]),
+            visibility_level=data.get("visibility_level", "user"),
+            ui_visible=data.get("ui_visible", True),
+        )
+
+
+class EventBus(ABC):
+    """
+    Abstract interface for event bus implementations.
+
+    Event buses handle:
+    - Publishing events from workers
+    - Subscribing to events for streaming to UI
+    - Persisting events for replay
+    """
+
+    @abstractmethod
+    async def publish(self, event: Event) -> None:
+        """
+        Publish an event.
+
+        Args:
+            event: Event to publish
+        """
+        ...
+
+    @abstractmethod
+    async def subscribe(
+        self,
+        run_id: UUID,
+        from_seq: int = 0,
+    ) -> AsyncIterator[Event]:
+        """
+        Subscribe to events for a run.
+
+        Args:
+            run_id: Run to subscribe to
+            from_seq: Start from this sequence number (for replay)
+
+        Yields:
+            Events as they arrive
+        """
+        ...
+
+    @abstractmethod
+    async def get_events(
+        self,
+        run_id: UUID,
+        from_seq: int = 0,
+        to_seq: Optional[int] = None,
+    ) -> list[Event]:
+        """
+        Get historical events for a run.
+
+        Args:
+            run_id: Run to get events for
+            from_seq: Start sequence (inclusive)
+            to_seq: End sequence (inclusive), None for all
+
+        Returns:
+            List of events
+        """
+        ...
+
+    @abstractmethod
+    async def get_next_seq(self, run_id: UUID) -> int:
+        """
+        Get the next sequence number for a run.
+
+        Args:
+            run_id: Run to get sequence for
+
+        Returns:
+            Next sequence number (0 if no events)
+        """
+        ...
+
+    async def close(self) -> None:
+        """Close any connections. Override if needed."""
+        pass
+
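Event's symmetric to_dict/from_dict pair is what makes the dataclass transportable; the Redis bus below JSON-encodes exactly this dict into a stream entry. A small round-trip sketch; the event type string is illustrative (it appears in the Redis bus), and the payload is invented for the example:

    # Sketch: serialize an Event to JSON and rebuild it; keys follow to_dict().
    import json
    from uuid import uuid4
    from django_agent_runtime.runtime.events.base import Event

    event = Event(
        run_id=uuid4(),
        seq=0,
        event_type="assistant.delta",   # illustrative type; see conf.py EVENT_VISIBILITY
        payload={"text": "Hello"},
    )

    wire = json.dumps(event.to_dict())            # {"run_id": ..., "seq": 0, "type": ..., "ts": ...}
    restored = Event.from_dict(json.loads(wire))
    assert restored.run_id == event.run_id and restored.payload == event.payload
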
django_agent_runtime/runtime/events/db.py
@@ -0,0 +1,129 @@
+"""
+Database-backed event bus.
+
+Stores all events in the database. Simple but higher latency for streaming.
+Good for development and low-volume production.
+"""
+
+import asyncio
+from datetime import datetime, timezone
+from typing import AsyncIterator, Optional
+from uuid import UUID
+
+from asgiref.sync import sync_to_async
+from django.db.models import Max
+
+from django_agent_runtime.models import AgentEvent, AgentRun
+from django_agent_runtime.runtime.events.base import EventBus, Event
+
+
+class DatabaseEventBus(EventBus):
+    """
+    Database-backed event bus implementation.
+
+    All events are persisted to the AgentEvent table.
+    Streaming is implemented via polling.
+    """
+
+    def __init__(self, poll_interval: float = 0.5):
+        """
+        Initialize database event bus.
+
+        Args:
+            poll_interval: Seconds between polls when streaming
+        """
+        self.poll_interval = poll_interval
+
+    async def publish(self, event: Event) -> None:
+        """Publish event to database."""
+
+        @sync_to_async
+        def _publish():
+            AgentEvent.objects.create(
+                run_id=event.run_id,
+                seq=event.seq,
+                event_type=event.event_type,
+                payload=event.payload,
+            )
+
+        await _publish()
+
+    async def subscribe(
+        self,
+        run_id: UUID,
+        from_seq: int = 0,
+    ) -> AsyncIterator[Event]:
+        """Subscribe to events via polling."""
+        current_seq = from_seq
+
+        while True:
+            # Get new events
+            events = await self.get_events(run_id, from_seq=current_seq)
+
+            for event in events:
+                yield event
+                current_seq = event.seq + 1
+
+            # Check if run is complete
+            if await self._is_run_complete(run_id):
+                break
+
+            # Poll interval
+            await asyncio.sleep(self.poll_interval)
+
+    @sync_to_async
+    def _is_run_complete(self, run_id: UUID) -> bool:
+        """Check if run is in terminal state."""
+        try:
+            run = AgentRun.objects.get(id=run_id)
+            return run.is_terminal
+        except AgentRun.DoesNotExist:
+            return True
+
+    async def get_events(
+        self,
+        run_id: UUID,
+        from_seq: int = 0,
+        to_seq: Optional[int] = None,
+    ) -> list[Event]:
+        """Get events from database."""
+
+        @sync_to_async
+        def _get():
+            queryset = AgentEvent.objects.filter(
+                run_id=run_id,
+                seq__gte=from_seq,
+            )
+
+            if to_seq is not None:
+                queryset = queryset.filter(seq__lte=to_seq)
+
+            return [
+                Event(
+                    run_id=e.run_id,
+                    seq=e.seq,
+                    event_type=e.event_type,
+                    payload=e.payload,
+                    timestamp=e.timestamp,
+                )
+                for e in queryset.order_by("seq")
+            ]
+
+        return await _get()
+
+    async def get_next_seq(self, run_id: UUID) -> int:
+        """Get next sequence number."""
+
+        @sync_to_async
+        def _get_next():
+            result = AgentEvent.objects.filter(run_id=run_id).aggregate(
+                max_seq=Max("seq")
+            )
+            max_seq = result["max_seq"]
+            # Note: can't use `max_seq or -1` because 0 is falsy!
+            if max_seq is None:
+                return 0
+            return max_seq + 1
+
+        return await _get_next()
+
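Because DatabaseEventBus needs nothing beyond the configured database, a worker and a consumer can share it directly; subscribe() replays stored events and keeps polling until the AgentRun row is terminal. A sketch under the assumptions that Django is configured, an AgentRun with this id exists, and the event type name is illustrative:

    # Sketch: publish one event, then tail the run until it reaches a terminal state.
    import asyncio
    from uuid import UUID
    from django_agent_runtime.runtime.events.base import Event
    from django_agent_runtime.runtime.events.db import DatabaseEventBus

    async def tail_run(run_id: UUID) -> None:
        bus = DatabaseEventBus(poll_interval=0.5)
        seq = await bus.get_next_seq(run_id)          # 0 for a run with no events yet
        await bus.publish(Event(run_id=run_id, seq=seq, event_type="run.started"))

        # Replays from seq 0, then polls every poll_interval seconds until terminal.
        async for event in bus.subscribe(run_id, from_seq=0):
            print(event.seq, event.event_type, event.payload)

    # asyncio.run(tail_run(some_run_id))  # call from an async-capable entry point
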
django_agent_runtime/runtime/events/redis.py
@@ -0,0 +1,228 @@
+"""
+Redis-backed event bus using pub/sub and streams.
+
+Real-time streaming with optional database persistence.
+Recommended for production with high event volume.
+"""
+
+import asyncio
+import json
+from datetime import datetime, timezone
+from typing import AsyncIterator, Optional
+from uuid import UUID
+
+from asgiref.sync import sync_to_async
+from django.db.models import Max
+
+from django_agent_runtime.models import AgentEvent, AgentRun
+from django_agent_runtime.runtime.events.base import EventBus, Event
+
+try:
+    import redis.asyncio as aioredis
+except ImportError:
+    aioredis = None
+
+
+class RedisEventBus(EventBus):
+    """
+    Redis-backed event bus implementation.
+
+    Uses Redis Streams for event storage and pub/sub for real-time notifications.
+    Optionally persists to database for durability.
+    """
+
+    STREAM_PREFIX = "agent_runtime:events:"
+    CHANNEL_PREFIX = "agent_runtime:notify:"
+
+    def __init__(
+        self,
+        redis_url: str,
+        persist_to_db: bool = True,
+        event_ttl_seconds: int = 3600 * 6,  # 6 hours
+        persist_token_deltas: bool = False,
+    ):
+        """
+        Initialize Redis event bus.
+
+        Args:
+            redis_url: Redis connection URL
+            persist_to_db: Whether to also persist events to database
+            event_ttl_seconds: TTL for events in Redis
+            persist_token_deltas: Whether to persist token delta events to DB
+        """
+        if aioredis is None:
+            raise ImportError("redis package is required for RedisEventBus")
+
+        self.redis_url = redis_url
+        self.persist_to_db = persist_to_db
+        self.event_ttl_seconds = event_ttl_seconds
+        self.persist_token_deltas = persist_token_deltas
+        self._redis: Optional[aioredis.Redis] = None
+
+    async def _get_redis(self) -> "aioredis.Redis":
+        """Get or create Redis connection."""
+        if self._redis is None:
+            self._redis = aioredis.from_url(self.redis_url)
+        return self._redis
+
+    def _stream_key(self, run_id: UUID) -> str:
+        """Get Redis stream key for a run."""
+        return f"{self.STREAM_PREFIX}{run_id}"
+
+    def _channel_key(self, run_id: UUID) -> str:
+        """Get Redis pub/sub channel for a run."""
+        return f"{self.CHANNEL_PREFIX}{run_id}"
+
+    async def publish(self, event: Event) -> None:
+        """Publish event to Redis and optionally database."""
+        redis = await self._get_redis()
+
+        # Add to stream
+        stream_key = self._stream_key(event.run_id)
+        await redis.xadd(
+            stream_key,
+            {"data": json.dumps(event.to_dict())},
+        )
+
+        # Set TTL on stream
+        await redis.expire(stream_key, self.event_ttl_seconds)
+
+        # Notify subscribers
+        channel_key = self._channel_key(event.run_id)
+        await redis.publish(channel_key, str(event.seq))
+
+        # Persist to database if configured
+        if self.persist_to_db:
+            # Skip token deltas unless configured
+            if event.event_type == "assistant.delta" and not self.persist_token_deltas:
+                return
+
+            await self._persist_to_db(event)
+
+    @sync_to_async
+    def _persist_to_db(self, event: Event) -> None:
+        """Persist event to database."""
+        AgentEvent.objects.create(
+            run_id=event.run_id,
+            seq=event.seq,
+            event_type=event.event_type,
+            payload=event.payload,
+        )
+
+    async def subscribe(
+        self,
+        run_id: UUID,
+        from_seq: int = 0,
+    ) -> AsyncIterator[Event]:
+        """Subscribe to events using pub/sub for notifications."""
+        redis = await self._get_redis()
+        pubsub = redis.pubsub()
+        channel_key = self._channel_key(run_id)
+
+        await pubsub.subscribe(channel_key)
+
+        try:
+            # First, get any existing events
+            events = await self.get_events(run_id, from_seq=from_seq)
+            current_seq = from_seq
+
+            for event in events:
+                yield event
+                current_seq = event.seq + 1
+
+            # Then listen for new events
+            while True:
+                # Check if run is complete
+                if await self._is_run_complete(run_id):
+                    # Get any final events
+                    final_events = await self.get_events(run_id, from_seq=current_seq)
+                    for event in final_events:
+                        yield event
+                    break
+
+                # Wait for notification with timeout
+                try:
+                    message = await asyncio.wait_for(
+                        pubsub.get_message(ignore_subscribe_messages=True),
+                        timeout=1.0,
+                    )
+                    if message:
+                        # Get new events
+                        new_events = await self.get_events(run_id, from_seq=current_seq)
+                        for event in new_events:
+                            yield event
+                            current_seq = event.seq + 1
+                except asyncio.TimeoutError:
+                    continue
+
+        finally:
+            await pubsub.unsubscribe(channel_key)
+            await pubsub.close()
+
+    @sync_to_async
+    def _is_run_complete(self, run_id: UUID) -> bool:
+        """Check if run is in terminal state."""
+        try:
+            run = AgentRun.objects.get(id=run_id)
+            return run.is_terminal
+        except AgentRun.DoesNotExist:
+            return True
+
+    async def get_events(
+        self,
+        run_id: UUID,
+        from_seq: int = 0,
+        to_seq: Optional[int] = None,
+    ) -> list[Event]:
+        """Get events from Redis stream."""
+        redis = await self._get_redis()
+        stream_key = self._stream_key(run_id)
+
+        # Read from stream
+        messages = await redis.xrange(stream_key)
+
+        events = []
+        for msg_id, data in messages:
+            event_data = json.loads(data[b"data"].decode())
+            event = Event.from_dict(event_data)
+
+            if event.seq < from_seq:
+                continue
+            if to_seq is not None and event.seq > to_seq:
+                continue
+
+            events.append(event)
+
+        return sorted(events, key=lambda e: e.seq)
+
+    async def get_next_seq(self, run_id: UUID) -> int:
+        """Get next sequence number from Redis or database."""
+        redis = await self._get_redis()
+        stream_key = self._stream_key(run_id)
+
+        # Check Redis stream
+        messages = await redis.xrevrange(stream_key, count=1)
+        if messages:
+            msg_id, data = messages[0]
+            event_data = json.loads(data[b"data"].decode())
+            return event_data["seq"] + 1
+
+        # Fall back to database
+        @sync_to_async
+        def _get_from_db():
+            result = AgentEvent.objects.filter(run_id=run_id).aggregate(
+                max_seq=Max("seq")
+            )
+            max_seq = result["max_seq"]
+            # Note: can't use `max_seq or -1` because 0 is falsy!
+            if max_seq is None:
+                return 0
+            return max_seq + 1
+
+        return await _get_from_db()
+
+    async def close(self) -> None:
+        """Close Redis connection."""
+        if self._redis:
+            await self._redis.close()
+            self._redis = None