agent-runtime-core 0.1.0 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agent_runtime/__init__.py +110 -0
- agent_runtime/config.py +172 -0
- agent_runtime/events/__init__.py +55 -0
- agent_runtime/events/base.py +86 -0
- agent_runtime/events/memory.py +89 -0
- agent_runtime/events/redis.py +185 -0
- agent_runtime/events/sqlite.py +168 -0
- agent_runtime/interfaces.py +390 -0
- agent_runtime/llm/__init__.py +83 -0
- agent_runtime/llm/anthropic.py +237 -0
- agent_runtime/llm/litellm_client.py +175 -0
- agent_runtime/llm/openai.py +220 -0
- agent_runtime/queue/__init__.py +55 -0
- agent_runtime/queue/base.py +167 -0
- agent_runtime/queue/memory.py +184 -0
- agent_runtime/queue/redis.py +453 -0
- agent_runtime/queue/sqlite.py +420 -0
- agent_runtime/registry.py +74 -0
- agent_runtime/runner.py +403 -0
- agent_runtime/state/__init__.py +53 -0
- agent_runtime/state/base.py +69 -0
- agent_runtime/state/memory.py +51 -0
- agent_runtime/state/redis.py +109 -0
- agent_runtime/state/sqlite.py +158 -0
- agent_runtime/tracing/__init__.py +47 -0
- agent_runtime/tracing/langfuse.py +119 -0
- agent_runtime/tracing/noop.py +34 -0
- agent_runtime_core-0.1.0.dist-info/METADATA +75 -0
- agent_runtime_core-0.1.0.dist-info/RECORD +31 -0
- agent_runtime_core-0.1.0.dist-info/WHEEL +4 -0
- agent_runtime_core-0.1.0.dist-info/licenses/LICENSE +21 -0
agent_runtime/__init__.py
ADDED

@@ -0,0 +1,110 @@
"""
agent_runtime - A standalone Python package for building AI agent systems.

This package provides:
- Core interfaces for agent runtimes
- Queue, event bus, and state store implementations
- LLM client abstractions
- Tracing and observability
- A runner for executing agent runs

Example usage:
    from agent_runtime import (
        AgentRuntime,
        RunContext,
        RunResult,
        Tool,
        configure,
    )

    # Configure the runtime
    configure(
        model_provider="openai",
        queue_backend="memory",
    )

    # Create a custom agent runtime
    class MyAgent(AgentRuntime):
        @property
        def key(self) -> str:
            return "my-agent"

        async def run(self, ctx: RunContext) -> RunResult:
            # Your agent logic here
            return RunResult(final_output={"message": "Hello!"})
"""

__version__ = "0.1.0"

# Core interfaces
from agent_runtime.interfaces import (
    AgentRuntime,
    EventType,
    ErrorInfo,
    LLMClient,
    LLMResponse,
    LLMStreamChunk,
    Message,
    RunContext,
    RunResult,
    Tool,
    ToolDefinition,
    ToolRegistry,
    TraceSink,
)

# Configuration
from agent_runtime.config import (
    RuntimeConfig,
    configure,
    get_config,
)

# Registry
from agent_runtime.registry import (
    register_runtime,
    get_runtime,
    list_runtimes,
    unregister_runtime,
    clear_registry,
)

# Runner
from agent_runtime.runner import (
    AgentRunner,
    RunnerConfig,
    RunContextImpl,
)

__all__ = [
    # Version
    "__version__",
    # Interfaces
    "AgentRuntime",
    "LLMClient",
    "LLMResponse",
    "LLMStreamChunk",
    "Message",
    "RunContext",
    "RunResult",
    "ToolRegistry",
    "Tool",
    "ToolDefinition",
    "TraceSink",
    "EventType",
    "ErrorInfo",
    # Configuration
    "RuntimeConfig",
    "configure",
    "get_config",
    # Registry
    "register_runtime",
    "get_runtime",
    "list_runtimes",
    "unregister_runtime",
    "clear_registry",
    # Runner
    "AgentRunner",
    "RunnerConfig",
    "RunContextImpl",
]
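For orientation, the sketch below ties the module docstring's example to the registry helpers exported above. It is illustrative only: register_runtime() is assumed to take a runtime instance and get_runtime() to look it up by its key string; the real signatures live in agent_runtime/registry.py, which is not reproduced in this excerpt.

    from agent_runtime import (
        AgentRuntime, RunContext, RunResult,
        configure, register_runtime, get_runtime, list_runtimes,
    )

    configure(model_provider="openai", queue_backend="memory")

    class EchoAgent(AgentRuntime):
        @property
        def key(self) -> str:
            return "echo"

        async def run(self, ctx: RunContext) -> RunResult:
            # Echo a fixed payload; a real agent would call the LLM client here
            return RunResult(final_output={"message": "Hello!"})

    # Assumed registry usage -- see agent_runtime/registry.py for the actual API
    register_runtime(EchoAgent())
    print(list_runtimes())
    agent = get_runtime("echo")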
agent_runtime/config.py
ADDED
@@ -0,0 +1,172 @@
"""
Configuration system for agent_runtime.

Supports:
- Programmatic configuration via configure()
- Environment variables
- Default values
"""

import os
from dataclasses import dataclass, field
from typing import Optional


@dataclass
class RuntimeConfig:
    """
    Configuration for the agent runtime.

    All settings can be overridden via environment variables with the
    AGENT_RUNTIME_ prefix (e.g., AGENT_RUNTIME_MODEL_PROVIDER).
    """

    # LLM provider
    model_provider: str = "openai"  # openai, anthropic, litellm
    default_model: str = "gpt-4o"

    # API keys (loaded from env if not set)
    openai_api_key: Optional[str] = None
    anthropic_api_key: Optional[str] = None

    # Queue backend
    queue_backend: str = "memory"  # memory, redis, sqlite

    # Event bus backend
    event_bus_backend: str = "memory"  # memory, redis, sqlite

    # State store backend
    state_store_backend: str = "memory"  # memory, redis, sqlite

    # Tracing backend
    tracing_backend: Optional[str] = None  # noop, langfuse

    # Redis settings
    redis_url: Optional[str] = None

    # SQLite settings
    sqlite_path: Optional[str] = None

    # Langfuse settings
    langfuse_public_key: Optional[str] = None
    langfuse_secret_key: Optional[str] = None
    langfuse_host: Optional[str] = None

    # Runner settings
    run_timeout_seconds: int = 300
    heartbeat_interval_seconds: int = 30
    lease_ttl_seconds: int = 60
    max_retries: int = 3
    retry_backoff_base: int = 2
    retry_backoff_max: int = 300

    def get_openai_api_key(self) -> Optional[str]:
        """Get OpenAI API key from config or environment."""
        return self.openai_api_key or os.environ.get("OPENAI_API_KEY")

    def get_anthropic_api_key(self) -> Optional[str]:
        """Get Anthropic API key from config or environment."""
        return self.anthropic_api_key or os.environ.get("ANTHROPIC_API_KEY")


# Global configuration instance
_config: Optional[RuntimeConfig] = None


def configure(**kwargs) -> RuntimeConfig:
    """
    Configure the agent runtime.

    Args:
        **kwargs: Configuration options (see RuntimeConfig)

    Returns:
        The configured RuntimeConfig instance

    Example:
        from agent_runtime import configure

        configure(
            model_provider="openai",
            openai_api_key="sk-...",
            queue_backend="redis",
            redis_url="redis://localhost:6379",
        )
    """
    global _config

    # Start with defaults
    config = RuntimeConfig()

    # Apply environment variables
    _apply_env_vars(config)

    # Apply explicit kwargs
    for key, value in kwargs.items():
        if hasattr(config, key):
            setattr(config, key, value)
        else:
            raise ValueError(f"Unknown configuration option: {key}")

    _config = config
    return config


def get_config() -> RuntimeConfig:
    """
    Get the current configuration.

    If not configured, returns a default configuration with env vars applied.

    Returns:
        RuntimeConfig instance
    """
    global _config

    if _config is None:
        _config = RuntimeConfig()
        _apply_env_vars(_config)

    return _config


def reset_config() -> None:
    """Reset configuration to defaults. Useful for testing."""
    global _config
    _config = None


def _apply_env_vars(config: RuntimeConfig) -> None:
    """Apply environment variables to config."""
    env_mapping = {
        "AGENT_RUNTIME_MODEL_PROVIDER": "model_provider",
        "AGENT_RUNTIME_DEFAULT_MODEL": "default_model",
        "AGENT_RUNTIME_QUEUE_BACKEND": "queue_backend",
        "AGENT_RUNTIME_EVENT_BUS_BACKEND": "event_bus_backend",
        "AGENT_RUNTIME_STATE_STORE_BACKEND": "state_store_backend",
        "AGENT_RUNTIME_TRACING_BACKEND": "tracing_backend",
        "AGENT_RUNTIME_REDIS_URL": "redis_url",
        "AGENT_RUNTIME_SQLITE_PATH": "sqlite_path",
        "AGENT_RUNTIME_LANGFUSE_PUBLIC_KEY": "langfuse_public_key",
        "AGENT_RUNTIME_LANGFUSE_SECRET_KEY": "langfuse_secret_key",
        "AGENT_RUNTIME_LANGFUSE_HOST": "langfuse_host",
    }

    int_fields = {
        "AGENT_RUNTIME_RUN_TIMEOUT_SECONDS": "run_timeout_seconds",
        "AGENT_RUNTIME_HEARTBEAT_INTERVAL_SECONDS": "heartbeat_interval_seconds",
        "AGENT_RUNTIME_LEASE_TTL_SECONDS": "lease_ttl_seconds",
        "AGENT_RUNTIME_MAX_RETRIES": "max_retries",
        "AGENT_RUNTIME_RETRY_BACKOFF_BASE": "retry_backoff_base",
        "AGENT_RUNTIME_RETRY_BACKOFF_MAX": "retry_backoff_max",
    }

    for env_var, attr in env_mapping.items():
        value = os.environ.get(env_var)
        if value is not None:
            setattr(config, attr, value)

    for env_var, attr in int_fields.items():
        value = os.environ.get(env_var)
        if value is not None:
            setattr(config, attr, int(value))
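To make the precedence above concrete (defaults, then AGENT_RUNTIME_* environment variables, then explicit keyword arguments), a short sketch using only the functions defined in this file:

    import os
    from agent_runtime.config import configure, get_config, reset_config

    # Environment variables with the AGENT_RUNTIME_ prefix override the defaults
    os.environ["AGENT_RUNTIME_QUEUE_BACKEND"] = "sqlite"
    os.environ["AGENT_RUNTIME_MAX_RETRIES"] = "5"

    cfg = get_config()                 # lazily builds defaults + env vars
    assert cfg.queue_backend == "sqlite"
    assert cfg.max_retries == 5        # integer fields are coerced via int()

    # Explicit kwargs win over the environment; unknown keys raise ValueError
    cfg = configure(queue_backend="redis", redis_url="redis://localhost:6379")
    assert cfg.queue_backend == "redis"

    reset_config()                     # back to the unconfigured state (handy in tests)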
agent_runtime/events/__init__.py
ADDED

@@ -0,0 +1,55 @@
"""
Event bus implementations for agent communication.

Provides:
- EventBus: Abstract interface
- Event: Event data structure
- InMemoryEventBus: For testing and simple use cases
- RedisEventBus: For production with pub/sub
- SQLiteEventBus: For persistent local storage
"""

from agent_runtime.events.base import EventBus, Event
from agent_runtime.events.memory import InMemoryEventBus

__all__ = [
    "EventBus",
    "Event",
    "InMemoryEventBus",
    "get_event_bus",
]


def get_event_bus(backend: str = None, **kwargs) -> EventBus:
    """
    Factory function to get an event bus.

    Args:
        backend: "memory", "redis", or "sqlite"
        **kwargs: Backend-specific configuration

    Returns:
        EventBus instance
    """
    from agent_runtime.config import get_config

    config = get_config()
    backend = backend or config.event_bus_backend

    if backend == "memory":
        return InMemoryEventBus()

    elif backend == "redis":
        from agent_runtime.events.redis import RedisEventBus
        url = kwargs.get("url") or config.redis_url
        if not url:
            raise ValueError("redis_url is required for redis event bus backend")
        return RedisEventBus(url=url, **kwargs)

    elif backend == "sqlite":
        from agent_runtime.events.sqlite import SQLiteEventBus
        path = kwargs.get("path") or config.sqlite_path or "agent_runtime.db"
        return SQLiteEventBus(path=path)

    else:
        raise ValueError(f"Unknown event bus backend: {backend}")
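A minimal sketch of the factory in use, relying only on what is shown in this file; the redis and sqlite branches import their backends lazily, so those extras are needed only when selected:

    from agent_runtime.config import configure
    from agent_runtime.events import InMemoryEventBus, get_event_bus

    # Backend resolved from the global config (defaults to "memory")
    bus = get_event_bus()
    assert isinstance(bus, InMemoryEventBus)

    # Redis backend resolved via config.redis_url; requires the redis extra at use time
    configure(event_bus_backend="redis", redis_url="redis://localhost:6379")
    redis_bus = get_event_bus()

    # Unrecognised backend names raise ValueError
    try:
        get_event_bus("kafka")
    except ValueError as exc:
        print(exc)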
agent_runtime/events/base.py
ADDED

@@ -0,0 +1,86 @@
"""
Abstract base class for event bus implementations.
"""

from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from datetime import datetime, timezone
from typing import Any, AsyncIterator, Callable, Optional
from uuid import UUID


@dataclass
class Event:
    """An event emitted by an agent run."""

    run_id: UUID
    event_type: str
    payload: dict = field(default_factory=dict)
    timestamp: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
    sequence: int = 0


class EventBus(ABC):
    """
    Abstract interface for event bus implementations.

    Event buses handle:
    - Publishing events from agent runs
    - Subscribing to events for a run
    - Event persistence (optional)
    """

    @abstractmethod
    async def publish(
        self,
        run_id: UUID,
        event_type: str,
        payload: dict,
    ) -> None:
        """
        Publish an event.

        Args:
            run_id: Run that emitted the event
            event_type: Type of event
            payload: Event data
        """
        ...

    @abstractmethod
    async def subscribe(
        self,
        run_id: UUID,
    ) -> AsyncIterator[Event]:
        """
        Subscribe to events for a run.

        Args:
            run_id: Run to subscribe to

        Yields:
            Events as they are published
        """
        ...

    @abstractmethod
    async def get_events(
        self,
        run_id: UUID,
        since_sequence: int = 0,
    ) -> list[Event]:
        """
        Get historical events for a run.

        Args:
            run_id: Run to get events for
            since_sequence: Only return events after this sequence

        Returns:
            List of events
        """
        ...

    async def close(self) -> None:
        """Close any connections. Override if needed."""
        pass
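Concrete backends only need to supply publish(), subscribe(), and get_events(); close() already has a no-op default. A toy subclass (illustrative, not part of the package) that satisfies the interface:

    import asyncio
    from typing import AsyncIterator
    from uuid import UUID, uuid4

    from agent_runtime.events.base import Event, EventBus

    class ListEventBus(EventBus):
        """Toy backend: keeps every event in one list, no live fan-out."""

        def __init__(self) -> None:
            self._log: list[Event] = []

        async def publish(self, run_id: UUID, event_type: str, payload: dict) -> None:
            seq = sum(1 for e in self._log if e.run_id == run_id)
            self._log.append(Event(run_id=run_id, event_type=event_type,
                                   payload=payload, sequence=seq))

        async def subscribe(self, run_id: UUID) -> AsyncIterator[Event]:
            # Replays stored events; a real backend would also wait for new ones
            for event in await self.get_events(run_id):
                yield event

        async def get_events(self, run_id: UUID, since_sequence: int = 0) -> list[Event]:
            return [e for e in self._log
                    if e.run_id == run_id and e.sequence >= since_sequence]

    async def demo() -> None:
        bus = ListEventBus()
        run_id = uuid4()
        await bus.publish(run_id, "run.started", {"input": "hi"})
        async for event in bus.subscribe(run_id):
            print(event.event_type, event.payload)

    asyncio.run(demo())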
agent_runtime/events/memory.py
ADDED

@@ -0,0 +1,89 @@
"""
In-memory event bus implementation.

Good for:
- Unit testing
- Local development
- Simple single-process scripts
"""

import asyncio
from collections import defaultdict
from datetime import datetime, timezone
from typing import AsyncIterator, Optional
from uuid import UUID

from agent_runtime.events.base import EventBus, Event


class InMemoryEventBus(EventBus):
    """
    In-memory event bus implementation.

    Stores events in memory. Data is lost when the process exits.
    """

    def __init__(self):
        # run_id -> list of events
        self._events: dict[UUID, list[Event]] = defaultdict(list)
        # run_id -> list of subscriber queues
        self._subscribers: dict[UUID, list[asyncio.Queue]] = defaultdict(list)
        self._lock = asyncio.Lock()

    async def publish(
        self,
        run_id: UUID,
        event_type: str,
        payload: dict,
    ) -> None:
        """Publish an event."""
        async with self._lock:
            events = self._events[run_id]
            sequence = len(events)

            event = Event(
                run_id=run_id,
                event_type=event_type,
                payload=payload,
                timestamp=datetime.now(timezone.utc),
                sequence=sequence,
            )

            events.append(event)

            # Notify subscribers
            for queue in self._subscribers[run_id]:
                await queue.put(event)

    async def subscribe(
        self,
        run_id: UUID,
    ) -> AsyncIterator[Event]:
        """Subscribe to events for a run."""
        queue: asyncio.Queue[Event] = asyncio.Queue()

        async with self._lock:
            self._subscribers[run_id].append(queue)

        try:
            while True:
                event = await queue.get()
                yield event
        finally:
            async with self._lock:
                if queue in self._subscribers[run_id]:
                    self._subscribers[run_id].remove(queue)

    async def get_events(
        self,
        run_id: UUID,
        since_sequence: int = 0,
    ) -> list[Event]:
        """Get historical events for a run."""
        events = self._events.get(run_id, [])
        return [e for e in events if e.sequence >= since_sequence]

    def clear(self) -> None:
        """Clear all events. Useful for testing."""
        self._events.clear()
        self._subscribers.clear()
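The lock, per-run event list, and subscriber queues above can be exercised end to end with a small self-contained demo (illustrative, not part of the package):

    import asyncio
    from uuid import uuid4

    from agent_runtime.events.memory import InMemoryEventBus

    async def main() -> None:
        bus = InMemoryEventBus()
        run_id = uuid4()

        async def consume() -> None:
            # Live subscription: events arrive via the per-subscriber asyncio.Queue
            received = 0
            async for event in bus.subscribe(run_id):
                print(event.sequence, event.event_type, event.payload)
                received += 1
                if received == 3:
                    break  # stop after three events

        consumer = asyncio.create_task(consume())
        await asyncio.sleep(0)  # let the consumer register its queue first

        for step in range(3):
            await bus.publish(run_id, "step.completed", {"step": step})
        await consumer

        # Historical replay; sequence numbers start at 0 per run
        replay = await bus.get_events(run_id, since_sequence=1)
        assert [e.sequence for e in replay] == [1, 2]

        bus.clear()

    asyncio.run(main())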
agent_runtime/events/redis.py
ADDED

@@ -0,0 +1,185 @@
"""
Redis-backed event bus using pub/sub and streams.

Good for:
- Production deployments
- Multi-process/distributed setups
- Real-time streaming
"""

import asyncio
import json
from typing import AsyncIterator, Optional
from uuid import UUID

from agent_runtime.events.base import EventBus, Event


class RedisEventBus(EventBus):
    """
    Redis-backed event bus implementation.

    Uses Redis Streams for event storage and pub/sub for real-time notifications.
    """

    STREAM_PREFIX = "agent_runtime:events:"
    CHANNEL_PREFIX = "agent_runtime:notify:"

    def __init__(
        self,
        url: str = "redis://localhost:6379",
        event_ttl_seconds: int = 3600 * 6,  # 6 hours
    ):
        self.url = url
        self.event_ttl_seconds = event_ttl_seconds
        self._client = None

    async def _get_client(self):
        """Get or create Redis client."""
        if self._client is None:
            try:
                import redis.asyncio as redis
            except ImportError:
                raise ImportError(
                    "redis package is required for RedisEventBus. "
                    "Install with: pip install agent_runtime[redis]"
                )
            self._client = redis.from_url(self.url)
        return self._client

    def _stream_key(self, run_id: UUID) -> str:
        """Get Redis stream key for a run."""
        return f"{self.STREAM_PREFIX}{run_id}"

    def _channel_key(self, run_id: UUID) -> str:
        """Get Redis pub/sub channel for a run."""
        return f"{self.CHANNEL_PREFIX}{run_id}"

    async def publish(self, event: Event) -> None:
        """Publish event to Redis."""
        client = await self._get_client()

        # Add to stream
        stream_key = self._stream_key(event.run_id)
        await client.xadd(
            stream_key,
            {"data": json.dumps(event.to_dict())},
        )

        # Set TTL on stream
        await client.expire(stream_key, self.event_ttl_seconds)

        # Notify subscribers
        channel_key = self._channel_key(event.run_id)
        await client.publish(channel_key, str(event.seq))

    async def subscribe(
        self,
        run_id: UUID,
        from_seq: int = 0,
        check_complete: Optional[callable] = None,
    ) -> AsyncIterator[Event]:
        """
        Subscribe to events using pub/sub for notifications.

        Args:
            run_id: Run to subscribe to
            from_seq: Start from this sequence number
            check_complete: Optional async callable that returns True when run is complete
        """
        client = await self._get_client()
        pubsub = client.pubsub()
        channel_key = self._channel_key(run_id)

        await pubsub.subscribe(channel_key)

        try:
            # First, get any existing events
            events = await self.get_events(run_id, from_seq=from_seq)
            current_seq = from_seq

            for event in events:
                yield event
                current_seq = event.seq + 1

            # Then listen for new events
            while True:
                # Check if run is complete
                if check_complete and await check_complete():
                    # Get any final events
                    final_events = await self.get_events(run_id, from_seq=current_seq)
                    for event in final_events:
                        yield event
                    break

                # Wait for notification with timeout
                try:
                    message = await asyncio.wait_for(
                        pubsub.get_message(ignore_subscribe_messages=True),
                        timeout=1.0,
                    )
                    if message:
                        # Get new events
                        new_events = await self.get_events(run_id, from_seq=current_seq)
                        for event in new_events:
                            yield event
                            current_seq = event.seq + 1
                except asyncio.TimeoutError:
                    continue

        finally:
            await pubsub.unsubscribe(channel_key)
            await pubsub.close()

    async def get_events(
        self,
        run_id: UUID,
        from_seq: int = 0,
        to_seq: Optional[int] = None,
    ) -> list[Event]:
        """Get events from Redis stream."""
        client = await self._get_client()
        stream_key = self._stream_key(run_id)

        # Read from stream
        messages = await client.xrange(stream_key)

        events = []
        for msg_id, data in messages:
            data_bytes = data.get(b"data", data.get("data"))
            if isinstance(data_bytes, bytes):
                data_bytes = data_bytes.decode()
            event_data = json.loads(data_bytes)
            event = Event.from_dict(event_data)

            if event.seq < from_seq:
                continue
            if to_seq is not None and event.seq > to_seq:
                continue

            events.append(event)

        return sorted(events, key=lambda e: e.seq)

    async def get_next_seq(self, run_id: UUID) -> int:
        """Get next sequence number from Redis."""
        client = await self._get_client()
        stream_key = self._stream_key(run_id)

        # Check Redis stream
        messages = await client.xrevrange(stream_key, count=1)
        if messages:
            msg_id, data = messages[0]
            data_bytes = data.get(b"data", data.get("data"))
            if isinstance(data_bytes, bytes):
                data_bytes = data_bytes.decode()
            event_data = json.loads(data_bytes)
            return event_data["seq"] + 1

        return 0

    async def close(self) -> None:
        """Close Redis connection."""
        if self._client:
            await self._client.close()
            self._client = None
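A read-side sketch for the Redis backend, assuming a Redis server is reachable at the given URL. Note that this module expects Event to provide seq, to_dict(), and from_dict(), and its publish()/subscribe() signatures differ from the EventBus ABC in events/base.py, so the sketch sticks to the calls defined in this file:

    import asyncio
    from uuid import uuid4

    from agent_runtime.events.redis import RedisEventBus

    async def main() -> None:
        bus = RedisEventBus(url="redis://localhost:6379", event_ttl_seconds=3600)
        run_id = uuid4()

        # Replay whatever the stream already holds for this run (empty if nothing yet)
        history = await bus.get_events(run_id, from_seq=0)
        print(f"{len(history)} stored events")

        # Follow the run until the completion check says it is done. The check here
        # returns True immediately, so the loop just drains history and exits; a real
        # caller would consult the run's state store instead.
        async def already_done() -> bool:
            return True

        async for event in bus.subscribe(run_id, check_complete=already_done):
            print(event.seq, event.event_type, event.payload)

        await bus.close()

    asyncio.run(main())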