voxagent 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- voxagent/__init__.py +143 -0
- voxagent/_version.py +5 -0
- voxagent/agent/__init__.py +32 -0
- voxagent/agent/abort.py +178 -0
- voxagent/agent/core.py +902 -0
- voxagent/code/__init__.py +9 -0
- voxagent/mcp/__init__.py +16 -0
- voxagent/mcp/manager.py +188 -0
- voxagent/mcp/tool.py +152 -0
- voxagent/providers/__init__.py +110 -0
- voxagent/providers/anthropic.py +498 -0
- voxagent/providers/augment.py +293 -0
- voxagent/providers/auth.py +116 -0
- voxagent/providers/base.py +268 -0
- voxagent/providers/chatgpt.py +415 -0
- voxagent/providers/claudecode.py +162 -0
- voxagent/providers/cli_base.py +265 -0
- voxagent/providers/codex.py +183 -0
- voxagent/providers/failover.py +90 -0
- voxagent/providers/google.py +532 -0
- voxagent/providers/groq.py +96 -0
- voxagent/providers/ollama.py +425 -0
- voxagent/providers/openai.py +435 -0
- voxagent/providers/registry.py +175 -0
- voxagent/py.typed +1 -0
- voxagent/security/__init__.py +14 -0
- voxagent/security/events.py +75 -0
- voxagent/security/filter.py +169 -0
- voxagent/security/registry.py +87 -0
- voxagent/session/__init__.py +39 -0
- voxagent/session/compaction.py +237 -0
- voxagent/session/lock.py +103 -0
- voxagent/session/model.py +109 -0
- voxagent/session/storage.py +184 -0
- voxagent/streaming/__init__.py +52 -0
- voxagent/streaming/emitter.py +286 -0
- voxagent/streaming/events.py +255 -0
- voxagent/subagent/__init__.py +20 -0
- voxagent/subagent/context.py +124 -0
- voxagent/subagent/definition.py +172 -0
- voxagent/tools/__init__.py +32 -0
- voxagent/tools/context.py +50 -0
- voxagent/tools/decorator.py +175 -0
- voxagent/tools/definition.py +131 -0
- voxagent/tools/executor.py +109 -0
- voxagent/tools/policy.py +89 -0
- voxagent/tools/registry.py +89 -0
- voxagent/types/__init__.py +46 -0
- voxagent/types/messages.py +134 -0
- voxagent/types/run.py +176 -0
- voxagent-0.1.0.dist-info/METADATA +186 -0
- voxagent-0.1.0.dist-info/RECORD +53 -0
- voxagent-0.1.0.dist-info/WHEEL +4 -0
voxagent/session/model.py
@@ -0,0 +1,109 @@
"""Session model for voxagent."""

from __future__ import annotations

import uuid
from datetime import datetime, timezone
from typing import Any

from pydantic import BaseModel, ConfigDict, Field

from voxagent.types.messages import Message


class Session(BaseModel):
    """Represents a conversation session."""

    id: str = Field(..., description="Unique session ID (UUID)")
    key: str = Field(..., description="Session key for resolution")
    messages: list[Message] = Field(default_factory=list)
    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    metadata: dict[str, Any] = Field(default_factory=dict)

    model_config = ConfigDict(arbitrary_types_allowed=True)

    def add_message(self, message: Message) -> None:
        """Add a message and update timestamp."""
        self.messages.append(message)
        self.updated_at = datetime.now(timezone.utc)

    def get_messages(self, limit: int | None = None) -> list[Message]:
        """Get messages, optionally limited to last N."""
        if limit is None:
            return list(self.messages)
        return list(self.messages[-limit:])

    def clear_messages(self) -> None:
        """Clear all messages."""
        self.messages.clear()
        self.updated_at = datetime.now(timezone.utc)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to dictionary."""
        return {
            "id": self.id,
            "key": self.key,
            "messages": [m.model_dump() for m in self.messages],
            "created_at": self.created_at.isoformat(),
            "updated_at": self.updated_at.isoformat(),
            "metadata": self.metadata,
        }

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> Session:
        """Deserialize from dictionary."""
        messages = [Message(**m) for m in data.get("messages", [])]
        created_at = data.get("created_at")
        updated_at = data.get("updated_at")

        if isinstance(created_at, str):
            created_at = datetime.fromisoformat(created_at)
        if isinstance(updated_at, str):
            updated_at = datetime.fromisoformat(updated_at)

        return cls(
            id=data["id"],
            key=data["key"],
            messages=messages,
            created_at=created_at or datetime.now(timezone.utc),
            updated_at=updated_at or datetime.now(timezone.utc),
            metadata=data.get("metadata", {}),
        )

    @classmethod
    def create(cls, key: str, **kwargs: Any) -> Session:
        """Create a new session with generated ID."""
        return cls(
            id=str(uuid.uuid4()),
            key=key,
            **kwargs,
        )


def resolve_session_key(
    user_id: str | None = None,
    channel: str | None = None,
    thread_id: str | None = None,
) -> str:
    """Generate a session key from components.

    Examples:
        resolve_session_key(user_id="u123") -> "user:u123"
        resolve_session_key(channel="general") -> "channel:general"
        resolve_session_key(user_id="u123", channel="general") -> "user:u123:channel:general"
    """
    parts = []

    if user_id and user_id.strip():
        parts.append(f"user:{user_id}")
    if channel and channel.strip():
        parts.append(f"channel:{channel}")
    if thread_id and thread_id.strip():
        parts.append(f"thread:{thread_id}")

    if not parts:
        raise ValueError("At least one of user_id, channel, or thread_id must be provided")

    return ":".join(parts)
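Note: `resolve_session_key` builds deterministic keys and `Session.create` / `to_dict` / `from_dict` handle identity and serialization. A minimal usage sketch based only on the file above (message contents are omitted because the `Message` model lives in voxagent/types/messages.py, which is not part of this hunk):

# Sketch: create a session, serialize it, and restore it.
from voxagent.session.model import Session, resolve_session_key

key = resolve_session_key(user_id="u123", channel="general")
assert key == "user:u123:channel:general"

session = Session.create(key=key)        # id is a freshly generated UUID
snapshot = session.to_dict()             # JSON-serializable dict with ISO timestamps
restored = Session.from_dict(snapshot)   # timestamps parsed back from ISO strings
assert restored.key == session.key and restored.id == session.id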
voxagent/session/storage.py
@@ -0,0 +1,184 @@
"""Session storage backends for voxagent."""

from __future__ import annotations

import json
import os
import tempfile
from abc import ABC, abstractmethod
from pathlib import Path
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from voxagent.session.model import Session


class SessionStorage(ABC):
    """Abstract base class for session storage backends."""

    @abstractmethod
    async def load(self, session_key: str) -> "Session | None":
        """Load a session by key. Returns None if not found."""

    @abstractmethod
    async def save(self, session: "Session") -> None:
        """Save a session."""

    @abstractmethod
    async def delete(self, session_key: str) -> bool:
        """Delete a session. Returns True if deleted, False if not found."""

    @abstractmethod
    async def list_keys(self) -> list[str]:
        """List all session keys."""

    @abstractmethod
    async def exists(self, session_key: str) -> bool:
        """Check if a session exists."""


class InMemorySessionStorage(SessionStorage):
    """In-memory storage for testing."""

    def __init__(self) -> None:
        self._sessions: dict[str, "Session"] = {}

    async def load(self, session_key: str) -> "Session | None":
        return self._sessions.get(session_key)

    async def save(self, session: "Session") -> None:
        self._sessions[session.key] = session

    async def delete(self, session_key: str) -> bool:
        if session_key in self._sessions:
            del self._sessions[session_key]
            return True
        return False

    async def list_keys(self) -> list[str]:
        return list(self._sessions.keys())

    async def exists(self, session_key: str) -> bool:
        return session_key in self._sessions


class FileSessionStorage(SessionStorage):
    """File-based storage using JSONL format."""

    def __init__(self, base_dir: Path | str) -> None:
        self.base_dir = Path(base_dir)
        self.base_dir.mkdir(parents=True, exist_ok=True)
        # Map sanitized filename stems to original keys
        self._key_map: dict[str, str] = {}

    def _sanitize_key(self, session_key: str) -> str:
        """Sanitize key for filesystem safety."""
        return session_key.replace(":", "_").replace("/", "_").replace("\\", "_")

    def _get_session_path(self, session_key: str) -> Path:
        """Get file path for a session key."""
        safe_key = self._sanitize_key(session_key)
        return self.base_dir / f"{safe_key}.jsonl"

    async def load(self, session_key: str) -> "Session | None":
        from voxagent.session.model import Session

        path = self._get_session_path(session_key)
        if not path.exists():
            return None

        with open(path, "r", encoding="utf-8") as f:
            lines = f.readlines()

        if not lines:
            return None

        # First line is session metadata
        metadata = json.loads(lines[0])

        # Remaining lines are messages
        messages_data = []
        for line in lines[1:]:
            if line.strip():
                msg_data = json.loads(line)
                messages_data.append(msg_data)

        return Session.from_dict({
            "id": metadata["id"],
            "key": metadata["key"],
            "messages": messages_data,
            "created_at": metadata.get("created_at"),
            "updated_at": metadata.get("updated_at"),
            "metadata": metadata.get("metadata", {}),
        })

    async def save(self, session: "Session") -> None:
        path = self._get_session_path(session.key)

        # Ensure directory exists
        path.parent.mkdir(parents=True, exist_ok=True)

        # Atomic write: write to temp file, then rename
        fd, temp_path = tempfile.mkstemp(
            dir=self.base_dir,
            prefix=".tmp_",
            suffix=".jsonl",
        )

        try:
            with os.fdopen(fd, "w", encoding="utf-8") as f:
                # First line: session metadata
                metadata = {
                    "id": session.id,
                    "key": session.key,
                    "created_at": session.created_at.isoformat(),
                    "updated_at": session.updated_at.isoformat(),
                    "metadata": session.metadata,
                }
                f.write(json.dumps(metadata) + "\n")

                # Remaining lines: one message per line
                for msg in session.messages:
                    f.write(json.dumps(msg.model_dump()) + "\n")

            # Atomic rename
            Path(temp_path).rename(path)
        except Exception:
            # Clean up temp file on error
            if os.path.exists(temp_path):
                os.unlink(temp_path)
            raise

    async def delete(self, session_key: str) -> bool:
        path = self._get_session_path(session_key)
        if path.exists():
            path.unlink()
            return True
        return False

    async def list_keys(self) -> list[str]:
        keys = []
        for path in self.base_dir.glob("*.jsonl"):
            if not path.name.startswith(".tmp_"):
                # Read the key from the file metadata
                try:
                    with open(path, "r", encoding="utf-8") as f:
                        first_line = f.readline()
                        if first_line.strip():
                            metadata = json.loads(first_line)
                            keys.append(metadata["key"])
                except (json.JSONDecodeError, KeyError, OSError):
                    # Skip corrupted files
                    pass
        return keys

    async def exists(self, session_key: str) -> bool:
        return self._get_session_path(session_key).exists()


__all__ = [
    "FileSessionStorage",
    "InMemorySessionStorage",
    "SessionStorage",
]
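Note: `FileSessionStorage.save` writes the session as JSONL (one metadata line followed by one message per line) through a temp file and rename, so a reader never sees a partially written session. A minimal round-trip sketch, assuming `./sessions` is a writable directory:

# Sketch: save, reload, and delete a session on disk. The storage API is
# async, so it is driven here with asyncio.run().
import asyncio

from voxagent.session.model import Session
from voxagent.session.storage import FileSessionStorage

async def main() -> None:
    storage = FileSessionStorage("./sessions")   # directory name is illustrative
    session = Session.create(key="user:u123")

    await storage.save(session)                  # atomic JSONL write (user_u123.jsonl)
    assert await storage.exists("user:u123")

    loaded = await storage.load("user:u123")
    assert loaded is not None and loaded.id == session.id

    print(await storage.list_keys())             # ["user:u123"], read from file metadata
    await storage.delete("user:u123")

asyncio.run(main())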
voxagent/streaming/__init__.py
@@ -0,0 +1,52 @@
"""Event streaming infrastructure.

This subpackage provides:
- StreamResult wrapper for streaming responses
- Event emitter for typed events
- Lifecycle events (RUN_START, RUN_END, etc.)
- Tool events (TOOL_START, TOOL_OUTPUT, TOOL_END)
- Typed event data models
"""

from voxagent.streaming.emitter import (
    EventCallback,
    EventEmitter,
    StreamEvent,
    WildcardCallback,
)
from voxagent.streaming.events import (
    AssistantEndEvent,
    AssistantStartEvent,
    BaseEvent,
    CompactionEndEvent,
    CompactionStartEvent,
    RunEndEvent,
    RunErrorEvent,
    RunStartEvent,
    StreamEventData,
    TextDeltaEvent,
    ToolEndEvent,
    ToolOutputEvent,
    ToolStartEvent,
)

__all__ = [
    "AssistantEndEvent",
    "AssistantStartEvent",
    "BaseEvent",
    "CompactionEndEvent",
    "CompactionStartEvent",
    "EventCallback",
    "EventEmitter",
    "RunEndEvent",
    "RunErrorEvent",
    "RunStartEvent",
    "StreamEvent",
    "StreamEventData",
    "TextDeltaEvent",
    "ToolEndEvent",
    "ToolOutputEvent",
    "ToolStartEvent",
    "WildcardCallback",
]
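Note: because the subpackage re-exports these names, callers can import from `voxagent.streaming` directly rather than from the `emitter` and `events` submodules. A small sketch:

# Sketch: package-level imports re-exported by voxagent/streaming/__init__.py.
from voxagent.streaming import EventEmitter, StreamEvent

emitter = EventEmitter()
emitter.on(StreamEvent.TEXT_DELTA, lambda data: print("delta:", data))
# The typed payload models (TextDeltaEvent, RunStartEvent, ...) are defined in
# voxagent/streaming/events.py, which is not part of this hunk.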
voxagent/streaming/emitter.py
@@ -0,0 +1,286 @@
"""Event emitter for streaming events.

Provides typed event emission for the agent lifecycle including
run, inference, tool, and context compaction events.
"""

from __future__ import annotations

import inspect
import logging
import threading
from collections.abc import Awaitable
from enum import Enum
from typing import Any, Callable

logger = logging.getLogger(__name__)


class StreamEvent(str, Enum):
    """Stream event types for the agent lifecycle.

    Based on algorithm specification from agent_algorithm.md.
    """

    # Lifecycle events
    RUN_START = "run_start"
    RUN_END = "run_end"
    RUN_ERROR = "run_error"

    # Inference events
    ASSISTANT_START = "assistant_start"
    TEXT_DELTA = "text_delta"
    ASSISTANT_END = "assistant_end"

    # Tool events
    TOOL_START = "tool_start"
    TOOL_OUTPUT = "tool_output"
    TOOL_END = "tool_end"

    # Context events
    COMPACTION_START = "compaction_start"
    COMPACTION_END = "compaction_end"


# Type aliases for callbacks
EventCallback = Callable[[Any], None] | Callable[[Any], Awaitable[None]]
WildcardCallback = (
    Callable[[StreamEvent | str, Any], None]
    | Callable[[StreamEvent | str, Any], Awaitable[None]]
)


class EventEmitter:
    """Event emitter for typed streaming events.

    Thread-safe event emitter supporting both sync and async callbacks.
    Supports wildcard listeners that receive all events.
    """

    def __init__(self) -> None:
        """Initialize the event emitter with empty listener storage."""
        self._listeners: dict[StreamEvent | str, list[EventCallback]] = {}
        self._once_callbacks: set[EventCallback] = set()
        self._wildcard_listeners: list[WildcardCallback] = []
        self._lock = threading.Lock()

    def on(
        self, event_type: StreamEvent | str, callback: EventCallback
    ) -> Callable[[], None]:
        """Register a callback for an event type.

        Args:
            event_type: The event type to listen for.
            callback: The callback function to invoke when event is emitted.

        Returns:
            An unsubscribe function that removes this callback.
        """
        with self._lock:
            if event_type not in self._listeners:
                self._listeners[event_type] = []
            self._listeners[event_type].append(callback)

        def unsubscribe() -> None:
            self.off(event_type, callback)

        return unsubscribe

    def off(self, event_type: StreamEvent | str, callback: EventCallback) -> None:
        """Unregister a callback for an event type.

        Args:
            event_type: The event type to unregister from.
            callback: The callback to remove.
        """
        with self._lock:
            if event_type in self._listeners:
                try:
                    self._listeners[event_type].remove(callback)
                except ValueError:
                    pass  # Callback was not registered
            # Also remove from once callbacks if present
            self._once_callbacks.discard(callback)

    def once(
        self, event_type: StreamEvent | str, callback: EventCallback
    ) -> Callable[[], None]:
        """Register a one-time callback that auto-removes after first call.

        Args:
            event_type: The event type to listen for.
            callback: The callback function to invoke once.

        Returns:
            An unsubscribe function that removes this callback.
        """
        with self._lock:
            if event_type not in self._listeners:
                self._listeners[event_type] = []
            self._listeners[event_type].append(callback)
            self._once_callbacks.add(callback)

        def unsubscribe() -> None:
            self.off(event_type, callback)

        return unsubscribe

    def emit(self, event_type: StreamEvent | str, data: Any = None) -> None:
        """Emit an event synchronously to all registered listeners.

        Exceptions in callbacks are caught and logged, but do not prevent
        other callbacks from being called.

        Args:
            event_type: The event type to emit.
            data: Optional data payload to pass to callbacks.
        """
        # Get a snapshot of callbacks to call
        with self._lock:
            callbacks = list(self._listeners.get(event_type, []))
            wildcard_callbacks = list(self._wildcard_listeners)
            once_callbacks = self._once_callbacks.copy()

        # Call specific event callbacks
        for callback in callbacks:
            try:
                callback(data)
            except Exception:
                logger.exception(
                    "Exception in event callback for %s", event_type
                )

            # Remove if it was a once callback
            if callback in once_callbacks:
                self.off(event_type, callback)

        # Call wildcard callbacks
        for callback in wildcard_callbacks:
            try:
                callback(event_type, data)
            except Exception:
                logger.exception(
                    "Exception in wildcard callback for %s", event_type
                )

    async def emit_async(
        self, event_type: StreamEvent | str, data: Any = None
    ) -> None:
        """Emit an event asynchronously, awaiting async callbacks.

        Handles both sync and async callbacks. Exceptions in callbacks are
        caught and logged, but do not prevent other callbacks from being called.

        Args:
            event_type: The event type to emit.
            data: Optional data payload to pass to callbacks.
        """
        # Get a snapshot of callbacks to call
        with self._lock:
            callbacks = list(self._listeners.get(event_type, []))
            wildcard_callbacks = list(self._wildcard_listeners)
            once_callbacks = self._once_callbacks.copy()

        # Call specific event callbacks
        for callback in callbacks:
            try:
                result = callback(data)
                if inspect.isawaitable(result):
                    await result
            except Exception:
                logger.exception(
                    "Exception in async event callback for %s", event_type
                )

            # Remove if it was a once callback
            if callback in once_callbacks:
                self.off(event_type, callback)

        # Call wildcard callbacks
        for callback in wildcard_callbacks:
            try:
                result = callback(event_type, data)
                if inspect.isawaitable(result):
                    await result
            except Exception:
                logger.exception(
                    "Exception in async wildcard callback for %s", event_type
                )

    def on_any(self, callback: WildcardCallback) -> Callable[[], None]:
        """Register a wildcard callback that receives all events.

        Args:
            callback: The callback function receiving (event_type, data).

        Returns:
            An unsubscribe function that removes this callback.
        """
        with self._lock:
            self._wildcard_listeners.append(callback)

        def unsubscribe() -> None:
            self.off_any(callback)

        return unsubscribe

    def off_any(self, callback: WildcardCallback) -> None:
        """Unregister a wildcard callback.

        Args:
            callback: The wildcard callback to remove.
        """
        with self._lock:
            try:
                self._wildcard_listeners.remove(callback)
            except ValueError:
                pass  # Callback was not registered

    def clear(self, event_type: StreamEvent | str | None = None) -> None:
        """Clear all listeners, or listeners for a specific event.

        Args:
            event_type: If provided, clear only listeners for this event.
                If None, clear all listeners including wildcards.
        """
        with self._lock:
            if event_type is None:
                self._listeners.clear()
                self._once_callbacks.clear()
                self._wildcard_listeners.clear()
            else:
                if event_type in self._listeners:
                    # Remove any once callbacks for this event
                    for cb in self._listeners[event_type]:
                        self._once_callbacks.discard(cb)
                    del self._listeners[event_type]

    def listener_count(self, event_type: StreamEvent | str | None = None) -> int:
        """Return the number of listeners for an event type.

        Args:
            event_type: The event type to count listeners for.
                If None, returns total count of all listeners.

        Returns:
            The number of listeners registered for the event type.
        """
        with self._lock:
            if event_type is None:
                total = sum(len(cbs) for cbs in self._listeners.values())
                total += len(self._wildcard_listeners)
                return total
            return len(self._listeners.get(event_type, []))

    def has_listeners(self, event_type: StreamEvent | str) -> bool:
        """Return True if the event type has any listeners.

        Args:
            event_type: The event type to check.

        Returns:
            True if there are listeners for this event type.
        """
        with self._lock:
            return len(self._listeners.get(event_type, [])) > 0
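Note: the emitter snapshots the registered callbacks under its lock and then invokes them without holding it, logging callback exceptions rather than propagating them; `emit_async` additionally awaits any awaitable a callback returns. A minimal usage sketch based only on the file above (the payloads passed to `emit` here are placeholders; the typed payload models live in voxagent/streaming/events.py, which is not part of this hunk):

# Sketch: sync, async, once, and wildcard listeners on one EventEmitter.
import asyncio

from voxagent.streaming.emitter import EventEmitter, StreamEvent

emitter = EventEmitter()

def on_delta(data) -> None:                 # sync listener
    print("delta:", data)

async def on_run_end(data) -> None:         # async listener, awaited by emit_async
    print("run finished:", data)

unsubscribe = emitter.on(StreamEvent.TEXT_DELTA, on_delta)
emitter.once(StreamEvent.RUN_START, lambda data: print("first run only"))
emitter.on_any(lambda event, data: print("saw event:", event))
emitter.on(StreamEvent.RUN_END, on_run_end)

emitter.emit(StreamEvent.RUN_START, {"run_id": "r1"})        # placeholder payload
emitter.emit(StreamEvent.TEXT_DELTA, "Hel")                  # sync fan-out
asyncio.run(emitter.emit_async(StreamEvent.RUN_END, {"ok": True}))

unsubscribe()                               # removes on_delta
assert not emitter.has_listeners(StreamEvent.TEXT_DELTA)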