agno 2.3.16__py3-none-any.whl → 2.3.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/__init__.py +2 -0
- agno/agent/agent.py +4 -53
- agno/agent/remote.py +351 -0
- agno/client/__init__.py +3 -0
- agno/client/os.py +2669 -0
- agno/db/base.py +20 -0
- agno/db/mongo/async_mongo.py +11 -0
- agno/db/mongo/mongo.py +10 -0
- agno/db/mysql/async_mysql.py +9 -0
- agno/db/mysql/mysql.py +9 -0
- agno/db/postgres/async_postgres.py +9 -0
- agno/db/postgres/postgres.py +9 -0
- agno/db/postgres/utils.py +3 -2
- agno/db/sqlite/async_sqlite.py +9 -0
- agno/db/sqlite/sqlite.py +11 -1
- agno/exceptions.py +23 -0
- agno/knowledge/chunking/semantic.py +123 -46
- agno/knowledge/reader/csv_reader.py +1 -1
- agno/knowledge/reader/field_labeled_csv_reader.py +1 -1
- agno/knowledge/reader/json_reader.py +1 -1
- agno/models/google/gemini.py +5 -0
- agno/os/app.py +108 -25
- agno/os/auth.py +25 -1
- agno/os/interfaces/a2a/a2a.py +7 -6
- agno/os/interfaces/a2a/router.py +13 -13
- agno/os/interfaces/agui/agui.py +5 -3
- agno/os/interfaces/agui/router.py +23 -16
- agno/os/interfaces/base.py +7 -7
- agno/os/interfaces/slack/router.py +6 -6
- agno/os/interfaces/slack/slack.py +7 -7
- agno/os/interfaces/whatsapp/router.py +29 -6
- agno/os/interfaces/whatsapp/whatsapp.py +11 -8
- agno/os/managers.py +326 -0
- agno/os/mcp.py +651 -79
- agno/os/router.py +125 -18
- agno/os/routers/agents/router.py +65 -22
- agno/os/routers/agents/schema.py +16 -4
- agno/os/routers/database.py +5 -0
- agno/os/routers/evals/evals.py +93 -11
- agno/os/routers/evals/utils.py +6 -6
- agno/os/routers/knowledge/knowledge.py +104 -16
- agno/os/routers/memory/memory.py +124 -7
- agno/os/routers/metrics/metrics.py +21 -4
- agno/os/routers/session/session.py +141 -12
- agno/os/routers/teams/router.py +40 -14
- agno/os/routers/teams/schema.py +12 -4
- agno/os/routers/traces/traces.py +54 -4
- agno/os/routers/workflows/router.py +223 -117
- agno/os/routers/workflows/schema.py +65 -1
- agno/os/schema.py +38 -12
- agno/os/utils.py +87 -166
- agno/remote/__init__.py +3 -0
- agno/remote/base.py +484 -0
- agno/run/workflow.py +1 -0
- agno/team/__init__.py +2 -0
- agno/team/remote.py +287 -0
- agno/team/team.py +25 -54
- agno/tracing/exporter.py +10 -6
- agno/tracing/setup.py +2 -1
- agno/utils/agent.py +58 -1
- agno/utils/http.py +68 -20
- agno/utils/os.py +0 -0
- agno/utils/remote.py +23 -0
- agno/vectordb/chroma/chromadb.py +452 -16
- agno/vectordb/pgvector/pgvector.py +7 -0
- agno/vectordb/redis/redisdb.py +1 -1
- agno/workflow/__init__.py +2 -0
- agno/workflow/agent.py +2 -2
- agno/workflow/remote.py +222 -0
- agno/workflow/types.py +0 -73
- agno/workflow/workflow.py +119 -68
- {agno-2.3.16.dist-info → agno-2.3.18.dist-info}/METADATA +1 -1
- {agno-2.3.16.dist-info → agno-2.3.18.dist-info}/RECORD +76 -66
- {agno-2.3.16.dist-info → agno-2.3.18.dist-info}/WHEEL +0 -0
- {agno-2.3.16.dist-info → agno-2.3.18.dist-info}/licenses/LICENSE +0 -0
- {agno-2.3.16.dist-info → agno-2.3.18.dist-info}/top_level.txt +0 -0
agno/os/routers/workflows/router.py
CHANGED

@@ -1,5 +1,5 @@
 import json
-from typing import TYPE_CHECKING, Any, AsyncGenerator, Dict, List, Optional, Union
+from typing import TYPE_CHECKING, Any, AsyncGenerator, Dict, List, Optional, Union
 from uuid import uuid4

 from fastapi import (
@@ -15,7 +15,13 @@ from fastapi.responses import JSONResponse, StreamingResponse
 from pydantic import BaseModel

 from agno.exceptions import InputCheckError, OutputCheckError
-from agno.os.auth import
+from agno.os.auth import (
+    get_auth_token_from_request,
+    get_authentication_dependency,
+    require_resource_access,
+    validate_websocket_token,
+)
+from agno.os.managers import event_buffer, websocket_manager
 from agno.os.routers.workflows.schema import WorkflowResponse
 from agno.os.schema import (
     BadRequestResponse,
@@ -31,110 +37,17 @@ from agno.os.utils import (
     get_request_kwargs,
     get_workflow_by_id,
 )
-from agno.run.
-from agno.
+from agno.run.base import RunStatus
+from agno.run.workflow import WorkflowErrorEvent
+from agno.utils.log import log_debug, log_warning, logger
+from agno.utils.serialize import json_serializer
+from agno.workflow.remote import RemoteWorkflow
 from agno.workflow.workflow import Workflow

 if TYPE_CHECKING:
     from agno.os.app import AgentOS


-class WebSocketManager:
-    """Manages WebSocket connections for workflow runs"""
-
-    active_connections: Dict[str, WebSocket]  # {run_id: websocket}
-    authenticated_connections: Dict[WebSocket, bool]  # {websocket: is_authenticated}
-
-    def __init__(
-        self,
-        active_connections: Optional[Dict[str, WebSocket]] = None,
-    ):
-        # Store active connections: {run_id: websocket}
-        self.active_connections = active_connections or {}
-        # Track authentication state for each websocket
-        self.authenticated_connections = {}
-
-    async def connect(self, websocket: WebSocket, requires_auth: bool = True):
-        """Accept WebSocket connection"""
-        await websocket.accept()
-        logger.debug("WebSocket connected")
-
-        # If auth is not required, mark as authenticated immediately
-        self.authenticated_connections[websocket] = not requires_auth
-
-        # Send connection confirmation with auth requirement info
-        await websocket.send_text(
-            json.dumps(
-                {
-                    "event": "connected",
-                    "message": (
-                        "Connected to workflow events. Please authenticate to continue."
-                        if requires_auth
-                        else "Connected to workflow events. Authentication not required."
-                    ),
-                    "requires_auth": requires_auth,
-                }
-            )
-        )
-
-    async def authenticate_websocket(self, websocket: WebSocket):
-        """Mark a WebSocket connection as authenticated"""
-        self.authenticated_connections[websocket] = True
-        logger.debug("WebSocket authenticated")
-
-        # Send authentication confirmation
-        await websocket.send_text(
-            json.dumps(
-                {
-                    "event": "authenticated",
-                    "message": "Authentication successful. You can now send commands.",
-                }
-            )
-        )
-
-    def is_authenticated(self, websocket: WebSocket) -> bool:
-        """Check if a WebSocket connection is authenticated"""
-        return self.authenticated_connections.get(websocket, False)
-
-    async def register_workflow_websocket(self, run_id: str, websocket: WebSocket):
-        """Register a workflow run with its WebSocket connection"""
-        self.active_connections[run_id] = websocket
-        logger.debug(f"Registered WebSocket for run_id: {run_id}")
-
-    async def disconnect_by_run_id(self, run_id: str):
-        """Remove WebSocket connection by run_id"""
-        if run_id in self.active_connections:
-            websocket = self.active_connections[run_id]
-            del self.active_connections[run_id]
-            # Clean up authentication state
-            if websocket in self.authenticated_connections:
-                del self.authenticated_connections[websocket]
-            logger.debug(f"WebSocket disconnected for run_id: {run_id}")
-
-    async def disconnect_websocket(self, websocket: WebSocket):
-        """Remove WebSocket connection and clean up all associated state"""
-        # Remove from authenticated connections
-        if websocket in self.authenticated_connections:
-            del self.authenticated_connections[websocket]
-
-        # Remove from active connections
-        runs_to_remove = [run_id for run_id, ws in self.active_connections.items() if ws == websocket]
-        for run_id in runs_to_remove:
-            del self.active_connections[run_id]
-
-        logger.debug("WebSocket disconnected and cleaned up")
-
-    async def get_websocket_for_run(self, run_id: str) -> Optional[WebSocket]:
-        """Get WebSocket connection for a workflow run"""
-        return self.active_connections.get(run_id)
-
-
-# Global manager instance
-websocket_manager = WebSocketManager(
-    active_connections={},
-)
-
-
 async def handle_workflow_via_websocket(websocket: WebSocket, message: dict, os: "AgentOS"):
     """Handle workflow execution directly via WebSocket"""
     try:
@@ -153,6 +66,12 @@ async def handle_workflow_via_websocket(websocket: WebSocket, message: dict, os: "AgentOS"):
             await websocket.send_text(json.dumps({"event": "error", "error": f"Workflow {workflow_id} not found"}))
             return

+        if isinstance(workflow, RemoteWorkflow):
+            await websocket.send_text(
+                json.dumps({"event": "error", "error": "Remote workflows are not supported via WebSocket"})
+            )
+            return
+
         # Generate session_id if not provided
         # Use workflow's default session_id if not provided in message
         if not session_id:
@@ -162,7 +81,7 @@ async def handle_workflow_via_websocket(websocket: WebSocket, message: dict, os: "AgentOS"):
             session_id = str(uuid4())

         # Execute workflow in background with streaming
-
+        await workflow.arun(  # type: ignore
             input=user_message,
             session_id=session_id,
             user_id=user_id,
@@ -172,9 +91,9 @@ async def handle_workflow_via_websocket(websocket: WebSocket, message: dict, os: "AgentOS"):
             websocket=websocket,
         )

-
-
-
+        # NOTE: Don't register the original websocket in the manager
+        # It's already handled by the WebSocketHandler passed to the workflow
+        # The manager is ONLY for reconnected clients (see handle_workflow_subscription)

     except (InputCheckError, OutputCheckError) as e:
         await websocket.send_text(
@@ -200,12 +119,175 @@ async def handle_workflow_via_websocket(websocket: WebSocket, message: dict, os: "AgentOS"):
         await websocket.send_text(json.dumps(error_payload))


+async def handle_workflow_subscription(websocket: WebSocket, message: dict, os: "AgentOS"):
+    """
+    Handle subscription/reconnection to an existing workflow run.
+
+    Allows clients to reconnect after page refresh or disconnection and catch up on missed events.
+    """
+    try:
+        run_id = message.get("run_id")
+        workflow_id = message.get("workflow_id")
+        session_id = message.get("session_id")
+        last_event_index = message.get("last_event_index")  # 0-based index of last received event
+
+        if not run_id:
+            await websocket.send_text(json.dumps({"event": "error", "error": "run_id is required for subscription"}))
+            return
+
+        # Check if run exists in event buffer
+        buffer_status = event_buffer.get_run_status(run_id)
+
+        if buffer_status is None:
+            # Run not in buffer - check database
+            if workflow_id and session_id:
+                workflow = get_workflow_by_id(workflow_id, os.workflows)
+                if workflow and isinstance(workflow, Workflow):
+                    workflow_run = await workflow.aget_run_output(run_id, session_id)
+
+                    if workflow_run:
+                        # Run exists in DB - send all events from DB
+                        if workflow_run.events:
+                            await websocket.send_text(
+                                json.dumps(
+                                    {
+                                        "event": "replay",
+                                        "run_id": run_id,
+                                        "status": workflow_run.status.value if workflow_run.status else "unknown",
+                                        "total_events": len(workflow_run.events),
+                                        "message": "Run completed. Replaying all events from database.",
+                                    }
+                                )
+                            )
+
+                            # Send events one by one
+                            for idx, event in enumerate(workflow_run.events):
+                                # Convert event to dict and add event_index
+                                event_dict = event.model_dump() if hasattr(event, "model_dump") else event.to_dict()
+                                event_dict["event_index"] = idx
+                                if "run_id" not in event_dict:
+                                    event_dict["run_id"] = run_id
+
+                                await websocket.send_text(json.dumps(event_dict, default=json_serializer))
+                        else:
+                            await websocket.send_text(
+                                json.dumps(
+                                    {
+                                        "event": "replay",
+                                        "run_id": run_id,
+                                        "status": workflow_run.status.value if workflow_run.status else "unknown",
+                                        "total_events": 0,
+                                        "message": "Run completed but no events stored.",
+                                    }
+                                )
+                            )
+                        return
+
+            # Run not found anywhere
+            await websocket.send_text(
+                json.dumps({"event": "error", "error": f"Run {run_id} not found in buffer or database"})
+            )
+            return
+
+        # Run is in buffer (still active or recently completed)
+        if buffer_status in [RunStatus.completed, RunStatus.error, RunStatus.cancelled]:
+            # Run finished - send all events from buffer
+            all_events = event_buffer.get_events(run_id, last_event_index=None)
+
+            await websocket.send_text(
+                json.dumps(
+                    {
+                        "event": "replay",
+                        "run_id": run_id,
+                        "status": buffer_status.value,
+                        "total_events": len(all_events),
+                        "message": f"Run {buffer_status.value}. Replaying all events.",
+                    }
+                )
+            )
+
+            # Send all events
+            for idx, buffered_event in enumerate(all_events):
+                # Convert event to dict and add event_index
+                event_dict = (
+                    buffered_event.model_dump() if hasattr(buffered_event, "model_dump") else buffered_event.to_dict()
+                )
+                event_dict["event_index"] = idx
+                if "run_id" not in event_dict:
+                    event_dict["run_id"] = run_id
+
+                await websocket.send_text(json.dumps(event_dict))
+            return
+
+        # Run is still active - send missed events and subscribe to new ones
+        missed_events = event_buffer.get_events(run_id, last_event_index)
+        current_event_count = event_buffer.get_event_count(run_id)
+
+        if missed_events:
+            # Send catch-up notification
+            await websocket.send_text(
+                json.dumps(
+                    {
+                        "event": "catch_up",
+                        "run_id": run_id,
+                        "status": "running",
+                        "missed_events": len(missed_events),
+                        "current_event_count": current_event_count,
+                        "message": f"Catching up on {len(missed_events)} missed events.",
+                    }
+                )
+            )
+
+            # Send missed events
+            start_index = (last_event_index + 1) if last_event_index is not None else 0
+            for idx, buffered_event in enumerate(missed_events):
+                # Convert event to dict and add event_index
+                event_dict = (
+                    buffered_event.model_dump() if hasattr(buffered_event, "model_dump") else buffered_event.to_dict()
+                )
+                event_dict["event_index"] = start_index + idx
+                if "run_id" not in event_dict:
+                    event_dict["run_id"] = run_id
+
+                await websocket.send_text(json.dumps(event_dict))
+
+        # Register websocket for future events
+        await websocket_manager.register_websocket(run_id, websocket)
+
+        # Send subscription confirmation
+        await websocket.send_text(
+            json.dumps(
+                {
+                    "event": "subscribed",
+                    "run_id": run_id,
+                    "status": "running",
+                    "current_event_count": current_event_count,
+                    "message": "Subscribed to workflow run. You will receive new events as they occur.",
+                }
+            )
+        )
+
+        log_debug(f"Client subscribed to workflow run {run_id} (last_event_index: {last_event_index})")
+
+    except Exception as e:
+        logger.error(f"Error handling workflow subscription: {e}")
+        await websocket.send_text(
+            json.dumps(
+                {
+                    "event": "error",
+                    "error": f"Subscription failed: {str(e)}",
+                }
+            )
+        )
+
+
 async def workflow_response_streamer(
-    workflow: Workflow,
-    input:
+    workflow: Union[Workflow, RemoteWorkflow],
+    input: Union[str, Dict[str, Any], List[Any], BaseModel],
     session_id: Optional[str] = None,
     user_id: Optional[str] = None,
     background_tasks: Optional[BackgroundTasks] = None,
+    auth_token: Optional[str] = None,
     **kwargs: Any,
 ) -> AsyncGenerator:
     try:
@@ -213,12 +295,21 @@ async def workflow_response_streamer(
         if background_tasks is not None:
             kwargs["background_tasks"] = background_tasks

-
+        if "stream_events" in kwargs:
+            stream_events = kwargs.pop("stream_events")
+        else:
+            stream_events = True
+
+        # Pass auth_token for remote workflows
+        if auth_token and isinstance(workflow, RemoteWorkflow):
+            kwargs["auth_token"] = auth_token
+
+        run_response = workflow.arun(  # type: ignore
             input=input,
             session_id=session_id,
             user_id=user_id,
             stream=True,
-            stream_events=
+            stream_events=stream_events,
             **kwargs,
         )

@@ -368,6 +459,10 @@ def get_websocket_router(
                     # Handle workflow execution directly via WebSocket
                     await handle_workflow_via_websocket(websocket, message, os)

+                elif action == "reconnect":
+                    # Subscribe/reconnect to an existing workflow run
+                    await handle_workflow_subscription(websocket, message, os)
+
                 else:
                     await websocket.send_text(json.dumps({"event": "error", "error": f"Unknown action: {action}"}))

@@ -479,8 +574,10 @@ def get_workflow_router(
         workflow = get_workflow_by_id(workflow_id, os.workflows)
         if workflow is None:
             raise HTTPException(status_code=404, detail="Workflow not found")
-
-
+        if isinstance(workflow, RemoteWorkflow):
+            return await workflow.get_workflow_config()
+        else:
+            return await WorkflowResponse.from_workflow(workflow=workflow)

     @router.post(
         "/workflows/{workflow_id}/runs",
@@ -528,25 +625,25 @@ def get_workflow_router(
     ):
         kwargs = await get_request_kwargs(request, create_workflow_run)

-        if hasattr(request.state, "user_id"):
+        if hasattr(request.state, "user_id") and request.state.user_id is not None:
             if user_id:
                 log_warning("User ID parameter passed in both request state and kwargs, using request state")
             user_id = request.state.user_id
-        if hasattr(request.state, "session_id"):
+        if hasattr(request.state, "session_id") and request.state.session_id is not None:
             if session_id:
                 log_warning("Session ID parameter passed in both request state and kwargs, using request state")
             session_id = request.state.session_id
-        if hasattr(request.state, "session_state"):
+        if hasattr(request.state, "session_state") and request.state.session_state is not None:
             session_state = request.state.session_state
             if "session_state" in kwargs:
                 log_warning("Session state parameter passed in both request state and kwargs, using request state")
             kwargs["session_state"] = session_state
-        if hasattr(request.state, "dependencies"):
+        if hasattr(request.state, "dependencies") and request.state.dependencies is not None:
             dependencies = request.state.dependencies
             if "dependencies" in kwargs:
                 log_warning("Dependencies parameter passed in both request state and kwargs, using request state")
             kwargs["dependencies"] = dependencies
-        if hasattr(request.state, "metadata"):
+        if hasattr(request.state, "metadata") and request.state.metadata is not None:
             metadata = request.state.metadata
             if "metadata" in kwargs:
                 log_warning("Metadata parameter passed in both request state and kwargs, using request state")
@@ -563,6 +660,9 @@ def get_workflow_router(
             logger.debug("Creating new session")
             session_id = str(uuid4())

+        # Extract auth token for remote workflows
+        auth_token = get_auth_token_from_request(request)
+
         # Return based on stream parameter
         try:
             if stream:
@@ -573,11 +673,16 @@
                         session_id=session_id,
                         user_id=user_id,
                         background_tasks=background_tasks,
+                        auth_token=auth_token,
                         **kwargs,
                     ),
                     media_type="text/event-stream",
                 )
             else:
+                # Pass auth_token for remote workflows
+                if auth_token and isinstance(workflow, RemoteWorkflow):
+                    kwargs["auth_token"] = auth_token
+
                 run_response = await workflow.arun(
                     input=message,
                     session_id=session_id,
@@ -616,8 +721,9 @@
         if workflow is None:
             raise HTTPException(status_code=404, detail="Workflow not found")

-
-
+        cancelled = workflow.cancel_run(run_id=run_id)
+        if not cancelled:
+            raise HTTPException(status_code=500, detail="Failed to cancel run - run not found or already completed")

         return JSONResponse(content={}, status_code=200)

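For orientation, here is a minimal client-side sketch of the reconnect flow that `handle_workflow_subscription` above enables. The WebSocket URL and the `websockets` package are assumptions (the endpoint path is not part of this diff); the message fields (`action: "reconnect"`, `run_id`, `last_event_index`) and the control events (`catch_up`, `replay`, `subscribed`) come from the handler added above.

```python
# Hypothetical client sketch; the URL and the `websockets` dependency are assumptions.
import asyncio
import json
from typing import Optional

import websockets  # third-party client library, not part of agno


async def resume_run(run_id: str, last_event_index: Optional[int] = None) -> None:
    uri = "ws://localhost:7777/workflows/ws"  # placeholder endpoint
    async with websockets.connect(uri) as ws:
        # Ask the server to replay missed events and subscribe to new ones.
        await ws.send(
            json.dumps({"action": "reconnect", "run_id": run_id, "last_event_index": last_event_index})
        )
        async for raw in ws:
            event = json.loads(raw)
            if event.get("event") in ("replay", "catch_up", "subscribed", "error"):
                print("control:", event.get("message") or event.get("error"))
            else:
                # Workflow events carry an event_index, which the client can store
                # and send back on the next reconnect to skip already-seen events.
                last_event_index = event.get("event_index", last_event_index)
                print("event:", event)


if __name__ == "__main__":
    asyncio.run(resume_run("run-123"))
```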
agno/os/routers/workflows/schema.py
CHANGED

@@ -4,11 +4,75 @@ from pydantic import BaseModel, Field

 from agno.os.routers.agents.schema import AgentResponse
 from agno.os.routers.teams.schema import TeamResponse
-from agno.os.utils import get_workflow_input_schema_dict
 from agno.workflow.agent import WorkflowAgent
 from agno.workflow.workflow import Workflow


+def _generate_schema_from_params(params: Dict[str, Any]) -> Dict[str, Any]:
+    """Convert function parameters to JSON schema"""
+    properties: Dict[str, Any] = {}
+    required: List[str] = []
+
+    for param_name, param_info in params.items():
+        # Skip the default 'message' parameter for custom kwargs workflows
+        if param_name == "message":
+            continue
+
+        # Map Python types to JSON schema types
+        param_type = param_info.get("annotation", "str")
+        default_value = param_info.get("default")
+        is_required = param_info.get("required", False)
+
+        # Convert Python type annotations to JSON schema types
+        if param_type == "str":
+            properties[param_name] = {"type": "string"}
+        elif param_type == "bool":
+            properties[param_name] = {"type": "boolean"}
+        elif param_type == "int":
+            properties[param_name] = {"type": "integer"}
+        elif param_type == "float":
+            properties[param_name] = {"type": "number"}
+        elif "List" in str(param_type):
+            properties[param_name] = {"type": "array", "items": {"type": "string"}}
+        else:
+            properties[param_name] = {"type": "string"}  # fallback
+
+        # Add default value if present
+        if default_value is not None:
+            properties[param_name]["default"] = default_value
+
+        # Add to required if no default value
+        if is_required and default_value is None:
+            required.append(param_name)
+
+    schema = {"type": "object", "properties": properties}
+
+    if required:
+        schema["required"] = required
+
+    return schema
+
+
+def get_workflow_input_schema_dict(workflow: Workflow) -> Optional[Dict[str, Any]]:
+    """Get input schema as dictionary for API responses"""
+
+    # Priority 1: Explicit input_schema (Pydantic model)
+    if workflow.input_schema is not None:
+        try:
+            return workflow.input_schema.model_json_schema()
+        except Exception:
+            return None
+
+    # Priority 2: Auto-generate from custom kwargs
+    if workflow.steps and callable(workflow.steps):
+        custom_params = workflow.run_parameters
+        if custom_params and len(custom_params) > 1:  # More than just 'message'
+            return _generate_schema_from_params(custom_params)
+
+    # Priority 3: No schema (expects string message)
+    return None
+
+
 class WorkflowResponse(BaseModel):
     id: Optional[str] = Field(None, description="Unique identifier for the workflow")
     name: Optional[str] = Field(None, description="Name of the workflow")
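To make the new schema generation concrete, here is a hedged, illustrative example: assuming `workflow.run_parameters` yields entries shaped like the `annotation`/`default`/`required` lookups above (the parameter names here are invented), `_generate_schema_from_params` would map them as shown.

```python
# Illustrative only: parameter names are invented; the dict shape mirrors the
# "annotation" / "default" / "required" keys read by _generate_schema_from_params above.
params = {
    "message": {"annotation": "str", "default": None, "required": True},  # always skipped
    "topic": {"annotation": "str", "default": None, "required": True},
    "max_results": {"annotation": "int", "default": 5, "required": False},
}

# _generate_schema_from_params(params) would return:
expected = {
    "type": "object",
    "properties": {
        "topic": {"type": "string"},
        "max_results": {"type": "integer", "default": 5},
    },
    "required": ["topic"],  # only parameters that are required and have no default
}
```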
agno/os/schema.py
CHANGED

@@ -5,6 +5,7 @@ from typing import Any, Dict, Generic, List, Optional, TypeVar, Union
 from pydantic import BaseModel, ConfigDict, Field

 from agno.agent import Agent
+from agno.agent.remote import RemoteAgent
 from agno.db.base import SessionType
 from agno.os.config import (
     ChatConfig,
@@ -21,7 +22,9 @@ from agno.os.utils import (
     get_session_name,
 )
 from agno.session import AgentSession, TeamSession, WorkflowSession
+from agno.team.remote import RemoteTeam
 from agno.team.team import Team
+from agno.workflow.remote import RemoteWorkflow
 from agno.workflow.workflow import Workflow


@@ -102,7 +105,7 @@ class AgentSummaryResponse(BaseModel):
     db_id: Optional[str] = Field(None, description="Database identifier")

     @classmethod
-    def from_agent(cls, agent: Agent) -> "AgentSummaryResponse":
+    def from_agent(cls, agent: Union[Agent, RemoteAgent]) -> "AgentSummaryResponse":
         return cls(id=agent.id, name=agent.name, description=agent.description, db_id=agent.db.id if agent.db else None)


@@ -113,8 +116,9 @@ class TeamSummaryResponse(BaseModel):
     db_id: Optional[str] = Field(None, description="Database identifier")

     @classmethod
-    def from_team(cls, team: Team) -> "TeamSummaryResponse":
-
+    def from_team(cls, team: Union[Team, RemoteTeam]) -> "TeamSummaryResponse":
+        db_id = team.db.id if team.db else None
+        return cls(id=team.id, name=team.name, description=team.description, db_id=db_id)


 class WorkflowSummaryResponse(BaseModel):
@@ -124,12 +128,13 @@ class WorkflowSummaryResponse(BaseModel):
     db_id: Optional[str] = Field(None, description="Database identifier")

     @classmethod
-    def from_workflow(cls, workflow: Workflow) -> "WorkflowSummaryResponse":
+    def from_workflow(cls, workflow: Union[Workflow, RemoteWorkflow]) -> "WorkflowSummaryResponse":
+        db_id = workflow.db.id if workflow.db else None
         return cls(
             id=workflow.id,
             name=workflow.name,
             description=workflow.description,
-            db_id=
+            db_id=db_id,
         )


@@ -182,18 +187,39 @@ class SessionSchema(BaseModel):

     @classmethod
     def from_dict(cls, session: Dict[str, Any]) -> "SessionSchema":
-        session_name =
+        session_name = session.get("session_name")
+        if not session_name:
+            session_name = get_session_name(session)
         session_data = session.get("session_data", {}) or {}
+
+        created_at = session.get("created_at", 0)
+        updated_at = session.get("updated_at", 0)
+
+        # Handle created_at and updated_at as either ISO 8601 string or timestamp
+        def parse_datetime(val):
+            if isinstance(val, str):
+                try:
+                    # Accept both with and without Z
+                    if val.endswith("Z"):
+                        val = val[:-1] + "+00:00"
+                    return datetime.fromisoformat(val)
+                except Exception:
+                    return None
+            elif isinstance(val, (int, float)):
+                try:
+                    return datetime.fromtimestamp(val, tz=timezone.utc)
+                except Exception:
+                    return None
+            return None
+
+        created_at = parse_datetime(session.get("created_at", 0))
+        updated_at = parse_datetime(session.get("updated_at", 0))
         return cls(
             session_id=session.get("session_id", ""),
             session_name=session_name,
             session_state=session_data.get("session_state", None),
-            created_at=
-
-            else None,
-            updated_at=datetime.fromtimestamp(session.get("updated_at", 0), tz=timezone.utc)
-            if session.get("updated_at")
-            else None,
+            created_at=created_at,
+            updated_at=updated_at,
         )


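The new `parse_datetime` helper in `SessionSchema.from_dict` matters because `datetime.fromisoformat` only accepts a trailing `Z` on Python 3.11+, while sessions may also carry epoch timestamps. The standalone sketch below mirrors the helper added above; the sample inputs are illustrative.

```python
from datetime import datetime, timezone


def parse_datetime(val):
    # Mirrors the helper added to SessionSchema.from_dict above.
    if isinstance(val, str):
        try:
            # Normalize a trailing "Z" so fromisoformat also works on Python < 3.11
            if val.endswith("Z"):
                val = val[:-1] + "+00:00"
            return datetime.fromisoformat(val)
        except Exception:
            return None
    elif isinstance(val, (int, float)):
        try:
            return datetime.fromtimestamp(val, tz=timezone.utc)
        except Exception:
            return None
    return None


print(parse_datetime("2024-05-01T12:00:00Z"))  # ISO 8601 string with Z suffix -> aware datetime
print(parse_datetime(1714564800))              # epoch seconds -> aware UTC datetime
print(parse_datetime("not-a-date"))            # unparsable -> None
```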