agno 2.2.13__py3-none-any.whl → 2.4.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/__init__.py +6 -0
- agno/agent/agent.py +5252 -3145
- agno/agent/remote.py +525 -0
- agno/api/api.py +2 -0
- agno/client/__init__.py +3 -0
- agno/client/a2a/__init__.py +10 -0
- agno/client/a2a/client.py +554 -0
- agno/client/a2a/schemas.py +112 -0
- agno/client/a2a/utils.py +369 -0
- agno/client/os.py +2669 -0
- agno/compression/__init__.py +3 -0
- agno/compression/manager.py +247 -0
- agno/culture/manager.py +2 -2
- agno/db/base.py +927 -6
- agno/db/dynamo/dynamo.py +788 -2
- agno/db/dynamo/schemas.py +128 -0
- agno/db/dynamo/utils.py +26 -3
- agno/db/firestore/firestore.py +674 -50
- agno/db/firestore/schemas.py +41 -0
- agno/db/firestore/utils.py +25 -10
- agno/db/gcs_json/gcs_json_db.py +506 -3
- agno/db/gcs_json/utils.py +14 -2
- agno/db/in_memory/in_memory_db.py +203 -4
- agno/db/in_memory/utils.py +14 -2
- agno/db/json/json_db.py +498 -2
- agno/db/json/utils.py +14 -2
- agno/db/migrations/manager.py +199 -0
- agno/db/migrations/utils.py +19 -0
- agno/db/migrations/v1_to_v2.py +54 -16
- agno/db/migrations/versions/__init__.py +0 -0
- agno/db/migrations/versions/v2_3_0.py +977 -0
- agno/db/mongo/async_mongo.py +1013 -39
- agno/db/mongo/mongo.py +684 -4
- agno/db/mongo/schemas.py +48 -0
- agno/db/mongo/utils.py +17 -0
- agno/db/mysql/__init__.py +2 -1
- agno/db/mysql/async_mysql.py +2958 -0
- agno/db/mysql/mysql.py +722 -53
- agno/db/mysql/schemas.py +77 -11
- agno/db/mysql/utils.py +151 -8
- agno/db/postgres/async_postgres.py +1254 -137
- agno/db/postgres/postgres.py +2316 -93
- agno/db/postgres/schemas.py +153 -21
- agno/db/postgres/utils.py +22 -7
- agno/db/redis/redis.py +531 -3
- agno/db/redis/schemas.py +36 -0
- agno/db/redis/utils.py +31 -15
- agno/db/schemas/evals.py +1 -0
- agno/db/schemas/memory.py +20 -9
- agno/db/singlestore/schemas.py +70 -1
- agno/db/singlestore/singlestore.py +737 -74
- agno/db/singlestore/utils.py +13 -3
- agno/db/sqlite/async_sqlite.py +1069 -89
- agno/db/sqlite/schemas.py +133 -1
- agno/db/sqlite/sqlite.py +2203 -165
- agno/db/sqlite/utils.py +21 -11
- agno/db/surrealdb/models.py +25 -0
- agno/db/surrealdb/surrealdb.py +603 -1
- agno/db/utils.py +60 -0
- agno/eval/__init__.py +26 -3
- agno/eval/accuracy.py +25 -12
- agno/eval/agent_as_judge.py +871 -0
- agno/eval/base.py +29 -0
- agno/eval/performance.py +10 -4
- agno/eval/reliability.py +22 -13
- agno/eval/utils.py +2 -1
- agno/exceptions.py +42 -0
- agno/hooks/__init__.py +3 -0
- agno/hooks/decorator.py +164 -0
- agno/integrations/discord/client.py +13 -2
- agno/knowledge/__init__.py +4 -0
- agno/knowledge/chunking/code.py +90 -0
- agno/knowledge/chunking/document.py +65 -4
- agno/knowledge/chunking/fixed.py +4 -1
- agno/knowledge/chunking/markdown.py +102 -11
- agno/knowledge/chunking/recursive.py +2 -2
- agno/knowledge/chunking/semantic.py +130 -48
- agno/knowledge/chunking/strategy.py +18 -0
- agno/knowledge/embedder/azure_openai.py +0 -1
- agno/knowledge/embedder/google.py +1 -1
- agno/knowledge/embedder/mistral.py +1 -1
- agno/knowledge/embedder/nebius.py +1 -1
- agno/knowledge/embedder/openai.py +16 -12
- agno/knowledge/filesystem.py +412 -0
- agno/knowledge/knowledge.py +4261 -1199
- agno/knowledge/protocol.py +134 -0
- agno/knowledge/reader/arxiv_reader.py +3 -2
- agno/knowledge/reader/base.py +9 -7
- agno/knowledge/reader/csv_reader.py +91 -42
- agno/knowledge/reader/docx_reader.py +9 -10
- agno/knowledge/reader/excel_reader.py +225 -0
- agno/knowledge/reader/field_labeled_csv_reader.py +38 -48
- agno/knowledge/reader/firecrawl_reader.py +3 -2
- agno/knowledge/reader/json_reader.py +16 -22
- agno/knowledge/reader/markdown_reader.py +15 -14
- agno/knowledge/reader/pdf_reader.py +33 -28
- agno/knowledge/reader/pptx_reader.py +9 -10
- agno/knowledge/reader/reader_factory.py +135 -1
- agno/knowledge/reader/s3_reader.py +8 -16
- agno/knowledge/reader/tavily_reader.py +3 -3
- agno/knowledge/reader/text_reader.py +15 -14
- agno/knowledge/reader/utils/__init__.py +17 -0
- agno/knowledge/reader/utils/spreadsheet.py +114 -0
- agno/knowledge/reader/web_search_reader.py +8 -65
- agno/knowledge/reader/website_reader.py +16 -13
- agno/knowledge/reader/wikipedia_reader.py +36 -3
- agno/knowledge/reader/youtube_reader.py +3 -2
- agno/knowledge/remote_content/__init__.py +33 -0
- agno/knowledge/remote_content/config.py +266 -0
- agno/knowledge/remote_content/remote_content.py +105 -17
- agno/knowledge/utils.py +76 -22
- agno/learn/__init__.py +71 -0
- agno/learn/config.py +463 -0
- agno/learn/curate.py +185 -0
- agno/learn/machine.py +725 -0
- agno/learn/schemas.py +1114 -0
- agno/learn/stores/__init__.py +38 -0
- agno/learn/stores/decision_log.py +1156 -0
- agno/learn/stores/entity_memory.py +3275 -0
- agno/learn/stores/learned_knowledge.py +1583 -0
- agno/learn/stores/protocol.py +117 -0
- agno/learn/stores/session_context.py +1217 -0
- agno/learn/stores/user_memory.py +1495 -0
- agno/learn/stores/user_profile.py +1220 -0
- agno/learn/utils.py +209 -0
- agno/media.py +22 -6
- agno/memory/__init__.py +14 -1
- agno/memory/manager.py +223 -8
- agno/memory/strategies/__init__.py +15 -0
- agno/memory/strategies/base.py +66 -0
- agno/memory/strategies/summarize.py +196 -0
- agno/memory/strategies/types.py +37 -0
- agno/models/aimlapi/aimlapi.py +17 -0
- agno/models/anthropic/claude.py +434 -59
- agno/models/aws/bedrock.py +121 -20
- agno/models/aws/claude.py +131 -274
- agno/models/azure/ai_foundry.py +10 -6
- agno/models/azure/openai_chat.py +33 -10
- agno/models/base.py +1162 -561
- agno/models/cerebras/cerebras.py +120 -24
- agno/models/cerebras/cerebras_openai.py +21 -2
- agno/models/cohere/chat.py +65 -6
- agno/models/cometapi/cometapi.py +18 -1
- agno/models/dashscope/dashscope.py +2 -3
- agno/models/deepinfra/deepinfra.py +18 -1
- agno/models/deepseek/deepseek.py +69 -3
- agno/models/fireworks/fireworks.py +18 -1
- agno/models/google/gemini.py +959 -89
- agno/models/google/utils.py +22 -0
- agno/models/groq/groq.py +48 -18
- agno/models/huggingface/huggingface.py +17 -6
- agno/models/ibm/watsonx.py +16 -6
- agno/models/internlm/internlm.py +18 -1
- agno/models/langdb/langdb.py +13 -1
- agno/models/litellm/chat.py +88 -9
- agno/models/litellm/litellm_openai.py +18 -1
- agno/models/message.py +24 -5
- agno/models/meta/llama.py +40 -13
- agno/models/meta/llama_openai.py +22 -21
- agno/models/metrics.py +12 -0
- agno/models/mistral/mistral.py +8 -4
- agno/models/n1n/__init__.py +3 -0
- agno/models/n1n/n1n.py +57 -0
- agno/models/nebius/nebius.py +6 -7
- agno/models/nvidia/nvidia.py +20 -3
- agno/models/ollama/__init__.py +2 -0
- agno/models/ollama/chat.py +17 -6
- agno/models/ollama/responses.py +100 -0
- agno/models/openai/__init__.py +2 -0
- agno/models/openai/chat.py +117 -26
- agno/models/openai/open_responses.py +46 -0
- agno/models/openai/responses.py +110 -32
- agno/models/openrouter/__init__.py +2 -0
- agno/models/openrouter/openrouter.py +67 -2
- agno/models/openrouter/responses.py +146 -0
- agno/models/perplexity/perplexity.py +19 -1
- agno/models/portkey/portkey.py +7 -6
- agno/models/requesty/requesty.py +19 -2
- agno/models/response.py +20 -2
- agno/models/sambanova/sambanova.py +20 -3
- agno/models/siliconflow/siliconflow.py +19 -2
- agno/models/together/together.py +20 -3
- agno/models/vercel/v0.py +20 -3
- agno/models/vertexai/claude.py +124 -4
- agno/models/vllm/vllm.py +19 -14
- agno/models/xai/xai.py +19 -2
- agno/os/app.py +467 -137
- agno/os/auth.py +253 -5
- agno/os/config.py +22 -0
- agno/os/interfaces/a2a/a2a.py +7 -6
- agno/os/interfaces/a2a/router.py +635 -26
- agno/os/interfaces/a2a/utils.py +32 -33
- agno/os/interfaces/agui/agui.py +5 -3
- agno/os/interfaces/agui/router.py +26 -16
- agno/os/interfaces/agui/utils.py +97 -57
- agno/os/interfaces/base.py +7 -7
- agno/os/interfaces/slack/router.py +16 -7
- agno/os/interfaces/slack/slack.py +7 -7
- agno/os/interfaces/whatsapp/router.py +35 -7
- agno/os/interfaces/whatsapp/security.py +3 -1
- agno/os/interfaces/whatsapp/whatsapp.py +11 -8
- agno/os/managers.py +326 -0
- agno/os/mcp.py +652 -79
- agno/os/middleware/__init__.py +4 -0
- agno/os/middleware/jwt.py +718 -115
- agno/os/middleware/trailing_slash.py +27 -0
- agno/os/router.py +105 -1558
- agno/os/routers/agents/__init__.py +3 -0
- agno/os/routers/agents/router.py +655 -0
- agno/os/routers/agents/schema.py +288 -0
- agno/os/routers/components/__init__.py +3 -0
- agno/os/routers/components/components.py +475 -0
- agno/os/routers/database.py +155 -0
- agno/os/routers/evals/evals.py +111 -18
- agno/os/routers/evals/schemas.py +38 -5
- agno/os/routers/evals/utils.py +80 -11
- agno/os/routers/health.py +3 -3
- agno/os/routers/knowledge/knowledge.py +284 -35
- agno/os/routers/knowledge/schemas.py +14 -2
- agno/os/routers/memory/memory.py +274 -11
- agno/os/routers/memory/schemas.py +44 -3
- agno/os/routers/metrics/metrics.py +30 -15
- agno/os/routers/metrics/schemas.py +10 -6
- agno/os/routers/registry/__init__.py +3 -0
- agno/os/routers/registry/registry.py +337 -0
- agno/os/routers/session/session.py +143 -14
- agno/os/routers/teams/__init__.py +3 -0
- agno/os/routers/teams/router.py +550 -0
- agno/os/routers/teams/schema.py +280 -0
- agno/os/routers/traces/__init__.py +3 -0
- agno/os/routers/traces/schemas.py +414 -0
- agno/os/routers/traces/traces.py +549 -0
- agno/os/routers/workflows/__init__.py +3 -0
- agno/os/routers/workflows/router.py +757 -0
- agno/os/routers/workflows/schema.py +139 -0
- agno/os/schema.py +157 -584
- agno/os/scopes.py +469 -0
- agno/os/settings.py +3 -0
- agno/os/utils.py +574 -185
- agno/reasoning/anthropic.py +85 -1
- agno/reasoning/azure_ai_foundry.py +93 -1
- agno/reasoning/deepseek.py +102 -2
- agno/reasoning/default.py +6 -7
- agno/reasoning/gemini.py +87 -3
- agno/reasoning/groq.py +109 -2
- agno/reasoning/helpers.py +6 -7
- agno/reasoning/manager.py +1238 -0
- agno/reasoning/ollama.py +93 -1
- agno/reasoning/openai.py +115 -1
- agno/reasoning/vertexai.py +85 -1
- agno/registry/__init__.py +3 -0
- agno/registry/registry.py +68 -0
- agno/remote/__init__.py +3 -0
- agno/remote/base.py +581 -0
- agno/run/__init__.py +2 -4
- agno/run/agent.py +134 -19
- agno/run/base.py +49 -1
- agno/run/cancel.py +65 -52
- agno/run/cancellation_management/__init__.py +9 -0
- agno/run/cancellation_management/base.py +78 -0
- agno/run/cancellation_management/in_memory_cancellation_manager.py +100 -0
- agno/run/cancellation_management/redis_cancellation_manager.py +236 -0
- agno/run/requirement.py +181 -0
- agno/run/team.py +111 -19
- agno/run/workflow.py +2 -1
- agno/session/agent.py +57 -92
- agno/session/summary.py +1 -1
- agno/session/team.py +62 -115
- agno/session/workflow.py +353 -57
- agno/skills/__init__.py +17 -0
- agno/skills/agent_skills.py +377 -0
- agno/skills/errors.py +32 -0
- agno/skills/loaders/__init__.py +4 -0
- agno/skills/loaders/base.py +27 -0
- agno/skills/loaders/local.py +216 -0
- agno/skills/skill.py +65 -0
- agno/skills/utils.py +107 -0
- agno/skills/validator.py +277 -0
- agno/table.py +10 -0
- agno/team/__init__.py +5 -1
- agno/team/remote.py +447 -0
- agno/team/team.py +3769 -2202
- agno/tools/brandfetch.py +27 -18
- agno/tools/browserbase.py +225 -16
- agno/tools/crawl4ai.py +3 -0
- agno/tools/duckduckgo.py +25 -71
- agno/tools/exa.py +0 -21
- agno/tools/file.py +14 -13
- agno/tools/file_generation.py +12 -6
- agno/tools/firecrawl.py +15 -7
- agno/tools/function.py +94 -113
- agno/tools/google_bigquery.py +11 -2
- agno/tools/google_drive.py +4 -3
- agno/tools/knowledge.py +9 -4
- agno/tools/mcp/mcp.py +301 -18
- agno/tools/mcp/multi_mcp.py +269 -14
- agno/tools/mem0.py +11 -10
- agno/tools/memory.py +47 -46
- agno/tools/mlx_transcribe.py +10 -7
- agno/tools/models/nebius.py +5 -5
- agno/tools/models_labs.py +20 -10
- agno/tools/nano_banana.py +151 -0
- agno/tools/parallel.py +0 -7
- agno/tools/postgres.py +76 -36
- agno/tools/python.py +14 -6
- agno/tools/reasoning.py +30 -23
- agno/tools/redshift.py +406 -0
- agno/tools/shopify.py +1519 -0
- agno/tools/spotify.py +919 -0
- agno/tools/tavily.py +4 -1
- agno/tools/toolkit.py +253 -18
- agno/tools/websearch.py +93 -0
- agno/tools/website.py +1 -1
- agno/tools/wikipedia.py +1 -1
- agno/tools/workflow.py +56 -48
- agno/tools/yfinance.py +12 -11
- agno/tracing/__init__.py +12 -0
- agno/tracing/exporter.py +161 -0
- agno/tracing/schemas.py +276 -0
- agno/tracing/setup.py +112 -0
- agno/utils/agent.py +251 -10
- agno/utils/cryptography.py +22 -0
- agno/utils/dttm.py +33 -0
- agno/utils/events.py +264 -7
- agno/utils/hooks.py +111 -3
- agno/utils/http.py +161 -2
- agno/utils/mcp.py +49 -8
- agno/utils/media.py +22 -1
- agno/utils/models/ai_foundry.py +9 -2
- agno/utils/models/claude.py +20 -5
- agno/utils/models/cohere.py +9 -2
- agno/utils/models/llama.py +9 -2
- agno/utils/models/mistral.py +4 -2
- agno/utils/os.py +0 -0
- agno/utils/print_response/agent.py +99 -16
- agno/utils/print_response/team.py +223 -24
- agno/utils/print_response/workflow.py +0 -2
- agno/utils/prompts.py +8 -6
- agno/utils/remote.py +23 -0
- agno/utils/response.py +1 -13
- agno/utils/string.py +91 -2
- agno/utils/team.py +62 -12
- agno/utils/tokens.py +657 -0
- agno/vectordb/base.py +15 -2
- agno/vectordb/cassandra/cassandra.py +1 -1
- agno/vectordb/chroma/__init__.py +2 -1
- agno/vectordb/chroma/chromadb.py +468 -23
- agno/vectordb/clickhouse/clickhousedb.py +1 -1
- agno/vectordb/couchbase/couchbase.py +6 -2
- agno/vectordb/lancedb/lance_db.py +7 -38
- agno/vectordb/lightrag/lightrag.py +7 -6
- agno/vectordb/milvus/milvus.py +118 -84
- agno/vectordb/mongodb/__init__.py +2 -1
- agno/vectordb/mongodb/mongodb.py +14 -31
- agno/vectordb/pgvector/pgvector.py +120 -66
- agno/vectordb/pineconedb/pineconedb.py +2 -19
- agno/vectordb/qdrant/__init__.py +2 -1
- agno/vectordb/qdrant/qdrant.py +33 -56
- agno/vectordb/redis/__init__.py +2 -1
- agno/vectordb/redis/redisdb.py +19 -31
- agno/vectordb/singlestore/singlestore.py +17 -9
- agno/vectordb/surrealdb/surrealdb.py +2 -38
- agno/vectordb/weaviate/__init__.py +2 -1
- agno/vectordb/weaviate/weaviate.py +7 -3
- agno/workflow/__init__.py +5 -1
- agno/workflow/agent.py +2 -2
- agno/workflow/condition.py +12 -10
- agno/workflow/loop.py +28 -9
- agno/workflow/parallel.py +21 -13
- agno/workflow/remote.py +362 -0
- agno/workflow/router.py +12 -9
- agno/workflow/step.py +261 -36
- agno/workflow/steps.py +12 -8
- agno/workflow/types.py +40 -77
- agno/workflow/workflow.py +939 -213
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/METADATA +134 -181
- agno-2.4.3.dist-info/RECORD +677 -0
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/WHEEL +1 -1
- agno/tools/googlesearch.py +0 -98
- agno/tools/memori.py +0 -339
- agno-2.2.13.dist-info/RECORD +0 -575
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/licenses/LICENSE +0 -0
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/top_level.txt +0 -0
agno/workflow/workflow.py
CHANGED
|
@@ -3,6 +3,7 @@ from dataclasses import dataclass
|
|
|
3
3
|
from datetime import datetime
|
|
4
4
|
from os import getenv
|
|
5
5
|
from typing import (
|
|
6
|
+
TYPE_CHECKING,
|
|
6
7
|
Any,
|
|
7
8
|
AsyncIterator,
|
|
8
9
|
Awaitable,
|
|
@@ -23,22 +24,33 @@ from uuid import uuid4
|
|
|
23
24
|
from fastapi import WebSocket
|
|
24
25
|
from pydantic import BaseModel
|
|
25
26
|
|
|
27
|
+
if TYPE_CHECKING:
|
|
28
|
+
from agno.os.managers import WebSocketHandler
|
|
29
|
+
|
|
26
30
|
from agno.agent.agent import Agent
|
|
27
|
-
from agno.db.base import AsyncBaseDb, BaseDb, SessionType
|
|
31
|
+
from agno.db.base import AsyncBaseDb, BaseDb, ComponentType, SessionType
|
|
32
|
+
from agno.db.utils import db_from_dict
|
|
28
33
|
from agno.exceptions import InputCheckError, OutputCheckError, RunCancelledException
|
|
29
34
|
from agno.media import Audio, File, Image, Video
|
|
30
35
|
from agno.models.message import Message
|
|
31
36
|
from agno.models.metrics import Metrics
|
|
37
|
+
from agno.registry import Registry
|
|
32
38
|
from agno.run import RunContext, RunStatus
|
|
33
39
|
from agno.run.agent import RunContentEvent, RunEvent, RunOutput
|
|
34
40
|
from agno.run.cancel import (
|
|
35
|
-
|
|
41
|
+
acancel_run as acancel_run_global,
|
|
36
42
|
)
|
|
37
43
|
from agno.run.cancel import (
|
|
44
|
+
acleanup_run,
|
|
45
|
+
araise_if_cancelled,
|
|
46
|
+
aregister_run,
|
|
38
47
|
cleanup_run,
|
|
39
48
|
raise_if_cancelled,
|
|
40
49
|
register_run,
|
|
41
50
|
)
|
|
51
|
+
from agno.run.cancel import (
|
|
52
|
+
cancel_run as cancel_run_global,
|
|
53
|
+
)
|
|
42
54
|
from agno.run.team import RunContentEvent as TeamRunContentEvent
|
|
43
55
|
from agno.run.team import TeamRunEvent
|
|
44
56
|
from agno.run.workflow import (
|
|
@@ -50,9 +62,9 @@ from agno.run.workflow import (
|
|
|
50
62
|
WorkflowRunOutputEvent,
|
|
51
63
|
WorkflowStartedEvent,
|
|
52
64
|
)
|
|
53
|
-
from agno.session.workflow import WorkflowSession
|
|
65
|
+
from agno.session.workflow import WorkflowChatInteraction, WorkflowSession
|
|
54
66
|
from agno.team.team import Team
|
|
55
|
-
from agno.utils.
|
|
67
|
+
from agno.utils.agent import validate_input
|
|
56
68
|
from agno.utils.log import (
|
|
57
69
|
log_debug,
|
|
58
70
|
log_error,
|
|
@@ -68,7 +80,8 @@ from agno.utils.print_response.workflow import (
|
|
|
68
80
|
print_response,
|
|
69
81
|
print_response_stream,
|
|
70
82
|
)
|
|
71
|
-
from agno.
|
|
83
|
+
from agno.utils.string import generate_id_from_name
|
|
84
|
+
from agno.workflow.agent import WorkflowAgent
|
|
72
85
|
from agno.workflow.condition import Condition
|
|
73
86
|
from agno.workflow.loop import Loop
|
|
74
87
|
from agno.workflow.parallel import Parallel
|
|
@@ -80,7 +93,6 @@ from agno.workflow.types import (
|
|
|
80
93
|
StepMetrics,
|
|
81
94
|
StepOutput,
|
|
82
95
|
StepType,
|
|
83
|
-
WebSocketHandler,
|
|
84
96
|
WorkflowExecutionInput,
|
|
85
97
|
WorkflowMetrics,
|
|
86
98
|
)
|
|
@@ -153,8 +165,6 @@ class Workflow:
|
|
|
153
165
|
stream: Optional[bool] = None
|
|
154
166
|
# Stream the intermediate steps from the Workflow
|
|
155
167
|
stream_events: bool = False
|
|
156
|
-
# [Deprecated] Stream the intermediate steps from the Workflow
|
|
157
|
-
stream_intermediate_steps: bool = False
|
|
158
168
|
# Stream events from executors (agents/teams/functions) within steps
|
|
159
169
|
stream_executor_events: bool = True
|
|
160
170
|
|
|
@@ -166,7 +176,7 @@ class Workflow:
|
|
|
166
176
|
# Control whether to store executor responses (agent/team responses) in flattened runs
|
|
167
177
|
store_executor_outputs: bool = True
|
|
168
178
|
|
|
169
|
-
websocket_handler: Optional[WebSocketHandler] = None
|
|
179
|
+
websocket_handler: Optional["WebSocketHandler"] = None
|
|
170
180
|
|
|
171
181
|
# Input schema to validate the input to the workflow
|
|
172
182
|
input_schema: Optional[Type[BaseModel]] = None
|
|
@@ -184,6 +194,9 @@ class Workflow:
|
|
|
184
194
|
# Number of historical runs to include in the messages
|
|
185
195
|
num_history_runs: int = 3
|
|
186
196
|
|
|
197
|
+
# If True, run hooks as FastAPI background tasks (non-blocking). Set by AgentOS.
|
|
198
|
+
_run_hooks_in_background: bool = False
|
|
199
|
+
|
|
187
200
|
def __init__(
|
|
188
201
|
self,
|
|
189
202
|
id: Optional[str] = None,
|
|
@@ -196,10 +209,10 @@ class Workflow:
|
|
|
196
209
|
session_state: Optional[Dict[str, Any]] = None,
|
|
197
210
|
overwrite_db_session_state: bool = False,
|
|
198
211
|
user_id: Optional[str] = None,
|
|
212
|
+
debug_level: Literal[1, 2] = 1,
|
|
199
213
|
debug_mode: Optional[bool] = False,
|
|
200
214
|
stream: Optional[bool] = None,
|
|
201
215
|
stream_events: bool = False,
|
|
202
|
-
stream_intermediate_steps: bool = False,
|
|
203
216
|
stream_executor_events: bool = True,
|
|
204
217
|
store_events: bool = False,
|
|
205
218
|
events_to_skip: Optional[List[Union[WorkflowRunEvent, RunEvent, TeamRunEvent]]] = None,
|
|
@@ -221,11 +234,10 @@ class Workflow:
|
|
|
221
234
|
self.overwrite_db_session_state = overwrite_db_session_state
|
|
222
235
|
self.user_id = user_id
|
|
223
236
|
self.debug_mode = debug_mode
|
|
237
|
+
self.debug_level = debug_level
|
|
224
238
|
self.store_events = store_events
|
|
225
239
|
self.events_to_skip = events_to_skip or []
|
|
226
240
|
self.stream = stream
|
|
227
|
-
self.stream_events = stream_events
|
|
228
|
-
self.stream_intermediate_steps = stream_intermediate_steps
|
|
229
241
|
self.stream_executor_events = stream_executor_events
|
|
230
242
|
self.store_executor_outputs = store_executor_outputs
|
|
231
243
|
self.input_schema = input_schema
|
|
@@ -236,6 +248,7 @@ class Workflow:
|
|
|
236
248
|
self.add_workflow_history_to_steps = add_workflow_history_to_steps
|
|
237
249
|
self.num_history_runs = num_history_runs
|
|
238
250
|
self._workflow_session: Optional[WorkflowSession] = None
|
|
251
|
+
self.stream_events = stream_events
|
|
239
252
|
|
|
240
253
|
# Warn if workflow history is enabled without a database
|
|
241
254
|
if self.add_workflow_history_to_steps and self.db is None:
|
|
@@ -246,67 +259,11 @@ class Workflow:
|
|
|
246
259
|
|
|
247
260
|
def set_id(self) -> None:
|
|
248
261
|
if self.id is None:
|
|
249
|
-
|
|
250
|
-
self.id = self.name.lower().replace(" ", "-")
|
|
251
|
-
else:
|
|
252
|
-
self.id = str(uuid4())
|
|
262
|
+
self.id = generate_id_from_name(self.name)
|
|
253
263
|
|
|
254
264
|
def _has_async_db(self) -> bool:
|
|
255
265
|
return self.db is not None and isinstance(self.db, AsyncBaseDb)
|
|
256
266
|
|
|
257
|
-
def _validate_input(
|
|
258
|
-
self, input: Optional[Union[str, Dict[str, Any], List[Any], BaseModel, List[Message]]]
|
|
259
|
-
) -> Optional[Union[str, List, Dict, Message, BaseModel]]:
|
|
260
|
-
"""Parse and validate input against input_schema if provided"""
|
|
261
|
-
if self.input_schema is None:
|
|
262
|
-
return input # Return input unchanged if no schema is set
|
|
263
|
-
|
|
264
|
-
if input is None:
|
|
265
|
-
raise ValueError("Input required when input_schema is set")
|
|
266
|
-
|
|
267
|
-
# Handle Message objects - extract content
|
|
268
|
-
if isinstance(input, Message):
|
|
269
|
-
input = input.content # type: ignore
|
|
270
|
-
|
|
271
|
-
# If input is a string, convert it to a dict
|
|
272
|
-
if isinstance(input, str):
|
|
273
|
-
import json
|
|
274
|
-
|
|
275
|
-
try:
|
|
276
|
-
input = json.loads(input)
|
|
277
|
-
except Exception as e:
|
|
278
|
-
raise ValueError(f"Failed to parse input. Is it a valid JSON string?: {e}")
|
|
279
|
-
|
|
280
|
-
# Case 1: Message is already a BaseModel instance
|
|
281
|
-
if isinstance(input, BaseModel):
|
|
282
|
-
if isinstance(input, self.input_schema):
|
|
283
|
-
try:
|
|
284
|
-
return input
|
|
285
|
-
except Exception as e:
|
|
286
|
-
raise ValueError(f"BaseModel validation failed: {str(e)}")
|
|
287
|
-
else:
|
|
288
|
-
# Different BaseModel types
|
|
289
|
-
raise ValueError(f"Expected {self.input_schema.__name__} but got {type(input).__name__}")
|
|
290
|
-
|
|
291
|
-
# Case 2: Message is a dict
|
|
292
|
-
elif isinstance(input, dict):
|
|
293
|
-
try:
|
|
294
|
-
# Check if the schema is a TypedDict
|
|
295
|
-
if is_typed_dict(self.input_schema):
|
|
296
|
-
validated_dict = validate_typed_dict(input, self.input_schema)
|
|
297
|
-
return validated_dict
|
|
298
|
-
else:
|
|
299
|
-
validated_model = self.input_schema(**input)
|
|
300
|
-
return validated_model
|
|
301
|
-
except Exception as e:
|
|
302
|
-
raise ValueError(f"Failed to parse dict into {self.input_schema.__name__}: {str(e)}")
|
|
303
|
-
|
|
304
|
-
# Case 3: Other types not supported for structured input
|
|
305
|
-
else:
|
|
306
|
-
raise ValueError(
|
|
307
|
-
f"Cannot validate {type(input)} against input_schema. Expected dict or {self.input_schema.__name__} instance."
|
|
308
|
-
)
|
|
309
|
-
|
|
310
267
|
@property
|
|
311
268
|
def run_parameters(self) -> Dict[str, Any]:
|
|
312
269
|
"""Get the run parameters for the workflow"""
|
|
@@ -384,11 +341,14 @@ class Workflow:
|
|
|
384
341
|
def _initialize_session_state(
|
|
385
342
|
self,
|
|
386
343
|
session_state: Dict[str, Any],
|
|
387
|
-
user_id: Optional[str] = None,
|
|
388
344
|
session_id: Optional[str] = None,
|
|
345
|
+
user_id: Optional[str] = None,
|
|
389
346
|
run_id: Optional[str] = None,
|
|
390
347
|
) -> Dict[str, Any]:
|
|
391
348
|
"""Initialize the session state for the workflow."""
|
|
349
|
+
session_state["workflow_id"] = self.id
|
|
350
|
+
if self.name:
|
|
351
|
+
session_state["workflow_name"] = self.name
|
|
392
352
|
if user_id:
|
|
393
353
|
session_state["current_user_id"] = user_id
|
|
394
354
|
if session_id is not None:
|
|
@@ -396,16 +356,6 @@ class Workflow:
|
|
|
396
356
|
if run_id is not None:
|
|
397
357
|
session_state["current_run_id"] = run_id
|
|
398
358
|
|
|
399
|
-
session_state.update(
|
|
400
|
-
{
|
|
401
|
-
"workflow_id": self.id,
|
|
402
|
-
"run_id": run_id,
|
|
403
|
-
"session_id": session_id,
|
|
404
|
-
}
|
|
405
|
-
)
|
|
406
|
-
if self.name:
|
|
407
|
-
session_state["workflow_name"] = self.name
|
|
408
|
-
|
|
409
359
|
return session_state
|
|
410
360
|
|
|
411
361
|
def _generate_workflow_session_name(self) -> str:
|
|
@@ -586,6 +536,313 @@ class Workflow:
|
|
|
586
536
|
# -*- Delete session
|
|
587
537
|
self.db.delete_session(session_id=session_id)
|
|
588
538
|
|
|
539
|
+
# -*- Serialization Functions
|
|
540
|
+
def to_dict(self) -> Dict[str, Any]:
|
|
541
|
+
"""
|
|
542
|
+
Convert the Workflow to a dictionary.
|
|
543
|
+
|
|
544
|
+
Returns:
|
|
545
|
+
Dict[str, Any]: Dictionary representation of the workflow configuration
|
|
546
|
+
"""
|
|
547
|
+
config: Dict[str, Any] = {}
|
|
548
|
+
|
|
549
|
+
# --- Workflow settings ---
|
|
550
|
+
if self.name is not None:
|
|
551
|
+
config["name"] = self.name
|
|
552
|
+
if self.id is not None:
|
|
553
|
+
config["id"] = self.id
|
|
554
|
+
if self.description is not None:
|
|
555
|
+
config["description"] = self.description
|
|
556
|
+
|
|
557
|
+
# --- User settings ---
|
|
558
|
+
if self.user_id is not None:
|
|
559
|
+
config["user_id"] = self.user_id
|
|
560
|
+
|
|
561
|
+
# --- Session settings ---
|
|
562
|
+
if self.session_id is not None:
|
|
563
|
+
config["session_id"] = self.session_id
|
|
564
|
+
if self.session_state is not None:
|
|
565
|
+
config["session_state"] = self.session_state
|
|
566
|
+
config["overwrite_db_session_state"] = self.overwrite_db_session_state
|
|
567
|
+
|
|
568
|
+
# --- Database settings ---
|
|
569
|
+
if self.db is not None and hasattr(self.db, "to_dict"):
|
|
570
|
+
config["db"] = self.db.to_dict()
|
|
571
|
+
|
|
572
|
+
# --- History settings ---
|
|
573
|
+
config["add_workflow_history_to_steps"] = self.add_workflow_history_to_steps
|
|
574
|
+
config["num_history_runs"] = self.num_history_runs
|
|
575
|
+
|
|
576
|
+
# --- Streaming settings ---
|
|
577
|
+
if self.stream is not None:
|
|
578
|
+
config["stream"] = self.stream
|
|
579
|
+
config["stream_events"] = self.stream_events
|
|
580
|
+
config["stream_executor_events"] = self.stream_executor_events
|
|
581
|
+
config["store_events"] = self.store_events
|
|
582
|
+
config["store_executor_outputs"] = self.store_executor_outputs
|
|
583
|
+
|
|
584
|
+
# --- Schema settings ---
|
|
585
|
+
if self.input_schema is not None:
|
|
586
|
+
if isinstance(self.input_schema, type) and issubclass(self.input_schema, BaseModel):
|
|
587
|
+
config["input_schema"] = self.input_schema.__name__
|
|
588
|
+
elif isinstance(self.input_schema, dict):
|
|
589
|
+
config["input_schema"] = self.input_schema
|
|
590
|
+
|
|
591
|
+
# --- Metadata ---
|
|
592
|
+
if self.metadata is not None:
|
|
593
|
+
config["metadata"] = self.metadata
|
|
594
|
+
|
|
595
|
+
# --- Debug and telemetry settings ---
|
|
596
|
+
config["debug_mode"] = self.debug_mode
|
|
597
|
+
config["telemetry"] = self.telemetry
|
|
598
|
+
|
|
599
|
+
# --- Steps ---
|
|
600
|
+
# TODO: Implement steps serialization for step types other than Step
|
|
601
|
+
if self.steps and isinstance(self.steps, list):
|
|
602
|
+
config["steps"] = [step.to_dict() for step in self.steps if hasattr(step, "to_dict")]
|
|
603
|
+
|
|
604
|
+
return config
|
|
605
|
+
|
|
606
|
+
@classmethod
|
|
607
|
+
def from_dict(
|
|
608
|
+
cls,
|
|
609
|
+
data: Dict[str, Any],
|
|
610
|
+
db: Optional["BaseDb"] = None,
|
|
611
|
+
links: Optional[List[Dict[str, Any]]] = None,
|
|
612
|
+
registry: Optional[Registry] = None,
|
|
613
|
+
) -> "Workflow":
|
|
614
|
+
"""
|
|
615
|
+
Create a Workflow from a dictionary.
|
|
616
|
+
|
|
617
|
+
Args:
|
|
618
|
+
data: Dictionary containing workflow configuration
|
|
619
|
+
db: Optional database for loading agents/teams in steps
|
|
620
|
+
links: Optional links for this workflow version
|
|
621
|
+
registry: Optional registry for rehydrating executors
|
|
622
|
+
|
|
623
|
+
Returns:
|
|
624
|
+
Workflow: Reconstructed workflow instance
|
|
625
|
+
"""
|
|
626
|
+
config = data.copy()
|
|
627
|
+
|
|
628
|
+
# --- Handle DB reconstruction ---
|
|
629
|
+
if "db" in config and isinstance(config["db"], dict):
|
|
630
|
+
db_data = config["db"]
|
|
631
|
+
db_id = db_data.get("id")
|
|
632
|
+
|
|
633
|
+
# First try to get the db from the registry (preferred - reuses existing connection)
|
|
634
|
+
if registry and db_id:
|
|
635
|
+
registry_db = registry.get_db(db_id)
|
|
636
|
+
if registry_db is not None:
|
|
637
|
+
config["db"] = registry_db
|
|
638
|
+
else:
|
|
639
|
+
del config["db"]
|
|
640
|
+
else:
|
|
641
|
+
# No registry or no db_id, fall back to creating from dict
|
|
642
|
+
config["db"] = db_from_dict(db_data)
|
|
643
|
+
if config["db"] is None:
|
|
644
|
+
del config["db"]
|
|
645
|
+
|
|
646
|
+
# --- Handle Schema reconstruction ---
|
|
647
|
+
if "input_schema" in config and isinstance(config["input_schema"], str):
|
|
648
|
+
schema_cls = registry.get_schema(config["input_schema"]) if registry else None
|
|
649
|
+
if schema_cls:
|
|
650
|
+
config["input_schema"] = schema_cls
|
|
651
|
+
else:
|
|
652
|
+
log_warning(f"Input schema {config['input_schema']} not found in registry, skipping.")
|
|
653
|
+
del config["input_schema"]
|
|
654
|
+
|
|
655
|
+
# --- Handle steps reconstruction ---
|
|
656
|
+
steps: Optional[WorkflowSteps] = None
|
|
657
|
+
if "steps" in config and config["steps"]:
|
|
658
|
+
steps = [Step.from_dict(step_data, db=db, links=links, registry=registry) for step_data in config["steps"]]
|
|
659
|
+
del config["steps"]
|
|
660
|
+
|
|
661
|
+
return cls(
|
|
662
|
+
# --- Workflow settings ---
|
|
663
|
+
name=config.get("name"),
|
|
664
|
+
id=config.get("id"),
|
|
665
|
+
description=config.get("description"),
|
|
666
|
+
# --- User settings ---
|
|
667
|
+
user_id=config.get("user_id"),
|
|
668
|
+
# --- Session settings ---
|
|
669
|
+
session_id=config.get("session_id"),
|
|
670
|
+
session_state=config.get("session_state"),
|
|
671
|
+
overwrite_db_session_state=config.get("overwrite_db_session_state", False),
|
|
672
|
+
# --- Database settings ---
|
|
673
|
+
db=config.get("db"),
|
|
674
|
+
# --- History settings ---
|
|
675
|
+
add_workflow_history_to_steps=config.get("add_workflow_history_to_steps", False),
|
|
676
|
+
num_history_runs=config.get("num_history_runs", 3),
|
|
677
|
+
# --- Streaming settings ---
|
|
678
|
+
stream=config.get("stream"),
|
|
679
|
+
stream_events=config.get("stream_events", False),
|
|
680
|
+
stream_executor_events=config.get("stream_executor_events", True),
|
|
681
|
+
store_events=config.get("store_events", False),
|
|
682
|
+
store_executor_outputs=config.get("store_executor_outputs", True),
|
|
683
|
+
# --- Schema settings ---
|
|
684
|
+
# input_schema=config.get("input_schema"), # TODO
|
|
685
|
+
# --- Metadata ---
|
|
686
|
+
metadata=config.get("metadata"),
|
|
687
|
+
# --- Debug and telemetry settings ---
|
|
688
|
+
debug_mode=config.get("debug_mode", False),
|
|
689
|
+
telemetry=config.get("telemetry", True),
|
|
690
|
+
# --- Steps ---
|
|
691
|
+
steps=steps,
|
|
692
|
+
)
|
|
693
|
+
|
|
694
|
+
def save(
|
|
695
|
+
self,
|
|
696
|
+
*,
|
|
697
|
+
db: Optional["BaseDb"] = None,
|
|
698
|
+
stage: str = "published",
|
|
699
|
+
label: Optional[str] = None,
|
|
700
|
+
notes: Optional[str] = None,
|
|
701
|
+
) -> Optional[int]:
|
|
702
|
+
"""
|
|
703
|
+
Save the workflow component and config.
|
|
704
|
+
|
|
705
|
+
Args:
|
|
706
|
+
db: The database to save the component and config to.
|
|
707
|
+
stage: The stage of the component. Defaults to "published".
|
|
708
|
+
label: The label of the component.
|
|
709
|
+
notes: The notes of the component.
|
|
710
|
+
|
|
711
|
+
Returns:
|
|
712
|
+
Optional[int]: The version number of the saved config.
|
|
713
|
+
"""
|
|
714
|
+
db_ = db or self.db
|
|
715
|
+
if not db_:
|
|
716
|
+
raise ValueError("Db not initialized or provided")
|
|
717
|
+
if not isinstance(db_, BaseDb):
|
|
718
|
+
raise ValueError("Async databases not yet supported for save(). Use a sync database.")
|
|
719
|
+
if self.id is None:
|
|
720
|
+
self.id = generate_id_from_name(self.name)
|
|
721
|
+
|
|
722
|
+
# Track saved entity versions for pinning links
|
|
723
|
+
saved_versions: Dict[str, int] = {}
|
|
724
|
+
|
|
725
|
+
# Collect all links
|
|
726
|
+
all_links = []
|
|
727
|
+
steps_config = []
|
|
728
|
+
|
|
729
|
+
try:
|
|
730
|
+
steps_to_save = self.steps if isinstance(self.steps, list) else []
|
|
731
|
+
for position, step in enumerate(steps_to_save):
|
|
732
|
+
# TODO: Support other Step types
|
|
733
|
+
if isinstance(step, Step):
|
|
734
|
+
# TODO: Allow not saving a new config if the agent/team already has a published config and no changes have been made
|
|
735
|
+
# Save agent/team if present and capture version
|
|
736
|
+
if step.agent and isinstance(step.agent, Agent):
|
|
737
|
+
agent_version = step.agent.save(
|
|
738
|
+
db=db_,
|
|
739
|
+
stage=stage,
|
|
740
|
+
label=label,
|
|
741
|
+
notes=notes,
|
|
742
|
+
)
|
|
743
|
+
if step.agent.id is not None and agent_version is not None:
|
|
744
|
+
saved_versions[step.agent.id] = agent_version
|
|
745
|
+
|
|
746
|
+
if step.team and isinstance(step.team, Team):
|
|
747
|
+
team_version = step.team.save(db=db_, stage=stage, label=label, notes=notes)
|
|
748
|
+
if step.team.id is not None and team_version is not None:
|
|
749
|
+
saved_versions[step.team.id] = team_version
|
|
750
|
+
|
|
751
|
+
# Add step config
|
|
752
|
+
steps_config.append(step.to_dict())
|
|
753
|
+
|
|
754
|
+
# Add links with position and pinned version
|
|
755
|
+
for link in step.get_links(position=position):
|
|
756
|
+
# Pin the version if we just saved it
|
|
757
|
+
if link["child_component_id"] in saved_versions:
|
|
758
|
+
link["child_version"] = saved_versions[link["child_component_id"]]
|
|
759
|
+
all_links.append(link)
|
|
760
|
+
|
|
761
|
+
db_.upsert_component(
|
|
762
|
+
component_id=self.id,
|
|
763
|
+
component_type=ComponentType.WORKFLOW,
|
|
764
|
+
name=self.name,
|
|
765
|
+
description=self.description,
|
|
766
|
+
metadata=self.metadata,
|
|
767
|
+
)
|
|
768
|
+
config = db_.upsert_config(
|
|
769
|
+
component_id=self.id,
|
|
770
|
+
config=self.to_dict(),
|
|
771
|
+
links=all_links,
|
|
772
|
+
label=label,
|
|
773
|
+
stage=stage,
|
|
774
|
+
notes=notes,
|
|
775
|
+
)
|
|
776
|
+
|
|
777
|
+
return config.get("version")
|
|
778
|
+
|
|
779
|
+
except Exception as e:
|
|
780
|
+
log_error(f"Error saving workflow: {e}")
|
|
781
|
+
return None
|
|
782
|
+
|
|
783
|
+
@classmethod
|
|
784
|
+
def load(
|
|
785
|
+
cls,
|
|
786
|
+
id: str,
|
|
787
|
+
*,
|
|
788
|
+
db: "BaseDb",
|
|
789
|
+
registry: Optional["Registry"] = None,
|
|
790
|
+
label: Optional[str] = None,
|
|
791
|
+
version: Optional[int] = None,
|
|
792
|
+
) -> Optional["Workflow"]:
|
|
793
|
+
"""
|
|
794
|
+
Load a workflow by id.
|
|
795
|
+
|
|
796
|
+
Args:
|
|
797
|
+
id: The id of the workflow to load.
|
|
798
|
+
db: The database to load the workflow from.
|
|
799
|
+
label: The label of the workflow to load.
|
|
800
|
+
|
|
801
|
+
Returns:
|
|
802
|
+
The workflow loaded from the database or None if not found.
|
|
803
|
+
"""
|
|
804
|
+
# TODO: Use db.load_component_graph instead of get_config
|
|
805
|
+
data: Optional[Dict[str, Any]] = db.get_config(component_id=id, label=label, version=version)
|
|
806
|
+
if data is None:
|
|
807
|
+
return None
|
|
808
|
+
|
|
809
|
+
config = data.get("config")
|
|
810
|
+
if config is None:
|
|
811
|
+
return None
|
|
812
|
+
|
|
813
|
+
workflow = cls.from_dict(config, db=db, registry=registry)
|
|
814
|
+
|
|
815
|
+
workflow.id = id
|
|
816
|
+
workflow.db = db
|
|
817
|
+
|
|
818
|
+
return workflow
|
|
819
|
+
|
|
820
|
+
def delete(
|
|
821
|
+
self,
|
|
822
|
+
*,
|
|
823
|
+
db: Optional["BaseDb"] = None,
|
|
824
|
+
hard_delete: bool = False,
|
|
825
|
+
) -> bool:
|
|
826
|
+
"""
|
|
827
|
+
Delete the workflow component.
|
|
828
|
+
|
|
829
|
+
Args:
|
|
830
|
+
db: The database to delete the workflow from.
|
|
831
|
+
hard_delete: Whether to hard delete the workflow.
|
|
832
|
+
|
|
833
|
+
Returns:
|
|
834
|
+
True if the workflow was deleted, False otherwise.
|
|
835
|
+
"""
|
|
836
|
+
db_ = db or self.db
|
|
837
|
+
if not db_:
|
|
838
|
+
raise ValueError("Db not initialized or provided")
|
|
839
|
+
if not isinstance(db_, BaseDb):
|
|
840
|
+
raise ValueError("Async databases not yet supported for delete(). Use a sync database.")
|
|
841
|
+
if self.id is None:
|
|
842
|
+
raise ValueError("Cannot delete workflow without an id")
|
|
843
|
+
|
|
844
|
+
return db_.delete_component(component_id=self.id, hard_delete=hard_delete)
|
|
845
|
+
|
|
589
846
|
async def aget_run_output(self, run_id: str, session_id: Optional[str] = None) -> Optional[WorkflowRunOutput]:
|
|
590
847
|
"""Get a RunOutput from the database."""
|
|
591
848
|
if self._workflow_session is not None:
|
|
@@ -727,12 +984,17 @@ class Workflow:
|
|
|
727
984
|
if workflow_session is None:
|
|
728
985
|
# Creating new session if none found
|
|
729
986
|
log_debug(f"Creating new WorkflowSession: {session_id}")
|
|
987
|
+
session_data = {}
|
|
988
|
+
if self.session_state is not None:
|
|
989
|
+
from copy import deepcopy
|
|
990
|
+
|
|
991
|
+
session_data["session_state"] = deepcopy(self.session_state)
|
|
730
992
|
workflow_session = WorkflowSession(
|
|
731
993
|
session_id=session_id,
|
|
732
994
|
workflow_id=self.id,
|
|
733
995
|
user_id=user_id,
|
|
734
996
|
workflow_data=self._get_workflow_data(),
|
|
735
|
-
session_data=
|
|
997
|
+
session_data=session_data,
|
|
736
998
|
metadata=self.metadata,
|
|
737
999
|
created_at=int(time()),
|
|
738
1000
|
)
|
|
@@ -755,13 +1017,12 @@ class Workflow:
|
|
|
755
1017
|
Returns:
|
|
756
1018
|
WorkflowSession: The WorkflowSession loaded from the database or created if it does not exist.
|
|
757
1019
|
"""
|
|
758
|
-
if not session_id and not self.session_id:
|
|
759
|
-
raise Exception("No session_id provided")
|
|
760
|
-
|
|
761
1020
|
session_id_to_load = session_id or self.session_id
|
|
1021
|
+
if session_id_to_load is None:
|
|
1022
|
+
raise Exception("No session_id provided")
|
|
762
1023
|
|
|
763
1024
|
# Try to load from database
|
|
764
|
-
if self.db is not None
|
|
1025
|
+
if self.db is not None:
|
|
765
1026
|
workflow_session = cast(WorkflowSession, await self._aread_session(session_id=session_id_to_load))
|
|
766
1027
|
return workflow_session
|
|
767
1028
|
|
|
@@ -831,6 +1092,54 @@ class Workflow:
|
|
|
831
1092
|
self._upsert_session(session=session)
|
|
832
1093
|
log_debug(f"Created or updated WorkflowSession record: {session.session_id}")
|
|
833
1094
|
|
|
1095
|
+
def get_chat_history(
|
|
1096
|
+
self, session_id: Optional[str] = None, last_n_runs: Optional[int] = None
|
|
1097
|
+
) -> List[WorkflowChatInteraction]:
|
|
1098
|
+
"""Return a list of dictionaries containing the input and output for each run in the session.
|
|
1099
|
+
|
|
1100
|
+
Args:
|
|
1101
|
+
session_id: The session ID to get the chat history for. If not provided, the current cached session ID is used.
|
|
1102
|
+
last_n_runs: Number of recent runs to include. If None, all runs will be considered.
|
|
1103
|
+
|
|
1104
|
+
Returns:
|
|
1105
|
+
A list of WorkflowChatInteraction objects.
|
|
1106
|
+
"""
|
|
1107
|
+
session_id = session_id or self.session_id
|
|
1108
|
+
if session_id is None:
|
|
1109
|
+
log_warning("Session ID is not set, cannot get messages for session")
|
|
1110
|
+
return []
|
|
1111
|
+
|
|
1112
|
+
session = self.get_session(
|
|
1113
|
+
session_id=session_id,
|
|
1114
|
+
)
|
|
1115
|
+
if session is None:
|
|
1116
|
+
raise Exception("Session not found")
|
|
1117
|
+
|
|
1118
|
+
return session.get_chat_history(last_n_runs=last_n_runs)
|
|
1119
|
+
|
|
1120
|
+
async def aget_chat_history(
|
|
1121
|
+
self, session_id: Optional[str] = None, last_n_runs: Optional[int] = None
|
|
1122
|
+
) -> List[WorkflowChatInteraction]:
|
|
1123
|
+
"""Return a list of dictionaries containing the input and output for each run in the session.
|
|
1124
|
+
|
|
1125
|
+
Args:
|
|
1126
|
+
session_id: The session ID to get the chat history for. If not provided, the current cached session ID is used.
|
|
1127
|
+
last_n_runs: Number of recent runs to include. If None, all runs will be considered.
|
|
1128
|
+
|
|
1129
|
+
Returns:
|
|
1130
|
+
A list of dictionaries containing the input and output for each run.
|
|
1131
|
+
"""
|
|
1132
|
+
session_id = session_id or self.session_id
|
|
1133
|
+
if session_id is None:
|
|
1134
|
+
log_warning("Session ID is not set, cannot get messages for session")
|
|
1135
|
+
return []
|
|
1136
|
+
|
|
1137
|
+
session = await self.aget_session(session_id=session_id)
|
|
1138
|
+
if session is None:
|
|
1139
|
+
raise Exception("Session not found")
|
|
1140
|
+
|
|
1141
|
+
return session.get_chat_history(last_n_runs=last_n_runs)
|
|
1142
|
+
|
|
834
1143
|
# -*- Session Database Functions
|
|
835
1144
|
async def _aread_session(self, session_id: str) -> Optional[WorkflowSession]:
|
|
836
1145
|
"""Get a Session from the database."""
|
|
@@ -956,7 +1265,7 @@ class Workflow:
|
|
|
956
1265
|
def _broadcast_to_websocket(
|
|
957
1266
|
self,
|
|
958
1267
|
event: Any,
|
|
959
|
-
websocket_handler: Optional[WebSocketHandler] = None,
|
|
1268
|
+
websocket_handler: Optional["WebSocketHandler"] = None,
|
|
960
1269
|
) -> None:
|
|
961
1270
|
"""Broadcast events to WebSocket if available (async context only)"""
|
|
962
1271
|
if websocket_handler:
|
|
@@ -971,7 +1280,7 @@ class Workflow:
|
|
|
971
1280
|
self,
|
|
972
1281
|
event: "WorkflowRunOutputEvent",
|
|
973
1282
|
workflow_run_response: WorkflowRunOutput,
|
|
974
|
-
websocket_handler: Optional[WebSocketHandler] = None,
|
|
1283
|
+
websocket_handler: Optional["WebSocketHandler"] = None,
|
|
975
1284
|
) -> "WorkflowRunOutputEvent":
|
|
976
1285
|
"""Handle workflow events for storage - similar to Team._handle_event"""
|
|
977
1286
|
from agno.run.agent import RunOutput
|
|
@@ -999,8 +1308,71 @@ class Workflow:
|
|
|
999
1308
|
workflow_run_response.events = []
|
|
1000
1309
|
workflow_run_response.events.append(event)
|
|
1001
1310
|
|
|
1311
|
+
# Add to event buffer for reconnection support
|
|
1312
|
+
# Use workflow_run_id for agent/team events, run_id for workflow events
|
|
1313
|
+
buffer_run_id = None
|
|
1314
|
+
event_index = None
|
|
1315
|
+
if hasattr(event, "workflow_run_id") and event.workflow_run_id:
|
|
1316
|
+
# Agent/Team event - use workflow_run_id
|
|
1317
|
+
buffer_run_id = event.workflow_run_id
|
|
1318
|
+
elif hasattr(event, "run_id") and event.run_id:
|
|
1319
|
+
# Workflow event - use run_id
|
|
1320
|
+
buffer_run_id = event.run_id
|
|
1321
|
+
|
|
1322
|
+
if buffer_run_id:
|
|
1323
|
+
try:
|
|
1324
|
+
from agno.os.managers import event_buffer
|
|
1325
|
+
|
|
1326
|
+
# add_event now returns the event_index
|
|
1327
|
+
event_index = event_buffer.add_event(buffer_run_id, event) # type: ignore
|
|
1328
|
+
except Exception as e:
|
|
1329
|
+
# Don't fail workflow execution if buffering fails
|
|
1330
|
+
log_debug(f"Failed to add event to buffer: {e}")
|
|
1331
|
+
|
|
1002
1332
|
# Broadcast to WebSocket if available (async context only)
|
|
1003
|
-
|
|
1333
|
+
# Include event_index for frontend reconnection support
|
|
1334
|
+
if websocket_handler:
|
|
1335
|
+
import asyncio
|
|
1336
|
+
|
|
1337
|
+
try:
|
|
1338
|
+
loop = asyncio.get_running_loop()
|
|
1339
|
+
if loop:
|
|
1340
|
+
# Pass event_index and run_id to websocket handler
|
|
1341
|
+
asyncio.create_task(
|
|
1342
|
+
websocket_handler.handle_event(event, event_index=event_index, run_id=buffer_run_id)
|
|
1343
|
+
)
|
|
1344
|
+
except RuntimeError:
|
|
1345
|
+
pass
|
|
1346
|
+
|
|
1347
|
+
# ALSO broadcast through websocket manager for reconnected clients
|
|
1348
|
+
# This ensures clients who reconnect after workflow started still receive events
|
|
1349
|
+
if buffer_run_id:
|
|
1350
|
+
try:
|
|
1351
|
+
import asyncio
|
|
1352
|
+
|
|
1353
|
+
from agno.os.managers import websocket_manager
|
|
1354
|
+
|
|
1355
|
+
loop = asyncio.get_running_loop()
|
|
1356
|
+
if loop:
|
|
1357
|
+
# Format the event for broadcast
|
|
1358
|
+
event_dict = event.model_dump() if hasattr(event, "model_dump") else event.to_dict()
|
|
1359
|
+
if event_index is not None:
|
|
1360
|
+
event_dict["event_index"] = event_index
|
|
1361
|
+
if "run_id" not in event_dict:
|
|
1362
|
+
event_dict["run_id"] = buffer_run_id
|
|
1363
|
+
|
|
1364
|
+
# Broadcast to registered websocket (if different from original)
|
|
1365
|
+
import json
|
|
1366
|
+
|
|
1367
|
+
from agno.utils.serialize import json_serializer
|
|
1368
|
+
|
|
1369
|
+
asyncio.create_task(
|
|
1370
|
+
websocket_manager.broadcast_to_run(
|
|
1371
|
+
buffer_run_id, json.dumps(event_dict, default=json_serializer)
|
|
1372
|
+
)
|
|
1373
|
+
)
|
|
1374
|
+
except Exception as e:
|
|
1375
|
+
log_debug(f"Failed to broadcast through manager: {e}")
|
|
1004
1376
|
|
|
1005
1377
|
return event
|
|
1006
1378
|
|
|
@@ -1050,9 +1422,12 @@ class Workflow:
|
|
|
1050
1422
|
"""Set debug mode and configure logging"""
|
|
1051
1423
|
if self.debug_mode or getenv("AGNO_DEBUG", "false").lower() == "true":
|
|
1052
1424
|
use_workflow_logger()
|
|
1425
|
+
debug_level: Literal[1, 2] = (
|
|
1426
|
+
cast(Literal[1, 2], int(env)) if (env := getenv("AGNO_DEBUG_LEVEL")) in ("1", "2") else self.debug_level
|
|
1427
|
+
)
|
|
1053
1428
|
|
|
1054
1429
|
self.debug_mode = True
|
|
1055
|
-
set_log_level_to_debug(source_type="workflow")
|
|
1430
|
+
set_log_level_to_debug(source_type="workflow", level=debug_level)
|
|
1056
1431
|
|
|
1057
1432
|
# Propagate to steps - only if steps is iterable (not callable)
|
|
1058
1433
|
if self.steps and not callable(self.steps):
|
|
@@ -1227,14 +1602,13 @@ class Workflow:
|
|
|
1227
1602
|
execution_input: WorkflowExecutionInput,
|
|
1228
1603
|
workflow_run_response: WorkflowRunOutput,
|
|
1229
1604
|
run_context: RunContext,
|
|
1605
|
+
background_tasks: Optional[Any] = None,
|
|
1230
1606
|
**kwargs: Any,
|
|
1231
1607
|
) -> WorkflowRunOutput:
|
|
1232
1608
|
"""Execute a specific pipeline by name synchronously"""
|
|
1233
1609
|
from inspect import isasyncgenfunction, iscoroutinefunction, isgeneratorfunction
|
|
1234
1610
|
|
|
1235
1611
|
workflow_run_response.status = RunStatus.running
|
|
1236
|
-
if workflow_run_response.run_id:
|
|
1237
|
-
register_run(workflow_run_response.run_id) # type: ignore
|
|
1238
1612
|
|
|
1239
1613
|
if callable(self.steps):
|
|
1240
1614
|
if iscoroutinefunction(self.steps) or isasyncgenfunction(self.steps):
|
|
@@ -1300,6 +1674,7 @@ class Workflow:
|
|
|
1300
1674
|
if self.add_workflow_history_to_steps
|
|
1301
1675
|
else None,
|
|
1302
1676
|
num_history_runs=self.num_history_runs,
|
|
1677
|
+
background_tasks=background_tasks,
|
|
1303
1678
|
)
|
|
1304
1679
|
|
|
1305
1680
|
# Check for cancellation after step execution
|
|
@@ -1400,6 +1775,7 @@ class Workflow:
|
|
|
1400
1775
|
workflow_run_response: WorkflowRunOutput,
|
|
1401
1776
|
run_context: RunContext,
|
|
1402
1777
|
stream_events: bool = False,
|
|
1778
|
+
background_tasks: Optional[Any] = None,
|
|
1403
1779
|
**kwargs: Any,
|
|
1404
1780
|
) -> Iterator[WorkflowRunOutputEvent]:
|
|
1405
1781
|
"""Execute a specific pipeline by name with event streaming"""
|
|
@@ -1407,10 +1783,6 @@ class Workflow:
|
|
|
1407
1783
|
|
|
1408
1784
|
workflow_run_response.status = RunStatus.running
|
|
1409
1785
|
|
|
1410
|
-
# Register run for cancellation tracking
|
|
1411
|
-
if workflow_run_response.run_id:
|
|
1412
|
-
register_run(workflow_run_response.run_id)
|
|
1413
|
-
|
|
1414
1786
|
workflow_started_event = WorkflowStartedEvent(
|
|
1415
1787
|
run_id=workflow_run_response.run_id or "",
|
|
1416
1788
|
workflow_name=workflow_run_response.workflow_name,
|
|
@@ -1497,6 +1869,7 @@ class Workflow:
|
|
|
1497
1869
|
if self.add_workflow_history_to_steps
|
|
1498
1870
|
else None,
|
|
1499
1871
|
num_history_runs=self.num_history_runs,
|
|
1872
|
+
background_tasks=background_tasks,
|
|
1500
1873
|
):
|
|
1501
1874
|
raise_if_cancelled(workflow_run_response.run_id) # type: ignore
|
|
1502
1875
|
|
|
@@ -1694,6 +2067,17 @@ class Workflow:
|
|
|
1694
2067
|
)
|
|
1695
2068
|
yield self._handle_event(workflow_completed_event, workflow_run_response)
|
|
1696
2069
|
|
|
2070
|
+
# Mark run as completed in event buffer
|
|
2071
|
+
try:
|
|
2072
|
+
from agno.os.managers import event_buffer
|
|
2073
|
+
|
|
2074
|
+
event_buffer.set_run_completed(
|
|
2075
|
+
workflow_run_response.run_id, # type: ignore
|
|
2076
|
+
workflow_run_response.status or RunStatus.completed,
|
|
2077
|
+
)
|
|
2078
|
+
except Exception as e:
|
|
2079
|
+
log_debug(f"Failed to mark run as completed in buffer: {e}")
|
|
2080
|
+
|
|
1697
2081
|
# Stop timer on error
|
|
1698
2082
|
if workflow_run_response.metrics:
|
|
1699
2083
|
workflow_run_response.metrics.stop_timer()
|
|
@@ -1776,7 +2160,7 @@ class Workflow:
|
|
|
1776
2160
|
self._update_metadata(session=workflow_session)
|
|
1777
2161
|
|
|
1778
2162
|
# Update session state from DB
|
|
1779
|
-
_session_state = session_state
|
|
2163
|
+
_session_state = session_state if session_state is not None else {}
|
|
1780
2164
|
_session_state = self._load_session_state(session=workflow_session, session_state=_session_state)
|
|
1781
2165
|
|
|
1782
2166
|
return workflow_session, _session_state
|
|
@@ -1788,11 +2172,13 @@ class Workflow:
|
|
|
1788
2172
|
execution_input: WorkflowExecutionInput,
|
|
1789
2173
|
workflow_run_response: WorkflowRunOutput,
|
|
1790
2174
|
run_context: RunContext,
|
|
2175
|
+
background_tasks: Optional[Any] = None,
|
|
1791
2176
|
**kwargs: Any,
|
|
1792
2177
|
) -> WorkflowRunOutput:
|
|
1793
2178
|
"""Execute a specific pipeline by name asynchronously"""
|
|
1794
2179
|
from inspect import isasyncgenfunction, iscoroutinefunction, isgeneratorfunction
|
|
1795
2180
|
|
|
2181
|
+
await aregister_run(run_context.run_id)
|
|
1796
2182
|
# Read existing session from database
|
|
1797
2183
|
workflow_session, run_context.session_state = await self._aload_or_create_session(
|
|
1798
2184
|
session_id=session_id, user_id=user_id, session_state=run_context.session_state
|
|
@@ -1800,10 +2186,6 @@ class Workflow:
|
|
|
1800
2186
|
|
|
1801
2187
|
workflow_run_response.status = RunStatus.running
|
|
1802
2188
|
|
|
1803
|
-
# Register run for cancellation tracking
|
|
1804
|
-
if workflow_run_response.run_id:
|
|
1805
|
-
register_run(workflow_run_response.run_id) # type: ignore
|
|
1806
|
-
|
|
1807
2189
|
if callable(self.steps):
|
|
1808
2190
|
# Execute the workflow with the custom executor
|
|
1809
2191
|
content = ""
|
|
@@ -1820,14 +2202,14 @@ class Workflow:
|
|
|
1820
2202
|
elif isasyncgenfunction(self.steps): # type: ignore
|
|
1821
2203
|
async_gen = await self._acall_custom_function(self.steps, execution_input, **kwargs)
|
|
1822
2204
|
async for chunk in async_gen:
|
|
1823
|
-
|
|
2205
|
+
await araise_if_cancelled(workflow_run_response.run_id) # type: ignore
|
|
1824
2206
|
if hasattr(chunk, "content") and chunk.content is not None and isinstance(chunk.content, str):
|
|
1825
2207
|
content += chunk.content
|
|
1826
2208
|
else:
|
|
1827
2209
|
content += str(chunk)
|
|
1828
2210
|
workflow_run_response.content = content
|
|
1829
2211
|
else:
|
|
1830
|
-
|
|
2212
|
+
await araise_if_cancelled(workflow_run_response.run_id) # type: ignore
|
|
1831
2213
|
workflow_run_response.content = self._call_custom_function(self.steps, execution_input, **kwargs)
|
|
1832
2214
|
workflow_run_response.status = RunStatus.completed
|
|
1833
2215
|
|
|
@@ -1847,7 +2229,7 @@ class Workflow:
|
|
|
1847
2229
|
output_files: List[File] = (execution_input.files or []).copy() # Start with input files
|
|
1848
2230
|
|
|
1849
2231
|
for i, step in enumerate(self.steps): # type: ignore[arg-type]
|
|
1850
|
-
|
|
2232
|
+
await araise_if_cancelled(workflow_run_response.run_id) # type: ignore
|
|
1851
2233
|
step_name = getattr(step, "name", f"step_{i + 1}")
|
|
1852
2234
|
log_debug(f"Async Executing step {i + 1}/{self._get_step_count()}: {step_name}")
|
|
1853
2235
|
|
|
@@ -1862,7 +2244,7 @@ class Workflow:
|
|
|
1862
2244
|
)
|
|
1863
2245
|
|
|
1864
2246
|
# Check for cancellation before executing step
|
|
1865
|
-
|
|
2247
|
+
await araise_if_cancelled(workflow_run_response.run_id) # type: ignore
|
|
1866
2248
|
|
|
1867
2249
|
step_output = await step.aexecute( # type: ignore[union-attr]
|
|
1868
2250
|
step_input,
|
|
@@ -1876,10 +2258,11 @@ class Workflow:
|
|
|
1876
2258
|
if self.add_workflow_history_to_steps
|
|
1877
2259
|
else None,
|
|
1878
2260
|
num_history_runs=self.num_history_runs,
|
|
2261
|
+
background_tasks=background_tasks,
|
|
1879
2262
|
)
|
|
1880
2263
|
|
|
1881
2264
|
# Check for cancellation after step execution
|
|
1882
|
-
|
|
2265
|
+
await araise_if_cancelled(workflow_run_response.run_id) # type: ignore
|
|
1883
2266
|
|
|
1884
2267
|
# Update the workflow-level previous_step_outputs dictionary
|
|
1885
2268
|
previous_step_outputs[step_name] = step_output
|
|
@@ -1959,7 +2342,7 @@ class Workflow:
|
|
|
1959
2342
|
else:
|
|
1960
2343
|
self.save_session(session=workflow_session)
|
|
1961
2344
|
# Always clean up the run tracking
|
|
1962
|
-
|
|
2345
|
+
await acleanup_run(workflow_run_response.run_id) # type: ignore
|
|
1963
2346
|
|
|
1964
2347
|
# Log Workflow Telemetry
|
|
1965
2348
|
if self.telemetry:
|
|
@@ -1975,12 +2358,15 @@ class Workflow:
|
|
|
1975
2358
|
workflow_run_response: WorkflowRunOutput,
|
|
1976
2359
|
run_context: RunContext,
|
|
1977
2360
|
stream_events: bool = False,
|
|
1978
|
-
websocket_handler: Optional[WebSocketHandler] = None,
|
|
2361
|
+
websocket_handler: Optional["WebSocketHandler"] = None,
|
|
2362
|
+
background_tasks: Optional[Any] = None,
|
|
1979
2363
|
**kwargs: Any,
|
|
1980
2364
|
) -> AsyncIterator[WorkflowRunOutputEvent]:
|
|
1981
2365
|
"""Execute a specific pipeline by name with event streaming"""
|
|
1982
2366
|
from inspect import isasyncgenfunction, iscoroutinefunction, isgeneratorfunction
|
|
1983
2367
|
|
|
2368
|
+
await aregister_run(run_context.run_id)
|
|
2369
|
+
|
|
1984
2370
|
# Read existing session from database
|
|
1985
2371
|
workflow_session, run_context.session_state = await self._aload_or_create_session(
|
|
1986
2372
|
session_id=session_id, user_id=user_id, session_state=run_context.session_state
|
|
@@ -1988,10 +2374,6 @@ class Workflow:
|
|
|
1988
2374
|
|
|
1989
2375
|
workflow_run_response.status = RunStatus.running
|
|
1990
2376
|
|
|
1991
|
-
# Register run for cancellation tracking
|
|
1992
|
-
if workflow_run_response.run_id:
|
|
1993
|
-
register_run(workflow_run_response.run_id)
|
|
1994
|
-
|
|
1995
2377
|
workflow_started_event = WorkflowStartedEvent(
|
|
1996
2378
|
run_id=workflow_run_response.run_id or "",
|
|
1997
2379
|
workflow_name=workflow_run_response.workflow_name,
|
|
@@ -2016,7 +2398,7 @@ class Workflow:
|
|
|
2016
2398
|
content = ""
|
|
2017
2399
|
async_gen = await self._acall_custom_function(self.steps, execution_input, **kwargs)
|
|
2018
2400
|
async for chunk in async_gen:
|
|
2019
|
-
|
|
2401
|
+
await araise_if_cancelled(workflow_run_response.run_id) # type: ignore
|
|
2020
2402
|
if hasattr(chunk, "content") and chunk.content is not None and isinstance(chunk.content, str):
|
|
2021
2403
|
content += chunk.content
|
|
2022
2404
|
yield chunk
|
|
@@ -2051,7 +2433,7 @@ class Workflow:
|
|
|
2051
2433
|
|
|
2052
2434
|
for i, step in enumerate(self.steps): # type: ignore[arg-type]
|
|
2053
2435
|
if workflow_run_response.run_id:
|
|
2054
|
-
|
|
2436
|
+
await araise_if_cancelled(workflow_run_response.run_id)
|
|
2055
2437
|
step_name = getattr(step, "name", f"step_{i + 1}")
|
|
2056
2438
|
log_debug(f"Async streaming step {i + 1}/{self._get_step_count()}: {step_name}")
|
|
2057
2439
|
|
|
@@ -2086,9 +2468,10 @@ class Workflow:
|
|
|
2086
2468
|
if self.add_workflow_history_to_steps
|
|
2087
2469
|
else None,
|
|
2088
2470
|
num_history_runs=self.num_history_runs,
|
|
2471
|
+
background_tasks=background_tasks,
|
|
2089
2472
|
):
|
|
2090
2473
|
if workflow_run_response.run_id:
|
|
2091
|
-
|
|
2474
|
+
await araise_if_cancelled(workflow_run_response.run_id)
|
|
2092
2475
|
|
|
2093
2476
|
# Accumulate partial data from streaming events
|
|
2094
2477
|
partial_step_content = self._accumulate_partial_step_data(event, partial_step_content) # type: ignore
|
|
@@ -2290,6 +2673,17 @@ class Workflow:
|
|
|
2290
2673
|
)
|
|
2291
2674
|
yield self._handle_event(workflow_completed_event, workflow_run_response, websocket_handler=websocket_handler)
|
|
2292
2675
|
|
|
2676
|
+
# Mark run as completed in event buffer
|
|
2677
|
+
try:
|
|
2678
|
+
from agno.os.managers import event_buffer
|
|
2679
|
+
|
|
2680
|
+
event_buffer.set_run_completed(
|
|
2681
|
+
workflow_run_response.run_id, # type: ignore
|
|
2682
|
+
workflow_run_response.status or RunStatus.completed,
|
|
2683
|
+
)
|
|
2684
|
+
except Exception as e:
|
|
2685
|
+
log_debug(f"Failed to mark run as completed in buffer: {e}")
|
|
2686
|
+
|
|
2293
2687
|
# Stop timer on error
|
|
2294
2688
|
if workflow_run_response.metrics:
|
|
2295
2689
|
workflow_run_response.metrics.stop_timer()
|
|
@@ -2307,7 +2701,7 @@ class Workflow:
|
|
|
2307
2701
|
await self._alog_workflow_telemetry(session_id=session_id, run_id=workflow_run_response.run_id)
|
|
2308
2702
|
|
|
2309
2703
|
# Always clean up the run tracking
|
|
2310
|
-
|
|
2704
|
+
await acleanup_run(workflow_run_response.run_id) # type: ignore
|
|
2311
2705
|
|
|
2312
2706
|
async def _arun_background(
|
|
2313
2707
|
self,
|
|
@@ -2349,6 +2743,7 @@ class Workflow:
|
|
|
2349
2743
|
run_id=run_id,
|
|
2350
2744
|
input=input,
|
|
2351
2745
|
session_id=session_id,
|
|
2746
|
+
user_id=user_id,
|
|
2352
2747
|
workflow_id=self.id,
|
|
2353
2748
|
workflow_name=self.name,
|
|
2354
2749
|
created_at=int(datetime.now().timestamp()),
|
|
@@ -2437,7 +2832,7 @@ class Workflow:
|
|
|
2437
2832
|
videos: Optional[List[Video]] = None,
|
|
2438
2833
|
files: Optional[List[File]] = None,
|
|
2439
2834
|
stream_events: bool = False,
|
|
2440
|
-
websocket_handler: Optional[WebSocketHandler] = None,
|
|
2835
|
+
websocket_handler: Optional["WebSocketHandler"] = None,
|
|
2441
2836
|
**kwargs: Any,
|
|
2442
2837
|
) -> WorkflowRunOutput:
|
|
2443
2838
|
"""Execute workflow in background with streaming and WebSocket broadcasting"""
|
|
@@ -2467,6 +2862,7 @@ class Workflow:
|
|
|
2467
2862
|
run_id=run_id,
|
|
2468
2863
|
input=input,
|
|
2469
2864
|
session_id=session_id,
|
|
2865
|
+
user_id=user_id,
|
|
2470
2866
|
workflow_id=self.id,
|
|
2471
2867
|
workflow_name=self.name,
|
|
2472
2868
|
created_at=int(datetime.now().timestamp()),
|
|
@@ -2512,6 +2908,8 @@ class Workflow:
|
|
|
2512
2908
|
else:
|
|
2513
2909
|
# Update status to RUNNING and save
|
|
2514
2910
|
workflow_run_response.status = RunStatus.running
|
|
2911
|
+
|
|
2912
|
+
workflow_session.upsert_run(run=workflow_run_response)
|
|
2515
2913
|
if self._has_async_db():
|
|
2516
2914
|
await self.asave_session(session=workflow_session)
|
|
2517
2915
|
else:
|
|
@@ -2639,6 +3037,7 @@ class Workflow:
|
|
|
2639
3037
|
execution_input: WorkflowExecutionInput,
|
|
2640
3038
|
run_context: RunContext,
|
|
2641
3039
|
stream: bool = False,
|
|
3040
|
+
stream_events: bool = False,
|
|
2642
3041
|
**kwargs: Any,
|
|
2643
3042
|
) -> Union[WorkflowRunOutput, Iterator[WorkflowRunOutputEvent]]:
|
|
2644
3043
|
"""
|
|
@@ -2652,7 +3051,7 @@ class Workflow:
|
|
|
2652
3051
|
execution_input: The execution input
|
|
2653
3052
|
run_context: The run context
|
|
2654
3053
|
stream: Whether to stream the response
|
|
2655
|
-
|
|
3054
|
+
stream_events: Whether to stream all events
|
|
2656
3055
|
|
|
2657
3056
|
Returns:
|
|
2658
3057
|
WorkflowRunOutput if stream=False, Iterator[WorkflowRunOutputEvent] if stream=True
|
|
@@ -2664,6 +3063,7 @@ class Workflow:
|
|
|
2664
3063
|
execution_input=execution_input,
|
|
2665
3064
|
run_context=run_context,
|
|
2666
3065
|
stream=stream,
|
|
3066
|
+
stream_events=stream_events,
|
|
2667
3067
|
**kwargs,
|
|
2668
3068
|
)
|
|
2669
3069
|
else:
|
|
@@ -2698,7 +3098,7 @@ class Workflow:
|
|
|
2698
3098
|
|
|
2699
3099
|
from agno.run.workflow import WorkflowCompletedEvent, WorkflowRunOutputEvent
|
|
2700
3100
|
|
|
2701
|
-
# Initialize agent with
|
|
3101
|
+
# Initialize agent with stream_events=True so tool yields events
|
|
2702
3102
|
self._initialize_workflow_agent(session, execution_input, run_context=run_context, stream=stream)
|
|
2703
3103
|
|
|
2704
3104
|
# Build dependencies with workflow context
|
|
@@ -2720,6 +3120,7 @@ class Workflow:
|
|
|
2720
3120
|
run_id=run_id,
|
|
2721
3121
|
input=execution_input.input,
|
|
2722
3122
|
session_id=session.session_id,
|
|
3123
|
+
user_id=session.user_id,
|
|
2723
3124
|
workflow_id=self.id,
|
|
2724
3125
|
workflow_name=self.name,
|
|
2725
3126
|
created_at=int(datetime.now().timestamp()),
|
|
@@ -2737,8 +3138,8 @@ class Workflow:
|
|
|
2737
3138
|
for event in self.agent.run( # type: ignore[union-attr]
|
|
2738
3139
|
input=agent_input,
|
|
2739
3140
|
stream=True,
|
|
2740
|
-
|
|
2741
|
-
|
|
3141
|
+
stream_events=True,
|
|
3142
|
+
yield_run_output=True,
|
|
2742
3143
|
session_id=session.session_id,
|
|
2743
3144
|
dependencies=run_context.dependencies, # Pass context dynamically per-run
|
|
2744
3145
|
session_state=run_context.session_state, # Pass session state dynamically per-run
|
|
@@ -2901,6 +3302,7 @@ class Workflow:
|
|
|
2901
3302
|
run_id=run_id,
|
|
2902
3303
|
input=execution_input.input,
|
|
2903
3304
|
session_id=session.session_id,
|
|
3305
|
+
user_id=session.user_id,
|
|
2904
3306
|
workflow_id=self.id,
|
|
2905
3307
|
workflow_name=self.name,
|
|
2906
3308
|
created_at=int(datetime.now().timestamp()),
|
|
@@ -2949,6 +3351,7 @@ class Workflow:
|
|
|
2949
3351
|
run_id=str(uuid4()),
|
|
2950
3352
|
input=execution_input.input,
|
|
2951
3353
|
session_id=session.session_id,
|
|
3354
|
+
user_id=session.user_id,
|
|
2952
3355
|
workflow_id=self.id,
|
|
2953
3356
|
workflow_name=self.name,
|
|
2954
3357
|
created_at=int(datetime.now().timestamp()),
|
|
@@ -2961,7 +3364,7 @@ class Workflow:
|
|
|
2961
3364
|
session: WorkflowSession,
|
|
2962
3365
|
execution_input: WorkflowExecutionInput,
|
|
2963
3366
|
run_context: RunContext,
|
|
2964
|
-
websocket_handler: Optional[WebSocketHandler] = None,
|
|
3367
|
+
websocket_handler: Optional["WebSocketHandler"] = None,
|
|
2965
3368
|
stream: bool = False,
|
|
2966
3369
|
) -> None:
|
|
2967
3370
|
"""Initialize the workflow agent with async tools (but NOT context - that's passed per-run)"""
|
|
@@ -2997,7 +3400,7 @@ class Workflow:
|
|
|
2997
3400
|
run_context: RunContext,
|
|
2998
3401
|
execution_input: WorkflowExecutionInput,
|
|
2999
3402
|
stream: bool = False,
|
|
3000
|
-
websocket_handler: Optional[WebSocketHandler] = None,
|
|
3403
|
+
websocket_handler: Optional["WebSocketHandler"] = None,
|
|
3001
3404
|
**kwargs: Any,
|
|
3002
3405
|
):
|
|
3003
3406
|
"""
|
|
@@ -3020,7 +3423,8 @@ class Workflow:
|
|
|
3020
3423
|
if stream:
|
|
3021
3424
|
|
|
3022
3425
|
async def _stream():
|
|
3023
|
-
|
|
3426
|
+
await aregister_run(run_context.run_id)
|
|
3427
|
+
session, _ = await self._aload_session_for_workflow_agent(
|
|
3024
3428
|
run_context.session_id, run_context.user_id, run_context.session_state
|
|
3025
3429
|
)
|
|
3026
3430
|
async for event in self._arun_workflow_agent_stream(
|
|
@@ -3038,7 +3442,8 @@ class Workflow:
|
|
|
3038
3442
|
else:
|
|
3039
3443
|
|
|
3040
3444
|
async def _execute():
|
|
3041
|
-
|
|
3445
|
+
await aregister_run(run_context.run_id)
|
|
3446
|
+
session, _ = await self._aload_session_for_workflow_agent(
|
|
3042
3447
|
run_context.session_id, run_context.user_id, run_context.session_state
|
|
3043
3448
|
)
|
|
3044
3449
|
return await self._arun_workflow_agent(
|
|
@@ -3058,7 +3463,7 @@ class Workflow:
|
|
|
3058
3463
|
execution_input: WorkflowExecutionInput,
|
|
3059
3464
|
run_context: RunContext,
|
|
3060
3465
|
stream: bool = False,
|
|
3061
|
-
websocket_handler: Optional[WebSocketHandler] = None,
|
|
3466
|
+
websocket_handler: Optional["WebSocketHandler"] = None,
|
|
3062
3467
|
**kwargs: Any,
|
|
3063
3468
|
) -> AsyncIterator[WorkflowRunOutputEvent]:
|
|
3064
3469
|
"""
|
|
@@ -3103,6 +3508,7 @@ class Workflow:
|
|
|
3103
3508
|
run_id=run_id,
|
|
3104
3509
|
input=execution_input.input,
|
|
3105
3510
|
session_id=session.session_id,
|
|
3511
|
+
user_id=session.user_id,
|
|
3106
3512
|
workflow_id=self.id,
|
|
3107
3513
|
workflow_name=self.name,
|
|
3108
3514
|
created_at=int(datetime.now().timestamp()),
|
|
@@ -3121,8 +3527,8 @@ class Workflow:
|
|
|
3121
3527
|
async for event in self.agent.arun( # type: ignore[union-attr]
|
|
3122
3528
|
input=agent_input,
|
|
3123
3529
|
stream=True,
|
|
3124
|
-
|
|
3125
|
-
|
|
3530
|
+
stream_events=True,
|
|
3531
|
+
yield_run_output=True,
|
|
3126
3532
|
session_id=session.session_id,
|
|
3127
3533
|
dependencies=run_context.dependencies, # Pass context dynamically per-run
|
|
3128
3534
|
session_state=run_context.session_state, # Pass session state dynamically per-run
|
|
@@ -3301,6 +3707,7 @@ class Workflow:
|
|
|
3301
3707
|
run_id=run_id,
|
|
3302
3708
|
input=execution_input.input,
|
|
3303
3709
|
session_id=session.session_id,
|
|
3710
|
+
user_id=session.user_id,
|
|
3304
3711
|
workflow_id=self.id,
|
|
3305
3712
|
workflow_name=self.name,
|
|
3306
3713
|
created_at=int(datetime.now().timestamp()),
|
|
@@ -3368,6 +3775,7 @@ class Workflow:
|
|
|
3368
3775
|
run_id=str(uuid4()),
|
|
3369
3776
|
input=execution_input.input,
|
|
3370
3777
|
session_id=session.session_id,
|
|
3778
|
+
user_id=session.user_id,
|
|
3371
3779
|
workflow_id=self.id,
|
|
3372
3780
|
workflow_name=self.name,
|
|
3373
3781
|
created_at=int(datetime.now().timestamp()),
|
|
@@ -3386,12 +3794,24 @@ class Workflow:
|
|
|
3386
3794
|
"""
|
|
3387
3795
|
return cancel_run_global(run_id)
|
|
3388
3796
|
|
|
3797
|
+
async def acancel_run(self, run_id: str) -> bool:
|
|
3798
|
+
"""Cancel a running workflow execution (async version).
|
|
3799
|
+
|
|
3800
|
+
Args:
|
|
3801
|
+
run_id (str): The run_id to cancel.
|
|
3802
|
+
|
|
3803
|
+
Returns:
|
|
3804
|
+
bool: True if the run was found and marked for cancellation, False otherwise.
|
|
3805
|
+
"""
|
|
3806
|
+
return await acancel_run_global(run_id)
|
|
3807
|
+
|
|
3389
3808
|
@overload
|
|
3390
3809
|
def run(
|
|
3391
3810
|
self,
|
|
3392
3811
|
input: Optional[Union[str, Dict[str, Any], List[Any], BaseModel]] = None,
|
|
3393
3812
|
additional_data: Optional[Dict[str, Any]] = None,
|
|
3394
3813
|
user_id: Optional[str] = None,
|
|
3814
|
+
run_id: Optional[str] = None,
|
|
3395
3815
|
session_id: Optional[str] = None,
|
|
3396
3816
|
session_state: Optional[Dict[str, Any]] = None,
|
|
3397
3817
|
audio: Optional[List[Audio]] = None,
|
|
@@ -3400,8 +3820,8 @@ class Workflow:
|
|
|
3400
3820
|
files: Optional[List[File]] = None,
|
|
3401
3821
|
stream: Literal[False] = False,
|
|
3402
3822
|
stream_events: Optional[bool] = None,
|
|
3403
|
-
stream_intermediate_steps: Optional[bool] = None,
|
|
3404
3823
|
background: Optional[bool] = False,
|
|
3824
|
+
background_tasks: Optional[Any] = None,
|
|
3405
3825
|
) -> WorkflowRunOutput: ...
|
|
3406
3826
|
|
|
3407
3827
|
@overload
|
|
@@ -3410,6 +3830,7 @@ class Workflow:
|
|
|
3410
3830
|
input: Optional[Union[str, Dict[str, Any], List[Any], BaseModel]] = None,
|
|
3411
3831
|
additional_data: Optional[Dict[str, Any]] = None,
|
|
3412
3832
|
user_id: Optional[str] = None,
|
|
3833
|
+
run_id: Optional[str] = None,
|
|
3413
3834
|
session_id: Optional[str] = None,
|
|
3414
3835
|
session_state: Optional[Dict[str, Any]] = None,
|
|
3415
3836
|
audio: Optional[List[Audio]] = None,
|
|
@@ -3418,8 +3839,8 @@ class Workflow:
|
|
|
3418
3839
|
files: Optional[List[File]] = None,
|
|
3419
3840
|
stream: Literal[True] = True,
|
|
3420
3841
|
stream_events: Optional[bool] = None,
|
|
3421
|
-
stream_intermediate_steps: Optional[bool] = None,
|
|
3422
3842
|
background: Optional[bool] = False,
|
|
3843
|
+
background_tasks: Optional[Any] = None,
|
|
3423
3844
|
) -> Iterator[WorkflowRunOutputEvent]: ...
|
|
3424
3845
|
|
|
3425
3846
|
def run(
|
|
@@ -3427,30 +3848,36 @@ class Workflow:
|
|
|
3427
3848
|
input: Optional[Union[str, Dict[str, Any], List[Any], BaseModel]] = None,
|
|
3428
3849
|
additional_data: Optional[Dict[str, Any]] = None,
|
|
3429
3850
|
user_id: Optional[str] = None,
|
|
3851
|
+
run_id: Optional[str] = None,
|
|
3430
3852
|
session_id: Optional[str] = None,
|
|
3431
3853
|
session_state: Optional[Dict[str, Any]] = None,
|
|
3432
3854
|
audio: Optional[List[Audio]] = None,
|
|
3433
3855
|
images: Optional[List[Image]] = None,
|
|
3434
3856
|
videos: Optional[List[Video]] = None,
|
|
3435
3857
|
files: Optional[List[File]] = None,
|
|
3436
|
-
stream: bool =
|
|
3858
|
+
stream: Optional[bool] = None,
|
|
3437
3859
|
stream_events: Optional[bool] = None,
|
|
3438
|
-
stream_intermediate_steps: Optional[bool] = None,
|
|
3439
3860
|
background: Optional[bool] = False,
|
|
3861
|
+
background_tasks: Optional[Any] = None,
|
|
3440
3862
|
**kwargs: Any,
|
|
3441
3863
|
) -> Union[WorkflowRunOutput, Iterator[WorkflowRunOutputEvent]]:
|
|
3442
3864
|
"""Execute the workflow synchronously with optional streaming"""
|
|
3443
3865
|
if self._has_async_db():
|
|
3444
3866
|
raise Exception("`run()` is not supported with an async DB. Please use `arun()`.")
|
|
3445
3867
|
|
|
3446
|
-
|
|
3868
|
+
# Set the id for the run and register it immediately for cancellation tracking
|
|
3869
|
+
run_id = run_id or str(uuid4())
|
|
3870
|
+
register_run(run_id)
|
|
3871
|
+
|
|
3872
|
+
if input is None and self.input_schema is not None:
|
|
3873
|
+
raise ValueError("Input is required when input_schema is provided")
|
|
3874
|
+
if input is not None and self.input_schema is not None:
|
|
3875
|
+
input = validate_input(input, self.input_schema)
|
|
3447
3876
|
if background:
|
|
3448
3877
|
raise RuntimeError("Background execution is not supported for sync run()")
|
|
3449
3878
|
|
|
3450
3879
|
self._set_debug()
|
|
3451
3880
|
|
|
3452
|
-
run_id = str(uuid4())
|
|
3453
|
-
|
|
3454
3881
|
self.initialize_workflow()
|
|
3455
3882
|
session_id, user_id = self._initialize_session(session_id=session_id, user_id=user_id)
|
|
3456
3883
|
|
|
@@ -3458,20 +3885,26 @@ class Workflow:
|
|
|
3458
3885
|
workflow_session = self.read_or_create_session(session_id=session_id, user_id=user_id)
|
|
3459
3886
|
self._update_metadata(session=workflow_session)
|
|
3460
3887
|
|
|
3461
|
-
# Initialize session state
|
|
3888
|
+
# Initialize session state. Get it from DB if relevant.
|
|
3889
|
+
session_state = self._load_session_state(
|
|
3890
|
+
session=workflow_session,
|
|
3891
|
+
session_state=session_state if session_state is not None else {},
|
|
3892
|
+
)
|
|
3893
|
+
|
|
3894
|
+
# Add current session/user/run info to session_state
|
|
3462
3895
|
session_state = self._initialize_session_state(
|
|
3463
|
-
session_state=session_state
|
|
3896
|
+
session_state=session_state,
|
|
3897
|
+
session_id=session_id,
|
|
3898
|
+
user_id=user_id,
|
|
3899
|
+
run_id=run_id,
|
|
3464
3900
|
)
|
|
3465
|
-
# Update session state from DB
|
|
3466
|
-
session_state = self._load_session_state(session=workflow_session, session_state=session_state)
|
|
3467
3901
|
|
|
3468
3902
|
log_debug(f"Workflow Run Start: {self.name}", center=True)
|
|
3469
3903
|
|
|
3470
|
-
# Use
|
|
3471
|
-
|
|
3472
|
-
|
|
3473
|
-
|
|
3474
|
-
)
|
|
3904
|
+
# Use stream override value when necessary
|
|
3905
|
+
if stream is None:
|
|
3906
|
+
stream = self.stream or False
|
|
3907
|
+
stream_events = stream_events or self.stream_events
|
|
3475
3908
|
|
|
3476
3909
|
# Can't stream events if streaming is disabled
|
|
3477
3910
|
if stream is False:
|
|
@@ -3503,6 +3936,8 @@ class Workflow:
|
|
|
3503
3936
|
session_id=session_id,
|
|
3504
3937
|
user_id=user_id,
|
|
3505
3938
|
session_state=session_state,
|
|
3939
|
+
workflow_id=self.id,
|
|
3940
|
+
workflow_name=self.name,
|
|
3506
3941
|
)
|
|
3507
3942
|
|
|
3508
3943
|
# Execute workflow agent if configured
|
|
@@ -3513,6 +3948,7 @@ class Workflow:
|
|
|
3513
3948
|
execution_input=inputs,
|
|
3514
3949
|
run_context=run_context,
|
|
3515
3950
|
stream=stream,
|
|
3951
|
+
stream_events=stream_events,
|
|
3516
3952
|
**kwargs,
|
|
3517
3953
|
)
|
|
3518
3954
|
|
|
@@ -3521,6 +3957,7 @@ class Workflow:
|
|
|
3521
3957
|
run_id=run_id,
|
|
3522
3958
|
input=input,
|
|
3523
3959
|
session_id=session_id,
|
|
3960
|
+
user_id=user_id,
|
|
3524
3961
|
workflow_id=self.id,
|
|
3525
3962
|
workflow_name=self.name,
|
|
3526
3963
|
created_at=int(datetime.now().timestamp()),
|
|
@@ -3537,6 +3974,7 @@ class Workflow:
|
|
|
3537
3974
|
workflow_run_response=workflow_run_response,
|
|
3538
3975
|
stream_events=stream_events,
|
|
3539
3976
|
run_context=run_context,
|
|
3977
|
+
background_tasks=background_tasks,
|
|
3540
3978
|
**kwargs,
|
|
3541
3979
|
)
|
|
3542
3980
|
else:
|
|
@@ -3545,6 +3983,7 @@ class Workflow:
|
|
|
3545
3983
|
execution_input=inputs, # type: ignore[arg-type]
|
|
3546
3984
|
workflow_run_response=workflow_run_response,
|
|
3547
3985
|
run_context=run_context,
|
|
3986
|
+
background_tasks=background_tasks,
|
|
3548
3987
|
**kwargs,
|
|
3549
3988
|
)
|
|
3550
3989
|
|
|
@@ -3554,6 +3993,7 @@ class Workflow:
|
|
|
3554
3993
|
input: Optional[Union[str, Dict[str, Any], List[Any], BaseModel, List[Message]]] = None,
|
|
3555
3994
|
additional_data: Optional[Dict[str, Any]] = None,
|
|
3556
3995
|
user_id: Optional[str] = None,
|
|
3996
|
+
run_id: Optional[str] = None,
|
|
3557
3997
|
session_id: Optional[str] = None,
|
|
3558
3998
|
session_state: Optional[Dict[str, Any]] = None,
|
|
3559
3999
|
audio: Optional[List[Audio]] = None,
|
|
@@ -3562,9 +4002,9 @@ class Workflow:
|
|
|
3562
4002
|
files: Optional[List[File]] = None,
|
|
3563
4003
|
stream: Literal[False] = False,
|
|
3564
4004
|
stream_events: Optional[bool] = None,
|
|
3565
|
-
stream_intermediate_steps: Optional[bool] = None,
|
|
3566
4005
|
background: Optional[bool] = False,
|
|
3567
4006
|
websocket: Optional[WebSocket] = None,
|
|
4007
|
+
background_tasks: Optional[Any] = None,
|
|
3568
4008
|
) -> WorkflowRunOutput: ...
|
|
3569
4009
|
|
|
3570
4010
|
@overload
|
|
@@ -3573,6 +4013,7 @@ class Workflow:
|
|
|
3573
4013
|
input: Optional[Union[str, Dict[str, Any], List[Any], BaseModel, List[Message]]] = None,
|
|
3574
4014
|
additional_data: Optional[Dict[str, Any]] = None,
|
|
3575
4015
|
user_id: Optional[str] = None,
|
|
4016
|
+
run_id: Optional[str] = None,
|
|
3576
4017
|
session_id: Optional[str] = None,
|
|
3577
4018
|
session_state: Optional[Dict[str, Any]] = None,
|
|
3578
4019
|
audio: Optional[List[Audio]] = None,
|
|
@@ -3581,9 +4022,9 @@ class Workflow:
|
|
|
3581
4022
|
files: Optional[List[File]] = None,
|
|
3582
4023
|
stream: Literal[True] = True,
|
|
3583
4024
|
stream_events: Optional[bool] = None,
|
|
3584
|
-
stream_intermediate_steps: Optional[bool] = None,
|
|
3585
4025
|
background: Optional[bool] = False,
|
|
3586
4026
|
websocket: Optional[WebSocket] = None,
|
|
4027
|
+
background_tasks: Optional[Any] = None,
|
|
3587
4028
|
) -> AsyncIterator[WorkflowRunOutputEvent]: ...
|
|
3588
4029
|
|
|
3589
4030
|
def arun( # type: ignore
|
|
@@ -3591,34 +4032,36 @@ class Workflow:
|
|
|
3591
4032
|
input: Optional[Union[str, Dict[str, Any], List[Any], BaseModel, List[Message]]] = None,
|
|
3592
4033
|
additional_data: Optional[Dict[str, Any]] = None,
|
|
3593
4034
|
user_id: Optional[str] = None,
|
|
4035
|
+
run_id: Optional[str] = None,
|
|
3594
4036
|
session_id: Optional[str] = None,
|
|
3595
4037
|
session_state: Optional[Dict[str, Any]] = None,
|
|
3596
4038
|
audio: Optional[List[Audio]] = None,
|
|
3597
4039
|
images: Optional[List[Image]] = None,
|
|
3598
4040
|
videos: Optional[List[Video]] = None,
|
|
3599
4041
|
files: Optional[List[File]] = None,
|
|
3600
|
-
stream: bool =
|
|
4042
|
+
stream: Optional[bool] = None,
|
|
3601
4043
|
stream_events: Optional[bool] = None,
|
|
3602
|
-
stream_intermediate_steps: Optional[bool] = False,
|
|
3603
4044
|
background: Optional[bool] = False,
|
|
3604
4045
|
websocket: Optional[WebSocket] = None,
|
|
4046
|
+
background_tasks: Optional[Any] = None,
|
|
3605
4047
|
**kwargs: Any,
|
|
3606
4048
|
) -> Union[WorkflowRunOutput, AsyncIterator[WorkflowRunOutputEvent]]:
|
|
3607
4049
|
"""Execute the workflow synchronously with optional streaming"""
|
|
3608
4050
|
|
|
3609
|
-
input
|
|
4051
|
+
if input is None and self.input_schema is not None:
|
|
4052
|
+
raise ValueError("Input is required when input_schema is provided")
|
|
4053
|
+
if input is not None and self.input_schema is not None:
|
|
4054
|
+
input = validate_input(input, self.input_schema)
|
|
3610
4055
|
|
|
3611
4056
|
websocket_handler = None
|
|
3612
4057
|
if websocket:
|
|
3613
|
-
from agno.
|
|
4058
|
+
from agno.os.managers import WebSocketHandler
|
|
3614
4059
|
|
|
3615
4060
|
websocket_handler = WebSocketHandler(websocket=websocket)
|
|
3616
4061
|
|
|
3617
4062
|
if background:
|
|
3618
4063
|
if stream and websocket:
|
|
3619
|
-
|
|
3620
|
-
stream_events = stream_events or stream_intermediate_steps or False
|
|
3621
|
-
|
|
4064
|
+
stream_events = stream_events or False
|
|
3622
4065
|
# Background + Streaming + WebSocket = Real-time events
|
|
3623
4066
|
return self._arun_background_stream( # type: ignore
|
|
3624
4067
|
input=input,
|
|
@@ -3654,7 +4097,8 @@ class Workflow:
|
|
|
3654
4097
|
|
|
3655
4098
|
self._set_debug()
|
|
3656
4099
|
|
|
3657
|
-
|
|
4100
|
+
# Set the id for the run and register it immediately for cancellation tracking
|
|
4101
|
+
run_id = run_id or str(uuid4())
|
|
3658
4102
|
|
|
3659
4103
|
self.initialize_workflow()
|
|
3660
4104
|
session_id, user_id = self._initialize_session(session_id=session_id, user_id=user_id)
|
|
@@ -3669,11 +4113,10 @@ class Workflow:
|
|
|
3669
4113
|
|
|
3670
4114
|
log_debug(f"Async Workflow Run Start: {self.name}", center=True)
|
|
3671
4115
|
|
|
3672
|
-
# Use
|
|
3673
|
-
|
|
3674
|
-
|
|
3675
|
-
|
|
3676
|
-
)
|
|
4116
|
+
# Use stream override value when necessary
|
|
4117
|
+
if stream is None:
|
|
4118
|
+
stream = self.stream or False
|
|
4119
|
+
stream_events = stream_events or self.stream_events
|
|
3677
4120
|
|
|
3678
4121
|
# Can't stream events if streaming is disabled
|
|
3679
4122
|
if stream is False:
|
|
@@ -3712,6 +4155,7 @@ class Workflow:
|
|
|
3712
4155
|
run_id=run_id,
|
|
3713
4156
|
input=input,
|
|
3714
4157
|
session_id=session_id,
|
|
4158
|
+
user_id=user_id,
|
|
3715
4159
|
workflow_id=self.id,
|
|
3716
4160
|
workflow_name=self.name,
|
|
3717
4161
|
created_at=int(datetime.now().timestamp()),
|
|
@@ -3732,6 +4176,7 @@ class Workflow:
|
|
|
3732
4176
|
files=files,
|
|
3733
4177
|
session_state=session_state,
|
|
3734
4178
|
run_context=run_context,
|
|
4179
|
+
background_tasks=background_tasks,
|
|
3735
4180
|
**kwargs,
|
|
3736
4181
|
)
|
|
3737
4182
|
else:
|
|
@@ -3744,6 +4189,7 @@ class Workflow:
|
|
|
3744
4189
|
files=files,
|
|
3745
4190
|
session_state=session_state,
|
|
3746
4191
|
run_context=run_context,
|
|
4192
|
+
background_tasks=background_tasks,
|
|
3747
4193
|
**kwargs,
|
|
3748
4194
|
)
|
|
3749
4195
|
|
|
@@ -3792,8 +4238,6 @@ class Workflow:
|
|
|
3792
4238
|
videos: Optional[List[Video]] = None,
|
|
3793
4239
|
files: Optional[List[File]] = None,
|
|
3794
4240
|
stream: Optional[bool] = None,
|
|
3795
|
-
stream_events: Optional[bool] = None,
|
|
3796
|
-
stream_intermediate_steps: Optional[bool] = None,
|
|
3797
4241
|
markdown: bool = True,
|
|
3798
4242
|
show_time: bool = True,
|
|
3799
4243
|
show_step_details: bool = True,
|
|
@@ -3812,12 +4256,10 @@ class Workflow:
|
|
|
3812
4256
|
videos: Video input
|
|
3813
4257
|
files: File input
|
|
3814
4258
|
stream: Whether to stream the response content
|
|
3815
|
-
stream_events: Whether to stream intermediate steps
|
|
3816
4259
|
markdown: Whether to render content as markdown
|
|
3817
4260
|
show_time: Whether to show execution time
|
|
3818
4261
|
show_step_details: Whether to show individual step outputs
|
|
3819
4262
|
console: Rich console instance (optional)
|
|
3820
|
-
(deprecated) stream_intermediate_steps: Whether to stream intermediate step outputs. If None, uses workflow default.
|
|
3821
4263
|
"""
|
|
3822
4264
|
if self._has_async_db():
|
|
3823
4265
|
raise Exception("`print_response()` is not supported with an async DB. Please use `aprint_response()`.")
|
|
@@ -3825,19 +4267,8 @@ class Workflow:
|
|
|
3825
4267
|
if stream is None:
|
|
3826
4268
|
stream = self.stream or False
|
|
3827
4269
|
|
|
3828
|
-
|
|
3829
|
-
|
|
3830
|
-
|
|
3831
|
-
# Can't stream events if streaming is disabled
|
|
3832
|
-
if stream is False:
|
|
3833
|
-
stream_events = False
|
|
3834
|
-
|
|
3835
|
-
if stream_events is None:
|
|
3836
|
-
stream_events = (
|
|
3837
|
-
False
|
|
3838
|
-
if (self.stream_events is None and self.stream_intermediate_steps is None)
|
|
3839
|
-
else (self.stream_intermediate_steps or self.stream_events)
|
|
3840
|
-
)
|
|
4270
|
+
if "stream_events" in kwargs:
|
|
4271
|
+
kwargs.pop("stream_events")
|
|
3841
4272
|
|
|
3842
4273
|
if stream:
|
|
3843
4274
|
print_response_stream(
|
|
@@ -3850,7 +4281,7 @@ class Workflow:
|
|
|
3850
4281
|
images=images,
|
|
3851
4282
|
videos=videos,
|
|
3852
4283
|
files=files,
|
|
3853
|
-
stream_events=
|
|
4284
|
+
stream_events=True,
|
|
3854
4285
|
markdown=markdown,
|
|
3855
4286
|
show_time=show_time,
|
|
3856
4287
|
show_step_details=show_step_details,
|
|
@@ -3886,8 +4317,6 @@ class Workflow:
|
|
|
3886
4317
|
videos: Optional[List[Video]] = None,
|
|
3887
4318
|
files: Optional[List[File]] = None,
|
|
3888
4319
|
stream: Optional[bool] = None,
|
|
3889
|
-
stream_events: Optional[bool] = None,
|
|
3890
|
-
stream_intermediate_steps: Optional[bool] = None,
|
|
3891
4320
|
markdown: bool = True,
|
|
3892
4321
|
show_time: bool = True,
|
|
3893
4322
|
show_step_details: bool = True,
|
|
@@ -3906,29 +4335,16 @@ class Workflow:
|
|
|
3906
4335
|
videos: Video input
|
|
3907
4336
|
files: Files input
|
|
3908
4337
|
stream: Whether to stream the response content
|
|
3909
|
-
stream_events: Whether to stream intermediate steps
|
|
3910
4338
|
markdown: Whether to render content as markdown
|
|
3911
4339
|
show_time: Whether to show execution time
|
|
3912
4340
|
show_step_details: Whether to show individual step outputs
|
|
3913
4341
|
console: Rich console instance (optional)
|
|
3914
|
-
(deprecated) stream_intermediate_steps: Whether to stream intermediate step outputs. If None, uses workflow default.
|
|
3915
4342
|
"""
|
|
3916
4343
|
if stream is None:
|
|
3917
4344
|
stream = self.stream or False
|
|
3918
4345
|
|
|
3919
|
-
|
|
3920
|
-
|
|
3921
|
-
|
|
3922
|
-
# Can't stream events if streaming is disabled
|
|
3923
|
-
if stream is False:
|
|
3924
|
-
stream_events = False
|
|
3925
|
-
|
|
3926
|
-
if stream_events is None:
|
|
3927
|
-
stream_events = (
|
|
3928
|
-
False
|
|
3929
|
-
if (self.stream_events is None and self.stream_intermediate_steps is None)
|
|
3930
|
-
else (self.stream_intermediate_steps or self.stream_events)
|
|
3931
|
-
)
|
|
4346
|
+
if "stream_events" in kwargs:
|
|
4347
|
+
kwargs.pop("stream_events")
|
|
3932
4348
|
|
|
3933
4349
|
if stream:
|
|
3934
4350
|
await aprint_response_stream(
|
|
@@ -3941,7 +4357,7 @@ class Workflow:
|
|
|
3941
4357
|
images=images,
|
|
3942
4358
|
videos=videos,
|
|
3943
4359
|
files=files,
|
|
3944
|
-
stream_events=
|
|
4360
|
+
stream_events=True,
|
|
3945
4361
|
markdown=markdown,
|
|
3946
4362
|
show_time=show_time,
|
|
3947
4363
|
show_step_details=show_step_details,
|
|
@@ -3966,7 +4382,8 @@ class Workflow:
|
|
|
3966
4382
|
**kwargs,
|
|
3967
4383
|
)
|
|
3968
4384
|
|
|
3969
|
-
|
|
4385
|
+
# TODO: This is a temporary method to convert the workflow to a dictionary for steps. We need to find a better way to do this.
|
|
4386
|
+
def to_dict_for_steps(self) -> Dict[str, Any]:
|
|
3970
4387
|
"""Convert workflow to dictionary representation"""
|
|
3971
4388
|
|
|
3972
4389
|
def serialize_step(step):
|
|
@@ -4122,6 +4539,56 @@ class Workflow:
|
|
|
4122
4539
|
if hasattr(member, "workflow_id"):
|
|
4123
4540
|
member.workflow_id = self.id
|
|
4124
4541
|
|
|
4542
|
+
def propagate_run_hooks_in_background(self, run_in_background: bool = True) -> None:
|
|
4543
|
+
"""
|
|
4544
|
+
Propagate _run_hooks_in_background setting to this workflow and all agents/teams in steps.
|
|
4545
|
+
|
|
4546
|
+
This method sets _run_hooks_in_background on the workflow and all agents/teams
|
|
4547
|
+
within its steps, including nested teams and their members.
|
|
4548
|
+
|
|
4549
|
+
Args:
|
|
4550
|
+
run_in_background: Whether hooks should run in background. Defaults to True.
|
|
4551
|
+
"""
|
|
4552
|
+
self._run_hooks_in_background = run_in_background
|
|
4553
|
+
|
|
4554
|
+
if not self.steps or callable(self.steps):
|
|
4555
|
+
return
|
|
4556
|
+
|
|
4557
|
+
steps_list = self.steps.steps if isinstance(self.steps, Steps) else self.steps
|
|
4558
|
+
|
|
4559
|
+
for step in steps_list:
|
|
4560
|
+
self._propagate_hooks_to_step(step, run_in_background)
|
|
4561
|
+
|
|
4562
|
+
def _propagate_hooks_to_step(self, step: Any, run_in_background: bool) -> None:
|
|
4563
|
+
"""Recursively propagate _run_hooks_in_background to a step and its nested content."""
|
|
4564
|
+
# Handle Step objects with active executor
|
|
4565
|
+
if hasattr(step, "active_executor") and step.active_executor:
|
|
4566
|
+
executor = step.active_executor
|
|
4567
|
+
# If it's a team, use its propagation method
|
|
4568
|
+
if hasattr(executor, "propagate_run_hooks_in_background"):
|
|
4569
|
+
executor.propagate_run_hooks_in_background(run_in_background)
|
|
4570
|
+
elif hasattr(executor, "_run_hooks_in_background"):
|
|
4571
|
+
executor._run_hooks_in_background = run_in_background
|
|
4572
|
+
|
|
4573
|
+
# Handle agent/team directly on step
|
|
4574
|
+
if hasattr(step, "agent") and step.agent:
|
|
4575
|
+
if hasattr(step.agent, "_run_hooks_in_background"):
|
|
4576
|
+
step.agent._run_hooks_in_background = run_in_background
|
|
4577
|
+
if hasattr(step, "team") and step.team:
|
|
4578
|
+
# Use team's method to propagate to all nested members
|
|
4579
|
+
if hasattr(step.team, "propagate_run_hooks_in_background"):
|
|
4580
|
+
step.team.propagate_run_hooks_in_background(run_in_background)
|
|
4581
|
+
elif hasattr(step.team, "_run_hooks_in_background"):
|
|
4582
|
+
step.team._run_hooks_in_background = run_in_background
|
|
4583
|
+
|
|
4584
|
+
# Handle nested primitives - check 'steps' and 'choices' attributes
|
|
4585
|
+
for attr_name in ["steps", "choices"]:
|
|
4586
|
+
if hasattr(step, attr_name):
|
|
4587
|
+
attr_value = getattr(step, attr_name)
|
|
4588
|
+
if attr_value and isinstance(attr_value, list):
|
|
4589
|
+
for nested_step in attr_value:
|
|
4590
|
+
self._propagate_hooks_to_step(nested_step, run_in_background)
|
|
4591
|
+
|
|
4125
4592
|
###########################################################################
|
|
4126
4593
|
# Telemetry functions
|
|
4127
4594
|
###########################################################################
|
|
@@ -4174,8 +4641,6 @@ class Workflow:
|
|
|
4174
4641
|
user: str = "User",
|
|
4175
4642
|
emoji: str = ":technologist:",
|
|
4176
4643
|
stream: Optional[bool] = None,
|
|
4177
|
-
stream_events: Optional[bool] = None,
|
|
4178
|
-
stream_intermediate_steps: Optional[bool] = None,
|
|
4179
4644
|
markdown: bool = True,
|
|
4180
4645
|
show_time: bool = True,
|
|
4181
4646
|
show_step_details: bool = True,
|
|
@@ -4195,12 +4660,10 @@ class Workflow:
|
|
|
4195
4660
|
user: Display name for the user in the CLI prompt. Defaults to "User".
|
|
4196
4661
|
emoji: Emoji to display next to the user name in prompts. Defaults to ":technologist:".
|
|
4197
4662
|
stream: Whether to stream the workflow response. If None, uses workflow default.
|
|
4198
|
-
stream_events: Whether to stream intermediate step outputs. If None, uses workflow default.
|
|
4199
4663
|
markdown: Whether to render output as markdown. Defaults to True.
|
|
4200
4664
|
show_time: Whether to display timestamps in the output. Defaults to True.
|
|
4201
4665
|
show_step_details: Whether to show detailed step information. Defaults to True.
|
|
4202
4666
|
exit_on: List of commands that will exit the CLI. Defaults to ["exit", "quit", "bye", "stop"].
|
|
4203
|
-
(deprecated) stream_intermediate_steps: Whether to stream intermediate step outputs. If None, uses workflow default.
|
|
4204
4667
|
**kwargs: Additional keyword arguments passed to the workflow's print_response method.
|
|
4205
4668
|
|
|
4206
4669
|
Returns:
|
|
@@ -4209,14 +4672,10 @@ class Workflow:
|
|
|
4209
4672
|
|
|
4210
4673
|
from rich.prompt import Prompt
|
|
4211
4674
|
|
|
4212
|
-
# Considering both stream_events and stream_intermediate_steps (deprecated)
|
|
4213
|
-
stream_events = stream_events or stream_intermediate_steps or False
|
|
4214
|
-
|
|
4215
4675
|
if input:
|
|
4216
4676
|
self.print_response(
|
|
4217
4677
|
input=input,
|
|
4218
4678
|
stream=stream,
|
|
4219
|
-
stream_events=stream_events,
|
|
4220
4679
|
markdown=markdown,
|
|
4221
4680
|
show_time=show_time,
|
|
4222
4681
|
show_step_details=show_step_details,
|
|
@@ -4234,7 +4693,6 @@ class Workflow:
|
|
|
4234
4693
|
self.print_response(
|
|
4235
4694
|
input=message,
|
|
4236
4695
|
stream=stream,
|
|
4237
|
-
stream_events=stream_events,
|
|
4238
4696
|
markdown=markdown,
|
|
4239
4697
|
show_time=show_time,
|
|
4240
4698
|
show_step_details=show_step_details,
|
|
@@ -4251,8 +4709,6 @@ class Workflow:
|
|
|
4251
4709
|
user: str = "User",
|
|
4252
4710
|
emoji: str = ":technologist:",
|
|
4253
4711
|
stream: Optional[bool] = None,
|
|
4254
|
-
stream_events: Optional[bool] = None,
|
|
4255
|
-
stream_intermediate_steps: Optional[bool] = None,
|
|
4256
4712
|
markdown: bool = True,
|
|
4257
4713
|
show_time: bool = True,
|
|
4258
4714
|
show_step_details: bool = True,
|
|
@@ -4272,12 +4728,10 @@ class Workflow:
|
|
|
4272
4728
|
user: Display name for the user in the CLI prompt. Defaults to "User".
|
|
4273
4729
|
emoji: Emoji to display next to the user name in prompts. Defaults to ":technologist:".
|
|
4274
4730
|
stream: Whether to stream the workflow response. If None, uses workflow default.
|
|
4275
|
-
stream_events: Whether to stream events from the workflow. If None, uses workflow default.
|
|
4276
4731
|
markdown: Whether to render output as markdown. Defaults to True.
|
|
4277
4732
|
show_time: Whether to display timestamps in the output. Defaults to True.
|
|
4278
4733
|
show_step_details: Whether to show detailed step information. Defaults to True.
|
|
4279
4734
|
exit_on: List of commands that will exit the CLI. Defaults to ["exit", "quit", "bye", "stop"].
|
|
4280
|
-
(deprecated) stream_intermediate_steps: Whether to stream intermediate step outputs. If None, uses workflow default.
|
|
4281
4735
|
**kwargs: Additional keyword arguments passed to the workflow's print_response method.
|
|
4282
4736
|
|
|
4283
4737
|
Returns:
|
|
@@ -4286,14 +4740,10 @@ class Workflow:
|
|
|
4286
4740
|
|
|
4287
4741
|
from rich.prompt import Prompt
|
|
4288
4742
|
|
|
4289
|
-
# Considering both stream_events and stream_intermediate_steps (deprecated)
|
|
4290
|
-
stream_events = stream_events or stream_intermediate_steps or False
|
|
4291
|
-
|
|
4292
4743
|
if input:
|
|
4293
4744
|
await self.aprint_response(
|
|
4294
4745
|
input=input,
|
|
4295
4746
|
stream=stream,
|
|
4296
|
-
stream_events=stream_events,
|
|
4297
4747
|
markdown=markdown,
|
|
4298
4748
|
show_time=show_time,
|
|
4299
4749
|
show_step_details=show_step_details,
|
|
@@ -4311,7 +4761,6 @@ class Workflow:
|
|
|
4311
4761
|
await self.aprint_response(
|
|
4312
4762
|
input=message,
|
|
4313
4763
|
stream=stream,
|
|
4314
|
-
stream_events=stream_events,
|
|
4315
4764
|
markdown=markdown,
|
|
4316
4765
|
show_time=show_time,
|
|
4317
4766
|
show_step_details=show_step_details,
|
|
@@ -4319,3 +4768,280 @@ class Workflow:
|
|
|
4319
4768
|
session_id=session_id,
|
|
4320
4769
|
**kwargs,
|
|
4321
4770
|
)
|
|
4771
|
+
|
|
4772
|
+
def deep_copy(self, *, update: Optional[Dict[str, Any]] = None) -> "Workflow":
|
|
4773
|
+
"""Create and return a deep copy of this Workflow, optionally updating fields.
|
|
4774
|
+
|
|
4775
|
+
This creates a fresh Workflow instance with isolated mutable state while sharing
|
|
4776
|
+
heavy resources like database connections. Steps containing agents/teams are also
|
|
4777
|
+
deep copied to ensure complete isolation.
|
|
4778
|
+
|
|
4779
|
+
Args:
|
|
4780
|
+
update: Optional dictionary of fields to override in the new Workflow.
|
|
4781
|
+
|
|
4782
|
+
Returns:
|
|
4783
|
+
Workflow: A new Workflow instance with copied state.
|
|
4784
|
+
"""
|
|
4785
|
+
from copy import copy, deepcopy
|
|
4786
|
+
from dataclasses import fields
|
|
4787
|
+
|
|
4788
|
+
from agno.utils.log import log_debug, log_warning
|
|
4789
|
+
|
|
4790
|
+
# Extract the fields to set for the new Workflow
|
|
4791
|
+
fields_for_new_workflow: Dict[str, Any] = {}
|
|
4792
|
+
|
|
4793
|
+
for f in fields(self):
|
|
4794
|
+
# Skip private fields (not part of __init__ signature)
|
|
4795
|
+
if f.name.startswith("_"):
|
|
4796
|
+
continue
|
|
4797
|
+
|
|
4798
|
+
field_value = getattr(self, f.name)
|
|
4799
|
+
if field_value is not None:
|
|
4800
|
+
# Special handling for steps that may contain agents/teams
|
|
4801
|
+
if f.name == "steps" and field_value is not None:
|
|
4802
|
+
fields_for_new_workflow[f.name] = self._deep_copy_steps(field_value)
|
|
4803
|
+
# Special handling for workflow agent
|
|
4804
|
+
elif f.name == "agent" and field_value is not None:
|
|
4805
|
+
if hasattr(field_value, "deep_copy"):
|
|
4806
|
+
fields_for_new_workflow[f.name] = field_value.deep_copy()
|
|
4807
|
+
else:
|
|
4808
|
+
fields_for_new_workflow[f.name] = field_value
|
|
4809
|
+
# Share heavy resources - these maintain connections/pools that shouldn't be duplicated
|
|
4810
|
+
elif f.name == "db":
|
|
4811
|
+
fields_for_new_workflow[f.name] = field_value
|
|
4812
|
+
# For compound types, attempt a deep copy
|
|
4813
|
+
elif isinstance(field_value, (list, dict, set)):
|
|
4814
|
+
try:
|
|
4815
|
+
fields_for_new_workflow[f.name] = deepcopy(field_value)
|
|
4816
|
+
except Exception:
|
|
4817
|
+
try:
|
|
4818
|
+
fields_for_new_workflow[f.name] = copy(field_value)
|
|
4819
|
+
except Exception as e:
|
|
4820
|
+
log_warning(f"Failed to copy field: {f.name} - {e}")
|
|
4821
|
+
fields_for_new_workflow[f.name] = field_value
|
|
4822
|
+
# For pydantic models, attempt a model_copy
|
|
4823
|
+
elif isinstance(field_value, BaseModel):
|
|
4824
|
+
try:
|
|
4825
|
+
fields_for_new_workflow[f.name] = field_value.model_copy(deep=True)
|
|
4826
|
+
except Exception:
|
|
4827
|
+
try:
|
|
4828
|
+
fields_for_new_workflow[f.name] = field_value.model_copy(deep=False)
|
|
4829
|
+
except Exception:
|
|
4830
|
+
fields_for_new_workflow[f.name] = field_value
|
|
4831
|
+
# For other types, attempt a shallow copy
|
|
4832
|
+
else:
|
|
4833
|
+
try:
|
|
4834
|
+
fields_for_new_workflow[f.name] = copy(field_value)
|
|
4835
|
+
except Exception:
|
|
4836
|
+
fields_for_new_workflow[f.name] = field_value
|
|
4837
|
+
|
|
4838
|
+
# Update fields if provided
|
|
4839
|
+
if update:
|
|
4840
|
+
fields_for_new_workflow.update(update)
|
|
4841
|
+
|
|
4842
|
+
# Create a new Workflow
|
|
4843
|
+
try:
|
|
4844
|
+
new_workflow = self.__class__(**fields_for_new_workflow)
|
|
4845
|
+
log_debug(f"Created new {self.__class__.__name__}")
|
|
4846
|
+
return new_workflow
|
|
4847
|
+
except Exception as e:
|
|
4848
|
+
from agno.utils.log import log_error
|
|
4849
|
+
|
|
4850
|
+
log_error(f"Failed to create deep copy of {self.__class__.__name__}: {e}")
|
|
4851
|
+
raise
|
|
4852
|
+
|
|
4853
|
+
def _deep_copy_steps(self, steps: Any) -> Any:
    """Return a copy of *steps*, recursing into nested agents and teams.

    Accepts any of the shapes a workflow's ``steps`` attribute can take:
    ``None``, a ``Steps`` container, a plain list, a bare callable, or a
    single step object. Individual children are copied via
    ``_deep_copy_single_step``.
    """
    from agno.workflow.steps import Steps

    # Nothing to copy
    if steps is None:
        return None

    # A Steps container: rebuild it from copies of its children
    if isinstance(steps, Steps):
        children = steps.steps or []
        return Steps(steps=[self._deep_copy_single_step(child) for child in children])

    # A plain list of steps
    if isinstance(steps, list):
        return [self._deep_copy_single_step(item) for item in steps]

    # Callables (custom executor functions) are shared, never duplicated
    if callable(steps):
        return steps

    # Anything else is treated as a single step object
    return self._deep_copy_single_step(steps)
|
|
4878
|
+
|
|
4879
|
+
def _deep_copy_single_step(self, step: Any) -> Any:
    """Deep copy a single step, handling nested agents and teams.

    Dispatches on the step's concrete type. Branch order matters: ``Step``
    is checked before ``Agent``/``Team`` (a Step wraps one of them), and the
    composite containers (Parallel/Loop/Condition/Router/Steps) are checked
    before the generic deepcopy fallback. Evaluators, selectors, end
    conditions and executors are shared by reference, not copied.
    """
    from copy import copy, deepcopy

    from agno.agent import Agent
    from agno.team import Team
    from agno.workflow.condition import Condition
    from agno.workflow.loop import Loop
    from agno.workflow.parallel import Parallel
    from agno.workflow.router import Router
    from agno.workflow.step import Step
    from agno.workflow.steps import Steps

    # Handle Step with agent or team
    if isinstance(step, Step):
        # Rebuild via the constructor rather than copying, so the new Step
        # picks up fresh deep copies of its agent/team.
        step_kwargs: Dict[str, Any] = {}
        if step.name:
            step_kwargs["name"] = step.name
        if step.description:
            step_kwargs["description"] = step.description
        if step.executor:
            # Executors are callables; shared by reference
            step_kwargs["executor"] = step.executor
        if step.agent:
            step_kwargs["agent"] = step.agent.deep_copy() if hasattr(step.agent, "deep_copy") else step.agent
        if step.team:
            step_kwargs["team"] = step.team.deep_copy() if hasattr(step.team, "deep_copy") else step.team
        # Copy Step configuration attributes
        for attr in [
            "max_retries",
            "timeout_seconds",
            "skip_on_failure",
            "strict_input_validation",
            "add_workflow_history",
            "num_history_runs",
        ]:
            if hasattr(step, attr):
                value = getattr(step, attr)
                # Only include non-default values to avoid overriding defaults
                if value is not None:
                    step_kwargs[attr] = value
        return Step(**step_kwargs)

    # Handle direct Agent
    if isinstance(step, Agent):
        return step.deep_copy() if hasattr(step, "deep_copy") else step

    # Handle direct Team
    if isinstance(step, Team):
        return step.deep_copy() if hasattr(step, "deep_copy") else step

    # Handle Parallel steps (children copied recursively; name/description preserved)
    if isinstance(step, Parallel):
        copied_parallel_steps = [self._deep_copy_single_step(s) for s in step.steps] if step.steps else []
        return Parallel(*copied_parallel_steps, name=step.name, description=step.description)

    # Handle Loop steps (end_condition shared by reference)
    if isinstance(step, Loop):
        copied_loop_steps = [self._deep_copy_single_step(s) for s in step.steps] if step.steps else []
        return Loop(
            steps=copied_loop_steps,
            name=step.name,
            description=step.description,
            max_iterations=step.max_iterations,
            end_condition=step.end_condition,
        )

    # Handle Condition steps (evaluator shared by reference)
    if isinstance(step, Condition):
        copied_condition_steps = [self._deep_copy_single_step(s) for s in step.steps] if step.steps else []
        return Condition(
            evaluator=step.evaluator, steps=copied_condition_steps, name=step.name, description=step.description
        )

    # Handle Router steps (selector shared by reference)
    if isinstance(step, Router):
        copied_choices = [self._deep_copy_single_step(s) for s in step.choices] if step.choices else []
        return Router(choices=copied_choices, name=step.name, description=step.description, selector=step.selector)

    # Handle Steps container
    if isinstance(step, Steps):
        copied_steps = [self._deep_copy_single_step(s) for s in step.steps] if step.steps else []
        return Steps(name=step.name, description=step.description, steps=copied_steps)

    # For other types, attempt deep copy, degrading to shallow copy and
    # finally to sharing the original object if copying fails.
    try:
        return deepcopy(step)
    except Exception:
        try:
            return copy(step)
        except Exception:
            return step
|
|
4970
|
+
|
|
4971
|
+
|
|
4972
|
+
def get_workflow_by_id(
    db: "BaseDb",
    id: str,
    version: Optional[int] = None,
    label: Optional[str] = None,
    registry: Optional["Registry"] = None,
) -> Optional["Workflow"]:
    """Load a Workflow by id from the database (entities/configs schema).

    The stored config is resolved in this order: an explicit ``version`` if
    given, otherwise a ``label``, otherwise the entity's current version.

    Args:
        db: Database handle.
        id: Workflow entity_id.
        version: Optional integer config version.
        label: Optional version_label.
        registry: Optional Registry for reconstructing unserializable components.

    Returns:
        Workflow instance, or None when the config is missing or fails to load.
    """
    try:
        record = db.get_config(component_id=id, version=version, label=label)
        if record is None:
            return None

        config_dict = record.get("config") if isinstance(record, dict) else None
        if config_dict is None:
            raise ValueError(f"Invalid config found for workflow {id}")

        stored_version = record.get("version")

        # Only a resolved version has link rows worth fetching
        if stored_version:
            version_links = db.get_links(component_id=id, version=stored_version)
        else:
            version_links = []

        loaded = Workflow.from_dict(config_dict, db=db, links=version_links, registry=registry)

        # The stored config may carry a stale id; force it to the component_id
        loaded.id = id
        return loaded

    except Exception as e:
        log_error(f"Error loading Workflow {id} from database: {e}")
        return None
|
|
5021
|
+
|
|
5022
|
+
|
|
5023
|
+
def get_workflows(
    db: "BaseDb",
    registry: Optional["Registry"] = None,
) -> List["Workflow"]:
    """Get all workflows from the database.

    Args:
        db: Database handle.
        registry: Optional Registry for reconstructing unserializable components.

    Returns:
        List of Workflow instances; workflows whose stored config cannot be
        deserialized are skipped (and logged) rather than aborting the listing.
    """
    workflows: List[Workflow] = []
    try:
        components, _ = db.list_components(component_type=ComponentType.WORKFLOW)
    except Exception as e:
        log_error(f"Error loading Workflows from database: {e}")
        return []

    for component in components:
        component_id = component["component_id"]
        # Fix: previously a single corrupt config raised out of the loop and
        # the whole function returned [], discarding every workflow already
        # loaded. Isolate failures per component instead.
        try:
            config = db.get_config(component_id=component_id)
            if config is None:
                continue
            workflow_config = config.get("config")
            if workflow_config is None:
                continue
            if "id" not in workflow_config:
                workflow_config["id"] = component_id
            workflow = Workflow.from_dict(workflow_config, db=db, registry=registry)
            # Ensure workflow.id is set to the component_id
            workflow.id = component_id
            workflows.append(workflow)
        except Exception as e:
            log_error(f"Error loading Workflow {component_id} from database: {e}")

    return workflows
|