agno 2.3.24__py3-none-any.whl → 2.3.26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. agno/agent/agent.py +357 -28
  2. agno/db/base.py +214 -0
  3. agno/db/dynamo/dynamo.py +47 -0
  4. agno/db/firestore/firestore.py +47 -0
  5. agno/db/gcs_json/gcs_json_db.py +47 -0
  6. agno/db/in_memory/in_memory_db.py +47 -0
  7. agno/db/json/json_db.py +47 -0
  8. agno/db/mongo/async_mongo.py +229 -0
  9. agno/db/mongo/mongo.py +47 -0
  10. agno/db/mongo/schemas.py +16 -0
  11. agno/db/mysql/async_mysql.py +47 -0
  12. agno/db/mysql/mysql.py +47 -0
  13. agno/db/postgres/async_postgres.py +231 -0
  14. agno/db/postgres/postgres.py +239 -0
  15. agno/db/postgres/schemas.py +19 -0
  16. agno/db/redis/redis.py +47 -0
  17. agno/db/singlestore/singlestore.py +47 -0
  18. agno/db/sqlite/async_sqlite.py +242 -0
  19. agno/db/sqlite/schemas.py +18 -0
  20. agno/db/sqlite/sqlite.py +239 -0
  21. agno/db/surrealdb/surrealdb.py +47 -0
  22. agno/knowledge/chunking/code.py +90 -0
  23. agno/knowledge/chunking/document.py +62 -2
  24. agno/knowledge/chunking/strategy.py +14 -0
  25. agno/knowledge/knowledge.py +7 -1
  26. agno/knowledge/reader/arxiv_reader.py +1 -0
  27. agno/knowledge/reader/csv_reader.py +1 -0
  28. agno/knowledge/reader/docx_reader.py +1 -0
  29. agno/knowledge/reader/firecrawl_reader.py +1 -0
  30. agno/knowledge/reader/json_reader.py +1 -0
  31. agno/knowledge/reader/markdown_reader.py +1 -0
  32. agno/knowledge/reader/pdf_reader.py +1 -0
  33. agno/knowledge/reader/pptx_reader.py +1 -0
  34. agno/knowledge/reader/s3_reader.py +1 -0
  35. agno/knowledge/reader/tavily_reader.py +1 -0
  36. agno/knowledge/reader/text_reader.py +1 -0
  37. agno/knowledge/reader/web_search_reader.py +1 -0
  38. agno/knowledge/reader/website_reader.py +1 -0
  39. agno/knowledge/reader/wikipedia_reader.py +1 -0
  40. agno/knowledge/reader/youtube_reader.py +1 -0
  41. agno/knowledge/utils.py +1 -0
  42. agno/learn/__init__.py +65 -0
  43. agno/learn/config.py +463 -0
  44. agno/learn/curate.py +185 -0
  45. agno/learn/machine.py +690 -0
  46. agno/learn/schemas.py +1043 -0
  47. agno/learn/stores/__init__.py +35 -0
  48. agno/learn/stores/entity_memory.py +3275 -0
  49. agno/learn/stores/learned_knowledge.py +1583 -0
  50. agno/learn/stores/protocol.py +117 -0
  51. agno/learn/stores/session_context.py +1217 -0
  52. agno/learn/stores/user_memory.py +1495 -0
  53. agno/learn/stores/user_profile.py +1220 -0
  54. agno/learn/utils.py +209 -0
  55. agno/models/base.py +59 -0
  56. agno/os/routers/agents/router.py +4 -4
  57. agno/os/routers/knowledge/knowledge.py +7 -0
  58. agno/os/routers/teams/router.py +3 -3
  59. agno/os/routers/workflows/router.py +5 -5
  60. agno/os/utils.py +55 -3
  61. agno/team/team.py +131 -0
  62. agno/tools/browserbase.py +78 -6
  63. agno/tools/google_bigquery.py +11 -2
  64. agno/utils/agent.py +30 -1
  65. agno/workflow/workflow.py +198 -0
  66. {agno-2.3.24.dist-info → agno-2.3.26.dist-info}/METADATA +24 -2
  67. {agno-2.3.24.dist-info → agno-2.3.26.dist-info}/RECORD +70 -56
  68. {agno-2.3.24.dist-info → agno-2.3.26.dist-info}/WHEEL +0 -0
  69. {agno-2.3.24.dist-info → agno-2.3.26.dist-info}/licenses/LICENSE +0 -0
  70. {agno-2.3.24.dist-info → agno-2.3.26.dist-info}/top_level.txt +0 -0
agno/learn/utils.py ADDED
@@ -0,0 +1,209 @@
1
+ """
2
+ Learning Machine Utilities
3
+ ==========================
4
+ Helper functions for safe data handling.
5
+
6
+ All functions are designed to never raise exceptions -
7
+ they return None on any failure. This prevents learning
8
+ extraction errors from crashing the main agent.
9
+ """
10
+
11
+ from dataclasses import asdict, fields
12
+ from typing import Any, Dict, List, Optional, Type, TypeVar
13
+
14
+ T = TypeVar("T")
15
+
16
+
17
+ def _safe_get(data: Any, key: str, default: Any = None) -> Any:
18
+ """Safely get a key from dict-like data.
19
+
20
+ Args:
21
+ data: Dict or object with attributes.
22
+ key: Key or attribute name to get.
23
+ default: Value to return if not found.
24
+
25
+ Returns:
26
+ The value, or default if not found.
27
+ """
28
+ if isinstance(data, dict):
29
+ return data.get(key, default)
30
+ return getattr(data, key, default)
31
+
32
+
33
+ def _parse_json(data: Any) -> Optional[Dict]:
34
+ """Parse JSON string to dict, or return dict as-is.
35
+
36
+ Args:
37
+ data: JSON string, dict, or None.
38
+
39
+ Returns:
40
+ Parsed dict, or None if parsing fails.
41
+ """
42
+ if data is None:
43
+ return None
44
+ if isinstance(data, dict):
45
+ return data
46
+ if isinstance(data, str):
47
+ import json
48
+
49
+ try:
50
+ return json.loads(data)
51
+ except Exception:
52
+ return None
53
+ return None
54
+
55
+
56
def from_dict_safe(cls: Type[T], data: Any) -> Optional[T]:
    """Build a dataclass instance from loosely-typed input, never raising.

    Accepts a dict, a JSON string, or an existing instance of ``cls``.
    Keys that do not correspond to declared dataclass fields are dropped,
    so subclass or legacy payloads load cleanly.

    Args:
        cls: The dataclass type to instantiate.
        data: Dict, JSON string, or an existing ``cls`` instance.

    Returns:
        An instance of ``cls``, or None when the input cannot be converted.

    Example:
        >>> profile = from_dict_safe(UserProfile, {"user_id": "123"})
        >>> profile.user_id
        '123'
    """
    if data is None:
        return None

    if isinstance(data, cls):
        # Nothing to convert - pass the instance straight through.
        return data

    try:
        payload = _parse_json(data)
        if payload is None:
            return None

        # Keep only keys that match declared dataclass fields.
        allowed = {f.name for f in fields(cls)}  # type: ignore
        init_kwargs = {k: v for k, v in payload.items() if k in allowed}

        return cls(**init_kwargs)
    except Exception:
        # Contract: never propagate parsing/constructor errors.
        return None
96
+
97
+
98
def print_panel(
    title: str,
    subtitle: str,
    lines: List[str],
    *,
    empty_message: str = "No data",
    raw_data: Any = None,
    raw: bool = False,
) -> None:
    """Print formatted panel output for learning stores.

    Uses the rich library for a bordered panel when available. When
    ``raw=True`` (and ``raw_data`` is given) the data is pretty-printed
    instead; when rich is not installed a plain-text rendering is printed.

    Args:
        title: Panel title (e.g., "User Profile", "Session Context")
        subtitle: Panel subtitle (e.g., user_id, session_id)
        lines: Content lines to display inside the panel
        empty_message: Message shown when lines is empty
        raw_data: Object to pprint when raw=True
        raw: If True, use pprint instead of formatted panel

    Example:
        >>> print_panel(
        ...     title="User Profile",
        ...     subtitle="alice@example.com",
        ...     lines=["Name: Alice", "Memories:", "  [abc123] Loves Python"],
        ...     raw_data=profile,
        ... )
        ╭──────────────── User Profile ─────────────────╮
        │ Name: Alice                                   │
        │ Memories:                                     │
        │   [abc123] Loves Python                       │
        ╰─────────────── alice@example.com ─────────────╯
    """
    if raw and raw_data is not None:
        from pprint import pprint

        # Prefer the dict form for readability; fall back to the object itself.
        pprint(to_dict_safe(raw_data) or raw_data)
        return

    try:
        from rich.console import Console
        from rich.panel import Panel

        content = "\n".join(lines) if lines else f"[dim]{empty_message}[/dim]"
        Console().print(
            Panel(
                content,
                title=f"[bold]{title}[/bold]",
                subtitle=f"[dim]{subtitle}[/dim]",
                border_style="blue",
            )
        )
    except ImportError:
        # Fallback if rich is not installed. (Fix: the previous version
        # imported pprint here but never used it - dead import removed.)
        print(f"=== {title} ({subtitle}) ===")
        if not lines:
            print(f"  {empty_message}")
        else:
            for line in lines:
                print(f"  {line}")
        print()
169
+
170
+
171
def to_dict_safe(obj: Any) -> Optional[Dict[str, Any]]:
    """Convert an object to a plain dict without ever raising.

    Tries, in order: pass dicts through unchanged, call the object's own
    ``to_dict()``, use ``dataclasses.asdict``, then fall back to a shallow
    copy of ``__dict__``.

    Args:
        obj: Dataclass instance (or dict-convertible object).

    Returns:
        Dict representation, or None if no conversion applies or one fails.

    Example:
        >>> profile = UserProfile(user_id="123")
        >>> to_dict_safe(profile)
        {'user_id': '123', 'name': None, ...}
    """
    if obj is None:
        return None

    try:
        if isinstance(obj, dict):
            # Already a mapping - nothing to do.
            return obj
        if hasattr(obj, "to_dict"):
            # Object supplies its own serialization.
            return obj.to_dict()
        if hasattr(obj, "__dataclass_fields__"):
            # Plain dataclass - recursive conversion.
            return asdict(obj)
        if hasattr(obj, "__dict__"):
            # Last resort: shallow copy of the instance namespace.
            return dict(obj.__dict__)
        return None
    except Exception:
        # Contract: conversion failures yield None, never an exception.
        return None
agno/models/base.py CHANGED
@@ -174,6 +174,49 @@ class Model(ABC):
174
174
  return self.delay_between_retries * (2**attempt)
175
175
  return self.delay_between_retries
176
176
 
177
def _is_retryable_error(self, error: ModelProviderError) -> bool:
    """Decide whether a provider error is transient enough to retry.

    Permanent failures are not retried: client errors whose outcome will not
    change (400, 401, 403, 404, 413, 422) and context-window / token-limit /
    payload-size problems detected from the error message. Everything else
    (e.g. 429 rate limits, 500/502/503/504 server errors) is retryable.

    Args:
        error: The ModelProviderError to evaluate.

    Returns:
        True if the error is transient and worth retrying, False otherwise.
    """
    # Client errors that will fail identically on every retry.
    if error.status_code in {400, 401, 403, 404, 413, 422}:
        return False

    # Message fragments indicating context/token/size limits - also permanent.
    permanent_markers = (
        "context_length_exceeded",
        "context window",
        "maximum context length",
        "token limit",
        "max_tokens",
        "too many tokens",
        "payload too large",
        "content_too_large",
        "request too large",
        "input too long",
        "exceeds the model",
    )
    message = str(error.message).lower()
    return not any(marker in message for marker in permanent_markers)
219
+
177
220
  def _invoke_with_retry(self, **kwargs) -> ModelResponse:
178
221
  """
179
222
  Invoke the model with retry logic for ModelProviderError.
@@ -189,6 +232,10 @@ class Model(ABC):
189
232
  return self.invoke(**kwargs)
190
233
  except ModelProviderError as e:
191
234
  last_exception = e
235
+ # Check if error is non-retryable
236
+ if not self._is_retryable_error(e):
237
+ log_error(f"Non-retryable model provider error: {e}")
238
+ raise
192
239
  if attempt < self.retries:
193
240
  delay = self._get_retry_delay(attempt)
194
241
  log_warning(
@@ -232,6 +279,10 @@ class Model(ABC):
232
279
  return await self.ainvoke(**kwargs)
233
280
  except ModelProviderError as e:
234
281
  last_exception = e
282
+ # Check if error is non-retryable
283
+ if not self._is_retryable_error(e):
284
+ log_error(f"Non-retryable model provider error: {e}")
285
+ raise
235
286
  if attempt < self.retries:
236
287
  delay = self._get_retry_delay(attempt)
237
288
  log_warning(
@@ -277,6 +328,10 @@ class Model(ABC):
277
328
  return # Success, exit the retry loop
278
329
  except ModelProviderError as e:
279
330
  last_exception = e
331
+ # Check if error is non-retryable (e.g., context window exceeded, auth errors)
332
+ if not self._is_retryable_error(e):
333
+ log_error(f"Non-retryable model provider error: {e}")
334
+ raise
280
335
  if attempt < self.retries:
281
336
  delay = self._get_retry_delay(attempt)
282
337
  log_warning(
@@ -325,6 +380,10 @@ class Model(ABC):
325
380
  return # Success, exit the retry loop
326
381
  except ModelProviderError as e:
327
382
  last_exception = e
383
+ # Check if error is non-retryable
384
+ if not self._is_retryable_error(e):
385
+ log_error(f"Non-retryable model provider error: {e}")
386
+ raise
328
387
  if attempt < self.retries:
329
388
  delay = self._get_retry_delay(attempt)
330
389
  log_warning(
@@ -243,7 +243,7 @@ def get_agent_router(
243
243
  log_warning("Metadata parameter passed in both request state and kwargs, using request state")
244
244
  kwargs["metadata"] = metadata
245
245
 
246
- agent = get_agent_by_id(agent_id, os.agents)
246
+ agent = get_agent_by_id(agent_id, os.agents, create_fresh=True)
247
247
  if agent is None:
248
248
  raise HTTPException(status_code=404, detail="Agent not found")
249
249
 
@@ -405,7 +405,7 @@ def get_agent_router(
405
405
  agent_id: str,
406
406
  run_id: str,
407
407
  ):
408
- agent = get_agent_by_id(agent_id, os.agents)
408
+ agent = get_agent_by_id(agent_id, os.agents, create_fresh=True)
409
409
  if agent is None:
410
410
  raise HTTPException(status_code=404, detail="Agent not found")
411
411
 
@@ -464,7 +464,7 @@ def get_agent_router(
464
464
  except json.JSONDecodeError:
465
465
  raise HTTPException(status_code=400, detail="Invalid JSON in tools field")
466
466
 
467
- agent = get_agent_by_id(agent_id, os.agents)
467
+ agent = get_agent_by_id(agent_id, os.agents, create_fresh=True)
468
468
  if agent is None:
469
469
  raise HTTPException(status_code=404, detail="Agent not found")
470
470
 
@@ -630,7 +630,7 @@ def get_agent_router(
630
630
  dependencies=[Depends(require_resource_access("agents", "read", "agent_id"))],
631
631
  )
632
632
  async def get_agent(agent_id: str, request: Request) -> AgentResponse:
633
- agent = get_agent_by_id(agent_id, os.agents)
633
+ agent = get_agent_by_id(agent_id, os.agents, create_fresh=True)
634
634
  if agent is None:
635
635
  raise HTTPException(status_code=404, detail="Agent not found")
636
636
 
@@ -860,6 +860,7 @@ def attach_routes(router: APIRouter, knowledge_instances: List[Union[Knowledge,
860
860
  "name": "TextReader",
861
861
  "description": "Reads text files",
862
862
  "chunkers": [
863
+ "CodeChunker",
863
864
  "FixedSizeChunker",
864
865
  "AgenticChunker",
865
866
  "DocumentChunker",
@@ -898,6 +899,12 @@ def attach_routes(router: APIRouter, knowledge_instances: List[Union[Knowledge,
898
899
  "description": "Chunking strategy that uses an LLM to determine natural breakpoints in the text",
899
900
  "metadata": {"chunk_size": 5000},
900
901
  },
902
+ "CodeChunker": {
903
+ "key": "CodeChunker",
904
+ "name": "CodeChunker",
905
+ "description": "The CodeChunker splits code into chunks based on its structure, leveraging Abstract Syntax Trees (ASTs) to create contextually relevant segments",
906
+ "metadata": {"chunk_size": 2048},
907
+ },
901
908
  "DocumentChunker": {
902
909
  "key": "DocumentChunker",
903
910
  "name": "DocumentChunker",
@@ -194,7 +194,7 @@ def get_team_router(
194
194
 
195
195
  logger.debug(f"Creating team run: {message=} {session_id=} {monitor=} {user_id=} {team_id=} {files=} {kwargs=}")
196
196
 
197
- team = get_team_by_id(team_id, os.teams)
197
+ team = get_team_by_id(team_id, os.teams, create_fresh=True)
198
198
  if team is None:
199
199
  raise HTTPException(status_code=404, detail="Team not found")
200
200
 
@@ -321,7 +321,7 @@ def get_team_router(
321
321
  team_id: str,
322
322
  run_id: str,
323
323
  ):
324
- team = get_team_by_id(team_id, os.teams)
324
+ team = get_team_by_id(team_id, os.teams, create_fresh=True)
325
325
  if team is None:
326
326
  raise HTTPException(status_code=404, detail="Team not found")
327
327
 
@@ -526,7 +526,7 @@ def get_team_router(
526
526
  dependencies=[Depends(require_resource_access("teams", "read", "team_id"))],
527
527
  )
528
528
  async def get_team(team_id: str, request: Request) -> TeamResponse:
529
- team = get_team_by_id(team_id, os.teams)
529
+ team = get_team_by_id(team_id, os.teams, create_fresh=True)
530
530
  if team is None:
531
531
  raise HTTPException(status_code=404, detail="Team not found")
532
532
 
@@ -61,7 +61,7 @@ async def handle_workflow_via_websocket(websocket: WebSocket, message: dict, os:
61
61
  return
62
62
 
63
63
  # Get workflow from OS
64
- workflow = get_workflow_by_id(workflow_id, os.workflows)
64
+ workflow = get_workflow_by_id(workflow_id, os.workflows, create_fresh=True)
65
65
  if not workflow:
66
66
  await websocket.send_text(json.dumps({"event": "error", "error": f"Workflow {workflow_id} not found"}))
67
67
  return
@@ -141,7 +141,7 @@ async def handle_workflow_subscription(websocket: WebSocket, message: dict, os:
141
141
  if buffer_status is None:
142
142
  # Run not in buffer - check database
143
143
  if workflow_id and session_id:
144
- workflow = get_workflow_by_id(workflow_id, os.workflows)
144
+ workflow = get_workflow_by_id(workflow_id, os.workflows, create_fresh=True)
145
145
  if workflow and isinstance(workflow, Workflow):
146
146
  workflow_run = await workflow.aget_run_output(run_id, session_id)
147
147
 
@@ -571,7 +571,7 @@ def get_workflow_router(
571
571
  dependencies=[Depends(require_resource_access("workflows", "read", "workflow_id"))],
572
572
  )
573
573
  async def get_workflow(workflow_id: str, request: Request) -> WorkflowResponse:
574
- workflow = get_workflow_by_id(workflow_id, os.workflows)
574
+ workflow = get_workflow_by_id(workflow_id, os.workflows, create_fresh=True)
575
575
  if workflow is None:
576
576
  raise HTTPException(status_code=404, detail="Workflow not found")
577
577
  if isinstance(workflow, RemoteWorkflow):
@@ -650,7 +650,7 @@ def get_workflow_router(
650
650
  kwargs["metadata"] = metadata
651
651
 
652
652
  # Retrieve the workflow by ID
653
- workflow = get_workflow_by_id(workflow_id, os.workflows)
653
+ workflow = get_workflow_by_id(workflow_id, os.workflows, create_fresh=True)
654
654
  if workflow is None:
655
655
  raise HTTPException(status_code=404, detail="Workflow not found")
656
656
 
@@ -716,7 +716,7 @@ def get_workflow_router(
716
716
  dependencies=[Depends(require_resource_access("workflows", "run", "workflow_id"))],
717
717
  )
718
718
  async def cancel_workflow_run(workflow_id: str, run_id: str):
719
- workflow = get_workflow_by_id(workflow_id, os.workflows)
719
+ workflow = get_workflow_by_id(workflow_id, os.workflows, create_fresh=True)
720
720
 
721
721
  if workflow is None:
722
722
  raise HTTPException(status_code=404, detail="Workflow not found")
agno/os/utils.py CHANGED
@@ -414,37 +414,89 @@ def extract_format(file: UploadFile) -> Optional[str]:
414
414
 
415
415
 
416
416
def get_agent_by_id(
    agent_id: str,
    agents: Optional[List[Union[Agent, RemoteAgent]]] = None,
    create_fresh: bool = False,
) -> Optional[Union[Agent, RemoteAgent]]:
    """Look up an agent by ID, optionally returning an isolated copy.

    With create_fresh=True the matched Agent is deep_copy()'d so concurrent
    requests cannot contaminate each other's mutable state; heavy resources
    (db, model, MCP tools) remain shared by the copy.

    Args:
        agent_id: The agent ID to look up
        agents: List of agents to search
        create_fresh: If True, creates a new instance using deep_copy()

    Returns:
        The agent instance (shared or fresh copy based on create_fresh)
    """
    if agent_id is None or agents is None:
        return None

    found = next((a for a in agents if a.id == agent_id), None)
    if found is not None and create_fresh and isinstance(found, Agent):
        # RemoteAgent proxies are never copied - only local Agent instances.
        return found.deep_copy()
    return found
426
444
 
427
445
 
428
446
def get_team_by_id(
    team_id: str,
    teams: Optional[List[Union[Team, RemoteTeam]]] = None,
    create_fresh: bool = False,
) -> Optional[Union[Team, RemoteTeam]]:
    """Look up a team by ID, optionally returning an isolated copy.

    With create_fresh=True the matched Team is deep_copy()'d so concurrent
    requests cannot contaminate each other's mutable state; member agents
    are deep copied as part of that.

    Args:
        team_id: The team ID to look up
        teams: List of teams to search
        create_fresh: If True, creates a new instance using deep_copy()

    Returns:
        The team instance (shared or fresh copy based on create_fresh)
    """
    if team_id is None or teams is None:
        return None

    found = next((t for t in teams if t.id == team_id), None)
    if found is not None and create_fresh and isinstance(found, Team):
        # RemoteTeam proxies are never copied - only local Team instances.
        return found.deep_copy()
    return found
438
473
 
439
474
 
440
475
def get_workflow_by_id(
    workflow_id: str,
    workflows: Optional[List[Union[Workflow, RemoteWorkflow]]] = None,
    create_fresh: bool = False,
) -> Optional[Union[Workflow, RemoteWorkflow]]:
    """Look up a workflow by ID, optionally returning an isolated copy.

    With create_fresh=True the matched Workflow is deep_copy()'d so concurrent
    requests cannot contaminate each other's mutable state; steps containing
    agents/teams are deep copied as part of that.

    Args:
        workflow_id: The workflow ID to look up
        workflows: List of workflows to search
        create_fresh: If True, creates a new instance using deep_copy()

    Returns:
        The workflow instance (shared or fresh copy based on create_fresh)
    """
    if workflow_id is None or workflows is None:
        return None

    found = next((w for w in workflows if w.id == workflow_id), None)
    if found is not None and create_fresh and isinstance(found, Workflow):
        # RemoteWorkflow proxies are never copied - only local Workflow instances.
        return found.deep_copy()
    return found
450
502
 
agno/team/team.py CHANGED
@@ -9392,3 +9392,134 @@ class Team:
9392
9392
  )
9393
9393
  except Exception as e:
9394
9394
  log_debug(f"Could not create Team run telemetry event: {e}")
9395
+
9396
def deep_copy(self, *, update: Optional[Dict[str, Any]] = None) -> "Team":
    """Create and return a deep copy of this Team, optionally updating fields.

    Produces a fresh Team with isolated mutable state while heavy resources
    (database, models, knowledge, ...) stay shared; member agents/teams are
    deep copied via _deep_copy_field.

    Args:
        update: Optional dictionary of fields to override in the new Team.

    Returns:
        Team: A new Team instance with copied state.
    """
    from dataclasses import fields

    # Collect constructor kwargs from the public dataclass fields.
    init_kwargs: Dict[str, Any] = {}
    for field in fields(self):
        # Private fields are internal state, not __init__ parameters.
        if field.name.startswith("_"):
            continue

        value = getattr(self, field.name)
        if value is None:
            continue

        try:
            init_kwargs[field.name] = self._deep_copy_field(field.name, value)
        except Exception as e:
            # Copying is best-effort: fall back to sharing the original value.
            log_warning(f"Failed to deep copy field '{field.name}': {e}. Using original value.")
            init_kwargs[field.name] = value

    # Caller-supplied overrides win over copied values.
    if update:
        init_kwargs.update(update)

    try:
        fresh_team = self.__class__(**init_kwargs)
        log_debug(f"Created new {self.__class__.__name__}")
        return fresh_team
    except Exception as e:
        log_error(f"Failed to create deep copy of {self.__class__.__name__}: {e}")
        raise
9439
+
9440
def _deep_copy_field(self, field_name: str, field_value: Any) -> Any:
    """Helper method to deep copy a field based on its type.

    Copy policy, checked in order:
      1. ``members``: members exposing ``deep_copy()`` are deep copied;
         the rest are shared by reference.
      2. ``tools``: MCP tools are shared (they maintain live server
         connections); other tools are deep copied when possible, falling
         back to sharing by reference.
      3. Heavy resources (db, model, knowledge, managers, ...) are always
         shared - they wrap connections/pools that must not be duplicated.
      4. Lists/dicts/sets: deepcopy, then shallow copy, then share.
      5. Pydantic models: deep model_copy, then shallow model_copy, then share.
      6. Anything else: shallow copy, then share.

    Args:
        field_name: Name of the Team field being copied.
        field_value: The field's current value.

    Returns:
        The copied value, or the original value when sharing is intended
        or no copy strategy succeeds.
    """
    from copy import copy, deepcopy

    # For members, deep copy each agent/team
    if field_name == "members" and field_value is not None:
        copied_members = []
        for member in field_value:
            if hasattr(member, "deep_copy"):
                copied_members.append(member.deep_copy())
            else:
                copied_members.append(member)
        return copied_members

    # For tools, share MCP tools but copy others
    if field_name == "tools" and field_value is not None:
        try:
            copied_tools = []
            for tool in field_value:
                try:
                    # Share MCP tools (they maintain server connections).
                    # Detection is by class name in the MRO to avoid importing MCP modules.
                    is_mcp_tool = hasattr(type(tool), "__mro__") and any(
                        c.__name__ in ["MCPTools", "MultiMCPTools"] for c in type(tool).__mro__
                    )
                    if is_mcp_tool:
                        copied_tools.append(tool)
                    else:
                        try:
                            copied_tools.append(deepcopy(tool))
                        except Exception:
                            # Tool can't be deep copied, share by reference
                            copied_tools.append(tool)
                except Exception:
                    # MCP detection failed, share tool by reference to be safe
                    copied_tools.append(tool)
            return copied_tools
        except Exception as e:
            # If entire tools processing fails, log and return original list
            log_warning(f"Failed to process tools for deep copy: {e}")
            return field_value

    # Share heavy resources - these maintain connections/pools that shouldn't be duplicated
    if field_name in (
        "db",
        "model",
        "reasoning_model",
        "knowledge",
        "memory_manager",
        "parser_model",
        "output_model",
        "session_summary_manager",
        "culture_manager",
        "compression_manager",
        "learning",
        "skills",
    ):
        return field_value

    # For compound types, attempt a deep copy
    if isinstance(field_value, (list, dict, set)):
        try:
            return deepcopy(field_value)
        except Exception:
            try:
                return copy(field_value)
            except Exception as e:
                log_warning(f"Failed to copy field: {field_name} - {e}")
                return field_value

    # For pydantic models, attempt a model_copy
    if isinstance(field_value, BaseModel):
        try:
            return field_value.model_copy(deep=True)
        except Exception:
            try:
                return field_value.model_copy(deep=False)
            except Exception as e:
                log_warning(f"Failed to copy field: {field_name} - {e}")
                return field_value

    # For other types, attempt a shallow copy first
    try:
        return copy(field_value)
    except Exception:
        # If copy fails, return as is
        return field_value