agno 2.2.10__py3-none-any.whl → 2.2.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. agno/agent/agent.py +75 -48
  2. agno/db/dynamo/utils.py +1 -1
  3. agno/db/firestore/utils.py +1 -1
  4. agno/db/gcs_json/utils.py +1 -1
  5. agno/db/in_memory/utils.py +1 -1
  6. agno/db/json/utils.py +1 -1
  7. agno/db/mongo/utils.py +3 -3
  8. agno/db/mysql/mysql.py +1 -1
  9. agno/db/mysql/utils.py +1 -1
  10. agno/db/postgres/utils.py +1 -1
  11. agno/db/redis/utils.py +1 -1
  12. agno/db/singlestore/singlestore.py +1 -1
  13. agno/db/singlestore/utils.py +1 -1
  14. agno/db/sqlite/async_sqlite.py +1 -1
  15. agno/db/sqlite/sqlite.py +1 -1
  16. agno/db/sqlite/utils.py +1 -1
  17. agno/filters.py +354 -0
  18. agno/knowledge/chunking/agentic.py +8 -9
  19. agno/knowledge/chunking/strategy.py +59 -15
  20. agno/knowledge/embedder/sentence_transformer.py +6 -2
  21. agno/knowledge/knowledge.py +43 -22
  22. agno/knowledge/reader/base.py +6 -2
  23. agno/knowledge/utils.py +20 -0
  24. agno/models/anthropic/claude.py +45 -9
  25. agno/models/base.py +4 -0
  26. agno/os/app.py +23 -7
  27. agno/os/interfaces/slack/router.py +53 -33
  28. agno/os/interfaces/slack/slack.py +9 -1
  29. agno/os/router.py +25 -1
  30. agno/os/routers/health.py +5 -3
  31. agno/os/routers/knowledge/knowledge.py +43 -17
  32. agno/os/routers/knowledge/schemas.py +4 -3
  33. agno/run/agent.py +11 -1
  34. agno/run/base.py +3 -2
  35. agno/session/agent.py +10 -5
  36. agno/team/team.py +57 -18
  37. agno/tools/file_generation.py +4 -4
  38. agno/tools/gmail.py +179 -0
  39. agno/tools/parallel.py +314 -0
  40. agno/utils/agent.py +22 -17
  41. agno/utils/gemini.py +15 -5
  42. agno/utils/knowledge.py +12 -5
  43. agno/utils/log.py +1 -0
  44. agno/utils/models/claude.py +2 -1
  45. agno/utils/print_response/agent.py +5 -4
  46. agno/utils/print_response/team.py +5 -4
  47. agno/vectordb/base.py +2 -4
  48. agno/vectordb/cassandra/cassandra.py +12 -5
  49. agno/vectordb/chroma/chromadb.py +10 -4
  50. agno/vectordb/clickhouse/clickhousedb.py +12 -4
  51. agno/vectordb/couchbase/couchbase.py +12 -3
  52. agno/vectordb/lancedb/lance_db.py +69 -144
  53. agno/vectordb/langchaindb/langchaindb.py +13 -4
  54. agno/vectordb/lightrag/lightrag.py +8 -3
  55. agno/vectordb/llamaindex/llamaindexdb.py +10 -4
  56. agno/vectordb/milvus/milvus.py +16 -5
  57. agno/vectordb/mongodb/mongodb.py +14 -3
  58. agno/vectordb/pgvector/pgvector.py +73 -15
  59. agno/vectordb/pineconedb/pineconedb.py +6 -2
  60. agno/vectordb/qdrant/qdrant.py +25 -13
  61. agno/vectordb/redis/redisdb.py +37 -30
  62. agno/vectordb/singlestore/singlestore.py +9 -4
  63. agno/vectordb/surrealdb/surrealdb.py +13 -3
  64. agno/vectordb/upstashdb/upstashdb.py +8 -5
  65. agno/vectordb/weaviate/weaviate.py +29 -12
  66. agno/workflow/step.py +3 -2
  67. agno/workflow/types.py +20 -1
  68. agno/workflow/workflow.py +103 -14
  69. {agno-2.2.10.dist-info → agno-2.2.12.dist-info}/METADATA +4 -1
  70. {agno-2.2.10.dist-info → agno-2.2.12.dist-info}/RECORD +73 -71
  71. {agno-2.2.10.dist-info → agno-2.2.12.dist-info}/WHEEL +0 -0
  72. {agno-2.2.10.dist-info → agno-2.2.12.dist-info}/licenses/LICENSE +0 -0
  73. {agno-2.2.10.dist-info → agno-2.2.12.dist-info}/top_level.txt +0 -0
agno/tools/parallel.py ADDED
@@ -0,0 +1,314 @@
1
+ import json
2
+ from os import getenv
3
+ from typing import Any, Dict, List, Optional
4
+
5
+ from agno.tools import Toolkit
6
+ from agno.utils.log import log_error
7
+
8
+ try:
9
+ from parallel import Parallel as ParallelClient
10
+ except ImportError:
11
+ raise ImportError("`parallel-web` not installed. Please install using `pip install parallel-web`")
12
+
13
+
14
+ class CustomJSONEncoder(json.JSONEncoder):
15
+ """Custom JSON encoder that handles non-serializable types by converting them to strings."""
16
+
17
+ def default(self, obj):
18
+ try:
19
+ return super().default(obj)
20
+ except TypeError:
21
+ return str(obj)
22
+
23
+
24
class ParallelTools(Toolkit):
    """
    ParallelTools provides access to Parallel's web search and extraction APIs.

    Parallel offers powerful APIs optimized for AI agents:
    - Search API: AI-optimized web search that returns relevant excerpts tailored for LLMs
    - Extract API: Extract content from specific URLs in clean markdown format, handling JavaScript-heavy pages and PDFs

    Args:
        api_key (Optional[str]): Parallel API key. If not provided, will use PARALLEL_API_KEY environment variable.
        enable_search (bool): Enable Search API functionality. Default is True.
        enable_extract (bool): Enable Extract API functionality. Default is True.
        all (bool): Enable all tools. Overrides individual flags when True. Default is False.
        max_results (int): Default maximum number of results for search operations. Default is 10.
        max_chars_per_result (int): Default maximum characters per result for search operations. Default is 10000.
        beta_version (str): Beta API version header. Default is "search-extract-2025-10-10".
        mode (Optional[str]): Default search mode. Options: "one-shot" or "agentic". Default is None.
        include_domains (Optional[List[str]]): Default domains to restrict results to. Default is None.
        exclude_domains (Optional[List[str]]): Default domains to exclude from results. Default is None.
        max_age_seconds (Optional[int]): Default cache age threshold (minimum 600). Default is None.
        timeout_seconds (Optional[float]): Default timeout for content retrieval. Default is None.
        disable_cache_fallback (Optional[bool]): Default cache fallback behavior. Default is None.
    """

    def __init__(
        self,
        api_key: Optional[str] = None,
        enable_search: bool = True,
        enable_extract: bool = True,
        all: bool = False,  # noqa: A002 -- shadows builtin; name kept for backward compatibility
        max_results: int = 10,
        max_chars_per_result: int = 10000,
        beta_version: str = "search-extract-2025-10-10",
        mode: Optional[str] = None,
        include_domains: Optional[List[str]] = None,
        exclude_domains: Optional[List[str]] = None,
        max_age_seconds: Optional[int] = None,
        timeout_seconds: Optional[float] = None,
        disable_cache_fallback: Optional[bool] = None,
        **kwargs,
    ):
        self.api_key: Optional[str] = api_key or getenv("PARALLEL_API_KEY")
        if not self.api_key:
            # Log (rather than raise) so the toolkit can still be constructed;
            # API calls will fail later with a clearer server-side error.
            log_error("PARALLEL_API_KEY not set. Please set the PARALLEL_API_KEY environment variable.")

        self.max_results = max_results
        self.max_chars_per_result = max_chars_per_result
        self.beta_version = beta_version
        self.mode = mode
        self.include_domains = include_domains
        self.exclude_domains = exclude_domains
        self.max_age_seconds = max_age_seconds
        self.timeout_seconds = timeout_seconds
        self.disable_cache_fallback = disable_cache_fallback

        # The "parallel-beta" header opts the client into the beta Search/Extract surface.
        self.parallel_client = ParallelClient(
            api_key=self.api_key, default_headers={"parallel-beta": self.beta_version}
        )

        tools: List[Any] = []
        if all or enable_search:
            tools.append(self.parallel_search)
        if all or enable_extract:
            tools.append(self.parallel_extract)

        super().__init__(name="parallel_tools", tools=tools, **kwargs)

    def _build_fetch_policy(self) -> Dict[str, Any]:
        """Build the fetch_policy payload from constructor defaults.

        Returns:
            Dict[str, Any]: Only the keys that were explicitly configured;
            empty when no fetch-policy option is set.
        """
        fetch_policy: Dict[str, Any] = {}
        if self.max_age_seconds is not None:
            fetch_policy["max_age_seconds"] = self.max_age_seconds
        if self.timeout_seconds is not None:
            fetch_policy["timeout_seconds"] = self.timeout_seconds
        if self.disable_cache_fallback is not None:
            fetch_policy["disable_cache_fallback"] = self.disable_cache_fallback
        return fetch_policy

    @staticmethod
    def _serialize_api_response(response: Any) -> Optional[str]:
        """Serialize an SDK response via its model_dump() method when available.

        Returns:
            Optional[str]: JSON string on success; None when the response has no
            model_dump() or serialization fails, so the caller can fall back to
            manual formatting.
        """
        try:
            if hasattr(response, "model_dump"):
                return json.dumps(response.model_dump(), cls=CustomJSONEncoder, indent=2)
        except Exception:
            # Best-effort path: any serialization failure falls through to manual formatting.
            pass
        return None

    def parallel_search(
        self,
        objective: Optional[str] = None,
        search_queries: Optional[List[str]] = None,
        max_results: Optional[int] = None,
        max_chars_per_result: Optional[int] = None,
    ) -> str:
        """Use this function to search the web using Parallel's Search API with a natural language objective.
        You must provide at least one of objective or search_queries.

        Args:
            objective (Optional[str]): Natural-language description of what the web search is trying to find.
            search_queries (Optional[List[str]]): Traditional keyword queries with optional search operators.
            max_results (Optional[int]): Upper bound on results returned. Overrides constructor default.
            max_chars_per_result (Optional[int]): Upper bound on total characters per url for excerpts.

        Returns:
            str: A JSON formatted string containing the search results with URLs, titles, publish dates, and relevant excerpts.
        """
        try:
            if not objective and not search_queries:
                return json.dumps({"error": "Please provide at least one of: objective or search_queries"}, indent=2)

            # Per-call arguments override instance defaults
            final_max_results = max_results if max_results is not None else self.max_results

            search_params: Dict[str, Any] = {
                "max_results": final_max_results,
            }

            if objective:
                search_params["objective"] = objective

            if search_queries:
                search_params["search_queries"] = search_queries

            if self.mode:
                search_params["mode"] = self.mode

            # Excerpt sizing: per-call value wins over the constructor default
            excerpts_config: Dict[str, Any] = {}
            final_max_chars = max_chars_per_result if max_chars_per_result is not None else self.max_chars_per_result
            if final_max_chars is not None:
                excerpts_config["max_chars_per_result"] = final_max_chars

            if excerpts_config:
                search_params["excerpts"] = excerpts_config

            # Domain allow/deny lists from constructor defaults
            source_policy: Dict[str, Any] = {}
            if self.include_domains:
                source_policy["include_domains"] = self.include_domains
            if self.exclude_domains:
                source_policy["exclude_domains"] = self.exclude_domains

            if source_policy:
                search_params["source_policy"] = source_policy

            fetch_policy = self._build_fetch_policy()
            if fetch_policy:
                search_params["fetch_policy"] = fetch_policy

            search_result = self.parallel_client.beta.search(**search_params)

            # Prefer the SDK's own serialization when available
            serialized = self._serialize_api_response(search_result)
            if serialized is not None:
                return serialized

            # Manually format the results
            formatted_results: Dict[str, Any] = {
                "search_id": getattr(search_result, "search_id", ""),
                "results": [],
            }

            if hasattr(search_result, "results") and search_result.results:
                results_list: List[Dict[str, Any]] = []
                for result in search_result.results:
                    formatted_result: Dict[str, Any] = {
                        "title": getattr(result, "title", ""),
                        "url": getattr(result, "url", ""),
                        "publish_date": getattr(result, "publish_date", ""),
                        "excerpt": getattr(result, "excerpt", ""),
                    }
                    results_list.append(formatted_result)
                formatted_results["results"] = results_list

            if hasattr(search_result, "warnings"):
                formatted_results["warnings"] = search_result.warnings

            if hasattr(search_result, "usage"):
                formatted_results["usage"] = search_result.usage

            return json.dumps(formatted_results, cls=CustomJSONEncoder, indent=2)

        except Exception as e:
            log_error(f"Error searching Parallel for objective '{objective}': {e}")
            return json.dumps({"error": f"Search failed: {str(e)}"}, indent=2)

    def parallel_extract(
        self,
        urls: List[str],
        objective: Optional[str] = None,
        search_queries: Optional[List[str]] = None,
        excerpts: bool = True,
        max_chars_per_excerpt: Optional[int] = None,
        full_content: bool = False,
        max_chars_for_full_content: Optional[int] = None,
    ) -> str:
        """Use this function to extract content from specific URLs using Parallel's Extract API.

        Args:
            urls (List[str]): List of public URLs to extract content from.
            objective (Optional[str]): Search focus to guide content extraction.
            search_queries (Optional[List[str]]): Keywords for targeting relevant content.
            excerpts (bool): Include relevant text snippets.
            max_chars_per_excerpt (Optional[int]): Upper bound on total characters per url. Only used when excerpts is True.
            full_content (bool): Include complete page text.
            max_chars_for_full_content (Optional[int]): Limit on characters per url. Only used when full_content is True.

        Returns:
            str: A JSON formatted string containing extracted content with titles, publish dates, excerpts and/or full content.
        """
        try:
            if not urls:
                return json.dumps({"error": "Please provide at least one URL to extract"}, indent=2)

            extract_params: Dict[str, Any] = {
                "urls": urls,
            }

            if objective:
                extract_params["objective"] = objective

            if search_queries:
                extract_params["search_queries"] = search_queries

            # The API accepts either a bool or a {"max_chars_per_result": N} config
            if excerpts and max_chars_per_excerpt is not None:
                extract_params["excerpts"] = {"max_chars_per_result": max_chars_per_excerpt}
            else:
                extract_params["excerpts"] = excerpts

            if full_content and max_chars_for_full_content is not None:
                extract_params["full_content"] = {"max_chars_per_result": max_chars_for_full_content}
            else:
                extract_params["full_content"] = full_content

            fetch_policy = self._build_fetch_policy()
            if fetch_policy:
                extract_params["fetch_policy"] = fetch_policy

            extract_result = self.parallel_client.beta.extract(**extract_params)

            # Prefer the SDK's own serialization when available
            serialized = self._serialize_api_response(extract_result)
            if serialized is not None:
                return serialized

            # Manually format the results
            formatted_results: Dict[str, Any] = {
                "extract_id": getattr(extract_result, "extract_id", ""),
                "results": [],
                "errors": [],
            }

            if hasattr(extract_result, "results") and extract_result.results:
                results_list: List[Dict[str, Any]] = []
                for result in extract_result.results:
                    formatted_result: Dict[str, Any] = {
                        "url": getattr(result, "url", ""),
                        "title": getattr(result, "title", ""),
                        "publish_date": getattr(result, "publish_date", ""),
                    }

                    if excerpts and hasattr(result, "excerpts"):
                        formatted_result["excerpts"] = result.excerpts

                    if full_content and hasattr(result, "full_content"):
                        formatted_result["full_content"] = result.full_content

                    results_list.append(formatted_result)
                formatted_results["results"] = results_list

            if hasattr(extract_result, "errors") and extract_result.errors:
                formatted_results["errors"] = extract_result.errors

            if hasattr(extract_result, "warnings"):
                formatted_results["warnings"] = extract_result.warnings

            if hasattr(extract_result, "usage"):
                formatted_results["usage"] = extract_result.usage

            return json.dumps(formatted_results, cls=CustomJSONEncoder, indent=2)

        except Exception as e:
            log_error(f"Error extracting from Parallel: {e}")
            return json.dumps({"error": f"Extract failed: {str(e)}"}, indent=2)
agno/utils/agent.py CHANGED
@@ -8,7 +8,7 @@ from agno.models.response import ModelResponse
8
8
  from agno.run.agent import RunEvent, RunInput, RunOutput, RunOutputEvent
9
9
  from agno.run.team import RunOutputEvent as TeamRunOutputEvent
10
10
  from agno.run.team import TeamRunOutput
11
- from agno.session import AgentSession, TeamSession
11
+ from agno.session import AgentSession, TeamSession, WorkflowSession
12
12
  from agno.utils.events import (
13
13
  create_memory_update_completed_event,
14
14
  create_memory_update_started_event,
@@ -457,7 +457,12 @@ def scrub_history_messages_from_run_output(run_response: Union[RunOutput, TeamRu
457
457
 
458
458
  def get_run_output_util(
459
459
  entity: Union["Agent", "Team"], run_id: str, session_id: Optional[str] = None
460
- ) -> Optional[Union[RunOutput, TeamRunOutput]]:
460
+ ) -> Optional[
461
+ Union[
462
+ RunOutput,
463
+ TeamRunOutput,
464
+ ]
465
+ ]:
461
466
  """
462
467
  Get a RunOutput from the database.
463
468
 
@@ -473,13 +478,13 @@ def get_run_output_util(
473
478
  if session is not None:
474
479
  run_response = session.get_run(run_id=run_id)
475
480
  if run_response is not None:
476
- return run_response
481
+ return run_response # type: ignore
477
482
  else:
478
483
  log_warning(f"RunOutput {run_id} not found in Session {session_id}")
479
484
  elif entity.cached_session is not None:
480
485
  run_response = entity.cached_session.get_run(run_id=run_id)
481
486
  if run_response is not None:
482
- return run_response
487
+ return run_response # type: ignore
483
488
  else:
484
489
  log_warning(f"RunOutput {run_id} not found in Session {entity.cached_session.session_id}")
485
490
  return None
@@ -501,7 +506,7 @@ async def aget_run_output_util(
501
506
  if session is not None:
502
507
  run_response = session.get_run(run_id=run_id)
503
508
  if run_response is not None:
504
- return run_response
509
+ return run_response # type: ignore
505
510
  else:
506
511
  log_warning(f"RunOutput {run_id} not found in Session {session_id}")
507
512
  elif entity.cached_session is not None:
@@ -535,10 +540,10 @@ def get_last_run_output_util(
535
540
  for run_output in reversed(session.runs):
536
541
  if entity.__class__.__name__ == "Agent":
537
542
  if hasattr(run_output, "agent_id") and run_output.agent_id == entity.id:
538
- return run_output
543
+ return run_output # type: ignore
539
544
  elif entity.__class__.__name__ == "Team":
540
545
  if hasattr(run_output, "team_id") and run_output.team_id == entity.id:
541
- return run_output
546
+ return run_output # type: ignore
542
547
  else:
543
548
  log_warning(f"No run responses found in Session {session_id}")
544
549
 
@@ -550,10 +555,10 @@ def get_last_run_output_util(
550
555
  for run_output in reversed(entity.cached_session.runs):
551
556
  if entity.__class__.__name__ == "Agent":
552
557
  if hasattr(run_output, "agent_id") and run_output.agent_id == entity.id:
553
- return run_output
558
+ return run_output # type: ignore
554
559
  elif entity.__class__.__name__ == "Team":
555
560
  if hasattr(run_output, "team_id") and run_output.team_id == entity.id:
556
- return run_output
561
+ return run_output # type: ignore
557
562
  return None
558
563
 
559
564
 
@@ -575,10 +580,10 @@ async def aget_last_run_output_util(
575
580
  for run_output in reversed(session.runs):
576
581
  if entity.__class__.__name__ == "Agent":
577
582
  if hasattr(run_output, "agent_id") and run_output.agent_id == entity.id:
578
- return run_output
583
+ return run_output # type: ignore
579
584
  elif entity.__class__.__name__ == "Team":
580
585
  if hasattr(run_output, "team_id") and run_output.team_id == entity.id:
581
- return run_output
586
+ return run_output # type: ignore
582
587
  else:
583
588
  log_warning(f"No run responses found in Session {session_id}")
584
589
 
@@ -590,16 +595,16 @@ async def aget_last_run_output_util(
590
595
  for run_output in reversed(entity.cached_session.runs):
591
596
  if entity.__class__.__name__ == "Agent":
592
597
  if hasattr(run_output, "agent_id") and run_output.agent_id == entity.id:
593
- return run_output
598
+ return run_output # type: ignore
594
599
  elif entity.__class__.__name__ == "Team":
595
600
  if hasattr(run_output, "team_id") and run_output.team_id == entity.id:
596
- return run_output
601
+ return run_output # type: ignore
597
602
  return None
598
603
 
599
604
 
600
605
  def set_session_name_util(
601
606
  entity: Union["Agent", "Team"], session_id: str, autogenerate: bool = False, session_name: Optional[str] = None
602
- ) -> Union[AgentSession, TeamSession]:
607
+ ) -> Union[AgentSession, TeamSession, WorkflowSession]:
603
608
  """Set the session name and save to storage"""
604
609
  if entity._has_async_db():
605
610
  raise ValueError("Async database not supported for sync functions")
@@ -629,7 +634,7 @@ def set_session_name_util(
629
634
 
630
635
  async def aset_session_name_util(
631
636
  entity: Union["Agent", "Team"], session_id: str, autogenerate: bool = False, session_name: Optional[str] = None
632
- ) -> Union[AgentSession, TeamSession]:
637
+ ) -> Union[AgentSession, TeamSession, WorkflowSession]:
633
638
  """Set the session name and save to storage"""
634
639
  session = await entity.aget_session(session_id=session_id) # type: ignore
635
640
 
@@ -796,7 +801,7 @@ def get_chat_history_util(entity: Union["Agent", "Team"], session_id: str) -> Li
796
801
  if session is None:
797
802
  raise Exception("Session not found")
798
803
 
799
- return session.get_chat_history()
804
+ return session.get_chat_history() # type: ignore
800
805
 
801
806
 
802
807
  async def aget_chat_history_util(entity: Union["Agent", "Team"], session_id: str) -> List[Message]:
@@ -812,4 +817,4 @@ async def aget_chat_history_util(entity: Union["Agent", "Team"], session_id: str
812
817
  if session is None:
813
818
  raise Exception("Session not found")
814
819
 
815
- return session.get_chat_history()
820
+ return session.get_chat_history() # type: ignore
agno/utils/gemini.py CHANGED
@@ -225,12 +225,13 @@ def convert_schema(
225
225
  if schema_type is None or schema_type == "null":
226
226
  return None
227
227
  description = schema_dict.get("description", None)
228
+ title = schema_dict.get("title", None)
228
229
  default = schema_dict.get("default", None)
229
230
 
230
231
  # Handle enum types
231
232
  if "enum" in schema_dict:
232
233
  enum_values = schema_dict["enum"]
233
- return Schema(type=GeminiType.STRING, enum=enum_values, description=description, default=default)
234
+ return Schema(type=GeminiType.STRING, enum=enum_values, description=description, default=default, title=title)
234
235
 
235
236
  if schema_type == "object":
236
237
  # Handle regular objects with properties
@@ -250,6 +251,10 @@ def convert_schema(
250
251
  if is_nullable:
251
252
  converted_schema.nullable = True
252
253
  properties[key] = converted_schema
254
+ else:
255
+ properties[key] = Schema(
256
+ title=prop_def.get("title", None), description=prop_def.get("description", None)
257
+ )
253
258
 
254
259
  required = schema_dict.get("required", [])
255
260
 
@@ -260,9 +265,10 @@ def convert_schema(
260
265
  required=required,
261
266
  description=description,
262
267
  default=default,
268
+ title=title,
263
269
  )
264
270
  else:
265
- return Schema(type=GeminiType.OBJECT, description=description, default=default)
271
+ return Schema(type=GeminiType.OBJECT, description=description, default=default, title=title)
266
272
 
267
273
  # Handle Dict types (objects with additionalProperties but no properties)
268
274
  elif "additionalProperties" in schema_dict:
@@ -305,11 +311,11 @@ def convert_schema(
305
311
  )
306
312
  else:
307
313
  # additionalProperties is false or true
308
- return Schema(type=GeminiType.OBJECT, description=description, default=default)
314
+ return Schema(type=GeminiType.OBJECT, description=description, default=default, title=title)
309
315
 
310
316
  # Handle empty objects
311
317
  else:
312
- return Schema(type=GeminiType.OBJECT, description=description, default=default)
318
+ return Schema(type=GeminiType.OBJECT, description=description, default=default, title=title)
313
319
 
314
320
  elif schema_type == "array" and "items" in schema_dict:
315
321
  if not schema_dict["items"]: # Handle empty {}
@@ -325,6 +331,7 @@ def convert_schema(
325
331
  items=items,
326
332
  min_items=min_items,
327
333
  max_items=max_items,
334
+ title=title,
328
335
  )
329
336
 
330
337
  elif schema_type == "string":
@@ -332,6 +339,7 @@ def convert_schema(
332
339
  "type": GeminiType.STRING,
333
340
  "description": description,
334
341
  "default": default,
342
+ "title": title,
335
343
  }
336
344
  if "format" in schema_dict:
337
345
  schema_kwargs["format"] = schema_dict["format"]
@@ -342,6 +350,7 @@ def convert_schema(
342
350
  "type": schema_type.upper(),
343
351
  "description": description,
344
352
  "default": default,
353
+ "title": title,
345
354
  }
346
355
  if "maximum" in schema_dict:
347
356
  schema_kwargs["maximum"] = schema_dict["maximum"]
@@ -373,6 +382,7 @@ def convert_schema(
373
382
  any_of=any_of,
374
383
  description=description,
375
384
  default=default,
385
+ title=title,
376
386
  )
377
387
  else:
378
388
  if isinstance(schema_type, list):
@@ -384,7 +394,7 @@ def convert_schema(
384
394
  # Only convert to uppercase if schema_type is not empty
385
395
  if schema_type:
386
396
  schema_type = schema_type.upper()
387
- return Schema(type=schema_type, description=description, default=default)
397
+ return Schema(type=schema_type, description=description, default=default, title=title)
388
398
  else:
389
399
  # If we get here with an empty type and no other handlers matched,
390
400
  # something is wrong with the schema
agno/utils/knowledge.py CHANGED
@@ -1,10 +1,11 @@
1
- from typing import Any, Dict, Optional
1
+ from typing import Any, Dict, List, Optional, Union
2
2
 
3
+ from agno.filters import FilterExpr
3
4
  from agno.utils.log import log_info
4
5
 
5
6
 
6
7
  def get_agentic_or_user_search_filters(
7
- filters: Optional[Dict[str, Any]], effective_filters: Optional[Dict[str, Any]]
8
+ filters: Optional[Dict[str, Any]], effective_filters: Optional[Union[Dict[str, Any], List[FilterExpr]]]
8
9
  ) -> Dict[str, Any]:
9
10
  """Helper function to determine the final filters to use for the search.
10
11
 
@@ -15,7 +16,7 @@ def get_agentic_or_user_search_filters(
15
16
  Returns:
16
17
  Dict[str, Any]: The final filters to use for the search.
17
18
  """
18
- search_filters = {}
19
+ search_filters = None
19
20
 
20
21
  # If agentic filters exist and manual filters (passed by user) do not, use agentic filters
21
22
  if filters and not effective_filters:
@@ -23,7 +24,13 @@ def get_agentic_or_user_search_filters(
23
24
 
24
25
  # If both agentic filters exist and manual filters (passed by user) exist, use manual filters (give priority to user and override)
25
26
  if filters and effective_filters:
26
- search_filters = effective_filters
27
+ if isinstance(effective_filters, dict):
28
+ search_filters = effective_filters
29
+ elif isinstance(effective_filters, list):
30
+ # If effective_filters is a list (likely List[FilterExpr]), convert both filters and effective_filters to a dict if possible, otherwise raise
31
+ raise ValueError(
32
+ "Merging dict and list of filters is not supported; effective_filters should be a dict for search compatibility."
33
+ )
27
34
 
28
35
  log_info(f"Filters used by Agent: {search_filters}")
29
- return search_filters
36
+ return search_filters or {}
agno/utils/log.py CHANGED
@@ -108,6 +108,7 @@ workflow_logger: AgnoLogger = build_logger(WORKFLOW_LOGGER_NAME, source_type="wo
108
108
  # Set the default logger to the agent logger
109
109
  logger: AgnoLogger = agent_logger
110
110
 
111
+
111
112
  debug_on: bool = False
112
113
  debug_level: Literal[1, 2] = 1
113
114
 
@@ -68,6 +68,8 @@ def _format_image_for_message(image: Image) -> Optional[Dict[str, Any]]:
68
68
  }
69
69
 
70
70
  try:
71
+ img_type = None
72
+
71
73
  # Case 0: Image is an Anthropic uploaded file
72
74
  if image.content is not None and hasattr(image.content, "id"):
73
75
  content_bytes = image.content
@@ -80,7 +82,6 @@ def _format_image_for_message(image: Image) -> Optional[Dict[str, Any]]:
80
82
  import os
81
83
  from urllib.parse import urlparse
82
84
 
83
- img_type = None
84
85
  if image.url:
85
86
  parsed_url = urlparse(image.url)
86
87
  _, ext = os.path.splitext(parsed_url.path)
@@ -10,6 +10,7 @@ from rich.markdown import Markdown
10
10
  from rich.status import Status
11
11
  from rich.text import Text
12
12
 
13
+ from agno.filters import FilterExpr
13
14
  from agno.media import Audio, File, Image, Video
14
15
  from agno.models.message import Message
15
16
  from agno.reasoning.step import ReasoningStep
@@ -35,7 +36,7 @@ def print_response_stream(
35
36
  files: Optional[Sequence[File]] = None,
36
37
  stream_events: bool = False,
37
38
  stream_intermediate_steps: bool = False,
38
- knowledge_filters: Optional[Dict[str, Any]] = None,
39
+ knowledge_filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None,
39
40
  debug_mode: Optional[bool] = None,
40
41
  markdown: bool = False,
41
42
  show_message: bool = True,
@@ -227,7 +228,7 @@ async def aprint_response_stream(
227
228
  files: Optional[Sequence[File]] = None,
228
229
  stream_events: bool = False,
229
230
  stream_intermediate_steps: bool = False,
230
- knowledge_filters: Optional[Dict[str, Any]] = None,
231
+ knowledge_filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None,
231
232
  debug_mode: Optional[bool] = None,
232
233
  markdown: bool = False,
233
234
  show_message: bool = True,
@@ -505,7 +506,7 @@ def print_response(
505
506
  images: Optional[Sequence[Image]] = None,
506
507
  videos: Optional[Sequence[Video]] = None,
507
508
  files: Optional[Sequence[File]] = None,
508
- knowledge_filters: Optional[Dict[str, Any]] = None,
509
+ knowledge_filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None,
509
510
  debug_mode: Optional[bool] = None,
510
511
  markdown: bool = False,
511
512
  show_message: bool = True,
@@ -621,7 +622,7 @@ async def aprint_response(
621
622
  images: Optional[Sequence[Image]] = None,
622
623
  videos: Optional[Sequence[Video]] = None,
623
624
  files: Optional[Sequence[File]] = None,
624
- knowledge_filters: Optional[Dict[str, Any]] = None,
625
+ knowledge_filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None,
625
626
  debug_mode: Optional[bool] = None,
626
627
  markdown: bool = False,
627
628
  show_message: bool = True,