agno-2.0.0rc2-py3-none-any.whl → agno-2.3.0-py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (331)
  1. agno/agent/agent.py +6009 -2874
  2. agno/api/api.py +2 -0
  3. agno/api/os.py +1 -1
  4. agno/culture/__init__.py +3 -0
  5. agno/culture/manager.py +956 -0
  6. agno/db/async_postgres/__init__.py +3 -0
  7. agno/db/base.py +385 -6
  8. agno/db/dynamo/dynamo.py +388 -81
  9. agno/db/dynamo/schemas.py +47 -10
  10. agno/db/dynamo/utils.py +63 -4
  11. agno/db/firestore/firestore.py +435 -64
  12. agno/db/firestore/schemas.py +11 -0
  13. agno/db/firestore/utils.py +102 -4
  14. agno/db/gcs_json/gcs_json_db.py +384 -42
  15. agno/db/gcs_json/utils.py +60 -26
  16. agno/db/in_memory/in_memory_db.py +351 -66
  17. agno/db/in_memory/utils.py +60 -2
  18. agno/db/json/json_db.py +339 -48
  19. agno/db/json/utils.py +60 -26
  20. agno/db/migrations/manager.py +199 -0
  21. agno/db/migrations/v1_to_v2.py +510 -37
  22. agno/db/migrations/versions/__init__.py +0 -0
  23. agno/db/migrations/versions/v2_3_0.py +938 -0
  24. agno/db/mongo/__init__.py +15 -1
  25. agno/db/mongo/async_mongo.py +2036 -0
  26. agno/db/mongo/mongo.py +653 -76
  27. agno/db/mongo/schemas.py +13 -0
  28. agno/db/mongo/utils.py +80 -8
  29. agno/db/mysql/mysql.py +687 -25
  30. agno/db/mysql/schemas.py +61 -37
  31. agno/db/mysql/utils.py +60 -2
  32. agno/db/postgres/__init__.py +2 -1
  33. agno/db/postgres/async_postgres.py +2001 -0
  34. agno/db/postgres/postgres.py +676 -57
  35. agno/db/postgres/schemas.py +43 -18
  36. agno/db/postgres/utils.py +164 -2
  37. agno/db/redis/redis.py +344 -38
  38. agno/db/redis/schemas.py +18 -0
  39. agno/db/redis/utils.py +60 -2
  40. agno/db/schemas/__init__.py +2 -1
  41. agno/db/schemas/culture.py +120 -0
  42. agno/db/schemas/memory.py +13 -0
  43. agno/db/singlestore/schemas.py +26 -1
  44. agno/db/singlestore/singlestore.py +687 -53
  45. agno/db/singlestore/utils.py +60 -2
  46. agno/db/sqlite/__init__.py +2 -1
  47. agno/db/sqlite/async_sqlite.py +2371 -0
  48. agno/db/sqlite/schemas.py +24 -0
  49. agno/db/sqlite/sqlite.py +774 -85
  50. agno/db/sqlite/utils.py +168 -5
  51. agno/db/surrealdb/__init__.py +3 -0
  52. agno/db/surrealdb/metrics.py +292 -0
  53. agno/db/surrealdb/models.py +309 -0
  54. agno/db/surrealdb/queries.py +71 -0
  55. agno/db/surrealdb/surrealdb.py +1361 -0
  56. agno/db/surrealdb/utils.py +147 -0
  57. agno/db/utils.py +50 -22
  58. agno/eval/accuracy.py +50 -43
  59. agno/eval/performance.py +6 -3
  60. agno/eval/reliability.py +6 -3
  61. agno/eval/utils.py +33 -16
  62. agno/exceptions.py +68 -1
  63. agno/filters.py +354 -0
  64. agno/guardrails/__init__.py +6 -0
  65. agno/guardrails/base.py +19 -0
  66. agno/guardrails/openai.py +144 -0
  67. agno/guardrails/pii.py +94 -0
  68. agno/guardrails/prompt_injection.py +52 -0
  69. agno/integrations/discord/client.py +1 -0
  70. agno/knowledge/chunking/agentic.py +13 -10
  71. agno/knowledge/chunking/fixed.py +1 -1
  72. agno/knowledge/chunking/semantic.py +40 -8
  73. agno/knowledge/chunking/strategy.py +59 -15
  74. agno/knowledge/embedder/aws_bedrock.py +9 -4
  75. agno/knowledge/embedder/azure_openai.py +54 -0
  76. agno/knowledge/embedder/base.py +2 -0
  77. agno/knowledge/embedder/cohere.py +184 -5
  78. agno/knowledge/embedder/fastembed.py +1 -1
  79. agno/knowledge/embedder/google.py +79 -1
  80. agno/knowledge/embedder/huggingface.py +9 -4
  81. agno/knowledge/embedder/jina.py +63 -0
  82. agno/knowledge/embedder/mistral.py +78 -11
  83. agno/knowledge/embedder/nebius.py +1 -1
  84. agno/knowledge/embedder/ollama.py +13 -0
  85. agno/knowledge/embedder/openai.py +37 -65
  86. agno/knowledge/embedder/sentence_transformer.py +8 -4
  87. agno/knowledge/embedder/vllm.py +262 -0
  88. agno/knowledge/embedder/voyageai.py +69 -16
  89. agno/knowledge/knowledge.py +595 -187
  90. agno/knowledge/reader/base.py +9 -2
  91. agno/knowledge/reader/csv_reader.py +8 -10
  92. agno/knowledge/reader/docx_reader.py +5 -6
  93. agno/knowledge/reader/field_labeled_csv_reader.py +290 -0
  94. agno/knowledge/reader/json_reader.py +6 -5
  95. agno/knowledge/reader/markdown_reader.py +13 -13
  96. agno/knowledge/reader/pdf_reader.py +43 -68
  97. agno/knowledge/reader/pptx_reader.py +101 -0
  98. agno/knowledge/reader/reader_factory.py +51 -6
  99. agno/knowledge/reader/s3_reader.py +3 -15
  100. agno/knowledge/reader/tavily_reader.py +194 -0
  101. agno/knowledge/reader/text_reader.py +13 -13
  102. agno/knowledge/reader/web_search_reader.py +2 -43
  103. agno/knowledge/reader/website_reader.py +43 -25
  104. agno/knowledge/reranker/__init__.py +3 -0
  105. agno/knowledge/types.py +9 -0
  106. agno/knowledge/utils.py +20 -0
  107. agno/media.py +339 -266
  108. agno/memory/manager.py +336 -82
  109. agno/models/aimlapi/aimlapi.py +2 -2
  110. agno/models/anthropic/claude.py +183 -37
  111. agno/models/aws/bedrock.py +52 -112
  112. agno/models/aws/claude.py +33 -1
  113. agno/models/azure/ai_foundry.py +33 -15
  114. agno/models/azure/openai_chat.py +25 -8
  115. agno/models/base.py +1011 -566
  116. agno/models/cerebras/cerebras.py +19 -13
  117. agno/models/cerebras/cerebras_openai.py +8 -5
  118. agno/models/cohere/chat.py +27 -1
  119. agno/models/cometapi/__init__.py +5 -0
  120. agno/models/cometapi/cometapi.py +57 -0
  121. agno/models/dashscope/dashscope.py +1 -0
  122. agno/models/deepinfra/deepinfra.py +2 -2
  123. agno/models/deepseek/deepseek.py +2 -2
  124. agno/models/fireworks/fireworks.py +2 -2
  125. agno/models/google/gemini.py +110 -37
  126. agno/models/groq/groq.py +28 -11
  127. agno/models/huggingface/huggingface.py +2 -1
  128. agno/models/internlm/internlm.py +2 -2
  129. agno/models/langdb/langdb.py +4 -4
  130. agno/models/litellm/chat.py +18 -1
  131. agno/models/litellm/litellm_openai.py +2 -2
  132. agno/models/llama_cpp/__init__.py +5 -0
  133. agno/models/llama_cpp/llama_cpp.py +22 -0
  134. agno/models/message.py +143 -4
  135. agno/models/meta/llama.py +27 -10
  136. agno/models/meta/llama_openai.py +5 -17
  137. agno/models/nebius/nebius.py +6 -6
  138. agno/models/nexus/__init__.py +3 -0
  139. agno/models/nexus/nexus.py +22 -0
  140. agno/models/nvidia/nvidia.py +2 -2
  141. agno/models/ollama/chat.py +60 -6
  142. agno/models/openai/chat.py +102 -43
  143. agno/models/openai/responses.py +103 -106
  144. agno/models/openrouter/openrouter.py +41 -3
  145. agno/models/perplexity/perplexity.py +4 -5
  146. agno/models/portkey/portkey.py +3 -3
  147. agno/models/requesty/__init__.py +5 -0
  148. agno/models/requesty/requesty.py +52 -0
  149. agno/models/response.py +81 -5
  150. agno/models/sambanova/sambanova.py +2 -2
  151. agno/models/siliconflow/__init__.py +5 -0
  152. agno/models/siliconflow/siliconflow.py +25 -0
  153. agno/models/together/together.py +2 -2
  154. agno/models/utils.py +254 -8
  155. agno/models/vercel/v0.py +2 -2
  156. agno/models/vertexai/__init__.py +0 -0
  157. agno/models/vertexai/claude.py +96 -0
  158. agno/models/vllm/vllm.py +1 -0
  159. agno/models/xai/xai.py +3 -2
  160. agno/os/app.py +543 -175
  161. agno/os/auth.py +24 -14
  162. agno/os/config.py +1 -0
  163. agno/os/interfaces/__init__.py +1 -0
  164. agno/os/interfaces/a2a/__init__.py +3 -0
  165. agno/os/interfaces/a2a/a2a.py +42 -0
  166. agno/os/interfaces/a2a/router.py +250 -0
  167. agno/os/interfaces/a2a/utils.py +924 -0
  168. agno/os/interfaces/agui/agui.py +23 -7
  169. agno/os/interfaces/agui/router.py +27 -3
  170. agno/os/interfaces/agui/utils.py +242 -142
  171. agno/os/interfaces/base.py +6 -2
  172. agno/os/interfaces/slack/router.py +81 -23
  173. agno/os/interfaces/slack/slack.py +29 -14
  174. agno/os/interfaces/whatsapp/router.py +11 -4
  175. agno/os/interfaces/whatsapp/whatsapp.py +14 -7
  176. agno/os/mcp.py +111 -54
  177. agno/os/middleware/__init__.py +7 -0
  178. agno/os/middleware/jwt.py +233 -0
  179. agno/os/router.py +556 -139
  180. agno/os/routers/evals/evals.py +71 -34
  181. agno/os/routers/evals/schemas.py +31 -31
  182. agno/os/routers/evals/utils.py +6 -5
  183. agno/os/routers/health.py +31 -0
  184. agno/os/routers/home.py +52 -0
  185. agno/os/routers/knowledge/knowledge.py +185 -38
  186. agno/os/routers/knowledge/schemas.py +82 -22
  187. agno/os/routers/memory/memory.py +158 -53
  188. agno/os/routers/memory/schemas.py +20 -16
  189. agno/os/routers/metrics/metrics.py +20 -8
  190. agno/os/routers/metrics/schemas.py +16 -16
  191. agno/os/routers/session/session.py +499 -38
  192. agno/os/schema.py +308 -198
  193. agno/os/utils.py +401 -41
  194. agno/reasoning/anthropic.py +80 -0
  195. agno/reasoning/azure_ai_foundry.py +2 -2
  196. agno/reasoning/deepseek.py +2 -2
  197. agno/reasoning/default.py +3 -1
  198. agno/reasoning/gemini.py +73 -0
  199. agno/reasoning/groq.py +2 -2
  200. agno/reasoning/ollama.py +2 -2
  201. agno/reasoning/openai.py +7 -2
  202. agno/reasoning/vertexai.py +76 -0
  203. agno/run/__init__.py +6 -0
  204. agno/run/agent.py +266 -112
  205. agno/run/base.py +53 -24
  206. agno/run/team.py +252 -111
  207. agno/run/workflow.py +156 -45
  208. agno/session/agent.py +105 -89
  209. agno/session/summary.py +65 -25
  210. agno/session/team.py +176 -96
  211. agno/session/workflow.py +406 -40
  212. agno/team/team.py +3854 -1692
  213. agno/tools/brightdata.py +3 -3
  214. agno/tools/cartesia.py +3 -5
  215. agno/tools/dalle.py +9 -8
  216. agno/tools/decorator.py +4 -2
  217. agno/tools/desi_vocal.py +2 -2
  218. agno/tools/duckduckgo.py +15 -11
  219. agno/tools/e2b.py +20 -13
  220. agno/tools/eleven_labs.py +26 -28
  221. agno/tools/exa.py +21 -16
  222. agno/tools/fal.py +4 -4
  223. agno/tools/file.py +153 -23
  224. agno/tools/file_generation.py +350 -0
  225. agno/tools/firecrawl.py +4 -4
  226. agno/tools/function.py +257 -37
  227. agno/tools/giphy.py +2 -2
  228. agno/tools/gmail.py +238 -14
  229. agno/tools/google_drive.py +270 -0
  230. agno/tools/googlecalendar.py +36 -8
  231. agno/tools/googlesheets.py +20 -5
  232. agno/tools/jira.py +20 -0
  233. agno/tools/knowledge.py +3 -3
  234. agno/tools/lumalab.py +3 -3
  235. agno/tools/mcp/__init__.py +10 -0
  236. agno/tools/mcp/mcp.py +331 -0
  237. agno/tools/mcp/multi_mcp.py +347 -0
  238. agno/tools/mcp/params.py +24 -0
  239. agno/tools/mcp_toolbox.py +284 -0
  240. agno/tools/mem0.py +11 -17
  241. agno/tools/memori.py +1 -53
  242. agno/tools/memory.py +419 -0
  243. agno/tools/models/azure_openai.py +2 -2
  244. agno/tools/models/gemini.py +3 -3
  245. agno/tools/models/groq.py +3 -5
  246. agno/tools/models/nebius.py +7 -7
  247. agno/tools/models_labs.py +25 -15
  248. agno/tools/notion.py +204 -0
  249. agno/tools/openai.py +4 -9
  250. agno/tools/opencv.py +3 -3
  251. agno/tools/parallel.py +314 -0
  252. agno/tools/replicate.py +7 -7
  253. agno/tools/scrapegraph.py +58 -31
  254. agno/tools/searxng.py +2 -2
  255. agno/tools/serper.py +2 -2
  256. agno/tools/slack.py +18 -3
  257. agno/tools/spider.py +2 -2
  258. agno/tools/tavily.py +146 -0
  259. agno/tools/whatsapp.py +1 -1
  260. agno/tools/workflow.py +278 -0
  261. agno/tools/yfinance.py +12 -11
  262. agno/utils/agent.py +820 -0
  263. agno/utils/audio.py +27 -0
  264. agno/utils/common.py +90 -1
  265. agno/utils/events.py +222 -7
  266. agno/utils/gemini.py +181 -23
  267. agno/utils/hooks.py +57 -0
  268. agno/utils/http.py +111 -0
  269. agno/utils/knowledge.py +12 -5
  270. agno/utils/log.py +1 -0
  271. agno/utils/mcp.py +95 -5
  272. agno/utils/media.py +188 -10
  273. agno/utils/merge_dict.py +22 -1
  274. agno/utils/message.py +60 -0
  275. agno/utils/models/claude.py +40 -11
  276. agno/utils/models/cohere.py +1 -1
  277. agno/utils/models/watsonx.py +1 -1
  278. agno/utils/openai.py +1 -1
  279. agno/utils/print_response/agent.py +105 -21
  280. agno/utils/print_response/team.py +103 -38
  281. agno/utils/print_response/workflow.py +251 -34
  282. agno/utils/reasoning.py +22 -1
  283. agno/utils/serialize.py +32 -0
  284. agno/utils/streamlit.py +16 -10
  285. agno/utils/string.py +41 -0
  286. agno/utils/team.py +98 -9
  287. agno/utils/tools.py +1 -1
  288. agno/vectordb/base.py +23 -4
  289. agno/vectordb/cassandra/cassandra.py +65 -9
  290. agno/vectordb/chroma/chromadb.py +182 -38
  291. agno/vectordb/clickhouse/clickhousedb.py +64 -11
  292. agno/vectordb/couchbase/couchbase.py +105 -10
  293. agno/vectordb/lancedb/lance_db.py +183 -135
  294. agno/vectordb/langchaindb/langchaindb.py +25 -7
  295. agno/vectordb/lightrag/lightrag.py +17 -3
  296. agno/vectordb/llamaindex/__init__.py +3 -0
  297. agno/vectordb/llamaindex/llamaindexdb.py +46 -7
  298. agno/vectordb/milvus/milvus.py +126 -9
  299. agno/vectordb/mongodb/__init__.py +7 -1
  300. agno/vectordb/mongodb/mongodb.py +112 -7
  301. agno/vectordb/pgvector/pgvector.py +142 -21
  302. agno/vectordb/pineconedb/pineconedb.py +80 -8
  303. agno/vectordb/qdrant/qdrant.py +125 -39
  304. agno/vectordb/redis/__init__.py +9 -0
  305. agno/vectordb/redis/redisdb.py +694 -0
  306. agno/vectordb/singlestore/singlestore.py +111 -25
  307. agno/vectordb/surrealdb/surrealdb.py +31 -5
  308. agno/vectordb/upstashdb/upstashdb.py +76 -8
  309. agno/vectordb/weaviate/weaviate.py +86 -15
  310. agno/workflow/__init__.py +2 -0
  311. agno/workflow/agent.py +299 -0
  312. agno/workflow/condition.py +112 -18
  313. agno/workflow/loop.py +69 -10
  314. agno/workflow/parallel.py +266 -118
  315. agno/workflow/router.py +110 -17
  316. agno/workflow/step.py +645 -136
  317. agno/workflow/steps.py +65 -6
  318. agno/workflow/types.py +71 -33
  319. agno/workflow/workflow.py +2113 -300
  320. agno-2.3.0.dist-info/METADATA +618 -0
  321. agno-2.3.0.dist-info/RECORD +577 -0
  322. agno-2.3.0.dist-info/licenses/LICENSE +201 -0
  323. agno/knowledge/reader/url_reader.py +0 -128
  324. agno/tools/googlesearch.py +0 -98
  325. agno/tools/mcp.py +0 -610
  326. agno/utils/models/aws_claude.py +0 -170
  327. agno-2.0.0rc2.dist-info/METADATA +0 -355
  328. agno-2.0.0rc2.dist-info/RECORD +0 -515
  329. agno-2.0.0rc2.dist-info/licenses/LICENSE +0 -375
  330. {agno-2.0.0rc2.dist-info → agno-2.3.0.dist-info}/WHEEL +0 -0
  331. {agno-2.0.0rc2.dist-info → agno-2.3.0.dist-info}/top_level.txt +0 -0
agno/tools/parallel.py ADDED
@@ -0,0 +1,314 @@
+ import json
+ from os import getenv
+ from typing import Any, Dict, List, Optional
+
+ from agno.tools import Toolkit
+ from agno.utils.log import log_error
+
+ try:
+     from parallel import Parallel as ParallelClient
+ except ImportError:
+     raise ImportError("`parallel-web` not installed. Please install using `pip install parallel-web`")
+
+
+ class CustomJSONEncoder(json.JSONEncoder):
+     """Custom JSON encoder that handles non-serializable types by converting them to strings."""
+
+     def default(self, obj):
+         try:
+             return super().default(obj)
+         except TypeError:
+             return str(obj)
+
+
+ class ParallelTools(Toolkit):
+     """
+     ParallelTools provides access to Parallel's web search and extraction APIs.
+
+     Parallel offers powerful APIs optimized for AI agents:
+     - Search API: AI-optimized web search that returns relevant excerpts tailored for LLMs
+     - Extract API: Extract content from specific URLs in clean markdown format, handling JavaScript-heavy pages and PDFs
+
+     Args:
+         api_key (Optional[str]): Parallel API key. If not provided, will use PARALLEL_API_KEY environment variable.
+         enable_search (bool): Enable Search API functionality. Default is True.
+         enable_extract (bool): Enable Extract API functionality. Default is True.
+         all (bool): Enable all tools. Overrides individual flags when True. Default is False.
+         max_results (int): Default maximum number of results for search operations. Default is 10.
+         max_chars_per_result (int): Default maximum characters per result for search operations. Default is 10000.
+         beta_version (str): Beta API version header. Default is "search-extract-2025-10-10".
+         mode (Optional[str]): Default search mode. Options: "one-shot" or "agentic". Default is None.
+         include_domains (Optional[List[str]]): Default domains to restrict results to. Default is None.
+         exclude_domains (Optional[List[str]]): Default domains to exclude from results. Default is None.
+         max_age_seconds (Optional[int]): Default cache age threshold (minimum 600). Default is None.
+         timeout_seconds (Optional[float]): Default timeout for content retrieval. Default is None.
+         disable_cache_fallback (Optional[bool]): Default cache fallback behavior. Default is None.
+     """
+
+     def __init__(
+         self,
+         api_key: Optional[str] = None,
+         enable_search: bool = True,
+         enable_extract: bool = True,
+         all: bool = False,
+         max_results: int = 10,
+         max_chars_per_result: int = 10000,
+         beta_version: str = "search-extract-2025-10-10",
+         mode: Optional[str] = None,
+         include_domains: Optional[List[str]] = None,
+         exclude_domains: Optional[List[str]] = None,
+         max_age_seconds: Optional[int] = None,
+         timeout_seconds: Optional[float] = None,
+         disable_cache_fallback: Optional[bool] = None,
+         **kwargs,
+     ):
+         self.api_key: Optional[str] = api_key or getenv("PARALLEL_API_KEY")
+         if not self.api_key:
+             log_error("PARALLEL_API_KEY not set. Please set the PARALLEL_API_KEY environment variable.")
+
+         self.max_results = max_results
+         self.max_chars_per_result = max_chars_per_result
+         self.beta_version = beta_version
+         self.mode = mode
+         self.include_domains = include_domains
+         self.exclude_domains = exclude_domains
+         self.max_age_seconds = max_age_seconds
+         self.timeout_seconds = timeout_seconds
+         self.disable_cache_fallback = disable_cache_fallback
+
+         self.parallel_client = ParallelClient(
+             api_key=self.api_key, default_headers={"parallel-beta": self.beta_version}
+         )
+
+         tools: List[Any] = []
+         if all or enable_search:
+             tools.append(self.parallel_search)
+         if all or enable_extract:
+             tools.append(self.parallel_extract)
+
+         super().__init__(name="parallel_tools", tools=tools, **kwargs)
+
+     def parallel_search(
+         self,
+         objective: Optional[str] = None,
+         search_queries: Optional[List[str]] = None,
+         max_results: Optional[int] = None,
+         max_chars_per_result: Optional[int] = None,
+     ) -> str:
+         """Use this function to search the web using Parallel's Search API with a natural language objective.
+         You must provide at least one of objective or search_queries.
+
+         Args:
+             objective (Optional[str]): Natural-language description of what the web search is trying to find.
+             search_queries (Optional[List[str]]): Traditional keyword queries with optional search operators.
+             max_results (Optional[int]): Upper bound on results returned. Overrides constructor default.
+             max_chars_per_result (Optional[int]): Upper bound on total characters per url for excerpts.
+
+         Returns:
+             str: A JSON formatted string containing the search results with URLs, titles, publish dates, and relevant excerpts.
+         """
+         try:
+             if not objective and not search_queries:
+                 return json.dumps({"error": "Please provide at least one of: objective or search_queries"}, indent=2)
+
+             # Use instance defaults if not provided
+             final_max_results = max_results if max_results is not None else self.max_results
+
+             search_params: Dict[str, Any] = {
+                 "max_results": final_max_results,
+             }
+
+             # Add objective if provided
+             if objective:
+                 search_params["objective"] = objective
+
+             # Add search_queries if provided
+             if search_queries:
+                 search_params["search_queries"] = search_queries
+
+             # Add mode from constructor default
+             if self.mode:
+                 search_params["mode"] = self.mode
+
+             # Add excerpts configuration
+             excerpts_config: Dict[str, Any] = {}
+             final_max_chars = max_chars_per_result if max_chars_per_result is not None else self.max_chars_per_result
+             if final_max_chars is not None:
+                 excerpts_config["max_chars_per_result"] = final_max_chars
+
+             if excerpts_config:
+                 search_params["excerpts"] = excerpts_config
+
+             # Add source_policy from constructor defaults
+             source_policy: Dict[str, Any] = {}
+             if self.include_domains:
+                 source_policy["include_domains"] = self.include_domains
+             if self.exclude_domains:
+                 source_policy["exclude_domains"] = self.exclude_domains
+
+             if source_policy:
+                 search_params["source_policy"] = source_policy
+
+             # Add fetch_policy from constructor defaults
+             fetch_policy: Dict[str, Any] = {}
+             if self.max_age_seconds is not None:
+                 fetch_policy["max_age_seconds"] = self.max_age_seconds
+             if self.timeout_seconds is not None:
+                 fetch_policy["timeout_seconds"] = self.timeout_seconds
+             if self.disable_cache_fallback is not None:
+                 fetch_policy["disable_cache_fallback"] = self.disable_cache_fallback
+
+             if fetch_policy:
+                 search_params["fetch_policy"] = fetch_policy
+
+             search_result = self.parallel_client.beta.search(**search_params)
+
+             # Use model_dump() if available, otherwise convert to dict
+             try:
+                 if hasattr(search_result, "model_dump"):
+                     return json.dumps(search_result.model_dump(), cls=CustomJSONEncoder)
+             except Exception:
+                 pass
+
+             # Manually format the results
+             formatted_results: Dict[str, Any] = {
+                 "search_id": getattr(search_result, "search_id", ""),
+                 "results": [],
+             }
+
+             if hasattr(search_result, "results") and search_result.results:
+                 results_list: List[Dict[str, Any]] = []
+                 for result in search_result.results:
+                     formatted_result: Dict[str, Any] = {
+                         "title": getattr(result, "title", ""),
+                         "url": getattr(result, "url", ""),
+                         "publish_date": getattr(result, "publish_date", ""),
+                         "excerpt": getattr(result, "excerpt", ""),
+                     }
+                     results_list.append(formatted_result)
+                 formatted_results["results"] = results_list
+
+             if hasattr(search_result, "warnings"):
+                 formatted_results["warnings"] = search_result.warnings
+
+             if hasattr(search_result, "usage"):
+                 formatted_results["usage"] = search_result.usage
+
+             return json.dumps(formatted_results, cls=CustomJSONEncoder, indent=2)
+
+         except Exception as e:
+             log_error(f"Error searching Parallel for objective '{objective}': {e}")
+             return json.dumps({"error": f"Search failed: {str(e)}"}, indent=2)
+
+     def parallel_extract(
+         self,
+         urls: List[str],
+         objective: Optional[str] = None,
+         search_queries: Optional[List[str]] = None,
+         excerpts: bool = True,
+         max_chars_per_excerpt: Optional[int] = None,
+         full_content: bool = False,
+         max_chars_for_full_content: Optional[int] = None,
+     ) -> str:
+         """Use this function to extract content from specific URLs using Parallel's Extract API.
+
+         Args:
+             urls (List[str]): List of public URLs to extract content from.
+             objective (Optional[str]): Search focus to guide content extraction.
+             search_queries (Optional[List[str]]): Keywords for targeting relevant content.
+             excerpts (bool): Include relevant text snippets.
+             max_chars_per_excerpt (Optional[int]): Upper bound on total characters per url. Only used when excerpts is True.
+             full_content (bool): Include complete page text.
+             max_chars_for_full_content (Optional[int]): Limit on characters per url. Only used when full_content is True.
+
+         Returns:
+             str: A JSON formatted string containing extracted content with titles, publish dates, excerpts and/or full content.
+         """
+         try:
+             if not urls:
+                 return json.dumps({"error": "Please provide at least one URL to extract"}, indent=2)
+
+             extract_params: Dict[str, Any] = {
+                 "urls": urls,
+             }
+
+             # Add objective if provided
+             if objective:
+                 extract_params["objective"] = objective
+
+             # Add search_queries if provided
+             if search_queries:
+                 extract_params["search_queries"] = search_queries
+
+             # Add excerpts configuration
+             if excerpts and max_chars_per_excerpt is not None:
+                 extract_params["excerpts"] = {"max_chars_per_result": max_chars_per_excerpt}
+             else:
+                 extract_params["excerpts"] = excerpts
+
+             # Add full_content configuration
+             if full_content and max_chars_for_full_content is not None:
+                 extract_params["full_content"] = {"max_chars_per_result": max_chars_for_full_content}
+             else:
+                 extract_params["full_content"] = full_content
+
+             # Add fetch_policy from constructor defaults
+             fetch_policy: Dict[str, Any] = {}
+             if self.max_age_seconds is not None:
+                 fetch_policy["max_age_seconds"] = self.max_age_seconds
+             if self.timeout_seconds is not None:
+                 fetch_policy["timeout_seconds"] = self.timeout_seconds
+             if self.disable_cache_fallback is not None:
+                 fetch_policy["disable_cache_fallback"] = self.disable_cache_fallback
+
+             if fetch_policy:
+                 extract_params["fetch_policy"] = fetch_policy
+
+             extract_result = self.parallel_client.beta.extract(**extract_params)
+
+             # Use model_dump() if available, otherwise convert to dict
+             try:
+                 if hasattr(extract_result, "model_dump"):
+                     return json.dumps(extract_result.model_dump(), cls=CustomJSONEncoder)
+             except Exception:
+                 pass
+
+             # Manually format the results
+             formatted_results: Dict[str, Any] = {
+                 "extract_id": getattr(extract_result, "extract_id", ""),
+                 "results": [],
+                 "errors": [],
+             }
+
+             if hasattr(extract_result, "results") and extract_result.results:
+                 results_list: List[Dict[str, Any]] = []
+                 for result in extract_result.results:
+                     formatted_result: Dict[str, Any] = {
+                         "url": getattr(result, "url", ""),
+                         "title": getattr(result, "title", ""),
+                         "publish_date": getattr(result, "publish_date", ""),
+                     }
+
+                     if excerpts and hasattr(result, "excerpts"):
+                         formatted_result["excerpts"] = result.excerpts
+
+                     if full_content and hasattr(result, "full_content"):
+                         formatted_result["full_content"] = result.full_content
+
+                     results_list.append(formatted_result)
+                 formatted_results["results"] = results_list
+
+             if hasattr(extract_result, "errors") and extract_result.errors:
+                 formatted_results["errors"] = extract_result.errors
+
+             if hasattr(extract_result, "warnings"):
+                 formatted_results["warnings"] = extract_result.warnings
+
+             if hasattr(extract_result, "usage"):
+                 formatted_results["usage"] = extract_result.usage
+
+             return json.dumps(formatted_results, cls=CustomJSONEncoder, indent=2)
+
+         except Exception as e:
+             log_error(f"Error extracting from Parallel: {e}")
+             return json.dumps({"error": f"Extract failed: {str(e)}"}, indent=2)
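For context, the new toolkit above is registered on an agent like any other agno toolkit. A minimal usage sketch follows; it assumes the optional parallel-web dependency is installed and PARALLEL_API_KEY is set, and the model class and id are illustrative choices, not part of this diff.

# Minimal sketch: wiring ParallelTools into an agno Agent.
# Assumes `pip install parallel-web` and PARALLEL_API_KEY in the environment;
# OpenAIChat and its id are illustrative, not mandated by this diff.
from agno.agent import Agent
from agno.models.openai import OpenAIChat
from agno.tools.parallel import ParallelTools

agent = Agent(
    model=OpenAIChat(id="gpt-4o"),
    tools=[ParallelTools(max_results=5, include_domains=["arxiv.org"])],
)
agent.print_response("Find recent work on AI-optimized web search and summarize it.")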
agno/tools/replicate.py CHANGED
@@ -5,7 +5,7 @@ from urllib.parse import urlparse
  from uuid import uuid4

  from agno.agent import Agent
- from agno.media import ImageArtifact, VideoArtifact
+ from agno.media import Image, Video
  from agno.team.team import Team
  from agno.tools import Toolkit
  from agno.tools.function import ToolResult
@@ -72,9 +72,9 @@
                  result_msg, media_artifact = self._parse_output(output)
                  results.append(result_msg)

-                 if isinstance(media_artifact, ImageArtifact):
+                 if isinstance(media_artifact, Image):
                      images.append(media_artifact)
-                 elif isinstance(media_artifact, VideoArtifact):
+                 elif isinstance(media_artifact, Video):
                      videos.append(media_artifact)

              content = "\n".join(results)
@@ -87,7 +87,7 @@
              logger.error(f"Failed to generate media: {e}")
              return ToolResult(content=f"Error: {e}")

-     def _parse_output(self, output: FileOutput) -> Tuple[str, Union[ImageArtifact, VideoArtifact]]:
+     def _parse_output(self, output: FileOutput) -> Tuple[str, Union[Image, Video]]:
          """
          Parse the outputs from the replicate model.
          """
@@ -101,14 +101,14 @@
          video_extensions = {".mp4", ".mov", ".avi", ".mkv", ".flv", ".wmv", ".webm"}

          media_id = str(uuid4())
-         artifact: Union[ImageArtifact, VideoArtifact]
+         artifact: Union[Image, Video]
          media_type: str

          if ext in image_extensions:
-             artifact = ImageArtifact(id=media_id, url=output.url)
+             artifact = Image(id=media_id, url=output.url)
              media_type = "image"
          elif ext in video_extensions:
-             artifact = VideoArtifact(id=media_id, url=output.url)
+             artifact = Video(id=media_id, url=output.url)
              media_type = "video"
          else:
              logger.error(f"Unsupported media type with extension '{ext}' for URL: {output.url}")
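The only change in this file is the 2.x media type rename: ImageArtifact and VideoArtifact become Image and Video in agno.media. A minimal sketch of the new import and constructor shape, with placeholder ids and URLs:

# Sketch of the renamed media types used above (ids and URLs are placeholders).
from agno.media import Image, Video

image = Image(id="img-1", url="https://example.com/picture.png")
video = Video(id="vid-1", url="https://example.com/clip.mp4")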
agno/tools/scrapegraph.py CHANGED
@@ -3,6 +3,7 @@ from os import getenv
  from typing import Any, List, Optional

  from agno.tools import Toolkit
+ from agno.utils.log import log_debug, log_error

  try:
      from scrapegraph_py import Client
@@ -23,11 +24,14 @@
          enable_crawl: bool = False,
          enable_searchscraper: bool = False,
          enable_agentic_crawler: bool = False,
+         enable_scrape: bool = False,
+         render_heavy_js: bool = False,
          all: bool = False,
          **kwargs,
      ):
          self.api_key: Optional[str] = api_key or getenv("SGAI_API_KEY")
          self.client = Client(api_key=self.api_key)
+         self.render_heavy_js = render_heavy_js

          # Start with smartscraper by default
          # Only enable markdownify if smartscraper is False
@@ -45,6 +49,8 @@
              tools.append(self.searchscraper)
          if enable_agentic_crawler or all:
              tools.append(self.agentic_crawler)
+         if enable_scrape or all:
+             tools.append(self.scrape)

          super().__init__(name="scrapegraph_tools", tools=tools, **kwargs)

@@ -57,10 +63,13 @@
              The structured data extracted from the webpage
          """
          try:
+             log_debug(f"ScrapeGraph smartscraper request for URL: {url}")
              response = self.client.smartscraper(website_url=url, user_prompt=prompt)
              return json.dumps(response["result"])
          except Exception as e:
-             return json.dumps({"error": str(e)})
+             error_msg = f"Smartscraper failed: {str(e)}"
+             log_error(error_msg)
+             return f"Error: {error_msg}"

      def markdownify(self, url: str) -> str:
          """Convert a webpage to markdown format.
@@ -70,10 +79,13 @@
              The markdown version of the webpage
          """
          try:
+             log_debug(f"ScrapeGraph markdownify request for URL: {url}")
              response = self.client.markdownify(website_url=url)
              return response["result"]
          except Exception as e:
-             return f"Error converting to markdown: {str(e)}"
+             error_msg = f"Markdownify failed: {str(e)}"
+             log_error(error_msg)
+             return f"Error: {error_msg}"

      def crawl(
          self,
@@ -100,10 +112,11 @@
              The structured data extracted from the website

          try:
+             log_debug(f"ScrapeGraph crawl request for URL: {url}")
              response = self.client.crawl(
                  url=url,
                  prompt=prompt,
-                 schema=schema,
+                 data_schema=schema,
                  cache_website=cache_website,
                  depth=depth,
                  max_pages=max_pages,
@@ -112,7 +125,9 @@
              )
              return json.dumps(response, indent=2)
          except Exception as e:
-             return json.dumps({"error": str(e)})
+             error_msg = f"Crawl failed: {str(e)}"
+             log_error(error_msg)
+             return f"Error: {error_msg}"

      def agentic_crawler(
          self,
@@ -143,21 +158,7 @@
              JSON string containing the scraping results, including request_id, status, and extracted data
          """
          try:
-             # Validate required parameters for AI extraction
-             if ai_extraction and not user_prompt:
-                 return json.dumps({"error": "user_prompt is required when ai_extraction=True"})
-
-             # Validate URL format
-             if not url.strip():
-                 return json.dumps({"error": "URL cannot be empty"})
-             if not (url.startswith("http://") or url.startswith("https://")):
-                 return json.dumps({"error": "Invalid URL - must start with http:// or https://"})
-
-             # Validate steps
-             if not steps:
-                 return json.dumps({"error": "Steps cannot be empty"})
-             if any(not step.strip() for step in steps):
-                 return json.dumps({"error": "All steps must contain valid instructions"})
+             log_debug(f"ScrapeGraph agentic_crawler request for URL: {url}")

              # Prepare parameters for the API call
              params = {"url": url, "steps": steps, "use_session": use_session, "ai_extraction": ai_extraction}
@@ -170,26 +171,52 @@

              # Call the agentic scraper API
              response = self.client.agenticscraper(**params)
-
              return json.dumps(response, indent=2)

          except Exception as e:
-             return json.dumps({"error": str(e)})
+             error_msg = f"Agentic crawler failed: {str(e)}"
+             log_error(error_msg)
+             return f"Error: {error_msg}"

-     def searchscraper(self, prompt: str) -> str:
+     def searchscraper(self, user_prompt: str) -> str:
          """Search the web and extract information from the web.
          Args:
-             prompt (str): Search query
+             user_prompt (str): Search query
          Returns:
              JSON of the search results
          """
          try:
-             response = self.client.searchscraper(user_prompt=prompt)
-             if hasattr(response, "result"):
-                 return json.dumps(response.result)
-             elif isinstance(response, dict) and "result" in response:
-                 return json.dumps(response["result"])
-             else:
-                 return json.dumps(response)
+             log_debug(f"ScrapeGraph searchscraper request with prompt: {user_prompt}")
+             response = self.client.searchscraper(user_prompt=user_prompt)
+             return json.dumps(response["result"])
+         except Exception as e:
+             error_msg = f"Searchscraper failed: {str(e)}"
+             log_error(error_msg)
+             return f"Error: {error_msg}"
+
+     def scrape(
+         self,
+         website_url: str,
+         headers: Optional[dict] = None,
+     ) -> str:
+         """Get raw HTML content from a website using the ScrapeGraphAI scrape API.
+
+         Args:
+             website_url (str): The URL of the website to scrape
+             headers (Optional[dict]): Optional headers to send with the request
+
+         Returns:
+             JSON string containing the HTML content and metadata
+         """
+         try:
+             log_debug(f"ScrapeGraph scrape request for URL: {website_url}")
+             response = self.client.scrape(
+                 website_url=website_url,
+                 headers=headers,
+                 render_heavy_js=self.render_heavy_js,
+             )
+             return json.dumps(response, indent=2)
          except Exception as e:
-             return json.dumps({"error": str(e)})
+             error_msg = f"Scrape failed: {str(e)}"
+             log_error(error_msg)
+             return f"Error: {error_msg}"
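Taken together, the changes above add a raw-HTML scrape tool (with a render_heavy_js constructor flag), rename the searchscraper argument to user_prompt, and route failures through log_error. A minimal usage sketch, assuming scrapegraph-py is installed, SGAI_API_KEY is set, and the 2.x class name is ScrapeGraphTools as in this file:

# Minimal sketch: enabling the new raw-HTML scrape tool added in this diff.
# Assumes SGAI_API_KEY is set and scrapegraph-py is installed; the example URL is a placeholder.
from agno.tools.scrapegraph import ScrapeGraphTools

tools = ScrapeGraphTools(enable_scrape=True, render_heavy_js=True)
html_json = tools.scrape(website_url="https://example.com")  # JSON string with HTML and metadata
print(html_json)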
agno/tools/searxng.py CHANGED
@@ -21,7 +21,7 @@ class Searxng(Toolkit):
          self.fixed_max_results = fixed_max_results

          tools: List[Any] = [
-             self.search,
+             self.search_web,
              self.image_search,
              self.it_search,
              self.map_search,
@@ -33,7 +33,7 @@

          super().__init__(name="searxng", tools=tools, **kwargs)

-     def search(self, query: str, max_results: int = 5) -> str:
+     def search_web(self, query: str, max_results: int = 5) -> str:
          """Use this function to search the web.

          Args:
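The rename from search to search_web above (mirrored in SerperTools and SpiderTools below) means callers invoking the tool method directly must update the name. A minimal sketch, assuming a reachable SearxNG instance; the host URL is a placeholder and the host keyword reflects the toolkit's existing constructor:

# Minimal sketch of the renamed method (host URL is a placeholder).
from agno.tools.searxng import Searxng

searxng = Searxng(host="http://localhost:53153")
results = searxng.search_web("agno agent framework", max_results=3)
print(results)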
agno/tools/serper.py CHANGED
@@ -44,7 +44,7 @@ class SerperTools(Toolkit):

          tools: List[Any] = []
          if all or enable_search:
-             tools.append(self.search)
+             tools.append(self.search_web)
          if all or enable_search_news:
              tools.append(self.search_news)
          if all or enable_search_scholar:
@@ -97,7 +97,7 @@
              log_error(f"Serper API error: {str(e)}")
              return {"success": False, "error": str(e)}

-     def search(
+     def search_web(
          self,
          query: str,
          num_results: Optional[int] = None,
agno/tools/slack.py CHANGED
@@ -16,6 +16,7 @@ class SlackTools(Toolkit):
      def __init__(
          self,
          token: Optional[str] = None,
+         markdown: bool = True,
          enable_send_message: bool = True,
          enable_send_message_thread: bool = True,
          enable_list_channels: bool = True,
@@ -23,10 +24,22 @@
          all: bool = False,
          **kwargs,
      ):
+         """
+         Initialize the SlackTools class.
+         Args:
+             token: The Slack API token. Defaults to the SLACK_TOKEN environment variable.
+             markdown: Whether to enable Slack markdown formatting. Defaults to True.
+             enable_send_message: Whether to enable the send_message tool. Defaults to True.
+             enable_send_message_thread: Whether to enable the send_message_thread tool. Defaults to True.
+             enable_list_channels: Whether to enable the list_channels tool. Defaults to True.
+             enable_get_channel_history: Whether to enable the get_channel_history tool. Defaults to True.
+             all: Whether to enable all tools. Defaults to False.
+         """
          self.token: Optional[str] = token or getenv("SLACK_TOKEN")
          if self.token is None or self.token == "":
              raise ValueError("SLACK_TOKEN is not set")
          self.client = WebClient(token=self.token)
+         self.markdown = markdown

          tools: List[Any] = []
          if enable_send_message or all:
@@ -52,7 +65,7 @@
              str: A JSON string containing the response from the Slack API.
          """
          try:
-             response = self.client.chat_postMessage(channel=channel, text=text)
+             response = self.client.chat_postMessage(channel=channel, text=text, mrkdwn=self.markdown)
              return json.dumps(response.data)
          except SlackApiError as e:
              logger.error(f"Error sending message: {e}")
@@ -65,13 +78,15 @@
          Args:
              channel (str): The channel ID or name to send the message to.
              text (str): The text of the message to send.
-             thread_ts (ts): The thread to reply to
+             thread_ts (ts): The thread to reply to.

          Returns:
              str: A JSON string containing the response from the Slack API.
          """
          try:
-             response = self.client.chat_postMessage(channel=channel, text=text, thread_ts=thread_ts)
+             response = self.client.chat_postMessage(
+                 channel=channel, text=text, thread_ts=thread_ts, mrkdwn=self.markdown
+             )
              return json.dumps(response.data)
          except SlackApiError as e:
              logger.error(f"Error sending message: {e}")
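The new markdown flag above defaults to True and is passed through to chat_postMessage as mrkdwn on both send paths. A minimal sketch, assuming SLACK_TOKEN is set; the channel name is a placeholder and send_message is the tool gated by enable_send_message above:

# Minimal sketch of the new markdown flag (SLACK_TOKEN must be set; channel is a placeholder).
from agno.tools.slack import SlackTools

slack = SlackTools(markdown=False)  # post plain text instead of Slack mrkdwn
response_json = slack.send_message(channel="#general", text="Deploy finished.")
print(response_json)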
agno/tools/spider.py CHANGED
@@ -42,7 +42,7 @@ class SpiderTools(Toolkit):

          tools: List[Any] = []
          if enable_search or all:
-             tools.append(self.search)
+             tools.append(self.search_web)
          if enable_scrape or all:
              tools.append(self.scrape)
          if enable_crawl or all:
@@ -50,7 +50,7 @@

          super().__init__(name="spider", tools=tools, **kwargs)

-     def search(self, query: str, max_results: int = 5) -> str:
+     def search_web(self, query: str, max_results: int = 5) -> str:
          """Use this function to search the web.
          Args:
              query (str): The query to search the web with.