agno 2.0.4__py3-none-any.whl → 2.0.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +127 -102
- agno/db/dynamo/dynamo.py +9 -7
- agno/db/firestore/firestore.py +7 -4
- agno/db/gcs_json/gcs_json_db.py +6 -4
- agno/db/json/json_db.py +10 -6
- agno/db/migrations/v1_to_v2.py +191 -23
- agno/db/mongo/mongo.py +67 -6
- agno/db/mysql/mysql.py +7 -6
- agno/db/mysql/schemas.py +27 -27
- agno/db/postgres/postgres.py +7 -6
- agno/db/redis/redis.py +3 -3
- agno/db/singlestore/singlestore.py +4 -4
- agno/db/sqlite/sqlite.py +7 -6
- agno/db/utils.py +0 -14
- agno/integrations/discord/client.py +1 -0
- agno/knowledge/embedder/openai.py +19 -11
- agno/knowledge/knowledge.py +11 -10
- agno/knowledge/reader/reader_factory.py +7 -3
- agno/knowledge/reader/web_search_reader.py +12 -6
- agno/knowledge/reader/website_reader.py +33 -16
- agno/media.py +70 -0
- agno/models/aimlapi/aimlapi.py +2 -2
- agno/models/base.py +31 -4
- agno/models/cerebras/cerebras_openai.py +2 -2
- agno/models/deepinfra/deepinfra.py +2 -2
- agno/models/deepseek/deepseek.py +2 -2
- agno/models/fireworks/fireworks.py +2 -2
- agno/models/internlm/internlm.py +2 -2
- agno/models/langdb/langdb.py +4 -4
- agno/models/litellm/litellm_openai.py +2 -2
- agno/models/message.py +135 -0
- agno/models/meta/llama_openai.py +2 -2
- agno/models/nebius/nebius.py +2 -2
- agno/models/nexus/__init__.py +3 -0
- agno/models/nexus/nexus.py +25 -0
- agno/models/nvidia/nvidia.py +2 -2
- agno/models/openai/responses.py +6 -0
- agno/models/openrouter/openrouter.py +2 -2
- agno/models/perplexity/perplexity.py +2 -2
- agno/models/portkey/portkey.py +3 -3
- agno/models/response.py +2 -1
- agno/models/sambanova/sambanova.py +2 -2
- agno/models/together/together.py +2 -2
- agno/models/vercel/v0.py +2 -2
- agno/models/xai/xai.py +2 -2
- agno/os/app.py +162 -42
- agno/os/interfaces/agui/utils.py +98 -134
- agno/os/router.py +3 -1
- agno/os/routers/health.py +0 -1
- agno/os/routers/home.py +52 -0
- agno/os/routers/knowledge/knowledge.py +2 -2
- agno/os/schema.py +21 -0
- agno/os/utils.py +1 -9
- agno/run/agent.py +19 -3
- agno/run/team.py +18 -3
- agno/run/workflow.py +10 -0
- agno/team/team.py +70 -45
- agno/tools/duckduckgo.py +15 -11
- agno/tools/e2b.py +14 -7
- agno/tools/file_generation.py +350 -0
- agno/tools/function.py +2 -0
- agno/tools/googlesearch.py +1 -1
- agno/utils/gemini.py +24 -4
- agno/utils/string.py +32 -0
- agno/utils/tools.py +1 -1
- agno/vectordb/chroma/chromadb.py +66 -25
- agno/vectordb/lancedb/lance_db.py +15 -4
- agno/vectordb/milvus/milvus.py +6 -0
- agno/workflow/step.py +4 -3
- agno/workflow/workflow.py +4 -0
- {agno-2.0.4.dist-info → agno-2.0.6.dist-info}/METADATA +9 -5
- {agno-2.0.4.dist-info → agno-2.0.6.dist-info}/RECORD +75 -72
- agno/knowledge/reader/url_reader.py +0 -128
- {agno-2.0.4.dist-info → agno-2.0.6.dist-info}/WHEEL +0 -0
- {agno-2.0.4.dist-info → agno-2.0.6.dist-info}/licenses/LICENSE +0 -0
- {agno-2.0.4.dist-info → agno-2.0.6.dist-info}/top_level.txt +0 -0
agno/tools/duckduckgo.py
CHANGED
@@ -12,14 +12,16 @@ except ImportError:
 
 
 class DuckDuckGoTools(Toolkit):
     """
-    DuckDuckGo is a toolkit for searching DuckDuckGo easily.
+    DuckDuckGo is a toolkit for searching using DuckDuckGo easily.
+    It uses the meta-search library DDGS, so it also has access to other backends.
     Args:
-
-
+        enable_search (bool): Enable DDGS search function.
+        enable_news (bool): Enable DDGS news function.
         modifier (Optional[str]): A modifier to be used in the search request.
         fixed_max_results (Optional[int]): A fixed number of maximum results.
        proxy (Optional[str]): Proxy to be used in the search request.
        timeout (Optional[int]): The maximum number of seconds to wait for a response.
+        backend (Optional[str]): The backend to be used in the search request.
 
     """
 
@@ -28,6 +30,7 @@ class DuckDuckGoTools(Toolkit):
         enable_search: bool = True,
         enable_news: bool = True,
         all: bool = False,
+        backend: str = "duckduckgo",
         modifier: Optional[str] = None,
         fixed_max_results: Optional[int] = None,
         proxy: Optional[str] = None,
@@ -40,6 +43,7 @@ class DuckDuckGoTools(Toolkit):
         self.fixed_max_results: Optional[int] = fixed_max_results
         self.modifier: Optional[str] = modifier
         self.verify_ssl: bool = verify_ssl
+        self.backend: str = backend
 
         tools: List[Any] = []
         if all or enable_search:
@@ -50,38 +54,38 @@ class DuckDuckGoTools(Toolkit):
         super().__init__(name="duckduckgo", tools=tools, **kwargs)
 
     def duckduckgo_search(self, query: str, max_results: int = 5) -> str:
-        """Use this function to search
+        """Use this function to search DDGS for a query.
 
         Args:
             query(str): The query to search for.
             max_results (optional, default=5): The maximum number of results to return.
 
         Returns:
-            The result from
+            The result from DDGS.
         """
         actual_max_results = self.fixed_max_results or max_results
         search_query = f"{self.modifier} {query}" if self.modifier else query
 
-        log_debug(f"Searching DDG for: {search_query}")
+        log_debug(f"Searching DDG for: {search_query} using backend: {self.backend}")
         with DDGS(proxy=self.proxy, timeout=self.timeout, verify=self.verify_ssl) as ddgs:
-            results = ddgs.text(query=search_query, max_results=actual_max_results)
+            results = ddgs.text(query=search_query, max_results=actual_max_results, backend=self.backend)
 
         return json.dumps(results, indent=2)
 
     def duckduckgo_news(self, query: str, max_results: int = 5) -> str:
-        """Use this function to get the latest news from
+        """Use this function to get the latest news from DDGS.
 
         Args:
             query(str): The query to search for.
             max_results (optional, default=5): The maximum number of results to return.
 
         Returns:
-            The latest news from
+            The latest news from DDGS.
         """
         actual_max_results = self.fixed_max_results or max_results
 
-        log_debug(f"Searching DDG news for: {query}")
+        log_debug(f"Searching DDG news for: {query} using backend: {self.backend}")
         with DDGS(proxy=self.proxy, timeout=self.timeout, verify=self.verify_ssl) as ddgs:
-            results = ddgs.news(query=query, max_results=actual_max_results)
+            results = ddgs.news(query=query, max_results=actual_max_results, backend=self.backend)
 
         return json.dumps(results, indent=2)
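A minimal usage sketch of the new `backend` option (not part of the diff above; it assumes the `ddgs` dependency is installed and that the backend name you pass is one DDGS accepts, since the toolkit forwards it unvalidated):

```python
# Hedged sketch: exercising the new `backend` parameter from this diff.
from agno.tools.duckduckgo import DuckDuckGoTools

tools = DuckDuckGoTools(backend="duckduckgo", fixed_max_results=3)
print(tools.duckduckgo_search("agno framework"))  # JSON string of search results
print(tools.duckduckgo_news("open source AI"))    # news results via the same backend
```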
agno/tools/e2b.py
CHANGED
@@ -464,7 +464,7 @@ class E2BTools(Toolkit):
 
         result = f"Contents of {directory_path}:\n"
         for file in files:
-            file_type = "Directory" if file.
+            file_type = "Directory" if file.type == "directory" else "File"
             size = f"{file.size} bytes" if file.size is not None else "Unknown size"
             result += f"- {file.name} ({file_type}, {size})\n"
 
@@ -486,12 +486,19 @@ class E2BTools(Toolkit):
         try:
             content = self.sandbox.files.read(file_path)
 
-            #
-
-
-
-
-
+            # Check if content is already a string or if it's bytes that need decoding
+            if isinstance(content, str):
+                return content
+            elif isinstance(content, bytes):
+                # Try to decode as text if encoding is provided
+                try:
+                    text_content = content.decode(encoding)
+                    return text_content
+                except UnicodeDecodeError:
+                    return f"File read successfully but contains binary data ({len(content)} bytes). Use download_file_from_sandbox to save it."
+            else:
+                # Handle unexpected content type
+                return f"Unexpected content type: {type(content)}. Expected str or bytes."
 
         except Exception as e:
             return json.dumps({"status": "error", "message": f"Error reading file: {str(e)}"})
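The second hunk replaces the old read-file handling with a decode-or-report pattern. A standalone, hedged illustration of that pattern (no E2B sandbox required; `raw` stands in for whatever `sandbox.files.read()` returns):

```python
# Hedged sketch of the decode-or-report logic introduced in E2BTools.read_file_content.
def describe_content(raw, encoding: str = "utf-8") -> str:
    if isinstance(raw, str):
        return raw
    if isinstance(raw, bytes):
        try:
            return raw.decode(encoding)
        except UnicodeDecodeError:
            return f"File read successfully but contains binary data ({len(raw)} bytes)."
    return f"Unexpected content type: {type(raw)}. Expected str or bytes."

print(describe_content(b"hello"))         # decoded text
print(describe_content(b"\xff\xfe\x00"))  # binary-data message instead of a crash
```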
agno/tools/file_generation.py
ADDED
@@ -0,0 +1,350 @@
+import csv
+import io
+import json
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Union
+from uuid import uuid4
+
+from agno.media import File
+from agno.tools import Toolkit
+from agno.tools.function import ToolResult
+from agno.utils.log import log_debug, logger
+
+try:
+    from reportlab.lib.pagesizes import letter
+    from reportlab.lib.styles import getSampleStyleSheet
+    from reportlab.lib.units import inch
+    from reportlab.platypus import Paragraph, SimpleDocTemplate, Spacer
+
+    PDF_AVAILABLE = True
+except ImportError:
+    PDF_AVAILABLE = False
+    logger.warning("reportlab not installed. PDF generation will not be available. Install with: pip install reportlab")
+
+
+class FileGenerationTools(Toolkit):
+    def __init__(
+        self,
+        enable_json_generation: bool = True,
+        enable_csv_generation: bool = True,
+        enable_pdf_generation: bool = True,
+        enable_txt_generation: bool = True,
+        output_directory: Optional[str] = None,
+        all: bool = False,
+        **kwargs,
+    ):
+        self.enable_json_generation = enable_json_generation
+        self.enable_csv_generation = enable_csv_generation
+        self.enable_pdf_generation = enable_pdf_generation and PDF_AVAILABLE
+        self.enable_txt_generation = enable_txt_generation
+        self.output_directory = Path(output_directory) if output_directory else None
+
+        # Create output directory if specified
+        if self.output_directory:
+            self.output_directory.mkdir(parents=True, exist_ok=True)
+            log_debug(f"Files will be saved to: {self.output_directory}")
+
+        if enable_pdf_generation and not PDF_AVAILABLE:
+            logger.warning("PDF generation requested but reportlab is not installed. Disabling PDF generation.")
+            self.enable_pdf_generation = False
+
+        tools: List[Any] = []
+        if all or enable_json_generation:
+            tools.append(self.generate_json_file)
+        if all or enable_csv_generation:
+            tools.append(self.generate_csv_file)
+        if all or (enable_pdf_generation and PDF_AVAILABLE):
+            tools.append(self.generate_pdf_file)
+        if all or enable_txt_generation:
+            tools.append(self.generate_text_file)
+
+        super().__init__(name="file_generation", tools=tools, **kwargs)
+
+    def _save_file_to_disk(self, content: Union[str, bytes], filename: str) -> Optional[str]:
+        """Save file to disk if output_directory is set. Return file path or None."""
+        if not self.output_directory:
+            return None
+
+        file_path = self.output_directory / filename
+
+        if isinstance(content, str):
+            file_path.write_text(content, encoding="utf-8")
+        else:
+            file_path.write_bytes(content)
+
+        log_debug(f"File saved to: {file_path}")
+        return str(file_path)
+
+    def generate_json_file(self, data: Union[Dict, List, str], filename: Optional[str] = None) -> ToolResult:
+        """Generate a JSON file from the provided data.
+
+        Args:
+            data: The data to write to the JSON file. Can be a dictionary, list, or JSON string.
+            filename: Optional filename for the generated file. If not provided, a UUID will be used.
+
+        Returns:
+            ToolResult: Result containing the generated JSON file as a FileArtifact.
+        """
+        try:
+            log_debug(f"Generating JSON file with data: {type(data)}")
+
+            # Handle different input types
+            if isinstance(data, str):
+                try:
+                    json.loads(data)
+                    json_content = data  # Use the original string if it's valid JSON
+                except json.JSONDecodeError:
+                    # If it's not valid JSON, treat as plain text and wrap it
+                    json_content = json.dumps({"content": data}, indent=2)
+            else:
+                json_content = json.dumps(data, indent=2, ensure_ascii=False)
+
+            # Generate filename if not provided
+            if not filename:
+                filename = f"generated_file_{str(uuid4())[:8]}.json"
+            elif not filename.endswith(".json"):
+                filename += ".json"
+
+            # Save file to disk (if output_directory is set)
+            file_path = self._save_file_to_disk(json_content, filename)
+
+            # Create FileArtifact
+            file_artifact = File(
+                id=str(uuid4()),
+                content=json_content,
+                mime_type="application/json",
+                file_type="json",
+                filename=filename,
+                size=len(json_content.encode("utf-8")),
+                url=f"file://{file_path}" if file_path else None,
+            )
+
+            log_debug("JSON file generated successfully")
+            success_msg = f"JSON file '{filename}' has been generated successfully with {len(json_content)} characters."
+            if file_path:
+                success_msg += f" File saved to: {file_path}"
+            else:
+                success_msg += " File is available in response."
+
+            return ToolResult(content=success_msg, files=[file_artifact])
+
+        except Exception as e:
+            logger.error(f"Failed to generate JSON file: {e}")
+            return ToolResult(content=f"Error generating JSON file: {e}")
+
+    def generate_csv_file(
+        self,
+        data: Union[List[List], List[Dict], str],
+        filename: Optional[str] = None,
+        headers: Optional[List[str]] = None,
+    ) -> ToolResult:
+        """Generate a CSV file from the provided data.
+
+        Args:
+            data: The data to write to the CSV file. Can be a list of lists, list of dictionaries, or CSV string.
+            filename: Optional filename for the generated file. If not provided, a UUID will be used.
+            headers: Optional headers for the CSV. Used when data is a list of lists.
+
+        Returns:
+            ToolResult: Result containing the generated CSV file as a FileArtifact.
+        """
+        try:
+            log_debug(f"Generating CSV file with data: {type(data)}")
+
+            # Create CSV content
+            output = io.StringIO()
+
+            if isinstance(data, str):
+                # If it's already a CSV string, use it directly
+                csv_content = data
+            elif isinstance(data, list) and len(data) > 0:
+                writer = csv.writer(output)
+
+                if isinstance(data[0], dict):
+                    # List of dictionaries - use keys as headers
+                    if data:
+                        fieldnames = list(data[0].keys())
+                        writer.writerow(fieldnames)
+                        for row in data:
+                            if isinstance(row, dict):
+                                writer.writerow([row.get(field, "") for field in fieldnames])
+                            else:
+                                writer.writerow([str(row)] + [""] * (len(fieldnames) - 1))
+                elif isinstance(data[0], list):
+                    # List of lists
+                    if headers:
+                        writer.writerow(headers)
+                    writer.writerows(data)
+                else:
+                    # List of other types
+                    if headers:
+                        writer.writerow(headers)
+                    for item in data:
+                        writer.writerow([str(item)])
+
+                csv_content = output.getvalue()
+            else:
+                csv_content = ""
+
+            # Generate filename if not provided
+            if not filename:
+                filename = f"generated_file_{str(uuid4())[:8]}.csv"
+            elif not filename.endswith(".csv"):
+                filename += ".csv"
+
+            # Save file to disk (if output_directory is set)
+            file_path = self._save_file_to_disk(csv_content, filename)
+
+            # Create FileArtifact
+            file_artifact = File(
+                id=str(uuid4()),
+                content=csv_content,
+                mime_type="text/csv",
+                file_type="csv",
+                filename=filename,
+                size=len(csv_content.encode("utf-8")),
+                url=f"file://{file_path}" if file_path else None,
+            )
+
+            log_debug("CSV file generated successfully")
+            success_msg = f"CSV file '{filename}' has been generated successfully with {len(csv_content)} characters."
+            if file_path:
+                success_msg += f" File saved to: {file_path}"
+            else:
+                success_msg += " File is available in response."
+
+            return ToolResult(content=success_msg, files=[file_artifact])
+
+        except Exception as e:
+            logger.error(f"Failed to generate CSV file: {e}")
+            return ToolResult(content=f"Error generating CSV file: {e}")
+
+    def generate_pdf_file(
+        self, content: str, filename: Optional[str] = None, title: Optional[str] = None
+    ) -> ToolResult:
+        """Generate a PDF file from the provided content.
+
+        Args:
+            content: The text content to write to the PDF file.
+            filename: Optional filename for the generated file. If not provided, a UUID will be used.
+            title: Optional title for the PDF document.
+
+        Returns:
+            ToolResult: Result containing the generated PDF file as a FileArtifact.
+        """
+        if not PDF_AVAILABLE:
+            return ToolResult(
+                content="PDF generation is not available. Please install reportlab: pip install reportlab"
+            )
+
+        try:
+            log_debug(f"Generating PDF file with content length: {len(content)}")
+
+            # Create PDF content in memory
+            buffer = io.BytesIO()
+            doc = SimpleDocTemplate(buffer, pagesize=letter, topMargin=1 * inch)
+
+            # Get styles
+            styles = getSampleStyleSheet()
+            title_style = styles["Title"]
+            normal_style = styles["Normal"]
+
+            # Build story (content elements)
+            story = []
+
+            if title:
+                story.append(Paragraph(title, title_style))
+                story.append(Spacer(1, 20))
+
+            # Split content into paragraphs and add to story
+            paragraphs = content.split("\n\n")
+            for para in paragraphs:
+                if para.strip():
+                    # Clean the paragraph text for PDF
+                    clean_para = para.strip().replace("<", "&lt;").replace(">", "&gt;")
+                    story.append(Paragraph(clean_para, normal_style))
+                    story.append(Spacer(1, 10))
+
+            # Build PDF
+            doc.build(story)
+            pdf_content = buffer.getvalue()
+            buffer.close()
+
+            # Generate filename if not provided
+            if not filename:
+                filename = f"generated_file_{str(uuid4())[:8]}.pdf"
+            elif not filename.endswith(".pdf"):
+                filename += ".pdf"
+
+            # Save file to disk (if output_directory is set)
+            file_path = self._save_file_to_disk(pdf_content, filename)
+
+            # Create FileArtifact
+            file_artifact = File(
+                id=str(uuid4()),
+                content=pdf_content,
+                mime_type="application/pdf",
+                file_type="pdf",
+                filename=filename,
+                size=len(pdf_content),
+                url=f"file://{file_path}" if file_path else None,
+            )
+
+            log_debug("PDF file generated successfully")
+            success_msg = f"PDF file '{filename}' has been generated successfully with {len(pdf_content)} bytes."
+            if file_path:
+                success_msg += f" File saved to: {file_path}"
+            else:
+                success_msg += " File is available in response."
+
+            return ToolResult(content=success_msg, files=[file_artifact])
+
+        except Exception as e:
+            logger.error(f"Failed to generate PDF file: {e}")
+            return ToolResult(content=f"Error generating PDF file: {e}")
+
+    def generate_text_file(self, content: str, filename: Optional[str] = None) -> ToolResult:
+        """Generate a text file from the provided content.
+
+        Args:
+            content: The text content to write to the file.
+            filename: Optional filename for the generated file. If not provided, a UUID will be used.
+
+        Returns:
+            ToolResult: Result containing the generated text file as a FileArtifact.
+        """
+        try:
+            log_debug(f"Generating text file with content length: {len(content)}")
+
+            # Generate filename if not provided
+            if not filename:
+                filename = f"generated_file_{str(uuid4())[:8]}.txt"
+            elif not filename.endswith(".txt"):
+                filename += ".txt"
+
+            # Save file to disk (if output_directory is set)
+            file_path = self._save_file_to_disk(content, filename)
+
+            # Create FileArtifact
+            file_artifact = File(
+                id=str(uuid4()),
+                content=content,
+                mime_type="text/plain",
+                file_type="txt",
+                filename=filename,
+                size=len(content.encode("utf-8")),
+                url=f"file://{file_path}" if file_path else None,
+            )
+
+            log_debug("Text file generated successfully")
+            success_msg = f"Text file '{filename}' has been generated successfully with {len(content)} characters."
+            if file_path:
+                success_msg += f" File saved to: {file_path}"
+            else:
+                success_msg += " File is available in response."
+
+            return ToolResult(content=success_msg, files=[file_artifact])
+
+        except Exception as e:
+            logger.error(f"Failed to generate text file: {e}")
+            return ToolResult(content=f"Error generating text file: {e}")
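A hedged sketch of calling the new toolkit directly, outside an Agent. The method names, parameters, and ToolResult/File fields come from the file above; the temporary output directory path is illustrative:

```python
# Hedged sketch: direct use of the new FileGenerationTools toolkit.
from agno.tools.file_generation import FileGenerationTools

toolkit = FileGenerationTools(output_directory="/tmp/agno_files")
result = toolkit.generate_json_file({"name": "agno", "version": "2.0.6"}, filename="release")

print(result.content)             # human-readable status message
print(result.files[0].filename)   # "release.json"
print(result.files[0].mime_type)  # "application/json"
```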
agno/tools/function.py
CHANGED
@@ -485,6 +485,7 @@ class FunctionExecutionResult(BaseModel):
     images: Optional[List[Image]] = None
     videos: Optional[List[Video]] = None
     audios: Optional[List[Audio]] = None
+    files: Optional[List[File]] = None
 
 
 class FunctionCall(BaseModel):
@@ -965,3 +966,4 @@ class ToolResult(BaseModel):
     images: Optional[List[Image]] = None
     videos: Optional[List[Video]] = None
     audios: Optional[List[Audio]] = None
+    files: Optional[List[File]] = None
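With `files` now on ToolResult (and FunctionExecutionResult), a custom tool can return file artifacts. A hedged sketch, mirroring the File construction used in file_generation.py above; wiring the function into an Agent or Toolkit is not shown:

```python
# Hedged sketch: a custom tool function returning a file via ToolResult.files.
from uuid import uuid4

from agno.media import File
from agno.tools.function import ToolResult


def export_report() -> ToolResult:
    body = "quarterly numbers go here"
    artifact = File(
        id=str(uuid4()),
        content=body,
        mime_type="text/plain",
        file_type="txt",
        filename="report.txt",
        size=len(body.encode("utf-8")),
        url=None,
    )
    return ToolResult(content="Report generated.", files=[artifact])
```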
agno/tools/googlesearch.py
CHANGED
@@ -82,7 +82,7 @@ class GoogleSearchTools(Toolkit):
         log_debug(f"Searching Google [{language}] for: {query}")
 
         # Perform Google search using the googlesearch-python package
-        results = list(search(query,
+        results = list(search(query, num_results=max_results, lang=language))
 
         # Collect the search results
         res: List[Dict[str, str]] = []
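The fix passes the result count and language through to googlesearch-python by keyword. A hedged sketch of the underlying call as it appears on the new line above (assumes the googlesearch-python package is installed; its default mode yields result URLs as strings):

```python
# Hedged sketch of the underlying googlesearch-python call restored by the fix.
from googlesearch import search

for url in search("agno agent framework", num_results=5, lang="en"):
    print(url)
```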
agno/utils/gemini.py
CHANGED
@@ -146,13 +146,24 @@ def convert_schema(schema_dict: Dict[str, Any], root_schema: Optional[Dict[str,
         # For Gemini, we need to represent Dict[str, T] as an object with at least one property
         # to avoid the "properties should be non-empty" error.
         # We'll create a generic property that represents the dictionary structure
-
+
+        # Handle both single types and union types (arrays) from Zod schemas
+        type_value = additional_props.get("type", "string")
+        if isinstance(type_value, list):
+            value_type = type_value[0].upper() if type_value else "STRING"
+            union_types = ", ".join(type_value)
+            type_description_suffix = f" (supports union types: {union_types})"
+        else:
+            # Single type
+            value_type = type_value.upper()
+            type_description_suffix = ""
+
         # Create a placeholder property to satisfy Gemini's requirements
         # This is a workaround since Gemini doesn't support additionalProperties directly
         placeholder_properties = {
             "example_key": Schema(
                 type=value_type,
-                description=f"Example key-value pair. This object can contain any number of keys with {value_type.lower()} values.",
+                description=f"Example key-value pair. This object can contain any number of keys with {value_type.lower()} values{type_description_suffix}.",
             )
         }
         if value_type == "ARRAY":
@@ -162,7 +173,7 @@ def convert_schema(schema_dict: Dict[str, Any], root_schema: Optional[Dict[str,
                 type=Type.OBJECT,
                 properties=placeholder_properties,
                 description=description
-                or f"Dictionary with {value_type.lower()} values. Can contain any number of key-value pairs.",
+                or f"Dictionary with {value_type.lower()} values{type_description_suffix}. Can contain any number of key-value pairs.",
                 default=default,
             )
         else:
@@ -174,7 +185,10 @@ def convert_schema(schema_dict: Dict[str, Any], root_schema: Optional[Dict[str,
         return Schema(type=Type.OBJECT, description=description, default=default)
 
     elif schema_type == "array" and "items" in schema_dict:
-
+        if not schema_dict["items"]:  # Handle empty {}
+            items = Schema(type=Type.STRING)
+        else:
+            items = convert_schema(schema_dict["items"], root_schema)
         min_items = schema_dict.get("minItems")
         max_items = schema_dict.get("maxItems")
         return Schema(
@@ -233,6 +247,12 @@ def convert_schema(schema_dict: Dict[str, Any], root_schema: Optional[Dict[str,
             default=default,
         )
     else:
+        if isinstance(schema_type, list):
+            non_null_types = [t for t in schema_type if t != "null"]
+            if non_null_types:
+                schema_type = non_null_types[0]
+            else:
+                schema_type = ""
         # Only convert to uppercase if schema_type is not empty
         if schema_type:
             schema_type = schema_type.upper()
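These hunks make convert_schema tolerate two schema shapes that previously failed: union types such as ["string", "null"] (as emitted by Zod/JSON Schema for nullable fields) and arrays with an empty items object. A hedged illustration of the inputs it now accepts; the exact Schema output depends on the google-genai types and is not shown:

```python
# Hedged sketch: schema shapes convert_schema now handles (requires google-genai).
from agno.utils.gemini import convert_schema

nullable_field = {"type": ["string", "null"], "description": "optional note"}
untyped_array = {"type": "array", "items": {}}

print(convert_schema(nullable_field, None))  # null stripped, treated as a STRING field
print(convert_schema(untyped_array, None))   # empty items default to a STRING schema
```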
agno/utils/string.py
CHANGED
@@ -1,7 +1,9 @@
 import hashlib
 import json
 import re
+import uuid
 from typing import Optional, Type
+from uuid import uuid4
 
 from pydantic import BaseModel, ValidationError
 
@@ -188,3 +190,33 @@ def parse_response_model_str(content: str, output_schema: Type[BaseModel]) -> Op
     logger.warning("All parsing attempts failed.")
 
     return structured_output
+
+
+def generate_id(seed: Optional[str] = None) -> str:
+    """
+    Generate a deterministic UUID5 based on a seed string.
+    If no seed is provided, generate a random UUID4.
+
+    Args:
+        seed (str): The seed string to generate the UUID from.
+
+    Returns:
+        str: A deterministic UUID5 string.
+    """
+    if seed is None:
+        return str(uuid4())
+    return str(uuid.uuid5(uuid.NAMESPACE_DNS, seed))
+
+
+def generate_id_from_name(name: Optional[str] = None) -> str:
+    """
+    Generate a deterministic ID from a name string.
+    If no name is provided, generate a random UUID4.
+
+    Args:
+        name (str): The name string to generate the ID from.
+    """
+    if name:
+        return name.lower().replace(" ", "-").replace("_", "-")
+    else:
+        return str(uuid4())
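A short sketch of the two new helpers, grounded in the code above: generate_id() is stable for a given seed (UUID5 over NAMESPACE_DNS), while generate_id_from_name() produces a slug-style identifier:

```python
# Sketch: behavior of the new ID helpers added in this release.
from agno.utils.string import generate_id, generate_id_from_name

assert generate_id("my-agent") == generate_id("my-agent")  # deterministic across runs
print(generate_id())                                        # random UUID4 when no seed
print(generate_id_from_name("My Research Agent"))           # -> "my-research-agent"
```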
agno/utils/tools.py
CHANGED
@@ -13,7 +13,7 @@ def get_function_call_for_tool_call(
     _tool_call_function = tool_call.get("function")
     if _tool_call_function is not None:
         _tool_call_function_name = _tool_call_function.get("name")
-        _tool_call_function_arguments_str = _tool_call_function.get("arguments")
+        _tool_call_function_arguments_str = _tool_call_function.get("arguments") or "{}"
         if _tool_call_function_name is not None:
             return get_function_call(
                 name=_tool_call_function_name,