agno 2.2.5__py3-none-any.whl → 2.2.6__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
- agno/agent/agent.py +82 -19
- agno/culture/manager.py +3 -4
- agno/knowledge/chunking/agentic.py +6 -2
- agno/memory/manager.py +9 -4
- agno/models/anthropic/claude.py +1 -2
- agno/models/azure/ai_foundry.py +31 -14
- agno/models/azure/openai_chat.py +12 -4
- agno/models/base.py +44 -11
- agno/models/cerebras/cerebras.py +11 -6
- agno/models/groq/groq.py +7 -4
- agno/models/meta/llama.py +12 -6
- agno/models/meta/llama_openai.py +5 -1
- agno/models/openai/chat.py +20 -12
- agno/models/openai/responses.py +10 -5
- agno/models/utils.py +254 -8
- agno/models/vertexai/claude.py +9 -13
- agno/os/routers/evals/evals.py +8 -8
- agno/os/routers/evals/utils.py +1 -0
- agno/os/schema.py +48 -33
- agno/os/utils.py +27 -0
- agno/run/agent.py +5 -0
- agno/run/team.py +2 -0
- agno/run/workflow.py +39 -0
- agno/session/summary.py +8 -2
- agno/session/workflow.py +4 -3
- agno/team/team.py +50 -14
- agno/tools/file.py +153 -25
- agno/tools/function.py +5 -1
- agno/tools/notion.py +201 -0
- agno/utils/events.py +2 -0
- agno/utils/print_response/workflow.py +115 -16
- agno/vectordb/milvus/milvus.py +5 -0
- agno/workflow/__init__.py +2 -0
- agno/workflow/agent.py +298 -0
- agno/workflow/workflow.py +929 -64
- {agno-2.2.5.dist-info → agno-2.2.6.dist-info}/METADATA +4 -1
- {agno-2.2.5.dist-info → agno-2.2.6.dist-info}/RECORD +40 -38
- {agno-2.2.5.dist-info → agno-2.2.6.dist-info}/WHEEL +0 -0
- {agno-2.2.5.dist-info → agno-2.2.6.dist-info}/licenses/LICENSE +0 -0
- {agno-2.2.5.dist-info → agno-2.2.6.dist-info}/top_level.txt +0 -0
agno/tools/file.py
CHANGED
@@ -1,9 +1,9 @@
 import json
 from pathlib import Path
-from typing import Any, List, Optional
+from typing import Any, List, Optional, Tuple

 from agno.tools import Toolkit
-from agno.utils.log import log_debug, log_error
+from agno.utils.log import log_debug, log_error


 class FileTools(Toolkit):
@@ -12,14 +12,26 @@ class FileTools(Toolkit):
         base_dir: Optional[Path] = None,
         enable_save_file: bool = True,
         enable_read_file: bool = True,
+        enable_delete_file: bool = False,
         enable_list_files: bool = True,
         enable_search_files: bool = True,
+        enable_read_file_chunk: bool = True,
+        enable_replace_file_chunk: bool = True,
+        expose_base_directory: bool = False,
+        max_file_length: int = 10000000,
+        max_file_lines: int = 100000,
+        line_separator: str = "\n",
         all: bool = False,
         **kwargs,
     ):
         self.base_dir: Path = base_dir or Path.cwd()
+        self.base_dir = self.base_dir.resolve()

         tools: List[Any] = []
+        self.max_file_length = max_file_length
+        self.max_file_lines = max_file_lines
+        self.line_separator = line_separator
+        self.expose_base_directory = expose_base_directory
         if all or enable_save_file:
             tools.append(self.save_file)
         if all or enable_read_file:
@@ -28,10 +40,16 @@ class FileTools(Toolkit):
             tools.append(self.list_files)
         if all or enable_search_files:
             tools.append(self.search_files)
+        if all or enable_delete_file:
+            tools.append(self.delete_file)
+        if all or enable_read_file_chunk:
+            tools.append(self.read_file_chunk)
+        if all or enable_replace_file_chunk:
+            tools.append(self.replace_file_chunk)

         super().__init__(name="file_tools", tools=tools, **kwargs)

-    def save_file(self, contents: str, file_name: str, overwrite: bool = True) -> str:
+    def save_file(self, contents: str, file_name: str, overwrite: bool = True, encoding: str = "utf-8") -> str:
         """Saves the contents to a file called `file_name` and returns the file name if successful.

         :param contents: The contents to save.
@@ -40,44 +58,146 @@ class FileTools(Toolkit):
         :return: The file name if successful, otherwise returns an error message.
         """
         try:
-            file_path = self.
+            safe, file_path = self.check_escape(file_name)
+            if not (safe):
+                log_error(f"Attempted to save file: {file_name}")
+                return "Error saving file"
             log_debug(f"Saving contents to {file_path}")
             if not file_path.parent.exists():
                 file_path.parent.mkdir(parents=True, exist_ok=True)
             if file_path.exists() and not overwrite:
                 return f"File {file_name} already exists"
-            file_path.write_text(contents)
-
+            file_path.write_text(contents, encoding=encoding)
+            log_debug(f"Saved: {file_path}")
             return str(file_name)
         except Exception as e:
             log_error(f"Error saving to file: {e}")
             return f"Error saving to file: {e}"

-    def
+    def read_file_chunk(self, file_name: str, start_line: int, end_line: int, encoding: str = "utf-8") -> str:
+        """Reads the contents of the file `file_name` and returns lines from start_line to end_line.
+
+        :param file_name: The name of the file to read.
+        :param start_line: Number of first line in the returned chunk
+        :param end_line: Number of the last line in the returned chunk
+        :param encoding: Encoding to use, default - utf-8
+
+        :return: The contents of the selected chunk
+        """
+        try:
+            log_debug(f"Reading file: {file_name}")
+            safe, file_path = self.check_escape(file_name)
+            if not (safe):
+                log_error(f"Attempted to read file: {file_name}")
+                return "Error reading file"
+            contents = file_path.read_text(encoding=encoding)
+            lines = contents.split(self.line_separator)
+            return self.line_separator.join(lines[start_line : end_line + 1])
+        except Exception as e:
+            log_error(f"Error reading file: {e}")
+            return f"Error reading file: {e}"
+
+    def replace_file_chunk(
+        self, file_name: str, start_line: int, end_line: int, chunk: str, encoding: str = "utf-8"
+    ) -> str:
+        """Reads the contents of the file, replaces lines
+        between start_line and end_line with chunk and writes the file
+
+        :param file_name: The name of the file to process.
+        :param start_line: Number of first line in the replaced chunk
+        :param end_line: Number of the last line in the replaced chunk
+        :param chunk: String to be inserted instead of lines from start_line to end_line. Can have multiple lines.
+        :param encoding: Encoding to use, default - utf-8
+
+        :return: file name if successfull, error message otherwise
+        """
+        try:
+            log_debug(f"Patching file: {file_name}")
+            safe, file_path = self.check_escape(file_name)
+            if not (safe):
+                log_error(f"Attempted to read file: {file_name}")
+                return "Error reading file"
+            contents = file_path.read_text(encoding=encoding)
+            lines = contents.split(self.line_separator)
+            start = lines[0:start_line]
+            end = lines[end_line + 1 :]
+            return self.save_file(
+                file_name=file_name, contents=self.line_separator.join(start + [chunk] + end), encoding=encoding
+            )
+        except Exception as e:
+            log_error(f"Error patching file: {e}")
+            return f"Error patching file: {e}"
+
+    def read_file(self, file_name: str, encoding: str = "utf-8") -> str:
         """Reads the contents of the file `file_name` and returns the contents if successful.

         :param file_name: The name of the file to read.
+        :param encoding: Encoding to use, default - utf-8
         :return: The contents of the file if successful, otherwise returns an error message.
         """
         try:
-
-            file_path = self.
-
+            log_debug(f"Reading file: {file_name}")
+            safe, file_path = self.check_escape(file_name)
+            if not (safe):
+                log_error(f"Attempted to read file: {file_name}")
+                return "Error reading file"
+            contents = file_path.read_text(encoding=encoding)
+            if len(contents) > self.max_file_length:
+                return "Error reading file: file too long. Use read_file_chunk instead"
+            if len(contents.split(self.line_separator)) > self.max_file_lines:
+                return "Error reading file: file too long. Use read_file_chunk instead"
+
             return str(contents)
         except Exception as e:
             log_error(f"Error reading file: {e}")
             return f"Error reading file: {e}"

-    def
-    """
+    def delete_file(self, file_name: str) -> str:
+        """Deletes a file
+        :param file_name: Name of the file to delete
+
+        :return: Empty string, if operation succeeded, otherwise returns an error message
+        """
+        safe, path = self.check_escape(file_name)
+        try:
+            if safe:
+                if path.is_dir():
+                    path.rmdir()
+                    return ""
+                path.unlink()
+                return ""
+            else:
+                log_error(f"Attempt to delete file outside {self.base_dir}: {file_name}")
+                return "Incorrect file_name"
+        except Exception as e:
+            log_error(f"Error removing {file_name}: {e}")
+            return f"Error removing file: {e}"
+
+    def check_escape(self, relative_path: str) -> Tuple[bool, Path]:
+        d = self.base_dir.joinpath(Path(relative_path)).resolve()
+        if self.base_dir == d:
+            return True, d
+        try:
+            d.relative_to(self.base_dir)
+        except ValueError:
+            log_error("Attempted to escape base_dir")
+            return False, self.base_dir
+        return True, d
+
+    def list_files(self, **kwargs) -> str:
+        """Returns a list of files in directory
+        :param directory: (Optional) name of directory to list.

         :return: The contents of the file if successful, otherwise returns an error message.
         """
+        directory = kwargs.get("directory", ".")
         try:
-
-
-
-
+            log_debug(f"Reading files in : {self.base_dir}/{directory}")
+            safe, d = self.check_escape(directory)
+            if safe:
+                return json.dumps([str(file_path.relative_to(self.base_dir)) for file_path in d.iterdir()], indent=4)
+            else:
+                return "{}"
         except Exception as e:
             log_error(f"Error reading files: {e}")
             return f"Error reading files: {e}"
@@ -94,15 +214,23 @@ class FileTools(Toolkit):

            log_debug(f"Searching files in {self.base_dir} with pattern {pattern}")
            matching_files = list(self.base_dir.glob(pattern))
-
-
-
-
-
-
-
-
-
+            result = None
+            if self.expose_base_directory:
+                file_paths = [str(file_path) for file_path in matching_files]
+                result = {
+                    "pattern": pattern,
+                    "matches_found": len(file_paths),
+                    "base_directory": str(self.base_dir),
+                    "files": file_paths,
+                }
+            else:
+                file_paths = [str(file_path.relative_to(self.base_dir)) for file_path in matching_files]
+
+                result = {
+                    "pattern": pattern,
+                    "matches_found": len(file_paths),
+                    "files": file_paths,
+                }
            log_debug(f"Found {len(file_paths)} files matching pattern {pattern}")
            return json.dumps(result, indent=2)

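In 2.2.6 every FileTools path now passes through check_escape, so reads, writes, and deletes stay confined to base_dir, and oversized files are steered toward the new chunk tools. A minimal usage sketch under those assumptions; the workspace directory, file names, and line ranges below are illustrative, not taken from the diff:

```python
from pathlib import Path

from agno.tools.file import FileTools

# Illustrative setup: a toolkit sandboxed to ./workspace with the new delete tool
# enabled (enable_delete_file is off by default) and the default size limits.
file_tools = FileTools(base_dir=Path("workspace"), enable_delete_file=True)

# save_file now accepts an encoding and logs the resolved path it wrote to.
file_tools.save_file(contents="alpha\nbeta\ngamma\n", file_name="notes.txt")

# Read only lines 0-1; indices refer to positions after splitting on line_separator.
print(file_tools.read_file_chunk(file_name="notes.txt", start_line=0, end_line=1))

# Replace those same lines; replace_file_chunk rewrites the file via save_file.
file_tools.replace_file_chunk(file_name="notes.txt", start_line=0, end_line=1, chunk="edited")

# check_escape rejects paths that resolve outside base_dir.
print(file_tools.read_file(file_name="../etc/passwd"))  # returns "Error reading file"

file_tools.delete_file("notes.txt")
```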
agno/tools/function.py
CHANGED
@@ -440,7 +440,7 @@ class Function(BaseModel):
     @staticmethod
     def _wrap_callable(func: Callable) -> Callable:
         """Wrap a callable with Pydantic's validate_call decorator, if relevant"""
-        from inspect import isasyncgenfunction, iscoroutinefunction
+        from inspect import isasyncgenfunction, iscoroutinefunction, signature

         pydantic_version = Version(version("pydantic"))

@@ -458,6 +458,10 @@ class Function(BaseModel):
         # Don't wrap callables that are already wrapped with validate_call
         elif getattr(func, "_wrapped_for_validation", False):
             return func
+        # Don't wrap functions with session_state parameter
+        # session_state needs to be passed by reference, not copied by pydantic's validation
+        elif "session_state" in signature(func).parameters:
+            return func
         # Wrap the callable with validate_call
         else:
             wrapped = validate_call(func, config=dict(arbitrary_types_allowed=True))  # type: ignore
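The practical effect is that a tool declaring a session_state parameter keeps receiving the caller's own dict rather than a validated copy, so in-place mutations survive the call. A small sketch of such a tool; the @tool decorator usage and the cart example are illustrative, assuming agno's standard tool decorator:

```python
from agno.tools import tool


@tool
def add_to_cart(session_state: dict, item: str) -> str:
    """Add an item to the cart stored in the shared session state."""
    # Because this function declares `session_state`, 2.2.6 skips pydantic's
    # validate_call wrapper, so this append mutates the caller's dict directly.
    session_state.setdefault("cart", []).append(item)
    return f"Added {item}. Cart: {session_state['cart']}"
```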
agno/tools/notion.py
ADDED
@@ -0,0 +1,201 @@
+import json
+import os
+from typing import Any, List, Optional
+
+from agno.tools import Toolkit
+from agno.utils.log import log_debug, logger
+
+try:
+    from notion_client import Client
+except ImportError:
+    raise ImportError("`notion-client` not installed. Please install using `pip install notion-client`")
+
+
+class NotionTools(Toolkit):
+    """
+    Notion toolkit for creating and managing Notion pages.
+
+    Args:
+        api_key (Optional[str]): Notion API key (integration token). If not provided, uses NOTION_API_KEY env var.
+        database_id (Optional[str]): The ID of the database to work with. If not provided, uses NOTION_DATABASE_ID env var.
+        enable_create_page (bool): Enable creating pages. Default is True.
+        enable_update_page (bool): Enable updating pages. Default is True.
+        enable_search_pages (bool): Enable searching pages. Default is True.
+        all (bool): Enable all tools. Overrides individual flags when True. Default is False.
+    """
+
+    def __init__(
+        self,
+        api_key: Optional[str] = None,
+        database_id: Optional[str] = None,
+        enable_create_page: bool = True,
+        enable_update_page: bool = True,
+        enable_search_pages: bool = True,
+        all: bool = False,
+        **kwargs,
+    ):
+        self.api_key = api_key or os.getenv("NOTION_API_KEY")
+        self.database_id = database_id or os.getenv("NOTION_DATABASE_ID")
+
+        if not self.api_key:
+            raise ValueError(
+                "Notion API key is required. Either pass api_key parameter or set NOTION_API_KEY environment variable."
+            )
+        if not self.database_id:
+            raise ValueError(
+                "Notion database ID is required. Either pass database_id parameter or set NOTION_DATABASE_ID environment variable."
+            )
+
+        self.client = Client(auth=self.api_key)
+
+        tools: List[Any] = []
+        if all or enable_create_page:
+            tools.append(self.create_page)
+        if all or enable_update_page:
+            tools.append(self.update_page)
+        if all or enable_search_pages:
+            tools.append(self.search_pages)
+
+        super().__init__(name="notion_tools", tools=tools, **kwargs)
+
+    def create_page(self, title: str, tag: str, content: str) -> str:
+        """Create a new page in the Notion database with a title, tag, and content.
+
+        Args:
+            title (str): The title of the page
+            tag (str): The tag/category for the page (e.g., travel, tech, general-blogs, fashion, documents)
+            content (str): The content to add to the page
+
+        Returns:
+            str: JSON string with page creation details
+        """
+        try:
+            log_debug(f"Creating Notion page with title: {title}, tag: {tag}")
+
+            # Create the page in the database
+            new_page = self.client.pages.create(
+                parent={"database_id": self.database_id},
+                properties={"Name": {"title": [{"text": {"content": title}}]}, "Tag": {"select": {"name": tag}}},
+                children=[
+                    {
+                        "object": "block",
+                        "type": "paragraph",
+                        "paragraph": {"rich_text": [{"type": "text", "text": {"content": content}}]},
+                    }
+                ],
+            )
+
+            result = {"success": True, "page_id": new_page["id"], "url": new_page["url"], "title": title, "tag": tag}
+            return json.dumps(result, indent=2)
+
+        except Exception as e:
+            logger.exception(e)
+            return json.dumps({"success": False, "error": str(e)})
+
+    def update_page(self, page_id: str, content: str) -> str:
+        """Add content to an existing Notion page.
+
+        Args:
+            page_id (str): The ID of the page to update
+            content (str): The content to append to the page
+
+        Returns:
+            str: JSON string with update status
+        """
+        try:
+            log_debug(f"Updating Notion page: {page_id}")
+
+            # Append content to the page
+            self.client.blocks.children.append(
+                block_id=page_id,
+                children=[
+                    {
+                        "object": "block",
+                        "type": "paragraph",
+                        "paragraph": {"rich_text": [{"type": "text", "text": {"content": content}}]},
+                    }
+                ],
+            )
+
+            result = {"success": True, "page_id": page_id, "message": "Content added successfully"}
+            return json.dumps(result, indent=2)
+
+        except Exception as e:
+            logger.exception(e)
+            return json.dumps({"success": False, "error": str(e)})
+
+    def search_pages(self, tag: str) -> str:
+        """Search for pages in the database by tag.
+
+        Args:
+            tag (str): The tag to search for
+
+        Returns:
+            str: JSON string with list of matching pages
+        """
+        try:
+            log_debug(f"Searching for pages with tag: {tag}")
+
+            import httpx
+
+            headers = {
+                "Authorization": f"Bearer {self.api_key}",
+                "Notion-Version": "2022-06-28",
+                "Content-Type": "application/json",
+            }
+
+            payload = {"filter": {"property": "Tag", "select": {"equals": tag}}}
+
+            # The SDK client does not support the query method
+            response = httpx.post(
+                f"https://api.notion.com/v1/databases/{self.database_id}/query",
+                headers=headers,
+                json=payload,
+                timeout=30.0,
+            )
+
+            if response.status_code != 200:
+                return json.dumps(
+                    {
+                        "success": False,
+                        "error": f"API request failed with status {response.status_code}",
+                        "message": response.text,
+                    }
+                )
+
+            data = response.json()
+            pages = []
+
+            for page in data.get("results", []):
+                try:
+                    page_title = "Untitled"
+                    if page.get("properties", {}).get("Name", {}).get("title"):
+                        page_title = page["properties"]["Name"]["title"][0]["text"]["content"]
+
+                    page_tag = None
+                    if page.get("properties", {}).get("Tag", {}).get("select"):
+                        page_tag = page["properties"]["Tag"]["select"]["name"]
+
+                    page_info = {
+                        "page_id": page["id"],
+                        "title": page_title,
+                        "tag": page_tag,
+                        "url": page.get("url", ""),
+                    }
+                    pages.append(page_info)
+                except Exception as page_error:
+                    log_debug(f"Error parsing page: {page_error}")
+                    continue
+
+            result = {"success": True, "count": len(pages), "pages": pages}
+            return json.dumps(result, indent=2)
+
+        except Exception as e:
+            logger.exception(e)
+            return json.dumps(
+                {
+                    "success": False,
+                    "error": str(e),
+                    "message": "Failed to search pages. Make sure the database is shared with the integration and has a 'Tag' property.",
+                }
+            )
agno/utils/events.py
CHANGED
@@ -106,6 +106,7 @@ def create_team_run_completed_event(from_run_response: TeamRunOutput) -> TeamRun
         member_responses=from_run_response.member_responses,  # type: ignore
         metadata=from_run_response.metadata,  # type: ignore
         metrics=from_run_response.metrics,  # type: ignore
+        session_state=from_run_response.session_state,  # type: ignore
     )


@@ -130,6 +131,7 @@ def create_run_completed_event(from_run_response: RunOutput) -> RunCompletedEven
         reasoning_messages=from_run_response.reasoning_messages,  # type: ignore
         metadata=from_run_response.metadata,  # type: ignore
         metrics=from_run_response.metrics,  # type: ignore
+        session_state=from_run_response.session_state,  # type: ignore
     )

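With session_state now copied onto the completion events, a streaming consumer can read the final session state straight from the completed event instead of refetching the session. A rough sketch, assuming the event class is importable from agno.run.agent; the import path, prompt, and initial state are assumptions, not part of the diff:

```python
from agno.agent import Agent
from agno.run.agent import RunCompletedEvent

agent = Agent(session_state={"counter": 0}, markdown=True)

# Stream the run and pull the final session state off the completed event.
for event in agent.run("Say hello", stream=True, stream_intermediate_steps=True):
    if isinstance(event, RunCompletedEvent):
        print(event.session_state)
```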