agno 2.4.0__py3-none-any.whl → 2.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,204 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import TYPE_CHECKING, Optional
4
+
5
+ from pydantic import BaseModel
6
+
7
+ if TYPE_CHECKING:
8
+ from agno.knowledge.remote_content.remote_content import (
9
+ GCSContent,
10
+ GitHubContent,
11
+ S3Content,
12
+ SharePointContent,
13
+ )
14
+
15
+
16
class RemoteContentConfig(BaseModel):
    """Base configuration for remote content sources.

    Concrete sources (S3, GCS, SharePoint, GitHub) subclass this and add
    their provider-specific connection fields.
    """

    # Unique identifier; content references link back to a config via this id.
    id: str
    # Human-readable name for the content source.
    name: str
    # Optional arbitrary metadata attached to the source.
    metadata: Optional[dict] = None

    class Config:
        # Accept provider-specific extra fields without raising validation errors.
        extra = "allow"
25
+
26
+
27
class S3Config(RemoteContentConfig):
    """Configuration for AWS S3 content source."""

    bucket_name: str
    region: Optional[str] = None
    aws_access_key_id: Optional[str] = None
    aws_secret_access_key: Optional[str] = None
    prefix: Optional[str] = None

    def file(self, key: str) -> "S3Content":
        """Build a reference to a single S3 object.

        Args:
            key: The S3 object key (path to file).

        Returns:
            S3Content linked back to this source via its config id.
        """
        from agno.knowledge.remote_content.remote_content import S3Content

        content = S3Content(
            bucket_name=self.bucket_name,
            key=key,
            config_id=self.id,
        )
        return content

    def folder(self, prefix: str) -> "S3Content":
        """Build a reference to every object under an S3 prefix.

        Args:
            prefix: The S3 prefix (folder path).

        Returns:
            S3Content linked back to this source via its config id.
        """
        from agno.knowledge.remote_content.remote_content import S3Content

        content = S3Content(
            bucket_name=self.bucket_name,
            prefix=prefix,
            config_id=self.id,
        )
        return content
69
+
70
+
71
class GcsConfig(RemoteContentConfig):
    """Configuration for Google Cloud Storage content source."""

    bucket_name: str
    project: Optional[str] = None
    credentials_path: Optional[str] = None
    prefix: Optional[str] = None

    def file(self, blob_name: str) -> "GCSContent":
        """Build a reference to a single GCS blob.

        Args:
            blob_name: The GCS blob name (path to file).

        Returns:
            GCSContent linked back to this source via its config id.
        """
        from agno.knowledge.remote_content.remote_content import GCSContent

        content = GCSContent(
            bucket_name=self.bucket_name,
            blob_name=blob_name,
            config_id=self.id,
        )
        return content

    def folder(self, prefix: str) -> "GCSContent":
        """Build a reference to every blob under a GCS prefix.

        Args:
            prefix: The GCS prefix (folder path).

        Returns:
            GCSContent linked back to this source via its config id.
        """
        from agno.knowledge.remote_content.remote_content import GCSContent

        content = GCSContent(
            bucket_name=self.bucket_name,
            prefix=prefix,
            config_id=self.id,
        )
        return content
112
+
113
+
114
class SharePointConfig(RemoteContentConfig):
    """Configuration for SharePoint content source."""

    tenant_id: str
    client_id: str
    client_secret: str
    hostname: str
    site_path: Optional[str] = None
    site_id: Optional[str] = None  # Full site ID (e.g., "contoso.sharepoint.com,guid1,guid2")
    folder_path: Optional[str] = None

    def file(self, file_path: str, site_path: Optional[str] = None) -> "SharePointContent":
        """Build a reference to a single SharePoint file.

        Args:
            file_path: Path to the file in SharePoint.
            site_path: Optional site path override.

        Returns:
            SharePointContent linked back to this source via its config id.
        """
        from agno.knowledge.remote_content.remote_content import SharePointContent

        # Per-call override wins; fall back to the configured site path.
        resolved_site = site_path or self.site_path
        return SharePointContent(
            config_id=self.id,
            file_path=file_path,
            site_path=resolved_site,
        )

    def folder(self, folder_path: str, site_path: Optional[str] = None) -> "SharePointContent":
        """Build a reference to a SharePoint folder.

        Args:
            folder_path: Path to the folder in SharePoint.
            site_path: Optional site path override.

        Returns:
            SharePointContent linked back to this source via its config id.
        """
        from agno.knowledge.remote_content.remote_content import SharePointContent

        # Per-call override wins; fall back to the configured site path.
        resolved_site = site_path or self.site_path
        return SharePointContent(
            config_id=self.id,
            folder_path=folder_path,
            site_path=resolved_site,
        )
160
+
161
+
162
class GitHubConfig(RemoteContentConfig):
    """Configuration for GitHub content source."""

    repo: str
    token: Optional[str] = None
    branch: Optional[str] = None
    path: Optional[str] = None

    def file(self, file_path: str, branch: Optional[str] = None) -> "GitHubContent":
        """Build a reference to a single file in the repository.

        Args:
            file_path: Path to the file in the repository.
            branch: Optional branch override.

        Returns:
            GitHubContent linked back to this source via its config id.
        """
        from agno.knowledge.remote_content.remote_content import GitHubContent

        # Per-call override wins; fall back to the configured branch.
        resolved_branch = branch or self.branch
        return GitHubContent(
            config_id=self.id,
            file_path=file_path,
            branch=resolved_branch,
        )

    def folder(self, folder_path: str, branch: Optional[str] = None) -> "GitHubContent":
        """Build a reference to a folder in the repository.

        Args:
            folder_path: Path to the folder in the repository.
            branch: Optional branch override.

        Returns:
            GitHubContent linked back to this source via its config id.
        """
        from agno.knowledge.remote_content.remote_content import GitHubContent

        # Per-call override wins; fall back to the configured branch.
        resolved_branch = branch or self.branch
        return GitHubContent(
            config_id=self.id,
            folder_path=folder_path,
            branch=resolved_branch,
        )
@@ -14,21 +14,23 @@ class S3Content:
14
14
  key: Optional[str] = None,
15
15
  object: Optional[S3Object] = None,
16
16
  prefix: Optional[str] = None,
17
+ config_id: Optional[str] = None,
17
18
  ):
18
19
  self.bucket_name = bucket_name
19
20
  self.bucket = bucket
20
21
  self.key = key
21
22
  self.object = object
22
23
  self.prefix = prefix
24
+ self.config_id = config_id
23
25
 
24
26
  if bucket_name is None and bucket is None:
25
27
  raise ValueError("Either bucket_name or bucket must be provided")
26
- if key is None and object is None:
27
- raise ValueError("Either key or object must be provided")
28
+ if key is None and object is None and prefix is None:
29
+ raise ValueError("Either key, object, or prefix must be provided")
28
30
  if bucket_name is not None and bucket is not None:
29
31
  raise ValueError("Either bucket_name or bucket must be provided, not both")
30
- if key is not None and object is not None:
31
- raise ValueError("Either key or object must be provided, not both")
32
+ if sum(x is not None for x in [key, object, prefix]) > 1:
33
+ raise ValueError("Only one of key, object, or prefix should be provided")
32
34
 
33
35
  if self.bucket_name is not None:
34
36
  self.bucket = S3Bucket(name=self.bucket_name)
@@ -40,6 +42,7 @@ class S3Content:
40
42
  "key": self.key,
41
43
  "object": self.object,
42
44
  "prefix": self.prefix,
45
+ "config_id": self.config_id,
43
46
  }
44
47
 
45
48
 
@@ -51,19 +54,13 @@ class GCSContent:
51
54
  bucket_name: Optional[str] = None,
52
55
  blob_name: Optional[str] = None,
53
56
  prefix: Optional[str] = None,
57
+ config_id: Optional[str] = None,
54
58
  ):
55
- # Import Google Cloud Storage only when actually needed
56
- try:
57
- from google.cloud import storage # type: ignore
58
- except ImportError:
59
- raise ImportError(
60
- "The `google-cloud-storage` package is not installed. Please install it via `pip install google-cloud-storage`."
61
- )
62
-
63
59
  self.bucket = bucket
64
60
  self.bucket_name = bucket_name
65
61
  self.blob_name = blob_name
66
62
  self.prefix = prefix
63
+ self.config_id = config_id
67
64
 
68
65
  if self.bucket is None and self.bucket_name is None:
69
66
  raise ValueError("No bucket or bucket_name provided")
@@ -72,17 +69,77 @@ class GCSContent:
72
69
  if self.blob_name is None and self.prefix is None:
73
70
  raise ValueError("Either blob_name or prefix must be provided")
74
71
 
75
- if self.bucket is None:
76
- client = storage.Client()
77
- self.bucket = client.bucket(self.bucket_name)
78
-
79
72
  def get_config(self):
80
73
  return {
81
74
  "bucket": self.bucket,
82
75
  "bucket_name": self.bucket_name,
83
76
  "blob_name": self.blob_name,
84
77
  "prefix": self.prefix,
78
+ "config_id": self.config_id,
79
+ }
80
+
81
+
82
class SharePointContent:
    """Content reference for a SharePoint file or folder.

    Exactly one of ``file_path`` or ``folder_path`` must be provided.

    NOTE: the previous ``@dataclass`` decorator declared no fields while the
    class defines its own ``__init__``; dataclasses still generated ``__eq__``
    and ``__repr__`` over the (empty) field set, so every instance compared
    equal and repr showed no state. The decorator has been removed; the class
    now uses default identity equality and repr.
    """

    def __init__(
        self,
        config_id: str,
        file_path: Optional[str] = None,
        folder_path: Optional[str] = None,
        site_path: Optional[str] = None,
        drive_id: Optional[str] = None,
    ):
        """
        Args:
            config_id: Id of the SharePointConfig holding site/credentials.
            file_path: Path to a single file in SharePoint.
            folder_path: Path to a folder in SharePoint.
            site_path: Optional site path override.
            drive_id: Optional drive identifier.

        Raises:
            ValueError: If neither or both of file_path/folder_path are given.
        """
        self.config_id = config_id
        self.file_path = file_path
        self.folder_path = folder_path
        self.site_path = site_path
        self.drive_id = drive_id

        if self.file_path is None and self.folder_path is None:
            raise ValueError("Either file_path or folder_path must be provided")
        if self.file_path is not None and self.folder_path is not None:
            raise ValueError("Provide either file_path or folder_path, not both")

    def get_config(self):
        """Return a serializable dict describing this content reference."""
        return {
            "config_id": self.config_id,
            "file_path": self.file_path,
            "folder_path": self.folder_path,
            "site_path": self.site_path,
            "drive_id": self.drive_id,
        }
113
+
114
+
115
class GitHubContent:
    """Content reference for a GitHub file or folder.

    Exactly one of ``file_path`` or ``folder_path`` must be provided.

    NOTE: the previous ``@dataclass`` decorator declared no fields while the
    class defines its own ``__init__``; dataclasses still generated ``__eq__``
    and ``__repr__`` over the (empty) field set, so every instance compared
    equal and repr showed no state. The decorator has been removed; the class
    now uses default identity equality and repr.
    """

    def __init__(
        self,
        config_id: str,
        file_path: Optional[str] = None,
        folder_path: Optional[str] = None,
        branch: Optional[str] = None,
    ):
        """
        Args:
            config_id: Id of the GitHubConfig holding repo/credentials.
            file_path: Path to a single file in the repository.
            folder_path: Path to a folder in the repository.
            branch: Optional branch override.

        Raises:
            ValueError: If neither or both of file_path/folder_path are given.
        """
        self.config_id = config_id
        self.file_path = file_path
        self.folder_path = folder_path
        self.branch = branch

        if self.file_path is None and self.folder_path is None:
            raise ValueError("Either file_path or folder_path must be provided")
        if self.file_path is not None and self.folder_path is not None:
            raise ValueError("Provide either file_path or folder_path, not both")

    def get_config(self):
        """Return a serializable dict describing this content reference."""
        return {
            "config_id": self.config_id,
            "file_path": self.file_path,
            "folder_path": self.folder_path,
            "branch": self.branch,
        }
86
143
 
87
144
 
88
- RemoteContent = Union[S3Content, GCSContent]
145
+ RemoteContent = Union[S3Content, GCSContent, SharePointContent, GitHubContent]
agno/models/base.py CHANGED
@@ -1988,6 +1988,7 @@ class Model(ABC):
1988
1988
 
1989
1989
  if isinstance(item, CustomEvent):
1990
1990
  function_call_output += str(item)
1991
+ item.tool_call_id = function_call.call_id
1991
1992
 
1992
1993
  # For WorkflowCompletedEvent, extract content for final output
1993
1994
  from agno.run.workflow import WorkflowCompletedEvent
@@ -2430,6 +2431,7 @@ class Model(ABC):
2430
2431
 
2431
2432
  if isinstance(item, CustomEvent):
2432
2433
  function_call_output += str(item)
2434
+ item.tool_call_id = function_call.call_id
2433
2435
 
2434
2436
  # For WorkflowCompletedEvent, extract content for final output
2435
2437
  from agno.run.workflow import WorkflowCompletedEvent
@@ -2507,8 +2509,12 @@ class Model(ABC):
2507
2509
  if async_gen_index in async_generator_outputs:
2508
2510
  _, async_function_call_output, error = async_generator_outputs[async_gen_index]
2509
2511
  if error:
2510
- log_error(f"Error in async generator: {error}")
2511
- raise error
2512
+ # Handle async generator exceptions gracefully like sync generators
2513
+ log_error(
2514
+ f"Error while iterating async generator for {function_call.function.name}: {error}"
2515
+ )
2516
+ function_call.error = str(error)
2517
+ function_call_success = False
2512
2518
  break
2513
2519
  async_gen_index += 1
2514
2520
 
@@ -2555,6 +2561,10 @@ class Model(ABC):
2555
2561
  yield ModelResponse(content=item.content)
2556
2562
  continue
2557
2563
 
2564
+ elif isinstance(item, CustomEvent):
2565
+ function_call_output += str(item)
2566
+ item.tool_call_id = function_call.call_id
2567
+
2558
2568
  # Yield the event itself to bubble it up
2559
2569
  yield item
2560
2570
  else:
@@ -97,6 +97,35 @@ class Cerebras(Model):
97
97
  client_params.update(self.client_params)
98
98
  return client_params
99
99
 
100
+ def _ensure_additional_properties_false(self, schema: Dict[str, Any]) -> None:
101
+ """
102
+ Recursively ensure all object types have additionalProperties: false.
103
+ Cerebras API requires this for JSON schema validation.
104
+ """
105
+ if not isinstance(schema, dict):
106
+ return
107
+
108
+ # Set additionalProperties: false for object types
109
+ if schema.get("type") == "object":
110
+ schema["additionalProperties"] = False
111
+
112
+ # Recursively process nested schemas
113
+ if "properties" in schema and isinstance(schema["properties"], dict):
114
+ for prop_schema in schema["properties"].values():
115
+ self._ensure_additional_properties_false(prop_schema)
116
+
117
+ if "items" in schema:
118
+ self._ensure_additional_properties_false(schema["items"])
119
+
120
+ if "$defs" in schema and isinstance(schema["$defs"], dict):
121
+ for def_schema in schema["$defs"].values():
122
+ self._ensure_additional_properties_false(def_schema)
123
+
124
+ for key in ["allOf", "anyOf", "oneOf"]:
125
+ if key in schema and isinstance(schema[key], list):
126
+ for item in schema[key]:
127
+ self._ensure_additional_properties_false(item)
128
+
100
129
  def get_client(self) -> CerebrasClient:
101
130
  """
102
131
  Returns a Cerebras client.
@@ -191,8 +220,11 @@ class Cerebras(Model):
191
220
  ):
192
221
  # Ensure json_schema has strict parameter set
193
222
  schema = response_format["json_schema"]
194
- if isinstance(schema.get("schema"), dict) and "strict" not in schema:
195
- schema["strict"] = self.strict_output
223
+ if isinstance(schema.get("schema"), dict):
224
+ if "strict" not in schema:
225
+ schema["strict"] = self.strict_output
226
+ # Cerebras requires additionalProperties: false for all object types
227
+ self._ensure_additional_properties_false(schema["schema"])
196
228
 
197
229
  request_params["response_format"] = response_format
198
230
 
@@ -0,0 +1,3 @@
1
+ from agno.models.n1n.n1n import N1N
2
+
3
+ __all__ = ["N1N"]
agno/models/n1n/n1n.py ADDED
@@ -0,0 +1,57 @@
1
+ from dataclasses import dataclass, field
2
+ from os import getenv
3
+ from typing import Any, Dict, Optional
4
+
5
+ from agno.exceptions import ModelAuthenticationError
6
+ from agno.models.openai.like import OpenAILike
7
+
8
+
9
@dataclass
class N1N(OpenAILike):
    """
    A class for interacting with n1n.ai models.

    Attributes:
        id (str): The model id. Defaults to "gpt-4o".
        name (str): The model name. Defaults to "N1N".
        provider (str): The provider name. Defaults to "N1N".
        api_key (Optional[str]): The API key.
        base_url (str): The base URL. Defaults to "https://api.n1n.ai/v1".
    """

    id: str = "gpt-4o"
    name: str = "N1N"
    provider: str = "N1N"

    api_key: Optional[str] = field(default_factory=lambda: getenv("N1N_API_KEY"))
    base_url: str = "https://api.n1n.ai/v1"

    def _get_client_params(self) -> Dict[str, Any]:
        """Assemble keyword arguments for the OpenAI-compatible client.

        Raises:
            ModelAuthenticationError: If no API key is set and N1N_API_KEY is
                not present in the environment.
        """
        # Resolve the API key lazily from the environment if not already set.
        self.api_key = self.api_key or getenv("N1N_API_KEY")
        if not self.api_key:
            raise ModelAuthenticationError(
                message="N1N_API_KEY not set. Please set the N1N_API_KEY environment variable.",
                model_name=self.name,
            )

        # Candidate client params; None values are dropped below.
        candidate_params: Dict[str, Any] = {
            "api_key": self.api_key,
            "organization": self.organization,
            "base_url": self.base_url,
            "timeout": self.timeout,
            "max_retries": self.max_retries,
            "default_headers": self.default_headers,
            "default_query": self.default_query,
        }
        client_params: Dict[str, Any] = {
            key: value for key, value in candidate_params.items() if value is not None
        }

        # Merge in any caller-supplied extras last so they take precedence.
        if self.client_params:
            client_params.update(self.client_params)
        return client_params
+ return client_params
@@ -43,6 +43,8 @@ class OpenAIChat(Model):
43
43
  name: str = "OpenAIChat"
44
44
  provider: str = "OpenAI"
45
45
  supports_native_structured_outputs: bool = True
46
+ # If True, only collect metrics on the final streaming chunk (for providers with cumulative token counts)
47
+ collect_metrics_on_completion: bool = False
46
48
 
47
49
  # Request parameters
48
50
  store: Optional[bool] = None
@@ -752,6 +754,21 @@ class OpenAIChat(Model):
752
754
  tool_call_entry["type"] = _tool_call_type
753
755
  return tool_calls
754
756
 
757
+ def _should_collect_metrics(self, response: ChatCompletionChunk) -> bool:
758
+ """
759
+ Determine if metrics should be collected from the response.
760
+ """
761
+ if not response.usage:
762
+ return False
763
+
764
+ if not self.collect_metrics_on_completion:
765
+ return True
766
+
767
+ if not response.choices:
768
+ return False
769
+
770
+ return response.choices[0].finish_reason is not None
771
+
755
772
  def _parse_provider_response(
756
773
  self,
757
774
  response: ChatCompletion,
@@ -920,7 +937,7 @@ class OpenAIChat(Model):
920
937
  log_warning(f"Error processing audio: {e}")
921
938
 
922
939
  # Add usage metrics if present
923
- if response_delta.usage is not None:
940
+ if self._should_collect_metrics(response_delta) and response_delta.usage is not None:
924
941
  model_response.response_usage = self._get_metrics(response_delta.usage)
925
942
 
926
943
  return model_response
@@ -41,6 +41,8 @@ class Perplexity(OpenAILike):
41
41
  id: str = "sonar"
42
42
  name: str = "Perplexity"
43
43
  provider: str = "Perplexity"
44
+ # Perplexity returns cumulative token counts in each streaming chunk, so only collect on final chunk
45
+ collect_metrics_on_completion: bool = True
44
46
 
45
47
  api_key: Optional[str] = field(default_factory=lambda: getenv("PERPLEXITY_API_KEY"))
46
48
  base_url: str = "https://api.perplexity.ai/"
@@ -7,7 +7,7 @@ from agno.agent import Agent, RemoteAgent
7
7
  from agno.os.interfaces.slack.security import verify_slack_signature
8
8
  from agno.team import RemoteTeam, Team
9
9
  from agno.tools.slack import SlackTools
10
- from agno.utils.log import log_info
10
+ from agno.utils.log import log_error, log_info
11
11
  from agno.workflow import RemoteWorkflow, Workflow
12
12
 
13
13
 
@@ -112,6 +112,15 @@ def attach_routes(
112
112
  response = await workflow.arun(message_text, user_id=user, session_id=session_id) # type: ignore
113
113
 
114
114
  if response:
115
+ if response.status == "ERROR":
116
+ log_error(f"Error processing message: {response.content}")
117
+ _send_slack_message(
118
+ channel=channel_id,
119
+ message="Sorry, there was an error processing your message. Please try again later.",
120
+ thread_ts=ts,
121
+ )
122
+ return
123
+
115
124
  if hasattr(response, "reasoning_content") and response.reasoning_content:
116
125
  _send_slack_message(
117
126
  channel=channel_id,
@@ -162,6 +162,12 @@ def attach_routes(
162
162
  videos=[Video(content=await get_media_async(message_video))] if message_video else None,
163
163
  audio=[Audio(content=await get_media_async(message_audio))] if message_audio else None,
164
164
  )
165
+ if response.status == "ERROR":
166
+ await _send_whatsapp_message(
167
+ phone_number, "Sorry, there was an error processing your message. Please try again later."
168
+ )
169
+ log_error(response.content)
170
+ return
165
171
 
166
172
  if response.reasoning_content:
167
173
  await _send_whatsapp_message(phone_number, f"Reasoning: \n{response.reasoning_content}", italics=True)
@@ -25,7 +25,7 @@ from agno.os.schema import (
25
25
  )
26
26
  from agno.os.settings import AgnoAPISettings
27
27
  from agno.registry import Registry
28
- from agno.utils.log import log_error
28
+ from agno.utils.log import log_error, log_warning
29
29
  from agno.utils.string import generate_id_from_name
30
30
 
31
31
  logger = logging.getLogger(__name__)
@@ -167,6 +167,15 @@ def attach_routes(
167
167
  config = body.config or {}
168
168
  config = _resolve_db_in_config(config, db, registry)
169
169
 
170
+ # Warn if creating a team without members
171
+ if body.component_type == ComponentType.TEAM:
172
+ members = config.get("members")
173
+ if not members or len(members) == 0:
174
+ log_warning(
175
+ f"Creating team '{body.name}' without members. "
176
+ "If this is unintended, add members to the config."
177
+ )
178
+
170
179
  component, _config = db.create_component_with_config(
171
180
  component_id=component_id,
172
181
  component_type=DbComponentType(body.component_type.value),