agno 2.3.3__py3-none-any.whl → 2.3.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/db/postgres/async_postgres.py +5 -1
- agno/models/aimlapi/aimlapi.py +18 -0
- agno/models/anthropic/claude.py +3 -1
- agno/models/cerebras/cerebras.py +7 -2
- agno/models/cerebras/cerebras_openai.py +18 -0
- agno/models/cohere/chat.py +10 -2
- agno/models/cometapi/cometapi.py +19 -1
- agno/models/deepinfra/deepinfra.py +19 -1
- agno/models/fireworks/fireworks.py +19 -1
- agno/models/google/gemini.py +5 -1
- agno/models/groq/groq.py +5 -1
- agno/models/huggingface/huggingface.py +5 -1
- agno/models/ibm/watsonx.py +5 -1
- agno/models/internlm/internlm.py +19 -1
- agno/models/langdb/langdb.py +10 -0
- agno/models/litellm/litellm_openai.py +19 -1
- agno/models/meta/llama.py +5 -1
- agno/models/meta/llama_openai.py +18 -0
- agno/models/mistral/mistral.py +5 -1
- agno/models/nvidia/nvidia.py +19 -1
- agno/models/openai/chat.py +5 -1
- agno/models/openai/responses.py +5 -1
- agno/models/openrouter/openrouter.py +20 -0
- agno/models/perplexity/perplexity.py +17 -0
- agno/models/requesty/requesty.py +18 -0
- agno/models/sambanova/sambanova.py +19 -1
- agno/models/siliconflow/siliconflow.py +19 -1
- agno/models/together/together.py +19 -1
- agno/models/vercel/v0.py +19 -1
- agno/models/xai/xai.py +18 -0
- agno/os/schema.py +12 -0
- agno/team/team.py +1 -0
- {agno-2.3.3.dist-info → agno-2.3.4.dist-info}/METADATA +1 -1
- {agno-2.3.3.dist-info → agno-2.3.4.dist-info}/RECORD +37 -37
- {agno-2.3.3.dist-info → agno-2.3.4.dist-info}/WHEEL +0 -0
- {agno-2.3.3.dist-info → agno-2.3.4.dist-info}/licenses/LICENSE +0 -0
- {agno-2.3.3.dist-info → agno-2.3.4.dist-info}/top_level.txt +0 -0
agno/db/postgres/async_postgres.py CHANGED

@@ -821,7 +821,7 @@ class AsyncPostgresDb(AsyncBaseDb):
         except Exception as e:
             log_error(f"Error deleting user memory: {e}")

-    async def delete_user_memories(self, memory_ids: List[str]) -> None:
+    async def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
         """Delete user memories from the database.

         Args:
@@ -835,6 +835,10 @@ class AsyncPostgresDb(AsyncBaseDb):

         async with self.async_session_factory() as sess, sess.begin():
             delete_stmt = table.delete().where(table.c.memory_id.in_(memory_ids))
+
+            if user_id is not None:
+                delete_stmt = delete_stmt.where(table.c.user_id == user_id)
+
             result = await sess.execute(delete_stmt)

             if result.rowcount == 0:  # type: ignore
agno/models/aimlapi/aimlapi.py CHANGED

@@ -2,6 +2,7 @@ from dataclasses import dataclass, field
 from os import getenv
 from typing import Any, Dict, Optional

+from agno.exceptions import ModelProviderError
 from agno.models.message import Message
 from agno.models.openai.like import OpenAILike

@@ -28,6 +29,23 @@ class AIMLAPI(OpenAILike):
     base_url: str = "https://api.aimlapi.com/v1"
     max_tokens: int = 4096

+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for AIMLAPI_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("AIMLAPI_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="AIMLAPI_API_KEY not set. Please set the AIMLAPI_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
+
     def _format_message(self, message: Message) -> Dict[str, Any]:
         """
         Minimal additional formatter that only replaces None with empty string.
agno/models/anthropic/claude.py CHANGED

@@ -154,7 +154,9 @@ class Claude(Model):

         self.api_key = self.api_key or getenv("ANTHROPIC_API_KEY")
         if not self.api_key:
-            ...
+            raise ModelProviderError(
+                "ANTHROPIC_API_KEY not set. Please set the ANTHROPIC_API_KEY environment variable."
+            )

         # Add API key to client parameters
         client_params["api_key"] = self.api_key
agno/models/cerebras/cerebras.py CHANGED

@@ -7,13 +7,14 @@ from typing import Any, Dict, Iterator, List, Optional, Type, Union
 import httpx
 from pydantic import BaseModel

+from agno.exceptions import ModelProviderError
 from agno.models.base import Model
 from agno.models.message import Message
 from agno.models.metrics import Metrics
 from agno.models.response import ModelResponse
 from agno.run.agent import RunOutput
 from agno.utils.http import get_default_async_client, get_default_sync_client
-from agno.utils.log import log_debug,
+from agno.utils.log import log_debug, log_warning

 try:
     from cerebras.cloud.sdk import AsyncCerebras as AsyncCerebrasClient
@@ -77,7 +78,11 @@ class Cerebras(Model):
         if not self.api_key:
             self.api_key = getenv("CEREBRAS_API_KEY")
         if not self.api_key:
-            ...
+            raise ModelProviderError(
+                message="CEREBRAS_API_KEY not set. Please set the CEREBRAS_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )

         # Define base client params
         base_params = {

agno/models/cerebras/cerebras_openai.py CHANGED

@@ -5,6 +5,7 @@ from typing import Any, Dict, List, Optional, Type, Union

 from pydantic import BaseModel

+from agno.exceptions import ModelProviderError
 from agno.models.message import Message
 from agno.models.openai.like import OpenAILike
 from agno.utils.log import log_debug
@@ -20,6 +21,23 @@ class CerebrasOpenAI(OpenAILike):
     base_url: str = "https://api.cerebras.ai/v1"
     api_key: Optional[str] = field(default_factory=lambda: getenv("CEREBRAS_API_KEY", None))

+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for CEREBRAS_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("CEREBRAS_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="CEREBRAS_API_KEY not set. Please set the CEREBRAS_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
+
     def get_request_params(
         self,
         response_format: Optional[Union[Dict, Type[BaseModel]]] = None,
agno/models/cohere/chat.py CHANGED

@@ -65,7 +65,11 @@ class Cohere(Model):

         self.api_key = self.api_key or getenv("CO_API_KEY")
         if not self.api_key:
-            ...
+            raise ModelProviderError(
+                message="CO_API_KEY not set. Please set the CO_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )

         _client_params["api_key"] = self.api_key

@@ -92,7 +96,11 @@ class Cohere(Model):
         self.api_key = self.api_key or getenv("CO_API_KEY")

         if not self.api_key:
-            ...
+            raise ModelProviderError(
+                message="CO_API_KEY not set. Please set the CO_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )

         _client_params["api_key"] = self.api_key

agno/models/cometapi/cometapi.py CHANGED

@@ -1,9 +1,10 @@
 from dataclasses import dataclass, field
 from os import getenv
-from typing import List, Optional
+from typing import Any, Dict, List, Optional

 import httpx

+from agno.exceptions import ModelProviderError
 from agno.models.openai.like import OpenAILike
 from agno.utils.log import log_debug

@@ -26,6 +27,23 @@ class CometAPI(OpenAILike):
     api_key: Optional[str] = field(default_factory=lambda: getenv("COMETAPI_KEY"))
     base_url: str = "https://api.cometapi.com/v1"

+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for COMETAPI_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("COMETAPI_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="COMETAPI_KEY not set. Please set the COMETAPI_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
+
     def get_available_models(self) -> List[str]:
         """
         Fetch available chat models from CometAPI, filtering out non-chat models.
agno/models/deepinfra/deepinfra.py CHANGED

@@ -1,7 +1,8 @@
 from dataclasses import dataclass, field
 from os import getenv
-from typing import Optional
+from typing import Any, Dict, Optional

+from agno.exceptions import ModelProviderError
 from agno.models.openai.like import OpenAILike


@@ -26,3 +27,20 @@ class DeepInfra(OpenAILike):
     base_url: str = "https://api.deepinfra.com/v1/openai"

     supports_native_structured_outputs: bool = False
+
+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for DEEPINFRA_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("DEEPINFRA_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="DEEPINFRA_API_KEY not set. Please set the DEEPINFRA_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
agno/models/fireworks/fireworks.py CHANGED

@@ -1,7 +1,8 @@
 from dataclasses import dataclass, field
 from os import getenv
-from typing import Optional
+from typing import Any, Dict, Optional

+from agno.exceptions import ModelProviderError
 from agno.models.openai import OpenAILike


@@ -24,3 +25,20 @@ class Fireworks(OpenAILike):

     api_key: Optional[str] = field(default_factory=lambda: getenv("FIREWORKS_API_KEY"))
     base_url: str = "https://api.fireworks.ai/inference/v1"
+
+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for FIREWORKS_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("FIREWORKS_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="FIREWORKS_API_KEY not set. Please set the FIREWORKS_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
agno/models/google/gemini.py CHANGED

@@ -141,7 +141,11 @@ class Gemini(Model):
         if not vertexai:
             self.api_key = self.api_key or getenv("GOOGLE_API_KEY")
             if not self.api_key:
-                ...
+                raise ModelProviderError(
+                    message="GOOGLE_API_KEY not set. Please set the GOOGLE_API_KEY environment variable.",
+                    model_name=self.name,
+                    model_id=self.id,
+                )
             client_params["api_key"] = self.api_key
         else:
             log_info("Using Vertex AI API")
agno/models/groq/groq.py CHANGED

@@ -74,7 +74,11 @@ class Groq(Model):
         if not self.api_key:
             self.api_key = getenv("GROQ_API_KEY")
         if not self.api_key:
-            ...
+            raise ModelProviderError(
+                message="GROQ_API_KEY not set. Please set the GROQ_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )

         # Define base client params
         base_params = {

agno/models/huggingface/huggingface.py CHANGED

@@ -73,7 +73,11 @@ class HuggingFace(Model):
     def get_client_params(self) -> Dict[str, Any]:
         self.api_key = self.api_key or getenv("HF_TOKEN")
         if not self.api_key:
-            ...
+            raise ModelProviderError(
+                message="HF_TOKEN not set. Please set the HF_TOKEN environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )

         _client_params: Dict[str, Any] = {}
         if self.api_key is not None:
agno/models/ibm/watsonx.py CHANGED

@@ -59,7 +59,11 @@ class WatsonX(Model):
         # Fetch API key and project ID from env if not already set
         self.api_key = self.api_key or getenv("IBM_WATSONX_API_KEY")
         if not self.api_key:
-            ...
+            raise ModelProviderError(
+                message="IBM_WATSONX_API_KEY not set. Please set the IBM_WATSONX_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )

         self.project_id = self.project_id or getenv("IBM_WATSONX_PROJECT_ID")
         if not self.project_id:
agno/models/internlm/internlm.py CHANGED

@@ -1,7 +1,8 @@
 from dataclasses import dataclass, field
 from os import getenv
-from typing import Optional
+from typing import Any, Dict, Optional

+from agno.exceptions import ModelProviderError
 from agno.models.openai.like import OpenAILike


@@ -24,3 +25,20 @@ class InternLM(OpenAILike):

     api_key: Optional[str] = field(default_factory=lambda: getenv("INTERNLM_API_KEY"))
     base_url: Optional[str] = "https://internlm-chat.intern-ai.org.cn/puyu/api/v1/chat/completions"
+
+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for INTERNLM_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("INTERNLM_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="INTERNLM_API_KEY not set. Please set the INTERNLM_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
agno/models/langdb/langdb.py CHANGED

@@ -2,6 +2,7 @@ from dataclasses import dataclass, field
 from os import getenv
 from typing import Any, Dict, Optional

+from agno.exceptions import ModelProviderError
 from agno.models.openai.like import OpenAILike


@@ -32,6 +33,15 @@ class LangDB(OpenAILike):
     default_headers: Optional[dict] = None

     def _get_client_params(self) -> Dict[str, Any]:
+        if not self.api_key:
+            self.api_key = getenv("LANGDB_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="LANGDB_API_KEY not set. Please set the LANGDB_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+
         if not self.project_id:
             raise ValueError("LANGDB_PROJECT_ID not set in the environment")

agno/models/litellm/litellm_openai.py CHANGED

@@ -1,7 +1,8 @@
 from dataclasses import dataclass, field
 from os import getenv
-from typing import Optional
+from typing import Any, Dict, Optional

+from agno.exceptions import ModelProviderError
 from agno.models.openai.like import OpenAILike


@@ -23,3 +24,20 @@ class LiteLLMOpenAI(OpenAILike):

     api_key: Optional[str] = field(default_factory=lambda: getenv("LITELLM_API_KEY"))
     base_url: str = "http://0.0.0.0:4000"
+
+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for LITELLM_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("LITELLM_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="LITELLM_API_KEY not set. Please set the LITELLM_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
agno/models/meta/llama.py CHANGED

@@ -74,7 +74,11 @@ class Llama(Model):
         if not self.api_key:
             self.api_key = getenv("LLAMA_API_KEY")
         if not self.api_key:
-            ...
+            raise ModelProviderError(
+                message="LLAMA_API_KEY not set. Please set the LLAMA_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )

         # Define base client params
         base_params = {
agno/models/meta/llama_openai.py CHANGED

@@ -7,6 +7,7 @@ try:
 except ImportError:
     raise ImportError("`openai` not installed. Please install using `pip install openai`")

+from agno.exceptions import ModelProviderError
 from agno.models.meta.llama import Message
 from agno.models.openai.like import OpenAILike
 from agno.utils.models.llama import format_message
@@ -49,6 +50,23 @@ class LlamaOpenAI(OpenAILike):
     # Cached async client
     openai_async_client: Optional[AsyncOpenAIClient] = None

+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for LLAMA_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("LLAMA_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="LLAMA_API_KEY not set. Please set the LLAMA_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
+
     def _format_message(self, message: Message) -> Dict[str, Any]:
         """
         Format a message into the format expected by Llama API.
agno/models/mistral/mistral.py CHANGED

@@ -94,7 +94,11 @@ class MistralChat(Model):

         self.api_key = self.api_key or getenv("MISTRAL_API_KEY")
         if not self.api_key:
-            ...
+            raise ModelProviderError(
+                message="MISTRAL_API_KEY not set. Please set the MISTRAL_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )

         client_params.update(
             {
agno/models/nvidia/nvidia.py CHANGED

@@ -1,7 +1,8 @@
 from dataclasses import dataclass, field
 from os import getenv
-from typing import Optional
+from typing import Any, Dict, Optional

+from agno.exceptions import ModelProviderError
 from agno.models.openai.like import OpenAILike


@@ -26,3 +27,20 @@ class Nvidia(OpenAILike):
     base_url: str = "https://integrate.api.nvidia.com/v1"

     supports_native_structured_outputs: bool = False
+
+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for NVIDIA_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("NVIDIA_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="NVIDIA_API_KEY not set. Please set the NVIDIA_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
agno/models/openai/chat.py CHANGED

@@ -102,7 +102,11 @@ class OpenAIChat(Model):
         if not self.api_key:
             self.api_key = getenv("OPENAI_API_KEY")
         if not self.api_key:
-            ...
+            raise ModelProviderError(
+                message="OPENAI_API_KEY not set. Please set the OPENAI_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )

         # Define base client params
         base_params = {
agno/models/openai/responses.py CHANGED

@@ -117,7 +117,11 @@ class OpenAIResponses(Model):
         if not self.api_key:
             self.api_key = getenv("OPENAI_API_KEY")
         if not self.api_key:
-            ...
+            raise ModelProviderError(
+                message="OPENAI_API_KEY not set. Please set the OPENAI_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )

         # Define base client params
         base_params = {
agno/models/openrouter/openrouter.py CHANGED

@@ -4,6 +4,7 @@ from typing import Any, Dict, List, Optional, Type, Union

 from pydantic import BaseModel

+from agno.exceptions import ModelProviderError
 from agno.models.openai.like import OpenAILike
 from agno.run.agent import RunOutput

@@ -34,6 +35,25 @@ class OpenRouter(OpenAILike):
     max_tokens: int = 1024
     models: Optional[List[str]] = None  # Dynamic model routing https://openrouter.ai/docs/features/model-routing

+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for OPENROUTER_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        # Fetch API key from env if not already set
+        if not self.api_key:
+            self.api_key = getenv("OPENROUTER_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="OPENROUTER_API_KEY not set. Please set the OPENROUTER_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+
+        return super()._get_client_params()
+
     def get_request_params(
         self,
         response_format: Optional[Union[Dict, Type[BaseModel]]] = None,
agno/models/perplexity/perplexity.py CHANGED

@@ -50,6 +50,23 @@ class Perplexity(OpenAILike):
     supports_native_structured_outputs: bool = False
     supports_json_schema_outputs: bool = True

+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for PERPLEXITY_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("PERPLEXITY_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="PERPLEXITY_API_KEY not set. Please set the PERPLEXITY_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
+
     def get_request_params(
         self,
         response_format: Optional[Union[Dict, Type[BaseModel]]] = None,
agno/models/requesty/requesty.py CHANGED

@@ -4,6 +4,7 @@ from typing import Any, Dict, List, Optional, Type, Union

 from pydantic import BaseModel

+from agno.exceptions import ModelProviderError
 from agno.models.openai.like import OpenAILike
 from agno.run.agent import RunOutput
 from agno.run.team import TeamRunOutput
@@ -30,6 +31,23 @@ class Requesty(OpenAILike):
     base_url: str = "https://router.requesty.ai/v1"
     max_tokens: int = 1024

+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for REQUESTY_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("REQUESTY_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="REQUESTY_API_KEY not set. Please set the REQUESTY_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
+
     def get_request_params(
         self,
         response_format: Optional[Union[Dict, Type[BaseModel]]] = None,
agno/models/sambanova/sambanova.py CHANGED

@@ -1,7 +1,8 @@
 from dataclasses import dataclass, field
 from os import getenv
-from typing import Optional
+from typing import Any, Dict, Optional

+from agno.exceptions import ModelProviderError
 from agno.models.openai.like import OpenAILike


@@ -26,3 +27,20 @@ class Sambanova(OpenAILike):
     base_url: str = "https://api.sambanova.ai/v1"

     supports_native_structured_outputs: bool = False
+
+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for SAMBANOVA_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("SAMBANOVA_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="SAMBANOVA_API_KEY not set. Please set the SAMBANOVA_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
agno/models/siliconflow/siliconflow.py CHANGED

@@ -1,7 +1,8 @@
 from dataclasses import dataclass
 from os import getenv
-from typing import Optional
+from typing import Any, Dict, Optional

+from agno.exceptions import ModelProviderError
 from agno.models.openai.like import OpenAILike


@@ -23,3 +24,20 @@ class Siliconflow(OpenAILike):
     provider: str = "Siliconflow"
     api_key: Optional[str] = getenv("SILICONFLOW_API_KEY")
     base_url: str = "https://api.siliconflow.com/v1"
+
+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for SILICONFLOW_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("SILICONFLOW_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="SILICONFLOW_API_KEY not set. Please set the SILICONFLOW_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
agno/models/together/together.py CHANGED

@@ -1,7 +1,8 @@
 from dataclasses import dataclass, field
 from os import getenv
-from typing import Optional
+from typing import Any, Dict, Optional

+from agno.exceptions import ModelProviderError
 from agno.models.openai.like import OpenAILike


@@ -23,3 +24,20 @@ class Together(OpenAILike):
     provider: str = "Together"
     api_key: Optional[str] = field(default_factory=lambda: getenv("TOGETHER_API_KEY"))
     base_url: str = "https://api.together.xyz/v1"
+
+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for TOGETHER_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("TOGETHER_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="TOGETHER_API_KEY not set. Please set the TOGETHER_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
agno/models/vercel/v0.py CHANGED

@@ -1,7 +1,8 @@
 from dataclasses import dataclass, field
 from os import getenv
-from typing import Optional
+from typing import Any, Dict, Optional

+from agno.exceptions import ModelProviderError
 from agno.models.openai.like import OpenAILike


@@ -24,3 +25,20 @@ class V0(OpenAILike):

     api_key: Optional[str] = field(default_factory=lambda: getenv("V0_API_KEY"))
     base_url: str = "https://api.v0.dev/v1/"
+
+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for V0_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("V0_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="V0_API_KEY not set. Please set the V0_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
agno/models/xai/xai.py CHANGED

@@ -4,6 +4,7 @@ from typing import Any, Dict, List, Optional, Type, Union

 from pydantic import BaseModel

+from agno.exceptions import ModelProviderError
 from agno.models.message import Citations, UrlCitation
 from agno.models.openai.like import OpenAILike
 from agno.models.response import ModelResponse
@@ -39,6 +40,23 @@ class xAI(OpenAILike):

     search_parameters: Optional[Dict[str, Any]] = None

+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for XAI_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("XAI_API_KEY")
+        if not self.api_key:
+            raise ModelProviderError(
+                message="XAI_API_KEY not set. Please set the XAI_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )
+        return super()._get_client_params()
+
     def get_request_params(
         self,
         response_format: Optional[Union[Dict, Type[BaseModel]]] = None,
agno/os/schema.py CHANGED

@@ -898,6 +898,9 @@ class RunSchema(BaseModel):
     events: Optional[List[dict]] = Field(None, description="Events generated during the run")
     created_at: Optional[datetime] = Field(None, description="Run creation timestamp")
     references: Optional[List[dict]] = Field(None, description="References cited in the run")
+    citations: Optional[Dict[str, Any]] = Field(
+        None, description="Citations from the model (e.g., from Gemini grounding/search)"
+    )
     reasoning_messages: Optional[List[dict]] = Field(None, description="Reasoning process messages")
     session_state: Optional[dict] = Field(None, description="Session state at the end of the run")
     images: Optional[List[dict]] = Field(None, description="Images included in the run")
@@ -926,6 +929,7 @@ class RunSchema(BaseModel):
             tools=[tool for tool in run_dict.get("tools", [])] if run_dict.get("tools") else None,
             events=[event for event in run_dict["events"]] if run_dict.get("events") else None,
             references=run_dict.get("references", []),
+            citations=run_dict.get("citations", None),
             reasoning_messages=run_dict.get("reasoning_messages", []),
             session_state=run_dict.get("session_state"),
             images=run_dict.get("images", []),
@@ -955,6 +959,9 @@ class TeamRunSchema(BaseModel):
     events: Optional[List[dict]] = Field(None, description="Events generated during the run")
     created_at: Optional[datetime] = Field(None, description="Run creation timestamp")
     references: Optional[List[dict]] = Field(None, description="References cited in the run")
+    citations: Optional[Dict[str, Any]] = Field(
+        None, description="Citations from the model (e.g., from Gemini grounding/search)"
+    )
     reasoning_messages: Optional[List[dict]] = Field(None, description="Reasoning process messages")
     session_state: Optional[dict] = Field(None, description="Session state at the end of the run")
     input_media: Optional[Dict[str, Any]] = Field(None, description="Input media attachments")
@@ -985,6 +992,7 @@ class TeamRunSchema(BaseModel):
             if run_dict.get("created_at") is not None
             else None,
             references=run_dict.get("references", []),
+            citations=run_dict.get("citations", None),
             reasoning_messages=run_dict.get("reasoning_messages", []),
             session_state=run_dict.get("session_state"),
             images=run_dict.get("images", []),
@@ -1012,6 +1020,9 @@ class WorkflowRunSchema(BaseModel):
     reasoning_content: Optional[str] = Field(None, description="Reasoning content if reasoning was enabled")
     reasoning_steps: Optional[List[dict]] = Field(None, description="List of reasoning steps")
     references: Optional[List[dict]] = Field(None, description="References cited in the workflow")
+    citations: Optional[Dict[str, Any]] = Field(
+        None, description="Citations from the model (e.g., from Gemini grounding/search)"
+    )
     reasoning_messages: Optional[List[dict]] = Field(None, description="Reasoning process messages")
     images: Optional[List[dict]] = Field(None, description="Images included in the workflow")
     videos: Optional[List[dict]] = Field(None, description="Videos included in the workflow")
@@ -1038,6 +1049,7 @@ class WorkflowRunSchema(BaseModel):
             reasoning_content=run_response.get("reasoning_content", ""),
             reasoning_steps=run_response.get("reasoning_steps", []),
             references=run_response.get("references", []),
+            citations=run_response.get("citations", None),
             reasoning_messages=run_response.get("reasoning_messages", []),
             images=run_response.get("images", []),
             videos=run_response.get("videos", []),
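
The new citations field is declared on RunSchema, TeamRunSchema, and WorkflowRunSchema and populated from the run dict during serialization, so API consumers no longer lose grounding citations. A minimal sketch that only inspects the declarations, assuming pydantic v2 (which exposes model_fields on BaseModel subclasses):

    from agno.os.schema import RunSchema, TeamRunSchema, WorkflowRunSchema

    # Inspect the declared fields only; no run data is needed for this check.
    for schema in (RunSchema, TeamRunSchema, WorkflowRunSchema):
        assert "citations" in schema.model_fields
        print(schema.__name__, schema.model_fields["citations"].description)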
agno/team/team.py CHANGED

{agno-2.3.3.dist-info → agno-2.3.4.dist-info}/RECORD CHANGED

@@ -68,7 +68,7 @@ agno/db/mysql/mysql.py,sha256=9Rbq9orJX6MtHHVb749cU3cl5DpneYBEOQBGOeMOv_U,99249
 agno/db/mysql/schemas.py,sha256=W_irrKPO9geylUFkzoZ8mG3nDKKTgROfVG2GdqQBDm8,6816
 agno/db/mysql/utils.py,sha256=PdqN-SxM-ox8HU9CZyxzvs2D1FE2vdZFVCyFgFcQsyU,12366
 agno/db/postgres/__init__.py,sha256=Ojk00nTCzQFiH2ViD7KIBjgpkTKLRNPCwWnuXMKtNXY,154
-agno/db/postgres/async_postgres.py,sha256=
+agno/db/postgres/async_postgres.py,sha256=bGigIlmjAaIlPeO9kAYIOBumZjST1ldL0-sKcz2CoFk,83447
 agno/db/postgres/postgres.py,sha256=PW7Y7HIOp1azGqZc2j-YAasv1E9wWlzywLqhVv-JV3Q,95477
 agno/db/postgres/schemas.py,sha256=ndtJVd5xx3RvqiOH5zh9tynHiaEabO0uAu4IUlozXnw,6423
 agno/db/postgres/utils.py,sha256=UE3UQZ-h7fADAKBsX4BWcDka54YNROEpBrlfTmDvpqc,15471
@@ -185,9 +185,9 @@ agno/models/metrics.py,sha256=81IILXZwGmOTiWK003bi5mg4bM1f4LCWbwyamjFzp18,4500
 agno/models/response.py,sha256=gkoRWEsChS544VL0grPmS9L0BM0FZgBWxGtR10PtoKc,6996
 agno/models/utils.py,sha256=jxAIIG2y7KBypwFlc87GzFnvogRpGLfd-wwr6KXZIj8,7269
 agno/models/aimlapi/__init__.py,sha256=XQcFRvt4qJ8ol9nCC0XKEkVEDivdNf3nZNoJZMZ5m8M,78
-agno/models/aimlapi/aimlapi.py,sha256=
+agno/models/aimlapi/aimlapi.py,sha256=oNq02Ja4gL5TG-g_v-4Ibr8J5hRl9GRT8o3Vvt4iOFs,2205
 agno/models/anthropic/__init__.py,sha256=nbReX3p17JCwfrMDR9hR7-OaEFZm80I7dng93dl-Fhw,77
-agno/models/anthropic/claude.py,sha256=
+agno/models/anthropic/claude.py,sha256=mIo-NdxN7MWO90bQXCKVUk_TRo2vYfgnAoeeAekQK6Q,46887
 agno/models/aws/__init__.py,sha256=TbcwQwv9A7KjqBM5RQBR8x46GvyyCxbBCjwkpjfVGKE,352
 agno/models/aws/bedrock.py,sha256=U_IzwNe2DMUzxHGykjWDGRFuVa9EJsPKWp--JXs-6F4,29818
 agno/models/aws/claude.py,sha256=3QdEBkOrmA2_BriRsbTdJszymYFJSyhu5pXJKW5X6UI,8722
@@ -195,85 +195,85 @@ agno/models/azure/__init__.py,sha256=EoFdJHjayvmv_VOmaW9cJguwA1K5OFS_nFeazyn0B2w
 agno/models/azure/ai_foundry.py,sha256=am28Us4Ub9yv1NqXi_OqZWldOlvkEOpxLfZre4hxkbA,19824
 agno/models/azure/openai_chat.py,sha256=X3IttroDTuMJzMwlXvTpsB5DS63j7IEDCPYDTXUKnBs,5439
 agno/models/cerebras/__init__.py,sha256=F3vE0lmMu-qDQ_Y7hg_czJitLsvNu4SfPv174wg1cq8,376
-agno/models/cerebras/cerebras.py,sha256=
-agno/models/cerebras/cerebras_openai.py,sha256=
+agno/models/cerebras/cerebras.py,sha256=PBrwiA1rk1wPR6cr55Ana5RzBKxo1jffjBq6kHze_oI,19466
+agno/models/cerebras/cerebras_openai.py,sha256=NRhrigD01nMdGegytk-ta8dTm7x-L0iSol78dhSTAe8,4933
 agno/models/cohere/__init__.py,sha256=4kFUnfPEL3__hd1TRW7fZxh7D_DctcpY5QDV58lR6s0,72
-agno/models/cohere/chat.py,sha256=
+agno/models/cohere/chat.py,sha256=xDyc4Cle_O5VOofnnhgyr-gFtPBaQWmsUWFUZm9T2ag,17037
 agno/models/cometapi/__init__.py,sha256=_t5JqHLyohg2u1RkkOR9eSwKsPoAhWUr48i9ei-Rn4Y,82
-agno/models/cometapi/cometapi.py,sha256=
+agno/models/cometapi/cometapi.py,sha256=UPfXT29y67B3g6kCEoPigFC9BPyvsy0bn_7aXgtO6ns,2657
 agno/models/dashscope/__init__.py,sha256=lHZTGvs7fVeX4N1G7JNMa4mfsSQHgQq02DsSVqYFqpw,86
 agno/models/dashscope/dashscope.py,sha256=FotbdJy79DfDGS6vaN23fBD_P-mz120jtXeerdorbdo,3393
 agno/models/deepinfra/__init__.py,sha256=24gMCeFHNbHw6l5gHZ1GwVg02546E9F_0yIZVSK15C8,86
-agno/models/deepinfra/deepinfra.py,sha256=
+agno/models/deepinfra/deepinfra.py,sha256=ayOl01Jsbo9iwi9B8eCiEmDmDiDrwQ5IBw8emVqg6N8,1647
 agno/models/deepseek/__init__.py,sha256=Q73VJ6rA0LqQbC0AWO6o5PWwr-Fdez7Imdar7X07LyU,82
 agno/models/deepseek/deepseek.py,sha256=Sc2Jtc6-lROb2BnSHzG_m6p48RPlYXbsyrgjHqYWNek,2187
 agno/models/fireworks/__init__.py,sha256=qIDjKUnwmrnwfa9B2Y3ybRyuUsF7Pzw6_bVq4N6M0Cg,86
-agno/models/fireworks/fireworks.py,sha256=
+agno/models/fireworks/fireworks.py,sha256=sQcGm9SWTpwrCLUFgHJ0A_7ufet3fAtDZffhM-7fbpc,1674
 agno/models/google/__init__.py,sha256=bEOSroFJ4__38XaCgBUWiOe_Qga66ZRm_gis__yIMmc,74
-agno/models/google/gemini.py,sha256=
+agno/models/google/gemini.py,sha256=36Y4yl2EWgjQAv0j8-ccY6gr3mVWMTofKK6HTgErEy8,72054
 agno/models/groq/__init__.py,sha256=gODf5IA4yJKlwTEYsUywmA-dsiQVyL2_yWMc8VncdVU,66
-agno/models/groq/groq.py,sha256=
+agno/models/groq/groq.py,sha256=SMh8Yl8NT6kbC4Gir8SMgeaWaC3J9OtsZX3EIZQN7V0,24191
 agno/models/huggingface/__init__.py,sha256=VgdYkgSHqsFLhvJ9lSUCyEZfest8hbCAUpWU6WCk-_c,94
-agno/models/huggingface/huggingface.py,sha256=
+agno/models/huggingface/huggingface.py,sha256=kOYO93zSdC9_4VAK3wwwpBbjnkfFFLqkHKieInEMHxM,20625
 agno/models/ibm/__init__.py,sha256=jwrz0JL4pd1cAPN7wLi51qgQfOB8kUIhFjs_oEc4NWc,74
-agno/models/ibm/watsonx.py,sha256=
+agno/models/ibm/watsonx.py,sha256=YUgNvon22bY0tMBYnNF1zJQdyi9VWgp2mI75SgNIGss,16772
 agno/models/internlm/__init__.py,sha256=88O1Vb6HuNls8KDUOKuQdKF_3iG9wI3uc56Xy-qBoMI,75
-agno/models/internlm/internlm.py,sha256=
+agno/models/internlm/internlm.py,sha256=bJy_O1A4ZW1P6C2ZboAffMUewRj9_eGgZTB7rarHWgI,1631
 agno/models/langdb/__init__.py,sha256=ubh5nDcxyH33_ONwsmY4tWQz5esRwRjHBe68u9hdAIM,45
-agno/models/langdb/langdb.py,sha256=
+agno/models/langdb/langdb.py,sha256=n2Cy07uPXOU1LDmL5d2BzVHcUugySFcMSD2XMhALPy4,2040
 agno/models/litellm/__init__.py,sha256=5e4yHqepF9-fOE0DMDIKnH6psFV1OcRgfAD5BaoVRgI,353
 agno/models/litellm/chat.py,sha256=BLVEdddJ5NCxIGq_S0sqPXAEIyp_bWsUtA6u5-JlA_Y,19814
-agno/models/litellm/litellm_openai.py,sha256=
+agno/models/litellm/litellm_openai.py,sha256=tYTsoeUS-hPApDGn6BsvPHO2Aiyi_dVvwyXgy0cm1Ms,1482
 agno/models/llama_cpp/__init__.py,sha256=oxOZfqEcOdcB74VLwOTO6bPmXHHA88uaeJO-IkXgr8A,84
 agno/models/llama_cpp/llama_cpp.py,sha256=tWIF0TQH-sK29xCrTe7oy0SjSC-FlOmwmPL1d5IVMPM,673
 agno/models/lmstudio/__init__.py,sha256=3GPW_YrtFalcpsyoHSFKCre9fYcMHf3gvNcMLerVOZg,82
 agno/models/lmstudio/lmstudio.py,sha256=E7pmyOcrYUzYr3IhgptL9_CnmI_clftnP4Erw6ADdoQ,756
 agno/models/meta/__init__.py,sha256=Of02Sw_EzexIdap-GHuDEcvGTSUbho4Eh66jG7xzha8,347
-agno/models/meta/llama.py,sha256=
-agno/models/meta/llama_openai.py,sha256=
+agno/models/meta/llama.py,sha256=SkPJRPARObNLML1iT7_Dewfaz0zupfFAj9EvtkY8Tn0,19080
+agno/models/meta/llama_openai.py,sha256=UgpA05OmCG6B7daJd28ElHmRKRz84qfX2rtdjEnfDwM,2736
 agno/models/mistral/__init__.py,sha256=6CP9TDn8oRUjtGBk1McvSQHrjY935vB6msGPlXBhkSw,86
-agno/models/mistral/mistral.py,sha256=
+agno/models/mistral/mistral.py,sha256=BSwvzNoEHGvsJyT22SHfYwgoZgfH9Rr3o7We1YVl14Q,16971
 agno/models/nebius/__init__.py,sha256=gW2yvxIfV2gxxOnBtTP8MCpI9AvMbIE6VTw-gY01Uvg,67
 agno/models/nebius/nebius.py,sha256=25NJ1aFdCOnfAaGmho-TLQ_vgbhWNn0fhLyvMq5d8a8,1915
 agno/models/nexus/__init__.py,sha256=q9pwjZ2KXpG1B3Cy8ujrj3_s0a_LI5SaekXJL6mh4gE,63
 agno/models/nexus/nexus.py,sha256=rJcBQXR1aqUiLWMPBRuHIEh87wVrsqXup1hr_smanBQ,635
 agno/models/nvidia/__init__.py,sha256=O0g3_0_ciOz0AH4Y4CAL7YRfhdDPAvhDzNjJmgWKT78,74
-agno/models/nvidia/nvidia.py,sha256=
+agno/models/nvidia/nvidia.py,sha256=ttWmimeAO6POlS5_vOGgow_sq-2Pu09zm0Jss6RhRnc,1667
 agno/models/ollama/__init__.py,sha256=TIhwxG7ek3eyfoKTLoZQXwdgzcIngYKjbjSlkf2gkWE,72
 agno/models/ollama/chat.py,sha256=Szc8rEWRvQ2CW50V5xAuccX4Ozc1BAV9wUPbFJhY_J8,16862
 agno/models/openai/__init__.py,sha256=OssVgQRpsriU6aJZ3lIp_jFuqvX6y78L4Fd3uTlmI3E,225
-agno/models/openai/chat.py,sha256=
+agno/models/openai/chat.py,sha256=kXwixF2xpnNENQe9y8KH2NNrJqRUPsLSmGyfj_cwCM4,39542
 agno/models/openai/like.py,sha256=wmw9PfAVqluBs4MMY73dgjelKn1yl5JDKyCRvaNFjFw,745
-agno/models/openai/responses.py,sha256=
+agno/models/openai/responses.py,sha256=DxbXNne0Mz-n0G0HjzgYPHYbkRrBnumPJnLEusIL3iw,46065
 agno/models/openrouter/__init__.py,sha256=ZpZhNyy_EGSXp58uC9e2iyjnxBctql7GaY8rUG-599I,90
-agno/models/openrouter/openrouter.py,sha256=
+agno/models/openrouter/openrouter.py,sha256=TEwhZSamABZ0WN8Q8gZW5N1s9TvOMHkZKPIVonncmNc,3372
 agno/models/perplexity/__init__.py,sha256=JNmOElDLwcZ9_Lk5owkEdgwmAhaH3YJ-VJqOI8rgp5c,90
-agno/models/perplexity/perplexity.py,sha256=
+agno/models/perplexity/perplexity.py,sha256=z6tFtbex23W6-Zkj8kcH_UWcChTezPa5PL5s_veUpvc,7665
 agno/models/portkey/__init__.py,sha256=CjGmltOuDlYfuJgpYHmfRkKiIS9W9MH4oYaGKaNNZeM,71
 agno/models/portkey/portkey.py,sha256=dtst4y85wkc8OeKqScbB15_9K6iDWWRo71mQV8lKYfY,3052
 agno/models/requesty/__init__.py,sha256=pcvbjspqNFhjxpbBcNki1tR6GoWsqU3idQuoPe1TiAg,82
-agno/models/requesty/requesty.py,sha256=
+agno/models/requesty/requesty.py,sha256=jzfYiqN74sr3soCUfjMmUnj6cg8Hoj69da9ZPXpnQqY,2614
 agno/models/sambanova/__init__.py,sha256=3RiEVJYiYxakKsQAmv4ATEuwT4iAezvHnr5SL1gEWHo,86
-agno/models/sambanova/sambanova.py,sha256=
+agno/models/sambanova/sambanova.py,sha256=ixjE4sLLE23kBOFuf7GeXx7TXRGmb-7vkcPUnmWKhHk,1725
 agno/models/siliconflow/__init__.py,sha256=TANxpIdp9mz6LV8YS9FRmy-2GfpgObrPKt19tlmBVj8,94
-agno/models/siliconflow/siliconflow.py,sha256=
+agno/models/siliconflow/siliconflow.py,sha256=hxps3F_LXHFmKoU8usEek4Lj4XxZoA0V_X6Yi5Jo4mE,1636
 agno/models/together/__init__.py,sha256=y6-pgHLEInpJtffjLGHkUWTDpoQNnMlKHa4fstyH6pk,82
-agno/models/together/together.py,sha256=
+agno/models/together/together.py,sha256=4mpTULfvPPH559YAzr9-6P22SW2s_ISxHWdTiCiC7aQ,1677
 agno/models/vercel/__init__.py,sha256=BYQD23dB-dmIXm8iy4S6yxXRW8xg24E9TLOgwckH674,55
-agno/models/vercel/v0.py,sha256=
+agno/models/vercel/v0.py,sha256=WtONm9eumGG7x-gkrUf7BB0Xt10fRVCsesREWotY4VQ,1529
 agno/models/vertexai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 agno/models/vertexai/claude.py,sha256=2mttuqwzHKEE5eIP8wvUpo2bGAOFYhG61_VODk6ZJC8,7530
 agno/models/vllm/__init__.py,sha256=G8cpCZtu4zJvMGud5eSYMMxGBpNTe9jM4W-p0XUeCGE,59
 agno/models/vllm/vllm.py,sha256=UtiiSvUR4pG_1CzuhY5MWduRgzM2hGVTakKJ6ZBdQmo,2730
 agno/models/xai/__init__.py,sha256=ukcCxnCHxTtkJNA2bAMTX4MhCv1wJcbiq8ZIfYczIxs,55
-agno/models/xai/xai.py,sha256=
+agno/models/xai/xai.py,sha256=hpIH7i0E1z7oN1QrgLzPlc-MDUxPNxxDGrWi7XxzUhU,4877
 agno/os/__init__.py,sha256=h8oQu7vhD5RZf09jkyM_Kt1Kdq_d5kFB9gJju8QPwcY,55
 agno/os/app.py,sha256=sAwBkRFAuJqix_KtHwrYEaViD7wlQTZ80fbzzh9WUro,34228
 agno/os/auth.py,sha256=FyBtAKWtg-qSunCas5m5pK1dVEmikOSZvcCp5r25tTA,1844
 agno/os/config.py,sha256=QPGxENF2yezEOp0yV9OXU-FBs4_vYSXkxbbSol51wPE,2932
 agno/os/mcp.py,sha256=7lAiELFmwcF-eN_pOIJVjun9r5dFcQfPTHD_rP1Zu-s,10318
 agno/os/router.py,sha256=4N0OZbA6q-CrE05GAmJqVjGum_ubJvvA1yNzkjVAXhU,76288
-agno/os/schema.py,sha256=
+agno/os/schema.py,sha256=eaO3pH-aCN9Qgw70a2aZF7w6iE5KVXKoJd-ACyla4ck,54267
 agno/os/settings.py,sha256=Cn5_8lZI8Vx1UaUYqs9h6Qp4IMDFn4f3c35uppiaMy4,1343
 agno/os/utils.py,sha256=Pi95WSSI9ohKD8F68G3pqSY4ngGIwxi1a3wyXb0VYKE,28169
 agno/os/interfaces/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
@@ -339,7 +339,7 @@ agno/session/summary.py,sha256=9JnDyQyggckd3zx6L8Q5f-lglZvrFQxvPjGU8gLCgR4,10292
 agno/session/team.py,sha256=-MkB6qQCrnXLKko8L5s9fJOWPsjeK5Gx0SXEPoOwSFQ,13437
 agno/session/workflow.py,sha256=nPHnh1N0SJby5JRjysCUI-kTDCelQMFfqosEnnLzPIg,19690
 agno/team/__init__.py,sha256=toHidBOo5M3n_TIVtIKHgcDbLL9HR-_U-YQYuIt_XtE,847
-agno/team/team.py,sha256=
+agno/team/team.py,sha256=MN1stFaWV3r2VWvS_w_jcWCFYyg4QKtTVMZ9-I7z0Vo,422795
 agno/tools/__init__.py,sha256=jNll2sELhPPbqm5nPeT4_uyzRO2_KRTW-8Or60kioS0,210
 agno/tools/agentql.py,sha256=S82Z9aTNr-E5wnA4fbFs76COljJtiQIjf2grjz3CkHU,4104
 agno/tools/airflow.py,sha256=uf2rOzZpSU64l_qRJ5Raku-R3Gky-uewmYkh6W0-oxg,2610
@@ -578,8 +578,8 @@ agno/workflow/step.py,sha256=_sge_L8WBWSYJRNtgzrfCWIPjrWyani1rCRTkQZu3EM,73296
 agno/workflow/steps.py,sha256=NXAOgQ8bssgl-6K1Fxd9zLm1m3ranPnMFJp-SM-GmA8,26706
 agno/workflow/types.py,sha256=LObJ0VkUtepZ-uewv3j283S4hrCXy0eCplQzIzRG1ic,19175
 agno/workflow/workflow.py,sha256=XjwOxrJrWUvsWJH7qYJs-0BX7z6xyzX9mbw8dMNevUg,189538
-agno-2.3.
-agno-2.3.
-agno-2.3.
-agno-2.3.
-agno-2.3.
+agno-2.3.4.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+agno-2.3.4.dist-info/METADATA,sha256=OfSTDT5WC3iRg9bF1Kg6A4R8SPVW607mNvpqI-ZI1yc,30904
+agno-2.3.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+agno-2.3.4.dist-info/top_level.txt,sha256=MKyeuVesTyOKIXUhc-d_tPa2Hrh0oTA4LM0izowpx70,5
+agno-2.3.4.dist-info/RECORD,,
The remaining dist-info files (WHEEL, licenses/LICENSE, top_level.txt) are unchanged between 2.3.3 and 2.3.4.