agno 2.3.3__py3-none-any.whl → 2.3.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. agno/agent/agent.py +177 -41
  2. agno/culture/manager.py +2 -2
  3. agno/db/base.py +330 -8
  4. agno/db/dynamo/dynamo.py +722 -2
  5. agno/db/dynamo/schemas.py +127 -0
  6. agno/db/firestore/firestore.py +573 -1
  7. agno/db/firestore/schemas.py +40 -0
  8. agno/db/gcs_json/gcs_json_db.py +446 -1
  9. agno/db/in_memory/in_memory_db.py +143 -1
  10. agno/db/json/json_db.py +438 -1
  11. agno/db/mongo/async_mongo.py +522 -0
  12. agno/db/mongo/mongo.py +523 -1
  13. agno/db/mongo/schemas.py +29 -0
  14. agno/db/mysql/mysql.py +536 -3
  15. agno/db/mysql/schemas.py +38 -0
  16. agno/db/postgres/async_postgres.py +546 -14
  17. agno/db/postgres/postgres.py +535 -2
  18. agno/db/postgres/schemas.py +38 -0
  19. agno/db/redis/redis.py +468 -1
  20. agno/db/redis/schemas.py +32 -0
  21. agno/db/singlestore/schemas.py +38 -0
  22. agno/db/singlestore/singlestore.py +523 -1
  23. agno/db/sqlite/async_sqlite.py +548 -9
  24. agno/db/sqlite/schemas.py +38 -0
  25. agno/db/sqlite/sqlite.py +537 -5
  26. agno/db/sqlite/utils.py +6 -8
  27. agno/db/surrealdb/models.py +25 -0
  28. agno/db/surrealdb/surrealdb.py +548 -1
  29. agno/eval/accuracy.py +10 -4
  30. agno/eval/performance.py +10 -4
  31. agno/eval/reliability.py +22 -13
  32. agno/exceptions.py +11 -0
  33. agno/hooks/__init__.py +3 -0
  34. agno/hooks/decorator.py +164 -0
  35. agno/knowledge/chunking/semantic.py +2 -2
  36. agno/models/aimlapi/aimlapi.py +17 -0
  37. agno/models/anthropic/claude.py +19 -12
  38. agno/models/aws/bedrock.py +3 -4
  39. agno/models/aws/claude.py +5 -1
  40. agno/models/azure/ai_foundry.py +2 -2
  41. agno/models/azure/openai_chat.py +8 -0
  42. agno/models/cerebras/cerebras.py +61 -4
  43. agno/models/cerebras/cerebras_openai.py +17 -0
  44. agno/models/cohere/chat.py +5 -1
  45. agno/models/cometapi/cometapi.py +18 -1
  46. agno/models/dashscope/dashscope.py +2 -3
  47. agno/models/deepinfra/deepinfra.py +18 -1
  48. agno/models/deepseek/deepseek.py +2 -3
  49. agno/models/fireworks/fireworks.py +18 -1
  50. agno/models/google/gemini.py +8 -2
  51. agno/models/groq/groq.py +5 -2
  52. agno/models/internlm/internlm.py +18 -1
  53. agno/models/langdb/langdb.py +13 -1
  54. agno/models/litellm/chat.py +2 -2
  55. agno/models/litellm/litellm_openai.py +18 -1
  56. agno/models/meta/llama_openai.py +19 -2
  57. agno/models/nebius/nebius.py +2 -3
  58. agno/models/nvidia/nvidia.py +20 -3
  59. agno/models/openai/chat.py +17 -2
  60. agno/models/openai/responses.py +17 -2
  61. agno/models/openrouter/openrouter.py +21 -2
  62. agno/models/perplexity/perplexity.py +17 -1
  63. agno/models/portkey/portkey.py +7 -6
  64. agno/models/requesty/requesty.py +19 -2
  65. agno/models/response.py +2 -1
  66. agno/models/sambanova/sambanova.py +20 -3
  67. agno/models/siliconflow/siliconflow.py +19 -2
  68. agno/models/together/together.py +20 -3
  69. agno/models/vercel/v0.py +20 -3
  70. agno/models/vllm/vllm.py +19 -14
  71. agno/models/xai/xai.py +19 -2
  72. agno/os/app.py +104 -0
  73. agno/os/config.py +13 -0
  74. agno/os/interfaces/whatsapp/router.py +0 -1
  75. agno/os/mcp.py +1 -0
  76. agno/os/router.py +31 -0
  77. agno/os/routers/traces/__init__.py +3 -0
  78. agno/os/routers/traces/schemas.py +414 -0
  79. agno/os/routers/traces/traces.py +499 -0
  80. agno/os/schema.py +22 -1
  81. agno/os/utils.py +57 -0
  82. agno/run/agent.py +1 -0
  83. agno/run/base.py +17 -0
  84. agno/run/team.py +4 -0
  85. agno/session/team.py +1 -0
  86. agno/table.py +10 -0
  87. agno/team/team.py +215 -65
  88. agno/tools/function.py +10 -8
  89. agno/tools/nano_banana.py +1 -1
  90. agno/tracing/__init__.py +12 -0
  91. agno/tracing/exporter.py +157 -0
  92. agno/tracing/schemas.py +276 -0
  93. agno/tracing/setup.py +111 -0
  94. agno/utils/agent.py +4 -4
  95. agno/utils/hooks.py +56 -1
  96. agno/vectordb/qdrant/qdrant.py +22 -22
  97. agno/workflow/condition.py +8 -0
  98. agno/workflow/loop.py +8 -0
  99. agno/workflow/parallel.py +8 -0
  100. agno/workflow/router.py +8 -0
  101. agno/workflow/step.py +20 -0
  102. agno/workflow/steps.py +8 -0
  103. agno/workflow/workflow.py +83 -17
  104. {agno-2.3.3.dist-info → agno-2.3.5.dist-info}/METADATA +2 -2
  105. {agno-2.3.3.dist-info → agno-2.3.5.dist-info}/RECORD +108 -98
  106. {agno-2.3.3.dist-info → agno-2.3.5.dist-info}/WHEEL +0 -0
  107. {agno-2.3.3.dist-info → agno-2.3.5.dist-info}/licenses/LICENSE +0 -0
  108. {agno-2.3.3.dist-info → agno-2.3.5.dist-info}/top_level.txt +0 -0
agno/models/cerebras/cerebras.py CHANGED
@@ -470,18 +470,19 @@ class Cerebras(Model):
             if choice_delta.content:
                 model_response.content = choice_delta.content

-            # Add tool calls
+            # Add tool calls - preserve index for proper aggregation in parse_tool_calls
             if choice_delta.tool_calls:
                 model_response.tool_calls = [
                     {
+                        "index": tool_call.index if hasattr(tool_call, "index") else idx,
                         "id": tool_call.id,
                         "type": tool_call.type,
                         "function": {
-                            "name": tool_call.function.name,
-                            "arguments": tool_call.function.arguments,
+                            "name": tool_call.function.name if tool_call.function else None,
+                            "arguments": tool_call.function.arguments if tool_call.function else None,
                         },
                     }
-                    for tool_call in choice_delta.tool_calls
+                    for idx, tool_call in enumerate(choice_delta.tool_calls)
                 ]

             # Add usage metrics
@@ -490,6 +491,62 @@ class Cerebras(Model):

         return model_response

+    def parse_tool_calls(self, tool_calls_data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+        """
+        Build complete tool calls from streamed tool call delta data.
+
+        Cerebras streams tool calls incrementally with partial data in each chunk.
+        This method aggregates those chunks by index to produce complete tool calls.
+
+        Args:
+            tool_calls_data: List of tool call deltas from streaming chunks.
+
+        Returns:
+            List[Dict[str, Any]]: List of fully-formed tool call dicts.
+        """
+        tool_calls: List[Dict[str, Any]] = []
+
+        for tool_call_delta in tool_calls_data:
+            # Get the index for this tool call (default to 0 if not present)
+            index = tool_call_delta.get("index", 0)
+
+            # Extend the list if needed
+            while len(tool_calls) <= index:
+                tool_calls.append({
+                    "id": None,
+                    "type": None,
+                    "function": {
+                        "name": "",
+                        "arguments": "",
+                    },
+                })
+
+            tool_call_entry = tool_calls[index]
+
+            # Update id if present
+            if tool_call_delta.get("id"):
+                tool_call_entry["id"] = tool_call_delta["id"]
+
+            # Update type if present
+            if tool_call_delta.get("type"):
+                tool_call_entry["type"] = tool_call_delta["type"]
+
+            # Update function name and arguments (concatenate for streaming)
+            if tool_call_delta.get("function"):
+                func_delta = tool_call_delta["function"]
+                if func_delta.get("name"):
+                    tool_call_entry["function"]["name"] += func_delta["name"]
+                if func_delta.get("arguments"):
+                    tool_call_entry["function"]["arguments"] += func_delta["arguments"]
+
+        # Filter out any incomplete tool calls (missing id or function name)
+        complete_tool_calls = [
+            tc for tc in tool_calls
+            if tc.get("id") and tc.get("function", {}).get("name")
+        ]
+
+        return complete_tool_calls
+
     def _get_metrics(self, response_usage: Union[ChatCompletionResponseUsage, ChatChunkResponseUsage]) -> Metrics:
         """
         Parse the given Cerebras usage into an Agno Metrics object.
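
The aggregation that the new parse_tool_calls method performs can be illustrated with a small standalone sketch. The helper below is not agno's code: it mirrors the logic added above, and the delta dicts are made-up values shaped like the ones the streaming handler now emits, including the preserved "index" field.

from typing import Any, Dict, List

def aggregate_tool_call_deltas(deltas: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    # Mirrors the index-based aggregation added in parse_tool_calls above (illustrative only).
    calls: List[Dict[str, Any]] = []
    for delta in deltas:
        index = delta.get("index", 0)
        while len(calls) <= index:
            calls.append({"id": None, "type": None, "function": {"name": "", "arguments": ""}})
        entry = calls[index]
        if delta.get("id"):
            entry["id"] = delta["id"]
        if delta.get("type"):
            entry["type"] = delta["type"]
        if delta.get("function"):
            if delta["function"].get("name"):
                entry["function"]["name"] += delta["function"]["name"]
            if delta["function"].get("arguments"):
                entry["function"]["arguments"] += delta["function"]["arguments"]
    # Drop entries that never received an id or a function name.
    return [c for c in calls if c["id"] and c["function"]["name"]]

deltas = [
    {"index": 0, "id": "call_1", "type": "function", "function": {"name": "get_weather", "arguments": ""}},
    {"index": 0, "id": None, "type": None, "function": {"name": None, "arguments": '{"city": "Par'}},
    {"index": 0, "id": None, "type": None, "function": {"name": None, "arguments": 'is"}'}},
]
print(aggregate_tool_call_deltas(deltas))
# [{'id': 'call_1', 'type': 'function', 'function': {'name': 'get_weather', 'arguments': '{"city": "Paris"}'}}]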
agno/models/cerebras/cerebras_openai.py CHANGED
@@ -5,6 +5,7 @@ from typing import Any, Dict, List, Optional, Type, Union

 from pydantic import BaseModel

+from agno.exceptions import ModelAuthenticationError
 from agno.models.message import Message
 from agno.models.openai.like import OpenAILike
 from agno.utils.log import log_debug
@@ -20,6 +21,22 @@ class CerebrasOpenAI(OpenAILike):
     base_url: str = "https://api.cerebras.ai/v1"
     api_key: Optional[str] = field(default_factory=lambda: getenv("CEREBRAS_API_KEY", None))

+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for CEREBRAS_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("CEREBRAS_API_KEY")
+        if not self.api_key:
+            raise ModelAuthenticationError(
+                message="CEREBRAS_API_KEY not set. Please set the CEREBRAS_API_KEY environment variable.",
+                model_name=self.name,
+            )
+        return super()._get_client_params()
+
     def get_request_params(
         self,
         response_format: Optional[Union[Dict, Type[BaseModel]]] = None,
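
With this change, a missing CEREBRAS_API_KEY surfaces as a ModelAuthenticationError as soon as client parameters are built, rather than as a later failure inside the OpenAI-compatible client. A hedged usage sketch, calling the private helper directly only to demonstrate the check; the module path follows the file list above, and the model id is a placeholder:

import os

from agno.exceptions import ModelAuthenticationError
from agno.models.cerebras.cerebras_openai import CerebrasOpenAI

os.environ.pop("CEREBRAS_API_KEY", None)  # simulate a missing key

try:
    # Placeholder model id; _get_client_params is invoked directly just to show the check.
    CerebrasOpenAI(id="llama-3.3-70b")._get_client_params()
except ModelAuthenticationError as exc:
    print(f"Authentication error raised early: {exc}")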
agno/models/cohere/chat.py CHANGED
@@ -92,7 +92,11 @@ class Cohere(Model):
         self.api_key = self.api_key or getenv("CO_API_KEY")

         if not self.api_key:
-            log_error("CO_API_KEY not set. Please set the CO_API_KEY environment variable.")
+            raise ModelProviderError(
+                message="CO_API_KEY not set. Please set the CO_API_KEY environment variable.",
+                model_name=self.name,
+                model_id=self.id,
+            )

         _client_params["api_key"] = self.api_key

agno/models/cometapi/cometapi.py CHANGED
@@ -1,9 +1,10 @@
 from dataclasses import dataclass, field
 from os import getenv
-from typing import List, Optional
+from typing import Any, Dict, List, Optional

 import httpx

+from agno.exceptions import ModelAuthenticationError
 from agno.models.openai.like import OpenAILike
 from agno.utils.log import log_debug

@@ -26,6 +27,22 @@ class CometAPI(OpenAILike):
     api_key: Optional[str] = field(default_factory=lambda: getenv("COMETAPI_KEY"))
     base_url: str = "https://api.cometapi.com/v1"

+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for COMETAPI_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("COMETAPI_KEY")
+        if not self.api_key:
+            raise ModelAuthenticationError(
+                message="COMETAPI_KEY not set. Please set the COMETAPI_KEY environment variable.",
+                model_name=self.name,
+            )
+        return super()._get_client_params()
+
     def get_available_models(self) -> List[str]:
         """
         Fetch available chat models from CometAPI, filtering out non-chat models.
agno/models/dashscope/dashscope.py CHANGED
@@ -4,7 +4,7 @@ from typing import Any, Dict, List, Optional, Type, Union

 from pydantic import BaseModel

-from agno.exceptions import ModelProviderError
+from agno.exceptions import ModelAuthenticationError
 from agno.models.openai.like import OpenAILike


@@ -43,10 +43,9 @@ class DashScope(OpenAILike):
         if not self.api_key:
             self.api_key = getenv("DASHSCOPE_API_KEY")
         if not self.api_key:
-            raise ModelProviderError(
+            raise ModelAuthenticationError(
                 message="DASHSCOPE_API_KEY not set. Please set the DASHSCOPE_API_KEY environment variable.",
                 model_name=self.name,
-                model_id=self.id,
             )

         # Define base client params
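
Several providers in this release swap ModelProviderError for ModelAuthenticationError on the missing-key path (and drop the model_id argument there). Calling code that wants to treat credential problems differently from other provider failures can catch the more specific error first; a minimal sketch, assuming only that both classes are importable from agno.exceptions as shown throughout this diff:

from agno.exceptions import ModelAuthenticationError, ModelProviderError

def call_with_error_handling(run):
    # `run` is any zero-argument callable that invokes a model.
    try:
        return run()
    except ModelAuthenticationError:
        # Credentials problem: fail fast, retrying will not help.
        raise
    except ModelProviderError:
        # Any other provider-side failure can be logged or retried separately.
        raise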
agno/models/deepinfra/deepinfra.py CHANGED
@@ -1,7 +1,8 @@
 from dataclasses import dataclass, field
 from os import getenv
-from typing import Optional
+from typing import Any, Dict, Optional

+from agno.exceptions import ModelAuthenticationError
 from agno.models.openai.like import OpenAILike


@@ -26,3 +27,19 @@ class DeepInfra(OpenAILike):
     base_url: str = "https://api.deepinfra.com/v1/openai"

     supports_native_structured_outputs: bool = False
+
+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for DEEPINFRA_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("DEEPINFRA_API_KEY")
+        if not self.api_key:
+            raise ModelAuthenticationError(
+                message="DEEPINFRA_API_KEY not set. Please set the DEEPINFRA_API_KEY environment variable.",
+                model_name=self.name,
+            )
+        return super()._get_client_params()
agno/models/deepseek/deepseek.py CHANGED
@@ -2,7 +2,7 @@ from dataclasses import dataclass, field
 from os import getenv
 from typing import Any, Dict, Optional

-from agno.exceptions import ModelProviderError
+from agno.exceptions import ModelAuthenticationError
 from agno.models.openai.like import OpenAILike


@@ -35,10 +35,9 @@ class DeepSeek(OpenAILike):
             self.api_key = getenv("DEEPSEEK_API_KEY")
         if not self.api_key:
             # Raise error immediately if key is missing
-            raise ModelProviderError(
+            raise ModelAuthenticationError(
                 message="DEEPSEEK_API_KEY not set. Please set the DEEPSEEK_API_KEY environment variable.",
                 model_name=self.name,
-                model_id=self.id,
             )

         # Define base client params
agno/models/fireworks/fireworks.py CHANGED
@@ -1,7 +1,8 @@
 from dataclasses import dataclass, field
 from os import getenv
-from typing import Optional
+from typing import Any, Dict, Optional

+from agno.exceptions import ModelAuthenticationError
 from agno.models.openai import OpenAILike


@@ -24,3 +25,19 @@ class Fireworks(OpenAILike):

     api_key: Optional[str] = field(default_factory=lambda: getenv("FIREWORKS_API_KEY"))
     base_url: str = "https://api.fireworks.ai/inference/v1"
+
+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for FIREWORKS_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("FIREWORKS_API_KEY")
+        if not self.api_key:
+            raise ModelAuthenticationError(
+                message="FIREWORKS_API_KEY not set. Please set the FIREWORKS_API_KEY environment variable.",
+                model_name=self.name,
+            )
+        return super()._get_client_params()
agno/models/google/gemini.py CHANGED
@@ -146,8 +146,14 @@ class Gemini(Model):
         else:
             log_info("Using Vertex AI API")
             client_params["vertexai"] = True
-            client_params["project"] = self.project_id or getenv("GOOGLE_CLOUD_PROJECT")
-            client_params["location"] = self.location or getenv("GOOGLE_CLOUD_LOCATION")
+            project_id = self.project_id or getenv("GOOGLE_CLOUD_PROJECT")
+            if not project_id:
+                log_error("GOOGLE_CLOUD_PROJECT not set. Please set the GOOGLE_CLOUD_PROJECT environment variable.")
+            location = self.location or getenv("GOOGLE_CLOUD_LOCATION")
+            if not location:
+                log_error("GOOGLE_CLOUD_LOCATION not set. Please set the GOOGLE_CLOUD_LOCATION environment variable.")
+            client_params["project"] = project_id
+            client_params["location"] = location

         client_params = {k: v for k, v in client_params.items() if v is not None}

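The Vertex AI branch now reports a missing project or location explicitly instead of silently passing None through. A minimal sketch of satisfying those checks through the environment variables named in the hunk; the values are placeholders, and passing project_id and location on the model is the other path the code still supports:

import os

# Env var names come from the Gemini hunk above; values are placeholders.
os.environ["GOOGLE_CLOUD_PROJECT"] = "my-gcp-project"
os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
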
agno/models/groq/groq.py CHANGED
@@ -6,7 +6,7 @@ from typing import Any, Dict, Iterator, List, Optional, Type, Union
 import httpx
 from pydantic import BaseModel

-from agno.exceptions import ModelProviderError
+from agno.exceptions import ModelAuthenticationError, ModelProviderError
 from agno.models.base import Model
 from agno.models.message import Message
 from agno.models.metrics import Metrics
@@ -74,7 +74,10 @@ class Groq(Model):
         if not self.api_key:
             self.api_key = getenv("GROQ_API_KEY")
         if not self.api_key:
-            log_error("GROQ_API_KEY not set. Please set the GROQ_API_KEY environment variable.")
+            raise ModelAuthenticationError(
+                message="GROQ_API_KEY not set. Please set the GROQ_API_KEY environment variable.",
+                model_name=self.name,
+            )

         # Define base client params
         base_params = {
agno/models/internlm/internlm.py CHANGED
@@ -1,7 +1,8 @@
 from dataclasses import dataclass, field
 from os import getenv
-from typing import Optional
+from typing import Any, Dict, Optional

+from agno.exceptions import ModelAuthenticationError
 from agno.models.openai.like import OpenAILike


@@ -24,3 +25,19 @@ class InternLM(OpenAILike):

     api_key: Optional[str] = field(default_factory=lambda: getenv("INTERNLM_API_KEY"))
     base_url: Optional[str] = "https://internlm-chat.intern-ai.org.cn/puyu/api/v1/chat/completions"
+
+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for INTERNLM_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("INTERNLM_API_KEY")
+        if not self.api_key:
+            raise ModelAuthenticationError(
+                message="INTERNLM_API_KEY not set. Please set the INTERNLM_API_KEY environment variable.",
+                model_name=self.name,
+            )
+        return super()._get_client_params()
agno/models/langdb/langdb.py CHANGED
@@ -2,6 +2,7 @@ from dataclasses import dataclass, field
 from os import getenv
 from typing import Any, Dict, Optional

+from agno.exceptions import ModelAuthenticationError
 from agno.models.openai.like import OpenAILike


@@ -32,8 +33,19 @@ class LangDB(OpenAILike):
     default_headers: Optional[dict] = None

     def _get_client_params(self) -> Dict[str, Any]:
+        if not self.api_key:
+            self.api_key = getenv("LANGDB_API_KEY")
+        if not self.api_key:
+            raise ModelAuthenticationError(
+                message="LANGDB_API_KEY not set. Please set the LANGDB_API_KEY environment variable.",
+                model_name=self.name,
+            )
+
         if not self.project_id:
-            raise ValueError("LANGDB_PROJECT_ID not set in the environment")
+            raise ModelAuthenticationError(
+                message="LANGDB_PROJECT_ID not set. Please set the LANGDB_PROJECT_ID environment variable.",
+                model_name=self.name,
+            )

         if not self.base_url:
             self.base_url = f"{self.base_host_url}/{self.project_id}/v1"
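
LangDB now raises ModelAuthenticationError for both a missing API key and a missing project id, where the latter was previously a bare ValueError. A hedged sketch of what calling code can rely on; the module path follows the file list above, the id, key, and project id values are placeholders, and the constructor keywords are assumed from the attributes shown in the hunk:

from agno.exceptions import ModelAuthenticationError
from agno.models.langdb.langdb import LangDB

try:
    # Placeholder values; a missing key or project id now raises the same exception type.
    LangDB(id="gpt-4o", api_key="placeholder-key", project_id=None)._get_client_params()
except ModelAuthenticationError as exc:
    print(f"Missing LangDB configuration: {exc}")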
agno/models/litellm/chat.py CHANGED
@@ -57,8 +57,8 @@ class LiteLLM(Model):
         # Check for other present valid keys, e.g. OPENAI_API_KEY if self.id is an OpenAI model
         env_validation = validate_environment(model=self.id, api_base=self.api_base)
         if not env_validation.get("keys_in_environment"):
-            log_warning(
-                "Missing required key. Please set the LITELLM_API_KEY or other valid environment variables."
+            log_error(
+                "LITELLM_API_KEY not set. Please set the LITELLM_API_KEY or other valid environment variables."
             )

     def get_client(self) -> Any:
agno/models/litellm/litellm_openai.py CHANGED
@@ -1,7 +1,8 @@
 from dataclasses import dataclass, field
 from os import getenv
-from typing import Optional
+from typing import Any, Dict, Optional

+from agno.exceptions import ModelAuthenticationError
 from agno.models.openai.like import OpenAILike


@@ -23,3 +24,19 @@ class LiteLLMOpenAI(OpenAILike):

     api_key: Optional[str] = field(default_factory=lambda: getenv("LITELLM_API_KEY"))
     base_url: str = "http://0.0.0.0:4000"
+
+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for LITELLM_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("LITELLM_API_KEY")
+        if not self.api_key:
+            raise ModelAuthenticationError(
+                message="LITELLM_API_KEY not set. Please set the LITELLM_API_KEY environment variable.",
+                model_name=self.name,
+            )
+        return super()._get_client_params()
agno/models/meta/llama_openai.py CHANGED
@@ -1,4 +1,4 @@
-from dataclasses import dataclass, field
+from dataclasses import dataclass
 from os import getenv
 from typing import Any, Dict, Optional

@@ -7,6 +7,7 @@ try:
 except ImportError:
     raise ImportError("`openai` not installed. Please install using `pip install openai`")

+from agno.exceptions import ModelAuthenticationError
 from agno.models.meta.llama import Message
 from agno.models.openai.like import OpenAILike
 from agno.utils.models.llama import format_message
@@ -29,7 +30,7 @@ class LlamaOpenAI(OpenAILike):
     name: str = "LlamaOpenAI"
     provider: str = "LlamaOpenAI"

-    api_key: Optional[str] = field(default_factory=lambda: getenv("LLAMA_API_KEY"))
+    api_key: Optional[str] = None
     base_url: Optional[str] = "https://api.llama.com/compat/v1/"

     # Request parameters
@@ -49,6 +50,22 @@ class LlamaOpenAI(OpenAILike):
     # Cached async client
     openai_async_client: Optional[AsyncOpenAIClient] = None

+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for LLAMA_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("LLAMA_API_KEY")
+        if not self.api_key:
+            raise ModelAuthenticationError(
+                message="LLAMA_API_KEY not set. Please set the LLAMA_API_KEY environment variable.",
+                model_name=self.name,
+            )
+        return super()._get_client_params()
+
     def _format_message(self, message: Message) -> Dict[str, Any]:
         """
         Format a message into the format expected by Llama API.
agno/models/nebius/nebius.py CHANGED
@@ -2,7 +2,7 @@ from dataclasses import dataclass, field
 from os import getenv
 from typing import Any, Dict, Optional

-from agno.exceptions import ModelProviderError
+from agno.exceptions import ModelAuthenticationError
 from agno.models.openai.like import OpenAILike


@@ -28,10 +28,9 @@ class Nebius(OpenAILike):

     def _get_client_params(self) -> Dict[str, Any]:
         if not self.api_key:
-            raise ModelProviderError(
+            raise ModelAuthenticationError(
                 message="NEBIUS_API_KEY not set. Please set the NEBIUS_API_KEY environment variable.",
                 model_name=self.name,
-                model_id=self.id,
             )

         # Define base client params
agno/models/nvidia/nvidia.py CHANGED
@@ -1,7 +1,8 @@
-from dataclasses import dataclass, field
+from dataclasses import dataclass
 from os import getenv
-from typing import Optional
+from typing import Any, Dict, Optional

+from agno.exceptions import ModelAuthenticationError
 from agno.models.openai.like import OpenAILike


@@ -22,7 +23,23 @@ class Nvidia(OpenAILike):
     name: str = "Nvidia"
     provider: str = "Nvidia"

-    api_key: Optional[str] = field(default_factory=lambda: getenv("NVIDIA_API_KEY"))
+    api_key: Optional[str] = None
     base_url: str = "https://integrate.api.nvidia.com/v1"

     supports_native_structured_outputs: bool = False
+
+    def _get_client_params(self) -> Dict[str, Any]:
+        """
+        Returns client parameters for API requests, checking for NVIDIA_API_KEY.
+
+        Returns:
+            Dict[str, Any]: A dictionary of client parameters for API requests.
+        """
+        if not self.api_key:
+            self.api_key = getenv("NVIDIA_API_KEY")
+        if not self.api_key:
+            raise ModelAuthenticationError(
+                message="NVIDIA_API_KEY not set. Please set the NVIDIA_API_KEY environment variable.",
+                model_name=self.name,
+            )
+        return super()._get_client_params()
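
For Nvidia (and LlamaOpenAI above) the api_key default moves from an eager field(default_factory=...) read at instantiation time to a lazy lookup inside _get_client_params. An explicitly passed key therefore still takes precedence, and NVIDIA_API_KEY is only consulted when no key was given. A hedged sketch; the module path follows the file list above, and the model id and key are placeholders:

from agno.models.nvidia.nvidia import Nvidia

# Explicit key: NVIDIA_API_KEY is never consulted (placeholder id and key).
model = Nvidia(id="meta/llama-3.1-8b-instruct", api_key="nvapi-placeholder")

# No key passed: _get_client_params falls back to NVIDIA_API_KEY and raises
# ModelAuthenticationError if that is unset as well.
model_from_env = Nvidia(id="meta/llama-3.1-8b-instruct")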
agno/models/openai/chat.py CHANGED
@@ -7,7 +7,7 @@ from uuid import uuid4
 import httpx
 from pydantic import BaseModel

-from agno.exceptions import ModelProviderError
+from agno.exceptions import ModelAuthenticationError, ModelProviderError
 from agno.media import Audio
 from agno.models.base import Model
 from agno.models.message import Message
@@ -102,7 +102,10 @@ class OpenAIChat(Model):
         if not self.api_key:
             self.api_key = getenv("OPENAI_API_KEY")
         if not self.api_key:
-            log_error("OPENAI_API_KEY not set. Please set the OPENAI_API_KEY environment variable.")
+            raise ModelAuthenticationError(
+                message="OPENAI_API_KEY not set. Please set the OPENAI_API_KEY environment variable.",
+                model_name=self.name,
+            )

         # Define base client params
         base_params = {
@@ -447,6 +450,9 @@ class OpenAIChat(Model):
                 model_name=self.name,
                 model_id=self.id,
             ) from e
+        except ModelAuthenticationError as e:
+            log_error(f"Model authentication error from OpenAI API: {e}")
+            raise e
         except Exception as e:
             log_error(f"Error from OpenAI API: {e}")
             raise ModelProviderError(message=str(e), model_name=self.name, model_id=self.id) from e
@@ -528,6 +534,9 @@ class OpenAIChat(Model):
                 model_name=self.name,
                 model_id=self.id,
             ) from e
+        except ModelAuthenticationError as e:
+            log_error(f"Model authentication error from OpenAI API: {e}")
+            raise e
         except Exception as e:
             log_error(f"Error from OpenAI API: {e}")
             raise ModelProviderError(message=str(e), model_name=self.name, model_id=self.id) from e
@@ -606,6 +615,9 @@ class OpenAIChat(Model):
                 model_name=self.name,
                 model_id=self.id,
             ) from e
+        except ModelAuthenticationError as e:
+            log_error(f"Model authentication error from OpenAI API: {e}")
+            raise e
         except Exception as e:
             log_error(f"Error from OpenAI API: {e}")
             raise ModelProviderError(message=str(e), model_name=self.name, model_id=self.id) from e
@@ -686,6 +698,9 @@ class OpenAIChat(Model):
                 model_name=self.name,
                 model_id=self.id,
             ) from e
+        except ModelAuthenticationError as e:
+            log_error(f"Model authentication error from OpenAI API: {e}")
+            raise e
         except Exception as e:
             log_error(f"Error from OpenAI API: {e}")
             raise ModelProviderError(message=str(e), model_name=self.name, model_id=self.id) from e
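
All four handler additions in openai/chat.py follow one pattern: re-raise the authentication error before the broad except Exception so it is not re-wrapped into a generic ModelProviderError. A self-contained toy sketch of why that ordering matters, using stand-in exception classes rather than agno's:

class ProviderError(Exception): ...
class AuthError(ProviderError): ...  # stand-ins for agno's exception types

def invoke(fail_with: Exception):
    try:
        raise fail_with
    except AuthError:
        raise                                # keep the specific type
    except Exception as e:
        raise ProviderError(str(e)) from e   # wrap everything else

try:
    invoke(AuthError("OPENAI_API_KEY not set"))
except AuthError as e:
    print("auth error surfaced unwrapped:", e)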
agno/models/openai/responses.py CHANGED
@@ -6,7 +6,7 @@ import httpx
 from pydantic import BaseModel
 from typing_extensions import Literal

-from agno.exceptions import ModelProviderError
+from agno.exceptions import ModelAuthenticationError, ModelProviderError
 from agno.media import File
 from agno.models.base import Model
 from agno.models.message import Citations, Message, UrlCitation
@@ -117,7 +117,10 @@ class OpenAIResponses(Model):
         if not self.api_key:
             self.api_key = getenv("OPENAI_API_KEY")
         if not self.api_key:
-            log_error("OPENAI_API_KEY not set. Please set the OPENAI_API_KEY environment variable.")
+            raise ModelAuthenticationError(
+                message="OPENAI_API_KEY not set. Please set the OPENAI_API_KEY environment variable.",
+                model_name=self.name,
+            )

         # Define base client params
         base_params = {
@@ -582,6 +585,9 @@ class OpenAIResponses(Model):
                 model_name=self.name,
                 model_id=self.id,
             ) from exc
+        except ModelAuthenticationError as exc:
+            log_error(f"Model authentication error from OpenAI API: {exc}")
+            raise exc
         except Exception as exc:
             log_error(f"Error from OpenAI API: {exc}")
             raise ModelProviderError(message=str(exc), model_name=self.name, model_id=self.id) from exc
@@ -652,6 +658,9 @@ class OpenAIResponses(Model):
                 model_name=self.name,
                 model_id=self.id,
             ) from exc
+        except ModelAuthenticationError as exc:
+            log_error(f"Model authentication error from OpenAI API: {exc}")
+            raise exc
         except Exception as exc:
             log_error(f"Error from OpenAI API: {exc}")
             raise ModelProviderError(message=str(exc), model_name=self.name, model_id=self.id) from exc
@@ -726,6 +735,9 @@ class OpenAIResponses(Model):
                 model_name=self.name,
                 model_id=self.id,
             ) from exc
+        except ModelAuthenticationError as exc:
+            log_error(f"Model authentication error from OpenAI API: {exc}")
+            raise exc
         except Exception as exc:
             log_error(f"Error from OpenAI API: {exc}")
             raise ModelProviderError(message=str(exc), model_name=self.name, model_id=self.id) from exc
@@ -797,6 +809,9 @@ class OpenAIResponses(Model):
                 model_name=self.name,
                 model_id=self.id,
             ) from exc
+        except ModelAuthenticationError as exc:
+            log_error(f"Model authentication error from OpenAI API: {exc}")
+            raise exc
         except Exception as exc:
             log_error(f"Error from OpenAI API: {exc}")
             raise ModelProviderError(message=str(exc), model_name=self.name, model_id=self.id) from exc