glaip-sdk 0.7.12__py3-none-any.whl → 0.7.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,101 @@
+ """Provider configuration for model name standardization.
+
+ This module centralizes provider configurations, including how provider names
+ map to server implementations and their base URLs for local execution.
+
+ Authors:
+     Christian Trisno Sen Long Chen (christian.t.s.l.chen@gdplabs.id)
+ """
+
+ __all__ = ["PROVIDERS", "ProviderConfig", "get_driver", "get_base_url"]
+
+
+ class ProviderConfig:
+     """Configuration for a model provider.
+
+     Defines how a provider is referenced in SDK constants and how it maps to
+     the underlying driver implementation and API endpoints.
+     """
+
+     def __init__(
+         self,
+         name: str,
+         driver: str,
+         base_url: str | None = None,
+     ):
+         """Initialize provider configuration.
+
+         Args:
+             name: User-facing provider name used in SDK constants (e.g., "deepinfra").
+             driver: Driver implementation name in language_models.yaml (e.g., "openai-compatible").
+             base_url: Base URL for the provider's API (required for local execution).
+         """
+         self.name = name
+         self.driver = driver
+         self.base_url = base_url
+
+
+ # Centralized provider configurations
+ # Key: provider name (used in SDK constants)
+ # Value: ProviderConfig with driver mapping and base URL
+ PROVIDERS: dict[str, ProviderConfig] = {
+     "deepinfra": ProviderConfig(
+         name="deepinfra",
+         driver="openai-compatible",
+         base_url="https://api.deepinfra.com/v1/openai",
+     ),
+     "deepseek": ProviderConfig(
+         name="deepseek",
+         driver="openai-compatible",
+         base_url="https://api.deepseek.com",
+     ),
+     "custom": ProviderConfig(
+         name="custom",
+         driver="openai-compatible",
+         base_url=None,  # User-provided via Model.base_url
+     ),
+ }
+
+
+ def get_driver(provider: str) -> str:
+     """Get driver name for a given provider.
+
+     Maps SDK provider names to their underlying driver implementations.
+     For providers not in the config, returns the provider name unchanged
+     (assumes provider name matches driver name).
+
+     Args:
+         provider: Provider name from SDK constants (e.g., "deepinfra", "openai").
+
+     Returns:
+         Driver name (e.g., "openai-compatible" for deepinfra, "openai" for openai).
+
+     Examples:
+         >>> get_driver("deepinfra")
+         "openai-compatible"
+         >>> get_driver("openai")
+         "openai"
+     """
+     config = PROVIDERS.get(provider)
+     return config.driver if config else provider
+
+
+ def get_base_url(provider: str) -> str | None:
+     """Get default base URL for a provider.
+
+     Returns the configured base URL for local execution, if available.
+
+     Args:
+         provider: Provider name from SDK constants (e.g., "deepinfra").
+
+     Returns:
+         Base URL string, or None if no config exists or no base_url configured.
+
+     Examples:
+         >>> get_base_url("deepinfra")
+         "https://api.deepinfra.com/v1/openai"
+         >>> get_base_url("openai")
+         None
+     """
+     config = PROVIDERS.get(provider)
+     return config.base_url if config else None
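
Taken together, get_driver and get_base_url resolve a user-facing provider name into the driver and default endpoint used for local execution. A minimal sketch of how the two helpers behave, assuming the module is importable as glaip_sdk.models._provider_mappings (the path used by the imports later in this diff); the values shown follow the PROVIDERS table above:

    from glaip_sdk.models._provider_mappings import get_base_url, get_driver

    # Providers listed in PROVIDERS map to the openai-compatible driver and
    # carry a default base URL for local execution.
    assert get_driver("deepinfra") == "openai-compatible"
    assert get_base_url("deepinfra") == "https://api.deepinfra.com/v1/openai"

    # Providers not in PROVIDERS fall through unchanged and get no default base URL.
    assert get_driver("openai") == "openai"
    assert get_base_url("openai") is None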
@@ -0,0 +1,97 @@
+ """Model validation utility for GL AIP SDK.
+
+ Validates model names in 'provider/model' format and provides
+ helpful error messages for invalid formats.
+
+ Authors:
+     Christian Trisno Sen Long Chen (christian.t.s.l.chen@gdplabs.id)
+ """
+
+ from __future__ import annotations
+
+ import warnings
+ from typing import Any
+
+
+ def _validate_model(model: str) -> str:
+     """Validate model format and deprecate bare names.
+
+     Args:
+         model: Model string to validate
+
+     Returns:
+         Normalized model string in provider/model format
+
+     Raises:
+         ValueError: If model format is invalid
+     """
+     if "/" not in model:
+         warnings.warn(
+             f"Bare model name '{model}' is deprecated. "
+             f"Use 'provider/model' format (e.g., 'openai/{model}'). "
+             f"This will be an error in v2.0. Use constants: from glaip_sdk.models import OpenAI",
+             DeprecationWarning,
+             stacklevel=2,
+         )
+         return f"openai/{model}"
+
+     provider, model_name = model.split("/", 1)
+     if not provider or not model_name:
+         raise ValueError(
+             f"Invalid model format: '{model}'. "
+             f"Expected 'provider/model' format (e.g., 'openai/gpt-4o-mini'). "
+             f"Use constants: from glaip_sdk.models import OpenAI; Agent(model=OpenAI.GPT_4O_MINI)."
+         )
+     return model
+
+
+ def convert_model_for_local_execution(model: str | Any) -> tuple[str, dict[str, Any]]:
+     """Convert model to aip_agents format for local execution.
+
+     Converts provider/model format appropriately for aip_agents.
+     Handles both Model objects and string models.
+
+     Args:
+         model: Model object or string identifier.
+
+     Returns:
+         Tuple of (model_string, config_dict) where:
+             - model_string: Model in format expected by aip_agents (provider/model)
+             - config_dict: Configuration dict with credentials, hyperparameters, etc.
+     """
+     from glaip_sdk.models import Model  # noqa: PLC0415
+
+     # Handle Model objects
+     if isinstance(model, Model):
+         return model.to_aip_agents_format()
+
+     # Handle string models
+     if isinstance(model, str):
+         if "/" not in model:
+             return model, {}
+
+         parts = model.split("/", 1)
+         provider = parts[0]
+         model_name = parts[1]
+
+         # Map provider to driver and get base_url from config
+         from glaip_sdk.models._provider_mappings import (  # noqa: PLC0415
+             get_base_url,
+             get_driver,
+         )
+
+         driver = get_driver(provider)
+         base_url = get_base_url(provider)
+
+         config: dict[str, Any] = {}
+         if base_url:
+             config["lm_base_url"] = base_url
+
+         # Return with driver name
+         return f"{driver}/{model_name}", config
+
+     # For other types (None, etc.), return as-is
+     return model, {}
+
+
+ __all__ = ["_validate_model", "convert_model_for_local_execution"]
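
A short usage sketch of the two helpers above; the expected values follow directly from the code, and the deepinfra base URL comes from the provider mappings added earlier in this diff:

    import warnings

    from glaip_sdk.models._validation import (
        _validate_model,
        convert_model_for_local_execution,
    )

    # Bare names are normalized to the openai provider and emit a DeprecationWarning.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", DeprecationWarning)
        assert _validate_model("gpt-4o-mini") == "openai/gpt-4o-mini"

    # provider/model strings are rewritten to driver/model, and providers with a
    # configured default base URL get it attached as lm_base_url.
    model_string, config = convert_model_for_local_execution("deepinfra/Qwen/Qwen3-32B")
    assert model_string == "openai-compatible/Qwen/Qwen3-32B"
    assert config == {"lm_base_url": "https://api.deepinfra.com/v1/openai"}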
glaip_sdk/models/agent.py CHANGED
@@ -13,9 +13,10 @@ Authors:
  from datetime import datetime
  from typing import Any
 
- from glaip_sdk.config.constants import DEFAULT_AGENT_RUN_TIMEOUT
  from pydantic import BaseModel
 
+ from glaip_sdk.config.constants import DEFAULT_AGENT_RUN_TIMEOUT
+
 
  class AgentResponse(BaseModel):
      """Pydantic model for Agent API responses.
@@ -0,0 +1,141 @@
+ # pylint: disable=duplicate-code
+ """Model constants for GL AIP SDK.
+
+ Typed constants for popular AI models matching AIP Server's language_models.yaml keys.
+ """
+
+ from __future__ import annotations
+
+ __all__ = [
+     "ModelProvider",
+     "OpenAI",
+     "Anthropic",
+     "Google",
+     "AzureOpenAI",
+     "DeepInfra",
+     "DeepSeek",
+     "Bedrock",
+     "DEFAULT_MODEL",
+ ]
+
+ # Note: DeepInfra provider changed to 'openai-compatible' for aip_agents compatibility
+
+
+ class ModelProvider:
+     """Supported model providers."""
+
+     OPENAI = "openai"
+     ANTHROPIC = "anthropic"
+     AZURE_OPENAI = "azure-openai"
+     GOOGLE = "google"
+     DEEPINFRA = "deepinfra"
+     DEEPSEEK = "deepseek"
+     OPENAI_COMPATIBLE = "openai-compatible"
+     BEDROCK = "bedrock"
+
+
+ class OpenAI:
+     """OpenAI model constants."""
+
+     GPT_4O_MINI = "openai/gpt-4o-mini"
+     GPT_4O = "openai/gpt-4o"
+     GPT_4_1 = "openai/gpt-4.1"
+     GPT_4_1_NANO = "openai/gpt-4.1-nano"
+     GPT_4_1_MINI = "openai/gpt-4.1-mini"
+     GPT_5 = "openai/gpt-5"
+     GPT_5_MINI = "openai/gpt-5-mini"
+     GPT_5_NANO = "openai/gpt-5-nano"
+     GPT_5_LOW = "openai/gpt-5-low"
+     GPT_5_MINIMAL = "openai/gpt-5-minimal"
+     GPT_5_MEDIUM = "openai/gpt-5-medium"
+     GPT_5_HIGH = "openai/gpt-5-high"
+     GPT_5_1 = "openai/gpt-5.1"
+     GPT_5_1_LOW = "openai/gpt-5.1-low"
+     GPT_5_1_MEDIUM = "openai/gpt-5.1-medium"
+     GPT_5_1_HIGH = "openai/gpt-5.1-high"
+     GPT_5_2 = "openai/gpt-5.2"
+     GPT_5_2_LOW = "openai/gpt-5.2-low"
+     GPT_5_2_MEDIUM = "openai/gpt-5.2-medium"
+     GPT_5_2_HIGH = "openai/gpt-5.2-high"
+     GPT_5_2_XHIGH = "openai/gpt-5.2-xhigh"
+     GPT_4O_2024_11_20 = "openai/gpt-4o-2024-11-20"
+     O4_MINI = "openai/o4-mini"
+
+
+ class Anthropic:
+     """Anthropic model constants."""
+
+     CLAUDE_3_5_SONNET_LATEST = "anthropic/claude-3-5-sonnet-latest"
+     CLAUDE_3_7_SONNET_LATEST = "anthropic/claude-3-7-sonnet-latest"
+     CLAUDE_SONNET_4_0 = "anthropic/claude-sonnet-4-0"
+     CLAUDE_OPUS_4_0 = "anthropic/claude-opus-4-0"
+     CLAUDE_OPUS_4_1 = "anthropic/claude-opus-4-1"
+     CLAUDE_SONNET_4_5 = "anthropic/claude-sonnet-4-5"
+     # DX Alias
+     CLAUDE_3_5_SONNET = CLAUDE_3_5_SONNET_LATEST
+
+
+ class Google:
+     """Google model constants."""
+
+     GEMINI_2_5_FLASH = "google/gemini-2.5-flash"
+     GEMINI_3_FLASH_PREVIEW = "google/gemini-3-flash-preview"
+     GEMINI_3_PRO_PREVIEW = "google/gemini-3-pro-preview"
+     GEMINI_2_5_PRO = "google/gemini-2.5-pro"
+
+
+ class AzureOpenAI:
+     """Azure OpenAI model constants."""
+
+     GPT_4O = "azure-openai/gpt-4o"
+     GPT_4O_MINI = "azure-openai/gpt-4o-mini"
+     GPT_4_1 = "azure-openai/gpt-4.1"
+
+
+ class DeepInfra:
+     """DeepInfra model constants.
+
+     Note: DeepInfra models use the openai-compatible driver internally,
+     but we expose them with the 'deepinfra/' prefix for better DX.
+     The SDK handles the mapping to 'openai-compatible/' automatically.
+     """
+
+     QWEN3_235B_A22B_INSTRUCT_2507 = "deepinfra/Qwen/Qwen3-235B-A22B-Instruct-2507"
+     KIMI_K2_INSTRUCT = "deepinfra/moonshotai/Kimi-K2-Instruct"
+     QWEN3_30B_A3B = "deepinfra/Qwen/Qwen3-30B-A3B"
+     GLM_4_5_AIR = "deepinfra/zai-org/GLM-4.5-Air"
+     QWEN3_32B = "deepinfra/Qwen/Qwen3-32B"
+     QWEN3_NEXT_80B_A3B_THINKING = "deepinfra/Qwen/Qwen3-Next-80B-A3B-Thinking"
+     QWEN3_NEXT_80B_A3B_INSTRUCT = "deepinfra/Qwen/Qwen3-Next-80B-A3B-Instruct"
+     QWEN3_14B = "deepinfra/Qwen/Qwen3-14B"
+     GLM_4_5 = "deepinfra/zai-org/GLM-4.5"
+
+
+ class DeepSeek:
+     """DeepSeek model constants.
+
+     Note: DeepSeek models use the openai-compatible driver internally,
+     but we expose them with the 'deepseek/' prefix for better DX.
+     The SDK handles the mapping to 'openai-compatible/' automatically.
+     """
+
+     DEEPSEEK_CHAT = "deepseek/deepseek-chat"
+     DEEPSEEK_V3_1 = "deepseek/deepseek-ai/DeepSeek-V3.1"
+     DEEPSEEK_V3_1_TERMINUS = "deepseek/deepseek-ai/DeepSeek-V3.1-Terminus"
+
+
+ class Bedrock:
+     """AWS Bedrock model constants."""
+
+     CLAUDE_SONNET_4_20250514_V1_0 = "bedrock/us.anthropic.claude-sonnet-4-20250514-v1:0"
+     CLAUDE_3_5_HAIKU_20241022_V1_0 = "bedrock/us.anthropic.claude-3-5-haiku-20241022-v1:0"
+     CLAUDE_SONNET_4_5_20250929_V1_0 = "bedrock/us.anthropic.claude-sonnet-4-5-20250929-v1:0"
+
+     # DX Aliases
+     ANTHROPIC_CLAUDE_SONNET_4 = CLAUDE_SONNET_4_20250514_V1_0
+     ANTHROPIC_CLAUDE_3_5_HAIKU = CLAUDE_3_5_HAIKU_20241022_V1_0
+     ANTHROPIC_CLAUDE_SONNET_4_5 = CLAUDE_SONNET_4_5_20250929_V1_0
+
+
+ # Default model selection
+ DEFAULT_MODEL = OpenAI.GPT_5_NANO
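
The constants are plain strings in provider/model form, so they can be dropped in wherever a model string is accepted. A small illustration, assuming the classes are re-exported from glaip_sdk.models as the deprecation message in _validate_model suggests:

    # Import path assumed from the error messages above; the constant values
    # themselves are defined verbatim in this file.
    from glaip_sdk.models import DEFAULT_MODEL, DeepInfra, OpenAI

    assert OpenAI.GPT_4O_MINI == "openai/gpt-4o-mini"
    assert DeepInfra.QWEN3_32B == "deepinfra/Qwen/Qwen3-32B"
    assert DEFAULT_MODEL == OpenAI.GPT_5_NANO == "openai/gpt-5-nano"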
@@ -0,0 +1,170 @@
+ """Model configuration class for GL AIP SDK.
+
+ Provides a structured way to specify models, credentials, and hyperparameters
+ for local execution.
+ """
+
+ from __future__ import annotations
+
+ from typing import Any
+
+ from pydantic import BaseModel, field_validator
+
+
+ class Model(BaseModel):
+     """Model configuration class for local execution.
+
+     Bundles model identity with credentials and hyperparameters.
+     """
+
+     id: str
+     credentials: dict[str, Any] | str | None = None
+     hyperparameters: dict[str, Any] | None = None
+     base_url: str | None = None
+
+     @field_validator("id")
+     @classmethod
+     def validate_id(cls, v: str) -> str:
+         """Validate model ID format.
+
+         Args:
+             v: Model identifier string.
+
+         Returns:
+             Validated model identifier in 'provider/model' format.
+
+         Raises:
+             ValueError: If model format is invalid.
+         """
+         from glaip_sdk.models._validation import _validate_model  # noqa: PLC0415
+
+         return _validate_model(v)
+
+     def to_aip_agents_format(self) -> tuple[str, dict[str, Any]]:
+         """Convert Model to aip_agents format.
+
+         Converts provider/model format to driver/model format for aip_agents.
+         For custom models with base_url, uses format: openai-compatible/base_url:model_name
+
+         Returns:
+             Tuple of (model_string, agent_config_dict).
+         """
+         if "/" not in self.id:
+             return self.id, {}
+
+         parts = self.id.split("/", 1)
+         provider = parts[0]
+         model_name = parts[1]
+
+         config = self._build_agent_config(provider)
+         model_string = self._build_model_string(provider, model_name, config)
+
+         return model_string, config
+
+     def _build_agent_config(self, provider: str) -> dict[str, Any]:
+         """Build agent config dict from Model attributes.
+
+         Args:
+             provider: Provider name extracted from model id.
+
+         Returns:
+             Configuration dict with credentials, hyperparameters, and base_url.
+         """
+         config: dict[str, Any] = {}
+
+         if self.credentials:
+             if isinstance(self.credentials, str):
+                 config["lm_api_key"] = self.credentials
+             elif isinstance(self.credentials, dict):
+                 config["lm_credentials"] = self.credentials
+
+         if self.hyperparameters:
+             config["lm_hyperparameters"] = self.hyperparameters
+
+         base_url = self._resolve_base_url(provider)
+         if base_url:
+             config["lm_base_url"] = base_url
+
+         return config
+
+     def _resolve_base_url(self, provider: str) -> str | None:
+         """Resolve base URL for the provider.
+
+         Uses centralized provider configurations to determine base_url.
+         Users can override by explicitly setting base_url attribute.
+
+         Args:
+             provider: Provider name from model ID (e.g., "deepinfra").
+
+         Returns:
+             Base URL string or None.
+
+         Examples:
+             >>> model = Model(id="deepinfra/Qwen/Qwen3-30B")
+             >>> model._resolve_base_url("deepinfra")
+             "https://api.deepinfra.com/v1/openai"
+         """
+         if self.base_url:
+             return self.base_url
+
+         # Get base_url from provider config
+         from glaip_sdk.models._provider_mappings import get_base_url  # noqa: PLC0415
+
+         base_url = get_base_url(provider)
+         if base_url:
+             return base_url
+
+         return None
+
+     def _build_model_string(self, provider: str, model_name: str, config: dict[str, Any]) -> str:
+         """Build normalized model string for aip_agents.
+
+         Converts provider names to their driver implementations for local execution.
+
+         Conversion strategy:
+         1. Custom models with base_url: Use colon format (openai-compatible/{base_url}:{model_name})
+            This allows aip_agents to parse base_url and model_name separately.
+         2. Standard providers: Map to driver and use slash format (driver/{model_name})
+            aip_agents will handle provider/model format internally.
+
+         Args:
+             provider: Provider name from model ID (e.g., "deepinfra", "openai").
+             model_name: Model name after provider prefix.
+             config: Agent config dict (may contain base_url).
+
+         Returns:
+             Normalized model string in provider/model format with driver name.
+
+         Examples:
+             >>> _build_model_string("deepinfra", "Qwen/Qwen3-30B", {})
+             "openai-compatible/Qwen/Qwen3-30B"
+             >>> _build_model_string("openai", "gpt-4o", {})
+             "openai/gpt-4o"
+         """
+         from glaip_sdk.models._provider_mappings import get_driver  # noqa: PLC0415
+
+         # Map provider to driver (e.g., deepinfra → openai-compatible)
+         driver = get_driver(provider)
+
+         if provider == "custom":
+             base_url = config.get("lm_base_url")
+             if base_url:
+                 return f"{driver}/{base_url}:{model_name}"
+
+         # Standard case: driver with slash format
+         # aip_agents will handle provider/model format internally
+         return f"{driver}/{model_name}"
+
+     def __repr__(self) -> str:
+         """Return string representation of Model.
+
+         Note: Credentials are masked to avoid leaking secrets in logs.
+         Hyperparameters and base_url are shown if present.
+         """
+         creds_repr = "***" if self.credentials else "None"
+         return (
+             f"Model(id={self.id!r}, "
+             f"credentials={creds_repr}, "
+             f"hyperparameters={self.hyperparameters!r}, "
+             f"base_url={self.base_url!r})"
+         )
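
To see the conversion end to end, here is a hedged sketch of Model.to_aip_agents_format based on the code above; the credential string, hyperparameter key, and localhost URL are illustrative values only:

    from glaip_sdk.models import Model

    # DeepInfra: the provider prefix maps to the openai-compatible driver, the
    # string credential becomes lm_api_key, and the default base URL is filled in.
    m = Model(
        id="deepinfra/Qwen/Qwen3-30B-A3B",
        credentials="my-api-key",              # illustrative value
        hyperparameters={"temperature": 0.2},  # illustrative value
    )
    model_string, config = m.to_aip_agents_format()
    assert model_string == "openai-compatible/Qwen/Qwen3-30B-A3B"
    assert config == {
        "lm_api_key": "my-api-key",
        "lm_hyperparameters": {"temperature": 0.2},
        "lm_base_url": "https://api.deepinfra.com/v1/openai",
    }

    # Custom endpoint: base_url and model name travel together in the colon format.
    custom = Model(id="custom/llama-3-8b", base_url="http://localhost:8000/v1")
    model_string, config = custom.to_aip_agents_format()
    assert model_string == "openai-compatible/http://localhost:8000/v1:llama-3-8b"
    assert config == {"lm_base_url": "http://localhost:8000/v1"}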