claude-dev-cli 0.13.3__py3-none-any.whl → 0.16.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of claude-dev-cli might be problematic. Click here for more details.
- claude_dev_cli/__init__.py +1 -1
- claude_dev_cli/cli.py +231 -13
- claude_dev_cli/config.py +95 -9
- claude_dev_cli/core.py +48 -53
- claude_dev_cli/providers/__init__.py +28 -0
- claude_dev_cli/providers/anthropic.py +216 -0
- claude_dev_cli/providers/base.py +168 -0
- claude_dev_cli/providers/factory.py +114 -0
- claude_dev_cli/providers/ollama.py +283 -0
- claude_dev_cli/providers/openai.py +268 -0
- {claude_dev_cli-0.13.3.dist-info → claude_dev_cli-0.16.0.dist-info}/METADATA +196 -15
- {claude_dev_cli-0.13.3.dist-info → claude_dev_cli-0.16.0.dist-info}/RECORD +16 -10
- {claude_dev_cli-0.13.3.dist-info → claude_dev_cli-0.16.0.dist-info}/WHEEL +0 -0
- {claude_dev_cli-0.13.3.dist-info → claude_dev_cli-0.16.0.dist-info}/entry_points.txt +0 -0
- {claude_dev_cli-0.13.3.dist-info → claude_dev_cli-0.16.0.dist-info}/licenses/LICENSE +0 -0
- {claude_dev_cli-0.13.3.dist-info → claude_dev_cli-0.16.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
"""Anthropic (Claude) AI provider implementation."""
|
|
2
|
+
|
|
3
|
+
import json
from datetime import datetime, timezone
from typing import Any, Dict, Iterator, List, NoReturn, Optional

from anthropic import Anthropic, APIError

from claude_dev_cli.providers.base import (
    AIProvider,
    ModelInfo,
    UsageInfo,
    InsufficientCreditsError,
    ProviderConnectionError,
    ModelNotFoundError,
)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class AnthropicProvider(AIProvider):
    """Anthropic Claude API provider implementation.

    Wraps the official ``anthropic`` SDK behind the :class:`AIProvider`
    interface and records token usage plus an estimated USD cost after
    each non-streaming call.
    """

    # Model used when the caller does not specify one.
    DEFAULT_MODEL = "claude-sonnet-4-5-20250929"

    # Response-size cap used when the caller does not specify one.
    DEFAULT_MAX_TOKENS = 4096

    # Known Claude models with their capabilities.
    # Prices are USD per million tokens (input_price / output_price).
    KNOWN_MODELS = {
        "claude-3-5-haiku-20241022": {
            "display_name": "Claude 3.5 Haiku",
            "context_window": 200000,
            "input_price": 0.80,
            "output_price": 4.00,
            "capabilities": ["chat", "code", "analysis"]
        },
        "claude-sonnet-4-5-20250929": {
            "display_name": "Claude Sonnet 4.5",
            "context_window": 200000,
            "input_price": 3.00,
            "output_price": 15.00,
            "capabilities": ["chat", "code", "analysis", "vision"]
        },
        "claude-opus-4-20250514": {
            "display_name": "Claude Opus 4",
            "context_window": 200000,
            "input_price": 15.00,
            "output_price": 75.00,
            "capabilities": ["chat", "code", "analysis", "vision", "research"]
        },
        # Legacy models
        "claude-3-5-sonnet-20241022": {
            "display_name": "Claude 3.5 Sonnet",
            "context_window": 200000,
            "input_price": 3.00,
            "output_price": 15.00,
            "capabilities": ["chat", "code", "analysis", "vision"]
        },
    }

    def __init__(self, config: Any) -> None:
        """Initialize Anthropic provider.

        Args:
            config: ProviderConfig or APIConfig with api_key

        Raises:
            ValueError: If the config carries no (truthy) api_key.
        """
        super().__init__(config)

        # Extract API key from config
        api_key = getattr(config, 'api_key', None)
        if not api_key:
            raise ValueError("Anthropic provider requires api_key in config")

        self.client = Anthropic(api_key=api_key)
        self.last_usage: Optional[UsageInfo] = None

    @staticmethod
    def _translate_api_error(e: APIError, model: str) -> NoReturn:
        """Map an SDK APIError onto the provider-neutral exception hierarchy.

        Uses getattr for status_code because only HTTP-status errors carry
        that attribute; connection-level APIErrors do not, and reading it
        directly would raise AttributeError and mask the real failure.

        Raises:
            InsufficientCreditsError: 400 responses mentioning credit balance.
            ModelNotFoundError: 404 responses (unknown model).
            ProviderConnectionError: Any other API failure.
        """
        status = getattr(e, "status_code", None)
        # Check for insufficient credits
        if status == 400 and "credit balance" in str(e).lower():
            raise InsufficientCreditsError(
                f"Insufficient credits for Anthropic API: {e}",
                provider="anthropic"
            )
        if status == 404:
            raise ModelNotFoundError(
                f"Model not found: {model}",
                model=model,
                provider="anthropic"
            )
        raise ProviderConnectionError(
            f"Anthropic API error: {e}",
            provider="anthropic"
        )

    @staticmethod
    def _build_request(
        prompt: str,
        system_prompt: Optional[str],
        model: str,
        max_tokens: int,
        temperature: float,
    ) -> Dict[str, Any]:
        """Assemble the keyword arguments for a Messages API request."""
        kwargs: Dict[str, Any] = {
            "model": model,
            "max_tokens": max_tokens,
            "temperature": temperature,
            "messages": [{"role": "user", "content": prompt}]
        }
        if system_prompt:
            kwargs["system"] = system_prompt
        return kwargs

    def _record_usage(
        self,
        response: Any,
        model: str,
        start_time: datetime,
        end_time: datetime,
    ) -> None:
        """Store UsageInfo for a completed call, pricing via KNOWN_MODELS.

        Unknown models are priced at 0.0 rather than failing the call.
        """
        duration_ms = int((end_time - start_time).total_seconds() * 1000)

        model_info = self.KNOWN_MODELS.get(model, {})
        input_price = model_info.get("input_price", 0.0)
        output_price = model_info.get("output_price", 0.0)

        input_cost = (response.usage.input_tokens / 1_000_000) * input_price
        output_cost = (response.usage.output_tokens / 1_000_000) * output_price

        self.last_usage = UsageInfo(
            input_tokens=response.usage.input_tokens,
            output_tokens=response.usage.output_tokens,
            duration_ms=duration_ms,
            model=model,
            timestamp=end_time,
            cost_usd=input_cost + output_cost
        )

    def call(
        self,
        prompt: str,
        system_prompt: Optional[str] = None,
        model: Optional[str] = None,
        max_tokens: Optional[int] = None,
        temperature: float = 1.0,
    ) -> str:
        """Make a synchronous call to Claude API.

        Args:
            prompt: User prompt/message.
            system_prompt: Optional system prompt for context.
            model: Claude model ID; defaults to DEFAULT_MODEL.
            max_tokens: Maximum response tokens; defaults to DEFAULT_MAX_TOKENS.
            temperature: Sampling temperature.

        Returns:
            The concatenated text of all text blocks in the response.

        Raises:
            InsufficientCreditsError, ModelNotFoundError,
            ProviderConnectionError: See _translate_api_error.
        """
        model = model or self.DEFAULT_MODEL
        max_tokens = max_tokens or self.DEFAULT_MAX_TOKENS
        kwargs = self._build_request(prompt, system_prompt, model, max_tokens, temperature)

        # Timezone-aware timestamps; datetime.utcnow() is deprecated and naive.
        start_time = datetime.now(timezone.utc)
        try:
            response = self.client.messages.create(**kwargs)
        except APIError as e:
            self._translate_api_error(e, model)
        end_time = datetime.now(timezone.utc)

        self._record_usage(response, model, start_time, end_time)

        # Extract text from response (skip non-text blocks, e.g. tool use)
        text_blocks = [
            block.text for block in response.content if hasattr(block, 'text')
        ]
        return '\n'.join(text_blocks)

    def call_streaming(
        self,
        prompt: str,
        system_prompt: Optional[str] = None,
        model: Optional[str] = None,
        max_tokens: Optional[int] = None,
        temperature: float = 1.0,
    ) -> Iterator[str]:
        """Make a streaming call to Claude API.

        Yields text chunks as they arrive. NOTE: unlike call(), no UsageInfo
        is recorded for streaming calls (matching prior behavior).
        """
        model = model or self.DEFAULT_MODEL
        max_tokens = max_tokens or self.DEFAULT_MAX_TOKENS
        kwargs = self._build_request(prompt, system_prompt, model, max_tokens, temperature)

        try:
            with self.client.messages.stream(**kwargs) as stream:
                for text in stream.text_stream:
                    yield text
        except APIError as e:
            # Shared mapping keeps streaming errors consistent with call(),
            # including the 404 -> ModelNotFoundError case.
            self._translate_api_error(e, model)

    def list_models(self) -> List[ModelInfo]:
        """List available Claude models (from the static KNOWN_MODELS table)."""
        models = []
        for model_id, info in self.KNOWN_MODELS.items():
            models.append(ModelInfo(
                model_id=model_id,
                display_name=info["display_name"],
                provider="anthropic",
                context_window=info["context_window"],
                input_price_per_mtok=info["input_price"],
                output_price_per_mtok=info["output_price"],
                capabilities=info["capabilities"]
            ))
        return models

    def get_last_usage(self) -> Optional[UsageInfo]:
        """Get usage information from the last (non-streaming) API call."""
        return self.last_usage

    @property
    def provider_name(self) -> str:
        """Get the provider's name."""
        return "anthropic"

    def test_connection(self) -> bool:
        """Test if the Anthropic API is accessible with the configured key."""
        try:
            # Minimal, cheap call to validate credentials; response discarded.
            self.client.messages.create(
                model="claude-3-5-haiku-20241022",
                max_tokens=10,
                messages=[{"role": "user", "content": "test"}]
            )
            return True
        except APIError:
            return False
|
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
"""Abstract base class for AI providers."""
|
|
2
|
+
|
|
3
|
+
from abc import ABC, abstractmethod
|
|
4
|
+
from typing import Iterator, Optional, Dict, Any, List
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
@dataclass
class ModelInfo:
    """Information about an AI model.

    Static metadata used for model listings and cost estimation;
    prices are USD per million tokens.
    """

    model_id: str  # provider-specific identifier, e.g. "claude-3-5-haiku-20241022"
    display_name: str  # human-friendly name for listings/UI
    provider: str  # owning provider name, e.g. "anthropic"
    context_window: int  # maximum context size in tokens
    input_price_per_mtok: float  # USD per million input tokens
    output_price_per_mtok: float  # USD per million output tokens
    capabilities: List[str]  # e.g., ["chat", "code", "vision"]
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@dataclass
class UsageInfo:
    """Usage information for a single API call."""

    input_tokens: int  # tokens consumed by the request
    output_tokens: int  # tokens produced in the response
    duration_ms: int  # wall-clock duration of the call in milliseconds
    model: str  # model ID that served the call
    timestamp: datetime  # when the call finished
    cost_usd: float  # estimated cost in US dollars (0.0 for unknown/free models)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class AIProvider(ABC):
    """Common interface implemented by every AI backend.

    Concrete backends (Anthropic, OpenAI, Ollama, etc.) subclass this so
    the CLI can drive any of them through one uniform API.
    """

    def __init__(self, config: Any) -> None:
        """Store the provider-specific configuration object.

        Args:
            config: Provider-specific configuration (ProviderConfig)
        """
        self.config = config

    @abstractmethod
    def call(
        self,
        prompt: str,
        system_prompt: Optional[str] = None,
        model: Optional[str] = None,
        max_tokens: Optional[int] = None,
        temperature: float = 1.0,
    ) -> str:
        """Send one prompt and block until the complete reply is available.

        Args:
            prompt: The user's message.
            system_prompt: Optional system-level context.
            model: Model ID or profile name to use.
            max_tokens: Cap on response length in tokens.
            temperature: Sampling temperature (0.0-2.0).

        Returns:
            The AI's full text response.

        Raises:
            ProviderError: On API errors.
            InsufficientCreditsError: When credits are too low.
        """

    @abstractmethod
    def call_streaming(
        self,
        prompt: str,
        system_prompt: Optional[str] = None,
        model: Optional[str] = None,
        max_tokens: Optional[int] = None,
        temperature: float = 1.0,
    ) -> Iterator[str]:
        """Send one prompt and yield the reply incrementally.

        Args:
            prompt: The user's message.
            system_prompt: Optional system-level context.
            model: Model ID or profile name to use.
            max_tokens: Cap on response length in tokens.
            temperature: Sampling temperature (0.0-2.0).

        Yields:
            Text chunks in arrival order from the provider.

        Raises:
            ProviderError: On API errors.
            InsufficientCreditsError: When credits are too low.
        """

    @abstractmethod
    def list_models(self) -> List[ModelInfo]:
        """Enumerate the models this backend can serve.

        Returns:
            A list of ModelInfo records, one per available model.
        """

    @abstractmethod
    def get_last_usage(self) -> Optional[UsageInfo]:
        """Report usage stats for the most recent call.

        Returns:
            UsageInfo for the latest call, or None when nothing has run yet.
        """

    @property
    @abstractmethod
    def provider_name(self) -> str:
        """Short identifier for this backend (e.g. 'anthropic', 'openai', 'ollama').

        Returns:
            The provider name string.
        """

    @abstractmethod
    def test_connection(self) -> bool:
        """Probe the backend to verify reachability and credentials.

        Returns:
            True when the provider answered successfully, False otherwise.
        """
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
class ProviderError(Exception):
    """Root of the provider exception hierarchy."""


class InsufficientCreditsError(ProviderError):
    """Signals that the account's API credit balance is too low."""

    def __init__(self, message: str, provider: str) -> None:
        """Record the offending provider and forward the message."""
        self.provider = provider
        super().__init__(message)


class ProviderConnectionError(ProviderError):
    """Signals a failure to reach or talk to the provider."""

    def __init__(self, message: str, provider: str) -> None:
        """Record the offending provider and forward the message."""
        self.provider = provider
        super().__init__(message)


class ModelNotFoundError(ProviderError):
    """Signals that the requested model does not exist at the provider."""

    def __init__(self, message: str, model: str, provider: str) -> None:
        """Record the missing model and its provider; forward the message."""
        self.model = model
        self.provider = provider
        super().__init__(message)
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
"""Factory for creating AI provider instances."""
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
from claude_dev_cli.providers.base import AIProvider, ProviderError
|
|
6
|
+
from claude_dev_cli.providers.anthropic import AnthropicProvider
|
|
7
|
+
|
|
8
|
+
# Try to import OpenAI provider, graceful fallback if not installed.
# NOTE(review): RuntimeError is caught as well as ImportError — presumably
# the provider module raises it when its SDK is present but unusable;
# TODO confirm against providers/openai.py.
try:
    from claude_dev_cli.providers.openai import OpenAIProvider
    OPENAI_PROVIDER_AVAILABLE = True
except (ImportError, RuntimeError):
    OpenAIProvider = None  # type: ignore
    OPENAI_PROVIDER_AVAILABLE = False

# Try to import Ollama provider, graceful fallback if not installed.
try:
    from claude_dev_cli.providers.ollama import OllamaProvider
    OLLAMA_PROVIDER_AVAILABLE = True
except (ImportError, RuntimeError):
    OllamaProvider = None  # type: ignore
    OLLAMA_PROVIDER_AVAILABLE = False
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class ProviderFactory:
    """Factory for creating AI provider instances based on configuration."""

    # Lazily-built registry mapping provider-type name -> provider class.
    # Starts as None so availability flags are consulted at first use,
    # not at class-definition time.
    _PROVIDERS = None

    @staticmethod
    def _build_provider_registry() -> dict:
        """Build registry of available providers based on installed dependencies."""
        registry = {
            "anthropic": AnthropicProvider,
        }

        # Add OpenAI if available
        if OPENAI_PROVIDER_AVAILABLE and OpenAIProvider:
            registry["openai"] = OpenAIProvider

        # Add Ollama if available
        if OLLAMA_PROVIDER_AVAILABLE and OllamaProvider:
            registry["ollama"] = OllamaProvider

        # Future providers:
        # "lmstudio": LMStudioProvider,  # v0.16.0

        return registry

    @staticmethod
    def _get_registry() -> dict:
        """Return the provider registry, building it on first access.

        Centralizes the lazy-init that was previously duplicated across
        create(), list_providers() and is_provider_available().
        """
        if ProviderFactory._PROVIDERS is None:
            ProviderFactory._PROVIDERS = ProviderFactory._build_provider_registry()
        return ProviderFactory._PROVIDERS

    @staticmethod
    def create(config: Any) -> AIProvider:
        """Create a provider instance based on configuration.

        Args:
            config: Provider configuration (ProviderConfig or APIConfig)
                Must have 'provider' attribute or defaults to 'anthropic'

        Returns:
            AIProvider instance

        Raises:
            ProviderError: If provider type is unknown or unavailable,
                or if the provider fails to initialize.
        """
        registry = ProviderFactory._get_registry()

        # Determine provider type; a missing OR explicitly-None attribute
        # both fall back to the default instead of crashing on .lower().
        provider_type = getattr(config, 'provider', 'anthropic') or 'anthropic'

        # Look up provider class
        provider_class = registry.get(provider_type.lower())

        if not provider_class:
            available = ", ".join(registry.keys())
            raise ProviderError(
                f"Unknown provider: {provider_type}. "
                f"Available providers: {available}"
            )

        # Instantiate and return provider
        try:
            return provider_class(config)
        except Exception as e:
            # Chain the original exception so the root cause is visible.
            raise ProviderError(
                f"Failed to initialize {provider_type} provider: {e}"
            ) from e

    @staticmethod
    def list_providers() -> list[str]:
        """List available provider types.

        Returns:
            List of provider type names (e.g., ['anthropic', 'openai'])
        """
        return list(ProviderFactory._get_registry().keys())

    @staticmethod
    def is_provider_available(provider_type: str) -> bool:
        """Check if a provider type is available.

        Args:
            provider_type: Provider type name (e.g., 'anthropic', 'openai')

        Returns:
            True if provider is available, False otherwise
        """
        return provider_type.lower() in ProviderFactory._get_registry()
|