mirascope 2.0.0a3__py3-none-any.whl → 2.0.0a4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mirascope/api/_generated/__init__.py +62 -6
- mirascope/api/_generated/client.py +8 -0
- mirascope/api/_generated/errors/__init__.py +11 -1
- mirascope/api/_generated/errors/conflict_error.py +15 -0
- mirascope/api/_generated/errors/forbidden_error.py +15 -0
- mirascope/api/_generated/errors/internal_server_error.py +15 -0
- mirascope/api/_generated/errors/not_found_error.py +15 -0
- mirascope/api/_generated/organizations/__init__.py +25 -0
- mirascope/api/_generated/organizations/client.py +380 -0
- mirascope/api/_generated/organizations/raw_client.py +876 -0
- mirascope/api/_generated/organizations/types/__init__.py +23 -0
- mirascope/api/_generated/organizations/types/organizations_create_response.py +24 -0
- mirascope/api/_generated/organizations/types/organizations_create_response_role.py +7 -0
- mirascope/api/_generated/organizations/types/organizations_get_response.py +24 -0
- mirascope/api/_generated/organizations/types/organizations_get_response_role.py +7 -0
- mirascope/api/_generated/organizations/types/organizations_list_response_item.py +24 -0
- mirascope/api/_generated/organizations/types/organizations_list_response_item_role.py +7 -0
- mirascope/api/_generated/organizations/types/organizations_update_response.py +24 -0
- mirascope/api/_generated/organizations/types/organizations_update_response_role.py +7 -0
- mirascope/api/_generated/projects/__init__.py +17 -0
- mirascope/api/_generated/projects/client.py +458 -0
- mirascope/api/_generated/projects/raw_client.py +1016 -0
- mirascope/api/_generated/projects/types/__init__.py +15 -0
- mirascope/api/_generated/projects/types/projects_create_response.py +30 -0
- mirascope/api/_generated/projects/types/projects_get_response.py +30 -0
- mirascope/api/_generated/projects/types/projects_list_response_item.py +30 -0
- mirascope/api/_generated/projects/types/projects_update_response.py +30 -0
- mirascope/api/_generated/reference.md +586 -0
- mirascope/api/_generated/types/__init__.py +20 -4
- mirascope/api/_generated/types/already_exists_error.py +24 -0
- mirascope/api/_generated/types/already_exists_error_tag.py +5 -0
- mirascope/api/_generated/types/database_error.py +24 -0
- mirascope/api/_generated/types/database_error_tag.py +5 -0
- mirascope/api/_generated/types/http_api_decode_error.py +1 -3
- mirascope/api/_generated/types/issue.py +1 -5
- mirascope/api/_generated/types/not_found_error_body.py +24 -0
- mirascope/api/_generated/types/not_found_error_tag.py +5 -0
- mirascope/api/_generated/types/permission_denied_error.py +24 -0
- mirascope/api/_generated/types/permission_denied_error_tag.py +7 -0
- mirascope/api/_generated/types/property_key.py +2 -2
- mirascope/api/_generated/types/{property_key_tag.py → property_key_key.py} +3 -5
- mirascope/api/_generated/types/{property_key_tag_tag.py → property_key_key_tag.py} +1 -1
- mirascope/llm/__init__.py +4 -0
- mirascope/llm/providers/__init__.py +6 -0
- mirascope/llm/providers/anthropic/__init__.py +6 -1
- mirascope/llm/providers/anthropic/_utils/__init__.py +15 -5
- mirascope/llm/providers/anthropic/_utils/beta_decode.py +271 -0
- mirascope/llm/providers/anthropic/_utils/beta_encode.py +216 -0
- mirascope/llm/providers/anthropic/_utils/decode.py +39 -7
- mirascope/llm/providers/anthropic/_utils/encode.py +156 -64
- mirascope/llm/providers/anthropic/beta_provider.py +322 -0
- mirascope/llm/providers/anthropic/model_id.py +10 -27
- mirascope/llm/providers/anthropic/model_info.py +87 -0
- mirascope/llm/providers/anthropic/provider.py +127 -145
- mirascope/llm/providers/base/_utils.py +15 -1
- mirascope/llm/providers/google/_utils/decode.py +55 -3
- mirascope/llm/providers/google/_utils/encode.py +14 -6
- mirascope/llm/providers/google/model_id.py +7 -13
- mirascope/llm/providers/google/model_info.py +62 -0
- mirascope/llm/providers/google/provider.py +8 -4
- mirascope/llm/providers/load_provider.py +8 -2
- mirascope/llm/providers/mlx/_utils.py +23 -1
- mirascope/llm/providers/mlx/encoding/transformers.py +17 -1
- mirascope/llm/providers/mlx/provider.py +4 -0
- mirascope/llm/providers/ollama/__init__.py +19 -0
- mirascope/llm/providers/ollama/provider.py +71 -0
- mirascope/llm/providers/openai/completions/__init__.py +6 -1
- mirascope/llm/providers/openai/completions/_utils/decode.py +57 -5
- mirascope/llm/providers/openai/completions/_utils/encode.py +9 -8
- mirascope/llm/providers/openai/completions/base_provider.py +513 -0
- mirascope/llm/providers/openai/completions/provider.py +13 -447
- mirascope/llm/providers/openai/model_info.py +57 -0
- mirascope/llm/providers/openai/provider.py +16 -4
- mirascope/llm/providers/openai/responses/_utils/decode.py +55 -4
- mirascope/llm/providers/openai/responses/_utils/encode.py +9 -9
- mirascope/llm/providers/openai/responses/provider.py +20 -21
- mirascope/llm/providers/provider_id.py +11 -1
- mirascope/llm/providers/provider_registry.py +3 -1
- mirascope/llm/providers/together/__init__.py +19 -0
- mirascope/llm/providers/together/provider.py +40 -0
- mirascope/llm/responses/__init__.py +3 -0
- mirascope/llm/responses/base_response.py +4 -0
- mirascope/llm/responses/base_stream_response.py +25 -1
- mirascope/llm/responses/finish_reason.py +1 -0
- mirascope/llm/responses/response.py +9 -0
- mirascope/llm/responses/root_response.py +5 -1
- mirascope/llm/responses/usage.py +95 -0
- {mirascope-2.0.0a3.dist-info → mirascope-2.0.0a4.dist-info}/METADATA +3 -3
- {mirascope-2.0.0a3.dist-info → mirascope-2.0.0a4.dist-info}/RECORD +91 -50
- mirascope/llm/providers/openai/shared/__init__.py +0 -7
- mirascope/llm/providers/openai/shared/_utils.py +0 -59
- {mirascope-2.0.0a3.dist-info → mirascope-2.0.0a4.dist-info}/WHEEL +0 -0
- {mirascope-2.0.0a3.dist-info → mirascope-2.0.0a4.dist-info}/licenses/LICENSE +0 -0
mirascope/llm/providers/anthropic/beta_provider.py

@@ -0,0 +1,322 @@
+"""Beta Anthropic provider implementation."""
+
+from collections.abc import Sequence
+from typing_extensions import Unpack
+
+from anthropic import Anthropic, AsyncAnthropic
+
+from ...context import Context, DepsT
+from ...formatting import Format, FormattableT
+from ...messages import Message
+from ...responses import (
+    AsyncContextResponse,
+    AsyncContextStreamResponse,
+    AsyncResponse,
+    AsyncStreamResponse,
+    ContextResponse,
+    ContextStreamResponse,
+    Response,
+    StreamResponse,
+)
+from ...tools import (
+    AsyncContextTool,
+    AsyncContextToolkit,
+    AsyncTool,
+    AsyncToolkit,
+    ContextTool,
+    ContextToolkit,
+    Tool,
+    Toolkit,
+)
+from ..base import BaseProvider, Params
+from ._utils import beta_decode, beta_encode
+from .model_id import model_name
+
+
+class AnthropicBetaProvider(BaseProvider[Anthropic]):
+    """Provider using beta Anthropic API."""
+
+    id = "anthropic-beta"
+    default_scope = "anthropic-beta/"
+
+    def __init__(
+        self, *, api_key: str | None = None, base_url: str | None = None
+    ) -> None:
+        """Initialize the beta Anthropic client."""
+        self.client = Anthropic(api_key=api_key, base_url=base_url)
+        self.async_client = AsyncAnthropic(api_key=api_key, base_url=base_url)
+
+    def _call(
+        self,
+        *,
+        model_id: str,
+        messages: Sequence[Message],
+        tools: Sequence[Tool] | Toolkit | None = None,
+        format: type[FormattableT] | Format[FormattableT] | None = None,
+        **params: Unpack[Params],
+    ) -> Response | Response[FormattableT]:
+        """Generate an `llm.Response` using the beta Anthropic API."""
+        input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
+            model_id=model_id,
+            messages=messages,
+            tools=tools,
+            format=format,
+            params=params,
+        )
+        beta_response = self.client.beta.messages.parse(**kwargs)
+        assistant_message, finish_reason, usage = beta_decode.beta_decode_response(
+            beta_response, model_id
+        )
+        return Response(
+            raw=beta_response,
+            provider_id="anthropic",
+            model_id=model_id,
+            provider_model_name=model_name(model_id),
+            params=params,
+            tools=tools,
+            input_messages=input_messages,
+            assistant_message=assistant_message,
+            finish_reason=finish_reason,
+            usage=usage,
+            format=resolved_format,
+        )
+
+    def _context_call(
+        self,
+        *,
+        ctx: Context[DepsT],
+        model_id: str,
+        messages: Sequence[Message],
+        tools: Sequence[Tool | ContextTool[DepsT]]
+        | ContextToolkit[DepsT]
+        | None = None,
+        format: type[FormattableT] | Format[FormattableT] | None = None,
+        **params: Unpack[Params],
+    ) -> ContextResponse[DepsT, None] | ContextResponse[DepsT, FormattableT]:
+        """Generate an `llm.ContextResponse` using the beta Anthropic API."""
+        input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
+            model_id=model_id,
+            messages=messages,
+            tools=tools,
+            format=format,
+            params=params,
+        )
+        beta_response = self.client.beta.messages.parse(**kwargs)
+        assistant_message, finish_reason, usage = beta_decode.beta_decode_response(
+            beta_response, model_id
+        )
+        return ContextResponse(
+            raw=beta_response,
+            provider_id="anthropic",
+            model_id=model_id,
+            provider_model_name=model_name(model_id),
+            params=params,
+            tools=tools,
+            input_messages=input_messages,
+            assistant_message=assistant_message,
+            finish_reason=finish_reason,
+            usage=usage,
+            format=resolved_format,
+        )
+
+    async def _call_async(
+        self,
+        *,
+        model_id: str,
+        messages: Sequence[Message],
+        tools: Sequence[AsyncTool] | AsyncToolkit | None = None,
+        format: type[FormattableT] | Format[FormattableT] | None = None,
+        **params: Unpack[Params],
+    ) -> AsyncResponse | AsyncResponse[FormattableT]:
+        """Generate an `llm.AsyncResponse` using the beta Anthropic API."""
+        input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
+            model_id=model_id,
+            messages=messages,
+            tools=tools,
+            format=format,
+            params=params,
+        )
+        beta_response = await self.async_client.beta.messages.parse(**kwargs)
+        assistant_message, finish_reason, usage = beta_decode.beta_decode_response(
+            beta_response, model_id
+        )
+        return AsyncResponse(
+            raw=beta_response,
+            provider_id="anthropic",
+            model_id=model_id,
+            provider_model_name=model_name(model_id),
+            params=params,
+            tools=tools,
+            input_messages=input_messages,
+            assistant_message=assistant_message,
+            finish_reason=finish_reason,
+            usage=usage,
+            format=resolved_format,
+        )
+
+    async def _context_call_async(
+        self,
+        *,
+        ctx: Context[DepsT],
+        model_id: str,
+        messages: Sequence[Message],
+        tools: Sequence[AsyncTool | AsyncContextTool[DepsT]]
+        | AsyncContextToolkit[DepsT]
+        | None = None,
+        format: type[FormattableT] | Format[FormattableT] | None = None,
+        **params: Unpack[Params],
+    ) -> AsyncContextResponse[DepsT, None] | AsyncContextResponse[DepsT, FormattableT]:
+        """Generate an `llm.AsyncContextResponse` using the beta Anthropic API."""
+        input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
+            model_id=model_id,
+            messages=messages,
+            tools=tools,
+            format=format,
+            params=params,
+        )
+        beta_response = await self.async_client.beta.messages.parse(**kwargs)
+        assistant_message, finish_reason, usage = beta_decode.beta_decode_response(
+            beta_response, model_id
+        )
+        return AsyncContextResponse(
+            raw=beta_response,
+            provider_id="anthropic",
+            model_id=model_id,
+            provider_model_name=model_name(model_id),
+            params=params,
+            tools=tools,
+            input_messages=input_messages,
+            assistant_message=assistant_message,
+            finish_reason=finish_reason,
+            usage=usage,
+            format=resolved_format,
+        )
+
+    def _stream(
+        self,
+        *,
+        model_id: str,
+        messages: Sequence[Message],
+        tools: Sequence[Tool] | Toolkit | None = None,
+        format: type[FormattableT] | Format[FormattableT] | None = None,
+        **params: Unpack[Params],
+    ) -> StreamResponse | StreamResponse[FormattableT]:
+        """Generate an `llm.StreamResponse` using the beta Anthropic API."""
+        input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
+            model_id=model_id,
+            messages=messages,
+            tools=tools,
+            format=format,
+            params=params,
+        )
+        beta_stream = self.client.beta.messages.stream(**kwargs)
+        chunk_iterator = beta_decode.beta_decode_stream(beta_stream)
+        return StreamResponse(
+            provider_id="anthropic",
+            model_id=model_id,
+            provider_model_name=model_name(model_id),
+            params=params,
+            tools=tools,
+            input_messages=input_messages,
+            chunk_iterator=chunk_iterator,
+            format=resolved_format,
+        )
+
+    def _context_stream(
+        self,
+        *,
+        ctx: Context[DepsT],
+        model_id: str,
+        messages: Sequence[Message],
+        tools: Sequence[Tool | ContextTool[DepsT]]
+        | ContextToolkit[DepsT]
+        | None = None,
+        format: type[FormattableT] | Format[FormattableT] | None = None,
+        **params: Unpack[Params],
+    ) -> ContextStreamResponse[DepsT] | ContextStreamResponse[DepsT, FormattableT]:
+        """Generate an `llm.ContextStreamResponse` using the beta Anthropic API."""
+        input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
+            model_id=model_id,
+            messages=messages,
+            tools=tools,
+            format=format,
+            params=params,
+        )
+        beta_stream = self.client.beta.messages.stream(**kwargs)
+        chunk_iterator = beta_decode.beta_decode_stream(beta_stream)
+        return ContextStreamResponse(
+            provider_id="anthropic",
+            model_id=model_id,
+            provider_model_name=model_name(model_id),
+            params=params,
+            tools=tools,
+            input_messages=input_messages,
+            chunk_iterator=chunk_iterator,
+            format=resolved_format,
+        )
+
+    async def _stream_async(
+        self,
+        *,
+        model_id: str,
+        messages: Sequence[Message],
+        tools: Sequence[AsyncTool] | AsyncToolkit | None = None,
+        format: type[FormattableT] | Format[FormattableT] | None = None,
+        **params: Unpack[Params],
+    ) -> AsyncStreamResponse | AsyncStreamResponse[FormattableT]:
+        """Generate an `llm.AsyncStreamResponse` using the beta Anthropic API."""
+        input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
+            model_id=model_id,
+            messages=messages,
+            tools=tools,
+            format=format,
+            params=params,
+        )
+        beta_stream = self.async_client.beta.messages.stream(**kwargs)
+        chunk_iterator = beta_decode.beta_decode_async_stream(beta_stream)
+        return AsyncStreamResponse(
+            provider_id="anthropic",
+            model_id=model_id,
+            provider_model_name=model_name(model_id),
+            params=params,
+            tools=tools,
+            input_messages=input_messages,
+            chunk_iterator=chunk_iterator,
+            format=resolved_format,
+        )
+
+    async def _context_stream_async(
+        self,
+        *,
+        ctx: Context[DepsT],
+        model_id: str,
+        messages: Sequence[Message],
+        tools: Sequence[AsyncTool | AsyncContextTool[DepsT]]
+        | AsyncContextToolkit[DepsT]
+        | None = None,
+        format: type[FormattableT] | Format[FormattableT] | None = None,
+        **params: Unpack[Params],
+    ) -> (
+        AsyncContextStreamResponse[DepsT]
+        | AsyncContextStreamResponse[DepsT, FormattableT]
+    ):
+        """Generate an `llm.AsyncContextStreamResponse` using the beta Anthropic API."""
+        input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
+            model_id=model_id,
+            messages=messages,
+            tools=tools,
+            format=format,
+            params=params,
+        )
+        beta_stream = self.async_client.beta.messages.stream(**kwargs)
+        chunk_iterator = beta_decode.beta_decode_async_stream(beta_stream)
+        return AsyncContextStreamResponse(
+            provider_id="anthropic",
+            model_id=model_id,
+            provider_model_name=model_name(model_id),
+            params=params,
+            tools=tools,
+            input_messages=input_messages,
+            chunk_iterator=chunk_iterator,
+            format=resolved_format,
+        )
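For orientation, a minimal, hypothetical usage sketch of the new provider (not taken from the package; it assumes ANTHROPIC_API_KEY is set and that requests are normally routed through Mirascope's provider registry rather than by calling the private methods above directly):

from mirascope.llm.providers.anthropic.beta_provider import AnthropicBetaProvider

# api_key/base_url are optional; when None, the Anthropic SDK falls back to its
# defaults (e.g. the ANTHROPIC_API_KEY environment variable).
provider = AnthropicBetaProvider()

# The beta provider registers under its own id and scope, but the responses it
# builds report provider_id="anthropic" (see the Response(...) constructions above).
assert provider.id == "anthropic-beta"
assert provider.default_scope == "anthropic-beta/"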
mirascope/llm/providers/anthropic/model_id.py

@@ -1,40 +1,23 @@
 """Anthropic registered LLM models."""

-from typing import
-
-
-
-
-        "anthropic/claude-3-7-sonnet-20250219",
-        "anthropic/claude-3-5-haiku-latest",
-        "anthropic/claude-3-5-haiku-20241022",
-        "anthropic/claude-haiku-4-5",
-        "anthropic/claude-haiku-4-5-20251001",
-        "anthropic/claude-sonnet-4-20250514",
-        "anthropic/claude-sonnet-4-0",
-        "anthropic/claude-4-sonnet-20250514",
-        "anthropic/claude-sonnet-4-5",
-        "anthropic/claude-sonnet-4-5-20250929",
-        "anthropic/claude-opus-4-0",
-        "anthropic/claude-opus-4-20250514",
-        "anthropic/claude-4-opus-20250514",
-        "anthropic/claude-opus-4-1-20250805",
-        "anthropic/claude-3-opus-latest",
-        "anthropic/claude-3-opus-20240229",
-        "anthropic/claude-3-haiku-20240307",
-    ]
-    | str
-)
+from typing import TypeAlias, get_args
+
+from .model_info import AnthropicKnownModels
+
+AnthropicModelId: TypeAlias = AnthropicKnownModels | str
 """The Anthropic model ids registered with Mirascope."""

+ANTHROPIC_KNOWN_MODELS: set[str] = set(get_args(AnthropicKnownModels))
+

 def model_name(model_id: AnthropicModelId) -> str:
     """Extract the anthropic model name from the ModelId

     Args:
-        model_id: Full model ID (e.g. "anthropic/claude-sonnet-4-5"
+        model_id: Full model ID (e.g. "anthropic/claude-sonnet-4-5" or
+            "anthropic-beta/claude-sonnet-4-5")

     Returns:
         Provider-specific model ID (e.g. "claude-sonnet-4-5")
     """
-    return model_id.removeprefix("anthropic/")
+    return model_id.removeprefix("anthropic-beta/").removeprefix("anthropic/")
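With this change model_name accepts both the standard and the beta scope, and the known model IDs become introspectable at runtime. A small illustration (behavior follows directly from the code above; not an excerpt from the package):

from mirascope.llm.providers.anthropic.model_id import ANTHROPIC_KNOWN_MODELS, model_name

model_name("anthropic/claude-sonnet-4-5")       # -> "claude-sonnet-4-5"
model_name("anthropic-beta/claude-sonnet-4-5")  # -> "claude-sonnet-4-5"

# get_args(AnthropicKnownModels) materializes the Literal entries into a runtime set.
"anthropic/claude-sonnet-4-5" in ANTHROPIC_KNOWN_MODELS  # -> True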
mirascope/llm/providers/anthropic/model_info.py

@@ -0,0 +1,87 @@
+"""Anthropic model information.
+
+This file is auto-generated by scripts/model_features/codegen_anthropic.py
+Do not edit manually - run the codegen script to update."""
+
+from typing import Literal
+
+AnthropicKnownModels = Literal[
+    "anthropic/claude-3-5-haiku",
+    "anthropic/claude-3-5-haiku-20241022",
+    "anthropic/claude-3-5-haiku-latest",
+    "anthropic/claude-3-7-sonnet",
+    "anthropic/claude-3-7-sonnet-20250219",
+    "anthropic/claude-3-7-sonnet-latest",
+    "anthropic/claude-3-haiku",
+    "anthropic/claude-3-haiku-20240307",
+    "anthropic/claude-3-haiku-latest",
+    "anthropic/claude-3-opus",
+    "anthropic/claude-3-opus-20240229",
+    "anthropic/claude-3-opus-latest",
+    "anthropic/claude-haiku-4-5",
+    "anthropic/claude-haiku-4-5-0",
+    "anthropic/claude-haiku-4-5-0-20251001",
+    "anthropic/claude-haiku-4-5-0-latest",
+    "anthropic/claude-haiku-4-5-20251001",
+    "anthropic/claude-haiku-4-5-latest",
+    "anthropic/claude-opus-4",
+    "anthropic/claude-opus-4-0",
+    "anthropic/claude-opus-4-0-20250514",
+    "anthropic/claude-opus-4-0-latest",
+    "anthropic/claude-opus-4-1",
+    "anthropic/claude-opus-4-1-0",
+    "anthropic/claude-opus-4-1-0-20250805",
+    "anthropic/claude-opus-4-1-0-latest",
+    "anthropic/claude-opus-4-1-20250805",
+    "anthropic/claude-opus-4-1-latest",
+    "anthropic/claude-opus-4-20250514",
+    "anthropic/claude-opus-4-5",
+    "anthropic/claude-opus-4-5-0",
+    "anthropic/claude-opus-4-5-0-20251101",
+    "anthropic/claude-opus-4-5-0-latest",
+    "anthropic/claude-opus-4-5-20251101",
+    "anthropic/claude-opus-4-5-latest",
+    "anthropic/claude-opus-4-latest",
+    "anthropic/claude-sonnet-4",
+    "anthropic/claude-sonnet-4-0",
+    "anthropic/claude-sonnet-4-0-20250514",
+    "anthropic/claude-sonnet-4-0-latest",
+    "anthropic/claude-sonnet-4-20250514",
+    "anthropic/claude-sonnet-4-5",
+    "anthropic/claude-sonnet-4-5-0",
+    "anthropic/claude-sonnet-4-5-0-20250929",
+    "anthropic/claude-sonnet-4-5-0-latest",
+    "anthropic/claude-sonnet-4-5-20250929",
+    "anthropic/claude-sonnet-4-5-latest",
+    "anthropic/claude-sonnet-4-latest",
+]
+"""Valid Anthropic model IDs."""
+
+
+MODELS_WITHOUT_STRICT_STRUCTURED_OUTPUTS: set[str] = {
+    "claude-3-5-haiku",
+    "claude-3-5-haiku-20241022",
+    "claude-3-5-haiku-latest",
+    "claude-3-7-sonnet",
+    "claude-3-7-sonnet-20250219",
+    "claude-3-7-sonnet-latest",
+    "claude-3-haiku",
+    "claude-3-haiku-20240307",
+    "claude-3-haiku-latest",
+    "claude-3-opus",
+    "claude-3-opus-20240229",
+    "claude-3-opus-latest",
+    "claude-opus-4",
+    "claude-opus-4-0",
+    "claude-opus-4-0-20250514",
+    "claude-opus-4-0-latest",
+    "claude-opus-4-20250514",
+    "claude-opus-4-latest",
+    "claude-sonnet-4",
+    "claude-sonnet-4-0",
+    "claude-sonnet-4-0-20250514",
+    "claude-sonnet-4-0-latest",
+    "claude-sonnet-4-20250514",
+    "claude-sonnet-4-latest",
+}
+"""Models that do not support strict structured outputs (strict mode tools)."""