mirascope 2.0.0a3__py3-none-any.whl → 2.0.0a5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (118)
  1. mirascope/api/_generated/__init__.py +78 -6
  2. mirascope/api/_generated/api_keys/__init__.py +7 -0
  3. mirascope/api/_generated/api_keys/client.py +453 -0
  4. mirascope/api/_generated/api_keys/raw_client.py +853 -0
  5. mirascope/api/_generated/api_keys/types/__init__.py +9 -0
  6. mirascope/api/_generated/api_keys/types/api_keys_create_response.py +36 -0
  7. mirascope/api/_generated/api_keys/types/api_keys_get_response.py +35 -0
  8. mirascope/api/_generated/api_keys/types/api_keys_list_response_item.py +35 -0
  9. mirascope/api/_generated/client.py +14 -0
  10. mirascope/api/_generated/environments/__init__.py +17 -0
  11. mirascope/api/_generated/environments/client.py +532 -0
  12. mirascope/api/_generated/environments/raw_client.py +1088 -0
  13. mirascope/api/_generated/environments/types/__init__.py +15 -0
  14. mirascope/api/_generated/environments/types/environments_create_response.py +26 -0
  15. mirascope/api/_generated/environments/types/environments_get_response.py +26 -0
  16. mirascope/api/_generated/environments/types/environments_list_response_item.py +26 -0
  17. mirascope/api/_generated/environments/types/environments_update_response.py +26 -0
  18. mirascope/api/_generated/errors/__init__.py +11 -1
  19. mirascope/api/_generated/errors/conflict_error.py +15 -0
  20. mirascope/api/_generated/errors/forbidden_error.py +15 -0
  21. mirascope/api/_generated/errors/internal_server_error.py +15 -0
  22. mirascope/api/_generated/errors/not_found_error.py +15 -0
  23. mirascope/api/_generated/organizations/__init__.py +25 -0
  24. mirascope/api/_generated/organizations/client.py +404 -0
  25. mirascope/api/_generated/organizations/raw_client.py +902 -0
  26. mirascope/api/_generated/organizations/types/__init__.py +23 -0
  27. mirascope/api/_generated/organizations/types/organizations_create_response.py +25 -0
  28. mirascope/api/_generated/organizations/types/organizations_create_response_role.py +7 -0
  29. mirascope/api/_generated/organizations/types/organizations_get_response.py +25 -0
  30. mirascope/api/_generated/organizations/types/organizations_get_response_role.py +7 -0
  31. mirascope/api/_generated/organizations/types/organizations_list_response_item.py +25 -0
  32. mirascope/api/_generated/organizations/types/organizations_list_response_item_role.py +7 -0
  33. mirascope/api/_generated/organizations/types/organizations_update_response.py +25 -0
  34. mirascope/api/_generated/organizations/types/organizations_update_response_role.py +7 -0
  35. mirascope/api/_generated/projects/__init__.py +17 -0
  36. mirascope/api/_generated/projects/client.py +482 -0
  37. mirascope/api/_generated/projects/raw_client.py +1058 -0
  38. mirascope/api/_generated/projects/types/__init__.py +15 -0
  39. mirascope/api/_generated/projects/types/projects_create_response.py +31 -0
  40. mirascope/api/_generated/projects/types/projects_get_response.py +31 -0
  41. mirascope/api/_generated/projects/types/projects_list_response_item.py +31 -0
  42. mirascope/api/_generated/projects/types/projects_update_response.py +31 -0
  43. mirascope/api/_generated/reference.md +1311 -0
  44. mirascope/api/_generated/types/__init__.py +20 -4
  45. mirascope/api/_generated/types/already_exists_error.py +24 -0
  46. mirascope/api/_generated/types/already_exists_error_tag.py +5 -0
  47. mirascope/api/_generated/types/database_error.py +24 -0
  48. mirascope/api/_generated/types/database_error_tag.py +5 -0
  49. mirascope/api/_generated/types/http_api_decode_error.py +1 -3
  50. mirascope/api/_generated/types/issue.py +1 -5
  51. mirascope/api/_generated/types/not_found_error_body.py +24 -0
  52. mirascope/api/_generated/types/not_found_error_tag.py +5 -0
  53. mirascope/api/_generated/types/permission_denied_error.py +24 -0
  54. mirascope/api/_generated/types/permission_denied_error_tag.py +7 -0
  55. mirascope/api/_generated/types/property_key.py +2 -2
  56. mirascope/api/_generated/types/{property_key_tag.py → property_key_key.py} +3 -5
  57. mirascope/api/_generated/types/{property_key_tag_tag.py → property_key_key_tag.py} +1 -1
  58. mirascope/llm/__init__.py +6 -2
  59. mirascope/llm/exceptions.py +28 -0
  60. mirascope/llm/providers/__init__.py +12 -4
  61. mirascope/llm/providers/anthropic/__init__.py +6 -1
  62. mirascope/llm/providers/anthropic/_utils/__init__.py +17 -5
  63. mirascope/llm/providers/anthropic/_utils/beta_decode.py +271 -0
  64. mirascope/llm/providers/anthropic/_utils/beta_encode.py +216 -0
  65. mirascope/llm/providers/anthropic/_utils/decode.py +39 -7
  66. mirascope/llm/providers/anthropic/_utils/encode.py +156 -64
  67. mirascope/llm/providers/anthropic/_utils/errors.py +46 -0
  68. mirascope/llm/providers/anthropic/beta_provider.py +328 -0
  69. mirascope/llm/providers/anthropic/model_id.py +10 -27
  70. mirascope/llm/providers/anthropic/model_info.py +87 -0
  71. mirascope/llm/providers/anthropic/provider.py +132 -145
  72. mirascope/llm/providers/base/__init__.py +2 -1
  73. mirascope/llm/providers/base/_utils.py +15 -1
  74. mirascope/llm/providers/base/base_provider.py +173 -58
  75. mirascope/llm/providers/google/_utils/__init__.py +2 -0
  76. mirascope/llm/providers/google/_utils/decode.py +55 -3
  77. mirascope/llm/providers/google/_utils/encode.py +14 -6
  78. mirascope/llm/providers/google/_utils/errors.py +49 -0
  79. mirascope/llm/providers/google/model_id.py +7 -13
  80. mirascope/llm/providers/google/model_info.py +62 -0
  81. mirascope/llm/providers/google/provider.py +13 -8
  82. mirascope/llm/providers/mlx/_utils.py +31 -2
  83. mirascope/llm/providers/mlx/encoding/transformers.py +17 -1
  84. mirascope/llm/providers/mlx/provider.py +12 -0
  85. mirascope/llm/providers/ollama/__init__.py +19 -0
  86. mirascope/llm/providers/ollama/provider.py +71 -0
  87. mirascope/llm/providers/openai/__init__.py +10 -1
  88. mirascope/llm/providers/openai/_utils/__init__.py +5 -0
  89. mirascope/llm/providers/openai/_utils/errors.py +46 -0
  90. mirascope/llm/providers/openai/completions/__init__.py +6 -1
  91. mirascope/llm/providers/openai/completions/_utils/decode.py +57 -5
  92. mirascope/llm/providers/openai/completions/_utils/encode.py +9 -8
  93. mirascope/llm/providers/openai/completions/base_provider.py +513 -0
  94. mirascope/llm/providers/openai/completions/provider.py +13 -447
  95. mirascope/llm/providers/openai/model_info.py +57 -0
  96. mirascope/llm/providers/openai/provider.py +30 -5
  97. mirascope/llm/providers/openai/responses/_utils/decode.py +55 -4
  98. mirascope/llm/providers/openai/responses/_utils/encode.py +9 -9
  99. mirascope/llm/providers/openai/responses/provider.py +33 -28
  100. mirascope/llm/providers/provider_id.py +11 -1
  101. mirascope/llm/providers/provider_registry.py +59 -4
  102. mirascope/llm/providers/together/__init__.py +19 -0
  103. mirascope/llm/providers/together/provider.py +40 -0
  104. mirascope/llm/responses/__init__.py +3 -0
  105. mirascope/llm/responses/base_response.py +4 -0
  106. mirascope/llm/responses/base_stream_response.py +25 -1
  107. mirascope/llm/responses/finish_reason.py +1 -0
  108. mirascope/llm/responses/response.py +9 -0
  109. mirascope/llm/responses/root_response.py +5 -1
  110. mirascope/llm/responses/usage.py +95 -0
  111. mirascope/ops/_internal/closure.py +62 -11
  112. {mirascope-2.0.0a3.dist-info → mirascope-2.0.0a5.dist-info}/METADATA +3 -3
  113. {mirascope-2.0.0a3.dist-info → mirascope-2.0.0a5.dist-info}/RECORD +115 -56
  114. mirascope/llm/providers/load_provider.py +0 -48
  115. mirascope/llm/providers/openai/shared/__init__.py +0 -7
  116. mirascope/llm/providers/openai/shared/_utils.py +0 -59
  117. {mirascope-2.0.0a3.dist-info → mirascope-2.0.0a5.dist-info}/WHEEL +0 -0
  118. {mirascope-2.0.0a3.dist-info → mirascope-2.0.0a5.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,328 @@
1
+ """Beta Anthropic provider implementation."""
2
+
3
+ from collections.abc import Sequence
4
+ from typing_extensions import Unpack
5
+
6
+ from anthropic import Anthropic, AsyncAnthropic
7
+
8
+ from ...context import Context, DepsT
9
+ from ...formatting import Format, FormattableT
10
+ from ...messages import Message
11
+ from ...responses import (
12
+ AsyncContextResponse,
13
+ AsyncContextStreamResponse,
14
+ AsyncResponse,
15
+ AsyncStreamResponse,
16
+ ContextResponse,
17
+ ContextStreamResponse,
18
+ Response,
19
+ StreamResponse,
20
+ )
21
+ from ...tools import (
22
+ AsyncContextTool,
23
+ AsyncContextToolkit,
24
+ AsyncTool,
25
+ AsyncToolkit,
26
+ ContextTool,
27
+ ContextToolkit,
28
+ Tool,
29
+ Toolkit,
30
+ )
31
+ from ..base import BaseProvider, Params
32
+ from . import _utils
33
+ from ._utils import beta_decode, beta_encode
34
+ from .model_id import model_name
35
+
36
+
37
+ class AnthropicBetaProvider(BaseProvider[Anthropic]):
38
+ """Provider using beta Anthropic API."""
39
+
40
+ id = "anthropic-beta"
41
+ default_scope = "anthropic-beta/"
42
+ error_map = _utils.ANTHROPIC_ERROR_MAP
43
+
44
+ def __init__(
45
+ self, *, api_key: str | None = None, base_url: str | None = None
46
+ ) -> None:
47
+ """Initialize the beta Anthropic client."""
48
+ self.client = Anthropic(api_key=api_key, base_url=base_url)
49
+ self.async_client = AsyncAnthropic(api_key=api_key, base_url=base_url)
50
+
51
+ def get_error_status(self, e: Exception) -> int | None:
52
+ """Extract HTTP status code from Anthropic exception."""
53
+ return getattr(e, "status_code", None)
54
+
55
+ def _call(
56
+ self,
57
+ *,
58
+ model_id: str,
59
+ messages: Sequence[Message],
60
+ tools: Sequence[Tool] | Toolkit | None = None,
61
+ format: type[FormattableT] | Format[FormattableT] | None = None,
62
+ **params: Unpack[Params],
63
+ ) -> Response | Response[FormattableT]:
64
+ """Generate an `llm.Response` using the beta Anthropic API."""
65
+ input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
66
+ model_id=model_id,
67
+ messages=messages,
68
+ tools=tools,
69
+ format=format,
70
+ params=params,
71
+ )
72
+ beta_response = self.client.beta.messages.parse(**kwargs)
73
+ assistant_message, finish_reason, usage = beta_decode.beta_decode_response(
74
+ beta_response, model_id
75
+ )
76
+ return Response(
77
+ raw=beta_response,
78
+ provider_id="anthropic",
79
+ model_id=model_id,
80
+ provider_model_name=model_name(model_id),
81
+ params=params,
82
+ tools=tools,
83
+ input_messages=input_messages,
84
+ assistant_message=assistant_message,
85
+ finish_reason=finish_reason,
86
+ usage=usage,
87
+ format=resolved_format,
88
+ )
89
+
90
+ def _context_call(
91
+ self,
92
+ *,
93
+ ctx: Context[DepsT],
94
+ model_id: str,
95
+ messages: Sequence[Message],
96
+ tools: Sequence[Tool | ContextTool[DepsT]]
97
+ | ContextToolkit[DepsT]
98
+ | None = None,
99
+ format: type[FormattableT] | Format[FormattableT] | None = None,
100
+ **params: Unpack[Params],
101
+ ) -> ContextResponse[DepsT, None] | ContextResponse[DepsT, FormattableT]:
102
+ """Generate an `llm.ContextResponse` using the beta Anthropic API."""
103
+ input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
104
+ model_id=model_id,
105
+ messages=messages,
106
+ tools=tools,
107
+ format=format,
108
+ params=params,
109
+ )
110
+ beta_response = self.client.beta.messages.parse(**kwargs)
111
+ assistant_message, finish_reason, usage = beta_decode.beta_decode_response(
112
+ beta_response, model_id
113
+ )
114
+ return ContextResponse(
115
+ raw=beta_response,
116
+ provider_id="anthropic",
117
+ model_id=model_id,
118
+ provider_model_name=model_name(model_id),
119
+ params=params,
120
+ tools=tools,
121
+ input_messages=input_messages,
122
+ assistant_message=assistant_message,
123
+ finish_reason=finish_reason,
124
+ usage=usage,
125
+ format=resolved_format,
126
+ )
127
+
128
+ async def _call_async(
129
+ self,
130
+ *,
131
+ model_id: str,
132
+ messages: Sequence[Message],
133
+ tools: Sequence[AsyncTool] | AsyncToolkit | None = None,
134
+ format: type[FormattableT] | Format[FormattableT] | None = None,
135
+ **params: Unpack[Params],
136
+ ) -> AsyncResponse | AsyncResponse[FormattableT]:
137
+ """Generate an `llm.AsyncResponse` using the beta Anthropic API."""
138
+ input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
139
+ model_id=model_id,
140
+ messages=messages,
141
+ tools=tools,
142
+ format=format,
143
+ params=params,
144
+ )
145
+ beta_response = await self.async_client.beta.messages.parse(**kwargs)
146
+ assistant_message, finish_reason, usage = beta_decode.beta_decode_response(
147
+ beta_response, model_id
148
+ )
149
+ return AsyncResponse(
150
+ raw=beta_response,
151
+ provider_id="anthropic",
152
+ model_id=model_id,
153
+ provider_model_name=model_name(model_id),
154
+ params=params,
155
+ tools=tools,
156
+ input_messages=input_messages,
157
+ assistant_message=assistant_message,
158
+ finish_reason=finish_reason,
159
+ usage=usage,
160
+ format=resolved_format,
161
+ )
162
+
163
+ async def _context_call_async(
164
+ self,
165
+ *,
166
+ ctx: Context[DepsT],
167
+ model_id: str,
168
+ messages: Sequence[Message],
169
+ tools: Sequence[AsyncTool | AsyncContextTool[DepsT]]
170
+ | AsyncContextToolkit[DepsT]
171
+ | None = None,
172
+ format: type[FormattableT] | Format[FormattableT] | None = None,
173
+ **params: Unpack[Params],
174
+ ) -> AsyncContextResponse[DepsT, None] | AsyncContextResponse[DepsT, FormattableT]:
175
+ """Generate an `llm.AsyncContextResponse` using the beta Anthropic API."""
176
+ input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
177
+ model_id=model_id,
178
+ messages=messages,
179
+ tools=tools,
180
+ format=format,
181
+ params=params,
182
+ )
183
+ beta_response = await self.async_client.beta.messages.parse(**kwargs)
184
+ assistant_message, finish_reason, usage = beta_decode.beta_decode_response(
185
+ beta_response, model_id
186
+ )
187
+ return AsyncContextResponse(
188
+ raw=beta_response,
189
+ provider_id="anthropic",
190
+ model_id=model_id,
191
+ provider_model_name=model_name(model_id),
192
+ params=params,
193
+ tools=tools,
194
+ input_messages=input_messages,
195
+ assistant_message=assistant_message,
196
+ finish_reason=finish_reason,
197
+ usage=usage,
198
+ format=resolved_format,
199
+ )
200
+
201
+ def _stream(
202
+ self,
203
+ *,
204
+ model_id: str,
205
+ messages: Sequence[Message],
206
+ tools: Sequence[Tool] | Toolkit | None = None,
207
+ format: type[FormattableT] | Format[FormattableT] | None = None,
208
+ **params: Unpack[Params],
209
+ ) -> StreamResponse | StreamResponse[FormattableT]:
210
+ """Generate an `llm.StreamResponse` using the beta Anthropic API."""
211
+ input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
212
+ model_id=model_id,
213
+ messages=messages,
214
+ tools=tools,
215
+ format=format,
216
+ params=params,
217
+ )
218
+ beta_stream = self.client.beta.messages.stream(**kwargs)
219
+ chunk_iterator = beta_decode.beta_decode_stream(beta_stream)
220
+ return StreamResponse(
221
+ provider_id="anthropic",
222
+ model_id=model_id,
223
+ provider_model_name=model_name(model_id),
224
+ params=params,
225
+ tools=tools,
226
+ input_messages=input_messages,
227
+ chunk_iterator=chunk_iterator,
228
+ format=resolved_format,
229
+ )
230
+
231
+ def _context_stream(
232
+ self,
233
+ *,
234
+ ctx: Context[DepsT],
235
+ model_id: str,
236
+ messages: Sequence[Message],
237
+ tools: Sequence[Tool | ContextTool[DepsT]]
238
+ | ContextToolkit[DepsT]
239
+ | None = None,
240
+ format: type[FormattableT] | Format[FormattableT] | None = None,
241
+ **params: Unpack[Params],
242
+ ) -> ContextStreamResponse[DepsT] | ContextStreamResponse[DepsT, FormattableT]:
243
+ """Generate an `llm.ContextStreamResponse` using the beta Anthropic API."""
244
+ input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
245
+ model_id=model_id,
246
+ messages=messages,
247
+ tools=tools,
248
+ format=format,
249
+ params=params,
250
+ )
251
+ beta_stream = self.client.beta.messages.stream(**kwargs)
252
+ chunk_iterator = beta_decode.beta_decode_stream(beta_stream)
253
+ return ContextStreamResponse(
254
+ provider_id="anthropic",
255
+ model_id=model_id,
256
+ provider_model_name=model_name(model_id),
257
+ params=params,
258
+ tools=tools,
259
+ input_messages=input_messages,
260
+ chunk_iterator=chunk_iterator,
261
+ format=resolved_format,
262
+ )
263
+
264
+ async def _stream_async(
265
+ self,
266
+ *,
267
+ model_id: str,
268
+ messages: Sequence[Message],
269
+ tools: Sequence[AsyncTool] | AsyncToolkit | None = None,
270
+ format: type[FormattableT] | Format[FormattableT] | None = None,
271
+ **params: Unpack[Params],
272
+ ) -> AsyncStreamResponse | AsyncStreamResponse[FormattableT]:
273
+ """Generate an `llm.AsyncStreamResponse` using the beta Anthropic API."""
274
+ input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
275
+ model_id=model_id,
276
+ messages=messages,
277
+ tools=tools,
278
+ format=format,
279
+ params=params,
280
+ )
281
+ beta_stream = self.async_client.beta.messages.stream(**kwargs)
282
+ chunk_iterator = beta_decode.beta_decode_async_stream(beta_stream)
283
+ return AsyncStreamResponse(
284
+ provider_id="anthropic",
285
+ model_id=model_id,
286
+ provider_model_name=model_name(model_id),
287
+ params=params,
288
+ tools=tools,
289
+ input_messages=input_messages,
290
+ chunk_iterator=chunk_iterator,
291
+ format=resolved_format,
292
+ )
293
+
294
+ async def _context_stream_async(
295
+ self,
296
+ *,
297
+ ctx: Context[DepsT],
298
+ model_id: str,
299
+ messages: Sequence[Message],
300
+ tools: Sequence[AsyncTool | AsyncContextTool[DepsT]]
301
+ | AsyncContextToolkit[DepsT]
302
+ | None = None,
303
+ format: type[FormattableT] | Format[FormattableT] | None = None,
304
+ **params: Unpack[Params],
305
+ ) -> (
306
+ AsyncContextStreamResponse[DepsT]
307
+ | AsyncContextStreamResponse[DepsT, FormattableT]
308
+ ):
309
+ """Generate an `llm.AsyncContextStreamResponse` using the beta Anthropic API."""
310
+ input_messages, resolved_format, kwargs = beta_encode.beta_encode_request(
311
+ model_id=model_id,
312
+ messages=messages,
313
+ tools=tools,
314
+ format=format,
315
+ params=params,
316
+ )
317
+ beta_stream = self.async_client.beta.messages.stream(**kwargs)
318
+ chunk_iterator = beta_decode.beta_decode_async_stream(beta_stream)
319
+ return AsyncContextStreamResponse(
320
+ provider_id="anthropic",
321
+ model_id=model_id,
322
+ provider_model_name=model_name(model_id),
323
+ params=params,
324
+ tools=tools,
325
+ input_messages=input_messages,
326
+ chunk_iterator=chunk_iterator,
327
+ format=resolved_format,
328
+ )
@@ -1,40 +1,23 @@
1
1
  """Anthropic registered LLM models."""
2
2
 
3
- from typing import Literal, TypeAlias
4
-
5
- AnthropicModelId: TypeAlias = (
6
- Literal[
7
- "anthropic/claude-3-7-sonnet-latest",
8
- "anthropic/claude-3-7-sonnet-20250219",
9
- "anthropic/claude-3-5-haiku-latest",
10
- "anthropic/claude-3-5-haiku-20241022",
11
- "anthropic/claude-haiku-4-5",
12
- "anthropic/claude-haiku-4-5-20251001",
13
- "anthropic/claude-sonnet-4-20250514",
14
- "anthropic/claude-sonnet-4-0",
15
- "anthropic/claude-4-sonnet-20250514",
16
- "anthropic/claude-sonnet-4-5",
17
- "anthropic/claude-sonnet-4-5-20250929",
18
- "anthropic/claude-opus-4-0",
19
- "anthropic/claude-opus-4-20250514",
20
- "anthropic/claude-4-opus-20250514",
21
- "anthropic/claude-opus-4-1-20250805",
22
- "anthropic/claude-3-opus-latest",
23
- "anthropic/claude-3-opus-20240229",
24
- "anthropic/claude-3-haiku-20240307",
25
- ]
26
- | str
27
- )
3
+ from typing import TypeAlias, get_args
4
+
5
+ from .model_info import AnthropicKnownModels
6
+
7
+ AnthropicModelId: TypeAlias = AnthropicKnownModels | str
28
8
  """The Anthropic model ids registered with Mirascope."""
29
9
 
10
+ ANTHROPIC_KNOWN_MODELS: set[str] = set(get_args(AnthropicKnownModels))
11
+
30
12
 
31
13
  def model_name(model_id: AnthropicModelId) -> str:
32
14
  """Extract the anthropic model name from the ModelId
33
15
 
34
16
  Args:
35
- model_id: Full model ID (e.g. "anthropic/claude-sonnet-4-5")
17
+ model_id: Full model ID (e.g. "anthropic/claude-sonnet-4-5" or
18
+ "anthropic-beta/claude-sonnet-4-5")
36
19
 
37
20
  Returns:
38
21
  Provider-specific model ID (e.g. "claude-sonnet-4-5")
39
22
  """
40
- return model_id.removeprefix("anthropic/")
23
+ return model_id.removeprefix("anthropic-beta/").removeprefix("anthropic/")
@@ -0,0 +1,87 @@
1
+ """Anthropic model information.
2
+
3
+ This file is auto-generated by scripts/model_features/codegen_anthropic.py
4
+ Do not edit manually - run the codegen script to update."""
5
+
6
+ from typing import Literal
7
+
8
+ AnthropicKnownModels = Literal[
9
+ "anthropic/claude-3-5-haiku",
10
+ "anthropic/claude-3-5-haiku-20241022",
11
+ "anthropic/claude-3-5-haiku-latest",
12
+ "anthropic/claude-3-7-sonnet",
13
+ "anthropic/claude-3-7-sonnet-20250219",
14
+ "anthropic/claude-3-7-sonnet-latest",
15
+ "anthropic/claude-3-haiku",
16
+ "anthropic/claude-3-haiku-20240307",
17
+ "anthropic/claude-3-haiku-latest",
18
+ "anthropic/claude-3-opus",
19
+ "anthropic/claude-3-opus-20240229",
20
+ "anthropic/claude-3-opus-latest",
21
+ "anthropic/claude-haiku-4-5",
22
+ "anthropic/claude-haiku-4-5-0",
23
+ "anthropic/claude-haiku-4-5-0-20251001",
24
+ "anthropic/claude-haiku-4-5-0-latest",
25
+ "anthropic/claude-haiku-4-5-20251001",
26
+ "anthropic/claude-haiku-4-5-latest",
27
+ "anthropic/claude-opus-4",
28
+ "anthropic/claude-opus-4-0",
29
+ "anthropic/claude-opus-4-0-20250514",
30
+ "anthropic/claude-opus-4-0-latest",
31
+ "anthropic/claude-opus-4-1",
32
+ "anthropic/claude-opus-4-1-0",
33
+ "anthropic/claude-opus-4-1-0-20250805",
34
+ "anthropic/claude-opus-4-1-0-latest",
35
+ "anthropic/claude-opus-4-1-20250805",
36
+ "anthropic/claude-opus-4-1-latest",
37
+ "anthropic/claude-opus-4-20250514",
38
+ "anthropic/claude-opus-4-5",
39
+ "anthropic/claude-opus-4-5-0",
40
+ "anthropic/claude-opus-4-5-0-20251101",
41
+ "anthropic/claude-opus-4-5-0-latest",
42
+ "anthropic/claude-opus-4-5-20251101",
43
+ "anthropic/claude-opus-4-5-latest",
44
+ "anthropic/claude-opus-4-latest",
45
+ "anthropic/claude-sonnet-4",
46
+ "anthropic/claude-sonnet-4-0",
47
+ "anthropic/claude-sonnet-4-0-20250514",
48
+ "anthropic/claude-sonnet-4-0-latest",
49
+ "anthropic/claude-sonnet-4-20250514",
50
+ "anthropic/claude-sonnet-4-5",
51
+ "anthropic/claude-sonnet-4-5-0",
52
+ "anthropic/claude-sonnet-4-5-0-20250929",
53
+ "anthropic/claude-sonnet-4-5-0-latest",
54
+ "anthropic/claude-sonnet-4-5-20250929",
55
+ "anthropic/claude-sonnet-4-5-latest",
56
+ "anthropic/claude-sonnet-4-latest",
57
+ ]
58
+ """Valid Anthropic model IDs."""
59
+
60
+
61
+ MODELS_WITHOUT_STRICT_STRUCTURED_OUTPUTS: set[str] = {
62
+ "claude-3-5-haiku",
63
+ "claude-3-5-haiku-20241022",
64
+ "claude-3-5-haiku-latest",
65
+ "claude-3-7-sonnet",
66
+ "claude-3-7-sonnet-20250219",
67
+ "claude-3-7-sonnet-latest",
68
+ "claude-3-haiku",
69
+ "claude-3-haiku-20240307",
70
+ "claude-3-haiku-latest",
71
+ "claude-3-opus",
72
+ "claude-3-opus-20240229",
73
+ "claude-3-opus-latest",
74
+ "claude-opus-4",
75
+ "claude-opus-4-0",
76
+ "claude-opus-4-0-20250514",
77
+ "claude-opus-4-0-latest",
78
+ "claude-opus-4-20250514",
79
+ "claude-opus-4-latest",
80
+ "claude-sonnet-4",
81
+ "claude-sonnet-4-0",
82
+ "claude-sonnet-4-0-20250514",
83
+ "claude-sonnet-4-0-latest",
84
+ "claude-sonnet-4-20250514",
85
+ "claude-sonnet-4-latest",
86
+ }
87
+ """Models that do not support strict structured outputs (strict mode tools)."""