mirascope 2.0.0a6__py3-none-any.whl → 2.0.2__py3-none-any.whl

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (230)
  1. mirascope/_utils.py +34 -0
  2. mirascope/api/_generated/__init__.py +186 -5
  3. mirascope/api/_generated/annotations/client.py +38 -6
  4. mirascope/api/_generated/annotations/raw_client.py +366 -47
  5. mirascope/api/_generated/annotations/types/annotations_create_response.py +19 -6
  6. mirascope/api/_generated/annotations/types/annotations_get_response.py +19 -6
  7. mirascope/api/_generated/annotations/types/annotations_list_response_annotations_item.py +22 -7
  8. mirascope/api/_generated/annotations/types/annotations_update_response.py +19 -6
  9. mirascope/api/_generated/api_keys/__init__.py +12 -2
  10. mirascope/api/_generated/api_keys/client.py +107 -6
  11. mirascope/api/_generated/api_keys/raw_client.py +486 -38
  12. mirascope/api/_generated/api_keys/types/__init__.py +7 -1
  13. mirascope/api/_generated/api_keys/types/api_keys_list_all_for_org_response_item.py +40 -0
  14. mirascope/api/_generated/client.py +36 -0
  15. mirascope/api/_generated/docs/raw_client.py +71 -9
  16. mirascope/api/_generated/environment.py +3 -3
  17. mirascope/api/_generated/environments/__init__.py +6 -0
  18. mirascope/api/_generated/environments/client.py +158 -9
  19. mirascope/api/_generated/environments/raw_client.py +620 -52
  20. mirascope/api/_generated/environments/types/__init__.py +10 -0
  21. mirascope/api/_generated/environments/types/environments_get_analytics_response.py +60 -0
  22. mirascope/api/_generated/environments/types/environments_get_analytics_response_top_functions_item.py +24 -0
  23. mirascope/api/_generated/{organizations/types/organizations_credits_response.py → environments/types/environments_get_analytics_response_top_models_item.py} +6 -3
  24. mirascope/api/_generated/errors/__init__.py +6 -0
  25. mirascope/api/_generated/errors/bad_request_error.py +5 -2
  26. mirascope/api/_generated/errors/conflict_error.py +5 -2
  27. mirascope/api/_generated/errors/payment_required_error.py +15 -0
  28. mirascope/api/_generated/errors/service_unavailable_error.py +14 -0
  29. mirascope/api/_generated/errors/too_many_requests_error.py +15 -0
  30. mirascope/api/_generated/functions/__init__.py +10 -0
  31. mirascope/api/_generated/functions/client.py +222 -8
  32. mirascope/api/_generated/functions/raw_client.py +975 -134
  33. mirascope/api/_generated/functions/types/__init__.py +28 -4
  34. mirascope/api/_generated/functions/types/functions_get_by_env_response.py +53 -0
  35. mirascope/api/_generated/functions/types/functions_get_by_env_response_dependencies_value.py +22 -0
  36. mirascope/api/_generated/functions/types/functions_list_by_env_response.py +25 -0
  37. mirascope/api/_generated/functions/types/functions_list_by_env_response_functions_item.py +56 -0
  38. mirascope/api/_generated/functions/types/functions_list_by_env_response_functions_item_dependencies_value.py +22 -0
  39. mirascope/api/_generated/health/raw_client.py +74 -10
  40. mirascope/api/_generated/organization_invitations/__init__.py +33 -0
  41. mirascope/api/_generated/organization_invitations/client.py +546 -0
  42. mirascope/api/_generated/organization_invitations/raw_client.py +1519 -0
  43. mirascope/api/_generated/organization_invitations/types/__init__.py +53 -0
  44. mirascope/api/_generated/organization_invitations/types/organization_invitations_accept_response.py +34 -0
  45. mirascope/api/_generated/organization_invitations/types/organization_invitations_accept_response_role.py +7 -0
  46. mirascope/api/_generated/organization_invitations/types/organization_invitations_create_request_role.py +7 -0
  47. mirascope/api/_generated/organization_invitations/types/organization_invitations_create_response.py +48 -0
  48. mirascope/api/_generated/organization_invitations/types/organization_invitations_create_response_role.py +7 -0
  49. mirascope/api/_generated/organization_invitations/types/organization_invitations_create_response_status.py +7 -0
  50. mirascope/api/_generated/organization_invitations/types/organization_invitations_get_response.py +48 -0
  51. mirascope/api/_generated/organization_invitations/types/organization_invitations_get_response_role.py +7 -0
  52. mirascope/api/_generated/organization_invitations/types/organization_invitations_get_response_status.py +7 -0
  53. mirascope/api/_generated/organization_invitations/types/organization_invitations_list_response_item.py +48 -0
  54. mirascope/api/_generated/organization_invitations/types/organization_invitations_list_response_item_role.py +7 -0
  55. mirascope/api/_generated/organization_invitations/types/organization_invitations_list_response_item_status.py +7 -0
  56. mirascope/api/_generated/organization_memberships/__init__.py +19 -0
  57. mirascope/api/_generated/organization_memberships/client.py +302 -0
  58. mirascope/api/_generated/organization_memberships/raw_client.py +736 -0
  59. mirascope/api/_generated/organization_memberships/types/__init__.py +27 -0
  60. mirascope/api/_generated/organization_memberships/types/organization_memberships_list_response_item.py +33 -0
  61. mirascope/api/_generated/organization_memberships/types/organization_memberships_list_response_item_role.py +7 -0
  62. mirascope/api/_generated/organization_memberships/types/organization_memberships_update_request_role.py +7 -0
  63. mirascope/api/_generated/organization_memberships/types/organization_memberships_update_response.py +31 -0
  64. mirascope/api/_generated/organization_memberships/types/organization_memberships_update_response_role.py +7 -0
  65. mirascope/api/_generated/organizations/__init__.py +26 -2
  66. mirascope/api/_generated/organizations/client.py +442 -20
  67. mirascope/api/_generated/organizations/raw_client.py +1763 -164
  68. mirascope/api/_generated/organizations/types/__init__.py +48 -2
  69. mirascope/api/_generated/organizations/types/organizations_create_payment_intent_response.py +24 -0
  70. mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_request_target_plan.py +7 -0
  71. mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_response.py +47 -0
  72. mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_response_validation_errors_item.py +33 -0
  73. mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_response_validation_errors_item_resource.py +7 -0
  74. mirascope/api/_generated/organizations/types/organizations_router_balance_response.py +24 -0
  75. mirascope/api/_generated/organizations/types/organizations_subscription_response.py +53 -0
  76. mirascope/api/_generated/organizations/types/organizations_subscription_response_current_plan.py +7 -0
  77. mirascope/api/_generated/organizations/types/organizations_subscription_response_payment_method.py +26 -0
  78. mirascope/api/_generated/organizations/types/organizations_subscription_response_scheduled_change.py +34 -0
  79. mirascope/api/_generated/organizations/types/organizations_subscription_response_scheduled_change_target_plan.py +7 -0
  80. mirascope/api/_generated/organizations/types/organizations_update_subscription_request_target_plan.py +7 -0
  81. mirascope/api/_generated/organizations/types/organizations_update_subscription_response.py +35 -0
  82. mirascope/api/_generated/project_memberships/__init__.py +25 -0
  83. mirascope/api/_generated/project_memberships/client.py +437 -0
  84. mirascope/api/_generated/project_memberships/raw_client.py +1039 -0
  85. mirascope/api/_generated/project_memberships/types/__init__.py +29 -0
  86. mirascope/api/_generated/project_memberships/types/project_memberships_create_request_role.py +7 -0
  87. mirascope/api/_generated/project_memberships/types/project_memberships_create_response.py +35 -0
  88. mirascope/api/_generated/project_memberships/types/project_memberships_create_response_role.py +7 -0
  89. mirascope/api/_generated/project_memberships/types/project_memberships_list_response_item.py +33 -0
  90. mirascope/api/_generated/project_memberships/types/project_memberships_list_response_item_role.py +7 -0
  91. mirascope/api/_generated/project_memberships/types/project_memberships_update_request_role.py +7 -0
  92. mirascope/api/_generated/project_memberships/types/project_memberships_update_response.py +35 -0
  93. mirascope/api/_generated/project_memberships/types/project_memberships_update_response_role.py +7 -0
  94. mirascope/api/_generated/projects/raw_client.py +415 -58
  95. mirascope/api/_generated/reference.md +2767 -397
  96. mirascope/api/_generated/tags/__init__.py +19 -0
  97. mirascope/api/_generated/tags/client.py +504 -0
  98. mirascope/api/_generated/tags/raw_client.py +1288 -0
  99. mirascope/api/_generated/tags/types/__init__.py +17 -0
  100. mirascope/api/_generated/tags/types/tags_create_response.py +41 -0
  101. mirascope/api/_generated/tags/types/tags_get_response.py +41 -0
  102. mirascope/api/_generated/tags/types/tags_list_response.py +23 -0
  103. mirascope/api/_generated/tags/types/tags_list_response_tags_item.py +41 -0
  104. mirascope/api/_generated/tags/types/tags_update_response.py +41 -0
  105. mirascope/api/_generated/token_cost/__init__.py +7 -0
  106. mirascope/api/_generated/token_cost/client.py +160 -0
  107. mirascope/api/_generated/token_cost/raw_client.py +264 -0
  108. mirascope/api/_generated/token_cost/types/__init__.py +8 -0
  109. mirascope/api/_generated/token_cost/types/token_cost_calculate_request_usage.py +54 -0
  110. mirascope/api/_generated/token_cost/types/token_cost_calculate_response.py +52 -0
  111. mirascope/api/_generated/traces/__init__.py +20 -0
  112. mirascope/api/_generated/traces/client.py +543 -0
  113. mirascope/api/_generated/traces/raw_client.py +1366 -96
  114. mirascope/api/_generated/traces/types/__init__.py +28 -0
  115. mirascope/api/_generated/traces/types/traces_get_analytics_summary_response.py +6 -0
  116. mirascope/api/_generated/traces/types/traces_get_trace_detail_by_env_response.py +33 -0
  117. mirascope/api/_generated/traces/types/traces_get_trace_detail_by_env_response_spans_item.py +88 -0
  118. mirascope/api/_generated/traces/types/traces_get_trace_detail_response_spans_item.py +0 -2
  119. mirascope/api/_generated/traces/types/traces_list_by_function_hash_response.py +25 -0
  120. mirascope/api/_generated/traces/types/traces_list_by_function_hash_response_traces_item.py +44 -0
  121. mirascope/api/_generated/traces/types/traces_search_by_env_request_attribute_filters_item.py +26 -0
  122. mirascope/api/_generated/traces/types/traces_search_by_env_request_attribute_filters_item_operator.py +7 -0
  123. mirascope/api/_generated/traces/types/traces_search_by_env_request_sort_by.py +7 -0
  124. mirascope/api/_generated/traces/types/traces_search_by_env_request_sort_order.py +7 -0
  125. mirascope/api/_generated/traces/types/traces_search_by_env_response.py +26 -0
  126. mirascope/api/_generated/traces/types/traces_search_by_env_response_spans_item.py +50 -0
  127. mirascope/api/_generated/traces/types/traces_search_response_spans_item.py +10 -1
  128. mirascope/api/_generated/types/__init__.py +32 -2
  129. mirascope/api/_generated/types/bad_request_error_body.py +50 -0
  130. mirascope/api/_generated/types/date.py +3 -0
  131. mirascope/api/_generated/types/immutable_resource_error.py +22 -0
  132. mirascope/api/_generated/types/internal_server_error_body.py +3 -3
  133. mirascope/api/_generated/types/plan_limit_exceeded_error.py +32 -0
  134. mirascope/api/_generated/types/plan_limit_exceeded_error_tag.py +7 -0
  135. mirascope/api/_generated/types/pricing_unavailable_error.py +23 -0
  136. mirascope/api/_generated/types/rate_limit_error.py +31 -0
  137. mirascope/api/_generated/types/rate_limit_error_tag.py +5 -0
  138. mirascope/api/_generated/types/service_unavailable_error_body.py +24 -0
  139. mirascope/api/_generated/types/service_unavailable_error_tag.py +7 -0
  140. mirascope/api/_generated/types/subscription_past_due_error.py +31 -0
  141. mirascope/api/_generated/types/subscription_past_due_error_tag.py +7 -0
  142. mirascope/api/settings.py +19 -1
  143. mirascope/llm/__init__.py +53 -10
  144. mirascope/llm/calls/__init__.py +2 -1
  145. mirascope/llm/calls/calls.py +29 -20
  146. mirascope/llm/calls/decorator.py +21 -7
  147. mirascope/llm/content/tool_output.py +22 -5
  148. mirascope/llm/exceptions.py +284 -71
  149. mirascope/llm/formatting/__init__.py +17 -0
  150. mirascope/llm/formatting/format.py +112 -35
  151. mirascope/llm/formatting/output_parser.py +178 -0
  152. mirascope/llm/formatting/partial.py +80 -7
  153. mirascope/llm/formatting/primitives.py +192 -0
  154. mirascope/llm/formatting/types.py +20 -8
  155. mirascope/llm/messages/__init__.py +3 -0
  156. mirascope/llm/messages/_utils.py +34 -0
  157. mirascope/llm/models/__init__.py +5 -0
  158. mirascope/llm/models/models.py +137 -69
  159. mirascope/llm/{providers/base → models}/params.py +7 -57
  160. mirascope/llm/models/thinking_config.py +61 -0
  161. mirascope/llm/prompts/_utils.py +0 -32
  162. mirascope/llm/prompts/decorator.py +16 -5
  163. mirascope/llm/prompts/prompts.py +160 -92
  164. mirascope/llm/providers/__init__.py +1 -4
  165. mirascope/llm/providers/anthropic/_utils/__init__.py +2 -0
  166. mirascope/llm/providers/anthropic/_utils/beta_decode.py +18 -9
  167. mirascope/llm/providers/anthropic/_utils/beta_encode.py +62 -13
  168. mirascope/llm/providers/anthropic/_utils/decode.py +18 -9
  169. mirascope/llm/providers/anthropic/_utils/encode.py +26 -7
  170. mirascope/llm/providers/anthropic/_utils/errors.py +2 -2
  171. mirascope/llm/providers/anthropic/beta_provider.py +64 -18
  172. mirascope/llm/providers/anthropic/provider.py +91 -33
  173. mirascope/llm/providers/base/__init__.py +0 -4
  174. mirascope/llm/providers/base/_utils.py +55 -6
  175. mirascope/llm/providers/base/base_provider.py +116 -37
  176. mirascope/llm/providers/google/_utils/__init__.py +2 -0
  177. mirascope/llm/providers/google/_utils/decode.py +20 -7
  178. mirascope/llm/providers/google/_utils/encode.py +26 -7
  179. mirascope/llm/providers/google/_utils/errors.py +3 -2
  180. mirascope/llm/providers/google/provider.py +64 -18
  181. mirascope/llm/providers/mirascope/_utils.py +13 -17
  182. mirascope/llm/providers/mirascope/provider.py +49 -18
  183. mirascope/llm/providers/mlx/_utils.py +7 -2
  184. mirascope/llm/providers/mlx/encoding/base.py +5 -2
  185. mirascope/llm/providers/mlx/encoding/transformers.py +5 -2
  186. mirascope/llm/providers/mlx/mlx.py +23 -6
  187. mirascope/llm/providers/mlx/provider.py +42 -13
  188. mirascope/llm/providers/openai/_utils/errors.py +2 -2
  189. mirascope/llm/providers/openai/completions/_utils/encode.py +20 -16
  190. mirascope/llm/providers/openai/completions/base_provider.py +40 -11
  191. mirascope/llm/providers/openai/provider.py +40 -10
  192. mirascope/llm/providers/openai/responses/_utils/__init__.py +2 -0
  193. mirascope/llm/providers/openai/responses/_utils/decode.py +19 -6
  194. mirascope/llm/providers/openai/responses/_utils/encode.py +22 -10
  195. mirascope/llm/providers/openai/responses/provider.py +56 -18
  196. mirascope/llm/providers/provider_registry.py +93 -19
  197. mirascope/llm/responses/__init__.py +6 -1
  198. mirascope/llm/responses/_utils.py +102 -12
  199. mirascope/llm/responses/base_response.py +5 -2
  200. mirascope/llm/responses/base_stream_response.py +115 -25
  201. mirascope/llm/responses/response.py +2 -1
  202. mirascope/llm/responses/root_response.py +89 -17
  203. mirascope/llm/responses/stream_response.py +6 -9
  204. mirascope/llm/tools/decorator.py +9 -4
  205. mirascope/llm/tools/tool_schema.py +17 -6
  206. mirascope/llm/tools/toolkit.py +35 -27
  207. mirascope/llm/tools/tools.py +45 -20
  208. mirascope/ops/__init__.py +4 -0
  209. mirascope/ops/_internal/closure.py +4 -1
  210. mirascope/ops/_internal/configuration.py +82 -31
  211. mirascope/ops/_internal/exporters/exporters.py +55 -35
  212. mirascope/ops/_internal/exporters/utils.py +37 -0
  213. mirascope/ops/_internal/instrumentation/llm/common.py +530 -0
  214. mirascope/ops/_internal/instrumentation/llm/cost.py +190 -0
  215. mirascope/ops/_internal/instrumentation/llm/encode.py +1 -1
  216. mirascope/ops/_internal/instrumentation/llm/llm.py +116 -1242
  217. mirascope/ops/_internal/instrumentation/llm/model.py +1798 -0
  218. mirascope/ops/_internal/instrumentation/llm/response.py +521 -0
  219. mirascope/ops/_internal/instrumentation/llm/serialize.py +300 -0
  220. mirascope/ops/_internal/protocols.py +83 -1
  221. mirascope/ops/_internal/traced_calls.py +18 -0
  222. mirascope/ops/_internal/traced_functions.py +125 -10
  223. mirascope/ops/_internal/tracing.py +78 -1
  224. mirascope/ops/_internal/utils.py +60 -4
  225. mirascope/ops/_internal/versioned_functions.py +1 -1
  226. {mirascope-2.0.0a6.dist-info → mirascope-2.0.2.dist-info}/METADATA +12 -11
  227. mirascope-2.0.2.dist-info/RECORD +424 -0
  228. {mirascope-2.0.0a6.dist-info → mirascope-2.0.2.dist-info}/licenses/LICENSE +1 -1
  229. mirascope-2.0.0a6.dist-info/RECORD +0 -316
  230. {mirascope-2.0.0a6.dist-info → mirascope-2.0.2.dist-info}/WHEEL +0 -0
@@ -1,7 +1,7 @@
 """Beta Anthropic message encoding and request preparation."""

 from collections.abc import Sequence
-from typing import Any, TypedDict, cast
+from typing import TYPE_CHECKING, Any, TypedDict, cast
 from typing_extensions import Required

 from anthropic import Omit
@@ -17,25 +17,28 @@ from anthropic.types.beta import (
 from pydantic import BaseModel

 from ....content import ContentPart
-from ....exceptions import FormattingModeNotSupportedError
+from ....exceptions import FeatureNotSupportedError
 from ....formatting import (
     Format,
     FormattableT,
+    OutputParser,
     resolve_format,
 )
 from ....messages import AssistantMessage, Message, UserMessage
 from ....tools import AnyToolSchema, BaseToolkit
-from ...base import Params, _utils as _base_utils
+from ...base import _utils as _base_utils
 from ..model_id import model_name
 from ..model_info import MODELS_WITHOUT_STRICT_STRUCTURED_OUTPUTS
 from .encode import (
     DEFAULT_MAX_TOKENS,
     FORMAT_TOOL_NAME,
-    convert_tool_to_tool_param,
     encode_content,
     process_params,
 )

+if TYPE_CHECKING:
+    from ....models import Params
+
 DEFAULT_FORMAT_MODE = "strict"

@@ -131,9 +134,29 @@ def _beta_encode_messages(
     return encoded_messages


-def _beta_convert_tool_to_tool_param(tool: AnyToolSchema) -> BetaToolParam:
-    """Convert a single Mirascope tool to Beta Anthropic tool format."""
-    return cast(BetaToolParam, convert_tool_to_tool_param(tool))
+def _beta_convert_tool_to_tool_param(
+    tool: AnyToolSchema, model_supports_strict: bool
+) -> BetaToolParam:
+    """Convert a single Mirascope tool to Beta Anthropic tool format.
+
+    If the tool has strict=True (or None, and the model supports strict), the schema
+    is modified to be compatible with Anthropic's strict structured outputs beta
+    by adding additionalProperties: false to all object schemas, and strict=True
+    is passed to the API.
+    """
+    schema_dict = tool.parameters.model_dump(by_alias=True, exclude_none=True)
+    schema_dict["type"] = "object"
+
+    strict = model_supports_strict if tool.strict is None else tool.strict
+    if strict:
+        _base_utils.ensure_additional_properties_false(schema_dict)
+
+    return BetaToolParam(
+        name=tool.name,
+        description=tool.description,
+        input_schema=schema_dict,
+        strict=strict,
+    )


 def beta_encode_request(
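Not part of the diff: the new conversion path leans on _base_utils.ensure_additional_properties_false, whose body is not shown here. A minimal sketch of what a helper with that name presumably does, assuming a plain dict/list JSON-schema representation (an illustration, not mirascope's actual implementation):

    def ensure_additional_properties_false(schema: dict) -> None:
        """Recursively pin every object schema so extra keys are rejected (sketch)."""
        if schema.get("type") == "object":
            schema.setdefault("additionalProperties", False)
        # Recurse into the places nested object schemas usually live.
        for key in ("properties", "$defs", "definitions"):
            for sub in schema.get(key, {}).values():
                if isinstance(sub, dict):
                    ensure_additional_properties_false(sub)
        items = schema.get("items")
        if isinstance(items, dict):
            ensure_additional_properties_false(items)

Whatever the exact implementation, the effect described in the docstring above is the same: every object node in the tool's parameter schema carries additionalProperties: false before strict=True is sent to the API.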
@@ -141,8 +164,11 @@ def beta_encode_request(
     model_id: str,
     messages: Sequence[Message],
     tools: Sequence[AnyToolSchema] | BaseToolkit[AnyToolSchema] | None,
-    format: type[FormattableT] | Format[FormattableT] | None,
-    params: Params,
+    format: type[FormattableT]
+    | Format[FormattableT]
+    | OutputParser[FormattableT]
+    | None,
+    params: "Params",
 ) -> tuple[Sequence[Message], Format[FormattableT] | None, BetaParseKwargs]:
     """Prepares a request for the Anthropic beta.messages.parse method."""

@@ -160,14 +186,33 @@
     )

     tools = tools.tools if isinstance(tools, BaseToolkit) else tools or []
-    anthropic_tools = [_beta_convert_tool_to_tool_param(tool) for tool in tools]
+
+    model_supports_strict = (
+        model_name(model_id) not in MODELS_WITHOUT_STRICT_STRUCTURED_OUTPUTS
+    )
+    # Check for strict tools on models that don't support them
+    if _base_utils.has_strict_tools(tools) and not model_supports_strict:
+        raise FeatureNotSupportedError(
+            feature="strict tools",
+            provider_id="anthropic",
+            model_id=model_id,
+            message="Strict tools require a model that supports structured outputs. "
+            "Use a newer model like claude-sonnet-4-5 or set strict=False on your tools.",
+        )
+
+    anthropic_tools = [
+        _beta_convert_tool_to_tool_param(
+            tool, model_supports_strict=model_supports_strict
+        )
+        for tool in tools
+    ]
     format = resolve_format(format, default_mode=DEFAULT_FORMAT_MODE)

     if format is not None:
         if format.mode == "strict":
             if model_name(model_id) in MODELS_WITHOUT_STRICT_STRUCTURED_OUTPUTS:
-                raise FormattingModeNotSupportedError(
-                    formatting_mode=format.mode,
+                raise FeatureNotSupportedError(
+                    feature=f"formatting_mode:{format.mode}",
                     provider_id="anthropic",
                     model_id=model_id,
                 )
@@ -176,7 +221,11 @@

         if format.mode == "tool":
             format_tool_schema = format.create_tool_schema()
-            anthropic_tools.append(_beta_convert_tool_to_tool_param(format_tool_schema))
+            anthropic_tools.append(
+                _beta_convert_tool_to_tool_param(
+                    format_tool_schema, model_supports_strict=model_supports_strict
+                )
+            )
             if tools:
                 kwargs["tool_choice"] = {"type": "any"}
             else:
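_base_utils.has_strict_tools is likewise referenced above without its definition appearing in this diff. Read literally from its name and usage, it would reduce to something like the following sketch (an assumption, shown only to make the guard concrete):

    def has_strict_tools(tools) -> bool:
        # Hypothetical helper: True if any tool explicitly opted into strict schemas.
        return any(tool.strict is True for tool in tools)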
@@ -83,10 +83,15 @@ def decode_usage(
 def decode_response(
     response: anthropic_types.Message,
     model_id: AnthropicModelId,
+    *,
+    include_thoughts: bool,
 ) -> tuple[AssistantMessage, FinishReason | None, Usage]:
     """Convert Anthropic message to mirascope AssistantMessage and usage."""
+    content = [_decode_assistant_content(part) for part in response.content]
+    if not include_thoughts:
+        content = [part for part in content if part.type != "thought"]
     assistant_message = AssistantMessage(
-        content=[_decode_assistant_content(part) for part in response.content],
+        content=content,
         provider_id="anthropic",
         model_id=model_id,
         provider_model_name=model_name(model_id),
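The thought-filtering logic added to decode_response can be exercised on its own; a tiny stand-alone demo with stand-in parts (the real content types live in mirascope.llm.content and are not shown here):

    from dataclasses import dataclass

    @dataclass
    class Part:
        type: str
        text: str

    decoded = [Part("thought", "reasoning..."), Part("text", "Hello!")]
    include_thoughts = False
    content = [p for p in decoded if include_thoughts or p.type != "thought"]
    assert [p.type for p in content] == ["text"]

With include_thoughts=True the list passes through unchanged, which matches the behavior before this change.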
@@ -115,10 +120,11 @@ ContentBlock: TypeAlias = (
 class _AnthropicChunkProcessor:
     """Processes Anthropic stream events and maintains state across events."""

-    def __init__(self) -> None:
+    def __init__(self, *, include_thoughts: bool) -> None:
         self.current_block_param: ContentBlock | None = None
         self.accumulated_tool_json: str = ""
         self.accumulated_blocks: list[ContentBlock] = []
+        self.include_thoughts = include_thoughts

     def process_event(
         self, event: anthropic_types.RawMessageStreamEvent
@@ -153,7 +159,8 @@ class _AnthropicChunkProcessor:
                 "thinking": "",
                 "signature": "",
             }
-            yield ThoughtStartChunk()
+            if self.include_thoughts:
+                yield ThoughtStartChunk()
         elif content_block.type == "redacted_thinking":  # pragma: no cover
             self.current_block_param = {
                 "type": "redacted_thinking",
@@ -189,7 +196,8 @@ class _AnthropicChunkProcessor:
                     f"Received thinking_delta for {self.current_block_param['type']} block"
                 )
             self.current_block_param["thinking"] += delta.thinking
-            yield ThoughtChunk(delta=delta.thinking)
+            if self.include_thoughts:
+                yield ThoughtChunk(delta=delta.thinking)
         elif delta.type == "signature_delta":
             if self.current_block_param["type"] != "thinking":  # pragma: no cover
                 raise RuntimeError(
@@ -221,7 +229,8 @@
             )
             yield ToolCallEndChunk(id=self.current_block_param["id"])
         elif block_type == "thinking":
-            yield ThoughtEndChunk()
+            if self.include_thoughts:
+                yield ThoughtEndChunk()
         else:
             raise NotImplementedError

@@ -257,10 +266,10 @@


 def decode_stream(
-    anthropic_stream_manager: MessageStreamManager,
+    anthropic_stream_manager: MessageStreamManager, *, include_thoughts: bool
 ) -> ChunkIterator:
     """Returns a ChunkIterator converted from an Anthropic MessageStreamManager."""
-    processor = _AnthropicChunkProcessor()
+    processor = _AnthropicChunkProcessor(include_thoughts=include_thoughts)
     with anthropic_stream_manager as stream:
         for event in stream._raw_stream:  # pyright: ignore[reportPrivateUsage]
             yield from processor.process_event(event)
@@ -268,10 +277,10 @@ def decode_stream(


 async def decode_async_stream(
-    anthropic_stream_manager: AsyncMessageStreamManager,
+    anthropic_stream_manager: AsyncMessageStreamManager, *, include_thoughts: bool
 ) -> AsyncChunkIterator:
     """Returns an AsyncChunkIterator converted from an Anthropic MessageStreamManager."""
-    processor = _AnthropicChunkProcessor()
+    processor = _AnthropicChunkProcessor(include_thoughts=include_thoughts)
     async with anthropic_stream_manager as stream:
         async for event in stream._raw_stream:  # pyright: ignore[reportPrivateUsage]
             for item in processor.process_event(event):
@@ -1,25 +1,31 @@
 """Shared Anthropic encoding utilities."""

+from __future__ import annotations
+
 import json
 from collections.abc import Sequence
 from functools import lru_cache
-from typing import Any, Literal, TypedDict, cast
+from typing import TYPE_CHECKING, Any, Literal, TypedDict, cast
 from typing_extensions import Required

 from anthropic import Omit, types as anthropic_types

 from ....content import ContentPart, ImageMimeType
-from ....exceptions import FeatureNotSupportedError, FormattingModeNotSupportedError
+from ....exceptions import FeatureNotSupportedError
 from ....formatting import (
     Format,
     FormattableT,
+    OutputParser,
     resolve_format,
 )
 from ....messages import AssistantMessage, Message, UserMessage
 from ....tools import FORMAT_TOOL_NAME, AnyToolSchema, BaseToolkit
-from ...base import Params, ThinkingLevel, _utils as _base_utils
+from ...base import _utils as _base_utils
 from ..model_id import AnthropicModelId, model_name

+if TYPE_CHECKING:
+    from ....models import Params, ThinkingLevel
+
 DEFAULT_MAX_TOKENS = 16000
 # TODO: Change DEFAULT_FORMAT_MODE to strict when strict is no longer a beta feature.
 DEFAULT_FORMAT_MODE = "tool"
@@ -226,7 +232,7 @@ def encode_content(
             anthropic_types.ToolResultBlockParam(
                 type="tool_result",
                 tool_use_id=part.id,
-                content=str(part.value),
+                content=str(part.result),
                 cache_control={"type": "ephemeral"} if should_add_cache else None,
             )
         )
@@ -333,7 +339,10 @@ def encode_request(
     model_id: AnthropicModelId,
     messages: Sequence[Message],
     tools: Sequence[AnyToolSchema] | BaseToolkit[AnyToolSchema] | None,
-    format: type[FormattableT] | Format[FormattableT] | None,
+    format: type[FormattableT]
+    | Format[FormattableT]
+    | OutputParser[FormattableT]
+    | None,
     params: Params,
 ) -> tuple[Sequence[Message], Format[FormattableT] | None, MessageCreateKwargs]:
     """Prepares a request for the Anthropic messages.create method."""
@@ -347,12 +356,22 @@
     )

     tools = tools.tools if isinstance(tools, BaseToolkit) else tools or []
+
+    # Check for strict tools - the non-beta API doesn't support them
+    if _base_utils.has_strict_tools(tools):
+        raise FeatureNotSupportedError(
+            feature="strict tools",
+            provider_id="anthropic",
+            model_id=model_id,
+            message="Anthropic provider does not support strict tools. Try the beta provider.",
+        )
+
     anthropic_tools = [convert_tool_to_tool_param(tool) for tool in tools]
     format = resolve_format(format, default_mode=DEFAULT_FORMAT_MODE)
     if format is not None:
         if format.mode == "strict":
-            raise FormattingModeNotSupportedError(
-                formatting_mode="strict",
+            raise FeatureNotSupportedError(
+                feature="formatting_mode:strict",
                 provider_id="anthropic",
                 model_id=model_id,
             )
@@ -16,12 +16,12 @@ from anthropic import (
 )

 from ....exceptions import (
-    APIError,
     AuthenticationError,
     BadRequestError,
     ConnectionError,
     NotFoundError,
     PermissionError,
+    ProviderError,
     RateLimitError,
     ResponseValidationError,
     ServerError,
@@ -42,5 +42,5 @@ ANTHROPIC_ERROR_MAP: ProviderErrorMap = {
     AnthropicAPITimeoutError: TimeoutError,
     AnthropicAPIConnectionError: ConnectionError,
     AnthropicAPIResponseValidationError: ResponseValidationError,
-    AnthropicError: APIError,  # Catch-all for unknown Anthropic errors
+    AnthropicError: ProviderError,  # Catch-all for unknown Anthropic errors
 }
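ANTHROPIC_ERROR_MAP pairs Anthropic SDK exception classes with mirascope exception classes, but the code that consults it is not part of this diff. One plausible way such a map gets applied, assuming the mirascope exceptions accept a plain message string (a hypothetical helper, not the library's actual code):

    def wrap_provider_error(
        error: Exception, error_map: dict[type[Exception], type[Exception]]
    ) -> Exception:
        # Sketch: wrap the first matching SDK exception in its mirascope counterpart.
        for sdk_error_type, mirascope_error_type in error_map.items():
            if isinstance(error, sdk_error_type):
                return mirascope_error_type(str(error))
        return error

Under this reading, insertion order matters: the catch-all AnthropicError entry has to stay last, exactly as it does in the map above, or it would shadow the more specific mappings.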
@@ -1,12 +1,15 @@
 """Beta Anthropic provider implementation."""

+from __future__ import annotations
+
 from collections.abc import Sequence
+from typing import TYPE_CHECKING
 from typing_extensions import Unpack

 from anthropic import Anthropic, AsyncAnthropic

 from ...context import Context, DepsT
-from ...formatting import Format, FormattableT
+from ...formatting import Format, FormattableT, OutputParser
 from ...messages import Message
 from ...responses import (
     AsyncContextResponse,
@@ -28,11 +31,14 @@ from ...tools import (
     Tool,
     Toolkit,
 )
-from ..base import BaseProvider, Params
+from ..base import BaseProvider
 from . import _utils
 from ._utils import beta_decode, beta_encode
 from .model_id import model_name

+if TYPE_CHECKING:
+    from ...models import Params
+

 class AnthropicBetaProvider(BaseProvider[Anthropic]):
     """Provider using beta Anthropic API."""
@@ -58,7 +64,10 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
         model_id: str,
         messages: Sequence[Message],
         tools: Sequence[Tool] | Toolkit | None = None,
-        format: type[FormattableT] | Format[FormattableT] | None = None,
+        format: type[FormattableT]
+        | Format[FormattableT]
+        | OutputParser[FormattableT]
+        | None = None,
         **params: Unpack[Params],
     ) -> Response | Response[FormattableT]:
         """Generate an `llm.Response` using the beta Anthropic API."""
@@ -70,8 +79,9 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
             params=params,
         )
         beta_response = self.client.beta.messages.parse(**kwargs)
+        include_thoughts = _utils.get_include_thoughts(params)
         assistant_message, finish_reason, usage = beta_decode.beta_decode_response(
-            beta_response, model_id
+            beta_response, model_id, include_thoughts=include_thoughts
         )
         return Response(
             raw=beta_response,
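Every call and stream variant below repeats this pattern: encode the request, invoke the beta API, read _utils.get_include_thoughts(params) once, and thread the flag into the decode step. The helper's body is not in this diff; a rough guess at its shape, where the thinking key and the include_thoughts attribute are both assumptions rather than confirmed API (compare the new mirascope/llm/models/thinking_config.py in the file list):

    def get_include_thoughts(params) -> bool:
        # Assumed shape: a thinking config in params may opt out of surfacing thoughts.
        thinking = params.get("thinking")  # key name is a guess, not confirmed here
        if thinking is None:
            return True  # default: keep thought parts in the decoded content
        return getattr(thinking, "include_thoughts", True)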
@@ -96,7 +106,10 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
         tools: Sequence[Tool | ContextTool[DepsT]]
         | ContextToolkit[DepsT]
         | None = None,
-        format: type[FormattableT] | Format[FormattableT] | None = None,
+        format: type[FormattableT]
+        | Format[FormattableT]
+        | OutputParser[FormattableT]
+        | None = None,
         **params: Unpack[Params],
     ) -> ContextResponse[DepsT, None] | ContextResponse[DepsT, FormattableT]:
         """Generate an `llm.ContextResponse` using the beta Anthropic API."""
@@ -108,8 +121,9 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
             params=params,
         )
         beta_response = self.client.beta.messages.parse(**kwargs)
+        include_thoughts = _utils.get_include_thoughts(params)
         assistant_message, finish_reason, usage = beta_decode.beta_decode_response(
-            beta_response, model_id
+            beta_response, model_id, include_thoughts=include_thoughts
         )
         return ContextResponse(
             raw=beta_response,
@@ -131,7 +145,10 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
         model_id: str,
         messages: Sequence[Message],
         tools: Sequence[AsyncTool] | AsyncToolkit | None = None,
-        format: type[FormattableT] | Format[FormattableT] | None = None,
+        format: type[FormattableT]
+        | Format[FormattableT]
+        | OutputParser[FormattableT]
+        | None = None,
         **params: Unpack[Params],
     ) -> AsyncResponse | AsyncResponse[FormattableT]:
         """Generate an `llm.AsyncResponse` using the beta Anthropic API."""
@@ -143,8 +160,9 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
             params=params,
         )
         beta_response = await self.async_client.beta.messages.parse(**kwargs)
+        include_thoughts = _utils.get_include_thoughts(params)
         assistant_message, finish_reason, usage = beta_decode.beta_decode_response(
-            beta_response, model_id
+            beta_response, model_id, include_thoughts=include_thoughts
         )
         return AsyncResponse(
             raw=beta_response,
@@ -169,7 +187,10 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
         tools: Sequence[AsyncTool | AsyncContextTool[DepsT]]
         | AsyncContextToolkit[DepsT]
         | None = None,
-        format: type[FormattableT] | Format[FormattableT] | None = None,
+        format: type[FormattableT]
+        | Format[FormattableT]
+        | OutputParser[FormattableT]
+        | None = None,
         **params: Unpack[Params],
     ) -> AsyncContextResponse[DepsT, None] | AsyncContextResponse[DepsT, FormattableT]:
         """Generate an `llm.AsyncContextResponse` using the beta Anthropic API."""
@@ -181,8 +202,9 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
             params=params,
         )
         beta_response = await self.async_client.beta.messages.parse(**kwargs)
+        include_thoughts = _utils.get_include_thoughts(params)
         assistant_message, finish_reason, usage = beta_decode.beta_decode_response(
-            beta_response, model_id
+            beta_response, model_id, include_thoughts=include_thoughts
         )
         return AsyncContextResponse(
             raw=beta_response,
@@ -204,7 +226,10 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
         model_id: str,
         messages: Sequence[Message],
         tools: Sequence[Tool] | Toolkit | None = None,
-        format: type[FormattableT] | Format[FormattableT] | None = None,
+        format: type[FormattableT]
+        | Format[FormattableT]
+        | OutputParser[FormattableT]
+        | None = None,
         **params: Unpack[Params],
     ) -> StreamResponse | StreamResponse[FormattableT]:
         """Generate an `llm.StreamResponse` using the beta Anthropic API."""
@@ -216,7 +241,10 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
             params=params,
         )
         beta_stream = self.client.beta.messages.stream(**kwargs)
-        chunk_iterator = beta_decode.beta_decode_stream(beta_stream)
+        include_thoughts = _utils.get_include_thoughts(params)
+        chunk_iterator = beta_decode.beta_decode_stream(
+            beta_stream, include_thoughts=include_thoughts
+        )
         return StreamResponse(
             provider_id="anthropic",
             model_id=model_id,
@@ -237,7 +265,10 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
         tools: Sequence[Tool | ContextTool[DepsT]]
         | ContextToolkit[DepsT]
         | None = None,
-        format: type[FormattableT] | Format[FormattableT] | None = None,
+        format: type[FormattableT]
+        | Format[FormattableT]
+        | OutputParser[FormattableT]
+        | None = None,
         **params: Unpack[Params],
     ) -> ContextStreamResponse[DepsT] | ContextStreamResponse[DepsT, FormattableT]:
         """Generate an `llm.ContextStreamResponse` using the beta Anthropic API."""
@@ -249,7 +280,10 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
             params=params,
         )
         beta_stream = self.client.beta.messages.stream(**kwargs)
-        chunk_iterator = beta_decode.beta_decode_stream(beta_stream)
+        include_thoughts = _utils.get_include_thoughts(params)
+        chunk_iterator = beta_decode.beta_decode_stream(
+            beta_stream, include_thoughts=include_thoughts
+        )
         return ContextStreamResponse(
             provider_id="anthropic",
             model_id=model_id,
@@ -267,7 +301,10 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
         model_id: str,
         messages: Sequence[Message],
         tools: Sequence[AsyncTool] | AsyncToolkit | None = None,
-        format: type[FormattableT] | Format[FormattableT] | None = None,
+        format: type[FormattableT]
+        | Format[FormattableT]
+        | OutputParser[FormattableT]
+        | None = None,
         **params: Unpack[Params],
     ) -> AsyncStreamResponse | AsyncStreamResponse[FormattableT]:
         """Generate an `llm.AsyncStreamResponse` using the beta Anthropic API."""
@@ -279,7 +316,10 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
             params=params,
         )
         beta_stream = self.async_client.beta.messages.stream(**kwargs)
-        chunk_iterator = beta_decode.beta_decode_async_stream(beta_stream)
+        include_thoughts = _utils.get_include_thoughts(params)
+        chunk_iterator = beta_decode.beta_decode_async_stream(
+            beta_stream, include_thoughts=include_thoughts
+        )
         return AsyncStreamResponse(
             provider_id="anthropic",
             model_id=model_id,
@@ -300,7 +340,10 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
         tools: Sequence[AsyncTool | AsyncContextTool[DepsT]]
         | AsyncContextToolkit[DepsT]
         | None = None,
-        format: type[FormattableT] | Format[FormattableT] | None = None,
+        format: type[FormattableT]
+        | Format[FormattableT]
+        | OutputParser[FormattableT]
+        | None = None,
         **params: Unpack[Params],
     ) -> (
         AsyncContextStreamResponse[DepsT]
@@ -315,7 +358,10 @@ class AnthropicBetaProvider(BaseProvider[Anthropic]):
             params=params,
         )
         beta_stream = self.async_client.beta.messages.stream(**kwargs)
-        chunk_iterator = beta_decode.beta_decode_async_stream(beta_stream)
+        include_thoughts = _utils.get_include_thoughts(params)
+        chunk_iterator = beta_decode.beta_decode_async_stream(
+            beta_stream, include_thoughts=include_thoughts
+        )
         return AsyncContextStreamResponse(
             provider_id="anthropic",
             model_id=model_id,