anthropic-0.75.0-py3-none-any.whl → anthropic-0.76.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. anthropic/_base_client.py +140 -11
  2. anthropic/_client.py +4 -12
  3. anthropic/_models.py +16 -1
  4. anthropic/_streaming.py +78 -76
  5. anthropic/_types.py +12 -2
  6. anthropic/_version.py +1 -1
  7. anthropic/lib/tools/_beta_compaction_control.py +2 -2
  8. anthropic/lib/tools/_beta_runner.py +17 -0
  9. anthropic/resources/beta/messages/messages.py +62 -27
  10. anthropic/resources/messages/messages.py +2 -0
  11. anthropic/types/beta/beta_container.py +4 -0
  12. anthropic/types/beta/beta_container_params.py +2 -0
  13. anthropic/types/beta/beta_container_upload_block.py +2 -0
  14. anthropic/types/beta/beta_container_upload_block_param.py +5 -0
  15. anthropic/types/beta/beta_direct_caller.py +2 -0
  16. anthropic/types/beta/beta_direct_caller_param.py +2 -0
  17. anthropic/types/beta/beta_mcp_tool_config_param.py +2 -0
  18. anthropic/types/beta/beta_mcp_tool_default_config_param.py +2 -0
  19. anthropic/types/beta/beta_mcp_toolset_param.py +6 -0
  20. anthropic/types/beta/beta_server_tool_caller.py +2 -0
  21. anthropic/types/beta/beta_server_tool_caller_param.py +2 -0
  22. anthropic/types/beta/beta_skill.py +2 -0
  23. anthropic/types/beta/beta_skill_params.py +2 -0
  24. anthropic/types/beta/beta_tool_choice_any_param.py +2 -0
  25. anthropic/types/beta/beta_tool_choice_auto_param.py +2 -0
  26. anthropic/types/beta/beta_tool_choice_none_param.py +2 -0
  27. anthropic/types/beta/beta_tool_choice_tool_param.py +2 -0
  28. anthropic/types/beta/beta_tool_param.py +5 -0
  29. anthropic/types/beta/beta_tool_reference_block_param.py +2 -0
  30. anthropic/types/beta/beta_web_search_tool_20250305_param.py +5 -0
  31. anthropic/types/beta/messages/beta_message_batch_individual_response.py +4 -0
  32. anthropic/types/messages/message_batch_individual_response.py +4 -0
  33. anthropic/types/tool_choice_any_param.py +2 -0
  34. anthropic/types/tool_choice_auto_param.py +2 -0
  35. anthropic/types/tool_choice_none_param.py +2 -0
  36. anthropic/types/tool_choice_tool_param.py +2 -0
  37. anthropic/types/tool_param.py +5 -0
  38. anthropic/types/web_search_tool_20250305_param.py +5 -0
  39. {anthropic-0.75.0.dist-info → anthropic-0.76.0.dist-info}/METADATA +4 -2
  40. {anthropic-0.75.0.dist-info → anthropic-0.76.0.dist-info}/RECORD +42 -42
  41. {anthropic-0.75.0.dist-info → anthropic-0.76.0.dist-info}/WHEEL +0 -0
  42. {anthropic-0.75.0.dist-info → anthropic-0.76.0.dist-info}/licenses/LICENSE +0 -0
anthropic/resources/beta/messages/messages.py
@@ -41,13 +41,21 @@ from ....types.beta import (
  message_count_tokens_params,
  )
  from ...._base_client import make_request_options
+ from ...._utils._utils import is_dict
  from ....lib.streaming import BetaMessageStreamManager, BetaAsyncMessageStreamManager
  from ...messages.messages import DEPRECATED_MODELS
  from ....types.model_param import ModelParam
  from ....lib._parse._response import ResponseFormatT, parse_response
  from ....lib._parse._transform import transform_schema
  from ....types.beta.beta_message import BetaMessage
- from ....lib.tools._beta_functions import BetaRunnableTool, BetaAsyncRunnableTool
+ from ....lib.tools._beta_functions import (
+ BetaFunctionTool,
+ BetaRunnableTool,
+ BetaAsyncFunctionTool,
+ BetaAsyncRunnableTool,
+ BetaBuiltinFunctionTool,
+ BetaAsyncBuiltinFunctionTool,
+ )
  from ....types.anthropic_beta_param import AnthropicBetaParam
  from ....types.beta.beta_message_param import BetaMessageParam
  from ....types.beta.beta_metadata_param import BetaMetadataParam
@@ -1174,7 +1182,7 @@ class Messages(SyncAPIResource):
  max_tokens: int,
  messages: Iterable[BetaMessageParam],
  model: ModelParam,
- tools: Iterable[BetaRunnableTool],
+ tools: Iterable[BetaRunnableTool | BetaToolUnionParam],
  compaction_control: CompactionControl | Omit = omit,
  container: Optional[message_create_params.Container] | Omit = omit,
  context_management: Optional[BetaContextManagementConfigParam] | Omit = omit,
@@ -1208,7 +1216,7 @@ class Messages(SyncAPIResource):
  max_tokens: int,
  messages: Iterable[BetaMessageParam],
  model: ModelParam,
- tools: Iterable[BetaRunnableTool],
+ tools: Iterable[BetaRunnableTool | BetaToolUnionParam],
  compaction_control: CompactionControl | Omit = omit,
  stream: Literal[True],
  max_iterations: int | Omit = omit,
@@ -1242,7 +1250,7 @@ class Messages(SyncAPIResource):
  max_tokens: int,
  messages: Iterable[BetaMessageParam],
  model: ModelParam,
- tools: Iterable[BetaRunnableTool],
+ tools: Iterable[BetaRunnableTool | BetaToolUnionParam],
  compaction_control: CompactionControl | Omit = omit,
  stream: bool,
  max_iterations: int | Omit = omit,
@@ -1275,7 +1283,7 @@ class Messages(SyncAPIResource):
  max_tokens: int,
  messages: Iterable[BetaMessageParam],
  model: ModelParam,
- tools: Iterable[BetaRunnableTool],
+ tools: Iterable[BetaRunnableTool | BetaToolUnionParam],
  compaction_control: CompactionControl | Omit = omit,
  max_iterations: int | Omit = omit,
  container: Optional[message_create_params.Container] | Omit = omit,
@@ -1315,6 +1323,15 @@ class Messages(SyncAPIResource):
  **(extra_headers or {}),
  }

+ runnable_tools: list[BetaRunnableTool] = []
+ raw_tools: list[BetaToolUnionParam] = []
+
+ for tool in tools:
+ if isinstance(tool, (BetaFunctionTool, BetaBuiltinFunctionTool)):
+ runnable_tools.append(tool)
+ else:
+ raw_tools.append(tool)
+
  params = cast(
  message_create_params.ParseMessageCreateParamsBase[ResponseFormatT],
  {
@@ -1333,7 +1350,7 @@ class Messages(SyncAPIResource):
  "temperature": temperature,
  "thinking": thinking,
  "tool_choice": tool_choice,
- "tools": [tool.to_dict() for tool in tools],
+ "tools": [*[tool.to_dict() for tool in runnable_tools], *raw_tools],
  "top_k": top_k,
  "top_p": top_p,
  },
@@ -1341,7 +1358,7 @@ class Messages(SyncAPIResource):

  if stream:
  return BetaStreamingToolRunner[ResponseFormatT](
- tools=tools,
+ tools=runnable_tools,
  params=params,
  options={
  "extra_headers": extra_headers,
@@ -1354,7 +1371,7 @@ class Messages(SyncAPIResource):
  compaction_control=compaction_control if is_given(compaction_control) else None,
  )
  return BetaToolRunner[ResponseFormatT](
- tools=tools,
+ tools=runnable_tools,
  params=params,
  options={
  "extra_headers": extra_headers,
@@ -1378,7 +1395,7 @@ class Messages(SyncAPIResource):
  mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit,
  metadata: BetaMetadataParam | Omit = omit,
  output_config: BetaOutputConfigParam | Omit = omit,
- output_format: Optional[type[ResponseFormatT]] | Omit = omit,
+ output_format: None | BetaJSONOutputFormatParam | type[ResponseFormatT] | Omit = omit,
  service_tier: Literal["auto", "standard_only"] | Omit = omit,
  stop_sequences: SequenceNotStr[str] | Omit = omit,
  system: Union[str, Iterable[BetaTextBlockParam]] | Omit = omit,
@@ -1411,14 +1428,16 @@ class Messages(SyncAPIResource):
  **(extra_headers or {}),
  }

- transformed_output_format: Optional[message_create_params.OutputFormat] | NotGiven = NOT_GIVEN
+ transformed_output_format: Optional[BetaJSONOutputFormatParam] | NotGiven = NOT_GIVEN

- if is_given(output_format) and output_format is not None:
+ if is_dict(output_format):
+ transformed_output_format = cast(BetaJSONOutputFormatParam, output_format)
+ elif is_given(output_format) and output_format is not None:
  adapted_type: TypeAdapter[ResponseFormatT] = TypeAdapter(output_format)

  try:
  schema = adapted_type.json_schema()
- transformed_output_format = message_create_params.OutputFormat(
+ transformed_output_format = BetaJSONOutputFormatParam(
  schema=transform_schema(schema), type="json_schema"
  )
  except pydantic.errors.PydanticSchemaGenerationError as e:
@@ -1428,7 +1447,6 @@ class Messages(SyncAPIResource):
  "Use a type that works with `pydanitc.TypeAdapter`"
  )
  ) from e
-
  make_request = partial(
  self._post,
  "/v1/messages?beta=true",
@@ -1463,7 +1481,10 @@ class Messages(SyncAPIResource):
  stream=True,
  stream_cls=Stream[BetaRawMessageStreamEvent],
  )
- return BetaMessageStreamManager(make_request, output_format=cast(ResponseFormatT, output_format))
+ return BetaMessageStreamManager(
+ make_request,
+ output_format=NOT_GIVEN if is_dict(output_format) else cast(ResponseFormatT, output_format),
+ )

  def count_tokens(
  self,
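
With `output_format` widened to also accept a `BetaJSONOutputFormatParam`, a pre-built JSON-schema dict can now be passed straight through via the `is_dict` branch above, skipping the `TypeAdapter` conversion used for pydantic model types. A sketch under that assumption; the schema contents and model alias below are illustrative:

```python
# Sketch only: passing a BetaJSONOutputFormatParam dict to stream() instead of a
# pydantic model type. The dict is forwarded as-is, and the stream manager then
# receives NOT_GIVEN as its output_format, per the change above.
from anthropic import Anthropic

client = Anthropic()

weather_format = {
    "type": "json_schema",
    "schema": {
        "type": "object",
        "properties": {
            "city": {"type": "string"},
            "temperature_c": {"type": "number"},
        },
        "required": ["city", "temperature_c"],
        "additionalProperties": False,
    },
}

with client.beta.messages.stream(
    model="claude-sonnet-4-5",
    max_tokens=1024,
    messages=[{"role": "user", "content": "Give me a plausible winter reading for Oslo as JSON."}],
    output_format=weather_format,  # previously only a type[ResponseFormatT] was accepted here
) as stream:
    for text in stream.text_stream:
        print(text, end="", flush=True)
```

The `AsyncMessages.stream()` change below mirrors this for the async client.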
@@ -2821,7 +2842,7 @@ class AsyncMessages(AsyncAPIResource):
  max_tokens: int,
  messages: Iterable[BetaMessageParam],
  model: ModelParam,
- tools: Iterable[BetaAsyncRunnableTool],
+ tools: Iterable[BetaAsyncRunnableTool | BetaToolUnionParam],
  compaction_control: CompactionControl | Omit = omit,
  max_iterations: int | Omit = omit,
  container: Optional[message_create_params.Container] | Omit = omit,
@@ -2855,7 +2876,7 @@ class AsyncMessages(AsyncAPIResource):
  max_tokens: int,
  messages: Iterable[BetaMessageParam],
  model: ModelParam,
- tools: Iterable[BetaAsyncRunnableTool],
+ tools: Iterable[BetaAsyncRunnableTool | BetaToolUnionParam],
  compaction_control: CompactionControl | Omit = omit,
  stream: Literal[True],
  max_iterations: int | Omit = omit,
@@ -2889,7 +2910,7 @@ class AsyncMessages(AsyncAPIResource):
  max_tokens: int,
  messages: Iterable[BetaMessageParam],
  model: ModelParam,
- tools: Iterable[BetaAsyncRunnableTool],
+ tools: Iterable[BetaAsyncRunnableTool | BetaToolUnionParam],
  compaction_control: CompactionControl | Omit = omit,
  stream: bool,
  max_iterations: int | Omit = omit,
@@ -2922,7 +2943,7 @@ class AsyncMessages(AsyncAPIResource):
  max_tokens: int,
  messages: Iterable[BetaMessageParam],
  model: ModelParam,
- tools: Iterable[BetaAsyncRunnableTool],
+ tools: Iterable[BetaAsyncRunnableTool | BetaToolUnionParam],
  compaction_control: CompactionControl | Omit = omit,
  max_iterations: int | Omit = omit,
  container: Optional[message_create_params.Container] | Omit = omit,
@@ -2962,6 +2983,15 @@ class AsyncMessages(AsyncAPIResource):
  **(extra_headers or {}),
  }

+ runnable_tools: list[BetaAsyncRunnableTool] = []
+ raw_tools: list[BetaToolUnionParam] = []
+
+ for tool in tools:
+ if isinstance(tool, (BetaAsyncFunctionTool, BetaAsyncBuiltinFunctionTool)):
+ runnable_tools.append(tool)
+ else:
+ raw_tools.append(tool)
+
  params = cast(
  message_create_params.ParseMessageCreateParamsBase[ResponseFormatT],
  {
@@ -2980,7 +3010,7 @@ class AsyncMessages(AsyncAPIResource):
  "temperature": temperature,
  "thinking": thinking,
  "tool_choice": tool_choice,
- "tools": [tool.to_dict() for tool in tools],
+ "tools": [*[tool.to_dict() for tool in runnable_tools], *raw_tools],
  "top_k": top_k,
  "top_p": top_p,
  },
@@ -2988,7 +3018,7 @@ class AsyncMessages(AsyncAPIResource):

  if stream:
  return BetaAsyncStreamingToolRunner[ResponseFormatT](
- tools=tools,
+ tools=runnable_tools,
  params=params,
  options={
  "extra_headers": extra_headers,
@@ -3001,7 +3031,7 @@ class AsyncMessages(AsyncAPIResource):
  compaction_control=compaction_control if is_given(compaction_control) else None,
  )
  return BetaAsyncToolRunner[ResponseFormatT](
- tools=tools,
+ tools=runnable_tools,
  params=params,
  options={
  "extra_headers": extra_headers,
@@ -3022,7 +3052,7 @@ class AsyncMessages(AsyncAPIResource):
  model: ModelParam,
  metadata: BetaMetadataParam | Omit = omit,
  output_config: BetaOutputConfigParam | Omit = omit,
- output_format: Optional[type[ResponseFormatT]] | Omit = omit,
+ output_format: None | type[ResponseFormatT] | BetaJSONOutputFormatParam | Omit = omit,
  container: Optional[message_create_params.Container] | Omit = omit,
  context_management: Optional[BetaContextManagementConfigParam] | Omit = omit,
  mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit,
@@ -3057,14 +3087,16 @@ class AsyncMessages(AsyncAPIResource):
  **(extra_headers or {}),
  }

- transformed_output_format: Optional[message_create_params.OutputFormat] | NotGiven = NOT_GIVEN
+ transformed_output_format: Optional[BetaJSONOutputFormatParam] | NotGiven = NOT_GIVEN

- if is_given(output_format) and output_format is not None:
+ if is_dict(output_format):
+ transformed_output_format = cast(BetaJSONOutputFormatParam, output_format)
+ elif is_given(output_format) and output_format is not None:
  adapted_type: TypeAdapter[ResponseFormatT] = TypeAdapter(output_format)

  try:
  schema = adapted_type.json_schema()
- transformed_output_format = message_create_params.OutputFormat(
+ transformed_output_format = BetaJSONOutputFormatParam(
  schema=transform_schema(schema), type="json_schema"
  )
  except pydantic.errors.PydanticSchemaGenerationError as e:
@@ -3075,7 +3107,7 @@ class AsyncMessages(AsyncAPIResource):
  )
  ) from e
  request = self._post(
- "/v1/messages",
+ "/v1/messages?beta=true",
  body=maybe_transform(
  {
  "max_tokens": max_tokens,
@@ -3107,7 +3139,10 @@ class AsyncMessages(AsyncAPIResource):
  stream=True,
  stream_cls=AsyncStream[BetaRawMessageStreamEvent],
  )
- return BetaAsyncMessageStreamManager(request, output_format=cast(ResponseFormatT, output_format))
+ return BetaAsyncMessageStreamManager(
+ request,
+ output_format=NOT_GIVEN if is_dict(output_format) else cast(ResponseFormatT, output_format),
+ )

  async def count_tokens(
  self,

anthropic/resources/messages/messages.py
@@ -59,6 +59,8 @@ DEPRECATED_MODELS = {
  "claude-2.0": "July 21st, 2025",
  "claude-3-7-sonnet-latest": "February 19th, 2026",
  "claude-3-7-sonnet-20250219": "February 19th, 2026",
+ "claude-3-5-haiku-latest": "February 19th, 2026",
+ "claude-3-5-haiku-20241022": "February 19th, 2026",
  }
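
The two Claude 3.5 Haiku aliases join the deprecation table. Like the existing entries, using them should surface a `DeprecationWarning` with the listed end-of-life date; a sketch, assuming the SDK keeps emitting warnings for models in this table and that `ANTHROPIC_API_KEY` is set:

```python
# Sketch only: observing the DeprecationWarning emitted for a model listed in
# DEPRECATED_MODELS (assumption based on how other entries are handled).
import warnings

from anthropic import Anthropic

client = Anthropic()

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    client.messages.create(
        model="claude-3-5-haiku-latest",  # newly added entry in 0.76.0
        max_tokens=32,
        messages=[{"role": "user", "content": "Hello"}],
    )

for w in caught:
    if issubclass(w.category, DeprecationWarning):
        print(w.message)  # expected to mention the February 19th, 2026 end-of-life date
```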

anthropic/types/beta/beta_container.py
@@ -10,6 +10,10 @@ __all__ = ["BetaContainer"]


  class BetaContainer(BaseModel):
+ """
+ Information about the container used in the request (for the code execution tool)
+ """
+
  id: str
  """Identifier for the container used in this request"""


anthropic/types/beta/beta_container_params.py
@@ -11,6 +11,8 @@ __all__ = ["BetaContainerParams"]


  class BetaContainerParams(TypedDict, total=False):
+ """Container parameters with skills to be loaded."""
+
  id: Optional[str]
  """Container id"""


anthropic/types/beta/beta_container_upload_block.py
@@ -8,6 +8,8 @@ __all__ = ["BetaContainerUploadBlock"]


  class BetaContainerUploadBlock(BaseModel):
+ """Response model for a file uploaded to the container."""
+
  file_id: str

  type: Literal["container_upload"]

anthropic/types/beta/beta_container_upload_block_param.py
@@ -11,6 +11,11 @@ __all__ = ["BetaContainerUploadBlockParam"]


  class BetaContainerUploadBlockParam(TypedDict, total=False):
+ """
+ A content block that represents a file to be uploaded to the container
+ Files uploaded via this block will be available in the container's input directory.
+ """
+
  file_id: Required[str]

  type: Required[Literal["container_upload"]]

anthropic/types/beta/beta_direct_caller.py
@@ -8,4 +8,6 @@ __all__ = ["BetaDirectCaller"]


  class BetaDirectCaller(BaseModel):
+ """Tool invocation directly from the model."""
+
  type: Literal["direct"]

anthropic/types/beta/beta_direct_caller_param.py
@@ -8,4 +8,6 @@ __all__ = ["BetaDirectCallerParam"]


  class BetaDirectCallerParam(TypedDict, total=False):
+ """Tool invocation directly from the model."""
+
  type: Required[Literal["direct"]]

anthropic/types/beta/beta_mcp_tool_config_param.py
@@ -8,6 +8,8 @@ __all__ = ["BetaMCPToolConfigParam"]


  class BetaMCPToolConfigParam(TypedDict, total=False):
+ """Configuration for a specific tool in an MCP toolset."""
+
  defer_loading: bool

  enabled: bool

anthropic/types/beta/beta_mcp_tool_default_config_param.py
@@ -8,6 +8,8 @@ __all__ = ["BetaMCPToolDefaultConfigParam"]


  class BetaMCPToolDefaultConfigParam(TypedDict, total=False):
+ """Default configuration for tools in an MCP toolset."""
+
  defer_loading: bool

  enabled: bool

anthropic/types/beta/beta_mcp_toolset_param.py
@@ -13,6 +13,12 @@ __all__ = ["BetaMCPToolsetParam"]


  class BetaMCPToolsetParam(TypedDict, total=False):
+ """Configuration for a group of tools from an MCP server.
+
+ Allows configuring enabled status and defer_loading for all tools
+ from an MCP server, with optional per-tool overrides.
+ """
+
  mcp_server_name: Required[str]
  """Name of the MCP server to configure tools for"""


anthropic/types/beta/beta_server_tool_caller.py
@@ -8,6 +8,8 @@ __all__ = ["BetaServerToolCaller"]


  class BetaServerToolCaller(BaseModel):
+ """Tool invocation generated by a server-side tool."""
+
  tool_id: str

  type: Literal["code_execution_20250825"]

anthropic/types/beta/beta_server_tool_caller_param.py
@@ -8,6 +8,8 @@ __all__ = ["BetaServerToolCallerParam"]


  class BetaServerToolCallerParam(TypedDict, total=False):
+ """Tool invocation generated by a server-side tool."""
+
  tool_id: Required[str]

  type: Required[Literal["code_execution_20250825"]]

anthropic/types/beta/beta_skill.py
@@ -8,6 +8,8 @@ __all__ = ["BetaSkill"]


  class BetaSkill(BaseModel):
+ """A skill that was loaded in a container (response model)."""
+
  skill_id: str
  """Skill ID"""


anthropic/types/beta/beta_skill_params.py
@@ -8,6 +8,8 @@ __all__ = ["BetaSkillParams"]


  class BetaSkillParams(TypedDict, total=False):
+ """Specification for a skill to be loaded in a container (request model)."""
+
  skill_id: Required[str]
  """Skill ID"""


anthropic/types/beta/beta_tool_choice_any_param.py
@@ -8,6 +8,8 @@ __all__ = ["BetaToolChoiceAnyParam"]


  class BetaToolChoiceAnyParam(TypedDict, total=False):
+ """The model will use any available tools."""
+
  type: Required[Literal["any"]]

  disable_parallel_tool_use: bool

anthropic/types/beta/beta_tool_choice_auto_param.py
@@ -8,6 +8,8 @@ __all__ = ["BetaToolChoiceAutoParam"]


  class BetaToolChoiceAutoParam(TypedDict, total=False):
+ """The model will automatically decide whether to use tools."""
+
  type: Required[Literal["auto"]]

  disable_parallel_tool_use: bool

anthropic/types/beta/beta_tool_choice_none_param.py
@@ -8,4 +8,6 @@ __all__ = ["BetaToolChoiceNoneParam"]


  class BetaToolChoiceNoneParam(TypedDict, total=False):
+ """The model will not be allowed to use tools."""
+
  type: Required[Literal["none"]]

anthropic/types/beta/beta_tool_choice_tool_param.py
@@ -8,6 +8,8 @@ __all__ = ["BetaToolChoiceToolParam"]


  class BetaToolChoiceToolParam(TypedDict, total=False):
+ """The model will use the specified tool with `tool_choice.name`."""
+
  name: Required[str]
  """The name of the tool to use."""


anthropic/types/beta/beta_tool_param.py
@@ -12,6 +12,11 @@ __all__ = ["BetaToolParam", "InputSchema"]


  class InputSchemaTyped(TypedDict, total=False):
+ """[JSON schema](https://json-schema.org/draft/2020-12) for this tool's input.
+
+ This defines the shape of the `input` that your tool accepts and that the model will produce.
+ """
+
  type: Required[Literal["object"]]

  properties: Optional[Dict[str, object]]

anthropic/types/beta/beta_tool_reference_block_param.py
@@ -11,6 +11,8 @@ __all__ = ["BetaToolReferenceBlockParam"]


  class BetaToolReferenceBlockParam(TypedDict, total=False):
+ """Tool reference block that can be included in tool_result content."""
+
  tool_name: Required[str]

  type: Required[Literal["tool_reference"]]

anthropic/types/beta/beta_web_search_tool_20250305_param.py
@@ -12,6 +12,11 @@ __all__ = ["BetaWebSearchTool20250305Param", "UserLocation"]


  class UserLocation(TypedDict, total=False):
+ """Parameters for the user's location.
+
+ Used to provide more relevant search results.
+ """
+
  type: Required[Literal["approximate"]]

  city: Optional[str]

anthropic/types/beta/messages/beta_message_batch_individual_response.py
@@ -7,6 +7,10 @@ __all__ = ["BetaMessageBatchIndividualResponse"]


  class BetaMessageBatchIndividualResponse(BaseModel):
+ """
+ This is a single line in the response `.jsonl` file and does not represent the response as a whole.
+ """
+
  custom_id: str
  """Developer-provided ID created for each request in a Message Batch.

anthropic/types/messages/message_batch_individual_response.py
@@ -7,6 +7,10 @@ __all__ = ["MessageBatchIndividualResponse"]


  class MessageBatchIndividualResponse(BaseModel):
+ """
+ This is a single line in the response `.jsonl` file and does not represent the response as a whole.
+ """
+
  custom_id: str
  """Developer-provided ID created for each request in a Message Batch.


anthropic/types/tool_choice_any_param.py
@@ -8,6 +8,8 @@ __all__ = ["ToolChoiceAnyParam"]


  class ToolChoiceAnyParam(TypedDict, total=False):
+ """The model will use any available tools."""
+
  type: Required[Literal["any"]]

  disable_parallel_tool_use: bool

anthropic/types/tool_choice_auto_param.py
@@ -8,6 +8,8 @@ __all__ = ["ToolChoiceAutoParam"]


  class ToolChoiceAutoParam(TypedDict, total=False):
+ """The model will automatically decide whether to use tools."""
+
  type: Required[Literal["auto"]]

  disable_parallel_tool_use: bool

anthropic/types/tool_choice_none_param.py
@@ -8,4 +8,6 @@ __all__ = ["ToolChoiceNoneParam"]


  class ToolChoiceNoneParam(TypedDict, total=False):
+ """The model will not be allowed to use tools."""
+
  type: Required[Literal["none"]]

anthropic/types/tool_choice_tool_param.py
@@ -8,6 +8,8 @@ __all__ = ["ToolChoiceToolParam"]


  class ToolChoiceToolParam(TypedDict, total=False):
+ """The model will use the specified tool with `tool_choice.name`."""
+
  name: Required[str]
  """The name of the tool to use."""


anthropic/types/tool_param.py
@@ -13,6 +13,11 @@ __all__ = ["ToolParam", "InputSchema"]


  class InputSchemaTyped(TypedDict, total=False):
+ """[JSON schema](https://json-schema.org/draft/2020-12) for this tool's input.
+
+ This defines the shape of the `input` that your tool accepts and that the model will produce.
+ """
+
  type: Required[Literal["object"]]

  properties: Optional[Dict[str, object]]
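
The new `InputSchemaTyped` docstring describes `input_schema` as the JSON Schema for a tool's input. For reference, a hand-written `ToolParam` using that field looks like the following sketch (the tool name and schema are illustrative, not part of this release):

```python
# Sketch only: a ToolParam dict whose input_schema follows the documented
# JSON Schema shape (type "object" plus properties/required).
from anthropic import Anthropic

client = Anthropic()

get_time_tool = {
    "name": "get_time",
    "description": "Get the current time in a given IANA timezone.",
    "input_schema": {
        "type": "object",
        "properties": {
            "timezone": {"type": "string", "description": "e.g. Europe/Oslo"},
        },
        "required": ["timezone"],
    },
}

message = client.messages.create(
    model="claude-sonnet-4-5",
    max_tokens=256,
    tools=[get_time_tool],
    messages=[{"role": "user", "content": "What time is it in Oslo?"}],
)
print(message.stop_reason)  # "tool_use" when the model decides to call the tool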

anthropic/types/web_search_tool_20250305_param.py
@@ -12,6 +12,11 @@ __all__ = ["WebSearchTool20250305Param", "UserLocation"]


  class UserLocation(TypedDict, total=False):
+ """Parameters for the user's location.
+
+ Used to provide more relevant search results.
+ """
+
  type: Required[Literal["approximate"]]

  city: Optional[str]
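
`UserLocation` is the optional `user_location` member of the web search tool param; a sketch of how it nests (the city and timezone values are illustrative):

```python
# Sketch only: a WebSearchTool20250305Param with an approximate user_location,
# used to bias search results toward a locale.
from anthropic import Anthropic

client = Anthropic()

web_search_tool = {
    "type": "web_search_20250305",
    "name": "web_search",
    "user_location": {
        "type": "approximate",
        "city": "Oslo",
        "country": "NO",
        "timezone": "Europe/Oslo",
    },
}

message = client.messages.create(
    model="claude-sonnet-4-5",
    max_tokens=512,
    tools=[web_search_tool],
    messages=[{"role": "user", "content": "What's happening in town this weekend?"}],
)
print(message.content)
```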

{anthropic-0.75.0.dist-info → anthropic-0.76.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: anthropic
- Version: 0.75.0
+ Version: 0.76.0
  Summary: The official Python library for the anthropic API
  Project-URL: Homepage, https://github.com/anthropics/anthropic-sdk-python
  Project-URL: Repository, https://github.com/anthropics/anthropic-sdk-python
@@ -18,6 +18,7 @@ Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
+ Classifier: Programming Language :: Python :: 3.14
  Classifier: Topic :: Software Development :: Libraries :: Python Modules
  Classifier: Typing :: Typed
  Requires-Python: >=3.9
@@ -136,6 +137,7 @@ pip install anthropic[aiohttp]
  Then you can enable it by instantiating the client with `http_client=DefaultAioHttpClient()`:

  ```python
+ import os
  import asyncio
  from anthropic import DefaultAioHttpClient
  from anthropic import AsyncAnthropic
@@ -143,7 +145,7 @@ from anthropic import AsyncAnthropic

  async def main() -> None:
  async with AsyncAnthropic(
- api_key="my-anthropic-api-key",
+ api_key=os.environ.get("ANTHROPIC_API_KEY"), # This is the default and can be omitted
  http_client=DefaultAioHttpClient(),
  ) as client:
  message = await client.messages.create(