langchain-core 1.0.0a6__py3-none-any.whl → 1.0.0a8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of langchain-core has been flagged as potentially problematic; consult the package registry's advisory page for details.

Files changed (131):
  1. langchain_core/_api/__init__.py +3 -3
  2. langchain_core/_api/beta_decorator.py +6 -6
  3. langchain_core/_api/deprecation.py +21 -29
  4. langchain_core/_api/path.py +3 -6
  5. langchain_core/_import_utils.py +2 -3
  6. langchain_core/agents.py +10 -11
  7. langchain_core/caches.py +7 -7
  8. langchain_core/callbacks/base.py +91 -91
  9. langchain_core/callbacks/file.py +11 -11
  10. langchain_core/callbacks/manager.py +86 -89
  11. langchain_core/callbacks/stdout.py +8 -8
  12. langchain_core/callbacks/usage.py +4 -4
  13. langchain_core/chat_history.py +5 -5
  14. langchain_core/document_loaders/base.py +2 -2
  15. langchain_core/document_loaders/langsmith.py +15 -15
  16. langchain_core/documents/base.py +16 -16
  17. langchain_core/documents/compressor.py +4 -4
  18. langchain_core/example_selectors/length_based.py +1 -1
  19. langchain_core/example_selectors/semantic_similarity.py +17 -19
  20. langchain_core/exceptions.py +3 -3
  21. langchain_core/globals.py +3 -151
  22. langchain_core/indexing/api.py +44 -43
  23. langchain_core/indexing/base.py +30 -30
  24. langchain_core/indexing/in_memory.py +3 -3
  25. langchain_core/language_models/_utils.py +5 -7
  26. langchain_core/language_models/base.py +18 -132
  27. langchain_core/language_models/chat_models.py +118 -227
  28. langchain_core/language_models/fake.py +11 -11
  29. langchain_core/language_models/fake_chat_models.py +35 -29
  30. langchain_core/language_models/llms.py +91 -201
  31. langchain_core/load/dump.py +1 -1
  32. langchain_core/load/load.py +11 -12
  33. langchain_core/load/mapping.py +2 -4
  34. langchain_core/load/serializable.py +2 -4
  35. langchain_core/messages/ai.py +17 -20
  36. langchain_core/messages/base.py +23 -25
  37. langchain_core/messages/block_translators/__init__.py +2 -5
  38. langchain_core/messages/block_translators/anthropic.py +3 -3
  39. langchain_core/messages/block_translators/bedrock_converse.py +2 -2
  40. langchain_core/messages/block_translators/langchain_v0.py +2 -2
  41. langchain_core/messages/block_translators/openai.py +6 -6
  42. langchain_core/messages/content.py +120 -124
  43. langchain_core/messages/human.py +7 -7
  44. langchain_core/messages/system.py +7 -7
  45. langchain_core/messages/tool.py +24 -24
  46. langchain_core/messages/utils.py +67 -79
  47. langchain_core/output_parsers/base.py +12 -14
  48. langchain_core/output_parsers/json.py +4 -4
  49. langchain_core/output_parsers/list.py +3 -5
  50. langchain_core/output_parsers/openai_functions.py +3 -3
  51. langchain_core/output_parsers/openai_tools.py +3 -3
  52. langchain_core/output_parsers/pydantic.py +2 -2
  53. langchain_core/output_parsers/transform.py +13 -15
  54. langchain_core/output_parsers/xml.py +7 -9
  55. langchain_core/outputs/chat_generation.py +4 -4
  56. langchain_core/outputs/chat_result.py +1 -3
  57. langchain_core/outputs/generation.py +2 -2
  58. langchain_core/outputs/llm_result.py +5 -5
  59. langchain_core/prompts/__init__.py +1 -5
  60. langchain_core/prompts/base.py +10 -15
  61. langchain_core/prompts/chat.py +31 -82
  62. langchain_core/prompts/dict.py +2 -2
  63. langchain_core/prompts/few_shot.py +5 -5
  64. langchain_core/prompts/few_shot_with_templates.py +4 -4
  65. langchain_core/prompts/loading.py +3 -5
  66. langchain_core/prompts/prompt.py +4 -16
  67. langchain_core/prompts/string.py +2 -1
  68. langchain_core/prompts/structured.py +16 -23
  69. langchain_core/rate_limiters.py +3 -4
  70. langchain_core/retrievers.py +14 -14
  71. langchain_core/runnables/base.py +928 -1042
  72. langchain_core/runnables/branch.py +36 -40
  73. langchain_core/runnables/config.py +27 -35
  74. langchain_core/runnables/configurable.py +108 -124
  75. langchain_core/runnables/fallbacks.py +76 -72
  76. langchain_core/runnables/graph.py +39 -45
  77. langchain_core/runnables/graph_ascii.py +9 -11
  78. langchain_core/runnables/graph_mermaid.py +18 -19
  79. langchain_core/runnables/graph_png.py +8 -9
  80. langchain_core/runnables/history.py +114 -127
  81. langchain_core/runnables/passthrough.py +113 -139
  82. langchain_core/runnables/retry.py +43 -48
  83. langchain_core/runnables/router.py +23 -28
  84. langchain_core/runnables/schema.py +42 -44
  85. langchain_core/runnables/utils.py +28 -31
  86. langchain_core/stores.py +9 -13
  87. langchain_core/structured_query.py +8 -8
  88. langchain_core/tools/base.py +62 -115
  89. langchain_core/tools/convert.py +31 -35
  90. langchain_core/tools/render.py +1 -1
  91. langchain_core/tools/retriever.py +4 -4
  92. langchain_core/tools/simple.py +13 -17
  93. langchain_core/tools/structured.py +12 -15
  94. langchain_core/tracers/base.py +62 -64
  95. langchain_core/tracers/context.py +17 -35
  96. langchain_core/tracers/core.py +49 -53
  97. langchain_core/tracers/evaluation.py +11 -11
  98. langchain_core/tracers/event_stream.py +58 -60
  99. langchain_core/tracers/langchain.py +13 -13
  100. langchain_core/tracers/log_stream.py +22 -24
  101. langchain_core/tracers/root_listeners.py +14 -14
  102. langchain_core/tracers/run_collector.py +2 -4
  103. langchain_core/tracers/schemas.py +8 -8
  104. langchain_core/tracers/stdout.py +2 -1
  105. langchain_core/utils/__init__.py +0 -3
  106. langchain_core/utils/_merge.py +2 -2
  107. langchain_core/utils/aiter.py +24 -28
  108. langchain_core/utils/env.py +4 -4
  109. langchain_core/utils/function_calling.py +31 -41
  110. langchain_core/utils/html.py +3 -4
  111. langchain_core/utils/input.py +3 -3
  112. langchain_core/utils/iter.py +15 -19
  113. langchain_core/utils/json.py +3 -2
  114. langchain_core/utils/json_schema.py +6 -6
  115. langchain_core/utils/mustache.py +3 -5
  116. langchain_core/utils/pydantic.py +16 -18
  117. langchain_core/utils/usage.py +1 -1
  118. langchain_core/utils/utils.py +29 -29
  119. langchain_core/vectorstores/base.py +18 -21
  120. langchain_core/vectorstores/in_memory.py +14 -87
  121. langchain_core/vectorstores/utils.py +2 -2
  122. langchain_core/version.py +1 -1
  123. {langchain_core-1.0.0a6.dist-info → langchain_core-1.0.0a8.dist-info}/METADATA +10 -21
  124. langchain_core-1.0.0a8.dist-info/RECORD +176 -0
  125. {langchain_core-1.0.0a6.dist-info → langchain_core-1.0.0a8.dist-info}/WHEEL +1 -1
  126. langchain_core/messages/block_translators/ollama.py +0 -47
  127. langchain_core/prompts/pipeline.py +0 -138
  128. langchain_core/tracers/langchain_v1.py +0 -31
  129. langchain_core/utils/loading.py +0 -35
  130. langchain_core-1.0.0a6.dist-info/RECORD +0 -181
  131. langchain_core-1.0.0a6.dist-info/entry_points.txt +0 -4
@@ -8,13 +8,12 @@ import logging
8
8
  import types
9
9
  import typing
10
10
  import uuid
11
+ from collections.abc import Callable
11
12
  from typing import (
12
13
  TYPE_CHECKING,
13
14
  Annotated,
14
15
  Any,
15
- Callable,
16
16
  Literal,
17
- Optional,
18
17
  Union,
19
18
  cast,
20
19
  get_args,
@@ -103,8 +102,8 @@ def _rm_titles(kv: dict, prev_key: str = "") -> dict:
103
102
  def _convert_json_schema_to_openai_function(
104
103
  schema: dict,
105
104
  *,
106
- name: Optional[str] = None,
107
- description: Optional[str] = None,
105
+ name: str | None = None,
106
+ description: str | None = None,
108
107
  rm_titles: bool = True,
109
108
  ) -> FunctionDescription:
110
109
  """Converts a Pydantic model to a function description for the OpenAI API.
@@ -137,8 +136,8 @@ def _convert_json_schema_to_openai_function(
137
136
  def _convert_pydantic_to_openai_function(
138
137
  model: type,
139
138
  *,
140
- name: Optional[str] = None,
141
- description: Optional[str] = None,
139
+ name: str | None = None,
140
+ description: str | None = None,
142
141
  rm_titles: bool = True,
143
142
  ) -> FunctionDescription:
144
143
  """Converts a Pydantic model to a function description for the OpenAI API.
@@ -184,8 +183,8 @@ convert_pydantic_to_openai_function = deprecated(
184
183
  def convert_pydantic_to_openai_tool(
185
184
  model: type[BaseModel],
186
185
  *,
187
- name: Optional[str] = None,
188
- description: Optional[str] = None,
186
+ name: str | None = None,
187
+ description: str | None = None,
189
188
  ) -> ToolDescription:
190
189
  """Converts a Pydantic model to a function description for the OpenAI API.
191
190
 
@@ -285,7 +284,9 @@ def _convert_any_typed_dicts_to_pydantic(
285
284
  new_arg_type = _convert_any_typed_dicts_to_pydantic(
286
285
  annotated_args[0], depth=depth + 1, visited=visited
287
286
  )
288
- field_kwargs = dict(zip(("default", "description"), annotated_args[1:]))
287
+ field_kwargs = dict(
288
+ zip(("default", "description"), annotated_args[1:], strict=False)
289
+ )
289
290
  if (field_desc := field_kwargs.get("description")) and not isinstance(
290
291
  field_desc, str
291
292
  ):
@@ -393,9 +394,9 @@ def format_tool_to_openai_tool(tool: BaseTool) -> ToolDescription:
393
394
 
394
395
 
395
396
  def convert_to_openai_function(
396
- function: Union[dict[str, Any], type, Callable, BaseTool],
397
+ function: dict[str, Any] | type | Callable | BaseTool,
397
398
  *,
398
- strict: Optional[bool] = None,
399
+ strict: bool | None = None,
399
400
  ) -> dict[str, Any]:
400
401
  """Convert a raw function/class to an OpenAI function.
401
402
 
@@ -418,20 +419,16 @@ def convert_to_openai_function(
418
419
  Raises:
419
420
  ValueError: If function is not in a supported format.
420
421
 
421
- .. versionchanged:: 0.2.29
422
-
422
+ !!! warning "Behavior changed in 0.2.29"
423
423
  ``strict`` arg added.
424
424
 
425
- .. versionchanged:: 0.3.13
426
-
425
+ !!! warning "Behavior changed in 0.3.13"
427
426
  Support for Anthropic format tools added.
428
427
 
429
- .. versionchanged:: 0.3.14
430
-
428
+ !!! warning "Behavior changed in 0.3.14"
431
429
  Support for Amazon Bedrock Converse format tools added.
432
430
 
433
- .. versionchanged:: 0.3.16
434
-
431
+ !!! warning "Behavior changed in 0.3.16"
435
432
  'description' and 'parameters' keys are now optional. Only 'name' is
436
433
  required and guaranteed to be part of the output.
437
434
  """
@@ -524,9 +521,9 @@ _WellKnownOpenAITools = (
524
521
 
525
522
 
526
523
  def convert_to_openai_tool(
527
- tool: Union[dict[str, Any], type[BaseModel], Callable, BaseTool],
524
+ tool: dict[str, Any] | type[BaseModel] | Callable | BaseTool,
528
525
  *,
529
- strict: Optional[bool] = None,
526
+ strict: bool | None = None,
530
527
  ) -> dict[str, Any]:
531
528
  """Convert a tool-like object to an OpenAI tool schema.
532
529
 
@@ -549,35 +546,28 @@ def convert_to_openai_tool(
549
546
  A dict version of the passed in tool which is compatible with the
550
547
  OpenAI tool-calling API.
551
548
 
552
- .. versionchanged:: 0.2.29
553
-
549
+ !!! warning "Behavior changed in 0.2.29"
554
550
  ``strict`` arg added.
555
551
 
556
- .. versionchanged:: 0.3.13
557
-
552
+ !!! warning "Behavior changed in 0.3.13"
558
553
  Support for Anthropic format tools added.
559
554
 
560
- .. versionchanged:: 0.3.14
561
-
555
+ !!! warning "Behavior changed in 0.3.14"
562
556
  Support for Amazon Bedrock Converse format tools added.
563
557
 
564
- .. versionchanged:: 0.3.16
565
-
558
+ !!! warning "Behavior changed in 0.3.16"
566
559
  'description' and 'parameters' keys are now optional. Only 'name' is
567
560
  required and guaranteed to be part of the output.
568
561
 
569
- .. versionchanged:: 0.3.44
570
-
562
+ !!! warning "Behavior changed in 0.3.44"
571
563
  Return OpenAI Responses API-style tools unchanged. This includes
572
564
  any dict with "type" in "file_search", "function", "computer_use_preview",
573
565
  "web_search_preview".
574
566
 
575
- .. versionchanged:: 0.3.61
576
-
567
+ !!! warning "Behavior changed in 0.3.61"
577
568
  Added support for OpenAI's built-in code interpreter and remote MCP tools.
578
569
 
579
- .. versionchanged:: 0.3.63
580
-
570
+ !!! warning "Behavior changed in 0.3.63"
581
571
  Added support for OpenAI's image generation built-in tool.
582
572
  """
583
573
  # Import locally to prevent circular import
@@ -603,9 +593,9 @@ def convert_to_openai_tool(
603
593
 
604
594
 
605
595
  def convert_to_json_schema(
606
- schema: Union[dict[str, Any], type[BaseModel], Callable, BaseTool],
596
+ schema: dict[str, Any] | type[BaseModel] | Callable | BaseTool,
607
597
  *,
608
- strict: Optional[bool] = None,
598
+ strict: bool | None = None,
609
599
  ) -> dict[str, Any]:
610
600
  """Convert a schema representation to a JSON schema.
611
601
 
@@ -648,9 +638,9 @@ def convert_to_json_schema(
648
638
  def tool_example_to_messages(
649
639
  input: str,
650
640
  tool_calls: list[BaseModel],
651
- tool_outputs: Optional[list[str]] = None,
641
+ tool_outputs: list[str] | None = None,
652
642
  *,
653
- ai_response: Optional[str] = None,
643
+ ai_response: str | None = None,
654
644
  ) -> list[BaseMessage]:
655
645
  """Convert an example into a list of messages that can be fed into an LLM.
656
646
 
@@ -742,7 +732,7 @@ def tool_example_to_messages(
742
732
  tool_outputs = tool_outputs or ["You have correctly called this tool."] * len(
743
733
  openai_tool_calls
744
734
  )
745
- for output, tool_call_dict in zip(tool_outputs, openai_tool_calls):
735
+ for output, tool_call_dict in zip(tool_outputs, openai_tool_calls, strict=False):
746
736
  messages.append(ToolMessage(content=output, tool_call_id=tool_call_dict["id"]))
747
737
 
748
738
  if ai_response:
@@ -751,7 +741,7 @@ def tool_example_to_messages(
751
741
 
752
742
 
753
743
  def _parse_google_docstring(
754
- docstring: Optional[str],
744
+ docstring: str | None,
755
745
  args: list[str],
756
746
  *,
757
747
  error_on_invalid_docstring: bool = False,
@@ -3,7 +3,6 @@
3
3
  import logging
4
4
  import re
5
5
  from collections.abc import Sequence
6
- from typing import Optional, Union
7
6
  from urllib.parse import urljoin, urlparse
8
7
 
9
8
  logger = logging.getLogger(__name__)
@@ -35,7 +34,7 @@ DEFAULT_LINK_REGEX = (
35
34
 
36
35
 
37
36
  def find_all_links(
38
- raw_html: str, *, pattern: Union[str, re.Pattern, None] = None
37
+ raw_html: str, *, pattern: str | re.Pattern | None = None
39
38
  ) -> list[str]:
40
39
  """Extract all links from a raw HTML string.
41
40
 
@@ -54,8 +53,8 @@ def extract_sub_links(
54
53
  raw_html: str,
55
54
  url: str,
56
55
  *,
57
- base_url: Optional[str] = None,
58
- pattern: Union[str, re.Pattern, None] = None,
56
+ base_url: str | None = None,
57
+ pattern: str | re.Pattern | None = None,
59
58
  prevent_outside: bool = True,
60
59
  exclude_prefixes: Sequence[str] = (),
61
60
  continue_on_failure: bool = False,
@@ -1,6 +1,6 @@
1
1
  """Handle chained inputs."""
2
2
 
3
- from typing import Optional, TextIO
3
+ from typing import TextIO
4
4
 
5
5
  _TEXT_COLOR_MAPPING = {
6
6
  "blue": "36;1",
@@ -12,7 +12,7 @@ _TEXT_COLOR_MAPPING = {
12
12
 
13
13
 
14
14
  def get_color_mapping(
15
- items: list[str], excluded_colors: Optional[list] = None
15
+ items: list[str], excluded_colors: list | None = None
16
16
  ) -> dict[str, str]:
17
17
  """Get mapping for items to a support color.
18
18
 
@@ -56,7 +56,7 @@ def get_bolded_text(text: str) -> str:
56
56
 
57
57
 
58
58
  def print_text(
59
- text: str, color: Optional[str] = None, end: str = "", file: Optional[TextIO] = None
59
+ text: str, color: str | None = None, end: str = "", file: TextIO | None = None
60
60
  ) -> None:
61
61
  """Print text with highlighting and no end characters.
62
62
 
@@ -9,9 +9,7 @@ from typing import (
9
9
  Any,
10
10
  Generic,
11
11
  Literal,
12
- Optional,
13
12
  TypeVar,
14
- Union,
15
13
  overload,
16
14
  )
17
15
 
@@ -26,9 +24,9 @@ class NoLock:
26
24
 
27
25
  def __exit__(
28
26
  self,
29
- exc_type: Optional[type[BaseException]],
30
- exc_val: Optional[BaseException],
31
- exc_tb: Optional[TracebackType],
27
+ exc_type: type[BaseException] | None,
28
+ exc_val: BaseException | None,
29
+ exc_tb: TracebackType | None,
32
30
  ) -> Literal[False]:
33
31
  """Return False (exception not suppressed)."""
34
32
  return False
@@ -42,7 +40,7 @@ def tee_peer(
42
40
  peers: list[deque[T]],
43
41
  lock: AbstractContextManager[Any],
44
42
  ) -> Generator[T, None, None]:
45
- """An individual iterator of a :py:func:`~.tee`.
43
+ """An individual iterator of a `.tee`.
46
44
 
47
45
  This function is a generator that yields items from the shared iterator
48
46
  ``iterator``. It buffers items until the least advanced iterator has
@@ -108,22 +106,22 @@ class Tee(Generic[T]):
108
106
  await a.anext(previous) # advance one iterator
109
107
  return a.map(operator.sub, previous, current)
110
108
 
111
- Unlike :py:func:`itertools.tee`, :py:func:`~.tee` returns a custom type instead
112
- of a :py:class:`tuple`. Like a tuple, it can be indexed, iterated and unpacked
113
- to get the child iterators. In addition, its :py:meth:`~.tee.aclose` method
109
+ Unlike `itertools.tee`, `.tee` returns a custom type instead
110
+ of a :py`tuple`. Like a tuple, it can be indexed, iterated and unpacked
111
+ to get the child iterators. In addition, its `.tee.aclose` method
114
112
  immediately closes all children, and it can be used in an ``async with`` context
115
113
  for the same effect.
116
114
 
117
115
  If ``iterable`` is an iterator and read elsewhere, ``tee`` will *not*
118
116
  provide these items. Also, ``tee`` must internally buffer each item until the
119
117
  last iterator has yielded it; if the most and least advanced iterator differ
120
- by most data, using a :py:class:`list` is more efficient (but not lazy).
118
+ by most data, using a :py`list` is more efficient (but not lazy).
121
119
 
122
120
  If the underlying iterable is concurrency safe (``anext`` may be awaited
123
121
  concurrently) the resulting iterators are concurrency safe as well. Otherwise,
124
122
  the iterators are safe if there is only ever one single "most advanced" iterator.
125
123
  To enforce sequential use of ``anext``, provide a ``lock``
126
- - e.g. an :py:class:`asyncio.Lock` instance in an :py:mod:`asyncio` application -
124
+ - e.g. an :py`asyncio.Lock` instance in an :py:mod:`asyncio` application -
127
125
  and access is automatically synchronised.
128
126
 
129
127
  """
@@ -133,7 +131,7 @@ class Tee(Generic[T]):
133
131
  iterable: Iterator[T],
134
132
  n: int = 2,
135
133
  *,
136
- lock: Optional[AbstractContextManager[Any]] = None,
134
+ lock: AbstractContextManager[Any] | None = None,
137
135
  ):
138
136
  """Create a ``tee``.
139
137
 
@@ -165,9 +163,7 @@ class Tee(Generic[T]):
165
163
  @overload
166
164
  def __getitem__(self, item: slice) -> tuple[Iterator[T], ...]: ...
167
165
 
168
- def __getitem__(
169
- self, item: Union[int, slice]
170
- ) -> Union[Iterator[T], tuple[Iterator[T], ...]]:
166
+ def __getitem__(self, item: int | slice) -> Iterator[T] | tuple[Iterator[T], ...]:
171
167
  """Return the child iterator(s) at the given index or slice."""
172
168
  return self._children[item]
173
169
 
@@ -185,9 +181,9 @@ class Tee(Generic[T]):
185
181
 
186
182
  def __exit__(
187
183
  self,
188
- exc_type: Optional[type[BaseException]],
189
- exc_val: Optional[BaseException],
190
- exc_tb: Optional[TracebackType],
184
+ exc_type: type[BaseException] | None,
185
+ exc_val: BaseException | None,
186
+ exc_tb: TracebackType | None,
191
187
  ) -> Literal[False]:
192
188
  """Close all child iterators.
193
189
 
@@ -207,7 +203,7 @@ class Tee(Generic[T]):
207
203
  safetee = Tee
208
204
 
209
205
 
210
- def batch_iterate(size: Optional[int], iterable: Iterable[T]) -> Iterator[list[T]]:
206
+ def batch_iterate(size: int | None, iterable: Iterable[T]) -> Iterator[list[T]]:
211
207
  """Utility batching function.
212
208
 
213
209
  Args:
@@ -4,7 +4,8 @@ from __future__ import annotations
4
4
 
5
5
  import json
6
6
  import re
7
- from typing import Any, Callable, Union
7
+ from collections.abc import Callable
8
+ from typing import Any
8
9
 
9
10
  from langchain_core.exceptions import OutputParserException
10
11
 
@@ -19,7 +20,7 @@ def _replace_new_line(match: re.Match[str]) -> str:
19
20
  return match.group(1) + value + match.group(3)
20
21
 
21
22
 
22
- def _custom_parser(multiline_string: Union[str, bytes, bytearray]) -> str:
23
+ def _custom_parser(multiline_string: str | bytes | bytearray) -> str:
23
24
  r"""Custom parser for multiline strings.
24
25
 
25
26
  The LLM response for `action_input` may be a multiline
@@ -3,13 +3,13 @@
3
3
  from __future__ import annotations
4
4
 
5
5
  from copy import deepcopy
6
- from typing import TYPE_CHECKING, Any, Optional, Union
6
+ from typing import TYPE_CHECKING, Any
7
7
 
8
8
  if TYPE_CHECKING:
9
9
  from collections.abc import Sequence
10
10
 
11
11
 
12
- def _retrieve_ref(path: str, schema: dict) -> Union[list, dict]:
12
+ def _retrieve_ref(path: str, schema: dict) -> list | dict:
13
13
  components = path.split("/")
14
14
  if components[0] != "#":
15
15
  msg = (
@@ -17,7 +17,7 @@ def _retrieve_ref(path: str, schema: dict) -> Union[list, dict]:
17
17
  "with #."
18
18
  )
19
19
  raise ValueError(msg)
20
- out: Union[list, dict] = schema
20
+ out: list | dict = schema
21
21
  for component in components[1:]:
22
22
  if component in out:
23
23
  if isinstance(out, list):
@@ -67,7 +67,7 @@ def _process_dict_properties(
67
67
  def _dereference_refs_helper(
68
68
  obj: Any,
69
69
  full_schema: dict[str, Any],
70
- processed_refs: Optional[set[str]],
70
+ processed_refs: set[str] | None,
71
71
  skip_keys: Sequence[str],
72
72
  shallow_refs: bool, # noqa: FBT001
73
73
  ) -> Any:
@@ -167,8 +167,8 @@ def _dereference_refs_helper(
167
167
  def dereference_refs(
168
168
  schema_obj: dict,
169
169
  *,
170
- full_schema: Optional[dict] = None,
171
- skip_keys: Optional[Sequence[str]] = None,
170
+ full_schema: dict | None = None,
171
+ skip_keys: Sequence[str] | None = None,
172
172
  ) -> dict:
173
173
  """Resolve and inline JSON Schema $ref references in a schema object.
174
174
 
@@ -12,8 +12,6 @@ from typing import (
12
12
  TYPE_CHECKING,
13
13
  Any,
14
14
  Literal,
15
- Optional,
16
- Union,
17
15
  cast,
18
16
  )
19
17
 
@@ -23,7 +21,7 @@ if TYPE_CHECKING:
23
21
  logger = logging.getLogger(__name__)
24
22
 
25
23
 
26
- Scopes: TypeAlias = list[Union[Literal[False, 0], Mapping[str, Any]]]
24
+ Scopes: TypeAlias = list[Literal[False, 0] | Mapping[str, Any]]
27
25
 
28
26
 
29
27
  # Globals
@@ -433,13 +431,13 @@ EMPTY_DICT: MappingProxyType[str, str] = MappingProxyType({})
433
431
 
434
432
 
435
433
  def render(
436
- template: Union[str, list[tuple[str, str]]] = "",
434
+ template: str | list[tuple[str, str]] = "",
437
435
  data: Mapping[str, Any] = EMPTY_DICT,
438
436
  partials_dict: Mapping[str, str] = EMPTY_DICT,
439
437
  padding: str = "",
440
438
  def_ldel: str = "{{",
441
439
  def_rdel: str = "}}",
442
- scopes: Optional[Scopes] = None,
440
+ scopes: Scopes | None = None,
443
441
  warn: bool = False, # noqa: FBT001,FBT002
444
442
  keep: bool = False, # noqa: FBT001,FBT002
445
443
  ) -> str:
@@ -5,16 +5,14 @@ from __future__ import annotations
5
5
  import inspect
6
6
  import textwrap
7
7
  import warnings
8
+ from collections.abc import Callable
8
9
  from contextlib import nullcontext
9
10
  from functools import lru_cache, wraps
10
11
  from types import GenericAlias
11
12
  from typing import (
12
13
  TYPE_CHECKING,
13
14
  Any,
14
- Callable,
15
- Optional,
16
15
  TypeVar,
17
- Union,
18
16
  cast,
19
17
  overload,
20
18
  )
@@ -205,8 +203,8 @@ def _create_subset_model_v1(
205
203
  model: type[BaseModelV1],
206
204
  field_names: list,
207
205
  *,
208
- descriptions: Optional[dict] = None,
209
- fn_description: Optional[str] = None,
206
+ descriptions: dict | None = None,
207
+ fn_description: str | None = None,
210
208
  ) -> type[BaseModel]:
211
209
  """Create a pydantic model with only a subset of model's fields."""
212
210
  fields = {}
@@ -218,7 +216,7 @@ def _create_subset_model_v1(
218
216
  # this isn't perfect but should work for most functions
219
217
  field.outer_type_
220
218
  if field.required and not field.allow_none
221
- else Optional[field.outer_type_]
219
+ else field.outer_type_ | None
222
220
  )
223
221
  if descriptions and field_name in descriptions:
224
222
  field.field_info.description = descriptions[field_name]
@@ -234,8 +232,8 @@ def _create_subset_model_v2(
234
232
  model: type[BaseModel],
235
233
  field_names: list[str],
236
234
  *,
237
- descriptions: Optional[dict] = None,
238
- fn_description: Optional[str] = None,
235
+ descriptions: dict | None = None,
236
+ fn_description: str | None = None,
239
237
  ) -> type[BaseModel]:
240
238
  """Create a pydantic model with a subset of the model fields."""
241
239
  descriptions_ = descriptions or {}
@@ -276,8 +274,8 @@ def _create_subset_model(
276
274
  model: TypeBaseModel,
277
275
  field_names: list[str],
278
276
  *,
279
- descriptions: Optional[dict] = None,
280
- fn_description: Optional[str] = None,
277
+ descriptions: dict | None = None,
278
+ fn_description: str | None = None,
281
279
  ) -> type[BaseModel]:
282
280
  """Create subset model using the same pydantic version as the input model.
283
281
 
@@ -318,8 +316,8 @@ def get_fields(model: BaseModelV1) -> dict[str, ModelField]: ...
318
316
 
319
317
 
320
318
  def get_fields(
321
- model: Union[type[Union[BaseModel, BaseModelV1]], BaseModel, BaseModelV1],
322
- ) -> Union[dict[str, FieldInfoV2], dict[str, ModelField]]:
319
+ model: type[BaseModel | BaseModelV1] | BaseModel | BaseModelV1,
320
+ ) -> dict[str, FieldInfoV2] | dict[str, ModelField]:
323
321
  """Return the field names of a Pydantic model.
324
322
 
325
323
  Args:
@@ -348,7 +346,7 @@ NO_DEFAULT = object()
348
346
  def _create_root_model(
349
347
  name: str,
350
348
  type_: Any,
351
- module_name: Optional[str] = None,
349
+ module_name: str | None = None,
352
350
  default_: object = NO_DEFAULT,
353
351
  ) -> type[BaseModel]:
354
352
  """Create a base class."""
@@ -413,7 +411,7 @@ def _create_root_model_cached(
413
411
  model_name: str,
414
412
  type_: Any,
415
413
  *,
416
- module_name: Optional[str] = None,
414
+ module_name: str | None = None,
417
415
  default_: object = NO_DEFAULT,
418
416
  ) -> type[BaseModel]:
419
417
  return _create_root_model(
@@ -436,7 +434,7 @@ def _create_model_cached(
436
434
 
437
435
  def create_model(
438
436
  model_name: str,
439
- module_name: Optional[str] = None,
437
+ module_name: str | None = None,
440
438
  /,
441
439
  **field_definitions: Any,
442
440
  ) -> type[BaseModel]:
@@ -509,9 +507,9 @@ def _remap_field_definitions(field_definitions: dict[str, Any]) -> dict[str, Any
509
507
  def create_model_v2(
510
508
  model_name: str,
511
509
  *,
512
- module_name: Optional[str] = None,
513
- field_definitions: Optional[dict[str, Any]] = None,
514
- root: Optional[Any] = None,
510
+ module_name: str | None = None,
511
+ field_definitions: dict[str, Any] | None = None,
512
+ root: Any | None = None,
515
513
  ) -> type[BaseModel]:
516
514
  """Create a pydantic model with the given field definitions.
517
515
 
@@ -1,6 +1,6 @@
1
1
  """Usage utilities."""
2
2
 
3
- from typing import Callable
3
+ from collections.abc import Callable
4
4
 
5
5
 
6
6
  def _dict_int_op(