langchain-core 0.4.0.dev0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (172)
  1. langchain_core/__init__.py +1 -1
  2. langchain_core/_api/__init__.py +3 -4
  3. langchain_core/_api/beta_decorator.py +45 -70
  4. langchain_core/_api/deprecation.py +80 -80
  5. langchain_core/_api/path.py +22 -8
  6. langchain_core/_import_utils.py +10 -4
  7. langchain_core/agents.py +25 -21
  8. langchain_core/caches.py +53 -63
  9. langchain_core/callbacks/__init__.py +1 -8
  10. langchain_core/callbacks/base.py +341 -348
  11. langchain_core/callbacks/file.py +55 -44
  12. langchain_core/callbacks/manager.py +546 -683
  13. langchain_core/callbacks/stdout.py +29 -30
  14. langchain_core/callbacks/streaming_stdout.py +35 -36
  15. langchain_core/callbacks/usage.py +65 -70
  16. langchain_core/chat_history.py +48 -55
  17. langchain_core/document_loaders/base.py +46 -21
  18. langchain_core/document_loaders/langsmith.py +39 -36
  19. langchain_core/documents/__init__.py +0 -1
  20. langchain_core/documents/base.py +96 -74
  21. langchain_core/documents/compressor.py +12 -9
  22. langchain_core/documents/transformers.py +29 -28
  23. langchain_core/embeddings/fake.py +56 -57
  24. langchain_core/env.py +2 -3
  25. langchain_core/example_selectors/base.py +12 -0
  26. langchain_core/example_selectors/length_based.py +1 -1
  27. langchain_core/example_selectors/semantic_similarity.py +21 -25
  28. langchain_core/exceptions.py +15 -9
  29. langchain_core/globals.py +4 -163
  30. langchain_core/indexing/api.py +132 -125
  31. langchain_core/indexing/base.py +64 -67
  32. langchain_core/indexing/in_memory.py +26 -6
  33. langchain_core/language_models/__init__.py +15 -27
  34. langchain_core/language_models/_utils.py +267 -117
  35. langchain_core/language_models/base.py +92 -177
  36. langchain_core/language_models/chat_models.py +547 -407
  37. langchain_core/language_models/fake.py +11 -11
  38. langchain_core/language_models/fake_chat_models.py +72 -118
  39. langchain_core/language_models/llms.py +168 -242
  40. langchain_core/load/dump.py +8 -11
  41. langchain_core/load/load.py +32 -28
  42. langchain_core/load/mapping.py +2 -4
  43. langchain_core/load/serializable.py +50 -56
  44. langchain_core/messages/__init__.py +36 -51
  45. langchain_core/messages/ai.py +377 -150
  46. langchain_core/messages/base.py +239 -47
  47. langchain_core/messages/block_translators/__init__.py +111 -0
  48. langchain_core/messages/block_translators/anthropic.py +470 -0
  49. langchain_core/messages/block_translators/bedrock.py +94 -0
  50. langchain_core/messages/block_translators/bedrock_converse.py +297 -0
  51. langchain_core/messages/block_translators/google_genai.py +530 -0
  52. langchain_core/messages/block_translators/google_vertexai.py +21 -0
  53. langchain_core/messages/block_translators/groq.py +143 -0
  54. langchain_core/messages/block_translators/langchain_v0.py +301 -0
  55. langchain_core/messages/block_translators/openai.py +1010 -0
  56. langchain_core/messages/chat.py +2 -3
  57. langchain_core/messages/content.py +1423 -0
  58. langchain_core/messages/function.py +7 -7
  59. langchain_core/messages/human.py +44 -38
  60. langchain_core/messages/modifier.py +3 -2
  61. langchain_core/messages/system.py +40 -27
  62. langchain_core/messages/tool.py +160 -58
  63. langchain_core/messages/utils.py +527 -638
  64. langchain_core/output_parsers/__init__.py +1 -14
  65. langchain_core/output_parsers/base.py +68 -104
  66. langchain_core/output_parsers/json.py +13 -17
  67. langchain_core/output_parsers/list.py +11 -33
  68. langchain_core/output_parsers/openai_functions.py +56 -74
  69. langchain_core/output_parsers/openai_tools.py +68 -109
  70. langchain_core/output_parsers/pydantic.py +15 -13
  71. langchain_core/output_parsers/string.py +6 -2
  72. langchain_core/output_parsers/transform.py +17 -60
  73. langchain_core/output_parsers/xml.py +34 -44
  74. langchain_core/outputs/__init__.py +1 -1
  75. langchain_core/outputs/chat_generation.py +26 -11
  76. langchain_core/outputs/chat_result.py +1 -3
  77. langchain_core/outputs/generation.py +17 -6
  78. langchain_core/outputs/llm_result.py +15 -8
  79. langchain_core/prompt_values.py +29 -123
  80. langchain_core/prompts/__init__.py +3 -27
  81. langchain_core/prompts/base.py +48 -63
  82. langchain_core/prompts/chat.py +259 -288
  83. langchain_core/prompts/dict.py +19 -11
  84. langchain_core/prompts/few_shot.py +84 -90
  85. langchain_core/prompts/few_shot_with_templates.py +14 -12
  86. langchain_core/prompts/image.py +19 -14
  87. langchain_core/prompts/loading.py +6 -8
  88. langchain_core/prompts/message.py +7 -8
  89. langchain_core/prompts/prompt.py +42 -43
  90. langchain_core/prompts/string.py +37 -16
  91. langchain_core/prompts/structured.py +43 -46
  92. langchain_core/rate_limiters.py +51 -60
  93. langchain_core/retrievers.py +52 -192
  94. langchain_core/runnables/base.py +1727 -1683
  95. langchain_core/runnables/branch.py +52 -73
  96. langchain_core/runnables/config.py +89 -103
  97. langchain_core/runnables/configurable.py +128 -130
  98. langchain_core/runnables/fallbacks.py +93 -82
  99. langchain_core/runnables/graph.py +127 -127
  100. langchain_core/runnables/graph_ascii.py +63 -41
  101. langchain_core/runnables/graph_mermaid.py +87 -70
  102. langchain_core/runnables/graph_png.py +31 -36
  103. langchain_core/runnables/history.py +145 -161
  104. langchain_core/runnables/passthrough.py +141 -144
  105. langchain_core/runnables/retry.py +84 -68
  106. langchain_core/runnables/router.py +33 -37
  107. langchain_core/runnables/schema.py +79 -72
  108. langchain_core/runnables/utils.py +95 -139
  109. langchain_core/stores.py +85 -131
  110. langchain_core/structured_query.py +11 -15
  111. langchain_core/sys_info.py +31 -32
  112. langchain_core/tools/__init__.py +1 -14
  113. langchain_core/tools/base.py +221 -247
  114. langchain_core/tools/convert.py +144 -161
  115. langchain_core/tools/render.py +10 -10
  116. langchain_core/tools/retriever.py +12 -19
  117. langchain_core/tools/simple.py +52 -29
  118. langchain_core/tools/structured.py +56 -60
  119. langchain_core/tracers/__init__.py +1 -9
  120. langchain_core/tracers/_streaming.py +6 -7
  121. langchain_core/tracers/base.py +103 -112
  122. langchain_core/tracers/context.py +29 -48
  123. langchain_core/tracers/core.py +142 -105
  124. langchain_core/tracers/evaluation.py +30 -34
  125. langchain_core/tracers/event_stream.py +162 -117
  126. langchain_core/tracers/langchain.py +34 -36
  127. langchain_core/tracers/log_stream.py +87 -49
  128. langchain_core/tracers/memory_stream.py +3 -3
  129. langchain_core/tracers/root_listeners.py +18 -34
  130. langchain_core/tracers/run_collector.py +8 -20
  131. langchain_core/tracers/schemas.py +0 -125
  132. langchain_core/tracers/stdout.py +3 -3
  133. langchain_core/utils/__init__.py +1 -4
  134. langchain_core/utils/_merge.py +47 -9
  135. langchain_core/utils/aiter.py +70 -66
  136. langchain_core/utils/env.py +12 -9
  137. langchain_core/utils/function_calling.py +139 -206
  138. langchain_core/utils/html.py +7 -8
  139. langchain_core/utils/input.py +6 -6
  140. langchain_core/utils/interactive_env.py +6 -2
  141. langchain_core/utils/iter.py +48 -45
  142. langchain_core/utils/json.py +14 -4
  143. langchain_core/utils/json_schema.py +159 -43
  144. langchain_core/utils/mustache.py +32 -25
  145. langchain_core/utils/pydantic.py +67 -40
  146. langchain_core/utils/strings.py +5 -5
  147. langchain_core/utils/usage.py +1 -1
  148. langchain_core/utils/utils.py +104 -62
  149. langchain_core/vectorstores/base.py +131 -179
  150. langchain_core/vectorstores/in_memory.py +113 -182
  151. langchain_core/vectorstores/utils.py +23 -17
  152. langchain_core/version.py +1 -1
  153. langchain_core-1.0.0.dist-info/METADATA +68 -0
  154. langchain_core-1.0.0.dist-info/RECORD +172 -0
  155. {langchain_core-0.4.0.dev0.dist-info → langchain_core-1.0.0.dist-info}/WHEEL +1 -1
  156. langchain_core/beta/__init__.py +0 -1
  157. langchain_core/beta/runnables/__init__.py +0 -1
  158. langchain_core/beta/runnables/context.py +0 -448
  159. langchain_core/memory.py +0 -116
  160. langchain_core/messages/content_blocks.py +0 -1435
  161. langchain_core/prompts/pipeline.py +0 -133
  162. langchain_core/pydantic_v1/__init__.py +0 -30
  163. langchain_core/pydantic_v1/dataclasses.py +0 -23
  164. langchain_core/pydantic_v1/main.py +0 -23
  165. langchain_core/tracers/langchain_v1.py +0 -23
  166. langchain_core/utils/loading.py +0 -31
  167. langchain_core/v1/__init__.py +0 -1
  168. langchain_core/v1/chat_models.py +0 -1047
  169. langchain_core/v1/messages.py +0 -755
  170. langchain_core-0.4.0.dev0.dist-info/METADATA +0 -108
  171. langchain_core-0.4.0.dev0.dist-info/RECORD +0 -177
  172. langchain_core-0.4.0.dev0.dist-info/entry_points.txt +0 -4

langchain_core/tracers/schemas.py
@@ -2,138 +2,13 @@
 
 from __future__ import annotations
 
-import warnings
-from datetime import datetime, timezone
-from typing import Any, Optional
-from uuid import UUID
-
 from langsmith import RunTree
-from langsmith.schemas import RunTypeEnum as RunTypeEnumDep
-from pydantic import PydanticDeprecationWarning
-from pydantic.v1 import BaseModel as BaseModelV1
-from pydantic.v1 import Field as FieldV1
-
-from langchain_core._api import deprecated
-
-
-@deprecated("0.1.0", alternative="Use string instead.", removal="1.0")
-def RunTypeEnum() -> type[RunTypeEnumDep]:  # noqa: N802
-    """RunTypeEnum."""
-    warnings.warn(
-        "RunTypeEnum is deprecated. Please directly use a string instead"
-        " (e.g. 'llm', 'chain', 'tool').",
-        DeprecationWarning,
-        stacklevel=2,
-    )
-    return RunTypeEnumDep
-
-
-@deprecated("0.1.0", removal="1.0")
-class TracerSessionV1Base(BaseModelV1):
-    """Base class for TracerSessionV1."""
-
-    start_time: datetime = FieldV1(default_factory=lambda: datetime.now(timezone.utc))
-    name: Optional[str] = None
-    extra: Optional[dict[str, Any]] = None
-
-
-@deprecated("0.1.0", removal="1.0")
-class TracerSessionV1Create(TracerSessionV1Base):
-    """Create class for TracerSessionV1."""
-
-
-@deprecated("0.1.0", removal="1.0")
-class TracerSessionV1(TracerSessionV1Base):
-    """TracerSessionV1 schema."""
-
-    id: int
-
-
-@deprecated("0.1.0", removal="1.0")
-class TracerSessionBase(TracerSessionV1Base):
-    """Base class for TracerSession."""
-
-    tenant_id: UUID
-
-
-@deprecated("0.1.0", removal="1.0")
-class TracerSession(TracerSessionBase):
-    """TracerSessionV1 schema for the V2 API."""
-
-    id: UUID
-
-
-@deprecated("0.1.0", alternative="Run", removal="1.0")
-class BaseRun(BaseModelV1):
-    """Base class for Run."""
-
-    uuid: str
-    parent_uuid: Optional[str] = None
-    start_time: datetime = FieldV1(default_factory=lambda: datetime.now(timezone.utc))
-    end_time: datetime = FieldV1(default_factory=lambda: datetime.now(timezone.utc))
-    extra: Optional[dict[str, Any]] = None
-    execution_order: int
-    child_execution_order: int
-    serialized: dict[str, Any]
-    session_id: int
-    error: Optional[str] = None
-
-
-@deprecated("0.1.0", alternative="Run", removal="1.0")
-class LLMRun(BaseRun):
-    """Class for LLMRun."""
-
-    prompts: list[str]
-    # Temporarily, remove but we will completely remove LLMRun
-    # response: Optional[LLMResult] = None
-
-
-@deprecated("0.1.0", alternative="Run", removal="1.0")
-class ChainRun(BaseRun):
-    """Class for ChainRun."""
-
-    inputs: dict[str, Any]
-    outputs: Optional[dict[str, Any]] = None
-    child_llm_runs: list[LLMRun] = FieldV1(default_factory=list)
-    child_chain_runs: list[ChainRun] = FieldV1(default_factory=list)
-    child_tool_runs: list[ToolRun] = FieldV1(default_factory=list)
-
-
-@deprecated("0.1.0", alternative="Run", removal="1.0")
-class ToolRun(BaseRun):
-    """Class for ToolRun."""
-
-    tool_input: str
-    output: Optional[str] = None
-    action: str
-    child_llm_runs: list[LLMRun] = FieldV1(default_factory=list)
-    child_chain_runs: list[ChainRun] = FieldV1(default_factory=list)
-    child_tool_runs: list[ToolRun] = FieldV1(default_factory=list)
-
 
 # Begin V2 API Schemas
 
 
 Run = RunTree  # For backwards compatibility
 
-# TODO: Update once langsmith moves to Pydantic V2 and we can swap Run.model_rebuild
-# for Run.update_forward_refs
-with warnings.catch_warnings():
-    warnings.simplefilter("ignore", category=PydanticDeprecationWarning)
-
-    ChainRun.update_forward_refs()
-    ToolRun.update_forward_refs()
-
 __all__ = [
-    "BaseRun",
-    "ChainRun",
-    "LLMRun",
     "Run",
-    "RunTypeEnum",
-    "ToolRun",
-    "TracerSession",
-    "TracerSessionBase",
-    "TracerSessionV1",
-    "TracerSessionV1Base",
-    "TracerSessionV1Create",
 ]
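
For downstream code that still imported the removed V1 tracer schema classes, only the `Run` alias survives in this module. A minimal migration sketch (the plain-string run types follow the deprecation message shown above; the variable name is illustrative):

```python
# `Run` remains exported from langchain_core.tracers.schemas and is an alias
# for langsmith.RunTree, as the retained lines above show.
from langchain_core.tracers.schemas import Run

# The removed RunTypeEnum helper already pointed callers at plain strings.
run_type: str = "llm"  # e.g. "llm", "chain", or "tool"
```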

langchain_core/tracers/stdout.py
@@ -1,7 +1,8 @@
 """Tracers that print to the console."""
 
 import json
-from typing import Any, Callable
+from collections.abc import Callable
+from typing import Any
 
 from langchain_core.tracers.base import BaseTracer
 from langchain_core.tracers.schemas import Run
@@ -48,8 +49,7 @@ class FunctionCallbackHandler(BaseTracer):
     """Tracer that calls a function with a single str parameter."""
 
     name: str = "function_callback_handler"
-    """The name of the tracer. This is used to identify the tracer in the logs.
-    Default is "function_callback_handler"."""
+    """The name of the tracer. This is used to identify the tracer in the logs."""
 
     def __init__(self, function: Callable[[str], None], **kwargs: Any) -> None:
         """Create a FunctionCallbackHandler.

langchain_core/utils/__init__.py
@@ -1,4 +1,4 @@
-"""**Utility functions** for LangChain.
+"""Utility functions for LangChain.
 
 These functions do not depend on any other LangChain module.
 """
@@ -21,7 +21,6 @@ if TYPE_CHECKING:
         print_text,
     )
     from langchain_core.utils.iter import batch_iterate
-    from langchain_core.utils.loading import try_load_from_hub
     from langchain_core.utils.pydantic import pre_init
     from langchain_core.utils.strings import (
         comma_list,
@@ -68,7 +67,6 @@ __all__ = (
     "secret_from_env",
     "stringify_dict",
     "stringify_value",
-    "try_load_from_hub",
     "xor_args",
 )
 
@@ -84,7 +82,6 @@ _dynamic_imports = {
     "get_colored_text": "input",
     "print_text": "input",
     "batch_iterate": "iter",
-    "try_load_from_hub": "loading",
     "pre_init": "pydantic",
     "comma_list": "strings",
     "sanitize_for_postgres": "strings",

langchain_core/utils/_merge.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from typing import Any, Optional
+from typing import Any
 
 
 def merge_dicts(left: dict[str, Any], *others: dict[str, Any]) -> dict[str, Any]:
@@ -57,6 +57,11 @@ def merge_dicts(left: dict[str, Any], *others: dict[str, Any]) -> dict[str, Any]
                #             "should either occur once or have the same value across "
                #             "all dicts."
                #         )
+                if (right_k == "index" and merged[right_k].startswith("lc_")) or (
+                    right_k in ("id", "output_version", "model_provider")
+                    and merged[right_k] == right_v
+                ):
+                    continue
                 merged[right_k] += right_v
             elif isinstance(merged[right_k], dict):
                 merged[right_k] = merge_dicts(merged[right_k], right_v)
@@ -75,7 +80,7 @@ def merge_dicts(left: dict[str, Any], *others: dict[str, Any]) -> dict[str, Any]
     return merged
 
 
-def merge_lists(left: Optional[list], *others: Optional[list]) -> Optional[list]:
+def merge_lists(left: list | None, *others: list | None) -> list | None:
     """Add many lists, handling None.
 
     Args:
@@ -93,20 +98,53 @@ def merge_lists(left: Optional[list], *others: Optional[list]) -> Optional[list]
             merged = other.copy()
         else:
             for e in other:
-                if isinstance(e, dict) and "index" in e and isinstance(e["index"], int):
+                if (
+                    isinstance(e, dict)
+                    and "index" in e
+                    and (
+                        isinstance(e["index"], int)
+                        or (
+                            isinstance(e["index"], str) and e["index"].startswith("lc_")
+                        )
+                    )
+                ):
                     to_merge = [
                         i
                         for i, e_left in enumerate(merged)
-                        if e_left["index"] == e["index"]
+                        if "index" in e_left and e_left["index"] == e["index"]
                     ]
                     if to_merge:
                         # TODO: Remove this once merge_dict is updated with special
                         # handling for 'type'.
-                        new_e = (
-                            {k: v for k, v in e.items() if k != "type"}
-                            if "type" in e
-                            else e
-                        )
+                        if (left_type := merged[to_merge[0]].get("type")) and (
+                            e.get("type") == "non_standard" and "value" in e
+                        ):
+                            if left_type != "non_standard":
+                                # standard + non_standard
+                                new_e: dict[str, Any] = {
+                                    "extras": {
+                                        k: v
+                                        for k, v in e["value"].items()
+                                        if k != "type"
+                                    }
+                                }
+                            else:
+                                # non_standard + non_standard
+                                new_e = {
+                                    "value": {
+                                        k: v
+                                        for k, v in e["value"].items()
+                                        if k != "type"
+                                    }
+                                }
+                            if "index" in e:
+                                new_e["index"] = e["index"]
+                        else:
+                            new_e = (
+                                {k: v for k, v in e.items() if k != "type"}
+                                if "type" in e
+                                else e
+                            )
                         merged[to_merge[0]] = merge_dicts(merged[to_merge[0]], new_e)
                     else:
                         merged.append(e)
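
To make the new index handling concrete, here is a small sketch of how `merge_lists` combines streamed chunks that share an `"index"` key (calling the private helper directly is for illustration only; in practice it backs message-chunk concatenation):

```python
from langchain_core.utils._merge import merge_lists

left = [{"index": 0, "text": "Hel"}]
right = [{"index": 0, "text": "lo"}, {"index": 1, "text": "!"}]

# Entries with a matching "index" are merged via merge_dicts (string values
# are concatenated); entries with an unseen index are appended.
merged = merge_lists(left, right)
assert merged == [{"index": 0, "text": "Hello"}, {"index": 1, "text": "!"}]
```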

langchain_core/utils/aiter.py
@@ -11,17 +11,15 @@ from collections.abc import (
     AsyncIterable,
     AsyncIterator,
     Awaitable,
+    Callable,
     Iterator,
 )
 from contextlib import AbstractAsyncContextManager
 from types import TracebackType
 from typing import (
     Any,
-    Callable,
     Generic,
-    Optional,
     TypeVar,
-    Union,
     cast,
     overload,
 )
@@ -36,8 +34,8 @@ _no_default = object()
 # https://github.com/python/cpython/blob/main/Lib/test/test_asyncgen.py#L54
 # before 3.10, the builtin anext() was not available
 def py_anext(
-    iterator: AsyncIterator[T], default: Union[T, Any] = _no_default
-) -> Awaitable[Union[T, None, Any]]:
+    iterator: AsyncIterator[T], default: T | Any = _no_default
+) -> Awaitable[T | Any | None]:
     """Pure-Python implementation of anext() for testing purposes.
 
     Closely matches the builtin anext() C implementation.
@@ -52,7 +50,7 @@ def py_anext(
 
     Returns:
         The next value from the iterator, or the default value
-            if the iterator is exhausted.
+        if the iterator is exhausted.
 
     Raises:
         TypeError: If the iterator is not an async iterator.
@@ -68,7 +66,7 @@ def py_anext(
     if default is _no_default:
         return __anext__(iterator)
 
-    async def anext_impl() -> Union[T, Any]:
+    async def anext_impl() -> T | Any:
         try:
             # The C code is way more low-level than this, as it implements
             # all methods of the iterator protocol. In this implementation
@@ -90,11 +88,11 @@ class NoLock:
 
     async def __aexit__(
         self,
-        exc_type: Optional[type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType],
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> bool:
-        """Exception not handled."""
+        """Return False, exception not suppressed."""
         return False
 
 
@@ -106,10 +104,10 @@ async def tee_peer(
     peers: list[deque[T]],
     lock: AbstractAsyncContextManager[Any],
 ) -> AsyncGenerator[T, None]:
-    """An individual iterator of a :py:func:`~.tee`.
+    """An individual iterator of a `tee`.
 
     This function is a generator that yields items from the shared iterator
-    ``iterator``. It buffers items until the least advanced iterator has
+    `iterator`. It buffers items until the least advanced iterator has
     yielded them as well. The buffer is shared with all other peers.
 
     Args:
@@ -155,39 +153,39 @@ async def tee_peer(
 
 
 class Tee(Generic[T]):
-    """Create ``n`` separate asynchronous iterators over ``iterable``.
+    """Create `n` separate asynchronous iterators over `iterable`.
 
-    This splits a single ``iterable`` into multiple iterators, each providing
+    This splits a single `iterable` into multiple iterators, each providing
     the same items in the same order.
     All child iterators may advance separately but share the same items
-    from ``iterable`` -- when the most advanced iterator retrieves an item,
+    from `iterable` -- when the most advanced iterator retrieves an item,
     it is buffered until the least advanced iterator has yielded it as well.
-    A ``tee`` works lazily and can handle an infinite ``iterable``, provided
+    A `tee` works lazily and can handle an infinite `iterable`, provided
     that all iterators advance.
 
-    .. code-block:: python3
-
-        async def derivative(sensor_data):
-            previous, current = a.tee(sensor_data, n=2)
-            await a.anext(previous)  # advance one iterator
-            return a.map(operator.sub, previous, current)
-
-    Unlike :py:func:`itertools.tee`, :py:func:`~.tee` returns a custom type instead
-    of a :py:class:`tuple`. Like a tuple, it can be indexed, iterated and unpacked
-    to get the child iterators. In addition, its :py:meth:`~.tee.aclose` method
-    immediately closes all children, and it can be used in an ``async with`` context
+    ```python
+    async def derivative(sensor_data):
+        previous, current = a.tee(sensor_data, n=2)
+        await a.anext(previous)  # advance one iterator
+        return a.map(operator.sub, previous, current)
+    ```
+
+    Unlike `itertools.tee`, `.tee` returns a custom type instead
+    of a :py`tuple`. Like a tuple, it can be indexed, iterated and unpacked
+    to get the child iterators. In addition, its `.tee.aclose` method
+    immediately closes all children, and it can be used in an `async with` context
     for the same effect.
 
-    If ``iterable`` is an iterator and read elsewhere, ``tee`` will *not*
-    provide these items. Also, ``tee`` must internally buffer each item until the
+    If `iterable` is an iterator and read elsewhere, `tee` will *not*
+    provide these items. Also, `tee` must internally buffer each item until the
     last iterator has yielded it; if the most and least advanced iterator differ
-    by most data, using a :py:class:`list` is more efficient (but not lazy).
+    by most data, using a :py`list` is more efficient (but not lazy).
 
-    If the underlying iterable is concurrency safe (``anext`` may be awaited
+    If the underlying iterable is concurrency safe (`anext` may be awaited
     concurrently) the resulting iterators are concurrency safe as well. Otherwise,
     the iterators are safe if there is only ever one single "most advanced" iterator.
-    To enforce sequential use of ``anext``, provide a ``lock``
-    - e.g. an :py:class:`asyncio.Lock` instance in an :py:mod:`asyncio` application -
+    To enforce sequential use of `anext`, provide a `lock`
+    - e.g. an :py`asyncio.Lock` instance in an :py:mod:`asyncio` application -
     and access is automatically synchronised.
 
     """
@@ -197,15 +195,15 @@ class Tee(Generic[T]):
         iterable: AsyncIterator[T],
         n: int = 2,
         *,
-        lock: Optional[AbstractAsyncContextManager[Any]] = None,
+        lock: AbstractAsyncContextManager[Any] | None = None,
     ):
-        """Create a ``tee``.
+        """Create a `tee`.
 
         Args:
             iterable: The iterable to split.
-            n: The number of iterators to create. Defaults to 2.
+            n: The number of iterators to create.
             lock: The lock to synchronise access to the shared buffers.
-                Defaults to None.
+
         """
         self._iterator = iterable.__aiter__()  # before 3.10 aiter() doesn't exist
         self._buffers: list[deque[T]] = [deque() for _ in range(n)]
@@ -230,13 +228,17 @@ class Tee(Generic[T]):
     def __getitem__(self, item: slice) -> tuple[AsyncIterator[T], ...]: ...
 
     def __getitem__(
-        self, item: Union[int, slice]
-    ) -> Union[AsyncIterator[T], tuple[AsyncIterator[T], ...]]:
+        self, item: int | slice
+    ) -> AsyncIterator[T] | tuple[AsyncIterator[T], ...]:
         """Return the child iterator(s) for the given index or slice."""
         return self._children[item]
 
     def __iter__(self) -> Iterator[AsyncIterator[T]]:
-        """Iterate over the child iterators."""
+        """Iterate over the child iterators.
+
+        Yields:
+            The child iterators.
+        """
         yield from self._children
 
     async def __aenter__(self) -> "Tee[T]":
@@ -245,11 +247,15 @@ class Tee(Generic[T]):
 
     async def __aexit__(
         self,
-        exc_type: Optional[type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType],
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> bool:
-        """Close all child iterators."""
+        """Close all child iterators.
+
+        Returns:
+            False, exceptions not suppressed.
+        """
         await self.aclose()
         return False
 
@@ -263,30 +269,28 @@ atee = Tee
 
 
 class aclosing(AbstractAsyncContextManager):  # noqa: N801
-    """Async context manager to wrap an AsyncGenerator that has a ``aclose()`` method.
+    """Async context manager to wrap an AsyncGenerator that has a `aclose()` method.
 
     Code like this:
 
-    .. code-block:: python
-
-        async with aclosing(<module>.fetch(<arguments>)) as agen:
-            <block>
+    ```python
+    async with aclosing(<module>.fetch(<arguments>)) as agen:
+        <block>
+    ```
 
     is equivalent to this:
 
-    .. code-block:: python
-
-        agen = <module>.fetch(<arguments>)
-        try:
-            <block>
-        finally:
-            await agen.aclose()
+    ```python
+    agen = <module>.fetch(<arguments>)
+    try:
+        <block>
+    finally:
+        await agen.aclose()
 
+    ```
     """
 
-    def __init__(
-        self, thing: Union[AsyncGenerator[Any, Any], AsyncIterator[Any]]
-    ) -> None:
+    def __init__(self, thing: AsyncGenerator[Any, Any] | AsyncIterator[Any]) -> None:
         """Create the context manager.
 
         Args:
@@ -295,15 +299,15 @@ class aclosing(AbstractAsyncContextManager): # noqa: N801
         self.thing = thing
 
     @override
-    async def __aenter__(self) -> Union[AsyncGenerator[Any, Any], AsyncIterator[Any]]:
+    async def __aenter__(self) -> AsyncGenerator[Any, Any] | AsyncIterator[Any]:
         return self.thing
 
     @override
     async def __aexit__(
         self,
-        exc_type: Optional[type[BaseException]],
-        exc_value: Optional[BaseException],
-        traceback: Optional[TracebackType],
+        exc_type: type[BaseException] | None,
+        exc_value: BaseException | None,
+        traceback: TracebackType | None,
     ) -> None:
         if hasattr(self.thing, "aclose"):
             await self.thing.aclose()
@@ -318,8 +322,8 @@ async def abatch_iterate(
         size: The size of the batch.
         iterable: The async iterable to batch.
 
-    Returns:
-        An async iterator over the batches.
+    Yields:
+        The batches.
     """
     batch: list[T] = []
     async for element in iterable:
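
A brief, self-contained sketch of the two helpers touched above, `atee` and `abatch_iterate` (the iterators are consumed sequentially here, so no extra lock is supplied):

```python
import asyncio

from langchain_core.utils.aiter import abatch_iterate, atee


async def numbers():
    for i in range(5):
        yield i


async def main() -> None:
    # atee splits one async stream into n iterators backed by shared buffers.
    first, second = atee(numbers(), 2)
    assert [x async for x in first] == [0, 1, 2, 3, 4]
    assert [x async for x in second] == [0, 1, 2, 3, 4]

    # abatch_iterate re-chunks an async stream into fixed-size batches.
    batches = [batch async for batch in abatch_iterate(2, numbers())]
    assert batches == [[0, 1], [2, 3], [4]]


asyncio.run(main())
```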

langchain_core/utils/env.py
@@ -3,17 +3,17 @@
 from __future__ import annotations
 
 import os
-from typing import Any, Optional, Union
+from typing import Any
 
 
 def env_var_is_set(env_var: str) -> bool:
     """Check if an environment variable is set.
 
     Args:
-        env_var (str): The name of the environment variable.
+        env_var: The name of the environment variable.
 
     Returns:
-        bool: True if the environment variable is set, False otherwise.
+        `True` if the environment variable is set, `False` otherwise.
     """
     return env_var in os.environ and os.environ[env_var] not in {
         "",
@@ -25,9 +25,9 @@ def env_var_is_set(env_var: str) -> bool:
 
 
 def get_from_dict_or_env(
     data: dict[str, Any],
-    key: Union[str, list[str]],
+    key: str | list[str],
     env_key: str,
-    default: Optional[str] = None,
+    default: str | None = None,
 ) -> str:
     """Get a value from a dictionary or an environment variable.
 
@@ -38,7 +38,10 @@ def get_from_dict_or_env(
         env_key: The environment variable to look up if the key is not
             in the dictionary.
         default: The default value to return if the key is not in the dictionary
-            or the environment. Defaults to None.
+            or the environment.
+
+    Returns:
+        The dict value or the environment variable value.
     """
     if isinstance(key, (list, tuple)):
         for k in key:
@@ -53,7 +56,7 @@ def get_from_dict_or_env(
     return get_from_env(key_for_err, env_key, default=default)
 
 
-def get_from_env(key: str, env_key: str, default: Optional[str] = None) -> str:
+def get_from_env(key: str, env_key: str, default: str | None = None) -> str:
     """Get a value from a dictionary or an environment variable.
 
     Args:
@@ -61,10 +64,10 @@ def get_from_env(key: str, env_key: str, default: Optional[str] = None) -> str:
         env_key: The environment variable to look up if the key is not
            in the dictionary.
         default: The default value to return if the key is not in the dictionary
-            or the environment. Defaults to None.
+            or the environment.
 
     Returns:
-        str: The value of the key.
+        The value of the key.
 
     Raises:
         ValueError: If the key is not in the dictionary and no default value is