langchain-core 1.0.0a8__py3-none-any.whl → 1.0.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (142)
  1. langchain_core/__init__.py +1 -1
  2. langchain_core/_api/__init__.py +0 -1
  3. langchain_core/_api/beta_decorator.py +17 -20
  4. langchain_core/_api/deprecation.py +30 -35
  5. langchain_core/_import_utils.py +1 -1
  6. langchain_core/agents.py +10 -9
  7. langchain_core/caches.py +46 -56
  8. langchain_core/callbacks/__init__.py +1 -8
  9. langchain_core/callbacks/base.py +232 -243
  10. langchain_core/callbacks/file.py +33 -33
  11. langchain_core/callbacks/manager.py +353 -416
  12. langchain_core/callbacks/stdout.py +21 -22
  13. langchain_core/callbacks/streaming_stdout.py +32 -32
  14. langchain_core/callbacks/usage.py +54 -51
  15. langchain_core/chat_history.py +43 -58
  16. langchain_core/document_loaders/base.py +21 -21
  17. langchain_core/document_loaders/langsmith.py +22 -22
  18. langchain_core/documents/__init__.py +0 -1
  19. langchain_core/documents/base.py +46 -49
  20. langchain_core/documents/transformers.py +28 -29
  21. langchain_core/embeddings/fake.py +50 -54
  22. langchain_core/example_selectors/semantic_similarity.py +4 -6
  23. langchain_core/exceptions.py +7 -8
  24. langchain_core/indexing/api.py +19 -25
  25. langchain_core/indexing/base.py +24 -24
  26. langchain_core/language_models/__init__.py +11 -27
  27. langchain_core/language_models/_utils.py +53 -54
  28. langchain_core/language_models/base.py +30 -24
  29. langchain_core/language_models/chat_models.py +123 -148
  30. langchain_core/language_models/fake_chat_models.py +7 -7
  31. langchain_core/language_models/llms.py +14 -16
  32. langchain_core/load/dump.py +3 -4
  33. langchain_core/load/load.py +7 -16
  34. langchain_core/load/serializable.py +37 -36
  35. langchain_core/messages/__init__.py +1 -16
  36. langchain_core/messages/ai.py +122 -123
  37. langchain_core/messages/base.py +31 -31
  38. langchain_core/messages/block_translators/__init__.py +17 -17
  39. langchain_core/messages/block_translators/anthropic.py +3 -3
  40. langchain_core/messages/block_translators/bedrock_converse.py +3 -3
  41. langchain_core/messages/block_translators/google_genai.py +5 -4
  42. langchain_core/messages/block_translators/google_vertexai.py +4 -32
  43. langchain_core/messages/block_translators/groq.py +117 -21
  44. langchain_core/messages/block_translators/langchain_v0.py +3 -3
  45. langchain_core/messages/block_translators/openai.py +5 -5
  46. langchain_core/messages/chat.py +2 -6
  47. langchain_core/messages/content.py +222 -209
  48. langchain_core/messages/function.py +6 -10
  49. langchain_core/messages/human.py +17 -24
  50. langchain_core/messages/modifier.py +2 -2
  51. langchain_core/messages/system.py +12 -22
  52. langchain_core/messages/tool.py +53 -69
  53. langchain_core/messages/utils.py +399 -417
  54. langchain_core/output_parsers/__init__.py +1 -14
  55. langchain_core/output_parsers/base.py +46 -47
  56. langchain_core/output_parsers/json.py +3 -4
  57. langchain_core/output_parsers/list.py +2 -2
  58. langchain_core/output_parsers/openai_functions.py +46 -44
  59. langchain_core/output_parsers/openai_tools.py +11 -16
  60. langchain_core/output_parsers/pydantic.py +10 -11
  61. langchain_core/output_parsers/string.py +2 -2
  62. langchain_core/output_parsers/transform.py +2 -2
  63. langchain_core/output_parsers/xml.py +1 -1
  64. langchain_core/outputs/__init__.py +1 -1
  65. langchain_core/outputs/chat_generation.py +14 -14
  66. langchain_core/outputs/generation.py +6 -6
  67. langchain_core/outputs/llm_result.py +5 -5
  68. langchain_core/prompt_values.py +11 -11
  69. langchain_core/prompts/__init__.py +3 -23
  70. langchain_core/prompts/base.py +33 -38
  71. langchain_core/prompts/chat.py +222 -229
  72. langchain_core/prompts/dict.py +3 -3
  73. langchain_core/prompts/few_shot.py +76 -83
  74. langchain_core/prompts/few_shot_with_templates.py +7 -9
  75. langchain_core/prompts/image.py +12 -14
  76. langchain_core/prompts/loading.py +1 -1
  77. langchain_core/prompts/message.py +3 -3
  78. langchain_core/prompts/prompt.py +20 -23
  79. langchain_core/prompts/string.py +20 -8
  80. langchain_core/prompts/structured.py +26 -27
  81. langchain_core/rate_limiters.py +50 -58
  82. langchain_core/retrievers.py +41 -182
  83. langchain_core/runnables/base.py +565 -597
  84. langchain_core/runnables/branch.py +8 -8
  85. langchain_core/runnables/config.py +37 -44
  86. langchain_core/runnables/configurable.py +9 -10
  87. langchain_core/runnables/fallbacks.py +9 -9
  88. langchain_core/runnables/graph.py +46 -50
  89. langchain_core/runnables/graph_ascii.py +19 -18
  90. langchain_core/runnables/graph_mermaid.py +20 -31
  91. langchain_core/runnables/graph_png.py +7 -7
  92. langchain_core/runnables/history.py +22 -22
  93. langchain_core/runnables/passthrough.py +11 -11
  94. langchain_core/runnables/retry.py +3 -3
  95. langchain_core/runnables/router.py +2 -2
  96. langchain_core/runnables/schema.py +33 -33
  97. langchain_core/runnables/utils.py +30 -34
  98. langchain_core/stores.py +72 -102
  99. langchain_core/sys_info.py +27 -29
  100. langchain_core/tools/__init__.py +1 -14
  101. langchain_core/tools/base.py +70 -71
  102. langchain_core/tools/convert.py +100 -104
  103. langchain_core/tools/render.py +9 -9
  104. langchain_core/tools/retriever.py +7 -7
  105. langchain_core/tools/simple.py +6 -7
  106. langchain_core/tools/structured.py +18 -24
  107. langchain_core/tracers/__init__.py +1 -9
  108. langchain_core/tracers/base.py +35 -35
  109. langchain_core/tracers/context.py +12 -17
  110. langchain_core/tracers/event_stream.py +3 -3
  111. langchain_core/tracers/langchain.py +8 -8
  112. langchain_core/tracers/log_stream.py +17 -18
  113. langchain_core/tracers/memory_stream.py +3 -3
  114. langchain_core/tracers/root_listeners.py +2 -2
  115. langchain_core/tracers/schemas.py +0 -129
  116. langchain_core/tracers/stdout.py +1 -2
  117. langchain_core/utils/__init__.py +1 -1
  118. langchain_core/utils/aiter.py +32 -32
  119. langchain_core/utils/env.py +5 -5
  120. langchain_core/utils/function_calling.py +59 -154
  121. langchain_core/utils/html.py +4 -4
  122. langchain_core/utils/input.py +3 -3
  123. langchain_core/utils/interactive_env.py +1 -1
  124. langchain_core/utils/iter.py +20 -20
  125. langchain_core/utils/json.py +1 -1
  126. langchain_core/utils/json_schema.py +2 -2
  127. langchain_core/utils/mustache.py +5 -5
  128. langchain_core/utils/pydantic.py +17 -17
  129. langchain_core/utils/strings.py +5 -5
  130. langchain_core/utils/utils.py +25 -28
  131. langchain_core/vectorstores/base.py +55 -87
  132. langchain_core/vectorstores/in_memory.py +83 -85
  133. langchain_core/vectorstores/utils.py +2 -2
  134. langchain_core/version.py +1 -1
  135. {langchain_core-1.0.0a8.dist-info → langchain_core-1.0.0rc2.dist-info}/METADATA +23 -11
  136. langchain_core-1.0.0rc2.dist-info/RECORD +172 -0
  137. langchain_core/memory.py +0 -120
  138. langchain_core/pydantic_v1/__init__.py +0 -30
  139. langchain_core/pydantic_v1/dataclasses.py +0 -23
  140. langchain_core/pydantic_v1/main.py +0 -23
  141. langchain_core-1.0.0a8.dist-info/RECORD +0 -176
  142. {langchain_core-1.0.0a8.dist-info → langchain_core-1.0.0rc2.dist-info}/WHEEL +0 -0
langchain_core/tracers/stdout.py

@@ -49,8 +49,7 @@ class FunctionCallbackHandler(BaseTracer):
  """Tracer that calls a function with a single str parameter."""

  name: str = "function_callback_handler"
- """The name of the tracer. This is used to identify the tracer in the logs.
- Default is "function_callback_handler"."""
+ """The name of the tracer. This is used to identify the tracer in the logs."""

  def __init__(self, function: Callable[[str], None], **kwargs: Any) -> None:
  """Create a FunctionCallbackHandler.
langchain_core/utils/__init__.py

@@ -1,4 +1,4 @@
- """**Utility functions** for LangChain.
+ """Utility functions for LangChain.

  These functions do not depend on any other LangChain module.
  """
langchain_core/utils/aiter.py

@@ -50,7 +50,7 @@ def py_anext(

  Returns:
  The next value from the iterator, or the default value
- if the iterator is exhausted.
+ if the iterator is exhausted.

  Raises:
  TypeError: If the iterator is not an async iterator.
@@ -107,7 +107,7 @@ async def tee_peer(
  """An individual iterator of a `tee`.

  This function is a generator that yields items from the shared iterator
- ``iterator``. It buffers items until the least advanced iterator has
+ `iterator`. It buffers items until the least advanced iterator has
  yielded them as well. The buffer is shared with all other peers.

  Args:
@@ -153,38 +153,38 @@ async def tee_peer(


  class Tee(Generic[T]):
- """Create ``n`` separate asynchronous iterators over ``iterable``.
+ """Create `n` separate asynchronous iterators over `iterable`.

- This splits a single ``iterable`` into multiple iterators, each providing
+ This splits a single `iterable` into multiple iterators, each providing
  the same items in the same order.
  All child iterators may advance separately but share the same items
- from ``iterable`` -- when the most advanced iterator retrieves an item,
+ from `iterable` -- when the most advanced iterator retrieves an item,
  it is buffered until the least advanced iterator has yielded it as well.
- A ``tee`` works lazily and can handle an infinite ``iterable``, provided
+ A `tee` works lazily and can handle an infinite `iterable`, provided
  that all iterators advance.

- .. code-block:: python
-
- async def derivative(sensor_data):
- previous, current = a.tee(sensor_data, n=2)
- await a.anext(previous) # advance one iterator
- return a.map(operator.sub, previous, current)
+ ```python
+ async def derivative(sensor_data):
+ previous, current = a.tee(sensor_data, n=2)
+ await a.anext(previous) # advance one iterator
+ return a.map(operator.sub, previous, current)
+ ```

  Unlike `itertools.tee`, `.tee` returns a custom type instead
  of a :py`tuple`. Like a tuple, it can be indexed, iterated and unpacked
  to get the child iterators. In addition, its `.tee.aclose` method
- immediately closes all children, and it can be used in an ``async with`` context
+ immediately closes all children, and it can be used in an `async with` context
  for the same effect.

- If ``iterable`` is an iterator and read elsewhere, ``tee`` will *not*
- provide these items. Also, ``tee`` must internally buffer each item until the
+ If `iterable` is an iterator and read elsewhere, `tee` will *not*
+ provide these items. Also, `tee` must internally buffer each item until the
  last iterator has yielded it; if the most and least advanced iterator differ
  by most data, using a :py`list` is more efficient (but not lazy).

- If the underlying iterable is concurrency safe (``anext`` may be awaited
+ If the underlying iterable is concurrency safe (`anext` may be awaited
  concurrently) the resulting iterators are concurrency safe as well. Otherwise,
  the iterators are safe if there is only ever one single "most advanced" iterator.
- To enforce sequential use of ``anext``, provide a ``lock``
+ To enforce sequential use of `anext`, provide a `lock`
  - e.g. an :py`asyncio.Lock` instance in an :py:mod:`asyncio` application -
  and access is automatically synchronised.

@@ -197,13 +197,13 @@ class Tee(Generic[T]):
  *,
  lock: AbstractAsyncContextManager[Any] | None = None,
  ):
- """Create a ``tee``.
+ """Create a `tee`.

  Args:
  iterable: The iterable to split.
- n: The number of iterators to create. Defaults to 2.
+ n: The number of iterators to create.
  lock: The lock to synchronise access to the shared buffers.
- Defaults to None.
+
  """
  self._iterator = iterable.__aiter__()  # before 3.10 aiter() doesn't exist
  self._buffers: list[deque[T]] = [deque() for _ in range(n)]
@@ -269,25 +269,25 @@ atee = Tee


  class aclosing(AbstractAsyncContextManager):  # noqa: N801
- """Async context manager to wrap an AsyncGenerator that has a ``aclose()`` method.
+ """Async context manager to wrap an AsyncGenerator that has a `aclose()` method.

  Code like this:

- .. code-block:: python
-
- async with aclosing(<module>.fetch(<arguments>)) as agen:
- <block>
+ ```python
+ async with aclosing(<module>.fetch(<arguments>)) as agen:
+ <block>
+ ```

  is equivalent to this:

- .. code-block:: python
-
- agen = <module>.fetch(<arguments>)
- try:
- <block>
- finally:
- await agen.aclose()
+ ```python
+ agen = <module>.fetch(<arguments>)
+ try:
+ <block>
+ finally:
+ await agen.aclose()

+ ```
  """

  def __init__(self, thing: AsyncGenerator[Any, Any] | AsyncIterator[Any]) -> None:
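As a rough illustration of the `Tee` helper documented above, here is a hedged sketch (not part of the diff; the `numbers` async generator is invented for the example):

```python
import asyncio

from langchain_core.utils.aiter import Tee


async def numbers():
    for i in range(3):
        yield i


async def main() -> None:
    # Split one async iterator into two independent child iterators.
    first, second = Tee(numbers(), n=2)
    print([x async for x in first])   # [0, 1, 2]
    print([x async for x in second])  # buffered items are replayed: [0, 1, 2]


asyncio.run(main())
```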
langchain_core/utils/env.py

@@ -10,10 +10,10 @@ def env_var_is_set(env_var: str) -> bool:
  """Check if an environment variable is set.

  Args:
- env_var (str): The name of the environment variable.
+ env_var: The name of the environment variable.

  Returns:
- bool: True if the environment variable is set, False otherwise.
+ `True` if the environment variable is set, `False` otherwise.
  """
  return env_var in os.environ and os.environ[env_var] not in {
  "",
@@ -38,7 +38,7 @@ def get_from_dict_or_env(
  env_key: The environment variable to look up if the key is not
  in the dictionary.
  default: The default value to return if the key is not in the dictionary
- or the environment. Defaults to None.
+ or the environment.

  Returns:
  The dict value or the environment variable value.
@@ -64,10 +64,10 @@ def get_from_env(key: str, env_key: str, default: str | None = None) -> str:
  env_key: The environment variable to look up if the key is not
  in the dictionary.
  default: The default value to return if the key is not in the dictionary
- or the environment. Defaults to None.
+ or the environment.

  Returns:
- str: The value of the key.
+ The value of the key.

  Raises:
  ValueError: If the key is not in the dictionary and no default value is
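A small illustrative sketch of the env helpers documented above (the `MY_API_KEY` variable name is invented for the example):

```python
import os

from langchain_core.utils.env import env_var_is_set, get_from_dict_or_env

os.environ["MY_API_KEY"] = "secret"

# Prefer the value from the dict; fall back to the environment variable.
api_key = get_from_dict_or_env({}, "my_api_key", "MY_API_KEY")
print(api_key)                       # secret
print(env_var_is_set("MY_API_KEY"))  # True
```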
langchain_core/utils/function_calling.py

@@ -27,7 +27,7 @@ from pydantic.v1 import create_model as create_model_v1
  from typing_extensions import TypedDict, is_typeddict

  import langchain_core
- from langchain_core._api import beta, deprecated
+ from langchain_core._api import beta
  from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, ToolMessage
  from langchain_core.utils.json_schema import dereference_refs
  from langchain_core.utils.pydantic import is_basemodel_subclass
@@ -72,11 +72,11 @@ def _rm_titles(kv: dict, prev_key: str = "") -> dict:
  except when a "title" appears within a property definition under "properties".

  Args:
- kv (dict): The input JSON schema as a dictionary.
- prev_key (str): The key from the parent dictionary, used to identify context.
+ kv: The input JSON schema as a dictionary.
+ prev_key: The key from the parent dictionary, used to identify context.

  Returns:
- dict: A new dictionary with appropriate "title" fields removed.
+ A new dictionary with appropriate "title" fields removed.
  """
  new_kv = {}

@@ -114,7 +114,7 @@ def _convert_json_schema_to_openai_function(
  used.
  description: The description of the function. If not provided, the description
  of the schema will be used.
- rm_titles: Whether to remove titles from the schema. Defaults to True.
+ rm_titles: Whether to remove titles from the schema.

  Returns:
  The function description.
@@ -148,7 +148,7 @@ def _convert_pydantic_to_openai_function(
  used.
  description: The description of the function. If not provided, the description
  of the schema will be used.
- rm_titles: Whether to remove titles from the schema. Defaults to True.
+ rm_titles: Whether to remove titles from the schema.

  Raises:
  TypeError: If the model is not a Pydantic model.
@@ -168,42 +168,6 @@ def _convert_pydantic_to_openai_function(
  )


- convert_pydantic_to_openai_function = deprecated(
- "0.1.16",
- alternative="langchain_core.utils.function_calling.convert_to_openai_function()",
- removal="1.0",
- )(_convert_pydantic_to_openai_function)
-
-
- @deprecated(
- "0.1.16",
- alternative="langchain_core.utils.function_calling.convert_to_openai_tool()",
- removal="1.0",
- )
- def convert_pydantic_to_openai_tool(
- model: type[BaseModel],
- *,
- name: str | None = None,
- description: str | None = None,
- ) -> ToolDescription:
- """Converts a Pydantic model to a function description for the OpenAI API.
-
- Args:
- model: The Pydantic model to convert.
- name: The name of the function. If not provided, the title of the schema will be
- used.
- description: The description of the function. If not provided, the description
- of the schema will be used.
-
- Returns:
- The tool description.
- """
- function = _convert_pydantic_to_openai_function(
- model, name=name, description=description
- )
- return {"type": "function", "function": function}
-
-
  def _get_python_function_name(function: Callable) -> str:
  """Get the name of a Python function."""
  return function.__name__
@@ -240,13 +204,6 @@ def _convert_python_function_to_openai_function(
  )


- convert_python_function_to_openai_function = deprecated(
- "0.1.16",
- alternative="langchain_core.utils.function_calling.convert_to_openai_function()",
- removal="1.0",
- )(_convert_python_function_to_openai_function)
-
-
  def _convert_typed_dict_to_openai_function(typed_dict: type) -> FunctionDescription:
  visited: dict = {}

@@ -368,31 +325,6 @@ def _format_tool_to_openai_function(tool: BaseTool) -> FunctionDescription:
  }


- format_tool_to_openai_function = deprecated(
- "0.1.16",
- alternative="langchain_core.utils.function_calling.convert_to_openai_function()",
- removal="1.0",
- )(_format_tool_to_openai_function)
-
-
- @deprecated(
- "0.1.16",
- alternative="langchain_core.utils.function_calling.convert_to_openai_tool()",
- removal="1.0",
- )
- def format_tool_to_openai_tool(tool: BaseTool) -> ToolDescription:
- """Format tool into the OpenAI function API.
-
- Args:
- tool: The tool to format.
-
- Returns:
- The tool description.
- """
- function = _format_tool_to_openai_function(tool)
- return {"type": "function", "function": function}
-
-
  def convert_to_openai_function(
  function: dict[str, Any] | type | Callable | BaseTool,
  *,
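Since the long-deprecated wrappers (`convert_pydantic_to_openai_function`, `convert_pydantic_to_openai_tool`, `convert_python_function_to_openai_function`, `format_tool_to_openai_function`, `format_tool_to_openai_tool`) are deleted in this release, callers are expected to use the public converters instead. A minimal sketch of the replacement call (the `GetWeather` model is invented for the example):

```python
from pydantic import BaseModel, Field

from langchain_core.utils.function_calling import convert_to_openai_function


class GetWeather(BaseModel):
    """Get the current weather for a city."""

    city: str = Field(..., description="City name")


# Replaces the removed convert_pydantic_to_openai_function helper.
schema = convert_to_openai_function(GetWeather)
print(schema["name"])  # "GetWeather"
print(sorted(schema))  # ["description", "name", "parameters"]
```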
@@ -402,14 +334,14 @@

  Args:
  function:
- A dictionary, Pydantic BaseModel class, TypedDict class, a LangChain
- Tool object, or a Python function. If a dictionary is passed in, it is
+ A dictionary, Pydantic `BaseModel` class, `TypedDict` class, a LangChain
+ `Tool` object, or a Python function. If a dictionary is passed in, it is
  assumed to already be a valid OpenAI function, a JSON schema with
- top-level 'title' key specified, an Anthropic format
- tool, or an Amazon Bedrock Converse format tool.
+ top-level `title` key specified, an Anthropic format tool, or an Amazon
+ Bedrock Converse format tool.
  strict:
- If True, model output is guaranteed to exactly match the JSON Schema
- provided in the function definition. If None, ``strict`` argument will not
+ If `True`, model output is guaranteed to exactly match the JSON Schema
+ provided in the function definition. If `None`, `strict` argument will not
  be included in function definition.

  Returns:
@@ -419,17 +351,8 @@
  Raises:
  ValueError: If function is not in a supported format.

- !!! warning "Behavior changed in 0.2.29"
- ``strict`` arg added.
-
- !!! warning "Behavior changed in 0.3.13"
- Support for Anthropic format tools added.
-
- !!! warning "Behavior changed in 0.3.14"
- Support for Amazon Bedrock Converse format tools added.
-
  !!! warning "Behavior changed in 0.3.16"
- 'description' and 'parameters' keys are now optional. Only 'name' is
+ `description` and `parameters` keys are now optional. Only `name` is
  required and guaranteed to be part of the output.
  """
  # an Anthropic format tool
@@ -527,45 +450,31 @@ def convert_to_openai_tool(
  ) -> dict[str, Any]:
  """Convert a tool-like object to an OpenAI tool schema.

- OpenAI tool schema reference:
- https://platform.openai.com/docs/api-reference/chat/create#chat-create-tools
+ [OpenAI tool schema reference](https://platform.openai.com/docs/api-reference/chat/create#chat-create-tools)

  Args:
  tool:
- Either a dictionary, a pydantic.BaseModel class, Python function, or
- BaseTool. If a dictionary is passed in, it is
- assumed to already be a valid OpenAI function, a JSON schema with
- top-level 'title' key specified, an Anthropic format
- tool, or an Amazon Bedrock Converse format tool.
+ Either a dictionary, a `pydantic.BaseModel` class, Python function, or
+ `BaseTool`. If a dictionary is passed in, it is assumed to already be a
+ valid OpenAI function, a JSON schema with top-level `title` key specified,
+ an Anthropic format tool, or an Amazon Bedrock Converse format tool.
  strict:
- If True, model output is guaranteed to exactly match the JSON Schema
- provided in the function definition. If None, ``strict`` argument will not
+ If `True`, model output is guaranteed to exactly match the JSON Schema
+ provided in the function definition. If `None`, `strict` argument will not
  be included in tool definition.

  Returns:
  A dict version of the passed in tool which is compatible with the
  OpenAI tool-calling API.

- !!! warning "Behavior changed in 0.2.29"
- ``strict`` arg added.
-
- !!! warning "Behavior changed in 0.3.13"
- Support for Anthropic format tools added.
-
- !!! warning "Behavior changed in 0.3.14"
- Support for Amazon Bedrock Converse format tools added.
-
  !!! warning "Behavior changed in 0.3.16"
- 'description' and 'parameters' keys are now optional. Only 'name' is
+ `description` and `parameters` keys are now optional. Only `name` is
  required and guaranteed to be part of the output.

  !!! warning "Behavior changed in 0.3.44"
  Return OpenAI Responses API-style tools unchanged. This includes
- any dict with "type" in "file_search", "function", "computer_use_preview",
- "web_search_preview".
-
- !!! warning "Behavior changed in 0.3.61"
- Added support for OpenAI's built-in code interpreter and remote MCP tools.
+ any dict with `"type"` in `"file_search"`, `"function"`,
+ `"computer_use_preview"`, `"web_search_preview"`.

  !!! warning "Behavior changed in 0.3.63"
  Added support for OpenAI's image generation built-in tool.
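For reference, a hedged sketch of `convert_to_openai_tool` with the `strict` flag described above (the `multiply` tool is invented for the example):

```python
from langchain_core.tools import tool
from langchain_core.utils.function_calling import convert_to_openai_tool


@tool
def multiply(a: int, b: int) -> int:
    """Multiply two integers."""
    return a * b


# Wraps the function schema as {"type": "function", "function": {...}}.
openai_tool = convert_to_openai_tool(multiply, strict=True)
print(openai_tool["type"])                    # "function"
print(openai_tool["function"]["name"])        # "multiply"
print(openai_tool["function"].get("strict"))  # True
```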
@@ -601,8 +510,8 @@ def convert_to_json_schema(

  Args:
  schema: The schema to convert.
- strict: If True, model output is guaranteed to exactly match the JSON Schema
- provided in the function definition. If None, ``strict`` argument will not
+ strict: If `True`, model output is guaranteed to exactly match the JSON Schema
+ provided in the function definition. If `None`, `strict` argument will not
  be included in function definition.

  Raises:
@@ -649,15 +558,15 @@ def tool_example_to_messages(

  The list of messages per example by default corresponds to:

- 1. ``HumanMessage``: contains the content from which content should be extracted.
- 2. ``AIMessage``: contains the extracted information from the model
- 3. ``ToolMessage``: contains confirmation to the model that the model requested a
- tool correctly.
+ 1. `HumanMessage`: contains the content from which content should be extracted.
+ 2. `AIMessage`: contains the extracted information from the model
+ 3. `ToolMessage`: contains confirmation to the model that the model requested a
+ tool correctly.

- If ``ai_response`` is specified, there will be a final ``AIMessage`` with that
+ If `ai_response` is specified, there will be a final `AIMessage` with that
  response.

- The ``ToolMessage`` is required because some chat models are hyper-optimized for
+ The `ToolMessage` is required because some chat models are hyper-optimized for
  agents rather than for an extraction use case.

  Args:
@@ -665,50 +574,46 @@ def tool_example_to_messages(
  tool_calls: Tool calls represented as Pydantic BaseModels
  tool_outputs: Tool call outputs.
  Does not need to be provided. If not provided, a placeholder value
- will be inserted. Defaults to None.
- ai_response: If provided, content for a final ``AIMessage``.
+ will be inserted.
+ ai_response: If provided, content for a final `AIMessage`.

  Returns:
  A list of messages

  Examples:
+ ```python
+ from typing import Optional
+ from pydantic import BaseModel, Field
+ from langchain_openai import ChatOpenAI

- .. code-block:: python

- from typing import Optional
- from pydantic import BaseModel, Field
- from langchain_openai import ChatOpenAI
-
-
- class Person(BaseModel):
- '''Information about a person.'''
-
- name: Optional[str] = Field(..., description="The name of the person")
- hair_color: Optional[str] = Field(
- ..., description="The color of the person's hair if known"
- )
- height_in_meters: Optional[str] = Field(
- ..., description="Height in METERS"
- )
+ class Person(BaseModel):
+ '''Information about a person.'''

+ name: str | None = Field(..., description="The name of the person")
+ hair_color: str | None = Field(
+ ..., description="The color of the person's hair if known"
+ )
+ height_in_meters: str | None = Field(..., description="Height in METERS")

- examples = [
- (
- "The ocean is vast and blue. It's more than 20,000 feet deep.",
- Person(name=None, height_in_meters=None, hair_color=None),
- ),
- (
- "Fiona traveled far from France to Spain.",
- Person(name="Fiona", height_in_meters=None, hair_color=None),
- ),
- ]

+ examples = [
+ (
+ "The ocean is vast and blue. It's more than 20,000 feet deep.",
+ Person(name=None, height_in_meters=None, hair_color=None),
+ ),
+ (
+ "Fiona traveled far from France to Spain.",
+ Person(name="Fiona", height_in_meters=None, hair_color=None),
+ ),
+ ]

- messages = []

- for txt, tool_call in examples:
- messages.extend(tool_example_to_messages(txt, [tool_call]))
+ messages = []

+ for txt, tool_call in examples:
+ messages.extend(tool_example_to_messages(txt, [tool_call]))
+ ```
  """
  messages: list[BaseMessage] = [HumanMessage(content=input)]
  openai_tool_calls = [
@@ -717,7 +622,7 @@ def tool_example_to_messages(
  "type": "function",
  "function": {
  # The name of the function right now corresponds to the name
- # of the pydantic model. This is implicit in the API right now,
+ # of the Pydantic model. This is implicit in the API right now,
  # and will be improved over time.
  "name": tool_call.__class__.__name__,
  "arguments": tool_call.model_dump_json(),
langchain_core/utils/html.py

@@ -43,7 +43,7 @@ def find_all_links(
  pattern: Regex to use for extracting links from raw HTML.

  Returns:
- list[str]: all links
+ all links
  """
  pattern = pattern or DEFAULT_LINK_REGEX
  return list(set(re.findall(pattern, raw_html)))
@@ -66,14 +66,14 @@ def extract_sub_links(
  url: the url of the HTML.
  base_url: the base URL to check for outside links against.
  pattern: Regex to use for extracting links from raw HTML.
- prevent_outside: If True, ignore external links which are not children
+ prevent_outside: If `True`, ignore external links which are not children
  of the base URL.
  exclude_prefixes: Exclude any URLs that start with one of these prefixes.
- continue_on_failure: If True, continue if parsing a specific link raises an
+ continue_on_failure: If `True`, continue if parsing a specific link raises an
  exception. Otherwise, raise the exception.

  Returns:
- list[str]: sub links.
+ sub links.
  """
  base_url_to_use = base_url if base_url is not None else url
  parsed_base_url = urlparse(base_url_to_use)
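An illustrative sketch of the two helpers above (the HTML snippet and URLs are invented; both functions deduplicate via a set, so the ordering of the returned lists is not guaranteed):

```python
from langchain_core.utils.html import extract_sub_links, find_all_links

html = '<a href="/docs/intro">Intro</a> <a href="https://example.org/x">X</a>'

# All href values found in the raw HTML (order may vary).
print(find_all_links(html))

# Relative links are resolved against the page URL; with the default
# prevent_outside=True, links outside the base URL are dropped.
print(extract_sub_links(html, "https://example.com/docs/"))
```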
langchain_core/utils/input.py

@@ -65,9 +65,9 @@ def print_text(

  Args:
  text: The text to print.
- color: The color to use. Defaults to None.
- end: The end character to use. Defaults to "".
- file: The file to write to. Defaults to None.
+ color: The color to use.
+ end: The end character to use.
+ file: The file to write to.
  """
  text_to_print = get_colored_text(text, color) if color else text
  print(text_to_print, end=end, file=file)
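A one-line sketch of the call documented above (assumes a terminal that renders ANSI colors):

```python
from langchain_core.utils.input import print_text

# Writes "Done!" in green, followed by a newline.
print_text("Done!", color="green", end="\n")
```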
langchain_core/utils/interactive_env.py

@@ -7,6 +7,6 @@ def is_interactive_env() -> bool:
  """Determine if running within IPython or Jupyter.

  Returns:
- True if running in an interactive environment, False otherwise.
+ True if running in an interactive environment, `False` otherwise.
  """
  return hasattr(sys, "ps2")
langchain_core/utils/iter.py

@@ -43,7 +43,7 @@ def tee_peer(
  """An individual iterator of a `.tee`.

  This function is a generator that yields items from the shared iterator
- ``iterator``. It buffers items until the least advanced iterator has
+ `iterator`. It buffers items until the least advanced iterator has
  yielded them as well. The buffer is shared with all other peers.

  Args:
@@ -89,38 +89,38 @@ def tee_peer(


  class Tee(Generic[T]):
- """Create ``n`` separate asynchronous iterators over ``iterable``.
+ """Create `n` separate asynchronous iterators over `iterable`.

- This splits a single ``iterable`` into multiple iterators, each providing
+ This splits a single `iterable` into multiple iterators, each providing
  the same items in the same order.
  All child iterators may advance separately but share the same items
- from ``iterable`` -- when the most advanced iterator retrieves an item,
+ from `iterable` -- when the most advanced iterator retrieves an item,
  it is buffered until the least advanced iterator has yielded it as well.
- A ``tee`` works lazily and can handle an infinite ``iterable``, provided
+ A `tee` works lazily and can handle an infinite `iterable`, provided
  that all iterators advance.

- .. code-block:: python
-
- async def derivative(sensor_data):
- previous, current = a.tee(sensor_data, n=2)
- await a.anext(previous) # advance one iterator
- return a.map(operator.sub, previous, current)
+ ```python
+ async def derivative(sensor_data):
+ previous, current = a.tee(sensor_data, n=2)
+ await a.anext(previous) # advance one iterator
+ return a.map(operator.sub, previous, current)
+ ```

  Unlike `itertools.tee`, `.tee` returns a custom type instead
  of a :py`tuple`. Like a tuple, it can be indexed, iterated and unpacked
  to get the child iterators. In addition, its `.tee.aclose` method
- immediately closes all children, and it can be used in an ``async with`` context
+ immediately closes all children, and it can be used in an `async with` context
  for the same effect.

- If ``iterable`` is an iterator and read elsewhere, ``tee`` will *not*
- provide these items. Also, ``tee`` must internally buffer each item until the
+ If `iterable` is an iterator and read elsewhere, `tee` will *not*
+ provide these items. Also, `tee` must internally buffer each item until the
  last iterator has yielded it; if the most and least advanced iterator differ
  by most data, using a :py`list` is more efficient (but not lazy).

- If the underlying iterable is concurrency safe (``anext`` may be awaited
+ If the underlying iterable is concurrency safe (`anext` may be awaited
  concurrently) the resulting iterators are concurrency safe as well. Otherwise,
  the iterators are safe if there is only ever one single "most advanced" iterator.
- To enforce sequential use of ``anext``, provide a ``lock``
+ To enforce sequential use of `anext`, provide a `lock`
  - e.g. an :py`asyncio.Lock` instance in an :py:mod:`asyncio` application -
  and access is automatically synchronised.

@@ -133,13 +133,13 @@ class Tee(Generic[T]):
  *,
  lock: AbstractContextManager[Any] | None = None,
  ):
- """Create a ``tee``.
+ """Create a `tee`.

  Args:
  iterable: The iterable to split.
- n: The number of iterators to create. Defaults to 2.
+ n: The number of iterators to create.
  lock: The lock to synchronise access to the shared buffers.
- Defaults to None.
+
  """
  self._iterator = iter(iterable)
  self._buffers: list[deque[T]] = [deque() for _ in range(n)]
@@ -207,7 +207,7 @@ def batch_iterate(size: int | None, iterable: Iterable[T]) -> Iterator[list[T]]:
  """Utility batching function.

  Args:
- size: The size of the batch. If None, returns a single batch.
+ size: The size of the batch. If `None`, returns a single batch.
  iterable: The iterable to batch.

  Yields: