langchain-core 1.0.0a6__py3-none-any.whl → 1.0.0a8__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.

Files changed (131)
  1. langchain_core/_api/__init__.py +3 -3
  2. langchain_core/_api/beta_decorator.py +6 -6
  3. langchain_core/_api/deprecation.py +21 -29
  4. langchain_core/_api/path.py +3 -6
  5. langchain_core/_import_utils.py +2 -3
  6. langchain_core/agents.py +10 -11
  7. langchain_core/caches.py +7 -7
  8. langchain_core/callbacks/base.py +91 -91
  9. langchain_core/callbacks/file.py +11 -11
  10. langchain_core/callbacks/manager.py +86 -89
  11. langchain_core/callbacks/stdout.py +8 -8
  12. langchain_core/callbacks/usage.py +4 -4
  13. langchain_core/chat_history.py +5 -5
  14. langchain_core/document_loaders/base.py +2 -2
  15. langchain_core/document_loaders/langsmith.py +15 -15
  16. langchain_core/documents/base.py +16 -16
  17. langchain_core/documents/compressor.py +4 -4
  18. langchain_core/example_selectors/length_based.py +1 -1
  19. langchain_core/example_selectors/semantic_similarity.py +17 -19
  20. langchain_core/exceptions.py +3 -3
  21. langchain_core/globals.py +3 -151
  22. langchain_core/indexing/api.py +44 -43
  23. langchain_core/indexing/base.py +30 -30
  24. langchain_core/indexing/in_memory.py +3 -3
  25. langchain_core/language_models/_utils.py +5 -7
  26. langchain_core/language_models/base.py +18 -132
  27. langchain_core/language_models/chat_models.py +118 -227
  28. langchain_core/language_models/fake.py +11 -11
  29. langchain_core/language_models/fake_chat_models.py +35 -29
  30. langchain_core/language_models/llms.py +91 -201
  31. langchain_core/load/dump.py +1 -1
  32. langchain_core/load/load.py +11 -12
  33. langchain_core/load/mapping.py +2 -4
  34. langchain_core/load/serializable.py +2 -4
  35. langchain_core/messages/ai.py +17 -20
  36. langchain_core/messages/base.py +23 -25
  37. langchain_core/messages/block_translators/__init__.py +2 -5
  38. langchain_core/messages/block_translators/anthropic.py +3 -3
  39. langchain_core/messages/block_translators/bedrock_converse.py +2 -2
  40. langchain_core/messages/block_translators/langchain_v0.py +2 -2
  41. langchain_core/messages/block_translators/openai.py +6 -6
  42. langchain_core/messages/content.py +120 -124
  43. langchain_core/messages/human.py +7 -7
  44. langchain_core/messages/system.py +7 -7
  45. langchain_core/messages/tool.py +24 -24
  46. langchain_core/messages/utils.py +67 -79
  47. langchain_core/output_parsers/base.py +12 -14
  48. langchain_core/output_parsers/json.py +4 -4
  49. langchain_core/output_parsers/list.py +3 -5
  50. langchain_core/output_parsers/openai_functions.py +3 -3
  51. langchain_core/output_parsers/openai_tools.py +3 -3
  52. langchain_core/output_parsers/pydantic.py +2 -2
  53. langchain_core/output_parsers/transform.py +13 -15
  54. langchain_core/output_parsers/xml.py +7 -9
  55. langchain_core/outputs/chat_generation.py +4 -4
  56. langchain_core/outputs/chat_result.py +1 -3
  57. langchain_core/outputs/generation.py +2 -2
  58. langchain_core/outputs/llm_result.py +5 -5
  59. langchain_core/prompts/__init__.py +1 -5
  60. langchain_core/prompts/base.py +10 -15
  61. langchain_core/prompts/chat.py +31 -82
  62. langchain_core/prompts/dict.py +2 -2
  63. langchain_core/prompts/few_shot.py +5 -5
  64. langchain_core/prompts/few_shot_with_templates.py +4 -4
  65. langchain_core/prompts/loading.py +3 -5
  66. langchain_core/prompts/prompt.py +4 -16
  67. langchain_core/prompts/string.py +2 -1
  68. langchain_core/prompts/structured.py +16 -23
  69. langchain_core/rate_limiters.py +3 -4
  70. langchain_core/retrievers.py +14 -14
  71. langchain_core/runnables/base.py +928 -1042
  72. langchain_core/runnables/branch.py +36 -40
  73. langchain_core/runnables/config.py +27 -35
  74. langchain_core/runnables/configurable.py +108 -124
  75. langchain_core/runnables/fallbacks.py +76 -72
  76. langchain_core/runnables/graph.py +39 -45
  77. langchain_core/runnables/graph_ascii.py +9 -11
  78. langchain_core/runnables/graph_mermaid.py +18 -19
  79. langchain_core/runnables/graph_png.py +8 -9
  80. langchain_core/runnables/history.py +114 -127
  81. langchain_core/runnables/passthrough.py +113 -139
  82. langchain_core/runnables/retry.py +43 -48
  83. langchain_core/runnables/router.py +23 -28
  84. langchain_core/runnables/schema.py +42 -44
  85. langchain_core/runnables/utils.py +28 -31
  86. langchain_core/stores.py +9 -13
  87. langchain_core/structured_query.py +8 -8
  88. langchain_core/tools/base.py +62 -115
  89. langchain_core/tools/convert.py +31 -35
  90. langchain_core/tools/render.py +1 -1
  91. langchain_core/tools/retriever.py +4 -4
  92. langchain_core/tools/simple.py +13 -17
  93. langchain_core/tools/structured.py +12 -15
  94. langchain_core/tracers/base.py +62 -64
  95. langchain_core/tracers/context.py +17 -35
  96. langchain_core/tracers/core.py +49 -53
  97. langchain_core/tracers/evaluation.py +11 -11
  98. langchain_core/tracers/event_stream.py +58 -60
  99. langchain_core/tracers/langchain.py +13 -13
  100. langchain_core/tracers/log_stream.py +22 -24
  101. langchain_core/tracers/root_listeners.py +14 -14
  102. langchain_core/tracers/run_collector.py +2 -4
  103. langchain_core/tracers/schemas.py +8 -8
  104. langchain_core/tracers/stdout.py +2 -1
  105. langchain_core/utils/__init__.py +0 -3
  106. langchain_core/utils/_merge.py +2 -2
  107. langchain_core/utils/aiter.py +24 -28
  108. langchain_core/utils/env.py +4 -4
  109. langchain_core/utils/function_calling.py +31 -41
  110. langchain_core/utils/html.py +3 -4
  111. langchain_core/utils/input.py +3 -3
  112. langchain_core/utils/iter.py +15 -19
  113. langchain_core/utils/json.py +3 -2
  114. langchain_core/utils/json_schema.py +6 -6
  115. langchain_core/utils/mustache.py +3 -5
  116. langchain_core/utils/pydantic.py +16 -18
  117. langchain_core/utils/usage.py +1 -1
  118. langchain_core/utils/utils.py +29 -29
  119. langchain_core/vectorstores/base.py +18 -21
  120. langchain_core/vectorstores/in_memory.py +14 -87
  121. langchain_core/vectorstores/utils.py +2 -2
  122. langchain_core/version.py +1 -1
  123. {langchain_core-1.0.0a6.dist-info → langchain_core-1.0.0a8.dist-info}/METADATA +10 -21
  124. langchain_core-1.0.0a8.dist-info/RECORD +176 -0
  125. {langchain_core-1.0.0a6.dist-info → langchain_core-1.0.0a8.dist-info}/WHEEL +1 -1
  126. langchain_core/messages/block_translators/ollama.py +0 -47
  127. langchain_core/prompts/pipeline.py +0 -138
  128. langchain_core/tracers/langchain_v1.py +0 -31
  129. langchain_core/utils/loading.py +0 -35
  130. langchain_core-1.0.0a6.dist-info/RECORD +0 -181
  131. langchain_core-1.0.0a6.dist-info/entry_points.txt +0 -4
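
Most of the source changes below follow a single pattern: the typing syntax moves to PEP 604, so Optional[X] becomes X | None and Union[A, B] becomes A | B, and Callable is now imported from collections.abc rather than typing. The two spellings are equivalent at runtime and to type checkers. A minimal sketch of the equivalence, using made-up function names rather than code from the package:

from typing import Optional, Union


# Old spelling, as in 1.0.0a6 (still valid Python, just no longer used here).
def truncate_old(text: Union[str, bytes], limit: Optional[int] = None) -> Union[str, bytes]:
    return text if limit is None else text[:limit]


# New spelling, as in 1.0.0a8.
def truncate_new(text: str | bytes, limit: int | None = None) -> str | bytes:
    return text if limit is None else text[:limit]


assert truncate_old("langchain", 4) == truncate_new("langchain", 4) == "lang"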
langchain_core/prompts/chat.py
@@ -8,10 +8,8 @@ from typing import (
     TYPE_CHECKING,
     Annotated,
     Any,
-    Optional,
     TypedDict,
     TypeVar,
-    Union,
     cast,
     overload,
 )
@@ -24,7 +22,6 @@ from pydantic import (
 )
 from typing_extensions import Self, override
 
-from langchain_core._api import deprecated
 from langchain_core.messages import (
     AIMessage,
     AnyMessage,
@@ -137,7 +134,7 @@ class MessagesPlaceholder(BaseMessagePromptTemplate):
     list. If False then a named argument with name `variable_name` must be passed
     in, even if the value is an empty list."""
 
-    n_messages: Optional[PositiveInt] = None
+    n_messages: PositiveInt | None = None
     """Maximum number of messages to include. If None, then will include all.
     Defaults to None."""
 
@@ -232,7 +229,7 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
         cls,
         template: str,
         template_format: PromptTemplateFormat = "f-string",
-        partial_variables: Optional[dict[str, Any]] = None,
+        partial_variables: dict[str, Any] | None = None,
         **kwargs: Any,
     ) -> Self:
         """Create a class from a string template.
@@ -261,15 +258,13 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
     @classmethod
     def from_template_file(
         cls,
-        template_file: Union[str, Path],
-        input_variables: list[str],  # noqa: ARG003 # Deprecated
+        template_file: str | Path,
         **kwargs: Any,
     ) -> Self:
         """Create a class from a template file.
 
         Args:
             template_file: path to a template file. String or Path.
-            input_variables: list of input variables.
             **kwargs: keyword arguments to pass to the constructor.
 
         Returns:
@@ -383,20 +378,20 @@ class ChatMessagePromptTemplate(BaseStringMessagePromptTemplate):
 
 
 class _TextTemplateParam(TypedDict, total=False):
-    text: Union[str, dict]
+    text: str | dict
 
 
 class _ImageTemplateParam(TypedDict, total=False):
-    image_url: Union[str, dict]
+    image_url: str | dict
 
 
 class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
     """Human message prompt template. This is a message sent from the user."""
 
-    prompt: Union[
-        StringPromptTemplate,
-        list[Union[StringPromptTemplate, ImagePromptTemplate, DictPromptTemplate]],
-    ]
+    prompt: (
+        StringPromptTemplate
+        | list[StringPromptTemplate | ImagePromptTemplate | DictPromptTemplate]
+    )
     """Prompt template."""
     additional_kwargs: dict = Field(default_factory=dict)
     """Additional keyword arguments to pass to the prompt template."""
@@ -406,13 +401,11 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
     @classmethod
     def from_template(
         cls: type[Self],
-        template: Union[
-            str,
-            list[Union[str, _TextTemplateParam, _ImageTemplateParam, dict[str, Any]]],
-        ],
+        template: str
+        | list[str | _TextTemplateParam | _ImageTemplateParam | dict[str, Any]],
         template_format: PromptTemplateFormat = "f-string",
         *,
-        partial_variables: Optional[dict[str, Any]] = None,
+        partial_variables: dict[str, Any] | None = None,
         **kwargs: Any,
     ) -> Self:
         """Create a class from a string template.
@@ -432,7 +425,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
             ValueError: If the template is not a string or list of strings.
         """
         if isinstance(template, str):
-            prompt: Union[StringPromptTemplate, list] = PromptTemplate.from_template(
+            prompt: StringPromptTemplate | list = PromptTemplate.from_template(
                 template,
                 template_format=template_format,
                 partial_variables=partial_variables,
@@ -529,7 +522,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
     @classmethod
     def from_template_file(
         cls: type[Self],
-        template_file: Union[str, Path],
+        template_file: str | Path,
         input_variables: list[str],
         **kwargs: Any,
     ) -> Self:
@@ -596,9 +589,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
         for prompt in self.prompt:
             inputs = {var: kwargs[var] for var in prompt.input_variables}
             if isinstance(prompt, StringPromptTemplate):
-                formatted: Union[str, ImageURL, dict[str, Any]] = prompt.format(
-                    **inputs
-                )
+                formatted: str | ImageURL | dict[str, Any] = prompt.format(**inputs)
                 content.append({"type": "text", "text": formatted})
             elif isinstance(prompt, ImagePromptTemplate):
                 formatted = prompt.format(**inputs)
@@ -628,7 +619,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
         for prompt in self.prompt:
             inputs = {var: kwargs[var] for var in prompt.input_variables}
             if isinstance(prompt, StringPromptTemplate):
-                formatted: Union[str, ImageURL, dict[str, Any]] = await prompt.aformat(
+                formatted: str | ImageURL | dict[str, Any] = await prompt.aformat(
                     **inputs
                 )
                 content.append({"type": "text", "text": formatted})
@@ -772,17 +763,14 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC):
         print(self.pretty_repr(html=is_interactive_env()))  # noqa: T201
 
 
-MessageLike = Union[BaseMessagePromptTemplate, BaseMessage, BaseChatPromptTemplate]
+MessageLike = BaseMessagePromptTemplate | BaseMessage | BaseChatPromptTemplate
 
-MessageLikeRepresentation = Union[
-    MessageLike,
-    tuple[
-        Union[str, type],
-        Union[str, list[dict], list[object]],
-    ],
-    str,
-    dict[str, Any],
-]
+MessageLikeRepresentation = (
+    MessageLike
+    | tuple[str | type, str | list[dict] | list[object]]
+    | str
+    | dict[str, Any]
+)
 
 
 class ChatPromptTemplate(BaseChatPromptTemplate):
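
The flattened MessageLike and MessageLikeRepresentation aliases cover the same input shapes as before. A short sketch of the forms ChatPromptTemplate.from_messages accepts; the prompt text is invented:

from langchain_core.messages import SystemMessage
from langchain_core.prompts import ChatPromptTemplate

# Each list element is a valid MessageLikeRepresentation:
# a BaseMessage, a (role, template) tuple, or a bare string.
prompt = ChatPromptTemplate.from_messages(
    [
        SystemMessage(content="You are a terse assistant."),  # BaseMessage
        ("human", "Summarize: {text}"),  # tuple[str | type, str]
        "Keep it under {word_limit} words.",  # str, treated as a human message template
    ]
)
messages = prompt.format_messages(text="the langchain-core changelog", word_limit="20")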
@@ -791,9 +779,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
     Use to create flexible templated prompts for chat models.
 
     Examples:
-
-        .. versionchanged:: 0.2.24
-
+        !!! warning "Behavior changed in 0.2.24"
             You can pass any Message-like formats supported by
             ``ChatPromptTemplate.from_messages()`` directly to ``ChatPromptTemplate()``
             init.
@@ -1107,41 +1093,6 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
         message = HumanMessagePromptTemplate(prompt=prompt_template)
         return cls.from_messages([message])
 
-    @classmethod
-    @deprecated("0.0.1", alternative="from_messages", pending=True)
-    def from_role_strings(
-        cls, string_messages: list[tuple[str, str]]
-    ) -> ChatPromptTemplate:
-        """Create a chat prompt template from a list of (role, template) tuples.
-
-        Args:
-            string_messages: list of (role, template) tuples.
-
-        Returns:
-            a chat prompt template.
-        """
-        return cls(
-            messages=[
-                ChatMessagePromptTemplate.from_template(template, role=role)
-                for role, template in string_messages
-            ]
-        )
-
-    @classmethod
-    @deprecated("0.0.1", alternative="from_messages", pending=True)
-    def from_strings(
-        cls, string_messages: list[tuple[type[BaseMessagePromptTemplate], str]]
-    ) -> ChatPromptTemplate:
-        """Create a chat prompt template from a list of (role class, template) tuples.
-
-        Args:
-            string_messages: list of (role class, template) tuples.
-
-        Returns:
-            a chat prompt template.
-        """
-        return cls.from_messages(string_messages)
-
     @classmethod
     def from_messages(
         cls,
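
Code still calling the removed from_role_strings or from_strings classmethods (both had long been pending deprecation in favor of from_messages, which they wrapped) now has to use from_messages directly. A hedged migration sketch with invented role/template pairs:

from langchain_core.prompts import ChatPromptTemplate

# Before: ChatPromptTemplate.from_role_strings([("system", ...), ("user", ...)])
# After, with the surviving constructor:
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You answer questions about {topic}."),
        ("user", "{question}"),
    ]
)
messages = prompt.format_messages(topic="Python typing", question="What is PEP 604?")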
@@ -1306,9 +1257,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
     @overload
     def __getitem__(self, index: slice) -> ChatPromptTemplate: ...
 
-    def __getitem__(
-        self, index: Union[int, slice]
-    ) -> Union[MessageLike, ChatPromptTemplate]:
+    def __getitem__(self, index: int | slice) -> MessageLike | ChatPromptTemplate:
         """Use to index into the chat template.
 
         Returns:
@@ -1331,7 +1280,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
         """Name of prompt type. Used for serialization."""
         return "chat"
 
-    def save(self, file_path: Union[Path, str]) -> None:
+    def save(self, file_path: Path | str) -> None:
         """Save prompt to file.
 
         Args:
@@ -1355,7 +1304,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
 
 def _create_template_from_message_type(
     message_type: str,
-    template: Union[str, list],
+    template: str | list,
     template_format: PromptTemplateFormat = "f-string",
 ) -> BaseMessagePromptTemplate:
     """Create a message prompt template from a message type and template string.
@@ -1427,7 +1376,7 @@ def _create_template_from_message_type(
 def _convert_to_message_template(
     message: MessageLikeRepresentation,
     template_format: PromptTemplateFormat = "f-string",
-) -> Union[BaseMessage, BaseMessagePromptTemplate, BaseChatPromptTemplate]:
+) -> BaseMessage | BaseMessagePromptTemplate | BaseChatPromptTemplate:
     """Instantiate a message from a variety of message formats.
 
     The message format can be one of the following:
@@ -1450,9 +1399,9 @@ def _convert_to_message_template(
         ValueError: If 2-tuple does not have 2 elements.
     """
     if isinstance(message, (BaseMessagePromptTemplate, BaseChatPromptTemplate)):
-        message_: Union[
-            BaseMessage, BaseMessagePromptTemplate, BaseChatPromptTemplate
-        ] = message
+        message_: BaseMessage | BaseMessagePromptTemplate | BaseChatPromptTemplate = (
+            message
+        )
     elif isinstance(message, BaseMessage):
         message_ = message
     elif isinstance(message, str):
langchain_core/prompts/dict.py
@@ -2,7 +2,7 @@
 
 import warnings
 from functools import cached_property
-from typing import Any, Literal, Optional
+from typing import Any, Literal
 
 from typing_extensions import override
 
@@ -48,7 +48,7 @@ class DictPromptTemplate(RunnableSerializable[dict, dict]):
 
     @override
     def invoke(
-        self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
+        self, input: dict, config: RunnableConfig | None = None, **kwargs: Any
     ) -> dict:
         return self._call_with_config(
             lambda x: self.format(**x),
langchain_core/prompts/few_shot.py
@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any, Literal, Optional, Union
+from typing import TYPE_CHECKING, Any, Literal
 
 from pydantic import (
     BaseModel,
@@ -33,11 +33,11 @@ if TYPE_CHECKING:
 class _FewShotPromptTemplateMixin(BaseModel):
     """Prompt template that contains few shot examples."""
 
-    examples: Optional[list[dict]] = None
+    examples: list[dict] | None = None
     """Examples to format into the prompt.
     Either this or example_selector should be provided."""
 
-    example_selector: Optional[BaseExampleSelector] = None
+    example_selector: BaseExampleSelector | None = None
     """ExampleSelector to choose the examples to format into the prompt.
     Either this or examples should be provided."""
 
@@ -229,7 +229,7 @@ class FewShotPromptTemplate(_FewShotPromptTemplateMixin, StringPromptTemplate):
         """Return the prompt type key."""
         return "few_shot"
 
-    def save(self, file_path: Union[Path, str]) -> None:
+    def save(self, file_path: Path | str) -> None:
         """Save the prompt template to a file.
 
         Args:
@@ -365,7 +365,7 @@ class FewShotChatMessagePromptTemplate(
     """A list of the names of the variables the prompt template will use
     to pass to the example_selector, if provided."""
 
-    example_prompt: Union[BaseMessagePromptTemplate, BaseChatPromptTemplate]
+    example_prompt: BaseMessagePromptTemplate | BaseChatPromptTemplate
     """The class to format each example."""
 
     @classmethod
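
The examples and example_selector fields keep their semantics under the new list[dict] | None and BaseExampleSelector | None annotations: provide one or the other. A small sketch with invented examples:

from langchain_core.prompts import FewShotPromptTemplate, PromptTemplate

example_prompt = PromptTemplate.from_template("Q: {question}\nA: {answer}")

few_shot = FewShotPromptTemplate(
    examples=[  # list[dict] | None; example_selector is left as None here
        {"question": "2 + 2?", "answer": "4"},
        {"question": "Capital of France?", "answer": "Paris"},
    ],
    example_prompt=example_prompt,
    suffix="Q: {question}\nA:",
    input_variables=["question"],
)
print(few_shot.format(question="3 + 3?"))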
langchain_core/prompts/few_shot_with_templates.py
@@ -1,7 +1,7 @@
 """Prompt template that contains few shot examples."""
 
 from pathlib import Path
-from typing import Any, Optional, Union
+from typing import Any
 
 from pydantic import ConfigDict, model_validator
 from typing_extensions import Self
@@ -17,7 +17,7 @@ from langchain_core.prompts.string import (
 class FewShotPromptWithTemplates(StringPromptTemplate):
     """Prompt template that contains few shot examples."""
 
-    examples: Optional[list[dict]] = None
+    examples: list[dict] | None = None
     """Examples to format into the prompt.
     Either this or example_selector should be provided."""
 
@@ -34,7 +34,7 @@ class FewShotPromptWithTemplates(StringPromptTemplate):
     example_separator: str = "\n\n"
     """String separator used to join the prefix, the examples, and suffix."""
 
-    prefix: Optional[StringPromptTemplate] = None
+    prefix: StringPromptTemplate | None = None
     """A PromptTemplate to put before the examples."""
 
     template_format: PromptTemplateFormat = "f-string"
@@ -210,7 +210,7 @@ class FewShotPromptWithTemplates(StringPromptTemplate):
         """Return the prompt type key."""
         return "few_shot_with_templates"
 
-    def save(self, file_path: Union[Path, str]) -> None:
+    def save(self, file_path: Path | str) -> None:
         """Save the prompt to a file.
 
         Args:
langchain_core/prompts/loading.py
@@ -2,8 +2,8 @@
 
 import json
 import logging
+from collections.abc import Callable
 from pathlib import Path
-from typing import Callable, Optional, Union
 
 import yaml
 
@@ -134,9 +134,7 @@ def _load_prompt(config: dict) -> PromptTemplate:
     return PromptTemplate(**config)
 
 
-def load_prompt(
-    path: Union[str, Path], encoding: Optional[str] = None
-) -> BasePromptTemplate:
+def load_prompt(path: str | Path, encoding: str | None = None) -> BasePromptTemplate:
     """Unified method for loading a prompt from LangChainHub or local fs.
 
     Args:
@@ -160,7 +158,7 @@ def load_prompt(
 
 
 def _load_prompt_from_file(
-    file: Union[str, Path], encoding: Optional[str] = None
+    file: str | Path, encoding: str | None = None
 ) -> BasePromptTemplate:
     """Load prompt from file."""
     # Convert file to a Path object.
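
load_prompt keeps its behavior under the narrower str | Path and str | None parameter types. A hedged round-trip sketch; the file name and prompt config are made up:

import json
from pathlib import Path

from langchain_core.prompts import PromptTemplate, load_prompt

config = {"_type": "prompt", "template": "Summarize: {text}", "input_variables": ["text"]}
path = Path("summary_prompt.json")  # hypothetical local file
path.write_text(json.dumps(config), encoding="utf-8")

prompt = load_prompt(path, encoding="utf-8")
assert isinstance(prompt, PromptTemplate)
print(prompt.format(text="the 1.0.0a8 release"))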
langchain_core/prompts/prompt.py
@@ -2,9 +2,8 @@
 
 from __future__ import annotations
 
-import warnings
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Optional, Union
+from typing import TYPE_CHECKING, Any
 
 from pydantic import BaseModel, model_validator
 from typing_extensions import override
@@ -234,32 +233,21 @@ class PromptTemplate(StringPromptTemplate):
     @classmethod
     def from_file(
         cls,
-        template_file: Union[str, Path],
-        input_variables: Optional[list[str]] = None,
-        encoding: Optional[str] = None,
+        template_file: str | Path,
+        encoding: str | None = None,
         **kwargs: Any,
     ) -> PromptTemplate:
         """Load a prompt from a file.
 
         Args:
             template_file: The path to the file containing the prompt template.
-            input_variables: [DEPRECATED] A list of variable names the final prompt
-                template will expect. Defaults to None.
             encoding: The encoding system for opening the template file.
                 If not provided, will use the OS default.
 
-        input_variables is ignored as from_file now delegates to from_template().
-
         Returns:
             The prompt loaded from the file.
         """
         template = Path(template_file).read_text(encoding=encoding)
-        if input_variables:
-            warnings.warn(
-                "`input_variables' is deprecated and ignored.",
-                DeprecationWarning,
-                stacklevel=2,
-            )
         return cls.from_template(template=template, **kwargs)
 
     @classmethod
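
PromptTemplate.from_file now delegates to from_template, so placeholders are inferred from the file contents and the deprecated input_variables argument is gone instead of being accepted and ignored. A sketch with an invented template file:

from pathlib import Path

from langchain_core.prompts import PromptTemplate

Path("greeting.txt").write_text("Hello {name}, welcome to {place}!", encoding="utf-8")

# No input_variables argument: the placeholders are discovered automatically.
prompt = PromptTemplate.from_file("greeting.txt", encoding="utf-8")
assert set(prompt.input_variables) == {"name", "place"}
print(prompt.format(name="Ada", place="the release notes"))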
@@ -268,7 +256,7 @@ class PromptTemplate(StringPromptTemplate):
         template: str,
         *,
         template_format: PromptTemplateFormat = "f-string",
-        partial_variables: Optional[dict[str, Any]] = None,
+        partial_variables: dict[str, Any] | None = None,
         **kwargs: Any,
     ) -> PromptTemplate:
         """Load a prompt template from a template.
langchain_core/prompts/string.py
@@ -4,8 +4,9 @@ from __future__ import annotations
 
 import warnings
 from abc import ABC
+from collections.abc import Callable
 from string import Formatter
-from typing import Any, Callable, Literal
+from typing import Any, Literal
 
 from pydantic import BaseModel, create_model
 
langchain_core/prompts/structured.py
@@ -1,11 +1,8 @@
 """Structured prompt template for a language model."""
 
-from collections.abc import AsyncIterator, Iterator, Mapping, Sequence
+from collections.abc import AsyncIterator, Callable, Iterator, Mapping, Sequence
 from typing import (
     Any,
-    Callable,
-    Optional,
-    Union,
 )
 
 from pydantic import BaseModel, Field
@@ -31,16 +28,16 @@ from langchain_core.utils import get_pydantic_field_names
 class StructuredPrompt(ChatPromptTemplate):
     """Structured prompt template for a language model."""
 
-    schema_: Union[dict, type]
+    schema_: dict | type
     """Schema for the structured prompt."""
     structured_output_kwargs: dict[str, Any] = Field(default_factory=dict)
 
     def __init__(
         self,
         messages: Sequence[MessageLikeRepresentation],
-        schema_: Optional[Union[dict, type[BaseModel]]] = None,
+        schema_: dict | type[BaseModel] | None = None,
         *,
-        structured_output_kwargs: Optional[dict[str, Any]] = None,
+        structured_output_kwargs: dict[str, Any] | None = None,
         template_format: PromptTemplateFormat = "f-string",
         **kwargs: Any,
     ) -> None:
@@ -80,7 +77,7 @@ class StructuredPrompt(ChatPromptTemplate):
     def from_messages_and_schema(
         cls,
         messages: Sequence[MessageLikeRepresentation],
-        schema: Union[dict, type],
+        schema: dict | type,
         **kwargs: Any,
     ) -> ChatPromptTemplate:
         """Create a chat prompt template from a variety of message formats.
@@ -127,26 +124,22 @@ class StructuredPrompt(ChatPromptTemplate):
     @override
     def __or__(
         self,
-        other: Union[
-            Runnable[Any, Other],
-            Callable[[Iterator[Any]], Iterator[Other]],
-            Callable[[AsyncIterator[Any]], AsyncIterator[Other]],
-            Callable[[Any], Other],
-            Mapping[str, Union[Runnable[Any, Other], Callable[[Any], Other], Any]],
-        ],
+        other: Runnable[Any, Other]
+        | Callable[[Iterator[Any]], Iterator[Other]]
+        | Callable[[AsyncIterator[Any]], AsyncIterator[Other]]
+        | Callable[[Any], Other]
+        | Mapping[str, Runnable[Any, Other] | Callable[[Any], Other] | Any],
    ) -> RunnableSerializable[dict, Other]:
        return self.pipe(other)
 
     def pipe(
         self,
-        *others: Union[
-            Runnable[Any, Other],
-            Callable[[Iterator[Any]], Iterator[Other]],
-            Callable[[AsyncIterator[Any]], AsyncIterator[Other]],
-            Callable[[Any], Other],
-            Mapping[str, Union[Runnable[Any, Other], Callable[[Any], Other], Any]],
-        ],
-        name: Optional[str] = None,
+        *others: Runnable[Any, Other]
+        | Callable[[Iterator[Any]], Iterator[Other]]
+        | Callable[[AsyncIterator[Any]], AsyncIterator[Other]]
+        | Callable[[Any], Other]
+        | Mapping[str, Runnable[Any, Other] | Callable[[Any], Other] | Any],
+        name: str | None = None,
     ) -> RunnableSerializable[dict, Other]:
         """Pipe the structured prompt to a language model.
 
langchain_core/rate_limiters.py
@@ -6,7 +6,6 @@ import abc
 import asyncio
 import threading
 import time
-from typing import Optional
 
 
 class BaseRateLimiter(abc.ABC):
@@ -26,7 +25,7 @@ class BaseRateLimiter(abc.ABC):
     the time spent waiting for tokens and the time spent making the request.
 
 
-    .. versionadded:: 0.2.24
+    !!! version-added "Added in version 0.2.24"
     """
 
     @abc.abstractmethod
@@ -122,7 +121,7 @@ class InMemoryRateLimiter(BaseRateLimiter):
         print(toc - tic)
 
 
-    .. versionadded:: 0.2.24
+    !!! version-added "Added in version 0.2.24"
 
     """  # noqa: E501
 
@@ -163,7 +162,7 @@ class InMemoryRateLimiter(BaseRateLimiter):
         # at a given time.
         self._consume_lock = threading.Lock()
         # The last time we tried to consume tokens.
-        self.last: Optional[float] = None
+        self.last: float | None = None
         self.check_every_n_seconds = check_every_n_seconds
 
     def _consume(self) -> bool:
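
Only the docstring markup changes here (Sphinx versionadded directives become MkDocs-style admonitions); the token-bucket interface itself is untouched. A usage sketch with arbitrary rates:

import time

from langchain_core.rate_limiters import InMemoryRateLimiter

limiter = InMemoryRateLimiter(
    requests_per_second=2,  # bucket refill rate
    check_every_n_seconds=0.05,  # how often acquire() polls for a free token
    max_bucket_size=2,  # cap on burst size
)

start = time.monotonic()
for _ in range(4):
    limiter.acquire()  # blocks until a token is available
print(f"4 acquisitions took {time.monotonic() - start:.2f}s")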
langchain_core/retrievers.py
@@ -24,7 +24,7 @@ from __future__ import annotations
 import warnings
 from abc import ABC, abstractmethod
 from inspect import signature
-from typing import TYPE_CHECKING, Any, Optional
+from typing import TYPE_CHECKING, Any
 
 from pydantic import ConfigDict
 from typing_extensions import Self, TypedDict, override
@@ -58,11 +58,11 @@ class LangSmithRetrieverParams(TypedDict, total=False):
 
     ls_retriever_name: str
     """Retriever name."""
-    ls_vector_store_provider: Optional[str]
+    ls_vector_store_provider: str | None
     """Vector store provider."""
-    ls_embedding_provider: Optional[str]
+    ls_embedding_provider: str | None
     """Embedding provider."""
-    ls_embedding_model: Optional[str]
+    ls_embedding_model: str | None
     """Embedding model."""
 
 
@@ -137,14 +137,14 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC):
 
     _new_arg_supported: bool = False
     _expects_other_args: bool = False
-    tags: Optional[list[str]] = None
+    tags: list[str] | None = None
     """Optional list of tags associated with the retriever. Defaults to None.
     These tags will be associated with each call to this retriever,
     and passed as arguments to the handlers defined in `callbacks`.
     You can use these to eg identify a specific instance of a retriever with its
     use case.
     """
-    metadata: Optional[dict[str, Any]] = None
+    metadata: dict[str, Any] | None = None
     """Optional metadata associated with the retriever. Defaults to None.
     This metadata will be associated with each call to this retriever,
     and passed as arguments to the handlers defined in `callbacks`.
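
BaseRetriever gets the same annotation cleanup on its tags and metadata fields. A minimal custom retriever sketch using the new spellings; the keyword-matching corpus is invented:

from langchain_core.callbacks import CallbackManagerForRetrieverRun
from langchain_core.documents import Document
from langchain_core.retrievers import BaseRetriever


class KeywordRetriever(BaseRetriever):
    """Toy retriever that returns documents whose text contains the query."""

    corpus: list[str]

    def _get_relevant_documents(
        self, query: str, *, run_manager: CallbackManagerForRetrieverRun
    ) -> list[Document]:
        return [Document(page_content=text) for text in self.corpus if query in text]


retriever = KeywordRetriever(
    corpus=["langchain-core 1.0.0a8 notes", "unrelated note"],
    tags=["example"],  # list[str] | None
    metadata={"source": "docs"},  # dict[str, Any] | None
)
docs = retriever.invoke("1.0.0a8")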
@@ -216,7 +216,7 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC):
 
     @override
     def invoke(
-        self, input: str, config: Optional[RunnableConfig] = None, **kwargs: Any
+        self, input: str, config: RunnableConfig | None = None, **kwargs: Any
     ) -> list[Document]:
         """Invoke the retriever to get relevant documents.
 
@@ -278,7 +278,7 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC):
     async def ainvoke(
         self,
         input: str,
-        config: Optional[RunnableConfig] = None,
+        config: RunnableConfig | None = None,
         **kwargs: Any,
     ) -> list[Document]:
         """Asynchronously invoke the retriever to get relevant documents.
@@ -376,9 +376,9 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC):
         query: str,
         *,
         callbacks: Callbacks = None,
-        tags: Optional[list[str]] = None,
-        metadata: Optional[dict[str, Any]] = None,
-        run_name: Optional[str] = None,
+        tags: list[str] | None = None,
+        metadata: dict[str, Any] | None = None,
+        run_name: str | None = None,
         **kwargs: Any,
     ) -> list[Document]:
         """Retrieve documents relevant to a query.
@@ -420,9 +420,9 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC):
         query: str,
         *,
         callbacks: Callbacks = None,
-        tags: Optional[list[str]] = None,
-        metadata: Optional[dict[str, Any]] = None,
-        run_name: Optional[str] = None,
+        tags: list[str] | None = None,
+        metadata: dict[str, Any] | None = None,
+        run_name: str | None = None,
         **kwargs: Any,
     ) -> list[Document]:
         """Asynchronously get documents relevant to a query.