langchain-core 1.0.0a5__py3-none-any.whl → 1.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (165)
  1. langchain_core/__init__.py +1 -1
  2. langchain_core/_api/__init__.py +3 -4
  3. langchain_core/_api/beta_decorator.py +23 -26
  4. langchain_core/_api/deprecation.py +51 -64
  5. langchain_core/_api/path.py +3 -6
  6. langchain_core/_import_utils.py +3 -4
  7. langchain_core/agents.py +20 -22
  8. langchain_core/caches.py +65 -66
  9. langchain_core/callbacks/__init__.py +1 -8
  10. langchain_core/callbacks/base.py +321 -336
  11. langchain_core/callbacks/file.py +44 -44
  12. langchain_core/callbacks/manager.py +436 -513
  13. langchain_core/callbacks/stdout.py +29 -30
  14. langchain_core/callbacks/streaming_stdout.py +32 -32
  15. langchain_core/callbacks/usage.py +60 -57
  16. langchain_core/chat_history.py +53 -68
  17. langchain_core/document_loaders/base.py +27 -25
  18. langchain_core/document_loaders/blob_loaders.py +1 -1
  19. langchain_core/document_loaders/langsmith.py +44 -48
  20. langchain_core/documents/__init__.py +23 -3
  21. langchain_core/documents/base.py +98 -90
  22. langchain_core/documents/compressor.py +10 -10
  23. langchain_core/documents/transformers.py +34 -35
  24. langchain_core/embeddings/fake.py +50 -54
  25. langchain_core/example_selectors/length_based.py +1 -1
  26. langchain_core/example_selectors/semantic_similarity.py +28 -32
  27. langchain_core/exceptions.py +21 -20
  28. langchain_core/globals.py +3 -151
  29. langchain_core/indexing/__init__.py +1 -1
  30. langchain_core/indexing/api.py +121 -126
  31. langchain_core/indexing/base.py +73 -75
  32. langchain_core/indexing/in_memory.py +4 -6
  33. langchain_core/language_models/__init__.py +14 -29
  34. langchain_core/language_models/_utils.py +58 -61
  35. langchain_core/language_models/base.py +53 -162
  36. langchain_core/language_models/chat_models.py +298 -387
  37. langchain_core/language_models/fake.py +11 -11
  38. langchain_core/language_models/fake_chat_models.py +42 -36
  39. langchain_core/language_models/llms.py +125 -235
  40. langchain_core/load/dump.py +9 -12
  41. langchain_core/load/load.py +18 -28
  42. langchain_core/load/mapping.py +2 -4
  43. langchain_core/load/serializable.py +42 -40
  44. langchain_core/messages/__init__.py +10 -16
  45. langchain_core/messages/ai.py +148 -148
  46. langchain_core/messages/base.py +58 -52
  47. langchain_core/messages/block_translators/__init__.py +27 -17
  48. langchain_core/messages/block_translators/anthropic.py +6 -6
  49. langchain_core/messages/block_translators/bedrock_converse.py +5 -5
  50. langchain_core/messages/block_translators/google_genai.py +505 -20
  51. langchain_core/messages/block_translators/google_vertexai.py +4 -32
  52. langchain_core/messages/block_translators/groq.py +117 -21
  53. langchain_core/messages/block_translators/langchain_v0.py +5 -5
  54. langchain_core/messages/block_translators/openai.py +11 -11
  55. langchain_core/messages/chat.py +2 -6
  56. langchain_core/messages/content.py +337 -328
  57. langchain_core/messages/function.py +6 -10
  58. langchain_core/messages/human.py +24 -31
  59. langchain_core/messages/modifier.py +2 -2
  60. langchain_core/messages/system.py +19 -29
  61. langchain_core/messages/tool.py +74 -90
  62. langchain_core/messages/utils.py +474 -504
  63. langchain_core/output_parsers/__init__.py +13 -10
  64. langchain_core/output_parsers/base.py +61 -61
  65. langchain_core/output_parsers/format_instructions.py +9 -4
  66. langchain_core/output_parsers/json.py +12 -10
  67. langchain_core/output_parsers/list.py +21 -23
  68. langchain_core/output_parsers/openai_functions.py +49 -47
  69. langchain_core/output_parsers/openai_tools.py +16 -21
  70. langchain_core/output_parsers/pydantic.py +13 -14
  71. langchain_core/output_parsers/string.py +5 -5
  72. langchain_core/output_parsers/transform.py +15 -17
  73. langchain_core/output_parsers/xml.py +35 -34
  74. langchain_core/outputs/__init__.py +1 -1
  75. langchain_core/outputs/chat_generation.py +18 -18
  76. langchain_core/outputs/chat_result.py +1 -3
  77. langchain_core/outputs/generation.py +10 -11
  78. langchain_core/outputs/llm_result.py +10 -10
  79. langchain_core/prompt_values.py +11 -17
  80. langchain_core/prompts/__init__.py +3 -27
  81. langchain_core/prompts/base.py +48 -56
  82. langchain_core/prompts/chat.py +275 -325
  83. langchain_core/prompts/dict.py +5 -5
  84. langchain_core/prompts/few_shot.py +81 -88
  85. langchain_core/prompts/few_shot_with_templates.py +11 -13
  86. langchain_core/prompts/image.py +12 -14
  87. langchain_core/prompts/loading.py +4 -6
  88. langchain_core/prompts/message.py +3 -3
  89. langchain_core/prompts/prompt.py +24 -39
  90. langchain_core/prompts/string.py +26 -10
  91. langchain_core/prompts/structured.py +49 -53
  92. langchain_core/rate_limiters.py +51 -60
  93. langchain_core/retrievers.py +61 -198
  94. langchain_core/runnables/base.py +1478 -1630
  95. langchain_core/runnables/branch.py +53 -57
  96. langchain_core/runnables/config.py +72 -89
  97. langchain_core/runnables/configurable.py +120 -137
  98. langchain_core/runnables/fallbacks.py +83 -79
  99. langchain_core/runnables/graph.py +91 -97
  100. langchain_core/runnables/graph_ascii.py +27 -28
  101. langchain_core/runnables/graph_mermaid.py +38 -50
  102. langchain_core/runnables/graph_png.py +15 -16
  103. langchain_core/runnables/history.py +135 -148
  104. langchain_core/runnables/passthrough.py +124 -150
  105. langchain_core/runnables/retry.py +46 -51
  106. langchain_core/runnables/router.py +25 -30
  107. langchain_core/runnables/schema.py +75 -80
  108. langchain_core/runnables/utils.py +60 -67
  109. langchain_core/stores.py +85 -121
  110. langchain_core/structured_query.py +8 -8
  111. langchain_core/sys_info.py +27 -29
  112. langchain_core/tools/__init__.py +1 -14
  113. langchain_core/tools/base.py +285 -229
  114. langchain_core/tools/convert.py +160 -155
  115. langchain_core/tools/render.py +10 -10
  116. langchain_core/tools/retriever.py +12 -11
  117. langchain_core/tools/simple.py +19 -24
  118. langchain_core/tools/structured.py +32 -39
  119. langchain_core/tracers/__init__.py +1 -9
  120. langchain_core/tracers/base.py +97 -99
  121. langchain_core/tracers/context.py +29 -52
  122. langchain_core/tracers/core.py +49 -53
  123. langchain_core/tracers/evaluation.py +11 -11
  124. langchain_core/tracers/event_stream.py +65 -64
  125. langchain_core/tracers/langchain.py +21 -21
  126. langchain_core/tracers/log_stream.py +45 -45
  127. langchain_core/tracers/memory_stream.py +3 -3
  128. langchain_core/tracers/root_listeners.py +16 -16
  129. langchain_core/tracers/run_collector.py +2 -4
  130. langchain_core/tracers/schemas.py +0 -129
  131. langchain_core/tracers/stdout.py +3 -3
  132. langchain_core/utils/__init__.py +1 -4
  133. langchain_core/utils/_merge.py +2 -2
  134. langchain_core/utils/aiter.py +57 -61
  135. langchain_core/utils/env.py +9 -9
  136. langchain_core/utils/function_calling.py +89 -186
  137. langchain_core/utils/html.py +7 -8
  138. langchain_core/utils/input.py +6 -6
  139. langchain_core/utils/interactive_env.py +1 -1
  140. langchain_core/utils/iter.py +36 -40
  141. langchain_core/utils/json.py +4 -3
  142. langchain_core/utils/json_schema.py +9 -9
  143. langchain_core/utils/mustache.py +8 -10
  144. langchain_core/utils/pydantic.py +33 -35
  145. langchain_core/utils/strings.py +6 -9
  146. langchain_core/utils/usage.py +1 -1
  147. langchain_core/utils/utils.py +66 -62
  148. langchain_core/vectorstores/base.py +182 -216
  149. langchain_core/vectorstores/in_memory.py +101 -176
  150. langchain_core/vectorstores/utils.py +5 -5
  151. langchain_core/version.py +1 -1
  152. langchain_core-1.0.3.dist-info/METADATA +69 -0
  153. langchain_core-1.0.3.dist-info/RECORD +172 -0
  154. {langchain_core-1.0.0a5.dist-info → langchain_core-1.0.3.dist-info}/WHEEL +1 -1
  155. langchain_core/memory.py +0 -120
  156. langchain_core/messages/block_translators/ollama.py +0 -47
  157. langchain_core/prompts/pipeline.py +0 -138
  158. langchain_core/pydantic_v1/__init__.py +0 -30
  159. langchain_core/pydantic_v1/dataclasses.py +0 -23
  160. langchain_core/pydantic_v1/main.py +0 -23
  161. langchain_core/tracers/langchain_v1.py +0 -31
  162. langchain_core/utils/loading.py +0 -35
  163. langchain_core-1.0.0a5.dist-info/METADATA +0 -77
  164. langchain_core-1.0.0a5.dist-info/RECORD +0 -181
  165. langchain_core-1.0.0a5.dist-info/entry_points.txt +0 -4
@@ -8,10 +8,8 @@ from typing import (
  TYPE_CHECKING,
  Annotated,
  Any,
- Optional,
  TypedDict,
  TypeVar,
- Union,
  cast,
  overload,
  )
@@ -24,7 +22,6 @@ from pydantic import (
  )
  from typing_extensions import Self, override

- from langchain_core._api import deprecated
  from langchain_core.messages import (
  AIMessage,
  AnyMessage,
@@ -62,84 +59,83 @@ class MessagesPlaceholder(BaseMessagePromptTemplate):

  Direct usage:

- .. code-block:: python
+ ```python
+ from langchain_core.prompts import MessagesPlaceholder

- from langchain_core.prompts import MessagesPlaceholder
+ prompt = MessagesPlaceholder("history")
+ prompt.format_messages() # raises KeyError

- prompt = MessagesPlaceholder("history")
- prompt.format_messages() # raises KeyError
+ prompt = MessagesPlaceholder("history", optional=True)
+ prompt.format_messages() # returns empty list []

- prompt = MessagesPlaceholder("history", optional=True)
- prompt.format_messages() # returns empty list []
-
- prompt.format_messages(
- history=[
- ("system", "You are an AI assistant."),
- ("human", "Hello!"),
- ]
- )
- # -> [
- # SystemMessage(content="You are an AI assistant."),
- # HumanMessage(content="Hello!"),
- # ]
+ prompt.format_messages(
+ history=[
+ ("system", "You are an AI assistant."),
+ ("human", "Hello!"),
+ ]
+ )
+ # -> [
+ # SystemMessage(content="You are an AI assistant."),
+ # HumanMessage(content="Hello!"),
+ # ]
+ ```

  Building a prompt with chat history:

- .. code-block:: python
-
- from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
+ ```python
+ from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

- prompt = ChatPromptTemplate.from_messages(
- [
- ("system", "You are a helpful assistant."),
- MessagesPlaceholder("history"),
- ("human", "{question}"),
- ]
- )
- prompt.invoke(
- {
- "history": [("human", "what's 5 + 2"), ("ai", "5 + 2 is 7")],
- "question": "now multiply that by 4",
- }
- )
- # -> ChatPromptValue(messages=[
- # SystemMessage(content="You are a helpful assistant."),
- # HumanMessage(content="what's 5 + 2"),
- # AIMessage(content="5 + 2 is 7"),
- # HumanMessage(content="now multiply that by 4"),
- # ])
+ prompt = ChatPromptTemplate.from_messages(
+ [
+ ("system", "You are a helpful assistant."),
+ MessagesPlaceholder("history"),
+ ("human", "{question}"),
+ ]
+ )
+ prompt.invoke(
+ {
+ "history": [("human", "what's 5 + 2"), ("ai", "5 + 2 is 7")],
+ "question": "now multiply that by 4",
+ }
+ )
+ # -> ChatPromptValue(messages=[
+ # SystemMessage(content="You are a helpful assistant."),
+ # HumanMessage(content="what's 5 + 2"),
+ # AIMessage(content="5 + 2 is 7"),
+ # HumanMessage(content="now multiply that by 4"),
+ # ])
+ ```

  Limiting the number of messages:

- .. code-block:: python
-
- from langchain_core.prompts import MessagesPlaceholder
+ ```python
+ from langchain_core.prompts import MessagesPlaceholder

- prompt = MessagesPlaceholder("history", n_messages=1)
-
- prompt.format_messages(
- history=[
- ("system", "You are an AI assistant."),
- ("human", "Hello!"),
- ]
- )
- # -> [
- # HumanMessage(content="Hello!"),
- # ]
+ prompt = MessagesPlaceholder("history", n_messages=1)

+ prompt.format_messages(
+ history=[
+ ("system", "You are an AI assistant."),
+ ("human", "Hello!"),
+ ]
+ )
+ # -> [
+ # HumanMessage(content="Hello!"),
+ # ]
+ ```
  """

  variable_name: str
  """Name of variable to use as messages."""

  optional: bool = False
- """If True format_messages can be called with no arguments and will return an empty
- list. If False then a named argument with name `variable_name` must be passed
- in, even if the value is an empty list."""
+ """If `True` format_messages can be called with no arguments and will return an
+ empty list. If `False` then a named argument with name `variable_name` must be
+ passed in, even if the value is an empty list."""

- n_messages: Optional[PositiveInt] = None
- """Maximum number of messages to include. If None, then will include all.
- Defaults to None."""
+ n_messages: PositiveInt | None = None
+ """Maximum number of messages to include. If `None`, then will include all.
+ """

  def __init__(
  self, variable_name: str, *, optional: bool = False, **kwargs: Any
@@ -148,10 +144,9 @@ class MessagesPlaceholder(BaseMessagePromptTemplate):

  Args:
  variable_name: Name of variable to use as messages.
- optional: If True format_messages can be called with no arguments and will
- return an empty list. If False then a named argument with name
+ optional: If `True` format_messages can be called with no arguments and will
+ return an empty list. If `False` then a named argument with name
  `variable_name` must be passed in, even if the value is an empty list.
- Defaults to False.]
  """
  # mypy can't detect the init which is defined in the parent class
  # b/c these are BaseModel classes.
@@ -199,7 +194,7 @@ class MessagesPlaceholder(BaseMessagePromptTemplate):
  """Human-readable representation.

  Args:
- html: Whether to format as HTML. Defaults to False.
+ html: Whether to format as HTML.

  Returns:
  Human-readable representation.
@@ -232,20 +227,20 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
  cls,
  template: str,
  template_format: PromptTemplateFormat = "f-string",
- partial_variables: Optional[dict[str, Any]] = None,
+ partial_variables: dict[str, Any] | None = None,
  **kwargs: Any,
  ) -> Self:
  """Create a class from a string template.

  Args:
  template: a template.
- template_format: format of the template. Defaults to "f-string".
+ template_format: format of the template.
  partial_variables: A dictionary of variables that can be used to partially
- fill in the template. For example, if the template is
- `"{variable1} {variable2}"`, and `partial_variables` is
- `{"variable1": "foo"}`, then the final prompt will be
- `"foo {variable2}"`.
- Defaults to None.
+ fill in the template. For example, if the template is
+ `"{variable1} {variable2}"`, and `partial_variables` is
+ `{"variable1": "foo"}`, then the final prompt will be
+ `"foo {variable2}"`.
+
  **kwargs: keyword arguments to pass to the constructor.

  Returns:
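As an aside, a minimal usage sketch of the `partial_variables` parameter documented in the hunk above, using `ChatMessagePromptTemplate` (a concrete subclass of `BaseStringMessagePromptTemplate`); the expected output in the final comment is an assumption, not part of this diff:

```python
from langchain_core.prompts import ChatMessagePromptTemplate

# Pre-fill one template variable at construction time via partial_variables.
msg_template = ChatMessagePromptTemplate.from_template(
    "{variable1} {variable2}",
    role="assistant",
    partial_variables={"variable1": "foo"},
)

# Only the remaining variable is supplied at format time.
msg_template.format(variable2="bar")
# expected: ChatMessage(content="foo bar", role="assistant")
```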
@@ -261,15 +256,13 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
  @classmethod
  def from_template_file(
  cls,
- template_file: Union[str, Path],
- input_variables: list[str], # noqa: ARG003 # Deprecated
+ template_file: str | Path,
  **kwargs: Any,
  ) -> Self:
  """Create a class from a template file.

  Args:
  template_file: path to a template file. String or Path.
- input_variables: list of input variables.
  **kwargs: keyword arguments to pass to the constructor.

  Returns:
@@ -336,7 +329,7 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
  """Human-readable representation.

  Args:
- html: Whether to format as HTML. Defaults to False.
+ html: Whether to format as HTML.

  Returns:
  Human-readable representation.
@@ -383,20 +376,20 @@ class ChatMessagePromptTemplate(BaseStringMessagePromptTemplate):


  class _TextTemplateParam(TypedDict, total=False):
- text: Union[str, dict]
+ text: str | dict


  class _ImageTemplateParam(TypedDict, total=False):
- image_url: Union[str, dict]
+ image_url: str | dict


  class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
  """Human message prompt template. This is a message sent from the user."""

- prompt: Union[
- StringPromptTemplate,
- list[Union[StringPromptTemplate, ImagePromptTemplate, DictPromptTemplate]],
- ]
+ prompt: (
+ StringPromptTemplate
+ | list[StringPromptTemplate | ImagePromptTemplate | DictPromptTemplate]
+ )
  """Prompt template."""
  additional_kwargs: dict = Field(default_factory=dict)
  """Additional keyword arguments to pass to the prompt template."""
@@ -406,13 +399,11 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
  @classmethod
  def from_template(
  cls: type[Self],
- template: Union[
- str,
- list[Union[str, _TextTemplateParam, _ImageTemplateParam, dict[str, Any]]],
- ],
+ template: str
+ | list[str | _TextTemplateParam | _ImageTemplateParam | dict[str, Any]],
  template_format: PromptTemplateFormat = "f-string",
  *,
- partial_variables: Optional[dict[str, Any]] = None,
+ partial_variables: dict[str, Any] | None = None,
  **kwargs: Any,
  ) -> Self:
  """Create a class from a string template.
@@ -420,9 +411,9 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
  Args:
  template: a template.
  template_format: format of the template.
- Options are: 'f-string', 'mustache', 'jinja2'. Defaults to "f-string".
+ Options are: 'f-string', 'mustache', 'jinja2'.
  partial_variables: A dictionary of variables that can be used too partially.
- Defaults to None.
+
  **kwargs: keyword arguments to pass to the constructor.

  Returns:
@@ -432,7 +423,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
  ValueError: If the template is not a string or list of strings.
  """
  if isinstance(template, str):
- prompt: Union[StringPromptTemplate, list] = PromptTemplate.from_template(
+ prompt: StringPromptTemplate | list = PromptTemplate.from_template(
  template,
  template_format=template_format,
  partial_variables=partial_variables,
@@ -529,7 +520,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
  @classmethod
  def from_template_file(
  cls: type[Self],
- template_file: Union[str, Path],
+ template_file: str | Path,
  input_variables: list[str],
  **kwargs: Any,
  ) -> Self:
@@ -596,9 +587,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
  for prompt in self.prompt:
  inputs = {var: kwargs[var] for var in prompt.input_variables}
  if isinstance(prompt, StringPromptTemplate):
- formatted: Union[str, ImageURL, dict[str, Any]] = prompt.format(
- **inputs
- )
+ formatted: str | ImageURL | dict[str, Any] = prompt.format(**inputs)
  content.append({"type": "text", "text": formatted})
  elif isinstance(prompt, ImagePromptTemplate):
  formatted = prompt.format(**inputs)
@@ -628,7 +617,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
  for prompt in self.prompt:
  inputs = {var: kwargs[var] for var in prompt.input_variables}
  if isinstance(prompt, StringPromptTemplate):
- formatted: Union[str, ImageURL, dict[str, Any]] = await prompt.aformat(
+ formatted: str | ImageURL | dict[str, Any] = await prompt.aformat(
  **inputs
  )
  content.append({"type": "text", "text": formatted})
@@ -647,7 +636,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
  """Human-readable representation.

  Args:
- html: Whether to format as HTML. Defaults to False.
+ html: Whether to format as HTML.

  Returns:
  Human-readable representation.
@@ -694,7 +683,7 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC):

  Args:
  **kwargs: keyword arguments to use for filling in template variables
- in all the template messages in this chat template.
+ in all the template messages in this chat template.

  Returns:
  formatted string.
@@ -706,7 +695,7 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC):

  Args:
  **kwargs: keyword arguments to use for filling in template variables
- in all the template messages in this chat template.
+ in all the template messages in this chat template.

  Returns:
  formatted string.
@@ -760,7 +749,7 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC):
  """Human-readable representation.

  Args:
- html: Whether to format as HTML. Defaults to False.
+ html: Whether to format as HTML.

  Returns:
  Human-readable representation.
@@ -772,17 +761,14 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC):
  print(self.pretty_repr(html=is_interactive_env())) # noqa: T201


- MessageLike = Union[BaseMessagePromptTemplate, BaseMessage, BaseChatPromptTemplate]
+ MessageLike = BaseMessagePromptTemplate | BaseMessage | BaseChatPromptTemplate

- MessageLikeRepresentation = Union[
- MessageLike,
- tuple[
- Union[str, type],
- Union[str, list[dict], list[object]],
- ],
- str,
- dict[str, Any],
- ]
+ MessageLikeRepresentation = (
+ MessageLike
+ | tuple[str | type, str | list[dict] | list[object]]
+ | str
+ | dict[str, Any]
+ )


  class ChatPromptTemplate(BaseChatPromptTemplate):
@@ -790,114 +776,104 @@ class ChatPromptTemplate(BaseChatPromptTemplate):

  Use to create flexible templated prompts for chat models.

- Examples:
-
- .. versionchanged:: 0.2.24
+ ```python
+ from langchain_core.prompts import ChatPromptTemplate

- You can pass any Message-like formats supported by
- ``ChatPromptTemplate.from_messages()`` directly to ``ChatPromptTemplate()``
- init.
-
- .. code-block:: python
-
- from langchain_core.prompts import ChatPromptTemplate
-
- template = ChatPromptTemplate(
- [
- ("system", "You are a helpful AI bot. Your name is {name}."),
- ("human", "Hello, how are you doing?"),
- ("ai", "I'm doing well, thanks!"),
- ("human", "{user_input}"),
- ]
- )
-
- prompt_value = template.invoke(
- {
- "name": "Bob",
- "user_input": "What is your name?",
- }
- )
- # Output:
- # ChatPromptValue(
- # messages=[
- # SystemMessage(content='You are a helpful AI bot. Your name is Bob.'),
- # HumanMessage(content='Hello, how are you doing?'),
- # AIMessage(content="I'm doing well, thanks!"),
- # HumanMessage(content='What is your name?')
- # ]
- # )
-
- Messages Placeholder:
-
- .. code-block:: python
+ template = ChatPromptTemplate(
+ [
+ ("system", "You are a helpful AI bot. Your name is {name}."),
+ ("human", "Hello, how are you doing?"),
+ ("ai", "I'm doing well, thanks!"),
+ ("human", "{user_input}"),
+ ]
+ )

- # In addition to Human/AI/Tool/Function messages,
- # you can initialize the template with a MessagesPlaceholder
- # either using the class directly or with the shorthand tuple syntax:
+ prompt_value = template.invoke(
+ {
+ "name": "Bob",
+ "user_input": "What is your name?",
+ }
+ )
+ # Output:
+ # ChatPromptValue(
+ # messages=[
+ # SystemMessage(content='You are a helpful AI bot. Your name is Bob.'),
+ # HumanMessage(content='Hello, how are you doing?'),
+ # AIMessage(content="I'm doing well, thanks!"),
+ # HumanMessage(content='What is your name?')
+ # ]
+ # )
+ ```
+
+ !!! note "Messages Placeholder"
+
+ ```python
+ # In addition to Human/AI/Tool/Function messages,
+ # you can initialize the template with a MessagesPlaceholder
+ # either using the class directly or with the shorthand tuple syntax:
+
+ template = ChatPromptTemplate(
+ [
+ ("system", "You are a helpful AI bot."),
+ # Means the template will receive an optional list of messages under
+ # the "conversation" key
+ ("placeholder", "{conversation}"),
+ # Equivalently:
+ # MessagesPlaceholder(variable_name="conversation", optional=True)
+ ]
+ )

- template = ChatPromptTemplate(
- [
- ("system", "You are a helpful AI bot."),
- # Means the template will receive an optional list of messages under
- # the "conversation" key
- ("placeholder", "{conversation}"),
- # Equivalently:
- # MessagesPlaceholder(variable_name="conversation", optional=True)
+ prompt_value = template.invoke(
+ {
+ "conversation": [
+ ("human", "Hi!"),
+ ("ai", "How can I assist you today?"),
+ ("human", "Can you make me an ice cream sundae?"),
+ ("ai", "No."),
  ]
- )
-
- prompt_value = template.invoke(
- {
- "conversation": [
- ("human", "Hi!"),
- ("ai", "How can I assist you today?"),
- ("human", "Can you make me an ice cream sundae?"),
- ("ai", "No."),
- ]
- }
- )
+ }
+ )

- # Output:
- # ChatPromptValue(
- # messages=[
- # SystemMessage(content='You are a helpful AI bot.'),
- # HumanMessage(content='Hi!'),
- # AIMessage(content='How can I assist you today?'),
- # HumanMessage(content='Can you make me an ice cream sundae?'),
- # AIMessage(content='No.'),
- # ]
- # )
+ # Output:
+ # ChatPromptValue(
+ # messages=[
+ # SystemMessage(content='You are a helpful AI bot.'),
+ # HumanMessage(content='Hi!'),
+ # AIMessage(content='How can I assist you today?'),
+ # HumanMessage(content='Can you make me an ice cream sundae?'),
+ # AIMessage(content='No.'),
+ # ]
+ # )
+ ```

- Single-variable template:
+ !!! note "Single-variable template"

  If your prompt has only a single input variable (i.e., 1 instance of "{variable_nams}"),
  and you invoke the template with a non-dict object, the prompt template will
  inject the provided argument into that variable location.

+ ```python
+ from langchain_core.prompts import ChatPromptTemplate

- .. code-block:: python
-
- from langchain_core.prompts import ChatPromptTemplate
-
- template = ChatPromptTemplate(
- [
- ("system", "You are a helpful AI bot. Your name is Carl."),
- ("human", "{user_input}"),
- ]
- )
-
- prompt_value = template.invoke("Hello, there!")
- # Equivalent to
- # prompt_value = template.invoke({"user_input": "Hello, there!"})
-
- # Output:
- # ChatPromptValue(
- # messages=[
- # SystemMessage(content='You are a helpful AI bot. Your name is Carl.'),
- # HumanMessage(content='Hello, there!'),
- # ]
- # )
+ template = ChatPromptTemplate(
+ [
+ ("system", "You are a helpful AI bot. Your name is Carl."),
+ ("human", "{user_input}"),
+ ]
+ )

+ prompt_value = template.invoke("Hello, there!")
+ # Equivalent to
+ # prompt_value = template.invoke({"user_input": "Hello, there!"})
+
+ # Output:
+ # ChatPromptValue(
+ # messages=[
+ # SystemMessage(content='You are a helpful AI bot. Your name is Carl.'),
+ # HumanMessage(content='Hello, there!'),
+ # ]
+ # )
+ ```
  """ # noqa: E501

  messages: Annotated[list[MessageLike], SkipValidation()]
@@ -915,50 +891,59 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
  """Create a chat prompt template from a variety of message formats.

  Args:
- messages: sequence of message representations.
- A message can be represented using the following formats:
- (1) BaseMessagePromptTemplate, (2) BaseMessage, (3) 2-tuple of
- (message type, template); e.g., ("human", "{user_input}"),
- (4) 2-tuple of (message class, template), (5) a string which is
- shorthand for ("human", template); e.g., "{user_input}".
- template_format: format of the template. Defaults to "f-string".
+ messages: Sequence of message representations.
+
+ A message can be represented using the following formats:
+
+ 1. `BaseMessagePromptTemplate`
+ 2. `BaseMessage`
+ 3. 2-tuple of `(message type, template)`; e.g.,
+ `("human", "{user_input}")`
+ 4. 2-tuple of `(message class, template)`
+ 5. A string which is shorthand for `("human", template)`; e.g.,
+ `"{user_input}"`
+ template_format: Format of the template.
  input_variables: A list of the names of the variables whose values are
  required as inputs to the prompt.
  optional_variables: A list of the names of the variables for placeholder
  or MessagePlaceholder that are optional.
+
  These variables are auto inferred from the prompt and user need not
  provide them.
  partial_variables: A dictionary of the partial variables the prompt
- template carries. Partial variables populate the template so that you
- don't need to pass them in every time you call the prompt.
+ template carries.
+
+ Partial variables populate the template so that you don't need to pass
+ them in every time you call the prompt.
  validate_template: Whether to validate the template.
  input_types: A dictionary of the types of the variables the prompt template
- expects. If not provided, all variables are assumed to be strings.
+ expects.
+
+ If not provided, all variables are assumed to be strings.

  Examples:
  Instantiation from a list of message templates:

- .. code-block:: python
-
- template = ChatPromptTemplate(
- [
- ("human", "Hello, how are you?"),
- ("ai", "I'm doing well, thanks!"),
- ("human", "That's good to hear."),
- ]
- )
+ ```python
+ template = ChatPromptTemplate(
+ [
+ ("human", "Hello, how are you?"),
+ ("ai", "I'm doing well, thanks!"),
+ ("human", "That's good to hear."),
+ ]
+ )
+ ```

  Instantiation from mixed message formats:

- .. code-block:: python
-
- template = ChatPromptTemplate(
- [
- SystemMessage(content="hello"),
- ("human", "Hello, how are you?"),
- ]
- )
-
+ ```python
+ template = ChatPromptTemplate(
+ [
+ SystemMessage(content="hello"),
+ ("human", "Hello, how are you?"),
+ ]
+ )
+ ```
  """
  messages_ = [
  _convert_to_message_template(message, template_format)
@@ -988,10 +973,10 @@ class ChatPromptTemplate(BaseChatPromptTemplate):

  @classmethod
  def get_lc_namespace(cls) -> list[str]:
- """Get the namespace of the langchain object.
+ """Get the namespace of the LangChain object.

  Returns:
- ``["langchain", "prompts", "chat"]``
+ `["langchain", "prompts", "chat"]`
  """
  return ["langchain", "prompts", "chat"]

@@ -1107,41 +1092,6 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
  message = HumanMessagePromptTemplate(prompt=prompt_template)
  return cls.from_messages([message])

- @classmethod
- @deprecated("0.0.1", alternative="from_messages", pending=True)
- def from_role_strings(
- cls, string_messages: list[tuple[str, str]]
- ) -> ChatPromptTemplate:
- """Create a chat prompt template from a list of (role, template) tuples.
-
- Args:
- string_messages: list of (role, template) tuples.
-
- Returns:
- a chat prompt template.
- """
- return cls(
- messages=[
- ChatMessagePromptTemplate.from_template(template, role=role)
- for role, template in string_messages
- ]
- )
-
- @classmethod
- @deprecated("0.0.1", alternative="from_messages", pending=True)
- def from_strings(
- cls, string_messages: list[tuple[type[BaseMessagePromptTemplate], str]]
- ) -> ChatPromptTemplate:
- """Create a chat prompt template from a list of (role class, template) tuples.
-
- Args:
- string_messages: list of (role class, template) tuples.
-
- Returns:
- a chat prompt template.
- """
- return cls.from_messages(string_messages)
-
  @classmethod
  def from_messages(
  cls,
@@ -1153,35 +1103,39 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
  Examples:
  Instantiation from a list of message templates:

- .. code-block:: python
-
- template = ChatPromptTemplate.from_messages(
- [
- ("human", "Hello, how are you?"),
- ("ai", "I'm doing well, thanks!"),
- ("human", "That's good to hear."),
- ]
- )
+ ```python
+ template = ChatPromptTemplate.from_messages(
+ [
+ ("human", "Hello, how are you?"),
+ ("ai", "I'm doing well, thanks!"),
+ ("human", "That's good to hear."),
+ ]
+ )
+ ```

  Instantiation from mixed message formats:

- .. code-block:: python
+ ```python
+ template = ChatPromptTemplate.from_messages(
+ [
+ SystemMessage(content="hello"),
+ ("human", "Hello, how are you?"),
+ ]
+ )
+ ```
+ Args:
+ messages: Sequence of message representations.

- template = ChatPromptTemplate.from_messages(
- [
- SystemMessage(content="hello"),
- ("human", "Hello, how are you?"),
- ]
- )
+ A message can be represented using the following formats:

- Args:
- messages: sequence of message representations.
- A message can be represented using the following formats:
- (1) BaseMessagePromptTemplate, (2) BaseMessage, (3) 2-tuple of
- (message type, template); e.g., ("human", "{user_input}"),
- (4) 2-tuple of (message class, template), (5) a string which is
- shorthand for ("human", template); e.g., "{user_input}".
- template_format: format of the template. Defaults to "f-string".
+ 1. `BaseMessagePromptTemplate`
+ 2. `BaseMessage`
+ 3. 2-tuple of `(message type, template)`; e.g.,
+ `("human", "{user_input}")`
+ 4. 2-tuple of `(message class, template)`
+ 5. A string which is shorthand for `("human", template)`; e.g.,
+ `"{user_input}"`
+ template_format: format of the template.

  Returns:
  a chat prompt template.
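For reference, a short sketch (not part of the diff) exercising the message formats enumerated in the updated `from_messages` docstring above; the variable names and message text are illustrative:

```python
from langchain_core.messages import SystemMessage
from langchain_core.prompts import ChatPromptTemplate

template = ChatPromptTemplate.from_messages(
    [
        SystemMessage(content="You are a helpful assistant."),  # format 2: BaseMessage
        ("human", "Hello, how are you?"),  # format 3: 2-tuple of (message type, template)
        "{user_input}",  # format 5: shorthand for ("human", "{user_input}")
    ]
)
template.format_messages(user_input="What can you do?")
```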
@@ -1194,7 +1148,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):

  Args:
  **kwargs: keyword arguments to use for filling in template variables
- in all the template messages in this chat template.
+ in all the template messages in this chat template.

  Raises:
  ValueError: if messages are of unexpected types.
@@ -1222,7 +1176,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):

  Args:
  **kwargs: keyword arguments to use for filling in template variables
- in all the template messages in this chat template.
+ in all the template messages in this chat template.

  Returns:
  list of formatted messages.
@@ -1257,23 +1211,21 @@ class ChatPromptTemplate(BaseChatPromptTemplate):


  Example:
+ ```python
+ from langchain_core.prompts import ChatPromptTemplate

- .. code-block:: python
-
- from langchain_core.prompts import ChatPromptTemplate
-
- template = ChatPromptTemplate.from_messages(
- [
- ("system", "You are an AI assistant named {name}."),
- ("human", "Hi I'm {user}"),
- ("ai", "Hi there, {user}, I'm {name}."),
- ("human", "{input}"),
- ]
- )
- template2 = template.partial(user="Lucy", name="R2D2")
-
- template2.format_messages(input="hello")
+ template = ChatPromptTemplate.from_messages(
+ [
+ ("system", "You are an AI assistant named {name}."),
+ ("human", "Hi I'm {user}"),
+ ("ai", "Hi there, {user}, I'm {name}."),
+ ("human", "{input}"),
+ ]
+ )
+ template2 = template.partial(user="Lucy", name="R2D2")

+ template2.format_messages(input="hello")
+ ```
  """
  prompt_dict = self.__dict__.copy()
  prompt_dict["input_variables"] = list(
@@ -1294,7 +1246,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
  """Extend the chat template with a sequence of messages.

  Args:
- messages: sequence of message representations to append.
+ messages: Sequence of message representations to append.
  """
  self.messages.extend(
  [_convert_to_message_template(message) for message in messages]
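A tiny illustrative sketch of `extend` as documented above (appended representations go through the same message conversion as the constructor); the example values are assumptions:

```python
from langchain_core.prompts import ChatPromptTemplate

template = ChatPromptTemplate.from_messages([("system", "You are a helpful assistant.")])

# Message-like tuples are converted just like constructor arguments.
template.extend([("human", "{question}"), ("ai", "{answer}")])
template.format_messages(question="What is 2 + 2?", answer="4")
```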
@@ -1306,14 +1258,12 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
  @overload
  def __getitem__(self, index: slice) -> ChatPromptTemplate: ...

- def __getitem__(
- self, index: Union[int, slice]
- ) -> Union[MessageLike, ChatPromptTemplate]:
+ def __getitem__(self, index: int | slice) -> MessageLike | ChatPromptTemplate:
  """Use to index into the chat template.

  Returns:
  If index is an int, returns the message at that index.
- If index is a slice, returns a new ``ChatPromptTemplate``
+ If index is a slice, returns a new `ChatPromptTemplate`
  containing the messages in that slice.
  """
  if isinstance(index, slice):
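A brief sketch (illustrative, not from the diff) of the int/slice indexing behavior described in the `__getitem__` docstring above:

```python
from langchain_core.prompts import ChatPromptTemplate

template = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a helpful assistant."),
        ("human", "{question}"),
        ("ai", "{answer}"),
    ]
)

template[0]   # int index -> the message template at position 0
template[1:]  # slice -> a new ChatPromptTemplate containing the last two messages
```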
@@ -1331,7 +1281,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
  """Name of prompt type. Used for serialization."""
  return "chat"

- def save(self, file_path: Union[Path, str]) -> None:
+ def save(self, file_path: Path | str) -> None:
  """Save prompt to file.

  Args:
@@ -1344,7 +1294,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
  """Human-readable representation.

  Args:
- html: Whether to format as HTML. Defaults to False.
+ html: Whether to format as HTML.

  Returns:
  Human-readable representation.
@@ -1355,7 +1305,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):

  def _create_template_from_message_type(
  message_type: str,
- template: Union[str, list],
+ template: str | list,
  template_format: PromptTemplateFormat = "f-string",
  ) -> BaseMessagePromptTemplate:
  """Create a message prompt template from a message type and template string.
@@ -1363,7 +1313,7 @@ def _create_template_from_message_type(
  Args:
  message_type: str the type of the message template (e.g., "human", "ai", etc.)
  template: str the template string.
- template_format: format of the template. Defaults to "f-string".
+ template_format: format of the template.

  Returns:
  a message prompt template of the appropriate type.
@@ -1427,7 +1377,7 @@ def _create_template_from_message_type(
  def _convert_to_message_template(
  message: MessageLikeRepresentation,
  template_format: PromptTemplateFormat = "f-string",
- ) -> Union[BaseMessage, BaseMessagePromptTemplate, BaseChatPromptTemplate]:
+ ) -> BaseMessage | BaseMessagePromptTemplate | BaseChatPromptTemplate:
  """Instantiate a message from a variety of message formats.

  The message format can be one of the following:
@@ -1440,7 +1390,7 @@ def _convert_to_message_template(

  Args:
  message: a representation of a message in one of the supported formats.
- template_format: format of the template. Defaults to "f-string".
+ template_format: format of the template.

  Returns:
  an instance of a message or a message template.
@@ -1450,9 +1400,9 @@ def _convert_to_message_template(
  ValueError: If 2-tuple does not have 2 elements.
  """
  if isinstance(message, (BaseMessagePromptTemplate, BaseChatPromptTemplate)):
- message_: Union[
- BaseMessage, BaseMessagePromptTemplate, BaseChatPromptTemplate
- ] = message
+ message_: BaseMessage | BaseMessagePromptTemplate | BaseChatPromptTemplate = (
+ message
+ )
  elif isinstance(message, BaseMessage):
  message_ = message
  elif isinstance(message, str):