langchain-core 0.4.0.dev0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of this package as they appear in their public registries. It is provided for informational purposes only.

Potentially problematic release: this version of langchain-core might be problematic.
Files changed (172)
  1. langchain_core/__init__.py +1 -1
  2. langchain_core/_api/__init__.py +3 -4
  3. langchain_core/_api/beta_decorator.py +45 -70
  4. langchain_core/_api/deprecation.py +80 -80
  5. langchain_core/_api/path.py +22 -8
  6. langchain_core/_import_utils.py +10 -4
  7. langchain_core/agents.py +25 -21
  8. langchain_core/caches.py +53 -63
  9. langchain_core/callbacks/__init__.py +1 -8
  10. langchain_core/callbacks/base.py +341 -348
  11. langchain_core/callbacks/file.py +55 -44
  12. langchain_core/callbacks/manager.py +546 -683
  13. langchain_core/callbacks/stdout.py +29 -30
  14. langchain_core/callbacks/streaming_stdout.py +35 -36
  15. langchain_core/callbacks/usage.py +65 -70
  16. langchain_core/chat_history.py +48 -55
  17. langchain_core/document_loaders/base.py +46 -21
  18. langchain_core/document_loaders/langsmith.py +39 -36
  19. langchain_core/documents/__init__.py +0 -1
  20. langchain_core/documents/base.py +96 -74
  21. langchain_core/documents/compressor.py +12 -9
  22. langchain_core/documents/transformers.py +29 -28
  23. langchain_core/embeddings/fake.py +56 -57
  24. langchain_core/env.py +2 -3
  25. langchain_core/example_selectors/base.py +12 -0
  26. langchain_core/example_selectors/length_based.py +1 -1
  27. langchain_core/example_selectors/semantic_similarity.py +21 -25
  28. langchain_core/exceptions.py +15 -9
  29. langchain_core/globals.py +4 -163
  30. langchain_core/indexing/api.py +132 -125
  31. langchain_core/indexing/base.py +64 -67
  32. langchain_core/indexing/in_memory.py +26 -6
  33. langchain_core/language_models/__init__.py +15 -27
  34. langchain_core/language_models/_utils.py +267 -117
  35. langchain_core/language_models/base.py +92 -177
  36. langchain_core/language_models/chat_models.py +547 -407
  37. langchain_core/language_models/fake.py +11 -11
  38. langchain_core/language_models/fake_chat_models.py +72 -118
  39. langchain_core/language_models/llms.py +168 -242
  40. langchain_core/load/dump.py +8 -11
  41. langchain_core/load/load.py +32 -28
  42. langchain_core/load/mapping.py +2 -4
  43. langchain_core/load/serializable.py +50 -56
  44. langchain_core/messages/__init__.py +36 -51
  45. langchain_core/messages/ai.py +377 -150
  46. langchain_core/messages/base.py +239 -47
  47. langchain_core/messages/block_translators/__init__.py +111 -0
  48. langchain_core/messages/block_translators/anthropic.py +470 -0
  49. langchain_core/messages/block_translators/bedrock.py +94 -0
  50. langchain_core/messages/block_translators/bedrock_converse.py +297 -0
  51. langchain_core/messages/block_translators/google_genai.py +530 -0
  52. langchain_core/messages/block_translators/google_vertexai.py +21 -0
  53. langchain_core/messages/block_translators/groq.py +143 -0
  54. langchain_core/messages/block_translators/langchain_v0.py +301 -0
  55. langchain_core/messages/block_translators/openai.py +1010 -0
  56. langchain_core/messages/chat.py +2 -3
  57. langchain_core/messages/content.py +1423 -0
  58. langchain_core/messages/function.py +7 -7
  59. langchain_core/messages/human.py +44 -38
  60. langchain_core/messages/modifier.py +3 -2
  61. langchain_core/messages/system.py +40 -27
  62. langchain_core/messages/tool.py +160 -58
  63. langchain_core/messages/utils.py +527 -638
  64. langchain_core/output_parsers/__init__.py +1 -14
  65. langchain_core/output_parsers/base.py +68 -104
  66. langchain_core/output_parsers/json.py +13 -17
  67. langchain_core/output_parsers/list.py +11 -33
  68. langchain_core/output_parsers/openai_functions.py +56 -74
  69. langchain_core/output_parsers/openai_tools.py +68 -109
  70. langchain_core/output_parsers/pydantic.py +15 -13
  71. langchain_core/output_parsers/string.py +6 -2
  72. langchain_core/output_parsers/transform.py +17 -60
  73. langchain_core/output_parsers/xml.py +34 -44
  74. langchain_core/outputs/__init__.py +1 -1
  75. langchain_core/outputs/chat_generation.py +26 -11
  76. langchain_core/outputs/chat_result.py +1 -3
  77. langchain_core/outputs/generation.py +17 -6
  78. langchain_core/outputs/llm_result.py +15 -8
  79. langchain_core/prompt_values.py +29 -123
  80. langchain_core/prompts/__init__.py +3 -27
  81. langchain_core/prompts/base.py +48 -63
  82. langchain_core/prompts/chat.py +259 -288
  83. langchain_core/prompts/dict.py +19 -11
  84. langchain_core/prompts/few_shot.py +84 -90
  85. langchain_core/prompts/few_shot_with_templates.py +14 -12
  86. langchain_core/prompts/image.py +19 -14
  87. langchain_core/prompts/loading.py +6 -8
  88. langchain_core/prompts/message.py +7 -8
  89. langchain_core/prompts/prompt.py +42 -43
  90. langchain_core/prompts/string.py +37 -16
  91. langchain_core/prompts/structured.py +43 -46
  92. langchain_core/rate_limiters.py +51 -60
  93. langchain_core/retrievers.py +52 -192
  94. langchain_core/runnables/base.py +1727 -1683
  95. langchain_core/runnables/branch.py +52 -73
  96. langchain_core/runnables/config.py +89 -103
  97. langchain_core/runnables/configurable.py +128 -130
  98. langchain_core/runnables/fallbacks.py +93 -82
  99. langchain_core/runnables/graph.py +127 -127
  100. langchain_core/runnables/graph_ascii.py +63 -41
  101. langchain_core/runnables/graph_mermaid.py +87 -70
  102. langchain_core/runnables/graph_png.py +31 -36
  103. langchain_core/runnables/history.py +145 -161
  104. langchain_core/runnables/passthrough.py +141 -144
  105. langchain_core/runnables/retry.py +84 -68
  106. langchain_core/runnables/router.py +33 -37
  107. langchain_core/runnables/schema.py +79 -72
  108. langchain_core/runnables/utils.py +95 -139
  109. langchain_core/stores.py +85 -131
  110. langchain_core/structured_query.py +11 -15
  111. langchain_core/sys_info.py +31 -32
  112. langchain_core/tools/__init__.py +1 -14
  113. langchain_core/tools/base.py +221 -247
  114. langchain_core/tools/convert.py +144 -161
  115. langchain_core/tools/render.py +10 -10
  116. langchain_core/tools/retriever.py +12 -19
  117. langchain_core/tools/simple.py +52 -29
  118. langchain_core/tools/structured.py +56 -60
  119. langchain_core/tracers/__init__.py +1 -9
  120. langchain_core/tracers/_streaming.py +6 -7
  121. langchain_core/tracers/base.py +103 -112
  122. langchain_core/tracers/context.py +29 -48
  123. langchain_core/tracers/core.py +142 -105
  124. langchain_core/tracers/evaluation.py +30 -34
  125. langchain_core/tracers/event_stream.py +162 -117
  126. langchain_core/tracers/langchain.py +34 -36
  127. langchain_core/tracers/log_stream.py +87 -49
  128. langchain_core/tracers/memory_stream.py +3 -3
  129. langchain_core/tracers/root_listeners.py +18 -34
  130. langchain_core/tracers/run_collector.py +8 -20
  131. langchain_core/tracers/schemas.py +0 -125
  132. langchain_core/tracers/stdout.py +3 -3
  133. langchain_core/utils/__init__.py +1 -4
  134. langchain_core/utils/_merge.py +47 -9
  135. langchain_core/utils/aiter.py +70 -66
  136. langchain_core/utils/env.py +12 -9
  137. langchain_core/utils/function_calling.py +139 -206
  138. langchain_core/utils/html.py +7 -8
  139. langchain_core/utils/input.py +6 -6
  140. langchain_core/utils/interactive_env.py +6 -2
  141. langchain_core/utils/iter.py +48 -45
  142. langchain_core/utils/json.py +14 -4
  143. langchain_core/utils/json_schema.py +159 -43
  144. langchain_core/utils/mustache.py +32 -25
  145. langchain_core/utils/pydantic.py +67 -40
  146. langchain_core/utils/strings.py +5 -5
  147. langchain_core/utils/usage.py +1 -1
  148. langchain_core/utils/utils.py +104 -62
  149. langchain_core/vectorstores/base.py +131 -179
  150. langchain_core/vectorstores/in_memory.py +113 -182
  151. langchain_core/vectorstores/utils.py +23 -17
  152. langchain_core/version.py +1 -1
  153. langchain_core-1.0.0.dist-info/METADATA +68 -0
  154. langchain_core-1.0.0.dist-info/RECORD +172 -0
  155. {langchain_core-0.4.0.dev0.dist-info → langchain_core-1.0.0.dist-info}/WHEEL +1 -1
  156. langchain_core/beta/__init__.py +0 -1
  157. langchain_core/beta/runnables/__init__.py +0 -1
  158. langchain_core/beta/runnables/context.py +0 -448
  159. langchain_core/memory.py +0 -116
  160. langchain_core/messages/content_blocks.py +0 -1435
  161. langchain_core/prompts/pipeline.py +0 -133
  162. langchain_core/pydantic_v1/__init__.py +0 -30
  163. langchain_core/pydantic_v1/dataclasses.py +0 -23
  164. langchain_core/pydantic_v1/main.py +0 -23
  165. langchain_core/tracers/langchain_v1.py +0 -23
  166. langchain_core/utils/loading.py +0 -31
  167. langchain_core/v1/__init__.py +0 -1
  168. langchain_core/v1/chat_models.py +0 -1047
  169. langchain_core/v1/messages.py +0 -755
  170. langchain_core-0.4.0.dev0.dist-info/METADATA +0 -108
  171. langchain_core-0.4.0.dev0.dist-info/RECORD +0 -177
  172. langchain_core-0.4.0.dev0.dist-info/entry_points.txt +0 -4
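
The hunks below are all taken from `langchain_core/prompts/chat.py` and illustrate two changes repeated across this release: type annotations drop `typing.Optional`/`typing.Union` in favor of PEP 604 `X | None`/`X | Y` syntax, and docstring examples move from reStructuredText `.. code-block:: python` directives to Markdown fences. A minimal before/after sketch of the typing change follows; the function `n_latest_messages` is a hypothetical illustration, not an API in the package.

```python
from typing import Optional

from langchain_core.messages import BaseMessage


# Style used in 0.4.0.dev0 and earlier: typing.Optional / typing.Union
def n_latest_messages_old(
    messages: list[BaseMessage], n: Optional[int] = None
) -> list[BaseMessage]:
    # Keep the last n messages, or all of them when n is None.
    return messages if n is None else messages[-n:]


# Style used in 1.0.0: PEP 604 unions, no typing.Optional/Union imports (Python >= 3.10)
def n_latest_messages(
    messages: list[BaseMessage], n: int | None = None
) -> list[BaseMessage]:
    return messages if n is None else messages[-n:]
```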
@@ -8,10 +8,8 @@ from typing import (
     TYPE_CHECKING,
     Annotated,
     Any,
-    Optional,
     TypedDict,
     TypeVar,
-    Union,
     cast,
     overload,
 )
@@ -24,7 +22,6 @@ from pydantic import (
 )
 from typing_extensions import Self, override

-from langchain_core._api import deprecated
 from langchain_core.messages import (
     AIMessage,
     AnyMessage,
@@ -62,84 +59,83 @@ class MessagesPlaceholder(BaseMessagePromptTemplate):

     Direct usage:

-    .. code-block:: python
+    ```python
+    from langchain_core.prompts import MessagesPlaceholder

-        from langchain_core.prompts import MessagesPlaceholder
+    prompt = MessagesPlaceholder("history")
+    prompt.format_messages()  # raises KeyError

-        prompt = MessagesPlaceholder("history")
-        prompt.format_messages()  # raises KeyError
+    prompt = MessagesPlaceholder("history", optional=True)
+    prompt.format_messages()  # returns empty list []

-        prompt = MessagesPlaceholder("history", optional=True)
-        prompt.format_messages()  # returns empty list []
-
-        prompt.format_messages(
-            history=[
-                ("system", "You are an AI assistant."),
-                ("human", "Hello!"),
-            ]
-        )
-        # -> [
-        #     SystemMessage(content="You are an AI assistant."),
-        #     HumanMessage(content="Hello!"),
-        # ]
+    prompt.format_messages(
+        history=[
+            ("system", "You are an AI assistant."),
+            ("human", "Hello!"),
+        ]
+    )
+    # -> [
+    #     SystemMessage(content="You are an AI assistant."),
+    #     HumanMessage(content="Hello!"),
+    # ]
+    ```

     Building a prompt with chat history:

-    .. code-block:: python
-
-        from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
+    ```python
+    from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

-        prompt = ChatPromptTemplate.from_messages(
-            [
-                ("system", "You are a helpful assistant."),
-                MessagesPlaceholder("history"),
-                ("human", "{question}")
-            ]
-        )
-        prompt.invoke(
-            {
-                "history": [("human", "what's 5 + 2"), ("ai", "5 + 2 is 7")],
-                "question": "now multiply that by 4"
-            }
-        )
-        # -> ChatPromptValue(messages=[
-        #     SystemMessage(content="You are a helpful assistant."),
-        #     HumanMessage(content="what's 5 + 2"),
-        #     AIMessage(content="5 + 2 is 7"),
-        #     HumanMessage(content="now multiply that by 4"),
-        # ])
+    prompt = ChatPromptTemplate.from_messages(
+        [
+            ("system", "You are a helpful assistant."),
+            MessagesPlaceholder("history"),
+            ("human", "{question}"),
+        ]
+    )
+    prompt.invoke(
+        {
+            "history": [("human", "what's 5 + 2"), ("ai", "5 + 2 is 7")],
+            "question": "now multiply that by 4",
+        }
+    )
+    # -> ChatPromptValue(messages=[
+    #     SystemMessage(content="You are a helpful assistant."),
+    #     HumanMessage(content="what's 5 + 2"),
+    #     AIMessage(content="5 + 2 is 7"),
+    #     HumanMessage(content="now multiply that by 4"),
+    # ])
+    ```

     Limiting the number of messages:

-    .. code-block:: python
-
-        from langchain_core.prompts import MessagesPlaceholder
+    ```python
+    from langchain_core.prompts import MessagesPlaceholder

-        prompt = MessagesPlaceholder("history", n_messages=1)
-
-        prompt.format_messages(
-            history=[
-                ("system", "You are an AI assistant."),
-                ("human", "Hello!"),
-            ]
-        )
-        # -> [
-        #     HumanMessage(content="Hello!"),
-        # ]
+    prompt = MessagesPlaceholder("history", n_messages=1)

+    prompt.format_messages(
+        history=[
+            ("system", "You are an AI assistant."),
+            ("human", "Hello!"),
+        ]
+    )
+    # -> [
+    #     HumanMessage(content="Hello!"),
+    # ]
+    ```
     """

     variable_name: str
     """Name of variable to use as messages."""

     optional: bool = False
-    """If True format_messages can be called with no arguments and will return an empty
-    list. If False then a named argument with name `variable_name` must be passed
-    in, even if the value is an empty list."""
+    """If `True` format_messages can be called with no arguments and will return an
+    empty list. If `False` then a named argument with name `variable_name` must be
+    passed in, even if the value is an empty list."""

-    n_messages: Optional[PositiveInt] = None
-    """Maximum number of messages to include. If None, then will include all.
-    Defaults to None."""
+    n_messages: PositiveInt | None = None
+    """Maximum number of messages to include. If `None`, then will include all.
+    """

     def __init__(
         self, variable_name: str, *, optional: bool = False, **kwargs: Any
@@ -148,16 +144,13 @@ class MessagesPlaceholder(BaseMessagePromptTemplate):

         Args:
             variable_name: Name of variable to use as messages.
-            optional: If True format_messages can be called with no arguments and will
-                return an empty list. If False then a named argument with name
+            optional: If `True` format_messages can be called with no arguments and will
+                return an empty list. If `False` then a named argument with name
                 `variable_name` must be passed in, even if the value is an empty list.
-                Defaults to False.]
         """
         # mypy can't detect the init which is defined in the parent class
         # b/c these are BaseModel classes.
-        super().__init__(  # type: ignore[call-arg]
-            variable_name=variable_name, optional=optional, **kwargs
-        )
+        super().__init__(variable_name=variable_name, optional=optional, **kwargs)

     def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
         """Format messages from kwargs.
@@ -201,7 +194,7 @@ class MessagesPlaceholder(BaseMessagePromptTemplate):
         """Human-readable representation.

         Args:
-            html: Whether to format as HTML. Defaults to False.
+            html: Whether to format as HTML.

         Returns:
             Human-readable representation.
@@ -234,20 +227,20 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
         cls,
         template: str,
         template_format: PromptTemplateFormat = "f-string",
-        partial_variables: Optional[dict[str, Any]] = None,
+        partial_variables: dict[str, Any] | None = None,
         **kwargs: Any,
     ) -> Self:
         """Create a class from a string template.

         Args:
             template: a template.
-            template_format: format of the template. Defaults to "f-string".
+            template_format: format of the template.
             partial_variables: A dictionary of variables that can be used to partially
-                fill in the template. For example, if the template is
-                `"{variable1} {variable2}"`, and `partial_variables` is
-                `{"variable1": "foo"}`, then the final prompt will be
-                `"foo {variable2}"`.
-                Defaults to None.
+                fill in the template. For example, if the template is
+                `"{variable1} {variable2}"`, and `partial_variables` is
+                `{"variable1": "foo"}`, then the final prompt will be
+                `"foo {variable2}"`.
+
             **kwargs: keyword arguments to pass to the constructor.

         Returns:
@@ -263,21 +256,19 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
     @classmethod
     def from_template_file(
         cls,
-        template_file: Union[str, Path],
-        input_variables: list[str],
+        template_file: str | Path,
         **kwargs: Any,
     ) -> Self:
         """Create a class from a template file.

         Args:
             template_file: path to a template file. String or Path.
-            input_variables: list of input variables.
             **kwargs: keyword arguments to pass to the constructor.

         Returns:
             A new instance of this class.
         """
-        prompt = PromptTemplate.from_file(template_file, input_variables)
+        prompt = PromptTemplate.from_file(template_file)
         return cls(prompt=prompt, **kwargs)

     @abstractmethod
@@ -338,7 +329,7 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
         """Human-readable representation.

         Args:
-            html: Whether to format as HTML. Defaults to False.
+            html: Whether to format as HTML.

         Returns:
             Human-readable representation.
@@ -385,20 +376,20 @@ class ChatMessagePromptTemplate(BaseStringMessagePromptTemplate):


 class _TextTemplateParam(TypedDict, total=False):
-    text: Union[str, dict]
+    text: str | dict


 class _ImageTemplateParam(TypedDict, total=False):
-    image_url: Union[str, dict]
+    image_url: str | dict


 class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
     """Human message prompt template. This is a message sent from the user."""

-    prompt: Union[
-        StringPromptTemplate,
-        list[Union[StringPromptTemplate, ImagePromptTemplate, DictPromptTemplate]],
-    ]
+    prompt: (
+        StringPromptTemplate
+        | list[StringPromptTemplate | ImagePromptTemplate | DictPromptTemplate]
+    )
     """Prompt template."""
     additional_kwargs: dict = Field(default_factory=dict)
     """Additional keyword arguments to pass to the prompt template."""
@@ -408,13 +399,11 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
     @classmethod
     def from_template(
         cls: type[Self],
-        template: Union[
-            str,
-            list[Union[str, _TextTemplateParam, _ImageTemplateParam, dict[str, Any]]],
-        ],
+        template: str
+        | list[str | _TextTemplateParam | _ImageTemplateParam | dict[str, Any]],
         template_format: PromptTemplateFormat = "f-string",
         *,
-        partial_variables: Optional[dict[str, Any]] = None,
+        partial_variables: dict[str, Any] | None = None,
         **kwargs: Any,
     ) -> Self:
         """Create a class from a string template.
@@ -422,9 +411,9 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
         Args:
             template: a template.
             template_format: format of the template.
-                Options are: 'f-string', 'mustache', 'jinja2'. Defaults to "f-string".
+                Options are: 'f-string', 'mustache', 'jinja2'.
             partial_variables: A dictionary of variables that can be used too partially.
-                Defaults to None.
+
             **kwargs: keyword arguments to pass to the constructor.

         Returns:
@@ -434,7 +423,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
             ValueError: If the template is not a string or list of strings.
         """
         if isinstance(template, str):
-            prompt: Union[StringPromptTemplate, list] = PromptTemplate.from_template(
+            prompt: StringPromptTemplate | list = PromptTemplate.from_template(
                 template,
                 template_format=template_format,
                 partial_variables=partial_variables,
@@ -531,7 +520,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
     @classmethod
     def from_template_file(
         cls: type[Self],
-        template_file: Union[str, Path],
+        template_file: str | Path,
         input_variables: list[str],
         **kwargs: Any,
     ) -> Self:
@@ -545,8 +534,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
         Returns:
             A new instance of this class.
         """
-        template = Path(template_file).read_text()
-        # TODO: .read_text(encoding="utf-8") for v0.4
+        template = Path(template_file).read_text(encoding="utf-8")
         return cls.from_template(template, input_variables=input_variables, **kwargs)

     def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
@@ -599,9 +587,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
         for prompt in self.prompt:
             inputs = {var: kwargs[var] for var in prompt.input_variables}
             if isinstance(prompt, StringPromptTemplate):
-                formatted: Union[str, ImageURL, dict[str, Any]] = prompt.format(
-                    **inputs
-                )
+                formatted: str | ImageURL | dict[str, Any] = prompt.format(**inputs)
                 content.append({"type": "text", "text": formatted})
             elif isinstance(prompt, ImagePromptTemplate):
                 formatted = prompt.format(**inputs)
@@ -631,7 +617,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
         for prompt in self.prompt:
             inputs = {var: kwargs[var] for var in prompt.input_variables}
             if isinstance(prompt, StringPromptTemplate):
-                formatted: Union[str, ImageURL, dict[str, Any]] = await prompt.aformat(
+                formatted: str | ImageURL | dict[str, Any] = await prompt.aformat(
                     **inputs
                 )
                 content.append({"type": "text", "text": formatted})
@@ -650,7 +636,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
         """Human-readable representation.

         Args:
-            html: Whether to format as HTML. Defaults to False.
+            html: Whether to format as HTML.

         Returns:
             Human-readable representation.
@@ -697,7 +683,7 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC):

         Args:
             **kwargs: keyword arguments to use for filling in template variables
-                      in all the template messages in this chat template.
+                in all the template messages in this chat template.

         Returns:
             formatted string.
@@ -709,7 +695,7 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC):

         Args:
             **kwargs: keyword arguments to use for filling in template variables
-                      in all the template messages in this chat template.
+                in all the template messages in this chat template.

         Returns:
             formatted string.
@@ -742,10 +728,18 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC):

     @abstractmethod
     def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
-        """Format kwargs into a list of messages."""
+        """Format kwargs into a list of messages.
+
+        Returns:
+            List of messages.
+        """

     async def aformat_messages(self, **kwargs: Any) -> list[BaseMessage]:
-        """Async format kwargs into a list of messages."""
+        """Async format kwargs into a list of messages.
+
+        Returns:
+            List of messages.
+        """
         return self.format_messages(**kwargs)

     def pretty_repr(
@@ -755,7 +749,7 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC):
         """Human-readable representation.

         Args:
-            html: Whether to format as HTML. Defaults to False.
+            html: Whether to format as HTML.

         Returns:
             Human-readable representation.
@@ -767,17 +761,14 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC):
         print(self.pretty_repr(html=is_interactive_env()))  # noqa: T201


-MessageLike = Union[BaseMessagePromptTemplate, BaseMessage, BaseChatPromptTemplate]
+MessageLike = BaseMessagePromptTemplate | BaseMessage | BaseChatPromptTemplate

-MessageLikeRepresentation = Union[
-    MessageLike,
-    tuple[
-        Union[str, type],
-        Union[str, list[dict], list[object]],
-    ],
-    str,
-    dict[str, Any],
-]
+MessageLikeRepresentation = (
+    MessageLike
+    | tuple[str | type, str | list[dict] | list[object]]
+    | str
+    | dict[str, Any]
+)


 class ChatPromptTemplate(BaseChatPromptTemplate):
@@ -786,78 +777,80 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
     Use to create flexible templated prompts for chat models.

     Examples:
-
-        .. versionchanged:: 0.2.24
-
+        !!! warning "Behavior changed in 0.2.24"
             You can pass any Message-like formats supported by
-            ``ChatPromptTemplate.from_messages()`` directly to ``ChatPromptTemplate()``
+            `ChatPromptTemplate.from_messages()` directly to `ChatPromptTemplate()`
             init.

-        .. code-block:: python
+        ```python
+        from langchain_core.prompts import ChatPromptTemplate

-            from langchain_core.prompts import ChatPromptTemplate
-
-            template = ChatPromptTemplate([
+        template = ChatPromptTemplate(
+            [
                 ("system", "You are a helpful AI bot. Your name is {name}."),
                 ("human", "Hello, how are you doing?"),
                 ("ai", "I'm doing well, thanks!"),
                 ("human", "{user_input}"),
-            ])
+            ]
+        )

-            prompt_value = template.invoke(
-                {
-                    "name": "Bob",
-                    "user_input": "What is your name?"
-                }
-            )
-            # Output:
-            # ChatPromptValue(
-            #    messages=[
-            #        SystemMessage(content='You are a helpful AI bot. Your name is Bob.'),
-            #        HumanMessage(content='Hello, how are you doing?'),
-            #        AIMessage(content="I'm doing well, thanks!"),
-            #        HumanMessage(content='What is your name?')
-            #    ]
-            #)
+        prompt_value = template.invoke(
+            {
+                "name": "Bob",
+                "user_input": "What is your name?",
+            }
+        )
+        # Output:
+        # ChatPromptValue(
+        #     messages=[
+        #         SystemMessage(content='You are a helpful AI bot. Your name is Bob.'),
+        #         HumanMessage(content='Hello, how are you doing?'),
+        #         AIMessage(content="I'm doing well, thanks!"),
+        #         HumanMessage(content='What is your name?')
+        #     ]
+        # )
+        ```

     Messages Placeholder:

-        .. code-block:: python
-
-            # In addition to Human/AI/Tool/Function messages,
-            # you can initialize the template with a MessagesPlaceholder
-            # either using the class directly or with the shorthand tuple syntax:
+        ```python
+        # In addition to Human/AI/Tool/Function messages,
+        # you can initialize the template with a MessagesPlaceholder
+        # either using the class directly or with the shorthand tuple syntax:

-            template = ChatPromptTemplate([
+        template = ChatPromptTemplate(
+            [
                 ("system", "You are a helpful AI bot."),
                 # Means the template will receive an optional list of messages under
                 # the "conversation" key
-                ("placeholder", "{conversation}")
+                ("placeholder", "{conversation}"),
                 # Equivalently:
                 # MessagesPlaceholder(variable_name="conversation", optional=True)
-            ])
-
-            prompt_value = template.invoke(
-                {
-                    "conversation": [
-                        ("human", "Hi!"),
-                        ("ai", "How can I assist you today?"),
-                        ("human", "Can you make me an ice cream sundae?"),
-                        ("ai", "No.")
-                    ]
-                }
-            )
+            ]
+        )

-            # Output:
-            # ChatPromptValue(
-            #    messages=[
-            #        SystemMessage(content='You are a helpful AI bot.'),
-            #        HumanMessage(content='Hi!'),
-            #        AIMessage(content='How can I assist you today?'),
-            #        HumanMessage(content='Can you make me an ice cream sundae?'),
-            #        AIMessage(content='No.'),
-            #    ]
-            #)
+        prompt_value = template.invoke(
+            {
+                "conversation": [
+                    ("human", "Hi!"),
+                    ("ai", "How can I assist you today?"),
+                    ("human", "Can you make me an ice cream sundae?"),
+                    ("ai", "No."),
+                ]
+            }
+        )
+
+        # Output:
+        # ChatPromptValue(
+        #     messages=[
+        #         SystemMessage(content='You are a helpful AI bot.'),
+        #         HumanMessage(content='Hi!'),
+        #         AIMessage(content='How can I assist you today?'),
+        #         HumanMessage(content='Can you make me an ice cream sundae?'),
+        #         AIMessage(content='No.'),
+        #     ]
+        # )
+        ```

     Single-variable template:

@@ -866,27 +859,28 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
     inject the provided argument into that variable location.


-        .. code-block:: python
-
-            from langchain_core.prompts import ChatPromptTemplate
+        ```python
+        from langchain_core.prompts import ChatPromptTemplate

-            template = ChatPromptTemplate([
+        template = ChatPromptTemplate(
+            [
                 ("system", "You are a helpful AI bot. Your name is Carl."),
                 ("human", "{user_input}"),
-            ])
-
-            prompt_value = template.invoke("Hello, there!")
-            # Equivalent to
-            # prompt_value = template.invoke({"user_input": "Hello, there!"})
-
-            # Output:
-            # ChatPromptValue(
-            #    messages=[
-            #        SystemMessage(content='You are a helpful AI bot. Your name is Carl.'),
-            #        HumanMessage(content='Hello, there!'),
-            #    ]
-            # )
+            ]
+        )

+        prompt_value = template.invoke("Hello, there!")
+        # Equivalent to
+        # prompt_value = template.invoke({"user_input": "Hello, there!"})
+
+        # Output:
+        # ChatPromptValue(
+        #     messages=[
+        #         SystemMessage(content='You are a helpful AI bot. Your name is Carl.'),
+        #         HumanMessage(content='Hello, there!'),
+        #     ]
+        # )
+        ```
     """  # noqa: E501

     messages: Annotated[list[MessageLike], SkipValidation()]
@@ -905,12 +899,12 @@ class ChatPromptTemplate(BaseChatPromptTemplate):

         Args:
             messages: sequence of message representations.
-                      A message can be represented using the following formats:
-                      (1) BaseMessagePromptTemplate, (2) BaseMessage, (3) 2-tuple of
-                      (message type, template); e.g., ("human", "{user_input}"),
-                      (4) 2-tuple of (message class, template), (5) a string which is
-                      shorthand for ("human", template); e.g., "{user_input}".
-            template_format: format of the template. Defaults to "f-string".
+                A message can be represented using the following formats:
+                (1) BaseMessagePromptTemplate, (2) BaseMessage, (3) 2-tuple of
+                (message type, template); e.g., ("human", "{user_input}"),
+                (4) 2-tuple of (message class, template), (5) a string which is
+                shorthand for ("human", template); e.g., "{user_input}".
+            template_format: format of the template.
             input_variables: A list of the names of the variables whose values are
                 required as inputs to the prompt.
             optional_variables: A list of the names of the variables for placeholder
@@ -924,29 +918,29 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
             input_types: A dictionary of the types of the variables the prompt template
                 expects. If not provided, all variables are assumed to be strings.

-        Returns:
-            A chat prompt template.
-
         Examples:
             Instantiation from a list of message templates:

-            .. code-block:: python
-
-                template = ChatPromptTemplate([
+            ```python
+            template = ChatPromptTemplate(
+                [
                     ("human", "Hello, how are you?"),
                     ("ai", "I'm doing well, thanks!"),
                     ("human", "That's good to hear."),
-                ])
+                ]
+            )
+            ```

             Instantiation from mixed message formats:

-            .. code-block:: python
-
-                template = ChatPromptTemplate([
+            ```python
+            template = ChatPromptTemplate(
+                [
                     SystemMessage(content="hello"),
                     ("human", "Hello, how are you?"),
-                ])
-
+                ]
+            )
+            ```
         """
         messages_ = [
             _convert_to_message_template(message, template_format)
@@ -976,7 +970,11 @@ class ChatPromptTemplate(BaseChatPromptTemplate):

     @classmethod
     def get_lc_namespace(cls) -> list[str]:
-        """Get the namespace of the langchain object."""
+        """Get the namespace of the LangChain object.
+
+        Returns:
+            `["langchain", "prompts", "chat"]`
+        """
         return ["langchain", "prompts", "chat"]

     def __add__(self, other: Any) -> ChatPromptTemplate:
@@ -1091,41 +1089,6 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
         message = HumanMessagePromptTemplate(prompt=prompt_template)
         return cls.from_messages([message])

-    @classmethod
-    @deprecated("0.0.1", alternative="from_messages", pending=True)
-    def from_role_strings(
-        cls, string_messages: list[tuple[str, str]]
-    ) -> ChatPromptTemplate:
-        """Create a chat prompt template from a list of (role, template) tuples.
-
-        Args:
-            string_messages: list of (role, template) tuples.
-
-        Returns:
-            a chat prompt template.
-        """
-        return cls(
-            messages=[
-                ChatMessagePromptTemplate.from_template(template, role=role)
-                for role, template in string_messages
-            ]
-        )
-
-    @classmethod
-    @deprecated("0.0.1", alternative="from_messages", pending=True)
-    def from_strings(
-        cls, string_messages: list[tuple[type[BaseMessagePromptTemplate], str]]
-    ) -> ChatPromptTemplate:
-        """Create a chat prompt template from a list of (role class, template) tuples.
-
-        Args:
-            string_messages: list of (role class, template) tuples.
-
-        Returns:
-            a chat prompt template.
-        """
-        return cls.from_messages(string_messages)
-
     @classmethod
     def from_messages(
         cls,
@@ -1137,31 +1100,34 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
         Examples:
             Instantiation from a list of message templates:

-            .. code-block:: python
-
-                template = ChatPromptTemplate.from_messages([
+            ```python
+            template = ChatPromptTemplate.from_messages(
+                [
                     ("human", "Hello, how are you?"),
                     ("ai", "I'm doing well, thanks!"),
                     ("human", "That's good to hear."),
-                ])
+                ]
+            )
+            ```

             Instantiation from mixed message formats:

-            .. code-block:: python
-
-                template = ChatPromptTemplate.from_messages([
+            ```python
+            template = ChatPromptTemplate.from_messages(
+                [
                     SystemMessage(content="hello"),
                     ("human", "Hello, how are you?"),
-                ])
-
+                ]
+            )
+            ```
         Args:
             messages: sequence of message representations.
-                      A message can be represented using the following formats:
-                      (1) BaseMessagePromptTemplate, (2) BaseMessage, (3) 2-tuple of
-                      (message type, template); e.g., ("human", "{user_input}"),
-                      (4) 2-tuple of (message class, template), (5) a string which is
-                      shorthand for ("human", template); e.g., "{user_input}".
-            template_format: format of the template. Defaults to "f-string".
+                A message can be represented using the following formats:
+                (1) BaseMessagePromptTemplate, (2) BaseMessage, (3) 2-tuple of
+                (message type, template); e.g., ("human", "{user_input}"),
+                (4) 2-tuple of (message class, template), (5) a string which is
+                shorthand for ("human", template); e.g., "{user_input}".
+            template_format: format of the template.

         Returns:
             a chat prompt template.
@@ -1174,7 +1140,10 @@ class ChatPromptTemplate(BaseChatPromptTemplate):

         Args:
             **kwargs: keyword arguments to use for filling in template variables
-                      in all the template messages in this chat template.
+                in all the template messages in this chat template.
+
+        Raises:
+            ValueError: if messages are of unexpected types.

         Returns:
             list of formatted messages.
@@ -1199,7 +1168,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):

         Args:
             **kwargs: keyword arguments to use for filling in template variables
-                      in all the template messages in this chat template.
+                in all the template messages in this chat template.

         Returns:
             list of formatted messages.
@@ -1234,23 +1203,21 @@ class ChatPromptTemplate(BaseChatPromptTemplate):


         Example:
+            ```python
+            from langchain_core.prompts import ChatPromptTemplate

-            .. code-block:: python
-
-                from langchain_core.prompts import ChatPromptTemplate
-
-                template = ChatPromptTemplate.from_messages(
-                    [
-                        ("system", "You are an AI assistant named {name}."),
-                        ("human", "Hi I'm {user}"),
-                        ("ai", "Hi there, {user}, I'm {name}."),
-                        ("human", "{input}"),
-                    ]
-                )
-                template2 = template.partial(user="Lucy", name="R2D2")
-
-                template2.format_messages(input="hello")
+            template = ChatPromptTemplate.from_messages(
+                [
+                    ("system", "You are an AI assistant named {name}."),
+                    ("human", "Hi I'm {user}"),
+                    ("ai", "Hi there, {user}, I'm {name}."),
+                    ("human", "{input}"),
+                ]
+            )
+            template2 = template.partial(user="Lucy", name="R2D2")

+            template2.format_messages(input="hello")
+            ```
         """
         prompt_dict = self.__dict__.copy()
         prompt_dict["input_variables"] = list(
@@ -1283,10 +1250,14 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
     @overload
     def __getitem__(self, index: slice) -> ChatPromptTemplate: ...

-    def __getitem__(
-        self, index: Union[int, slice]
-    ) -> Union[MessageLike, ChatPromptTemplate]:
-        """Use to index into the chat template."""
+    def __getitem__(self, index: int | slice) -> MessageLike | ChatPromptTemplate:
+        """Use to index into the chat template.
+
+        Returns:
+            If index is an int, returns the message at that index.
+            If index is a slice, returns a new `ChatPromptTemplate`
+            containing the messages in that slice.
+        """
         if isinstance(index, slice):
             start, stop, step = index.indices(len(self.messages))
             messages = self.messages[start:stop:step]
@@ -1294,7 +1265,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
         return self.messages[index]

     def __len__(self) -> int:
-        """Get the length of the chat template."""
+        """Return the length of the chat template."""
         return len(self.messages)

     @property
@@ -1302,7 +1273,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
         """Name of prompt type. Used for serialization."""
         return "chat"

-    def save(self, file_path: Union[Path, str]) -> None:
+    def save(self, file_path: Path | str) -> None:
         """Save prompt to file.

         Args:
@@ -1315,7 +1286,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
         """Human-readable representation.

         Args:
-            html: Whether to format as HTML. Defaults to False.
+            html: Whether to format as HTML.

         Returns:
             Human-readable representation.
@@ -1326,7 +1297,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):

 def _create_template_from_message_type(
     message_type: str,
-    template: Union[str, list],
+    template: str | list,
     template_format: PromptTemplateFormat = "f-string",
 ) -> BaseMessagePromptTemplate:
     """Create a message prompt template from a message type and template string.
@@ -1334,7 +1305,7 @@ def _create_template_from_message_type(
     Args:
         message_type: str the type of the message template (e.g., "human", "ai", etc.)
         template: str the template string.
-        template_format: format of the template. Defaults to "f-string".
+        template_format: format of the template.

     Returns:
         a message prompt template of the appropriate type.
@@ -1398,7 +1369,7 @@ def _create_template_from_message_type(
 def _convert_to_message_template(
     message: MessageLikeRepresentation,
     template_format: PromptTemplateFormat = "f-string",
-) -> Union[BaseMessage, BaseMessagePromptTemplate, BaseChatPromptTemplate]:
+) -> BaseMessage | BaseMessagePromptTemplate | BaseChatPromptTemplate:
     """Instantiate a message from a variety of message formats.

     The message format can be one of the following:
@@ -1411,7 +1382,7 @@ def _convert_to_message_template(

     Args:
         message: a representation of a message in one of the supported formats.
-        template_format: format of the template. Defaults to "f-string".
+        template_format: format of the template.

     Returns:
         an instance of a message or a message template.
@@ -1421,9 +1392,9 @@ def _convert_to_message_template(
         ValueError: If 2-tuple does not have 2 elements.
     """
     if isinstance(message, (BaseMessagePromptTemplate, BaseChatPromptTemplate)):
-        message_: Union[
-            BaseMessage, BaseMessagePromptTemplate, BaseChatPromptTemplate
-        ] = message
+        message_: BaseMessage | BaseMessagePromptTemplate | BaseChatPromptTemplate = (
+            message
+        )
     elif isinstance(message, BaseMessage):
         message_ = message
     elif isinstance(message, str):