langchain-core 1.0.0a8__py3-none-any.whl → 1.0.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of langchain-core might be problematic. See the package registry page for more details.

Files changed (142)
  1. langchain_core/__init__.py +1 -1
  2. langchain_core/_api/__init__.py +0 -1
  3. langchain_core/_api/beta_decorator.py +17 -20
  4. langchain_core/_api/deprecation.py +30 -35
  5. langchain_core/_import_utils.py +1 -1
  6. langchain_core/agents.py +10 -9
  7. langchain_core/caches.py +46 -56
  8. langchain_core/callbacks/__init__.py +1 -8
  9. langchain_core/callbacks/base.py +232 -243
  10. langchain_core/callbacks/file.py +33 -33
  11. langchain_core/callbacks/manager.py +353 -416
  12. langchain_core/callbacks/stdout.py +21 -22
  13. langchain_core/callbacks/streaming_stdout.py +32 -32
  14. langchain_core/callbacks/usage.py +54 -51
  15. langchain_core/chat_history.py +43 -58
  16. langchain_core/document_loaders/base.py +21 -21
  17. langchain_core/document_loaders/langsmith.py +22 -22
  18. langchain_core/documents/__init__.py +0 -1
  19. langchain_core/documents/base.py +46 -49
  20. langchain_core/documents/transformers.py +28 -29
  21. langchain_core/embeddings/fake.py +50 -54
  22. langchain_core/example_selectors/semantic_similarity.py +4 -6
  23. langchain_core/exceptions.py +7 -8
  24. langchain_core/indexing/api.py +19 -25
  25. langchain_core/indexing/base.py +24 -24
  26. langchain_core/language_models/__init__.py +11 -27
  27. langchain_core/language_models/_utils.py +53 -54
  28. langchain_core/language_models/base.py +30 -24
  29. langchain_core/language_models/chat_models.py +123 -148
  30. langchain_core/language_models/fake_chat_models.py +7 -7
  31. langchain_core/language_models/llms.py +14 -16
  32. langchain_core/load/dump.py +3 -4
  33. langchain_core/load/load.py +7 -16
  34. langchain_core/load/serializable.py +37 -36
  35. langchain_core/messages/__init__.py +1 -16
  36. langchain_core/messages/ai.py +122 -123
  37. langchain_core/messages/base.py +31 -31
  38. langchain_core/messages/block_translators/__init__.py +17 -17
  39. langchain_core/messages/block_translators/anthropic.py +3 -3
  40. langchain_core/messages/block_translators/bedrock_converse.py +3 -3
  41. langchain_core/messages/block_translators/google_genai.py +5 -4
  42. langchain_core/messages/block_translators/google_vertexai.py +4 -32
  43. langchain_core/messages/block_translators/groq.py +117 -21
  44. langchain_core/messages/block_translators/langchain_v0.py +3 -3
  45. langchain_core/messages/block_translators/openai.py +5 -5
  46. langchain_core/messages/chat.py +2 -6
  47. langchain_core/messages/content.py +222 -209
  48. langchain_core/messages/function.py +6 -10
  49. langchain_core/messages/human.py +17 -24
  50. langchain_core/messages/modifier.py +2 -2
  51. langchain_core/messages/system.py +12 -22
  52. langchain_core/messages/tool.py +53 -69
  53. langchain_core/messages/utils.py +399 -417
  54. langchain_core/output_parsers/__init__.py +1 -14
  55. langchain_core/output_parsers/base.py +46 -47
  56. langchain_core/output_parsers/json.py +3 -4
  57. langchain_core/output_parsers/list.py +2 -2
  58. langchain_core/output_parsers/openai_functions.py +46 -44
  59. langchain_core/output_parsers/openai_tools.py +11 -16
  60. langchain_core/output_parsers/pydantic.py +10 -11
  61. langchain_core/output_parsers/string.py +2 -2
  62. langchain_core/output_parsers/transform.py +2 -2
  63. langchain_core/output_parsers/xml.py +1 -1
  64. langchain_core/outputs/__init__.py +1 -1
  65. langchain_core/outputs/chat_generation.py +14 -14
  66. langchain_core/outputs/generation.py +6 -6
  67. langchain_core/outputs/llm_result.py +5 -5
  68. langchain_core/prompt_values.py +11 -11
  69. langchain_core/prompts/__init__.py +3 -23
  70. langchain_core/prompts/base.py +33 -38
  71. langchain_core/prompts/chat.py +222 -229
  72. langchain_core/prompts/dict.py +3 -3
  73. langchain_core/prompts/few_shot.py +76 -83
  74. langchain_core/prompts/few_shot_with_templates.py +7 -9
  75. langchain_core/prompts/image.py +12 -14
  76. langchain_core/prompts/loading.py +1 -1
  77. langchain_core/prompts/message.py +3 -3
  78. langchain_core/prompts/prompt.py +20 -23
  79. langchain_core/prompts/string.py +20 -8
  80. langchain_core/prompts/structured.py +26 -27
  81. langchain_core/rate_limiters.py +50 -58
  82. langchain_core/retrievers.py +41 -182
  83. langchain_core/runnables/base.py +565 -597
  84. langchain_core/runnables/branch.py +8 -8
  85. langchain_core/runnables/config.py +37 -44
  86. langchain_core/runnables/configurable.py +9 -10
  87. langchain_core/runnables/fallbacks.py +9 -9
  88. langchain_core/runnables/graph.py +46 -50
  89. langchain_core/runnables/graph_ascii.py +19 -18
  90. langchain_core/runnables/graph_mermaid.py +20 -31
  91. langchain_core/runnables/graph_png.py +7 -7
  92. langchain_core/runnables/history.py +22 -22
  93. langchain_core/runnables/passthrough.py +11 -11
  94. langchain_core/runnables/retry.py +3 -3
  95. langchain_core/runnables/router.py +2 -2
  96. langchain_core/runnables/schema.py +33 -33
  97. langchain_core/runnables/utils.py +30 -34
  98. langchain_core/stores.py +72 -102
  99. langchain_core/sys_info.py +27 -29
  100. langchain_core/tools/__init__.py +1 -14
  101. langchain_core/tools/base.py +70 -71
  102. langchain_core/tools/convert.py +100 -104
  103. langchain_core/tools/render.py +9 -9
  104. langchain_core/tools/retriever.py +7 -7
  105. langchain_core/tools/simple.py +6 -7
  106. langchain_core/tools/structured.py +18 -24
  107. langchain_core/tracers/__init__.py +1 -9
  108. langchain_core/tracers/base.py +35 -35
  109. langchain_core/tracers/context.py +12 -17
  110. langchain_core/tracers/event_stream.py +3 -3
  111. langchain_core/tracers/langchain.py +8 -8
  112. langchain_core/tracers/log_stream.py +17 -18
  113. langchain_core/tracers/memory_stream.py +3 -3
  114. langchain_core/tracers/root_listeners.py +2 -2
  115. langchain_core/tracers/schemas.py +0 -129
  116. langchain_core/tracers/stdout.py +1 -2
  117. langchain_core/utils/__init__.py +1 -1
  118. langchain_core/utils/aiter.py +32 -32
  119. langchain_core/utils/env.py +5 -5
  120. langchain_core/utils/function_calling.py +59 -154
  121. langchain_core/utils/html.py +4 -4
  122. langchain_core/utils/input.py +3 -3
  123. langchain_core/utils/interactive_env.py +1 -1
  124. langchain_core/utils/iter.py +20 -20
  125. langchain_core/utils/json.py +1 -1
  126. langchain_core/utils/json_schema.py +2 -2
  127. langchain_core/utils/mustache.py +5 -5
  128. langchain_core/utils/pydantic.py +17 -17
  129. langchain_core/utils/strings.py +5 -5
  130. langchain_core/utils/utils.py +25 -28
  131. langchain_core/vectorstores/base.py +55 -87
  132. langchain_core/vectorstores/in_memory.py +83 -85
  133. langchain_core/vectorstores/utils.py +2 -2
  134. langchain_core/version.py +1 -1
  135. {langchain_core-1.0.0a8.dist-info → langchain_core-1.0.0rc2.dist-info}/METADATA +23 -11
  136. langchain_core-1.0.0rc2.dist-info/RECORD +172 -0
  137. langchain_core/memory.py +0 -120
  138. langchain_core/pydantic_v1/__init__.py +0 -30
  139. langchain_core/pydantic_v1/dataclasses.py +0 -23
  140. langchain_core/pydantic_v1/main.py +0 -23
  141. langchain_core-1.0.0a8.dist-info/RECORD +0 -176
  142. {langchain_core-1.0.0a8.dist-info → langchain_core-1.0.0rc2.dist-info}/WHEEL +0 -0
@@ -15,14 +15,14 @@ from langchain_core.utils._merge import merge_dicts
15
15
  class ChatGeneration(Generation):
16
16
  """A single chat generation output.
17
17
 
18
- A subclass of ``Generation`` that represents the response from a chat model
18
+ A subclass of `Generation` that represents the response from a chat model
19
19
  that generates chat messages.
20
20
 
21
- The ``message`` attribute is a structured representation of the chat message.
22
- Most of the time, the message will be of type ``AIMessage``.
21
+ The `message` attribute is a structured representation of the chat message.
22
+ Most of the time, the message will be of type `AIMessage`.
23
23
 
24
24
  Users working with chat models will usually access information via either
25
- ``AIMessage`` (returned from runnable interfaces) or ``LLMResult`` (available
25
+ `AIMessage` (returned from runnable interfaces) or `LLMResult` (available
26
26
  via callbacks).
27
27
  """
28
28
 
@@ -70,9 +70,9 @@ class ChatGeneration(Generation):
70
70
 
71
71
 
72
72
  class ChatGenerationChunk(ChatGeneration):
73
- """``ChatGeneration`` chunk.
73
+ """`ChatGeneration` chunk.
74
74
 
75
- ``ChatGeneration`` chunks can be concatenated with other ``ChatGeneration`` chunks.
75
+ `ChatGeneration` chunks can be concatenated with other `ChatGeneration` chunks.
76
76
  """
77
77
 
78
78
  message: BaseMessageChunk
@@ -84,18 +84,18 @@ class ChatGenerationChunk(ChatGeneration):
84
84
  def __add__(
85
85
  self, other: ChatGenerationChunk | list[ChatGenerationChunk]
86
86
  ) -> ChatGenerationChunk:
87
- """Concatenate two ``ChatGenerationChunk``s.
87
+ """Concatenate two `ChatGenerationChunk`s.
88
88
 
89
89
  Args:
90
- other: The other ``ChatGenerationChunk`` or list of ``ChatGenerationChunk``
90
+ other: The other `ChatGenerationChunk` or list of `ChatGenerationChunk`
91
91
  to concatenate.
92
92
 
93
93
  Raises:
94
- TypeError: If other is not a ``ChatGenerationChunk`` or list of
95
- ``ChatGenerationChunk``.
94
+ TypeError: If other is not a `ChatGenerationChunk` or list of
95
+ `ChatGenerationChunk`.
96
96
 
97
97
  Returns:
98
- A new ``ChatGenerationChunk`` concatenated from self and other.
98
+ A new `ChatGenerationChunk` concatenated from self and other.
99
99
  """
100
100
  if isinstance(other, ChatGenerationChunk):
101
101
  generation_info = merge_dicts(
@@ -124,13 +124,13 @@ class ChatGenerationChunk(ChatGeneration):
124
124
  def merge_chat_generation_chunks(
125
125
  chunks: list[ChatGenerationChunk],
126
126
  ) -> ChatGenerationChunk | None:
127
- """Merge a list of ``ChatGenerationChunk``s into a single ``ChatGenerationChunk``.
127
+ """Merge a list of `ChatGenerationChunk`s into a single `ChatGenerationChunk`.
128
128
 
129
129
  Args:
130
- chunks: A list of ``ChatGenerationChunk`` to merge.
130
+ chunks: A list of `ChatGenerationChunk` to merge.
131
131
 
132
132
  Returns:
133
- A merged ``ChatGenerationChunk``, or None if the input list is empty.
133
+ A merged `ChatGenerationChunk`, or None if the input list is empty.
134
134
  """
135
135
  if not chunks:
136
136
  return None
@@ -44,10 +44,10 @@ class Generation(Serializable):
44
44
 
45
45
  @classmethod
46
46
  def get_lc_namespace(cls) -> list[str]:
47
- """Get the namespace of the langchain object.
47
+ """Get the namespace of the LangChain object.
48
48
 
49
49
  Returns:
50
- ``["langchain", "schema", "output"]``
50
+ `["langchain", "schema", "output"]`
51
51
  """
52
52
  return ["langchain", "schema", "output"]
53
53
 
@@ -56,16 +56,16 @@ class GenerationChunk(Generation):
56
56
  """Generation chunk, which can be concatenated with other Generation chunks."""
57
57
 
58
58
  def __add__(self, other: GenerationChunk) -> GenerationChunk:
59
- """Concatenate two ``GenerationChunk``s.
59
+ """Concatenate two `GenerationChunk`s.
60
60
 
61
61
  Args:
62
- other: Another ``GenerationChunk`` to concatenate with.
62
+ other: Another `GenerationChunk` to concatenate with.
63
63
 
64
64
  Raises:
65
- TypeError: If other is not a ``GenerationChunk``.
65
+ TypeError: If other is not a `GenerationChunk`.
66
66
 
67
67
  Returns:
68
- A new ``GenerationChunk`` concatenated from self and other.
68
+ A new `GenerationChunk` concatenated from self and other.
69
69
  """
70
70
  if isinstance(other, GenerationChunk):
71
71
  generation_info = merge_dicts(
@@ -30,8 +30,8 @@ class LLMResult(BaseModel):
30
30
  The second dimension of the list represents different candidate generations for a
31
31
  given prompt.
32
32
 
33
- - When returned from **an LLM**, the type is ``list[list[Generation]]``.
34
- - When returned from a **chat model**, the type is ``list[list[ChatGeneration]]``.
33
+ - When returned from **an LLM**, the type is `list[list[Generation]]`.
34
+ - When returned from a **chat model**, the type is `list[list[ChatGeneration]]`.
35
35
 
36
36
  ChatGeneration is a subclass of Generation that has a field for a structured chat
37
37
  message.
@@ -91,13 +91,13 @@ class LLMResult(BaseModel):
91
91
  return llm_results
92
92
 
93
93
  def __eq__(self, other: object) -> bool:
94
- """Check for ``LLMResult`` equality by ignoring any metadata related to runs.
94
+ """Check for `LLMResult` equality by ignoring any metadata related to runs.
95
95
 
96
96
  Args:
97
- other: Another ``LLMResult`` object to compare against.
97
+ other: Another `LLMResult` object to compare against.
98
98
 
99
99
  Returns:
100
- True if the generations and ``llm_output`` are equal, False otherwise.
100
+ `True` if the generations and `llm_output` are equal, `False` otherwise.
101
101
  """
102
102
  if not isinstance(other, LLMResult):
103
103
  return NotImplemented
@@ -24,8 +24,8 @@ from langchain_core.messages import (
24
24
  class PromptValue(Serializable, ABC):
25
25
  """Base abstract class for inputs to any language model.
26
26
 
27
- PromptValues can be converted to both LLM (pure text-generation) inputs and
28
- ChatModel inputs.
27
+ `PromptValues` can be converted to both LLM (pure text-generation) inputs and
28
+ chat model inputs.
29
29
  """
30
30
 
31
31
  @classmethod
@@ -35,12 +35,12 @@ class PromptValue(Serializable, ABC):
35
35
 
36
36
  @classmethod
37
37
  def get_lc_namespace(cls) -> list[str]:
38
- """Get the namespace of the langchain object.
38
+ """Get the namespace of the LangChain object.
39
39
 
40
40
  This is used to determine the namespace of the object when serializing.
41
41
 
42
42
  Returns:
43
- ``["langchain", "schema", "prompt"]``
43
+ `["langchain", "schema", "prompt"]`
44
44
  """
45
45
  return ["langchain", "schema", "prompt"]
46
46
 
@@ -62,12 +62,12 @@ class StringPromptValue(PromptValue):
62
62
 
63
63
  @classmethod
64
64
  def get_lc_namespace(cls) -> list[str]:
65
- """Get the namespace of the langchain object.
65
+ """Get the namespace of the LangChain object.
66
66
 
67
67
  This is used to determine the namespace of the object when serializing.
68
68
 
69
69
  Returns:
70
- ``["langchain", "prompts", "base"]``
70
+ `["langchain", "prompts", "base"]`
71
71
  """
72
72
  return ["langchain", "prompts", "base"]
73
73
 
@@ -99,12 +99,12 @@ class ChatPromptValue(PromptValue):
99
99
 
100
100
  @classmethod
101
101
  def get_lc_namespace(cls) -> list[str]:
102
- """Get the namespace of the langchain object.
102
+ """Get the namespace of the LangChain object.
103
103
 
104
104
  This is used to determine the namespace of the object when serializing.
105
105
 
106
106
  Returns:
107
- ``["langchain", "prompts", "chat"]``
107
+ `["langchain", "prompts", "chat"]`
108
108
  """
109
109
  return ["langchain", "prompts", "chat"]
110
110
 
@@ -113,11 +113,11 @@ class ImageURL(TypedDict, total=False):
113
113
  """Image URL."""
114
114
 
115
115
  detail: Literal["auto", "low", "high"]
116
- """Specifies the detail level of the image. Defaults to ``'auto'``.
117
- Can be ``'auto'``, ``'low'``, or ``'high'``.
116
+ """Specifies the detail level of the image.
118
117
 
119
- This follows OpenAI's Chat Completion API's image URL format.
118
+ Can be `'auto'`, `'low'`, or `'high'`.
120
119
 
120
+ This follows OpenAI's Chat Completion API's image URL format.
121
121
  """
122
122
 
123
123
  url: str
@@ -1,28 +1,8 @@
1
1
  """**Prompt** is the input to the model.
2
2
 
3
- Prompt is often constructed
4
- from multiple components and prompt values. Prompt classes and functions make constructing
5
- and working with prompts easy.
6
-
7
- **Class hierarchy:**
8
-
9
- .. code-block::
10
-
11
- BasePromptTemplate --> StringPromptTemplate --> PromptTemplate
12
- FewShotPromptTemplate
13
- FewShotPromptWithTemplates
14
- BaseChatPromptTemplate --> AutoGPTPrompt
15
- ChatPromptTemplate --> AgentScratchPadChatPromptTemplate
16
-
17
-
18
-
19
- BaseMessagePromptTemplate --> MessagesPlaceholder
20
- BaseStringMessagePromptTemplate --> ChatMessagePromptTemplate
21
- HumanMessagePromptTemplate
22
- AIMessagePromptTemplate
23
- SystemMessagePromptTemplate
24
-
25
- """ # noqa: E501
3
+ Prompt is often constructed from multiple components and prompt values. Prompt classes
4
+ and functions make constructing and working with prompts easy.
5
+ """
26
6
 
27
7
  from typing import TYPE_CHECKING
28
8
 
@@ -96,10 +96,10 @@ class BasePromptTemplate(
96
96
 
97
97
  @classmethod
98
98
  def get_lc_namespace(cls) -> list[str]:
99
- """Get the namespace of the langchain object.
99
+ """Get the namespace of the LangChain object.
100
100
 
101
101
  Returns:
102
- ``["langchain", "schema", "prompt_template"]``
102
+ `["langchain", "schema", "prompt_template"]`
103
103
  """
104
104
  return ["langchain", "schema", "prompt_template"]
105
105
 
@@ -127,10 +127,10 @@ class BasePromptTemplate(
127
127
  """Get the input schema for the prompt.
128
128
 
129
129
  Args:
130
- config: RunnableConfig, configuration for the prompt.
130
+ config: configuration for the prompt.
131
131
 
132
132
  Returns:
133
- Type[BaseModel]: The input schema for the prompt.
133
+ The input schema for the prompt.
134
134
  """
135
135
  # This is correct, but pydantic typings/mypy don't think so.
136
136
  required_input_variables = {
@@ -199,7 +199,7 @@ class BasePromptTemplate(
199
199
  config: RunnableConfig, configuration for the prompt.
200
200
 
201
201
  Returns:
202
- PromptValue: The output of the prompt.
202
+ The output of the prompt.
203
203
  """
204
204
  config = ensure_config(config)
205
205
  if self.metadata:
@@ -225,7 +225,7 @@ class BasePromptTemplate(
225
225
  config: RunnableConfig, configuration for the prompt.
226
226
 
227
227
  Returns:
228
- PromptValue: The output of the prompt.
228
+ The output of the prompt.
229
229
  """
230
230
  config = ensure_config(config)
231
231
  if self.metadata:
@@ -245,20 +245,20 @@ class BasePromptTemplate(
245
245
  """Create Prompt Value.
246
246
 
247
247
  Args:
248
- kwargs: Any arguments to be passed to the prompt template.
248
+ **kwargs: Any arguments to be passed to the prompt template.
249
249
 
250
250
  Returns:
251
- PromptValue: The output of the prompt.
251
+ The output of the prompt.
252
252
  """
253
253
 
254
254
  async def aformat_prompt(self, **kwargs: Any) -> PromptValue:
255
255
  """Async create Prompt Value.
256
256
 
257
257
  Args:
258
- kwargs: Any arguments to be passed to the prompt template.
258
+ **kwargs: Any arguments to be passed to the prompt template.
259
259
 
260
260
  Returns:
261
- PromptValue: The output of the prompt.
261
+ The output of the prompt.
262
262
  """
263
263
  return self.format_prompt(**kwargs)
264
264
 
@@ -266,10 +266,10 @@ class BasePromptTemplate(
266
266
  """Return a partial of the prompt template.
267
267
 
268
268
  Args:
269
- kwargs: Union[str, Callable[[], str]], partial variables to set.
269
+ **kwargs: partial variables to set.
270
270
 
271
271
  Returns:
272
- BasePromptTemplate: A partial of the prompt template.
272
+ A partial of the prompt template.
273
273
  """
274
274
  prompt_dict = self.__dict__.copy()
275
275
  prompt_dict["input_variables"] = list(
@@ -290,34 +290,30 @@ class BasePromptTemplate(
290
290
  """Format the prompt with the inputs.
291
291
 
292
292
  Args:
293
- kwargs: Any arguments to be passed to the prompt template.
293
+ **kwargs: Any arguments to be passed to the prompt template.
294
294
 
295
295
  Returns:
296
296
  A formatted string.
297
297
 
298
298
  Example:
299
-
300
- .. code-block:: python
301
-
302
- prompt.format(variable1="foo")
303
-
299
+ ```python
300
+ prompt.format(variable1="foo")
301
+ ```
304
302
  """
305
303
 
306
304
  async def aformat(self, **kwargs: Any) -> FormatOutputType:
307
305
  """Async format the prompt with the inputs.
308
306
 
309
307
  Args:
310
- kwargs: Any arguments to be passed to the prompt template.
308
+ **kwargs: Any arguments to be passed to the prompt template.
311
309
 
312
310
  Returns:
313
311
  A formatted string.
314
312
 
315
313
  Example:
316
-
317
- .. code-block:: python
318
-
319
- await prompt.aformat(variable1="foo")
320
-
314
+ ```python
315
+ await prompt.aformat(variable1="foo")
316
+ ```
321
317
  """
322
318
  return self.format(**kwargs)
323
319
 
@@ -330,10 +326,10 @@ class BasePromptTemplate(
330
326
  """Return dictionary representation of prompt.
331
327
 
332
328
  Args:
333
- kwargs: Any additional arguments to pass to the dictionary.
329
+ **kwargs: Any additional arguments to pass to the dictionary.
334
330
 
335
331
  Returns:
336
- Dict: Dictionary representation of the prompt.
332
+ Dictionary representation of the prompt.
337
333
  """
338
334
  prompt_dict = super().model_dump(**kwargs)
339
335
  with contextlib.suppress(NotImplementedError):
@@ -352,10 +348,9 @@ class BasePromptTemplate(
352
348
  NotImplementedError: If the prompt type is not implemented.
353
349
 
354
350
  Example:
355
- .. code-block:: python
356
-
357
- prompt.save(file_path="path/prompt.yaml")
358
-
351
+ ```python
352
+ prompt.save(file_path="path/prompt.yaml")
353
+ ```
359
354
  """
360
355
  if self.partial_variables:
361
356
  msg = "Cannot save prompt with partial variables."
@@ -426,16 +421,16 @@ def format_document(doc: Document, prompt: BasePromptTemplate[str]) -> str:
426
421
  string of the document formatted.
427
422
 
428
423
  Example:
429
- .. code-block:: python
430
-
431
- from langchain_core.documents import Document
432
- from langchain_core.prompts import PromptTemplate
424
+ ```python
425
+ from langchain_core.documents import Document
426
+ from langchain_core.prompts import PromptTemplate
433
427
 
434
- doc = Document(page_content="This is a joke", metadata={"page": "1"})
435
- prompt = PromptTemplate.from_template("Page {page}: {page_content}")
436
- format_document(doc, prompt)
437
- >>> "Page 1: This is a joke"
428
+ doc = Document(page_content="This is a joke", metadata={"page": "1"})
429
+ prompt = PromptTemplate.from_template("Page {page}: {page_content}")
430
+ format_document(doc, prompt)
431
+ >>> "Page 1: This is a joke"
438
432
 
433
+ ```
439
434
  """
440
435
  return prompt.format(**_get_document_info(doc, prompt))
441
436