langchain-core 0.3.76__py3-none-any.whl → 0.3.77__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of langchain-core might be problematic.

Files changed (49)
  1. langchain_core/_api/beta_decorator.py +6 -5
  2. langchain_core/_api/deprecation.py +11 -11
  3. langchain_core/callbacks/base.py +17 -11
  4. langchain_core/callbacks/manager.py +2 -2
  5. langchain_core/callbacks/usage.py +2 -2
  6. langchain_core/chat_history.py +26 -16
  7. langchain_core/document_loaders/langsmith.py +1 -1
  8. langchain_core/indexing/api.py +31 -31
  9. langchain_core/language_models/chat_models.py +4 -2
  10. langchain_core/language_models/fake_chat_models.py +5 -2
  11. langchain_core/language_models/llms.py +3 -1
  12. langchain_core/load/serializable.py +1 -1
  13. langchain_core/messages/ai.py +22 -10
  14. langchain_core/messages/base.py +30 -16
  15. langchain_core/messages/chat.py +4 -1
  16. langchain_core/messages/function.py +9 -5
  17. langchain_core/messages/human.py +11 -4
  18. langchain_core/messages/modifier.py +1 -0
  19. langchain_core/messages/system.py +9 -2
  20. langchain_core/messages/tool.py +27 -16
  21. langchain_core/messages/utils.py +92 -83
  22. langchain_core/outputs/chat_generation.py +10 -6
  23. langchain_core/prompt_values.py +6 -2
  24. langchain_core/prompts/chat.py +6 -3
  25. langchain_core/prompts/few_shot.py +4 -1
  26. langchain_core/runnables/base.py +14 -13
  27. langchain_core/runnables/graph.py +4 -1
  28. langchain_core/runnables/graph_ascii.py +1 -1
  29. langchain_core/runnables/graph_mermaid.py +27 -10
  30. langchain_core/runnables/retry.py +35 -18
  31. langchain_core/stores.py +6 -6
  32. langchain_core/tools/base.py +7 -5
  33. langchain_core/tools/convert.py +2 -2
  34. langchain_core/tools/simple.py +1 -5
  35. langchain_core/tools/structured.py +0 -10
  36. langchain_core/tracers/event_stream.py +13 -15
  37. langchain_core/utils/aiter.py +1 -1
  38. langchain_core/utils/function_calling.py +13 -8
  39. langchain_core/utils/iter.py +1 -1
  40. langchain_core/utils/json.py +7 -1
  41. langchain_core/utils/json_schema.py +145 -39
  42. langchain_core/utils/pydantic.py +6 -5
  43. langchain_core/utils/utils.py +1 -1
  44. langchain_core/vectorstores/in_memory.py +5 -5
  45. langchain_core/version.py +1 -1
  46. {langchain_core-0.3.76.dist-info → langchain_core-0.3.77.dist-info}/METADATA +8 -18
  47. {langchain_core-0.3.76.dist-info → langchain_core-0.3.77.dist-info}/RECORD +49 -49
  48. {langchain_core-0.3.76.dist-info → langchain_core-0.3.77.dist-info}/WHEEL +0 -0
  49. {langchain_core-0.3.76.dist-info → langchain_core-0.3.77.dist-info}/entry_points.txt +0 -0

langchain_core/messages/base.py

@@ -20,7 +20,7 @@ if TYPE_CHECKING:
 class BaseMessage(Serializable):
     """Base abstract message class.
 
-    Messages are the inputs and outputs of ChatModels.
+    Messages are the inputs and outputs of ``ChatModel``s.
     """
 
     content: Union[str, list[Union[str, dict]]]
@@ -31,17 +31,18 @@ class BaseMessage(Serializable):
 
     For example, for a message from an AI, this could include tool calls as
     encoded by the model provider.
+
     """
 
     response_metadata: dict = Field(default_factory=dict)
-    """Response metadata. For example: response headers, logprobs, token counts, model
-    name."""
+    """Examples: response headers, logprobs, token counts, model name."""
 
     type: str
     """The type of the message. Must be a string that is unique to the message type.
 
     The purpose of this field is to allow for easy identification of the message type
     when deserializing messages.
+
     """
 
     name: Optional[str] = None
@@ -51,20 +52,26 @@ class BaseMessage(Serializable):
 
     Usage of this field is optional, and whether it's used or not is up to the
     model implementation.
+
     """
 
     id: Optional[str] = Field(default=None, coerce_numbers_to_str=True)
-    """An optional unique identifier for the message. This should ideally be
-    provided by the provider/model which created the message."""
+    """An optional unique identifier for the message.
+
+    This should ideally be provided by the provider/model which created the message.
+
+    """
 
     model_config = ConfigDict(
         extra="allow",
     )
 
     def __init__(
-        self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
+        self,
+        content: Union[str, list[Union[str, dict]]],
+        **kwargs: Any,
     ) -> None:
-        """Pass in content as positional arg.
+        """Initialize ``BaseMessage``.
 
         Args:
             content: The string contents of the message.
@@ -73,7 +80,7 @@ class BaseMessage(Serializable):
 
     @classmethod
     def is_lc_serializable(cls) -> bool:
-        """BaseMessage is serializable.
+        """``BaseMessage`` is serializable.
 
         Returns:
             True
@@ -90,10 +97,11 @@ class BaseMessage(Serializable):
         return ["langchain", "schema", "messages"]
 
     def text(self) -> str:
-        """Get the text content of the message.
+        """Get the text ``content`` of the message.
 
         Returns:
             The text content of the message.
+
         """
         if isinstance(self.content, str):
             return self.content
@@ -136,6 +144,7 @@ class BaseMessage(Serializable):
 
         Returns:
             A pretty representation of the message.
+
         """
         title = get_msg_title_repr(self.type.title() + " Message", bold=html)
         # TODO: handle non-string content.
@@ -155,11 +164,12 @@ def merge_content(
     """Merge multiple message contents.
 
     Args:
-        first_content: The first content. Can be a string or a list.
-        contents: The other contents. Can be a string or a list.
+        first_content: The first ``content``. Can be a string or a list.
+        contents: The other ``content``s. Can be a string or a list.
 
     Returns:
         The merged content.
+
     """
     merged = first_content
     for content in contents:
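
As a rough illustration of the merging behavior documented in the hunk above, a minimal sketch; the string-concatenation case follows directly from the docstring, while the handling of list contents is an assumption to verify against your installed langchain-core version:

    from langchain_core.messages.base import merge_content

    # Two string contents are concatenated into a single string.
    assert merge_content("Hello", " World") == "Hello World"

    # Mixing a string with a list of content blocks yields a merged list
    # (assumed behavior; confirm against your installed version).
    print(merge_content("Hello", [{"type": "text", "text": " World"}]))
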
@@ -207,9 +217,10 @@ class BaseMessageChunk(BaseMessage):
 
         For example,
 
-        `AIMessageChunk(content="Hello") + AIMessageChunk(content=" World")`
+        ``AIMessageChunk(content="Hello") + AIMessageChunk(content=" World")``
+
+        will give ``AIMessageChunk(content="Hello World")``
 
-        will give `AIMessageChunk(content="Hello World")`
         """
         if isinstance(other, BaseMessageChunk):
             # If both are (subclasses of) BaseMessageChunk,
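
To make the concatenation described in the hunk above concrete, a minimal sketch using only the public AIMessageChunk API named in the docstring:

    from langchain_core.messages import AIMessageChunk

    # Adding two chunks merges their content (and other mergeable fields).
    chunk = AIMessageChunk(content="Hello") + AIMessageChunk(content=" World")
    print(chunk.content)  # "Hello World"
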
@@ -257,8 +268,9 @@ def message_to_dict(message: BaseMessage) -> dict:
         message: Message to convert.
 
     Returns:
-        Message as a dict. The dict will have a "type" key with the message type
-        and a "data" key with the message data as a dict.
+        Message as a dict. The dict will have a ``type`` key with the message type
+        and a ``data`` key with the message data as a dict.
+
     """
     return {"type": message.type, "data": message.model_dump()}
 
@@ -267,10 +279,11 @@ def messages_to_dict(messages: Sequence[BaseMessage]) -> list[dict]:
     """Convert a sequence of Messages to a list of dictionaries.
 
    Args:
-        messages: Sequence of messages (as BaseMessages) to convert.
+        messages: Sequence of messages (as ``BaseMessage``s) to convert.
 
     Returns:
         List of messages as dicts.
+
     """
     return [message_to_dict(m) for m in messages]
 
@@ -284,6 +297,7 @@ def get_msg_title_repr(title: str, *, bold: bool = False) -> str:
 
     Returns:
         The title representation.
+
     """
     padded = " " + title + " "
     sep_len = (80 - len(padded)) // 2
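
For context on the message_to_dict / messages_to_dict helpers touched in the hunks above, a short round-trip sketch; messages_from_dict is assumed to be the matching deserializer exported from langchain_core.messages:

    from langchain_core.messages import AIMessage, HumanMessage, messages_from_dict
    from langchain_core.messages.base import messages_to_dict

    msgs = [HumanMessage(content="What is 2 + 2?"), AIMessage(content="4")]

    # Each entry carries a "type" key and a "data" key, per the docstring above.
    as_dicts = messages_to_dict(msgs)
    print(as_dicts[0]["type"])  # "human"

    # Restore the original message objects from the dict representation.
    restored = messages_from_dict(as_dicts)
    assert restored[0].content == "What is 2 + 2?"
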

langchain_core/messages/chat.py

@@ -30,7 +30,10 @@ class ChatMessageChunk(ChatMessage, BaseMessageChunk):
     # non-chunk variant.
     type: Literal["ChatMessageChunk"] = "ChatMessageChunk"  # type: ignore[assignment]
     """The type of the message (used during serialization).
-    Defaults to "ChatMessageChunk"."""
+
+    Defaults to ``'ChatMessageChunk'``.
+
+    """
 
     @override
     def __add__(self, other: Any) -> BaseMessageChunk:  # type: ignore[override]

langchain_core/messages/function.py

@@ -15,19 +15,20 @@ from langchain_core.utils._merge import merge_dicts
 class FunctionMessage(BaseMessage):
     """Message for passing the result of executing a tool back to a model.
 
-    FunctionMessage are an older version of the ToolMessage schema, and
-    do not contain the tool_call_id field.
+    ``FunctionMessage`` are an older version of the ``ToolMessage`` schema, and
+    do not contain the ``tool_call_id`` field.
 
-    The tool_call_id field is used to associate the tool call request with the
+    The ``tool_call_id`` field is used to associate the tool call request with the
     tool call response. This is useful in situations where a chat model is able
     to request multiple tool calls in parallel.
+
     """
 
     name: str
     """The name of the function that was executed."""
 
     type: Literal["function"] = "function"
-    """The type of the message (used for serialization). Defaults to "function"."""
+    """The type of the message (used for serialization). Defaults to ``'function'``."""
 
 
 class FunctionMessageChunk(FunctionMessage, BaseMessageChunk):
@@ -38,7 +39,10 @@ class FunctionMessageChunk(FunctionMessage, BaseMessageChunk):
     # non-chunk variant.
     type: Literal["FunctionMessageChunk"] = "FunctionMessageChunk"  # type: ignore[assignment]
     """The type of the message (used for serialization).
-    Defaults to "FunctionMessageChunk"."""
+
+    Defaults to ``'FunctionMessageChunk'``.
+
+    """
 
     @override
     def __add__(self, other: Any) -> BaseMessageChunk:  # type: ignore[override]
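
A brief sketch contrasting the legacy FunctionMessage with the ToolMessage schema it predates, as described in the docstring above (the tool name, result, and call id below are made up for illustration):

    from langchain_core.messages import FunctionMessage, ToolMessage

    # Legacy schema: no tool_call_id, so parallel tool calls cannot be disambiguated.
    legacy = FunctionMessage(name="multiply", content="42")

    # Current schema: tool_call_id ties this result back to a specific tool call request.
    modern = ToolMessage(content="42", tool_call_id="call_abc123")
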

langchain_core/messages/human.py

@@ -8,7 +8,7 @@ from langchain_core.messages.base import BaseMessage, BaseMessageChunk
 class HumanMessage(BaseMessage):
     """Message from a human.
 
-    HumanMessages are messages that are passed in from a human to the model.
+    ``HumanMessage``s are messages that are passed in from a human to the model.
 
     Example:
 
@@ -32,15 +32,22 @@ class HumanMessage(BaseMessage):
 
     At the moment, this is ignored by most models. Usage is discouraged.
     Defaults to False.
+
     """
 
     type: Literal["human"] = "human"
-    """The type of the message (used for serialization). Defaults to "human"."""
+    """The type of the message (used for serialization).
+
+    Defaults to ``'human'``.
+
+    """
 
     def __init__(
-        self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
+        self,
+        content: Union[str, list[Union[str, dict]]],
+        **kwargs: Any,
     ) -> None:
-        """Pass in content as positional arg.
+        """Initialize ``HumanMessage``.
 
         Args:
             content: The string contents of the message.
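
The reformatted __init__ above keeps content as the first positional argument; a minimal usage sketch (the name and id values are hypothetical):

    from langchain_core.messages import HumanMessage

    # content is positional; optional fields such as name and id go through kwargs.
    msg = HumanMessage("Translate 'bonjour' to English.", name="alice", id="msg-001")
    print(msg.type)     # "human"
    print(msg.content)  # "Translate 'bonjour' to English."
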

langchain_core/messages/modifier.py

@@ -24,6 +24,7 @@ class RemoveMessage(BaseMessage):
 
         Raises:
             ValueError: If the 'content' field is passed in kwargs.
+
         """
         if kwargs.pop("content", None):
             msg = "RemoveMessage does not support 'content' field."

langchain_core/messages/system.py

@@ -28,7 +28,11 @@ class SystemMessage(BaseMessage):
     """
 
     type: Literal["system"] = "system"
-    """The type of the message (used for serialization). Defaults to "system"."""
+    """The type of the message (used for serialization).
+
+    Defaults to ``'system'``.
+
+    """
 
     def __init__(
         self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
@@ -50,4 +54,7 @@ class SystemMessageChunk(SystemMessage, BaseMessageChunk):
     # non-chunk variant.
     type: Literal["SystemMessageChunk"] = "SystemMessageChunk"  # type: ignore[assignment]
     """The type of the message (used for serialization).
-    Defaults to "SystemMessageChunk"."""
+
+    Defaults to ``'SystemMessageChunk'``.
+
+    """

langchain_core/messages/tool.py

@@ -14,19 +14,20 @@ from langchain_core.utils._merge import merge_dicts, merge_obj
 class ToolOutputMixin:
     """Mixin for objects that tools can return directly.
 
-    If a custom BaseTool is invoked with a ToolCall and the output of custom code is
-    not an instance of ToolOutputMixin, the output will automatically be coerced to a
-    string and wrapped in a ToolMessage.
+    If a custom BaseTool is invoked with a ``ToolCall`` and the output of custom code is
+    not an instance of ``ToolOutputMixin``, the output will automatically be coerced to
+    a string and wrapped in a ``ToolMessage``.
+
     """
 
 
 class ToolMessage(BaseMessage, ToolOutputMixin):
     """Message for passing the result of executing a tool back to a model.
 
-    ToolMessages contain the result of a tool invocation. Typically, the result
-    is encoded inside the `content` field.
+    ``ToolMessage``s contain the result of a tool invocation. Typically, the result
+    is encoded inside the ``content`` field.
 
-    Example: A ToolMessage representing a result of 42 from a tool call with id
+    Example: A ``ToolMessage`` representing a result of ``42`` from a tool call with id
 
     .. code-block:: python
 
@@ -35,7 +36,7 @@ class ToolMessage(BaseMessage, ToolOutputMixin):
         ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL")
 
 
-    Example: A ToolMessage where only part of the tool output is sent to the model
+    Example: A ``ToolMessage`` where only part of the tool output is sent to the model
     and the full output is passed in to artifact.
 
     .. versionadded:: 0.2.17
@@ -57,7 +58,7 @@ class ToolMessage(BaseMessage, ToolOutputMixin):
             tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL",
         )
 
-    The tool_call_id field is used to associate the tool call request with the
+    The ``tool_call_id`` field is used to associate the tool call request with the
     tool call response. This is useful in situations where a chat model is able
     to request multiple tool calls in parallel.
 
@@ -67,7 +68,11 @@ class ToolMessage(BaseMessage, ToolOutputMixin):
     """Tool call that this message is responding to."""
 
     type: Literal["tool"] = "tool"
-    """The type of the message (used for serialization). Defaults to "tool"."""
+    """The type of the message (used for serialization).
+
+    Defaults to ``'tool'``.
+
+    """
 
     artifact: Any = None
     """Artifact of the Tool execution which is not meant to be sent to the model.
@@ -77,12 +82,14 @@ class ToolMessage(BaseMessage, ToolOutputMixin):
     output is needed in other parts of the code.
 
     .. versionadded:: 0.2.17
+
     """
 
     status: Literal["success", "error"] = "success"
     """Status of the tool invocation.
 
     .. versionadded:: 0.2.24
+
     """
 
     additional_kwargs: dict = Field(default_factory=dict, repr=False)
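
A hedged example of the artifact and status fields documented above, with only a summary sent to the model and the full payload kept for downstream code (the tool_call_id and payload are invented):

    from langchain_core.messages import ToolMessage

    full_output = {"rows": list(range(1000)), "summary": "1000 rows fetched"}

    msg = ToolMessage(
        content=full_output["summary"],  # only this part is sent to the model
        artifact=full_output,            # full output retained for other code paths
        tool_call_id="call_abc123",      # id of the tool call request being answered
        status="success",                # or "error" if the invocation failed
    )
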
@@ -97,6 +104,7 @@ class ToolMessage(BaseMessage, ToolOutputMixin):
 
         Args:
             values: The model arguments.
+
         """
         content = values["content"]
         if isinstance(content, tuple):
@@ -135,9 +143,11 @@ class ToolMessage(BaseMessage, ToolOutputMixin):
         return values
 
     def __init__(
-        self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
+        self,
+        content: Union[str, list[Union[str, dict]]],
+        **kwargs: Any,
     ) -> None:
-        """Create a ToolMessage.
+        """Initialize ``ToolMessage``.
 
         Args:
             content: The string contents of the message.
@@ -187,8 +197,8 @@ class ToolCall(TypedDict):
 
         {"name": "foo", "args": {"a": 1}, "id": "123"}
 
-    This represents a request to call the tool named "foo" with arguments {"a": 1}
-    and an identifier of "123".
+    This represents a request to call the tool named ``'foo'`` with arguments
+    ``{"a": 1}`` and an identifier of ``'123'``.
 
     """
 
@@ -201,6 +211,7 @@ class ToolCall(TypedDict):
 
     An identifier is needed to associate a tool call request with a tool
     call result in events when multiple concurrent tool calls are made.
+
     """
     type: NotRequired[Literal["tool_call"]]
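
For reference, the ToolCall shape shown above can be built with the tool_call factory defined in the same module (the values are illustrative, and keyword-only arguments are assumed):

    from langchain_core.messages.tool import tool_call

    call = tool_call(name="foo", args={"a": 1}, id="123")
    print(call["name"], call["args"], call["id"])  # foo {'a': 1} 123
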
 
@@ -227,9 +238,9 @@ def tool_call(
 class ToolCallChunk(TypedDict):
     """A chunk of a tool call (e.g., as part of a stream).
 
-    When merging ToolCallChunks (e.g., via AIMessageChunk.__add__),
+    When merging ``ToolCallChunk``s (e.g., via ``AIMessageChunk.__add__``),
     all string attributes are concatenated. Chunks are only merged if their
-    values of `index` are equal and not None.
+    values of ``index`` are equal and not None.
 
     Example:
 
@@ -282,7 +293,7 @@ def tool_call_chunk(
 class InvalidToolCall(TypedDict):
     """Allowance for errors made by LLM.
 
-    Here we add an `error` key to surface errors made during generation
+    Here we add an ``error`` key to surface errors made during generation
     (e.g., invalid JSON arguments.)
     """
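
To illustrate the index-based merge described for ToolCallChunk above, a sketch of adding two streamed AIMessageChunks whose tool call chunks share index 0; the argument fragments are invented, and the tool_call_chunk factory is assumed to accept name, args, id, and index keywords:

    from langchain_core.messages import AIMessageChunk
    from langchain_core.messages.tool import tool_call_chunk

    left = AIMessageChunk(
        content="",
        tool_call_chunks=[tool_call_chunk(name="foo", args='{"a":', id="call_1", index=0)],
    )
    right = AIMessageChunk(
        content="",
        tool_call_chunks=[tool_call_chunk(args=" 1}", index=0)],
    )

    # Same index, so the string fields are concatenated when the chunks are merged.
    merged = left + right
    print(merged.tool_call_chunks[0]["args"])  # '{"a": 1}'
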