langchain-core 0.4.0.dev0__py3-none-any.whl → 1.0.0a1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.



Files changed (74)
  1. langchain_core/_api/beta_decorator.py +2 -2
  2. langchain_core/_api/deprecation.py +1 -1
  3. langchain_core/beta/runnables/context.py +1 -1
  4. langchain_core/callbacks/base.py +14 -23
  5. langchain_core/callbacks/file.py +13 -2
  6. langchain_core/callbacks/manager.py +74 -157
  7. langchain_core/callbacks/streaming_stdout.py +3 -4
  8. langchain_core/callbacks/usage.py +2 -12
  9. langchain_core/chat_history.py +6 -6
  10. langchain_core/documents/base.py +1 -1
  11. langchain_core/documents/compressor.py +9 -6
  12. langchain_core/indexing/base.py +2 -2
  13. langchain_core/language_models/_utils.py +230 -101
  14. langchain_core/language_models/base.py +35 -23
  15. langchain_core/language_models/chat_models.py +245 -53
  16. langchain_core/language_models/fake_chat_models.py +28 -81
  17. langchain_core/load/dump.py +3 -4
  18. langchain_core/messages/__init__.py +38 -22
  19. langchain_core/messages/ai.py +188 -30
  20. langchain_core/messages/base.py +164 -25
  21. langchain_core/messages/block_translators/__init__.py +89 -0
  22. langchain_core/messages/block_translators/anthropic.py +451 -0
  23. langchain_core/messages/block_translators/bedrock.py +45 -0
  24. langchain_core/messages/block_translators/bedrock_converse.py +47 -0
  25. langchain_core/messages/block_translators/google_genai.py +45 -0
  26. langchain_core/messages/block_translators/google_vertexai.py +47 -0
  27. langchain_core/messages/block_translators/groq.py +45 -0
  28. langchain_core/messages/block_translators/langchain_v0.py +297 -0
  29. langchain_core/messages/block_translators/ollama.py +45 -0
  30. langchain_core/messages/block_translators/openai.py +586 -0
  31. langchain_core/messages/{content_blocks.py → content.py} +346 -213
  32. langchain_core/messages/human.py +29 -9
  33. langchain_core/messages/system.py +29 -9
  34. langchain_core/messages/tool.py +94 -13
  35. langchain_core/messages/utils.py +32 -234
  36. langchain_core/output_parsers/base.py +14 -50
  37. langchain_core/output_parsers/json.py +2 -5
  38. langchain_core/output_parsers/list.py +2 -7
  39. langchain_core/output_parsers/openai_functions.py +5 -28
  40. langchain_core/output_parsers/openai_tools.py +49 -90
  41. langchain_core/output_parsers/pydantic.py +2 -3
  42. langchain_core/output_parsers/transform.py +12 -53
  43. langchain_core/output_parsers/xml.py +9 -17
  44. langchain_core/prompt_values.py +8 -112
  45. langchain_core/prompts/chat.py +1 -3
  46. langchain_core/runnables/base.py +500 -451
  47. langchain_core/runnables/branch.py +1 -1
  48. langchain_core/runnables/fallbacks.py +4 -4
  49. langchain_core/runnables/history.py +1 -1
  50. langchain_core/runnables/passthrough.py +3 -3
  51. langchain_core/runnables/retry.py +1 -1
  52. langchain_core/runnables/router.py +1 -1
  53. langchain_core/structured_query.py +3 -7
  54. langchain_core/tools/base.py +14 -41
  55. langchain_core/tools/convert.py +2 -22
  56. langchain_core/tools/retriever.py +1 -8
  57. langchain_core/tools/structured.py +2 -10
  58. langchain_core/tracers/_streaming.py +6 -7
  59. langchain_core/tracers/base.py +7 -14
  60. langchain_core/tracers/core.py +4 -27
  61. langchain_core/tracers/event_stream.py +4 -15
  62. langchain_core/tracers/langchain.py +3 -14
  63. langchain_core/tracers/log_stream.py +2 -3
  64. langchain_core/utils/_merge.py +45 -7
  65. langchain_core/utils/function_calling.py +22 -9
  66. langchain_core/utils/utils.py +29 -0
  67. langchain_core/version.py +1 -1
  68. {langchain_core-0.4.0.dev0.dist-info → langchain_core-1.0.0a1.dist-info}/METADATA +7 -9
  69. {langchain_core-0.4.0.dev0.dist-info → langchain_core-1.0.0a1.dist-info}/RECORD +71 -64
  70. langchain_core/v1/__init__.py +0 -1
  71. langchain_core/v1/chat_models.py +0 -1047
  72. langchain_core/v1/messages.py +0 -755
  73. {langchain_core-0.4.0.dev0.dist-info → langchain_core-1.0.0a1.dist-info}/WHEEL +0 -0
  74. {langchain_core-0.4.0.dev0.dist-info → langchain_core-1.0.0a1.dist-info}/entry_points.txt +0 -0

langchain_core/output_parsers/transform.py

@@ -20,7 +20,6 @@ from langchain_core.outputs import (
     GenerationChunk,
 )
 from langchain_core.runnables.config import run_in_executor
-from langchain_core.v1.messages import AIMessage, AIMessageChunk
 
 if TYPE_CHECKING:
     from collections.abc import AsyncIterator, Iterator
@@ -33,27 +32,23 @@ class BaseTransformOutputParser(BaseOutputParser[T]):
 
     def _transform(
         self,
-        input: Iterator[Union[str, BaseMessage, AIMessage]],
+        input: Iterator[Union[str, BaseMessage]],
     ) -> Iterator[T]:
         for chunk in input:
             if isinstance(chunk, BaseMessage):
                 yield self.parse_result([ChatGeneration(message=chunk)])
-            elif isinstance(chunk, AIMessage):
-                yield self.parse_result(chunk)
             else:
                 yield self.parse_result([Generation(text=chunk)])
 
     async def _atransform(
         self,
-        input: AsyncIterator[Union[str, BaseMessage, AIMessage]],
+        input: AsyncIterator[Union[str, BaseMessage]],
     ) -> AsyncIterator[T]:
         async for chunk in input:
             if isinstance(chunk, BaseMessage):
                 yield await run_in_executor(
                     None, self.parse_result, [ChatGeneration(message=chunk)]
                 )
-            elif isinstance(chunk, AIMessage):
-                yield await run_in_executor(None, self.parse_result, chunk)
             else:
                 yield await run_in_executor(
                     None, self.parse_result, [Generation(text=chunk)]
@@ -62,7 +57,7 @@ class BaseTransformOutputParser(BaseOutputParser[T]):
     @override
     def transform(
         self,
-        input: Iterator[Union[str, BaseMessage, AIMessage]],
+        input: Iterator[Union[str, BaseMessage]],
         config: Optional[RunnableConfig] = None,
         **kwargs: Any,
     ) -> Iterator[T]:
@@ -83,7 +78,7 @@ class BaseTransformOutputParser(BaseOutputParser[T]):
     @override
     async def atransform(
         self,
-        input: AsyncIterator[Union[str, BaseMessage, AIMessage]],
+        input: AsyncIterator[Union[str, BaseMessage]],
         config: Optional[RunnableConfig] = None,
         **kwargs: Any,
     ) -> AsyncIterator[T]:
@@ -130,42 +125,23 @@ class BaseCumulativeTransformOutputParser(BaseTransformOutputParser[T]):
         raise NotImplementedError
 
     @override
-    def _transform(
-        self, input: Iterator[Union[str, BaseMessage, AIMessage]]
-    ) -> Iterator[Any]:
+    def _transform(self, input: Iterator[Union[str, BaseMessage]]) -> Iterator[Any]:
         prev_parsed = None
-        acc_gen: Union[GenerationChunk, ChatGenerationChunk, AIMessageChunk, None] = (
-            None
-        )
+        acc_gen: Union[GenerationChunk, ChatGenerationChunk, None] = None
         for chunk in input:
-            chunk_gen: Union[GenerationChunk, ChatGenerationChunk, AIMessageChunk]
+            chunk_gen: Union[GenerationChunk, ChatGenerationChunk]
             if isinstance(chunk, BaseMessageChunk):
                 chunk_gen = ChatGenerationChunk(message=chunk)
             elif isinstance(chunk, BaseMessage):
                 chunk_gen = ChatGenerationChunk(
                     message=BaseMessageChunk(**chunk.model_dump())
                 )
-            elif isinstance(chunk, AIMessageChunk):
-                chunk_gen = chunk
-            elif isinstance(chunk, AIMessage):
-                chunk_gen = AIMessageChunk(
-                    content=chunk.content,
-                    id=chunk.id,
-                    name=chunk.name,
-                    lc_version=chunk.lc_version,
-                    response_metadata=chunk.response_metadata,
-                    usage_metadata=chunk.usage_metadata,
-                    parsed=chunk.parsed,
-                )
             else:
                 chunk_gen = GenerationChunk(text=chunk)
 
             acc_gen = chunk_gen if acc_gen is None else acc_gen + chunk_gen  # type: ignore[operator]
 
-            if isinstance(acc_gen, AIMessageChunk):
-                parsed = self.parse_result(acc_gen, partial=True)
-            else:
-                parsed = self.parse_result([acc_gen], partial=True)
+            parsed = self.parse_result([acc_gen], partial=True)
             if parsed is not None and parsed != prev_parsed:
                 if self.diff:
                     yield self._diff(prev_parsed, parsed)
@@ -175,41 +151,24 @@ class BaseCumulativeTransformOutputParser(BaseTransformOutputParser[T]):
 
     @override
     async def _atransform(
-        self, input: AsyncIterator[Union[str, BaseMessage, AIMessage]]
+        self, input: AsyncIterator[Union[str, BaseMessage]]
    ) -> AsyncIterator[T]:
         prev_parsed = None
-        acc_gen: Union[GenerationChunk, ChatGenerationChunk, AIMessageChunk, None] = (
-            None
-        )
+        acc_gen: Union[GenerationChunk, ChatGenerationChunk, None] = None
         async for chunk in input:
-            chunk_gen: Union[GenerationChunk, ChatGenerationChunk, AIMessageChunk]
+            chunk_gen: Union[GenerationChunk, ChatGenerationChunk]
             if isinstance(chunk, BaseMessageChunk):
                 chunk_gen = ChatGenerationChunk(message=chunk)
             elif isinstance(chunk, BaseMessage):
                 chunk_gen = ChatGenerationChunk(
                     message=BaseMessageChunk(**chunk.model_dump())
                 )
-            elif isinstance(chunk, AIMessageChunk):
-                chunk_gen = chunk
-            elif isinstance(chunk, AIMessage):
-                chunk_gen = AIMessageChunk(
-                    content=chunk.content,
-                    id=chunk.id,
-                    name=chunk.name,
-                    lc_version=chunk.lc_version,
-                    response_metadata=chunk.response_metadata,
-                    usage_metadata=chunk.usage_metadata,
-                    parsed=chunk.parsed,
-                )
             else:
                 chunk_gen = GenerationChunk(text=chunk)
 
             acc_gen = chunk_gen if acc_gen is None else acc_gen + chunk_gen  # type: ignore[operator]
 
-            if isinstance(acc_gen, AIMessageChunk):
-                parsed = await self.aparse_result(acc_gen, partial=True)
-            else:
-                parsed = await self.aparse_result([acc_gen], partial=True)
+            parsed = await self.aparse_result([acc_gen], partial=True)
             if parsed is not None and parsed != prev_parsed:
                 if self.diff:
                     yield await run_in_executor(None, self._diff, prev_parsed, parsed)
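
Net effect of the transform.py hunks: the streaming output parsers accept only str or BaseMessage chunks; the v1 AIMessage branches are gone. A minimal sketch of the surviving code path (assumed usage, not part of the diff):

# Minimal sketch (assumed usage): transform-style parsers now handle only
# str and BaseMessage chunks, so streaming chat chunks through StrOutputParser
# looks like this.
from langchain_core.messages import AIMessageChunk
from langchain_core.output_parsers import StrOutputParser

parser = StrOutputParser()
chunks = [AIMessageChunk(content="Hello"), AIMessageChunk(content=" world")]
for piece in parser.transform(iter(chunks)):
    print(piece)  # "Hello", then " world"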

langchain_core/output_parsers/xml.py

@@ -12,10 +12,8 @@ from typing_extensions import override
 
 from langchain_core.exceptions import OutputParserException
 from langchain_core.messages import BaseMessage
-from langchain_core.messages.utils import convert_from_v1_message
 from langchain_core.output_parsers.transform import BaseTransformOutputParser
 from langchain_core.runnables.utils import AddableDict
-from langchain_core.v1.messages import AIMessage
 
 XML_FORMAT_INSTRUCTIONS = """The output should be formatted as a XML file.
 1. Output should conform to the tags below.
@@ -107,10 +105,11 @@ class _StreamingParser:
         self.buffer = ""
         # yield all events
         try:
-            for event, elem in self.pull_parser.read_events():
+            events = self.pull_parser.read_events()
+            for event, elem in events:  # type: ignore[misc]
                 if event == "start":
                     # update current path
-                    self.current_path.append(elem.tag)
+                    self.current_path.append(elem.tag)  # type: ignore[union-attr]
                     self.current_path_has_children = False
                 elif event == "end":
                     # remove last element from current path
@@ -118,7 +117,7 @@ class _StreamingParser:
                     self.current_path.pop()
                     # yield element
                     if not self.current_path_has_children:
-                        yield nested_element(self.current_path, elem)
+                        yield nested_element(self.current_path, elem)  # type: ignore[arg-type]
                     # prevent yielding of parent element
                     if self.current_path:
                         self.current_path_has_children = True
@@ -242,28 +241,21 @@ class XMLOutputParser(BaseTransformOutputParser):
 
     @override
     def _transform(
-        self, input: Iterator[Union[str, BaseMessage, AIMessage]]
+        self, input: Iterator[Union[str, BaseMessage]]
     ) -> Iterator[AddableDict]:
         streaming_parser = _StreamingParser(self.parser)
         for chunk in input:
-            if isinstance(chunk, AIMessage):
-                yield from streaming_parser.parse(convert_from_v1_message(chunk))
-            else:
-                yield from streaming_parser.parse(chunk)
+            yield from streaming_parser.parse(chunk)
         streaming_parser.close()
 
     @override
     async def _atransform(
-        self, input: AsyncIterator[Union[str, BaseMessage, AIMessage]]
+        self, input: AsyncIterator[Union[str, BaseMessage]]
     ) -> AsyncIterator[AddableDict]:
         streaming_parser = _StreamingParser(self.parser)
         async for chunk in input:
-            if isinstance(chunk, AIMessage):
-                for output in streaming_parser.parse(convert_from_v1_message(chunk)):
-                    yield output
-            else:
-                for output in streaming_parser.parse(chunk):
-                    yield output
+            for output in streaming_parser.parse(chunk):
+                yield output
         streaming_parser.close()
 
     def _root_to_dict(self, root: ET.Element) -> dict[str, Union[str, list[Any]]]:
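
The xml.py hunks drop the same v1 branch from XMLOutputParser, leaving only the str/BaseMessage path. A small sketch of the parser in use (assumed usage; parser="xml" is an assumption to stay on the stdlib ElementTree backend rather than the optional defusedxml one):

# Minimal sketch (assumed usage): XMLOutputParser streams over str or
# BaseMessage chunks only; the v1 AIMessage conversion branch was removed.
from langchain_core.output_parsers import XMLOutputParser

xml_parser = XMLOutputParser(parser="xml")  # assumed: stdlib backend, no defusedxml needed
result = xml_parser.parse("<root><item>one</item><item>two</item></root>")
print(result)  # expected shape: {"root": [{"item": "one"}, {"item": "two"}]}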

langchain_core/prompt_values.py

@@ -8,65 +8,17 @@ from __future__ import annotations
 
 from abc import ABC, abstractmethod
 from collections.abc import Sequence
-from typing import Literal, Union, cast
+from typing import Literal, cast
 
-from typing_extensions import TypedDict, overload
+from typing_extensions import TypedDict
 
 from langchain_core.load.serializable import Serializable
 from langchain_core.messages import (
-    AIMessage,
     AnyMessage,
     BaseMessage,
     HumanMessage,
-    SystemMessage,
-    ToolMessage,
     get_buffer_string,
 )
-from langchain_core.messages import content_blocks as types
-from langchain_core.v1.messages import AIMessage as AIMessageV1
-from langchain_core.v1.messages import HumanMessage as HumanMessageV1
-from langchain_core.v1.messages import MessageV1, ResponseMetadata
-from langchain_core.v1.messages import SystemMessage as SystemMessageV1
-from langchain_core.v1.messages import ToolMessage as ToolMessageV1
-
-
-def _convert_to_v1(message: BaseMessage) -> MessageV1:
-    """Best-effort conversion of a V0 AIMessage to V1."""
-    if isinstance(message.content, str):
-        content: list[types.ContentBlock] = []
-        if message.content:
-            content = [{"type": "text", "text": message.content}]
-    else:
-        content = []
-        for block in message.content:
-            if isinstance(block, str):
-                content.append({"type": "text", "text": block})
-            elif isinstance(block, dict):
-                content.append(cast("types.ContentBlock", block))
-            else:
-                pass
-
-    if isinstance(message, HumanMessage):
-        return HumanMessageV1(content=content)
-    if isinstance(message, AIMessage):
-        for tool_call in message.tool_calls:
-            content.append(tool_call)
-        return AIMessageV1(
-            content=content,
-            usage_metadata=message.usage_metadata,
-            response_metadata=cast("ResponseMetadata", message.response_metadata),
-            tool_calls=message.tool_calls,
-        )
-    if isinstance(message, SystemMessage):
-        return SystemMessageV1(content=content)
-    if isinstance(message, ToolMessage):
-        return ToolMessageV1(
-            tool_call_id=message.tool_call_id,
-            content=content,
-            artifact=message.artifact,
-        )
-    error_message = f"Unsupported message type: {type(message)}"
-    raise TypeError(error_message)
 
 
 class PromptValue(Serializable, ABC):
@@ -94,18 +46,8 @@ class PromptValue(Serializable, ABC):
     def to_string(self) -> str:
         """Return prompt value as string."""
 
-    @overload
-    def to_messages(
-        self, message_version: Literal["v0"] = "v0"
-    ) -> list[BaseMessage]: ...
-
-    @overload
-    def to_messages(self, message_version: Literal["v1"]) -> list[MessageV1]: ...
-
     @abstractmethod
-    def to_messages(
-        self, message_version: Literal["v0", "v1"] = "v0"
-    ) -> Union[Sequence[BaseMessage], Sequence[MessageV1]]:
+    def to_messages(self) -> list[BaseMessage]:
         """Return prompt as a list of Messages."""
 
 
@@ -129,20 +71,8 @@ class StringPromptValue(PromptValue):
         """Return prompt as string."""
         return self.text
 
-    @overload
-    def to_messages(
-        self, message_version: Literal["v0"] = "v0"
-    ) -> list[BaseMessage]: ...
-
-    @overload
-    def to_messages(self, message_version: Literal["v1"]) -> list[MessageV1]: ...
-
-    def to_messages(
-        self, message_version: Literal["v0", "v1"] = "v0"
-    ) -> Union[Sequence[BaseMessage], Sequence[MessageV1]]:
+    def to_messages(self) -> list[BaseMessage]:
         """Return prompt as messages."""
-        if message_version == "v1":
-            return [HumanMessageV1(content=self.text)]
         return [HumanMessage(content=self.text)]
 
 
@@ -159,24 +89,8 @@ class ChatPromptValue(PromptValue):
         """Return prompt as string."""
         return get_buffer_string(self.messages)
 
-    @overload
-    def to_messages(
-        self, message_version: Literal["v0"] = "v0"
-    ) -> list[BaseMessage]: ...
-
-    @overload
-    def to_messages(self, message_version: Literal["v1"]) -> list[MessageV1]: ...
-
-    def to_messages(
-        self, message_version: Literal["v0", "v1"] = "v0"
-    ) -> Union[Sequence[BaseMessage], Sequence[MessageV1]]:
-        """Return prompt as a list of messages.
-
-        Args:
-            message_version: The output version, either "v0" (default) or "v1".
-        """
-        if message_version == "v1":
-            return [_convert_to_v1(m) for m in self.messages]
+    def to_messages(self) -> list[BaseMessage]:
+        """Return prompt as a list of messages."""
         return list(self.messages)
 
     @classmethod
@@ -209,28 +123,10 @@ class ImagePromptValue(PromptValue):
 
     def to_string(self) -> str:
         """Return prompt (image URL) as string."""
-        return self.image_url["url"]
-
-    @overload
-    def to_messages(
-        self, message_version: Literal["v0"] = "v0"
-    ) -> list[BaseMessage]: ...
-
-    @overload
-    def to_messages(self, message_version: Literal["v1"]) -> list[MessageV1]: ...
+        return self.image_url.get("url", "")
 
-    def to_messages(
-        self, message_version: Literal["v0", "v1"] = "v0"
-    ) -> Union[Sequence[BaseMessage], Sequence[MessageV1]]:
+    def to_messages(self) -> list[BaseMessage]:
         """Return prompt (image URL) as messages."""
-        if message_version == "v1":
-            block: types.ImageContentBlock = {
-                "type": "image",
-                "url": self.image_url["url"],
-            }
-            if "detail" in self.image_url:
-                block["detail"] = self.image_url["detail"]
-            return [HumanMessageV1(content=[block])]
         return [HumanMessage(content=[cast("dict", self.image_url)])]
 
 

langchain_core/prompts/chat.py

@@ -155,9 +155,7 @@ class MessagesPlaceholder(BaseMessagePromptTemplate):
         """
         # mypy can't detect the init which is defined in the parent class
         # b/c these are BaseModel classes.
-        super().__init__(  # type: ignore[call-arg]
-            variable_name=variable_name, optional=optional, **kwargs
-        )
+        super().__init__(variable_name=variable_name, optional=optional, **kwargs)
 
     def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
         """Format messages from kwargs.