langchain-core 0.3.75__py3-none-any.whl → 1.0.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of langchain-core might be problematic. Click here for more details.

Files changed (32) hide show
  1. langchain_core/language_models/_utils.py +233 -68
  2. langchain_core/language_models/base.py +2 -1
  3. langchain_core/language_models/chat_models.py +196 -33
  4. langchain_core/language_models/fake_chat_models.py +22 -6
  5. langchain_core/messages/__init__.py +74 -4
  6. langchain_core/messages/ai.py +191 -26
  7. langchain_core/messages/base.py +164 -25
  8. langchain_core/messages/block_translators/__init__.py +89 -0
  9. langchain_core/messages/block_translators/anthropic.py +451 -0
  10. langchain_core/messages/block_translators/bedrock.py +45 -0
  11. langchain_core/messages/block_translators/bedrock_converse.py +47 -0
  12. langchain_core/messages/block_translators/google_genai.py +45 -0
  13. langchain_core/messages/block_translators/google_vertexai.py +47 -0
  14. langchain_core/messages/block_translators/groq.py +45 -0
  15. langchain_core/messages/block_translators/langchain_v0.py +297 -0
  16. langchain_core/messages/block_translators/ollama.py +45 -0
  17. langchain_core/messages/block_translators/openai.py +586 -0
  18. langchain_core/messages/content.py +1568 -0
  19. langchain_core/messages/human.py +29 -9
  20. langchain_core/messages/system.py +29 -9
  21. langchain_core/messages/tool.py +30 -27
  22. langchain_core/messages/utils.py +12 -5
  23. langchain_core/prompt_values.py +1 -1
  24. langchain_core/runnables/base.py +1 -1
  25. langchain_core/utils/_merge.py +44 -6
  26. langchain_core/utils/utils.py +29 -0
  27. langchain_core/version.py +1 -1
  28. {langchain_core-0.3.75.dist-info → langchain_core-1.0.0a1.dist-info}/METADATA +2 -2
  29. {langchain_core-0.3.75.dist-info → langchain_core-1.0.0a1.dist-info}/RECORD +31 -21
  30. langchain_core/messages/content_blocks.py +0 -155
  31. {langchain_core-0.3.75.dist-info → langchain_core-1.0.0a1.dist-info}/WHEEL +0 -0
  32. {langchain_core-0.3.75.dist-info → langchain_core-1.0.0a1.dist-info}/entry_points.txt +0 -0
@@ -3,11 +3,13 @@
3
3
  import json
4
4
  import logging
5
5
  import operator
6
+ from collections.abc import Sequence
6
7
  from typing import Any, Literal, Optional, Union, cast
7
8
 
8
9
  from pydantic import model_validator
9
- from typing_extensions import NotRequired, Self, TypedDict, override
10
+ from typing_extensions import NotRequired, Self, TypedDict, overload, override
10
11
 
12
+ from langchain_core.messages import content as types
11
13
  from langchain_core.messages.base import (
12
14
  BaseMessage,
13
15
  BaseMessageChunk,
@@ -20,25 +22,17 @@ from langchain_core.messages.tool import (
20
22
  default_tool_chunk_parser,
21
23
  default_tool_parser,
22
24
  )
23
- from langchain_core.messages.tool import (
24
- invalid_tool_call as create_invalid_tool_call,
25
- )
26
- from langchain_core.messages.tool import (
27
- tool_call as create_tool_call,
28
- )
29
- from langchain_core.messages.tool import (
30
- tool_call_chunk as create_tool_call_chunk,
31
- )
25
+ from langchain_core.messages.tool import invalid_tool_call as create_invalid_tool_call
26
+ from langchain_core.messages.tool import tool_call as create_tool_call
27
+ from langchain_core.messages.tool import tool_call_chunk as create_tool_call_chunk
32
28
  from langchain_core.utils._merge import merge_dicts, merge_lists
33
29
  from langchain_core.utils.json import parse_partial_json
34
30
  from langchain_core.utils.usage import _dict_int_op
31
+ from langchain_core.utils.utils import LC_AUTO_PREFIX, LC_ID_PREFIX
35
32
 
36
33
  logger = logging.getLogger(__name__)
37
34
 
38
35
 
39
- _LC_ID_PREFIX = "run-"
40
-
41
-
42
36
  class InputTokenDetails(TypedDict, total=False):
43
37
  """Breakdown of input token counts.
44
38
 
@@ -180,16 +174,42 @@ class AIMessage(BaseMessage):
180
174
  type: Literal["ai"] = "ai"
181
175
  """The type of the message (used for deserialization). Defaults to "ai"."""
182
176
 
177
+ @overload
178
+ def __init__(
179
+ self,
180
+ content: Union[str, list[Union[str, dict]]],
181
+ **kwargs: Any,
182
+ ) -> None: ...
183
+
184
+ @overload
185
+ def __init__(
186
+ self,
187
+ content: Optional[Union[str, list[Union[str, dict]]]] = None,
188
+ content_blocks: Optional[list[types.ContentBlock]] = None,
189
+ **kwargs: Any,
190
+ ) -> None: ...
191
+
183
192
  def __init__(
184
- self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
193
+ self,
194
+ content: Optional[Union[str, list[Union[str, dict]]]] = None,
195
+ content_blocks: Optional[list[types.ContentBlock]] = None,
196
+ **kwargs: Any,
185
197
  ) -> None:
186
- """Pass in content as positional arg.
198
+ """Specify ``content`` as positional arg or ``content_blocks`` for typing."""
199
+ if content_blocks is not None:
200
+ # If there are tool calls in content_blocks, but not in tool_calls, add them
201
+ content_tool_calls = [
202
+ block for block in content_blocks if block.get("type") == "tool_call"
203
+ ]
204
+ if content_tool_calls and "tool_calls" not in kwargs:
205
+ kwargs["tool_calls"] = content_tool_calls
187
206
 
188
- Args:
189
- content: The content of the message.
190
- kwargs: Additional arguments to pass to the parent class.
191
- """
192
- super().__init__(content=content, **kwargs)
207
+ super().__init__(
208
+ content=cast("Union[str, list[Union[str, dict]]]", content_blocks),
209
+ **kwargs,
210
+ )
211
+ else:
212
+ super().__init__(content=content, **kwargs)
193
213
 
194
214
  @property
195
215
  def lc_attributes(self) -> dict:
@@ -199,6 +219,49 @@ class AIMessage(BaseMessage):
199
219
  "invalid_tool_calls": self.invalid_tool_calls,
200
220
  }
201
221
 
222
+ @property
223
+ def content_blocks(self) -> list[types.ContentBlock]:
224
+ """Return content blocks of the message."""
225
+ if self.response_metadata.get("output_version") == "v1":
226
+ return cast("list[types.ContentBlock]", self.content)
227
+
228
+ model_provider = self.response_metadata.get("model_provider")
229
+ if model_provider:
230
+ from langchain_core.messages.block_translators import get_translator
231
+
232
+ translator = get_translator(model_provider)
233
+ if translator:
234
+ try:
235
+ return translator["translate_content"](self)
236
+ except NotImplementedError:
237
+ pass
238
+
239
+ # Otherwise, use best-effort parsing
240
+ blocks = super().content_blocks
241
+
242
+ if self.tool_calls:
243
+ # Add from tool_calls if missing from content
244
+ content_tool_call_ids = {
245
+ block.get("id")
246
+ for block in self.content
247
+ if isinstance(block, dict) and block.get("type") == "tool_call"
248
+ }
249
+ for tool_call in self.tool_calls:
250
+ if (id_ := tool_call.get("id")) and id_ not in content_tool_call_ids:
251
+ tool_call_block: types.ToolCall = {
252
+ "type": "tool_call",
253
+ "id": id_,
254
+ "name": tool_call["name"],
255
+ "args": tool_call["args"],
256
+ }
257
+ if "index" in tool_call:
258
+ tool_call_block["index"] = tool_call["index"] # type: ignore[typeddict-item]
259
+ if "extras" in tool_call:
260
+ tool_call_block["extras"] = tool_call["extras"] # type: ignore[typeddict-item]
261
+ blocks.append(tool_call_block)
262
+
263
+ return blocks
264
+
202
265
  # TODO: remove this logic if possible, reducing breaking nature of changes
203
266
  @model_validator(mode="before")
204
267
  @classmethod
@@ -227,7 +290,9 @@ class AIMessage(BaseMessage):
227
290
  # Ensure "type" is properly set on all tool call-like dicts.
228
291
  if tool_calls := values.get("tool_calls"):
229
292
  values["tool_calls"] = [
230
- create_tool_call(**{k: v for k, v in tc.items() if k != "type"})
293
+ create_tool_call(
294
+ **{k: v for k, v in tc.items() if k not in ("type", "extras")}
295
+ )
231
296
  for tc in tool_calls
232
297
  ]
233
298
  if invalid_tool_calls := values.get("invalid_tool_calls"):
@@ -298,6 +363,13 @@ class AIMessageChunk(AIMessage, BaseMessageChunk):
298
363
  tool_call_chunks: list[ToolCallChunk] = []
299
364
  """If provided, tool call chunks associated with the message."""
300
365
 
366
+ chunk_position: Optional[Literal["last"]] = None
367
+ """Optional span represented by an aggregated AIMessageChunk.
368
+
369
+ If a chunk with ``chunk_position="last"`` is aggregated into a stream,
370
+ ``tool_call_chunks`` in message content will be parsed into ``tool_calls``.
371
+ """
372
+
301
373
  @property
302
374
  def lc_attributes(self) -> dict:
303
375
  """Attrs to be serialized even if they are derived from other init args."""
@@ -306,6 +378,49 @@ class AIMessageChunk(AIMessage, BaseMessageChunk):
306
378
  "invalid_tool_calls": self.invalid_tool_calls,
307
379
  }
308
380
 
381
+ @property
382
+ def content_blocks(self) -> list[types.ContentBlock]:
383
+ """Return content blocks of the message."""
384
+ if self.response_metadata.get("output_version") == "v1":
385
+ return cast("list[types.ContentBlock]", self.content)
386
+
387
+ model_provider = self.response_metadata.get("model_provider")
388
+ if model_provider:
389
+ from langchain_core.messages.block_translators import get_translator
390
+
391
+ translator = get_translator(model_provider)
392
+ if translator:
393
+ try:
394
+ return translator["translate_content_chunk"](self)
395
+ except NotImplementedError:
396
+ pass
397
+
398
+ # Otherwise, use best-effort parsing
399
+ blocks = super().content_blocks
400
+
401
+ if (
402
+ self.tool_call_chunks
403
+ and not self.content
404
+ and self.chunk_position != "last" # keep tool_calls if aggregated
405
+ ):
406
+ blocks = [
407
+ block
408
+ for block in blocks
409
+ if block["type"] not in ("tool_call", "invalid_tool_call")
410
+ ]
411
+ for tool_call_chunk in self.tool_call_chunks:
412
+ tc: types.ToolCallChunk = {
413
+ "type": "tool_call_chunk",
414
+ "id": tool_call_chunk.get("id"),
415
+ "name": tool_call_chunk.get("name"),
416
+ "args": tool_call_chunk.get("args"),
417
+ }
418
+ if (idx := tool_call_chunk.get("index")) is not None:
419
+ tc["index"] = idx
420
+ blocks.append(tc)
421
+
422
+ return blocks
423
+
309
424
  @model_validator(mode="after")
310
425
  def init_tool_calls(self) -> Self:
311
426
  """Initialize tool calls from tool call chunks.
@@ -376,10 +491,45 @@ class AIMessageChunk(AIMessage, BaseMessageChunk):
376
491
  add_chunk_to_invalid_tool_calls(chunk)
377
492
  self.tool_calls = tool_calls
378
493
  self.invalid_tool_calls = invalid_tool_calls
494
+
495
+ if (
496
+ self.chunk_position == "last"
497
+ and self.tool_call_chunks
498
+ and self.response_metadata.get("output_version") == "v1"
499
+ and isinstance(self.content, list)
500
+ ):
501
+ id_to_tc: dict[str, types.ToolCall] = {
502
+ cast("str", tc.get("id")): {
503
+ "type": "tool_call",
504
+ "name": tc["name"],
505
+ "args": tc["args"],
506
+ "id": tc.get("id"),
507
+ }
508
+ for tc in self.tool_calls
509
+ if "id" in tc
510
+ }
511
+ for idx, block in enumerate(self.content):
512
+ if (
513
+ isinstance(block, dict)
514
+ and block.get("type") == "tool_call_chunk"
515
+ and (call_id := block.get("id"))
516
+ and call_id in id_to_tc
517
+ ):
518
+ self.content[idx] = cast("dict[str, Any]", id_to_tc[call_id])
519
+
379
520
  return self
380
521
 
522
+ @overload # type: ignore[override] # summing BaseMessages gives ChatPromptTemplate
523
+ def __add__(self, other: "AIMessageChunk") -> "AIMessageChunk": ...
524
+
525
+ @overload
526
+ def __add__(self, other: Sequence["AIMessageChunk"]) -> "AIMessageChunk": ...
527
+
528
+ @overload
529
+ def __add__(self, other: Any) -> BaseMessageChunk: ...
530
+
381
531
  @override
382
- def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore[override]
532
+ def __add__(self, other: Any) -> BaseMessageChunk:
383
533
  if isinstance(other, AIMessageChunk):
384
534
  return add_ai_message_chunks(self, other)
385
535
  if isinstance(other, (list, tuple)) and all(
@@ -431,17 +581,31 @@ def add_ai_message_chunks(
431
581
 
432
582
  chunk_id = None
433
583
  candidates = [left.id] + [o.id for o in others]
434
- # first pass: pick the first non-run-* id
584
+ # first pass: pick the first provider-assigned id (non-run-* and non-lc_*)
435
585
  for id_ in candidates:
436
- if id_ and not id_.startswith(_LC_ID_PREFIX):
586
+ if (
587
+ id_
588
+ and not id_.startswith(LC_ID_PREFIX)
589
+ and not id_.startswith(LC_AUTO_PREFIX)
590
+ ):
437
591
  chunk_id = id_
438
592
  break
439
593
  else:
440
- # second pass: no provider-assigned id found, just take the first non-null
594
+ # second pass: prefer lc_run-* ids over lc_* ids
441
595
  for id_ in candidates:
442
- if id_:
596
+ if id_ and id_.startswith(LC_ID_PREFIX):
443
597
  chunk_id = id_
444
598
  break
599
+ else:
600
+ # third pass: take any remaining id (auto-generated lc_* ids)
601
+ for id_ in candidates:
602
+ if id_:
603
+ chunk_id = id_
604
+ break
605
+
606
+ chunk_position: Optional[Literal["last"]] = (
607
+ "last" if any(x.chunk_position == "last" for x in [left, *others]) else None
608
+ )
445
609
 
446
610
  return left.__class__(
447
611
  example=left.example,
@@ -451,6 +615,7 @@ def add_ai_message_chunks(
451
615
  response_metadata=response_metadata,
452
616
  usage_metadata=usage_metadata,
453
617
  id=chunk_id,
618
+ chunk_position=chunk_position,
454
619
  )
455
620
 
456
621
 
@@ -2,11 +2,14 @@
2
2
 
3
3
  from __future__ import annotations
4
4
 
5
- from typing import TYPE_CHECKING, Any, Optional, Union, cast
5
+ from typing import TYPE_CHECKING, Any, Optional, Union, cast, overload
6
6
 
7
7
  from pydantic import ConfigDict, Field
8
+ from typing_extensions import Self
8
9
 
10
+ from langchain_core._api.deprecation import warn_deprecated
9
11
  from langchain_core.load.serializable import Serializable
12
+ from langchain_core.messages import content as types
10
13
  from langchain_core.utils import get_bolded_text
11
14
  from langchain_core.utils._merge import merge_dicts, merge_lists
12
15
  from langchain_core.utils.interactive_env import is_interactive_env
@@ -17,6 +20,52 @@ if TYPE_CHECKING:
17
20
  from langchain_core.prompts.chat import ChatPromptTemplate
18
21
 
19
22
 
23
+ class TextAccessor(str):
24
+ """String-like object that supports both property and method access patterns.
25
+
26
+ Exists to maintain backward compatibility while transitioning from method-based to
27
+ property-based text access in message objects. In LangChain <v1.0, message text was
28
+ accessed via ``.text()`` method calls. In v1.0 and later, the preferred pattern is property
29
+ access via ``.text``.
30
+
31
+ Rather than breaking existing code immediately, ``TextAccessor`` allows both
32
+ patterns:
33
+ - Modern property access: ``message.text`` (returns string directly)
34
+ - Legacy method access: ``message.text()`` (callable, emits deprecation warning)
35
+
36
+ """
37
+
38
+ __slots__ = ()
39
+
40
+ def __new__(cls, value: str) -> Self:
41
+ """Create new TextAccessor instance."""
42
+ return str.__new__(cls, value)
43
+
44
+ def __call__(self) -> str:
45
+ """Enable method-style text access for backward compatibility.
46
+
47
+ This method exists solely to support legacy code that calls ``.text()``
48
+ as a method. New code should use property access (``.text``) instead.
49
+
50
+ .. deprecated:: 1.0.0
51
+ Calling ``.text()`` as a method is deprecated. Use ``.text`` as a property
52
+ instead. This method will be removed in 2.0.0.
53
+
54
+ Returns:
55
+ The string content, identical to property access.
56
+
57
+ """
58
+ warn_deprecated(
59
+ since="1.0.0",
60
+ message=(
61
+ "Calling .text() as a method is deprecated. "
62
+ "Use .text as a property instead (e.g., message.text)."
63
+ ),
64
+ removal="2.0.0",
65
+ )
66
+ return str(self)
67
+
68
+
20
69
  class BaseMessage(Serializable):
21
70
  """Base abstract message class.
22
71
 
@@ -61,15 +110,32 @@ class BaseMessage(Serializable):
61
110
  extra="allow",
62
111
  )
63
112
 
113
+ @overload
64
114
  def __init__(
65
- self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
66
- ) -> None:
67
- """Pass in content as positional arg.
115
+ self,
116
+ content: Union[str, list[Union[str, dict]]],
117
+ **kwargs: Any,
118
+ ) -> None: ...
68
119
 
69
- Args:
70
- content: The string contents of the message.
71
- """
72
- super().__init__(content=content, **kwargs)
120
+ @overload
121
+ def __init__(
122
+ self,
123
+ content: Optional[Union[str, list[Union[str, dict]]]] = None,
124
+ content_blocks: Optional[list[types.ContentBlock]] = None,
125
+ **kwargs: Any,
126
+ ) -> None: ...
127
+
128
+ def __init__(
129
+ self,
130
+ content: Optional[Union[str, list[Union[str, dict]]]] = None,
131
+ content_blocks: Optional[list[types.ContentBlock]] = None,
132
+ **kwargs: Any,
133
+ ) -> None:
134
+ """Specify ``content`` as positional arg or ``content_blocks`` for typing."""
135
+ if content_blocks is not None:
136
+ super().__init__(content=content_blocks, **kwargs)
137
+ else:
138
+ super().__init__(content=content, **kwargs)
73
139
 
74
140
  @classmethod
75
141
  def is_lc_serializable(cls) -> bool:
@@ -88,25 +154,96 @@ class BaseMessage(Serializable):
88
154
  """
89
155
  return ["langchain", "schema", "messages"]
90
156
 
91
- def text(self) -> str:
92
- """Get the text content of the message.
157
+ @property
158
+ def content_blocks(self) -> list[types.ContentBlock]:
159
+ r"""Return ``content`` as a list of standardized :class:`~langchain_core.messages.content.ContentBlock`\s.
160
+
161
+ .. important::
162
+
163
+ To use this property correctly, the corresponding ``ChatModel`` must support
164
+ ``message_version='v1'`` or higher (and it must be set):
165
+
166
+ .. code-block:: python
167
+
168
+ from langchain.chat_models import init_chat_model
169
+ llm = init_chat_model("...", message_version="v1")
170
+
171
+ # or
172
+
173
+ from langchain_openai import ChatOpenAI
174
+ llm = ChatOpenAI(model="gpt-4o", message_version="v1")
175
+
176
+ Otherwise, the property will perform best-effort parsing to standard types,
177
+ though some content may be misinterpreted.
178
+
179
+ .. versionadded:: 1.0.0
180
+
181
+ """ # noqa: E501
182
+ from langchain_core.messages import content as types
183
+ from langchain_core.messages.block_translators.anthropic import (
184
+ _convert_to_v1_from_anthropic_input,
185
+ )
186
+ from langchain_core.messages.block_translators.langchain_v0 import (
187
+ _convert_v0_multimodal_input_to_v1,
188
+ )
189
+ from langchain_core.messages.block_translators.openai import (
190
+ _convert_to_v1_from_chat_completions_input,
191
+ )
192
+
193
+ blocks: list[types.ContentBlock] = []
194
+
195
+ # First pass: convert to standard blocks
196
+ content = (
197
+ [self.content]
198
+ if isinstance(self.content, str) and self.content
199
+ else self.content
200
+ )
201
+ for item in content:
202
+ if isinstance(item, str):
203
+ blocks.append({"type": "text", "text": item})
204
+ elif isinstance(item, dict):
205
+ item_type = item.get("type")
206
+ if item_type not in types.KNOWN_BLOCK_TYPES:
207
+ blocks.append({"type": "non_standard", "value": item})
208
+ else:
209
+ blocks.append(cast("types.ContentBlock", item))
210
+
211
+ # Subsequent passes: attempt to unpack non-standard blocks
212
+ for parsing_step in [
213
+ _convert_v0_multimodal_input_to_v1,
214
+ _convert_to_v1_from_chat_completions_input,
215
+ _convert_to_v1_from_anthropic_input,
216
+ ]:
217
+ blocks = parsing_step(blocks)
218
+ return blocks
219
+
220
+ @property
221
+ def text(self) -> TextAccessor:
222
+ """Get the text content of the message as a string.
223
+
224
+ Can be used as both property (``message.text``) and method (``message.text()``).
225
+
226
+ .. deprecated:: 1.0.0
227
+ Calling ``.text()`` as a method is deprecated. Use ``.text`` as a property
228
+ instead. This method will be removed in 2.0.0.
93
229
 
94
230
  Returns:
95
231
  The text content of the message.
96
232
  """
97
233
  if isinstance(self.content, str):
98
- return self.content
99
-
100
- # must be a list
101
- blocks = [
102
- block
103
- for block in self.content
104
- if isinstance(block, str)
105
- or (block.get("type") == "text" and isinstance(block.get("text"), str))
106
- ]
107
- return "".join(
108
- block if isinstance(block, str) else block["text"] for block in blocks
109
- )
234
+ text_value = self.content
235
+ else:
236
+ # must be a list
237
+ blocks = [
238
+ block
239
+ for block in self.content
240
+ if isinstance(block, str)
241
+ or (block.get("type") == "text" and isinstance(block.get("text"), str))
242
+ ]
243
+ text_value = "".join(
244
+ block if isinstance(block, str) else block["text"] for block in blocks
245
+ )
246
+ return TextAccessor(text_value)
110
247
 
111
248
  def __add__(self, other: Any) -> ChatPromptTemplate:
112
249
  """Concatenate this message with another message."""
@@ -152,7 +289,9 @@ def merge_content(
152
289
  Returns:
153
290
  The merged content.
154
291
  """
155
- merged = first_content
292
+ merged: Union[str, list[Union[str, dict]]]
293
+ merged = "" if first_content is None else first_content
294
+
156
295
  for content in contents:
157
296
  # If current is a string
158
297
  if isinstance(merged, str):
@@ -173,8 +312,8 @@ def merge_content(
173
312
  # If second content is an empty string, treat as a no-op
174
313
  elif content == "":
175
314
  pass
176
- else:
177
- # Otherwise, add the second content as a new element of the list
315
+ # Otherwise, add the second content as a new element of the list
316
+ elif merged:
178
317
  merged.append(content)
179
318
  return merged
180
319
 
@@ -0,0 +1,89 @@
1
+ """Derivations of standard content blocks from provider content."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from typing import TYPE_CHECKING, Callable
6
+
7
+ if TYPE_CHECKING:
8
+ from langchain_core.messages import AIMessage, AIMessageChunk
9
+ from langchain_core.messages import content as types
10
+
11
+ # Provider to translator mapping
12
+ PROVIDER_TRANSLATORS: dict[str, dict[str, Callable[..., list[types.ContentBlock]]]] = {}
13
+
14
+
15
+ def register_translator(
16
+ provider: str,
17
+ translate_content: Callable[[AIMessage], list[types.ContentBlock]],
18
+ translate_content_chunk: Callable[[AIMessageChunk], list[types.ContentBlock]],
19
+ ) -> None:
20
+ """Register content translators for a provider.
21
+
22
+ Args:
23
+ provider: The model provider name (e.g. ``'openai'``, ``'anthropic'``).
24
+ translate_content: Function to translate ``AIMessage`` content.
25
+ translate_content_chunk: Function to translate ``AIMessageChunk`` content.
26
+ """
27
+ PROVIDER_TRANSLATORS[provider] = {
28
+ "translate_content": translate_content,
29
+ "translate_content_chunk": translate_content_chunk,
30
+ }
31
+
32
+
33
+ def get_translator(
34
+ provider: str,
35
+ ) -> dict[str, Callable[..., list[types.ContentBlock]]] | None:
36
+ """Get the translator functions for a provider.
37
+
38
+ Args:
39
+ provider: The model provider name.
40
+
41
+ Returns:
42
+ Dictionary with ``'translate_content'`` and ``'translate_content_chunk'``
43
+ functions, or None if no translator is registered for the provider.
44
+ """
45
+ return PROVIDER_TRANSLATORS.get(provider)
46
+
47
+
48
+ def _register_translators() -> None:
49
+ """Register all translators in langchain-core.
50
+
51
+ A unit test ensures all modules in ``block_translators`` are represented here.
52
+
53
+ For translators implemented outside langchain-core, they can be registered by
54
+ calling ``register_translator`` from within the integration package.
55
+ """
56
+ from langchain_core.messages.block_translators.anthropic import (
57
+ _register_anthropic_translator,
58
+ )
59
+ from langchain_core.messages.block_translators.bedrock import (
60
+ _register_bedrock_translator,
61
+ )
62
+ from langchain_core.messages.block_translators.bedrock_converse import (
63
+ _register_bedrock_converse_translator,
64
+ )
65
+ from langchain_core.messages.block_translators.google_genai import (
66
+ _register_google_genai_translator,
67
+ )
68
+ from langchain_core.messages.block_translators.google_vertexai import (
69
+ _register_google_vertexai_translator,
70
+ )
71
+ from langchain_core.messages.block_translators.groq import _register_groq_translator
72
+ from langchain_core.messages.block_translators.ollama import (
73
+ _register_ollama_translator,
74
+ )
75
+ from langchain_core.messages.block_translators.openai import (
76
+ _register_openai_translator,
77
+ )
78
+
79
+ _register_bedrock_translator()
80
+ _register_bedrock_converse_translator()
81
+ _register_anthropic_translator()
82
+ _register_google_genai_translator()
83
+ _register_google_vertexai_translator()
84
+ _register_groq_translator()
85
+ _register_ollama_translator()
86
+ _register_openai_translator()
87
+
88
+
89
+ _register_translators()