langchain-core 0.3.75__py3-none-any.whl → 1.0.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of langchain-core might be problematic.

Files changed (32)
  1. langchain_core/language_models/_utils.py +233 -68
  2. langchain_core/language_models/base.py +2 -1
  3. langchain_core/language_models/chat_models.py +196 -33
  4. langchain_core/language_models/fake_chat_models.py +22 -6
  5. langchain_core/messages/__init__.py +74 -4
  6. langchain_core/messages/ai.py +191 -26
  7. langchain_core/messages/base.py +164 -25
  8. langchain_core/messages/block_translators/__init__.py +89 -0
  9. langchain_core/messages/block_translators/anthropic.py +451 -0
  10. langchain_core/messages/block_translators/bedrock.py +45 -0
  11. langchain_core/messages/block_translators/bedrock_converse.py +47 -0
  12. langchain_core/messages/block_translators/google_genai.py +45 -0
  13. langchain_core/messages/block_translators/google_vertexai.py +47 -0
  14. langchain_core/messages/block_translators/groq.py +45 -0
  15. langchain_core/messages/block_translators/langchain_v0.py +297 -0
  16. langchain_core/messages/block_translators/ollama.py +45 -0
  17. langchain_core/messages/block_translators/openai.py +586 -0
  18. langchain_core/messages/content.py +1568 -0
  19. langchain_core/messages/human.py +29 -9
  20. langchain_core/messages/system.py +29 -9
  21. langchain_core/messages/tool.py +30 -27
  22. langchain_core/messages/utils.py +12 -5
  23. langchain_core/prompt_values.py +1 -1
  24. langchain_core/runnables/base.py +1 -1
  25. langchain_core/utils/_merge.py +44 -6
  26. langchain_core/utils/utils.py +29 -0
  27. langchain_core/version.py +1 -1
  28. {langchain_core-0.3.75.dist-info → langchain_core-1.0.0a1.dist-info}/METADATA +2 -2
  29. {langchain_core-0.3.75.dist-info → langchain_core-1.0.0a1.dist-info}/RECORD +31 -21
  30. langchain_core/messages/content_blocks.py +0 -155
  31. {langchain_core-0.3.75.dist-info → langchain_core-1.0.0a1.dist-info}/WHEEL +0 -0
  32. {langchain_core-0.3.75.dist-info → langchain_core-1.0.0a1.dist-info}/entry_points.txt +0 -0
langchain_core/messages/human.py CHANGED
@@ -1,7 +1,8 @@
 """Human message."""
 
-from typing import Any, Literal, Union
+from typing import Any, Literal, Optional, Union, cast, overload
 
+from langchain_core.messages import content as types
 from langchain_core.messages.base import BaseMessage, BaseMessageChunk
 
 
@@ -41,16 +42,35 @@ class HumanMessage(BaseMessage):
     type: Literal["human"] = "human"
     """The type of the message (used for serialization). Defaults to "human"."""
 
+    @overload
     def __init__(
-        self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
-    ) -> None:
-        """Pass in content as positional arg.
+        self,
+        content: Union[str, list[Union[str, dict]]],
+        **kwargs: Any,
+    ) -> None: ...
+
+    @overload
+    def __init__(
+        self,
+        content: Optional[Union[str, list[Union[str, dict]]]] = None,
+        content_blocks: Optional[list[types.ContentBlock]] = None,
+        **kwargs: Any,
+    ) -> None: ...
 
-        Args:
-            content: The string contents of the message.
-            kwargs: Additional fields to pass to the message.
-        """
-        super().__init__(content=content, **kwargs)
+    def __init__(
+        self,
+        content: Optional[Union[str, list[Union[str, dict]]]] = None,
+        content_blocks: Optional[list[types.ContentBlock]] = None,
+        **kwargs: Any,
+    ) -> None:
+        """Specify ``content`` as positional arg or ``content_blocks`` for typing."""
+        if content_blocks is not None:
+            super().__init__(
+                content=cast("Union[str, list[Union[str, dict]]]", content_blocks),
+                **kwargs,
+            )
+        else:
+            super().__init__(content=content, **kwargs)
 
 
 class HumanMessageChunk(HumanMessage, BaseMessageChunk):
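
For orientation, a minimal sketch of the new ``content_blocks`` keyword introduced above (the text block shape is the standard ``{"type": "text", "text": ...}`` form; other block types are defined in the new langchain_core/messages/content.py):

from langchain_core.messages import HumanMessage

# Typed content blocks; per the __init__ above they are stored on msg.content
# after a cast to the legacy content type.
msg = HumanMessage(content_blocks=[{"type": "text", "text": "Hello!"}])

# The positional-content form remains available:
msg = HumanMessage("Hello!")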
langchain_core/messages/system.py CHANGED
@@ -1,7 +1,8 @@
 """System message."""
 
-from typing import Any, Literal, Union
+from typing import Any, Literal, Optional, Union, cast, overload
 
+from langchain_core.messages import content as types
 from langchain_core.messages.base import BaseMessage, BaseMessageChunk
 
 
@@ -34,16 +35,35 @@ class SystemMessage(BaseMessage):
     type: Literal["system"] = "system"
     """The type of the message (used for serialization). Defaults to "system"."""
 
+    @overload
     def __init__(
-        self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
-    ) -> None:
-        """Pass in content as positional arg.
+        self,
+        content: Union[str, list[Union[str, dict]]],
+        **kwargs: Any,
+    ) -> None: ...
+
+    @overload
+    def __init__(
+        self,
+        content: Optional[Union[str, list[Union[str, dict]]]] = None,
+        content_blocks: Optional[list[types.ContentBlock]] = None,
+        **kwargs: Any,
+    ) -> None: ...
 
-        Args:
-            content: The string contents of the message.
-            kwargs: Additional fields to pass to the message.
-        """
-        super().__init__(content=content, **kwargs)
+    def __init__(
+        self,
+        content: Optional[Union[str, list[Union[str, dict]]]] = None,
+        content_blocks: Optional[list[types.ContentBlock]] = None,
+        **kwargs: Any,
+    ) -> None:
+        """Specify ``content`` as positional arg or ``content_blocks`` for typing."""
+        if content_blocks is not None:
+            super().__init__(
+                content=cast("Union[str, list[Union[str, dict]]]", content_blocks),
+                **kwargs,
+            )
+        else:
+            super().__init__(content=content, **kwargs)
 
 
 class SystemMessageChunk(SystemMessage, BaseMessageChunk):
langchain_core/messages/tool.py CHANGED
@@ -1,13 +1,15 @@
 """Messages for tools."""
 
 import json
-from typing import Any, Literal, Optional, Union
+from typing import Any, Literal, Optional, Union, cast, overload
 from uuid import UUID
 
 from pydantic import Field, model_validator
 from typing_extensions import NotRequired, TypedDict, override
 
+from langchain_core.messages import content as types
 from langchain_core.messages.base import BaseMessage, BaseMessageChunk, merge_content
+from langchain_core.messages.content import InvalidToolCall as InvalidToolCall
 from langchain_core.utils._merge import merge_dicts, merge_obj
 
 
@@ -133,16 +135,35 @@ class ToolMessage(BaseMessage, ToolOutputMixin):
         values["tool_call_id"] = str(tool_call_id)
         return values
 
+    @overload
     def __init__(
-        self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
-    ) -> None:
-        """Create a ToolMessage.
+        self,
+        content: Union[str, list[Union[str, dict]]],
+        **kwargs: Any,
+    ) -> None: ...
 
-        Args:
-            content: The string contents of the message.
-            **kwargs: Additional fields.
-        """
-        super().__init__(content=content, **kwargs)
+    @overload
+    def __init__(
+        self,
+        content: Optional[Union[str, list[Union[str, dict]]]] = None,
+        content_blocks: Optional[list[types.ContentBlock]] = None,
+        **kwargs: Any,
+    ) -> None: ...
+
+    def __init__(
+        self,
+        content: Optional[Union[str, list[Union[str, dict]]]] = None,
+        content_blocks: Optional[list[types.ContentBlock]] = None,
+        **kwargs: Any,
+    ) -> None:
+        """Specify ``content`` as positional arg or ``content_blocks`` for typing."""
+        if content_blocks is not None:
+            super().__init__(
+                content=cast("Union[str, list[Union[str, dict]]]", content_blocks),
+                **kwargs,
+            )
+        else:
+            super().__init__(content=content, **kwargs)
 
 
 class ToolMessageChunk(ToolMessage, BaseMessageChunk):
@@ -276,24 +297,6 @@ def tool_call_chunk(
     )
 
 
-class InvalidToolCall(TypedDict):
-    """Allowance for errors made by LLM.
-
-    Here we add an `error` key to surface errors made during generation
-    (e.g., invalid JSON arguments.)
-    """
-
-    name: Optional[str]
-    """The name of the tool to be called."""
-    args: Optional[str]
-    """The arguments to the tool call."""
-    id: Optional[str]
-    """An identifier associated with the tool call."""
-    error: Optional[str]
-    """An error message associated with the tool call."""
-    type: NotRequired[Literal["invalid_tool_call"]]
-
-
 def invalid_tool_call(
     *,
     name: Optional[str] = None,
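
For reference, a minimal sketch of the ``invalid_tool_call`` factory whose signature begins above (``InvalidToolCall`` itself is now re-exported from langchain_core.messages.content per the import change in this file):

from langchain_core.messages.tool import invalid_tool_call

# Record a tool call whose arguments the model emitted as malformed JSON.
bad_call = invalid_tool_call(
    name="search",
    args='{"query": ',  # left unparsed on purpose
    id="call_123",
    error="Malformed JSON arguments",
)
# bad_call is an InvalidToolCall TypedDict carrying name/args/id/error.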
langchain_core/messages/utils.py CHANGED
@@ -31,10 +31,13 @@ from typing import (
 from pydantic import Discriminator, Field, Tag
 
 from langchain_core.exceptions import ErrorCode, create_message
-from langchain_core.messages import convert_to_openai_data_block, is_data_content_block
 from langchain_core.messages.ai import AIMessage, AIMessageChunk
 from langchain_core.messages.base import BaseMessage, BaseMessageChunk
 from langchain_core.messages.chat import ChatMessage, ChatMessageChunk
+from langchain_core.messages.content import (
+    convert_to_openai_data_block,
+    is_data_content_block,
+)
 from langchain_core.messages.function import FunctionMessage, FunctionMessageChunk
 from langchain_core.messages.human import HumanMessage, HumanMessageChunk
 from langchain_core.messages.modifier import RemoveMessage
@@ -130,7 +133,7 @@ def get_buffer_string(
         else:
             msg = f"Got unsupported message type: {m}"
             raise ValueError(msg)  # noqa: TRY004
-        message = f"{role}: {m.text()}"
+        message = f"{role}: {m.text}"
         if isinstance(m, AIMessage) and "function_call" in m.additional_kwargs:
             message += f"{m.additional_kwargs['function_call']}"
         string_messages.append(message)
@@ -196,7 +199,7 @@ def message_chunk_to_message(chunk: BaseMessageChunk) -> BaseMessage:
     # chunk classes always have the equivalent non-chunk class as their first parent
     ignore_keys = ["type"]
     if isinstance(chunk, AIMessageChunk):
-        ignore_keys.append("tool_call_chunks")
+        ignore_keys.extend(["tool_call_chunks", "chunk_position"])
     return chunk.__class__.__mro__[1](
        **{k: v for k, v in chunk.__dict__.items() if k not in ignore_keys}
    )
@@ -1498,11 +1501,15 @@ def _msg_to_chunk(message: BaseMessage) -> BaseMessageChunk:
 def _chunk_to_msg(chunk: BaseMessageChunk) -> BaseMessage:
     if chunk.__class__ in _CHUNK_MSG_MAP:
         return _CHUNK_MSG_MAP[chunk.__class__](
-            **chunk.model_dump(exclude={"type", "tool_call_chunks"})
+            **chunk.model_dump(exclude={"type", "tool_call_chunks", "chunk_position"})
         )
     for chunk_cls, msg_cls in _CHUNK_MSG_MAP.items():
         if isinstance(chunk, chunk_cls):
-            return msg_cls(**chunk.model_dump(exclude={"type", "tool_call_chunks"}))
+            return msg_cls(
+                **chunk.model_dump(
+                    exclude={"type", "tool_call_chunks", "chunk_position"}
+                )
+            )
 
     msg = (
         f"Unrecognized message chunk class {chunk.__class__}. Supported classes are "
langchain_core/prompt_values.py CHANGED
@@ -123,7 +123,7 @@ class ImagePromptValue(PromptValue):
 
     def to_string(self) -> str:
         """Return prompt (image URL) as string."""
-        return self.image_url["url"]
+        return self.image_url.get("url", "")
 
     def to_messages(self) -> list[BaseMessage]:
         """Return prompt (image URL) as messages."""
langchain_core/runnables/base.py CHANGED
@@ -2399,7 +2399,7 @@ class Runnable(ABC, Generic[Input, Output]):
             description: The description of the tool. Defaults to None.
             arg_types: A dictionary of argument names to types. Defaults to None.
             message_version: Version of ``ToolMessage`` to return given
-                :class:`~langchain_core.messages.content_blocks.ToolCall` input.
+                :class:`~langchain_core.messages.content.ToolCall` input.
 
         Returns:
             A ``BaseTool`` instance.
langchain_core/utils/_merge.py CHANGED
@@ -57,6 +57,11 @@ def merge_dicts(left: dict[str, Any], *others: dict[str, Any]) -> dict[str, Any]
                # "should either occur once or have the same value across "
                # "all dicts."
                # )
+                if (right_k == "index" and merged[right_k].startswith("lc_")) or (
+                    right_k in ("id", "output_version", "model_provider")
+                    and merged[right_k] == right_v
+                ):
+                    continue
                merged[right_k] += right_v
            elif isinstance(merged[right_k], dict):
                merged[right_k] = merge_dicts(merged[right_k], right_v)
@@ -93,7 +98,16 @@ def merge_lists(left: Optional[list], *others: Optional[list]) -> Optional[list]
             merged = other.copy()
         else:
             for e in other:
-                if isinstance(e, dict) and "index" in e and isinstance(e["index"], int):
+                if (
+                    isinstance(e, dict)
+                    and "index" in e
+                    and (
+                        isinstance(e["index"], int)
+                        or (
+                            isinstance(e["index"], str) and e["index"].startswith("lc_")
+                        )
+                    )
+                ):
                     to_merge = [
                         i
                         for i, e_left in enumerate(merged)
@@ -102,11 +116,35 @@ def merge_lists(left: Optional[list], *others: Optional[list]) -> Optional[list]
                    if to_merge:
                        # TODO: Remove this once merge_dict is updated with special
                        # handling for 'type'.
-                        new_e = (
-                            {k: v for k, v in e.items() if k != "type"}
-                            if "type" in e
-                            else e
-                        )
+                        if (left_type := merged[to_merge[0]].get("type")) and (
+                            e.get("type") == "non_standard" and "value" in e
+                        ):
+                            if left_type != "non_standard":
+                                # standard + non_standard
+                                new_e: dict[str, Any] = {
+                                    "extras": {
+                                        k: v
+                                        for k, v in e["value"].items()
+                                        if k != "type"
+                                    }
+                                }
+                            else:
+                                # non_standard + non_standard
+                                new_e = {
+                                    "value": {
+                                        k: v
+                                        for k, v in e["value"].items()
+                                        if k != "type"
+                                    }
+                                }
+                            if "index" in e:
+                                new_e["index"] = e["index"]
+                        else:
+                            new_e = (
+                                {k: v for k, v in e.items() if k != "type"}
+                                if "type" in e
+                                else e
+                            )
                        merged[to_merge[0]] = merge_dicts(merged[to_merge[0]], new_e)
                    else:
                        merged.append(e)
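
A minimal sketch of the chunk-merging behavior the hunks above extend (``merge_lists``/``merge_dicts`` are internal helpers; the ``lc_``-prefixed string index is the new auto-generated block ID format):

from langchain_core.utils._merge import merge_lists

left = [{"type": "text", "text": "Hello", "index": "lc_abc"}]
right = [{"text": " world", "index": "lc_abc"}]
# Blocks with matching string indices are merged; per the merge_dicts change,
# the "lc_"-prefixed index is kept as-is instead of being concatenated.
print(merge_lists(left, right))
# [{'type': 'text', 'text': 'Hello world', 'index': 'lc_abc'}]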
langchain_core/utils/utils.py CHANGED
@@ -9,6 +9,7 @@ import warnings
 from collections.abc import Iterator, Sequence
 from importlib.metadata import version
 from typing import Any, Callable, Optional, Union, overload
+from uuid import uuid4
 
 from packaging.version import parse
 from pydantic import SecretStr
@@ -466,3 +467,31 @@ def secret_from_env(
            raise ValueError(msg)
 
    return get_secret_from_env
+
+
+LC_AUTO_PREFIX = "lc_"
+"""LangChain auto-generated ID prefix for messages and content blocks."""
+
+LC_ID_PREFIX = "lc_run-"
+"""Internal tracing/callback system identifier.
+
+Used for:
+- Tracing. Every LangChain operation (LLM call, chain execution, tool use, etc.)
+  gets a unique run_id (UUID)
+- Enables tracking parent-child relationships between operations
+"""
+
+
+def ensure_id(id_val: Optional[str]) -> str:
+    """Ensure the ID is a valid string, generating a new UUID if not provided.
+
+    Auto-generated UUIDs are prefixed by ``'lc_'`` to indicate they are
+    LangChain-generated IDs.
+
+    Args:
+        id_val: Optional string ID value to validate.
+
+    Returns:
+        A string ID, either the validated provided value or a newly generated UUID4.
+    """
+    return id_val or str(f"{LC_AUTO_PREFIX}{uuid4()}")
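
A minimal sketch of the new helper added above (assuming it is imported from langchain_core.utils.utils):

from langchain_core.utils.utils import ensure_id

ensure_id("msg_123")  # -> "msg_123": caller-supplied IDs pass through unchanged
ensure_id(None)       # -> e.g. "lc_1b2e...": a freshly generated, "lc_"-prefixed UUID4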
langchain_core/version.py CHANGED
@@ -1,3 +1,3 @@
 """langchain-core version information and utilities."""
 
-VERSION = "0.3.75"
+VERSION = "1.0.0a1"
{langchain_core-0.3.75.dist-info → langchain_core-1.0.0a1.dist-info}/METADATA CHANGED
@@ -1,12 +1,12 @@
 Metadata-Version: 2.1
 Name: langchain-core
-Version: 0.3.75
+Version: 1.0.0a1
 Summary: Building applications with LLMs through composability
 License: MIT
 Project-URL: Source Code, https://github.com/langchain-ai/langchain/tree/master/libs/core
 Project-URL: Release Notes, https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-core%3D%3D0%22&expanded=true
 Project-URL: repository, https://github.com/langchain-ai/langchain
-Requires-Python: >=3.9
+Requires-Python: >=3.10
 Requires-Dist: langsmith>=0.3.45
 Requires-Dist: tenacity!=8.4.0,<10.0.0,>=8.1.0
 Requires-Dist: jsonpatch<2.0,>=1.33
{langchain_core-0.3.75.dist-info → langchain_core-1.0.0a1.dist-info}/RECORD CHANGED
@@ -1,6 +1,6 @@
-langchain_core-0.3.75.dist-info/METADATA,sha256=4bfp0MxOsonXbrNYiFGaMFugFDR5JPE6ov0RoHPgtvY,5714
-langchain_core-0.3.75.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
-langchain_core-0.3.75.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+langchain_core-1.0.0a1.dist-info/METADATA,sha256=SQGbriHdvIcGvwox-0Y4trQZu8CouYfDZw8QlxCGFgg,5716
+langchain_core-1.0.0a1.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
+langchain_core-1.0.0a1.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
 langchain_core/__init__.py,sha256=TgvhxbrjCRVJwr2HddiyHvtH8W94K-uLM6-6ifNIBXo,713
 langchain_core/_api/__init__.py,sha256=WDOMw4faVuscjDCL5ttnRQNienJP_M9vGMmJUXS6L5w,1976
 langchain_core/_api/beta_decorator.py,sha256=uN-N3vGj7-56mNbXw-eh7I-Cvgrt4V4YOoz-7jLQl1Y,9908
@@ -46,11 +46,11 @@ langchain_core/indexing/api.py,sha256=z620e6bVNUKH_2bbVGrSqQiMVfVAJQl_iyQCSBiMmY
 langchain_core/indexing/base.py,sha256=NGqIzktMBlzqK_AN3xF42DC9tP6uM0EJzBr-rJnHDu8,23298
 langchain_core/indexing/in_memory.py,sha256=-qyKjAWJFWxtH_MbUu3JJct0x3R_pbHyHuxA4Cra1nA,2709
 langchain_core/language_models/__init__.py,sha256=j6OXr7CriShFr7BYfCWZ2kOTEZpzvlE7dNDTab75prg,3778
-langchain_core/language_models/_utils.py,sha256=4TS92kBO5ee4QNH68FFWhX-2uCTe8QaxTXVFMiJLXt4,4786
-langchain_core/language_models/base.py,sha256=B5mfSVqbzVBg7lkOJIsLOHNcbgbSR24fLwGAAlPO8Xg,14473
-langchain_core/language_models/chat_models.py,sha256=AGYgVT45BRmbia0bq1j507OHCzWMcpgF4pcfWs4TnXQ,70753
+langchain_core/language_models/_utils.py,sha256=SNhec_vW0RLE5SXHvF4npT8vGL-2GPDY8EPqcAVvY_c,10111
+langchain_core/language_models/base.py,sha256=dwn3uiZsvI0zUqALQ0OmpIUFHe3YmOhRI2pEc9qQxmY,14486
+langchain_core/language_models/chat_models.py,sha256=FRYgoRdzZsh0V2iwhO-p4hhCaNlwLRBL-vk-rziymXs,77482
 langchain_core/language_models/fake.py,sha256=h9LhVTkmYLXkJ1_VvsKhqYVpkQsM7eAr9geXF_IVkPs,3772
-langchain_core/language_models/fake_chat_models.py,sha256=pmeGehdqLJFmwHWK5Ppn0v25_YLwckJgc4hQlm-YU5I,13012
+langchain_core/language_models/fake_chat_models.py,sha256=rojzv3arvsT0x2RLVJfSpaZ_zQLo18EM5MUnmwktpN4,13690
 langchain_core/language_models/llms.py,sha256=jzhJ1v4tGuuuD9lFZEYfesoE3WhtRNIWI6XKgQjPooc,56803
 langchain_core/load/__init__.py,sha256=m3_6Fk2gpYZO0xqyTnZzdQigvsYHjMariLq_L2KwJFk,1150
 langchain_core/load/dump.py,sha256=BIUX32uZUH488YUTaparyvMEAr-Y18NmwM9uyR_HaC4,2663
@@ -58,17 +58,27 @@ langchain_core/load/load.py,sha256=8Jq62M9QYcW78Iv3J_EKQG6OIAsbthudMM60gqyUjFg,9
 langchain_core/load/mapping.py,sha256=nnFXiTdQkfdv41_wP38aWGtpp9svxW6fwVyC3LmRkok,29633
 langchain_core/load/serializable.py,sha256=JIM8GTYYLXBTrRn9zal1tMJOP4z5vs-Hi-aAov6JYtY,11684
 langchain_core/memory.py,sha256=V-ARgyy5_xgvoBfp4WArMrk6NdbBjbqXdGDIbSTI_tA,3631
-langchain_core/messages/__init__.py,sha256=8H1BnLGi2oSXdIz_LWtVAwmxFvK_6_CqiDRq2jnGtw0,4253
-langchain_core/messages/ai.py,sha256=Bn9PMJZjcZrCFVSZDK8ml-zf8TIMVIwhzszxcJ1Vprk,17993
-langchain_core/messages/base.py,sha256=Rx1BIcDmZSokWltmhhzA1V7jiQb8xYwyAeZw9lvvlNU,9392
+langchain_core/messages/__init__.py,sha256=8HJOtGK8pSpql4w-q1xA2a0Fc7KYTkMZMiARuojoVaE,6290
+langchain_core/messages/ai.py,sha256=2V3yqVK5dkeg_roprSWuK2ZWgzlnPVvRV0F98cpTP3Y,24579
+langchain_core/messages/base.py,sha256=TWMnefCv8loiv5LnIBDSw6k_YHfO2uVCI6UWFg8Hpas,14591
+langchain_core/messages/block_translators/__init__.py,sha256=Mo_pXM7a2DCKNvPgp1CStt4EOGHaiNMFNaKFIn01fcA,3102
+langchain_core/messages/block_translators/anthropic.py,sha256=_oJjtSvjlIcTBD7wVSt6ltX6YGia5Tl5EaplrI68z9I,19187
+langchain_core/messages/block_translators/bedrock.py,sha256=2w3W8ClFnWlxKwa6l6CAPUE0rHdutffMGgZ8EsOqB8c,1493
+langchain_core/messages/block_translators/bedrock_converse.py,sha256=k13H9VvFPNqx60PQmoV8oergwLPqYBCKz9JXJrhrD3M,1604
+langchain_core/messages/block_translators/google_genai.py,sha256=qsE1KH3byEBftE6BLcjZBta-WuSFVM8oFGC7k99A72Q,1537
+langchain_core/messages/block_translators/google_vertexai.py,sha256=FXiJT2yIB64WZbxkho9k-qLPbcsDK74vwdFZgKik6QM,1594
+langchain_core/messages/block_translators/groq.py,sha256=6r6j9Hy3HS2hOEAZ1V-_hYHGKNPk6KmRrCmbiTTDgrY,1465
+langchain_core/messages/block_translators/langchain_v0.py,sha256=RvUfo27YkkqEMdj8ueWpg2XEj2gWnsx1yOD6aKh5qPg,11236
+langchain_core/messages/block_translators/ollama.py,sha256=BuPm47bluNhKseHozaiBNi9He8I4V7Fn038luhBttcg,1483
+langchain_core/messages/block_translators/openai.py,sha256=9Bvb_APKTPo9dfKd4DbT7ra9M5tqQIQYSE94zQOcM58,22431
 langchain_core/messages/chat.py,sha256=Vgk3y03F9NP-wKkXAjBDLOtrH43NpEMN2xaWRp6qhRA,2260
-langchain_core/messages/content_blocks.py,sha256=qs-3t-Xqpm34YmIaSXrCOItKKkAcgAR3Ha-HGvhF5d4,5026
+langchain_core/messages/content.py,sha256=1PVxEsdfzpBxFdhz8p-E2zvOkcfBMUDrfrImq4SZmm0,47747
 langchain_core/messages/function.py,sha256=QO2WgKmJ5nm7QL-xXG11Fmz3qFkHm1lL0k41WjDeEZE,2157
-langchain_core/messages/human.py,sha256=SZt8B0MhGNlnkEE7tZUmH74xgNEwZlmxYL-9VCGA_uI,1929
+langchain_core/messages/human.py,sha256=VB8sw1DaNmzrc77T9NYd1QWQYCman6GFOfrlmVBWZMU,2582
 langchain_core/messages/modifier.py,sha256=ch0RedUM_uA7wOEJHk8mkoJSNR0Rli_32BmOfdbS1dU,894
-langchain_core/messages/system.py,sha256=Zbb8zeezWs8SN6nOP-MjeBed5OtNetAsdGzf3lcl2Yc,1741
-langchain_core/messages/tool.py,sha256=OLOFxVlhWNciwoMH2DiDhVY-BUWM17iFHK0Sc7EHYsk,12192
-langchain_core/messages/utils.py,sha256=cDHeQt78RXAu9BQdnXN0ItkVje4IrKNK2ySByPWd4uE,67530
+langchain_core/messages/system.py,sha256=FE2XZ7oHWqqIxOjEOMGEkMO97PqLXwVLa-jL5mvriGE,2388
+langchain_core/messages/tool.py,sha256=5uZsdZg-FbYc5U3v71TaxtrDQf6sT895LHNXZh-CRJ8,12406
+langchain_core/messages/utils.py,sha256=ZLG9I0fJbIA3HjtGk05j-ahLJpBPF8Hqhev2pFpVAb8,67673
 langchain_core/output_parsers/__init__.py,sha256=R8L0GwY-vD9qvqze3EVELXF6i45IYUJ_FbSfno_IREg,2873
 langchain_core/output_parsers/base.py,sha256=RD0BgBBeNKDUTrEGxnLmA1DuCJowcEAfTB70Y8yqVoc,11168
 langchain_core/output_parsers/format_instructions.py,sha256=8oUbeysnVGvXWyNd5gqXlEL850D31gMTy74GflsuvRU,553
@@ -86,7 +96,7 @@ langchain_core/outputs/chat_result.py,sha256=us15wVh00AYkIVNmf0VETEI9aoEQy-cT-SI
 langchain_core/outputs/generation.py,sha256=gZRSOwdA8A4T-isxt80LasjnCKfqGbOB7zLKrpPUmkw,2376
 langchain_core/outputs/llm_result.py,sha256=Qx9UlBri8Qi2Zk5HyuC8oh2-urnzkTUcrDenengbg9Y,3738
 langchain_core/outputs/run_info.py,sha256=xCMWdsHfgnnodaf4OCMvZaWUfS836X7mV15JPkqvZjo,594
-langchain_core/prompt_values.py,sha256=HuG3X7gIYRXfFwpdOYnwksJM-OmcdAFchjoln1nXSg0,4002
+langchain_core/prompt_values.py,sha256=10UuMgiDwKkxcMBnJjHCbiWR5qOL1DQUDoEC4O-vsGA,4010
 langchain_core/prompts/__init__.py,sha256=sp3NU858CEf4YUuDYiY_-iF1x1Gb5msSyoyrk2FUI94,4123
 langchain_core/prompts/base.py,sha256=5VgLQTUBJeWjNw9_J0Ltg8L3OqbMM3OSAJ3OHlwgGBc,16092
 langchain_core/prompts/chat.py,sha256=yi0g8W6io9C8ps2es3aEscLYFLj0pfZNXcsFkZf0oEY,51891
@@ -107,7 +117,7 @@ langchain_core/pydantic_v1/main.py,sha256=uTB_757DTfo-mFKJUn_a4qS_GxmSxlqYmL2WOC
 langchain_core/rate_limiters.py,sha256=84aDspeSNpakjUZtZQGPBGjM9-w77EodI4PNh7C8fDA,9565
 langchain_core/retrievers.py,sha256=vRVCi8tuBBTswIyKykuRA0EfAjTf4P8skgC5jPjS_p8,16738
 langchain_core/runnables/__init__.py,sha256=efTnFjwN_QSAv5ThLmKuWeu8P1BLARH-cWKZBuimfDM,3858
-langchain_core/runnables/base.py,sha256=ku26cx7R0eKBtyqpm-nV5RiKtXdcG8-Im_zzaKBCub4,224003
+langchain_core/runnables/base.py,sha256=m_kLzmH-RO0s_Ko870E3sO_iw20jSoxvAOlgxvMMhlg,223996
 langchain_core/runnables/branch.py,sha256=cSJEAjM1r5Voprs6M4Cnv3Hx5pRjvHrMViiRNBnkLj4,16532
 langchain_core/runnables/config.py,sha256=c3E_CgGxTYS46ogV2EN0Lt5gribAMwHmQYBuDCYelEo,20428
 langchain_core/runnables/configurable.py,sha256=I9oRM6f3CQ3AWriWM-q4UcS5DyUn1CEIO4LUHmubt_0,24371
@@ -148,7 +158,7 @@ langchain_core/tracers/run_collector.py,sha256=Tnnz5sfKkUI6Rapj8mGjScYGkyEKRyicW
 langchain_core/tracers/schemas.py,sha256=2gDs-9zloHTjIrMfuWsr9w9cRdZ6ZMMD_h5hCRH6xHw,3768
 langchain_core/tracers/stdout.py,sha256=aZN-yz545zj34kYfrEmYzWeSz83pbqN8wNqi-ZvS1Iw,6732
 langchain_core/utils/__init__.py,sha256=N0ZeV09FHvZIVITLJlqGibb0JNtmmLvvoareFtG0DuI,3169
-langchain_core/utils/_merge.py,sha256=uo_n2mJ0_FuRJZUUgJemsXQ8rAC9fyYGOMmnPfbbDUg,5785
+langchain_core/utils/_merge.py,sha256=GxSCcGqVhYvx58B6QZFu0SA4B0JCfHLUwYDRX34GBlA,7555
 langchain_core/utils/aiter.py,sha256=wAW_a_5Lhpf8l1-iUpWHIAnyK3u-FREBvavjT83MPAM,10767
 langchain_core/utils/env.py,sha256=swKMUVFS-Jr_9KK2ToWam6qd9lt73Pz4RtRqwcaiFQw,2464
 langchain_core/utils/formatting.py,sha256=fkieArzKXxSsLcEa3B-MX60O4ZLeeLjiPtVtxCJPcOU,1480
@@ -165,10 +175,10 @@ langchain_core/utils/mustache.py,sha256=K_EnRcbYQMjQ-95-fP5G9rB2rCbpgcr1yn5QF6-T
 langchain_core/utils/pydantic.py,sha256=UFuDwQpGMZ95YFfb2coPMXva48sWn-ytQQhnqdy1ExM,17987
 langchain_core/utils/strings.py,sha256=0LaQiqpshHwMrWBGvNfFPc-AxihLGMM9vsQcSx3uAkI,1804
 langchain_core/utils/usage.py,sha256=EYv0poDqA7VejEsPyoA19lEt9M4L24Tppf4OPtOjGwI,1202
-langchain_core/utils/utils.py,sha256=RK9JRNsdb4mXu1XYuJFuvDqyglSpnr6ak0vb0ELc7Eo,15043
+langchain_core/utils/utils.py,sha256=9QpXTmA0beoaK2pvGYXvuKe3Z-c6WH6nBJuVPCfn3OA,15885
 langchain_core/vectorstores/__init__.py,sha256=5P0eoeoH5LHab64JjmEeWa6SxX4eMy-etAP1MEHsETY,804
 langchain_core/vectorstores/base.py,sha256=4AR5L6RWuAPEo9DQj9pOIN7UR3Ln45s02pU_Oe8sYsI,42026
 langchain_core/vectorstores/in_memory.py,sha256=lxe2bR-wFtvNN2Ii7EGOh3ON3MwqNRP996eUEek55fA,18076
 langchain_core/vectorstores/utils.py,sha256=DZUUR1xDybHDhmZJsd1V2OEPsYiFVc2nhtD4w8hw9ns,4934
-langchain_core/version.py,sha256=1lRxq37KIZU8Z7Y0qqWpcZYOPn7L6khPalFZl1u7AvU,76
-langchain_core-0.3.75.dist-info/RECORD,,
+langchain_core/version.py,sha256=HfC-RSEPgK8xUsSnhsgkQrAYtIg4me_JR67qwEwqo-0,77
+langchain_core-1.0.0a1.dist-info/RECORD,,
langchain_core/messages/content_blocks.py DELETED
@@ -1,155 +0,0 @@
-"""Types for content blocks."""
-
-import warnings
-from typing import Any, Literal, Union
-
-from pydantic import TypeAdapter, ValidationError
-from typing_extensions import NotRequired, TypedDict
-
-
-class BaseDataContentBlock(TypedDict, total=False):
-    """Base class for data content blocks."""
-
-    mime_type: NotRequired[str]
-    """MIME type of the content block (if needed)."""
-
-
-class URLContentBlock(BaseDataContentBlock):
-    """Content block for data from a URL."""
-
-    type: Literal["image", "audio", "file"]
-    """Type of the content block."""
-    source_type: Literal["url"]
-    """Source type (url)."""
-    url: str
-    """URL for data."""
-
-
-class Base64ContentBlock(BaseDataContentBlock):
-    """Content block for inline data from a base64 string."""
-
-    type: Literal["image", "audio", "file"]
-    """Type of the content block."""
-    source_type: Literal["base64"]
-    """Source type (base64)."""
-    data: str
-    """Data as a base64 string."""
-
-
-class PlainTextContentBlock(BaseDataContentBlock):
-    """Content block for plain text data (e.g., from a document)."""
-
-    type: Literal["file"]
-    """Type of the content block."""
-    source_type: Literal["text"]
-    """Source type (text)."""
-    text: str
-    """Text data."""
-
-
-class IDContentBlock(TypedDict):
-    """Content block for data specified by an identifier."""
-
-    type: Literal["image", "audio", "file"]
-    """Type of the content block."""
-    source_type: Literal["id"]
-    """Source type (id)."""
-    id: str
-    """Identifier for data source."""
-
-
-DataContentBlock = Union[
-    URLContentBlock,
-    Base64ContentBlock,
-    PlainTextContentBlock,
-    IDContentBlock,
-]
-
-_DataContentBlockAdapter: TypeAdapter[DataContentBlock] = TypeAdapter(DataContentBlock)
-
-
-def is_data_content_block(
-    content_block: dict,
-) -> bool:
-    """Check if the content block is a standard data content block.
-
-    Args:
-        content_block: The content block to check.
-
-    Returns:
-        True if the content block is a data content block, False otherwise.
-    """
-    try:
-        _ = _DataContentBlockAdapter.validate_python(content_block)
-    except ValidationError:
-        return False
-    else:
-        return True
-
-
-def convert_to_openai_image_block(content_block: dict[str, Any]) -> dict:
-    """Convert image content block to format expected by OpenAI Chat Completions API."""
-    if content_block["source_type"] == "url":
-        return {
-            "type": "image_url",
-            "image_url": {
-                "url": content_block["url"],
-            },
-        }
-    if content_block["source_type"] == "base64":
-        if "mime_type" not in content_block:
-            error_message = "mime_type key is required for base64 data."
-            raise ValueError(error_message)
-        mime_type = content_block["mime_type"]
-        return {
-            "type": "image_url",
-            "image_url": {
-                "url": f"data:{mime_type};base64,{content_block['data']}",
-            },
-        }
-    error_message = "Unsupported source type. Only 'url' and 'base64' are supported."
-    raise ValueError(error_message)
-
-
-def convert_to_openai_data_block(block: dict) -> dict:
-    """Format standard data content block to format expected by OpenAI."""
-    if block["type"] == "image":
-        formatted_block = convert_to_openai_image_block(block)
-
-    elif block["type"] == "file":
-        if block["source_type"] == "base64":
-            file = {"file_data": f"data:{block['mime_type']};base64,{block['data']}"}
-            if filename := block.get("filename"):
-                file["filename"] = filename
-            elif (metadata := block.get("metadata")) and ("filename" in metadata):
-                file["filename"] = metadata["filename"]
-            else:
-                warnings.warn(
-                    "OpenAI may require a filename for file inputs. Specify a filename "
-                    "in the content block: {'type': 'file', 'source_type': 'base64', "
-                    "'mime_type': 'application/pdf', 'data': '...', "
-                    "'filename': 'my-pdf'}",
-                    stacklevel=1,
-                )
-            formatted_block = {"type": "file", "file": file}
-        elif block["source_type"] == "id":
-            formatted_block = {"type": "file", "file": {"file_id": block["id"]}}
-        else:
-            error_msg = "source_type base64 or id is required for file blocks."
-            raise ValueError(error_msg)
-
-    elif block["type"] == "audio":
-        if block["source_type"] == "base64":
-            audio_format = block["mime_type"].split("/")[-1]
-            formatted_block = {
-                "type": "input_audio",
-                "input_audio": {"data": block["data"], "format": audio_format},
-            }
-        else:
-            error_msg = "source_type base64 is required for audio blocks."
-            raise ValueError(error_msg)
-    else:
-        error_msg = f"Block of type {block['type']} is not supported."
-        raise ValueError(error_msg)
-
-    return formatted_block
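
For reference, a minimal sketch of the data content block format this deleted module handled; per the messages/utils.py hunk above, ``is_data_content_block`` and ``convert_to_openai_data_block`` now live in langchain_core/messages/content.py:

block = {
    "type": "image",
    "source_type": "base64",
    "mime_type": "image/png",
    "data": "iVBORw0KGgo...",  # truncated base64 payload, illustrative only
}
# is_data_content_block(block) -> True
# convert_to_openai_image_block(block) ->
#     {"type": "image_url", "image_url": {"url": "data:image/png;base64,iVBORw0KGgo..."}}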