langchain-core 1.0.3-py3-none-any.whl → 1.0.5-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. The information is provided for informational purposes only.
Files changed (53)
  1. langchain_core/agents.py +36 -27
  2. langchain_core/callbacks/base.py +1 -2
  3. langchain_core/callbacks/manager.py +19 -2
  4. langchain_core/callbacks/usage.py +2 -2
  5. langchain_core/documents/base.py +6 -6
  6. langchain_core/example_selectors/length_based.py +1 -1
  7. langchain_core/indexing/api.py +17 -14
  8. langchain_core/language_models/_utils.py +1 -1
  9. langchain_core/language_models/base.py +50 -20
  10. langchain_core/language_models/chat_models.py +48 -29
  11. langchain_core/language_models/llms.py +66 -36
  12. langchain_core/load/load.py +15 -9
  13. langchain_core/messages/ai.py +3 -3
  14. langchain_core/messages/base.py +4 -3
  15. langchain_core/messages/block_translators/__init__.py +2 -1
  16. langchain_core/messages/block_translators/openai.py +2 -1
  17. langchain_core/messages/content.py +2 -2
  18. langchain_core/messages/utils.py +12 -8
  19. langchain_core/output_parsers/openai_tools.py +14 -2
  20. langchain_core/outputs/chat_generation.py +4 -2
  21. langchain_core/outputs/generation.py +6 -5
  22. langchain_core/prompt_values.py +2 -2
  23. langchain_core/prompts/base.py +50 -45
  24. langchain_core/prompts/chat.py +35 -28
  25. langchain_core/prompts/dict.py +1 -1
  26. langchain_core/prompts/message.py +5 -5
  27. langchain_core/prompts/string.py +4 -2
  28. langchain_core/runnables/base.py +97 -52
  29. langchain_core/runnables/branch.py +22 -20
  30. langchain_core/runnables/configurable.py +30 -29
  31. langchain_core/runnables/fallbacks.py +22 -20
  32. langchain_core/runnables/graph.py +1 -2
  33. langchain_core/runnables/graph_ascii.py +2 -1
  34. langchain_core/runnables/graph_mermaid.py +4 -1
  35. langchain_core/runnables/graph_png.py +28 -0
  36. langchain_core/runnables/history.py +43 -32
  37. langchain_core/runnables/passthrough.py +35 -25
  38. langchain_core/runnables/router.py +5 -5
  39. langchain_core/runnables/schema.py +1 -1
  40. langchain_core/runnables/utils.py +3 -2
  41. langchain_core/sys_info.py +4 -2
  42. langchain_core/tools/base.py +22 -16
  43. langchain_core/tracers/core.py +6 -6
  44. langchain_core/utils/function_calling.py +11 -7
  45. langchain_core/utils/input.py +3 -0
  46. langchain_core/utils/json.py +4 -2
  47. langchain_core/utils/pydantic.py +5 -4
  48. langchain_core/vectorstores/base.py +1 -2
  49. langchain_core/vectorstores/in_memory.py +1 -2
  50. langchain_core/version.py +1 -1
  51. {langchain_core-1.0.3.dist-info → langchain_core-1.0.5.dist-info}/METADATA +2 -2
  52. {langchain_core-1.0.3.dist-info → langchain_core-1.0.5.dist-info}/RECORD +53 -53
  53. {langchain_core-1.0.3.dist-info → langchain_core-1.0.5.dist-info}/WHEEL +0 -0
@@ -6,7 +6,7 @@ import contextlib
 import json
 import typing
 from abc import ABC, abstractmethod
-from collections.abc import Callable, Mapping
+from collections.abc import Mapping
 from functools import cached_property
 from pathlib import Path
 from typing import (
@@ -33,6 +33,8 @@ from langchain_core.runnables.config import ensure_config
 from langchain_core.utils.pydantic import create_model_v2
 
 if TYPE_CHECKING:
+    from collections.abc import Callable
+
     from langchain_core.documents import Document
 
 
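The two hunks above move `Callable` from the module's runtime imports into the `if TYPE_CHECKING:` block, so it is only imported while type checking. A minimal, self-contained sketch of that pattern (the function names below are illustrative, not part of langchain-core):

```python
# Typing-only import: evaluated by static type checkers, never at runtime.
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from collections.abc import Callable


def transform(fn: Callable[[str], str], text: str) -> str:
    """With postponed annotations, `Callable` stays a string at runtime."""
    return fn(text)


print(transform(str.upper, "hello"))  # HELLO
```

With postponed evaluation of annotations the import is never needed at runtime, which trims import cost and avoids circular-import risk.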
@@ -46,23 +48,27 @@ class BasePromptTemplate(
 
     input_variables: list[str]
     """A list of the names of the variables whose values are required as inputs to the
-    prompt."""
+    prompt.
+    """
     optional_variables: list[str] = Field(default=[])
     """A list of the names of the variables for placeholder or `MessagePlaceholder` that
     are optional.
 
-    These variables are auto inferred from the prompt and user need not provide them."""
+    These variables are auto inferred from the prompt and user need not provide them.
+    """
     input_types: typing.Dict[str, Any] = Field(default_factory=dict, exclude=True)  # noqa: UP006
     """A dictionary of the types of the variables the prompt template expects.
 
-    If not provided, all variables are assumed to be strings."""
+    If not provided, all variables are assumed to be strings.
+    """
     output_parser: BaseOutputParser | None = None
     """How to parse the output of calling an LLM on this formatted prompt."""
     partial_variables: Mapping[str, Any] = Field(default_factory=dict)
     """A dictionary of the partial variables the prompt template carries.
 
-    Partial variables populate the template so that you don't need to
-    pass them in every time you call the prompt."""
+    Partial variables populate the template so that you don't need to pass them in every
+    time you call the prompt.
+    """
     metadata: typing.Dict[str, Any] | None = None  # noqa: UP006
     """Metadata to be used for tracing."""
     tags: list[str] | None = None
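The field docstrings above describe how a prompt template's variables fit together. A hedged sketch using the public `PromptTemplate` API to show `input_variables` being inferred and `partial()` populating `partial_variables`:

```python
from langchain_core.prompts import PromptTemplate

prompt = PromptTemplate.from_template("{greeting}, {name}!")
# Variable names are inferred from the template string.
assert set(prompt.input_variables) == {"greeting", "name"}

# partial() moves `greeting` into partial_variables, so later calls only
# need to supply the remaining required inputs.
partial_prompt = prompt.partial(greeting="Hello")
assert partial_prompt.format(name="Ada") == "Hello, Ada!"
```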
@@ -107,7 +113,7 @@ class BasePromptTemplate(
 
     @classmethod
     def is_lc_serializable(cls) -> bool:
-        """Return True as this class is serializable."""
+        """Return `True` as this class is serializable."""
         return True
 
     model_config = ConfigDict(
@@ -129,7 +135,7 @@ class BasePromptTemplate(
         """Get the input schema for the prompt.
 
         Args:
-            config: configuration for the prompt.
+            config: Configuration for the prompt.
 
         Returns:
             The input schema for the prompt.
@@ -197,8 +203,8 @@ class BasePromptTemplate(
         """Invoke the prompt.
 
         Args:
-            input: Dict, input to the prompt.
-            config: RunnableConfig, configuration for the prompt.
+            input: Input to the prompt.
+            config: Configuration for the prompt.
 
         Returns:
             The output of the prompt.
@@ -223,8 +229,8 @@ class BasePromptTemplate(
         """Async invoke the prompt.
 
         Args:
-            input: Dict, input to the prompt.
-            config: RunnableConfig, configuration for the prompt.
+            input: Input to the prompt.
+            config: Configuration for the prompt.
 
         Returns:
             The output of the prompt.
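Both `invoke` and `ainvoke` take a dict of variable values and return a `PromptValue`. A short sketch of the synchronous path these docstrings describe:

```python
from langchain_core.prompts import PromptTemplate

prompt = PromptTemplate.from_template("Tell me a joke about {topic}")
value = prompt.invoke({"topic": "bears"})

# The PromptValue can be rendered as a plain string or as chat messages.
print(value.to_string())    # Tell me a joke about bears
print(value.to_messages())  # [HumanMessage(content='Tell me a joke about bears')]
```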
@@ -244,7 +250,7 @@ class BasePromptTemplate(
 
     @abstractmethod
     def format_prompt(self, **kwargs: Any) -> PromptValue:
-        """Create Prompt Value.
+        """Create `PromptValue`.
 
         Args:
             **kwargs: Any arguments to be passed to the prompt template.
@@ -254,7 +260,7 @@ class BasePromptTemplate(
         """
 
     async def aformat_prompt(self, **kwargs: Any) -> PromptValue:
-        """Async create Prompt Value.
+        """Async create `PromptValue`.
 
         Args:
             **kwargs: Any arguments to be passed to the prompt template.
@@ -268,7 +274,7 @@ class BasePromptTemplate(
         """Return a partial of the prompt template.
 
         Args:
-            **kwargs: partial variables to set.
+            **kwargs: Partial variables to set.
 
         Returns:
             A partial of the prompt template.
@@ -298,9 +304,9 @@ class BasePromptTemplate(
             A formatted string.
 
         Example:
-        ```python
-        prompt.format(variable1="foo")
-        ```
+            ```python
+            prompt.format(variable1="foo")
+            ```
         """
 
     async def aformat(self, **kwargs: Any) -> FormatOutputType:
@@ -313,9 +319,9 @@ class BasePromptTemplate(
             A formatted string.
 
         Example:
-        ```python
-        await prompt.aformat(variable1="foo")
-        ```
+            ```python
+            await prompt.aformat(variable1="foo")
+            ```
         """
         return self.format(**kwargs)
 
@@ -350,9 +356,9 @@ class BasePromptTemplate(
             NotImplementedError: If the prompt type is not implemented.
 
         Example:
-        ```python
-        prompt.save(file_path="path/prompt.yaml")
-        ```
+            ```python
+            prompt.save(file_path="path/prompt.yaml")
+            ```
         """
         if self.partial_variables:
             msg = "Cannot save prompt with partial variables."
@@ -404,23 +410,23 @@ def format_document(doc: Document, prompt: BasePromptTemplate[str]) -> str:
 
     First, this pulls information from the document from two sources:
 
-    1. page_content:
-        This takes the information from the `document.page_content`
-        and assigns it to a variable named `page_content`.
-    2. metadata:
-        This takes information from `document.metadata` and assigns
-        it to variables of the same name.
+    1. `page_content`:
+        This takes the information from the `document.page_content` and assigns it to a
+        variable named `page_content`.
+    2. `metadata`:
+        This takes information from `document.metadata` and assigns it to variables of
+        the same name.
 
     Those variables are then passed into the `prompt` to produce a formatted string.
 
     Args:
-        doc: Document, the page_content and metadata will be used to create
+        doc: `Document`, the `page_content` and `metadata` will be used to create
             the final string.
-        prompt: BasePromptTemplate, will be used to format the page_content
-            and metadata into the final string.
+        prompt: `BasePromptTemplate`, will be used to format the `page_content`
+            and `metadata` into the final string.
 
     Returns:
-        string of the document formatted.
+        String of the document formatted.
 
     Example:
         ```python
@@ -431,7 +437,6 @@ def format_document(doc: Document, prompt: BasePromptTemplate[str]) -> str:
         prompt = PromptTemplate.from_template("Page {page}: {page_content}")
         format_document(doc, prompt)
         >>> "Page 1: This is a joke"
-
         ```
     """
     return prompt.format(**_get_document_info(doc, prompt))
@@ -442,22 +447,22 @@ async def aformat_document(doc: Document, prompt: BasePromptTemplate[str]) -> str:
 
     First, this pulls information from the document from two sources:
 
-    1. page_content:
-        This takes the information from the `document.page_content`
-        and assigns it to a variable named `page_content`.
-    2. metadata:
-        This takes information from `document.metadata` and assigns
-        it to variables of the same name.
+    1. `page_content`:
+        This takes the information from the `document.page_content` and assigns it to a
+        variable named `page_content`.
+    2. `metadata`:
+        This takes information from `document.metadata` and assigns it to variables of
+        the same name.
 
     Those variables are then passed into the `prompt` to produce a formatted string.
 
     Args:
-        doc: Document, the page_content and metadata will be used to create
+        doc: `Document`, the `page_content` and `metadata` will be used to create
             the final string.
-        prompt: BasePromptTemplate, will be used to format the page_content
-            and metadata into the final string.
+        prompt: `BasePromptTemplate`, will be used to format the `page_content`
+            and `metadata` into the final string.
 
     Returns:
-        string of the document formatted.
+        String of the document formatted.
     """
     return await prompt.aformat(**_get_document_info(doc, prompt))
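A hedged async counterpart to the docstring example above, with `aformat_document` imported from `langchain_core.prompts.base` (the module both helpers live in):

```python
import asyncio

from langchain_core.documents import Document
from langchain_core.prompts import PromptTemplate
from langchain_core.prompts.base import aformat_document


async def main() -> None:
    doc = Document(page_content="This is a joke", metadata={"page": "1"})
    prompt = PromptTemplate.from_template("Page {page}: {page_content}")
    print(await aformat_document(doc, prompt))  # Page 1: This is a joke


asyncio.run(main())
```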
@@ -587,14 +587,15 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
         for prompt in self.prompt:
             inputs = {var: kwargs[var] for var in prompt.input_variables}
             if isinstance(prompt, StringPromptTemplate):
-                formatted: str | ImageURL | dict[str, Any] = prompt.format(**inputs)
-                content.append({"type": "text", "text": formatted})
+                formatted_text: str = prompt.format(**inputs)
+                if formatted_text != "":
+                    content.append({"type": "text", "text": formatted_text})
             elif isinstance(prompt, ImagePromptTemplate):
-                formatted = prompt.format(**inputs)
-                content.append({"type": "image_url", "image_url": formatted})
+                formatted_image: ImageURL = prompt.format(**inputs)
+                content.append({"type": "image_url", "image_url": formatted_image})
             elif isinstance(prompt, DictPromptTemplate):
-                formatted = prompt.format(**inputs)
-                content.append(formatted)
+                formatted_dict: dict[str, Any] = prompt.format(**inputs)
+                content.append(formatted_dict)
         return self._msg_class(
             content=content, additional_kwargs=self.additional_kwargs
         )
@@ -617,16 +618,15 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
         for prompt in self.prompt:
             inputs = {var: kwargs[var] for var in prompt.input_variables}
             if isinstance(prompt, StringPromptTemplate):
-                formatted: str | ImageURL | dict[str, Any] = await prompt.aformat(
-                    **inputs
-                )
-                content.append({"type": "text", "text": formatted})
+                formatted_text: str = await prompt.aformat(**inputs)
+                if formatted_text != "":
+                    content.append({"type": "text", "text": formatted_text})
             elif isinstance(prompt, ImagePromptTemplate):
-                formatted = await prompt.aformat(**inputs)
-                content.append({"type": "image_url", "image_url": formatted})
+                formatted_image: ImageURL = await prompt.aformat(**inputs)
+                content.append({"type": "image_url", "image_url": formatted_image})
             elif isinstance(prompt, DictPromptTemplate):
-                formatted = prompt.format(**inputs)
-                content.append(formatted)
+                formatted_dict: dict[str, Any] = prompt.format(**inputs)
+                content.append(formatted_dict)
         return self._msg_class(
             content=content, additional_kwargs=self.additional_kwargs
         )
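The substantive change in these two hunks is that a string part which renders to an empty string no longer emits a `{"type": "text", "text": ""}` block. A hedged sketch of the observable effect, assuming the multimodal content-block form of `ChatPromptTemplate.from_messages`:

```python
from langchain_core.prompts import ChatPromptTemplate

template = ChatPromptTemplate.from_messages(
    [
        (
            "human",
            [
                {"type": "text", "text": "{caption}"},
                {"type": "image_url", "image_url": "{image_url}"},
            ],
        )
    ]
)

message = template.format_messages(
    caption="", image_url="https://example.com/cat.png"
)[0]
# Expected with 1.0.5: only the image_url block remains in message.content.
print(message.content)
```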
@@ -1343,11 +1343,25 @@ def _create_template_from_message_type(
                 raise ValueError(msg)
             var_name = template[1:-1]
             message = MessagesPlaceholder(variable_name=var_name, optional=True)
-        elif len(template) == 2 and isinstance(template[1], bool):
-            var_name_wrapped, is_optional = template
+        else:
+            try:
+                var_name_wrapped, is_optional = template
+            except ValueError as e:
+                msg = (
+                    "Unexpected arguments for placeholder message type."
+                    " Expected either a single string variable name"
+                    " or a list of [variable_name: str, is_optional: bool]."
+                    f" Got: {template}"
+                )
+                raise ValueError(msg) from e
+
+            if not isinstance(is_optional, bool):
+                msg = f"Expected is_optional to be a boolean. Got: {is_optional}"
+                raise ValueError(msg)  # noqa: TRY004
+
             if not isinstance(var_name_wrapped, str):
                 msg = f"Expected variable name to be a string. Got: {var_name_wrapped}"
-                raise ValueError(msg)  # noqa:TRY004
+                raise ValueError(msg)  # noqa: TRY004
             if var_name_wrapped[0] != "{" or var_name_wrapped[-1] != "}":
                 msg = (
                     f"Invalid placeholder template: {var_name_wrapped}."
@@ -1357,14 +1371,6 @@ def _create_template_from_message_type(
             var_name = var_name_wrapped[1:-1]
 
             message = MessagesPlaceholder(variable_name=var_name, optional=is_optional)
-        else:
-            msg = (
-                "Unexpected arguments for placeholder message type."
-                " Expected either a single string variable name"
-                " or a list of [variable_name: str, is_optional: bool]."
-                f" Got: {template}"
-            )
-            raise ValueError(msg)
     else:
         msg = (
             f"Unexpected message type: {message_type}. Use one of 'human',"
@@ -1418,10 +1424,11 @@ def _convert_to_message_template(
                 )
                 raise ValueError(msg)
             message = (message["role"], message["content"])
-        if len(message) != 2:
+        try:
+            message_type_str, template = message
+        except ValueError as e:
             msg = f"Expected 2-tuple of (role, template), got {message}"
-            raise ValueError(msg)
-        message_type_str, template = message
+            raise ValueError(msg) from e
         if isinstance(message_type_str, str):
             message_ = _create_template_from_message_type(
                 message_type_str, template, template_format=template_format
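`_convert_to_message_template` normalizes `{"role": ..., "content": ...}` dicts into `(role, template)` 2-tuples before unpacking them, so the two spellings below build equivalent templates. A short sketch:

```python
from langchain_core.prompts import ChatPromptTemplate

as_tuple = ChatPromptTemplate.from_messages([("human", "Hello {name}")])
as_dict = ChatPromptTemplate.from_messages(
    [{"role": "human", "content": "Hello {name}"}]
)

assert as_tuple.format_messages(name="Ada") == as_dict.format_messages(name="Ada")
```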
@@ -69,7 +69,7 @@ class DictPromptTemplate(RunnableSerializable[dict, dict]):
 
     @classmethod
     def is_lc_serializable(cls) -> bool:
-        """Return True as this class is serializable."""
+        """Return `True` as this class is serializable."""
         return True
 
     @classmethod
@@ -6,10 +6,10 @@ from abc import ABC, abstractmethod
 from typing import TYPE_CHECKING, Any
 
 from langchain_core.load import Serializable
-from langchain_core.messages import BaseMessage
 from langchain_core.utils.interactive_env import is_interactive_env
 
 if TYPE_CHECKING:
+    from langchain_core.messages import BaseMessage
     from langchain_core.prompts.chat import ChatPromptTemplate
 
 
@@ -18,7 +18,7 @@ class BaseMessagePromptTemplate(Serializable, ABC):
 
     @classmethod
     def is_lc_serializable(cls) -> bool:
-        """Return True as this class is serializable."""
+        """Return `True` as this class is serializable."""
         return True
 
     @classmethod
@@ -32,13 +32,13 @@ class BaseMessagePromptTemplate(Serializable, ABC):
 
     @abstractmethod
     def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
-        """Format messages from kwargs. Should return a list of BaseMessages.
+        """Format messages from kwargs. Should return a list of `BaseMessage` objects.
 
         Args:
             **kwargs: Keyword arguments to use for formatting.
 
         Returns:
-            List of BaseMessages.
+            List of `BaseMessage` objects.
         """
 
     async def aformat_messages(self, **kwargs: Any) -> list[BaseMessage]:
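`format_messages` is the abstract hook that concrete message templates implement; `MessagesPlaceholder` is the simplest case, passing a list of `BaseMessage` objects straight through. A small sketch:

```python
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.prompts import MessagesPlaceholder

placeholder = MessagesPlaceholder(variable_name="history")
history = [HumanMessage("Hi there"), AIMessage("Hello!")]

# The placeholder returns the messages bound to its variable name.
assert placeholder.format_messages(history=history) == history
```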
@@ -48,7 +48,7 @@ class BaseMessagePromptTemplate(Serializable, ABC):
             **kwargs: Keyword arguments to use for formatting.
 
         Returns:
-            List of BaseMessages.
+            List of `BaseMessage` objects.
         """
         return self.format_messages(**kwargs)
 
@@ -4,9 +4,8 @@ from __future__ import annotations
 
 import warnings
 from abc import ABC
-from collections.abc import Callable, Sequence
 from string import Formatter
-from typing import Any, Literal
+from typing import TYPE_CHECKING, Any, Literal
 
 from pydantic import BaseModel, create_model
 
@@ -16,6 +15,9 @@ from langchain_core.utils import get_colored_text, mustache
 from langchain_core.utils.formatting import formatter
 from langchain_core.utils.interactive_env import is_interactive_env
 
+if TYPE_CHECKING:
+    from collections.abc import Callable, Sequence
+
 try:
     from jinja2 import Environment, meta
     from jinja2.sandbox import SandboxedEnvironment
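The `try` around the jinja2 imports (which continues past the end of this excerpt) is the usual optional-dependency guard: the module still imports when jinja2 is absent, and a clear error is raised only when a jinja2 template is actually used. A generic sketch of that pattern, not the module's exact code:

```python
try:
    from jinja2.sandbox import SandboxedEnvironment
except ImportError:  # jinja2 is an optional extra
    SandboxedEnvironment = None  # type: ignore[assignment]


def render_jinja2(template: str, **kwargs: object) -> str:
    """Render a jinja2 template, failing loudly if jinja2 is missing."""
    if SandboxedEnvironment is None:
        msg = "jinja2 is required for jinja2 templates: pip install jinja2"
        raise ImportError(msg)
    return SandboxedEnvironment().from_string(template).render(**kwargs)


print(render_jinja2("Hello {{ name }}", name="Ada"))
```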