langchain-google-genai 0.0.2__tar.gz → 0.0.4__tar.gz

This diff shows the changes between publicly released versions of the package, exactly as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of langchain-google-genai has been flagged as potentially problematic.

@@ -1,14 +1,18 @@
  Metadata-Version: 2.1
  Name: langchain-google-genai
- Version: 0.0.2
+ Version: 0.0.4
  Summary: An integration package connecting Google's genai package and LangChain
+ Home-page: https://github.com/langchain-ai/langchain/blob/master/libs/partners/google-genai
  Requires-Python: >=3.9,<4.0
  Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
+ Provides-Extra: images
  Requires-Dist: google-generativeai (>=0.3.1,<0.4.0)
  Requires-Dist: langchain-core (>=0.1,<0.2)
+ Requires-Dist: pillow (>=10.1.0,<11.0.0) ; extra == "images"
+ Project-URL: Repository, https://github.com/langchain-ai/langchain/blob/master/libs/partners/google-genai
  Description-Content-Type: text/markdown

  # langchain-google-genai
@@ -17,10 +21,17 @@ This package contains the LangChain integrations for Gemini through their genera

  ## Installation

- ```python
+ ```bash
  pip install -U langchain-google-genai
  ```

+ ### Image utilities
+ To use image utility methods, like loading images from GCS urls, install with extras group 'images':
+
+ ```bash
+ pip install -e "langchain-google-genai[images]"
+ ```
+
  ## Chat Models

  This package contains the `ChatGoogleGenerativeAI` class, which is the recommended way to interface with the Google Gemini series of models.
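For reference, the basic chat usage that this README section introduces looks like the following minimal sketch. It mirrors the example in the package's own module docstring (added later in this diff) and assumes a valid `GOOGLE_API_KEY` is exported in the environment.

```python
# Minimal sketch of the documented chat flow; assumes GOOGLE_API_KEY is set.
from langchain_google_genai import ChatGoogleGenerativeAI

llm = ChatGoogleGenerativeAI(model="gemini-pro")
print(llm.invoke("Sing a ballad of LangChain.").content)
```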
@@ -70,3 +81,15 @@ The value of `image_url` can be any of the following:
  - A base64 encoded image (e.g., `data:image/png;base64,abcd124`)
  - A PIL image

+
+
+ ## Embeddings
+
+ This package also adds support for google's embeddings models.
+
+ ```
+ from langchain_google_genai import GoogleGenerativeAIEmbeddings
+
+ embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
+ embeddings.embed_query("hello, world!")
+ ```
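The image inputs listed above (local paths, base64 data URIs, PIL images, and, with the new `images` extra, GCS URLs) are passed as content blocks on a human message. A hedged sketch follows; the image URL is a placeholder and the vision-capable model name is assumed here to be `gemini-pro-vision`.

```python
# Hedged sketch: the URL is illustrative and GOOGLE_API_KEY must be set.
from langchain_core.messages import HumanMessage

from langchain_google_genai import ChatGoogleGenerativeAI

llm = ChatGoogleGenerativeAI(model="gemini-pro-vision")
message = HumanMessage(
    content=[
        {"type": "text", "text": "What is shown in this image?"},
        # Any supported form works here: public URL, local path, base64 data
        # URI, PIL image, or (with the images extra) a GCS URL.
        {"type": "image_url", "image_url": "https://example.com/photo.png"},
    ]
)
print(llm.invoke([message]).content)
```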
@@ -4,10 +4,17 @@ This package contains the LangChain integrations for Gemini through their genera

  ## Installation

- ```python
+ ```bash
  pip install -U langchain-google-genai
  ```

+ ### Image utilities
+ To use image utility methods, like loading images from GCS urls, install with extras group 'images':
+
+ ```bash
+ pip install -e "langchain-google-genai[images]"
+ ```
+
  ## Chat Models

  This package contains the `ChatGoogleGenerativeAI` class, which is the recommended way to interface with the Google Gemini series of models.
@@ -56,3 +63,16 @@ The value of `image_url` can be any of the following:
  - A local file path
  - A base64 encoded image (e.g., `data:image/png;base64,abcd124`)
  - A PIL image
+
+
+
+ ## Embeddings
+
+ This package also adds support for google's embeddings models.
+
+ ```
+ from langchain_google_genai import GoogleGenerativeAIEmbeddings
+
+ embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
+ embeddings.embed_query("hello, world!")
+ ```
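A hedged sketch of what the embeddings support enables beyond a single `embed_query` call, e.g. scoring documents against a query by cosine similarity. The texts are placeholders, and numpy is used only for the arithmetic (it appears below as a new test dependency, but is not required by the package itself).

```python
# Hedged sketch: assumes GOOGLE_API_KEY is set; texts are illustrative.
import numpy as np

from langchain_google_genai import GoogleGenerativeAIEmbeddings

embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
docs = ["LangChain integrates LLM providers.", "Gemini is a family of Google models."]
doc_vectors = np.array(embeddings.embed_documents(docs))
query_vector = np.array(embeddings.embed_query("Which models does Google ship?"))

# Cosine similarity of the query against each document vector.
scores = doc_vectors @ query_vector / (
    np.linalg.norm(doc_vectors, axis=1) * np.linalg.norm(query_vector)
)
print(dict(zip(docs, scores)))
```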
@@ -0,0 +1,46 @@
+ """**LangChain Google Generative AI Integration**
+
+ This module integrates Google's Generative AI models, specifically the Gemini series, with the LangChain framework. It provides classes for interacting with chat models and generating embeddings, leveraging Google's advanced AI capabilities.
+
+ **Chat Models**
+
+ The `ChatGoogleGenerativeAI` class is the primary interface for interacting with Google's Gemini chat models. It allows users to send and receive messages using a specified Gemini model, suitable for various conversational AI applications.
+
+ **Embeddings**
+
+ The `GoogleGenerativeAIEmbeddings` class provides functionalities to generate embeddings using Google's models.
+ These embeddings can be used for a range of NLP tasks, including semantic analysis, similarity comparisons, and more.
+
+ **Installation**
+
+ To install the package, use pip:
+
+ ```python
+ pip install -U langchain-google-genai
+ ```
+ ## Using Chat Models
+
+ After setting up your environment with the required API key, you can interact with the Google Gemini models.
+
+ ```python
+ from langchain_google_genai import ChatGoogleGenerativeAI
+
+ llm = ChatGoogleGenerativeAI(model="gemini-pro")
+ llm.invoke("Sing a ballad of LangChain.")
+ ```
+
+ ## Embedding Generation
+
+ The package also supports creating embeddings with Google's models, useful for textual similarity and other NLP applications.
+
+ ```python
+ from langchain_google_genai import GoogleGenerativeAIEmbeddings
+
+ embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
+ embeddings.embed_query("hello, world!")
+ ```
+ """ # noqa: E501
+ from langchain_google_genai.chat_models import ChatGoogleGenerativeAI
+ from langchain_google_genai.embeddings import GoogleGenerativeAIEmbeddings
+
+ __all__ = ["ChatGoogleGenerativeAI", "GoogleGenerativeAIEmbeddings"]
@@ -0,0 +1,4 @@
+ class GoogleGenerativeAIError(Exception):
+     """
+     Custom exception class for errors associated with the `Google GenAI` API.
+     """
@@ -5,7 +5,6 @@ import logging
  import os
  from io import BytesIO
  from typing import (
-     TYPE_CHECKING,
      Any,
      AsyncIterator,
      Callable,
@@ -22,6 +21,8 @@ from typing import (
  )
  from urllib.parse import urlparse

+ # TODO: remove ignore once the google package is published with types
+ import google.generativeai as genai # type: ignore[import]
  import requests
  from langchain_core.callbacks.manager import (
      AsyncCallbackManagerForLLMRun,
@@ -38,7 +39,7 @@ from langchain_core.messages import (
      HumanMessageChunk,
  )
  from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
- from langchain_core.pydantic_v1 import Field, root_validator
+ from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
  from langchain_core.utils import get_from_dict_or_env
  from tenacity import (
      before_sleep_log,
@@ -48,11 +49,8 @@ from tenacity import (
      wait_exponential,
  )

- logger = logging.getLogger(__name__)
+ from langchain_google_genai._common import GoogleGenerativeAIError

- if TYPE_CHECKING:
-     # TODO: remove ignore once the google package is published with types
-     import google.generativeai as genai # type: ignore[import]
  IMAGE_TYPES: Tuple = ()
  try:
      import PIL
@@ -63,8 +61,10 @@ except ImportError:
      PIL = None # type: ignore
      Image = None # type: ignore

+ logger = logging.getLogger(__name__)
+

- class ChatGoogleGenerativeAIError(Exception):
+ class ChatGoogleGenerativeAIError(GoogleGenerativeAIError):
      """
      Custom exception class for errors associated with the `Google GenAI` API.

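Because `ChatGoogleGenerativeAIError` now derives from the shared `GoogleGenerativeAIError` base introduced in `_common.py` above, a single `except` clause can handle errors raised under either name. A minimal sketch demonstrating only the class relationship:

```python
# Minimal sketch: raises the chat-specific error and catches the shared base.
from langchain_google_genai._common import GoogleGenerativeAIError
from langchain_google_genai.chat_models import ChatGoogleGenerativeAIError

assert issubclass(ChatGoogleGenerativeAIError, GoogleGenerativeAIError)

try:
    raise ChatGoogleGenerativeAIError("unsupported message type")
except GoogleGenerativeAIError as exc:
    print(f"caught via the shared base: {exc}")
```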
@@ -106,7 +106,7 @@ def _create_retry_decorator() -> Callable[[Any], Any]:
      )


- def chat_with_retry(*, generation_method: Callable, **kwargs: Any) -> Any:
+ def _chat_with_retry(*, generation_method: Callable, **kwargs: Any) -> Any:
      """
      Executes a chat generation method with retry logic using tenacity.

@@ -139,7 +139,7 @@ def chat_with_retry(*, generation_method: Callable, **kwargs: Any) -> Any:
      return _chat_with_retry(**kwargs)


- async def achat_with_retry(*, generation_method: Callable, **kwargs: Any) -> Any:
+ async def _achat_with_retry(*, generation_method: Callable, **kwargs: Any) -> Any:
      """
      Executes a chat generation method with retry logic using tenacity.

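The two helpers above were renamed with a leading underscore to mark them as internal; they still wrap the generation call in a tenacity retry. A rough sketch of that wrap-and-retry shape follows. The attempt count, wait bounds, and lack of an exception filter are assumptions for illustration; the package's actual policy lives in `_create_retry_decorator`, which is unchanged and not shown in this diff.

```python
# Illustrative only: the retry bounds below are assumptions, not the
# package's actual configuration.
import logging
from typing import Any, Callable

from tenacity import before_sleep_log, retry, stop_after_attempt, wait_exponential

logger = logging.getLogger(__name__)


def _with_retry(generation_method: Callable[..., Any], **kwargs: Any) -> Any:
    """Call generation_method(**kwargs), retrying with exponential backoff."""

    @retry(
        reraise=True,
        stop=stop_after_attempt(6),
        wait=wait_exponential(multiplier=2, min=1, max=60),
        before_sleep=before_sleep_log(logger, logging.WARNING),
    )
    def _invoke(**inner_kwargs: Any) -> Any:
        return generation_method(**inner_kwargs)

    return _invoke(**kwargs)
```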
@@ -269,8 +269,6 @@ def _convert_to_parts(
      content: Sequence[Union[str, dict]],
  ) -> List[genai.types.PartType]:
      """Converts a list of LangChain messages into a google parts."""
-     import google.generativeai as genai
-
      parts = []
      for part in content:
          if isinstance(part, str):
@@ -410,8 +408,7 @@ def _response_to_result(
  class ChatGoogleGenerativeAI(BaseChatModel):
      """`Google Generative AI` Chat models API.

-     To use you must have the google.generativeai Python package installed and
-     either:
+     To use, you must have either:

          1. The ``GOOGLE_API_KEY``` environment variable set with your API key, or
          2. Pass your API key using the google_api_key kwarg to the ChatGoogle
@@ -435,7 +432,7 @@ Supported examples:
      max_output_tokens: int = Field(default=None, description="Max output tokens")

      client: Any #: :meta private:
-     google_api_key: Optional[str] = None
+     google_api_key: Optional[SecretStr] = None
      temperature: Optional[float] = None
      """Run inference with this temperature. Must by in the closed
         interval [0.0, 1.0]."""
@@ -487,17 +484,9 @@ Supported examples:
          google_api_key = get_from_dict_or_env(
              values, "google_api_key", "GOOGLE_API_KEY"
          )
-         try:
-             import google.generativeai as genai
-
-             genai.configure(api_key=google_api_key)
-         except ImportError:
-             raise ChatGoogleGenerativeAIError(
-                 "Could not import google.generativeai python package. "
-                 "Please install it with `pip install google-generativeai`"
-             )
-
-         values["client"] = genai
+         if isinstance(google_api_key, SecretStr):
+             google_api_key = google_api_key.get_secret_value()
+         genai.configure(api_key=google_api_key)
          if (
              values.get("temperature") is not None
              and not 0 <= values["temperature"] <= 1
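The validator now accepts the key from the `GOOGLE_API_KEY` environment variable or from the `google_api_key` constructor argument, unwrapping pydantic `SecretStr` values before calling `genai.configure`. A minimal sketch of both entry points; the key string is a placeholder.

```python
# Hedged sketch: "your-api-key" is a placeholder, not a real credential.
import os

from langchain_core.pydantic_v1 import SecretStr

from langchain_google_genai import ChatGoogleGenerativeAI

# Option 1: rely on the environment variable.
os.environ["GOOGLE_API_KEY"] = "your-api-key"
llm_from_env = ChatGoogleGenerativeAI(model="gemini-pro")

# Option 2: pass the key explicitly; a plain string or a SecretStr both work,
# since the validator unwraps SecretStr before configuring the client.
llm_explicit = ChatGoogleGenerativeAI(
    model="gemini-pro", google_api_key=SecretStr("your-api-key")
)
```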
@@ -560,7 +549,7 @@ Supported examples:
          **kwargs: Any,
      ) -> ChatResult:
          params = self._prepare_params(messages, stop, **kwargs)
-         response: genai.types.GenerateContentResponse = chat_with_retry(
+         response: genai.types.GenerateContentResponse = _chat_with_retry(
              **params,
              generation_method=self._generation_method,
          )
@@ -574,7 +563,7 @@ Supported examples:
          **kwargs: Any,
      ) -> ChatResult:
          params = self._prepare_params(messages, stop, **kwargs)
-         response: genai.types.GenerateContentResponse = await achat_with_retry(
+         response: genai.types.GenerateContentResponse = await _achat_with_retry(
              **params,
              generation_method=self._async_generation_method,
          )
@@ -588,7 +577,7 @@ Supported examples:
          **kwargs: Any,
      ) -> Iterator[ChatGenerationChunk]:
          params = self._prepare_params(messages, stop, **kwargs)
-         response: genai.types.GenerateContentResponse = chat_with_retry(
+         response: genai.types.GenerateContentResponse = _chat_with_retry(
              **params,
              generation_method=self._generation_method,
              stream=True,
@@ -614,7 +603,7 @@ Supported examples:
          **kwargs: Any,
      ) -> AsyncIterator[ChatGenerationChunk]:
          params = self._prepare_params(messages, stop, **kwargs)
-         async for chunk in await achat_with_retry(
+         async for chunk in await _achat_with_retry(
              **params,
              generation_method=self._async_generation_method,
              stream=True,
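Both streaming paths now route through the underscore-prefixed retry helpers; from the caller's side nothing changes, and streaming is still consumed through the standard LangChain interface. A minimal sketch, assuming `GOOGLE_API_KEY` is set and an illustrative prompt:

```python
# Hedged sketch of consuming the sync and async streaming interfaces.
import asyncio

from langchain_google_genai import ChatGoogleGenerativeAI

llm = ChatGoogleGenerativeAI(model="gemini-pro")

# Synchronous streaming (backed by _stream / _chat_with_retry).
for chunk in llm.stream("Write a haiku about version bumps."):
    print(chunk.content, end="", flush=True)


# Asynchronous streaming (backed by _astream / _achat_with_retry).
async def main() -> None:
    async for chunk in llm.astream("Write a haiku about version bumps."):
        print(chunk.content, end="", flush=True)


asyncio.run(main())
```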
@@ -0,0 +1,99 @@
+ from typing import Dict, List, Optional
+
+ # TODO: remove ignore once the google package is published with types
+ import google.generativeai as genai # type: ignore[import]
+ from langchain_core.embeddings import Embeddings
+ from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
+ from langchain_core.utils import get_from_dict_or_env
+
+ from langchain_google_genai._common import GoogleGenerativeAIError
+
+
+ class GoogleGenerativeAIEmbeddings(BaseModel, Embeddings):
+     """`Google Generative AI Embeddings`.
+
+     To use, you must have either:
+
+         1. The ``GOOGLE_API_KEY``` environment variable set with your API key, or
+         2. Pass your API key using the google_api_key kwarg to the ChatGoogle
+            constructor.
+
+     Example:
+         .. code-block:: python
+
+             from langchain_google_genai import GoogleGenerativeAIEmbeddings
+
+             embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
+             embeddings.embed_query("What's our Q1 revenue?")
+     """
+
+     model: str = Field(
+         ...,
+         description="The name of the embedding model to use. "
+         "Example: models/embedding-001",
+     )
+     task_type: Optional[str] = Field(
+         None,
+         description="The task type. Valid options include: "
+         "task_type_unspecified, retrieval_query, retrieval_document, "
+         "semantic_similarity, classification, and clustering",
+     )
+     google_api_key: Optional[SecretStr] = Field(
+         None,
+         description="The Google API key to use. If not provided, "
+         "the GOOGLE_API_KEY environment variable will be used.",
+     )
+
+     @root_validator()
+     def validate_environment(cls, values: Dict) -> Dict:
+         """Validates that the python package exists in environment."""
+         google_api_key = get_from_dict_or_env(
+             values, "google_api_key", "GOOGLE_API_KEY"
+         )
+         if isinstance(google_api_key, SecretStr):
+             google_api_key = google_api_key.get_secret_value()
+         genai.configure(api_key=google_api_key)
+         return values
+
+     def _embed(
+         self, texts: List[str], task_type: str, title: Optional[str] = None
+     ) -> List[List[float]]:
+         task_type = self.task_type or "retrieval_document"
+         try:
+             result = genai.embed_content(
+                 model=self.model,
+                 content=texts,
+                 task_type=task_type,
+                 title=title,
+             )
+         except Exception as e:
+             raise GoogleGenerativeAIError(f"Error embedding content: {e}") from e
+         return result["embedding"]
+
+     def embed_documents(
+         self, texts: List[str], batch_size: int = 5
+     ) -> List[List[float]]:
+         """Embed a list of strings. Vertex AI currently
+         sets a max batch size of 5 strings.
+
+         Args:
+             texts: List[str] The list of strings to embed.
+             batch_size: [int] The batch size of embeddings to send to the model
+
+         Returns:
+             List of embeddings, one for each text.
+         """
+         task_type = self.task_type or "retrieval_document"
+         return self._embed(texts, task_type=task_type)
+
+     def embed_query(self, text: str) -> List[float]:
+         """Embed a text.
+
+         Args:
+             text: The text to embed.
+
+         Returns:
+             Embedding for the text.
+         """
+         task_type = self.task_type or "retrieval_query"
+         return self._embed([text], task_type=task_type)[0]
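A short usage sketch for the embeddings class added above. Note that `_embed` falls back to `retrieval_document` when the `task_type` field is unset, and setting the field applies one task type to every call; the texts and the explicit `task_type` below are only illustrative.

```python
# Hedged sketch: assumes GOOGLE_API_KEY is set; inputs are placeholders.
from langchain_google_genai import GoogleGenerativeAIEmbeddings

embeddings = GoogleGenerativeAIEmbeddings(
    model="models/embedding-001",
    task_type="semantic_similarity",  # optional; overrides the default task type
)

doc_vectors = embeddings.embed_documents(["First document.", "Second document."])
query_vector = embeddings.embed_query("A short query.")
print(len(doc_vectors), len(query_vector))
```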
@@ -1,14 +1,19 @@
  [tool.poetry]
  name = "langchain-google-genai"
- version = "0.0.2"
+ version = "0.0.4"
  description = "An integration package connecting Google's genai package and LangChain"
  authors = []
  readme = "README.md"
+ repository = "https://github.com/langchain-ai/langchain/blob/master/libs/partners/google-genai"

  [tool.poetry.dependencies]
  python = ">=3.9,<4.0"
  langchain-core = "^0.1"
  google-generativeai = "^0.3.1"
+ pillow = { version = "^10.1.0", optional = true }
+
+ [tool.poetry.extras]
+ images = ["pillow"]

  [tool.poetry.group.test]
  optional = true
@@ -16,11 +21,12 @@ optional = true
  [tool.poetry.group.test.dependencies]
  pytest = "^7.3.0"
  freezegun = "^1.2.2"
- pytest-mock = "^3.10.0"
+ pytest-mock = "^3.10.0"
  syrupy = "^4.0.2"
  pytest-watcher = "^0.3.4"
  pytest-asyncio = "^0.21.1"
- langchain-core = {path = "../../core", develop = true}
+ langchain-core = { path = "../../core", develop = true }
+ numpy = "^1.26.2"

  [tool.poetry.group.codespell]
  optional = true
@@ -32,6 +38,8 @@ codespell = "^2.2.0"
  optional = true

  [tool.poetry.group.test_integration.dependencies]
+ pillow = "^10.1.0"
+


  [tool.poetry.group.lint]
@@ -41,7 +49,7 @@ ruff = "^0.1.5"

  [tool.poetry.group.typing.dependencies]
  mypy = "^0.991"
- langchain-core = {path = "../../core", develop = true}
+ langchain-core = { path = "../../core", develop = true }
  types-requests = "^2.28.11.5"
  types-google-cloud-ndb = "^2.2.0.1"
  types-pillow = "^10.1.0.2"
@@ -50,7 +58,7 @@ types-pillow = "^10.1.0.2"
  optional = true

  [tool.poetry.group.dev.dependencies]
- langchain-core = {path = "../../core", develop = true}
+ langchain-core = { path = "../../core", develop = true }
  pillow = "^10.1.0"
  types-requests = "^2.31.0.10"
  types-pillow = "^10.1.0.2"
@@ -58,19 +66,16 @@ types-google-cloud-ndb = "^2.2.0.1"

  [tool.ruff]
  select = [
- "E", # pycodestyle
- "F", # pyflakes
- "I", # isort
+ "E", # pycodestyle
+ "F", # pyflakes
+ "I", # isort
  ]

  [tool.mypy]
  disallow_untyped_defs = "True"
- exclude = ["notebooks", "examples", "example_data", "langchain_core/pydantic"]

  [tool.coverage.run]
- omit = [
- "tests/*",
- ]
+ omit = ["tests/*"]

  [build-system]
  requires = ["poetry-core>=1.0.0"]
@@ -1,3 +0,0 @@
- from langchain_google_genai.chat_models import ChatGoogleGenerativeAI
-
- __all__ = ["ChatGoogleGenerativeAI"]