langchain-google-genai 0.0.11__tar.gz → 1.0.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of langchain-google-genai has been flagged as potentially problematic; see the package's registry listing for details.
- {langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/PKG-INFO +3 -3
- {langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/chat_models.py +37 -39
- {langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/embeddings.py +26 -12
- {langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/llms.py +28 -12
- {langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/pyproject.toml +6 -3
- {langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/LICENSE +0 -0
- {langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/README.md +0 -0
- {langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/__init__.py +0 -0
- {langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/_common.py +0 -0
- {langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/_enums.py +0 -0
- {langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/_function_utils.py +0 -0
- {langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/_genai_extension.py +0 -0
- {langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/genai_aqa.py +0 -0
- {langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/google_vector_store.py +0 -0
- {langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/py.typed +0 -0
{langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: langchain-google-genai
-Version: 0.0.11
+Version: 1.0.2
 Summary: An integration package connecting Google's genai package and LangChain
 Home-page: https://github.com/langchain-ai/langchain-google
 License: MIT
@@ -12,8 +12,8 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Provides-Extra: images
-Requires-Dist: google-generativeai (>=0.
-Requires-Dist: langchain-core (>=0.1,<0.2)
+Requires-Dist: google-generativeai (>=0.5.0,<0.6.0)
+Requires-Dist: langchain-core (>=0.1.27,<0.2)
 Requires-Dist: pillow (>=10.1.0,<11.0.0) ; extra == "images"
 Project-URL: Repository, https://github.com/langchain-ai/langchain-google
 Project-URL: Source Code, https://github.com/langchain-ai/langchain-google/tree/main/libs/genai
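
This release raises the runtime dependency floors to google-generativeai >=0.5.0,<0.6.0 and langchain-core >=0.1.27,<0.2. The snippet below is a quick sanity check that an existing environment already satisfies the new bounds; it is a sketch, not part of the package, and it assumes the third-party `packaging` distribution is installed.

from importlib.metadata import version
from packaging.version import Version

# Compare installed versions against the constraints introduced in 1.0.2.
assert Version(version("google-generativeai")) >= Version("0.5.0")
assert Version(version("langchain-core")) >= Version("0.1.27")
print("langchain-google-genai", version("langchain-google-genai"))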
{langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/chat_models.py
RENAMED
@@ -4,6 +4,7 @@ import base64
 import json
 import logging
 import os
+import warnings
 from io import BytesIO
 from typing import (
     Any,
@@ -300,27 +301,16 @@ def _convert_to_parts(
 
 def _parse_chat_history(
     input_messages: Sequence[BaseMessage], convert_system_message_to_human: bool = False
-) -> List[genai.types.ContentDict]:
+) -> Tuple[Optional[genai.types.ContentDict], List[genai.types.ContentDict]]:
     messages: List[genai.types.MessageDict] = []
 
-
-
-        if (
-            i == 0
-            and isinstance(message, SystemMessage)
-            and not convert_system_message_to_human
-        ):
-            raise ValueError(
-                """SystemMessages are not yet supported!
+    if convert_system_message_to_human:
+        warnings.warn("Convert_system_message_to_human will be deprecated!")
 
-
-
-
-
-"""
-            )
-        elif i == 0 and isinstance(message, SystemMessage):
-            raw_system_message = message
+    system_instruction: Optional[genai.types.ContentDict] = None
+    for i, message in enumerate(input_messages):
+        if i == 0 and isinstance(message, SystemMessage):
+            system_instruction = _convert_to_parts(message.content)
             continue
         elif isinstance(message, AIMessage):
             role = "model"
@@ -365,16 +355,8 @@ llm = ChatGoogleGenerativeAI(model="gemini-pro", convert_system_message_to_human
                 f"Unexpected message with type {type(message)} at the position {i}."
             )
 
-        if raw_system_message:
-            if role == "model":
-                raise ValueError(
-                    "SystemMessage should be followed by a HumanMessage and "
-                    "not by AIMessage."
-                )
-            parts = _convert_to_parts(raw_system_message.content) + parts
-            raw_system_message = None
         messages.append({"role": role, "parts": parts})
-    return messages
+    return system_instruction, messages
 
 
 def _parse_response_candidate(
@@ -483,17 +465,29 @@ class ChatGoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseChatModel):
     @root_validator()
     def validate_environment(cls, values: Dict) -> Dict:
         """Validates params and passes them to google-generativeai package."""
-
-
-
-        if
-
-
-
-
-
-
+        additional_headers = values.get("additional_headers") or {}
+        default_metadata = tuple(additional_headers.items())
+
+        if values.get("credentials"):
+            genai.configure(
+                credentials=values.get("credentials"),
+                transport=values.get("transport"),
+                client_options=values.get("client_options"),
+                default_metadata=default_metadata,
+            )
+        else:
+            google_api_key = get_from_dict_or_env(
+                values, "google_api_key", "GOOGLE_API_KEY"
+            )
+            if isinstance(google_api_key, SecretStr):
+                google_api_key = google_api_key.get_secret_value()
+
+            genai.configure(
+                api_key=google_api_key,
+                transport=values.get("transport"),
+                client_options=values.get("client_options"),
+                default_metadata=default_metadata,
+            )
         if (
             values.get("temperature") is not None
             and not 0 <= values["temperature"] <= 1
@@ -647,11 +641,15 @@ class ChatGoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseChatModel):
         )
 
         params = self._prepare_params(stop, **kwargs)
-        history = _parse_chat_history(
+        system_instruction, history = _parse_chat_history(
             messages,
             convert_system_message_to_human=self.convert_system_message_to_human,
         )
         message = history.pop()
+        if self.client._system_instruction != system_instruction:
+            self.client = genai.GenerativeModel(
+                model_name=self.model, system_instruction=system_instruction
+            )
         chat = client.start_chat(history=history)
         return params, chat, message
 
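
The chat_models.py changes drop the old "SystemMessages are not yet supported!" error: _parse_chat_history now returns a (system_instruction, messages) tuple, a leading SystemMessage is forwarded to genai.GenerativeModel as a native system instruction, and convert_system_message_to_human only triggers a deprecation warning. A minimal usage sketch of the new behaviour follows; it assumes a configured GOOGLE_API_KEY and borrows the "gemini-pro" model name from the docstring context visible in the hunk above.

from langchain_core.messages import HumanMessage, SystemMessage
from langchain_google_genai import ChatGoogleGenerativeAI

# A leading SystemMessage now flows through as a Gemini system instruction
# instead of raising; convert_system_message_to_human is no longer required.
llm = ChatGoogleGenerativeAI(model="gemini-pro")
messages = [
    SystemMessage(content="Answer in exactly one sentence."),
    HumanMessage(content="What does langchain-google-genai integrate?"),
]
print(llm.invoke(messages).content)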
{langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/embeddings.py
RENAMED
@@ -1,4 +1,4 @@
-from typing import Dict, List, Optional
+from typing import Any, Dict, List, Optional
 
 # TODO: remove ignore once the google package is published with types
 import google.generativeai as genai  # type: ignore[import]
@@ -43,6 +43,13 @@ class GoogleGenerativeAIEmbeddings(BaseModel, Embeddings):
         description="The Google API key to use. If not provided, "
         "the GOOGLE_API_KEY environment variable will be used.",
     )
+    credentials: Any = Field(
+        default=None,
+        exclude=True,
+        description="The default custom credentials "
+        "(google.auth.credentials.Credentials) to use when making API calls. If not "
+        "provided, credentials will be ascertained from the GOOGLE_API_KEY envvar",
+    )
     client_options: Optional[Dict] = Field(
         None,
         description=(
@@ -58,17 +65,24 @@ class GoogleGenerativeAIEmbeddings(BaseModel, Embeddings):
     @root_validator()
     def validate_environment(cls, values: Dict) -> Dict:
         """Validates params and passes them to google-generativeai package."""
-
-
-
-
-
-
-
-
-
-
-
+        if values.get("credentials"):
+            genai.configure(
+                credentials=values.get("credentials"),
+                transport=values.get("transport"),
+                client_options=values.get("client_options"),
+            )
+        else:
+            google_api_key = get_from_dict_or_env(
+                values, "google_api_key", "GOOGLE_API_KEY"
+            )
+            if isinstance(google_api_key, SecretStr):
+                google_api_key = google_api_key.get_secret_value()
+
+            genai.configure(
+                api_key=google_api_key,
+                transport=values.get("transport"),
+                client_options=values.get("client_options"),
+            )
         return values
 
     def _embed(
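
embeddings.py gains a credentials field and a validator branch that prefers explicit credentials over the GOOGLE_API_KEY lookup. Below is a hedged sketch of that path; the service-account file name and the "models/embedding-001" model name are illustrative assumptions, not values taken from this diff.

from google.oauth2 import service_account  # assumes google-auth is installed
from langchain_google_genai import GoogleGenerativeAIEmbeddings

# Explicit credentials now bypass the GOOGLE_API_KEY environment lookup.
creds = service_account.Credentials.from_service_account_file("service-account.json")
embeddings = GoogleGenerativeAIEmbeddings(
    model="models/embedding-001",
    credentials=creds,
)
print(len(embeddings.embed_query("hello world")))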
{langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/llms.py
RENAMED
@@ -122,6 +122,10 @@ Supported examples:
     )
     """Model name to use."""
     google_api_key: Optional[SecretStr] = None
+    credentials: Any = None
+    "The default custom credentials (google.auth.credentials.Credentials) to use "
+    "when making API calls. If not provided, credentials will be ascertained from "
+    "the GOOGLE_API_KEY envvar"
     temperature: float = 0.7
     """Run inference with this temperature. Must by in the closed interval
     [0.0, 1.0]."""
@@ -150,6 +154,12 @@ Supported examples:
         None,
         description="A string, one of: [`rest`, `grpc`, `grpc_asyncio`].",
     )
+    additional_headers: Optional[Dict[str, str]] = Field(
+        None,
+        description=(
+            "A key-value dictionary representing additional headers for the model call"
+        ),
+    )
 
     safety_settings: Optional[Dict[HarmCategory, HarmBlockThreshold]] = None
     """The default safety settings to use for all generations.
@@ -203,22 +213,28 @@ class GoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseLLM):
     @root_validator()
     def validate_environment(cls, values: Dict) -> Dict:
         """Validates params and passes them to google-generativeai package."""
-
-
-
+        if values.get("credentials"):
+            genai.configure(
+                credentials=values.get("credentials"),
+                transport=values.get("transport"),
+                client_options=values.get("client_options"),
+            )
+        else:
+            google_api_key = get_from_dict_or_env(
+                values, "google_api_key", "GOOGLE_API_KEY"
+            )
+            if isinstance(google_api_key, SecretStr):
+                google_api_key = google_api_key.get_secret_value()
+            genai.configure(
+                api_key=google_api_key,
+                transport=values.get("transport"),
+                client_options=values.get("client_options"),
+            )
+
         model_name = values["model"]
 
         safety_settings = values["safety_settings"]
 
-        if isinstance(google_api_key, SecretStr):
-            google_api_key = google_api_key.get_secret_value()
-
-        genai.configure(
-            api_key=google_api_key,
-            transport=values.get("transport"),
-            client_options=values.get("client_options"),
-        )
-
         if safety_settings and (
             not GoogleModelFamily(model_name) == GoogleModelFamily.GEMINI
         ):
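
llms.py adds credentials and additional_headers fields to the shared base model and moves genai.configure() into a credentials-or-API-key branch; per the hunks above, only the chat model's validator forwards additional_headers as default_metadata. The sketch below exercises the unchanged API-key fallback; the key value is a placeholder.

import os

from langchain_google_genai import GoogleGenerativeAI

# With no explicit credentials, the validator still reads GOOGLE_API_KEY.
os.environ["GOOGLE_API_KEY"] = "your-api-key"  # placeholder, not a real key
llm = GoogleGenerativeAI(model="gemini-pro", temperature=0.3)
print(llm.invoke("Summarize this release in one sentence."))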
{langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/pyproject.toml
RENAMED
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "langchain-google-genai"
-version = "0.0.11"
+version = "1.0.2"
 description = "An integration package connecting Google's genai package and LangChain"
 authors = []
 readme = "README.md"
@@ -12,8 +12,8 @@ license = "MIT"
 
 [tool.poetry.dependencies]
 python = ">=3.9,<4.0"
-langchain-core = "
-google-generativeai = "^0.
+langchain-core = ">=0.1.27,<0.2"
+google-generativeai = "^0.5.0"
 pillow = { version = "^10.1.0", optional = true }
 
 [tool.poetry.extras]
@@ -30,6 +30,7 @@ syrupy = "^4.0.2"
 pytest-watcher = "^0.3.4"
 pytest-asyncio = "^0.21.1"
 numpy = "^1.26.2"
+langchain-core = {git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core"}
 
 [tool.poetry.group.codespell]
 optional = true
@@ -56,6 +57,7 @@ types-requests = "^2.28.11.5"
 types-google-cloud-ndb = "^2.2.0.1"
 types-pillow = "^10.1.0.2"
 types-protobuf = "^4.24.0.20240302"
+langchain-core = {git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core"}
 
 [tool.poetry.group.dev]
 optional = true
@@ -65,6 +67,7 @@ pillow = "^10.1.0"
 types-requests = "^2.31.0.10"
 types-pillow = "^10.1.0.2"
 types-google-cloud-ndb = "^2.2.0.1"
+langchain-core = {git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core"}
 
 [tool.ruff]
 select = [
{langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/LICENSE
RENAMED
File without changes
{langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/README.md
RENAMED
File without changes
{langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/__init__.py
RENAMED
File without changes
{langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/_common.py
RENAMED
File without changes
{langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/_enums.py
RENAMED
File without changes
{langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/_function_utils.py
RENAMED
File without changes
{langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/_genai_extension.py
RENAMED
File without changes
{langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/genai_aqa.py
RENAMED
File without changes
{langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/google_vector_store.py
RENAMED
File without changes
{langchain_google_genai-0.0.11 → langchain_google_genai-1.0.2}/langchain_google_genai/py.typed
RENAMED
File without changes