langchain-google-genai 2.0.0.dev1__tar.gz → 2.0.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/PKG-INFO +3 -3
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/_function_utils.py +113 -14
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/chat_models.py +24 -14
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/genai_aqa.py +1 -1
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/llms.py +5 -2
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/pyproject.toml +7 -7
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/LICENSE +0 -0
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/README.md +0 -0
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/__init__.py +0 -0
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/_common.py +0 -0
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/_enums.py +0 -0
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/_genai_extension.py +0 -0
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/_image_utils.py +0 -0
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/embeddings.py +0 -0
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/google_vector_store.py +0 -0
- {langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/py.typed +0 -0
{langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/PKG-INFO
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: langchain-google-genai
-Version: 2.0.0.dev1
+Version: 2.0.1
 Summary: An integration package connecting Google's genai package and LangChain
 Home-page: https://github.com/langchain-ai/langchain-google
 License: MIT

@@ -12,8 +12,8 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Provides-Extra: images
-Requires-Dist: google-generativeai (>=0.
-Requires-Dist: langchain-core (>=0.3.0
+Requires-Dist: google-generativeai (>=0.8.0,<0.9.0)
+Requires-Dist: langchain-core (>=0.3.0,<0.4)
 Requires-Dist: pillow (>=10.1.0,<11.0.0) ; extra == "images"
 Requires-Dist: pydantic (>=2,<3)
 Project-URL: Repository, https://github.com/langchain-ai/langchain-google
{langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/_function_utils.py
RENAMED

@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import collections
+import importlib
 import json
 import logging
 from typing import (

@@ -136,8 +137,9 @@ def _dict_to_gapic_schema(schema: Dict[str, Any]) -> Optional[gapic.Schema]:
 def _format_dict_to_function_declaration(
     tool: Union[FunctionDescription, Dict[str, Any]],
 ) -> gapic.FunctionDeclaration:
+    print(tool)
     return gapic.FunctionDeclaration(
-        name=tool.get("name"),
+        name=tool.get("name") or tool.get("title"),
         description=tool.get("description"),
         parameters=_dict_to_gapic_schema(tool.get("parameters", {})),
     )
@@ -199,14 +201,16 @@ def _format_to_gapic_function_declaration(
 ) -> gapic.FunctionDeclaration:
     if isinstance(tool, BaseTool):
         return _format_base_tool_to_function_declaration(tool)
-    elif isinstance(tool, type) and
+    elif isinstance(tool, type) and is_basemodel_subclass_safe(tool):
         return _convert_pydantic_to_genai_function(tool)
     elif isinstance(tool, dict):
         if all(k in tool for k in ("name", "description")) and "parameters" not in tool:
             function = cast(dict, tool)
             function["parameters"] = {}
         else:
-            if
+            if (
+                "parameters" in tool and tool["parameters"].get("properties")  # type: ignore[index]
+            ):
                 function = convert_to_openai_tool(cast(dict, tool))["function"]
             else:
                 function = cast(dict, tool)
@@ -269,13 +273,12 @@ def _convert_pydantic_to_genai_function(
         name=tool_name if tool_name else schema.get("title"),
         description=tool_description if tool_description else schema.get("description"),
         parameters={
-            "properties":
-
-
-
-
-
-            },
+            "properties": _get_properties_from_schema_any(
+                schema.get("properties")
+            ),  # TODO: use _dict_to_gapic_schema() if possible
+            # "items": _get_items_from_schema_any(
+            #     schema
+            # ),  # TODO: fix it https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/function-calling?hl#schema
             "required": schema.get("required", []),
             "type_": TYPE_ENUM[schema["type"]],
         },
@@ -283,6 +286,84 @@ def _convert_pydantic_to_genai_function(
     return function_declaration
 
 
+def _get_properties_from_schema_any(schema: Any) -> Dict[str, Any]:
+    if isinstance(schema, Dict):
+        return _get_properties_from_schema(schema)
+    return {}
+
+
+def _get_properties_from_schema(schema: Dict) -> Dict[str, Any]:
+    properties = {}
+    for k, v in schema.items():
+        if not isinstance(k, str):
+            logger.warning(f"Key '{k}' is not supported in schema, type={type(k)}")
+            continue
+        if not isinstance(v, Dict):
+            logger.warning(f"Value '{v}' is not supported in schema, ignoring v={v}")
+            continue
+        properties_item: Dict[str, Union[str, int, Dict, List]] = {}
+        if v.get("type") or v.get("anyOf"):
+            properties_item["type_"] = _get_type_from_schema(v)
+
+        if v.get("enum"):
+            properties_item["enum"] = v["enum"]
+
+        description = v.get("description")
+        if description and isinstance(description, str):
+            properties_item["description"] = description
+
+        if v.get("type") == "array" and v.get("items"):
+            properties_item["items"] = _get_items_from_schema_any(v.get("items"))
+
+        if v.get("type") == "object" and v.get("properties"):
+            properties_item["properties"] = _get_properties_from_schema_any(
+                v.get("properties")
+            )
+        if k == "title" and "description" not in properties_item:
+            properties_item["description"] = k + " is " + str(v)
+
+        properties[k] = properties_item
+
+    return properties
+
+
+def _get_items_from_schema_any(schema: Any) -> Dict[str, Any]:
+    if isinstance(schema, Dict):
+        return _get_items_from_schema(schema)
+    if isinstance(schema, List):
+        return _get_items_from_schema(schema)
+    if isinstance(schema, str):
+        return _get_items_from_schema(schema)
+    return {}
+
+
+def _get_items_from_schema(schema: Union[Dict, List, str]) -> Dict[str, Any]:
+    items: Dict = {}
+    if isinstance(schema, List):
+        for i, v in enumerate(schema):
+            items[f"item{i}"] = _get_properties_from_schema_any(v)
+    elif isinstance(schema, Dict):
+        item: Dict = {}
+        for k, v in schema.items():
+            item["type_"] = _get_type_from_schema(v)
+            if not isinstance(v, Dict):
+                logger.warning(
+                    f"Value '{v}' is not supported in schema, ignoring v={v}"
+                )
+                continue
+            if v.get("type") == "object" and v.get("properties"):
+                item["properties"] = _get_properties_from_schema_any(
+                    v.get("properties")
+                )
+            if k == "title" and "description" not in item:
+                item["description"] = v
+        items = item
+    else:
+        # str
+        items["type_"] = TYPE_ENUM.get(str(schema), glm.Type.STRING)
+    return items
+
+
 def _get_type_from_schema(schema: Dict[str, Any]) -> int:
     if "anyOf" in schema:
         types = [_get_type_from_schema(sub_schema) for sub_schema in schema["anyOf"]]
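Usage sketch (not part of the diff): the helpers above rewrite a Pydantic-derived JSON schema's "properties" block into the gapic-style dict (keys such as type_, description, items) that Gemini function calling expects. They live in the private _function_utils module, so the call below is illustrative only; it assumes langchain-google-genai 2.0.1 and pydantic v2 are installed.

from pydantic import BaseModel, Field

from langchain_google_genai._function_utils import _get_properties_from_schema_any


class GetWeather(BaseModel):
    """Get the current weather in a given location."""

    location: str = Field(..., description="City and state, e.g. San Francisco, CA")
    days: int = Field(1, description="Number of forecast days")


# model_json_schema() yields standard JSON Schema; the helper converts its
# "properties" block into the proto-oriented shape (note the trailing
# underscore in "type_", which mirrors the gapic field name).
json_schema = GetWeather.model_json_schema()
print(_get_properties_from_schema_any(json_schema.get("properties")))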
@@ -293,10 +374,7 @@ def _get_type_from_schema(schema: Dict[str, Any]) -> int:
         pass
     elif "type" in schema:
         stype = str(schema["type"])
-
-        return TYPE_ENUM[stype]
-    else:
-        pass
+        return TYPE_ENUM.get(stype, glm.Type.STRING)
     else:
         pass
     return TYPE_ENUM["string"]  # Default to string if no valid types found
@@ -356,3 +434,24 @@ def _tool_choice_to_tool_config(
             "allowed_function_names": allowed_function_names,
         }
     )
+
+
+def is_basemodel_subclass_safe(tool: Type) -> bool:
+    if safe_import("langchain_core.utils.pydantic", "is_basemodel_subclass"):
+        from langchain_core.utils.pydantic import (
+            is_basemodel_subclass,  # type: ignore[import]
+        )
+
+        return is_basemodel_subclass(tool)
+    else:
+        return issubclass(tool, BaseModel)
+
+
+def safe_import(module_name: str, attribute_name: str = "") -> bool:
+    try:
+        module = importlib.import_module(module_name)
+        if attribute_name:
+            return hasattr(module, attribute_name)
+        return True
+    except ImportError:
+        return False
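Usage sketch (not part of the diff): safe_import() reports whether a module (and optionally an attribute on it) can be imported, and is_basemodel_subclass_safe() uses it to fall back to a plain issubclass(..., BaseModel) check when langchain_core.utils.pydantic.is_basemodel_subclass is unavailable. Illustrative only; these names sit in the private _function_utils module.

from pydantic import BaseModel

from langchain_google_genai._function_utils import (
    is_basemodel_subclass_safe,
    safe_import,
)


class City(BaseModel):
    name: str


# True when the installed langchain-core exposes the helper, False otherwise.
print(safe_import("langchain_core.utils.pydantic", "is_basemodel_subclass"))
# Works for pydantic v1- and v2-style models thanks to the fallback.
print(is_basemodel_subclass_safe(City))
# Missing modules never raise; ImportError is swallowed and False is returned.
print(safe_import("module_that_does_not_exist"))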
{langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/chat_models.py
RENAMED

@@ -31,6 +31,9 @@ import google.api_core
 # TODO: remove ignore once the google package is published with types
 import proto  # type: ignore[import]
 import requests
+from google.ai.generativelanguage_v1beta import (
+    GenerativeServiceAsyncClient as v1betaGenerativeServiceAsyncClient,
+)
 from google.ai.generativelanguage_v1beta.types import (
     Blob,
     Candidate,

@@ -77,7 +80,6 @@ from langchain_core.output_parsers.openai_tools import (
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
 from langchain_core.runnables import Runnable, RunnablePassthrough
 from langchain_core.utils import secret_from_env
-from langchain_core.utils.pydantic import is_basemodel_subclass
 from pydantic import (
     BaseModel,
     ConfigDict,

@@ -104,6 +106,7 @@ from langchain_google_genai._function_utils import (
     _ToolChoiceType,
     _ToolConfigDict,
     convert_to_genai_function_declarations,
+    is_basemodel_subclass_safe,
     tool_to_dict,
 )
 from langchain_google_genai._image_utils import ImageBytesLoader
@@ -703,7 +706,7 @@ class ChatGoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseChatModel):
     Tool calling:
         .. code-block:: python
 
-            from
+            from pydantic import BaseModel, Field
 
 
             class GetWeather(BaseModel):

@@ -748,7 +751,7 @@ class ChatGoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseChatModel):
 
             from typing import Optional
 
-            from
+            from pydantic import BaseModel, Field
 
 
             class Joke(BaseModel):
@@ -823,11 +826,11 @@ class ChatGoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseChatModel):
     """  # noqa: E501
 
     client: Any = Field(default=None, exclude=True)  #: :meta private:
-
+    async_client_running: Any = Field(default=None, exclude=True)  #: :meta private:
     google_api_key: Optional[SecretStr] = Field(
         alias="api_key", default_factory=secret_from_env("GOOGLE_API_KEY", default=None)
     )
-    """Google AI API key.
+    """Google AI API key.
     If not specified will be read from env var ``GOOGLE_API_KEY``."""
     default_metadata: Sequence[Tuple[str, str]] = Field(
         default_factory=list
@@ -887,24 +890,31 @@ class ChatGoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseChatModel):
             client_options=self.client_options,
             transport=transport,
         )
+        self.async_client_running = None
+        return self
 
+    @property
+    def async_client(self) -> v1betaGenerativeServiceAsyncClient:
+        google_api_key = None
+        if not self.credentials:
+            if isinstance(self.google_api_key, SecretStr):
+                google_api_key = self.google_api_key.get_secret_value()
+            else:
+                google_api_key = self.google_api_key
         # NOTE: genaix.build_generative_async_service requires
         # a running event loop, which causes an error
         # when initialized inside a ThreadPoolExecutor.
         # this check ensures that async client is only initialized
         # within an asyncio event loop to avoid the error
-        if _is_event_loop_running():
-            self.
+        if not self.async_client_running and _is_event_loop_running():
+            self.async_client_running = genaix.build_generative_async_service(
                 credentials=self.credentials,
                 api_key=google_api_key,
-                client_info=
+                client_info=get_client_info("ChatGoogleGenerativeAI"),
                 client_options=self.client_options,
-                transport=transport,
+                transport=self.transport,
             )
-
-            self.async_client = None
-
-            return self
+        return self.async_client_running
 
     @property
     def _identifying_params(self) -> Dict[str, Any]:
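Usage sketch (not part of the diff): the async service client is now cached in async_client_running and built lazily by the async_client property, so, per the NOTE comments above, the chat model can be constructed in synchronous code (including inside a ThreadPoolExecutor) and the client is only created once an event loop is running. The model name below is illustrative and GOOGLE_API_KEY is assumed to be set.

import asyncio

from langchain_google_genai import ChatGoogleGenerativeAI

# Constructing the model does not require a running event loop.
llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash")


async def main() -> None:
    # The first async call builds and caches the async client.
    msg = await llm.ainvoke("Say hello in one word.")
    print(msg.content)


asyncio.run(main())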
@@ -1189,7 +1199,7 @@ class ChatGoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseChatModel):
     ) -> Runnable[LanguageModelInput, Union[Dict, BaseModel]]:
         if kwargs:
             raise ValueError(f"Received unsupported arguments {kwargs}")
-        if isinstance(schema, type) and
+        if isinstance(schema, type) and is_basemodel_subclass_safe(schema):
             parser: OutputParserLike = PydanticToolsParser(
                 tools=[schema], first_tool_only=True
             )
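Usage sketch (not part of the diff): with_structured_output() now detects Pydantic classes via is_basemodel_subclass_safe(), so a plain pydantic (v2) model like the Joke class from the updated docstring works directly. Model name is illustrative and GOOGLE_API_KEY is assumed to be set.

from pydantic import BaseModel, Field

from langchain_google_genai import ChatGoogleGenerativeAI


class Joke(BaseModel):
    """Joke to tell the user."""

    setup: str = Field(description="The setup of the joke")
    punchline: str = Field(description="The punchline of the joke")


llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash")
structured_llm = llm.with_structured_output(Joke)
print(structured_llm.invoke("Tell me a joke about cats"))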
{langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/genai_aqa.py
RENAMED

@@ -116,7 +116,7 @@ class GenAIAqa(RunnableSerializable[AqaInput, AqaOutput]):
         super().__init__(**kwargs)
         self._client = _AqaModel(**kwargs)
 
-    def invoke(
+    def invoke(  # type: ignore[override]
         self, input: AqaInput, config: Optional[RunnableConfig] = None
     ) -> AqaOutput:
         """Generates a grounded response using the provided passages."""
{langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/langchain_google_genai/llms.py
RENAMED

@@ -13,7 +13,7 @@ from langchain_core.language_models import LangSmithParams, LanguageModelInput
 from langchain_core.language_models.llms import BaseLLM, create_base_retry_decorator
 from langchain_core.outputs import Generation, GenerationChunk, LLMResult
 from langchain_core.utils import secret_from_env
-from pydantic import BaseModel, Field, SecretStr, model_validator
+from pydantic import BaseModel, ConfigDict, Field, SecretStr, model_validator
 from typing_extensions import Self
 
 from langchain_google_genai._enums import (

@@ -139,7 +139,7 @@ Supported examples:
     top_k: Optional[int] = None
     """Decode using top-k sampling: consider the set of top_k most probable tokens.
     Must be positive."""
-    max_output_tokens: Optional[int] = None
+    max_output_tokens: Optional[int] = Field(default=None, alias="max_tokens")
     """Maximum number of tokens to include in a candidate. Must be greater than zero.
     If unset, will default to 64."""
     n: int = 1

@@ -216,6 +216,9 @@ class GoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseLLM):
     """
 
     client: Any = None  #: :meta private:
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
 
     @model_validator(mode="after")
     def validate_environment(self) -> Self:
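Usage sketch (not part of the diff): with the new alias on max_output_tokens and populate_by_name=True on GoogleGenerativeAI, both parameter spellings should be accepted at construction time. Model name is illustrative and GOOGLE_API_KEY is assumed to be set.

from langchain_google_genai import GoogleGenerativeAI

llm_a = GoogleGenerativeAI(model="gemini-1.5-flash", max_output_tokens=64)
llm_b = GoogleGenerativeAI(model="gemini-1.5-flash", max_tokens=64)  # alias form

# Both spellings populate the same field.
assert llm_a.max_output_tokens == llm_b.max_output_tokens == 64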
{langchain_google_genai-2.0.0.dev1 → langchain_google_genai-2.0.1}/pyproject.toml
RENAMED

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "langchain-google-genai"
-version = "2.0.0.dev1"
+version = "2.0.1"
 description = "An integration package connecting Google's genai package and LangChain"
 authors = []
 readme = "README.md"

@@ -12,8 +12,8 @@ license = "MIT"
 
 [tool.poetry.dependencies]
 python = ">=3.9,<4.0"
-langchain-core =
-google-generativeai = "^0.
+langchain-core = ">=0.3.0,<0.4"
+google-generativeai = "^0.8.0"
 pillow = { version = "^10.1.0", optional = true }
 pydantic = ">=2,<3"

@@ -31,8 +31,8 @@ syrupy = "^4.0.2"
 pytest-watcher = "^0.3.4"
 pytest-asyncio = "^0.21.1"
 numpy = "^1.26.2"
-langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core"
-langchain-standard-tests = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/standard-tests"
+langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core" }
+langchain-standard-tests = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/standard-tests" }
 
 [tool.codespell]
 ignore-words-list = "rouge"

@@ -62,7 +62,7 @@ types-requests = "^2.28.11.5"
 types-google-cloud-ndb = "^2.2.0.1"
 types-pillow = "^10.1.0.2"
 types-protobuf = "^4.24.0.20240302"
-langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core"
+langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core" }
 numpy = "^1.26.2"
 
 [tool.poetry.group.dev]

@@ -73,7 +73,7 @@ pillow = "^10.1.0"
 types-requests = "^2.31.0.10"
 types-pillow = "^10.1.0.2"
 types-google-cloud-ndb = "^2.2.0.1"
-langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core"
+langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core" }
 
 [tool.ruff.lint]
 select = [