langchain-google-genai 1.0.8__tar.gz → 1.0.10__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (16)
  1. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/PKG-INFO +2 -2
  2. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/langchain_google_genai/_function_utils.py +19 -6
  3. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/langchain_google_genai/chat_models.py +35 -4
  4. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/langchain_google_genai/llms.py +11 -1
  5. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/pyproject.toml +2 -2
  6. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/LICENSE +0 -0
  7. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/README.md +0 -0
  8. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/langchain_google_genai/__init__.py +0 -0
  9. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/langchain_google_genai/_common.py +0 -0
  10. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/langchain_google_genai/_enums.py +0 -0
  11. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/langchain_google_genai/_genai_extension.py +0 -0
  12. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/langchain_google_genai/_image_utils.py +0 -0
  13. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/langchain_google_genai/embeddings.py +0 -0
  14. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/langchain_google_genai/genai_aqa.py +0 -0
  15. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/langchain_google_genai/google_vector_store.py +0 -0
  16. {langchain_google_genai-1.0.8 → langchain_google_genai-1.0.10}/langchain_google_genai/py.typed +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: langchain-google-genai
-Version: 1.0.8
+Version: 1.0.10
 Summary: An integration package connecting Google's genai package and LangChain
 Home-page: https://github.com/langchain-ai/langchain-google
 License: MIT
@@ -13,7 +13,7 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Provides-Extra: images
 Requires-Dist: google-generativeai (>=0.7.0,<0.8.0)
-Requires-Dist: langchain-core (>=0.2.17,<0.3)
+Requires-Dist: langchain-core (>=0.2.33,<0.3)
 Requires-Dist: pillow (>=10.1.0,<11.0.0) ; extra == "images"
 Project-URL: Repository, https://github.com/langchain-ai/langchain-google
 Project-URL: Source Code, https://github.com/langchain-ai/langchain-google/tree/main/libs/genai
langchain_google_genai/_function_utils.py
@@ -107,6 +107,13 @@ def _format_json_schema_to_gapic(schema: Dict[str, Any]) -> Dict[str, Any]:
                     pvalue
                 )
             continue
+        elif key == "allOf":
+            if len(value) > 1:
+                logger.warning(
+                    "Only first value for 'allOf' key is supported. "
+                    f"Got {len(value)}, ignoring other than first value!"
+                )
+            return _format_json_schema_to_gapic(value[0])
         elif key in ["type", "_type"]:
             converted_schema["type"] = str(value).upper()
         elif key not in _ALLOWED_SCHEMA_FIELDS_SET:
@@ -116,11 +123,13 @@ def _format_json_schema_to_gapic(schema: Dict[str, Any]) -> Dict[str, Any]:
     return converted_schema


-def _dict_to_gapic_schema(schema: Dict[str, Any]) -> gapic.Schema:
-    dereferenced_schema = dereference_refs(schema)
-    formatted_schema = _format_json_schema_to_gapic(dereferenced_schema)
-    json_schema = json.dumps(formatted_schema)
-    return gapic.Schema.from_json(json_schema)
+def _dict_to_gapic_schema(schema: Dict[str, Any]) -> Optional[gapic.Schema]:
+    if schema:
+        dereferenced_schema = dereference_refs(schema)
+        formatted_schema = _format_json_schema_to_gapic(dereferenced_schema)
+        json_schema = json.dumps(formatted_schema)
+        return gapic.Schema.from_json(json_schema)
+    return None


 def _format_dict_to_function_declaration(
@@ -196,7 +205,11 @@ def _format_to_gapic_function_declaration(
             function = cast(dict, tool)
             function["parameters"] = {}
         else:
-            function = convert_to_openai_tool(cast(dict, tool))["function"]
+            if "parameters" in tool and tool["parameters"].get("properties"):
+                function = convert_to_openai_tool(cast(dict, tool))["function"]
+            else:
+                function = cast(dict, tool)
+                function["parameters"] = {}
         return _format_dict_to_function_declaration(cast(FunctionDescription, function))
     elif callable(tool):
         return _format_base_tool_to_function_declaration(callable_as_lc_tool()(tool))
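Note: the hunks above let a dict-style tool whose schema has no populated "properties" (including schemas reduced through the new "allOf" branch) fall back to an empty parameter declaration, which _dict_to_gapic_schema now maps to None instead of an empty gapic.Schema. A minimal sketch of the case this covers; the tool dict and model name below are illustrative placeholders, not values from this diff:

from langchain_google_genai import ChatGoogleGenerativeAI

# Hypothetical no-argument tool: its "parameters" has an empty "properties" mapping.
clock_tool = {
    "name": "get_current_time",
    "description": "Return the current server time.",
    "parameters": {"type": "object", "properties": {}},
}

llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash")  # example model name
llm_with_tool = llm.bind_tools([clock_tool])
# llm_with_tool.invoke("What time is it?")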
langchain_google_genai/chat_models.py
@@ -32,8 +32,10 @@ import google.api_core
 import proto  # type: ignore[import]
 import requests
 from google.ai.generativelanguage_v1beta.types import (
+    Blob,
     Candidate,
     Content,
+    FileData,
     FunctionCall,
     FunctionResponse,
     GenerateContentRequest,
@@ -42,6 +44,7 @@ from google.ai.generativelanguage_v1beta.types import (
     Part,
     SafetySetting,
     ToolConfig,
+    VideoMetadata,
 )
 from google.generativeai.types import Tool as GoogleTool  # type: ignore[import]
 from google.generativeai.types.content_types import (  # type: ignore[import]
@@ -75,6 +78,7 @@ from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
 from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
 from langchain_core.runnables import Runnable, RunnablePassthrough
 from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils.pydantic import is_basemodel_subclass
 from tenacity import (
     before_sleep_log,
     retry,
@@ -324,8 +328,35 @@ def _convert_to_parts(
                             )
                         img_url = img_url["url"]
                     parts.append(image_loader.load_part(img_url))
+                # Handle media type like LangChain.js
+                # https://github.com/langchain-ai/langchainjs/blob/e536593e2585f1dd7b0afc187de4d07cb40689ba/libs/langchain-google-common/src/utils/gemini.ts#L93-L106
+                elif part["type"] == "media":
+                    if "mime_type" not in part:
+                        raise ValueError(f"Missing mime_type in media part: {part}")
+                    mime_type = part["mime_type"]
+                    media_part = Part()
+
+                    if "data" in part:
+                        media_part.inline_data = Blob(
+                            data=part["data"], mime_type=mime_type
+                        )
+                    elif "file_uri" in part:
+                        media_part.file_data = FileData(
+                            file_uri=part["file_uri"], mime_type=mime_type
+                        )
+                    else:
+                        raise ValueError(
+                            f"Media part must have either data or file_uri: {part}"
+                        )
+                    if "video_metadata" in part:
+                        metadata = VideoMetadata(part["video_metadata"])
+                        media_part.video_metadata = metadata
+                    parts.append(media_part)
                 else:
-                    raise ValueError(f"Unrecognized message part type: {part['type']}")
+                    raise ValueError(
+                        f"Unrecognized message part type: {part['type']}. Only text, "
+                        f"image_url, and media types are supported."
+                    )
             else:
                 # Yolo
                 logger.warning(
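Note: with the new "media" branch in _convert_to_parts, multimodal content can be passed either inline ("data" plus "mime_type") or by reference ("file_uri" plus "mime_type"), mirroring the LangChain.js behavior linked in the diff. A rough usage sketch; the model name and file_uri below are placeholders, not values from this release:

from langchain_core.messages import HumanMessage
from langchain_google_genai import ChatGoogleGenerativeAI

llm = ChatGoogleGenerativeAI(model="gemini-1.5-pro")  # example model name

message = HumanMessage(
    content=[
        {"type": "text", "text": "Describe this clip."},
        {
            "type": "media",
            "mime_type": "video/mp4",
            # Placeholder URI; in practice this would come from the Gemini Files API.
            "file_uri": "https://generativelanguage.googleapis.com/v1beta/files/example-id",
        },
    ]
)
# llm.invoke([message])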
@@ -784,8 +815,8 @@ class ChatGoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseChatModel):

     """  # noqa: E501

-    client: Any = None  #: :meta private:
-    async_client: Any = None  #: :meta private:
+    client: Any = Field(default=None, exclude=True)  #: :meta private:
+    async_client: Any = Field(default=None, exclude=True)  #: :meta private:
     google_api_key: Optional[SecretStr] = Field(default=None, alias="api_key")
     """Google AI API key.

@@ -1153,7 +1184,7 @@ class ChatGoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseChatModel):
     ) -> Runnable[LanguageModelInput, Union[Dict, BaseModel]]:
         if kwargs:
             raise ValueError(f"Received unsupported arguments {kwargs}")
-        if isinstance(schema, type) and issubclass(schema, BaseModel):
+        if isinstance(schema, type) and is_basemodel_subclass(schema):
             parser: OutputParserLike = PydanticToolsParser(
                 tools=[schema], first_tool_only=True
             )
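Note: replacing issubclass(schema, BaseModel) with is_basemodel_subclass means with_structured_output should recognize both pydantic v1 and v2 model classes. A short sketch, assuming a hypothetical Person schema and an example model name:

from pydantic import BaseModel  # a pydantic v2 class is now accepted as well

from langchain_google_genai import ChatGoogleGenerativeAI


class Person(BaseModel):
    name: str
    age: int


llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash")  # example model name
structured_llm = llm.with_structured_output(Person)
# structured_llm.invoke("Alice is 30 years old.")  # expected: Person(name="Alice", age=30)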
langchain_google_genai/llms.py
@@ -9,7 +9,7 @@ from langchain_core.callbacks import (
     AsyncCallbackManagerForLLMRun,
     CallbackManagerForLLMRun,
 )
-from langchain_core.language_models import LanguageModelInput
+from langchain_core.language_models import LangSmithParams, LanguageModelInput
 from langchain_core.language_models.llms import BaseLLM, create_base_retry_decorator
 from langchain_core.outputs import Generation, GenerationChunk, LLMResult
 from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
@@ -268,6 +268,16 @@ class GoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseLLM):

         return values

+    def _get_ls_params(
+        self, stop: Optional[List[str]] = None, **kwargs: Any
+    ) -> LangSmithParams:
+        """Get standard params for tracing."""
+        ls_params = super()._get_ls_params(stop=stop, **kwargs)
+        ls_params["ls_provider"] = "google_genai"
+        if ls_max_tokens := kwargs.get("max_output_tokens", self.max_output_tokens):
+            ls_params["ls_max_tokens"] = ls_max_tokens
+        return ls_params
+
     def _generate(
         self,
         prompts: List[str],
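Note: _get_ls_params supplies the standardized metadata that LangSmith tracing attaches to each run; this override tags completions from the GoogleGenerativeAI LLM with the provider name and the effective max-token setting. A sketch of what the override contributes, with example model and token values:

from langchain_google_genai import GoogleGenerativeAI

llm = GoogleGenerativeAI(model="gemini-1.5-flash", max_output_tokens=256)  # example values
params = llm._get_ls_params()
# The override adds these keys (others come from the langchain-core base implementation):
# params["ls_provider"] == "google_genai"
# params["ls_max_tokens"] == 256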
pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "langchain-google-genai"
-version = "1.0.8"
+version = "1.0.10"
 description = "An integration package connecting Google's genai package and LangChain"
 authors = []
 readme = "README.md"
@@ -12,7 +12,7 @@ license = "MIT"

 [tool.poetry.dependencies]
 python = ">=3.9,<4.0"
-langchain-core = ">=0.2.17,<0.3"
+langchain-core = ">=0.2.33,<0.3"
 google-generativeai = "^0.7.0"
 pillow = { version = "^10.1.0", optional = true }
