langchain-google-genai 2.0.7__py3-none-any.whl → 2.0.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of langchain-google-genai might be problematic.
- langchain_google_genai/_function_utils.py +20 -4
- langchain_google_genai/chat_models.py +62 -57
- {langchain_google_genai-2.0.7.dist-info → langchain_google_genai-2.0.9.dist-info}/METADATA +2 -2
- {langchain_google_genai-2.0.7.dist-info → langchain_google_genai-2.0.9.dist-info}/RECORD +6 -6
- {langchain_google_genai-2.0.7.dist-info → langchain_google_genai-2.0.9.dist-info}/LICENSE +0 -0
- {langchain_google_genai-2.0.7.dist-info → langchain_google_genai-2.0.9.dist-info}/WHEEL +0 -0
langchain_google_genai/_function_utils.py

@@ -315,10 +315,26 @@ def _get_properties_from_schema(schema: Dict) -> Dict[str, Any]:
         if properties_item.get("type_") == glm.Type.ARRAY and v.get("items"):
             properties_item["items"] = _get_items_from_schema_any(v.get("items"))

-        if properties_item.get("type_") == glm.Type.OBJECT and v.get("properties"):
-            properties_item["properties"] = _get_properties_from_schema_any(
-                v.get("properties")
-            )
+        if properties_item.get("type_") == glm.Type.OBJECT:
+            if (
+                v.get("anyOf")
+                and isinstance(v["anyOf"], list)
+                and isinstance(v["anyOf"][0], dict)
+            ):
+                v = v["anyOf"][0]
+            v_properties = v.get("properties")
+            if v_properties:
+                properties_item["properties"] = _get_properties_from_schema_any(
+                    v_properties
+                )
+                if isinstance(v_properties, dict):
+                    properties_item["required"] = [
+                        k for k, v in v_properties.items() if "default" not in v
+                    ]
+            else:
+                # Providing dummy type for object without properties
+                properties_item["type_"] = glm.Type.STRING
+
         if k == "title" and "description" not in properties_item:
             properties_item["description"] = k + " is " + str(v)

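In effect, the OBJECT branch now unwraps an Optional-style anyOf wrapper before reading a nested object's properties, and derives required from the fields that have no default. A minimal, self-contained sketch of that logic on plain JSON-schema dicts (not the package's own code; the sample field schema is invented):

# Sketch of the new OBJECT handling using plain dicts (no glm protos):
# take the first anyOf entry, then mark fields without a default as required.
def sketch_object_properties(v: dict) -> dict:
    if v.get("anyOf") and isinstance(v["anyOf"], list) and isinstance(v["anyOf"][0], dict):
        v = v["anyOf"][0]  # e.g. Optional[Model] -> use the object schema inside anyOf
    props = v.get("properties") or {}
    return {
        "properties": props,
        "required": [k for k, field in props.items() if "default" not in field],
    }


# Hypothetical schema fragment for an Optional nested-object tool parameter:
field_schema = {
    "anyOf": [
        {
            "type": "object",
            "properties": {
                "city": {"type": "string"},
                "zip": {"type": "string", "default": ""},
            },
        },
        {"type": "null"},
    ]
}
print(sketch_object_properties(field_schema)["required"])  # ['city'] - 'zip' has a default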
langchain_google_genai/chat_models.py

@@ -301,6 +301,49 @@ def _convert_to_parts(
     return parts


+def _convert_tool_message_to_part(message: ToolMessage | FunctionMessage) -> Part:
+    """Converts a tool or function message to a google part."""
+    name = message.name
+    response: Any
+    if not isinstance(message.content, str):
+        response = message.content
+    else:
+        try:
+            response = json.loads(message.content)
+        except json.JSONDecodeError:
+            response = message.content  # leave as str representation
+    part = Part(
+        function_response=FunctionResponse(
+            name=name,
+            response=(
+                {"output": response} if not isinstance(response, dict) else response
+            ),
+        )
+    )
+    return part
+
+
+def _get_ai_message_tool_messages_parts(
+    tool_messages: Sequence[ToolMessage], ai_message: AIMessage
+) -> list[Part]:
+    """
+    Finds relevant tool messages for the AI message and converts them to a single
+    list of Parts.
+    """
+    # We are interested only in the tool messages that are part of the AI message
+    tool_calls_ids = [tool_call["id"] for tool_call in ai_message.tool_calls]
+    parts = []
+    for i, message in enumerate(tool_messages):
+        if not tool_calls_ids:
+            break
+        if message.tool_call_id in tool_calls_ids:
+            # remove the id from the list, so that we do not iterate over it again
+            tool_calls_ids.remove(message.tool_call_id)
+            part = _convert_tool_message_to_part(message)
+            parts.append(part)
+    return parts
+
+
 def _parse_chat_history(
     input_messages: Sequence[BaseMessage], convert_system_message_to_human: bool = False
 ) -> Tuple[Optional[Content], List[Content]]:

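For reference, the content coercion performed by the new _convert_tool_message_to_part helper can be shown standalone: JSON string content is parsed, non-dict results are wrapped under an "output" key, and unparseable strings are kept as-is. A small sketch of just that coercion (proto construction omitted; example values are invented):

import json
from typing import Any


def coerce_tool_response(content: Any) -> dict:
    # Mirrors the helper's handling of ToolMessage/FunctionMessage content.
    if not isinstance(content, str):
        response = content
    else:
        try:
            response = json.loads(content)
        except json.JSONDecodeError:
            response = content  # left as its string representation
    return response if isinstance(response, dict) else {"output": response}


print(coerce_tool_response('{"temp_c": 21}'))  # {'temp_c': 21}
print(coerce_tool_response("sunny"))           # {'output': 'sunny'}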
@@ -310,14 +353,20 @@ def _parse_chat_history(
         warnings.warn("Convert_system_message_to_human will be deprecated!")

     system_instruction: Optional[Content] = None
-    for i, message in enumerate(input_messages):
+    messages_without_tool_messages = [
+        message for message in input_messages if not isinstance(message, ToolMessage)
+    ]
+    tool_messages = [
+        message for message in input_messages if isinstance(message, ToolMessage)
+    ]
+    for i, message in enumerate(messages_without_tool_messages):
         if i == 0 and isinstance(message, SystemMessage):
             system_instruction = Content(parts=_convert_to_parts(message.content))
             continue
         elif isinstance(message, AIMessage):
             role = "model"
             if message.tool_calls:
-                parts = []
+                ai_message_parts = []
                 for tool_call in message.tool_calls:
                     function_call = FunctionCall(
                         {
@@ -325,7 +374,13 @@ def _parse_chat_history(
                             "args": tool_call["args"],
                         }
                     )
-                    parts.append(Part(function_call=function_call))
+                    ai_message_parts.append(Part(function_call=function_call))
+                tool_messages_parts = _get_ai_message_tool_messages_parts(
+                    tool_messages=tool_messages, ai_message=message
+                )
+                messages.append(Content(role=role, parts=ai_message_parts))
+                messages.append(Content(role="user", parts=tool_messages_parts))
+                continue
         elif raw_function_call := message.additional_kwargs.get("function_call"):
             function_call = FunctionCall(
                 {
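Taken together, these two hunks change how a tool-calling turn is serialized: tool calls on the AIMessage become function_call parts in a single "model" Content, and the matching ToolMessages (looked up by tool_call_id) become function_response parts in a single following "user" Content. An illustrative history of the shape this handles (message values are invented):

from langchain_core.messages import AIMessage, ToolMessage

ai = AIMessage(
    content="",
    tool_calls=[
        {"name": "get_weather", "args": {"city": "Paris"}, "id": "call_1"},
        {"name": "get_weather", "args": {"city": "Oslo"}, "id": "call_2"},
    ],
)
tool_results = [
    ToolMessage(content='{"temp_c": 8}', tool_call_id="call_1"),
    ToolMessage(content='{"temp_c": 2}', tool_call_id="call_2"),
]
# _parse_chat_history now emits one Content(role="model") holding both
# function_call parts, followed by one Content(role="user") holding both
# matched function_response parts.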
@@ -344,60 +399,7 @@ def _parse_chat_history(
                 system_instruction = None
         elif isinstance(message, FunctionMessage):
             role = "user"
-            response: Any
-            if not isinstance(message.content, str):
-                response = message.content
-            else:
-                try:
-                    response = json.loads(message.content)
-                except json.JSONDecodeError:
-                    response = message.content  # leave as str representation
-            parts = [
-                Part(
-                    function_response=FunctionResponse(
-                        name=message.name,
-                        response=(
-                            {"output": response}
-                            if not isinstance(response, dict)
-                            else response
-                        ),
-                    )
-                )
-            ]
-        elif isinstance(message, ToolMessage):
-            role = "user"
-            prev_message: Optional[BaseMessage] = (
-                input_messages[i - 1] if i > 0 else None
-            )
-            if (
-                prev_message
-                and isinstance(prev_message, AIMessage)
-                and prev_message.tool_calls
-            ):
-                # message.name can be null for ToolMessage
-                name: str = prev_message.tool_calls[0]["name"]
-            else:
-                name = message.name  # type: ignore
-            tool_response: Any
-            if not isinstance(message.content, str):
-                tool_response = message.content
-            else:
-                try:
-                    tool_response = json.loads(message.content)
-                except json.JSONDecodeError:
-                    tool_response = message.content  # leave as str representation
-            parts = [
-                Part(
-                    function_response=FunctionResponse(
-                        name=name,
-                        response=(
-                            {"output": tool_response}
-                            if not isinstance(tool_response, dict)
-                            else tool_response
-                        ),
-                    )
-                )
-            ]
+            parts = [_convert_tool_message_to_part(message)]
         else:
             raise ValueError(
                 f"Unexpected message with type {type(message)} at the position {i}."
@@ -419,6 +421,9 @@ def _parse_response_candidate(
     for part in response_candidate.content.parts:
         try:
             text: Optional[str] = part.text
+            # Remove erroneous newline character if present
+            if text is not None:
+                text = text.rstrip("\n")
         except AttributeError:
             text = None

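The _parse_response_candidate change only strips trailing newlines from each text part; note that str.rstrip("\n") removes every trailing newline, not just one. A trivial standalone illustration (example string invented):

chunk = "The capital of France is Paris.\n\n"
assert chunk.rstrip("\n") == "The capital of France is Paris."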
{langchain_google_genai-2.0.7.dist-info → langchain_google_genai-2.0.9.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: langchain-google-genai
-Version: 2.0.7
+Version: 2.0.9
 Summary: An integration package connecting Google's genai package and LangChain
 Home-page: https://github.com/langchain-ai/langchain-google
 License: MIT
@@ -13,7 +13,7 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: filetype (>=1.2.0,<2.0.0)
 Requires-Dist: google-generativeai (>=0.8.0,<0.9.0)
-Requires-Dist: langchain-core (>=0.3.
+Requires-Dist: langchain-core (>=0.3.27,<0.4.0)
 Requires-Dist: pydantic (>=2,<3)
 Project-URL: Repository, https://github.com/langchain-ai/langchain-google
 Project-URL: Source Code, https://github.com/langchain-ai/langchain-google/tree/main/libs/genai
{langchain_google_genai-2.0.7.dist-info → langchain_google_genai-2.0.9.dist-info}/RECORD

@@ -1,16 +1,16 @@
 langchain_google_genai/__init__.py,sha256=Oji-S2KYWrku1wyQEskY84IOfY8MfRhujjJ4d7hbsk4,2758
 langchain_google_genai/_common.py,sha256=ASlwE8hEbvOm55BVF_D4rf2nl7RYsnpsi5xbM6DW3Cc,1576
 langchain_google_genai/_enums.py,sha256=KLPmxS1K83K4HjBIXFaXoL_sFEOv8Hq-2B2PDMKyDgo,197
-langchain_google_genai/_function_utils.py,sha256=
+langchain_google_genai/_function_utils.py,sha256=xcUwX2DmGM4UwH7bhBC6W9E5oVAE8k_l9lMYzJUJwA0,17433
 langchain_google_genai/_genai_extension.py,sha256=81a4ly5ZHlqMf37uJfdB8K41qE6J5ujLnbUypIfFf2o,20775
 langchain_google_genai/_image_utils.py,sha256=tPrQyMvVmO8xkuow1SvA91omxUEv9ZUy1EMHNGjMAKY,5202
-langchain_google_genai/chat_models.py,sha256=
+langchain_google_genai/chat_models.py,sha256=DpTIfMdO_HvuM7s6T3BMuVLfVH0n_ndAq-eiGHKncug,54547
 langchain_google_genai/embeddings.py,sha256=jQRWPXD9twXoVBlXJQG7Duz0fb8UC0kgRzzwAmW3Dic,10146
 langchain_google_genai/genai_aqa.py,sha256=qB6h3-BSXqe0YLR3eeVllYzmNKK6ofI6xJLdBahUVZo,4300
 langchain_google_genai/google_vector_store.py,sha256=4wvhIiOmc3Fo046FyafPmT9NBCLek-9bgluvuTfrbpQ,16148
 langchain_google_genai/llms.py,sha256=EPUgkz5aqKOyKbztT7br8w60Uo5D_X_bF5qP-zd6iLs,14593
 langchain_google_genai/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langchain_google_genai-2.0.7.dist-info/LICENSE,sha256=
-langchain_google_genai-2.0.7.dist-info/METADATA,sha256=
-langchain_google_genai-2.0.7.dist-info/WHEEL,sha256=
-langchain_google_genai-2.0.7.dist-info/RECORD,,
+langchain_google_genai-2.0.9.dist-info/LICENSE,sha256=DppmdYJVSc1jd0aio6ptnMUn5tIHrdAhQ12SclEBfBg,1072
+langchain_google_genai-2.0.9.dist-info/METADATA,sha256=lYcZ7V1qdeG84ekvofzoFACmjjMolZ-Hevgigsdw7dQ,3594
+langchain_google_genai-2.0.9.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+langchain_google_genai-2.0.9.dist-info/RECORD,,
{langchain_google_genai-2.0.7.dist-info → langchain_google_genai-2.0.9.dist-info}/LICENSE: file without changes
{langchain_google_genai-2.0.7.dist-info → langchain_google_genai-2.0.9.dist-info}/WHEEL: file without changes