langchain-b12 0.1.4__py3-none-any.whl → 0.1.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langchain_b12/citations/citations.py +21 -13
- langchain_b12/genai/genai.py +22 -1
- {langchain_b12-0.1.4.dist-info → langchain_b12-0.1.6.dist-info}/METADATA +1 -1
- langchain_b12-0.1.6.dist-info/RECORD +9 -0
- langchain_b12-0.1.4.dist-info/RECORD +0 -9
- {langchain_b12-0.1.4.dist-info → langchain_b12-0.1.6.dist-info}/WHEEL +0 -0
langchain_b12/citations/citations.py
CHANGED

@@ -3,7 +3,6 @@ from collections.abc import Sequence
 from typing import Any, Literal, TypedDict
 from uuid import UUID
 
-from fuzzysearch import find_near_matches
 from langchain_core.callbacks import Callbacks
 from langchain_core.language_models import BaseChatModel
 from langchain_core.messages import AIMessage, BaseMessage, SystemMessage
@@ -155,6 +154,8 @@ def validate_citations(
     sentences: list[str],
 ) -> list[tuple[Citation, Match | None]]:
     """Validate the citations. Invalid citations are dropped."""
+    from fuzzysearch import find_near_matches
+
     n_sentences = len(sentences)
 
     all_text = "\n".join(
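The module-level fuzzysearch import removed in the first hunk reappears here as a function-local import, so the dependency is only loaded when citations are actually validated. Below is a minimal sketch of how find_near_matches is typically used for this kind of fuzzy citation lookup; the helper name and the max_l_dist value are illustrative assumptions, not part of langchain_b12's API.

```python
# Illustrative sketch only: best_fuzzy_match and max_l_dist=3 are assumptions.
def best_fuzzy_match(cited_text: str, source_text: str, max_l_dist: int = 3):
    # Deferred import, mirroring the change above: fuzzysearch is only
    # required when validation actually runs.
    from fuzzysearch import find_near_matches

    matches = find_near_matches(cited_text, source_text, max_l_dist=max_l_dist)
    if not matches:
        return None
    # Keep the closest match (smallest edit distance).
    return min(matches, key=lambda m: m.dist)


match = best_fuzzy_match(
    "the color of the grass is green",
    "As noted earlier, the colour of the grass is green in spring.",
)
if match is not None:
    print(match.start, match.end, match.dist, match.matched)
```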
@@ -244,18 +245,25 @@ def create_citation_model(
         e.g. `<context key="abc">Today is a sunny day</context>`.
         The returned AIMessage will have the following structure:
         AIMessage(
-            content=
-
-
-
-
-
-
-
-
-
-
-
+            content=[
+                {
+                    "citations": [
+                        {
+                            "cited_text": "the color of the grass is green",
+                            "generated_cited_text": "the color of the grass is green",
+                            "key": "abc",
+                            "dist": 0,
+                        }
+                    ],
+                    "text": "The grass is green",
+                    "type": "text",
+                },
+                {
+                    "citations": None,
+                    "text": "Is there anything else I can help you with?",
+                    "type": "text",
+                }
+            ]
         )
 
     Args:
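The updated docstring documents that the citation model now returns its answer as a list of content blocks, each carrying an optional list of citations. A hedged sketch of consuming that structure, assuming `message` is the AIMessage described above:

```python
# Sketch under the assumption that `message` is the AIMessage returned by the
# citation model, with the content shape shown in the docstring above.
for block in message.content:
    if isinstance(block, dict) and block.get("type") == "text":
        print(block["text"])
        for citation in block.get("citations") or []:
            # "dist" is the fuzzy-match distance between the text the model
            # claimed to cite and the text actually found in the context.
            print("  cited from", citation["key"], "with dist", citation["dist"])
```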
langchain_b12/genai/genai.py
CHANGED
@@ -90,6 +90,8 @@ class ChatGenAI(BaseChatModel):
         HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: HarmBlockThreshold.BLOCK_NONE,
     }
     """ # noqa: E501
+    thinking_config: types.ThinkingConfig | None = None
+    "The thinking configuration to use for the model."
 
     model_config = ConfigDict(
         arbitrary_types_allowed=True,
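The new thinking_config field lets callers pass a google-genai ThinkingConfig through to the model. A hedged sketch of setting it; the `model` keyword and the model name are assumptions about ChatGenAI's constructor, not taken from the diff:

```python
# Hedged sketch: `model="gemini-2.5-flash"` is an assumed constructor argument;
# ThinkingConfig is the google-genai SDK type the new field is annotated with.
from google.genai import types
from langchain_b12.genai.genai import ChatGenAI

llm = ChatGenAI(
    model="gemini-2.5-flash",  # assumed constructor argument
    thinking_config=types.ThinkingConfig(
        include_thoughts=False,  # do not return thought summaries
        thinking_budget=1024,    # cap reasoning tokens
    ),
)
```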
@@ -208,6 +210,10 @@ class ChatGenAI(BaseChatModel):
                 candidate_count=self.n,
                 stop_sequences=stop or self.stop,
                 safety_settings=self.safety_settings,
+                thinking_config=self.thinking_config,
+                automatic_function_calling=types.AutomaticFunctionCallingConfig(
+                    disable=True,
+                ),
                 **kwargs,
             ),
         )
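Both new keyword arguments are forwarded into the generation config handed to the google-genai SDK. The sketch below shows the equivalent direct SDK call, to illustrate what the wrapper now sends; the model name and prompt are illustrative, and this is not langchain_b12's internal code verbatim.

```python
# Equivalent direct google-genai call (values illustrative).
from google import genai
from google.genai import types

client = genai.Client()
response = client.models.generate_content(
    model="gemini-2.5-flash",
    contents="Why is the sky blue?",
    config=types.GenerateContentConfig(
        thinking_config=types.ThinkingConfig(thinking_budget=1024),
        # Disable the SDK's own tool-execution loop so the LangChain layer
        # stays in charge of function calling.
        automatic_function_calling=types.AutomaticFunctionCallingConfig(
            disable=True,
        ),
    ),
)
print(response.text)
```

The next hunk applies the same two arguments to the second generation path.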
@@ -240,6 +246,10 @@ class ChatGenAI(BaseChatModel):
                 candidate_count=self.n,
                 stop_sequences=stop or self.stop,
                 safety_settings=self.safety_settings,
+                thinking_config=self.thinking_config,
+                automatic_function_calling=types.AutomaticFunctionCallingConfig(
+                    disable=True,
+                ),
                 **kwargs,
             ),
         )
@@ -362,6 +372,12 @@ class ChatGenAI(BaseChatModel):
             input_tokens=usage_metadata.prompt_token_count or 0,
             output_tokens=usage_metadata.candidates_token_count or 0,
             total_tokens=usage_metadata.total_token_count or 0,
+            input_token_details={
+                "cache_read": usage_metadata.cached_content_token_count or 0
+            },
+            output_token_details={
+                "reasoning": usage_metadata.thoughts_token_count or 0
+            },
         )
 
         total_lc_usage: UsageMetadata | None = (
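With this change, cached-content and thought token counts from the Gemini usage metadata surface in LangChain's UsageMetadata details. A sketch of reading them off a returned message, assuming `ai_message` is the AIMessage returned by ChatGenAI:

```python
# Sketch assuming `ai_message` is an AIMessage returned by ChatGenAI.invoke();
# the keys below match the details populated in the hunk above.
usage = ai_message.usage_metadata or {}
cache_read = usage.get("input_token_details", {}).get("cache_read", 0)
reasoning = usage.get("output_token_details", {}).get("reasoning", 0)
print(f"cached prompt tokens: {cache_read}, reasoning tokens: {reasoning}")
```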
@@ -392,7 +408,12 @@ class ChatGenAI(BaseChatModel):
             "finish_reason": top_candidate.finish_reason,
             "finish_message": top_candidate.finish_message,
         }
-
+        try:
+            message = parse_response_candidate(top_candidate)
+        except Exception as e:
+            raise ValueError(
+                f"Failed to parse model response: {top_candidate.finish_message}"
+            ) from e
         if lc_usage:
             message.usage_metadata = lc_usage
         # add model name if final chunk
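Parsing failures now raise a ValueError that carries the candidate's finish_message and chains the original exception. A sketch of how a caller might handle it, assuming `llm` is a configured ChatGenAI instance:

```python
# Sketch: surfacing the new, more descriptive parsing error at a call site.
try:
    ai_message = llm.invoke("Summarise the provided context.")
except ValueError as exc:
    # exc.__cause__ holds the original exception raised inside
    # parse_response_candidate.
    print("Gemini response could not be parsed:", exc)
    raise
```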
langchain_b12-0.1.6.dist-info/RECORD
ADDED

@@ -0,0 +1,9 @@
+langchain_b12/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+langchain_b12/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+langchain_b12/citations/citations.py,sha256=ZQvYayjQXIUaRosJ0qwL3Nc7kC8sBzmaIkE-BOslaVI,12261
+langchain_b12/genai/embeddings.py,sha256=od2bVIgt7v9aNAHG0PVypVF1H_XgHto2nTd8vwfvyN8,3355
+langchain_b12/genai/genai.py,sha256=r7v_Z97N_Vd0zIR5mcQrlY3eWCPWhThvOvnXg59Ls8c,16868
+langchain_b12/genai/genai_utils.py,sha256=tA6UiJURK25-11vtaX4768UV47jDCYwVKIIWydD4Egw,10736
+langchain_b12-0.1.6.dist-info/METADATA,sha256=c2_hq-9spfRCb1nNRT6ztrCf1s1KNl8lkg-tLiB0mho,1204
+langchain_b12-0.1.6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+langchain_b12-0.1.6.dist-info/RECORD,,
langchain_b12-0.1.4.dist-info/RECORD
DELETED

@@ -1,9 +0,0 @@
-langchain_b12/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langchain_b12/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langchain_b12/citations/citations.py,sha256=FO9ErybQws082JvV-MtTj81fVzdUWxrALcS81ElRsMw,12023
-langchain_b12/genai/embeddings.py,sha256=od2bVIgt7v9aNAHG0PVypVF1H_XgHto2nTd8vwfvyN8,3355
-langchain_b12/genai/genai.py,sha256=gzkgtvs3wNjcslS_KFZYCajUZIsJkVN2Tq2Q1RMIPyc,15910
-langchain_b12/genai/genai_utils.py,sha256=tA6UiJURK25-11vtaX4768UV47jDCYwVKIIWydD4Egw,10736
-langchain_b12-0.1.4.dist-info/METADATA,sha256=x659l7J9-4XSfjgZgGvR-cVoiCtTqq7cIaujV9JsTrE,1204
-langchain_b12-0.1.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-langchain_b12-0.1.4.dist-info/RECORD,,
{langchain_b12-0.1.4.dist-info → langchain_b12-0.1.6.dist-info}/WHEEL
File without changes