langchain-b12 0.1.6__tar.gz → 0.1.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langchain-b12
-Version: 0.1.6
+Version: 0.1.7
 Summary: A reusable collection of tools and implementations for Langchain
 Author-email: Vincent Min <vincent.min@b12-consulting.com>
 Requires-Python: >=3.11
@@ -1,6 +1,6 @@
 [project]
 name = "langchain-b12"
-version = "0.1.6"
+version = "0.1.7"
 description = "A reusable collection of tools and implementations for Langchain"
 readme = "README.md"
 authors = [
@@ -1,6 +1,7 @@
 import os
 
 from google.genai import Client
+from google.genai.types import EmbedContentConfigOrDict
 from google.oauth2 import service_account
 from langchain_core.embeddings import Embeddings
 from pydantic import BaseModel, ConfigDict, Field
@@ -19,6 +20,7 @@ class GenAIEmbeddings(Embeddings, BaseModel):
         ),
         exclude=True,
     )
+    embed_content_config: EmbedContentConfigOrDict | None = Field(default=None)
     model_config = ConfigDict(
         arbitrary_types_allowed=True,
     )
@@ -33,19 +35,17 @@ class GenAIEmbeddings(Embeddings, BaseModel):
             list[list[float]]: The embedding vectors.
         """
         embeddings = []
-        for text in texts:
-            response = self.client.models.embed_content(
-                model=self.model_name,
-                contents=[text],
-            )
+        response = self.client.models.embed_content(
+            model=self.model_name,
+            contents=texts,
+            config=self.embed_content_config,
+        )
+        assert response.embeddings is not None, "No embeddings found in the response."
+        for embedding in response.embeddings:
             assert (
-                response.embeddings is not None
-            ), "No embeddings found in the response."
-            for embedding in response.embeddings:
-                assert (
-                    embedding.values is not None
-                ), "No embedding values found in the response."
-                embeddings.append(embedding.values)
+                embedding.values is not None
+            ), "No embedding values found in the response."
+            embeddings.append(embedding.values)
         assert len(embeddings) == len(
             texts
         ), "The number of embeddings does not match the number of texts."
@@ -75,6 +75,7 @@ class GenAIEmbeddings(Embeddings, BaseModel):
         response = await self.client.aio.models.embed_content(
             model=self.model_name,
             contents=texts,
+            config=self.embed_content_config,
         )
         assert response.embeddings is not None, "No embeddings found in the response."
         for embedding in response.embeddings:
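
The async path now forwards the same config. A short sketch, again with an assumed import path and assuming the async method keeps the standard LangChain Embeddings name aembed_documents:

import asyncio

from langchain_b12.genai.embeddings import GenAIEmbeddings  # import path assumed

async def main() -> None:
    # Config left at its default (None); whatever is set here is passed through
    # to client.aio.models.embed_content as the config argument.
    embeddings = GenAIEmbeddings(model_name="text-embedding-005")
    vectors = await embeddings.aembed_documents(["first document", "second document"])
    print(len(vectors), len(vectors[0]))

asyncio.run(main())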
@@ -419,6 +419,7 @@ class ChatGenAI(BaseChatModel):
         # add model name if final chunk
         if top_candidate.finish_reason is not None:
             message.response_metadata["model_name"] = self.model_name
+            message.response_metadata["tags"] = self.tags or []
 
         return (
             ChatGenerationChunk(
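
With the chat change above, the chunk that carries a finish_reason now includes the model's tags in its response_metadata alongside the model name. A sketch assuming ChatGenAI takes model_name as a constructor argument (the diff only shows self.model_name) and relying on the standard LangChain tags field and stream() API; the import path and model name are placeholders:

from langchain_b12.genai.chat import ChatGenAI  # import path assumed

llm = ChatGenAI(model_name="gemini-2.0-flash", tags=["experiment-42"])

# Accumulate the streamed chunks; adding AIMessageChunks merges response_metadata.
full = None
for chunk in llm.stream("Say hello"):
    full = chunk if full is None else full + chunk

print(full.response_metadata.get("model_name"))  # "gemini-2.0-flash"
print(full.response_metadata.get("tags"))        # ["experiment-42"]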
@@ -295,7 +295,7 @@ wheels = [
 
 [[package]]
 name = "langchain-b12"
-version = "0.1.4"
+version = "0.1.6"
 source = { editable = "." }
 dependencies = [
     { name = "langchain-core" },