langchain-nomic 0.1.0__tar.gz → 0.1.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {langchain_nomic-0.1.0 → langchain_nomic-0.1.2}/PKG-INFO +3 -2
- {langchain_nomic-0.1.0 → langchain_nomic-0.1.2}/langchain_nomic/__init__.py +1 -3
- langchain_nomic-0.1.2/langchain_nomic/embeddings.py +134 -0
- {langchain_nomic-0.1.0 → langchain_nomic-0.1.2}/pyproject.toml +3 -2
- langchain_nomic-0.1.0/langchain_nomic/embeddings.py +0 -77
- {langchain_nomic-0.1.0 → langchain_nomic-0.1.2}/LICENSE +0 -0
- {langchain_nomic-0.1.0 → langchain_nomic-0.1.2}/README.md +0 -0
- {langchain_nomic-0.1.0 → langchain_nomic-0.1.2}/langchain_nomic/py.typed +0 -0
{langchain_nomic-0.1.0 → langchain_nomic-0.1.2}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: langchain-nomic
-Version: 0.1.0
+Version: 0.1.2
 Summary: An integration package connecting Nomic and LangChain
 Home-page: https://github.com/langchain-ai/langchain
 License: MIT
@@ -12,7 +12,8 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: langchain-core (>=0.1.46,<0.3)
-Requires-Dist: nomic (>=3.0.
+Requires-Dist: nomic (>=3.0.29,<4.0.0)
+Requires-Dist: pillow (>=10.3.0,<11.0.0)
 Project-URL: Repository, https://github.com/langchain-ai/langchain
 Project-URL: Source Code, https://github.com/langchain-ai/langchain/tree/master/libs/partners/nomic
 Description-Content-Type: text/markdown
langchain_nomic-0.1.2/langchain_nomic/embeddings.py

@@ -0,0 +1,134 @@
+import os
+from typing import List, Literal, Optional, overload
+
+import nomic  # type: ignore[import]
+from langchain_core.embeddings import Embeddings
+from nomic import embed
+
+
+class NomicEmbeddings(Embeddings):
+    """NomicEmbeddings embedding model.
+
+    Example:
+        .. code-block:: python
+
+            from langchain_nomic import NomicEmbeddings
+
+            model = NomicEmbeddings()
+    """
+
+    @overload
+    def __init__(
+        self,
+        *,
+        model: str,
+        nomic_api_key: Optional[str] = ...,
+        dimensionality: Optional[int] = ...,
+        inference_mode: Literal["remote"] = ...,
+    ):
+        ...
+
+    @overload
+    def __init__(
+        self,
+        *,
+        model: str,
+        nomic_api_key: Optional[str] = ...,
+        dimensionality: Optional[int] = ...,
+        inference_mode: Literal["local", "dynamic"],
+        device: Optional[str] = ...,
+    ):
+        ...
+
+    @overload
+    def __init__(
+        self,
+        *,
+        model: str,
+        nomic_api_key: Optional[str] = ...,
+        dimensionality: Optional[int] = ...,
+        inference_mode: str,
+        device: Optional[str] = ...,
+    ):
+        ...
+
+    def __init__(
+        self,
+        *,
+        model: str,
+        nomic_api_key: Optional[str] = None,
+        dimensionality: Optional[int] = None,
+        inference_mode: str = "remote",
+        device: Optional[str] = None,
+        vision_model: Optional[str] = None,
+    ):
+        """Initialize NomicEmbeddings model.
+
+        Args:
+            model: model name
+            nomic_api_key: optionally, set the Nomic API key. Uses the NOMIC_API_KEY
+                environment variable by default.
+            dimensionality: The embedding dimension, for use with Matryoshka-capable
+                models. Defaults to full-size.
+            inference_mode: How to generate embeddings. One of `remote`, `local`
+                (Embed4All), or `dynamic` (automatic). Defaults to `remote`.
+            device: The device to use for local embeddings. Choices include
+                `cpu`, `gpu`, `nvidia`, `amd`, or a specific device name. See
+                the docstring for `GPT4All.__init__` for more info. Typically
+                defaults to CPU. Do not use on macOS.
+        """
+        _api_key = nomic_api_key or os.environ.get("NOMIC_API_KEY")
+        if _api_key:
+            nomic.login(_api_key)
+        self.model = model
+        self.dimensionality = dimensionality
+        self.inference_mode = inference_mode
+        self.device = device
+        self.vision_model = vision_model
+
+    def embed(self, texts: List[str], *, task_type: str) -> List[List[float]]:
+        """Embed texts.
+
+        Args:
+            texts: list of texts to embed
+            task_type: the task type to use when embedding. One of `search_query`,
+                `search_document`, `classification`, `clustering`
+        """
+
+        output = embed.text(
+            texts=texts,
+            model=self.model,
+            task_type=task_type,
+            dimensionality=self.dimensionality,
+            inference_mode=self.inference_mode,
+            device=self.device,
+        )
+        return output["embeddings"]
+
+    def embed_documents(self, texts: List[str]) -> List[List[float]]:
+        """Embed search docs.
+
+        Args:
+            texts: list of texts to embed as documents
+        """
+        return self.embed(
+            texts=texts,
+            task_type="search_document",
+        )
+
+    def embed_query(self, text: str) -> List[float]:
+        """Embed query text.
+
+        Args:
+            text: query text
+        """
+        return self.embed(
+            texts=[text],
+            task_type="search_query",
+        )[0]
+
+    def embed_image(self, uris: List[str]) -> List[List[float]]:
+        return embed.image(
+            images=uris,
+            model=self.vision_model,
+        )["embeddings"]
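Relative to 0.1.0, the new module adds `@overload`-typed `inference_mode` signatures, `device` and `vision_model` options, and an `embed_image` method. A minimal usage sketch of that surface follows; the model names and image paths are illustrative assumptions, not values taken from this diff.

```python
# Sketch of the API surface added in langchain-nomic 0.1.2. The model names
# and image paths below are illustrative assumptions.
from langchain_nomic import NomicEmbeddings

# Remote inference (the default), with `dimensionality` truncating the output
# of a Matryoshka-capable model.
embeddings = NomicEmbeddings(model="nomic-embed-text-v1.5", dimensionality=256)
doc_vectors = embeddings.embed_documents(["first document", "second document"])
query_vector = embeddings.embed_query("a search query")

# Local inference via Embed4All, using the new `inference_mode` and `device`
# parameters (per the docstring, not for use on macOS).
local_embeddings = NomicEmbeddings(
    model="nomic-embed-text-v1.5",
    inference_mode="local",
    device="cpu",
)

# Image embeddings via the new `embed_image` method, which forwards the URIs
# to nomic.embed.image with the configured `vision_model`.
vision_embeddings = NomicEmbeddings(
    model="nomic-embed-text-v1.5",
    vision_model="nomic-embed-vision-v1.5",
)
image_vectors = vision_embeddings.embed_image(["./photo_a.png", "./photo_b.png"])
```

The pillow dependency added in this release presumably supports this image-embedding path.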
{langchain_nomic-0.1.0 → langchain_nomic-0.1.2}/pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "langchain-nomic"
-version = "0.1.0"
+version = "0.1.2"
 description = "An integration package connecting Nomic and LangChain"
 authors = []
 readme = "README.md"
@@ -13,7 +13,8 @@ license = "MIT"
 [tool.poetry.dependencies]
 python = ">=3.8.1,<4.0"
 langchain-core = ">=0.1.46,<0.3"
-nomic = "^3.0.
+nomic = "^3.0.29"
+pillow = "^10.3.0"
 
 [tool.poetry.group.test]
 optional = true
langchain_nomic-0.1.0/langchain_nomic/embeddings.py

@@ -1,77 +0,0 @@
-import os
-from typing import List, Optional
-
-import nomic  # type: ignore
-from langchain_core.embeddings import Embeddings
-from nomic import embed  # type: ignore
-
-
-class NomicEmbeddings(Embeddings):
-    """NomicEmbeddings embedding model.
-
-    Example:
-        .. code-block:: python
-
-            from langchain_nomic import NomicEmbeddings
-
-            model = NomicEmbeddings()
-    """
-
-    def __init__(
-        self,
-        *,
-        model: str,
-        nomic_api_key: Optional[str] = None,
-        dimensionality: Optional[int] = None,
-    ):
-        """Initialize NomicEmbeddings model.
-
-        Args:
-            model: model name
-            nomic_api_key: optionally, set the Nomic API key. Uses the NOMIC_API_KEY
-                environment variable by default.
-        """
-        _api_key = nomic_api_key or os.environ.get("NOMIC_API_KEY")
-        if _api_key:
-            nomic.login(_api_key)
-        self.model = model
-        self.dimensionality = dimensionality
-
-    def embed(self, texts: List[str], *, task_type: str) -> List[List[float]]:
-        """Embed texts.
-
-        Args:
-            texts: list of texts to embed
-            task_type: the task type to use when embedding. One of `search_query`,
-                `search_document`, `classification`, `clustering`
-        """
-
-        output = embed.text(
-            texts=texts,
-            model=self.model,
-            task_type=task_type,
-            dimensionality=self.dimensionality,
-        )
-        return output["embeddings"]
-
-    def embed_documents(self, texts: List[str]) -> List[List[float]]:
-        """Embed search docs.
-
-        Args:
-            texts: list of texts to embed as documents
-        """
-        return self.embed(
-            texts=texts,
-            task_type="search_document",
-        )
-
-    def embed_query(self, text: str) -> List[float]:
-        """Embed query text.
-
-        Args:
-            text: query text
-        """
-        return self.embed(
-            texts=[text],
-            task_type="search_query",
-        )[0]
{langchain_nomic-0.1.0 → langchain_nomic-0.1.2}/LICENSE: file without changes

{langchain_nomic-0.1.0 → langchain_nomic-0.1.2}/README.md: file without changes

{langchain_nomic-0.1.0 → langchain_nomic-0.1.2}/langchain_nomic/py.typed: file without changes