langchain-ollama 0.1.2__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langchain_ollama/chat_models.py +14 -12
- langchain_ollama/embeddings.py +18 -14
- langchain_ollama/llms.py +10 -10
- {langchain_ollama-0.1.2.dist-info → langchain_ollama-0.2.0.dist-info}/METADATA +3 -3
- langchain_ollama-0.2.0.dist-info/RECORD +9 -0
- langchain_ollama-0.1.2.dist-info/RECORD +0 -9
- {langchain_ollama-0.1.2.dist-info → langchain_ollama-0.2.0.dist-info}/LICENSE +0 -0
- {langchain_ollama-0.1.2.dist-info → langchain_ollama-0.2.0.dist-info}/WHEEL +0 -0
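In short: 0.2.0 drops the `langchain_core.pydantic_v1` compatibility shim in favor of native pydantic v2 APIs (`PrivateAttr`, `model_validator`, `ConfigDict`) across all three modules, and the wheel metadata moves the langchain-core dependency to the 0.3.x line and raises the Python floor to 3.9.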
langchain_ollama/chat_models.py
CHANGED
@@ -35,11 +35,12 @@ from langchain_core.messages import (
 from langchain_core.messages.ai import UsageMetadata
 from langchain_core.messages.tool import tool_call
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
-from langchain_core.pydantic_v1 import Field, root_validator
 from langchain_core.runnables import Runnable
 from langchain_core.tools import BaseTool
 from langchain_core.utils.function_calling import convert_to_openai_tool
 from ollama import AsyncClient, Client, Message, Options
+from pydantic import PrivateAttr, model_validator
+from typing_extensions import Self


 def _get_usage_metadata_from_generation_info(
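The dropped `langchain_core.pydantic_v1` import is the heart of the release: langchain-core 0.3 models are native pydantic v2 classes, so `Field`-style defaults for private state become `PrivateAttr` and `root_validator` becomes `model_validator`. A minimal sketch of the new spelling (toy class and stand-in client, not code from the package); the same pattern repeats in embeddings.py and llms.py below:

from typing_extensions import Self
from pydantic import BaseModel, PrivateAttr, model_validator

class ToyModel(BaseModel):
    """Toy stand-in for ChatOllama's client wiring."""

    base_url: str = "http://localhost:11434"

    # Private attributes are excluded from validation and serialization,
    # which is what the real code wants for its HTTP client handles.
    _client: object = PrivateAttr(default=None)

    @model_validator(mode="after")
    def _set_client(self) -> Self:
        # mode="after" runs on the constructed instance, so validated
        # field values are available; the validator returns the instance.
        self._client = ("client-for", self.base_url)  # stand-in for Client(...)
        return self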
@@ -91,7 +92,9 @@ def _lc_tool_call_to_openai_tool_call(tool_call: ToolCall) -> dict:
 class ChatOllama(BaseChatModel):
     """Ollama chat model integration.

-    Setup:
+    .. dropdown:: Setup
+        :open:
+
         Install ``langchain-ollama`` and download any models you want to use from ollama.

         .. code-block:: bash
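`.. dropdown::` with `:open:` is a Sphinx dropdown directive (provided by extensions such as sphinx-design); the change renders the Setup section as a collapsible panel that is expanded by default in the generated API docs, rather than a plain docstring heading.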
@@ -225,7 +228,7 @@ class ChatOllama(BaseChatModel):
         .. code-block:: python

             from langchain_ollama import ChatOllama
-            from langchain_core.pydantic_v1 import BaseModel, Field
+            from pydantic import BaseModel, Field

             class Multiply(BaseModel):
                 a: int = Field(..., description="First integer")
@@ -328,12 +331,12 @@ class ChatOllama(BaseChatModel):
     For a full list of the params, see [this link](https://pydoc.dev/httpx/latest/httpx.Client.html)
     """

-    _client: Client = Field(default=None)
+    _client: Client = PrivateAttr(default=None)
    """
    The client to use for making requests.
    """

-    _async_client: AsyncClient = Field(default=None)
+    _async_client: AsyncClient = PrivateAttr(default=None)
    """
    The async client to use for making requests.
    """
@@ -364,14 +367,13 @@ class ChatOllama(BaseChatModel):
             "keep_alive": self.keep_alive,
         }

-    @root_validator(pre=False, skip_on_failure=True)
-    def _set_clients(cls, values: dict) -> dict:
+    @model_validator(mode="after")
+    def _set_clients(self) -> Self:
         """Set clients to use for ollama."""
-        values["_client"] = Client(host=values["base_url"], **values["client_kwargs"])
-        values["_async_client"] = AsyncClient(
-            host=values["base_url"], **values["client_kwargs"]
-        )
-        return values
+        client_kwargs = self.client_kwargs or {}
+        self._client = Client(host=self.base_url, **client_kwargs)
+        self._async_client = AsyncClient(host=self.base_url, **client_kwargs)
+        return self

     def _convert_messages_to_ollama_messages(
         self, messages: List[BaseMessage]
langchain_ollama/embeddings.py
CHANGED
@@ -4,8 +4,14 @@ from typing import (
 )

 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
 from ollama import AsyncClient, Client
+from pydantic import (
+    BaseModel,
+    ConfigDict,
+    PrivateAttr,
+    model_validator,
+)
+from typing_extensions import Self


 class OllamaEmbeddings(BaseModel, Embeddings):
@@ -126,29 +132,27 @@ class OllamaEmbeddings(BaseModel, Embeddings):
     For a full list of the params, see [this link](https://pydoc.dev/httpx/latest/httpx.Client.html)
     """

-    _client: Client = Field(default=None)
+    _client: Client = PrivateAttr(default=None)
    """
    The client to use for making requests.
    """

-    _async_client: AsyncClient = Field(default=None)
+    _async_client: AsyncClient = PrivateAttr(default=None)
    """
    The async client to use for making requests.
    """

-    class Config:
-        """Configuration for this pydantic object."""
-
-        extra = "forbid"
+    model_config = ConfigDict(
+        extra="forbid",
+    )

-    @root_validator(pre=False, skip_on_failure=True)
-    def _set_clients(cls, values: dict) -> dict:
+    @model_validator(mode="after")
+    def _set_clients(self) -> Self:
         """Set clients to use for ollama."""
-        values["_client"] = Client(host=values["base_url"], **values["client_kwargs"])
-        values["_async_client"] = AsyncClient(
-            host=values["base_url"], **values["client_kwargs"]
-        )
-        return values
+        client_kwargs = self.client_kwargs or {}
+        self._client = Client(host=self.base_url, **client_kwargs)
+        self._async_client = AsyncClient(host=self.base_url, **client_kwargs)
+        return self

     def embed_documents(self, texts: List[str]) -> List[List[float]]:
         """Embed search docs."""
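Beyond the client and validator changes shared with chat_models.py, embeddings.py also swaps the pydantic v1 inner `class Config` for the v2 `model_config = ConfigDict(...)`. The strictness it carries over is unchanged, as this toy example (not from the package) shows:

from pydantic import BaseModel, ConfigDict, ValidationError

class Strict(BaseModel):
    model_config = ConfigDict(extra="forbid")

    model: str = "llama3"

Strict(model="llama3")  # fine
try:
    Strict(model="llama3", typo_kwarg=1)
except ValidationError as err:
    print(err)  # "Extra inputs are not permitted" for typo_kwarg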
langchain_ollama/llms.py
CHANGED
@@ -18,8 +18,9 @@ from langchain_core.callbacks import (
 )
 from langchain_core.language_models import BaseLLM, LangSmithParams
 from langchain_core.outputs import GenerationChunk, LLMResult
-from langchain_core.pydantic_v1 import Field, root_validator
 from ollama import AsyncClient, Client, Options
+from pydantic import PrivateAttr, model_validator
+from typing_extensions import Self


 class OllamaLLM(BaseLLM):
@@ -115,12 +116,12 @@ class OllamaLLM(BaseLLM):
     For a full list of the params, see [this link](https://pydoc.dev/httpx/latest/httpx.Client.html)
     """

-    _client: Client = Field(default=None)
+    _client: Client = PrivateAttr(default=None)
    """
    The client to use for making requests.
    """

-    _async_client: AsyncClient = Field(default=None)
+    _async_client: AsyncClient = PrivateAttr(default=None)
    """
    The async client to use for making requests.
    """
@@ -164,14 +165,13 @@ class OllamaLLM(BaseLLM):
             params["ls_max_tokens"] = max_tokens
         return params

-    @root_validator(pre=False, skip_on_failure=True)
-    def _set_clients(cls, values: dict) -> dict:
+    @model_validator(mode="after")
+    def _set_clients(self) -> Self:
         """Set clients to use for ollama."""
-        values["_client"] = Client(host=values["base_url"], **values["client_kwargs"])
-        values["_async_client"] = AsyncClient(
-            host=values["base_url"], **values["client_kwargs"]
-        )
-        return values
+        client_kwargs = self.client_kwargs or {}
+        self._client = Client(host=self.base_url, **client_kwargs)
+        self._async_client = AsyncClient(host=self.base_url, **client_kwargs)
+        return self

     async def _acreate_generate_stream(
         self,
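llms.py receives the same mechanical rewrite as the other two modules, and nothing changes at the call site. A typical construction still works as before (model name and URL are examples; assumes a local Ollama server with the model pulled):

from langchain_ollama import OllamaLLM

llm = OllamaLLM(model="llama3", base_url="http://localhost:11434")
print(llm.invoke("Why is the sky blue?"))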
{langchain_ollama-0.1.2.dist-info → langchain_ollama-0.2.0.dist-info}/METADATA
CHANGED
@@ -1,17 +1,17 @@
 Metadata-Version: 2.1
 Name: langchain-ollama
-Version: 0.1.2
+Version: 0.2.0
 Summary: An integration package connecting Ollama and LangChain
 Home-page: https://github.com/langchain-ai/langchain
 License: MIT
-Requires-Python: >=3.8.1,<4.0
+Requires-Python: >=3.9,<4.0
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
-Requires-Dist: langchain-core (>=0.2.36,<0.3.0)
+Requires-Dist: langchain-core (>=0.3.0,<0.4.0)
 Requires-Dist: ollama (>=0.3.0,<1)
 Project-URL: Repository, https://github.com/langchain-ai/langchain
 Project-URL: Release Notes, https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-ollama%3D%3D0%22&expanded=true
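The metadata captures the breaking part of the release: 0.2.0 requires langchain-core on the 0.3.x line (native pydantic v2) and Python 3.9+, so it cannot be installed alongside 0.2.x-era langchain-core. A quick pre-upgrade sanity check (illustrative only):

import sys
from importlib import metadata

assert sys.version_info >= (3, 9), "langchain-ollama 0.2.0 needs Python >= 3.9"
print(metadata.version("langchain-core"))  # expect a 0.3.x version after upgrading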
langchain_ollama-0.2.0.dist-info/RECORD
ADDED
@@ -0,0 +1,9 @@
+langchain_ollama/__init__.py,sha256=HhQZqbCjhrbr2dC_9Dkw12pg4HPjnDXUoInROMNJKqA,518
+langchain_ollama/chat_models.py,sha256=lfpm1D4YM_VjGegHq6JJws9nIzIp-QtX57VZvT8GC4I,30452
+langchain_ollama/embeddings.py,sha256=46gmGxzK5Cm0GYesTSSgWupJYmJ2ywN7FQUAl0fzpxE,4991
+langchain_ollama/llms.py,sha256=uwQfKwDHXhWWVSAFzHpuv8SirBwKp0H4irnA8lqU0M4,13259
+langchain_ollama/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+langchain_ollama-0.2.0.dist-info/LICENSE,sha256=2btS8uNUDWD_UNjw9ba6ZJt_00aUjEw9CGyK-xIHY8c,1072
+langchain_ollama-0.2.0.dist-info/METADATA,sha256=slqxbRBWofN8p4ewKoKh7hljZqey_qhjr3zrYwDgD0g,1824
+langchain_ollama-0.2.0.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+langchain_ollama-0.2.0.dist-info/RECORD,,
langchain_ollama-0.1.2.dist-info/RECORD
REMOVED
@@ -1,9 +0,0 @@
-langchain_ollama/__init__.py,sha256=HhQZqbCjhrbr2dC_9Dkw12pg4HPjnDXUoInROMNJKqA,518
-langchain_ollama/chat_models.py,sha256=M2q6WK8tvFN9Q_CVH_cAYvNbhx7HNtN2PIw5255cpxs,30451
-langchain_ollama/embeddings.py,sha256=Sq30Q2lWJxc5BT1TLqDgrH7AW3tEezhGAJw_5dkr_8s,5005
-langchain_ollama/llms.py,sha256=FObzYNIQJoE5kQY_sh12CGj8AQXiMkfCLqrzsM8VNE0,13269
-langchain_ollama/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langchain_ollama-0.1.2.dist-info/LICENSE,sha256=2btS8uNUDWD_UNjw9ba6ZJt_00aUjEw9CGyK-xIHY8c,1072
-langchain_ollama-0.1.2.dist-info/METADATA,sha256=hzrdbivZX2tDz7Vy4sqf2LBBcJ7e-c22qiyJZILYNuc,1820
-langchain_ollama-0.1.2.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
-langchain_ollama-0.1.2.dist-info/RECORD,,
{langchain_ollama-0.1.2.dist-info → langchain_ollama-0.2.0.dist-info}/LICENSE
File without changes

{langchain_ollama-0.1.2.dist-info → langchain_ollama-0.2.0.dist-info}/WHEEL
File without changes