langchain-ollama 0.3.4__tar.gz → 0.3.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25):
  1. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/PKG-INFO +2 -2
  2. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/langchain_ollama/embeddings.py +4 -1
  3. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/pyproject.toml +2 -2
  4. langchain_ollama-0.3.5/tests/unit_tests/test_embeddings.py +63 -0
  5. langchain_ollama-0.3.4/tests/unit_tests/test_embeddings.py +0 -30
  6. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/LICENSE +0 -0
  7. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/README.md +0 -0
  8. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/langchain_ollama/__init__.py +0 -0
  9. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/langchain_ollama/_utils.py +0 -0
  10. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/langchain_ollama/chat_models.py +0 -0
  11. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/langchain_ollama/llms.py +0 -0
  12. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/langchain_ollama/py.typed +0 -0
  13. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/tests/__init__.py +0 -0
  14. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/tests/integration_tests/__init__.py +0 -0
  15. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/tests/integration_tests/chat_models/cassettes/test_chat_models_standard/TestChatOllama.test_stream_time.yaml +0 -0
  16. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/tests/integration_tests/chat_models/test_chat_models.py +0 -0
  17. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/tests/integration_tests/chat_models/test_chat_models_reasoning.py +0 -0
  18. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/tests/integration_tests/chat_models/test_chat_models_standard.py +0 -0
  19. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/tests/integration_tests/test_compile.py +0 -0
  20. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/tests/integration_tests/test_embeddings.py +0 -0
  21. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/tests/integration_tests/test_llms.py +0 -0
  22. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/tests/unit_tests/__init__.py +0 -0
  23. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/tests/unit_tests/test_chat_models.py +0 -0
  24. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/tests/unit_tests/test_imports.py +0 -0
  25. {langchain_ollama-0.3.4 → langchain_ollama-0.3.5}/tests/unit_tests/test_llms.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: langchain-ollama
3
- Version: 0.3.4
3
+ Version: 0.3.5
4
4
  Summary: An integration package connecting Ollama and LangChain
5
5
  License: MIT
6
6
  Project-URL: Source Code, https://github.com/langchain-ai/langchain/tree/master/libs/partners/ollama
@@ -8,7 +8,7 @@ Project-URL: Release Notes, https://github.com/langchain-ai/langchain/releases?q
8
8
  Project-URL: repository, https://github.com/langchain-ai/langchain
9
9
  Requires-Python: >=3.9
10
10
  Requires-Dist: ollama<1.0.0,>=0.5.1
11
- Requires-Dist: langchain-core<1.0.0,>=0.3.68
11
+ Requires-Dist: langchain-core<1.0.0,>=0.3.69
12
12
  Description-Content-Type: text/markdown
13
13
 
14
14
  # langchain-ollama
@@ -296,7 +296,10 @@ class OllamaEmbeddings(BaseModel, Embeddings):
296
296
  raise ValueError(msg)
297
297
  return (
298
298
  await self._async_client.embed(
299
- self.model, texts, keep_alive=self.keep_alive
299
+ self.model,
300
+ texts,
301
+ options=self._default_params,
302
+ keep_alive=self.keep_alive,
300
303
  )
301
304
  )["embeddings"]
302
305
 
@@ -9,10 +9,10 @@ authors = []
9
9
  requires-python = ">=3.9"
10
10
  dependencies = [
11
11
  "ollama>=0.5.1,<1.0.0",
12
- "langchain-core<1.0.0,>=0.3.68",
12
+ "langchain-core<1.0.0,>=0.3.69",
13
13
  ]
14
14
  name = "langchain-ollama"
15
- version = "0.3.4"
15
+ version = "0.3.5"
16
16
  description = "An integration package connecting Ollama and LangChain"
17
17
  readme = "README.md"
18
18
 
@@ -0,0 +1,63 @@
1
+ """Test embedding model integration."""
2
+
3
+ from typing import Any
4
+ from unittest.mock import Mock, patch
5
+
6
+ from langchain_ollama.embeddings import OllamaEmbeddings
7
+
8
+ MODEL_NAME = "llama3.1"
9
+
10
+
11
+ def test_initialization() -> None:
12
+ """Test embedding model initialization."""
13
+ OllamaEmbeddings(model=MODEL_NAME, keep_alive=1)
14
+
15
+
16
+ @patch("langchain_ollama.embeddings.validate_model")
17
+ def test_validate_model_on_init(mock_validate_model: Any) -> None:
18
+ """Test that the model is validated on initialization when requested."""
19
+ # Test that validate_model is called when validate_model_on_init=True
20
+ OllamaEmbeddings(model=MODEL_NAME, validate_model_on_init=True)
21
+ mock_validate_model.assert_called_once()
22
+ mock_validate_model.reset_mock()
23
+
24
+ # Test that validate_model is NOT called when validate_model_on_init=False
25
+ OllamaEmbeddings(model=MODEL_NAME, validate_model_on_init=False)
26
+ mock_validate_model.assert_not_called()
27
+
28
+ # Test that validate_model is NOT called by default
29
+ OllamaEmbeddings(model=MODEL_NAME)
30
+ mock_validate_model.assert_not_called()
31
+
32
+
33
+ @patch("langchain_ollama.embeddings.Client")
34
+ def test_embed_documents_passes_options(mock_client_class: Any) -> None:
35
+ """Test that embed_documents method passes options including num_gpu."""
36
+ # Create a mock client instance
37
+ mock_client = Mock()
38
+ mock_client_class.return_value = mock_client
39
+
40
+ # Mock the embed method response
41
+ mock_client.embed.return_value = {"embeddings": [[0.1, 0.2, 0.3]]}
42
+
43
+ # Create embeddings with num_gpu parameter
44
+ embeddings = OllamaEmbeddings(model=MODEL_NAME, num_gpu=4, temperature=0.5)
45
+
46
+ # Call embed_documents
47
+ result = embeddings.embed_documents(["test text"])
48
+
49
+ # Verify the result
50
+ assert result == [[0.1, 0.2, 0.3]]
51
+
52
+ # Check that embed was called with correct arguments
53
+ mock_client.embed.assert_called_once()
54
+ call_args = mock_client.embed.call_args
55
+
56
+ # Verify the keyword arguments
57
+ assert "options" in call_args.kwargs
58
+ assert "keep_alive" in call_args.kwargs
59
+
60
+ # Verify options contain num_gpu and temperature
61
+ options = call_args.kwargs["options"]
62
+ assert options["num_gpu"] == 4
63
+ assert options["temperature"] == 0.5
@@ -1,30 +0,0 @@
1
- """Test embedding model integration."""
2
-
3
- from typing import Any
4
- from unittest.mock import patch
5
-
6
- from langchain_ollama.embeddings import OllamaEmbeddings
7
-
8
- MODEL_NAME = "llama3.1"
9
-
10
-
11
- def test_initialization() -> None:
12
- """Test embedding model initialization."""
13
- OllamaEmbeddings(model=MODEL_NAME, keep_alive=1)
14
-
15
-
16
- @patch("langchain_ollama.embeddings.validate_model")
17
- def test_validate_model_on_init(mock_validate_model: Any) -> None:
18
- """Test that the model is validated on initialization when requested."""
19
- # Test that validate_model is called when validate_model_on_init=True
20
- OllamaEmbeddings(model=MODEL_NAME, validate_model_on_init=True)
21
- mock_validate_model.assert_called_once()
22
- mock_validate_model.reset_mock()
23
-
24
- # Test that validate_model is NOT called when validate_model_on_init=False
25
- OllamaEmbeddings(model=MODEL_NAME, validate_model_on_init=False)
26
- mock_validate_model.assert_not_called()
27
-
28
- # Test that validate_model is NOT called by default
29
- OllamaEmbeddings(model=MODEL_NAME)
30
- mock_validate_model.assert_not_called()