langchain-mcp-tools 0.0.14__tar.gz → 0.0.16__tar.gz
This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- {langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/PKG-INFO +9 -10
- {langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/README.md +8 -7
- {langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/langchain_mcp_tools/langchain_mcp_tools.py +7 -5
- {langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/langchain_mcp_tools.egg-info/PKG-INFO +9 -10
- {langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/langchain_mcp_tools.egg-info/SOURCES.txt +1 -2
- {langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/langchain_mcp_tools.egg-info/requires.txt +0 -2
- {langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/pyproject.toml +1 -3
- langchain_mcp_tools-0.0.14/tests/test_langchain_mcp_tools.py +0 -163
- {langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/LICENSE +0 -0
- {langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/langchain_mcp_tools/__init__.py +0 -0
- {langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/langchain_mcp_tools/py.typed +0 -0
- {langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/langchain_mcp_tools.egg-info/dependency_links.txt +0 -0
- {langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/langchain_mcp_tools.egg-info/top_level.txt +0 -0
- {langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/setup.cfg +0 -0
{langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/PKG-INFO RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: langchain-mcp-tools
-Version: 0.0.14
+Version: 0.0.16
 Summary: Model Context Protocol (MCP) To LangChain Tools Conversion Utility
 Project-URL: Bug Tracker, https://github.com/hideya/langchain-mcp-tools-py/issues
 Project-URL: Source Code, https://github.com/hideya/langchain-mcp-tools-py
@@ -18,8 +18,6 @@ Requires-Dist: mcp>=1.2.0
 Requires-Dist: pyjson5>=1.6.8
 Requires-Dist: pympler>=1.1
 Requires-Dist: python-dotenv>=1.0.1
-Requires-Dist: pytest>=8.3.4
-Requires-Dist: pytest-asyncio>=0.25.2
 Provides-Extra: dev
 Requires-Dist: twine>=6.0.1; extra == "dev"

@@ -108,7 +106,7 @@ Currently, only text results of tool calls are supported.
 ## Technical Details

 It was very tricky (for me) to get the parallel MCP server initialization
-to work
+to work, including successful final resource cleanup...

 I'm new to Python, so it is very possible that my ignorance is playing
 a big role here...
@@ -119,7 +117,13 @@ Any comments pointing out something I am missing would be greatly appreciated!
 [(comment here)](https://github.com/hideya/langchain-mcp-tools-ts/issues)

 1. Core Challenge:
-
+
+A key requirement for parallel initialization is that each server must be
+initialized in its own dedicated task - there's no way around this as far as
+I know. However, this poses a challenge when combined with
+`asynccontextmanager`.
+
+- Resources management for `stdio_client` and `ClientSession` seems
 to require relying exclusively on `asynccontextmanager` for cleanup,
 with no manual cleanup options
 (based on [the mcp python-sdk impl as of Jan 14, 2025](https://github.com/modelcontextprotocol/python-sdk/tree/99727a9/src/mcp/client))
@@ -131,11 +135,6 @@ Any comments pointing out something I am missing would be greatly appreciated!

 2. Solution Strategy:

-A key requirement for parallel initialization is that each server must be
-initialized in its own dedicated task - there's no way around this as far
-as I understand. However, this creates a challenge since we also need to
-maintain long-lived sessions and handle cleanup properly.
-
 The key insight is to keep the initialization tasks alive throughout the
 session lifetime, rather than letting them complete after initialization.

{langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/README.md RENAMED
@@ -83,7 +83,7 @@ Currently, only text results of tool calls are supported.
 ## Technical Details

 It was very tricky (for me) to get the parallel MCP server initialization
-to work
+to work, including successful final resource cleanup...

 I'm new to Python, so it is very possible that my ignorance is playing
 a big role here...
@@ -94,7 +94,13 @@ Any comments pointing out something I am missing would be greatly appreciated!
 [(comment here)](https://github.com/hideya/langchain-mcp-tools-ts/issues)

 1. Core Challenge:
-
+
+A key requirement for parallel initialization is that each server must be
+initialized in its own dedicated task - there's no way around this as far as
+I know. However, this poses a challenge when combined with
+`asynccontextmanager`.
+
+- Resources management for `stdio_client` and `ClientSession` seems
 to require relying exclusively on `asynccontextmanager` for cleanup,
 with no manual cleanup options
 (based on [the mcp python-sdk impl as of Jan 14, 2025](https://github.com/modelcontextprotocol/python-sdk/tree/99727a9/src/mcp/client))
@@ -106,11 +112,6 @@ Any comments pointing out something I am missing would be greatly appreciated!

 2. Solution Strategy:

-A key requirement for parallel initialization is that each server must be
-initialized in its own dedicated task - there's no way around this as far
-as I understand. However, this creates a challenge since we also need to
-maintain long-lived sessions and handle cleanup properly.
-
 The key insight is to keep the initialization tasks alive throughout the
 session lifetime, rather than letting them complete after initialization.

{langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/langchain_mcp_tools/langchain_mcp_tools.py RENAMED
@@ -37,7 +37,13 @@ require context managers while enabling parallel initialization.
 The key aspects are:

 1. Core Challenge:
-
+
+A key requirement for parallel initialization is that each server must be
+initialized in its own dedicated task - there's no way around this as far as
+I know. However, this poses a challenge when combined with
+`asynccontextmanager`.
+
+- Resources management for `stdio_client` and `ClientSession` seems
 to require relying exclusively on `asynccontextmanager` for cleanup,
 with no manual cleanup options
 (based on [the mcp python-sdk impl as of Jan 14, 2025](https://github.com/modelcontextprotocol/python-sdk/tree/99727a9/src/mcp/client))
@@ -48,10 +54,6 @@ The key aspects are:
 - Need to ensure proper cleanup later in the same task that created them

 2. Solution Strategy:
-A key requirement for parallel initialization is that each server must be
-initialized in its own dedicated task - there's no way around this as far
-as I understand. However, this creates a challenge since we also need to
-maintain long-lived sessions and handle cleanup properly.

 The key insight is to keep the initialization tasks alive throughout the
 session lifetime, rather than letting them complete after initialization.
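For readers unfamiliar with the pattern described in the docstring above (one dedicated, long-lived task per server so that `asynccontextmanager`-based resources are entered and exited in the same task), here is a minimal illustrative sketch. It uses plain asyncio with a dummy context manager standing in for `stdio_client`/`ClientSession`; the names `fake_server_session` and `init_one_server` are hypothetical and this is not the library's actual implementation.

```python
import asyncio
from contextlib import asynccontextmanager


@asynccontextmanager
async def fake_server_session(name: str):
    # Stand-in for stdio_client()/ClientSession(): enter and exit must
    # happen in the same task, which is why each task below stays alive.
    print(f"[{name}] connected")
    try:
        yield f"session-{name}"
    finally:
        print(f"[{name}] cleaned up")


async def init_one_server(
    name: str,
    ready: asyncio.Event,
    cleanup: asyncio.Event,
    out: dict[str, str],
) -> None:
    # Enter the context manager here, then park until cleanup is requested,
    # so __aexit__ runs in the very task that ran __aenter__.
    async with fake_server_session(name) as session:
        out[name] = session
        ready.set()
        await cleanup.wait()


async def main() -> None:
    names = ["server1", "server2"]
    sessions: dict[str, str] = {}
    ready_events = [asyncio.Event() for _ in names]
    cleanup_event = asyncio.Event()

    # One dedicated, long-lived task per server enables parallel initialization.
    tasks = [
        asyncio.create_task(init_one_server(n, r, cleanup_event, sessions))
        for n, r in zip(names, ready_events)
    ]

    # Wait until every server has finished initializing.
    await asyncio.gather(*(r.wait() for r in ready_events))
    print("all initialized:", sessions)

    # ... use the sessions here ...

    # Later: signal cleanup and let each task close its own resources.
    cleanup_event.set()
    await asyncio.gather(*tasks)


if __name__ == "__main__":
    asyncio.run(main())
```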
{langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/langchain_mcp_tools.egg-info/PKG-INFO RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: langchain-mcp-tools
-Version: 0.0.14
+Version: 0.0.16
 Summary: Model Context Protocol (MCP) To LangChain Tools Conversion Utility
 Project-URL: Bug Tracker, https://github.com/hideya/langchain-mcp-tools-py/issues
 Project-URL: Source Code, https://github.com/hideya/langchain-mcp-tools-py
@@ -18,8 +18,6 @@ Requires-Dist: mcp>=1.2.0
 Requires-Dist: pyjson5>=1.6.8
 Requires-Dist: pympler>=1.1
 Requires-Dist: python-dotenv>=1.0.1
-Requires-Dist: pytest>=8.3.4
-Requires-Dist: pytest-asyncio>=0.25.2
 Provides-Extra: dev
 Requires-Dist: twine>=6.0.1; extra == "dev"

@@ -108,7 +106,7 @@ Currently, only text results of tool calls are supported.
 ## Technical Details

 It was very tricky (for me) to get the parallel MCP server initialization
-to work
+to work, including successful final resource cleanup...

 I'm new to Python, so it is very possible that my ignorance is playing
 a big role here...
@@ -119,7 +117,13 @@ Any comments pointing out something I am missing would be greatly appreciated!
 [(comment here)](https://github.com/hideya/langchain-mcp-tools-ts/issues)

 1. Core Challenge:
-
+
+A key requirement for parallel initialization is that each server must be
+initialized in its own dedicated task - there's no way around this as far as
+I know. However, this poses a challenge when combined with
+`asynccontextmanager`.
+
+- Resources management for `stdio_client` and `ClientSession` seems
 to require relying exclusively on `asynccontextmanager` for cleanup,
 with no manual cleanup options
 (based on [the mcp python-sdk impl as of Jan 14, 2025](https://github.com/modelcontextprotocol/python-sdk/tree/99727a9/src/mcp/client))
@@ -131,11 +135,6 @@ Any comments pointing out something I am missing would be greatly appreciated!

 2. Solution Strategy:

-A key requirement for parallel initialization is that each server must be
-initialized in its own dedicated task - there's no way around this as far
-as I understand. However, this creates a challenge since we also need to
-maintain long-lived sessions and handle cleanup properly.
-
 The key insight is to keep the initialization tasks alive throughout the
 session lifetime, rather than letting them complete after initialization.

{langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/langchain_mcp_tools.egg-info/SOURCES.txt RENAMED
@@ -8,5 +8,4 @@ langchain_mcp_tools.egg-info/PKG-INFO
 langchain_mcp_tools.egg-info/SOURCES.txt
 langchain_mcp_tools.egg-info/dependency_links.txt
 langchain_mcp_tools.egg-info/requires.txt
-langchain_mcp_tools.egg-info/top_level.txt
-tests/test_langchain_mcp_tools.py
+langchain_mcp_tools.egg-info/top_level.txt
{langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/pyproject.toml RENAMED
@@ -1,6 +1,6 @@
 [project]
 name = "langchain-mcp-tools"
-version = "0.0.14"
+version = "0.0.16"
 description = "Model Context Protocol (MCP) To LangChain Tools Conversion Utility"
 readme = "README.md"
 requires-python = ">=3.11"
@@ -16,8 +16,6 @@ dependencies = [
 "pyjson5>=1.6.8",
 "pympler>=1.1",
 "python-dotenv>=1.0.1",
-"pytest>=8.3.4",
-"pytest-asyncio>=0.25.2",
 ]

 [project.optional-dependencies]
langchain_mcp_tools-0.0.14/tests/test_langchain_mcp_tools.py DELETED
@@ -1,163 +0,0 @@
-import pytest
-from unittest.mock import AsyncMock, MagicMock, patch
-from langchain_core.tools import BaseTool
-from langchain_mcp_tools.langchain_mcp_tools import (
-    convert_mcp_to_langchain_tools,
-)
-
-# Fix the asyncio mark warning by installing pytest-asyncio
-pytest_plugins = ('pytest_asyncio',)
-
-
-@pytest.fixture
-def mock_stdio_client():
-    with patch('langchain_mcp_tools.langchain_mcp_tools.stdio_client') as mock:
-        mock.return_value.__aenter__.return_value = (AsyncMock(), AsyncMock())
-        yield mock
-
-
-@pytest.fixture
-def mock_client_session():
-    with patch('langchain_mcp_tools.langchain_mcp_tools.ClientSession') as mock:
-        session = AsyncMock()
-        # Mock the list_tools response
-        session.list_tools.return_value = MagicMock(
-            tools=[
-                MagicMock(
-                    name="tool1",
-                    description="Test tool",
-                    inputSchema={"type": "object", "properties": {}}
-                )
-            ]
-        )
-        mock.return_value.__aenter__.return_value = session
-        yield mock
-
-
-@pytest.mark.asyncio
-async def test_convert_mcp_to_langchain_tools_empty():
-    server_configs = {}
-    tools, cleanup = await convert_mcp_to_langchain_tools(server_configs)
-    assert isinstance(tools, list)
-    assert len(tools) == 0
-    await cleanup()
-
-
-"""
-@pytest.mark.asyncio
-async def test_convert_mcp_to_langchain_tools_invalid_config():
-    server_configs = {"invalid": {"command": "nonexistent"}}
-    with pytest.raises(Exception):
-        await convert_mcp_to_langchain_tools(server_configs)
-"""
-
-
-"""
-@pytest.mark.asyncio
-async def test_convert_single_mcp_success(
-    mock_stdio_client,
-    mock_client_session
-):
-    # Test data
-    server_name = "test_server"
-    server_config = {
-        "command": "test_command",
-        "args": ["--test"],
-        "env": {"TEST_ENV": "value"}
-    }
-    langchain_tools = []
-    ready_event = asyncio.Event()
-    cleanup_event = asyncio.Event()
-
-    # Create task
-    task = asyncio.create_task(
-        convert_single_mcp_to_langchain_tools(
-            server_name,
-            server_config,
-            langchain_tools,
-            ready_event,
-            cleanup_event
-        )
-    )
-
-    # Wait for ready event
-    await asyncio.wait_for(ready_event.wait(), timeout=1.0)
-
-    # Verify tools were created
-    assert len(langchain_tools) == 1
-    assert isinstance(langchain_tools[0], BaseTool)
-    assert langchain_tools[0].name == "tool1"
-
-    # Trigger cleanup
-    cleanup_event.set()
-    await task
-"""
-
-
-@pytest.mark.asyncio
-async def test_convert_mcp_to_langchain_tools_multiple_servers(
-    mock_stdio_client,
-    mock_client_session
-):
-    server_configs = {
-        "server1": {"command": "cmd1", "args": []},
-        "server2": {"command": "cmd2", "args": []}
-    }
-
-    tools, cleanup = await convert_mcp_to_langchain_tools(server_configs)
-
-    # Verify correct number of tools created
-    assert len(tools) == 2  # One tool per server
-    assert all(isinstance(tool, BaseTool) for tool in tools)
-
-    # Test cleanup
-    await cleanup()
-
-
-"""
-@pytest.mark.asyncio
-async def test_tool_execution(mock_stdio_client, mock_client_session):
-    server_configs = {
-        "test_server": {"command": "test", "args": []}
-    }
-
-    # Mock the tool execution response
-    session = mock_client_session.return_value.__aenter__.return_value
-    session.call_tool.return_value = MagicMock(
-        isError=False,
-        content={"result": "success"}
-    )
-
-    tools, cleanup = await convert_mcp_to_langchain_tools(server_configs)
-
-    # Test tool execution
-    result = await tools[0]._arun(test_param="value")
-    assert result == {"result": "success"}
-
-    # Verify tool was called with correct parameters
-    session.call_tool.assert_called_once_with("tool1", {"test_param": "value"})
-
-    await cleanup()
-"""
-
-
-@pytest.mark.asyncio
-async def test_tool_execution_error(mock_stdio_client, mock_client_session):
-    server_configs = {
-        "test_server": {"command": "test", "args": []}
-    }
-
-    # Mock error response
-    session = mock_client_session.return_value.__aenter__.return_value
-    session.call_tool.return_value = MagicMock(
-        isError=True,
-        content="Error message"
-    )
-
-    tools, cleanup = await convert_mcp_to_langchain_tools(server_configs)
-
-    # Test tool execution error
-    with pytest.raises(Exception):
-        await tools[0]._arun(test_param="value")
-
-    await cleanup()
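For orientation, the removed tests exercise the public entry point `convert_mcp_to_langchain_tools`, which takes a dict of MCP server configs and returns a list of LangChain tools plus an async cleanup callback. Below is a small, hypothetical usage sketch based only on that contract; the "fetch" server configuration is an illustrative placeholder, not part of this package.

```python
import asyncio

# Same import path the removed tests used.
from langchain_mcp_tools.langchain_mcp_tools import convert_mcp_to_langchain_tools


async def main() -> None:
    server_configs = {
        # Hypothetical example: any MCP stdio server config works here.
        "fetch": {"command": "uvx", "args": ["mcp-server-fetch"]},
    }

    # Returns LangChain BaseTool instances plus an async cleanup callback.
    tools, cleanup = await convert_mcp_to_langchain_tools(server_configs)
    try:
        print([tool.name for tool in tools])
        # ... pass `tools` to a LangChain agent here ...
    finally:
        # Awaiting cleanup lets each server's resources be released
        # in the task that created them.
        await cleanup()


if __name__ == "__main__":
    asyncio.run(main())
```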
{langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/LICENSE RENAMED
File without changes
{langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/langchain_mcp_tools/__init__.py RENAMED
File without changes
{langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/langchain_mcp_tools/py.typed RENAMED
File without changes
{langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/langchain_mcp_tools.egg-info/dependency_links.txt RENAMED
File without changes
{langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/langchain_mcp_tools.egg-info/top_level.txt RENAMED
File without changes
{langchain_mcp_tools-0.0.14 → langchain_mcp_tools-0.0.16}/setup.cfg RENAMED
File without changes