mcp-use 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mcp-use might be problematic. Click here for more details.

mcp_use-0.1.0/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 pietrozullo
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
mcp_use-0.1.0/PKG-INFO ADDED
@@ -0,0 +1,287 @@
1
+ Metadata-Version: 2.4
2
+ Name: mcp_use
3
+ Version: 0.1.0
4
+ Summary: Model-Agnostic MCP Library for LLMs
5
+ Home-page: https://github.com/pietrozullo/mcp_use
6
+ Author: Pietro Zullo
7
+ Author-email: pietro.zullo@gmail.com
8
+ Classifier: Development Status :: 3 - Alpha
9
+ Classifier: Intended Audience :: Developers
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Classifier: Operating System :: OS Independent
12
+ Classifier: Programming Language :: Python :: 3
13
+ Classifier: Programming Language :: Python :: 3.11
14
+ Classifier: Programming Language :: Python :: 3.12
15
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
16
+ Requires-Python: >=3.11
17
+ Description-Content-Type: text/markdown
18
+ License-File: LICENSE
19
+ Requires-Dist: mcp
20
+ Requires-Dist: langchain>=0.1.0
21
+ Requires-Dist: langchain-community>=0.0.10
22
+ Requires-Dist: websockets>=12.0
23
+ Requires-Dist: aiohttp>=3.9.0
24
+ Requires-Dist: pydantic>=2.0.0
25
+ Requires-Dist: typing-extensions>=4.8.0
26
+ Requires-Dist: jsonschema-pydantic>=0.1.0
27
+ Requires-Dist: python-dotenv>=1.0.0
28
+ Provides-Extra: dev
29
+ Requires-Dist: pytest>=7.4.0; extra == "dev"
30
+ Requires-Dist: pytest-asyncio>=0.21.0; extra == "dev"
31
+ Requires-Dist: pytest-cov>=4.1.0; extra == "dev"
32
+ Requires-Dist: black>=23.9.0; extra == "dev"
33
+ Requires-Dist: isort>=5.12.0; extra == "dev"
34
+ Requires-Dist: mypy>=1.5.0; extra == "dev"
35
+ Requires-Dist: ruff>=0.1.0; extra == "dev"
36
+ Provides-Extra: anthropic
37
+ Requires-Dist: anthropic>=0.15.0; extra == "anthropic"
38
+ Provides-Extra: openai
39
+ Requires-Dist: openai>=1.10.0; extra == "openai"
40
+ Dynamic: author
41
+ Dynamic: author-email
42
+ Dynamic: classifier
43
+ Dynamic: description
44
+ Dynamic: description-content-type
45
+ Dynamic: home-page
46
+ Dynamic: license-file
47
+ Dynamic: provides-extra
48
+ Dynamic: requires-dist
49
+ Dynamic: requires-python
50
+ Dynamic: summary
51
+
52
+ # Model-Agnostic MCP Library for LLMs
53
+
54
+ A Python library that lets any LLM (Large Language Model) use MCP (Model Context Protocol) tools through a unified interface. The goal is to let developers easily connect any LLM to tools like web browsing, file operations, etc.
55
+
56
+ ## Core Concept
57
+
58
+ - Leverage existing LangChain adapters rather than reinventing them
59
+ - Focus on bridging MCPs and LangChain's tool ecosystem
60
+
61
+ ## Key Components
62
+
63
+ ### Connectors
64
+
65
+ Bridge to MCP implementations:
66
+
67
+ - `stdio.py`: For local MCP processes
68
+ - `websocket.py`: For remote WebSocket MCPs
69
+ - `http.py`: For HTTP API MCPs
70
+
71
+ ### Tool Conversion
72
+
73
+ Convert between MCP and LangChain formats:
74
+
75
+ - Convert MCP tool schemas to formats needed by different LLMs
76
+ - Support OpenAI function calling, Anthropic tool format, etc.
77
+
78
+ ### Session Management
79
+
80
+ Handle connection lifecycle:
81
+
82
+ - Authenticate and initialize MCP connections
83
+ - Discover and register available tools
84
+ - Handle tool calling with proper error management
85
+
86
+ ### Agent Integration
87
+
88
+ Ready-to-use agent implementations:
89
+
90
+ - Pre-configured for MCP tool usage
91
+ - Optimized prompts for tool selection
92
+
93
+ ## Installation
94
+
95
+ ```bash
96
+ pip install mcp_use
97
+ ```
98
+
99
+ Or install from source:
100
+
101
+ ```bash
102
+ git clone https://github.com/pietrozullo/mcp_use.git
103
+ cd mcp_use
104
+ pip install -e .
105
+ ```
106
+
107
+ ## Quick Start
108
+
109
+ Here's a simple example to get you started:
110
+
111
+ ```python
112
+ import asyncio
113
+ from mcp import StdioServerParameters
114
+ from mcp_use import MCPAgent
115
+
116
+ async def main():
117
+ # Create server parameters for stdio connection
118
+ server_params = StdioServerParameters(
119
+ command="npx",
120
+ args=["@playwright/mcp@latest"],
121
+ )
122
+
123
+ # Create a model-agnostic MCP client
124
+ mcp_client = MCPAgent(
125
+ server_params=server_params,
126
+ model_provider="anthropic", # Or "openai"
127
+ model_name="claude-3-7-sonnet-20250219", # Or "gpt-4o" for OpenAI
128
+ temperature=0.7
129
+ )
130
+
131
+ # Initialize the client
132
+ await mcp_client.initialize()
133
+
134
+ # Run a query using the agent with tools
135
+ result = await mcp_client.run_query(
136
+ "Using internet tell me how many people work at OpenAI"
137
+ )
138
+
139
+ print("Result:")
140
+ print(result)
141
+
142
+ # Close the client
143
+ await mcp_client.close()
144
+
145
+ if __name__ == "__main__":
146
+ asyncio.run(main())
147
+ ```
148
+
149
+ ## Simplified Usage
150
+
151
+ You can also use the simplified interface that handles connector lifecycle management automatically:
152
+
153
+ ```python
154
+ import asyncio
155
+ from langchain_openai import ChatOpenAI
156
+ from mcp_use import MCPAgent
157
+ from mcp_use.connectors.stdio import StdioConnector
158
+
159
+ async def main():
160
+ # Create the connector
161
+ connector = StdioConnector(
162
+ command="npx",
163
+ args=["@playwright/mcp@latest"],
164
+ )
165
+
166
+ # Create the LLM
167
+ llm = ChatOpenAI(model="gpt-4o-mini")
168
+
169
+ # Create MCP client
170
+ mcp_client = MCPAgent(connector=connector, llm=llm, max_steps=30)
171
+
172
+ # Run a query - MCPAgent handles connector lifecycle internally
173
+ result = await mcp_client.run(
174
+ "Using internet tell me how many people work at OpenAI",
175
+ # manage_connector=True is the default
176
+ )
177
+
178
+ print("Result:")
179
+ print(result)
180
+
181
+ if __name__ == "__main__":
182
+ asyncio.run(main())
183
+ ```
184
+
185
+ ## Configuration File Support
186
+
187
+ mcp_use supports initialization from configuration files, making it easy to manage and switch between different MCP server setups:
188
+
189
+ ```python
190
+ import asyncio
191
+ from mcp_use import create_session_from_config
192
+
193
+ async def main():
194
+ # Create an MCP session from a config file
195
+ session = create_session_from_config("mcp-config.json")
196
+
197
+ # Initialize the session
198
+ await session.initialize()
199
+
200
+ # Use the session...
201
+
202
+ # Disconnect when done
203
+ await session.disconnect()
204
+
205
+ if __name__ == "__main__":
206
+ asyncio.run(main())
207
+ ```
208
+
209
+ Example configuration file (`mcp-config.json`):
210
+
211
+ ```json
212
+ {
213
+ "mcpServers": {
214
+ "playwright": {
215
+ "command": "npx",
216
+ "args": ["@playwright/mcp@latest", "headless"],
217
+ "env": {
218
+ "PLAYWRIGHT_WS_ENDPOINT": "ws://localhost:41965/"
219
+ }
220
+ }
221
+ }
222
+ }
223
+ ```
224
+
225
+ ## MCPClient for Managing Multiple Servers
226
+
227
+ The `MCPClient` class provides a higher-level abstraction for managing multiple MCP servers from a single client:
228
+
229
+ ```python
230
+ import asyncio
231
+ from langchain_anthropic import ChatAnthropic
232
+ from mcp_use import MCPAgent, MCPClient
233
+
234
+ async def main():
235
+ # Create a client from a config file
236
+ client = MCPClient.from_config_file("mcp-config.json")
237
+
238
+ # Or initialize with a config file path
239
+ # client = MCPClient("mcp-config.json")
240
+
241
+ # Or programmatically add servers
242
+ client.add_server(
243
+ "local-ws",
244
+ {
245
+ "command": "npx",
246
+ "args": ["@playwright/mcp@latest", "headless"]
247
+ }
248
+ )
249
+
250
+ # Create an LLM
251
+ llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")
252
+
253
+ # Create an agent using the client
254
+ agent = MCPAgent(
255
+ llm=llm,
256
+ client=client,
257
+ server_name="playwright", # Optional, uses first server if not specified
258
+ max_steps=30
259
+ )
260
+
261
+ # Run a query
262
+ result = await agent.run("Your query here")
263
+
264
+ # Close all sessions
265
+ await client.close_all_sessions()
266
+
267
+ if __name__ == "__main__":
268
+ asyncio.run(main())
269
+ ```
270
+
271
+ This approach simplifies working with multiple MCP servers and allows for more dynamic configuration management.
272
+
273
+ ## Advanced Usage
274
+
275
+ See the `examples` directory for more advanced usage examples:
276
+
277
+ - `browser_use.py`: Shows how to use MCPClient with configuration files for browser automation
278
+
279
+ ## Requirements
280
+
281
+ - Python 3.11+
282
+ - MCP implementation (like Playwright MCP)
283
+ - LangChain and appropriate model libraries (OpenAI, Anthropic, etc.)
284
+
285
+ ## License
286
+
287
+ MIT
@@ -0,0 +1,236 @@
1
+ # Model-Agnostic MCP Library for LLMs
2
+
3
+ A Python library that lets any LLM (Large Language Model) use MCP (Model Context Protocol) tools through a unified interface. The goal is to let developers easily connect any LLM to tools like web browsing, file operations, etc.
4
+
5
+ ## Core Concept
6
+
7
+ - Leverage existing LangChain adapters rather than reinventing them
8
+ - Focus on bridging MCPs and LangChain's tool ecosystem
9
+
10
+ ## Key Components
11
+
12
+ ### Connectors
13
+
14
+ Bridge to MCP implementations:
15
+
16
+ - `stdio.py`: For local MCP processes
17
+ - `websocket.py`: For remote WebSocket MCPs
18
+ - `http.py`: For HTTP API MCPs
19
+
20
+ ### Tool Conversion
21
+
22
+ Convert between MCP and LangChain formats:
23
+
24
+ - Convert MCP tool schemas to formats needed by different LLMs
25
+ - Support OpenAI function calling, Anthropic tool format, etc.
26
+
27
+ ### Session Management
28
+
29
+ Handle connection lifecycle:
30
+
31
+ - Authenticate and initialize MCP connections
32
+ - Discover and register available tools
33
+ - Handle tool calling with proper error management
34
+
35
+ ### Agent Integration
36
+
37
+ Ready-to-use agent implementations:
38
+
39
+ - Pre-configured for MCP tool usage
40
+ - Optimized prompts for tool selection
41
+
42
+ ## Installation
43
+
44
+ ```bash
45
+ pip install mcp_use
46
+ ```
47
+
48
+ Or install from source:
49
+
50
+ ```bash
51
+ git clone https://github.com/pietrozullo/mcp_use.git
52
+ cd mcp_use
53
+ pip install -e .
54
+ ```
55
+
56
+ ## Quick Start
57
+
58
+ Here's a simple example to get you started:
59
+
60
+ ```python
61
+ import asyncio
62
+ from mcp import StdioServerParameters
63
+ from mcp_use import MCPAgent
64
+
65
+ async def main():
66
+ # Create server parameters for stdio connection
67
+ server_params = StdioServerParameters(
68
+ command="npx",
69
+ args=["@playwright/mcp@latest"],
70
+ )
71
+
72
+ # Create a model-agnostic MCP client
73
+ mcp_client = MCPAgent(
74
+ server_params=server_params,
75
+ model_provider="anthropic", # Or "openai"
76
+ model_name="claude-3-7-sonnet-20250219", # Or "gpt-4o" for OpenAI
77
+ temperature=0.7
78
+ )
79
+
80
+ # Initialize the client
81
+ await mcp_client.initialize()
82
+
83
+ # Run a query using the agent with tools
84
+ result = await mcp_client.run_query(
85
+ "Using internet tell me how many people work at OpenAI"
86
+ )
87
+
88
+ print("Result:")
89
+ print(result)
90
+
91
+ # Close the client
92
+ await mcp_client.close()
93
+
94
+ if __name__ == "__main__":
95
+ asyncio.run(main())
96
+ ```
97
+
98
+ ## Simplified Usage
99
+
100
+ You can also use the simplified interface that handles connector lifecycle management automatically:
101
+
102
+ ```python
103
+ import asyncio
104
+ from langchain_openai import ChatOpenAI
105
+ from mcp_use import MCPAgent
106
+ from mcp_use.connectors.stdio import StdioConnector
107
+
108
+ async def main():
109
+ # Create the connector
110
+ connector = StdioConnector(
111
+ command="npx",
112
+ args=["@playwright/mcp@latest"],
113
+ )
114
+
115
+ # Create the LLM
116
+ llm = ChatOpenAI(model="gpt-4o-mini")
117
+
118
+ # Create MCP client
119
+ mcp_client = MCPAgent(connector=connector, llm=llm, max_steps=30)
120
+
121
+ # Run a query - MCPAgent handles connector lifecycle internally
122
+ result = await mcp_client.run(
123
+ "Using internet tell me how many people work at OpenAI",
124
+ # manage_connector=True is the default
125
+ )
126
+
127
+ print("Result:")
128
+ print(result)
129
+
130
+ if __name__ == "__main__":
131
+ asyncio.run(main())
132
+ ```
133
+
134
+ ## Configuration File Support
135
+
136
+ mcp_use supports initialization from configuration files, making it easy to manage and switch between different MCP server setups:
137
+
138
+ ```python
139
+ import asyncio
140
+ from mcp_use import create_session_from_config
141
+
142
+ async def main():
143
+ # Create an MCP session from a config file
144
+ session = create_session_from_config("mcp-config.json")
145
+
146
+ # Initialize the session
147
+ await session.initialize()
148
+
149
+ # Use the session...
150
+
151
+ # Disconnect when done
152
+ await session.disconnect()
153
+
154
+ if __name__ == "__main__":
155
+ asyncio.run(main())
156
+ ```
157
+
158
+ Example configuration file (`mcp-config.json`):
159
+
160
+ ```json
161
+ {
162
+ "mcpServers": {
163
+ "playwright": {
164
+ "command": "npx",
165
+ "args": ["@playwright/mcp@latest", "headless"],
166
+ "env": {
167
+ "PLAYWRIGHT_WS_ENDPOINT": "ws://localhost:41965/"
168
+ }
169
+ }
170
+ }
171
+ }
172
+ ```
173
+
174
+ ## MCPClient for Managing Multiple Servers
175
+
176
+ The `MCPClient` class provides a higher-level abstraction for managing multiple MCP servers from a single client:
177
+
178
+ ```python
179
+ import asyncio
180
+ from langchain_anthropic import ChatAnthropic
181
+ from mcp_use import MCPAgent, MCPClient
182
+
183
+ async def main():
184
+ # Create a client from a config file
185
+ client = MCPClient.from_config_file("mcp-config.json")
186
+
187
+ # Or initialize with a config file path
188
+ # client = MCPClient("mcp-config.json")
189
+
190
+ # Or programmatically add servers
191
+ client.add_server(
192
+ "local-ws",
193
+ {
194
+ "command": "npx",
195
+ "args": ["@playwright/mcp@latest", "headless"]
196
+ }
197
+ )
198
+
199
+ # Create an LLM
200
+ llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")
201
+
202
+ # Create an agent using the client
203
+ agent = MCPAgent(
204
+ llm=llm,
205
+ client=client,
206
+ server_name="playwright", # Optional, uses first server if not specified
207
+ max_steps=30
208
+ )
209
+
210
+ # Run a query
211
+ result = await agent.run("Your query here")
212
+
213
+ # Close all sessions
214
+ await client.close_all_sessions()
215
+
216
+ if __name__ == "__main__":
217
+ asyncio.run(main())
218
+ ```
219
+
220
+ This approach simplifies working with multiple MCP servers and allows for more dynamic configuration management.
221
+
222
+ ## Advanced Usage
223
+
224
+ See the `examples` directory for more advanced usage examples:
225
+
226
+ - `browser_use.py`: Shows how to use MCPClient with configuration files for browser automation
227
+
228
+ ## Requirements
229
+
230
+ - Python 3.11+
231
+ - MCP implementation (like Playwright MCP)
232
+ - LangChain and appropriate model libraries (OpenAI, Anthropic, etc.)
233
+
234
+ ## License
235
+
236
+ MIT
@@ -0,0 +1,30 @@
1
+ """
2
+ mcp_use - A model-agnostic MCP (Multi-Channel Platform) library for LLMs.
3
+
4
+ This library provides a unified interface for connecting different LLMs
5
+ to MCP tools through existing LangChain adapters.
6
+ """
7
+
8
+ from .agents.mcpagent import MCPAgent
9
+ from .client import MCPClient
10
+ from .config import create_session_from_config, load_config_file
11
+ from .connectors import BaseConnector, HttpConnector, StdioConnector, WebSocketConnector
12
+ from .logging import logger
13
+ from .session import MCPSession
14
+ from .tools.converter import ModelProvider, ToolConverter
15
+
16
+ __version__ = "0.1.0"
17
+ __all__ = [
18
+ "MCPAgent",
19
+ "MCPClient",
20
+ "MCPSession",
21
+ "BaseConnector",
22
+ "StdioConnector",
23
+ "WebSocketConnector",
24
+ "HttpConnector",
25
+ "ModelProvider",
26
+ "ToolConverter",
27
+ "create_session_from_config",
28
+ "load_config_file",
29
+ "logger",
30
+ ]
@@ -0,0 +1,12 @@
1
+ """
2
+ Agent implementations for using MCP tools.
3
+
4
+ This module provides ready-to-use agent implementations
5
+ that are pre-configured for using MCP tools.
6
+ """
7
+
8
+ from .base import BaseAgent
9
+ from .langchain_agent import LangChainAgent
10
+ from .mcpagent import MCPAgent
11
+
12
+ __all__ = ["BaseAgent", "LangChainAgent", "MCPAgent"]
@@ -0,0 +1,63 @@
1
+ """
2
+ Base agent interface for MCP tools.
3
+
4
+ This module provides a base class for agents that use MCP tools.
5
+ """
6
+
7
+ from abc import ABC, abstractmethod
8
+ from typing import Any
9
+
10
+ from ..session import MCPSession
11
+
12
+
13
class BaseAgent(ABC):
    """Abstract interface for agents that drive LLMs with MCP tools.

    Concrete subclasses are responsible for wiring a specific LLM
    integration to the tools exposed through an MCP session: preparing
    the session, running a full query, and executing single steps.
    """

    def __init__(self, session: MCPSession):
        """Create a new agent bound to an MCP session.

        Args:
            session: The MCP session used for tool discovery and calls.
        """
        self.session = session

    @abstractmethod
    async def initialize(self) -> None:
        """Prepare the agent for use.

        Implementations should initialize the MCP session and set up any
        components the agent needs before ``run`` or ``step`` is called.
        """
        ...

    @abstractmethod
    async def run(self, query: str, max_steps: int = 10) -> dict[str, Any]:
        """Execute a query to completion.

        Args:
            query: The query to answer.
            max_steps: Upper bound on the number of steps taken.

        Returns:
            The final result produced by the agent.
        """
        ...

    @abstractmethod
    async def step(
        self, query: str, previous_steps: list[dict[str, Any]] | None = None
    ) -> dict[str, Any]:
        """Execute a single step of the agent loop.

        Args:
            query: The query being processed.
            previous_steps: Results of earlier steps, if any.

        Returns:
            The result of this step.
        """
        ...