mcp-use 0.0.4__tar.gz → 0.0.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mcp-use might be problematic. Click here for more details.

Files changed (39) hide show
  1. mcp_use-0.0.6/.github/workflows/publish.yml +78 -0
  2. {mcp_use-0.0.4 → mcp_use-0.0.6}/PKG-INFO +22 -57
  3. {mcp_use-0.0.4 → mcp_use-0.0.6}/README.md +20 -55
  4. {mcp_use-0.0.4 → mcp_use-0.0.6}/examples/airbnb_mcp.json +1 -1
  5. {mcp_use-0.0.4 → mcp_use-0.0.6}/examples/airbnb_use.py +2 -0
  6. {mcp_use-0.0.4 → mcp_use-0.0.6}/examples/blender_use.py +3 -1
  7. {mcp_use-0.0.4 → mcp_use-0.0.6}/examples/browser_use.py +2 -0
  8. mcp_use-0.0.4/examples/chat_examle.py → mcp_use-0.0.6/examples/chat_example.py +2 -0
  9. mcp_use-0.0.6/examples/filesystem_use.py +58 -0
  10. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/__init__.py +5 -2
  11. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/agents/mcpagent.py +3 -1
  12. mcp_use-0.0.6/mcp_use/agents/prompts/default.py +22 -0
  13. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/config.py +0 -34
  14. {mcp_use-0.0.4 → mcp_use-0.0.6}/pyproject.toml +2 -2
  15. mcp_use-0.0.6/static/mcpusegrass.png +0 -0
  16. mcp_use-0.0.4/.github/workflows/publish.yml +0 -31
  17. mcp_use-0.0.4/mcp_use/agents/prompts/default.py +0 -11
  18. {mcp_use-0.0.4 → mcp_use-0.0.6}/.github/workflows/tests.yml +0 -0
  19. {mcp_use-0.0.4 → mcp_use-0.0.6}/.gitignore +0 -0
  20. {mcp_use-0.0.4 → mcp_use-0.0.6}/.pre-commit-config.yaml +0 -0
  21. {mcp_use-0.0.4 → mcp_use-0.0.6}/LICENSE +0 -0
  22. {mcp_use-0.0.4 → mcp_use-0.0.6}/examples/browser_mcp.json +0 -0
  23. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/agents/__init__.py +0 -0
  24. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/agents/base.py +0 -0
  25. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/agents/langchain_agent.py +0 -0
  26. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/client.py +0 -0
  27. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/connectors/__init__.py +0 -0
  28. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/connectors/base.py +0 -0
  29. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/connectors/http.py +0 -0
  30. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/connectors/stdio.py +0 -0
  31. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/connectors/websocket.py +0 -0
  32. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/logging.py +0 -0
  33. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/session.py +0 -0
  34. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/task_managers/__init__.py +0 -0
  35. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/task_managers/base.py +0 -0
  36. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/task_managers/http.py +0 -0
  37. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/task_managers/stdio.py +0 -0
  38. {mcp_use-0.0.4 → mcp_use-0.0.6}/mcp_use/task_managers/websocket.py +0 -0
  39. {mcp_use-0.0.4 → mcp_use-0.0.6}/tests/unit/test_placeholder.py +0 -0
@@ -0,0 +1,78 @@
1
+ name: Check Version Bump and Publish to PyPI
2
+
3
+ on:
4
+ push:
5
+ branches:
6
+ - main
7
+ paths:
8
+ - 'pyproject.toml'
9
+
10
+ # Required for PyPI trusted publishing
11
+ permissions:
12
+ id-token: write
13
+ contents: write # Required for creating tags and releases
14
+
15
+ jobs:
16
+ check-version-and-publish:
17
+ runs-on: ubuntu-latest
18
+ steps:
19
+ - uses: actions/checkout@v3
20
+ with:
21
+ fetch-depth: 0 # This fetches all history for comparing versions
22
+
23
+ - name: Set up Python
24
+ uses: actions/setup-python@v4
25
+ with:
26
+ python-version: "3.11"
27
+
28
+ - name: Install dependencies
29
+ run: |
30
+ python -m pip install --upgrade pip
31
+ pip install build twine wheel tomli
32
+
33
+ - name: Check for version bump
34
+ id: check-version
35
+ run: |
36
+ # Extract current version directly from pyproject.toml
37
+ # This is more reliable than using importlib.metadata
38
+ CURRENT_VERSION=$(python -c "
39
+ import tomli
40
+ with open('pyproject.toml', 'rb') as f:
41
+ data = tomli.load(f)
42
+ print(data['project']['version'])
43
+ ")
44
+
45
+ echo "Current version: $CURRENT_VERSION"
46
+
47
+ # Check if this version already has a tag
48
+ if git rev-parse "v$CURRENT_VERSION" >/dev/null 2>&1; then
49
+ echo "Version $CURRENT_VERSION already has a tag. Skipping release."
50
+ echo "is_new_version=false" >> $GITHUB_OUTPUT
51
+ else
52
+ echo "New version detected: $CURRENT_VERSION"
53
+ echo "is_new_version=true" >> $GITHUB_OUTPUT
54
+ echo "new_version=$CURRENT_VERSION" >> $GITHUB_OUTPUT
55
+ fi
56
+
57
+ - name: Build package
58
+ if: steps.check-version.outputs.is_new_version == 'true'
59
+ run: |
60
+ python -m build
61
+
62
+ - name: Create Release
63
+ if: steps.check-version.outputs.is_new_version == 'true'
64
+ id: create_release
65
+ uses: actions/create-release@v1
66
+ env:
67
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
68
+ with:
69
+ tag_name: v${{ steps.check-version.outputs.new_version }}
70
+ release_name: Release v${{ steps.check-version.outputs.new_version }}
71
+ draft: false
72
+ prerelease: false
73
+
74
+ - name: Publish to PyPI
75
+ if: steps.check-version.outputs.is_new_version == 'true'
76
+ uses: pypa/gh-action-pypi-publish@release/v1
77
+ with:
78
+ password: ${{ secrets.PYPI_API_TOKEN }}
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
- Name: mcp_use
3
- Version: 0.0.4
2
+ Name: mcp-use
3
+ Version: 0.0.6
4
4
  Summary: MCP Library for LLMs
5
5
  Author-email: Pietro Zullo <pietro.zullo@gmail.com>
6
6
  License: MIT
@@ -38,16 +38,20 @@ Requires-Dist: openai>=1.10.0; extra == 'openai'
38
38
  Description-Content-Type: text/markdown
39
39
 
40
40
  <picture>
41
- <source media="(prefers-color-scheme: dark)" srcset="./static/mcp-use-dark.png">
42
- <source media="(prefers-color-scheme: light)" srcset="./static/mcp-use.png">
43
- <img alt="Shows a black MCP-Use Logo in light color mode and a white one in dark color mode." src="./static/mcp-use.png" width="full">
41
+ <img alt="" src="./static/mcpusegrass.png" width="full">
44
42
  </picture>
45
43
 
46
- <h1 align="center">Use MCPs directly from python 🤖</h1>
44
+ <h1 align="center">Open Source MCP Client Library </h1>
47
45
 
46
+ [![](https://img.shields.io/pypi/dd/mcp_use.svg)](https://pypi.org/project/mcp_use/)
47
+ [![PyPI Downloads](https://img.shields.io/pypi/dm/mcp_use.svg)](https://pypi.org/project/mcp_use/)
48
+ [![PyPI Version](https://img.shields.io/pypi/v/mcp_use.svg)](https://pypi.org/project/mcp_use/)
49
+ [![Python Versions](https://img.shields.io/pypi/pyversions/mcp_use.svg)](https://pypi.org/project/mcp_use/)
50
+ [![License](https://img.shields.io/github/license/pietrozullo/mcp-use)](https://github.com/pietrozullo/mcp-use/blob/main/LICENSE)
51
+ [![Code style: Ruff](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://github.com/astral-sh/ruff)
48
52
  [![GitHub stars](https://img.shields.io/github/stars/pietrozullo/mcp-use?style=social)](https://github.com/pietrozullo/mcp-use/stargazers)
49
53
 
50
- 🌐 MCP-Use is the easiest way to connect any LLM to MCP tools through a unified interface without using closed source or application clients.
54
+ 🌐 MCP-Use is the open source way to connect any LLM to MCP tools and build custom agents that have tool access, without using closed source or application clients.
51
55
 
52
56
  💡 Let developers easily connect any LLM to tools like web browsing, file operations, and more.
53
57
 
@@ -56,14 +60,14 @@ Description-Content-Type: text/markdown
56
60
  With pip:
57
61
 
58
62
  ```bash
59
- pip install mcp_use
63
+ pip install mcp-use
60
64
  ```
61
65
 
62
66
  Or install from source:
63
67
 
64
68
  ```bash
65
- git clone https://github.com/pietrozullo/mcp_use.git
66
- cd mcp_use
69
+ git clone https://github.com/pietrozullo/mcp-use.git
70
+ cd mcp-use
67
71
  pip install -e .
68
72
  ```
69
73
 
@@ -281,51 +285,12 @@ if __name__ == "__main__":
281
285
  asyncio.run(main())
282
286
  ```
283
287
 
284
- # MCPClient for Managing Multiple Servers
285
-
286
- The `MCPClient` class provides a higher-level abstraction for managing multiple MCP servers from a single client:
287
-
288
- ```python
289
- import asyncio
290
- from langchain_anthropic import ChatAnthropic
291
- from mcp_use import MCPAgent, MCPClient
292
-
293
- async def main():
294
- # Create a client from a config file
295
- client = MCPClient.from_config_file("mcp-config.json")
296
-
297
- # Or initialize with a config file path
298
- # client = MCPClient("mcp-config.json")
299
-
300
- # Or programmatically add servers
301
- client.add_server(
302
- "local-ws",
303
- {
304
- "command": "npx",
305
- "args": ["@playwright/mcp@latest", "headless"]
306
- }
307
- )
308
-
309
- # Create an LLM
310
- llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")
311
-
312
- # Create an agent using the client
313
- agent = MCPAgent(
314
- llm=llm,
315
- client=client,
316
- server_name="playwright", # Optional, uses first server if not specified
317
- max_steps=30
318
- )
319
-
320
- # Run a query
321
- result = await agent.run("Your query here")
322
-
323
- # Close all sessions
324
- await client.close_all_sessions()
325
-
326
- if __name__ == "__main__":
327
- asyncio.run(main())
328
- ```
288
+ ## Roadmap
289
+ <ul>
290
+ <li>[ ] Multiple Servers at once </li>
291
+ <li>[ ] Test remote connectors (http, ws)</li>
292
+ <li>[ ] ... </li>
293
+ </ul>
329
294
 
330
295
  ## Contributing
331
296
 
@@ -342,10 +307,10 @@ We love contributions! Feel free to open issues for bugs or feature requests.
342
307
  If you use MCP-Use in your research or project, please cite:
343
308
 
344
309
  ```bibtex
345
- @software{mcp_use2024,
310
+ @software{mcp_use2025,
346
311
  author = {Zullo, Pietro},
347
312
  title = {MCP-Use: MCP Library for Python},
348
- year = {2024},
313
+ year = {2025},
349
314
  publisher = {GitHub},
350
315
  url = {https://github.com/pietrozullo/mcp-use}
351
316
  }
@@ -1,14 +1,18 @@
1
1
  <picture>
2
- <source media="(prefers-color-scheme: dark)" srcset="./static/mcp-use-dark.png">
3
- <source media="(prefers-color-scheme: light)" srcset="./static/mcp-use.png">
4
- <img alt="Shows a black MCP-Use Logo in light color mode and a white one in dark color mode." src="./static/mcp-use.png" width="full">
2
+ <img alt="" src="./static/mcpusegrass.png" width="full">
5
3
  </picture>
6
4
 
7
- <h1 align="center">Use MCPs directly from python 🤖</h1>
5
+ <h1 align="center">Open Source MCP Client Library </h1>
8
6
 
7
+ [![](https://img.shields.io/pypi/dd/mcp_use.svg)](https://pypi.org/project/mcp_use/)
8
+ [![PyPI Downloads](https://img.shields.io/pypi/dm/mcp_use.svg)](https://pypi.org/project/mcp_use/)
9
+ [![PyPI Version](https://img.shields.io/pypi/v/mcp_use.svg)](https://pypi.org/project/mcp_use/)
10
+ [![Python Versions](https://img.shields.io/pypi/pyversions/mcp_use.svg)](https://pypi.org/project/mcp_use/)
11
+ [![License](https://img.shields.io/github/license/pietrozullo/mcp-use)](https://github.com/pietrozullo/mcp-use/blob/main/LICENSE)
12
+ [![Code style: Ruff](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://github.com/astral-sh/ruff)
9
13
  [![GitHub stars](https://img.shields.io/github/stars/pietrozullo/mcp-use?style=social)](https://github.com/pietrozullo/mcp-use/stargazers)
10
14
 
11
- 🌐 MCP-Use is the easiest way to connect any LLM to MCP tools through a unified interface without using closed source or application clients.
15
+ 🌐 MCP-Use is the open source way to connect any LLM to MCP tools and build custom agents that have tool access, without using closed source or application clients.
12
16
 
13
17
  💡 Let developers easily connect any LLM to tools like web browsing, file operations, and more.
14
18
 
@@ -17,14 +21,14 @@
17
21
  With pip:
18
22
 
19
23
  ```bash
20
- pip install mcp_use
24
+ pip install mcp-use
21
25
  ```
22
26
 
23
27
  Or install from source:
24
28
 
25
29
  ```bash
26
- git clone https://github.com/pietrozullo/mcp_use.git
27
- cd mcp_use
30
+ git clone https://github.com/pietrozullo/mcp-use.git
31
+ cd mcp-use
28
32
  pip install -e .
29
33
  ```
30
34
 
@@ -242,51 +246,12 @@ if __name__ == "__main__":
242
246
  asyncio.run(main())
243
247
  ```
244
248
 
245
- # MCPClient for Managing Multiple Servers
246
-
247
- The `MCPClient` class provides a higher-level abstraction for managing multiple MCP servers from a single client:
248
-
249
- ```python
250
- import asyncio
251
- from langchain_anthropic import ChatAnthropic
252
- from mcp_use import MCPAgent, MCPClient
253
-
254
- async def main():
255
- # Create a client from a config file
256
- client = MCPClient.from_config_file("mcp-config.json")
257
-
258
- # Or initialize with a config file path
259
- # client = MCPClient("mcp-config.json")
260
-
261
- # Or programmatically add servers
262
- client.add_server(
263
- "local-ws",
264
- {
265
- "command": "npx",
266
- "args": ["@playwright/mcp@latest", "headless"]
267
- }
268
- )
269
-
270
- # Create an LLM
271
- llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")
272
-
273
- # Create an agent using the client
274
- agent = MCPAgent(
275
- llm=llm,
276
- client=client,
277
- server_name="playwright", # Optional, uses first server if not specified
278
- max_steps=30
279
- )
280
-
281
- # Run a query
282
- result = await agent.run("Your query here")
283
-
284
- # Close all sessions
285
- await client.close_all_sessions()
286
-
287
- if __name__ == "__main__":
288
- asyncio.run(main())
289
- ```
249
+ ## Roadmap
250
+ <ul>
251
+ <li>[ ] Multiple Servers at once </li>
252
+ <li>[ ] Test remote connectors (http, ws)</li>
253
+ <li>[ ] ... </li>
254
+ </ul>
290
255
 
291
256
  ## Contributing
292
257
 
@@ -303,10 +268,10 @@ We love contributions! Feel free to open issues for bugs or feature requests.
303
268
  If you use MCP-Use in your research or project, please cite:
304
269
 
305
270
  ```bibtex
306
- @software{mcp_use2024,
271
+ @software{mcp_use2025,
307
272
  author = {Zullo, Pietro},
308
273
  title = {MCP-Use: MCP Library for Python},
309
- year = {2024},
274
+ year = {2025},
310
275
  publisher = {GitHub},
311
276
  url = {https://github.com/pietrozullo/mcp-use}
312
277
  }
@@ -2,7 +2,7 @@
2
2
  "mcpServers": {
3
3
  "airbnb": {
4
4
  "command": "npx",
5
- "args": ["-y", "@openbnb/mcp-server-airbnb"]
5
+ "args": ["-y", "@openbnb/mcp-server-airbnb", "--ignore-robots-txt"]
6
6
  }
7
7
  }
8
8
  }
@@ -3,6 +3,8 @@ Example demonstrating how to use mcp_use with Airbnb.
3
3
 
4
4
  This example shows how to connect an LLM to Airbnb through MCP tools
5
5
  to perform tasks like searching for accommodations.
6
+
7
+ Special Thanks to https://github.com/openbnb-org/mcp-server-airbnb for the server.
6
8
  """
7
9
 
8
10
  import asyncio
@@ -4,10 +4,12 @@ Blender MCP example for mcp_use.
4
4
  This example demonstrates how to use the mcp_use library with MCPClient
5
5
  to connect an LLM to Blender through MCP tools via WebSocket.
6
6
  The example assumes you have installed the Blender MCP addon from:
7
- https://github.com/gd3kr/BlenderGPT/tree/main/mcp_addon
7
+ https://github.com/ahujasid/blender-mcp
8
8
 
9
9
  Make sure the addon is enabled in Blender preferences and the WebSocket
10
10
  server is running before executing this script.
11
+
12
+ Special thanks to https://github.com/ahujasid/blender-mcp for the server.
11
13
  """
12
14
 
13
15
  import asyncio
@@ -3,6 +3,8 @@ Basic usage example for mcp_use.
3
3
 
4
4
  This example demonstrates how to use the mcp_use library with MCPClient
5
5
  to connect any LLM to MCP tools through a unified interface.
6
+
7
+ Special thanks to https://github.com/microsoft/playwright-mcp for the server.
6
8
  """
7
9
 
8
10
  import asyncio
@@ -3,6 +3,8 @@ Simple chat example using MCPAgent with built-in conversation memory.
3
3
 
4
4
  This example demonstrates how to use the MCPAgent with its built-in
5
5
  conversation history capabilities for better contextual interactions.
6
+
7
+ Special thanks to https://github.com/microsoft/playwright-mcp for the server.
6
8
  """
7
9
 
8
10
  import asyncio
@@ -0,0 +1,58 @@
1
+ """
2
+ Basic usage example for mcp_use.
3
+
4
+ This example demonstrates how to use the mcp_use library with MCPClient
5
+ to connect any LLM to MCP tools through a unified interface.
6
+
7
+ Special Thanks to https://github.com/modelcontextprotocol/servers/tree/main/src/filesystem
8
+ for the server.
9
+ """
10
+
11
+ import asyncio
12
+
13
+ from dotenv import load_dotenv
14
+ from langchain_openai import ChatOpenAI
15
+
16
+ from mcp_use import MCPAgent, MCPClient
17
+
18
+ config = {
19
+ "mcpServers": {
20
+ "filesystem": {
21
+ "command": "npx",
22
+ "args": [
23
+ "-y",
24
+ "@modelcontextprotocol/server-filesystem",
25
+ "/home/pietro/projects/mcp-use/",
26
+ ],
27
+ }
28
+ }
29
+ }
30
+
31
+
32
+ async def main():
33
+ """Run the example using a configuration file."""
34
+ # Load environment variables
35
+ load_dotenv()
36
+
37
+ # Create MCPClient from config file
38
+ client = MCPClient.from_dict(config)
39
+ # Create LLM
40
+ llm = ChatOpenAI(model="gpt-4o")
41
+ # llm = init_chat_model(model="llama-3.1-8b-instant", model_provider="groq")
42
+ # llm = ChatAnthropic(model="claude-3-")
43
+ # llm = ChatGroq(model="llama3-8b-8192")
44
+
45
+ # Create agent with the client
46
+ agent = MCPAgent(llm=llm, client=client, max_steps=30)
47
+
48
+ # Run the query
49
+ result = await agent.run(
50
+ "Hello can you give me a list of files and directories in the current directory",
51
+ max_steps=30,
52
+ )
53
+ print(f"\nResult: {result}")
54
+
55
+
56
+ if __name__ == "__main__":
57
+ # Run the appropriate example
58
+ asyncio.run(main())
@@ -5,14 +5,17 @@ This library provides a unified interface for connecting different LLMs
5
5
  to MCP tools through existing LangChain adapters.
6
6
  """
7
7
 
8
+ from importlib.metadata import version
9
+
8
10
  from .agents.mcpagent import MCPAgent
9
11
  from .client import MCPClient
10
- from .config import create_session_from_config, load_config_file
12
+ from .config import load_config_file
11
13
  from .connectors import BaseConnector, HttpConnector, StdioConnector, WebSocketConnector
12
14
  from .logging import logger
13
15
  from .session import MCPSession
14
16
 
15
- __version__ = "0.0.3"
17
+ __version__ = version("mcp-use")
18
+
16
19
  __all__ = [
17
20
  "MCPAgent",
18
21
  "MCPClient",
@@ -124,7 +124,9 @@ class MCPAgent:
124
124
  # Generate tool descriptions
125
125
  tool_descriptions = []
126
126
  for tool in tools:
127
- description = f"- {tool.name}: {tool.description}"
127
+ # Escape curly braces in the description by doubling them
128
+ # (sometimes e.g. blender mcp they are used in the description)
129
+ description = f"- {tool.name}: {tool.description.replace('{', '{{').replace('}', '}}')}"
128
130
  tool_descriptions.append(description)
129
131
 
130
132
  # Format the system prompt template with tool descriptions
@@ -0,0 +1,22 @@
1
+ DEFAULT_SYSTEM_PROMPT_TEMPLATE = """You are an assistant with access to these tools:
2
+
3
+ {tool_descriptions}
4
+
5
+ Proactively use these tools to:
6
+ - Retrieve and analyze information relevant to user requests
7
+ - Process and transform data in various formats
8
+ - Perform computations and generate insights
9
+ - Execute multi-step workflows by combining tools as needed
10
+ - Interact with external systems when authorized
11
+
12
+ When appropriate, use available tools rather than relying on your built-in knowledge alone.
13
+ Your tools enable you to perform tasks that would otherwise be beyond your capabilities.
14
+
15
+ For optimal assistance:
16
+ 1. Identify when a tool can help address the user's request
17
+ 2. Select the most appropriate tool(s) for the task
18
+ 3. Apply tools in the correct sequence when multiple tools are needed
19
+ 4. Clearly communicate your process and findings
20
+
21
+ Remember that you have real capabilities through your tools - use them confidently when needed.
22
+ """
@@ -8,7 +8,6 @@ import json
8
8
  from typing import Any
9
9
 
10
10
  from .connectors import BaseConnector, HttpConnector, StdioConnector, WebSocketConnector
11
- from .session import MCPSession
12
11
 
13
12
 
14
13
  def load_config_file(filepath: str) -> dict[str, Any]:
@@ -58,36 +57,3 @@ def create_connector_from_config(server_config: dict[str, Any]) -> BaseConnector
58
57
  )
59
58
 
60
59
  raise ValueError("Cannot determine connector type from config")
61
-
62
-
63
- def create_session_from_config(
64
- filepath: str,
65
- server_name: str | None = None,
66
- ) -> MCPSession:
67
- """Create an MCPSession from a configuration file.
68
-
69
- Args:
70
- filepath: Path to the configuration file
71
- server_name: Name of the server to use from config, uses first if None
72
-
73
- Returns:
74
- Configured MCPSession instance
75
- """
76
- config = load_config_file(filepath)
77
-
78
- # Get server config
79
- servers = config.get("mcpServers", {})
80
- if not servers:
81
- raise ValueError("No MCP servers defined in config")
82
-
83
- # If server_name not specified, use the first one
84
- if not server_name:
85
- server_name = next(iter(servers.keys()))
86
-
87
- if server_name not in servers:
88
- raise ValueError(f"Server '{server_name}' not found in config")
89
-
90
- server_config = servers[server_name]
91
- connector = create_connector_from_config(server_config)
92
-
93
- return MCPSession(connector)
@@ -1,6 +1,6 @@
1
1
  [project]
2
- name = "mcp_use"
3
- version = "0.0.4"
2
+ name = "mcp-use"
3
+ version = "0.0.6"
4
4
  description = "MCP Library for LLMs"
5
5
  authors = [
6
6
  {name = "Pietro Zullo", email = "pietro.zullo@gmail.com"}
Binary file
@@ -1,31 +0,0 @@
1
- name: Publish to PyPI
2
-
3
- on:
4
- release:
5
- types: [created]
6
-
7
- # Required for PyPI trusted publishing
8
- permissions:
9
- id-token: write
10
- contents: read
11
-
12
- jobs:
13
- deploy:
14
- runs-on: ubuntu-latest
15
- steps:
16
- - uses: actions/checkout@v3
17
- - name: Set up Python
18
- uses: actions/setup-python@v4
19
- with:
20
- python-version: "3.11"
21
- - name: Install dependencies
22
- run: |
23
- python -m pip install --upgrade pip
24
- pip install build twine wheel
25
- - name: Build package
26
- run: |
27
- python -m build
28
- - name: Publish to PyPI
29
- uses: pypa/gh-action-pypi-publish@release/v1
30
- with:
31
- password: ${{ secrets.PYPI_API_TOKEN }}
@@ -1,11 +0,0 @@
1
- DEFAULT_SYSTEM_PROMPT_TEMPLATE = """You are an assistant with access to these tools:
2
-
3
- {tool_descriptions}
4
-
5
- Proactively use these tools to:
6
- - Find real-time information (weather, news, prices)
7
- - Perform web searches and extract relevant data
8
- - Execute multi-step tasks by combining tools
9
-
10
- You CAN access current information using your tools. Never claim you lack access to real-time data.
11
- """
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes