mcp-use 0.0.3__tar.gz → 0.0.5__tar.gz

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mcp-use might be problematic.

Files changed (48)
  1. mcp_use-0.0.5/.github/workflows/publish.yml +78 -0
  2. mcp_use-0.0.5/.github/workflows/tests.yml +31 -0
  3. mcp_use-0.0.5/.gitignore +124 -0
  4. mcp_use-0.0.5/.pre-commit-config.yaml +24 -0
  5. {mcp_use-0.0.3 → mcp_use-0.0.5}/PKG-INFO +36 -44
  6. {mcp_use-0.0.3 → mcp_use-0.0.5}/README.md +13 -9
  7. mcp_use-0.0.5/examples/airbnb_mcp.json +8 -0
  8. mcp_use-0.0.5/examples/airbnb_use.py +51 -0
  9. mcp_use-0.0.5/examples/blender_use.py +53 -0
  10. mcp_use-0.0.5/examples/browser_mcp.json +11 -0
  11. mcp_use-0.0.5/examples/browser_use.py +46 -0
  12. mcp_use-0.0.5/examples/chat_example.py +80 -0
  13. mcp_use-0.0.5/examples/filesystem_use.py +58 -0
  14. {mcp_use-0.0.3 → mcp_use-0.0.5}/mcp_use/__init__.py +5 -2
  15. {mcp_use-0.0.3 → mcp_use-0.0.5}/mcp_use/agents/langchain_agent.py +46 -6
  16. mcp_use-0.0.5/mcp_use/agents/mcpagent.py +310 -0
  17. mcp_use-0.0.5/mcp_use/agents/prompts/default.py +11 -0
  18. {mcp_use-0.0.3 → mcp_use-0.0.5}/mcp_use/client.py +44 -28
  19. {mcp_use-0.0.3 → mcp_use-0.0.5}/mcp_use/connectors/http.py +91 -7
  20. mcp_use-0.0.5/mcp_use/connectors/stdio.py +188 -0
  21. mcp_use-0.0.5/mcp_use/connectors/websocket.py +245 -0
  22. mcp_use-0.0.5/mcp_use/task_managers/__init__.py +18 -0
  23. mcp_use-0.0.5/mcp_use/task_managers/base.py +151 -0
  24. mcp_use-0.0.5/mcp_use/task_managers/http.py +62 -0
  25. mcp_use-0.0.5/mcp_use/task_managers/stdio.py +73 -0
  26. mcp_use-0.0.5/mcp_use/task_managers/websocket.py +63 -0
  27. mcp_use-0.0.5/pyproject.toml +83 -0
  28. mcp_use-0.0.5/static/ghibli.png +0 -0
  29. mcp_use-0.0.5/tests/unit/test_placeholder.py +16 -0
  30. mcp_use-0.0.3/mcp_use/agents/mcpagent.py +0 -149
  31. mcp_use-0.0.3/mcp_use/connectors/stdio.py +0 -124
  32. mcp_use-0.0.3/mcp_use/connectors/websocket.py +0 -142
  33. mcp_use-0.0.3/mcp_use.egg-info/PKG-INFO +0 -368
  34. mcp_use-0.0.3/mcp_use.egg-info/SOURCES.txt +0 -23
  35. mcp_use-0.0.3/mcp_use.egg-info/dependency_links.txt +0 -1
  36. mcp_use-0.0.3/mcp_use.egg-info/requires.txt +0 -24
  37. mcp_use-0.0.3/mcp_use.egg-info/top_level.txt +0 -1
  38. mcp_use-0.0.3/pyproject.toml +0 -31
  39. mcp_use-0.0.3/setup.cfg +0 -4
  40. mcp_use-0.0.3/setup.py +0 -60
  41. {mcp_use-0.0.3 → mcp_use-0.0.5}/LICENSE +0 -0
  42. {mcp_use-0.0.3 → mcp_use-0.0.5}/mcp_use/agents/__init__.py +0 -0
  43. {mcp_use-0.0.3 → mcp_use-0.0.5}/mcp_use/agents/base.py +0 -0
  44. {mcp_use-0.0.3 → mcp_use-0.0.5}/mcp_use/config.py +0 -0
  45. {mcp_use-0.0.3 → mcp_use-0.0.5}/mcp_use/connectors/__init__.py +0 -0
  46. {mcp_use-0.0.3 → mcp_use-0.0.5}/mcp_use/connectors/base.py +0 -0
  47. {mcp_use-0.0.3 → mcp_use-0.0.5}/mcp_use/logging.py +0 -0
  48. {mcp_use-0.0.3 → mcp_use-0.0.5}/mcp_use/session.py +0 -0
@@ -0,0 +1,78 @@
+ name: Check Version Bump and Publish to PyPI
+
+ on:
+   push:
+     branches:
+       - main
+     paths:
+       - 'pyproject.toml'
+
+ # Required for PyPI trusted publishing
+ permissions:
+   id-token: write
+   contents: write # Required for creating tags and releases
+
+ jobs:
+   check-version-and-publish:
+     runs-on: ubuntu-latest
+     steps:
+       - uses: actions/checkout@v3
+         with:
+           fetch-depth: 0 # This fetches all history for comparing versions
+
+       - name: Set up Python
+         uses: actions/setup-python@v4
+         with:
+           python-version: "3.11"
+
+       - name: Install dependencies
+         run: |
+           python -m pip install --upgrade pip
+           pip install build twine wheel tomli
+
+       - name: Check for version bump
+         id: check-version
+         run: |
+           # Extract current version directly from pyproject.toml
+           # This is more reliable than using importlib.metadata
+           CURRENT_VERSION=$(python -c "
+           import tomli
+           with open('pyproject.toml', 'rb') as f:
+               data = tomli.load(f)
+           print(data['project']['version'])
+           ")
+
+           echo "Current version: $CURRENT_VERSION"
+
+           # Check if this version already has a tag
+           if git rev-parse "v$CURRENT_VERSION" >/dev/null 2>&1; then
+             echo "Version $CURRENT_VERSION already has a tag. Skipping release."
+             echo "is_new_version=false" >> $GITHUB_OUTPUT
+           else
+             echo "New version detected: $CURRENT_VERSION"
+             echo "is_new_version=true" >> $GITHUB_OUTPUT
+             echo "new_version=$CURRENT_VERSION" >> $GITHUB_OUTPUT
+           fi
+
+       - name: Build package
+         if: steps.check-version.outputs.is_new_version == 'true'
+         run: |
+           python -m build
+
+       - name: Create Release
+         if: steps.check-version.outputs.is_new_version == 'true'
+         id: create_release
+         uses: actions/create-release@v1
+         env:
+           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+         with:
+           tag_name: v${{ steps.check-version.outputs.new_version }}
+           release_name: Release v${{ steps.check-version.outputs.new_version }}
+           draft: false
+           prerelease: false
+
+       - name: Publish to PyPI
+         if: steps.check-version.outputs.is_new_version == 'true'
+         uses: pypa/gh-action-pypi-publish@release/v1
+         with:
+           password: ${{ secrets.PYPI_API_TOKEN }}
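
The version-bump gate in this new workflow comes down to two checks: read `project.version` from `pyproject.toml`, then publish only if no `v<version>` git tag exists yet. A minimal local sketch of the same logic, assuming a git checkout with `pyproject.toml` at the repository root and using the standard-library `tomllib` in place of the `tomli` package the workflow installs:

```python
# Local sketch of the publish workflow's version-bump check (assumption: run
# from the root of a git checkout that contains pyproject.toml).
import subprocess
import tomllib  # stdlib on Python 3.11+; the workflow uses the equivalent 'tomli'

with open("pyproject.toml", "rb") as f:
    version = tomllib.load(f)["project"]["version"]

# Mirrors `git rev-parse "v$CURRENT_VERSION"`: the release is new only if the
# tag does not exist yet (rev-parse exits non-zero for unknown refs).
tag_exists = subprocess.run(
    ["git", "rev-parse", f"v{version}"],
    capture_output=True,
).returncode == 0

print(f"version={version} is_new_version={str(not tag_exists).lower()}")
```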
@@ -0,0 +1,31 @@
+ name: Python Tests
+
+ on:
+   push:
+     branches: [main]
+   pull_request:
+     branches: [main]
+
+ jobs:
+   test:
+     runs-on: ubuntu-latest
+     strategy:
+       matrix:
+         python-version: ["3.11", "3.12"]
+
+     steps:
+       - uses: actions/checkout@v3
+       - name: Set up Python ${{ matrix.python-version }}
+         uses: actions/setup-python@v4
+         with:
+           python-version: ${{ matrix.python-version }}
+       - name: Install dependencies
+         run: |
+           python -m pip install --upgrade pip
+           pip install .[dev,anthropic,openai]
+       - name: Lint with ruff
+         run: |
+           ruff check .
+       - name: Test with pytest
+         run: |
+           pytest
@@ -0,0 +1,124 @@
+
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[cod]
+ *$py.class
+
+ # C extensions
+ *.so
+
+ # Distribution / packaging
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+
+ # PyInstaller
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ .hypothesis/
+ .pytest_cache/
+
+ # Translations
+ *.mo
+ *.pot
+
+ # Django stuff:
+ *.log
+ local_settings.py
+ db.sqlite3
+ db.sqlite3-journal
+
+ # Flask stuff:
+ instance/
+ .webassets-cache
+
+ # Scrapy stuff:
+ .scrapy
+
+ # Sphinx documentation
+ docs/_build/
+
+ # PyBuilder
+ target/
+
+ # Jupyter Notebook
+ .ipynb_checkpoints
+
+ # IPython
+ profile_default/
+ ipython_config.py
+
+ # pyenv
+ .python-version
+
+ # pipenv
+ Pipfile.lock
+
+ # poetry
+ poetry.lock
+
+ # Environment variables
+ .env
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+
+ # Spyder project settings
+ .spyderproject
+ .spyproject
+
+ # Rope project settings
+ .ropeproject
+
+ # mkdocs documentation
+ /site
+
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+
+ # Pyre type checker
+ .pyre/
+
+ # VS Code
+ .vscode/
+ *.code-workspace
+
+ # PyCharm
+ .idea/
+ *.iml
+
+ # macOS
+ .DS_Store
@@ -0,0 +1,24 @@
+ repos:
+   - repo: https://github.com/astral-sh/ruff-pre-commit
+     # Ruff version.
+     rev: v0.3.2
+     hooks:
+       - id: ruff
+         args: [--fix, --exit-non-zero-on-fix, --config=pyproject.toml]
+         types: [python]
+       - id: ruff-format
+         args: [--config=pyproject.toml]
+         types: [python]
+
+   - repo: https://github.com/pre-commit/pre-commit-hooks
+     rev: v4.5.0
+     hooks:
+       - id: trailing-whitespace
+       - id: end-of-file-fixer
+       - id: check-yaml
+       - id: check-added-large-files
+       - id: debug-statements
+
+ # Define configuration for the Python checks
+ default_language_version:
+   python: python3.11
@@ -1,10 +1,10 @@
  Metadata-Version: 2.4
- Name: mcp_use
- Version: 0.0.3
- Summary: Model-Agnostic MCP Library for LLMs
- Home-page: https://github.com/pietrozullo/mcp_use
- Author: Pietro Zullo
- Author-email: pietro.zullo@gmail.com
+ Name: mcp-use
+ Version: 0.0.5
+ Summary: MCP Library for LLMs
+ Author-email: Pietro Zullo <pietro.zullo@gmail.com>
+ License: MIT
+ License-File: LICENSE
  Classifier: Development Status :: 3 - Alpha
  Classifier: Intended Audience :: Developers
  Classifier: License :: OSI Approved :: MIT License
@@ -14,52 +14,44 @@ Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Topic :: Software Development :: Libraries :: Python Modules
  Requires-Python: >=3.11
- Description-Content-Type: text/markdown
- License-File: LICENSE
- Requires-Dist: mcp
- Requires-Dist: langchain>=0.1.0
- Requires-Dist: langchain-community>=0.0.10
- Requires-Dist: websockets>=12.0
  Requires-Dist: aiohttp>=3.9.0
- Requires-Dist: pydantic>=2.0.0
- Requires-Dist: typing-extensions>=4.8.0
  Requires-Dist: jsonschema-pydantic>=0.1.0
+ Requires-Dist: langchain-community>=0.0.10
+ Requires-Dist: langchain>=0.1.0
+ Requires-Dist: mcp
+ Requires-Dist: pydantic>=2.0.0
  Requires-Dist: python-dotenv>=1.0.0
- Provides-Extra: dev
- Requires-Dist: pytest>=7.4.0; extra == "dev"
- Requires-Dist: pytest-asyncio>=0.21.0; extra == "dev"
- Requires-Dist: pytest-cov>=4.1.0; extra == "dev"
- Requires-Dist: black>=23.9.0; extra == "dev"
- Requires-Dist: isort>=5.12.0; extra == "dev"
- Requires-Dist: mypy>=1.5.0; extra == "dev"
- Requires-Dist: ruff>=0.1.0; extra == "dev"
+ Requires-Dist: typing-extensions>=4.8.0
+ Requires-Dist: websockets>=12.0
  Provides-Extra: anthropic
- Requires-Dist: anthropic>=0.15.0; extra == "anthropic"
+ Requires-Dist: anthropic>=0.15.0; extra == 'anthropic'
+ Provides-Extra: dev
+ Requires-Dist: black>=23.9.0; extra == 'dev'
+ Requires-Dist: isort>=5.12.0; extra == 'dev'
+ Requires-Dist: mypy>=1.5.0; extra == 'dev'
+ Requires-Dist: pytest-asyncio>=0.21.0; extra == 'dev'
+ Requires-Dist: pytest-cov>=4.1.0; extra == 'dev'
+ Requires-Dist: pytest>=7.4.0; extra == 'dev'
+ Requires-Dist: ruff>=0.1.0; extra == 'dev'
  Provides-Extra: openai
- Requires-Dist: openai>=1.10.0; extra == "openai"
- Dynamic: author
- Dynamic: author-email
- Dynamic: classifier
- Dynamic: description
- Dynamic: description-content-type
- Dynamic: home-page
- Dynamic: license-file
- Dynamic: provides-extra
- Dynamic: requires-dist
- Dynamic: requires-python
- Dynamic: summary
+ Requires-Dist: openai>=1.10.0; extra == 'openai'
+ Description-Content-Type: text/markdown

  <picture>
- <source media="(prefers-color-scheme: dark)" srcset="./static/mcp-use-dark.png">
- <source media="(prefers-color-scheme: light)" srcset="./static/mcp-use.png">
- <img alt="Shows a black MCP-Use Logo in light color mode and a white one in dark color mode." src="./static/mcp-use.png" width="full">
+ <img alt="" src="./static/ghibli.png" width="full">
  </picture>

- <h1 align="center">Model-Agnostic MCP Library for LLMs 🤖</h1>
+ <h1 align="center">Open Source MCP CLient Library </h1>

+ [![](https://img.shields.io/pypi/dd/mcp_use.svg)](https://pypi.org/project/mcp_use/)
+ [![PyPI Downloads](https://img.shields.io/pypi/dm/mcp_use.svg)](https://pypi.org/project/mcp_use/)
+ [![PyPI Version](https://img.shields.io/pypi/v/mcp_use.svg)](https://pypi.org/project/mcp_use/)
+ [![Python Versions](https://img.shields.io/pypi/pyversions/mcp_use.svg)](https://pypi.org/project/mcp_use/)
+ [![License](https://img.shields.io/github/license/pietrozullo/mcp-use)](https://github.com/pietrozullo/mcp-use/blob/main/LICENSE)
+ [![Code style: Ruff](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://github.com/astral-sh/ruff)
  [![GitHub stars](https://img.shields.io/github/stars/pietrozullo/mcp-use?style=social)](https://github.com/pietrozullo/mcp-use/stargazers)

- 🌐 MCP-Use is the easiest way to connect any LLM to MCP tools through a unified interface without using closed source or application clients.
+ 🌐 MCP-Use is the open source way to connect any LLM to MCP tools and build custom agents that have tool access, without using closed source or application clients.

  💡 Let developers easily connect any LLM to tools like web browsing, file operations, and more.

@@ -68,14 +60,14 @@ Dynamic: summary
  With pip:

  ```bash
- pip install mcp_use
+ pip install mcp-use
  ```

  Or install from source:

  ```bash
- git clone https://github.com/pietrozullo/mcp_use.git
- cd mcp_use
+ git clone https://github.com/pietrozullo/mcp-use.git
+ cd mcp-use
  pip install -e .
  ```

@@ -356,7 +348,7 @@ If you use MCP-Use in your research or project, please cite:
  ```bibtex
  @software{mcp_use2024,
  author = {Zullo, Pietro},
- title = {MCP-Use: Model-Agnostic MCP Library for LLMs},
+ title = {MCP-Use: MCP Library for Python},
  year = {2024},
  publisher = {GitHub},
  url = {https://github.com/pietrozullo/mcp-use}
@@ -1,14 +1,18 @@
  <picture>
- <source media="(prefers-color-scheme: dark)" srcset="./static/mcp-use-dark.png">
- <source media="(prefers-color-scheme: light)" srcset="./static/mcp-use.png">
- <img alt="Shows a black MCP-Use Logo in light color mode and a white one in dark color mode." src="./static/mcp-use.png" width="full">
+ <img alt="" src="./static/ghibli.png" width="full">
  </picture>

- <h1 align="center">Model-Agnostic MCP Library for LLMs 🤖</h1>
+ <h1 align="center">Open Source MCP CLient Library </h1>

+ [![](https://img.shields.io/pypi/dd/mcp_use.svg)](https://pypi.org/project/mcp_use/)
+ [![PyPI Downloads](https://img.shields.io/pypi/dm/mcp_use.svg)](https://pypi.org/project/mcp_use/)
+ [![PyPI Version](https://img.shields.io/pypi/v/mcp_use.svg)](https://pypi.org/project/mcp_use/)
+ [![Python Versions](https://img.shields.io/pypi/pyversions/mcp_use.svg)](https://pypi.org/project/mcp_use/)
+ [![License](https://img.shields.io/github/license/pietrozullo/mcp-use)](https://github.com/pietrozullo/mcp-use/blob/main/LICENSE)
+ [![Code style: Ruff](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://github.com/astral-sh/ruff)
  [![GitHub stars](https://img.shields.io/github/stars/pietrozullo/mcp-use?style=social)](https://github.com/pietrozullo/mcp-use/stargazers)

- 🌐 MCP-Use is the easiest way to connect any LLM to MCP tools through a unified interface without using closed source or application clients.
+ 🌐 MCP-Use is the open source way to connect any LLM to MCP tools and build custom agents that have tool access, without using closed source or application clients.

  💡 Let developers easily connect any LLM to tools like web browsing, file operations, and more.

@@ -17,14 +21,14 @@
  With pip:

  ```bash
- pip install mcp_use
+ pip install mcp-use
  ```

  Or install from source:

  ```bash
- git clone https://github.com/pietrozullo/mcp_use.git
- cd mcp_use
+ git clone https://github.com/pietrozullo/mcp-use.git
+ cd mcp-use
  pip install -e .
  ```

@@ -305,7 +309,7 @@ If you use MCP-Use in your research or project, please cite:
  ```bibtex
  @software{mcp_use2024,
  author = {Zullo, Pietro},
- title = {MCP-Use: Model-Agnostic MCP Library for LLMs},
+ title = {MCP-Use: MCP Library for Python},
  year = {2024},
  publisher = {GitHub},
  url = {https://github.com/pietrozullo/mcp-use}
@@ -0,0 +1,8 @@
+ {
+   "mcpServers": {
+     "airbnb": {
+       "command": "npx",
+       "args": ["-y", "@openbnb/mcp-server-airbnb", "--ignore-robots-txt"]
+     }
+   }
+ }
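
This new config file uses the standard MCP server-launch layout: each entry under `mcpServers` names a server and gives the `command` and `args` the client spawns it with. As a rough sketch (not part of this release's files), the same server could also be declared inline with `MCPClient.from_dict`, the constructor the Blender example later in this diff uses:

```python
# Hypothetical inline equivalent of airbnb_mcp.json, assuming from_dict accepts
# the same dictionary shape as the JSON config files.
from mcp_use import MCPClient

config = {
    "mcpServers": {
        "airbnb": {
            "command": "npx",
            "args": ["-y", "@openbnb/mcp-server-airbnb", "--ignore-robots-txt"],
        }
    }
}
client = MCPClient.from_dict(config)
```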
@@ -0,0 +1,51 @@
+ """
+ Example demonstrating how to use mcp_use with Airbnb.
+
+ This example shows how to connect an LLM to Airbnb through MCP tools
+ to perform tasks like searching for accommodations.
+
+ Special Thanks to https://github.com/openbnb-org/mcp-server-airbnb for the server.
+ """
+
+ import asyncio
+ import os
+
+ from dotenv import load_dotenv
+ from langchain_anthropic import ChatAnthropic
+
+ from mcp_use import MCPAgent, MCPClient
+
+
+ async def run_airbnb_example():
+     """Run an example using Airbnb MCP server."""
+     # Load environment variables
+     load_dotenv()
+
+     # Create MCPClient with Airbnb configuration
+     client = MCPClient.from_config_file(os.path.join(os.path.dirname(__file__), "airbnb_mcp.json"))
+     # Create LLM - you can choose between different models
+     llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")
+     # Alternative models:
+     # llm = init_chat_model(model="llama-3.1-8b-instant", model_provider="groq")
+     # llm = ChatOpenAI(model="gpt-4o")
+
+     # Create agent with the client
+     agent = MCPAgent(llm=llm, client=client, max_steps=30)
+
+     try:
+         # Run a query to search for accommodations
+         result = await agent.run(
+             "Find me a nice place to stay in Barcelona for 2 adults "
+             "for a week in August. I prefer places with a pool and "
+             "good reviews. Show me the top 3 options.",
+             max_steps=30,
+         )
+         print(f"\nResult: {result}")
+     finally:
+         # Ensure we clean up resources properly
+         if client.sessions:
+             await client.close_all_sessions()
+
+
+ if __name__ == "__main__":
+     asyncio.run(run_airbnb_example())
@@ -0,0 +1,53 @@
+ """
+ Blender MCP example for mcp_use.
+
+ This example demonstrates how to use the mcp_use library with MCPClient
+ to connect an LLM to Blender through MCP tools via WebSocket.
+ The example assumes you have installed the Blender MCP addon from:
+ https://github.com/ahujasid/blender-mcp
+
+ Make sure the addon is enabled in Blender preferences and the WebSocket
+ server is running before executing this script.
+
+ Special thanks to https://github.com/ahujasid/blender-mcp for the server.
+ """
+
+ import asyncio
+
+ from dotenv import load_dotenv
+ from langchain_anthropic import ChatAnthropic
+
+ from mcp_use import MCPAgent, MCPClient
+
+
+ async def run_blender_example():
+     """Run the Blender MCP example."""
+     # Load environment variables
+     load_dotenv()
+
+     # Create MCPClient with Blender MCP configuration
+     config = {"mcpServers": {"blender": {"command": "uvx", "args": ["blender-mcp"]}}}
+     client = MCPClient.from_dict(config)
+
+     # Create LLM
+     llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")
+
+     # Create agent with the client
+     agent = MCPAgent(llm=llm, client=client, max_steps=30)
+
+     try:
+         # Run the query
+         result = await agent.run(
+             "Create an inflatable cube with soft material and a plane as ground.",
+             max_steps=30,
+         )
+         print(f"\nResult: {result}")
+     finally:
+         # Ensure we clean up resources properly
+         if client.sessions:
+             await client.close_all_sessions()
+
+
+ if __name__ == "__main__":
+     # Run the Blender example
+     asyncio.run(run_blender_example())
@@ -0,0 +1,11 @@
+ {
+   "mcpServers": {
+     "playwright": {
+       "command": "npx",
+       "args": ["@playwright/mcp@latest"],
+       "env": {
+         "DISPLAY": ":1"
+       }
+     }
+   }
+ }
@@ -0,0 +1,46 @@
+ """
+ Basic usage example for mcp_use.
+
+ This example demonstrates how to use the mcp_use library with MCPClient
+ to connect any LLM to MCP tools through a unified interface.
+
+ Special thanks to https://github.com/microsoft/playwright-mcp for the server.
+ """
+
+ import asyncio
+ import os
+
+ from dotenv import load_dotenv
+ from langchain_openai import ChatOpenAI
+
+ from mcp_use import MCPAgent, MCPClient
+
+
+ async def main():
+     """Run the example using a configuration file."""
+     # Load environment variables
+     load_dotenv()
+
+     # Create MCPClient from config file
+     client = MCPClient.from_config_file(os.path.join(os.path.dirname(__file__), "browser_mcp.json"))
+
+     # Create LLM
+     llm = ChatOpenAI(model="gpt-4o")
+     # llm = init_chat_model(model="llama-3.1-8b-instant", model_provider="groq")
+     # llm = ChatAnthropic(model="claude-3-")
+     # llm = ChatGroq(model="llama3-8b-8192")
+
+     # Create agent with the client
+     agent = MCPAgent(llm=llm, client=client, max_steps=30)
+
+     # Run the query
+     result = await agent.run(
+         "Find the best restaurant in San Francisco USING GOOGLE SEARCH",
+         max_steps=30,
+     )
+     print(f"\nResult: {result}")
+
+
+ if __name__ == "__main__":
+     # Run the appropriate example
+     asyncio.run(main())
@@ -0,0 +1,80 @@
+ """
+ Simple chat example using MCPAgent with built-in conversation memory.
+
+ This example demonstrates how to use the MCPAgent with its built-in
+ conversation history capabilities for better contextual interactions.
+
+ Special thanks to https://github.com/microsoft/playwright-mcp for the server.
+ """
+
+ import asyncio
+
+ from dotenv import load_dotenv
+ from langchain_openai import ChatOpenAI
+
+ from mcp_use import MCPAgent, MCPClient
+
+
+ async def run_memory_chat():
+     """Run a chat using MCPAgent's built-in conversation memory."""
+     # Load environment variables for API keys
+     load_dotenv()
+
+     # Config file path - change this to your config file
+     config_file = "examples/browser_mcp.json"
+
+     print("Initializing chat...")
+
+     # Create MCP client and agent with memory enabled
+     client = MCPClient.from_config_file(config_file)
+     llm = ChatOpenAI(model="gpt-4o-mini")
+
+     # Create agent with memory_enabled=True
+     agent = MCPAgent(
+         llm=llm,
+         client=client,
+         max_steps=15,
+         memory_enabled=True,  # Enable built-in conversation memory
+     )
+
+     print("\n===== Interactive MCP Chat =====")
+     print("Type 'exit' or 'quit' to end the conversation")
+     print("Type 'clear' to clear conversation history")
+     print("==================================\n")
+
+     try:
+         # Main chat loop
+         while True:
+             # Get user input
+             user_input = input("\nYou: ")
+
+             # Check for exit command
+             if user_input.lower() in ["exit", "quit"]:
+                 print("Ending conversation...")
+                 break
+
+             # Check for clear history command
+             if user_input.lower() == "clear":
+                 agent.clear_conversation_history()
+                 print("Conversation history cleared.")
+                 continue
+
+             # Get response from agent
+             print("\nAssistant: ", end="", flush=True)
+
+             try:
+                 # Run the agent with the user input (memory handling is automatic)
+                 response = await agent.run(user_input)
+                 print(response)
+
+             except Exception as e:
+                 print(f"\nError: {e}")
+
+     finally:
+         # Clean up
+         if client and client.sessions:
+             await client.close_all_sessions()
+
+
+ if __name__ == "__main__":
+     asyncio.run(run_memory_chat())
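
The chat example relies on the two memory additions that appear in this release: `memory_enabled=True` keeps prior turns inside the agent, and `clear_conversation_history()` resets them. A non-interactive sketch of the same behaviour, assuming the browser config shipped above; the model name and queries are illustrative only:

```python
# Minimal sketch of MCPAgent's built-in memory, based on chat_example.py above.
import asyncio

from langchain_openai import ChatOpenAI

from mcp_use import MCPAgent, MCPClient


async def memory_demo():
    client = MCPClient.from_config_file("examples/browser_mcp.json")
    agent = MCPAgent(
        llm=ChatOpenAI(model="gpt-4o-mini"),
        client=client,
        max_steps=15,
        memory_enabled=True,  # keep conversation history between run() calls
    )
    try:
        # With memory enabled, the second query can refer back to the first.
        print(await agent.run("Search for the three best pizzerias in Naples."))
        print(await agent.run("Which of those is closest to the main train station?"))
        # Reset the built-in history before an unrelated task.
        agent.clear_conversation_history()
    finally:
        if client.sessions:
            await client.close_all_sessions()


if __name__ == "__main__":
    asyncio.run(memory_demo())
```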