mcp-chat 0.0.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mcp_chat-0.0.1/.env.template +8 -0
- mcp_chat-0.0.1/.gitignore +10 -0
- mcp_chat-0.0.1/.python-version +1 -0
- mcp_chat-0.0.1/LICENSE +21 -0
- mcp_chat-0.0.1/Makefile +56 -0
- mcp_chat-0.0.1/PKG-INFO +151 -0
- mcp_chat-0.0.1/README.md +130 -0
- mcp_chat-0.0.1/llm_mcp_config.json5 +119 -0
- mcp_chat-0.0.1/pyproject.toml +43 -0
- mcp_chat-0.0.1/src/mcp_chat/__init__.py +3 -0
- mcp_chat-0.0.1/src/mcp_chat/cli_chat.py +270 -0
- mcp_chat-0.0.1/src/mcp_chat/config_loader.py +77 -0
- mcp_chat-0.0.1/uv.lock +1677 -0
mcp_chat-0.0.1/.python-version
ADDED
@@ -0,0 +1 @@
3.11
mcp_chat-0.0.1/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 hideya kawahara

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
mcp_chat-0.0.1/Makefile
ADDED
@@ -0,0 +1,56 @@
# NOTES:
# - The command lines (recipe lines) must start with a TAB character.
# - Each command line runs in a separate shell without .ONESHELL:
.PHONY: install start start-v start-h build clean
.ONESHELL:

.venv:
	uv venv

install: .venv
	uv pip install .

start:
	uv run src/mcp_chat/cli_chat.py

start-v:
	uv run src/mcp_chat/cli_chat.py -v

start-h:
	uv run src/mcp_chat/cli_chat.py -h

clean:
	rm -rf dist

cleanall:
	git clean -fdxn -e .env
	@read -p 'OK?'
	git clean -fdx -e .env

build: clean
	uv build
	@echo
	uvx twine check dist/*

prep-publish: build
	# set PYPI_API_KEY from .env
	$(eval export $(shell grep '^PYPI_API_KEY=' .env ))

	# check if PYPI_API_KEY is set
	@if [ -z "$$PYPI_API_KEY" ]; then \
		echo "Error: PYPI_API_KEY environment variable is not set"; \
		exit 1; \
	fi

publish: prep-publish
	uvx twine upload \
		--verbose \
		--repository-url https://upload.pypi.org/legacy/ dist/* \
		--password ${PYPI_API_KEY}

test-publish: prep-publish
	tar tzf dist/*.tar.gz
	@echo
	unzip -l dist/*.whl
	@echo
	uvx twine check dist/*
mcp_chat-0.0.1/PKG-INFO
ADDED
@@ -0,0 +1,151 @@
Metadata-Version: 2.4
Name: mcp-chat
Version: 0.0.1
Summary: Simple CLI MCP Client to quickly test and explore MCP servers from the command line
Project-URL: Bug Tracker, https://github.com/hideya/mcp-client-langchain-py/issues
Project-URL: Source Code, https://github.com/hideya/mcp-client-langchain-py
License-File: LICENSE
Keywords: cli,client,explore,langchain,mcp,model-context-protocol,python,quick,simple,test,tools,try
Requires-Python: >=3.11
Requires-Dist: langchain-anthropic>=0.3.1
Requires-Dist: langchain-google-genai>=2.1.5
Requires-Dist: langchain-groq>=0.2.3
Requires-Dist: langchain-mcp-tools>=0.2.7
Requires-Dist: langchain-openai>=0.3.0
Requires-Dist: langchain>=0.3.26
Requires-Dist: langgraph>=0.5.0
Requires-Dist: pyjson5>=1.6.8
Requires-Dist: python-dotenv>=1.0.1
Requires-Dist: websockets>=15.0.1
Description-Content-Type: text/markdown

# Simple CLI MCP Client Using LangChain / Python [MIT License](https://github.com/hideya/langchain-mcp-tools-py/blob/main/LICENSE)

This is a simple [Model Context Protocol (MCP)](https://modelcontextprotocol.io/) client
intended for trying out MCP servers via a command-line interface.

When testing LLMs and MCP servers, their settings can be conveniently configured via a configuration file such as the following:

```json5
{
  "llm": {
    "model_provider": "openai",
    "model": "gpt-4o-mini",
    // "model_provider": "anthropic",
    // "model": "claude-3-5-haiku-latest",
    // "model_provider": "google_genai",
    // "model": "gemini-2.0-flash",
  },

  "mcp_servers": {
    "fetch": {
      "command": "uvx",
      "args": [ "mcp-server-fetch" ]
    },

    "weather": {
      "command": "npx",
      "args": [ "-y", "@h1deya/mcp-server-weather" ]
    },

    // Auto-detection: tries Streamable HTTP first, falls back to SSE
    "remote-mcp-server": {
      "url": "https://${SERVER_HOST}:${SERVER_PORT}/..."
    },

    // Example of authentication via Authorization header
    "github": {
      "type": "http", // recommended to specify the protocol explicitly when authentication is used
      "url": "https://api.githubcopilot.com/mcp/",
      "headers": {
        "Authorization": "Bearer ${GITHUB_PERSONAL_ACCESS_TOKEN}"
      }
    },
  }
}
```

It leverages the [LangChain ReAct Agent](https://langchain-ai.github.io/langgraph/reference/agents/) and
the utility function `convert_mcp_to_langchain_tools()` from
[`langchain_mcp_tools`](https://pypi.org/project/langchain-mcp-tools/).
This function handles parallel initialization of the specified MCP servers
and converts their available tools into a list of LangChain-compatible tools
([list[BaseTool]](https://python.langchain.com/api_reference/core/tools/langchain_core.tools.base.BaseTool.html#langchain_core.tools.base.BaseTool)).

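The packaged source (`src/mcp_chat/cli_chat.py`) is not included in this diff, but the flow described above can be sketched as follows, assuming the usage documented for `langchain_mcp_tools` (the async `convert_mcp_to_langchain_tools()` returning a `(tools, cleanup)` pair); the server entry, model, and query here are illustrative only:

```python
# Minimal sketch, not the package's actual implementation:
# convert an MCP server config into LangChain tools and run a ReAct agent.
import asyncio

from langchain.chat_models import init_chat_model
from langchain_mcp_tools import convert_mcp_to_langchain_tools
from langgraph.prebuilt import create_react_agent


async def main() -> None:
    mcp_servers = {
        "fetch": {"command": "uvx", "args": ["mcp-server-fetch"]},
    }

    # Initialize all configured MCP servers in parallel and collect
    # their tools as LangChain-compatible BaseTool instances.
    tools, cleanup = await convert_mcp_to_langchain_tools(mcp_servers)
    try:
        llm = init_chat_model("gpt-4o-mini", model_provider="openai")
        agent = create_react_agent(llm, tools)
        result = await agent.ainvoke(
            {"messages": [("user", "Read the news headlines on bbc.com")]}
        )
        print(result["messages"][-1].content)
    finally:
        await cleanup()  # shut down the spawned MCP server processes


asyncio.run(main())
```
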
This client supports local (stdio) MCP servers as well as
remote (Streamable HTTP / SSE / WebSocket) MCP servers,
which are accessible via a simple URL plus optional headers for authentication and other purposes.

This client only supports text results of MCP tool calls and disregards other result types.

For the convenience of debugging MCP servers, this client prints local (stdio) MCP server logs to the console.

LLMs from Anthropic, OpenAI, and Google (GenAI) are currently supported.

A TypeScript version of this MCP client is available
[here](https://github.com/hideya/mcp-client-langchain-ts).

## Prerequisites

- Python 3.11+
- [optional] [`uv` (`uvx`)](https://docs.astral.sh/uv/getting-started/installation/)
  installed to run Python package-based MCP servers
- [optional] [npm 7+ (`npx`)](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)
  to run Node.js package-based MCP servers
- API keys from [Anthropic](https://console.anthropic.com/settings/keys),
  [OpenAI](https://platform.openai.com/api-keys), and/or
  [Groq](https://console.groq.com/keys),
  as needed

## Setup

1. Install dependencies:
   ```bash
   make install
   ```

2. Set up API keys:
   ```bash
   cp .env.template .env
   ```
   - Update `.env` as needed.
   - `.gitignore` is configured to ignore `.env`
     to prevent accidental commits of the credentials.

3. Configure the LLM and MCP server settings in `llm_mcp_config.json5` as needed.

   - [The configuration file format](https://github.com/hideya/mcp-client-langchain-ts/blob/main/llm_mcp_config.json5)
     for MCP servers follows the same structure as
     [Claude for Desktop](https://modelcontextprotocol.io/quickstart/user),
     with one difference: the key name `mcpServers` has been changed
     to `mcp_servers` to follow the snake_case convention
     commonly used in JSON configuration files.
   - The file format is [JSON5](https://json5.org/),
     where comments and trailing commas are allowed.
   - The format is further extended to replace `${...}` notations
     with the values of the corresponding environment variables.
   - Keep all the credentials and private info in the `.env` file
     and refer to them with the `${...}` notation as needed.

## Usage

Run the app:
```bash
make start
```
It takes a while on the first run.

Run in verbose mode:
```bash
make start-v
```

See command-line options:
```bash
make start-h
```

At the prompt, you can simply press Enter to use example queries that perform MCP server tool invocations.

Example queries can be configured in `llm_mcp_config.json5`.
mcp_chat-0.0.1/README.md
ADDED
@@ -0,0 +1,130 @@
# Simple CLI MCP Client Using LangChain / Python [MIT License](https://github.com/hideya/langchain-mcp-tools-py/blob/main/LICENSE)

This is a simple [Model Context Protocol (MCP)](https://modelcontextprotocol.io/) client
intended for trying out MCP servers via a command-line interface.

When testing LLMs and MCP servers, their settings can be conveniently configured via a configuration file such as the following:

```json5
{
  "llm": {
    "model_provider": "openai",
    "model": "gpt-4o-mini",
    // "model_provider": "anthropic",
    // "model": "claude-3-5-haiku-latest",
    // "model_provider": "google_genai",
    // "model": "gemini-2.0-flash",
  },

  "mcp_servers": {
    "fetch": {
      "command": "uvx",
      "args": [ "mcp-server-fetch" ]
    },

    "weather": {
      "command": "npx",
      "args": [ "-y", "@h1deya/mcp-server-weather" ]
    },

    // Auto-detection: tries Streamable HTTP first, falls back to SSE
    "remote-mcp-server": {
      "url": "https://${SERVER_HOST}:${SERVER_PORT}/..."
    },

    // Example of authentication via Authorization header
    "github": {
      "type": "http", // recommended to specify the protocol explicitly when authentication is used
      "url": "https://api.githubcopilot.com/mcp/",
      "headers": {
        "Authorization": "Bearer ${GITHUB_PERSONAL_ACCESS_TOKEN}"
      }
    },
  }
}
```

It leverages the [LangChain ReAct Agent](https://langchain-ai.github.io/langgraph/reference/agents/) and
the utility function `convert_mcp_to_langchain_tools()` from
[`langchain_mcp_tools`](https://pypi.org/project/langchain-mcp-tools/).
This function handles parallel initialization of the specified MCP servers
and converts their available tools into a list of LangChain-compatible tools
([list[BaseTool]](https://python.langchain.com/api_reference/core/tools/langchain_core.tools.base.BaseTool.html#langchain_core.tools.base.BaseTool)).

This client supports local (stdio) MCP servers as well as
remote (Streamable HTTP / SSE / WebSocket) MCP servers,
which are accessible via a simple URL plus optional headers for authentication and other purposes.

This client only supports text results of MCP tool calls and disregards other result types.

For the convenience of debugging MCP servers, this client prints local (stdio) MCP server logs to the console.

LLMs from Anthropic, OpenAI, and Google (GenAI) are currently supported.

A TypeScript version of this MCP client is available
[here](https://github.com/hideya/mcp-client-langchain-ts).

## Prerequisites

- Python 3.11+
- [optional] [`uv` (`uvx`)](https://docs.astral.sh/uv/getting-started/installation/)
  installed to run Python package-based MCP servers
- [optional] [npm 7+ (`npx`)](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)
  to run Node.js package-based MCP servers
- API keys from [Anthropic](https://console.anthropic.com/settings/keys),
  [OpenAI](https://platform.openai.com/api-keys), and/or
  [Groq](https://console.groq.com/keys),
  as needed

## Setup

1. Install dependencies:
   ```bash
   make install
   ```

2. Set up API keys:
   ```bash
   cp .env.template .env
   ```
   - Update `.env` as needed.
   - `.gitignore` is configured to ignore `.env`
     to prevent accidental commits of the credentials.

3. Configure the LLM and MCP server settings in `llm_mcp_config.json5` as needed.

   - [The configuration file format](https://github.com/hideya/mcp-client-langchain-ts/blob/main/llm_mcp_config.json5)
     for MCP servers follows the same structure as
     [Claude for Desktop](https://modelcontextprotocol.io/quickstart/user),
     with one difference: the key name `mcpServers` has been changed
     to `mcp_servers` to follow the snake_case convention
     commonly used in JSON configuration files.
   - The file format is [JSON5](https://json5.org/),
     where comments and trailing commas are allowed.
   - The format is further extended to replace `${...}` notations
     with the values of the corresponding environment variables
     (see the sketch after this list).
   - Keep all the credentials and private info in the `.env` file
     and refer to them with the `${...}` notation as needed.

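The packaged `config_loader.py` is not shown in this diff; a minimal sketch of the loading-and-expansion idea described above, using the declared `pyjson5` and `python-dotenv` dependencies, might look like this (`load_config` is an illustrative name, not necessarily the package's actual API):

```python
# Minimal sketch of the config-loading behavior described above:
# JSON5 parsing via pyjson5 plus ${VAR} expansion from the environment.
import os
import re

import pyjson5
from dotenv import load_dotenv

# Matches ${NAME} where NAME is a valid environment-variable identifier.
_ENV_VAR = re.compile(r"\$\{([A-Za-z_][A-Za-z0-9_]*)\}")


def load_config(path: str = "llm_mcp_config.json5") -> dict:
    load_dotenv()  # make the values from .env visible via os.environ
    with open(path, encoding="utf-8") as f:
        raw = f.read()
    # Replace every ${NAME} with the corresponding environment variable,
    # leaving the notation untouched if the variable is not set.
    expanded = _ENV_VAR.sub(
        lambda m: os.environ.get(m.group(1), m.group(0)), raw
    )
    return pyjson5.decode(expanded)


config = load_config()
print(config["llm"]["model"])
```
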
## Usage

Run the app:
```bash
make start
```
It takes a while on the first run.

Run in verbose mode:
```bash
make start-v
```

See command-line options:
```bash
make start-h
```

At the prompt, you can simply press Enter to use example queries that perform MCP server tool invocations.

Example queries can be configured in `llm_mcp_config.json5`.
mcp_chat-0.0.1/llm_mcp_config.json5
ADDED
@@ -0,0 +1,119 @@
// The configuration file format is [JSON5](https://json5.org/),
// where comments and trailing commas are allowed.
// The file format is further extended to replace `${...}` notations
// with the values of the corresponding environment variables.
// Keep all the credentials and private info in the `.env` file
// and refer to them with the `${...}` notation as needed.
{
  // "llm": {
  //   // https://docs.anthropic.com/en/docs/about-claude/pricing
  //   // https://console.anthropic.com/settings/billing
  //   "model_provider": "anthropic",
  //   "model": "claude-3-5-haiku-latest",
  //   // "model": "claude-sonnet-4-0",
  //   // "temperature": 0.0,
  //   // "max_tokens": 10000,
  //   "system_prompt": "Answer briefly and clearly",
  // },

  "llm": {
    // https://platform.openai.com/docs/pricing
    // https://platform.openai.com/settings/organization/billing/overview
    "model_provider": "openai",
    "model": "gpt-4o-mini",
    // "model": "o4-mini",
    // "temperature": 0.0, // 'temperature' is not supported with "o4-mini"
    // "max_completion_tokens": 10000, // use 'max_completion_tokens' instead of 'max_tokens'
    "system_prompt": "Answer briefly and clearly",
  },

  // "llm": {
  //   // https://ai.google.dev/gemini-api/docs/pricing
  //   // https://console.cloud.google.com/billing
  //   "model_provider": "google_genai",
  //   "model": "gemini-2.0-flash",
  //   // "model": "gemini-1.5-pro",
  //   // "temperature": 0.0,
  //   // "max_tokens": 10000,
  //   "system_prompt": "Answer briefly and clearly",
  // },

  "example_queries": [
    "Are there any weather alerts in California?",
    "Read the news headlines on bbc.com",
    "Read and briefly summarize the LICENSE file",
    // "Open the webpage at bbc.com",
    // "Search the web and get today's news related to tokyo",
  ],

  "mcp_servers": {
    // https://github.com/modelcontextprotocol/servers/tree/main/src/filesystem
    "filesystem": {
      "command": "npx",
      "args": [
        "-y",
        "@modelcontextprotocol/server-filesystem",
        "."
      ]
    },

    // https://github.com/modelcontextprotocol/servers/tree/main/src/fetch
    "fetch": {
      "command": "uvx",
      "args": [
        "mcp-server-fetch"
      ]
    },

    // https://github.com/modelcontextprotocol/quickstart-resources/tree/main/weather-server-python
    "weather": {
      "command": "npx",
      "args": [
        "-y",
        "@h1deya/mcp-server-weather"
      ]
    },

    // // Auto-detection: tries Streamable HTTP first, falls back to SSE
    // "auto-detect-server": {
    //   "url": "http://${SERVER_HOST}:${SERVER_PORT}/..."
    // },

    // // WebSocket
    // "ws-server-name": {
    //   "url": "ws://${WS_SERVER_HOST}:${WS_SERVER_PORT}/..."
    // },

    // // Test SSE connection with the auto fallback
    // // See the comments at the top of index.ts
    // "weather": {
    //   "url": "http://localhost:${SSE_SERVER_PORT}/sse"
    // },

    // // Example of authentication via Authorization header
    // // https://github.com/github/github-mcp-server?tab=readme-ov-file#remote-github-mcp-server
    // "github": {
    //   // To avoid auto protocol fallback, specify the protocol explicitly when using authentication
    //   "type": "http", // or `transport: "http",`
    //   "url": "https://api.githubcopilot.com/mcp/",
    //   "headers": {
    //     "Authorization": "Bearer ${GITHUB_PERSONAL_ACCESS_TOKEN}"
    //   }
    // },
    // // NOTE: When accessing the GitHub MCP server, a [GitHub PAT (Personal Access Token)](https://github.com/settings/personal-access-tokens)
    // // alone is not enough; your GitHub account must have an active Copilot subscription or be assigned a Copilot license through your organization.

    // // https://github.com/microsoft/playwright-mcp
    // "playwright": {
    //   "command": "npx",
    //   "args": [ "-y", "@playwright/mcp@latest" ]
    // },

    // // https://github.com/modelcontextprotocol/servers/tree/main/src/brave-search
    // "brave-search": {
    //   "command": "npx",
    //   "args": [ "-y", "@modelcontextprotocol/server-brave-search"],
    //   "env": { "BRAVE_API_KEY": "${BRAVE_API_KEY}" }
    // },
  }
}
mcp_chat-0.0.1/pyproject.toml
ADDED
@@ -0,0 +1,43 @@
[project]
name = "mcp-chat"
version = "0.0.1"
description = "Simple CLI MCP Client to quickly test and explore MCP servers from the command line"
readme = "README.md"
requires-python = ">=3.11"
keywords = [
    "mcp",
    "cli",
    "try",
    "test",
    "model-context-protocol",
    "langchain",
    "tools",
    "python",
    "client",
    "simple",
    "quick",
    "explore",
]
dependencies = [
    "langchain-mcp-tools>=0.2.7",
    "langchain>=0.3.26",
    "langgraph>=0.5.0",
    "langchain-google-genai>=2.1.5",
    "langchain-anthropic>=0.3.1",
    "langchain-openai>=0.3.0",
    "langchain-groq>=0.2.3",
    "pyjson5>=1.6.8",
    "python-dotenv>=1.0.1",
    "websockets>=15.0.1",
]

[project.urls]
"Bug Tracker" = "https://github.com/hideya/mcp-client-langchain-py/issues"
"Source Code" = "https://github.com/hideya/mcp-client-langchain-py"

[project.scripts]
mcp-chat = "mcp_chat.cli_chat:main"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"