@h1deya/mcp-client-cli 0.3.0 → 0.3.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,4 +1,4 @@
- # Simple MCP Client to Explore MCP Servers / TypeScript [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://github.com/hideya/mcp-langchain-client-ts/blob/main/LICENSE) [![npm version](https://img.shields.io/npm/v/@h1deya/mcp-client-cli.svg)](https://www.npmjs.com/package/@h1deya/mcp-client-cli)
+ # Simple MCP Client to Explore MCP Servers [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://github.com/hideya/mcp-langchain-client-ts/blob/main/LICENSE) [![npm version](https://img.shields.io/npm/v/@h1deya/mcp-client-cli.svg)](https://www.npmjs.com/package/@h1deya/mcp-client-cli)

  **Quickly test and explore MCP servers from the command line!**
@@ -21,11 +21,12 @@ A Python equivalent of this utility is available [here](https://pypi.org/project
  - Node.js 18+
  - [optional] [`uv` (`uvx`)](https://docs.astral.sh/uv/getting-started/installation/)
    installed to run Python-based local (stdio) MCP servers
- - LLM API keys from
+ - LLM API key(s) from
    [OpenAI](https://platform.openai.com/api-keys),
    [Anthropic](https://console.anthropic.com/settings/keys),
+   [Google AI Studio (for GenAI/Gemini)](https://aistudio.google.com/apikey),
    and/or
-   [Google AI Studio (for GenAI/Gemini)](https://aistudio.google.com/apikey)
+   [xAI](https://console.x.ai/),
    as needed

  ## Quick Start
@@ -51,6 +52,8 @@ A Python equivalent of this utility is available [here](https://pypi.org/project
    // "model": "claude-3-5-haiku-latest",
    // "model_provider": "google_genai",
    // "model": "gemini-2.5-flash",
+   // "model_provider": "xai",
+   // "model": "grok-3-mini",
  },

  "mcp_servers": {
@@ -71,7 +74,8 @@ A Python equivalent of this utility is available [here](https://pypi.org/project
  ```bash
  echo "ANTHROPIC_API_KEY=sk-ant-...
  OPENAI_API_KEY=sk-proj-...
- GOOGLE_API_KEY=AI..." > .env
+ GOOGLE_API_KEY=AI...
+ XAI_API_KEY=xai-..." > .env

  code .env
  ```
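As a quick orientation aid (not code from the package itself), the sketch below maps each supported `model_provider` to the environment variable its LangChain chat class is typically configured through once the `.env` file above has been loaded with `dotenv`, a declared dependency of this CLI. The variable names come straight from the README snippet; the lookup loop is illustrative only.

```ts
// Illustrative sketch only: which environment variable each provider is
// expected to read, using the names shown in the README above.
import "dotenv/config"; // dotenv is a declared dependency of the CLI

const providerEnvVars: Record<string, string> = {
  openai: "OPENAI_API_KEY",
  anthropic: "ANTHROPIC_API_KEY",
  google_genai: "GOOGLE_API_KEY",
  xai: "XAI_API_KEY", // newly relevant in 0.3.4 alongside @langchain/xai
};

for (const [provider, envVar] of Object.entries(providerEnvVars)) {
  console.log(`${provider}: ${envVar} is ${process.env[envVar] ? "set" : "missing"}`);
}
```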
@@ -163,7 +167,7 @@ Create a `llm_mcp_config.json5` file:
  {
    "llm": {
      "model_provider": "openai",
-     "model": "gpt-4.1-nano",
+     "model": "gpt-4o-mini",
      // model: "o4-mini",
    },

@@ -177,7 +181,13 @@ Create a `llm_mcp_config.json5` file:
  // "model_provider": "google_genai",
  // "model": "gemini-2.5-flash",
  // // "model": "gemini-2.5-pro",
- // }
+ // },
+
+ // "llm": {
+ // "model_provider": "xai",
+ // "model": "grok-3-mini",
+ // // "model": "grok-4",
+ // },

  "example_queries": [
    "Tell me how LLMs work in a few sentences",
@@ -1 +1 @@
- {"version":3,"file":"init-chat-model.d.ts","sourceRoot":"","sources":["../src/init-chat-model.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,cAAc,EAAE,MAAM,6CAA6C,CAAC;AAK5F,UAAU,eAAe;IACvB,aAAa,EAAE,MAAM,CAAC;IACtB,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,cAAc,EAAE,CAAC;CAC1B;AAED,wBAAgB,aAAa,CAAC,MAAM,EAAE,eAAe,GAAG,aAAa,CAyCpE"}
+ {"version":3,"file":"init-chat-model.d.ts","sourceRoot":"","sources":["../src/init-chat-model.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,aAAa,EAAE,cAAc,EAAE,MAAM,6CAA6C,CAAC;AAK5F,UAAU,eAAe;IACvB,aAAa,EAAE,MAAM,CAAC;IACtB,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,cAAc,EAAE,CAAC;CAC1B;AAED,wBAAgB,aAAa,CAAC,MAAM,EAAE,eAAe,GAAG,aAAa,CA8CpE"}
@@ -1,6 +1,7 @@
  import { ChatAnthropic } from '@langchain/anthropic';
  import { ChatOpenAI } from '@langchain/openai';
  import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
+ import { ChatXAI } from "@langchain/xai";
  export function initChatModel(config) {
      let model;
      const { modelProvider, tools, ...llmConfig } = config;
@@ -13,8 +14,12 @@ export function initChatModel(config) {
          model = new ChatAnthropic(llmConfig);
          break;
        case 'google_genai':
+       case 'google_gemini':
          model = new ChatGoogleGenerativeAI(llmConfig);
          break;
+       case 'xai':
+         model = new ChatXAI(llmConfig);
+         break;
        default:
          throw new Error(`Unsupported model_provider: ${modelProvider}`);
      }
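For context on how the new branch is reached, here is a hedged usage sketch of `initChatModel` with the xAI provider. The relative import path and the config fields are assumptions drawn from this diff (the function destructures `modelProvider` and forwards the remaining options, including `model`, to the chosen chat class).

```ts
// Sketch only: exercising the updated provider switch. Only 'xai' and the
// 'google_gemini' alias are new in this release; everything else is unchanged.
import { initChatModel } from "./init-chat-model.js"; // path assumed

const model = initChatModel({
  modelProvider: "xai", // now dispatches to ChatXAI
  model: "grok-3-mini", // passed through llmConfig to the ChatXAI constructor
});

// LangChain chat models expose invoke(); XAI_API_KEY must be set in the environment.
const reply = await model.invoke("Say hello in one short sentence.");
console.log(reply.content);
```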
@@ -1 +1 @@
- {"version":3,"file":"init-chat-model.js","sourceRoot":"","sources":["../src/init-chat-model.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,sBAAsB,CAAC;AACrD,OAAO,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAC/C,OAAO,EAAE,sBAAsB,EAAE,MAAM,yBAAyB,CAAC;AAcjE,MAAM,UAAU,aAAa,CAAC,MAAuB;IACnD,IAAI,KAAoB,CAAC;IAEzB,MAAM,EAAE,aAAa,EAAE,KAAK,EAAE,GAAG,SAAS,EAAE,GAAG,MAAM,CAAC;IAEtD,IAAI,CAAC;QACH,QAAQ,aAAa,CAAC,WAAW,EAAE,EAAE,CAAC;YACpC,KAAK,QAAQ;gBACX,KAAK,GAAG,IAAI,UAAU,CAAC,SAAS,CAAC,CAAC;gBAClC,MAAM;YAER,KAAK,WAAW;gBACd,KAAK,GAAG,IAAI,aAAa,CAAC,SAAS,CAAC,CAAC;gBACrC,MAAM;YAER,KAAK,cAAc;gBACjB,KAAK,GAAG,IAAI,sBAAsB,CAAC,SAAS,CAAC,CAAC;gBAC9C,MAAM;YAER;gBACE,MAAM,IAAI,KAAK,CACb,+BAA+B,aAAa,EAAE,CAC/C,CAAC;QACN,CAAC;QAED,IAAI,OAAO,KAAK,EAAE,SAAS,KAAK,UAAU,EAAE,CAAC;YAC3C,IAAI,KAAK,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAC9B,QAAQ;gBACR,sEAAsE;gBACtE,KAAK,GAAI,KAAiC,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;YAC9D,CAAC;QACH,CAAC;aAAM,CAAC;YACN,MAAM,IAAI,KAAK,CACb,+CAA+C,aAAa,EAAE,CAC/D,CAAC;QACJ,CAAC;QAED,OAAO,KAAK,CAAC;IACf,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,IAAI,KAAK,CAAC,oCAAoC,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC;IAClH,CAAC;AACH,CAAC"}
+ {"version":3,"file":"init-chat-model.js","sourceRoot":"","sources":["../src/init-chat-model.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,sBAAsB,CAAC;AACrD,OAAO,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAC/C,OAAO,EAAE,sBAAsB,EAAE,MAAM,yBAAyB,CAAC;AACjE,OAAO,EAAE,OAAO,EAAE,MAAM,gBAAgB,CAAC;AAczC,MAAM,UAAU,aAAa,CAAC,MAAuB;IACnD,IAAI,KAAoB,CAAC;IAEzB,MAAM,EAAE,aAAa,EAAE,KAAK,EAAE,GAAG,SAAS,EAAE,GAAG,MAAM,CAAC;IAEtD,IAAI,CAAC;QACH,QAAQ,aAAa,CAAC,WAAW,EAAE,EAAE,CAAC;YACpC,KAAK,QAAQ;gBACX,KAAK,GAAG,IAAI,UAAU,CAAC,SAAS,CAAC,CAAC;gBAClC,MAAM;YAER,KAAK,WAAW;gBACd,KAAK,GAAG,IAAI,aAAa,CAAC,SAAS,CAAC,CAAC;gBACrC,MAAM;YAER,KAAK,cAAc,CAAC;YACpB,KAAK,eAAe;gBAClB,KAAK,GAAG,IAAI,sBAAsB,CAAC,SAAS,CAAC,CAAC;gBAC9C,MAAM;YAER,KAAK,KAAK;gBACR,KAAK,GAAG,IAAI,OAAO,CAAC,SAAS,CAAC,CAAC;gBAC/B,MAAM;YAER;gBACE,MAAM,IAAI,KAAK,CACb,+BAA+B,aAAa,EAAE,CAC/C,CAAC;QACN,CAAC;QAED,IAAI,OAAO,KAAK,EAAE,SAAS,KAAK,UAAU,EAAE,CAAC;YAC3C,IAAI,KAAK,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAC9B,QAAQ;gBACR,sEAAsE;gBACtE,KAAK,GAAI,KAAiC,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;YAC9D,CAAC;QACH,CAAC;aAAM,CAAC;YACN,MAAM,IAAI,KAAK,CACb,+CAA+C,aAAa,EAAE,CAC/D,CAAC;QACJ,CAAC;QAED,OAAO,KAAK,CAAC;IACf,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,IAAI,KAAK,CAAC,oCAAoC,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC;IAClH,CAAC;AACH,CAAC"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@h1deya/mcp-client-cli",
-   "version": "0.3.0",
+   "version": "0.3.4",
    "description": "Simple MCP Client to quickly test and explore MCP servers from the command line",
    "license": "MIT",
    "keywords": [
@@ -52,12 +52,13 @@
    "publish:do": "npm run publish:prepare && npm publish --access=public"
  },
  "dependencies": {
-   "@h1deya/langchain-mcp-tools": "^0.3.0",
+   "@h1deya/langchain-mcp-tools": "^0.3.4",
    "@langchain/anthropic": "^0.3.11",
    "@langchain/core": "^0.3.61",
    "@langchain/google-genai": "^0.2.12",
    "@langchain/langgraph": "^0.3.6",
    "@langchain/openai": "^0.3.16",
+   "@langchain/xai": "^0.1.0",
    "@types/yargs": "^17.0.33",
    "dotenv": "^16.4.7",
    "json5": "^2.2.3",