@mcp-use/cli 1.0.0 → 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/dist/InputPrompt.d.ts +13 -0
  2. package/dist/InputPrompt.js +188 -0
  3. package/dist/MultilineInput.d.ts +13 -0
  4. package/dist/MultilineInput.js +154 -0
  5. package/dist/MultilineTextInput.d.ts +11 -0
  6. package/dist/MultilineTextInput.js +97 -0
  7. package/dist/PasteAwareInput.d.ts +13 -0
  8. package/dist/PasteAwareInput.js +183 -0
  9. package/dist/SimpleMultilineInput.d.ts +11 -0
  10. package/dist/SimpleMultilineInput.js +125 -0
  11. package/dist/app.d.ts +1 -5
  12. package/dist/app.js +291 -186
  13. package/dist/cli.js +2 -5
  14. package/dist/commands.d.ts +15 -30
  15. package/dist/commands.js +308 -568
  16. package/dist/components/AsciiLogo.d.ts +2 -0
  17. package/dist/components/AsciiLogo.js +7 -0
  18. package/dist/components/Footer.d.ts +5 -0
  19. package/dist/components/Footer.js +19 -0
  20. package/dist/components/InputPrompt.d.ts +13 -0
  21. package/dist/components/InputPrompt.js +188 -0
  22. package/dist/components/Messages.d.ts +21 -0
  23. package/dist/components/Messages.js +80 -0
  24. package/dist/components/ServerStatus.d.ts +7 -0
  25. package/dist/components/ServerStatus.js +36 -0
  26. package/dist/components/Spinner.d.ts +16 -0
  27. package/dist/components/Spinner.js +63 -0
  28. package/dist/components/ToolStatus.d.ts +8 -0
  29. package/dist/components/ToolStatus.js +33 -0
  30. package/dist/components/textInput.d.ts +1 -0
  31. package/dist/components/textInput.js +1 -0
  32. package/dist/logger.d.ts +10 -0
  33. package/dist/logger.js +48 -0
  34. package/dist/mcp-service.d.ts +5 -4
  35. package/dist/mcp-service.js +98 -207
  36. package/dist/services/agent-service.d.ts +56 -0
  37. package/dist/services/agent-service.js +203 -0
  38. package/dist/services/cli-service.d.ts +132 -0
  39. package/dist/services/cli-service.js +591 -0
  40. package/dist/services/index.d.ts +4 -0
  41. package/dist/services/index.js +4 -0
  42. package/dist/services/llm-service.d.ts +174 -0
  43. package/dist/services/llm-service.js +567 -0
  44. package/dist/services/mcp-config-service.d.ts +69 -0
  45. package/dist/services/mcp-config-service.js +426 -0
  46. package/dist/services/mcp-service.d.ts +1 -0
  47. package/dist/services/mcp-service.js +1 -0
  48. package/dist/services/utility-service.d.ts +47 -0
  49. package/dist/services/utility-service.js +208 -0
  50. package/dist/storage.js +4 -4
  51. package/dist/types.d.ts +30 -0
  52. package/dist/types.js +1 -0
  53. package/package.json +22 -8
  54. package/readme.md +68 -39
@@ -0,0 +1,174 @@
1
+ import { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';
2
+ import { ChatAnthropic } from '@langchain/anthropic';
3
+ import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
4
+ import { ChatVertexAI } from '@langchain/google-vertexai';
5
+ import { ChatMistralAI } from '@langchain/mistralai';
6
+ import { ChatGroq } from '@langchain/groq';
7
+ import { ChatCohere } from '@langchain/cohere';
8
+ import { ChatFireworks } from '@langchain/community/chat_models/fireworks';
9
+ import { ChatPerplexity } from '@langchain/community/chat_models/perplexity';
10
+ import { ChatOllama } from '@langchain/ollama';
11
+ import { ChatTogetherAI } from '@langchain/community/chat_models/togetherai';
12
+ import { ChatDeepSeek } from '@langchain/deepseek';
13
+ import { ChatXAI } from '@langchain/xai';
14
+ import type { CommandResult } from '../types.js';
15
+ export type ProviderKey = keyof typeof PROVIDERS;
16
+ export interface LLMConfig {
17
+ provider: ProviderKey;
18
+ model: string;
19
+ temperature?: number;
20
+ maxTokens?: number;
21
+ }
22
+ declare const PROVIDERS: {
23
+ readonly openai: {
24
+ readonly envVar: "OPENAI_API_KEY";
25
+ readonly defaultModel: "gpt-4o";
26
+ readonly factory: (key: string, cfg: LLMConfig) => ChatOpenAI<import("@langchain/openai").ChatOpenAICallOptions>;
27
+ };
28
+ readonly azureopenai: {
29
+ readonly envVar: "AZURE_OPENAI_API_KEY";
30
+ readonly defaultModel: "gpt-4o";
31
+ readonly factory: (key: string, cfg: LLMConfig) => AzureChatOpenAI;
32
+ };
33
+ readonly anthropic: {
34
+ readonly envVar: "ANTHROPIC_API_KEY";
35
+ readonly defaultModel: "claude-3-5-sonnet-20240620";
36
+ readonly factory: (key: string, cfg: LLMConfig) => ChatAnthropic;
37
+ };
38
+ readonly gemini: {
39
+ readonly envVar: "GOOGLE_API_KEY";
40
+ readonly defaultModel: "gemini-1.5-pro";
41
+ readonly factory: (key: string, cfg: LLMConfig) => ChatGoogleGenerativeAI;
42
+ };
43
+ readonly vertex: {
44
+ readonly envVar: "GOOGLE_APPLICATION_CREDENTIALS";
45
+ readonly defaultModel: "gemini-1.5-flash";
46
+ readonly factory: (_key: string, cfg: LLMConfig) => ChatVertexAI;
47
+ };
48
+ readonly mistral: {
49
+ readonly envVar: "MISTRAL_API_KEY";
50
+ readonly defaultModel: "mistral-large-latest";
51
+ readonly factory: (key: string, cfg: LLMConfig) => ChatMistralAI<import("@langchain/mistralai").ChatMistralAICallOptions>;
52
+ };
53
+ readonly groq: {
54
+ readonly envVar: "GROQ_API_KEY";
55
+ readonly defaultModel: "llama-3.1-70b-versatile";
56
+ readonly factory: (key: string, cfg: LLMConfig) => ChatGroq;
57
+ };
58
+ readonly cohere: {
59
+ readonly envVar: "COHERE_API_KEY";
60
+ readonly defaultModel: "command-r-plus";
61
+ readonly factory: (key: string, cfg: LLMConfig) => ChatCohere<import("@langchain/cohere").ChatCohereCallOptions>;
62
+ };
63
+ readonly fireworks: {
64
+ readonly envVar: "FIREWORKS_API_KEY";
65
+ readonly defaultModel: "accounts/fireworks/models/llama-v3p1-70b-instruct";
66
+ readonly factory: (key: string, cfg: LLMConfig) => ChatFireworks;
67
+ };
68
+ readonly perplexity: {
69
+ readonly envVar: "PERPLEXITY_API_KEY";
70
+ readonly defaultModel: "pplx-70b-online";
71
+ readonly factory: (key: string, cfg: LLMConfig) => ChatPerplexity;
72
+ };
73
+ readonly ollama: {
74
+ readonly envVar: "OLLAMA_HOST";
75
+ readonly defaultModel: "llama3";
76
+ readonly factory: (_key: string, cfg: LLMConfig) => ChatOllama;
77
+ };
78
+ readonly together: {
79
+ readonly envVar: "TOGETHER_API_KEY";
80
+ readonly defaultModel: "mistralai/Mixtral-8x22B-Instruct-v0.1";
81
+ readonly factory: (key: string, cfg: LLMConfig) => ChatTogetherAI;
82
+ };
83
+ readonly deepseek: {
84
+ readonly envVar: "DEEPSEEK_API_KEY";
85
+ readonly defaultModel: "deepseek-chat";
86
+ readonly factory: (key: string, cfg: LLMConfig) => ChatDeepSeek;
87
+ };
88
+ readonly xai: {
89
+ readonly envVar: "XAI_API_KEY";
90
+ readonly defaultModel: "grok-1.5";
91
+ readonly factory: (key: string, cfg: LLMConfig) => ChatXAI;
92
+ };
93
+ };
94
+ export declare class LLMService {
95
+ private currentLLMConfig;
96
+ private sessionApiKeys;
97
+ private persistentConfig;
98
+ constructor();
99
+ private initializeDefaultProvider;
100
+ private getApiKey;
101
+ getAvailableProviders(): string[];
102
+ isAnyProviderAvailable(): boolean;
103
+ getAvailableModels(provider?: string): Record<string, string[]> | string[];
104
+ getCurrentConfig(): LLMConfig | null;
105
+ setModel(provider: string, model: string): {
106
+ success: boolean;
107
+ message: string;
108
+ requiresApiKey?: boolean;
109
+ envVar?: string;
110
+ };
111
+ setTemperature(temperature: number): {
112
+ success: boolean;
113
+ message: string;
114
+ };
115
+ setMaxTokens(maxTokens: number): {
116
+ success: boolean;
117
+ message: string;
118
+ };
119
+ validateApiKey(_provider: string, apiKey: string): {
120
+ valid: boolean;
121
+ message: string;
122
+ };
123
+ setApiKey(provider: string, apiKey: string, shouldAutoSelect?: boolean): {
124
+ success: boolean;
125
+ message: string;
126
+ autoSelected?: LLMConfig;
127
+ };
128
+ clearApiKeys(): void;
129
+ maskApiKey(apiKey: string): string;
130
+ getApiKeyStatus(): Record<string, {
131
+ status: string;
132
+ source: string;
133
+ masked: string;
134
+ }>;
135
+ createLLM(): any;
136
+ /**
137
+ * Handles the /model command to select a provider and model.
138
+ * @param args - Array of arguments where args[0] is provider and args[1] is model
139
+ * @returns A CommandResult with success/error status and prompts for API key if needed
140
+ */
141
+ handleModelCommand(args: string[]): CommandResult;
142
+ /**
143
+ * Handles the /models command to list available models.
144
+ * @param args - Optional array with provider name to filter models
145
+ * @returns A CommandResult with the list of available models
146
+ */
147
+ handleListModelsCommand(_args: string[]): CommandResult;
148
+ /**
149
+ * Handles the /config command to adjust temperature and max tokens.
150
+ * @param args - Array where args[0] is setting name and args[1] is value
151
+ * @returns A CommandResult with success/error status
152
+ */
153
+ handleConfigCommand(args: string[]): CommandResult;
154
+ /**
155
+ * Handles the /setkey command to manually set API keys.
156
+ * @param args - Array where args[0] is provider and args[1] is API key
157
+ * @returns A CommandResult with success/error status
158
+ */
159
+ handleSetKeyCommand(args: string[]): CommandResult;
160
+ /**
161
+ * Handles the /clearkeys command to clear all stored API keys.
162
+ * @returns A CommandResult indicating success
163
+ */
164
+ handleClearKeysCommand(): CommandResult;
165
+ /**
166
+ * Handles API key input when prompted during model selection.
167
+ * @param apiKey - The API key entered by the user
168
+ * @param provider - The provider for the API key
169
+ * @param model - The model to select after setting the key
170
+ * @returns A CommandResult with success/error status
171
+ */
172
+ handleApiKeyInput(apiKey: string, provider: string, model: string): CommandResult;
173
+ }
174
+ export {};