@juspay/neurolink 1.2.4 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +170 -0
- package/README.md +96 -232
- package/dist/cli/commands/config.d.ts +403 -0
- package/dist/cli/commands/config.js +567 -0
- package/dist/cli/commands/mcp.d.ts +7 -0
- package/dist/cli/commands/mcp.js +434 -0
- package/dist/cli/index.d.ts +9 -0
- package/dist/cli/index.js +16 -9
- package/dist/core/factory.js +6 -2
- package/dist/core/types.d.ts +12 -2
- package/dist/core/types.js +11 -0
- package/dist/mcp/context-manager.d.ts +164 -0
- package/dist/mcp/context-manager.js +273 -0
- package/dist/mcp/factory.d.ts +144 -0
- package/dist/mcp/factory.js +141 -0
- package/dist/mcp/orchestrator.d.ts +170 -0
- package/dist/mcp/orchestrator.js +372 -0
- package/dist/mcp/registry.d.ts +188 -0
- package/dist/mcp/registry.js +373 -0
- package/dist/mcp/servers/ai-providers/ai-analysis-tools.d.ts +21 -0
- package/dist/mcp/servers/ai-providers/ai-analysis-tools.js +215 -0
- package/dist/mcp/servers/ai-providers/ai-core-server.d.ts +10 -0
- package/dist/mcp/servers/ai-providers/ai-core-server.js +302 -0
- package/dist/mcp/servers/ai-providers/ai-workflow-tools.d.ts +101 -0
- package/dist/mcp/servers/ai-providers/ai-workflow-tools.js +430 -0
- package/dist/neurolink.d.ts +4 -4
- package/dist/neurolink.js +109 -56
- package/dist/providers/googleAIStudio.d.ts +30 -0
- package/dist/providers/googleAIStudio.js +215 -0
- package/dist/providers/googleVertexAI.js +2 -2
- package/dist/providers/index.d.ts +2 -0
- package/dist/providers/index.js +3 -1
- package/dist/providers/openAI.js +2 -2
- package/dist/utils/providerUtils.js +11 -2
- package/package.json +78 -6
package/dist/cli/commands/mcp.js
ADDED
@@ -0,0 +1,434 @@
+#!/usr/bin/env node
+/**
+ * MCP Server Management Commands
+ * Real MCP server connectivity and management
+ */
+import ora from 'ora';
+import chalk from 'chalk';
+import fs from 'fs';
+import { spawn } from 'child_process';
+import path from 'path';
+// Default MCP config file location
+const MCP_CONFIG_FILE = path.join(process.cwd(), '.mcp-config.json');
+// Load MCP configuration
+function loadMCPConfig() {
+    if (!fs.existsSync(MCP_CONFIG_FILE)) {
+        return { mcpServers: {} };
+    }
+    try {
+        const content = fs.readFileSync(MCP_CONFIG_FILE, 'utf-8');
+        return JSON.parse(content);
+    }
+    catch (error) {
+        throw new Error(`Invalid MCP config file: ${error.message}`);
+    }
+}
+// Save MCP configuration
+function saveMCPConfig(config) {
+    fs.writeFileSync(MCP_CONFIG_FILE, JSON.stringify(config, null, 2));
+}
+// Check if MCP server process is running
+async function checkMCPServerStatus(serverConfig) {
+    try {
+        if (serverConfig.transport === 'stdio') {
+            // For stdio servers, we need to actually try connecting
+            const child = spawn(serverConfig.command, serverConfig.args || [], {
+                stdio: ['pipe', 'pipe', 'pipe'],
+                env: { ...process.env, ...serverConfig.env },
+                cwd: serverConfig.cwd
+            });
+            return new Promise((resolve) => {
+                const timeout = setTimeout(() => {
+                    child.kill();
+                    resolve(false);
+                }, 3000);
+                child.on('spawn', () => {
+                    clearTimeout(timeout);
+                    child.kill();
+                    resolve(true);
+                });
+                child.on('error', () => {
+                    clearTimeout(timeout);
+                    resolve(false);
+                });
+            });
+        }
+        else if (serverConfig.transport === 'sse' && serverConfig.url) {
+            // For SSE servers, check if URL is accessible
+            try {
+                const response = await fetch(serverConfig.url, { method: 'HEAD' });
+                return response.ok;
+            }
+            catch {
+                return false;
+            }
+        }
+        return false;
+    }
+    catch {
+        return false;
+    }
+}
+// Connect to MCP server and get capabilities
+async function getMCPServerCapabilities(serverConfig) {
+    if (serverConfig.transport === 'stdio') {
+        // Spawn MCP server and send initialize request
+        const child = spawn(serverConfig.command, serverConfig.args || [], {
+            stdio: ['pipe', 'pipe', 'pipe'],
+            env: { ...process.env, ...serverConfig.env },
+            cwd: serverConfig.cwd
+        });
+        return new Promise((resolve, reject) => {
+            const timeout = setTimeout(() => {
+                child.kill();
+                reject(new Error('Timeout connecting to MCP server'));
+            }, 5000);
+            let responseData = '';
+            child.stdout?.on('data', (data) => {
+                responseData += data.toString();
+                // Look for JSON-RPC response
+                try {
+                    const lines = responseData.split('\n');
+                    for (const line of lines) {
+                        if (line.trim() && line.includes('"result"')) {
+                            const response = JSON.parse(line.trim());
+                            if (response.result && response.result.capabilities) {
+                                clearTimeout(timeout);
+                                child.kill();
+                                resolve(response.result);
+                                return;
+                            }
+                        }
+                    }
+                }
+                catch {
+                    // Continue parsing
+                }
+            });
+            child.on('spawn', () => {
+                // Send initialize request
+                const initRequest = {
+                    jsonrpc: '2.0',
+                    id: 1,
+                    method: 'initialize',
+                    params: {
+                        protocolVersion: '2024-11-05',
+                        capabilities: {},
+                        clientInfo: {
+                            name: 'neurolink-cli',
+                            version: '1.0.0'
+                        }
+                    }
+                };
+                child.stdin?.write(JSON.stringify(initRequest) + '\n');
+            });
+            child.on('error', (error) => {
+                clearTimeout(timeout);
+                reject(error);
+            });
+        });
+    }
+    throw new Error('SSE transport not yet implemented for capabilities');
+}
+// List available tools from MCP server
+async function listMCPServerTools(serverConfig) {
+    if (serverConfig.transport === 'stdio') {
+        const child = spawn(serverConfig.command, serverConfig.args || [], {
+            stdio: ['pipe', 'pipe', 'pipe'],
+            env: { ...process.env, ...serverConfig.env },
+            cwd: serverConfig.cwd
+        });
+        return new Promise((resolve, reject) => {
+            const timeout = setTimeout(() => {
+                child.kill();
+                reject(new Error('Timeout listing MCP server tools'));
+            }, 5000);
+            let responseData = '';
+            let initialized = false;
+            child.stdout?.on('data', (data) => {
+                responseData += data.toString();
+                try {
+                    const lines = responseData.split('\n');
+                    for (const line of lines) {
+                        if (line.trim() && line.includes('"result"')) {
+                            const response = JSON.parse(line.trim());
+                            if (response.id === 1 && response.result.capabilities) {
+                                // Initialize successful, now list tools
+                                initialized = true;
+                                const listToolsRequest = {
+                                    jsonrpc: '2.0',
+                                    id: 2,
+                                    method: 'tools/list',
+                                    params: {}
+                                };
+                                child.stdin?.write(JSON.stringify(listToolsRequest) + '\n');
+                            }
+                            else if (response.id === 2 && response.result.tools) {
+                                clearTimeout(timeout);
+                                child.kill();
+                                resolve(response.result.tools);
+                                return;
+                            }
+                        }
+                    }
+                }
+                catch {
+                    // Continue parsing
+                }
+            });
+            child.on('spawn', () => {
+                // Send initialize request first
+                const initRequest = {
+                    jsonrpc: '2.0',
+                    id: 1,
+                    method: 'initialize',
+                    params: {
+                        protocolVersion: '2024-11-05',
+                        capabilities: {},
+                        clientInfo: {
+                            name: 'neurolink-cli',
+                            version: '1.0.0'
+                        }
+                    }
+                };
+                child.stdin?.write(JSON.stringify(initRequest) + '\n');
+            });
+            child.on('error', (error) => {
+                clearTimeout(timeout);
+                reject(error);
+            });
+        });
+    }
+    throw new Error('SSE transport not yet implemented for tool listing');
+}
+// MCP Commands for yargs
+export function addMCPCommands(yargs) {
+    return yargs.command('mcp <subcommand>', 'Manage MCP (Model Context Protocol) servers', (yargsBuilder) => {
+        yargsBuilder
+            .usage('Usage: $0 mcp <subcommand> [options]')
+            // List MCP servers
+            .command('list', 'List configured MCP servers', (y) => y
+                .usage('Usage: $0 mcp list [options]')
+                .option('status', { type: 'boolean', description: 'Check server status' })
+                .example('$0 mcp list', 'List all MCP servers')
+                .example('$0 mcp list --status', 'List servers with status check'), async (argv) => {
+                const config = loadMCPConfig();
+                const servers = Object.entries(config.mcpServers);
+                if (servers.length === 0) {
+                    console.log(chalk.yellow('📭 No MCP servers configured'));
+                    console.log(chalk.blue('💡 Add a server with: neurolink mcp add <name> <command>'));
+                    return;
+                }
+                console.log(chalk.blue(`📋 Configured MCP servers (${servers.length}):\n`));
+                for (const [name, serverConfig] of servers) {
+                    console.log(chalk.bold(`🔧 ${name}`));
+                    console.log(` Command: ${serverConfig.command} ${(serverConfig.args || []).join(' ')}`);
+                    console.log(` Transport: ${serverConfig.transport}`);
+                    if (argv.status) {
+                        const spinner = ora(`Checking ${name}...`).start();
+                        try {
+                            const isRunning = await checkMCPServerStatus(serverConfig);
+                            if (isRunning) {
+                                spinner.succeed(`${name}: ${chalk.green('✅ Available')}`);
+                            }
+                            else {
+                                spinner.fail(`${name}: ${chalk.red('❌ Not available')}`);
+                            }
+                        }
+                        catch (error) {
+                            spinner.fail(`${name}: ${chalk.red('❌ Error')} - ${error.message}`);
+                        }
+                    }
+                    console.log(); // Empty line
+                }
+            })
+            // Add MCP server
+            .command('add <name> <command>', 'Add a new MCP server', (y) => y
+                .usage('Usage: $0 mcp add <name> <command> [options]')
+                .positional('name', { type: 'string', description: 'Server name', demandOption: true })
+                .positional('command', { type: 'string', description: 'Command to run server', demandOption: true })
+                .option('args', { type: 'array', description: 'Command arguments' })
+                .option('transport', { choices: ['stdio', 'sse'], default: 'stdio', description: 'Transport type' })
+                .option('url', { type: 'string', description: 'URL for SSE transport' })
+                .option('env', { type: 'string', description: 'Environment variables (JSON)' })
+                .option('cwd', { type: 'string', description: 'Working directory' })
+                .example('$0 mcp add filesystem "npx @modelcontextprotocol/server-filesystem"', 'Add filesystem server')
+                .example('$0 mcp add github "npx @modelcontextprotocol/server-github"', 'Add GitHub server'), async (argv) => {
+                const config = loadMCPConfig();
+                const serverConfig = {
+                    name: argv.name,
+                    command: argv.command,
+                    args: argv.args || [],
+                    transport: argv.transport,
+                    url: argv.url,
+                    cwd: argv.cwd
+                };
+                if (argv.env) {
+                    try {
+                        serverConfig.env = JSON.parse(argv.env);
+                    }
+                    catch (error) {
+                        console.error(chalk.red('❌ Invalid JSON for environment variables'));
+                        process.exit(1);
+                    }
+                }
+                config.mcpServers[argv.name] = serverConfig;
+                saveMCPConfig(config);
+                console.log(chalk.green(`✅ Added MCP server: ${argv.name}`));
+                console.log(chalk.blue(`💡 Test it with: neurolink mcp test ${argv.name}`));
+            })
+            // Remove MCP server
+            .command('remove <name>', 'Remove an MCP server', (y) => y
+                .usage('Usage: $0 mcp remove <name>')
+                .positional('name', { type: 'string', description: 'Server name to remove', demandOption: true })
+                .example('$0 mcp remove filesystem', 'Remove filesystem server'), async (argv) => {
+                const config = loadMCPConfig();
+                if (!config.mcpServers[argv.name]) {
+                    console.error(chalk.red(`❌ MCP server '${argv.name}' not found`));
+                    process.exit(1);
+                }
+                delete config.mcpServers[argv.name];
+                saveMCPConfig(config);
+                console.log(chalk.green(`✅ Removed MCP server: ${argv.name}`));
+            })
+            // Test MCP server
+            .command('test <name>', 'Test connection to an MCP server', (y) => y
+                .usage('Usage: $0 mcp test <name>')
+                .positional('name', { type: 'string', description: 'Server name to test', demandOption: true })
+                .example('$0 mcp test filesystem', 'Test filesystem server'), async (argv) => {
+                const config = loadMCPConfig();
+                const serverConfig = config.mcpServers[argv.name];
+                if (!serverConfig) {
+                    console.error(chalk.red(`❌ MCP server '${argv.name}' not found`));
+                    process.exit(1);
+                }
+                console.log(chalk.blue(`🔍 Testing MCP server: ${argv.name}\n`));
+                const spinner = ora('Connecting...').start();
+                try {
+                    // Test basic connectivity
+                    const isRunning = await checkMCPServerStatus(serverConfig);
+                    if (!isRunning) {
+                        spinner.fail(chalk.red('❌ Server not available'));
+                        return;
+                    }
+                    spinner.text = 'Getting capabilities...';
+                    const capabilities = await getMCPServerCapabilities(serverConfig);
+                    spinner.text = 'Listing tools...';
+                    const tools = await listMCPServerTools(serverConfig);
+                    spinner.succeed(chalk.green('✅ Connection successful!'));
+                    console.log(chalk.blue('\n📋 Server Capabilities:'));
+                    console.log(` Protocol Version: ${capabilities.protocolVersion || 'Unknown'}`);
+                    if (capabilities.capabilities.tools) {
+                        console.log(` Tools: ✅ Supported`);
+                    }
+                    if (capabilities.capabilities.resources) {
+                        console.log(` Resources: ✅ Supported`);
+                    }
+                    console.log(chalk.blue('\n🛠️ Available Tools:'));
+                    if (tools.length === 0) {
+                        console.log(' No tools available');
+                    }
+                    else {
+                        tools.forEach((tool) => {
+                            console.log(` • ${tool.name}: ${tool.description || 'No description'}`);
+                        });
+                    }
+                }
+                catch (error) {
+                    spinner.fail(chalk.red('❌ Connection failed'));
+                    console.error(chalk.red(`Error: ${error.message}`));
+                }
+            })
+            // Install popular MCP servers
+            .command('install <server>', 'Install popular MCP servers', (y) => y
+                .usage('Usage: $0 mcp install <server>')
+                .positional('server', {
+                    type: 'string',
+                    choices: ['filesystem', 'github', 'postgres', 'brave-search', 'puppeteer'],
+                    description: 'Server to install',
+                    demandOption: true
+                })
+                .example('$0 mcp install filesystem', 'Install filesystem server')
+                .example('$0 mcp install github', 'Install GitHub server'), async (argv) => {
+                const serverName = argv.server;
+                const config = loadMCPConfig();
+                // Pre-configured popular MCP servers
+                const serverConfigs = {
+                    filesystem: {
+                        name: 'filesystem',
+                        command: 'npx',
+                        args: ['-y', '@modelcontextprotocol/server-filesystem', '/'],
+                        transport: 'stdio'
+                    },
+                    github: {
+                        name: 'github',
+                        command: 'npx',
+                        args: ['-y', '@modelcontextprotocol/server-github'],
+                        transport: 'stdio'
+                    },
+                    postgres: {
+                        name: 'postgres',
+                        command: 'npx',
+                        args: ['-y', '@modelcontextprotocol/server-postgres'],
+                        transport: 'stdio'
+                    },
+                    'brave-search': {
+                        name: 'brave-search',
+                        command: 'npx',
+                        args: ['-y', '@modelcontextprotocol/server-brave-search'],
+                        transport: 'stdio'
+                    },
+                    puppeteer: {
+                        name: 'puppeteer',
+                        command: 'npx',
+                        args: ['-y', '@modelcontextprotocol/server-puppeteer'],
+                        transport: 'stdio'
+                    }
+                };
+                const serverConfig = serverConfigs[serverName];
+                if (!serverConfig) {
+                    console.error(chalk.red(`❌ Unknown server: ${serverName}`));
+                    process.exit(1);
+                }
+                console.log(chalk.blue(`📦 Installing MCP server: ${serverName}`));
+                config.mcpServers[serverName] = serverConfig;
+                saveMCPConfig(config);
+                console.log(chalk.green(`✅ Installed MCP server: ${serverName}`));
+                console.log(chalk.blue(`💡 Test it with: neurolink mcp test ${serverName}`));
+            })
+            // Execute tool from MCP server
+            .command('exec <server> <tool>', 'Execute a tool from an MCP server', (y) => y
+                .usage('Usage: $0 mcp exec <server> <tool> [options]')
+                .positional('server', { type: 'string', description: 'Server name', demandOption: true })
+                .positional('tool', { type: 'string', description: 'Tool name', demandOption: true })
+                .option('params', { type: 'string', description: 'Tool parameters (JSON)' })
+                .example('$0 mcp exec filesystem read_file --params \'{"path": "README.md"}\'', 'Read file using filesystem server'), async (argv) => {
+                const config = loadMCPConfig();
+                const serverConfig = config.mcpServers[argv.server];
+                if (!serverConfig) {
+                    console.error(chalk.red(`❌ MCP server '${argv.server}' not found`));
+                    process.exit(1);
+                }
+                let params = {};
+                if (argv.params) {
+                    try {
+                        params = JSON.parse(argv.params);
+                    }
+                    catch (error) {
+                        console.error(chalk.red('❌ Invalid JSON for parameters'));
+                        process.exit(1);
+                    }
+                }
+                console.log(chalk.blue(`🔧 Executing tool: ${argv.tool} on server: ${argv.server}`));
+                // This would need full MCP client implementation
+                // For now, show what would happen
+                console.log(chalk.yellow('⚠️ Tool execution not yet implemented'));
+                console.log(`Tool: ${argv.tool}`);
+                console.log(`Parameters: ${JSON.stringify(params, null, 2)}`);
+            })
+            .demandCommand(1, 'Please specify an MCP subcommand')
+            .example('$0 mcp list', 'List configured MCP servers')
+            .example('$0 mcp install filesystem', 'Install filesystem MCP server')
+            .example('$0 mcp test filesystem', 'Test filesystem server connection');
+    });
+}
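Note: the commands above persist server definitions to a .mcp-config.json file in the current working directory. A minimal sketch of that file's shape, inferred from loadMCPConfig/saveMCPConfig and the "mcp install filesystem" preset shown above (the single entry is illustrative):

{
  "mcpServers": {
    "filesystem": {
      "name": "filesystem",
      "command": "npx",
      "args": ["-y", "@modelcontextprotocol/server-filesystem", "/"],
      "transport": "stdio"
    }
  }
}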
package/dist/cli/index.d.ts
ADDED
@@ -0,0 +1,9 @@
+#!/usr/bin/env node
+/**
+ * NeuroLink CLI - Enhanced Simplified Approach
+ *
+ * Professional CLI experience with minimal maintenance overhead.
+ * Features: Spinners, colors, batch processing, provider testing, rich help
+ * Implementation: ~300 lines using simple JS utility functions
+ */
+export {};
package/dist/cli/index.js
CHANGED
@@ -14,6 +14,7 @@ import chalk from 'chalk';
 import fs from 'fs';
 import { fileURLToPath } from 'url';
 import { dirname } from 'path';
+import { addMCPCommands } from './commands/mcp.js';
 // Load environment variables from .env file
 try {
 // Try to import and configure dotenv
@@ -98,6 +99,8 @@ function handleError(error, context) {
 console.error(chalk.yellow('💡 Set API key: export OPENAI_API_KEY=sk-...'));
 console.error(chalk.yellow('💡 Or set AWS credentials & region: export AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=... AWS_REGION=us-east-1'));
 console.error(chalk.yellow('💡 Or set Google credentials: export GOOGLE_APPLICATION_CREDENTIALS=/path/to/key.json'));
+console.error(chalk.yellow('💡 Or set Anthropic API key: export ANTHROPIC_API_KEY=sk-ant-...'));
+console.error(chalk.yellow('💡 Or set Azure OpenAI credentials: export AZURE_OPENAI_API_KEY=... AZURE_OPENAI_ENDPOINT=...'));
 }
 if (error.message.toLowerCase().includes('rate limit')) {
 console.error(chalk.yellow('💡 Try again in a few moments or use --provider vertex'));
@@ -140,9 +143,9 @@ const cli = yargs(args)
 .alias('V', 'version')
 .strictOptions()
 .strictCommands()
-.demandCommand(1, '
+.demandCommand(1, '')
 .epilogue('For more info: https://github.com/juspay/neurolink')
-.showHelpOnFail(
+.showHelpOnFail(true, 'Specify --help for available options')
 .middleware((argv) => {
 // Middleware for NEUROLINK_QUIET is fine
 if (process.env.NEUROLINK_QUIET === 'true' && typeof argv.quiet === 'undefined') {
@@ -209,7 +212,7 @@ const cli = yargs(args)
 demandOption: true,
 })
 .option('provider', {
-choices: ['auto', 'openai', 'bedrock', 'vertex', 'anthropic', 'azure'],
+choices: ['auto', 'openai', 'bedrock', 'vertex', 'anthropic', 'azure', 'google-ai'],
 default: 'auto',
 description: 'AI provider to use (auto-selects best available)'
 })
@@ -263,6 +266,8 @@ const cli = yargs(args)
 if (result.usage)
 console.log(chalk.blue(`ℹ️ ${result.usage.totalTokens} tokens used`));
 }
+// Explicitly exit to prevent hanging, especially with Google AI Studio
+process.exit(0);
 }
 catch (error) {
 if (argv.format === 'json' && originalConsole.log) {
@@ -283,7 +288,7 @@ const cli = yargs(args)
 .command('stream <prompt>', 'Stream text generation in real-time', (yargsInstance) => yargsInstance
 .usage('Usage: $0 stream <prompt> [options]')
 .positional('prompt', { type: 'string', description: 'Text prompt for streaming', demandOption: true })
-.option('provider', { choices: ['auto', 'openai', 'bedrock', 'vertex', 'anthropic', 'azure'], default: 'auto', description: 'AI provider to use' })
+.option('provider', { choices: ['auto', 'openai', 'bedrock', 'vertex', 'anthropic', 'azure', 'google-ai'], default: 'auto', description: 'AI provider to use' })
 .option('temperature', { type: 'number', default: 0.7, description: 'Creativity level' })
 .example('$0 stream "Tell me a story"', 'Stream a story in real-time'), async (argv) => {
 if (!argv.quiet)
@@ -310,7 +315,7 @@ const cli = yargs(args)
 .positional('file', { type: 'string', description: 'File with prompts (one per line)', demandOption: true })
 .option('output', { type: 'string', description: 'Output file for results (default: stdout)' })
 .option('delay', { type: 'number', default: 1000, description: 'Delay between requests in milliseconds' })
-.option('provider', { choices: ['auto', 'openai', 'bedrock', 'vertex', 'anthropic', 'azure'], default: 'auto', description: 'AI provider to use' })
+.option('provider', { choices: ['auto', 'openai', 'bedrock', 'vertex', 'anthropic', 'azure', 'google-ai'], default: 'auto', description: 'AI provider to use' })
 .option('timeout', { type: 'number', default: 30000, description: 'Timeout for each request in milliseconds' })
 .option('temperature', { type: 'number', description: 'Global temperature for batch jobs' })
 .option('max-tokens', { type: 'number', description: 'Global max tokens for batch jobs' })
@@ -390,7 +395,7 @@ const cli = yargs(args)
 const spinner = argv.quiet ? null : ora('🔍 Checking AI provider status...\n').start();
 // Middleware sets argv.verbose if NEUROLINK_DEBUG is true and --verbose is not specified
 // Removed the spinner.stopAndPersist logic from here as it's handled before spinner start
-const providers = ['openai', 'bedrock', 'vertex', 'anthropic', 'azure'];
+const providers = ['openai', 'bedrock', 'vertex', 'anthropic', 'azure', 'google-ai'];
 const results = [];
 for (const p of providers) {
 if (spinner)
@@ -424,13 +429,13 @@ const cli = yargs(args)
 }
 })
 .command('list', 'List available AI providers', (y) => y.usage('Usage: $0 provider list'), async () => {
-console.log('Available providers: openai, bedrock, vertex, anthropic, azure');
+console.log('Available providers: openai, bedrock, vertex, anthropic, azure, google-ai');
 })
 .command('configure <providerName>', 'Display configuration guidance for a provider', (y) => y
 .usage('Usage: $0 provider configure <providerName>')
 .positional('providerName', {
 type: 'string',
-choices: ['openai', 'bedrock', 'vertex', 'anthropic', 'azure'],
+choices: ['openai', 'bedrock', 'vertex', 'anthropic', 'azure', 'google-ai'],
 description: 'Name of the provider to configure',
 demandOption: true,
 })
@@ -462,7 +467,7 @@ const cli = yargs(args)
 const spinner = argv.quiet ? null : ora('🔍 Checking AI provider status...\n').start();
 // Middleware sets argv.verbose if NEUROLINK_DEBUG is true and --verbose is not specified
 // Removed the spinner.stopAndPersist logic from here as it's handled before spinner start
-const providers = ['openai', 'bedrock', 'vertex', 'anthropic', 'azure'];
+const providers = ['openai', 'bedrock', 'vertex', 'anthropic', 'azure', 'google-ai'];
 const results = [];
 for (const p of providers) {
 if (spinner)
@@ -564,6 +569,8 @@ const cli = yargs(args)
 }
 })
 .completion('completion', 'Generate shell completion script');
+// Add MCP commands
+addMCPCommands(cli);
 // Use an async IIFE to allow top-level await for parseAsync
 (async () => {
 try {
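For orientation, a hedged example of how the new google-ai choice surfaces in the CLI above (assuming the package exposes a neurolink bin, as the hints printed by mcp.js suggest; the commands and flags themselves are taken from the options shown in this diff):

npx @juspay/neurolink provider list
npx @juspay/neurolink stream "Tell me a story" --provider google-ai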
package/dist/core/factory.js
CHANGED
@@ -1,4 +1,4 @@
-import { GoogleVertexAI, AmazonBedrock, OpenAI, AnthropicProvider, AzureOpenAIProvider } from '../providers/index.js';
+import { GoogleVertexAI, AmazonBedrock, OpenAI, AnthropicProvider, AzureOpenAIProvider, GoogleAIStudio } from '../providers/index.js';
 import { getBestProvider } from '../utils/providerUtils.js';
 const componentIdentifier = 'aiProviderFactory';
 /**
@@ -42,8 +42,12 @@ export class AIProviderFactory {
 case 'azure-openai':
     provider = new AzureOpenAIProvider();
     break;
+case 'google-ai':
+case 'google-studio':
+    provider = new GoogleAIStudio(modelName);
+    break;
 default:
-    throw new Error(`Unknown provider: ${providerName}. Supported providers: vertex, bedrock, openai, anthropic, azure`);
+    throw new Error(`Unknown provider: ${providerName}. Supported providers: vertex, bedrock, openai, anthropic, azure, google-ai`);
 }
 console.log(`[${functionTag}] Provider creation succeeded`, {
 providerName,
package/dist/core/types.d.ts
CHANGED
@@ -8,7 +8,8 @@ export declare enum AIProviderName {
     OPENAI = "openai",
     VERTEX = "vertex",
     ANTHROPIC = "anthropic",
-    AZURE = "azure"
+    AZURE = "azure",
+    GOOGLE_AI = "google-ai"
 }
 /**
  * Supported Models for Amazon Bedrock
@@ -36,10 +37,19 @@ export declare enum VertexModels {
     CLAUDE_4_0_SONNET = "claude-sonnet-4@20250514",
     GEMINI_2_5_FLASH = "gemini-2.5-flash-preview-05-20"
 }
+/**
+ * Supported Models for Google AI Studio
+ */
+export declare enum GoogleAIModels {
+    GEMINI_1_5_PRO_LATEST = "gemini-1.5-pro-latest",
+    GEMINI_1_5_FLASH_LATEST = "gemini-1.5-flash-latest",
+    GEMINI_2_0_FLASH_EXP = "gemini-2.0-flash-exp",
+    GEMINI_1_0_PRO = "gemini-1.0-pro"
+}
 /**
  * Union type of all supported model names
  */
-export type SupportedModelName = BedrockModels | OpenAIModels | VertexModels;
+export type SupportedModelName = BedrockModels | OpenAIModels | VertexModels | GoogleAIModels;
 /**
  * Provider configuration specifying provider and its available models
  */
package/dist/core/types.js
CHANGED
@@ -8,6 +8,7 @@ export var AIProviderName;
     AIProviderName["VERTEX"] = "vertex";
     AIProviderName["ANTHROPIC"] = "anthropic";
     AIProviderName["AZURE"] = "azure";
+    AIProviderName["GOOGLE_AI"] = "google-ai";
 })(AIProviderName || (AIProviderName = {}));
 /**
  * Supported Models for Amazon Bedrock
@@ -38,6 +39,16 @@ export var VertexModels;
     VertexModels["CLAUDE_4_0_SONNET"] = "claude-sonnet-4@20250514";
     VertexModels["GEMINI_2_5_FLASH"] = "gemini-2.5-flash-preview-05-20";
 })(VertexModels || (VertexModels = {}));
+/**
+ * Supported Models for Google AI Studio
+ */
+export var GoogleAIModels;
+(function (GoogleAIModels) {
+    GoogleAIModels["GEMINI_1_5_PRO_LATEST"] = "gemini-1.5-pro-latest";
+    GoogleAIModels["GEMINI_1_5_FLASH_LATEST"] = "gemini-1.5-flash-latest";
+    GoogleAIModels["GEMINI_2_0_FLASH_EXP"] = "gemini-2.0-flash-exp";
+    GoogleAIModels["GEMINI_1_0_PRO"] = "gemini-1.0-pro";
+})(GoogleAIModels || (GoogleAIModels = {}));
 /**
  * Default provider configurations
  */
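A minimal usage sketch of the new enum values, assuming they are re-exported from the package entry point (this diff only shows dist/core/types.js; the string literals are taken from the declarations above):

import { AIProviderName, GoogleAIModels } from '@juspay/neurolink';

// Both enums compile down to the string literals declared above.
console.log(AIProviderName.GOOGLE_AI); // "google-ai"
console.log(GoogleAIModels.GEMINI_1_5_FLASH_LATEST); // "gemini-1.5-flash-latest"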