@juspay/neurolink 3.0.0 → 3.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/dist/cli/index.js +12 -2
- package/dist/lib/neurolink.d.ts +2 -0
- package/dist/lib/neurolink.js +5 -3
- package/dist/neurolink.d.ts +2 -0
- package/dist/neurolink.js +5 -3
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,3 +1,10 @@
+## [3.0.1](https://github.com/juspay/neurolink/compare/v3.0.0...v3.0.1) (2025-07-01)
+
+
+### Bug Fixes
+
+* **cli:** honor --model parameter in CLI commands ([467ea85](https://github.com/juspay/neurolink/commit/467ea8548688a9db6046c98dbfd268ecd297605c))
+
 # [3.0.0](https://github.com/juspay/neurolink/compare/v2.1.0...v3.0.0) (2025-07-01)
 
 
package/dist/cli/index.js
CHANGED
@@ -256,6 +256,10 @@ const cli = yargs(args)
 type: "string",
 default: "30s",
 description: "Timeout for the request (e.g., 30s, 2m, 1h, 5000)",
+})
+.option("model", {
+type: "string",
+description: "Specific model to use (e.g. gemini-2.5-pro, gemini-2.5-flash)",
 })
 .option("disable-tools", {
 type: "boolean",
@@ -296,6 +300,7 @@ const cli = yargs(args)
 provider: argv.provider === "auto"
 ? undefined
 : argv.provider,
+model: argv.model,
 temperature: argv.temperature,
 maxTokens: argv.maxTokens,
 systemPrompt: argv.system,
@@ -318,7 +323,7 @@ const cli = yargs(args)
 })();
 const agentProvider = new AgentEnhancedProvider({
 provider: supportedProvider,
-model:
+model: argv.model, // Use specified model or default
 toolCategory: "all", // Enable all tool categories
 });
 generatePromise = agentProvider.generateText(argv.prompt);
@@ -450,6 +455,10 @@ const cli = yargs(args)
 type: "string",
 default: "2m",
 description: "Timeout for streaming (e.g., 30s, 2m, 1h)",
+})
+.option("model", {
+type: "string",
+description: "Specific model to use (e.g., gemini-1.5-pro-latest, gemini-2.0-flash-exp)",
 })
 .option("debug", {
 type: "boolean",
@@ -481,6 +490,7 @@ const cli = yargs(args)
 provider: argv.provider === "auto"
 ? undefined
 : argv.provider,
+model: argv.model,
 temperature: argv.temperature,
 timeout: argv.timeout,
 });
@@ -501,7 +511,7 @@ const cli = yargs(args)
 })();
 const agentProvider = new AgentEnhancedProvider({
 provider: supportedProvider,
-model:
+model: argv.model, // Use specified model or default
 toolCategory: "all", // Enable all tool categories
 });
 // Note: AgentEnhancedProvider doesn't support streaming with tools yet
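
The CLI change above follows the standard yargs pattern: a new --model string option is declared, and its parsed value (argv.model) is forwarded into the provider configuration. A minimal sketch of that pattern, assuming yargs as in the diff; the hideBin helper and the providerOptions object are illustrative assumptions, not code from the package:

// Sketch only: mirrors the yargs pattern added above; not the package's actual CLI code.
import yargs from "yargs";
import { hideBin } from "yargs/helpers";

const argv = await yargs(hideBin(process.argv))
  .option("provider", { type: "string", default: "auto" })
  .option("model", {
    type: "string",
    description: "Specific model to use (e.g. gemini-2.5-pro, gemini-2.5-flash)",
  })
  .parseAsync();

// The parsed value is forwarded untouched, so omitting --model keeps the
// provider's default model.
const providerOptions = {
  provider: argv.provider === "auto" ? undefined : argv.provider,
  model: argv.model,
};
console.log(providerOptions);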
package/dist/lib/neurolink.d.ts
CHANGED
@@ -8,6 +8,7 @@ import type { AIProviderName } from "./core/types.js";
 export interface TextGenerationOptions {
 prompt: string;
 provider?: "openai" | "bedrock" | "vertex" | "anthropic" | "azure" | "google-ai" | "huggingface" | "ollama" | "mistral" | "auto";
+model?: string;
 temperature?: number;
 maxTokens?: number;
 systemPrompt?: string;
@@ -18,6 +19,7 @@ export interface TextGenerationOptions {
 export interface StreamTextOptions {
 prompt: string;
 provider?: "openai" | "bedrock" | "vertex" | "anthropic" | "azure" | "google-ai" | "huggingface" | "ollama" | "mistral" | "auto";
+model?: string;
 temperature?: number;
 maxTokens?: number;
 systemPrompt?: string;
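
The new optional model field lets library callers pin a specific model in addition to choosing a provider. A minimal usage sketch, assuming the NeuroLink class exposes a generateText method that accepts TextGenerationOptions as declared above; the prompt and model strings are illustrative:

// Sketch only: field names come from TextGenerationOptions above; the
// generateText method signature and the model string are assumptions.
import { NeuroLink } from "@juspay/neurolink";

const neurolink = new NeuroLink();

const result = await neurolink.generateText({
  prompt: "Summarize the 3.0.1 release in one sentence.",
  provider: "google-ai",
  model: "gemini-2.5-flash", // optional; the provider default is used when omitted
  temperature: 0.7,
  maxTokens: 256,
});

console.log(result);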
package/dist/lib/neurolink.js
CHANGED
@@ -127,7 +127,7 @@ export class NeuroLink {
 // Create tool-aware system prompt
 const enhancedSystemPrompt = this.createToolAwareSystemPrompt(options.systemPrompt, availableTools);
 // Create provider with MCP enabled using best provider function
-const provider = await AIProviderFactory.createBestProvider(providerName,
+const provider = await AIProviderFactory.createBestProvider(providerName, options.model, true);
 // Generate text with automatic tool detection
 const result = await provider.generateText({
 prompt: options.prompt,
@@ -217,9 +217,10 @@ export class NeuroLink {
 logger.debug(`[${functionTag}] Attempting provider`, {
 provider: providerName,
 });
-const provider = await AIProviderFactory.createProvider(providerName);
+const provider = await AIProviderFactory.createProvider(providerName, options.model);
 const result = await provider.generateText({
 prompt: options.prompt,
+model: options.model,
 temperature: options.temperature,
 maxTokens: options.maxTokens,
 systemPrompt: options.systemPrompt,
@@ -343,9 +344,10 @@ Note: Tool integration is currently in development. Please provide helpful respo
 logger.debug(`[${functionTag}] Attempting provider`, {
 provider: providerName,
 });
-const provider = await AIProviderFactory.createProvider(providerName);
+const provider = await AIProviderFactory.createProvider(providerName, options.model);
 const result = await provider.streamText({
 prompt: options.prompt,
+model: options.model,
 temperature: options.temperature,
 maxTokens: options.maxTokens,
 systemPrompt: options.systemPrompt,
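
Both the generate and stream paths now thread options.model into AIProviderFactory.createProvider and into the per-request options, so a caller-supplied model is honored for streaming as well. A hedged sketch of that usage, assuming NeuroLink exposes a streamText method taking StreamTextOptions and that its result can be iterated as text chunks (the return shape is an assumption, not shown in the diff):

// Sketch only: streamText and its async-iterable result are assumptions based
// on StreamTextOptions and the provider.streamText call in the diff above.
import { NeuroLink } from "@juspay/neurolink";

const neurolink = new NeuroLink();

const stream = await neurolink.streamText({
  prompt: "Write a haiku about model selection.",
  provider: "google-ai",
  model: "gemini-2.5-flash", // forwarded to createProvider(providerName, options.model)
});

// Assumed consumption pattern: print chunks as they arrive.
for await (const chunk of stream) {
  process.stdout.write(String(chunk));
}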
package/dist/neurolink.d.ts
CHANGED
@@ -8,6 +8,7 @@ import type { AIProviderName } from "./core/types.js";
 export interface TextGenerationOptions {
 prompt: string;
 provider?: "openai" | "bedrock" | "vertex" | "anthropic" | "azure" | "google-ai" | "huggingface" | "ollama" | "mistral" | "auto";
+model?: string;
 temperature?: number;
 maxTokens?: number;
 systemPrompt?: string;
@@ -18,6 +19,7 @@ export interface TextGenerationOptions {
 export interface StreamTextOptions {
 prompt: string;
 provider?: "openai" | "bedrock" | "vertex" | "anthropic" | "azure" | "google-ai" | "huggingface" | "ollama" | "mistral" | "auto";
+model?: string;
 temperature?: number;
 maxTokens?: number;
 systemPrompt?: string;
package/dist/neurolink.js
CHANGED
@@ -127,7 +127,7 @@ export class NeuroLink {
 // Create tool-aware system prompt
 const enhancedSystemPrompt = this.createToolAwareSystemPrompt(options.systemPrompt, availableTools);
 // Create provider with MCP enabled using best provider function
-const provider = await AIProviderFactory.createBestProvider(providerName,
+const provider = await AIProviderFactory.createBestProvider(providerName, options.model, true);
 // Generate text with automatic tool detection
 const result = await provider.generateText({
 prompt: options.prompt,
@@ -217,9 +217,10 @@ export class NeuroLink {
 logger.debug(`[${functionTag}] Attempting provider`, {
 provider: providerName,
 });
-const provider = await AIProviderFactory.createProvider(providerName);
+const provider = await AIProviderFactory.createProvider(providerName, options.model);
 const result = await provider.generateText({
 prompt: options.prompt,
+model: options.model,
 temperature: options.temperature,
 maxTokens: options.maxTokens,
 systemPrompt: options.systemPrompt,
@@ -343,9 +344,10 @@ Note: Tool integration is currently in development. Please provide helpful respo
 logger.debug(`[${functionTag}] Attempting provider`, {
 provider: providerName,
 });
-const provider = await AIProviderFactory.createProvider(providerName);
+const provider = await AIProviderFactory.createProvider(providerName, options.model);
 const result = await provider.streamText({
 prompt: options.prompt,
+model: options.model,
 temperature: options.temperature,
 maxTokens: options.maxTokens,
 systemPrompt: options.systemPrompt,
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@juspay/neurolink",
-"version": "3.0.0",
+"version": "3.0.1",
 "description": "Universal AI Development Platform with working MCP integration, multi-provider support, and professional CLI. Built-in tools operational, 58+ external MCP servers discoverable. Connect to filesystem, GitHub, database operations, and more. Build, test, and deploy AI applications with 9 major providers: OpenAI, Anthropic, Google AI, AWS Bedrock, Azure, Hugging Face, Ollama, and Mistral AI.",
 "author": {
 "name": "Juspay Technologies",