@juspay/neurolink 7.22.0 → 7.23.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/README.md +5 -47
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
|
@@ -1,3 +1,9 @@
|
|
|
1
|
+
## [7.23.0](https://github.com/juspay/neurolink/compare/v7.22.0...v7.23.0) (2025-08-19)
|
|
2
|
+
|
|
3
|
+
### Features
|
|
4
|
+
|
|
5
|
+
- **(docs):** modernize api examples ([c77706b](https://github.com/juspay/neurolink/commit/c77706b427c2ea781269c6d0c2dc7ca2511128cb))
|
|
6
|
+
|
|
1
7
|
## [7.22.0](https://github.com/juspay/neurolink/compare/v7.21.0...v7.22.0) (2025-08-19)
|
|
2
8
|
|
|
3
9
|
### Features
|
package/README.md
CHANGED
|
@@ -146,32 +146,7 @@ const { NeuroLink } = require('@juspay/neurolink');
|
|
|
146
146
|
### Basic Usage
|
|
147
147
|
|
|
148
148
|
```typescript
|
|
149
|
-
import { NeuroLink, AIProviderFactory } from "@juspay/neurolink";
|
|
150
|
-
|
|
151
|
-
// LiteLLM - Access 100+ models through unified interface
|
|
152
|
-
const litellmProvider = await AIProviderFactory.createProvider(
|
|
153
|
-
"litellm",
|
|
154
|
-
"openai/gpt-4o",
|
|
155
|
-
);
|
|
156
|
-
const result = await litellmProvider.generate({
|
|
157
|
-
input: { text: "Write a haiku about programming" },
|
|
158
|
-
});
|
|
159
|
-
|
|
160
|
-
// Compare multiple models simultaneously
|
|
161
|
-
const models = [
|
|
162
|
-
"openai/gpt-4o",
|
|
163
|
-
"anthropic/claude-3-5-sonnet",
|
|
164
|
-
"google/gemini-2.0-flash",
|
|
165
|
-
];
|
|
166
|
-
const comparisons = await Promise.all(
|
|
167
|
-
models.map(async (model) => {
|
|
168
|
-
const provider = await AIProviderFactory.createProvider("litellm", model);
|
|
169
|
-
const result = await provider.generate({
|
|
170
|
-
input: { text: "Explain quantum computing" },
|
|
171
|
-
});
|
|
172
|
-
return { model, response: result.content, provider: result.provider };
|
|
173
|
-
}),
|
|
174
|
-
);
|
|
149
|
+
import { NeuroLink } from "@juspay/neurolink";
|
|
175
150
|
|
|
176
151
|
// Auto-select best available provider
|
|
177
152
|
const neurolink = new NeuroLink();
|
|
@@ -181,8 +156,8 @@ const autoResult = await neurolink.generate({
|
|
|
181
156
|
timeout: "30s",
|
|
182
157
|
});
|
|
183
158
|
|
|
184
|
-
console.log(
|
|
185
|
-
console.log(`Used: ${
|
|
159
|
+
console.log(autoResult.content);
|
|
160
|
+
console.log(`Used: ${autoResult.provider}`);
|
|
186
161
|
```
|
|
187
162
|
|
|
188
163
|
### Conversation Memory
|
|
@@ -241,27 +216,10 @@ npx @juspay/neurolink generate "Write a proposal" --enable-analytics --enable-ev
|
|
|
241
216
|
npx @juspay/neurolink stream "What time is it and write a file with the current date"
|
|
242
217
|
```
|
|
243
218
|
|
|
244
|
-
#### SDK
|
|
219
|
+
#### SDK and Enhancement Features
|
|
245
220
|
|
|
246
221
|
```typescript
|
|
247
|
-
import { NeuroLink, AIProviderFactory } from "@juspay/neurolink";
|
|
248
|
-
|
|
249
|
-
// LiteLLM multi-model comparison
|
|
250
|
-
const models = [
|
|
251
|
-
"openai/gpt-4o",
|
|
252
|
-
"anthropic/claude-3-5-sonnet",
|
|
253
|
-
"google/gemini-2.0-flash",
|
|
254
|
-
];
|
|
255
|
-
const comparisons = await Promise.all(
|
|
256
|
-
models.map(async (model) => {
|
|
257
|
-
const provider = await AIProviderFactory.createProvider("litellm", model);
|
|
258
|
-
return await provider.generate({
|
|
259
|
-
input: { text: "Explain the benefits of renewable energy" },
|
|
260
|
-
enableAnalytics: true,
|
|
261
|
-
enableEvaluation: true,
|
|
262
|
-
});
|
|
263
|
-
}),
|
|
264
|
-
);
|
|
222
|
+
import { NeuroLink } from "@juspay/neurolink";
|
|
265
223
|
|
|
266
224
|
// Enhanced generation with analytics
|
|
267
225
|
const neurolink = new NeuroLink();
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@juspay/neurolink",
|
|
3
|
-
"version": "7.
|
|
3
|
+
"version": "7.23.0",
|
|
4
4
|
"description": "Universal AI Development Platform with working MCP integration, multi-provider support, and professional CLI. Built-in tools operational, 58+ external MCP servers discoverable. Connect to filesystem, GitHub, database operations, and more. Build, test, and deploy AI applications with 9 major providers: OpenAI, Anthropic, Google AI, AWS Bedrock, Azure, Hugging Face, Ollama, and Mistral AI.",
|
|
5
5
|
"author": {
|
|
6
6
|
"name": "Juspay Technologies",
|