@juspay/neurolink 7.11.0 → 7.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/README.md +15 -0
- package/dist/config/conversationMemoryConfig.d.ts +27 -0
- package/dist/config/conversationMemoryConfig.js +39 -0
- package/dist/core/baseProvider.js +4 -2
- package/dist/core/conversationMemoryManager.d.ts +41 -0
- package/dist/core/conversationMemoryManager.js +152 -0
- package/dist/core/types.d.ts +2 -0
- package/dist/lib/config/conversationMemoryConfig.d.ts +27 -0
- package/dist/lib/config/conversationMemoryConfig.js +39 -0
- package/dist/lib/core/baseProvider.js +4 -2
- package/dist/lib/core/conversationMemoryManager.d.ts +41 -0
- package/dist/lib/core/conversationMemoryManager.js +152 -0
- package/dist/lib/core/types.d.ts +2 -0
- package/dist/lib/neurolink.d.ts +22 -4
- package/dist/lib/neurolink.js +67 -5
- package/dist/lib/providers/amazonBedrock.js +4 -2
- package/dist/lib/providers/anthropic.js +4 -2
- package/dist/lib/providers/azureOpenai.js +4 -2
- package/dist/lib/providers/googleAiStudio.js +4 -2
- package/dist/lib/providers/googleVertex.js +4 -2
- package/dist/lib/providers/huggingFace.js +4 -2
- package/dist/lib/providers/litellm.js +4 -2
- package/dist/lib/providers/mistral.js +3 -2
- package/dist/lib/providers/openAI.js +4 -2
- package/dist/lib/types/conversationTypes.d.ts +95 -0
- package/dist/lib/types/conversationTypes.js +17 -0
- package/dist/lib/types/streamTypes.d.ts +2 -0
- package/dist/lib/utils/conversationMemoryUtils.d.ts +22 -0
- package/dist/lib/utils/conversationMemoryUtils.js +77 -0
- package/dist/lib/utils/messageBuilder.d.ts +13 -0
- package/dist/lib/utils/messageBuilder.js +48 -0
- package/dist/neurolink.d.ts +22 -4
- package/dist/neurolink.js +67 -5
- package/dist/providers/amazonBedrock.js +4 -2
- package/dist/providers/anthropic.js +4 -2
- package/dist/providers/azureOpenai.js +4 -2
- package/dist/providers/googleAiStudio.js +4 -2
- package/dist/providers/googleVertex.js +4 -2
- package/dist/providers/huggingFace.js +4 -2
- package/dist/providers/litellm.js +4 -2
- package/dist/providers/mistral.js +3 -2
- package/dist/providers/openAI.js +4 -2
- package/dist/types/conversationTypes.d.ts +95 -0
- package/dist/types/conversationTypes.js +17 -0
- package/dist/types/streamTypes.d.ts +2 -0
- package/dist/utils/conversationMemoryUtils.d.ts +22 -0
- package/dist/utils/conversationMemoryUtils.js +77 -0
- package/dist/utils/messageBuilder.d.ts +13 -0
- package/dist/utils/messageBuilder.js +48 -0
- package/package.json +1 -1
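
Taken together, the new files add an opt-in conversation memory layer: configuration defaults (conversationMemoryConfig), a per-session store (conversationMemoryManager), shared types (conversationTypes), and helpers that merge configuration and assemble provider message arrays (conversationMemoryUtils, messageBuilder). As rough orientation, the configuration shape below is inferred from the fields read by applyConversationMemoryDefaults() in the first hunk; the values are placeholders, not the package's actual defaults.

```js
// Illustrative only: the field names come from applyConversationMemoryDefaults()
// in the hunk below; the values are placeholders, not the package's real defaults.
const conversationMemoryConfig = {
  enabled: true,
  maxSessions: 100,
  maxTurnsPerSession: 20,
};
```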
@@ -0,0 +1,77 @@
+/**
+ * Conversation Memory Utilities
+ * Handles configuration merging and conversation memory operations
+ */
+import { getConversationMemoryDefaults } from "../config/conversationMemoryConfig.js";
+import { logger } from "./logger.js";
+/**
+ * Apply conversation memory defaults to user configuration
+ * Merges user config with environment variables and default values
+ */
+export function applyConversationMemoryDefaults(userConfig) {
+    const defaults = getConversationMemoryDefaults();
+    return {
+        enabled: userConfig?.enabled ?? defaults.enabled,
+        maxSessions: userConfig?.maxSessions ?? defaults.maxSessions,
+        maxTurnsPerSession: userConfig?.maxTurnsPerSession ?? defaults.maxTurnsPerSession,
+    };
+}
+/**
+ * Get conversation history as message array (PREFERRED METHOD)
+ * Returns proper message array format for AI providers
+ */
+export async function getConversationMessages(conversationMemory, options) {
+    if (!conversationMemory || !options.context) {
+        return [];
+    }
+    const sessionId = options.context?.sessionId;
+    if (typeof sessionId !== "string" || !sessionId) {
+        return [];
+    }
+    try {
+        const messages = conversationMemory.buildContextMessages(sessionId);
+        logger.debug("Conversation messages retrieved", {
+            sessionId,
+            messageCount: messages.length,
+        });
+        return messages;
+    }
+    catch (error) {
+        logger.warn("Failed to get conversation messages", {
+            sessionId,
+            error: error instanceof Error ? error.message : String(error),
+        });
+        return [];
+    }
+}
+/**
+ * Store conversation turn for future context
+ * Saves user messages and AI responses for conversation memory
+ */
+export async function storeConversationTurn(conversationMemory, originalOptions, result) {
+    if (!conversationMemory || !originalOptions.context) {
+        return;
+    }
+    const context = originalOptions.context;
+    const sessionId = context.sessionId;
+    const userId = typeof context.userId === "string" ? context.userId : undefined;
+    if (typeof sessionId !== "string" || !sessionId) {
+        return;
+    }
+    try {
+        await conversationMemory.storeConversationTurn(sessionId, userId, originalOptions.prompt || "", result.content);
+        logger.debug("Conversation turn stored", {
+            sessionId,
+            userId,
+            promptLength: originalOptions.prompt?.length || 0,
+            responseLength: result.content.length,
+        });
+    }
+    catch (error) {
+        logger.warn("Failed to store conversation turn", {
+            sessionId,
+            userId,
+            error: error instanceof Error ? error.message : String(error),
+        });
+    }
+}
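
For orientation, a hypothetical usage sketch of the helpers above. The deep import path, the in-memory stub, and the option/result shapes are assumptions for illustration; the stub only mirrors the two calls visible in the hunk (buildContextMessages and storeConversationTurn).

```js
// Hypothetical sketch, not part of the release: the import path and the stub
// below are assumptions; the package ships its own ConversationMemoryManager.
import {
  applyConversationMemoryDefaults,
  getConversationMessages,
  storeConversationTurn,
} from "@juspay/neurolink/dist/utils/conversationMemoryUtils.js";

// Merge a partial user config with package defaults (fields from the hunk above).
const memoryConfig = applyConversationMemoryDefaults({ enabled: true, maxSessions: 50 });
console.log(memoryConfig); // { enabled, maxSessions, maxTurnsPerSession }

// Minimal stand-in exposing the two methods the helpers call.
const memory = {
  buildContextMessages: (sessionId) => [],
  storeConversationTurn: async (sessionId, userId, prompt, response) => {},
};

const options = {
  prompt: "What did I ask before?",
  context: { sessionId: "s-1", userId: "u-1" },
};

// Prior turns for the session (empty array when there are none or on error).
const history = await getConversationMessages(memory, options);
console.log(history.length);

// Persist the latest prompt/response pair for future context.
await storeConversationTurn(memory, options, {
  content: "You asked about conversation memory.",
});
```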
@@ -0,0 +1,13 @@
+/**
+ * Message Builder Utility
+ * Centralized logic for building message arrays from TextGenerationOptions
+ */
+import type { ChatMessage } from "../types/conversationTypes.js";
+import type { TextGenerationOptions } from "../core/types.js";
+import type { StreamOptions } from "../types/streamTypes.js";
+/**
+ * Build a properly formatted message array for AI providers
+ * Combines system prompt, conversation history, and current user prompt
+ * Supports both TextGenerationOptions and StreamOptions
+ */
+export declare function buildMessagesArray(options: TextGenerationOptions | StreamOptions): ChatMessage[];
@@ -0,0 +1,48 @@
+/**
+ * Message Builder Utility
+ * Centralized logic for building message arrays from TextGenerationOptions
+ */
+import { CONVERSATION_INSTRUCTIONS } from "../config/conversationMemoryConfig.js";
+/**
+ * Build a properly formatted message array for AI providers
+ * Combines system prompt, conversation history, and current user prompt
+ * Supports both TextGenerationOptions and StreamOptions
+ */
+export function buildMessagesArray(options) {
+    const messages = [];
+    // Check if conversation history exists
+    const hasConversationHistory = options.conversationMessages && options.conversationMessages.length > 0;
+    // Build enhanced system prompt
+    let systemPrompt = options.systemPrompt?.trim() || "";
+    // Add conversation-aware instructions when history exists
+    if (hasConversationHistory) {
+        systemPrompt = `${systemPrompt.trim()}${CONVERSATION_INSTRUCTIONS}`;
+    }
+    // Add system message if we have one
+    if (systemPrompt.trim()) {
+        messages.push({
+            role: "system",
+            content: systemPrompt.trim(),
+        });
+    }
+    // Add conversation history if available
+    if (hasConversationHistory && options.conversationMessages) {
+        messages.push(...options.conversationMessages);
+    }
+    // Add current user prompt (required)
+    // Handle both TextGenerationOptions (prompt field) and StreamOptions (input.text field)
+    let currentPrompt;
+    if ("prompt" in options && options.prompt) {
+        currentPrompt = options.prompt;
+    }
+    else if ("input" in options && options.input?.text) {
+        currentPrompt = options.input.text;
+    }
+    if (currentPrompt?.trim()) {
+        messages.push({
+            role: "user",
+            content: currentPrompt.trim(),
+        });
+    }
+    return messages;
+}
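
A hypothetical call illustrating the ordering buildMessagesArray produces (system prompt, then history, then the current user prompt). The import path is an assumption; only the fields the function reads above are supplied.

```js
// Hypothetical sketch: the deep import path is an assumption; the option
// object supplies only the fields buildMessagesArray reads above.
import { buildMessagesArray } from "@juspay/neurolink/dist/utils/messageBuilder.js";

const messages = buildMessagesArray({
  systemPrompt: "You are a helpful assistant.",
  conversationMessages: [
    { role: "user", content: "My name is Asha." },
    { role: "assistant", content: "Nice to meet you, Asha." },
  ],
  prompt: "What is my name?",
});

// System prompt (with CONVERSATION_INSTRUCTIONS appended because history is
// present), then the two history messages, then the current user prompt.
console.log(messages.map((m) => m.role)); // ["system", "user", "assistant", "user"]
```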
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@juspay/neurolink",
-  "version": "7.11.0",
+  "version": "7.12.0",
   "description": "Universal AI Development Platform with working MCP integration, multi-provider support, and professional CLI. Built-in tools operational, 58+ external MCP servers discoverable. Connect to filesystem, GitHub, database operations, and more. Build, test, and deploy AI applications with 9 major providers: OpenAI, Anthropic, Google AI, AWS Bedrock, Azure, Hugging Face, Ollama, and Mistral AI.",
   "author": {
     "name": "Juspay Technologies",