@juspay/neurolink 7.37.1 → 7.38.1
This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/core/baseProvider.d.ts +4 -0
- package/dist/core/baseProvider.js +40 -0
- package/dist/core/redisConversationMemoryManager.d.ts +98 -15
- package/dist/core/redisConversationMemoryManager.js +665 -203
- package/dist/lib/core/baseProvider.d.ts +4 -0
- package/dist/lib/core/baseProvider.js +40 -0
- package/dist/lib/core/redisConversationMemoryManager.d.ts +98 -15
- package/dist/lib/core/redisConversationMemoryManager.js +665 -203
- package/dist/lib/neurolink.d.ts +33 -1
- package/dist/lib/neurolink.js +64 -0
- package/dist/lib/providers/anthropic.js +8 -0
- package/dist/lib/providers/anthropicBaseProvider.js +8 -0
- package/dist/lib/providers/azureOpenai.js +8 -0
- package/dist/lib/providers/googleAiStudio.js +8 -0
- package/dist/lib/providers/googleVertex.js +10 -0
- package/dist/lib/providers/huggingFace.js +8 -0
- package/dist/lib/providers/litellm.js +8 -0
- package/dist/lib/providers/mistral.js +8 -0
- package/dist/lib/providers/openAI.js +12 -2
- package/dist/lib/providers/openaiCompatible.js +8 -0
- package/dist/lib/types/conversation.d.ts +52 -2
- package/dist/lib/utils/conversationMemory.js +3 -1
- package/dist/lib/utils/messageBuilder.d.ts +10 -2
- package/dist/lib/utils/messageBuilder.js +22 -1
- package/dist/lib/utils/redis.d.ts +10 -6
- package/dist/lib/utils/redis.js +71 -70
- package/dist/neurolink.d.ts +33 -1
- package/dist/neurolink.js +64 -0
- package/dist/providers/anthropic.js +8 -0
- package/dist/providers/anthropicBaseProvider.js +8 -0
- package/dist/providers/azureOpenai.js +8 -0
- package/dist/providers/googleAiStudio.js +8 -0
- package/dist/providers/googleVertex.js +10 -0
- package/dist/providers/huggingFace.js +8 -0
- package/dist/providers/litellm.js +8 -0
- package/dist/providers/mistral.js +8 -0
- package/dist/providers/openAI.js +12 -2
- package/dist/providers/openaiCompatible.js +8 -0
- package/dist/types/conversation.d.ts +52 -2
- package/dist/utils/conversationMemory.js +3 -1
- package/dist/utils/messageBuilder.d.ts +10 -2
- package/dist/utils/messageBuilder.js +22 -1
- package/dist/utils/redis.d.ts +10 -6
- package/dist/utils/redis.js +71 -70
- package/package.json +2 -2
package/dist/lib/neurolink.d.ts
CHANGED
@@ -14,6 +14,8 @@ import type { NeuroLinkEvents, TypedEventEmitter, ToolExecutionContext, ToolExec
 import type { JsonObject } from "./types/common.js";
 import type { BatchOperationResult } from "./types/typeAliases.js";
 import type { ConversationMemoryConfig, ChatMessage } from "./types/conversation.js";
+import { ConversationMemoryManager } from "./core/conversationMemoryManager.js";
+import { RedisConversationMemoryManager } from "./core/redisConversationMemoryManager.js";
 import type { ExternalMCPServerInstance, ExternalMCPOperationResult, ExternalMCPToolInfo } from "./types/externalMcp.js";
 export interface ProviderStatus {
     provider: string;
@@ -62,7 +64,7 @@ export declare class NeuroLink {
      * @param error - The error if execution failed (optional)
      */
     private emitToolEndEvent;
-
+    conversationMemory?: ConversationMemoryManager | RedisConversationMemoryManager | null;
     private conversationMemoryNeedsInit;
     private conversationMemoryConfig?;
     private enableOrchestration;
@@ -848,6 +850,12 @@ export declare class NeuroLink {
             recommendations: string[];
         }>;
     }>;
+    /**
+     * Initialize conversation memory if enabled (public method for explicit initialization)
+     * This is useful for testing or when you want to ensure conversation memory is ready
+     * @returns Promise resolving to true if initialization was successful, false otherwise
+     */
+    ensureConversationMemoryInitialized(): Promise<boolean>;
     /**
      * Get conversation memory statistics (public API)
      */
@@ -866,6 +874,30 @@ export declare class NeuroLink {
     * Clear all conversation history (public API)
     */
    clearAllConversations(): Promise<void>;
+    /**
+     * Store tool executions in conversation memory if enabled and Redis is configured
+     * @param sessionId - Session identifier
+     * @param userId - User identifier (optional)
+     * @param toolCalls - Array of tool calls
+     * @param toolResults - Array of tool results
+     * @returns Promise resolving when storage is complete
+     */
+    storeToolExecutions(sessionId: string, userId: string | undefined, toolCalls: Array<{
+        toolCallId?: string;
+        toolName?: string;
+        args?: Record<string, unknown>;
+        [key: string]: unknown;
+    }>, toolResults: Array<{
+        toolCallId?: string;
+        result?: unknown;
+        error?: string;
+        [key: string]: unknown;
+    }>): Promise<void>;
+    /**
+     * Check if tool execution storage is available
+     * @returns boolean indicating if Redis storage is configured and available
+     */
+    isToolExecutionStorageAvailable(): boolean;
    /**
     * Add an external MCP server
     * Automatically discovers and registers tools from the server
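
The declarations above add three public conversation-memory methods to NeuroLink. A minimal usage sketch, assuming the class is importable from the package root and the instance is already configured for Redis-backed conversation memory (constructor options are not part of this diff):

```typescript
import { NeuroLink } from "@juspay/neurolink";

async function persistToolActivity(neurolink: NeuroLink, sessionId: string): Promise<void> {
  // Explicit initialization is optional in normal flows but handy in tests.
  const ready = await neurolink.ensureConversationMemoryInitialized();

  if (ready && neurolink.isToolExecutionStorageAvailable()) {
    // Argument shapes follow the declared parameter types; the values are illustrative.
    await neurolink.storeToolExecutions(
      sessionId,
      undefined, // userId is optional
      [{ toolCallId: "call-1", toolName: "calculator", args: { expression: "2+2" } }],
      [{ toolCallId: "call-1", result: 4 }],
    );
  }
}
```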
package/dist/lib/neurolink.js
CHANGED
@@ -3232,6 +3232,24 @@ export class NeuroLink {
     // ============================================================================
     // CONVERSATION MEMORY PUBLIC API
     // ============================================================================
+    /**
+     * Initialize conversation memory if enabled (public method for explicit initialization)
+     * This is useful for testing or when you want to ensure conversation memory is ready
+     * @returns Promise resolving to true if initialization was successful, false otherwise
+     */
+    async ensureConversationMemoryInitialized() {
+        try {
+            const initId = `manual-init-${Date.now()}`;
+            await this.initializeConversationMemoryForGeneration(initId, Date.now(), process.hrtime.bigint());
+            return !!this.conversationMemory;
+        }
+        catch (error) {
+            logger.error("Failed to initialize conversation memory", {
+                error: error instanceof Error ? error.message : String(error),
+            });
+            return false;
+        }
+    }
     /**
      * Get conversation memory statistics (public API)
      */
@@ -3302,6 +3320,52 @@ export class NeuroLink {
         }
         await this.conversationMemory.clearAllSessions();
     }
+    /**
+     * Store tool executions in conversation memory if enabled and Redis is configured
+     * @param sessionId - Session identifier
+     * @param userId - User identifier (optional)
+     * @param toolCalls - Array of tool calls
+     * @param toolResults - Array of tool results
+     * @returns Promise resolving when storage is complete
+     */
+    async storeToolExecutions(sessionId, userId, toolCalls, toolResults) {
+        // Check if tools are not empty
+        const hasToolData = (toolCalls && toolCalls.length > 0) ||
+            (toolResults && toolResults.length > 0);
+        if (!hasToolData) {
+            logger.debug("Tool execution storage skipped", {
+                hasToolData,
+                toolCallsCount: toolCalls?.length || 0,
+                toolResultsCount: toolResults?.length || 0,
+            });
+            return;
+        }
+        // Type guard to ensure it's Redis conversation memory manager
+        const redisMemory = this
+            .conversationMemory;
+        try {
+            await redisMemory.storeToolExecution(sessionId, userId, toolCalls, toolResults);
+        }
+        catch (error) {
+            logger.warn("Failed to store tool executions", {
+                sessionId,
+                userId,
+                error: error instanceof Error ? error.message : String(error),
+            });
+            // Don't throw - tool storage failures shouldn't break generation
+        }
+    }
+    /**
+     * Check if tool execution storage is available
+     * @returns boolean indicating if Redis storage is configured and available
+     */
+    isToolExecutionStorageAvailable() {
+        const isRedisStorage = process.env.STORAGE_TYPE === "redis";
+        const hasRedisConversationMemory = this.conversationMemory &&
+            this.conversationMemory.constructor.name ===
+                "RedisConversationMemoryManager";
+        return !!(isRedisStorage && hasRedisConversationMemory);
+    }
     // ===== EXTERNAL MCP SERVER METHODS =====
     /**
      * Add an external MCP server
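
As the implementation shows, isToolExecutionStorageAvailable() requires both STORAGE_TYPE=redis and an active RedisConversationMemoryManager, while storeToolExecutions() catches and logs storage failures instead of throwing. A small sketch of that guard; NeuroLink construction details are assumed, not taken from this diff:

```typescript
import { NeuroLink } from "@juspay/neurolink";

// Environment flag checked by isToolExecutionStorageAvailable().
process.env.STORAGE_TYPE = "redis";

const neurolink = new NeuroLink(); // constructor options assumed; not shown in this diff

if (!(await neurolink.ensureConversationMemoryInitialized())) {
  console.warn("Conversation memory did not initialize");
} else if (!neurolink.isToolExecutionStorageAvailable()) {
  // Without the Redis manager, storeToolExecutions() logs a warning and resolves,
  // so generation is never interrupted by persistence problems.
  console.warn("Redis-backed tool execution storage is not active");
}
```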
package/dist/lib/providers/anthropic.js
CHANGED
@@ -103,6 +103,14 @@ export class AnthropicProvider extends BaseProvider {
     maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
     toolChoice: shouldUseTools ? "auto" : "none",
     abortSignal: timeoutController?.controller.signal,
+    onStepFinish: ({ toolCalls, toolResults }) => {
+        this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+            logger.warn("[AnthropicProvider] Failed to store tool executions", {
+                provider: this.providerName,
+                error: error instanceof Error ? error.message : String(error),
+            });
+        });
+    },
 });
 timeoutController?.cleanup();
 const transformedStream = this.createTextStream(result);
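
The same fire-and-forget hook is added to every provider below: onStepFinish forwards the step's tool calls and results to handleToolExecutionStorage (added to BaseProvider per the file list above) and only logs on failure, so persistence errors never interrupt streaming. A standalone sketch of the pattern, with assumed parameter types:

```typescript
type StepData = { toolCalls: unknown[]; toolResults: unknown[] };

function makeOnStepFinish(
  store: (toolCalls: unknown[], toolResults: unknown[]) => Promise<void>,
  providerName: string,
): (step: StepData) => void {
  return ({ toolCalls, toolResults }) => {
    // Fire-and-forget: storage runs in the background and failures are only logged.
    store(toolCalls, toolResults).catch((error) => {
      console.warn(`[${providerName}] Failed to store tool executions`, {
        error: error instanceof Error ? error.message : String(error),
      });
    });
  };
}
```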
package/dist/lib/providers/anthropicBaseProvider.js
CHANGED
@@ -73,6 +73,14 @@ export class AnthropicProviderV2 extends BaseProvider {
     tools: options.tools,
     toolChoice: "auto",
     abortSignal: timeoutController?.controller.signal,
+    onStepFinish: ({ toolCalls, toolResults }) => {
+        this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+            logger.warn("[AnthropicBaseProvider] Failed to store tool executions", {
+                provider: this.providerName,
+                error: error instanceof Error ? error.message : String(error),
+            });
+        });
+    },
 });
 timeoutController?.cleanup();
 // Transform string stream to content object stream (match Google AI pattern)
package/dist/lib/providers/azureOpenai.js
CHANGED
@@ -122,6 +122,14 @@ export class AzureOpenAIProvider extends BaseProvider {
         : {}),
     tools,
     toolChoice: shouldUseTools ? "auto" : "none",
+    onStepFinish: ({ toolCalls, toolResults }) => {
+        this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+            logger.warn("[AzureOpenaiProvider] Failed to store tool executions", {
+                provider: this.providerName,
+                error: error instanceof Error ? error.message : String(error),
+            });
+        });
+    },
     maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
 });
 // Transform string stream to content object stream using BaseProvider method
package/dist/lib/providers/googleAiStudio.js
CHANGED
@@ -101,6 +101,14 @@ export class GoogleAIStudioProvider extends BaseProvider {
     maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
     toolChoice: shouldUseTools ? "auto" : "none",
     abortSignal: timeoutController?.controller.signal,
+    onStepFinish: ({ toolCalls, toolResults }) => {
+        this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+            logger.warn("[GoogleAiStudioProvider] Failed to store tool executions", {
+                provider: this.providerName,
+                error: error instanceof Error ? error.message : String(error),
+            });
+        });
+    },
 });
 timeoutController?.cleanup();
 // Transform string stream to content object stream using BaseProvider method
package/dist/lib/providers/googleVertex.js
CHANGED
@@ -646,6 +646,16 @@ export class GoogleVertexProvider extends BaseProvider {
     onChunk: () => {
         chunkCount++;
     },
+    onStepFinish: ({ toolCalls, toolResults }) => {
+        logger.info("Tool execution completed", { toolResults, toolCalls });
+        // Handle tool execution storage
+        this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+            logger.warn("[GoogleVertexProvider] Failed to store tool executions", {
+                provider: this.providerName,
+                error: error instanceof Error ? error.message : String(error),
+            });
+        });
+    },
 };
 if (analysisSchema) {
     try {
package/dist/lib/providers/huggingFace.js
CHANGED
@@ -123,6 +123,14 @@ export class HuggingFaceProvider extends BaseProvider {
     tools: streamOptions.tools, // Tools format conversion handled by prepareStreamOptions
     toolChoice: streamOptions.toolChoice, // Tool choice handled by prepareStreamOptions
     abortSignal: timeoutController?.controller.signal,
+    onStepFinish: ({ toolCalls, toolResults }) => {
+        this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+            logger.warn("[HuggingFaceProvider] Failed to store tool executions", {
+                provider: this.providerName,
+                error: error instanceof Error ? error.message : String(error),
+            });
+        });
+    },
 });
 timeoutController?.cleanup();
 // Transform stream to match StreamResult interface with enhanced tool call parsing
package/dist/lib/providers/litellm.js
CHANGED
@@ -130,6 +130,14 @@ export class LiteLLMProvider extends BaseProvider {
     tools: options.tools,
     toolChoice: "auto",
     abortSignal: timeoutController?.controller.signal,
+    onStepFinish: ({ toolCalls, toolResults }) => {
+        this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+            logger.warn("LiteLLMProvider] Failed to store tool executions", {
+                provider: this.providerName,
+                error: error instanceof Error ? error.message : String(error),
+            });
+        });
+    },
 });
 timeoutController?.cleanup();
 // Transform stream to match StreamResult interface
package/dist/lib/providers/mistral.js
CHANGED
@@ -59,6 +59,14 @@ export class MistralProvider extends BaseProvider {
     maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
     toolChoice: shouldUseTools ? "auto" : "none",
     abortSignal: timeoutController?.controller.signal,
+    onStepFinish: ({ toolCalls, toolResults }) => {
+        this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+            logger.warn("[MistralProvider] Failed to store tool executions", {
+                provider: this.providerName,
+                error: error instanceof Error ? error.message : String(error),
+            });
+        });
+    },
 });
 timeoutController?.cleanup();
 // Transform string stream to content object stream using BaseProvider method
package/dist/lib/providers/openAI.js
CHANGED
@@ -224,8 +224,8 @@ export class OpenAIProvider extends BaseProvider {
 const allTools = shouldUseTools ? await this.getAllTools() : {};
 // OpenAI-specific fix: Validate tools format and filter out problematic ones
 let tools = this.validateAndFilterToolsForOpenAI(allTools);
-// OpenAI
-const MAX_TOOLS =
+// OpenAI max tools limit - configurable via environment variable
+const MAX_TOOLS = parseInt(process.env.OPENAI_MAX_TOOLS || "150", 10);
 if (Object.keys(tools).length > MAX_TOOLS) {
     logger.warn(`OpenAI: Too many tools (${Object.keys(tools).length}), limiting to ${MAX_TOOLS} tools`);
     const toolEntries = Object.entries(tools);
@@ -272,6 +272,16 @@ export class OpenAIProvider extends BaseProvider {
     maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
     toolChoice: shouldUseTools && Object.keys(tools).length > 0 ? "auto" : "none",
     abortSignal: timeoutController?.controller.signal,
+    onStepFinish: ({ toolCalls, toolResults }) => {
+        logger.info("Tool execution completed", { toolResults, toolCalls });
+        // Handle tool execution storage
+        this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+            logger.warn("[OpenAIProvider] Failed to store tool executions", {
+                provider: this.providerName,
+                error: error instanceof Error ? error.message : String(error),
+            });
+        });
+    },
 });
 timeoutController?.cleanup();
 // Debug the actual result structure
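
The first hunk makes the OpenAI tool cap configurable through the OPENAI_MAX_TOOLS environment variable, defaulting to 150 when unset. For example:

```typescript
// Raise the cap before the provider builds its tool list; parsing mirrors the shipped code.
process.env.OPENAI_MAX_TOOLS = "200";
const MAX_TOOLS = parseInt(process.env.OPENAI_MAX_TOOLS || "150", 10); // -> 200
```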
package/dist/lib/providers/openaiCompatible.js
CHANGED
@@ -166,6 +166,14 @@ export class OpenAICompatibleProvider extends BaseProvider {
     tools: options.tools,
     toolChoice: "auto",
     abortSignal: timeoutController?.controller.signal,
+    onStepFinish: ({ toolCalls, toolResults }) => {
+        this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+            logger.warn("[OpenAiCompatibleProvider] Failed to store tool executions", {
+                provider: this.providerName,
+                error: error instanceof Error ? error.message : String(error),
+            });
+        });
+    },
 });
 timeoutController?.cleanup();
 // Transform stream to match StreamResult interface
package/dist/lib/types/conversation.d.ts
CHANGED
@@ -32,6 +32,8 @@ export interface SessionMemory {
     sessionId: string;
     /** User identifier (optional) */
     userId?: string;
+    /** Auto-generated conversation title (created on first user message) */
+    title?: string;
     /** Direct message storage - ready for immediate AI consumption */
     messages: ChatMessage[];
     /** When this session was created */
@@ -61,10 +63,26 @@ export interface ConversationMemoryStats {
  * Chat message format for conversation history
  */
 export interface ChatMessage {
-    /** Role of the message
-    role: "user" | "assistant" | "system";
+    /** Role/type of the message */
+    role: "user" | "assistant" | "system" | "tool_call" | "tool_result";
     /** Content of the message */
     content: string;
+    /** Message ID (optional) - for new format */
+    id?: string;
+    /** Timestamp (optional) - for new format */
+    timestamp?: string;
+    /** Tool name (optional) - for tool_call/tool_result messages */
+    tool?: string;
+    /** Tool arguments (optional) - for tool_call messages */
+    args?: Record<string, unknown>;
+    /** Tool result (optional) - for tool_result messages */
+    result?: {
+        success?: boolean;
+        expression?: string;
+        result?: unknown;
+        type?: string;
+        error?: string;
+    };
 }
 /**
  * Content format for multimodal messages (used internally)
@@ -129,6 +147,36 @@ export type SessionIdentifier = {
     sessionId: string;
     userId?: string;
 };
+/**
+ * Lightweight session metadata for efficient session listing
+ * Contains only essential information without heavy message arrays
+ */
+export interface SessionMetadata {
+    id: string;
+    title: string;
+    createdAt: string;
+    updatedAt: string;
+}
+/**
+ * New Redis conversation storage object format
+ * Contains conversation metadata and history in a single object
+ */
+export type RedisConversationObject = {
+    /** Unique conversation identifier (UUID v4) */
+    id: string;
+    /** Auto-generated conversation title */
+    title: string;
+    /** Session identifier */
+    sessionId: string;
+    /** User identifier */
+    userId: string;
+    /** When this conversation was first created */
+    createdAt: string;
+    /** When this conversation was last updated */
+    updatedAt: string;
+    /** Array of conversation messages */
+    messages: ChatMessage[];
+};
 /**
  * Redis storage configuration
  */
@@ -143,6 +191,8 @@ export type RedisStorageConfig = {
     db?: number;
     /** Key prefix for Redis keys (default: 'neurolink:conversation:') */
     keyPrefix?: string;
+    /** Key prefix for user sessions mapping (default: derived from keyPrefix) */
+    userSessionsKeyPrefix?: string;
     /** Time-to-live in seconds (default: 86400, 24 hours) */
     ttl?: number;
     /** Additional Redis connection options */
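
Example values for the widened ChatMessage union and the new RedisConversationObject shape. The import path assumes these types are re-exported from the package root; the field values are illustrative.

```typescript
import type { ChatMessage, RedisConversationObject } from "@juspay/neurolink";

const toolCall: ChatMessage = {
  role: "tool_call",
  content: "",
  id: "msg-1",
  timestamp: new Date().toISOString(),
  tool: "calculator",
  args: { expression: "2+2" },
};

const toolResult: ChatMessage = {
  role: "tool_result",
  content: "",
  tool: "calculator",
  result: { success: true, expression: "2+2", result: 4, type: "number" },
};

const conversation: RedisConversationObject = {
  id: "3f7c9e2a-1b4d-4c8e-9f0a-5d6b7c8d9e0f", // example UUID v4, per the type docs
  title: "Quick arithmetic",
  sessionId: "session-1",
  userId: "user-1",
  createdAt: new Date().toISOString(),
  updatedAt: new Date().toISOString(),
  messages: [toolCall, toolResult],
};
```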
package/dist/lib/utils/conversationMemory.js
CHANGED
@@ -32,8 +32,10 @@ export async function getConversationMessages(conversationMemory, options) {
         return [];
     }
     try {
+        // Extract userId from context
+        const userId = options.context?.userId;
         // Remove duplicate summarization logic - it should be handled in ConversationMemoryManager
-        const messages = await conversationMemory.buildContextMessages(sessionId);
+        const messages = await conversationMemory.buildContextMessages(sessionId, userId);
         logger.debug("[conversationMemoryUtils] Conversation messages retrieved successfully", {
             sessionId,
             messageCount: messages.length,
package/dist/lib/utils/messageBuilder.d.ts
CHANGED
@@ -3,18 +3,26 @@
  * Centralized logic for building message arrays from TextGenerationOptions
  * Enhanced with multimodal support for images
  */
-import type {
+import type { MultimodalChatMessage } from "../types/conversation.js";
 import type { TextGenerationOptions } from "../types/index.js";
 import type { StreamOptions } from "../types/streamTypes.js";
 import type { GenerateOptions } from "../types/generateTypes.js";
+/**
+ * Core message type compatible with AI SDK
+ */
+type CoreMessage = {
+    role: "user" | "assistant" | "system";
+    content: string;
+};
 /**
  * Build a properly formatted message array for AI providers
  * Combines system prompt, conversation history, and current user prompt
  * Supports both TextGenerationOptions and StreamOptions
  */
-export declare function buildMessagesArray(options: TextGenerationOptions | StreamOptions):
+export declare function buildMessagesArray(options: TextGenerationOptions | StreamOptions): CoreMessage[];
 /**
  * Build multimodal message array with image support
  * Detects when images are present and routes through provider adapter
  */
 export declare function buildMultimodalMessagesArray(options: GenerateOptions, provider: string, model: string): Promise<MultimodalChatMessage[]>;
+export {};
package/dist/lib/utils/messageBuilder.js
CHANGED
@@ -8,6 +8,21 @@ import { ProviderImageAdapter, MultimodalLogger, } from "../adapters/providerIma
 import { logger } from "./logger.js";
 import { request } from "undici";
 import { readFileSync, existsSync } from "fs";
+/**
+ * Convert ChatMessage to CoreMessage for AI SDK compatibility
+ */
+function toCoreMessage(message) {
+    // Only include messages with roles supported by AI SDK
+    if (message.role === "user" ||
+        message.role === "assistant" ||
+        message.role === "system") {
+        return {
+            role: message.role,
+            content: message.content,
+        };
+    }
+    return null; // Filter out tool_call and tool_result messages
+}
 /**
  * Build a properly formatted message array for AI providers
  * Combines system prompt, conversation history, and current user prompt
@@ -31,8 +46,14 @@ export function buildMessagesArray(options) {
     });
 }
 // Add conversation history if available
+// Convert ChatMessages to CoreMessages and filter out tool messages
 if (hasConversationHistory && options.conversationMessages) {
-
+    for (const chatMessage of options.conversationMessages) {
+        const coreMessage = toCoreMessage(chatMessage);
+        if (coreMessage) {
+            messages.push(coreMessage);
+        }
+    }
 }
 // Add current user prompt (required)
 // Handle both TextGenerationOptions (prompt field) and StreamOptions (input.text field)
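
The new toCoreMessage() helper is what keeps the widened ChatMessage roles from leaking into provider calls: tool_call and tool_result entries are dropped before history is handed to the AI SDK. A standalone copy for illustration, with local type aliases standing in for the package's types:

```typescript
type ChatMessage = { role: string; content: string };
type CoreMessage = { role: "user" | "assistant" | "system"; content: string };

function toCoreMessage(message: ChatMessage): CoreMessage | null {
  if (message.role === "user" || message.role === "assistant" || message.role === "system") {
    return { role: message.role, content: message.content };
  }
  return null; // tool_call / tool_result messages are filtered out
}

const history: ChatMessage[] = [
  { role: "user", content: "What is 2+2?" },
  { role: "tool_call", content: "" },
  { role: "tool_result", content: "4" },
  { role: "assistant", content: "2 + 2 = 4" },
];

const coreMessages = history
  .map(toCoreMessage)
  .filter((m): m is CoreMessage => m !== null); // only the user and assistant entries remain
```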
package/dist/lib/utils/redis.d.ts
CHANGED
@@ -4,7 +4,7 @@
  */
 import { createClient } from "redis";
 type RedisClient = ReturnType<typeof createClient>;
-import type {
+import type { RedisStorageConfig, RedisConversationObject } from "../types/conversation.js";
 /**
  * Creates a Redis client with the provided configuration
  */
@@ -12,15 +12,19 @@ export declare function createRedisClient(config: Required<RedisStorageConfig>):
 /**
  * Generates a Redis key for session messages
  */
-export declare function getSessionKey(config: Required<RedisStorageConfig>, sessionId: string): string;
+export declare function getSessionKey(config: Required<RedisStorageConfig>, sessionId: string, userId?: string): string;
 /**
- *
+ * Generates a Redis key for user sessions mapping
  */
-export declare function
+export declare function getUserSessionsKey(config: Required<RedisStorageConfig>, userId: string): string;
 /**
- *
+ * Serializes conversation object for Redis storage
  */
-export declare function
+export declare function serializeConversation(conversation: RedisConversationObject): string;
+/**
+ * Deserializes conversation object from Redis storage
+ */
+export declare function deserializeConversation(data: string | null): RedisConversationObject | null;
 /**
  * Checks if Redis client is healthy
  */
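
Usage sketch for the helpers declared above. The signatures come from redis.d.ts; the deep import path and the exact key and serialization formats are assumptions, since the utils/redis.js bodies are not reproduced in this excerpt.

```typescript
import {
  getSessionKey,
  getUserSessionsKey,
  serializeConversation,
  deserializeConversation,
} from "@juspay/neurolink/dist/utils/redis.js"; // assumed deep import path
import type { RedisStorageConfig, RedisConversationObject } from "@juspay/neurolink";

declare const config: Required<RedisStorageConfig>;
declare const conversation: RedisConversationObject;

// getSessionKey now accepts an optional userId; getUserSessionsKey backs the new
// user-to-sessions mapping controlled by userSessionsKeyPrefix.
const sessionKey = getSessionKey(config, "session-1", "user-1");
const userSessionsKey = getUserSessionsKey(config, "user-1");

const payload: string = serializeConversation(conversation);
const restored: RedisConversationObject | null = deserializeConversation(payload);
const missing: RedisConversationObject | null = deserializeConversation(null); // presumably null when the key is absent
```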