@juspay/neurolink 7.47.1 → 7.47.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/dist/core/baseProvider.d.ts +1 -1
- package/dist/core/baseProvider.js +3 -3
- package/dist/core/conversationMemoryManager.d.ts +1 -1
- package/dist/core/conversationMemoryManager.js +1 -1
- package/dist/core/redisConversationMemoryManager.d.ts +2 -2
- package/dist/core/redisConversationMemoryManager.js +15 -9
- package/dist/lib/core/baseProvider.d.ts +1 -1
- package/dist/lib/core/baseProvider.js +3 -3
- package/dist/lib/core/conversationMemoryManager.d.ts +1 -1
- package/dist/lib/core/conversationMemoryManager.js +1 -1
- package/dist/lib/core/redisConversationMemoryManager.d.ts +2 -2
- package/dist/lib/core/redisConversationMemoryManager.js +15 -9
- package/dist/lib/neurolink.d.ts +2 -1
- package/dist/lib/neurolink.js +7 -6
- package/dist/lib/providers/anthropic.js +1 -1
- package/dist/lib/providers/anthropicBaseProvider.js +1 -1
- package/dist/lib/providers/azureOpenai.js +1 -1
- package/dist/lib/providers/googleAiStudio.js +1 -1
- package/dist/lib/providers/googleVertex.js +1 -1
- package/dist/lib/providers/huggingFace.js +1 -1
- package/dist/lib/providers/litellm.js +1 -1
- package/dist/lib/providers/mistral.js +1 -1
- package/dist/lib/providers/openAI.js +1 -1
- package/dist/lib/providers/openaiCompatible.js +1 -1
- package/dist/lib/utils/conversationMemory.d.ts +1 -1
- package/dist/lib/utils/conversationMemory.js +2 -2
- package/dist/lib/utils/conversationMemoryUtils.d.ts +1 -1
- package/dist/lib/utils/conversationMemoryUtils.js +2 -2
- package/dist/neurolink.d.ts +2 -1
- package/dist/neurolink.js +7 -6
- package/dist/providers/anthropic.js +1 -1
- package/dist/providers/anthropicBaseProvider.js +1 -1
- package/dist/providers/azureOpenai.js +1 -1
- package/dist/providers/googleAiStudio.js +1 -1
- package/dist/providers/googleVertex.js +1 -1
- package/dist/providers/huggingFace.js +1 -1
- package/dist/providers/litellm.js +1 -1
- package/dist/providers/mistral.js +1 -1
- package/dist/providers/openAI.js +1 -1
- package/dist/providers/openaiCompatible.js +1 -1
- package/dist/utils/conversationMemory.d.ts +1 -1
- package/dist/utils/conversationMemory.js +2 -2
- package/dist/utils/conversationMemoryUtils.d.ts +1 -1
- package/dist/utils/conversationMemoryUtils.js +2 -2
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,3 +1,11 @@
+## [7.47.3](https://github.com/juspay/neurolink/compare/v7.47.2...v7.47.3) (2025-09-26)
+
+## [7.47.2](https://github.com/juspay/neurolink/compare/v7.47.1...v7.47.2) (2025-09-26)
+
+### Bug Fixes
+
+- **(timestamp):** Incorrect timestamps being stored in redis ([2d66232](https://github.com/juspay/neurolink/commit/2d6623275bc4c1f5986957d476ddcf3933ba61e4))
+
 ## [7.47.1](https://github.com/juspay/neurolink/compare/v7.47.0...v7.47.1) (2025-09-26)
 
 ### Bug Fixes
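The 7.47.2 fix threads an explicit capture time through the conversation-memory call chain, so Redis records keep the moment a turn or tool call actually happened rather than the later moment the write lands. The following is only a minimal sketch of that pattern; `TurnStore` and `recordTurn` are hypothetical names, not part of neurolink:

```ts
// Hypothetical sketch: pass the capture time down instead of re-stamping at write time.
interface TurnStore {
  save(sessionId: string, payload: { content: string; timestamp: string }): Promise<void>;
}

export async function recordTurn(store: TurnStore, sessionId: string, content: string, startTime?: Date): Promise<void> {
  // Prefer the time captured when the turn started; fall back to "now" only if none was supplied.
  const timestamp = (startTime ?? new Date()).toISOString();
  await store.save(sessionId, { content, timestamp });
}
```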
package/dist/core/baseProvider.d.ts
CHANGED

@@ -227,7 +227,7 @@ export declare abstract class BaseProvider implements AIProvider {
     /**
      * Check if tool executions should be stored and handle storage
      */
-    protected handleToolExecutionStorage(toolCalls: unknown[], toolResults: unknown[], options: TextGenerationOptions | StreamOptions): Promise<void>;
+    protected handleToolExecutionStorage(toolCalls: unknown[], toolResults: unknown[], options: TextGenerationOptions | StreamOptions, currentTime: Date): Promise<void>;
     /**
      * Utility method to chunk large prompts into smaller pieces
      * @param prompt The prompt to chunk
package/dist/core/baseProvider.js
CHANGED

@@ -300,7 +300,7 @@ export class BaseProvider {
             onStepFinish: ({ toolCalls, toolResults }) => {
                 logger.info("Tool execution completed", { toolResults, toolCalls });
                 // Handle tool execution storage
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                     logger.warn("[BaseProvider] Failed to store tool executions", {
                         provider: this.providerName,
                         error: error instanceof Error ? error.message : String(error),
@@ -1499,7 +1499,7 @@ export class BaseProvider {
    /**
     * Check if tool executions should be stored and handle storage
     */
-    async handleToolExecutionStorage(toolCalls, toolResults, options) {
+    async handleToolExecutionStorage(toolCalls, toolResults, options, currentTime) {
        // Check if tools are not empty
        const hasToolData = (toolCalls && toolCalls.length > 0) ||
            (toolResults && toolResults.length > 0);
@@ -1515,7 +1515,7 @@ export class BaseProvider {
        const userId = options.context?.userId ||
            options.userId;
        try {
-            await this.neurolink.storeToolExecutions(sessionId, userId, toolCalls, toolResults);
+            await this.neurolink.storeToolExecutions(sessionId, userId, toolCalls, toolResults, currentTime);
        }
        catch (error) {
            logger.warn("[BaseProvider] Failed to store tool executions", {
package/dist/core/conversationMemoryManager.d.ts
CHANGED

@@ -16,7 +16,7 @@ export declare class ConversationMemoryManager {
     * Store a conversation turn for a session
     * ULTRA-OPTIMIZED: Direct ChatMessage[] storage with zero conversion overhead
     */
-    storeConversationTurn(sessionId: string, userId: string | undefined, userMessage: string, aiResponse: string): Promise<void>;
+    storeConversationTurn(sessionId: string, userId: string | undefined, userMessage: string, aiResponse: string, _startTimeStamp: Date | undefined): Promise<void>;
    /**
     * Build context messages for AI prompt injection (ULTRA-OPTIMIZED)
     * Returns pre-stored message array with zero conversion overhead
package/dist/core/conversationMemoryManager.js
CHANGED

@@ -36,7 +36,7 @@ export class ConversationMemoryManager {
     * Store a conversation turn for a session
     * ULTRA-OPTIMIZED: Direct ChatMessage[] storage with zero conversion overhead
     */
-    async storeConversationTurn(sessionId, userId, userMessage, aiResponse) {
+    async storeConversationTurn(sessionId, userId, userMessage, aiResponse, _startTimeStamp) {
        await this.ensureInitialized();
        try {
            // Get or create session
package/dist/core/redisConversationMemoryManager.d.ts
CHANGED

@@ -60,11 +60,11 @@ export declare class RedisConversationMemoryManager {
        result?: unknown;
        error?: string;
        [key: string]: unknown;
-    }
+    }>, currentTime?: Date): Promise<void>;
    /**
     * Store a conversation turn for a session
     */
-    storeConversationTurn(sessionId: string, userId: string | undefined, userMessage: string, aiResponse: string): Promise<void>;
+    storeConversationTurn(sessionId: string, userId: string | undefined, userMessage: string, aiResponse: string, startTimeStamp: Date | undefined): Promise<void>;
    /**
     * Build context messages for AI prompt injection
     */
package/dist/core/redisConversationMemoryManager.js
CHANGED

@@ -156,7 +156,7 @@ export class RedisConversationMemoryManager {
    /**
     * Store tool execution data for a session (temporarily to avoid race conditions)
     */
-    async storeToolExecution(sessionId, userId, toolCalls, toolResults) {
+    async storeToolExecution(sessionId, userId, toolCalls, toolResults, currentTime) {
        logger.debug("[RedisConversationMemoryManager] Storing tool execution temporarily", {
            sessionId,
            userId,
@@ -168,8 +168,14 @@ export class RedisConversationMemoryManager {
        const pendingKey = `${sessionId}:${normalizedUserId}`;
        // Store tool execution data temporarily to prevent race conditions
        const pendingData = {
-            toolCalls: toolCalls || []
-
+            toolCalls: (toolCalls || []).map((call) => ({
+                ...call,
+                timestamp: currentTime,
+            })),
+            toolResults: (toolResults || []).map((result) => ({
+                ...result,
+                timestamp: currentTime,
+            })),
            timestamp: Date.now(),
        };
        // Check if there's existing pending data and merge
@@ -214,7 +220,7 @@ export class RedisConversationMemoryManager {
    /**
     * Store a conversation turn for a session
     */
-    async storeConversationTurn(sessionId, userId, userMessage, aiResponse) {
+    async storeConversationTurn(sessionId, userId, userMessage, aiResponse, startTimeStamp) {
        logger.debug("[RedisConversationMemoryManager] Storing conversation turn", {
            sessionId,
            userId,
@@ -294,8 +300,8 @@ export class RedisConversationMemoryManager {
                title: "New Conversation", // Temporary title until generated
                sessionId,
                userId: normalizedUserId,
-                createdAt: currentTime,
-                updatedAt: currentTime,
+                createdAt: startTimeStamp?.toISOString() || currentTime,
+                updatedAt: startTimeStamp?.toISOString() || currentTime,
                messages: [],
            };
        }
@@ -312,7 +318,7 @@ export class RedisConversationMemoryManager {
        // Add new messages to conversation history with new format
        const userMsg = {
            id: this.generateMessageId(conversation),
-            timestamp: this.generateTimestamp(),
+            timestamp: startTimeStamp?.toISOString() || this.generateTimestamp(),
            role: "user",
            content: userMessage,
        };
@@ -868,7 +874,7 @@ User message: "${userMessage}`;
                toolCallMap.set(toolCallId, toolName);
                const toolCallMessage = {
                    id: this.generateMessageId(conversation),
-                    timestamp: this.generateTimestamp(),
+                    timestamp: toolCall.timestamp?.toISOString() || this.generateTimestamp(),
                    role: "tool_call",
                    content: "", // Can be empty for tool calls
                    tool: toolName,
@@ -885,7 +891,7 @@ User message: "${userMessage}`;
                const toolName = toolCallMap.get(toolCallId) || "unknown";
                const toolResultMessage = {
                    id: this.generateMessageId(conversation),
-                    timestamp: this.generateTimestamp(),
+                    timestamp: toolResult.timestamp?.toISOString() || this.generateTimestamp(),
                    role: "tool_result",
                    content: "", // Can be empty for tool results
                    tool: toolName, // Now correctly extracted from tool call mapping
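The Redis manager now tags each pending tool call and tool result with the `currentTime` handed in by the provider, and prefers the caller's `startTimeStamp` for session and message timestamps, falling back to its own generated timestamp only when none was supplied. A rough illustration of the tagging step, assuming a simplified `ToolCall` shape and the helper name `tagWithTimestamp` (both hypothetical, not neurolink types):

```ts
// Illustrative only: attach a caller-supplied Date to every pending item before it is written.
type ToolCall = { toolName?: string; [key: string]: unknown };

export function tagWithTimestamp<T extends object>(items: T[] | undefined, currentTime?: Date): Array<T & { timestamp?: Date }> {
  // An undefined Date leaves the items untagged, matching the optional parameter above.
  return (items ?? []).map((item) => ({ ...item, timestamp: currentTime }));
}

const pending = {
  toolCalls: tagWithTimestamp<ToolCall>([{ toolName: "search" }], new Date()),
  storedAt: Date.now(), // when the pending record itself was created
};
console.log(pending.toolCalls[0]?.timestamp?.toISOString());
```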
package/dist/lib/core/baseProvider.d.ts
CHANGED

@@ -227,7 +227,7 @@ export declare abstract class BaseProvider implements AIProvider {
     /**
      * Check if tool executions should be stored and handle storage
      */
-    protected handleToolExecutionStorage(toolCalls: unknown[], toolResults: unknown[], options: TextGenerationOptions | StreamOptions): Promise<void>;
+    protected handleToolExecutionStorage(toolCalls: unknown[], toolResults: unknown[], options: TextGenerationOptions | StreamOptions, currentTime: Date): Promise<void>;
     /**
      * Utility method to chunk large prompts into smaller pieces
      * @param prompt The prompt to chunk
package/dist/lib/core/baseProvider.js
CHANGED

@@ -300,7 +300,7 @@ export class BaseProvider {
             onStepFinish: ({ toolCalls, toolResults }) => {
                 logger.info("Tool execution completed", { toolResults, toolCalls });
                 // Handle tool execution storage
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                     logger.warn("[BaseProvider] Failed to store tool executions", {
                         provider: this.providerName,
                         error: error instanceof Error ? error.message : String(error),
@@ -1499,7 +1499,7 @@ export class BaseProvider {
    /**
     * Check if tool executions should be stored and handle storage
     */
-    async handleToolExecutionStorage(toolCalls, toolResults, options) {
+    async handleToolExecutionStorage(toolCalls, toolResults, options, currentTime) {
        // Check if tools are not empty
        const hasToolData = (toolCalls && toolCalls.length > 0) ||
            (toolResults && toolResults.length > 0);
@@ -1515,7 +1515,7 @@ export class BaseProvider {
        const userId = options.context?.userId ||
            options.userId;
        try {
-            await this.neurolink.storeToolExecutions(sessionId, userId, toolCalls, toolResults);
+            await this.neurolink.storeToolExecutions(sessionId, userId, toolCalls, toolResults, currentTime);
        }
        catch (error) {
            logger.warn("[BaseProvider] Failed to store tool executions", {
package/dist/lib/core/conversationMemoryManager.d.ts
CHANGED

@@ -16,7 +16,7 @@ export declare class ConversationMemoryManager {
     * Store a conversation turn for a session
     * ULTRA-OPTIMIZED: Direct ChatMessage[] storage with zero conversion overhead
     */
-    storeConversationTurn(sessionId: string, userId: string | undefined, userMessage: string, aiResponse: string): Promise<void>;
+    storeConversationTurn(sessionId: string, userId: string | undefined, userMessage: string, aiResponse: string, _startTimeStamp: Date | undefined): Promise<void>;
    /**
     * Build context messages for AI prompt injection (ULTRA-OPTIMIZED)
     * Returns pre-stored message array with zero conversion overhead
package/dist/lib/core/conversationMemoryManager.js
CHANGED

@@ -36,7 +36,7 @@ export class ConversationMemoryManager {
     * Store a conversation turn for a session
     * ULTRA-OPTIMIZED: Direct ChatMessage[] storage with zero conversion overhead
     */
-    async storeConversationTurn(sessionId, userId, userMessage, aiResponse) {
+    async storeConversationTurn(sessionId, userId, userMessage, aiResponse, _startTimeStamp) {
        await this.ensureInitialized();
        try {
            // Get or create session
package/dist/lib/core/redisConversationMemoryManager.d.ts
CHANGED

@@ -60,11 +60,11 @@ export declare class RedisConversationMemoryManager {
        result?: unknown;
        error?: string;
        [key: string]: unknown;
-    }
+    }>, currentTime?: Date): Promise<void>;
    /**
     * Store a conversation turn for a session
     */
-    storeConversationTurn(sessionId: string, userId: string | undefined, userMessage: string, aiResponse: string): Promise<void>;
+    storeConversationTurn(sessionId: string, userId: string | undefined, userMessage: string, aiResponse: string, startTimeStamp: Date | undefined): Promise<void>;
    /**
     * Build context messages for AI prompt injection
     */
package/dist/lib/core/redisConversationMemoryManager.js
CHANGED

@@ -156,7 +156,7 @@ export class RedisConversationMemoryManager {
    /**
     * Store tool execution data for a session (temporarily to avoid race conditions)
     */
-    async storeToolExecution(sessionId, userId, toolCalls, toolResults) {
+    async storeToolExecution(sessionId, userId, toolCalls, toolResults, currentTime) {
        logger.debug("[RedisConversationMemoryManager] Storing tool execution temporarily", {
            sessionId,
            userId,
@@ -168,8 +168,14 @@ export class RedisConversationMemoryManager {
        const pendingKey = `${sessionId}:${normalizedUserId}`;
        // Store tool execution data temporarily to prevent race conditions
        const pendingData = {
-            toolCalls: toolCalls || []
-
+            toolCalls: (toolCalls || []).map((call) => ({
+                ...call,
+                timestamp: currentTime,
+            })),
+            toolResults: (toolResults || []).map((result) => ({
+                ...result,
+                timestamp: currentTime,
+            })),
            timestamp: Date.now(),
        };
        // Check if there's existing pending data and merge
@@ -214,7 +220,7 @@ export class RedisConversationMemoryManager {
    /**
     * Store a conversation turn for a session
     */
-    async storeConversationTurn(sessionId, userId, userMessage, aiResponse) {
+    async storeConversationTurn(sessionId, userId, userMessage, aiResponse, startTimeStamp) {
        logger.debug("[RedisConversationMemoryManager] Storing conversation turn", {
            sessionId,
            userId,
@@ -294,8 +300,8 @@ export class RedisConversationMemoryManager {
                title: "New Conversation", // Temporary title until generated
                sessionId,
                userId: normalizedUserId,
-                createdAt: currentTime,
-                updatedAt: currentTime,
+                createdAt: startTimeStamp?.toISOString() || currentTime,
+                updatedAt: startTimeStamp?.toISOString() || currentTime,
                messages: [],
            };
        }
@@ -312,7 +318,7 @@ export class RedisConversationMemoryManager {
        // Add new messages to conversation history with new format
        const userMsg = {
            id: this.generateMessageId(conversation),
-            timestamp: this.generateTimestamp(),
+            timestamp: startTimeStamp?.toISOString() || this.generateTimestamp(),
            role: "user",
            content: userMessage,
        };
@@ -868,7 +874,7 @@ User message: "${userMessage}`;
                toolCallMap.set(toolCallId, toolName);
                const toolCallMessage = {
                    id: this.generateMessageId(conversation),
-                    timestamp: this.generateTimestamp(),
+                    timestamp: toolCall.timestamp?.toISOString() || this.generateTimestamp(),
                    role: "tool_call",
                    content: "", // Can be empty for tool calls
                    tool: toolName,
@@ -885,7 +891,7 @@ User message: "${userMessage}`;
                const toolName = toolCallMap.get(toolCallId) || "unknown";
                const toolResultMessage = {
                    id: this.generateMessageId(conversation),
-                    timestamp: this.generateTimestamp(),
+                    timestamp: toolResult.timestamp?.toISOString() || this.generateTimestamp(),
                    role: "tool_result",
                    content: "", // Can be empty for tool results
                    tool: toolName, // Now correctly extracted from tool call mapping
package/dist/lib/neurolink.d.ts
CHANGED
@@ -923,6 +923,7 @@ export declare class NeuroLink {
     * @param userId - User identifier (optional)
     * @param toolCalls - Array of tool calls
     * @param toolResults - Array of tool results
+    * @param currentTime - Date when the tool execution occurred (optional)
     * @returns Promise resolving when storage is complete
     */
    storeToolExecutions(sessionId: string, userId: string | undefined, toolCalls: Array<{
@@ -935,7 +936,7 @@ export declare class NeuroLink {
        result?: unknown;
        error?: string;
        [key: string]: unknown;
-    }
+    }>, currentTime?: Date): Promise<void>;
    /**
     * Check if tool execution storage is available
     * @returns boolean indicating if Redis storage is configured and available
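The public `storeToolExecutions` declaration gains an optional trailing `currentTime?: Date`, so existing callers keep compiling while new callers can pin the time at which the tools actually ran. A hedged usage sketch follows; the no-argument constructor and the exact field names inside the call/result objects (`toolName`, `args`, `result`) are assumptions beyond what the declaration above shows:

```ts
import { NeuroLink } from "@juspay/neurolink";

const neurolink = new NeuroLink();
const executedAt = new Date(); // captured when the tool actually ran

await neurolink.storeToolExecutions(
  "session-123",
  "user-456",
  [{ toolName: "search", args: { query: "latest release" } }],
  [{ toolName: "search", result: { hits: 3 } }],
  executedAt, // optional; omitting it preserves the pre-7.47.2 behaviour
);
```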
package/dist/lib/neurolink.js
CHANGED
@@ -1228,13 +1228,13 @@ export class NeuroLink {
        await this.initializeConversationMemoryForGeneration(generateInternalId, generateInternalStartTime, generateInternalHrTimeStart);
        const mcpResult = await this.attemptMCPGeneration(options, generateInternalId, generateInternalStartTime, generateInternalHrTimeStart, functionTag);
        if (mcpResult) {
-            await storeConversationTurn(this.conversationMemory, options, mcpResult);
+            await storeConversationTurn(this.conversationMemory, options, mcpResult, new Date(generateInternalStartTime));
            this.emitter.emit("response:end", mcpResult.content || "");
            return mcpResult;
        }
        const directResult = await this.directProviderGeneration(options);
        logger.debug(`[${functionTag}] Direct generation successful`);
-        await storeConversationTurn(this.conversationMemory, options, directResult);
+        await storeConversationTurn(this.conversationMemory, options, directResult, new Date(generateInternalStartTime));
        this.emitter.emit("response:end", directResult.content || "");
        this.emitter.emit("message", `Text generation completed successfully`);
        return directResult;
@@ -1803,7 +1803,7 @@ export class NeuroLink {
                try {
                    await self.conversationMemory.storeConversationTurn(enhancedOptions.context
                        ?.sessionId, enhancedOptions.context
-                        ?.userId, originalPrompt ?? "", accumulatedContent);
+                        ?.userId, originalPrompt ?? "", accumulatedContent, new Date(startTime));
                    logger.debug("Stream conversation turn stored", {
                        sessionId: enhancedOptions.context
                            ?.sessionId,
@@ -2008,7 +2008,7 @@ export class NeuroLink {
                    const sessionId = enhancedOptions?.context?.sessionId;
                    const userId = enhancedOptions?.context
                        ?.userId;
-                    await self.conversationMemory.storeConversationTurn(sessionId || options.context?.sessionId, userId || options.context?.userId, originalPrompt ?? "", fallbackAccumulatedContent);
+                    await self.conversationMemory.storeConversationTurn(sessionId || options.context?.sessionId, userId || options.context?.userId, originalPrompt ?? "", fallbackAccumulatedContent, new Date(startTime));
                    logger.debug("Fallback stream conversation turn stored", {
                        sessionId: sessionId || options.context?.sessionId,
                        userInputLength: originalPrompt?.length ?? 0,
@@ -3633,9 +3633,10 @@ export class NeuroLink {
     * @param userId - User identifier (optional)
     * @param toolCalls - Array of tool calls
     * @param toolResults - Array of tool results
+    * @param currentTime - Date when the tool execution occurred (optional)
     * @returns Promise resolving when storage is complete
     */
-    async storeToolExecutions(sessionId, userId, toolCalls, toolResults) {
+    async storeToolExecutions(sessionId, userId, toolCalls, toolResults, currentTime) {
        // Check if tools are not empty
        const hasToolData = (toolCalls && toolCalls.length > 0) ||
            (toolResults && toolResults.length > 0);
@@ -3651,7 +3652,7 @@ export class NeuroLink {
        const redisMemory = this
            .conversationMemory;
        try {
-            await redisMemory.storeToolExecution(sessionId, userId, toolCalls, toolResults);
+            await redisMemory.storeToolExecution(sessionId, userId, toolCalls, toolResults, currentTime);
        }
        catch (error) {
            logger.warn("Failed to store tool executions", {
package/dist/lib/providers/anthropic.js
CHANGED

@@ -105,7 +105,7 @@ export class AnthropicProvider extends BaseProvider {
            toolChoice: shouldUseTools ? "auto" : "none",
            abortSignal: timeoutController?.controller.signal,
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[AnthropicProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/lib/providers/anthropicBaseProvider.js
CHANGED

@@ -72,7 +72,7 @@ export class AnthropicProviderV2 extends BaseProvider {
            toolChoice: "auto",
            abortSignal: timeoutController?.controller.signal,
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[AnthropicBaseProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/lib/providers/azureOpenai.js
CHANGED

@@ -157,7 +157,7 @@ export class AzureOpenAIProvider extends BaseProvider {
            tools,
            toolChoice: shouldUseTools ? "auto" : "none",
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[AzureOpenaiProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/lib/providers/googleAiStudio.js
CHANGED

@@ -136,7 +136,7 @@ export class GoogleAIStudioProvider extends BaseProvider {
            toolChoice: shouldUseTools ? "auto" : "none",
            abortSignal: timeoutController?.controller.signal,
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[GoogleAiStudioProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/lib/providers/googleVertex.js
CHANGED

@@ -682,7 +682,7 @@ export class GoogleVertexProvider extends BaseProvider {
            onStepFinish: ({ toolCalls, toolResults }) => {
                logger.info("Tool execution completed", { toolResults, toolCalls });
                // Handle tool execution storage
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[GoogleVertexProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/lib/providers/huggingFace.js
CHANGED

@@ -124,7 +124,7 @@ export class HuggingFaceProvider extends BaseProvider {
            toolChoice: streamOptions.toolChoice, // Tool choice handled by prepareStreamOptions
            abortSignal: timeoutController?.controller.signal,
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[HuggingFaceProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/lib/providers/litellm.js
CHANGED

@@ -132,7 +132,7 @@ export class LiteLLMProvider extends BaseProvider {
            toolChoice: "auto",
            abortSignal: timeoutController?.controller.signal,
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("LiteLLMProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/lib/providers/mistral.js
CHANGED

@@ -61,7 +61,7 @@ export class MistralProvider extends BaseProvider {
            toolChoice: shouldUseTools ? "auto" : "none",
            abortSignal: timeoutController?.controller.signal,
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[MistralProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/lib/providers/openAI.js
CHANGED

@@ -276,7 +276,7 @@ export class OpenAIProvider extends BaseProvider {
            onStepFinish: ({ toolCalls, toolResults }) => {
                logger.info("Tool execution completed", { toolResults, toolCalls });
                // Handle tool execution storage
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[OpenAIProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/lib/providers/openaiCompatible.js
CHANGED

@@ -168,7 +168,7 @@ export class OpenAICompatibleProvider extends BaseProvider {
            toolChoice: "auto",
            abortSignal: timeoutController?.controller.signal,
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[OpenAiCompatibleProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),
@@ -19,4 +19,4 @@ export declare function getConversationMessages(conversationMemory: Conversation
 * Store conversation turn for future context
 * Saves user messages and AI responses for conversation memory
 */
-export declare function storeConversationTurn(conversationMemory: ConversationMemoryManager | RedisConversationMemoryManager | null | undefined, originalOptions: TextGenerationOptions, result: TextGenerationResult): Promise<void>;
+export declare function storeConversationTurn(conversationMemory: ConversationMemoryManager | RedisConversationMemoryManager | null | undefined, originalOptions: TextGenerationOptions, result: TextGenerationResult, startTimeStamp?: Date | undefined): Promise<void>;
@@ -71,7 +71,7 @@ export async function getConversationMessages(conversationMemory, options) {
 * Store conversation turn for future context
 * Saves user messages and AI responses for conversation memory
 */
-export async function storeConversationTurn(conversationMemory, originalOptions, result) {
+export async function storeConversationTurn(conversationMemory, originalOptions, result, startTimeStamp) {
    logger.debug("[conversationMemoryUtils] storeConversationTurn called", {
        hasMemory: !!conversationMemory,
        memoryType: conversationMemory?.constructor?.name || "NONE",
@@ -102,7 +102,7 @@ export async function storeConversationTurn(conversationMemory, originalOptions,
    const userMessage = originalOptions.originalPrompt || originalOptions.prompt || "";
    const aiResponse = result.content;
    try {
-        await conversationMemory.storeConversationTurn(sessionId, userId, userMessage, aiResponse);
+        await conversationMemory.storeConversationTurn(sessionId, userId, userMessage, aiResponse, startTimeStamp);
        logger.debug("[conversationMemoryUtils] Conversation turn stored successfully", {
            sessionId,
            userId,
@@ -18,7 +18,7 @@ export declare function getConversationMessages(conversationMemory: Conversation
 * Store conversation turn for future context
 * Saves user messages and AI responses for conversation memory
 */
-export declare function storeConversationTurn(conversationMemory: ConversationMemoryManager | undefined, originalOptions: TextGenerationOptions, result: TextGenerationResult): Promise<void>;
+export declare function storeConversationTurn(conversationMemory: ConversationMemoryManager | undefined, originalOptions: TextGenerationOptions, result: TextGenerationResult, startTimeStamp?: Date | undefined): Promise<void>;
/**
 * Check if Redis is available for conversation memory
 */
@@ -48,7 +48,7 @@ export async function getConversationMessages(conversationMemory, options) {
 * Store conversation turn for future context
 * Saves user messages and AI responses for conversation memory
 */
-export async function storeConversationTurn(conversationMemory, originalOptions, result) {
+export async function storeConversationTurn(conversationMemory, originalOptions, result, startTimeStamp) {
    if (!conversationMemory || !originalOptions.context) {
        return;
    }
@@ -59,7 +59,7 @@ export async function storeConversationTurn(conversationMemory, originalOptions,
        return;
    }
    try {
-        await conversationMemory.storeConversationTurn(sessionId, userId, originalOptions.originalPrompt || originalOptions.prompt || "", result.content);
+        await conversationMemory.storeConversationTurn(sessionId, userId, originalOptions.originalPrompt || originalOptions.prompt || "", result.content, startTimeStamp);
        logger.debug("Conversation turn stored", {
            sessionId,
            userId,
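The `storeConversationTurn` helper now takes an optional trailing `startTimeStamp` that is simply forwarded to the memory manager; omitting it keeps the old behaviour. A minimal sketch of that backward-compatible forwarding, using a hypothetical `MemoryLike` stand-in rather than any neurolink type:

```ts
// Sketch only: optional Date passed straight through to the underlying store.
interface MemoryLike {
  storeConversationTurn(sessionId: string, userId: string | undefined, userMessage: string, aiResponse: string, startTimeStamp?: Date): Promise<void>;
}

export async function storeTurn(memory: MemoryLike | undefined, sessionId: string, userId: string | undefined, userMessage: string, aiResponse: string, startTimeStamp?: Date): Promise<void> {
  if (!memory) {
    return; // no memory backend configured, nothing to store
  }
  // Forward the optional Date unchanged; callers that omit it get the previous timestamping.
  await memory.storeConversationTurn(sessionId, userId, userMessage, aiResponse, startTimeStamp);
}
```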
package/dist/neurolink.d.ts
CHANGED
@@ -923,6 +923,7 @@ export declare class NeuroLink {
     * @param userId - User identifier (optional)
     * @param toolCalls - Array of tool calls
     * @param toolResults - Array of tool results
+    * @param currentTime - Date when the tool execution occurred (optional)
     * @returns Promise resolving when storage is complete
     */
    storeToolExecutions(sessionId: string, userId: string | undefined, toolCalls: Array<{
@@ -935,7 +936,7 @@ export declare class NeuroLink {
        result?: unknown;
        error?: string;
        [key: string]: unknown;
-    }
+    }>, currentTime?: Date): Promise<void>;
    /**
     * Check if tool execution storage is available
     * @returns boolean indicating if Redis storage is configured and available
package/dist/neurolink.js
CHANGED
@@ -1228,13 +1228,13 @@ export class NeuroLink {
        await this.initializeConversationMemoryForGeneration(generateInternalId, generateInternalStartTime, generateInternalHrTimeStart);
        const mcpResult = await this.attemptMCPGeneration(options, generateInternalId, generateInternalStartTime, generateInternalHrTimeStart, functionTag);
        if (mcpResult) {
-            await storeConversationTurn(this.conversationMemory, options, mcpResult);
+            await storeConversationTurn(this.conversationMemory, options, mcpResult, new Date(generateInternalStartTime));
            this.emitter.emit("response:end", mcpResult.content || "");
            return mcpResult;
        }
        const directResult = await this.directProviderGeneration(options);
        logger.debug(`[${functionTag}] Direct generation successful`);
-        await storeConversationTurn(this.conversationMemory, options, directResult);
+        await storeConversationTurn(this.conversationMemory, options, directResult, new Date(generateInternalStartTime));
        this.emitter.emit("response:end", directResult.content || "");
        this.emitter.emit("message", `Text generation completed successfully`);
        return directResult;
@@ -1803,7 +1803,7 @@ export class NeuroLink {
                try {
                    await self.conversationMemory.storeConversationTurn(enhancedOptions.context
                        ?.sessionId, enhancedOptions.context
-                        ?.userId, originalPrompt ?? "", accumulatedContent);
+                        ?.userId, originalPrompt ?? "", accumulatedContent, new Date(startTime));
                    logger.debug("Stream conversation turn stored", {
                        sessionId: enhancedOptions.context
                            ?.sessionId,
@@ -2008,7 +2008,7 @@ export class NeuroLink {
                    const sessionId = enhancedOptions?.context?.sessionId;
                    const userId = enhancedOptions?.context
                        ?.userId;
-                    await self.conversationMemory.storeConversationTurn(sessionId || options.context?.sessionId, userId || options.context?.userId, originalPrompt ?? "", fallbackAccumulatedContent);
+                    await self.conversationMemory.storeConversationTurn(sessionId || options.context?.sessionId, userId || options.context?.userId, originalPrompt ?? "", fallbackAccumulatedContent, new Date(startTime));
                    logger.debug("Fallback stream conversation turn stored", {
                        sessionId: sessionId || options.context?.sessionId,
                        userInputLength: originalPrompt?.length ?? 0,
@@ -3633,9 +3633,10 @@ export class NeuroLink {
     * @param userId - User identifier (optional)
     * @param toolCalls - Array of tool calls
     * @param toolResults - Array of tool results
+    * @param currentTime - Date when the tool execution occurred (optional)
     * @returns Promise resolving when storage is complete
     */
-    async storeToolExecutions(sessionId, userId, toolCalls, toolResults) {
+    async storeToolExecutions(sessionId, userId, toolCalls, toolResults, currentTime) {
        // Check if tools are not empty
        const hasToolData = (toolCalls && toolCalls.length > 0) ||
            (toolResults && toolResults.length > 0);
@@ -3651,7 +3652,7 @@ export class NeuroLink {
        const redisMemory = this
            .conversationMemory;
        try {
-            await redisMemory.storeToolExecution(sessionId, userId, toolCalls, toolResults);
+            await redisMemory.storeToolExecution(sessionId, userId, toolCalls, toolResults, currentTime);
        }
        catch (error) {
            logger.warn("Failed to store tool executions", {
package/dist/providers/anthropic.js
CHANGED

@@ -105,7 +105,7 @@ export class AnthropicProvider extends BaseProvider {
            toolChoice: shouldUseTools ? "auto" : "none",
            abortSignal: timeoutController?.controller.signal,
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[AnthropicProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/providers/anthropicBaseProvider.js
CHANGED

@@ -72,7 +72,7 @@ export class AnthropicProviderV2 extends BaseProvider {
            toolChoice: "auto",
            abortSignal: timeoutController?.controller.signal,
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[AnthropicBaseProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/providers/azureOpenai.js
CHANGED

@@ -157,7 +157,7 @@ export class AzureOpenAIProvider extends BaseProvider {
            tools,
            toolChoice: shouldUseTools ? "auto" : "none",
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[AzureOpenaiProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/providers/googleAiStudio.js
CHANGED

@@ -136,7 +136,7 @@ export class GoogleAIStudioProvider extends BaseProvider {
            toolChoice: shouldUseTools ? "auto" : "none",
            abortSignal: timeoutController?.controller.signal,
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[GoogleAiStudioProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/providers/googleVertex.js
CHANGED

@@ -682,7 +682,7 @@ export class GoogleVertexProvider extends BaseProvider {
            onStepFinish: ({ toolCalls, toolResults }) => {
                logger.info("Tool execution completed", { toolResults, toolCalls });
                // Handle tool execution storage
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[GoogleVertexProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/providers/huggingFace.js
CHANGED

@@ -124,7 +124,7 @@ export class HuggingFaceProvider extends BaseProvider {
            toolChoice: streamOptions.toolChoice, // Tool choice handled by prepareStreamOptions
            abortSignal: timeoutController?.controller.signal,
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[HuggingFaceProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/providers/litellm.js
CHANGED

@@ -132,7 +132,7 @@ export class LiteLLMProvider extends BaseProvider {
            toolChoice: "auto",
            abortSignal: timeoutController?.controller.signal,
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("LiteLLMProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/providers/mistral.js
CHANGED

@@ -61,7 +61,7 @@ export class MistralProvider extends BaseProvider {
            toolChoice: shouldUseTools ? "auto" : "none",
            abortSignal: timeoutController?.controller.signal,
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[MistralProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),
package/dist/providers/openAI.js
CHANGED
@@ -276,7 +276,7 @@ export class OpenAIProvider extends BaseProvider {
            onStepFinish: ({ toolCalls, toolResults }) => {
                logger.info("Tool execution completed", { toolResults, toolCalls });
                // Handle tool execution storage
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[OpenAIProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

package/dist/providers/openaiCompatible.js
CHANGED

@@ -168,7 +168,7 @@ export class OpenAICompatibleProvider extends BaseProvider {
            toolChoice: "auto",
            abortSignal: timeoutController?.controller.signal,
            onStepFinish: ({ toolCalls, toolResults }) => {
-                this.handleToolExecutionStorage(toolCalls, toolResults, options).catch((error) => {
+                this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
                    logger.warn("[OpenAiCompatibleProvider] Failed to store tool executions", {
                        provider: this.providerName,
                        error: error instanceof Error ? error.message : String(error),

@@ -19,4 +19,4 @@ export declare function getConversationMessages(conversationMemory: Conversation
 * Store conversation turn for future context
 * Saves user messages and AI responses for conversation memory
 */
-export declare function storeConversationTurn(conversationMemory: ConversationMemoryManager | RedisConversationMemoryManager | null | undefined, originalOptions: TextGenerationOptions, result: TextGenerationResult): Promise<void>;
+export declare function storeConversationTurn(conversationMemory: ConversationMemoryManager | RedisConversationMemoryManager | null | undefined, originalOptions: TextGenerationOptions, result: TextGenerationResult, startTimeStamp?: Date | undefined): Promise<void>;
@@ -71,7 +71,7 @@ export async function getConversationMessages(conversationMemory, options) {
 * Store conversation turn for future context
 * Saves user messages and AI responses for conversation memory
 */
-export async function storeConversationTurn(conversationMemory, originalOptions, result) {
+export async function storeConversationTurn(conversationMemory, originalOptions, result, startTimeStamp) {
    logger.debug("[conversationMemoryUtils] storeConversationTurn called", {
        hasMemory: !!conversationMemory,
        memoryType: conversationMemory?.constructor?.name || "NONE",
@@ -102,7 +102,7 @@ export async function storeConversationTurn(conversationMemory, originalOptions,
    const userMessage = originalOptions.originalPrompt || originalOptions.prompt || "";
    const aiResponse = result.content;
    try {
-        await conversationMemory.storeConversationTurn(sessionId, userId, userMessage, aiResponse);
+        await conversationMemory.storeConversationTurn(sessionId, userId, userMessage, aiResponse, startTimeStamp);
        logger.debug("[conversationMemoryUtils] Conversation turn stored successfully", {
            sessionId,
            userId,
@@ -18,7 +18,7 @@ export declare function getConversationMessages(conversationMemory: Conversation
 * Store conversation turn for future context
 * Saves user messages and AI responses for conversation memory
 */
-export declare function storeConversationTurn(conversationMemory: ConversationMemoryManager | undefined, originalOptions: TextGenerationOptions, result: TextGenerationResult): Promise<void>;
+export declare function storeConversationTurn(conversationMemory: ConversationMemoryManager | undefined, originalOptions: TextGenerationOptions, result: TextGenerationResult, startTimeStamp?: Date | undefined): Promise<void>;
/**
 * Check if Redis is available for conversation memory
 */
@@ -48,7 +48,7 @@ export async function getConversationMessages(conversationMemory, options) {
 * Store conversation turn for future context
 * Saves user messages and AI responses for conversation memory
 */
-export async function storeConversationTurn(conversationMemory, originalOptions, result) {
+export async function storeConversationTurn(conversationMemory, originalOptions, result, startTimeStamp) {
    if (!conversationMemory || !originalOptions.context) {
        return;
    }
@@ -59,7 +59,7 @@ export async function storeConversationTurn(conversationMemory, originalOptions,
        return;
    }
    try {
-        await conversationMemory.storeConversationTurn(sessionId, userId, originalOptions.originalPrompt || originalOptions.prompt || "", result.content);
+        await conversationMemory.storeConversationTurn(sessionId, userId, originalOptions.originalPrompt || originalOptions.prompt || "", result.content, startTimeStamp);
        logger.debug("Conversation turn stored", {
            sessionId,
            userId,
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@juspay/neurolink",
-  "version": "7.47.1",
+  "version": "7.47.3",
   "description": "Universal AI Development Platform with working MCP integration, multi-provider support, and professional CLI. Built-in tools operational, 58+ external MCP servers discoverable. Connect to filesystem, GitHub, database operations, and more. Build, test, and deploy AI applications with 9 major providers: OpenAI, Anthropic, Google AI, AWS Bedrock, Azure, Hugging Face, Ollama, and Mistral AI.",
   "author": {
     "name": "Juspay Technologies",