@juspay/neurolink 7.33.3 → 7.34.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (135)
  1. package/CHANGELOG.md +15 -0
  2. package/README.md +37 -0
  3. package/dist/cli/commands/config.d.ts +3 -4
  4. package/dist/cli/commands/config.js +2 -3
  5. package/dist/cli/errorHandler.d.ts +1 -0
  6. package/dist/cli/errorHandler.js +28 -0
  7. package/dist/cli/factories/commandFactory.d.ts +23 -0
  8. package/dist/cli/factories/commandFactory.js +375 -60
  9. package/dist/cli/factories/ollamaCommandFactory.js +7 -1
  10. package/dist/cli/index.d.ts +1 -1
  11. package/dist/cli/index.js +9 -164
  12. package/dist/cli/loop/optionsSchema.d.ts +15 -0
  13. package/dist/cli/loop/optionsSchema.js +59 -0
  14. package/dist/cli/loop/session.d.ts +15 -0
  15. package/dist/cli/loop/session.js +252 -0
  16. package/dist/cli/parser.d.ts +1 -0
  17. package/dist/cli/parser.js +158 -0
  18. package/dist/cli/utils/ollamaUtils.js +6 -0
  19. package/dist/config/{conversationMemoryConfig.d.ts → conversationMemory.d.ts} +1 -1
  20. package/dist/core/baseProvider.js +43 -4
  21. package/dist/core/constants.d.ts +12 -3
  22. package/dist/core/constants.js +22 -6
  23. package/dist/core/conversationMemoryFactory.d.ts +23 -0
  24. package/dist/core/conversationMemoryFactory.js +144 -0
  25. package/dist/core/conversationMemoryInitializer.d.ts +14 -0
  26. package/dist/core/conversationMemoryInitializer.js +127 -0
  27. package/dist/core/conversationMemoryManager.d.ts +3 -2
  28. package/dist/core/conversationMemoryManager.js +4 -3
  29. package/dist/core/factory.js +19 -0
  30. package/dist/core/redisConversationMemoryManager.d.ts +73 -0
  31. package/dist/core/redisConversationMemoryManager.js +483 -0
  32. package/dist/core/types.d.ts +1 -1
  33. package/dist/factories/providerRegistry.js +2 -0
  34. package/dist/lib/config/{conversationMemoryConfig.d.ts → conversationMemory.d.ts} +1 -1
  35. package/dist/lib/core/baseProvider.js +43 -4
  36. package/dist/lib/core/constants.d.ts +12 -3
  37. package/dist/lib/core/constants.js +22 -6
  38. package/dist/lib/core/conversationMemoryFactory.d.ts +23 -0
  39. package/dist/lib/core/conversationMemoryFactory.js +144 -0
  40. package/dist/lib/core/conversationMemoryInitializer.d.ts +14 -0
  41. package/dist/lib/core/conversationMemoryInitializer.js +127 -0
  42. package/dist/lib/core/conversationMemoryManager.d.ts +3 -2
  43. package/dist/lib/core/conversationMemoryManager.js +4 -3
  44. package/dist/lib/core/factory.js +19 -0
  45. package/dist/lib/core/redisConversationMemoryManager.d.ts +73 -0
  46. package/dist/lib/core/redisConversationMemoryManager.js +483 -0
  47. package/dist/lib/core/types.d.ts +1 -1
  48. package/dist/lib/factories/providerRegistry.js +2 -0
  49. package/dist/lib/mcp/servers/aiProviders/aiWorkflowTools.js +2 -2
  50. package/dist/lib/neurolink.d.ts +15 -9
  51. package/dist/lib/neurolink.js +218 -67
  52. package/dist/lib/providers/amazonBedrock.d.ts +4 -4
  53. package/dist/lib/providers/amazonBedrock.js +2 -2
  54. package/dist/lib/providers/anthropic.d.ts +4 -4
  55. package/dist/lib/providers/anthropic.js +3 -12
  56. package/dist/lib/providers/anthropicBaseProvider.js +1 -2
  57. package/dist/lib/providers/azureOpenai.d.ts +4 -4
  58. package/dist/lib/providers/azureOpenai.js +49 -8
  59. package/dist/lib/providers/googleAiStudio.d.ts +4 -4
  60. package/dist/lib/providers/googleAiStudio.js +2 -2
  61. package/dist/lib/providers/googleVertex.js +2 -2
  62. package/dist/lib/providers/huggingFace.d.ts +4 -4
  63. package/dist/lib/providers/huggingFace.js +1 -2
  64. package/dist/lib/providers/litellm.d.ts +1 -1
  65. package/dist/lib/providers/litellm.js +1 -2
  66. package/dist/lib/providers/mistral.d.ts +4 -4
  67. package/dist/lib/providers/mistral.js +4 -4
  68. package/dist/lib/providers/ollama.js +7 -8
  69. package/dist/lib/providers/openAI.d.ts +4 -4
  70. package/dist/lib/providers/openAI.js +2 -2
  71. package/dist/lib/providers/openaiCompatible.js +5 -2
  72. package/dist/lib/providers/sagemaker/language-model.d.ts +5 -0
  73. package/dist/lib/providers/sagemaker/language-model.js +9 -1
  74. package/dist/lib/session/globalSessionState.d.ts +27 -0
  75. package/dist/lib/session/globalSessionState.js +77 -0
  76. package/dist/lib/types/{conversationTypes.d.ts → conversation.d.ts} +32 -0
  77. package/dist/lib/types/generateTypes.d.ts +1 -1
  78. package/dist/lib/types/streamTypes.d.ts +1 -1
  79. package/dist/lib/utils/conversationMemory.d.ts +22 -0
  80. package/dist/lib/utils/conversationMemory.js +121 -0
  81. package/dist/lib/utils/conversationMemoryUtils.d.ts +1 -1
  82. package/dist/lib/utils/conversationMemoryUtils.js +2 -2
  83. package/dist/lib/utils/messageBuilder.d.ts +1 -1
  84. package/dist/lib/utils/messageBuilder.js +1 -1
  85. package/dist/lib/utils/providerHealth.js +7 -3
  86. package/dist/lib/utils/redis.d.ts +42 -0
  87. package/dist/lib/utils/redis.js +263 -0
  88. package/dist/lib/utils/tokenLimits.d.ts +2 -2
  89. package/dist/lib/utils/tokenLimits.js +10 -3
  90. package/dist/mcp/servers/aiProviders/aiWorkflowTools.js +2 -2
  91. package/dist/neurolink.d.ts +15 -9
  92. package/dist/neurolink.js +218 -67
  93. package/dist/providers/amazonBedrock.d.ts +4 -4
  94. package/dist/providers/amazonBedrock.js +2 -2
  95. package/dist/providers/anthropic.d.ts +4 -4
  96. package/dist/providers/anthropic.js +3 -12
  97. package/dist/providers/anthropicBaseProvider.js +1 -2
  98. package/dist/providers/azureOpenai.d.ts +4 -4
  99. package/dist/providers/azureOpenai.js +49 -8
  100. package/dist/providers/googleAiStudio.d.ts +4 -4
  101. package/dist/providers/googleAiStudio.js +2 -2
  102. package/dist/providers/googleVertex.js +2 -2
  103. package/dist/providers/huggingFace.d.ts +4 -4
  104. package/dist/providers/huggingFace.js +1 -2
  105. package/dist/providers/litellm.d.ts +1 -1
  106. package/dist/providers/litellm.js +1 -2
  107. package/dist/providers/mistral.d.ts +4 -4
  108. package/dist/providers/mistral.js +4 -4
  109. package/dist/providers/ollama.js +7 -8
  110. package/dist/providers/openAI.d.ts +4 -4
  111. package/dist/providers/openAI.js +2 -2
  112. package/dist/providers/openaiCompatible.js +5 -2
  113. package/dist/providers/sagemaker/language-model.d.ts +5 -0
  114. package/dist/providers/sagemaker/language-model.js +9 -1
  115. package/dist/session/globalSessionState.d.ts +27 -0
  116. package/dist/session/globalSessionState.js +77 -0
  117. package/dist/types/{conversationTypes.d.ts → conversation.d.ts} +32 -0
  118. package/dist/types/generateTypes.d.ts +1 -1
  119. package/dist/types/streamTypes.d.ts +1 -1
  120. package/dist/utils/conversationMemory.d.ts +22 -0
  121. package/dist/utils/conversationMemory.js +121 -0
  122. package/dist/utils/conversationMemoryUtils.d.ts +1 -1
  123. package/dist/utils/conversationMemoryUtils.js +2 -2
  124. package/dist/utils/messageBuilder.d.ts +1 -1
  125. package/dist/utils/messageBuilder.js +1 -1
  126. package/dist/utils/providerHealth.js +7 -3
  127. package/dist/utils/redis.d.ts +42 -0
  128. package/dist/utils/redis.js +263 -0
  129. package/dist/utils/tokenLimits.d.ts +2 -2
  130. package/dist/utils/tokenLimits.js +10 -3
  131. package/package.json +3 -1
  132. /package/dist/config/{conversationMemoryConfig.js → conversationMemory.js} +0 -0
  133. /package/dist/lib/config/{conversationMemoryConfig.js → conversationMemory.js} +0 -0
  134. /package/dist/lib/types/{conversationTypes.js → conversation.js} +0 -0
  135. /package/dist/types/{conversationTypes.js → conversation.js} +0 -0
@@ -0,0 +1,158 @@
+ import yargs from "yargs";
+ import { hideBin } from "yargs/helpers";
+ import chalk from "chalk";
+ import packageJson from "../../package.json" with { type: "json" };
+ import { CLICommandFactory } from "./factories/commandFactory.js";
+ import { globalSession } from "../lib/session/globalSessionState.js";
+ import { handleError } from "./errorHandler.js";
+ import { logger } from "../lib/utils/logger.js";
+ // Enhanced CLI with Professional UX
+ export function initializeCliParser() {
+ return (yargs(hideBin(process.argv))
+ .scriptName("neurolink")
+ .usage("Usage: $0 <command> [options]")
+ .version(packageJson.version)
+ .help()
+ .alias("h", "help")
+ .alias("V", "version")
+ .strictOptions()
+ .strictCommands()
+ .demandCommand(1, "")
+ .recommendCommands()
+ .epilogue("For more info: https://github.com/juspay/neurolink")
+ .showHelpOnFail(true, "Specify --help for available options")
+ .middleware((argv) => {
+ // Handle no-color option globally
+ if (argv.noColor || process.env.NO_COLOR || !process.stdout.isTTY) {
+ process.env.FORCE_COLOR = "0";
+ }
+ // Handle custom config file
+ if (argv.configFile) {
+ process.env.NEUROLINK_CONFIG_FILE = argv.configFile;
+ }
+ // Control SDK logging based on debug flag
+ if (argv.debug) {
+ process.env.NEUROLINK_DEBUG = "true";
+ }
+ else {
+ // Always set to false when debug is not enabled (including when not provided)
+ process.env.NEUROLINK_DEBUG = "false";
+ }
+ // Keep existing quiet middleware
+ if (process.env.NEUROLINK_QUIET === "true" &&
+ typeof argv.quiet === "undefined") {
+ argv.quiet = true;
+ }
+ })
+ .fail((msg, err, yargsInstance) => {
+ // If we are in a loop, we don't want to exit the process.
+ // Instead, we just want to display the error and help text.
+ if (globalSession.getCurrentSessionId()) {
+ if (msg) {
+ logger.error(chalk.red(msg)); // This is a yargs validation error (e.g., missing argument)
+ yargsInstance.showHelp("log");
+ }
+ else if (err) {
+ // This is an error thrown from a command handler
+ // The loop's catch block will handle this, so we just re-throw.
+ // throw err;
+ handleError(err, "CLI Error in Loop Session");
+ }
+ return;
+ }
+ // Original logic for single-command execution
+ const exitProcess = () => {
+ if (!process.exitCode) {
+ process.exit(1);
+ }
+ };
+ if (err) {
+ // Error likely from an async command handler (e.g., via _handleError)
+ // _handleError already prints and calls process.exit(1).
+ // If we're here, it means _handleError's process.exit might not have been caught by the top-level async IIFE.
+ // Or, it's a synchronous yargs error during parsing that yargs itself throws.
+ const alreadyExitedByHandleError = err?.exitCode !== undefined;
+ // A simple heuristic: if the error message doesn't look like one of our handled generic messages,
+ // it might be a direct yargs parsing error.
+ const isLikelyYargsInternalError = err.message && // Ensure err.message exists
+ !err.message.includes("Authentication error") &&
+ !err.message.includes("Network error") &&
+ !err.message.includes("Authorization error") &&
+ !err.message.includes("Permission denied") && // from config export
+ !err.message.includes("Invalid or unparseable JSON"); // from config import
+ if (!alreadyExitedByHandleError) {
+ process.stderr.write(chalk.red(`CLI Error: ${err.message || msg || "An unexpected error occurred."}\n`));
+ // If it's a yargs internal parsing error, show help.
+ if (isLikelyYargsInternalError && msg) {
+ yargsInstance.showHelp((h) => {
+ process.stderr.write(h + "\n");
+ exitProcess();
+ });
+ return;
+ }
+ exitProcess();
+ }
+ return; // Exit was already called or error handled
+ }
+ // Yargs parsing/validation error (msg is present, err is null)
+ if (msg) {
+ let processedMsg = `Error: ${msg}\n`;
+ if (msg.includes("Not enough non-option arguments") ||
+ msg.includes("Missing required argument") ||
+ msg.includes("Unknown command")) {
+ process.stderr.write(chalk.red(processedMsg)); // Print error first
+ yargsInstance.showHelp((h) => {
+ process.stderr.write("\n" + h + "\n");
+ exitProcess();
+ });
+ return; // Exit happens in callback
+ }
+ else if (msg.includes("Unknown argument") ||
+ msg.includes("Invalid values")) {
+ processedMsg = `Error: ${msg}\nUse --help to see available options.\n`;
+ }
+ process.stderr.write(chalk.red(processedMsg));
+ }
+ else {
+ // No specific message, but failure occurred (e.g. demandCommand failed silently)
+ yargsInstance.showHelp((h) => {
+ process.stderr.write(h + "\n");
+ exitProcess();
+ });
+ return; // Exit happens in callback
+ }
+ exitProcess(); // Default exit
+ })
+ // Generate Command (Primary) - Using CLICommandFactory
+ .command(CLICommandFactory.createGenerateCommand())
+ // Stream Text Command - Using CLICommandFactory
+ .command(CLICommandFactory.createStreamCommand())
+ // Batch Processing Command - Using CLICommandFactory
+ .command(CLICommandFactory.createBatchCommand())
+ // Provider Command Group - Using CLICommandFactory
+ .command(CLICommandFactory.createProviderCommands())
+ // Status command alias - Using CLICommandFactory
+ .command(CLICommandFactory.createStatusCommand())
+ // Models Command Group - Using CLICommandFactory
+ .command(CLICommandFactory.createModelsCommands())
+ // MCP Command Group - Using CLICommandFactory
+ .command(CLICommandFactory.createMCPCommands())
+ // Discover Command - Using CLICommandFactory
+ .command(CLICommandFactory.createDiscoverCommand())
+ // Configuration Command Group - Using CLICommandFactory
+ .command(CLICommandFactory.createConfigCommands())
+ // Memory Command Group - Using CLICommandFactory
+ .command(CLICommandFactory.createMemoryCommands())
+ // Get Best Provider Command - Using CLICommandFactory
+ .command(CLICommandFactory.createBestProviderCommand())
+ // Validate Command (alias for config validate)
+ .command(CLICommandFactory.createValidateCommand())
+ // Completion Command - Using CLICommandFactory
+ .command(CLICommandFactory.createCompletionCommand())
+ // Ollama Command Group - Using CLICommandFactory
+ .command(CLICommandFactory.createOllamaCommands())
+ // SageMaker Command Group - Using CLICommandFactory
+ .command(CLICommandFactory.createSageMakerCommands())
+ // Loop Command - Using CLICommandFactory
+ .command(CLICommandFactory.createLoopCommand()));
+ }
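By its +158 line count, this new file is evidently package/dist/cli/parser.js. It only builds and returns the configured yargs instance; running a command is left to the caller. A minimal sketch of how an entry point might consume it, assuming the exported name shown above and standard yargs v17 behavior (the real dist/cli/index.js wiring is not part of this hunk):

// Hypothetical entry-point usage; names other than initializeCliParser are illustrative.
import { initializeCliParser } from "./parser.js";

async function main(): Promise<void> {
  // parseAsync() resolves once the matched command handler finishes.
  await initializeCliParser().parseAsync();
}

main().catch((err) => {
  // The .fail() handler above covers most errors; this is a last-resort guard.
  process.stderr.write(`${err instanceof Error ? err.message : String(err)}\n`);
  process.exit(1);
});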
@@ -126,6 +126,9 @@ export class OllamaUtils
  stdio: "ignore",
  detached: true,
  });
+ child.on("error", (err) => {
+ logger.error("Error starting Ollama serve process:", err);
+ });
  child.unref();
  logger.always(chalk.green("✅ Ollama service started"));
  }
@@ -141,6 +144,9 @@ export class OllamaUtils
  stdio: "ignore",
  detached: true,
  });
+ child.on("error", (err) => {
+ logger.error("Error starting Ollama serve process:", err);
+ });
  child.unref();
  logger.always(chalk.green("✅ Ollama service started"));
  }
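The pattern being added in both hunks attaches an "error" listener to a detached child process before unref'ing it, so a missing ollama binary surfaces as a logged error rather than an unhandled "error" event. A standalone sketch of the same pattern using only Node's child_process API (the exact Ollama invocation is assumed; the real one lives in OllamaUtils):

import { spawn } from "node:child_process";

// Assumed command line for illustration.
const child = spawn("ollama", ["serve"], {
  stdio: "ignore",
  detached: true,
});
child.on("error", (err) => {
  // Fires when the process cannot be spawned (e.g. ENOENT if ollama is not installed).
  console.error("Error starting Ollama serve process:", err);
});
child.unref(); // Let the parent exit independently of the detached child.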
@@ -2,7 +2,7 @@
  * Conversation Memory Configuration
  * Provides default values for conversation memory feature with environment variable support
  */
- import type { ConversationMemoryConfig } from "../types/conversationTypes.js";
+ import type { ConversationMemoryConfig } from "../types/conversation.js";
  /**
  * Default maximum number of turns per session
  */
@@ -100,7 +100,15 @@ export class BaseProvider
  yield { content: buffer };
  buffer = "";
  // Small delay to simulate streaming (1-10ms)
- await new Promise((resolve) => setTimeout(resolve, Math.random() * 9 + 1));
+ await new Promise((resolve, reject) => {
+ const timeoutId = setTimeout(resolve, Math.random() * 9 + 1);
+ // Handle potential timeout issues
+ if (!timeoutId) {
+ reject(new Error("Failed to create timeout"));
+ }
+ }).catch((err) => {
+ logger.error("Error in streaming delay:", err);
+ });
  }
  }
  // Yield all remaining content
@@ -168,6 +176,31 @@ export class BaseProvider
  ...(options.tools || {}), // Include external tools passed from NeuroLink
  }
  : {};
+ // DEBUG: Log detailed tool information for generate
+ logger.debug("BaseProvider Generate - Tool Loading Debug", {
+ provider: this.providerName,
+ shouldUseTools,
+ baseToolsProvided: !!baseTools,
+ baseToolCount: baseTools ? Object.keys(baseTools).length : 0,
+ finalToolCount: tools ? Object.keys(tools).length : 0,
+ toolNames: tools ? Object.keys(tools).slice(0, 10) : [],
+ disableTools: options.disableTools,
+ supportsTools: this.supportsTools(),
+ externalToolsCount: options.tools
+ ? Object.keys(options.tools).length
+ : 0,
+ });
+ if (tools && Object.keys(tools).length > 0) {
+ logger.debug("BaseProvider Generate - First 5 Tools Detail", {
+ provider: this.providerName,
+ tools: Object.keys(tools)
+ .slice(0, 5)
+ .map((name) => ({
+ name,
+ description: tools[name]?.description?.substring(0, 100),
+ })),
+ });
+ }
  logger.debug(`[BaseProvider.generate] Tools for ${this.providerName}:`, {
  directTools: getKeyCount(baseTools),
  directToolNames: getKeysAsString(baseTools),
@@ -187,13 +220,19 @@ export class BaseProvider
  maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
  toolChoice: shouldUseTools ? "auto" : "none",
  temperature: options.temperature,
- maxTokens: options.maxTokens || 8192,
+ maxTokens: options.maxTokens, // No default limit - unlimited unless specified
  });
  // Accumulate the streamed content
  let accumulatedContent = "";
  // Wait for the stream to complete and accumulate content
- for await (const chunk of streamResult.textStream) {
- accumulatedContent += chunk;
+ try {
+ for await (const chunk of streamResult.textStream) {
+ accumulatedContent += chunk;
+ }
+ }
+ catch (streamError) {
+ logger.error(`Error reading text stream for ${this.providerName}:`, streamError);
+ throw streamError;
  }
  // Get the final result - this should include usage, toolCalls, etc.
  const usage = await streamResult.usage;
@@ -2,7 +2,7 @@
  * Central configuration constants for NeuroLink
  * Single source of truth for all default values
  */
- export declare const DEFAULT_MAX_TOKENS = 8192;
+ export declare const DEFAULT_MAX_TOKENS: undefined;
  export declare const DEFAULT_TEMPERATURE = 0.7;
  export declare const DEFAULT_TIMEOUT = 30000;
  export declare const DEFAULT_MAX_STEPS = 5;
@@ -67,6 +67,15 @@ export declare const PROVIDER_MAX_TOKENS: {
  "anthropic.claude-3-5-sonnet-20240620-v1:0": number;
  default: number;
  };
+ azure: {
+ "gpt-4o": number;
+ "gpt-4o-mini": number;
+ "gpt-4.1": number;
+ "gpt-3.5-turbo": number;
+ "gpt-4": number;
+ "gpt-4-turbo": number;
+ default: number;
+ };
  ollama: {
  default: number;
  };
@@ -79,7 +88,7 @@ export declare const CLI_LIMITS: {
  maxTokens: {
  min: number;
  max: number;
- default: number;
+ default: undefined;
  };
  temperature: {
  min: number;
@@ -99,6 +108,6 @@ export declare const SYSTEM_LIMITS: {
  DEFAULT_BACKOFF_MULTIPLIER: number;
  };
  export declare const ENV_DEFAULTS: {
- maxTokens: number;
+ maxTokens: number | undefined;
  temperature: number;
  };
@@ -3,7 +3,7 @@
  * Single source of truth for all default values
  */
  // Core AI Generation Defaults
- export const DEFAULT_MAX_TOKENS = 8192; // Changed from 10000 to fix Anthropic error
+ export const DEFAULT_MAX_TOKENS = undefined; // Unlimited by default - let providers decide their own limits
  export const DEFAULT_TEMPERATURE = 0.7;
  export const DEFAULT_TIMEOUT = 30000;
  export const DEFAULT_MAX_STEPS = 5; // Default multi-turn tool execution steps
@@ -72,6 +72,15 @@ export const PROVIDER_MAX_TOKENS = {
  "anthropic.claude-3-5-sonnet-20240620-v1:0": 4096,
  default: 4096,
  },
+ azure: {
+ "gpt-4o": 16384,
+ "gpt-4o-mini": 16384,
+ "gpt-4.1": 16384,
+ "gpt-3.5-turbo": 4096,
+ "gpt-4": 8192,
+ "gpt-4-turbo": 4096,
+ default: 8192, // Azure OpenAI generally supports similar limits to OpenAI
+ },
  ollama: {
  default: 8192, // Ollama typically supports higher limits
  },
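The new azure entries presumably get consumed the same way the other per-provider tables are: look up the model, fall back to the provider default. A hypothetical lookup helper, assuming only the PROVIDER_MAX_TOKENS shape declared above (the resolver actually used by tokenLimits.js is not shown in this hunk):

import { PROVIDER_MAX_TOKENS } from "./constants.js";

// Hypothetical helper; name and signature are illustrative only.
function lookupMaxTokens(provider: keyof typeof PROVIDER_MAX_TOKENS, model: string): number {
  const table = PROVIDER_MAX_TOKENS[provider];
  const entry = (table as Record<string, number>)[model];
  return entry ?? table.default;
}

// e.g. lookupMaxTokens("azure", "gpt-4o") -> 16384; unknown Azure models fall back to 8192.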
@@ -85,7 +94,7 @@ export const CLI_LIMITS = {
  maxTokens: {
  min: 1,
  max: 50000,
- default: DEFAULT_MAX_TOKENS,
+ default: undefined, // No default limit - unlimited by default
  },
  temperature: {
  min: 0,
@@ -112,10 +121,17 @@ export const SYSTEM_LIMITS = {
  };
  // Environment Variable Support (for future use)
  export const ENV_DEFAULTS = {
- maxTokens: process.env.NEUROLINK_DEFAULT_MAX_TOKENS
- ? parseInt(process.env.NEUROLINK_DEFAULT_MAX_TOKENS, 10)
- : DEFAULT_MAX_TOKENS,
+ maxTokens: (() => {
+ if (!process.env.NEUROLINK_DEFAULT_MAX_TOKENS) {
+ return undefined;
+ }
+ const n = parseInt(process.env.NEUROLINK_DEFAULT_MAX_TOKENS, 10);
+ return Number.isFinite(n) ? n : undefined;
+ })(),
  temperature: process.env.NEUROLINK_DEFAULT_TEMPERATURE
- ? parseFloat(process.env.NEUROLINK_DEFAULT_TEMPERATURE)
+ ? (() => {
+ const t = parseFloat(process.env.NEUROLINK_DEFAULT_TEMPERATURE);
+ return Number.isFinite(t) ? t : DEFAULT_TEMPERATURE;
+ })()
  : DEFAULT_TEMPERATURE,
  };
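The ENV_DEFAULTS change above guards both environment overrides with Number.isFinite, so a malformed value degrades to the fallback instead of propagating NaN. The same guard extracted into a small helper, purely for illustration (the package keeps the IIFEs inline, and uses parseInt for maxTokens):

// Hypothetical helper mirroring the inline IIFEs above.
function parseFiniteNumber(raw: string | undefined, fallback: number | undefined): number | undefined {
  if (!raw) {
    return fallback;
  }
  const parsed = Number.parseFloat(raw);
  return Number.isFinite(parsed) ? parsed : fallback;
}

// parseFiniteNumber(process.env.NEUROLINK_DEFAULT_MAX_TOKENS, undefined) -> undefined when unset or "abc"
// parseFiniteNumber(process.env.NEUROLINK_DEFAULT_TEMPERATURE, 0.7)      -> 0.7 when unset or malformed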
@@ -0,0 +1,23 @@
+ /**
+ * Conversation Memory Factory for NeuroLink
+ * Creates appropriate conversation memory manager based on configuration
+ */
+ import type { ConversationMemoryConfig, RedisStorageConfig } from "../types/conversation.js";
+ import { ConversationMemoryManager } from "./conversationMemoryManager.js";
+ import { RedisConversationMemoryManager } from "./redisConversationMemoryManager.js";
+ /**
+ * Configuration for memory storage type
+ */
+ export type StorageType = "memory" | "redis";
+ /**
+ * Creates a conversation memory manager based on configuration
+ */
+ export declare function createConversationMemoryManager(config: ConversationMemoryConfig, storageType?: StorageType, redisConfig?: RedisStorageConfig): ConversationMemoryManager | RedisConversationMemoryManager;
+ /**
+ * Get storage type from environment variable or configuration
+ */
+ export declare function getStorageType(): StorageType;
+ /**
+ * Get Redis configuration from environment variables
+ */
+ export declare function getRedisConfigFromEnv(): RedisStorageConfig;
@@ -0,0 +1,144 @@
+ /**
+ * Conversation Memory Factory for NeuroLink
+ * Creates appropriate conversation memory manager based on configuration
+ */
+ import { ConversationMemoryManager } from "./conversationMemoryManager.js";
+ import { RedisConversationMemoryManager } from "./redisConversationMemoryManager.js";
+ import { logger } from "../utils/logger.js";
+ /**
+ * Creates a conversation memory manager based on configuration
+ */
+ export function createConversationMemoryManager(config, storageType = "memory", redisConfig) {
+ logger.debug("[conversationMemoryFactory] Creating conversation memory manager", {
+ storageType,
+ config: {
+ enabled: config.enabled,
+ maxSessions: config.maxSessions,
+ maxTurnsPerSession: config.maxTurnsPerSession,
+ enableSummarization: config.enableSummarization,
+ summarizationThresholdTurns: config.summarizationThresholdTurns,
+ summarizationTargetTurns: config.summarizationTargetTurns,
+ summarizationProvider: config.summarizationProvider,
+ summarizationModel: config.summarizationModel,
+ },
+ hasRedisConfig: !!redisConfig,
+ });
+ // Default to memory storage
+ if (storageType === "memory" || !storageType) {
+ logger.debug("[conversationMemoryFactory] Creating in-memory conversation manager");
+ const memoryManager = new ConversationMemoryManager(config);
+ logger.debug("[conversationMemoryFactory] In-memory conversation manager created successfully", {
+ managerType: memoryManager.constructor.name,
+ });
+ return memoryManager;
+ }
+ // Redis storage
+ if (storageType === "redis") {
+ logger.debug("[conversationMemoryFactory] Creating Redis conversation manager", {
+ host: redisConfig?.host || "localhost",
+ port: redisConfig?.port || 6379,
+ keyPrefix: redisConfig?.keyPrefix || "neurolink:conversation:",
+ ttl: redisConfig?.ttl || 86400,
+ hasConnectionOptions: !!redisConfig?.connectionOptions,
+ });
+ const redisManager = new RedisConversationMemoryManager(config, redisConfig);
+ logger.debug("[conversationMemoryFactory] Redis conversation manager created successfully", {
+ managerType: redisManager.constructor.name,
+ config: {
+ maxSessions: config.maxSessions,
+ maxTurnsPerSession: config.maxTurnsPerSession,
+ },
+ });
+ return redisManager;
+ }
+ // Fallback to memory storage for unknown types
+ logger.warn(`[conversationMemoryFactory] Unknown storage type: ${storageType}, falling back to memory storage`);
+ const fallbackManager = new ConversationMemoryManager(config);
+ logger.debug("[conversationMemoryFactory] Fallback memory manager created successfully", {
+ managerType: fallbackManager.constructor.name,
+ });
+ return fallbackManager;
+ }
+ /**
+ * Get storage type from environment variable or configuration
+ */
+ export function getStorageType() {
+ // Get the raw value from environment, or use default
+ const rawStorageType = process.env.STORAGE_TYPE;
+ // Default to "memory" if not set
+ if (!rawStorageType) {
+ logger.debug("[conversationMemoryFactory] No storage type configured, using default", {
+ storageType: "memory",
+ fromEnv: false,
+ });
+ return "memory";
+ }
+ // Normalize: trim and convert to lowercase
+ const normalizedStorageType = rawStorageType.trim().toLowerCase();
+ // Validate against allowed StorageType values
+ const validStorageTypes = ["memory", "redis"];
+ if (validStorageTypes.includes(normalizedStorageType)) {
+ logger.debug("[conversationMemoryFactory] Determined storage type", {
+ storageType: normalizedStorageType,
+ fromEnv: true,
+ envValue: rawStorageType,
+ normalized: normalizedStorageType !== rawStorageType,
+ });
+ return normalizedStorageType;
+ }
+ else {
+ // Invalid storage type, log warning and return default
+ logger.warn(`[conversationMemoryFactory] Unrecognized storage type in environment: "${rawStorageType}", falling back to "memory"`, {
+ providedValue: rawStorageType,
+ normalizedValue: normalizedStorageType,
+ validValues: validStorageTypes,
+ usingDefault: true,
+ });
+ return "memory";
+ }
+ }
+ /**
+ * Get Redis configuration from environment variables
+ */
+ export function getRedisConfigFromEnv() {
+ logger.debug("[conversationMemoryFactory] Reading Redis configuration from environment", {
+ REDIS_HOST: process.env.REDIS_HOST || "(not set)",
+ REDIS_PORT: process.env.REDIS_PORT || "(not set)",
+ REDIS_PASSWORD: process.env.REDIS_PASSWORD ? "******" : "(not set)",
+ REDIS_DB: process.env.REDIS_DB || "(not set)",
+ REDIS_KEY_PREFIX: process.env.REDIS_KEY_PREFIX || "(not set)",
+ REDIS_TTL: process.env.REDIS_TTL || "(not set)",
+ REDIS_CONNECT_TIMEOUT: process.env.REDIS_CONNECT_TIMEOUT || "(not set)",
+ REDIS_MAX_RETRIES: process.env.REDIS_MAX_RETRIES || "(not set)",
+ REDIS_RETRY_DELAY: process.env.REDIS_RETRY_DELAY || "(not set)",
+ });
+ const config = {
+ host: process.env.REDIS_HOST,
+ port: process.env.REDIS_PORT ? Number(process.env.REDIS_PORT) : undefined,
+ password: process.env.REDIS_PASSWORD,
+ db: process.env.REDIS_DB ? Number(process.env.REDIS_DB) : undefined,
+ keyPrefix: process.env.REDIS_KEY_PREFIX,
+ ttl: process.env.REDIS_TTL ? Number(process.env.REDIS_TTL) : undefined,
+ connectionOptions: {
+ connectTimeout: process.env.REDIS_CONNECT_TIMEOUT
+ ? Number(process.env.REDIS_CONNECT_TIMEOUT)
+ : undefined,
+ maxRetriesPerRequest: process.env.REDIS_MAX_RETRIES
+ ? Number(process.env.REDIS_MAX_RETRIES)
+ : undefined,
+ retryDelayOnFailover: process.env.REDIS_RETRY_DELAY
+ ? Number(process.env.REDIS_RETRY_DELAY)
+ : undefined,
+ },
+ };
+ logger.debug("[conversationMemoryFactory] Redis configuration normalized", {
+ host: config.host || "localhost",
+ port: config.port || 6379,
+ hasPassword: !!config.password,
+ db: config.db || 0,
+ keyPrefix: config.keyPrefix || "neurolink:conversation:",
+ ttl: config.ttl || 86400,
+ hasConnectionOptions: !!config.connectionOptions,
+ });
+ return config;
+ }
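Putting the three exports together, a caller would typically derive the storage type and Redis settings from the environment and hand them to the factory. A usage sketch based only on the declarations in conversationMemoryFactory.d.ts above; the config fields are the ones the factory logs, and the full ConversationMemoryConfig shape (defined in types/conversation.d.ts) is not shown in this diff:

import {
  createConversationMemoryManager,
  getStorageType,
  getRedisConfigFromEnv,
} from "./conversationMemoryFactory.js";

// Sketch only - field names taken from the debug logging above; values are illustrative.
const memoryConfig = {
  enabled: true,
  maxSessions: 100,
  maxTurnsPerSession: 50,
  enableSummarization: false,
};

const storageType = getStorageType(); // "redis" when STORAGE_TYPE=redis, otherwise "memory"
const manager = storageType === "redis"
  ? createConversationMemoryManager(memoryConfig, "redis", getRedisConfigFromEnv())
  : createConversationMemoryManager(memoryConfig);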
@@ -0,0 +1,14 @@
+ /**
+ * Conversation Memory Initializer
+ * Provides integration with Redis storage for conversation memory
+ */
+ import type { ConversationMemoryConfig } from "../types/conversation.js";
+ import type { ConversationMemoryManager } from "./conversationMemoryManager.js";
+ import type { RedisConversationMemoryManager } from "./redisConversationMemoryManager.js";
+ /**
+ * Initialize conversation memory for NeuroLink
+ * This function decides whether to use in-memory or Redis storage
+ */
+ export declare function initializeConversationMemory(config?: {
+ conversationMemory?: Partial<ConversationMemoryConfig>;
+ }): Promise<ConversationMemoryManager | RedisConversationMemoryManager | null>;
@@ -0,0 +1,127 @@
+ /**
+ * Conversation Memory Initializer
+ * Provides integration with Redis storage for conversation memory
+ */
+ import { createConversationMemoryManager, getStorageType, getRedisConfigFromEnv, } from "./conversationMemoryFactory.js";
+ import { applyConversationMemoryDefaults } from "../utils/conversationMemory.js";
+ import { logger } from "../utils/logger.js";
+ /**
+ * Initialize conversation memory for NeuroLink
+ * This function decides whether to use in-memory or Redis storage
+ */
+ export async function initializeConversationMemory(config) {
+ logger.debug("[conversationMemoryInitializer] Initialize conversation memory called", {
+ hasConfig: !!config,
+ hasMemoryConfig: !!config?.conversationMemory,
+ memoryEnabled: config?.conversationMemory?.enabled || false,
+ storageType: process.env.STORAGE_TYPE || "memory",
+ });
+ if (!config?.conversationMemory?.enabled) {
+ logger.debug("[conversationMemoryInitializer] Conversation memory not enabled - skipping initialization");
+ return null;
+ }
+ try {
+ // Apply default configuration
+ logger.debug("[conversationMemoryInitializer] Applying conversation memory defaults");
+ const memoryConfig = applyConversationMemoryDefaults(config.conversationMemory);
+ logger.debug("[conversationMemoryInitializer] Memory configuration processed", {
+ enabled: memoryConfig.enabled,
+ maxSessions: memoryConfig.maxSessions,
+ maxTurnsPerSession: memoryConfig.maxTurnsPerSession,
+ enableSummarization: memoryConfig.enableSummarization,
+ });
+ // Determine storage type from environment
+ const storageType = getStorageType();
+ logger.debug("[conversationMemoryInitializer] Storage type determined", {
+ storageType,
+ fromEnv: !!process.env.STORAGE_TYPE,
+ });
+ if (storageType === "redis") {
+ logger.info("[conversationMemoryInitializer] Initializing Redis-based conversation memory manager");
+ // Get Redis configuration from environment
+ logger.debug("[conversationMemoryInitializer] Getting Redis configuration from environment");
+ const redisConfig = getRedisConfigFromEnv();
+ logger.debug("[conversationMemoryInitializer] Redis configuration retrieved", {
+ host: redisConfig.host || "localhost",
+ port: redisConfig.port || 6379,
+ hasPassword: !!redisConfig.password,
+ db: redisConfig.db || 0,
+ keyPrefix: redisConfig.keyPrefix || "neurolink:conversation:",
+ ttl: redisConfig.ttl || 86400,
+ });
+ // Create Redis-based conversation memory manager
+ logger.debug("[conversationMemoryInitializer] Creating Redis conversation memory manager");
+ const redisMemoryManager = createConversationMemoryManager(memoryConfig, "redis", redisConfig);
+ logger.debug("[conversationMemoryInitializer] Checking Redis manager creation result", {
+ managerType: redisMemoryManager?.constructor?.name || "unknown",
+ isRedisType: redisMemoryManager?.constructor?.name ===
+ "RedisConversationMemoryManager",
+ hasConfig: !!redisMemoryManager?.config,
+ });
+ logger.info("[conversationMemoryInitializer] Redis conversation memory manager created successfully", {
+ host: redisConfig.host || "localhost",
+ port: redisConfig.port || 6379,
+ maxSessions: memoryConfig.maxSessions,
+ maxTurnsPerSession: memoryConfig.maxTurnsPerSession,
+ managerType: redisMemoryManager?.constructor?.name,
+ });
+ // Perform basic validation
+ if (redisMemoryManager?.constructor?.name !==
+ "RedisConversationMemoryManager") {
+ logger.warn("[conversationMemoryInitializer] Created manager is not of RedisConversationMemoryManager type", {
+ actualType: redisMemoryManager?.constructor?.name,
+ });
+ }
+ return redisMemoryManager;
+ }
+ else {
+ logger.info("[conversationMemoryInitializer] Initializing in-memory conversation memory manager");
+ // Create in-memory conversation memory manager
+ logger.debug("[conversationMemoryInitializer] Creating in-memory conversation memory manager");
+ const memoryManager = createConversationMemoryManager(memoryConfig);
+ logger.debug("[conversationMemoryInitializer] Checking memory manager creation result", {
+ managerType: memoryManager?.constructor?.name || "unknown",
+ isInMemoryType: memoryManager?.constructor?.name === "ConversationMemoryManager",
+ hasConfig: !!memoryManager?.config,
+ });
+ logger.info("[conversationMemoryInitializer] In-memory conversation memory manager created successfully", {
+ maxSessions: memoryConfig.maxSessions,
+ maxTurnsPerSession: memoryConfig.maxTurnsPerSession,
+ managerType: memoryManager?.constructor?.name,
+ });
+ return memoryManager;
+ }
+ }
+ catch (error) {
+ logger.error("[conversationMemoryInitializer] Failed to initialize conversation memory", {
+ error: error instanceof Error ? error.message : String(error),
+ errorName: error instanceof Error ? error.name : "UnknownError",
+ errorStack: error instanceof Error ? error.stack : undefined,
+ storageType: process.env.STORAGE_TYPE || "memory",
+ memoryConfig: {
+ enabled: config?.conversationMemory?.enabled,
+ maxSessions: config?.conversationMemory?.maxSessions,
+ maxTurnsPerSession: config?.conversationMemory?.maxTurnsPerSession,
+ },
+ redisConfig: {
+ host: process.env.REDIS_HOST || "(not set)",
+ port: process.env.REDIS_PORT || "(not set)",
+ hasPassword: !!process.env.REDIS_PASSWORD,
+ keyPrefix: process.env.REDIS_KEY_PREFIX || "(not set)",
+ },
+ });
+ // Log additional diagnostics for redis errors
+ if (process.env.STORAGE_TYPE === "redis") {
+ logger.error("[conversationMemoryInitializer] Redis configuration error details", {
+ REDIS_HOST: process.env.REDIS_HOST || "(not set)",
+ REDIS_PORT: process.env.REDIS_PORT || "(not set)",
+ REDIS_PASSWORD: process.env.REDIS_PASSWORD ? "******" : "(not set)",
+ REDIS_DB: process.env.REDIS_DB || "(not set)",
+ REDIS_KEY_PREFIX: process.env.REDIS_KEY_PREFIX || "(not set)",
+ REDIS_TTL: process.env.REDIS_TTL || "(not set)",
+ errorMessage: error instanceof Error ? error.message : String(error),
+ });
+ }
+ throw error;
+ }
+ }
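In practice this initializer is the entry point: it returns null unless conversation memory is enabled, then picks the backend from STORAGE_TYPE. A hedged sketch of calling it directly, assuming only the exported signature in conversationMemoryInitializer.d.ts and the REDIS_* variables read by getRedisConfigFromEnv (values are illustrative; this is not the SDK's internal wiring, which lives in neurolink.js):

import { initializeConversationMemory } from "./conversationMemoryInitializer.js";

// Environment selects the backend; these values are examples only.
process.env.STORAGE_TYPE = "redis";
process.env.REDIS_HOST = "localhost";
process.env.REDIS_PORT = "6379";

const manager = await initializeConversationMemory({
  conversationMemory: { enabled: true, maxTurnsPerSession: 50 },
});
// null when conversationMemory.enabled is falsy; otherwise an in-memory or Redis-backed manager.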