@juspay/neurolink 7.11.1 → 7.13.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. package/CHANGELOG.md +12 -0
  2. package/README.md +16 -0
  3. package/dist/config/conversationMemoryConfig.d.ts +27 -0
  4. package/dist/config/conversationMemoryConfig.js +39 -0
  5. package/dist/context/ContextManager.d.ts +28 -0
  6. package/dist/context/ContextManager.js +102 -0
  7. package/dist/context/config.d.ts +5 -0
  8. package/dist/context/config.js +38 -0
  9. package/dist/context/types.d.ts +20 -0
  10. package/dist/context/types.js +1 -0
  11. package/dist/context/utils.d.ts +7 -0
  12. package/dist/context/utils.js +8 -0
  13. package/dist/core/baseProvider.js +4 -2
  14. package/dist/core/conversationMemoryManager.d.ts +41 -0
  15. package/dist/core/conversationMemoryManager.js +152 -0
  16. package/dist/core/types.d.ts +2 -0
  17. package/dist/lib/config/conversationMemoryConfig.d.ts +27 -0
  18. package/dist/lib/config/conversationMemoryConfig.js +39 -0
  19. package/dist/lib/context/ContextManager.d.ts +28 -0
  20. package/dist/lib/context/ContextManager.js +102 -0
  21. package/dist/lib/context/config.d.ts +5 -0
  22. package/dist/lib/context/config.js +38 -0
  23. package/dist/lib/context/types.d.ts +20 -0
  24. package/dist/lib/context/types.js +1 -0
  25. package/dist/lib/context/utils.d.ts +7 -0
  26. package/dist/lib/context/utils.js +8 -0
  27. package/dist/lib/core/baseProvider.js +4 -2
  28. package/dist/lib/core/conversationMemoryManager.d.ts +41 -0
  29. package/dist/lib/core/conversationMemoryManager.js +152 -0
  30. package/dist/lib/core/types.d.ts +2 -0
  31. package/dist/lib/neurolink.d.ts +39 -4
  32. package/dist/lib/neurolink.js +106 -5
  33. package/dist/lib/providers/amazonBedrock.js +4 -2
  34. package/dist/lib/providers/anthropic.js +4 -2
  35. package/dist/lib/providers/azureOpenai.js +4 -2
  36. package/dist/lib/providers/googleAiStudio.js +4 -2
  37. package/dist/lib/providers/googleVertex.js +4 -2
  38. package/dist/lib/providers/huggingFace.js +4 -2
  39. package/dist/lib/providers/litellm.js +4 -2
  40. package/dist/lib/providers/mistral.js +3 -2
  41. package/dist/lib/providers/openAI.js +4 -2
  42. package/dist/lib/types/conversationTypes.d.ts +95 -0
  43. package/dist/lib/types/conversationTypes.js +17 -0
  44. package/dist/lib/types/streamTypes.d.ts +2 -0
  45. package/dist/lib/utils/conversationMemoryUtils.d.ts +22 -0
  46. package/dist/lib/utils/conversationMemoryUtils.js +77 -0
  47. package/dist/lib/utils/messageBuilder.d.ts +13 -0
  48. package/dist/lib/utils/messageBuilder.js +48 -0
  49. package/dist/neurolink.d.ts +39 -4
  50. package/dist/neurolink.js +106 -5
  51. package/dist/providers/amazonBedrock.js +4 -2
  52. package/dist/providers/anthropic.js +4 -2
  53. package/dist/providers/azureOpenai.js +4 -2
  54. package/dist/providers/googleAiStudio.js +4 -2
  55. package/dist/providers/googleVertex.js +4 -2
  56. package/dist/providers/huggingFace.js +4 -2
  57. package/dist/providers/litellm.js +4 -2
  58. package/dist/providers/mistral.js +3 -2
  59. package/dist/providers/openAI.js +4 -2
  60. package/dist/types/conversationTypes.d.ts +95 -0
  61. package/dist/types/conversationTypes.js +17 -0
  62. package/dist/types/streamTypes.d.ts +2 -0
  63. package/dist/utils/conversationMemoryUtils.d.ts +22 -0
  64. package/dist/utils/conversationMemoryUtils.js +77 -0
  65. package/dist/utils/messageBuilder.d.ts +13 -0
  66. package/dist/utils/messageBuilder.js +48 -0
  67. package/package.json +1 -1
@@ -0,0 +1,102 @@
+ import { logger } from "../utils/logger.js";
+ import { formatHistoryToString } from "./utils.js";
+ /**
+ * Manages conversation context, automatically summarizing it when it
+ * exceeds a specified word count limit.
+ */
+ export class ContextManager {
+ static SUMMARIZATION_FAILED_WARNING = "[System Warning: Context summarization failed. Conversation history has been truncated.]";
+ static SUMMARIZATION_EMPTY_WARNING = "[System Warning: Context summarization failed to return valid content. Conversation history has been truncated.]";
+ history;
+ wordCount;
+ internalGenerator;
+ config;
+ constructor(generatorFunction, config, initialContext = "This is the start of the conversation.") {
+ this.internalGenerator = generatorFunction;
+ this.config = config;
+ const initialMessage = { role: "system", content: initialContext };
+ initialMessage.wordCount = this.config.estimateWordCount([initialMessage]);
+ this.history = [initialMessage];
+ this.wordCount = initialMessage.wordCount;
+ }
+ async addTurn(role, message) {
+ const newMessage = { role, content: message };
+ newMessage.wordCount = this.config.estimateWordCount([newMessage]);
+ this.history.push(newMessage);
+ this.wordCount += newMessage.wordCount;
+ logger.info(`[ContextManager] Current word count: ${this.wordCount} / ${this.config.highWaterMarkWords}`);
+ if (this.wordCount > this.config.highWaterMarkWords) {
+ await this._summarize();
+ }
+ }
+ /**
+ * Formats the history including the latest user turn for the prompt, without modifying the permanent history.
+ */
+ getContextForPrompt(role, message) {
+ const tempHistory = [...this.history, { role, content: message }];
+ return formatHistoryToString(tempHistory);
+ }
+ getCurrentContext() {
+ // Format the history into a single string for the provider prompt
+ return formatHistoryToString(this.history);
+ }
+ async _summarize() {
+ try {
+ const prompt = this.config.getSummarizationPrompt(this.history, this.config.lowWaterMarkWords);
+ // Construct options for the internal method, bypassing the main 'generate' entry point
+ const textOptions = {
+ prompt,
+ provider: this.config.summarizationProvider,
+ model: this.config.summarizationModel,
+ // Ensure summarization does not trigger more context management or tools
+ disableTools: true,
+ };
+ // Call the internal generation function directly to avoid recursion
+ const result = await this.internalGenerator(textOptions);
+ if (typeof result.content === "string" && result.content.length > 0) {
+ // Replace the history with a single system message containing the summary
+ const newHistory = [{ role: "system", content: result.content }];
+ this.history = newHistory;
+ this.wordCount = this.config.estimateWordCount(this.history);
+ logger.info(`[ContextManager] Summarization complete. New history length: ${this.wordCount} words.`);
+ }
+ else {
+ logger.warn("[ContextManager] Summarization returned empty or non-string content; truncating history as a fallback.");
+ this._truncateHistory(this.config.lowWaterMarkWords);
+ this.history.unshift({ role: "system", content: ContextManager.SUMMARIZATION_EMPTY_WARNING });
+ this.wordCount = this.config.estimateWordCount(this.history);
+ }
+ logger.debug(`[ContextManager] New history: ${JSON.stringify(this.history)}`);
+ }
+ catch (error) {
+ logger.error("Context summarization failed:", { error });
+ // Fallback strategy: truncate the history to the target word count.
+ this._truncateHistory(this.config.lowWaterMarkWords);
+ this.history.unshift({ role: "system", content: ContextManager.SUMMARIZATION_FAILED_WARNING });
+ this.wordCount = this.config.estimateWordCount(this.history);
+ }
+ }
+ /**
+ * Truncates the history to a specific word count, preserving the most recent messages.
+ */
+ _truncateHistory(wordLimit) {
+ if (this.wordCount <= wordLimit) {
+ return;
+ }
+ let runningCount = 0;
+ let sliceIndex = this.history.length;
+ for (let i = this.history.length - 1; i >= 0; i--) {
+ let wordCount = this.history[i].wordCount;
+ if (wordCount === undefined) {
+ logger.warn(`[ContextManager] Word count cache missing for message at index ${i}. Recalculating.`);
+ wordCount = this.config.estimateWordCount([this.history[i]]);
+ }
+ runningCount += wordCount;
+ if (runningCount > wordLimit) {
+ sliceIndex = i + 1;
+ break;
+ }
+ }
+ this.history = this.history.slice(sliceIndex);
+ }
+ }
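
To make the flow of the new ContextManager concrete, here is a minimal usage sketch in TypeScript. It is illustrative only: the relative import paths mirror the dist/context layout shown in this diff, the generator stub merely stands in for NeuroLink's internal generation call (the real one also receives provider, model, and disableTools, as _summarize shows), and ContextManager's declared parameter types live in ContextManager.d.ts, which is not included in this excerpt.

import { ContextManager } from "./ContextManager.js";
import { defaultContextConfig } from "./config.js";
import type { ContextManagerConfig } from "./types.js";

// Stub generator: ContextManager only reads `result.content` from what it returns.
const summarizeStub = async (options: { prompt: string }) => ({
  content: `Summary (stub): ${options.prompt.slice(0, 40)}...`,
});

// Override the shipped defaults (3000/800 words) to trigger summarization sooner.
const config: ContextManagerConfig = {
  ...defaultContextConfig,
  highWaterMarkWords: 1000,
  lowWaterMarkWords: 300,
};

const manager = new ContextManager(summarizeStub, config);
await manager.addTurn("user", "Explain how the water marks work.");
await manager.addTurn("assistant", "Summarization triggers once the high-water mark is exceeded.");
// History is rendered as "role: content" blocks joined by blank lines (see utils.js below).
console.log(manager.getCurrentContext());

Once the running word count passes highWaterMarkWords, addTurn awaits _summarize, which replaces the history with a single system message, or truncates and prepends a warning message if summarization fails.
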
@@ -0,0 +1,5 @@
+ import type { ContextManagerConfig } from "./types.js";
+ /**
+ * Default configuration for the ContextManager.
+ */
+ export declare const defaultContextConfig: ContextManagerConfig;
@@ -0,0 +1,38 @@
+ import { formatHistoryToString } from "./utils.js";
+ /**
+ * Estimates the word count of a conversation history.
+ */
+ function estimateWordCount(history) {
+ if (!history || history.length === 0) {
+ return 0;
+ }
+ return history.reduce((acc, msg) => acc + (msg.content.trim().split(/\s+/).filter(word => word.length > 0).length || 0), 0);
+ }
+ /**
+ * Generates the default prompt for summarization.
+ */
+ function getDefaultSummarizationPrompt(history, wordLimit) {
+ const formattedHistory = formatHistoryToString(history);
+ return `
+ You are a context summarization AI. Your task is to condense the following conversation history for another AI assistant.
+ The summary must be a concise, third-person narrative that retains all critical information. Pay special attention to retaining key entities, technical details, decisions made, and any specific dates or times mentioned.
+ Ensure the summary flows logically and is ready to be used as context for the next turn in the conversation.
+ Please keep the summary under ${wordLimit} words.
+
+ Conversation History to Summarize:
+ ---
+ ${formattedHistory}
+ ---
+ `.trim();
+ }
+ /**
+ * Default configuration for the ContextManager.
+ */
+ export const defaultContextConfig = {
+ highWaterMarkWords: 3000,
+ lowWaterMarkWords: 800,
+ summarizationModel: "gemini-2.5-flash",
+ summarizationProvider: "googlevertex",
+ getSummarizationPrompt: getDefaultSummarizationPrompt,
+ estimateWordCount: estimateWordCount,
+ };
@@ -0,0 +1,20 @@
+ export interface ChatMessage {
+ /** Role of the message sender */
+ role: "user" | "assistant" | "system";
+ /** Content of the message */
+ content: string;
+ /** Cached word count for performance */
+ wordCount?: number;
+ }
+ /**
+ * Defines the configuration for the ContextManager.
+ * This allows for easy customization of the summarization behavior.
+ */
+ export interface ContextManagerConfig {
+ highWaterMarkWords: number;
+ lowWaterMarkWords: number;
+ summarizationModel: string;
+ summarizationProvider: string;
+ getSummarizationPrompt: (history: ChatMessage[], wordLimit: number) => string;
+ estimateWordCount: (history: ChatMessage[]) => number;
+ }
@@ -0,0 +1 @@
+ export {};
@@ -0,0 +1,7 @@
+ import type { ChatMessage } from "./types.js";
+ /**
+ * Formats a chat history array into a single string for use in a prompt.
+ * @param history The array of ChatMessage objects.
+ * @returns A formatted string representing the conversation.
+ */
+ export declare function formatHistoryToString(history: ChatMessage[]): string;
@@ -0,0 +1,8 @@
+ /**
+ * Formats a chat history array into a single string for use in a prompt.
+ * @param history The array of ChatMessage objects.
+ * @returns A formatted string representing the conversation.
+ */
+ export function formatHistoryToString(history) {
+ return history.map(msg => `${msg.role}: ${msg.content}`).join('\n\n');
+ }
@@ -4,6 +4,7 @@ import { directAgentTools } from "../agent/directTools.js";
  import { getSafeMaxTokens } from "../utils/tokenLimits.js";
  import { createTimeoutController, TimeoutError } from "../utils/timeout.js";
  import { shouldDisableBuiltinTools } from "../utils/toolUtils.js";
+ import { buildMessagesArray } from "../utils/messageBuilder.js";
  /**
  * Validates if a result contains a valid toolsObject structure
  * @param result - The result object to validate
@@ -162,10 +163,11 @@ export class BaseProvider {
  logger.debug(`[BaseProvider.generate] Tools for ${this.providerName}: ${Object.keys(tools).join(", ")}`);
  // EVERY provider uses Vercel AI SDK - no exceptions
  const model = await this.getAISDKModel(); // This method is now REQUIRED
+ // Build proper message array with conversation history
+ const messages = buildMessagesArray(options);
  const result = await generateText({
  model,
- prompt: options.prompt || options.input?.text || "",
- system: options.systemPrompt,
+ messages: messages,
  tools,
  maxSteps: options.maxSteps || DEFAULT_MAX_STEPS,
  toolChoice: shouldUseTools ? "auto" : "none",
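
The removed prompt/system pair is replaced by a messages array built from the generation options. messageBuilder.js itself (+48 lines) is not part of this excerpt, so the following TypeScript sketch is only an assumption about the rough shape of that array, inferred from the fields visible in this diff (systemPrompt, conversationMessages, and the old prompt || input?.text fallback).

type Role = "user" | "assistant" | "system";
interface Message { role: Role; content: string; }

// Hypothetical stand-in for buildMessagesArray; not the shipped implementation.
function buildMessagesArraySketch(options: {
  systemPrompt?: string;
  conversationMessages?: Message[];
  prompt?: string;
  input?: { text?: string };
}): Message[] {
  const messages: Message[] = [];
  if (options.systemPrompt) {
    messages.push({ role: "system", content: options.systemPrompt });
  }
  // Prior turns injected by the conversation memory layer, if any.
  if (options.conversationMessages?.length) {
    messages.push(...options.conversationMessages);
  }
  // Current user prompt, matching the old prompt || input?.text || "" fallback.
  messages.push({ role: "user", content: options.prompt || options.input?.text || "" });
  return messages;
}
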
@@ -0,0 +1,41 @@
+ /**
+ * Conversation Memory Manager for NeuroLink
+ * Handles in-memory conversation storage, session management, and context injection
+ */
+ import type { ConversationMemoryConfig, ConversationMemoryStats, ChatMessage } from "../types/conversationTypes.js";
+ export declare class ConversationMemoryManager {
+ private sessions;
+ private config;
+ private isInitialized;
+ constructor(config: ConversationMemoryConfig);
+ /**
+ * Initialize the memory manager
+ */
+ initialize(): Promise<void>;
+ /**
+ * Store a conversation turn for a session
+ * ULTRA-OPTIMIZED: Direct ChatMessage[] storage with zero conversion overhead
+ */
+ storeConversationTurn(sessionId: string, userId: string | undefined, userMessage: string, aiResponse: string): Promise<void>;
+ /**
+ * Build context messages for AI prompt injection (ULTRA-OPTIMIZED)
+ * Returns pre-stored message array with zero conversion overhead
+ */
+ buildContextMessages(sessionId: string): ChatMessage[];
+ /**
+ * Get memory statistics (simplified for pure in-memory storage)
+ * ULTRA-OPTIMIZED: Calculate turns from message count (each turn = MESSAGES_PER_TURN messages)
+ */
+ getStats(): Promise<ConversationMemoryStats>;
+ /**
+ * Clear all conversations for a specific session
+ */
+ clearSession(sessionId: string): Promise<boolean>;
+ /**
+ * Clear all conversations (reset memory)
+ */
+ clearAllSessions(): Promise<void>;
+ private ensureInitialized;
+ private createNewSession;
+ private enforceSessionLimit;
+ }
@@ -0,0 +1,152 @@
+ /**
+ * Conversation Memory Manager for NeuroLink
+ * Handles in-memory conversation storage, session management, and context injection
+ */
+ import { ConversationMemoryError } from "../types/conversationTypes.js";
+ import { DEFAULT_MAX_TURNS_PER_SESSION, DEFAULT_MAX_SESSIONS, MESSAGES_PER_TURN } from "../config/conversationMemoryConfig.js";
+ import { logger } from "../utils/logger.js";
+ export class ConversationMemoryManager {
+ sessions = new Map();
+ config;
+ isInitialized = false;
+ constructor(config) {
+ // Trust that config is already complete from applyConversationMemoryDefaults()
+ this.config = config;
+ }
+ /**
+ * Initialize the memory manager
+ */
+ async initialize() {
+ if (this.isInitialized) {
+ return;
+ }
+ try {
+ this.isInitialized = true;
+ logger.info("ConversationMemoryManager initialized", {
+ storage: "in-memory",
+ maxSessions: this.config.maxSessions,
+ maxTurnsPerSession: this.config.maxTurnsPerSession,
+ });
+ }
+ catch (error) {
+ throw new ConversationMemoryError("Failed to initialize conversation memory", "CONFIG_ERROR", { error: error instanceof Error ? error.message : String(error) });
+ }
+ }
+ /**
+ * Store a conversation turn for a session
+ * ULTRA-OPTIMIZED: Direct ChatMessage[] storage with zero conversion overhead
+ */
+ async storeConversationTurn(sessionId, userId, userMessage, aiResponse) {
+ await this.ensureInitialized();
+ try {
+ // Get or create session
+ let session = this.sessions.get(sessionId);
+ if (!session) {
+ session = this.createNewSession(sessionId, userId);
+ this.sessions.set(sessionId, session);
+ }
+ // ULTRA-OPTIMIZED: Direct message storage - no intermediate objects
+ session.messages.push({ role: "user", content: userMessage }, { role: "assistant", content: aiResponse });
+ session.lastActivity = Date.now();
+ // Enforce per-session turn limit (each turn = MESSAGES_PER_TURN messages: user + assistant)
+ const maxMessages = (this.config.maxTurnsPerSession || DEFAULT_MAX_TURNS_PER_SESSION) * MESSAGES_PER_TURN;
+ if (session.messages.length > maxMessages) {
+ session.messages = session.messages.slice(-maxMessages);
+ }
+ // Enforce global session limit
+ this.enforceSessionLimit();
+ logger.debug("Conversation turn stored", {
+ sessionId,
+ messageCount: session.messages.length,
+ turnCount: session.messages.length / MESSAGES_PER_TURN, // Each turn = MESSAGES_PER_TURN messages
+ userMessageLength: userMessage.length,
+ aiResponseLength: aiResponse.length,
+ });
+ }
+ catch (error) {
+ throw new ConversationMemoryError(`Failed to store conversation turn for session ${sessionId}`, "STORAGE_ERROR", {
+ sessionId,
+ error: error instanceof Error ? error.message : String(error),
+ });
+ }
+ }
+ /**
+ * Build context messages for AI prompt injection (ULTRA-OPTIMIZED)
+ * Returns pre-stored message array with zero conversion overhead
+ */
+ buildContextMessages(sessionId) {
+ const session = this.sessions.get(sessionId);
+ if (!session || session.messages.length === 0) {
+ return [];
+ }
+ // ULTRA-OPTIMIZED: Direct return - no processing needed!
+ return session.messages;
+ }
+ /**
+ * Get memory statistics (simplified for pure in-memory storage)
+ * ULTRA-OPTIMIZED: Calculate turns from message count (each turn = MESSAGES_PER_TURN messages)
+ */
+ async getStats() {
+ await this.ensureInitialized();
+ const sessions = Array.from(this.sessions.values());
+ const totalTurns = sessions.reduce((sum, session) => sum + session.messages.length / MESSAGES_PER_TURN, 0);
+ return {
+ totalSessions: sessions.length,
+ totalTurns,
+ };
+ }
+ /**
+ * Clear all conversations for a specific session
+ */
+ async clearSession(sessionId) {
+ const session = this.sessions.get(sessionId);
+ if (!session) {
+ return false;
+ }
+ // Remove from memory
+ this.sessions.delete(sessionId);
+ logger.info("Session cleared", { sessionId });
+ return true;
+ }
+ /**
+ * Clear all conversations (reset memory)
+ */
+ async clearAllSessions() {
+ const sessionIds = Array.from(this.sessions.keys());
+ // Clear memory
+ this.sessions.clear();
+ logger.info("All sessions cleared", { clearedCount: sessionIds.length });
+ }
+ // Private methods
+ async ensureInitialized() {
+ if (!this.isInitialized) {
+ await this.initialize();
+ }
+ }
+ createNewSession(sessionId, userId) {
+ return {
+ sessionId,
+ userId,
+ messages: [],
+ createdAt: Date.now(),
+ lastActivity: Date.now(),
+ };
+ }
+ enforceSessionLimit() {
+ const maxSessions = this.config.maxSessions || DEFAULT_MAX_SESSIONS;
+ if (this.sessions.size <= maxSessions) {
+ return;
+ }
+ // Sort sessions by last activity (oldest first)
+ const sessions = Array.from(this.sessions.entries()).sort(([, a], [, b]) => a.lastActivity - b.lastActivity);
+ // Remove oldest sessions
+ const sessionsToRemove = sessions.slice(0, sessions.length - maxSessions);
+ for (const [sessionId] of sessionsToRemove) {
+ this.sessions.delete(sessionId);
+ }
+ logger.debug("Session limit enforced", {
+ removedSessions: sessionsToRemove.length,
+ remainingSessions: this.sessions.size,
+ });
+ }
+ }
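
The class above can be exercised directly; a brief TypeScript sketch follows. It is illustrative only: the full ConversationMemoryConfig shape is declared in conversationTypes.d.ts (+95 lines, not shown in this excerpt), and only maxSessions and maxTurnsPerSession are read by this file, so the cast below is a sketch-level shortcut rather than a complete configuration.

import { ConversationMemoryManager } from "./conversationMemoryManager.js";
import type { ConversationMemoryConfig } from "../types/conversationTypes.js";

const memory = new ConversationMemoryManager({
  maxSessions: 100,
  maxTurnsPerSession: 20,
} as ConversationMemoryConfig);

await memory.initialize();
await memory.storeConversationTurn(
  "session-1",
  undefined,
  "What changed in 7.13.0?",
  "Conversation memory and context summarization were added.",
);

// Prior turns come back as a ready-to-inject ChatMessage[] (user + assistant per turn).
const context = memory.buildContextMessages("session-1");
console.log(context.length); // 2

console.log(await memory.getStats()); // { totalSessions: 1, totalTurns: 1 }
await memory.clearSession("session-1");
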
@@ -3,6 +3,7 @@ import type { Schema, Tool } from "ai";
  import type { GenerateResult } from "../types/generateTypes.js";
  import type { StreamOptions, StreamResult } from "../types/streamTypes.js";
  import type { JsonValue } from "../types/common.js";
+ import type { ChatMessage } from "../types/conversationTypes.js";
  export interface TextGenerationResult {
  content: string;
  provider?: string;
@@ -150,6 +151,7 @@ export interface TextGenerationOptions {
  role: string;
  content: string;
  }>;
+ conversationMessages?: ChatMessage[];
  }
  /**
  * Analytics data for usage tracking
@@ -11,6 +11,7 @@ import type { StreamOptions, StreamResult } from "./types/streamTypes.js";
  import type { SimpleTool } from "./sdk/toolRegistration.js";
  import type { InMemoryMCPServerConfig } from "./types/mcpTypes.js";
  import { EventEmitter } from "events";
+ import type { ConversationMemoryConfig } from "./types/conversationTypes.js";
  export interface ProviderStatus {
  provider: string;
  status: "working" | "failed" | "not-configured";
@@ -40,9 +41,11 @@ export interface MCPServerInfo {
  hasServer: boolean;
  metadata?: unknown;
  }
+ import type { ContextManagerConfig } from "./context/types.js";
  export declare class NeuroLink {
  private mcpInitialized;
  private emitter;
+ private contextManager;
  private customTools;
  private inMemoryServers;
  private toolCircuitBreakers;
@@ -55,7 +58,10 @@ export declare class NeuroLink {
  * @param success - Whether the tool execution was successful
  */
  private emitToolEndEvent;
- constructor();
+ private conversationMemory?;
+ constructor(config?: {
+ conversationMemory?: Partial<ConversationMemoryConfig>;
+ });
  /**
  * Initialize MCP registry with enhanced error handling and resource cleanup
  * Uses isolated async context to prevent hanging
@@ -65,6 +71,21 @@ export declare class NeuroLink {
  * MAIN ENTRY POINT: Enhanced generate method with new function signature
  * Replaces both generateText and legacy methods
  */
+ /**
+ * Extracts the original prompt text from the provided input.
+ * If a string is provided, it returns the string directly.
+ * If a GenerateOptions object is provided, it returns the input text from the object.
+ * @param optionsOrPrompt The prompt input, either as a string or a GenerateOptions object.
+ * @returns The original prompt text as a string.
+ */
+ private _extractOriginalPrompt;
+ /**
+ * Enables automatic context summarization for the NeuroLink instance.
+ * Once enabled, the instance will maintain conversation history and
+ * automatically summarize it when it exceeds token limits.
+ * @param config Optional configuration to override default summarization settings.
+ */
+ enableContextSummarization(config?: Partial<ContextManagerConfig>): void;
  generate(optionsOrPrompt: GenerateOptions | string): Promise<GenerateResult>;
  /**
  * BACKWARD COMPATIBILITY: Legacy generateText method
@@ -75,9 +96,11 @@ export declare class NeuroLink {
  * REDESIGNED INTERNAL GENERATION - NO CIRCULAR DEPENDENCIES
  *
  * This method implements a clean fallback chain:
- * 1. Try MCP-enhanced generation if available
- * 2. Fall back to direct provider generation
- * 3. No recursive calls - each method has a specific purpose
+ * 1. Initialize conversation memory if enabled
+ * 2. Inject conversation history into prompt
+ * 3. Try MCP-enhanced generation if available
+ * 4. Fall back to direct provider generation
+ * 5. Store conversation turn for future context
  */
  private generateTextInternal;
  /**
@@ -367,6 +390,18 @@ export declare class NeuroLink {
  recommendations: string[];
  }>;
  };
+ /**
+ * Get conversation memory statistics (public API)
+ */
+ getConversationStats(): Promise<import("./types/conversationTypes.js").ConversationMemoryStats>;
+ /**
+ * Clear conversation history for a specific session (public API)
+ */
+ clearConversationSession(sessionId: string): Promise<boolean>;
+ /**
+ * Clear all conversation history (public API)
+ */
+ clearAllConversations(): Promise<void>;
  }
  export declare const neurolink: NeuroLink;
  export default neurolink;
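
Putting the declarations together, a hedged TypeScript sketch of the new public surface follows. The bare package import and the fields of GenerateResult are assumptions (package.json exports and generateTypes.d.ts are not in this excerpt), and the diff does not show how a sessionId is attached to an individual generate() call, so that part is omitted.

import { NeuroLink } from "@juspay/neurolink"; // assumed root export

const ai = new NeuroLink({
  conversationMemory: { maxTurnsPerSession: 20 }, // Partial<ConversationMemoryConfig>
});

// Opt in to automatic history summarization; defaults (3000/800-word marks,
// gemini-2.5-flash on googlevertex) come from defaultContextConfig above.
ai.enableContextSummarization({ highWaterMarkWords: 2000 });

const result = await ai.generate("Summarize the conversation-memory changes in this release.");
console.log(result);

console.log(await ai.getConversationStats()); // { totalSessions, totalTurns }
await ai.clearConversationSession("session-1");
await ai.clearAllConversations();
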