node-red-contrib-ai-agent 0.0.5 → 0.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -71,8 +71,14 @@ A configuration node that initializes the conversation context in memory. The ag
71
71
  A configuration node that initializes the conversation context with file-based persistence. The agent node uses this configuration to manage the conversation context across restarts.
72
72
 
73
73
  **Properties:**
74
- - **Max Items**: Maximum number of conversation turns to keep
75
- - **File Path**: Path to store the conversation history
74
+ - **Filename**: Path of the memory file, relative to the Node-RED user directory (a sketch of the stored file's layout follows this list)
75
+ - **Max Conversations**: Maximum number of conversations to store
76
+ - **Max Messages Per Conversation**: Maximum messages per conversation history
77
+ - **Backups**: Enable/disable automatic backups
78
+ - **Backup Count**: Number of backups to keep
79
+ - **Consolidation**: Number of messages that triggers automatic summarization
80
+ - **Long-Term Memory**: Enable/disable vector-based storage
81
+ - **Embedding Model**: The model used for semantic embeddings (e.g., text-embedding-ada-002)
76
82
  - **Name**: Display name for the node
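As a rough guide, the file named by **Filename** is serialized with the following shape in 0.0.7. This is only a sketch: the field names are taken from the memory-file node's serialization code later in this diff, and the values are illustrative.

```javascript
// Illustrative layout of the persisted memory file (e.g. ai-memories.json).
// Field names come from the node's toJSON() output; values are examples only.
const exampleMemoryFile = {
  conversations: [
    {
      id: 'default',
      createdAt: '2024-01-01T00:00:00.000Z',
      updatedAt: '2024-01-01T00:05:00.000Z',
      messages: [
        { role: 'user', content: 'Hello', timestamp: '2024-01-01T00:00:00.000Z' }
      ]
    }
  ],
  longTerm: { vectors: [], metadata: [], dimensions: 1536 },
  metadata: {
    version: '1.1',
    lastUpdated: '2024-01-01T00:05:00.000Z',
    stats: { conversationCount: 1, longTermItemCount: 0, messageCount: 1 }
  }
};
```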
77
83
 
78
84
  ### AI Model
@@ -187,10 +193,24 @@ This allows for complex conversation flows where different agents handle differe
187
193
 
188
194
  ## Advanced Features
189
195
 
190
- - **Tool Integration**: Extend functionality with custom tools (Function and HTTP)
191
- - **Context Management**: Maintain conversation history
192
- - **Flexible Configuration**: Customize model parameters and behavior
193
- - **Template Variables**: Use dynamic values in HTTP requests
196
+ ### 1. Vector Storage (Long-Term Memory)
197
+ The `AI Memory (File)` node supports vector-based storage. When enabled, it can store embeddings of summaries or key information. This allows for **semantic search** using the `query` command.
198
+
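As an illustration, a semantic lookup can be issued from a Function node wired into the memory node. This assumes Long-Term Memory is enabled on the node; `msg.aiagent` is normally supplied by the upstream AI configuration, and the values below are placeholders.

```javascript
// Ask the memory node for the most relevant long-term memories.
msg.command = 'query';
msg.query = 'What did the user say about deployment schedules?';
msg.limit = 3; // optional; the node defaults to 5 results
// Used to embed the query text; normally provided upstream, placeholder here.
msg.aiagent = { apiKey: 'YOUR_API_KEY' };
return msg;
// The node replies with msg.result.results, ranked by cosine similarity
// against the stored summaries.
```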
199
+ ### 2. Memory Consolidation
200
+ Conversation threads can be summarized automatically (or on demand) to save space while preserving long-term context. Once the configured message threshold is reached, the node can use an AI model to summarize the history and store the summary in the vector database.
201
+
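Consolidation can also be triggered explicitly. A minimal Function-node sketch (again, `msg.aiagent` is normally populated by the upstream AI nodes; the API key and model name below are placeholders):

```javascript
// Summarize one conversation and store the summary in long-term memory.
msg.command = 'consolidate';
msg.conversationId = 'default';
// Credentials and model for the summarization call; placeholders for illustration.
msg.aiagent = { apiKey: 'YOUR_API_KEY', model: 'openai/gpt-4o-mini' };
return msg;
// On success, msg.result.summary contains the generated summary.
```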
202
+ ### 3. Memory Commands
203
+ Memory nodes support the following commands via `msg.command` (Function-node examples follow the list):
204
+ - **add**: Add a message to a conversation (`msg.message` required).
205
+ - **get**: Retrieve messages for a conversation (`msg.conversationId` optional).
206
+ - **search**: Plain-text search across conversations (`msg.query` required).
207
+ - **query**: Semantic (vector) search in long-term memory (`msg.query` text or vector required).
208
+ - **consolidate**: Manually trigger summarization and long-term storage.
209
+ - **clear**: Clear short-term, long-term, or all memory.
210
+ - **delete**: Delete a specific conversation (`msg.conversationId` required).
211
+
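As an example, the `add` and `get` commands can be driven from a Function node placed in front of the memory node; the conversation ID and message content below are illustrative.

```javascript
// Store a message in a named conversation.
msg.command = 'add';
msg.conversationId = 'project-alpha';
msg.message = { role: 'user', content: 'Deployment is scheduled for Friday.' };
return msg;
```

```javascript
// Later: retrieve the last 20 messages of the same conversation.
msg.command = 'get';
msg.conversationId = 'project-alpha';
msg.limit = 20;
return msg;
```

In both cases the node attaches the outcome to `msg.result`; `search` works the same way, with `msg.query` set to the text to match.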
212
+ ### 4. Template Variables
213
+ Use dynamic values in HTTP requests via `${input.property}` syntax.
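For example, an HTTP tool URL configured as `https://api.example.com/users/${input.userId}` would have the placeholder replaced with the value taken from the incoming input. The standalone sketch below only illustrates the substitution behaviour; the actual interpolation happens inside the HTTP tool node, and the names are hypothetical.

```javascript
// Standalone illustration of ${input.property} interpolation (hypothetical names).
const input = { userId: 42 };
const template = 'https://api.example.com/users/${input.userId}';
const url = template.replace(/\$\{input\.(\w+)\}/g, (_, key) => input[key]);
// url === 'https://api.example.com/users/42'
```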
194
214
 
195
215
  ## Contributing
196
216
 
@@ -5,30 +5,46 @@
5
5
  color: '#a6bbcf',
6
6
  defaults: {
7
7
  name: { value: "" },
8
- filename: {
8
+ filename: {
9
9
  value: "ai-memories.json",
10
10
  required: true,
11
- validate: function(v) {
11
+ validate: function (v) {
12
12
  return v.length > 0;
13
13
  }
14
- }
14
+ },
15
+ maxConversations: { value: 50, validate: RED.validators.number() },
16
+ maxMessagesPerConversation: { value: 100, validate: RED.validators.number() },
17
+ backupEnabled: { value: true },
18
+ backupCount: { value: 3, validate: RED.validators.number() },
19
+ vectorEnabled: { value: false },
20
+ embeddingModel: { value: "text-embedding-ada-002" },
21
+ consolidationThreshold: { value: 10, validate: RED.validators.number() }
15
22
  },
23
+
16
24
  inputs: 1,
17
25
  outputs: 1,
18
26
  icon: "file.png",
19
- label: function() {
27
+ label: function () {
20
28
  return this.name || "AI Memory (File)";
21
29
  },
22
- labelStyle: function() {
30
+ labelStyle: function () {
23
31
  return this.name ? "node_label_italic" : "";
24
32
  },
25
- oneditprepare: function() {
26
- // Initialize any UI components here
33
+ oneditprepare: function () {
34
+ $("#node-input-vectorEnabled").on("change", function () {
35
+ if ($(this).is(":checked")) {
36
+ $(".vector-row").show();
37
+ } else {
38
+ $(".vector-row").hide();
39
+ }
40
+ });
41
+ $("#node-input-vectorEnabled").trigger("change");
27
42
  },
28
- oneditsave: function() {
43
+
44
+ oneditsave: function () {
29
45
  // Handle save if needed
30
46
  },
31
- oneditcancel: function() {
47
+ oneditcancel: function () {
32
48
  // Cleanup if needed
33
49
  }
34
50
  });
@@ -43,6 +59,49 @@
43
59
  <label for="node-input-filename"><i class="fa fa-file"></i> Filename</label>
44
60
  <input type="text" id="node-input-filename" placeholder="ai-memories.json">
45
61
  </div>
62
+ <div class="form-row">
63
+ <label for="node-input-maxConversations"><i class="fa fa-list"></i> Max Conversations</label>
64
+ <input type="number" id="node-input-maxConversations" placeholder="50">
65
+ </div>
66
+ <div class="form-row">
67
+ <label for="node-input-maxMessagesPerConversation"><i class="fa fa-commenting"></i> Max Messages/Conv</label>
68
+ <input type="number" id="node-input-maxMessagesPerConversation" placeholder="100">
69
+ </div>
70
+ <div class="form-row">
71
+ <label for="node-input-backupEnabled"><i class="fa fa-shield"></i> Backups</label>
72
+ <input type="checkbox" id="node-input-backupEnabled" style="display:inline-block; width:auto; vertical-align:top;">
73
+ <label for="node-input-backupEnabled" style="width: auto;">Enable automatic backups</label>
74
+ </div>
75
+ <div class="form-row" id="backupCount-row">
76
+ <label for="node-input-backupCount"><i class="fa fa-history"></i> Backup Count</label>
77
+ <input type="number" id="node-input-backupCount" placeholder="3">
78
+ </div>
79
+
80
+ <hr>
81
+ <h4>AI Context & Consolidation</h4>
82
+ <div class="form-row">
83
+ <label for="node-input-consolidationThreshold"><i class="fa fa-compress"></i> Consolidation</label>
84
+ <input type="number" id="node-input-consolidationThreshold" placeholder="10">
85
+ <div style="margin-left: 105px; font-size: 0.8em; color: #666;">
86
+ Threshold of messages to trigger auto-consolidation.
87
+ </div>
88
+ </div>
89
+
90
+ <div class="form-row">
91
+ <label for="node-input-vectorEnabled"><i class="fa fa-cube"></i> Long-Term</label>
92
+ <input type="checkbox" id="node-input-vectorEnabled" style="display:inline-block; width:auto; vertical-align:top;">
93
+ <label for="node-input-vectorEnabled" style="width: auto;">Enable Long-Term Vector Memory</label>
94
+ </div>
95
+
96
+ <div class="form-row vector-row">
97
+ <label for="node-input-embeddingModel"><i class="fa fa-braille"></i> Embedding</label>
98
+ <select id="node-input-embeddingModel">
99
+ <option value="text-embedding-ada-002">OpenAI Ada 002</option>
100
+ <option value="text-embedding-3-small">OpenAI 3 Small</option>
101
+ <option value="text-embedding-3-large">OpenAI 3 Large</option>
102
+ </select>
103
+ </div>
104
+
46
105
  <div class="form-tips">
47
106
  <p>Memories will be stored in Node-RED's user directory.</p>
48
107
  </div>
@@ -60,4 +119,4 @@
60
119
  <dt>payload <span>object|string</span></dt>
61
120
  <dd>The processed message with memory operations applied.</dd>
62
121
  </dl>
63
- </script>
122
+ </script>
@@ -1,55 +1,608 @@
1
- module.exports = function(RED) {
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+ const axios = require('axios');
4
+
5
+ class VectorStorage {
6
+ constructor(options = {}) {
7
+ this.vectors = [];
8
+ this.metadata = [];
9
+ this.dimensions = options.dimensions || 1536;
10
+ }
11
+
12
+ addItem(text, vector, metadata = {}) {
13
+ const id = Math.random().toString(36).substring(7);
14
+ this.vectors.push({ id, vector, text });
15
+ this.metadata.push({ id, ...metadata, timestamp: new Date().toISOString() });
16
+ return id;
17
+ }
18
+
19
+ search(queryVector, limit = 5) {
20
+ if (!queryVector || this.vectors.length === 0) return [];
21
+
22
+ const results = this.vectors.map((item, index) => {
23
+ return {
24
+ id: item.id,
25
+ text: item.text,
26
+ similarity: this.calculateSimilarity(queryVector, item.vector),
27
+ metadata: this.metadata[index]
28
+ };
29
+ });
30
+
31
+ return results
32
+ .sort((a, b) => b.similarity - a.similarity)
33
+ .slice(0, limit);
34
+ }
35
+
36
+ calculateSimilarity(vec1, vec2) {
37
+ if (vec1.length !== vec2.length) return 0;
38
+ let dotProduct = 0;
39
+ let normA = 0;
40
+ let normB = 0;
41
+ for (let i = 0; i < vec1.length; i++) {
42
+ dotProduct += vec1[i] * vec2[i];
43
+ normA += vec1[i] * vec1[i];
44
+ normB += vec2[i] * vec2[i];
45
+ }
46
+ return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
47
+ }
48
+
49
+ toJSON() {
50
+ return {
51
+ vectors: this.vectors,
52
+ metadata: this.metadata,
53
+ dimensions: this.dimensions
54
+ };
55
+ }
56
+
57
+ fromJSON(data) {
58
+ if (data) {
59
+ this.vectors = data.vectors || [];
60
+ this.metadata = data.metadata || [];
61
+ this.dimensions = data.dimensions || 1536;
62
+ }
63
+ }
64
+ }
65
+
66
+
67
+ class SimpleFileStorage {
68
+ constructor(options = {}) {
69
+ this.filePath = options.filePath;
70
+ this.backupEnabled = options.backupEnabled !== false;
71
+ this.backupCount = options.backupCount || 3;
72
+ }
73
+
74
+ async save(data) {
75
+ try {
76
+ const dir = path.dirname(this.filePath);
77
+ if (!fs.existsSync(dir)) {
78
+ fs.mkdirSync(dir, { recursive: true });
79
+ }
80
+
81
+ data.metadata = data.metadata || {};
82
+ data.metadata.lastUpdated = new Date().toISOString();
83
+
84
+ await fs.promises.writeFile(
85
+ this.filePath,
86
+ JSON.stringify(data, null, 2)
87
+ );
88
+
89
+ if (this.backupEnabled) {
90
+ await this.createBackup();
91
+ }
92
+
93
+ return true;
94
+ } catch (error) {
95
+ return false;
96
+ }
97
+ }
98
+
99
+ loadSync() {
100
+ try {
101
+ if (fs.existsSync(this.filePath)) {
102
+ const data = fs.readFileSync(this.filePath, 'utf8');
103
+ return JSON.parse(data);
104
+ }
105
+ return null;
106
+ } catch (error) {
107
+ // Backup recovery is still async, but for initial load sync is safer
108
+ return null;
109
+ }
110
+ }
111
+
112
+ async load() {
113
+ try {
114
+ if (fs.existsSync(this.filePath)) {
115
+ const data = await fs.promises.readFile(this.filePath, 'utf8');
116
+ return JSON.parse(data);
117
+ }
118
+ return null;
119
+ } catch (error) {
120
+ return await this.recoverFromBackup();
121
+ }
122
+ }
123
+
124
+ async createBackup() {
125
+ try {
126
+ const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
127
+ const backupPath = `${this.filePath}.${timestamp}.bak`;
128
+
129
+ await fs.promises.copyFile(this.filePath, backupPath);
130
+
131
+ const backups = await this.listBackups();
132
+ if (backups.length > this.backupCount) {
133
+ const oldestBackups = backups
134
+ .sort((a, b) => a.time - b.time)
135
+ .slice(0, backups.length - this.backupCount);
136
+
137
+ for (const backup of oldestBackups) {
138
+ await fs.promises.unlink(backup.path);
139
+ }
140
+ }
141
+
142
+ return true;
143
+ } catch (error) {
144
+ return false;
145
+ }
146
+ }
147
+
148
+ async listBackups() {
149
+ try {
150
+ const dir = path.dirname(this.filePath);
151
+ const base = path.basename(this.filePath);
152
+
153
+ const files = await fs.promises.readdir(dir);
154
+
155
+ return files
156
+ .filter(file => file.startsWith(`${base}.`) && file.endsWith('.bak'))
157
+ .map(file => {
158
+ const match = file.match(/\.(\d{4}-\d{2}-\d{2}T\d{2}-\d{2}-\d{2}-\d{3}Z)\.bak$/);
159
+ const timestamp = match ? match[1].replace(/-/g, ':').replace(/-(\d{3})Z$/, '.$1Z') : null;
160
+
161
+ return {
162
+ path: path.join(dir, file),
163
+ time: timestamp ? new Date(timestamp).getTime() : 0
164
+ };
165
+ });
166
+ } catch (error) {
167
+ return [];
168
+ }
169
+ }
170
+
171
+ async recoverFromBackup() {
172
+ try {
173
+ const backups = await this.listBackups();
174
+
175
+ if (backups.length === 0) {
176
+ return null;
177
+ }
178
+
179
+ const latestBackup = backups.sort((a, b) => b.time - a.time)[0];
180
+ const data = await fs.promises.readFile(latestBackup.path, 'utf8');
181
+ return JSON.parse(data);
182
+ } catch (error) {
183
+ return null;
184
+ }
185
+ }
186
+ }
187
+
188
+ class SimpleMemoryManager {
189
+ constructor(options = {}) {
190
+ this.maxConversations = options.maxConversations || 50;
191
+ this.maxMessagesPerConversation = options.maxMessagesPerConversation || 100;
192
+ this.conversations = [];
193
+ this.longTerm = new VectorStorage();
194
+ }
195
+
196
+
197
+ addMessage(conversationId, message) {
198
+ let conversation = this.conversations.find(c => c.id === conversationId);
199
+
200
+ if (!conversation) {
201
+ conversation = {
202
+ id: conversationId,
203
+ messages: [],
204
+ createdAt: new Date().toISOString(),
205
+ updatedAt: new Date().toISOString()
206
+ };
207
+
208
+ this.conversations.push(conversation);
209
+
210
+ if (this.conversations.length > this.maxConversations) {
211
+ this.conversations = this.conversations
212
+ .sort((a, b) => new Date(b.updatedAt) - new Date(a.updatedAt))
213
+ .slice(0, this.maxConversations);
214
+ }
215
+ }
216
+
217
+ conversation.messages.push({
218
+ ...message,
219
+ timestamp: new Date().toISOString()
220
+ });
221
+
222
+ conversation.updatedAt = new Date().toISOString();
223
+
224
+ if (conversation.messages.length > this.maxMessagesPerConversation) {
225
+ conversation.messages = conversation.messages.slice(-this.maxMessagesPerConversation);
226
+ }
227
+
228
+ return conversation;
229
+ }
230
+
231
+ getConversation(conversationId) {
232
+ return this.conversations.find(c => c.id === conversationId) || null;
233
+ }
234
+
235
+ getConversationMessages(conversationId, limit = null) {
236
+ const conversation = this.getConversation(conversationId);
237
+
238
+ if (!conversation) {
239
+ return [];
240
+ }
241
+
242
+ const messages = conversation.messages;
243
+
244
+ if (limit && messages.length > limit) {
245
+ return messages.slice(-limit);
246
+ }
247
+
248
+ return messages;
249
+ }
250
+
251
+ searchConversations(query, options = {}) {
252
+ const results = [];
253
+
254
+ for (const conversation of this.conversations) {
255
+ const matchingMessages = conversation.messages.filter(message =>
256
+ message.content && message.content.toLowerCase().includes(query.toLowerCase())
257
+ );
258
+
259
+ if (matchingMessages.length > 0) {
260
+ results.push({
261
+ conversation,
262
+ matchingMessages: options.includeMessages ? matchingMessages : matchingMessages.length
263
+ });
264
+ }
265
+ }
266
+
267
+ return results.sort((a, b) =>
268
+ new Date(b.conversation.updatedAt) - new Date(a.conversation.updatedAt)
269
+ );
270
+ }
271
+
272
+ deleteConversation(conversationId) {
273
+ const index = this.conversations.findIndex(c => c.id === conversationId);
274
+
275
+ if (index !== -1) {
276
+ this.conversations.splice(index, 1);
277
+ return true;
278
+ }
279
+
280
+ return false;
281
+ }
282
+
283
+ clearAllConversations() {
284
+ this.conversations = [];
285
+ return true;
286
+ }
287
+
288
+ async consolidate(node, msg, aiConfig) {
289
+ if (!msg.conversationId) return { success: false, error: "No conversationId" };
290
+ const conversation = this.getConversation(msg.conversationId);
291
+ if (!conversation || conversation.messages.length < 2) return { success: false, error: "Not enough messages to consolidate" };
292
+
293
+ const textToSummarize = conversation.messages.map(m => `${m.role}: ${m.content}`).join('\n');
294
+
295
+ try {
296
+ const prompt = `Summarize the following conversation for long-term memory storage. Focus on key facts, decisions, and preferences. Keep it concise:\n\n${textToSummarize}`;
297
+
298
+ const response = await axios.post(
299
+ 'https://openrouter.ai/api/v1/chat/completions',
300
+ {
301
+ model: aiConfig.model,
302
+ messages: [{ role: 'system', content: 'You are a memory consolidation assistant.' }, { role: 'user', content: prompt }]
303
+ },
304
+ {
305
+ headers: {
306
+ 'Authorization': `Bearer ${aiConfig.apiKey}`,
307
+ 'Content-Type': 'application/json'
308
+ }
309
+ }
310
+ );
311
+
312
+ const summary = response.data.choices[0]?.message?.content?.trim();
313
+ if (summary) {
314
+ // Generate embedding for the summary
315
+ const embeddingResponse = await axios.post(
316
+ 'https://openrouter.ai/api/v1/embeddings',
317
+ {
318
+ model: 'text-embedding-ada-002', // Default embedding model
319
+ input: summary
320
+ },
321
+ {
322
+ headers: {
323
+ 'Authorization': `Bearer ${aiConfig.apiKey}`,
324
+ 'Content-Type': 'application/json'
325
+ }
326
+ }
327
+ );
328
+
329
+ const vector = embeddingResponse.data.data[0].embedding;
330
+ this.longTerm.addItem(summary, vector, {
331
+ conversationId: msg.conversationId,
332
+ type: 'summary',
333
+ originalMessageCount: conversation.messages.length
334
+ });
335
+
336
+ return { success: true, summary };
337
+ }
338
+ } catch (error) {
339
+ node.error("Consolidation error: " + error.message);
340
+ return { success: false, error: error.message };
341
+ }
342
+ }
343
+
344
+ toJSON() {
345
+ return {
346
+ conversations: this.conversations,
347
+ longTerm: this.longTerm.toJSON(),
348
+ metadata: {
349
+ version: '1.1',
350
+ lastUpdated: new Date().toISOString(),
351
+ stats: {
352
+ conversationCount: this.conversations.length,
353
+ longTermItemCount: this.longTerm.vectors.length,
354
+ messageCount: this.conversations.reduce((count, conv) => count + conv.messages.length, 0)
355
+ }
356
+ }
357
+ };
358
+ }
359
+
360
+ fromJSON(data) {
361
+ if (data) {
362
+ this.conversations = data.conversations || [];
363
+ if (data.longTerm) {
364
+ this.longTerm.fromJSON(data.longTerm);
365
+ }
366
+ } else {
367
+ this.conversations = [];
368
+ }
369
+ }
370
+ }
371
+
372
+
373
+ module.exports = function (RED) {
2
374
  'use strict';
3
375
 
4
376
  function MemoryFileNode(config) {
5
377
  RED.nodes.createNode(this, config);
6
378
  const node = this;
7
-
379
+
8
380
  // Configuration
9
381
  node.name = config.name || 'AI Memory (File)';
10
382
  node.filename = config.filename || 'ai-memories.json';
11
-
12
- // Initialize empty memories array
13
- node.memories = [];
14
-
15
- // Load existing memories from file if they exist
16
- const fs = require('fs');
17
- const path = require('path');
18
- const filePath = path.join(RED.settings.userDir, node.filename);
19
-
20
- try {
21
- if (fs.existsSync(filePath)) {
22
- const data = fs.readFileSync(filePath, 'utf8');
23
- node.memories = JSON.parse(data);
24
- node.status({fill:"green",shape:"dot",text:"Ready"});
383
+ node.maxConversations = parseInt(config.maxConversations) || 50;
384
+ node.maxMessagesPerConversation = parseInt(config.maxMessagesPerConversation) || 100;
385
+ node.backupEnabled = config.backupEnabled !== false;
386
+ node.backupCount = parseInt(config.backupCount) || 3;
387
+ node.vectorEnabled = config.vectorEnabled === true;
388
+ node.embeddingModel = config.embeddingModel || 'text-embedding-ada-002';
389
+ node.consolidationThreshold = parseInt(config.consolidationThreshold) || 10;
390
+
391
+
392
+ const userDir = (RED.settings && RED.settings.userDir) || process.cwd();
393
+ const filePath = path.join(userDir, node.filename);
394
+
395
+ // Create storage and memory manager
396
+ node.fileStorage = new SimpleFileStorage({
397
+ filePath,
398
+ backupEnabled: node.backupEnabled,
399
+ backupCount: node.backupCount
400
+ });
401
+
402
+ node.memoryManager = new SimpleMemoryManager({
403
+ maxConversations: node.maxConversations,
404
+ maxMessagesPerConversation: node.maxMessagesPerConversation
405
+ });
406
+
407
+ // Load existing memories synchronously at startup
408
+ try {
409
+ const data = node.fileStorage.loadSync();
410
+ if (data) {
411
+ node.memoryManager.fromJSON(data);
412
+ node.status({
413
+ fill: "green",
414
+ shape: "dot",
415
+ text: `${node.memoryManager.conversations.length} conversations`
416
+ });
25
417
  } else {
26
- node.status({fill:"blue",shape:"ring",text:"New file will be created"});
418
+ node.status({ fill: "blue", shape: "ring", text: "New memory file will be created" });
27
419
  }
28
420
  } catch (err) {
29
421
  node.error("Error loading memory file: " + err.message);
30
- node.status({fill:"red",shape:"ring",text:"Error loading"});
422
+ node.status({ fill: "red", shape: "ring", text: "Error loading" });
31
423
  }
32
424
 
33
425
  // Handle incoming messages
34
- node.on('input', function(msg) {
426
+ node.on('input', async function (msg, send, done) {
427
+ // Use send and done for Node-RED 1.0+ compatibility
428
+ send = send || function () { node.send.apply(node, arguments) };
429
+
35
430
  try {
36
- // For now, just pass through the message
37
- // We'll add memory operations in the next iteration
38
- node.send(msg);
39
-
40
- // Update status
41
- node.status({fill:"green",shape:"dot",text:node.memories.length + " memories"});
431
+ msg.aimemory = msg.aimemory || {};
432
+
433
+ if (msg.command) {
434
+ await processCommand(node, msg);
435
+ } else {
436
+ const conversationId = msg.conversationId || 'default';
437
+ const messages = node.memoryManager.getConversationMessages(conversationId);
438
+
439
+ // Auto-consolidate if threshold reached
440
+ if (messages.length >= node.consolidationThreshold && msg.aiagent) {
441
+ node.memoryManager.consolidate(node, msg, msg.aiagent);
442
+ }
443
+
444
+ msg.aimemory = {
445
+ type: 'file',
446
+ conversationId,
447
+ context: messages,
448
+ longTermEnabled: node.vectorEnabled
449
+ };
450
+ }
451
+
452
+ send(msg);
453
+
454
+ node.status({
455
+ fill: "green",
456
+ shape: "dot",
457
+ text: `${node.memoryManager.conversations.length} convs, ${node.memoryManager.longTerm.vectors.length} long-term`
458
+ });
459
+
460
+ if (done) done();
42
461
  } catch (err) {
462
+
43
463
  node.error("Error in memory node: " + err.message, msg);
44
- node.status({fill:"red",shape:"ring",text:"Error"});
464
+ node.status({ fill: "red", shape: "ring", text: "Error" });
465
+ if (done) done(err);
45
466
  }
46
467
  });
47
468
 
48
- // Cleanup on node removal
49
- node.on('close', function() {
50
- // Save memories to file
469
+ async function processCommand(node, msg) {
470
+ const command = msg.command;
471
+
472
+ switch (command) {
473
+ case 'add':
474
+ if (!msg.message) {
475
+ throw new Error('No message content provided');
476
+ }
477
+
478
+ const conversationId = msg.conversationId || 'default';
479
+ const conversation = node.memoryManager.addMessage(conversationId, msg.message);
480
+
481
+ msg.result = {
482
+ success: true,
483
+ operation: 'add',
484
+ conversationId,
485
+ messageCount: conversation.messages.length
486
+ };
487
+
488
+ await node.fileStorage.save(node.memoryManager.toJSON());
489
+ break;
490
+
491
+ case 'get':
492
+ const getConversationId = msg.conversationId || 'default';
493
+ const limit = msg.limit || null;
494
+
495
+ msg.result = {
496
+ success: true,
497
+ operation: 'get',
498
+ conversationId: getConversationId,
499
+ messages: node.memoryManager.getConversationMessages(getConversationId, limit)
500
+ };
501
+ break;
502
+
503
+ case 'search':
504
+ if (!msg.query) {
505
+ throw new Error('No search query provided');
506
+ }
507
+
508
+ msg.result = {
509
+ success: true,
510
+ operation: 'search',
511
+ query: msg.query,
512
+ results: node.memoryManager.searchConversations(msg.query, {
513
+ includeMessages: msg.includeMessages !== false
514
+ })
515
+ };
516
+ break;
517
+
518
+ case 'delete':
519
+ if (!msg.conversationId) {
520
+ throw new Error('No conversation ID provided');
521
+ }
522
+
523
+ const deleted = node.memoryManager.deleteConversation(msg.conversationId);
524
+
525
+ msg.result = {
526
+ success: deleted,
527
+ operation: 'delete',
528
+ conversationId: msg.conversationId
529
+ };
530
+
531
+ if (deleted) {
532
+ await node.fileStorage.save(node.memoryManager.toJSON());
533
+ }
534
+ break;
535
+
536
+ case 'clear':
537
+ node.memoryManager.clearAllConversations();
538
+ node.memoryManager.longTerm = new VectorStorage();
539
+
540
+ msg.result = {
541
+ success: true,
542
+ operation: 'clear'
543
+ };
544
+
545
+ await node.fileStorage.save(node.memoryManager.toJSON());
546
+ break;
547
+
548
+ case 'consolidate':
549
+ if (!msg.aiagent) {
550
+ throw new Error('AI Agent configuration (msg.aiagent) required for consolidation');
551
+ }
552
+ msg.result = await node.memoryManager.consolidate(node, msg, msg.aiagent);
553
+ await node.fileStorage.save(node.memoryManager.toJSON());
554
+ break;
555
+
556
+ case 'query':
557
+ if (!node.vectorEnabled) {
558
+ throw new Error('Vector storage not enabled for this node');
559
+ }
560
+ if (!msg.query) {
561
+ throw new Error('No query text/vector provided');
562
+ }
563
+ if (!msg.aiagent) {
564
+ throw new Error('AI Agent configuration (msg.aiagent) required for semantic search');
565
+ }
566
+
567
+ try {
568
+ // Generate embedding for query if it's text
569
+ let queryVector = msg.query;
570
+ if (typeof msg.query === 'string') {
571
+ const embeddingResponse = await axios.post(
572
+ 'https://openrouter.ai/api/v1/embeddings',
573
+ {
574
+ model: node.embeddingModel,
575
+ input: msg.query
576
+ },
577
+ {
578
+ headers: {
579
+ 'Authorization': `Bearer ${msg.aiagent.apiKey}`,
580
+ 'Content-Type': 'application/json'
581
+ }
582
+ }
583
+ );
584
+ queryVector = embeddingResponse.data.data[0].embedding;
585
+ }
586
+
587
+ msg.result = {
588
+ success: true,
589
+ operation: 'query',
590
+ results: node.memoryManager.longTerm.search(queryVector, msg.limit || 5)
591
+ };
592
+ } catch (error) {
593
+ throw new Error("Semantic search error: " + error.message);
594
+ }
595
+ break;
596
+
597
+ default:
598
+ throw new Error(`Unknown command: ${command}`);
599
+ }
600
+ }
601
+
602
+
603
+ node.on('close', async function () {
51
604
  try {
52
- fs.writeFileSync(filePath, JSON.stringify(node.memories, null, 2));
605
+ await node.fileStorage.save(node.memoryManager.toJSON());
53
606
  } catch (err) {
54
607
  node.error("Error saving memory file: " + err.message);
55
608
  }
@@ -57,6 +610,5 @@ module.exports = function(RED) {
57
610
  });
58
611
  }
59
612
 
60
- // Register the node type
61
613
  RED.nodes.registerType("ai-memory-file", MemoryFileNode);
62
614
  };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "node-red-contrib-ai-agent",
3
- "version": "0.0.5",
3
+ "version": "0.0.7",
4
4
  "description": "AI Agent for Node-RED",
5
5
  "repository": {
6
6
  "type": "git",
@@ -63,4 +63,4 @@
63
63
  "ai-memory-inmem": "./memory-inmem/memory-inmem.js"
64
64
  }
65
65
  }
66
- }
66
+ }