@elizaos/plugin-memory 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1024 @@
+ var __defProp = Object.defineProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, {
+ get: all[name],
+ enumerable: true,
+ configurable: true,
+ set: (newValue) => all[name] = () => newValue
+ });
+ };
+
+ // src/services/memory-service.ts
+ import {
+ Service,
+ logger
+ } from "@elizaos/core";
+ import { eq, and, desc, sql as sql4, cosineDistance, gte } from "drizzle-orm";
+
+ // src/schemas/index.ts
+ var exports_schemas = {};
+ __export(exports_schemas, {
+ sessionSummaries: () => sessionSummaries,
+ memoryAccessLogs: () => memoryAccessLogs,
+ longTermMemories: () => longTermMemories
+ });
+
+ // src/schemas/long-term-memories.ts
+ import { sql } from "drizzle-orm";
+ import {
+ pgTable,
+ text,
+ integer,
+ jsonb,
+ real,
+ index,
+ varchar,
+ timestamp
+ } from "drizzle-orm/pg-core";
+ var longTermMemories = pgTable("long_term_memories", {
+ id: varchar("id", { length: 36 }).primaryKey(),
+ agentId: varchar("agent_id", { length: 36 }).notNull(),
+ entityId: varchar("entity_id", { length: 36 }).notNull(),
+ category: text("category").notNull(),
+ content: text("content").notNull(),
+ metadata: jsonb("metadata"),
+ embedding: real("embedding").array(),
+ confidence: real("confidence").default(1),
+ source: text("source"),
+ createdAt: timestamp("created_at").default(sql`now()`).notNull(),
+ updatedAt: timestamp("updated_at").default(sql`now()`).notNull(),
+ lastAccessedAt: timestamp("last_accessed_at"),
+ accessCount: integer("access_count").default(0)
+ }, (table) => ({
+ agentEntityIdx: index("long_term_memories_agent_entity_idx").on(table.agentId, table.entityId),
+ categoryIdx: index("long_term_memories_category_idx").on(table.category),
+ confidenceIdx: index("long_term_memories_confidence_idx").on(table.confidence),
+ createdAtIdx: index("long_term_memories_created_at_idx").on(table.createdAt)
+ }));
+ // src/schemas/session-summaries.ts
+ import { sql as sql2 } from "drizzle-orm";
+ import {
+ pgTable as pgTable2,
+ text as text2,
+ integer as integer2,
+ jsonb as jsonb2,
+ real as real2,
+ index as index2,
+ varchar as varchar2,
+ timestamp as timestamp2
+ } from "drizzle-orm/pg-core";
+ var sessionSummaries = pgTable2("session_summaries", {
+ id: varchar2("id", { length: 36 }).primaryKey(),
+ agentId: varchar2("agent_id", { length: 36 }).notNull(),
+ roomId: varchar2("room_id", { length: 36 }).notNull(),
+ entityId: varchar2("entity_id", { length: 36 }),
+ summary: text2("summary").notNull(),
+ messageCount: integer2("message_count").notNull(),
+ lastMessageOffset: integer2("last_message_offset").notNull().default(0),
+ startTime: timestamp2("start_time").notNull(),
+ endTime: timestamp2("end_time").notNull(),
+ topics: jsonb2("topics"),
+ metadata: jsonb2("metadata"),
+ embedding: real2("embedding").array(),
+ createdAt: timestamp2("created_at").default(sql2`now()`).notNull(),
+ updatedAt: timestamp2("updated_at").default(sql2`now()`).notNull()
+ }, (table) => ({
+ agentRoomIdx: index2("session_summaries_agent_room_idx").on(table.agentId, table.roomId),
+ entityIdx: index2("session_summaries_entity_idx").on(table.entityId),
+ startTimeIdx: index2("session_summaries_start_time_idx").on(table.startTime)
+ }));
+ // src/schemas/memory-access-logs.ts
+ import { sql as sql3 } from "drizzle-orm";
+ import { pgTable as pgTable3, text as text3, integer as integer3, real as real3, index as index3, varchar as varchar3, timestamp as timestamp3 } from "drizzle-orm/pg-core";
+ var memoryAccessLogs = pgTable3("memory_access_logs", {
+ id: varchar3("id", { length: 36 }).primaryKey(),
+ agentId: varchar3("agent_id", { length: 36 }).notNull(),
+ memoryId: varchar3("memory_id", { length: 36 }).notNull(),
+ memoryType: text3("memory_type").notNull(),
+ accessedAt: timestamp3("accessed_at").default(sql3`now()`).notNull(),
+ roomId: varchar3("room_id", { length: 36 }),
+ relevanceScore: real3("relevance_score"),
+ wasUseful: integer3("was_useful")
+ }, (table) => ({
+ memoryIdx: index3("memory_access_logs_memory_idx").on(table.memoryId),
+ agentIdx: index3("memory_access_logs_agent_idx").on(table.agentId),
+ accessedAtIdx: index3("memory_access_logs_accessed_at_idx").on(table.accessedAt)
+ }));
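
The three Drizzle tables defined above (long_term_memories, session_summaries, memory_access_logs) are also exported from the package, so a host project can query them directly with drizzle-orm. A minimal sketch, assuming a Drizzle Postgres database handle; the helper itself is illustrative and not part of the package:

import { and, desc, eq, gte } from "drizzle-orm";
import { longTermMemories } from "@elizaos/plugin-memory";

// Illustrative helper (not in the package): fetch high-confidence facts for one user.
async function topFactsFor(db: any /* Drizzle Postgres handle */, agentId: string, entityId: string) {
  return db
    .select()
    .from(longTermMemories)
    .where(and(
      eq(longTermMemories.agentId, agentId),
      eq(longTermMemories.entityId, entityId),
      gte(longTermMemories.confidence, 0.7)
    ))
    .orderBy(desc(longTermMemories.confidence), desc(longTermMemories.updatedAt))
    .limit(10);
}
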
+ // src/services/memory-service.ts
+ class MemoryService extends Service {
+ static serviceType = "memory";
+ sessionMessageCounts;
+ memoryConfig;
+ lastExtractionCheckpoints;
+ capabilityDescription = "Advanced memory management with short-term summarization and long-term persistent facts";
+ constructor(runtime) {
+ super(runtime);
+ this.sessionMessageCounts = new Map;
+ this.lastExtractionCheckpoints = new Map;
+ this.memoryConfig = {
+ shortTermSummarizationThreshold: 5,
+ shortTermRetainRecent: 10,
+ longTermExtractionEnabled: true,
+ longTermVectorSearchEnabled: false,
+ longTermConfidenceThreshold: 0.7,
+ longTermExtractionInterval: 5,
+ summaryModelType: "TEXT_LARGE",
+ summaryMaxTokens: 2500
+ };
+ }
+ static async start(runtime) {
+ const service = new MemoryService(runtime);
+ await service.initialize(runtime);
+ return service;
+ }
+ async stop() {
+ logger.info("MemoryService stopped");
+ }
+ async initialize(runtime) {
+ this.runtime = runtime;
+ const threshold = runtime.getSetting("MEMORY_SUMMARIZATION_THRESHOLD");
+ if (threshold) {
+ this.memoryConfig.shortTermSummarizationThreshold = parseInt(threshold, 10);
+ }
+ const retainRecent = runtime.getSetting("MEMORY_RETAIN_RECENT");
+ if (retainRecent) {
+ this.memoryConfig.shortTermRetainRecent = parseInt(retainRecent, 10);
+ }
+ const longTermEnabled = runtime.getSetting("MEMORY_LONG_TERM_ENABLED");
+ if (longTermEnabled === "false") {
+ this.memoryConfig.longTermExtractionEnabled = false;
+ } else if (longTermEnabled === "true") {
+ this.memoryConfig.longTermExtractionEnabled = true;
+ }
+ const confidenceThreshold = runtime.getSetting("MEMORY_CONFIDENCE_THRESHOLD");
+ if (confidenceThreshold) {
+ this.memoryConfig.longTermConfidenceThreshold = parseFloat(confidenceThreshold);
+ }
+ logger.info({
+ summarizationThreshold: this.memoryConfig.shortTermSummarizationThreshold,
+ retainRecent: this.memoryConfig.shortTermRetainRecent,
+ longTermEnabled: this.memoryConfig.longTermExtractionEnabled,
+ extractionInterval: this.memoryConfig.longTermExtractionInterval,
+ confidenceThreshold: this.memoryConfig.longTermConfidenceThreshold
+ }, "MemoryService initialized");
+ }
+ getDb() {
+ const db = this.runtime.db;
+ if (!db) {
+ throw new Error("Database not available");
+ }
+ return db;
+ }
+ getConfig() {
+ return { ...this.memoryConfig };
+ }
+ updateConfig(updates) {
+ this.memoryConfig = { ...this.memoryConfig, ...updates };
+ }
+ incrementMessageCount(roomId) {
+ const current = this.sessionMessageCounts.get(roomId) || 0;
+ const newCount = current + 1;
+ this.sessionMessageCounts.set(roomId, newCount);
+ return newCount;
+ }
+ resetMessageCount(roomId) {
+ this.sessionMessageCounts.set(roomId, 0);
+ }
+ async shouldSummarize(roomId) {
+ const count = await this.runtime.countMemories(roomId, false, "messages");
+ return count >= this.memoryConfig.shortTermSummarizationThreshold;
+ }
+ getExtractionKey(entityId, roomId) {
+ return `memory:extraction:${entityId}:${roomId}`;
+ }
+ async getLastExtractionCheckpoint(entityId, roomId) {
+ const key = this.getExtractionKey(entityId, roomId);
+ const cached = this.lastExtractionCheckpoints.get(key);
+ if (cached !== undefined) {
+ return cached;
+ }
+ try {
+ const checkpoint = await this.runtime.getCache(key);
+ const messageCount = checkpoint ?? 0;
+ this.lastExtractionCheckpoints.set(key, messageCount);
+ return messageCount;
+ } catch (error) {
+ logger.warn({ error }, "Failed to get extraction checkpoint from cache");
+ return 0;
+ }
+ }
+ async setLastExtractionCheckpoint(entityId, roomId, messageCount) {
+ const key = this.getExtractionKey(entityId, roomId);
+ this.lastExtractionCheckpoints.set(key, messageCount);
+ try {
+ await this.runtime.setCache(key, messageCount);
+ logger.debug(`Set extraction checkpoint for ${entityId} in room ${roomId} at message count ${messageCount}`);
+ } catch (error) {
+ logger.error({ error }, "Failed to persist extraction checkpoint to cache");
+ }
+ }
+ async shouldRunExtraction(entityId, roomId, currentMessageCount) {
+ const interval = this.memoryConfig.longTermExtractionInterval;
+ const lastCheckpoint = await this.getLastExtractionCheckpoint(entityId, roomId);
+ const currentCheckpoint = Math.floor(currentMessageCount / interval) * interval;
+ const shouldRun = currentMessageCount >= interval && currentCheckpoint > lastCheckpoint;
+ logger.debug({
+ entityId,
+ roomId,
+ currentMessageCount,
+ interval,
+ lastCheckpoint,
+ currentCheckpoint,
+ shouldRun
+ }, "Extraction check");
+ return shouldRun;
+ }
+ async storeLongTermMemory(memory) {
+ const db = this.getDb();
+ const id = crypto.randomUUID();
+ const now = new Date;
+ const newMemory = {
+ id,
+ createdAt: now,
+ updatedAt: now,
+ accessCount: 0,
+ ...memory
+ };
+ try {
+ await db.insert(longTermMemories).values({
+ id: newMemory.id,
+ agentId: newMemory.agentId,
+ entityId: newMemory.entityId,
+ category: newMemory.category,
+ content: newMemory.content,
+ metadata: newMemory.metadata || {},
+ embedding: newMemory.embedding,
+ confidence: newMemory.confidence,
+ source: newMemory.source,
+ accessCount: newMemory.accessCount,
+ createdAt: now,
+ updatedAt: now,
+ lastAccessedAt: newMemory.lastAccessedAt
+ });
+ } catch (error) {
+ logger.error({ error }, "Failed to store long-term memory");
+ throw error;
+ }
+ logger.info(`Stored long-term memory: ${newMemory.category} for entity ${newMemory.entityId}`);
+ return newMemory;
+ }
+ async getLongTermMemories(entityId, category, limit = 10) {
+ const db = this.getDb();
+ const conditions = [
+ eq(longTermMemories.agentId, this.runtime.agentId),
+ eq(longTermMemories.entityId, entityId)
+ ];
+ if (category) {
+ conditions.push(eq(longTermMemories.category, category));
+ }
+ const results = await db.select().from(longTermMemories).where(and(...conditions)).orderBy(desc(longTermMemories.confidence), desc(longTermMemories.updatedAt)).limit(limit);
+ return results.map((row) => ({
+ id: row.id,
+ agentId: row.agentId,
+ entityId: row.entityId,
+ category: row.category,
+ content: row.content,
+ metadata: row.metadata,
+ embedding: row.embedding,
+ confidence: row.confidence,
+ source: row.source,
+ createdAt: row.createdAt,
+ updatedAt: row.updatedAt,
+ lastAccessedAt: row.lastAccessedAt,
+ accessCount: row.accessCount
+ }));
+ }
+ async updateLongTermMemory(id, updates) {
+ const db = this.getDb();
+ const updateData = {
+ updatedAt: new Date
+ };
+ if (updates.content !== undefined) {
+ updateData.content = updates.content;
+ }
+ if (updates.metadata !== undefined) {
+ updateData.metadata = updates.metadata;
+ }
+ if (updates.confidence !== undefined) {
+ updateData.confidence = updates.confidence;
+ }
+ if (updates.embedding !== undefined) {
+ updateData.embedding = updates.embedding;
+ }
+ if (updates.lastAccessedAt !== undefined) {
+ updateData.lastAccessedAt = updates.lastAccessedAt;
+ }
+ if (updates.accessCount !== undefined) {
+ updateData.accessCount = updates.accessCount;
+ }
+ await db.update(longTermMemories).set(updateData).where(eq(longTermMemories.id, id));
+ logger.info(`Updated long-term memory: ${id}`);
+ }
+ async deleteLongTermMemory(id) {
+ const db = this.getDb();
+ await db.delete(longTermMemories).where(eq(longTermMemories.id, id));
+ logger.info(`Deleted long-term memory: ${id}`);
+ }
+ async getCurrentSessionSummary(roomId) {
+ const db = this.getDb();
+ const results = await db.select().from(sessionSummaries).where(and(eq(sessionSummaries.agentId, this.runtime.agentId), eq(sessionSummaries.roomId, roomId))).orderBy(desc(sessionSummaries.updatedAt)).limit(1);
+ if (results.length === 0) {
+ return null;
+ }
+ const row = results[0];
+ return {
+ id: row.id,
+ agentId: row.agentId,
+ roomId: row.roomId,
+ entityId: row.entityId,
+ summary: row.summary,
+ messageCount: row.messageCount,
+ lastMessageOffset: row.lastMessageOffset,
+ startTime: row.startTime,
+ endTime: row.endTime,
+ topics: row.topics || [],
+ metadata: row.metadata,
+ embedding: row.embedding,
+ createdAt: row.createdAt,
+ updatedAt: row.updatedAt
+ };
+ }
+ async storeSessionSummary(summary) {
+ const db = this.getDb();
+ const id = crypto.randomUUID();
+ const now = new Date;
+ const newSummary = {
+ id,
+ createdAt: now,
+ updatedAt: now,
+ ...summary
+ };
+ await db.insert(sessionSummaries).values({
+ id: newSummary.id,
+ agentId: newSummary.agentId,
+ roomId: newSummary.roomId,
+ entityId: newSummary.entityId || null,
+ summary: newSummary.summary,
+ messageCount: newSummary.messageCount,
+ lastMessageOffset: newSummary.lastMessageOffset,
+ startTime: newSummary.startTime,
+ endTime: newSummary.endTime,
+ topics: newSummary.topics || [],
+ metadata: newSummary.metadata || {},
+ embedding: newSummary.embedding,
+ createdAt: now,
+ updatedAt: now
+ });
+ logger.info(`Stored session summary for room ${newSummary.roomId}`);
+ return newSummary;
+ }
+ async updateSessionSummary(id, updates) {
+ const db = this.getDb();
+ const updateData = {
+ updatedAt: new Date
+ };
+ if (updates.summary !== undefined) {
+ updateData.summary = updates.summary;
+ }
+ if (updates.messageCount !== undefined) {
+ updateData.messageCount = updates.messageCount;
+ }
+ if (updates.lastMessageOffset !== undefined) {
+ updateData.lastMessageOffset = updates.lastMessageOffset;
+ }
+ if (updates.endTime !== undefined) {
+ updateData.endTime = updates.endTime;
+ }
+ if (updates.topics !== undefined) {
+ updateData.topics = updates.topics;
+ }
+ if (updates.metadata !== undefined) {
+ updateData.metadata = updates.metadata;
+ }
+ if (updates.embedding !== undefined) {
+ updateData.embedding = updates.embedding;
+ }
+ await db.update(sessionSummaries).set(updateData).where(eq(sessionSummaries.id, id));
+ logger.info(`Updated session summary: ${id}`);
+ }
+ async getSessionSummaries(roomId, limit = 5) {
+ const db = this.getDb();
+ const results = await db.select().from(sessionSummaries).where(and(eq(sessionSummaries.agentId, this.runtime.agentId), eq(sessionSummaries.roomId, roomId))).orderBy(desc(sessionSummaries.updatedAt)).limit(limit);
+ return results.map((row) => ({
+ id: row.id,
+ agentId: row.agentId,
+ roomId: row.roomId,
+ entityId: row.entityId,
+ summary: row.summary,
+ messageCount: row.messageCount,
+ lastMessageOffset: row.lastMessageOffset,
+ startTime: row.startTime,
+ endTime: row.endTime,
+ topics: row.topics || [],
+ metadata: row.metadata,
+ embedding: row.embedding,
+ createdAt: row.createdAt,
+ updatedAt: row.updatedAt
+ }));
+ }
+ async searchLongTermMemories(entityId, queryEmbedding, limit = 5, matchThreshold = 0.7) {
+ if (!this.memoryConfig.longTermVectorSearchEnabled) {
+ logger.warn("Vector search is not enabled, falling back to recent memories");
+ return this.getLongTermMemories(entityId, undefined, limit);
+ }
+ const db = this.getDb();
+ try {
+ const cleanVector = queryEmbedding.map((n) => Number.isFinite(n) ? Number(n.toFixed(6)) : 0);
+ const similarity = sql4`1 - (${cosineDistance(longTermMemories.embedding, cleanVector)})`;
+ const conditions = [
+ eq(longTermMemories.agentId, this.runtime.agentId),
+ eq(longTermMemories.entityId, entityId),
+ sql4`${longTermMemories.embedding} IS NOT NULL`
+ ];
+ if (matchThreshold > 0) {
+ conditions.push(gte(similarity, matchThreshold));
+ }
+ const results = await db.select({
+ memory: longTermMemories,
+ similarity
+ }).from(longTermMemories).where(and(...conditions)).orderBy(desc(similarity)).limit(limit);
+ return results.map((row) => ({
+ id: row.memory.id,
+ agentId: row.memory.agentId,
+ entityId: row.memory.entityId,
+ category: row.memory.category,
+ content: row.memory.content,
+ metadata: row.memory.metadata,
+ embedding: row.memory.embedding,
+ confidence: row.memory.confidence,
+ source: row.memory.source,
+ createdAt: row.memory.createdAt,
+ updatedAt: row.memory.updatedAt,
+ lastAccessedAt: row.memory.lastAccessedAt,
+ accessCount: row.memory.accessCount,
+ similarity: row.similarity
+ }));
+ } catch (error) {
+ logger.warn({ error }, "Vector search failed, falling back to recent memories");
+ return this.getLongTermMemories(entityId, undefined, limit);
+ }
+ }
+ async getFormattedLongTermMemories(entityId) {
+ const memories = await this.getLongTermMemories(entityId, undefined, 20);
+ if (memories.length === 0) {
+ return "";
+ }
+ const grouped = new Map;
+ for (const memory of memories) {
+ if (!grouped.has(memory.category)) {
+ grouped.set(memory.category, []);
+ }
+ grouped.get(memory.category)?.push(memory);
+ }
+ const sections = [];
+ for (const [category, categoryMemories] of grouped.entries()) {
+ const categoryName = category.split("_").map((word) => word.charAt(0).toUpperCase() + word.slice(1)).join(" ");
+ const items = categoryMemories.map((m) => `- ${m.content}`).join(`
+ `);
+ sections.push(`**${categoryName}**:
+ ${items}`);
+ }
+ return sections.join(`
+
+ `);
+ }
+ }
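
MemoryService registers under serviceType "memory", which is how the evaluators and providers later in this bundle look it up. A minimal usage sketch, assuming an agent runtime that already has this plugin loaded; variable names such as userId are illustrative:

// Sketch: calling MemoryService from other plugin code.
const memoryService = runtime.getService("memory") as MemoryService;

// Persist a fact about a user, mirroring the storeLongTermMemory call shape above.
await memoryService.storeLongTermMemory({
  agentId: runtime.agentId,
  entityId: userId,
  category: "preferences",
  content: "Prefers concise answers with code samples",
  confidence: 0.9,
  source: "conversation",
});

// Read back the highest-confidence facts, optionally filtered by category.
const facts = await memoryService.getLongTermMemories(userId, "preferences", 5);
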
+
+ // src/evaluators/summarization.ts
+ import {
+ logger as logger2,
+ ModelType,
+ composePromptFromState
+ } from "@elizaos/core";
+ var initialSummarizationTemplate = `# Task: Summarize Conversation
+
+ You are analyzing a conversation to create a concise summary that captures the key points, topics, and important details.
+
+ # Recent Messages
+ {{recentMessages}}
+
+ # Instructions
+ Generate a summary that:
+ 1. Captures the main topics discussed
+ 2. Highlights key information shared
+ 3. Notes any decisions made or questions asked
+ 4. Maintains context for future reference
+ 5. Is concise but comprehensive
+
+ **IMPORTANT**: Keep the summary under 2500 tokens. Be comprehensive but concise.
+
+ Also extract:
+ - **Topics**: List of main topics discussed (comma-separated)
+ - **Key Points**: Important facts or decisions (bullet points)
+
+ Respond in this XML format:
+ <summary>
+ <text>Your comprehensive summary here</text>
+ <topics>topic1, topic2, topic3</topics>
+ <keyPoints>
+ <point>First key point</point>
+ <point>Second key point</point>
+ </keyPoints>
+ </summary>`;
+ var updateSummarizationTemplate = `# Task: Update and Condense Conversation Summary
+
+ You are updating an existing conversation summary with new messages, while keeping the total summary concise.
+
+ # Existing Summary
+ {{existingSummary}}
+
+ # Existing Topics
+ {{existingTopics}}
+
+ # New Messages Since Last Summary
+ {{newMessages}}
+
+ # Instructions
+ Update the summary by:
+ 1. Merging the existing summary with insights from the new messages
+ 2. Removing redundant or less important details to stay under the token limit
+ 3. Keeping the most important context and decisions
+ 4. Adding new topics if they emerge
+ 5. **CRITICAL**: Keep the ENTIRE updated summary under 2500 tokens
+
+ The goal is a rolling summary that captures the essence of the conversation without growing indefinitely.
+
+ Respond in this XML format:
+ <summary>
+ <text>Your updated and condensed summary here</text>
+ <topics>topic1, topic2, topic3</topics>
+ <keyPoints>
+ <point>First key point</point>
+ <point>Second key point</point>
+ </keyPoints>
+ </summary>`;
+ function parseSummaryXML(xml) {
+ const summaryMatch = xml.match(/<text>([\s\S]*?)<\/text>/);
+ const topicsMatch = xml.match(/<topics>([\s\S]*?)<\/topics>/);
+ const keyPointsMatches = xml.matchAll(/<point>([\s\S]*?)<\/point>/g);
+ const summary = summaryMatch ? summaryMatch[1].trim() : "Summary not available";
+ const topics = topicsMatch ? topicsMatch[1].split(",").map((t) => t.trim()).filter(Boolean) : [];
+ const keyPoints = Array.from(keyPointsMatches).map((match) => match[1].trim());
+ return { summary, topics, keyPoints };
+ }
+ var summarizationEvaluator = {
+ name: "MEMORY_SUMMARIZATION",
+ description: "Summarizes conversations to optimize short-term memory",
+ similes: ["CONVERSATION_SUMMARY", "CONTEXT_COMPRESSION", "MEMORY_OPTIMIZATION"],
+ alwaysRun: true,
+ validate: async (runtime, message) => {
+ logger2.debug(`Validating summarization for message: ${message.content?.text}`);
+ if (!message.content?.text) {
+ return false;
+ }
+ const memoryService = runtime.getService("memory");
+ if (!memoryService) {
+ return false;
+ }
+ const config = memoryService.getConfig();
+ const currentMessageCount = await runtime.countMemories(message.roomId, false, "messages");
+ const shouldSummarize = currentMessageCount >= config.shortTermSummarizationThreshold;
+ logger2.debug({
+ roomId: message.roomId,
+ currentMessageCount,
+ threshold: config.shortTermSummarizationThreshold,
+ shouldSummarize
+ }, "Summarization check");
+ return shouldSummarize;
+ },
+ handler: async (runtime, message) => {
+ const memoryService = runtime.getService("memory");
+ if (!memoryService) {
+ logger2.error("MemoryService not found");
+ return;
+ }
+ const config = memoryService.getConfig();
+ const { roomId } = message;
+ try {
+ logger2.info(`Starting summarization for room ${roomId}`);
+ const existingSummary = await memoryService.getCurrentSessionSummary(roomId);
+ const lastOffset = existingSummary?.lastMessageOffset || 0;
+ const totalMessageCount = await runtime.countMemories(roomId, false, "messages");
+ const newMessages = await runtime.getMemories({
+ tableName: "messages",
+ roomId,
+ count: config.shortTermSummarizationThreshold,
+ unique: false,
+ start: lastOffset
+ });
+ if (newMessages.length === 0) {
+ logger2.debug("No new messages to summarize");
+ return;
+ }
+ const sortedMessages = newMessages.sort((a, b) => (a.createdAt || 0) - (b.createdAt || 0));
+ const formattedMessages = sortedMessages.map((msg) => {
+ const sender = msg.entityId === runtime.agentId ? runtime.character.name : "User";
+ return `${sender}: ${msg.content.text || "[non-text message]"}`;
+ }).join(`
+ `);
+ const state = await runtime.composeState(message);
+ let prompt;
+ let template;
+ if (existingSummary) {
+ template = updateSummarizationTemplate;
+ prompt = composePromptFromState({
+ state: {
+ ...state,
+ existingSummary: existingSummary.summary,
+ existingTopics: existingSummary.topics?.join(", ") || "None",
+ newMessages: formattedMessages
+ },
+ template
+ });
+ } else {
+ template = initialSummarizationTemplate;
+ prompt = composePromptFromState({
+ state: {
+ ...state,
+ recentMessages: formattedMessages
+ },
+ template
+ });
+ }
+ const response = await runtime.useModel(ModelType.TEXT_LARGE, {
+ prompt,
+ maxTokens: config.summaryMaxTokens || 2500
+ });
+ const summaryResult = parseSummaryXML(response);
+ logger2.info(`${existingSummary ? "Updated" : "Generated"} summary: ${summaryResult.summary.substring(0, 100)}...`);
+ const newOffset = totalMessageCount;
+ const firstMessage = sortedMessages[0];
+ const lastMessage = sortedMessages[sortedMessages.length - 1];
+ const startTime = existingSummary ? existingSummary.startTime : firstMessage?.createdAt && firstMessage.createdAt > 0 ? new Date(firstMessage.createdAt) : new Date;
+ const endTime = lastMessage?.createdAt && lastMessage.createdAt > 0 ? new Date(lastMessage.createdAt) : new Date;
+ if (existingSummary) {
+ await memoryService.updateSessionSummary(existingSummary.id, {
+ summary: summaryResult.summary,
+ messageCount: existingSummary.messageCount + sortedMessages.length,
+ lastMessageOffset: newOffset,
+ endTime,
+ topics: summaryResult.topics,
+ metadata: {
+ keyPoints: summaryResult.keyPoints
+ }
+ });
+ logger2.info(`Updated summary for room ${roomId}: ${sortedMessages.length} new messages processed (offset: ${lastOffset} → ${newOffset})`);
+ } else {
+ await memoryService.storeSessionSummary({
+ agentId: runtime.agentId,
+ roomId,
+ entityId: message.entityId !== runtime.agentId ? message.entityId : undefined,
+ summary: summaryResult.summary,
+ messageCount: sortedMessages.length,
+ lastMessageOffset: newOffset,
+ startTime,
+ endTime,
+ topics: summaryResult.topics,
+ metadata: {
+ keyPoints: summaryResult.keyPoints
+ }
+ });
+ logger2.info(`Created new summary for room ${roomId}: ${sortedMessages.length} messages summarized (offset: 0 → ${newOffset})`);
+ }
+ } catch (error) {
+ logger2.error({ error }, "Error during summarization:");
+ }
+ },
+ examples: []
+ };
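
The evaluator above depends on parseSummaryXML to turn the model's reply into a plain object. The helper is internal to the bundle, but its behaviour follows directly from the regexes above; a sketch with a made-up reply:

// Made-up model reply, shaped like the XML requested by the templates above.
const reply = `<summary>
<text>User asked about index design; agent suggested a composite index on (agent_id, entity_id).</text>
<topics>postgres, indexing</topics>
<keyPoints>
<point>Composite index recommended</point>
</keyPoints>
</summary>`;

// parseSummaryXML(reply) would return roughly:
// {
//   summary: "User asked about index design; agent suggested a composite index on (agent_id, entity_id).",
//   topics: ["postgres", "indexing"],
//   keyPoints: ["Composite index recommended"]
// }
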
+
+ // src/evaluators/long-term-extraction.ts
+ import {
+ logger as logger3,
+ ModelType as ModelType2,
+ composePromptFromState as composePromptFromState2
+ } from "@elizaos/core";
+
+ // src/types/index.ts
+ var LongTermMemoryCategory;
+ ((LongTermMemoryCategory2) => {
+ LongTermMemoryCategory2["IDENTITY"] = "identity";
+ LongTermMemoryCategory2["EXPERTISE"] = "expertise";
+ LongTermMemoryCategory2["PROJECTS"] = "projects";
+ LongTermMemoryCategory2["PREFERENCES"] = "preferences";
+ LongTermMemoryCategory2["DATA_SOURCES"] = "data_sources";
+ LongTermMemoryCategory2["GOALS"] = "goals";
+ LongTermMemoryCategory2["CONSTRAINTS"] = "constraints";
+ LongTermMemoryCategory2["DEFINITIONS"] = "definitions";
+ LongTermMemoryCategory2["BEHAVIORAL_PATTERNS"] = "behavioral_patterns";
+ })(LongTermMemoryCategory ||= {});
+
+ // src/evaluators/long-term-extraction.ts
+ var extractionTemplate = `# Task: Extract Long-Term Memory
+
+ You are analyzing a conversation to extract facts that should be remembered long-term about the user.
+
+ # Recent Messages
+ {{recentMessages}}
+
+ # Current Long-Term Memories
+ {{existingMemories}}
+
+ # Memory Categories
+ 1. **identity**: User's name, role, identity (e.g., "I'm a data scientist")
+ 2. **expertise**: User's skills, knowledge domains, or unfamiliarity with topics
+ 3. **projects**: Ongoing projects, past interactions, recurring topics
+ 4. **preferences**: Communication style, format preferences, verbosity, etc.
+ 5. **data_sources**: Frequently used files, databases, APIs
+ 6. **goals**: Broader intentions (e.g., "preparing for interview")
+ 7. **constraints**: User-defined rules or limitations
+ 8. **definitions**: Custom terms, acronyms, glossaries
+ 9. **behavioral_patterns**: How the user tends to interact
+
+ # Instructions
+ Extract any NEW information that should be remembered long-term. For each item:
+ - Determine which category it belongs to
+ - Write a clear, factual statement
+ - Assess confidence (0.0 to 1.0)
+ - Only include information explicitly stated or strongly implied
+
+ If there are no new long-term facts to extract, respond with <memories></memories>
+
+ Respond in this XML format:
+ <memories>
+ <memory>
+ <category>identity</category>
+ <content>User is a software engineer specializing in backend development</content>
+ <confidence>0.95</confidence>
+ </memory>
+ <memory>
+ <category>preferences</category>
+ <content>Prefers code examples over lengthy explanations</content>
+ <confidence>0.85</confidence>
+ </memory>
+ </memories>`;
+ function parseMemoryExtractionXML(xml) {
+ const memoryMatches = xml.matchAll(/<memory>[\s\S]*?<category>(.*?)<\/category>[\s\S]*?<content>(.*?)<\/content>[\s\S]*?<confidence>(.*?)<\/confidence>[\s\S]*?<\/memory>/g);
+ const extractions = [];
+ for (const match of memoryMatches) {
+ const category = match[1].trim();
+ const content = match[2].trim();
+ const confidence = parseFloat(match[3].trim());
+ if (!Object.values(LongTermMemoryCategory).includes(category)) {
+ logger3.warn(`Invalid memory category: ${category}`);
+ continue;
+ }
+ if (content && !isNaN(confidence)) {
+ extractions.push({ category, content, confidence });
+ }
+ }
+ return extractions;
+ }
+ var longTermExtractionEvaluator = {
+ name: "LONG_TERM_MEMORY_EXTRACTION",
+ description: "Extracts long-term facts about users from conversations",
+ similes: ["MEMORY_EXTRACTION", "FACT_LEARNING", "USER_PROFILING"],
+ alwaysRun: true,
+ validate: async (runtime, message) => {
+ logger3.debug(`Validating long-term memory extraction for message: ${message.content?.text}`);
+ if (message.entityId === runtime.agentId) {
+ logger3.debug("Skipping long-term memory extraction for agent's own message");
+ return false;
+ }
+ if (!message.content?.text) {
+ logger3.debug("Skipping long-term memory extraction for message without text");
+ return false;
+ }
+ const memoryService = runtime.getService("memory");
+ if (!memoryService) {
+ logger3.debug("MemoryService not found");
+ return false;
+ }
+ const config = memoryService.getConfig();
+ if (!config.longTermExtractionEnabled) {
+ logger3.debug("Long-term memory extraction is disabled");
+ return false;
+ }
+ const currentMessageCount = await runtime.countMemories(message.roomId, false, "messages");
+ const shouldRun = await memoryService.shouldRunExtraction(message.entityId, message.roomId, currentMessageCount);
+ logger3.debug(`Should run extraction: ${shouldRun}`);
+ return shouldRun;
+ },
+ handler: async (runtime, message) => {
+ const memoryService = runtime.getService("memory");
+ if (!memoryService) {
+ logger3.error("MemoryService not found");
+ return;
+ }
+ const config = memoryService.getConfig();
+ const { entityId, roomId } = message;
+ try {
+ logger3.info(`Extracting long-term memories for entity ${entityId}`);
+ const recentMessages = await runtime.getMemories({
+ tableName: "messages",
+ roomId,
+ count: 20,
+ unique: false
+ });
+ const formattedMessages = recentMessages.sort((a, b) => (a.createdAt || 0) - (b.createdAt || 0)).map((msg) => {
+ const sender = msg.entityId === runtime.agentId ? runtime.character.name : "User";
+ return `${sender}: ${msg.content.text || "[non-text message]"}`;
+ }).join(`
+ `);
+ const existingMemories = await memoryService.getLongTermMemories(entityId, undefined, 30);
+ const formattedExisting = existingMemories.length > 0 ? existingMemories.map((m) => `[${m.category}] ${m.content} (confidence: ${m.confidence})`).join(`
+ `) : "None yet";
+ const state = await runtime.composeState(message);
+ const prompt = composePromptFromState2({
+ state: {
+ ...state,
+ recentMessages: formattedMessages,
+ existingMemories: formattedExisting
+ },
+ template: extractionTemplate
+ });
+ const response = await runtime.useModel(ModelType2.TEXT_LARGE, {
+ prompt
+ });
+ const extractions = parseMemoryExtractionXML(response);
+ logger3.info(`Extracted ${extractions.length} long-term memories`);
+ for (const extraction of extractions) {
+ if (extraction.confidence >= config.longTermConfidenceThreshold) {
+ await memoryService.storeLongTermMemory({
+ agentId: runtime.agentId,
+ entityId,
+ category: extraction.category,
+ content: extraction.content,
+ confidence: extraction.confidence,
+ source: "conversation",
+ metadata: {
+ roomId,
+ extractedAt: new Date().toISOString()
+ }
+ });
+ logger3.info(`Stored long-term memory: [${extraction.category}] ${extraction.content.substring(0, 50)}...`);
+ } else {
+ logger3.debug(`Skipped low-confidence memory: ${extraction.content} (confidence: ${extraction.confidence})`);
+ }
+ }
+ const currentMessageCount = await runtime.countMemories(roomId, false, "messages");
+ await memoryService.setLastExtractionCheckpoint(entityId, roomId, currentMessageCount);
+ logger3.debug(`Updated extraction checkpoint to ${currentMessageCount} for entity ${entityId} in room ${roomId}`);
+ } catch (error) {
+ logger3.error({ error }, "Error during long-term memory extraction:");
+ }
+ },
+ examples: []
+ };
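
How often this evaluator fires is governed by MemoryService.shouldRunExtraction and the checkpoint the handler persists at the end. A worked example, using the default longTermExtractionInterval of 5:

// currentCheckpoint = Math.floor(messageCount / 5) * 5; extraction runs when
// messageCount >= 5 and currentCheckpoint > lastCheckpoint, and the checkpoint
// is then saved as the raw message count, giving roughly one run per interval:
//
//   messageCount = 4  -> checkpoint 0;  4 < 5                 -> skip
//   messageCount = 7  -> checkpoint 5;  5 > lastCheckpoint 0  -> extract; lastCheckpoint := 7
//   messageCount = 9  -> checkpoint 5;  5 > 7 is false        -> skip
//   messageCount = 12 -> checkpoint 10; 10 > 7                -> extract; lastCheckpoint := 12
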
+
+ // src/providers/short-term-memory.ts
+ import {
+ logger as logger4,
+ addHeader
+ } from "@elizaos/core";
+ var shortTermMemoryProvider = {
+ name: "SHORT_TERM_MEMORY",
+ description: "Recent conversation summaries to maintain context efficiently",
+ position: 95,
+ get: async (runtime, message, _state) => {
+ try {
+ const memoryService = runtime.getService("memory");
+ if (!memoryService) {
+ return {
+ data: { summaries: [] },
+ values: { sessionSummaries: "" },
+ text: ""
+ };
+ }
+ const { roomId } = message;
+ const summaries = await memoryService.getSessionSummaries(roomId, 3);
+ if (summaries.length === 0) {
+ return {
+ data: { summaries: [] },
+ values: { sessionSummaries: "" },
+ text: ""
+ };
+ }
+ const formattedSummaries = summaries.reverse().map((summary, index4) => {
+ const messageRange = `${summary.messageCount} messages`;
+ const timeRange = new Date(summary.startTime).toLocaleDateString();
+ let text5 = `**Session ${index4 + 1}** (${messageRange}, ${timeRange})
+ `;
+ text5 += summary.summary;
+ if (summary.topics && summary.topics.length > 0) {
+ text5 += `
+ *Topics: ${summary.topics.join(", ")}*`;
+ }
+ return text5;
+ }).join(`
+
+ `);
+ const text4 = addHeader("# Previous Conversation Context", formattedSummaries);
+ return {
+ data: { summaries },
+ values: { sessionSummaries: text4 },
+ text: text4
+ };
+ } catch (error) {
+ logger4.error({ error }, "Error in shortTermMemoryProvider:");
+ return {
+ data: { summaries: [] },
+ values: { sessionSummaries: "" },
+ text: ""
+ };
+ }
+ }
+ };
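
Providers return a { data, values, text } result that the runtime folds into prompt state. A rough sketch of what the provider above yields when a room already has summaries; the text content is illustrative:

// Approximate shape of shortTermMemoryProvider.get(runtime, message, state)
// when summaries exist (the runtime normally invokes this during composeState):
// {
//   data:   { summaries: [ /* up to 3 most recent SessionSummary rows */ ] },
//   values: { sessionSummaries: "# Previous Conversation Context ..." },
//   text:   "# Previous Conversation Context ..."
// }
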
+
+ // src/providers/long-term-memory.ts
+ import {
+ logger as logger5,
+ addHeader as addHeader2
+ } from "@elizaos/core";
+ var longTermMemoryProvider = {
+ name: "LONG_TERM_MEMORY",
+ description: "Persistent facts and preferences about the user",
+ position: 50,
+ get: async (runtime, message, _state) => {
+ try {
+ const memoryService = runtime.getService("memory");
+ if (!memoryService) {
+ return {
+ data: { memories: [] },
+ values: { longTermMemories: "" },
+ text: ""
+ };
+ }
+ const { entityId } = message;
+ if (entityId === runtime.agentId) {
+ return {
+ data: { memories: [] },
+ values: { longTermMemories: "" },
+ text: ""
+ };
+ }
+ const memories = await memoryService.getLongTermMemories(entityId, undefined, 25);
+ if (memories.length === 0) {
+ return {
+ data: { memories: [] },
+ values: { longTermMemories: "" },
+ text: ""
+ };
+ }
+ const formattedMemories = await memoryService.getFormattedLongTermMemories(entityId);
+ const text4 = addHeader2("# What I Know About You", formattedMemories);
+ const categoryCounts = new Map;
+ for (const memory of memories) {
+ const count = categoryCounts.get(memory.category) || 0;
+ categoryCounts.set(memory.category, count + 1);
+ }
+ const categoryList = Array.from(categoryCounts.entries()).map(([cat, count]) => `${cat}: ${count}`).join(", ");
+ return {
+ data: {
+ memories,
+ categoryCounts: Object.fromEntries(categoryCounts)
+ },
+ values: {
+ longTermMemories: text4,
+ memoryCategories: categoryList
+ },
+ text: text4
+ };
+ } catch (error) {
+ logger5.error({ error }, "Error in longTermMemoryProvider:");
+ return {
+ data: { memories: [] },
+ values: { longTermMemories: "" },
+ text: ""
+ };
+ }
+ }
+ };
+
+ // src/index.ts
+ var memoryPlugin = {
+ name: "memory",
+ description: "Advanced memory management with conversation summarization and long-term persistent memory",
+ services: [MemoryService],
+ evaluators: [summarizationEvaluator, longTermExtractionEvaluator],
+ providers: [longTermMemoryProvider, shortTermMemoryProvider],
+ schema: exports_schemas
+ };
+ var src_default = memoryPlugin;
+ export {
+ sessionSummaries,
+ memoryPlugin,
+ memoryAccessLogs,
+ longTermMemories,
+ src_default as default,
+ MemoryService,
+ LongTermMemoryCategory
+ };
+
+ //# debugId=97E9763BD19A053564756E2164756E21
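
The memoryPlugin object exported from src/index.ts is what a host agent registers. A registration sketch; the setting names are exactly those read in MemoryService.initialize, but how they reach runtime.getSetting (environment variables, character settings, secrets) depends on the host project:

import memoryPlugin from "@elizaos/plugin-memory";

// Sketch only: wiring the plugin into an elizaOS agent definition.
export const character = {
  name: "Assistant",
  plugins: [memoryPlugin],
  settings: {
    MEMORY_SUMMARIZATION_THRESHOLD: "8", // shortTermSummarizationThreshold
    MEMORY_RETAIN_RECENT: "10", // shortTermRetainRecent
    MEMORY_LONG_TERM_ENABLED: "true", // longTermExtractionEnabled
    MEMORY_CONFIDENCE_THRESHOLD: "0.7", // longTermConfidenceThreshold
  },
};
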