@elizaos/plugin-memory 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1010 @@
+ var __defProp = Object.defineProperty;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __moduleCache = /* @__PURE__ */ new WeakMap;
+ var __toCommonJS = (from) => {
+ var entry = __moduleCache.get(from), desc;
+ if (entry)
+ return entry;
+ entry = __defProp({}, "__esModule", { value: true });
+ if (from && typeof from === "object" || typeof from === "function")
+ __getOwnPropNames(from).map((key) => !__hasOwnProp.call(entry, key) && __defProp(entry, key, {
+ get: () => from[key],
+ enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
+ }));
+ __moduleCache.set(from, entry);
+ return entry;
+ };
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, {
+ get: all[name],
+ enumerable: true,
+ configurable: true,
+ set: (newValue) => all[name] = () => newValue
+ });
+ };
+
+ // src/index.node.ts
+ var exports_index_node = {};
+ __export(exports_index_node, {
+ sessionSummaries: () => sessionSummaries,
+ memoryPlugin: () => memoryPlugin,
+ memoryAccessLogs: () => memoryAccessLogs,
+ longTermMemories: () => longTermMemories,
+ default: () => src_default,
+ MemoryService: () => MemoryService,
+ LongTermMemoryCategory: () => LongTermMemoryCategory
+ });
+ module.exports = __toCommonJS(exports_index_node);
+
+ // src/services/memory-service.ts
+ var import_core = require("@elizaos/core");
+ var import_drizzle_orm4 = require("drizzle-orm");
+
+ // src/schemas/index.ts
+ var exports_schemas = {};
+ __export(exports_schemas, {
+ sessionSummaries: () => sessionSummaries,
+ memoryAccessLogs: () => memoryAccessLogs,
+ longTermMemories: () => longTermMemories
+ });
+
+ // src/schemas/long-term-memories.ts
+ var import_drizzle_orm = require("drizzle-orm");
+ var import_pg_core = require("drizzle-orm/pg-core");
+ var longTermMemories = import_pg_core.pgTable("long_term_memories", {
+ id: import_pg_core.varchar("id", { length: 36 }).primaryKey(),
+ agentId: import_pg_core.varchar("agent_id", { length: 36 }).notNull(),
+ entityId: import_pg_core.varchar("entity_id", { length: 36 }).notNull(),
+ category: import_pg_core.text("category").notNull(),
+ content: import_pg_core.text("content").notNull(),
+ metadata: import_pg_core.jsonb("metadata"),
+ embedding: import_pg_core.real("embedding").array(),
+ confidence: import_pg_core.real("confidence").default(1),
+ source: import_pg_core.text("source"),
+ createdAt: import_pg_core.timestamp("created_at").default(import_drizzle_orm.sql`now()`).notNull(),
+ updatedAt: import_pg_core.timestamp("updated_at").default(import_drizzle_orm.sql`now()`).notNull(),
+ lastAccessedAt: import_pg_core.timestamp("last_accessed_at"),
+ accessCount: import_pg_core.integer("access_count").default(0)
+ }, (table) => ({
+ agentEntityIdx: import_pg_core.index("long_term_memories_agent_entity_idx").on(table.agentId, table.entityId),
+ categoryIdx: import_pg_core.index("long_term_memories_category_idx").on(table.category),
+ confidenceIdx: import_pg_core.index("long_term_memories_confidence_idx").on(table.confidence),
+ createdAtIdx: import_pg_core.index("long_term_memories_created_at_idx").on(table.createdAt)
+ }));
+ // src/schemas/session-summaries.ts
+ var import_drizzle_orm2 = require("drizzle-orm");
+ var import_pg_core2 = require("drizzle-orm/pg-core");
+ var sessionSummaries = import_pg_core2.pgTable("session_summaries", {
+ id: import_pg_core2.varchar("id", { length: 36 }).primaryKey(),
+ agentId: import_pg_core2.varchar("agent_id", { length: 36 }).notNull(),
+ roomId: import_pg_core2.varchar("room_id", { length: 36 }).notNull(),
+ entityId: import_pg_core2.varchar("entity_id", { length: 36 }),
+ summary: import_pg_core2.text("summary").notNull(),
+ messageCount: import_pg_core2.integer("message_count").notNull(),
+ lastMessageOffset: import_pg_core2.integer("last_message_offset").notNull().default(0),
+ startTime: import_pg_core2.timestamp("start_time").notNull(),
+ endTime: import_pg_core2.timestamp("end_time").notNull(),
+ topics: import_pg_core2.jsonb("topics"),
+ metadata: import_pg_core2.jsonb("metadata"),
+ embedding: import_pg_core2.real("embedding").array(),
+ createdAt: import_pg_core2.timestamp("created_at").default(import_drizzle_orm2.sql`now()`).notNull(),
+ updatedAt: import_pg_core2.timestamp("updated_at").default(import_drizzle_orm2.sql`now()`).notNull()
+ }, (table) => ({
+ agentRoomIdx: import_pg_core2.index("session_summaries_agent_room_idx").on(table.agentId, table.roomId),
+ entityIdx: import_pg_core2.index("session_summaries_entity_idx").on(table.entityId),
+ startTimeIdx: import_pg_core2.index("session_summaries_start_time_idx").on(table.startTime)
+ }));
+ // src/schemas/memory-access-logs.ts
+ var import_drizzle_orm3 = require("drizzle-orm");
+ var import_pg_core3 = require("drizzle-orm/pg-core");
+ var memoryAccessLogs = import_pg_core3.pgTable("memory_access_logs", {
+ id: import_pg_core3.varchar("id", { length: 36 }).primaryKey(),
+ agentId: import_pg_core3.varchar("agent_id", { length: 36 }).notNull(),
+ memoryId: import_pg_core3.varchar("memory_id", { length: 36 }).notNull(),
+ memoryType: import_pg_core3.text("memory_type").notNull(),
+ accessedAt: import_pg_core3.timestamp("accessed_at").default(import_drizzle_orm3.sql`now()`).notNull(),
+ roomId: import_pg_core3.varchar("room_id", { length: 36 }),
+ relevanceScore: import_pg_core3.real("relevance_score"),
+ wasUseful: import_pg_core3.integer("was_useful")
+ }, (table) => ({
+ memoryIdx: import_pg_core3.index("memory_access_logs_memory_idx").on(table.memoryId),
+ agentIdx: import_pg_core3.index("memory_access_logs_agent_idx").on(table.agentId),
+ accessedAtIdx: import_pg_core3.index("memory_access_logs_accessed_at_idx").on(table.accessedAt)
+ }));
+ // src/services/memory-service.ts
+ class MemoryService extends import_core.Service {
+ static serviceType = "memory";
+ sessionMessageCounts;
+ memoryConfig;
+ lastExtractionCheckpoints;
+ capabilityDescription = "Advanced memory management with short-term summarization and long-term persistent facts";
+ constructor(runtime) {
+ super(runtime);
+ this.sessionMessageCounts = new Map;
+ this.lastExtractionCheckpoints = new Map;
+ this.memoryConfig = {
+ shortTermSummarizationThreshold: 5,
+ shortTermRetainRecent: 10,
+ longTermExtractionEnabled: true,
+ longTermVectorSearchEnabled: false,
+ longTermConfidenceThreshold: 0.7,
+ longTermExtractionInterval: 5,
+ summaryModelType: "TEXT_LARGE",
+ summaryMaxTokens: 2500
+ };
+ }
+ static async start(runtime) {
+ const service = new MemoryService(runtime);
+ await service.initialize(runtime);
+ return service;
+ }
+ async stop() {
+ import_core.logger.info("MemoryService stopped");
+ }
+ async initialize(runtime) {
+ this.runtime = runtime;
+ const threshold = runtime.getSetting("MEMORY_SUMMARIZATION_THRESHOLD");
+ if (threshold) {
+ this.memoryConfig.shortTermSummarizationThreshold = parseInt(threshold, 10);
+ }
+ const retainRecent = runtime.getSetting("MEMORY_RETAIN_RECENT");
+ if (retainRecent) {
+ this.memoryConfig.shortTermRetainRecent = parseInt(retainRecent, 10);
+ }
+ const longTermEnabled = runtime.getSetting("MEMORY_LONG_TERM_ENABLED");
+ if (longTermEnabled === "false") {
+ this.memoryConfig.longTermExtractionEnabled = false;
+ } else if (longTermEnabled === "true") {
+ this.memoryConfig.longTermExtractionEnabled = true;
+ }
+ const confidenceThreshold = runtime.getSetting("MEMORY_CONFIDENCE_THRESHOLD");
+ if (confidenceThreshold) {
+ this.memoryConfig.longTermConfidenceThreshold = parseFloat(confidenceThreshold);
+ }
+ import_core.logger.info({
+ summarizationThreshold: this.memoryConfig.shortTermSummarizationThreshold,
+ retainRecent: this.memoryConfig.shortTermRetainRecent,
+ longTermEnabled: this.memoryConfig.longTermExtractionEnabled,
+ extractionInterval: this.memoryConfig.longTermExtractionInterval,
+ confidenceThreshold: this.memoryConfig.longTermConfidenceThreshold
+ }, "MemoryService initialized");
+ }
+ getDb() {
+ const db = this.runtime.db;
+ if (!db) {
+ throw new Error("Database not available");
+ }
+ return db;
+ }
+ getConfig() {
+ return { ...this.memoryConfig };
+ }
+ updateConfig(updates) {
+ this.memoryConfig = { ...this.memoryConfig, ...updates };
+ }
+ incrementMessageCount(roomId) {
+ const current = this.sessionMessageCounts.get(roomId) || 0;
+ const newCount = current + 1;
+ this.sessionMessageCounts.set(roomId, newCount);
+ return newCount;
+ }
+ resetMessageCount(roomId) {
+ this.sessionMessageCounts.set(roomId, 0);
+ }
+ async shouldSummarize(roomId) {
+ const count = await this.runtime.countMemories(roomId, false, "messages");
+ return count >= this.memoryConfig.shortTermSummarizationThreshold;
+ }
+ getExtractionKey(entityId, roomId) {
+ return `memory:extraction:${entityId}:${roomId}`;
+ }
+ async getLastExtractionCheckpoint(entityId, roomId) {
+ const key = this.getExtractionKey(entityId, roomId);
+ const cached = this.lastExtractionCheckpoints.get(key);
+ if (cached !== undefined) {
+ return cached;
+ }
+ try {
+ const checkpoint = await this.runtime.getCache(key);
+ const messageCount = checkpoint ?? 0;
+ this.lastExtractionCheckpoints.set(key, messageCount);
+ return messageCount;
+ } catch (error) {
+ import_core.logger.warn({ error }, "Failed to get extraction checkpoint from cache");
+ return 0;
+ }
+ }
+ async setLastExtractionCheckpoint(entityId, roomId, messageCount) {
+ const key = this.getExtractionKey(entityId, roomId);
+ this.lastExtractionCheckpoints.set(key, messageCount);
+ try {
+ await this.runtime.setCache(key, messageCount);
+ import_core.logger.debug(`Set extraction checkpoint for ${entityId} in room ${roomId} at message count ${messageCount}`);
+ } catch (error) {
+ import_core.logger.error({ error }, "Failed to persist extraction checkpoint to cache");
+ }
+ }
+ async shouldRunExtraction(entityId, roomId, currentMessageCount) {
+ const interval = this.memoryConfig.longTermExtractionInterval;
+ const lastCheckpoint = await this.getLastExtractionCheckpoint(entityId, roomId);
+ const currentCheckpoint = Math.floor(currentMessageCount / interval) * interval;
+ const shouldRun = currentMessageCount >= interval && currentCheckpoint > lastCheckpoint;
+ import_core.logger.debug({
+ entityId,
+ roomId,
+ currentMessageCount,
+ interval,
+ lastCheckpoint,
+ currentCheckpoint,
+ shouldRun
+ }, "Extraction check");
+ return shouldRun;
+ }
+ async storeLongTermMemory(memory) {
+ const db = this.getDb();
+ const id = crypto.randomUUID();
+ const now = new Date;
+ const newMemory = {
+ id,
+ createdAt: now,
+ updatedAt: now,
+ accessCount: 0,
+ ...memory
+ };
+ try {
+ await db.insert(longTermMemories).values({
+ id: newMemory.id,
+ agentId: newMemory.agentId,
+ entityId: newMemory.entityId,
+ category: newMemory.category,
+ content: newMemory.content,
+ metadata: newMemory.metadata || {},
+ embedding: newMemory.embedding,
+ confidence: newMemory.confidence,
+ source: newMemory.source,
+ accessCount: newMemory.accessCount,
+ createdAt: now,
+ updatedAt: now,
+ lastAccessedAt: newMemory.lastAccessedAt
+ });
+ } catch (error) {
+ import_core.logger.error({ error }, "Failed to store long-term memory");
+ throw error;
+ }
+ import_core.logger.info(`Stored long-term memory: ${newMemory.category} for entity ${newMemory.entityId}`);
+ return newMemory;
+ }
+ async getLongTermMemories(entityId, category, limit = 10) {
+ const db = this.getDb();
+ const conditions = [
+ import_drizzle_orm4.eq(longTermMemories.agentId, this.runtime.agentId),
+ import_drizzle_orm4.eq(longTermMemories.entityId, entityId)
+ ];
+ if (category) {
+ conditions.push(import_drizzle_orm4.eq(longTermMemories.category, category));
+ }
+ const results = await db.select().from(longTermMemories).where(import_drizzle_orm4.and(...conditions)).orderBy(import_drizzle_orm4.desc(longTermMemories.confidence), import_drizzle_orm4.desc(longTermMemories.updatedAt)).limit(limit);
+ return results.map((row) => ({
+ id: row.id,
+ agentId: row.agentId,
+ entityId: row.entityId,
+ category: row.category,
+ content: row.content,
+ metadata: row.metadata,
+ embedding: row.embedding,
+ confidence: row.confidence,
+ source: row.source,
+ createdAt: row.createdAt,
+ updatedAt: row.updatedAt,
+ lastAccessedAt: row.lastAccessedAt,
+ accessCount: row.accessCount
+ }));
+ }
+ async updateLongTermMemory(id, updates) {
+ const db = this.getDb();
+ const updateData = {
+ updatedAt: new Date
+ };
+ if (updates.content !== undefined) {
+ updateData.content = updates.content;
+ }
+ if (updates.metadata !== undefined) {
+ updateData.metadata = updates.metadata;
+ }
+ if (updates.confidence !== undefined) {
+ updateData.confidence = updates.confidence;
+ }
+ if (updates.embedding !== undefined) {
+ updateData.embedding = updates.embedding;
+ }
+ if (updates.lastAccessedAt !== undefined) {
+ updateData.lastAccessedAt = updates.lastAccessedAt;
+ }
+ if (updates.accessCount !== undefined) {
+ updateData.accessCount = updates.accessCount;
+ }
+ await db.update(longTermMemories).set(updateData).where(import_drizzle_orm4.eq(longTermMemories.id, id));
+ import_core.logger.info(`Updated long-term memory: ${id}`);
+ }
+ async deleteLongTermMemory(id) {
+ const db = this.getDb();
+ await db.delete(longTermMemories).where(import_drizzle_orm4.eq(longTermMemories.id, id));
+ import_core.logger.info(`Deleted long-term memory: ${id}`);
+ }
+ async getCurrentSessionSummary(roomId) {
+ const db = this.getDb();
+ const results = await db.select().from(sessionSummaries).where(import_drizzle_orm4.and(import_drizzle_orm4.eq(sessionSummaries.agentId, this.runtime.agentId), import_drizzle_orm4.eq(sessionSummaries.roomId, roomId))).orderBy(import_drizzle_orm4.desc(sessionSummaries.updatedAt)).limit(1);
+ if (results.length === 0) {
+ return null;
+ }
+ const row = results[0];
+ return {
+ id: row.id,
+ agentId: row.agentId,
+ roomId: row.roomId,
+ entityId: row.entityId,
+ summary: row.summary,
+ messageCount: row.messageCount,
+ lastMessageOffset: row.lastMessageOffset,
+ startTime: row.startTime,
+ endTime: row.endTime,
+ topics: row.topics || [],
+ metadata: row.metadata,
+ embedding: row.embedding,
+ createdAt: row.createdAt,
+ updatedAt: row.updatedAt
+ };
+ }
+ async storeSessionSummary(summary) {
+ const db = this.getDb();
+ const id = crypto.randomUUID();
+ const now = new Date;
+ const newSummary = {
+ id,
+ createdAt: now,
+ updatedAt: now,
+ ...summary
+ };
+ await db.insert(sessionSummaries).values({
+ id: newSummary.id,
+ agentId: newSummary.agentId,
+ roomId: newSummary.roomId,
+ entityId: newSummary.entityId || null,
+ summary: newSummary.summary,
+ messageCount: newSummary.messageCount,
+ lastMessageOffset: newSummary.lastMessageOffset,
+ startTime: newSummary.startTime,
+ endTime: newSummary.endTime,
+ topics: newSummary.topics || [],
+ metadata: newSummary.metadata || {},
+ embedding: newSummary.embedding,
+ createdAt: now,
+ updatedAt: now
+ });
+ import_core.logger.info(`Stored session summary for room ${newSummary.roomId}`);
+ return newSummary;
+ }
+ async updateSessionSummary(id, updates) {
+ const db = this.getDb();
+ const updateData = {
+ updatedAt: new Date
+ };
+ if (updates.summary !== undefined) {
+ updateData.summary = updates.summary;
+ }
+ if (updates.messageCount !== undefined) {
+ updateData.messageCount = updates.messageCount;
+ }
+ if (updates.lastMessageOffset !== undefined) {
+ updateData.lastMessageOffset = updates.lastMessageOffset;
+ }
+ if (updates.endTime !== undefined) {
+ updateData.endTime = updates.endTime;
+ }
+ if (updates.topics !== undefined) {
+ updateData.topics = updates.topics;
+ }
+ if (updates.metadata !== undefined) {
+ updateData.metadata = updates.metadata;
+ }
+ if (updates.embedding !== undefined) {
+ updateData.embedding = updates.embedding;
+ }
+ await db.update(sessionSummaries).set(updateData).where(import_drizzle_orm4.eq(sessionSummaries.id, id));
+ import_core.logger.info(`Updated session summary: ${id}`);
+ }
+ async getSessionSummaries(roomId, limit = 5) {
+ const db = this.getDb();
+ const results = await db.select().from(sessionSummaries).where(import_drizzle_orm4.and(import_drizzle_orm4.eq(sessionSummaries.agentId, this.runtime.agentId), import_drizzle_orm4.eq(sessionSummaries.roomId, roomId))).orderBy(import_drizzle_orm4.desc(sessionSummaries.updatedAt)).limit(limit);
+ return results.map((row) => ({
+ id: row.id,
+ agentId: row.agentId,
+ roomId: row.roomId,
+ entityId: row.entityId,
+ summary: row.summary,
+ messageCount: row.messageCount,
+ lastMessageOffset: row.lastMessageOffset,
+ startTime: row.startTime,
+ endTime: row.endTime,
+ topics: row.topics || [],
+ metadata: row.metadata,
+ embedding: row.embedding,
+ createdAt: row.createdAt,
+ updatedAt: row.updatedAt
+ }));
+ }
+ async searchLongTermMemories(entityId, queryEmbedding, limit = 5, matchThreshold = 0.7) {
+ if (!this.memoryConfig.longTermVectorSearchEnabled) {
+ import_core.logger.warn("Vector search is not enabled, falling back to recent memories");
+ return this.getLongTermMemories(entityId, undefined, limit);
+ }
+ const db = this.getDb();
+ try {
+ const cleanVector = queryEmbedding.map((n) => Number.isFinite(n) ? Number(n.toFixed(6)) : 0);
+ const similarity = import_drizzle_orm4.sql`1 - (${import_drizzle_orm4.cosineDistance(longTermMemories.embedding, cleanVector)})`;
+ const conditions = [
+ import_drizzle_orm4.eq(longTermMemories.agentId, this.runtime.agentId),
+ import_drizzle_orm4.eq(longTermMemories.entityId, entityId),
+ import_drizzle_orm4.sql`${longTermMemories.embedding} IS NOT NULL`
+ ];
+ if (matchThreshold > 0) {
+ conditions.push(import_drizzle_orm4.gte(similarity, matchThreshold));
+ }
+ const results = await db.select({
+ memory: longTermMemories,
+ similarity
+ }).from(longTermMemories).where(import_drizzle_orm4.and(...conditions)).orderBy(import_drizzle_orm4.desc(similarity)).limit(limit);
+ return results.map((row) => ({
+ id: row.memory.id,
+ agentId: row.memory.agentId,
+ entityId: row.memory.entityId,
+ category: row.memory.category,
+ content: row.memory.content,
+ metadata: row.memory.metadata,
+ embedding: row.memory.embedding,
+ confidence: row.memory.confidence,
+ source: row.memory.source,
+ createdAt: row.memory.createdAt,
+ updatedAt: row.memory.updatedAt,
+ lastAccessedAt: row.memory.lastAccessedAt,
+ accessCount: row.memory.accessCount,
+ similarity: row.similarity
+ }));
+ } catch (error) {
+ import_core.logger.warn({ error }, "Vector search failed, falling back to recent memories");
+ return this.getLongTermMemories(entityId, undefined, limit);
+ }
+ }
+ async getFormattedLongTermMemories(entityId) {
+ const memories = await this.getLongTermMemories(entityId, undefined, 20);
+ if (memories.length === 0) {
+ return "";
+ }
+ const grouped = new Map;
+ for (const memory of memories) {
+ if (!grouped.has(memory.category)) {
+ grouped.set(memory.category, []);
+ }
+ grouped.get(memory.category)?.push(memory);
+ }
+ const sections = [];
+ for (const [category, categoryMemories] of grouped.entries()) {
+ const categoryName = category.split("_").map((word) => word.charAt(0).toUpperCase() + word.slice(1)).join(" ");
+ const items = categoryMemories.map((m) => `- ${m.content}`).join(`
+ `);
+ sections.push(`**${categoryName}**:
+ ${items}`);
+ }
+ return sections.join(`
+
+ `);
+ }
+ }
+
+ // src/evaluators/summarization.ts
+ var import_core2 = require("@elizaos/core");
+ var initialSummarizationTemplate = `# Task: Summarize Conversation
+
+ You are analyzing a conversation to create a concise summary that captures the key points, topics, and important details.
+
+ # Recent Messages
+ {{recentMessages}}
+
+ # Instructions
+ Generate a summary that:
+ 1. Captures the main topics discussed
+ 2. Highlights key information shared
+ 3. Notes any decisions made or questions asked
+ 4. Maintains context for future reference
+ 5. Is concise but comprehensive
+
+ **IMPORTANT**: Keep the summary under 2500 tokens. Be comprehensive but concise.
+
+ Also extract:
+ - **Topics**: List of main topics discussed (comma-separated)
+ - **Key Points**: Important facts or decisions (bullet points)
+
+ Respond in this XML format:
+ <summary>
+ <text>Your comprehensive summary here</text>
+ <topics>topic1, topic2, topic3</topics>
+ <keyPoints>
+ <point>First key point</point>
+ <point>Second key point</point>
+ </keyPoints>
+ </summary>`;
+ var updateSummarizationTemplate = `# Task: Update and Condense Conversation Summary
+
+ You are updating an existing conversation summary with new messages, while keeping the total summary concise.
+
+ # Existing Summary
+ {{existingSummary}}
+
+ # Existing Topics
+ {{existingTopics}}
+
+ # New Messages Since Last Summary
+ {{newMessages}}
+
+ # Instructions
+ Update the summary by:
+ 1. Merging the existing summary with insights from the new messages
+ 2. Removing redundant or less important details to stay under the token limit
+ 3. Keeping the most important context and decisions
+ 4. Adding new topics if they emerge
+ 5. **CRITICAL**: Keep the ENTIRE updated summary under 2500 tokens
+
+ The goal is a rolling summary that captures the essence of the conversation without growing indefinitely.
+
+ Respond in this XML format:
+ <summary>
+ <text>Your updated and condensed summary here</text>
+ <topics>topic1, topic2, topic3</topics>
+ <keyPoints>
+ <point>First key point</point>
+ <point>Second key point</point>
+ </keyPoints>
+ </summary>`;
+ function parseSummaryXML(xml) {
+ const summaryMatch = xml.match(/<text>([\s\S]*?)<\/text>/);
+ const topicsMatch = xml.match(/<topics>([\s\S]*?)<\/topics>/);
+ const keyPointsMatches = xml.matchAll(/<point>([\s\S]*?)<\/point>/g);
+ const summary = summaryMatch ? summaryMatch[1].trim() : "Summary not available";
+ const topics = topicsMatch ? topicsMatch[1].split(",").map((t) => t.trim()).filter(Boolean) : [];
+ const keyPoints = Array.from(keyPointsMatches).map((match) => match[1].trim());
+ return { summary, topics, keyPoints };
+ }
+ var summarizationEvaluator = {
+ name: "MEMORY_SUMMARIZATION",
+ description: "Summarizes conversations to optimize short-term memory",
+ similes: ["CONVERSATION_SUMMARY", "CONTEXT_COMPRESSION", "MEMORY_OPTIMIZATION"],
+ alwaysRun: true,
+ validate: async (runtime, message) => {
+ import_core2.logger.debug(`Validating summarization for message: ${message.content?.text}`);
+ if (!message.content?.text) {
+ return false;
+ }
+ const memoryService = runtime.getService("memory");
+ if (!memoryService) {
+ return false;
+ }
+ const config = memoryService.getConfig();
+ const currentMessageCount = await runtime.countMemories(message.roomId, false, "messages");
+ const shouldSummarize = currentMessageCount >= config.shortTermSummarizationThreshold;
+ import_core2.logger.debug({
+ roomId: message.roomId,
+ currentMessageCount,
+ threshold: config.shortTermSummarizationThreshold,
+ shouldSummarize
+ }, "Summarization check");
+ return shouldSummarize;
+ },
+ handler: async (runtime, message) => {
+ const memoryService = runtime.getService("memory");
+ if (!memoryService) {
+ import_core2.logger.error("MemoryService not found");
+ return;
+ }
+ const config = memoryService.getConfig();
+ const { roomId } = message;
+ try {
+ import_core2.logger.info(`Starting summarization for room ${roomId}`);
+ const existingSummary = await memoryService.getCurrentSessionSummary(roomId);
+ const lastOffset = existingSummary?.lastMessageOffset || 0;
+ const totalMessageCount = await runtime.countMemories(roomId, false, "messages");
+ const newMessages = await runtime.getMemories({
+ tableName: "messages",
+ roomId,
+ count: config.shortTermSummarizationThreshold,
+ unique: false,
+ start: lastOffset
+ });
+ if (newMessages.length === 0) {
+ import_core2.logger.debug("No new messages to summarize");
+ return;
+ }
+ const sortedMessages = newMessages.sort((a, b) => (a.createdAt || 0) - (b.createdAt || 0));
+ const formattedMessages = sortedMessages.map((msg) => {
+ const sender = msg.entityId === runtime.agentId ? runtime.character.name : "User";
+ return `${sender}: ${msg.content.text || "[non-text message]"}`;
+ }).join(`
+ `);
+ const state = await runtime.composeState(message);
+ let prompt;
+ let template;
+ if (existingSummary) {
+ template = updateSummarizationTemplate;
+ prompt = import_core2.composePromptFromState({
+ state: {
+ ...state,
+ existingSummary: existingSummary.summary,
+ existingTopics: existingSummary.topics?.join(", ") || "None",
+ newMessages: formattedMessages
+ },
+ template
+ });
+ } else {
+ template = initialSummarizationTemplate;
+ prompt = import_core2.composePromptFromState({
+ state: {
+ ...state,
+ recentMessages: formattedMessages
+ },
+ template
+ });
+ }
+ const response = await runtime.useModel(import_core2.ModelType.TEXT_LARGE, {
+ prompt,
+ maxTokens: config.summaryMaxTokens || 2500
+ });
+ const summaryResult = parseSummaryXML(response);
+ import_core2.logger.info(`${existingSummary ? "Updated" : "Generated"} summary: ${summaryResult.summary.substring(0, 100)}...`);
+ const newOffset = totalMessageCount;
+ const firstMessage = sortedMessages[0];
+ const lastMessage = sortedMessages[sortedMessages.length - 1];
+ const startTime = existingSummary ? existingSummary.startTime : firstMessage?.createdAt && firstMessage.createdAt > 0 ? new Date(firstMessage.createdAt) : new Date;
+ const endTime = lastMessage?.createdAt && lastMessage.createdAt > 0 ? new Date(lastMessage.createdAt) : new Date;
+ if (existingSummary) {
+ await memoryService.updateSessionSummary(existingSummary.id, {
+ summary: summaryResult.summary,
+ messageCount: existingSummary.messageCount + sortedMessages.length,
+ lastMessageOffset: newOffset,
+ endTime,
+ topics: summaryResult.topics,
+ metadata: {
+ keyPoints: summaryResult.keyPoints
+ }
+ });
+ import_core2.logger.info(`Updated summary for room ${roomId}: ${sortedMessages.length} new messages processed (offset: ${lastOffset} → ${newOffset})`);
+ } else {
+ await memoryService.storeSessionSummary({
+ agentId: runtime.agentId,
+ roomId,
+ entityId: message.entityId !== runtime.agentId ? message.entityId : undefined,
+ summary: summaryResult.summary,
+ messageCount: sortedMessages.length,
+ lastMessageOffset: newOffset,
+ startTime,
+ endTime,
+ topics: summaryResult.topics,
+ metadata: {
+ keyPoints: summaryResult.keyPoints
+ }
+ });
+ import_core2.logger.info(`Created new summary for room ${roomId}: ${sortedMessages.length} messages summarized (offset: 0 → ${newOffset})`);
+ }
+ } catch (error) {
+ import_core2.logger.error({ error }, "Error during summarization:");
+ }
+ },
+ examples: []
+ };
+
+ // src/evaluators/long-term-extraction.ts
+ var import_core3 = require("@elizaos/core");
+
+ // src/types/index.ts
+ var LongTermMemoryCategory;
+ ((LongTermMemoryCategory2) => {
+ LongTermMemoryCategory2["IDENTITY"] = "identity";
+ LongTermMemoryCategory2["EXPERTISE"] = "expertise";
+ LongTermMemoryCategory2["PROJECTS"] = "projects";
+ LongTermMemoryCategory2["PREFERENCES"] = "preferences";
+ LongTermMemoryCategory2["DATA_SOURCES"] = "data_sources";
+ LongTermMemoryCategory2["GOALS"] = "goals";
+ LongTermMemoryCategory2["CONSTRAINTS"] = "constraints";
+ LongTermMemoryCategory2["DEFINITIONS"] = "definitions";
+ LongTermMemoryCategory2["BEHAVIORAL_PATTERNS"] = "behavioral_patterns";
+ })(LongTermMemoryCategory ||= {});
+
+ // src/evaluators/long-term-extraction.ts
+ var extractionTemplate = `# Task: Extract Long-Term Memory
+
+ You are analyzing a conversation to extract facts that should be remembered long-term about the user.
+
+ # Recent Messages
+ {{recentMessages}}
+
+ # Current Long-Term Memories
+ {{existingMemories}}
+
+ # Memory Categories
+ 1. **identity**: User's name, role, identity (e.g., "I'm a data scientist")
+ 2. **expertise**: User's skills, knowledge domains, or unfamiliarity with topics
+ 3. **projects**: Ongoing projects, past interactions, recurring topics
+ 4. **preferences**: Communication style, format preferences, verbosity, etc.
+ 5. **data_sources**: Frequently used files, databases, APIs
+ 6. **goals**: Broader intentions (e.g., "preparing for interview")
+ 7. **constraints**: User-defined rules or limitations
+ 8. **definitions**: Custom terms, acronyms, glossaries
+ 9. **behavioral_patterns**: How the user tends to interact
+
+ # Instructions
+ Extract any NEW information that should be remembered long-term. For each item:
+ - Determine which category it belongs to
+ - Write a clear, factual statement
+ - Assess confidence (0.0 to 1.0)
+ - Only include information explicitly stated or strongly implied
+
+ If there are no new long-term facts to extract, respond with <memories></memories>
+
+ Respond in this XML format:
+ <memories>
+ <memory>
+ <category>identity</category>
+ <content>User is a software engineer specializing in backend development</content>
+ <confidence>0.95</confidence>
+ </memory>
+ <memory>
+ <category>preferences</category>
+ <content>Prefers code examples over lengthy explanations</content>
+ <confidence>0.85</confidence>
+ </memory>
+ </memories>`;
+ function parseMemoryExtractionXML(xml) {
+ const memoryMatches = xml.matchAll(/<memory>[\s\S]*?<category>(.*?)<\/category>[\s\S]*?<content>(.*?)<\/content>[\s\S]*?<confidence>(.*?)<\/confidence>[\s\S]*?<\/memory>/g);
+ const extractions = [];
+ for (const match of memoryMatches) {
+ const category = match[1].trim();
+ const content = match[2].trim();
+ const confidence = parseFloat(match[3].trim());
+ if (!Object.values(LongTermMemoryCategory).includes(category)) {
+ import_core3.logger.warn(`Invalid memory category: ${category}`);
+ continue;
+ }
+ if (content && !isNaN(confidence)) {
+ extractions.push({ category, content, confidence });
+ }
+ }
+ return extractions;
+ }
+ var longTermExtractionEvaluator = {
+ name: "LONG_TERM_MEMORY_EXTRACTION",
+ description: "Extracts long-term facts about users from conversations",
+ similes: ["MEMORY_EXTRACTION", "FACT_LEARNING", "USER_PROFILING"],
+ alwaysRun: true,
+ validate: async (runtime, message) => {
+ import_core3.logger.debug(`Validating long-term memory extraction for message: ${message.content?.text}`);
+ if (message.entityId === runtime.agentId) {
+ import_core3.logger.debug("Skipping long-term memory extraction for agent's own message");
+ return false;
+ }
+ if (!message.content?.text) {
+ import_core3.logger.debug("Skipping long-term memory extraction for message without text");
+ return false;
+ }
+ const memoryService = runtime.getService("memory");
+ if (!memoryService) {
+ import_core3.logger.debug("MemoryService not found");
+ return false;
+ }
+ const config = memoryService.getConfig();
+ if (!config.longTermExtractionEnabled) {
+ import_core3.logger.debug("Long-term memory extraction is disabled");
+ return false;
+ }
+ const currentMessageCount = await runtime.countMemories(message.roomId, false, "messages");
+ const shouldRun = await memoryService.shouldRunExtraction(message.entityId, message.roomId, currentMessageCount);
+ import_core3.logger.debug(`Should run extraction: ${shouldRun}`);
+ return shouldRun;
+ },
+ handler: async (runtime, message) => {
+ const memoryService = runtime.getService("memory");
+ if (!memoryService) {
+ import_core3.logger.error("MemoryService not found");
+ return;
+ }
+ const config = memoryService.getConfig();
+ const { entityId, roomId } = message;
+ try {
+ import_core3.logger.info(`Extracting long-term memories for entity ${entityId}`);
+ const recentMessages = await runtime.getMemories({
+ tableName: "messages",
+ roomId,
+ count: 20,
+ unique: false
+ });
+ const formattedMessages = recentMessages.sort((a, b) => (a.createdAt || 0) - (b.createdAt || 0)).map((msg) => {
+ const sender = msg.entityId === runtime.agentId ? runtime.character.name : "User";
+ return `${sender}: ${msg.content.text || "[non-text message]"}`;
+ }).join(`
+ `);
+ const existingMemories = await memoryService.getLongTermMemories(entityId, undefined, 30);
+ const formattedExisting = existingMemories.length > 0 ? existingMemories.map((m) => `[${m.category}] ${m.content} (confidence: ${m.confidence})`).join(`
+ `) : "None yet";
+ const state = await runtime.composeState(message);
+ const prompt = import_core3.composePromptFromState({
+ state: {
+ ...state,
+ recentMessages: formattedMessages,
+ existingMemories: formattedExisting
+ },
+ template: extractionTemplate
+ });
+ const response = await runtime.useModel(import_core3.ModelType.TEXT_LARGE, {
+ prompt
+ });
+ const extractions = parseMemoryExtractionXML(response);
+ import_core3.logger.info(`Extracted ${extractions.length} long-term memories`);
+ for (const extraction of extractions) {
+ if (extraction.confidence >= config.longTermConfidenceThreshold) {
+ await memoryService.storeLongTermMemory({
+ agentId: runtime.agentId,
+ entityId,
+ category: extraction.category,
+ content: extraction.content,
+ confidence: extraction.confidence,
+ source: "conversation",
+ metadata: {
+ roomId,
+ extractedAt: new Date().toISOString()
+ }
+ });
+ import_core3.logger.info(`Stored long-term memory: [${extraction.category}] ${extraction.content.substring(0, 50)}...`);
+ } else {
+ import_core3.logger.debug(`Skipped low-confidence memory: ${extraction.content} (confidence: ${extraction.confidence})`);
+ }
+ }
+ const currentMessageCount = await runtime.countMemories(roomId, false, "messages");
+ await memoryService.setLastExtractionCheckpoint(entityId, roomId, currentMessageCount);
+ import_core3.logger.debug(`Updated extraction checkpoint to ${currentMessageCount} for entity ${entityId} in room ${roomId}`);
+ } catch (error) {
+ import_core3.logger.error({ error }, "Error during long-term memory extraction:");
+ }
+ },
+ examples: []
+ };
+
+ // src/providers/short-term-memory.ts
+ var import_core4 = require("@elizaos/core");
+ var shortTermMemoryProvider = {
+ name: "SHORT_TERM_MEMORY",
+ description: "Recent conversation summaries to maintain context efficiently",
+ position: 95,
+ get: async (runtime, message, _state) => {
+ try {
+ const memoryService = runtime.getService("memory");
+ if (!memoryService) {
+ return {
+ data: { summaries: [] },
+ values: { sessionSummaries: "" },
+ text: ""
+ };
+ }
+ const { roomId } = message;
+ const summaries = await memoryService.getSessionSummaries(roomId, 3);
+ if (summaries.length === 0) {
+ return {
+ data: { summaries: [] },
+ values: { sessionSummaries: "" },
+ text: ""
+ };
+ }
+ const formattedSummaries = summaries.reverse().map((summary, index4) => {
+ const messageRange = `${summary.messageCount} messages`;
+ const timeRange = new Date(summary.startTime).toLocaleDateString();
+ let text5 = `**Session ${index4 + 1}** (${messageRange}, ${timeRange})
+ `;
+ text5 += summary.summary;
+ if (summary.topics && summary.topics.length > 0) {
+ text5 += `
+ *Topics: ${summary.topics.join(", ")}*`;
+ }
+ return text5;
+ }).join(`
+
+ `);
+ const text4 = import_core4.addHeader("# Previous Conversation Context", formattedSummaries);
+ return {
+ data: { summaries },
+ values: { sessionSummaries: text4 },
+ text: text4
+ };
+ } catch (error) {
+ import_core4.logger.error({ error }, "Error in shortTermMemoryProvider:");
+ return {
+ data: { summaries: [] },
+ values: { sessionSummaries: "" },
+ text: ""
+ };
+ }
+ }
+ };
+
+ // src/providers/long-term-memory.ts
+ var import_core5 = require("@elizaos/core");
+ var longTermMemoryProvider = {
+ name: "LONG_TERM_MEMORY",
+ description: "Persistent facts and preferences about the user",
+ position: 50,
+ get: async (runtime, message, _state) => {
+ try {
+ const memoryService = runtime.getService("memory");
+ if (!memoryService) {
+ return {
+ data: { memories: [] },
+ values: { longTermMemories: "" },
+ text: ""
+ };
+ }
+ const { entityId } = message;
+ if (entityId === runtime.agentId) {
+ return {
+ data: { memories: [] },
+ values: { longTermMemories: "" },
+ text: ""
+ };
+ }
+ const memories = await memoryService.getLongTermMemories(entityId, undefined, 25);
+ if (memories.length === 0) {
+ return {
+ data: { memories: [] },
+ values: { longTermMemories: "" },
+ text: ""
+ };
+ }
+ const formattedMemories = await memoryService.getFormattedLongTermMemories(entityId);
+ const text4 = import_core5.addHeader("# What I Know About You", formattedMemories);
+ const categoryCounts = new Map;
+ for (const memory of memories) {
+ const count = categoryCounts.get(memory.category) || 0;
+ categoryCounts.set(memory.category, count + 1);
+ }
+ const categoryList = Array.from(categoryCounts.entries()).map(([cat, count]) => `${cat}: ${count}`).join(", ");
+ return {
+ data: {
+ memories,
+ categoryCounts: Object.fromEntries(categoryCounts)
+ },
+ values: {
+ longTermMemories: text4,
+ memoryCategories: categoryList
+ },
+ text: text4
+ };
+ } catch (error) {
+ import_core5.logger.error({ error }, "Error in longTermMemoryProvider:");
+ return {
+ data: { memories: [] },
+ values: { longTermMemories: "" },
+ text: ""
+ };
+ }
+ }
+ };
+
+ // src/index.ts
+ var memoryPlugin = {
+ name: "memory",
+ description: "Advanced memory management with conversation summarization and long-term persistent memory",
+ services: [MemoryService],
+ evaluators: [summarizationEvaluator, longTermExtractionEvaluator],
+ providers: [longTermMemoryProvider, shortTermMemoryProvider],
+ schema: exports_schemas
+ };
+ var src_default = memoryPlugin;
+
+ //# debugId=AF5ACD53B0D0FBAC64756E2164756E21
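
For orientation, here is a minimal sketch of how this build might be wired into an agent. It is not part of the published diff: the default export and the `MemoryService` export follow the export map at the top of the bundle, and the setting names follow `MemoryService.initialize()`, but the project/character shape and the `settings` lookup path are assumptions to verify against the @elizaos/core documentation for your version.

```ts
// Illustrative only; assumes a project-style elizaOS agent where plugin objects
// are passed via a `plugins` array and runtime.getSetting() resolves values
// from character settings or environment variables.
import memoryPlugin, { MemoryService } from "@elizaos/plugin-memory";
import type { Character } from "@elizaos/core";

const character: Partial<Character> = {
  name: "Assistant",
  settings: {
    // Settings read by MemoryService.initialize() in the bundle above
    MEMORY_SUMMARIZATION_THRESHOLD: "5", // messages before a summary is attempted
    MEMORY_RETAIN_RECENT: "10",          // recent messages kept verbatim
    MEMORY_LONG_TERM_ENABLED: "true",    // toggle long-term fact extraction
    MEMORY_CONFIDENCE_THRESHOLD: "0.7",  // minimum confidence to persist a fact
  },
};

// Registering the plugin adds MemoryService plus the summarization and
// long-term-extraction evaluators and the two memory providers shown above.
export const agent = {
  character,
  plugins: [memoryPlugin],
};
```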