@reactive-agents/memory 0.1.0

package/dist/index.js ADDED
@@ -0,0 +1,1424 @@
+ // src/types.ts
+ import { Schema } from "effect";
+ var MemoryId = Schema.String.pipe(Schema.brand("MemoryId"));
+ var MemoryType = Schema.Literal(
+   "semantic",
+   "episodic",
+   "procedural",
+   "working"
+ );
+ var MemorySourceSchema = Schema.Struct({
+   type: Schema.Literal("agent", "user", "tool", "system", "llm-extraction"),
+   id: Schema.String,
+   taskId: Schema.optional(Schema.String)
+ });
+ var MemoryEntrySchema = Schema.Struct({
+   id: MemoryId,
+   agentId: Schema.String,
+   type: MemoryType,
+   content: Schema.String,
+   importance: Schema.Number.pipe(Schema.between(0, 1)),
+   createdAt: Schema.DateFromSelf,
+   updatedAt: Schema.DateFromSelf,
+   source: MemorySourceSchema,
+   tags: Schema.Array(Schema.String),
+   metadata: Schema.optional(
+     Schema.Record({ key: Schema.String, value: Schema.Unknown })
+   )
+ });
+ var SemanticEntrySchema = Schema.Struct({
+   id: MemoryId,
+   agentId: Schema.String,
+   content: Schema.String,
+   summary: Schema.String,
+   importance: Schema.Number.pipe(Schema.between(0, 1)),
+   verified: Schema.Boolean,
+   tags: Schema.Array(Schema.String),
+   embedding: Schema.optional(Schema.Array(Schema.Number)),
+   createdAt: Schema.DateFromSelf,
+   updatedAt: Schema.DateFromSelf,
+   accessCount: Schema.Number,
+   lastAccessedAt: Schema.DateFromSelf
+ });
+ var DailyLogEntrySchema = Schema.Struct({
+   id: MemoryId,
+   agentId: Schema.String,
+   date: Schema.String,
+   content: Schema.String,
+   taskId: Schema.optional(Schema.String),
+   eventType: Schema.Literal(
+     "task-started",
+     "task-completed",
+     "task-failed",
+     "decision-made",
+     "error-encountered",
+     "user-feedback",
+     "tool-call",
+     "observation"
+   ),
+   cost: Schema.optional(Schema.Number),
+   duration: Schema.optional(Schema.Number),
+   metadata: Schema.optional(
+     Schema.Record({ key: Schema.String, value: Schema.Unknown })
+   ),
+   createdAt: Schema.DateFromSelf
+ });
+ var SessionSnapshotSchema = Schema.Struct({
+   id: Schema.String,
+   agentId: Schema.String,
+   messages: Schema.Array(Schema.Unknown),
+   summary: Schema.String,
+   keyDecisions: Schema.Array(Schema.String),
+   taskIds: Schema.Array(Schema.String),
+   startedAt: Schema.DateFromSelf,
+   endedAt: Schema.DateFromSelf,
+   totalCost: Schema.Number,
+   totalTokens: Schema.Number
+ });
+ var ProceduralEntrySchema = Schema.Struct({
+   id: MemoryId,
+   agentId: Schema.String,
+   name: Schema.String,
+   description: Schema.String,
+   pattern: Schema.String,
+   successRate: Schema.Number.pipe(Schema.between(0, 1)),
+   useCount: Schema.Number,
+   tags: Schema.Array(Schema.String),
+   createdAt: Schema.DateFromSelf,
+   updatedAt: Schema.DateFromSelf
+ });
+ var WorkingMemoryItemSchema = Schema.Struct({
+   id: MemoryId,
+   content: Schema.String,
+   importance: Schema.Number.pipe(Schema.between(0, 1)),
+   addedAt: Schema.DateFromSelf,
+   source: MemorySourceSchema
+ });
+ var LinkType = Schema.Literal(
+   "similar",
+   "sequential",
+   "causal",
+   "contradicts",
+   "supports",
+   "elaborates"
+ );
+ var ZettelLinkSchema = Schema.Struct({
+   source: MemoryId,
+   target: MemoryId,
+   strength: Schema.Number.pipe(Schema.between(0, 1)),
+   type: LinkType,
+   createdAt: Schema.DateFromSelf
+ });
+ var CompactionStrategySchema = Schema.Literal(
+   "count",
+   "time",
+   "semantic",
+   "progressive"
+ );
+ var CompactionConfigSchema = Schema.Struct({
+   strategy: CompactionStrategySchema,
+   maxEntries: Schema.optional(Schema.Number),
+   intervalMs: Schema.optional(Schema.Number),
+   similarityThreshold: Schema.optional(Schema.Number),
+   decayFactor: Schema.optional(Schema.Number)
+ });
+ var SearchOptionsSchema = Schema.Struct({
+   query: Schema.String,
+   types: Schema.optional(Schema.Array(MemoryType)),
+   limit: Schema.optional(Schema.Number),
+   threshold: Schema.optional(Schema.Number),
+   useVector: Schema.optional(Schema.Boolean),
+   agentId: Schema.String
+ });
+ var MemoryBootstrapResultSchema = Schema.Struct({
+   agentId: Schema.String,
+   semanticContext: Schema.String,
+   recentEpisodes: Schema.Array(DailyLogEntrySchema),
+   activeWorkflows: Schema.Array(ProceduralEntrySchema),
+   workingMemory: Schema.Array(WorkingMemoryItemSchema),
+   bootstrappedAt: Schema.DateFromSelf,
+   tier: Schema.Literal("1", "2")
+ });
+ var EvictionPolicy = Schema.Literal("fifo", "lru", "importance");
+ var MemoryConfigSchema = Schema.Struct({
+   tier: Schema.Literal("1", "2"),
+   agentId: Schema.String,
+   dbPath: Schema.String,
+   working: Schema.Struct({
+     capacity: Schema.Number,
+     evictionPolicy: EvictionPolicy
+   }),
+   semantic: Schema.Struct({
+     maxMarkdownLines: Schema.Number,
+     importanceThreshold: Schema.Number
+   }),
+   episodic: Schema.Struct({
+     retainDays: Schema.Number,
+     maxSnapshotsPerSession: Schema.Number
+   }),
+   compaction: CompactionConfigSchema,
+   zettelkasten: Schema.Struct({
+     enabled: Schema.Boolean,
+     linkingThreshold: Schema.Number.pipe(Schema.between(0, 1)),
+     maxLinksPerEntry: Schema.Number
+   })
+ });
+ var defaultMemoryConfig = (agentId) => ({
+   tier: "1",
+   agentId,
+   dbPath: `.reactive-agents/memory/${agentId}/memory.db`,
+   working: { capacity: 7, evictionPolicy: "fifo" },
+   semantic: { maxMarkdownLines: 200, importanceThreshold: 0.7 },
+   episodic: { retainDays: 30, maxSnapshotsPerSession: 3 },
+   compaction: {
+     strategy: "progressive",
+     maxEntries: 1e3,
+     intervalMs: 864e5,
+     similarityThreshold: 0.92,
+     decayFactor: 0.05
+   },
+   zettelkasten: {
+     enabled: true,
+     linkingThreshold: 0.85,
+     maxLinksPerEntry: 10
+   }
+ });
+
+ // src/errors.ts
+ import { Data } from "effect";
+ var MemoryError = class extends Data.TaggedError("MemoryError") {
+ };
+ var MemoryNotFoundError = class extends Data.TaggedError(
+   "MemoryNotFoundError"
+ ) {
+ };
+ var DatabaseError = class extends Data.TaggedError("DatabaseError") {
+ };
+ var CapacityExceededError = class extends Data.TaggedError(
+   "CapacityExceededError"
+ ) {
+ };
+ var ContextError = class extends Data.TaggedError("ContextError") {
+ };
+ var CompactionError = class extends Data.TaggedError("CompactionError") {
+ };
+ var SearchError = class extends Data.TaggedError("SearchError") {
+ };
+ var ExtractionError = class extends Data.TaggedError("ExtractionError") {
+ };
+
+ // src/database.ts
+ import { Effect, Context, Layer } from "effect";
+ import { Database } from "bun:sqlite";
+ import * as fs from "fs";
+ import * as path from "path";
+ var MemoryDatabase = class extends Context.Tag("MemoryDatabase")() {
+ };
+ var SCHEMA_SQL = `
+ PRAGMA journal_mode = WAL;
+ PRAGMA synchronous = NORMAL;
+ PRAGMA foreign_keys = ON;
+
+ CREATE TABLE IF NOT EXISTS semantic_memory (
+   id TEXT PRIMARY KEY,
+   agent_id TEXT NOT NULL,
+   content TEXT NOT NULL,
+   summary TEXT NOT NULL,
+   importance REAL NOT NULL DEFAULT 0.5,
+   verified INTEGER NOT NULL DEFAULT 0,
+   tags TEXT NOT NULL DEFAULT '[]',
+   embedding BLOB,
+   created_at TEXT NOT NULL,
+   updated_at TEXT NOT NULL,
+   access_count INTEGER NOT NULL DEFAULT 0,
+   last_accessed_at TEXT NOT NULL
+ );
+
+ CREATE TABLE IF NOT EXISTS episodic_log (
+   id TEXT PRIMARY KEY,
+   agent_id TEXT NOT NULL,
+   date TEXT NOT NULL,
+   content TEXT NOT NULL,
+   task_id TEXT,
+   event_type TEXT NOT NULL,
+   cost REAL,
+   duration REAL,
+   metadata TEXT DEFAULT '{}',
+   created_at TEXT NOT NULL
+ );
+
+ CREATE TABLE IF NOT EXISTS session_snapshots (
+   id TEXT PRIMARY KEY,
+   agent_id TEXT NOT NULL,
+   messages TEXT NOT NULL,
+   summary TEXT NOT NULL,
+   key_decisions TEXT NOT NULL DEFAULT '[]',
+   task_ids TEXT NOT NULL DEFAULT '[]',
+   started_at TEXT NOT NULL,
+   ended_at TEXT NOT NULL,
+   total_cost REAL NOT NULL DEFAULT 0,
+   total_tokens INTEGER NOT NULL DEFAULT 0
+ );
+
+ CREATE TABLE IF NOT EXISTS procedural_memory (
+   id TEXT PRIMARY KEY,
+   agent_id TEXT NOT NULL,
+   name TEXT NOT NULL,
+   description TEXT NOT NULL,
+   pattern TEXT NOT NULL,
+   success_rate REAL NOT NULL DEFAULT 0,
+   use_count INTEGER NOT NULL DEFAULT 0,
+   tags TEXT NOT NULL DEFAULT '[]',
+   created_at TEXT NOT NULL,
+   updated_at TEXT NOT NULL
+ );
+
+ CREATE TABLE IF NOT EXISTS zettel_links (
+   source_id TEXT NOT NULL,
+   target_id TEXT NOT NULL,
+   strength REAL NOT NULL DEFAULT 0,
+   type TEXT NOT NULL,
+   created_at TEXT NOT NULL,
+   PRIMARY KEY (source_id, target_id)
+ );
+
+ -- FTS5 virtual table for full-text search (Tier 1 semantic search)
+ CREATE VIRTUAL TABLE IF NOT EXISTS semantic_fts USING fts5(
+   id UNINDEXED,
+   content,
+   tags,
+   content='semantic_memory',
+   content_rowid='rowid'
+ );
+
+ -- FTS5 for episodic log
+ CREATE VIRTUAL TABLE IF NOT EXISTS episodic_fts USING fts5(
+   id UNINDEXED,
+   content,
+   content='episodic_log',
+   content_rowid='rowid'
+ );
+
+ -- Triggers to keep FTS5 in sync
+ CREATE TRIGGER IF NOT EXISTS semantic_fts_insert
+ AFTER INSERT ON semantic_memory BEGIN
+   INSERT INTO semantic_fts(rowid, id, content, tags)
+   VALUES (new.rowid, new.id, new.content, new.tags);
+ END;
+
+ CREATE TRIGGER IF NOT EXISTS semantic_fts_delete
+ AFTER DELETE ON semantic_memory BEGIN
+   INSERT INTO semantic_fts(semantic_fts, rowid, id, content, tags)
+   VALUES ('delete', old.rowid, old.id, old.content, old.tags);
+ END;
+
+ CREATE TRIGGER IF NOT EXISTS semantic_fts_update
+ AFTER UPDATE ON semantic_memory BEGIN
+   INSERT INTO semantic_fts(semantic_fts, rowid, id, content, tags)
+   VALUES ('delete', old.rowid, old.id, old.content, old.tags);
+   INSERT INTO semantic_fts(rowid, id, content, tags)
+   VALUES (new.rowid, new.id, new.content, new.tags);
+ END;
+
+ CREATE TRIGGER IF NOT EXISTS episodic_fts_insert
+ AFTER INSERT ON episodic_log BEGIN
+   INSERT INTO episodic_fts(rowid, id, content)
+   VALUES (new.rowid, new.id, new.content);
+ END;
+
+ CREATE TRIGGER IF NOT EXISTS episodic_fts_delete
+ AFTER DELETE ON episodic_log BEGIN
+   INSERT INTO episodic_fts(episodic_fts, rowid, id, content)
+   VALUES ('delete', old.rowid, old.id, old.content);
+ END;
+
+ CREATE TRIGGER IF NOT EXISTS episodic_fts_update
+ AFTER UPDATE ON episodic_log BEGIN
+   INSERT INTO episodic_fts(episodic_fts, rowid, id, content)
+   VALUES ('delete', old.rowid, old.id, old.content);
+   INSERT INTO episodic_fts(rowid, id, content)
+   VALUES (new.rowid, new.id, new.content);
+ END;
+ `;
+ var MemoryDatabaseLive = (config) => Layer.scoped(
+   MemoryDatabase,
+   Effect.gen(function* () {
+     const dbDir = path.dirname(config.dbPath);
+     yield* Effect.sync(() => {
+       if (!fs.existsSync(dbDir)) {
+         fs.mkdirSync(dbDir, { recursive: true });
+       }
+     });
+     const db = yield* Effect.try({
+       try: () => new Database(config.dbPath, { create: true }),
+       catch: (e) => new DatabaseError({
+         message: `Failed to open database: ${e}`,
+         operation: "migrate",
+         cause: e
+       })
+     });
+     yield* Effect.try({
+       try: () => db.exec(SCHEMA_SQL),
+       catch: (e) => new DatabaseError({
+         message: `Schema migration failed: ${e}`,
+         operation: "migrate",
+         cause: e
+       })
+     });
+     yield* Effect.addFinalizer(
+       () => Effect.sync(() => {
+         try {
+           db.close();
+         } catch {
+         }
+       })
+     );
+     const service = {
+       query: (sql, params = []) => Effect.try({
+         try: () => {
+           const stmt = db.prepare(sql);
+           return stmt.all(...params);
+         },
+         catch: (e) => new DatabaseError({
+           message: `Query failed: ${e}
+ SQL: ${sql}`,
+           operation: "read",
+           cause: e
+         })
+       }),
+       exec: (sql, params = []) => Effect.try({
+         try: () => {
+           const stmt = db.prepare(sql);
+           const result = stmt.run(...params);
+           return result.changes;
+         },
+         catch: (e) => new DatabaseError({
+           message: `Exec failed: ${e}
+ SQL: ${sql}`,
+           operation: "write",
+           cause: e
+         })
+       }),
+       transaction: (fn) => Effect.gen(function* () {
+         let result;
+         yield* Effect.try({
+           try: () => {
+             const txn = db.transaction(() => {
+               result = Effect.runSync(fn(service));
+             });
+             txn();
+           },
+           catch: (e) => new DatabaseError({
+             message: `Transaction failed: ${e}`,
+             operation: "write",
+             cause: e
+           })
+         });
+         return result;
+       }),
+       close: () => Effect.sync(() => {
+         try {
+           db.close();
+         } catch {
+         }
+       })
+     };
+     return service;
+   })
+ );
+
+ // src/search.ts
+ import { Effect as Effect2, Context as Context2, Layer as Layer2 } from "effect";
+ var MemorySearchService = class extends Context2.Tag("MemorySearchService")() {
+ };
+ var MemorySearchServiceLive = Layer2.effect(
+   MemorySearchService,
+   Effect2.gen(function* () {
+     const db = yield* MemoryDatabase;
+     return {
+       searchSemantic: (options) => Effect2.gen(function* () {
+         const limit = options.limit ?? 10;
+         const rows = yield* db.query(
+           `SELECT sm.*
+            FROM semantic_memory sm
+            JOIN semantic_fts ON semantic_fts.id = sm.id
+            WHERE semantic_fts MATCH ?
+              AND sm.agent_id = ?
+            ORDER BY rank
+            LIMIT ?`,
+           [options.query, options.agentId, limit]
+         );
+         return rows.map((r) => ({
+           id: r.id,
+           agentId: r.agent_id,
+           content: r.content,
+           summary: r.summary,
+           importance: r.importance,
+           verified: Boolean(r.verified),
+           tags: JSON.parse(r.tags),
+           createdAt: new Date(r.created_at),
+           updatedAt: new Date(r.updated_at),
+           accessCount: r.access_count,
+           lastAccessedAt: new Date(r.last_accessed_at)
+         }));
+       }),
+       searchEpisodic: (options) => Effect2.gen(function* () {
+         const limit = options.limit ?? 20;
+         const rows = yield* db.query(
+           `SELECT el.*
+            FROM episodic_log el
+            JOIN episodic_fts ON episodic_fts.id = el.id
+            WHERE episodic_fts MATCH ?
+              AND el.agent_id = ?
+            ORDER BY rank
+            LIMIT ?`,
+           [options.query, options.agentId, limit]
+         );
+         return rows.map((r) => ({
+           id: r.id,
+           agentId: r.agent_id,
+           date: r.date,
+           content: r.content,
+           taskId: r.task_id ?? void 0,
+           eventType: r.event_type,
+           cost: r.cost ?? void 0,
+           duration: r.duration ?? void 0,
+           metadata: JSON.parse(r.metadata),
+           createdAt: new Date(r.created_at)
+         }));
+       }),
+       // Tier 2 only — requires sqlite-vec extension loaded on db connection
+       searchVector: (_queryEmbedding, _agentId, _limit) => Effect2.fail(
+         new DatabaseError({
+           message: "Vector search requires Tier 2 (sqlite-vec). Use createMemoryLayer('2').",
+           operation: "search"
+         })
+       )
+     };
+   })
+ );
+
+ // src/services/memory-service.ts
+ import { Effect as Effect9, Context as Context9, Layer as Layer9 } from "effect";
+
+ // src/services/working-memory.ts
+ import { Effect as Effect3, Context as Context3, Layer as Layer3, Ref } from "effect";
+ var WorkingMemoryService = class extends Context3.Tag("WorkingMemoryService")() {
+ };
+ var WorkingMemoryServiceLive = (capacity = 7, evictionPolicy = "fifo") => Layer3.effect(
+   WorkingMemoryService,
+   Effect3.gen(function* () {
+     const store = yield* Ref.make([]);
+     const evictOne = (items) => {
+       if (items.length === 0) return items;
+       switch (evictionPolicy) {
+         case "fifo":
+           return items.slice(1);
+         case "lru":
+           return items.slice(1);
+         case "importance": {
+           const minIdx = items.reduce(
+             (minI, item, i) => item.importance < items[minI].importance ? i : minI,
+             0
+           );
+           return [...items.slice(0, minIdx), ...items.slice(minIdx + 1)];
+         }
+       }
+     };
+     return {
+       add: (item) => Ref.update(store, (items) => {
+         const withRoom = items.length >= capacity ? evictOne(items) : items;
+         return [...withRoom, item];
+       }),
+       get: () => Ref.get(store).pipe(
+         Effect3.map(
+           (items) => [...items].reverse()
+         )
+       ),
+       clear: () => Ref.set(store, []),
+       evict: () => Effect3.gen(function* () {
+         const items = yield* Ref.get(store);
+         if (items.length === 0) {
+           return yield* Effect3.fail(
+             new MemoryError({
+               message: "Working memory is empty, cannot evict"
+             })
+           );
+         }
+         const evicted = items[0];
+         yield* Ref.set(store, items.slice(1));
+         return evicted;
+       }),
+       size: () => Ref.get(store).pipe(Effect3.map((items) => items.length)),
+       find: (query) => Ref.get(store).pipe(
+         Effect3.map(
+           (items) => items.filter(
+             (item) => item.content.toLowerCase().includes(query.toLowerCase())
+           )
+         )
+       )
+     };
+   })
+ );
+
+ // src/services/semantic-memory.ts
+ import { Effect as Effect4, Context as Context4, Layer as Layer4 } from "effect";
+ var SemanticMemoryService = class extends Context4.Tag("SemanticMemoryService")() {
+ };
+ var SemanticMemoryServiceLive = Layer4.effect(
+   SemanticMemoryService,
+   Effect4.gen(function* () {
+     const db = yield* MemoryDatabase;
+     const rowToEntry = (r) => ({
+       id: r.id,
+       agentId: r.agent_id,
+       content: r.content,
+       summary: r.summary,
+       importance: r.importance,
+       verified: Boolean(r.verified),
+       tags: JSON.parse(r.tags),
+       embedding: r.embedding ? Array.from(new Float32Array(r.embedding)) : void 0,
+       createdAt: new Date(r.created_at),
+       updatedAt: new Date(r.updated_at),
+       accessCount: r.access_count,
+       lastAccessedAt: new Date(r.last_accessed_at)
+     });
+     return {
+       store: (entry) => Effect4.gen(function* () {
+         yield* db.exec(
+           `INSERT OR REPLACE INTO semantic_memory
+            (id, agent_id, content, summary, importance, verified, tags, created_at, updated_at, access_count, last_accessed_at)
+            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
+           [
+             entry.id,
+             entry.agentId,
+             entry.content,
+             entry.summary,
+             entry.importance,
+             entry.verified ? 1 : 0,
+             JSON.stringify(entry.tags),
+             entry.createdAt.toISOString(),
+             entry.updatedAt.toISOString(),
+             entry.accessCount,
+             entry.lastAccessedAt.toISOString()
+           ]
+         );
+         return entry.id;
+       }),
+       get: (id) => Effect4.gen(function* () {
+         const rows = yield* db.query(
+           `SELECT * FROM semantic_memory WHERE id = ?`,
+           [id]
+         );
+         if (rows.length === 0) {
+           return yield* Effect4.fail(
+             new MemoryNotFoundError({
+               memoryId: id,
+               message: `Semantic entry ${id} not found`
+             })
+           );
+         }
+         return rowToEntry(rows[0]);
+       }),
+       update: (id, patch) => Effect4.gen(function* () {
+         const sets = [];
+         const params = [];
+         if (patch.content !== void 0) {
+           sets.push("content = ?");
+           params.push(patch.content);
+         }
+         if (patch.summary !== void 0) {
+           sets.push("summary = ?");
+           params.push(patch.summary);
+         }
+         if (patch.importance !== void 0) {
+           sets.push("importance = ?");
+           params.push(patch.importance);
+         }
+         if (patch.verified !== void 0) {
+           sets.push("verified = ?");
+           params.push(patch.verified ? 1 : 0);
+         }
+         if (patch.tags !== void 0) {
+           sets.push("tags = ?");
+           params.push(JSON.stringify(patch.tags));
+         }
+         sets.push("updated_at = ?");
+         params.push((/* @__PURE__ */ new Date()).toISOString());
+         params.push(id);
+         yield* db.exec(
+           `UPDATE semantic_memory SET ${sets.join(", ")} WHERE id = ?`,
+           params
+         );
+       }),
+       delete: (id) => db.exec(`DELETE FROM semantic_memory WHERE id = ?`, [id]).pipe(Effect4.asVoid),
+       listByAgent: (agentId, limit = 100) => db.query(
+         `SELECT * FROM semantic_memory WHERE agent_id = ? ORDER BY importance DESC LIMIT ?`,
+         [agentId, limit]
+       ).pipe(Effect4.map((rows) => rows.map(rowToEntry))),
+       recordAccess: (id) => db.exec(
+         `UPDATE semantic_memory SET access_count = access_count + 1, last_accessed_at = ? WHERE id = ?`,
+         [(/* @__PURE__ */ new Date()).toISOString(), id]
+       ).pipe(Effect4.asVoid),
+       generateMarkdown: (agentId, maxLines = 200) => Effect4.gen(function* () {
+         const entries = yield* db.query(
+           `SELECT content, summary, importance, tags, updated_at
+            FROM semantic_memory
+            WHERE agent_id = ?
+            ORDER BY importance DESC, updated_at DESC
+            LIMIT 50`,
+           [agentId]
+         );
+         const lines = [
+           `# Agent Memory \u2014 ${agentId}`,
+           `> Generated: ${(/* @__PURE__ */ new Date()).toISOString()}`,
+           ""
+         ];
+         for (const entry of entries) {
+           const tags = JSON.parse(entry.tags);
+           const tagStr = tags.length > 0 ? ` [${tags.join(", ")}]` : "";
+           const importanceBar = "\u2588".repeat(
+             Math.round(entry.importance * 5)
+           );
+           lines.push(
+             `## ${importanceBar} (${entry.importance.toFixed(2)})${tagStr}`
+           );
+           lines.push(entry.summary);
+           lines.push("");
+           if (lines.length >= maxLines) break;
+         }
+         return lines.slice(0, maxLines).join("\n");
+       })
+     };
+   })
+ );
+
+ // src/services/episodic-memory.ts
+ import { Effect as Effect5, Context as Context5, Layer as Layer5 } from "effect";
+ var EpisodicMemoryService = class extends Context5.Tag("EpisodicMemoryService")() {
+ };
+ var EpisodicMemoryServiceLive = Layer5.effect(
+   EpisodicMemoryService,
+   Effect5.gen(function* () {
+     const db = yield* MemoryDatabase;
+     const rowToEntry = (r) => ({
+       id: r.id,
+       agentId: r.agent_id,
+       date: r.date,
+       content: r.content,
+       taskId: r.task_id ?? void 0,
+       eventType: r.event_type,
+       cost: r.cost ?? void 0,
+       duration: r.duration ?? void 0,
+       metadata: JSON.parse(r.metadata ?? "{}"),
+       createdAt: new Date(r.created_at)
+     });
+     return {
+       log: (entry) => Effect5.gen(function* () {
+         yield* db.exec(
+           `INSERT INTO episodic_log
+            (id, agent_id, date, content, task_id, event_type, cost, duration, metadata, created_at)
+            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
+           [
+             entry.id,
+             entry.agentId,
+             entry.date,
+             entry.content,
+             entry.taskId ?? null,
+             entry.eventType,
+             entry.cost ?? null,
+             entry.duration ?? null,
+             JSON.stringify(entry.metadata ?? {}),
+             entry.createdAt.toISOString()
+           ]
+         );
+         return entry.id;
+       }),
+       getToday: (agentId) => {
+         const today = (/* @__PURE__ */ new Date()).toISOString().slice(0, 10);
+         return db.query(
+           `SELECT * FROM episodic_log WHERE agent_id = ? AND date = ? ORDER BY created_at DESC`,
+           [agentId, today]
+         ).pipe(Effect5.map((rows) => rows.map(rowToEntry)));
+       },
+       getRecent: (agentId, limit) => db.query(
+         `SELECT * FROM episodic_log WHERE agent_id = ? ORDER BY created_at DESC LIMIT ?`,
+         [agentId, limit]
+       ).pipe(Effect5.map((rows) => rows.map(rowToEntry))),
+       getByTask: (taskId) => db.query(
+         `SELECT * FROM episodic_log WHERE task_id = ? ORDER BY created_at ASC`,
+         [taskId]
+       ).pipe(Effect5.map((rows) => rows.map(rowToEntry))),
+       saveSnapshot: (snapshot) => db.exec(
+         `INSERT OR REPLACE INTO session_snapshots
+          (id, agent_id, messages, summary, key_decisions, task_ids, started_at, ended_at, total_cost, total_tokens)
+          VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
+         [
+           snapshot.id,
+           snapshot.agentId,
+           JSON.stringify(snapshot.messages),
+           snapshot.summary,
+           JSON.stringify(snapshot.keyDecisions),
+           JSON.stringify(snapshot.taskIds),
+           snapshot.startedAt.toISOString(),
+           snapshot.endedAt.toISOString(),
+           snapshot.totalCost,
+           snapshot.totalTokens
+         ]
+       ).pipe(Effect5.asVoid),
+       getLatestSnapshot: (agentId) => db.query(
+         `SELECT * FROM session_snapshots WHERE agent_id = ? ORDER BY ended_at DESC LIMIT 1`,
+         [agentId]
+       ).pipe(
+         Effect5.map((rows) => {
+           if (rows.length === 0) return null;
+           const r = rows[0];
+           return {
+             id: r.id,
+             agentId: r.agent_id,
+             messages: JSON.parse(r.messages),
+             summary: r.summary,
+             keyDecisions: JSON.parse(r.key_decisions),
+             taskIds: JSON.parse(r.task_ids),
+             startedAt: new Date(r.started_at),
+             endedAt: new Date(r.ended_at),
+             totalCost: r.total_cost,
+             totalTokens: r.total_tokens
+           };
+         })
+       ),
+       prune: (agentId, retainDays) => {
+         const cutoff = new Date(
+           Date.now() - retainDays * 864e5
+         ).toISOString();
+         return db.exec(
+           `DELETE FROM episodic_log WHERE agent_id = ? AND created_at < ?`,
+           [agentId, cutoff]
+         );
+       }
+     };
+   })
+ );
+
+ // src/services/procedural-memory.ts
+ import { Effect as Effect6, Context as Context6, Layer as Layer6 } from "effect";
+ var ProceduralMemoryService = class extends Context6.Tag(
+   "ProceduralMemoryService"
+ )() {
+ };
+ var ProceduralMemoryServiceLive = Layer6.effect(
+   ProceduralMemoryService,
+   Effect6.gen(function* () {
+     const db = yield* MemoryDatabase;
+     const rowToEntry = (r) => ({
+       id: r.id,
+       agentId: r.agent_id,
+       name: r.name,
+       description: r.description,
+       pattern: r.pattern,
+       successRate: r.success_rate,
+       useCount: r.use_count,
+       tags: JSON.parse(r.tags),
+       createdAt: new Date(r.created_at),
+       updatedAt: new Date(r.updated_at)
+     });
+     return {
+       store: (entry) => Effect6.gen(function* () {
+         yield* db.exec(
+           `INSERT OR REPLACE INTO procedural_memory
+            (id, agent_id, name, description, pattern, success_rate, use_count, tags, created_at, updated_at)
+            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
+           [
+             entry.id,
+             entry.agentId,
+             entry.name,
+             entry.description,
+             entry.pattern,
+             entry.successRate,
+             entry.useCount,
+             JSON.stringify(entry.tags),
+             entry.createdAt.toISOString(),
+             entry.updatedAt.toISOString()
+           ]
+         );
+         return entry.id;
+       }),
+       get: (id) => Effect6.gen(function* () {
+         const rows = yield* db.query(
+           `SELECT * FROM procedural_memory WHERE id = ?`,
+           [id]
+         );
+         if (rows.length === 0) {
+           return yield* Effect6.fail(
+             new MemoryNotFoundError({
+               memoryId: id,
+               message: `Procedural entry ${id} not found`
+             })
+           );
+         }
+         return rowToEntry(rows[0]);
+       }),
+       recordOutcome: (id, success) => Effect6.gen(function* () {
+         const rows = yield* db.query(
+           `SELECT success_rate, use_count FROM procedural_memory WHERE id = ?`,
+           [id]
+         );
+         if (rows.length === 0) return;
+         const { success_rate, use_count } = rows[0];
+         const newCount = use_count + 1;
+         const newRate = success_rate * 0.9 + (success ? 1 : 0) * 0.1;
+         yield* db.exec(
+           `UPDATE procedural_memory SET success_rate = ?, use_count = ?, updated_at = ? WHERE id = ?`,
+           [newRate, newCount, (/* @__PURE__ */ new Date()).toISOString(), id]
+         );
+       }),
+       listActive: (agentId) => db.query(
+         `SELECT * FROM procedural_memory WHERE agent_id = ? ORDER BY success_rate DESC, use_count DESC`,
+         [agentId]
+       ).pipe(Effect6.map((rows) => rows.map(rowToEntry))),
+       findByTags: (agentId, tags) => Effect6.gen(function* () {
+         const all = yield* db.query(
+           `SELECT * FROM procedural_memory WHERE agent_id = ?`,
+           [agentId]
+         );
+         return all.map(rowToEntry).filter((e) => tags.some((t) => e.tags.includes(t)));
+       })
+     };
+   })
+ );
+
+ // src/fs/memory-file-system.ts
+ import { Effect as Effect7, Context as Context7, Layer as Layer7 } from "effect";
+ import * as fs2 from "fs/promises";
+ import * as path2 from "path";
+ var MemoryFileSystem = class extends Context7.Tag("MemoryFileSystem")() {
+ };
+ var MemoryFileSystemLive = Layer7.succeed(MemoryFileSystem, {
+   writeMarkdown: (agentId, content, basePath) => Effect7.tryPromise({
+     try: async () => {
+       const dir = path2.join(basePath, agentId);
+       await fs2.mkdir(dir, { recursive: true });
+       await fs2.writeFile(path2.join(dir, "memory.md"), content, "utf8");
+     },
+     catch: (e) => new MemoryError({
+       message: `Failed to write memory.md: ${e}`,
+       cause: e
+     })
+   }),
+   readMarkdown: (agentId, basePath) => Effect7.tryPromise({
+     try: async () => {
+       const filePath = path2.join(basePath, agentId, "memory.md");
+       try {
+         return await fs2.readFile(filePath, "utf8");
+       } catch {
+         return "";
+       }
+     },
+     catch: (e) => new MemoryError({
+       message: `Failed to read memory.md: ${e}`,
+       cause: e
+     })
+   }),
+   ensureDirectory: (agentId, basePath) => Effect7.tryPromise({
+     try: async () => {
+       await fs2.mkdir(path2.join(basePath, agentId), { recursive: true });
+     },
+     catch: (e) => new MemoryError({
+       message: `Failed to create memory directory: ${e}`,
+       cause: e
+     })
+   })
+ });
+
+ // src/indexing/zettelkasten.ts
+ import { Effect as Effect8, Context as Context8, Layer as Layer8 } from "effect";
+ var ZettelkastenService = class extends Context8.Tag("ZettelkastenService")() {
+ };
+ var ZettelkastenServiceLive = Layer8.effect(
+   ZettelkastenService,
+   Effect8.gen(function* () {
+     const db = yield* MemoryDatabase;
+     const rowToLink = (r) => ({
+       source: r.source_id,
+       target: r.target_id,
+       strength: r.strength,
+       type: r.type,
+       createdAt: new Date(r.created_at)
+     });
+     return {
+       addLink: (link) => db.exec(
+         `INSERT OR REPLACE INTO zettel_links (source_id, target_id, strength, type, created_at)
+          VALUES (?, ?, ?, ?, ?)`,
+         [
+           link.source,
+           link.target,
+           link.strength,
+           link.type,
+           link.createdAt.toISOString()
+         ]
+       ).pipe(Effect8.asVoid),
+       getLinks: (memoryId) => db.query(
+         `SELECT * FROM zettel_links WHERE source_id = ? OR target_id = ? ORDER BY strength DESC`,
+         [memoryId, memoryId]
+       ).pipe(Effect8.map((rows) => rows.map(rowToLink))),
+       getLinked: (memoryId) => db.query(
+         `SELECT CASE WHEN source_id = ? THEN target_id ELSE source_id END as linked_id
+          FROM zettel_links
+          WHERE source_id = ? OR target_id = ?
+          ORDER BY strength DESC`,
+         [memoryId, memoryId, memoryId]
+       ).pipe(
+         Effect8.map((rows) => rows.map((r) => r.linked_id))
+       ),
+       traverse: (startId, depth) => Effect8.gen(function* () {
+         const visited = /* @__PURE__ */ new Set();
+         const result = [];
+         const queue = [
+           { id: startId, d: 0 }
+         ];
+         while (queue.length > 0) {
+           const item = queue.shift();
+           if (visited.has(item.id) || item.d > depth) continue;
+           visited.add(item.id);
+           if (item.id !== startId) result.push(item.id);
+           const links = yield* db.query(
+             `SELECT CASE WHEN source_id = ? THEN target_id ELSE source_id END as linked_id
+              FROM zettel_links WHERE source_id = ? OR target_id = ?`,
+             [item.id, item.id, item.id]
+           );
+           for (const link of links) {
+             if (!visited.has(link.linked_id)) {
+               queue.push({
+                 id: link.linked_id,
+                 d: item.d + 1
+               });
+             }
+           }
+         }
+         return result;
+       }),
+       deleteLinks: (memoryId) => db.exec(
+         `DELETE FROM zettel_links WHERE source_id = ? OR target_id = ?`,
+         [memoryId, memoryId]
+       ).pipe(Effect8.asVoid),
+       // Text-based auto-linking via FTS5 search
+       autoLinkText: (memoryId, content, agentId, threshold = 0.85) => Effect8.gen(function* () {
+         const searchTerms = content.split(/\s+/).filter((w) => w.length > 3).slice(0, 10).join(" OR ");
+         if (searchTerms.length === 0) return [];
+         const similar = yield* db.query(
+           `SELECT sm.id, semantic_fts.rank
+            FROM semantic_memory sm
+            JOIN semantic_fts ON semantic_fts.id = sm.id
+            WHERE semantic_fts MATCH ?
+              AND sm.agent_id = ?
+              AND sm.id != ?
+            ORDER BY rank
+            LIMIT 5`,
+           [searchTerms, agentId, memoryId]
+         );
+         const now = /* @__PURE__ */ new Date();
+         const links = [];
+         for (const row of similar) {
+           const strength = Math.min(1, Math.max(0, 1 + row.rank / 10));
+           if (strength < threshold) continue;
+           const link = {
+             source: memoryId,
+             target: row.id,
+             strength,
+             type: "similar",
+             createdAt: now
+           };
+           yield* db.exec(
+             `INSERT OR REPLACE INTO zettel_links (source_id, target_id, strength, type, created_at)
+              VALUES (?, ?, ?, ?, ?)`,
+             [
+               link.source,
+               link.target,
+               link.strength,
+               link.type,
+               link.createdAt.toISOString()
+             ]
+           );
+           links.push(link);
+         }
+         return links;
+       })
+     };
+   })
+ );
+
+ // src/services/memory-service.ts
+ var MemoryService = class extends Context9.Tag("MemoryService")() {
+ };
+ var MemoryServiceLive = (config) => Layer9.effect(
+   MemoryService,
+   Effect9.gen(function* () {
+     const working = yield* WorkingMemoryService;
+     const semantic = yield* SemanticMemoryService;
+     const episodic = yield* EpisodicMemoryService;
+     const _procedural = yield* ProceduralMemoryService;
+     const fileSystem = yield* MemoryFileSystem;
+     const zettel = yield* ZettelkastenService;
+     const basePath = `.reactive-agents/memory`;
+     return {
+       bootstrap: (agentId) => Effect9.gen(function* () {
+         yield* fileSystem.ensureDirectory(agentId, basePath).pipe(Effect9.catchAll(() => Effect9.void));
+         const semanticContext = yield* fileSystem.readMarkdown(agentId, basePath).pipe(Effect9.catchAll(() => Effect9.succeed("")));
+         const recentEpisodes = yield* episodic.getRecent(agentId, 20).pipe(Effect9.catchAll(() => Effect9.succeed([])));
+         const activeWorkflows = yield* _procedural.listActive(agentId).pipe(Effect9.catchAll(() => Effect9.succeed([])));
+         const workingMemory = yield* working.get();
+         return {
+           agentId,
+           semanticContext,
+           recentEpisodes,
+           activeWorkflows,
+           workingMemory: [...workingMemory],
+           bootstrappedAt: /* @__PURE__ */ new Date(),
+           tier: config.tier
+         };
+       }),
+       flush: (agentId) => Effect9.gen(function* () {
+         const markdown = yield* semantic.generateMarkdown(
+           agentId,
+           config.semantic.maxMarkdownLines
+         );
+         yield* fileSystem.writeMarkdown(agentId, markdown, basePath);
+       }),
+       snapshot: (snap) => episodic.saveSnapshot(snap),
+       addToWorking: (item) => working.add(item),
+       storeSemantic: (entry) => Effect9.gen(function* () {
+         const id = yield* semantic.store(entry);
+         if (config.zettelkasten.enabled) {
+           yield* zettel.autoLinkText(
+             entry.id,
+             entry.content,
+             entry.agentId,
+             config.zettelkasten.linkingThreshold
+           ).pipe(Effect9.catchAll(() => Effect9.succeed([])));
+         }
+         return id;
+       }),
+       logEpisode: (entry) => episodic.log(entry),
+       getWorking: () => working.get()
+     };
+   })
+ );
+
+ // src/compaction/compaction-service.ts
+ import { Effect as Effect10, Context as Context10, Layer as Layer10 } from "effect";
+ var CompactionService = class extends Context10.Tag("CompactionService")() {
+ };
+ var CompactionServiceLive = Layer10.effect(
+   CompactionService,
+   Effect10.gen(function* () {
+     const db = yield* MemoryDatabase;
+     return {
+       compact: (agentId, config) => Effect10.gen(function* () {
+         switch (config.strategy) {
+           case "count":
+             return yield* compactByCount(agentId, config.maxEntries ?? 1e3);
+           case "time":
+             return yield* compactByTime(
+               agentId,
+               config.intervalMs ?? 864e5
+             );
+           case "semantic":
+             return yield* compactBySimilarity(
+               agentId,
+               config.similarityThreshold ?? 0.92
+             );
+           case "progressive":
+             return yield* compactProgressive(agentId, config);
+         }
+       }),
+       compactByCount: (agentId, maxEntries) => compactByCount(agentId, maxEntries),
+       compactByTime: (agentId, intervalMs) => compactByTime(agentId, intervalMs),
+       compactBySimilarity: (agentId, threshold) => compactBySimilarity(agentId, threshold),
+       compactProgressive: (agentId, config) => compactProgressive(agentId, config)
+     };
+     function compactByCount(agentId, maxEntries) {
+       return Effect10.gen(function* () {
+         const countRows = yield* db.query(
+           `SELECT COUNT(*) as cnt FROM semantic_memory WHERE agent_id = ?`,
+           [agentId]
+         );
+         const count = countRows[0]?.cnt ?? 0;
+         if (count <= maxEntries) return 0;
+         const toRemove = count - maxEntries;
+         const deleted = yield* db.exec(
+           `DELETE FROM semantic_memory WHERE id IN (
+              SELECT id FROM semantic_memory
+              WHERE agent_id = ?
+              ORDER BY importance ASC, last_accessed_at ASC
+              LIMIT ?
+            )`,
+           [agentId, toRemove]
+         );
+         return deleted;
+       });
+     }
+     function compactByTime(agentId, intervalMs) {
+       return Effect10.gen(function* () {
+         const cutoff = new Date(Date.now() - intervalMs).toISOString();
+         const deleted = yield* db.exec(
+           `DELETE FROM semantic_memory
+            WHERE agent_id = ? AND updated_at < ? AND importance < 0.5`,
+           [agentId, cutoff]
+         );
+         return deleted;
+       });
+     }
+     function compactBySimilarity(agentId, _threshold) {
+       return Effect10.gen(function* () {
+         const duplicates = yield* db.query(
+           `SELECT content, COUNT(*) as cnt FROM semantic_memory
+            WHERE agent_id = ? GROUP BY content HAVING cnt > 1`,
+           [agentId]
+         );
+         let removed = 0;
+         for (const dup of duplicates) {
+           const deleted = yield* db.exec(
+             `DELETE FROM semantic_memory WHERE id IN (
+                SELECT id FROM semantic_memory
+                WHERE agent_id = ? AND content = ?
+                ORDER BY importance DESC
+                LIMIT -1 OFFSET 1
+              )`,
+             [agentId, dup.content]
+           );
+           removed += deleted;
+         }
+         return removed;
+       });
+     }
+     function compactProgressive(agentId, config) {
+       return Effect10.gen(function* () {
+         let totalRemoved = 0;
+         totalRemoved += yield* compactByCount(
+           agentId,
+           config.maxEntries ?? 1e3
+         );
+         totalRemoved += yield* compactByTime(
+           agentId,
+           config.intervalMs ?? 864e5
+         );
+         const decayFactor = config.decayFactor ?? 0.05;
+         yield* db.exec(
+           `UPDATE semantic_memory
+            SET importance = MAX(0, importance - ?)
+            WHERE agent_id = ? AND access_count < 3 AND importance > 0.1`,
+           [decayFactor, agentId]
+         );
+         return totalRemoved;
+       });
+     }
+   })
+ );
+
+ // src/extraction/memory-extractor.ts
+ import { Effect as Effect11, Context as Context11, Layer as Layer11 } from "effect";
+ var MemoryExtractor = class extends Context11.Tag("MemoryExtractor")() {
+ };
+ var idCounter = 0;
+ var nextId = () => `mem-extract-${Date.now()}-${++idCounter}`;
+ var MemoryExtractorLive = Layer11.succeed(MemoryExtractor, {
+   extractFromConversation: (agentId, messages) => Effect11.try({
+     try: () => {
+       const entries = [];
+       const now = /* @__PURE__ */ new Date();
+       for (const msg of messages) {
+         if (msg.role !== "assistant") continue;
+         const sentences = msg.content.split(/[.!?]\s+/).filter((s) => s.length > 30);
+         for (const sentence of sentences.slice(0, 3)) {
+           entries.push({
+             id: nextId(),
+             agentId,
+             content: sentence.trim(),
+             summary: sentence.trim().slice(0, 100),
+             importance: 0.5,
+             verified: false,
+             tags: [],
+             createdAt: now,
+             updatedAt: now,
+             accessCount: 0,
+             lastAccessedAt: now
+           });
+         }
+       }
+       return entries;
+     },
+     catch: (e) => new ExtractionError({
+       message: `Extraction failed: ${e}`,
+       cause: e
+     })
+   }),
+   extractEpisodic: (agentId, messages) => Effect11.try({
+     try: () => {
+       const entries = [];
+       const now = /* @__PURE__ */ new Date();
+       const today = now.toISOString().slice(0, 10);
+       for (const msg of messages) {
+         if (msg.content.length < 10) continue;
+         entries.push({
+           id: nextId(),
+           agentId,
+           date: today,
+           content: msg.content.slice(0, 500),
+           eventType: "observation",
+           createdAt: now,
+           metadata: { role: msg.role }
+         });
+       }
+       return entries.slice(0, 10);
+     },
+     catch: (e) => new ExtractionError({
+       message: `Episodic extraction failed: ${e}`,
+       cause: e
+     })
+   })
+ });
+
+ // src/extraction/memory-consolidator.ts
+ import { Effect as Effect12, Context as Context12, Layer as Layer12 } from "effect";
+ var MemoryConsolidator = class extends Context12.Tag("MemoryConsolidator")() {
+ };
+ var MemoryConsolidatorLive = (config) => Layer12.effect(
+   MemoryConsolidator,
+   Effect12.gen(function* () {
+     const db = yield* MemoryDatabase;
+     const decayFactor = config.compaction.decayFactor ?? 0.05;
+     return {
+       consolidate: (agentId) => Effect12.gen(function* () {
+         let affected = 0;
+         affected += yield* decayUnused(agentId, decayFactor);
+         affected += yield* promoteActive(agentId);
+         const removed = yield* db.exec(
+           `DELETE FROM semantic_memory
+            WHERE agent_id = ? AND importance < 0.05 AND access_count < 2`,
+           [agentId]
+         );
+         affected += removed;
+         return affected;
+       }),
+       decayUnused: (agentId, factor) => decayUnused(agentId, factor),
+       promoteActive: (agentId) => promoteActive(agentId)
+     };
+     function decayUnused(agentId, factor) {
+       return Effect12.gen(function* () {
+         const cutoff = new Date(
+           Date.now() - 7 * 864e5
+         ).toISOString();
+         const result = yield* db.exec(
+           `UPDATE semantic_memory
+            SET importance = MAX(0, importance - ?)
+            WHERE agent_id = ?
+              AND last_accessed_at < ?
+              AND importance > 0.1`,
+           [factor, agentId, cutoff]
+         );
+         return result;
+       });
+     }
+     function promoteActive(agentId) {
+       return Effect12.gen(function* () {
+         const result = yield* db.exec(
+           `UPDATE semantic_memory
+            SET importance = MIN(1, importance + 0.05)
+            WHERE agent_id = ?
+              AND access_count >= 5
+              AND importance < 0.95`,
+           [agentId]
+         );
+         return result;
+       });
+     }
+   })
+ );
+
+ // src/runtime.ts
+ import { Layer as Layer13 } from "effect";
+ var createMemoryLayer = (tier, configOverrides) => {
+   const agentId = configOverrides?.agentId ?? "default";
+   const config = {
+     ...defaultMemoryConfig(agentId),
+     ...configOverrides,
+     tier
+   };
+   const dbLayer = MemoryDatabaseLive(config);
+   const coreServices = Layer13.mergeAll(
+     SemanticMemoryServiceLive,
+     EpisodicMemoryServiceLive,
+     ProceduralMemoryServiceLive,
+     MemorySearchServiceLive,
+     ZettelkastenServiceLive
+   ).pipe(Layer13.provide(dbLayer));
+   const workingLayer = WorkingMemoryServiceLive(
+     config.working.capacity,
+     config.working.evictionPolicy
+   );
+   const fsLayer = MemoryFileSystemLive;
+   const memoryServiceLayer = MemoryServiceLive(config).pipe(
+     Layer13.provide(Layer13.mergeAll(workingLayer, coreServices, fsLayer))
+   );
+   return Layer13.mergeAll(
+     dbLayer,
+     workingLayer,
+     coreServices,
+     fsLayer,
+     memoryServiceLayer
+   );
+ };
+ export {
+   CapacityExceededError,
+   CompactionConfigSchema,
+   CompactionError,
+   CompactionService,
+   CompactionServiceLive,
+   CompactionStrategySchema,
+   ContextError,
+   DailyLogEntrySchema,
+   DatabaseError,
+   EpisodicMemoryService,
+   EpisodicMemoryServiceLive,
+   EvictionPolicy as EvictionPolicySchema,
+   ExtractionError,
+   LinkType as LinkTypeSchema,
+   MemoryBootstrapResultSchema,
+   MemoryConfigSchema,
+   MemoryConsolidator,
+   MemoryConsolidatorLive,
+   MemoryDatabase,
+   MemoryDatabaseLive,
+   MemoryEntrySchema,
+   MemoryError,
+   MemoryExtractor,
+   MemoryExtractorLive,
+   MemoryFileSystem,
+   MemoryFileSystemLive,
+   MemoryId as MemoryIdSchema,
+   MemoryNotFoundError,
+   MemorySearchService,
+   MemorySearchServiceLive,
+   MemoryService,
+   MemoryServiceLive,
+   MemorySourceSchema,
+   MemoryType as MemoryTypeSchema,
+   ProceduralEntrySchema,
+   ProceduralMemoryService,
+   ProceduralMemoryServiceLive,
+   SearchError,
+   SearchOptionsSchema,
+   SemanticEntrySchema,
+   SemanticMemoryService,
+   SemanticMemoryServiceLive,
+   SessionSnapshotSchema,
+   WorkingMemoryItemSchema,
+   WorkingMemoryService,
+   WorkingMemoryServiceLive,
+   ZettelLinkSchema,
+   ZettelkastenService,
+   ZettelkastenServiceLive,
+   createMemoryLayer,
+   defaultMemoryConfig
+ };
+ //# sourceMappingURL=index.js.map
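
For orientation, a minimal usage sketch against the public surface visible in this diff (createMemoryLayer, MemoryService, and the Effect runtime). This example is not part of the package contents above: it assumes a Bun runtime (the database layer imports bun:sqlite), and the agent id and entry values are illustrative only.

import { Effect } from "effect";
import { MemoryService, createMemoryLayer } from "@reactive-agents/memory";

// Tier 1: bun:sqlite with FTS5 keyword search; no sqlite-vec extension required.
const memoryLayer = createMemoryLayer("1", { agentId: "research-agent" });

const program = Effect.gen(function* () {
  const memory = yield* MemoryService;

  // Load the markdown context, recent episodes, workflows and working memory.
  const boot = yield* memory.bootstrap("research-agent");
  console.log(boot.semanticContext);

  // Record an episodic event for today (illustrative values).
  yield* memory.logEpisode({
    id: `episode-${Date.now()}`,
    agentId: "research-agent",
    date: new Date().toISOString().slice(0, 10),
    content: "Completed literature survey for task-42",
    eventType: "task-completed",
    createdAt: new Date()
  });

  // Regenerate .reactive-agents/memory/research-agent/memory.md from semantic memory.
  yield* memory.flush("research-agent");
});

Effect.runPromise(program.pipe(Effect.provide(memoryLayer)));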