alma-memory 0.3.0__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. alma/__init__.py +99 -29
  2. alma/confidence/__init__.py +47 -0
  3. alma/confidence/engine.py +540 -0
  4. alma/confidence/types.py +351 -0
  5. alma/config/loader.py +3 -2
  6. alma/consolidation/__init__.py +23 -0
  7. alma/consolidation/engine.py +678 -0
  8. alma/consolidation/prompts.py +84 -0
  9. alma/core.py +15 -15
  10. alma/domains/__init__.py +6 -6
  11. alma/domains/factory.py +12 -9
  12. alma/domains/schemas.py +17 -3
  13. alma/domains/types.py +8 -4
  14. alma/events/__init__.py +75 -0
  15. alma/events/emitter.py +284 -0
  16. alma/events/storage_mixin.py +246 -0
  17. alma/events/types.py +126 -0
  18. alma/events/webhook.py +425 -0
  19. alma/exceptions.py +49 -0
  20. alma/extraction/__init__.py +31 -0
  21. alma/extraction/auto_learner.py +264 -0
  22. alma/extraction/extractor.py +420 -0
  23. alma/graph/__init__.py +81 -0
  24. alma/graph/backends/__init__.py +18 -0
  25. alma/graph/backends/memory.py +236 -0
  26. alma/graph/backends/neo4j.py +417 -0
  27. alma/graph/base.py +159 -0
  28. alma/graph/extraction.py +198 -0
  29. alma/graph/store.py +860 -0
  30. alma/harness/__init__.py +4 -4
  31. alma/harness/base.py +18 -9
  32. alma/harness/domains.py +27 -11
  33. alma/initializer/__init__.py +37 -0
  34. alma/initializer/initializer.py +418 -0
  35. alma/initializer/types.py +250 -0
  36. alma/integration/__init__.py +9 -9
  37. alma/integration/claude_agents.py +10 -10
  38. alma/integration/helena.py +32 -22
  39. alma/integration/victor.py +57 -33
  40. alma/learning/__init__.py +27 -27
  41. alma/learning/forgetting.py +198 -148
  42. alma/learning/heuristic_extractor.py +40 -24
  43. alma/learning/protocols.py +62 -14
  44. alma/learning/validation.py +7 -2
  45. alma/mcp/__init__.py +4 -4
  46. alma/mcp/__main__.py +2 -1
  47. alma/mcp/resources.py +17 -16
  48. alma/mcp/server.py +102 -44
  49. alma/mcp/tools.py +174 -37
  50. alma/progress/__init__.py +3 -3
  51. alma/progress/tracker.py +26 -20
  52. alma/progress/types.py +8 -12
  53. alma/py.typed +0 -0
  54. alma/retrieval/__init__.py +11 -11
  55. alma/retrieval/cache.py +20 -21
  56. alma/retrieval/embeddings.py +4 -4
  57. alma/retrieval/engine.py +114 -35
  58. alma/retrieval/scoring.py +73 -63
  59. alma/session/__init__.py +2 -2
  60. alma/session/manager.py +5 -5
  61. alma/session/types.py +5 -4
  62. alma/storage/__init__.py +41 -0
  63. alma/storage/azure_cosmos.py +107 -31
  64. alma/storage/base.py +157 -4
  65. alma/storage/chroma.py +1443 -0
  66. alma/storage/file_based.py +56 -20
  67. alma/storage/pinecone.py +1080 -0
  68. alma/storage/postgresql.py +1452 -0
  69. alma/storage/qdrant.py +1306 -0
  70. alma/storage/sqlite_local.py +376 -31
  71. alma/types.py +62 -14
  72. alma_memory-0.5.0.dist-info/METADATA +905 -0
  73. alma_memory-0.5.0.dist-info/RECORD +76 -0
  74. {alma_memory-0.3.0.dist-info → alma_memory-0.5.0.dist-info}/WHEEL +1 -1
  75. alma_memory-0.3.0.dist-info/METADATA +0 -438
  76. alma_memory-0.3.0.dist-info/RECORD +0 -46
  77. {alma_memory-0.3.0.dist-info → alma_memory-0.5.0.dist-info}/top_level.txt +0 -0
alma/storage/sqlite_local.py

@@ -6,28 +6,30 @@ This is the recommended backend for local development and testing.
 """
 
 import json
-import sqlite3
 import logging
-import numpy as np
-from pathlib import Path
-from datetime import datetime, timezone
-from typing import Optional, List, Dict, Any, Tuple
+import sqlite3
 from contextlib import contextmanager
+from datetime import datetime, timezone
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Tuple
+
+import numpy as np
 
+from alma.storage.base import StorageBackend
 from alma.types import (
+    AntiPattern,
+    DomainKnowledge,
     Heuristic,
     Outcome,
     UserPreference,
-    DomainKnowledge,
-    AntiPattern,
 )
-from alma.storage.base import StorageBackend
 
 logger = logging.getLogger(__name__)
 
 # Try to import FAISS, fall back to numpy-based search if not available
 try:
     import faiss
+
     FAISS_AVAILABLE = True
 except ImportError:
     FAISS_AVAILABLE = False
@@ -72,6 +74,7 @@ class SQLiteStorage(StorageBackend):
         # Initialize FAISS indices (one per memory type)
         self._indices: Dict[str, Any] = {}
         self._id_maps: Dict[str, List[str]] = {}  # memory_type -> [memory_ids]
+        self._index_dirty: Dict[str, bool] = {}  # Track which indexes need rebuilding
         self._load_faiss_indices()
 
     @classmethod
@@ -149,6 +152,10 @@ class SQLiteStorage(StorageBackend):
             "CREATE INDEX IF NOT EXISTS idx_outcomes_task_type "
             "ON outcomes(project_id, agent, task_type)"
         )
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_outcomes_timestamp "
+            "ON outcomes(project_id, timestamp)"
+        )
 
         # User preferences table
         cursor.execute("""
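The new idx_outcomes_timestamp index pairs the project filter with the recency ordering used by the outcome readers later in this diff, so SQLite can walk the index instead of sorting. A minimal sketch of the access pattern it serves, with an illustrative cut-down schema rather than the package's full table definition:

# Sketch: a composite (project_id, timestamp) index serving a filtered,
# recency-ordered scan without a temporary sort.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE outcomes (project_id TEXT, agent TEXT, timestamp TEXT)")
conn.execute("CREATE INDEX idx_outcomes_timestamp ON outcomes(project_id, timestamp)")
plan = conn.execute(
    "EXPLAIN QUERY PLAN SELECT * FROM outcomes "
    "WHERE project_id = ? ORDER BY timestamp DESC LIMIT 5",
    ("demo",),
).fetchall()
print(plan)  # SEARCH ... USING INDEX idx_outcomes_timestamp; no temp B-tree sort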
@@ -222,9 +229,19 @@ class SQLiteStorage(StorageBackend):
             "ON embeddings(memory_type)"
         )
 
-    def _load_faiss_indices(self):
-        """Load or create FAISS indices for each memory type."""
-        memory_types = ["heuristics", "outcomes", "domain_knowledge", "anti_patterns"]
+    def _load_faiss_indices(self, memory_types: Optional[List[str]] = None):
+        """Load or create FAISS indices for specified memory types.
+
+        Args:
+            memory_types: List of memory types to load. If None, loads all types.
+        """
+        if memory_types is None:
+            memory_types = [
+                "heuristics",
+                "outcomes",
+                "domain_knowledge",
+                "anti_patterns",
+            ]
 
         for memory_type in memory_types:
             if FAISS_AVAILABLE:
@@ -235,6 +252,7 @@ class SQLiteStorage(StorageBackend):
                 self._indices[memory_type] = []
 
             self._id_maps[memory_type] = []
+            self._index_dirty[memory_type] = False  # Mark as fresh after rebuild
 
             # Load existing embeddings
             with self._get_connection() as conn:
@@ -257,6 +275,19 @@ class SQLiteStorage(StorageBackend):
                 else:
                     self._indices[memory_type].append(embedding)
 
+    def _ensure_index_fresh(self, memory_type: str) -> None:
+        """Rebuild index for a memory type if it has been marked dirty.
+
+        This implements lazy rebuilding - indexes are only rebuilt when
+        actually needed for search, not immediately on every delete.
+
+        Args:
+            memory_type: The type of memory index to check/rebuild.
+        """
+        if self._index_dirty.get(memory_type, False):
+            logger.debug(f"Rebuilding dirty index for {memory_type}")
+            self._load_faiss_indices([memory_type])
+
     def _add_to_index(
         self,
         memory_type: str,
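_ensure_index_fresh and the _index_dirty flags implement deferred invalidation: a delete flips a boolean in O(1), and the next search pays for at most one rebuild no matter how many deletes came before it. A standalone sketch of the same pattern (LazyIndex and its toy in-memory "index" are hypothetical, not the package's API):

# Toy dirty-flag index: deletes are cheap, rebuilds happen lazily at search.
from typing import Dict, List

class LazyIndex:
    def __init__(self) -> None:
        self._rows: Dict[str, List[str]] = {"heuristics": ["h-1", "h-2"]}
        self._index: Dict[str, List[str]] = {}
        self._dirty: Dict[str, bool] = {}
        self._rebuild("heuristics")

    def delete(self, memory_type: str, row_id: str) -> None:
        self._rows[memory_type].remove(row_id)
        self._dirty[memory_type] = True  # O(1): defer the expensive rebuild

    def search(self, memory_type: str, prefix: str) -> List[str]:
        if self._dirty.get(memory_type, False):
            self._rebuild(memory_type)  # pay the cost once, only when queried
        return [r for r in self._index[memory_type] if r.startswith(prefix)]

    def _rebuild(self, memory_type: str) -> None:
        # Stands in for reloading embeddings into a FAISS index.
        self._index[memory_type] = sorted(self._rows[memory_type])
        self._dirty[memory_type] = False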
@@ -296,6 +327,9 @@ class SQLiteStorage(StorageBackend):
         top_k: int,
     ) -> List[Tuple[str, float]]:
         """Search FAISS index for similar embeddings."""
+        # Ensure index is up-to-date before searching (lazy rebuild)
+        self._ensure_index_fresh(memory_type)
+
         if not self._id_maps[memory_type]:
             return []
 
@@ -304,10 +338,12 @@ class SQLiteStorage(StorageBackend):
         if FAISS_AVAILABLE:
             # Normalize for cosine similarity (IndexFlatIP)
             faiss.normalize_L2(query)
-            scores, indices = self._indices[memory_type].search(query, min(top_k, len(self._id_maps[memory_type])))
+            scores, indices = self._indices[memory_type].search(
+                query, min(top_k, len(self._id_maps[memory_type]))
+            )
 
             results = []
-            for score, idx in zip(scores[0], indices[0]):
+            for score, idx in zip(scores[0], indices[0], strict=False):
                 if idx >= 0 and idx < len(self._id_maps[memory_type]):
                     results.append((self._id_maps[memory_type][idx], float(score)))
             return results
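Besides the line-length rewrap, zip gains strict=False here; the strict keyword exists only on Python 3.10+, so the explicit default documents the intent of tolerating unequal lengths. The normalize-then-inner-product combination is what turns IndexFlatIP into cosine search; a self-contained sketch, assuming faiss and numpy are installed:

# Cosine similarity via inner product: L2-normalize stored vectors and the
# query, then IndexFlatIP scores are cosine similarities in [-1, 1].
import faiss
import numpy as np

dim = 8
vectors = np.random.rand(100, dim).astype("float32")
faiss.normalize_L2(vectors)  # normalize once at insert time
index = faiss.IndexFlatIP(dim)
index.add(vectors)

query = np.random.rand(1, dim).astype("float32")
faiss.normalize_L2(query)  # normalize the query the same way
scores, ids = index.search(query, 5)  # top-5 ids and cosine scores
print(ids[0], scores[0])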
@@ -354,7 +390,11 @@ class SQLiteStorage(StorageBackend):
                 heuristic.confidence,
                 heuristic.occurrence_count,
                 heuristic.success_count,
-                heuristic.last_validated.isoformat() if heuristic.last_validated else None,
+                (
+                    heuristic.last_validated.isoformat()
+                    if heuristic.last_validated
+                    else None
+                ),
                 heuristic.created_at.isoformat() if heuristic.created_at else None,
                 json.dumps(heuristic.metadata) if heuristic.metadata else None,
             ),
@@ -439,7 +479,11 @@ class SQLiteStorage(StorageBackend):
                 knowledge.fact,
                 knowledge.source,
                 knowledge.confidence,
-                knowledge.last_verified.isoformat() if knowledge.last_verified else None,
+                (
+                    knowledge.last_verified.isoformat()
+                    if knowledge.last_verified
+                    else None
+                ),
                 json.dumps(knowledge.metadata) if knowledge.metadata else None,
             ),
         )
@@ -468,9 +512,21 @@ class SQLiteStorage(StorageBackend):
                 anti_pattern.why_bad,
                 anti_pattern.better_alternative,
                 anti_pattern.occurrence_count,
-                anti_pattern.last_seen.isoformat() if anti_pattern.last_seen else None,
-                anti_pattern.created_at.isoformat() if anti_pattern.created_at else None,
-                json.dumps(anti_pattern.metadata) if anti_pattern.metadata else None,
+                (
+                    anti_pattern.last_seen.isoformat()
+                    if anti_pattern.last_seen
+                    else None
+                ),
+                (
+                    anti_pattern.created_at.isoformat()
+                    if anti_pattern.created_at
+                    else None
+                ),
+                (
+                    json.dumps(anti_pattern.metadata)
+                    if anti_pattern.metadata
+                    else None
+                ),
             ),
         )
 
@@ -479,6 +535,125 @@ class SQLiteStorage(StorageBackend):
         logger.debug(f"Saved anti-pattern: {anti_pattern.id}")
         return anti_pattern.id
 
+    # ==================== BATCH WRITE OPERATIONS ====================
+
+    def save_heuristics(self, heuristics: List[Heuristic]) -> List[str]:
+        """Save multiple heuristics in a batch using executemany."""
+        if not heuristics:
+            return []
+
+        with self._get_connection() as conn:
+            cursor = conn.cursor()
+            cursor.executemany(
+                """
+                INSERT OR REPLACE INTO heuristics
+                (id, agent, project_id, condition, strategy, confidence,
+                 occurrence_count, success_count, last_validated, created_at, metadata)
+                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                """,
+                [
+                    (
+                        h.id,
+                        h.agent,
+                        h.project_id,
+                        h.condition,
+                        h.strategy,
+                        h.confidence,
+                        h.occurrence_count,
+                        h.success_count,
+                        h.last_validated.isoformat() if h.last_validated else None,
+                        h.created_at.isoformat() if h.created_at else None,
+                        json.dumps(h.metadata) if h.metadata else None,
+                    )
+                    for h in heuristics
+                ],
+            )
+
+        # Add embeddings to index
+        for h in heuristics:
+            self._add_to_index("heuristics", h.id, h.embedding)
+
+        logger.debug(f"Batch saved {len(heuristics)} heuristics")
+        return [h.id for h in heuristics]
+
+    def save_outcomes(self, outcomes: List[Outcome]) -> List[str]:
+        """Save multiple outcomes in a batch using executemany."""
+        if not outcomes:
+            return []
+
+        with self._get_connection() as conn:
+            cursor = conn.cursor()
+            cursor.executemany(
+                """
+                INSERT OR REPLACE INTO outcomes
+                (id, agent, project_id, task_type, task_description, success,
+                 strategy_used, duration_ms, error_message, user_feedback, timestamp, metadata)
+                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                """,
+                [
+                    (
+                        o.id,
+                        o.agent,
+                        o.project_id,
+                        o.task_type,
+                        o.task_description,
+                        1 if o.success else 0,
+                        o.strategy_used,
+                        o.duration_ms,
+                        o.error_message,
+                        o.user_feedback,
+                        o.timestamp.isoformat() if o.timestamp else None,
+                        json.dumps(o.metadata) if o.metadata else None,
+                    )
+                    for o in outcomes
+                ],
+            )
+
+        # Add embeddings to index
+        for o in outcomes:
+            self._add_to_index("outcomes", o.id, o.embedding)
+
+        logger.debug(f"Batch saved {len(outcomes)} outcomes")
+        return [o.id for o in outcomes]
+
+    def save_domain_knowledge_batch(
+        self, knowledge_items: List[DomainKnowledge]
+    ) -> List[str]:
+        """Save multiple domain knowledge items in a batch using executemany."""
+        if not knowledge_items:
+            return []
+
+        with self._get_connection() as conn:
+            cursor = conn.cursor()
+            cursor.executemany(
+                """
+                INSERT OR REPLACE INTO domain_knowledge
+                (id, agent, project_id, domain, fact, source, confidence, last_verified, metadata)
+                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+                """,
+                [
+                    (
+                        k.id,
+                        k.agent,
+                        k.project_id,
+                        k.domain,
+                        k.fact,
+                        k.source,
+                        k.confidence,
+                        k.last_verified.isoformat() if k.last_verified else None,
+                        json.dumps(k.metadata) if k.metadata else None,
+                    )
+                    for k in knowledge_items
+                ],
+            )
+
+        # Add embeddings to index
+        for k in knowledge_items:
+            self._add_to_index("domain_knowledge", k.id, k.embedding)
+
+        logger.debug(f"Batch saved {len(knowledge_items)} domain knowledge items")
+        return [k.id for k in knowledge_items]
+
     # ==================== READ OPERATIONS ====================
 
     def get_heuristics(
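These batch writers replace per-item INSERTs with a single executemany inside one connection context, amortizing statement preparation and commit overhead across the whole batch. A minimal sketch of the pattern against an illustrative two-column table:

# One prepared statement, one transaction, many rows.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE heuristics (id TEXT PRIMARY KEY, confidence REAL)")

rows = [(f"h-{i}", i / 1000) for i in range(1000)]
with conn:  # a single implicit transaction wraps the whole batch
    conn.executemany(
        "INSERT OR REPLACE INTO heuristics (id, confidence) VALUES (?, ?)",
        rows,
    )
print(conn.execute("SELECT COUNT(*) FROM heuristics").fetchone()[0])  # 1000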
@@ -596,7 +771,9 @@ class SQLiteStorage(StorageBackend):
         """Get domain knowledge with optional vector search."""
         candidate_ids = None
         if embedding:
-            search_results = self._search_index("domain_knowledge", embedding, top_k * 2)
+            search_results = self._search_index(
+                "domain_knowledge", embedding, top_k * 2
+            )
             candidate_ids = [id for id, _ in search_results]
 
         with self._get_connection() as conn:
@@ -662,6 +839,175 @@ class SQLiteStorage(StorageBackend):
 
         return [self._row_to_anti_pattern(row) for row in rows]
 
+    # ==================== MULTI-AGENT MEMORY SHARING ====================
+
+    def get_heuristics_for_agents(
+        self,
+        project_id: str,
+        agents: List[str],
+        embedding: Optional[List[float]] = None,
+        top_k: int = 5,
+        min_confidence: float = 0.0,
+    ) -> List[Heuristic]:
+        """Get heuristics from multiple agents using optimized IN query."""
+        if not agents:
+            return []
+
+        candidate_ids = None
+        if embedding:
+            search_results = self._search_index(
+                "heuristics", embedding, top_k * 2 * len(agents)
+            )
+            candidate_ids = [id for id, _ in search_results]
+
+        with self._get_connection() as conn:
+            cursor = conn.cursor()
+
+            placeholders = ",".join("?" * len(agents))
+            query = f"SELECT * FROM heuristics WHERE project_id = ? AND confidence >= ? AND agent IN ({placeholders})"
+            params: List[Any] = [project_id, min_confidence] + list(agents)
+
+            if candidate_ids is not None:
+                id_placeholders = ",".join("?" * len(candidate_ids))
+                query += f" AND id IN ({id_placeholders})"
+                params.extend(candidate_ids)
+
+            query += " ORDER BY confidence DESC LIMIT ?"
+            params.append(top_k * len(agents))
+
+            cursor.execute(query, params)
+            rows = cursor.fetchall()
+
+        return [self._row_to_heuristic(row) for row in rows]
+
+    def get_outcomes_for_agents(
+        self,
+        project_id: str,
+        agents: List[str],
+        task_type: Optional[str] = None,
+        embedding: Optional[List[float]] = None,
+        top_k: int = 5,
+        success_only: bool = False,
+    ) -> List[Outcome]:
+        """Get outcomes from multiple agents using optimized IN query."""
+        if not agents:
+            return []
+
+        candidate_ids = None
+        if embedding:
+            search_results = self._search_index(
+                "outcomes", embedding, top_k * 2 * len(agents)
+            )
+            candidate_ids = [id for id, _ in search_results]
+
+        with self._get_connection() as conn:
+            cursor = conn.cursor()
+
+            placeholders = ",".join("?" * len(agents))
+            query = f"SELECT * FROM outcomes WHERE project_id = ? AND agent IN ({placeholders})"
+            params: List[Any] = [project_id] + list(agents)
+
+            if task_type:
+                query += " AND task_type = ?"
+                params.append(task_type)
+
+            if success_only:
+                query += " AND success = 1"
+
+            if candidate_ids is not None:
+                id_placeholders = ",".join("?" * len(candidate_ids))
+                query += f" AND id IN ({id_placeholders})"
+                params.extend(candidate_ids)
+
+            query += " ORDER BY timestamp DESC LIMIT ?"
+            params.append(top_k * len(agents))
+
+            cursor.execute(query, params)
+            rows = cursor.fetchall()
+
+        return [self._row_to_outcome(row) for row in rows]
+
+    def get_domain_knowledge_for_agents(
+        self,
+        project_id: str,
+        agents: List[str],
+        domain: Optional[str] = None,
+        embedding: Optional[List[float]] = None,
+        top_k: int = 5,
+    ) -> List[DomainKnowledge]:
+        """Get domain knowledge from multiple agents using optimized IN query."""
+        if not agents:
+            return []
+
+        candidate_ids = None
+        if embedding:
+            search_results = self._search_index(
+                "domain_knowledge", embedding, top_k * 2 * len(agents)
+            )
+            candidate_ids = [id for id, _ in search_results]
+
+        with self._get_connection() as conn:
+            cursor = conn.cursor()
+
+            placeholders = ",".join("?" * len(agents))
+            query = f"SELECT * FROM domain_knowledge WHERE project_id = ? AND agent IN ({placeholders})"
+            params: List[Any] = [project_id] + list(agents)
+
+            if domain:
+                query += " AND domain = ?"
+                params.append(domain)
+
+            if candidate_ids is not None:
+                id_placeholders = ",".join("?" * len(candidate_ids))
+                query += f" AND id IN ({id_placeholders})"
+                params.extend(candidate_ids)
+
+            query += " ORDER BY confidence DESC LIMIT ?"
+            params.append(top_k * len(agents))
+
+            cursor.execute(query, params)
+            rows = cursor.fetchall()
+
+        return [self._row_to_domain_knowledge(row) for row in rows]
+
+    def get_anti_patterns_for_agents(
+        self,
+        project_id: str,
+        agents: List[str],
+        embedding: Optional[List[float]] = None,
+        top_k: int = 5,
+    ) -> List[AntiPattern]:
+        """Get anti-patterns from multiple agents using optimized IN query."""
+        if not agents:
+            return []
+
+        candidate_ids = None
+        if embedding:
+            search_results = self._search_index(
+                "anti_patterns", embedding, top_k * 2 * len(agents)
+            )
+            candidate_ids = [id for id, _ in search_results]
+
+        with self._get_connection() as conn:
+            cursor = conn.cursor()
+
+            placeholders = ",".join("?" * len(agents))
+            query = f"SELECT * FROM anti_patterns WHERE project_id = ? AND agent IN ({placeholders})"
+            params: List[Any] = [project_id] + list(agents)
+
+            if candidate_ids is not None:
+                id_placeholders = ",".join("?" * len(candidate_ids))
+                query += f" AND id IN ({id_placeholders})"
+                params.extend(candidate_ids)
+
+            query += " ORDER BY occurrence_count DESC LIMIT ?"
+            params.append(top_k * len(agents))
+
+            cursor.execute(query, params)
+            rows = cursor.fetchall()
+
+        return [self._row_to_anti_pattern(row) for row in rows]
+
     # ==================== UPDATE OPERATIONS ====================
 
     def update_heuristic(
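All four _for_agents readers build their IN clauses the same way: one ? placeholder per value, joined into the SQL, with the values bound as parameters so they are never interpolated into the query text. A condensed sketch of that pattern (fetch_for_agents is illustrative, not a package function):

# Parameterized IN clause: placeholder count matches the list length exactly.
import sqlite3
from typing import Any, List

def fetch_for_agents(
    conn: sqlite3.Connection, project_id: str, agents: List[str]
) -> List[Any]:
    if not agents:
        return []  # mirror the early return used by the package's readers
    placeholders = ",".join("?" * len(agents))
    query = f"SELECT * FROM heuristics WHERE project_id = ? AND agent IN ({placeholders})"
    return conn.execute(query, [project_id, *agents]).fetchall()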
@@ -944,7 +1290,7 @@ class SQLiteStorage(StorageBackend):
         with self._get_connection() as conn:
             # Also remove from embedding index
             conn.execute(
-                "DELETE FROM embeddings WHERE memory_type = 'heuristic' AND memory_id = ?",
+                "DELETE FROM embeddings WHERE memory_type = 'heuristics' AND memory_id = ?",
                 (heuristic_id,),
             )
             cursor = conn.execute(
@@ -952,9 +1298,8 @@ class SQLiteStorage(StorageBackend):
                 (heuristic_id,),
             )
             if cursor.rowcount > 0:
-                # Rebuild index if we had one
-                if "heuristic" in self._indices:
-                    self._load_faiss_indices()
+                # Mark index as dirty for lazy rebuild on next search
+                self._index_dirty["heuristics"] = True
                 return True
             return False
 
@@ -963,7 +1308,7 @@ class SQLiteStorage(StorageBackend):
         with self._get_connection() as conn:
             # Also remove from embedding index
             conn.execute(
-                "DELETE FROM embeddings WHERE memory_type = 'outcome' AND memory_id = ?",
+                "DELETE FROM embeddings WHERE memory_type = 'outcomes' AND memory_id = ?",
                 (outcome_id,),
             )
             cursor = conn.execute(
@@ -971,8 +1316,8 @@ class SQLiteStorage(StorageBackend):
                 (outcome_id,),
             )
             if cursor.rowcount > 0:
-                if "outcome" in self._indices:
-                    self._load_faiss_indices()
+                # Mark index as dirty for lazy rebuild on next search
+                self._index_dirty["outcomes"] = True
                 return True
             return False
 
@@ -989,8 +1334,8 @@ class SQLiteStorage(StorageBackend):
                 (knowledge_id,),
             )
             if cursor.rowcount > 0:
-                if "domain_knowledge" in self._indices:
-                    self._load_faiss_indices()
+                # Mark index as dirty for lazy rebuild on next search
+                self._index_dirty["domain_knowledge"] = True
                 return True
             return False
 
@@ -999,7 +1344,7 @@ class SQLiteStorage(StorageBackend):
         with self._get_connection() as conn:
             # Also remove from embedding index
             conn.execute(
-                "DELETE FROM embeddings WHERE memory_type = 'anti_pattern' AND memory_id = ?",
+                "DELETE FROM embeddings WHERE memory_type = 'anti_patterns' AND memory_id = ?",
                 (anti_pattern_id,),
            )
             cursor = conn.execute(
@@ -1007,7 +1352,7 @@ class SQLiteStorage(StorageBackend):
                 (anti_pattern_id,),
             )
             if cursor.rowcount > 0:
-                if "anti_pattern" in self._indices:
-                    self._load_faiss_indices()
+                # Mark index as dirty for lazy rebuild on next search
+                self._index_dirty["anti_patterns"] = True
                 return True
             return False
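Two threads run through these delete hunks: the embeddings cleanup now uses the plural memory_type keys ('heuristics', not 'heuristic'), matching the keys used at save time so deletes no longer leave orphaned embedding rows, and every delete now routes index maintenance through the dirty flags. A hypothetical usage flow; the constructor argument, delete method names, and embedding dimension are assumed from this diff rather than confirmed against the package:

# Assumed API sketch: many cheap deletes, one rebuild at the next search.
from alma.storage.sqlite_local import SQLiteStorage

storage = SQLiteStorage(db_path="alma.db")  # argument name assumed
storage.delete_heuristic("h-123")  # flips _index_dirty["heuristics"], O(1)
storage.delete_heuristic("h-456")  # still no rebuild
# The next vector search calls _ensure_index_fresh and rebuilds exactly once.
results = storage.get_heuristics(
    project_id="demo",
    agent="planner",        # parameter assumed by analogy with *_for_agents
    embedding=[0.0] * 384,  # embedding dimension assumed
    top_k=5,
)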