alma-memory 0.4.0__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. alma/__init__.py +88 -44
  2. alma/confidence/__init__.py +1 -1
  3. alma/confidence/engine.py +92 -58
  4. alma/confidence/types.py +34 -14
  5. alma/config/loader.py +3 -2
  6. alma/consolidation/__init__.py +23 -0
  7. alma/consolidation/engine.py +678 -0
  8. alma/consolidation/prompts.py +84 -0
  9. alma/core.py +15 -15
  10. alma/domains/__init__.py +6 -6
  11. alma/domains/factory.py +12 -9
  12. alma/domains/schemas.py +17 -3
  13. alma/domains/types.py +8 -4
  14. alma/events/__init__.py +75 -0
  15. alma/events/emitter.py +284 -0
  16. alma/events/storage_mixin.py +246 -0
  17. alma/events/types.py +126 -0
  18. alma/events/webhook.py +425 -0
  19. alma/exceptions.py +49 -0
  20. alma/extraction/__init__.py +31 -0
  21. alma/extraction/auto_learner.py +264 -0
  22. alma/extraction/extractor.py +420 -0
  23. alma/graph/__init__.py +81 -0
  24. alma/graph/backends/__init__.py +18 -0
  25. alma/graph/backends/memory.py +236 -0
  26. alma/graph/backends/neo4j.py +417 -0
  27. alma/graph/base.py +159 -0
  28. alma/graph/extraction.py +198 -0
  29. alma/graph/store.py +860 -0
  30. alma/harness/__init__.py +4 -4
  31. alma/harness/base.py +18 -9
  32. alma/harness/domains.py +27 -11
  33. alma/initializer/__init__.py +1 -1
  34. alma/initializer/initializer.py +51 -43
  35. alma/initializer/types.py +25 -17
  36. alma/integration/__init__.py +9 -9
  37. alma/integration/claude_agents.py +10 -10
  38. alma/integration/helena.py +32 -22
  39. alma/integration/victor.py +57 -33
  40. alma/learning/__init__.py +27 -27
  41. alma/learning/forgetting.py +198 -148
  42. alma/learning/heuristic_extractor.py +40 -24
  43. alma/learning/protocols.py +62 -14
  44. alma/learning/validation.py +7 -2
  45. alma/mcp/__init__.py +4 -4
  46. alma/mcp/__main__.py +2 -1
  47. alma/mcp/resources.py +17 -16
  48. alma/mcp/server.py +102 -44
  49. alma/mcp/tools.py +174 -37
  50. alma/progress/__init__.py +3 -3
  51. alma/progress/tracker.py +26 -20
  52. alma/progress/types.py +8 -12
  53. alma/py.typed +0 -0
  54. alma/retrieval/__init__.py +11 -11
  55. alma/retrieval/cache.py +20 -21
  56. alma/retrieval/embeddings.py +4 -4
  57. alma/retrieval/engine.py +114 -35
  58. alma/retrieval/scoring.py +73 -63
  59. alma/session/__init__.py +2 -2
  60. alma/session/manager.py +5 -5
  61. alma/session/types.py +5 -4
  62. alma/storage/__init__.py +41 -0
  63. alma/storage/azure_cosmos.py +101 -31
  64. alma/storage/base.py +157 -4
  65. alma/storage/chroma.py +1443 -0
  66. alma/storage/file_based.py +56 -20
  67. alma/storage/pinecone.py +1080 -0
  68. alma/storage/postgresql.py +1452 -0
  69. alma/storage/qdrant.py +1306 -0
  70. alma/storage/sqlite_local.py +376 -31
  71. alma/types.py +62 -14
  72. alma_memory-0.5.0.dist-info/METADATA +905 -0
  73. alma_memory-0.5.0.dist-info/RECORD +76 -0
  74. {alma_memory-0.4.0.dist-info → alma_memory-0.5.0.dist-info}/WHEEL +1 -1
  75. alma_memory-0.4.0.dist-info/METADATA +0 -488
  76. alma_memory-0.4.0.dist-info/RECORD +0 -52
  77. {alma_memory-0.4.0.dist-info → alma_memory-0.5.0.dist-info}/top_level.txt +0 -0
alma/consolidation/prompts.py ADDED
@@ -0,0 +1,84 @@
+ """
+ ALMA Consolidation Prompts.
+
+ LLM prompts for intelligently merging similar memories.
+ """
+
+ # Prompt for merging multiple similar heuristics into one
+ MERGE_HEURISTICS_PROMPT = """You are a memory consolidation agent. Given these similar heuristics that have been identified as near-duplicates based on semantic similarity, create a single consolidated heuristic that captures the essence of all.
+
+ Similar Heuristics:
+ {heuristics}
+
+ Create a consolidated heuristic that:
+ 1. Generalizes the condition to cover all cases
+ 2. Combines the strategies into a comprehensive approach
+ 3. Preserves any unique insights from individual heuristics
+
+ Output a JSON object with exactly these fields:
+ {{
+ "condition": "The generalized condition that triggers this heuristic",
+ "strategy": "The merged strategy combining all approaches",
+ "confidence": <average confidence as a float between 0 and 1>
+ }}
+
+ Only output the JSON object, no other text."""
+
+ # Prompt for merging similar domain knowledge
+ MERGE_DOMAIN_KNOWLEDGE_PROMPT = """You are a memory consolidation agent. Given these similar domain knowledge facts that have been identified as near-duplicates, create a single consolidated fact that captures all the information.
+
+ Similar Domain Knowledge:
+ {knowledge_items}
+
+ Create a consolidated fact that:
+ 1. Combines all unique information
+ 2. Removes redundancy
+ 3. Maintains accuracy
+
+ Output a JSON object with exactly these fields:
+ {{
+ "fact": "The consolidated fact combining all information",
+ "confidence": <average confidence as a float between 0 and 1>
+ }}
+
+ Only output the JSON object, no other text."""
+
+ # Prompt for merging anti-patterns
+ MERGE_ANTI_PATTERNS_PROMPT = """You are a memory consolidation agent. Given these similar anti-patterns that have been identified as near-duplicates, create a single consolidated anti-pattern.
+
+ Similar Anti-Patterns:
+ {anti_patterns}
+
+ Create a consolidated anti-pattern that:
+ 1. Generalizes the pattern description
+ 2. Combines all reasons why it's bad
+ 3. Provides a comprehensive alternative
+
+ Output a JSON object with exactly these fields:
+ {{
+ "pattern": "The generalized pattern to avoid",
+ "why_bad": "Combined explanation of why this pattern is problematic",
+ "better_alternative": "The recommended alternative approach"
+ }}
+
+ Only output the JSON object, no other text."""
+
+ # Prompt for merging outcomes (typically used for summarization rather than true merge)
+ MERGE_OUTCOMES_PROMPT = """You are a memory consolidation agent. Given these similar task outcomes, create a summary that captures the key learnings.
+
+ Similar Outcomes:
+ {outcomes}
+
+ Create a summary that:
+ 1. Identifies the common task type
+ 2. Notes the overall success/failure pattern
+ 3. Highlights effective strategies
+
+ Output a JSON object with exactly these fields:
+ {{
+ "task_type": "The common task type",
+ "summary": "Summary of the outcomes and learnings",
+ "recommended_strategy": "The most effective strategy based on the outcomes"
+ }}
+
+ Only output the JSON object, no other text."""
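For orientation (not part of the package diff): the templates above use `str.format` placeholders such as `{heuristics}`, with doubled braces escaping the literal JSON skeleton. A minimal usage sketch, assuming a caller-supplied `call_llm` completion function (hypothetical; the actual consolidation flow lives in the new alma/consolidation/engine.py and may differ):

```python
import json

from alma.consolidation.prompts import MERGE_HEURISTICS_PROMPT


def consolidate_heuristics(heuristics, call_llm):
    """Render the merge prompt, query an LLM, and parse the JSON reply.

    `call_llm` is a hypothetical stand-in for whatever completion function
    you use; it should accept a prompt string and return the raw model text.
    """
    # Render the near-duplicate heuristics into the {heuristics} placeholder
    rendered = "\n".join(
        f"- condition: {h['condition']}\n  strategy: {h['strategy']}"
        f" (confidence: {h['confidence']})"
        for h in heuristics
    )
    prompt = MERGE_HEURISTICS_PROMPT.format(heuristics=rendered)
    reply = call_llm(prompt)
    # Expected shape per the prompt: condition, strategy, confidence
    return json.loads(reply)
```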
alma/core.py CHANGED
@@ -2,24 +2,19 @@
  ALMA Core - Main interface for the Agent Learning Memory Architecture.
  """
 
- from typing import Optional, Dict, Any, List
- from pathlib import Path
- import yaml
  import logging
+ from typing import Any, Dict, Optional
 
+ from alma.config.loader import ConfigLoader
+ from alma.learning.protocols import LearningProtocol
+ from alma.retrieval.engine import RetrievalEngine
+ from alma.storage.base import StorageBackend
  from alma.types import (
- MemorySlice,
+ DomainKnowledge,
  MemoryScope,
- Heuristic,
- Outcome,
+ MemorySlice,
  UserPreference,
- DomainKnowledge,
- AntiPattern,
  )
- from alma.storage.base import StorageBackend
- from alma.retrieval.engine import RetrievalEngine
- from alma.learning.protocols import LearningProtocol
- from alma.config.loader import ConfigLoader
 
  logger = logging.getLogger(__name__)
 
@@ -114,12 +109,19 @@ class ALMA:
 
  if storage_type == "azure":
  from alma.storage.azure_cosmos import AzureCosmosStorage
+
  return AzureCosmosStorage.from_config(config)
+ elif storage_type == "postgres":
+ from alma.storage.postgresql import PostgreSQLStorage
+
+ return PostgreSQLStorage.from_config(config)
  elif storage_type == "sqlite":
  from alma.storage.sqlite_local import SQLiteStorage
+
  return SQLiteStorage.from_config(config)
  else:
  from alma.storage.file_based import FileBasedStorage
+
  return FileBasedStorage.from_config(config)
 
  def retrieve(
@@ -255,9 +257,7 @@
  # Check scope
  scope = self.scopes.get(agent)
  if scope and not scope.is_allowed(domain):
- logger.warning(
- f"Agent '{agent}' not allowed to learn in domain '{domain}'"
- )
+ logger.warning(f"Agent '{agent}' not allowed to learn in domain '{domain}'")
  return None
 
  result = self.learning.add_domain_knowledge(
alma/domains/__init__.py CHANGED
@@ -5,17 +5,17 @@ Provides domain-agnostic memory schemas and factory pattern
  for creating domain-specific ALMA instances.
  """
 
- from alma.domains.types import (
- DomainSchema,
- EntityType,
- RelationshipType,
- )
  from alma.domains.factory import DomainMemoryFactory
  from alma.domains.schemas import (
  get_coding_schema,
+ get_general_schema,
  get_research_schema,
  get_sales_schema,
- get_general_schema,
+ )
+ from alma.domains.types import (
+ DomainSchema,
+ EntityType,
+ RelationshipType,
  )
 
  __all__ = [
alma/domains/factory.py CHANGED
@@ -4,18 +4,18 @@ Domain Memory Factory.
  Factory pattern for creating domain-specific ALMA instances.
  """
 
- from typing import Dict, Any, Optional, List, Type
  import logging
+ from typing import Any, Dict, List, Optional
 
- from alma.domains.types import DomainSchema, EntityType, RelationshipType
  from alma.domains.schemas import (
  get_coding_schema,
+ get_content_creation_schema,
+ get_customer_support_schema,
+ get_general_schema,
  get_research_schema,
  get_sales_schema,
- get_general_schema,
- get_customer_support_schema,
- get_content_creation_schema,
  )
+ from alma.domains.types import DomainSchema
 
  logger = logging.getLogger(__name__)
 
@@ -124,7 +124,9 @@ class DomainMemoryFactory:
  description=config.get("description", f"Custom schema: {name}"),
  learning_categories=config.get("learning_categories", []),
  excluded_categories=config.get("excluded_categories", []),
- min_occurrences_for_heuristic=config.get("min_occurrences_for_heuristic", 3),
+ min_occurrences_for_heuristic=config.get(
+ "min_occurrences_for_heuristic", 3
+ ),
  confidence_decay_days=config.get("confidence_decay_days", 30.0),
  )
 
@@ -200,16 +202,17 @@
  - Initialize domain-specific entity tracking
  """
  # Import here to avoid circular dependency
- from alma.storage.file_based import FileBasedStorage
- from alma.retrieval import RetrievalEngine
+ from alma import ALMA
  from alma.learning import LearningProtocol
+ from alma.retrieval import RetrievalEngine
+ from alma.storage.file_based import FileBasedStorage
  from alma.types import MemoryScope
- from alma import ALMA
 
  # Create storage if not provided
  if storage is None:
  import tempfile
  from pathlib import Path
+
  storage_dir = Path(tempfile.mkdtemp()) / ".alma" / project_id
  storage = FileBasedStorage(storage_dir)
 
alma/domains/schemas.py CHANGED
@@ -4,7 +4,7 @@ Pre-built Domain Schemas.
  Standard domain schemas for common use cases.
  """
 
- from alma.domains.types import DomainSchema, EntityType, RelationshipType
+ from alma.domains.types import DomainSchema
 
 
  def get_coding_schema() -> DomainSchema:
@@ -110,12 +110,26 @@ def get_research_schema() -> DomainSchema:
  schema.add_entity_type(
  name="paper",
  description="An academic paper or article",
- attributes=["title", "authors", "year", "citations", "abstract", "venue", "doi"],
+ attributes=[
+ "title",
+ "authors",
+ "year",
+ "citations",
+ "abstract",
+ "venue",
+ "doi",
+ ],
  )
  schema.add_entity_type(
  name="hypothesis",
  description="A research hypothesis",
- attributes=["statement", "confidence", "evidence_for", "evidence_against", "status"],
+ attributes=[
+ "statement",
+ "confidence",
+ "evidence_for",
+ "evidence_against",
+ "status",
+ ],
  )
  schema.add_entity_type(
  name="experiment",
alma/domains/types.py CHANGED
@@ -4,10 +4,10 @@ Domain Memory Types.
  Data models for domain-specific memory schemas.
  """
 
+ import uuid
  from dataclasses import dataclass, field
  from datetime import datetime, timezone
- from typing import List, Dict, Any, Optional
- import uuid
+ from typing import Any, Dict, List, Optional
 
 
  @dataclass
@@ -21,11 +21,15 @@ class EntityType:
 
  name: str # "feature", "test", "paper", "lead"
  description: str
- attributes: List[str] = field(default_factory=list) # ["status", "priority", "owner"]
+ attributes: List[str] = field(
+ default_factory=list
+ ) # ["status", "priority", "owner"]
 
  # Optional schema validation
  required_attributes: List[str] = field(default_factory=list)
- attribute_types: Dict[str, str] = field(default_factory=dict) # attr -> "str", "int", "bool"
+ attribute_types: Dict[str, str] = field(
+ default_factory=dict
+ ) # attr -> "str", "int", "bool"
 
  def validate_entity(self, entity: Dict[str, Any]) -> List[str]:
  """Validate an entity instance against this type."""
alma/events/__init__.py ADDED
@@ -0,0 +1,75 @@
+ """
+ ALMA Event System.
+
+ Provides event emission and webhook delivery for memory operations.
+
+ The event system allows external systems to react to memory changes through:
+ 1. In-process callbacks (subscribe to event types)
+ 2. Webhooks (HTTP delivery with signatures)
+
+ Example - In-process subscription:
+ ```python
+ from alma.events import get_emitter, MemoryEventType
+
+ def on_memory_created(event):
+ print(f"Memory created: {event.memory_id}")
+
+ emitter = get_emitter()
+ emitter.subscribe(MemoryEventType.CREATED, on_memory_created)
+ ```
+
+ Example - Webhook delivery:
+ ```python
+ from alma.events import WebhookConfig, WebhookManager, get_emitter
+
+ manager = WebhookManager()
+ manager.add_webhook(WebhookConfig(
+ url="https://example.com/webhook",
+ events=[MemoryEventType.CREATED, MemoryEventType.UPDATED],
+ secret="my-webhook-secret"
+ ))
+ manager.start(get_emitter())
+ ```
+ """
+
+ from alma.events.emitter import (
+ EventEmitter,
+ get_emitter,
+ reset_emitter,
+ )
+ from alma.events.storage_mixin import (
+ EventAwareStorageMixin,
+ emit_on_save,
+ )
+ from alma.events.types import (
+ MemoryEvent,
+ MemoryEventType,
+ create_memory_event,
+ )
+ from alma.events.webhook import (
+ WebhookConfig,
+ WebhookDelivery,
+ WebhookDeliveryResult,
+ WebhookDeliveryStatus,
+ WebhookManager,
+ )
+
+ __all__ = [
+ # Types
+ "MemoryEvent",
+ "MemoryEventType",
+ "create_memory_event",
+ # Emitter
+ "EventEmitter",
+ "get_emitter",
+ "reset_emitter",
+ # Webhook
+ "WebhookConfig",
+ "WebhookDelivery",
+ "WebhookDeliveryResult",
+ "WebhookDeliveryStatus",
+ "WebhookManager",
+ # Storage Mixin
+ "EventAwareStorageMixin",
+ "emit_on_save",
+ ]
alma/events/emitter.py ADDED
@@ -0,0 +1,284 @@
+ """
+ ALMA Event Emitter.
+
+ Provides a pub/sub mechanism for memory events, allowing components
+ and external systems to subscribe to and receive notifications about
+ memory changes.
+ """
+
+ import asyncio
+ import logging
+ from concurrent.futures import ThreadPoolExecutor
+ from typing import Awaitable, Callable, Dict, List, Optional, Union
+
+ from alma.events.types import MemoryEvent, MemoryEventType
+
+ logger = logging.getLogger(__name__)
+
+ # Type aliases for callbacks
+ SyncCallback = Callable[[MemoryEvent], None]
+ AsyncCallback = Callable[[MemoryEvent], Awaitable[None]]
+ EventCallback = Union[SyncCallback, AsyncCallback]
+
+
+ class EventEmitter:
+ """
+ Event emitter for memory system events.
+
+ Supports both synchronous and asynchronous callbacks, with options
+ to subscribe to specific event types or all events.
+
+ The emitter is designed to be non-blocking - callbacks are executed
+ in a way that doesn't slow down the main storage operations.
+
+ Example:
+ ```python
+ emitter = EventEmitter()
+
+ def on_created(event: MemoryEvent):
+ print(f"Memory created: {event.memory_id}")
+
+ emitter.subscribe(MemoryEventType.CREATED, on_created)
+ emitter.emit(event)
+ ```
+ """
+
+ def __init__(self, max_workers: int = 4):
+ """
+ Initialize the event emitter.
+
+ Args:
+ max_workers: Maximum number of worker threads for async callback execution
+ """
+ self._subscribers: Dict[MemoryEventType, List[EventCallback]] = {}
+ self._global_subscribers: List[EventCallback] = []
+ self._executor = ThreadPoolExecutor(max_workers=max_workers)
+ self._enabled = True
+
+ def subscribe(
+ self,
+ event_type: MemoryEventType,
+ callback: EventCallback,
+ ) -> None:
+ """
+ Subscribe to a specific event type.
+
+ Args:
+ event_type: The type of event to subscribe to
+ callback: Function to call when event occurs (sync or async)
+ """
+ if event_type not in self._subscribers:
+ self._subscribers[event_type] = []
+
+ if callback not in self._subscribers[event_type]:
+ self._subscribers[event_type].append(callback)
+ callback_name = getattr(callback, "__name__", repr(callback))
+ logger.debug(f"Subscribed to {event_type.value}: {callback_name}")
+
+ def subscribe_all(self, callback: EventCallback) -> None:
+ """
+ Subscribe to all events.
+
+ Args:
+ callback: Function to call for any event
+ """
+ if callback not in self._global_subscribers:
+ self._global_subscribers.append(callback)
+ callback_name = getattr(callback, "__name__", repr(callback))
+ logger.debug(f"Subscribed to all events: {callback_name}")
+
+ def unsubscribe(
+ self,
+ event_type: MemoryEventType,
+ callback: EventCallback,
+ ) -> bool:
+ """
+ Unsubscribe from a specific event type.
+
+ Args:
+ event_type: The event type to unsubscribe from
+ callback: The callback to remove
+
+ Returns:
+ True if callback was removed, False if not found
+ """
+ if event_type in self._subscribers:
+ try:
+ self._subscribers[event_type].remove(callback)
+ callback_name = getattr(callback, "__name__", repr(callback))
+ logger.debug(f"Unsubscribed from {event_type.value}: {callback_name}")
+ return True
+ except ValueError:
+ pass
+ return False
+
+ def unsubscribe_all(self, callback: EventCallback) -> bool:
+ """
+ Unsubscribe a callback from all events.
+
+ Args:
+ callback: The callback to remove
+
+ Returns:
+ True if callback was removed, False if not found
+ """
+ try:
+ self._global_subscribers.remove(callback)
+ callback_name = getattr(callback, "__name__", repr(callback))
+ logger.debug(f"Unsubscribed from all events: {callback_name}")
+ return True
+ except ValueError:
+ return False
+
+ def has_subscribers(self, event_type: Optional[MemoryEventType] = None) -> bool:
+ """
+ Check if there are any subscribers.
+
+ Args:
+ event_type: Optional specific event type to check
+
+ Returns:
+ True if there are subscribers
+ """
+ if event_type is None:
+ return bool(self._global_subscribers) or any(
+ bool(subs) for subs in self._subscribers.values()
+ )
+ return bool(self._subscribers.get(event_type)) or bool(self._global_subscribers)
+
+ def emit(self, event: MemoryEvent) -> None:
+ """
+ Emit an event to all matching subscribers (non-blocking).
+
+ Callbacks are executed in a thread pool to avoid blocking
+ the main thread. Any exceptions in callbacks are logged
+ but do not propagate.
+
+ Args:
+ event: The event to emit
+ """
+ if not self._enabled:
+ return
+
+ callbacks = self._get_callbacks(event.event_type)
+ if not callbacks:
+ return
+
+ # Execute callbacks in thread pool (non-blocking)
+ for callback in callbacks:
+ self._executor.submit(self._safe_call, callback, event)
+
+ async def emit_async(self, event: MemoryEvent) -> None:
+ """
+ Emit an event to all matching subscribers asynchronously.
+
+ For async callbacks, awaits them directly. For sync callbacks,
+ runs them in the executor.
+
+ Args:
+ event: The event to emit
+ """
+ if not self._enabled:
+ return
+
+ callbacks = self._get_callbacks(event.event_type)
+ if not callbacks:
+ return
+
+ tasks = []
+ for callback in callbacks:
+ if asyncio.iscoroutinefunction(callback):
+ tasks.append(self._safe_call_async(callback, event))
+ else:
+ # Run sync callbacks in executor
+ loop = asyncio.get_event_loop()
+ tasks.append(
+ loop.run_in_executor(
+ self._executor,
+ self._safe_call,
+ callback,
+ event,
+ )
+ )
+
+ if tasks:
+ await asyncio.gather(*tasks, return_exceptions=True)
+
+ def _get_callbacks(self, event_type: MemoryEventType) -> List[EventCallback]:
+ """Get all callbacks for an event type."""
+ callbacks = list(self._global_subscribers)
+ callbacks.extend(self._subscribers.get(event_type, []))
+ return callbacks
+
+ def _safe_call(self, callback: SyncCallback, event: MemoryEvent) -> None:
+ """Safely call a sync callback, catching exceptions."""
+ try:
+ callback(event)
+ except Exception as e:
+ callback_name = getattr(callback, "__name__", repr(callback))
+ logger.error(
+ f"Error in event callback {callback_name}: {e}",
+ exc_info=True,
+ )
+
+ async def _safe_call_async(
+ self,
+ callback: AsyncCallback,
+ event: MemoryEvent,
+ ) -> None:
+ """Safely call an async callback, catching exceptions."""
+ try:
+ await callback(event)
+ except Exception as e:
+ callback_name = getattr(callback, "__name__", repr(callback))
+ logger.error(
+ f"Error in async event callback {callback_name}: {e}",
+ exc_info=True,
+ )
+
+ def enable(self) -> None:
+ """Enable event emission."""
+ self._enabled = True
+
+ def disable(self) -> None:
+ """Disable event emission (events will be silently dropped)."""
+ self._enabled = False
+
+ def clear(self) -> None:
+ """Remove all subscribers."""
+ self._subscribers.clear()
+ self._global_subscribers.clear()
+
+ def shutdown(self) -> None:
+ """Shutdown the executor and clear subscribers."""
+ self.clear()
+ self._executor.shutdown(wait=False)
+
+
+ # Global emitter instance (singleton pattern)
+ _emitter: Optional[EventEmitter] = None
+
+
+ def get_emitter() -> EventEmitter:
+ """
+ Get the global event emitter instance.
+
+ Returns:
+ The singleton EventEmitter instance
+ """
+ global _emitter
+ if _emitter is None:
+ _emitter = EventEmitter()
+ return _emitter
+
+
+ def reset_emitter() -> None:
+ """
+ Reset the global emitter (mainly for testing).
+
+ Creates a fresh emitter instance, clearing all subscriptions.
+ """
+ global _emitter
+ if _emitter is not None:
+ _emitter.shutdown()
+ _emitter = EventEmitter()