neural_memory-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- neural_memory/__init__.py +38 -0
- neural_memory/cli/__init__.py +15 -0
- neural_memory/cli/__main__.py +6 -0
- neural_memory/cli/config.py +176 -0
- neural_memory/cli/main.py +2702 -0
- neural_memory/cli/storage.py +169 -0
- neural_memory/cli/tui.py +471 -0
- neural_memory/core/__init__.py +52 -0
- neural_memory/core/brain.py +301 -0
- neural_memory/core/brain_mode.py +273 -0
- neural_memory/core/fiber.py +236 -0
- neural_memory/core/memory_types.py +331 -0
- neural_memory/core/neuron.py +168 -0
- neural_memory/core/project.py +257 -0
- neural_memory/core/synapse.py +215 -0
- neural_memory/engine/__init__.py +15 -0
- neural_memory/engine/activation.py +335 -0
- neural_memory/engine/encoder.py +391 -0
- neural_memory/engine/retrieval.py +440 -0
- neural_memory/extraction/__init__.py +42 -0
- neural_memory/extraction/entities.py +547 -0
- neural_memory/extraction/parser.py +337 -0
- neural_memory/extraction/router.py +396 -0
- neural_memory/extraction/temporal.py +428 -0
- neural_memory/mcp/__init__.py +9 -0
- neural_memory/mcp/__main__.py +6 -0
- neural_memory/mcp/server.py +621 -0
- neural_memory/py.typed +0 -0
- neural_memory/safety/__init__.py +31 -0
- neural_memory/safety/freshness.py +238 -0
- neural_memory/safety/sensitive.py +304 -0
- neural_memory/server/__init__.py +5 -0
- neural_memory/server/app.py +99 -0
- neural_memory/server/dependencies.py +33 -0
- neural_memory/server/models.py +138 -0
- neural_memory/server/routes/__init__.py +7 -0
- neural_memory/server/routes/brain.py +221 -0
- neural_memory/server/routes/memory.py +169 -0
- neural_memory/server/routes/sync.py +387 -0
- neural_memory/storage/__init__.py +17 -0
- neural_memory/storage/base.py +441 -0
- neural_memory/storage/factory.py +329 -0
- neural_memory/storage/memory_store.py +896 -0
- neural_memory/storage/shared_store.py +650 -0
- neural_memory/storage/sqlite_store.py +1613 -0
- neural_memory/sync/__init__.py +5 -0
- neural_memory/sync/client.py +435 -0
- neural_memory/unified_config.py +315 -0
- neural_memory/utils/__init__.py +5 -0
- neural_memory/utils/config.py +98 -0
- neural_memory-0.1.0.dist-info/METADATA +314 -0
- neural_memory-0.1.0.dist-info/RECORD +55 -0
- neural_memory-0.1.0.dist-info/WHEEL +4 -0
- neural_memory-0.1.0.dist-info/entry_points.txt +4 -0
- neural_memory-0.1.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,168 @@ neural_memory/core/neuron.py
"""Neuron data structures - the basic units of memory."""

from __future__ import annotations

from dataclasses import dataclass, field
from datetime import datetime
from enum import StrEnum
from typing import Any
from uuid import uuid4


class NeuronType(StrEnum):
    """Types of neurons in the memory system."""

    TIME = "time"  # Temporal markers: "3pm", "yesterday"
    SPATIAL = "spatial"  # Locations: "coffee shop", "office"
    ENTITY = "entity"  # Named entities: "Alice", "FastAPI"
    ACTION = "action"  # Verbs/actions: "discussed", "completed"
    STATE = "state"  # Emotional/mental states: "happy", "frustrated"
    CONCEPT = "concept"  # Abstract ideas: "API design", "authentication"
    SENSORY = "sensory"  # Sensory experiences: "loud", "bright"
    INTENT = "intent"  # Goals/intentions: "learn", "build"


@dataclass(frozen=True)
class Neuron:
    """
    A neuron represents a single unit of memory.

    Neurons are immutable - they represent facts that don't change.
    The activation state is stored separately in NeuronState.

    Attributes:
        id: Unique identifier (UUID or content-hash)
        type: Category of information this neuron represents
        content: The raw value/text of this memory unit
        metadata: Type-specific additional information
        created_at: When this neuron was created
    """

    id: str
    type: NeuronType
    content: str
    metadata: dict[str, Any] = field(default_factory=dict)
    created_at: datetime = field(default_factory=datetime.utcnow)

    @classmethod
    def create(
        cls,
        type: NeuronType,
        content: str,
        metadata: dict[str, Any] | None = None,
        neuron_id: str | None = None,
    ) -> Neuron:
        """
        Factory method to create a new Neuron.

        Args:
            type: The type of neuron
            content: The content/value
            metadata: Optional metadata dict
            neuron_id: Optional explicit ID (generates UUID if not provided)

        Returns:
            A new Neuron instance
        """
        return cls(
            id=neuron_id or str(uuid4()),
            type=type,
            content=content,
            metadata=metadata or {},
            created_at=datetime.utcnow(),
        )

    def with_metadata(self, **kwargs: Any) -> Neuron:
        """
        Create a new Neuron with updated metadata.

        Args:
            **kwargs: Metadata key-value pairs to add/update

        Returns:
            New Neuron with merged metadata
        """
        return Neuron(
            id=self.id,
            type=self.type,
            content=self.content,
            metadata={**self.metadata, **kwargs},
            created_at=self.created_at,
        )


@dataclass
class NeuronState:
    """
    Mutable activation state for a neuron.

    Separated from Neuron to allow state changes without
    modifying the immutable neuron data.

    Attributes:
        neuron_id: Reference to the associated Neuron
        activation_level: Current activation (0.0 - 1.0)
        access_frequency: How many times this neuron has been activated
        last_activated: When this neuron was last activated
        decay_rate: How fast activation decays over time
        created_at: When this state was created
    """

    neuron_id: str
    activation_level: float = 0.0
    access_frequency: int = 0
    last_activated: datetime | None = None
    decay_rate: float = 0.1
    created_at: datetime = field(default_factory=datetime.utcnow)

    def activate(self, level: float = 1.0) -> NeuronState:
        """
        Create a new state with updated activation.

        Args:
            level: Activation level to set (clamped to 0.0-1.0)

        Returns:
            New NeuronState with updated activation
        """
        clamped_level = max(0.0, min(1.0, level))
        return NeuronState(
            neuron_id=self.neuron_id,
            activation_level=clamped_level,
            access_frequency=self.access_frequency + 1,
            last_activated=datetime.utcnow(),
            decay_rate=self.decay_rate,
            created_at=self.created_at,
        )

    def decay(self, time_delta_seconds: float) -> NeuronState:
        """
        Apply decay to activation based on time elapsed.

        Uses exponential decay: new_level = old_level * e^(-decay_rate * days_elapsed)

        Args:
            time_delta_seconds: Time elapsed since last update

        Returns:
            New NeuronState with decayed activation
        """
        import math

        days_elapsed = time_delta_seconds / 86400  # Convert to days
        decay_factor = math.exp(-self.decay_rate * days_elapsed)
        new_level = self.activation_level * decay_factor

        return NeuronState(
            neuron_id=self.neuron_id,
            activation_level=new_level,
            access_frequency=self.access_frequency,
            last_activated=self.last_activated,
            decay_rate=self.decay_rate,
            created_at=self.created_at,
        )

    @property
    def is_active(self) -> bool:
        """Check if neuron is currently active (above threshold)."""
        return self.activation_level > 0.1
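
A minimal usage sketch (not part of the package) of the Neuron/NeuronState API shown above; the import path follows the file list, and the entity value is a made-up example:

from neural_memory.core.neuron import Neuron, NeuronState, NeuronType

# Create an immutable neuron for a named entity.
alice = Neuron.create(type=NeuronType.ENTITY, content="Alice")

# Activation lives in a separate state object; activate/decay return new instances.
state = NeuronState(neuron_id=alice.id)
state = state.activate(0.8)     # clamped to [0.0, 1.0]; bumps access_frequency
state = state.decay(3 * 86400)  # three days of decay: 0.8 * e^(-0.1 * 3) ≈ 0.59
print(state.is_active)          # True - still above the 0.1 threshold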
@@ -0,0 +1,257 @@ neural_memory/core/project.py
"""Project scoping for memory organization.

Projects allow grouping memories by context (sprint, feature, research topic)
with automatic time-based prioritization.
"""

from __future__ import annotations

import uuid
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from typing import Any


@dataclass(frozen=True)
class Project:
    """A project scope for organizing memories.

    Projects define a context boundary for memories, typically representing
    a sprint, feature, research topic, or any focused work period.
    """

    id: str
    name: str
    description: str = ""
    start_date: datetime = field(default_factory=datetime.utcnow)
    end_date: datetime | None = None  # None = ongoing
    tags: frozenset[str] = field(default_factory=frozenset)
    priority: float = 1.0  # Higher = more important
    metadata: dict[str, Any] = field(default_factory=dict)
    created_at: datetime = field(default_factory=datetime.utcnow)

    @classmethod
    def create(
        cls,
        name: str,
        description: str = "",
        start_date: datetime | None = None,
        end_date: datetime | None = None,
        duration_days: int | None = None,
        tags: set[str] | None = None,
        priority: float = 1.0,
        metadata: dict[str, Any] | None = None,
    ) -> Project:
        """Create a new project.

        Args:
            name: Project name
            description: Optional description
            start_date: When the project starts (default: now)
            end_date: When the project ends (optional)
            duration_days: Alternative to end_date - sets the duration from start
            tags: Optional tags for categorization
            priority: Project priority (default: 1.0)
            metadata: Optional metadata

        Returns:
            New Project instance
        """
        now = datetime.utcnow()
        start = start_date or now

        # Calculate end_date from duration if provided
        end = end_date
        if end is None and duration_days is not None:
            end = start + timedelta(days=duration_days)

        return cls(
            id=str(uuid.uuid4()),
            name=name,
            description=description,
            start_date=start,
            end_date=end,
            tags=frozenset(tags) if tags else frozenset(),
            priority=priority,
            metadata=metadata or {},
            created_at=now,
        )

    @property
    def is_active(self) -> bool:
        """Check if the project is currently active."""
        now = datetime.utcnow()
        if now < self.start_date:
            return False
        if self.end_date is not None and now > self.end_date:
            return False
        return True

    @property
    def is_ongoing(self) -> bool:
        """Check if the project has no defined end date."""
        return self.end_date is None

    @property
    def days_remaining(self) -> int | None:
        """Get days remaining until the project end, or None if ongoing."""
        if self.end_date is None:
            return None
        delta = self.end_date - datetime.utcnow()
        return max(0, delta.days)

    @property
    def duration_days(self) -> int | None:
        """Get total project duration in days, or None if ongoing."""
        if self.end_date is None:
            return None
        delta = self.end_date - self.start_date
        return delta.days

    def contains_date(self, date: datetime) -> bool:
        """Check if a date falls within the project timeframe."""
        if date < self.start_date:
            return False
        if self.end_date is not None and date > self.end_date:
            return False
        return True

    def with_end_date(self, end_date: datetime) -> Project:
        """Create a copy with a new end date."""
        return Project(
            id=self.id,
            name=self.name,
            description=self.description,
            start_date=self.start_date,
            end_date=end_date,
            tags=self.tags,
            priority=self.priority,
            metadata=self.metadata,
            created_at=self.created_at,
        )

    def with_extended_deadline(self, extra_days: int) -> Project:
        """Extend the project deadline by the given number of days."""
        if self.end_date is None:
            raise ValueError("Cannot extend ongoing project - set end_date first")
        new_end = self.end_date + timedelta(days=extra_days)
        return self.with_end_date(new_end)

    def with_tags(self, tags: set[str]) -> Project:
        """Create a copy with new tags."""
        return Project(
            id=self.id,
            name=self.name,
            description=self.description,
            start_date=self.start_date,
            end_date=self.end_date,
            tags=frozenset(tags),
            priority=self.priority,
            metadata=self.metadata,
            created_at=self.created_at,
        )

    def to_dict(self) -> dict[str, Any]:
        """Convert to a dictionary for serialization."""
        return {
            "id": self.id,
            "name": self.name,
            "description": self.description,
            "start_date": self.start_date.isoformat(),
            "end_date": self.end_date.isoformat() if self.end_date else None,
            "tags": list(self.tags),
            "priority": self.priority,
            "metadata": self.metadata,
            "created_at": self.created_at.isoformat(),
        }

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> Project:
        """Create from a dictionary."""
        return cls(
            id=data["id"],
            name=data["name"],
            description=data.get("description", ""),
            start_date=datetime.fromisoformat(data["start_date"]),
            end_date=datetime.fromisoformat(data["end_date"]) if data.get("end_date") else None,
            tags=frozenset(data.get("tags", [])),
            priority=data.get("priority", 1.0),
            metadata=data.get("metadata", {}),
            created_at=datetime.fromisoformat(data["created_at"]),
        )


@dataclass(frozen=True)
class MemoryScope:
    """Defines which memories to prioritize for retrieval.

    Used to filter and boost memories based on project, time window, or tags.
    """

    project_id: str | None = None
    time_window_days: int | None = 7  # Auto-prioritize recent memories
    tags: frozenset[str] | None = None
    min_relevance: float = 0.3  # Threshold for inclusion

    @classmethod
    def for_project(cls, project_id: str) -> MemoryScope:
        """Create a scope for a specific project."""
        return cls(project_id=project_id)

    @classmethod
    def recent(cls, days: int = 7) -> MemoryScope:
        """Create a scope for recent memories."""
        return cls(time_window_days=days)

    @classmethod
    def with_tags(cls, tags: set[str]) -> MemoryScope:
        """Create a scope for specific tags."""
        return cls(tags=frozenset(tags))

    def matches(
        self,
        project_id: str | None = None,
        created_at: datetime | None = None,
        tags: frozenset[str] | None = None,
    ) -> bool:
        """Check if memory attributes match this scope."""
        # Project filter
        if self.project_id is not None and project_id != self.project_id:
            return False

        # Time window filter
        if self.time_window_days is not None and created_at is not None:
            cutoff = datetime.utcnow() - timedelta(days=self.time_window_days)
            if created_at < cutoff:
                return False

        # Tags filter (must have at least one matching tag)
        if self.tags is not None and tags is not None:
            if not self.tags.intersection(tags):
                return False

        return True

    def relevance_boost(
        self,
        created_at: datetime | None = None,
        project_priority: float = 1.0,
    ) -> float:
        """Calculate the relevance boost for a memory.

        Returns a multiplier (1.0 = no boost, >1.0 = boosted).
        """
        boost = 1.0

        # Recency boost (linear decay within the window)
        if self.time_window_days is not None and created_at is not None:
            days_ago = (datetime.utcnow() - created_at).days
            if days_ago <= self.time_window_days:
                # Linear decay: 1.5 at day 0, 1.0 at the window edge
                recency_factor = 1.0 + 0.5 * (1 - days_ago / self.time_window_days)
                boost *= recency_factor

        # Project priority boost
        boost *= project_priority

        return boost
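
A short sketch (not from the package) of how Project and MemoryScope above combine for time-scoped retrieval; the project name and tag are made-up examples:

from datetime import datetime, timedelta
from neural_memory.core.project import MemoryScope, Project

# A two-week sprint; end_date is derived from duration_days.
sprint = Project.create(name="auth-sprint", duration_days=14, tags={"auth"})
print(sprint.is_active, sprint.days_remaining)   # True, ~13

# Prioritize the last 7 days and boost a memory created 2 days ago.
scope = MemoryScope.recent(days=7)
created = datetime.utcnow() - timedelta(days=2)
if scope.matches(created_at=created):
    # 1.0 + 0.5 * (1 - 2/7), times project priority 1.0 -> about 1.36
    print(scope.relevance_boost(created_at=created, project_priority=sprint.priority))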
@@ -0,0 +1,215 @@ neural_memory/core/synapse.py
"""Synapse data structures - connections between neurons."""

from __future__ import annotations

from dataclasses import dataclass, field
from datetime import datetime
from enum import StrEnum
from typing import Any
from uuid import uuid4


class SynapseType(StrEnum):
    """Types of synaptic connections between neurons."""

    # Temporal relationships
    HAPPENED_AT = "happened_at"  # Event -> Time
    BEFORE = "before"  # Event A -> Event B (A happened before B)
    AFTER = "after"  # Event A -> Event B (A happened after B)
    DURING = "during"  # Event -> Period

    # Spatial relationships
    AT_LOCATION = "at_location"  # Event/Entity -> Place
    CONTAINS = "contains"  # Place -> Entity
    NEAR = "near"  # Place -> Place

    # Causal relationships
    CAUSED_BY = "caused_by"  # Effect -> Cause
    LEADS_TO = "leads_to"  # Cause -> Effect
    ENABLES = "enables"  # Condition -> Action
    PREVENTS = "prevents"  # Blocker -> Action

    # Associative relationships
    CO_OCCURS = "co_occurs"  # Entity -> Entity (appear together)
    RELATED_TO = "related_to"  # General association
    SIMILAR_TO = "similar_to"  # Semantic similarity

    # Semantic relationships
    IS_A = "is_a"  # Instance -> Category
    HAS_PROPERTY = "has_property"  # Entity -> Property
    INVOLVES = "involves"  # Event -> Entity

    # Emotional relationships
    FELT = "felt"  # Event -> Emotion
    EVOKES = "evokes"  # Stimulus -> Emotion


class Direction(StrEnum):
    """Direction of synapse connection."""

    UNIDIRECTIONAL = "uni"  # One-way: source -> target
    BIDIRECTIONAL = "bi"  # Two-way: source <-> target


# Synapse types that are typically bidirectional
BIDIRECTIONAL_TYPES: frozenset[SynapseType] = frozenset(
    {
        SynapseType.CO_OCCURS,
        SynapseType.RELATED_TO,
        SynapseType.SIMILAR_TO,
        SynapseType.NEAR,
    }
)

# Synapse types with inverse relationships
INVERSE_TYPES: dict[SynapseType, SynapseType] = {
    SynapseType.BEFORE: SynapseType.AFTER,
    SynapseType.AFTER: SynapseType.BEFORE,
    SynapseType.CAUSED_BY: SynapseType.LEADS_TO,
    SynapseType.LEADS_TO: SynapseType.CAUSED_BY,
    SynapseType.CONTAINS: SynapseType.AT_LOCATION,
    SynapseType.AT_LOCATION: SynapseType.CONTAINS,
}


@dataclass
class Synapse:
    """
    A synapse represents a connection between two neurons.

    Synapses have semantic meaning (type) and strength (weight).
    They can be reinforced through use or decay over time.

    Attributes:
        id: Unique identifier
        source_id: ID of the source neuron
        target_id: ID of the target neuron
        type: The semantic type of this connection
        weight: Connection strength (0.0 - 1.0)
        direction: Whether connection is uni- or bidirectional
        metadata: Additional connection-specific data
        reinforced_count: How many times this connection was reinforced
        last_activated: When this synapse was last used
        created_at: When this synapse was created
    """

    id: str
    source_id: str
    target_id: str
    type: SynapseType
    weight: float = 0.5
    direction: Direction = Direction.UNIDIRECTIONAL
    metadata: dict[str, Any] = field(default_factory=dict)
    reinforced_count: int = 0
    last_activated: datetime | None = None
    created_at: datetime = field(default_factory=datetime.utcnow)

    @classmethod
    def create(
        cls,
        source_id: str,
        target_id: str,
        type: SynapseType,
        weight: float = 0.5,
        direction: Direction | None = None,
        metadata: dict[str, Any] | None = None,
        synapse_id: str | None = None,
    ) -> Synapse:
        """
        Factory method to create a new Synapse.

        Args:
            source_id: ID of source neuron
            target_id: ID of target neuron
            type: Synapse type
            weight: Initial weight (default 0.5)
            direction: Connection direction (auto-detected if None)
            metadata: Optional metadata
            synapse_id: Optional explicit ID

        Returns:
            A new Synapse instance
        """
        # Auto-detect direction based on type
        if direction is None:
            direction = (
                Direction.BIDIRECTIONAL if type in BIDIRECTIONAL_TYPES else Direction.UNIDIRECTIONAL
            )

        return cls(
            id=synapse_id or str(uuid4()),
            source_id=source_id,
            target_id=target_id,
            type=type,
            weight=max(0.0, min(1.0, weight)),
            direction=direction,
            metadata=metadata or {},
            created_at=datetime.utcnow(),
        )

    def reinforce(self, delta: float = 0.05) -> Synapse:
        """
        Create a new Synapse with reinforced weight.

        Args:
            delta: Amount to increase weight by

        Returns:
            New Synapse with increased weight (capped at 1.0)
        """
        return Synapse(
            id=self.id,
            source_id=self.source_id,
            target_id=self.target_id,
            type=self.type,
            weight=min(1.0, self.weight + delta),
            direction=self.direction,
            metadata=self.metadata,
            reinforced_count=self.reinforced_count + 1,
            last_activated=datetime.utcnow(),
            created_at=self.created_at,
        )

    def decay(self, factor: float = 0.95) -> Synapse:
        """
        Create a new Synapse with decayed weight.

        Args:
            factor: Decay multiplier (0.0 - 1.0)

        Returns:
            New Synapse with decreased weight
        """
        return Synapse(
            id=self.id,
            source_id=self.source_id,
            target_id=self.target_id,
            type=self.type,
            weight=self.weight * factor,
            direction=self.direction,
            metadata=self.metadata,
            reinforced_count=self.reinforced_count,
            last_activated=self.last_activated,
            created_at=self.created_at,
        )

    @property
    def is_bidirectional(self) -> bool:
        """Check if this synapse allows traversal in both directions."""
        return self.direction == Direction.BIDIRECTIONAL

    def get_inverse_type(self) -> SynapseType | None:
        """Get the inverse synapse type if one exists."""
        return INVERSE_TYPES.get(self.type)

    def connects(self, neuron_id: str) -> bool:
        """Check if this synapse connects to a given neuron."""
        return self.source_id == neuron_id or self.target_id == neuron_id

    def other_end(self, neuron_id: str) -> str | None:
        """Get the ID of the neuron at the other end of this synapse."""
        if self.source_id == neuron_id:
            return self.target_id
        if self.target_id == neuron_id:
            return self.source_id
        return None
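
A minimal sketch (not from the package) of the Synapse API above; the neuron IDs are hypothetical placeholders:

from neural_memory.core.synapse import Synapse, SynapseType

# CO_OCCURS is listed in BIDIRECTIONAL_TYPES, so direction is auto-detected.
link = Synapse.create(source_id="n-alice", target_id="n-fastapi",
                      type=SynapseType.CO_OCCURS, weight=0.4)
print(link.is_bidirectional)      # True

# Synapses are replaced rather than mutated in place.
link = link.reinforce(0.1)        # weight 0.5, reinforced_count 1
link = link.decay(0.9)            # weight 0.45
print(link.other_end("n-alice"))  # "n-fastapi"
print(link.get_inverse_type())    # None - CO_OCCURS has no inverse type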
@@ -0,0 +1,15 @@ neural_memory/engine/__init__.py
"""Engine components for memory encoding and retrieval."""

from neural_memory.engine.activation import ActivationResult, SpreadingActivation
from neural_memory.engine.encoder import EncodingResult, MemoryEncoder
from neural_memory.engine.retrieval import DepthLevel, ReflexPipeline, RetrievalResult

__all__ = [
    "ActivationResult",
    "DepthLevel",
    "EncodingResult",
    "MemoryEncoder",
    "ReflexPipeline",
    "RetrievalResult",
    "SpreadingActivation",
]
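
Because the __init__ above re-exports these names, callers can import them from the package rather than the individual modules, e.g.:

from neural_memory.engine import MemoryEncoder, ReflexPipeline, SpreadingActivation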