neural-memory 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- neural_memory/__init__.py +38 -0
- neural_memory/cli/__init__.py +15 -0
- neural_memory/cli/__main__.py +6 -0
- neural_memory/cli/config.py +176 -0
- neural_memory/cli/main.py +2702 -0
- neural_memory/cli/storage.py +169 -0
- neural_memory/cli/tui.py +471 -0
- neural_memory/core/__init__.py +52 -0
- neural_memory/core/brain.py +301 -0
- neural_memory/core/brain_mode.py +273 -0
- neural_memory/core/fiber.py +236 -0
- neural_memory/core/memory_types.py +331 -0
- neural_memory/core/neuron.py +168 -0
- neural_memory/core/project.py +257 -0
- neural_memory/core/synapse.py +215 -0
- neural_memory/engine/__init__.py +15 -0
- neural_memory/engine/activation.py +335 -0
- neural_memory/engine/encoder.py +391 -0
- neural_memory/engine/retrieval.py +440 -0
- neural_memory/extraction/__init__.py +42 -0
- neural_memory/extraction/entities.py +547 -0
- neural_memory/extraction/parser.py +337 -0
- neural_memory/extraction/router.py +396 -0
- neural_memory/extraction/temporal.py +428 -0
- neural_memory/mcp/__init__.py +9 -0
- neural_memory/mcp/__main__.py +6 -0
- neural_memory/mcp/server.py +621 -0
- neural_memory/py.typed +0 -0
- neural_memory/safety/__init__.py +31 -0
- neural_memory/safety/freshness.py +238 -0
- neural_memory/safety/sensitive.py +304 -0
- neural_memory/server/__init__.py +5 -0
- neural_memory/server/app.py +99 -0
- neural_memory/server/dependencies.py +33 -0
- neural_memory/server/models.py +138 -0
- neural_memory/server/routes/__init__.py +7 -0
- neural_memory/server/routes/brain.py +221 -0
- neural_memory/server/routes/memory.py +169 -0
- neural_memory/server/routes/sync.py +387 -0
- neural_memory/storage/__init__.py +17 -0
- neural_memory/storage/base.py +441 -0
- neural_memory/storage/factory.py +329 -0
- neural_memory/storage/memory_store.py +896 -0
- neural_memory/storage/shared_store.py +650 -0
- neural_memory/storage/sqlite_store.py +1613 -0
- neural_memory/sync/__init__.py +5 -0
- neural_memory/sync/client.py +435 -0
- neural_memory/unified_config.py +315 -0
- neural_memory/utils/__init__.py +5 -0
- neural_memory/utils/config.py +98 -0
- neural_memory-0.1.0.dist-info/METADATA +314 -0
- neural_memory-0.1.0.dist-info/RECORD +55 -0
- neural_memory-0.1.0.dist-info/WHEEL +4 -0
- neural_memory-0.1.0.dist-info/entry_points.txt +4 -0
- neural_memory-0.1.0.dist-info/licenses/LICENSE +21 -0
neural_memory/storage/sqlite_store.py

@@ -0,0 +1,1613 @@

"""SQLite storage backend for persistent neural memory."""

from __future__ import annotations

import json
import sqlite3
from datetime import datetime
from pathlib import Path
from typing import Any, Literal

import aiosqlite

from neural_memory.core.brain import Brain, BrainConfig, BrainSnapshot
from neural_memory.core.fiber import Fiber
from neural_memory.core.memory_types import (
    Confidence,
    MemoryType,
    Priority,
    Provenance,
    TypedMemory,
)
from neural_memory.core.neuron import Neuron, NeuronState, NeuronType
from neural_memory.core.project import Project
from neural_memory.core.synapse import Direction, Synapse, SynapseType
from neural_memory.storage.base import NeuralStorage

# Schema version for migrations
SCHEMA_VERSION = 1

SCHEMA = """
-- Schema version tracking
CREATE TABLE IF NOT EXISTS schema_version (
    version INTEGER PRIMARY KEY
);

-- Brains table
CREATE TABLE IF NOT EXISTS brains (
    id TEXT PRIMARY KEY,
    name TEXT NOT NULL,
    config TEXT NOT NULL, -- JSON
    owner_id TEXT,
    is_public INTEGER DEFAULT 0,
    shared_with TEXT DEFAULT '[]', -- JSON array
    created_at TEXT NOT NULL,
    updated_at TEXT NOT NULL
);

-- Neurons table (composite key: brain_id + id for brain isolation)
CREATE TABLE IF NOT EXISTS neurons (
    id TEXT NOT NULL,
    brain_id TEXT NOT NULL,
    type TEXT NOT NULL,
    content TEXT NOT NULL,
    metadata TEXT DEFAULT '{}', -- JSON
    created_at TEXT NOT NULL,
    PRIMARY KEY (brain_id, id),
    FOREIGN KEY (brain_id) REFERENCES brains(id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_neurons_type ON neurons(brain_id, type);
CREATE INDEX IF NOT EXISTS idx_neurons_created ON neurons(brain_id, created_at);

-- Neuron states table
CREATE TABLE IF NOT EXISTS neuron_states (
    neuron_id TEXT NOT NULL,
    brain_id TEXT NOT NULL,
    activation_level REAL DEFAULT 0.0,
    access_frequency INTEGER DEFAULT 0,
    last_activated TEXT,
    decay_rate REAL DEFAULT 0.1,
    created_at TEXT NOT NULL,
    PRIMARY KEY (brain_id, neuron_id),
    FOREIGN KEY (brain_id, neuron_id) REFERENCES neurons(brain_id, id) ON DELETE CASCADE
);

-- Synapses table
CREATE TABLE IF NOT EXISTS synapses (
    id TEXT NOT NULL,
    brain_id TEXT NOT NULL,
    source_id TEXT NOT NULL,
    target_id TEXT NOT NULL,
    type TEXT NOT NULL,
    weight REAL DEFAULT 0.5,
    direction TEXT DEFAULT 'uni',
    metadata TEXT DEFAULT '{}', -- JSON
    reinforced_count INTEGER DEFAULT 0,
    last_activated TEXT,
    created_at TEXT NOT NULL,
    PRIMARY KEY (brain_id, id),
    FOREIGN KEY (brain_id) REFERENCES brains(id) ON DELETE CASCADE,
    FOREIGN KEY (brain_id, source_id) REFERENCES neurons(brain_id, id) ON DELETE CASCADE,
    FOREIGN KEY (brain_id, target_id) REFERENCES neurons(brain_id, id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_synapses_source ON synapses(brain_id, source_id);
CREATE INDEX IF NOT EXISTS idx_synapses_target ON synapses(brain_id, target_id);

-- Fibers table
CREATE TABLE IF NOT EXISTS fibers (
    id TEXT NOT NULL,
    brain_id TEXT NOT NULL,
    neuron_ids TEXT NOT NULL, -- JSON array
    synapse_ids TEXT NOT NULL, -- JSON array
    anchor_neuron_id TEXT NOT NULL,
    time_start TEXT,
    time_end TEXT,
    coherence REAL DEFAULT 0.0,
    salience REAL DEFAULT 0.0,
    frequency INTEGER DEFAULT 0,
    summary TEXT,
    tags TEXT DEFAULT '[]', -- JSON array
    metadata TEXT DEFAULT '{}', -- JSON
    created_at TEXT NOT NULL,
    PRIMARY KEY (brain_id, id),
    FOREIGN KEY (brain_id) REFERENCES brains(id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_fibers_created ON fibers(brain_id, created_at);
CREATE INDEX IF NOT EXISTS idx_fibers_salience ON fibers(brain_id, salience);

-- Typed memories table
CREATE TABLE IF NOT EXISTS typed_memories (
    fiber_id TEXT NOT NULL,
    brain_id TEXT NOT NULL,
    memory_type TEXT NOT NULL,
    priority INTEGER DEFAULT 5,
    provenance TEXT NOT NULL, -- JSON
    expires_at TEXT,
    project_id TEXT,
    tags TEXT DEFAULT '[]', -- JSON array
    metadata TEXT DEFAULT '{}', -- JSON
    created_at TEXT NOT NULL,
    PRIMARY KEY (brain_id, fiber_id),
    FOREIGN KEY (brain_id, fiber_id) REFERENCES fibers(brain_id, id) ON DELETE CASCADE,
    FOREIGN KEY (brain_id) REFERENCES brains(id) ON DELETE CASCADE,
    FOREIGN KEY (brain_id, project_id) REFERENCES projects(brain_id, id) ON DELETE SET NULL
);
CREATE INDEX IF NOT EXISTS idx_typed_memories_type ON typed_memories(brain_id, memory_type);
CREATE INDEX IF NOT EXISTS idx_typed_memories_project ON typed_memories(brain_id, project_id);
CREATE INDEX IF NOT EXISTS idx_typed_memories_expires ON typed_memories(brain_id, expires_at);

-- Projects table
CREATE TABLE IF NOT EXISTS projects (
    id TEXT NOT NULL,
    brain_id TEXT NOT NULL,
    name TEXT NOT NULL,
    description TEXT DEFAULT '',
    start_date TEXT NOT NULL,
    end_date TEXT,
    tags TEXT DEFAULT '[]', -- JSON array
    priority REAL DEFAULT 1.0,
    metadata TEXT DEFAULT '{}', -- JSON
    created_at TEXT NOT NULL,
    PRIMARY KEY (brain_id, id),
    FOREIGN KEY (brain_id) REFERENCES brains(id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_projects_name ON projects(brain_id, name);
"""


class SQLiteStorage(NeuralStorage):
    """
    SQLite-based storage for persistent neural memory.

    Good for single-instance deployment and local development.
    Data persists to disk and survives restarts.
    """

    def __init__(self, db_path: str | Path) -> None:
        """Initialize SQLite storage.

        Args:
            db_path: Path to SQLite database file
        """
        self._db_path = Path(db_path)
        self._conn: aiosqlite.Connection | None = None
        self._current_brain_id: str | None = None

    async def initialize(self) -> None:
        """Initialize database connection and schema."""
        # Ensure parent directory exists
        self._db_path.parent.mkdir(parents=True, exist_ok=True)

        self._conn = await aiosqlite.connect(self._db_path)
        self._conn.row_factory = aiosqlite.Row

        # Enable foreign keys
        await self._conn.execute("PRAGMA foreign_keys = ON")

        # Create schema
        await self._conn.executescript(SCHEMA)

        # Check/set schema version
        async with self._conn.execute("SELECT version FROM schema_version") as cursor:
            row = await cursor.fetchone()
            if row is None:
                await self._conn.execute(
                    "INSERT INTO schema_version (version) VALUES (?)", (SCHEMA_VERSION,)
                )

        await self._conn.commit()

    async def close(self) -> None:
        """Close database connection."""
        if self._conn:
            await self._conn.close()
            self._conn = None

    def set_brain(self, brain_id: str) -> None:
        """Set the current brain context for operations."""
        self._current_brain_id = brain_id

    def _get_brain_id(self) -> str:
        """Get current brain ID or raise error."""
        if self._current_brain_id is None:
            raise ValueError("No brain context set. Call set_brain() first.")
        return self._current_brain_id

    def _ensure_conn(self) -> aiosqlite.Connection:
        """Ensure connection is available."""
        if self._conn is None:
            raise RuntimeError("Database not initialized. Call initialize() first.")
        return self._conn
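
Putting the lifecycle together: a short usage sketch based only on the methods in this class. The database filename and brain id are placeholders, and the asyncio driver is an assumption about how a caller would run this async API:

```python
import asyncio

async def main() -> None:
    storage = SQLiteStorage("neural_memory.db")
    await storage.initialize()    # connect, enable foreign keys, create schema
    storage.set_brain("brain-a")  # every later call is scoped to this brain
    try:
        # Any operation before set_brain() raises ValueError;
        # any operation before initialize() raises RuntimeError.
        found = await storage.find_neurons(content_contains="sqlite", limit=5)
        print(f"{len(found)} matching neurons")
    finally:
        await storage.close()

asyncio.run(main())
```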

    # ========== Neuron Operations ==========

    async def add_neuron(self, neuron: Neuron) -> str:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        try:
            await conn.execute(
                """INSERT INTO neurons (id, brain_id, type, content, metadata, created_at)
                VALUES (?, ?, ?, ?, ?, ?)""",
                (
                    neuron.id,
                    brain_id,
                    neuron.type.value,
                    neuron.content,
                    json.dumps(neuron.metadata),
                    neuron.created_at.isoformat(),
                ),
            )

            # Initialize state
            await conn.execute(
                """INSERT INTO neuron_states (neuron_id, brain_id, created_at)
                VALUES (?, ?, ?)""",
                (neuron.id, brain_id, datetime.utcnow().isoformat()),
            )

            await conn.commit()
            return neuron.id
        except sqlite3.IntegrityError:
            raise ValueError(f"Neuron {neuron.id} already exists")

    async def get_neuron(self, neuron_id: str) -> Neuron | None:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        async with conn.execute(
            "SELECT * FROM neurons WHERE id = ? AND brain_id = ?",
            (neuron_id, brain_id),
        ) as cursor:
            row = await cursor.fetchone()
            if row is None:
                return None
            return self._row_to_neuron(row)

    async def find_neurons(
        self,
        type: NeuronType | None = None,
        content_contains: str | None = None,
        content_exact: str | None = None,
        time_range: tuple[datetime, datetime] | None = None,
        limit: int = 100,
    ) -> list[Neuron]:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        query = "SELECT * FROM neurons WHERE brain_id = ?"
        params: list[Any] = [brain_id]

        if type is not None:
            query += " AND type = ?"
            params.append(type.value)

        if content_contains is not None:
            query += " AND content LIKE ?"
            params.append(f"%{content_contains}%")

        if content_exact is not None:
            query += " AND content = ?"
            params.append(content_exact)

        if time_range is not None:
            start, end = time_range
            query += " AND created_at >= ? AND created_at <= ?"
            params.append(start.isoformat())
            params.append(end.isoformat())

        query += " LIMIT ?"
        params.append(limit)

        async with conn.execute(query, params) as cursor:
            rows = await cursor.fetchall()
            return [self._row_to_neuron(row) for row in rows]

    async def update_neuron(self, neuron: Neuron) -> None:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        cursor = await conn.execute(
            """UPDATE neurons SET type = ?, content = ?, metadata = ?
            WHERE id = ? AND brain_id = ?""",
            (
                neuron.type.value,
                neuron.content,
                json.dumps(neuron.metadata),
                neuron.id,
                brain_id,
            ),
        )

        if cursor.rowcount == 0:
            raise ValueError(f"Neuron {neuron.id} does not exist")

        await conn.commit()

    async def delete_neuron(self, neuron_id: str) -> bool:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        # Delete neuron (cascade will handle synapses and state)
        cursor = await conn.execute(
            "DELETE FROM neurons WHERE id = ? AND brain_id = ?",
            (neuron_id, brain_id),
        )
        await conn.commit()

        return cursor.rowcount > 0

    def _row_to_neuron(self, row: aiosqlite.Row) -> Neuron:
        """Convert database row to Neuron."""
        return Neuron(
            id=row["id"],
            type=NeuronType(row["type"]),
            content=row["content"],
            metadata=json.loads(row["metadata"]),
            created_at=datetime.fromisoformat(row["created_at"]),
        )

    # ========== Neuron State Operations ==========

    async def get_neuron_state(self, neuron_id: str) -> NeuronState | None:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        async with conn.execute(
            "SELECT * FROM neuron_states WHERE neuron_id = ? AND brain_id = ?",
            (neuron_id, brain_id),
        ) as cursor:
            row = await cursor.fetchone()
            if row is None:
                return None
            return self._row_to_neuron_state(row)

    async def update_neuron_state(self, state: NeuronState) -> None:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        await conn.execute(
            """INSERT OR REPLACE INTO neuron_states
            (neuron_id, brain_id, activation_level, access_frequency,
             last_activated, decay_rate, created_at)
            VALUES (?, ?, ?, ?, ?, ?, ?)""",
            (
                state.neuron_id,
                brain_id,
                state.activation_level,
                state.access_frequency,
                state.last_activated.isoformat() if state.last_activated else None,
                state.decay_rate,
                state.created_at.isoformat(),
            ),
        )
        await conn.commit()

    def _row_to_neuron_state(self, row: aiosqlite.Row) -> NeuronState:
        """Convert database row to NeuronState."""
        return NeuronState(
            neuron_id=row["neuron_id"],
            activation_level=row["activation_level"],
            access_frequency=row["access_frequency"],
            last_activated=(
                datetime.fromisoformat(row["last_activated"]) if row["last_activated"] else None
            ),
            decay_rate=row["decay_rate"],
            created_at=datetime.fromisoformat(row["created_at"]),
        )

    # ========== Synapse Operations ==========

    async def add_synapse(self, synapse: Synapse) -> str:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        # Verify neurons exist
        async with conn.execute(
            "SELECT id FROM neurons WHERE id IN (?, ?) AND brain_id = ?",
            (synapse.source_id, synapse.target_id, brain_id),
        ) as cursor:
            rows = await cursor.fetchall()
            found_ids = {row["id"] for row in rows}

        if synapse.source_id not in found_ids:
            raise ValueError(f"Source neuron {synapse.source_id} does not exist")
        if synapse.target_id not in found_ids:
            raise ValueError(f"Target neuron {synapse.target_id} does not exist")

        try:
            await conn.execute(
                """INSERT INTO synapses
                (id, brain_id, source_id, target_id, type, weight, direction,
                 metadata, reinforced_count, last_activated, created_at)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                (
                    synapse.id,
                    brain_id,
                    synapse.source_id,
                    synapse.target_id,
                    synapse.type.value,
                    synapse.weight,
                    synapse.direction.value,
                    json.dumps(synapse.metadata),
                    synapse.reinforced_count,
                    synapse.last_activated.isoformat() if synapse.last_activated else None,
                    synapse.created_at.isoformat(),
                ),
            )
            await conn.commit()
            return synapse.id
        except sqlite3.IntegrityError:
            raise ValueError(f"Synapse {synapse.id} already exists")

    async def get_synapse(self, synapse_id: str) -> Synapse | None:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        async with conn.execute(
            "SELECT * FROM synapses WHERE id = ? AND brain_id = ?",
            (synapse_id, brain_id),
        ) as cursor:
            row = await cursor.fetchone()
            if row is None:
                return None
            return self._row_to_synapse(row)

    async def get_synapses(
        self,
        source_id: str | None = None,
        target_id: str | None = None,
        type: SynapseType | None = None,
        min_weight: float | None = None,
    ) -> list[Synapse]:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        query = "SELECT * FROM synapses WHERE brain_id = ?"
        params: list[Any] = [brain_id]

        if source_id is not None:
            query += " AND source_id = ?"
            params.append(source_id)

        if target_id is not None:
            query += " AND target_id = ?"
            params.append(target_id)

        if type is not None:
            query += " AND type = ?"
            params.append(type.value)

        if min_weight is not None:
            query += " AND weight >= ?"
            params.append(min_weight)

        async with conn.execute(query, params) as cursor:
            rows = await cursor.fetchall()
            return [self._row_to_synapse(row) for row in rows]

    async def update_synapse(self, synapse: Synapse) -> None:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        cursor = await conn.execute(
            """UPDATE synapses SET type = ?, weight = ?, direction = ?,
            metadata = ?, reinforced_count = ?, last_activated = ?
            WHERE id = ? AND brain_id = ?""",
            (
                synapse.type.value,
                synapse.weight,
                synapse.direction.value,
                json.dumps(synapse.metadata),
                synapse.reinforced_count,
                synapse.last_activated.isoformat() if synapse.last_activated else None,
                synapse.id,
                brain_id,
            ),
        )

        if cursor.rowcount == 0:
            raise ValueError(f"Synapse {synapse.id} does not exist")

        await conn.commit()

    async def delete_synapse(self, synapse_id: str) -> bool:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        cursor = await conn.execute(
            "DELETE FROM synapses WHERE id = ? AND brain_id = ?",
            (synapse_id, brain_id),
        )
        await conn.commit()

        return cursor.rowcount > 0

    def _row_to_synapse(self, row: aiosqlite.Row) -> Synapse:
        """Convert database row to Synapse."""
        return Synapse(
            id=row["id"],
            source_id=row["source_id"],
            target_id=row["target_id"],
            type=SynapseType(row["type"]),
            weight=row["weight"],
            direction=Direction(row["direction"]),
            metadata=json.loads(row["metadata"]),
            reinforced_count=row["reinforced_count"],
            last_activated=(
                datetime.fromisoformat(row["last_activated"]) if row["last_activated"] else None
            ),
            created_at=datetime.fromisoformat(row["created_at"]),
        )
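
Note that add_synapse() verifies both endpoints inside the current brain before inserting, so a dangling edge surfaces as a ValueError rather than a foreign-key failure. A sketch of that failure mode and of the filtered queries get_synapses() supports; `storage` and `edge` are assumed inputs, not values from the package:

```python
async def demo(storage: SQLiteStorage, edge: Synapse) -> None:
    # add_synapse() rejects edges whose endpoints are missing from this brain.
    try:
        await storage.add_synapse(edge)
    except ValueError as exc:
        print(exc)  # e.g. "Source neuron n1 does not exist"

    # Filtered edge queries, matching the parameters defined above.
    strong_out = await storage.get_synapses(source_id=edge.source_id, min_weight=0.7)
    incoming = await storage.get_synapses(target_id=edge.target_id)
    print(len(strong_out), len(incoming))
```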

    # ========== Graph Traversal ==========

    async def get_neighbors(
        self,
        neuron_id: str,
        direction: Literal["out", "in", "both"] = "both",
        synapse_types: list[SynapseType] | None = None,
        min_weight: float | None = None,
    ) -> list[tuple[Neuron, Synapse]]:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()
        results: list[tuple[Neuron, Synapse]] = []

        # Build type filter
        type_filter = ""
        if synapse_types:
            types_str = ",".join(f"'{t.value}'" for t in synapse_types)
            type_filter = f" AND s.type IN ({types_str})"

        weight_filter = ""
        if min_weight is not None:
            weight_filter = f" AND s.weight >= {min_weight}"

        # Outgoing connections
        if direction in ("out", "both"):
            query = f"""
                SELECT n.*, s.id as s_id, s.source_id, s.target_id, s.type as s_type,
                       s.weight, s.direction, s.metadata as s_metadata,
                       s.reinforced_count, s.last_activated as s_last_activated,
                       s.created_at as s_created_at
                FROM synapses s
                JOIN neurons n ON s.target_id = n.id
                WHERE s.source_id = ? AND s.brain_id = ?{type_filter}{weight_filter}
            """
            async with conn.execute(query, (neuron_id, brain_id)) as cursor:
                async for row in cursor:
                    neuron = self._row_to_neuron(row)
                    synapse = Synapse(
                        id=row["s_id"],
                        source_id=row["source_id"],
                        target_id=row["target_id"],
                        type=SynapseType(row["s_type"]),
                        weight=row["weight"],
                        direction=Direction(row["direction"]),
                        metadata=json.loads(row["s_metadata"]),
                        reinforced_count=row["reinforced_count"],
                        last_activated=(
                            datetime.fromisoformat(row["s_last_activated"])
                            if row["s_last_activated"]
                            else None
                        ),
                        created_at=datetime.fromisoformat(row["s_created_at"]),
                    )
                    results.append((neuron, synapse))

        # Incoming connections
        if direction in ("in", "both"):
            query = f"""
                SELECT n.*, s.id as s_id, s.source_id, s.target_id, s.type as s_type,
                       s.weight, s.direction, s.metadata as s_metadata,
                       s.reinforced_count, s.last_activated as s_last_activated,
                       s.created_at as s_created_at
                FROM synapses s
                JOIN neurons n ON s.source_id = n.id
                WHERE s.target_id = ? AND s.brain_id = ?{type_filter}{weight_filter}
            """
            async with conn.execute(query, (neuron_id, brain_id)) as cursor:
                async for row in cursor:
                    synapse = Synapse(
                        id=row["s_id"],
                        source_id=row["source_id"],
                        target_id=row["target_id"],
                        type=SynapseType(row["s_type"]),
                        weight=row["weight"],
                        direction=Direction(row["direction"]),
                        metadata=json.loads(row["s_metadata"]),
                        reinforced_count=row["reinforced_count"],
                        last_activated=(
                            datetime.fromisoformat(row["s_last_activated"])
                            if row["s_last_activated"]
                            else None
                        ),
                        created_at=datetime.fromisoformat(row["s_created_at"]),
                    )

                    # For incoming, only include if bidirectional when direction is "in"
                    if direction == "in" and not synapse.is_bidirectional:
                        continue

                    neuron = self._row_to_neuron(row)
                    if (neuron, synapse) not in results:
                        results.append((neuron, synapse))

        return results

    async def get_path(
        self,
        source_id: str,
        target_id: str,
        max_hops: int = 4,
    ) -> list[tuple[Neuron, Synapse]] | None:
        """Find shortest path using BFS."""
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        # Verify both neurons exist
        async with conn.execute(
            "SELECT id FROM neurons WHERE id IN (?, ?) AND brain_id = ?",
            (source_id, target_id, brain_id),
        ) as cursor:
            rows = await cursor.fetchall()
            if len(rows) < 2:
                return None

        # BFS for shortest path
        from collections import deque

        visited = {source_id}
        queue: deque[tuple[str, list[tuple[str, str]]]] = deque(
            [(source_id, [])]
        )  # (current_id, path of (neuron_id, synapse_id))

        while queue:
            current_id, path = queue.popleft()

            if len(path) > max_hops:
                continue

            # Get outgoing synapses
            async with conn.execute(
                """SELECT id, target_id FROM synapses
                WHERE source_id = ? AND brain_id = ?""",
                (current_id, brain_id),
            ) as cursor:
                async for row in cursor:
                    next_id = row["target_id"]
                    synapse_id = row["id"]

                    if next_id == target_id:
                        # Found path
                        full_path = path + [(next_id, synapse_id)]
                        return await self._build_path_result(full_path)

                    if next_id not in visited:
                        visited.add(next_id)
                        queue.append((next_id, path + [(next_id, synapse_id)]))

        return None

    async def _build_path_result(self, path: list[tuple[str, str]]) -> list[tuple[Neuron, Synapse]]:
        """Build path result from neuron/synapse IDs."""
        result: list[tuple[Neuron, Synapse]] = []
        for neuron_id, synapse_id in path:
            neuron = await self.get_neuron(neuron_id)
            synapse = await self.get_synapse(synapse_id)
            if neuron and synapse:
                result.append((neuron, synapse))
        return result
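
get_path() searches breadth-first over outgoing synapses only, so paths follow edge direction, and the first hit is a shortest path in hop count (weights are ignored). A usage sketch with placeholder ids:

```python
async def show_route(storage: SQLiteStorage) -> None:
    path = await storage.get_path("n-start", "n-goal", max_hops=3)
    if path is None:
        print("no directed path within 3 hops")
        return
    for neuron, synapse in path:
        # Each step pairs the neuron reached with the synapse used to reach it.
        print(f"-[{synapse.type.value} w={synapse.weight:.2f}]-> {neuron.content}")
```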

    # ========== Fiber Operations ==========

    async def add_fiber(self, fiber: Fiber) -> str:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        try:
            await conn.execute(
                """INSERT INTO fibers
                (id, brain_id, neuron_ids, synapse_ids, anchor_neuron_id,
                 time_start, time_end, coherence, salience, frequency,
                 summary, tags, metadata, created_at)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                (
                    fiber.id,
                    brain_id,
                    json.dumps(list(fiber.neuron_ids)),
                    json.dumps(list(fiber.synapse_ids)),
                    fiber.anchor_neuron_id,
                    fiber.time_start.isoformat() if fiber.time_start else None,
                    fiber.time_end.isoformat() if fiber.time_end else None,
                    fiber.coherence,
                    fiber.salience,
                    fiber.frequency,
                    fiber.summary,
                    json.dumps(list(fiber.tags)),
                    json.dumps(fiber.metadata),
                    fiber.created_at.isoformat(),
                ),
            )
            await conn.commit()
            return fiber.id
        except sqlite3.IntegrityError:
            raise ValueError(f"Fiber {fiber.id} already exists")

    async def get_fiber(self, fiber_id: str) -> Fiber | None:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        async with conn.execute(
            "SELECT * FROM fibers WHERE id = ? AND brain_id = ?",
            (fiber_id, brain_id),
        ) as cursor:
            row = await cursor.fetchone()
            if row is None:
                return None
            return self._row_to_fiber(row)

    async def find_fibers(
        self,
        contains_neuron: str | None = None,
        time_overlaps: tuple[datetime, datetime] | None = None,
        tags: set[str] | None = None,
        min_salience: float | None = None,
        limit: int = 100,
    ) -> list[Fiber]:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        query = "SELECT * FROM fibers WHERE brain_id = ?"
        params: list[Any] = [brain_id]

        if contains_neuron is not None:
            query += " AND neuron_ids LIKE ?"
            params.append(f'%"{contains_neuron}"%')

        if time_overlaps is not None:
            start, end = time_overlaps
            # Fiber overlaps if: fiber_start <= query_end AND fiber_end >= query_start
            query += " AND (time_start IS NULL OR time_start <= ?)"
            query += " AND (time_end IS NULL OR time_end >= ?)"
            params.append(end.isoformat())
            params.append(start.isoformat())

        if min_salience is not None:
            query += " AND salience >= ?"
            params.append(min_salience)

        query += " ORDER BY salience DESC LIMIT ?"
        params.append(limit)

        async with conn.execute(query, params) as cursor:
            rows = await cursor.fetchall()
            fibers = [self._row_to_fiber(row) for row in rows]

        # Filter by tags in Python (JSON array doesn't support efficient set operations)
        if tags is not None:
            fibers = [f for f in fibers if tags.issubset(f.tags)]

        return fibers

    async def update_fiber(self, fiber: Fiber) -> None:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        cursor = await conn.execute(
            """UPDATE fibers SET neuron_ids = ?, synapse_ids = ?,
            anchor_neuron_id = ?, time_start = ?, time_end = ?,
            coherence = ?, salience = ?, frequency = ?,
            summary = ?, tags = ?, metadata = ?
            WHERE id = ? AND brain_id = ?""",
            (
                json.dumps(list(fiber.neuron_ids)),
                json.dumps(list(fiber.synapse_ids)),
                fiber.anchor_neuron_id,
                fiber.time_start.isoformat() if fiber.time_start else None,
                fiber.time_end.isoformat() if fiber.time_end else None,
                fiber.coherence,
                fiber.salience,
                fiber.frequency,
                fiber.summary,
                json.dumps(list(fiber.tags)),
                json.dumps(fiber.metadata),
                fiber.id,
                brain_id,
            ),
        )

        if cursor.rowcount == 0:
            raise ValueError(f"Fiber {fiber.id} does not exist")

        await conn.commit()

    async def delete_fiber(self, fiber_id: str) -> bool:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        cursor = await conn.execute(
            "DELETE FROM fibers WHERE id = ? AND brain_id = ?",
            (fiber_id, brain_id),
        )
        await conn.commit()

        return cursor.rowcount > 0

    async def get_fibers(
        self,
        limit: int = 10,
        order_by: Literal["created_at", "salience", "frequency"] = "created_at",
        descending: bool = True,
    ) -> list[Fiber]:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        order_dir = "DESC" if descending else "ASC"
        query = f"SELECT * FROM fibers WHERE brain_id = ? ORDER BY {order_by} {order_dir} LIMIT ?"

        async with conn.execute(query, (brain_id, limit)) as cursor:
            rows = await cursor.fetchall()
            return [self._row_to_fiber(row) for row in rows]

    def _row_to_fiber(self, row: aiosqlite.Row) -> Fiber:
        """Convert database row to Fiber."""
        return Fiber(
            id=row["id"],
            neuron_ids=set(json.loads(row["neuron_ids"])),
            synapse_ids=set(json.loads(row["synapse_ids"])),
            anchor_neuron_id=row["anchor_neuron_id"],
            time_start=(datetime.fromisoformat(row["time_start"]) if row["time_start"] else None),
            time_end=(datetime.fromisoformat(row["time_end"]) if row["time_end"] else None),
            coherence=row["coherence"],
            salience=row["salience"],
            frequency=row["frequency"],
            summary=row["summary"],
            tags=set(json.loads(row["tags"])),
            metadata=json.loads(row["metadata"]),
            created_at=datetime.fromisoformat(row["created_at"]),
        )
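
find_fibers() treats a NULL time bound as open-ended, so its SQL reduces to the standard interval-overlap test. The same predicate in pure Python, for reference; the function name is ours, not part of the package:

```python
from datetime import datetime

def fiber_overlaps(
    time_start: datetime | None,
    time_end: datetime | None,
    query_start: datetime,
    query_end: datetime,
) -> bool:
    # Mirrors the WHERE clause above: an open (NULL) bound always passes its test.
    return (time_start is None or time_start <= query_end) and (
        time_end is None or time_end >= query_start
    )
```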

    # ========== TypedMemory Operations ==========

    async def add_typed_memory(self, typed_memory: TypedMemory) -> str:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        # Verify fiber exists
        async with conn.execute(
            "SELECT id FROM fibers WHERE id = ? AND brain_id = ?",
            (typed_memory.fiber_id, brain_id),
        ) as cursor:
            if await cursor.fetchone() is None:
                raise ValueError(f"Fiber {typed_memory.fiber_id} does not exist")

        provenance_dict = {
            "source": typed_memory.provenance.source,
            "confidence": typed_memory.provenance.confidence.value,
            "verified": typed_memory.provenance.verified,
            "verified_at": (
                typed_memory.provenance.verified_at.isoformat()
                if typed_memory.provenance.verified_at
                else None
            ),
            "created_by": typed_memory.provenance.created_by,
            "last_confirmed": (
                typed_memory.provenance.last_confirmed.isoformat()
                if typed_memory.provenance.last_confirmed
                else None
            ),
        }

        await conn.execute(
            """INSERT OR REPLACE INTO typed_memories
            (fiber_id, brain_id, memory_type, priority, provenance,
             expires_at, project_id, tags, metadata, created_at)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
            (
                typed_memory.fiber_id,
                brain_id,
                typed_memory.memory_type.value,
                typed_memory.priority.value,
                json.dumps(provenance_dict),
                typed_memory.expires_at.isoformat() if typed_memory.expires_at else None,
                typed_memory.project_id,
                json.dumps(list(typed_memory.tags)),
                json.dumps(typed_memory.metadata),
                typed_memory.created_at.isoformat(),
            ),
        )
        await conn.commit()
        return typed_memory.fiber_id

    async def get_typed_memory(self, fiber_id: str) -> TypedMemory | None:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        async with conn.execute(
            "SELECT * FROM typed_memories WHERE fiber_id = ? AND brain_id = ?",
            (fiber_id, brain_id),
        ) as cursor:
            row = await cursor.fetchone()
            if row is None:
                return None
            return self._row_to_typed_memory(row)

    async def find_typed_memories(
        self,
        memory_type: MemoryType | None = None,
        min_priority: Priority | None = None,
        include_expired: bool = False,
        project_id: str | None = None,
        tags: set[str] | None = None,
        limit: int = 100,
    ) -> list[TypedMemory]:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        query = "SELECT * FROM typed_memories WHERE brain_id = ?"
        params: list[Any] = [brain_id]

        if memory_type is not None:
            query += " AND memory_type = ?"
            params.append(memory_type.value)

        if min_priority is not None:
            query += " AND priority >= ?"
            params.append(min_priority.value)

        if not include_expired:
            query += " AND (expires_at IS NULL OR expires_at > ?)"
            params.append(datetime.utcnow().isoformat())

        if project_id is not None:
            query += " AND project_id = ?"
            params.append(project_id)

        query += " ORDER BY priority DESC, created_at DESC LIMIT ?"
        params.append(limit)

        async with conn.execute(query, params) as cursor:
            rows = await cursor.fetchall()
            memories = [self._row_to_typed_memory(row) for row in rows]

        # Filter by tags in Python
        if tags is not None:
            memories = [m for m in memories if tags.issubset(m.tags)]

        return memories

    async def update_typed_memory(self, typed_memory: TypedMemory) -> None:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        provenance_dict = {
            "source": typed_memory.provenance.source,
            "confidence": typed_memory.provenance.confidence.value,
            "verified": typed_memory.provenance.verified,
            "verified_at": (
                typed_memory.provenance.verified_at.isoformat()
                if typed_memory.provenance.verified_at
                else None
            ),
            "created_by": typed_memory.provenance.created_by,
            "last_confirmed": (
                typed_memory.provenance.last_confirmed.isoformat()
                if typed_memory.provenance.last_confirmed
                else None
            ),
        }

        cursor = await conn.execute(
            """UPDATE typed_memories SET memory_type = ?, priority = ?,
            provenance = ?, expires_at = ?, project_id = ?,
            tags = ?, metadata = ?
            WHERE fiber_id = ? AND brain_id = ?""",
            (
                typed_memory.memory_type.value,
                typed_memory.priority.value,
                json.dumps(provenance_dict),
                typed_memory.expires_at.isoformat() if typed_memory.expires_at else None,
                typed_memory.project_id,
                json.dumps(list(typed_memory.tags)),
                json.dumps(typed_memory.metadata),
                typed_memory.fiber_id,
                brain_id,
            ),
        )

        if cursor.rowcount == 0:
            raise ValueError(f"TypedMemory for fiber {typed_memory.fiber_id} does not exist")

        await conn.commit()

    async def delete_typed_memory(self, fiber_id: str) -> bool:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        cursor = await conn.execute(
            "DELETE FROM typed_memories WHERE fiber_id = ? AND brain_id = ?",
            (fiber_id, brain_id),
        )
        await conn.commit()

        return cursor.rowcount > 0

    async def get_expired_memories(self) -> list[TypedMemory]:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        async with conn.execute(
            """SELECT * FROM typed_memories
            WHERE brain_id = ? AND expires_at IS NOT NULL AND expires_at <= ?""",
            (brain_id, datetime.utcnow().isoformat()),
        ) as cursor:
            rows = await cursor.fetchall()
            return [self._row_to_typed_memory(row) for row in rows]

    def _row_to_typed_memory(self, row: aiosqlite.Row) -> TypedMemory:
        """Convert database row to TypedMemory."""
        prov_data = json.loads(row["provenance"])
        provenance = Provenance(
            source=prov_data.get("source", "unknown"),
            confidence=Confidence(prov_data.get("confidence", "medium")),
            verified=prov_data.get("verified", False),
            verified_at=(
                datetime.fromisoformat(prov_data["verified_at"])
                if prov_data.get("verified_at")
                else None
            ),
            created_by=prov_data.get("created_by", "unknown"),
            last_confirmed=(
                datetime.fromisoformat(prov_data["last_confirmed"])
                if prov_data.get("last_confirmed")
                else None
            ),
        )

        return TypedMemory(
            fiber_id=row["fiber_id"],
            memory_type=MemoryType(row["memory_type"]),
            priority=Priority(row["priority"]),
            provenance=provenance,
            expires_at=(datetime.fromisoformat(row["expires_at"]) if row["expires_at"] else None),
            project_id=row["project_id"],
            tags=frozenset(json.loads(row["tags"])),
            metadata=json.loads(row["metadata"]),
            created_at=datetime.fromisoformat(row["created_at"]),
        )
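
Expiry is enforced at query time rather than by deletion: find_typed_memories() skips rows whose expires_at has passed unless include_expired=True, while get_expired_memories() returns them for whatever cleanup the caller wants. A sketch of a sweep built from the calls above; the function name is ours:

```python
async def sweep_expired(storage: SQLiteStorage) -> int:
    # Collect memories whose expires_at is in the past, then drop them.
    expired = await storage.get_expired_memories()
    for memory in expired:
        await storage.delete_typed_memory(memory.fiber_id)
    return len(expired)
```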

    # ========== Project Operations ==========

    async def add_project(self, project: Project) -> str:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        try:
            await conn.execute(
                """INSERT INTO projects
                (id, brain_id, name, description, start_date, end_date,
                 tags, priority, metadata, created_at)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                (
                    project.id,
                    brain_id,
                    project.name,
                    project.description,
                    project.start_date.isoformat(),
                    project.end_date.isoformat() if project.end_date else None,
                    json.dumps(list(project.tags)),
                    project.priority,
                    json.dumps(project.metadata),
                    project.created_at.isoformat(),
                ),
            )
            await conn.commit()
            return project.id
        except sqlite3.IntegrityError:
            raise ValueError(f"Project {project.id} already exists")

    async def get_project(self, project_id: str) -> Project | None:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        async with conn.execute(
            "SELECT * FROM projects WHERE id = ? AND brain_id = ?",
            (project_id, brain_id),
        ) as cursor:
            row = await cursor.fetchone()
            if row is None:
                return None
            return self._row_to_project(row)

    async def get_project_by_name(self, name: str) -> Project | None:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        async with conn.execute(
            "SELECT * FROM projects WHERE brain_id = ? AND LOWER(name) = LOWER(?)",
            (brain_id, name),
        ) as cursor:
            row = await cursor.fetchone()
            if row is None:
                return None
            return self._row_to_project(row)

    async def list_projects(
        self,
        active_only: bool = False,
        tags: set[str] | None = None,
        limit: int = 100,
    ) -> list[Project]:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        query = "SELECT * FROM projects WHERE brain_id = ?"
        params: list[Any] = [brain_id]

        if active_only:
            now = datetime.utcnow().isoformat()
            query += " AND start_date <= ? AND (end_date IS NULL OR end_date > ?)"
            params.extend([now, now])

        query += " ORDER BY priority DESC, start_date DESC LIMIT ?"
        params.append(limit)

        async with conn.execute(query, params) as cursor:
            rows = await cursor.fetchall()
            projects = [self._row_to_project(row) for row in rows]

        # Filter by tags in Python
        if tags is not None:
            projects = [p for p in projects if tags.intersection(p.tags)]

        return projects

    async def update_project(self, project: Project) -> None:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        cursor = await conn.execute(
            """UPDATE projects SET name = ?, description = ?,
            start_date = ?, end_date = ?, tags = ?,
            priority = ?, metadata = ?
            WHERE id = ? AND brain_id = ?""",
            (
                project.name,
                project.description,
                project.start_date.isoformat(),
                project.end_date.isoformat() if project.end_date else None,
                json.dumps(list(project.tags)),
                project.priority,
                json.dumps(project.metadata),
                project.id,
                brain_id,
            ),
        )

        if cursor.rowcount == 0:
            raise ValueError(f"Project {project.id} does not exist")

        await conn.commit()

    async def delete_project(self, project_id: str) -> bool:
        conn = self._ensure_conn()
        brain_id = self._get_brain_id()

        cursor = await conn.execute(
            "DELETE FROM projects WHERE id = ? AND brain_id = ?",
            (project_id, brain_id),
        )
        await conn.commit()

        return cursor.rowcount > 0

    async def get_project_memories(
        self,
        project_id: str,
        include_expired: bool = False,
    ) -> list[TypedMemory]:
        return await self.find_typed_memories(
            project_id=project_id,
            include_expired=include_expired,
        )

    def _row_to_project(self, row: aiosqlite.Row) -> Project:
        """Convert database row to Project."""
        return Project(
            id=row["id"],
            name=row["name"],
            description=row["description"],
            start_date=datetime.fromisoformat(row["start_date"]),
            end_date=(datetime.fromisoformat(row["end_date"]) if row["end_date"] else None),
            tags=frozenset(json.loads(row["tags"])),
            priority=row["priority"],
            metadata=json.loads(row["metadata"]),
            created_at=datetime.fromisoformat(row["created_at"]),
        )

    # ========== Brain Operations ==========

    async def save_brain(self, brain: Brain) -> None:
        conn = self._ensure_conn()

        await conn.execute(
            """INSERT OR REPLACE INTO brains
            (id, name, config, owner_id, is_public, shared_with, created_at, updated_at)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
            (
                brain.id,
                brain.name,
                json.dumps(
                    {
                        "decay_rate": brain.config.decay_rate,
                        "reinforcement_delta": brain.config.reinforcement_delta,
                        "activation_threshold": brain.config.activation_threshold,
                        "max_spread_hops": brain.config.max_spread_hops,
                        "max_context_tokens": brain.config.max_context_tokens,
                    }
                ),
                brain.owner_id,
                1 if brain.is_public else 0,
                json.dumps(brain.shared_with),
                brain.created_at.isoformat(),
                brain.updated_at.isoformat(),
            ),
        )
        await conn.commit()

    async def get_brain(self, brain_id: str) -> Brain | None:
        conn = self._ensure_conn()

        async with conn.execute("SELECT * FROM brains WHERE id = ?", (brain_id,)) as cursor:
            row = await cursor.fetchone()
            if row is None:
                return None
            return self._row_to_brain(row)

    def _row_to_brain(self, row: aiosqlite.Row) -> Brain:
        """Convert database row to Brain."""
        config_data = json.loads(row["config"])
        config = BrainConfig(
            decay_rate=config_data.get("decay_rate", 0.1),
            reinforcement_delta=config_data.get("reinforcement_delta", 0.05),
            activation_threshold=config_data.get("activation_threshold", 0.2),
            max_spread_hops=config_data.get("max_spread_hops", 4),
            max_context_tokens=config_data.get("max_context_tokens", 1500),
        )

        return Brain(
            id=row["id"],
            name=row["name"],
            config=config,
            owner_id=row["owner_id"],
            is_public=bool(row["is_public"]),
            shared_with=json.loads(row["shared_with"]),
            created_at=datetime.fromisoformat(row["created_at"]),
            updated_at=datetime.fromisoformat(row["updated_at"]),
        )
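
save_brain() is an upsert (INSERT OR REPLACE), and _row_to_brain() falls back to the defaults shown when a config key is missing, so older rows keep loading if config fields are added later. A round-trip sketch; Brain.create() is called with the same keyword arguments import_brain() uses below, and the BrainConfig values are simply the fallback defaults from _row_to_brain(), not documented constructor defaults:

```python
async def save_and_reload(storage: SQLiteStorage) -> Brain | None:
    config = BrainConfig(
        decay_rate=0.1,
        reinforcement_delta=0.05,
        activation_threshold=0.2,
        max_spread_hops=4,
        max_context_tokens=1500,
    )
    brain = Brain.create(name="scratch", config=config, brain_id="brain-a")
    await storage.save_brain(brain)  # INSERT OR REPLACE, so re-saving is fine
    return await storage.get_brain("brain-a")
```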

    async def export_brain(self, brain_id: str) -> BrainSnapshot:
        conn = self._ensure_conn()

        brain = await self.get_brain(brain_id)
        if brain is None:
            raise ValueError(f"Brain {brain_id} does not exist")

        # Export neurons
        neurons = []
        async with conn.execute("SELECT * FROM neurons WHERE brain_id = ?", (brain_id,)) as cursor:
            async for row in cursor:
                neurons.append(
                    {
                        "id": row["id"],
                        "type": row["type"],
                        "content": row["content"],
                        "metadata": json.loads(row["metadata"]),
                        "created_at": row["created_at"],
                    }
                )

        # Export synapses
        synapses = []
        async with conn.execute("SELECT * FROM synapses WHERE brain_id = ?", (brain_id,)) as cursor:
            async for row in cursor:
                synapses.append(
                    {
                        "id": row["id"],
                        "source_id": row["source_id"],
                        "target_id": row["target_id"],
                        "type": row["type"],
                        "weight": row["weight"],
                        "direction": row["direction"],
                        "metadata": json.loads(row["metadata"]),
                        "reinforced_count": row["reinforced_count"],
                        "created_at": row["created_at"],
                    }
                )

        # Export fibers
        fibers = []
        async with conn.execute("SELECT * FROM fibers WHERE brain_id = ?", (brain_id,)) as cursor:
            async for row in cursor:
                fibers.append(
                    {
                        "id": row["id"],
                        "neuron_ids": json.loads(row["neuron_ids"]),
                        "synapse_ids": json.loads(row["synapse_ids"]),
                        "anchor_neuron_id": row["anchor_neuron_id"],
                        "time_start": row["time_start"],
                        "time_end": row["time_end"],
                        "coherence": row["coherence"],
                        "salience": row["salience"],
                        "frequency": row["frequency"],
                        "summary": row["summary"],
                        "tags": json.loads(row["tags"]),
                        "metadata": json.loads(row["metadata"]),
                        "created_at": row["created_at"],
                    }
                )

        # Export typed memories
        typed_memories = []
        async with conn.execute(
            "SELECT * FROM typed_memories WHERE brain_id = ?", (brain_id,)
        ) as cursor:
            async for row in cursor:
                typed_memories.append(
                    {
                        "fiber_id": row["fiber_id"],
                        "memory_type": row["memory_type"],
                        "priority": row["priority"],
                        "provenance": json.loads(row["provenance"]),
                        "expires_at": row["expires_at"],
                        "project_id": row["project_id"],
                        "tags": json.loads(row["tags"]),
                        "metadata": json.loads(row["metadata"]),
                        "created_at": row["created_at"],
                    }
                )

        # Export projects
        projects = []
        async with conn.execute("SELECT * FROM projects WHERE brain_id = ?", (brain_id,)) as cursor:
            async for row in cursor:
                projects.append(
                    {
                        "id": row["id"],
                        "name": row["name"],
                        "description": row["description"],
                        "start_date": row["start_date"],
                        "end_date": row["end_date"],
                        "tags": json.loads(row["tags"]),
                        "priority": row["priority"],
                        "metadata": json.loads(row["metadata"]),
                        "created_at": row["created_at"],
                    }
                )

        return BrainSnapshot(
            brain_id=brain_id,
            brain_name=brain.name,
            exported_at=datetime.utcnow(),
            version="0.1.0",
            neurons=neurons,
            synapses=synapses,
            fibers=fibers,
            config={
                "decay_rate": brain.config.decay_rate,
                "reinforcement_delta": brain.config.reinforcement_delta,
                "activation_threshold": brain.config.activation_threshold,
                "max_spread_hops": brain.config.max_spread_hops,
                "max_context_tokens": brain.config.max_context_tokens,
            },
            metadata={
                "typed_memories": typed_memories,
                "projects": projects,
            },
        )
|
|
1409
|
+
|
|
1410
|
+
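For context, a minimal usage sketch of the export path above (not part of the package). The class name `SQLiteStorage`, its import path, and its constructor argument are assumptions for illustration; the snapshot fields used are exactly those populated by `export_brain`:

import asyncio

from neural_memory.storage.sqlite_store import SQLiteStorage  # assumed name/export

async def demo_export() -> None:
    store = SQLiteStorage("memory.db")  # assumed constructor signature
    snapshot = await store.export_brain("brain-123")
    # BrainSnapshot fields per export_brain above
    print(snapshot.brain_name, len(snapshot.neurons), len(snapshot.synapses))

asyncio.run(demo_export())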
+    async def import_brain(
+        self,
+        snapshot: BrainSnapshot,
+        target_brain_id: str | None = None,
+    ) -> str:
+        brain_id = target_brain_id or snapshot.brain_id
+
+        # Create brain
+        config = BrainConfig(**snapshot.config)
+        brain = Brain.create(
+            name=snapshot.brain_name,
+            config=config,
+            brain_id=brain_id,
+        )
+        await self.save_brain(brain)
+
+        # Set context
+        old_brain_id = self._current_brain_id
+        self.set_brain(brain_id)
+
+        try:
+            # Import neurons
+            for n_data in snapshot.neurons:
+                neuron = Neuron(
+                    id=n_data["id"],
+                    type=NeuronType(n_data["type"]),
+                    content=n_data["content"],
+                    metadata=n_data.get("metadata", {}),
+                    created_at=datetime.fromisoformat(n_data["created_at"]),
+                )
+                await self.add_neuron(neuron)
+
+            # Import synapses
+            for s_data in snapshot.synapses:
+                synapse = Synapse(
+                    id=s_data["id"],
+                    source_id=s_data["source_id"],
+                    target_id=s_data["target_id"],
+                    type=SynapseType(s_data["type"]),
+                    weight=s_data["weight"],
+                    direction=Direction(s_data["direction"]),
+                    metadata=s_data.get("metadata", {}),
+                    reinforced_count=s_data.get("reinforced_count", 0),
+                    created_at=datetime.fromisoformat(s_data["created_at"]),
+                )
+                await self.add_synapse(synapse)
+
+            # Import fibers
+            for f_data in snapshot.fibers:
+                fiber = Fiber(
+                    id=f_data["id"],
+                    neuron_ids=set(f_data["neuron_ids"]),
+                    synapse_ids=set(f_data["synapse_ids"]),
+                    anchor_neuron_id=f_data["anchor_neuron_id"],
+                    time_start=(
+                        datetime.fromisoformat(f_data["time_start"])
+                        if f_data.get("time_start")
+                        else None
+                    ),
+                    time_end=(
+                        datetime.fromisoformat(f_data["time_end"])
+                        if f_data.get("time_end")
+                        else None
+                    ),
+                    coherence=f_data.get("coherence", 0.0),
+                    salience=f_data.get("salience", 0.0),
+                    frequency=f_data.get("frequency", 0),
+                    summary=f_data.get("summary"),
+                    tags=set(f_data.get("tags", [])),
+                    metadata=f_data.get("metadata", {}),
+                    created_at=datetime.fromisoformat(f_data["created_at"]),
+                )
+                await self.add_fiber(fiber)
+
+            # Import projects first (typed_memories may reference them)
+            projects_data = snapshot.metadata.get("projects", [])
+            for p_data in projects_data:
+                project = Project(
+                    id=p_data["id"],
+                    name=p_data["name"],
+                    description=p_data.get("description", ""),
+                    start_date=datetime.fromisoformat(p_data["start_date"]),
+                    end_date=(
+                        datetime.fromisoformat(p_data["end_date"])
+                        if p_data.get("end_date")
+                        else None
+                    ),
+                    tags=frozenset(p_data.get("tags", [])),
+                    priority=p_data.get("priority", 1.0),
+                    metadata=p_data.get("metadata", {}),
+                    created_at=datetime.fromisoformat(p_data["created_at"]),
+                )
+                await self.add_project(project)
+
+            # Import typed memories
+            typed_memories_data = snapshot.metadata.get("typed_memories", [])
+            for tm_data in typed_memories_data:
+                prov_data = tm_data.get("provenance", {})
+                provenance = Provenance(
+                    source=prov_data.get("source", "import"),
+                    confidence=Confidence(prov_data.get("confidence", "medium")),
+                    verified=prov_data.get("verified", False),
+                    verified_at=(
+                        datetime.fromisoformat(prov_data["verified_at"])
+                        if prov_data.get("verified_at")
+                        else None
+                    ),
+                    created_by=prov_data.get("created_by", "import"),
+                    last_confirmed=(
+                        datetime.fromisoformat(prov_data["last_confirmed"])
+                        if prov_data.get("last_confirmed")
+                        else None
+                    ),
+                )
+
+                typed_memory = TypedMemory(
+                    fiber_id=tm_data["fiber_id"],
+                    memory_type=MemoryType(tm_data["memory_type"]),
+                    priority=Priority(tm_data["priority"]),
+                    provenance=provenance,
+                    expires_at=(
+                        datetime.fromisoformat(tm_data["expires_at"])
+                        if tm_data.get("expires_at")
+                        else None
+                    ),
+                    project_id=tm_data.get("project_id"),
+                    tags=frozenset(tm_data.get("tags", [])),
+                    metadata=tm_data.get("metadata", {}),
+                    created_at=datetime.fromisoformat(tm_data["created_at"]),
+                )
+                await self.add_typed_memory(typed_memory)
+
+        finally:
+            self._current_brain_id = old_brain_id
+
+        return brain_id
+
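A hedged sketch of the round-trip this enables: because `target_brain_id` overrides `snapshot.brain_id`, the same snapshot can be imported alongside the original brain without id collisions, and the `try`/`finally` above restores the caller's current-brain context even if an import step fails. The `store` object and brain id here are illustrative:

async def demo_clone(store, snapshot) -> str:
    # Import under a fresh id so the original brain is untouched.
    new_id = await store.import_brain(snapshot, target_brain_id="brain-123-copy")
    return new_id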
+    # ========== Statistics ==========
+
+    async def get_stats(self, brain_id: str) -> dict[str, int]:
+        conn = self._ensure_conn()
+
+        stats = {}
+
+        async with conn.execute(
+            "SELECT COUNT(*) as cnt FROM neurons WHERE brain_id = ?", (brain_id,)
+        ) as cursor:
+            row = await cursor.fetchone()
+            stats["neuron_count"] = row["cnt"] if row else 0
+
+        async with conn.execute(
+            "SELECT COUNT(*) as cnt FROM synapses WHERE brain_id = ?", (brain_id,)
+        ) as cursor:
+            row = await cursor.fetchone()
+            stats["synapse_count"] = row["cnt"] if row else 0
+
+        async with conn.execute(
+            "SELECT COUNT(*) as cnt FROM fibers WHERE brain_id = ?", (brain_id,)
+        ) as cursor:
+            row = await cursor.fetchone()
+            stats["fiber_count"] = row["cnt"] if row else 0
+
+        async with conn.execute(
+            "SELECT COUNT(*) as cnt FROM projects WHERE brain_id = ?", (brain_id,)
+        ) as cursor:
+            row = await cursor.fetchone()
+            stats["project_count"] = row["cnt"] if row else 0
+
+        return stats
+
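A small sketch of reading the counters (illustrative `store` and brain id); the keys are the four written by `get_stats` above:

async def demo_stats(store) -> None:
    stats = await store.get_stats("brain-123")
    # neuron_count, synapse_count, fiber_count, project_count
    for name, count in stats.items():
        print(f"{name}: {count}")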
+    # ========== Cleanup ==========
+
+    async def clear(self, brain_id: str) -> None:
+        conn = self._ensure_conn()
+
+        # Delete in order to respect foreign keys
+        await conn.execute("DELETE FROM typed_memories WHERE brain_id = ?", (brain_id,))
+        await conn.execute("DELETE FROM projects WHERE brain_id = ?", (brain_id,))
+        await conn.execute("DELETE FROM fibers WHERE brain_id = ?", (brain_id,))
+        await conn.execute("DELETE FROM synapses WHERE brain_id = ?", (brain_id,))
+        await conn.execute("DELETE FROM neuron_states WHERE brain_id = ?", (brain_id,))
+        await conn.execute("DELETE FROM neurons WHERE brain_id = ?", (brain_id,))
+        await conn.execute("DELETE FROM brains WHERE id = ?", (brain_id,))
+
+        await conn.commit()
+
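Note the delete order: child tables (`typed_memories` through `neurons`) are cleared before the parent `brains` row so foreign-key constraints hold throughout. A sketch of a full reset, using only methods seen in this file (`get_brain` returning `None` for a missing brain is confirmed by the check in `export_brain`):

async def demo_reset(store) -> None:
    await store.clear("brain-123")
    # clear() also removes the brains row, so the id reads back as absent
    assert await store.get_brain("brain-123") is None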
+    # ========== Compatibility with PersistentStorage ==========
+
+    def disable_auto_save(self) -> None:
+        """No-op for SQLite (transactions handle this)."""
+        pass
+
+    def enable_auto_save(self) -> None:
+        """No-op for SQLite (transactions handle this)."""
+        pass
+
+    async def batch_save(self) -> None:
+        """Commit any pending transactions (SQLite auto-commits, so this is mostly a no-op)."""
+        conn = self._ensure_conn()
+        await conn.commit()
+
+    async def _save_to_file(self) -> None:
+        """No-op for SQLite (auto-persisted)."""
+        pass
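These shims keep the SQLite backend drop-in compatible with callers written against the file-based PersistentStorage API: the auto-save toggles become no-ops and `batch_save` reduces to a commit. A sketch of backend-agnostic bulk-write code under that assumption (`store` and `neurons` are illustrative; `add_neuron` is the method used by `import_brain` above):

async def bulk_write(store, neurons) -> None:
    store.disable_auto_save()  # no-op on SQLite, meaningful on file-based storage
    try:
        for neuron in neurons:
            await store.add_neuron(neuron)
    finally:
        store.enable_auto_save()
        await store.batch_save()  # commits the SQLite transaction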