neural-memory 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55) hide show
  1. neural_memory/__init__.py +38 -0
  2. neural_memory/cli/__init__.py +15 -0
  3. neural_memory/cli/__main__.py +6 -0
  4. neural_memory/cli/config.py +176 -0
  5. neural_memory/cli/main.py +2702 -0
  6. neural_memory/cli/storage.py +169 -0
  7. neural_memory/cli/tui.py +471 -0
  8. neural_memory/core/__init__.py +52 -0
  9. neural_memory/core/brain.py +301 -0
  10. neural_memory/core/brain_mode.py +273 -0
  11. neural_memory/core/fiber.py +236 -0
  12. neural_memory/core/memory_types.py +331 -0
  13. neural_memory/core/neuron.py +168 -0
  14. neural_memory/core/project.py +257 -0
  15. neural_memory/core/synapse.py +215 -0
  16. neural_memory/engine/__init__.py +15 -0
  17. neural_memory/engine/activation.py +335 -0
  18. neural_memory/engine/encoder.py +391 -0
  19. neural_memory/engine/retrieval.py +440 -0
  20. neural_memory/extraction/__init__.py +42 -0
  21. neural_memory/extraction/entities.py +547 -0
  22. neural_memory/extraction/parser.py +337 -0
  23. neural_memory/extraction/router.py +396 -0
  24. neural_memory/extraction/temporal.py +428 -0
  25. neural_memory/mcp/__init__.py +9 -0
  26. neural_memory/mcp/__main__.py +6 -0
  27. neural_memory/mcp/server.py +621 -0
  28. neural_memory/py.typed +0 -0
  29. neural_memory/safety/__init__.py +31 -0
  30. neural_memory/safety/freshness.py +238 -0
  31. neural_memory/safety/sensitive.py +304 -0
  32. neural_memory/server/__init__.py +5 -0
  33. neural_memory/server/app.py +99 -0
  34. neural_memory/server/dependencies.py +33 -0
  35. neural_memory/server/models.py +138 -0
  36. neural_memory/server/routes/__init__.py +7 -0
  37. neural_memory/server/routes/brain.py +221 -0
  38. neural_memory/server/routes/memory.py +169 -0
  39. neural_memory/server/routes/sync.py +387 -0
  40. neural_memory/storage/__init__.py +17 -0
  41. neural_memory/storage/base.py +441 -0
  42. neural_memory/storage/factory.py +329 -0
  43. neural_memory/storage/memory_store.py +896 -0
  44. neural_memory/storage/shared_store.py +650 -0
  45. neural_memory/storage/sqlite_store.py +1613 -0
  46. neural_memory/sync/__init__.py +5 -0
  47. neural_memory/sync/client.py +435 -0
  48. neural_memory/unified_config.py +315 -0
  49. neural_memory/utils/__init__.py +5 -0
  50. neural_memory/utils/config.py +98 -0
  51. neural_memory-0.1.0.dist-info/METADATA +314 -0
  52. neural_memory-0.1.0.dist-info/RECORD +55 -0
  53. neural_memory-0.1.0.dist-info/WHEEL +4 -0
  54. neural_memory-0.1.0.dist-info/entry_points.txt +4 -0
  55. neural_memory-0.1.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,896 @@
1
+ """In-memory storage backend using NetworkX."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from collections import defaultdict
6
+ from datetime import datetime
7
+ from typing import Literal
8
+
9
+ import networkx as nx
10
+
11
+ from neural_memory.core.brain import Brain, BrainSnapshot
12
+ from neural_memory.core.fiber import Fiber
13
+ from neural_memory.core.memory_types import (
14
+ Confidence,
15
+ MemoryType,
16
+ Priority,
17
+ Provenance,
18
+ TypedMemory,
19
+ )
20
+ from neural_memory.core.neuron import Neuron, NeuronState, NeuronType
21
+ from neural_memory.core.project import Project
22
+ from neural_memory.core.synapse import Synapse, SynapseType
23
+ from neural_memory.storage.base import NeuralStorage
24
+
25
+
26
class InMemoryStorage(NeuralStorage):
    """NetworkX-backed, non-persistent storage for development and testing.

    All entities live in plain dictionaries keyed first by brain id, while a
    single MultiDiGraph mirrors the neuron/synapse topology. Contents vanish
    when the process exits unless exported via ``export_brain``.
    """

    def __init__(self) -> None:
        """Create an empty store with no brain context selected."""
        # Shared topology graph: nodes are neuron ids, edges keyed by synapse id.
        self._graph = nx.MultiDiGraph()

        # Per-brain entity tables: brain_id -> {entity_id: entity}.
        self._neurons: dict[str, dict[str, Neuron]] = defaultdict(dict)
        self._synapses: dict[str, dict[str, Synapse]] = defaultdict(dict)
        self._fibers: dict[str, dict[str, Fiber]] = defaultdict(dict)
        self._states: dict[str, dict[str, NeuronState]] = defaultdict(dict)
        self._typed_memories: dict[str, dict[str, TypedMemory]] = defaultdict(dict)
        self._projects: dict[str, dict[str, Project]] = defaultdict(dict)
        self._brains: dict[str, Brain] = {}

        # Brain that subsequent operations act on; None until set_brain().
        self._current_brain_id: str | None = None

    def set_brain(self, brain_id: str) -> None:
        """Select the brain that subsequent operations act on."""
        self._current_brain_id = brain_id

    def _get_brain_id(self) -> str:
        """Return the active brain id, raising if none is selected."""
        if self._current_brain_id is None:
            raise ValueError("No brain context set. Call set_brain() first.")
        return self._current_brain_id
62
+
63
# ========== Neuron Operations ==========

async def add_neuron(self, neuron: Neuron) -> str:
    """Store a new neuron under the active brain and return its id.

    The neuron is mirrored into the topology graph and given a fresh
    activation state.

    Raises:
        ValueError: if a neuron with the same id already exists.
    """
    brain_id = self._get_brain_id()
    known = self._neurons[brain_id]
    if neuron.id in known:
        raise ValueError(f"Neuron {neuron.id} already exists")

    known[neuron.id] = neuron
    self._graph.add_node(
        neuron.id,
        brain_id=brain_id,
        type=neuron.type,
        content=neuron.content,
    )
    # Every neuron starts out with a default activation state.
    self._states[brain_id][neuron.id] = NeuronState(neuron_id=neuron.id)
    return neuron.id

async def get_neuron(self, neuron_id: str) -> Neuron | None:
    """Return the neuron with *neuron_id*, or None when absent."""
    return self._neurons[self._get_brain_id()].get(neuron_id)
87
+
88
async def find_neurons(
    self,
    type: NeuronType | None = None,
    content_contains: str | None = None,
    content_exact: str | None = None,
    time_range: tuple[datetime, datetime] | None = None,
    limit: int = 100,
) -> list[Neuron]:
    """Scan the active brain's neurons, returning up to *limit* matches.

    Filters are conjunctive: a neuron must satisfy every one supplied.
    ``content_contains`` matches case-insensitively, ``content_exact``
    compares verbatim, and ``time_range`` is an inclusive (start, end)
    window on ``created_at``. Results keep storage iteration order.
    """
    matched: list[Neuron] = []
    # Lowercase the needle once instead of per candidate.
    needle = content_contains.lower() if content_contains is not None else None

    for candidate in self._neurons[self._get_brain_id()].values():
        if type is not None and candidate.type != type:
            continue
        if needle is not None and needle not in candidate.content.lower():
            continue
        if content_exact is not None and candidate.content != content_exact:
            continue
        if time_range is not None:
            lo, hi = time_range
            if not (lo <= candidate.created_at <= hi):
                continue

        matched.append(candidate)
        if len(matched) >= limit:
            break

    return matched
125
+
126
async def update_neuron(self, neuron: Neuron) -> None:
    """Replace a stored neuron and refresh its graph-node attributes.

    Raises:
        ValueError: if the neuron is not present in the active brain.
    """
    brain_id = self._get_brain_id()
    if neuron.id not in self._neurons[brain_id]:
        raise ValueError(f"Neuron {neuron.id} does not exist")

    self._neurons[brain_id][neuron.id] = neuron
    self._graph.nodes[neuron.id].update(type=neuron.type, content=neuron.content)

async def delete_neuron(self, neuron_id: str) -> bool:
    """Remove a neuron plus its synapses, graph node, and state.

    Returns:
        True when something was deleted, False if the id was unknown.
    """
    brain_id = self._get_brain_id()
    if neuron_id not in self._neurons[brain_id]:
        return False

    # Cascade: drop every synapse touching this neuron first.
    doomed = [
        s.id
        for s in self._synapses[brain_id].values()
        if neuron_id in (s.source_id, s.target_id)
    ]
    for synapse_id in doomed:
        await self.delete_synapse(synapse_id)

    if self._graph.has_node(neuron_id):
        self._graph.remove_node(neuron_id)

    del self._neurons[brain_id][neuron_id]
    self._states[brain_id].pop(neuron_id, None)
    return True
162
+
163
# ========== Neuron State Operations ==========

async def get_neuron_state(self, neuron_id: str) -> NeuronState | None:
    """Return the activation state for a neuron, or None if untracked."""
    return self._states[self._get_brain_id()].get(neuron_id)

async def update_neuron_state(self, state: NeuronState) -> None:
    """Upsert the activation state keyed by ``state.neuron_id``."""
    self._states[self._get_brain_id()][state.neuron_id] = state
172
+
173
# ========== Synapse Operations ==========

async def add_synapse(self, synapse: Synapse) -> str:
    """Store a synapse and mirror it as a graph edge keyed by its id.

    Raises:
        ValueError: on a duplicate synapse id, or if either endpoint
            neuron is missing from the active brain.
    """
    brain_id = self._get_brain_id()
    if synapse.id in self._synapses[brain_id]:
        raise ValueError(f"Synapse {synapse.id} already exists")

    neurons = self._neurons[brain_id]
    if synapse.source_id not in neurons:
        raise ValueError(f"Source neuron {synapse.source_id} does not exist")
    if synapse.target_id not in neurons:
        raise ValueError(f"Target neuron {synapse.target_id} does not exist")

    self._synapses[brain_id][synapse.id] = synapse
    self._graph.add_edge(
        synapse.source_id,
        synapse.target_id,
        key=synapse.id,
        type=synapse.type,
        weight=synapse.weight,
    )
    return synapse.id

async def get_synapse(self, synapse_id: str) -> Synapse | None:
    """Return the synapse with *synapse_id*, or None when absent."""
    return self._synapses[self._get_brain_id()].get(synapse_id)
201
+
202
async def get_synapses(
    self,
    source_id: str | None = None,
    target_id: str | None = None,
    type: SynapseType | None = None,
    min_weight: float | None = None,
) -> list[Synapse]:
    """List synapses in the active brain matching every given filter.

    Each filter is optional; omitted filters accept all synapses.
    """

    def keep(s: Synapse) -> bool:
        if source_id is not None and s.source_id != source_id:
            return False
        if target_id is not None and s.target_id != target_id:
            return False
        if type is not None and s.type != type:
            return False
        return min_weight is None or s.weight >= min_weight

    return [s for s in self._synapses[self._get_brain_id()].values() if keep(s)]
225
+
226
async def update_synapse(self, synapse: Synapse) -> None:
    """Replace a stored synapse and refresh its graph-edge attributes.

    The edge is looked up under the *previous* endpoints, so an update
    that re-points the synapse leaves the old edge attributes untouched.

    Raises:
        ValueError: if the synapse is not present in the active brain.
    """
    brain_id = self._get_brain_id()
    table = self._synapses[brain_id]
    if synapse.id not in table:
        raise ValueError(f"Synapse {synapse.id} does not exist")

    previous = table[synapse.id]
    table[synapse.id] = synapse

    if self._graph.has_edge(previous.source_id, previous.target_id, key=synapse.id):
        edge_attrs = self._graph[previous.source_id][previous.target_id][synapse.id]
        edge_attrs.update(type=synapse.type, weight=synapse.weight)

async def delete_synapse(self, synapse_id: str) -> bool:
    """Remove a synapse and its graph edge; False when id is unknown."""
    brain_id = self._get_brain_id()
    table = self._synapses[brain_id]
    if synapse_id not in table:
        return False

    synapse = table[synapse_id]
    if self._graph.has_edge(synapse.source_id, synapse.target_id, key=synapse_id):
        self._graph.remove_edge(synapse.source_id, synapse.target_id, key=synapse_id)

    del table[synapse_id]
    return True
256
+
257
# ========== Graph Traversal ==========

async def get_neighbors(
    self,
    neuron_id: str,
    direction: Literal["out", "in", "both"] = "both",
    synapse_types: list[SynapseType] | None = None,
    min_weight: float | None = None,
) -> list[tuple[Neuron, Synapse]]:
    """Return (neighbor, synapse) pairs adjacent to *neuron_id*.

    ``direction`` selects outgoing edges, incoming edges, or both. For a
    pure ``"in"`` query, non-bidirectional synapses are skipped; with
    ``"both"`` incoming edges are included regardless. Optional filters
    restrict synapse type and minimum weight. Duplicate pairs from the
    incoming pass are suppressed.
    """
    brain_id = self._get_brain_id()
    pairs: list[tuple[Neuron, Synapse]] = []

    neurons = self._neurons[brain_id]
    if neuron_id not in neurons:
        return pairs

    synapses = self._synapses[brain_id]
    node_known = self._graph.has_node(neuron_id)

    def passes(synapse: Synapse | None) -> bool:
        # Shared filter for both traversal directions.
        if synapse is None:
            return False
        if synapse_types and synapse.type not in synapse_types:
            return False
        return min_weight is None or synapse.weight >= min_weight

    if direction in ("out", "both") and node_known:
        for _, other_id, edge_key in self._graph.out_edges(neuron_id, keys=True):
            synapse = synapses.get(edge_key)
            if not passes(synapse):
                continue
            neighbor = neurons.get(other_id)
            if neighbor:
                pairs.append((neighbor, synapse))

    if direction in ("in", "both") and node_known:
        for other_id, _, edge_key in self._graph.in_edges(neuron_id, keys=True):
            synapse = synapses.get(edge_key)
            if not passes(synapse):
                continue
            # Pure "in" queries honour synapse directionality.
            if direction == "in" and not synapse.is_bidirectional:
                continue
            neighbor = neurons.get(other_id)
            if neighbor and (neighbor, synapse) not in pairs:
                pairs.append((neighbor, synapse))

    return pairs
307
+
308
async def get_path(
    self,
    source_id: str,
    target_id: str,
    max_hops: int = 4,
) -> list[tuple[Neuron, Synapse]] | None:
    """Find a shortest hop-path between two neurons in the active brain.

    Returns one (neuron, synapse) pair per hop — the neuron entered and
    the synapse used to reach it; the source neuron itself is omitted.
    Returns None when either endpoint is unknown, no path exists, or the
    path would exceed *max_hops*. Parallel edges are resolved in favour
    of the highest-weight synapse.
    """
    brain_id = self._get_brain_id()
    neurons = self._neurons[brain_id]
    if source_id not in neurons or target_id not in neurons:
        return None

    try:
        # Unweighted BFS shortest path over the topology graph.
        hops = nx.shortest_path(self._graph, source_id, target_id, weight=None)
    except (nx.NetworkXNoPath, nx.NodeNotFound):
        return None

    if len(hops) - 1 > max_hops:
        return None

    walk: list[tuple[Neuron, Synapse]] = []
    for from_id, to_id in zip(hops, hops[1:]):
        neuron = neurons.get(to_id)
        if not neuron:
            return None

        edge_data = self._graph.get_edge_data(from_id, to_id)
        if not edge_data:
            return None

        # Prefer the strongest of any parallel edges between the pair.
        best_key = max(edge_data, key=lambda k: edge_data[k].get("weight", 0))
        synapse = self._synapses[brain_id].get(best_key)
        if not synapse:
            return None

        walk.append((neuron, synapse))

    return walk
362
+
363
# ========== Fiber Operations ==========

async def add_fiber(self, fiber: Fiber) -> str:
    """Store a new fiber under the active brain and return its id.

    Raises:
        ValueError: if a fiber with the same id already exists.
    """
    brain_id = self._get_brain_id()
    if fiber.id in self._fibers[brain_id]:
        raise ValueError(f"Fiber {fiber.id} already exists")

    self._fibers[brain_id][fiber.id] = fiber
    return fiber.id

async def get_fiber(self, fiber_id: str) -> Fiber | None:
    """Return the fiber with *fiber_id*, or None when absent."""
    return self._fibers[self._get_brain_id()].get(fiber_id)
377
+
378
async def find_fibers(
    self,
    contains_neuron: str | None = None,
    time_overlaps: tuple[datetime, datetime] | None = None,
    tags: set[str] | None = None,
    min_salience: float | None = None,
    limit: int = 100,
) -> list[Fiber]:
    """Return up to *limit* fibers matching every supplied filter.

    ``tags`` requires ALL listed tags to be present on the fiber. Note
    that the scan stops as soon as *limit* fibers have matched and only
    then sorts by salience (descending) — the result is the first
    *limit* matches re-ordered, not the globally most salient fibers.
    """
    hits: list[Fiber] = []

    for fiber in self._fibers[self._get_brain_id()].values():
        if contains_neuron is not None and contains_neuron not in fiber.neuron_ids:
            continue
        if time_overlaps is not None:
            window_start, window_end = time_overlaps
            if not fiber.overlaps_time(window_start, window_end):
                continue
        if tags is not None and not tags.issubset(fiber.tags):
            continue
        if min_salience is not None and fiber.salience < min_salience:
            continue

        hits.append(fiber)
        if len(hits) >= limit:
            break

    hits.sort(key=lambda f: f.salience, reverse=True)
    return hits
419
+
420
async def update_fiber(self, fiber: Fiber) -> None:
    """Replace a stored fiber.

    Raises:
        ValueError: if the fiber is not present in the active brain.
    """
    brain_id = self._get_brain_id()
    if fiber.id not in self._fibers[brain_id]:
        raise ValueError(f"Fiber {fiber.id} does not exist")

    self._fibers[brain_id][fiber.id] = fiber

async def delete_fiber(self, fiber_id: str) -> bool:
    """Remove a fiber by id; False when the id is unknown."""
    brain_id = self._get_brain_id()
    if fiber_id not in self._fibers[brain_id]:
        return False

    del self._fibers[brain_id][fiber_id]
    return True
436
+
437
async def get_fibers(
    self,
    limit: int = 10,
    order_by: Literal["created_at", "salience", "frequency"] = "created_at",
    descending: bool = True,
) -> list[Fiber]:
    """Return up to *limit* fibers ordered by the requested attribute.

    An unrecognised ``order_by`` value leaves storage iteration order
    untouched.
    """
    everything = list(self._fibers[self._get_brain_id()].values())

    # Dispatch table instead of an if/elif chain.
    sort_keys = {
        "created_at": lambda f: f.created_at,
        "salience": lambda f: f.salience,
        "frequency": lambda f: f.frequency,
    }
    key = sort_keys.get(order_by)
    if key is not None:
        everything.sort(key=key, reverse=descending)

    return everything[:limit]
455
+
456
# ========== TypedMemory Operations ==========

async def add_typed_memory(self, typed_memory: TypedMemory) -> str:
    """Attach a typed memory to an existing fiber, keyed by fiber id.

    Raises:
        ValueError: if the referenced fiber does not exist.
    """
    brain_id = self._get_brain_id()
    if typed_memory.fiber_id not in self._fibers[brain_id]:
        raise ValueError(f"Fiber {typed_memory.fiber_id} does not exist")

    self._typed_memories[brain_id][typed_memory.fiber_id] = typed_memory
    return typed_memory.fiber_id

async def get_typed_memory(self, fiber_id: str) -> TypedMemory | None:
    """Return the typed memory for *fiber_id*, or None when absent."""
    return self._typed_memories[self._get_brain_id()].get(fiber_id)
473
+
474
async def find_typed_memories(
    self,
    memory_type: MemoryType | None = None,
    min_priority: Priority | None = None,
    include_expired: bool = False,
    project_id: str | None = None,
    tags: set[str] | None = None,
    limit: int = 100,
) -> list[TypedMemory]:
    """Return up to *limit* typed memories matching every filter.

    Expired memories are skipped unless ``include_expired`` is set, and
    ``tags`` demands ALL listed tags. As with ``find_fibers``, the scan
    stops once *limit* matches are collected and only then sorts by
    (priority, created_at) descending.
    """
    hits: list[TypedMemory] = []

    for memory in self._typed_memories[self._get_brain_id()].values():
        if memory_type is not None and memory.memory_type != memory_type:
            continue
        if min_priority is not None and memory.priority < min_priority:
            continue
        if not include_expired and memory.is_expired:
            continue
        if project_id is not None and memory.project_id != project_id:
            continue
        if tags is not None and not tags.issubset(memory.tags):
            continue

        hits.append(memory)
        if len(hits) >= limit:
            break

    hits.sort(key=lambda t: (t.priority, t.created_at), reverse=True)
    return hits
517
+
518
async def update_typed_memory(self, typed_memory: TypedMemory) -> None:
    """Replace an existing typed memory.

    Raises:
        ValueError: if no typed memory exists for the fiber.
    """
    brain_id = self._get_brain_id()
    if typed_memory.fiber_id not in self._typed_memories[brain_id]:
        raise ValueError(f"TypedMemory for fiber {typed_memory.fiber_id} does not exist")

    self._typed_memories[brain_id][typed_memory.fiber_id] = typed_memory

async def delete_typed_memory(self, fiber_id: str) -> bool:
    """Drop the typed memory for *fiber_id*; False when none exists."""
    brain_id = self._get_brain_id()
    if fiber_id not in self._typed_memories[brain_id]:
        return False

    del self._typed_memories[brain_id][fiber_id]
    return True

async def get_expired_memories(self) -> list[TypedMemory]:
    """Return every typed memory whose ``is_expired`` flag is set."""
    return [m for m in self._typed_memories[self._get_brain_id()].values() if m.is_expired]
541
+
542
# ========== Project Operations ==========

async def add_project(self, project: Project) -> str:
    """Store a new project under the active brain and return its id.

    Raises:
        ValueError: if a project with the same id already exists.
    """
    brain_id = self._get_brain_id()
    if project.id in self._projects[brain_id]:
        raise ValueError(f"Project {project.id} already exists")

    self._projects[brain_id][project.id] = project
    return project.id

async def get_project(self, project_id: str) -> Project | None:
    """Return the project with *project_id*, or None when absent."""
    return self._projects[self._get_brain_id()].get(project_id)

async def get_project_by_name(self, name: str) -> Project | None:
    """Case-insensitive lookup of a project by display name."""
    wanted = name.lower()
    for project in self._projects[self._get_brain_id()].values():
        if project.name.lower() == wanted:
            return project
    return None
567
+
568
async def list_projects(
    self,
    active_only: bool = False,
    tags: set[str] | None = None,
    limit: int = 100,
) -> list[Project]:
    """Return up to *limit* projects, optionally filtered.

    Unlike the typed-memory tag filter, ``tags`` here matches when the
    project shares AT LEAST ONE tag. Matches are collected up to
    *limit* and then sorted by (priority, start_date) descending.
    """
    chosen: list[Project] = []

    for project in self._projects[self._get_brain_id()].values():
        if active_only and not project.is_active:
            continue
        if tags is not None and not tags.intersection(project.tags):
            continue

        chosen.append(project)
        if len(chosen) >= limit:
            break

    chosen.sort(key=lambda p: (p.priority, p.start_date), reverse=True)
    return chosen
595
+
596
async def update_project(self, project: Project) -> None:
    """Replace a stored project.

    Raises:
        ValueError: if the project is not present in the active brain.
    """
    brain_id = self._get_brain_id()
    if project.id not in self._projects[brain_id]:
        raise ValueError(f"Project {project.id} does not exist")

    self._projects[brain_id][project.id] = project

async def delete_project(self, project_id: str) -> bool:
    """Remove a project by id; False when the id is unknown."""
    brain_id = self._get_brain_id()
    if project_id not in self._projects[brain_id]:
        return False

    del self._projects[brain_id][project_id]
    return True
614
+
615
async def get_project_memories(
    self,
    project_id: str,
    include_expired: bool = False,
) -> list[TypedMemory]:
    """Return a project's typed memories, highest-priority/newest first.

    Expired memories are omitted unless ``include_expired`` is set.
    """
    memories = [
        m
        for m in self._typed_memories[self._get_brain_id()].values()
        if m.project_id == project_id and (include_expired or not m.is_expired)
    ]
    memories.sort(key=lambda t: (t.priority, t.created_at), reverse=True)
    return memories
634
+
635
# ========== Brain Operations ==========

async def save_brain(self, brain: Brain) -> None:
    """Store or overwrite a brain record (not scoped to the context)."""
    self._brains[brain.id] = brain

async def get_brain(self, brain_id: str) -> Brain | None:
    """Return the brain with *brain_id*, or None when absent."""
    return self._brains.get(brain_id)
642
+
643
async def export_brain(self, brain_id: str) -> BrainSnapshot:
    """Serialize a brain and everything stored under it into a snapshot.

    Neurons, synapses and fibers are converted to JSON-friendly dicts;
    typed memories and projects ride along in ``snapshot.metadata``
    under the "typed_memories" and "projects" keys.

    Args:
        brain_id: Id of a brain previously stored via ``save_brain``.

    Returns:
        A ``BrainSnapshot`` capturing the brain's full contents.

    Raises:
        ValueError: if no brain with *brain_id* has been saved.
    """
    brain = self._brains.get(brain_id)
    if brain is None:
        raise ValueError(f"Brain {brain_id} does not exist")

    from dataclasses import asdict
    from datetime import timezone

    # Serialize neurons
    neurons = [
        {
            "id": n.id,
            "type": n.type.value,
            "content": n.content,
            "metadata": n.metadata,
            "created_at": n.created_at.isoformat(),
        }
        for n in self._neurons[brain_id].values()
    ]

    # Serialize synapses
    synapses = [
        {
            "id": s.id,
            "source_id": s.source_id,
            "target_id": s.target_id,
            "type": s.type.value,
            "weight": s.weight,
            "direction": s.direction.value,
            "metadata": s.metadata,
            "reinforced_count": s.reinforced_count,
            "created_at": s.created_at.isoformat(),
        }
        for s in self._synapses[brain_id].values()
    ]

    # Serialize fibers (sets become lists, datetimes become ISO strings)
    fibers = [
        {
            "id": f.id,
            "neuron_ids": list(f.neuron_ids),
            "synapse_ids": list(f.synapse_ids),
            "anchor_neuron_id": f.anchor_neuron_id,
            "time_start": f.time_start.isoformat() if f.time_start else None,
            "time_end": f.time_end.isoformat() if f.time_end else None,
            "coherence": f.coherence,
            "salience": f.salience,
            "frequency": f.frequency,
            "summary": f.summary,
            "tags": list(f.tags),
            "metadata": f.metadata,
            "created_at": f.created_at.isoformat(),
        }
        for f in self._fibers[brain_id].values()
    ]

    # Serialize typed memories, provenance nested as its own dict
    typed_memories = [
        {
            "fiber_id": tm.fiber_id,
            "memory_type": tm.memory_type.value,
            "priority": tm.priority.value,
            "provenance": {
                "source": tm.provenance.source,
                "confidence": tm.provenance.confidence.value,
                "verified": tm.provenance.verified,
                "verified_at": tm.provenance.verified_at.isoformat()
                if tm.provenance.verified_at
                else None,
                "created_by": tm.provenance.created_by,
                "last_confirmed": tm.provenance.last_confirmed.isoformat()
                if tm.provenance.last_confirmed
                else None,
            },
            "expires_at": tm.expires_at.isoformat() if tm.expires_at else None,
            "project_id": tm.project_id,
            "tags": list(tm.tags),
            "metadata": tm.metadata,
            "created_at": tm.created_at.isoformat(),
        }
        for tm in self._typed_memories[brain_id].values()
    ]

    # Serialize projects
    projects = [p.to_dict() for p in self._projects[brain_id].values()]

    return BrainSnapshot(
        brain_id=brain_id,
        brain_name=brain.name,
        # Fix: datetime.utcnow() is deprecated (Python 3.12+) and returns a
        # naive timestamp; use an explicit UTC-aware timestamp instead.
        exported_at=datetime.now(timezone.utc),
        version="0.1.0",
        neurons=neurons,
        synapses=synapses,
        fibers=fibers,
        config=asdict(brain.config),
        metadata={"typed_memories": typed_memories, "projects": projects},
    )
739
+
740
async def import_brain(
    self,
    snapshot: BrainSnapshot,
    target_brain_id: str | None = None,
) -> str:
    """Rebuild a brain from a snapshot, optionally under a new id.

    The current brain context is switched to the imported brain for the
    duration of the import and restored afterwards, even on failure.
    Typed memories are only re-attached when their fiber was imported;
    missing provenance fields fall back to import defaults.

    Returns:
        The id the brain was imported under.
    """
    from neural_memory.core.brain import BrainConfig
    from neural_memory.core.synapse import Direction

    brain_id = target_brain_id or snapshot.brain_id

    # Recreate and persist the brain record itself.
    brain = Brain.create(
        name=snapshot.brain_name,
        config=BrainConfig(**snapshot.config),
        brain_id=brain_id,
    )
    await self.save_brain(brain)

    # Switch context for the duration of the import.
    previous_context = self._current_brain_id
    self.set_brain(brain_id)

    try:
        # Neurons first — synapses validate their endpoints.
        for record in snapshot.neurons:
            await self.add_neuron(
                Neuron(
                    id=record["id"],
                    type=NeuronType(record["type"]),
                    content=record["content"],
                    metadata=record.get("metadata", {}),
                    created_at=datetime.fromisoformat(record["created_at"]),
                )
            )

        for record in snapshot.synapses:
            await self.add_synapse(
                Synapse(
                    id=record["id"],
                    source_id=record["source_id"],
                    target_id=record["target_id"],
                    type=SynapseType(record["type"]),
                    weight=record["weight"],
                    direction=Direction(record["direction"]),
                    metadata=record.get("metadata", {}),
                    reinforced_count=record.get("reinforced_count", 0),
                    created_at=datetime.fromisoformat(record["created_at"]),
                )
            )

        for record in snapshot.fibers:
            start_raw = record.get("time_start")
            end_raw = record.get("time_end")
            await self.add_fiber(
                Fiber(
                    id=record["id"],
                    neuron_ids=set(record["neuron_ids"]),
                    synapse_ids=set(record["synapse_ids"]),
                    anchor_neuron_id=record["anchor_neuron_id"],
                    time_start=datetime.fromisoformat(start_raw) if start_raw else None,
                    time_end=datetime.fromisoformat(end_raw) if end_raw else None,
                    coherence=record.get("coherence", 0.0),
                    salience=record.get("salience", 0.0),
                    frequency=record.get("frequency", 0),
                    summary=record.get("summary"),
                    tags=set(record.get("tags", [])),
                    metadata=record.get("metadata", {}),
                    created_at=datetime.fromisoformat(record["created_at"]),
                )
            )

        # Typed memories travel in snapshot.metadata.
        for record in snapshot.metadata.get("typed_memories", []):
            prov = record.get("provenance", {})
            verified_raw = prov.get("verified_at")
            confirmed_raw = prov.get("last_confirmed")
            provenance = Provenance(
                source=prov.get("source", "import"),
                confidence=Confidence(prov.get("confidence", "medium")),
                verified=prov.get("verified", False),
                verified_at=datetime.fromisoformat(verified_raw) if verified_raw else None,
                created_by=prov.get("created_by", "import"),
                last_confirmed=datetime.fromisoformat(confirmed_raw) if confirmed_raw else None,
            )

            expires_raw = record.get("expires_at")
            memory = TypedMemory(
                fiber_id=record["fiber_id"],
                memory_type=MemoryType(record["memory_type"]),
                priority=Priority(record["priority"]),
                provenance=provenance,
                expires_at=datetime.fromisoformat(expires_raw) if expires_raw else None,
                project_id=record.get("project_id"),
                tags=frozenset(record.get("tags", [])),
                metadata=record.get("metadata", {}),
                created_at=datetime.fromisoformat(record["created_at"]),
            )
            # Skip memories whose fiber was not part of the snapshot.
            if memory.fiber_id in self._fibers[brain_id]:
                self._typed_memories[brain_id][memory.fiber_id] = memory

        for record in snapshot.metadata.get("projects", []):
            project = Project.from_dict(record)
            self._projects[brain_id][project.id] = project

    finally:
        # Restore whatever context was active before the import.
        self._current_brain_id = previous_context

    return brain_id
869
+
870
# ========== Statistics ==========

async def get_stats(self, brain_id: str) -> dict[str, int]:
    """Return entity counts for *brain_id* (independent of the context)."""
    return {
        "neuron_count": len(self._neurons[brain_id]),
        "synapse_count": len(self._synapses[brain_id]),
        "fiber_count": len(self._fibers[brain_id]),
        "project_count": len(self._projects[brain_id]),
    }
879
+
880
# ========== Cleanup ==========

async def clear(self, brain_id: str) -> None:
    """Erase every entity belonging to *brain_id*, including the brain.

    Graph nodes are matched by their stored ``brain_id`` attribute; the
    per-brain tables are emptied in place so any existing references to
    the dicts remain valid.
    """
    stale_nodes = [
        node
        for node in self._graph.nodes()
        if self._graph.nodes[node].get("brain_id") == brain_id
    ]
    self._graph.remove_nodes_from(stale_nodes)

    for table in (
        self._neurons,
        self._synapses,
        self._fibers,
        self._states,
        self._typed_memories,
        self._projects,
    ):
        table[brain_id].clear()

    self._brains.pop(brain_id, None)