neural-memory 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- neural_memory/__init__.py +38 -0
- neural_memory/cli/__init__.py +15 -0
- neural_memory/cli/__main__.py +6 -0
- neural_memory/cli/config.py +176 -0
- neural_memory/cli/main.py +2702 -0
- neural_memory/cli/storage.py +169 -0
- neural_memory/cli/tui.py +471 -0
- neural_memory/core/__init__.py +52 -0
- neural_memory/core/brain.py +301 -0
- neural_memory/core/brain_mode.py +273 -0
- neural_memory/core/fiber.py +236 -0
- neural_memory/core/memory_types.py +331 -0
- neural_memory/core/neuron.py +168 -0
- neural_memory/core/project.py +257 -0
- neural_memory/core/synapse.py +215 -0
- neural_memory/engine/__init__.py +15 -0
- neural_memory/engine/activation.py +335 -0
- neural_memory/engine/encoder.py +391 -0
- neural_memory/engine/retrieval.py +440 -0
- neural_memory/extraction/__init__.py +42 -0
- neural_memory/extraction/entities.py +547 -0
- neural_memory/extraction/parser.py +337 -0
- neural_memory/extraction/router.py +396 -0
- neural_memory/extraction/temporal.py +428 -0
- neural_memory/mcp/__init__.py +9 -0
- neural_memory/mcp/__main__.py +6 -0
- neural_memory/mcp/server.py +621 -0
- neural_memory/py.typed +0 -0
- neural_memory/safety/__init__.py +31 -0
- neural_memory/safety/freshness.py +238 -0
- neural_memory/safety/sensitive.py +304 -0
- neural_memory/server/__init__.py +5 -0
- neural_memory/server/app.py +99 -0
- neural_memory/server/dependencies.py +33 -0
- neural_memory/server/models.py +138 -0
- neural_memory/server/routes/__init__.py +7 -0
- neural_memory/server/routes/brain.py +221 -0
- neural_memory/server/routes/memory.py +169 -0
- neural_memory/server/routes/sync.py +387 -0
- neural_memory/storage/__init__.py +17 -0
- neural_memory/storage/base.py +441 -0
- neural_memory/storage/factory.py +329 -0
- neural_memory/storage/memory_store.py +896 -0
- neural_memory/storage/shared_store.py +650 -0
- neural_memory/storage/sqlite_store.py +1613 -0
- neural_memory/sync/__init__.py +5 -0
- neural_memory/sync/client.py +435 -0
- neural_memory/unified_config.py +315 -0
- neural_memory/utils/__init__.py +5 -0
- neural_memory/utils/config.py +98 -0
- neural_memory-0.1.0.dist-info/METADATA +314 -0
- neural_memory-0.1.0.dist-info/RECORD +55 -0
- neural_memory-0.1.0.dist-info/WHEEL +4 -0
- neural_memory-0.1.0.dist-info/entry_points.txt +4 -0
- neural_memory-0.1.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
"""Pydantic models for API request/response."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from datetime import datetime
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
from pydantic import BaseModel, Field
|
|
9
|
+
|
|
10
|
+
# ============ Request Models ============
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class EncodeRequest(BaseModel):
    """Request to encode a new memory."""

    # Required payload: the raw text to be encoded.
    content: str = Field(description="The content to encode as a memory")
    # All remaining fields are optional and default to "unspecified".
    timestamp: datetime | None = Field(default=None, description="When this memory occurred (default: now)")
    metadata: dict[str, Any] | None = Field(default=None, description="Additional metadata to attach")
    tags: list[str] | None = Field(default=None, description="Tags for categorization")
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class QueryRequest(BaseModel):
    """Request to query memories."""

    # The free-text query; required.
    query: str = Field(description="The query text")
    # Optional explicit retrieval depth; when omitted the engine auto-detects.
    depth: int | None = Field(
        default=None,
        ge=0,
        le=3,
        description="Retrieval depth (0=instant, 1=context, 2=habit, 3=deep). Auto-detects if not specified.",
    )
    # Budget for the context string returned to the caller.
    max_tokens: int = Field(
        default=500,
        ge=50,
        le=5000,
        description="Maximum tokens in returned context",
    )
    include_subgraph: bool = Field(default=False, description="Whether to include subgraph details")
    # Anchor for resolving relative time expressions in the query.
    reference_time: datetime | None = Field(
        default=None, description="Reference time for temporal parsing (default: now)"
    )
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
# NOTE: BrainConfigModel is defined BEFORE CreateBrainRequest on purpose.
# CreateBrainRequest.config previously forward-referenced BrainConfigModel
# declared later in the module; with `from __future__ import annotations`
# that forces Pydantic to defer/rebuild the model to resolve the reference.
# Declaring the dependency first makes resolution immediate and robust.
class BrainConfigModel(BaseModel):
    """Brain configuration model."""

    decay_rate: float = Field(0.1, ge=0, le=1)
    reinforcement_delta: float = Field(0.05, ge=0, le=0.5)
    activation_threshold: float = Field(0.2, ge=0, le=1)
    max_spread_hops: int = Field(4, ge=1, le=10)
    max_context_tokens: int = Field(1500, ge=100, le=10000)


class CreateBrainRequest(BaseModel):
    """Request to create a new brain."""

    name: str = Field(..., min_length=1, max_length=100, description="Brain name")
    owner_id: str | None = Field(None, description="Owner identifier")
    is_public: bool = Field(False, description="Whether publicly accessible")
    # Optional tuning overrides; server falls back to BrainConfig defaults.
    config: BrainConfigModel | None = Field(None, description="Custom configuration")
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
# ============ Response Models ============
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class EncodeResponse(BaseModel):
    """Response from encoding a memory."""

    # All fields are required; they summarize what the encoder produced.
    fiber_id: str = Field(description="ID of the created fiber")
    neurons_created: int = Field(description="Number of neurons created")
    neurons_linked: int = Field(description="Number of existing neurons linked")
    synapses_created: int = Field(description="Number of synapses created")
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
class SubgraphResponse(BaseModel):
    """Subgraph details in query response."""

    # IDs of neurons in the returned subgraph
    neuron_ids: list[str]
    # IDs of synapses in the returned subgraph
    synapse_ids: list[str]
    # IDs of anchor neurons (presumably the retrieval entry points — confirm against engine)
    anchor_ids: list[str]
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
class QueryResponse(BaseModel):
    """Response from querying memories."""

    # Answer may be absent when the engine cannot reconstruct one.
    answer: str | None = Field(default=None, description="Reconstructed answer if available")
    confidence: float = Field(ge=0, le=1, description="Confidence in answer")
    depth_used: int = Field(description="Depth level used for retrieval")
    neurons_activated: int = Field(description="Number of neurons activated")
    fibers_matched: list[str] = Field(description="IDs of matched fibers")
    context: str = Field(description="Formatted context for injection")
    latency_ms: float = Field(description="Retrieval latency in milliseconds")
    # Populated only when the request asked for subgraph details.
    subgraph: SubgraphResponse | None = Field(default=None, description="Subgraph details (if requested)")
    metadata: dict[str, Any] = Field(default_factory=dict)
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
class BrainResponse(BaseModel):
    """Response with brain details."""

    # Identity / ownership
    id: str
    name: str
    owner_id: str | None
    is_public: bool
    # Graph-size statistics (filled from storage.get_stats by the routes)
    neuron_count: int
    synapse_count: int
    fiber_count: int
    # Lifecycle timestamps copied from the Brain record
    created_at: datetime
    updated_at: datetime
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
class BrainListResponse(BaseModel):
    """Response with list of brains."""

    brains: list[BrainResponse]
    # Total number of brains available (may exceed len(brains) if paginated — confirm against caller)
    total: int
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
class StatsResponse(BaseModel):
    """Response with brain statistics."""

    brain_id: str
    # Counts mirror the keys returned by storage.get_stats
    neuron_count: int
    synapse_count: int
    fiber_count: int
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
class HealthResponse(BaseModel):
    """Health check response."""

    # Defaults to "healthy"; constructors only need to supply the version.
    status: str = "healthy"
    version: str
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
class ErrorResponse(BaseModel):
    """Error response."""

    # Short machine-friendly error label
    error: str
    # Optional human-readable explanation
    detail: str | None = None
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
"""API routes for NeuralMemory server."""
|
|
2
|
+
|
|
3
|
+
from neural_memory.server.routes.brain import router as brain_router
|
|
4
|
+
from neural_memory.server.routes.memory import router as memory_router
|
|
5
|
+
from neural_memory.server.routes.sync import router as sync_router
|
|
6
|
+
|
|
7
|
+
__all__ = ["brain_router", "memory_router", "sync_router"]
|
|
@@ -0,0 +1,221 @@
|
|
|
1
|
+
"""Brain API routes."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Annotated
|
|
6
|
+
|
|
7
|
+
from fastapi import APIRouter, Depends, HTTPException
|
|
8
|
+
|
|
9
|
+
from neural_memory.core.brain import Brain, BrainConfig
|
|
10
|
+
from neural_memory.server.dependencies import get_storage
|
|
11
|
+
from neural_memory.server.models import (
|
|
12
|
+
BrainResponse,
|
|
13
|
+
CreateBrainRequest,
|
|
14
|
+
ErrorResponse,
|
|
15
|
+
StatsResponse,
|
|
16
|
+
)
|
|
17
|
+
from neural_memory.storage.base import NeuralStorage
|
|
18
|
+
|
|
19
|
+
router = APIRouter(prefix="/brain", tags=["brain"])
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@router.post(
    "/create",
    response_model=BrainResponse,
    summary="Create a new brain",
    description="Create a new brain for storing memories.",
)
async def create_brain(
    request: CreateBrainRequest,
    storage: Annotated[NeuralStorage, Depends(get_storage)],
) -> BrainResponse:
    """Create a new brain."""
    # Translate the optional API-level config into the core BrainConfig;
    # absent overrides fall back to BrainConfig defaults.
    if request.config is None:
        brain_config = BrainConfig()
    else:
        cfg = request.config
        brain_config = BrainConfig(
            decay_rate=cfg.decay_rate,
            reinforcement_delta=cfg.reinforcement_delta,
            activation_threshold=cfg.activation_threshold,
            max_spread_hops=cfg.max_spread_hops,
            max_context_tokens=cfg.max_context_tokens,
        )

    new_brain = Brain.create(
        name=request.name,
        config=brain_config,
        owner_id=request.owner_id,
        is_public=request.is_public,
    )
    await storage.save_brain(new_brain)

    # A freshly created brain has no graph content yet, so counts are zero.
    return BrainResponse(
        id=new_brain.id,
        name=new_brain.name,
        owner_id=new_brain.owner_id,
        is_public=new_brain.is_public,
        neuron_count=0,
        synapse_count=0,
        fiber_count=0,
        created_at=new_brain.created_at,
        updated_at=new_brain.updated_at,
    )
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
@router.get(
    "/{brain_id}",
    response_model=BrainResponse,
    responses={404: {"model": ErrorResponse}},
    summary="Get brain details",
    description="Get details of a specific brain.",
)
async def get_brain(
    brain_id: str,
    storage: Annotated[NeuralStorage, Depends(get_storage)],
) -> BrainResponse:
    """Get brain by ID."""
    found = await storage.get_brain(brain_id)
    if found is None:
        raise HTTPException(status_code=404, detail=f"Brain {brain_id} not found")

    # Counts come from storage, not the Brain record itself.
    counts = await storage.get_stats(brain_id)

    return BrainResponse(
        id=found.id,
        name=found.name,
        owner_id=found.owner_id,
        is_public=found.is_public,
        neuron_count=counts["neuron_count"],
        synapse_count=counts["synapse_count"],
        fiber_count=counts["fiber_count"],
        created_at=found.created_at,
        updated_at=found.updated_at,
    )
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
@router.get(
    "/{brain_id}/stats",
    response_model=StatsResponse,
    responses={404: {"model": ErrorResponse}},
    summary="Get brain statistics",
    description="Get statistics for a brain.",
)
async def get_brain_stats(
    brain_id: str,
    storage: Annotated[NeuralStorage, Depends(get_storage)],
) -> StatsResponse:
    """Get brain statistics."""
    # Existence check first so unknown brains yield a 404 rather than
    # whatever get_stats would do for a missing id.
    if await storage.get_brain(brain_id) is None:
        raise HTTPException(status_code=404, detail=f"Brain {brain_id} not found")

    counts = await storage.get_stats(brain_id)

    return StatsResponse(
        brain_id=brain_id,
        neuron_count=counts["neuron_count"],
        synapse_count=counts["synapse_count"],
        fiber_count=counts["fiber_count"],
    )
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
@router.get(
    "/{brain_id}/export",
    responses={404: {"model": ErrorResponse}},
    summary="Export brain",
    description="Export a brain as a JSON snapshot.",
)
async def export_brain(
    brain_id: str,
    storage: Annotated[NeuralStorage, Depends(get_storage)],
) -> dict:
    """Export brain as snapshot."""
    if await storage.get_brain(brain_id) is None:
        raise HTTPException(status_code=404, detail=f"Brain {brain_id} not found")

    snap = await storage.export_brain(brain_id)

    # Serialize the snapshot into a JSON-friendly dict; only the
    # datetime needs explicit conversion.
    return {
        "brain_id": snap.brain_id,
        "brain_name": snap.brain_name,
        "exported_at": snap.exported_at.isoformat(),
        "version": snap.version,
        "neurons": snap.neurons,
        "synapses": snap.synapses,
        "fibers": snap.fibers,
        "config": snap.config,
        "metadata": snap.metadata,
    }
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
@router.post(
    "/{brain_id}/import",
    response_model=BrainResponse,
    summary="Import brain",
    description="Import a brain from a JSON snapshot.",
)
async def import_brain(
    brain_id: str,
    snapshot: dict,
    storage: Annotated[NeuralStorage, Depends(get_storage)],
) -> BrainResponse:
    """Import brain from snapshot.

    The snapshot must contain the keys produced by the export endpoint
    ("brain_name", "exported_at", "version", "neurons", "synapses",
    "fibers", "config"); "brain_id" and "metadata" are optional.
    Raises HTTP 400 for a malformed snapshot, 500 if the storage import
    did not produce a readable brain.
    """
    from datetime import datetime

    from neural_memory.core.brain import BrainSnapshot

    # Convert dict to BrainSnapshot. Previously a missing key or a bad
    # "exported_at" value surfaced as an unhandled KeyError/ValueError
    # (HTTP 500); report malformed client input as 400 instead.
    try:
        brain_snapshot = BrainSnapshot(
            brain_id=snapshot.get("brain_id", brain_id),
            brain_name=snapshot["brain_name"],
            exported_at=datetime.fromisoformat(snapshot["exported_at"]),
            version=snapshot["version"],
            neurons=snapshot["neurons"],
            synapses=snapshot["synapses"],
            fibers=snapshot["fibers"],
            config=snapshot["config"],
            metadata=snapshot.get("metadata", {}),
        )
    except KeyError as err:
        raise HTTPException(
            status_code=400, detail=f"Snapshot missing required field: {err.args[0]}"
        ) from err
    except (TypeError, ValueError) as err:
        raise HTTPException(status_code=400, detail=f"Invalid snapshot: {err}") from err

    imported_id = await storage.import_brain(brain_snapshot, brain_id)

    brain = await storage.get_brain(imported_id)
    if brain is None:
        raise HTTPException(status_code=500, detail="Import failed")

    stats = await storage.get_stats(imported_id)

    return BrainResponse(
        id=brain.id,
        name=brain.name,
        owner_id=brain.owner_id,
        is_public=brain.is_public,
        neuron_count=stats["neuron_count"],
        synapse_count=stats["synapse_count"],
        fiber_count=stats["fiber_count"],
        created_at=brain.created_at,
        updated_at=brain.updated_at,
    )
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
@router.delete(
    "/{brain_id}",
    responses={404: {"model": ErrorResponse}},
    summary="Delete brain",
    description="Delete a brain and all its data.",
)
async def delete_brain(
    brain_id: str,
    storage: Annotated[NeuralStorage, Depends(get_storage)],
) -> dict:
    """Delete a brain."""
    existing = await storage.get_brain(brain_id)
    if existing is None:
        raise HTTPException(status_code=404, detail=f"Brain {brain_id} not found")

    # clear() drops the brain's stored data.
    await storage.clear(brain_id)
    return {"status": "deleted", "brain_id": brain_id}
|
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
"""Memory API routes."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Annotated
|
|
6
|
+
|
|
7
|
+
from fastapi import APIRouter, Depends, HTTPException
|
|
8
|
+
|
|
9
|
+
from neural_memory.core.brain import Brain
|
|
10
|
+
from neural_memory.engine.encoder import MemoryEncoder
|
|
11
|
+
from neural_memory.engine.retrieval import DepthLevel, ReflexPipeline
|
|
12
|
+
from neural_memory.server.dependencies import get_brain, get_storage
|
|
13
|
+
from neural_memory.server.models import (
|
|
14
|
+
EncodeRequest,
|
|
15
|
+
EncodeResponse,
|
|
16
|
+
ErrorResponse,
|
|
17
|
+
QueryRequest,
|
|
18
|
+
QueryResponse,
|
|
19
|
+
SubgraphResponse,
|
|
20
|
+
)
|
|
21
|
+
from neural_memory.storage.base import NeuralStorage
|
|
22
|
+
|
|
23
|
+
router = APIRouter(prefix="/memory", tags=["memory"])
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
@router.post(
    "/encode",
    response_model=EncodeResponse,
    responses={404: {"model": ErrorResponse}},
    summary="Encode a new memory",
    description="Store a new memory by encoding content into neural structures.",
)
async def encode_memory(
    request: EncodeRequest,
    brain: Annotated[Brain, Depends(get_brain)],
    storage: Annotated[NeuralStorage, Depends(get_storage)],
) -> EncodeResponse:
    """Encode new content as a memory."""
    memory_encoder = MemoryEncoder(storage, brain.config)

    # Encoder expects a set of tags (or None when none were given).
    tag_set = set(request.tags) if request.tags else None

    encoded = await memory_encoder.encode(
        content=request.content,
        timestamp=request.timestamp,
        metadata=request.metadata,
        tags=tag_set,
    )

    # Report how much new structure the encoding produced.
    return EncodeResponse(
        fiber_id=encoded.fiber.id,
        neurons_created=len(encoded.neurons_created),
        neurons_linked=len(encoded.neurons_linked),
        synapses_created=len(encoded.synapses_created),
    )
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
@router.post(
    "/query",
    response_model=QueryResponse,
    responses={404: {"model": ErrorResponse}},
    summary="Query memories",
    description="Query memories through spreading activation retrieval.",
)
async def query_memory(
    request: QueryRequest,
    brain: Annotated[Brain, Depends(get_brain)],
    storage: Annotated[NeuralStorage, Depends(get_storage)],
) -> QueryResponse:
    """Query memories using the reflex pipeline."""
    reflex = ReflexPipeline(storage, brain.config)

    # None lets the pipeline auto-detect the retrieval depth.
    requested_depth = None if request.depth is None else DepthLevel(request.depth)

    outcome = await reflex.query(
        query=request.query,
        depth=requested_depth,
        max_tokens=request.max_tokens,
        reference_time=request.reference_time,
    )

    # Only ship subgraph details when the caller opted in.
    subgraph_payload = (
        SubgraphResponse(
            neuron_ids=outcome.subgraph.neuron_ids,
            synapse_ids=outcome.subgraph.synapse_ids,
            anchor_ids=outcome.subgraph.anchor_ids,
        )
        if request.include_subgraph
        else None
    )

    return QueryResponse(
        answer=outcome.answer,
        confidence=outcome.confidence,
        depth_used=outcome.depth_used.value,
        neurons_activated=outcome.neurons_activated,
        fibers_matched=outcome.fibers_matched,
        context=outcome.context,
        latency_ms=outcome.latency_ms,
        subgraph=subgraph_payload,
        metadata=outcome.metadata,
    )
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
@router.get(
    "/fiber/{fiber_id}",
    responses={404: {"model": ErrorResponse}},
    summary="Get a specific fiber",
    description="Retrieve details of a specific memory fiber.",
)
async def get_fiber(
    fiber_id: str,
    brain: Annotated[Brain, Depends(get_brain)],
    storage: Annotated[NeuralStorage, Depends(get_storage)],
) -> dict:
    """Get a specific fiber by ID."""
    fiber = await storage.get_fiber(fiber_id)
    if fiber is None:
        raise HTTPException(status_code=404, detail=f"Fiber {fiber_id} not found")

    def _iso(ts):
        # Serialize an optional datetime; None passes through.
        return ts.isoformat() if ts else None

    return {
        "id": fiber.id,
        "neuron_ids": list(fiber.neuron_ids),
        "synapse_ids": list(fiber.synapse_ids),
        "anchor_neuron_id": fiber.anchor_neuron_id,
        "time_start": _iso(fiber.time_start),
        "time_end": _iso(fiber.time_end),
        "coherence": fiber.coherence,
        "salience": fiber.salience,
        "frequency": fiber.frequency,
        "summary": fiber.summary,
        "tags": list(fiber.tags),
        "created_at": fiber.created_at.isoformat(),
    }
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
@router.get(
    "/neurons",
    summary="List neurons",
    description="List neurons in the brain with optional filters.",
)
async def list_neurons(
    brain: Annotated[Brain, Depends(get_brain)],
    storage: Annotated[NeuralStorage, Depends(get_storage)],
    type: str | None = None,  # shadows the builtin, but the name is the public query param
    content_contains: str | None = None,
    limit: int = 50,
) -> dict:
    """List neurons with optional filters.

    Query params: `type` (a NeuronType value), `content_contains`
    (substring filter), `limit` (max results). Raises HTTP 400 when
    `type` is not a valid NeuronType value.
    """
    from neural_memory.core.neuron import NeuronType

    # Previously an unknown `type` value raised ValueError and surfaced
    # as HTTP 500; reject it as a client error instead.
    try:
        neuron_type = NeuronType(type) if type else None
    except ValueError as err:
        raise HTTPException(status_code=400, detail=f"Unknown neuron type: {type!r}") from err

    neurons = await storage.find_neurons(
        type=neuron_type,
        content_contains=content_contains,
        limit=limit,
    )

    return {
        "neurons": [
            {
                "id": n.id,
                "type": n.type.value,
                "content": n.content,
                "created_at": n.created_at.isoformat(),
            }
            for n in neurons
        ],
        "count": len(neurons),
    }
|