neural_memory-0.1.0-py3-none-any.whl
This diff represents the content of a publicly available package version released to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
- neural_memory/__init__.py +38 -0
- neural_memory/cli/__init__.py +15 -0
- neural_memory/cli/__main__.py +6 -0
- neural_memory/cli/config.py +176 -0
- neural_memory/cli/main.py +2702 -0
- neural_memory/cli/storage.py +169 -0
- neural_memory/cli/tui.py +471 -0
- neural_memory/core/__init__.py +52 -0
- neural_memory/core/brain.py +301 -0
- neural_memory/core/brain_mode.py +273 -0
- neural_memory/core/fiber.py +236 -0
- neural_memory/core/memory_types.py +331 -0
- neural_memory/core/neuron.py +168 -0
- neural_memory/core/project.py +257 -0
- neural_memory/core/synapse.py +215 -0
- neural_memory/engine/__init__.py +15 -0
- neural_memory/engine/activation.py +335 -0
- neural_memory/engine/encoder.py +391 -0
- neural_memory/engine/retrieval.py +440 -0
- neural_memory/extraction/__init__.py +42 -0
- neural_memory/extraction/entities.py +547 -0
- neural_memory/extraction/parser.py +337 -0
- neural_memory/extraction/router.py +396 -0
- neural_memory/extraction/temporal.py +428 -0
- neural_memory/mcp/__init__.py +9 -0
- neural_memory/mcp/__main__.py +6 -0
- neural_memory/mcp/server.py +621 -0
- neural_memory/py.typed +0 -0
- neural_memory/safety/__init__.py +31 -0
- neural_memory/safety/freshness.py +238 -0
- neural_memory/safety/sensitive.py +304 -0
- neural_memory/server/__init__.py +5 -0
- neural_memory/server/app.py +99 -0
- neural_memory/server/dependencies.py +33 -0
- neural_memory/server/models.py +138 -0
- neural_memory/server/routes/__init__.py +7 -0
- neural_memory/server/routes/brain.py +221 -0
- neural_memory/server/routes/memory.py +169 -0
- neural_memory/server/routes/sync.py +387 -0
- neural_memory/storage/__init__.py +17 -0
- neural_memory/storage/base.py +441 -0
- neural_memory/storage/factory.py +329 -0
- neural_memory/storage/memory_store.py +896 -0
- neural_memory/storage/shared_store.py +650 -0
- neural_memory/storage/sqlite_store.py +1613 -0
- neural_memory/sync/__init__.py +5 -0
- neural_memory/sync/client.py +435 -0
- neural_memory/unified_config.py +315 -0
- neural_memory/utils/__init__.py +5 -0
- neural_memory/utils/config.py +98 -0
- neural_memory-0.1.0.dist-info/METADATA +314 -0
- neural_memory-0.1.0.dist-info/RECORD +55 -0
- neural_memory-0.1.0.dist-info/WHEEL +4 -0
- neural_memory-0.1.0.dist-info/entry_points.txt +4 -0
- neural_memory-0.1.0.dist-info/licenses/LICENSE +21 -0
neural_memory/storage/shared_store.py (new file)
@@ -0,0 +1,650 @@
"""Shared storage client for remote brain access via HTTP API."""

from __future__ import annotations

from datetime import datetime
from typing import Any, Literal

import aiohttp

from neural_memory.core.brain import Brain, BrainConfig, BrainSnapshot
from neural_memory.core.fiber import Fiber
from neural_memory.core.neuron import Neuron, NeuronState, NeuronType
from neural_memory.core.synapse import Direction, Synapse, SynapseType
from neural_memory.storage.base import NeuralStorage


class SharedStorageError(Exception):
    """Error from shared storage operations."""

    def __init__(self, message: str, status_code: int | None = None) -> None:
        super().__init__(message)
        self.status_code = status_code


class SharedStorage(NeuralStorage):
    """
    HTTP-based storage client that connects to a remote NeuralMemory server.

    Enables real-time brain sharing between multiple agents/instances.

    Usage:
        async with SharedStorage("http://localhost:8000", "brain-123") as storage:
            await storage.add_neuron(neuron)
            neurons = await storage.find_neurons(type=NeuronType.CONCEPT)

    Or without context manager:
        storage = SharedStorage("http://localhost:8000", "brain-123")
        await storage.connect()
        try:
            await storage.add_neuron(neuron)
        finally:
            await storage.disconnect()
    """

    def __init__(
        self,
        server_url: str,
        brain_id: str,
        *,
        timeout: float = 30.0,
        api_key: str | None = None,
    ) -> None:
        """
        Initialize shared storage client.

        Args:
            server_url: Base URL of NeuralMemory server (e.g., "http://localhost:8000")
            brain_id: ID of the brain to connect to
            timeout: Request timeout in seconds
            api_key: Optional API key for authentication
        """
        self._server_url = server_url.rstrip("/")
        self._brain_id = brain_id
        self._timeout = aiohttp.ClientTimeout(total=timeout)
        self._api_key = api_key
        self._session: aiohttp.ClientSession | None = None
        self._connected = False

    @property
    def server_url(self) -> str:
        """Get the server URL."""
        return self._server_url

    @property
    def brain_id(self) -> str:
        """Get the current brain ID."""
        return self._brain_id

    @property
    def is_connected(self) -> bool:
        """Check if connected to server."""
        return self._connected and self._session is not None

    def set_brain(self, brain_id: str) -> None:
        """Set the current brain context."""
        self._brain_id = brain_id

    async def connect(self) -> None:
        """Establish connection to server."""
        if self._session is None:
            headers = {"X-Brain-ID": self._brain_id}
            if self._api_key:
                headers["Authorization"] = f"Bearer {self._api_key}"

            self._session = aiohttp.ClientSession(
                timeout=self._timeout,
                headers=headers,
            )
        self._connected = True

    async def disconnect(self) -> None:
        """Close connection to server."""
        if self._session:
            await self._session.close()
            self._session = None
        self._connected = False

    async def __aenter__(self) -> SharedStorage:
        """Async context manager entry."""
        await self.connect()
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: Any,
    ) -> None:
        """Async context manager exit."""
        await self.disconnect()

    def _get_headers(self) -> dict[str, str]:
        """Get request headers with brain ID."""
        headers = {"X-Brain-ID": self._brain_id, "Content-Type": "application/json"}
        if self._api_key:
            headers["Authorization"] = f"Bearer {self._api_key}"
        return headers

    async def _request(
        self,
        method: str,
        path: str,
        *,
        json_data: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """Make HTTP request to server."""
        if not self._session:
            await self.connect()

        assert self._session is not None

        url = f"{self._server_url}{path}"
        headers = self._get_headers()

        try:
            async with self._session.request(
                method,
                url,
                json=json_data,
                params=params,
                headers=headers,
            ) as response:
                if response.status >= 400:
                    text = await response.text()
                    raise SharedStorageError(
                        f"Server error: {text}",
                        status_code=response.status,
                    )
                return await response.json()
        except aiohttp.ClientError as e:
            raise SharedStorageError(f"Connection error: {e}") from e

    # ========== Neuron Operations ==========

    async def add_neuron(self, neuron: Neuron) -> str:
        """Add a neuron via API."""
        data = {
            "id": neuron.id,
            "type": neuron.type.value,
            "content": neuron.content,
            "metadata": neuron.metadata,
            "created_at": neuron.created_at.isoformat(),
        }
        result = await self._request("POST", "/memory/neurons", json_data=data)
        return result.get("id", neuron.id)

    async def get_neuron(self, neuron_id: str) -> Neuron | None:
        """Get a neuron by ID."""
        try:
            result = await self._request("GET", f"/memory/neurons/{neuron_id}")
            return self._dict_to_neuron(result)
        except SharedStorageError as e:
            if e.status_code == 404:
                return None
            raise

    async def find_neurons(
        self,
        type: NeuronType | None = None,
        content_contains: str | None = None,
        content_exact: str | None = None,
        time_range: tuple[datetime, datetime] | None = None,
        limit: int = 100,
    ) -> list[Neuron]:
        """Find neurons matching criteria."""
        params: dict[str, Any] = {"limit": limit}
        if type:
            params["type"] = type.value
        if content_contains:
            params["content_contains"] = content_contains
        if content_exact:
            params["content_exact"] = content_exact
        if time_range:
            params["time_start"] = time_range[0].isoformat()
            params["time_end"] = time_range[1].isoformat()

        result = await self._request("GET", "/memory/neurons", params=params)
        return [self._dict_to_neuron(n) for n in result.get("neurons", [])]

    async def update_neuron(self, neuron: Neuron) -> None:
        """Update an existing neuron."""
        data = {
            "type": neuron.type.value,
            "content": neuron.content,
            "metadata": neuron.metadata,
        }
        await self._request("PUT", f"/memory/neurons/{neuron.id}", json_data=data)

    async def delete_neuron(self, neuron_id: str) -> bool:
        """Delete a neuron."""
        try:
            await self._request("DELETE", f"/memory/neurons/{neuron_id}")
            return True
        except SharedStorageError as e:
            if e.status_code == 404:
                return False
            raise

    # ========== Neuron State Operations ==========

    async def get_neuron_state(self, neuron_id: str) -> NeuronState | None:
        """Get neuron activation state."""
        try:
            result = await self._request("GET", f"/memory/neurons/{neuron_id}/state")
            return self._dict_to_neuron_state(result)
        except SharedStorageError as e:
            if e.status_code == 404:
                return None
            raise

    async def update_neuron_state(self, state: NeuronState) -> None:
        """Update neuron state."""
        data = {
            "neuron_id": state.neuron_id,
            "activation_level": state.activation_level,
            "access_frequency": state.access_frequency,
            "last_activated": state.last_activated.isoformat() if state.last_activated else None,
            "decay_rate": state.decay_rate,
        }
        await self._request(
            "PUT",
            f"/memory/neurons/{state.neuron_id}/state",
            json_data=data,
        )

    # ========== Synapse Operations ==========

    async def add_synapse(self, synapse: Synapse) -> str:
        """Add a synapse."""
        data = {
            "id": synapse.id,
            "source_id": synapse.source_id,
            "target_id": synapse.target_id,
            "type": synapse.type.value,
            "weight": synapse.weight,
            "direction": synapse.direction.value,
            "metadata": synapse.metadata,
            "created_at": synapse.created_at.isoformat(),
        }
        result = await self._request("POST", "/memory/synapses", json_data=data)
        return result.get("id", synapse.id)

    async def get_synapse(self, synapse_id: str) -> Synapse | None:
        """Get a synapse by ID."""
        try:
            result = await self._request("GET", f"/memory/synapses/{synapse_id}")
            return self._dict_to_synapse(result)
        except SharedStorageError as e:
            if e.status_code == 404:
                return None
            raise

    async def get_synapses(
        self,
        source_id: str | None = None,
        target_id: str | None = None,
        type: SynapseType | None = None,
        min_weight: float | None = None,
    ) -> list[Synapse]:
        """Find synapses matching criteria."""
        params: dict[str, Any] = {}
        if source_id:
            params["source_id"] = source_id
        if target_id:
            params["target_id"] = target_id
        if type:
            params["type"] = type.value
        if min_weight is not None:
            params["min_weight"] = min_weight

        result = await self._request("GET", "/memory/synapses", params=params)
        return [self._dict_to_synapse(s) for s in result.get("synapses", [])]

    async def update_synapse(self, synapse: Synapse) -> None:
        """Update an existing synapse."""
        data = {
            "weight": synapse.weight,
            "metadata": synapse.metadata,
        }
        await self._request("PUT", f"/memory/synapses/{synapse.id}", json_data=data)

    async def delete_synapse(self, synapse_id: str) -> bool:
        """Delete a synapse."""
        try:
            await self._request("DELETE", f"/memory/synapses/{synapse_id}")
            return True
        except SharedStorageError as e:
            if e.status_code == 404:
                return False
            raise

    # ========== Graph Traversal ==========

    async def get_neighbors(
        self,
        neuron_id: str,
        direction: Literal["out", "in", "both"] = "both",
        synapse_types: list[SynapseType] | None = None,
        min_weight: float | None = None,
    ) -> list[tuple[Neuron, Synapse]]:
        """Get neighboring neurons."""
        params: dict[str, Any] = {"direction": direction}
        if synapse_types:
            params["synapse_types"] = ",".join(t.value for t in synapse_types)
        if min_weight is not None:
            params["min_weight"] = min_weight

        result = await self._request(
            "GET",
            f"/memory/neurons/{neuron_id}/neighbors",
            params=params,
        )

        neighbors = []
        for item in result.get("neighbors", []):
            neuron = self._dict_to_neuron(item["neuron"])
            synapse = self._dict_to_synapse(item["synapse"])
            neighbors.append((neuron, synapse))
        return neighbors

    async def get_path(
        self,
        source_id: str,
        target_id: str,
        max_hops: int = 4,
    ) -> list[tuple[Neuron, Synapse]] | None:
        """Find shortest path between neurons."""
        params = {"target_id": target_id, "max_hops": max_hops}
        try:
            result = await self._request(
                "GET",
                f"/memory/neurons/{source_id}/path",
                params=params,
            )
            if not result.get("path"):
                return None

            path = []
            for item in result["path"]:
                neuron = self._dict_to_neuron(item["neuron"])
                synapse = self._dict_to_synapse(item["synapse"])
                path.append((neuron, synapse))
            return path
        except SharedStorageError as e:
            if e.status_code == 404:
                return None
            raise

    # ========== Fiber Operations ==========

    async def add_fiber(self, fiber: Fiber) -> str:
        """Add a fiber."""
        data = {
            "id": fiber.id,
            "neuron_ids": list(fiber.neuron_ids),
            "synapse_ids": list(fiber.synapse_ids),
            "anchor_neuron_id": fiber.anchor_neuron_id,
            "time_start": fiber.time_start.isoformat() if fiber.time_start else None,
            "time_end": fiber.time_end.isoformat() if fiber.time_end else None,
            "coherence": fiber.coherence,
            "salience": fiber.salience,
            "frequency": fiber.frequency,
            "summary": fiber.summary,
            "tags": list(fiber.tags),
            "created_at": fiber.created_at.isoformat(),
        }
        result = await self._request("POST", "/memory/fibers", json_data=data)
        return result.get("id", fiber.id)

    async def get_fiber(self, fiber_id: str) -> Fiber | None:
        """Get a fiber by ID."""
        try:
            result = await self._request("GET", f"/memory/fiber/{fiber_id}")
            return self._dict_to_fiber(result)
        except SharedStorageError as e:
            if e.status_code == 404:
                return None
            raise

    async def find_fibers(
        self,
        contains_neuron: str | None = None,
        time_overlaps: tuple[datetime, datetime] | None = None,
        tags: set[str] | None = None,
        min_salience: float | None = None,
        limit: int = 100,
    ) -> list[Fiber]:
        """Find fibers matching criteria."""
        params: dict[str, Any] = {"limit": limit}
        if contains_neuron:
            params["contains_neuron"] = contains_neuron
        if time_overlaps:
            params["time_start"] = time_overlaps[0].isoformat()
            params["time_end"] = time_overlaps[1].isoformat()
        if tags:
            params["tags"] = ",".join(tags)
        if min_salience is not None:
            params["min_salience"] = min_salience

        result = await self._request("GET", "/memory/fibers", params=params)
        return [self._dict_to_fiber(f) for f in result.get("fibers", [])]

    async def update_fiber(self, fiber: Fiber) -> None:
        """Update an existing fiber."""
        data = {
            "neuron_ids": list(fiber.neuron_ids),
            "synapse_ids": list(fiber.synapse_ids),
            "coherence": fiber.coherence,
            "salience": fiber.salience,
            "frequency": fiber.frequency,
            "summary": fiber.summary,
            "tags": list(fiber.tags),
        }
        await self._request("PUT", f"/memory/fibers/{fiber.id}", json_data=data)

    async def delete_fiber(self, fiber_id: str) -> bool:
        """Delete a fiber."""
        try:
            await self._request("DELETE", f"/memory/fibers/{fiber_id}")
            return True
        except SharedStorageError as e:
            if e.status_code == 404:
                return False
            raise

    async def get_fibers(
        self,
        limit: int = 10,
        order_by: Literal["created_at", "salience", "frequency"] = "created_at",
        descending: bool = True,
    ) -> list[Fiber]:
        """Get fibers with ordering."""
        params = {
            "limit": limit,
            "order_by": order_by,
            "descending": descending,
        }
        result = await self._request("GET", "/memory/fibers", params=params)
        return [self._dict_to_fiber(f) for f in result.get("fibers", [])]

    # ========== Brain Operations ==========

    async def save_brain(self, brain: Brain) -> None:
        """Save brain metadata."""
        # Check if brain exists
        existing = await self.get_brain(brain.id)
        if existing:
            # Update
            data = {
                "name": brain.name,
                "is_public": brain.is_public,
            }
            await self._request("PUT", f"/brain/{brain.id}", json_data=data)
        else:
            # Create
            data = {
                "name": brain.name,
                "owner_id": brain.owner_id,
                "is_public": brain.is_public,
                "config": {
                    "decay_rate": brain.config.decay_rate,
                    "reinforcement_delta": brain.config.reinforcement_delta,
                    "activation_threshold": brain.config.activation_threshold,
                    "max_spread_hops": brain.config.max_spread_hops,
                    "max_context_tokens": brain.config.max_context_tokens,
                },
            }
            await self._request("POST", "/brain/create", json_data=data)

    async def get_brain(self, brain_id: str) -> Brain | None:
        """Get brain metadata."""
        try:
            result = await self._request("GET", f"/brain/{brain_id}")
            return self._dict_to_brain(result)
        except SharedStorageError as e:
            if e.status_code == 404:
                return None
            raise

    async def export_brain(self, brain_id: str) -> BrainSnapshot:
        """Export brain as snapshot."""
        result = await self._request("GET", f"/brain/{brain_id}/export")
        return BrainSnapshot(
            brain_id=result["brain_id"],
            brain_name=result["brain_name"],
            exported_at=datetime.fromisoformat(result["exported_at"]),
            version=result["version"],
            neurons=result["neurons"],
            synapses=result["synapses"],
            fibers=result["fibers"],
            config=result["config"],
            metadata=result.get("metadata", {}),
        )

    async def import_brain(
        self,
        snapshot: BrainSnapshot,
        target_brain_id: str | None = None,
    ) -> str:
        """Import a brain snapshot."""
        brain_id = target_brain_id or snapshot.brain_id
        data = {
            "brain_id": snapshot.brain_id,
            "brain_name": snapshot.brain_name,
            "exported_at": snapshot.exported_at.isoformat(),
            "version": snapshot.version,
            "neurons": snapshot.neurons,
            "synapses": snapshot.synapses,
            "fibers": snapshot.fibers,
            "config": snapshot.config,
            "metadata": snapshot.metadata,
        }
        result = await self._request(
            "POST",
            f"/brain/{brain_id}/import",
            json_data=data,
        )
        return result.get("id", brain_id)

    # ========== Statistics ==========

    async def get_stats(self, brain_id: str) -> dict[str, int]:
        """Get brain statistics."""
        result = await self._request("GET", f"/brain/{brain_id}/stats")
        return {
            "neuron_count": result.get("neuron_count", 0),
            "synapse_count": result.get("synapse_count", 0),
            "fiber_count": result.get("fiber_count", 0),
        }

    # ========== Cleanup ==========

    async def clear(self, brain_id: str) -> None:
        """Clear all data for a brain."""
        await self._request("DELETE", f"/brain/{brain_id}")

    # ========== Conversion Helpers ==========

    def _dict_to_neuron(self, data: dict[str, Any]) -> Neuron:
        """Convert API response dict to Neuron."""
        return Neuron(
            id=data["id"],
            type=NeuronType(data["type"]),
            content=data["content"],
            metadata=data.get("metadata", {}),
            created_at=datetime.fromisoformat(data["created_at"]),
        )

    def _dict_to_neuron_state(self, data: dict[str, Any]) -> NeuronState:
        """Convert API response dict to NeuronState."""
        return NeuronState(
            neuron_id=data["neuron_id"],
            activation_level=data.get("activation_level", 0.0),
            access_frequency=data.get("access_frequency", 0),
            last_activated=datetime.fromisoformat(data["last_activated"])
            if data.get("last_activated")
            else None,
            decay_rate=data.get("decay_rate", 0.1),
            created_at=datetime.fromisoformat(data["created_at"])
            if data.get("created_at")
            else datetime.now(),
        )

    def _dict_to_synapse(self, data: dict[str, Any]) -> Synapse:
        """Convert API response dict to Synapse."""
        return Synapse(
            id=data["id"],
            source_id=data["source_id"],
            target_id=data["target_id"],
            type=SynapseType(data["type"]),
            weight=data.get("weight", 0.5),
            direction=Direction(data.get("direction", "uni")),
            metadata=data.get("metadata", {}),
            reinforced_count=data.get("reinforced_count", 0),
            last_activated=datetime.fromisoformat(data["last_activated"])
            if data.get("last_activated")
            else None,
            created_at=datetime.fromisoformat(data["created_at"])
            if data.get("created_at")
            else datetime.now(),
        )

    def _dict_to_fiber(self, data: dict[str, Any]) -> Fiber:
        """Convert API response dict to Fiber."""
        return Fiber(
            id=data["id"],
            neuron_ids=frozenset(data.get("neuron_ids", [])),
            synapse_ids=frozenset(data.get("synapse_ids", [])),
            anchor_neuron_id=data["anchor_neuron_id"],
            time_start=datetime.fromisoformat(data["time_start"])
            if data.get("time_start")
            else None,
            time_end=datetime.fromisoformat(data["time_end"]) if data.get("time_end") else None,
            coherence=data.get("coherence", 0.0),
            salience=data.get("salience", 0.0),
            frequency=data.get("frequency", 0),
            summary=data.get("summary"),
            tags=frozenset(data.get("tags", [])),
            created_at=datetime.fromisoformat(data["created_at"])
            if data.get("created_at")
            else datetime.now(),
        )

    def _dict_to_brain(self, data: dict[str, Any]) -> Brain:
        """Convert API response dict to Brain."""
        return Brain(
            id=data["id"],
            name=data["name"],
            config=BrainConfig(),  # Default config, actual config fetched from server
            owner_id=data.get("owner_id"),
            is_public=data.get("is_public", False),
            shared_with=data.get("shared_with", []),
            created_at=datetime.fromisoformat(data["created_at"])
            if data.get("created_at")
            else datetime.now(),
            updated_at=datetime.fromisoformat(data["updated_at"])
            if data.get("updated_at")
            else datetime.now(),
        )