neural-memory 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- neural_memory/__init__.py +38 -0
- neural_memory/cli/__init__.py +15 -0
- neural_memory/cli/__main__.py +6 -0
- neural_memory/cli/config.py +176 -0
- neural_memory/cli/main.py +2702 -0
- neural_memory/cli/storage.py +169 -0
- neural_memory/cli/tui.py +471 -0
- neural_memory/core/__init__.py +52 -0
- neural_memory/core/brain.py +301 -0
- neural_memory/core/brain_mode.py +273 -0
- neural_memory/core/fiber.py +236 -0
- neural_memory/core/memory_types.py +331 -0
- neural_memory/core/neuron.py +168 -0
- neural_memory/core/project.py +257 -0
- neural_memory/core/synapse.py +215 -0
- neural_memory/engine/__init__.py +15 -0
- neural_memory/engine/activation.py +335 -0
- neural_memory/engine/encoder.py +391 -0
- neural_memory/engine/retrieval.py +440 -0
- neural_memory/extraction/__init__.py +42 -0
- neural_memory/extraction/entities.py +547 -0
- neural_memory/extraction/parser.py +337 -0
- neural_memory/extraction/router.py +396 -0
- neural_memory/extraction/temporal.py +428 -0
- neural_memory/mcp/__init__.py +9 -0
- neural_memory/mcp/__main__.py +6 -0
- neural_memory/mcp/server.py +621 -0
- neural_memory/py.typed +0 -0
- neural_memory/safety/__init__.py +31 -0
- neural_memory/safety/freshness.py +238 -0
- neural_memory/safety/sensitive.py +304 -0
- neural_memory/server/__init__.py +5 -0
- neural_memory/server/app.py +99 -0
- neural_memory/server/dependencies.py +33 -0
- neural_memory/server/models.py +138 -0
- neural_memory/server/routes/__init__.py +7 -0
- neural_memory/server/routes/brain.py +221 -0
- neural_memory/server/routes/memory.py +169 -0
- neural_memory/server/routes/sync.py +387 -0
- neural_memory/storage/__init__.py +17 -0
- neural_memory/storage/base.py +441 -0
- neural_memory/storage/factory.py +329 -0
- neural_memory/storage/memory_store.py +896 -0
- neural_memory/storage/shared_store.py +650 -0
- neural_memory/storage/sqlite_store.py +1613 -0
- neural_memory/sync/__init__.py +5 -0
- neural_memory/sync/client.py +435 -0
- neural_memory/unified_config.py +315 -0
- neural_memory/utils/__init__.py +5 -0
- neural_memory/utils/config.py +98 -0
- neural_memory-0.1.0.dist-info/METADATA +314 -0
- neural_memory-0.1.0.dist-info/RECORD +55 -0
- neural_memory-0.1.0.dist-info/WHEEL +4 -0
- neural_memory-0.1.0.dist-info/entry_points.txt +4 -0
- neural_memory-0.1.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,2702 @@
|
|
|
1
|
+
"""Neural Memory CLI main entry point."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import asyncio
|
|
6
|
+
import json
|
|
7
|
+
from datetime import datetime
|
|
8
|
+
from typing import Annotated
|
|
9
|
+
|
|
10
|
+
import typer
|
|
11
|
+
|
|
12
|
+
from neural_memory.cli.config import CLIConfig
|
|
13
|
+
from neural_memory.cli.storage import PersistentStorage
|
|
14
|
+
from neural_memory.core.memory_types import (
|
|
15
|
+
DEFAULT_EXPIRY_DAYS,
|
|
16
|
+
MemoryType,
|
|
17
|
+
Priority,
|
|
18
|
+
TypedMemory,
|
|
19
|
+
suggest_memory_type,
|
|
20
|
+
)
|
|
21
|
+
from neural_memory.core.project import Project
|
|
22
|
+
from neural_memory.engine.encoder import MemoryEncoder
|
|
23
|
+
from neural_memory.engine.retrieval import DepthLevel, ReflexPipeline
|
|
24
|
+
from neural_memory.extraction.parser import QueryParser
|
|
25
|
+
from neural_memory.extraction.router import QueryRouter
|
|
26
|
+
from neural_memory.safety.freshness import (
|
|
27
|
+
FreshnessLevel,
|
|
28
|
+
analyze_freshness,
|
|
29
|
+
evaluate_freshness,
|
|
30
|
+
format_age,
|
|
31
|
+
get_freshness_indicator,
|
|
32
|
+
)
|
|
33
|
+
from neural_memory.safety.sensitive import (
|
|
34
|
+
check_sensitive_content,
|
|
35
|
+
filter_sensitive_content,
|
|
36
|
+
format_sensitive_warning,
|
|
37
|
+
)
|
|
38
|
+
|
|
39
|
+
# Main app
|
|
40
|
+
app = typer.Typer(
|
|
41
|
+
name="nmem",
|
|
42
|
+
help="Neural Memory - Reflex-based memory for AI agents",
|
|
43
|
+
no_args_is_help=True,
|
|
44
|
+
)
|
|
45
|
+
|
|
46
|
+
# Brain subcommand
|
|
47
|
+
brain_app = typer.Typer(help="Brain management commands")
|
|
48
|
+
app.add_typer(brain_app, name="brain")
|
|
49
|
+
|
|
50
|
+
# Project subcommand
|
|
51
|
+
project_app = typer.Typer(help="Project scoping for memory organization")
|
|
52
|
+
app.add_typer(project_app, name="project")
|
|
53
|
+
|
|
54
|
+
# Shared mode subcommand
|
|
55
|
+
shared_app = typer.Typer(help="Real-time brain sharing configuration")
|
|
56
|
+
app.add_typer(shared_app, name="shared")
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def get_config() -> CLIConfig:
|
|
60
|
+
"""Get CLI configuration."""
|
|
61
|
+
return CLIConfig.load()
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
async def get_storage(
|
|
65
|
+
config: CLIConfig,
|
|
66
|
+
*,
|
|
67
|
+
force_shared: bool = False,
|
|
68
|
+
force_local: bool = False,
|
|
69
|
+
force_sqlite: bool = False,
|
|
70
|
+
) -> PersistentStorage:
|
|
71
|
+
"""
|
|
72
|
+
Get storage for current brain.
|
|
73
|
+
|
|
74
|
+
Args:
|
|
75
|
+
config: CLI configuration
|
|
76
|
+
force_shared: Override config to use remote shared mode
|
|
77
|
+
force_local: Override config to use local JSON mode
|
|
78
|
+
force_sqlite: Override config to use local SQLite mode
|
|
79
|
+
|
|
80
|
+
Returns:
|
|
81
|
+
Storage instance (local JSON, local SQLite, or remote shared)
|
|
82
|
+
"""
|
|
83
|
+
# Remote shared mode (via server)
|
|
84
|
+
use_shared = (config.is_shared_mode or force_shared) and not force_local
|
|
85
|
+
if use_shared:
|
|
86
|
+
from neural_memory.storage.shared_store import SharedStorage
|
|
87
|
+
|
|
88
|
+
storage = SharedStorage(
|
|
89
|
+
server_url=config.shared.server_url,
|
|
90
|
+
brain_id=config.current_brain,
|
|
91
|
+
timeout=config.shared.timeout,
|
|
92
|
+
api_key=config.shared.api_key,
|
|
93
|
+
)
|
|
94
|
+
await storage.connect()
|
|
95
|
+
return storage # type: ignore
|
|
96
|
+
|
|
97
|
+
# SQLite mode (unified config - shared file-based storage)
|
|
98
|
+
if config.use_sqlite or force_sqlite:
|
|
99
|
+
from neural_memory.unified_config import get_shared_storage
|
|
100
|
+
|
|
101
|
+
return await get_shared_storage(config.current_brain) # type: ignore
|
|
102
|
+
|
|
103
|
+
# Legacy JSON mode
|
|
104
|
+
brain_path = config.get_brain_path()
|
|
105
|
+
return await PersistentStorage.load(brain_path)
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def output_result(data: dict, as_json: bool = False) -> None:
|
|
109
|
+
"""Output result in appropriate format."""
|
|
110
|
+
if as_json:
|
|
111
|
+
typer.echo(json.dumps(data, indent=2, default=str))
|
|
112
|
+
else:
|
|
113
|
+
# Human-readable format
|
|
114
|
+
if "error" in data:
|
|
115
|
+
typer.secho(f"Error: {data['error']}", fg=typer.colors.RED)
|
|
116
|
+
elif "answer" in data:
|
|
117
|
+
typer.echo(data["answer"])
|
|
118
|
+
|
|
119
|
+
# Show freshness warnings
|
|
120
|
+
if data.get("freshness_warnings"):
|
|
121
|
+
typer.echo("")
|
|
122
|
+
for warning in data["freshness_warnings"]:
|
|
123
|
+
typer.secho(warning, fg=typer.colors.YELLOW)
|
|
124
|
+
|
|
125
|
+
# Show metadata
|
|
126
|
+
meta_parts = []
|
|
127
|
+
if data.get("confidence") is not None:
|
|
128
|
+
meta_parts.append(f"confidence: {data['confidence']:.2f}")
|
|
129
|
+
if data.get("neurons_activated"):
|
|
130
|
+
meta_parts.append(f"neurons: {data['neurons_activated']}")
|
|
131
|
+
if data.get("oldest_memory_age"):
|
|
132
|
+
meta_parts.append(f"oldest: {data['oldest_memory_age']}")
|
|
133
|
+
|
|
134
|
+
if meta_parts:
|
|
135
|
+
typer.secho(f"\n[{', '.join(meta_parts)}]", fg=typer.colors.BRIGHT_BLACK)
|
|
136
|
+
|
|
137
|
+
# Show routing info if present
|
|
138
|
+
if data.get("routing"):
|
|
139
|
+
r = data["routing"]
|
|
140
|
+
typer.secho(
|
|
141
|
+
f"\n[routing: {r['query_type']}, depth: {r['suggested_depth']}, "
|
|
142
|
+
f"confidence: {r['confidence']}]",
|
|
143
|
+
fg=typer.colors.BRIGHT_BLACK,
|
|
144
|
+
)
|
|
145
|
+
|
|
146
|
+
elif "message" in data:
|
|
147
|
+
typer.secho(data["message"], fg=typer.colors.GREEN)
|
|
148
|
+
|
|
149
|
+
# Show memory type info
|
|
150
|
+
type_parts = []
|
|
151
|
+
if data.get("memory_type"):
|
|
152
|
+
type_parts.append(f"type: {data['memory_type']}")
|
|
153
|
+
if data.get("priority"):
|
|
154
|
+
type_parts.append(f"priority: {data['priority']}")
|
|
155
|
+
if data.get("expires_in_days") is not None:
|
|
156
|
+
type_parts.append(f"expires: {data['expires_in_days']}d")
|
|
157
|
+
if data.get("project"):
|
|
158
|
+
type_parts.append(f"project: {data['project']}")
|
|
159
|
+
if type_parts:
|
|
160
|
+
typer.secho(f" [{', '.join(type_parts)}]", fg=typer.colors.BRIGHT_BLACK)
|
|
161
|
+
|
|
162
|
+
# Show warnings if any
|
|
163
|
+
if data.get("warnings"):
|
|
164
|
+
for warning in data["warnings"]:
|
|
165
|
+
typer.secho(warning, fg=typer.colors.YELLOW)
|
|
166
|
+
|
|
167
|
+
elif "context" in data:
|
|
168
|
+
typer.echo(data["context"])
|
|
169
|
+
else:
|
|
170
|
+
typer.echo(str(data))
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
# =============================================================================
|
|
174
|
+
# Core Commands
|
|
175
|
+
# =============================================================================
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
@app.command()
|
|
179
|
+
def remember(
|
|
180
|
+
content: Annotated[str, typer.Argument(help="Content to remember")],
|
|
181
|
+
tags: Annotated[
|
|
182
|
+
list[str] | None, typer.Option("--tag", "-t", help="Tags for the memory")
|
|
183
|
+
] = None,
|
|
184
|
+
memory_type: Annotated[
|
|
185
|
+
str | None,
|
|
186
|
+
typer.Option(
|
|
187
|
+
"--type",
|
|
188
|
+
"-T",
|
|
189
|
+
help="Memory type: fact, decision, preference, todo, insight, context, instruction, error, workflow, reference (auto-detected if not specified)",
|
|
190
|
+
),
|
|
191
|
+
] = None,
|
|
192
|
+
priority: Annotated[
|
|
193
|
+
int | None,
|
|
194
|
+
typer.Option("--priority", "-p", help="Priority 0-10 (0=lowest, 5=normal, 10=critical)"),
|
|
195
|
+
] = None,
|
|
196
|
+
expires: Annotated[
|
|
197
|
+
int | None,
|
|
198
|
+
typer.Option("--expires", "-e", help="Days until this memory expires"),
|
|
199
|
+
] = None,
|
|
200
|
+
project: Annotated[
|
|
201
|
+
str | None,
|
|
202
|
+
typer.Option("--project", "-P", help="Associate with a project (by name)"),
|
|
203
|
+
] = None,
|
|
204
|
+
shared: Annotated[
|
|
205
|
+
bool, typer.Option("--shared", "-S", help="Use shared/remote storage for this command")
|
|
206
|
+
] = False,
|
|
207
|
+
force: Annotated[
|
|
208
|
+
bool, typer.Option("--force", "-f", help="Store even if sensitive content detected")
|
|
209
|
+
] = False,
|
|
210
|
+
redact: Annotated[
|
|
211
|
+
bool, typer.Option("--redact", "-r", help="Auto-redact sensitive content before storing")
|
|
212
|
+
] = False,
|
|
213
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
214
|
+
) -> None:
|
|
215
|
+
"""Store a new memory.
|
|
216
|
+
|
|
217
|
+
Memory types (auto-detected if not specified):
|
|
218
|
+
fact - Objective information
|
|
219
|
+
decision - Choices made
|
|
220
|
+
preference - User preferences
|
|
221
|
+
todo - Action items (expires in 30 days by default)
|
|
222
|
+
insight - Learned patterns
|
|
223
|
+
context - Situational info (expires in 7 days by default)
|
|
224
|
+
instruction - User guidelines
|
|
225
|
+
error - Error patterns
|
|
226
|
+
workflow - Process patterns
|
|
227
|
+
reference - External references
|
|
228
|
+
|
|
229
|
+
Examples:
|
|
230
|
+
nmem remember "Fixed auth bug by adding null check"
|
|
231
|
+
nmem remember "We decided to use PostgreSQL" --type decision
|
|
232
|
+
nmem remember "Need to refactor auth module" --type todo --priority 7
|
|
233
|
+
nmem remember "Meeting context" --type context --expires 7
|
|
234
|
+
nmem remember "API_KEY=xxx" --redact # Will redact sensitive content
|
|
235
|
+
nmem remember "Sprint task done" --project "Q1 Sprint" # Associate with project
|
|
236
|
+
nmem remember "Shared memory" --shared # Store in shared brain
|
|
237
|
+
"""
|
|
238
|
+
# Check for sensitive content
|
|
239
|
+
sensitive_matches = check_sensitive_content(content, min_severity=2)
|
|
240
|
+
|
|
241
|
+
if sensitive_matches and not force and not redact:
|
|
242
|
+
warning = format_sensitive_warning(sensitive_matches)
|
|
243
|
+
typer.echo(warning)
|
|
244
|
+
raise typer.Exit(1)
|
|
245
|
+
|
|
246
|
+
# Redact if requested
|
|
247
|
+
store_content = content
|
|
248
|
+
if redact and sensitive_matches:
|
|
249
|
+
store_content, _ = filter_sensitive_content(content)
|
|
250
|
+
typer.secho(f"Redacted {len(sensitive_matches)} sensitive item(s)", fg=typer.colors.YELLOW)
|
|
251
|
+
|
|
252
|
+
# Determine memory type
|
|
253
|
+
if memory_type:
|
|
254
|
+
try:
|
|
255
|
+
mem_type = MemoryType(memory_type.lower())
|
|
256
|
+
except ValueError:
|
|
257
|
+
valid_types = ", ".join(t.value for t in MemoryType)
|
|
258
|
+
typer.secho(f"Invalid memory type. Valid types: {valid_types}", fg=typer.colors.RED)
|
|
259
|
+
raise typer.Exit(1)
|
|
260
|
+
else:
|
|
261
|
+
mem_type = suggest_memory_type(store_content)
|
|
262
|
+
|
|
263
|
+
# Determine expiry
|
|
264
|
+
expiry_days = expires
|
|
265
|
+
if expiry_days is None:
|
|
266
|
+
expiry_days = DEFAULT_EXPIRY_DAYS.get(mem_type)
|
|
267
|
+
|
|
268
|
+
# Determine priority
|
|
269
|
+
mem_priority = Priority.from_int(priority) if priority is not None else Priority.NORMAL
|
|
270
|
+
|
|
271
|
+
async def _remember() -> dict:
|
|
272
|
+
config = get_config()
|
|
273
|
+
storage = await get_storage(config, force_shared=shared)
|
|
274
|
+
|
|
275
|
+
brain_id = (
|
|
276
|
+
storage._current_brain_id
|
|
277
|
+
if hasattr(storage, "_current_brain_id")
|
|
278
|
+
else config.current_brain
|
|
279
|
+
)
|
|
280
|
+
brain = await storage.get_brain(brain_id)
|
|
281
|
+
if not brain:
|
|
282
|
+
return {"error": "No brain configured"}
|
|
283
|
+
|
|
284
|
+
# Look up project if specified
|
|
285
|
+
project_id = None
|
|
286
|
+
if project:
|
|
287
|
+
proj = await storage.get_project_by_name(project)
|
|
288
|
+
if not proj:
|
|
289
|
+
return {
|
|
290
|
+
"error": f"Project '{project}' not found. Create it with: nmem project create \"{project}\""
|
|
291
|
+
}
|
|
292
|
+
project_id = proj.id
|
|
293
|
+
|
|
294
|
+
encoder = MemoryEncoder(storage, brain.config)
|
|
295
|
+
|
|
296
|
+
# Disable auto-save for batch operations during encoding
|
|
297
|
+
storage.disable_auto_save()
|
|
298
|
+
|
|
299
|
+
result = await encoder.encode(
|
|
300
|
+
content=store_content,
|
|
301
|
+
timestamp=datetime.now(),
|
|
302
|
+
tags=set(tags) if tags else None,
|
|
303
|
+
)
|
|
304
|
+
|
|
305
|
+
# Create and store typed memory metadata
|
|
306
|
+
typed_mem = TypedMemory.create(
|
|
307
|
+
fiber_id=result.fiber.id,
|
|
308
|
+
memory_type=mem_type,
|
|
309
|
+
priority=mem_priority,
|
|
310
|
+
source="user_input",
|
|
311
|
+
expires_in_days=expiry_days,
|
|
312
|
+
tags=set(tags) if tags else None,
|
|
313
|
+
project_id=project_id,
|
|
314
|
+
)
|
|
315
|
+
await storage.add_typed_memory(typed_mem)
|
|
316
|
+
|
|
317
|
+
# Save once after encoding
|
|
318
|
+
await storage.batch_save()
|
|
319
|
+
|
|
320
|
+
response = {
|
|
321
|
+
"message": f"Remembered: {store_content[:50]}{'...' if len(store_content) > 50 else ''}",
|
|
322
|
+
"fiber_id": result.fiber.id,
|
|
323
|
+
"memory_type": mem_type.value,
|
|
324
|
+
"priority": mem_priority.name.lower(),
|
|
325
|
+
"neurons_created": len(result.neurons_created),
|
|
326
|
+
"neurons_linked": len(result.neurons_linked),
|
|
327
|
+
"synapses_created": len(result.synapses_created),
|
|
328
|
+
}
|
|
329
|
+
|
|
330
|
+
# Add project info
|
|
331
|
+
if project_id:
|
|
332
|
+
response["project"] = project
|
|
333
|
+
|
|
334
|
+
# Add expiry info
|
|
335
|
+
if typed_mem.expires_at:
|
|
336
|
+
response["expires_in_days"] = typed_mem.days_until_expiry
|
|
337
|
+
|
|
338
|
+
# Add warnings
|
|
339
|
+
warnings = []
|
|
340
|
+
if force and sensitive_matches:
|
|
341
|
+
warnings.append(
|
|
342
|
+
f"[!] Stored with {len(sensitive_matches)} sensitive item(s) - consider using --redact"
|
|
343
|
+
)
|
|
344
|
+
if warnings:
|
|
345
|
+
response["warnings"] = warnings
|
|
346
|
+
|
|
347
|
+
return response
|
|
348
|
+
|
|
349
|
+
result = asyncio.run(_remember())
|
|
350
|
+
output_result(result, json_output)
|
|
351
|
+
|
|
352
|
+
|
|
353
|
+
@app.command()
|
|
354
|
+
def todo(
|
|
355
|
+
task: Annotated[str, typer.Argument(help="Task to remember")],
|
|
356
|
+
priority: Annotated[
|
|
357
|
+
int,
|
|
358
|
+
typer.Option(
|
|
359
|
+
"--priority", "-p", help="Priority 0-10 (default: 5=normal, 7=high, 10=critical)"
|
|
360
|
+
),
|
|
361
|
+
] = 5,
|
|
362
|
+
project: Annotated[
|
|
363
|
+
str | None,
|
|
364
|
+
typer.Option("--project", "-P", help="Associate with a project"),
|
|
365
|
+
] = None,
|
|
366
|
+
expires: Annotated[
|
|
367
|
+
int | None,
|
|
368
|
+
typer.Option("--expires", "-e", help="Days until expiry (default: 30)"),
|
|
369
|
+
] = None,
|
|
370
|
+
tags: Annotated[
|
|
371
|
+
list[str] | None,
|
|
372
|
+
typer.Option("--tag", "-t", help="Tags for the task"),
|
|
373
|
+
] = None,
|
|
374
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
375
|
+
) -> None:
|
|
376
|
+
"""Quick shortcut to add a TODO memory.
|
|
377
|
+
|
|
378
|
+
Equivalent to: nmem remember --type todo "task"
|
|
379
|
+
|
|
380
|
+
Examples:
|
|
381
|
+
nmem todo "Fix the login bug"
|
|
382
|
+
nmem todo "Review PR #123" --priority 7
|
|
383
|
+
nmem todo "Deploy to prod" -p 10 --project "Q1 Sprint"
|
|
384
|
+
nmem todo "Update docs" --expires 7
|
|
385
|
+
"""
|
|
386
|
+
# Determine expiry (default 30 days for todos)
|
|
387
|
+
expiry_days = expires if expires is not None else 30
|
|
388
|
+
mem_priority = Priority.from_int(priority)
|
|
389
|
+
|
|
390
|
+
async def _todo() -> dict:
|
|
391
|
+
config = get_config()
|
|
392
|
+
storage = await get_storage(config)
|
|
393
|
+
|
|
394
|
+
brain = await storage.get_brain(storage._current_brain_id)
|
|
395
|
+
if not brain:
|
|
396
|
+
return {"error": "No brain configured"}
|
|
397
|
+
|
|
398
|
+
# Look up project if specified
|
|
399
|
+
project_id = None
|
|
400
|
+
if project:
|
|
401
|
+
proj = await storage.get_project_by_name(project)
|
|
402
|
+
if not proj:
|
|
403
|
+
return {
|
|
404
|
+
"error": f"Project '{project}' not found. Create it with: nmem project create \"{project}\""
|
|
405
|
+
}
|
|
406
|
+
project_id = proj.id
|
|
407
|
+
|
|
408
|
+
encoder = MemoryEncoder(storage, brain.config)
|
|
409
|
+
storage.disable_auto_save()
|
|
410
|
+
|
|
411
|
+
result = await encoder.encode(
|
|
412
|
+
content=task,
|
|
413
|
+
timestamp=datetime.now(),
|
|
414
|
+
tags=set(tags) if tags else None,
|
|
415
|
+
)
|
|
416
|
+
|
|
417
|
+
# Create TODO typed memory
|
|
418
|
+
typed_mem = TypedMemory.create(
|
|
419
|
+
fiber_id=result.fiber.id,
|
|
420
|
+
memory_type=MemoryType.TODO,
|
|
421
|
+
priority=mem_priority,
|
|
422
|
+
source="user_input",
|
|
423
|
+
expires_in_days=expiry_days,
|
|
424
|
+
tags=set(tags) if tags else None,
|
|
425
|
+
project_id=project_id,
|
|
426
|
+
)
|
|
427
|
+
await storage.add_typed_memory(typed_mem)
|
|
428
|
+
await storage.batch_save()
|
|
429
|
+
|
|
430
|
+
response = {
|
|
431
|
+
"message": f"TODO: {task[:50]}{'...' if len(task) > 50 else ''}",
|
|
432
|
+
"fiber_id": result.fiber.id,
|
|
433
|
+
"memory_type": "todo",
|
|
434
|
+
"priority": mem_priority.name.lower(),
|
|
435
|
+
"expires_in_days": typed_mem.days_until_expiry,
|
|
436
|
+
}
|
|
437
|
+
|
|
438
|
+
if project_id:
|
|
439
|
+
response["project"] = project
|
|
440
|
+
|
|
441
|
+
return response
|
|
442
|
+
|
|
443
|
+
result = asyncio.run(_todo())
|
|
444
|
+
output_result(result, json_output)
|
|
445
|
+
|
|
446
|
+
|
|
447
|
+
@app.command()
|
|
448
|
+
def recall(
|
|
449
|
+
query: Annotated[str, typer.Argument(help="Query to search memories")],
|
|
450
|
+
depth: Annotated[
|
|
451
|
+
int | None,
|
|
452
|
+
typer.Option("--depth", "-d", help="Search depth (0=instant, 1=context, 2=habit, 3=deep)"),
|
|
453
|
+
] = None,
|
|
454
|
+
max_tokens: Annotated[
|
|
455
|
+
int, typer.Option("--max-tokens", "-m", help="Max tokens in response")
|
|
456
|
+
] = 500,
|
|
457
|
+
min_confidence: Annotated[
|
|
458
|
+
float, typer.Option("--min-confidence", "-c", help="Minimum confidence threshold (0.0-1.0)")
|
|
459
|
+
] = 0.0,
|
|
460
|
+
shared: Annotated[
|
|
461
|
+
bool, typer.Option("--shared", "-S", help="Use shared/remote storage for this command")
|
|
462
|
+
] = False,
|
|
463
|
+
show_age: Annotated[
|
|
464
|
+
bool, typer.Option("--show-age", "-a", help="Show memory ages in results")
|
|
465
|
+
] = True,
|
|
466
|
+
show_routing: Annotated[
|
|
467
|
+
bool, typer.Option("--show-routing", "-R", help="Show query routing info")
|
|
468
|
+
] = False,
|
|
469
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
470
|
+
) -> None:
|
|
471
|
+
"""Query memories with intelligent routing.
|
|
472
|
+
|
|
473
|
+
Query types (auto-detected):
|
|
474
|
+
semantic - Conceptual queries ("What do I know about auth?")
|
|
475
|
+
temporal - Time-based queries ("What did I do yesterday?")
|
|
476
|
+
causal - Why/how queries ("Why did the build fail?")
|
|
477
|
+
direct - Exact recall ("What's Alice's email?")
|
|
478
|
+
pattern - Habit queries ("What do I usually do on Mondays?")
|
|
479
|
+
comparative - Comparison ("Compare React and Vue")
|
|
480
|
+
|
|
481
|
+
Examples:
|
|
482
|
+
nmem recall "What did I do with auth?"
|
|
483
|
+
nmem recall "meetings with Alice" --depth 2
|
|
484
|
+
nmem recall "Why did the build fail?" --show-routing
|
|
485
|
+
nmem recall "project status" --min-confidence 0.5
|
|
486
|
+
nmem recall "shared knowledge" --shared # Query from shared brain
|
|
487
|
+
"""
|
|
488
|
+
|
|
489
|
+
async def _recall() -> dict:
|
|
490
|
+
config = get_config()
|
|
491
|
+
storage = await get_storage(config, force_shared=shared)
|
|
492
|
+
|
|
493
|
+
brain_id = (
|
|
494
|
+
storage._current_brain_id
|
|
495
|
+
if hasattr(storage, "_current_brain_id")
|
|
496
|
+
else config.current_brain
|
|
497
|
+
)
|
|
498
|
+
brain = await storage.get_brain(brain_id)
|
|
499
|
+
if not brain:
|
|
500
|
+
return {"error": "No brain configured"}
|
|
501
|
+
|
|
502
|
+
# Parse and route query
|
|
503
|
+
parser = QueryParser()
|
|
504
|
+
router = QueryRouter()
|
|
505
|
+
stimulus = parser.parse(query, reference_time=datetime.now())
|
|
506
|
+
route = router.route(stimulus)
|
|
507
|
+
|
|
508
|
+
# Use router's suggested depth if not specified
|
|
509
|
+
if depth is not None:
|
|
510
|
+
depth_level = DepthLevel(depth)
|
|
511
|
+
else:
|
|
512
|
+
depth_level = DepthLevel(min(route.suggested_depth, 3))
|
|
513
|
+
|
|
514
|
+
pipeline = ReflexPipeline(storage, brain.config)
|
|
515
|
+
|
|
516
|
+
result = await pipeline.query(
|
|
517
|
+
query=query,
|
|
518
|
+
depth=depth_level,
|
|
519
|
+
max_tokens=max_tokens,
|
|
520
|
+
reference_time=datetime.now(),
|
|
521
|
+
)
|
|
522
|
+
|
|
523
|
+
# Check confidence threshold
|
|
524
|
+
if result.confidence < min_confidence:
|
|
525
|
+
return {
|
|
526
|
+
"answer": f"No memories found with confidence >= {min_confidence:.2f}",
|
|
527
|
+
"confidence": result.confidence,
|
|
528
|
+
"neurons_activated": result.neurons_activated,
|
|
529
|
+
"below_threshold": True,
|
|
530
|
+
}
|
|
531
|
+
|
|
532
|
+
# Gather freshness information from matched fibers
|
|
533
|
+
freshness_warnings: list[str] = []
|
|
534
|
+
oldest_age = 0
|
|
535
|
+
|
|
536
|
+
if result.fibers_matched:
|
|
537
|
+
for fiber_id in result.fibers_matched:
|
|
538
|
+
fiber = await storage.get_fiber(fiber_id)
|
|
539
|
+
if fiber:
|
|
540
|
+
freshness = evaluate_freshness(fiber.created_at)
|
|
541
|
+
if freshness.warning:
|
|
542
|
+
freshness_warnings.append(freshness.warning)
|
|
543
|
+
if freshness.age_days > oldest_age:
|
|
544
|
+
oldest_age = freshness.age_days
|
|
545
|
+
|
|
546
|
+
response = {
|
|
547
|
+
"answer": result.context or "No relevant memories found.",
|
|
548
|
+
"confidence": result.confidence,
|
|
549
|
+
"depth_used": result.depth_used.value,
|
|
550
|
+
"neurons_activated": result.neurons_activated,
|
|
551
|
+
"fibers_matched": result.fibers_matched,
|
|
552
|
+
"latency_ms": result.latency_ms,
|
|
553
|
+
}
|
|
554
|
+
|
|
555
|
+
# Add routing info if requested
|
|
556
|
+
if show_routing:
|
|
557
|
+
response["routing"] = {
|
|
558
|
+
"query_type": route.primary.value,
|
|
559
|
+
"confidence": route.confidence.name.lower(),
|
|
560
|
+
"suggested_depth": route.suggested_depth,
|
|
561
|
+
"use_embeddings": route.use_embeddings,
|
|
562
|
+
"time_weighted": route.time_weighted,
|
|
563
|
+
"signals": list(route.signals)[:5], # Limit signals shown
|
|
564
|
+
}
|
|
565
|
+
|
|
566
|
+
if show_age and oldest_age > 0:
|
|
567
|
+
response["oldest_memory_age"] = format_age(oldest_age)
|
|
568
|
+
|
|
569
|
+
if freshness_warnings:
|
|
570
|
+
# Deduplicate warnings
|
|
571
|
+
unique_warnings = list(dict.fromkeys(freshness_warnings))[:3]
|
|
572
|
+
response["freshness_warnings"] = unique_warnings
|
|
573
|
+
|
|
574
|
+
return response
|
|
575
|
+
|
|
576
|
+
result = asyncio.run(_recall())
|
|
577
|
+
output_result(result, json_output)
|
|
578
|
+
|
|
579
|
+
|
|
580
|
+
@app.command()
|
|
581
|
+
def context(
|
|
582
|
+
limit: Annotated[int, typer.Option("--limit", "-l", help="Number of recent memories")] = 10,
|
|
583
|
+
fresh_only: Annotated[
|
|
584
|
+
bool, typer.Option("--fresh-only", help="Only include memories < 30 days old")
|
|
585
|
+
] = False,
|
|
586
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
587
|
+
) -> None:
|
|
588
|
+
"""Get recent context (for injecting into AI conversations).
|
|
589
|
+
|
|
590
|
+
Examples:
|
|
591
|
+
nmem context
|
|
592
|
+
nmem context --limit 5 --json
|
|
593
|
+
nmem context --fresh-only
|
|
594
|
+
"""
|
|
595
|
+
|
|
596
|
+
async def _context() -> dict:
|
|
597
|
+
config = get_config()
|
|
598
|
+
storage = await get_storage(config)
|
|
599
|
+
|
|
600
|
+
# Get recent fibers
|
|
601
|
+
fibers = await storage.get_fibers(limit=limit * 2 if fresh_only else limit)
|
|
602
|
+
|
|
603
|
+
if not fibers:
|
|
604
|
+
return {"context": "No memories stored yet.", "count": 0}
|
|
605
|
+
|
|
606
|
+
# Filter by freshness if requested
|
|
607
|
+
now = datetime.now()
|
|
608
|
+
if fresh_only:
|
|
609
|
+
fresh_fibers = []
|
|
610
|
+
for fiber in fibers:
|
|
611
|
+
freshness = evaluate_freshness(fiber.created_at, now)
|
|
612
|
+
if freshness.level in (FreshnessLevel.FRESH, FreshnessLevel.RECENT):
|
|
613
|
+
fresh_fibers.append(fiber)
|
|
614
|
+
fibers = fresh_fibers[:limit]
|
|
615
|
+
|
|
616
|
+
# Build context string with age indicators
|
|
617
|
+
context_parts = []
|
|
618
|
+
fiber_data = []
|
|
619
|
+
|
|
620
|
+
for fiber in fibers:
|
|
621
|
+
freshness = evaluate_freshness(fiber.created_at, now)
|
|
622
|
+
indicator = get_freshness_indicator(freshness.level)
|
|
623
|
+
age_str = format_age(freshness.age_days)
|
|
624
|
+
|
|
625
|
+
content = fiber.summary
|
|
626
|
+
if not content and fiber.anchor_neuron_id:
|
|
627
|
+
anchor = await storage.get_neuron(fiber.anchor_neuron_id)
|
|
628
|
+
if anchor:
|
|
629
|
+
content = anchor.content
|
|
630
|
+
|
|
631
|
+
if content:
|
|
632
|
+
context_parts.append(f"{indicator} [{age_str}] {content}")
|
|
633
|
+
fiber_data.append(
|
|
634
|
+
{
|
|
635
|
+
"id": fiber.id,
|
|
636
|
+
"summary": content,
|
|
637
|
+
"created_at": fiber.created_at.isoformat(),
|
|
638
|
+
"age": age_str,
|
|
639
|
+
"freshness": freshness.level.value,
|
|
640
|
+
}
|
|
641
|
+
)
|
|
642
|
+
|
|
643
|
+
context_str = "\n".join(context_parts) if context_parts else "No context available."
|
|
644
|
+
|
|
645
|
+
# Analyze overall freshness
|
|
646
|
+
created_dates = [f.created_at for f in fibers]
|
|
647
|
+
freshness_report = analyze_freshness(created_dates, now)
|
|
648
|
+
|
|
649
|
+
return {
|
|
650
|
+
"context": context_str,
|
|
651
|
+
"count": len(fiber_data),
|
|
652
|
+
"fibers": fiber_data,
|
|
653
|
+
"freshness_summary": {
|
|
654
|
+
"fresh": freshness_report.fresh,
|
|
655
|
+
"recent": freshness_report.recent,
|
|
656
|
+
"aging": freshness_report.aging,
|
|
657
|
+
"stale": freshness_report.stale,
|
|
658
|
+
"ancient": freshness_report.ancient,
|
|
659
|
+
},
|
|
660
|
+
}
|
|
661
|
+
|
|
662
|
+
result = asyncio.run(_context())
|
|
663
|
+
output_result(result, json_output)
|
|
664
|
+
|
|
665
|
+
|
|
666
|
+
@app.command("list")
|
|
667
|
+
def list_memories(
|
|
668
|
+
memory_type: Annotated[
|
|
669
|
+
str | None,
|
|
670
|
+
typer.Option("--type", "-T", help="Filter by memory type (fact, decision, todo, etc.)"),
|
|
671
|
+
] = None,
|
|
672
|
+
min_priority: Annotated[
|
|
673
|
+
int | None,
|
|
674
|
+
typer.Option("--min-priority", "-p", help="Minimum priority (0-10)"),
|
|
675
|
+
] = None,
|
|
676
|
+
project_name: Annotated[
|
|
677
|
+
str | None,
|
|
678
|
+
typer.Option("--project", "-P", help="Filter by project name"),
|
|
679
|
+
] = None,
|
|
680
|
+
show_expired: Annotated[
|
|
681
|
+
bool,
|
|
682
|
+
typer.Option("--expired", "-e", help="Show only expired memories"),
|
|
683
|
+
] = False,
|
|
684
|
+
include_expired: Annotated[
|
|
685
|
+
bool,
|
|
686
|
+
typer.Option("--include-expired", help="Include expired memories in results"),
|
|
687
|
+
] = False,
|
|
688
|
+
limit: Annotated[int, typer.Option("--limit", "-l", help="Maximum number of results")] = 20,
|
|
689
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
690
|
+
) -> None:
|
|
691
|
+
"""List memories with filtering by type, priority, project, and status.
|
|
692
|
+
|
|
693
|
+
Memory types: fact, decision, preference, todo, insight, context,
|
|
694
|
+
instruction, error, workflow, reference
|
|
695
|
+
|
|
696
|
+
Examples:
|
|
697
|
+
nmem list # List all recent memories
|
|
698
|
+
nmem list --type todo # List all TODOs
|
|
699
|
+
nmem list --type decision -p 7 # High priority decisions
|
|
700
|
+
nmem list --expired # Show expired memories
|
|
701
|
+
nmem list --type todo --expired # Expired TODOs (need cleanup)
|
|
702
|
+
nmem list --project "Q1 Sprint" # Memories in a project
|
|
703
|
+
"""
|
|
704
|
+
|
|
705
|
+
async def _list() -> dict:
|
|
706
|
+
config = get_config()
|
|
707
|
+
storage = await get_storage(config)
|
|
708
|
+
|
|
709
|
+
# Parse memory type if provided
|
|
710
|
+
mem_type = None
|
|
711
|
+
if memory_type:
|
|
712
|
+
try:
|
|
713
|
+
mem_type = MemoryType(memory_type.lower())
|
|
714
|
+
except ValueError:
|
|
715
|
+
valid_types = ", ".join(t.value for t in MemoryType)
|
|
716
|
+
return {"error": f"Invalid memory type. Valid types: {valid_types}"}
|
|
717
|
+
|
|
718
|
+
# Parse priority
|
|
719
|
+
priority = None
|
|
720
|
+
if min_priority is not None:
|
|
721
|
+
priority = Priority.from_int(min_priority)
|
|
722
|
+
|
|
723
|
+
# Look up project if specified
|
|
724
|
+
project_id = None
|
|
725
|
+
if project_name:
|
|
726
|
+
proj = await storage.get_project_by_name(project_name)
|
|
727
|
+
if not proj:
|
|
728
|
+
return {"error": f"Project '{project_name}' not found."}
|
|
729
|
+
project_id = proj.id
|
|
730
|
+
|
|
731
|
+
# Handle expired-only mode
|
|
732
|
+
if show_expired:
|
|
733
|
+
expired_memories = await storage.get_expired_memories()
|
|
734
|
+
if mem_type:
|
|
735
|
+
expired_memories = [tm for tm in expired_memories if tm.memory_type == mem_type]
|
|
736
|
+
|
|
737
|
+
memories_data = []
|
|
738
|
+
for tm in expired_memories[:limit]:
|
|
739
|
+
fiber = await storage.get_fiber(tm.fiber_id)
|
|
740
|
+
content = ""
|
|
741
|
+
if fiber:
|
|
742
|
+
if fiber.summary:
|
|
743
|
+
content = fiber.summary
|
|
744
|
+
elif fiber.anchor_neuron_id:
|
|
745
|
+
anchor = await storage.get_neuron(fiber.anchor_neuron_id)
|
|
746
|
+
if anchor:
|
|
747
|
+
content = anchor.content
|
|
748
|
+
|
|
749
|
+
memories_data.append(
|
|
750
|
+
{
|
|
751
|
+
"fiber_id": tm.fiber_id,
|
|
752
|
+
"type": tm.memory_type.value,
|
|
753
|
+
"priority": tm.priority.name.lower(),
|
|
754
|
+
"content": content[:100] + "..." if len(content) > 100 else content,
|
|
755
|
+
"expired_days_ago": abs(tm.days_until_expiry)
|
|
756
|
+
if tm.days_until_expiry
|
|
757
|
+
else 0,
|
|
758
|
+
"created_at": tm.created_at.isoformat(),
|
|
759
|
+
}
|
|
760
|
+
)
|
|
761
|
+
|
|
762
|
+
return {
|
|
763
|
+
"memories": memories_data,
|
|
764
|
+
"count": len(memories_data),
|
|
765
|
+
"filter": "expired",
|
|
766
|
+
"type_filter": memory_type,
|
|
767
|
+
}
|
|
768
|
+
|
|
769
|
+
# Normal listing with filters
|
|
770
|
+
typed_memories = await storage.find_typed_memories(
|
|
771
|
+
memory_type=mem_type,
|
|
772
|
+
min_priority=priority,
|
|
773
|
+
include_expired=include_expired,
|
|
774
|
+
project_id=project_id,
|
|
775
|
+
limit=limit,
|
|
776
|
+
)
|
|
777
|
+
|
|
778
|
+
# If no typed memories, fall back to listing fibers
|
|
779
|
+
if not typed_memories:
|
|
780
|
+
fibers = await storage.get_fibers(limit=limit)
|
|
781
|
+
memories_data = []
|
|
782
|
+
for fiber in fibers:
|
|
783
|
+
content = fiber.summary
|
|
784
|
+
if not content and fiber.anchor_neuron_id:
|
|
785
|
+
anchor = await storage.get_neuron(fiber.anchor_neuron_id)
|
|
786
|
+
if anchor:
|
|
787
|
+
content = anchor.content
|
|
788
|
+
|
|
789
|
+
freshness = evaluate_freshness(fiber.created_at)
|
|
790
|
+
memories_data.append(
|
|
791
|
+
{
|
|
792
|
+
"fiber_id": fiber.id,
|
|
793
|
+
"type": "unknown",
|
|
794
|
+
"priority": "normal",
|
|
795
|
+
"content": content[:100] + "..."
|
|
796
|
+
if content and len(content) > 100
|
|
797
|
+
else content or "",
|
|
798
|
+
"age": format_age(freshness.age_days),
|
|
799
|
+
"created_at": fiber.created_at.isoformat(),
|
|
800
|
+
}
|
|
801
|
+
)
|
|
802
|
+
|
|
803
|
+
return {
|
|
804
|
+
"memories": memories_data,
|
|
805
|
+
"count": len(memories_data),
|
|
806
|
+
"note": "No typed memories found. Showing raw fibers.",
|
|
807
|
+
}
|
|
808
|
+
|
|
809
|
+
# Build response with typed memories
|
|
810
|
+
memories_data = []
|
|
811
|
+
for tm in typed_memories:
|
|
812
|
+
fiber = await storage.get_fiber(tm.fiber_id)
|
|
813
|
+
content = ""
|
|
814
|
+
if fiber:
|
|
815
|
+
if fiber.summary:
|
|
816
|
+
content = fiber.summary
|
|
817
|
+
elif fiber.anchor_neuron_id:
|
|
818
|
+
anchor = await storage.get_neuron(fiber.anchor_neuron_id)
|
|
819
|
+
if anchor:
|
|
820
|
+
content = anchor.content
|
|
821
|
+
|
|
822
|
+
freshness = evaluate_freshness(tm.created_at)
|
|
823
|
+
expiry_info = None
|
|
824
|
+
if tm.expires_at:
|
|
825
|
+
days = tm.days_until_expiry
|
|
826
|
+
if days is not None:
|
|
827
|
+
expiry_info = f"{days}d" if days > 0 else "EXPIRED"
|
|
828
|
+
|
|
829
|
+
memories_data.append(
|
|
830
|
+
{
|
|
831
|
+
"fiber_id": tm.fiber_id,
|
|
832
|
+
"type": tm.memory_type.value,
|
|
833
|
+
"priority": tm.priority.name.lower(),
|
|
834
|
+
"content": content[:100] + "..." if len(content) > 100 else content,
|
|
835
|
+
"age": format_age(freshness.age_days),
|
|
836
|
+
"expires": expiry_info,
|
|
837
|
+
"verified": tm.provenance.verified,
|
|
838
|
+
"created_at": tm.created_at.isoformat(),
|
|
839
|
+
}
|
|
840
|
+
)
|
|
841
|
+
|
|
842
|
+
return {
|
|
843
|
+
"memories": memories_data,
|
|
844
|
+
"count": len(memories_data),
|
|
845
|
+
"type_filter": memory_type,
|
|
846
|
+
"min_priority": min_priority,
|
|
847
|
+
"project_filter": project_name,
|
|
848
|
+
}
|
|
849
|
+
|
|
850
|
+
result = asyncio.run(_list())
|
|
851
|
+
|
|
852
|
+
if json_output:
|
|
853
|
+
output_result(result, True)
|
|
854
|
+
else:
|
|
855
|
+
if "error" in result:
|
|
856
|
+
typer.secho(result["error"], fg=typer.colors.RED)
|
|
857
|
+
return
|
|
858
|
+
|
|
859
|
+
memories = result.get("memories", [])
|
|
860
|
+
if not memories:
|
|
861
|
+
typer.echo("No memories found.")
|
|
862
|
+
return
|
|
863
|
+
|
|
864
|
+
if result.get("note"):
|
|
865
|
+
typer.secho(result["note"], fg=typer.colors.YELLOW)
|
|
866
|
+
typer.echo("")
|
|
867
|
+
|
|
868
|
+
# Display header
|
|
869
|
+
filter_parts = []
|
|
870
|
+
if result.get("type_filter"):
|
|
871
|
+
filter_parts.append(f"type={result['type_filter']}")
|
|
872
|
+
if result.get("min_priority"):
|
|
873
|
+
filter_parts.append(f"priority>={result['min_priority']}")
|
|
874
|
+
if result.get("project_filter"):
|
|
875
|
+
filter_parts.append(f"project={result['project_filter']}")
|
|
876
|
+
if result.get("filter") == "expired":
|
|
877
|
+
filter_parts.append("EXPIRED")
|
|
878
|
+
|
|
879
|
+
header = f"Memories ({result['count']})"
|
|
880
|
+
if filter_parts:
|
|
881
|
+
header += f" [{', '.join(filter_parts)}]"
|
|
882
|
+
typer.secho(header, fg=typer.colors.CYAN, bold=True)
|
|
883
|
+
typer.echo("-" * 60)
|
|
884
|
+
|
|
885
|
+
# Display memories
|
|
886
|
+
for mem in memories:
|
|
887
|
+
# Type indicator
|
|
888
|
+
type_colors = {
|
|
889
|
+
"todo": typer.colors.YELLOW,
|
|
890
|
+
"decision": typer.colors.BLUE,
|
|
891
|
+
"error": typer.colors.RED,
|
|
892
|
+
"fact": typer.colors.WHITE,
|
|
893
|
+
"preference": typer.colors.MAGENTA,
|
|
894
|
+
"insight": typer.colors.GREEN,
|
|
895
|
+
}
|
|
896
|
+
type_color = type_colors.get(mem["type"], typer.colors.WHITE)
|
|
897
|
+
|
|
898
|
+
# Priority indicator
|
|
899
|
+
priority_indicators = {
|
|
900
|
+
"critical": "[!!!]",
|
|
901
|
+
"high": "[!!]",
|
|
902
|
+
"normal": "[+]",
|
|
903
|
+
"low": "[.]",
|
|
904
|
+
"lowest": "[_]",
|
|
905
|
+
}
|
|
906
|
+
priority_ind = priority_indicators.get(mem["priority"], "[+]")
|
|
907
|
+
|
|
908
|
+
# Build line
|
|
909
|
+
type_badge = f"[{mem['type'][:4].upper()}]"
|
|
910
|
+
content = mem.get("content", "")[:60]
|
|
911
|
+
if len(mem.get("content", "")) > 60:
|
|
912
|
+
content += "..."
|
|
913
|
+
|
|
914
|
+
typer.echo(f"{priority_ind} ", nl=False)
|
|
915
|
+
typer.secho(type_badge, fg=type_color, nl=False)
|
|
916
|
+
typer.echo(f" {content}")
|
|
917
|
+
|
|
918
|
+
# Second line with metadata
|
|
919
|
+
meta_parts = []
|
|
920
|
+
if mem.get("age"):
|
|
921
|
+
meta_parts.append(mem["age"])
|
|
922
|
+
if mem.get("expires"):
|
|
923
|
+
if mem["expires"] == "EXPIRED":
|
|
924
|
+
meta_parts.append(typer.style("EXPIRED", fg=typer.colors.RED))
|
|
925
|
+
else:
|
|
926
|
+
meta_parts.append(f"expires: {mem['expires']}")
|
|
927
|
+
if mem.get("verified"):
|
|
928
|
+
meta_parts.append("verified")
|
|
929
|
+
|
|
930
|
+
if meta_parts:
|
|
931
|
+
typer.secho(f" {' | '.join(meta_parts)}", fg=typer.colors.BRIGHT_BLACK)
|
|
932
|
+
|
|
933
|
+
typer.echo("-" * 60)
|
|
934
|
+
|
|
935
|
+
|
|
936
|
+
@app.command()
|
|
937
|
+
def cleanup(
|
|
938
|
+
expired_only: Annotated[
|
|
939
|
+
bool,
|
|
940
|
+
typer.Option("--expired", "-e", help="Only clean up expired memories"),
|
|
941
|
+
] = True,
|
|
942
|
+
memory_type: Annotated[
|
|
943
|
+
str | None,
|
|
944
|
+
typer.Option("--type", "-T", help="Only clean up specific memory type"),
|
|
945
|
+
] = None,
|
|
946
|
+
dry_run: Annotated[
|
|
947
|
+
bool,
|
|
948
|
+
typer.Option("--dry-run", "-n", help="Show what would be deleted without deleting"),
|
|
949
|
+
] = False,
|
|
950
|
+
force: Annotated[
|
|
951
|
+
bool,
|
|
952
|
+
typer.Option("--force", "-f", help="Skip confirmation"),
|
|
953
|
+
] = False,
|
|
954
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
955
|
+
) -> None:
|
|
956
|
+
"""Clean up expired or old memories.
|
|
957
|
+
|
|
958
|
+
Examples:
|
|
959
|
+
nmem cleanup --expired # Remove all expired memories
|
|
960
|
+
nmem cleanup --expired --dry-run # Preview what would be removed
|
|
961
|
+
nmem cleanup --type context # Remove expired context memories
|
|
962
|
+
"""
|
|
963
|
+
|
|
964
|
+
async def _cleanup() -> dict:
|
|
965
|
+
config = get_config()
|
|
966
|
+
storage = await get_storage(config)
|
|
967
|
+
|
|
968
|
+
# Parse memory type if provided
|
|
969
|
+
mem_type = None
|
|
970
|
+
if memory_type:
|
|
971
|
+
try:
|
|
972
|
+
mem_type = MemoryType(memory_type.lower())
|
|
973
|
+
except ValueError:
|
|
974
|
+
valid_types = ", ".join(t.value for t in MemoryType)
|
|
975
|
+
return {"error": f"Invalid memory type. Valid types: {valid_types}"}
|
|
976
|
+
|
|
977
|
+
# Get expired memories
|
|
978
|
+
expired_memories = await storage.get_expired_memories()
|
|
979
|
+
|
|
980
|
+
# Filter by type if specified
|
|
981
|
+
if mem_type:
|
|
982
|
+
expired_memories = [tm for tm in expired_memories if tm.memory_type == mem_type]
|
|
983
|
+
|
|
984
|
+
if not expired_memories:
|
|
985
|
+
return {"message": "No expired memories to clean up.", "deleted": 0}
|
|
986
|
+
|
|
987
|
+
# Build preview
|
|
988
|
+
to_delete = []
|
|
989
|
+
for tm in expired_memories:
|
|
990
|
+
fiber = await storage.get_fiber(tm.fiber_id)
|
|
991
|
+
content = ""
|
|
992
|
+
if fiber:
|
|
993
|
+
if fiber.summary:
|
|
994
|
+
content = fiber.summary[:50]
|
|
995
|
+
elif fiber.anchor_neuron_id:
|
|
996
|
+
anchor = await storage.get_neuron(fiber.anchor_neuron_id)
|
|
997
|
+
if anchor:
|
|
998
|
+
content = anchor.content[:50]
|
|
999
|
+
|
|
1000
|
+
to_delete.append(
|
|
1001
|
+
{
|
|
1002
|
+
"fiber_id": tm.fiber_id,
|
|
1003
|
+
"type": tm.memory_type.value,
|
|
1004
|
+
"content": content,
|
|
1005
|
+
"expired_at": tm.expires_at.isoformat() if tm.expires_at else None,
|
|
1006
|
+
}
|
|
1007
|
+
)
|
|
1008
|
+
|
|
1009
|
+
if dry_run:
|
|
1010
|
+
return {
|
|
1011
|
+
"dry_run": True,
|
|
1012
|
+
"would_delete": to_delete,
|
|
1013
|
+
"count": len(to_delete),
|
|
1014
|
+
}
|
|
1015
|
+
|
|
1016
|
+
# Actually delete
|
|
1017
|
+
deleted_count = 0
|
|
1018
|
+
for tm in expired_memories:
|
|
1019
|
+
# Delete typed memory
|
|
1020
|
+
await storage.delete_typed_memory(tm.fiber_id)
|
|
1021
|
+
# Optionally delete the fiber too
|
|
1022
|
+
await storage.delete_fiber(tm.fiber_id)
|
|
1023
|
+
deleted_count += 1
|
|
1024
|
+
|
|
1025
|
+
await storage.batch_save()
|
|
1026
|
+
|
|
1027
|
+
return {
|
|
1028
|
+
"message": f"Cleaned up {deleted_count} expired memories.",
|
|
1029
|
+
"deleted": deleted_count,
|
|
1030
|
+
"details": to_delete,
|
|
1031
|
+
}
|
|
1032
|
+
|
|
1033
|
+
# Confirmation for non-dry-run
|
|
1034
|
+
if not dry_run and not force:
|
|
1035
|
+
# First do a dry run to show count
|
|
1036
|
+
async def _preview() -> int:
|
|
1037
|
+
config = get_config()
|
|
1038
|
+
storage = await get_storage(config)
|
|
1039
|
+
expired = await storage.get_expired_memories()
|
|
1040
|
+
if memory_type:
|
|
1041
|
+
try:
|
|
1042
|
+
mem_type = MemoryType(memory_type.lower())
|
|
1043
|
+
expired = [tm for tm in expired if tm.memory_type == mem_type]
|
|
1044
|
+
except ValueError:
|
|
1045
|
+
pass
|
|
1046
|
+
return len(expired)
|
|
1047
|
+
|
|
1048
|
+
count = asyncio.run(_preview())
|
|
1049
|
+
if count == 0:
|
|
1050
|
+
typer.echo("No expired memories to clean up.")
|
|
1051
|
+
return
|
|
1052
|
+
|
|
1053
|
+
if not typer.confirm(f"Delete {count} expired memories? This cannot be undone."):
|
|
1054
|
+
typer.echo("Cancelled.")
|
|
1055
|
+
return
|
|
1056
|
+
|
|
1057
|
+
result = asyncio.run(_cleanup())
|
|
1058
|
+
|
|
1059
|
+
if json_output:
|
|
1060
|
+
output_result(result, True)
|
|
1061
|
+
else:
|
|
1062
|
+
if "error" in result:
|
|
1063
|
+
typer.secho(result["error"], fg=typer.colors.RED)
|
|
1064
|
+
return
|
|
1065
|
+
|
|
1066
|
+
if result.get("dry_run"):
|
|
1067
|
+
typer.secho(f"Would delete {result['count']} memories:", fg=typer.colors.YELLOW)
|
|
1068
|
+
for item in result["would_delete"][:10]:
|
|
1069
|
+
typer.echo(f" [{item['type']}] {item['content']}...")
|
|
1070
|
+
if result["count"] > 10:
|
|
1071
|
+
typer.echo(f" ... and {result['count'] - 10} more")
|
|
1072
|
+
else:
|
|
1073
|
+
typer.secho(result["message"], fg=typer.colors.GREEN)
|
|
1074
|
+
|
|
1075
|
+
|
|
1076
|
+
@app.command()
|
|
1077
|
+
def stats(
|
|
1078
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
1079
|
+
) -> None:
|
|
1080
|
+
"""Show brain statistics including freshness and memory type analysis.
|
|
1081
|
+
|
|
1082
|
+
Examples:
|
|
1083
|
+
nmem stats
|
|
1084
|
+
nmem stats --json
|
|
1085
|
+
"""
|
|
1086
|
+
|
|
1087
|
+
async def _stats() -> dict:
|
|
1088
|
+
config = get_config()
|
|
1089
|
+
storage = await get_storage(config)
|
|
1090
|
+
|
|
1091
|
+
brain = await storage.get_brain(storage._current_brain_id)
|
|
1092
|
+
if not brain:
|
|
1093
|
+
return {"error": "No brain configured"}
|
|
1094
|
+
|
|
1095
|
+
stats_data = await storage.get_stats(brain.id)
|
|
1096
|
+
|
|
1097
|
+
# Get fibers for freshness analysis
|
|
1098
|
+
fibers = await storage.get_fibers(limit=1000)
|
|
1099
|
+
created_dates = [f.created_at for f in fibers]
|
|
1100
|
+
freshness_report = analyze_freshness(created_dates)
|
|
1101
|
+
|
|
1102
|
+
# Get typed memory statistics
|
|
1103
|
+
typed_memories = await storage.find_typed_memories(include_expired=True, limit=10000)
|
|
1104
|
+
expired_memories = await storage.get_expired_memories()
|
|
1105
|
+
|
|
1106
|
+
# Count by type
|
|
1107
|
+
type_counts: dict[str, int] = {}
|
|
1108
|
+
priority_counts: dict[str, int] = {
|
|
1109
|
+
"critical": 0,
|
|
1110
|
+
"high": 0,
|
|
1111
|
+
"normal": 0,
|
|
1112
|
+
"low": 0,
|
|
1113
|
+
"lowest": 0,
|
|
1114
|
+
}
|
|
1115
|
+
|
|
1116
|
+
for tm in typed_memories:
|
|
1117
|
+
type_name = tm.memory_type.value
|
|
1118
|
+
type_counts[type_name] = type_counts.get(type_name, 0) + 1
|
|
1119
|
+
priority_counts[tm.priority.name.lower()] += 1
|
|
1120
|
+
|
|
1121
|
+
return {
|
|
1122
|
+
"brain": brain.name,
|
|
1123
|
+
"brain_id": brain.id,
|
|
1124
|
+
"neuron_count": stats_data["neuron_count"],
|
|
1125
|
+
"synapse_count": stats_data["synapse_count"],
|
|
1126
|
+
"fiber_count": stats_data["fiber_count"],
|
|
1127
|
+
"typed_memory_count": len(typed_memories),
|
|
1128
|
+
"expired_count": len(expired_memories),
|
|
1129
|
+
"created_at": brain.created_at.isoformat(),
|
|
1130
|
+
"freshness": {
|
|
1131
|
+
"fresh": freshness_report.fresh,
|
|
1132
|
+
"recent": freshness_report.recent,
|
|
1133
|
+
"aging": freshness_report.aging,
|
|
1134
|
+
"stale": freshness_report.stale,
|
|
1135
|
+
"ancient": freshness_report.ancient,
|
|
1136
|
+
"average_age_days": round(freshness_report.average_age_days, 1),
|
|
1137
|
+
},
|
|
1138
|
+
"by_type": type_counts,
|
|
1139
|
+
"by_priority": priority_counts,
|
|
1140
|
+
}
|
|
1141
|
+
|
|
1142
|
+
result = asyncio.run(_stats())
|
|
1143
|
+
|
|
1144
|
+
if json_output:
|
|
1145
|
+
output_result(result, True)
|
|
1146
|
+
else:
|
|
1147
|
+
typer.echo(f"Brain: {result['brain']}")
|
|
1148
|
+
typer.echo(f"Neurons: {result['neuron_count']}")
|
|
1149
|
+
typer.echo(f"Synapses: {result['synapse_count']}")
|
|
1150
|
+
typer.echo(f"Fibers (memories): {result['fiber_count']}")
|
|
1151
|
+
|
|
1152
|
+
# Show typed memory stats
|
|
1153
|
+
if result.get("typed_memory_count", 0) > 0:
|
|
1154
|
+
typer.echo(f"\nTyped Memories: {result['typed_memory_count']}")
|
|
1155
|
+
|
|
1156
|
+
# By type
|
|
1157
|
+
by_type = result.get("by_type", {})
|
|
1158
|
+
if by_type:
|
|
1159
|
+
typer.echo(" By type:")
|
|
1160
|
+
for mem_type, count in sorted(by_type.items(), key=lambda x: -x[1]):
|
|
1161
|
+
typer.echo(f" {mem_type}: {count}")
|
|
1162
|
+
|
|
1163
|
+
# By priority (only show non-zero)
|
|
1164
|
+
by_priority = result.get("by_priority", {})
|
|
1165
|
+
non_zero_priority = {k: v for k, v in by_priority.items() if v > 0}
|
|
1166
|
+
if non_zero_priority:
|
|
1167
|
+
typer.echo(" By priority:")
|
|
1168
|
+
for pri in ["critical", "high", "normal", "low", "lowest"]:
|
|
1169
|
+
if pri in non_zero_priority:
|
|
1170
|
+
typer.echo(f" {pri}: {non_zero_priority[pri]}")
|
|
1171
|
+
|
|
1172
|
+
# Expired warning
|
|
1173
|
+
if result.get("expired_count", 0) > 0:
|
|
1174
|
+
typer.secho(
|
|
1175
|
+
f"\n [!] {result['expired_count']} expired memories - run 'nmem cleanup' to remove",
|
|
1176
|
+
fg=typer.colors.YELLOW,
|
|
1177
|
+
)
|
|
1178
|
+
|
|
1179
|
+
if result.get("freshness") and result["fiber_count"] > 0:
|
|
1180
|
+
f = result["freshness"]
|
|
1181
|
+
typer.echo("\nMemory Freshness:")
|
|
1182
|
+
typer.echo(f" [+] Fresh (<7d): {f['fresh']}")
|
|
1183
|
+
typer.echo(f" [+] Recent (7-30d): {f['recent']}")
|
|
1184
|
+
typer.echo(f" [~] Aging (30-90d): {f['aging']}")
|
|
1185
|
+
typer.echo(f" [!] Stale (90-365d): {f['stale']}")
|
|
1186
|
+
typer.echo(f" [!!] Ancient (>365d): {f['ancient']}")
|
|
1187
|
+
typer.echo(f" Average age: {f['average_age_days']} days")
|
|
1188
|
+
|
|
1189
|
+
|
|
1190
|
+
@app.command()
|
|
1191
|
+
def check(
|
|
1192
|
+
content: Annotated[str, typer.Argument(help="Content to check for sensitive data")],
|
|
1193
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
1194
|
+
) -> None:
|
|
1195
|
+
"""Check content for sensitive information without storing.
|
|
1196
|
+
|
|
1197
|
+
Examples:
|
|
1198
|
+
nmem check "My API_KEY=sk-xxx123"
|
|
1199
|
+
nmem check "password: secret123" --json
|
|
1200
|
+
"""
|
|
1201
|
+
matches = check_sensitive_content(content)
|
|
1202
|
+
|
|
1203
|
+
if json_output:
|
|
1204
|
+
output_result(
|
|
1205
|
+
{
|
|
1206
|
+
"sensitive": len(matches) > 0,
|
|
1207
|
+
"matches": [
|
|
1208
|
+
{
|
|
1209
|
+
"type": m.type.value,
|
|
1210
|
+
"pattern": m.pattern_name,
|
|
1211
|
+
"severity": m.severity,
|
|
1212
|
+
"redacted": m.redacted(),
|
|
1213
|
+
}
|
|
1214
|
+
for m in matches
|
|
1215
|
+
],
|
|
1216
|
+
},
|
|
1217
|
+
True,
|
|
1218
|
+
)
|
|
1219
|
+
else:
|
|
1220
|
+
if matches:
|
|
1221
|
+
typer.echo(format_sensitive_warning(matches))
|
|
1222
|
+
else:
|
|
1223
|
+
typer.secho("[OK] No sensitive content detected", fg=typer.colors.GREEN)
|
|
1224
|
+
|
|
1225
|
+
|
|
1226
|
+
# =============================================================================
|
|
1227
|
+
# Brain Management Commands
|
|
1228
|
+
# =============================================================================
|
|
1229
|
+
|
|
1230
|
+
|
|
1231
|
+
@brain_app.command("list")
|
|
1232
|
+
def brain_list(
|
|
1233
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
1234
|
+
) -> None:
|
|
1235
|
+
"""List available brains.
|
|
1236
|
+
|
|
1237
|
+
Examples:
|
|
1238
|
+
nmem brain list
|
|
1239
|
+
nmem brain list --json
|
|
1240
|
+
"""
|
|
1241
|
+
config = get_config()
|
|
1242
|
+
brains = config.list_brains()
|
|
1243
|
+
current = config.current_brain
|
|
1244
|
+
|
|
1245
|
+
if json_output:
|
|
1246
|
+
output_result({"brains": brains, "current": current}, True)
|
|
1247
|
+
else:
|
|
1248
|
+
if not brains:
|
|
1249
|
+
typer.echo("No brains found. Create one with: nmem brain create <name>")
|
|
1250
|
+
return
|
|
1251
|
+
|
|
1252
|
+
typer.echo("Available brains:")
|
|
1253
|
+
for brain in brains:
|
|
1254
|
+
marker = " *" if brain == current else ""
|
|
1255
|
+
typer.echo(f" {brain}{marker}")
|
|
1256
|
+
|
|
1257
|
+
|
|
1258
|
+
@brain_app.command("use")
|
|
1259
|
+
def brain_use(
|
|
1260
|
+
name: Annotated[str, typer.Argument(help="Brain name to switch to")],
|
|
1261
|
+
) -> None:
|
|
1262
|
+
"""Switch to a different brain.
|
|
1263
|
+
|
|
1264
|
+
Examples:
|
|
1265
|
+
nmem brain use work
|
|
1266
|
+
nmem brain use personal
|
|
1267
|
+
"""
|
|
1268
|
+
config = get_config()
|
|
1269
|
+
|
|
1270
|
+
if name not in config.list_brains():
|
|
1271
|
+
typer.secho(
|
|
1272
|
+
f"Brain '{name}' not found. Create it with: nmem brain create {name}",
|
|
1273
|
+
fg=typer.colors.RED,
|
|
1274
|
+
)
|
|
1275
|
+
raise typer.Exit(1)
|
|
1276
|
+
|
|
1277
|
+
config.current_brain = name
|
|
1278
|
+
config.save()
|
|
1279
|
+
typer.secho(f"Switched to brain: {name}", fg=typer.colors.GREEN)
|
|
1280
|
+
|
|
1281
|
+
|
|
1282
|
+
@brain_app.command("create")
|
|
1283
|
+
def brain_create(
|
|
1284
|
+
name: Annotated[str, typer.Argument(help="Name for the new brain")],
|
|
1285
|
+
use: Annotated[
|
|
1286
|
+
bool, typer.Option("--use", "-u", help="Switch to the new brain after creating")
|
|
1287
|
+
] = True,
|
|
1288
|
+
) -> None:
|
|
1289
|
+
"""Create a new brain.
|
|
1290
|
+
|
|
1291
|
+
Examples:
|
|
1292
|
+
nmem brain create work
|
|
1293
|
+
nmem brain create personal --no-use
|
|
1294
|
+
"""
|
|
1295
|
+
|
|
1296
|
+
async def _create() -> None:
|
|
1297
|
+
config = get_config()
|
|
1298
|
+
|
|
1299
|
+
if name in config.list_brains():
|
|
1300
|
+
typer.secho(f"Brain '{name}' already exists.", fg=typer.colors.RED)
|
|
1301
|
+
raise typer.Exit(1)
|
|
1302
|
+
|
|
1303
|
+
# Create new brain by loading storage (which creates if not exists)
|
|
1304
|
+
brain_path = config.get_brain_path(name)
|
|
1305
|
+
await PersistentStorage.load(brain_path)
|
|
1306
|
+
|
|
1307
|
+
if use:
|
|
1308
|
+
config.current_brain = name
|
|
1309
|
+
config.save()
|
|
1310
|
+
|
|
1311
|
+
typer.secho(f"Created brain: {name}", fg=typer.colors.GREEN)
|
|
1312
|
+
if use:
|
|
1313
|
+
typer.echo(f"Now using: {name}")
|
|
1314
|
+
|
|
1315
|
+
asyncio.run(_create())
|
|
1316
|
+
|
|
1317
|
+
|
|
1318
|
+
@brain_app.command("export")
|
|
1319
|
+
def brain_export(
|
|
1320
|
+
output: Annotated[str | None, typer.Option("--output", "-o", help="Output file path")] = None,
|
|
1321
|
+
name: Annotated[
|
|
1322
|
+
str | None, typer.Option("--name", "-n", help="Brain name (default: current)")
|
|
1323
|
+
] = None,
|
|
1324
|
+
exclude_sensitive: Annotated[
|
|
1325
|
+
bool,
|
|
1326
|
+
typer.Option("--exclude-sensitive", "-s", help="Exclude memories with sensitive content"),
|
|
1327
|
+
] = False,
|
|
1328
|
+
) -> None:
|
|
1329
|
+
"""Export brain to JSON file.
|
|
1330
|
+
|
|
1331
|
+
Examples:
|
|
1332
|
+
nmem brain export
|
|
1333
|
+
nmem brain export -o backup.json
|
|
1334
|
+
nmem brain export --exclude-sensitive -o safe.json
|
|
1335
|
+
"""
|
|
1336
|
+
|
|
1337
|
+
async def _export() -> None:
|
|
1338
|
+
config = get_config()
|
|
1339
|
+
brain_name = name or config.current_brain
|
|
1340
|
+
brain_path = config.get_brain_path(brain_name)
|
|
1341
|
+
|
|
1342
|
+
if not brain_path.exists():
|
|
1343
|
+
typer.secho(f"Brain '{brain_name}' not found.", fg=typer.colors.RED)
|
|
1344
|
+
raise typer.Exit(1)
|
|
1345
|
+
|
|
1346
|
+
storage = await PersistentStorage.load(brain_path)
|
|
1347
|
+
snapshot = await storage.export_brain(storage._current_brain_id)
|
|
1348
|
+
|
|
1349
|
+
# Filter sensitive content if requested
|
|
1350
|
+
neurons = snapshot.neurons
|
|
1351
|
+
excluded_count = 0
|
|
1352
|
+
|
|
1353
|
+
if exclude_sensitive:
|
|
1354
|
+
filtered_neurons = []
|
|
1355
|
+
excluded_neuron_ids = set()
|
|
1356
|
+
|
|
1357
|
+
for neuron in neurons:
|
|
1358
|
+
content = neuron.get("content", "")
|
|
1359
|
+
matches = check_sensitive_content(content, min_severity=2)
|
|
1360
|
+
if matches:
|
|
1361
|
+
excluded_neuron_ids.add(neuron["id"])
|
|
1362
|
+
excluded_count += 1
|
|
1363
|
+
else:
|
|
1364
|
+
filtered_neurons.append(neuron)
|
|
1365
|
+
|
|
1366
|
+
neurons = filtered_neurons
|
|
1367
|
+
|
|
1368
|
+
# Also filter synapses connected to excluded neurons
|
|
1369
|
+
synapses = [
|
|
1370
|
+
s
|
|
1371
|
+
for s in snapshot.synapses
|
|
1372
|
+
if s["source_id"] not in excluded_neuron_ids
|
|
1373
|
+
and s["target_id"] not in excluded_neuron_ids
|
|
1374
|
+
]
|
|
1375
|
+
|
|
1376
|
+
# Update fiber neuron references
|
|
1377
|
+
fibers = []
|
|
1378
|
+
for fiber in snapshot.fibers:
|
|
1379
|
+
fiber_neuron_ids = set(fiber.get("neuron_ids", []))
|
|
1380
|
+
if not fiber_neuron_ids.intersection(excluded_neuron_ids):
|
|
1381
|
+
fibers.append(fiber)
|
|
1382
|
+
else:
|
|
1383
|
+
synapses = snapshot.synapses
|
|
1384
|
+
fibers = snapshot.fibers
|
|
1385
|
+
|
|
1386
|
+
export_data = {
|
|
1387
|
+
"brain_id": snapshot.brain_id,
|
|
1388
|
+
"brain_name": snapshot.brain_name,
|
|
1389
|
+
"exported_at": snapshot.exported_at.isoformat(),
|
|
1390
|
+
"version": snapshot.version,
|
|
1391
|
+
"neurons": neurons,
|
|
1392
|
+
"synapses": synapses,
|
|
1393
|
+
"fibers": fibers,
|
|
1394
|
+
"config": snapshot.config,
|
|
1395
|
+
"metadata": snapshot.metadata,
|
|
1396
|
+
}
|
|
1397
|
+
|
|
1398
|
+
if output:
|
|
1399
|
+
with open(output, "w", encoding="utf-8") as f:
|
|
1400
|
+
json.dump(export_data, f, indent=2, default=str)
|
|
1401
|
+
typer.secho(f"Exported to: {output}", fg=typer.colors.GREEN)
|
|
1402
|
+
if excluded_count > 0:
|
|
1403
|
+
typer.secho(
|
|
1404
|
+
f"Excluded {excluded_count} neurons with sensitive content",
|
|
1405
|
+
fg=typer.colors.YELLOW,
|
|
1406
|
+
)
|
|
1407
|
+
else:
|
|
1408
|
+
typer.echo(json.dumps(export_data, indent=2, default=str))
|
|
1409
|
+
|
|
1410
|
+
asyncio.run(_export())
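# --- Illustrative sketch (not part of the package) --------------------------
# Reading a file produced by `nmem brain export -o backup.json`. The top-level
# keys mirror the export_data dict assembled above; the inner structure of the
# neuron/synapse/fiber records is not specified here.
import json

with open("backup.json", encoding="utf-8") as f:
    exported = json.load(f)

print(f"brain: {exported['brain_name']} (exported {exported['exported_at']})")
print(
    f"{len(exported['neurons'])} neurons, "
    f"{len(exported['synapses'])} synapses, "
    f"{len(exported['fibers'])} fibers"
)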
|
|
1411
|
+
|
|
1412
|
+
|
|
1413
|
+
@brain_app.command("import")
|
|
1414
|
+
def brain_import(
|
|
1415
|
+
file: Annotated[str, typer.Argument(help="JSON file to import")],
|
|
1416
|
+
name: Annotated[
|
|
1417
|
+
str | None, typer.Option("--name", "-n", help="Name for imported brain")
|
|
1418
|
+
] = None,
|
|
1419
|
+
use: Annotated[bool, typer.Option("--use", "-u", help="Switch to imported brain")] = True,
|
|
1420
|
+
scan_sensitive: Annotated[
|
|
1421
|
+
bool, typer.Option("--scan", help="Scan for sensitive content before importing")
|
|
1422
|
+
] = True,
|
|
1423
|
+
) -> None:
|
|
1424
|
+
"""Import brain from JSON file.
|
|
1425
|
+
|
|
1426
|
+
Examples:
|
|
1427
|
+
nmem brain import backup.json
|
|
1428
|
+
nmem brain import shared-brain.json --name shared
|
|
1429
|
+
nmem brain import untrusted.json --scan
|
|
1430
|
+
"""
|
|
1431
|
+
from neural_memory.core.brain import BrainSnapshot
|
|
1432
|
+
|
|
1433
|
+
async def _import() -> None:
|
|
1434
|
+
with open(file, encoding="utf-8") as f:
|
|
1435
|
+
data = json.load(f)
|
|
1436
|
+
|
|
1437
|
+
# Scan for sensitive content
|
|
1438
|
+
if scan_sensitive:
|
|
1439
|
+
sensitive_count = 0
|
|
1440
|
+
for neuron in data.get("neurons", []):
|
|
1441
|
+
content = neuron.get("content", "")
|
|
1442
|
+
matches = check_sensitive_content(content, min_severity=2)
|
|
1443
|
+
if matches:
|
|
1444
|
+
sensitive_count += 1
|
|
1445
|
+
|
|
1446
|
+
if sensitive_count > 0:
|
|
1447
|
+
typer.secho(
|
|
1448
|
+
f"[!] Found {sensitive_count} neurons with potentially sensitive content",
|
|
1449
|
+
fg=typer.colors.YELLOW,
|
|
1450
|
+
)
|
|
1451
|
+
if not typer.confirm("Continue importing?"):
|
|
1452
|
+
raise typer.Exit(0)
|
|
1453
|
+
|
|
1454
|
+
brain_name = name or data.get("brain_name", "imported")
|
|
1455
|
+
config = get_config()
|
|
1456
|
+
|
|
1457
|
+
if brain_name in config.list_brains():
|
|
1458
|
+
typer.secho(
|
|
1459
|
+
f"Brain '{brain_name}' already exists. Use --name to specify different name.",
|
|
1460
|
+
fg=typer.colors.RED,
|
|
1461
|
+
)
|
|
1462
|
+
raise typer.Exit(1)
|
|
1463
|
+
|
|
1464
|
+
# Create snapshot
|
|
1465
|
+
snapshot = BrainSnapshot(
|
|
1466
|
+
brain_id=data["brain_id"],
|
|
1467
|
+
brain_name=brain_name,
|
|
1468
|
+
exported_at=datetime.fromisoformat(data["exported_at"]),
|
|
1469
|
+
version=data["version"],
|
|
1470
|
+
neurons=data["neurons"],
|
|
1471
|
+
synapses=data["synapses"],
|
|
1472
|
+
fibers=data["fibers"],
|
|
1473
|
+
config=data.get("config", {}),
|
|
1474
|
+
metadata=data.get("metadata", {}),
|
|
1475
|
+
)
|
|
1476
|
+
|
|
1477
|
+
# Load/create storage and import
|
|
1478
|
+
brain_path = config.get_brain_path(brain_name)
|
|
1479
|
+
storage = await PersistentStorage.load(brain_path)
|
|
1480
|
+
await storage.import_brain(snapshot, storage._current_brain_id)
|
|
1481
|
+
await storage.save()
|
|
1482
|
+
|
|
1483
|
+
if use:
|
|
1484
|
+
config.current_brain = brain_name
|
|
1485
|
+
config.save()
|
|
1486
|
+
|
|
1487
|
+
typer.secho(f"Imported brain: {brain_name}", fg=typer.colors.GREEN)
|
|
1488
|
+
typer.echo(f" Neurons: {len(data['neurons'])}")
|
|
1489
|
+
typer.echo(f" Synapses: {len(data['synapses'])}")
|
|
1490
|
+
typer.echo(f" Fibers: {len(data['fibers'])}")
|
|
1491
|
+
|
|
1492
|
+
asyncio.run(_import())
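# --- Illustrative sketch (not part of the package) --------------------------
# A minimal document satisfying the keys _import() reads above: brain_id,
# brain_name, exported_at, version, neurons, synapses and fibers are required,
# while config and metadata fall back to {}. The id and version values are
# placeholders (assumptions); a real file would come from `nmem brain export`.
import json
from datetime import datetime

minimal_export = {
    "brain_id": "brain-0000",            # hypothetical identifier
    "brain_name": "demo",
    "exported_at": datetime.now().isoformat(),
    "version": "0.1.0",                  # assumed to match the exporting version
    "neurons": [],
    "synapses": [],
    "fibers": [],
}

with open("minimal.json", "w", encoding="utf-8") as f:
    json.dump(minimal_export, f, indent=2)

# then: nmem brain import minimal.json --name demo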
|
|
1493
|
+
|
|
1494
|
+
|
|
1495
|
+
@brain_app.command("delete")
|
|
1496
|
+
def brain_delete(
|
|
1497
|
+
name: Annotated[str, typer.Argument(help="Brain name to delete")],
|
|
1498
|
+
force: Annotated[bool, typer.Option("--force", "-f", help="Skip confirmation")] = False,
|
|
1499
|
+
) -> None:
|
|
1500
|
+
"""Delete a brain.
|
|
1501
|
+
|
|
1502
|
+
Examples:
|
|
1503
|
+
nmem brain delete old-brain
|
|
1504
|
+
nmem brain delete temp --force
|
|
1505
|
+
"""
|
|
1506
|
+
config = get_config()
|
|
1507
|
+
|
|
1508
|
+
if name not in config.list_brains():
|
|
1509
|
+
typer.secho(f"Brain '{name}' not found.", fg=typer.colors.RED)
|
|
1510
|
+
raise typer.Exit(1)
|
|
1511
|
+
|
|
1512
|
+
if name == config.current_brain:
|
|
1513
|
+
typer.secho(
|
|
1514
|
+
"Cannot delete current brain. Switch to another brain first.", fg=typer.colors.RED
|
|
1515
|
+
)
|
|
1516
|
+
raise typer.Exit(1)
|
|
1517
|
+
|
|
1518
|
+
if not force:
|
|
1519
|
+
confirm = typer.confirm(f"Delete brain '{name}'? This cannot be undone.")
|
|
1520
|
+
if not confirm:
|
|
1521
|
+
typer.echo("Cancelled.")
|
|
1522
|
+
return
|
|
1523
|
+
|
|
1524
|
+
brain_path = config.get_brain_path(name)
|
|
1525
|
+
brain_path.unlink()
|
|
1526
|
+
typer.secho(f"Deleted brain: {name}", fg=typer.colors.GREEN)
|
|
1527
|
+
|
|
1528
|
+
|
|
1529
|
+
@brain_app.command("health")
|
|
1530
|
+
def brain_health(
|
|
1531
|
+
name: Annotated[
|
|
1532
|
+
str | None, typer.Option("--name", "-n", help="Brain name (default: current)")
|
|
1533
|
+
] = None,
|
|
1534
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
1535
|
+
) -> None:
|
|
1536
|
+
"""Check brain health (freshness, sensitive content).
|
|
1537
|
+
|
|
1538
|
+
Examples:
|
|
1539
|
+
nmem brain health
|
|
1540
|
+
nmem brain health --name work --json
|
|
1541
|
+
"""
|
|
1542
|
+
|
|
1543
|
+
async def _health() -> dict:
|
|
1544
|
+
config = get_config()
|
|
1545
|
+
brain_name = name or config.current_brain
|
|
1546
|
+
brain_path = config.get_brain_path(brain_name)
|
|
1547
|
+
|
|
1548
|
+
if not brain_path.exists():
|
|
1549
|
+
return {"error": f"Brain '{brain_name}' not found."}
|
|
1550
|
+
|
|
1551
|
+
storage = await PersistentStorage.load(brain_path)
|
|
1552
|
+
brain = await storage.get_brain(storage._current_brain_id)
|
|
1553
|
+
|
|
1554
|
+
if not brain:
|
|
1555
|
+
return {"error": "No brain configured"}
|
|
1556
|
+
|
|
1557
|
+
# Get all neurons to check
|
|
1558
|
+
neurons = list(storage._neurons[storage._current_brain_id].values())
|
|
1559
|
+
fibers = await storage.get_fibers(limit=10000)
|
|
1560
|
+
|
|
1561
|
+
# Check for sensitive content
|
|
1562
|
+
sensitive_neurons = []
|
|
1563
|
+
for neuron in neurons:
|
|
1564
|
+
matches = check_sensitive_content(neuron.content, min_severity=2)
|
|
1565
|
+
if matches:
|
|
1566
|
+
sensitive_neurons.append(
|
|
1567
|
+
{
|
|
1568
|
+
"id": neuron.id,
|
|
1569
|
+
"type": neuron.type.value,
|
|
1570
|
+
"sensitive_types": [m.type.value for m in matches],
|
|
1571
|
+
}
|
|
1572
|
+
)
|
|
1573
|
+
|
|
1574
|
+
# Analyze freshness
|
|
1575
|
+
created_dates = [f.created_at for f in fibers]
|
|
1576
|
+
freshness_report = analyze_freshness(created_dates)
|
|
1577
|
+
|
|
1578
|
+
# Calculate health score (0-100)
|
|
1579
|
+
health_score = 100
|
|
1580
|
+
issues = []
|
|
1581
|
+
|
|
1582
|
+
# Penalize for sensitive content
|
|
1583
|
+
if sensitive_neurons:
|
|
1584
|
+
penalty = min(30, len(sensitive_neurons) * 5)
|
|
1585
|
+
health_score -= penalty
|
|
1586
|
+
issues.append(f"{len(sensitive_neurons)} neurons with sensitive content")
|
|
1587
|
+
|
|
1588
|
+
# Penalize for stale memories
|
|
1589
|
+
stale_ratio = (freshness_report.stale + freshness_report.ancient) / max(
|
|
1590
|
+
1, freshness_report.total
|
|
1591
|
+
)
|
|
1592
|
+
if stale_ratio > 0.5:
|
|
1593
|
+
health_score -= 20
|
|
1594
|
+
issues.append(f"{stale_ratio * 100:.0f}% of memories are stale/ancient")
|
|
1595
|
+
elif stale_ratio > 0.2:
|
|
1596
|
+
health_score -= 10
|
|
1597
|
+
issues.append(f"{stale_ratio * 100:.0f}% of memories are stale/ancient")
|
|
1598
|
+
|
|
1599
|
+
health_score = max(0, health_score)
|
|
1600
|
+
|
|
1601
|
+
return {
|
|
1602
|
+
"brain": brain_name,
|
|
1603
|
+
"health_score": health_score,
|
|
1604
|
+
"issues": issues,
|
|
1605
|
+
"sensitive_content": {
|
|
1606
|
+
"count": len(sensitive_neurons),
|
|
1607
|
+
"neurons": sensitive_neurons[:5], # Show first 5
|
|
1608
|
+
},
|
|
1609
|
+
"freshness": {
|
|
1610
|
+
"total": freshness_report.total,
|
|
1611
|
+
"fresh": freshness_report.fresh,
|
|
1612
|
+
"recent": freshness_report.recent,
|
|
1613
|
+
"aging": freshness_report.aging,
|
|
1614
|
+
"stale": freshness_report.stale,
|
|
1615
|
+
"ancient": freshness_report.ancient,
|
|
1616
|
+
},
|
|
1617
|
+
}
|
|
1618
|
+
|
|
1619
|
+
result = asyncio.run(_health())
|
|
1620
|
+
|
|
1621
|
+
if json_output:
|
|
1622
|
+
output_result(result, True)
|
|
1623
|
+
else:
|
|
1624
|
+
if "error" in result:
|
|
1625
|
+
typer.secho(result["error"], fg=typer.colors.RED)
|
|
1626
|
+
return
|
|
1627
|
+
|
|
1628
|
+
score = result["health_score"]
|
|
1629
|
+
if score >= 80:
|
|
1630
|
+
color = typer.colors.GREEN
|
|
1631
|
+
indicator = "[OK]"
|
|
1632
|
+
elif score >= 50:
|
|
1633
|
+
color = typer.colors.YELLOW
|
|
1634
|
+
indicator = "[~]"
|
|
1635
|
+
else:
|
|
1636
|
+
color = typer.colors.RED
|
|
1637
|
+
indicator = "[!!]"
|
|
1638
|
+
|
|
1639
|
+
typer.echo(f"\nBrain: {result['brain']}")
|
|
1640
|
+
typer.secho(f"Health Score: {indicator} {score}/100", fg=color)
|
|
1641
|
+
|
|
1642
|
+
if result["issues"]:
|
|
1643
|
+
typer.echo("\nIssues:")
|
|
1644
|
+
for issue in result["issues"]:
|
|
1645
|
+
typer.secho(f" [!] {issue}", fg=typer.colors.YELLOW)
|
|
1646
|
+
|
|
1647
|
+
if result["sensitive_content"]["count"] > 0:
|
|
1648
|
+
typer.echo(f"\nSensitive content: {result['sensitive_content']['count']} neurons")
|
|
1649
|
+
typer.secho(
|
|
1650
|
+
" Run 'nmem brain export --exclude-sensitive' for safe export",
|
|
1651
|
+
fg=typer.colors.BRIGHT_BLACK,
|
|
1652
|
+
)
|
|
1653
|
+
|
|
1654
|
+
f = result["freshness"]
|
|
1655
|
+
if f["total"] > 0:
|
|
1656
|
+
typer.echo(f"\nMemory freshness ({f['total']} total):")
|
|
1657
|
+
typer.echo(f" [+] Fresh/Recent: {f['fresh'] + f['recent']}")
|
|
1658
|
+
typer.echo(f" [~] Aging: {f['aging']}")
|
|
1659
|
+
typer.echo(f" [!!] Stale/Ancient: {f['stale'] + f['ancient']}")
|
|
1660
|
+
|
|
1661
|
+
|
|
1662
|
+
# =============================================================================
|
|
1663
|
+
# Project Commands
|
|
1664
|
+
# =============================================================================
|
|
1665
|
+
|
|
1666
|
+
|
|
1667
|
+
@project_app.command("create")
|
|
1668
|
+
def project_create(
|
|
1669
|
+
name: Annotated[str, typer.Argument(help="Project name")],
|
|
1670
|
+
description: Annotated[
|
|
1671
|
+
str | None,
|
|
1672
|
+
typer.Option("--description", "-d", help="Project description"),
|
|
1673
|
+
] = None,
|
|
1674
|
+
duration: Annotated[
|
|
1675
|
+
int | None,
|
|
1676
|
+
typer.Option("--duration", "-D", help="Duration in days (creates end date)"),
|
|
1677
|
+
] = None,
|
|
1678
|
+
tags: Annotated[
|
|
1679
|
+
list[str] | None,
|
|
1680
|
+
typer.Option("--tag", "-t", help="Project tags"),
|
|
1681
|
+
] = None,
|
|
1682
|
+
priority: Annotated[
|
|
1683
|
+
float,
|
|
1684
|
+
typer.Option("--priority", "-p", help="Project priority (default: 1.0)"),
|
|
1685
|
+
] = 1.0,
|
|
1686
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
1687
|
+
) -> None:
|
|
1688
|
+
"""Create a new project for organizing memories.
|
|
1689
|
+
|
|
1690
|
+
Projects group related memories and enable time-bounded retrieval.
|
|
1691
|
+
|
|
1692
|
+
Examples:
|
|
1693
|
+
nmem project create "Q1 Sprint"
|
|
1694
|
+
nmem project create "Auth Refactor" --duration 14 --tag backend
|
|
1695
|
+
nmem project create "Research" -d "ML exploration" --priority 2.0
|
|
1696
|
+
"""
|
|
1697
|
+
|
|
1698
|
+
async def _create() -> dict:
|
|
1699
|
+
config = get_config()
|
|
1700
|
+
storage = await get_storage(config)
|
|
1701
|
+
|
|
1702
|
+
# Check if project with same name exists
|
|
1703
|
+
existing = await storage.get_project_by_name(name)
|
|
1704
|
+
if existing:
|
|
1705
|
+
return {"error": f"Project '{name}' already exists."}
|
|
1706
|
+
|
|
1707
|
+
project = Project.create(
|
|
1708
|
+
name=name,
|
|
1709
|
+
description=description or "",
|
|
1710
|
+
duration_days=duration,
|
|
1711
|
+
tags=set(tags) if tags else None,
|
|
1712
|
+
priority=priority,
|
|
1713
|
+
)
|
|
1714
|
+
|
|
1715
|
+
await storage.add_project(project)
|
|
1716
|
+
await storage.batch_save()
|
|
1717
|
+
|
|
1718
|
+
response = {
|
|
1719
|
+
"message": f"Created project: {name}",
|
|
1720
|
+
"project_id": project.id,
|
|
1721
|
+
"name": project.name,
|
|
1722
|
+
"is_ongoing": project.is_ongoing,
|
|
1723
|
+
}
|
|
1724
|
+
|
|
1725
|
+
if project.end_date:
|
|
1726
|
+
response["ends_in_days"] = project.days_remaining
|
|
1727
|
+
response["end_date"] = project.end_date.isoformat()
|
|
1728
|
+
|
|
1729
|
+
if tags:
|
|
1730
|
+
response["tags"] = tags
|
|
1731
|
+
|
|
1732
|
+
return response
|
|
1733
|
+
|
|
1734
|
+
result = asyncio.run(_create())
|
|
1735
|
+
|
|
1736
|
+
if json_output:
|
|
1737
|
+
output_result(result, True)
|
|
1738
|
+
else:
|
|
1739
|
+
if "error" in result:
|
|
1740
|
+
typer.secho(result["error"], fg=typer.colors.RED)
|
|
1741
|
+
return
|
|
1742
|
+
|
|
1743
|
+
typer.secho(result["message"], fg=typer.colors.GREEN)
|
|
1744
|
+
if result.get("ends_in_days") is not None:
|
|
1745
|
+
typer.secho(f" Ends in {result['ends_in_days']} days", fg=typer.colors.BRIGHT_BLACK)
|
|
1746
|
+
else:
|
|
1747
|
+
typer.secho(" Ongoing (no end date)", fg=typer.colors.BRIGHT_BLACK)
|
|
1748
|
+
|
|
1749
|
+
|
|
1750
|
+
@project_app.command("list")
|
|
1751
|
+
def project_list(
|
|
1752
|
+
active_only: Annotated[
|
|
1753
|
+
bool,
|
|
1754
|
+
typer.Option("--active", "-a", help="Show only active projects"),
|
|
1755
|
+
] = False,
|
|
1756
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
1757
|
+
) -> None:
|
|
1758
|
+
"""List all projects.
|
|
1759
|
+
|
|
1760
|
+
Examples:
|
|
1761
|
+
nmem project list
|
|
1762
|
+
nmem project list --active
|
|
1763
|
+
"""
|
|
1764
|
+
|
|
1765
|
+
async def _list() -> dict:
|
|
1766
|
+
config = get_config()
|
|
1767
|
+
storage = await get_storage(config)
|
|
1768
|
+
|
|
1769
|
+
projects = await storage.list_projects(active_only=active_only)
|
|
1770
|
+
|
|
1771
|
+
projects_data = []
|
|
1772
|
+
for proj in projects:
|
|
1773
|
+
# Count memories in project
|
|
1774
|
+
memories = await storage.get_project_memories(proj.id)
|
|
1775
|
+
|
|
1776
|
+
proj_data = {
|
|
1777
|
+
"id": proj.id,
|
|
1778
|
+
"name": proj.name,
|
|
1779
|
+
"description": proj.description,
|
|
1780
|
+
"is_active": proj.is_active,
|
|
1781
|
+
"is_ongoing": proj.is_ongoing,
|
|
1782
|
+
"memory_count": len(memories),
|
|
1783
|
+
"priority": proj.priority,
|
|
1784
|
+
"tags": list(proj.tags),
|
|
1785
|
+
"start_date": proj.start_date.isoformat(),
|
|
1786
|
+
"end_date": proj.end_date.isoformat() if proj.end_date else None,
|
|
1787
|
+
}
|
|
1788
|
+
|
|
1789
|
+
if proj.end_date and proj.is_active:
|
|
1790
|
+
proj_data["days_remaining"] = proj.days_remaining
|
|
1791
|
+
|
|
1792
|
+
projects_data.append(proj_data)
|
|
1793
|
+
|
|
1794
|
+
return {
|
|
1795
|
+
"projects": projects_data,
|
|
1796
|
+
"count": len(projects_data),
|
|
1797
|
+
"filter": "active" if active_only else "all",
|
|
1798
|
+
}
|
|
1799
|
+
|
|
1800
|
+
result = asyncio.run(_list())
|
|
1801
|
+
|
|
1802
|
+
if json_output:
|
|
1803
|
+
output_result(result, True)
|
|
1804
|
+
else:
|
|
1805
|
+
projects = result.get("projects", [])
|
|
1806
|
+
if not projects:
|
|
1807
|
+
typer.echo("No projects found. Create one with: nmem project create <name>")
|
|
1808
|
+
return
|
|
1809
|
+
|
|
1810
|
+
typer.secho(f"Projects ({result['count']})", fg=typer.colors.CYAN, bold=True)
|
|
1811
|
+
typer.echo("-" * 50)
|
|
1812
|
+
|
|
1813
|
+
for proj in projects:
|
|
1814
|
+
# Status indicator
|
|
1815
|
+
if proj["is_active"]:
|
|
1816
|
+
status = typer.style("[ACTIVE]", fg=typer.colors.GREEN)
|
|
1817
|
+
else:
|
|
1818
|
+
status = typer.style("[ENDED]", fg=typer.colors.BRIGHT_BLACK)
|
|
1819
|
+
|
|
1820
|
+
typer.echo(f"{status} {proj['name']}")
|
|
1821
|
+
|
|
1822
|
+
# Details line
|
|
1823
|
+
details = []
|
|
1824
|
+
details.append(f"{proj['memory_count']} memories")
|
|
1825
|
+
if proj.get("days_remaining") is not None:
|
|
1826
|
+
details.append(f"{proj['days_remaining']}d remaining")
|
|
1827
|
+
elif proj["is_ongoing"]:
|
|
1828
|
+
details.append("ongoing")
|
|
1829
|
+
|
|
1830
|
+
if proj["tags"]:
|
|
1831
|
+
details.append(f"tags: {', '.join(proj['tags'])}")
|
|
1832
|
+
|
|
1833
|
+
typer.secho(f" {' | '.join(details)}", fg=typer.colors.BRIGHT_BLACK)
|
|
1834
|
+
|
|
1835
|
+
typer.echo("-" * 50)
|
|
1836
|
+
|
|
1837
|
+
|
|
1838
|
+
@project_app.command("show")
|
|
1839
|
+
def project_show(
|
|
1840
|
+
name: Annotated[str, typer.Argument(help="Project name")],
|
|
1841
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
1842
|
+
) -> None:
|
|
1843
|
+
"""Show project details and its memories.
|
|
1844
|
+
|
|
1845
|
+
Examples:
|
|
1846
|
+
nmem project show "Q1 Sprint"
|
|
1847
|
+
"""
|
|
1848
|
+
|
|
1849
|
+
async def _show() -> dict:
|
|
1850
|
+
config = get_config()
|
|
1851
|
+
storage = await get_storage(config)
|
|
1852
|
+
|
|
1853
|
+
proj = await storage.get_project_by_name(name)
|
|
1854
|
+
if not proj:
|
|
1855
|
+
return {"error": f"Project '{name}' not found."}
|
|
1856
|
+
|
|
1857
|
+
# Get memories in project
|
|
1858
|
+
memories = await storage.get_project_memories(proj.id)
|
|
1859
|
+
|
|
1860
|
+
# Get memory content
|
|
1861
|
+
memories_data = []
|
|
1862
|
+
for tm in memories[:20]: # Limit to 20
|
|
1863
|
+
fiber = await storage.get_fiber(tm.fiber_id)
|
|
1864
|
+
content = ""
|
|
1865
|
+
if fiber:
|
|
1866
|
+
if fiber.summary:
|
|
1867
|
+
content = fiber.summary
|
|
1868
|
+
elif fiber.anchor_neuron_id:
|
|
1869
|
+
anchor = await storage.get_neuron(fiber.anchor_neuron_id)
|
|
1870
|
+
if anchor:
|
|
1871
|
+
content = anchor.content
|
|
1872
|
+
|
|
1873
|
+
memories_data.append(
|
|
1874
|
+
{
|
|
1875
|
+
"type": tm.memory_type.value,
|
|
1876
|
+
"priority": tm.priority.name.lower(),
|
|
1877
|
+
"content": content[:80] + "..." if len(content) > 80 else content,
|
|
1878
|
+
"created_at": tm.created_at.isoformat(),
|
|
1879
|
+
}
|
|
1880
|
+
)
|
|
1881
|
+
|
|
1882
|
+
# Count by type
|
|
1883
|
+
type_counts: dict[str, int] = {}
|
|
1884
|
+
for tm in memories:
|
|
1885
|
+
type_name = tm.memory_type.value
|
|
1886
|
+
type_counts[type_name] = type_counts.get(type_name, 0) + 1
|
|
1887
|
+
|
|
1888
|
+
return {
|
|
1889
|
+
"project": {
|
|
1890
|
+
"id": proj.id,
|
|
1891
|
+
"name": proj.name,
|
|
1892
|
+
"description": proj.description,
|
|
1893
|
+
"is_active": proj.is_active,
|
|
1894
|
+
"is_ongoing": proj.is_ongoing,
|
|
1895
|
+
"priority": proj.priority,
|
|
1896
|
+
"tags": list(proj.tags),
|
|
1897
|
+
"start_date": proj.start_date.isoformat(),
|
|
1898
|
+
"end_date": proj.end_date.isoformat() if proj.end_date else None,
|
|
1899
|
+
"days_remaining": proj.days_remaining,
|
|
1900
|
+
"duration_days": proj.duration_days,
|
|
1901
|
+
},
|
|
1902
|
+
"memory_count": len(memories),
|
|
1903
|
+
"by_type": type_counts,
|
|
1904
|
+
"recent_memories": memories_data,
|
|
1905
|
+
}
|
|
1906
|
+
|
|
1907
|
+
result = asyncio.run(_show())
|
|
1908
|
+
|
|
1909
|
+
if json_output:
|
|
1910
|
+
output_result(result, True)
|
|
1911
|
+
else:
|
|
1912
|
+
if "error" in result:
|
|
1913
|
+
typer.secho(result["error"], fg=typer.colors.RED)
|
|
1914
|
+
return
|
|
1915
|
+
|
|
1916
|
+
proj = result["project"]
|
|
1917
|
+
typer.secho(f"\nProject: {proj['name']}", fg=typer.colors.CYAN, bold=True)
|
|
1918
|
+
|
|
1919
|
+
if proj["description"]:
|
|
1920
|
+
typer.echo(f" {proj['description']}")
|
|
1921
|
+
|
|
1922
|
+
typer.echo("")
|
|
1923
|
+
|
|
1924
|
+
# Status
|
|
1925
|
+
if proj["is_active"]:
|
|
1926
|
+
typer.secho(" Status: ACTIVE", fg=typer.colors.GREEN)
|
|
1927
|
+
else:
|
|
1928
|
+
typer.secho(" Status: ENDED", fg=typer.colors.BRIGHT_BLACK)
|
|
1929
|
+
|
|
1930
|
+
# Timeline
|
|
1931
|
+
typer.echo(f" Started: {proj['start_date'][:10]}")
|
|
1932
|
+
if proj["end_date"]:
|
|
1933
|
+
typer.echo(f" Ends: {proj['end_date'][:10]}")
|
|
1934
|
+
if proj["days_remaining"] is not None and proj["days_remaining"] > 0:
|
|
1935
|
+
typer.echo(f" Days remaining: {proj['days_remaining']}")
|
|
1936
|
+
else:
|
|
1937
|
+
typer.echo(" End: ongoing")
|
|
1938
|
+
|
|
1939
|
+
# Tags
|
|
1940
|
+
if proj["tags"]:
|
|
1941
|
+
typer.echo(f" Tags: {', '.join(proj['tags'])}")
|
|
1942
|
+
|
|
1943
|
+
typer.echo("")
|
|
1944
|
+
|
|
1945
|
+
# Memory stats
|
|
1946
|
+
typer.secho(f" Memories: {result['memory_count']}", fg=typer.colors.WHITE, bold=True)
|
|
1947
|
+
if result["by_type"]:
|
|
1948
|
+
for mem_type, count in sorted(result["by_type"].items(), key=lambda x: -x[1]):
|
|
1949
|
+
typer.echo(f" {mem_type}: {count}")
|
|
1950
|
+
|
|
1951
|
+
# Recent memories
|
|
1952
|
+
if result["recent_memories"]:
|
|
1953
|
+
typer.echo("")
|
|
1954
|
+
typer.secho(" Recent:", fg=typer.colors.WHITE, bold=True)
|
|
1955
|
+
for mem in result["recent_memories"][:5]:
|
|
1956
|
+
type_badge = f"[{mem['type'][:4].upper()}]"
|
|
1957
|
+
typer.echo(f" {type_badge} {mem['content']}")
|
|
1958
|
+
|
|
1959
|
+
|
|
1960
|
+
@project_app.command("delete")
|
|
1961
|
+
def project_delete(
|
|
1962
|
+
name: Annotated[str, typer.Argument(help="Project name to delete")],
|
|
1963
|
+
force: Annotated[bool, typer.Option("--force", "-f", help="Skip confirmation")] = False,
|
|
1964
|
+
) -> None:
|
|
1965
|
+
"""Delete a project (memories are preserved but unlinked).
|
|
1966
|
+
|
|
1967
|
+
Examples:
|
|
1968
|
+
nmem project delete "Old Project"
|
|
1969
|
+
nmem project delete "Temp" --force
|
|
1970
|
+
"""
|
|
1971
|
+
|
|
1972
|
+
async def _delete() -> dict:
|
|
1973
|
+
config = get_config()
|
|
1974
|
+
storage = await get_storage(config)
|
|
1975
|
+
|
|
1976
|
+
proj = await storage.get_project_by_name(name)
|
|
1977
|
+
if not proj:
|
|
1978
|
+
return {"error": f"Project '{name}' not found."}
|
|
1979
|
+
|
|
1980
|
+
# Count memories
|
|
1981
|
+
memories = await storage.get_project_memories(proj.id)
|
|
1982
|
+
|
|
1983
|
+
deleted = await storage.delete_project(proj.id)
|
|
1984
|
+
if deleted:
|
|
1985
|
+
await storage.batch_save()
|
|
1986
|
+
return {
|
|
1987
|
+
"message": f"Deleted project: {name}",
|
|
1988
|
+
"memories_preserved": len(memories),
|
|
1989
|
+
}
|
|
1990
|
+
else:
|
|
1991
|
+
return {"error": "Failed to delete project."}
|
|
1992
|
+
|
|
1993
|
+
# Confirmation
|
|
1994
|
+
if not force:
|
|
1995
|
+
|
|
1996
|
+
async def _preview() -> int:
|
|
1997
|
+
config = get_config()
|
|
1998
|
+
storage = await get_storage(config)
|
|
1999
|
+
proj = await storage.get_project_by_name(name)
|
|
2000
|
+
if not proj:
|
|
2001
|
+
return -1
|
|
2002
|
+
memories = await storage.get_project_memories(proj.id)
|
|
2003
|
+
return len(memories)
|
|
2004
|
+
|
|
2005
|
+
count = asyncio.run(_preview())
|
|
2006
|
+
if count < 0:
|
|
2007
|
+
typer.secho(f"Project '{name}' not found.", fg=typer.colors.RED)
|
|
2008
|
+
return
|
|
2009
|
+
|
|
2010
|
+
msg = f"Delete project '{name}'?"
|
|
2011
|
+
if count > 0:
|
|
2012
|
+
msg += f" ({count} memories will be preserved but unlinked)"
|
|
2013
|
+
if not typer.confirm(msg):
|
|
2014
|
+
typer.echo("Cancelled.")
|
|
2015
|
+
return
|
|
2016
|
+
|
|
2017
|
+
result = asyncio.run(_delete())
|
|
2018
|
+
|
|
2019
|
+
if "error" in result:
|
|
2020
|
+
typer.secho(result["error"], fg=typer.colors.RED)
|
|
2021
|
+
else:
|
|
2022
|
+
typer.secho(result["message"], fg=typer.colors.GREEN)
|
|
2023
|
+
if result.get("memories_preserved", 0) > 0:
|
|
2024
|
+
typer.secho(
|
|
2025
|
+
f" {result['memories_preserved']} memories preserved (use 'nmem list' to see them)",
|
|
2026
|
+
fg=typer.colors.BRIGHT_BLACK,
|
|
2027
|
+
)
|
|
2028
|
+
|
|
2029
|
+
|
|
2030
|
+
@project_app.command("extend")
|
|
2031
|
+
def project_extend(
|
|
2032
|
+
name: Annotated[str, typer.Argument(help="Project name")],
|
|
2033
|
+
days: Annotated[int, typer.Argument(help="Days to extend by")],
|
|
2034
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
2035
|
+
) -> None:
|
|
2036
|
+
"""Extend a project's deadline.
|
|
2037
|
+
|
|
2038
|
+
Examples:
|
|
2039
|
+
nmem project extend "Q1 Sprint" 7
|
|
2040
|
+
"""
|
|
2041
|
+
|
|
2042
|
+
async def _extend() -> dict:
|
|
2043
|
+
config = get_config()
|
|
2044
|
+
storage = await get_storage(config)
|
|
2045
|
+
|
|
2046
|
+
proj = await storage.get_project_by_name(name)
|
|
2047
|
+
if not proj:
|
|
2048
|
+
return {"error": f"Project '{name}' not found."}
|
|
2049
|
+
|
|
2050
|
+
if proj.is_ongoing:
|
|
2051
|
+
return {"error": "Cannot extend ongoing project - it has no end date."}
|
|
2052
|
+
|
|
2053
|
+
try:
|
|
2054
|
+
updated = proj.with_extended_deadline(days)
|
|
2055
|
+
await storage.update_project(updated)
|
|
2056
|
+
await storage.batch_save()
|
|
2057
|
+
|
|
2058
|
+
return {
|
|
2059
|
+
"message": f"Extended '{name}' by {days} days",
|
|
2060
|
+
"new_end_date": updated.end_date.isoformat() if updated.end_date else None,
|
|
2061
|
+
"days_remaining": updated.days_remaining,
|
|
2062
|
+
}
|
|
2063
|
+
except ValueError as e:
|
|
2064
|
+
return {"error": str(e)}
|
|
2065
|
+
|
|
2066
|
+
result = asyncio.run(_extend())
|
|
2067
|
+
|
|
2068
|
+
if json_output:
|
|
2069
|
+
output_result(result, True)
|
|
2070
|
+
else:
|
|
2071
|
+
if "error" in result:
|
|
2072
|
+
typer.secho(result["error"], fg=typer.colors.RED)
|
|
2073
|
+
else:
|
|
2074
|
+
typer.secho(result["message"], fg=typer.colors.GREEN)
|
|
2075
|
+
if result.get("days_remaining") is not None:
|
|
2076
|
+
typer.secho(
|
|
2077
|
+
f" New deadline: {result['new_end_date'][:10]} ({result['days_remaining']} days remaining)",
|
|
2078
|
+
fg=typer.colors.BRIGHT_BLACK,
|
|
2079
|
+
)
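# --- Illustrative sketch (not part of the package) --------------------------
# The domain calls behind this command, using only APIs exercised in this file:
# Project.create() with a duration, then with_extended_deadline() to push the
# end date out. The printed numbers depend on the current date.
from neural_memory.core.project import Project

proj = Project.create(
    name="Q1 Sprint", description="", duration_days=14, tags=None, priority=1.0
)
extended = proj.with_extended_deadline(7)
print(proj.days_remaining, "->", extended.days_remaining)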
|
|
2080
|
+
|
|
2081
|
+
|
|
2082
|
+
# =============================================================================
|
|
2083
|
+
# Shared Mode Commands
|
|
2084
|
+
# =============================================================================
|
|
2085
|
+
|
|
2086
|
+
|
|
2087
|
+
@shared_app.command("enable")
|
|
2088
|
+
def shared_enable(
|
|
2089
|
+
server_url: Annotated[
|
|
2090
|
+
str, typer.Argument(help="NeuralMemory server URL (e.g., http://localhost:8000)")
|
|
2091
|
+
],
|
|
2092
|
+
api_key: Annotated[
|
|
2093
|
+
str | None, typer.Option("--api-key", "-k", help="API key for authentication")
|
|
2094
|
+
] = None,
|
|
2095
|
+
timeout: Annotated[
|
|
2096
|
+
float, typer.Option("--timeout", "-t", help="Request timeout in seconds")
|
|
2097
|
+
] = 30.0,
|
|
2098
|
+
) -> None:
|
|
2099
|
+
"""Enable shared mode to connect to a remote NeuralMemory server.
|
|
2100
|
+
|
|
2101
|
+
When shared mode is enabled, all memory operations (remember, recall, etc.)
|
|
2102
|
+
will use the remote server instead of local storage.
|
|
2103
|
+
|
|
2104
|
+
Examples:
|
|
2105
|
+
nmem shared enable http://localhost:8000
|
|
2106
|
+
nmem shared enable https://memory.example.com --api-key mykey
|
|
2107
|
+
nmem shared enable http://server:8000 --timeout 60
|
|
2108
|
+
"""
|
|
2109
|
+
config = get_config()
|
|
2110
|
+
config.shared.enabled = True
|
|
2111
|
+
config.shared.server_url = server_url.rstrip("/")
|
|
2112
|
+
config.shared.api_key = api_key
|
|
2113
|
+
config.shared.timeout = timeout
|
|
2114
|
+
config.save()
|
|
2115
|
+
|
|
2116
|
+
typer.secho("Shared mode enabled!", fg=typer.colors.GREEN)
|
|
2117
|
+
typer.echo(f" Server: {config.shared.server_url}")
|
|
2118
|
+
if api_key:
|
|
2119
|
+
typer.echo(f" API Key: {'*' * 8}...{api_key[-4:] if len(api_key) > 4 else '****'}")
|
|
2120
|
+
typer.echo(f" Timeout: {timeout}s")
|
|
2121
|
+
typer.echo("")
|
|
2122
|
+
typer.secho("All memory commands will now use the remote server.", fg=typer.colors.BRIGHT_BLACK)
|
|
2123
|
+
typer.secho(
|
|
2124
|
+
"Use 'nmem shared disable' to switch back to local storage.", fg=typer.colors.BRIGHT_BLACK
|
|
2125
|
+
)
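# --- Illustrative sketch (not part of the package) --------------------------
# One way a storage factory *could* honour the flag set above; the real
# get_storage() lives elsewhere in this module, so treat this as an assumption.
# SharedStorage is constructed with the same arguments used by 'shared sync'
# further below; PersistentStorage is the local backend used throughout.
from neural_memory.cli.storage import PersistentStorage


async def pick_storage(config):
    if config.shared.enabled:
        from neural_memory.storage.shared_store import SharedStorage

        remote = SharedStorage(
            server_url=config.shared.server_url,
            brain_id=config.current_brain,
            api_key=config.shared.api_key,
            timeout=config.shared.timeout,
        )
        await remote.connect()
        return remote
    return await PersistentStorage.load(config.get_brain_path())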
|
|
2126
|
+
|
|
2127
|
+
|
|
2128
|
+
@shared_app.command("disable")
|
|
2129
|
+
def shared_disable() -> None:
|
|
2130
|
+
"""Disable shared mode and use local storage.
|
|
2131
|
+
|
|
2132
|
+
Examples:
|
|
2133
|
+
nmem shared disable
|
|
2134
|
+
"""
|
|
2135
|
+
config = get_config()
|
|
2136
|
+
config.shared.enabled = False
|
|
2137
|
+
config.save()
|
|
2138
|
+
|
|
2139
|
+
typer.secho("Shared mode disabled.", fg=typer.colors.GREEN)
|
|
2140
|
+
typer.echo("Memory commands will now use local storage.")
|
|
2141
|
+
|
|
2142
|
+
|
|
2143
|
+
@shared_app.command("status")
|
|
2144
|
+
def shared_status(
|
|
2145
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
2146
|
+
) -> None:
|
|
2147
|
+
"""Show shared mode status and configuration.
|
|
2148
|
+
|
|
2149
|
+
Examples:
|
|
2150
|
+
nmem shared status
|
|
2151
|
+
nmem shared status --json
|
|
2152
|
+
"""
|
|
2153
|
+
config = get_config()
|
|
2154
|
+
|
|
2155
|
+
status = {
|
|
2156
|
+
"enabled": config.shared.enabled,
|
|
2157
|
+
"server_url": config.shared.server_url,
|
|
2158
|
+
"api_key_set": config.shared.api_key is not None,
|
|
2159
|
+
"timeout": config.shared.timeout,
|
|
2160
|
+
}
|
|
2161
|
+
|
|
2162
|
+
if json_output:
|
|
2163
|
+
typer.echo(json.dumps(status, indent=2))
|
|
2164
|
+
else:
|
|
2165
|
+
if config.shared.enabled:
|
|
2166
|
+
typer.secho("[ENABLED] Shared mode is active", fg=typer.colors.GREEN)
|
|
2167
|
+
else:
|
|
2168
|
+
typer.secho("[DISABLED] Using local storage", fg=typer.colors.YELLOW)
|
|
2169
|
+
|
|
2170
|
+
typer.echo(f"\nServer URL: {config.shared.server_url}")
|
|
2171
|
+
typer.echo(f"API Key: {'configured' if config.shared.api_key else 'not set'}")
|
|
2172
|
+
typer.echo(f"Timeout: {config.shared.timeout}s")
|
|
2173
|
+
|
|
2174
|
+
|
|
2175
|
+
@shared_app.command("test")
|
|
2176
|
+
def shared_test() -> None:
|
|
2177
|
+
"""Test connection to the shared server.
|
|
2178
|
+
|
|
2179
|
+
Examples:
|
|
2180
|
+
nmem shared test
|
|
2181
|
+
"""
|
|
2182
|
+
config = get_config()
|
|
2183
|
+
|
|
2184
|
+
if not config.shared.server_url:
|
|
2185
|
+
typer.secho(
|
|
2186
|
+
"No server URL configured. Use 'nmem shared enable <url>' first.", fg=typer.colors.RED
|
|
2187
|
+
)
|
|
2188
|
+
raise typer.Exit(1)
|
|
2189
|
+
|
|
2190
|
+
async def _test() -> dict:
|
|
2191
|
+
import aiohttp
|
|
2192
|
+
|
|
2193
|
+
url = f"{config.shared.server_url}/health"
|
|
2194
|
+
headers = {}
|
|
2195
|
+
if config.shared.api_key:
|
|
2196
|
+
headers["Authorization"] = f"Bearer {config.shared.api_key}"
|
|
2197
|
+
|
|
2198
|
+
try:
|
|
2199
|
+
timeout = aiohttp.ClientTimeout(total=config.shared.timeout)
|
|
2200
|
+
async with aiohttp.ClientSession(timeout=timeout) as session:
|
|
2201
|
+
async with session.get(url, headers=headers) as response:
|
|
2202
|
+
if response.status == 200:
|
|
2203
|
+
data = await response.json()
|
|
2204
|
+
return {
|
|
2205
|
+
"success": True,
|
|
2206
|
+
"status": data.get("status", "unknown"),
|
|
2207
|
+
"version": data.get("version", "unknown"),
|
|
2208
|
+
}
|
|
2209
|
+
else:
|
|
2210
|
+
return {
|
|
2211
|
+
"success": False,
|
|
2212
|
+
"error": f"Server returned status {response.status}",
|
|
2213
|
+
}
|
|
2214
|
+
except aiohttp.ClientError as e:
|
|
2215
|
+
return {"success": False, "error": str(e)}
|
|
2216
|
+
except Exception as e:
|
|
2217
|
+
return {"success": False, "error": str(e)}
|
|
2218
|
+
|
|
2219
|
+
typer.echo(f"Testing connection to {config.shared.server_url}...")
|
|
2220
|
+
result = asyncio.run(_test())
|
|
2221
|
+
|
|
2222
|
+
if result["success"]:
|
|
2223
|
+
typer.secho("[OK] Connection successful!", fg=typer.colors.GREEN)
|
|
2224
|
+
typer.echo(f" Server status: {result['status']}")
|
|
2225
|
+
typer.echo(f" Server version: {result['version']}")
|
|
2226
|
+
else:
|
|
2227
|
+
typer.secho("[FAILED] Connection failed!", fg=typer.colors.RED)
|
|
2228
|
+
typer.echo(f" Error: {result['error']}")
|
|
2229
|
+
raise typer.Exit(1)
|
|
2230
|
+
|
|
2231
|
+
|
|
2232
|
+
@shared_app.command("sync")
|
|
2233
|
+
def shared_sync(
|
|
2234
|
+
direction: Annotated[
|
|
2235
|
+
str, typer.Option("--direction", "-d", help="Sync direction: push, pull, or both")
|
|
2236
|
+
] = "both",
|
|
2237
|
+
json_output: Annotated[bool, typer.Option("--json", "-j", help="Output as JSON")] = False,
|
|
2238
|
+
) -> None:
|
|
2239
|
+
"""Manually sync local brain with remote server.
|
|
2240
|
+
|
|
2241
|
+
Directions:
|
|
2242
|
+
push - Upload local brain to server
|
|
2243
|
+
pull - Download brain from server to local storage
|
|
2244
|
+
both - Full bidirectional sync (default)
|
|
2245
|
+
|
|
2246
|
+
Examples:
|
|
2247
|
+
nmem shared sync
|
|
2248
|
+
nmem shared sync --direction push
|
|
2249
|
+
nmem shared sync --direction pull
|
|
2250
|
+
"""
|
|
2251
|
+
config = get_config()
|
|
2252
|
+
|
|
2253
|
+
if not config.shared.server_url:
|
|
2254
|
+
typer.secho(
|
|
2255
|
+
"No server URL configured. Use 'nmem shared enable <url>' first.", fg=typer.colors.RED
|
|
2256
|
+
)
|
|
2257
|
+
raise typer.Exit(1)
|
|
2258
|
+
|
|
2259
|
+
async def _sync() -> dict:
|
|
2260
|
+
from neural_memory.storage.shared_store import SharedStorage
|
|
2261
|
+
|
|
2262
|
+
# Load local storage
|
|
2263
|
+
local_storage = await PersistentStorage.load(config.get_brain_path())
|
|
2264
|
+
|
|
2265
|
+
# Connect to remote
|
|
2266
|
+
remote = SharedStorage(
|
|
2267
|
+
server_url=config.shared.server_url,
|
|
2268
|
+
brain_id=config.current_brain,
|
|
2269
|
+
api_key=config.shared.api_key,
|
|
2270
|
+
timeout=config.shared.timeout,
|
|
2271
|
+
)
|
|
2272
|
+
await remote.connect()
|
|
2273
|
+
|
|
2274
|
+
result = {"direction": direction, "success": True}
|
|
2275
|
+
|
|
2276
|
+
try:
|
|
2277
|
+
if direction in ("push", "both"):
|
|
2278
|
+
# Export local and push to remote
|
|
2279
|
+
snapshot = await local_storage.export_brain(local_storage._current_brain_id)
|
|
2280
|
+
await remote.import_brain(snapshot, config.current_brain)
|
|
2281
|
+
result["pushed"] = True
|
|
2282
|
+
result["neurons_pushed"] = len(snapshot.neurons)
|
|
2283
|
+
result["synapses_pushed"] = len(snapshot.synapses)
|
|
2284
|
+
result["fibers_pushed"] = len(snapshot.fibers)
|
|
2285
|
+
|
|
2286
|
+
if direction in ("pull", "both"):
|
|
2287
|
+
# Pull from remote and import locally
|
|
2288
|
+
try:
|
|
2289
|
+
snapshot = await remote.export_brain(config.current_brain)
|
|
2290
|
+
await local_storage.import_brain(snapshot, local_storage._current_brain_id)
|
|
2291
|
+
await local_storage.save()
|
|
2292
|
+
result["pulled"] = True
|
|
2293
|
+
result["neurons_pulled"] = len(snapshot.neurons)
|
|
2294
|
+
result["synapses_pulled"] = len(snapshot.synapses)
|
|
2295
|
+
result["fibers_pulled"] = len(snapshot.fibers)
|
|
2296
|
+
except Exception as e:
|
|
2297
|
+
if direction == "pull":
|
|
2298
|
+
raise
|
|
2299
|
+
# For "both", pulling may fail if brain doesn't exist on server
|
|
2300
|
+
result["pulled"] = False
|
|
2301
|
+
result["pull_error"] = str(e)
|
|
2302
|
+
|
|
2303
|
+
finally:
|
|
2304
|
+
await remote.disconnect()
|
|
2305
|
+
|
|
2306
|
+
return result
|
|
2307
|
+
|
|
2308
|
+
typer.echo(f"Syncing brain '{config.current_brain}' with {config.shared.server_url}...")
|
|
2309
|
+
result = asyncio.run(_sync())
|
|
2310
|
+
|
|
2311
|
+
if json_output:
|
|
2312
|
+
typer.echo(json.dumps(result, indent=2))
|
|
2313
|
+
else:
|
|
2314
|
+
if result.get("pushed"):
|
|
2315
|
+
typer.secho(
|
|
2316
|
+
f"[PUSHED] {result.get('neurons_pushed', 0)} neurons, "
|
|
2317
|
+
f"{result.get('synapses_pushed', 0)} synapses, "
|
|
2318
|
+
f"{result.get('fibers_pushed', 0)} fibers",
|
|
2319
|
+
fg=typer.colors.GREEN,
|
|
2320
|
+
)
|
|
2321
|
+
|
|
2322
|
+
if result.get("pulled"):
|
|
2323
|
+
typer.secho(
|
|
2324
|
+
f"[PULLED] {result.get('neurons_pulled', 0)} neurons, "
|
|
2325
|
+
f"{result.get('synapses_pulled', 0)} synapses, "
|
|
2326
|
+
f"{result.get('fibers_pulled', 0)} fibers",
|
|
2327
|
+
fg=typer.colors.GREEN,
|
|
2328
|
+
)
|
|
2329
|
+
elif result.get("pull_error"):
|
|
2330
|
+
typer.secho(f"[PULL FAILED] {result['pull_error']}", fg=typer.colors.YELLOW)
|
|
2331
|
+
|
|
2332
|
+
typer.secho("\nSync complete!", fg=typer.colors.GREEN)
|
|
2333
|
+
|
|
2334
|
+
|
|
2335
|
+
# =============================================================================
|
|
2336
|
+
# Utility Commands
|
|
2337
|
+
# =============================================================================
|
|
2338
|
+
|
|
2339
|
+
|
|
2340
|
+
@app.command()
|
|
2341
|
+
def mcp() -> None:
|
|
2342
|
+
"""Run the MCP (Model Context Protocol) server.
|
|
2343
|
+
|
|
2344
|
+
This starts an MCP server over stdio that exposes NeuralMemory tools
|
|
2345
|
+
to Claude Code, Claude Desktop, and other MCP-compatible clients.
|
|
2346
|
+
|
|
2347
|
+
Available tools:
|
|
2348
|
+
nmem_remember - Store a memory
|
|
2349
|
+
nmem_recall - Query memories
|
|
2350
|
+
nmem_context - Get recent context
|
|
2351
|
+
nmem_todo - Add a TODO memory
|
|
2352
|
+
nmem_stats - Get brain statistics
|
|
2353
|
+
|
|
2354
|
+
Examples:
|
|
2355
|
+
nmem mcp # Run MCP server
|
|
2356
|
+
python -m neural_memory.mcp # Alternative way
|
|
2357
|
+
|
|
2358
|
+
Configuration for Claude Code (~/.claude/mcp_servers.json):
|
|
2359
|
+
{
|
|
2360
|
+
"neural-memory": {
|
|
2361
|
+
"command": "nmem",
|
|
2362
|
+
"args": ["mcp"]
|
|
2363
|
+
}
|
|
2364
|
+
}
|
|
2365
|
+
"""
|
|
2366
|
+
from neural_memory.mcp.server import main as mcp_main
|
|
2367
|
+
|
|
2368
|
+
mcp_main()
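# --- Illustrative sketch (not part of the package) --------------------------
# Writing the Claude Code registration shown in the docstring above. Merging
# into an existing mcp_servers.json (rather than overwriting it) is an
# assumption about how that file is managed.
import json
from pathlib import Path

cfg_path = Path.home() / ".claude" / "mcp_servers.json"
servers = json.loads(cfg_path.read_text()) if cfg_path.exists() else {}
servers["neural-memory"] = {"command": "nmem", "args": ["mcp"]}
cfg_path.parent.mkdir(parents=True, exist_ok=True)
cfg_path.write_text(json.dumps(servers, indent=2))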
|
|
2369
|
+
|
|
2370
|
+
|
|
2371
|
+
@app.command()
|
|
2372
|
+
def dashboard() -> None:
|
|
2373
|
+
"""Show a rich dashboard with brain stats and recent activity.
|
|
2374
|
+
|
|
2375
|
+
Displays:
|
|
2376
|
+
- Brain statistics (neurons, synapses, fibers)
|
|
2377
|
+
- Memory types distribution
|
|
2378
|
+
- Freshness analysis
|
|
2379
|
+
- Recent memories
|
|
2380
|
+
|
|
2381
|
+
Examples:
|
|
2382
|
+
nmem dashboard
|
|
2383
|
+
"""
|
|
2384
|
+
from neural_memory.cli.tui import render_dashboard
|
|
2385
|
+
|
|
2386
|
+
async def _dashboard() -> None:
|
|
2387
|
+
config = get_config()
|
|
2388
|
+
storage = await get_storage(config)
|
|
2389
|
+
await render_dashboard(storage)
|
|
2390
|
+
|
|
2391
|
+
asyncio.run(_dashboard())
|
|
2392
|
+
|
|
2393
|
+
|
|
2394
|
+
@app.command()
|
|
2395
|
+
def ui(
|
|
2396
|
+
memory_type: Annotated[
|
|
2397
|
+
str | None,
|
|
2398
|
+
typer.Option("--type", "-t", help="Filter by memory type"),
|
|
2399
|
+
] = None,
|
|
2400
|
+
search: Annotated[
|
|
2401
|
+
str | None,
|
|
2402
|
+
typer.Option("--search", "-s", help="Search in memory content"),
|
|
2403
|
+
] = None,
|
|
2404
|
+
limit: Annotated[
|
|
2405
|
+
int,
|
|
2406
|
+
typer.Option("--limit", "-n", help="Number of memories to show"),
|
|
2407
|
+
] = 20,
|
|
2408
|
+
) -> None:
|
|
2409
|
+
"""Interactive memory browser with rich formatting.
|
|
2410
|
+
|
|
2411
|
+
Browse memories with color-coded types, priorities, and freshness.
|
|
2412
|
+
|
|
2413
|
+
Examples:
|
|
2414
|
+
nmem ui # Browse all memories
|
|
2415
|
+
nmem ui --type decision # Filter by type
|
|
2416
|
+
nmem ui --search "database" # Search content
|
|
2417
|
+
nmem ui --limit 50 # Show more
|
|
2418
|
+
"""
|
|
2419
|
+
from neural_memory.cli.tui import render_memory_browser
|
|
2420
|
+
|
|
2421
|
+
async def _ui() -> None:
|
|
2422
|
+
config = get_config()
|
|
2423
|
+
storage = await get_storage(config)
|
|
2424
|
+
await render_memory_browser(
|
|
2425
|
+
storage,
|
|
2426
|
+
memory_type=memory_type,
|
|
2427
|
+
limit=limit,
|
|
2428
|
+
search=search,
|
|
2429
|
+
)
|
|
2430
|
+
|
|
2431
|
+
asyncio.run(_ui())
|
|
2432
|
+
|
|
2433
|
+
|
|
2434
|
+
@app.command()
|
|
2435
|
+
def graph(
|
|
2436
|
+
query: Annotated[
|
|
2437
|
+
str | None,
|
|
2438
|
+
typer.Argument(help="Query to find related memories (optional)"),
|
|
2439
|
+
] = None,
|
|
2440
|
+
depth: Annotated[
|
|
2441
|
+
int,
|
|
2442
|
+
typer.Option("--depth", "-d", help="Traversal depth (1-3)"),
|
|
2443
|
+
] = 2,
|
|
2444
|
+
) -> None:
|
|
2445
|
+
"""Visualize neural connections as a tree graph.
|
|
2446
|
+
|
|
2447
|
+
Shows memories and their relationships (caused_by, leads_to, etc.)
|
|
2448
|
+
|
|
2449
|
+
Examples:
|
|
2450
|
+
nmem graph # Show recent memories
|
|
2451
|
+
nmem graph "database" # Graph around query
|
|
2452
|
+
nmem graph "auth" --depth 3 # Deeper traversal
|
|
2453
|
+
"""
|
|
2454
|
+
from neural_memory.cli.tui import render_graph
|
|
2455
|
+
|
|
2456
|
+
async def _graph() -> None:
|
|
2457
|
+
config = get_config()
|
|
2458
|
+
storage = await get_storage(config)
|
|
2459
|
+
await render_graph(storage, query=query, depth=depth)
|
|
2460
|
+
|
|
2461
|
+
asyncio.run(_graph())
|
|
2462
|
+
|
|
2463
|
+
|
|
2464
|
+
@app.command()
|
|
2465
|
+
def init(
|
|
2466
|
+
force: Annotated[
|
|
2467
|
+
bool,
|
|
2468
|
+
typer.Option("--force", "-f", help="Overwrite existing config"),
|
|
2469
|
+
] = False,
|
|
2470
|
+
) -> None:
|
|
2471
|
+
"""Initialize unified config for cross-tool memory sharing.
|
|
2472
|
+
|
|
2473
|
+
This sets up ~/.neuralmemory/ which enables memory sharing between:
|
|
2474
|
+
- CLI (nmem commands)
|
|
2475
|
+
- MCP (Claude Code, Cursor, AntiGravity)
|
|
2476
|
+
- Any other tool using NeuralMemory
|
|
2477
|
+
|
|
2478
|
+
After running this, all tools will share the same brain database.
|
|
2479
|
+
|
|
2480
|
+
Examples:
|
|
2481
|
+
nmem init # Initialize unified config
|
|
2482
|
+
nmem init --force # Overwrite existing config
|
|
2483
|
+
"""
|
|
2484
|
+
from neural_memory.unified_config import UnifiedConfig, get_neuralmemory_dir
|
|
2485
|
+
|
|
2486
|
+
data_dir = get_neuralmemory_dir()
|
|
2487
|
+
config_path = data_dir / "config.toml"
|
|
2488
|
+
|
|
2489
|
+
if config_path.exists() and not force:
|
|
2490
|
+
typer.secho(f"Config already exists at {config_path}", fg=typer.colors.YELLOW)
|
|
2491
|
+
typer.echo("Use --force to overwrite")
|
|
2492
|
+
return
|
|
2493
|
+
|
|
2494
|
+
# Create unified config
|
|
2495
|
+
config = UnifiedConfig(data_dir=data_dir)
|
|
2496
|
+
config.save()
|
|
2497
|
+
|
|
2498
|
+
# Ensure brains directory exists
|
|
2499
|
+
brains_dir = data_dir / "brains"
|
|
2500
|
+
brains_dir.mkdir(parents=True, exist_ok=True)
|
|
2501
|
+
|
|
2502
|
+
typer.secho(f"Initialized NeuralMemory at {data_dir}", fg=typer.colors.GREEN)
|
|
2503
|
+
typer.echo()
|
|
2504
|
+
typer.echo("Directory structure:")
|
|
2505
|
+
typer.echo(f" {data_dir}/")
|
|
2506
|
+
typer.echo(" +-- config.toml # Shared configuration")
|
|
2507
|
+
typer.echo(" +-- brains/")
|
|
2508
|
+
typer.echo(" +-- default.db # SQLite brain database")
|
|
2509
|
+
typer.echo()
|
|
2510
|
+
typer.echo("This enables memory sharing between:")
|
|
2511
|
+
typer.echo(" - CLI: nmem commands")
|
|
2512
|
+
typer.echo(" - MCP: Claude Code, Cursor, AntiGravity")
|
|
2513
|
+
typer.echo()
|
|
2514
|
+
typer.echo("To use a specific brain, set NEURALMEMORY_BRAIN environment variable:")
|
|
2515
|
+
typer.echo(" export NEURALMEMORY_BRAIN=myproject")
|
|
2516
|
+
|
|
2517
|
+
|
|
2518
|
+
# =============================================================================
|
|
2519
|
+
# Quick Shortcut Commands (Natural Language Interface)
|
|
2520
|
+
# =============================================================================
|
|
2521
|
+
|
|
2522
|
+
|
|
2523
|
+
@app.command(name="q")
|
|
2524
|
+
def quick_recall(
|
|
2525
|
+
query: Annotated[str, typer.Argument(help="Query to search")],
|
|
2526
|
+
depth: Annotated[int | None, typer.Option("-d")] = None,
|
|
2527
|
+
) -> None:
|
|
2528
|
+
"""Quick recall - shortcut for 'nmem recall'.
|
|
2529
|
+
|
|
2530
|
+
Examples:
|
|
2531
|
+
nmem q "what's the API format"
|
|
2532
|
+
nmem q "yesterday's work" -d 2
|
|
2533
|
+
"""
|
|
2534
|
+
from neural_memory.engine.retrieval import DepthLevel, ReflexPipeline
|
|
2535
|
+
|
|
2536
|
+
async def _recall() -> None:
|
|
2537
|
+
config = get_config()
|
|
2538
|
+
storage = await get_storage(config)
|
|
2539
|
+
brain = await storage.get_brain(storage._current_brain_id)
|
|
2540
|
+
|
|
2541
|
+
if not brain:
|
|
2542
|
+
typer.secho("No brain configured", fg=typer.colors.RED)
|
|
2543
|
+
return
|
|
2544
|
+
|
|
2545
|
+
pipeline = ReflexPipeline(storage, brain.config)
|
|
2546
|
+
depth_level = DepthLevel(depth) if depth is not None else None
|
|
2547
|
+
result = await pipeline.query(query, depth=depth_level, max_tokens=500)
|
|
2548
|
+
|
|
2549
|
+
if result.confidence < 0.1:
|
|
2550
|
+
typer.secho("No relevant memories found.", fg=typer.colors.YELLOW)
|
|
2551
|
+
return
|
|
2552
|
+
|
|
2553
|
+
typer.echo(result.context)
|
|
2554
|
+
typer.secho(f"\n[confidence: {result.confidence:.2f}]", fg=typer.colors.BRIGHT_BLACK)
|
|
2555
|
+
|
|
2556
|
+
asyncio.run(_recall())
|
|
2557
|
+
|
|
2558
|
+
|
|
2559
|
+
@app.command(name="a")
|
|
2560
|
+
def quick_add(
|
|
2561
|
+
content: Annotated[str, typer.Argument(help="Content to remember")],
|
|
2562
|
+
priority: Annotated[int | None, typer.Option("-p")] = None,
|
|
2563
|
+
) -> None:
|
|
2564
|
+
"""Quick add - shortcut for 'nmem remember' with auto-detect.
|
|
2565
|
+
|
|
2566
|
+
Examples:
|
|
2567
|
+
nmem a "API key format is sk-xxx"
|
|
2568
|
+
nmem a "Always use UTC for timestamps" -p 8
|
|
2569
|
+
nmem a "TODO: Review PR #123"
|
|
2570
|
+
"""
|
|
2571
|
+
from neural_memory.engine.encoder import MemoryEncoder
|
|
2572
|
+
|
|
2573
|
+
async def _add() -> None:
|
|
2574
|
+
config = get_config()
|
|
2575
|
+
storage = await get_storage(config)
|
|
2576
|
+
brain = await storage.get_brain(storage._current_brain_id)
|
|
2577
|
+
|
|
2578
|
+
if not brain:
|
|
2579
|
+
typer.secho("No brain configured", fg=typer.colors.RED)
|
|
2580
|
+
return
|
|
2581
|
+
|
|
2582
|
+
# Auto-detect memory type
|
|
2583
|
+
mem_type = suggest_memory_type(content)
|
|
2584
|
+
mem_priority = Priority.from_int(priority) if priority is not None else Priority.NORMAL
|
|
2585
|
+
|
|
2586
|
+
encoder = MemoryEncoder(storage, brain.config)
|
|
2587
|
+
storage.disable_auto_save()
|
|
2588
|
+
|
|
2589
|
+
result = await encoder.encode(
|
|
2590
|
+
content=content,
|
|
2591
|
+
timestamp=datetime.now(),
|
|
2592
|
+
)
|
|
2593
|
+
|
|
2594
|
+
# Create typed memory
|
|
2595
|
+
typed_mem = TypedMemory.create(
|
|
2596
|
+
fiber_id=result.fiber.id,
|
|
2597
|
+
memory_type=mem_type,
|
|
2598
|
+
priority=mem_priority,
|
|
2599
|
+
)
|
|
2600
|
+
await storage.add_typed_memory(typed_mem)
|
|
2601
|
+
await storage.batch_save()
|
|
2602
|
+
|
|
2603
|
+
typer.secho(f"+ {content[:60]}{'...' if len(content) > 60 else ''}", fg=typer.colors.GREEN)
|
|
2604
|
+
typer.secho(f" [{mem_type.value}]", fg=typer.colors.BRIGHT_BLACK)
|
|
2605
|
+
|
|
2606
|
+
asyncio.run(_add())
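# --- Illustrative sketch (not part of the package) --------------------------
# The auto-detection used by 'nmem a' above: suggest_memory_type() picks a
# MemoryType from free text and Priority.from_int() maps the -p integer. Which
# type each string maps to depends on heuristics in core.memory_types, so no
# expected output is claimed here.
from neural_memory.core.memory_types import Priority, suggest_memory_type

for text in ("TODO: Review PR #123", "Always use UTC for timestamps"):
    print(text, "->", suggest_memory_type(text).value)
print("priority 8 ->", Priority.from_int(8))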
|
|
2607
|
+
|
|
2608
|
+
|
|
2609
|
+
@app.command(name="last")
|
|
2610
|
+
def show_last(
|
|
2611
|
+
count: Annotated[int, typer.Option("-n", help="Number of memories to show")] = 5,
|
|
2612
|
+
) -> None:
|
|
2613
|
+
"""Show last N memories - quick view of recent activity.
|
|
2614
|
+
|
|
2615
|
+
Examples:
|
|
2616
|
+
nmem last # Show last 5 memories
|
|
2617
|
+
nmem last -n 10 # Show last 10 memories
|
|
2618
|
+
"""
|
|
2619
|
+
from neural_memory.safety.freshness import evaluate_freshness, format_age
|
|
2620
|
+
|
|
2621
|
+
async def _last() -> None:
|
|
2622
|
+
config = get_config()
|
|
2623
|
+
storage = await get_storage(config)
|
|
2624
|
+
|
|
2625
|
+
fibers = await storage.get_fibers(limit=count)
|
|
2626
|
+
|
|
2627
|
+
if not fibers:
|
|
2628
|
+
typer.secho("No memories found.", fg=typer.colors.YELLOW)
|
|
2629
|
+
return
|
|
2630
|
+
|
|
2631
|
+
for i, fiber in enumerate(fibers, 1):
|
|
2632
|
+
content = fiber.summary or ""
|
|
2633
|
+
if not content and fiber.anchor_neuron_id:
|
|
2634
|
+
anchor = await storage.get_neuron(fiber.anchor_neuron_id)
|
|
2635
|
+
if anchor:
|
|
2636
|
+
content = anchor.content
|
|
2637
|
+
|
|
2638
|
+
display = content[:70] + "..." if len(content) > 70 else content
|
|
2639
|
+
freshness = evaluate_freshness(fiber.created_at)
|
|
2640
|
+
|
|
2641
|
+
typer.echo(f"{i}. {display}")
|
|
2642
|
+
typer.secho(f" [{format_age(freshness.age_days)}]", fg=typer.colors.BRIGHT_BLACK)
|
|
2643
|
+
|
|
2644
|
+
asyncio.run(_last())
|
|
2645
|
+
|
|
2646
|
+
|
|
2647
|
+
@app.command(name="today")
|
|
2648
|
+
def show_today() -> None:
|
|
2649
|
+
"""Show today's memories.
|
|
2650
|
+
|
|
2651
|
+
Examples:
|
|
2652
|
+
nmem today
|
|
2653
|
+
"""
|
|
2654
|
+
|
|
2655
|
+
async def _today() -> None:
|
|
2656
|
+
config = get_config()
|
|
2657
|
+
storage = await get_storage(config)
|
|
2658
|
+
|
|
2659
|
+
# Get recent fibers and filter for today
|
|
2660
|
+
fibers = await storage.get_fibers(limit=100)
|
|
2661
|
+
today = datetime.now().date()
|
|
2662
|
+
today_fibers = [f for f in fibers if f.created_at.date() == today]
|
|
2663
|
+
|
|
2664
|
+
if not today_fibers:
|
|
2665
|
+
typer.secho("No memories from today.", fg=typer.colors.YELLOW)
|
|
2666
|
+
return
|
|
2667
|
+
|
|
2668
|
+
typer.secho(
|
|
2669
|
+
f"Today ({today.strftime('%Y-%m-%d')}) - {len(today_fibers)} memories:\n",
|
|
2670
|
+
fg=typer.colors.CYAN,
|
|
2671
|
+
)
|
|
2672
|
+
|
|
2673
|
+
for fiber in today_fibers:
|
|
2674
|
+
content = fiber.summary or ""
|
|
2675
|
+
if not content and fiber.anchor_neuron_id:
|
|
2676
|
+
anchor = await storage.get_neuron(fiber.anchor_neuron_id)
|
|
2677
|
+
if anchor:
|
|
2678
|
+
content = anchor.content
|
|
2679
|
+
|
|
2680
|
+
display = content[:65] + "..." if len(content) > 65 else content
|
|
2681
|
+
time_str = fiber.created_at.strftime("%H:%M")
|
|
2682
|
+
|
|
2683
|
+
typer.echo(f" {time_str} {display}")
|
|
2684
|
+
|
|
2685
|
+
asyncio.run(_today())
|
|
2686
|
+
|
|
2687
|
+
|
|
2688
|
+
@app.command()
|
|
2689
|
+
def version() -> None:
|
|
2690
|
+
"""Show version information."""
|
|
2691
|
+
from neural_memory import __version__
|
|
2692
|
+
|
|
2693
|
+
typer.echo(f"neural-memory v{__version__}")
|
|
2694
|
+
|
|
2695
|
+
|
|
2696
|
+
def main() -> None:
|
|
2697
|
+
"""Main entry point."""
|
|
2698
|
+
app()
|
|
2699
|
+
|
|
2700
|
+
|
|
2701
|
+
if __name__ == "__main__":
|
|
2702
|
+
main()
|