gnosisllm-knowledge 0.2.0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. gnosisllm_knowledge/__init__.py +91 -39
  2. gnosisllm_knowledge/api/__init__.py +3 -2
  3. gnosisllm_knowledge/api/knowledge.py +287 -7
  4. gnosisllm_knowledge/api/memory.py +966 -0
  5. gnosisllm_knowledge/backends/__init__.py +14 -5
  6. gnosisllm_knowledge/backends/opensearch/agentic.py +341 -39
  7. gnosisllm_knowledge/backends/opensearch/config.py +49 -28
  8. gnosisllm_knowledge/backends/opensearch/indexer.py +1 -0
  9. gnosisllm_knowledge/backends/opensearch/mappings.py +2 -1
  10. gnosisllm_knowledge/backends/opensearch/memory/__init__.py +12 -0
  11. gnosisllm_knowledge/backends/opensearch/memory/client.py +1380 -0
  12. gnosisllm_knowledge/backends/opensearch/memory/config.py +127 -0
  13. gnosisllm_knowledge/backends/opensearch/memory/setup.py +322 -0
  14. gnosisllm_knowledge/backends/opensearch/searcher.py +235 -0
  15. gnosisllm_knowledge/backends/opensearch/setup.py +308 -148
  16. gnosisllm_knowledge/cli/app.py +378 -12
  17. gnosisllm_knowledge/cli/commands/agentic.py +11 -0
  18. gnosisllm_knowledge/cli/commands/memory.py +723 -0
  19. gnosisllm_knowledge/cli/commands/setup.py +24 -22
  20. gnosisllm_knowledge/cli/display/service.py +43 -0
  21. gnosisllm_knowledge/cli/utils/config.py +58 -0
  22. gnosisllm_knowledge/core/domain/__init__.py +41 -0
  23. gnosisllm_knowledge/core/domain/document.py +5 -0
  24. gnosisllm_knowledge/core/domain/memory.py +440 -0
  25. gnosisllm_knowledge/core/domain/result.py +11 -3
  26. gnosisllm_knowledge/core/domain/search.py +2 -0
  27. gnosisllm_knowledge/core/events/types.py +76 -0
  28. gnosisllm_knowledge/core/exceptions.py +134 -0
  29. gnosisllm_knowledge/core/interfaces/__init__.py +17 -0
  30. gnosisllm_knowledge/core/interfaces/memory.py +524 -0
  31. gnosisllm_knowledge/core/interfaces/streaming.py +127 -0
  32. gnosisllm_knowledge/core/streaming/__init__.py +36 -0
  33. gnosisllm_knowledge/core/streaming/pipeline.py +228 -0
  34. gnosisllm_knowledge/loaders/base.py +3 -4
  35. gnosisllm_knowledge/loaders/sitemap.py +129 -1
  36. gnosisllm_knowledge/loaders/sitemap_streaming.py +258 -0
  37. gnosisllm_knowledge/services/indexing.py +67 -75
  38. gnosisllm_knowledge/services/search.py +47 -11
  39. gnosisllm_knowledge/services/streaming_pipeline.py +302 -0
  40. {gnosisllm_knowledge-0.2.0.dist-info → gnosisllm_knowledge-0.3.0.dist-info}/METADATA +44 -1
  41. gnosisllm_knowledge-0.3.0.dist-info/RECORD +77 -0
  42. gnosisllm_knowledge-0.2.0.dist-info/RECORD +0 -64
  43. {gnosisllm_knowledge-0.2.0.dist-info → gnosisllm_knowledge-0.3.0.dist-info}/WHEEL +0 -0
  44. {gnosisllm_knowledge-0.2.0.dist-info → gnosisllm_knowledge-0.3.0.dist-info}/entry_points.txt +0 -0
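
The bulk of this release is the new Agentic Memory subsystem; the full diff of gnosisllm_knowledge/cli/commands/memory.py (entry 18 above) follows. As a quick orientation, below is a hedged Python sketch of the two JSON input files those CLI commands read: the messages.json layout is the example that memory_store_command prints, and the strategies file layout is what container_create_command parses from --config-file. The exact enum string values accepted for "type" are an assumption here.

# Sketch only: input files for the new `memory` CLI commands shown in the diff below.
import json

messages = {
    "messages": [
        {"role": "user", "content": "Hello, I'm Alice"},
        {"role": "assistant", "content": "Hello Alice!"},
    ]
}

strategies = {
    "strategies": [
        # "type" values assumed to match MemoryStrategy member values
        {"type": "SEMANTIC", "namespace": ["user_id"]},
        {"type": "SUMMARY", "namespace": ["session_id"]},
    ]
}

with open("messages.json", "w") as f:
    json.dump(messages, f, indent=2)
with open("strategies.json", "w") as f:
    json.dump(strategies, f, indent=2)
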
@@ -0,0 +1,723 @@
+ """Memory CLI commands for Agentic Memory management.
+
+ Commands:
+ - setup: Configure LLM and embedding models for memory
+ - container create/list/delete: Manage memory containers
+ - store: Store messages in memory
+ - recall: Search long-term memory
+ - stats: Show container statistics
+ - session list: List sessions in a container
+ """
+
+ from __future__ import annotations
+
+ import json
+ import sys
+ from pathlib import Path
+ from typing import TYPE_CHECKING, Any
+
+ from gnosisllm_knowledge.api.memory import Memory
+ from gnosisllm_knowledge.backends.opensearch.memory.config import MemoryConfig
+ from gnosisllm_knowledge.backends.opensearch.memory.setup import MemorySetup
+ from gnosisllm_knowledge.cli.display.service import RichDisplayService, StepProgress
+ from gnosisllm_knowledge.cli.utils.config import CliConfig
+ from gnosisllm_knowledge.core.domain.memory import (
+     MemoryStrategy,
+     MemoryType,
+     Message,
+     StrategyConfig,
+ )
+
+ if TYPE_CHECKING:
+     pass
+
+
+ def _create_memory_config(cli_config: CliConfig) -> MemoryConfig:
+     """Create MemoryConfig from CLI config."""
+     return MemoryConfig(
+         host=cli_config.opensearch_host,
+         port=cli_config.opensearch_port,
+         username=cli_config.opensearch_username,
+         password=cli_config.opensearch_password,
+         use_ssl=cli_config.opensearch_use_ssl,
+         verify_certs=cli_config.opensearch_verify_certs,
+         llm_model_id=cli_config.memory_llm_model_id,
+         embedding_model_id=cli_config.memory_embedding_model_id,
+         openai_api_key=cli_config.openai_api_key,
+         llm_model=cli_config.memory_llm_model,
+         embedding_model=cli_config.memory_embedding_model,
+         embedding_dimension=cli_config.memory_embedding_dimension,
+     )
+
+
+ # === SETUP COMMAND ===
+
+
+ async def memory_setup_command(
+     display: RichDisplayService,
+     openai_key: str | None = None,
+     llm_model: str = "gpt-4o",
+     embedding_model: str = "text-embedding-3-small",
+ ) -> None:
+     """Setup OpenSearch for Agentic Memory.
+
+     Creates the required LLM and embedding connectors and models
+     for Agentic Memory to work.
+
+     Args:
+         display: Display service for output.
+         openai_key: OpenAI API key (overrides env).
+         llm_model: LLM model name for fact extraction.
+         embedding_model: Embedding model name.
+     """
+     cli_config = CliConfig.from_env()
+
+     # Use provided key or fall back to env
+     api_key = openai_key or cli_config.openai_api_key
+
+     if not api_key:
+         display.format_error_with_suggestion(
+             error="OpenAI API key is required for memory setup.",
+             suggestion="Provide --openai-key or set OPENAI_API_KEY environment variable.",
+             command="export OPENAI_API_KEY=sk-...",
+         )
+         sys.exit(1)
+
+     display.header(
+         "GnosisLLM Memory Setup",
+         "Configuring Agentic Memory connectors and models",
+     )
+
+     # Show configuration
+     display.table(
+         "Configuration",
+         [
+             ("OpenSearch", f"{cli_config.opensearch_host}:{cli_config.opensearch_port}"),
+             ("LLM Model", llm_model),
+             ("Embedding Model", embedding_model),
+         ],
+     )
+     display.newline()
+
+     # Create memory config
+     config = MemoryConfig(
+         host=cli_config.opensearch_host,
+         port=cli_config.opensearch_port,
+         username=cli_config.opensearch_username,
+         password=cli_config.opensearch_password,
+         use_ssl=cli_config.opensearch_use_ssl,
+         verify_certs=cli_config.opensearch_verify_certs,
+         openai_api_key=api_key,
+         llm_model=llm_model,
+         embedding_model=embedding_model,
+     )
+
+     setup = MemorySetup(config)
+
+     # Build progress steps
+     steps = [
+         StepProgress("llm_connector", "Create LLM connector for fact extraction"),
+         StepProgress("llm_model", "Deploy LLM model"),
+         StepProgress("embed_connector", "Create embedding connector"),
+         StepProgress("embed_model", "Deploy embedding model"),
+     ]
+     progress = display.progress(steps)
+
+     results: dict[str, str] = {}
+
+     try:
+         # LLM setup (connector + model)
+         progress.update(0, "running")
+         progress.update(1, "running")
+         llm_model_id = await setup.setup_llm_model()
+         results["llm_model_id"] = llm_model_id
+         progress.complete(0)
+         progress.complete(1)
+
+         # Embedding setup (connector + model)
+         progress.update(2, "running")
+         progress.update(3, "running")
+         embedding_model_id = await setup.setup_embedding_model()
+         results["embedding_model_id"] = embedding_model_id
+         progress.complete(2)
+         progress.complete(3)
+
+     except Exception as e:
+         progress.stop()
+         display.newline()
+         display.format_error_with_suggestion(
+             error=f"Memory setup failed: {e}",
+             suggestion="Check OpenSearch connection and ML plugin configuration.",
+         )
+         sys.exit(1)
+
+     progress.stop()
+     display.newline()
+
+     # Success panel with environment variables
+     content = "[bold green]Setup complete![/bold green]\n\n"
+     content += "[bold]Add to your .env file:[/bold]\n\n"
+     content += f" [green]OPENSEARCH_MEMORY_LLM_MODEL_ID={results['llm_model_id']}[/green]\n"
+     content += f" [green]OPENSEARCH_MEMORY_EMBEDDING_MODEL_ID={results['embedding_model_id']}[/green]\n"
+     content += "\n[bold]Next steps:[/bold]\n"
+     content += " [dim]gnosisllm-knowledge memory container create my-memory[/dim]\n"
+     content += " [dim]gnosisllm-knowledge memory store <container-id> -f messages.json[/dim]"
+
+     display.panel(content, title="Memory Setup Complete", style="success")
+
+
+ # === CONTAINER COMMANDS ===
+
+
+ async def container_create_command(
+     display: RichDisplayService,
+     name: str,
+     description: str | None = None,
+     config_file: str | None = None,
+ ) -> None:
+     """Create a new memory container.
+
+     Args:
+         display: Display service for output.
+         name: Container name.
+         description: Optional container description.
+         config_file: Optional JSON config file with strategy configuration.
+     """
+     cli_config = CliConfig.from_env()
+
+     # Validate configuration
+     errors = cli_config.validate_for_memory()
+     if errors:
+         for error in errors:
+             display.error(error)
+         display.newline()
+         display.format_error_with_suggestion(
+             error="Memory is not configured.",
+             suggestion="Run memory setup first.",
+             command="gnosisllm-knowledge memory setup --openai-key sk-...",
+         )
+         sys.exit(1)
+
+     display.header("Create Memory Container", f"Name: {name}")
+
+     # Parse strategy config
+     strategy_configs: list[StrategyConfig] = []
+
+     if config_file:
+         try:
+             with open(config_file) as f:
+                 config_data = json.load(f)
+             for s in config_data.get("strategies", []):
+                 strategy_configs.append(
+                     StrategyConfig(
+                         type=MemoryStrategy(s["type"]),
+                         namespace=s["namespace"],
+                     )
+                 )
+             display.info(f"Loaded {len(strategy_configs)} strategies from {config_file}")
+         except Exception as e:
+             display.error(f"Failed to load config file: {e}")
+             sys.exit(1)
+     else:
+         # Default strategy configuration
+         strategy_configs = [
+             StrategyConfig(type=MemoryStrategy.SEMANTIC, namespace=["user_id"]),
+             StrategyConfig(type=MemoryStrategy.USER_PREFERENCE, namespace=["user_id"]),
+             StrategyConfig(type=MemoryStrategy.SUMMARY, namespace=["session_id"]),
+         ]
+         display.info("[dim]Using default strategy configuration:[/dim]")
+         display.console.print(" [cyan]SEMANTIC[/cyan], [cyan]USER_PREFERENCE[/cyan] -> scoped to user_id")
+         display.console.print(" [cyan]SUMMARY[/cyan] -> scoped to session_id")
+     display.newline()
+
+     # Create container
+     memory = Memory.from_config(_create_memory_config(cli_config))
+
+     with display.loading_spinner("Creating container..."):
+         try:
+             container = await memory.create_container(
+                 name=name,
+                 description=description,
+                 strategies=strategy_configs,
+             )
+         except Exception as e:
+             display.error(f"Failed to create container: {e}")
+             sys.exit(1)
+
+     display.newline()
+     display.success(f"Container created: [cyan]{container.id}[/cyan]")
+     display.console.print(f" Name: {container.name}")
+     display.console.print(f" Strategies: {', '.join(s.value for s in container.strategies)}")
+     display.newline()
+     display.info(f"[dim]Use container ID for store/recall operations:[/dim]")
+     display.console.print(f" gnosisllm-knowledge memory store {container.id} -f messages.json")
+
+
+ async def container_list_command(
+     display: RichDisplayService,
+     json_output: bool = False,
+ ) -> None:
+     """List all memory containers.
+
+     Args:
+         display: Display service for output.
+         json_output: Output as JSON.
+     """
+     cli_config = CliConfig.from_env()
+     memory = Memory.from_config(_create_memory_config(cli_config))
+
+     with display.loading_spinner("Fetching containers..."):
+         try:
+             containers = await memory.list_containers()
+         except Exception as e:
+             display.error(f"Failed to list containers: {e}")
+             sys.exit(1)
+
+     if json_output:
+         output = [
+             {
+                 "id": c.id,
+                 "name": c.name,
+                 "description": c.description,
+                 "strategies": [s.value for s in c.strategies],
+                 "created_at": c.created_at.isoformat() if c.created_at else None,
+             }
+             for c in containers
+         ]
+         display.json_output({"containers": output, "total": len(containers)})
+         return
+
+     if not containers:
+         display.warning("No containers found.")
+         display.newline()
+         display.info("Create a container with:")
+         display.console.print(" gnosisllm-knowledge memory container create my-memory")
+         return
+
+     display.header("Memory Containers", f"{len(containers)} containers")
+
+     rows = []
+     for c in containers:
+         strategies = ", ".join(s.value for s in c.strategies)
+         created = c.created_at.strftime("%Y-%m-%d") if c.created_at else "-"
+         rows.append((c.id[:12] + "...", c.name, strategies, created))
+
+     display.table(
+         "Containers",
+         rows,
+         headers=["ID", "Name", "Strategies", "Created"],
+     )
+
+
+ async def container_delete_command(
+     display: RichDisplayService,
+     container_id: str,
+     force: bool = False,
+ ) -> None:
+     """Delete a memory container.
+
+     Args:
+         display: Display service for output.
+         container_id: Container ID to delete.
+         force: Skip confirmation prompt.
+     """
+     cli_config = CliConfig.from_env()
+     memory = Memory.from_config(_create_memory_config(cli_config))
+
+     if not force:
+         confirmed = display.confirm(
+             f"[yellow]Delete container {container_id[:12]}...?[/yellow] This cannot be undone."
+         )
+         if not confirmed:
+             display.info("Cancelled.")
+             return
+
+     with display.loading_spinner("Deleting container..."):
+         try:
+             deleted = await memory.delete_container(container_id)
+         except Exception as e:
+             display.error(f"Failed to delete container: {e}")
+             sys.exit(1)
+
+     if deleted:
+         display.success(f"Container deleted: {container_id}")
+     else:
+         display.warning(f"Container not found: {container_id}")
+
+
+ # === STORE COMMAND ===
+
+
+ async def memory_store_command(
+     display: RichDisplayService,
+     container_id: str,
+     file: str | None = None,
+     user_id: str | None = None,
+     session_id: str | None = None,
+     infer: bool = True,
+     json_output: bool = False,
+ ) -> None:
+     """Store conversation in memory.
+
+     Args:
+         display: Display service for output.
+         container_id: Target container ID.
+         file: JSON file with messages.
+         user_id: User ID for namespace.
+         session_id: Session ID for namespace.
+         infer: Enable fact extraction (default: True).
+         json_output: Output as JSON.
+     """
+     if not file:
+         display.error("Provide --file with messages to store.")
+         display.newline()
+         display.info("Example messages.json:")
+         display.console.print(""" {
+     "messages": [
+         {"role": "user", "content": "Hello, I'm Alice"},
+         {"role": "assistant", "content": "Hello Alice!"}
+     ]
+ }""")
+         sys.exit(1)
+
+     cli_config = CliConfig.from_env()
+     memory = Memory.from_config(_create_memory_config(cli_config))
+
+     # Load messages from file
+     try:
+         with open(file) as f:
+             data = json.load(f)
+         messages = [Message(**m) for m in data.get("messages", [])]
+     except Exception as e:
+         display.error(f"Failed to load messages: {e}")
+         sys.exit(1)
+
+     if not messages:
+         display.error("No messages found in file.")
+         sys.exit(1)
+
+     if not json_output:
+         display.header("Store Memory", f"Container: {container_id[:12]}...")
+         display.info(f"Messages: {len(messages)}")
+         display.info(f"User ID: {user_id or '[dim]not set[/dim]'}")
+         display.info(f"Session ID: {session_id or '[dim]not set[/dim]'}")
+         display.info(f"Fact extraction: {'enabled' if infer else 'disabled'}")
+         display.newline()
+
+     with display.loading_spinner("Storing messages..."):
+         try:
+             result = await memory.store(
+                 container_id=container_id,
+                 messages=messages,
+                 user_id=user_id,
+                 session_id=session_id,
+                 infer=infer,
+             )
+         except Exception as e:
+             if json_output:
+                 print(json.dumps({"error": str(e)}))
+             else:
+                 display.error(f"Failed to store messages: {e}")
+             sys.exit(1)
+
+     if json_output:
+         output = {
+             "working_memory_id": result.working_memory_id,
+             "session_id": result.session_id,
+             "long_term_count": result.long_term_count,
+             "infer": infer,
+         }
+         print(json.dumps(output, indent=2))
+         return
+
+     display.success("Messages stored!")
+     display.console.print(f" Working memory ID: [cyan]{result.working_memory_id}[/cyan]")
+     if result.session_id:
+         display.console.print(f" Session ID: [cyan]{result.session_id}[/cyan]")
+     if infer:
+         display.newline()
+         display.info("[dim]Fact extraction is running asynchronously...[/dim]")
+         display.info("[dim]Use 'memory recall' to search extracted facts.[/dim]")
+
+
+ # === RECALL COMMAND ===
+
+
+ async def memory_recall_command(
+     display: RichDisplayService,
+     container_id: str,
+     query: str,
+     user_id: str | None = None,
+     session_id: str | None = None,
+     limit: int = 10,
+     json_output: bool = False,
+ ) -> None:
+     """Search long-term memory.
+
+     Args:
+         display: Display service for output.
+         container_id: Container ID to search.
+         query: Search query text.
+         user_id: Filter by user ID.
+         session_id: Filter by session ID.
+         limit: Maximum results.
+         json_output: Output as JSON.
+     """
+     cli_config = CliConfig.from_env()
+     memory = Memory.from_config(_create_memory_config(cli_config))
+
+     if not json_output:
+         display.header("Recall Memory", f"Query: {query[:50]}...")
+
+     with display.loading_spinner("Searching memories..."):
+         try:
+             result = await memory.recall(
+                 container_id=container_id,
+                 query=query,
+                 user_id=user_id,
+                 session_id=session_id,
+                 limit=limit,
+             )
+         except Exception as e:
+             if json_output:
+                 print(json.dumps({"error": str(e)}))
+             else:
+                 display.error(f"Failed to search memories: {e}")
+             sys.exit(1)
+
+     if json_output:
+         output = {
+             "query": result.query,
+             "total": result.total,
+             "took_ms": result.took_ms,
+             "items": [
+                 {
+                     "id": e.id,
+                     "content": e.content,
+                     "strategy": e.strategy.value if e.strategy else None,
+                     "score": e.score,
+                     "namespace": e.namespace,
+                     "created_at": e.created_at.isoformat() if e.created_at else None,
+                 }
+                 for e in result.items
+             ],
+         }
+         print(json.dumps(output, indent=2, default=str))
+         return
+
+     display.console.print(f"[bold]Found {result.total} memories[/bold] ({result.took_ms}ms)")
+     display.newline()
+
+     if not result.items:
+         display.warning("No memories found matching your query.")
+         return
+
+     for i, entry in enumerate(result.items, 1):
+         strategy_tag = f"[cyan][{entry.strategy.value}][/cyan]" if entry.strategy else ""
+         score_pct = entry.score * 100 if entry.score <= 1 else entry.score
+         display.console.print(f"{i}. {strategy_tag} {entry.content}")
+         display.console.print(f" [dim]Score: {score_pct:.1f}%[/dim]")
+         if entry.namespace:
+             ns_str = ", ".join(f"{k}={v}" for k, v in entry.namespace.items())
+             display.console.print(f" [dim]Namespace: {ns_str}[/dim]")
+
+
+ # === STATS COMMAND ===
+
+
+ async def memory_stats_command(
+     display: RichDisplayService,
+     container_id: str,
+     json_output: bool = False,
+ ) -> None:
+     """Show container statistics.
+
+     Args:
+         display: Display service for output.
+         container_id: Container ID to get stats for.
+         json_output: Output as JSON.
+     """
+     cli_config = CliConfig.from_env()
+     memory = Memory.from_config(_create_memory_config(cli_config))
+
+     with display.loading_spinner("Fetching statistics..."):
+         try:
+             stats = await memory.get_stats(container_id)
+         except Exception as e:
+             if json_output:
+                 print(json.dumps({"error": str(e)}))
+             else:
+                 display.error(f"Failed to get stats: {e}")
+             sys.exit(1)
+
+     if json_output:
+         output = {
+             "container_id": stats.container_id,
+             "container_name": stats.container_name,
+             "working_memory_count": stats.working_memory_count,
+             "long_term_memory_count": stats.long_term_memory_count,
+             "session_count": stats.session_count,
+             "strategies_breakdown": {
+                 k.value: v for k, v in stats.strategies_breakdown.items()
+             } if stats.strategies_breakdown else {},
+             "storage_size_bytes": stats.storage_size_bytes,
+             "last_updated": stats.last_updated.isoformat() if stats.last_updated else None,
+         }
+         print(json.dumps(output, indent=2))
+         return
+
+     display.header(
+         "Memory Statistics",
+         f"Container: {stats.container_name} ({stats.container_id[:12]}...)",
+     )
+
+     display.table(
+         "Memory Counts",
+         [
+             ("Working Memory", f"{stats.working_memory_count:,} messages"),
+             ("Long-term Memory", f"{stats.long_term_memory_count:,} facts"),
+             ("Sessions", f"{stats.session_count:,}"),
+         ],
+     )
+
+     if stats.strategies_breakdown:
+         display.newline()
+         strategy_rows = [
+             (strategy.value, f"{count:,} facts")
+             for strategy, count in stats.strategies_breakdown.items()
+         ]
+         display.table("Strategy Breakdown", strategy_rows)
+
+
+ # === SESSION COMMANDS ===
+
+
+ async def session_list_command(
+     display: RichDisplayService,
+     container_id: str,
+     user_id: str | None = None,
+     limit: int = 20,
+     json_output: bool = False,
+ ) -> None:
+     """List sessions in a container.
+
+     Args:
+         display: Display service for output.
+         container_id: Container ID.
+         user_id: Filter by user ID.
+         limit: Maximum sessions to return.
+         json_output: Output as JSON.
+     """
+     cli_config = CliConfig.from_env()
+     memory = Memory.from_config(_create_memory_config(cli_config))
+
+     with display.loading_spinner("Fetching sessions..."):
+         try:
+             sessions = await memory.list_sessions(
+                 container_id=container_id,
+                 user_id=user_id,
+                 limit=limit,
+             )
+         except Exception as e:
+             if json_output:
+                 print(json.dumps({"error": str(e)}))
+             else:
+                 display.error(f"Failed to list sessions: {e}")
+             sys.exit(1)
+
+     if json_output:
+         output = {
+             "sessions": [
+                 {
+                     "id": s.id,
+                     "summary": s.summary,
+                     "namespace": s.namespace,
+                     "started_at": s.started_at.isoformat() if s.started_at else None,
+                     "message_count": s.message_count,
+                 }
+                 for s in sessions
+             ],
+             "total": len(sessions),
+         }
+         print(json.dumps(output, indent=2))
+         return
+
+     if not sessions:
+         display.warning("No sessions found.")
+         return
+
+     display.header("Sessions", f"Container: {container_id[:12]}...")
+
+     rows = []
+     for s in sessions:
+         started = s.started_at.strftime("%Y-%m-%d %H:%M") if s.started_at else "-"
+         summary = (s.summary[:50] + "...") if s.summary and len(s.summary) > 50 else (s.summary or "-")
+         rows.append((s.id[:12] + "...", summary, started))
+
+     display.table("Sessions", rows, headers=["ID", "Summary", "Started"])
+
+
+ # === STATUS COMMAND ===
+
+
+ async def memory_status_command(
+     display: RichDisplayService,
+ ) -> None:
+     """Show memory configuration status."""
+     cli_config = CliConfig.from_env()
+
+     display.header("GnosisLLM Memory Status", "Configuration and health")
+
+     # Configuration status
+     status_rows = []
+
+     # LLM Model
+     if cli_config.memory_llm_model_id:
+         status_rows.append(("LLM Model", "[green]Configured[/green]"))
+         status_rows.append((" ID", f"[dim]{cli_config.memory_llm_model_id}[/dim]"))
+     else:
+         status_rows.append(("LLM Model", "[red]Not configured[/red]"))
+
+     # Embedding Model
+     if cli_config.memory_embedding_model_id:
+         status_rows.append(("Embedding Model", "[green]Configured[/green]"))
+         status_rows.append((" ID", f"[dim]{cli_config.memory_embedding_model_id}[/dim]"))
+     else:
+         status_rows.append(("Embedding Model", "[red]Not configured[/red]"))
+
+     # OpenSearch connection
+     status_rows.append(("OpenSearch", f"{cli_config.opensearch_host}:{cli_config.opensearch_port}"))
+
+     display.table("Memory Configuration", status_rows)
+
+     # Check if setup is needed
+     if not cli_config.memory_llm_model_id or not cli_config.memory_embedding_model_id:
+         display.newline()
+         display.format_error_with_suggestion(
+             error="Memory is not fully configured.",
+             suggestion="Run memory setup to create connectors and models.",
+             command="gnosisllm-knowledge memory setup --openai-key sk-...",
+         )
+         return
+
+     # Try to verify setup
+     display.newline()
+
+     config = _create_memory_config(cli_config)
+     setup = MemorySetup(config)
+
+     with display.loading_spinner("Verifying setup..."):
+         try:
+             status = await setup.verify_setup()
+         except Exception as e:
+             display.warning(f"Could not verify setup: {e}")
+             return
+
+     if status.is_ready:
+         display.success("Memory is ready!")
+         display.console.print(" All models are deployed and responding.")
+     else:
+         display.warning("Memory setup is incomplete:")
+         for check, passed in status.checks.items():
+             icon = "[green]ok[/green]" if passed else "[red]FAIL[/red]"
+             display.console.print(f" {check}: {icon}")
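
The CLI above is a thin wrapper around the new Memory facade (api/memory.py, entry 4 in the file list). The sketch below shows how the same operations might be driven programmatically; it is inferred from the calls visible in this diff (Memory.from_config, create_container, store, recall, and the MemoryConfig fields), while the literal host/port values, the elided model IDs, and the Message(role=..., content=...) constructor are assumptions.

# Sketch only: programmatic use of the Memory API, inferred from the CLI code above.
import asyncio

from gnosisllm_knowledge.api.memory import Memory
from gnosisllm_knowledge.backends.opensearch.memory.config import MemoryConfig
from gnosisllm_knowledge.core.domain.memory import MemoryStrategy, Message, StrategyConfig


async def main() -> None:
    # Mirrors _create_memory_config(); host/port and the "..." model IDs are placeholders.
    config = MemoryConfig(
        host="localhost",
        port=9200,
        use_ssl=False,
        verify_certs=False,
        llm_model_id="...",        # from `memory setup` output (OPENSEARCH_MEMORY_LLM_MODEL_ID)
        embedding_model_id="...",  # from `memory setup` output (OPENSEARCH_MEMORY_EMBEDDING_MODEL_ID)
    )
    memory = Memory.from_config(config)

    # Same default strategies the CLI applies when no --config-file is given.
    container = await memory.create_container(
        name="my-memory",
        strategies=[
            StrategyConfig(type=MemoryStrategy.SEMANTIC, namespace=["user_id"]),
            StrategyConfig(type=MemoryStrategy.SUMMARY, namespace=["session_id"]),
        ],
    )

    # Store a short conversation, then search the extracted long-term facts.
    await memory.store(
        container_id=container.id,
        messages=[Message(role="user", content="Hello, I'm Alice")],  # field names assumed
        user_id="alice",
        infer=True,
    )
    result = await memory.recall(container_id=container.id, query="Who is the user?", user_id="alice", limit=5)
    for entry in result.items:
        print(entry.content)


asyncio.run(main())
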